repo_name | path | copies | size | content | license | hash | line_mean | line_max | alpha_frac | autogenerated |
---|---|---|---|---|---|---|---|---|---|---|
aignas/project_euler | src/completed/solution104.py | 2 | 3071 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Project Euler 104
Question:
The Fibonacci sequence is defined by the recurrence relation:
Fn = Fn−1 + Fn−2, where F1 = 1 and F2 = 1.
It turns out that F541, which contains 113 digits, is the first
Fibonacci number for which the last nine digits are 1-9 pandigital
(contain all the digits 1 to 9, but not necessarily in order). And
F2749, which contains 575 digits, is the first Fibonacci number for
which the first nine digits are 1-9 pandigital.
Given that Fk is the first Fibonacci number for which the first nine
digits AND the last nine digits are 1-9 pandigital, find k.
"""
def fib(stop=0):
"""Fibonacci sequence
>>> [(k + 1, n) for k, n in enumerate(fib(34))]
[(1, 1), (2, 1), (3, 2), (4, 3), (5, 5), (6, 8), (7, 13), (8, 21), (9, 34)]
"""
n = 1
m = 1
yield n
while n <= stop or stop == 0:
yield n
n, m = m, n
n = n + m
def is_pandigital(number):
"""This just checks whether the number is pandigital
>>> is_pandigital(123456789)
True
>>> is_pandigital(123456787)
False
"""
number = str(number)
    return all(i in number for i in '123456789')
def get_last_digits(number, number_of_digits=9):
"""This gets the last numbers of a digit
>>> get_last_digits(12345678987654321)
987654321
>>> get_last_digits(1234567)
1234567
"""
return number % 10**number_of_digits
def get_first_digits(number, number_of_digits=9):
"""This gets the first numbers of a digit
>>> get_first_digits(12345678987654321)
123456789
>>> get_first_digits(1234567)
1234567
"""
return int(str(number)[:number_of_digits])
def example_pandigital_last():
"""F541, which contains 113 digits,
>>> example_pandigital_last()
(541, 113)
"""
for k, n in enumerate(fib()):
if is_pandigital(get_last_digits(n)):
return k + 1, len(str(n))
def example_pandigital_first():
""" F2749, which contains 575 digits
>>> example_pandigital_first()
(2749, 575)
"""
for k, n in enumerate(fib()):
if is_pandigital(get_first_digits(n)):
return k + 1, len(str(n))
def solution():
"""Main solution
>>> solution()
(329468, 68855)
"""
for k, n in enumerate(fib()):
# Skip all the cases which are not front pandigital
if k < 2748:
continue
# If the number is not end-pandigital, we skip the front check
if not is_pandigital(get_last_digits(n)):
continue
if is_pandigital(get_first_digits(n)):
return k + 1, len(str(n))
def main(benchmark=False):
"""Main loop
Sum of the digits.
"""
from timeit import default_timer as timer
start = timer()
answer = solution()
end = timer()
if benchmark:
print("Completed in: {}".format(end - start))
return answer
if __name__ == "__main__":
import doctest
doctest.testmod()
print(main(True))
| gpl-3.0 | 311,591,602,052,016,200 | 22.234848 | 79 | 0.594066 | false |
zenweasel/hendrix | hendrix/resources.py | 1 | 5118 | import os
import sys
import importlib
from twisted.web import resource, static
from twisted.web.wsgi import WSGIResource
import logging
logger = logging.getLogger(__name__)
class HendrixResource(resource.Resource):
"""
    HendrixResource initialises a WSGIResource and stores it as wsgi_resource.
    It also overrides its own getChild method so as to only serve wsgi_resource.
    This means that only the WSGIResource is able to serve dynamic content from
    the root url "/". However, it is still possible to extend the resource tree
    via putChild. This is due to the fact that getChildFromRequest checks for
    children of the resource before handling the dynamic content (through
    getChild). The modified getChild method on HendrixResource also restores
    the request.postpath list to its original state. This is essentially a hack
    to ensure that django always gets the full path.
"""
def __init__(self, reactor, threads, application):
resource.Resource.__init__(self)
self.wsgi_resource = WSGIResource(reactor, threads, application)
def getChild(self, name, request):
"""
        Postpath needs to contain all segments of
        the url; if it is incomplete, then that incomplete url will be passed on
        to the child resource (in this case our wsgi application).
"""
request.prepath = []
request.postpath.insert(0, name)
# re-establishes request.postpath so to contain the entire path
return self.wsgi_resource
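        # Editor's illustrative note (not part of the original source): for a
        # request to /some/app/path, Twisted pops "some" off request.postpath
        # before calling this method as getChild("some", request), leaving
        # request.postpath == ["app", "path"]. The assignments above reset
        # prepath and re-insert the name, so the WSGI resource (and therefore
        # Django) sees the full path ["some", "app", "path"].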
def putNamedChild(self, resource):
"""
putNamedChild takes either an instance of hendrix.contrib.NamedResource
or any resource.Resource with a "namespace" attribute as a means of
allowing application level control of resource namespacing.
"""
try:
path = resource.namespace
self.putChild(path, resource)
except AttributeError, e:
msg = '%r improperly configured. additional_resources instances must have a namespace attribute'%resource
raise AttributeError(msg), None, sys.exc_info()[2]
class NamedResource(resource.Resource):
"""
A resource that can be used to namespace other resources. Expected usage of
this resource in a django application is:
... in myproject.myapp.somemodule ...
NamespacedRes = NamedResource('some-namespace')
NamespacedRes.putChild('namex', SockJSResource(FactoryX...))
NamespacedRes.putChild('namey', SockJSResource(FactoryY...))
... then in settings ...
HENDRIX_CHILD_RESOURCES = (
'myproject.myapp.somemodule.NamespacedRes',
...,
)
"""
def __init__(self, namespace):
resource.Resource.__init__(self)
self.namespace = namespace
def getChild(self, path, request):
"""
By default this resource will yield a ForbiddenResource instance unless
a request is made for a static child i.e. a child added using putChild
"""
# override this method if you want to serve dynamic child resources
return resource.ForbiddenResource("This is a resource namespace.")
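# Editor's illustrative sketch (not part of the original module): a hypothetical
# NamedResource subclass that overrides getChild to serve dynamic children
# instead of the ForbiddenResource default. The class name and payload below
# are assumptions for illustration only.
class GreetingNamespace(NamedResource):
    def getChild(self, path, request):
        # Serve a trivial dynamic child for any path under this namespace.
        return static.Data("hello from %s" % path, "text/plain")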
class MediaResource(static.File):
'''
A simple static service with directory listing disabled
(gives the client a 403 instead of letting them browse
a static directory).
'''
def directoryListing(self):
# Override to forbid directory listing
return resource.ForbiddenResource()
def DjangoStaticResource(path, rel_url='static'):
"""
    takes an app level file dir to find the site root and serves static files
from static
Usage:
[...in app.resource...]
from hendrix.resources import DjangoStaticResource
StaticResource = DjangoStaticResource('/abspath/to/static/folder')
... OR ...
StaticResource = DjangoStaticResource('/abspath/to/static/folder', 'custom-static-relative-url')
[...in settings...]
HENDRIX_CHILD_RESOURCES = (
...,
'app.resource.StaticResource',
...
)
"""
rel_url = rel_url.strip('/')
StaticFilesResource = MediaResource(path)
StaticFilesResource.namespace = rel_url
return StaticFilesResource
def get_additional_resources(settings_module):
"""
if HENDRIX_CHILD_RESOURCES is specified in settings_module,
it should be a list resources subclassed from hendrix.contrib.NamedResource
example:
HENDRIX_CHILD_RESOURCES = (
'apps.offload.resources.LongRunningProcessResource',
'apps.chat.resources.ChatResource',
)
"""
additional_resources = []
if hasattr(settings_module, 'HENDRIX_CHILD_RESOURCES'):
for module_path in settings_module.HENDRIX_CHILD_RESOURCES:
path_to_module, resource_name = module_path.rsplit('.', 1)
resource_module = importlib.import_module(path_to_module)
additional_resources.append(getattr(resource_module, resource_name))
return additional_resources
| mit | -9,037,357,989,585,551,000 | 34.296552 | 117 | 0.665494 | false |
jiwang576/incubator-airflow | docs/conf.py | 23 | 8948 | # -*- coding: utf-8 -*-
#
# Airflow documentation build configuration file, created by
# sphinx-quickstart on Thu Oct 9 20:50:01 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import os
import sys
import mock
MOCK_MODULES = [
'apiclient',
'apiclient.discovery',
'apiclient.http',
'mesos',
'mesos.interface',
'mesos.native',
'oauth2client.service_account',
'pandas.io.gbq',
]
for mod_name in MOCK_MODULES:
sys.modules[mod_name] = mock.Mock()
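# Editor's illustrative note (not part of the original config): pre-seeding
# sys.modules with Mock objects lets Sphinx autodoc import airflow modules on a
# docs builder that lacks these optional/heavy dependencies, so a statement such
# as "import mesos.native" succeeds and yields a Mock instead of raising
# ImportError.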
# Hack to allow changing for piece of the code to behave differently while
# the docs are being built. The main objective was to alter the
# behavior of the utils.apply_default that was hiding function headers
os.environ['BUILDING_AIRFLOW_DOCS'] = 'TRUE'
from airflow import settings
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.coverage',
'sphinx.ext.viewcode',
'sphinxarg.ext',
]
viewcode_import = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Airflow'
#copyright = u''
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
#version = '1.0.0'
# The full version, including alpha/beta/rc tags.
#release = '1.0.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
import sphinx_rtd_theme
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
html_title = "Airflow Documentation"
# A shorter title for the navigation bar. Default is the same as html_title.
html_short_title = ""
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
html_show_copyright = False
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Airflowdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'Airflow.tex', u'Airflow Documentation',
u'Apache Airflow', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'airflow', u'Airflow Documentation',
[u'Apache Airflow'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [(
'index', 'Airflow', u'Airflow Documentation',
u'Apache Airflow', 'Airflow',
    'Airflow is a system to programmatically author, schedule and monitor data pipelines.',
'Miscellaneous'
),]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| apache-2.0 | -4,600,793,877,415,594,500 | 30.286713 | 90 | 0.707085 | false |
Impactstory/total-impact-core | test/unit_tests/test_models.py | 2 | 2450 | from nose.tools import raises, assert_equals, nottest
import os, unittest, hashlib, json, pprint, datetime
from time import sleep
from werkzeug.security import generate_password_hash
from totalimpact import models, tiredis
from totalimpact.providers import bibtex, github
from totalimpact import REDIS_UNITTEST_DATABASE_NUMBER
class TestMemberItems():
def setUp(self):
# setup a clean new redis database at our unittest redis DB location: Number 8
self.r = tiredis.from_url("redis://localhost:6379", db=REDIS_UNITTEST_DATABASE_NUMBER)
self.r.flushdb()
bibtex.Bibtex.paginate = lambda self, x: {"pages": [1,2,3,4], "number_entries":10}
bibtex.Bibtex.member_items = lambda self, x: ("doi", str(x))
self.memberitems_resp = [
["doi", "1"],
["doi", "2"],
["doi", "3"],
["doi", "4"],
]
self.mi = models.MemberItems(bibtex.Bibtex(), self.r)
def test_init(self):
assert_equals(self.mi.__class__.__name__, "MemberItems")
assert_equals(self.mi.provider.__class__.__name__, "Bibtex")
def test_start_update(self):
ret = self.mi.start_update("1234")
input_hash = hashlib.md5("1234").hexdigest()
assert_equals(input_hash, ret)
sleep(.1) # give the thread a chance to finish.
status = self.r.get_memberitems_status(input_hash)
assert_equals(status["memberitems"], self.memberitems_resp )
assert_equals(status["complete"], 4 )
def test_get_sync(self):
github.Github.member_items = lambda self, x: \
[("github", name) for name in ["project1", "project2", "project3"]]
synch_mi = models.MemberItems(github.Github(), self.r)
# we haven't put q in redis with MemberItems.start_update(q),
# so this should update while we wait.
ret = synch_mi.get_sync("jasonpriem")
assert_equals(ret["pages"], 1)
assert_equals(ret["complete"], 1)
assert_equals(ret["memberitems"],
[
("github", "project1"),
("github", "project2"),
("github", "project3")
]
)
def test_get_async(self):
ret = self.mi.start_update("1234")
sleep(.1)
res = self.mi.get_async(ret)
print res
assert_equals(res["complete"], 4)
assert_equals(res["memberitems"], self.memberitems_resp)
| mit | 7,698,163,245,883,986,000 | 33.507042 | 94 | 0.59551 | false |
hansjorg/rust-ci | tpt/ppatrigger/management/commands/fetchbuilds.py | 2 | 4184 | import traceback
from dateutil import parser
from django.db import connection
from django.core.management.base import BaseCommand
from django.db import IntegrityError
from ppatrigger.models import Project
from ppatrigger.models import Build
import travisclient
import json
class Command(BaseCommand):
args = ''
help = 'Fetch data for builds that have been marked as started'
def handle(self, *args, **options):
projects = Project.objects.filter(build_started__exact = True,
deleted = False)
if not len(projects):
self.stdout.write('fetchbuilds: No started builds.')
else:
self.stdout.write('fetchbuilds: Fetching data for {} '
'started builds.'.format(len(projects)))
for project in projects:
# A run of the checkpackage command has triggered a
# build which may have finished by now. Check and
# save a build data entry if so.
build = travisclient.get_build_by_id(project.build_id)
if build and 'state' in build:
build_state = build['state']
if build_state == 'finished':
self.stdout.write(str(project) + ': Build '
'finished, saving data ')
# Result and status for some builds have been returned
# as null from Travis for some reason
result = -1
if build['result'] != None:
result = build['result']
status = -1
if build['status'] != None:
status = build['status']
build_data = Build(
project = project,
build_id = build['id'],
package_version = project.package.version,
package_created_at = project.package.created_at,
result = result,
status = status,
duration = build['duration'],
started_at = parser.parse(build['started_at']),
finished_at = parser.parse(build['finished_at']),
committer_email = build['committer_email'],
committer_name = build['committer_name'],
commited_at = parser.parse(build['committed_at']),
event_type = build['event_type'],
commit = build['commit'],
message = build['message'],
compare_url = build['compare_url']
)
try:
build_data.save()
project.last_build = build_data
project.build_started = False
project.author_name = build['author_name']
project.author_email = build['author_email']
repo = travisclient.get_repo(project.username,
project.repository)
if repo and 'description' in repo and repo['description']:
project.description = repo['description']
project.save()
except IntegrityError:
# TODO: temp fix for missing result from Travis
connection._rollback()
self.stdout.write(str(project) + ': Error storing build state for project')
self.stdout.write(json.dumps(build, sort_keys=True, indent=4))
self.stdout.write(traceback.format_exc())
else:
self.stdout.write(str(project) + ': Build not '
'finished, state: ' + build_state)
| apache-2.0 | 5,352,895,900,383,375,000 | 43.042105 | 103 | 0.459369 | false |
MycChiu/tensorflow | tensorflow/compiler/aot/tests/make_test_graphs.py | 27 | 4478 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Generate tensorflow graphs for testing tfcompile."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import sys
from tensorflow.core.protobuf import saver_pb2
from tensorflow.python.client import session
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import app
from tensorflow.python.training import saver as saver_lib
FLAGS = None
def tfadd(_):
x = constant_op.constant([1], name='x_const')
y = constant_op.constant([2], name='y_const')
math_ops.add(x, y, name='x_y_sum')
def tfadd_with_ckpt(out_dir):
x = array_ops.placeholder(dtypes.int32, name='x_hold')
y = variables.Variable(constant_op.constant([0]), name='y_saved')
math_ops.add(x, y, name='x_y_sum')
init_op = variables.initialize_all_variables()
saver = saver_lib.Saver(write_version=saver_pb2.SaverDef.V1)
with session.Session() as sess:
sess.run(init_op)
sess.run(y.assign(y + 42))
# Without the checkpoint, the variable won't be set to 42.
ckpt = '%s/test_graph_tfadd_with_ckpt.ckpt' % out_dir
saver.save(sess, ckpt)
def tfadd_with_ckpt_saver(out_dir):
x = array_ops.placeholder(dtypes.int32, name='x_hold')
y = variables.Variable(constant_op.constant([0]), name='y_saved')
math_ops.add(x, y, name='x_y_sum')
init_op = variables.initialize_all_variables()
saver = saver_lib.Saver(name='abcprefix', write_version=saver_pb2.SaverDef.V1)
with session.Session() as sess:
sess.run(init_op)
sess.run(y.assign(y + 42))
# Without the checkpoint, the variable won't be set to 42.
ckpt_file = '%s/test_graph_tfadd_with_ckpt_saver.ckpt' % out_dir
saver.save(sess, ckpt_file)
# Without the SaverDef, the restore op won't be named correctly.
saver_file = '%s/test_graph_tfadd_with_ckpt_saver.saver' % out_dir
with open(saver_file, 'w') as f:
f.write(saver.as_saver_def().SerializeToString())
def tfgather(_):
params = array_ops.placeholder(dtypes.float32, name='params')
indices = array_ops.placeholder(dtypes.int32, name='indices')
array_ops.gather(params, indices, name='gather_output')
def tfmatmul(_):
x = array_ops.placeholder(dtypes.float32, name='x_hold')
y = array_ops.placeholder(dtypes.float32, name='y_hold')
math_ops.matmul(x, y, name='x_y_prod')
def tfmatmulandadd(_):
# This tests multiple outputs.
x = array_ops.placeholder(dtypes.float32, name='x_hold')
y = array_ops.placeholder(dtypes.float32, name='y_hold')
math_ops.matmul(x, y, name='x_y_prod')
math_ops.add(x, y, name='x_y_sum')
def write_graph(build_graph, out_dir):
"""Build a graph using build_graph and write it out."""
g = ops.Graph()
with g.as_default():
build_graph(out_dir)
filename = '%s/test_graph_%s.pb' % (out_dir, build_graph.__name__)
with open(filename, 'w') as f:
f.write(g.as_graph_def().SerializeToString())
def main(_):
write_graph(tfadd, FLAGS.out_dir)
write_graph(tfadd_with_ckpt, FLAGS.out_dir)
write_graph(tfadd_with_ckpt_saver, FLAGS.out_dir)
write_graph(tfgather, FLAGS.out_dir)
write_graph(tfmatmul, FLAGS.out_dir)
write_graph(tfmatmulandadd, FLAGS.out_dir)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.register('type', 'bool', lambda v: v.lower() == 'true')
parser.add_argument(
'--out_dir',
type=str,
default='',
help='Output directory for graphs, checkpoints and savers.'
)
FLAGS, unparsed = parser.parse_known_args()
app.run(main=main, argv=[sys.argv[0]] + unparsed)
| apache-2.0 | 6,892,081,276,979,587,000 | 34.259843 | 80 | 0.693613 | false |
keith-pedersen/delphes | python/LeptonControlPlots.py | 15 | 1505 | from BaseControlPlots import BaseControlPlots
# Requirements:
# event.muons
# event.electrons
class LeptonControlPlots(BaseControlPlots):
"""A class to create control plots for leptons"""
def __init__(self, dir=None, dataset=None, mode="plots"):
# create output file if needed. If no file is given, it means it is delegated
BaseControlPlots.__init__(self, dir=dir, purpose="leptons", dataset=dataset, mode=mode)
def beginJob(self):
# declare histograms
self.add("ElectronPt","Electron Pt",100,0,200)
self.add("MuonPt","Muon Pt",100,0,200)
self.add("ElectronEta","Electron Eta",50,-2.5,2.5)
self.add("MuonEta","Muon Eta",50,-2.5,2.5)
self.add("NMuons","Muon multiplicity",10,0,10)
self.add("NElectrons","Electron multiplicity",10,0,10)
def process(self, event):
#get information
result = { }
result["ElectronPt"] = [ ]
result["MuonPt"] = [ ]
result["ElectronEta"] = [ ]
result["MuonEta"] = [ ]
for mu in event.muons:
result["MuonPt"].append(mu.PT)
result["MuonEta"].append(mu.Eta)
for ele in event.electrons:
result["ElectronPt"].append(ele.PT)
result["ElectronEta"].append(ele.Eta)
result["NMuons"] = event.muons.GetEntries()
result["NElectrons"] = event.electrons.GetEntries()
return result
if __name__=="__main__":
import sys
from DelphesAnalysis.BaseControlPlots import runTest
runTest(sys.argv[1], LeptonControlPlots())
| gpl-3.0 | 4,277,532,438,591,927,300 | 33.204545 | 93 | 0.643189 | false |
xzturn/tensorflow | tensorflow/python/client/virtual_gpu_test.py | 17 | 9731 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for multiple virtual GPU support."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import random
import numpy as np
from google.protobuf import text_format
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.platform import tf_logging as logging
class VirtualGpuTestUtil(object):
def __init__(self,
dim=1000,
num_ops=100,
virtual_devices_per_gpu=None,
device_probabilities=None):
self._dim = dim
self._num_ops = num_ops
if virtual_devices_per_gpu is None:
self._virtual_devices_per_gpu = [3]
else:
self._virtual_devices_per_gpu = virtual_devices_per_gpu
self._visible_device_list = [
i for i in range(len(self._virtual_devices_per_gpu))
]
gpu_devices = [
('/gpu:' + str(i)) for i in range(sum(self._virtual_devices_per_gpu))
]
self.devices = ['/cpu:0'] + gpu_devices
self._num_devices = len(self.devices)
# Each virtual device gets 2GB memory.
self._mem_limits_mb = [
([1 << 11] * i) for i in self._virtual_devices_per_gpu
]
self.config = self._GetSessionConfig()
if device_probabilities is not None:
self._device_probabilities = list(device_probabilities) # Deep copy
for i in range(1, self._num_devices):
self._device_probabilities[i] += self._device_probabilities[i - 1]
else:
# Each device gets same probability to be assigned an operation.
step = 1.0 / self._num_devices
self._device_probabilities = [
(x + 1) * step for x in range(self._num_devices)
]
# To prevent rounding error causing problems.
self._device_probabilities[self._num_devices - 1] = 1.1
logging.info('dim: %d', self._dim)
logging.info('num_ops: %d', self._num_ops)
logging.info('visible_device_list: %s', str(self._visible_device_list))
logging.info('virtual_devices_per_gpu: %s',
str(self._virtual_devices_per_gpu))
logging.info('mem_limits: %s', str(self._mem_limits_mb))
logging.info('devices: %s', str(self.devices))
logging.info('config: %s', text_format.MessageToString(self.config))
logging.info('device_probabilities: %s', str(self._device_probabilities))
# Creates virtual GPU devices
def _GetSessionConfig(self):
virtual_device_gpu_options = config_pb2.GPUOptions(
visible_device_list=','.join(str(d) for d in self._visible_device_list),
experimental=config_pb2.GPUOptions.Experimental(virtual_devices=[
config_pb2.GPUOptions.Experimental.VirtualDevices(
memory_limit_mb=i) for i in self._mem_limits_mb
]))
return config_pb2.ConfigProto(gpu_options=virtual_device_gpu_options)
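  # Editor's illustrative note (not part of the original test): with the
  # defaults above (one visible GPU split into three 2048MB virtual devices),
  # the proto built by _GetSessionConfig is roughly equivalent to the text form
  #
  #   gpu_options {
  #     visible_device_list: "0"
  #     experimental {
  #       virtual_devices { memory_limit_mb: [2048, 2048, 2048] }
  #     }
  #   }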
# Generates a list of 3-tuples, each tuple contains the source and destination
# device index for a binary operation like 'add', like:
# (src_device_1, src_device_2, dst_device)
def _GenerateOperationPlacement(self):
result = []
for unused_i in range(self._num_ops):
op_device = ()
for unused_j in range(3):
random_num = random.random()
for device_index in range(self._num_devices):
if self._device_probabilities[device_index] > random_num:
op_device += (device_index,)
break
result.append(op_device)
return result
# Logs part of the matrix for debugging purposes.
def _LogMatrix(self, mat, dim):
logging.info('---- printing the first 10*10 submatrix ----')
for i in range(min(10, dim)):
row = ''
for j in range(min(10, dim)):
row += ' ' + str(mat[i][j])
logging.info(row)
# Runs a list of 'add' operations where each operation satisfies the device
# placement constraints in `op_placement`, and returns the result.
def _TestRandomGraphWithDevices(self,
sess,
seed,
op_placement,
devices,
debug_mode=False):
data = []
shape = (self._dim, self._dim)
feed_dict = {}
# Initialize the matrices
for i in range(len(devices)):
with ops.device(devices[i]):
var = array_ops.placeholder(dtypes.float32, shape=shape)
np.random.seed(seed + i)
feed_dict[var] = np.random.uniform(
low=0, high=0.1, size=shape).astype(np.float32)
data.append(var)
# Run the 'add' operations on those matrices
for op in op_placement:
with ops.device(devices[op[2]]):
data[op[2]] = math_ops.add(data[op[0]], data[op[1]])
with ops.device('/cpu:0'):
s = data[0]
for i in range(1, len(data)):
s = math_ops.add(s, data[i])
if debug_mode:
logging.info(ops.get_default_graph().as_graph_def())
result = sess.run(s, feed_dict=feed_dict)
self._LogMatrix(result, self._dim)
return result
# Generates a random graph with `self._num_ops` 'add' operations with each
# operation placed on different virtual device, test that the result is
# identical to the result obtained by running the same graph on cpu only.
def TestRandomGraph(self, sess, op_placement=None, random_seed=None):
debug_mode = False
if op_placement is None:
op_placement = self._GenerateOperationPlacement()
else:
debug_mode = True
if random_seed is None:
random_seed = random.randint(0, 1 << 31)
else:
debug_mode = True
logging.info('Virtual gpu functional test for random graph...')
logging.info('operation placement: %s', str(op_placement))
logging.info('random seed: %d', random_seed)
# Run with multiple virtual gpus.
result_vgd = self._TestRandomGraphWithDevices(
sess, random_seed, op_placement, self.devices, debug_mode=debug_mode)
# Run with single cpu.
result_cpu = self._TestRandomGraphWithDevices(
sess,
random_seed,
op_placement, ['/cpu:0'] * self._num_devices,
debug_mode=debug_mode)
# Test the result
for i in range(self._dim):
for j in range(self._dim):
if result_vgd[i][j] != result_cpu[i][j]:
logging.error(
'Result mismatch at row %d column %d: expected %f, actual %f', i,
j, result_cpu[i][j], result_vgd[i][j])
logging.error('Devices: %s', self.devices)
logging.error('Memory limits (in MB): %s', self._mem_limits_mb)
return False
return True
class VirtualGpuTest(test_util.TensorFlowTestCase):
def __init__(self, method_name):
super(VirtualGpuTest, self).__init__(method_name)
self._util = VirtualGpuTestUtil()
@test_util.deprecated_graph_mode_only
def testStatsContainAllDeviceNames(self):
with self.session(config=self._util.config) as sess:
# TODO(laigd): b/70811538. The is_gpu_available() call will invoke
# DeviceFactory::AddDevices() with a default SessionOption, which prevents
# adding virtual devices in the future, thus must be called within a
# context of a session within which virtual devices are created. Same in
# the following test case.
if not test.is_gpu_available(cuda_only=True):
self.skipTest('No GPU available')
run_options = config_pb2.RunOptions(
trace_level=config_pb2.RunOptions.FULL_TRACE)
run_metadata = config_pb2.RunMetadata()
mat_shape = [10, 10]
data = []
for d in self._util.devices:
with ops.device(d):
var = variables.Variable(random_ops.random_uniform(mat_shape))
self.evaluate(var.initializer)
data.append(var)
s = data[0]
for i in range(1, len(data)):
s = math_ops.add(s, data[i])
sess.run(s, options=run_options, run_metadata=run_metadata)
self.assertTrue(run_metadata.HasField('step_stats'))
step_stats = run_metadata.step_stats
devices = [d.device for d in step_stats.dev_stats]
self.assertTrue('/job:localhost/replica:0/task:0/device:CPU:0' in devices)
self.assertTrue('/job:localhost/replica:0/task:0/device:GPU:0' in devices)
self.assertTrue('/job:localhost/replica:0/task:0/device:GPU:1' in devices)
self.assertTrue('/job:localhost/replica:0/task:0/device:GPU:2' in devices)
@test_util.deprecated_graph_mode_only
def testLargeRandomGraph(self):
with self.session(config=self._util.config) as sess:
if not test.is_gpu_available(cuda_only=True):
self.skipTest('No GPU available')
for _ in range(5):
if not self._util.TestRandomGraph(sess):
return
if __name__ == '__main__':
test.main()
| apache-2.0 | 7,085,940,314,819,711,000 | 38.556911 | 80 | 0.644127 | false |
Tisseo/navitia | source/jormungandr/jormungandr/autocomplete/__init__.py | 7 | 1297 | # encoding: utf-8
# Copyright (c) 2001-2014, Canal TP and/or its affiliates. All rights reserved.
#
# This file is part of Navitia,
# the software to build cool stuff with public transport.
#
# Hope you'll enjoy and contribute to this project,
# powered by Canal TP (www.canaltp.fr).
# Help us simplify mobility and open public transport:
# a non ending quest to the responsive locomotion way of traveling!
#
# LICENCE: This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Stay tuned using
# twitter @navitia
# IRC #navitia on freenode
# https://groups.google.com/d/forum/navitia
# www.navitia.io
from __future__ import absolute_import, print_function, unicode_literals, division
| agpl-3.0 | 3,307,676,820,902,341,600 | 39.53125 | 82 | 0.753277 | false |
laplaceliu/readthedocs.org | readthedocs/rtd_tests/tests/test_project_symlinks.py | 23 | 4206 | import os
from functools import wraps
from mock import patch
from django.test import TestCase
from readthedocs.builds.models import Version
from readthedocs.projects.models import Project
from readthedocs.projects.symlinks import symlink_translations
def patched(fn):
'''Patches calls to run_on_app_servers on instance methods'''
@wraps(fn)
def wrapper(self):
def _collect_commands(cmd):
self.commands.append(cmd)
with patch('readthedocs.projects.symlinks.run_on_app_servers', _collect_commands):
with patch('readthedocs.projects.symlinks.run_on_app_servers', _collect_commands):
return fn(self)
return wrapper
class TestSymlinkTranslations(TestCase):
fixtures = ['eric', 'test_data']
commands = []
def setUp(self):
self.project = Project.objects.get(slug='kong')
self.translation = Project.objects.get(slug='pip')
self.translation.language = 'de'
        self.translation.main_language_project = self.project
self.project.translations.add(self.translation)
self.translation.save()
self.project.save()
Version.objects.create(verbose_name='master',
active=True, project=self.project)
Version.objects.create(verbose_name='master',
active=True, project=self.translation)
self.args = {
'project': self.project.doc_path,
'translation': self.translation.doc_path,
'builds': os.path.join(self.project.doc_path, 'rtd-builds'),
}
self.assertIn(self.translation, self.project.translations.all())
self.commands = []
@patched
def test_symlink_basic(self):
'''Test basic scenario, language english, translation german'''
symlink_translations(self.project.versions.first())
commands = [
'mkdir -p {project}/translations',
'ln -nsf {translation}/rtd-builds {project}/translations/de',
'ln -nsf {builds} {project}/translations/en',
]
for command in commands:
self.assertIsNotNone(
self.commands.pop(
self.commands.index(command.format(**self.args))
))
@patched
def test_symlink_non_english(self):
'''Test language german, translation english'''
# Change the languages, and then clear commands, as project.save calls
# the symlinking
self.project.language = 'de'
self.translation.language = 'en'
self.project.save()
self.translation.save()
self.commands = []
symlink_translations(self.project.versions.first())
commands = [
'mkdir -p {project}/translations',
'ln -nsf {project}/rtd-builds {project}/translations/de',
'ln -nsf {translation}/rtd-builds {project}/translations/en',
]
for command in commands:
self.assertIsNotNone(
self.commands.pop(
self.commands.index(command.format(**self.args))
))
@patched
def test_symlink_no_english(self):
'''Test language german, no english
This should symlink the translation to 'en' even though there is no 'en'
language in translations or project language
'''
# Change the languages, and then clear commands, as project.save calls
# the symlinking
self.project.language = 'de'
version = self.project.translations.first()
self.project.translations.remove(version)
self.project.save()
self.assertNotIn(version, self.project.translations.all())
self.commands = []
symlink_translations(self.project.versions.first())
commands = [
'mkdir -p {project}/translations',
'ln -nsf {project}/rtd-builds {project}/translations/de',
'ln -nsf {project}/rtd-builds {project}/translations/en',
]
for command in commands:
self.assertIsNotNone(
self.commands.pop(
self.commands.index(command.format(**self.args))
))
| mit | 6,129,964,824,988,754,000 | 34.344538 | 94 | 0.607466 | false |
shishaochen/TensorFlow-0.8-Win | tensorflow/python/training/moving_averages_test.py | 9 | 10390 | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functional test for moving_averages.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from tensorflow.python.ops import state_ops
from tensorflow.python.training import moving_averages
class MovingAveragesTest(tf.test.TestCase):
def testAssignMovingAverage(self):
with self.test_session():
var = tf.Variable([10.0, 11.0])
val = tf.constant([1.0, 2.0], tf.float32)
decay = 0.25
assign = moving_averages.assign_moving_average(var, val, decay)
tf.initialize_all_variables().run()
self.assertAllClose([10.0, 11.0], var.eval())
assign.op.run()
self.assertAllClose([10.0 * 0.25 + 1.0 * (1.0 - 0.25),
11.0 * 0.25 + 2.0 * (1.0 - 0.25)],
var.eval())
def testWeightedMovingAverage(self):
with self.test_session() as sess:
decay = 0.5
weight = tf.placeholder(tf.float32, [])
val = tf.placeholder(tf.float32, [])
wma = moving_averages.weighted_moving_average(val, decay, weight)
tf.initialize_all_variables().run()
# Get the first weighted moving average.
val_1 = 3.0
weight_1 = 4.0
wma_array = sess.run(
wma, feed_dict={val: val_1, weight: weight_1})
numerator_1 = val_1 * weight_1 * (1.0 - decay)
denominator_1 = weight_1 * (1.0 - decay)
self.assertAllClose(numerator_1 / denominator_1, wma_array)
# Get the second weighted moving average.
val_2 = 11.0
weight_2 = 22.0
wma_array = sess.run(
wma, feed_dict={val: val_2, weight: weight_2})
numerator_2 = numerator_1 * decay + val_2 * weight_2 * (1.0 - decay)
denominator_2 = denominator_1 * decay + weight_2 * (1.0 - decay)
self.assertAllClose(numerator_2 / denominator_2, wma_array)
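  # Editor's note (illustrative, not part of the original test): the expected
  # values above follow the weighted-moving-average recurrence
  #   numerator_n   = decay * numerator_{n-1}   + (1 - decay) * weight_n * value_n
  #   denominator_n = decay * denominator_{n-1} + (1 - decay) * weight_n
  #   wma_n         = numerator_n / denominator_n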
def _Repeat(value, dim):
if dim == 1:
return value
return [value] * dim
class ExponentialMovingAverageTest(tf.test.TestCase):
def _CheckDecay(self, ema, actual_decay, dim):
tens = _Repeat(10.0, dim)
thirties = _Repeat(30.0, dim)
var0 = tf.Variable(tens, name="v0")
var1 = tf.Variable(thirties, name="v1")
tf.initialize_all_variables().run()
# Note that tensor2 is not a Variable but just a plain Tensor resulting
# from the sum operation.
tensor2 = var0 + var1
update = ema.apply([var0, var1, tensor2])
avg0 = ema.average(var0)
avg1 = ema.average(var1)
avg2 = ema.average(tensor2)
self.assertItemsEqual([var0, var1], tf.moving_average_variables())
self.assertFalse(avg0 in tf.trainable_variables())
self.assertFalse(avg1 in tf.trainable_variables())
self.assertFalse(avg2 in tf.trainable_variables())
tf.initialize_all_variables().run()
self.assertEqual("v0/ExponentialMovingAverage:0", avg0.name)
self.assertEqual("v1/ExponentialMovingAverage:0", avg1.name)
self.assertEqual("add/ExponentialMovingAverage:0", avg2.name)
# Check initial values.
self.assertAllClose(tens, var0.eval())
self.assertAllClose(thirties, var1.eval())
self.assertAllClose(_Repeat(10.0 + 30.0, dim), tensor2.eval())
# Check that averages are initialized correctly.
self.assertAllClose(tens, avg0.eval())
self.assertAllClose(thirties, avg1.eval())
    # Note that averages of Tensors initialize to zeros_like since no value
# of the Tensor is known because the Op has not been run (yet).
self.assertAllClose(_Repeat(0.0, dim), avg2.eval())
# Update the averages and check.
update.run()
dk = actual_decay
expected = _Repeat(10.0 * dk + 10.0 * (1 - dk), dim)
self.assertAllClose(expected, avg0.eval())
expected = _Repeat(30.0 * dk + 30.0 * (1 - dk), dim)
self.assertAllClose(expected, avg1.eval())
expected = _Repeat(0.0 * dk + (10.0 + 30.0) * (1 - dk), dim)
self.assertAllClose(expected, avg2.eval())
# Again, update the averages and check.
update.run()
expected = _Repeat((10.0 * dk + 10.0 * (1 - dk)) * dk + 10.0 * (1 - dk),
dim)
self.assertAllClose(expected, avg0.eval())
expected = _Repeat((30.0 * dk + 30.0 * (1 - dk)) * dk + 30.0 * (1 - dk),
dim)
self.assertAllClose(expected, avg1.eval())
expected = _Repeat(((0.0 * dk + (10.0 + 30.0) * (1 - dk)) * dk +
(10.0 + 30.0) * (1 - dk)),
dim)
self.assertAllClose(expected, avg2.eval())
def testAverageVariablesNoNumUpdates_Scalar(self):
with self.test_session():
ema = tf.train.ExponentialMovingAverage(0.25)
self._CheckDecay(ema, actual_decay=0.25, dim=1)
def testAverageVariablesNoNumUpdates_Vector(self):
with self.test_session():
ema = tf.train.ExponentialMovingAverage(0.25)
self._CheckDecay(ema, actual_decay=0.25, dim=5)
def testAverageVariablesNumUpdates_Scalar(self):
with self.test_session():
# With num_updates 1, the decay applied is 0.1818
ema = tf.train.ExponentialMovingAverage(0.25, num_updates=1)
self._CheckDecay(ema, actual_decay=0.181818, dim=1)
def testAverageVariablesNumUpdates_Vector(self):
with self.test_session():
# With num_updates 1, the decay applied is 0.1818
ema = tf.train.ExponentialMovingAverage(0.25, num_updates=1)
self._CheckDecay(ema, actual_decay=0.181818, dim=5)
def testAverageVariablesWithControlDeps(self):
with self.test_session() as sess:
v0 = tf.Variable(0, name="v0")
add_to_v0 = v0.assign_add(1)
v1 = tf.Variable([10.0], name="v1")
assign_to_v1 = v1.assign([20.0])
ema = tf.train.ExponentialMovingAverage(0.25)
with tf.control_dependencies([add_to_v0]):
ema_op = ema.apply([v1])
# the moving average of v1 should not have any control inputs
v1_avg = ema.average(v1)
self.assertEqual([], v1_avg.initializer.control_inputs)
self.assertEqual([], v1_avg.value().op.control_inputs)
self.assertEqual([], v1_avg.ref().op.control_inputs)
# We should be able to initialize v1_avg before v0.
sess.run(v1_avg.initializer)
sess.run(v0.initializer)
self.assertEqual([10.0], sess.run(v1_avg))
# running ema_op should add to v0 (in addition to updating v1_avg)
sess.run(assign_to_v1)
sess.run(ema_op)
self.assertEqual(1, sess.run(v0))
self.assertEqual([17.5], sess.run(v1_avg))
def testAverageVariablesNames(self):
with self.test_session():
v0 = tf.Variable(10.0, name="v0")
v1 = tf.Variable(30.0, name="v1")
# Add a non-trainable variable.
v2 = tf.Variable(20.0, name="v2", trainable=False)
tensor2 = v0 + v1
ema = tf.train.ExponentialMovingAverage(0.25, name="foo_avg")
self.assertEqual("v0/foo_avg", ema.average_name(v0))
self.assertEqual("v1/foo_avg", ema.average_name(v1))
self.assertEqual("add/foo_avg", ema.average_name(tensor2))
ema.apply([v0, v1, tensor2])
vars_to_restore = ema.variables_to_restore()
# vars_to_restore should contain the following:
# {v0/foo_avg : v0,
# v1/foo_avg : v1,
# add/foo_avg : add/foo_avg
# v2 : v2}
self.assertEqual(sorted(vars_to_restore.keys()),
sorted([ema.average_name(v0),
ema.average_name(v1),
ema.average_name(tensor2),
v2.op.name]))
self.assertEqual(ema.average_name(v0), ema.average(v0).op.name)
self.assertEqual(ema.average_name(v1), ema.average(v1).op.name)
self.assertEqual(ema.average_name(tensor2), ema.average(tensor2).op.name)
def testSubsetAverageVariablesNames(self):
with self.test_session():
v0 = tf.Variable(10.0, name="v0")
v1 = tf.Variable(30.0, name="v1")
# Add a non-trainable variable.
v2 = tf.Variable(20.0, name="v2", trainable=False)
tensor2 = v0 + v1
ema = tf.train.ExponentialMovingAverage(0.25, name="foo_avg")
self.assertEqual("v0/foo_avg", ema.average_name(v0))
self.assertEqual("v1/foo_avg", ema.average_name(v1))
self.assertEqual("add/foo_avg", ema.average_name(tensor2))
vars_to_restore = ema.variables_to_restore([v0, tensor2])
# vars_to_restore should contain the following:
# {v0/foo_avg : v0,
# add/foo_avg : add
# v1 : v1,
# v2 : v2}
self.assertEqual(sorted(vars_to_restore.keys()),
sorted([ema.average_name(v0),
ema.average_name(tensor2),
v1.op.name,
v2.op.name]))
ema.apply([v0, v1, tensor2])
self.assertEqual(ema.average_name(v0), ema.average(v0).op.name)
self.assertEqual(ema.average_name(v1), ema.average(v1).op.name)
self.assertEqual(ema.average_name(tensor2), ema.average(tensor2).op.name)
def testAverageVariablesDeviceAssignment(self):
with tf.device("/job:dev_v0"):
v0 = tf.Variable(10.0, name="v0")
with tf.device("/job:dev_v1"):
v1 = state_ops.variable_op(shape=[1], dtype=tf.float32, name="v1")
tensor2 = v0 + v1
ema = tf.train.ExponentialMovingAverage(0.25, name="foo_avg")
with tf.device("/job:default"):
ema.apply([v0, v1, tensor2])
self.assertDeviceEqual("/job:dev_v0", ema.average(v0).device)
self.assertDeviceEqual("/job:dev_v1", ema.average(v1).device)
# However, the colocation property is maintained.
self.assertEqual([b"loc:@v1"],
ema.average(v1).op.colocation_groups())
self.assertDeviceEqual("/job:default", ema.average(tensor2).device)
if __name__ == "__main__":
tf.test.main()
| apache-2.0 | 4,180,381,887,809,863,700 | 39.585938 | 80 | 0.626372 | false |
cl4u2/chirp | chirp/directory.py | 1 | 4465 | # Copyright 2010 Dan Smith <[email protected]>
# Copyright 2012 Tom Hayward <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import tempfile
from chirp import icf
from chirp import chirp_common, util, rfinder, radioreference, errors
def radio_class_id(cls):
"""Return a unique identification string for @cls"""
ident = "%s_%s" % (cls.VENDOR, cls.MODEL)
if cls.VARIANT:
ident += "_%s" % cls.VARIANT
ident = ident.replace("/", "_")
ident = ident.replace(" ", "_")
ident = ident.replace("(", "")
ident = ident.replace(")", "")
return ident
ALLOW_DUPS = False
def enable_reregistrations():
"""Set the global flag ALLOW_DUPS=True, which will enable a driver
to re-register for a slot in the directory without triggering an
exception"""
global ALLOW_DUPS
if not ALLOW_DUPS:
print "NOTE: driver re-registration enabled"
ALLOW_DUPS = True
def register(cls):
"""Register radio @cls with the directory"""
global DRV_TO_RADIO
ident = radio_class_id(cls)
if ident in DRV_TO_RADIO.keys():
if ALLOW_DUPS:
print "Replacing existing driver id `%s'" % ident
else:
raise Exception("Duplicate radio driver id `%s'" % ident)
DRV_TO_RADIO[ident] = cls
RADIO_TO_DRV[cls] = ident
print "Registered %s = %s" % (ident, cls.__name__)
return cls
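# Editor's illustrative sketch (not part of the original module): drivers
# normally use register as a class decorator. The radio class below is a
# hypothetical example, not a real driver.
#
#   @register
#   class ExampleRadio(chirp_common.CloneModeRadio):
#       VENDOR = "Example"
#       MODEL = "X-1000"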
DRV_TO_RADIO = {}
RADIO_TO_DRV = {}
def get_radio(driver):
"""Get radio driver class by identification string"""
if DRV_TO_RADIO.has_key(driver):
return DRV_TO_RADIO[driver]
else:
raise Exception("Unknown radio type `%s'" % driver)
def get_driver(rclass):
"""Get the identification string for a given class"""
if RADIO_TO_DRV.has_key(rclass):
return RADIO_TO_DRV[rclass]
elif RADIO_TO_DRV.has_key(rclass.__bases__[0]):
return RADIO_TO_DRV[rclass.__bases__[0]]
else:
raise Exception("Unknown radio type `%s'" % rclass)
def icf_to_image(icf_file, img_file):
# FIXME: Why is this here?
"""Convert an ICF file to a .img file"""
mdata, mmap = icf.read_file(icf_file)
img_data = None
for model in DRV_TO_RADIO.values():
try:
if model._model == mdata:
img_data = mmap.get_packed()[:model._memsize]
break
except Exception:
pass # Skip non-Icoms
if img_data:
f = file(img_file, "wb")
f.write(img_data)
f.close()
else:
print "Unsupported model data:"
print util.hexprint(mdata)
raise Exception("Unsupported model")
def get_radio_by_image(image_file):
"""Attempt to get the radio class that owns @image_file"""
if image_file.startswith("radioreference://"):
_, _, zipcode, username, password = image_file.split("/", 4)
rr = radioreference.RadioReferenceRadio(None)
rr.set_params(zipcode, username, password)
return rr
if image_file.startswith("rfinder://"):
_, _, email, passwd, lat, lon, miles = image_file.split("/")
rf = rfinder.RFinderRadio(None)
rf.set_params((float(lat), float(lon)), int(miles), email, passwd)
return rf
if os.path.exists(image_file) and icf.is_icf_file(image_file):
tempf = tempfile.mktemp()
icf_to_image(image_file, tempf)
print "Auto-converted %s -> %s" % (image_file, tempf)
image_file = tempf
if os.path.exists(image_file):
f = file(image_file, "rb")
filedata = f.read()
f.close()
else:
filedata = ""
for rclass in DRV_TO_RADIO.values():
if not issubclass(rclass, chirp_common.FileBackedRadio):
continue
if rclass.match_model(filedata, image_file):
return rclass(image_file)
raise errors.ImageDetectFailed("Unknown file format")
| gpl-3.0 | 4,150,729,444,155,058,700 | 32.571429 | 74 | 0.630683 | false |
WoLpH/CouchPotatoServer | couchpotato/core/downloaders/transmission/__init__.py | 10 | 3417 | from .main import Transmission
def start():
return Transmission()
config = [{
'name': 'transmission',
'groups': [
{
'tab': 'downloaders',
'list': 'download_providers',
'name': 'transmission',
'label': 'Transmission',
'description': 'Use <a href="http://www.transmissionbt.com/" target="_blank">Transmission</a> to download torrents.',
'wizard': True,
'options': [
{
'name': 'enabled',
'default': 0,
'type': 'enabler',
'radio_group': 'torrent',
},
{
'name': 'host',
'default': 'localhost:9091',
'description': 'Hostname with port. Usually <strong>localhost:9091</strong>',
},
{
'name': 'rpc_url',
'type': 'string',
'default': 'transmission',
'advanced': True,
'description': 'Change if you don\'t run Transmission RPC at the default url.',
},
{
'name': 'username',
},
{
'name': 'password',
'type': 'password',
},
{
'name': 'directory',
'type': 'directory',
'description': 'Download to this directory. Keep empty for default Transmission download directory.',
},
{
'name': 'remove_complete',
'label': 'Remove torrent',
'default': True,
'advanced': True,
'type': 'bool',
'description': 'Remove the torrent from Transmission after it finished seeding.',
},
{
'name': 'delete_files',
'label': 'Remove files',
'default': True,
'type': 'bool',
'advanced': True,
'description': 'Also remove the leftover files.',
},
{
'name': 'paused',
'type': 'bool',
'advanced': True,
'default': False,
'description': 'Add the torrent paused.',
},
{
'name': 'manual',
'default': 0,
'type': 'bool',
'advanced': True,
'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
},
{
'name': 'stalled_as_failed',
'default': True,
'advanced': True,
'type': 'bool',
'description': 'Consider a stalled torrent as failed',
},
{
'name': 'delete_failed',
'default': True,
'advanced': True,
'type': 'bool',
'description': 'Delete a release after the download has failed.',
},
],
}
],
}]
| gpl-3.0 | 4,895,560,613,471,390,000 | 35.351064 | 129 | 0.366696 | false |
bclau/nova | nova/image/glance.py | 1 | 21879 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Implementation of an image service that uses Glance as the backend."""
from __future__ import absolute_import
import copy
import itertools
import json
import random
import sys
import time
import urlparse
import glanceclient
import glanceclient.exc
from oslo.config import cfg
from nova import exception
import nova.image.download as image_xfers
from nova.openstack.common.gettextutils import _
from nova.openstack.common import jsonutils
from nova.openstack.common import log as logging
from nova.openstack.common import timeutils
from nova import utils
glance_opts = [
cfg.StrOpt('glance_host',
default='$my_ip',
help='default glance hostname or ip'),
cfg.IntOpt('glance_port',
default=9292,
help='default glance port'),
cfg.StrOpt('glance_protocol',
default='http',
help='Default protocol to use when connecting to glance. '
'Set to https for SSL.'),
cfg.ListOpt('glance_api_servers',
default=['$glance_host:$glance_port'],
help='A list of the glance api servers available to nova. '
'Prefix with https:// for ssl-based glance api servers. '
'([hostname|ip]:port)'),
cfg.BoolOpt('glance_api_insecure',
default=False,
help='Allow to perform insecure SSL (https) requests to '
'glance'),
cfg.IntOpt('glance_num_retries',
default=0,
help='Number retries when downloading an image from glance'),
cfg.ListOpt('allowed_direct_url_schemes',
default=[],
help='A list of url scheme that can be downloaded directly '
'via the direct_url. Currently supported schemes: '
'[file].'),
]
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
CONF.register_opts(glance_opts)
CONF.import_opt('auth_strategy', 'nova.api.auth')
CONF.import_opt('my_ip', 'nova.netconf')
def generate_glance_url():
"""Generate the URL to glance."""
glance_host = CONF.glance_host
if utils.is_valid_ipv6(glance_host):
glance_host = '[%s]' % glance_host
return "%s://%s:%d" % (CONF.glance_protocol, glance_host,
CONF.glance_port)
def generate_image_url(image_ref):
"""Generate an image URL from an image_ref."""
return "%s/images/%s" % (generate_glance_url(), image_ref)
def _parse_image_ref(image_href):
"""Parse an image href into composite parts.
:param image_href: href of an image
:returns: a tuple of the form (image_id, host, port)
:raises ValueError
"""
o = urlparse.urlparse(image_href)
port = o.port or 80
host = o.netloc.rsplit(':', 1)[0]
image_id = o.path.split('/')[-1]
use_ssl = (o.scheme == 'https')
return (image_id, host, port, use_ssl)
def generate_identity_headers(context, status='Confirmed'):
return {
'X-Auth-Token': getattr(context, 'auth_token', None),
'X-User-Id': getattr(context, 'user', None),
'X-Tenant-Id': getattr(context, 'tenant', None),
'X-Roles': ','.join(context.roles),
'X-Identity-Status': status,
'X-Service-Catalog': json.dumps(context.service_catalog),
}
def _create_glance_client(context, host, port, use_ssl, version=1):
"""Instantiate a new glanceclient.Client object."""
params = {}
if use_ssl:
scheme = 'https'
# https specific params
params['insecure'] = CONF.glance_api_insecure
params['ssl_compression'] = False
else:
scheme = 'http'
if CONF.auth_strategy == 'keystone':
# NOTE(isethi): Glanceclient <= 0.9.0.49 accepts only
# keyword 'token', but later versions accept both the
# header 'X-Auth-Token' and 'token'
params['token'] = context.auth_token
params['identity_headers'] = generate_identity_headers(context)
if utils.is_valid_ipv6(host):
#if so, it is ipv6 address, need to wrap it with '[]'
host = '[%s]' % host
endpoint = '%s://%s:%s' % (scheme, host, port)
return glanceclient.Client(str(version), endpoint, **params)
def get_api_servers():
"""
Shuffle a list of CONF.glance_api_servers and return an iterator
that will cycle through the list, looping around to the beginning
if necessary.
"""
api_servers = []
for api_server in CONF.glance_api_servers:
if '//' not in api_server:
api_server = 'http://' + api_server
o = urlparse.urlparse(api_server)
port = o.port or 80
host = o.netloc.rsplit(':', 1)[0]
if host[0] == '[' and host[-1] == ']':
host = host[1:-1]
use_ssl = (o.scheme == 'https')
api_servers.append((host, port, use_ssl))
random.shuffle(api_servers)
return itertools.cycle(api_servers)
class GlanceClientWrapper(object):
"""Glance client wrapper class that implements retries."""
def __init__(self, context=None, host=None, port=None, use_ssl=False,
version=1):
if host is not None:
self.client = self._create_static_client(context,
host, port,
use_ssl, version)
else:
self.client = None
self.api_servers = None
def _create_static_client(self, context, host, port, use_ssl, version):
"""Create a client that we'll use for every call."""
self.host = host
self.port = port
self.use_ssl = use_ssl
self.version = version
return _create_glance_client(context,
self.host, self.port,
self.use_ssl, self.version)
def _create_onetime_client(self, context, version):
"""Create a client that will be used for one call."""
if self.api_servers is None:
self.api_servers = get_api_servers()
self.host, self.port, self.use_ssl = self.api_servers.next()
return _create_glance_client(context,
self.host, self.port,
self.use_ssl, version)
def call(self, context, version, method, *args, **kwargs):
"""
Call a glance client method. If we get a connection error,
retry the request according to CONF.glance_num_retries.
"""
retry_excs = (glanceclient.exc.ServiceUnavailable,
glanceclient.exc.InvalidEndpoint,
glanceclient.exc.CommunicationError)
num_attempts = 1 + CONF.glance_num_retries
for attempt in xrange(1, num_attempts + 1):
client = self.client or self._create_onetime_client(context,
version)
try:
return getattr(client.images, method)(*args, **kwargs)
except retry_excs as e:
host = self.host
port = self.port
extra = "retrying"
error_msg = (_("Error contacting glance server "
"'%(host)s:%(port)s' for '%(method)s', "
"%(extra)s.") %
{'host': host, 'port': port,
'method': method, 'extra': extra})
if attempt == num_attempts:
extra = 'done trying'
LOG.exception(error_msg)
raise exception.GlanceConnectionFailed(
host=host, port=port, reason=str(e))
LOG.exception(error_msg)
time.sleep(1)
class GlanceImageService(object):
"""Provides storage and retrieval of disk image objects within Glance."""
def __init__(self, client=None):
self._client = client or GlanceClientWrapper()
#NOTE(jbresnah) build the table of download handlers at the beginning
# so that operators can catch errors at load time rather than whenever
# a user attempts to use a module. Note this cannot be done in glance
# space when this python module is loaded because the download module
# may require configuration options to be parsed.
self._download_handlers = {}
download_modules = image_xfers.load_transfer_modules()
for scheme in download_modules:
if scheme in CONF.allowed_direct_url_schemes:
mod = download_modules[scheme]
try:
self._download_handlers[scheme] = mod.get_download_hander()
except Exception as ex:
msg = _('When loading the module %(module_str)s the '
'following error occurred: %(ex)s')\
% {'module_str': str(mod), 'ex': ex}
LOG.error(msg)
def detail(self, context, **kwargs):
"""Calls out to Glance for a list of detailed image information."""
params = self._extract_query_params(kwargs)
try:
images = self._client.call(context, 1, 'list', **params)
except Exception:
_reraise_translated_exception()
_images = []
for image in images:
if self._is_image_available(context, image):
_images.append(self._translate_from_glance(image))
return _images
def _extract_query_params(self, params):
_params = {}
accepted_params = ('filters', 'marker', 'limit',
'page_size', 'sort_key', 'sort_dir')
for param in accepted_params:
if params.get(param):
_params[param] = params.get(param)
# ensure filters is a dict
_params.setdefault('filters', {})
# NOTE(vish): don't filter out private images
_params['filters'].setdefault('is_public', 'none')
return _params
def show(self, context, image_id):
"""Returns a dict with image data for the given opaque image id."""
try:
image = self._client.call(context, 1, 'get', image_id)
except Exception:
_reraise_translated_image_exception(image_id)
if not self._is_image_available(context, image):
raise exception.ImageNotFound(image_id=image_id)
base_image_meta = self._translate_from_glance(image)
return base_image_meta
def _get_locations(self, context, image_id):
"""Returns the direct url representing the backend storage location,
or None if this attribute is not shown by Glance.
"""
try:
client = GlanceClientWrapper()
image_meta = client.call(context, 2, 'get', image_id)
except Exception:
_reraise_translated_image_exception(image_id)
if not self._is_image_available(context, image_meta):
raise exception.ImageNotFound(image_id=image_id)
locations = getattr(image_meta, 'locations', [])
du = getattr(image_meta, 'direct_url', None)
if du:
locations.append({'url': du, 'metadata': {}})
return locations
def _get_transfer_module(self, scheme):
try:
return self._download_handlers[scheme]
except KeyError:
return None
except Exception as ex:
LOG.error(_("Failed to instantiate the download handler "
"for %(scheme)s") % {'scheme': scheme})
return
def download(self, context, image_id, data=None, dst_path=None):
"""Calls out to Glance for data and writes data."""
if CONF.allowed_direct_url_schemes and dst_path is not None:
locations = self._get_locations(context, image_id)
for entry in locations:
loc_url = entry['url']
loc_meta = entry['metadata']
o = urlparse.urlparse(loc_url)
xfer_mod = self._get_transfer_module(o.scheme)
if xfer_mod:
try:
xfer_mod.download(context, o, dst_path, loc_meta)
msg = _("Successfully transferred "
"using %s") % o.scheme
LOG.info(msg)
return
except Exception as ex:
LOG.exception(ex)
try:
image_chunks = self._client.call(context, 1, 'data', image_id)
except Exception:
_reraise_translated_image_exception(image_id)
close_file = False
if data is None and dst_path:
data = open(dst_path, 'wb')
close_file = True
if data is None:
return image_chunks
else:
try:
for chunk in image_chunks:
data.write(chunk)
finally:
if close_file:
data.close()
def create(self, context, image_meta, data=None):
"""Store the image data and return the new image object."""
sent_service_image_meta = self._translate_to_glance(image_meta)
if data:
sent_service_image_meta['data'] = data
try:
recv_service_image_meta = self._client.call(
context, 1, 'create', **sent_service_image_meta)
except glanceclient.exc.HTTPException:
_reraise_translated_exception()
return self._translate_from_glance(recv_service_image_meta)
def update(self, context, image_id, image_meta, data=None,
purge_props=True):
"""Modify the given image with the new data."""
image_meta = self._translate_to_glance(image_meta)
image_meta['purge_props'] = purge_props
#NOTE(bcwaldon): id is not an editable field, but it is likely to be
# passed in by calling code. Let's be nice and ignore it.
image_meta.pop('id', None)
if data:
image_meta['data'] = data
try:
image_meta = self._client.call(context, 1, 'update',
image_id, **image_meta)
except Exception:
_reraise_translated_image_exception(image_id)
else:
return self._translate_from_glance(image_meta)
def delete(self, context, image_id):
"""Delete the given image.
:raises: ImageNotFound if the image does not exist.
:raises: NotAuthorized if the user is not an owner.
:raises: ImageNotAuthorized if the user is not authorized.
"""
try:
self._client.call(context, 1, 'delete', image_id)
except glanceclient.exc.NotFound:
raise exception.ImageNotFound(image_id=image_id)
except glanceclient.exc.HTTPForbidden:
raise exception.ImageNotAuthorized(image_id=image_id)
return True
@staticmethod
def _translate_to_glance(image_meta):
image_meta = _convert_to_string(image_meta)
image_meta = _remove_read_only(image_meta)
return image_meta
@staticmethod
def _translate_from_glance(image):
image_meta = _extract_attributes(image)
image_meta = _convert_timestamps_to_datetimes(image_meta)
image_meta = _convert_from_string(image_meta)
return image_meta
@staticmethod
def _is_image_available(context, image):
"""Check image availability.
This check is needed in case Nova and Glance are deployed
without authentication turned on.
"""
# The presence of an auth token implies this is an authenticated
# request and we need not handle the noauth use-case.
if hasattr(context, 'auth_token') and context.auth_token:
return True
if image.is_public or context.is_admin:
return True
properties = image.properties
if context.project_id and ('owner_id' in properties):
return str(properties['owner_id']) == str(context.project_id)
if context.project_id and ('project_id' in properties):
return str(properties['project_id']) == str(context.project_id)
try:
user_id = properties['user_id']
except KeyError:
return False
return str(user_id) == str(context.user_id)
def _convert_timestamps_to_datetimes(image_meta):
"""Returns image with timestamp fields converted to datetime objects."""
for attr in ['created_at', 'updated_at', 'deleted_at']:
if image_meta.get(attr):
image_meta[attr] = timeutils.parse_isotime(image_meta[attr])
return image_meta
# NOTE(bcwaldon): used to store non-string data in glance metadata
def _json_loads(properties, attr):
prop = properties[attr]
if isinstance(prop, basestring):
properties[attr] = jsonutils.loads(prop)
def _json_dumps(properties, attr):
prop = properties[attr]
if not isinstance(prop, basestring):
properties[attr] = jsonutils.dumps(prop)
_CONVERT_PROPS = ('block_device_mapping', 'mappings')
def _convert(method, metadata):
metadata = copy.deepcopy(metadata)
properties = metadata.get('properties')
if properties:
for attr in _CONVERT_PROPS:
if attr in properties:
method(properties, attr)
return metadata
def _convert_from_string(metadata):
return _convert(_json_loads, metadata)
def _convert_to_string(metadata):
return _convert(_json_dumps, metadata)
def _extract_attributes(image):
IMAGE_ATTRIBUTES = ['size', 'disk_format', 'owner',
'container_format', 'checksum', 'id',
'name', 'created_at', 'updated_at',
'deleted_at', 'deleted', 'status',
'min_disk', 'min_ram', 'is_public']
output = {}
for attr in IMAGE_ATTRIBUTES:
output[attr] = getattr(image, attr, None)
output['properties'] = getattr(image, 'properties', {})
return output
def _remove_read_only(image_meta):
IMAGE_ATTRIBUTES = ['status', 'updated_at', 'created_at', 'deleted_at']
output = copy.deepcopy(image_meta)
for attr in IMAGE_ATTRIBUTES:
if attr in output:
del output[attr]
return output
def _reraise_translated_image_exception(image_id):
"""Transform the exception for the image but keep its traceback intact."""
exc_type, exc_value, exc_trace = sys.exc_info()
new_exc = _translate_image_exception(image_id, exc_value)
raise new_exc, None, exc_trace
def _reraise_translated_exception():
"""Transform the exception but keep its traceback intact."""
exc_type, exc_value, exc_trace = sys.exc_info()
new_exc = _translate_plain_exception(exc_value)
raise new_exc, None, exc_trace
def _translate_image_exception(image_id, exc_value):
if isinstance(exc_value, (glanceclient.exc.Forbidden,
glanceclient.exc.Unauthorized)):
return exception.ImageNotAuthorized(image_id=image_id)
if isinstance(exc_value, glanceclient.exc.NotFound):
return exception.ImageNotFound(image_id=image_id)
if isinstance(exc_value, glanceclient.exc.BadRequest):
return exception.Invalid(exc_value)
return exc_value
def _translate_plain_exception(exc_value):
if isinstance(exc_value, (glanceclient.exc.Forbidden,
glanceclient.exc.Unauthorized)):
return exception.NotAuthorized(exc_value)
if isinstance(exc_value, glanceclient.exc.NotFound):
return exception.NotFound(exc_value)
if isinstance(exc_value, glanceclient.exc.BadRequest):
return exception.Invalid(exc_value)
return exc_value
def get_remote_image_service(context, image_href):
"""Create an image_service and parse the id from the given image_href.
The image_href param can be an href of the form
'http://example.com:9292/v1/images/b8b2c6f7-7345-4e2f-afa2-eedaba9cbbe3',
or just an id such as 'b8b2c6f7-7345-4e2f-afa2-eedaba9cbbe3'. If the
image_href is a standalone id, then the default image service is returned.
:param image_href: href that describes the location of an image
:returns: a tuple of the form (image_service, image_id)
"""
# Calling out to another service may take a while, so lets log this
LOG.debug(_("fetching image %s from glance") % image_href)
#NOTE(bcwaldon): If image_href doesn't look like a URI, assume its a
# standalone image ID
if '/' not in str(image_href):
image_service = get_default_image_service()
return image_service, image_href
try:
(image_id, glance_host, glance_port, use_ssl) = \
_parse_image_ref(image_href)
glance_client = GlanceClientWrapper(context=context,
host=glance_host, port=glance_port, use_ssl=use_ssl)
except ValueError:
raise exception.InvalidImageRef(image_href=image_href)
image_service = GlanceImageService(client=glance_client)
return image_service, image_id
def get_default_image_service():
return GlanceImageService()
| apache-2.0 | 4,636,261,614,438,614,000 | 35.771429 | 79 | 0.594817 | false |
disqus/zumanji | src/zumanji/github.py | 1 | 2438 | from django.conf import settings
import logging
import requests
github_logger = logging.getLogger('github.api')
class RequestError(Exception):
pass
class NotFound(Exception):
pass
class GitHub(object):
def __init__(self, access_token, host='https://api.github.com'):
self.access_token = access_token
self.host = host
def request(self, method, url, params=None):
request = self.build_request(method, url, params)
return self.get_response(request).json
def build_request(self, method, url, params=None):
if params and method.upper() == 'GET':
kwargs = {'params': params.copy()}
else:
kwargs = {'data': params, 'params': {}}
kwargs['params']['access_token'] = self.access_token
if '://' not in url:
url = '%s/%s' % (self.host, url)
return (method, url, kwargs)
def get_response(self, request):
method, url, kwargs = request
logging.info('Fetching %s', url)
response = getattr(requests, method.lower())(url, **kwargs)
if response.status_code == 404:
raise NotFound(response.json['message'])
elif response.status_code != 200:
raise RequestError(response.json['message'])
return response
def get_commit(self, user, repo, sha):
return self.request('GET', 'repos/%s/%s/commits/%s' % (user, repo, sha))
def get_commit_url(self, user, repo, sha):
return '%s/%s/%s/commit/%s' % (self.host.replace('api.', ''), user, repo, sha)
def iter_commits(self, user, repo):
stack = []
request = self.build_request('GET', 'repos/%s/%s/commits' % (user, repo), {
'per_page': 100,
})
while True:
response = self.get_response(request)
data = response.json
if not data:
break
for result in data:
stack.append(result)
if 'next' not in response.links:
break
pagelink = response.links['next']['url']
request = self.build_request('GET', pagelink)
# FML
for result in reversed(stack):
yield result
def compare_commits(self, user, repo, prev, cur='HEAD'):
return self.request('GET', 'repos/%s/%s/compare/%s...%s' % (user, repo, prev, cur))
github = GitHub(access_token=settings.GITHUB_ACCESS_TOKEN)
| apache-2.0 | -7,869,444,380,287,161,000 | 28.373494 | 91 | 0.57178 | false |
leag/swftools | wx/gui/boldstatictext.py | 10 | 1137 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
#
# gpdf2swf.py
# graphical user interface for pdf2swf
#
# Part of the swftools package.
#
# Copyright (c) 2008,2009 Matthias Kramm <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */
import wx
class BoldStaticText(wx.StaticText):
def __init__(self, *args, **kwargs):
wx.StaticText.__init__(self, *args, **kwargs)
font = self.GetFont()
font.SetWeight(wx.BOLD)
self.SetFont(font)
| gpl-2.0 | 1,137,047,202,852,911,900 | 33.454545 | 78 | 0.71416 | false |
muupan/chainer | tests/chainer_tests/functions_tests/array_tests/test_select_item.py | 3 | 2059 | import unittest
import numpy
import chainer
from chainer import cuda
from chainer import functions
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr
class TestSelectItem(unittest.TestCase):
in_shape = (10, 5)
out_shape = (10,)
def setUp(self):
self.x_data = numpy.random.uniform(
-1, 1, self.in_shape).astype(numpy.float32)
self.t_data = numpy.random.randint(
0, 2, self.out_shape).astype(numpy.int32)
self.gy_data = numpy.random.uniform(
-1, 1, self.out_shape).astype(numpy.float32)
def check_forward(self, x_data, t_data):
x = chainer.Variable(x_data)
t = chainer.Variable(t_data)
y = functions.select_item(x, t)
y_exp = cuda.to_cpu(x_data)[range(t_data.size), cuda.to_cpu(t_data)]
numpy.testing.assert_equal(cuda.to_cpu(y.data), y_exp)
def test_forward_cpu(self):
self.check_forward(self.x_data, self.t_data)
@attr.gpu
def test_forward_gpu(self):
self.check_forward(cuda.to_gpu(self.x_data),
cuda.to_gpu(self.t_data))
def check_backward(self, x_data, t_data, gy_data):
x = chainer.Variable(x_data)
t = chainer.Variable(t_data)
y = functions.select_item(x, t)
y.grad = gy_data
y.backward()
self.assertEqual(None, t.grad)
func = y.creator
f = lambda: func.forward((x.data, t.data))
gx, = gradient_check.numerical_grad(f, (x.data,), (gy_data,), eps=0.01)
gradient_check.assert_allclose(gx, x.grad)
def test_backward_cpu(self):
self.check_backward(self.x_data, self.t_data, self.gy_data)
@attr.gpu
def test_backward_gpu(self):
self.check_backward(cuda.to_gpu(self.x_data),
cuda.to_gpu(self.t_data),
cuda.to_gpu(self.gy_data))
class TestSelectItemZeroSize(unittest.TestCase):
in_shape = (0, 5)
out_shape = (0,)
testing.run_module(__name__, __file__)
| mit | 2,196,119,370,395,829,200 | 28.414286 | 79 | 0.60272 | false |
rohan07/flask-peewee | flask_peewee/serializer.py | 7 | 1411 | import datetime
import sys
from peewee import Model
from flask_peewee.utils import get_dictionary_from_model
from flask_peewee.utils import get_model_from_dictionary
class Serializer(object):
date_format = '%Y-%m-%d'
time_format = '%H:%M:%S'
datetime_format = ' '.join([date_format, time_format])
def convert_value(self, value):
if isinstance(value, datetime.datetime):
return value.strftime(self.datetime_format)
elif isinstance(value, datetime.date):
return value.strftime(self.date_format)
elif isinstance(value, datetime.time):
return value.strftime(self.time_format)
elif isinstance(value, Model):
return value.get_id()
else:
return value
def clean_data(self, data):
for key, value in data.items():
if isinstance(value, dict):
self.clean_data(value)
elif isinstance(value, (list, tuple)):
data[key] = map(self.clean_data, value)
else:
data[key] = self.convert_value(value)
return data
def serialize_object(self, obj, fields=None, exclude=None):
data = get_dictionary_from_model(obj, fields, exclude)
return self.clean_data(data)
class Deserializer(object):
def deserialize_object(self, model, data):
return get_model_from_dictionary(model, data)
| mit | 7,912,314,017,186,323,000 | 31.813953 | 63 | 0.627215 | false |
DifferentSC/incubator-reef | dev/change_version.py | 6 | 9030 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
This script changes versions in every pom.xml and relevant files.
(How to run)
python change_version <reef_home> <reef_version_for_pom.xml> -s <true or false> (optional) -p
-s option changes value of 'IsSnapshot' in lang/cs/build.props.
If you use the option "-s false", bulid.props changes as,
<RemoveIncubating>true</RemoveIncubating>
<IsSnapshot>false</IsSnapshot>
<SnapshotNumber>00</SnapshotNumber>
If you use "-s true", then the value of 'IsSnapshot' is changed to true.
If you use "-p", then only the "pom.xml" files are changed.
You can also see how to run the script with "python change_version.py -h"
(Example)
python change_version ~/incubator_reef 0.12.0-incubating -s true
python change_version ~/incubator_reef 0.12.0-incubating -s false
python change_version ~/incubator_reef 0.12.0-incubating -p -s true
"""
import os
import re
import sys
import argparse
"""
Get list of path for every file in a directory
"""
def get_filepaths(directory):
file_paths = []
for root, directories, files in os.walk(directory):
for filename in files:
filepath = os.path.join(root, filename)
file_paths.append(filepath)
return file_paths
"""
Change REEF version to new_version in every pom.xml
"""
def change_pom(file, new_version):
changed_str = ""
ready_to_change = False
f = open(file, 'r')
while True:
line = f.readline()
if not line:
break
if "<groupId>org.apache.reef</groupId>" in line:
ready_to_change = True
if "<version>" in line and ready_to_change:
break
changed_str += line
r = re.compile('<version>(.*?)</version>')
m = r.search(line)
old_version = m.group(1)
changed_str += line.replace(old_version, new_version)
while True:
line = f.readline()
if not line:
break
changed_str += line
f = open(file, 'w')
f.write(changed_str)
f.close()
"""
Change JavaBridgeJarFileName in lang/cs/Org.Apache.REEF.Driver/Constants.cs
"""
def change_constants_cs(file, new_version):
changed_str = ""
f = open(file, 'r')
while True:
line = f.readline()
if not line:
break
if "JavaBridgeJarFileName" in line:
r = re.compile('"(.*?)"')
m = r.search(line)
old_version = m.group(1)
new_version = "reef-bridge-java-" + new_version + "-shaded.jar"
changed_str += line.replace(old_version, new_version)
else:
changed_str += line
f = open(file, 'w')
f.write(changed_str)
f.close()
"""
Change version in every AssemblyInfo.cs and lang/cs/Org.Apache.REEF.Bridge/AssemblyInfo.cpp
"""
def change_assembly_info_cs(file, new_version):
changed_str = ""
new_version = new_version.split("-")[0] + ".0"
f = open(file, 'r')
r = re.compile('"(.*?)"')
while True:
line = f.readline()
if not line:
break
if ("[assembly: AssemblyVersion(" in line and "*" not in line) or ("[assembly: AssemblyFileVersion(" in line) \
or ("[assembly:AssemblyVersionAttribute(" in line and "*" not in line) \
or ("[assembly:AssemblyFileVersion(" in line):
m = r.search(line)
old_version = m.group(1)
changed_str += line.replace(old_version, new_version)
else:
changed_str += line
f = open(file, 'w')
f.write(changed_str)
f.close()
"""
Read 'IsSnapshot' from lang/cs/build.props
"""
def read_is_snapshot(file):
f = open(file, 'r')
r = re.compile('<IsSnapshot>(.*?)</IsSnapshot>')
while True:
line = f.readline()
if not line:
break
if "<IsSnapshot>" in line and "</IsSnapshot>" in line:
m = r.search(line)
if(m.group(1)=="true"):
return True
else:
return False
"""
Change lang/cs/build.props for the release branch
"""
def change_build_props(file, is_snapshot):
changed_str = ""
f = open(file, 'r')
r1 = re.compile('<RemoveIncubating>(.*?)</RemoveIncubating>')
r2 = re.compile('<IsSnapshot>(.*?)</IsSnapshot>')
r3 = re.compile('<SnapshotNumber>(.*?)</SnapshotNumber>')
while True:
line = f.readline()
if not line:
break
if "<RemoveIncubating>" and "</RemoveIncubating>" in line and is_snapshot=="false":
old_remove_incubating = r1.search(line).group(1)
changed_str += line.replace(old_remove_incubating, "true")
elif "<IsSnapshot>" in line and "</IsSnapshot>" in line:
old_is_snapshot = r2.search(line).group(1)
changed_str += line.replace(old_is_snapshot, is_snapshot)
elif "<SnapshotNumber>" in line and "</SnapshotNumber>" in line and is_snapshot=="false":
old_snapshot_number = r3.search(line).group(1)
changed_str += line.replace(old_snapshot_number, "00")
else:
changed_str += line
f = open(file, 'w')
f.write(changed_str)
f.close()
print file
"""
Change the name of shaded.jar in run.cmd and lang/cs/Org.Apache.REEF.Client/Properties/Resources.xml
"""
def change_shaded_jar_name(file, new_version):
changed_str = ""
f = open(file, 'r')
r1 = re.compile('reef-bridge-java-(.*?)-shaded.jar')
r2 = re.compile('reef-bridge-client-(.*?)-shaded.jar')
while True:
line = f.readline()
if not line:
break
m1 = r1.search(line)
m2 = r2.search(line)
if m1 is not None:
changed_str += line.replace(m1.group(1), new_version)
elif m2 is not None:
changed_str += line.replace(m2.group(1), new_version)
else:
changed_str += line
f = open(file, 'w')
f.write(changed_str)
f.close()
"""
Change version of every pom.xml, every AsssemblyInfo.cs,
Constants.cs, AssemblyInfo.cpp, run.cmd and Resources.xml
"""
def change_version(reef_home, new_version, pom_only):
if pom_only:
for fi in get_filepaths(reef_home):
if "pom.xml" in fi:
print fi
change_pom(fi, new_version)
else:
for fi in get_filepaths(reef_home):
if "pom.xml" in fi:
print fi
change_pom(fi, new_version)
if "AssemblyInfo.cs" in fi:
print fi
change_assembly_info_cs(fi, new_version)
change_assembly_info_cs(reef_home + "/lang/cs/Org.Apache.REEF.Bridge/AssemblyInfo.cpp", new_version)
print reef_home + "/lang/cs/Org.Apache.REEF.Bridge/AssemblyInfo.cpp"
change_constants_cs(reef_home + "/lang/cs/Org.Apache.REEF.Driver/Constants.cs", new_version)
print reef_home + "/lang/cs/Org.Apache.REEF.Driver/Constants.cs"
change_shaded_jar_name(reef_home + "/lang/cs/Org.Apache.REEF.Client/Properties/Resources.xml", new_version)
print reef_home + "/lang/cs/Org.Apache.REEF.Client/Properties/Resources.xml"
change_shaded_jar_name(reef_home + "/lang/cs/Org.Apache.REEF.Client/run.cmd", new_version)
print reef_home + "/lang/cs/Org.Apache.REEF.Client/run.cmd"
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Script for changing version of every pom.xml, " \
+ "every AssemblyInfo.cs, Constants.cs, and AssemblyInfo.cpp")
parser.add_argument("reef_home", type=str, help="REEF home")
parser.add_argument("reef_version", type=str, help="REEF version")
parser.add_argument("-s", "--isSnapshot", type=str, metavar="<true or false>", help="Change 'IsSnapshot' to true or false", required=True)
parser.add_argument("-p", "--pomonly", help="Change only poms", action="store_true")
args = parser.parse_args()
reef_home = os.path.abspath(args.reef_home)
reef_version = args.reef_version
is_snapshot = args.isSnapshot
pom_only = args.pomonly
if is_snapshot is not None and not pom_only:
change_build_props(reef_home + "/lang/cs/build.props", is_snapshot)
if is_snapshot=="true":
reef_version += "-SNAPSHOT"
change_version(reef_home, reef_version, pom_only)
| apache-2.0 | -8,572,030,765,708,308,000 | 31.717391 | 142 | 0.618715 | false |
danielru/pySDC | projects/parallelSDC/minimization.py | 1 | 2450 | import numpy as np
import scipy.optimize as opt
import matplotlib.pylab as plt
import matplotlib
from pySDC.implementations.collocation_classes.gauss_radau_right import CollGaussRadau_Right
def main():
def rho(x):
return max(abs(np.linalg.eigvals(np.eye(M) - np.diag([x[i] for i in range(M)]).dot(coll.Qmat[1:, 1:]))))
M = 2
coll = CollGaussRadau_Right(M, 0, 1)
x0 = np.ones(M)
d = opt.minimize(rho, x0, method='Nelder-Mead')
print(d)
numsteps = 800
xdim = np.linspace(0, 8, numsteps)
ydim = np.linspace(0, 13, numsteps)
minfield = np.zeros((len(xdim), len(ydim)))
for idx, x in enumerate(xdim):
for idy, y in enumerate(ydim):
minfield[idx, idy] = max(abs(np.linalg.eigvals(np.eye(M) - np.diag([x, y]).dot(coll.Qmat[1:, 1:]))))
# Set up plotting parameters
params = {'legend.fontsize': 20,
'figure.figsize': (12, 8),
'axes.labelsize': 20,
'axes.titlesize': 20,
'xtick.labelsize': 16,
'ytick.labelsize': 16,
'lines.linewidth': 3
}
plt.rcParams.update(params)
matplotlib.style.use('classic')
plt.figure()
plt.pcolor(xdim, ydim, minfield.T, cmap='Reds', vmin=0, vmax=1)
plt.text(d.x[0], d.x[1], 'X', horizontalalignment='center', verticalalignment='center')
plt.xlim((min(xdim), max(xdim)))
plt.ylim((min(ydim), max(ydim)))
plt.xlabel('component 1')
plt.ylabel('component 2')
cbar = plt.colorbar()
cbar.set_label('spectral radius')
fname = 'data/parallelSDC_minimizer_full.png'
plt.savefig(fname, rasterized=True, bbox_inches='tight')
plt.figure()
xdim_part = xdim[int(0.25 * numsteps):int(0.75 * numsteps) + 1]
ydim_part = ydim[0:int(0.25 * numsteps)]
minfield_part = minfield[int(0.25 * numsteps):int(0.75 * numsteps) + 1, 0:int(0.25 * numsteps)]
plt.pcolor(xdim_part, ydim_part, minfield_part.T, cmap='Reds', vmin=0, vmax=1)
plt.text(d.x[0], d.x[1], 'X', horizontalalignment='center', verticalalignment='center')
plt.xlim((min(xdim_part), max(xdim_part)))
plt.ylim((min(ydim_part), max(ydim_part)))
plt.xlabel('component 1')
plt.ylabel('component 2')
cbar = plt.colorbar()
cbar.set_label('spectral radius')
fname = 'data/parallelSDC_minimizer_zoom.png'
plt.savefig(fname, rasterized=True, bbox_inches='tight')
if __name__ == "__main__":
main()
| bsd-2-clause | 1,681,728,549,459,951,000 | 32.108108 | 112 | 0.614694 | false |
matty-jones/MorphCT | morphct/code/obtain_chromophores.py | 1 | 42989 | import copy
import itertools
import os
import sys
import numpy as np
from collections import defaultdict
from scipy.spatial import Delaunay
from morphct.code import helper_functions as hf
class chromophore:
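    """
    Container for a single chromophore identified in the morphology.

    Stores the constituent CG/AA site IDs, the electronic species (donor or
    acceptor), the centre-of-mass position and its periodic image, and
    placeholder attributes (frontier orbital energies, neighbour lists,
    transfer integrals) that are populated by later stages of the MorphCT
    pipeline.
    """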
def __init__(
self,
chromo_ID,
chromophore_CG_sites,
CG_morphology_dict,
AA_morphology_dict,
CG_to_AAID_master,
parameter_dict,
sim_dims,
):
self.ID = chromo_ID
self.orca_input = "/chromophores/input_orca/single/{:05d}.inp".format(self.ID)
self.orca_output = "/chromophores/output_orca/single/{:05d}.out".format(self.ID)
self.CGIDs = chromophore_CG_sites
        # Determine whether this chromophore is a donor or an acceptor, as well
        # as the site types that have been defined as electronically active
        # in the chromophore
if CG_morphology_dict is not None:
# Normal operation
self.CG_types = sorted(
list(set([CG_morphology_dict["type"][CGID] for CGID in self.CGIDs]))
)
active_CG_sites, self.sub_species = self.obtain_electronic_species(
chromophore_CG_sites,
CG_morphology_dict["type"],
parameter_dict["CG_site_species"],
)
self.species = parameter_dict["chromophore_species"][self.sub_species][
"species"
]
self.reorganisation_energy = parameter_dict["chromophore_species"][
self.sub_species
]["reorganisation_energy"]
self.VRH_delocalisation = parameter_dict["chromophore_species"][
self.sub_species
]["VRH_delocalisation"]
# CG_to_AAID_master is a list of dictionaries where each list
# element corresponds to a new molecule. Firstly, flatten this out
# so that it becomes a single CG:AAID dictionary
flattened_CG_to_AAID_master = {
dict_key: dict_val[1]
for dictionary in CG_to_AAID_master
for dict_key, dict_val in dictionary.items()
}
# Now, using chromophore_CG_sites as the keys, build up a list of
# all of the AAIDs in the chromophore, where each element
# corresponds to each CG site, and then flatten it.
self.AAIDs = [
AAID
for AAIDs in [
flattened_CG_to_AAID_master[CGID] for CGID in chromophore_CG_sites
]
for AAID in AAIDs
]
# By using active_CG_sites, determine the AAIDs for
            # the electronically active portion of the chromophore, so that we
# can calculate its proper position. Again each element corresponds
# to each CG site so the list needs to be flattened afterwards.
electronically_active_AAIDs = [
AAID
for AAIDs in [
flattened_CG_to_AAID_master[CGID] for CGID in active_CG_sites
]
for AAID in AAIDs
]
else:
# No fine-graining has been performed by MorphCT, so we know that
# the input morphology is already atomistic.
if len(parameter_dict["CG_site_species"]) == 1:
# If the morphology contains only a single type of electronic
# species, then the parameter_dict['CG_site_species'] should
# only have one entry, and we can set all chromophores to be
# this species.
active_CG_sites = chromophore_CG_sites
electronically_active_AAIDs = chromophore_CG_sites
self.sub_species = list(parameter_dict["CG_site_species"].values())[0]
self.species = parameter_dict["chromophore_species"][self.sub_species][
"species"
]
self.reorganisation_energy = parameter_dict["chromophore_species"][
self.sub_species
]["reorganisation_energy"]
self.VRH_delocalisation = parameter_dict["chromophore_species"][
self.sub_species
]["VRH_delocalisation"]
elif (len(parameter_dict["CG_site_species"]) == 0) and (
len(parameter_dict["AA_rigid_body_species"]) > 0
):
# If the CG_site_species have not been specified, then look to
# the AA_rigid_body_species dictionary to determine which rigid
# bodies are donors and which are acceptors
electronically_active_AAIDs = []
for AAID in chromophore_CG_sites:
if AA_morphology_dict["body"][AAID] != -1:
electronically_active_AAIDs.append(AAID)
active_CG_sites = copy.deepcopy(electronically_active_AAIDs)
# Now work out what the species is:
for sub_species, rigid_bodies in parameter_dict[
"AA_rigid_body_species"
].items():
if AA_morphology_dict["body"][active_CG_sites[0]] in rigid_bodies:
self.sub_species = sub_species
self.species = parameter_dict["chromophore_species"][
self.sub_species
]["species"]
self.reorganisation_energy = parameter_dict[
"chromophore_species"
][self.sub_species]["reorganisation_energy"]
self.VRH_delocalisation = parameter_dict["chromophore_species"][
self.sub_species
]["VRH_delocalisation"]
break
try:
self.species
except AttributeError:
                    for key, val in self.__dict__.items():
print(key, val)
raise SystemError(
"Chromophore {:d} has no species! Exiting...".format(self.ID)
)
else:
raise SystemError(
"Multiple electronic species defined, but no way to map them"
" without a coarse-grained morphology (no CG morph has been given)"
)
self.AAIDs = chromophore_CG_sites
# The position of the chromophore can be calculated easily. Note that
# here, the `self.image' is the periodic image that the
# unwrapped_position of the chromophore is located in, relative to the
# original simulation volume.
electronically_active_unwrapped_posns = [
AA_morphology_dict["unwrapped_position"][AAID]
for AAID in electronically_active_AAIDs
]
electronically_active_types = [
AA_morphology_dict["type"][AAID] for AAID in electronically_active_AAIDs
]
self.unwrapped_posn, self.posn, self.image = self.obtain_chromophore_COM(
electronically_active_unwrapped_posns, electronically_active_types, sim_dims
)
# A list of the important bonds for this chromophore from the morphology
# would be useful when determining if a terminating group is already
# present on this monomer
self.bonds = self.get_important_bonds(AA_morphology_dict["bond"])
if CG_morphology_dict is not None:
# Determine if this chromophore is a repeat unit and therefore will
# need terminating before orca
CG_types = set(
[CG_morphology_dict["type"][CGID] for CGID in chromophore_CG_sites]
)
# self.terminate = True if any of the CGTypes in this chromophore
# are defined as having termination conditions in the parameter file
self.terminate = any(
CG_type in CG_types
for CG_type in [
connection[0]
for connection in parameter_dict["molecule_terminating_connections"]
]
)
else:
try:
if len(parameter_dict["molecule_terminating_connections"].keys()) == 0:
# Small molecules in atomistic morphology therefore no
# terminations needed
self.terminate = False
except AttributeError:
if len(parameter_dict["molecule_terminating_connections"]) == 0:
self.terminate = False
else:
# No CG morphology, but terminations have been specified, so
# we're dealing with a polymer
AA_types = set(
[AA_morphology_dict["type"][AAID] for AAID in self.AAIDs]
)
self.terminate = any(
AA_type in AA_types
for AA_type in [
connection
for connection in parameter_dict[
"molecule_terminating_connections"
]
]
)
        # Now create placeholder parameters, to be updated later once we have
        # the full chromophore list and the energy levels.
        # The self.neighbours list contains one element for each chromophore
        # within parameter_dict['maximum_hop_distance'] of this one (including
        # periodic boundary conditions). Its format is
        # [[neighbour1_ID, relative_image_of_neighbour1],...]
self.neighbours = []
self.dissociation_neighbours = []
# The molecular orbitals of this chromophore have not yet been
# calculated, but they will simply be floats.
self.HOMO = None
self.HOMO_1 = None
self.LUMO = None
self.LUMO_1 = None
        # The neighbours_delta_E and neighbours_TI are lists where each element
        # describes the difference in the important molecular orbital energies,
        # or the transfer integral, between this chromophore and each neighbour.
        # The list indices here are the same as in self.neighbours for coherence.
self.neighbours_delta_E = []
self.neighbours_TI = []
def get_important_bonds(self, bond_list):
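        """Return the subset of bonds from bond_list for which both bonded
        atoms belong to this chromophore (both AAIDs are in self.AAIDs)."""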
important_bonds = []
for bond in bond_list:
if (bond[1] in self.AAIDs) and (bond[2] in self.AAIDs):
important_bonds.append(bond)
return important_bonds
def obtain_chromophore_COM(
self,
electronically_active_unwrapped_posns,
electronically_active_types,
sim_dims,
):
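        """Calculate the centre of mass of the electronically active atoms and
        return its unwrapped position, the position wrapped back into the
        simulation volume, and the periodic image it was wrapped from."""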
# Calculate the chromophore's position in the morphology (CoM of all
# atoms in self.AAIDs from AA_morphology_dict)
chromo_unwrapped_posn = hf.calc_COM(
electronically_active_unwrapped_posns,
list_of_atom_types=electronically_active_types,
)
chromo_wrapped_posn = copy.deepcopy(chromo_unwrapped_posn)
chromo_wrapped_image = [0, 0, 0]
# Now calculate the wrapped position of the chromophore and its image
for axis in range(3):
sim_extent = sim_dims[axis][1] - sim_dims[axis][0]
while chromo_wrapped_posn[axis] < sim_dims[axis][0]:
chromo_wrapped_posn[axis] += sim_extent
chromo_wrapped_image[axis] -= 1
while chromo_wrapped_posn[axis] > sim_dims[axis][1]:
chromo_wrapped_posn[axis] -= sim_extent
chromo_wrapped_image[axis] += 1
return chromo_unwrapped_posn, chromo_wrapped_posn, chromo_wrapped_image
def obtain_electronic_species(
self, chromophore_CG_sites, CG_site_types, CG_to_species
):
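        """Identify the CG sites in this chromophore whose site type maps to an
        electronic species (i.e. not 'none'), and return those site IDs along
        with the single species they all share."""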
electronically_active_sites = []
current_chromophore_species = None
for CG_site_ID in chromophore_CG_sites:
site_type = CG_site_types[CG_site_ID]
site_species = CG_to_species[site_type]
if site_species.lower() != "none":
if (current_chromophore_species is not None) and (
current_chromophore_species != site_species
):
raise SystemError(
"Problem - multiple electronic species defined in the same "
" chromophore. Please modify the chromophore generation code "
" to fix this issue for your molecule!"
)
else:
current_chromophore_species = site_species
electronically_active_sites.append(CG_site_ID)
return electronically_active_sites, current_chromophore_species
def get_MO_energy(self):
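        """Return the frontier orbital energy relevant to this chromophore's
        species: the LUMO for acceptors and the HOMO for donors."""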
if self.species.lower() == "acceptor":
return self.LUMO
elif self.species.lower() == "donor":
return self.HOMO
else:
raise Exception("Chromo MUST be donor OR acceptor")
def calculate_chromophores(
CG_morphology_dict, AA_morphology_dict, CG_to_AAID_master, parameter_dict, sim_dims
):
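    """
    Assign every CG site in the morphology to a chromophore and return a list
    of chromophore instances (one per chromophore found).

    A minimal usage sketch (the dictionary arguments are assumed to follow the
    structures produced earlier in the MorphCT pipeline):

        chromophore_list = calculate_chromophores(
            CG_morphology_dict, AA_morphology_dict, CG_to_AAID_master,
            parameter_dict, sim_dims,
        )
    """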
# We make the assumption that a chromophore consists of one of each of the
# CG site types described by the same template file. For instance, if we
# have 3 sites 'A', 'B' and 'C' described in one file and one site 'D'
# described in another file then there are two chromophores species
# described by A-B-C and D. This will be treated automatically because the
# D's shouldn't be bonded to anything in the CGMorphologyDict if they are
# small molecules.
# Therefore, we need to assign each CG site in the morphology to a
# particular chromophore, so first, it's important to generate a
# `neighbour_list' of all bonded atoms
print("Determining chromophores in the system...")
bonded_atoms = hf.obtain_bonded_list(CG_morphology_dict["bond"])
chromophore_list = [i for i in range(len(CG_morphology_dict["type"]))]
for CG_site_ID, chromophore_ID in enumerate(chromophore_list):
CG_site_type = CG_morphology_dict["type"][CG_site_ID]
types_in_this_chromophore = [CG_site_type]
chromophore_list, types_in_this_chromophore = update_chromophores(
CG_site_ID,
chromophore_list,
bonded_atoms,
CG_morphology_dict["type"],
types_in_this_chromophore,
parameter_dict,
)
chromophore_data = {}
for atom_ID, chromo_ID in enumerate(chromophore_list):
if chromo_ID not in list(chromophore_data.keys()):
chromophore_data[chromo_ID] = [atom_ID]
else:
chromophore_data[chromo_ID].append(atom_ID)
# Now rename the chromophore IDs so that they increment sensibly (they will
# be used later for the orca files)
old_keys = sorted(chromophore_data.keys())
for new_key, old_key in enumerate(old_keys):
chromophore_data[new_key] = chromophore_data.pop(old_key)
print(
"{:d} chromophores successfully identified!".format(
len(list(chromophore_data.keys()))
)
)
# Now let's create a list of all the chromophore instances which contain all
# of the information we could ever want about them.
chromophore_instances = []
for chromo_ID, chromophore_CG_sites in chromophore_data.items():
print(
"\rCalculating properties of chromophore {:05d} of {:05d}...".format(
chromo_ID, len(list(chromophore_data.keys())) - 1
),
end=" ",
)
if sys.stdout is not None:
sys.stdout.flush()
chromophore_instances.append(
chromophore(
chromo_ID,
chromophore_CG_sites,
CG_morphology_dict,
AA_morphology_dict,
CG_to_AAID_master,
parameter_dict,
sim_dims,
)
)
print("")
return chromophore_instances
def calculate_chromophores_AA(
CG_morphology_dict,
AA_morphology_dict,
CG_to_AAID_master,
parameter_dict,
sim_dims,
rigid_bodies=None,
):
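    """
    Atomistic analogue of calculate_chromophores: determine the chromophores by
    iterating over the all-atom bond list (and, optionally, the rigid-body
    assignments described below) and return a list of chromophore instances.
    """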
# If rigid_bodies == None:
# This function works in the same way as the coarse-grained version above,
# except this one iterates through the AA bonds instead. This is FAR SLOWER
# and so shouldn't be done, except in the case where the coarse-grained
# morphology does not exist (because we started with an atomistic morphology
# and are only interested in running KMC on it)
# If rigid_bodies == AA_morphology_dict['body']:
# This function uses the rigid bodies specified in
# parameter_dict['AA_rigid_body_species'], and those which have not been
# specified by iterating through the AA bond list, to determine the
# chromophores in the system. This is the slowest way to calculate
# chromophores, but is useful for systems such as BDT-TPD, where there are
# multiple chromophores of differing species present in the same molecule.
# As above, this code will only run if an atomistic morphology has been
# input to MorphCT. If it is coarse-grained, the CG-based
# "calculate_chromophore" function will be used, and will also be a lot
# faster.
# The parameter_dict['AA_rigid_body_species'] is a dictionary with two keys,
# 'donor' or 'acceptor'. Each element in the value list corresponds to a new
# chromophore. These aren't the only atoms that belong to this chromophore,
# however - there might be a bunch of aliphatic/flexible atoms that are
# connected, so we need to make sure that we add those too.
print("Determining chromophores in the system...")
bonded_atoms = hf.obtain_bonded_list(AA_morphology_dict["bond"])
chromophore_list = [i for i in range(len(AA_morphology_dict["type"]))]
for AA_site_ID, chromophore_ID in enumerate(chromophore_list):
AA_site_type = AA_morphology_dict["type"][AA_site_ID]
chromophore_list = update_chromophores_AA(
AA_site_ID, chromophore_list, bonded_atoms, parameter_dict, rigid_bodies
)
chromophore_data = {}
for atom_ID, chromo_ID in enumerate(chromophore_list):
if chromo_ID not in list(chromophore_data.keys()):
chromophore_data[chromo_ID] = [atom_ID]
else:
chromophore_data[chromo_ID].append(atom_ID)
# Now rename the chromophore IDs so that they increment sensibly (they will
# be used later for the orca files)
old_keys = sorted(chromophore_data.keys())
for new_key, old_key in enumerate(old_keys):
chromophore_data[new_key] = chromophore_data.pop(old_key)
print(
"{:d} chromophores successfully identified!".format(
len(list(chromophore_data.keys()))
)
)
# Now let's create a list of all the chromophore instances which contain all
# of the information we could ever want about them.
chromophore_instances = []
for chromo_ID, chromophore_CG_sites in chromophore_data.items():
print(
"\rCalculating properties of chromophore {:05d} of {:05d}...".format(
chromo_ID, len(list(chromophore_data.keys())) - 1
),
end=" ",
)
if sys.stdout is not None:
sys.stdout.flush()
chromophore_instances.append(
chromophore(
chromo_ID,
chromophore_CG_sites,
CG_morphology_dict,
AA_morphology_dict,
CG_to_AAID_master,
parameter_dict,
sim_dims,
)
)
print("")
return chromophore_instances
def update_chromophores(
atom_ID,
chromophore_list,
bonded_atoms,
CG_type_list,
types_in_this_chromophore,
parameter_dict,
):
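    """
    Recursive helper for calculate_chromophores: absorb all eligible bonded
    neighbours of atom_ID into its chromophore and return the updated
    chromophore_list and types_in_this_chromophore.
    """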
# Recursively add all neighbours of atom number atom_ID to this chromophore,
# providing the same type does not already exist in it
try:
for bonded_atom in bonded_atoms[atom_ID]:
bonded_type = CG_type_list[bonded_atom]
# First check that the bonded_atom's type is not already in this
# chromophore.
# Also, check that the type to be added is of the same electronic
# species as the ones added previously, or == 'None'
if (bonded_type not in types_in_this_chromophore) and (
(parameter_dict["CG_site_species"][bonded_type].lower() == "none")
or (
parameter_dict["CG_site_species"][bonded_type].lower()
== list(
set(
[
parameter_dict["CG_site_species"][x].lower()
for x in types_in_this_chromophore
]
)
)[0]
)
):
# If the atomID of the bonded atom is larger than that of the
# current one, update the bonded atom's ID to the current one's
# to put it in this chromophore, then iterate through all of the
# bonded atom's neighbours
if chromophore_list[bonded_atom] > chromophore_list[atom_ID]:
chromophore_list[bonded_atom] = chromophore_list[atom_ID]
types_in_this_chromophore.append(bonded_type)
chromophore_list, types_in_this_chromophore = update_chromophores(
bonded_atom,
chromophore_list,
bonded_atoms,
CG_type_list,
types_in_this_chromophore,
parameter_dict,
)
# If the atomID of the current atom is larger than that of the
# bonded one, update the current atom's ID to the bonded one's
# to put it in this chromophore, then iterate through all of the
# current atom's neighbours
elif chromophore_list[bonded_atom] < chromophore_list[atom_ID]:
chromophore_list[atom_ID] = chromophore_list[bonded_atom]
types_in_this_chromophore.append(CG_type_list[atom_ID])
chromophore_list, types_in_this_chromophore = update_chromophores(
atom_ID,
chromophore_list,
bonded_atoms,
CG_type_list,
types_in_this_chromophore,
parameter_dict,
)
# Else: both the current and the bonded atom are already known
# to be in this chromophore, so we don't have to do anything
# else.
except KeyError:
# This means that there are no bonded CG sites (i.e. it's a single
# chromophore)
pass
return chromophore_list, types_in_this_chromophore
def update_chromophores_AA(
atom_ID, chromophore_list, bonded_atoms, parameter_dict, rigid_bodies=None
):
# This version of the update chromophores function does not check for CG
# site types, instead just adding all bonded atoms. Therefore it should only
# be used in the case of already-atomistic morphologies (no CG morph
# specified) containing ONLY small molecules
try:
for bonded_atom in bonded_atoms[atom_ID]:
if rigid_bodies is not None:
# Skip if the bonded atom belongs to a different rigid body
if (
(rigid_bodies[bonded_atom] != -1) and (rigid_bodies[atom_ID] != -1)
) and (rigid_bodies[bonded_atom] != rigid_bodies[atom_ID]):
continue
# If the atomID of the bonded atom is larger than that of the
# current one, update the bonded atom's ID to the current one's to
# put it in this chromophore, then iterate through all of the bonded
# atom's neighbours
if chromophore_list[bonded_atom] > chromophore_list[atom_ID]:
chromophore_list[bonded_atom] = chromophore_list[atom_ID]
chromophore_list = update_chromophores_AA(
bonded_atom,
chromophore_list,
bonded_atoms,
parameter_dict,
rigid_bodies,
)
# If the atomID of the current atom is larger than that of the
# bonded one, update the current atom's ID to the bonded one's to
# put it in this chromophore, then iterate through all of the
# current atom's neighbours
elif chromophore_list[bonded_atom] < chromophore_list[atom_ID]:
chromophore_list[atom_ID] = chromophore_list[bonded_atom]
chromophore_list = update_chromophores_AA(
atom_ID,
chromophore_list,
bonded_atoms,
parameter_dict,
rigid_bodies,
)
# Else: both the current and the bonded atom are already known to be
# in this chromophore, so we don't have to do anything else.
except KeyError:
        # This means that there are no bonded atoms (i.e. it's a single
        # chromophore)
pass
return chromophore_list
def create_super_cell(chromophore_list, box_size):
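    """Attach to every chromophore the 27 periodic-image positions (a 3x3x3
    supercell of the simulation box) and the corresponding image vectors,
    storing them as super_cell_posns and super_cell_images."""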
for chromophore in chromophore_list:
chromophore.super_cell_posns = []
chromophore.super_cell_images = []
for x_image in range(-1, 2):
for y_image in range(-1, 2):
for z_image in range(-1, 2):
chromophore.super_cell_posns.append(
np.array(chromophore.posn)
+ (np.array([x_image, y_image, z_image]) * (np.array(box_size)))
)
chromophore.super_cell_images.append(
np.array([x_image, y_image, z_image])
)
return chromophore_list
def get_voronoi_neighbours(tri, chromo_list):
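    """Build a dictionary mapping each chromophore's periodic_ID to the set of
    periodic_IDs it shares a Delaunay simplex with (i.e. its Voronoi
    neighbours), given a scipy.spatial.Delaunay tessellation tri."""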
n_list = defaultdict(set)
for p in tri.vertices:
for i, j in itertools.permutations(p, 2):
n_list[chromo_list[i].periodic_ID].add(chromo_list[j].periodic_ID)
return n_list
class super_cell_chromo:
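    """Lightweight record for one periodic image of a chromophore: its species,
    the ID of the original chromophore, a unique periodic_ID, and the image's
    position and image vector."""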
def __init__(self):
self.species = None
self.original_ID = None
self.periodic_ID = None
self.position = None
self.image = None
def update_chromophore_list_voronoi(
IDs_to_update, super_cell_chromos, neighbour_IDs, chromophore_list, sim_dims
):
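    """
    Transfer the Voronoi neighbour information from the supercell back onto the
    original chromophores: for every chromophore in the home image ([0, 0, 0]),
    record each neighbour's ID and relative periodic image, appending to
    .neighbours for like species and to .dissociation_neighbours for opposing
    species. Returns the updated chromophore_list.
    """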
# IDs to Update is a list of the periodic chromophores with the image
# [0, 0, 0]
for periodic_ID in IDs_to_update:
# Obtain the real chromophore corresponding to this periodic_ID
chromophore1 = chromophore_list[super_cell_chromos[periodic_ID].original_ID]
assert np.array_equal(super_cell_chromos[periodic_ID].image, [0, 0, 0])
# Get latest neighbour information
chromo1neighbour_IDs = [
neighbour_data[0] for neighbour_data in chromophore1.neighbours
]
chromo1dissociation_neighbour_IDs = [
neighbour_data[0] for neighbour_data in chromophore1.dissociation_neighbours
]
for neighbour_periodic_ID in neighbour_IDs[periodic_ID]:
neighbour_super_cell_chromo = super_cell_chromos[neighbour_periodic_ID]
chromophore2 = chromophore_list[neighbour_super_cell_chromo.original_ID]
chromo2neighbour_IDs = [
neighbour_data[0] for neighbour_data in chromophore2.neighbours
]
chromo2dissociation_neighbour_IDs = [
neighbour_data[0]
for neighbour_data in chromophore2.dissociation_neighbours
]
relative_image = neighbour_super_cell_chromo.image
if chromophore1.species == chromophore2.species:
if chromophore2.ID not in chromo1neighbour_IDs:
chromophore1.neighbours.append(
[chromophore2.ID, list(np.array(relative_image))]
)
chromophore1.neighbours_delta_E.append(None)
chromophore1.neighbours_TI.append(None)
chromo1neighbour_IDs.append(chromophore2.ID)
if chromophore1.ID not in chromo2neighbour_IDs:
chromophore2.neighbours.append(
[chromophore1.ID, list(-np.array(relative_image))]
)
chromophore2.neighbours_delta_E.append(None)
chromophore2.neighbours_TI.append(None)
chromo2neighbour_IDs.append(chromophore1.ID)
else:
if chromophore2.ID not in chromo1dissociation_neighbour_IDs:
chromophore1.dissociation_neighbours.append(
[chromophore2.ID, list(np.array(relative_image))]
)
chromo1dissociation_neighbour_IDs.append(chromophore2.ID)
if chromophore1.ID not in chromo2dissociation_neighbour_IDs:
chromophore2.dissociation_neighbours.append(
[chromophore1.ID, list(-np.array(relative_image))]
)
chromo2dissociation_neighbour_IDs.append(chromophore1.ID)
return chromophore_list
def determine_neighbours_voronoi(chromophore_list, parameter_dict, sim_dims):
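    """
    Determine chromophore neighbours using a Voronoi (Delaunay) analysis of a
    3x3x3 supercell so that periodic boundary conditions are handled correctly.
    Depending on parameter_dict['permit_hops_through_opposing_chromophores'],
    the tessellation is performed either separately for the donor and acceptor
    moieties or once for all chromophores together. Returns the updated
    chromophore_list.
    """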
box_size = [axis[1] - axis[0] for axis in sim_dims]
# First create the supercell
super_cell = create_super_cell(chromophore_list, box_size)
donor_chromos = []
acceptor_chromos = []
all_chromos = []
chromo_index = 0
for chromophore in super_cell:
for index, position in enumerate(chromophore.super_cell_posns):
chromo = super_cell_chromo()
chromo.species = chromophore.species
chromo.original_ID = chromophore.ID
chromo.periodic_ID = chromo_index
chromo.position = position
chromo.image = chromophore.super_cell_images[index]
chromo_index += 1
if chromophore.species.lower() == "donor":
donor_chromos.append(chromo)
elif chromophore.species.lower() == "acceptor":
acceptor_chromos.append(chromo)
all_chromos.append(chromo)
# Now obtain the positions and send them to the Delaunay Triangulation
# Then get the voronoi neighbours
all_positions = [chromo.position for chromo in all_chromos]
# Initialise the neighbour dictionaries
all_neighbours = {}
# Update the relevant neighbour dictionaries if we have the right
# chromophore types in the system. Also log the chromophoreIDs from the
# original simulation volume (non-periodic). Chromophores in the original
# simulation volume will be every 27th (there are 27 periodic images in the
# triple range(-1,2)), beginning from #13 ((0, 0, 0) is the thirteenth
# element of the triple range(-1,2)) up to the length of the list in
# question.
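    # For example, taking that ordering (triple range(-1, 2)), chromophore 2's
    # home-image (0, 0, 0) copy sits at periodic_ID 2 * 27 + 13 = 67 in the
    # flattened supercell list.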
original_all_chromo_IDs = []
try:
if parameter_dict["permit_hops_through_opposing_chromophores"]:
# Need to only consider the neighbours of like chromophore species
donor_positions = [chromo.position for chromo in donor_chromos]
acceptor_positions = [chromo.position for chromo in acceptor_chromos]
donor_neighbours = {}
acceptor_neighbours = {}
original_donor_chromo_IDs = []
original_acceptor_chromo_IDs = []
for chromophore in all_chromos:
if np.array_equal(chromophore.image, [0, 0, 0]):
original_all_chromo_IDs.append(chromophore.periodic_ID)
if chromophore.species.lower() == "donor":
original_donor_chromo_IDs.append(chromophore.periodic_ID)
elif chromophore.species.lower() == "acceptor":
original_acceptor_chromo_IDs.append(chromophore.periodic_ID)
if len(donor_positions) > 0:
print("Calculating Neighbours of donor Moieties")
donor_neighbours = get_voronoi_neighbours(
Delaunay(donor_positions), donor_chromos
)
print("Updating the chromophore list for donor chromos")
chromophore_list = update_chromophore_list_voronoi(
original_donor_chromo_IDs,
all_chromos,
donor_neighbours,
chromophore_list,
sim_dims,
)
if len(acceptor_positions) > 0:
print("Calculating Neighbours of acceptor Moieties")
acceptor_neighbours = get_voronoi_neighbours(
Delaunay(acceptor_positions), acceptor_chromos
)
print("Updating the chromophore list for acceptor chromos")
chromophore_list = update_chromophore_list_voronoi(
original_acceptor_chromo_IDs,
all_chromos,
acceptor_neighbours,
chromophore_list,
sim_dims,
)
else:
raise KeyError
except KeyError:
# Default behaviour - carriers are blocked by the opposing species
for chromophore in all_chromos:
if np.array_equal(chromophore.image, [0, 0, 0]):
original_all_chromo_IDs.append(chromophore.periodic_ID)
print("Calculating Neighbours of All Moieties")
all_neighbours = get_voronoi_neighbours(Delaunay(all_positions), all_chromos)
print("Updating the chromophore list for dissociation neighbours")
chromophore_list = update_chromophore_list_voronoi(
original_all_chromo_IDs, all_chromos, all_neighbours, chromophore_list, sim_dims
)
return chromophore_list
def determine_neighbours_cut_off(chromophore_list, parameter_dict, sim_dims):
for chromophore1 in chromophore_list:
print(
"\rIdentifying neighbours of chromophore {:05d} of {:05d}...".format(
chromophore1.ID, len(chromophore_list) - 1
),
end=" ",
)
if sys.stdout is not None:
sys.stdout.flush()
for chromophore2 in chromophore_list:
# Skip if chromo2 is chromo1
if chromophore1.ID == chromophore2.ID:
continue
delta_posn = chromophore2.posn - chromophore1.posn
relative_image_of_chromo2 = [0, 0, 0]
# Consider periodic boundary conditions
for axis in range(3):
half_box_length = (sim_dims[axis][1] - sim_dims[axis][0]) / 2.0
while delta_posn[axis] > half_box_length:
delta_posn[axis] -= sim_dims[axis][1] - sim_dims[axis][0]
relative_image_of_chromo2[axis] -= 1
while delta_posn[axis] < -half_box_length:
delta_posn[axis] += sim_dims[axis][1] - sim_dims[axis][0]
relative_image_of_chromo2[axis] += 1
separation = np.linalg.norm(delta_posn)
# If proximity is within tolerance, add these chromophores as
# neighbours. Base check is against the maximum of the donor and
# acceptor hop distances. A further separation check is made if the
# chromophores are the same type to make sure we don't exceed the
# maximum specified hop distance for the carrier type.
if separation <= max(
[
parameter_dict["maximum_hole_hop_distance"],
parameter_dict["maximum_electron_hop_distance"],
]
):
# Only add the neighbours if they haven't already been added so
# far
chromo1neighbour_IDs = [
neighbour_data[0] for neighbour_data in chromophore1.neighbours
]
chromo2neighbour_IDs = [
neighbour_data[0] for neighbour_data in chromophore2.neighbours
]
chromo1dissociation_neighbour_IDs = [
neighbour_data[0]
for neighbour_data in chromophore1.dissociation_neighbours
]
chromo2dissociation_neighbour_IDs = [
neighbour_data[0]
for neighbour_data in chromophore2.dissociation_neighbours
]
# Also, make the delta_E and the T_ij lists as long as the
# neighbour lists for easy access later
if chromophore1.species == chromophore2.species:
if (
(chromophore1.species.lower() == "donor")
and (separation >= parameter_dict["maximum_hole_hop_distance"])
) or (
(chromophore1.species.lower() == "acceptor")
and (
separation
>= parameter_dict["maximum_electron_hop_distance"]
)
):
continue
if chromophore2.ID not in chromo1neighbour_IDs:
chromophore1.neighbours.append(
[chromophore2.ID, relative_image_of_chromo2]
)
chromophore1.neighbours_delta_E.append(None)
chromophore1.neighbours_TI.append(None)
if chromophore1.ID not in chromo2neighbour_IDs:
chromophore2.neighbours.append(
[
chromophore1.ID,
list(-np.array(relative_image_of_chromo2)),
]
)
chromophore2.neighbours_delta_E.append(None)
chromophore2.neighbours_TI.append(None)
else:
                    # NOTE: Modifying this so that only dissociation neighbours in the
                    # same periodic image are considered.
                    if (chromophore2.ID not in chromo1dissociation_neighbour_IDs) and (
                        np.all(np.isclose(relative_image_of_chromo2, [0, 0, 0]))
                    ):
                        chromophore1.dissociation_neighbours.append(
                            [chromophore2.ID, [0, 0, 0]]
                        )
                    if (chromophore1.ID not in chromo2dissociation_neighbour_IDs) and (
                        np.all(np.isclose(relative_image_of_chromo2, [0, 0, 0]))
                    ):
                        chromophore2.dissociation_neighbours.append(
                            [chromophore1.ID, [0, 0, 0]]
                        )
print("")
return chromophore_list
def chromo_sort(chromophore_list):
for index, chromo in enumerate(chromophore_list):
if index != chromo.ID:
print(
"Inconsistency found in the ordering of the chromophore_list, rewriting"
" the chromophore_list in the correct order..."
)
new_chromophore_list = []
for chromo in chromophore_list:
new_chromophore_list.append(0)
for chromo in chromophore_list:
new_chromophore_list[chromo.ID] = chromo
chromophore_list = new_chromophore_list
return chromophore_list
return chromophore_list
def main(
AA_morphology_dict,
CG_morphology_dict,
CG_to_AAID_master,
parameter_dict,
chromophore_list,
):
sim_dims = [
[-AA_morphology_dict["lx"] / 2.0, AA_morphology_dict["lx"] / 2.0],
[-AA_morphology_dict["ly"] / 2.0, AA_morphology_dict["ly"] / 2.0],
[-AA_morphology_dict["lz"] / 2.0, AA_morphology_dict["lz"] / 2.0],
]
if len(parameter_dict["CG_to_template_dirs"]) > 0:
# Normal operation using the coarse-grained morphology
chromophore_list = calculate_chromophores(
CG_morphology_dict,
AA_morphology_dict,
CG_to_AAID_master,
parameter_dict,
sim_dims,
)
elif (len(parameter_dict["CG_site_species"]) == 1) and (
len(parameter_dict["AA_rigid_body_species"]) == 0
):
# Small molecule system with only one electronic species
chromophore_list = calculate_chromophores_AA(
CG_morphology_dict,
AA_morphology_dict,
CG_to_AAID_master,
parameter_dict,
sim_dims,
)
else:
# Other system, with electronically active species specified as rigid
# bodies using AA_rigid_body_species in parameter file
chromophore_list = calculate_chromophores_AA(
CG_morphology_dict,
AA_morphology_dict,
CG_to_AAID_master,
parameter_dict,
sim_dims,
rigid_bodies=AA_morphology_dict["body"],
)
chromophore_list = chromo_sort(chromophore_list)
if parameter_dict["use_voronoi_neighbours"] is True:
chromophore_list = determine_neighbours_voronoi(
chromophore_list, parameter_dict, sim_dims
)
else:
chromophore_list = determine_neighbours_cut_off(
chromophore_list, parameter_dict, sim_dims
)
# Now we have updated the chromophore_list, rewrite the pickle with this new
# information.
pickle_name = os.path.join(
parameter_dict["output_morphology_directory"],
"code",
"".join([os.path.splitext(parameter_dict["morphology"])[0], ".pickle"]),
)
hf.write_pickle(
(
AA_morphology_dict,
CG_morphology_dict,
CG_to_AAID_master,
parameter_dict,
chromophore_list,
),
pickle_name,
)
return (
AA_morphology_dict,
CG_morphology_dict,
CG_to_AAID_master,
parameter_dict,
chromophore_list,
)
if __name__ == "__main__":
try:
pickle_file = sys.argv[1]
    except IndexError:
        print(
            "Please specify the pickle file to load to continue the pipeline from this"
            " point."
        )
        sys.exit(1)
pickle_data = hf.load_pickle(pickle_file)
AA_morphology_dict = pickle_data[0]
CG_morphology_dict = pickle_data[1]
CG_to_AAID_master = pickle_data[2]
parameter_dict = pickle_data[3]
chromophore_list = pickle_data[4]
main(
AA_morphology_dict,
CG_morphology_dict,
CG_to_AAID_master,
parameter_dict,
chromophore_list,
)
| gpl-3.0 | -9,052,352,564,157,246,000 | 44.251579 | 88 | 0.56996 | false |
tbabej/astropy | astropy/io/ascii/connect.py | 4 | 2658 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
# This file connects the readers/writers to the astropy.table.Table class
from __future__ import absolute_import, division, print_function
import re
import functools
from .. import registry as io_registry
from ...table import Table
from ...extern.six.moves import zip
__all__ = []
# Generic
# =======
def read_asciitable(filename, **kwargs):
from .ui import read
return read(filename, **kwargs)
io_registry.register_reader('ascii', Table, read_asciitable)
def write_asciitable(table, filename, **kwargs):
from .ui import write
return write(table, filename, **kwargs)
io_registry.register_writer('ascii', Table, write_asciitable)
def io_read(format, filename, **kwargs):
from .ui import read
format = re.sub(r'^ascii\.', '', format)
return read(filename, format=format, **kwargs)
def io_write(format, table, filename, **kwargs):
from .ui import write
format = re.sub(r'^ascii\.', '', format)
return write(table, filename, format=format, **kwargs)
def io_identify(suffix, origin, filepath, fileobj, *args, **kwargs):
return filepath is not None and filepath.endswith(suffix)
def _get_connectors_table():
from .core import FORMAT_CLASSES
rows = []
rows.append(('ascii', '', 'Yes', 'ASCII table in any supported format (uses guessing)'))
for format in sorted(FORMAT_CLASSES):
cls = FORMAT_CLASSES[format]
io_format = 'ascii.' + cls._format_name
description = getattr(cls, '_description', '')
class_link = ':class:`~{0}.{1}`'.format(cls.__module__, cls.__name__)
suffix = getattr(cls, '_io_registry_suffix', '')
can_write = 'Yes' if getattr(cls, '_io_registry_can_write', True) else ''
rows.append((io_format, suffix, can_write,
'{0}: {1}'.format(class_link, description)))
out = Table(list(zip(*rows)), names=('Format', 'Suffix', 'Write', 'Description'))
for colname in ('Format', 'Description'):
width = max(len(x) for x in out[colname])
out[colname].format = '%-{0}s'.format(width)
return out
# Specific
# ========
def read_csv(filename, **kwargs):
from .ui import read
kwargs['format'] = 'csv'
return read(filename, **kwargs)
def write_csv(table, filename, **kwargs):
from .ui import write
kwargs['format'] = 'csv'
return write(table, filename, **kwargs)
csv_identify = functools.partial(io_identify, '.csv')
io_registry.register_reader('csv', Table, read_csv)
io_registry.register_writer('csv', Table, write_csv)
io_registry.register_identifier('csv', Table, csv_identify)
| bsd-3-clause | 1,356,188,286,023,232,300 | 27.580645 | 92 | 0.65237 | false |
gizatt/director | src/python/ddapp/depthimageprovider.py | 6 | 1182 | import ddapp.vtkAll as vtk
import ddapp.vtkNumpy as vnp
import drc as lcmdrc
import numpy as np
import time
class DepthImageProvider(object):
def __init__(self):
self.source = vtk.vtkMapServerSource()
self.source.Start()
def waitForSceneHeight(self):
viewId = lcmdrc.data_request_t.HEIGHT_MAP_SCENE
while self.source.GetCurrentMapId(viewId) < 0:
time.sleep(0.1)
def getSceneHeightData(self):
return self.getDepthMapData(lcmdrc.data_request_t.HEIGHT_MAP_SCENE)
def getDepthMapData(self, viewId):
mapId = self.source.GetCurrentMapId(viewId)
if mapId < 0:
return None, None
depthImage = vtk.vtkImageData()
transform = vtk.vtkTransform()
# print "getting depth image for viewId {:d} mapId {:d}".format(viewId, mapId)
self.source.GetDataForMapId(viewId, mapId, depthImage, transform)
dims = depthImage.GetDimensions()
d = vnp.getNumpyFromVtk(depthImage, 'ImageScalars')
d = d.reshape(dims[1], dims[0])
t = np.array([[transform.GetMatrix().GetElement(r, c) for c in xrange(4)] for r in xrange(4)])
return d, t
| bsd-3-clause | -3,404,061,133,051,140,600 | 30.105263 | 102 | 0.652284 | false |
kangkot/fbthrift | thrift/test/py/SimpleJSONReadTest.py | 12 | 4981 | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
# @lint-avoid-pyflakes2
# @lint-avoid-pyflakes3
import unittest
import math
from thrift.protocol import TSimpleJSONProtocol
from thrift.transport.TTransport import TMemoryBuffer
from SimpleJSONRead.ttypes import SomeStruct, Stuff
def writeToJSON(obj):
trans = TMemoryBuffer()
proto = TSimpleJSONProtocol.TSimpleJSONProtocol(trans)
obj.write(proto)
return trans.getvalue()
def readStuffFromJSON(jstr, struct_type=Stuff):
stuff = struct_type()
trans = TMemoryBuffer(jstr)
proto = TSimpleJSONProtocol.TSimpleJSONProtocol(trans,
struct_type.thrift_spec)
stuff.read(proto)
return stuff
class TestSimpleJSONRead(unittest.TestCase):
def test_primitive_type(self):
stuff = Stuff(
aString="hello",
aShort=10,
anInteger=23990,
aLong=123456789012,
aDouble=1234567.9,
aBool=True)
j = writeToJSON(stuff)
stuff_read = readStuffFromJSON(j)
self.assertEqual(stuff_read.aString, "hello")
self.assertEqual(stuff_read.aShort, 10)
self.assertEqual(stuff_read.anInteger, 23990)
self.assertEqual(stuff_read.aLong, 123456789012)
self.assertEqual(stuff_read.aDouble, 1234567.9)
self.assertTrue(stuff_read.aBool)
def test_unusual_numbers(self):
j = '{ "aListOfDouble": ["inf", "-inf", "nan"]}'
stuff_read = readStuffFromJSON(j)
self.assertEqual(len(stuff_read.aListOfDouble), 3)
self.assertTrue(math.isinf(stuff_read.aListOfDouble[0]))
self.assertTrue(math.isinf(stuff_read.aListOfDouble[1]))
self.assertTrue(math.isnan(stuff_read.aListOfDouble[2]))
def test_unexpected_field(self):
ss = SomeStruct(anInteger=1)
j = '{ "anInteger": 101, "unexpected": 111.1}'
struct_read = readStuffFromJSON(j, struct_type=SomeStruct)
self.assertEqual(struct_read.anInteger, 101)
def test_map(self):
stuff = Stuff(
aMap={1: {"hello": [1,2,3,4],
"world": [5,6,7,8]},
2: {"good": [100, 200],
"bye": [300, 400]}
},
anotherString="Hey")
j = writeToJSON(stuff)
stuff_read = readStuffFromJSON(j)
self.assertEqual(len(stuff_read.aMap), 2)
self.assertEqual(stuff_read.aMap[1]["hello"], [1,2,3,4])
self.assertEqual(stuff_read.aMap[1]["world"], [5,6,7,8])
self.assertEqual(stuff_read.aMap[2]["good"], [100, 200])
self.assertEqual(stuff_read.aMap[2]["bye"], [300, 400])
self.assertEqual(stuff_read.anotherString, "Hey")
def test_list(self):
stuff = Stuff(
aList=[
[[["hello", "world"], ["good", "bye"]]],
[[["what", "is"], ["going", "on"]]]],
anotherString="Hey")
j = writeToJSON(stuff)
stuff_read = readStuffFromJSON(j)
self.assertEqual(len(stuff_read.aList), 2)
self.assertEqual(stuff_read.aList[0][0][0], ["hello", "world"])
self.assertEqual(stuff_read.aList[0][0][1], ["good", "bye"])
self.assertEqual(stuff_read.aList[1][0][0], ["what", "is"])
self.assertEqual(stuff_read.aList[1][0][1], ["going", "on"])
self.assertEqual(stuff_read.anotherString, "Hey")
def test_set(self):
stuff = Stuff(
aListOfSet=[set(["hello"]), set(["world"])],
anotherString="Hey")
j = writeToJSON(stuff)
stuff_read = readStuffFromJSON(j)
self.assertEqual(len(stuff_read.aListOfSet), 2)
self.assertEqual(stuff_read.aListOfSet[0], set(["hello"]))
self.assertEqual(stuff_read.aListOfSet[1], set(["world"]))
self.assertEqual(stuff_read.anotherString, "Hey")
def test_struct(self):
stuff = Stuff(
aStruct=SomeStruct(anInteger=12,
aMap={"hi": 1.5}),
aListOfStruct=[
SomeStruct(anInteger=10,
aMap={"good": 2.0}),
SomeStruct(anInteger=11,
aMap={"bye": 1.0})],
anotherString="Hey"
)
j = writeToJSON(stuff)
stuff_read = readStuffFromJSON(j)
self.assertEqual(len(stuff_read.aListOfStruct), 2)
self.assertEqual(stuff_read.aListOfStruct[0].anInteger, 10)
self.assertEqual(stuff_read.aListOfStruct[0].aMap["good"], 2.0)
self.assertEqual(stuff_read.aListOfStruct[1].anInteger, 11)
self.assertEqual(stuff_read.aListOfStruct[1].aMap["bye"], 1.0)
self.assertEqual(stuff_read.anotherString, "Hey")
if __name__ == '__main__':
unittest.main()
| apache-2.0 | 1,021,787,994,650,664,100 | 38.220472 | 76 | 0.579 | false |
JeromeParadis/django-mailing | mailing/mail.py | 1 | 5077 | from django.conf import settings
from django.core.mail import EmailMultiAlternatives, EmailMessage
from django.template.loader import render_to_string
from django.utils import translation
# Define exception classes
# --------------------------------
class MailerInvalidBodyError(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class MailerMissingSubjectError(Exception):
def __init__(self, value=None):
self.value = value
def __str__(self):
        return repr(self.value if self.value else '')
def send_email_default(*args, **kwargs):
send_email(args[3],args[0],args[1], from_email=args[2], category='django core email')
def send_email(recipients, subject, text_content=None, html_content=None, from_email=None, use_base_template=True, category=None, fail_silently=False, language=None, cc=None, bcc=None, attachments=None, headers=None, bypass_queue=False, bypass_hijacking=False, attach_files=None):
"""
Will send a multi-format email to recipients. Email may be queued through celery
"""
from django.conf import settings
if not bypass_queue and hasattr(settings, 'MAILING_USE_CELERY') and settings.MAILING_USE_CELERY:
from celery.execute import send_task
return send_task('mailing.queue_send_email',[recipients, subject, text_content, html_content, from_email, use_base_template, category, fail_silently, language if language else translation.get_language(), cc, bcc, attachments, headers, bypass_hijacking, attach_files])
else:
header_category_value = '%s%s' % (settings.MAILING_HEADER_CATEGORY_PREFIX if hasattr(settings, 'MAILING_HEADER_CATEGORY_PREFIX') else '', category)
# Check for sendgrid support and add category header
# --------------------------------
if hasattr(settings, 'MAILING_USE_SENDGRID'):
send_grid_support = settings.MAILING_USE_SENDGRID
else:
send_grid_support = False
if not headers:
headers = dict()
if send_grid_support and category:
headers['X-SMTPAPI'] = '{"category": "%s"}' % header_category_value
# Check for Mailgun support and add label header
# --------------------------------
if hasattr(settings, 'MAILING_USE_MAILGUN'):
mailgun_support = settings.MAILING_USE_MAILGUN
else:
mailgun_support = False
if not headers:
headers = dict()
if mailgun_support and category:
headers['X-Mailgun-Tag'] = header_category_value
# Ensure recipients are in a list
# --------------------------------
if isinstance(recipients, basestring):
recipients_list = [recipients]
else:
recipients_list = recipients
# Check if we need to hijack the email
# --------------------------------
if hasattr(settings, 'MAILING_MAILTO_HIJACK') and not bypass_hijacking:
headers['X-MAILER-ORIGINAL-MAILTO'] = ','.join(recipients_list)
recipients_list = [settings.MAILING_MAILTO_HIJACK]
if not subject:
raise MailerMissingSubjectError('Subject not supplied')
# Send ascii, html or multi-part email
# --------------------------------
if text_content or html_content:
if use_base_template:
prev_language = translation.get_language()
language and translation.activate(language)
text_content = render_to_string('mailing/base.txt', {'mailing_text_body': text_content, 'mailing_subject': subject, 'settings': settings}) if text_content else None
html_content = render_to_string('mailing/base.html', {'mailing_html_body': html_content, 'mailing_subject': subject, 'settings': settings}) if html_content else None
translation.activate(prev_language)
msg = EmailMultiAlternatives(subject, text_content if text_content else html_content, from_email if from_email else settings.DEFAULT_FROM_EMAIL, recipients_list, cc=cc, bcc=bcc, attachments=attachments, headers = headers)
if html_content and text_content:
msg.attach_alternative(html_content, "text/html")
elif html_content: # Only HTML
msg.content_subtype = "html"
# Attach files through attach_files helper
# --------------------------------
if attach_files:
for att in attach_files: # attachments are tuples of (filepath, mimetype, filename)
with open(att[0], 'rb') as f:
content = f.read()
msg.attach(att[2], content, att[1])
# Send email
# --------------------------------
msg.send(fail_silently=fail_silently)
else:
raise MailerInvalidBodyError('No text or html body supplied.')
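# Illustrative sketch of a typical call (the address, subject and category are
# placeholders, not values from this project):
#
#     send_email(
#         ["user@example.com"],
#         "Welcome to the site",
#         text_content="Plain-text body",
#         html_content="<p>HTML body</p>",
#         category="welcome",
#     )
#
# With settings.MAILING_USE_CELERY enabled the call queues a task; otherwise it
# sends synchronously through EmailMultiAlternatives.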
| mit | 587,348,768,199,324,700 | 47.291262 | 280 | 0.592279 | false |
debugger06/MiroX | lib/fileutil.py | 3 | 14425 | # Miro - an RSS based video player application
# Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011
# Participatory Culture Foundation
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
# In addition, as a special exception, the copyright holders give
# permission to link the code of portions of this program with the OpenSSL
# library.
#
# You must obey the GNU General Public License in all respects for all of
# the code used other than OpenSSL. If you modify file(s) with this
# exception, you may extend this exception to your version of the file(s),
# but you are not obligated to do so. If you do not wish to do so, delete
# this exception statement from your version. If you delete this exception
# statement from all source files in the program, then also delete it here.
"""Functions to handle moving/deleting files, especially on windows
where file locking semantics can cause problems.
"""
import logging
import os
import shutil
from miro import u3info
from miro.plat.filebundle import is_file_bundle
def makedirs(path):
path = expand_filename(path)
return os.makedirs(path)
def isfile(path):
if not path:
return False
path = expand_filename(path)
return os.path.isfile(path)
def isdir(path):
if not path:
return False
path = expand_filename(path)
return os.path.isdir(path)
def isabs(path):
path = expand_filename(path)
return os.path.isabs(path)
def getctime(path):
path = expand_filename(path)
time = os.path.getctime(path)
# work around python bug - see #15818
time = max(time, 0)
return time
def getmtime(path):
path = expand_filename(path)
time = os.path.getmtime(path)
# work around python bug - see #15818
time = max(time, 0)
return time
def exists(path):
if not path:
# DeprecationWarning
logging.debug("special case used in fileutil.exists(): path=%r", path)
return False
path = expand_filename(path)
return os.path.exists(path)
def remove(path):
path = expand_filename(path)
return os.remove(path)
def rmtree(path):
path = expand_filename(path)
return shutil.rmtree(path)
def listdir(path):
path = expand_filename(path)
return os.listdir(path)
def open_file(path, *args, **kwargs):
path = expand_filename(path)
return file(path, *args, **kwargs)
def access(path, *args, **kwargs):
path = expand_filename(path)
return os.access(path, *args, **kwargs)
def move(src, dest):
src = expand_filename(src)
dest = expand_filename(dest)
shutil.move(src, dest)
def rmdir(path):
path = expand_filename(path)
os.rmdir (path)
def rename(src, dest):
src = expand_filename(src)
dest = expand_filename(dest)
os.rename (src, dest)
def abspath(path):
path = expand_filename(path)
path = os.path.abspath(path)
path = collapse_filename(path)
return path
def copy_with_progress(input_path, output_path, block_size=32*1024):
flags = os.O_WRONLY | os.O_CREAT
if hasattr(os, 'O_SYNC'):
flags |= os.O_SYNC
output_fd = os.open(output_path, flags)
with file(input_path, 'rb') as input:
with os.fdopen(output_fd, 'wb') as output:
data = input.read(block_size)
while data:
output.write(data)
result = yield len(data)
if result:
# return True to cancel. NB: you should probably remove the
# output file
break
data = input.read(block_size)
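# Illustrative sketch (not part of the original API): one way to drive the
# copy_with_progress() generator.  The progress_callback argument is
# hypothetical; returning a truthy value from it cancels the copy.
def _example_copy_with_progress(input_path, output_path, progress_callback=None):
    copier = copy_with_progress(input_path, output_path)
    copied = 0
    try:
        # Priming the generator writes the first chunk and yields its size.
        chunk = next(copier)
        while True:
            copied += chunk
            cancel = progress_callback(copied) if progress_callback else None
            chunk = copier.send(cancel)
    except StopIteration:
        pass
    return copied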
try:
samefile = os.path.samefile
except AttributeError:
# Windows doesn't have samefile()
def samefile(path1, path2):
abspath1 = os.path.normcase(abspath(path1))
abspath2 = os.path.normcase(abspath(path2))
return abspath1 == abspath2
def is_windows_file_in_use_error(exception):
"""Check if an exception was caused by a file being in use on windows
This is for errors like #15312, where we try to delete/move a file on
windows and fail because of it's filesystem semantics. To work around
that we need to check if an error is caused by the windows weirdness, or
if it's some other filesystem error.
"""
if not isinstance(exception, Exception):
raise TypeError("%r is not an exception" % exception)
# errno 13 is permission denied, windows error 32 is that the file is in
# use. For non WindowsErrors winerror isn't set and for non OSErrors,
# errno isn't set. If either of those attributes are missing, then we can
# safely return False
try:
return exception.errno == 13 and exception.winerror == 32
except AttributeError:
return False
def migrate_file(source, dest, callback, retry_after=10, retry_for=60):
"""Try to migrate a file, if this works, callback is called. If
we fail because the file is open, we retry migrating the file
every so often (by default every 10 seconds, stopping after 60
seconds). This probably only makes a difference on Windows.
"""
import eventloop
source = expand_filename(source)
dest = expand_filename(dest)
try:
shutil.move(source, dest)
except EnvironmentError, e:
logging.warn("Error migrating %s to %s (Error: %s)", source, dest, e)
try:
os.remove(dest)
except EnvironmentError:
pass
if retry_for > 0:
if is_windows_file_in_use_error(e):
# permission denied, assume this means it's open by
# another process on windows.
logging.info('Retrying migration for %s', source)
eventloop.add_timeout(retry_after, migrate_file,
"Migrate File Retry", args=(source, dest, callback,
retry_after, retry_for - retry_after))
except TypeError, e:
logging.warn ("Type error migrating %s (%s) to %s (%s) (Error %s)",
source, type(source), dest, type(dest), e)
raise
else:
callback()
class DeletesInProgressTracker(object):
def __init__(self):
self.set = set()
def normalize(self, path):
return os.path.abspath(os.path.normcase(path))
def add(self, path):
self.set.add(self.normalize(path))
def discard(self, path):
self.set.discard(self.normalize(path))
def __contains__(self, path):
return self.normalize(path) in self.set
deletes_in_progress = DeletesInProgressTracker()
def delete(path, retry_after=10, retry_for=60, firsttime=True):
"""Try to delete a file or directory. If this fails because the
file is open, we retry deleting the file every so often This
probably only makes a difference on Windows.
"""
import eventloop
path = expand_filename(path)
try:
if os.path.isfile(path):
os.remove (path)
elif os.path.isdir(path):
shutil.rmtree (path)
else:
logging.warn("asked to delete '%s' but it's not there." % path)
except EnvironmentError, e:
logging.warn("Error deleting %s", path)
if retry_for > 0 and is_windows_file_in_use_error(e):
# permission denied, assume this means it's open by another
# process on windows.
deletes_in_progress.add(path)
logging.info('Retrying delete for %s (%d)', path, retry_after)
eventloop.add_timeout(retry_after, delete,
"Delete File Retry", args=(path, retry_after,
retry_for - retry_after, False))
if firsttime:
from miro.workerprocess import _subprocess_manager
if _subprocess_manager.is_running:
logging.debug('restarting subprocess_manager to hopefully '
'free file references')
_subprocess_manager.restart(clean=True)
else:
deletes_in_progress.discard(path)
def miro_listdir(directory):
"""Directory listing that's safe and convenient for finding new
videos in a directory.
Returns the tuple (files, directories) where both elements are a
list of absolute pathnames. OSErrors are silently ignored.
Hidden files aren't returned. Pathnames are run through
os.path.normcase.
"""
# FIXME - this doesn't look used anywhere
files = []
directories = []
expanded_directory = expand_filename(directory)
expanded_directory = os.path.abspath(os.path.normcase(expanded_directory))
if expanded_directory in deletes_in_progress:
        return [], []
try:
listing = os.listdir(expanded_directory)
except OSError:
return [], []
for name in listing:
if name[0] == '.' or name.lower() == 'thumbs.db':
# thumbs.db is a windows file that speeds up thumbnails.
# We know it's not a movie file.
continue
expanded_path = os.path.join(expanded_directory, os.path.normcase(name))
path = os.path.join(directory, os.path.normcase(name))
if expanded_path in deletes_in_progress:
continue
try:
if os.path.isdir(expanded_path):
directories.append(path)
else:
files.append(path)
except OSError:
pass
return files, directories
def miro_allfiles(directory, checked=None):
"""Directory listing that's safe and convenient for finding new
videos in a directory.
Returns a list of files consisting of absolute pathnames.
OSErrors are silently ignored. Hidden files aren't returned.
Pathnames are run through os.path.normcase.
"""
if checked is None:
checked = set()
expanded_directory = expand_filename(directory)
expanded_directory = os.path.abspath(os.path.normcase(expanded_directory))
real_directory = os.path.realpath(expanded_directory)
if real_directory in checked:
logging.debug('%s is a symlink to a directory that has '
'already been checked; skipping', repr(expanded_directory))
return
checked.add(real_directory)
if expanded_directory in deletes_in_progress:
return
if is_file_bundle(expanded_directory):
return
try:
listing = os.listdir(expanded_directory)
except OSError:
logging.debug('OSError walking directory; continuing', exc_info=1)
return
for name in listing:
name_lower = name.lower()
if (name.startswith('.') or name_lower == 'thumbs.db' or
name_lower == "incomplete downloads"):
# thumbs.db is a windows file that speeds up thumbnails.
# We know it's not a movie file.
continue
path = os.path.join(directory, os.path.normcase(name))
expanded_path = os.path.join(expanded_directory, os.path.normcase(name))
if expanded_path in deletes_in_progress:
continue
try:
if (os.path.isdir(expanded_path) and
not is_file_bundle(expanded_path)):
for fn in miro_allfiles(path, checked):
yield fn
elif os.path.isfile(expanded_path):
yield path
except OSError:
logging.debug('OSError walking directory; continuing', exc_info=1)
pass
def expand_filename(filename):
if not filename:
return filename
if u3info.u3_active:
if filename.startswith(u3info.APP_DATA_PREFIX):
filename = filename[len(u3info.APP_DATA_PREFIX):]
while len(filename) > 0 and filename[0] in ['/', '\\']:
filename = filename[1:]
if len(filename) == 0:
return u3info.app_data_path
return os.path.join (u3info.app_data_path, filename)
if filename.startswith(u3info.DEVICE_DOCUMENT_PREFIX):
filename = filename[len(u3info.DEVICE_DOCUMENT_PREFIX):]
while len(filename) > 0 and filename[0] in ['/', '\\']:
filename = filename[1:]
if len(filename) == 0:
return u3info.device_document_path
return os.path.join (u3info.device_document_path, filename)
return filename
def collapse_filename(filename):
if not filename:
return filename
if u3info.u3_active:
if filename.startswith(u3info.app_data_path):
filename = filename[len(u3info.app_data_path):]
while len(filename) > 0 and filename[0] in ['/', '\\']:
filename = filename[1:]
if len(filename) == 0:
return u3info.APP_DATA_PREFIX
return u3info.APP_DATA_PREFIX + '\\' + filename
elif filename.startswith(u3info.device_document_path):
filename = filename[len(u3info.device_document_path):]
while len(filename) > 0 and filename[0] in ['/', '\\']:
filename = filename[1:]
if len(filename) == 0:
return u3info.DEVICE_DOCUMENT_PREFIX
return u3info.DEVICE_DOCUMENT_PREFIX + '\\' + filename
return filename
class FileSet(object):
"""Store a set of files and check if a path is contained.
The reason this is hard is because of filesystem case issues. Right now
we handle it by always comparing files using lowercase. Eventually we
should have a better system, see #17108 for discussion
"""
def __init__(self, initial_files=None):
self.pathset = set()
if initial_files:
for path in initial_files:
self.add_path(path)
def add_path(self, path):
self.pathset.add(path.lower())
def contains_path(self, path):
return path.lower() in self.pathset
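# Illustrative FileSet usage (the paths are hypothetical):
#     fs = FileSet(["/Videos/Clip.AVI"])
#     fs.contains_path("/videos/clip.avi")   # True - comparison is lowercased
#     fs.contains_path("/Videos/Other.avi")  # False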
| gpl-2.0 | -3,731,946,045,298,159,000 | 34.883085 | 80 | 0.632929 | false |
samuel/kokki | kokki/cookbooks/mdadm/libraries/providers.py | 2 | 1145 |
import subprocess
from kokki import Provider
class ArrayProvider(Provider):
def action_create(self):
if not self.exists():
subprocess.check_call(["/sbin/mdadm",
"--create", self.resource.name,
"-R",
"-c", str(self.resource.chunksize),
"--level", str(self.resource.level),
"--metadata", self.resource.metadata,
"--raid-devices", str(len(self.resource.devices)),
] + self.resource.devices)
self.resource.updated()
def action_stop(self):
if self.exists():
subprocess.check_call(["/sbin/mdadm",
"--stop", self.resource.name])
self.resource.updated()
def action_assemble(self):
if not self.exists():
subprocess.check_call(["/sbin/mdadm",
"--assemble", self.resource.name,
] + self.resource.devices)
self.resource.updated()
def exists(self):
ret = subprocess.call(["/sbin/mdadm", "-Q", self.resource.name])
return not ret
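# Illustrative recipe usage (a sketch only: the resource class name "Array" and
# the device paths are assumptions, not taken from this cookbook):
#
#     Array("/dev/md0",
#           level=0,
#           chunksize=64,
#           metadata="1.2",
#           devices=["/dev/sdb1", "/dev/sdc1"],
#           action="create")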
| bsd-3-clause | 4,079,987,464,155,967,500 | 33.69697 | 72 | 0.517031 | false |
witgo/spark | python/pyspark/statcounter.py | 8 | 5149 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This file is ported from spark/util/StatCounter.scala
import copy
import math
try:
from numpy import maximum, minimum, sqrt
except ImportError:
maximum = max
minimum = min
sqrt = math.sqrt
class StatCounter(object):
def __init__(self, values=None):
if values is None:
values = list()
self.n = 0 # Running count of our values
self.mu = 0.0 # Running mean of our values
self.m2 = 0.0 # Running variance numerator (sum of (x - mean)^2)
self.maxValue = float("-inf")
self.minValue = float("inf")
for v in values:
self.merge(v)
# Add a value into this StatCounter, updating the internal statistics.
def merge(self, value):
delta = value - self.mu
self.n += 1
self.mu += delta / self.n
self.m2 += delta * (value - self.mu)
self.maxValue = maximum(self.maxValue, value)
self.minValue = minimum(self.minValue, value)
return self
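    # Note: merge() is Welford's online update.  For example, merging the values
    # 1.0, 2.0 and 3.0 in turn leaves n=3, mu=2.0 and m2=2.0, so
    # variance() == 2/3 and sampleVariance() == 1.0.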
# Merge another StatCounter into this one, adding up the internal statistics.
def mergeStats(self, other):
if not isinstance(other, StatCounter):
raise Exception("Can only merge Statcounters!")
if other is self: # reference equality holds
self.merge(copy.deepcopy(other)) # Avoid overwriting fields in a weird order
else:
if self.n == 0:
self.mu = other.mu
self.m2 = other.m2
self.n = other.n
self.maxValue = other.maxValue
self.minValue = other.minValue
elif other.n != 0:
delta = other.mu - self.mu
if other.n * 10 < self.n:
self.mu = self.mu + (delta * other.n) / (self.n + other.n)
elif self.n * 10 < other.n:
self.mu = other.mu - (delta * self.n) / (self.n + other.n)
else:
self.mu = (self.mu * self.n + other.mu * other.n) / (self.n + other.n)
self.maxValue = maximum(self.maxValue, other.maxValue)
self.minValue = minimum(self.minValue, other.minValue)
self.m2 += other.m2 + (delta * delta * self.n * other.n) / (self.n + other.n)
self.n += other.n
return self
# Clone this StatCounter
def copy(self):
return copy.deepcopy(self)
def count(self):
return int(self.n)
def mean(self):
return self.mu
def sum(self):
return self.n * self.mu
def min(self):
return self.minValue
def max(self):
return self.maxValue
# Return the variance of the values.
def variance(self):
if self.n == 0:
return float('nan')
else:
return self.m2 / self.n
#
# Return the sample variance, which corrects for bias in estimating the variance by dividing
# by N-1 instead of N.
#
def sampleVariance(self):
if self.n <= 1:
return float('nan')
else:
return self.m2 / (self.n - 1)
# Return the standard deviation of the values.
def stdev(self):
return sqrt(self.variance())
#
# Return the sample standard deviation of the values, which corrects for bias in estimating the
# variance by dividing by N-1 instead of N.
#
def sampleStdev(self):
return sqrt(self.sampleVariance())
def asDict(self, sample=False):
"""Returns the :class:`StatCounter` members as a ``dict``.
Examples
--------
>>> sc.parallelize([1., 2., 3., 4.]).stats().asDict()
{'count': 4L,
'max': 4.0,
'mean': 2.5,
'min': 1.0,
'stdev': 1.2909944487358056,
'sum': 10.0,
'variance': 1.6666666666666667}
"""
return {
'count': self.count(),
'mean': self.mean(),
'sum': self.sum(),
'min': self.min(),
'max': self.max(),
'stdev': self.stdev() if sample else self.sampleStdev(),
'variance': self.variance() if sample else self.sampleVariance()
}
def __repr__(self):
return ("(count: %s, mean: %s, stdev: %s, max: %s, min: %s)" %
(self.count(), self.mean(), self.stdev(), self.max(), self.min()))
| apache-2.0 | -6,891,856,690,993,652,000 | 31.18125 | 99 | 0.571956 | false |
mortonjt/scipy | scipy/io/tests/test_mmio.py | 40 | 11787 | #!/usr/bin/env python
from __future__ import division, print_function, absolute_import
from tempfile import mkdtemp, mktemp
import os
import shutil
from numpy import array,transpose
from numpy.testing import TestCase, run_module_suite, assert_array_almost_equal, \
assert_equal, rand
import scipy.sparse
from scipy.io.mmio import mminfo,mmread,mmwrite
class TestMMIOArray(TestCase):
def setUp(self):
self.tmpdir = mkdtemp()
self.fn = os.path.join(self.tmpdir, 'testfile.mtx')
def tearDown(self):
shutil.rmtree(self.tmpdir)
def test_simple(self):
a = [[1,2],[3,4]]
fn = self.fn
mmwrite(fn,a)
assert_equal(mminfo(fn),(2,2,4,'array','integer','general'))
b = mmread(fn)
assert_array_almost_equal(a,b)
def test_simple_rectangular(self):
a = [[1,2,3],[4,5,6]]
fn = self.fn
mmwrite(fn,a)
assert_equal(mminfo(fn),(2,3,6,'array','integer','general'))
b = mmread(fn)
assert_array_almost_equal(a,b)
def test_simple_rectangular_real(self):
a = [[1,2],[3.5,4],[5,6]]
fn = self.fn
mmwrite(fn,a)
assert_equal(mminfo(fn),(3,2,6,'array','real','general'))
b = mmread(fn)
assert_array_almost_equal(a,b)
def test_simple_real(self):
a = [[1,2],[3,4.0]]
fn = self.fn
mmwrite(fn,a)
assert_equal(mminfo(fn),(2,2,4,'array','real','general'))
b = mmread(fn)
assert_array_almost_equal(a,b)
def test_simple_complex(self):
a = [[1,2],[3,4j]]
fn = self.fn
mmwrite(fn,a)
assert_equal(mminfo(fn),(2,2,4,'array','complex','general'))
b = mmread(fn)
assert_array_almost_equal(a,b)
def test_simple_symmetric(self):
a = [[1,2],[2,4]]
fn = self.fn
mmwrite(fn,a)
assert_equal(mminfo(fn),(2,2,4,'array','integer','symmetric'))
b = mmread(fn)
assert_array_almost_equal(a,b)
def test_simple_skew_symmetric(self):
a = [[1,2],[-2,4]]
fn = self.fn
mmwrite(fn,a)
assert_equal(mminfo(fn),(2,2,4,'array','integer','skew-symmetric'))
b = mmread(fn)
assert_array_almost_equal(a,b)
def test_simple_skew_symmetric_float(self):
a = array([[1,2],[-2.0,4]],'f')
fn = self.fn
mmwrite(fn,a)
assert_equal(mminfo(fn),(2,2,4,'array','real','skew-symmetric'))
b = mmread(fn)
assert_array_almost_equal(a,b)
def test_simple_hermitian(self):
a = [[1,2+3j],[2-3j,4]]
fn = self.fn
mmwrite(fn,a)
assert_equal(mminfo(fn),(2,2,4,'array','complex','hermitian'))
b = mmread(fn)
assert_array_almost_equal(a,b)
def test_random_symmetric_real(self):
sz = (20,20)
a = rand(*sz)
a = a + transpose(a)
fn = self.fn
mmwrite(fn,a)
assert_equal(mminfo(fn),(20,20,400,'array','real','symmetric'))
b = mmread(fn)
assert_array_almost_equal(a,b)
def test_random_rect_real(self):
sz = (20,15)
a = rand(*sz)
fn = self.fn
mmwrite(fn,a)
assert_equal(mminfo(fn),(20,15,300,'array','real','general'))
b = mmread(fn)
assert_array_almost_equal(a,b)
_general_example = '''\
%%MatrixMarket matrix coordinate real general
%=================================================================================
%
% This ASCII file represents a sparse MxN matrix with L
% nonzeros in the following Matrix Market format:
%
% +----------------------------------------------+
% |%%MatrixMarket matrix coordinate real general | <--- header line
% |% | <--+
% |% comments | |-- 0 or more comment lines
% |% | <--+
% | M N L | <--- rows, columns, entries
% | I1 J1 A(I1, J1) | <--+
% | I2 J2 A(I2, J2) | |
% | I3 J3 A(I3, J3) | |-- L lines
% | . . . | |
% | IL JL A(IL, JL) | <--+
% +----------------------------------------------+
%
% Indices are 1-based, i.e. A(1,1) is the first element.
%
%=================================================================================
5 5 8
1 1 1.000e+00
2 2 1.050e+01
3 3 1.500e-02
1 4 6.000e+00
4 2 2.505e+02
4 4 -2.800e+02
4 5 3.332e+01
5 5 1.200e+01
'''
_hermitian_example = '''\
%%MatrixMarket matrix coordinate complex hermitian
5 5 7
1 1 1.0 0
2 2 10.5 0
4 2 250.5 22.22
3 3 1.5e-2 0
4 4 -2.8e2 0
5 5 12. 0
5 4 0 33.32
'''
_skew_example = '''\
%%MatrixMarket matrix coordinate real skew-symmetric
5 5 7
1 1 1.0
2 2 10.5
4 2 250.5
3 3 1.5e-2
4 4 -2.8e2
5 5 12.
5 4 0
'''
_symmetric_example = '''\
%%MatrixMarket matrix coordinate real symmetric
5 5 7
1 1 1.0
2 2 10.5
4 2 250.5
3 3 1.5e-2
4 4 -2.8e2
5 5 12.
5 4 8
'''
_symmetric_pattern_example = '''\
%%MatrixMarket matrix coordinate pattern symmetric
5 5 7
1 1
2 2
4 2
3 3
4 4
5 5
5 4
'''
class TestMMIOCoordinate(TestCase):
def setUp(self):
self.tmpdir = mkdtemp()
self.fn = os.path.join(self.tmpdir, 'testfile.mtx')
def tearDown(self):
shutil.rmtree(self.tmpdir)
def test_read_general(self):
fn = self.fn
f = open(fn,'w')
f.write(_general_example)
f.close()
assert_equal(mminfo(fn),(5,5,8,'coordinate','real','general'))
a = [[1, 0, 0, 6, 0],
[0, 10.5, 0, 0, 0],
[0, 0, .015, 0, 0],
[0, 250.5, 0, -280, 33.32],
[0, 0, 0, 0, 12]]
b = mmread(fn).todense()
assert_array_almost_equal(a,b)
def test_read_hermitian(self):
fn = self.fn
f = open(fn,'w')
f.write(_hermitian_example)
f.close()
assert_equal(mminfo(fn),(5,5,7,'coordinate','complex','hermitian'))
a = [[1, 0, 0, 0, 0],
[0, 10.5, 0, 250.5 - 22.22j, 0],
[0, 0, .015, 0, 0],
[0, 250.5 + 22.22j, 0, -280, -33.32j],
[0, 0, 0, 33.32j, 12]]
b = mmread(fn).todense()
assert_array_almost_equal(a,b)
def test_read_skew(self):
fn = self.fn
f = open(fn,'w')
f.write(_skew_example)
f.close()
assert_equal(mminfo(fn),(5,5,7,'coordinate','real','skew-symmetric'))
a = [[1, 0, 0, 0, 0],
[0, 10.5, 0, -250.5, 0],
[0, 0, .015, 0, 0],
[0, 250.5, 0, -280, 0],
[0, 0, 0, 0, 12]]
b = mmread(fn).todense()
assert_array_almost_equal(a,b)
def test_read_symmetric(self):
fn = self.fn
f = open(fn,'w')
f.write(_symmetric_example)
f.close()
assert_equal(mminfo(fn),(5,5,7,'coordinate','real','symmetric'))
a = [[1, 0, 0, 0, 0],
[0, 10.5, 0, 250.5, 0],
[0, 0, .015, 0, 0],
[0, 250.5, 0, -280, 8],
[0, 0, 0, 8, 12]]
b = mmread(fn).todense()
assert_array_almost_equal(a,b)
def test_read_symmetric_pattern(self):
fn = self.fn
f = open(fn,'w')
f.write(_symmetric_pattern_example)
f.close()
assert_equal(mminfo(fn),(5,5,7,'coordinate','pattern','symmetric'))
a = [[1, 0, 0, 0, 0],
[0, 1, 0, 1, 0],
[0, 0, 1, 0, 0],
[0, 1, 0, 1, 1],
[0, 0, 0, 1, 1]]
b = mmread(fn).todense()
assert_array_almost_equal(a,b)
def test_empty_write_read(self):
#http://projects.scipy.org/scipy/ticket/883
b = scipy.sparse.coo_matrix((10,10))
fn = self.fn
mmwrite(fn,b)
assert_equal(mminfo(fn),(10,10,0,'coordinate','real','general'))
a = b.todense()
b = mmread(fn).todense()
assert_array_almost_equal(a,b)
def test_bzip2_py3(self):
# test if fix for #2152 works
try:
# bz2 module isn't always built when building Python.
import bz2
except:
return
I = array([0, 0, 1, 2, 3, 3, 3, 4])
J = array([0, 3, 1, 2, 1, 3, 4, 4])
V = array([1.0, 6.0, 10.5, 0.015, 250.5, -280.0, 33.32, 12.0])
b = scipy.sparse.coo_matrix((V,(I,J)),shape=(5,5))
fn = self.fn
mmwrite(fn, b)
fn_bzip2 = "%s.bz2" % fn
with open(fn, 'rb') as f_in:
f_out = bz2.BZ2File(fn_bzip2, 'wb')
f_out.write(f_in.read())
f_out.close()
a = mmread(fn_bzip2).todense()
assert_array_almost_equal(a, b.todense())
def test_gzip_py3(self):
# test if fix for #2152 works
try:
# gzip module can be missing from Python installation
import gzip
except:
return
I = array([0, 0, 1, 2, 3, 3, 3, 4])
J = array([0, 3, 1, 2, 1, 3, 4, 4])
V = array([1.0, 6.0, 10.5, 0.015, 250.5, -280.0, 33.32, 12.0])
b = scipy.sparse.coo_matrix((V,(I,J)),shape=(5,5))
fn = self.fn
mmwrite(fn, b)
fn_gzip = "%s.gz" % fn
with open(fn, 'rb') as f_in:
f_out = gzip.open(fn_gzip, 'wb')
f_out.write(f_in.read())
f_out.close()
a = mmread(fn_gzip).todense()
assert_array_almost_equal(a, b.todense())
def test_real_write_read(self):
I = array([0, 0, 1, 2, 3, 3, 3, 4])
J = array([0, 3, 1, 2, 1, 3, 4, 4])
V = array([1.0, 6.0, 10.5, 0.015, 250.5, -280.0, 33.32, 12.0])
b = scipy.sparse.coo_matrix((V,(I,J)),shape=(5,5))
fn = self.fn
mmwrite(fn,b)
assert_equal(mminfo(fn),(5,5,8,'coordinate','real','general'))
a = b.todense()
b = mmread(fn).todense()
assert_array_almost_equal(a,b)
def test_complex_write_read(self):
I = array([0, 0, 1, 2, 3, 3, 3, 4])
J = array([0, 3, 1, 2, 1, 3, 4, 4])
V = array([1.0 + 3j, 6.0 + 2j, 10.50 + 0.9j, 0.015 + -4.4j,
250.5 + 0j, -280.0 + 5j, 33.32 + 6.4j, 12.00 + 0.8j])
b = scipy.sparse.coo_matrix((V,(I,J)),shape=(5,5))
fn = self.fn
mmwrite(fn,b)
assert_equal(mminfo(fn),(5,5,8,'coordinate','complex','general'))
a = b.todense()
b = mmread(fn).todense()
assert_array_almost_equal(a,b)
def test_sparse_formats(self):
mats = []
I = array([0, 0, 1, 2, 3, 3, 3, 4])
J = array([0, 3, 1, 2, 1, 3, 4, 4])
V = array([1.0, 6.0, 10.5, 0.015, 250.5, -280.0, 33.32, 12.0])
mats.append(scipy.sparse.coo_matrix((V,(I,J)),shape=(5,5)))
V = array([1.0 + 3j, 6.0 + 2j, 10.50 + 0.9j, 0.015 + -4.4j,
250.5 + 0j, -280.0 + 5j, 33.32 + 6.4j, 12.00 + 0.8j])
mats.append(scipy.sparse.coo_matrix((V,(I,J)),shape=(5,5)))
for mat in mats:
expected = mat.todense()
for fmt in ['csr','csc','coo']:
fn = mktemp(dir=self.tmpdir) # safe, we own tmpdir
mmwrite(fn, mat.asformat(fmt))
result = mmread(fn).todense()
assert_array_almost_equal(result, expected)
if __name__ == "__main__":
run_module_suite()
| bsd-3-clause | 3,614,891,984,012,234,000 | 29.068878 | 82 | 0.464664 | false |
home-assistant/home-assistant | homeassistant/components/gdacs/sensor.py | 5 | 5128 | """Feed Entity Manager Sensor support for GDACS Feed."""
from __future__ import annotations
import logging
from homeassistant.components.sensor import SensorEntity
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.util import dt
from .const import DEFAULT_ICON, DOMAIN, FEED
_LOGGER = logging.getLogger(__name__)
ATTR_STATUS = "status"
ATTR_LAST_UPDATE = "last_update"
ATTR_LAST_UPDATE_SUCCESSFUL = "last_update_successful"
ATTR_LAST_TIMESTAMP = "last_timestamp"
ATTR_CREATED = "created"
ATTR_UPDATED = "updated"
ATTR_REMOVED = "removed"
DEFAULT_UNIT_OF_MEASUREMENT = "alerts"
# An update of this entity is not making a web request, but uses internal data only.
PARALLEL_UPDATES = 0
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up the GDACS Feed platform."""
manager = hass.data[DOMAIN][FEED][entry.entry_id]
sensor = GdacsSensor(entry.entry_id, entry.unique_id, entry.title, manager)
async_add_entities([sensor])
_LOGGER.debug("Sensor setup done")
class GdacsSensor(SensorEntity):
"""This is a status sensor for the GDACS integration."""
def __init__(self, config_entry_id, config_unique_id, config_title, manager):
"""Initialize entity."""
self._config_entry_id = config_entry_id
self._config_unique_id = config_unique_id
self._config_title = config_title
self._manager = manager
self._status = None
self._last_update = None
self._last_update_successful = None
self._last_timestamp = None
self._total = None
self._created = None
self._updated = None
self._removed = None
self._remove_signal_status = None
async def async_added_to_hass(self):
"""Call when entity is added to hass."""
self._remove_signal_status = async_dispatcher_connect(
self.hass,
f"gdacs_status_{self._config_entry_id}",
self._update_status_callback,
)
_LOGGER.debug("Waiting for updates %s", self._config_entry_id)
# First update is manual because of how the feed entity manager is updated.
await self.async_update()
async def async_will_remove_from_hass(self) -> None:
"""Call when entity will be removed from hass."""
if self._remove_signal_status:
self._remove_signal_status()
@callback
def _update_status_callback(self):
"""Call status update method."""
_LOGGER.debug("Received status update for %s", self._config_entry_id)
self.async_schedule_update_ha_state(True)
@property
def should_poll(self):
"""No polling needed for GDACS status sensor."""
return False
async def async_update(self):
"""Update this entity from the data held in the feed manager."""
_LOGGER.debug("Updating %s", self._config_entry_id)
if self._manager:
status_info = self._manager.status_info()
if status_info:
self._update_from_status_info(status_info)
def _update_from_status_info(self, status_info):
"""Update the internal state from the provided information."""
self._status = status_info.status
self._last_update = (
dt.as_utc(status_info.last_update) if status_info.last_update else None
)
if status_info.last_update_successful:
self._last_update_successful = dt.as_utc(status_info.last_update_successful)
else:
self._last_update_successful = None
self._last_timestamp = status_info.last_timestamp
self._total = status_info.total
self._created = status_info.created
self._updated = status_info.updated
self._removed = status_info.removed
@property
def state(self):
"""Return the state of the sensor."""
return self._total
@property
def unique_id(self) -> str | None:
"""Return a unique ID containing latitude/longitude."""
return self._config_unique_id
@property
def name(self) -> str | None:
"""Return the name of the entity."""
return f"GDACS ({self._config_title})"
@property
def icon(self):
"""Return the icon to use in the frontend, if any."""
return DEFAULT_ICON
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return DEFAULT_UNIT_OF_MEASUREMENT
@property
def extra_state_attributes(self):
"""Return the device state attributes."""
attributes = {}
for key, value in (
(ATTR_STATUS, self._status),
(ATTR_LAST_UPDATE, self._last_update),
(ATTR_LAST_UPDATE_SUCCESSFUL, self._last_update_successful),
(ATTR_LAST_TIMESTAMP, self._last_timestamp),
(ATTR_CREATED, self._created),
(ATTR_UPDATED, self._updated),
(ATTR_REMOVED, self._removed),
):
if value or isinstance(value, bool):
attributes[key] = value
return attributes
| apache-2.0 | -8,803,254,087,218,481,000 | 33.884354 | 88 | 0.631825 | false |
GreenWolf13/GordonLite | plugins/scp.py | 1 | 8760 | """
scp.py: Written by Gnosis for Grapewhistle 2009, inherited by Glacon in 2011, even later inherited by Gordon in 2015 and expanded by GreenWolf.
v1.1 of basic in-channel data acquisition plugins for IRC channels of the SCP Foundation.
"""
from util import hook, http
import threading
from time import sleep
import sqlite3
import re
import os
class SCPThread(threading.Thread):
def __init__(self, dbpath):
threading.Thread.__init__(self, name="SCP")
self.dbpath = dbpath
def run(self):
db = sqlite3.connect(self.dbpath)
db.execute("create table if not exists scps(number varchar primary key, title varchar)")
db.execute("create table if not exists tales(page varchar primary key, title varchar)")
db.execute("create table if not exists gois(page varchar primary key, title varchar)")
db.text_factory = str
basescpurl = "http://www.scp-wiki.net"
scpseriespages = ["/scp-series", "/scp-series-2", "/scp-series-3"]
scpextrapages = ["/scp-ex", "/joke-scps", "/archived-scps"]
scptalepages = ["/system%3Apage-tags/tag/tale"]
scpgoipages = ["/system%3Apage-tags/tag/goi-format"]
scpinterval = 60 * 6
scpcounter = 0
talecounter = 0
goicounter = 0
# Infinite query loop
while True:
try:
c = db.cursor()
c.execute("delete from scps")
scpcounter = 0
talecounter = 0
goicounter = 0
# Regex fuckery to bypass wikidot api
scp_re = re.compile(r'<a href="/scp-(.*)">SCP-\1</a> - (.*?)</li>', re.I)
scpx_re = re.compile(r'<a href="/scp-(.*)">SCP-\1</a> - (.*?)</li>', re.I)
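                # Both patterns match series-page entries of the form
                # '<a href="/scp-173">SCP-173</a> - Sculpture</li>', capturing
                # ("173", "Sculpture"); the title shown here is illustrative.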
# Grab main list entries from each page
for scpseriespage in scpseriespages+scpextrapages:
page = http.to_utf8(http.get(basescpurl + scpseriespage))
scp_list = scp_re.findall(page)
#print scp_list
# Add entries to database
for (k, v) in scp_list:
#print k, v
c.execute(u"replace into scps(number, title) values (upper(?), ?)", (k, v))
scpcounter = scpcounter + 1
db.commit()
#print "scp.py - Updated SCP database from listing on", scpseriespage
#print "scp.py: Grabbing tales from", (basescpurl+scptalepages[0])
talepage = http.get_html(basescpurl + scptalepages[0])
# This is so ugly, why two xpaths?
talelinklist = talepage.xpath("//*[@class='title']/a/@href")
talelist = talepage.xpath("//*[@class='title']/a/text()")
# Grab list of tales from the 'tale' tag page
for i in range(len(talelist)):
talelink = talelinklist[i]
taletitle = talelist[i]
#print talelink#, unicode(taletitle, "utf-8")
c.execute(u"replace into tales (page, title) values(?,?)",
(talelink, taletitle)
)
talecounter = talecounter + 1
db.commit()
# Shamelessly copy/pasted from tale code - GW
goipage = http.get_html(basescpurl + scpgoipages[0])
# This is so ugly, why two xpaths?
# I don't know, gnosis. - GW
goilinklist = goipage.xpath("//*[@class='title']/a/@href")
goilist = goipage.xpath("//*[@class='title']/a/text()")
# Grab list of goi formats from the 'goi-format' tag page
for i in range(len(goilist)):
goilink = goilinklist[i]
goititle = goilist[i]
c.execute(u"replace into gois (page, title) values(?,?)",
(goilink, goititle)
)
goicounter = goicounter + 1
db.commit()
c.close()
print "scp.py - SCP database update complete, %d total entries." % scpcounter
print "scp.py - SCP tale database update complete, %d total entries." % talecounter
print "scp.py - GOI format database update complete, %d total entries." % goicounter
except Exception, e:
print "ERROR ERROR ERROR, ", e
# Query every -5- -30- 15 minutes
sleep(scpinterval)
# Looks up SCP from cached database. Returns [ACCESS DENIED] if not in database.
def scp_lookup(number, title=None):
url = "http://www.scp-wiki.net/scp-%s" % number
db = sqlite3.connect(scp_path)
if not title:
try: title = db.execute("select title from scps where number = ?", (number.upper(),)).fetchone()[0]
except TypeError: title = "[ACCESS DENIED]"
return "%s - %s" % (url, title)
def scp_init(dbpath):
if all([thread.name != "SCP" for thread in threading.enumerate()]):
scp_thread = SCPThread(scp_path)
scp_thread.start()
sleep(1)
@hook.regex(r'^SCP-((?:\w|-|J)+)$', re.I)
def scp(inp, bot=None, input=None):
try: inp = inp.groups()[0]
except AttributeError: pass
dbpath = os.path.join(bot.persist_dir, "%s.%s.db" % (input.conn.nick, input.conn.server))
return scp_lookup(inp)
# Query multiple SCPs at once with a comma-delimited list of !scp-xxxx tokens
@hook.event('PRIVMSG')
def multiscp(inp, bot=None, input=None):
scps = re.compile('!SCP-((?:\w|-|J)+)', re.I).findall(inp[1])
for scp in scps:
input.reply(scp_lookup(scp))
# Fetches the rating from the HTML page of the target SCP.
@hook.command
def rating(inp):
print ("Calling http.get() on http://www.scp-wiki.net/%s" % inp)
page = http.get("http://www.scp-wiki.net/%s" % inp)
rating = http.get_html("http://www.scp-wiki.net/%s" % inp).xpath("//*[@class='number prw54353']/text()")[0]
return rating
# Searches cached database for keyword in SCP name/title.
@hook.command
def search(inp):
inp = "%" + inp + "%"
db = sqlite3.connect(scp_path)
scps = db.execute("SELECT NUMBER, TITLE FROM SCPS WHERE TITLE LIKE ?", (inp,)).fetchall()
if len(scps) == 1:
(number, title) = scps[0]
return "SCP-%s - %s - http://www.scp-wiki.net/scp-%s" % (number, title, number)
printed = scps[:5]
output = ""
for (number, title) in printed:
output += "SCP-%s (%s), " % (number, title)
if len(scps) > 5: output += " plus %d more" % (len(scps) - 5)
else: output = output[:-2]
if not output: return "No SCPs found."
return output
# Look up function for tales. Wish there was a good way that didn't involve
# so much code duplication.
@hook.command
def tale(inp):
#print "tale command called"
inp = "%" + inp + "%"
db = sqlite3.connect(scp_path)
tales = db.execute("SELECT PAGE, TITLE FROM TALES WHERE TITLE LIKE ?", (inp,)).fetchall()
if len(tales) == 1:
(taleurl, title) = tales[0]
return u"%s - http://www.scp-wiki.net%s" % (title, taleurl)
printed = tales[:5]
#output = "http://www.scp-wiki.net" + printed[0][0] + " "
output = ""
for (taleurl, title) in printed:
output += title + ", "
if len(tales) > 5:
output += " plus %d more" % (len(tales) - 5)
else:
output = output[:-2]
if not output:
return u"No tales found."
return output
# More code duplication for gois - GW
@hook.command
def goi(inp):
inp = "%" + inp + "%"
db = sqlite3.connect(scp_path)
gois = db.execute("SELECT PAGE, TITLE FROM GOIS WHERE TITLE LIKE ?", (inp,)).fetchall()
if len(gois) == 1:
(goiurl, title) = gois[0]
return u"%s - http://www.scp-wiki.net%s" % (title, goiurl)
printed = gois[:5]
#output = "http://www.scp-wiki.net" + printed[0][0] + " "
output = ""
for (goiurl, title) in printed:
output += title + ", "
if len(gois) > 5:
output += " plus %d more" % (len(gois) - 5)
else:
output = output[:-2]
if not output:
return u"No GOI formats found."
return output
# Returns a random SCP in cached database.
@hook.command
def random(inp):
db = sqlite3.connect(scp_path)
(number, title) = db.execute("SELECT NUMBER, TITLE FROM SCPS ORDER BY RANDOM() LIMIT 1").fetchone()
return "SCP-%s - %s - http://www.scp-wiki.net/scp-%s" % (number, title, number)
# Just returns the tag page for now.
# TODO Make it return a list of tagged pages. - GW
@hook.command
def tag(inp):
return "http://www.scp-wiki.net/system:page-tags/tag/" + inp
mydir = dir()
scp_path = os.path.join(os.path.abspath('persist'), "scp.db")
scp_init(scp_path)
| unlicense | -8,271,666,105,897,371,000 | 38.818182 | 143 | 0.567237 | false |
ehuss/Sublime-Wrap-Plus | tests/test_wrap.py | 1 | 4875 | import os
import re
import sublime
import unittest
plugin_path = os.path.dirname(os.path.dirname(__file__))
# Sentinel to indicate that a setting should not be set.
UNSET = '__UNSET__'
DEFAULT_SETTINGS = {
'word_wrap': False,
'wrap_width': 0,
'rulers': [],
'tab_size': 4,
'translate_tabs_to_spaces': False,
'WrapPlus.break_long_words': False,
'WrapPlus.break_on_hyphens': False,
'WrapPlus.include_line_endings': 'auto',
'WrapPlus.wrap_width': UNSET,
'WrapPlus.skip_range': False, # Workaround for a bug.
}
class TestWrap(unittest.TestCase):
def test_wrap(self):
base = os.path.join(plugin_path, 'tests', 'wrap_tests')
to_test = os.listdir(base)
for filename in to_test:
abspath = os.path.join(base, filename)
            with open(abspath, encoding='utf8') as f:
                contents = f.read()
contents = contents.replace('\r\n', '\n')
i = contents.find('\n')
syntax = contents[:i]
contents = contents[i + 1:]
assert contents.startswith('==='), 'bad file %r' % (filename,)
self._test_wrap(filename, contents, syntax)
def _test_wrap(self, filename, contents, syntax):
# Split test file into separate tests.
starts = re.finditer(r'^===((?:[A-Za-z0-9._-]+=[^,\n]+,?)+)?$',
contents, flags=re.MULTILINE)
starts = list(starts)
for i, start in enumerate(starts):
# Get individual test substring.
try:
end = starts[i + 1]
except IndexError:
end = len(contents)
else:
end = end.start(0)
test_str = contents[start.end(0) + 1:end]
orig, expected = re.split('^---\n', test_str, flags=re.MULTILINE)
# Get optional settings.
settings = {}
if start.group(1):
for setting in start.group(1).split(','):
key, value = setting.split('=')
settings[key] = eval(value)
# Open a new view to run the test in.
self._wrap_with_scratch(filename, orig, expected, syntax, settings,
self._test_wrap_individual)
if not settings.get('WrapPlus.skip_range', False):
self._wrap_with_scratch(filename, orig, expected, syntax, settings,
self._test_wrap_ranges)
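    # Sketch of the test-file layout this parser expects (contents are hypothetical):
    #   <syntax file path>                 <- first line of the file
    #   ===WrapPlus.wrap_width=40          <- optional comma-separated key=value settings
    #   original text to wrap ...
    #   ---
    #   expected wrapped text ...
    #   ===                                <- next test case follows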
def _test_wrap_individual(self, view):
# Test wrapping every line.
for r in self._tagged_regions(view):
pos = r[0]
rel_end = view.size() - r[1]
sel = view.sel()
sel.clear()
sel.add(pos)
while pos < view.size() - rel_end:
view.run_command('wrap_lines_plus')
# Assume it advances the cursor past the wrapped section.
next_pos = view.sel()[0].a
if next_pos < view.size() - rel_end:
self.assertGreater(next_pos, pos)
pos = next_pos
def _tagged_regions(self, view):
if not view.find('<START>', 0):
yield (0, view.size())
return
while True:
start = view.find('<START>', 0)
if not start:
return
view.sel().clear()
view.sel().add(start)
view.run_command('left_delete')
end = view.find('<END>', 0)
view.sel().clear()
view.sel().add(end)
view.run_command('left_delete')
yield (start.a, end.a)
def _test_wrap_ranges(self, view):
regions = [sublime.Region(*r) for r in self._tagged_regions(view)]
view.sel().clear()
view.sel().add_all(regions)
view.run_command('wrap_lines_plus')
def _wrap_with_scratch(self, filename, contents, expected, syntax, settings, f):
window = sublime.active_window()
view = window.new_file()
view.set_scratch(True)
view.set_syntax_file(syntax)
# Update settings.
view_settings = view.settings()
bad_keys = set(settings.keys()) - set(DEFAULT_SETTINGS.keys())
self.assertEqual(bad_keys, set())
for key, value in DEFAULT_SETTINGS.items():
value = settings.get(key, value)
if value == UNSET:
view_settings.erase(key)
else:
view_settings.set(key, value)
view.run_command('append', {'characters': contents})
f(view)
actual = view.substr(sublime.Region(0, view.size()))
if actual != expected:
raise AssertionError('Wrapping did not match: %s %r\n%s---Expected:\n%s---' % (
filename, settings, actual, expected))
window.focus_view(view)
window.run_command('close_file')
| mit | 1,502,760,912,017,990,400 | 36.5 | 91 | 0.528821 | false |
timm/timmnix | pypy3-v5.5.0-linux64/lib_pypy/_sqlite3.py | 1 | 48936 | #-*- coding: utf-8 -*-
# pysqlite2/dbapi.py: pysqlite DB-API module
#
# Copyright (C) 2007-2008 Gerhard Häring <[email protected]>
#
# This file is part of pysqlite.
#
# This software is provided 'as-is', without any express or implied
# warranty. In no event will the authors be held liable for any damages
# arising from the use of this software.
#
# Permission is granted to anyone to use this software for any purpose,
# including commercial applications, and to alter it and redistribute it
# freely, subject to the following restrictions:
#
# 1. The origin of this software must not be misrepresented; you must not
# claim that you wrote the original software. If you use this software
# in a product, an acknowledgment in the product documentation would be
# appreciated but is not required.
# 2. Altered source versions must be plainly marked as such, and must not be
# misrepresented as being the original software.
# 3. This notice may not be removed or altered from any source distribution.
#
# Note: This software has been modified for use in PyPy.
from collections import OrderedDict
from functools import wraps
import datetime
import string
import sys
import weakref
import threading
try:
from __pypy__ import newlist_hint
except ImportError:
assert '__pypy__' not in sys.builtin_module_names
newlist_hint = lambda sizehint: []
if sys.version_info[0] >= 3:
StandardError = Exception
cmp = lambda x, y: (x > y) - (x < y)
long = int
xrange = range
basestring = unicode = str
buffer = memoryview
_BLOB_TYPE = bytes
else:
_BLOB_TYPE = buffer
from _sqlite3_cffi import ffi as _ffi, lib as _lib
exported_sqlite_symbols = [
'SQLITE_ALTER_TABLE',
'SQLITE_ANALYZE',
'SQLITE_ATTACH',
'SQLITE_CREATE_INDEX',
'SQLITE_CREATE_TABLE',
'SQLITE_CREATE_TEMP_INDEX',
'SQLITE_CREATE_TEMP_TABLE',
'SQLITE_CREATE_TEMP_TRIGGER',
'SQLITE_CREATE_TEMP_VIEW',
'SQLITE_CREATE_TRIGGER',
'SQLITE_CREATE_VIEW',
'SQLITE_DELETE',
'SQLITE_DENY',
'SQLITE_DETACH',
'SQLITE_DROP_INDEX',
'SQLITE_DROP_TABLE',
'SQLITE_DROP_TEMP_INDEX',
'SQLITE_DROP_TEMP_TABLE',
'SQLITE_DROP_TEMP_TRIGGER',
'SQLITE_DROP_TEMP_VIEW',
'SQLITE_DROP_TRIGGER',
'SQLITE_DROP_VIEW',
'SQLITE_IGNORE',
'SQLITE_INSERT',
'SQLITE_OK',
'SQLITE_PRAGMA',
'SQLITE_READ',
'SQLITE_REINDEX',
'SQLITE_SELECT',
'SQLITE_TRANSACTION',
'SQLITE_UPDATE',
]
for symbol in exported_sqlite_symbols:
globals()[symbol] = getattr(_lib, symbol)
_SQLITE_TRANSIENT = _lib.SQLITE_TRANSIENT
# pysqlite version information
version = "2.6.0"
# pysqlite constants
PARSE_COLNAMES = 1
PARSE_DECLTYPES = 2
# SQLite version information
sqlite_version = str(_ffi.string(_lib.sqlite3_libversion()).decode('ascii'))
_STMT_TYPE_UPDATE = 0
_STMT_TYPE_DELETE = 1
_STMT_TYPE_INSERT = 2
_STMT_TYPE_REPLACE = 3
_STMT_TYPE_OTHER = 4
_STMT_TYPE_SELECT = 5
_STMT_TYPE_INVALID = 6
class Error(StandardError):
pass
class Warning(StandardError):
pass
class InterfaceError(Error):
pass
class DatabaseError(Error):
pass
class InternalError(DatabaseError):
pass
class OperationalError(DatabaseError):
pass
class ProgrammingError(DatabaseError):
pass
class IntegrityError(DatabaseError):
pass
class DataError(DatabaseError):
pass
class NotSupportedError(DatabaseError):
pass
def connect(database, timeout=5.0, detect_types=0, isolation_level="",
check_same_thread=True, factory=None, cached_statements=100):
factory = Connection if not factory else factory
return factory(database, timeout, detect_types, isolation_level,
check_same_thread, factory, cached_statements)
def _unicode_text_factory(x):
return unicode(x, 'utf-8')
if sys.version_info[0] < 3:
def OptimizedUnicode(s):
try:
val = unicode(s, "ascii").encode("ascii")
except UnicodeDecodeError:
val = unicode(s, "utf-8")
return val
else:
OptimizedUnicode = _unicode_text_factory
class _StatementCache(object):
def __init__(self, connection, maxcount):
self.connection = connection
self.maxcount = maxcount
self.cache = OrderedDict()
def get(self, sql):
try:
stat = self.cache[sql]
except KeyError:
stat = Statement(self.connection, sql)
self.cache[sql] = stat
if len(self.cache) > self.maxcount:
self.cache.popitem(0)
else:
if stat._in_use:
stat = Statement(self.connection, sql)
self.cache[sql] = stat
return stat
class Connection(object):
__initialized = False
_db = None
def __init__(self, database, timeout=5.0, detect_types=0, isolation_level="",
check_same_thread=True, factory=None, cached_statements=100):
self.__initialized = True
db_star = _ffi.new('sqlite3 **')
if isinstance(database, unicode):
database = database.encode('utf-8')
if _lib.sqlite3_open(database, db_star) != _lib.SQLITE_OK:
raise OperationalError("Could not open database")
self._db = db_star[0]
if timeout is not None:
timeout = int(timeout * 1000) # pysqlite2 uses timeout in seconds
_lib.sqlite3_busy_timeout(self._db, timeout)
self.row_factory = None
self.text_factory = _unicode_text_factory
self._detect_types = detect_types
self._in_transaction = False
self.isolation_level = isolation_level
self.__cursors = []
self.__cursors_counter = 0
self.__statements = []
self.__statements_counter = 0
self.__rawstatements = set()
self._statement_cache = _StatementCache(self, cached_statements)
self.__func_cache = {}
self.__aggregates = {}
self.__aggregate_instances = {}
self.__collations = {}
if check_same_thread:
self.__thread_ident = threading.get_ident()
self.Error = Error
self.Warning = Warning
self.InterfaceError = InterfaceError
self.DatabaseError = DatabaseError
self.InternalError = InternalError
self.OperationalError = OperationalError
self.ProgrammingError = ProgrammingError
self.IntegrityError = IntegrityError
self.DataError = DataError
self.NotSupportedError = NotSupportedError
def __del__(self):
if self._db:
_lib.sqlite3_close(self._db)
def close(self):
self._check_thread()
self.__do_all_statements(Statement._finalize, True)
# depending on when this close() is called, the statements' weakrefs
# may be already dead, even though Statement.__del__() was not called
# yet. In this case, self.__rawstatements is not empty.
if self.__rawstatements is not None:
for stmt in list(self.__rawstatements):
self._finalize_raw_statement(stmt)
self.__rawstatements = None
if self._db:
ret = _lib.sqlite3_close(self._db)
if ret != _lib.SQLITE_OK:
raise self._get_exception(ret)
self._db = None
def _check_closed(self):
if not self.__initialized:
raise ProgrammingError("Base Connection.__init__ not called.")
if not self._db:
raise ProgrammingError("Cannot operate on a closed database.")
def _check_closed_wrap(func):
@wraps(func)
def wrapper(self, *args, **kwargs):
self._check_closed()
return func(self, *args, **kwargs)
return wrapper
def _check_thread(self):
try:
if self.__thread_ident == threading.get_ident():
return
except AttributeError:
pass
else:
raise ProgrammingError(
"SQLite objects created in a thread can only be used in that "
"same thread. The object was created in thread id %d and this "
"is thread id %d" % (self.__thread_ident, threading.get_ident()))
def _check_thread_wrap(func):
@wraps(func)
def wrapper(self, *args, **kwargs):
self._check_thread()
return func(self, *args, **kwargs)
return wrapper
def _get_exception(self, error_code=None):
if error_code is None:
error_code = _lib.sqlite3_errcode(self._db)
error_message = _ffi.string(_lib.sqlite3_errmsg(self._db)).decode('utf-8')
if error_code == _lib.SQLITE_OK:
raise ValueError("error signalled but got SQLITE_OK")
elif error_code in (_lib.SQLITE_INTERNAL, _lib.SQLITE_NOTFOUND):
exc = InternalError
elif error_code == _lib.SQLITE_NOMEM:
exc = MemoryError
elif error_code in (
_lib.SQLITE_ERROR, _lib.SQLITE_PERM, _lib.SQLITE_ABORT,
_lib.SQLITE_BUSY, _lib.SQLITE_LOCKED, _lib.SQLITE_READONLY,
_lib.SQLITE_INTERRUPT, _lib.SQLITE_IOERR, _lib.SQLITE_FULL,
_lib.SQLITE_CANTOPEN, _lib.SQLITE_PROTOCOL, _lib.SQLITE_EMPTY,
_lib.SQLITE_SCHEMA):
exc = OperationalError
elif error_code == _lib.SQLITE_CORRUPT:
exc = DatabaseError
elif error_code == _lib.SQLITE_TOOBIG:
exc = DataError
elif error_code in (_lib.SQLITE_CONSTRAINT, _lib.SQLITE_MISMATCH):
exc = IntegrityError
elif error_code == _lib.SQLITE_MISUSE:
exc = ProgrammingError
else:
exc = DatabaseError
exc = exc(error_message)
exc.error_code = error_code
return exc
def _remember_cursor(self, cursor):
self.__cursors.append(weakref.ref(cursor))
self.__cursors_counter += 1
if self.__cursors_counter < 200:
return
self.__cursors_counter = 0
self.__cursors = [r for r in self.__cursors if r() is not None]
def _remember_statement(self, statement):
self.__rawstatements.add(statement._statement)
self.__statements.append(weakref.ref(statement))
self.__statements_counter += 1
if self.__statements_counter < 200:
return
self.__statements_counter = 0
self.__statements = [r for r in self.__statements if r() is not None]
def _finalize_raw_statement(self, _statement):
if self.__rawstatements is not None:
try:
self.__rawstatements.remove(_statement)
except KeyError:
return # rare case: already finalized, see issue #2097
_lib.sqlite3_finalize(_statement)
def __do_all_statements(self, action, reset_cursors):
for weakref in self.__statements:
statement = weakref()
if statement is not None:
action(statement)
if reset_cursors:
for weakref in self.__cursors:
cursor = weakref()
if cursor is not None:
cursor._reset = True
@_check_thread_wrap
@_check_closed_wrap
def __call__(self, sql):
return self._statement_cache.get(sql)
def cursor(self, factory=None):
self._check_thread()
self._check_closed()
if factory is None:
factory = Cursor
cur = factory(self)
if self.row_factory is not None:
cur.row_factory = self.row_factory
return cur
def execute(self, *args):
cur = self.cursor()
return cur.execute(*args)
def executemany(self, *args):
cur = self.cursor()
return cur.executemany(*args)
def executescript(self, *args):
cur = self.cursor()
return cur.executescript(*args)
def iterdump(self):
from sqlite3.dump import _iterdump
return _iterdump(self)
def _begin(self):
statement_star = _ffi.new('sqlite3_stmt **')
ret = _lib.sqlite3_prepare_v2(self._db, self.__begin_statement, -1,
statement_star, _ffi.NULL)
try:
if ret != _lib.SQLITE_OK:
raise self._get_exception(ret)
ret = _lib.sqlite3_step(statement_star[0])
if ret != _lib.SQLITE_DONE:
raise self._get_exception(ret)
self._in_transaction = True
finally:
_lib.sqlite3_finalize(statement_star[0])
def commit(self):
self._check_thread()
self._check_closed()
if not self._in_transaction:
return
self.__do_all_statements(Statement._reset, False)
statement_star = _ffi.new('sqlite3_stmt **')
ret = _lib.sqlite3_prepare_v2(self._db, b"COMMIT", -1,
statement_star, _ffi.NULL)
try:
if ret != _lib.SQLITE_OK:
raise self._get_exception(ret)
ret = _lib.sqlite3_step(statement_star[0])
if ret != _lib.SQLITE_DONE:
raise self._get_exception(ret)
self._in_transaction = False
finally:
_lib.sqlite3_finalize(statement_star[0])
def rollback(self):
self._check_thread()
self._check_closed()
if not self._in_transaction:
return
self.__do_all_statements(Statement._reset, True)
statement_star = _ffi.new('sqlite3_stmt **')
ret = _lib.sqlite3_prepare_v2(self._db, b"ROLLBACK", -1,
statement_star, _ffi.NULL)
try:
if ret != _lib.SQLITE_OK:
raise self._get_exception(ret)
ret = _lib.sqlite3_step(statement_star[0])
if ret != _lib.SQLITE_DONE:
raise self._get_exception(ret)
self._in_transaction = False
finally:
_lib.sqlite3_finalize(statement_star[0])
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, exc_tb):
if exc_type is None and exc_value is None and exc_tb is None:
self.commit()
else:
self.rollback()
@_check_thread_wrap
@_check_closed_wrap
def create_function(self, name, num_args, callback):
try:
closure = self.__func_cache[callback]
except KeyError:
@_ffi.callback("void(sqlite3_context*, int, sqlite3_value**)")
def closure(context, nargs, c_params):
_function_callback(callback, context, nargs, c_params)
self.__func_cache[callback] = closure
if isinstance(name, unicode):
name = name.encode('utf-8')
ret = _lib.sqlite3_create_function(self._db, name, num_args,
_lib.SQLITE_UTF8, _ffi.NULL,
closure, _ffi.NULL, _ffi.NULL)
if ret != _lib.SQLITE_OK:
raise self.OperationalError("Error creating function")
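    # Illustrative registration (sketch): con.create_function("sign", 1,
    # lambda x: (x > 0) - (x < 0)) makes SIGN(x) callable from SQL, e.g.
    # con.execute("select sign(-3)") would yield -1 through this wrapper.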
@_check_thread_wrap
@_check_closed_wrap
def create_aggregate(self, name, num_args, cls):
try:
step_callback, final_callback = self.__aggregates[cls]
except KeyError:
@_ffi.callback("void(sqlite3_context*, int, sqlite3_value**)")
def step_callback(context, argc, c_params):
res = _lib.sqlite3_aggregate_context(context,
_ffi.sizeof("size_t"))
aggregate_ptr = _ffi.cast("size_t[1]", res)
if not aggregate_ptr[0]:
try:
aggregate = cls()
except Exception:
msg = (b"user-defined aggregate's '__init__' "
b"method raised error")
_lib.sqlite3_result_error(context, msg, len(msg))
return
aggregate_id = id(aggregate)
self.__aggregate_instances[aggregate_id] = aggregate
aggregate_ptr[0] = aggregate_id
else:
aggregate = self.__aggregate_instances[aggregate_ptr[0]]
params = _convert_params(context, argc, c_params)
try:
aggregate.step(*params)
except Exception:
msg = (b"user-defined aggregate's 'step' "
b"method raised error")
_lib.sqlite3_result_error(context, msg, len(msg))
@_ffi.callback("void(sqlite3_context*)")
def final_callback(context):
res = _lib.sqlite3_aggregate_context(context,
_ffi.sizeof("size_t"))
aggregate_ptr = _ffi.cast("size_t[1]", res)
if aggregate_ptr[0]:
aggregate = self.__aggregate_instances[aggregate_ptr[0]]
try:
val = aggregate.finalize()
except Exception:
msg = (b"user-defined aggregate's 'finalize' "
b"method raised error")
_lib.sqlite3_result_error(context, msg, len(msg))
else:
_convert_result(context, val)
finally:
del self.__aggregate_instances[aggregate_ptr[0]]
self.__aggregates[cls] = (step_callback, final_callback)
if isinstance(name, unicode):
name = name.encode('utf-8')
ret = _lib.sqlite3_create_function(self._db, name, num_args,
_lib.SQLITE_UTF8, _ffi.NULL,
_ffi.NULL,
step_callback,
final_callback)
if ret != _lib.SQLITE_OK:
raise self._get_exception(ret)
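    # Illustrative aggregate (sketch, not part of this module): any class with
    # step()/finalize() methods can be registered, e.g.
    # con.create_aggregate("mysum", 1, MySum), where MySum.step(value)
    # accumulates and MySum.finalize() returns the aggregate result.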
@_check_thread_wrap
@_check_closed_wrap
def create_collation(self, name, callback):
name = name.upper()
if not all(c in string.ascii_uppercase + string.digits + '_' for c in name):
raise ProgrammingError("invalid character in collation name")
if callback is None:
del self.__collations[name]
collation_callback = _ffi.NULL
else:
if not callable(callback):
raise TypeError("parameter must be callable")
@_ffi.callback("int(void*, int, const void*, int, const void*)")
def collation_callback(context, len1, str1, len2, str2):
text1 = _ffi.buffer(str1, len1)[:]
text2 = _ffi.buffer(str2, len2)[:]
try:
ret = callback(text1, text2)
assert isinstance(ret, (int, long))
return cmp(ret, 0)
except Exception:
return 0
self.__collations[name] = collation_callback
if isinstance(name, unicode):
name = name.encode('utf-8')
ret = _lib.sqlite3_create_collation(self._db, name,
_lib.SQLITE_UTF8,
_ffi.NULL,
collation_callback)
if ret != _lib.SQLITE_OK:
raise self._get_exception(ret)
@_check_thread_wrap
@_check_closed_wrap
def set_authorizer(self, callback):
try:
authorizer = self.__func_cache[callback]
except KeyError:
@_ffi.callback("int(void*, int, const char*, const char*, "
"const char*, const char*)")
def authorizer(userdata, action, arg1, arg2, dbname, source):
try:
ret = callback(action, arg1, arg2, dbname, source)
assert isinstance(ret, int)
# try to detect cases in which cffi would swallow
# OverflowError when casting the return value
assert int(_ffi.cast('int', ret)) == ret
return ret
except Exception:
return _lib.SQLITE_DENY
self.__func_cache[callback] = authorizer
ret = _lib.sqlite3_set_authorizer(self._db, authorizer, _ffi.NULL)
if ret != _lib.SQLITE_OK:
raise self._get_exception(ret)
@_check_thread_wrap
@_check_closed_wrap
def set_progress_handler(self, callable, nsteps):
if callable is None:
progress_handler = _ffi.NULL
else:
try:
progress_handler = self.__func_cache[callable]
except KeyError:
@_ffi.callback("int(void*)")
def progress_handler(userdata):
try:
return bool(callable())
except Exception:
# abort query if error occurred
return 1
self.__func_cache[callable] = progress_handler
_lib.sqlite3_progress_handler(self._db, nsteps, progress_handler,
_ffi.NULL)
@_check_thread_wrap
@_check_closed_wrap
def set_trace_callback(self, callable):
if callable is None:
trace_callback = _ffi.NULL
else:
try:
trace_callback = self.__func_cache[callable]
except KeyError:
@_ffi.callback("void(void*, const char*)")
def trace_callback(userdata, statement):
stmt = _ffi.string(statement).decode('utf-8')
callable(stmt)
self.__func_cache[callable] = trace_callback
_lib.sqlite3_trace(self._db, trace_callback, _ffi.NULL)
if sys.version_info[0] >= 3:
def __get_in_transaction(self):
return self._in_transaction
in_transaction = property(__get_in_transaction)
def __get_total_changes(self):
self._check_closed()
return _lib.sqlite3_total_changes(self._db)
total_changes = property(__get_total_changes)
def __get_isolation_level(self):
return self._isolation_level
def __set_isolation_level(self, val):
if val is None:
self.commit()
else:
self.__begin_statement = str("BEGIN " + val).encode('utf-8')
self._isolation_level = val
isolation_level = property(__get_isolation_level, __set_isolation_level)
if hasattr(_lib, 'sqlite3_enable_load_extension'):
@_check_thread_wrap
@_check_closed_wrap
def enable_load_extension(self, enabled):
rc = _lib.sqlite3_enable_load_extension(self._db, int(enabled))
if rc != _lib.SQLITE_OK:
raise OperationalError("Error enabling load extension")
class Cursor(object):
__initialized = False
__statement = None
def __init__(self, con):
if not isinstance(con, Connection):
raise TypeError
self.__connection = con
self.arraysize = 1
self.row_factory = None
self._reset = False
self.__locked = False
self.__closed = False
self.__lastrowid = None
self.__rowcount = -1
con._check_thread()
con._remember_cursor(self)
self.__initialized = True
def close(self):
self.__connection._check_thread()
self.__connection._check_closed()
if self.__statement:
self.__statement._reset()
self.__statement = None
self.__closed = True
def __check_cursor(self):
if not self.__initialized:
raise ProgrammingError("Base Cursor.__init__ not called.")
if self.__closed:
raise ProgrammingError("Cannot operate on a closed cursor.")
if self.__locked:
raise ProgrammingError("Recursive use of cursors not allowed.")
self.__connection._check_thread()
self.__connection._check_closed()
def __check_cursor_wrap(func):
@wraps(func)
def wrapper(self, *args, **kwargs):
self.__check_cursor()
return func(self, *args, **kwargs)
return wrapper
def __check_reset(self):
if self._reset:
raise InterfaceError(
"Cursor needed to be reset because of commit/rollback "
"and can no longer be fetched from.")
def __build_row_cast_map(self):
if not self.__connection._detect_types:
return
self.__row_cast_map = []
for i in xrange(_lib.sqlite3_column_count(self.__statement._statement)):
converter = None
if self.__connection._detect_types & PARSE_COLNAMES:
colname = _lib.sqlite3_column_name(self.__statement._statement, i)
if colname:
colname = _ffi.string(colname).decode('utf-8')
type_start = -1
key = None
for pos in range(len(colname)):
if colname[pos] == '[':
type_start = pos + 1
elif colname[pos] == ']' and type_start != -1:
key = colname[type_start:pos]
converter = converters[key.upper()]
if converter is None and self.__connection._detect_types & PARSE_DECLTYPES:
decltype = _lib.sqlite3_column_decltype(self.__statement._statement, i)
if decltype:
decltype = _ffi.string(decltype).decode('utf-8')
# if multiple words, use first, eg.
# "INTEGER NOT NULL" => "INTEGER"
decltype = decltype.split()[0]
if '(' in decltype:
decltype = decltype[:decltype.index('(')]
converter = converters.get(decltype.upper(), None)
self.__row_cast_map.append(converter)
def __fetch_one_row(self):
num_cols = _lib.sqlite3_data_count(self.__statement._statement)
row = newlist_hint(num_cols)
for i in xrange(num_cols):
if self.__connection._detect_types:
converter = self.__row_cast_map[i]
else:
converter = None
if converter is not None:
blob = _lib.sqlite3_column_blob(self.__statement._statement, i)
if not blob:
val = None
else:
blob_len = _lib.sqlite3_column_bytes(self.__statement._statement, i)
val = _ffi.buffer(blob, blob_len)[:]
val = converter(val)
else:
typ = _lib.sqlite3_column_type(self.__statement._statement, i)
if typ == _lib.SQLITE_NULL:
val = None
elif typ == _lib.SQLITE_INTEGER:
val = _lib.sqlite3_column_int64(self.__statement._statement, i)
val = int(val)
elif typ == _lib.SQLITE_FLOAT:
val = _lib.sqlite3_column_double(self.__statement._statement, i)
elif typ == _lib.SQLITE_TEXT:
text = _lib.sqlite3_column_text(self.__statement._statement, i)
text_len = _lib.sqlite3_column_bytes(self.__statement._statement, i)
val = _ffi.buffer(text, text_len)[:]
try:
val = self.__connection.text_factory(val)
except Exception:
column_name = _lib.sqlite3_column_name(
self.__statement._statement, i)
if column_name:
column_name = _ffi.string(column_name).decode('utf-8')
else:
column_name = "<unknown column name>"
val = val.decode('ascii', 'replace')
raise OperationalError(
"Could not decode to UTF-8 column '%s' with text '%s'" % (
column_name, val))
elif typ == _lib.SQLITE_BLOB:
blob = _lib.sqlite3_column_blob(self.__statement._statement, i)
blob_len = _lib.sqlite3_column_bytes(self.__statement._statement, i)
val = _BLOB_TYPE(_ffi.buffer(blob, blob_len)[:])
row.append(val)
return tuple(row)
def __execute(self, multiple, sql, many_params):
self.__locked = True
self._reset = False
try:
del self.__next_row
except AttributeError:
pass
try:
if not isinstance(sql, basestring):
raise ValueError("operation parameter must be str or unicode")
try:
del self.__description
except AttributeError:
pass
self.__rowcount = -1
self.__statement = self.__connection._statement_cache.get(sql)
if self.__connection._isolation_level is not None:
if self.__statement._type in (
_STMT_TYPE_UPDATE,
_STMT_TYPE_DELETE,
_STMT_TYPE_INSERT,
_STMT_TYPE_REPLACE
):
if not self.__connection._in_transaction:
self.__connection._begin()
elif self.__statement._type == _STMT_TYPE_OTHER:
if self.__connection._in_transaction:
self.__connection.commit()
elif self.__statement._type == _STMT_TYPE_SELECT:
if multiple:
raise ProgrammingError("You cannot execute SELECT "
"statements in executemany().")
for params in many_params:
self.__statement._set_params(params)
# Actually execute the SQL statement
ret = _lib.sqlite3_step(self.__statement._statement)
if ret == _lib.SQLITE_ROW:
if multiple:
raise ProgrammingError("executemany() can only execute DML statements.")
self.__build_row_cast_map()
self.__next_row = self.__fetch_one_row()
elif ret == _lib.SQLITE_DONE:
if not multiple:
self.__statement._reset()
else:
self.__statement._reset()
raise self.__connection._get_exception(ret)
if self.__statement._type in (
_STMT_TYPE_UPDATE,
_STMT_TYPE_DELETE,
_STMT_TYPE_INSERT,
_STMT_TYPE_REPLACE
):
if self.__rowcount == -1:
self.__rowcount = 0
self.__rowcount += _lib.sqlite3_changes(self.__connection._db)
if not multiple and self.__statement._type == _STMT_TYPE_INSERT:
self.__lastrowid = _lib.sqlite3_last_insert_rowid(self.__connection._db)
else:
self.__lastrowid = None
if multiple:
self.__statement._reset()
finally:
self.__connection._in_transaction = \
not _lib.sqlite3_get_autocommit(self.__connection._db)
self.__locked = False
return self
@__check_cursor_wrap
def execute(self, sql, params=[]):
return self.__execute(False, sql, [params])
@__check_cursor_wrap
def executemany(self, sql, many_params):
return self.__execute(True, sql, many_params)
def executescript(self, sql):
self.__check_cursor()
self._reset = False
if isinstance(sql, unicode):
sql = sql.encode('utf-8')
elif not isinstance(sql, str):
raise ValueError("script argument must be unicode or string.")
statement_star = _ffi.new('sqlite3_stmt **')
next_char = _ffi.new('char **')
self.__connection.commit()
while True:
c_sql = _ffi.new("char[]", sql)
rc = _lib.sqlite3_prepare(self.__connection._db, c_sql, -1,
statement_star, next_char)
if rc != _lib.SQLITE_OK:
raise self.__connection._get_exception(rc)
rc = _lib.SQLITE_ROW
while rc == _lib.SQLITE_ROW:
if not statement_star[0]:
rc = _lib.SQLITE_OK
else:
rc = _lib.sqlite3_step(statement_star[0])
if rc != _lib.SQLITE_DONE:
_lib.sqlite3_finalize(statement_star[0])
if rc == _lib.SQLITE_OK:
break
else:
raise self.__connection._get_exception(rc)
rc = _lib.sqlite3_finalize(statement_star[0])
if rc != _lib.SQLITE_OK:
raise self.__connection._get_exception(rc)
sql = _ffi.string(next_char[0])
if not sql:
break
return self
def __iter__(self):
return self
def __next__(self):
self.__check_cursor()
self.__check_reset()
if not self.__statement:
raise StopIteration
try:
next_row = self.__next_row
except AttributeError:
raise StopIteration
del self.__next_row
if self.row_factory is not None:
next_row = self.row_factory(self, next_row)
ret = _lib.sqlite3_step(self.__statement._statement)
if ret == _lib.SQLITE_ROW:
self.__next_row = self.__fetch_one_row()
else:
self.__statement._reset()
if ret != _lib.SQLITE_DONE:
raise self.__connection._get_exception(ret)
return next_row
if sys.version_info[0] < 3:
next = __next__
del __next__
def fetchone(self):
return next(self, None)
def fetchmany(self, size=None):
if size is None:
size = self.arraysize
lst = []
for row in self:
lst.append(row)
if len(lst) == size:
break
return lst
def fetchall(self):
return list(self)
def __get_connection(self):
return self.__connection
connection = property(__get_connection)
def __get_rowcount(self):
return self.__rowcount
rowcount = property(__get_rowcount)
def __get_description(self):
try:
return self.__description
except AttributeError:
if self.__statement:
self.__description = self.__statement._get_description()
return self.__description
description = property(__get_description)
def __get_lastrowid(self):
return self.__lastrowid
lastrowid = property(__get_lastrowid)
def setinputsizes(self, *args):
pass
def setoutputsize(self, *args):
pass
class Statement(object):
_statement = None
def __init__(self, connection, sql):
self.__con = connection
self._in_use = False
if not isinstance(sql, basestring):
raise Warning("SQL is of wrong type. Must be string or unicode.")
if '\0' in sql:
raise ValueError("the query contains a null character")
first_word = sql.lstrip().split(" ")[0].upper()
if first_word == "":
self._type = _STMT_TYPE_INVALID
elif first_word == "SELECT":
self._type = _STMT_TYPE_SELECT
elif first_word == "INSERT":
self._type = _STMT_TYPE_INSERT
elif first_word == "UPDATE":
self._type = _STMT_TYPE_UPDATE
elif first_word == "DELETE":
self._type = _STMT_TYPE_DELETE
elif first_word == "REPLACE":
self._type = _STMT_TYPE_REPLACE
else:
self._type = _STMT_TYPE_OTHER
if isinstance(sql, unicode):
sql = sql.encode('utf-8')
statement_star = _ffi.new('sqlite3_stmt **')
next_char = _ffi.new('char **')
c_sql = _ffi.new("char[]", sql)
ret = _lib.sqlite3_prepare_v2(self.__con._db, c_sql, -1,
statement_star, next_char)
self._statement = statement_star[0]
if ret == _lib.SQLITE_OK and not self._statement:
# an empty statement, work around that, as it's the least trouble
self._type = _STMT_TYPE_SELECT
c_sql = _ffi.new("char[]", b"select 42")
ret = _lib.sqlite3_prepare_v2(self.__con._db, c_sql, -1,
statement_star, next_char)
self._statement = statement_star[0]
if ret != _lib.SQLITE_OK:
raise self.__con._get_exception(ret)
self.__con._remember_statement(self)
tail = _ffi.string(next_char[0]).decode('utf-8')
if _check_remaining_sql(tail):
raise Warning("You can only execute one statement at a time.")
def __del__(self):
if self._statement:
self.__con._finalize_raw_statement(self._statement)
def _finalize(self):
if self._statement:
self.__con._finalize_raw_statement(self._statement)
self._statement = None
self._in_use = False
def _reset(self):
if self._in_use and self._statement:
_lib.sqlite3_reset(self._statement)
self._in_use = False
if sys.version_info[0] < 3:
def __check_decodable(self, param):
if self.__con.text_factory in (unicode, OptimizedUnicode,
_unicode_text_factory):
for c in param:
if ord(c) & 0x80 != 0:
raise self.__con.ProgrammingError(
"You must not use 8-bit bytestrings unless "
"you use a text_factory that can interpret "
"8-bit bytestrings (like text_factory = str). "
"It is highly recommended that you instead "
"just switch your application to Unicode strings.")
def __set_param(self, idx, param):
cvt = converters.get(type(param))
if cvt is not None:
param = cvt(param)
try:
param = adapt(param)
except:
pass # And use previous value
if param is None:
rc = _lib.sqlite3_bind_null(self._statement, idx)
elif isinstance(param, (bool, int, long)):
if -2147483648 <= param <= 2147483647:
rc = _lib.sqlite3_bind_int(self._statement, idx, param)
else:
rc = _lib.sqlite3_bind_int64(self._statement, idx, param)
elif isinstance(param, float):
rc = _lib.sqlite3_bind_double(self._statement, idx, param)
elif isinstance(param, unicode):
param = param.encode("utf-8")
rc = _lib.sqlite3_bind_text(self._statement, idx, param,
len(param), _SQLITE_TRANSIENT)
elif isinstance(param, str):
self.__check_decodable(param)
rc = _lib.sqlite3_bind_text(self._statement, idx, param,
len(param), _SQLITE_TRANSIENT)
elif isinstance(param, (buffer, bytes)):
param = bytes(param)
rc = _lib.sqlite3_bind_blob(self._statement, idx, param,
len(param), _SQLITE_TRANSIENT)
else:
rc = -1
return rc
def _set_params(self, params):
self._in_use = True
num_params_needed = _lib.sqlite3_bind_parameter_count(self._statement)
if isinstance(params, (tuple, list)) or \
not isinstance(params, dict) and \
hasattr(params, '__getitem__'):
try:
num_params = len(params)
except TypeError:
num_params = -1
if num_params != num_params_needed:
raise ProgrammingError("Incorrect number of bindings supplied. "
"The current statement uses %d, and "
"there are %d supplied." %
(num_params_needed, num_params))
for i in range(num_params):
rc = self.__set_param(i + 1, params[i])
if rc != _lib.SQLITE_OK:
raise InterfaceError("Error binding parameter %d - "
"probably unsupported type." % i)
elif isinstance(params, dict):
for i in range(1, num_params_needed + 1):
param_name = _lib.sqlite3_bind_parameter_name(self._statement, i)
if not param_name:
raise ProgrammingError("Binding %d has no name, but you "
"supplied a dictionary (which has "
"only names)." % i)
param_name = _ffi.string(param_name).decode('utf-8')[1:]
try:
param = params[param_name]
except KeyError:
raise ProgrammingError("You did not supply a value for "
"binding %d." % i)
rc = self.__set_param(i, param)
if rc != _lib.SQLITE_OK:
raise InterfaceError("Error binding parameter :%s - "
"probably unsupported type." %
param_name)
else:
raise ValueError("parameters are of unsupported type")
def _get_description(self):
if self._type in (
_STMT_TYPE_INSERT,
_STMT_TYPE_UPDATE,
_STMT_TYPE_DELETE,
_STMT_TYPE_REPLACE
):
return None
desc = []
for i in xrange(_lib.sqlite3_column_count(self._statement)):
name = _lib.sqlite3_column_name(self._statement, i)
if name:
name = _ffi.string(name).decode('utf-8').split("[")[0].strip()
desc.append((name, None, None, None, None, None, None))
return desc
class Row(object):
def __init__(self, cursor, values):
self.description = cursor.description
self.values = values
def __len__(self):
return len(self.values)
def __getitem__(self, item):
if isinstance(item, (int, long)):
return self.values[item]
else:
item = item.lower()
for idx, desc in enumerate(self.description):
if desc[0].lower() == item:
return self.values[idx]
raise IndexError("No item with that key")
def keys(self):
return [desc[0] for desc in self.description]
def __eq__(self, other):
if not isinstance(other, Row):
return NotImplemented
if self.description != other.description:
return False
if self.values != other.values:
return False
return True
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash(tuple(self.description)) ^ hash(tuple(self.values))
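# Illustrative use of Row (sketch): with con.row_factory = Row, a fetched row
# supports positional and case-insensitive name access, e.g.
#   row = con.execute("select 1 as a, 2 as b").fetchone()
#   row[0] == row["A"] == 1 and row.keys() == ["a", "b"]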
def _check_remaining_sql(s):
state = "NORMAL"
for char in s:
if char == chr(0):
return 0
elif char == '-':
if state == "NORMAL":
state = "LINECOMMENT_1"
elif state == "LINECOMMENT_1":
state = "IN_LINECOMMENT"
elif char in (' ', '\t'):
pass
elif char == '\n':
if state == "IN_LINECOMMENT":
state = "NORMAL"
elif char == '/':
if state == "NORMAL":
state = "COMMENTSTART_1"
elif state == "COMMENTEND_1":
state = "NORMAL"
elif state == "COMMENTSTART_1":
return 1
elif char == '*':
if state == "NORMAL":
return 1
elif state == "LINECOMMENT_1":
return 1
elif state == "COMMENTSTART_1":
state = "IN_COMMENT"
elif state == "IN_COMMENT":
state = "COMMENTEND_1"
else:
if state == "COMMENTEND_1":
state = "IN_COMMENT"
elif state == "IN_LINECOMMENT":
pass
elif state == "IN_COMMENT":
pass
else:
return 1
return 0
def _convert_params(con, nargs, params):
_params = []
for i in range(nargs):
typ = _lib.sqlite3_value_type(params[i])
if typ == _lib.SQLITE_NULL:
val = None
elif typ == _lib.SQLITE_INTEGER:
val = _lib.sqlite3_value_int64(params[i])
val = int(val)
elif typ == _lib.SQLITE_FLOAT:
val = _lib.sqlite3_value_double(params[i])
elif typ == _lib.SQLITE_TEXT:
val = _lib.sqlite3_value_text(params[i])
val = _ffi.string(val).decode('utf-8')
elif typ == _lib.SQLITE_BLOB:
blob = _lib.sqlite3_value_blob(params[i])
blob_len = _lib.sqlite3_value_bytes(params[i])
val = _BLOB_TYPE(_ffi.buffer(blob, blob_len)[:])
else:
raise NotImplementedError
_params.append(val)
return _params
def _convert_result(con, val):
if val is None:
_lib.sqlite3_result_null(con)
elif isinstance(val, (bool, int, long)):
_lib.sqlite3_result_int64(con, int(val))
elif isinstance(val, float):
_lib.sqlite3_result_double(con, val)
elif isinstance(val, unicode):
val = val.encode('utf-8')
_lib.sqlite3_result_text(con, val, len(val), _SQLITE_TRANSIENT)
elif isinstance(val, str):
_lib.sqlite3_result_text(con, val, len(val), _SQLITE_TRANSIENT)
elif isinstance(val, (buffer, bytes)):
_lib.sqlite3_result_blob(con, bytes(val), len(val), _SQLITE_TRANSIENT)
else:
raise NotImplementedError
def _function_callback(real_cb, context, nargs, c_params):
params = _convert_params(context, nargs, c_params)
try:
val = real_cb(*params)
except Exception:
msg = b"user-defined function raised exception"
_lib.sqlite3_result_error(context, msg, len(msg))
else:
_convert_result(context, val)
converters = {}
adapters = {}
class PrepareProtocol(object):
pass
def register_adapter(typ, callable):
adapters[typ, PrepareProtocol] = callable
def register_converter(name, callable):
converters[name.upper()] = callable
def register_adapters_and_converters():
def adapt_date(val):
return val.isoformat()
def adapt_datetime(val):
return val.isoformat(" ")
def convert_date(val):
return datetime.date(*map(int, val.split("-")))
def convert_timestamp(val):
datepart, timepart = val.split(" ")
year, month, day = map(int, datepart.split("-"))
timepart_full = timepart.split(".")
hours, minutes, seconds = map(int, timepart_full[0].split(":"))
if len(timepart_full) == 2:
microseconds = int(timepart_full[1])
else:
microseconds = 0
return datetime.datetime(year, month, day, hours, minutes, seconds,
microseconds)
register_adapter(datetime.date, adapt_date)
register_adapter(datetime.datetime, adapt_datetime)
register_converter("date", convert_date)
register_converter("timestamp", convert_timestamp)
def adapt(val, proto=PrepareProtocol):
# look for an adapter in the registry
adapter = adapters.get((type(val), proto), None)
if adapter is not None:
return adapter(val)
# try to have the protocol adapt this object
if hasattr(proto, '__adapt__'):
try:
adapted = proto.__adapt__(val)
except TypeError:
pass
else:
if adapted is not None:
return adapted
# and finally try to have the object adapt itself
if hasattr(val, '__conform__'):
try:
adapted = val.__conform__(proto)
except TypeError:
pass
else:
if adapted is not None:
return adapted
return val
register_adapters_and_converters()
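# Illustrative intent (sketch): with detect_types=PARSE_DECLTYPES, a column
# declared as "date" is adapted on insert via adapt_date() (stored as an ISO
# string) and routed through convert_date() when rows are fetched, so callers
# work with datetime.date objects rather than raw strings.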
| mit | -6,683,226,338,269,561,000 | 34.230382 | 96 | 0.530602 | false |
dhalperi/incubator-beam | sdks/python/apache_beam/runners/api/beam_fn_api_pb2_grpc.py | 3 | 6684 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
from grpc.framework.common import cardinality
from grpc.framework.interfaces.face import utilities as face_utilities
import beam_fn_api_pb2 as beam__fn__api__pb2
# This module is experimental. No backwards-compatibility guarantees.
class BeamFnControlStub(object):
"""
Control Plane API
Progress reporting and splitting still need further vetting. Also, this may change
with the addition of new types of instructions/responses related to metrics.
An API that describes the work that a SDK harness is meant to do.
Stable
"""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.Control = channel.stream_stream(
'/org.apache.beam.fn.v1.BeamFnControl/Control',
request_serializer=beam__fn__api__pb2.InstructionResponse.SerializeToString,
response_deserializer=beam__fn__api__pb2.InstructionRequest.FromString,
)
class BeamFnControlServicer(object):
"""
Control Plane API
Progress reporting and splitting still need further vetting. Also, this may change
with the addition of new types of instructions/responses related to metrics.
An API that describes the work that a SDK harness is meant to do.
Stable
"""
def Control(self, request_iterator, context):
"""Instructions sent by the runner to the SDK requesting different types
of work.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_BeamFnControlServicer_to_server(servicer, server):
rpc_method_handlers = {
'Control': grpc.stream_stream_rpc_method_handler(
servicer.Control,
request_deserializer=beam__fn__api__pb2.InstructionResponse.FromString,
response_serializer=beam__fn__api__pb2.InstructionRequest.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'org.apache.beam.fn.v1.BeamFnControl', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
class BeamFnDataStub(object):
"""Stable
"""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.Data = channel.stream_stream(
'/org.apache.beam.fn.v1.BeamFnData/Data',
request_serializer=beam__fn__api__pb2.Elements.SerializeToString,
response_deserializer=beam__fn__api__pb2.Elements.FromString,
)
class BeamFnDataServicer(object):
"""Stable
"""
def Data(self, request_iterator, context):
"""Used to send data between harnesses.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_BeamFnDataServicer_to_server(servicer, server):
rpc_method_handlers = {
'Data': grpc.stream_stream_rpc_method_handler(
servicer.Data,
request_deserializer=beam__fn__api__pb2.Elements.FromString,
response_serializer=beam__fn__api__pb2.Elements.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'org.apache.beam.fn.v1.BeamFnData', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
class BeamFnStateStub(object):
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.State = channel.stream_stream(
'/org.apache.beam.fn.v1.BeamFnState/State',
request_serializer=beam__fn__api__pb2.StateRequest.SerializeToString,
response_deserializer=beam__fn__api__pb2.StateResponse.FromString,
)
class BeamFnStateServicer(object):
def State(self, request_iterator, context):
"""Used to get/append/clear state stored by the runner on behalf of the SDK.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_BeamFnStateServicer_to_server(servicer, server):
rpc_method_handlers = {
'State': grpc.stream_stream_rpc_method_handler(
servicer.State,
request_deserializer=beam__fn__api__pb2.StateRequest.FromString,
response_serializer=beam__fn__api__pb2.StateResponse.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'org.apache.beam.fn.v1.BeamFnState', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
class BeamFnLoggingStub(object):
"""Stable
"""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.Logging = channel.stream_stream(
'/org.apache.beam.fn.v1.BeamFnLogging/Logging',
request_serializer=beam__fn__api__pb2.LogEntry.List.SerializeToString,
response_deserializer=beam__fn__api__pb2.LogControl.FromString,
)
class BeamFnLoggingServicer(object):
"""Stable
"""
def Logging(self, request_iterator, context):
"""Allows for the SDK to emit log entries which the runner can
associate with the active job.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_BeamFnLoggingServicer_to_server(servicer, server):
rpc_method_handlers = {
'Logging': grpc.stream_stream_rpc_method_handler(
servicer.Logging,
request_deserializer=beam__fn__api__pb2.LogEntry.List.FromString,
response_serializer=beam__fn__api__pb2.LogControl.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'org.apache.beam.fn.v1.BeamFnLogging', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
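# Illustrative wiring (sketch; the servicer class and port are hypothetical):
#   import grpc
#   from concurrent import futures
#   server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
#   add_BeamFnLoggingServicer_to_server(MyLoggingServicer(), server)
#   server.add_insecure_port('[::]:50051')
#   server.start()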
| apache-2.0 | 8,502,844,380,194,271,000 | 31.604878 | 86 | 0.709306 | false |
sacharya/nova | nova/api/openstack/compute/contrib/volumes.py | 4 | 24854 | # Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""The volumes extension."""
import webob
from webob import exc
from nova.api.openstack import common
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova.api.openstack import xmlutil
from nova import compute
from nova import exception
from nova.openstack.common.gettextutils import _
from nova.openstack.common import log as logging
from nova.openstack.common import strutils
from nova.openstack.common import uuidutils
from nova import volume
LOG = logging.getLogger(__name__)
authorize = extensions.extension_authorizer('compute', 'volumes')
authorize_attach = extensions.extension_authorizer('compute',
'volume_attachments')
def _translate_volume_detail_view(context, vol):
"""Maps keys for volumes details view."""
d = _translate_volume_summary_view(context, vol)
# No additional data / lookups at the moment
return d
def _translate_volume_summary_view(context, vol):
"""Maps keys for volumes summary view."""
d = {}
d['id'] = vol['id']
d['status'] = vol['status']
d['size'] = vol['size']
d['availabilityZone'] = vol['availability_zone']
d['createdAt'] = vol['created_at']
if vol['attach_status'] == 'attached':
d['attachments'] = [_translate_attachment_detail_view(vol['id'],
vol['instance_uuid'],
vol['mountpoint'])]
else:
d['attachments'] = [{}]
d['displayName'] = vol['display_name']
d['displayDescription'] = vol['display_description']
if vol['volume_type_id'] and vol.get('volume_type'):
d['volumeType'] = vol['volume_type']['name']
else:
d['volumeType'] = vol['volume_type_id']
d['snapshotId'] = vol['snapshot_id']
LOG.audit(_("vol=%s"), vol, context=context)
if vol.get('volume_metadata'):
d['metadata'] = vol.get('volume_metadata')
else:
d['metadata'] = {}
return d
def make_volume(elem):
elem.set('id')
elem.set('status')
elem.set('size')
elem.set('availabilityZone')
elem.set('createdAt')
elem.set('displayName')
elem.set('displayDescription')
elem.set('volumeType')
elem.set('snapshotId')
attachments = xmlutil.SubTemplateElement(elem, 'attachments')
attachment = xmlutil.SubTemplateElement(attachments, 'attachment',
selector='attachments')
make_attachment(attachment)
# Attach metadata node
elem.append(common.MetadataTemplate())
class VolumeTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('volume', selector='volume')
make_volume(root)
return xmlutil.MasterTemplate(root, 1)
class VolumesTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('volumes')
elem = xmlutil.SubTemplateElement(root, 'volume', selector='volumes')
make_volume(elem)
return xmlutil.MasterTemplate(root, 1)
class CommonDeserializer(wsgi.MetadataXMLDeserializer):
"""Common deserializer to handle xml-formatted volume requests.
Handles standard volume attributes as well as the optional metadata
attribute
"""
metadata_deserializer = common.MetadataXMLDeserializer()
def _extract_volume(self, node):
"""Marshal the volume attribute of a parsed request."""
vol = {}
volume_node = self.find_first_child_named(node, 'volume')
attributes = ['display_name', 'display_description', 'size',
'volume_type', 'availability_zone']
for attr in attributes:
if volume_node.getAttribute(attr):
vol[attr] = volume_node.getAttribute(attr)
metadata_node = self.find_first_child_named(volume_node, 'metadata')
if metadata_node is not None:
vol['metadata'] = self.extract_metadata(metadata_node)
return vol
class CreateDeserializer(CommonDeserializer):
"""Deserializer to handle xml-formatted create volume requests.
Handles standard volume attributes as well as the optional metadata
attribute
"""
def default(self, string):
"""Deserialize an xml-formatted volume create request."""
dom = xmlutil.safe_minidom_parse_string(string)
vol = self._extract_volume(dom)
return {'body': {'volume': vol}}
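# Illustrative XML body accepted by CreateDeserializer (values are hypothetical):
#   <volume display_name="vol-001" display_description="scratch space"
#           size="1" availability_zone="nova">
#     <metadata><meta key="purpose">testing</meta></metadata>
#   </volume>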
class VolumeController(wsgi.Controller):
"""The Volumes API controller for the OpenStack API."""
def __init__(self):
self.volume_api = volume.API()
super(VolumeController, self).__init__()
@wsgi.serializers(xml=VolumeTemplate)
def show(self, req, id):
"""Return data about the given volume."""
context = req.environ['nova.context']
authorize(context)
try:
vol = self.volume_api.get(context, id)
except exception.NotFound:
raise exc.HTTPNotFound()
return {'volume': _translate_volume_detail_view(context, vol)}
def delete(self, req, id):
"""Delete a volume."""
context = req.environ['nova.context']
authorize(context)
LOG.audit(_("Delete volume with id: %s"), id, context=context)
try:
self.volume_api.delete(context, id)
except exception.NotFound:
raise exc.HTTPNotFound()
return webob.Response(status_int=202)
@wsgi.serializers(xml=VolumesTemplate)
def index(self, req):
"""Returns a summary list of volumes."""
return self._items(req, entity_maker=_translate_volume_summary_view)
@wsgi.serializers(xml=VolumesTemplate)
def detail(self, req):
"""Returns a detailed list of volumes."""
return self._items(req, entity_maker=_translate_volume_detail_view)
def _items(self, req, entity_maker):
"""Returns a list of volumes, transformed through entity_maker."""
context = req.environ['nova.context']
authorize(context)
volumes = self.volume_api.get_all(context)
limited_list = common.limited(volumes, req)
res = [entity_maker(context, vol) for vol in limited_list]
return {'volumes': res}
@wsgi.serializers(xml=VolumeTemplate)
@wsgi.deserializers(xml=CreateDeserializer)
def create(self, req, body):
"""Creates a new volume."""
context = req.environ['nova.context']
authorize(context)
if not self.is_valid_body(body, 'volume'):
raise exc.HTTPUnprocessableEntity()
vol = body['volume']
vol_type = vol.get('volume_type', None)
metadata = vol.get('metadata', None)
snapshot_id = vol.get('snapshot_id')
if snapshot_id is not None:
snapshot = self.volume_api.get_snapshot(context, snapshot_id)
else:
snapshot = None
size = vol.get('size', None)
if size is None and snapshot is not None:
size = snapshot['volume_size']
LOG.audit(_("Create volume of %s GB"), size, context=context)
availability_zone = vol.get('availability_zone', None)
try:
new_volume = self.volume_api.create(
context,
size,
vol.get('display_name'),
vol.get('display_description'),
snapshot=snapshot,
volume_type=vol_type,
metadata=metadata,
availability_zone=availability_zone
)
except exception.InvalidInput as err:
raise exc.HTTPBadRequest(explanation=err.format_message())
# TODO(vish): Instance should be None at db layer instead of
# trying to lazy load, but for now we turn it into
# a dict to avoid an error.
retval = _translate_volume_detail_view(context, dict(new_volume))
result = {'volume': retval}
location = '%s/%s' % (req.url, new_volume['id'])
return wsgi.ResponseObject(result, headers=dict(location=location))
def _translate_attachment_detail_view(volume_id, instance_uuid, mountpoint):
"""Maps keys for attachment details view."""
d = _translate_attachment_summary_view(volume_id,
instance_uuid,
mountpoint)
# No additional data / lookups at the moment
return d
def _translate_attachment_summary_view(volume_id, instance_uuid, mountpoint):
"""Maps keys for attachment summary view."""
d = {}
# NOTE(justinsb): We use the volume id as the id of the attachment object
d['id'] = volume_id
d['volumeId'] = volume_id
d['serverId'] = instance_uuid
if mountpoint:
d['device'] = mountpoint
return d
def make_attachment(elem):
elem.set('id')
elem.set('serverId')
elem.set('volumeId')
elem.set('device')
class VolumeAttachmentTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('volumeAttachment',
selector='volumeAttachment')
make_attachment(root)
return xmlutil.MasterTemplate(root, 1)
class VolumeAttachmentsTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('volumeAttachments')
elem = xmlutil.SubTemplateElement(root, 'volumeAttachment',
selector='volumeAttachments')
make_attachment(elem)
return xmlutil.MasterTemplate(root, 1)
class VolumeAttachmentController(wsgi.Controller):
"""The volume attachment API controller for the OpenStack API.
A child resource of the server. Note that we use the volume id
as the ID of the attachment (though this is not guaranteed externally)
"""
def __init__(self, ext_mgr=None):
self.compute_api = compute.API()
self.volume_api = volume.API()
self.ext_mgr = ext_mgr
super(VolumeAttachmentController, self).__init__()
@wsgi.serializers(xml=VolumeAttachmentsTemplate)
def index(self, req, server_id):
"""Returns the list of volume attachments for a given instance."""
context = req.environ['nova.context']
authorize_attach(context, action='index')
return self._items(req, server_id,
entity_maker=_translate_attachment_summary_view)
@wsgi.serializers(xml=VolumeAttachmentTemplate)
def show(self, req, server_id, id):
"""Return data about the given volume attachment."""
context = req.environ['nova.context']
authorize(context)
authorize_attach(context, action='show')
volume_id = id
try:
instance = self.compute_api.get(context, server_id)
except exception.NotFound:
raise exc.HTTPNotFound()
bdms = self.compute_api.get_instance_bdms(context, instance)
if not bdms:
LOG.debug(_("Instance %s is not attached."), server_id)
raise exc.HTTPNotFound()
assigned_mountpoint = None
for bdm in bdms:
if bdm['volume_id'] == volume_id:
assigned_mountpoint = bdm['device_name']
break
if assigned_mountpoint is None:
LOG.debug("volume_id not found")
raise exc.HTTPNotFound()
return {'volumeAttachment': _translate_attachment_detail_view(
volume_id,
instance['uuid'],
assigned_mountpoint)}
def _validate_volume_id(self, volume_id):
if not uuidutils.is_uuid_like(volume_id):
msg = _("Bad volumeId format: volumeId is "
"not in proper format (%s)") % volume_id
raise exc.HTTPBadRequest(explanation=msg)
@wsgi.serializers(xml=VolumeAttachmentTemplate)
def create(self, req, server_id, body):
"""Attach a volume to an instance."""
context = req.environ['nova.context']
authorize(context)
authorize_attach(context, action='create')
if not self.is_valid_body(body, 'volumeAttachment'):
raise exc.HTTPUnprocessableEntity()
volume_id = body['volumeAttachment']['volumeId']
device = body['volumeAttachment'].get('device')
self._validate_volume_id(volume_id)
LOG.audit(_("Attach volume %(volume_id)s to instance %(server_id)s "
"at %(device)s"),
{'volume_id': volume_id,
'device': device,
'server_id': server_id},
context=context)
try:
instance = self.compute_api.get(context, server_id)
device = self.compute_api.attach_volume(context, instance,
volume_id, device)
except exception.NotFound:
raise exc.HTTPNotFound()
except exception.InstanceIsLocked as e:
raise exc.HTTPConflict(explanation=e.format_message())
except exception.InstanceInvalidState as state_error:
common.raise_http_conflict_for_instance_invalid_state(state_error,
'attach_volume')
# The attach is async
attachment = {}
attachment['id'] = volume_id
attachment['serverId'] = server_id
attachment['volumeId'] = volume_id
attachment['device'] = device
# NOTE(justinsb): And now, we have a problem...
# The attach is async, so there's a window in which we don't see
# the attachment (until the attachment completes). We could also
# get problems with concurrent requests. I think we need an
# attachment state, and to write to the DB here, but that's a bigger
# change.
# For now, we'll probably have to rely on libraries being smart
# TODO(justinsb): How do I return "accepted" here?
return {'volumeAttachment': attachment}
def update(self, req, server_id, id, body):
if (not self.ext_mgr or
not self.ext_mgr.is_loaded('os-volume-attachment-update')):
raise exc.HTTPBadRequest()
context = req.environ['nova.context']
authorize(context)
authorize_attach(context, action='update')
if not self.is_valid_body(body, 'volumeAttachment'):
raise exc.HTTPUnprocessableEntity()
old_volume_id = id
old_volume = self.volume_api.get(context, old_volume_id)
new_volume_id = body['volumeAttachment']['volumeId']
self._validate_volume_id(new_volume_id)
new_volume = self.volume_api.get(context, new_volume_id)
try:
instance = self.compute_api.get(context, server_id,
want_objects=True)
except exception.NotFound:
raise exc.HTTPNotFound()
bdms = self.compute_api.get_instance_bdms(context, instance)
found = False
try:
for bdm in bdms:
if bdm['volume_id'] != old_volume_id:
continue
try:
self.compute_api.swap_volume(context, instance, old_volume,
new_volume)
found = True
break
except exception.VolumeUnattached:
# The volume is not attached. Treat it as NotFound
# by falling through.
pass
except exception.InstanceIsLocked as e:
raise exc.HTTPConflict(explanation=e.format_message())
except exception.InstanceInvalidState as state_error:
common.raise_http_conflict_for_instance_invalid_state(state_error,
'swap_volume')
if not found:
raise exc.HTTPNotFound()
else:
return webob.Response(status_int=202)
def delete(self, req, server_id, id):
"""Detach a volume from an instance."""
context = req.environ['nova.context']
authorize(context)
authorize_attach(context, action='delete')
volume_id = id
LOG.audit(_("Detach volume %s"), volume_id, context=context)
try:
instance = self.compute_api.get(context, server_id)
except exception.NotFound:
raise exc.HTTPNotFound()
volume = self.volume_api.get(context, volume_id)
bdms = self.compute_api.get_instance_bdms(context, instance)
if not bdms:
LOG.debug(_("Instance %s is not attached."), server_id)
raise exc.HTTPNotFound()
found = False
try:
for bdm in bdms:
if bdm['volume_id'] != volume_id:
continue
try:
self.compute_api.detach_volume(context, instance, volume)
found = True
break
except exception.VolumeUnattached:
# The volume is not attached. Treat it as NotFound
# by falling through.
pass
except exception.InstanceIsLocked as e:
raise exc.HTTPConflict(explanation=e.format_message())
except exception.InstanceInvalidState as state_error:
common.raise_http_conflict_for_instance_invalid_state(state_error,
'detach_volume')
if not found:
raise exc.HTTPNotFound()
else:
return webob.Response(status_int=202)
def _items(self, req, server_id, entity_maker):
"""Returns a list of attachments, transformed through entity_maker."""
context = req.environ['nova.context']
authorize(context)
try:
instance = self.compute_api.get(context, server_id)
except exception.NotFound:
raise exc.HTTPNotFound()
bdms = self.compute_api.get_instance_bdms(context, instance)
limited_list = common.limited(bdms, req)
results = []
for bdm in limited_list:
if bdm['volume_id']:
results.append(entity_maker(bdm['volume_id'],
bdm['instance_uuid'],
bdm['device_name']))
return {'volumeAttachments': results}
def _translate_snapshot_detail_view(context, vol):
"""Maps keys for snapshots details view."""
d = _translate_snapshot_summary_view(context, vol)
# NOTE(gagupta): No additional data / lookups at the moment
return d
def _translate_snapshot_summary_view(context, vol):
"""Maps keys for snapshots summary view."""
d = {}
d['id'] = vol['id']
d['volumeId'] = vol['volume_id']
d['status'] = vol['status']
# NOTE(gagupta): We map volume_size as the snapshot size
d['size'] = vol['volume_size']
d['createdAt'] = vol['created_at']
d['displayName'] = vol['display_name']
d['displayDescription'] = vol['display_description']
return d
def make_snapshot(elem):
elem.set('id')
elem.set('status')
elem.set('size')
elem.set('createdAt')
elem.set('displayName')
elem.set('displayDescription')
elem.set('volumeId')
class SnapshotTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('snapshot', selector='snapshot')
make_snapshot(root)
return xmlutil.MasterTemplate(root, 1)
class SnapshotsTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('snapshots')
elem = xmlutil.SubTemplateElement(root, 'snapshot',
selector='snapshots')
make_snapshot(elem)
return xmlutil.MasterTemplate(root, 1)
class SnapshotController(wsgi.Controller):
"""The Snapshots API controller for the OpenStack API."""
def __init__(self):
self.volume_api = volume.API()
super(SnapshotController, self).__init__()
@wsgi.serializers(xml=SnapshotTemplate)
def show(self, req, id):
"""Return data about the given snapshot."""
context = req.environ['nova.context']
authorize(context)
try:
vol = self.volume_api.get_snapshot(context, id)
except exception.NotFound:
return exc.HTTPNotFound()
return {'snapshot': _translate_snapshot_detail_view(context, vol)}
def delete(self, req, id):
"""Delete a snapshot."""
context = req.environ['nova.context']
authorize(context)
LOG.audit(_("Delete snapshot with id: %s"), id, context=context)
try:
self.volume_api.delete_snapshot(context, id)
except exception.NotFound:
return exc.HTTPNotFound()
return webob.Response(status_int=202)
@wsgi.serializers(xml=SnapshotsTemplate)
def index(self, req):
"""Returns a summary list of snapshots."""
return self._items(req, entity_maker=_translate_snapshot_summary_view)
@wsgi.serializers(xml=SnapshotsTemplate)
def detail(self, req):
"""Returns a detailed list of snapshots."""
return self._items(req, entity_maker=_translate_snapshot_detail_view)
def _items(self, req, entity_maker):
"""Returns a list of snapshots, transformed through entity_maker."""
context = req.environ['nova.context']
authorize(context)
snapshots = self.volume_api.get_all_snapshots(context)
limited_list = common.limited(snapshots, req)
res = [entity_maker(context, snapshot) for snapshot in limited_list]
return {'snapshots': res}
@wsgi.serializers(xml=SnapshotTemplate)
def create(self, req, body):
"""Creates a new snapshot."""
context = req.environ['nova.context']
authorize(context)
if not self.is_valid_body(body, 'snapshot'):
raise exc.HTTPUnprocessableEntity()
snapshot = body['snapshot']
volume_id = snapshot['volume_id']
LOG.audit(_("Create snapshot from volume %s"), volume_id,
context=context)
force = snapshot.get('force', False)
try:
force = strutils.bool_from_string(force, strict=True)
except ValueError:
msg = _("Invalid value '%s' for force.") % force
raise exception.InvalidParameterValue(err=msg)
if force:
create_func = self.volume_api.create_snapshot_force
else:
create_func = self.volume_api.create_snapshot
new_snapshot = create_func(context, volume_id,
snapshot.get('display_name'),
snapshot.get('display_description'))
retval = _translate_snapshot_detail_view(context, new_snapshot)
return {'snapshot': retval}
class Volumes(extensions.ExtensionDescriptor):
"""Volumes support."""
name = "Volumes"
alias = "os-volumes"
namespace = "http://docs.openstack.org/compute/ext/volumes/api/v1.1"
updated = "2011-03-25T00:00:00+00:00"
def get_resources(self):
resources = []
# NOTE(justinsb): No way to provide singular name ('volume')
# Does this matter?
res = extensions.ResourceExtension('os-volumes',
VolumeController(),
collection_actions={'detail': 'GET'})
resources.append(res)
attachment_controller = VolumeAttachmentController(self.ext_mgr)
res = extensions.ResourceExtension('os-volume_attachments',
attachment_controller,
parent=dict(
member_name='server',
collection_name='servers'))
resources.append(res)
res = extensions.ResourceExtension('os-volumes_boot',
inherits='servers')
resources.append(res)
res = extensions.ResourceExtension('os-snapshots',
SnapshotController(),
collection_actions={'detail': 'GET'})
resources.append(res)
return resources
| apache-2.0 | 1,694,395,710,938,700,800 | 33.14011 | 79 | 0.60111 | false |
frePPLe/frePPLe | src/setup.py | 1 | 1852 | #
# Copyright (C) 2015 by frePPLe bv
#
# This library is free software; you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero
# General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This setup script is used to build a Python extension for the frePPLe library.
# The script is intended to be called ONLY from the makefile, and not as standalone command.
from distutils.core import setup, Extension
import os
mod = Extension(
'frepple',
sources=['pythonextension.cpp'],
include_dirs=["../include"],
define_macros=[("HAVE_LOCALTIME_R","1")],
libraries=['frepple', 'xerces-c'],
extra_compile_args=['-std=c++0x'],
library_dirs=[os.environ['LIB_DIR']]
)
setup (
name = 'frepple',
version = os.environ['VERSION'],
author = "frepple.com",
author_email = "[email protected]",
url = "http://frepple.com",
ext_modules = [mod],
license="GNU Affero General Public License (AGPL)",
classifiers = [
'License :: OSI Approved :: GNU Affero General Public License (AGPL)',
'Intended Audience :: Manufacturing',
],
description = 'Bindings for the frePPLe production planning application',
long_description = '''FrePPLe stands for "Free Production Planning Library".
It is a framework for modeling and solving production planning problems,
targeted primarily at discrete manufacturing industries.
'''
)
| agpl-3.0 | -422,228,479,393,895,400 | 35.313725 | 92 | 0.725702 | false |
xuru/restler | tests/test_json_django_serialization.py | 1 | 9609 | from env_setup import setup_django; setup_django()
import json
from datetime import datetime, time
from unittest import TestCase
from restler import UnsupportedTypeError
from restler.serializers import ModelStrategy, to_json
from tests.django_models import connection, install_model, Model1, Poll, Choice
class TestDjangoUnsupportedFields(TestCase):
def setUp(self):
connection.creation.create_test_db(0, autoclobber=True)
install_model(Model1)
self.model1 = Model1()
self.model1.save()
self.strategy = ModelStrategy(Model1, include_all_fields=False)
def test_file_field_unsupported(self):
with self.assertRaises(UnsupportedTypeError):
strategy = self.strategy.include('_file')
to_json(Model1.objects.all(), strategy)
def test_file_path_field_unsupported(self):
with self.assertRaises(UnsupportedTypeError):
strategy = self.strategy.include('file_path')
to_json(Model1.objects.all(), strategy)
def test_image_field_unsupported(self):
with self.assertRaises(UnsupportedTypeError):
strategy = self.strategy.include('image')
to_json(Model1.objects.all(), strategy)
class TestDjangoFieldJsonSerialization(TestCase):
def setUp(self):
connection.creation.create_test_db(0, autoclobber=True)
install_model(Model1)
self.model1 = Model1(
big_integer=1,
boolean=True,
char="2",
comma_separated_int=[2, 4, 6],
decimal=9,
email='[email protected]',
_float=9.5,
integer=4,
ip_address='8.8.1.1.',
null_boolean=None,
positive_int=100,
positive_small_int=1,
slug='Slug Field',
small_int=1,
text='More Text',
url='www.google.com'
)
self.model1.save()
self.strategy = ModelStrategy(Model1, include_all_fields=False)
def test_auto_field(self):
field = 'id'
strategy = self.strategy.include(field)
sj = json.loads(to_json(Model1.objects.get(pk=self.model1.id), strategy))
self.assertEqual(sj.get(field), getattr(self.model1, field))
def test_big_integer_field(self):
field = 'big_integer'
strategy = self.strategy.include(field)
sj = json.loads(to_json(Model1.objects.get(pk=self.model1.id), strategy))
self.assertEqual(sj.get(field), getattr(self.model1, field))
def test_boolean_field(self):
field = 'boolean'
strategy = self.strategy.include(field)
sj = json.loads(to_json(Model1.objects.get(pk=self.model1.id), strategy))
self.assertEqual(sj.get(field), getattr(self.model1, field))
def test_char_field(self):
field = 'char'
strategy = self.strategy.include(field)
sj = json.loads(to_json(Model1.objects.get(pk=self.model1.id), strategy))
self.assertEqual(sj.get(field), getattr(self.model1, field))
def test_comma_separated_integer_field(self):
field = 'comma_separated_int'
strategy = self.strategy.include(field)
sj = json.loads(to_json(Model1.objects.get(pk=self.model1.id), strategy))
self.assertEqual(sj.get(field), getattr(self.model1, field))
def test_date_field(self):
field = '_date'
strategy = self.strategy.include(field)
sj = json.loads(to_json(Model1.objects.get(pk=self.model1.id), strategy))
self.assertEqual(sj.get(field), datetime.strftime(getattr(self.model1, field), '%Y-%m-%d'))
def test_datetime_field(self):
field = '_datetime'
strategy = self.strategy.include(field)
sj = json.loads(to_json(Model1.objects.get(pk=self.model1.id), strategy))
self.assertEqual(sj.get(field), datetime.strftime(getattr(self.model1, field), '%Y-%m-%d %H:%M:%S'))
def test_decimal_field(self):
field = 'decimal'
strategy = self.strategy.include(field)
sj = json.loads(to_json(Model1.objects.get(pk=self.model1.id), strategy))
self.assertEqual(sj.get(field), str(getattr(self.model1, field)))
def test_email_field(self):
field = 'email'
strategy = self.strategy.include(field)
sj = json.loads(to_json(Model1.objects.get(pk=self.model1.id), strategy))
self.assertEqual(sj.get(field), getattr(self.model1, field))
def test_float_field(self):
field = '_float'
strategy = self.strategy.include(field)
sj = json.loads(to_json(Model1.objects.get(pk=self.model1.id), strategy))
self.assertEqual(sj.get(field), getattr(self.model1, field))
def test_integer_field(self):
field = 'integer'
strategy = self.strategy.include(field)
sj = json.loads(to_json(Model1.objects.get(pk=self.model1.id), strategy))
self.assertEqual(sj.get(field), getattr(self.model1, field))
def test_ip_address_field(self):
field = 'ip_address'
strategy = self.strategy.include(field)
sj = json.loads(to_json(Model1.objects.get(pk=self.model1.id), strategy))
self.assertEqual(sj.get(field), getattr(self.model1, field))
def test_null_boolean_field(self):
field = 'null_boolean'
strategy = self.strategy.include(field)
sj = json.loads(to_json(Model1.objects.get(pk=self.model1.id), strategy))
self.assertEqual(sj.get(field), getattr(self.model1, field))
def test_positive_integer_field(self):
field = 'positive_int'
strategy = self.strategy.include(field)
sj = json.loads(to_json(Model1.objects.get(pk=self.model1.id), strategy))
self.assertEqual(sj.get(field), getattr(self.model1, field))
def test_positive_small_integer_field(self):
field = 'positive_small_int'
strategy = self.strategy.include(field)
sj = json.loads(to_json(Model1.objects.get(pk=self.model1.id), strategy))
self.assertEqual(sj.get(field), getattr(self.model1, field))
def test_slug_field(self):
field = 'slug'
strategy = self.strategy.include(field)
sj = json.loads(to_json(Model1.objects.get(pk=self.model1.id), strategy))
self.assertEqual(sj.get(field), getattr(self.model1, field))
def test_small_integer_field(self):
field = 'small_int'
strategy = self.strategy.include(field)
sj = json.loads(to_json(Model1.objects.get(pk=self.model1.id), strategy))
self.assertEqual(sj.get(field), getattr(self.model1, field))
def test_text_field(self):
field = 'text'
strategy = self.strategy.include(field)
sj = json.loads(to_json(Model1.objects.get(pk=self.model1.id), strategy))
self.assertEqual(sj.get(field), getattr(self.model1, field))
def test_time_field(self):
field = '_time'
strategy = self.strategy.include(field)
sj = json.loads(to_json(Model1.objects.get(pk=self.model1.id), strategy))
self.assertEqual(sj.get(field), time.strftime(getattr(self.model1, field), '%H:%M:%S'))
def test_url_field(self):
field = 'url'
strategy = self.strategy.include(field)
sj = json.loads(to_json(Model1.objects.get(pk=self.model1.id), strategy))
self.assertEqual(sj.get(field), getattr(self.model1, field))
class TestLambdaJsonSerialization(TestCase):
def setUp(self):
connection.creation.create_test_db(0, autoclobber=True)
install_model(Model1)
self.model1 = Model1(
big_integer=1,
boolean=True,
char="2",
comma_separated_int=[2, 4, 6],
decimal=9,
email='[email protected]',
_float=9.5,
integer=4,
ip_address='8.8.1.1.',
null_boolean=None,
positive_int=100,
positive_small_int=1,
slug='Slug Field',
small_int=1,
text='More Text',
url='www.google.com'
)
self.model1.save()
self.strategy = ModelStrategy(Model1, include_all_fields=True).exclude('_file', 'file_path', 'image')
def test_lambda(self):
strategy = self.strategy.include(aggregate=lambda o: '%s, %s' % (o.big_integer, o.char))
sj = json.loads(to_json(Model1.objects.get(pk=self.model1.id), strategy))
self.assertEqual(sj.get('aggregate'), u'1, 2')
class TestRelationships(TestCase):
def setUp(self):
connection.creation.create_test_db(0, autoclobber=True)
install_model(Poll)
install_model(Choice)
self.choices = ('Not much', 'The sky', 'Just hacking around')
poll = Poll(question="What's new?", pub_date=datetime.now())
poll.save()
poll.choice_set.create(choice=self.choices[0], votes=0)
poll.choice_set.create(choice=self.choices[1], votes=0)
poll.choice_set.create(choice=self.choices[2], votes=0)
poll.save()
def choices(poll):
return [choice for choice in poll.choice_set.all()]
poll_strategy = ModelStrategy(Poll, include_all_fields=True).include(choices=choices)
choice_strategy = ModelStrategy(Choice, include_all_fields=True)
self.poll_choice_strategy = poll_strategy + choice_strategy
def test_poll_with_choices(self):
sj = json.loads(to_json(Poll.objects.all(), self.poll_choice_strategy))
serialized_choices = [choice.get('choice') for choice in sj[0].get('choices')]
for choice in serialized_choices:
self.assertIn(choice, self.choices)
| mit | -869,937,953,381,092,200 | 39.0375 | 109 | 0.63222 | false |
Saint-Joe/weewx | bin/weewx/imagegenerator.py | 4 | 12634 | #
# Copyright (c) 2009-2015 Tom Keffer <[email protected]>
#
# See the file LICENSE.txt for your full rights.
#
"""Generate images for up to an effective date.
Needs to be refactored into smaller functions."""
from __future__ import with_statement
import time
import datetime
import syslog
import os.path
import weeplot.genplot
import weeplot.utilities
import weeutil.weeutil
import weewx.reportengine
import weewx.units
from weeutil.weeutil import to_bool, to_int, to_float
#===============================================================================
# Class ImageGenerator
#===============================================================================
class ImageGenerator(weewx.reportengine.ReportGenerator):
"""Class for managing the image generator."""
def run(self):
self.setup()
# Generate any images
self.genImages(self.gen_ts)
def setup(self):
self.image_dict = self.skin_dict['ImageGenerator']
self.title_dict = self.skin_dict.get('Labels', {}).get('Generic', {})
self.formatter = weewx.units.Formatter.fromSkinDict(self.skin_dict)
self.converter = weewx.units.Converter.fromSkinDict(self.skin_dict)
def genImages(self, gen_ts):
"""Generate the images.
The time scales will be chosen to include the given timestamp, with nice beginning
and ending times.
gen_ts: The time around which plots are to be generated. This will also be used as
the bottom label in the plots. [optional. Default is to use the time of the last record
in the database.]
"""
t1 = time.time()
ngen = 0
# Loop over each time span class (day, week, month, etc.):
for timespan in self.image_dict.sections :
# Now, loop over all plot names in this time span class:
for plotname in self.image_dict[timespan].sections :
# Accumulate all options from parent nodes:
plot_options = weeutil.weeutil.accumulateLeaves(self.image_dict[timespan][plotname])
plotgen_ts = gen_ts
if not plotgen_ts:
binding = plot_options['data_binding']
archive = self.db_binder.get_manager(binding)
plotgen_ts = archive.lastGoodStamp()
if not plotgen_ts:
plotgen_ts = time.time()
image_root = os.path.join(self.config_dict['WEEWX_ROOT'], plot_options['HTML_ROOT'])
# Get the path of the file that the image is going to be saved to:
img_file = os.path.join(image_root, '%s.png' % plotname)
# Check whether this plot needs to be done at all:
ai = plot_options.as_int('aggregate_interval') if plot_options.has_key('aggregate_interval') else None
if skipThisPlot(plotgen_ts, ai, img_file) :
continue
# Create the subdirectory that the image is to be put in.
# Wrap in a try block in case it already exists.
try:
os.makedirs(os.path.dirname(img_file))
except OSError:
pass
# Create a new instance of a time plot and start adding to it
plot = weeplot.genplot.TimePlot(plot_options)
# Calculate a suitable min, max time for the requested time span and set it
(minstamp, maxstamp, timeinc) = weeplot.utilities.scaletime(plotgen_ts - int(plot_options.get('time_length', 86400)), plotgen_ts)
plot.setXScaling((minstamp, maxstamp, timeinc))
# Set the y-scaling, using any user-supplied hints:
plot.setYScaling(weeutil.weeutil.convertToFloat(plot_options.get('yscale', ['None', 'None', 'None'])))
# Get a suitable bottom label:
bottom_label_format = plot_options.get('bottom_label_format', '%m/%d/%y %H:%M')
bottom_label = time.strftime(bottom_label_format, time.localtime(plotgen_ts))
plot.setBottomLabel(bottom_label)
# Set day/night display
plot.setLocation(self.stn_info.latitude_f, self.stn_info.longitude_f)
plot.setDayNight(to_bool(plot_options.get('show_daynight', False)),
weeplot.utilities.tobgr(plot_options.get('daynight_day_color', '0xffffff')),
weeplot.utilities.tobgr(plot_options.get('daynight_night_color', '0xf0f0f0')),
weeplot.utilities.tobgr(plot_options.get('daynight_edge_color', '0xefefef')))
# Loop over each line to be added to the plot.
for line_name in self.image_dict[timespan][plotname].sections:
# Accumulate options from parent nodes.
line_options = weeutil.weeutil.accumulateLeaves(self.image_dict[timespan][plotname][line_name])
# See what SQL variable type to use for this line. By default,
# use the section name.
var_type = line_options.get('data_type', line_name)
# Look for aggregation type:
aggregate_type = line_options.get('aggregate_type')
if aggregate_type in (None, '', 'None', 'none'):
# No aggregation specified.
aggregate_type = aggregate_interval = None
else :
try:
# Aggregation specified. Get the interval.
aggregate_interval = line_options.as_int('aggregate_interval')
except KeyError:
syslog.syslog(syslog.LOG_ERR, "genimages: aggregate interval required for aggregate type %s" % aggregate_type)
syslog.syslog(syslog.LOG_ERR, "genimages: line type %s skipped" % var_type)
continue
# Now we have everything we need to find and hit the database:
binding = line_options['data_binding']
archive = self.db_binder.get_manager(binding)
(start_vec_t, stop_vec_t, data_vec_t) = \
archive.getSqlVectors((minstamp, maxstamp), var_type, aggregate_type=aggregate_type,
aggregate_interval=aggregate_interval)
if weewx.debug:
assert(len(start_vec_t) == len(stop_vec_t))
# Do any necessary unit conversions:
new_start_vec_t = self.converter.convert(start_vec_t)
new_stop_vec_t = self.converter.convert(stop_vec_t)
new_data_vec_t = self.converter.convert(data_vec_t)
# Add a unit label. NB: all will get overwritten except the last.
# Get the label from the configuration dictionary.
# TODO: Allow multiple unit labels, one for each plot line?
unit_label = line_options.get('y_label', weewx.units.get_label_string(self.formatter, self.converter, var_type))
# Strip off any leading and trailing whitespace so it's easy to center
plot.setUnitLabel(unit_label.strip())
# See if a line label has been explicitly requested:
label = line_options.get('label')
if not label:
# No explicit label. Is there a generic one?
# If not, then the SQL type will be used instead
label = self.title_dict.get(var_type, var_type)
# See if a color has been explicitly requested.
color = line_options.get('color')
if color is not None: color = weeplot.utilities.tobgr(color)
# Get the line width, if explicitly requested.
width = to_int(line_options.get('width'))
# Get the type of plot ("bar', 'line', or 'vector')
plot_type = line_options.get('plot_type', 'line')
interval_vec = None
# Some plot types require special treatments:
if plot_type == 'vector':
vector_rotate_str = line_options.get('vector_rotate')
vector_rotate = -float(vector_rotate_str) if vector_rotate_str is not None else None
else:
vector_rotate = None
gap_fraction = None
if plot_type == 'bar':
interval_vec = [x[1] - x[0]for x in zip(new_start_vec_t.value, new_stop_vec_t.value)]
elif plot_type == 'line':
gap_fraction = to_float(line_options.get('line_gap_fraction'))
if gap_fraction is not None:
if not 0 < gap_fraction < 1:
syslog.syslog(syslog.LOG_ERR, "genimages: Gap fraction %5.3f outside range 0 to 1. Ignored." % gap_fraction)
gap_fraction = None
# Get the type of line ('solid' or 'none' is all that's offered now)
line_type = line_options.get('line_type', 'solid')
if line_type.strip().lower() in ['', 'none']:
line_type = None
marker_type = line_options.get('marker_type')
marker_size = to_int(line_options.get('marker_size', 8))
# Add the line to the emerging plot:
plot.addLine(weeplot.genplot.PlotLine(new_stop_vec_t[0], new_data_vec_t[0],
label = label,
color = color,
width = width,
plot_type = plot_type,
line_type = line_type,
marker_type = marker_type,
marker_size = marker_size,
bar_width = interval_vec,
vector_rotate = vector_rotate,
gap_fraction = gap_fraction))
# OK, the plot is ready. Render it onto an image
image = plot.render()
try:
# Now save the image
image.save(img_file)
ngen += 1
except IOError, e:
syslog.syslog(syslog.LOG_CRIT, "genimages: Unable to save to file '%s' %s:" % (img_file, e))
t2 = time.time()
syslog.syslog(syslog.LOG_INFO, "genimages: Generated %d images for %s in %.2f seconds" % (ngen, self.skin_dict['REPORT_NAME'], t2 - t1))
def skipThisPlot(time_ts, aggregate_interval, img_file):
"""A plot can be skipped if it was generated recently and has not changed.
This happens if the time since the plot was generated is less than the
aggregation interval."""
# Images without an aggregation interval have to be plotted every time.
# Also, the image definitely has to be generated if it doesn't exist.
if aggregate_interval is None or not os.path.exists(img_file):
return False
# If its a very old image, then it has to be regenerated
if time_ts - os.stat(img_file).st_mtime >= aggregate_interval:
return False
# Finally, if we're on an aggregation boundary, regenerate.
time_dt = datetime.datetime.fromtimestamp(time_ts)
tdiff = time_dt - time_dt.replace(hour=0, minute=0, second=0, microsecond=0)
return abs(tdiff.seconds % aggregate_interval) > 1
| gpl-3.0 | -5,267,385,790,621,854,000 | 50.567347 | 145 | 0.507678 | false |
peterdemin/stupid | stupid/lunchbot.py | 1 | 3957 | import datetime
import logging
import time
from stupid.chatbot import ChatBot, every_minute, trigger
from stupid.utils import weekday
from stupid.weather import WeatherForecast
logger = logging.getLogger(__name__)
class LunchBot(ChatBot):
ASK_TIMEOUT = 15
EXCLUDE = {
"sobolevi": (datetime.datetime(2018, 12, 24), datetime.datetime(2019, 1, 7)),
"ramusus": (datetime.datetime(2018, 12, 24), datetime.datetime(2019, 1, 7)),
"deminp": (datetime.datetime(2017, 9, 15), datetime.datetime(2017, 10, 17)),
"ivanchen": (datetime.datetime(2019, 7, 15), datetime.datetime(2018, 7, 29)),
}
def __init__(self, *args, **kwargs):
super(LunchBot, self).__init__(*args, **kwargs)
self.weather = WeatherForecast()
self.schedule.every().day.at("11:55").do(self.eat_some)
self.schedule.every().day.at("15:55").do(self.eat_some)
self.announce_ts = None
self.ask_for_reply_after = None
self.users_to_ask = []
@trigger
def on_weather(self):
return self.weather.report()
@every_minute
@weekday
def on_timeout(self):
if self.ask_for_reply_after is not None:
delta = round((time.time() - self.ask_for_reply_after) / 60)
disable = False
if 0 <= delta < self.ASK_TIMEOUT:
disable = self.ask_for_reply()
if delta > self.ASK_TIMEOUT:
logger.debug("Asking for reply timeout - %d - cancelling", delta)
disable = True
if disable:
self.ask_for_reply_after = None
self.users_to_ask = []
@weekday
def eat_some(self):
response = self.broker.post(
"Who is going to eat? Beware, it's {0}".format(self.weather.report()),
color='warning',
)
logger.debug('Posted %r', response)
self.announce_ts = float(response['message']['ts'])
self.ask_for_reply_after = self.announce_ts + 60 * 3
self.users_to_ask = self.dont_mention(self.users_on_channel())
logger.debug('Scheduling ask_for_reply for %r after %r',
self.users_to_ask, self.ask_for_reply_after)
def dont_mention(self, users):
now = datetime.datetime.now()
to_keep = set()
for username in users.values():
if username in self.EXCLUDE:
if self.EXCLUDE[username][0] <= now < self.EXCLUDE[username][1]:
continue
to_keep.add(username)
return {k: v
for k, v in users.items()
if v in to_keep}
def users_on_channel(self):
return {user_id: self.username(user_id)
for user_id in self.broker.channel_info(self.broker.CHANNEL_NAME)['members']
if user_id != self.broker.MY_ID}
def ask_for_reply(self):
"""
Check if not all chatroom participants replied
Ask for reply if found any.
"""
logger.debug("Asking for reply")
# Bot messages do not have 'user' field
replied_user_ids = {x.get('user') for x in self.broker.read_new_messages(self.announce_ts)}
logger.debug("Users replied after announcement: %r", replied_user_ids)
if replied_user_ids.intersection(self.users_to_ask):
# At least one user replied
to_ask = set(self.users_to_ask).difference(replied_user_ids)
if to_ask:
for user_id in to_ask:
logger.debug("Asking %r", self.users_to_ask[user_id])
self.broker.post('@{0}, are you going to eat some?'.format(self.users_to_ask[user_id]))
logger.debug('Looks like one reminder is enough... Canceling join')
else:
logger.debug('Everyone replied, canceling')
return True
logger.debug('Do not be first to reply to yourself, skipping')
return False
| bsd-3-clause | -2,628,365,634,184,614,400 | 38.178218 | 107 | 0.577963 | false |
huangtao-sh/orange | orange/tools/mongodb.py | 1 | 2892 | # 项目:标准库函数
# 模块:配置mongodb数据库
# 作者:黄涛
# License:GPL
# Email:[email protected]
# 创建:2016-11-19 10:18
# 修订:2018-10-25 重写安装程序
# 修订:2019-12-15 09:35 修改写配置文件功能
from orange.pykit.pyver import Ver, get_cur_ver
from orange import sh, Path
from pkgutil import get_data
MONGOFILES = ('mongo.exe', 'mongod.exe', 'mongodump.exe', 'mongoexport.exe',
'mongoimport.exe')
SERVERNAME = 'MongoDb'
MONGOCONFIG = get_data('orange', 'data/mongo.yaml').decode()
def win_deploy():
import platform
root = Path('%Programdata%/Mongodb')
root.ensure()
data_path = root / 'data'
if not data_path:
data_path.ensure()
print('创建数据目录:%s' % (data_path))
config_file = root / 'mongo.conf'
config = {
'dbpath': data_path.as_posix(),
'logpath': root.as_posix(),
'engine': 'wiredTiger'
}
if platform.architecture()[0] != '64bit':
config['engine'] = 'mmapv1'
print('本机使用32位处理器,使用 mmapv1 引擎')
result = sh(f'sc query {SERVERNAME}')
if not result[0]:
sh > 'sc stop {SERVERNAME}'
print('停止 MongoDb 服务')
config_file.write_bytes(MONGOCONFIG.format(**config).encode())
prg_path = Path('%PROGRAMFILES%/MongoDB').find('bin')
print(f'最新版程序安装路径为:{prg_path}')
dest = Path('%windir%')
for exefile in MONGOFILES:
(dest / exefile) >> (prg_path / exefile)
print('连接 %s 到 %s 成功' % (dest / exefile, prg_path / exefile))
print('删除服务配置')
sh > f'sc delete {SERVERNAME}'
print('重新配置服务')
sh > f'mongod --install --serviceName {SERVERNAME} --config "{config_file}"'
print('启动 MongoDB 服务')
sh > f'sc start {SERVERNAME}'
input('Press any key to continue')
def darwin_deploy():
config = {
'dbpath': '/usr/local/var/mongodb',
'logpath': 'usr/local/var/log',
'engine': 'wiredTiger'
}
config_path = Path('/usr/local/etc/mongod.conf')
plist_file = Path('/Library/LaunchDaemons/com.mongodb.plist')
config_path.text = MONGOCONFIG.format(**config)
plist_file.text = get_data('orange', 'data/mongo.plist').decode()
def linux_deploy():
dbpath = '/var/local/mongodb'
logpath = '/var/log'
Path(dbpath).ensure()
config = {'dbpath': dbpath, 'logpath': logpath}
config_path = Path('/usr/local/etc/mongod.conf')
MONGOCONFIG = get_data('orange', 'data/mongo_linux.yaml').decode()
config_path.text = MONGOCONFIG.format(**config)
def main():
import sys
if sys.platform == 'win32':
win_deploy()
elif sys.platform == 'darwin':
darwin_deploy()
elif sys.platform == 'linux':
linux_deploy()
else:
print('操作系统%下的配置未实现')
| gpl-2.0 | -289,762,516,641,525,800 | 28.977528 | 80 | 0.611319 | false |
anaruse/chainer | tests/chainer_tests/functions_tests/activation_tests/test_lstm.py | 4 | 7906 | import unittest
import numpy
import chainer
from chainer.backends import cuda
from chainer import functions
from chainer.functions.activation import lstm
from chainer import gradient_check
from chainer import testing
from chainer.testing import backend
def _sigmoid(x):
half = x.dtype.type(0.5)
return numpy.tanh(x * half) * half + half
def inject_backend_tests(method_names):
decorator = backend.inject_backend_tests(
method_names,
# CPU tests
testing.product({
'use_cuda': [False],
'use_ideep': ['never', 'always'],
})
# GPU tests
+ [{'use_cuda': True}])
return decorator
@testing.parameterize(*(testing.product({
'batch': [3, 2, 0],
'dtype': [numpy.float32],
}) + testing.product({
'batch': [3],
'dtype': [numpy.float16, numpy.float32, numpy.float64],
})))
@testing.fix_random()
@inject_backend_tests([
'test_forward',
'test_flat_forward',
'test_full_backward',
'test_flat_full_backward',
'test_no_gc_backward',
'test_flat_no_gc_backward',
'test_no_gh_backward',
'test_flat_no_gh_backward',
'test_double_backward'])
class TestLSTM(unittest.TestCase):
def setUp(self):
dtype = self.dtype
hidden_shape = (3, 2, 4)
x_shape = (self.batch, 8, 4)
y_shape = (self.batch, 2, 4)
c_prev = numpy.random.uniform(-1, 1, hidden_shape).astype(dtype)
x = numpy.random.uniform(-1, 1, x_shape).astype(dtype)
gc = numpy.random.uniform(-1, 1, hidden_shape).astype(dtype)
gh = numpy.random.uniform(-1, 1, y_shape).astype(dtype)
ggc = numpy.random.uniform(-1, 1, hidden_shape).astype(dtype)
ggx = numpy.random.uniform(-1, 1, x_shape).astype(dtype)
self.inputs = [c_prev, x]
self.grad_outputs = [gc, gh]
self.grad_grad_inputs = [ggc, ggx]
self.check_forward_options = {}
self.check_backward_options = {'dtype': numpy.float64}
self.check_double_backward_options = {'dtype': numpy.float64}
if self.dtype == numpy.float16:
self.check_forward_options = {'atol': 1e-3, 'rtol': 1e-2}
self.check_backward_options = {
'dtype': numpy.float64, 'atol': 5e-3, 'rtol': 5e-2}
self.check_double_backward_options = {
'dtype': numpy.float64, 'atol': 5e-3, 'rtol': 5e-2}
def flat(self, arrays):
return [None if a is None else a[:, :, 0] for a in arrays]
def forward_cpu(self, inputs):
c_prev, x = inputs
batch = x.shape[0]
a_in = x[:, [0, 4]]
i_in = x[:, [1, 5]]
f_in = x[:, [2, 6]]
o_in = x[:, [3, 7]]
c_expect = (_sigmoid(i_in) * numpy.tanh(a_in)
+ _sigmoid(f_in) * c_prev[:batch])
h_expect = _sigmoid(o_in) * numpy.tanh(c_expect)
return c_expect, h_expect
def check_forward(self, inputs, backend_config):
# Compute expected out
c_prev, x = inputs
batch = x.shape[0]
c_expect_2 = c_prev[batch:]
c_expect_1, h_expect = self.forward_cpu(inputs)
if backend_config.use_cuda:
inputs = cuda.to_gpu(inputs)
inputs = [chainer.Variable(xx) for xx in inputs]
with backend_config:
c, h = functions.lstm(*inputs)
assert c.data.dtype == self.dtype
assert h.data.dtype == self.dtype
testing.assert_allclose(
c_expect_1, c.data[:batch], **self.check_forward_options)
testing.assert_allclose(
c_expect_2, c.data[batch:], **self.check_forward_options)
testing.assert_allclose(
h_expect, h.data, **self.check_forward_options)
def test_forward(self, backend_config):
self.check_forward(self.inputs, backend_config)
def test_flat_forward(self, backend_config):
self.check_forward(self.flat(self.inputs), backend_config)
def check_backward(self, inputs, grad_outputs, backend_config):
if backend_config.use_cuda:
inputs = cuda.to_gpu(inputs)
grad_outputs = cuda.to_gpu(grad_outputs)
with backend_config:
gradient_check.check_backward(
functions.lstm, inputs, grad_outputs,
**self.check_backward_options)
def test_full_backward(self, backend_config):
self.check_backward(self.inputs, self.grad_outputs, backend_config)
def test_flat_full_backward(self, backend_config):
self.check_backward(
self.flat(self.inputs), self.flat(self.grad_outputs),
backend_config)
def test_no_gc_backward(self, backend_config):
grad_outputs = [None, self.grad_outputs[1]]
self.check_backward(self.inputs, grad_outputs, backend_config)
def test_flat_no_gc_backward(self, backend_config):
grad_outputs = [None, self.grad_outputs[1]]
self.check_backward(
self.flat(self.inputs), self.flat(grad_outputs), backend_config)
def test_no_gh_backward(self, backend_config):
grad_outputs = [self.grad_outputs[0], None]
self.check_backward(self.inputs, grad_outputs, backend_config)
def test_flat_no_gh_backward(self, backend_config):
grad_outputs = [self.grad_outputs[0], None]
self.check_backward(
self.flat(self.inputs), self.flat(grad_outputs), backend_config)
def check_double_backward(
self, inputs, grad_outputs, grad_grad_inputs, backend_config):
if backend_config.use_cuda:
inputs = cuda.to_gpu(inputs)
grad_outputs = cuda.to_gpu(grad_outputs)
grad_grad_inputs = cuda.to_gpu(grad_grad_inputs)
with backend_config:
gradient_check.check_double_backward(
chainer.functions.lstm, inputs, grad_outputs, grad_grad_inputs,
**self.check_double_backward_options)
def test_double_backward(self, backend_config):
self.check_double_backward(
self.inputs, self.grad_outputs, self.grad_grad_inputs,
backend_config)
@testing.parameterize(*(testing.product({
'batch': [3, 2, 0],
'dtype': [numpy.float32],
}) + testing.product({
'batch': [3],
'dtype': [numpy.float16, numpy.float32, numpy.float64],
})))
@testing.fix_random()
@inject_backend_tests(['test_backward'])
class TestLSTMGrad(unittest.TestCase):
def setUp(self):
hidden_shape = (3, 2, 4)
dtype = self.dtype
x_shape = (self.batch, 8, 4)
y_shape = (self.batch, 2, 4)
c_prev = numpy.random.uniform(
-1, 1, hidden_shape).astype(dtype)
x = numpy.random.uniform(-1, 1, x_shape).astype(dtype)
c_next = numpy.random.uniform(-1, 1, hidden_shape).astype(dtype)
gc = numpy.random.uniform(-1, 1, hidden_shape).astype(dtype)
gh = numpy.random.uniform(-1, 1, y_shape).astype(dtype)
ggc_prev = numpy.random.uniform(-1, 1, hidden_shape).astype(dtype)
ggx = numpy.random.uniform(-1, 1, x_shape).astype(dtype)
self.inputs = [c_prev, x, c_next, gc, gh]
self.grad_outputs = [ggc_prev, ggx]
self.check_backward_options = {'dtype': numpy.float64}
if self.dtype == numpy.float16:
self.check_backward_options = {
'dtype': numpy.float64, 'atol': 1e-3, 'rtol': 1e-2}
def check_backward(self, inputs, grad_outputs, backend_config):
if backend_config.use_cuda:
inputs = cuda.to_gpu(inputs)
grad_outputs = cuda.to_gpu(grad_outputs)
with backend_config:
gradient_check.check_backward(
lstm.LSTMGrad(), inputs, grad_outputs,
**self.check_backward_options)
def test_backward(self, backend_config):
self.check_backward(self.inputs, self.grad_outputs, backend_config)
testing.run_module(__name__, __file__)
| mit | -2,363,930,353,243,313,700 | 33.524017 | 79 | 0.599798 | false |
roadmapper/ansible | test/units/modules/network/fortios/test_fortios_firewall_service_custom.py | 21 | 19284 | # Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <https://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
from mock import ANY
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
try:
from ansible.modules.network.fortios import fortios_firewall_service_custom
except ImportError:
pytest.skip("Could not load required modules for testing", allow_module_level=True)
@pytest.fixture(autouse=True)
def connection_mock(mocker):
connection_class_mock = mocker.patch('ansible.modules.network.fortios.fortios_firewall_service_custom.Connection')
return connection_class_mock
fos_instance = FortiOSHandler(connection_mock)
def test_firewall_service_custom_creation(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'firewall_service_custom': {'app_service_type': 'disable',
'category': 'test_value_4',
'check_reset_range': 'disable',
'color': '6',
'comment': 'Comment.',
'fqdn': 'test_value_8',
'helper': 'auto',
'icmpcode': '10',
'icmptype': '11',
'iprange': 'test_value_12',
'name': 'default_name_13',
'protocol': 'TCP/UDP/SCTP',
'protocol_number': '15',
'proxy': 'enable',
'sctp_portrange': 'test_value_17',
'session_ttl': '18',
'tcp_halfclose_timer': '19',
'tcp_halfopen_timer': '20',
'tcp_portrange': 'test_value_21',
'tcp_timewait_timer': '22',
'udp_idle_timer': '23',
'udp_portrange': 'test_value_24',
'visibility': 'enable'
},
'vdom': 'root'}
is_error, changed, response = fortios_firewall_service_custom.fortios_firewall_service(input_data, fos_instance)
expected_data = {'app-service-type': 'disable',
'category': 'test_value_4',
'check-reset-range': 'disable',
'color': '6',
'comment': 'Comment.',
'fqdn': 'test_value_8',
'helper': 'auto',
'icmpcode': '10',
'icmptype': '11',
'iprange': 'test_value_12',
'name': 'default_name_13',
'protocol': 'TCP/UDP/SCTP',
'protocol-number': '15',
'proxy': 'enable',
'sctp-portrange': 'test_value_17',
'session-ttl': '18',
'tcp-halfclose-timer': '19',
'tcp-halfopen-timer': '20',
'tcp-portrange': 'test_value_21',
'tcp-timewait-timer': '22',
'udp-idle-timer': '23',
'udp-portrange': 'test_value_24',
'visibility': 'enable'
}
set_method_mock.assert_called_with('firewall.service', 'custom', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
def test_firewall_service_custom_creation_fails(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'firewall_service_custom': {'app_service_type': 'disable',
'category': 'test_value_4',
'check_reset_range': 'disable',
'color': '6',
'comment': 'Comment.',
'fqdn': 'test_value_8',
'helper': 'auto',
'icmpcode': '10',
'icmptype': '11',
'iprange': 'test_value_12',
'name': 'default_name_13',
'protocol': 'TCP/UDP/SCTP',
'protocol_number': '15',
'proxy': 'enable',
'sctp_portrange': 'test_value_17',
'session_ttl': '18',
'tcp_halfclose_timer': '19',
'tcp_halfopen_timer': '20',
'tcp_portrange': 'test_value_21',
'tcp_timewait_timer': '22',
'udp_idle_timer': '23',
'udp_portrange': 'test_value_24',
'visibility': 'enable'
},
'vdom': 'root'}
is_error, changed, response = fortios_firewall_service_custom.fortios_firewall_service(input_data, fos_instance)
expected_data = {'app-service-type': 'disable',
'category': 'test_value_4',
'check-reset-range': 'disable',
'color': '6',
'comment': 'Comment.',
'fqdn': 'test_value_8',
'helper': 'auto',
'icmpcode': '10',
'icmptype': '11',
'iprange': 'test_value_12',
'name': 'default_name_13',
'protocol': 'TCP/UDP/SCTP',
'protocol-number': '15',
'proxy': 'enable',
'sctp-portrange': 'test_value_17',
'session-ttl': '18',
'tcp-halfclose-timer': '19',
'tcp-halfopen-timer': '20',
'tcp-portrange': 'test_value_21',
'tcp-timewait-timer': '22',
'udp-idle-timer': '23',
'udp-portrange': 'test_value_24',
'visibility': 'enable'
}
set_method_mock.assert_called_with('firewall.service', 'custom', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 500
def test_firewall_service_custom_removal(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
delete_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
delete_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete', return_value=delete_method_result)
input_data = {
'username': 'admin',
'state': 'absent',
'firewall_service_custom': {'app_service_type': 'disable',
'category': 'test_value_4',
'check_reset_range': 'disable',
'color': '6',
'comment': 'Comment.',
'fqdn': 'test_value_8',
'helper': 'auto',
'icmpcode': '10',
'icmptype': '11',
'iprange': 'test_value_12',
'name': 'default_name_13',
'protocol': 'TCP/UDP/SCTP',
'protocol_number': '15',
'proxy': 'enable',
'sctp_portrange': 'test_value_17',
'session_ttl': '18',
'tcp_halfclose_timer': '19',
'tcp_halfopen_timer': '20',
'tcp_portrange': 'test_value_21',
'tcp_timewait_timer': '22',
'udp_idle_timer': '23',
'udp_portrange': 'test_value_24',
'visibility': 'enable'
},
'vdom': 'root'}
is_error, changed, response = fortios_firewall_service_custom.fortios_firewall_service(input_data, fos_instance)
delete_method_mock.assert_called_with('firewall.service', 'custom', mkey=ANY, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
def test_firewall_service_custom_deletion_fails(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
delete_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
delete_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete', return_value=delete_method_result)
input_data = {
'username': 'admin',
'state': 'absent',
'firewall_service_custom': {'app_service_type': 'disable',
'category': 'test_value_4',
'check_reset_range': 'disable',
'color': '6',
'comment': 'Comment.',
'fqdn': 'test_value_8',
'helper': 'auto',
'icmpcode': '10',
'icmptype': '11',
'iprange': 'test_value_12',
'name': 'default_name_13',
'protocol': 'TCP/UDP/SCTP',
'protocol_number': '15',
'proxy': 'enable',
'sctp_portrange': 'test_value_17',
'session_ttl': '18',
'tcp_halfclose_timer': '19',
'tcp_halfopen_timer': '20',
'tcp_portrange': 'test_value_21',
'tcp_timewait_timer': '22',
'udp_idle_timer': '23',
'udp_portrange': 'test_value_24',
'visibility': 'enable'
},
'vdom': 'root'}
is_error, changed, response = fortios_firewall_service_custom.fortios_firewall_service(input_data, fos_instance)
delete_method_mock.assert_called_with('firewall.service', 'custom', mkey=ANY, vdom='root')
schema_method_mock.assert_not_called()
assert is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 500
def test_firewall_service_custom_idempotent(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'error', 'http_method': 'DELETE', 'http_status': 404}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'firewall_service_custom': {'app_service_type': 'disable',
'category': 'test_value_4',
'check_reset_range': 'disable',
'color': '6',
'comment': 'Comment.',
'fqdn': 'test_value_8',
'helper': 'auto',
'icmpcode': '10',
'icmptype': '11',
'iprange': 'test_value_12',
'name': 'default_name_13',
'protocol': 'TCP/UDP/SCTP',
'protocol_number': '15',
'proxy': 'enable',
'sctp_portrange': 'test_value_17',
'session_ttl': '18',
'tcp_halfclose_timer': '19',
'tcp_halfopen_timer': '20',
'tcp_portrange': 'test_value_21',
'tcp_timewait_timer': '22',
'udp_idle_timer': '23',
'udp_portrange': 'test_value_24',
'visibility': 'enable'
},
'vdom': 'root'}
is_error, changed, response = fortios_firewall_service_custom.fortios_firewall_service(input_data, fos_instance)
expected_data = {'app-service-type': 'disable',
'category': 'test_value_4',
'check-reset-range': 'disable',
'color': '6',
'comment': 'Comment.',
'fqdn': 'test_value_8',
'helper': 'auto',
'icmpcode': '10',
'icmptype': '11',
'iprange': 'test_value_12',
'name': 'default_name_13',
'protocol': 'TCP/UDP/SCTP',
'protocol-number': '15',
'proxy': 'enable',
'sctp-portrange': 'test_value_17',
'session-ttl': '18',
'tcp-halfclose-timer': '19',
'tcp-halfopen-timer': '20',
'tcp-portrange': 'test_value_21',
'tcp-timewait-timer': '22',
'udp-idle-timer': '23',
'udp-portrange': 'test_value_24',
'visibility': 'enable'
}
set_method_mock.assert_called_with('firewall.service', 'custom', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 404
def test_firewall_service_custom_filter_foreign_attributes(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'firewall_service_custom': {
'random_attribute_not_valid': 'tag', 'app_service_type': 'disable',
'category': 'test_value_4',
'check_reset_range': 'disable',
'color': '6',
'comment': 'Comment.',
'fqdn': 'test_value_8',
'helper': 'auto',
'icmpcode': '10',
'icmptype': '11',
'iprange': 'test_value_12',
'name': 'default_name_13',
'protocol': 'TCP/UDP/SCTP',
'protocol_number': '15',
'proxy': 'enable',
'sctp_portrange': 'test_value_17',
'session_ttl': '18',
'tcp_halfclose_timer': '19',
'tcp_halfopen_timer': '20',
'tcp_portrange': 'test_value_21',
'tcp_timewait_timer': '22',
'udp_idle_timer': '23',
'udp_portrange': 'test_value_24',
'visibility': 'enable'
},
'vdom': 'root'}
is_error, changed, response = fortios_firewall_service_custom.fortios_firewall_service(input_data, fos_instance)
expected_data = {'app-service-type': 'disable',
'category': 'test_value_4',
'check-reset-range': 'disable',
'color': '6',
'comment': 'Comment.',
'fqdn': 'test_value_8',
'helper': 'auto',
'icmpcode': '10',
'icmptype': '11',
'iprange': 'test_value_12',
'name': 'default_name_13',
'protocol': 'TCP/UDP/SCTP',
'protocol-number': '15',
'proxy': 'enable',
'sctp-portrange': 'test_value_17',
'session-ttl': '18',
'tcp-halfclose-timer': '19',
'tcp-halfopen-timer': '20',
'tcp-portrange': 'test_value_21',
'tcp-timewait-timer': '22',
'udp-idle-timer': '23',
'udp-portrange': 'test_value_24',
'visibility': 'enable'
}
set_method_mock.assert_called_with('firewall.service', 'custom', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
| gpl-3.0 | -5,491,412,293,693,135,000 | 46.149144 | 142 | 0.450114 | false |
thorshand/electionBuster | electionBuster.py | 1 | 13338 | #!/usr/bin/python3.5
##################################################
## Author: Joshua Franklin, Kevin Franklin
## Example input to start:
## sudo ./electionBuster.py -f josh -l franklin -y 2014 -e senate -s pennsyltucky
## 6 arguments are passed:
## 1: The first name of the candidate (mandatory)
## 2: The middle name of the candidate (optional)
## 3: The last name of the candidate (mandatory)
## 4: The year of the election (mandatory)
## 5: The type of race, such as congress, senate, or president (mandatory)
## 6: The state or region the candidate is from (optional)
##################################################
#TODO: Add a keyboard interrupt
import requests
import sys
import time
import string
import argparse
import socket
from datetime import date
import urllib
from multiprocessing import Pool as ThreadPool, Manager
import collections
import csv
import operator
from modules.utils import genAllDonate, genAll, generate_urls, tryURLforReal
from modules.text_tools import alphabet, alt_alphabets, skipLetter, stringAndStrip, removeDups, reverseLetter, wrongVowel, tlds
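# Process-safe lists (via multiprocessing.Manager) so confirmed and attempted
# URLs can be collected across worker processes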
confirmedURLs = Manager().list()
allURLS = Manager().list()
class NameDenormalizer(object):
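    """Expands a name to its known aliases, read from a CSV file.

    Each row of the CSV is treated as one group of equivalent spellings
    (e.g. a state's abbreviation, full name, and nicknames); looking up any
    member of a group returns the groups that member appears in.
    """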
def __init__(self, filename=None):
filename = filename or 'names.csv'
lookup = collections.defaultdict(list)
with open(filename) as f:
reader = csv.reader(f)
for line in reader:
matches = set(line)
for match in matches:
lookup[match].append(matches)
self.lookup = lookup
def __getitem__(self, name):
name = name.upper()
if name not in self.lookup:
raise KeyError(name)
return self.lookup[name]
def get(self, name, default=None):
try:
return self[name]
except KeyError:
            # No aliases on file; wrap the name in a one-element group so the
            # caller can iterate the result the same way as a lookup hit
            return [set([name])]
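# Minimal usage sketch (the CSV row shown is hypothetical):
#   nd = NameDenormalizer('states.csv')   # suppose it holds "PA,PENNSYLVANIA,KEYSTONE STATE"
#   nd.get('PA')  # -> [{'PA', 'PENNSYLVANIA', 'KEYSTONE STATE'}]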
# Program Timer
start_time = time.time()
# stringAndStrip (imported from modules.text_tools) casts and removes those pesky \r and \n
#Parse command line arguments
parser = argparse.ArgumentParser(description='Identifies registered candidate domains')
parser.add_argument('-f','--firstName', help='Candidate\'s first name',required=True)
parser.add_argument('-m','--middleName',help='Candidate\'s optional middle name')
parser.add_argument('-l','--lastName',help='Candidate\'s last name', required=True)
parser.add_argument('-y','--year', help='Year of the election',required=True)
parser.add_argument('-e','--electionType',help='Type of election (congress, senate, president)', required=True)
parser.add_argument('-s','--state', help='Candidate\'s state of origin', action='append' )
#Exists for candidates like Mitt Romney that possibly have an attachment to two states (i.e., Utah, Massachusetts)
parser.add_argument('-a','--aliasFileName', help='Filename containing a list of aliases')
parser.add_argument('-p','--party', help='Party Affiliation')
args = parser.parse_args()
# Stores command line argumetns
# Make all lowercase
fName = args.firstName
fName = fName.lower()
lName = args.lastName
lName = lName.lower()
party = ""
year = args.year
shortYear = year[-2:]
electionType = args.electionType
electionType = electionType.lower()
state = []
stateText = ""
if (args.party) :
party = args.party
fileName = "states.csv"
if (args.aliasFileName) :
fileName = stringAndStrip( args.aliasFileName)
if (args.state) :
nd = NameDenormalizer( fileName )
for aState in args.state:
stateText = stateText + aState.lower()
state.append( stringAndStrip( aState.upper( ) ) )
statenick = list( nd.get( aState.upper() ) )
for s1 in statenick:
for s in s1:
state.append( s )
mName = ""
middleInitial = ""
if (args.middleName) :
mName = args.middleName
mName = mName.lower()
middleInitial = mName[0]
# This assigns the position variable
if (electionType == 'congress') or (electionType == 'congressional') :
position = 'congress'
altPosition = 'congressman' # congresswoman??
elif electionType == 'senate' :
position = 'senator'
altPosition = 'senate'
elif (electionType == 'governor') or (electionType == 'gubernatorial'):
position = 'governor'
altPosition = 'gov'
elif (electionType == 'president') or (electionType == 'presidential') :
position = 'president'
altPosition = 'prez'
elif (electionType == 'mayoral') or (electionType == 'mayor') :
position = 'mayor'
altPosition = 'mayoral'
else :
position = electionType
altPosition = electionType
# top-level domain-names
# # consider removing .me, .info, and .biz if they aren't adding value
# Runs stringAndStrip on everything except fileName b/c that's used elsewhere
fName = stringAndStrip(fName)
lName = stringAndStrip(lName)
year = stringAndStrip(year)
electionType = stringAndStrip(electionType)
# Alerting the users to the types of sites we're expecting to find
# This differs at times since the state variable isn't mandatory to run the script
## Consider deleting this - does it actually provide value?
if (args.state) :
print('We expect to find these URLs excluding subtle variances:')
print('http://www.' + fName + lName + '.com')
print('http://www.' + lName + fName + '.com')
print('http://www.' + fName + year + '.com')
print('http://www.' + lName + year + '.com')
print('http://www.' + fName + lName + year + '.com' )
for stateAlias in state:
print('http://www.' + fName + lName + 'for' + stateAlias + '.com')
print('http://www.' + lName + 'for' + stateAlias + '.com')
print('http://www.' + fName + 'for' + stateAlias + '.com')
print('http://www.' + fName + lName + 'for' + position + '.com')
print('http://www.' + fName + 'for' + position + '.com')
print('http://www.' + fName + 'for' + position + year + '.com')
print('http://www.' + position + fName + lName + '.com')
else :
print('We expect to find these URLs excluding subtle variances:')
print('http://www.' + fName + lName + '.com')
print('http://www.' + lName + fName + '.com')
print('http://www.' + fName + year + '.com')
print('http://www.' + lName + year + '.com')
print('http://www.' + fName + lName + year + '.com' )
print('http://www.' + fName + lName + 'for' + position + '.com')
print('http://www.' + fName + 'for' + position + '.com')
print('http://www.' + fName + 'for' + position + year + '.com')
print('http://www.' + position + fName + lName + '.com')
# This is the result output files
# Makes a unique filename based on date and time
now = date.today()
partyString = ""
if ( args.party ) :
partyString = "-" + party.lower()
tempResults = 'results-' + fName + '-' + lName + '-' + stateText + partyString + '-' + str(now) + '.txt'
resultsFile = open(tempResults, "w")
# This clears the results files before reopening them
resultsFile.close()
resultsFile = open(tempResults, "a")
## Other alphabets are defined as a quick way of doing URL mangling.
## Is this a candidate for deletion?
# alternative alphabets
# 0: No change
# 1: i -> 1 "Eye to One"
# 2: l -> i "El to Eye"
# 3: i -> l "Eye to El"
# 4: o -> 0 "Oh to Zero"
# 5: 0 -> o "Zero to Oh"
# 6: n -> m "En to Em" TODO: Does this swap work right?
# 7: m -> n "Em to En"
# 8: e -> 3 "Ee to three"
# 9: 3 -> e "Three to ee"
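# Illustrative (hypothetical) effect of one substitution, assuming alphabet 1
# (i -> 1) is applied to a generated template by genAll() from modules.utils:
#   'joshfranklin2014'  ->  'joshfrankl1n2014'
# so look-alike registrations such as joshfrankl1n2014.com are also checked.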
# These are the template that we'll use based on the optional input parameters.
# The first one is if the state was input.
templates = generate_urls(first_name=args.firstName,
last_name=args.lastName,
state=state,
middlename=args.middleName,
position=position,
altPosition=altPosition,
year=args.year)
# This generates the text mangling
results = genAll(templates, alt_alphabets)
# This generates the text mangling with some other alternatives
resultsDonate = genAllDonate(templates, alt_alphabets)
#### LOOP 1 ####
# All examples use the input of 'josh franklin 2014 president DC'
#################
#http://www.joshfranklin.com
#http://www.josh2014.com
#http://www.franklin2014.com
#http://www.joshfranklin2014.com
#http://www.joshfranklinforDC.com
#http://www.joshfranklinDC.com
#http://www.joshforpresident.com
#http://www.josh4president.com
#http://www.joshforpresident2014.com
#http://www.josh4president2014.com
#http://www.presidentjoshfranklin.com
#http://www.president-josh-franklin.com
#http://www.presidentjoshforpresident2014.com
#http://www.presidentjosh4president2014.com
#http://www.presidentjoshfranklinforpresident2014.com
#http://www.presidentjosh-franklinforpresident2014.com
#http://www.presidentjoshfranklin4president2014.com
#http://www.presidentjosh-franklin4president2014.com
def tryURL(url):
url = stringAndStrip(url)
for domain_name in tlds:
print('Trying: ' + url + domain_name)
allURLS.append(url + domain_name)
print("Entering template loop 1^^^^^^^^^^^^^^^^^^^^^^^^^^" )
print(time.time() - start_time, "seconds")
for r in results:
tryURL( 'http://www.' + r , )
### LOOP 2 ###
# Puts donate at the beginning &
# Removes the period after 'www'
##############

# Extend the list of tlds a little
tlds.append( '.republican' )
tlds.append( '.democrat' )
tlds.append( '.red' )
tlds.append( '.blue' )
tlds.append( '.vote' )
#These next few look for some of the larger parties
tryURL( 'http://www.republican' + fName + lName )
tryURL( 'http://www.democrat' + fName + lName )
tryURL( 'http://www.libertarian' + fName + lName )
tryURL( 'http://www.independent' + fName + lName )
tryURL( 'http://www.vote' + fName + lName ) #Example: votejoshfranklin.com
tryURL( 'http://www.vote' + fName + middleInitial + lName ) #Example: votejoshmichaelfranklin.com
tryURL( 'http://www.vote' + fName ) #Example: votejosh.com
tryURL( 'http://www.vote' + lName ) #Example: votefranklin.com
tryURL( 'http://www.' + lName + position ) #Example: franklinpresident.com
tryURL( 'http://www.' + lName + altPosition ) #Example: franklinprez.com
tryURL( 'http://www.real' + fName + lName ) #Example: realjoshfranklin.com
for stateAlias in state:
tryURL( 'http://www.' + lName + 'for' + stateAlias ) #Example: franklinforDC.com
tryURL( 'http://www.' + lName + '4' + stateAlias ) #Example: franklin4DC.com
tryURL( 'http://www.friendsof' + fName ) #Example: friendsofjosh.com
tryURL( 'http://www.friendsof' + lName ) #Example: friendsofjosh.com
tryURL( 'http://www.' + fName + 'sucks' ) #Example: joshsucks.com
tryURL( 'http://www.' + lName + 'sucks' ) #Example: franklinsucks.com
tryURL( 'http://www.' + fName ) #Example: josh.vote
tryURL( 'http://www.' + lName ) #Example: franklin.vote
tryURL( 'http://www.' + fName + lName ) #Example: joshfranklin.vote
tryURL( 'http://www.elect' + fName + lName )
tryURL( 'http://www.elect' + fName + middleInitial + lName )
tryURL( 'http://www.elect' + fName )
tryURL( 'http://www.elect' + lName )
tryURL( 'http://www.' + fName + middleInitial + year )
tryURL( 'http://www.' + middleInitial + lName )
print( ' Total URLS: ' + str(len(allURLS)) + "\n" )
allURLS = removeDups( allURLS )
print( 'Unique URLS: ' + str(len(allURLS)) + "\n" )
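# Note: despite the alias, ThreadPool here is multiprocessing.Pool (see the
# import at the top of the file), so tryURLforReal runs in 24 worker processes.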
pool = ThreadPool( 24 )
# Check the urls in their own worker processes
# and return the results
results = pool.map( tryURLforReal, allURLS )
pool.close()
pool.join()
#print(results)
# Each thread added an entry for each result (found or not, gotta filter the blanks)
# I'm doing this here sinced the file writes might not have been synchronized
# its just a fear I had
for i in results:
resultsFile.write( i )
totalRuntime = "%s seconds" % (time.time() - start_time)
###### Write final results to logfile ###########
resultsFile.write( "######################################" + "\n" )
resultsFile.write( "ElectionBuster Scan Results: " + "\n" )
resultsFile.write( "######################################" + "\n" )
resultsFile.write( "INPUTS = " + str(fName) + ", " + str(mName) + ", " + str(lName) + ", " + str(year) + ", " + str(position) + ", " + str(altPosition) + ", " + str(stateText) + ", " + str(party) + "\n" )
resultsFile.write( "Total runtime was " + str(totalRuntime) + "\n" )
resultsFile.write( "There were " + str(len(confirmedURLs)) + " positive results." + "\n" )
resultsFile.write( "There were " + str(len(testedURLs)) + " unique URLs tested." + "\n" )
resultsFile.write( "-------------------------------------" + "\n" )
resultsFile.write( "Positive results: " + "\n" )
resultsFile.write( "-------------------------------------" + "\n" )
for url in confirmedURLs:
resultsFile.write( str(url) + "\n" )
resultsFile.write( "\n" )
resultsFile.write( "-------------------------------------" + "\n" )
resultsFile.write( "EOF " + "\n" )
#for url in allURLS:
# resultsFile.write( str(url) + "\n" )
# print( str( url ) + "\n" )
###### Print final results to screen ###########
print( "###################################### " + "\n" )
print( "ElectionBuster Scan Results: " + "\n" )
print( "###################################### " + "\n" )
print( "INPUTS" + "\n" )
print( "First name: " + fName + "\n" )
print( "Middle name: " + mName + "\n" )
print( "Last name: " + lName + "\n" )
print( "Year: " + year + "\n" )
print( "Election type: " + electionType + "\n" )
print( "-------------------------------------" + "\n" )
print( "Total runtime was " + str(totalRuntime) + "\n" )
print( "-------------------------------------" + "\n" )
print( "Positive results: " + "\n" )
print( "There were " + str(len(confirmedURLs)) + " hits:" + "\n" )
print( "-------------------------------------" + "\n" )
print( "\n" )
for url in confirmedURLs:
print( url )
print( "\n" )
# Bad things happen if these files are not properly closed
resultsFile.close()
| mit | 7,579,118,042,481,559,000 | 34.663102 | 204 | 0.645299 | false |
lj8385174/PyComms | MPU6050/Examples/gpsdData.py | 1 | 1914 | #! /usr/bin/python
# Written by Dan Mandle http://dan.mandle.me September 2012
# License: GPL 2.0
import os
from gps import *
from time import *
import time
import threading
class GpsPoller(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
self.gpsd = gps(mode=WATCH_ENABLE) #starting the stream of info
self.current_value = None
self.running = True #setting the thread running to true
def run(self):
while self.running:
self.gpsd.next() #this will continue to loop and grab EACH set of gpsd info to clear the buffer
if __name__ == '__main__':
os.system('clear') #clear the terminal (optional)
gpsp = GpsPoller() # create the thread
try:
gpsp.start() # start it up
while True:
#It may take a second or two to get good data
#print gpsd.fix.latitude,', ',gpsd.fix.longitude,' Time: ',gpsd.utc
os.system('clear')
print 'len of time:' , len(gpsp.gpsd.utc)
print
print ' GPS reading'
print '----------------------------------------'
print 'latitude ' , gpsp.gpsd.fix.latitude
print 'longitude ' , gpsp.gpsd.fix.longitude
print 'time utc ' , gpsp.gpsd.utc,' + ', gpsp.gpsd.fix.time
print 'time utc+8 ' , gpsp.gpsd.utc
print 'altitude (m)' , gpsp.gpsd.fix.altitude
print 'eps ' , gpsp.gpsd.fix.eps
print 'epx ' , gpsp.gpsd.fix.epx
print 'epv ' , gpsp.gpsd.fix.epv
print 'ept ' , gpsp.gpsd.fix.ept
print 'speed (m/s) ' , gpsp.gpsd.fix.speed
print 'climb ' , gpsp.gpsd.fix.climb
print 'track ' , gpsp.gpsd.fix.track
print 'mode ' , gpsp.gpsd.fix.mode
print
print 'sats ' , gpsp.gpsd.satellites
time.sleep(0.1) #set to whatever
except (KeyboardInterrupt, SystemExit): #when you press ctrl+c
print "\nKilling Thread..."
gpsp.running = False
gpsp.join() # wait for the thread to finish what it's doing
print "Done.\nExiting."
| mit | -6,798,758,300,697,332,000 | 31.440678 | 98 | 0.633751 | false |
manqala/erpnext | erpnext/schools/doctype/course_schedule/test_course_schedule.py | 7 | 2683 | # -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import unittest
import datetime
from frappe.utils import today, to_timedelta
from erpnext.schools.utils import OverlapError
# test_records = frappe.get_test_records('Course Schedule')
class TestCourseSchedule(unittest.TestCase):
def test_student_group_conflict(self):
cs1 = make_course_schedule_test_record(simulate= True)
cs2 = make_course_schedule_test_record(schedule_date=cs1.schedule_date, from_time= cs1.from_time,
to_time= cs1.to_time, instructor="_Test Instructor 2", room="RM0002", do_not_save= 1)
self.assertRaises(OverlapError, cs2.save)
def test_instructor_conflict(self):
cs1 = make_course_schedule_test_record(simulate= True)
cs2 = make_course_schedule_test_record(from_time= cs1.from_time, to_time= cs1.to_time,
student_group="Course-TC101-2014-2015 (_Test Academic Term)", room="RM0002", do_not_save= 1)
self.assertRaises(OverlapError, cs2.save)
def test_room_conflict(self):
cs1 = make_course_schedule_test_record(simulate= True)
cs2 = make_course_schedule_test_record(from_time= cs1.from_time, to_time= cs1.to_time,
student_group="Course-TC101-2014-2015 (_Test Academic Term)", instructor="_Test Instructor 2", do_not_save= 1)
self.assertRaises(OverlapError, cs2.save)
def test_no_conflict(self):
cs1 = make_course_schedule_test_record(simulate= True)
make_course_schedule_test_record(from_time= cs1.from_time, to_time= cs1.to_time,
student_group="Course-TC102-2014-2015 (_Test Academic Term)", instructor="_Test Instructor 2", room="RM0002")
def make_course_schedule_test_record(**args):
args = frappe._dict(args)
course_schedule = frappe.new_doc("Course Schedule")
course_schedule.student_group = args.student_group or "Course-TC101-2014-2015 (_Test Academic Term)"
course_schedule.course = args.course or "TC101"
course_schedule.instructor = args.instructor or "_Test Instructor"
course_schedule.room = args.room or "RM0001"
course_schedule.schedule_date = args.schedule_date or today()
course_schedule.from_time = args.from_time or to_timedelta("01:00:00")
course_schedule.to_time = args.to_time or course_schedule.from_time + datetime.timedelta(hours= 1)
if not args.do_not_save:
if args.simulate:
while True:
try:
course_schedule.save()
break
except OverlapError:
course_schedule.from_time = course_schedule.from_time + datetime.timedelta(minutes=10)
course_schedule.to_time = course_schedule.from_time + datetime.timedelta(hours= 1)
else:
course_schedule.save()
return course_schedule
| gpl-3.0 | -1,052,708,622,552,771,500 | 37.884058 | 113 | 0.738725 | false |
lermit/mod-webui | module/plugins/depgraph/depgraph.py | 3 | 4719 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (C) 2009-2012:
# Gabes Jean, [email protected]
# Gerhard Lausser, [email protected]
# Gregory Starck, [email protected]
# Hartmut Goebel, [email protected]
#
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
import time
### Will be populated by the UI with it's own value
app = None
def depgraph_host(name):
# First we look for the user sid
# so we bail out if it's a false one
user = app.get_user_auth()
if not user:
return {'app': app, 'elt': None, 'valid_user': False}
# Ok we are in a detail page but the user ask for a specific search
search = app.request.GET.get('global_search', None)
loop = bool(int(app.request.GET.get('loop', '0')))
loop_time = int(app.request.GET.get('loop_time', '10'))
if search:
new_h = app.datamgr.get_host(search)
if new_h:
app.bottle.redirect("/depgraph/" + search)
h = app.datamgr.get_host(name)
return {'app': app, 'elt': h, 'user': user, 'valid_user': True, 'loop' : loop, 'loop_time' : loop_time}
def depgraph_srv(hname, desc):
# First we look for the user sid
# so we bail out if it's a false one
user = app.get_user_auth()
if not user:
return {'app': app, 'elt': None, 'valid_user': False}
loop = bool(int(app.request.GET.get('loop', '0')))
loop_time = int(app.request.GET.get('loop_time', '10'))
# Ok we are in a detail page but the user ask for a specific search
search = app.request.GET.get('global_search', None)
if search:
new_h = app.datamgr.get_host(search)
if new_h:
app.bottle.redirect("/depgraph/" + search)
s = app.datamgr.get_service(hname, desc)
return {'app': app, 'elt': s, 'user': user, 'valid_user': True, 'loop' : loop, 'loop_time' : loop_time}
def get_depgraph_widget():
# First we look for the user sid
# so we bail out if it's a false one
user = app.get_user_auth()
if not user:
return {'app': app, 'elt': None, 'valid_user': False}
search = app.request.GET.get('search', '').strip()
if not search:
# Ok look for the first host we can find
hosts = app.datamgr.get_hosts()
for h in hosts:
search = h.get_name()
break
elts = search.split('/', 1)
if len(elts) == 1:
s = app.datamgr.get_host(search)
else: # ok we got a service! :)
s = app.datamgr.get_service(elts[0], elts[1])
wid = app.request.GET.get('wid', 'widget_depgraph_' + str(int(time.time())))
collapsed = (app.request.GET.get('collapsed', 'False') == 'True')
options = {'search': {'value': search, 'type': 'hst_srv', 'label': 'Search an element'},
}
title = 'Relation graph for %s' % search
return {'app': app, 'elt': s, 'user': user,
'wid': wid, 'collapsed': collapsed, 'options': options, 'base_url': '/widget/depgraph', 'title': title,
}
def get_depgraph_inner(name):
# First we look for the user sid
# so we bail out if it's a false one
user = app.get_user_auth()
if not user:
return {'app': app, 'elt': None, 'user': None}
elt = None
if '/' in name:
elts = name.split('/', 1)
elt = app.datamgr.get_service(elts[0], elts[1])
else:
elt = app.datamgr.get_host(name)
return {'app': app, 'elt': elt, 'user': user}
widget_desc = '''<h4>Relation graph</h4>
Show a graph of an object relations
'''
pages = {depgraph_host: {'routes': ['/depgraph/:name'], 'view': 'depgraph', 'static': True},
depgraph_srv: {'routes': ['/depgraph/:hname/:desc'], 'view': 'depgraph', 'static': True},
get_depgraph_widget: {'routes': ['/widget/depgraph'], 'view': 'widget_depgraph', 'static': True, 'widget': ['dashboard'], 'widget_desc': widget_desc, 'widget_name': 'depgraph', 'widget_picture': '/static/depgraph/img/widget_depgraph.png'},
get_depgraph_inner: {'routes': ['/inner/depgraph/:name#.+#'], 'view': 'inner_depgraph', 'static': True},
}
| agpl-3.0 | -6,691,723,807,787,364,000 | 33.195652 | 248 | 0.613054 | false |
assassinen/python_training | _test/test_1.py | 1 | 1315 | __author__ = 'NovikovII'
from point import Point
import string
import random
# #random_contact()
# print(random_contact())
#
#
# testdata = [
# (str(n1)+str(n2)+str(n3)+str(n4))
# for n1 in range (10)
# for n2 in range (10)
# for n3 in range (10)
# for n4 in range (10)
# ]
#
# for i in testdata:
# print(i)
#print(''.join([random.choice(string.ascii_letters + string.digits) for i in range(random.randrange(20))]))
#map
# l1 = [Point(i, i*i) for i in range(-5,6)]
#
# l2 = list(map(lambda i: Point(i, i*i), range(-5,6)))
# l3 = list(map(lambda p: Point(p.x, -p.y), l1))
# print(l1)
# print(l2)
# print(l3)
#
# #filter
# l3 = list(filter(lambda p: p.x > 0, l1))
# print(l3)
#
# l3 = list(filter(lambda p: p.x % 2 == 0, l1))
# print(l3)
#
# l3 = [i for i in range(0,5)]
# print(l3)
#
# print(l3[0:2])
# print(l3[0])
#
# l3[0:2] = []
# print(l3)
# loops
# l = []
# l2 = []
#
# for i in range(-5, 6):
# l.append(Point(i, i*i))
#
# for el in l:
# l2.append(Point(el.x, -el.y))
#
# l3 = [Point(i, i*i) for i in range(-5, 6)]
#
# print(l)
# print(l2)
# print(l3)
# sorting lists
#l1 = [Point(2,1), Point(1,2), Point(0,0)]
# def y(p):
# return p.y
# print(l1)
# l2 = sorted(l1, key=lambda p: p.x)
# print(l2)
# l2 = sorted(l1, key=y)
# print(l2)
| apache-2.0 | 22,715,702,390,826,624 | 14.962963 | 107 | 0.552204 | false |
sparkslabs/kamaelia_ | Code/Python/Kamaelia/Kamaelia/Visualisation/Axon/ExtraWindowFurniture.py | 3 | 3691 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1)
#
# (1) Kamaelia Contributors are listed in the AUTHORS file and at
# http://www.kamaelia.org/AUTHORS - please extend this file,
# not this notice.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -------------------------------------------------------------------------
"""\
==========================
Kamaelia Cat logo renderer
==========================
Renderer for the topology viewer framework that renders a Kamaelia cat logo
to the top left corner on rendering pass 10.
Example Usage
-------------
Create a topology viewer component that also renders 'ExtraWindowFurniture' to the
display surface::
TopologyViewer( extraDrawing = ExtraWindowFurniture(),
...
).activate()
How does it work?
-----------------
Instances of this class provide a render() generator that renders a Kamaelia cat
logo at coordinates (8,8) to the specified pygame surface. The cat logo is scaled
so its longest dimension (width or height) is 64 pixels.
Rendering is performed by the generator, returned when the render() method is
called. Its behaviour is that needed for the framework for multi-pass rendering
that is used by TopologyViewer.
The generator yields the number of the rendering pass it wishes to be called on
next. Each time it is subsequently called, it performs the rendering required
for that pass. It then yields the number of the next required pass or completes
if there is no more rendering required.
A setOffset() method is also implemented to allow the particles' coordinates
to be offset. This therefore makes it possible to scroll the particles around
the display surface.
See TopologyViewer for more details.
"""
import pygame
from pygame.locals import *
class ExtraWindowFurniture(object):
"""\
ExtraWindowFurniture() -> new ExtraWindowFurniture object.
Renders a Kamaelia cat logo to the top left corner of a pygame surface
when the render() generator is called.
"""
def __init__(self):
"""x.__init__(...) initializes x; see x.__class__.__doc__ for signature"""
super(ExtraWindowFurniture,self).__init__()
self.logo = None
try:
self.logo = pygame.image.load("kamaelia_logo.png")
except pygame.error:
try:
self.logo = pygame.image.load("/usr/local/share/kamaelia/kamaelia_logo.png")
except pygame.error:
pass # Give up for now. FIXME: Could do something new
if self.logo:
biggest = max( self.logo.get_width(), self.logo.get_height() )
from pygame.transform import rotozoom
self.logo = rotozoom(self.logo, 0.0, 64.0 / biggest)
def render(self, surface):
"""\
Generator that yields 10 then renders a Kamaelia cat logo to the
specified pygame surface at coordinates (8,8)
"""
yield 10
if self.logo:
surface.blit(self.logo, (8,8))
def setOffset( self, offset ):
"""Dummy method."""
pass
| apache-2.0 | 1,493,377,255,343,978,800 | 34.490385 | 92 | 0.653481 | false |
tequa/ammisoft | ammimain/WinPython-64bit-2.7.13.1Zero/python-2.7.13.amd64/Lib/site-packages/idlexlib/idlexMain.py | 2 | 12959 | #! /usr/bin/env python
## """
## Copyright(C) 2011 The Board of Trustees of the University of Illinois.
## All rights reserved.
##
## Developed by: Roger D. Serwy
## University of Illinois
##
## Permission is hereby granted, free of charge, to any person obtaining
## a copy of this software and associated documentation files (the
## "Software"), to deal with the Software without restriction, including
## without limitation the rights to use, copy, modify, merge, publish,
## distribute, sublicense, and/or sell copies of the Software, and to
## permit persons to whom the Software is furnished to do so, subject to
## the following conditions:
##
## + Redistributions of source code must retain the above copyright
## notice, this list of conditions and the following disclaimers.
## + Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimers in the
## documentation and/or other materials provided with the distribution.
## + Neither the names of Roger D. Serwy, the University of Illinois, nor
## the names of its contributors may be used to endorse or promote
## products derived from this Software without specific prior written
## permission.
##
## THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
## OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
## MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
## IN NO EVENT SHALL THE CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE FOR
## ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
## CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH
## THE SOFTWARE OR THE USE OR OTHER DEALINGS WITH THE SOFTWARE.
##
##
## """
# This module hotpatches EditorWindow.py to load idlex extensions properly
from __future__ import print_function
import sys
from idlexlib.extensionManager import extensionManager
import idlelib
import os
import __main__
import imp
import traceback
import re
from idlelib import macosxSupport
version = "1.12" # IdleX version
IDLE_DEFAULT_EXT = [] # list of default extensions that IDLE has
if sys.version < '3':
from StringIO import StringIO
from Tkinter import *
import Tkinter as tkinter
import tkFileDialog
import tkMessageBox
else:
from io import StringIO
from tkinter import *
import tkinter
import tkinter.filedialog as tkFileDialog
import tkinter.messagebox as tkMessageBox
from idlelib.configHandler import idleConf, IdleConfParser
ansi_re = re.compile(r'\x01?\x1b\[(.*?)m\x02?')
def strip_ansi(s):
return ansi_re.sub("", s)
def install_idlex_manager():
""" install IDLEX extension manager into IDLE """
# 2011-11-15 Bugfix - change the user config file names for IdleX
# to avoid a problem on Windows where pythonw.exe refuses to run
# idle.pyw when an error occurs. However python.exe runs idle.py just fine.
# See http://bugs.python.org/issue13582
u = idleConf.userCfg
for key, value in list(u.items()):
# add "idlex-" to user config file names
fullfile = value.file
directory, filename = os.path.split(fullfile)
if filename.startswith('idlex-'):
new_filename = filename
else:
new_filename = 'idlex-' + filename
new_fullfile = os.path.join(directory, new_filename)
value.file = new_fullfile
value.Load()
mod = extensionManager.load_extension('idlexManager')
mod.extensionManager = extensionManager
mod.version = version
mod.update_globals()
# add idlex to the extension list
e = idleConf.userCfg['extensions']
if not e.has_section('idlexManager'):
e.add_section('idlexManager')
e.set('idlexManager', 'enable', '1')
def _printExt():
a = []
for i in idleConf.defaultCfg['extensions'].sections():
if i.endswith('_cfgBindings') or i.endswith('_bindings'):
continue
a.append(i)
print('Extensions: %s' % a)
###########################################################################
##
## HOTPATCHING CODE
##
###########################################################################
def fix_tk86():
tkinter._Tk = tkinter.Tk
def wrapper(func, name):
Tcl_Obj = tkinter._tkinter.Tcl_Obj
def f(*args, **kwargs):
#print(name, 'wrapped', args, kwargs)
#t = [i for i in args if isinstance(i, Tcl_Obj)]
#for i in t:
# print(name, 'FOUND arg:', repr(i), type(i), str(i))
args = [i if not isinstance(i, Tcl_Obj) else str(i)
for i in args]
for key, value in kwargs.items():
if isinstance(value, Tcl_Obj):
#print(name, 'FOUND kwarg:', key, value)
kwargs[key] = str(value)
return func(*args, **kwargs)
return f
class TkReflector(object):
def __init__(self, tk):
self.tk = tk
def __getattribute__(self, name):
a = getattr(object.__getattribute__(self, 'tk'), name)
if name in ['splitlist']:
#if hasattr(a, '__call__'):
return wrapper(a, name)
else:
return a
class TkFix(tkinter.Tk):
def __init__(self, *args, **kwargs):
tkinter._Tk.__init__(self, *args, **kwargs)
self.__tk = self.tk
version = self.tk.call('info', 'patchlevel')
if version.startswith('8.6'):
self.tk = TkReflector(self.__tk)
tkinter.Tk = TkFix
def _hotpatch():
# Fix numerous outstanding IDLE issues...
import idlelib.EditorWindow
EditorWindowOrig = idlelib.EditorWindow.EditorWindow
class EditorWindow(EditorWindowOrig):
_invalid_keybindings = [] # keep track of invalid keybindings encountered
_valid_keybindings = []
# Work around a bug in IDLE for handling events bound to menu items.
# The <<event-variables>> are stored globally, not locally to
# each editor window. Without this, toggling a checked menu item
# in one editor window toggles the item in ALL editor windows.
# Issue 13179
def __init__(self, flist=None, filename=None, key=None, root=None):
if flist is not None:
flist.vars = {}
EditorWindowOrig.__init__(self, flist, filename, key, root)
# FIXME: Do not transfer custom keybindings if IDLE keyset is set to default
        # Fix broken keybindings that have plagued IDLE for years.
# Issue 12387, 4765, 13071, 6739, 5707, 11437
def apply_bindings(self, keydefs=None): # SUBCLASS to catch errors
#return EditorWindowOrig.apply_bindings(self, keydefs)
if keydefs is None:
keydefs = self.Bindings.default_keydefs
text = self.text
text.keydefs = keydefs
invalid = []
for event, keylist in keydefs.items():
for key in keylist:
try:
text.event_add(event, key)
except TclError as err:
#print(' Apply bindings error:', event, key)
invalid.append((event, key))
if invalid: # notify errors
self._keybinding_error(invalid)
def RemoveKeybindings(self): # SUBCLASS to catch errors
"Remove the keybindings before they are changed."
EditorWindow._invalid_keybindings = []
# Called from configDialog.py
self.Bindings.default_keydefs = keydefs = idleConf.GetCurrentKeySet()
for event, keylist in keydefs.items():
for key in keylist:
try:
self.text.event_delete(event, key)
except Exception as err:
print(' Caught event_delete error:', err)
print(' For %s, %s' % (event, key))
pass
for extensionName in self.get_standard_extension_names():
xkeydefs = idleConf.GetExtensionBindings(extensionName)
if xkeydefs:
for event, keylist in xkeydefs.items():
for key in keylist:
try:
self.text.event_delete(event, key)
except Exception as err:
print(' Caught event_delete error:', err)
print(' For %s, %s' % (event, key))
pass
def _keybinding_error(self, invalid):
""" Create an error message about keybindings. """
new_invalid = [i for i in invalid if i not in EditorWindow._invalid_keybindings]
if new_invalid:
msg = ['There are invalid key bindings:', '']
for ev, k in new_invalid:
while ev[0] == '<' and ev[-1] == '>':
ev = ev[1:-1]
msg.append('Action:%s' % ev)
msg.append('Key:%s' % k)
msg.append('')
msg.extend(['Please reconfigure these bindings.'])
def errormsg(msg=msg):
tkMessageBox.showerror(title='Invalid Key Bindings',
message='\n'.join(msg),
master=self.top,
parent=self.top)
EditorWindow._invalid_keybindings.extend(new_invalid)
self.top.after(100, errormsg)
def load_standard_extensions(self):
for name in self.get_standard_extension_names():
try:
if name in extensionManager.IDLE_EXTENSIONS:
self.load_extension(name)
else:
self.load_idlex_extension(name)
except:
print("Failed to load extension", repr(name))
import traceback
traceback.print_exc()
def load_idlex_extension(self, name):
# import from idlex
mod = extensionManager.load_extension(name)
if mod is None:
print("\nFailed to import IDLEX extension: %s" % name)
return
cls = getattr(mod, name)
keydefs = idleConf.GetExtensionBindings(name)
if hasattr(cls, "menudefs"):
self.fill_menus(cls.menudefs, keydefs)
ins = cls(self)
self.extensions[name] = ins
if keydefs:
self.apply_bindings(keydefs)
for vevent in keydefs.keys():
methodname = vevent.replace("-", "_")
while methodname[:1] == '<':
methodname = methodname[1:]
while methodname[-1:] == '>':
methodname = methodname[:-1]
methodname = methodname + "_event"
if hasattr(ins, methodname):
self.text.bind(vevent, getattr(ins, methodname))
idlelib.EditorWindow.EditorWindow = EditorWindow
def macosx_workaround():
# restore "Options" menu on MacOSX
if not macosxSupport.runningAsOSXApp():
return
def restore(menu_specs):
c = [a for a,b in menu_specs]
if "options" not in c:
menu_specs.insert(-2, ("options", "Options"))
import idlelib.EditorWindow
restore(idlelib.EditorWindow.EditorWindow.menu_specs)
import idlelib.PyShell
restore(idlelib.PyShell.PyShell.menu_specs)
class devnull:
# For pythonw.exe on Windows...
def __init__(self):
pass
def write(self, *args, **kwargs):
pass
def __getattr__(self, *args, **kwargs):
return self.write
def pythonw_workaround():
# Work around a bug in pythonw.exe that prevents IdleX from starting.
if sys.stderr is None:
sys.stderr = devnull()
if sys.stdout is None:
sys.stdout = devnull()
def main():
pythonw_workaround()
_hotpatch()
fix_tk86()
try:
macosx_workaround()
except:
pass # Tk on OSX should not be this fragile...
install_idlex_manager()
extensionManager.load_idlex_extensions()
# Force a reset on Bindings...
# Without this, user reconfiguration of the key bindings within IDLE may
# generate an error on MultiCall unbind.
import idlelib.Bindings
idlelib.Bindings.default_keydefs = idleConf.GetCurrentKeySet()
import idlelib.PyShell
idlelib.PyShell.main()
if __name__ == '__main__':
# start up IDLE with IdleX
main()
| bsd-3-clause | 6,166,643,924,308,765,000 | 34.119241 | 92 | 0.570337 | false |
cemoody/chainer | chainer/functions/evaluation/accuracy.py | 18 | 1320 | import numpy
from chainer import cuda
from chainer import function
from chainer.utils import type_check
class Accuracy(function.Function):
def check_type_forward(self, in_types):
type_check.expect(in_types.size() == 2)
x_type, t_type = in_types
type_check.expect(
x_type.dtype == numpy.float32,
x_type.ndim >= 2,
t_type.dtype == numpy.int32,
t_type.ndim == 1,
t_type.shape[0] == x_type.shape[0],
)
for i in range(2, x_type.ndim.eval()):
type_check.expect(x_type.shape[i] == 1)
def forward(self, inputs):
xp = cuda.get_array_module(*inputs)
y, t = inputs
y = y.reshape(len(y), -1) # flatten
pred = y.argmax(axis=1)
return xp.asarray((pred == t).mean(dtype='f')),
def accuracy(y, t):
"""Computes muticlass classification accuracy of the minibatch.
Args:
y (Variable): Variable holding a matrix whose (i, j)-th element
indicates the score of the class j at the i-th example.
t (Variable): Variable holding an int32 vector of groundtruth labels.
Returns:
Variable: A variable holding a scalar array of the accuracy.
.. note:: This function is non-differentiable.
"""
return Accuracy()(y, t)
| mit | 7,409,308,581,682,934,000 | 27.695652 | 77 | 0.595455 | false |
gitfred/fuel-extension-volume-manager | volume_manager/alembic_migrations/migrations/versions/001_add_volumes_table.py | 4 | 3469 | # Copyright 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""volume_manager
Revision ID: 086cde3de7cf
Revises: None
Create Date: 2015-06-19 16:16:44.513714
"""
# revision identifiers, used by Alembic.
revision = '086cde3de7cf'
down_revision = None
import logging
from alembic import context
from alembic import op
from oslo_serialization import jsonutils
import sqlalchemy as sa
from nailgun.db.sqlalchemy.models.fields import JSON
from nailgun.extensions.consts import extensions_migration_buffer_table_name
from nailgun.extensions.utils import is_buffer_table_exist
logger = logging.getLogger('alembic.migration')
config = context.config
table_prefix = config.get_main_option('table_prefix')
table_volumes_name = '{0}node_volumes'.format(table_prefix)
def migrate_data_from_core(connection):
if not is_buffer_table_exist(connection):
        # NOTE(eli): if there is no buffer table it means that there
        # is no core database, so we should not run data migrations.
        # This covers the case where the extension might be installed
        # and used separately from Nailgun core and its database
logger.warn(
"Cannot find buffer table '{0}'. "
"Don't run data migrations from buffer table, "
"because extension might be installed and used "
"separately from Nailgun core and its database".format(
extensions_migration_buffer_table_name))
return
ext_name = 'volume_manager'
select_query = sa.sql.text(
'SELECT id, data FROM {0} '
'WHERE extension_name=:extension_name'.format(
extensions_migration_buffer_table_name))
delete_query = sa.sql.text(
'DELETE FROM {0} WHERE id=:record_id'.format(
extensions_migration_buffer_table_name))
insert_query = sa.sql.text(
"INSERT INTO {0} (node_id, volumes)"
"VALUES (:node_id, :volumes)".format(
table_volumes_name))
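    # Illustrative (hypothetical) buffer-table record consumed by the loop below:
    #   extension_name='volume_manager',
    #   data='{"node_id": 42, "volumes": [{"name": "os", "size": 10240}]}'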
for buffer_record_id, volumes_data in connection.execute(
select_query,
extension_name=ext_name):
volumes_parsed = jsonutils.loads(volumes_data)
volumes = volumes_parsed.get('volumes')
node_id = volumes_parsed.get('node_id')
connection.execute(
insert_query,
node_id=node_id,
volumes=jsonutils.dumps(volumes))
connection.execute(
delete_query,
record_id=buffer_record_id)
def upgrade():
connection = op.get_bind()
op.create_table(
table_volumes_name,
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('node_id', sa.Integer(), nullable=False),
sa.Column('volumes', JSON(), server_default='[]', nullable=False),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('node_id'))
migrate_data_from_core(connection)
def downgrade():
op.drop_table(table_volumes_name)
| apache-2.0 | 1,113,558,085,920,697,900 | 31.420561 | 78 | 0.668492 | false |
kenshay/ImageScript | ProgramData/SystemFiles/Python/Lib/site-packages/OpenGL/GL/EXT/texture_integer.py | 9 | 3866 | '''OpenGL extension EXT.texture_integer
This module customises the behaviour of the
OpenGL.raw.GL.EXT.texture_integer to provide a more
Python-friendly API
Overview (from the spec)
Fixed-point textures in unextended OpenGL have integer components,
but those values are taken to represent floating-point values in
the range [0,1]. These integer components are considered
"normalized" integers. When such a texture is accessed by a
shader or by fixed-function fragment processing, floating-point
values are returned.
This extension provides a set of new "unnormalized" integer texture
formats. Formats with both signed and unsigned integers are provided. In
these formats, the components are treated as true integers. When such
textures are accessed by a shader, actual integer values are returned.
Pixel operations that read from or write to a texture or color
buffer with unnormalized integer components follow a path similar
to that used for color index pixel operations, except that more
than one component may be provided at once. Integer values flow
through the pixel processing pipe, and no pixel transfer
operations are performed. Integer format enumerants used for such
operations indicate unnormalized integer data.
Textures or render buffers with unnormalized integer formats may also be
attached to framebuffer objects to receive fragment color values written
by a fragment shader. Per-fragment operations that require floating-point
color components, including multisample alpha operations, alpha test,
blending, and dithering, have no effect when the corresponding colors are
written to an integer color buffer. The NV_gpu_program4 and
EXT_gpu_shader4 extensions add the capability to fragment programs and
fragment shaders to write signed and unsigned integer output values.
This extension does not enforce type consistency for texture accesses or
between fragment shaders and the corresponding framebuffer attachments.
The results of a texture lookup from an integer texture are undefined:
* for fixed-function fragment processing, or
* for shader texture accesses expecting floating-point return values.
The color components used for per-fragment operations and written into a
color buffer are undefined:
* for fixed-function fragment processing with an integer color buffer,
* for fragment shaders that write floating-point color components to an
integer color buffer, or
* for fragment shaders that write integer color components to a color
buffer with floating point or normalized integer components.
The official definition of this extension is available here:
http://www.opengl.org/registry/specs/EXT/texture_integer.txt
'''
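# Illustrative sketch (not part of the generated wrapper) of calling the wrapped
# glTexParameterIuivEXT below; the target/pname/values are assumptions for the
# example only:
#   glTexParameterIuivEXT(GL_TEXTURE_2D, GL_TEXTURE_BORDER_COLOR, [0, 0, 0, 255])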
from OpenGL import platform, constant, arrays
from OpenGL import extensions, wrapper
import ctypes
from OpenGL.raw.GL import _types, _glgets
from OpenGL.raw.GL.EXT.texture_integer import *
from OpenGL.raw.GL.EXT.texture_integer import _EXTENSION_NAME
def glInitTextureIntegerEXT():
'''Return boolean indicating whether this extension is available'''
from OpenGL import extensions
return extensions.hasGLExtension( _EXTENSION_NAME )
# INPUT glTexParameterIivEXT.params size not checked against 'pname'
glTexParameterIivEXT=wrapper.wrapper(glTexParameterIivEXT).setInputArraySize(
'params', None
)
# INPUT glTexParameterIuivEXT.params size not checked against 'pname'
glTexParameterIuivEXT=wrapper.wrapper(glTexParameterIuivEXT).setInputArraySize(
'params', None
)
glGetTexParameterIivEXT=wrapper.wrapper(glGetTexParameterIivEXT).setOutput(
'params',size=_glgets._glget_size_mapping,pnameArg='pname',orPassIn=True
)
glGetTexParameterIuivEXT=wrapper.wrapper(glGetTexParameterIuivEXT).setOutput(
'params',size=_glgets._glget_size_mapping,pnameArg='pname',orPassIn=True
)
### END AUTOGENERATED SECTION | gpl-3.0 | 5,966,556,501,597,512,000 | 43.448276 | 79 | 0.801086 | false |
chrys87/fenrir | src/fenrirscreenreader/soundDriver/gstreamerDriver.py | 1 | 4151 | #!/bin/python
# -*- coding: utf-8 -*-
# Fenrir TTY screen reader
# By Chrys, Storm Dragon, and contributers.
from fenrirscreenreader.core import debug
import time, threading
from fenrirscreenreader.core.soundDriver import soundDriver
_gstreamerAvailable = False
try:
import gi
from gi.repository import GLib
gi.require_version('Gst', '1.0')
from gi.repository import Gst
_gstreamerAvailable, args = Gst.init_check(None)
except Exception as e:
_gstreamerAvailable = False
_availableError = str(e)
class driver(soundDriver):
def __init__(self):
soundDriver.__init__(self)
self._source = None
self._sink = None
def initialize(self, environment):
self.env = environment
global _gstreamerAvailable
self._initialized = _gstreamerAvailable
if not self._initialized:
global _availableError
            self.env['runtime']['debug'].writeDebugOut('Gstreamer not available ' + _availableError,debug.debugLevel.ERROR)
return
self._player = Gst.ElementFactory.make('playbin', 'player')
bus = self._player.get_bus()
bus.add_signal_watch()
bus.connect("message", self._onPlayerMessage)
self._pipeline = Gst.Pipeline(name='fenrir-pipeline')
bus = self._pipeline.get_bus()
bus.add_signal_watch()
bus.connect("message", self._onPipelineMessage)
self._source = Gst.ElementFactory.make('audiotestsrc', 'src')
self._sink = Gst.ElementFactory.make('autoaudiosink', 'output')
self._pipeline.add(self._source)
self._pipeline.add(self._sink)
self._source.link(self._sink)
self.mainloop = GLib.MainLoop()
self.thread = threading.Thread(target=self.mainloop.run)
self.thread.start()
def shutdown(self):
if not self._initialized:
return
self.cancel()
self.mainloop.quit()
def _onPlayerMessage(self, bus, message):
if not self._initialized:
return
if message.type == Gst.MessageType.EOS:
self._player.set_state(Gst.State.NULL)
elif message.type == Gst.MessageType.ERROR:
self._player.set_state(Gst.State.NULL)
error, info = message.parse_error()
self.env['runtime']['debug'].writeDebugOut('GSTREAMER: _onPlayerMessage'+ str(error) + str(info),debug.debugLevel.WARNING)
def _onPipelineMessage(self, bus, message):
if not self._initialized:
return
if message.type == Gst.MessageType.EOS:
self._pipeline.set_state(Gst.State.NULL)
elif message.type == Gst.MessageType.ERROR:
self._pipeline.set_state(Gst.State.NULL)
error, info = message.parse_error()
self.env['runtime']['debug'].writeDebugOut('GSTREAMER: _onPipelineMessage'+ str(error) + str(info),debug.debugLevel.WARNING)
def _onTimeout(self, element):
if not self._initialized:
return
element.set_state(Gst.State.NULL)
def playSoundFile(self, fileName, interrupt=True):
if not self._initialized:
return
if interrupt:
self.cancel()
self._player.set_property('volume', self.volume)
self._player.set_property('uri', 'file://%s' % fileName)
self._player.set_state(Gst.State.PLAYING)
def playFrequence(self, frequence, duration, adjustVolume = 0.0, interrupt=True):
if not self._initialized:
return
if interrupt:
self.cancel()
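        # GLib.timeout_add() expects milliseconds, so convert the duration
        # (presumably given in seconds) before scheduling the stop callback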
duration = duration * 1000
self._source.set_property('volume', self.volume * adjustVolume)
self._source.set_property('freq', frequence)
self._pipeline.set_state(Gst.State.PLAYING)
GLib.timeout_add(duration, self._onTimeout, self._pipeline)
def cancel(self, element=None):
if not self._initialized:
return
if element:
element.set_state(Gst.State.NULL)
return
self._player.set_state(Gst.State.NULL)
self._pipeline.set_state(Gst.State.NULL)
| lgpl-3.0 | 8,183,369,646,824,083,000 | 35.734513 | 138 | 0.623464 | false |
t794104/ansible | lib/ansible/modules/cloud/vmware/vmware_vm_facts.py | 8 | 8911 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2015, Joseph Callen <jcallen () csc.com>
# Copyright: (c) 2018, Ansible Project
# Copyright: (c) 2018, Fedor Vompe <f.vompe () comptek.ru>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = r'''
---
module: vmware_vm_facts
short_description: Return basic facts pertaining to a VMware machine guest
description:
- Return basic facts pertaining to a vSphere or ESXi virtual machine guest.
- Cluster name as fact is added in version 2.7.
version_added: '2.0'
author:
- Joseph Callen (@jcpowermac)
- Abhijeet Kasurde (@Akasurde)
- Fedor Vompe (@sumkincpp)
notes:
- Tested on ESXi 6.7, vSphere 5.5 and vSphere 6.5
- From 2.8 and onwards, facts are returned as list of dict instead of dict.
requirements:
- python >= 2.6
- PyVmomi
options:
vm_type:
description:
- If set to C(vm), then facts are gathered for virtual machines only.
- If set to C(template), then facts are gathered for virtual machine templates only.
- If set to C(all), then facts are gathered for all virtual machines and virtual machine templates.
required: False
default: 'all'
choices: [ all, vm, template ]
version_added: 2.5
type: str
show_attribute:
description:
- Attributes related to VM guest shown in facts only when this is set C(true).
default: no
type: bool
version_added: 2.8
folder:
description:
- Specify a folder location of VMs to gather facts from.
- 'Examples:'
- ' folder: /ha-datacenter/vm'
- ' folder: ha-datacenter/vm'
- ' folder: /datacenter1/vm'
- ' folder: datacenter1/vm'
- ' folder: /datacenter1/vm/folder1'
- ' folder: datacenter1/vm/folder1'
- ' folder: /folder1/datacenter1/vm'
- ' folder: folder1/datacenter1/vm'
- ' folder: /folder1/datacenter1/vm/folder2'
type: str
version_added: 2.9
extends_documentation_fragment: vmware.documentation
'''
EXAMPLES = r'''
- name: Gather all registered virtual machines
vmware_vm_facts:
hostname: '{{ vcenter_hostname }}'
username: '{{ vcenter_username }}'
password: '{{ vcenter_password }}'
delegate_to: localhost
register: vmfacts
- debug:
var: vmfacts.virtual_machines
- name: Gather only registered virtual machine templates
vmware_vm_facts:
hostname: '{{ vcenter_hostname }}'
username: '{{ vcenter_username }}'
password: '{{ vcenter_password }}'
vm_type: template
delegate_to: localhost
register: template_facts
- debug:
var: template_facts.virtual_machines
- name: Gather only registered virtual machines
vmware_vm_facts:
hostname: '{{ vcenter_hostname }}'
username: '{{ vcenter_username }}'
password: '{{ vcenter_password }}'
vm_type: vm
delegate_to: localhost
register: vm_facts
- debug:
var: vm_facts.virtual_machines
- name: Get UUID from given VM Name
vmware_vm_facts:
hostname: '{{ vcenter_hostname }}'
username: '{{ vcenter_username }}'
password: '{{ vcenter_password }}'
vm_type: vm
delegate_to: localhost
register: vm_facts
- debug:
msg: "{{ item.uuid }}"
with_items:
- "{{ vm_facts.virtual_machines | json_query(query) }}"
vars:
query: "[?guest_name=='DC0_H0_VM0']"
'''
RETURN = r'''
virtual_machines:
description: list of dictionary of virtual machines and their facts
returned: success
type: list
sample: [
{
"guest_name": "ubuntu_t",
"cluster": null,
"esxi_hostname": "10.76.33.226",
"guest_fullname": "Ubuntu Linux (64-bit)",
"ip_address": "",
"mac_address": [
"00:50:56:87:a5:9a"
],
"power_state": "poweredOff",
"uuid": "4207072c-edd8-3bd5-64dc-903fd3a0db04",
"vm_network": {
"00:50:56:87:a5:9a": {
"ipv4": [
"10.76.33.228"
],
"ipv6": []
}
},
"attributes": {
"job": "backup-prepare"
}
}
]
'''
try:
from pyVmomi import vim
except ImportError:
pass
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.vmware import PyVmomi, get_all_objs, vmware_argument_spec, _get_vm_prop
class VmwareVmFacts(PyVmomi):
def __init__(self, module):
super(VmwareVmFacts, self).__init__(module)
def get_vm_attributes(self, vm):
return dict((x.name, v.value) for x in self.custom_field_mgr
for v in vm.customValue if x.key == v.key)
# https://github.com/vmware/pyvmomi-community-samples/blob/master/samples/getallvms.py
def get_all_virtual_machines(self):
"""
Get all virtual machines and related configurations information
"""
folder = self.params.get('folder')
folder_obj = None
if folder:
folder_obj = self.content.searchIndex.FindByInventoryPath(folder)
if not folder_obj:
self.module.fail_json(msg="Failed to find folder specified by %(folder)s" % self.params)
virtual_machines = get_all_objs(self.content, [vim.VirtualMachine], folder=folder_obj)
_virtual_machines = []
for vm in virtual_machines:
_ip_address = ""
summary = vm.summary
if summary.guest is not None:
_ip_address = summary.guest.ipAddress
if _ip_address is None:
_ip_address = ""
_mac_address = []
all_devices = _get_vm_prop(vm, ('config', 'hardware', 'device'))
if all_devices:
for dev in all_devices:
if isinstance(dev, vim.vm.device.VirtualEthernetCard):
_mac_address.append(dev.macAddress)
net_dict = {}
vmnet = _get_vm_prop(vm, ('guest', 'net'))
if vmnet:
for device in vmnet:
net_dict[device.macAddress] = dict()
net_dict[device.macAddress]['ipv4'] = []
net_dict[device.macAddress]['ipv6'] = []
for ip_addr in device.ipAddress:
if "::" in ip_addr:
net_dict[device.macAddress]['ipv6'].append(ip_addr)
else:
net_dict[device.macAddress]['ipv4'].append(ip_addr)
esxi_hostname = None
esxi_parent = None
if summary.runtime.host:
esxi_hostname = summary.runtime.host.summary.config.name
esxi_parent = summary.runtime.host.parent
cluster_name = None
if esxi_parent and isinstance(esxi_parent, vim.ClusterComputeResource):
cluster_name = summary.runtime.host.parent.name
vm_attributes = dict()
if self.module.params.get('show_attribute'):
vm_attributes = self.get_vm_attributes(vm)
virtual_machine = {
"guest_name": summary.config.name,
"guest_fullname": summary.config.guestFullName,
"power_state": summary.runtime.powerState,
"ip_address": _ip_address, # Kept for backward compatibility
"mac_address": _mac_address, # Kept for backward compatibility
"uuid": summary.config.uuid,
"vm_network": net_dict,
"esxi_hostname": esxi_hostname,
"cluster": cluster_name,
"attributes": vm_attributes
}
vm_type = self.module.params.get('vm_type')
is_template = _get_vm_prop(vm, ('config', 'template'))
if vm_type == 'vm' and not is_template:
_virtual_machines.append(virtual_machine)
elif vm_type == 'template' and is_template:
_virtual_machines.append(virtual_machine)
elif vm_type == 'all':
_virtual_machines.append(virtual_machine)
return _virtual_machines
def main():
argument_spec = vmware_argument_spec()
argument_spec.update(
vm_type=dict(type='str', choices=['vm', 'all', 'template'], default='all'),
show_attribute=dict(type='bool', default='no'),
folder=dict(type='str'),
)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True
)
vmware_vm_facts = VmwareVmFacts(module)
_virtual_machines = vmware_vm_facts.get_all_virtual_machines()
module.exit_json(changed=False, virtual_machines=_virtual_machines)
if __name__ == '__main__':
main()
| gpl-3.0 | 5,044,652,790,495,908,000 | 31.641026 | 105 | 0.586466 | false |
lthurlow/Network-Grapher | proj/external/networkx-1.7/build/lib.linux-i686-2.7/networkx/testing/tests/test_utils.py | 32 | 3058 | from nose.tools import *
import networkx as nx
from networkx.testing import *
# thanks to numpy for this GenericTest class (numpy/testing/test_utils.py)
class _GenericTest(object):
def _test_equal(self, a, b):
self._assert_func(a, b)
def _test_not_equal(self, a, b):
try:
self._assert_func(a, b)
passed = True
except AssertionError:
pass
else:
raise AssertionError("a and b are found equal but are not")
class TestNodesEqual(_GenericTest):
def setUp(self):
self._assert_func = assert_nodes_equal
def test_nodes_equal(self):
a = [1,2,5,4]
b = [4,5,1,2]
self._test_equal(a,b)
def test_nodes_not_equal(self):
a = [1,2,5,4]
b = [4,5,1,3]
self._test_not_equal(a,b)
def test_nodes_with_data_equal(self):
G = nx.Graph()
G.add_nodes_from([1,2,3],color='red')
H = nx.Graph()
H.add_nodes_from([1,2,3],color='red')
self._test_equal(G.nodes(data=True), H.nodes(data=True))
def test_edges_with_data_not_equal(self):
G = nx.Graph()
G.add_nodes_from([1,2,3],color='red')
H = nx.Graph()
H.add_nodes_from([1,2,3],color='blue')
self._test_not_equal(G.nodes(data=True), H.nodes(data=True))
class TestEdgesEqual(_GenericTest):
def setUp(self):
self._assert_func = assert_edges_equal
def test_edges_equal(self):
a = [(1,2),(5,4)]
b = [(4,5),(1,2)]
self._test_equal(a,b)
def test_edges_not_equal(self):
a = [(1,2),(5,4)]
b = [(4,5),(1,3)]
self._test_not_equal(a,b)
def test_edges_with_data_equal(self):
G = nx.MultiGraph()
G.add_path([0,1,2],weight=1)
H = nx.MultiGraph()
H.add_path([0,1,2],weight=1)
self._test_equal(G.edges(data=True, keys=True),
H.edges(data=True, keys=True))
def test_edges_with_data_not_equal(self):
G = nx.MultiGraph()
G.add_path([0,1,2],weight=1)
H = nx.MultiGraph()
H.add_path([0,1,2],weight=2)
self._test_not_equal(G.edges(data=True, keys=True),
H.edges(data=True, keys=True))
class TestGraphsEqual(_GenericTest):
def setUp(self):
self._assert_func = assert_graphs_equal
def test_graphs_equal(self):
G = nx.path_graph(4)
H = nx.Graph()
H.add_path(range(4))
H.name='path_graph(4)'
self._test_equal(G,H)
def test_graphs_not_equal(self):
G = nx.path_graph(4)
H = nx.Graph()
H.add_cycle(range(4))
self._test_not_equal(G,H)
def test_graphs_not_equal2(self):
G = nx.path_graph(4)
H = nx.Graph()
H.add_path(range(3))
H.name='path_graph(4)'
self._test_not_equal(G,H)
def test_graphs_not_equal3(self):
G = nx.path_graph(4)
H = nx.Graph()
H.add_path(range(4))
H.name='path_graph(foo)'
self._test_not_equal(G,H)
| mit | -1,808,296,592,899,554,000 | 27.314815 | 74 | 0.538914 | false |
vmturbo/nova | nova/tests/unit/fake_request_spec.py | 5 | 3302 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils
from oslo_utils import uuidutils
from nova import context
from nova import objects
from nova.tests.unit import fake_flavor
INSTANCE_NUMA_TOPOLOGY = objects.InstanceNUMATopology(
cells=[objects.InstanceNUMACell(id=0, cpuset=set([1, 2]), memory=512),
objects.InstanceNUMACell(id=1, cpuset=set([3, 4]), memory=512)])
INSTANCE_NUMA_TOPOLOGY.obj_reset_changes(recursive=True)
IMAGE_META = objects.ImageMeta.from_dict(
{'status': 'active',
'container_format': 'bare',
'min_ram': 0,
'updated_at': '2014-12-12T11:16:36.000000',
'min_disk': '0',
'owner': '2d8b9502858c406ebee60f0849486222',
'protected': 'yes',
'properties': {
'os_type': 'Linux',
'hw_video_model': 'vga',
'hw_video_ram': '512',
'hw_qemu_guest_agent': 'yes',
'hw_scsi_model': 'virtio-scsi',
},
'size': 213581824,
'name': 'f16-x86_64-openstack-sda',
'checksum': '755122332caeb9f661d5c978adb8b45f',
'created_at': '2014-12-10T16:23:14.000000',
'disk_format': 'qcow2',
'id': 'c8b1790e-a07d-4971-b137-44f2432936cd',
}
)
IMAGE_META.obj_reset_changes(recursive=True)
PCI_REQUESTS = objects.InstancePCIRequests(
requests=[objects.InstancePCIRequest(count=1),
objects.InstancePCIRequest(count=2)])
PCI_REQUESTS.obj_reset_changes(recursive=True)
def fake_db_spec():
req_obj = fake_spec_obj()
db_request_spec = {
'id': 1,
'instance_uuid': req_obj.instance_uuid,
'spec': jsonutils.dumps(req_obj.obj_to_primitive()),
}
return db_request_spec
def fake_spec_obj(remove_id=False):
ctxt = context.RequestContext('fake', 'fake')
req_obj = objects.RequestSpec(ctxt)
if not remove_id:
req_obj.id = 42
req_obj.instance_uuid = uuidutils.generate_uuid()
req_obj.image = IMAGE_META
req_obj.numa_topology = INSTANCE_NUMA_TOPOLOGY
req_obj.pci_requests = PCI_REQUESTS
req_obj.flavor = fake_flavor.fake_flavor_obj(ctxt)
req_obj.retry = objects.SchedulerRetries()
req_obj.limits = objects.SchedulerLimits()
req_obj.instance_group = objects.InstanceGroup()
req_obj.project_id = 'fake'
req_obj.num_instances = 1
req_obj.availability_zone = None
req_obj.ignore_hosts = ['host2', 'host4']
req_obj.force_hosts = ['host1', 'host3']
req_obj.force_nodes = ['node1', 'node2']
req_obj.scheduler_hints = {'hint': ['over-there']}
req_obj.requested_destination = None
# This should never be a changed field
req_obj.obj_reset_changes(['id'])
return req_obj
| apache-2.0 | 6,353,026,256,658,427,000 | 35.285714 | 78 | 0.641732 | false |
TobiBu/POV | Raspi/pov_control.py | 1 | 3927 | #!/usr/bin/env python
import time
from dotstar import Adafruit_DotStar
import get_freq
import threading
class povThread(threading.Thread):
def __init__(self, FreqAverage=5, Path="pov/", start=""):
threading.Thread.__init__(self)
self.datapin = 2 # GPIO-Numbering!
self.clockpin = 3 # GPIO-Numbering!
self.strip = Adafruit_DotStar(0, self.datapin, self.clockpin)
# Notice the number of LEDs is set to 0. This is on purpose...we're asking
# the DotStar module to NOT allocate any memory for this strip...we'll handle
# our own allocation and conversion and will feed it 'raw' data.
self.strip.begin() # Initialize pins for output
self.empty_array=bytearray(60*4) # prepare empty-flash
for x in range(60):
self.empty_array[x*4]=0xFF
self.empty_array[x*4+1]=0x00
self.empty_array[x*4+2]=0x00
self.empty_array[x*4+3]=0x00
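        # Each LED occupies 4 bytes in a raw DotStar frame: a leading header byte
        # (0xFF is assumed to mean full brightness) followed by three colour bytes,
        # so the frame built above simply switches all 60 LEDs off. The pov files
        # loaded in run() are expected to use the same per-column layout.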
self.povPath=Path
self.povFile = start
self.size=[0,0]
self.FAverage=FreqAverage
self.actPeriod=0
self.freq = get_freq.freqThread(NAverage=self.FAverage) # initialize frequency-thread
self.running=False # is Thread displaying a pov-File?
self.NEWrunning=False # want to stop and start new?
self.active=True # is Thread active? (& playing OR waiting to play) -> only False if quitting main.
self.pause=False
self.pos=0
self.loading=False # loading? --> main waits for finishing loading-process
if start!="":
self.running=True
else:
self.off()
self.start()
####status data / show status function
def off(self):
self.running=False
self.pause=False
self.strip.show(self.empty_array)
self.size=[0,0]
self.pos=0
def doPause(self):
if self.running==True:
if self.pause:
self.pause=False
else:
self.pause=True
def stop(self): # only to quit!! else: use self.off()
self.off()
self.freq.running=False
self.active=False
# def restart(self):
# self.off()
# self.running=True
def showPOV(self, File=""):
self.off()
if File!="":
self.povFile=File
self.NEWrunning=True
self.loading=True
def run(self):
while self.active:
if self.NEWrunning==True:
self.running=True
self.NEWrunning=False
if self.running == True:
if self.povFile == "":
print "Error: No pov-file specified!"
self.off()
self.loading=False
else:
f=open(self.povPath+self.povFile,'r')
self.size=map(int,(f.readline().rstrip("\n")).split(','))
width=self.size[0]
print "\nLoading "+self.povPath+self.povFile + " into buffer; size: "+str(self.size[0])+"x"+str(self.size[1])
lines = f.read().splitlines()
array=[0 for i in range(width)]
for i in range(width):
array[i]=bytearray(60*4)
j=0
for LED in lines[i].split(','):
array[i][j]=int(LED)
j=j+1
if i%900==0:
print "\r"+str(100*i/width)+"% loaded."
print "\r100% loaded."
print "\nwaiting for valid frequency-values..."
while (self.freq.period>2.0) or (self.freq.period<0.05):
pass
print "Displaying... period="+ str(self.freq.period) + "\t frequency=" + str(self.freq.frequency)
self.pos=0 # start at beginning; first rotation
# for calculating needed time for one period
timeB=time.time()
timeDiff=0
timeDiff_new=0
self.loading=False
period=.16
while self.running: # Loop
period=self.freq.period
pixel_time=(period-timeDiff)/60.
if self.pause==False: # pause?
for x in range(60): # For each column of image...
self.strip.show(array[x+self.pos*60]) # Write raw data to strip
time.sleep(pixel_time)
self.pos=(self.pos+1)%(width/60) # next slice/rotation; modulo(%)=>endless loop
else:
time.sleep(period)
self.actPeriod=time.time()-timeB
timeDiff_new=self.actPeriod-period
timeDiff=(timeDiff+timeDiff_new)%period
timeB=time.time()
self.off()
time.sleep(0.1)
| mit | 1,625,728,731,807,414,800 | 24.835526 | 114 | 0.648077 | false |
vaygr/ansible | lib/ansible/plugins/action/fetch.py | 28 | 10628 | # (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import base64
from ansible.errors import AnsibleError
from ansible.module_utils._text import to_bytes
from ansible.module_utils.six import string_types
from ansible.module_utils.parsing.convert_bool import boolean
from ansible.plugins.action import ActionBase
from ansible.utils.hashing import checksum, checksum_s, md5, secure_hash
from ansible.utils.path import makedirs_safe
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class ActionModule(ActionBase):
def run(self, tmp=None, task_vars=None):
''' handler for fetch operations '''
if task_vars is None:
task_vars = dict()
result = super(ActionModule, self).run(tmp, task_vars)
del tmp # tmp no longer has any effect
try:
if self._play_context.check_mode:
result['skipped'] = True
result['msg'] = 'check mode not (yet) supported for this module'
return result
source = self._task.args.get('src', None)
dest = self._task.args.get('dest', None)
flat = boolean(self._task.args.get('flat'), strict=False)
fail_on_missing = boolean(self._task.args.get('fail_on_missing', True), strict=False)
validate_checksum = boolean(self._task.args.get('validate_checksum',
self._task.args.get('validate_md5', True)),
strict=False)
# validate source and dest are strings FIXME: use basic.py and module specs
if not isinstance(source, string_types):
result['msg'] = "Invalid type supplied for source option, it must be a string"
if not isinstance(dest, string_types):
result['msg'] = "Invalid type supplied for dest option, it must be a string"
# validate_md5 is the deprecated way to specify validate_checksum
if 'validate_md5' in self._task.args and 'validate_checksum' in self._task.args:
result['msg'] = "validate_checksum and validate_md5 cannot both be specified"
if 'validate_md5' in self._task.args:
display.deprecated('Use validate_checksum instead of validate_md5', version='2.8')
if source is None or dest is None:
result['msg'] = "src and dest are required"
if result.get('msg'):
result['failed'] = True
return result
source = self._connection._shell.join_path(source)
source = self._remote_expand_user(source)
remote_checksum = None
if not self._play_context.become:
# calculate checksum for the remote file, don't bother if using become as slurp will be used
# Force remote_checksum to follow symlinks because fetch always follows symlinks
remote_checksum = self._remote_checksum(source, all_vars=task_vars, follow=True)
# use slurp if permissions are lacking or privilege escalation is needed
remote_data = None
if remote_checksum in ('1', '2', None):
slurpres = self._execute_module(module_name='slurp', module_args=dict(src=source), task_vars=task_vars)
if slurpres.get('failed'):
if not fail_on_missing and (slurpres.get('msg').startswith('file not found') or remote_checksum == '1'):
result['msg'] = "the remote file does not exist, not transferring, ignored"
result['file'] = source
result['changed'] = False
else:
result.update(slurpres)
return result
else:
if slurpres['encoding'] == 'base64':
remote_data = base64.b64decode(slurpres['content'])
if remote_data is not None:
remote_checksum = checksum_s(remote_data)
# the source path may have been expanded on the
# target system, so we compare it here and use the
# expanded version if it's different
remote_source = slurpres.get('source')
if remote_source and remote_source != source:
source = remote_source
# calculate the destination name
if os.path.sep not in self._connection._shell.join_path('a', ''):
source = self._connection._shell._unquote(source)
source_local = source.replace('\\', '/')
else:
source_local = source
dest = os.path.expanduser(dest)
if flat:
if os.path.isdir(to_bytes(dest, errors='surrogate_or_strict')) and not dest.endswith(os.sep):
result['msg'] = "dest is an existing directory, use a trailing slash if you want to fetch src into that directory"
result['file'] = dest
result['failed'] = True
return result
if dest.endswith(os.sep):
# if the path ends with "/", we'll use the source filename as the
# destination filename
base = os.path.basename(source_local)
dest = os.path.join(dest, base)
if not dest.startswith("/"):
# if dest does not start with "/", we'll assume a relative path
dest = self._loader.path_dwim(dest)
else:
# files are saved in dest dir, with a subdir for each host, then the filename
if 'inventory_hostname' in task_vars:
target_name = task_vars['inventory_hostname']
else:
target_name = self._play_context.remote_addr
dest = "%s/%s/%s" % (self._loader.path_dwim(dest), target_name, source_local)
dest = dest.replace("//", "/")
if remote_checksum in ('0', '1', '2', '3', '4', '5'):
result['changed'] = False
result['file'] = source
if remote_checksum == '0':
result['msg'] = "unable to calculate the checksum of the remote file"
elif remote_checksum == '1':
result['msg'] = "the remote file does not exist"
elif remote_checksum == '2':
result['msg'] = "no read permission on remote file"
elif remote_checksum == '3':
result['msg'] = "remote file is a directory, fetch cannot work on directories"
elif remote_checksum == '4':
result['msg'] = "python isn't present on the system. Unable to compute checksum"
elif remote_checksum == '5':
result['msg'] = "stdlib json or simplejson was not found on the remote machine. Only the raw module can work without those installed"
# Historically, these don't fail because you may want to transfer
# a log file that possibly MAY exist but keep going to fetch other
# log files. Today, this is better achieved by adding
# ignore_errors or failed_when to the task. Control the behaviour
            # via fail_on_missing.
if fail_on_missing:
result['failed'] = True
del result['changed']
else:
result['msg'] += ", not transferring, ignored"
return result
# calculate checksum for the local file
local_checksum = checksum(dest)
if remote_checksum != local_checksum:
# create the containing directories, if needed
makedirs_safe(os.path.dirname(dest))
# fetch the file and check for changes
if remote_data is None:
self._connection.fetch_file(source, dest)
else:
try:
f = open(to_bytes(dest, errors='surrogate_or_strict'), 'wb')
f.write(remote_data)
f.close()
except (IOError, OSError) as e:
raise AnsibleError("Failed to fetch the file: %s" % e)
new_checksum = secure_hash(dest)
# For backwards compatibility. We'll return None on FIPS enabled systems
try:
new_md5 = md5(dest)
except ValueError:
new_md5 = None
if validate_checksum and new_checksum != remote_checksum:
result.update(dict(failed=True, md5sum=new_md5,
msg="checksum mismatch", file=source, dest=dest, remote_md5sum=None,
checksum=new_checksum, remote_checksum=remote_checksum))
else:
result.update({'changed': True, 'md5sum': new_md5, 'dest': dest,
'remote_md5sum': None, 'checksum': new_checksum,
'remote_checksum': remote_checksum})
else:
# For backwards compatibility. We'll return None on FIPS enabled systems
try:
local_md5 = md5(dest)
except ValueError:
local_md5 = None
result.update(dict(changed=False, md5sum=local_md5, file=source, dest=dest, checksum=local_checksum))
finally:
self._remove_tmp_path(self._connection._shell.tmpdir)
return result
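    # A minimal task exercising this action plugin might look like the sketch
    # below (the paths are assumptions, not taken from this file):
    #
    #   - fetch:
    #       src: /var/log/syslog
    #       dest: fetched/
    #       flat: no
    #       fail_on_missing: yes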
| gpl-3.0 | -3,589,237,397,303,847,400 | 47.752294 | 153 | 0.553444 | false |
gnina/scripts | calccenters.py | 1 | 1799 | #!/usr/bin/env python3
'''Glob through files in current directory looking for */*_ligand.sdf and */*.gninatypes (assuming PDBbind layout).
Calculate the distance between centers. If types files are passed, create versions with this information,
optionally filtering.
'''
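# Illustrative invocation (a sketch; the types file name is an assumption):
#
#   python3 calccenters.py training.types --filter 20 --suffix _wc
#
# This appends the ligand-centre/gninatypes-centre distance to every kept line of
# training.types and writes the result to training_wc.types. A gninatypes file is
# read below as packed 4-byte values, four per atom, of which the first three are
# taken as the x, y, z coordinates when computing the centre.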
import sys,glob,argparse,os
import numpy as np
import pybel
import struct
import openbabel
openbabel.obErrorLog.StopLogging()
parser = argparse.ArgumentParser()
parser.add_argument('typefiles',metavar='file',type=str, nargs='+',help='Types files to process')
parser.add_argument('--filter',type=float,default=100.0,help='Filter out examples with a distance greater than the specified value')
parser.add_argument('--suffix',type=str,default='_wc',help='Suffix for new types files')
args = parser.parse_args()
centerinfo = dict()
#first process all gninatypes files in current directory tree
for ligfile in glob.glob('*/*_ligand.sdf'):
mol = next(pybel.readfile('sdf',ligfile))
#calc center
center = np.mean([a.coords for a in mol.atoms],axis=0)
dir = ligfile.split('/')[0]
for gtypes in glob.glob('%s/*.gninatypes'%dir):
buf = open(gtypes,'rb').read()
n = len(buf)/4
vals = np.array(struct.unpack('f'*n,buf)).reshape(n/4,4)
lcenter = np.mean(vals,axis=0)[0:3]
dist = np.linalg.norm(center-lcenter)
centerinfo[gtypes] = dist
for tfile in args.typefiles:
fname,ext = os.path.splitext(tfile)
outname = fname+args.suffix+ext
out = open(outname,'w')
for line in open(tfile):
lfile = line.split('#')[0].split()[-1]
if lfile not in centerinfo:
print("Missing",lfile,tfile)
sys.exit(0)
else:
d = centerinfo[lfile]
if d < args.filter:
out.write(line.rstrip()+" %f\n"%d)
| bsd-3-clause | -4,663,845,698,511,210,000 | 34.98 | 115 | 0.657588 | false |
KatonaLab/vividstorm | controllers/viewer/StormLUT.py | 1 | 36415 | from pyqtgraph.Qt import QtGui, QtCore
from pyqtgraph.python2_3 import sortList
import pyqtgraph.functions as fn
import pyqtgraph as pg
import weakref
from pyqtgraph.pgcollections import OrderedDict
from pyqtgraph.colormap import ColorMap
import numpy as np
__all__ = ['TickSliderItem', 'GradientEditorItem']
Gradients = OrderedDict([
('my', {'ticks': [(0.0, (0, 0, 255, 255)), (1.0, (255, 255, 255, 255))], 'mode': 'rgb'}),
('thermal', {'ticks': [(0.3333, (185, 0, 0, 255)), (0.6666, (255, 220, 0, 255)), (1, (255, 255, 255, 255)), (0, (0, 0, 0, 255))], 'mode': 'rgb'}),
('flame', {'ticks': [(0.2, (7, 0, 220, 255)), (0.5, (236, 0, 134, 255)), (0.8, (246, 246, 0, 255)), (1.0, (255, 255, 255, 255)), (0.0, (0, 0, 0, 255))], 'mode': 'rgb'}),
('yellowy', {'ticks': [(0.0, (0, 0, 0, 255)), (0.2328863796753704, (32, 0, 129, 255)), (0.8362738179251941, (255, 255, 0, 255)), (0.5257586450247, (115, 15, 255, 255)), (1.0, (255, 255, 255, 255))], 'mode': 'rgb'} ),
('bipolar', {'ticks': [(0.0, (0, 255, 255, 255)), (1.0, (255, 255, 0, 255)), (0.5, (0, 0, 0, 255)), (0.25, (0, 0, 255, 255)), (0.75, (255, 0, 0, 255))], 'mode': 'rgb'}),
('spectrum', {'ticks': [(1.0, (255, 0, 255, 255)), (0.0, (255, 0, 0, 255))], 'mode': 'hsv'}),
('cyclic', {'ticks': [(0.0, (255, 0, 4, 255)), (1.0, (255, 0, 0, 255))], 'mode': 'hsv'}),
('greyclip', {'ticks': [(0.0, (0, 0, 0, 255)), (0.99, (255, 255, 255, 255)), (1.0, (255, 0, 0, 255))], 'mode': 'rgb'}),
('grey', {'ticks': [(0.0, (0, 0, 0, 255)), (1.0, (255, 255, 255, 255))], 'mode': 'rgb'}),
])
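# Each preset above maps a name to {'ticks': [(position, (r, g, b, a)), ...],
# 'mode': 'rgb' or 'hsv'}; this is the same structure consumed by
# GradientEditorItem.restoreState() further down.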
class TickSliderItem(pg.GraphicsWidget):
## public class
"""**Bases:** :class:`GraphicsWidget <pyqtgraph.GraphicsWidget>`
A rectangular item with tick marks along its length that can (optionally) be moved by the user."""
def __init__(self, color='w', orientation='bottom', allowAdd=True, **kargs):
"""
============= =================================================================================
        **Arguments**
        color         Channel colour stored on the slider; used for the tick outline
                      when a 'tickPen' keyword is supplied.
orientation Set the orientation of the gradient. Options are: 'left', 'right'
'top', and 'bottom'.
allowAdd Specifies whether ticks can be added to the item by the user.
tickPen Default is white. Specifies the color of the outline of the ticks.
Can be any of the valid arguments for :func:`mkPen <pyqtgraph.mkPen>`
============= =================================================================================
"""
## public
pg.GraphicsWidget.__init__(self)
self.orientation = orientation
self.length = 100
self.tickSize = 15
self.ticks = {}
self.maxDim = 20
self.channelColor=color
self.allowAdd = allowAdd
if 'tickPen' in kargs:
self.tickPen = fn.mkPen(self.channelColor)
else:
self.tickPen = fn.mkPen('w')
self.orientations = {
'left': (90, 1, 1),
'right': (90, 1, 1),
'top': (0, 1, -1),
'bottom': (0, 1, 1)
}
self.setOrientation(orientation)
#self.setFrameStyle(QtGui.QFrame.NoFrame | QtGui.QFrame.Plain)
#self.setBackgroundRole(QtGui.QPalette.NoRole)
#self.setMouseTracking(True)
#def boundingRect(self):
#return self.mapRectFromParent(self.geometry()).normalized()
#def shape(self): ## No idea why this is necessary, but rotated items do not receive clicks otherwise.
#p = QtGui.QPainterPath()
#p.addRect(self.boundingRect())
#return p
def paint(self, p, opt, widget):
#p.setPen(fn.mkPen('g', width=3))
#p.drawRect(self.boundingRect())
return
def keyPressEvent(self, ev):
ev.ignore()
def setMaxDim(self, mx=None):
if mx is None:
mx = self.maxDim
else:
self.maxDim = mx
if self.orientation in ['bottom', 'top']:
self.setFixedHeight(mx)
self.setMaximumWidth(16777215)
else:
self.setFixedWidth(mx)
self.setMaximumHeight(16777215)
self.setFixedWidth(320)
self.translate(40,0)
#set length here!!!
#self.setFixedWidth(mx*5)
def setOrientation(self, orientation):
## public
"""Set the orientation of the TickSliderItem.
============= ===================================================================
**Arguments**
orientation Options are: 'left', 'right', 'top', 'bottom'
The orientation option specifies which side of the slider the
ticks are on, as well as whether the slider is vertical ('right'
and 'left') or horizontal ('top' and 'bottom').
============= ===================================================================
"""
self.orientation = orientation
self.setMaxDim()
self.resetTransform()
ort = orientation
if ort == 'top':
self.scale(1, -1)
self.translate(0, -self.height())
elif ort == 'left':
self.rotate(270)
self.scale(1, -1)
self.translate(-self.height(), -self.maxDim)
elif ort == 'right':
self.rotate(270)
self.translate(-self.height(), 0)
#self.setPos(0, -self.height())
elif ort != 'bottom':
raise Exception("%s is not a valid orientation. Options are 'left', 'right', 'top', and 'bottom'" %str(ort))
self.translate(self.tickSize/2., 0)
def addTick(self, x, color=None, movable=True):
## public
"""
Add a tick to the item.
============= ==================================================================
**Arguments**
x Position where tick should be added.
color Color of added tick. If color is not specified, the color will be
white.
movable Specifies whether the tick is movable with the mouse.
============= ==================================================================
"""
if color is None:
color = QtGui.QColor(255,255,255)
tick = Tick(self, [x*self.length, 0], color, movable, self.tickSize, pen=self.tickPen)
self.ticks[tick] = x
tick.setParentItem(self)
return tick
def removeTick(self, tick):
## public
"""
Removes the specified tick.
"""
del self.ticks[tick]
tick.setParentItem(None)
if self.scene() is not None:
self.scene().removeItem(tick)
def tickMoved(self, tick, pos):
#print "tick changed"
## Correct position of tick if it has left bounds.
newX = min(max(0, pos.x()), self.length)
pos.setX(newX)
tick.setPos(pos)
self.ticks[tick] = float(newX) / self.length
def tickMoveFinished(self, tick):
pass
def tickClicked(self, tick, ev):
if ev.button() == QtCore.Qt.RightButton:
self.removeTick(tick)
def widgetLength(self):
if self.orientation in ['bottom', 'top']:
return self.width()
else:
return self.height()
def resizeEvent(self, ev):
wlen = max(40, self.widgetLength())
self.setLength(wlen-self.tickSize-2)
self.setOrientation(self.orientation)
#bounds = self.scene().itemsBoundingRect()
#bounds.setLeft(min(-self.tickSize*0.5, bounds.left()))
#bounds.setRight(max(self.length + self.tickSize, bounds.right()))
#self.setSceneRect(bounds)
#self.fitInView(bounds, QtCore.Qt.KeepAspectRatio)
def setLength(self, newLen):
#private
for t, x in list(self.ticks.items()):
t.setPos(x * newLen + 1, t.pos().y())
self.length = float(newLen)
#def mousePressEvent(self, ev):
#QtGui.QGraphicsView.mousePressEvent(self, ev)
#self.ignoreRelease = False
#for i in self.items(ev.pos()):
#if isinstance(i, Tick):
#self.ignoreRelease = True
#break
##if len(self.items(ev.pos())) > 0: ## Let items handle their own clicks
##self.ignoreRelease = True
#def mouseReleaseEvent(self, ev):
#QtGui.QGraphicsView.mouseReleaseEvent(self, ev)
#if self.ignoreRelease:
#return
#pos = self.mapToScene(ev.pos())
#if ev.button() == QtCore.Qt.LeftButton and self.allowAdd:
#if pos.x() < 0 or pos.x() > self.length:
#return
#if pos.y() < 0 or pos.y() > self.tickSize:
#return
#pos.setX(min(max(pos.x(), 0), self.length))
#self.addTick(pos.x()/self.length)
#elif ev.button() == QtCore.Qt.RightButton:
#self.showMenu(ev)
def mouseClickEvent(self, ev):
if ev.button() == QtCore.Qt.LeftButton and self.allowAdd:
pos = ev.pos()
if pos.x() < 0 or pos.x() > self.length:
return
if pos.y() < 0 or pos.y() > self.tickSize:
return
pos.setX(min(max(pos.x(), 0), self.length))
elif ev.button() == QtCore.Qt.RightButton:
            pass  # right-clicking the empty slider area is a no-op here
#if ev.button() == QtCore.Qt.RightButton:
#if self.moving:
#ev.accept()
#self.setPos(self.startPosition)
#self.moving = False
#self.sigMoving.emit(self)
#self.sigMoved.emit(self)
#else:
#pass
#self.view().tickClicked(self, ev)
###remove
def hoverEvent(self, ev):
if (not ev.isExit()) and ev.acceptClicks(QtCore.Qt.LeftButton):
ev.acceptClicks(QtCore.Qt.RightButton)
## show ghost tick
#self.currentPen = fn.mkPen(255, 0,0)
#else:
#self.currentPen = self.pen
#self.update()
def showMenu(self, ev):
pass
def setTickColor(self, tick, color):
"""Set the color of the specified tick.
============= ==================================================================
**Arguments**
tick Can be either an integer corresponding to the index of the tick
or a Tick object. Ex: if you had a slider with 3 ticks and you
wanted to change the middle tick, the index would be 1.
color The color to make the tick. Can be any argument that is valid for
:func:`mkBrush <pyqtgraph.mkBrush>`
============= ==================================================================
"""
tick = self.getTick(tick)
tick.color = color
tick.update()
#tick.setBrush(QtGui.QBrush(QtGui.QColor(tick.color)))
def setTickValue(self, tick, val):
## public
"""
Set the position (along the slider) of the tick.
============= ==================================================================
**Arguments**
tick Can be either an integer corresponding to the index of the tick
or a Tick object. Ex: if you had a slider with 3 ticks and you
wanted to change the middle tick, the index would be 1.
val The desired position of the tick. If val is < 0, position will be
set to 0. If val is > 1, position will be set to 1.
============= ==================================================================
"""
tick = self.getTick(tick)
val = min(max(0.0, val), 1.0)
x = val * self.length
pos = tick.pos()
pos.setX(x)
tick.setPos(pos)
self.ticks[tick] = val
def tickValue(self, tick):
## public
"""Return the value (from 0.0 to 1.0) of the specified tick.
============= ==================================================================
**Arguments**
tick Can be either an integer corresponding to the index of the tick
or a Tick object. Ex: if you had a slider with 3 ticks and you
wanted the value of the middle tick, the index would be 1.
============= ==================================================================
"""
tick = self.getTick(tick)
return self.ticks[tick]
def getTick(self, tick):
## public
"""Return the Tick object at the specified index.
============= ==================================================================
**Arguments**
tick An integer corresponding to the index of the desired tick. If the
argument is not an integer it will be returned unchanged.
============= ==================================================================
"""
if type(tick) is int:
tick = self.listTicks()[tick][0]
return tick
#def mouseMoveEvent(self, ev):
#QtGui.QGraphicsView.mouseMoveEvent(self, ev)
def listTicks(self):
"""Return a sorted list of all the Tick objects on the slider."""
## public
ticks = list(self.ticks.items())
sortList(ticks, lambda a,b: cmp(a[1], b[1])) ## see pyqtgraph.python2_3.sortList
return ticks
class GradientEditorItem(TickSliderItem):
"""
**Bases:** :class:`TickSliderItem <pyqtgraph.TickSliderItem>`
An item that can be used to define a color gradient. Implements common pre-defined gradients that are
customizable by the user. :class: `GradientWidget <pyqtgraph.GradientWidget>` provides a widget
with a GradientEditorItem that can be added to a GUI.
================================ ===========================================================
**Signals**
sigGradientChanged(self) Signal is emitted anytime the gradient changes. The signal
is emitted in real time while ticks are being dragged or
colors are being changed.
sigGradientChangeFinished(self) Signal is emitted when the gradient is finished changing.
================================ ===========================================================
"""
sigGradientChanged = QtCore.Signal(object)
sigGradientChangeFinished = QtCore.Signal(object)
def ThickValuechangedSpin(self,Value):
try:
if self.LowerSpin.value()<=self.UpperSpin.value():
self.setTickValue(self.listTicks()[0][0], self.LowerSpin.value()/100.0)
self.setTickValue(self.listTicks()[1][0], self.UpperSpin.value()/100.0)
else:
self.setTickValue(self.listTicks()[1][0], self.LowerSpin.value()/100.0)
self.setTickValue(self.listTicks()[0][0], self.UpperSpin.value()/100.0)
except:
pass
def __init__(self, PlotWindow, ChannelNumber, LowerSpin, UpperSpin, *args, **kargs):
"""
Create a new GradientEditorItem.
        PlotWindow, ChannelNumber, LowerSpin and UpperSpin are specific to this viewer
        subclass: the two spin boxes are kept in sync with the lower/upper gradient ticks
        of the given STORM channel. All remaining arguments are passed to
        :func:`TickSliderItem.__init__ <pyqtgraph.TickSliderItem.__init__>`
============= =================================================================================
**Arguments**
orientation Set the orientation of the gradient. Options are: 'left', 'right'
'top', and 'bottom'.
allowAdd Default is True. Specifies whether ticks can be added to the item.
tickPen Default is white. Specifies the color of the outline of the ticks.
Can be any of the valid arguments for :func:`mkPen <pyqtgraph.mkPen>`
============= =================================================================================
"""
self.LowerSpin=LowerSpin
self.LowerSpin.valueChanged.connect(self.ThickValuechangedSpin)
self.UpperSpin=UpperSpin
self.UpperSpin.valueChanged.connect(self.ThickValuechangedSpin)
self.PlotWindow=PlotWindow
self.ChannelNumber=ChannelNumber
self.currentTick = None
self.currentTickColor = None
self.rectSize = 15
#adjust the size of the gradientitem for the plot
self.gradRect = QtGui.QGraphicsRectItem(QtCore.QRectF(0, self.rectSize, 100, self.rectSize))
self.backgroundRect = QtGui.QGraphicsRectItem(QtCore.QRectF(0, -self.rectSize, 100, self.rectSize))
self.backgroundRect.setBrush(QtGui.QBrush(QtCore.Qt.DiagCrossPattern))
self.colorMode = 'rgb'
TickSliderItem.__init__(self, *args, **kargs)
self.colorDialog = QtGui.QColorDialog()
self.colorDialog.setOption(QtGui.QColorDialog.ShowAlphaChannel, True)
self.colorDialog.setOption(QtGui.QColorDialog.DontUseNativeDialog, True)
self.colorDialog.currentColorChanged.connect(self.currentColorChanged)
self.colorDialog.rejected.connect(self.currentColorRejected)
self.colorDialog.accepted.connect(self.currentColorAccepted)
self.backgroundRect.setParentItem(self)
self.gradRect.setParentItem(self)
self.setMaxDim(self.rectSize + self.tickSize)
self.rgbAction = QtGui.QAction('RGB', self)
self.rgbAction.setCheckable(True)
self.rgbAction.triggered.connect(lambda: self.setColorMode('rgb'))
self.hsvAction = QtGui.QAction('HSV', self)
self.hsvAction.setCheckable(True)
self.hsvAction.triggered.connect(lambda: self.setColorMode('hsv'))
## build context menu of gradients
l = self.length
self.length = 100
global Gradients
for g in Gradients:
px = QtGui.QPixmap(100, 15)
p = QtGui.QPainter(px)
self.restoreState(Gradients[g])
grad = self.getGradient()
brush = QtGui.QBrush(grad)
p.fillRect(QtCore.QRect(0, 0, 100, 15), brush)
p.end()
label = QtGui.QLabel()
label.setPixmap(px)
label.setContentsMargins(1, 1, 1, 1)
self.length = l
for t in list(self.ticks.keys()):
self.removeTick(t)
self.addTick(0, QtGui.QColor(0,0,0), True)
self.addTick(1, QtGui.QColor(255,0,0), True)
self.setColorMode('rgb')
self.updateGradient()
def setOrientation(self, orientation):
## public
"""
Set the orientation of the GradientEditorItem.
============= ===================================================================
**Arguments**
orientation Options are: 'left', 'right', 'top', 'bottom'
The orientation option specifies which side of the gradient the
ticks are on, as well as whether the gradient is vertical ('right'
and 'left') or horizontal ('top' and 'bottom').
============= ===================================================================
"""
TickSliderItem.setOrientation(self, orientation)
self.translate(0, self.rectSize)
def showMenu(self, ev):
#private
self.menu.popup(ev.screenPos().toQPoint())
def contextMenuClicked(self, b=None):
#private
#global Gradients
act = self.sender()
self.loadPreset(act.name)
def loadPreset(self, name):
"""
Load a predefined gradient.
""" ## TODO: provide image with names of defined gradients
#global Gradients
self.restoreState(Gradients[name])
def setColorMode(self, cm):
"""
Set the color mode for the gradient. Options are: 'hsv', 'rgb'
"""
## public
if cm not in ['rgb', 'hsv']:
raise Exception("Unknown color mode %s. Options are 'rgb' and 'hsv'." % str(cm))
try:
self.rgbAction.blockSignals(True)
self.hsvAction.blockSignals(True)
self.rgbAction.setChecked(cm == 'rgb')
self.hsvAction.setChecked(cm == 'hsv')
finally:
self.rgbAction.blockSignals(False)
self.hsvAction.blockSignals(False)
self.colorMode = cm
self.updateGradient()
def colorMap(self):
"""Return a ColorMap object representing the current state of the editor."""
if self.colorMode == 'hsv':
raise NotImplementedError('hsv colormaps not yet supported')
pos = []
color = []
for t,x in self.listTicks():
pos.append(x)
c = t.color
color.append([c.red(), c.green(), c.blue(), c.alpha()])
return ColorMap(np.array(pos), np.array(color, dtype=np.ubyte))
def updateGradient(self):
#private
self.gradient = self.getGradient()
self.gradRect.setBrush(QtGui.QBrush(self.gradient))
self.sigGradientChanged.emit(self)
def setLength(self, newLen):
#private (but maybe public)
TickSliderItem.setLength(self, newLen)
self.backgroundRect.setRect(1, -self.rectSize, newLen, self.rectSize)
self.gradRect.setRect(1, -self.rectSize, newLen, self.rectSize)
self.updateGradient()
def currentColorChanged(self, color):
#private
if color.isValid() and self.currentTick is not None:
self.setTickColor(self.currentTick, color)
self.updateGradient()
def currentColorRejected(self):
#private
self.setTickColor(self.currentTick, self.currentTickColor)
self.updateGradient()
def currentColorAccepted(self):
self.sigGradientChangeFinished.emit(self)
def tickClicked(self, tick, ev):
#private
if ev.button() == QtCore.Qt.LeftButton:
if not tick.colorChangeAllowed:
return
self.currentTick = tick
self.currentTickColor = tick.color
#self.colorDialog.setCurrentColor(tick.color)
#self.colorDialog.open()
#color = QtGui.QColorDialog.getColor(tick.color, self, "Select Color", QtGui.QColorDialog.ShowAlphaChannel)
#if color.isValid():
#self.setTickColor(tick, color)
#self.updateGradient()
elif ev.button() == QtCore.Qt.RightButton:
if not tick.removeAllowed:
return
if len(self.ticks) > 2:
self.removeTick(tick)
self.updateGradient()
def tickMoved(self, tick, pos):
#private
TickSliderItem.tickMoved(self, tick, pos)
self.updateGradient()
def tickMoveFinished(self, tick):
Pos=[1,0]
count=0
for x,t in self.listTicks():
Pos[count]=t
count+=1
(self.PlotWindow).UpdateStormChannel(self.ChannelNumber,Pos)
#update spinboxes
self.LowerSpin.setValue(Pos[0]*100)
self.UpperSpin.setValue(Pos[1]*100)
#update plot
self.sigGradientChangeFinished.emit(self)
def getGradient(self):
"""Return a QLinearGradient object."""
g = QtGui.QLinearGradient(QtCore.QPointF(0,0), QtCore.QPointF(self.length,0))
if self.colorMode == 'rgb':
ticks = self.listTicks()
g.setStops([(x, QtGui.QColor(t.color)) for t,x in ticks])
elif self.colorMode == 'hsv': ## HSV mode is approximated for display by interpolating 10 points between each stop
ticks = self.listTicks()
stops = []
stops.append((ticks[0][1], ticks[0][0].color))
for i in range(1,len(ticks)):
x1 = ticks[i-1][1]
x2 = ticks[i][1]
dx = (x2-x1) / 10.
for j in range(1,10):
x = x1 + dx*j
stops.append((x, self.getColor(x)))
stops.append((x2, self.getColor(x2)))
g.setStops(stops)
return g
def getColor(self, x, toQColor=True):
"""
Return a color for a given value.
============= ==================================================================
**Arguments**
x Value (position on gradient) of requested color.
toQColor If true, returns a QColor object, else returns a (r,g,b,a) tuple.
============= ==================================================================
"""
ticks = self.listTicks()
if x <= ticks[0][1]:
c = ticks[0][0].color
if toQColor:
return QtGui.QColor(c) # always copy colors before handing them out
else:
return (c.red(), c.green(), c.blue(), c.alpha())
if x >= ticks[-1][1]:
c = ticks[-1][0].color
if toQColor:
return QtGui.QColor(c) # always copy colors before handing them out
else:
return (c.red(), c.green(), c.blue(), c.alpha())
x2 = ticks[0][1]
for i in range(1,len(ticks)):
x1 = x2
x2 = ticks[i][1]
if x1 <= x and x2 >= x:
break
dx = (x2-x1)
if dx == 0:
f = 0.
else:
f = (x-x1) / dx
c1 = ticks[i-1][0].color
c2 = ticks[i][0].color
if self.colorMode == 'rgb':
r = c1.red() * (1.-f) + c2.red() * f
g = c1.green() * (1.-f) + c2.green() * f
b = c1.blue() * (1.-f) + c2.blue() * f
a = c1.alpha() * (1.-f) + c2.alpha() * f
if toQColor:
return QtGui.QColor(int(r), int(g), int(b), int(a))
else:
return (r,g,b,a)
elif self.colorMode == 'hsv':
h1,s1,v1,_ = c1.getHsv()
h2,s2,v2,_ = c2.getHsv()
h = h1 * (1.-f) + h2 * f
s = s1 * (1.-f) + s2 * f
v = v1 * (1.-f) + v2 * f
c = QtGui.QColor()
c.setHsv(h,s,v)
if toQColor:
return c
else:
return (c.red(), c.green(), c.blue(), c.alpha())
def getLookupTable(self, nPts, alpha=None):
"""
Return an RGB(A) lookup table (ndarray).
============= ============================================================================
**Arguments**
nPts The number of points in the returned lookup table.
alpha True, False, or None - Specifies whether or not alpha values are included
                       in the table. If alpha is None, alpha will be automatically determined.
============= ============================================================================
"""
if alpha is None:
alpha = self.usesAlpha()
if alpha:
table = np.empty((nPts,4), dtype=np.ubyte)
else:
table = np.empty((nPts,3), dtype=np.ubyte)
for i in range(nPts):
x = float(i)/(nPts-1)
color = self.getColor(x, toQColor=False)
table[i] = color[:table.shape[1]]
return table
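    # For example (a sketch, not part of the original file; the variable names are
    # assumptions), an 8-bit table for a pyqtgraph ImageItem could be built as:
    #
    #   lut = editor.getLookupTable(256, alpha=False)   # ndarray of shape (256, 3)
    #   image_item.setLookupTable(lut)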
def usesAlpha(self):
"""Return True if any ticks have an alpha < 255"""
ticks = self.listTicks()
for t in ticks:
if t[0].color.alpha() < 255:
return True
return False
def isLookupTrivial(self):
"""Return True if the gradient has exactly two stops in it: black at 0.0 and white at 1.0"""
ticks = self.listTicks()
if len(ticks) != 2:
return False
if ticks[0][1] != 0.0 or ticks[1][1] != 1.0:
return False
c1 = fn.colorTuple(ticks[0][0].color)
c2 = fn.colorTuple(ticks[1][0].color)
if c1 != (0,0,0,255) or c2 != (255,255,255,255):
return False
return True
def mouseReleaseEvent(self, ev):
#private
TickSliderItem.mouseReleaseEvent(self, ev)
self.updateGradient()
def addTick(self, x, color=None, movable=True, finish=True):
"""
Add a tick to the gradient. Return the tick.
============= ==================================================================
**Arguments**
x Position where tick should be added.
color Color of added tick. If color is not specified, the color will be
the color of the gradient at the specified position.
movable Specifies whether the tick is movable with the mouse.
============= ==================================================================
"""
if color is None:
color = self.getColor(x)
t = TickSliderItem.addTick(self, x, color=color, movable=movable)
t.colorChangeAllowed = True
t.removeAllowed = True
if finish:
self.sigGradientChangeFinished.emit(self)
return t
def removeTick(self, tick, finish=True):
TickSliderItem.removeTick(self, tick)
if finish:
self.sigGradientChangeFinished.emit(self)
def saveState(self):
"""
Return a dictionary with parameters for rebuilding the gradient. Keys will include:
- 'mode': hsv or rgb
- 'ticks': a list of tuples (pos, (r,g,b,a))
"""
## public
ticks = []
for t in self.ticks:
c = t.color
ticks.append((self.ticks[t], (c.red(), c.green(), c.blue(), c.alpha())))
state = {'mode': self.colorMode, 'ticks': ticks}
return state
def restoreState(self, state):
"""
Restore the gradient specified in state.
============= ====================================================================
**Arguments**
state A dictionary with same structure as those returned by
:func:`saveState <pyqtgraph.GradientEditorItem.saveState>`
Keys must include:
- 'mode': hsv or rgb
- 'ticks': a list of tuples (pos, (r,g,b,a))
============= ====================================================================
"""
## public
self.setColorMode(state['mode'])
for t in list(self.ticks.keys()):
self.removeTick(t, finish=False)
for t in state['ticks']:
c = QtGui.QColor(*t[1])
self.addTick(t[0], c, finish=False)
self.updateGradient()
self.sigGradientChangeFinished.emit(self)
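    # saveState()/restoreState() round-trip sketch (the variable names are assumptions):
    #
    #   state = editor.saveState()   # e.g. {'mode': 'rgb', 'ticks': [(0.0, (0, 0, 0, 255)), (1.0, (255, 0, 0, 255))]}
    #   editor.restoreState(state)   # rebuilds the same ticks and re-emits sigGradientChangeFinished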
def setColorMap(self, cm):
self.setColorMode('rgb')
for t in list(self.ticks.keys()):
self.removeTick(t, finish=False)
colors = cm.getColors(mode='qcolor')
for i in range(len(cm.pos)):
x = cm.pos[i]
c = colors[i]
self.addTick(x, c, finish=False)
self.updateGradient()
self.sigGradientChangeFinished.emit(self)
class Tick(QtGui.QGraphicsObject): ## NOTE: Making this a subclass of GraphicsObject instead results in
## activating this bug: https://bugreports.qt-project.org/browse/PYSIDE-86
## private class
sigMoving = QtCore.Signal(object)
sigMoved = QtCore.Signal(object)
def __init__(self, view, pos, color, movable=True, scale=10, pen='w'):
self.movable = movable
self.moving = False
self.view = weakref.ref(view)
self.scale = scale
self.color = color
self.pen = fn.mkPen(pen)
self.hoverPen = fn.mkPen(255,255,0)
self.currentPen = self.pen
self.pg = QtGui.QPainterPath(QtCore.QPointF(0,0))
self.pg.lineTo(QtCore.QPointF(-scale/3**0.5, scale))
self.pg.lineTo(QtCore.QPointF(scale/3**0.5, scale))
self.pg.closeSubpath()
QtGui.QGraphicsObject.__init__(self)
self.setPos(pos[0], pos[1])
if self.movable:
self.setZValue(1)
else:
self.setZValue(0)
def boundingRect(self):
return self.pg.boundingRect()
def shape(self):
return self.pg
def paint(self, p, *args):
p.setRenderHints(QtGui.QPainter.Antialiasing)
p.fillPath(self.pg, fn.mkBrush(self.color))
p.setPen(self.currentPen)
p.drawPath(self.pg)
def mouseDragEvent(self, ev):
if self.movable and ev.button() == QtCore.Qt.LeftButton:
if ev.isStart():
self.moving = True
self.cursorOffset = self.pos() - self.mapToParent(ev.buttonDownPos())
self.startPosition = self.pos()
ev.accept()
if not self.moving:
return
newPos = self.cursorOffset + self.mapToParent(ev.pos())
newPos.setY(self.pos().y())
self.setPos(newPos)
self.view().tickMoved(self, newPos)
self.sigMoving.emit(self)
if ev.isFinish():
self.moving = False
self.sigMoved.emit(self)
self.view().tickMoveFinished(self)
def mouseClickEvent(self, ev):
if ev.button() == QtCore.Qt.RightButton and self.moving:
ev.accept()
self.setPos(self.startPosition)
self.view().tickMoved(self, self.startPosition)
self.moving = False
self.sigMoving.emit(self)
self.sigMoved.emit(self)
else:
self.view().tickClicked(self, ev)
##remove
def hoverEvent(self, ev):
if (not ev.isExit()) and ev.acceptDrags(QtCore.Qt.LeftButton):
ev.acceptClicks(QtCore.Qt.LeftButton)
ev.acceptClicks(QtCore.Qt.RightButton)
self.currentPen = self.hoverPen
else:
self.currentPen = self.pen
self.update()
#def mouseMoveEvent(self, ev):
##print self, "move", ev.scenePos()
#if not self.movable:
#return
#if not ev.buttons() & QtCore.Qt.LeftButton:
#return
#newPos = ev.scenePos() + self.mouseOffset
#newPos.setY(self.pos().y())
##newPos.setX(min(max(newPos.x(), 0), 100))
#self.setPos(newPos)
#self.view().tickMoved(self, newPos)
#self.movedSincePress = True
##self.emit(QtCore.SIGNAL('tickChanged'), self)
#ev.accept()
#def mousePressEvent(self, ev):
#self.movedSincePress = False
#if ev.button() == QtCore.Qt.LeftButton:
#ev.accept()
#self.mouseOffset = self.pos() - ev.scenePos()
#self.pressPos = ev.scenePos()
#elif ev.button() == QtCore.Qt.RightButton:
#ev.accept()
##if self.endTick:
##return
##self.view.tickChanged(self, delete=True)
def mouseReleaseEvent(self, ev):
        pass  # clicks and drags are handled in mouseClickEvent / mouseDragEvent
#if not self.movedSincePress:
#self.view().tickClicked(self, ev)
##if ev.button() == QtCore.Qt.LeftButton and ev.scenePos() == self.pressPos:
##color = QtGui.QColorDialog.getColor(self.color, None, "Select Color", QtGui.QColorDialog.ShowAlphaChannel)
##if color.isValid():
##self.color = color
##self.setBrush(QtGui.QBrush(QtGui.QColor(self.color)))
###self.emit(QtCore.SIGNAL('tickChanged'), self)
##self.view.tickChanged(self)
| lgpl-3.0 | 1,666,403,142,351,277,600 | 37.988223 | 220 | 0.504051 | false |
Teknologforeningen/tf-info | manager/migrations/0001_initial.py | 2 | 2539 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import datetime
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Page',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('order', models.PositiveIntegerField(editable=False, db_index=True)),
('url', models.CharField(max_length=90, verbose_name=b'Url of page to display (relative to root).')),
('duration', models.PositiveIntegerField(default=10, verbose_name=b'Duration (seconds)')),
('title', models.CharField(max_length=100)),
('description', models.TextField(null=True, blank=True)),
('pause_at', models.DateTimeField(null=True, blank=True)),
('hide_top_bar', models.BooleanField(default=False, verbose_name=b'Hide the top bar of the screen')),
('hide_bottom_bar', models.BooleanField(default=False, verbose_name=b'Hide the bottom bar of the screen')),
('active_time_start', models.TimeField(default=datetime.time(0, 0), verbose_name=b'Time of day to start displaying page.')),
('active_time_end', models.TimeField(default=datetime.time(0, 0), verbose_name=b'Time of day to stop displaying page. ')),
('active_date_start', models.DateField(default=datetime.date(2016, 4, 4), verbose_name=b'Date to start displayig page.')),
('active_date_end', models.DateField(null=True, verbose_name=b'Last date to display page.', blank=True)),
('monday', models.BooleanField(default=True)),
('tuesday', models.BooleanField(default=True)),
('wednesday', models.BooleanField(default=True)),
('thursday', models.BooleanField(default=True)),
('friday', models.BooleanField(default=True)),
('saturday', models.BooleanField(default=True)),
('sunday', models.BooleanField(default=True)),
('edited_by', models.ForeignKey(blank=True, to=settings.AUTH_USER_MODEL, null=True)),
],
options={
'ordering': ('order',),
'abstract': False,
},
bases=(models.Model,),
),
]
| bsd-3-clause | -4,253,794,352,449,102,300 | 53.021277 | 140 | 0.599449 | false |
Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2018_12_01/aio/operations/_network_interfaces_operations.py | 1 | 63787 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class NetworkInterfacesOperations:
"""NetworkInterfacesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2018_12_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _delete_initial(
self,
resource_group_name: str,
network_interface_name: str,
**kwargs
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-12-01"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}'} # type: ignore
async def begin_delete(
self,
resource_group_name: str,
network_interface_name: str,
**kwargs
) -> AsyncLROPoller[None]:
"""Deletes the specified network interface.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
network_interface_name=network_interface_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}'} # type: ignore
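    # Illustrative call pattern for the poller returned above (a sketch; the client
    # variable and resource names are assumptions):
    #
    #   poller = await client.network_interfaces.begin_delete("my-rg", "my-nic")
    #   await poller.result()   # waits for the long-running delete to finish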
async def get(
self,
resource_group_name: str,
network_interface_name: str,
expand: Optional[str] = None,
**kwargs
) -> "_models.NetworkInterface":
"""Gets information about the specified network interface.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:param expand: Expands referenced resources.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: NetworkInterface, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2018_12_01.models.NetworkInterface
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterface"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-12-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('NetworkInterface', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}'} # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
network_interface_name: str,
parameters: "_models.NetworkInterface",
**kwargs
) -> "_models.NetworkInterface":
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterface"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-12-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'NetworkInterface')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('NetworkInterface', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('NetworkInterface', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}'} # type: ignore
async def begin_create_or_update(
self,
resource_group_name: str,
network_interface_name: str,
parameters: "_models.NetworkInterface",
**kwargs
) -> AsyncLROPoller["_models.NetworkInterface"]:
"""Creates or updates a network interface.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:param parameters: Parameters supplied to the create or update network interface operation.
:type parameters: ~azure.mgmt.network.v2018_12_01.models.NetworkInterface
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either NetworkInterface or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2018_12_01.models.NetworkInterface]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterface"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
network_interface_name=network_interface_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('NetworkInterface', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}'} # type: ignore
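# Illustrative usage sketch (not part of the generated file). It assumes the aio
# NetworkManagementClient from azure.mgmt.network and DefaultAzureCredential from
# azure-identity; the subscription id, resource names, and subnet id are placeholders.
async def _example_create_or_update_nic():
    from azure.identity.aio import DefaultAzureCredential
    from azure.mgmt.network.aio import NetworkManagementClient
    credential = DefaultAzureCredential()
    client = NetworkManagementClient(credential, "<subscription-id>")
    try:
        # begin_create_or_update returns an AsyncLROPoller; result() waits for the LRO.
        poller = await client.network_interfaces.begin_create_or_update(
            "example-rg",
            "example-nic",
            {
                "location": "westus",
                "ip_configurations": [
                    {"name": "ipconfig1", "subnet": {"id": "<subnet-resource-id>"}},
                ],
            },
        )
        return await poller.result()
    finally:
        await client.close()
        await credential.close()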
async def _update_tags_initial(
self,
resource_group_name: str,
network_interface_name: str,
parameters: "_models.TagsObject",
**kwargs
) -> "_models.NetworkInterface":
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterface"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-12-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._update_tags_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'TagsObject')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('NetworkInterface', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_tags_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}'} # type: ignore
async def begin_update_tags(
self,
resource_group_name: str,
network_interface_name: str,
parameters: "_models.TagsObject",
**kwargs
) -> AsyncLROPoller["_models.NetworkInterface"]:
"""Updates a network interface tags.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:param parameters: Parameters supplied to update network interface tags.
:type parameters: ~azure.mgmt.network.v2018_12_01.models.TagsObject
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either NetworkInterface or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2018_12_01.models.NetworkInterface]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterface"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._update_tags_initial(
resource_group_name=resource_group_name,
network_interface_name=network_interface_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('NetworkInterface', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}'} # type: ignore
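# Sketch of a tags-only update (illustrative; assumes the same aio client as in the
# sketch above, and the tag values are placeholders). begin_update_tags takes a
# TagsObject body rather than a full NetworkInterface.
async def _example_update_nic_tags(client):
    poller = await client.network_interfaces.begin_update_tags(
        "example-rg", "example-nic", {"tags": {"env": "test"}})
    return await poller.result()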
def list_all(
self,
**kwargs
) -> AsyncIterable["_models.NetworkInterfaceListResult"]:
"""Gets all network interfaces in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkInterfaceListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2018_12_01.models.NetworkInterfaceListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterfaceListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-12-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_all.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('NetworkInterfaceListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/networkInterfaces'} # type: ignore
def list(
self,
resource_group_name: str,
**kwargs
) -> AsyncIterable["_models.NetworkInterfaceListResult"]:
"""Gets all network interfaces in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkInterfaceListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2018_12_01.models.NetworkInterfaceListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterfaceListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-12-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('NetworkInterfaceListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces'} # type: ignore
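# Illustrative usage sketch (not part of the generated file): list() and list_all()
# return AsyncItemPaged, so callers consume them with "async for". The client and
# resource-group name are placeholders.
async def _example_print_nics(client, resource_group_name):
    async for nic in client.network_interfaces.list(resource_group_name):
        print(nic.name, nic.provisioning_state)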
async def _get_effective_route_table_initial(
self,
resource_group_name: str,
network_interface_name: str,
**kwargs
) -> Optional["_models.EffectiveRouteListResult"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.EffectiveRouteListResult"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-12-01"
accept = "application/json"
# Construct URL
url = self._get_effective_route_table_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('EffectiveRouteListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_effective_route_table_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/effectiveRouteTable'} # type: ignore
async def begin_get_effective_route_table(
self,
resource_group_name: str,
network_interface_name: str,
**kwargs
) -> AsyncLROPoller["_models.EffectiveRouteListResult"]:
"""Gets all route tables applied to a network interface.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either EffectiveRouteListResult or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2018_12_01.models.EffectiveRouteListResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.EffectiveRouteListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._get_effective_route_table_initial(
resource_group_name=resource_group_name,
network_interface_name=network_interface_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('EffectiveRouteListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_effective_route_table.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/effectiveRouteTable'} # type: ignore
async def _list_effective_network_security_groups_initial(
self,
resource_group_name: str,
network_interface_name: str,
**kwargs
) -> Optional["_models.EffectiveNetworkSecurityGroupListResult"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.EffectiveNetworkSecurityGroupListResult"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-12-01"
accept = "application/json"
# Construct URL
url = self._list_effective_network_security_groups_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('EffectiveNetworkSecurityGroupListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_list_effective_network_security_groups_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/effectiveNetworkSecurityGroups'} # type: ignore
async def begin_list_effective_network_security_groups(
self,
resource_group_name: str,
network_interface_name: str,
**kwargs
) -> AsyncLROPoller["_models.EffectiveNetworkSecurityGroupListResult"]:
"""Gets all network security groups applied to a network interface.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either EffectiveNetworkSecurityGroupListResult or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2018_12_01.models.EffectiveNetworkSecurityGroupListResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.EffectiveNetworkSecurityGroupListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._list_effective_network_security_groups_initial(
resource_group_name=resource_group_name,
network_interface_name=network_interface_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('EffectiveNetworkSecurityGroupListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_list_effective_network_security_groups.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/effectiveNetworkSecurityGroups'} # type: ignore
def list_virtual_machine_scale_set_vm_network_interfaces(
self,
resource_group_name: str,
virtual_machine_scale_set_name: str,
virtualmachine_index: str,
**kwargs
) -> AsyncIterable["_models.NetworkInterfaceListResult"]:
"""Gets information about all network interfaces in a virtual machine in a virtual machine scale
set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_machine_scale_set_name: The name of the virtual machine scale set.
:type virtual_machine_scale_set_name: str
:param virtualmachine_index: The virtual machine index.
:type virtualmachine_index: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkInterfaceListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2018_12_01.models.NetworkInterfaceListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterfaceListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-03-30"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_virtual_machine_scale_set_vm_network_interfaces.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualMachineScaleSetName': self._serialize.url("virtual_machine_scale_set_name", virtual_machine_scale_set_name, 'str'),
'virtualmachineIndex': self._serialize.url("virtualmachine_index", virtualmachine_index, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('NetworkInterfaceListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_virtual_machine_scale_set_vm_network_interfaces.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines/{virtualmachineIndex}/networkInterfaces'} # type: ignore
def list_virtual_machine_scale_set_network_interfaces(
self,
resource_group_name: str,
virtual_machine_scale_set_name: str,
**kwargs
) -> AsyncIterable["_models.NetworkInterfaceListResult"]:
"""Gets all network interfaces in a virtual machine scale set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_machine_scale_set_name: The name of the virtual machine scale set.
:type virtual_machine_scale_set_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkInterfaceListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2018_12_01.models.NetworkInterfaceListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterfaceListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-03-30"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_virtual_machine_scale_set_network_interfaces.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualMachineScaleSetName': self._serialize.url("virtual_machine_scale_set_name", virtual_machine_scale_set_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('NetworkInterfaceListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_virtual_machine_scale_set_network_interfaces.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/networkInterfaces'} # type: ignore
async def get_virtual_machine_scale_set_network_interface(
self,
resource_group_name: str,
virtual_machine_scale_set_name: str,
virtualmachine_index: str,
network_interface_name: str,
expand: Optional[str] = None,
**kwargs
) -> "_models.NetworkInterface":
"""Get the specified network interface in a virtual machine scale set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_machine_scale_set_name: The name of the virtual machine scale set.
:type virtual_machine_scale_set_name: str
:param virtualmachine_index: The virtual machine index.
:type virtualmachine_index: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:param expand: Expands referenced resources.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: NetworkInterface, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2018_12_01.models.NetworkInterface
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterface"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-03-30"
accept = "application/json"
# Construct URL
url = self.get_virtual_machine_scale_set_network_interface.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualMachineScaleSetName': self._serialize.url("virtual_machine_scale_set_name", virtual_machine_scale_set_name, 'str'),
'virtualmachineIndex': self._serialize.url("virtualmachine_index", virtualmachine_index, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('NetworkInterface', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_virtual_machine_scale_set_network_interface.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines/{virtualmachineIndex}/networkInterfaces/{networkInterfaceName}'} # type: ignore
def list_virtual_machine_scale_set_ip_configurations(
self,
resource_group_name: str,
virtual_machine_scale_set_name: str,
virtualmachine_index: str,
network_interface_name: str,
expand: Optional[str] = None,
**kwargs
) -> AsyncIterable["_models.NetworkInterfaceIPConfigurationListResult"]:
"""Get the specified network interface ip configuration in a virtual machine scale set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_machine_scale_set_name: The name of the virtual machine scale set.
:type virtual_machine_scale_set_name: str
:param virtualmachine_index: The virtual machine index.
:type virtualmachine_index: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:param expand: Expands referenced resources.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkInterfaceIPConfigurationListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2018_12_01.models.NetworkInterfaceIPConfigurationListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterfaceIPConfigurationListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-03-30"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_virtual_machine_scale_set_ip_configurations.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualMachineScaleSetName': self._serialize.url("virtual_machine_scale_set_name", virtual_machine_scale_set_name, 'str'),
'virtualmachineIndex': self._serialize.url("virtualmachine_index", virtualmachine_index, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('NetworkInterfaceIPConfigurationListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_virtual_machine_scale_set_ip_configurations.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines/{virtualmachineIndex}/networkInterfaces/{networkInterfaceName}/ipConfigurations'} # type: ignore
async def get_virtual_machine_scale_set_ip_configuration(
self,
resource_group_name: str,
virtual_machine_scale_set_name: str,
virtualmachine_index: str,
network_interface_name: str,
ip_configuration_name: str,
expand: Optional[str] = None,
**kwargs
) -> "_models.NetworkInterfaceIPConfiguration":
"""Get the specified network interface ip configuration in a virtual machine scale set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_machine_scale_set_name: The name of the virtual machine scale set.
:type virtual_machine_scale_set_name: str
:param virtualmachine_index: The virtual machine index.
:type virtualmachine_index: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:param ip_configuration_name: The name of the ip configuration.
:type ip_configuration_name: str
:param expand: Expands referenced resources.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: NetworkInterfaceIPConfiguration, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2018_12_01.models.NetworkInterfaceIPConfiguration
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterfaceIPConfiguration"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-03-30"
accept = "application/json"
# Construct URL
url = self.get_virtual_machine_scale_set_ip_configuration.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualMachineScaleSetName': self._serialize.url("virtual_machine_scale_set_name", virtual_machine_scale_set_name, 'str'),
'virtualmachineIndex': self._serialize.url("virtualmachine_index", virtualmachine_index, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'ipConfigurationName': self._serialize.url("ip_configuration_name", ip_configuration_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('NetworkInterfaceIPConfiguration', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_virtual_machine_scale_set_ip_configuration.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines/{virtualmachineIndex}/networkInterfaces/{networkInterfaceName}/ipConfigurations/{ipConfigurationName}'} # type: ignore
| mit | -1,274,775,144,004,012,000 | 51.327317 | 354 | 0.653017 | false |
abtink/openthread | tests/toranj/test-702-multi-radio-discovery-by-rx.py | 4 | 8658 | #!/usr/bin/env python3
#
# Copyright (c) 2020, The OpenThread Authors.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import time
import wpan
from wpan import verify
# -----------------------------------------------------------------------------------------------------------------------
# Test description: This test covers the behavior of a device after its TREL radio link is temporarily disabled,
# and the rediscovery of TREL by receiving a message over that radio from the neighbor.
#
# r1 ---------- r2
# (15.4+trel) (15.4+trel)
#
# On r2 we disable trel temporarily.
#
test_name = __file__[:-3] if __file__.endswith('.py') else __file__
print('-' * 120)
print('Starting \'{}\''.format(test_name))
# -----------------------------------------------------------------------------------------------------------------------
# Creating `wpan.Nodes` instances
speedup = 4
wpan.Node.set_time_speedup_factor(speedup)
r1 = wpan.Node(wpan.NODE_15_4_TREL)
r2 = wpan.Node(wpan.NODE_15_4_TREL)
c2 = wpan.Node(wpan.NODE_15_4)
# -----------------------------------------------------------------------------------------------------------------------
# Init all nodes
wpan.Node.init_all_nodes()
# -----------------------------------------------------------------------------------------------------------------------
# Build network topology
#
r1.allowlist_node(r2)
r2.allowlist_node(r1)
r2.allowlist_node(c2)
c2.allowlist_node(r2)
r1.form("discover-by-rx")
r2.join_node(r1, wpan.JOIN_TYPE_ROUTER)
c2.join_node(r2, wpan.JOIN_TYPE_SLEEPY_END_DEVICE)
# -----------------------------------------------------------------------------------------------------------------------
# Test implementation
WAIT_TIME = 5
HIGH_PREFERENCE_THRESHOLD = 220
MIN_PREFERENCE_THRESHOLD = 0
r1_ext_address = r1.get(wpan.WPAN_EXT_ADDRESS)[1:-1]
r1_rloc = int(r1.get(wpan.WPAN_THREAD_RLOC16), 16)
r1_ml_address = r1.get(wpan.WPAN_IP6_MESH_LOCAL_ADDRESS)[1:-1]
r2_ext_address = r2.get(wpan.WPAN_EXT_ADDRESS)[1:-1]
r2_rloc = int(r2.get(wpan.WPAN_THREAD_RLOC16), 16)
r2_ml_address = r2.get(wpan.WPAN_IP6_MESH_LOCAL_ADDRESS)[1:-1]
# Wait for r2 to become router and r1 establishes a link with it
def check_r1_router_table():
router_table = wpan.parse_router_table_result(r1.get(wpan.WPAN_THREAD_ROUTER_TABLE))
verify(len(router_table) == 2)
for entry in router_table:
verify(entry.rloc16 == r1_rloc or entry.is_link_established())
wpan.verify_within(check_r1_router_table, WAIT_TIME)
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Check that r1 detected both TREL and 15.4 as supported radio by r2
r1_radios = wpan.parse_list(r1.get(wpan.WPAN_OT_SUPPORTED_RADIO_LINKS))
verify(
len(r1_radios) == 2 and (wpan.RADIO_LINK_IEEE_802_15_4 in r1_radios) and (wpan.RADIO_LINK_TREL_UDP6 in r1_radios))
r2_radios = wpan.parse_list(r2.get(wpan.WPAN_OT_SUPPORTED_RADIO_LINKS))
verify(
len(r2_radios) == 2 and (wpan.RADIO_LINK_IEEE_802_15_4 in r2_radios) and (wpan.RADIO_LINK_TREL_UDP6 in r2_radios))
def check_r1_sees_r2_has_two_radio_links():
r1_neighbor_radios = wpan.parse_multi_radio_result(r1.get(wpan.WPAN_OT_NEIGHBOR_TABLE_MULTI_RADIO_INFO))
verify(len(r1_neighbor_radios) == 1)
verify(len(r1_neighbor_radios[0].radios) == 2)
wpan.verify_within(check_r1_sees_r2_has_two_radio_links, WAIT_TIME)
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Send from r1 to r2 and verify that r1 prefers TREL radio link for sending to r2.
sender = r1.prepare_tx(r1_ml_address, r2_ml_address, "Hi r2 from r1", 5)
recver = r2.prepare_rx(sender)
wpan.Node.perform_async_tx_rx()
verify(sender.was_successful)
verify(recver.was_successful)
r1_neighbor_radios = wpan.parse_multi_radio_result(r1.get(wpan.WPAN_OT_NEIGHBOR_TABLE_MULTI_RADIO_INFO))
verify(len(r1_neighbor_radios) == 1)
r2_radio_info = r1_neighbor_radios[0]
verify(r2_radio_info.supports(wpan.RADIO_LINK_TREL_UDP6))
verify(r2_radio_info.preference(wpan.RADIO_LINK_TREL_UDP6) >= HIGH_PREFERENCE_THRESHOLD)
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Now disable TREL link on r2 and send again. We expect that r1 would quickly learn that trel is
# no longer supported by r2 and prefer 15.4 for tx to r2.
r2.set(wpan.WPAN_OT_TREL_TEST_MODE_ENABLE, 'false')
verify(r2.get(wpan.WPAN_OT_TREL_TEST_MODE_ENABLE) == 'false')
sender = r1.prepare_tx(r1_ml_address, r2_ml_address, "Hi again r2 from r1", 5)
wpan.Node.perform_async_tx_rx()
verify(sender.was_successful)
def check_r1_does_not_prefer_trel_for_r2():
r1_neighbor_radios = wpan.parse_multi_radio_result(r1.get(wpan.WPAN_OT_NEIGHBOR_TABLE_MULTI_RADIO_INFO))
verify(len(r1_neighbor_radios) == 1)
r2_radio_info = r1_neighbor_radios[0]
verify(r2_radio_info.supports(wpan.RADIO_LINK_TREL_UDP6))
verify(r2_radio_info.preference(wpan.RADIO_LINK_TREL_UDP6) <= MIN_PREFERENCE_THRESHOLD)
wpan.verify_within(check_r1_does_not_prefer_trel_for_r2, WAIT_TIME)
# Check that we can send between r1 and r2 (now all tx should use 15.4)
sender = r1.prepare_tx(r1_ml_address, r2_ml_address, "Hi on 15.4 r2 from r1", 5)
recver = r2.prepare_rx(sender)
wpan.Node.perform_async_tx_rx()
verify(sender.was_successful)
verify(recver.was_successful)
r1_neighbor_radios = wpan.parse_multi_radio_result(r1.get(wpan.WPAN_OT_NEIGHBOR_TABLE_MULTI_RADIO_INFO))
verify(len(r1_neighbor_radios) == 1)
r2_radio_info = r1_neighbor_radios[0]
verify(r2_radio_info.supports(wpan.RADIO_LINK_TREL_UDP6))
verify(r2_radio_info.preference(wpan.RADIO_LINK_TREL_UDP6) <= MIN_PREFERENCE_THRESHOLD)
verify(r2_radio_info.supports(wpan.RADIO_LINK_IEEE_802_15_4))
verify(r2_radio_info.preference(wpan.RADIO_LINK_IEEE_802_15_4) >= HIGH_PREFERENCE_THRESHOLD)
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Re-enable trel on r2 and start sending traffic from r2 to r1.
# r2 would use the probe mechanism to discover that trel is enabled again.
# r1 should notice the new rx on trel and update its trel preference for r2.
r2.set(wpan.WPAN_OT_TREL_TEST_MODE_ENABLE, 'true')
verify(r2.get(wpan.WPAN_OT_TREL_TEST_MODE_ENABLE) == 'true')
sender = r2.prepare_tx(r2_ml_address, r1_ml_address, "Probing r1 from r2", 80)
recver = r1.prepare_rx(sender)
wpan.Node.perform_async_tx_rx()
verify(sender.was_successful)
verify(recver.was_successful)
def check_r1_again_prefers_trel_for_r2():
r1_neighbor_radios = wpan.parse_multi_radio_result(r1.get(wpan.WPAN_OT_NEIGHBOR_TABLE_MULTI_RADIO_INFO))
verify(len(r1_neighbor_radios) == 1)
r2_radio_info = r1_neighbor_radios[0]
verify(r2_radio_info.supports(wpan.RADIO_LINK_TREL_UDP6))
verify(r2_radio_info.preference(wpan.RADIO_LINK_TREL_UDP6) >= HIGH_PREFERENCE_THRESHOLD)
wpan.verify_within(check_r1_again_prefers_trel_for_r2, WAIT_TIME)
# -----------------------------------------------------------------------------------------------------------------------
# Test finished
wpan.Node.finalize_all_nodes()
print('\'{}\' passed.'.format(test_name))
| bsd-3-clause | -5,075,471,036,851,587,000 | 41.029126 | 121 | 0.636059 | false |
PaulMakepeace/refine-client-py | google/refine/history.py | 2 | 1201 | #!/usr/bin/env python
"""
OpenRefine history: parsing responses.
"""
# Copyright (c) 2011 Paul Makepeace, Real Programmers. All rights reserved.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
class HistoryEntry(object):
# N.B. e.g. **response['historyEntry'] won't work as keys are unicode :-/
#noinspection PyUnusedLocal
def __init__(self, history_entry_id=None, time=None, description=None, **kwargs):
if history_entry_id is None:
raise ValueError('History entry id must be set')
self.id = history_entry_id
self.description = description
self.time = time
| gpl-3.0 | -1,874,277,796,525,987,000 | 39.033333 | 85 | 0.720233 | false |
mssever/sleep-inhibit | sleepinhibit/config.py | 1 | 3549 | #!/usr/bin/python3
# coding=utf-8
#
# Copyright © 2016 Scott Severance
# Code mixed in from Caffeine Plus and Jacob Vlijm
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
'''The settings module.
The only public object is the get_config() function. Everything else should be
deemed private.
'''
import json
import os
from sleepinhibit.collection import Collection
__all__ = ['get_config']
_settings_obj = None
def get_config():
'''Creates, if necessary, and returns the settings object, ensuring that only one such object exists.'''
global _settings_obj
if _settings_obj:
obj = _settings_obj
else:
obj = _SettingsObject()
_settings_obj = obj
return obj
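# Illustrative usage sketch (not part of this module): callers elsewhere in the
# package are expected to fetch the shared object, change a managed setting, and
# persist it. The 'start_inhibited' key is hypothetical; real keys come from
# data/managed_settings.json.
#
#     from sleepinhibit.config import get_config
#     settings = get_config()
#     settings.update_property('start_inhibited', True)
#     settings.save_settings()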
class _SettingsObject(Collection):
'''The class which stores settings. Don't create an instance directly;
instead, use get_config().'''
def __init__(self):
''' Initialize.
In addition to regular init, this method also initializes certain
settings to values found under data/managed_settings.json, which may be
overwritten by init_settings(), and looks for a config file at a
hard-coded location (~/.config/sleep_inhibit.json).
'''
Collection.__init__(self)
self.config_file = '{}/.config/sleep_inhibit.json'.format(os.environ['HOME'])
# self.managed_settings holds the settings which should be saved to disk,
# as well as their default (initial) values.
with open(os.path.realpath(__file__ + '/../data/managed_settings.json')) as f:
data = '\n'.join(line.strip() for line in f.readlines() if not line.strip().startswith('//'))
self.managed_settings = json.loads(data)
if not os.path.isfile(self.config_file):
with open(self.config_file, 'w') as f:
f.write('//\n//\n{}\n')
for key, value in self.managed_settings.items():
self.add_property(key, value)
self._init_settings()
def _init_settings(self):
'''Initialize settings from the config file.'''
with open(self.config_file) as f:
lines = [line.strip() for line in f.readlines() if not line.strip().startswith('//')]
data = json.loads('\n'.join(lines))
for key, value in data.items():
self.update_property(key, value)
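# For reference, the file consumed here and produced by save_settings() below is
# JSON preceded by "//" comment lines, e.g. (the key shown is hypothetical):
#
#     // Sleep Inhibit Settings
#     // Don't edit this file manually while the program is running ...
#     {"start_inhibited": true}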
def save_settings(self):
'''Save managed settings to the configuration file.
Call this after you update a managed setting. Calling it after setting a
non-managed setting just wastes processor cycles.'''
output = {}
warning = "// Sleep Inhibit Settings\n// Don't edit this file manually while the program is running lest your changes be overwritten."
for setting in self.managed_settings.keys():
output[setting] = self.__dict__[setting]
with open(self.config_file, 'w') as f:
f.write('\n'.join([warning, json.dumps(output, sort_keys=True, indent=2)]))
| gpl-3.0 | 1,605,912,444,025,256,700 | 40.255814 | 142 | 0.657835 | false |
Darthkpo/xtt | openpyxl/charts/tests/test_axis.py | 1 | 1529 | import pytest
@pytest.mark.parametrize("value, result",
[
(1, None),
(0.9, 10),
(0.09, 100),
(-0.09, 100)
]
)
def test_less_than_one(value, result):
from openpyxl.charts.axis import less_than_one
assert less_than_one(value) == result
def test_axis_ctor(Axis):
axis = Axis()
assert axis.title == ""
assert axis.auto_axis is False
def test_axis_auto_axis(Axis):
axis = Axis(auto_axis=True)
with pytest.raises(ZeroDivisionError):
axis.max == 0
with pytest.raises(ZeroDivisionError):
axis.min == 0
with pytest.raises(ZeroDivisionError):
axis.unit == 0
@pytest.mark.parametrize("set_max, set_min, min, max, unit",
[
(10, 0, 0, 12, 2),
(5, 0, 0, 6, 1),
(50000, 0, 0, 60000, 12000),
(1, 0, 0, 2, 1),
(0.9, 0, 0, 1, 0.2),
(0.09, 0, 0, 0.1, 0.02),
(0, -0.09, -0.1, 0, 0.02),
(8, -2, -3, 10, 2)
]
)
def test_scaling(Axis, set_max, set_min, min, max, unit):
axis = Axis(auto_axis=True)
axis.max = set_max
axis.min = set_min
assert axis.min == min
assert axis.max == max
assert axis.unit == unit
| mit | 3,049,963,018,847,955,500 | 29.58 | 60 | 0.419228 | false |
Darkess00/Touch-The-Square | main.py | 1 | 5699 | #imports
import pygame, random, sys, menu
from pygame.locals import *
try:
import android
except ImportError:
android = None
# constants
WIDTH = 640
HEIGHT = 480
REWARDS = {'1': 20, '2': 30, '3': 40, '4': 50, '5': 60}
salir=False
# classes
class Square(pygame.sprite.Sprite):
def __init__(self,info,screen):
pygame.sprite.Sprite.__init__(self)
self.image=load_image('images/square.png')
self.image=pygame.transform.scale(self.image,(info.current_w/15,info.current_w/15))
self.rect=self.image.get_rect()
self.rect.centerx=info.current_w/2
self.rect.centery=info.current_h/2
self.speedx=3
self.speedy=3
def update(self,time,info):
if self.rect.left<=0:
self.speedx=random.randint(5,10)*random.uniform(0.4,1)
if(self.rect.centery<info.current_h/2):
self.speedy=random.randint(5,10)*random.uniform(0.4,1)
elif(self.rect.centery>=info.current_h/2):
self.speedy=random.randint(-10,-5)*random.uniform(0.4,1)
elif self.rect.right>=info.current_w:
self.speedx=random.randint(-10,-5)*random.uniform(0.4,1)
if(self.rect.centery<info.current_h/2):
self.speedy=random.randint(5,10)*random.uniform(0.4,1)
elif(self.rect.centery>=info.current_h/2):
self.speedy=random.randint(-10,-5)*random.uniform(0.4,1)
if self.rect.top<=0:
self.speedy=random.randint(5,10)*random.uniform(0.4,1)
if self.rect.centerx<info.current_w/2:
self.speedx=random.randint(5,10)*random.uniform(0.4,1)
            elif(self.rect.centerx>=info.current_w/2):
self.speedx=random.randint(-10,-5)*random.uniform(0.4,1)
elif self.rect.bottom>=info.current_h:
self.speedy=random.randint(-10,-5)*random.uniform(0.4,1)
if(self.rect.centerx<info.current_w/2):
self.speedx=random.randint(5,10)*random.uniform(0.4,1)
elif(self.rect.centerx>=info.current_w/2):
self.speedx=random.randint(-10,-5)*random.uniform(0.4,1)
self.rect.centerx+=self.speedx*time
self.rect.centery+=self.speedy*time
class Texto(pygame.sprite.Sprite):
text=''
text_rect=''
def __init__(self,cadena,x,y):
pygame.sprite.Sprite.__init__(self)
self.text,self.text_rect=load_text(cadena,x,y)
# functions
#--------------------------------------------------
def load_image(filename,transparent=False):
try: image=pygame.image.load(filename)
except pygame.error, message:
raise SystemExit, message
image = image.convert()
if transparent:
color=image.get_at((0,0))
image.set_colorkey(color,RLEACCEL)
return image
def load_text(texto,x,y,color=(255,255,255)):
font=pygame.font.Font('fonts/fontx.ttf',25)
salida=pygame.font.Font.render(font,texto,1,color)
salida_rect=salida.get_rect()
salida_rect.centerx=x
salida_rect.centery=y
return salida, salida_rect
def check_rewards(contador):
f=open('data/rewards','r')
number=int(f.read())
f.close()
f=open('data/rewards','w')
if number == 0 and contador==REWARDS['1']:
f.write('1')
f.close()
elif number == 1 and contador==REWARDS['2']:
f.write('2')
f.close()
elif number == 2 and contador==REWARDS['3']:
f.write('3')
f.close()
elif number == 3 and contador==REWARDS['4']:
f.write('4')
f.close()
elif number == 4 and contador==REWARDS['5']:
f.write('5')
f.close()
else:
f.write(str(number))
f.close()
def your_top():
f=open('data/max','r')
max=int(f.read())
f.close()
return max
def write_top(top):
f=open('data/max','r')
ftop=int(f.read())
f.close()
if top>ftop:
f=open('data/max','w')
f.write(str(top))
f.close()
#--------------------------------------------------
def main():
intento=0
#screen=pygame.display.set_mode((WIDTH,HEIGHT))
info=pygame.display.Info()
screen=pygame.display.set_mode((info.current_w,info.current_h))
pygame.display.set_caption('Square Game')
#background=load_image('images/black_background.png')
menos=menu.menu(your_top())
square=Square(info,screen)
if menos=='quit':
return None
contador=0
handled=False
if android:
android.init()
android.map_key(android.KEYCODE_BACK, K_ESCAPE)
while True:
keys=pygame.key.get_pressed()
clic=pygame.mouse.get_pressed()
clock=pygame.time.Clock()
time=clock.tick(144)
timer=pygame.time.get_ticks()-intento
restante,restante_rect=load_text(str(contador),info.current_w-30,30)
pasado,pasado_rect=load_text(str((timer-menos)/float(1000)),50,30)
for eventos in pygame.event.get():
if eventos.type == QUIT:
pygame.quit()
break
if eventos.type == MOUSEBUTTONUP:
handled=False
if eventos.type == MOUSEBUTTONDOWN:
square.update(time,info)
if contador<your_top():
top=your_top()
else:
top=contador
write_top(top)
square.update(time,info)
if pygame.Rect.collidepoint(square.rect,pygame.mouse.get_pos()) and not handled:
contador +=1
handled=True
check_rewards(contador)
if keys[K_ESCAPE]:
break
if (timer-menos)/float(1000)>=60:
fin,fin_rect=load_text('You touched the square ' + str(contador) +' times',info.current_w/2,info.current_h/3)
max,max_rect=load_text('Your max touches: ' + str(top),info.current_w/2,info.current_h*0.66)
while True:
for events in pygame.event.get():
if events.type==MOUSEBUTTONDOWN:
main()
if events.type==KEYDOWN:
pygame.quit()
break
screen.fill((0,0,0))
screen.blit(fin,fin_rect)
screen.blit(max,max_rect)
pygame.display.flip()
print square.rect
screen.fill((0,0,0))
#screen.blit(background,(0,0))
screen.blit(restante,restante_rect)
screen.blit(pasado,pasado_rect)
screen.blit(square.image,square.rect)
pygame.display.flip()
return 0
if __name__ == '__main__':
pygame.init()
main()
pygame.quit() | gpl-2.0 | -3,495,836,190,296,383,000 | 25.146789 | 112 | 0.660642 | false |
ewheeler/rapidpro | temba/ivr/views.py | 1 | 2953 | from __future__ import unicode_literals
import json
from django.core.exceptions import ValidationError
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
from django.views.generic import View
from temba.utils import build_json_response
from temba.flows.models import Flow, FlowRun
from .models import IVRCall, IN_PROGRESS, COMPLETED, RINGING
class CallHandler(View):
@csrf_exempt
def dispatch(self, *args, **kwargs):
return super(CallHandler, self).dispatch(*args, **kwargs)
def get(self, request, *args, **kwargs):
return HttpResponse("ILLEGAL METHOD")
def post(self, request, *args, **kwargs):
call = IVRCall.objects.filter(pk=kwargs['pk']).first()
if not call:
return HttpResponse("Not found", status=404)
client = call.channel.get_ivr_client()
if request.REQUEST.get('hangup', 0):
if not request.user.is_anonymous():
user_org = request.user.get_org()
if user_org and user_org.pk == call.org.pk:
client.calls.hangup(call.external_id)
return HttpResponse(json.dumps(dict(status='Canceled')), content_type="application/json")
else:
return HttpResponse("Not found", status=404)
if client.validate(request):
status = request.POST.get('CallStatus', None)
duration = request.POST.get('CallDuration', None)
call.update_status(status, duration)
# update any calls we have spawned with the same
for child in call.child_calls.all():
child.update_status(status, duration)
child.save()
call.save()
# figure out if this is a callback due to an empty gather
is_empty = '1' == request.GET.get('empty', '0')
user_response = request.POST.copy()
# if the user pressed pound, then record no digits as the input
if is_empty:
user_response['Digits'] = ''
hangup = 'hangup' == user_response.get('Digits', None)
if call.status in [IN_PROGRESS, RINGING] or hangup:
if call.is_flow():
response = Flow.handle_call(call, user_response, hangup=hangup)
return HttpResponse(unicode(response))
else:
if call.status == COMPLETED:
# if our call is completed, hangup
run = FlowRun.objects.filter(call=call).first()
if run:
run.set_completed()
return build_json_response(dict(message="Updated call status"))
else: # pragma: no cover
# raise an exception that things weren't properly signed
raise ValidationError("Invalid request signature")
return build_json_response(dict(message="Unhandled"))
| agpl-3.0 | 2,395,663,859,341,281,300 | 37.350649 | 109 | 0.593634 | false |
frankhale/nyana | nyana/plugins/SnippetViewPlugin.py | 1 | 18007 | # SnippetViewPlugin - Provides a templated/abbreviation expansion mechanism for
# the editor.
#
# Copyright (C) 2006-2010 Frank Hale <[email protected]>
#
# ##sandbox - irc.freenode.net
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import pygtk
pygtk.require('2.0')
import gio
import gtk
import gtk.gdk
import gtksourceview2
import pango
import re
import datetime
import os.path
import xml.etree.ElementTree as ET
SNIPPET_XML = "snippets.xml"
MIME_ALIAS = {
"text/x-python" : ["python"],
"application/x-ruby" : ["ruby", "rails"]
}
# Change Date/Time formats as needed
DATE_FORMAT = "%B %d %Y"
TIME_FORMAT = "%H:%M"
DATE = datetime.datetime.now().strftime(DATE_FORMAT)
TIME = datetime.datetime.now().strftime(TIME_FORMAT)
DATETIME = "%s @ %s" % (datetime.datetime.now().strftime(DATE_FORMAT), datetime.datetime.now().strftime(TIME_FORMAT))
class Snippet:
def __init__(self):
self.language=""
self.shortcut=""
self.snippet=""
def mimetype(self):
return MIME[self.language]
class SnippetLoader:
def load_all(self):
SNIPPETS = []
root = ET.parse(SNIPPET_XML)
for snippet in root.getiterator("snippet"):
if snippet.get("language") and snippet.get("shortcut"):
snip = Snippet()
snip.language = snippet.get("language")
snip.shortcut = snippet.get("shortcut")
snip.snippet = snippet.text.strip()
SNIPPETS.append(snip)
return SNIPPETS
def load(self, language):
all_snips = self.load_all()
return [s for s in all_snips if s.language==language]
def get_common(self):
return self.load("common")
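# For reference, SnippetLoader above assumes a snippets.xml shaped roughly like
# the following (the shortcut and body are invented examples, not snippets that
# necessarily ship with the editor):
#
#   <snippets>
#     <snippet language="python" shortcut="def">
#       def ${name}(${args}):
#           ${cursor}
#     </snippet>
#   </snippets>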
# Common snippets that are useful regardless of document, used for built in snippets
COMMON_SNIPPETS = {
"^d" : DATE, # expands to the current date supplied by the date format above
"^t" : TIME, # expands to the current time supplied by the time format above
"^dt" : DATETIME # expands to a combination of the date and time supplied by the formats above
}
BUILT_IN_SNIPPETS = []
# For each of the common snippets make a Snippet object, plug in the key,value and add it to the built in snippets
# list
for KEY,VALUE in COMMON_SNIPPETS.items():
s = Snippet()
s.shortcut = KEY
s.snippet = VALUE
s.language = "common"
BUILT_IN_SNIPPETS.append(s)
class SnippetViewPlugin(object):
metadata = {
"name" : "Snippet Source View Plugin",
"authors" : ["Frank Hale <[email protected]>"],
"website" : "http://github.com/frankhale/nyana",
"version" : "0.6.0",
"development status" : "beta",
"date" : "31 JULY 2007",
"enabled" : True,
"short description" : "Provides abbreviation expansion via tab key",
"long description" : "Provides a snippet feature which allows one to create abbreviations that are expandable by hitting the tab key. Special variables can be inserted into the snippets to make them tabbable and provide a quick way to create code."
}
def __init__(self, editor):
self.editor = editor
self.editor.event_manager.register_listener("buffer_change", self.event_buffer_change)
self.editor.event_manager.register_listener("scroll_to_insert", self.scroll_to_insert)
self.editor.source_view.set_highlight_current_line(True)
self.editor.source_view.set_wrap_mode(gtk.WRAP_NONE)
# regular expression used to find our special variables.
#
# variables look like ${foo}
        self.variable_re = re.compile(r'\${([^\${}]*)}')
self.SNIPPETS = []
self.SNIPPET_MARKS = []
self.SNIPPET_OFFSETS = []
self.SNIPPET_START_MARK = None
self.SNIPPET_END_MARK = None
self.IN_SNIPPET = False
self.HAS_NO_VARIABLES=False
self.TABBED = True
self.mime_type = None
self.editor.source_view.set_show_line_numbers(True)
self.editor.source_view.set_auto_indent(True)
self.editor.source_view.set_resize_mode(gtk.RESIZE_PARENT)
### Comment this out if you don't want Monospace and want the default
### system font. Or change to suit your needs.
default_font = pango.FontDescription("Monospace 10")
if default_font:
self.editor.source_view.modify_font(default_font)
### -------------------------------------------------------- ###
self.editor.source_view.connect("key-press-event", self.key_event)
self.editor.buff.connect("mark-set", self.mark_set)
self.SL = SnippetLoader()
self.SNIPPETS.extend(self.SL.get_common())
self.SNIPPETS.extend(BUILT_IN_SNIPPETS)
# For testing purposes.
#self.syntax_highlight(os.path.abspath("/home/majyk/dev/python/test.py"))
def load_snippets(self):
types = []
try:
types = MIME_ALIAS[self.mime_type]
except KeyError:
print "This mime-type has no snippets defined"
types=None
if not types == None:
print types
if len(types)==1:
self.SNIPPETS.extend(self.SL.load(types[0]))
elif len(types)>1:
for t in types:
self.SNIPPETS.extend(self.SL.load(t))
#print "snippets available:"
#for s in self.SNIPPETS:
# print s.shortcut
def scroll_to_insert(self, parms=None):
self.editor.source_view.scroll_mark_onscreen( self.editor.buff.get_mark("insert"))
def event_buffer_change(self, parms):
if(parms.has_key("filename") and parms.has_key("text")):
self.set_text(parms["filename"], parms["text"])
def set_text(self, filename, text):
if(filename):
self.syntax_highlight(filename)
self.editor.buff.set_text(text)
self.editor.buff.place_cursor(self.editor.buff.get_start_iter())
def mark_set(self, textbuffer, _iter, textmark):
# if we are in a snippet and the user moves the cursor out of the snippet bounds without
# finishing the snippet then we need to clean up and turn the snippet mode off
if self.IN_SNIPPET and self.SNIPPET_START_MARK and self.SNIPPET_END_MARK:
SNIPPET_START_ITER = self.editor.buff.get_iter_at_mark(self.SNIPPET_START_MARK)
SNIPPET_END_ITER = self.editor.buff.get_iter_at_mark(self.SNIPPET_END_MARK)
curr_iter = self.get_cursor_iter()
if not curr_iter.in_range(SNIPPET_START_ITER, SNIPPET_END_ITER):
if self.SNIPPET_START_MARK and self.SNIPPET_END_MARK:
self.IN_SNIPPET = False
# Do mime-type magic and switch the language syntax highlight mode and snippets
def syntax_highlight(self, filename):
if not (os.path.exists(filename)):
print "(%s) does not exist" % (filename)
return
print "filename = (%s)" % (filename)
language = self.get_language(filename)
if language:
self.editor.buff.set_highlight_syntax(True)
self.editor.buff.set_language(language)
#print "Setting the snippets to the following language mime-type: " + mime_type
self.load_snippets()
else:
print "A syntax highlight mode for this mime-type does not exist."
self.editor.buff.set_highlight_syntax(False)
def complete_special_chars(self, widget, char):
curr_iter = self.editor.buff.get_iter_at_mark( self.editor.buff.get_insert() )
self.editor.buff.insert(curr_iter, char)
curr_iter = self.editor.buff.get_iter_at_mark( self.editor.buff.get_insert() )
curr_iter.backward_chars(1)
self.editor.buff.place_cursor(curr_iter)
def get_cursor_iter(self):
cursor_mark = self.editor.buff.get_insert()
cursor_iter = self.editor.buff.get_iter_at_mark(cursor_mark)
return cursor_iter
def get_line_number(self):
        cursor_iter = self.get_cursor_iter()
line_number = cursor_iter.get_line()
return line_number
# Adds marks into the buffer for the start and end offsets for each variable
def mark_variables(self, offsets):
marks = []
for o in offsets:
start_iter = self.editor.buff.get_iter_at_offset(o["start"])
end_iter = self.editor.buff.get_iter_at_offset(o["end"])
start_mark = self.editor.buff.create_mark(None, start_iter, True)
end_mark = self.editor.buff.create_mark(None, end_iter, False)
insert_mark = { "start" : start_mark,
"end" : end_mark }
marks.append(insert_mark)
return marks
    # This algorithm gets variable offsets so that we can use those offsets
    # to get iterators to create marks; the marks are used to select
    # the text and move the cursor using the tab key.
    #
    # This does a little more than just get the variable offsets, it also
    # deletes the variable and replaces it with just the word identifier.
    #
    # If the variable is a ${cursor} it will delete it and get its start offset
    # so when we mark it we can tab to a nonvisibly marked location in the snippet.
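    #
    # A worked example (hypothetical snippet text): if the buffer holds
    #   "for ${item} in ${seq}:${cursor}"
    # the text is rewritten in place to "for item in seq:" and the returned
    # offsets select "item" first, then "seq", and finally the zero-width spot
    # left by ${cursor}, so Tab can visit each location in turn.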
def get_variable_offsets(self,string, current_offset):
offsets = []
start_and_end_offsets = {}
# use the regular expression to get an iterator over our string
# variables will hold match objects
variables = self.variable_re.finditer(string)
for var in variables:
occur_offset_start = current_offset + var.span()[0]
occur_offset_end = current_offset + var.span()[1]
start_iter = self.editor.buff.get_iter_at_offset(occur_offset_start)
end_iter = self.editor.buff.get_iter_at_offset(occur_offset_end)
# delete the full variable
self.editor.buff.delete(start_iter, end_iter)
# if it's a ${cursor} variable we don't want to insert
            # any new text. Just go to the else and get its start
# offset, used later to mark that location
if not var.group() == "${cursor}":
# insert the variable identifier into the buffer
# at the start location
self.editor.buff.insert(start_iter, var.group(1))
current_offset = current_offset-3
# record our start and end offsets used later
# to mark these variables so we can select the text
start_and_end_offsets = {
"start" : occur_offset_start,
"end" : occur_offset_end-3
}
#print "START = %d | END = %d" % (start_and_end_offsets["start"], start_and_end_offsets["end"])
else:
# if we have a ${cursor} then we want a
# marker added with no text so we can
# tab to it.
start_and_end_offsets = {
"start" : occur_offset_start,
"end" : occur_offset_start
}
current_offset = current_offset-len(var.group())
# put the start/end offsets into a list of dictionaries
offsets.append( start_and_end_offsets )
return offsets
    # This function's purpose is to add spaces/tabs to the snippet lines
    # according to the indentation level of the current line.
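    # For example, if the text before the cursor on the current line contains
    # one tab and two spaces, every snippet line after the first is prefixed
    # with that same tab and two spaces, so the expansion lines up with the
    # surrounding block (a rough illustration of the loop below).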
def auto_indent_snippet(self, snippet):
cursor_iter = self.get_cursor_iter()
line_number = cursor_iter.get_line()
start_of_current_line_iter = self.editor.buff.get_iter_at_line(line_number)
text = self.editor.buff.get_text(cursor_iter, start_of_current_line_iter)
space_re = re.compile(' ')
tab_re = re.compile('\t')
tab_count = len(tab_re.findall(text))
space_count = len(space_re.findall(text))
lines = snippet.split("\n")
new_lines = []
tabs = ""
spaces = ""
if tab_count > 0:
for i in range(tab_count):
tabs = tabs + "\t"
if space_count > 0:
for i in range(space_count):
spaces = spaces + " "
for i,line in enumerate(lines):
# don't add any of the spaces/tabs to the first
# line in the snippet
if not i == 0:
snip = tabs + spaces + line
new_lines.append(snip)
else:
new_lines.append(line)
return "\n".join(new_lines)
def snippet_completion(self):
cursor_iter = self.get_cursor_iter()
line_number = cursor_iter.get_line()
start_of_current_line_iter = self.editor.buff.get_iter_at_line(line_number)
text = self.editor.buff.get_text(start_of_current_line_iter, cursor_iter)
words = text.split()
if words:
word_last_typed = words.pop()
word_index = text.find(word_last_typed)
# Run through all snippets trying to find a match
for s in self.SNIPPETS:
key=s.shortcut
value=s.snippet
if word_last_typed == key:
self.TABBED = True
value = self.auto_indent_snippet(value)
word_index = text.rfind(word_last_typed)
index_iter = self.editor.buff.get_iter_at_line_offset(line_number, word_index)
end_iter = self.editor.buff.get_iter_at_line_offset(line_number, word_index+len(word_last_typed))
self.editor.buff.delete(index_iter, end_iter)
overall_offset = index_iter.get_offset()
self.editor.buff.insert(index_iter, value)
start_mark_iter = self.editor.buff.get_iter_at_line_offset(line_number, word_index)
end_mark_iter = self.editor.buff.get_iter_at_offset(start_mark_iter.get_offset()+len(value))
self.SNIPPET_START_MARK = self.editor.buff.create_mark(None, start_mark_iter, True)
self.SNIPPET_END_MARK = self.editor.buff.create_mark(None, end_mark_iter, False)
offsets = self.get_variable_offsets(value, overall_offset)
if offsets:
marks = self.mark_variables(offsets)
if marks:
_iter = self.editor.buff.get_iter_at_offset( offsets[0]["start"] )
self.editor.buff.place_cursor(_iter)
marks.reverse()
for mark in marks:
self.SNIPPET_MARKS.insert(0, mark)
offsets.reverse()
for offset in offsets:
self.SNIPPET_OFFSETS.insert(0,offset)
self.IN_SNIPPET = True
else:
self.HAS_NO_VARIABLES=True
def pair_text(self, pair_chars):
selection = self.editor.buff.get_selection_bounds()
if(selection):
selected_text = self.editor.buff.get_text(selection[0], selection[1])
self.editor.buff.delete(selection[0], selection[1])
self.editor.buff.insert_at_cursor("%s%s%s" % (pair_chars[0],selected_text,pair_chars[1]))
return True
return False
def comment_line(self, comment_char):
selection = self.editor.buff.get_selection_bounds()
if(selection):
selected_text = self.editor.buff.get_text(selection[0], selection[1])
self.editor.buff.delete(selection[0], selection[1])
for line in selected_text.splitlines(True):
self.editor.buff.insert_at_cursor("%s %s" % (comment_char, line))
return True
return False
def key_event(self, widget, key_press):
keycodes = {
"space" : 32,
"tab" : 65289,
"quote" : 34,
"open_brace" : 123,
"open_bracket" : 91,
"open_paren" : 40,
"less_than" : 60,
"single_quote" : 39,
"pound" : 35
}
# Need to add a new key, just uncomment this, run the program
# and look at the output from the key press
#print key_press.keyval
if not key_press.keyval == keycodes["tab"]:
self.TABBED = False
if key_press.keyval == keycodes["pound"]:
if key_press.state & gtk.gdk.SHIFT_MASK:
comment_char = None
if(self.mime_type == ("text/x-python") or
self.mime_type == ("application/x-ruby") or
self.mime_type == ("application/x-shellscript")
):
comment_char = "#"
elif (self.mime_type == ("text/x-java") or
self.mime_type == ("text/x-c++src")
):
comment_char = "//"
if(comment_char):
if(self.comment_line(comment_char)):
return True
if key_press.keyval == keycodes["quote"]:
if (self.pair_text(["\"", "\""])):
return True
elif key_press.keyval == keycodes["open_brace"]:
if (self.pair_text(["{", "}"])):
return True
elif key_press.keyval == keycodes["open_bracket"]:
if (self.pair_text(["[", "]"])):
return True
elif key_press.keyval == keycodes["open_paren"]:
if (self.pair_text(["(", ")"])):
return True
elif key_press.keyval == keycodes["less_than"]:
if (self.pair_text(["<", ">"])):
return True
elif key_press.keyval == keycodes["single_quote"]:
if (self.pair_text(["\'", "\'"])):
return True
elif key_press.keyval == keycodes["tab"]:
if not self.TABBED:
self.snippet_completion()
if self.HAS_NO_VARIABLES:
self.HAS_NO_VARIABLES=False
return True
if(len(self.SNIPPET_MARKS)>0):
for i, v in enumerate(self.SNIPPET_MARKS):
if len(self.SNIPPET_MARKS)>1:
self.editor.source_view.scroll_mark_onscreen(self.SNIPPET_MARKS[i+1]["start"])
_iter = self.editor.buff.get_iter_at_mark(v["start"])
mark_offset = _iter.get_offset()
self.editor.buff.select_range( self.editor.buff.get_iter_at_mark(v["start"]), self.editor.buff.get_iter_at_mark(v["end"]))
self.editor.buff.delete_mark(v["start"])
self.editor.buff.delete_mark(v["end"])
del self.SNIPPET_MARKS[i]
del self.SNIPPET_OFFSETS[i]
if (i==len(self.SNIPPET_OFFSETS)):
self.IN_SNIPPET = False
self.editor.buff.delete_mark(self.SNIPPET_START_MARK)
self.editor.buff.delete_mark(self.SNIPPET_END_MARK)
break
return True
return False
def load(self):
pass
def unload(self):
pass
def __get_language_for_mime_type(self, mime):
from gtksourceview2 import language_manager_get_default
lang_manager = language_manager_get_default()
lang_ids = lang_manager.get_language_ids()
for i in lang_ids:
lang = lang_manager.get_language(i)
for m in lang.get_mime_types():
if m == mime: return lang
return None
def get_language(self, uri):
try:
if uri is None: return None
self.mime_type = gio.File(uri.strip()).query_info("*").get_content_type()
language = self.__get_language_for_mime_type(self.mime_type)
except RuntimeError:
print "Caught runtime error when determining mimetype or language"
return None
return language
| gpl-3.0 | -7,259,528,532,522,187,000 | 29.468697 | 250 | 0.673738 | false |
google/makani | lib/python/batch_sim/scoring_functions/hover.py | 1 | 14809 | # Copyright 2020 Makani Technologies LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Scoring functions relating to the hover controller."""
from makani.analysis.control import geometry
from makani.avionics.common import plc_messages
from makani.lib.python import c_helpers
from makani.lib.python.batch_sim import scoring_functions
from makani.lib.python.h5_utils import numpy_utils
import numpy as np
import pandas as pd
import scoring_functions_util as scoring_util
_GROUND_STATION_MODE_HELPER = c_helpers.EnumHelper(
'GroundStationMode', plc_messages)
class TetherElevationScoringFunction(
scoring_functions.DoubleSidedLimitScoringFunction):
"""Tests the tether elevation."""
def __init__(self, bad_lower_limit, good_lower_limit, good_upper_limit,
bad_upper_limit, severity, transform_stages=None,
sustained_duration=0.0, extra_system_labels=None):
super(TetherElevationScoringFunction, self).__init__(
('Tether Elevation %s' % transform_stages if transform_stages
else 'Tether Elevation'),
'deg', bad_lower_limit, good_lower_limit,
good_upper_limit, bad_upper_limit, severity)
self._sustained_duration = sustained_duration
self._transform_stages = transform_stages
self._system_labels = ['controls', 'tether elevation']
if extra_system_labels:
self._system_labels += extra_system_labels
def GetSystemLabels(self):
return self._system_labels
def GetValue(self, output):
if (np.isnan(output['tether_elevation_min']) or
np.isnan(output['tether_elevation_max'])):
return float('nan')
return np.array([output['tether_elevation_min'],
output['tether_elevation_max']])
def GetOutput(self, timeseries):
tether_elevation = timeseries['tether_elevation']
if tether_elevation is None:
return {
'tether_elevation_max': float('nan'),
'tether_elevation_min': float('nan')
}
if self._transform_stages:
gs02_mode = timeseries['gs02_mode']
gs02_transform_stage = timeseries['gs02_transform_stage']
mask = gs02_mode == _GROUND_STATION_MODE_HELPER.Value('Transform')
stage_mask = np.zeros((np.size(tether_elevation),), dtype=bool)
for s in self._transform_stages:
stage_mask |= gs02_transform_stage == s
mask &= stage_mask
tether_elevation = tether_elevation[mask]
if not np.size(tether_elevation):
return {
'tether_elevation_max': float('nan'),
'tether_elevation_min': float('nan')
}
else:
return {
'tether_elevation_max': max(tether_elevation),
'tether_elevation_min': min(tether_elevation),
}
else:
if self._sustained_duration is None:
return {
'tether_elevation_max': max(tether_elevation),
'tether_elevation_min': min(tether_elevation),
}
min_sustained_tether_ele, max_sustained_tether_ele = (
scoring_util.GetSustainedValue(
tether_elevation, self._good_lower_limit, self._good_upper_limit,
self._sustained_duration, self._t_samp))
return {
'tether_elevation_max': max_sustained_tether_ele,
'tether_elevation_min': min_sustained_tether_ele,
}
def GetTimeSeries(self, params, sim, control):
# TODO: Use the Gs02TransformStageFilter.
if self._transform_stages:
gs02_mode, gs02_transform_stage, tether_elevation = (
self._SelectTelemetry(
sim, control, ['gs02_mode', 'gs02_transform_stage',
'tether_elevation']))
if (not scoring_util.IsSelectionValid(gs02_mode) or
not scoring_util.IsSelectionValid(gs02_transform_stage) or
not scoring_util.IsSelectionValid(tether_elevation)):
return {
'gs02_mode': None,
'gs02_transform_stage': None,
'tether_elevation': None
}
else:
tether_elevation_deg = np.rad2deg(tether_elevation)
return {
'gs02_mode': gs02_mode,
'gs02_transform_stage': gs02_transform_stage,
'tether_elevation': tether_elevation_deg
}
else:
time, tether_elevation = self._SelectTelemetry(
sim, control, ['time', 'tether_elevation'])
if not (scoring_util.IsSelectionValid(tether_elevation) or
scoring_util.IsSelectionValid(time)):
return {
'tether_elevation': None
}
tether_elevation_deg = np.rad2deg(tether_elevation)
# Low pass, symmetrically (2nd order) filter the tether elevation.
cut_off_freq = 0.4
tether_elevation_deg_f = scoring_util.LpFiltFiltTimeSeries(
time, tether_elevation_deg, cut_off_freq)
self._t_samp = scoring_util.GetTimeSamp(time)
# Return filtered data.
return {
'tether_elevation': tether_elevation_deg_f
}
class TetherElevationOscillationScoringFunction(
scoring_functions.SingleSidedLimitScoringFunction):
"""Tests the tether elevation oscillations."""
def __init__(self, good_limit, bad_limit, severity, cut_off_freq=1.0/30.0):
super(TetherElevationOscillationScoringFunction, self).__init__(
('Tether Elevation Oscillations'),
'deg', good_limit, bad_limit, severity)
self._cut_off_freq = cut_off_freq
self._t_window = 2./self._cut_off_freq
def GetSystemLabels(self):
return ['controls', 'tether elevation']
def GetValue(self, output):
return output['tether_elevation_std']
def GetOutput(self, timeseries):
return {'tether_elevation_std': np.nanmax(
timeseries['tether_elevation_std'])}
def GetTimeSeries(self, params, sim, control):
time, tether_elevation, tether_elevation_valid = self._SelectTelemetry(
sim, control, ['time', 'tether_elevation', 'tether_elevation_valid'])
if not (scoring_util.IsSelectionValid(tether_elevation) or
scoring_util.IsSelectionValid(time)):
return {'tether_elevation_std': np.array([float('nan')])}
tether_elevation_deg = np.rad2deg(
tether_elevation[tether_elevation_valid == 1])
# Low pass, symmetrically (2nd order) filter the tether elevation.
tether_elevation_deg_f = scoring_util.LpFiltFiltTimeSeries(
time, tether_elevation_deg, self._cut_off_freq)
# Calculate a rolling StDev of the difference between the raw signal and
# the filtered signal.
t_samp = scoring_util.GetTimeSamp(time)
if np.isnan(t_samp):
return {'tether_elevation_std': np.array([float('nan')])}
n_window = int(self._t_window / t_samp)
elevation_deviation_df = pd.DataFrame(
tether_elevation_deg - tether_elevation_deg_f)
tether_elevation_std = elevation_deviation_df.rolling(
n_window, min_periods=n_window).std().values.flatten()
return {'tether_elevation_std': tether_elevation_std}
class PanelAzimuthTrackingScoringFunction(
scoring_functions.DoubleSidedLimitScoringFunction):
"""Tests whether the tether azimuth in p-frame is within acceptable limits."""
def __init__(self, bad_lower_limit, good_lower_limit, good_upper_limit,
bad_upper_limit, severity, extra_system_labels=None):
super(PanelAzimuthTrackingScoringFunction, self).__init__(
'Panel Azimuth Limit', 'deg', bad_lower_limit, good_lower_limit,
good_upper_limit, bad_upper_limit, severity)
# TODO(b/143912508): Fix the numerous false positive cases before removing
# the experimental label.
self._system_labels = ['controls', 'experimental']
if extra_system_labels:
self._system_labels += extra_system_labels
def GetSystemLabels(self):
return self._system_labels
def GetValue(self, output):
if (np.isnan(output['tether_azimuth_min']) or
np.isnan(output['tether_azimuth_max'])):
return float('nan')
return np.array([output['tether_azimuth_min'],
output['tether_azimuth_max']])
def GetOutput(self, timeseries):
tether_azimuth = timeseries['tether_azimuth_p']
if tether_azimuth is None or tether_azimuth.size == 0:
return {
'tether_azimuth_max': float('nan'),
'tether_azimuth_min': float('nan')
}
else:
return {
'tether_azimuth_max': max(tether_azimuth),
'tether_azimuth_min': min(tether_azimuth),
}
def GetTimeSeries(self, params, sim, control):
tether_azimuth, platform_azi = (
self._SelectTelemetry(sim, control, ['tether_azimuth', 'platform_azi']))
if not scoring_util.IsSelectionValid(tether_azimuth):
return {
'tether_azimuth_p': None,
}
else:
# Only evaluate panel azimuth tracking when the kite is in the air.
tether_azimuth = numpy_utils.Wrap(
(tether_azimuth - platform_azi), -np.pi, np.pi)
return {
'tether_azimuth_p': np.rad2deg(tether_azimuth),
}
class HoverPositionScoringFunction(
scoring_functions.SingleSidedLimitScoringFunction):
"""Tests the maximum error in hover position."""
def __init__(self, dim, good_limit, bad_limit, severity,
dist_from_perch_limit=8.5):
super(HoverPositionScoringFunction, self).__init__(
'Position Error ' + dim, 'm', good_limit, bad_limit, severity)
self._dim = dim
assert dist_from_perch_limit > 0.0
self._dist_from_perch_limit = dist_from_perch_limit
def GetSystemLabels(self):
return ['controls']
def GetValue(self, output):
return output['max_pos_err_g']
def GetOutput(self, timeseries):
return {'max_pos_err_g': np.max(timeseries['pos_err_g'])}
def GetTimeSeries(self, params, sim, control):
wing_xg, wing_pos_g_cmd = self._SelectTelemetry(
sim, control, ['wing_xg', 'wing_pos_g_cmd'])
pos_err_g = np.abs(wing_pos_g_cmd[self._dim] - wing_xg[self._dim])
return {'pos_err_g': pos_err_g}
class PerchedPositionScoringFunction(
scoring_functions.SingleSidedLimitScoringFunction):
"""Evaluates the error between the perched position target and actual."""
def __init__(self, good_limit, bad_limit, severity):
super(PerchedPositionScoringFunction, self).__init__(
'Perched Position Error', 'm', good_limit, bad_limit, severity)
def GetSystemLabels(self):
# Manually add descend flight mode since mode filter doesn't seem to work.
# TODO(b/145831658): Figure out why flight mode filter doesn't work.
return ['controls', 'kFlightModeHoverDescend']
def GetValue(self, output):
return output['xy_perched_pos_err']
def GetOutput(self, timeseries):
return {
'xy_perched_pos_err': timeseries['xy_perched_pos_err']
}
def GetTimeSeries(self, params, sim, control):
wing_xg, buoy_xg, dcm_g2v, platform_azi, gain_ramp = self._SelectTelemetry(
sim, control, ['wing_xg', 'buoy_xg', 'dcm_g2v', 'platform_azi',
'hover_gain_ramp_scale'],
flight_modes='kFlightModeHoverDescend')
hover_path_params = params['control_params']['hover']['path']
gain_ramp_down_idx = np.where(gain_ramp < 1e-8)
if (np.size(gain_ramp_down_idx) == 0 or
not scoring_util.IsSelectionValid(platform_azi)):
xy_perched_pos_err = float('nan')
else:
last_gain_ramp_down_idx = gain_ramp_down_idx[0][0]
dcm_v2p = geometry.AngleToDcm(
platform_azi[last_gain_ramp_down_idx], 0.0, 0.0, 'ZYX')
dcm_g2p = np.matmul(np.matrix(dcm_g2v[last_gain_ramp_down_idx, :, :]),
dcm_v2p)
final_wing_pos_g = np.array(wing_xg[last_gain_ramp_down_idx].tolist())
final_buoy_pos_g = np.array(buoy_xg[last_gain_ramp_down_idx].tolist())
final_wing_pos_p = np.matmul(dcm_g2p, final_wing_pos_g - final_buoy_pos_g)
perch_wing_pos_p = hover_path_params['perched_wing_pos_p'].tolist()
perched_wing_pos_err = final_wing_pos_p - perch_wing_pos_p
xy_perched_pos_err = np.sqrt(
perched_wing_pos_err[0, 0]**2 + perched_wing_pos_err[0, 1]**2)
return {'xy_perched_pos_err': xy_perched_pos_err}
class HoverAngleScoringFunction(
scoring_functions.SingleSidedLimitScoringFunction):
"""Tests the approximate maximum error in hover attitude."""
def __init__(self, dim, good_limit, bad_limit, severity,
dist_from_perch_limit=8.5):
super(HoverAngleScoringFunction, self).__init__(
'(Approx.) Angle Error ' + dim, 'rad', good_limit, bad_limit, severity)
self._dim = dim
assert dist_from_perch_limit > 0.0
self._dist_from_perch_limit = dist_from_perch_limit
def GetSystemLabels(self):
return ['controls']
def GetValue(self, output):
return output['approx_angle_error_b_max']
def GetOutput(self, timeseries):
return {
'approx_angle_error_b_max': np.max(timeseries['approx_angle_error_b'])
}
def GetTimeSeries(self, params, sim, control):
hover_angles, hover_angles_cmd = self._SelectTelemetry(
sim, control, ['hover_angles', 'hover_angles_cmd'])
# TODO: This angle error is approximate because it is
# treating the axis-angle command and measured values as vectors.
# The correct angle error is calculated in hover_angles.c.
approx_angle_error_b = np.abs(hover_angles_cmd[self._dim]
- hover_angles[self._dim])
return {'approx_angle_error_b': approx_angle_error_b}
class HoverTensionControlScoringFunction(
scoring_functions.SingleSidedLimitScoringFunction):
"""Tests the efficacy of hover tension control."""
def __init__(self, good_limit, bad_limit, severity):
super(HoverTensionControlScoringFunction, self).__init__(
'Hover Tension Error (RMS)', 'kN', good_limit, bad_limit, severity)
self._sources = ['control']
def GetSystemLabels(self):
return ['controls', 'experimental']
def GetValue(self, output):
return output['tension_error_rms']
def GetOutput(self, timeseries):
return {
'tension_error_rms': (
np.sqrt(np.mean(timeseries['tension_error']**2.0))/1e3)
}
def GetTimeSeries(self, params, sim, control):
tension, tension_cmd = self._SelectTelemetry(
sim, control, ['tether_tension', 'tether_tension_cmd'])
return {'tension_error': tension_cmd - tension}
| apache-2.0 | -2,537,646,379,653,478,400 | 36.491139 | 80 | 0.658181 | false |
Zogg/digidone | release/scripts/startup/component.py | 1 | 1104 | # -*- coding: utf-8 -*-
import bpy
from bpy.props import *
from digidone.props import component_type_save, component_type_get, dgd_add_props, ComponentProps
class ComponentTypeOperator(bpy.types.Operator):
bl_idname = "object.component_type_op"
bl_label = "Save Component Type"
ct_name = StringProperty(name="Component Type Name")
def execute(self, context):
try:
obj = bpy.context.active_object
ct_props = ComponentProps(obj['dgd_width'], obj['dgd_depth'], obj['dgd_height'])
component_type_save(self.ct_name, props=ct_props)
except Exception as e:
print(e)
return {'FINISHED'}
def invoke(self, context, event):
obj = bpy.context.active_object
self.ct_name = component_type_get(obj['dgd_component_type'])['name']
return context.window_manager.invoke_props_dialog(self)
def register():
dgd_add_props()
bpy.utils.register_class(ComponentTypeOperator)
def unregister():
bpy.utils.unregister_class(ComponentTypeOperator)
if __name__ == "__main__":
register()
| gpl-2.0 | 5,949,753,946,365,939,000 | 28.052632 | 97 | 0.652174 | false |
jshlbrd/laikaboss | cloudscan.py | 4 | 21343 | #!/usr/bin/env python
# Copyright 2015 Lockheed Martin Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# Copyright Lockheed Martin 2015
#
# A networked client for the laikaboss framework.
# Must have an instance of laikad running locally or on a server
# accessible by this client over ssh.
#
# This client is based on the ZeroMQ Lazy Pirate pattern
#
from multiprocessing import Process, Queue
import os, sys, time, logging, select
import getpass
from socket import gethostname
from optparse import OptionParser
import ConfigParser
import zlib, cPickle as pickle
from laikaboss.objectmodel import ExternalObject, ExternalVars
from laikaboss.constants import level_minimal, level_metadata, level_full
from laikaboss.clientLib import Client, getRootObject, get_scanObjectUID, \
getJSON
from random import randint
import json
from copy import deepcopy as clone_object
from distutils.util import strtobool
job_queue = Queue()
result_queue = Queue()
failed_queue = Queue()
# Variable to store configs from file
configs = {}
# Defaults for all available configurations
# To be used if not specified on command line or config file
default_configs = {
'use_ssh': 'False',
'broker_host': 'tcp://localhost:5558',
'ssh_host': 'localhost',
'request_timeout': '600000',
'request_retries': '1',
'return_level': 'metadata',
'num_procs': '8',
}
def getConfig(option):
value = ''
if option in configs:
value = configs[option]
else:
value = default_configs[option]
return value
def main():
parser = OptionParser(usage="usage: %prog [options] (/path/to/file | stdin)")
parser.add_option("-d", "--debug",
action="store_true",
dest="debug",
help="enable debug messages to the console.")
parser.add_option("-r", "--remove-limit",
action="store_true",
dest="nolimit",
help="disable 20mb size limit (be careful!)")
parser.add_option("-t", "--timeout",
action="store", type="int",
dest="timeout",
help="adjust request timeout period (in seconds)")
parser.add_option("-c", "--config-path",
action="store", type="string",
dest="config_path",
help="specify a path to cloudscan.conf.")
parser.add_option("-a", "--address",
action="store", type="string",
dest="broker_host",
help="specify an IP and port to connect to the broker")
parser.add_option("-f", "--file-list",
action="store", type="string",
dest="file_list",
help="Specify a list of files to scan")
parser.add_option("-s", "--ssh-host",
action="store", type="string",
dest="ssh_host",
help="specify a host for the SSH tunnel")
parser.add_option("-p", "--num-procs",
action="store", type="int", default=6,
dest="num_procs",
help="Specify the number of processors to use for recursion")
parser.add_option("-u", "--source",
action="store", type="string",
dest="source",
help="specify a custom source")
parser.add_option("--ssh",
action="store_true",
default=False,
dest="use_ssh",
help="Use SSH tunneling")
parser.add_option("-l", "--level",
action="store", type="string",
dest="return_level",
help="Return Level: minimal, metadata, full [default: metadata]")
parser.add_option("-o", "--out-path",
action="store", type="string",
dest="save_path",
help="If Return Level Full has been specified, provide a path to "
"save the results to [default: current directory]")
parser.add_option("-b", "--buffer",
action="store_true",
dest="stdin_buffer",
help="Specify to allow a buffer to be collected by stdin.")
parser.add_option("-e", "--ephID",
action="store", type="string",
dest="ephID", default="",
help="Specify an ephID to send to Laika.")
parser.add_option("-m", "--ext-metadata",
action="store",
dest="ext_metadata",
help="Specify external metadata to be passed into the scanner.")
parser.add_option("-z", "--log",
action="store_true",
dest="log_db",
help="Specify to turn on logging results.")
parser.add_option("-R", "--recursive",
action="store_true",
default=False,
dest="recursive",
help="Enable recursive directory scanning. If enabled, all files "
"in the specified directory will be scanned. Results will "
"be output to cloudscan.log in the current directory.")
(options, args) = parser.parse_args()
# Define default configuration location
CONFIG_PATH = "/etc/laikaboss/cloudscan.conf"
if options.config_path:
CONFIG_PATH = options.config_path
Config = ConfigParser.ConfigParser()
Config.read(CONFIG_PATH)
# Parse through the config file and append each section to a single dictionary
global configs
for section in Config.sections():
configs.update(dict(Config.items(section)))
# Set the working path, this will be used for file ouput if another
# path is not specified
WORKING_PATH = os.getcwd()
if options.use_ssh:
USE_SSH = True
else:
if strtobool(getConfig('use_ssh')):
USE_SSH = True
else:
USE_SSH = False
if options.ssh_host:
SSH_HOST = options.ssh_host
else:
SSH_HOST = getConfig('ssh_host')
if options.broker_host:
BROKER_HOST = options.broker_host
else:
BROKER_HOST = getConfig('broker_host')
if options.debug:
logging.basicConfig(level=logging.DEBUG)
logging.debug("Host: %s" % BROKER_HOST)
if options.return_level:
RETURN_LEVEL = options.return_level
else:
RETURN_LEVEL = getConfig('return_level')
if options.source:
SOURCE = options.source
else:
SOURCE = "cloudscan"
if not options.log_db:
SOURCE += "-nolog"
if options.save_path:
SAVE_PATH = options.save_path
else:
SAVE_PATH = WORKING_PATH
if options.num_procs:
num_procs = int(options.num_procs)
else:
num_procs = int(getConfig('num_procs'))
if options.timeout:
logging.debug("default timeout changed to %i" % options.timeout)
REQUEST_TIMEOUT = options.timeout * 1000
else:
REQUEST_TIMEOUT = int(getConfig('request_timeout'))
if options.ext_metadata:
try:
if os.path.exists(options.ext_metadata):
with open(options.ext_metadata) as metafile:
ext_metadata = json.loads(metafile.read())
else:
ext_metadata = json.loads(options.ext_metadata)
assert isinstance(ext_metadata, dict)
except:
print "External Metadata must be a dictionary!"
sys.exit(0)
else:
ext_metadata = dict()
REQUEST_RETRIES = int(getConfig('request_retries'))
# Attempt to get the hostname
try:
hostname = gethostname().split('.')[0]
except:
hostname = "none"
# Attempt to set the return level, throw an error if it doesn't exist.
try:
return_level = globals()["level_%s" % RETURN_LEVEL]
except KeyError as e:
print "Please specify a valid return level: minimal, metadata or full"
sys.exit(1)
if not options.recursive:
try:
file_buffer = ''
# Try to read the file
if len(args) > 0:
file_buffer = open(args[0], 'rb').read()
file_len = len(file_buffer)
logging.debug("opened file %s with len %i" % (args[0], file_len))
else:
while sys.stdin in select.select([sys.stdin], [], [], 0)[0]:
line = sys.stdin.readline()
if not line:
break
else:
file_buffer += line
if not file_buffer:
parser.print_usage()
sys.exit(1)
file_len = len(file_buffer)
if file_len > 20971520 and not options.nolimit:
                print "You're trying to scan a file larger than 20mb... Are you sure?"
print "Use the --remove-limit flag if you really want to do this."
sys.exit(1)
except IOError as e:
print "\nERROR: The file does not exist: %s\n" % (args[0],)
sys.exit(1)
else:
try:
fileList = []
if options.file_list:
fileList = open(options.file_list).read().splitlines()
else:
if len(args) > 0:
rootdir = args[0]
for root, subFolders, files in os.walk(rootdir):
for fname in files:
fileList.append(os.path.join(root, fname))
else:
while sys.stdin in select.select([sys.stdin], [], [], 0)[0]:
line = sys.stdin.readline()
if not line:
break
else:
fileList.append(line)
if not fileList:
parser.print_usage()
sys.exit(1)
if len(fileList) > 1000 and not options.nolimit:
print "You're trying to scan over 1000 files... Are you sure?"
print "Use the --remove-limit flag if you really want to do this."
sys.exit(1)
except IOError as e:
print "\nERROR: Directory does not exist: %s\n" % (args[0],)
sys.exit(1)
if not options.recursive:
# Construct the object to be sent for scanning
if args:
filename = args[0]
else:
filename = "stdin"
ext_metadata['server'] = hostname
ext_metadata['user'] = getpass.getuser()
externalObject = ExternalObject(buffer=file_buffer,
externalVars=ExternalVars(filename=filename,
ephID=options.ephID,
extMetaData=ext_metadata,
source="%s-%s-%s" % (SOURCE,
hostname,
getpass.getuser())),
level=return_level)
try:
if not options.recursive:
# Set up ZMQ context
if USE_SSH:
try:
logging.debug("attempting to connect to broker at %s and SSH host %s" % (BROKER_HOST, SSH_HOST))
client = Client(BROKER_HOST, useSSH=True, sshHost=SSH_HOST, useGevent=True)
except RuntimeError as e:
logging.exception("could not set up SSH tunnel to %s" % SSH_HOST)
sys.exit(1)
else:
logging.debug("SSH has been disabled.")
client = Client(BROKER_HOST, useGevent=True)
starttime = time.time()
result = client.send(externalObject, retry=REQUEST_RETRIES, timeout=REQUEST_TIMEOUT)
logging.debug("got reply in %s seconds" % str(time.time() - starttime))
if result:
rootObject = getRootObject(result)
try:
jsonResult = getJSON(result)
print jsonResult
except:
logging.exception("error occured collecting results")
return
if return_level == level_full:
SAVE_PATH = "%s/%s" % (SAVE_PATH, get_scanObjectUID(rootObject))
if not os.path.exists(SAVE_PATH):
try:
os.makedirs(SAVE_PATH)
print "\nWriting results to %s...\n" % SAVE_PATH
except (OSError, IOError) as e:
print "\nERROR: unable to write to %s...\n" % SAVE_PATH
return
else:
print "\nOutput folder already exists! Skipping results output...\n"
return
for uid, scanObject in result.files.iteritems():
f = open("%s/%s" % (SAVE_PATH, uid), "wb")
f.write(scanObject.buffer)
f.close()
try:
if scanObject.filename and scanObject.parent:
linkPath = "%s/%s" % (SAVE_PATH, scanObject.filename.replace("/","_"))
if not os.path.lexists(linkPath):
os.symlink("%s" % (uid), linkPath)
elif scanObject.filename:
filenameParts = scanObject.filename.split("/")
os.symlink("%s" % (uid), "%s/%s" % (SAVE_PATH, filenameParts[-1]))
except:
print "Unable to create symlink for %s" % (uid)
f = open("%s/%s" % (SAVE_PATH, "results.log"), "wb")
f.write(jsonResult)
f.close()
sys.exit(1)
else:
print "ERROR: No result received (scan timed out)"
return
else:
try:
fh = open('cloudscan.log', 'w')
fh.close()
except:
pass
for fname in fileList:
job_queue.put(fname)
for i in range(num_procs):
job_queue.put("STOP")
print "File list length: %s" % len(fileList)
for i in range(num_procs):
Process(target=worker, args=(options.nolimit, REQUEST_RETRIES, REQUEST_TIMEOUT, SAVE_PATH, SOURCE, return_level, hostname, USE_SSH, BROKER_HOST, SSH_HOST,ext_metadata,options.ephID,)).start()
results_processed = 0
while results_processed < len(fileList):
logging.debug("Files left: %s" % ((len(fileList) - results_processed)))
resultText = result_queue.get()
try:
# Process results
fh = open('cloudscan.log', 'ab')
fh.write('%s\n' % resultText)
fh.close()
results_processed += 1
except Exception as e:
raise
print 'Wrote results to cloudscan.log'
except KeyboardInterrupt:
print "Interrupted by user, exiting..."
sys.exit(1)
def worker(nolimit, REQUEST_RETRIES, REQUEST_TIMEOUT, SAVE_PATH, SOURCE, return_level, hostname, USE_SSH, BROKER_HOST, SSH_HOST, ext_metadata, ephID):
# Set up ZMQ context
if USE_SSH:
try:
logging.debug("attempting to connect to broker at %s and SSH host %s" % (BROKER_HOST, SSH_HOST))
client = Client(BROKER_HOST, useSSH=True, sshHost=SSH_HOST)
except RuntimeError as e:
logging.exception("could not set up SSH tunnel to %s" % SSH_HOST)
sys.exit(1)
else:
logging.debug("SSH has been disabled.")
client = Client(BROKER_HOST)
randNum = randint(1, 10000)
for fname in iter(job_queue.get, 'STOP'):
print "Worker %s: Starting new request" % randNum
try:
# Try to read the file
file_buffer = open(fname, 'rb').read()
file_len = len(file_buffer)
logging.debug("opened file %s with len %i" % (fname, file_len))
if file_len > 20971520 and not nolimit:
                print "You're trying to scan a file larger than 20mb... Are you sure?"
print "Use the --remove-limit flag if you really want to do this."
print "File has not been scanned: %s" % fname
result_queue.put("~~~~~~~~~~~~~~~~~~~~\nFile has not been scanned due to size: %s\n~~~~~~~~~~~~~~~~~~~~" % fname)
continue
except IOError as e:
print "\nERROR: The file does not exist: %s\n" % (fname,)
print "Moving to next file..."
result_queue.put("~~~~~~~~~~~~~~~~~~~~\nFile has not been scanned due to an IO Error: %s\n~~~~~~~~~~~~~~~~~~~~" % fname)
continue
try:
# Construct the object to be sent for scanning
externalObject = ExternalObject(buffer=file_buffer,
externalVars=ExternalVars(filename=fname,
ephID=ephID,
extMetaData=ext_metadata,
source="%s-%s-%s" % (SOURCE,
hostname,
getpass.getuser())),
level=return_level)
starttime = time.time()
result = client.send(externalObject, retry=REQUEST_RETRIES, timeout=REQUEST_TIMEOUT)
if not result:
result_queue.put("~~~~~~~~~~~~~~~~~~~~\nFile timed out in the scanner: %s\n~~~~~~~~~~~~~~~~~~~~" % fname)
continue
logging.debug("got reply in %s seconds" % str(time.time() - starttime))
rootObject = getRootObject(result)
jsonResult = getJSON(result)
resultText = '%s\n' % jsonResult
if return_level == level_full:
FILE_SAVE_PATH = "%s/%s" % (SAVE_PATH, get_scanObjectUID(rootObject))
if not os.path.exists(FILE_SAVE_PATH):
try:
os.makedirs(FILE_SAVE_PATH)
print "Writing results to %s..." % FILE_SAVE_PATH
except (OSError, IOError) as e:
print "\nERROR: unable to write to %s...\n" % FILE_SAVE_PATH
return
else:
print "\nOutput folder already exists! Skipping results output...\n"
return
for uid, scanObject in result.files.iteritems():
f = open("%s/%s" % (FILE_SAVE_PATH, uid), "wb")
f.write(scanObject.buffer)
f.close()
if scanObject.filename and scanObject.depth != 0:
linkPath = "%s/%s" % (FILE_SAVE_PATH, scanObject.filename.replace("/","_"))
if not os.path.lexists(linkPath):
os.symlink("%s" % (uid), linkPath)
elif scanObject.filename:
filenameParts = scanObject.filename.split("/")
linkPath = "%s/%s" % (FILE_SAVE_PATH, filenameParts[-1])
if not os.path.lexists(linkPath):
os.symlink("%s" % (uid), linkPath)
f = open("%s/%s" % (FILE_SAVE_PATH, "results.json"), "wb")
f.write(jsonResult)
f.close()
result_queue.put(resultText)
except:
#logging.exception("error occured collecting results")
result_queue.put("~~~~~~~~~~~~~~~~~~~~\nUNKNOWN ERROR OCCURRED: %s\n~~~~~~~~~~~~~~~~~~~~" % fname)
continue
if __name__ == "__main__":
main()
| apache-2.0 | -6,036,542,411,581,162,000 | 40.044231 | 207 | 0.495572 | false |
cherokee/webserver | qa/251-php-localcheck.py | 7 | 1807 | from base import *
DIR = "php_checkfile"
HOST = "test251"
MAGIC = 'This test checks the PHP check local file'
FORBIDDEN = 'This is a comment'
CONF_PART = """
vserver!251!nick = %(HOST)s
vserver!251!document_root = %(vserver_droot)s
vserver!251!rule!%(php_plus1)s!match = directory
vserver!251!rule!%(php_plus1)s!match!directory = /%(DIR)s
vserver!251!rule!%(php_plus1)s!match!final = 0
vserver!251!rule!%(php_plus1)s!document_root = %(document_root)s
# PHP comes here
vserver!251!rule!1!match = default
vserver!251!rule!1!handler = custom_error
vserver!251!rule!1!handler!error = 403
"""
PHP_SRC = """<?php
/* %s */
phpinfo();
echo '%s';
?>""" %(FORBIDDEN, MAGIC)
class Test (TestBase):
def __init__ (self):
TestBase.__init__ (self, __file__)
self.name = "PHP: Document root + Check local file"
self.request = "GET /%s/internal/test.php HTTP/1.0\r\n" %(DIR) +\
"Host: %s\r\n" %(HOST)
self.expected_error = 200
self.forbidden_content = FORBIDDEN
def Prepare (self, www):
# Build directories
vserver_droot = self.Mkdir (www, "test251_droot")
document_root = self.Mkdir (www, "test251_outside")
internal_dir = self.Mkdir (www, "test251_outside/internal")
self.WriteFile (internal_dir, "test.php", 0666, PHP_SRC)
# Config
php_plus1 = int(self.php_conf.split('!')[0]) + 1
self.conf = CONF_PART %(dict (locals(), **globals()))
# Config: PHP
for php in self.php_conf.split("\n"):
self.conf += "vserver!251!rule!%s\n" % (php)
self.conf += "vserver!251!rule!10000!match!check_local_file = 1\n"
def Precondition (self):
return os.path.exists (look_for_php())
| gpl-2.0 | 7,362,429,352,918,926,000 | 28.622951 | 83 | 0.589375 | false |
twilio/twilio-python | twilio/rest/sync/v1/service/sync_list/sync_list_permission.py | 1 | 15865 | # coding=utf-8
r"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from twilio.base import values
from twilio.base.instance_context import InstanceContext
from twilio.base.instance_resource import InstanceResource
from twilio.base.list_resource import ListResource
from twilio.base.page import Page
class SyncListPermissionList(ListResource):
def __init__(self, version, service_sid, list_sid):
"""
Initialize the SyncListPermissionList
:param Version version: Version that contains the resource
:param service_sid: The SID of the Sync Service that the resource is associated with
:param list_sid: The SID of the Sync List to which the Permission applies
:returns: twilio.rest.sync.v1.service.sync_list.sync_list_permission.SyncListPermissionList
:rtype: twilio.rest.sync.v1.service.sync_list.sync_list_permission.SyncListPermissionList
"""
super(SyncListPermissionList, self).__init__(version)
# Path Solution
self._solution = {'service_sid': service_sid, 'list_sid': list_sid, }
self._uri = '/Services/{service_sid}/Lists/{list_sid}/Permissions'.format(**self._solution)
def stream(self, limit=None, page_size=None):
"""
Streams SyncListPermissionInstance records from the API as a generator stream.
This operation lazily loads records as efficiently as possible until the limit
is reached.
The results are returned as a generator, so this operation is memory efficient.
:param int limit: Upper limit for the number of records to return. stream()
guarantees to never return more than limit. Default is no limit
:param int page_size: Number of records to fetch per request, when not set will use
the default value of 50 records. If no page_size is defined
but a limit is defined, stream() will attempt to read the
limit with the most efficient page size, i.e. min(limit, 1000)
:returns: Generator that will yield up to limit results
:rtype: list[twilio.rest.sync.v1.service.sync_list.sync_list_permission.SyncListPermissionInstance]
"""
limits = self._version.read_limits(limit, page_size)
page = self.page(page_size=limits['page_size'], )
return self._version.stream(page, limits['limit'])
def list(self, limit=None, page_size=None):
"""
Lists SyncListPermissionInstance records from the API as a list.
Unlike stream(), this operation is eager and will load `limit` records into
memory before returning.
:param int limit: Upper limit for the number of records to return. list() guarantees
never to return more than limit. Default is no limit
:param int page_size: Number of records to fetch per request, when not set will use
the default value of 50 records. If no page_size is defined
but a limit is defined, list() will attempt to read the limit
with the most efficient page size, i.e. min(limit, 1000)
:returns: Generator that will yield up to limit results
:rtype: list[twilio.rest.sync.v1.service.sync_list.sync_list_permission.SyncListPermissionInstance]
"""
return list(self.stream(limit=limit, page_size=page_size, ))
def page(self, page_token=values.unset, page_number=values.unset,
page_size=values.unset):
"""
Retrieve a single page of SyncListPermissionInstance records from the API.
Request is executed immediately
:param str page_token: PageToken provided by the API
:param int page_number: Page Number, this value is simply for client state
:param int page_size: Number of records to return, defaults to 50
:returns: Page of SyncListPermissionInstance
:rtype: twilio.rest.sync.v1.service.sync_list.sync_list_permission.SyncListPermissionPage
"""
data = values.of({'PageToken': page_token, 'Page': page_number, 'PageSize': page_size, })
response = self._version.page(method='GET', uri=self._uri, params=data, )
return SyncListPermissionPage(self._version, response, self._solution)
def get_page(self, target_url):
"""
Retrieve a specific page of SyncListPermissionInstance records from the API.
Request is executed immediately
:param str target_url: API-generated URL for the requested results page
:returns: Page of SyncListPermissionInstance
:rtype: twilio.rest.sync.v1.service.sync_list.sync_list_permission.SyncListPermissionPage
"""
response = self._version.domain.twilio.request(
'GET',
target_url,
)
return SyncListPermissionPage(self._version, response, self._solution)
def get(self, identity):
"""
Constructs a SyncListPermissionContext
:param identity: The application-defined string that uniquely identifies the User's Sync List Permission resource to fetch
:returns: twilio.rest.sync.v1.service.sync_list.sync_list_permission.SyncListPermissionContext
:rtype: twilio.rest.sync.v1.service.sync_list.sync_list_permission.SyncListPermissionContext
"""
return SyncListPermissionContext(
self._version,
service_sid=self._solution['service_sid'],
list_sid=self._solution['list_sid'],
identity=identity,
)
def __call__(self, identity):
"""
Constructs a SyncListPermissionContext
:param identity: The application-defined string that uniquely identifies the User's Sync List Permission resource to fetch
:returns: twilio.rest.sync.v1.service.sync_list.sync_list_permission.SyncListPermissionContext
:rtype: twilio.rest.sync.v1.service.sync_list.sync_list_permission.SyncListPermissionContext
"""
return SyncListPermissionContext(
self._version,
service_sid=self._solution['service_sid'],
list_sid=self._solution['list_sid'],
identity=identity,
)
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.Sync.V1.SyncListPermissionList>'
class SyncListPermissionPage(Page):
def __init__(self, version, response, solution):
"""
Initialize the SyncListPermissionPage
:param Version version: Version that contains the resource
:param Response response: Response from the API
:param service_sid: The SID of the Sync Service that the resource is associated with
:param list_sid: The SID of the Sync List to which the Permission applies
:returns: twilio.rest.sync.v1.service.sync_list.sync_list_permission.SyncListPermissionPage
:rtype: twilio.rest.sync.v1.service.sync_list.sync_list_permission.SyncListPermissionPage
"""
super(SyncListPermissionPage, self).__init__(version, response)
# Path Solution
self._solution = solution
def get_instance(self, payload):
"""
Build an instance of SyncListPermissionInstance
:param dict payload: Payload response from the API
:returns: twilio.rest.sync.v1.service.sync_list.sync_list_permission.SyncListPermissionInstance
:rtype: twilio.rest.sync.v1.service.sync_list.sync_list_permission.SyncListPermissionInstance
"""
return SyncListPermissionInstance(
self._version,
payload,
service_sid=self._solution['service_sid'],
list_sid=self._solution['list_sid'],
)
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.Sync.V1.SyncListPermissionPage>'
class SyncListPermissionContext(InstanceContext):
def __init__(self, version, service_sid, list_sid, identity):
"""
Initialize the SyncListPermissionContext
:param Version version: Version that contains the resource
:param service_sid: The SID of the Sync Service with the Sync List Permission resource to fetch
:param list_sid: The SID of the Sync List with the Sync List Permission resource to fetch
:param identity: The application-defined string that uniquely identifies the User's Sync List Permission resource to fetch
:returns: twilio.rest.sync.v1.service.sync_list.sync_list_permission.SyncListPermissionContext
:rtype: twilio.rest.sync.v1.service.sync_list.sync_list_permission.SyncListPermissionContext
"""
super(SyncListPermissionContext, self).__init__(version)
# Path Solution
self._solution = {'service_sid': service_sid, 'list_sid': list_sid, 'identity': identity, }
self._uri = '/Services/{service_sid}/Lists/{list_sid}/Permissions/{identity}'.format(**self._solution)
def fetch(self):
"""
Fetch the SyncListPermissionInstance
:returns: The fetched SyncListPermissionInstance
:rtype: twilio.rest.sync.v1.service.sync_list.sync_list_permission.SyncListPermissionInstance
"""
payload = self._version.fetch(method='GET', uri=self._uri, )
return SyncListPermissionInstance(
self._version,
payload,
service_sid=self._solution['service_sid'],
list_sid=self._solution['list_sid'],
identity=self._solution['identity'],
)
def delete(self):
"""
Deletes the SyncListPermissionInstance
:returns: True if delete succeeds, False otherwise
:rtype: bool
"""
return self._version.delete(method='DELETE', uri=self._uri, )
def update(self, read, write, manage):
"""
Update the SyncListPermissionInstance
:param bool read: Read access
:param bool write: Write access
:param bool manage: Manage access
:returns: The updated SyncListPermissionInstance
:rtype: twilio.rest.sync.v1.service.sync_list.sync_list_permission.SyncListPermissionInstance
"""
data = values.of({'Read': read, 'Write': write, 'Manage': manage, })
payload = self._version.update(method='POST', uri=self._uri, data=data, )
return SyncListPermissionInstance(
self._version,
payload,
service_sid=self._solution['service_sid'],
list_sid=self._solution['list_sid'],
identity=self._solution['identity'],
)
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
return '<Twilio.Sync.V1.SyncListPermissionContext {}>'.format(context)
class SyncListPermissionInstance(InstanceResource):
def __init__(self, version, payload, service_sid, list_sid, identity=None):
"""
Initialize the SyncListPermissionInstance
:returns: twilio.rest.sync.v1.service.sync_list.sync_list_permission.SyncListPermissionInstance
:rtype: twilio.rest.sync.v1.service.sync_list.sync_list_permission.SyncListPermissionInstance
"""
super(SyncListPermissionInstance, self).__init__(version)
# Marshaled Properties
self._properties = {
'account_sid': payload.get('account_sid'),
'service_sid': payload.get('service_sid'),
'list_sid': payload.get('list_sid'),
'identity': payload.get('identity'),
'read': payload.get('read'),
'write': payload.get('write'),
'manage': payload.get('manage'),
'url': payload.get('url'),
}
# Context
self._context = None
self._solution = {
'service_sid': service_sid,
'list_sid': list_sid,
'identity': identity or self._properties['identity'],
}
@property
def _proxy(self):
"""
Generate an instance context for the instance, the context is capable of
performing various actions. All instance actions are proxied to the context
:returns: SyncListPermissionContext for this SyncListPermissionInstance
:rtype: twilio.rest.sync.v1.service.sync_list.sync_list_permission.SyncListPermissionContext
"""
if self._context is None:
self._context = SyncListPermissionContext(
self._version,
service_sid=self._solution['service_sid'],
list_sid=self._solution['list_sid'],
identity=self._solution['identity'],
)
return self._context
@property
def account_sid(self):
"""
:returns: The SID of the Account that created the resource
:rtype: unicode
"""
return self._properties['account_sid']
@property
def service_sid(self):
"""
:returns: The SID of the Sync Service that the resource is associated with
:rtype: unicode
"""
return self._properties['service_sid']
@property
def list_sid(self):
"""
:returns: The SID of the Sync List to which the Permission applies
:rtype: unicode
"""
return self._properties['list_sid']
@property
def identity(self):
"""
:returns: The identity of the user to whom the Sync List Permission applies
:rtype: unicode
"""
return self._properties['identity']
@property
def read(self):
"""
:returns: Read access
:rtype: bool
"""
return self._properties['read']
@property
def write(self):
"""
:returns: Write access
:rtype: bool
"""
return self._properties['write']
@property
def manage(self):
"""
:returns: Manage access
:rtype: bool
"""
return self._properties['manage']
@property
def url(self):
"""
:returns: The absolute URL of the Sync List Permission resource
:rtype: unicode
"""
return self._properties['url']
def fetch(self):
"""
Fetch the SyncListPermissionInstance
:returns: The fetched SyncListPermissionInstance
:rtype: twilio.rest.sync.v1.service.sync_list.sync_list_permission.SyncListPermissionInstance
"""
return self._proxy.fetch()
def delete(self):
"""
Deletes the SyncListPermissionInstance
:returns: True if delete succeeds, False otherwise
:rtype: bool
"""
return self._proxy.delete()
def update(self, read, write, manage):
"""
Update the SyncListPermissionInstance
:param bool read: Read access
:param bool write: Write access
:param bool manage: Manage access
:returns: The updated SyncListPermissionInstance
:rtype: twilio.rest.sync.v1.service.sync_list.sync_list_permission.SyncListPermissionInstance
"""
return self._proxy.update(read, write, manage, )
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
return '<Twilio.Sync.V1.SyncListPermissionInstance {}>'.format(context)
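# Usage sketch (not part of this generated file): these resources are normally
# reached through the top-level twilio Client.  The accessor chain, SIDs and
# identity below are assumptions for illustration only.
#
# from twilio.rest import Client
#
# client = Client('ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX', 'auth_token')
# permission = client.sync \
#     .services('ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
#     .sync_lists('ESXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
#     .sync_list_permissions('alice') \
#     .update(read=True, write=True, manage=False)
# print(permission.read, permission.write, permission.manage)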
| mit | -5,737,604,081,525,161,000 | 35.809745 | 130 | 0.630507 | false |
simonwydooghe/ansible | lib/ansible/modules/cloud/hcloud/hcloud_location_info.py | 6 | 4889 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2019, Hetzner Cloud GmbH <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
"metadata_version": "1.1",
"status": ["preview"],
"supported_by": "community",
}
DOCUMENTATION = """
---
module: hcloud_location_info
short_description: Gather infos about your Hetzner Cloud locations.
version_added: "2.8"
description:
- Gather infos about your Hetzner Cloud locations.
- This module was called C(hcloud_location_facts) before Ansible 2.9, returning C(ansible_facts) and C(hcloud_location_facts).
Note that the M(hcloud_location_info) module no longer returns C(ansible_facts) and the value was renamed to C(hcloud_location_info)!
author:
- Lukas Kaemmerling (@LKaemmerling)
options:
id:
description:
- The ID of the location you want to get.
type: int
name:
description:
- The name of the location you want to get.
type: str
extends_documentation_fragment: hcloud
"""
EXAMPLES = """
- name: Gather hcloud location infos
hcloud_location_info:
register: output
- name: Print the gathered infos
debug:
var: output
"""
RETURN = """
hcloud_location_info:
description: The location infos as list
returned: always
type: complex
contains:
id:
description: Numeric identifier of the location
returned: always
type: int
sample: 1937415
name:
description: Name of the location
returned: always
type: str
sample: fsn1
description:
description: Detail description of the location
returned: always
type: str
sample: Falkenstein DC Park 1
country:
description: Country code of the location
returned: always
type: str
sample: DE
city:
description: City of the location
returned: always
type: str
sample: Falkenstein
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
from ansible.module_utils.hcloud import Hcloud
try:
from hcloud import APIException
except ImportError:
pass
class AnsibleHcloudLocationInfo(Hcloud):
def __init__(self, module):
Hcloud.__init__(self, module, "hcloud_location_info")
self.hcloud_location_info = None
def _prepare_result(self):
tmp = []
for location in self.hcloud_location_info:
if location is not None:
tmp.append({
"id": to_native(location.id),
"name": to_native(location.name),
"description": to_native(location.description),
"city": to_native(location.city),
"country": to_native(location.country)
})
return tmp
def get_locations(self):
try:
if self.module.params.get("id") is not None:
self.hcloud_location_info = [self.client.locations.get_by_id(
self.module.params.get("id")
)]
elif self.module.params.get("name") is not None:
self.hcloud_location_info = [self.client.locations.get_by_name(
self.module.params.get("name")
)]
else:
self.hcloud_location_info = self.client.locations.get_all()
except APIException as e:
self.module.fail_json(msg=e.message)
@staticmethod
def define_module():
return AnsibleModule(
argument_spec=dict(
id={"type": "int"},
name={"type": "str"},
**Hcloud.base_module_arguments()
),
supports_check_mode=True,
)
def main():
module = AnsibleHcloudLocationInfo.define_module()
is_old_facts = module._name == 'hcloud_location_facts'
if is_old_facts:
module.deprecate("The 'hcloud_location_info' module has been renamed to 'hcloud_location_info', "
"and the renamed one no longer returns ansible_facts", version='2.13')
hcloud = AnsibleHcloudLocationInfo(module)
hcloud.get_locations()
result = hcloud.get_result()
if is_old_facts:
ansible_info = {
'hcloud_location_facts': result['hcloud_location_info']
}
module.exit_json(ansible_facts=ansible_info)
else:
ansible_info = {
'hcloud_location_info': result['hcloud_location_info']
}
module.exit_json(**ansible_info)
if __name__ == "__main__":
main()
| gpl-3.0 | 7,917,162,840,477,041,000 | 27.928994 | 139 | 0.588873 | false |
jaymin-panchal/zang-python | tests/unit/test_transcriptions.py | 2 | 2256 | import unittest
from zang.configuration.configuration import Configuration
from zang.connectors.connector_factory import ConnectorFactory
from zang.domain.enums.http_method import HttpMethod
from zang.domain.enums.transcribe_quality import TranscribeQuality
from zang.domain.enums.transcription_status import TranscriptionStatus
from tests.test_util import TestUtil, SID, AUTH_TOKEN, URL
from datetime import date
class TestTranscriptions(unittest.TestCase):
def setUp(self):
configuration = Configuration(SID, AUTH_TOKEN, URL)
connectorFactory = ConnectorFactory(configuration)
self.connector = connectorFactory.transcriptionsConnector
def test_view(self):
TestUtil.start('TranscriptionsTest', 'viewTranscription')
obj = self.connector.viewTranscription('TestTranscriptionSid')
self.checkTranscription(obj)
def test_list(self):
TestUtil.start('TranscriptionsTest', 'listTranscriptions')
obj = self.connector.listTranscriptions(
status=TranscriptionStatus.COMPLETED,
dateTranscribedGte=date(2016, 12, 12),
dateTranscribedLt=date(2017, 3, 19),
page=0,
pageSize=33)
assert len(obj.elements) == 2
assert (obj.elements[0].transcriptionText ==
'Hello from St. Cloud. We hope you like.')
def test_transcribeRecording(self):
TestUtil.start('TranscriptionsTest', 'transcribeRecording')
obj = self.connector.transcribeRecording(
'TestRecordingSid', 'TranscribeCallback', HttpMethod.GET, 0, 33,
TranscribeQuality.HYBRID)
self.checkTranscription(obj)
def test_transcribeAudioUrl(self):
TestUtil.start('TranscriptionsTest', 'transcribeAudioUrl')
obj = self.connector.transcribeAudioUrl(
audioUrl='AudioUrl',
transcribeCallback='TranscribeCallback',
sliceStart=0,
sliceDuration=33,
callbackMethod=HttpMethod.GET,
quality=TranscribeQuality.AUTO)
self.checkTranscription(obj)
def checkTranscription(self, transcription):
assert ('Hello from St. Cloud. We hope you like this.' ==
transcription.transcriptionText)
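# Direct-usage sketch outside the mocked test harness (an assumption; SID,
# AUTH_TOKEN and URL are the same placeholders these tests import from
# tests.test_util, real code would supply live credentials):
#
# configuration = Configuration(SID, AUTH_TOKEN, URL)
# connector = ConnectorFactory(configuration).transcriptionsConnector
# transcription = connector.viewTranscription('TestTranscriptionSid')
# print(transcription.transcriptionText)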
| mit | -6,542,018,725,149,351,000 | 37.896552 | 76 | 0.695035 | false |
chipx86/reviewboard | reviewboard/search/forms.py | 2 | 6284 | """Forms for searching Review Board."""
from __future__ import unicode_literals
from collections import OrderedDict
from django import forms
from django.contrib.auth.models import User
from django.utils import six
from django.utils.translation import ugettext_lazy as _
from haystack.forms import ModelSearchForm
from haystack.inputs import Raw
from haystack.query import SQ
from reviewboard.reviews.models import Group, ReviewRequest
from reviewboard.scmtools.models import Repository
from reviewboard.search.indexes import BaseSearchIndex
class RBSearchForm(ModelSearchForm):
"""The Review Board search form.
This form is capable of searching for :py:class:`ReviewRequests
<reviewboard.reviews.models.review_request.ReviewRequest>` and
:py:class:`Users <django.contrib.auth.models.User>`.
"""
FILTER_ALL = ''
FILTER_REVIEW_REQUESTS = 'reviewrequests'
FILTER_USERS = 'users'
#: Available model filters.
FILTER_TYPES = OrderedDict([
(FILTER_ALL, {
'models': [ReviewRequest, User],
'name': _('All results'),
}),
(FILTER_REVIEW_REQUESTS, {
'models': [ReviewRequest],
'name': _('Review Requests'),
}),
(FILTER_USERS, {
'models': [User],
'name': _('Users'),
}),
])
model_filter = forms.MultipleChoiceField(
choices=(
(filter_id, filter_type['name'])
for filter_id, filter_type in six.iteritems(FILTER_TYPES)
),
required=False,
)
id = forms.IntegerField(required=False)
def __init__(self, user=None, local_site=None, **kwargs):
"""Initialize the search form.
Args:
user (django.contrib.auth.models.User):
The user performing the search.
Results will be limited to those visible to the user.
local_site (reviewboard.site.models.LocalSite):
The Local Site the search is being performed on.
Results will be limited to those on the LocalSite.
**kwargs (dict):
Additional keyword arguments to forward to the parent form.
"""
super(RBSearchForm, self).__init__(**kwargs)
self.user = user
self.local_site = local_site
def clean_q(self):
"""Clean the ``q`` field.
The field will be stripped of leading and trailing whitespace.
Returns:
unicode:
The stripped query.
"""
return self.cleaned_data['q'].strip()
def clean_model_filter(self):
"""Clean the ``model_filter`` field.
If no filter is provided, the default (all models) will be used.
Returns:
list of unicode:
The cleaned ``filter`` field.
"""
return self.cleaned_data['model_filter'] or ['']
def search(self):
"""Perform a search.
Returns:
haystack.query.SearchQuerySet:
The search results.
"""
if not self.is_valid():
return self.no_query_found()
user = self.user
q = self.cleaned_data['q']
id_q = self.cleaned_data.get('id')
model_filters = set()
for filter_type in self.cleaned_data.get('model_filter', ['']):
model_filters.update(self.FILTER_TYPES[filter_type]['models'])
model_filters = list(model_filters)
sqs = (
self.searchqueryset
.filter(content=Raw(q))
.models(*model_filters)
)
if id_q:
sqs = sqs.filter_or(SQ(id=q))
if self.local_site:
local_site_id = self.local_site.pk
else:
local_site_id = BaseSearchIndex.NO_LOCAL_SITE_ID
sqs = sqs.filter_and(local_sites__contains=local_site_id)
# Filter out any private review requests the user doesn't have
# access to.
if not user.is_superuser:
private_sq = (SQ(django_ct='reviews.reviewrequest') &
SQ(private=True))
if user.is_authenticated():
# We're going to build a series of queries that mimic the
# accessibility checks we have internally, based on the access
# permissions the user currently has, and the IDs listed in
# the indexed review request.
#
# This must always be kept in sync with
# ReviewRequestManager._query.
#
# Note that we are not performing Local Site checks here,
# because we're already filtering by Local Sites.
# Make sure they have access to the repository, if any.
accessible_repo_ids = list(Repository.objects.accessible_ids(
user,
visible_only=False,
local_site=self.local_site,
))
accessible_group_ids = Group.objects.accessible_ids(
user,
visible_only=False,
)
repository_sq = SQ(
private_repository_id__in=[0] + accessible_repo_ids
)
# Next, build a query to see if the review request targets any
# invite-only groups the user is a member of.
target_groups_sq = SQ(private_target_groups__contains=0)
for pk in accessible_group_ids:
target_groups_sq |= SQ(private_target_groups__contains=pk)
# Build a query to see if the user is explicitly listed
# in the list of reviewers.
target_users_sq = SQ(target_users__contains=user.pk)
# And, of course, the owner of the review request can see it.
#
# With that, we'll put the whole query together, in the order
# matching ReviewRequest.is_accessible_by.
private_sq &= ~(SQ(username=user.username) |
(repository_sq &
(target_users_sq | target_groups_sq)))
sqs = sqs.exclude(private_sq)
return sqs.order_by('-last_updated')
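# Minimal usage sketch (an assumption about the calling view, not a documented
# Review Board API): the search view builds the form with the requesting user
# and optional Local Site, then asks it for a SearchQuerySet.
#
# form = RBSearchForm(user=request.user,
#                     local_site=None,
#                     data={'q': 'crash on startup',
#                           'model_filter': [RBSearchForm.FILTER_REVIEW_REQUESTS]})
# if form.is_valid():
#     results = form.search()    # haystack SearchQuerySet, newest first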
| mit | -7,383,928,477,562,632,000 | 31.559585 | 78 | 0.560471 | false |
Wei1234c/Elastic_Network_of_Things_with_MQTT_and_MicroPython | codes/node/mqtt_client_umqtt.py | 1 | 4718 | # coding: utf-8
import time
import socket
import gc
gc.collect()
import simple as umqtt
import config_mqtt
class Message_client:
# Object control
def __init__(self, server_ip, server_port):
self.server_address = socket.getaddrinfo(server_ip, server_port)[-1][-1]
self.mqtt_client = umqtt.MQTTClient(client_id = self.name,
server = self.server_address[0], port = self.server_address[1],
user = config_mqtt.USERNAME, password = config_mqtt.PASSWORD,
keepalive=60,
ssl=False, ssl_params={})
self.addr = self.server_address
self.parent = None
self.message = None
self.receive_cycles = 0
self.status = {'Datatransceiver ready': False,
'Is connected': False,
'Stop': False}
self.mqtt_client.set_callback(self.on_message)
def __del__(self):
self.parent = None
def set_parent(self, parent=None):
self.parent = parent
def run(self):
self.connect()
def stop(self):
self.status['Stop'] = True
self.close()
def stopped(self):
return self.status['Stop']
def connect(self):
while True:
if self.stopped(): break
try:
self.status['Datatransceiver ready'] = False
self.status['Is connected'] = False
self.message = None
self.mqtt_client.connect(clean_session=True)
self.status['Datatransceiver ready'] = True
self.on_connect()
except Exception as e:
print(e)
time.sleep(config_mqtt.CLIENT_RETRY_TO_CONNECT_AFTER_SECONDS)
def on_connect(self):
self.subscribe(topic = '/'.join([config_mqtt.GROUP_NAME, self.name]), qos = config_mqtt.QOS_LEVEL)
self.subscribe(topic = '/'.join([config_mqtt.GROUP_NAME, config_mqtt.SERVER_NAME]), qos=config_mqtt.QOS_LEVEL)
print('\n[Connected: {0}]'.format(self.server_address))
self.status['Is connected'] = True
self.mqtt_client.check_msg()
self.receive()
def subscribe(self, topic, qos = config_mqtt.QOS_LEVEL):
self.mqtt_client.subscribe(topic = topic, qos = qos)
def on_message(self, topic, msg):
# print('Message topic: {}, payload: {}'.format(topic, str(msg)))
self.on_receive(msg)
def close(self):
self.mqtt_client.disconnect()
self.on_close()
def on_close(self):
print('[Closed: {}]'.format(self.server_address))
def receive(self):
print('[Listen to messages]')
while True:
if self.stopped(): break
try:
self.mqtt_client.sock.settimeout(config_mqtt.CLIENT_RECEIVE_TIME_OUT_SECONDS)
res = self.mqtt_client.wait_msg()
except Exception as e:
# Connection reset.
if config_mqtt.IS_MICROPYTHON:
if str(e) == config_mqtt.MICROPYTHON_MQTT_CONNECTION_RESET_ERROR_MESSAGE:
raise e
elif isinstance(e, ConnectionResetError):
raise e
# Receiving process timeout.
self.receive_cycles += 1
if self.receive_cycles % config_mqtt.PING_BROKER_TO_KEEP_ALIVE_EVERY_CLIENT_RECEIVE_CYCLES == 0:
self.mqtt_client.ping()
self.receive_cycles = 0
self.process_messages()
    # def process_messages(self):
    #     raise Exception('Need to be overridden.')
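    # Minimal subclass sketch for the hook above (an assumption about intended
    # use; the broker address and node name are placeholders).  A concrete node
    # defines `name` before Message_client.__init__ runs and supplies
    # process_messages():
    #
    # class EchoNode(Message_client):
    #     name = 'node_01'
    #
    #     def process_messages(self):
    #         if self.message is not None:
    #             print('Handling: {}'.format(self.message))
    #             self.message = None
    #
    # node = EchoNode(server_ip='192.168.0.10', server_port=1883)
    # node.run()  # blocks: connect, subscribe and listen until stop() is called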
def receive_one_cycle(self):
try:
self.mqtt_client.check_msg()
except Exception as e:
pass
def on_receive(self, data):
if data:
self.message = data.decode()
print('\nData received: {0} bytes'.format(len(data)))
def send_message(self, receiver, message_string):
print('\nSending {} bytes'.format(len(message_string)))
topic = '/'.join([config_mqtt.GROUP_NAME, receiver])
self.mqtt_client.sock.settimeout(config_mqtt.CLIENT_RECEIVE_TIME_OUT_SECONDS)
self.mqtt_client.publish(topic = topic,
msg = message_string.encode(),
retain = False,
qos = config_mqtt.QOS_LEVEL)
self.mqtt_client.check_msg() | gpl-3.0 | 5,052,911,248,970,306,000 | 30.46 | 118 | 0.524799 | false |
jkbgbr/BeamFE2 | BeamFE2/Structure.py | 1 | 13753 | # -*- coding: utf-8 -*-
from drawing import draw_beam
import copy
from BeamFE2.helpers import *
from BeamFE2.Loads import *
from BeamFE2 import Loads as BL
from BeamFE2 import Results
from BeamFE2 import Solver
VERY_LARGE_NUMBER = 10e40
class Structure(object):
dof = 3
loadnames = 'FX', 'FY', 'MZ'
dofnames = 'ux', 'uy', 'rotz'
massnames = 'mx', 'my'
"""
The Structure, composed of the FE Beams.
"""
def __init__(self, beams=None, supports=None):
self.beams = beams
self.nodal_loads = []
self.nodal_masses = []
self._load_vector = None
self._mass_vector = None
self.supports = supports
self.results = {'linear static': Results.LinearStaticResult(),
'modal': Results.ModalResult(),
'buckling': Results.BucklingResult()
}
self.solver = {'linear static': Solver.LinearStaticSolver(self),
'modal': Solver.ModalSolver(self),
'buckling': Solver.BucklingSolver(self)
}
@property
def supported_nodes(self):
return (x for x in self.nodes if x.ID in self.supports.keys())
def set_mass_matrix_type(self, matrixtype='consistent', beam_IDs='all'):
if beam_IDs == 'all':
for beam in self.beams:
beam.mass_matrix = matrixtype
else:
assert all([x in [y.ID for y in self.beams] for x in beam_IDs])
for beam in self.beams:
if beam.ID in beam_IDs:
beam.mass_matrix = matrixtype
def add_nodal_mass(self, nodeID=None, mass=None, clear=False):
"""
        :param clear: clear all previously defined masses before applying this one
        :param nodeID: ID of the node the mass is attached to
        :param mass: dict of mass components in [kg], e.g. {'mx': ..., 'my': ...}
:return:
"""
# finding the node
node = [x for x in self.nodes if nodeID == x.ID]
if len(node) != 1:
raise Exception('There is no node with ID %d' % nodeID)
else:
node = node[0]
        # making sure the mass dict is full, if some component is missing we replace it with a zero
for ln in self.massnames:
if ln not in mass.keys():
mass[ln] = 0
self.add_mass_to_node(nodeID=nodeID, mass=mass, clear=clear)
self.nodal_masses.append(BL.NodalMass(node=node, mass=mass))
def add_nodal_load(self, nodeID=None, dynam=None, clear=False):
"""
        :param nodeID: ID of the node the load acts on
        :param dynam: values of the load components
:param clear: clear all previously defined loads before applying this
:return:
"""
# finding the node
node = [x for x in self.nodes if nodeID == x.ID]
if len(node) != 1:
raise Exception('There is no node with ID %d' % nodeID)
else:
node = node[0]
        # making sure the dynam is full, if some component is missing we replace it with a zero
for ln in self.loadnames:
if ln not in dynam.keys():
dynam[ln] = 0
self.add_single_dynam_to_node(nodeID=nodeID, dynam=dynam, clear=clear)
self.nodal_loads.append(BL.NodalLoad(node=node, dynam=dynam))
# @property
# def beam_mass(self):
# """ Structural mass of the beams"""
# return [x.mass for x in self.beams]
#
# def mass_of_nodal_masses(self, direction='x'):
# """ Nodal masses defined for a given direction"""
# _dir = 'm%s' % direction
# assert _dir in self.massnames
# return [x[_dir] for x in self.nodal_masses]
def draw(self, show=True, analysistype=None, mode=0, internal_action=None):
if self.results[analysistype].solved:
draw_beam.draw_structure(self, show=show, analysistype=analysistype, mode=mode, intac=internal_action)
else:
print('no results available, no printing')
def node_by_ID(self, id=None):
# return the Node object that has the ID
_ret = [x for x in self.nodes if x.ID == id]
assert len(_ret) == 1
return _ret[0]
# @property
# def dof(self):
# return self.beams[0].dof
#
# @property
# def loadnames(self):
# assert self.dof in [3, 6]
# if self.dof == 3:
# return 'FX', 'FY', 'MZ'
# else:
# return 'FX', 'FY', 'FZ', 'MX', 'MY', 'MZ'
#
    # # todo: use variable names / constants instead of raw strings
    # # use type hinting, but it only really matters for what can be called from outside
    # # simpler refactor
#
# @property
# def dofnames(self):
# assert self.dof in [3, 6]
# if self.dof == 3:
# return 'ux', 'uy', 'rotz'
# else:
# return 'ux', 'uy', 'uz', 'rotx', 'roty', 'rotz'
#
# @property
# def dofnumbers(self):
# assert self.dof in [3, 6]
# if self.dof == 3:
# return 0, 1, 2
# else:
# return 0, 1, 2, 3, 4, 5
@property
def nodes(self):
# set of Nodes of the model
return set(itertools.chain.from_iterable([x.nodes for x in self.beams]))
@property
def sumdof(self):
# sum of DOFs, without eliminating for BCs
return self.dof * len(self.nodes)
@property
def positions_to_eliminate(self):
"""
Numbers of rows and columns to be eliminated when condensing the K and M matrices for the modal analysis, based
on the boundary conditions defined.
Possible zero-rows are not considered here.
Returned is a sorted list of these.
When deleting the rows, one should begin with the highest number, that is, the reversed list of positions
When re-populating the displacement vectors, the list should not be reversed.
:return: the list with the numbers of the rows, columns
"""
_to_eliminate = [] # list of rows and columns to eliminate
for nodeID, DOFs in self.supports.items():
for DOF in DOFs:
_to_eliminate.append(self.position_in_matrix(nodeID=nodeID, DOF=DOF))
_to_eliminate.sort()
return _to_eliminate
def condense(self, mtrx=None):
"""
Eliminates the rows and columns of the BC
"""
for _ in self.positions_to_eliminate[::-1]:
mtrx = np.delete(mtrx, _, axis=0)
mtrx = np.delete(mtrx, _, axis=1)
return mtrx
@property
def M(self):
# the compiled mass matrix
return compile_global_matrix(self.beams, mass=True)
@property
def M_with_masses(self):
        # copy of the mass matrix with the defined nodal masses added to the diagonal
_M = copy.deepcopy(self.M)
if self._mass_vector is None:
self._mass_vector = np.matrix(np.zeros(self.sumdof))
for mindex, m in enumerate(np_matrix_tolist(self.mass_vector)):
_M[mindex, mindex] += m
return _M
@property
def K_geom(self):
# the compiled geometrical stiffness matrix
return compile_global_matrix(self.beams, geometrical=True)
@property
def K(self):
# the compiled stiffness matrix, without supports
return compile_global_matrix(self.beams, stiffness=True)
@property
def K_with_BC(self):
        # copy of the stiffness matrix with the boundary conditions taken into account
_K = copy.deepcopy(self.K)
for k, v in self.supports.items(): # k is the nodeID that has support
for dof in v: # dof to be fixed: 'ux', 'rotz' etc.
_pos = self.position_in_matrix(nodeID=k, DOF=dof)
# check, if the DOF has been released previously
_K[_pos, _pos] += VERY_LARGE_NUMBER
# print('added support: nodeID %d, DOF %s' % (k, dof))
return _K
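    # Note: the support handling above is a penalty method.  Instead of deleting
    # the constrained rows/columns, VERY_LARGE_NUMBER is added to the diagonal
    # entry of every supported DOF, which forces that displacement towards zero.
    # Tiny numeric sketch (values made up, only the mechanism matters):
    #
    #   K = [[ 2, -1],    support on DOF 0  ->  K_bc = [[2 + 1e41, -1],
    #        [-1,  2]]                                  [-1,        2]]
    #
    # solving K_bc * u = F then gives u[0] ~= 0 while u[1] stays meaningful.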
@property
def load_vector(self):
# the load vector
return self._load_vector.T
@property
def mass_vector(self):
# a vector containing the masses
        # currently no distinction between directions, defined masses act in all directions
return self._mass_vector
def add_internal_loads(self, beam=None, loadtype=None, **kwargs):
beam.add_internal_load(loadtype=loadtype, **kwargs)
for node in beam.nodes:
dynam_as_dict = beam.reduce_internal_load(load=beam.internal_loads[-1])
self.add_single_dynam_to_node(nodeID=node.ID, dynam=dynam_as_dict[node])
def clear_loads(self):
# clear all loads defined previously
# loads defined on beams
for b in self.beams:
b.internal_loads = []
self.nodal_loads = [] # deleting all nodal loads
self._load_vector = None # zeroing the load vector
def add_single_dynam_to_node(self, nodeID=None, dynam=None, clear=False):
"""
Adds a dynam (FX, FY, MZ) to the chosen Node of the model.
Checks if the node exists.
Checks if the name of the dynam is OK.
clears previous loads on the node, if clear is True
:param nodeID:
:param dynam:
:param clear:
:return:
"""
#
assert nodeID in [x.ID for x in self.nodes]
assert all([x in self.loadnames for x in dynam.keys()])
# clear all loads defined previously
if clear:
self.clear_loads()
if self._load_vector is None:
self._load_vector = np.matrix(np.zeros(self.sumdof))
for k, v in dynam.items():
for name, number in zip(self.loadnames, range(self.dof)): # pairs e.g. 'FX' with 0, 'FY' with 1 etc.
# _sti = nodeID * self.dof + number # starting index
if k == name:
_sti = self.position_in_matrix(nodeID=nodeID, dynam=k)
self._load_vector[0, _sti] += v
def add_mass_to_node(self, nodeID=None, mass=None, clear=False):
"""
Adds a mass point (mx, my, mz) to the chosen Node of the model.
clears previous masses on the node, if clear is True
:param nodeID: ID of the node the mass is added to
:param mass: mass to be added in [kg]
:param clear: if True, all previously defined masses will be deleted
:return:
"""
#
assert nodeID in [x.ID for x in self.nodes]
if self._mass_vector is None:
self._mass_vector = np.matrix(np.zeros(self.sumdof))
# clear all masses defined previously
if clear:
self._mass_vector = np.matrix(np.zeros(self.sumdof))
for k, v in mass.items():
dofname = self.dofnames[self.massnames.index(k)]
_sti = self.position_in_matrix(nodeID=nodeID, DOF=dofname)
self._mass_vector[0, _sti] += v
@property
def stiffness_matrix_is_symmetric(self):
# checks if the global stiffness matrix (without BCs) is symmetric. MUST be.
diff = self.K.transpose() - self.K
if np.allclose(diff, np.zeros(self.sumdof)):
return True
else:
print('The stiffness matrix is not symmetric')
return False
@property
def stiffness_matrix_is_nonsingular(self):
# checks if the global stiffness matrix (with BCs) is positive definite. MUST be.
try:
np.linalg.cholesky(self.K_with_BC)
return True
except np.linalg.linalg.LinAlgError:
return False
@property
def node_numbering_is_ok(self):
# checks node numbering: they must be sequential, without missing values
if set([x.ID for x in self.nodes]) == set(range(1, len(self.nodes)+1)):
return True
else:
print('Node numbering is not ok')
return False
@property
def mass_matrix_is_ok(self):
return True
# todo: check for symmetry, positive definiteness
@property
def stiffness_matrix_is_ok(self):
if self.stiffness_matrix_is_nonsingular:
return True
else:
print('The stiffness matrix is singular')
return False
def compile_global_geometrical_stiffness_matrix(self):
"""
Compiles the global stiffness matrix from the element matrices.
:return:
"""
return compile_global_matrix(self.beams, geometrical=True)
def compile_global_stiffness_matrix(self):
"""
Compiles the global stiffness matrix from the element matrices.
:return:
"""
return compile_global_matrix(self.beams, stiffness=True)
def nodenr_dof_from_position(self, position=None):
"""
Tells the node number, DOF from the position provided.
:param position:
:return:
"""
_nodeID, DOFnr = divmod(position, self.dof)
return _nodeID+1, self.loadnames[DOFnr]
def position_in_matrix(self, nodeID=None, DOF=None, dynam=None):
"""
Tells the index of the given nodeID, DOF in a global K or M matrix.
:param nodeID:
:param DOF:
:return:
"""
# assert nodeID in [x.nodeID for x in self.nodes]
if DOF is not None:
assert DOF in self.dofnames
return (nodeID - 1) * self.dof + self.dofnames.index(DOF)
if dynam is not None:
assert dynam in self.loadnames
return (nodeID - 1) * self.dof + self.loadnames.index(dynam)
# print('Adding support to Node %d, DOF %s at index %d' % (nodeID, DOF, _ret))
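# Worked example for the index mapping above (pure arithmetic, no extra API
# assumed): with dof = 3 and dofnames = ('ux', 'uy', 'rotz'),
#
#   position_in_matrix(nodeID=1, DOF='ux')    -> (1 - 1) * 3 + 0 = 0
#   position_in_matrix(nodeID=2, DOF='rotz')  -> (2 - 1) * 3 + 2 = 5
#   position_in_matrix(nodeID=4, dynam='FY')  -> (4 - 1) * 3 + 1 = 10
#
# nodenr_dof_from_position() inverts this: position 5 -> (node 2, 'MZ').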
| gpl-3.0 | -5,464,730,670,279,479,000 | 33.906091 | 119 | 0.578056 | false |
leighpauls/k2cro4 | tools/telemetry/telemetry/discover.py | 2 | 1331 | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import inspect
import logging
import os
import traceback
def Discover(start_dir, suffix, clazz):
"""Discover all classes in |start_dir| which subclass |clazz|.
Args:
start_dir: The directory to recursively search.
suffix: file name suffix for files to import, without the '.py' ending.
clazz: The base class to search for.
Returns:
dict of {module_name: class}.
"""
top_level_dir = os.path.join(start_dir, '..')
classes = {}
for dirpath, _, filenames in os.walk(start_dir):
for filename in filenames:
if not filename.endswith(suffix + '.py'):
continue
name, _ = os.path.splitext(filename)
relpath = os.path.relpath(dirpath, top_level_dir)
fqn = relpath.replace('/', '.') + '.' + name
try:
module = __import__(fqn, fromlist=[True])
except Exception:
logging.error('While importing [%s]\n' % fqn)
traceback.print_exc()
continue
for name, obj in inspect.getmembers(module):
if inspect.isclass(obj):
if clazz in inspect.getmro(obj):
name = module.__name__.split('.')[-1]
classes[name] = obj
return classes
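# Usage sketch (an assumption; the directory layout, suffix and base class
# below are illustrative and not taken from this file):
#
#   from telemetry.page import page_measurement
#
#   measurements = Discover(start_dir='perf/measurements',
#                           suffix='_measurement',
#                           clazz=page_measurement.PageMeasurement)
#   # -> {'some_measurement': <class 'some_measurement.SomeMeasurement'>, ...}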
| bsd-3-clause | 4,992,938,095,331,741,000 | 32.275 | 75 | 0.635612 | false |
piluke/BasicEventEngineFrontend | ui/treectrl.py | 1 | 16802 | # Copyright (c) 2017-18 Luke Montalvo <[email protected]>
#
# This file is part of BEEF.
# BEEF is free software and comes with ABSOLUTELY NO WARRANTY.
# See LICENSE for more details.
try:
import wx
except ImportError:
raise ImportError("The wxPython module is required to run this program")
import os
import shutil
from resources.enum import EResource
class BEEFTreeCtrl(wx.TreeCtrl):
def __init__(self, top, parent):
wx.TreeCtrl.__init__(self, parent, -1, wx.DefaultPosition, wx.DefaultSize, wx.TR_HAS_BUTTONS | wx.TR_EDIT_LABELS | wx.TR_HIDE_ROOT | wx.TR_FULL_ROW_HIGHLIGHT)
self.top = top
self.parent = parent
self.parent.Bind(wx.EVT_SIZE, self.OnSize)
size = (16,16)
self.il = wx.ImageList(size[0], size[1])
self.iconFolder = self.il.Add(wx.ArtProvider.GetBitmap(wx.ART_FOLDER, wx.ART_OTHER, size))
self.iconFolderOpen = self.il.Add(wx.ArtProvider.GetBitmap(wx.ART_FOLDER_OPEN, wx.ART_OTHER, size))
self.iconFile = self.il.Add(wx.ArtProvider.GetBitmap(wx.ART_NORMAL_FILE, wx.ART_OTHER, size))
self.SetImageList(self.il)
self.root = self.AddRoot("Resources")
self.SetItemImage(self.root, self.iconFolder, wx.TreeItemIcon_Normal)
self.SetItemImage(self.root, self.iconFolderOpen, wx.TreeItemIcon_Expanded)
self.rootList = []
def addTree(name, data):
c = self.AppendItem(self.root, name)
self.SetItemData(c, data)
self.SetItemImage(c, self.iconFolder, wx.TreeItemIcon_Normal)
self.SetItemImage(c, self.iconFolderOpen, wx.TreeItemIcon_Expanded)
self.rootList.append(c)
for r in EResource.getAll():
addTree(EResource.getPlural(r), EResource.get(r))
self.SetItemData(self.AppendItem(self.root, "-"*15), None)
addTree("Configs", None)
addTree("Extras", None)
def reset(self):
for r in self.rootList:
self.DeleteChildren(r)
def Bind(self):
self.parent.Bind(wx.EVT_TREE_BEGIN_LABEL_EDIT, self.OnBeginEdit, self)
self.parent.Bind(wx.EVT_TREE_END_LABEL_EDIT, self.OnEndEdit, self)
self.parent.Bind(wx.EVT_TREE_ITEM_ACTIVATED, self.OnActivate, self)
self.parent.Bind(wx.EVT_TREE_ITEM_RIGHT_CLICK, self.ShowCMenu, self)
# Bind context menu actions
self.parent.Bind(wx.EVT_MENU, self.CMenuRootTextureCreate, id=1001)
self.parent.Bind(wx.EVT_MENU, self.CMenuRootTextureExpand, id=1002)
self.parent.Bind(wx.EVT_MENU, self.CMenuRootTextureCollapse, id=1003)
self.parent.Bind(wx.EVT_MENU, self.CMenuRootSoundCreate, id=1011)
self.parent.Bind(wx.EVT_MENU, self.CMenuRootSoundExpand, id=1012)
self.parent.Bind(wx.EVT_MENU, self.CMenuRootSoundCollapse, id=1013)
self.parent.Bind(wx.EVT_MENU, self.CMenuRootFontCreate, id=1021)
self.parent.Bind(wx.EVT_MENU, self.CMenuRootFontExpand, id=1022)
self.parent.Bind(wx.EVT_MENU, self.CMenuRootFontCollapse, id=1023)
self.parent.Bind(wx.EVT_MENU, self.CMenuRootPathCreate, id=1031)
self.parent.Bind(wx.EVT_MENU, self.CMenuRootPathExpand, id=1032)
self.parent.Bind(wx.EVT_MENU, self.CMenuRootPathCollapse, id=1033)
self.parent.Bind(wx.EVT_MENU, self.CMenuRootTimelineCreate, id=1041)
self.parent.Bind(wx.EVT_MENU, self.CMenuRootTimelineExpand, id=1042)
self.parent.Bind(wx.EVT_MENU, self.CMenuRootTimelineCollapse, id=1043)
self.parent.Bind(wx.EVT_MENU, self.CMenuRootMeshCreate, id=1051)
self.parent.Bind(wx.EVT_MENU, self.CMenuRootMeshExpand, id=1052)
self.parent.Bind(wx.EVT_MENU, self.CMenuRootMeshCollapse, id=1053)
self.parent.Bind(wx.EVT_MENU, self.CMenuRootLightCreate, id=1061)
self.parent.Bind(wx.EVT_MENU, self.CMenuRootLightExpand, id=1062)
self.parent.Bind(wx.EVT_MENU, self.CMenuRootLightCollapse, id=1063)
self.parent.Bind(wx.EVT_MENU, self.CMenuRootObjectCreate, id=1071)
self.parent.Bind(wx.EVT_MENU, self.CMenuRootObjectExpand, id=1072)
self.parent.Bind(wx.EVT_MENU, self.CMenuRootObjectCollapse, id=1073)
self.parent.Bind(wx.EVT_MENU, self.CMenuRootRoomCreate, id=1081)
self.parent.Bind(wx.EVT_MENU, self.CMenuRootRoomExpand, id=1082)
self.parent.Bind(wx.EVT_MENU, self.CMenuRootRoomCollapse, id=1083)
self.parent.Bind(wx.EVT_MENU, self.CMenuRootConfigCreate, id=1091)
self.parent.Bind(wx.EVT_MENU, self.CMenuRootConfigImport, id=1092)
self.parent.Bind(wx.EVT_MENU, self.CMenuRootConfigExpand, id=1093)
self.parent.Bind(wx.EVT_MENU, self.CMenuRootConfigCollapse, id=1094)
self.parent.Bind(wx.EVT_MENU, self.CMenuRootExtraCreate, id=1101)
self.parent.Bind(wx.EVT_MENU, self.CMenuRootExtraImport, id=1102)
self.parent.Bind(wx.EVT_MENU, self.CMenuRootExtraExpand, id=1103)
self.parent.Bind(wx.EVT_MENU, self.CMenuRootExtraCollapse, id=1104)
# Bind resource context menu actions
self.top.Bind(wx.EVT_MENU, self.CMenuResourceOpen, id=10001)
self.top.Bind(wx.EVT_MENU, self.CMenuResourceCreate, id=10002)
self.top.Bind(wx.EVT_MENU, self.CMenuResourceRename, id=10003)
self.top.Bind(wx.EVT_MENU, self.CMenuResourceDelete, id=10004)
self.top.Bind(wx.EVT_MENU, self.CMenuResourceDuplicate, id=10005)
self.top.Bind(wx.EVT_MENU, self.CMenuRoomSetAsFirst, id=10006)
accelTable = wx.AcceleratorTable([
(wx.ACCEL_NORMAL, wx.WXK_RETURN, 10001),
(wx.ACCEL_NORMAL, wx.WXK_F2, 10003),
(wx.ACCEL_NORMAL, wx.WXK_DELETE, 10004),
(wx.ACCEL_ALT, wx.WXK_INSERT, 10005)
])
self.SetAcceleratorTable(accelTable)
def OnSize(self, event):
w, h = self.parent.GetClientSize()
self.SetSize(0, 0, w, h)
def OnBeginEdit(self, event):
item = event.GetItem()
if item in self.rootList or not self.GetItemData(item):
event.Veto()
def OnEndEdit(self, event):
item = event.GetItem()
if not item in self.rootList:
r = self.GetItemData(item)
if r:
l = event.GetLabel()
if r.name == l or event.IsEditCancelled():
return
if not r.rename(l):
event.Veto()
return
self.top.setUnsaved()
def OnActivate(self, event):
item = event.GetItem()
if item in self.rootList:
self.Toggle(item)
else:
r = self.GetItemData(item)
if r:
r.MenuOpen(None)
def expandRoot(self):
for rt in self.rootList:
self.Expand(rt)
def ShowCMenu(self, event):
item = event.GetItem()
self.cmenu = wx.Menu()
if item in self.rootList:
t = self.GetItemData(item)
for i in range(EResource._MAX):
if t == EResource.get(i):
self.cmenu.Append(1001+i*10, "Create new "+EResource.get(i))
self.cmenu.Append(1002+i*10, "Expand all "+EResource.getPlural(i))
self.cmenu.Append(1003+i*10, "Collapse all "+EResource.getPlural(i))
break;
else: # Show Configs/Extras CMenu
rt = self.GetItemText(item)
if rt == "Configs":
self.cmenu.Append(1091, "Create new Config File")
self.cmenu.Append(1092, "Import new Config File")
self.cmenu.Append(1093, "Expand all Config Files")
self.cmenu.Append(1094, "Collapse all Config Files")
elif rt == "Extras":
self.cmenu.Append(1101, "Create new Extra File")
self.cmenu.Append(1102, "Import new Extra File")
self.cmenu.Append(1103, "Expand all Extra Files")
self.cmenu.Append(1104, "Collapse all Extra Files")
else:
raise RuntimeError("Invalid resource type: {}".format(rt))
else:
r = self.GetItemData(item)
if r:
self.cmenu.Append(10001, "Open properties\tEnter")
self.cmenu.Append(10002, "Create new " + EResource.get(r.type))
self.cmenu.Append(10003, "Rename \"" + r.name + "\"\tF2")
self.cmenu.Append(10004, "Delete \"" + r.name + "\"\tDelete")
self.cmenu.Append(10005, "Duplicate\tAlt+Insert")
if r.type == EResource.ROOM:
self.cmenu.Append(10006, "Set as the first room")
else:
self.cmenu.Destroy()
return
self.PopupMenu(self.cmenu)
self.cmenu.Destroy()
def addTexture(self, name, resource):
item = self.AppendItem(self.rootList[EResource.TEXTURE], name)
self.SetItemData(item, resource)
self.SetItemImage(item, self.iconFile, wx.TreeItemIcon_Normal)
return item
def addSound(self, name, resource):
item = self.AppendItem(self.rootList[EResource.SOUND], name)
self.SetItemData(item, resource)
self.SetItemImage(item, self.iconFile, wx.TreeItemIcon_Normal)
return item
def addFont(self, name, resource):
item = self.AppendItem(self.rootList[EResource.FONT], name)
self.SetItemData(item, resource)
self.SetItemImage(item, self.iconFile, wx.TreeItemIcon_Normal)
return item
def addPath(self, name, resource):
item = self.AppendItem(self.rootList[EResource.PATH], name)
self.SetItemData(item, resource)
self.SetItemImage(item, self.iconFile, wx.TreeItemIcon_Normal)
return item
def addTimeline(self, name, resource):
item = self.AppendItem(self.rootList[EResource.TIMELINE], name)
self.SetItemData(item, resource)
self.SetItemImage(item, self.iconFile, wx.TreeItemIcon_Normal)
return item
def addMesh(self, name, resource):
item = self.AppendItem(self.rootList[EResource.MESH], name)
self.SetItemData(item, resource)
self.SetItemImage(item, self.iconFile, wx.TreeItemIcon_Normal)
return item
def addLight(self, name, resource):
item = self.AppendItem(self.rootList[EResource.LIGHT], name)
self.SetItemData(item, resource)
self.SetItemImage(item, self.iconFile, wx.TreeItemIcon_Normal)
return item
def addObject(self, name, resource):
item = self.AppendItem(self.rootList[EResource.OBJECT], name)
self.SetItemData(item, resource)
self.SetItemImage(item, self.iconFile, wx.TreeItemIcon_Normal)
return item
def addRoom(self, name, resource):
item = self.AppendItem(self.rootList[EResource.ROOM], name)
self.SetItemData(item, resource)
self.SetItemImage(item, self.iconFile, wx.TreeItemIcon_Normal)
if resource.name == self.top.gameCfg["first_room"]:
self.SetItemBold(item, True)
return item
def addConfig(self, name, resource):
item = self.AppendItem(self.rootList[9], name)
self.SetItemData(item, resource)
self.SetItemImage(item, self.iconFile, wx.TreeItemIcon_Normal)
return item
def addExtra(self, name, resource):
item = self.AppendItem(self.rootList[10], name)
self.SetItemData(item, resource)
self.SetItemImage(item, self.iconFile, wx.TreeItemIcon_Normal)
return item
def CMenuRootTextureCreate(self, event):
name = "spr_" + str(len(self.top.textures))
_, item = self.top.addTexture(name)
if item:
self.CMenuRootTextureExpand(None)
self.SelectItem(item)
def CMenuRootTextureExpand(self, event):
self.Expand(self.rootList[EResource.TEXTURE])
def CMenuRootTextureCollapse(self, event):
self.Collapse(self.rootList[EResource.TEXTURE])
def CMenuRootSoundCreate(self, event):
name = "snd_" + str(len(self.top.sounds))
_, item = self.top.addSound(name)
if item:
self.CMenuRootSoundExpand(None)
self.SelectItem(item)
def CMenuRootSoundExpand(self, event):
self.Expand(self.rootList[EResource.SOUND])
def CMenuRootSoundCollapse(self, event):
self.Collapse(self.rootList[EResource.SOUND])
def CMenuRootFontCreate(self, event):
name = "font_" + str(len(self.top.fonts))
_, item = self.top.addFont(name)
if item:
self.CMenuRootFontExpand(None)
self.SelectItem(item)
def CMenuRootFontExpand(self, event):
self.Expand(self.rootList[EResource.FONT])
def CMenuRootFontCollapse(self, event):
self.Collapse(self.rootList[EResource.FONT])
def CMenuRootPathCreate(self, event):
name = "path_" + str(len(self.top.paths))
_, item = self.top.addPath(name)
if item:
self.CMenuRootPathExpand(None)
self.SelectItem(item)
def CMenuRootPathExpand(self, event):
self.Expand(self.rootList[EResource.PATH])
def CMenuRootPathCollapse(self, event):
self.Collapse(self.rootList[EResource.PATH])
def CMenuRootTimelineCreate(self, event):
name = "tl_" + str(len(self.top.timelines))
_, item = self.top.addTimeline(name)
if item:
self.CMenuRootTimelineExpand(None)
self.SelectItem(item)
def CMenuRootTimelineExpand(self, event):
self.Expand(self.rootList[EResource.TIMELINE])
def CMenuRootTimelineCollapse(self, event):
self.Collapse(self.rootList[EResource.TIMELINE])
def CMenuRootMeshCreate(self, event):
name = "mesh_" + str(len(self.top.meshes))
_, item = self.top.addMesh(name)
if item:
self.CMenuRootMeshExpand(None)
self.SelectItem(item)
def CMenuRootMeshExpand(self, event):
self.Expand(self.rootList[EResource.MESH])
def CMenuRootMeshCollapse(self, event):
self.Collapse(self.rootList[EResource.MESH])
def CMenuRootLightCreate(self, event):
name = "lt_" + str(len(self.top.lights))
_, item = self.top.addLight(name)
if item:
self.CMenuRootLightExpand(None)
self.SelectItem(item)
def CMenuRootLightExpand(self, event):
self.Expand(self.rootList[EResource.LIGHT])
def CMenuRootLightCollapse(self, event):
self.Collapse(self.rootList[EResource.LIGHT])
def CMenuRootObjectCreate(self, event):
name = "obj_" + str(len(self.top.objects))
_, item = self.top.addObject(name)
if item:
self.CMenuRootObjectExpand(None)
self.SelectItem(item)
def CMenuRootObjectExpand(self, event):
self.Expand(self.rootList[EResource.OBJECT])
def CMenuRootObjectCollapse(self, event):
self.Collapse(self.rootList[EResource.OBJECT])
def CMenuRootRoomCreate(self, event):
name = "rm_" + str(len(self.top.rooms))
_, item = self.top.addRoom(name)
if item:
self.CMenuRootRoomExpand(None)
self.SelectItem(item)
def CMenuRootRoomExpand(self, event):
self.Expand(self.rootList[EResource.ROOM])
def CMenuRootRoomCollapse(self, event):
self.Collapse(self.rootList[EResource.ROOM])
def CMenuRootConfigCreate(self, event):
name = self.top.dialogRename("")
if name:
_, item = self.top.addConfig(name)
if item:
self.CMenuRootConfigExpand(None)
self.SelectItem(item)
def CMenuRootConfigImport(self, event):
wildcards = (
"Config File (*.cfg)|*.cfg|"
"All files (*)|*"
)
dialog = wx.FileDialog(
self.top, message="Import Config File",
defaultDir=self.top.rootDir,
wildcard=wildcards,
style=wx.FD_OPEN
)
if dialog.ShowModal() == wx.ID_OK:
path = dialog.GetPath()
name = os.path.basename(path)
r, item = self.top.addConfig(name, None)
if item:
shutil.copyfile(path, self.top.rootDir+r.path+r.name)
r.update()
self.CMenuRootConfigExpand(None)
self.SelectItem(item)
dialog.Destroy()
def CMenuRootConfigExpand(self, event):
self.Expand(self.rootList[9])
def CMenuRootConfigCollapse(self, event):
self.Collapse(self.rootList[9])
def CMenuRootExtraCreate(self, event):
name = self.top.dialogRename("")
if name:
_, item = self.top.addExtra(name)
if item:
self.CMenuRootExtraExpand(None)
self.SelectItem(item)
def CMenuRootExtraImport(self, event):
wildcards = (
"All files (*)|*"
)
dialog = wx.FileDialog(
self.top, message="Import Extra File",
defaultDir=self.top.rootDir,
wildcard=wildcards,
style=wx.FD_OPEN
)
if dialog.ShowModal() == wx.ID_OK:
path = dialog.GetPath()
name = os.path.basename(path)
r, item = self.top.addExtra(name, None)
if item:
shutil.copyfile(path, self.top.rootDir+r.path+r.name)
r.update()
self.CMenuRootExtraExpand(None)
self.SelectItem(item)
dialog.Destroy()
def CMenuRootExtraExpand(self, event):
self.Expand(self.rootList[10])
def CMenuRootExtraCollapse(self, event):
self.Collapse(self.rootList[10])
def CMenuResourceOpen(self, event):
item = self.GetFocusedItem()
if not item in self.rootList:
r = self.GetItemData(item)
if r:
r.MenuOpen(event)
def CMenuResourceCreate(self, event):
item = self.GetFocusedItem()
if not item in self.rootList:
r = self.GetItemData(item)
if r:
creators = [
self.CMenuRootTextureCreate, self.CMenuRootSoundCreate, self.CMenuRootFontCreate,
self.CMenuRootPathCreate, self.CMenuRootTimelineCreate, self.CMenuRootMeshCreate,
self.CMenuRootLightCreate, self.CMenuRootObjectCreate, self.CMenuRootRoomCreate,
None, self.CMenuRootConfigCreate, self.CMenuRootExtraCreate
]
creators[r.type](None)
def CMenuResourceRename(self, event):
item = self.GetFocusedItem()
if not item in self.rootList:
r = self.GetItemData(item)
if r:
r.MenuRename(event)
def CMenuResourceDelete(self, event):
item = self.GetFocusedItem()
if not item in self.rootList:
r = self.GetItemData(item)
if r:
r.MenuDelete(event)
def CMenuResourceDuplicate(self, event):
item = self.GetFocusedItem()
if not item in self.rootList:
r = self.GetItemData(item)
if r:
r.MenuDuplicate(event)
def CMenuRoomSetAsFirst(self, event):
item = self.GetFocusedItem()
if not item in self.rootList:
r = self.GetItemData(item)
if r:
first_room = self.top.gameCfg["first_room"]
if first_room:
for rm in self.top.rooms:
if first_room == rm.name:
self.SetItemBold(rm.treeitem, False)
break
self.top.gameCfg["first_room"] = r.name
self.SetItemBold(item, True)
self.top.setUnsaved()
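# Instantiation sketch (an assumption about how the main frame wires this
# control in; BEEFFrame and its resourcePanel attribute are placeholders):
#
# class BEEFFrame(wx.Frame):
#     def initResourceTree(self):
#         self.tree = BEEFTreeCtrl(top=self, parent=self.resourcePanel)
#         self.tree.Bind()         # hook up context menus and accelerators
#         self.tree.expandRoot()   # show the resource category folders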
| mit | -717,678,448,196,155,100 | 31.24952 | 160 | 0.72539 | false |
liqd/adhocracy | src/adhocracy/controllers/page.py | 4 | 37135 | import json
import logging
from operator import itemgetter
import formencode
from formencode import htmlfill, Invalid, validators
from pylons import request, tmpl_context as c
from pylons.controllers.util import abort, redirect
from pylons.decorators import validate
from pylons.i18n import _
from adhocracy import config
from adhocracy import forms, model
from adhocracy.lib import democracy, event, helpers as h
from adhocracy.lib import pager, sorting, tiles, watchlist, logo
from adhocracy.lib.auth import guard
from adhocracy.lib.auth import can, require
from adhocracy.lib.auth.csrf import RequireInternalRequest
from adhocracy.lib.base import BaseController
from adhocracy.lib.instance import RequireInstance
from adhocracy.lib.staticpage import add_static_content
from adhocracy.lib.templating import (render, render_json, ret_abort,
render_logo)
from adhocracy.lib.templating import OVERLAY_SMALL
from adhocracy.lib.text.diff import (norm_texts_inline_compare,
page_titles_compare)
from adhocracy.lib.text.render import render_line_based, render as render_text
import adhocracy.lib.text as libtext
from adhocracy.lib.util import get_entity_or_abort
log = logging.getLogger(__name__)
class NoneObject(object):
pass
NoPage = NoneObject()
class PageCreateForm(formencode.Schema):
allow_extra_fields = True
title = forms.ValidTitle(unused_label=True)
text = validators.String(max=20000, min=0, not_empty=False,
if_empty=None, if_missing=None)
parent = forms.ValidPage(if_missing=None, if_empty=None, not_empty=False)
proposal = forms.ValidProposal(not_empty=False, if_empty=None,
if_missing=None)
tags = validators.String(max=20000, not_empty=False)
milestone = forms.MaybeMilestone(if_empty=None, if_missing=None)
category = formencode.foreach.ForEach(forms.ValidCategoryBadge())
formatting = validators.StringBool(not_empty=False, if_empty=False,
if_missing=False)
container = validators.StringBool(not_empty=False, if_empty=False,
if_missing=False)
sectionpage = validators.StringBool(not_empty=False, if_empty=False,
if_missing=False)
allow_comment = validators.StringBool(not_empty=False, if_empty=False,
if_missing=False)
allow_selection = validators.StringBool(not_empty=False, if_empty=False,
if_missing=False)
always_show_original = validators.StringBool(not_empty=False,
if_empty=False,
if_missing=False)
watch = validators.StringBool(not_empty=False, if_empty=False,
if_missing=False)
if config.get_bool('adhocracy.page.allow_abstracts'):
abstract = validators.String(max=255, not_empty=False, if_empty=None,
if_missing=None)
class PageEditForm(formencode.Schema):
allow_extra_fields = True
class PageUpdateForm(formencode.Schema):
allow_extra_fields = True
title = forms.ValidTitle()
variant = forms.VariantName(not_empty=True)
text = validators.String(max=20000, min=0, not_empty=False,
if_empty=None, if_missing=None)
parent_text = forms.ValidText(if_missing=None, if_empty=None,
not_empty=False)
parent_page = forms.ValidPage(if_missing=NoPage, if_empty=None,
not_empty=False)
proposal = forms.ValidProposal(not_empty=False, if_empty=None,
if_missing=None)
milestone = forms.MaybeMilestone(if_empty=None,
if_missing=None)
category = formencode.foreach.ForEach(forms.ValidCategoryBadge())
formatting = validators.StringBool(not_empty=False, if_empty=False,
if_missing=False)
sectionpage = validators.StringBool(not_empty=False, if_empty=False,
if_missing=False)
allow_comment = validators.StringBool(not_empty=False, if_empty=False,
if_missing=False)
allow_selection = validators.StringBool(not_empty=False, if_empty=False,
if_missing=False)
always_show_original = validators.StringBool(not_empty=False,
if_empty=False,
if_missing=False)
watch = validators.StringBool(not_empty=False, if_empty=False,
if_missing=False)
if config.get_bool('adhocracy.page.allow_abstracts'):
abstract = validators.String(max=255, not_empty=False, if_empty=None,
if_missing=None)
class PageFilterForm(formencode.Schema):
allow_extra_fields = True
pages_q = validators.String(max=255, not_empty=False, if_empty=u'',
if_missing=u'')
class PageDiffForm(formencode.Schema):
allow_extra_fields = True
left = forms.ValidText()
right = forms.ValidText()
class PageController(BaseController):
identifier = 'norms'
@RequireInstance
@guard.page.index()
@validate(schema=PageFilterForm(), post_only=False, on_get=True)
def index(self, format="html"):
data = {}
pages = model.Page.all(instance=c.instance,
functions=model.Page.LISTED)
if request.params.get('pages_sort', '4') == '4':
            # crude hack to get only top-level pages because the pager
            # cannot handle this and we cannot pass arguments to the tile
# WARNING: This will break if the index of the sort changes.
c.is_hierarchical = True
pages = [page for page in pages if page.parent is None]
data['pages_pager'] = pager.pages(pages)
if format == 'json':
return render_json(data['pages_pager'])
tags = model.Tag.popular_tags(limit=30)
data['cloud_tags'] = sorted(h.tag.tag_cloud_normalize(tags),
key=lambda (k, c, v): k.name)
data['tutorial_intro'] = _('tutorial_norms_overview_tab')
data['tutorial'] = 'page_index'
add_static_content(data, u'adhocracy.static.page_index_heading',
body_key=u'heading_text',
title_key=u'heading_title')
if c.instance.page_index_as_tiles:
return render("/page/index_tiles.html", data,
overlay=format == u'overlay',
overlay_size=OVERLAY_SMALL)
else:
return render("/page/index.html", data,
overlay=format == u'overlay',
overlay_size=OVERLAY_SMALL)
@RequireInstance
@guard.page.create()
def new(self, errors=None, format=u'html'):
defaults = dict(request.params)
if not defaults:
defaults['watch'] = True
c.title = request.params.get('title', None)
proposal_id = request.params.get("proposal")
c.categories = model.CategoryBadge.all(
c.instance, include_global=not c.instance.hide_global_categories)
c.section = u'section_parent' in request.params
if c.section:
c.parent = get_entity_or_abort(
model.Page, request.params.get(u'section_parent'))
if c.title is None:
c.title = u"%s %i" % (c.parent.label,
len(c.parent.children))
html = None
if proposal_id is not None:
c.proposal = model.Proposal.find(proposal_id)
html = render('/selection/propose.html',
overlay=format == u'overlay')
else:
c.propose = None
html = render("/page/new.html", overlay=format == u'overlay')
return htmlfill.render(html, defaults=defaults, errors=errors,
force_defaults=False)
@RequireInstance
@RequireInternalRequest(methods=['POST'])
@guard.page.create()
def create(self, format='html'):
try:
self.form_result = PageCreateForm().to_python(request.params)
# a proposal that this norm should be integrated with
proposal = self.form_result.get("proposal")
_text = self.form_result.get("text")
if not can.norm.create():
if not proposal:
msg = _("No proposal has been specified")
                    raise Invalid(msg, request.params, None,
                                  error_dict={'title': msg})
if not c.instance.allow_propose:
msg = _("You cannot create a new norm")
                    raise Invalid(msg, request.params, None,
                                  error_dict={'title': msg})
# if a proposal is specified, create a stub:
_text = None
except Invalid, i:
return self.new(errors=i.unpack_errors())
variant = self.form_result.get("title")
container = self.form_result.get('container')
page = model.Page.create(
c.instance, variant, _text, c.user,
function=(model.Page.CONTAINER if container else model.Page.NORM),
formatting=(self.form_result.get("formatting")
or self.form_result.get("container")),
sectionpage=(False if container
else self.form_result.get("sectionpage")),
allow_comment=self.form_result.get("allow_comment"),
allow_selection=self.form_result.get("allow_selection"),
always_show_original=self.form_result.get("always_show_original"),
tags=self.form_result.get("tags"))
page.milestone = self.form_result.get('milestone')
if self.form_result.get("parent") is not None:
page.parents.append(self.form_result.get("parent"))
if (config.get_bool('adhocracy.page.allow_abstracts')
and c.instance.page_index_as_tiles
and not page.is_section()):
page.abstract = self.form_result.get('abstract')
if c.came_from != u'':
came_from = c.came_from
elif proposal is not None and can.selection.create(proposal):
model.Selection.create(proposal, page, c.user, variant=variant)
# if a selection was created, go there instead:
came_from = h.page.url(page, member='branch',
query={'proposal': proposal.id})
else:
came_from = h.entity_url(page) # by default, redirect to the page
categories = self.form_result.get('category')
category = categories[0] if categories else None
page.set_category(category, c.user)
model.meta.Session.commit()
try:
# fixme: show image errors in the form
if ('logo' in request.POST and
hasattr(request.POST.get('logo'), 'file') and
request.POST.get('logo').file):
logo.store(page, request.POST.get('logo').file)
except Exception, e:
h.flash(_(u"errors while uploading image: %s") % unicode(e),
'error')
log.debug(e)
if can.watch.create():
watchlist.set_watch(page, self.form_result.get('watch'))
event.emit(event.T_PAGE_CREATE, c.user, instance=c.instance,
topics=[page], page=page, rev=page.head)
redirect(came_from)
@RequireInstance
@validate(schema=PageEditForm(), form='edit', post_only=False, on_get=True)
def edit(self, id, variant=None, text=None, branch=False, errors={},
format=u'html'):
c.page, c.text, c.variant = self._get_page_and_text(id, variant, text)
c.variant = request.params.get("variant", c.variant)
c.proposal = request.params.get("proposal")
c.formatting = request.params.get("formatting", False)
c.sectionpage = request.params.get("sectionpage", True)
c.allow_comment = request.params.get("allow_comment", False)
c.allow_selection = request.params.get("allow_selection", False)
c.always_show_original = request.params.get("always_show_original",
False)
c.branch = branch
c.container = c.page.function == c.page.CONTAINER
c.abstract = request.params.get("abstract")
c.section = 'section_parent' in request.params
if c.section:
c.parent = get_entity_or_abort(
model.Page, request.params.get(u'section_parent'))
if branch or c.variant is None:
c.variant = ""
require.norm.edit(c.page, c.variant)
# all available categories
c.categories = model.CategoryBadge.all(c.instance, include_global=True)
# categories for this page
# (single category not assured in db model)
c.category = c.page.category
if logo.exists(c.page):
c.logo = '<img src="%s" />' % h.logo_url(c.page, 48)
defaults = dict(request.params)
if 'watch' not in defaults:
defaults['watch'] = h.find_watch(c.page)
if branch and c.text is None:
c.text = c.page.head.text
if c.came_from != u'':
c.came_from = c.came_from
elif c.section:
c.came_from = h.entity_url(c.parent,
anchor="subpage-%i" % c.page.id)
else:
c.came_from = h.entity_url(c.text)
c.text_rows = libtext.text_rows(c.text)
c.left = c.page.head
html = render('/page/edit.html', overlay=format == u'overlay',
overlay_size=OVERLAY_SMALL)
return htmlfill.render(html, defaults=defaults,
errors=errors, force_defaults=False)
@RequireInstance
@RequireInternalRequest(methods=['POST'])
def update(self, id, variant=None, text=None, format='html'):
c.page, c.text, c.variant = self._get_page_and_text(id, variant, text)
branch = False
try:
class state_(object):
page = c.page
            # branch is validated on its own, since it needs to be
            # carried to the error page.
branch_val = validators.StringBool(not_empty=False,
if_empty=False,
if_missing=False)
branch = branch_val.to_python(request.params.get('branch'))
self.form_result = PageUpdateForm().to_python(request.params,
state=state_())
# delete the logo if the button was pressed and exit
if 'delete_logo' in self.form_result:
updated = logo.delete(c.page)
h.flash(_(u'The logo has been deleted.'), 'success')
redirect(h.entity_url(c.page))
try:
# fixme: show image errors in the form
if ('logo' in request.POST and
hasattr(request.POST.get('logo'), 'file') and
request.POST.get('logo').file):
logo.store(c.page, request.POST.get('logo').file)
except Exception, e:
model.meta.Session.rollback()
h.flash(unicode(e), 'error')
log.debug(e)
return self.edit(id, variant=c.variant, text=c.text.id,
branch=branch, format=format)
parent_text = self.form_result.get("parent_text")
if ((branch or
parent_text.variant != self.form_result.get("variant")) and
self.form_result.get("variant") in c.page.variants):
msg = (_("Variant %s is already present, cannot branch.") %
self.form_result.get("variant"))
raise Invalid(msg, branch, state_(),
error_dict={'variant': msg})
except Invalid, i:
return self.edit(id, variant=c.variant, text=c.text.id,
branch=branch, errors=i.unpack_errors(),
format=format)
c.variant = self.form_result.get("variant")
require.norm.edit(c.page, c.variant)
if parent_text.page != c.page:
return ret_abort(_("You're trying to update to a text which is "
"not part of this pages history"),
code=400, format=format)
if can.variant.edit(c.page, model.Text.HEAD):
parent_page = self.form_result.get("parent_page", NoPage)
if parent_page != NoPage and parent_page != c.page:
c.page.parent = parent_page
if can.page.manage(c.page):
c.page.milestone = self.form_result.get('milestone')
categories = self.form_result.get('category')
category = categories[0] if categories else None
c.page.set_category(category, c.user)
c.page.formatting = self.form_result.get('formatting')
c.page.sectionpage = self.form_result.get('sectionpage')
c.page.allow_comment = self.form_result.get('allow_comment')
c.page.allow_selection = self.form_result.get('allow_selection')
c.page.always_show_original = self.form_result.get(
'always_show_original')
if not branch and c.variant != parent_text.variant \
and parent_text.variant != model.Text.HEAD:
c.page.rename_variant(parent_text.variant, c.variant)
text = model.Text.create(c.page, c.variant, c.user,
self.form_result.get("title"),
self.form_result.get("text"),
parent=parent_text)
target = text
proposal = self.form_result.get("proposal")
if proposal is not None and can.selection.create(proposal):
target = model.Selection.create(proposal, c.page, c.user,
variant=c.variant)
poll = target.variant_poll(c.variant)
if poll and can.poll.vote(poll):
decision = democracy.Decision(c.user, poll)
decision.make(model.Vote.YES)
model.Tally.create_from_poll(poll)
if (config.get_bool('adhocracy.page.allow_abstracts')
and c.instance.page_index_as_tiles
and not c.page.is_section()):
c.page.abstract = self.form_result.get('abstract')
model.meta.Session.commit()
if can.watch.create():
watchlist.set_watch(c.page, self.form_result.get('watch'))
event.emit(event.T_PAGE_EDIT, c.user, instance=c.instance,
topics=[c.page], page=c.page, rev=text)
if c.came_from != u'':
redirect(c.came_from)
else:
redirect(h.entity_url(text))
@classmethod
def _diff_details(cls, left, right, formatting):
left_text = left.text.strip() if left.text else ''
right_text = right.text.strip() if right.text else ''
has_changes = ((left_text != right_text))
title = right.title
if formatting:
text = render_text(right.text)
else:
text = render_line_based(right)
text_diff = norm_texts_inline_compare(left, right)
title_diff = page_titles_compare(left, right)
return dict(title=title, text=text, title_diff=title_diff,
text_diff=text_diff, has_changes=has_changes,
is_head=(right.variant == model.Text.HEAD))
@classmethod
def _selection_urls(cls, selection):
urls = {}
for (variant, poll) in selection.variant_polls:
urls[variant] = {
'votes': h.entity_url(poll, member="votes"),
'poll_widget': h.entity_url(poll, member="widget.big")}
return {'urls': urls}
@classmethod
def _selections_details(cls, page, variant, current_selection=None):
try:
selections = model.Selection.by_variant(page, variant)
except IndexError:
selections = []
return [cls._selection_details(selection, variant,
current_selection=current_selection)
for selection in selections]
@classmethod
def _selection_details(cls, selection, variant, current_selection=None):
try:
score = selection.variant_poll(variant).tally.score
except:
score = 0
rendered_score = "%+d" % score
current = False
if current_selection is not None:
current = selection.id == current_selection.id
return {'score': score,
'rendered_score': rendered_score,
'selection_id': selection.id,
'proposal_title': selection.proposal.title,
'proposal_text': render_text(
selection.proposal.description.head.text),
'proposal_url': h.selection.url(selection),
'proposal_creator_name': selection.proposal.creator.name,
'proposal_creator_url': h.entity_url(
selection.proposal.creator),
'proposal_create_time': h.datetime_tag(
selection.proposal.create_time),
'proposal_edit_url': h.entity_url(
selection.proposal, member='edit'),
'proposal_can_edit': can.proposal.edit(selection.proposal),
'proposal_delete_url': h.entity_url(selection.proposal,
member='ask_delete'),
'proposal_can_delete': can.proposal.delete(selection.proposal),
'current': current,
}
@classmethod
def _variant_details(cls, page, variant):
'''
Return details for a variant including diff information
and details about the proposals that selected this variant.
'''
head_text = page.head
variant_text = page.variant_head(variant)
details = cls._diff_details(head_text, variant_text, page.formatting)
# Replace items coming from diff_details for the UI
messages = (('text', _('<i>(No text)</i>')),
('title', _('<i>(No title)</i>')),
('text_diff', _('<i>(No differences)</i>')),
('title_diff', _('<i>(No differences)</i>')))
for (key, message) in messages:
if details[key].strip() == '':
details[key] = message
selections = cls._selections_details(page, variant)
if variant == model.Text.HEAD:
is_head = True
votewidget_url = ''
else:
is_head = False
try:
selection = model.Selection.by_variant(page, variant)[0]
votewidget_url = h.entity_url(
selection.proposal.rate_poll,
member="widget.big")
except IndexError:
votewidget_url = ''
details.update(
{'variant': variant,
'display_title': cls._variant_display_title(variant),
'history_url': h.entity_url(variant_text, member='history'),
'history_count': len(variant_text.history),
'selections': selections,
'num_selections': len(selections),
'is_head': is_head,
'can_edit': can.variant.edit(page, variant),
'edit_url': h.entity_url(variant_text, member='edit'),
'votewidget_url': votewidget_url})
return details
@classmethod
def _variant_display_title(cls, variant):
if variant == model.Text.HEAD:
return _('Original version')
return _(u'Variant: "%s"') % variant
@classmethod
def _variant_item(cls, page, variant):
'''
Return a `dict` with information about the variant.
'''
is_head = (variant == model.Text.HEAD)
title = _('Original Version') if is_head else variant
return {'href': h.page.page_variant_url(page, variant=variant),
'title': title,
'display_title': title, # bbb
'is_head': is_head,
'variant': variant}
@classmethod
def _variant_items(self, page, selection=None):
'''
Return a `list` of `dicts` with information about the variants.
'''
items = []
for variant in page.variants:
if selection and (variant not in selection.variants):
continue
item = self._variant_item(page, variant)
items.append(item)
return items
@classmethod
def _insert_variant_score_and_sort(self, items, score_func):
'''
Insert the score into the items and sort the variant items based
on their *score* with mode.Text.HEAD as the first item.
score_func is a method that receives the item as the only
argument.
'''
head_item = None
other_items = []
for item in items:
if item['variant'] == model.Text.HEAD:
item['score'] = None
item['rendered_score'] = ''
head_item = item
else:
score = score_func(item)
item['score'] = score
item['rendered_score'] = '%+d' % score
other_items.append(item)
items = sorted(other_items, key=itemgetter('score'), reverse=True)
items.insert(0, head_item)
return items
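    # Illustrative example of the ordering above (scores are made up): given
    #
    #     items = [{'variant': model.Text.HEAD},
    #              {'variant': u'a'}, {'variant': u'b'}]
    #
    # and a score_func returning 1 for 'a' and 5 for 'b', the result keeps
    # HEAD first (score None, empty rendered_score), followed by 'b' (+5)
    # and then 'a' (+1).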
@RequireInstance
def show(self, id, variant=None, text=None, format='html',
amendment=False):
if amendment:
# variant may actually be a proposal id
proposal = model.Proposal.find(variant)
if proposal is not None and proposal.is_amendment:
variant = proposal.selection.selected
c.page, c.text, c.variant = self._get_page_and_text(id, variant, text)
require.page.show(c.page)
c.overlay = format == 'overlay'
c.amendment = amendment
if c.amendment and not c.page.allow_selection:
return ret_abort(
_("Page %s does not allow selections") % c.page.title,
code=400, format=format)
# Error handling and json api
if c.text.variant != c.variant:
abort(404, _("Variant %s does not exist!") % c.variant)
if format == 'json':
return render_json(c.page.to_dict(text=c.text))
c.category = c.page.category
# variant details and returning them as json when requested.
c.variant_details = self._variant_details(c.page, c.variant)
if 'variant_json' in request.params:
return render_json(c.variant_details)
c.variant_details_json = json.dumps(c.variant_details, indent=4)
# Make a list of variants to render the vertical tab navigation
variant_items = self._variant_items(c.page)
def get_score(item):
selections = model.Selection.by_variant(c.page,
item['variant'])
if len(selections):
return selections[0].proposal.rate_poll.tally.score
else:
return 0
variant_items = self._insert_variant_score_and_sort(variant_items,
get_score)
# filter out all but the highest rated variant from a proposal
c.variant_items = []
selections = []
for item in variant_items:
variant = item['variant']
if variant == model.Text.HEAD:
c.variant_items.append(item)
continue
selections_ = model.Selection.by_variant(c.page, variant)
if not selections_:
log.warning('continue - no selection: %s' % variant)
continue
selection = selections_[0]
if selection not in selections:
selections.append(selection)
c.variant_items.append(item)
# Metadata and subpages pager
sorts = {_("oldest"): sorting.entity_oldest,
_("newest"): sorting.entity_newest,
_("alphabetically"): sorting.delegateable_title}
c.subpages_pager = pager.NamedPager(
'subpages', c.page.subpages,
(tiles.page.row
if c.page.function == model.Page.CONTAINER
else tiles.page.smallrow),
sorts=sorts, default_sort=sorting.delegateable_title)
self._common_metadata(c.page, c.text)
c.tutorial_intro = _('tutorial_norm_show_tab')
c.tutorial = 'page_show'
if c.page.function == c.page.CONTAINER:
return render("/page/show_container.html")
elif not c.amendment and c.page.is_sectionpage():
return render("/page/show_sectionpage.html",
overlay=(format == 'overlay'))
else:
return render("/page/show.html",
overlay=(format == 'overlay'))
@RequireInstance
def history(self, id, variant=model.Text.HEAD, text=None, format='html'):
c.page, c.text, c.variant = self._get_page_and_text(id, variant, text)
require.page.show(c.page)
if c.text is None:
h.flash(_("No such text revision."), 'notice')
redirect(h.entity_url(c.page))
c.texts_pager = pager.NamedPager(
'texts', c.text.history, tiles.text.history_row, count=10,
sorts={},
default_sort=sorting.entity_newest)
if format == 'json':
return render_json(c.texts_pager)
c.tile = tiles.page.PageTile(c.page)
self._common_metadata(c.page, c.text)
if format == 'ajax':
return c.texts_pager.here()
elif format == 'overlay':
return render('/page/history.html', overlay=True)
else:
return render('/page/history.html')
@RequireInstance
def comments(self, id, variant=model.Text.HEAD, text=None, format=None):
c.page, c.text, c.variant = self._get_page_and_text(id, variant, text)
require.page.show(c.page)
if not c.page.allow_comment:
return ret_abort(
_("Page %s does not allow comments") % c.page.title,
code=400, format=format)
if c.text is None:
h.flash(_("No such text revision."), 'notice')
redirect(h.entity_url(c.page))
self._common_metadata(c.page, c.text)
c.came_from = ''
if format == 'ajax':
return tiles.comment.list(c.page)
elif format == 'overlay':
c.came_from = h.entity_url(c.page, member='comments') + '.overlay'
return render('/page/comments.html', overlay=True,
overlay_size=OVERLAY_SMALL)
else:
return render('/page/comments.html')
@RequireInstance
@validate(schema=PageDiffForm(), form='bad_request', post_only=False,
on_get=True)
def diff(self):
left = self.form_result.get('left')
right = self.form_result.get('right')
require.page.show(left.page)
require.page.show(right.page)
options = [right.page.variant_head(v) for v in right.page.variants]
return self._differ(left, right, options=options)
def _differ(self, left, right, options=None):
if left == right:
h.flash(_("Cannot compare identical text revisions."), 'notice')
redirect(h.entity_url(right))
c.left, c.right = (left, right)
c.left_options = options
if c.left.page != c.right.page:
h.flash(_("Cannot compare versions of different texts."), 'notice')
redirect(h.entity_url(c.right))
c.tile = tiles.page.PageTile(c.right.page)
self._common_metadata(c.right.page, c.right)
return render("/page/diff.html")
@RequireInstance
def ask_purge(self, id, variant):
c.page, c.text, c.variant = self._get_page_and_text(id, variant, None)
require.variant.delete(c.page, c.variant)
c.tile = tiles.page.PageTile(c.page)
return render("/page/ask_purge.html")
@RequireInstance
@RequireInternalRequest()
def purge(self, id, variant):
c.page, c.text, c.variant = self._get_page_and_text(id, variant, None)
require.variant.delete(c.page, c.variant)
c.page.purge_variant(c.variant)
model.meta.Session.commit()
# event.emit(event.T_PAGE_DELETE, c.user, instance=c.instance,
# topics=[c.page], page=c.page)
h.flash(_("The variant %s has been deleted.") % c.variant,
'success')
redirect(h.entity_url(c.page))
@RequireInstance
def ask_purge_history(self, id, text):
c.page, c.text, c.variant = self._get_page_and_text(id, None, text)
require.page.delete_history(c.page)
if c.text.valid_child() is None and c.text.valid_parent() is None:
h.flash(_("Cannot delete, if there's only one version"), 'error')
return redirect(h.entity_url(c.text))
return render("/page/ask_purge_history.html")
@RequireInstance
@RequireInternalRequest()
def purge_history(self, id, text):
c.page, c.text, c.variant = self._get_page_and_text(id, None, text)
require.page.delete_history(c.page)
if c.text.valid_child() is None and c.text.valid_parent() is None:
h.flash(_("Cannot delete, if there's only one version"), 'error')
return redirect(h.entity_url(c.text))
c.text.delete()
model.meta.Session.commit()
h.flash(_("The selected version has been deleted."), 'success')
redirect(h.entity_url(c.page))
@RequireInstance
def ask_delete(self, id, format="html"):
c.page = get_entity_or_abort(model.Page, id)
require.page.delete(c.page)
c.tile = tiles.page.PageTile(c.page)
c.section = u'section_parent' in request.params
if c.section:
c.parent = get_entity_or_abort(
model.Page, request.params.get(u'section_parent'))
c.came_from = h.entity_url(c.parent)
else:
c.came_from = h.entity_url(c.page.instance)
return render("/page/ask_delete.html", overlay=(format == u'overlay'))
@RequireInstance
@RequireInternalRequest()
def delete(self, id):
c.page = get_entity_or_abort(model.Page, id)
require.page.delete(c.page)
c.page.delete()
model.meta.Session.commit()
event.emit(event.T_PAGE_DELETE, c.user, instance=c.instance,
topics=[c.page], page=c.page)
h.flash(_("The page %s has been deleted.") % c.page.title,
'success')
redirect(c.came_from)
def _get_page_and_text(self, id, variant, text):
page = get_entity_or_abort(model.Page, id)
_text = page.head
if text is not None:
_text = get_entity_or_abort(model.Text, text)
if _text.page != page or (variant and _text.variant != variant):
abort(404, _("Invalid text ID %s for this page/variant!") %
text)
variant = _text.variant
elif variant is not None:
_text = page.variant_head(variant)
if _text is None:
_text = page.head
else:
variant = _text.variant
return (page, _text, variant)
def _common_metadata(self, page, text):
if text and text.text and len(text.text):
h.add_meta("description",
libtext.meta_escape(text.text, markdown=False)[0:160])
tags = page.tags
if len(tags):
h.add_meta("keywords", ", ".join([k.name for (k, v) in tags]))
h.add_meta("dc.title",
libtext.meta_escape(page.title, markdown=False))
h.add_meta("dc.date",
page.create_time.strftime("%Y-%m-%d"))
h.add_meta("dc.author",
libtext.meta_escape(text.user.name, markdown=False))
@RequireInstance
def logo(self, id, y, x=None):
page = get_entity_or_abort(model.Page, id)
return render_logo(page, y, x=x)
| agpl-3.0 | -1,086,032,969,583,987,700 | 40.86584 | 79 | 0.559715 | false |
debugger06/MiroX | osx/build/bdist.macosx-10.5-fat/lib.macosx-10.5-fat-2.7/miro/displaytext.py | 2 | 7953 | # Miro - an RSS based video player application
# Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011
# Participatory Culture Foundation
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
# In addition, as a special exception, the copyright holders give
# permission to link the code of portions of this program with the OpenSSL
# library.
#
# You must obey the GNU General Public License in all respects for all of
# the code used other than OpenSSL. If you modify file(s) with this
# exception, you may extend this exception to your version of the file(s),
# but you are not obligated to do so. If you do not wish to do so, delete
# this exception statement from your version. If you delete this exception
# statement from all source files in the program, then also delete it here.
"""``miro.displaytext`` -- Format strings to send to the user.
"""
import datetime
from miro import gtcache
from miro.gtcache import gettext as _
from miro.gtcache import ngettext
LOCALE_HAS_UNIT_CONVERSIONS = True
def strftime_to_unicode(nbytes):
"""Convert the value return by strftime() to a unicode string.
By default, it's in whatever the default codeset is. If we can't find a
codeset then assume utf-8 to give us a base to always return unicode.
"""
global LOCALE_HAS_UNIT_CONVERSIONS
if gtcache.codeset is None or not LOCALE_HAS_UNIT_CONVERSIONS:
return nbytes.decode('utf-8', 'replace')
else:
# bug #14713: some locales don't have unit conversions
# defined, so then decode throws an error
try:
return nbytes.decode(gtcache.codeset)
except LookupError:
LOCALE_HAS_UNIT_CONVERSIONS = False
return nbytes.decode('utf-8', 'replace')
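# Illustrative usage sketch (the sample date and output are assumptions, not
# taken from Miro itself): strftime() returns a byte string in the locale's
# codeset, and this helper always hands back unicode, e.g.
#
#     >>> raw = datetime.datetime(2011, 1, 5).strftime("%B %d, %Y")
#     >>> strftime_to_unicode(raw)
#     u'January 05, 2011'
#
# If the codeset lookup fails (bug #14713 above), it falls back to decoding
# as UTF-8 with 'replace'.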
def download_rate(rate):
if rate >= (1 << 30):
value = "%1.1f" % (rate / float(1 << 30))
return _("%(size)s GB/s", {"size": value})
elif rate >= (1 << 20):
value = "%1.1f" % (rate / float(1 << 20))
return _("%(size)s MB/s", {"size": value})
elif rate >= (1 << 10):
value = "%1.1f" % (rate / float(1 << 10))
return _("%(size)s kB/s", {"size": value})
elif rate > 0:
value = "%1.1f" % rate
return _("%(size)s B/s", {"size": value})
else:
return ""
def short_time_string(secs):
"""Takes an integer number of seconds and returns a string
of the form MM:SS.
"""
try:
return "%d:%02d" % divmod(int(round(secs)), 60)
except TypeError:
return ""
def time_string(secs):
if secs >= (60 * 60 * 24):
return days_string(secs)
if secs >= (60 * 60):
return hrs_string(secs)
if secs >= 60:
return mins_string(secs)
return secs_string(secs)
def days_string(secs):
t_dy = int(round(secs / (60.0 * 60.0 * 24.0)))
return ngettext('%(num)d day', '%(num)d days', t_dy,
{"num": t_dy})
def hrs_string(secs):
t_hr = int(round(secs / (60.0 * 60.0)))
return ngettext('%(num)d hr', '%(num)d hrs', t_hr,
{"num": t_hr})
def mins_string(secs):
t_min = int(round(secs / 60.0))
return ngettext('%(num)d min', '%(num)d mins', t_min,
{"num": t_min})
def secs_string(secs):
return ngettext('%(num)d sec', '%(num)d secs', secs, {"num": secs})
def time_string_0_blank(secs):
if secs <= 0:
return ''
else:
return time_string(secs)
def size_string(nbytes):
# when switching from the enclosure reported size to the
# downloader reported size, it takes a while to get the new size
# and the downloader returns -1. the user sees the size go to -1B
# which is weird.... better to return an empty string.
if nbytes == -1 or nbytes == 0:
return ""
# FIXME this is a repeat of util.format_size_for_user ... should
# probably ditch one of them.
if nbytes >= (1 << 30):
value = "%.1f" % (nbytes / float(1 << 30))
return _("%(size)s GB", {"size": value})
elif nbytes >= (1 << 20):
value = "%.1f" % (nbytes / float(1 << 20))
return _("%(size)s MB", {"size": value})
elif nbytes >= (1 << 10):
value = "%.1f" % (nbytes / float(1 << 10))
return _("%(size)s KB", {"size": value})
else:
return _("%(size)s B", {"size": nbytes})
def expiration_date(exp_date):
offset = exp_date - datetime.datetime.now()
if offset.days > 0:
return ngettext("Expires in %(count)d day",
"Expires in %(count)d days",
offset.days,
{"count": offset.days})
elif offset.seconds > 3600:
hours = int(round(offset.seconds / 3600.0))
return ngettext("Expires in %(count)d hour",
"Expires in %(count)d hours",
hours,
{"count": hours})
else:
minutes = int(round(offset.seconds / 60.0))
return ngettext("Expires in %(count)d minute",
"Expires in %(count)d minutes",
minutes,
{"count": minutes})
def expiration_date_short(exp_date):
offset = exp_date - datetime.datetime.now()
if offset.days > 0:
return ngettext("Expires: %(count)d day",
"Expires: %(count)d days",
offset.days,
{"count": offset.days})
elif offset.seconds > 3600:
hours = int(round(offset.seconds / 3600.0))
return ngettext("Expires: %(count)d hour",
"Expires: %(count)d hours",
hours,
{"count": hours})
else:
minutes = int(round(offset.seconds / 60.0))
return ngettext("Expires: %(count)d minute",
"Expires: %(count)d minutes",
minutes,
{"count": minutes})
def date(rdate):
"""Takes a date object and returns the "month day, year"
representation.
If the rdate is below the minimum date, then this returns an
empty string.
"""
if rdate is None:
return ''
if rdate > datetime.datetime.min:
# figure out the date pieces, convert to unicode, then split
# it on "::" so we can run gettext on it allowing translators
# to reorder it. see bug 11662.
m, d, y = strftime_to_unicode(rdate.strftime("%B::%d::%Y")).split("::")
return _("%(month)s %(dayofmonth)s, %(year)s",
{"month": m, "dayofmonth": d, "year": y})
else:
return ''
def date_slashes(rdate):
"""Takes a date object and returns the "MM/DD/YYYY"
representation.
If the rdate is below the minimum date, then this returns an
empty string.
"""
if rdate is None:
return ''
if rdate > datetime.datetime.min:
# note: %x is locale-appropriate
return strftime_to_unicode(rdate.strftime("%x"))
else:
return ''
def duration(secs):
if secs >= 60:
return mins_string(secs)
elif secs > 0:
return secs_string(secs)
else:
return ''
def integer(num):
if num < 0:
return ""
try:
num = int(num)
except (ValueError, TypeError):
return ""
return str(num)
| gpl-2.0 | -1,340,470,692,234,610,200 | 33.881579 | 79 | 0.579404 | false |
khandavally/devstack | EPAQA/pci_manager_patch.py | 1 | 2622 | from nova.pci.pci_manager import PciDevTracker
class PciDevTrackerPatch(PciDevTracker):
""" Extension of nova.pci.pci_manager.PciDevTracker class to help pci_device allocation
based on vf allocation algorithm"""
def __init__(self, node_id=None):
super(PciDevTrackerPatch, self).__init__(node_id)
self.selected_vfs = None
def set_selected_vfs(self, limits):
"""Function sets member variable selected_vfs from the passed argument limits"""
self.selected_vfs = limits.get('selected_vfs', {})
def get_free_devices_for_requests(self, pci_requests):
"""This is an over loaded function which helps in short listing
pci_devices as per the requirement for an instance.
"""
alloc = []
for request in pci_requests:
if self.selected_vfs:
available = self._get_selected_devices_for_request(
request,
[p for p in self.free_devs if p not in alloc])
else:
available = self._get_free_devices_for_request(
request,
[p for p in self.free_devs if p not in alloc])
if not available:
return []
alloc.extend(available)
return alloc
def _get_selected_devices_for_request(self, pci_request, pci_devs):
"""Function selects required pci_devices from dev pool as the addrss values are given."""
count = pci_request.get('count', 1)
spec = pci_request.get('spec', [])
devs = self._filter_devices_for_spec(spec, pci_devs)
selected_dev_addresses = [vf_tuple[0] for vf_tuple in self.selected_vfs[pci_request.get('alias_name', 'default')]]
devs = [self.add_workload_type(dev, pci_request) for dev in devs if dev.address in selected_dev_addresses]
if len(devs) < count:
return None
else:
return devs[:count]
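    # Illustrative note (an assumption inferred from the lookups above, not a
    # documented format): ``selected_vfs`` is expected to map an alias name to
    # a list of (device address, workload type) tuples, e.g.
    #
    #     limits = {'selected_vfs': {
    #         'my_vf_alias': [('0000:81:10.1', 'high'), ('0000:81:10.3', 'low')],
    #     }}
    #     tracker.set_selected_vfs(limits)
    #
    # so only VFs whose address appears in the list are handed out, and the
    # matching workload type is attached by add_workload_type() below.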
def add_workload_type(self, dev, pci_request):
for vf_tuple in self.selected_vfs[pci_request.get('alias_name', 'default')]:
if vf_tuple[0] == dev.address:
dev.workload = vf_tuple[1]
return dev
def notify_decorator(name, fn):
"""Decorator for notify which is used from utils.monkey_patch()."""
return fn
@classmethod
def pci_dev_tracker_new(cls, *args, **kwargs):
"""Function which helps overridden of __new__ method in nova.pci.pci_manager.PciDevTracker"""
pci_dev_tracker = object.__new__(PciDevTrackerPatch)
return pci_dev_tracker
def PciManagerPatchMain():
PciDevTracker.__new__ = pci_dev_tracker_new
PciManagerPatchMain()
| apache-2.0 | -3,515,214,527,976,748,000 | 37.558824 | 122 | 0.628909 | false |
Yelp/love | views/tasks.py | 1 | 2557 | # -*- coding: utf-8 -*-
from flask import request
from flask import Response
from google.appengine.api import taskqueue
from google.appengine.ext import ndb
import logic.employee
import logic.notifier
import logic.love
import logic.love_count
import logic.love_link
from main import app
from models import Love
# All tasks that are to be executed by cron need to use HTTP GET
# see https://cloud.google.com/appengine/docs/python/config/cron
@app.route('/tasks/employees/load/s3', methods=['GET'])
def load_employees_from_s3():
logic.employee.load_employees()
# we need to rebuild the love count index as the departments may have changed.
taskqueue.add(url='/tasks/love_count/rebuild')
return Response(status=200)
# This task has a web UI to trigger it, so let's use POST
@app.route('/tasks/employees/load/csv', methods=['POST'])
def load_employees_from_csv():
logic.employee.load_employees_from_csv()
# we need to rebuild the love count index as the departments may have changed.
taskqueue.add(url='/tasks/love_count/rebuild')
return Response(status=200)
# One-off tasks are much easier to trigger using GET
@app.route('/tasks/employees/combine', methods=['GET'])
def combine_employees():
old_username, new_username = request.args['old'], request.args['new']
if not old_username:
return Response(response='{} is not a valid username'.format(old_username), status=400)
elif not new_username:
return Response(response='{} is not a valid username'.format(new_username), status=400)
logic.employee.combine_employees(old_username, new_username)
return Response(status=200)
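# Illustrative trigger for the endpoint above (usernames are made up):
#
#     GET /tasks/employees/combine?old=alice_old&new=alice
#
# which asks logic.employee.combine_employees() to fold the first account
# into the second.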
@app.route('/tasks/index/rebuild', methods=['GET'])
def rebuild_index():
logic.employee.rebuild_index()
return Response(status=200)
@app.route('/tasks/love/email', methods=['POST'])
def email_love():
love_id = int(request.form['id'])
love = ndb.Key(Love, love_id).get()
logic.love.send_love_email(love)
return Response(status=200)
@app.route('/tasks/love_count/rebuild', methods=['GET'])
def rebuild_love_count():
logic.love_count.rebuild_love_count()
return Response(status=200)
@app.route('/tasks/subscribers/notify', methods=['POST'])
def notify_subscribers():
notifier = logic.notifier.notifier_for_event(request.json['event'])(**request.json['options'])
notifier.notify()
return Response(status=200)
@app.route('/tasks/lovelinks/cleanup', methods=['GET'])
def lovelinks_cleanup():
logic.love_link.love_links_cleanup()
return Response(status=200)
| mit | 6,597,596,622,504,343,000 | 31.782051 | 98 | 0.717638 | false |
quarkslab/irma | frontend/api/scans/services.py | 1 | 15404 | #
# Copyright (c) 2013-2018 Quarkslab.
# This file is part of IRMA project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License in the top-level directory
# of this distribution and at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# No part of the project, including this file, may be copied,
# modified, propagated, or distributed except according to the
# terms contained in the LICENSE file.
import logging
from fasteners import interprocess_locked
from sqlalchemy import inspect
from api.common.sessions import session_transaction, session_query
import api.common.ftp as ftp_ctrl
import api.tasks.braintasks as celery_brain
from api.files.models import File
from api.files_ext.models import FileExt, FileProbeResult
from api.probe_results.models import ProbeResult
from api.scans.models import Scan
from config.parser import get_lock_path, get_max_resubmit_level
from irma.common.base.exceptions import IrmaValueError, IrmaTaskError
from irma.common.base.utils import IrmaReturnCode, IrmaScanStatus
from irma.common.base.utils import IrmaScanRequest
log = logging.getLogger(__name__)
interprocess_lock_path = get_lock_path()
CSV_SEPARATOR = ";"
# ===================
# Internals helpers
# ===================
def _add_empty_result(file_ext, probelist, scan, session):
log.debug("scan %s: file %s add empty results",
scan.external_id, file_ext.external_id)
updated_probelist = []
for probe_name in probelist:
# Fetch the ref results for the file
ref_result = file_ext.file.get_ref_result(probe_name)
if ref_result is not None and not scan.force:
            # a reference result already exists and force is not set:
            # reuse it instead of creating a new one
file_ext.probe_results.append(ref_result)
log.debug("scan %s: link refresult for %s probe %s",
scan.external_id,
file_ext.external_id,
probe_name)
else:
            # result is not known or analysis is forced
# create empty result
# TODO probe types
log.debug("scan %s: creating empty result for %s probe %s",
scan.external_id,
file_ext.external_id,
probe_name)
probe_result = ProbeResult(
None,
probe_name,
None,
None,
files_ext=file_ext
)
            # a scan job should be sent:
            # keep the probe in the scan_request
updated_probelist.append(probe_name)
session.add(probe_result)
session.commit()
return updated_probelist
def _add_empty_results(file_ext_list, scan_request, scan, session):
log.debug("scan %s: scan_request: %s", scan.external_id,
scan_request.to_dict())
new_scan_request = IrmaScanRequest()
for file_ext in file_ext_list:
probelist = scan_request.get_probelist(file_ext.external_id)
updated_probe_list = _add_empty_result(file_ext, probelist,
scan, session)
        # Update scan_request according to reference results already
        # linked in _add_empty_result
if len(updated_probe_list) > 0:
mimetype = scan_request.get_mimetype(file_ext.external_id)
log.debug("scan %s: update scan_request for file %s"
"previously asked %s now %s",
scan.external_id, file_ext.external_id,
scan_request.get_probelist(file_ext.external_id),
updated_probe_list)
new_scan_request.add_file(file_ext.external_id,
updated_probe_list,
mimetype)
log.debug("scan %s: new scan_request %s",
scan.external_id, new_scan_request.to_dict())
return new_scan_request
def _create_scan_request(file_ext_list, probelist, mimetype_filtering):
# Create scan request
# dict of filename : probe_list
# force parameter taken into account
log.debug("probelist: %s mimetype_filtering: %s",
probelist, mimetype_filtering)
scan_request = IrmaScanRequest()
for file_ext in file_ext_list:
scan_request.add_file(file_ext.external_id,
probelist,
file_ext.file.mimetype)
if mimetype_filtering is True:
srdict = scan_request.to_dict()
filtered_srdict = celery_brain.mimetype_filter_scan_request(srdict)
scan_request = IrmaScanRequest(filtered_srdict)
return scan_request
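# Illustrative sketch (assumed, not taken from the original code): the request
# built above holds, for every file_ext external id, the list of probes to run
# and the file's mimetype, roughly
#
#     {"<file_ext uuid>": {"probe_list": ["ClamAV", "VirusTotal"],
#                          "mimetype": "application/pdf"}, ...}
#
# When mimetype_filtering is enabled, the brain is asked to trim each probe
# list down to the probes that accept that mimetype.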
def _sanitize_res(d):
if isinstance(d, str):
# Fix for JSONB
return d.replace("\u0000", "").replace("\x00", "")
elif isinstance(d, list):
return [_sanitize_res(x) for x in d]
elif isinstance(d, dict):
new = {}
for k, v in d.items():
newk = k.replace('.', '_').replace('$', '')
new[newk] = _sanitize_res(v)
return new
else:
return d
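# Behaviour of the sanitizer above, illustrated (example values only):
#
#     _sanitize_res({'av.engine$': {'found': 'EICAR\x00'}})
#     # -> {'av_engine': {'found': 'EICAR'}}
#
# i.e. NUL bytes are stripped from strings and '.'/'$' are removed from dict
# keys so the result can safely be stored as JSONB.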
def _get_or_create_new_files(uploaded_files, session):
new_files = {}
for file_tmp_id in uploaded_files:
file_obj = ftp_ctrl.download_file_data(file_tmp_id)
file = File.get_or_create(file_obj, session)
file_obj.close()
new_files[file_tmp_id] = file
return new_files
def _append_new_files_to_scan(scan, uploaded_files, probe_result, depth):
new_files_ext = []
session = inspect(scan).session
    # Do it in two steps to allow retries:
    # first create all new files, then rename the temp name on the brain
    # to the file_id name
new_files = _get_or_create_new_files(uploaded_files.values(), session)
for (file_realname, file_tmp_id) in uploaded_files.items():
file = new_files[file_tmp_id]
file_ext = FileProbeResult(file, file_realname, probe_result, depth)
file_ext.scan = scan
session.add(file_ext)
session.commit()
log.debug("scan %s: new file_ext id %s for file %s",
scan.external_id, file_ext.external_id, file_ext.name)
ftp_ctrl.rename_file(file_tmp_id, file_ext.external_id)
new_files_ext.append(file_ext)
return new_files_ext
# ================
# Public methods
# ================
def cancel(scan, session):
""" cancel all remaining jobs for specified scan
:param scan_id: id returned by scan_new
:rtype: dict of 'cancel_details': total':int, 'finished':int,
'cancelled':int
:return:
informations about number of cancelled jobs by irma-brain
:raise: IrmaDatabaseError, IrmaTaskError
"""
log.debug("scan %s: cancel", scan.external_id)
if scan.status < IrmaScanStatus.uploaded:
# If not launched answer directly
scan.set_status(IrmaScanStatus.cancelled)
session.commit()
return None
if scan.status != IrmaScanStatus.launched:
# If too late answer directly
status_str = IrmaScanStatus.label[scan.status]
if IrmaScanStatus.is_error(scan.status):
# let the cancel finish and keep the error status
return None
else:
reason = "can not cancel scan in {0} status".format(status_str)
log.error("scan %s: %s", scan.external_id, reason)
raise IrmaValueError(reason)
# Else ask brain for job cancel
(retcode, res) = celery_brain.scan_cancel(scan.external_id)
if retcode == IrmaReturnCode.success:
s_processed = IrmaScanStatus.label[IrmaScanStatus.processed]
if 'cancel_details' in res:
scan.set_status(IrmaScanStatus.cancelled)
session.commit()
return res['cancel_details']
elif res['status'] == s_processed:
# if scan is finished for the brain
# it means we are just waiting for results
scan.set_status(IrmaScanStatus.processed)
session.commit()
reason = "can not cancel scan in {0} status".format(res['status'])
log.error("scan %s: %s", scan.external_id, reason)
raise IrmaValueError(reason)
else:
raise IrmaTaskError(res)
def set_result(file_ext_id, probe, result):
with session_transaction() as session:
file_ext = FileExt.load_from_ext_id(file_ext_id, session=session)
sanitized_res = _sanitize_res(result)
file_ext.set_result(probe, sanitized_res)
scan_id = file_ext.scan.external_id
log.info("scan %s: file %s result from %s",
scan_id, file_ext_id, probe)
is_finished(scan_id)
def set_status(scan_id, status):
log.debug("scan %s: set status %s", scan_id, status)
with session_transaction() as session:
scan = Scan.load_from_ext_id(scan_id, session=session)
scan.set_status(status)
# ensure there is only one call running at a time
# among the different workers
@interprocess_locked(interprocess_lock_path)
def is_finished(scan_id):
with session_transaction() as session:
scan = Scan.load_from_ext_id(scan_id, session)
log.debug("scan %s: is_finished %d/%d", scan_id,
scan.probes_finished, scan.probes_total)
if scan.finished() and scan.status != IrmaScanStatus.finished:
            # call finished hook for each file
for file_ext in scan.files_ext:
file_ext.hook_finished()
scan.set_status(IrmaScanStatus.finished)
session.commit()
# launch flush celery task on brain
log.debug("scan %s: calling scan_flush", scan.external_id)
celery_brain.scan_flush(scan.external_id)
def handle_output_files(file_ext_id, result, error_case=False):
log.info("Handling output for file %s", file_ext_id)
with session_transaction() as session:
file_ext = FileExt.load_from_ext_id(file_ext_id, session)
scan = file_ext.scan
uploaded_files = result.get('uploaded_files', None)
log.debug("scan %s file %s depth %s", scan.external_id,
file_ext_id, file_ext.depth)
if uploaded_files is None:
return
resubmit = scan.resubmit_files
max_resubmit_level = get_max_resubmit_level()
if max_resubmit_level != 0 and file_ext.depth > \
max_resubmit_level:
log.warning("scan %s file %s resubmit level %s exceeded max "
"level (%s)", scan.external_id,
file_ext_id, file_ext.depth,
max_resubmit_level
)
resubmit = False
if not resubmit or error_case:
reason = "Error case" if error_case else "Resubmit disabled"
log.debug("scan %s: %s flushing files", scan.external_id, reason)
celery_brain.files_flush(list(uploaded_files.values()),
scan.external_id)
return
log.debug("scan %s: found files %s", scan.external_id, uploaded_files)
# Retrieve the DB probe_result to link it with
# a new FileProbeResult in _append_new_files
probe_result = file_ext.fetch_probe_result(result['name'])
new_fws = _append_new_files_to_scan(scan, uploaded_files,
probe_result, file_ext.depth+1)
parent_file = file_ext.file
for new_fw in new_fws:
parent_file.children.append(new_fw)
session.commit()
log.debug("scan %s: %d new files to resubmit",
scan.external_id, len(new_fws))
scan_request = _create_scan_request(new_fws,
scan.get_probelist(),
scan.mimetype_filtering)
scan_request = _add_empty_results(new_fws, scan_request, scan, session)
if scan_request.nb_files == 0:
scan.set_status(IrmaScanStatus.finished)
log.info("scan %s: nothing to do flushing files",
scan.external_id)
celery_brain.files_flush(list(uploaded_files.values()),
scan.external_id)
return
for new_fw in new_fws:
celery_brain.scan_launch(new_fw.external_id,
new_fw.probes,
scan.external_id)
return
def generate_csv_report_as_stream(scan_proxy):
# If you try to use the `scan_proxy` object, it won't be available anymore
# as the session (from Hug middleware) has already been closed.
with session_query() as session:
        # Using this `merge` function with `load=False` prevents the ORM from
        # querying the whole object from the database again.
scan = session.merge(scan_proxy, load=False)
# CSV Header
header = [
"Date",
"MD5",
"SHA1",
"SHA256",
"Filename",
"First seen",
"Last seen",
"Size",
"Status",
"Submitter",
"Submitter's IP address",
]
if scan.files_ext:
            # To display the probe list (with the right names), we use a
# file_ext value from the database, and iterate over the probes
# needed. This is a workaround, as getting the list of Probes
# directly from the scan object (using Scan `get_probelist()`
# function) doesn't provide the information regarding the Probe
# type (Antivirus, External, …).
probe_results = scan.files_ext[0].get_probe_results()
try:
                # In Python 3, the dict `.keys()` method doesn't return a
                # list but a `dict_keys` view.
# Casting is needed here for further list concatenation.
av_list = list(probe_results['antivirus'].keys())
except KeyError:
av_list = []
try:
external_list = [name for name in probe_results['external']
if name == 'VirusTotal']
except KeyError:
external_list = []
header += (av_list + external_list)
# The `str` cast (via the map function) is only there in case a Probe
        # name isn't a string, which would break the `bytes` conversion.
yield bytes(CSV_SEPARATOR.join(map(str, header)), 'utf-8')
yield b"\r\n"
# CSV rows
for f in scan.files_ext:
row = [
scan.date,
f.file.md5,
f.file.sha1,
f.file.sha256,
f.name,
f.file.timestamp_first_scan,
f.file.timestamp_last_scan,
f.file.size,
f.status,
f.submitter,
scan.ip,
]
probe_results = f.get_probe_results()
row.extend(probe_results['antivirus'][name]['status'] for name in
av_list)
row.extend(probe_results['external'][name]['results'] for name in
external_list)
yield bytes(CSV_SEPARATOR.join(map(str, row)), 'utf-8')
yield b"\r\n"
| apache-2.0 | 6,972,977,802,855,987,000 | 38.091371 | 79 | 0.5796 | false |
paulromano/openmc | tests/unit_tests/test_data_photon.py | 8 | 5177 | #!/usr/bin/env python
from collections.abc import Mapping, Callable
import os
from pathlib import Path
import numpy as np
import pandas as pd
import pytest
import openmc.data
@pytest.fixture(scope='module')
def elements_endf():
"""Dictionary of element ENDF data indexed by atomic symbol."""
endf_data = os.environ['OPENMC_ENDF_DATA']
elements = {'H': 1, 'O': 8, 'Al': 13, 'Cu': 29, 'Ag': 47, 'U': 92, 'Pu': 94}
data = {}
for symbol, Z in elements.items():
p_file = 'photoat-{:03}_{}_000.endf'.format(Z, symbol)
p_path = os.path.join(endf_data, 'photoat', p_file)
a_file = 'atom-{:03}_{}_000.endf'.format(Z, symbol)
a_path = os.path.join(endf_data, 'atomic_relax', a_file)
data[symbol] = openmc.data.IncidentPhoton.from_endf(p_path, a_path)
return data
@pytest.fixture()
def element(request, elements_endf):
"""Element ENDF data"""
return elements_endf[request.param]
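# How the fixtures above are meant to be used (illustrative): tests request
# `element` indirectly, so `request.param` is the atomic symbol and the
# module-scoped cache is only parsed once, e.g.
#
#     @pytest.mark.parametrize('element', ['Cu'], indirect=True)
#     def test_something(element):
#         assert element.atomic_number == 29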
@pytest.mark.parametrize(
'element, atomic_number', [
('Al', 13),
('Cu', 29),
('Pu', 94)
],
indirect=['element']
)
def test_attributes(element, atomic_number):
assert element.atomic_number == atomic_number
@pytest.mark.parametrize(
'element, subshell, binding_energy, num_electrons', [
('H', 'K', 13.61, 1.0),
('O', 'L3', 14.15, 2.67),
('U', 'P2', 34.09, 2.0)
],
indirect=['element']
)
def test_atomic_relaxation(element, subshell, binding_energy, num_electrons):
atom_relax = element.atomic_relaxation
assert isinstance(atom_relax, openmc.data.photon.AtomicRelaxation)
assert subshell in atom_relax.subshells
assert atom_relax.binding_energy[subshell] == binding_energy
assert atom_relax.num_electrons[subshell] == num_electrons
@pytest.mark.parametrize('element', ['Al', 'Cu', 'Pu'], indirect=True)
def test_transitions(element):
transitions = element.atomic_relaxation.transitions
assert transitions
assert isinstance(transitions, Mapping)
for matrix in transitions.values():
assert isinstance(matrix, pd.core.frame.DataFrame)
assert len(matrix.columns) == 4
assert sum(matrix['probability']) == pytest.approx(1.0)
@pytest.mark.parametrize(
'element, I, i_shell, ionization_energy, num_electrons', [
('H', 19.2, 0, 13.6, 1),
('O', 95.0, 2, 13.62, 4),
('U', 890.0, 25, 6.033, -3)
],
indirect=['element']
)
def test_bremsstrahlung(element, I, i_shell, ionization_energy, num_electrons):
brems = element.bremsstrahlung
assert isinstance(brems, Mapping)
assert brems['I'] == I
assert brems['num_electrons'][i_shell] == num_electrons
assert brems['ionization_energy'][i_shell] == ionization_energy
assert np.all(np.diff(brems['electron_energy']) > 0.0)
assert np.all(np.diff(brems['photon_energy']) > 0.0)
assert brems['photon_energy'][0] == 0.0
assert brems['photon_energy'][-1] == 1.0
assert brems['dcs'].shape == (200, 30)
@pytest.mark.parametrize(
'element, n_shell', [
('H', 1),
('O', 3),
('Al', 5)
],
indirect=['element']
)
def test_compton_profiles(element, n_shell):
profile = element.compton_profiles
assert profile
assert isinstance(profile, Mapping)
assert all(isinstance(x, Callable) for x in profile['J'])
assert all(len(x) == n_shell for x in profile.values())
@pytest.mark.parametrize(
'element, reaction', [
('Cu', 541),
('Ag', 502),
('Pu', 504)
],
indirect=['element']
)
def test_reactions(element, reaction):
reactions = element.reactions
assert all(isinstance(x, openmc.data.PhotonReaction) for x in reactions.values())
assert reaction in reactions
with pytest.raises(KeyError):
reactions[18]
@pytest.mark.parametrize('element', ['Pu'], indirect=True)
def test_export_to_hdf5(tmpdir, element):
filename = str(tmpdir.join('tmp.h5'))
element.export_to_hdf5(filename)
assert os.path.exists(filename)
# Read in data from hdf5
element2 = openmc.data.IncidentPhoton.from_hdf5(filename)
    # Check some cross sections and datasets of element and element2
energy = np.logspace(np.log10(1.0), np.log10(1.0e10), num=100)
for mt in (502, 504, 515, 517, 522, 541, 570):
xs = element[mt].xs(energy)
xs2 = element2[mt].xs(energy)
assert np.allclose(xs, xs2)
assert element[502].scattering_factor == element2[502].scattering_factor
assert element.atomic_relaxation.transitions['O3'].equals(
element2.atomic_relaxation.transitions['O3'])
assert (element.compton_profiles['binding_energy'] ==
element2.compton_profiles['binding_energy']).all()
assert (element.bremsstrahlung['electron_energy'] ==
element2.bremsstrahlung['electron_energy']).all()
# Export to hdf5 again
element2.export_to_hdf5(filename, 'w')
def test_photodat_only(run_in_tmpdir):
endf_dir = Path(os.environ['OPENMC_ENDF_DATA'])
photoatomic_file = endf_dir / 'photoat' / 'photoat-001_H_000.endf'
data = openmc.data.IncidentPhoton.from_endf(photoatomic_file)
data.export_to_hdf5('tmp.h5', 'w') | mit | 5,076,806,017,285,863,000 | 32.843137 | 85 | 0.645548 | false |
songmonit/CTTMSONLINE_V8 | openerp/addons/base/__openerp__.py | 1 | 3670 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
# Copyright (C) 2010-2012 OpenERP s.a. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Base',
'version': '1.3',
'category': 'Hidden',
'description': """
The kernel, needed for all installations.
===================================================
""",
'author': 'OpenERP SA',
'maintainer': 'OpenERP SA',
'website': '',
'depends': [],
'data': [
'base_data.xml',
'res/res_currency_data.xml',
'res/res_country_data.xml',
'security/base_security.xml',
'base_menu.xml',
'res/res_config.xml',
'res/res.country.state.csv',
'ir/ir_actions.xml',
'ir/ir_config_parameter_view.xml',
'ir/ir_cron_view.xml',
'ir/ir_filters.xml',
'ir/ir_mail_server_view.xml',
'ir/ir_model_view.xml',
'ir/ir_attachment_view.xml',
'ir/ir_rule_view.xml',
'ir/ir_sequence_view.xml',
'ir/ir_translation_view.xml',
'ir/ir_ui_menu_view.xml',
'ir/ir_ui_view_view.xml',
'ir/ir_values_view.xml',
'ir/osv_memory_autovacuum.xml',
'ir/ir_model_report.xml',
'ir/ir_logging_view.xml',
'ir/ir_qweb.xml',
'workflow/workflow_view.xml',
'module/module_view.xml',
'module/module_data.xml',
'module/module_report.xml',
'module/wizard/base_module_update_view.xml',
'module/wizard/base_language_install_view.xml',
'module/wizard/base_import_language_view.xml',
'module/wizard/base_module_upgrade_view.xml',
'module/wizard/base_module_configuration_view.xml',
'module/wizard/base_export_language_view.xml',
'module/wizard/base_update_translations_view.xml',
'module/wizard/base_module_immediate_install.xml',
'res/res_company_view.xml',
'res/res_request_view.xml',
'res/res_lang_view.xml',
'res/res_partner_report.xml',
'res/res_partner_view.xml',
'res/res_bank_view.xml',
'res/res_country_view.xml',
'res/res_currency_view.xml',
'res/res_users_view.xml',
'res/res_partner_data.xml',
'res/ir_property_view.xml',
'res/res_security.xml',
'security/ir.model.access.csv',
],
'demo': [
'base_demo.xml',
'res/res_partner_demo.xml',
'res/res_partner_demo.yml',
'res/res_partner_image_demo.xml',
],
'test': [
'tests/base_test.yml',
'tests/test_osv_expression.yml',
'tests/test_ir_rule.yml', # <-- These tests modify/add/delete ir_rules.
],
'installable': True,
'auto_install': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -180,165,108,019,026,620 | 35.336634 | 79 | 0.567847 | false |
HurtowniaPixeli/pixelcms-server | cms/router/views.py | 1 | 3060 | from django.shortcuts import Http404
from rest_framework.decorators import api_view
from rest_framework.response import Response
from cms.common.utils import served_langs
from cms.pages.models import Page, PageCategory
from cms.content.models import Category, Article
from cms.content.serializers import ArticleSerializer, CategorySerializer
@api_view()
def pageView(request, slug, homepage=False):
if homepage:
try:
page = Page.objects.get(
homepage=True, published=True, language__in=served_langs()
)
return page.get_view(request)
except Page.DoesNotExist:
raise Http404
try:
page = Page.objects.get(
homepage=False, slug=slug,
published=True, language__in=served_langs()
)
return page.get_view(request)
except Page.DoesNotExist:
raise Http404
# routes to category descendant (subcategory or article)
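# e.g. a request for "news/politics/some-article" is resolved slug by slug:
# the first slug must match a PageCategory page, intermediate slugs nested
# Categories, and the last slug either a Category or an Article (the example
# path is illustrative, not taken from the project).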
@api_view()
def contentView(request, path):
path = path.split('/')
try:
# first slice of path must be PageCategory
root_category_page = Page.objects \
.instance_of(PageCategory) \
.get(slug=path[0], published=True, language__in=served_langs())
if root_category_page.deps_published is False:
raise Http404
except Page.DoesNotExist:
raise Http404
remaining_path = path[1:]
parent = root_category_page.category
for i, slug in enumerate(remaining_path):
if i == len(remaining_path) - 1:
# last slice can be Article or Category
try:
category = Category.objects.get(
slug=slug, published=True, parent=parent,
language__in=served_langs()
)
return Response({
'component_name': 'Category',
'component_data': CategorySerializer(
category, context={'request': request}
).data,
'meta': category.meta
})
except Category.DoesNotExist:
pass
try:
article = Article.objects.get(
slug=slug, published=True, category=parent,
language__in=served_langs()
)
return Response({
'component_name': 'Article',
'component_data': ArticleSerializer(
article, context={'request': request}
).data,
'meta': article.meta
})
except Article.DoesNotExist:
raise Http404
else:
# not-last slice must be Category
try:
category = Category.objects.get(
slug=slug, published=True, parent=parent,
language__in=served_langs()
)
parent = category
except Category.DoesNotExist:
raise Http404
| mit | 8,950,102,878,471,108,000 | 33.382022 | 75 | 0.544444 | false |
iMichka/mini-iw | pygccxml/parser/__init__.py | 12 | 2227 | # Copyright 2014 Insight Software Consortium.
# Copyright 2004-2008 Roman Yakovenko.
# Distributed under the Boost Software License, Version 1.0.
# See http://www.boost.org/LICENSE_1_0.txt
"""Parser sub-package.
"""
from .config import gccxml_configuration_t
from .config import load_gccxml_configuration
from .config import gccxml_configuration_example
from .project_reader import COMPILATION_MODE
from .project_reader import project_reader_t
from .project_reader import file_configuration_t
from .project_reader import create_text_fc
from .project_reader import create_source_fc
from .project_reader import create_gccxml_fc
from .project_reader import create_cached_source_fc
from .source_reader import source_reader_t
from .source_reader import gccxml_runtime_error_t
from .declarations_cache import cache_base_t
from .declarations_cache import file_cache_t
from .declarations_cache import dummy_cache_t
from .directory_cache import directory_cache_t
# shortcut
CONTENT_TYPE = file_configuration_t.CONTENT_TYPE
def parse(
files,
config=None,
compilation_mode=COMPILATION_MODE.FILE_BY_FILE,
cache=None):
"""
Parse header files.
:param files: The header files that should be parsed
:type files: list of str
:param config: Configuration object or None
:type config: :class:`parser.gccxml_configuration_t`
:param compilation_mode: Determines whether the files are parsed
individually or as one single chunk
:type compilation_mode: :class:`parser.COMPILATION_MODE`
:param cache: Declaration cache (None=no cache)
:type cache: :class:`parser.cache_base_t` or str
:rtype: list of :class:`declarations.declaration_t`
"""
if not config:
config = gccxml_configuration_t()
parser = project_reader_t(config=config, cache=cache)
answer = parser.read_files(files, compilation_mode)
return answer
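# Illustrative call (the header file name is hypothetical):
#   decls = parse(['widgets.h'])   # -> list of declarations.declaration_t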
def parse_string(content, config=None):
if not config:
config = gccxml_configuration_t()
parser = project_reader_t(config)
return parser.read_string(content)
def parse_xml_file(content, config=None):
parser = source_reader_t(config)
return parser.read_xml_file(content)
| apache-2.0 | 3,112,565,969,304,757,000 | 32.742424 | 68 | 0.732375 | false |
Dzess/ALFIRT | resources/x3dTools/povtoyz.py | 1 | 8601 | """
Kirby Urner
4D Solutions
First published: May 10 2007
May 13: added finer grain control of textures (now per shape),
turned gl_settings into a Template to provide more control, new tests.
Simple framework for studying POV-Ray's Scene Description Language,
use of Template class in the Standard Library string module.
Dependencies (outside of Standard Library):
http://www.4dsolutions.net/ocn/python/stickworks.py
http://www.4dsolutions.net/ocn/python/polyhedra.py
"""
from string import Template
from time import asctime, localtime, time
from random import randint
from stickworks import Vector, Edge
from polyhedra import Tetrahedron, Cube, Icosahedron, Octahedron, Coupler, Mite
from math import sqrt
gl_theheader = Template(
"""
// Persistence of Vision Ray Tracer Scene Description File
// File: $filename
// Vers: 3.6
// Desc: $thedescript
// Date: $thedate
// Auth: $theauthor
// ==== Standard POV-Ray Includes ====
#include "colors.inc" // Standard Color definitions
#include "textures.inc" // Standard Texture definitions
#include "functions.inc" // internal functions usable in user defined functions
// ==== Additional Includes ====
// Don't have all of the following included at once, it'll cost memory and time
// to parse!
// --- general include files ---
#include "chars.inc" // A complete library of character objects, by Ken Maeno
#include "skies.inc" // Ready defined sky spheres
#include "stars.inc" // Some star fields
#include "strings.inc" // macros for generating and manipulating text strings
// --- textures ---
#include "finish.inc" // Some basic finishes
#include "glass.inc" // Glass textures/interiors
#include "golds.inc" // Gold textures
#include "metals.inc" // Metallic pigments, finishes, and textures
#include "stones.inc" // Binding include-file for STONES1 and STONES2
#include "stones1.inc" // Great stone-textures created by Mike Miller
#include "stones2.inc" // More, done by Dan Farmer and Paul Novak
#include "woodmaps.inc" // Basic wooden colormaps
#include "woods.inc" // Great wooden textures created by Dan Farmer and Paul Novak
"""
)
gl_thesettings = Template("""
// perspective (default) camera
camera {
location <$camx, $camy, $camz>
look_at <0.0, 0.0, 0.0>
right x*image_width/image_height
}
// create a regular point light source
light_source {
0*x // light's position (translated below)
color rgb <1,1,1> // light's color
translate <-20, 40, -20>
}
background { color rgb <0.0, 0.0, 0.0> }
"""
)
gl_theedge = Template(
"""
cylinder {
<$x0, $y0, $z0>, // Center of one end
<$x1, $y1, $z1>, // Center of other end
$radius // Radius
open // Remove end caps
texture { $edge_texture }
}
"""
)
gl_thevertex = Template(
"""
sphere { <$x0, $y0, $z0>, $radius
texture { $vertex_texture }
}
"""
)
gl_theface = Template (
"""
polygon {
$numcorners,
$eachcorner
texture { $face_texture }
}
"""
)
class Scene(object):
thepath = 'c:/python25/Lib/site-packages/'
def __init__(self, thefile='test.pov', desc = 'test file', author = 'me'):
self.header = dict(
filename = thefile,
thedescript = desc,
thedate = asctime(localtime(time())),
theauthor = author)
self.settings = dict(
camx = 0.0,
camy = 2.0,
camz = -3.0)
self.objects = []
def _edges(self, someobj):
# cylinders
for edge in someobj.edges:
edict = dict(x0 = edge.v0.xyz[0],
y0 = edge.v0.xyz[1],
z0 = edge.v0.xyz[2],
x1 = edge.v1.xyz[0],
y1 = edge.v1.xyz[1],
z1 = edge.v1.xyz[2],
radius = edge.radius,
edge_texture = someobj.edge_texture)
self.fileobject.write(gl_theedge.substitute(edict))
def _vertexes(self, someobj):
# spheres
thevertices = someobj.vertices
for vertex in someobj.vertices:
vdict = dict(x0 = thevertices[vertex].xyz[0],
y0 = thevertices[vertex].xyz[1],
z0 = thevertices[vertex].xyz[2],
radius = thevertices[vertex].radius,
vertex_texture = someobj.vertex_texture
)
self.fileobject.write(gl_thevertex.substitute(vdict))
def _faces(self, someobj):
# polygons
thevertices = someobj.vertices
for face in someobj.faces:
# first corner
v = face[0]
x0 = thevertices[v].xyz[0]
y0 = thevertices[v].xyz[1]
z0 = thevertices[v].xyz[2]
firstcorner = "<%s, %s, %s>" % (x0, y0, z0)
eachcorner = firstcorner
for v in face[1:]: # the rest of 'em
x0 = thevertices[v].xyz[0]
y0 = thevertices[v].xyz[1]
z0 = thevertices[v].xyz[2]
eachcorner = eachcorner + ", <%s, %s, %s> " % (x0, y0, z0)
eachcorner = eachcorner + ", " + firstcorner
# POV-Ray closes polygon by repeating first corner
fdict = dict(numcorners = len(face)+1,
eachcorner = eachcorner,
face_texture = someobj.face_texture)
self.fileobject.write(gl_theface.substitute(fdict))
def write(self):
# set the stage
self.fileobject = open(Scene.thepath + self.header['filename'], 'w')
self.fileobject.write(gl_theheader.substitute(self.header))
self.fileobject.write(gl_thesettings.substitute(self.settings))
# write each object
for obj in self.objects:
if obj.showvertices:
self._vertexes(obj)
if obj.showedges:
self._edges(obj)
if obj.showfaces:
self._faces(obj)
self.fileobject.close()
def makecoupler():
thecube = Cube()
thecube.showfaces = False
thecube.edge_texture = 'T_Chrome_2A'
thecoupler = Coupler()
output = Scene('test0.pov')
output.objects.append(thecube)
output.objects.append(thecoupler)
output.write()
def makemite():
thecube = Cube()
thecube.showfaces = False
thecube.edge_texture = 'T_Brass_3A'
thecoupler = Coupler()
thecoupler.showfaces = False
thecoupler.edge_texture = 'T_Chrome_2A'
themite = Mite()
themite.face_texture = 'T_Stone18'
themite.edge_texture = 'T_Chrome_2A'
output = Scene('test1.pov')
output.settings['camy'] = 2.5
output.objects.append(thecube)
output.objects.append(thecoupler)
output.objects.append(themite)
output.write()
def maketent():
output = Scene('test2.pov') # naming disk file
output.objects.append(Tetrahedron()) # appending Polyhedron object
output.write()
def makeicosa():
output = Scene('test3.pov') # naming disk file
# appending a scaled Polyhedron object
output.objects.append( Icosahedron() * sqrt(2) )
output.write()
def manymes():
pass
def test():
"""list the functions"""
thetests = [
makecoupler, # Coupler
makemite, # Mighty Mite
maketent, # tetra tent
makeicosa, # i, icosa
manymes] # many mes
while True:
print """
Choose:
0 Coupler
1 Mighty Mite
2 Tetra Tent
3 I, Icosa
4 Many Mes
Q Outta here!
"""
ans = raw_input('Choice? ')
if ans in 'Qq':
break
# trap more errors here
thetests[int(ans)]() # perform user selection (or crash?)
print "View output, hit Enter to continue..."
# pause to look in the POV-Ray window
ok = raw_input()
return # null
if __name__ == '__main__':
test()
| mit | 7,903,312,855,648,600,000 | 28.178947 | 87 | 0.543658 | false |
willpearse/gbif_clean | text_stripping.py | 1 | 1760 | #!/usr/bin/python
#Preliminary checking and filtering of GBIF data
import sys, yaml
#####################
#FUNCTIONS###########
#####################
#Define mask and header lookup
def define_mask(header, fields={"hasGeospatialIssues":"FALSE", "hasCoordinates":"TRUE", "basisOfRecord":"SPECIMEN"}):
header = header.strip().split("\t")
mask = {key:value for (key,value) in zip([header.index(x) for x in fields], fields.values())}
issue_column = header.index("issue")
return mask, issue_column
#Mask and lookup functions
def mask_data(line, mask):
line = line.strip().split("\t")
for key,value in mask.iteritems():
if line[key] != value:
return False
return True
def trim_data(line, header, fields=['gbifID', 'decimalLatitude', 'decimalLongitude', 'species']):
line = line.strip().split("\t")
header = header.strip().split("\t")
return [line[header.index(x)] for x in fields]
def issue_mask(line, mask, issue_column):
line = line.strip().split("\t")
for each in mask:
if line[issue_column] == each:
return False
return True
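# The YAML file passed on the command line must provide the keys used below.
# An illustrative (not original) config:
#   input_file: raw_gbif.tsv
#   output_file: cleaned_gbif.tsv
#   output_fields: [gbifID, decimalLatitude, decimalLongitude, species]
#   mask: {hasGeospatialIssues: "FALSE", hasCoordinates: "TRUE"}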
#####################
#MAIN################
#####################
with open(sys.argv[1]) as handle:
params = yaml.load(handle)
#Load file
with open(params["output_file"], "w") as write_handle:
with open(params["input_file"]) as handle:
#Setup mask and header
header = handle.next()
mask, issue_column = define_mask(header, params["mask"])
write_handle.write("\t".join(params["output_fields"])+"\n")
#Do work
for line in handle:
if mask_data(line, mask):
write_handle.write("\t".join(trim_data(line, header, params["output_fields"]))+"\n")
| mit | -2,752,310,805,868,721,000 | 32.846154 | 117 | 0.586932 | false |
JackNokia/howdoi | test_howdoi.py | 12 | 3744 | #!/usr/bin/env python
"""Tests for Howdoi."""
import os
import unittest
from howdoi import howdoi
class HowdoiTestCase(unittest.TestCase):
def call_howdoi(self, query):
parser = howdoi.get_parser()
args = vars(parser.parse_args(query.split(' ')))
return howdoi.howdoi(args)
def setUp(self):
self.queries = ['format date bash',
'print stack trace python',
'convert mp4 to animated gif',
'create tar archive']
self.pt_queries = ['abrir arquivo em python',
'enviar email em django',
'hello world em c']
self.bad_queries = ['moe',
'mel']
def tearDown(self):
pass
def test_get_link_at_pos(self):
self.assertEqual(howdoi.get_link_at_pos(['/questions/42/'], 1),
'/questions/42/')
self.assertEqual(howdoi.get_link_at_pos(['/questions/42/'], 2),
'/questions/42/')
self.assertEqual(howdoi.get_link_at_pos(['/howdoi', '/questions/42/'], 1),
'/questions/42/')
self.assertEqual(howdoi.get_link_at_pos(['/howdoi', '/questions/42/'], 2),
'/questions/42/')
self.assertEqual(howdoi.get_link_at_pos(['/questions/42/', '/questions/142/'], 1),
'/questions/42/')
def test_answers(self):
for query in self.queries:
self.assertTrue(self.call_howdoi(query))
for query in self.bad_queries:
self.assertTrue(self.call_howdoi(query))
os.environ['HOWDOI_URL'] = 'pt.stackoverflow.com'
for query in self.pt_queries:
self.assertTrue(self.call_howdoi(query))
def test_answer_links(self):
for query in self.queries:
self.assertTrue('http://' in self.call_howdoi(query + ' -l'))
def test_position(self):
query = self.queries[0]
first_answer = self.call_howdoi(query)
second_answer = self.call_howdoi(query + ' -p2')
self.assertNotEqual(first_answer, second_answer)
def test_all_text(self):
query = self.queries[0]
first_answer = self.call_howdoi(query)
second_answer = self.call_howdoi(query + ' -a')
self.assertNotEqual(first_answer, second_answer)
self.assertTrue("Answer from http://stackoverflow.com" in second_answer)
def test_multiple_answers(self):
query = self.queries[0]
first_answer = self.call_howdoi(query)
second_answer = self.call_howdoi(query + ' -n3')
self.assertNotEqual(first_answer, second_answer)
def test_unicode_answer(self):
assert self.call_howdoi('make a log scale d3')
assert self.call_howdoi('python unittest -n3')
assert self.call_howdoi('parse html regex -a')
assert self.call_howdoi('delete remote git branch -a')
class HowdoiTestCaseEnvProxies(unittest.TestCase):
def setUp(self):
self.temp_get_proxies = howdoi.getproxies
def tearDown(self):
howdoi.getproxies = self.temp_get_proxies
def test_get_proxies1(self):
def getproxies1():
proxies = {'http': 'wwwproxy.company.com',
'https': 'wwwproxy.company.com',
'ftp': 'ftpproxy.company.com'}
return proxies
howdoi.getproxies = getproxies1
filtered_proxies = howdoi.get_proxies()
self.assertTrue('http://' in filtered_proxies['http'])
self.assertTrue('http://' in filtered_proxies['https'])
self.assertTrue('ftp' not in filtered_proxies.keys())
if __name__ == '__main__':
unittest.main()
| mit | 5,545,844,973,357,012,000 | 34.320755 | 90 | 0.575588 | false |
pengli09/Paddle | python/paddle/v2/framework/tests/test_reduce_op.py | 1 | 2525 | import unittest
import numpy as np
from op_test import OpTest
class TestSumOp(OpTest):
def setUp(self):
self.op_type = "reduce_sum"
self.inputs = {'X': np.random.random((5, 6, 10)).astype("float32")}
self.outputs = {'Out': self.inputs['X'].sum(axis=0)}
def test_check_output(self):
self.check_output()
def test_check_grad(self):
self.check_grad(['X'], 'Out')
class TestMeanOp(OpTest):
def setUp(self):
self.op_type = "reduce_mean"
self.inputs = {'X': np.random.random((5, 6, 2, 10)).astype("float32")}
self.attrs = {'dim': 1}
self.outputs = {'Out': self.inputs['X'].mean(axis=self.attrs['dim'])}
def test_check_output(self):
self.check_output()
def test_check_grad(self):
self.check_grad(['X'], 'Out')
class TestMaxOp(OpTest):
"""Remove Max with subgradient from gradient check to confirm the success of CI."""
def setUp(self):
self.op_type = "reduce_max"
self.inputs = {'X': np.random.random((5, 6, 10)).astype("float32")}
self.attrs = {'dim': -1}
self.outputs = {'Out': self.inputs['X'].max(axis=self.attrs['dim'])}
def test_check_output(self):
self.check_output()
class TestMinOp(OpTest):
"""Remove Min with subgradient from gradient check to confirm the success of CI."""
def setUp(self):
self.op_type = "reduce_min"
self.inputs = {'X': np.random.random((5, 6, 10)).astype("float32")}
self.attrs = {'dim': 2}
self.outputs = {'Out': self.inputs['X'].min(axis=self.attrs['dim'])}
def test_check_output(self):
self.check_output()
class TestKeepDimReduce(OpTest):
def setUp(self):
self.op_type = "reduce_sum"
self.inputs = {'X': np.random.random((5, 6, 10)).astype("float32")}
self.attrs = {'dim': -2, 'keep_dim': True}
self.outputs = {
'Out': self.inputs['X'].sum(axis=self.attrs['dim'], keepdims=True)
}
def test_check_output(self):
self.check_output()
def test_check_grad(self):
self.check_grad(['X'], 'Out')
class Test1DReduce(OpTest):
def setUp(self):
self.op_type = "reduce_sum"
self.inputs = {'X': np.random.random(20).astype("float32")}
self.outputs = {'Out': self.inputs['X'].sum(axis=0)}
def test_check_output(self):
self.check_output()
def test_check_grad(self):
self.check_grad(['X'], 'Out')
if __name__ == '__main__':
unittest.main()
| apache-2.0 | -4,209,349,135,747,711,000 | 27.370787 | 87 | 0.577822 | false |
mbr/unleash | unleash/depgraph.py | 1 | 1677 | import networkx as nx
from networkx.algorithms.dag import (is_directed_acyclic_graph, ancestors,
descendants, topological_sort)
class DependencyGraph(object):
def __init__(self):
self.g = nx.DiGraph()
def add_dependency(self, obj, depending_on):
ng = self.g.copy()
ng.add_edge(obj, depending_on)
# check if adding these dependencies would add a cyclic dependency
if not is_directed_acyclic_graph(ng):
raise ValueError('Adding a dependency of {} on {} introduces a '
'dependency cycle!.'.format(obj, depending_on))
self.g = ng
def add_obj(self, obj, depends_on=[]):
self.g.add_node(obj)
ng = self.g.copy()
ng.add_edges_from((obj, d) for d in depends_on)
# check if adding these dependencies would add a cyclic dependency
if not is_directed_acyclic_graph(ng):
raise ValueError('Adding {} with dependencies {} introduces a '
'dependency cycle!.'.format(obj, depends_on))
self.g = ng
def get_dependants(self, obj):
return self.g.predecessors(obj)
def get_dependencies(self, obj):
return self.g.successors(obj)
def get_full_dependants(self, obj):
return ancestors(self.g, obj)
def get_full_dependencies(self, obj):
return descendants(self.g, obj)
def remove_obj(self, obj):
self.g.remove_node(obj)
def remove_dependency(self, obj, depending_on):
self.g.remove_edge(obj, depending_on)
def resolve_order(self):
return topological_sort(self.g, reverse=True)
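# Illustrative usage (not part of the original module):
#   g = DependencyGraph()
#   g.add_obj('app', depends_on=['lib'])
#   g.add_dependency('lib', 'core')
#   g.resolve_order()   # -> ['core', 'lib', 'app'], dependencies first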
| mit | -8,186,896,312,569,232,000 | 30.641509 | 76 | 0.605247 | false |
bbbenja/SickRage | sickbeard/providers/tntvillage.py | 2 | 20461 | # Author: Giovanni Borri
# Modified by gborri, https://github.com/gborri for TNTVillage
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
import re
import traceback
import datetime
import sickbeard
import generic
from sickbeard.common import Quality
from sickbeard import logger
from sickbeard import tvcache
from sickbeard import db
from sickbeard import classes
from sickbeard import helpers
from sickbeard import show_name_helpers
from sickbeard.bs4_parser import BS4Parser
from unidecode import unidecode
from sickbeard.helpers import sanitizeSceneName
from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException
from sickrage.helper.exceptions import AuthException
category_excluded = {
'Sport' : 22,
'Teatro' : 23,
'Video Musicali' : 21,
'Film' : 4,
'Musica' : 2,
'Students Releases' : 13,
'E Books' : 3,
'Linux' : 6,
'Macintosh' : 9,
'Windows Software' : 10,
'Pc Game' : 11,
'Playstation 2' : 12,
'Wrestling' : 24,
'Varie' : 25,
'Xbox' : 26,
'Immagini sfondi' : 27,
'Altri Giochi' : 28,
'Fumetteria' : 30,
'Trash' : 31,
'PlayStation 1' : 32,
'PSP Portable' : 33,
'A Book' : 34,
'Podcast' : 35,
'Edicola' : 36,
'Mobile' : 37,
}
class TNTVillageProvider(generic.TorrentProvider):
def __init__(self):
generic.TorrentProvider.__init__(self, "TNTVillage")
self.supportsBacklog = True
self.public = False
self.enabled = False
self._uid = None
self._hash = None
self.username = None
self.password = None
self.ratio = None
self.cat = None
self.page = 10
self.subtitle = None
self.minseed = None
self.minleech = None
self.hdtext = [
' - Versione 720p',
' Versione 720p',
' V 720p',
' V 720',
' V HEVC',
' V HEVC',
' V 1080',
' Versione 1080p',
' 720p HEVC',
' Ver 720',
' 720p HEVC',
' 720p',
]
self.category_dict = {
'Serie TV' : 29,
'Cartoni' : 8,
'Anime' : 7,
'Programmi e Film TV' : 1,
'Documentari' : 14,
'All' : 0,
}
self.urls = {'base_url' : 'http://forum.tntvillage.scambioetico.org',
'login' : 'http://forum.tntvillage.scambioetico.org/index.php?act=Login&CODE=01',
'detail' : 'http://forum.tntvillage.scambioetico.org/index.php?showtopic=%s',
'search' : 'http://forum.tntvillage.scambioetico.org/?act=allreleases&%s',
'search_page' : 'http://forum.tntvillage.scambioetico.org/?act=allreleases&st={0}&{1}',
'download' : 'http://forum.tntvillage.scambioetico.org/index.php?act=Attach&type=post&id=%s',
}
self.sub_string = ['sub', 'softsub']
self.url = self.urls['base_url']
self.cache = TNTVillageCache(self)
self.categories = "cat=29"
self.cookies = None
def isEnabled(self):
return self.enabled
def imageName(self):
return 'tntvillage.png'
def getQuality(self, item, anime=False):
quality = Quality.sceneQuality(item[0], anime)
return quality
def _checkAuth(self):
if not self.username or not self.password:
raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.")
return True
def _doLogin(self):
login_params = {'UserName': self.username,
'PassWord': self.password,
'CookieDate': 0,
'submit': 'Connettiti al Forum',
}
response = self.getURL(self.urls['login'], post_data=login_params, timeout=30)
if not response:
logger.log(u'Unable to connect to ' + self.name + ' provider.', logger.ERROR)
return False
if re.search('Sono stati riscontrati i seguenti errori', response) \
or re.search('<title>Connettiti</title>', response):
logger.log(u'Invalid username or password for ' + self.name + ' Check your settings', logger.ERROR)
return False
return True
def _get_season_search_strings(self, ep_obj):
search_string = {'Season': []}
for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
if ep_obj.show.air_by_date or ep_obj.show.sports:
ep_string = show_name + ' ' + str(ep_obj.airdate).split('-')[0]
elif ep_obj.show.anime:
ep_string = show_name + ' ' + "%d" % ep_obj.scene_absolute_number
else:
ep_string = show_name + ' S%02d' % int(ep_obj.scene_season) #1) showName SXX
search_string['Season'].append(ep_string)
return [search_string]
def _get_episode_search_strings(self, ep_obj, add_string=''):
search_string = {'Episode': []}
if not ep_obj:
return []
if self.show.air_by_date:
for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
ep_string = sanitizeSceneName(show_name) + ' ' + \
str(ep_obj.airdate).replace('-', '|')
search_string['Episode'].append(ep_string)
elif self.show.sports:
for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
ep_string = sanitizeSceneName(show_name) + ' ' + \
str(ep_obj.airdate).replace('-', '|') + '|' + \
ep_obj.airdate.strftime('%b')
search_string['Episode'].append(ep_string)
elif self.show.anime:
for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
ep_string = sanitizeSceneName(show_name) + ' ' + \
"%i" % int(ep_obj.scene_absolute_number)
search_string['Episode'].append(ep_string)
else:
for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
ep_string = sanitizeSceneName(show_name) + ' ' + \
sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
'episodenumber': ep_obj.scene_episode} + ' %s' % add_string
search_string['Episode'].append(re.sub('\s+', ' ', ep_string))
return [search_string]
def _reverseQuality(self, quality):
quality_string = ''
if quality == Quality.SDTV:
quality_string = ' HDTV x264'
if quality == Quality.SDDVD:
quality_string = ' DVDRIP'
elif quality == Quality.HDTV:
quality_string = ' 720p HDTV x264'
elif quality == Quality.FULLHDTV:
quality_string = ' 1080p HDTV x264'
elif quality == Quality.RAWHDTV:
quality_string = ' 1080i HDTV mpeg2'
elif quality == Quality.HDWEBDL:
quality_string = ' 720p WEB-DL h264'
elif quality == Quality.FULLHDWEBDL:
quality_string = ' 1080p WEB-DL h264'
elif quality == Quality.HDBLURAY:
quality_string = ' 720p Bluray x264'
elif quality == Quality.FULLHDBLURAY:
quality_string = ' 1080p Bluray x264'
return quality_string
def _episodeQuality(self,torrent_rows):
"""
Return The quality from the scene episode HTML row.
"""
file_quality=''
img_all = (torrent_rows.find_all('td'))[1].find_all('img')
if len(img_all) > 0:
for img_type in img_all:
try:
file_quality = file_quality + " " + img_type['src'].replace("style_images/mkportal-636/","").replace(".gif","").replace(".png","")
except Exception:
logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
else:
file_quality = (torrent_rows.find_all('td'))[1].get_text()
logger.log(u"Episode quality: " + str(file_quality), logger.DEBUG)
checkName = lambda list, func: func([re.search(x, file_quality, re.I) for x in list])
dvdOptions = checkName(["dvd", "dvdrip", "dvdmux", "DVD9", "DVD5"], any)
bluRayOptions = checkName(["BD","BDmux", "BDrip", "BRrip", "Bluray"], any)
sdOptions = checkName(["h264", "divx", "XviD", "tv", "TVrip", "SATRip", "DTTrip", "Mpeg2"], any)
hdOptions = checkName(["720p"], any)
fullHD = checkName(["1080p", "fullHD"], any)
if len(img_all) > 0:
file_quality = (torrent_rows.find_all('td'))[1].get_text()
webdl = checkName(["webdl", "webmux", "webrip", "dl-webmux", "web-dlmux", "webdl-mux", "web-dl", "webdlmux", "dlmux"], any)
logger.log(u"Episode options: dvdOptions: " + str(dvdOptions) + ", bluRayOptions: " + str(bluRayOptions) + \
", sdOptions: " + str(sdOptions) + ", hdOptions: " + str(hdOptions) + ", fullHD: " + str(fullHD) + ", webdl: " + str(webdl), logger.DEBUG)
if sdOptions and not dvdOptions and not fullHD and not hdOptions:
return Quality.SDTV
elif dvdOptions:
return Quality.SDDVD
elif hdOptions and not bluRayOptions and not fullHD and not webdl:
return Quality.HDTV
elif not hdOptions and not bluRayOptions and fullHD and not webdl:
return Quality.FULLHDTV
elif hdOptions and not bluRayOptions and not fullHD and webdl:
return Quality.HDWEBDL
elif not hdOptions and not bluRayOptions and fullHD and webdl:
return Quality.FULLHDWEBDL
elif bluRayOptions and hdOptions and not fullHD:
return Quality.HDBLURAY
elif bluRayOptions and fullHD and not hdOptions:
return Quality.FULLHDBLURAY
else:
return Quality.UNKNOWN
def _is_italian(self, torrent_rows):
name = str(torrent_rows.find_all('td')[1].find('b').find('span'))
        if not name or name == 'None':
return False
subFound = italian = False
for sub in self.sub_string:
if re.search(sub, name, re.I):
subFound = True
else:
continue
if re.search("ita", name.split(sub)[0], re.I):
logger.log(u"Found Italian release", logger.DEBUG)
italian = True
break
if not subFound and re.search("ita", name, re.I):
logger.log(u"Found Italian release", logger.DEBUG)
italian = True
return italian
def _is_season_pack(self, name):
try:
myParser = NameParser(tryIndexers=True, trySceneExceptions=True)
parse_result = myParser.parse(name)
except InvalidNameException:
logger.log(u"Unable to parse the filename " + str(name) + " into a valid episode", logger.DEBUG)
return False
except InvalidShowException:
logger.log(u"Unable to parse the filename " + str(name) + " into a valid show", logger.DEBUG)
return False
myDB = db.DBConnection()
sql_selection="select count(*) as count from tv_episodes where showid = ? and season = ?"
episodes = myDB.select(sql_selection, [parse_result.show.indexerid, parse_result.season_number])
if int(episodes[0]['count']) == len(parse_result.episode_numbers):
return True
def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
results = []
items = {'Season': [], 'Episode': [], 'RSS': []}
self.categories = "cat=" + str(self.cat)
if not self._doLogin():
return results
for mode in search_params.keys():
for search_string in search_params[mode]:
if isinstance(search_string, unicode):
search_string = unidecode(search_string)
if mode == 'RSS':
self.page = 2
last_page=0
y=int(self.page)
if search_string == '':
continue
search_string = str(search_string).replace('.', ' ')
for x in range(0,y):
z=x*20
if last_page:
break
if mode != 'RSS':
searchURL = (self.urls['search_page'] + '&filter={2}').format(z,self.categories,search_string)
else:
searchURL = self.urls['search_page'].format(z,self.categories)
logger.log(u"Search string: " + searchURL, logger.DEBUG)
data = self.getURL(searchURL)
if not data:
logger.log(u"Received no data from the server", logger.DEBUG)
continue
try:
with BS4Parser(data, features=["html5lib", "permissive"]) as html:
torrent_table = html.find('table', attrs = {'class' : 'copyright'})
torrent_rows = torrent_table.find_all('tr') if torrent_table else []
                            # Continue only if at least one release was found
if len(torrent_rows)<3:
logger.log(u"The server returned no torrents", logger.DEBUG)
last_page=1
continue
logger.log(u"Parsing results from page " + str(x+1), logger.DEBUG)
if len(torrent_rows) < 42:
last_page=1
for result in torrent_table.find_all('tr')[2:]:
try:
link = result.find('td').find('a')
title = link.string
id = ((result.find_all('td')[8].find('a'))['href'])[-8:]
download_url = self.urls['download'] % (id)
leechers = result.find_all('td')[3].find_all('td')[1].text
leechers = int(leechers.strip('[]'))
seeders = result.find_all('td')[3].find_all('td')[2].text
seeders = int(seeders.strip('[]'))
except (AttributeError, TypeError):
continue
if mode != 'RSS' and (seeders < self.minseed or leechers < self.minleech):
continue
if not title or not download_url:
continue
filename_qt = self._reverseQuality(self._episodeQuality(result))
for text in self.hdtext:
title1 = title
title = title.replace(text,filename_qt)
if title != title1:
break
if Quality.nameQuality(title) == Quality.UNKNOWN:
title += filename_qt
if not self._is_italian(result) and not self.subtitle:
logger.log(u"Subtitled, skipping " + title + "(" + searchURL + ")", logger.DEBUG)
continue
search_show = re.split(r'([Ss][\d{1,2}]+)', search_string)[0]
show_title = search_show
rindex = re.search(r'([Ss][\d{1,2}]+)', title)
if rindex:
show_title = title[:rindex.start()]
ep_params = title[rindex.start():]
if show_title.lower() != search_show.lower() and search_show.lower() in show_title.lower():
new_title = search_show + ep_params
logger.log(u"WARNING - Changing found title from: " + title + " to: " + new_title, logger.DEBUG)
title = new_title
if self._is_season_pack(title):
title = re.sub(r'([Ee][\d{1,2}\-?]+)', '', title)
item = title, download_url, id, seeders, leechers
logger.log(u"Found result: " + title + "(" + searchURL + ")", logger.DEBUG)
items[mode].append(item)
except Exception:
logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
#For each search mode sort all the items by seeders
items[mode].sort(key=lambda tup: tup[3], reverse=True)
results += items[mode]
return results
def _get_title_and_url(self, item):
title, url, id, seeders, leechers = item
if title:
title = self._clean_title_from_provider(title)
if url:
url = str(url).replace('&', '&')
return (title, url)
def findPropers(self, search_date=datetime.datetime.today()):
results = []
myDB = db.DBConnection()
sqlResults = myDB.select(
'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
)
if not sqlResults:
return []
for sqlshow in sqlResults:
self.show = curshow = helpers.findCertainShow(sickbeard.showList, int(sqlshow["showid"]))
if not self.show: continue
curEp = curshow.getEpisode(int(sqlshow["season"]), int(sqlshow["episode"]))
searchString = self._get_episode_search_strings(curEp, add_string='PROPER|REPACK')
for item in self._doSearch(searchString[0]):
title, url = self._get_title_and_url(item)
results.append(classes.Proper(title, url, datetime.datetime.today(), self.show))
return results
def seedRatio(self):
return self.ratio
class TNTVillageCache(tvcache.TVCache):
def __init__(self, provider):
tvcache.TVCache.__init__(self, provider)
# only poll TNTVillage every 30 minutes max
self.minTime = 30
def _getRSSData(self):
search_params = {'RSS': []}
return {'entries': self.provider._doSearch(search_params)}
provider = TNTVillageProvider()
| gpl-3.0 | -4,623,389,586,505,805,000 | 38.88499 | 157 | 0.513318 | false |
andyfaff/scipy | scipy/signal/tests/test_result_type.py | 18 | 1642 | # Regression tests on result types of some signal functions
import numpy as np
from numpy.testing import assert_
import pytest
from scipy.signal import (decimate,
lfilter_zi,
lfiltic,
sos2tf,
sosfilt_zi)
def test_decimate():
ones_f32 = np.ones(32, dtype=np.float32)
assert_(decimate(ones_f32, 2).dtype == np.float32)
ones_i64 = np.ones(32, dtype=np.int64)
assert_(decimate(ones_i64, 2).dtype == np.float64)
def test_lfilter_zi():
b_f32 = np.array([1, 2, 3], dtype=np.float32)
a_f32 = np.array([4, 5, 6], dtype=np.float32)
assert_(lfilter_zi(b_f32, a_f32).dtype == np.float32)
def test_lfiltic():
# this would return f32 when given a mix of f32 / f64 args
b_f32 = np.array([1, 2, 3], dtype=np.float32)
a_f32 = np.array([4, 5, 6], dtype=np.float32)
x_f32 = np.ones(32, dtype=np.float32)
b_f64 = b_f32.astype(np.float64)
a_f64 = a_f32.astype(np.float64)
x_f64 = x_f32.astype(np.float64)
assert_(lfiltic(b_f64, a_f32, x_f32).dtype == np.float64)
assert_(lfiltic(b_f32, a_f64, x_f32).dtype == np.float64)
assert_(lfiltic(b_f32, a_f32, x_f64).dtype == np.float64)
assert_(lfiltic(b_f32, a_f32, x_f32, x_f64).dtype == np.float64)
def test_sos2tf():
sos_f32 = np.array([[4, 5, 6, 1, 2, 3]], dtype=np.float32)
b, a = sos2tf(sos_f32)
assert_(b.dtype == np.float32)
assert_(a.dtype == np.float32)
def test_sosfilt_zi():
sos_f32 = np.array([[4, 5, 6, 1, 2, 3]], dtype=np.float32)
assert_(sosfilt_zi(sos_f32).dtype == np.float32)
| bsd-3-clause | -7,011,933,095,546,168,000 | 29.407407 | 68 | 0.591352 | false |
jjbrophy47/sn_spam | relational/scripts/tests/test_comments.py | 1 | 4016 | """
Tests the comments module.
"""
import os
import unittest
import mock
from .context import comments
from .context import config
from .context import util
from .context import test_utils as tu
class CommentsTestCase(unittest.TestCase):
def setUp(self):
config_obj = tu.sample_config()
util_obj = util.Util()
self.test_obj = comments.Comments(config_obj, util_obj)
def tearDown(self):
self.test_obj = None
def test_init(self):
# setup
result = self.test_obj
# assert
self.assertTrue(isinstance(result.config_obj, config.Config))
self.assertTrue(isinstance(result.util_obj, util.Util))
def test_build_no_data_f(self):
self.test_obj.define_file_folders = mock.Mock(return_value='d/')
self.test_obj.drop_duplicate_comments = mock.Mock(
return_value='unique_df')
self.test_obj.write_predicates = mock.Mock()
self.test_obj.build('df', 'dset')
self.test_obj.define_file_folders.assert_called()
self.test_obj.drop_duplicate_comments.assert_called_with('df')
self.test_obj.write_predicates.assert_called_with('unique_df',
'dset', 'd/')
def test_build_with_data_f(self):
self.test_obj.define_file_folders = mock.Mock()
self.test_obj.drop_duplicate_comments = mock.Mock(
return_value='unique_df')
self.test_obj.write_predicates = mock.Mock()
self.test_obj.build('df', 'dset', data_f='b/')
self.test_obj.define_file_folders.assert_not_called()
self.test_obj.drop_duplicate_comments.assert_called_with('df')
self.test_obj.write_predicates.assert_called_with('unique_df',
'dset', 'b/')
def test_build_no_data_tuffy(self):
self.test_obj.define_file_folders = mock.Mock(return_value='d/')
self.test_obj.drop_duplicate_comments = mock.Mock(
return_value='unique_df')
self.test_obj.write_tuffy_predicates = mock.Mock()
self.test_obj.build('df', 'dset', tuffy=True)
self.test_obj.define_file_folders.assert_called()
self.test_obj.drop_duplicate_comments.assert_called_with('df')
self.test_obj.write_tuffy_predicates.assert_called_with('unique_df',
'dset', 'd/')
def test_define_file_folders(self):
os.makedirs = mock.Mock()
result = self.test_obj.define_file_folders()
self.assertTrue(result == 'rel/data/soundcloud/')
def test_drop_duplicate_comments(self):
df = tu.sample_df(10)
temp_df = tu.sample_df(10)
unique_df = tu.sample_df(10)
df.filter = mock.Mock(return_value=temp_df)
temp_df.drop_duplicates = mock.Mock(return_value=unique_df)
result = self.test_obj.drop_duplicate_comments(df)
df.filter.assert_called_with(['com_id', 'ind_pred', 'label'], axis=1)
temp_df.drop_duplicates.assert_called()
self.assertTrue(result.equals(unique_df))
def test_write_predicates(self):
df = tu.sample_df(10)
df.to_csv = mock.Mock()
self.test_obj.write_predicates(df, 'dset', 'd/')
expected = [mock.call('d/dset_no_label_1.tsv', columns=['com_id'],
sep='\t', header=None, index=None),
mock.call('d/dset_1.tsv', columns=['com_id', 'label'],
sep='\t', header=None, index=None),
mock.call('d/dset_pred_1.tsv', columns=['com_id', 'ind_pred'],
sep='\t', header=None, index=None)]
self.assertTrue(df.to_csv.call_args_list == expected)
def test_write_tuffy_predicates(self):
df = tu.sample_df(2)
df['ind_pred'] = [0.77, 0.27]
resource_dir = 'relational/scripts/tests/resources/'
self.test_obj.write_tuffy_predicates(df, 'test', resource_dir)
def test_suite():
suite = unittest.TestLoader().loadTestsFromTestCase(CommentsTestCase)
return suite
if __name__ == '__main__':
unittest.main()
| mit | -206,610,186,306,508,380 | 33.62069 | 78 | 0.61504 | false |
quozl/sugar | extensions/cpsection/power/view.py | 2 | 4686 | # Copyright (C) 2008, OLPC
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from gi.repository import Gtk
from gettext import gettext as _
from sugar3.graphics import style
from jarabe.controlpanel.sectionview import SectionView
from jarabe.controlpanel.inlinealert import InlineAlert
class Power(SectionView):
def __init__(self, model, alerts):
SectionView.__init__(self)
self._model = model
self.restart_alerts = alerts
self._automatic_pm_valid = True
self._automatic_pm_change_handler = None
self.set_border_width(style.DEFAULT_SPACING * 2)
self.set_spacing(style.DEFAULT_SPACING)
group = Gtk.SizeGroup(Gtk.SizeGroupMode.HORIZONTAL)
self._automatic_pm_alert_box = Gtk.HBox(spacing=style.DEFAULT_SPACING)
separator_pm = Gtk.HSeparator()
self.pack_start(separator_pm, False, True, 0)
separator_pm.show()
label_pm = Gtk.Label(label=_('Power management'))
label_pm.set_alignment(0, 0)
self.pack_start(label_pm, False, True, 0)
label_pm.show()
box_pm = Gtk.VBox()
box_pm.set_border_width(style.DEFAULT_SPACING * 2)
box_pm.set_spacing(style.DEFAULT_SPACING)
box_automatic_pm = Gtk.HBox(spacing=style.DEFAULT_SPACING)
label_automatic_pm = Gtk.Label(
label=_('Automatic power management (increases battery life)'))
label_automatic_pm.set_alignment(0, 0.5)
self._automatic_button = Gtk.CheckButton()
self._automatic_button.set_alignment(0, 0)
box_automatic_pm.pack_start(self._automatic_button, False, True, 0)
box_automatic_pm.pack_start(label_automatic_pm, False, True, 0)
self._automatic_button.show()
label_automatic_pm.show()
group.add_widget(label_automatic_pm)
box_pm.pack_start(box_automatic_pm, False, True, 0)
box_automatic_pm.show()
self._automatic_pm_alert = InlineAlert()
label_automatic_pm_error = Gtk.Label()
group.add_widget(label_automatic_pm_error)
self._automatic_pm_alert_box.pack_start(label_automatic_pm_error,
expand=False, fill=True,
padding=0)
label_automatic_pm_error.show()
self._automatic_pm_alert_box.pack_start(self._automatic_pm_alert,
expand=False, fill=True,
padding=0)
box_pm.pack_end(self._automatic_pm_alert_box, False, True, 0)
self._automatic_pm_alert_box.show()
if 'automatic_pm' in self.restart_alerts:
self._automatic_pm_alert.props.msg = self.restart_msg
self._automatic_pm_alert.show()
self.pack_start(box_pm, False, True, 0)
box_pm.show()
self.setup()
def setup(self):
try:
automatic_state = self._model.get_automatic_pm()
except Exception as detail:
self._automatic_pm_alert.props.msg = detail
self._automatic_pm_alert.show()
else:
self._automatic_button.set_active(automatic_state)
self._automatic_pm_valid = True
self.needs_restart = False
self._automatic_pm_change_handler = self._automatic_button.connect(
'toggled', self.__automatic_pm_toggled_cb)
def undo(self):
self._automatic_button.disconnect(self._automatic_pm_change_handler)
self._model.undo()
self._automatic_pm_alert.hide()
def _validate(self):
if self._automatic_pm_valid:
self.props.is_valid = True
else:
self.props.is_valid = False
def __automatic_pm_toggled_cb(self, widget, data=None):
state = widget.get_active()
try:
self._model.set_automatic_pm(state)
except Exception as detail:
print(detail)
self._automatic_pm_alert.props.msg = detail
else:
self._automatic_pm_valid = True
self._validate()
return False
| gpl-3.0 | 514,856,993,487,086,460 | 36.790323 | 78 | 0.621639 | false |
leafclick/intellij-community | python/helpers/pycharm_display/datalore/display/supported_data_type.py | 14 | 2135 | import json
from abc import abstractmethod
from datetime import datetime
try:
import numpy
except ImportError:
numpy = None
try:
import pandas
except ImportError:
pandas = None
# Parameter 'value' can also be pandas.DataFrame
def _standardize_dict(value):
result = {}
for k, v in value.items():
result[_standardize_value(k)] = _standardize_value(v)
return result
def is_int(v):
return isinstance(v, int) or (numpy and isinstance(v, numpy.integer))
def is_float(v):
return isinstance(v, float) or (numpy and isinstance(v, numpy.floating))
def is_number(v):
return is_int(v) or is_float(v)
def is_shapely_geometry(v):
try:
from shapely.geometry.base import BaseGeometry
return isinstance(v, BaseGeometry)
except ImportError:
return False
def _standardize_value(v):
if v is None:
return v
if isinstance(v, bool):
return bool(v)
if is_int(v):
return int(v)
if isinstance(v, str):
return str(v)
if is_float(v):
return float(v)
if isinstance(v, dict) or (pandas and isinstance(v, pandas.DataFrame)):
return _standardize_dict(v)
if isinstance(v, list):
return [_standardize_value(elem) for elem in v]
if isinstance(v, tuple):
return tuple(_standardize_value(elem) for elem in v)
if (numpy and isinstance(v, numpy.ndarray)) or (pandas and isinstance(v, pandas.Series)):
return _standardize_value(v.tolist())
if isinstance(v, datetime):
return v.timestamp() * 1000 # convert from second to millisecond
if isinstance(v, CanToDataFrame):
return _standardize_dict(v.to_data_frame())
if is_shapely_geometry(v):
from shapely.geometry import mapping
return json.dumps(mapping(v))
try:
return repr(v)
except Exception as e:
# TODO This needs a test case; Also exception should be logged somewhere
raise Exception('Unsupported type: {0}({1})'.format(v, type(v)))
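# Illustrative behaviour (example values are not from the original source):
#   _standardize_value({'a': numpy.int64(1), 'b': (True, 2.5)})
#   returns {'a': 1, 'b': (True, 2.5)}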
class CanToDataFrame:
@abstractmethod
def to_data_frame(self): # -> pandas.DataFrame
pass
| apache-2.0 | 6,490,971,174,700,318,000 | 25.358025 | 93 | 0.650585 | false |