repo_name | path | copies | size | content | license
---|---|---|---|---|---
Phrozyn/MozDef | mq/plugins/ttl_auditd.py | 2 | 3716 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2014 Mozilla Corporation
class message(object):
def __init__(self):
'''
register our criteria for being passed a message
'''
        # this plugin inspects messages for whitelisted events that
        # should be stored with a TTL, so we keep them for a little while
        # and then delete them rather than waiting for the index purge
self.registration = ['auditd', 'command']
self.priority = 1
def onMessage(self, message, metadata):
# ganglia monitor daemon -> 3d
if ('details' in message and
'parentprocess' in message['details'] and
message['details']['parentprocess'] == 'gmond' and
'duser' in message['details'] and
message['details']['duser'] == 'nobody' and
'command' in message['details'] and
message['details']['command'] == '/bin/sh -c netstat -t -a -n'):
message['_ttl'] = '3d'
# rabbitmq -> 3d
if (
('details' in message and
'parentprocess' in message['details'] and
message['details']['parentprocess'] == 'beam.smp' and
'duser' in message['details'] and
message['details']['duser'] == 'rabbitmq' and
'command' in message['details']
) and
(
message['details']['command'] == '/usr/lib64/erlang/erts-5.8.5/bin/epmd -daemon' or
message['details']['command'].startswith('inet_gethost 4') or
message['details']['command'].startswith('sh -c exec inet_gethost 4') or
message['details']['command'].startswith('/bin/sh -s unix:cmd') or
message['details']['command'].startswith('sh -c exec /bin/sh -s unix:cmd'))):
message['_ttl'] = '3d'
# sshd -> 3d
if ('details' in message and
'parentprocess' in message['details'] and
message['details']['parentprocess'] == 'sshd' and
'duser' in message['details'] and
message['details']['duser'] == 'root' and
'command' in message['details'] and
message['details']['command'] == '/usr/sbin/sshd -R'):
message['_ttl'] = '3d'
# chkconfig -> 3d
if (
('details' in message and
'parentprocess' in message['details'] and
message['details']['parentprocess'] == 'chkconfig' and
'suser' in message['details'] and
message['details']['suser'] == 'root' and
'command' in message['details']
) and
(
message['details']['command'].startswith('/sbin/runlevel') or
message['details']['command'].startswith('sh -c /sbin/runlevel'))):
message['_ttl'] = '3d'
# nagios -> 3d
if (
('details' in message and
'duser' in message['details'] and
message['details']['duser'] == 'nagios' and
'suser' in message['details'] and
message['details']['suser'] == 'root' and
'command' in message['details']
) and
(
message['details']['command'].startswith('/usr/lib64/nagios/plugins') or
message['details']['command'].startswith('sh -c /usr/lib64/nagios/plugins'))):
message['_ttl'] = '3d'
return (message, metadata)
| mpl-2.0 |
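A minimal usage sketch for the plugin above (not part of the dataset): MozDef's MQ worker instantiates the class and passes it events whose category matches the registration list. The import path below is hypothetical, since MozDef discovers mq/plugins/*.py on its own.
from ttl_auditd import message  # hypothetical import path
plugin = message()
event = {
    'category': 'auditd',
    'details': {
        'parentprocess': 'sshd',
        'duser': 'root',
        'command': '/usr/sbin/sshd -R',
    },
}
event, metadata = plugin.onMessage(event, {})
assert event['_ttl'] == '3d'  # matched the sshd rule above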
publicloudapp/csrutil | linux-4.3/tools/perf/util/setup.py | 766 | 1540 | #!/usr/bin/python2
from distutils.core import setup, Extension
from os import getenv
from distutils.command.build_ext import build_ext as _build_ext
from distutils.command.install_lib import install_lib as _install_lib
class build_ext(_build_ext):
def finalize_options(self):
_build_ext.finalize_options(self)
self.build_lib = build_lib
self.build_temp = build_tmp
class install_lib(_install_lib):
def finalize_options(self):
_install_lib.finalize_options(self)
self.build_dir = build_lib
cflags = getenv('CFLAGS', '').split()
# switch off several checks (need to be at the end of cflags list)
cflags += ['-fno-strict-aliasing', '-Wno-write-strings', '-Wno-unused-parameter' ]
build_lib = getenv('PYTHON_EXTBUILD_LIB')
build_tmp = getenv('PYTHON_EXTBUILD_TMP')
libtraceevent = getenv('LIBTRACEEVENT')
libapikfs = getenv('LIBAPI')
ext_sources = [f.strip() for f in file('util/python-ext-sources')
if len(f.strip()) > 0 and f[0] != '#']
perf = Extension('perf',
sources = ext_sources,
include_dirs = ['util/include'],
extra_compile_args = cflags,
extra_objects = [libtraceevent, libapikfs],
)
setup(name='perf',
version='0.1',
description='Interface with the Linux profiling infrastructure',
author='Arnaldo Carvalho de Melo',
author_email='[email protected]',
license='GPLv2',
url='http://perf.wiki.kernel.org',
ext_modules=[perf],
cmdclass={'build_ext': build_ext, 'install_lib': install_lib})
| mit |
splunk/splunk-demo-yelp-search-command | bin/requests/packages/charade/constants.py | 231 | 1374 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
_debug = 0
eDetecting = 0
eFoundIt = 1
eNotMe = 2
eStart = 0
eError = 1
eItsMe = 2
SHORTCUT_THRESHOLD = 0.95
| apache-2.0 |
AndroidOpenDevelopment/android_external_chromium_org | tools/perf/benchmarks/scheduler.py | 8 | 1116 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry import test
from measurements import smoothness
import page_sets
@test.Disabled('linux') # crbug.com/368767
class SchedulerToughSchedulingCases(test.Test):
"""Measures rendering statistics while interacting with pages that have
challenging scheduling properties.
https://docs.google.com/a/chromium.org/document/d/
17yhE5Po9By0sCdM1yZT3LiUECaUr_94rQt9j-4tOQIM/view"""
test = smoothness.Smoothness
page_set = page_sets.ToughSchedulingCasesPageSet
# Pepper plugin is not supported on android.
@test.Disabled('android', 'win') # crbug.com/384733
class SchedulerToughPepperCases(test.Test):
"""Measures rendering statistics while interacting with pages that have
pepper plugins"""
test = smoothness.Smoothness
page_set = page_sets.ToughPepperCasesPageSet
def CustomizeBrowserOptions(self, options):
# This is needed for testing pepper plugin.
options.AppendExtraBrowserArgs('--enable-pepper-testing')
| bsd-3-clause |
jawad6233/android_kernel_samsung_t110 | tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/Core.py | 11088 | 3246 | # Core.py - Python extension for perf script, core functions
#
# Copyright (C) 2010 by Tom Zanussi <[email protected]>
#
# This software may be distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
from collections import defaultdict
def autodict():
return defaultdict(autodict)
flag_fields = autodict()
symbolic_fields = autodict()
def define_flag_field(event_name, field_name, delim):
flag_fields[event_name][field_name]['delim'] = delim
def define_flag_value(event_name, field_name, value, field_str):
flag_fields[event_name][field_name]['values'][value] = field_str
def define_symbolic_field(event_name, field_name):
# nothing to do, really
pass
def define_symbolic_value(event_name, field_name, value, field_str):
symbolic_fields[event_name][field_name]['values'][value] = field_str
def flag_str(event_name, field_name, value):
string = ""
if flag_fields[event_name][field_name]:
print_delim = 0
keys = flag_fields[event_name][field_name]['values'].keys()
keys.sort()
for idx in keys:
if not value and not idx:
string += flag_fields[event_name][field_name]['values'][idx]
break
if idx and (value & idx) == idx:
if print_delim and flag_fields[event_name][field_name]['delim']:
string += " " + flag_fields[event_name][field_name]['delim'] + " "
string += flag_fields[event_name][field_name]['values'][idx]
print_delim = 1
value &= ~idx
return string
def symbol_str(event_name, field_name, value):
string = ""
if symbolic_fields[event_name][field_name]:
keys = symbolic_fields[event_name][field_name]['values'].keys()
keys.sort()
for idx in keys:
if not value and not idx:
string = symbolic_fields[event_name][field_name]['values'][idx]
break
if (value == idx):
string = symbolic_fields[event_name][field_name]['values'][idx]
break
return string
trace_flags = { 0x00: "NONE", \
0x01: "IRQS_OFF", \
0x02: "IRQS_NOSUPPORT", \
0x04: "NEED_RESCHED", \
0x08: "HARDIRQ", \
0x10: "SOFTIRQ" }
def trace_flag_str(value):
string = ""
print_delim = 0
keys = trace_flags.keys()
for idx in keys:
if not value and not idx:
string += "NONE"
break
if idx and (value & idx) == idx:
if print_delim:
string += " | ";
string += trace_flags[idx]
print_delim = 1
value &= ~idx
return string
def taskState(state):
states = {
0 : "R",
1 : "S",
2 : "D",
64: "DEAD"
}
if state not in states:
return "Unknown"
return states[state]
class EventHeaders:
def __init__(self, common_cpu, common_secs, common_nsecs,
common_pid, common_comm):
self.cpu = common_cpu
self.secs = common_secs
self.nsecs = common_nsecs
self.pid = common_pid
self.comm = common_comm
def ts(self):
return (self.secs * (10 ** 9)) + self.nsecs
def ts_format(self):
return "%d.%d" % (self.secs, int(self.nsecs / 1000))
| gpl-2.0 |
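A short sketch of how the flag helpers above are meant to be used; the event and field names are illustrative, and the import assumes Core.py is on sys.path (real callers are the handler scripts that perf script generates).
from Core import define_flag_field, define_flag_value, flag_str  # hypothetical path
define_flag_field('irq__softirq_entry', 'flags', '|')
define_flag_value('irq__softirq_entry', 'flags', 1, 'TIMER')
define_flag_value('irq__softirq_entry', 'flags', 2, 'NET_TX')
print(flag_str('irq__softirq_entry', 'flags', 3))  # -> TIMER | NET_TX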
BlueLens/bl-magi | tensorflow/object_detection/protos/faster_rcnn_box_coder_pb2.py | 4 | 3445 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: object_detection/protos/faster_rcnn_box_coder.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='object_detection/protos/faster_rcnn_box_coder.proto',
package='object_detection.protos',
syntax='proto2',
serialized_pb=_b('\n3object_detection/protos/faster_rcnn_box_coder.proto\x12\x17object_detection.protos\"o\n\x12\x46\x61sterRcnnBoxCoder\x12\x13\n\x07y_scale\x18\x01 \x01(\x02:\x02\x31\x30\x12\x13\n\x07x_scale\x18\x02 \x01(\x02:\x02\x31\x30\x12\x17\n\x0cheight_scale\x18\x03 \x01(\x02:\x01\x35\x12\x16\n\x0bwidth_scale\x18\x04 \x01(\x02:\x01\x35')
)
_FASTERRCNNBOXCODER = _descriptor.Descriptor(
name='FasterRcnnBoxCoder',
full_name='object_detection.protos.FasterRcnnBoxCoder',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='y_scale', full_name='object_detection.protos.FasterRcnnBoxCoder.y_scale', index=0,
number=1, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(10),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='x_scale', full_name='object_detection.protos.FasterRcnnBoxCoder.x_scale', index=1,
number=2, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(10),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='height_scale', full_name='object_detection.protos.FasterRcnnBoxCoder.height_scale', index=2,
number=3, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(5),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='width_scale', full_name='object_detection.protos.FasterRcnnBoxCoder.width_scale', index=3,
number=4, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=float(5),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=80,
serialized_end=191,
)
DESCRIPTOR.message_types_by_name['FasterRcnnBoxCoder'] = _FASTERRCNNBOXCODER
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
FasterRcnnBoxCoder = _reflection.GeneratedProtocolMessageType('FasterRcnnBoxCoder', (_message.Message,), dict(
DESCRIPTOR = _FASTERRCNNBOXCODER,
__module__ = 'object_detection.protos.faster_rcnn_box_coder_pb2'
# @@protoc_insertion_point(class_scope:object_detection.protos.FasterRcnnBoxCoder)
))
_sym_db.RegisterMessage(FasterRcnnBoxCoder)
# @@protoc_insertion_point(module_scope)
| apache-2.0 |
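The module above is compiler output, so the only intended surface is the generated message class. A quick round-trip sketch (field values are arbitrary):
from object_detection.protos.faster_rcnn_box_coder_pb2 import FasterRcnnBoxCoder
coder = FasterRcnnBoxCoder()
assert coder.y_scale == 10.0 and coder.height_scale == 5.0  # proto2 defaults
coder.x_scale = 12.5
wire = coder.SerializeToString()
assert FasterRcnnBoxCoder.FromString(wire).x_scale == 12.5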
fossilet/ansible | lib/ansible/plugins/lookup/credstash.py | 131 | 1651 | # (c) 2015, Ensighten <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
CREDSTASH_INSTALLED = False
try:
import credstash
CREDSTASH_INSTALLED = True
except ImportError:
CREDSTASH_INSTALLED = False
class LookupModule(LookupBase):
def run(self, terms, variables, **kwargs):
if not CREDSTASH_INSTALLED:
raise AnsibleError('The credstash lookup plugin requires credstash to be installed.')
ret = []
for term in terms:
try:
val = credstash.getSecret(term, **kwargs)
except credstash.ItemNotFound:
raise AnsibleError('Key {0} not found'.format(term))
except Exception as e:
                raise AnsibleError('Encountered exception while fetching {0}: {1}'.format(term, str(e)))
ret.append(val)
return ret
| gpl-3.0 |
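From a playbook this plugin is reached as lookup('credstash', 'some-key'); driving it directly looks roughly like the sketch below. This assumes credstash is installed with working AWS credentials, and that LookupBase can be constructed without a loader, which varies across Ansible versions.
lm = LookupModule()
secrets = lm.run(['my-password'], variables={})
print(secrets[0])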
fdouetteau/PyBabe | pybabe/format_csv.py | 1 | 3107 |
from base import BabeBase, StreamHeader, StreamFooter
import csv
from charset import UTF8Recoder, UTF8RecoderWithCleanup, PrefixReader, UnicodeCSVWriter
import codecs
import logging
log = logging.getLogger("csv")
def linepull(stream, dialect, kwargs):
it = iter(stream)
fields = kwargs.get('fields', None)
if not fields:
fields = [it.next().rstrip('\r\n')]
metainfo = StreamHeader(**dict(kwargs, fields=fields))
yield metainfo
for row in it:
yield metainfo.t._make([row.rstrip('\r\n')])
yield StreamFooter()
def build_value(x, null_value):
if x == null_value:
return None
else:
return unicode(x, "utf-8")
def csvpull(stream, dialect, kwargs):
reader = csv.reader(stream, dialect)
fields = kwargs.get('fields', None)
null_value = kwargs.get('null_value', "")
ignore_malformed = kwargs.get('ignore_bad_lines', False)
if not fields:
fields = reader.next()
metainfo = StreamHeader(**dict(kwargs, fields=fields))
yield metainfo
for row in reader:
try:
yield metainfo.t._make([build_value(x, null_value) for x in row])
except Exception, e:
if ignore_malformed:
log.warn("Malformed line: %s, %s" % (row, e))
else:
raise e
yield StreamFooter()
def pull(format, stream, kwargs):
if kwargs.get('utf8_cleanup', False):
stream = UTF8RecoderWithCleanup(stream, kwargs.get('encoding', 'utf-8'))
elif codecs.getreader(kwargs.get('encoding', 'utf-8')) != codecs.getreader('utf-8'):
stream = UTF8Recoder(stream, kwargs.get('encoding', None))
else:
pass
delimiter = kwargs.get('delimiter', None)
sniff_read = stream.next()
stream = PrefixReader(sniff_read, stream, linefilter=kwargs.get("linefilter", None))
dialect = csv.Sniffer().sniff(sniff_read)
if sniff_read.endswith('\r\n'):
dialect.lineterminator = '\r\n'
else:
dialect.lineterminator = '\n'
if dialect.delimiter.isalpha() and not delimiter:
# http://bugs.python.org/issue2078
for row in linepull(stream, dialect, kwargs):
yield row
return
if delimiter:
dialect.delimiter = delimiter
for row in csvpull(stream, dialect, kwargs):
yield row
class default_dialect(csv.Dialect):
lineterminator = '\n'
delimiter = ','
doublequote = False
escapechar = '\\'
quoting = csv.QUOTE_MINIMAL
quotechar = '"'
def push(format, metainfo, instream, outfile, encoding, delimiter=None, **kwargs):
if not encoding:
encoding = "utf8"
dialect = kwargs.get('dialect', default_dialect)
if delimiter:
dialect.delimiter = delimiter
writer = UnicodeCSVWriter(outfile, dialect=dialect, encoding=encoding)
writer.writerow(metainfo.fields)
for k in instream:
if isinstance(k, StreamFooter):
break
else:
writer.writerow(k)
BabeBase.addPullPlugin('csv', ['csv', 'tsv', 'txt'], pull)
BabeBase.addPushPlugin('csv', ['csv', 'tsv', 'txt'], push)
| bsd-3-clause |
raychorn/knowu | django/djangononrelsample2/django/contrib/sessions/backends/signed_cookies.py | 288 | 2798 | from django.conf import settings
from django.core import signing
from django.contrib.sessions.backends.base import SessionBase
class SessionStore(SessionBase):
def load(self):
"""
We load the data from the key itself instead of fetching from
some external data store. Opposite of _get_session_key(),
raises BadSignature if signature fails.
"""
try:
return signing.loads(self.session_key,
serializer=self.serializer,
# This doesn't handle non-default expiry dates, see #19201
max_age=settings.SESSION_COOKIE_AGE,
salt='django.contrib.sessions.backends.signed_cookies')
except (signing.BadSignature, ValueError):
self.create()
return {}
def create(self):
"""
To create a new key, we simply make sure that the modified flag is set
so that the cookie is set on the client for the current request.
"""
self.modified = True
def save(self, must_create=False):
"""
To save, we get the session key as a securely signed string and then
set the modified flag so that the cookie is set on the client for the
current request.
"""
self._session_key = self._get_session_key()
self.modified = True
def exists(self, session_key=None):
"""
This method makes sense when you're talking to a shared resource, but
it doesn't matter when you're storing the information in the client's
cookie.
"""
return False
def delete(self, session_key=None):
"""
To delete, we clear the session key and the underlying data structure
and set the modified flag so that the cookie is set on the client for
the current request.
"""
self._session_key = ''
self._session_cache = {}
self.modified = True
def cycle_key(self):
"""
Keeps the same data but with a new key. To do this, we just have to
call ``save()`` and it will automatically save a cookie with a new key
at the end of the request.
"""
self.save()
def _get_session_key(self):
"""
Most session backends don't need to override this method, but we do,
because instead of generating a random string, we want to actually
generate a secure url-safe Base64-encoded string of data as our
session key.
"""
session_cache = getattr(self, '_session_cache', {})
return signing.dumps(session_cache, compress=True,
salt='django.contrib.sessions.backends.signed_cookies',
serializer=self.serializer)
@classmethod
def clear_expired(cls):
pass
| lgpl-3.0 |
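Because this backend stores everything in the signed key itself, a round trip needs little more than a SECRET_KEY. A minimal standalone sketch; settings.configure is an assumption for demonstration, real projects configure Django normally:
from django.conf import settings
settings.configure(SECRET_KEY='demo-only-not-secret')
from django.contrib.sessions.backends.signed_cookies import SessionStore
s = SessionStore()
s['user_id'] = 42
s.save()                      # signs the session dict into the key itself
cookie_value = s.session_key  # this is what gets sent to the client
restored = SessionStore(session_key=cookie_value)
assert restored['user_id'] == 42  # load() verifies the signature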
filippog/pysnmp | examples/hlapi/asyncore/sync/agent/ntforg/v3-trap.py | 1 | 1601 | """
SNMPv3 TRAP: auth SHA, privacy: AES128
++++++++++++++++++++++++++++++++++++++
Send SNMP notification using the following options:
* SNMPv3
* with authoritative snmpEngineId = 0x8000000001020304
(USM must be configured at the Receiver accordingly)
* with user 'usr-sha-aes128', auth: SHA, priv: AES128
* over IPv4/UDP
* send TRAP notification
* with TRAP ID 'authenticationFailure' specified as a MIB symbol
* do not include any additional managed object information
SNMPv3 TRAPs requires pre-sharing the Notification Originator's
value of SnmpEngineId with Notification Receiver. To facilitate that
we will use static (e.g. not autogenerated) version of snmpEngineId.
Functionally similar to:
| $ snmptrap -v3 -e 8000000001020304 -l authPriv -u usr-sha-aes128 -A authkey1 -X privkey1 -a SHA -x AES demo.snmplabs.com 12345 1.3.6.1.4.1.20408.4.1.1.2 1.3.6.1.2.1.1.1.0 s "my system"
"""#
from pysnmp.hlapi import *
errorIndication, errorStatus, errorIndex, varBinds = next(
sendNotification(SnmpEngine(OctetString(hexValue='8000000001020304')),
UsmUserData('usr-sha-aes128', 'authkey1', 'privkey1',
authProtocol=usmHMACSHAAuthProtocol,
privProtocol=usmAesCfb128Protocol),
UdpTransportTarget(('demo.snmplabs.com', 162)),
ContextData(),
'trap',
NotificationType(
ObjectIdentity('SNMPv2-MIB', 'authenticationFailure')
)
)
)
if errorIndication:
print(errorIndication)
| bsd-3-clause |
sander76/home-assistant | homeassistant/components/trace/__init__.py | 1 | 3528 | """Support for script and automation tracing and debugging."""
from __future__ import annotations
import datetime as dt
from itertools import count
from typing import Any, Deque
from homeassistant.core import Context
from homeassistant.helpers.trace import (
TraceElement,
trace_id_get,
trace_id_set,
trace_set_child_id,
)
import homeassistant.util.dt as dt_util
from . import websocket_api
from .const import DATA_TRACE, STORED_TRACES
from .utils import LimitedSizeDict
DOMAIN = "trace"
async def async_setup(hass, config):
"""Initialize the trace integration."""
hass.data[DATA_TRACE] = {}
websocket_api.async_setup(hass)
return True
def async_store_trace(hass, trace):
"""Store a trace if its item_id is valid."""
key = trace.key
if key[1]:
traces = hass.data[DATA_TRACE]
if key not in traces:
traces[key] = LimitedSizeDict(size_limit=STORED_TRACES)
traces[key][trace.run_id] = trace
class ActionTrace:
"""Base container for an script or automation trace."""
_run_ids = count(0)
def __init__(
self,
key: tuple[str, str],
config: dict[str, Any],
context: Context,
):
"""Container for script trace."""
self._trace: dict[str, Deque[TraceElement]] | None = None
self._config: dict[str, Any] = config
self.context: Context = context
self._error: Exception | None = None
self._state: str = "running"
self.run_id: str = str(next(self._run_ids))
self._timestamp_finish: dt.datetime | None = None
self._timestamp_start: dt.datetime = dt_util.utcnow()
self.key: tuple[str, str] = key
if trace_id_get():
trace_set_child_id(self.key, self.run_id)
trace_id_set((key, self.run_id))
def set_trace(self, trace: dict[str, Deque[TraceElement]]) -> None:
"""Set trace."""
self._trace = trace
def set_error(self, ex: Exception) -> None:
"""Set error."""
self._error = ex
def finished(self) -> None:
"""Set finish time."""
self._timestamp_finish = dt_util.utcnow()
self._state = "stopped"
def as_dict(self) -> dict[str, Any]:
"""Return dictionary version of this ActionTrace."""
result = self.as_short_dict()
traces = {}
if self._trace:
for key, trace_list in self._trace.items():
traces[key] = [item.as_dict() for item in trace_list]
result.update(
{
"trace": traces,
"config": self._config,
"context": self.context,
}
)
if self._error is not None:
result["error"] = str(self._error)
return result
def as_short_dict(self) -> dict[str, Any]:
"""Return a brief dictionary version of this ActionTrace."""
last_step = None
if self._trace:
last_step = list(self._trace)[-1]
result = {
"last_step": last_step,
"run_id": self.run_id,
"state": self._state,
"timestamp": {
"start": self._timestamp_start,
"finish": self._timestamp_finish,
},
"domain": self.key[0],
"item_id": self.key[1],
}
if self._error is not None:
result["error"] = str(self._error)
if last_step is not None:
result["last_step"] = last_step
return result
| apache-2.0 |
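A sketch of the container in isolation; the key and config are made up, and in practice the script and automation integrations construct these during execution.
from homeassistant.core import Context
trace = ActionTrace(("automation", "morning_lights"), {"alias": "demo"}, Context())
trace.finished()
summary = trace.as_short_dict()
assert summary["state"] == "stopped" and summary["item_id"] == "morning_lights"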
IronLanguages/ironpython2 | Tests/test_decimal.py | 3 | 1134 | # Licensed to the .NET Foundation under one or more agreements.
# The .NET Foundation licenses this file to you under the Apache 2.0 License.
# See the LICENSE file in the project root for more information.
import unittest
from decimal import *
from iptest import run_test, skipUnlessIronPython
@skipUnlessIronPython()
class DecimalTest(unittest.TestCase):
def test_explicit_from_System_Decimal(self):
import System
#int
self.assertEqual(str(Decimal(System.Decimal.Parse('45'))), '45')
#float
self.assertEqual(str(Decimal(System.Decimal.Parse('45.34'))), '45.34')
def test_formatting(self):
import System
d = System.Decimal.Parse('1.4274243253253245432543254545')
self.assertEqual('{}'.format(d), '1.4274243253253245432543254545')
self.assertEqual('{:,.2f}'.format(d), '1.43')
self.assertEqual('{:e}'.format(d), '1.427424325325e+00')
d = System.Decimal.Parse('4000000000.40000000')
self.assertEqual('{}'.format(d), '4000000000.40000000')
self.assertEqual('{:e}'.format(d), '4.000000000400e+09')
run_test(__name__)
| apache-2.0 |
elzaggo/pydoop | test/avro/test_io.py | 1 | 4807 | # BEGIN_COPYRIGHT
#
# Copyright 2009-2018 CRS4.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# END_COPYRIGHT
import os
import unittest
import itertools as it
import avro.datafile as avdf
from avro.io import DatumReader, DatumWriter
from pydoop.mapreduce.pipes import InputSplit
from pydoop.avrolib import (
SeekableDataFileReader, AvroReader, AvroWriter, parse
)
from pydoop.test_utils import WDTestCase
from pydoop.utils.py3compat import czip, cmap
import pydoop.hdfs as hdfs
from common import avro_user_record
THIS_DIR = os.path.dirname(os.path.abspath(__file__))
class TestAvroIO(WDTestCase):
def setUp(self):
super(TestAvroIO, self).setUp()
with open(os.path.join(THIS_DIR, "user.avsc")) as f:
self.schema = parse(f.read())
def write_avro_file(self, rec_creator, n_samples, sync_interval):
avdf.SYNC_INTERVAL = sync_interval
self.assertEqual(avdf.SYNC_INTERVAL, sync_interval)
fo = self._mkf('data.avro', mode='wb')
with avdf.DataFileWriter(fo, DatumWriter(), self.schema) as writer:
for i in range(n_samples):
writer.append(rec_creator(i))
return fo.name
def test_seekable(self):
fn = self.write_avro_file(avro_user_record, 500, 1024)
with open(fn, 'rb') as f:
sreader = SeekableDataFileReader(f, DatumReader())
res = [t for t in czip(cmap(
lambda _: f.tell(), it.repeat(1)
), sreader)]
sreader.align_after(res[-1][0])
with self.assertRaises(StopIteration):
r = next(sreader)
sreader.align_after(0)
r = next(sreader)
self.assertEqual(r, res[0][1])
def offset_iterator():
s = -1
for o, r in res:
sreader.align_after(o)
t = f.tell()
if t == s:
continue
s = t
x = next(sreader)
yield (t, x)
i = 0
for xo, x in offset_iterator():
sreader.align_after(xo)
for o, r in res[i:]:
if o >= xo:
self.assertEqual(x, r)
break
i += 1
def test_avro_reader(self):
N = 500
fn = self.write_avro_file(avro_user_record, N, 1024)
url = hdfs.path.abspath(fn, local=True)
class FunkyCtx(object):
def __init__(self, isplit):
self.input_split = isplit
def get_areader(offset, length):
isplit = InputSplit(InputSplit.to_string(url, offset, length))
ctx = FunkyCtx(isplit)
return AvroReader(ctx)
areader = get_areader(0, 14)
file_length = areader.reader.file_length
with self.assertRaises(StopIteration):
next(areader)
areader = get_areader(0, file_length)
with SeekableDataFileReader(open(fn, 'rb'), DatumReader()) as sreader:
for (o, a), s in czip(areader, sreader):
self.assertEqual(a, s)
mid_len = int(file_length / 2)
lows = [x for x in get_areader(0, mid_len)]
highs = [x for x in get_areader(mid_len, file_length)]
self.assertEqual(N, len(lows) + len(highs))
def test_avro_writer(self):
class FunkyCtx(object):
def __init__(self_, job_conf):
self_.job_conf = job_conf
class AWriter(AvroWriter):
schema = self.schema
def emit(self_, key, value):
self_.writer.append(key)
ctx = FunkyCtx({
'mapreduce.task.partition': 1,
'mapreduce.task.output.dir': hdfs.path.abspath(self.wd, local=True)
})
awriter = AWriter(ctx)
N = 10
for i in range(N):
awriter.emit(avro_user_record(i), '')
awriter.close()
def suite():
suite_ = unittest.TestSuite()
suite_.addTest(TestAvroIO('test_seekable'))
suite_.addTest(TestAvroIO('test_avro_reader'))
suite_.addTest(TestAvroIO('test_avro_writer'))
return suite_
if __name__ == '__main__':
_RUNNER = unittest.TextTestRunner(verbosity=2)
    _RUNNER.run(suite())
| apache-2.0 |
mou4e/zirconium | third_party/mojo/src/mojo/public/third_party/jinja2/compiler.py | 623 | 61785 | # -*- coding: utf-8 -*-
"""
jinja2.compiler
~~~~~~~~~~~~~~~
Compiles nodes into python code.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
from itertools import chain
from copy import deepcopy
from keyword import iskeyword as is_python_keyword
from jinja2 import nodes
from jinja2.nodes import EvalContext
from jinja2.visitor import NodeVisitor
from jinja2.exceptions import TemplateAssertionError
from jinja2.utils import Markup, concat, escape
from jinja2._compat import range_type, next, text_type, string_types, \
iteritems, NativeStringIO, imap
operators = {
'eq': '==',
'ne': '!=',
'gt': '>',
'gteq': '>=',
'lt': '<',
'lteq': '<=',
'in': 'in',
'notin': 'not in'
}
# what method to iterate over items do we want to use for dict iteration
# in generated code? on 2.x let's go with iteritems, on 3.x with items
if hasattr(dict, 'iteritems'):
dict_item_iter = 'iteritems'
else:
dict_item_iter = 'items'
# does if 0: dummy(x) get us x into the scope?
def unoptimize_before_dead_code():
x = 42
def f():
if 0: dummy(x)
return f
# The getattr is necessary for pypy which does not set this attribute if
# no closure is on the function
unoptimize_before_dead_code = bool(
getattr(unoptimize_before_dead_code(), '__closure__', None))
def generate(node, environment, name, filename, stream=None,
defer_init=False):
"""Generate the python source for a node tree."""
if not isinstance(node, nodes.Template):
raise TypeError('Can\'t compile non template nodes')
generator = CodeGenerator(environment, name, filename, stream, defer_init)
generator.visit(node)
if stream is None:
return generator.stream.getvalue()
def has_safe_repr(value):
"""Does the node have a safe representation?"""
if value is None or value is NotImplemented or value is Ellipsis:
return True
if isinstance(value, (bool, int, float, complex, range_type,
Markup) + string_types):
return True
if isinstance(value, (tuple, list, set, frozenset)):
for item in value:
if not has_safe_repr(item):
return False
return True
elif isinstance(value, dict):
for key, value in iteritems(value):
if not has_safe_repr(key):
return False
if not has_safe_repr(value):
return False
return True
return False
def find_undeclared(nodes, names):
"""Check if the names passed are accessed undeclared. The return value
is a set of all the undeclared names from the sequence of names found.
"""
visitor = UndeclaredNameVisitor(names)
try:
for node in nodes:
visitor.visit(node)
except VisitorExit:
pass
return visitor.undeclared
class Identifiers(object):
"""Tracks the status of identifiers in frames."""
def __init__(self):
# variables that are known to be declared (probably from outer
# frames or because they are special for the frame)
self.declared = set()
# undeclared variables from outer scopes
self.outer_undeclared = set()
# names that are accessed without being explicitly declared by
# this one or any of the outer scopes. Names can appear both in
# declared and undeclared.
self.undeclared = set()
# names that are declared locally
self.declared_locally = set()
# names that are declared by parameters
self.declared_parameter = set()
def add_special(self, name):
"""Register a special name like `loop`."""
self.undeclared.discard(name)
self.declared.add(name)
def is_declared(self, name):
"""Check if a name is declared in this or an outer scope."""
if name in self.declared_locally or name in self.declared_parameter:
return True
return name in self.declared
def copy(self):
return deepcopy(self)
class Frame(object):
"""Holds compile time information for us."""
def __init__(self, eval_ctx, parent=None):
self.eval_ctx = eval_ctx
self.identifiers = Identifiers()
# a toplevel frame is the root + soft frames such as if conditions.
self.toplevel = False
# the root frame is basically just the outermost frame, so no if
# conditions. This information is used to optimize inheritance
# situations.
self.rootlevel = False
# in some dynamic inheritance situations the compiler needs to add
# write tests around output statements.
self.require_output_check = parent and parent.require_output_check
# inside some tags we are using a buffer rather than yield statements.
# this for example affects {% filter %} or {% macro %}. If a frame
# is buffered this variable points to the name of the list used as
# buffer.
self.buffer = None
# the name of the block we're in, otherwise None.
self.block = parent and parent.block or None
# a set of actually assigned names
self.assigned_names = set()
# the parent of this frame
self.parent = parent
if parent is not None:
self.identifiers.declared.update(
parent.identifiers.declared |
parent.identifiers.declared_parameter |
parent.assigned_names
)
self.identifiers.outer_undeclared.update(
parent.identifiers.undeclared -
self.identifiers.declared
)
self.buffer = parent.buffer
def copy(self):
"""Create a copy of the current one."""
rv = object.__new__(self.__class__)
rv.__dict__.update(self.__dict__)
rv.identifiers = object.__new__(self.identifiers.__class__)
rv.identifiers.__dict__.update(self.identifiers.__dict__)
return rv
def inspect(self, nodes):
"""Walk the node and check for identifiers. If the scope is hard (eg:
enforce on a python level) overrides from outer scopes are tracked
differently.
"""
visitor = FrameIdentifierVisitor(self.identifiers)
for node in nodes:
visitor.visit(node)
def find_shadowed(self, extra=()):
"""Find all the shadowed names. extra is an iterable of variables
        that may be defined with `add_special` which may occur scoped.
"""
i = self.identifiers
return (i.declared | i.outer_undeclared) & \
(i.declared_locally | i.declared_parameter) | \
set(x for x in extra if i.is_declared(x))
def inner(self):
"""Return an inner frame."""
return Frame(self.eval_ctx, self)
def soft(self):
"""Return a soft frame. A soft frame may not be modified as
standalone thing as it shares the resources with the frame it
was created of, but it's not a rootlevel frame any longer.
"""
rv = self.copy()
rv.rootlevel = False
return rv
__copy__ = copy
class VisitorExit(RuntimeError):
"""Exception used by the `UndeclaredNameVisitor` to signal a stop."""
class DependencyFinderVisitor(NodeVisitor):
"""A visitor that collects filter and test calls."""
def __init__(self):
self.filters = set()
self.tests = set()
def visit_Filter(self, node):
self.generic_visit(node)
self.filters.add(node.name)
def visit_Test(self, node):
self.generic_visit(node)
self.tests.add(node.name)
def visit_Block(self, node):
"""Stop visiting at blocks."""
class UndeclaredNameVisitor(NodeVisitor):
"""A visitor that checks if a name is accessed without being
declared. This is different from the frame visitor as it will
not stop at closure frames.
"""
def __init__(self, names):
self.names = set(names)
self.undeclared = set()
def visit_Name(self, node):
if node.ctx == 'load' and node.name in self.names:
self.undeclared.add(node.name)
if self.undeclared == self.names:
raise VisitorExit()
else:
self.names.discard(node.name)
def visit_Block(self, node):
"""Stop visiting a blocks."""
class FrameIdentifierVisitor(NodeVisitor):
"""A visitor for `Frame.inspect`."""
def __init__(self, identifiers):
self.identifiers = identifiers
def visit_Name(self, node):
"""All assignments to names go through this function."""
if node.ctx == 'store':
self.identifiers.declared_locally.add(node.name)
elif node.ctx == 'param':
self.identifiers.declared_parameter.add(node.name)
elif node.ctx == 'load' and not \
self.identifiers.is_declared(node.name):
self.identifiers.undeclared.add(node.name)
def visit_If(self, node):
self.visit(node.test)
real_identifiers = self.identifiers
old_names = real_identifiers.declared_locally | \
real_identifiers.declared_parameter
def inner_visit(nodes):
if not nodes:
return set()
self.identifiers = real_identifiers.copy()
for subnode in nodes:
self.visit(subnode)
rv = self.identifiers.declared_locally - old_names
# we have to remember the undeclared variables of this branch
# because we will have to pull them.
real_identifiers.undeclared.update(self.identifiers.undeclared)
self.identifiers = real_identifiers
return rv
body = inner_visit(node.body)
else_ = inner_visit(node.else_ or ())
# the differences between the two branches are also pulled as
# undeclared variables
real_identifiers.undeclared.update(body.symmetric_difference(else_) -
real_identifiers.declared)
# remember those that are declared.
real_identifiers.declared_locally.update(body | else_)
def visit_Macro(self, node):
self.identifiers.declared_locally.add(node.name)
def visit_Import(self, node):
self.generic_visit(node)
self.identifiers.declared_locally.add(node.target)
def visit_FromImport(self, node):
self.generic_visit(node)
for name in node.names:
if isinstance(name, tuple):
self.identifiers.declared_locally.add(name[1])
else:
self.identifiers.declared_locally.add(name)
def visit_Assign(self, node):
"""Visit assignments in the correct order."""
self.visit(node.node)
self.visit(node.target)
def visit_For(self, node):
"""Visiting stops at for blocks. However the block sequence
is visited as part of the outer scope.
"""
self.visit(node.iter)
def visit_CallBlock(self, node):
self.visit(node.call)
def visit_FilterBlock(self, node):
self.visit(node.filter)
def visit_Scope(self, node):
"""Stop visiting at scopes."""
def visit_Block(self, node):
"""Stop visiting at blocks."""
class CompilerExit(Exception):
"""Raised if the compiler encountered a situation where it just
doesn't make sense to further process the code. Any block that
raises such an exception is not further processed.
"""
class CodeGenerator(NodeVisitor):
def __init__(self, environment, name, filename, stream=None,
defer_init=False):
if stream is None:
stream = NativeStringIO()
self.environment = environment
self.name = name
self.filename = filename
self.stream = stream
self.created_block_context = False
self.defer_init = defer_init
# aliases for imports
self.import_aliases = {}
# a registry for all blocks. Because blocks are moved out
# into the global python scope they are registered here
self.blocks = {}
# the number of extends statements so far
self.extends_so_far = 0
# some templates have a rootlevel extends. In this case we
# can safely assume that we're a child template and do some
# more optimizations.
self.has_known_extends = False
# the current line number
self.code_lineno = 1
# registry of all filters and tests (global, not block local)
self.tests = {}
self.filters = {}
# the debug information
self.debug_info = []
self._write_debug_info = None
# the number of new lines before the next write()
self._new_lines = 0
# the line number of the last written statement
self._last_line = 0
# true if nothing was written so far.
self._first_write = True
# used by the `temporary_identifier` method to get new
# unique, temporary identifier
self._last_identifier = 0
# the current indentation
self._indentation = 0
# -- Various compilation helpers
def fail(self, msg, lineno):
"""Fail with a :exc:`TemplateAssertionError`."""
raise TemplateAssertionError(msg, lineno, self.name, self.filename)
def temporary_identifier(self):
"""Get a new unique identifier."""
self._last_identifier += 1
return 't_%d' % self._last_identifier
def buffer(self, frame):
"""Enable buffering for the frame from that point onwards."""
frame.buffer = self.temporary_identifier()
self.writeline('%s = []' % frame.buffer)
def return_buffer_contents(self, frame):
"""Return the buffer contents of the frame."""
if frame.eval_ctx.volatile:
self.writeline('if context.eval_ctx.autoescape:')
self.indent()
self.writeline('return Markup(concat(%s))' % frame.buffer)
self.outdent()
self.writeline('else:')
self.indent()
self.writeline('return concat(%s)' % frame.buffer)
self.outdent()
elif frame.eval_ctx.autoescape:
self.writeline('return Markup(concat(%s))' % frame.buffer)
else:
self.writeline('return concat(%s)' % frame.buffer)
def indent(self):
"""Indent by one."""
self._indentation += 1
def outdent(self, step=1):
"""Outdent by step."""
self._indentation -= step
def start_write(self, frame, node=None):
"""Yield or write into the frame buffer."""
if frame.buffer is None:
self.writeline('yield ', node)
else:
self.writeline('%s.append(' % frame.buffer, node)
def end_write(self, frame):
"""End the writing process started by `start_write`."""
if frame.buffer is not None:
self.write(')')
def simple_write(self, s, frame, node=None):
"""Simple shortcut for start_write + write + end_write."""
self.start_write(frame, node)
self.write(s)
self.end_write(frame)
def blockvisit(self, nodes, frame):
"""Visit a list of nodes as block in a frame. If the current frame
is no buffer a dummy ``if 0: yield None`` is written automatically
unless the force_generator parameter is set to False.
"""
if frame.buffer is None:
self.writeline('if 0: yield None')
else:
self.writeline('pass')
try:
for node in nodes:
self.visit(node, frame)
except CompilerExit:
pass
def write(self, x):
"""Write a string into the output stream."""
if self._new_lines:
if not self._first_write:
self.stream.write('\n' * self._new_lines)
self.code_lineno += self._new_lines
if self._write_debug_info is not None:
self.debug_info.append((self._write_debug_info,
self.code_lineno))
self._write_debug_info = None
self._first_write = False
self.stream.write(' ' * self._indentation)
self._new_lines = 0
self.stream.write(x)
def writeline(self, x, node=None, extra=0):
"""Combination of newline and write."""
self.newline(node, extra)
self.write(x)
def newline(self, node=None, extra=0):
"""Add one or more newlines before the next write."""
self._new_lines = max(self._new_lines, 1 + extra)
if node is not None and node.lineno != self._last_line:
self._write_debug_info = node.lineno
self._last_line = node.lineno
def signature(self, node, frame, extra_kwargs=None):
"""Writes a function call to the stream for the current node.
A leading comma is added automatically. The extra keyword
arguments may not include python keywords otherwise a syntax
        error could occur. The extra keyword arguments should be given
as python dict.
"""
# if any of the given keyword arguments is a python keyword
# we have to make sure that no invalid call is created.
kwarg_workaround = False
for kwarg in chain((x.key for x in node.kwargs), extra_kwargs or ()):
if is_python_keyword(kwarg):
kwarg_workaround = True
break
for arg in node.args:
self.write(', ')
self.visit(arg, frame)
if not kwarg_workaround:
for kwarg in node.kwargs:
self.write(', ')
self.visit(kwarg, frame)
if extra_kwargs is not None:
for key, value in iteritems(extra_kwargs):
self.write(', %s=%s' % (key, value))
if node.dyn_args:
self.write(', *')
self.visit(node.dyn_args, frame)
if kwarg_workaround:
if node.dyn_kwargs is not None:
self.write(', **dict({')
else:
self.write(', **{')
for kwarg in node.kwargs:
self.write('%r: ' % kwarg.key)
self.visit(kwarg.value, frame)
self.write(', ')
if extra_kwargs is not None:
for key, value in iteritems(extra_kwargs):
self.write('%r: %s, ' % (key, value))
if node.dyn_kwargs is not None:
self.write('}, **')
self.visit(node.dyn_kwargs, frame)
self.write(')')
else:
self.write('}')
elif node.dyn_kwargs is not None:
self.write(', **')
self.visit(node.dyn_kwargs, frame)
def pull_locals(self, frame):
"""Pull all the references identifiers into the local scope."""
for name in frame.identifiers.undeclared:
self.writeline('l_%s = context.resolve(%r)' % (name, name))
def pull_dependencies(self, nodes):
"""Pull all the dependencies."""
visitor = DependencyFinderVisitor()
for node in nodes:
visitor.visit(node)
for dependency in 'filters', 'tests':
mapping = getattr(self, dependency)
for name in getattr(visitor, dependency):
if name not in mapping:
mapping[name] = self.temporary_identifier()
self.writeline('%s = environment.%s[%r]' %
(mapping[name], dependency, name))
def unoptimize_scope(self, frame):
"""Disable Python optimizations for the frame."""
# XXX: this is not that nice but it has no real overhead. It
# mainly works because python finds the locals before dead code
# is removed. If that breaks we have to add a dummy function
# that just accepts the arguments and does nothing.
if frame.identifiers.declared:
self.writeline('%sdummy(%s)' % (
unoptimize_before_dead_code and 'if 0: ' or '',
', '.join('l_' + name for name in frame.identifiers.declared)
))
def push_scope(self, frame, extra_vars=()):
"""This function returns all the shadowed variables in a dict
in the form name: alias and will write the required assignments
into the current scope. No indentation takes place.
This also predefines locally declared variables from the loop
body because under some circumstances it may be the case that
`extra_vars` is passed to `Frame.find_shadowed`.
"""
aliases = {}
for name in frame.find_shadowed(extra_vars):
aliases[name] = ident = self.temporary_identifier()
self.writeline('%s = l_%s' % (ident, name))
to_declare = set()
for name in frame.identifiers.declared_locally:
if name not in aliases:
to_declare.add('l_' + name)
if to_declare:
self.writeline(' = '.join(to_declare) + ' = missing')
return aliases
def pop_scope(self, aliases, frame):
"""Restore all aliases and delete unused variables."""
for name, alias in iteritems(aliases):
self.writeline('l_%s = %s' % (name, alias))
to_delete = set()
for name in frame.identifiers.declared_locally:
if name not in aliases:
to_delete.add('l_' + name)
if to_delete:
# we cannot use the del statement here because enclosed
# scopes can trigger a SyntaxError:
# a = 42; b = lambda: a; del a
self.writeline(' = '.join(to_delete) + ' = missing')
def function_scoping(self, node, frame, children=None,
find_special=True):
"""In Jinja a few statements require the help of anonymous
functions. Those are currently macros and call blocks and in
the future also recursive loops. As there is currently
technical limitation that doesn't allow reading and writing a
variable in a scope where the initial value is coming from an
outer scope, this function tries to fall back with a common
error message. Additionally the frame passed is modified so
        that the arguments are collected and callers are looked up.
This will return the modified frame.
"""
# we have to iterate twice over it, make sure that works
if children is None:
children = node.iter_child_nodes()
children = list(children)
func_frame = frame.inner()
func_frame.inspect(children)
# variables that are undeclared (accessed before declaration) and
# declared locally *and* part of an outside scope raise a template
# assertion error. Reason: we can't generate reasonable code from
# it without aliasing all the variables.
# this could be fixed in Python 3 where we have the nonlocal
# keyword or if we switch to bytecode generation
overridden_closure_vars = (
func_frame.identifiers.undeclared &
func_frame.identifiers.declared &
(func_frame.identifiers.declared_locally |
func_frame.identifiers.declared_parameter)
)
if overridden_closure_vars:
self.fail('It\'s not possible to set and access variables '
'derived from an outer scope! (affects: %s)' %
', '.join(sorted(overridden_closure_vars)), node.lineno)
# remove variables from a closure from the frame's undeclared
# identifiers.
func_frame.identifiers.undeclared -= (
func_frame.identifiers.undeclared &
func_frame.identifiers.declared
)
# no special variables for this scope, abort early
if not find_special:
return func_frame
func_frame.accesses_kwargs = False
func_frame.accesses_varargs = False
func_frame.accesses_caller = False
func_frame.arguments = args = ['l_' + x.name for x in node.args]
undeclared = find_undeclared(children, ('caller', 'kwargs', 'varargs'))
if 'caller' in undeclared:
func_frame.accesses_caller = True
func_frame.identifiers.add_special('caller')
args.append('l_caller')
if 'kwargs' in undeclared:
func_frame.accesses_kwargs = True
func_frame.identifiers.add_special('kwargs')
args.append('l_kwargs')
if 'varargs' in undeclared:
func_frame.accesses_varargs = True
func_frame.identifiers.add_special('varargs')
args.append('l_varargs')
return func_frame
def macro_body(self, node, frame, children=None):
"""Dump the function def of a macro or call block."""
frame = self.function_scoping(node, frame, children)
# macros are delayed, they never require output checks
frame.require_output_check = False
args = frame.arguments
# XXX: this is an ugly fix for the loop nesting bug
# (tests.test_old_bugs.test_loop_call_bug). This works around
        # an identifier nesting problem we have in general. It's just more
# likely to happen in loops which is why we work around it. The
# real solution would be "nonlocal" all the identifiers that are
# leaking into a new python frame and might be used both unassigned
# and assigned.
if 'loop' in frame.identifiers.declared:
args = args + ['l_loop=l_loop']
self.writeline('def macro(%s):' % ', '.join(args), node)
self.indent()
self.buffer(frame)
self.pull_locals(frame)
self.blockvisit(node.body, frame)
self.return_buffer_contents(frame)
self.outdent()
return frame
def macro_def(self, node, frame):
"""Dump the macro definition for the def created by macro_body."""
arg_tuple = ', '.join(repr(x.name) for x in node.args)
name = getattr(node, 'name', None)
if len(node.args) == 1:
arg_tuple += ','
self.write('Macro(environment, macro, %r, (%s), (' %
(name, arg_tuple))
for arg in node.defaults:
self.visit(arg, frame)
self.write(', ')
self.write('), %r, %r, %r)' % (
bool(frame.accesses_kwargs),
bool(frame.accesses_varargs),
bool(frame.accesses_caller)
))
def position(self, node):
"""Return a human readable position for the node."""
rv = 'line %d' % node.lineno
if self.name is not None:
rv += ' in ' + repr(self.name)
return rv
# -- Statement Visitors
def visit_Template(self, node, frame=None):
assert frame is None, 'no root frame allowed'
eval_ctx = EvalContext(self.environment, self.name)
from jinja2.runtime import __all__ as exported
self.writeline('from __future__ import division')
self.writeline('from jinja2.runtime import ' + ', '.join(exported))
if not unoptimize_before_dead_code:
self.writeline('dummy = lambda *x: None')
# if we want a deferred initialization we cannot move the
# environment into a local name
envenv = not self.defer_init and ', environment=environment' or ''
# do we have an extends tag at all? If not, we can save some
# overhead by just not processing any inheritance code.
have_extends = node.find(nodes.Extends) is not None
# find all blocks
for block in node.find_all(nodes.Block):
if block.name in self.blocks:
self.fail('block %r defined twice' % block.name, block.lineno)
self.blocks[block.name] = block
# find all imports and import them
for import_ in node.find_all(nodes.ImportedName):
if import_.importname not in self.import_aliases:
imp = import_.importname
self.import_aliases[imp] = alias = self.temporary_identifier()
if '.' in imp:
module, obj = imp.rsplit('.', 1)
self.writeline('from %s import %s as %s' %
(module, obj, alias))
else:
self.writeline('import %s as %s' % (imp, alias))
# add the load name
self.writeline('name = %r' % self.name)
# generate the root render function.
self.writeline('def root(context%s):' % envenv, extra=1)
# process the root
frame = Frame(eval_ctx)
frame.inspect(node.body)
frame.toplevel = frame.rootlevel = True
frame.require_output_check = have_extends and not self.has_known_extends
self.indent()
if have_extends:
self.writeline('parent_template = None')
if 'self' in find_undeclared(node.body, ('self',)):
frame.identifiers.add_special('self')
self.writeline('l_self = TemplateReference(context)')
self.pull_locals(frame)
self.pull_dependencies(node.body)
self.blockvisit(node.body, frame)
self.outdent()
# make sure that the parent root is called.
if have_extends:
if not self.has_known_extends:
self.indent()
self.writeline('if parent_template is not None:')
self.indent()
self.writeline('for event in parent_template.'
'root_render_func(context):')
self.indent()
self.writeline('yield event')
self.outdent(2 + (not self.has_known_extends))
# at this point we now have the blocks collected and can visit them too.
for name, block in iteritems(self.blocks):
block_frame = Frame(eval_ctx)
block_frame.inspect(block.body)
block_frame.block = name
self.writeline('def block_%s(context%s):' % (name, envenv),
block, 1)
self.indent()
undeclared = find_undeclared(block.body, ('self', 'super'))
if 'self' in undeclared:
block_frame.identifiers.add_special('self')
self.writeline('l_self = TemplateReference(context)')
if 'super' in undeclared:
block_frame.identifiers.add_special('super')
self.writeline('l_super = context.super(%r, '
'block_%s)' % (name, name))
self.pull_locals(block_frame)
self.pull_dependencies(block.body)
self.blockvisit(block.body, block_frame)
self.outdent()
self.writeline('blocks = {%s}' % ', '.join('%r: block_%s' % (x, x)
for x in self.blocks),
extra=1)
# add a function that returns the debug info
self.writeline('debug_info = %r' % '&'.join('%s=%s' % x for x
in self.debug_info))
def visit_Block(self, node, frame):
"""Call a block and register it for the template."""
level = 1
if frame.toplevel:
# if we know that we are a child template, there is no need to
# check if we are one
if self.has_known_extends:
return
if self.extends_so_far > 0:
self.writeline('if parent_template is None:')
self.indent()
level += 1
context = node.scoped and 'context.derived(locals())' or 'context'
self.writeline('for event in context.blocks[%r][0](%s):' % (
node.name, context), node)
self.indent()
self.simple_write('event', frame)
self.outdent(level)
def visit_Extends(self, node, frame):
"""Calls the extender."""
if not frame.toplevel:
self.fail('cannot use extend from a non top-level scope',
node.lineno)
# if the number of extends statements in general is zero so
# far, we don't have to add a check if something extended
# the template before this one.
if self.extends_so_far > 0:
# if we have a known extends we just add a template runtime
# error into the generated code. We could catch that at compile
# time too, but i welcome it not to confuse users by throwing the
# same error at different times just "because we can".
if not self.has_known_extends:
self.writeline('if parent_template is not None:')
self.indent()
self.writeline('raise TemplateRuntimeError(%r)' %
'extended multiple times')
# if we have a known extends already we don't need that code here
# as we know that the template execution will end here.
if self.has_known_extends:
raise CompilerExit()
else:
self.outdent()
self.writeline('parent_template = environment.get_template(', node)
self.visit(node.template, frame)
self.write(', %r)' % self.name)
self.writeline('for name, parent_block in parent_template.'
'blocks.%s():' % dict_item_iter)
self.indent()
self.writeline('context.blocks.setdefault(name, []).'
'append(parent_block)')
self.outdent()
# if this extends statement was in the root level we can take
# advantage of that information and simplify the generated code
# in the top level from this point onwards
if frame.rootlevel:
self.has_known_extends = True
# and now we have one more
self.extends_so_far += 1
def visit_Include(self, node, frame):
"""Handles includes."""
if node.with_context:
self.unoptimize_scope(frame)
if node.ignore_missing:
self.writeline('try:')
self.indent()
func_name = 'get_or_select_template'
if isinstance(node.template, nodes.Const):
if isinstance(node.template.value, string_types):
func_name = 'get_template'
elif isinstance(node.template.value, (tuple, list)):
func_name = 'select_template'
elif isinstance(node.template, (nodes.Tuple, nodes.List)):
func_name = 'select_template'
self.writeline('template = environment.%s(' % func_name, node)
self.visit(node.template, frame)
self.write(', %r)' % self.name)
if node.ignore_missing:
self.outdent()
self.writeline('except TemplateNotFound:')
self.indent()
self.writeline('pass')
self.outdent()
self.writeline('else:')
self.indent()
if node.with_context:
self.writeline('for event in template.root_render_func('
'template.new_context(context.parent, True, '
'locals())):')
else:
self.writeline('for event in template.module._body_stream:')
self.indent()
self.simple_write('event', frame)
self.outdent()
if node.ignore_missing:
self.outdent()
def visit_Import(self, node, frame):
"""Visit regular imports."""
if node.with_context:
self.unoptimize_scope(frame)
self.writeline('l_%s = ' % node.target, node)
if frame.toplevel:
self.write('context.vars[%r] = ' % node.target)
self.write('environment.get_template(')
self.visit(node.template, frame)
self.write(', %r).' % self.name)
if node.with_context:
self.write('make_module(context.parent, True, locals())')
else:
self.write('module')
if frame.toplevel and not node.target.startswith('_'):
self.writeline('context.exported_vars.discard(%r)' % node.target)
frame.assigned_names.add(node.target)
def visit_FromImport(self, node, frame):
"""Visit named imports."""
self.newline(node)
self.write('included_template = environment.get_template(')
self.visit(node.template, frame)
self.write(', %r).' % self.name)
if node.with_context:
self.write('make_module(context.parent, True)')
else:
self.write('module')
var_names = []
discarded_names = []
for name in node.names:
if isinstance(name, tuple):
name, alias = name
else:
alias = name
self.writeline('l_%s = getattr(included_template, '
'%r, missing)' % (alias, name))
self.writeline('if l_%s is missing:' % alias)
self.indent()
self.writeline('l_%s = environment.undefined(%r %% '
'included_template.__name__, '
'name=%r)' %
(alias, 'the template %%r (imported on %s) does '
'not export the requested name %s' % (
self.position(node),
repr(name)
), name))
self.outdent()
if frame.toplevel:
var_names.append(alias)
if not alias.startswith('_'):
discarded_names.append(alias)
frame.assigned_names.add(alias)
if var_names:
if len(var_names) == 1:
name = var_names[0]
self.writeline('context.vars[%r] = l_%s' % (name, name))
else:
self.writeline('context.vars.update({%s})' % ', '.join(
'%r: l_%s' % (name, name) for name in var_names
))
if discarded_names:
if len(discarded_names) == 1:
self.writeline('context.exported_vars.discard(%r)' %
discarded_names[0])
else:
self.writeline('context.exported_vars.difference_'
'update((%s))' % ', '.join(imap(repr, discarded_names)))
def visit_For(self, node, frame):
# when calculating the nodes for the inner frame we have to exclude
# the iterator contents from it
children = node.iter_child_nodes(exclude=('iter',))
if node.recursive:
loop_frame = self.function_scoping(node, frame, children,
find_special=False)
else:
loop_frame = frame.inner()
loop_frame.inspect(children)
        # try to figure out if we have an extended loop. An extended loop
        # is necessary if the loop is in recursive mode or if the special
        # loop variable is accessed in the body.
extended_loop = node.recursive or 'loop' in \
find_undeclared(node.iter_child_nodes(
only=('body',)), ('loop',))
        # if we don't have a recursive loop we have to find the shadowed
        # variables at that point. Because loops can be nested, but the loop
        # variable is a special one, we have to enforce aliasing for it.
if not node.recursive:
aliases = self.push_scope(loop_frame, ('loop',))
# otherwise we set up a buffer and add a function def
else:
self.writeline('def loop(reciter, loop_render_func, depth=0):', node)
self.indent()
self.buffer(loop_frame)
aliases = {}
# make sure the loop variable is a special one and raise a template
# assertion error if a loop tries to write to loop
if extended_loop:
self.writeline('l_loop = missing')
loop_frame.identifiers.add_special('loop')
for name in node.find_all(nodes.Name):
if name.ctx == 'store' and name.name == 'loop':
self.fail('Can\'t assign to special loop variable '
'in for-loop target', name.lineno)
self.pull_locals(loop_frame)
if node.else_:
iteration_indicator = self.temporary_identifier()
self.writeline('%s = 1' % iteration_indicator)
# Create a fake parent loop if the else or test section of a
# loop is accessing the special loop variable and no parent loop
# exists.
if 'loop' not in aliases and 'loop' in find_undeclared(
node.iter_child_nodes(only=('else_', 'test')), ('loop',)):
self.writeline("l_loop = environment.undefined(%r, name='loop')" %
("'loop' is undefined. the filter section of a loop as well "
"as the else block don't have access to the special 'loop'"
" variable of the current loop. Because there is no parent "
"loop it's undefined. Happened in loop on %s" %
self.position(node)))
self.writeline('for ', node)
self.visit(node.target, loop_frame)
self.write(extended_loop and ', l_loop in LoopContext(' or ' in ')
        # if we have an extended loop and a node test, we filter in the
        # "outer frame".
if extended_loop and node.test is not None:
self.write('(')
self.visit(node.target, loop_frame)
self.write(' for ')
self.visit(node.target, loop_frame)
self.write(' in ')
if node.recursive:
self.write('reciter')
else:
self.visit(node.iter, loop_frame)
self.write(' if (')
test_frame = loop_frame.copy()
self.visit(node.test, test_frame)
self.write('))')
elif node.recursive:
self.write('reciter')
else:
self.visit(node.iter, loop_frame)
if node.recursive:
self.write(', loop_render_func, depth):')
else:
self.write(extended_loop and '):' or ':')
        # in non-extended loops the test becomes a continue statement
if not extended_loop and node.test is not None:
self.indent()
self.writeline('if not ')
self.visit(node.test, loop_frame)
self.write(':')
self.indent()
self.writeline('continue')
self.outdent(2)
self.indent()
self.blockvisit(node.body, loop_frame)
if node.else_:
self.writeline('%s = 0' % iteration_indicator)
self.outdent()
if node.else_:
self.writeline('if %s:' % iteration_indicator)
self.indent()
self.blockvisit(node.else_, loop_frame)
self.outdent()
# reset the aliases if there are any.
if not node.recursive:
self.pop_scope(aliases, loop_frame)
# if the node was recursive we have to return the buffer contents
# and start the iteration code
if node.recursive:
self.return_buffer_contents(loop_frame)
self.outdent()
self.start_write(frame, node)
self.write('loop(')
self.visit(node.iter, frame)
self.write(', loop)')
self.end_write(frame)
def visit_If(self, node, frame):
if_frame = frame.soft()
self.writeline('if ', node)
self.visit(node.test, if_frame)
self.write(':')
self.indent()
self.blockvisit(node.body, if_frame)
self.outdent()
if node.else_:
self.writeline('else:')
self.indent()
self.blockvisit(node.else_, if_frame)
self.outdent()
def visit_Macro(self, node, frame):
macro_frame = self.macro_body(node, frame)
self.newline()
if frame.toplevel:
if not node.name.startswith('_'):
self.write('context.exported_vars.add(%r)' % node.name)
self.writeline('context.vars[%r] = ' % node.name)
self.write('l_%s = ' % node.name)
self.macro_def(node, macro_frame)
frame.assigned_names.add(node.name)
def visit_CallBlock(self, node, frame):
children = node.iter_child_nodes(exclude=('call',))
call_frame = self.macro_body(node, frame, children)
self.writeline('caller = ')
self.macro_def(node, call_frame)
self.start_write(frame, node)
self.visit_Call(node.call, call_frame, forward_caller=True)
self.end_write(frame)
def visit_FilterBlock(self, node, frame):
filter_frame = frame.inner()
filter_frame.inspect(node.iter_child_nodes())
aliases = self.push_scope(filter_frame)
self.pull_locals(filter_frame)
self.buffer(filter_frame)
self.blockvisit(node.body, filter_frame)
self.start_write(frame, node)
self.visit_Filter(node.filter, filter_frame)
self.end_write(frame)
self.pop_scope(aliases, filter_frame)
def visit_ExprStmt(self, node, frame):
self.newline(node)
self.visit(node.node, frame)
def visit_Output(self, node, frame):
# if we have a known extends statement, we don't output anything
# if we are in a require_output_check section
if self.has_known_extends and frame.require_output_check:
return
if self.environment.finalize:
finalize = lambda x: text_type(self.environment.finalize(x))
else:
finalize = text_type
# if we are inside a frame that requires output checking, we do so
outdent_later = False
if frame.require_output_check:
self.writeline('if parent_template is None:')
self.indent()
outdent_later = True
# try to evaluate as many chunks as possible into a static
# string at compile time.
body = []
for child in node.nodes:
try:
const = child.as_const(frame.eval_ctx)
except nodes.Impossible:
body.append(child)
continue
            # the frame can't be volatile here, because otherwise the
            # as_const() function would raise an Impossible exception
            # at that point.
try:
if frame.eval_ctx.autoescape:
if hasattr(const, '__html__'):
const = const.__html__()
else:
const = escape(const)
const = finalize(const)
except Exception:
# if something goes wrong here we evaluate the node
# at runtime for easier debugging
body.append(child)
continue
if body and isinstance(body[-1], list):
body[-1].append(const)
else:
body.append([const])
        # if we have fewer than 3 nodes or a buffer, we yield or extend/append
if len(body) < 3 or frame.buffer is not None:
if frame.buffer is not None:
# for one item we append, for more we extend
if len(body) == 1:
self.writeline('%s.append(' % frame.buffer)
else:
self.writeline('%s.extend((' % frame.buffer)
self.indent()
for item in body:
if isinstance(item, list):
val = repr(concat(item))
if frame.buffer is None:
self.writeline('yield ' + val)
else:
self.writeline(val + ', ')
else:
if frame.buffer is None:
self.writeline('yield ', item)
else:
self.newline(item)
close = 1
if frame.eval_ctx.volatile:
self.write('(context.eval_ctx.autoescape and'
' escape or to_string)(')
elif frame.eval_ctx.autoescape:
self.write('escape(')
else:
self.write('to_string(')
if self.environment.finalize is not None:
self.write('environment.finalize(')
close += 1
self.visit(item, frame)
self.write(')' * close)
if frame.buffer is not None:
self.write(', ')
if frame.buffer is not None:
# close the open parentheses
self.outdent()
self.writeline(len(body) == 1 and ')' or '))')
# otherwise we create a format string as this is faster in that case
else:
format = []
arguments = []
for item in body:
if isinstance(item, list):
format.append(concat(item).replace('%', '%%'))
else:
format.append('%s')
arguments.append(item)
self.writeline('yield ')
self.write(repr(concat(format)) + ' % (')
idx = -1
self.indent()
for argument in arguments:
self.newline(argument)
close = 0
if frame.eval_ctx.volatile:
self.write('(context.eval_ctx.autoescape and'
' escape or to_string)(')
close += 1
elif frame.eval_ctx.autoescape:
self.write('escape(')
close += 1
if self.environment.finalize is not None:
self.write('environment.finalize(')
close += 1
self.visit(argument, frame)
self.write(')' * close + ', ')
self.outdent()
self.writeline(')')
if outdent_later:
self.outdent()
def visit_Assign(self, node, frame):
self.newline(node)
        # toplevel assignments go into the local namespace and into the
        # current template's context. We create a copy of the frame here
        # and add a set so that the Name visitor can record the assigned
        # names.
if frame.toplevel:
assignment_frame = frame.copy()
assignment_frame.toplevel_assignments = set()
else:
assignment_frame = frame
self.visit(node.target, assignment_frame)
self.write(' = ')
self.visit(node.node, frame)
# make sure toplevel assignments are added to the context.
if frame.toplevel:
public_names = [x for x in assignment_frame.toplevel_assignments
if not x.startswith('_')]
if len(assignment_frame.toplevel_assignments) == 1:
name = next(iter(assignment_frame.toplevel_assignments))
self.writeline('context.vars[%r] = l_%s' % (name, name))
else:
self.writeline('context.vars.update({')
for idx, name in enumerate(assignment_frame.toplevel_assignments):
if idx:
self.write(', ')
self.write('%r: l_%s' % (name, name))
self.write('})')
if public_names:
if len(public_names) == 1:
self.writeline('context.exported_vars.add(%r)' %
public_names[0])
else:
self.writeline('context.exported_vars.update((%s))' %
', '.join(imap(repr, public_names)))
# -- Expression Visitors
def visit_Name(self, node, frame):
if node.ctx == 'store' and frame.toplevel:
frame.toplevel_assignments.add(node.name)
self.write('l_' + node.name)
frame.assigned_names.add(node.name)
def visit_Const(self, node, frame):
val = node.value
if isinstance(val, float):
self.write(str(val))
else:
self.write(repr(val))
def visit_TemplateData(self, node, frame):
try:
self.write(repr(node.as_const(frame.eval_ctx)))
except nodes.Impossible:
self.write('(context.eval_ctx.autoescape and Markup or identity)(%r)'
% node.data)
def visit_Tuple(self, node, frame):
self.write('(')
idx = -1
for idx, item in enumerate(node.items):
if idx:
self.write(', ')
self.visit(item, frame)
self.write(idx == 0 and ',)' or ')')
def visit_List(self, node, frame):
self.write('[')
for idx, item in enumerate(node.items):
if idx:
self.write(', ')
self.visit(item, frame)
self.write(']')
def visit_Dict(self, node, frame):
self.write('{')
for idx, item in enumerate(node.items):
if idx:
self.write(', ')
self.visit(item.key, frame)
self.write(': ')
self.visit(item.value, frame)
self.write('}')
def binop(operator, interceptable=True):
def visitor(self, node, frame):
if self.environment.sandboxed and \
operator in self.environment.intercepted_binops:
self.write('environment.call_binop(context, %r, ' % operator)
self.visit(node.left, frame)
self.write(', ')
self.visit(node.right, frame)
else:
self.write('(')
self.visit(node.left, frame)
self.write(' %s ' % operator)
self.visit(node.right, frame)
self.write(')')
return visitor
def uaop(operator, interceptable=True):
def visitor(self, node, frame):
if self.environment.sandboxed and \
operator in self.environment.intercepted_unops:
self.write('environment.call_unop(context, %r, ' % operator)
self.visit(node.node, frame)
else:
self.write('(' + operator)
self.visit(node.node, frame)
self.write(')')
return visitor
visit_Add = binop('+')
visit_Sub = binop('-')
visit_Mul = binop('*')
visit_Div = binop('/')
visit_FloorDiv = binop('//')
visit_Pow = binop('**')
visit_Mod = binop('%')
visit_And = binop('and', interceptable=False)
visit_Or = binop('or', interceptable=False)
visit_Pos = uaop('+')
visit_Neg = uaop('-')
visit_Not = uaop('not ', interceptable=False)
del binop, uaop
def visit_Concat(self, node, frame):
if frame.eval_ctx.volatile:
func_name = '(context.eval_ctx.volatile and' \
' markup_join or unicode_join)'
elif frame.eval_ctx.autoescape:
func_name = 'markup_join'
else:
func_name = 'unicode_join'
self.write('%s((' % func_name)
for arg in node.nodes:
self.visit(arg, frame)
self.write(', ')
self.write('))')
def visit_Compare(self, node, frame):
self.visit(node.expr, frame)
for op in node.ops:
self.visit(op, frame)
def visit_Operand(self, node, frame):
self.write(' %s ' % operators[node.op])
self.visit(node.expr, frame)
def visit_Getattr(self, node, frame):
self.write('environment.getattr(')
self.visit(node.node, frame)
self.write(', %r)' % node.attr)
def visit_Getitem(self, node, frame):
# slices bypass the environment getitem method.
if isinstance(node.arg, nodes.Slice):
self.visit(node.node, frame)
self.write('[')
self.visit(node.arg, frame)
self.write(']')
else:
self.write('environment.getitem(')
self.visit(node.node, frame)
self.write(', ')
self.visit(node.arg, frame)
self.write(')')
def visit_Slice(self, node, frame):
if node.start is not None:
self.visit(node.start, frame)
self.write(':')
if node.stop is not None:
self.visit(node.stop, frame)
if node.step is not None:
self.write(':')
self.visit(node.step, frame)
def visit_Filter(self, node, frame):
self.write(self.filters[node.name] + '(')
func = self.environment.filters.get(node.name)
if func is None:
self.fail('no filter named %r' % node.name, node.lineno)
if getattr(func, 'contextfilter', False):
self.write('context, ')
elif getattr(func, 'evalcontextfilter', False):
self.write('context.eval_ctx, ')
elif getattr(func, 'environmentfilter', False):
self.write('environment, ')
# if the filter node is None we are inside a filter block
# and want to write to the current buffer
if node.node is not None:
self.visit(node.node, frame)
elif frame.eval_ctx.volatile:
self.write('(context.eval_ctx.autoescape and'
' Markup(concat(%s)) or concat(%s))' %
(frame.buffer, frame.buffer))
elif frame.eval_ctx.autoescape:
self.write('Markup(concat(%s))' % frame.buffer)
else:
self.write('concat(%s)' % frame.buffer)
self.signature(node, frame)
self.write(')')
def visit_Test(self, node, frame):
self.write(self.tests[node.name] + '(')
if node.name not in self.environment.tests:
self.fail('no test named %r' % node.name, node.lineno)
self.visit(node.node, frame)
self.signature(node, frame)
self.write(')')
def visit_CondExpr(self, node, frame):
def write_expr2():
if node.expr2 is not None:
return self.visit(node.expr2, frame)
self.write('environment.undefined(%r)' % ('the inline if-'
'expression on %s evaluated to false and '
'no else section was defined.' % self.position(node)))
self.write('(')
self.visit(node.expr1, frame)
self.write(' if ')
self.visit(node.test, frame)
self.write(' else ')
write_expr2()
self.write(')')
def visit_Call(self, node, frame, forward_caller=False):
if self.environment.sandboxed:
self.write('environment.call(context, ')
else:
self.write('context.call(')
self.visit(node.node, frame)
extra_kwargs = forward_caller and {'caller': 'caller'} or None
self.signature(node, frame, extra_kwargs)
self.write(')')
def visit_Keyword(self, node, frame):
self.write(node.key + '=')
self.visit(node.value, frame)
# -- Unused nodes for extensions
def visit_MarkSafe(self, node, frame):
self.write('Markup(')
self.visit(node.expr, frame)
self.write(')')
def visit_MarkSafeIfAutoescape(self, node, frame):
self.write('(context.eval_ctx.autoescape and Markup or identity)(')
self.visit(node.expr, frame)
self.write(')')
def visit_EnvironmentAttribute(self, node, frame):
self.write('environment.' + node.name)
def visit_ExtensionAttribute(self, node, frame):
self.write('environment.extensions[%r].%s' % (node.identifier, node.name))
def visit_ImportedName(self, node, frame):
self.write(self.import_aliases[node.importname])
def visit_InternalName(self, node, frame):
self.write(node.name)
def visit_ContextReference(self, node, frame):
self.write('context')
def visit_Continue(self, node, frame):
self.writeline('continue', node)
def visit_Break(self, node, frame):
self.writeline('break', node)
def visit_Scope(self, node, frame):
scope_frame = frame.inner()
scope_frame.inspect(node.iter_child_nodes())
aliases = self.push_scope(scope_frame)
self.pull_locals(scope_frame)
self.blockvisit(node.body, scope_frame)
self.pop_scope(aliases, scope_frame)
def visit_EvalContextModifier(self, node, frame):
for keyword in node.options:
self.writeline('context.eval_ctx.%s = ' % keyword.key)
self.visit(keyword.value, frame)
try:
val = keyword.value.as_const(frame.eval_ctx)
except nodes.Impossible:
frame.eval_ctx.volatile = True
else:
setattr(frame.eval_ctx, keyword.key, val)
def visit_ScopedEvalContextModifier(self, node, frame):
old_ctx_name = self.temporary_identifier()
safed_ctx = frame.eval_ctx.save()
self.writeline('%s = context.eval_ctx.save()' % old_ctx_name)
self.visit_EvalContextModifier(node, frame)
for child in node.body:
self.visit(child, frame)
frame.eval_ctx.revert(safed_ctx)
self.writeline('context.eval_ctx.revert(%s)' % old_ctx_name)
| bsd-3-clause |
SU-ECE-17-7/hotspotter | hsviz/draw_func2.py | 1 | 54605 | ''' Lots of functions for drawing and plotting visiony things '''
# TODO: New naming scheme
# viz_<func_name> will clear everything (current axes and fig: clf, cla) and will add annotations
# interact_<func_name> will clear everything and start user interactions.
# show_<func_name> will always clear the current axes (cla), but not the fig; it might add annotations
# plot_<func_name> will not clear the axes or figure. More useful for graphs
# draw_<func_name> same as plot for now. More useful for images
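# For example, under this scheme a caller might write:
#   viz_chip(...)       # hypothetical viz_ function: fresh figure + annotations
#   draw_kpts2(kpts)    # draw_ function defined below: overlay on current axes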
from __future__ import division, print_function
from hscom import __common__
(print, print_, print_on, print_off, rrr, profile,
printDBG) = __common__.init(__name__, '[df2]', DEBUG=False, initmpl=True)
# Python
from itertools import izip
from os.path import splitext, split, join, normpath, exists
import colorsys
import itertools
import pylab
import sys
import textwrap
import time
import warnings
# Matplotlib / Qt
import matplotlib
import matplotlib as mpl # NOQA
from matplotlib.collections import PatchCollection, LineCollection
from matplotlib.font_manager import FontProperties
from matplotlib.patches import Rectangle, Circle, FancyArrow
from matplotlib.transforms import Affine2D
from matplotlib.backends import backend_qt4
import matplotlib.pyplot as plt
# Qt
from PyQt4 import QtCore, QtGui
from PyQt4.QtCore import Qt
# Scientific
import numpy as np
import scipy.stats
import cv2
# HotSpotter
from hscom import helpers
from hscom import tools
from hscom.Printable import DynStruct
#================
# GLOBALS
#================
TMP_mevent = None
QT4_WINS = []
plotWidget = None
# GENERAL FONTS
SMALLER = 8
SMALL = 10
MED = 12
LARGE = 14
#fpargs = dict(family=None, style=None, variant=None, stretch=None, fname=None)
FONTS = DynStruct()
FONTS.small = FontProperties(weight='light', size=SMALL)
FONTS.smaller = FontProperties(weight='light', size=SMALLER)
FONTS.med = FontProperties(weight='light', size=MED)
FONTS.large = FontProperties(weight='light', size=LARGE)
FONTS.medbold = FontProperties(weight='bold', size=MED)
FONTS.largebold = FontProperties(weight='bold', size=LARGE)
# SPECIFIC FONTS
FONTS.legend = FONTS.small
FONTS.figtitle = FONTS.med
FONTS.axtitle = FONTS.med
FONTS.subtitle = FONTS.med
FONTS.xlabel = FONTS.smaller
FONTS.ylabel = FONTS.small
FONTS.relative = FONTS.smaller
# COLORS
ORANGE = np.array((255, 127, 0, 255)) / 255.0
RED = np.array((255, 0, 0, 255)) / 255.0
GREEN = np.array(( 0, 255, 0, 255)) / 255.0
BLUE = np.array(( 0, 0, 255, 255)) / 255.0
YELLOW = np.array((255, 255, 0, 255)) / 255.0
BLACK = np.array(( 0, 0, 0, 255)) / 255.0
WHITE = np.array((255, 255, 255, 255)) / 255.0
GRAY = np.array((127, 127, 127, 255)) / 255.0
DEEP_PINK = np.array((255, 20, 147, 255)) / 255.0
PINK = np.array((255, 100, 100, 255)) / 255.0
FALSE_RED = np.array((255, 51, 0, 255)) / 255.0
TRUE_GREEN = np.array(( 0, 255, 0, 255)) / 255.0
DARK_ORANGE = np.array((127, 63, 0, 255)) / 255.0
DARK_YELLOW = np.array((127, 127, 0, 255)) / 255.0
PURPLE = np.array((102, 0, 153, 255)) / 255.0
UNKNOWN_PURP = PURPLE
# FIGURE GEOMETRY
DPI = 80
#DPI = 160
#FIGSIZE = (24) # default windows fullscreen
FIGSIZE_MED = (12, 6)
FIGSIZE_SQUARE = (12, 12)
FIGSIZE_BIGGER = (24, 12)
FIGSIZE_HUGE = (32, 16)
FIGSIZE = FIGSIZE_MED
# Quality drawings
#FIGSIZE = FIGSIZE_SQUARE
#DPI = 120
tile_within = (-1, 30, 969, 1041)
if helpers.get_computer_name() == 'Ooo':
TILE_WITHIN = (-1912, 30, -969, 1071)
# DEFAULTS. (TODO: Can these be cleaned up?)
DISTINCT_COLORS = True # and False
DARKEN = None
ELL_LINEWIDTH = 1.5
if DISTINCT_COLORS:
ELL_ALPHA = .6
LINE_ALPHA = .35
else:
ELL_ALPHA = .4
LINE_ALPHA = .4
LINE_ALPHA_OVERRIDE = helpers.get_arg('--line-alpha-override', type_=float, default=None)
ELL_ALPHA_OVERRIDE = helpers.get_arg('--ell-alpha-override', type_=float, default=None)
#LINE_ALPHA_OVERRIDE = None
#ELL_ALPHA_OVERRIDE = None
ELL_COLOR = BLUE
LINE_COLOR = RED
LINE_WIDTH = 1.4
SHOW_LINES = True # True
SHOW_ELLS = True
POINT_SIZE = 2
base_fnum = 9001
def next_fnum():
global base_fnum
base_fnum += 1
return base_fnum
def my_prefs():
global LINE_COLOR
global ELL_COLOR
global ELL_LINEWIDTH
global ELL_ALPHA
LINE_COLOR = (1, 0, 0)
ELL_COLOR = (0, 0, 1)
ELL_LINEWIDTH = 2
ELL_ALPHA = .5
def execstr_global():
execstr = ['global' + key for key in globals().keys()]
return execstr
def register_matplotlib_widget(plotWidget_):
'talks to PyQt4 guis'
global plotWidget
plotWidget = plotWidget_
#fig = plotWidget.figure
#axes_list = fig.get_axes()
#ax = axes_list[0]
#plt.sca(ax)
def unregister_qt4_win(win):
global QT4_WINS
if win == 'all':
QT4_WINS = []
def register_qt4_win(win):
global QT4_WINS
QT4_WINS.append(win)
def OooScreen2():
nRows = 1
nCols = 1
x_off = 30 * 4
y_off = 30 * 4
x_0 = -1920
y_0 = 30
w = (1912 - x_off) / nRows
h = (1080 - y_off) / nCols
return dict(num_rc=(1, 1), wh=(w, h), xy_off=(x_0, y_0), wh_off=(0, 10),
row_first=True, no_tile=False)
def deterministic_shuffle(list_):
randS = int(np.random.rand() * np.uint(0 - 2) / 2)
np.random.seed(len(list_))
np.random.shuffle(list_)
np.random.seed(randS)
def distinct_colors(N, brightness=.878):
# http://blog.jianhuashao.com/2011/09/generate-n-distinct-colors.html
sat = brightness
val = brightness
HSV_tuples = [(x * 1.0 / N, sat, val) for x in xrange(N)]
RGB_tuples = map(lambda x: colorsys.hsv_to_rgb(*x), HSV_tuples)
deterministic_shuffle(RGB_tuples)
return RGB_tuples
def add_alpha(colors):
return [list(color) + [1] for color in colors]
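# --- Illustrative sketch, not part of the original API ---
# Shows how the two helpers above compose: distinct_colors() spaces n hues
# evenly around the HSV wheel and add_alpha() appends a full alpha channel,
# yielding RGBA values usable as matplotlib color arguments.
def _example_distinct_rgba(n=5):
    rgb_list = list(distinct_colors(n))  # n evenly-spaced (r, g, b) tuples
    rgba_list = add_alpha(rgb_list)      # same colors as [r, g, b, 1] lists
    return rgba_list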
def _axis_xy_width_height(ax, xaug=0, yaug=0, waug=0, haug=0):
'gets geometry of a subplot'
autoAxis = ax.axis()
xy = (autoAxis[0] + xaug, autoAxis[2] + yaug)
width = (autoAxis[1] - autoAxis[0]) + waug
height = (autoAxis[3] - autoAxis[2]) + haug
return xy, width, height
def draw_border(ax, color=GREEN, lw=2, offset=None):
'draws rectangle border around a subplot'
xy, width, height = _axis_xy_width_height(ax, -.7, -.2, 1, .4)
if offset is not None:
xoff, yoff = offset
xy = [xoff, yoff]
height = - height - yoff
width = width - xoff
rect = matplotlib.patches.Rectangle(xy, width, height, lw=lw)
rect = ax.add_patch(rect)
rect.set_clip_on(False)
rect.set_fill(False)
rect.set_edgecolor(color)
def draw_roi(roi, label=None, bbox_color=(1, 0, 0),
lbl_bgcolor=(0, 0, 0), lbl_txtcolor=(1, 1, 1), theta=0, ax=None):
if ax is None:
ax = gca()
(rx, ry, rw, rh) = roi
#cos_ = np.cos(theta)
#sin_ = np.sin(theta)
#rot_t = Affine2D([( cos_, -sin_, 0),
#( sin_, cos_, 0),
#( 0, 0, 1)])
#scale_t = Affine2D([( rw, 0, 0),
#( 0, rh, 0),
#( 0, 0, 1)])
#trans_t = Affine2D([( 1, 0, rx + rw / 2),
#( 0, 1, ry + rh / 2),
#( 0, 0, 1)])
#t_end = scale_t + rot_t + trans_t + t_start
# Transformations are specified in backwards order.
trans_roi = Affine2D()
trans_roi.scale(rw, rh)
trans_roi.rotate(theta)
trans_roi.translate(rx + rw / 2, ry + rh / 2)
t_end = trans_roi + ax.transData
bbox = matplotlib.patches.Rectangle((-.5, -.5), 1, 1, lw=2, transform=t_end)
arw_x, arw_y, arw_dx, arw_dy = (-0.5, -0.5, 1.0, 0.0)
arrowargs = dict(head_width=.1, transform=t_end, length_includes_head=True)
arrow = FancyArrow(arw_x, arw_y, arw_dx, arw_dy, **arrowargs)
bbox.set_fill(False)
#bbox.set_transform(trans)
bbox.set_edgecolor(bbox_color)
arrow.set_edgecolor(bbox_color)
arrow.set_facecolor(bbox_color)
ax.add_patch(bbox)
ax.add_patch(arrow)
#ax.add_patch(arrow2)
if label is not None:
ax_absolute_text(rx, ry, label, ax=ax,
horizontalalignment='center',
verticalalignment='center',
color=lbl_txtcolor,
backgroundcolor=lbl_bgcolor)
# ---- GENERAL FIGURE COMMANDS ----
def sanatize_img_fname(fname):
fname_clean = fname
search_replace_list = [(' ', '_'), ('\n', '--'), ('\\', ''), ('/', '')]
for old, new in search_replace_list:
fname_clean = fname_clean.replace(old, new)
fname_noext, ext = splitext(fname_clean)
fname_clean = fname_noext + ext.lower()
# Check for correct extensions
if not ext.lower() in helpers.IMG_EXTENSIONS:
fname_clean += '.png'
return fname_clean
def sanatize_img_fpath(fpath):
[dpath, fname] = split(fpath)
fname_clean = sanatize_img_fname(fname)
fpath_clean = join(dpath, fname_clean)
fpath_clean = normpath(fpath_clean)
return fpath_clean
def set_geometry(fnum, x, y, w, h):
fig = get_fig(fnum)
qtwin = fig.canvas.manager.window
qtwin.setGeometry(x, y, w, h)
def get_geometry(fnum):
fig = get_fig(fnum)
qtwin = fig.canvas.manager.window
(x1, y1, x2, y2) = qtwin.geometry().getCoords()
(x, y, w, h) = (x1, y1, x2 - x1, y2 - y1)
return (x, y, w, h)
def get_screen_info():
from PyQt4 import Qt, QtGui # NOQA
desktop = QtGui.QDesktopWidget()
mask = desktop.mask() # NOQA
layout_direction = desktop.layoutDirection() # NOQA
screen_number = desktop.screenNumber() # NOQA
normal_geometry = desktop.normalGeometry() # NOQA
num_screens = desktop.screenCount() # NOQA
avail_rect = desktop.availableGeometry() # NOQA
screen_rect = desktop.screenGeometry() # NOQA
QtGui.QDesktopWidget().availableGeometry().center() # NOQA
normal_geometry = desktop.normalGeometry() # NOQA
def get_all_figures():
all_figures_ = [manager.canvas.figure for manager in
matplotlib._pylab_helpers.Gcf.get_all_fig_managers()]
all_figures = []
    # Make sure you don't show figures that this module closed
for fig in iter(all_figures_):
if not 'df2_closed' in fig.__dict__.keys() or not fig.df2_closed:
all_figures.append(fig)
# Return all the figures sorted by their number
all_figures = sorted(all_figures, key=lambda fig: fig.number)
return all_figures
def get_all_qt4_wins():
return QT4_WINS
def all_figures_show():
if plotWidget is not None:
plotWidget.figure.show()
plotWidget.figure.canvas.draw()
for fig in iter(get_all_figures()):
time.sleep(.1)
fig.show()
fig.canvas.draw()
def all_figures_tight_layout():
for fig in iter(get_all_figures()):
fig.tight_layout()
#adjust_subplots()
time.sleep(.1)
def get_monitor_geom(monitor_num=0):
from PyQt4 import QtGui # NOQA
desktop = QtGui.QDesktopWidget()
rect = desktop.availableGeometry()
geom = (rect.x(), rect.y(), rect.width(), rect.height())
return geom
def golden_wh(x):
'returns a width / height with a golden aspect ratio'
return map(int, map(round, (x * .618, x * .312)))
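# e.g. golden_wh(1000) -> [618, 312]; the width factor .618 is 1/phi, while
# the .312 height factor is this module's own choice (it is not 1/phi ** 2)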
def all_figures_tile(num_rc=(3, 4), wh=1000, xy_off=(0, 0), wh_off=(0, 10),
row_first=True, no_tile=False, override1=False):
'Lays out all figures in a grid. if wh is a scalar, a golden ratio is used'
# RCOS TODO:
# I want this function to layout all the figures and qt windows within the
# bounds of a rectangle. (taken from the get_monitor_geom, or specified by
# the user i.e. left half of monitor 0). It should lay them out
# rectangularly and choose figure sizes such that all of them will fit.
if no_tile:
return
if not np.iterable(wh):
wh = golden_wh(wh)
all_figures = get_all_figures()
all_qt4wins = get_all_qt4_wins()
if override1:
if len(all_figures) == 1:
fig = all_figures[0]
win = fig.canvas.manager.window
win.setGeometry(0, 0, 900, 900)
update()
return
#nFigs = len(all_figures) + len(all_qt4_wins)
num_rows, num_cols = num_rc
w, h = wh
x_off, y_off = xy_off
w_off, h_off = wh_off
x_pad, y_pad = (0, 0)
printDBG('[df2] Tile all figures: ')
printDBG('[df2] wh = %r' % ((w, h),))
printDBG('[df2] xy_offsets = %r' % ((x_off, y_off),))
printDBG('[df2] wh_offsets = %r' % ((w_off, h_off),))
printDBG('[df2] xy_pads = %r' % ((x_pad, y_pad),))
if sys.platform == 'win32':
h_off += 0
w_off += 40
x_off += 40
y_off += 40
x_pad += 0
y_pad += 100
def position_window(i, win):
isqt4_mpl = isinstance(win, backend_qt4.MainWindow)
isqt4_back = isinstance(win, QtGui.QMainWindow)
if not isqt4_mpl and not isqt4_back:
raise NotImplementedError('%r-th Backend %r is not a Qt Window' % (i, win))
if row_first:
y = (i % num_rows) * (h + h_off) + 40
x = (int(i / num_rows)) * (w + w_off) + x_pad
else:
x = (i % num_cols) * (w + w_off) + 40
y = (int(i / num_cols)) * (h + h_off) + y_pad
x += x_off
y += y_off
win.setGeometry(x, y, w, h)
ioff = 0
for i, win in enumerate(all_qt4wins):
position_window(i, win)
ioff += 1
for i, fig in enumerate(all_figures):
win = fig.canvas.manager.window
position_window(i + ioff, win)
def all_figures_bring_to_front():
all_figures = get_all_figures()
for fig in iter(all_figures):
bring_to_front(fig)
def close_all_figures():
all_figures = get_all_figures()
for fig in iter(all_figures):
close_figure(fig)
def close_figure(fig):
fig.clf()
fig.df2_closed = True
qtwin = fig.canvas.manager.window
qtwin.close()
def bring_to_front(fig):
    # what is the difference between show and showNormal?
qtwin = fig.canvas.manager.window
qtwin.raise_()
qtwin.activateWindow()
qtwin.setWindowFlags(Qt.WindowStaysOnTopHint)
qtwin.setWindowFlags(Qt.WindowFlags(0))
qtwin.show()
def show():
all_figures_show()
all_figures_bring_to_front()
plt.show()
def reset():
close_all_figures()
def draw():
all_figures_show()
def update():
draw()
all_figures_bring_to_front()
def present(*args, **kwargs):
'execing present should cause IPython magic'
print('[df2] Presenting figures...')
with warnings.catch_warnings():
warnings.simplefilter("ignore")
all_figures_tile(*args, **kwargs)
all_figures_show()
all_figures_bring_to_front()
# Return an exec string
execstr = helpers.ipython_execstr()
execstr += textwrap.dedent('''
if not embedded:
print('[df2] Presenting in normal shell.')
print('[df2] ... plt.show()')
plt.show()
''')
return execstr
def save_figure(fnum=None, fpath=None, usetitle=False, overwrite=True):
#import warnings
#warnings.simplefilter("error")
# Find the figure
if fnum is None:
fig = gcf()
else:
fig = plt.figure(fnum, figsize=FIGSIZE, dpi=DPI)
# Enforce inches and DPI
fig.set_size_inches(FIGSIZE[0], FIGSIZE[1])
fnum = fig.number
if fpath is None:
# Find the title
fpath = sanatize_img_fname(fig.canvas.get_window_title())
if usetitle:
title = sanatize_img_fname(fig.canvas.get_window_title())
fpath = join(fpath, title)
# Add in DPI information
fpath_noext, ext = splitext(fpath)
size_suffix = '_DPI=%r_FIGSIZE=%d,%d' % (DPI, FIGSIZE[0], FIGSIZE[1])
fpath = fpath_noext + size_suffix + ext
    # Sanitize the filename
fpath_clean = sanatize_img_fpath(fpath)
#fname_clean = split(fpath_clean)[1]
print('[df2] save_figure() %r' % (fpath_clean,))
#adjust_subplots()
with warnings.catch_warnings():
warnings.filterwarnings('ignore', category=DeprecationWarning)
if not exists(fpath_clean) or overwrite:
fig.savefig(fpath_clean, dpi=DPI)
def set_ticks(xticks, yticks):
ax = gca()
ax.set_xticks(xticks)
ax.set_yticks(yticks)
def set_xticks(tick_set):
ax = gca()
ax.set_xticks(tick_set)
def set_yticks(tick_set):
ax = gca()
ax.set_yticks(tick_set)
def set_xlabel(lbl, ax=None):
if ax is None:
ax = gca()
ax.set_xlabel(lbl, fontproperties=FONTS.xlabel)
def set_title(title, ax=None):
if ax is None:
ax = gca()
ax.set_title(title, fontproperties=FONTS.axtitle)
def set_ylabel(lbl):
ax = gca()
ax.set_ylabel(lbl, fontproperties=FONTS.xlabel)
def plot(*args, **kwargs):
return plt.plot(*args, **kwargs)
def plot2(x_data, y_data, marker='o', title_pref='', x_label='x', y_label='y', *args,
**kwargs):
do_plot = True
ax = gca()
if len(x_data) != len(y_data):
warnstr = '[df2] ! Warning: len(x_data) != len(y_data). Cannot plot2'
warnings.warn(warnstr)
draw_text(warnstr)
do_plot = False
if len(x_data) == 0:
warnstr = '[df2] ! Warning: len(x_data) == 0. Cannot plot2'
warnings.warn(warnstr)
draw_text(warnstr)
do_plot = False
if do_plot:
ax.plot(x_data, y_data, marker, *args, **kwargs)
min_ = min(x_data.min(), y_data.min())
max_ = max(x_data.max(), y_data.max())
# Equal aspect ratio
ax.set_xlim(min_, max_)
ax.set_ylim(min_, max_)
ax.set_aspect('equal')
ax.set_xlabel(x_label, fontproperties=FONTS.xlabel)
ax.set_ylabel(y_label, fontproperties=FONTS.xlabel)
ax.set_title(title_pref + ' ' + x_label + ' vs ' + y_label,
fontproperties=FONTS.axtitle)
def adjust_subplots_xlabels():
adjust_subplots(left=.03, right=.97, bottom=.2, top=.9, hspace=.15)
def adjust_subplots_xylabels():
adjust_subplots(left=.03, right=1, bottom=.1, top=.9, hspace=.15)
def adjust_subplots_safe(left=.1, right=.9, bottom=.1, top=.9, wspace=.3, hspace=.5):
adjust_subplots(left, bottom, right, top, wspace, hspace)
def adjust_subplots(left=0.02, bottom=0.02,
right=0.98, top=0.90,
wspace=0.1, hspace=0.15):
'''
left = 0.125 # the left side of the subplots of the figure
right = 0.9 # the right side of the subplots of the figure
bottom = 0.1 # the bottom of the subplots of the figure
top = 0.9 # the top of the subplots of the figure
wspace = 0.2 # the amount of width reserved for blank space between subplots
    hspace = 0.2 # the amount of height reserved for white space between subplots
'''
#print('[df2] adjust_subplots(%r)' % locals())
plt.subplots_adjust(left, bottom, right, top, wspace, hspace)
#=======================
# TEXT FUNCTIONS
# TODO: I have too many of these. Need to consolidate
#=======================
def upperleft_text(txt):
txtargs = dict(horizontalalignment='left',
verticalalignment='top',
#fontsize='smaller',
#fontweight='ultralight',
backgroundcolor=(0, 0, 0, .5),
color=ORANGE)
ax_relative_text(.02, .02, txt, **txtargs)
def upperright_text(txt, offset=None):
txtargs = dict(horizontalalignment='right',
verticalalignment='top',
#fontsize='smaller',
#fontweight='ultralight',
backgroundcolor=(0, 0, 0, .5),
color=ORANGE,
offset=offset)
ax_relative_text(.98, .02, txt, **txtargs)
def lowerright_text(txt):
txtargs = dict(horizontalalignment='right',
verticalalignment='top',
#fontsize='smaller',
#fontweight='ultralight',
backgroundcolor=(0, 0, 0, .5),
color=ORANGE)
ax_relative_text(.98, .92, txt, **txtargs)
def absolute_lbl(x_, y_, txt, roffset=(-.02, -.02), **kwargs):
txtargs = dict(horizontalalignment='right',
verticalalignment='top',
backgroundcolor=(0, 0, 0, .5),
color=ORANGE,
**kwargs)
ax_absolute_text(x_, y_, txt, roffset=roffset, **txtargs)
def ax_relative_text(x, y, txt, ax=None, offset=None, **kwargs):
if ax is None:
ax = gca()
xy, width, height = _axis_xy_width_height(ax)
x_, y_ = ((xy[0]) + x * width, (xy[1] + height) - y * height)
if offset is not None:
xoff, yoff = offset
x_ += xoff
y_ += yoff
ax_absolute_text(x_, y_, txt, ax=ax, **kwargs)
def ax_absolute_text(x_, y_, txt, ax=None, roffset=None, **kwargs):
if ax is None:
ax = gca()
if 'fontproperties' in kwargs:
kwargs['fontproperties'] = FONTS.relative
if roffset is not None:
xroff, yroff = roffset
xy, width, height = _axis_xy_width_height(ax)
x_ += xroff * width
y_ += yroff * height
ax.text(x_, y_, txt, **kwargs)
def fig_relative_text(x, y, txt, **kwargs):
kwargs['horizontalalignment'] = 'center'
kwargs['verticalalignment'] = 'center'
fig = gcf()
#xy, width, height = _axis_xy_width_height(ax)
#x_, y_ = ((xy[0]+width)+x*width, (xy[1]+height)-y*height)
fig.text(x, y, txt, **kwargs)
def draw_text(text_str, rgb_textFG=(0, 0, 0), rgb_textBG=(1, 1, 1)):
ax = gca()
xy, width, height = _axis_xy_width_height(ax)
text_x = xy[0] + (width / 2)
text_y = xy[1] + (height / 2)
ax.text(text_x, text_y, text_str,
horizontalalignment='center',
verticalalignment='center',
color=rgb_textFG,
backgroundcolor=rgb_textBG)
def set_figtitle(figtitle, subtitle='', forcefignum=True, incanvas=True):
if figtitle is None:
figtitle = ''
fig = gcf()
if incanvas:
if subtitle != '':
subtitle = '\n' + subtitle
fig.suptitle(figtitle + subtitle, fontsize=14, fontweight='bold')
#fig.suptitle(figtitle, x=.5, y=.98, fontproperties=FONTS.figtitle)
#fig_relative_text(.5, .96, subtitle, fontproperties=FONTS.subtitle)
else:
fig.suptitle('')
window_figtitle = ('fig(%d) ' % fig.number) + figtitle
fig.canvas.set_window_title(window_figtitle)
def convert_keypress_event_mpl_to_qt4(mevent):
global TMP_mevent
TMP_mevent = mevent
# Grab the key from the mpl.KeyPressEvent
key = mevent.key
print('[df2] convert event mpl -> qt4')
print('[df2] key=%r' % key)
# dicts modified from backend_qt4.py
mpl2qtkey = {'control': Qt.Key_Control, 'shift': Qt.Key_Shift,
'alt': Qt.Key_Alt, 'super': Qt.Key_Meta,
'enter': Qt.Key_Return, 'left': Qt.Key_Left, 'up': Qt.Key_Up,
'right': Qt.Key_Right, 'down': Qt.Key_Down,
'escape': Qt.Key_Escape, 'f1': Qt.Key_F1, 'f2': Qt.Key_F2,
'f3': Qt.Key_F3, 'f4': Qt.Key_F4, 'f5': Qt.Key_F5,
'f6': Qt.Key_F6, 'f7': Qt.Key_F7, 'f8': Qt.Key_F8,
'f9': Qt.Key_F9, 'f10': Qt.Key_F10, 'f11': Qt.Key_F11,
'f12': Qt.Key_F12, 'home': Qt.Key_Home, 'end': Qt.Key_End,
'pageup': Qt.Key_PageUp, 'pagedown': Qt.Key_PageDown}
# Reverse the control and super (aka cmd/apple) keys on OSX
if sys.platform == 'darwin':
mpl2qtkey.update({'super': Qt.Key_Control, 'control': Qt.Key_Meta, })
# Try to reconstruct QtGui.KeyEvent
type_ = QtCore.QEvent.Type(QtCore.QEvent.KeyPress) # The type should always be KeyPress
text = ''
# Try to extract the original modifiers
modifiers = QtCore.Qt.NoModifier # initialize to no modifiers
if key.find(u'ctrl+') >= 0:
modifiers = modifiers | QtCore.Qt.ControlModifier
key = key.replace(u'ctrl+', u'')
print('[df2] has ctrl modifier')
text += 'Ctrl+'
if key.find(u'alt+') >= 0:
modifiers = modifiers | QtCore.Qt.AltModifier
key = key.replace(u'alt+', u'')
print('[df2] has alt modifier')
text += 'Alt+'
if key.find(u'super+') >= 0:
modifiers = modifiers | QtCore.Qt.MetaModifier
key = key.replace(u'super+', u'')
print('[df2] has super modifier')
text += 'Super+'
if key.isupper():
modifiers = modifiers | QtCore.Qt.ShiftModifier
print('[df2] has shift modifier')
text += 'Shift+'
# Try to extract the original key
try:
if key in mpl2qtkey:
key_ = mpl2qtkey[key]
else:
key_ = ord(key.upper()) # Qt works with uppercase keys
text += key.upper()
except Exception as ex:
print('[df2] ERROR key=%r' % key)
print('[df2] ERROR %r' % ex)
raise
autorep = False # default false
count = 1 # default 1
text = QtCore.QString(text) # The text is somewhat arbitrary
# Create the QEvent
print('----------------')
print('[df2] Create event')
print('[df2] type_ = %r' % type_)
print('[df2] text = %r' % text)
print('[df2] modifiers = %r' % modifiers)
print('[df2] autorep = %r' % autorep)
print('[df2] count = %r ' % count)
print('----------------')
qevent = QtGui.QKeyEvent(type_, key_, modifiers, text, autorep, count)
return qevent
def test_build_qkeyevent():
import draw_func2 as df2
qtwin = df2.QT4_WINS[0]
    # This reconstructs a test mpl event
canvas = df2.figure(1).canvas
mevent = matplotlib.backend_bases.KeyEvent('key_press_event', canvas, u'ctrl+p', x=672, y=230.0)
qevent = df2.convert_keypress_event_mpl_to_qt4(mevent)
app = qtwin.backend.app
    app.sendEvent(qtwin.ui, qevent)  # send the converted Qt event, not the mpl one
#type_ = QtCore.QEvent.Type(QtCore.QEvent.KeyPress) # The type should always be KeyPress
#text = QtCore.QString('A') # The text is somewhat arbitrary
#modifiers = QtCore.Qt.NoModifier # initialize to no modifiers
#modifiers = modifiers | QtCore.Qt.ControlModifier
#modifiers = modifiers | QtCore.Qt.AltModifier
#key_ = ord('A') # Qt works with uppercase keys
#autorep = False # default false
#count = 1 # default 1
#qevent = QtGui.QKeyEvent(type_, key_, modifiers, text, autorep, count)
return qevent
# This actually doesn't matter
def on_key_press_event(event):
'redirects keypress events to main window'
global QT4_WINS
print('[df2] %r' % event)
print('[df2] %r' % str(event.__dict__))
for qtwin in QT4_WINS:
qevent = convert_keypress_event_mpl_to_qt4(event)
app = qtwin.backend.app
print('[df2] attempting to send qevent to qtwin')
app.sendEvent(qtwin, qevent)
# TODO: FINISH ME
#PyQt4.QtGui.QKeyEvent
#qtwin.keyPressEvent(event)
#fig.canvas.manager.window.keyPressEvent()
def customize_figure(fig, docla):
if not 'user_stat_list' in fig.__dict__.keys() or docla:
fig.user_stat_list = []
fig.user_notes = []
# We dont need to catch keypress events because you just need to set it as
# an application level shortcut
# Catch key press events
#key_event_cbid = fig.__dict__.get('key_event_cbid', None)
#if key_event_cbid is not None:
#fig.canvas.mpl_disconnect(key_event_cbid)
#fig.key_event_cbid = fig.canvas.mpl_connect('key_press_event', on_key_press_event)
fig.df2_closed = False
def gcf():
if plotWidget is not None:
#print('is plotwidget visible = %r' % plotWidget.isVisible())
fig = plotWidget.figure
return fig
return plt.gcf()
def gca():
if plotWidget is not None:
#print('is plotwidget visible = %r' % plotWidget.isVisible())
axes_list = plotWidget.figure.get_axes()
current = 0
ax = axes_list[current]
return ax
return plt.gca()
def cla():
return plt.cla()
def clf():
return plt.clf()
def get_fig(fnum=None):
printDBG('[df2] get_fig(fnum=%r)' % fnum)
fig_kwargs = dict(figsize=FIGSIZE, dpi=DPI)
if plotWidget is not None:
return gcf()
if fnum is None:
try:
fig = gcf()
except Exception as ex:
printDBG('[df2] get_fig(): ex=%r' % ex)
fig = plt.figure(**fig_kwargs)
fnum = fig.number
else:
try:
fig = plt.figure(fnum, **fig_kwargs)
except Exception as ex:
print(repr(ex))
warnings.warn(repr(ex))
fig = gcf()
return fig
def get_ax(fnum=None, pnum=None):
figure(fnum=fnum, pnum=pnum)
ax = gca()
return ax
def figure(fnum=None, docla=False, title=None, pnum=(1, 1, 1), figtitle=None,
doclf=False, **kwargs):
'''
fnum = fignum = figure number
pnum = plotnum = plot tuple
'''
#matplotlib.pyplot.xkcd()
fig = get_fig(fnum)
axes_list = fig.get_axes()
# Ensure my customized settings
customize_figure(fig, docla)
# Convert pnum to tuple format
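    # e.g. an integer pnum of 231 decodes to (2, 3, 1), matching matplotlib's
    # three-digit subplot shorthand (assumes pnum has exactly three digits)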
if tools.is_int(pnum):
nr = pnum // 100
nc = pnum // 10 - (nr * 10)
px = pnum - (nr * 100) - (nc * 10)
pnum = (nr, nc, px)
if doclf: # a bit hacky. Need to rectify docla and doclf
fig.clf()
# Get the subplot
if docla or len(axes_list) == 0:
printDBG('[df2] *** NEW FIGURE %r.%r ***' % (fnum, pnum))
if not pnum is None:
#ax = plt.subplot(*pnum)
ax = fig.add_subplot(*pnum)
ax.cla()
else:
ax = gca()
else:
printDBG('[df2] *** OLD FIGURE %r.%r ***' % (fnum, pnum))
if not pnum is None:
ax = plt.subplot(*pnum) # fig.add_subplot fails here
#ax = fig.add_subplot(*pnum)
else:
ax = gca()
#ax = axes_list[0]
# Set the title
if not title is None:
ax = gca()
ax.set_title(title, fontproperties=FONTS.axtitle)
# Add title to figure
if figtitle is None and pnum == (1, 1, 1):
figtitle = title
if not figtitle is None:
set_figtitle(figtitle, incanvas=False)
return fig
def plot_pdf(data, draw_support=True, scale_to=None, label=None, color=0,
nYTicks=3):
fig = gcf()
ax = gca()
data = np.array(data)
if len(data) == 0:
warnstr = '[df2] ! Warning: len(data) = 0. Cannot visualize pdf'
warnings.warn(warnstr)
draw_text(warnstr)
return
bw_factor = .05
if isinstance(color, (int, float)):
colorx = color
line_color = plt.get_cmap('gist_rainbow')(colorx)
else:
line_color = color
# Estimate a pdf
data_pdf = estimate_pdf(data, bw_factor)
# Get probability of seen data
prob_x = data_pdf(data)
    # Get probability of unseen data
x_data = np.linspace(0, data.max(), 500)
y_data = data_pdf(x_data)
# Scale if requested
if not scale_to is None:
scale_factor = scale_to / y_data.max()
y_data *= scale_factor
prob_x *= scale_factor
    # Plot the actual data near the bottom, perturbed in Y
if draw_support:
pdfrange = prob_x.max() - prob_x.min()
perb = (np.random.randn(len(data))) * pdfrange / 30.
preb_y_data = np.abs([pdfrange / 50. for _ in data] + perb)
ax.plot(data, preb_y_data, 'o', color=line_color, figure=fig, alpha=.1)
# Plot the pdf (unseen data)
ax.plot(x_data, y_data, color=line_color, label=label)
if nYTicks is not None:
yticks = np.linspace(min(y_data), max(y_data), nYTicks)
ax.set_yticks(yticks)
def estimate_pdf(data, bw_factor):
try:
data_pdf = scipy.stats.gaussian_kde(data, bw_factor)
data_pdf.covariance_factor = bw_factor
except Exception as ex:
print('[df2] ! Exception while estimating kernel density')
print('[df2] data=%r' % (data,))
print('[df2] ex=%r' % (ex,))
raise
return data_pdf
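# --- Illustrative sketch, not part of the original API ---
# Minimal use of the kde helper above: fit a gaussian kernel density to 1-d
# samples and evaluate it on a grid (assumes scipy is available as imported).
def _example_estimate_pdf():
    samples = np.random.randn(100)  # fake 1-d data for illustration
    data_pdf = estimate_pdf(samples, bw_factor=.05)
    grid = np.linspace(samples.min(), samples.max(), 50)
    return data_pdf(grid)           # density estimates on the grid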
def show_histogram(data, bins=None, **kwargs):
print('[df2] show_histogram()')
dmin = int(np.floor(data.min()))
dmax = int(np.ceil(data.max()))
if bins is None:
bins = dmax - dmin
fig = figure(**kwargs)
ax = gca()
ax.hist(data, bins=bins, range=(dmin, dmax))
#help(np.bincount)
fig.show()
def show_signature(sig, **kwargs):
fig = figure(**kwargs)
plt.plot(sig)
fig.show()
def plot_stems(x_data=None, y_data=None):
    if y_data is not None and x_data is None:
        x_data = np.arange(len(y_data))
if len(x_data) != len(y_data):
print('[df2] WARNING plot_stems(): len(x_data)!=len(y_data)')
if len(x_data) == 0:
print('[df2] WARNING plot_stems(): len(x_data)=len(y_data)=0')
x_data_ = np.array(x_data)
y_data_ = np.array(y_data)
x_data_sort = x_data_[y_data_.argsort()[::-1]]
y_data_sort = y_data_[y_data_.argsort()[::-1]]
markerline, stemlines, baseline = pylab.stem(x_data_sort, y_data_sort, linefmt='-')
pylab.setp(markerline, 'markerfacecolor', 'b')
pylab.setp(baseline, 'linewidth', 0)
ax = gca()
ax.set_xlim(min(x_data) - 1, max(x_data) + 1)
ax.set_ylim(min(y_data) - 1, max(max(y_data), max(x_data)) + 1)
def plot_sift_signature(sift, title='', fnum=None, pnum=None):
figure(fnum=fnum, pnum=pnum)
ax = gca()
plot_bars(sift, 16)
ax.set_xlim(0, 128)
ax.set_ylim(0, 256)
space_xticks(9, 16)
space_yticks(5, 64)
ax.set_title(title)
dark_background(ax)
return ax
def dark_background(ax=None, doubleit=False):
if ax is None:
ax = gca()
xy, width, height = _axis_xy_width_height(ax)
if doubleit:
halfw = (doubleit) * (width / 2)
halfh = (doubleit) * (height / 2)
xy = (xy[0] - halfw, xy[1] - halfh)
width *= (doubleit + 1)
height *= (doubleit + 1)
rect = matplotlib.patches.Rectangle(xy, width, height, lw=0, zorder=0)
rect.set_clip_on(True)
rect.set_fill(True)
rect.set_color(BLACK * .9)
rect = ax.add_patch(rect)
def space_xticks(nTicks=9, spacing=16, ax=None):
if ax is None:
ax = gca()
ax.set_xticks(np.arange(nTicks) * spacing)
small_xticks(ax)
def space_yticks(nTicks=9, spacing=32, ax=None):
if ax is None:
ax = gca()
ax.set_yticks(np.arange(nTicks) * spacing)
small_yticks(ax)
def small_xticks(ax=None):
    if ax is None:
        ax = gca()
    for tick in ax.xaxis.get_major_ticks():
        tick.label.set_fontsize(8)
def small_yticks(ax=None):
    if ax is None:
        ax = gca()
    for tick in ax.yaxis.get_major_ticks():
        tick.label.set_fontsize(8)
def plot_bars(y_data, nColorSplits=1):
width = 1
nDims = len(y_data)
nGroup = nDims // nColorSplits
ori_colors = distinct_colors(nColorSplits)
x_data = np.arange(nDims)
ax = gca()
for ix in xrange(nColorSplits):
xs = np.arange(nGroup) + (nGroup * ix)
color = ori_colors[ix]
x_dat = x_data[xs]
y_dat = y_data[xs]
ax.bar(x_dat, y_dat, width, color=color, edgecolor=np.array(color) * .8)
def phantom_legend_label(label, color, loc='upper right'):
'adds a legend label without displaying an actor'
pass
#phantom_actor = plt.Circle((0, 0), 1, fc=color, prop=FONTS.legend, loc=loc)
#plt.legend(phant_actor, label, framealpha=.2)
#plt.legend(*zip(*legend_tups), framealpha=.2)
#legend_tups = []
#legend_tups.append((phantom_actor, label))
def legend(loc='upper right'):
ax = gca()
ax.legend(prop=FONTS.legend, loc=loc)
def plot_histpdf(data, label=None, draw_support=False, nbins=10):
freq, _ = plot_hist(data, nbins=nbins)
plot_pdf(data, draw_support=draw_support, scale_to=freq.max(), label=label)
def plot_hist(data, bins=None, nbins=10, weights=None):
    if isinstance(data, list):
        data = np.array(data)
    # the range is needed below even when bins is passed explicitly
    dmin = data.min()
    dmax = data.max()
    if bins is None:
        bins = dmax - dmin
    ax = gca()
    freq, bins_, patches = ax.hist(data, bins=nbins, weights=weights, range=(dmin, dmax))
    return freq, bins_
def variation_trunctate(data):
ax = gca()
data = np.array(data)
if len(data) == 0:
warnstr = '[df2] ! Warning: len(data) = 0. Cannot variation_truncate'
warnings.warn(warnstr)
return
trunc_max = data.mean() + data.std() * 2
trunc_min = np.floor(data.min())
ax.set_xlim(trunc_min, trunc_max)
#trunc_xticks = np.linspace(0, int(trunc_max),11)
#trunc_xticks = trunc_xticks[trunc_xticks >= trunc_min]
#trunc_xticks = np.append([int(trunc_min)], trunc_xticks)
#no_zero_yticks = ax.get_yticks()[ax.get_yticks() > 0]
#ax.set_xticks(trunc_xticks)
#ax.set_yticks(no_zero_yticks)
#_----------------- HELPERS ^^^ ---------
# ---- IMAGE CREATION FUNCTIONS ----
@tools.debug_exception
def draw_sift(desc, kp=None):
# TODO: There might be a divide by zero warning in here.
''' desc = np.random.rand(128)
desc = desc / np.sqrt((desc**2).sum())
desc = np.round(desc * 255) '''
# This is draw, because it is an overlay
ax = gca()
tau = 2 * np.pi
DSCALE = .25
XYSCALE = .5
XYSHIFT = -.75
ORI_SHIFT = 0 # -tau #1/8 * tau
# SIFT CONSTANTS
NORIENTS = 8
NX = 4
NY = 4
NBINS = NX * NY
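    # 4x4 spatial cells x 8 orientations = 128 bins, the standard SIFT layout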
def cirlce_rad2xy(radians, mag):
return np.cos(radians) * mag, np.sin(radians) * mag
discrete_ori = (np.arange(0, NORIENTS) * (tau / NORIENTS) + ORI_SHIFT)
# Build list of plot positions
# Build an "arm" for each sift measurement
arm_mag = desc / 255.0
arm_ori = np.tile(discrete_ori, (NBINS, 1)).flatten()
    # The offset x,y's for each sift measurement
arm_dxy = np.array(zip(*cirlce_rad2xy(arm_ori, arm_mag)))
yxt_gen = itertools.product(xrange(NY), xrange(NX), xrange(NORIENTS))
yx_gen = itertools.product(xrange(NY), xrange(NX))
    # Transform the drawing of the SIFT descriptor to its elliptical patch
axTrans = ax.transData
kpTrans = None
if kp is None:
kp = [0, 0, 1, 0, 1]
kp = np.array(kp)
kpT = kp.T
x, y, a, c, d = kpT[:, 0]
kpTrans = Affine2D([( a, 0, x),
( c, d, y),
( 0, 0, 1)])
axTrans = ax.transData
# Draw 8 directional arms in each of the 4x4 grid cells
arrow_patches = []
arrow_patches2 = []
for y, x, t in yxt_gen:
index = y * NX * NORIENTS + x * NORIENTS + t
(dx, dy) = arm_dxy[index]
arw_x = x * XYSCALE + XYSHIFT
arw_y = y * XYSCALE + XYSHIFT
arw_dy = dy * DSCALE * 1.5 # scale for viz Hack
arw_dx = dx * DSCALE * 1.5
#posA = (arw_x, arw_y)
#posB = (arw_x+arw_dx, arw_y+arw_dy)
_args = [arw_x, arw_y, arw_dx, arw_dy]
_kwargs = dict(head_width=.0001, transform=kpTrans, length_includes_head=False)
arrow_patches += [FancyArrow(*_args, **_kwargs)]
arrow_patches2 += [FancyArrow(*_args, **_kwargs)]
# Draw circles around each of the 4x4 grid cells
circle_patches = []
for y, x in yx_gen:
circ_xy = (x * XYSCALE + XYSHIFT, y * XYSCALE + XYSHIFT)
circ_radius = DSCALE
circle_patches += [Circle(circ_xy, circ_radius, transform=kpTrans)]
# Efficiently draw many patches with PatchCollections
circ_collection = PatchCollection(circle_patches)
circ_collection.set_facecolor('none')
circ_collection.set_transform(axTrans)
circ_collection.set_edgecolor(BLACK)
circ_collection.set_alpha(.5)
# Body of arrows
arw_collection = PatchCollection(arrow_patches)
arw_collection.set_transform(axTrans)
arw_collection.set_linewidth(.5)
arw_collection.set_color(RED)
arw_collection.set_alpha(1)
# Border of arrows
arw_collection2 = matplotlib.collections.PatchCollection(arrow_patches2)
arw_collection2.set_transform(axTrans)
arw_collection2.set_linewidth(1)
arw_collection2.set_color(BLACK)
arw_collection2.set_alpha(1)
# Add artists to axes
ax.add_collection(circ_collection)
ax.add_collection(arw_collection2)
ax.add_collection(arw_collection)
def feat_scores_to_color(fs, cmap_='hot'):
assert len(fs.shape) == 1, 'score must be 1d'
cmap = plt.get_cmap(cmap_)
mins = fs.min()
rnge = fs.max() - mins
if rnge == 0:
return [cmap(.5) for fx in xrange(len(fs))]
score2_01 = lambda score: .1 + .9 * (float(score) - mins) / (rnge)
colors = [cmap(score2_01(score)) for score in fs]
return colors
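# e.g. feat_scores_to_color(np.array([.1, .5, .9])) yields three RGBA tuples
# from the 'hot' colormap, with scores min-max scaled into [.1, 1.0]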
def colorbar(scalars, colors):
'adds a color bar next to the axes'
orientation = ['vertical', 'horizontal'][0]
TICK_FONTSIZE = 8
# Put colors and scalars in correct order
sorted_scalars = sorted(scalars)
sorted_colors = [x for (y, x) in sorted(zip(scalars, colors))]
# Make a listed colormap and mappable object
listed_cmap = mpl.colors.ListedColormap(sorted_colors)
sm = plt.cm.ScalarMappable(cmap=listed_cmap)
sm.set_array(sorted_scalars)
# Use mapable object to create the colorbar
cb = plt.colorbar(sm, orientation=orientation)
# Add the colorbar to the correct label
axis = cb.ax.xaxis if orientation == 'horizontal' else cb.ax.yaxis
position = 'bottom' if orientation == 'horizontal' else 'right'
axis.set_ticks_position(position)
axis.set_ticks([0, .5, 1])
cb.ax.tick_params(labelsize=TICK_FONTSIZE)
def draw_lines2(kpts1, kpts2, fm=None, fs=None, kpts2_offset=(0, 0),
color_list=None, **kwargs):
if not DISTINCT_COLORS:
color_list = None
# input data
if not SHOW_LINES:
return
    if fm is None:  # assume kpts are in direct correspondence
assert kpts1.shape == kpts2.shape
if len(fm) == 0:
return
ax = gca()
woff, hoff = kpts2_offset
# Draw line collection
kpts1_m = kpts1[fm[:, 0]].T
kpts2_m = kpts2[fm[:, 1]].T
xxyy_iter = iter(zip(kpts1_m[0],
kpts2_m[0] + woff,
kpts1_m[1],
kpts2_m[1] + hoff))
if color_list is None:
if fs is None: # Draw with solid color
color_list = [ LINE_COLOR for fx in xrange(len(fm))]
else: # Draw with colors proportional to score difference
color_list = feat_scores_to_color(fs)
segments = [((x1, y1), (x2, y2)) for (x1, x2, y1, y2) in xxyy_iter]
linewidth = [LINE_WIDTH for fx in xrange(len(fm))]
line_alpha = LINE_ALPHA
if LINE_ALPHA_OVERRIDE is not None:
line_alpha = LINE_ALPHA_OVERRIDE
line_group = LineCollection(segments, linewidth, color_list, alpha=line_alpha)
#plt.colorbar(line_group, ax=ax)
ax.add_collection(line_group)
#figure(100)
#plt.hexbin(x,y, cmap=plt.cm.YlOrRd_r)
def draw_kpts(kpts, *args, **kwargs):
draw_kpts2(kpts, *args, **kwargs)
def draw_kpts2(kpts, offset=(0, 0), ell=SHOW_ELLS, pts=False, pts_color=ORANGE,
pts_size=POINT_SIZE, ell_alpha=ELL_ALPHA,
ell_linewidth=ELL_LINEWIDTH, ell_color=ELL_COLOR,
color_list=None, rect=None, arrow=False, **kwargs):
if not DISTINCT_COLORS:
color_list = None
printDBG('drawkpts2: Drawing Keypoints! ell=%r pts=%r' % (ell, pts))
# get matplotlib info
ax = gca()
pltTrans = ax.transData
ell_actors = []
# data
kpts = np.array(kpts)
kptsT = kpts.T
x = kptsT[0, :] + offset[0]
y = kptsT[1, :] + offset[1]
printDBG('[df2] draw_kpts()----------')
printDBG('[df2] draw_kpts() ell=%r pts=%r' % (ell, pts))
printDBG('[df2] draw_kpts() drawing kpts.shape=%r' % (kpts.shape,))
if rect is None:
        rect = False
if pts is True:
rect = False
if ell or rect:
printDBG('[df2] draw_kpts() drawing ell kptsT.shape=%r' % (kptsT.shape,))
# We have the transformation from unit circle to ellipse here. (inv(A))
a = kptsT[2]
b = np.zeros(len(a))
c = kptsT[3]
d = kptsT[4]
kpts_iter = izip(x, y, a, b, c, d)
aff_list = [Affine2D([( a_, b_, x_),
( c_, d_, y_),
( 0, 0, 1)])
for (x_, y_, a_, b_, c_, d_) in kpts_iter]
patch_list = []
ell_actors = [Circle( (0, 0), 1, transform=aff) for aff in aff_list]
if ell:
patch_list += ell_actors
if rect:
rect_actors = [Rectangle( (-1, -1), 2, 2, transform=aff) for aff in aff_list]
patch_list += rect_actors
if arrow:
_kwargs = dict(head_width=.01, length_includes_head=False)
arrow_actors1 = [FancyArrow(0, 0, 0, 1, transform=aff, **_kwargs) for aff in aff_list]
arrow_actors2 = [FancyArrow(0, 0, 1, 0, transform=aff, **_kwargs) for aff in aff_list]
patch_list += arrow_actors1
patch_list += arrow_actors2
ellipse_collection = matplotlib.collections.PatchCollection(patch_list)
ellipse_collection.set_facecolor('none')
ellipse_collection.set_transform(pltTrans)
if ELL_ALPHA_OVERRIDE is not None:
ell_alpha = ELL_ALPHA_OVERRIDE
ellipse_collection.set_alpha(ell_alpha)
ellipse_collection.set_linewidth(ell_linewidth)
        if color_list is not None:
ell_color = color_list
if ell_color == 'distinct':
ell_color = distinct_colors(len(kpts))
ellipse_collection.set_edgecolor(ell_color)
ax.add_collection(ellipse_collection)
if pts:
printDBG('[df2] draw_kpts() drawing pts x.shape=%r y.shape=%r' % (x.shape, y.shape))
if color_list is None:
color_list = [pts_color for _ in xrange(len(x))]
ax.autoscale(enable=False)
ax.scatter(x, y, c=color_list, s=2 * pts_size, marker='o', edgecolor='none')
#ax.autoscale(enable=False)
#ax.plot(x, y, linestyle='None', marker='o', markerfacecolor=pts_color, markersize=pts_size, markeredgewidth=0)
# ---- CHIP DISPLAY COMMANDS ----
def imshow(img, fnum=None, title=None, figtitle=None, pnum=None,
interpolation='nearest', **kwargs):
'other interpolations = nearest, bicubic, bilinear'
#printDBG('[df2] ----- IMSHOW ------ ')
#printDBG('[***df2.imshow] fnum=%r pnum=%r title=%r *** ' % (fnum, pnum, title))
#printDBG('[***df2.imshow] img.shape = %r ' % (img.shape,))
#printDBG('[***df2.imshow] img.stats = %r ' % (helpers.printable_mystats(img),))
fig = figure(fnum=fnum, pnum=pnum, title=title, figtitle=figtitle, **kwargs)
ax = gca()
    if DARKEN is not None:
imgdtype = img.dtype
img = np.array(img, dtype=float) * DARKEN
img = np.array(img, dtype=imgdtype)
plt_imshow_kwargs = {
'interpolation': interpolation,
#'cmap': plt.get_cmap('gray'),
'vmin': 0,
'vmax': 255,
}
try:
if len(img.shape) == 3 and img.shape[2] == 3:
# img is in a color format
imgBGR = img
if imgBGR.dtype == np.float64:
if imgBGR.max() <= 1:
imgBGR = np.array(imgBGR, dtype=np.float32)
else:
imgBGR = np.array(imgBGR, dtype=np.uint8)
imgRGB = cv2.cvtColor(imgBGR, cv2.COLOR_BGR2RGB)
ax.imshow(imgRGB, **plt_imshow_kwargs)
elif len(img.shape) == 2:
# img is in grayscale
imgGRAY = img
ax.imshow(imgGRAY, cmap=plt.get_cmap('gray'), **plt_imshow_kwargs)
else:
raise Exception('unknown image format')
except TypeError as te:
print('[df2] imshow ERROR %r' % te)
raise
except Exception as ex:
print('[df2] img.dtype = %r' % (img.dtype,))
print('[df2] type(img) = %r' % (type(img),))
print('[df2] img.shape = %r' % (img.shape,))
print('[df2] imshow ERROR %r' % ex)
raise
#plt.set_cmap('gray')
ax.set_xticks([])
ax.set_yticks([])
#ax.set_autoscale(False)
#try:
#if pnum == 111:
#fig.tight_layout()
#except Exception as ex:
#print('[df2] !! Exception durring fig.tight_layout: '+repr(ex))
#raise
return fig, ax
def get_num_channels(img):
ndims = len(img.shape)
if ndims == 2:
nChannels = 1
elif ndims == 3 and img.shape[2] == 3:
nChannels = 3
elif ndims == 3 and img.shape[2] == 1:
nChannels = 1
else:
raise Exception('Cannot determine number of channels')
return nChannels
def stack_images(img1, img2, vert=None):
nChannels = get_num_channels(img1)
nChannels2 = get_num_channels(img2)
assert nChannels == nChannels2
(h1, w1) = img1.shape[0: 2] # get chip dimensions
(h2, w2) = img2.shape[0: 2]
woff, hoff = 0, 0
vert_wh = max(w1, w2), h1 + h2
horiz_wh = w1 + w2, max(h1, h2)
if vert is None:
# Display the orientation with the better (closer to 1) aspect ratio
vert_ar = max(vert_wh) / min(vert_wh)
horiz_ar = max(horiz_wh) / min(horiz_wh)
vert = vert_ar < horiz_ar
if vert:
wB, hB = vert_wh
hoff = h1
else:
wB, hB = horiz_wh
woff = w1
    # concatenate images
if nChannels == 3:
imgB = np.zeros((hB, wB, 3), np.uint8)
imgB[0:h1, 0:w1, :] = img1
imgB[hoff:(hoff + h2), woff:(woff + w2), :] = img2
elif nChannels == 1:
imgB = np.zeros((hB, wB), np.uint8)
imgB[0:h1, 0:w1] = img1
imgB[hoff:(hoff + h2), woff:(woff + w2)] = img2
return imgB, woff, hoff
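# Illustrative use of stack_images (shapes are hypothetical):
#   imgB, woff, hoff = stack_images(img1, img2)
# For a horizontal stack, woff equals img1's width and hoff is 0, so img2's
# keypoints can be drawn at (x + woff, y + hoff) on the combined image.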
def show_chipmatch2(rchip1, rchip2, kpts1, kpts2, fm=None, fs=None, title=None,
vert=None, fnum=None, pnum=None, **kwargs):
    '''Draws two chips and the feature matches between them.
    Keypoints kpts1 and kpts2 use the (x, y, a, c, d) format.
    '''
printDBG('[df2] draw_matches2() fnum=%r, pnum=%r' % (fnum, pnum))
# get matching keypoints + offset
(h1, w1) = rchip1.shape[0:2] # get chip (h, w) dimensions
(h2, w2) = rchip2.shape[0:2]
# Stack the compared chips
match_img, woff, hoff = stack_images(rchip1, rchip2, vert)
xywh1 = (0, 0, w1, h1)
xywh2 = (woff, hoff, w2, h2)
# Show the stacked chips
fig, ax = imshow(match_img, title=title, fnum=fnum, pnum=pnum)
    # Overlay feature match annotations
draw_fmatch(xywh1, xywh2, kpts1, kpts2, fm, fs, **kwargs)
return ax, xywh1, xywh2
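# Illustrative call (chip arrays and keypoints are assumed to exist):
#   ax, xywh1, xywh2 = show_chipmatch2(rchip1, rchip2, kpts1, kpts2, fm, fs)
# xywh1/xywh2 give each chip's placement and can be fed to draw_fmatch below.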
# draw feature match
def draw_fmatch(xywh1, xywh2, kpts1, kpts2, fm, fs=None, lbl1=None, lbl2=None,
fnum=None, pnum=None, rect=False, colorbar_=True, **kwargs):
    '''Draws the matching features as an overlay on the stacked chips.
    xywh1 - location of rchip1 in the axes
    xywh2 - location of rchip2 in the axes
    '''
if fm is None:
assert kpts1.shape == kpts2.shape, 'shapes different or fm not none'
fm = np.tile(np.arange(0, len(kpts1)), (2, 1)).T
pts = kwargs.get('draw_pts', False)
ell = kwargs.get('draw_ell', True)
lines = kwargs.get('draw_lines', True)
ell_alpha = kwargs.get('ell_alpha', .4)
nMatch = len(fm)
#printDBG('[df2.draw_fnmatch] nMatch=%r' % nMatch)
x1, y1, w1, h1 = xywh1
x2, y2, w2, h2 = xywh2
offset2 = (x2, y2)
# Custom user label for chips 1 and 2
if lbl1 is not None:
absolute_lbl(x1 + w1, y1, lbl1)
if lbl2 is not None:
absolute_lbl(x2 + w2, y2, lbl2)
# Plot the number of matches
if kwargs.get('show_nMatches', False):
upperleft_text('#match=%d' % nMatch)
# Draw all keypoints in both chips as points
if kwargs.get('all_kpts', False):
all_args = dict(ell=False, pts=pts, pts_color=GREEN, pts_size=2,
ell_alpha=ell_alpha, rect=rect)
all_args.update(kwargs)
draw_kpts2(kpts1, **all_args)
draw_kpts2(kpts2, offset=offset2, **all_args)
# Draw Lines and Ellipses and Points oh my
if nMatch > 0:
colors = [kwargs['colors']] * nMatch if 'colors' in kwargs else distinct_colors(nMatch)
if fs is not None:
colors = feat_scores_to_color(fs, 'hot')
acols = add_alpha(colors)
# Helper functions
def _drawkpts(**_kwargs):
_kwargs.update(kwargs)
fxs1 = fm[:, 0]
fxs2 = fm[:, 1]
draw_kpts2(kpts1[fxs1], rect=rect, **_kwargs)
draw_kpts2(kpts2[fxs2], offset=offset2, rect=rect, **_kwargs)
def _drawlines(**_kwargs):
_kwargs.update(kwargs)
draw_lines2(kpts1, kpts2, fm, fs, kpts2_offset=offset2, **_kwargs)
# User helpers
if ell:
_drawkpts(pts=False, ell=True, color_list=colors)
if pts:
_drawkpts(pts_size=8, pts=True, ell=False, pts_color=BLACK)
_drawkpts(pts_size=6, pts=True, ell=False, color_list=acols)
if lines:
_drawlines(color_list=colors)
else:
draw_boxedX(xywh2)
if fs is not None and colorbar_ and 'colors' in vars() and colors is not None:
colorbar(fs, colors)
#legend()
return None
def draw_boxedX(xywh, color=RED, lw=2, alpha=.5, theta=0):
'draws a big red x. redx'
ax = gca()
x1, y1, w, h = xywh
x2, y2 = x1 + w, y1 + h
segments = [((x1, y1), (x2, y2)),
((x1, y2), (x2, y1))]
trans = Affine2D()
trans.rotate(theta)
trans = trans + ax.transData
width_list = [lw] * len(segments)
color_list = [color] * len(segments)
line_group = LineCollection(segments, width_list, color_list, alpha=alpha,
transOffset=trans)
ax.add_collection(line_group)
def disconnect_callback(fig, callback_type, **kwargs):
#print('[df2] disconnect %r callback' % callback_type)
axes = kwargs.get('axes', [])
for ax in axes:
ax._hs_viewtype = ''
cbid_type = callback_type + '_cbid'
cbfn_type = callback_type + '_func'
cbid = fig.__dict__.get(cbid_type, None)
cbfn = fig.__dict__.get(cbfn_type, None)
if cbid is not None:
fig.canvas.mpl_disconnect(cbid)
else:
cbfn = None
fig.__dict__[cbid_type] = None
return cbid, cbfn
def connect_callback(fig, callback_type, callback_fn):
#print('[df2] register %r callback' % callback_type)
if callback_fn is None:
return
cbid_type = callback_type + '_cbid'
cbfn_type = callback_type + '_func'
fig.__dict__[cbid_type] = fig.canvas.mpl_connect(callback_type, callback_fn)
fig.__dict__[cbfn_type] = callback_fn
| apache-2.0 |
boyuegame/kbengine | kbe/src/lib/python/Lib/sqlite3/test/factory.py | 67 | 9620 | #-*- coding: iso-8859-1 -*-
# pysqlite2/test/factory.py: tests for the various factories in pysqlite
#
# Copyright (C) 2005-2007 Gerhard Häring <[email protected]>
#
# This file is part of pysqlite.
#
# This software is provided 'as-is', without any express or implied
# warranty. In no event will the authors be held liable for any damages
# arising from the use of this software.
#
# Permission is granted to anyone to use this software for any purpose,
# including commercial applications, and to alter it and redistribute it
# freely, subject to the following restrictions:
#
# 1. The origin of this software must not be misrepresented; you must not
# claim that you wrote the original software. If you use this software
# in a product, an acknowledgment in the product documentation would be
# appreciated but is not required.
# 2. Altered source versions must be plainly marked as such, and must not be
# misrepresented as being the original software.
# 3. This notice may not be removed or altered from any source distribution.
import unittest
import sqlite3 as sqlite
from collections.abc import Sequence
class MyConnection(sqlite.Connection):
def __init__(self, *args, **kwargs):
sqlite.Connection.__init__(self, *args, **kwargs)
def dict_factory(cursor, row):
d = {}
for idx, col in enumerate(cursor.description):
d[col[0]] = row[idx]
return d
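# Sketch of dict_factory in use (mirrors the row-factory tests below):
#   con = sqlite.connect(":memory:")
#   con.row_factory = dict_factory
#   con.execute("select 1 as a").fetchone()  # -> {'a': 1}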
class MyCursor(sqlite.Cursor):
def __init__(self, *args, **kwargs):
sqlite.Cursor.__init__(self, *args, **kwargs)
self.row_factory = dict_factory
class ConnectionFactoryTests(unittest.TestCase):
def setUp(self):
self.con = sqlite.connect(":memory:", factory=MyConnection)
def tearDown(self):
self.con.close()
def CheckIsInstance(self):
self.assertIsInstance(self.con, MyConnection)
class CursorFactoryTests(unittest.TestCase):
def setUp(self):
self.con = sqlite.connect(":memory:")
def tearDown(self):
self.con.close()
def CheckIsInstance(self):
cur = self.con.cursor(factory=MyCursor)
self.assertIsInstance(cur, MyCursor)
class RowFactoryTestsBackwardsCompat(unittest.TestCase):
def setUp(self):
self.con = sqlite.connect(":memory:")
def CheckIsProducedByFactory(self):
cur = self.con.cursor(factory=MyCursor)
cur.execute("select 4+5 as foo")
row = cur.fetchone()
self.assertIsInstance(row, dict)
cur.close()
def tearDown(self):
self.con.close()
class RowFactoryTests(unittest.TestCase):
def setUp(self):
self.con = sqlite.connect(":memory:")
def CheckCustomFactory(self):
self.con.row_factory = lambda cur, row: list(row)
row = self.con.execute("select 1, 2").fetchone()
self.assertIsInstance(row, list)
def CheckSqliteRowIndex(self):
self.con.row_factory = sqlite.Row
row = self.con.execute("select 1 as a, 2 as b").fetchone()
self.assertIsInstance(row, sqlite.Row)
col1, col2 = row["a"], row["b"]
self.assertEqual(col1, 1, "by name: wrong result for column 'a'")
        self.assertEqual(col2, 2, "by name: wrong result for column 'b'")
col1, col2 = row["A"], row["B"]
self.assertEqual(col1, 1, "by name: wrong result for column 'A'")
self.assertEqual(col2, 2, "by name: wrong result for column 'B'")
self.assertEqual(row[0], 1, "by index: wrong result for column 0")
self.assertEqual(row[1], 2, "by index: wrong result for column 1")
self.assertEqual(row[-1], 2, "by index: wrong result for column -1")
self.assertEqual(row[-2], 1, "by index: wrong result for column -2")
with self.assertRaises(IndexError):
row['c']
with self.assertRaises(IndexError):
row[2]
with self.assertRaises(IndexError):
row[-3]
with self.assertRaises(IndexError):
row[2**1000]
def CheckSqliteRowIter(self):
"""Checks if the row object is iterable"""
self.con.row_factory = sqlite.Row
row = self.con.execute("select 1 as a, 2 as b").fetchone()
for col in row:
pass
def CheckSqliteRowAsTuple(self):
"""Checks if the row object can be converted to a tuple"""
self.con.row_factory = sqlite.Row
row = self.con.execute("select 1 as a, 2 as b").fetchone()
t = tuple(row)
self.assertEqual(t, (row['a'], row['b']))
def CheckSqliteRowAsDict(self):
"""Checks if the row object can be correctly converted to a dictionary"""
self.con.row_factory = sqlite.Row
row = self.con.execute("select 1 as a, 2 as b").fetchone()
d = dict(row)
self.assertEqual(d["a"], row["a"])
self.assertEqual(d["b"], row["b"])
def CheckSqliteRowHashCmp(self):
"""Checks if the row object compares and hashes correctly"""
self.con.row_factory = sqlite.Row
row_1 = self.con.execute("select 1 as a, 2 as b").fetchone()
row_2 = self.con.execute("select 1 as a, 2 as b").fetchone()
row_3 = self.con.execute("select 1 as a, 3 as b").fetchone()
self.assertEqual(row_1, row_1)
self.assertEqual(row_1, row_2)
self.assertTrue(row_2 != row_3)
self.assertFalse(row_1 != row_1)
self.assertFalse(row_1 != row_2)
self.assertFalse(row_2 == row_3)
self.assertEqual(row_1, row_2)
self.assertEqual(hash(row_1), hash(row_2))
self.assertNotEqual(row_1, row_3)
self.assertNotEqual(hash(row_1), hash(row_3))
def CheckSqliteRowAsSequence(self):
""" Checks if the row object can act like a sequence """
self.con.row_factory = sqlite.Row
row = self.con.execute("select 1 as a, 2 as b").fetchone()
as_tuple = tuple(row)
self.assertEqual(list(reversed(row)), list(reversed(as_tuple)))
self.assertIsInstance(row, Sequence)
def tearDown(self):
self.con.close()
class TextFactoryTests(unittest.TestCase):
def setUp(self):
self.con = sqlite.connect(":memory:")
def CheckUnicode(self):
austria = "Österreich"
row = self.con.execute("select ?", (austria,)).fetchone()
self.assertEqual(type(row[0]), str, "type of row[0] must be unicode")
def CheckString(self):
self.con.text_factory = bytes
austria = "Österreich"
row = self.con.execute("select ?", (austria,)).fetchone()
self.assertEqual(type(row[0]), bytes, "type of row[0] must be bytes")
self.assertEqual(row[0], austria.encode("utf-8"), "column must equal original data in UTF-8")
def CheckCustom(self):
self.con.text_factory = lambda x: str(x, "utf-8", "ignore")
austria = "Österreich"
row = self.con.execute("select ?", (austria,)).fetchone()
self.assertEqual(type(row[0]), str, "type of row[0] must be unicode")
self.assertTrue(row[0].endswith("reich"), "column must contain original data")
def CheckOptimizedUnicode(self):
# In py3k, str objects are always returned when text_factory
# is OptimizedUnicode
self.con.text_factory = sqlite.OptimizedUnicode
austria = "Österreich"
        germany = "Deutschland"
a_row = self.con.execute("select ?", (austria,)).fetchone()
d_row = self.con.execute("select ?", (germany,)).fetchone()
self.assertEqual(type(a_row[0]), str, "type of non-ASCII row must be str")
self.assertEqual(type(d_row[0]), str, "type of ASCII-only row must be str")
def tearDown(self):
self.con.close()
class TextFactoryTestsWithEmbeddedZeroBytes(unittest.TestCase):
def setUp(self):
self.con = sqlite.connect(":memory:")
self.con.execute("create table test (value text)")
self.con.execute("insert into test (value) values (?)", ("a\x00b",))
def CheckString(self):
# text_factory defaults to str
row = self.con.execute("select value from test").fetchone()
self.assertIs(type(row[0]), str)
self.assertEqual(row[0], "a\x00b")
def CheckBytes(self):
self.con.text_factory = bytes
row = self.con.execute("select value from test").fetchone()
self.assertIs(type(row[0]), bytes)
self.assertEqual(row[0], b"a\x00b")
def CheckBytearray(self):
self.con.text_factory = bytearray
row = self.con.execute("select value from test").fetchone()
self.assertIs(type(row[0]), bytearray)
self.assertEqual(row[0], b"a\x00b")
def CheckCustom(self):
# A custom factory should receive a bytes argument
self.con.text_factory = lambda x: x
row = self.con.execute("select value from test").fetchone()
self.assertIs(type(row[0]), bytes)
self.assertEqual(row[0], b"a\x00b")
def tearDown(self):
self.con.close()
def suite():
connection_suite = unittest.makeSuite(ConnectionFactoryTests, "Check")
cursor_suite = unittest.makeSuite(CursorFactoryTests, "Check")
row_suite_compat = unittest.makeSuite(RowFactoryTestsBackwardsCompat, "Check")
row_suite = unittest.makeSuite(RowFactoryTests, "Check")
text_suite = unittest.makeSuite(TextFactoryTests, "Check")
text_zero_bytes_suite = unittest.makeSuite(TextFactoryTestsWithEmbeddedZeroBytes, "Check")
return unittest.TestSuite((connection_suite, cursor_suite, row_suite_compat, row_suite, text_suite, text_zero_bytes_suite))
def test():
runner = unittest.TextTestRunner()
runner.run(suite())
if __name__ == "__main__":
test()
| lgpl-3.0 |
crosswalk-project/chromium-crosswalk-efl | tools/telemetry/telemetry/core/heap/chrome_js_heap_snapshot_parser_unittest.py | 44 | 2529 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import unittest
from telemetry.core.heap import chrome_js_heap_snapshot_parser
class ChromeJsHeapSnapshotParserUnittest(unittest.TestCase):
def _HeapSnapshotData(self, node_types, edge_types, node_fields, edge_fields,
node_list, edge_list, strings):
"""Helper for creating heap snapshot data."""
return {'snapshot': {'meta': {'node_types': [node_types],
'edge_types': [edge_types],
'node_fields': node_fields,
'edge_fields': edge_fields}},
'nodes': node_list,
'edges': edge_list,
'strings': strings}
def testParseSimpleSnapshot(self):
# Create a snapshot containing 2 nodes and an edge between them.
node_types = ['object']
edge_types = ['property']
node_fields = ['type', 'name', 'id', 'edge_count']
edge_fields = ['type', 'name_or_index', 'to_node']
node_list = [0, 0, 0, 1,
0, 1, 1, 0]
edge_list = [0, 2, 4]
strings = ['node1', 'node2', 'edge1']
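    # With node_fields [type, name, id, edge_count], node_list encodes two
    # nodes: node1 (name=strings[0], edge_count=1) and node2 (name=strings[1],
    # edge_count=0). edge_list [0, 2, 4] is one 'property' edge named
    # strings[2] ('edge1') whose to_node=4 is node2's offset in node_list.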
heap = self._HeapSnapshotData(node_types, edge_types, node_fields,
edge_fields, node_list, edge_list, strings)
objects = list(chrome_js_heap_snapshot_parser.ChromeJsHeapSnapshotParser(
json.dumps(heap)).GetAllLiveHeapObjects())
self.assertEqual(2, len(objects))
if objects[0].edges_from:
from_ix = 0
to_ix = 1
else:
from_ix = 1
to_ix = 0
self.assertEqual('node1', objects[from_ix].class_name)
self.assertEqual('node2', objects[to_ix].class_name)
self.assertEqual(1, len(objects[from_ix].edges_from))
self.assertEqual(0, len(objects[from_ix].edges_to))
self.assertEqual(0, len(objects[to_ix].edges_from))
self.assertEqual(1, len(objects[to_ix].edges_to))
self.assertEqual('node1',
objects[from_ix].edges_from[0].from_object.class_name)
self.assertEqual('node2',
objects[from_ix].edges_from[0].to_object.class_name)
self.assertEqual('edge1', objects[from_ix].edges_from[0].name_string)
self.assertEqual('node1', objects[to_ix].edges_to[0].from_object.class_name)
self.assertEqual('node2', objects[to_ix].edges_to[0].to_object.class_name)
self.assertEqual('edge1', objects[to_ix].edges_to[0].name_string)
| bsd-3-clause |
Semi-global/edx-platform | common/djangoapps/django_locale/middleware.py | 81 | 3736 | # TODO: This file is imported from the stable Django 1.8 branch. Remove this file
# and re-import this middleware from Django once the codebase is upgraded. [PLAT-671]
# pylint: disable=invalid-name, missing-docstring
"This is the locale selecting middleware that will look at accept headers"
from django.conf import settings
from django.core.urlresolvers import (
LocaleRegexURLResolver, get_resolver, get_script_prefix, is_valid_path,
)
from django.http import HttpResponseRedirect
from django.utils import translation
from django.utils.cache import patch_vary_headers
# Override the Django 1.4 implementation with the 1.8 implementation
from django_locale.trans_real import get_language_from_request
class LocaleMiddleware(object):
"""
This is a very simple middleware that parses a request
and decides what translation object to install in the current
thread context. This allows pages to be dynamically
translated to the language the user desires (if the language
is available, of course).
"""
response_redirect_class = HttpResponseRedirect
def __init__(self):
self._is_language_prefix_patterns_used = False
for url_pattern in get_resolver(None).url_patterns:
if isinstance(url_pattern, LocaleRegexURLResolver):
self._is_language_prefix_patterns_used = True
break
def process_request(self, request):
check_path = self.is_language_prefix_patterns_used()
# This call is broken in Django 1.4:
# https://github.com/django/django/blob/stable/1.4.x/django/utils/translation/trans_real.py#L399
# (we override parse_accept_lang_header to a fixed version in dark_lang.middleware)
language = get_language_from_request(
request, check_path=check_path)
translation.activate(language)
request.LANGUAGE_CODE = translation.get_language()
def process_response(self, request, response):
language = translation.get_language()
language_from_path = translation.get_language_from_path(request.path_info)
if (response.status_code == 404 and not language_from_path
and self.is_language_prefix_patterns_used()):
urlconf = getattr(request, 'urlconf', None)
language_path = '/%s%s' % (language, request.path_info)
path_valid = is_valid_path(language_path, urlconf)
if (not path_valid and settings.APPEND_SLASH
and not language_path.endswith('/')):
path_valid = is_valid_path("%s/" % language_path, urlconf)
if path_valid:
script_prefix = get_script_prefix()
language_url = "%s://%s%s" % (
request.scheme,
request.get_host(),
# insert language after the script prefix and before the
# rest of the URL
request.get_full_path().replace(
script_prefix,
'%s%s/' % (script_prefix, language),
1
)
)
return self.response_redirect_class(language_url)
if not (self.is_language_prefix_patterns_used()
and language_from_path):
patch_vary_headers(response, ('Accept-Language',))
if 'Content-Language' not in response:
response['Content-Language'] = language
return response
def is_language_prefix_patterns_used(self):
"""
Returns `True` if the `LocaleRegexURLResolver` is used
at root level of the urlpatterns, else it returns `False`.
"""
return self._is_language_prefix_patterns_used
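# Sketch (assumes a standard Django i18n setup): the resolver check above is
# satisfied when the root URLconf uses language-prefixed patterns, e.g.
#   from django.conf.urls.i18n import i18n_patterns
#   urlpatterns = i18n_patterns('', url(r'^about/$', about_view))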
| agpl-3.0 |
rjschwei/azure-sdk-for-python | azure-batch/azure/batch/models/job_list_preparation_and_release_task_status_options.py | 3 | 2209 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class JobListPreparationAndReleaseTaskStatusOptions(Model):
"""Additional parameters for the Job_list_preparation_and_release_task_status
operation.
:param filter: An OData $filter clause.
:type filter: str
:param select: An OData $select clause.
:type select: str
:param max_results: The maximum number of items to return in the response.
A maximum of 1000 tasks can be returned. Default value: 1000 .
:type max_results: int
:param timeout: The maximum time that the server can spend processing the
request, in seconds. The default is 30 seconds. Default value: 30 .
:type timeout: int
:param client_request_id: The caller-generated request identity, in the
form of a GUID with no decoration such as curly braces, e.g.
9C4D50EE-2D56-4CD3-8152-34347DC9F2B0.
:type client_request_id: str
:param return_client_request_id: Whether the server should return the
client-request-id in the response. Default value: False .
:type return_client_request_id: bool
:param ocp_date: The time the request was issued. Client libraries
typically set this to the current system clock time; set it explicitly if
you are calling the REST API directly.
:type ocp_date: datetime
"""
def __init__(self, filter=None, select=None, max_results=1000, timeout=30, client_request_id=None, return_client_request_id=False, ocp_date=None):
self.filter = filter
self.select = select
self.max_results = max_results
self.timeout = timeout
self.client_request_id = client_request_id
self.return_client_request_id = return_client_request_id
self.ocp_date = ocp_date
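# Illustrative construction (the filter value is hypothetical OData syntax):
#   options = JobListPreparationAndReleaseTaskStatusOptions(
#       filter="nodeId eq 'tvm-123'", max_results=100, timeout=60)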
| mit |
foreverfaint/scrapy | scrapy/tests/test_utils_reqser.py | 30 | 2683 | import unittest
from scrapy.http import Request
from scrapy.spider import Spider
from scrapy.utils.reqser import request_to_dict, request_from_dict
class RequestSerializationTest(unittest.TestCase):
def setUp(self):
self.spider = TestSpider()
def test_basic(self):
r = Request("http://www.example.com")
self._assert_serializes_ok(r)
def test_all_attributes(self):
r = Request("http://www.example.com",
callback='parse_item',
errback='handle_error',
method="POST",
body="some body",
headers={'content-encoding': 'text/html; charset=latin-1'},
cookies={'currency': 'usd'},
encoding='latin-1',
priority=20,
meta={'a': 'b'})
self._assert_serializes_ok(r)
def test_latin1_body(self):
r = Request("http://www.example.com", body="\xa3")
self._assert_serializes_ok(r)
def test_utf8_body(self):
r = Request("http://www.example.com", body="\xc2\xa3")
self._assert_serializes_ok(r)
def _assert_serializes_ok(self, request, spider=None):
d = request_to_dict(request, spider=spider)
request2 = request_from_dict(d, spider=spider)
self._assert_same_request(request, request2)
def _assert_same_request(self, r1, r2):
self.assertEqual(r1.url, r2.url)
self.assertEqual(r1.callback, r2.callback)
self.assertEqual(r1.errback, r2.errback)
self.assertEqual(r1.method, r2.method)
self.assertEqual(r1.body, r2.body)
self.assertEqual(r1.headers, r2.headers)
self.assertEqual(r1.cookies, r2.cookies)
self.assertEqual(r1.meta, r2.meta)
self.assertEqual(r1._encoding, r2._encoding)
self.assertEqual(r1.priority, r2.priority)
self.assertEqual(r1.dont_filter, r2.dont_filter)
def test_callback_serialization(self):
r = Request("http://www.example.com", callback=self.spider.parse_item, \
errback=self.spider.handle_error)
self._assert_serializes_ok(r, spider=self.spider)
def test_unserializable_callback1(self):
r = Request("http://www.example.com", callback=lambda x: x)
self.assertRaises(ValueError, request_to_dict, r)
self.assertRaises(ValueError, request_to_dict, r, spider=self.spider)
def test_unserializable_callback2(self):
r = Request("http://www.example.com", callback=self.spider.parse_item)
self.assertRaises(ValueError, request_to_dict, r)
class TestSpider(Spider):
name = 'test'
def parse_item(self, response):
pass
def handle_error(self, failure):
pass
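# Sketch of the round trip these tests exercise (a spider is only needed for
# bound callbacks):
#   d = request_to_dict(Request("http://www.example.com"))
#   request_from_dict(d).url  # -> 'http://www.example.com'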
| bsd-3-clause |
silas/rock | rock/text.py | 1 | 1235 | from __future__ import unicode_literals
def _(text):
return text.strip('\n')
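# e.g. _("\nUsage: rock\n") -> "Usage: rock" (only outer newlines are stripped)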
USAGE = _("""
Usage: rock [--help] [--env=ENV] [--path=PATH] [--runtime=RUNTIME] command
""")
HELP = _("""
--help show help message
--verbose show script while running
--dry-run show script without running
--version show version
project:
--env=ENV set env
--path=PATH set path
--runtime=RUNTIME set runtime
commands:
build run build
test run tests
run run in environment
clean clean project files
other commands:
config show project configuration
env show evaluable environment variables
init generates project skeleton
runtime show installed runtimes
""")
CONFIG_USAGE = _("""
Usage: rock config [--format=FORMAT]
""")
CONFIG_HELP = _("""
--help show help message
--format set output format (json, yaml)
""")
ENV_USAGE = _("""
Usage: rock env
""")
ENV_HELP = _("""
--help show help message
""")
RUNTIME_USAGE = _("""
Usage: rock runtime
""")
RUNTIME_HELP = _("""
--help show help message
""")
| mit |
andrew-szymanski/gae_django | django/contrib/localflavor/es/es_provinces.py | 436 | 1482 | # -*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
PROVINCE_CHOICES = (
('01', _('Arava')),
('02', _('Albacete')),
('03', _('Alacant')),
('04', _('Almeria')),
('05', _('Avila')),
('06', _('Badajoz')),
('07', _('Illes Balears')),
('08', _('Barcelona')),
('09', _('Burgos')),
('10', _('Caceres')),
('11', _('Cadiz')),
('12', _('Castello')),
('13', _('Ciudad Real')),
('14', _('Cordoba')),
('15', _('A Coruna')),
('16', _('Cuenca')),
('17', _('Girona')),
('18', _('Granada')),
('19', _('Guadalajara')),
('20', _('Guipuzkoa')),
('21', _('Huelva')),
('22', _('Huesca')),
('23', _('Jaen')),
('24', _('Leon')),
('25', _('Lleida')),
('26', _('La Rioja')),
('27', _('Lugo')),
('28', _('Madrid')),
('29', _('Malaga')),
('30', _('Murcia')),
('31', _('Navarre')),
('32', _('Ourense')),
('33', _('Asturias')),
('34', _('Palencia')),
('35', _('Las Palmas')),
('36', _('Pontevedra')),
('37', _('Salamanca')),
('38', _('Santa Cruz de Tenerife')),
('39', _('Cantabria')),
('40', _('Segovia')),
('41', _('Seville')),
('42', _('Soria')),
('43', _('Tarragona')),
('44', _('Teruel')),
('45', _('Toledo')),
('46', _('Valencia')),
('47', _('Valladolid')),
('48', _('Bizkaia')),
('49', _('Zamora')),
('50', _('Zaragoza')),
('51', _('Ceuta')),
('52', _('Melilla')),
)
| bsd-3-clause |
xianian/qt-creator | tests/system/suite_general/tst_session_handling/test.py | 3 | 8199 | #############################################################################
##
## Copyright (C) 2015 The Qt Company Ltd.
## Contact: http://www.qt.io/licensing
##
## This file is part of Qt Creator.
##
## Commercial License Usage
## Licensees holding valid commercial Qt licenses may use this file in
## accordance with the commercial license agreement provided with the
## Software or, alternatively, in accordance with the terms contained in
## a written agreement between you and The Qt Company. For licensing terms and
## conditions see http://www.qt.io/terms-conditions. For further information
## use the contact form at http://www.qt.io/contact-us.
##
## GNU Lesser General Public License Usage
## Alternatively, this file may be used under the terms of the GNU Lesser
## General Public License version 2.1 or version 3 as published by the Free
## Software Foundation and appearing in the file LICENSE.LGPLv21 and
## LICENSE.LGPLv3 included in the packaging of this file. Please review the
## following information to ensure the GNU Lesser General Public License
## requirements will be met: https://www.gnu.org/licenses/lgpl.html and
## http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
##
## In addition, as a special exception, The Qt Company gives you certain additional
## rights. These rights are described in The Qt Company LGPL Exception
## version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
##
#############################################################################
source("../../shared/qtcreator.py")
def main():
projects = prepareTestExamples()
if not projects:
return
sessionName = "SampleSession"
startApplication("qtcreator" + SettingsPath)
if not startedWithoutPluginError():
return
createAndSwitchToSession(sessionName)
mainWindow = waitForObject(":Qt Creator_Core::Internal::MainWindow")
test.verify(waitFor("sessionName in str(mainWindow.windowTitle)", 2000),
"Verifying window title contains created session name.")
if canTestEmbeddedQtQuick():
checkWelcomePage(sessionName, True)
for project in projects:
openQmakeProject(project, Targets.DESKTOP_480_DEFAULT)
progressBarWait(20000)
checkNavigator(68, "Verifying whether all projects have been opened.")
openDocument("propertyanimation.QML.qml.color-animation\\.qml")
openDocument("declarative-music-browser.Headers.utility\\.h")
checkOpenDocuments(2, "Verifying whether 2 files are open.")
originalText = str(waitForObject(":Qt Creator_CppEditor::Internal::CPPEditorWidget").plainText)
switchSession("default")
test.verify(waitFor("'Qt Creator' == str(mainWindow.windowTitle)", 2000),
"Verifying window title is set to default.")
if canTestEmbeddedQtQuick():
checkWelcomePage(sessionName, False)
switchViewTo(ViewConstants.EDIT)
checkNavigator(1, "Verifying that no more project is opened.")
checkOpenDocuments(0, "Verifying whether all files have been closed.")
switchSession(sessionName)
test.verify(waitFor("sessionName in str(mainWindow.windowTitle)", 2000),
"Verifying window title contains created session name.")
checkNavigator(68, "Verifying whether all projects have been re-opened.")
checkOpenDocuments(2, "Verifying whether 2 files have been re-opened.")
if test.verify("utility.h" in str(mainWindow.windowTitle),
"Verifying whether utility.h has been opened."):
current = str(waitForObject(":Qt Creator_CppEditor::Internal::CPPEditorWidget").plainText)
test.verify(originalText == current, "Verifying that same file has been opened.")
checkForSessionFile(sessionName, projects)
invokeMenuItem("File", "Exit")
def prepareTestExamples():
examples = [os.path.join(sdkPath, "Examples", "4.7", "declarative", "animation", "basics",
"property-animation", "propertyanimation.pro"),
os.path.join(sdkPath, "Examples", "QtMobility", "declarative-music-browser",
"declarative-music-browser.pro")
]
projects = []
for sourceExample in examples:
if not neededFilePresent(sourceExample):
return None
# copy example projects to temp directory
for sourceExample in examples:
templateDir = prepareTemplate(os.path.dirname(sourceExample))
projects.append(os.path.join(templateDir, os.path.basename(sourceExample)))
return projects
def switchSession(toSession):
test.log("Switching to session '%s'" % toSession)
invokeMenuItem("File", "Session Manager...")
clickItem(waitForObject("{name='sessionList' type='QListWidget' visible='1' "
"window=':Session Manager_ProjectExplorer::Internal::SessionDialog'}"),
toSession, 5, 5, 0, Qt.LeftButton)
clickButton(waitForObject("{name='btSwitch' text='Switch to' type='QPushButton' visible='1' "
"window=':Session Manager_ProjectExplorer::Internal::SessionDialog'}"))
def createAndSwitchToSession(toSession):
sessionInputDialog = ("{type='ProjectExplorer::Internal::SessionNameInputDialog' unnamed='1' "
"visible='1' windowTitle='New Session Name'}")
test.log("Switching to session '%s' after creating it." % toSession)
invokeMenuItem("File", "Session Manager...")
clickButton(waitForObject("{name='btCreateNew' text='New' type='QPushButton' visible='1' "
"window=':Session Manager_ProjectExplorer::Internal::SessionDialog'}"))
lineEdit = waitForObject("{type='QLineEdit' unnamed='1' visible='1' window=%s}"
% sessionInputDialog)
replaceEditorContent(lineEdit, toSession)
clickButton(waitForObject("{text='Switch To' type='QPushButton' unnamed='1' visible='1' "
"window=%s}" % sessionInputDialog))
def checkWelcomePage(sessionName, isCurrent=False):
if isQt54Build:
welcomePage = ":WelcomePageStyledBar.WelcomePage_QQuickView"
else:
welcomePage = ":Qt Creator.WelcomePage_QQuickWidget"
switchViewTo(ViewConstants.WELCOME)
mouseClick(waitForObject("{container='%s' text='Projects' type='Button' "
"unnamed='1' visible='true'}" % welcomePage))
waitForObject("{container='%s' id='sessionsTitle' text='Sessions' type='Text' "
"unnamed='1' visible='true'}" % welcomePage)
if isCurrent:
sessions = ["default", "%s (current session)" % sessionName]
else:
sessions = ["default (current session)", sessionName]
for sessionName in sessions:
test.verify(object.exists("{container='%s' enabled='true' type='LinkedText' unnamed='1' "
"visible='true' text='%s'}" % (welcomePage, sessionName)),
"Verifying session '%s' exists." % sessionName)
def checkNavigator(expectedRows, message):
navigatorModel = waitForObject(":Qt Creator_Utils::NavigationTreeView").model()
test.compare(expectedRows, len(__iterateChildren__(navigatorModel, QModelIndex())), message)
def checkOpenDocuments(expectedRows, message):
selectFromCombo(":Qt Creator_Core::Internal::NavComboBox", "Open Documents")
openDocsWidget = waitForObject(":OpenDocuments_Widget")
test.compare(openDocsWidget.model().rowCount(), expectedRows, message)
def checkForSessionFile(sessionName, proFiles):
global tmpSettingsDir
sessionFile = os.path.join(tmpSettingsDir, "QtProject", "qtcreator", "%s.qws" % sessionName)
if test.verify(os.path.exists(sessionFile),
"Verifying whether session file '%s' has been created." % sessionFile):
content = readFile(sessionFile)
for proFile in proFiles:
if platform.system() in ('Microsoft', 'Windows'):
proFile = proFile.replace('\\', '/')
test.verify(proFile in content, "Verifying whether expected .pro file (%s) is listed "
"inside session file." % proFile)
def init():
removeQmlDebugFolderIfExists()
| lgpl-2.1 |
setsid/yacron | yacron/time.py | 1 | 5052 | """
This file is part of yacron.
Copyright (C) 2016 Vadim Kuznetsov <[email protected]>
yacron is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
yacron is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with yacron. If not, see <http://www.gnu.org/licenses/>.
"""
class CronTime(object):
"""
Parse and store scheduled time.
"""
def __init__(self, minutes, hours, weekdays):
"""
Parse and store the minutes, hours and weekdays values.
:param minutes: Minutes (str)
:param hours: Hours (str)
:param weekdays: Weekdays (str)
:raise ValueError if any of the values is invalid
"""
self._minutes = self._parse_value(0, minutes, 59)
self._hours = self._parse_value(0, hours, 23)
# slashes are unacceptable in weekdays value
self._weekdays = self._parse_value(1, weekdays, 7, slash_acceptable=False)
@property
def minutes(self):
return self._minutes
@property
def hours(self):
return self._hours
@property
def weekdays(self):
return self._weekdays
def _check_value_range(self, min_value, value, max_value):
"""
Check is value in range.
:param min_value: Minimal valid value
:param value: Value
:param max_value: Maximum valid value
:return True if the value is in range
:raise ValueError if the value is out of range
"""
if not (min_value <= value <= max_value):
raise ValueError("invalid value '{0:d}', must be in [{1:d}..{2:d}]".format(value, min_value, max_value))
return True
def _check_special_chars(self, value):
"""
Check special characters in the value:
1) value can not contains more than one '*' or '/' or '-' characters;
2) special characters can not be mixed (there can be the only one except ',');
:param value: Value.
:raise ValueError if any invalid sequence of special characters found in the value.
"""
all_count = value.count('*')
slash_count = value.count('/')
comma_count = value.count(',')
hyphen_count = value.count('-')
is_invalid = any((
all_count > 1,
slash_count > 1,
hyphen_count > 1,
all_count and (slash_count or comma_count or hyphen_count),
slash_count and (all_count or comma_count or hyphen_count),
comma_count and (all_count or slash_count or hyphen_count),
hyphen_count and (all_count or slash_count or comma_count),
))
if is_invalid:
raise ValueError("invalid format in value '{0:s}'".format(value))
def _parse_value(self, min_value, value, max_value, slash_acceptable=True):
"""
Parse and check a value.
:param min_value: Minimal valid value
:param value: Value
:param max_value: Maximum valid value
:param slash_acceptable: Slash is valid in the value
:return: List of values.
:raise ValueError if parsing failed
"""
self._check_special_chars(value)
if value == '*':
return list(range(min_value, max_value + 1))
if value.startswith('/'):
if not slash_acceptable:
raise ValueError("value '{0:s}' can not contains slash".format(value))
divisor = int(value[1:])
self._check_value_range(min_value, divisor, max_value)
return [n for n in range(min_value, max_value + 1) if n % divisor == 0]
if '-' in value:
start_value, stop_value = map(int, value.split('-'))
self._check_value_range(min_value, start_value, max_value)
self._check_value_range(min_value, stop_value, max_value)
if start_value >= stop_value:
raise ValueError("start value can not be greater or equal to stop value")
return list(range(start_value, stop_value + 1))
if ',' in value:
return [n for n in map(int, value.split(',')) if self._check_value_range(min_value, n, max_value)]
return [int(value)]
def check_time(self, cur_time):
"""
Compare parsed time and current time.
:param cur_time: Current time (datetime).
:return: True if current time matches with parser time and False otherwise
"""
return all((
cur_time.minute in self._minutes,
cur_time.hour in self._hours,
cur_time.isoweekday() in self._weekdays,
))
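# Illustrative use (values are examples): every 15 minutes, 09:00-17:59,
# on Monday/Wednesday/Friday.
#   ct = CronTime('/15', '9-17', '1,3,5')
#   ct.minutes                  # -> [0, 15, 30, 45]
#   ct.check_time(some_dt)      # True only when all three fields match some_dt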
| gpl-3.0 |
Dandandan/wikiprogramming | jsrepl/build/extern/python/reloop-closured/lib/python2.7/distutils/command/install_lib.py | 251 | 8338 | """distutils.command.install_lib
Implements the Distutils 'install_lib' command
(install all Python modules)."""
__revision__ = "$Id$"
import os
import sys
from distutils.core import Command
from distutils.errors import DistutilsOptionError
# Extension for Python source files.
if hasattr(os, 'extsep'):
PYTHON_SOURCE_EXTENSION = os.extsep + "py"
else:
PYTHON_SOURCE_EXTENSION = ".py"
class install_lib(Command):
description = "install all Python modules (extensions and pure Python)"
# The byte-compilation options are a tad confusing. Here are the
# possible scenarios:
# 1) no compilation at all (--no-compile --no-optimize)
# 2) compile .pyc only (--compile --no-optimize; default)
# 3) compile .pyc and "level 1" .pyo (--compile --optimize)
# 4) compile "level 1" .pyo only (--no-compile --optimize)
# 5) compile .pyc and "level 2" .pyo (--compile --optimize-more)
# 6) compile "level 2" .pyo only (--no-compile --optimize-more)
#
# The UI for this is two option, 'compile' and 'optimize'.
# 'compile' is strictly boolean, and only decides whether to
# generate .pyc files. 'optimize' is three-way (0, 1, or 2), and
# decides both whether to generate .pyo files and what level of
# optimization to use.
user_options = [
('install-dir=', 'd', "directory to install to"),
('build-dir=','b', "build directory (where to install from)"),
('force', 'f', "force installation (overwrite existing files)"),
('compile', 'c', "compile .py to .pyc [default]"),
('no-compile', None, "don't compile .py files"),
('optimize=', 'O',
"also compile with optimization: -O1 for \"python -O\", "
"-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
('skip-build', None, "skip the build steps"),
]
boolean_options = ['force', 'compile', 'skip-build']
negative_opt = {'no-compile' : 'compile'}
def initialize_options(self):
# let the 'install' command dictate our installation directory
self.install_dir = None
self.build_dir = None
self.force = 0
self.compile = None
self.optimize = None
self.skip_build = None
def finalize_options(self):
# Get all the information we need to install pure Python modules
# from the umbrella 'install' command -- build (source) directory,
# install (target) directory, and whether to compile .py files.
self.set_undefined_options('install',
('build_lib', 'build_dir'),
('install_lib', 'install_dir'),
('force', 'force'),
('compile', 'compile'),
('optimize', 'optimize'),
('skip_build', 'skip_build'),
)
if self.compile is None:
self.compile = 1
if self.optimize is None:
self.optimize = 0
if not isinstance(self.optimize, int):
try:
self.optimize = int(self.optimize)
if self.optimize not in (0, 1, 2):
raise AssertionError
except (ValueError, AssertionError):
raise DistutilsOptionError, "optimize must be 0, 1, or 2"
def run(self):
# Make sure we have built everything we need first
self.build()
# Install everything: simply dump the entire contents of the build
# directory to the installation directory (that's the beauty of
# having a build directory!)
outfiles = self.install()
# (Optionally) compile .py to .pyc
if outfiles is not None and self.distribution.has_pure_modules():
self.byte_compile(outfiles)
# -- Top-level worker functions ------------------------------------
# (called from 'run()')
def build(self):
if not self.skip_build:
if self.distribution.has_pure_modules():
self.run_command('build_py')
if self.distribution.has_ext_modules():
self.run_command('build_ext')
def install(self):
if os.path.isdir(self.build_dir):
outfiles = self.copy_tree(self.build_dir, self.install_dir)
else:
self.warn("'%s' does not exist -- no Python modules to install" %
self.build_dir)
return
return outfiles
def byte_compile(self, files):
if sys.dont_write_bytecode:
self.warn('byte-compiling is disabled, skipping.')
return
from distutils.util import byte_compile
# Get the "--root" directory supplied to the "install" command,
# and use it as a prefix to strip off the purported filename
# encoded in bytecode files. This is far from complete, but it
# should at least generate usable bytecode in RPM distributions.
install_root = self.get_finalized_command('install').root
if self.compile:
byte_compile(files, optimize=0,
force=self.force, prefix=install_root,
dry_run=self.dry_run)
if self.optimize > 0:
byte_compile(files, optimize=self.optimize,
force=self.force, prefix=install_root,
verbose=self.verbose, dry_run=self.dry_run)
# -- Utility methods -----------------------------------------------
def _mutate_outputs(self, has_any, build_cmd, cmd_option, output_dir):
if not has_any:
return []
build_cmd = self.get_finalized_command(build_cmd)
build_files = build_cmd.get_outputs()
build_dir = getattr(build_cmd, cmd_option)
prefix_len = len(build_dir) + len(os.sep)
outputs = []
for file in build_files:
outputs.append(os.path.join(output_dir, file[prefix_len:]))
return outputs
def _bytecode_filenames(self, py_filenames):
bytecode_files = []
for py_file in py_filenames:
# Since build_py handles package data installation, the
# list of outputs can contain more than just .py files.
# Make sure we only report bytecode for the .py files.
ext = os.path.splitext(os.path.normcase(py_file))[1]
if ext != PYTHON_SOURCE_EXTENSION:
continue
if self.compile:
bytecode_files.append(py_file + "c")
if self.optimize > 0:
bytecode_files.append(py_file + "o")
return bytecode_files
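    # Illustrative result (paths are hypothetical): with compile=1 and
    # optimize=1, _bytecode_filenames(['pkg/mod.py', 'pkg/data.txt'])
    # yields ['pkg/mod.pyc', 'pkg/mod.pyo']; non-.py entries are skipped.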
# -- External interface --------------------------------------------
# (called by outsiders)
def get_outputs(self):
"""Return the list of files that would be installed if this command
were actually run. Not affected by the "dry-run" flag or whether
modules have actually been built yet.
"""
pure_outputs = \
self._mutate_outputs(self.distribution.has_pure_modules(),
'build_py', 'build_lib',
self.install_dir)
if self.compile:
bytecode_outputs = self._bytecode_filenames(pure_outputs)
else:
bytecode_outputs = []
ext_outputs = \
self._mutate_outputs(self.distribution.has_ext_modules(),
'build_ext', 'build_lib',
self.install_dir)
return pure_outputs + bytecode_outputs + ext_outputs
def get_inputs(self):
"""Get the list of files that are input to this command, ie. the
files that get installed as they are named in the build tree.
The files in this list correspond one-to-one to the output
filenames returned by 'get_outputs()'.
"""
inputs = []
if self.distribution.has_pure_modules():
build_py = self.get_finalized_command('build_py')
inputs.extend(build_py.get_outputs())
if self.distribution.has_ext_modules():
build_ext = self.get_finalized_command('build_ext')
inputs.extend(build_ext.get_outputs())
return inputs
| mit |
diydrones/visual-followme | src/drone_script.py | 4 | 1146 | import cv2
import time
import os.path
import os
from file_utils import Logger
from polyphemus import process_stream
mustarm = False
def get_vehicle():
api = local_connect() # @UndefinedVariable
v = api.get_vehicles()[0]
return v
def wait_for_arm(v):
print "Waiting for arming"
while not v.armed:
time.sleep(0.001)
print "ARMED"
def open_camera():
# yuck - opencv has no way to count # of cameras, so do this hack of looking for /dev/video*
numCameras = len(filter(lambda s: s.startswith("video"), os.listdir("/dev")))
c = cv2.VideoCapture()
# We start our search with higher numbered (likely external) cameras
for cnum in range(0, numCameras):
c.open(numCameras - cnum - 1)
if c.isOpened():
return c
raise Exception('No cameras found')
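# e.g. with /dev/video0 and /dev/video1 present, index 1 (the likely external
# camera) is tried before index 0.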
print "DroneScript - Visual-Follow Running"
v = get_vehicle()
while True:
if mustarm:
wait_for_arm(v)
video_in = open_camera()
homedir = os.path.expanduser("~")
logger = Logger(path= homedir + "/Videos/")
process_stream(video_in, logger, vehicle=v, require_arming=mustarm)
| gpl-3.0 |
daymer/xWIKI_Karma | Migration_to_xWiki/migration_sample.py | 1 | 1532 | from PythonConfluenceAPI import ConfluenceAPI
import Configuration
import CustomModules.SQL_Connector
from Configuration import MySQLConfig, MediaWIKIConfig
from Migration_to_xWiki.Users_association import Users
from CustomModules import Mechanics
from CustomModules.Mechanics import XWikiClient, MysqlConnector, MigrationAssistant
target_pool = 'Migration pool'
parent = 'Migration pool'
MySQLconfig_INSTANCE = MySQLConfig()
MysqlConnector_INSTANCE = MysqlConnector(MySQLconfig_INSTANCE)
SQLConfig = Configuration.SQLConfig()
xWikiConfig = Configuration.XWikiConfig(target_pool)
xWikiClient = XWikiClient(xWikiConfig.api_root, xWikiConfig.auth_user, xWikiConfig.auth_pass)
ConfluenceConfig_instance = Configuration.ConfluenceConfig()
confluenceAPI_instance = ConfluenceAPI(username=ConfluenceConfig_instance.USER, password=ConfluenceConfig_instance.PASS, uri_base=ConfluenceConfig_instance.ULR)
MediaWIKIConfig = MediaWIKIConfig()
Migrator = MigrationAssistant(ConfluenceConfig=ConfluenceConfig_instance, MediaWIKIConfig=MediaWIKIConfig, xWikiConfig=xWikiConfig)
UserList = Users()
SQLConnector_instance = CustomModules.SQL_Connector.SQLConnector(SQLConfig)
title = 'Hyper-V Basics'
platform = 'Confluence'
result = Mechanics.migrate_page(title, platform, target_pool, parent, MySQLconfig_INSTANCE,
MysqlConnector_INSTANCE, SQLConfig, SQLConnector_instance, ConfluenceConfig_instance,
MediaWIKIConfig, xWikiConfig, xWikiClient, Migrator, UserList)
print(result)
| apache-2.0 |
moondrop-entertainment/django-nonrel-drawp | tests/regressiontests/comment_tests/tests/app_api_tests.py | 55 | 2592 | from django.conf import settings
from django.contrib import comments
from django.contrib.comments.models import Comment
from django.contrib.comments.forms import CommentForm
from regressiontests.comment_tests.tests import CommentTestCase
class CommentAppAPITests(CommentTestCase):
"""Tests for the "comment app" API"""
def testGetCommentApp(self):
self.assertEqual(comments.get_comment_app(), comments)
def testGetForm(self):
self.assertEqual(comments.get_form(), CommentForm)
def testGetFormTarget(self):
self.assertEqual(comments.get_form_target(), "/post/")
def testGetFlagURL(self):
c = Comment(id=12345)
self.assertEqual(comments.get_flag_url(c), "/flag/12345/")
def getGetDeleteURL(self):
c = Comment(id=12345)
self.assertEqual(comments.get_delete_url(c), "/delete/12345/")
def getGetApproveURL(self):
c = Comment(id=12345)
self.assertEqual(comments.get_approve_url(c), "/approve/12345/")
class CustomCommentTest(CommentTestCase):
urls = 'regressiontests.comment_tests.urls'
def setUp(self):
self.old_comments_app = getattr(settings, 'COMMENTS_APP', None)
settings.COMMENTS_APP = 'regressiontests.comment_tests.custom_comments'
settings.INSTALLED_APPS = list(settings.INSTALLED_APPS) + [settings.COMMENTS_APP,]
def tearDown(self):
del settings.INSTALLED_APPS[-1]
settings.COMMENTS_APP = self.old_comments_app
if settings.COMMENTS_APP is None:
del settings._wrapped.COMMENTS_APP
def testGetCommentApp(self):
from regressiontests.comment_tests import custom_comments
self.assertEqual(comments.get_comment_app(), custom_comments)
def testGetModel(self):
from regressiontests.comment_tests.custom_comments.models import CustomComment
self.assertEqual(comments.get_model(), CustomComment)
def testGetForm(self):
from regressiontests.comment_tests.custom_comments.forms import CustomCommentForm
self.assertEqual(comments.get_form(), CustomCommentForm)
def testGetFormTarget(self):
self.assertEqual(comments.get_form_target(), "/post/")
def testGetFlagURL(self):
c = Comment(id=12345)
self.assertEqual(comments.get_flag_url(c), "/flag/12345/")
def getGetDeleteURL(self):
c = Comment(id=12345)
self.assertEqual(comments.get_delete_url(c), "/delete/12345/")
def getGetApproveURL(self):
c = Comment(id=12345)
self.assertEqual(comments.get_approve_url(c), "/approve/12345/")
| bsd-3-clause |
UCSC-iGEM-2016/taris_controller | taris_controller/taris_sensor.py | 1 | 9944 | #!/usr/bin/python
from __future__ import print_function
import io # used to create file streams
import fcntl # used to access I2C parameters like addresses
import sys
import time # used for sleep delay and timestamps
class Taris_Sensor():
''' This object holds all required interface data for the Atlas Scientific \
EZO pH and RTD sensors. Built off of the base library, with new functions \
added for calibration and additional testing. '''
def __init__(self, address, bus):
# open two file streams, one for reading and one for writing
# the specific I2C channel is selected with bus
# it is usually 1, except for older revisions where it's 0
# wb and rb indicate binary read and write
self.file_read = io.open("/dev/i2c-"+str(bus), "rb", buffering=0)
self.file_write = io.open("/dev/i2c-"+str(bus), "wb", buffering=0)
# initializes I2C to either a user specified or default address
self.set_i2c_address(address)
self.cal_timeout = 1.6 # timeout for calibrations
self.read_timeout = 1.0 # timeout for reads
self.short_timeout = 0.3 # timeout for regular commands
# Set if testing board
self.DEBUG = True
def set_i2c_address(self, addr):
'''Set the I2C communications to the slave specified by the address. \
The commands for I2C dev using the ioctl functions are specified in \
the i2c-dev.h file from i2c-tools'''
I2C_SLAVE = 0x703
fcntl.ioctl(self.file_read, I2C_SLAVE, addr)
fcntl.ioctl(self.file_write, I2C_SLAVE, addr)
def write(self, cmd):
'''Writes a command to the sensor.'''
# appends the null character and sends the string over I2C
cmd += "\00"
self.file_write.write(cmd)
def read(self, num_of_bytes=31,startbit=1):
'''Reads data from the sensor and parses the incoming response.'''
# reads a specified number of bytes from I2C, then parses and displays the result
res = self.file_read.read(num_of_bytes) # read from the board
response = filter(lambda x: x != '\x00', res) # remove the null characters to get the response
if ord(response[0]) == 1: # if the response isn't an error
# change MSB to 0 for all received characters except the first and get a list of characters
char_list = map(lambda x: chr(ord(x) & ~0x80), list(response[startbit:]))
# NOTE: having to change the MSB to 0 is a glitch in the raspberry pi, and you shouldn't have to do this!
return ''.join(char_list) # convert the char list to a string and returns it
else:
return "Error " + str(ord(response[0]))
def query(self, string, start=1):
'''For commands that require a write, a wait, and a response. For instance, \
calibration requires writing an initial CAL command, waiting 300ms, \
then checking for a pass/fail indicator message.'''
# write a command to the board, wait the correct timeout, and read the response
self.write(string)
# the read and calibration commands require a longer timeout
if string.upper().startswith("R"):
time.sleep(self.read_timeout)
elif string.upper().startswith("CAL"):
time.sleep(self.cal_timeout)
else:
time.sleep(self.short_timeout)
return self.read(startbit=start)
def verify(self):
'''Verifies that the sensor is connected, also returns firmware version.'''
device_ID = self.query("I")
if device_ID.startswith("?I"):
print("Connected sensor: " + str(device_ID)[3:])
else:
raw_input("EZO not connected: " + device_ID)
def close(self):
'''Closes the sensor's filestream, not usually required.'''
self.file_read.close()
self.file_write.close()
def getData(self):
'''Gets data from sensor reading as a float.'''
data = self.query("R")
return float(data)
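    # Sketch (address/bus are assumptions; 0x63 is a common EZO pH address):
    #   ph = Taris_Sensor(0x63, 1)
    #   ph.verify()            # prints firmware info when the circuit responds
    #   value = ph.getData()   # blocking single reading, returned as a float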
def cal_wait(self, cal_time):
'''UI for waiting for pH sensor to stabilize during calibration'''
x=1
if self.DEBUG == True:
cal_time = 4
while x<cal_time:
if x==1:
sys.stdout.write("Please wait for sensor to stabilize:")
else:
sys.stdout.write(".")
sys.stdout.flush()
time.sleep(1)
x+=1
print('\n')
def pH_calibrateSensor(self):
'''Performs pH sensor calibration using included buffers.'''
# Clear previous calibration data
print("Starting pH sensor calibration...")
q = self.query("Cal,clear", 0)
if str(ord(q)) != '1':
print("Calibration failed with response " + str(q))
time.sleep(2)
return False
# Midpoint calibration. This will also reset previous data.
raw_input("Please rinse probe. Press [Enter] when pH 7 buffer is loaded.")
self.cal_wait(60)
mid_pH = "7.00"
q = self.query("CAL,MID," + mid_pH, 0)
if str(ord(q)) != '1':
print("Calibration failed with response " + str(q))
time.sleep(2)
return False
# Lowpoint calibration
raw_input("Please rinse probe. Press [Enter] when pH 4 buffer is loaded.")
self.cal_wait(60)
low_pH = "4.00"
q = self.query("CAL,LOW," + low_pH, 0)
if str(ord(q)) != '1':
print("Calibration failed with response " + str(q))
time.sleep(2)
return False
# Highpoint calibration
raw_input("Please rinse probe. Press [Enter] when pH 10 buffer is loaded.")
self.cal_wait(60)
high_pH = "10.00"
q = self.query("CAL,HIGH," + high_pH, 0)
if str(ord(q)) != '1':
print("Calibration failed with response " + str(q))
time.sleep(2)
return False
q = str(self.query("Cal,?"))
# Check that 3-point calibration is complete, otherwise return ERROR
if q != "?CAL,3":
print("Three point calibration incomplete!" + str(q))
cal_response = raw_input("Enter 'R' to retry or Enter to exit.")
if cal_response == "R" or cal_response == "r":
                return self.pH_calibrateSensor()
else:
return False
print("Three point pH calibration complete!")
time.sleep(1)
return True
def temp_calibrateSensor(self):
'''Calibrates the temperature sensor. Requires an external thermometer.'''
print("Clearing previous temperature calibration.")
q = str(ord(self.query("Cal,clear\0x0d", 0)))
if q == "1":
cal_temp = raw_input("Enter room temperature\n>>")
self.cal_wait(5)
q = str(ord(self.query("Cal,"+str(cal_temp) + "\0x0d", 0)))
if q == "1":
q = str(self.query("Cal,?"))
if q == "?CAL,1":
print("One point temperature calibration complete!")
return True
elif q == "?CAL,0":
print("One point temperature calibration incomplete!")
cal_response = raw_input("Enter R to retry or Enter to exit.")
if cal_response == "R" or cal_response == "r":
                        return self.temp_calibrateSensor()
else:
return False
else:
print("Error setting new calibration temperature: " + str(q))
time.sleep(1)
return False
else:
print("Could not set new calibration temperature: " + str(q))
time.sleep(1)
return False
else:
print("Could not clear RTD sensor: " + str(q))
time.sleep(1)
return False
return False
def pH_compensateTemp(self,temp):
'''Compensates the pH sensor for temperature, is used in conjunction with \
a reading from the RTD sensor.'''
comp_status = self.query("T," + str(temp),0)
if str(ord(comp_status)) != '1':
print("Temperature compensation failed!: ")
time.sleep(2)
return False
else:
comp_status = str(self.query("T,?"))
print("Temperature compensation set for: " + comp_status[3:] + u'\xb0' + "C")
time.sleep(2)
            return True
def lockProtocol(self,command):
'''Not currently working. Normally used for locking some of the \
internal parameters (e.g. baud rate for UART mode).'''
read_bytes = 9
print("1.\tDisconnect power to device and any signal wires.\n\
2.\tShort PRB to TX.\n\
3.\tTurn device on and wait for LED to change to blue.\n\
4.\tRemove short from PRB to TX, then restart device.\n\
5.\tConnect data lines to Raspberry Pi I2C pins.")
raw_input("Press Enter when this is complete.")
raw_input("Press Enter to prevent further changes to device configuration.")
command_message = "PLOCK," + str(command)
        self.write(command_message)
time.sleep(0.3)
        lock_status = self.read(read_bytes)
if lock_status == "?PLOCK,1":
print("Sensor settings locked.")
return_code = 1
elif lock_status == "?PLOCK,0":
print("Sensor settings unlocked.")
return_code = 0
else:
print("False locking sensor settings.")
return False
return return_code
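
# A minimal usage sketch (assumptions: 'AtlasI2C' is a hypothetical stand-in
# for whatever name the class definition earlier in this file actually
# declares, and the pH EZO sits at its default I2C address 99 on bus 1):
#
#     sensor = AtlasI2C(address=99, bus=1)
#     sensor.verify()              # prints the connected sensor's info
#     print(sensor.getData())      # one reading as a float
#     sensor.pH_calibrateSensor()  # interactive 3-point calibration
#     sensor.close()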
| gpl-3.0 |
sixty-north/segpy | test/test_header.py | 2 | 5354 | import inspect
import pickle
from copy import copy
from pytest import raises
from hypothesis import given, assume
from hypothesis.strategies import integers
import segpy
from segpy.header import field, Header, are_equal
from segpy.field_types import Int32, NNInt32
from segpy.datatypes import LIMITS, SegYType
from test.predicates import check_balanced
class ExampleHeader(Header):
field_a = field(
Int32, offset=1, default=0, documentation=
"""Field A. This is field A.""")
field_b = field(
NNInt32, offset=5, default=42, documentation=
"Field B. This is field B.")
field_c = field(
Int32, offset=9, default=-1, documentation=
"Field C. This is field C.")
class TestHeader:
def test_initialize_with_defaults(self):
h = ExampleHeader()
assert h.field_a == 0
assert h.field_b == 42
assert h.field_c == -1
def test_initialize_with_positional_arguments(self):
h = ExampleHeader(14, 22, 8)
assert h.field_a == 14
assert h.field_b == 22
assert h.field_c == 8
def test_initialize_with_keyword_arguments(self):
h = ExampleHeader(field_a=14, field_b=22, field_c=8)
assert h.field_a == 14
assert h.field_b == 22
assert h.field_c == 8
def test_initialize_with_positional_and_keyword_arguments(self):
h = ExampleHeader(14, 22, field_c=8)
assert h.field_a == 14
assert h.field_b == 22
assert h.field_c == 8
def test_out_of_range_field_values_raises_value_error(self):
with raises(ValueError):
ExampleHeader(14, -1, field_c=8)
def test_illegal_keyword_argument_raises_type_error(self):
with raises(TypeError):
ExampleHeader(14, 1, field_x=8)
def test_ordered_field_names(self):
assert ExampleHeader.ordered_field_names() == ('field_a', 'field_b', 'field_c')
@given(a=integers(*LIMITS[SegYType.INT32]),
b=integers(*LIMITS[SegYType.NNINT32]),
c=integers(*LIMITS[SegYType.INT32]))
def test_copy(self, a, b, c):
h1 = ExampleHeader(a, b, c)
h2 = copy(h1)
assert h1 is not h2
        assert h1.field_a == h2.field_a
        assert h1.field_b == h2.field_b
        assert h1.field_c == h2.field_c
@given(a=integers(*LIMITS[SegYType.INT32]),
b=integers(*LIMITS[SegYType.NNINT32]),
c=integers(*LIMITS[SegYType.INT32]))
def test_repr(self, a, b, c):
r = repr(ExampleHeader(a, b, c))
assert str(a) in r
assert str(b) in r
assert str(c) in r
assert 'field_a' in r
assert 'field_b' in r
assert 'field_c' in r
assert 'ExampleHeader' in r
assert check_balanced(r)
@given(a=integers(*LIMITS[SegYType.INT32]),
b=integers(*LIMITS[SegYType.NNINT32]),
c=integers(*LIMITS[SegYType.INT32]))
def test_equality(self, a, b, c):
lhs = ExampleHeader(a, b, c)
rhs = ExampleHeader(a, b, c)
assert are_equal(lhs, rhs)
@given(a=integers(*LIMITS[SegYType.INT32]),
b=integers(*LIMITS[SegYType.NNINT32]),
c=integers(*LIMITS[SegYType.INT32]))
def test_inequality(self, a, b, c):
assume(a != 0)
lhs = ExampleHeader(-a, b, c)
rhs = ExampleHeader(a, b, c)
assert not are_equal(lhs, rhs)
def test_inequality_different_type(self):
h = ExampleHeader(1, 2, 3)
assert not are_equal(h, 42)
def test_read_illegal_attribute_raises_attribute_error(self):
h = ExampleHeader(1, 2, 3)
with raises(AttributeError):
_ = h.field_x
@given(a=integers(*LIMITS[SegYType.INT32]),
b=integers(*LIMITS[SegYType.NNINT32]),
c=integers(*LIMITS[SegYType.INT32]))
def test_pickle_roundtrip(self, a, b, c):
h1 = ExampleHeader(a, b, c)
s = pickle.dumps(h1)
h2 = pickle.loads(s)
assert are_equal(h1, h2)
@given(a=integers(*LIMITS[SegYType.INT32]),
b=integers(*LIMITS[SegYType.NNINT32]),
c=integers(*LIMITS[SegYType.INT32]))
def test_pickle_versioning_mismatch_raises_type_error(self, a, b, c):
h1 = ExampleHeader(a, b, c)
s = pickle.dumps(h1)
s = s.replace(segpy.__version__.encode('ascii'), b'xxxxx')
with raises(TypeError):
pickle.loads(s)
def test_delete_field_raises_attribute_error(self):
h1 = ExampleHeader(1, 2, 3)
with raises(AttributeError):
del h1.field_a
class TestNamedField:
def test_name(self):
assert ExampleHeader.field_a.name == 'field_a'
def test_value_type(self):
assert ExampleHeader.field_a.value_type == Int32
def test_offset(self):
assert ExampleHeader.field_a.offset == 1
def test_default(self):
assert ExampleHeader.field_a.default == 0
def test_doc(self):
assert inspect.getdoc(ExampleHeader.field_a) == "Field A. This is field A."
def test_repr(self):
r = repr(ExampleHeader.field_a)
assert 'FieldAField' in r
assert 'name' in r
assert 'value_type' in r
assert 'default' in r
assert 'field_a' in r
assert 'Int32' in r
assert '1' in r
assert '0' in r
assert check_balanced(r) | agpl-3.0 |
xy515258/Xia | make_new_application.py | 6 | 4027 | #!/usr/bin/env python
# This script is for creating a new herd animal. Just run this script
# from the "stork" directory supplying a new animal name and it should
# create a complete application template built with support for both
# MOOSE and ELK. Enjoy!
import os, sys, string, re, subprocess
from optparse import OptionParser
from shutil import copytree, ignore_patterns
# DO NOT MODIFY
# This value should be set to true if this stork is within the svn herd repository
global_in_herd = False
global_ignores = ['.svn', '.git']
global_app_name = ''
global_rename_suffix = 'app'
def renameFiles(app_path):
rename_pattern = re.compile(r'(stork)(.*)', re.I)
suffix_pattern = re.compile(r'(.*)\.' + global_rename_suffix + '$')
for dirpath, dirnames, filenames in os.walk(app_path):
# Don't traverse into ignored directories
for ignore in global_ignores:
if ignore in dirnames:
dirnames.remove(ignore)
for file in filenames:
match = rename_pattern.match(file)
# Replace 'stork' in the contents
replaceNameInContents(dirpath + '/' + file)
# See if the file needs to be renamed and rename
if match != None:
replace_string = replacementFunction(match)
os.rename(dirpath + '/' + file, dirpath + '/' + replace_string + match.group(2))
# update the file
file = replace_string + match.group(2)
# If there are files with .app suffixes drop the suffix
match = suffix_pattern.search(file)
if match != None:
os.rename(dirpath + '/' + file, dirpath + '/' + match.group(1))
def replaceNameInContents(filename):
f = open(filename)
text = f.read()
f.close()
# Replace all instances of the word stork with the right case
pattern = re.compile(r'(stork)', re.I)
text = pattern.sub(replacementFunction, text)
# Retrieve original file attribute to be applied later
mode = os.stat(filename).st_mode
# Now write the file back out
f = open(filename + '~tmp', 'w')
f.write(text)
f.close()
os.chmod(filename + '~tmp', mode)
os.rename(filename + '~tmp', filename)
def replacementFunction(match):
# There are 3 "case" cases
# Case 1: all lower case
if match.group(1) == 'stork':
return global_app_name
# Case 2: all upper case
if match.group(1) == 'STORK':
return string.upper(global_app_name)
# Case 3: First letter is capitalized
if match.group(1) == 'Stork':
name = global_app_name.replace("_", " ")
name = name.title()
name = name.replace(" ", "")
return name
print match.group(0) + "\nBad Case Detected!"
sys.exit(1)
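# Worked examples for the three cases above, assuming a hypothetical
# global_app_name of 'moose_demo':
#   'stork' -> 'moose_demo'
#   'STORK' -> 'MOOSE_DEMO'
#   'Stork' -> 'MooseDemo'  (underscores become CamelCase word boundaries)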
if __name__ == '__main__':
parser = OptionParser()
(global_options, args) = parser.parse_args()
# Get the animal name
if global_in_herd:
if len(args) != 1:
print 'Usage: ./make_new_application.py <animal name>'
sys.exit()
global_app_name = string.lower(args[0])
else:
if len(args) != 0:
print 'Usage: ./make_new_application.py'
sys.exit()
global_app_name = os.path.basename(os.path.dirname(os.path.realpath(__file__)))
# Make the new application
if global_in_herd:
copytree('.', '../' + global_app_name, ignore=ignore_patterns('.svn', '.git', '*.module', 'make_new*', 'LICENSE'))
renameFiles('../' + global_app_name)
print 'Your application should be ready!\nAdd the directory ../' + global_app_name + ' to your checkout and commit.'
else:
# We are in a git clone
renameFiles('.')
try:
os.remove('Makefile.module')
os.remove('run_tests.module')
os.remove(os.path.join('src', 'base', 'StorkApp.C.module'))
os.remove('make_new_application.py')
os.remove('make_new_module.py')
except:
pass
# Add the newly created untracked files and delete the removed ones
subprocess.check_output("git rm -f *.py Makefile.* run_tests.*", shell=True)
subprocess.call("git add --all *", shell=True)
print 'Your application should be ready!\nCommit this directory to your local repository and push.'
| lgpl-2.1 |
smerritt/swift | test/unit/common/middleware/test_versioned_writes.py | 3 | 63681 | # Copyright (c) 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
import json
import os
import time
import mock
import unittest
from swift.common import swob, utils
from swift.common.middleware import versioned_writes, copy
from swift.common.swob import Request
from test.unit.common.middleware.helpers import FakeSwift
class FakeCache(object):
def __init__(self, val):
if 'status' not in val:
val['status'] = 200
self.val = val
def get(self, *args):
return self.val
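# The tests below hand FakeCache one of two payload shapes: the sysmeta form,
# FakeCache({'sysmeta': {'versions-location': 'ver_cont'}}), mimicking
# container info after the sysmeta headers were introduced, and the legacy
# form, FakeCache({'versions': 'ver_cont'}), which is still honoured when no
# sysmeta value is present.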
def local_tz(func):
'''
Decorator to change the timezone when running a test.
This uses the Eastern Time Zone definition from the time module's docs.
Note that the timezone affects things like time.time() and time.mktime().
'''
@functools.wraps(func)
def wrapper(*args, **kwargs):
tz = os.environ.get('TZ', '')
try:
os.environ['TZ'] = 'EST+05EDT,M4.1.0,M10.5.0'
time.tzset()
return func(*args, **kwargs)
finally:
os.environ['TZ'] = tz
time.tzset()
return wrapper
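# local_tz is applied below (e.g. on test_new_version_sysmeta_precedence) so
# that expected version paths, whose timestamps are derived from
# Last-Modified headers, do not silently depend on the machine's local
# timezone.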
class VersionedWritesBaseTestCase(unittest.TestCase):
def setUp(self):
self.app = FakeSwift()
conf = {'allow_versioned_writes': 'true'}
self.vw = versioned_writes.filter_factory(conf)(self.app)
def tearDown(self):
self.assertEqual(self.app.unclosed_requests, {})
def call_app(self, req, app=None):
if app is None:
app = self.app
self.authorized = []
def authorize(req):
self.authorized.append(req)
if 'swift.authorize' not in req.environ:
req.environ['swift.authorize'] = authorize
req.headers.setdefault("User-Agent", "Marula Kruger")
status = [None]
headers = [None]
def start_response(s, h, ei=None):
status[0] = s
headers[0] = h
body_iter = app(req.environ, start_response)
with utils.closing_if_possible(body_iter):
body = b''.join(body_iter)
return status[0], headers[0], body
def call_vw(self, req):
return self.call_app(req, app=self.vw)
def assertRequestEqual(self, req, other):
self.assertEqual(req.method, other.method)
self.assertEqual(req.path, other.path)
class VersionedWritesTestCase(VersionedWritesBaseTestCase):
def test_put_container(self):
self.app.register('PUT', '/v1/a/c', swob.HTTPOk, {}, 'passed')
req = Request.blank('/v1/a/c',
headers={'X-Versions-Location': 'ver_cont'},
environ={'REQUEST_METHOD': 'PUT'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '200 OK')
# check for sysmeta header
calls = self.app.calls_with_headers
method, path, req_headers = calls[0]
self.assertEqual('PUT', method)
self.assertEqual('/v1/a/c', path)
self.assertIn('x-container-sysmeta-versions-location', req_headers)
self.assertEqual(req.headers['x-container-sysmeta-versions-location'],
'ver_cont')
self.assertIn('x-container-sysmeta-versions-mode', req_headers)
self.assertEqual(req.headers['x-container-sysmeta-versions-mode'],
'stack')
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
def test_put_container_history_header(self):
self.app.register('PUT', '/v1/a/c', swob.HTTPOk, {}, 'passed')
req = Request.blank('/v1/a/c',
headers={'X-History-Location': 'ver_cont'},
environ={'REQUEST_METHOD': 'PUT'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '200 OK')
# check for sysmeta header
calls = self.app.calls_with_headers
method, path, req_headers = calls[0]
self.assertEqual('PUT', method)
self.assertEqual('/v1/a/c', path)
self.assertIn('x-container-sysmeta-versions-location', req_headers)
self.assertEqual('ver_cont',
req_headers['x-container-sysmeta-versions-location'])
self.assertIn('x-container-sysmeta-versions-mode', req_headers)
self.assertEqual('history',
req_headers['x-container-sysmeta-versions-mode'])
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
def test_put_container_both_headers(self):
req = Request.blank('/v1/a/c',
headers={'X-Versions-Location': 'ver_cont',
'X-History-Location': 'ver_cont'},
environ={'REQUEST_METHOD': 'PUT'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '400 Bad Request')
self.assertFalse(self.app.calls)
def test_container_allow_versioned_writes_false(self):
self.vw.conf = {'allow_versioned_writes': 'false'}
        # PUT/POST of a container must fail with 412 when
        # allow_versioned_writes is set to false
for method in ('PUT', 'POST'):
for header in ('X-Versions-Location', 'X-History-Location'):
req = Request.blank('/v1/a/c',
headers={header: 'ver_cont'},
environ={'REQUEST_METHOD': method})
status, headers, body = self.call_vw(req)
self.assertEqual(status, "412 Precondition Failed",
'Got %s instead of 412 when %sing '
'with %s header' % (status, method, header))
# GET performs as normal
self.app.register('GET', '/v1/a/c', swob.HTTPOk, {}, 'passed')
for method in ('GET', 'HEAD'):
req = Request.blank('/v1/a/c',
headers={'X-Versions-Location': 'ver_cont'},
environ={'REQUEST_METHOD': method})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '200 OK')
def _test_removal(self, headers):
self.app.register('POST', '/v1/a/c', swob.HTTPNoContent, {}, 'passed')
req = Request.blank('/v1/a/c',
headers=headers,
environ={'REQUEST_METHOD': 'POST'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '204 No Content')
# check for sysmeta header
calls = self.app.calls_with_headers
method, path, req_headers = calls[0]
self.assertEqual('POST', method)
self.assertEqual('/v1/a/c', path)
for header in ['x-container-sysmeta-versions-location',
'x-container-sysmeta-versions-mode',
'x-versions-location']:
self.assertIn(header, req_headers)
self.assertEqual('', req_headers[header])
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
def test_remove_headers(self):
self._test_removal({'X-Remove-Versions-Location': 'x'})
self._test_removal({'X-Remove-History-Location': 'x'})
def test_empty_versions_location(self):
self._test_removal({'X-Versions-Location': ''})
self._test_removal({'X-History-Location': ''})
def test_remove_add_versions_precedence(self):
self.app.register(
'POST', '/v1/a/c', swob.HTTPOk,
{'x-container-sysmeta-versions-location': 'ver_cont'},
'passed')
req = Request.blank('/v1/a/c',
headers={'X-Remove-Versions-Location': 'x',
'X-Versions-Location': 'ver_cont'},
environ={'REQUEST_METHOD': 'POST'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '200 OK')
self.assertIn(('X-Versions-Location', 'ver_cont'), headers)
# check for sysmeta header
calls = self.app.calls_with_headers
method, path, req_headers = calls[0]
self.assertEqual('POST', method)
self.assertEqual('/v1/a/c', path)
self.assertIn('x-container-sysmeta-versions-location', req_headers)
self.assertNotIn('x-remove-versions-location', req_headers)
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
def _test_blank_add_versions_precedence(self, blank_header, add_header):
self.app.register(
'POST', '/v1/a/c', swob.HTTPOk,
{'x-container-sysmeta-versions-location': 'ver_cont'},
'passed')
req = Request.blank('/v1/a/c',
headers={blank_header: '',
add_header: 'ver_cont'},
environ={'REQUEST_METHOD': 'POST'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '200 OK')
# check for sysmeta header
calls = self.app.calls_with_headers
method, path, req_headers = calls[-1]
self.assertEqual('POST', method)
self.assertEqual('/v1/a/c', path)
self.assertIn('x-container-sysmeta-versions-location', req_headers)
self.assertEqual('ver_cont',
req_headers['x-container-sysmeta-versions-location'])
self.assertIn('x-container-sysmeta-versions-mode', req_headers)
self.assertEqual('history' if add_header == 'X-History-Location'
else 'stack',
req_headers['x-container-sysmeta-versions-mode'])
self.assertNotIn('x-remove-versions-location', req_headers)
self.assertIn('x-versions-location', req_headers)
self.assertEqual('', req_headers['x-versions-location'])
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
def test_blank_add_versions_precedence(self):
self._test_blank_add_versions_precedence(
'X-Versions-Location', 'X-History-Location')
self._test_blank_add_versions_precedence(
'X-History-Location', 'X-Versions-Location')
def test_get_container(self):
self.app.register(
'GET', '/v1/a/c', swob.HTTPOk,
{'x-container-sysmeta-versions-location': 'ver_cont',
'x-container-sysmeta-versions-mode': 'stack'}, None)
req = Request.blank(
'/v1/a/c',
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '200 OK')
self.assertIn(('X-Versions-Location', 'ver_cont'), headers)
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
def test_head_container(self):
self.app.register(
'HEAD', '/v1/a/c', swob.HTTPOk,
{'x-container-sysmeta-versions-location': 'other_ver_cont',
'x-container-sysmeta-versions-mode': 'history'}, None)
req = Request.blank(
'/v1/a/c',
environ={'REQUEST_METHOD': 'HEAD'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '200 OK')
self.assertIn(('X-History-Location', 'other_ver_cont'), headers)
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
def test_get_head(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, None)
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '200 OK')
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
self.app.register('HEAD', '/v1/a/c/o', swob.HTTPOk, {}, None)
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '200 OK')
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
def test_put_object_no_versioning(self):
self.app.register(
'PUT', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
cache = FakeCache({})
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
'CONTENT_LENGTH': '100'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '200 OK')
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
def test_put_first_object_success(self):
self.app.register(
'PUT', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
self.app.register(
'GET', '/v1/a/c/o', swob.HTTPNotFound, {}, None)
cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
'CONTENT_LENGTH': '100',
'swift.trans_id': 'fake_trans_id'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '200 OK')
self.assertEqual(len(self.authorized), 2)
        # Versioned writes middleware now calls auth on the incoming request
        # before we try the GET and then again at the proxy, so there are 2
        # authorize calls for the same request.
self.assertRequestEqual(req, self.authorized[0])
self.assertRequestEqual(req, self.authorized[1])
self.assertEqual(2, self.app.call_count)
self.assertEqual(['VW', None], self.app.swift_sources)
self.assertEqual({'fake_trans_id'}, set(self.app.txn_ids))
def test_put_object_no_versioning_with_container_config_true(self):
        # set allow_versioned_writes to false and expect no GET to occur
self.vw.conf = {'allow_versioned_writes': 'false'}
self.app.register(
'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, 'passed')
cache = FakeCache({'versions': 'ver_cont'})
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
'CONTENT_LENGTH': '100'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '201 Created')
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
called_method = [method for (method, path, hdrs) in self.app._calls]
self.assertNotIn('GET', called_method)
def test_put_request_is_dlo_manifest_with_container_config_true(self):
self.app.register(
'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, 'passed')
self.app.register(
'GET', '/v1/a/c/o', swob.HTTPOk,
{'last-modified': 'Thu, 1 Jan 1970 00:01:00 GMT'}, 'old version')
self.app.register(
'PUT', '/v1/a/ver_cont/001o/0000000060.00000', swob.HTTPCreated,
{}, '')
cache = FakeCache({'versions': 'ver_cont'})
req = Request.blank(
'/v1/a/c/o',
headers={'X-Object-Manifest': 'req/manifest'},
environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
'CONTENT_LENGTH': '100'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '201 Created')
self.assertEqual(len(self.authorized), 2)
self.assertRequestEqual(req, self.authorized[0])
self.assertRequestEqual(req, self.authorized[1])
self.assertEqual(3, self.app.call_count)
self.assertEqual([
('GET', '/v1/a/c/o'),
('PUT', '/v1/a/ver_cont/001o/0000000060.00000'),
('PUT', '/v1/a/c/o'),
], self.app.calls)
self.assertIn('x-object-manifest',
self.app.calls_with_headers[2].headers)
def test_put_version_is_dlo_manifest_with_container_config_true(self):
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk,
{'X-Object-Manifest': 'resp/manifest',
'last-modified': 'Thu, 1 Jan 1970 01:00:00 GMT'},
'passed')
self.app.register(
'PUT', '/v1/a/ver_cont/001o/0000003600.00000', swob.HTTPCreated,
{}, '')
self.app.register(
'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, 'passed')
cache = FakeCache({'versions': 'ver_cont'})
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
'CONTENT_LENGTH': '100'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '201 Created')
# The middleware now auths the request before the initial GET, the
# same GET that gets the X-Object-Manifest back. So a second auth is
# now done.
self.assertEqual(len(self.authorized), 2)
self.assertRequestEqual(req, self.authorized[0])
self.assertRequestEqual(req, self.authorized[1])
self.assertEqual(3, self.app.call_count)
self.assertEqual([
('GET', '/v1/a/c/o'),
('PUT', '/v1/a/ver_cont/001o/0000003600.00000'),
('PUT', '/v1/a/c/o'),
], self.app.calls)
self.assertIn('x-object-manifest',
self.app.calls_with_headers[1].headers)
def test_delete_object_no_versioning_with_container_config_true(self):
        # set allow_versioned_writes to false and expect no GET on the
        # versions container and no version-preserving GET/PUT (just delete
        # the object as normal)
self.vw.conf = {'allow_versioned_writes': 'false'}
self.app.register(
'DELETE', '/v1/a/c/o', swob.HTTPNoContent, {}, 'passed')
cache = FakeCache({'versions': 'ver_cont'})
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '204 No Content')
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
called_method = \
[method for (method, path, rheaders) in self.app._calls]
self.assertNotIn('PUT', called_method)
self.assertNotIn('GET', called_method)
self.assertEqual(1, self.app.call_count)
def test_new_version_success(self):
self.app.register(
'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, 'passed')
self.app.register(
'GET', '/v1/a/c/o', swob.HTTPOk,
{'last-modified': 'Thu, 1 Jan 1970 00:00:01 GMT'}, 'passed')
self.app.register(
'PUT', '/v1/a/ver_cont/001o/0000000001.00000', swob.HTTPCreated,
{}, None)
cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
'CONTENT_LENGTH': '100',
'swift.trans_id': 'fake_trans_id'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '201 Created')
# authorized twice now because versioned_writes now makes a check on
# PUT
self.assertEqual(len(self.authorized), 2)
self.assertRequestEqual(req, self.authorized[0])
self.assertEqual(['VW', 'VW', None], self.app.swift_sources)
self.assertEqual({'fake_trans_id'}, set(self.app.txn_ids))
def test_new_version_get_errors(self):
# GET on source fails, expect client error response,
# no PUT should happen
self.app.register(
'GET', '/v1/a/c/o', swob.HTTPBadRequest, {}, None)
cache = FakeCache({'versions': 'ver_cont'})
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
'CONTENT_LENGTH': '100'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '412 Precondition Failed')
self.assertEqual(1, self.app.call_count)
# GET on source fails, expect server error response
self.app.register(
'GET', '/v1/a/c/o', swob.HTTPBadGateway, {}, None)
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
'CONTENT_LENGTH': '100'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '503 Service Unavailable')
self.assertEqual(2, self.app.call_count)
def test_new_version_put_errors(self):
# PUT of version fails, expect client error response
self.app.register(
'GET', '/v1/a/c/o', swob.HTTPOk,
{'last-modified': 'Thu, 1 Jan 1970 00:00:01 GMT'}, 'passed')
self.app.register(
'PUT', '/v1/a/ver_cont/001o/0000000001.00000',
swob.HTTPUnauthorized, {}, None)
cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
'CONTENT_LENGTH': '100'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '412 Precondition Failed')
self.assertEqual(2, self.app.call_count)
# PUT of version fails, expect server error response
self.app.register(
'PUT', '/v1/a/ver_cont/001o/0000000001.00000', swob.HTTPBadGateway,
{}, None)
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
'CONTENT_LENGTH': '100'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '503 Service Unavailable')
self.assertEqual(4, self.app.call_count)
@local_tz
def test_new_version_sysmeta_precedence(self):
self.app.register(
'PUT', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
self.app.register(
'GET', '/v1/a/c/o', swob.HTTPOk,
{'last-modified': 'Thu, 1 Jan 1970 00:00:00 GMT'}, 'passed')
self.app.register(
'PUT', '/v1/a/ver_cont/001o/0000000000.00000', swob.HTTPOk,
{}, None)
# fill cache with two different values for versions location
# new middleware should use sysmeta first
cache = FakeCache({'versions': 'old_ver_cont',
'sysmeta': {'versions-location': 'ver_cont'}})
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
'CONTENT_LENGTH': '100'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '200 OK')
# authorized twice now because versioned_writes now makes a check on
# PUT
self.assertEqual(len(self.authorized), 2)
self.assertRequestEqual(req, self.authorized[0])
# check that sysmeta header was used
calls = self.app.calls_with_headers
method, path, req_headers = calls[1]
self.assertEqual('PUT', method)
self.assertEqual('/v1/a/ver_cont/001o/0000000000.00000', path)
def test_delete_no_versions_container_success(self):
self.app.register(
'DELETE', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
self.app.register(
'GET',
'/v1/a/ver_cont?prefix=001o/&marker=&reverse=on',
swob.HTTPNotFound, {}, None)
cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
'CONTENT_LENGTH': '0', 'swift.trans_id': 'fake_trans_id'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '200 OK')
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
self.assertEqual(2, self.app.call_count)
self.assertEqual(['VW', None], self.app.swift_sources)
self.assertEqual({'fake_trans_id'}, set(self.app.txn_ids))
prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
self.assertEqual(self.app.calls, [
('GET', prefix_listing_prefix + 'marker=&reverse=on'),
('DELETE', '/v1/a/c/o'),
])
def test_delete_first_object_success(self):
self.app.register(
'DELETE', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
self.app.register(
'GET',
'/v1/a/ver_cont?prefix=001o/&marker=&reverse=on',
swob.HTTPOk, {}, '[]')
cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
'CONTENT_LENGTH': '0'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '200 OK')
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
self.assertEqual(self.app.calls, [
('GET', prefix_listing_prefix + 'marker=&reverse=on'),
('DELETE', '/v1/a/c/o'),
])
def test_delete_latest_version_no_marker_success(self):
self.app.register(
'GET',
'/v1/a/ver_cont?prefix=001o/&marker=&reverse=on',
swob.HTTPOk, {},
'[{"hash": "y", '
'"last_modified": "2014-11-21T14:23:02.206740", '
'"bytes": 3, '
'"name": "001o/2", '
'"content_type": "text/plain"}, '
'{"hash": "x", '
'"last_modified": "2014-11-21T14:14:27.409100", '
'"bytes": 3, '
'"name": "001o/1", '
'"content_type": "text/plain"}]')
self.app.register(
'GET', '/v1/a/ver_cont/001o/2', swob.HTTPCreated,
{'content-length': '3'}, None)
self.app.register(
'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, None)
self.app.register(
'DELETE', '/v1/a/ver_cont/001o/2', swob.HTTPOk,
{}, None)
cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
req = Request.blank(
'/v1/a/c/o',
headers={'X-If-Delete-At': 1},
environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
'CONTENT_LENGTH': '0', 'swift.trans_id': 'fake_trans_id'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '200 OK')
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
self.assertEqual(4, self.app.call_count)
self.assertEqual(['VW', 'VW', 'VW', 'VW'], self.app.swift_sources)
self.assertEqual({'fake_trans_id'}, set(self.app.txn_ids))
# check that X-If-Delete-At was removed from DELETE request
req_headers = self.app.headers[-1]
self.assertNotIn('x-if-delete-at', [h.lower() for h in req_headers])
prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
self.assertEqual(self.app.calls, [
('GET', prefix_listing_prefix + 'marker=&reverse=on'),
('GET', '/v1/a/ver_cont/001o/2'),
('PUT', '/v1/a/c/o'),
('DELETE', '/v1/a/ver_cont/001o/2'),
])
def test_delete_latest_version_restores_marker_success(self):
self.app.register(
'GET',
'/v1/a/ver_cont?prefix=001o/&marker=&reverse=on',
swob.HTTPOk, {},
'[{"hash": "x", '
'"last_modified": "2014-11-21T14:23:02.206740", '
'"bytes": 3, '
'"name": "001o/2", '
'"content_type": "application/x-deleted;swift_versions_deleted=1"'
'}, {"hash": "y", '
'"last_modified": "2014-11-21T14:14:27.409100", '
'"bytes": 3, '
'"name": "001o/1", '
'"content_type": "text/plain"'
'}]')
self.app.register(
'HEAD', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
self.app.register(
'DELETE', '/v1/a/c/o', swob.HTTPNoContent, {})
cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
req = Request.blank(
'/v1/a/c/o',
headers={'X-If-Delete-At': 1},
environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
'CONTENT_LENGTH': '0'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '204 No Content')
self.assertEqual(len(self.authorized), 2)
self.assertRequestEqual(req, self.authorized[0])
self.assertRequestEqual(req, self.authorized[1])
calls = self.app.calls_with_headers
self.assertEqual(['GET', 'HEAD', 'DELETE'],
[c.method for c in calls])
self.assertIn('X-Newest', calls[1].headers)
self.assertEqual('True', calls[1].headers['X-Newest'])
method, path, req_headers = calls.pop()
self.assertTrue(path.startswith('/v1/a/c/o'))
# Since we're deleting the original, this *should* still be present:
self.assertEqual('1', req_headers.get('X-If-Delete-At'))
def test_delete_latest_version_is_marker_success(self):
# Test popping a delete marker off the stack. So, there's data in the
# versions container, topped by a delete marker, and there's nothing
# in the base versioned container.
self.app.register(
'GET',
'/v1/a/ver_cont?prefix=001o/&marker=&reverse=on',
swob.HTTPOk, {},
'[{"hash": "y", '
'"last_modified": "2014-11-21T14:23:02.206740", '
'"bytes": 3, '
'"name": "001o/2", '
'"content_type": "application/x-deleted;swift_versions_deleted=1"'
'},{"hash": "x", '
'"last_modified": "2014-11-21T14:14:27.409100", '
'"bytes": 3, '
'"name": "001o/1", '
'"content_type": "text/plain"'
'}]')
self.app.register(
'HEAD', '/v1/a/c/o', swob.HTTPNotFound, {}, 'passed')
self.app.register(
'GET', '/v1/a/ver_cont/001o/1', swob.HTTPOk, {}, 'passed')
self.app.register(
'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, None)
self.app.register(
'DELETE', '/v1/a/ver_cont/001o/2', swob.HTTPOk, {}, 'passed')
self.app.register(
'DELETE', '/v1/a/ver_cont/001o/1', swob.HTTPOk, {}, 'passed')
cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
req = Request.blank(
'/v1/a/c/o',
headers={'X-If-Delete-At': 1},
environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
'CONTENT_LENGTH': '0'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '200 OK')
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
self.assertEqual(self.app.calls, [
('GET', prefix_listing_prefix + 'marker=&reverse=on'),
('HEAD', '/v1/a/c/o'),
('GET', '/v1/a/ver_cont/001o/1'),
('PUT', '/v1/a/c/o'),
('DELETE', '/v1/a/ver_cont/001o/1'),
('DELETE', '/v1/a/ver_cont/001o/2'),
])
self.assertIn('X-Newest', self.app.headers[1])
self.assertEqual('True', self.app.headers[1]['X-Newest'])
self.assertIn('X-Newest', self.app.headers[2])
self.assertEqual('True', self.app.headers[2]['X-Newest'])
# check that X-If-Delete-At was removed from DELETE request
for req_headers in self.app.headers[-2:]:
self.assertNotIn('x-if-delete-at',
[h.lower() for h in req_headers])
def test_delete_latest_version_doubled_up_markers_success(self):
self.app.register(
'GET', '/v1/a/ver_cont?prefix=001o/'
'&marker=&reverse=on',
swob.HTTPOk, {},
'[{"hash": "x", '
'"last_modified": "2014-11-21T14:23:02.206740", '
'"bytes": 3, '
'"name": "001o/3", '
'"content_type": "application/x-deleted;swift_versions_deleted=1"'
'}, {"hash": "y", '
'"last_modified": "2014-11-21T14:14:27.409100", '
'"bytes": 3, '
'"name": "001o/2", '
'"content_type": "application/x-deleted;swift_versions_deleted=1"'
'}, {"hash": "y", '
'"last_modified": "2014-11-20T14:23:02.206740", '
'"bytes": 30, '
'"name": "001o/1", '
'"content_type": "text/plain"'
'}]')
self.app.register(
'HEAD', '/v1/a/c/o', swob.HTTPNotFound, {}, 'passed')
self.app.register(
'DELETE', '/v1/a/ver_cont/001o/3', swob.HTTPOk, {}, 'passed')
cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
req = Request.blank(
'/v1/a/c/o',
headers={'X-If-Delete-At': 1},
environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
'CONTENT_LENGTH': '0'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '200 OK')
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
# check that X-If-Delete-At was removed from DELETE request
calls = self.app.calls_with_headers
self.assertEqual(['GET', 'HEAD', 'DELETE'],
[c.method for c in calls])
method, path, req_headers = calls.pop()
self.assertTrue(path.startswith('/v1/a/ver_cont/001o/3'))
self.assertNotIn('x-if-delete-at', [h.lower() for h in req_headers])
@mock.patch('swift.common.middleware.versioned_writes.time.time',
return_value=1234)
def test_history_delete_marker_no_object_success(self, mock_time):
self.app.register(
'GET', '/v1/a/c/o', swob.HTTPNotFound,
{}, 'passed')
self.app.register(
'PUT', '/v1/a/ver_cont/001o/0000001234.00000', swob.HTTPCreated,
{}, 'passed')
self.app.register(
'DELETE', '/v1/a/c/o', swob.HTTPNotFound, {}, None)
cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont',
'versions-mode': 'history'}})
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
'CONTENT_LENGTH': '0'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '404 Not Found')
self.assertEqual(len(self.authorized), 2)
req.environ['REQUEST_METHOD'] = 'PUT'
self.assertRequestEqual(req, self.authorized[0])
calls = self.app.calls_with_headers
self.assertEqual(['GET', 'PUT', 'DELETE'], [c.method for c in calls])
self.assertEqual('application/x-deleted;swift_versions_deleted=1',
calls[1].headers.get('Content-Type'))
@mock.patch('swift.common.middleware.versioned_writes.time.time',
return_value=123456789.54321)
def test_history_delete_marker_over_object_success(self, mock_time):
self.app.register(
'GET', '/v1/a/c/o', swob.HTTPOk,
{'last-modified': 'Wed, 19 Nov 2014 18:19:02 GMT'}, 'passed')
self.app.register(
'PUT', '/v1/a/ver_cont/001o/1416421142.00000', swob.HTTPCreated,
{}, 'passed')
self.app.register(
'PUT', '/v1/a/ver_cont/001o/0123456789.54321', swob.HTTPCreated,
{}, 'passed')
self.app.register(
'DELETE', '/v1/a/c/o', swob.HTTPNoContent, {}, None)
cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont',
'versions-mode': 'history'}})
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
'CONTENT_LENGTH': '0'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '204 No Content')
self.assertEqual('', body)
self.assertEqual(len(self.authorized), 2)
req.environ['REQUEST_METHOD'] = 'PUT'
self.assertRequestEqual(req, self.authorized[0])
calls = self.app.calls_with_headers
self.assertEqual(['GET', 'PUT', 'PUT', 'DELETE'],
[c.method for c in calls])
self.assertEqual('/v1/a/ver_cont/001o/1416421142.00000',
calls[1].path)
self.assertEqual('application/x-deleted;swift_versions_deleted=1',
calls[2].headers.get('Content-Type'))
def test_delete_single_version_success(self):
# check that if the first listing page has just a single item then
# it is not erroneously inferred to be a non-reversed listing
self.app.register(
'DELETE', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
self.app.register(
'GET',
'/v1/a/ver_cont?prefix=001o/&marker=&reverse=on',
swob.HTTPOk, {},
'[{"hash": "y", '
'"last_modified": "2014-11-21T14:23:02.206740", '
'"bytes": 3, '
'"name": "001o/1", '
'"content_type": "text/plain"}]')
self.app.register(
'GET', '/v1/a/ver_cont/001o/1', swob.HTTPOk,
{'content-length': '3'}, None)
self.app.register(
'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, None)
self.app.register(
'DELETE', '/v1/a/ver_cont/001o/1', swob.HTTPOk,
{}, None)
cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
'CONTENT_LENGTH': '0'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '200 OK')
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
self.assertEqual(self.app.calls, [
('GET', prefix_listing_prefix + 'marker=&reverse=on'),
('GET', '/v1/a/ver_cont/001o/1'),
('PUT', '/v1/a/c/o'),
('DELETE', '/v1/a/ver_cont/001o/1'),
])
def test_DELETE_on_expired_versioned_object(self):
self.app.register(
'GET',
'/v1/a/ver_cont?prefix=001o/&marker=&reverse=on',
swob.HTTPOk, {},
'[{"hash": "y", '
'"last_modified": "2014-11-21T14:23:02.206740", '
'"bytes": 3, '
'"name": "001o/2", '
'"content_type": "text/plain"}, '
'{"hash": "x", '
'"last_modified": "2014-11-21T14:14:27.409100", '
'"bytes": 3, '
'"name": "001o/1", '
'"content_type": "text/plain"}]')
# expired object
self.app.register(
'GET', '/v1/a/ver_cont/001o/2', swob.HTTPNotFound,
{}, None)
self.app.register(
'GET', '/v1/a/ver_cont/001o/1', swob.HTTPCreated,
{'content-length': '3'}, None)
self.app.register(
'PUT', '/v1/a/c/o', swob.HTTPOk, {}, None)
self.app.register(
'DELETE', '/v1/a/ver_cont/001o/1', swob.HTTPOk,
{}, None)
cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
'CONTENT_LENGTH': '0'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '200 OK')
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
self.assertEqual(5, self.app.call_count)
prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
self.assertEqual(self.app.calls, [
('GET', prefix_listing_prefix + 'marker=&reverse=on'),
('GET', '/v1/a/ver_cont/001o/2'),
('GET', '/v1/a/ver_cont/001o/1'),
('PUT', '/v1/a/c/o'),
('DELETE', '/v1/a/ver_cont/001o/1'),
])
def test_denied_DELETE_of_versioned_object(self):
authorize_call = []
self.app.register(
'GET',
'/v1/a/ver_cont?prefix=001o/&marker=&reverse=on',
swob.HTTPOk, {},
'[{"hash": "y", '
'"last_modified": "2014-11-21T14:23:02.206740", '
'"bytes": 3, '
'"name": "001o/2", '
'"content_type": "text/plain"}, '
'{"hash": "x", '
'"last_modified": "2014-11-21T14:14:27.409100", '
'"bytes": 3, '
'"name": "001o/1", '
'"content_type": "text/plain"}]')
def fake_authorize(req):
# the container GET is pre-auth'd so here we deny the object DELETE
authorize_call.append(req)
return swob.HTTPForbidden()
cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
'swift.authorize': fake_authorize,
'CONTENT_LENGTH': '0'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '403 Forbidden')
self.assertEqual(len(authorize_call), 1)
self.assertRequestEqual(req, authorize_call[0])
prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
self.assertEqual(self.app.calls, [
('GET', prefix_listing_prefix + 'marker=&reverse=on'),
])
def test_denied_PUT_of_versioned_object(self):
authorize_call = []
self.app.register(
'GET', '/v1/a/c/o', swob.HTTPOk,
{'last-modified': 'Thu, 1 Jan 1970 00:00:01 GMT'}, 'passed')
def fake_authorize(req):
# we should deny the object PUT
authorize_call.append(req)
return swob.HTTPForbidden()
cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
'swift.authorize': fake_authorize,
'CONTENT_LENGTH': '0'})
# Save off a copy, as the middleware may modify the original
expected_req = Request(req.environ.copy())
status, headers, body = self.call_vw(req)
self.assertEqual(status, '403 Forbidden')
self.assertEqual(len(authorize_call), 1)
self.assertRequestEqual(expected_req, authorize_call[0])
self.assertEqual(self.app.calls, [])
class VersionedWritesOldContainersTestCase(VersionedWritesBaseTestCase):
def test_delete_latest_version_success(self):
self.app.register(
'DELETE', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
self.app.register(
'GET', '/v1/a/ver_cont?prefix=001o/&'
'marker=&reverse=on',
swob.HTTPOk, {},
'[{"hash": "x", '
'"last_modified": "2014-11-21T14:14:27.409100", '
'"bytes": 3, '
'"name": "001o/1", '
'"content_type": "text/plain"}, '
'{"hash": "y", '
'"last_modified": "2014-11-21T14:23:02.206740", '
'"bytes": 3, '
'"name": "001o/2", '
'"content_type": "text/plain"}]')
self.app.register(
'GET', '/v1/a/ver_cont?prefix=001o/'
'&marker=001o/2',
swob.HTTPNotFound, {}, None)
self.app.register(
'GET', '/v1/a/ver_cont/001o/2', swob.HTTPCreated,
{'content-length': '3'}, None)
self.app.register(
'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, None)
self.app.register(
'DELETE', '/v1/a/ver_cont/001o/2', swob.HTTPOk,
{}, None)
cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
req = Request.blank(
'/v1/a/c/o',
headers={'X-If-Delete-At': 1},
environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
'CONTENT_LENGTH': '0', 'swift.trans_id': 'fake_trans_id'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '200 OK')
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
self.assertEqual(5, self.app.call_count)
self.assertEqual(['VW', 'VW', 'VW', 'VW', 'VW'],
self.app.swift_sources)
self.assertEqual({'fake_trans_id'}, set(self.app.txn_ids))
# check that X-If-Delete-At was removed from DELETE request
req_headers = self.app.headers[-1]
self.assertNotIn('x-if-delete-at', [h.lower() for h in req_headers])
prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
self.assertEqual(self.app.calls, [
('GET', prefix_listing_prefix + 'marker=&reverse=on'),
('GET', prefix_listing_prefix + 'marker=001o/2'),
('GET', '/v1/a/ver_cont/001o/2'),
('PUT', '/v1/a/c/o'),
('DELETE', '/v1/a/ver_cont/001o/2'),
])
def test_DELETE_on_expired_versioned_object(self):
self.app.register(
'GET', '/v1/a/ver_cont?prefix=001o/&'
'marker=&reverse=on',
swob.HTTPOk, {},
'[{"hash": "x", '
'"last_modified": "2014-11-21T14:14:27.409100", '
'"bytes": 3, '
'"name": "001o/1", '
'"content_type": "text/plain"}, '
'{"hash": "y", '
'"last_modified": "2014-11-21T14:23:02.206740", '
'"bytes": 3, '
'"name": "001o/2", '
'"content_type": "text/plain"}]')
self.app.register(
'GET', '/v1/a/ver_cont?prefix=001o/'
'&marker=001o/2',
swob.HTTPNotFound, {}, None)
# expired object
self.app.register(
'GET', '/v1/a/ver_cont/001o/2', swob.HTTPNotFound,
{}, None)
self.app.register(
'GET', '/v1/a/ver_cont/001o/1', swob.HTTPCreated,
{'content-length': '3'}, None)
self.app.register(
'PUT', '/v1/a/c/o', swob.HTTPOk, {}, None)
self.app.register(
'DELETE', '/v1/a/ver_cont/001o/1', swob.HTTPOk,
{}, None)
cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
'CONTENT_LENGTH': '0'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '200 OK')
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
self.assertEqual(6, self.app.call_count)
prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
self.assertEqual(self.app.calls, [
('GET', prefix_listing_prefix + 'marker=&reverse=on'),
('GET', prefix_listing_prefix + 'marker=001o/2'),
('GET', '/v1/a/ver_cont/001o/2'),
('GET', '/v1/a/ver_cont/001o/1'),
('PUT', '/v1/a/c/o'),
('DELETE', '/v1/a/ver_cont/001o/1'),
])
def test_denied_DELETE_of_versioned_object(self):
authorize_call = []
self.app.register(
'DELETE', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
self.app.register(
'GET', '/v1/a/ver_cont?prefix=001o/&'
'marker=&reverse=on',
swob.HTTPOk, {},
'[{"hash": "x", '
'"last_modified": "2014-11-21T14:14:27.409100", '
'"bytes": 3, '
'"name": "001o/1", '
'"content_type": "text/plain"}, '
'{"hash": "y", '
'"last_modified": "2014-11-21T14:23:02.206740", '
'"bytes": 3, '
'"name": "001o/2", '
'"content_type": "text/plain"}]')
self.app.register(
'GET', '/v1/a/ver_cont?prefix=001o/'
'&marker=001o/2',
swob.HTTPNotFound, {}, None)
self.app.register(
'DELETE', '/v1/a/c/o', swob.HTTPForbidden,
{}, None)
def fake_authorize(req):
authorize_call.append(req)
return swob.HTTPForbidden()
cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
'swift.authorize': fake_authorize,
'CONTENT_LENGTH': '0'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '403 Forbidden')
self.assertEqual(len(authorize_call), 1)
self.assertRequestEqual(req, authorize_call[0])
prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
self.assertEqual(self.app.calls, [
('GET', prefix_listing_prefix + 'marker=&reverse=on'),
('GET', prefix_listing_prefix + 'marker=001o/2'),
])
def test_partially_upgraded_cluster(self):
old_versions = [
{'hash': 'etag%d' % x,
'last_modified': "2014-11-21T14:14:%02d.409100" % x,
'bytes': 3,
'name': '001o/%d' % x,
'content_type': 'text/plain'}
for x in range(5)]
# first container server can reverse
self.app.register(
'GET', '/v1/a/ver_cont?prefix=001o/&'
'marker=&reverse=on',
swob.HTTPOk, {}, json.dumps(list(reversed(old_versions[2:]))))
# but all objects are already gone
self.app.register(
'GET', '/v1/a/ver_cont/001o/4', swob.HTTPNotFound,
{}, None)
self.app.register(
'GET', '/v1/a/ver_cont/001o/3', swob.HTTPNotFound,
{}, None)
self.app.register(
'GET', '/v1/a/ver_cont/001o/2', swob.HTTPNotFound,
{}, None)
# second container server can't reverse
self.app.register(
'GET', '/v1/a/ver_cont?prefix=001o/&'
'marker=001o/2&reverse=on',
swob.HTTPOk, {}, json.dumps(old_versions[3:]))
# subsequent requests shouldn't reverse
self.app.register(
'GET', '/v1/a/ver_cont?prefix=001o/&'
'marker=&end_marker=001o/2',
swob.HTTPOk, {}, json.dumps(old_versions[:1]))
self.app.register(
'GET', '/v1/a/ver_cont?prefix=001o/&'
'marker=001o/0&end_marker=001o/2',
swob.HTTPOk, {}, json.dumps(old_versions[1:2]))
self.app.register(
'GET', '/v1/a/ver_cont?prefix=001o/&'
'marker=001o/1&end_marker=001o/2',
swob.HTTPOk, {}, '[]')
self.app.register(
'GET', '/v1/a/ver_cont/001o/1', swob.HTTPOk,
{'content-length': '3'}, None)
self.app.register(
'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, None)
self.app.register(
'DELETE', '/v1/a/ver_cont/001o/1', swob.HTTPNoContent,
{}, None)
cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
'CONTENT_LENGTH': '0'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '204 No Content')
prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
self.assertEqual(self.app.calls, [
('GET', prefix_listing_prefix + 'marker=&reverse=on'),
('GET', '/v1/a/ver_cont/001o/4'),
('GET', '/v1/a/ver_cont/001o/3'),
('GET', '/v1/a/ver_cont/001o/2'),
('GET', prefix_listing_prefix + 'marker=001o/2&reverse=on'),
('GET', prefix_listing_prefix + 'marker=&end_marker=001o/2'),
('GET', prefix_listing_prefix + 'marker=001o/0&end_marker=001o/2'),
('GET', prefix_listing_prefix + 'marker=001o/1&end_marker=001o/2'),
('GET', '/v1/a/ver_cont/001o/1'),
('PUT', '/v1/a/c/o'),
('DELETE', '/v1/a/ver_cont/001o/1'),
])
def test_partially_upgraded_cluster_single_result_on_second_page(self):
old_versions = [
{'hash': 'etag%d' % x,
'last_modified': "2014-11-21T14:14:%02d.409100" % x,
'bytes': 3,
'name': '001o/%d' % x,
'content_type': 'text/plain'}
for x in range(5)]
# first container server can reverse
self.app.register(
'GET', '/v1/a/ver_cont?prefix=001o/&'
'marker=&reverse=on',
swob.HTTPOk, {}, json.dumps(list(reversed(old_versions[-2:]))))
# but both objects are already gone
self.app.register(
'GET', '/v1/a/ver_cont/001o/4', swob.HTTPNotFound,
{}, None)
self.app.register(
'GET', '/v1/a/ver_cont/001o/3', swob.HTTPNotFound,
{}, None)
# second container server can't reverse
self.app.register(
'GET', '/v1/a/ver_cont?prefix=001o/&'
'marker=001o/3&reverse=on',
swob.HTTPOk, {}, json.dumps(old_versions[4:]))
# subsequent requests shouldn't reverse
self.app.register(
'GET', '/v1/a/ver_cont?prefix=001o/&'
'marker=&end_marker=001o/3',
swob.HTTPOk, {}, json.dumps(old_versions[:2]))
self.app.register(
'GET', '/v1/a/ver_cont?prefix=001o/&'
'marker=001o/1&end_marker=001o/3',
swob.HTTPOk, {}, json.dumps(old_versions[2:3]))
self.app.register(
'GET', '/v1/a/ver_cont?prefix=001o/&'
'marker=001o/2&end_marker=001o/3',
swob.HTTPOk, {}, '[]')
self.app.register(
'GET', '/v1/a/ver_cont/001o/2', swob.HTTPOk,
{'content-length': '3'}, None)
self.app.register(
'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, None)
self.app.register(
'DELETE', '/v1/a/ver_cont/001o/2', swob.HTTPNoContent,
{}, None)
cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
'CONTENT_LENGTH': '0'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '204 No Content')
prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
self.assertEqual(self.app.calls, [
('GET', prefix_listing_prefix + 'marker=&reverse=on'),
('GET', '/v1/a/ver_cont/001o/4'),
('GET', '/v1/a/ver_cont/001o/3'),
('GET', prefix_listing_prefix + 'marker=001o/3&reverse=on'),
('GET', prefix_listing_prefix + 'marker=&end_marker=001o/3'),
('GET', prefix_listing_prefix + 'marker=001o/1&end_marker=001o/3'),
('GET', prefix_listing_prefix + 'marker=001o/2&end_marker=001o/3'),
('GET', '/v1/a/ver_cont/001o/2'),
('PUT', '/v1/a/c/o'),
('DELETE', '/v1/a/ver_cont/001o/2'),
])
class VersionedWritesCopyingTestCase(VersionedWritesBaseTestCase):
# verify interaction of copy and versioned_writes middlewares
def setUp(self):
self.app = FakeSwift()
conf = {'allow_versioned_writes': 'true'}
self.vw = versioned_writes.filter_factory(conf)(self.app)
self.filter = copy.filter_factory({})(self.vw)
def call_filter(self, req, **kwargs):
return self.call_app(req, app=self.filter, **kwargs)
def test_copy_first_version(self):
# no existing object to move to the versions container
self.app.register(
'GET', '/v1/a/tgt_cont/tgt_obj', swob.HTTPNotFound, {}, None)
self.app.register(
'GET', '/v1/a/src_cont/src_obj', swob.HTTPOk, {}, 'passed')
self.app.register(
'PUT', '/v1/a/tgt_cont/tgt_obj', swob.HTTPCreated, {}, 'passed')
cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
req = Request.blank(
'/v1/a/src_cont/src_obj',
environ={'REQUEST_METHOD': 'COPY', 'swift.cache': cache,
'CONTENT_LENGTH': '100'},
headers={'Destination': 'tgt_cont/tgt_obj'})
status, headers, body = self.call_filter(req)
self.assertEqual(status, '201 Created')
self.assertEqual(len(self.authorized), 3)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/src_cont/src_obj', self.authorized[0].path)
# At the moment we are calling authorize on the incoming request in
# the middleware before we do the PUT (and the source GET) and again
# on the incoming request when it gets to the proxy. So the 2nd and
# 3rd auths look the same.
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a/tgt_cont/tgt_obj', self.authorized[1].path)
self.assertEqual('PUT', self.authorized[2].method)
self.assertEqual('/v1/a/tgt_cont/tgt_obj', self.authorized[2].path)
# note the GET on tgt_cont/tgt_obj is pre-authed
self.assertEqual(3, self.app.call_count, self.app.calls)
def test_copy_new_version(self):
# existing object should be moved to versions container
self.app.register(
'GET', '/v1/a/src_cont/src_obj', swob.HTTPOk, {}, 'passed')
self.app.register(
'GET', '/v1/a/tgt_cont/tgt_obj', swob.HTTPOk,
{'last-modified': 'Thu, 1 Jan 1970 00:00:01 GMT'}, 'passed')
self.app.register(
'PUT', '/v1/a/ver_cont/007tgt_obj/0000000001.00000', swob.HTTPOk,
{}, None)
self.app.register(
'PUT', '/v1/a/tgt_cont/tgt_obj', swob.HTTPCreated, {}, 'passed')
cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
req = Request.blank(
'/v1/a/src_cont/src_obj',
environ={'REQUEST_METHOD': 'COPY', 'swift.cache': cache,
'CONTENT_LENGTH': '100'},
headers={'Destination': 'tgt_cont/tgt_obj'})
status, headers, body = self.call_filter(req)
self.assertEqual(status, '201 Created')
self.assertEqual(len(self.authorized), 3)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/src_cont/src_obj', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a/tgt_cont/tgt_obj', self.authorized[1].path)
self.assertEqual(4, self.app.call_count)
def test_copy_new_version_different_account(self):
self.app.register(
'GET', '/v1/src_a/src_cont/src_obj', swob.HTTPOk, {}, 'passed')
self.app.register(
'GET', '/v1/tgt_a/tgt_cont/tgt_obj', swob.HTTPOk,
{'last-modified': 'Thu, 1 Jan 1970 00:00:01 GMT'}, 'passed')
self.app.register(
'PUT', '/v1/tgt_a/ver_cont/007tgt_obj/0000000001.00000',
swob.HTTPOk, {}, None)
self.app.register(
'PUT', '/v1/tgt_a/tgt_cont/tgt_obj', swob.HTTPCreated, {},
'passed')
cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
req = Request.blank(
'/v1/src_a/src_cont/src_obj',
environ={'REQUEST_METHOD': 'COPY', 'swift.cache': cache,
'CONTENT_LENGTH': '100'},
headers={'Destination': 'tgt_cont/tgt_obj',
'Destination-Account': 'tgt_a'})
status, headers, body = self.call_filter(req)
self.assertEqual(status, '201 Created')
self.assertEqual(len(self.authorized), 3)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/src_a/src_cont/src_obj', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/tgt_a/tgt_cont/tgt_obj', self.authorized[1].path)
self.assertEqual(4, self.app.call_count)
def test_copy_object_no_versioning_with_container_config_true(self):
        # set allow_versioned_writes to false and expect no extra
        # request to be made (the object is just copied as normal)
self.vw.conf = {'allow_versioned_writes': 'false'}
self.app.register(
'GET', '/v1/a/src_cont/src_obj', swob.HTTPOk, {}, 'passed')
self.app.register(
'PUT', '/v1/a/tgt_cont/tgt_obj', swob.HTTPCreated, {}, 'passed')
cache = FakeCache({'versions': 'ver_cont'})
req = Request.blank(
'/v1/a/src_cont/src_obj',
environ={'REQUEST_METHOD': 'COPY', 'swift.cache': cache},
headers={'Destination': '/tgt_cont/tgt_obj'})
status, headers, body = self.call_filter(req)
self.assertEqual(status, '201 Created')
self.assertEqual(len(self.authorized), 2)
self.assertEqual('GET', self.authorized[0].method)
self.assertEqual('/v1/a/src_cont/src_obj', self.authorized[0].path)
self.assertEqual('PUT', self.authorized[1].method)
self.assertEqual('/v1/a/tgt_cont/tgt_obj', self.authorized[1].path)
self.assertEqual(2, self.app.call_count)
class TestSwiftInfo(unittest.TestCase):
def setUp(self):
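        # reset the module-level registry so each test starts with a
        # clean swift_info dict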
utils._swift_info = {}
utils._swift_admin_info = {}
def test_registered_defaults(self):
versioned_writes.filter_factory({})('have to pass in an app')
swift_info = utils.get_swift_info()
        # by default, versioned_writes is not in swift_info
self.assertNotIn('versioned_writes', swift_info)
def test_registered_explicitly_set(self):
versioned_writes.filter_factory(
{'allow_versioned_writes': 'true'})('have to pass in an app')
swift_info = utils.get_swift_info()
self.assertIn('versioned_writes', swift_info)
self.assertEqual(
swift_info['versioned_writes'].get('allowed_flags'),
('x-versions-location', 'x-history-location'))
if __name__ == '__main__':
unittest.main()
| apache-2.0 |
mahendra-r/edx-platform | common/djangoapps/student/migrations/0006_expand_meta_field.py | 188 | 9246 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'UserProfile.meta'
db.alter_column('auth_userprofile', 'meta', self.gf('django.db.models.fields.TextField')())
def backwards(self, orm):
# Changing field 'UserProfile.meta'
db.alter_column('auth_userprofile', 'meta', self.gf('django.db.models.fields.CharField')(max_length=255))
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'about': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'avatar_type': ('django.db.models.fields.CharField', [], {'default': "'n'", 'max_length': '1'}),
'bronze': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'consecutive_days_visit_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'country': ('django_countries.fields.CountryField', [], {'max_length': '2', 'blank': 'True'}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'display_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'email_isvalid': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'email_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'email_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'gold': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'gravatar': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ignored_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'interesting_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'new_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'questions_per_page': ('django.db.models.fields.SmallIntegerField', [], {'default': '10'}),
'real_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'reputation': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'seen_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'show_country': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'silver': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'w'", 'max_length': '2'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'student.pendingemailchange': {
'Meta': {'object_name': 'PendingEmailChange'},
'activation_key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'new_email': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'student.pendingnamechange': {
'Meta': {'object_name': 'PendingNameChange'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'new_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'rationale': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'student.registration': {
'Meta': {'object_name': 'Registration', 'db_table': "'auth_registration'"},
'activation_key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'student.userprofile': {
'Meta': {'object_name': 'UserProfile', 'db_table': "'auth_userprofile'"},
'courseware': ('django.db.models.fields.CharField', [], {'default': "'course.xml'", 'max_length': '255', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'meta': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'profile'", 'unique': 'True', 'to': "orm['auth.User']"})
},
'student.usertestgroup': {
'Meta': {'object_name': 'UserTestGroup'},
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.User']", 'db_index': 'True', 'symmetrical': 'False'})
}
}
complete_apps = ['student']
| agpl-3.0 |
classicboyir/BuildingMachineLearningSystemsWithPython | ch10/simple_classification.py | 21 | 2299 | # This code is supporting material for the book
# Building Machine Learning Systems with Python
# by Willi Richert and Luis Pedro Coelho
# published by PACKT Publishing
#
# It is made available under the MIT License
import mahotas as mh
import numpy as np
from glob import glob
from features import texture, color_histogram
from sklearn.linear_model import LogisticRegression
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import StandardScaler
basedir = '../SimpleImageDataset/'
haralicks = []
labels = []
chists = []
print('This script will test (with cross-validation) classification of the simple 3 class dataset')
print('Computing features...')
# Use glob to get all the images
images = glob('{}/*.jpg'.format(basedir))
# We sort the images to ensure that they are always processed in the same order
# Otherwise, this would introduce some variation just based on the random
# ordering that the filesystem uses
for fname in sorted(images):
imc = mh.imread(fname)
haralicks.append(texture(mh.colors.rgb2grey(imc)))
chists.append(color_histogram(imc))
# Files are named like building00.jpg, scene23.jpg...
labels.append(fname[:-len('xx.jpg')])
print('Finished computing features.')
haralicks = np.array(haralicks)
labels = np.array(labels)
chists = np.array(chists)
haralick_plus_chists = np.hstack([chists, haralicks])
# We use Logistic Regression because it achieves high accuracy on small(ish) datasets
# Feel free to experiment with other classifiers
clf = Pipeline([('preproc', StandardScaler()),
('classifier', LogisticRegression())])
from sklearn import cross_validation
cv = cross_validation.LeaveOneOut(len(images))
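# Leave-one-out cross-validation: train on all images but one and test on the
# held-out image, repeated once per image.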
scores = cross_validation.cross_val_score(
clf, haralicks, labels, cv=cv)
print('Accuracy (Leave-one-out) with Logistic Regression [haralick features]: {:.1%}'.format(
scores.mean()))
scores = cross_validation.cross_val_score(
clf, chists, labels, cv=cv)
print('Accuracy (Leave-one-out) with Logistic Regression [color histograms]: {:.1%}'.format(
scores.mean()))
scores = cross_validation.cross_val_score(
clf, haralick_plus_chists, labels, cv=cv)
print('Accuracy (Leave-one-out) with Logistic Regression [texture features + color histograms]: {:.1%}'.format(
scores.mean()))
| mit |
halaszk/Perseus-UNIVERSAL5410 | tools/perf/scripts/python/netdev-times.py | 11271 | 15048 | # Display how packets are processed and how long each stage takes.
# It helps with investigating the network stack and network devices.
#
# options
# tx: show only tx chart
# rx: show only rx chart
# dev=: show only thing related to specified device
# debug: work with debug mode. It shows buffer status.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
all_event_list = []; # insert all tracepoint event related with this script
irq_dic = {}; # key is cpu and value is a list which stacks irqs
# which raise NET_RX softirq
net_rx_dic = {}; # key is cpu and value includes time of NET_RX softirq-entry
		 # and a list which stacks receive
receive_hunk_list = []; # a list which includes a sequence of receive events
rx_skb_list = []; # received packet list for matching
# skb_copy_datagram_iovec
buffer_budget = 65536; # the budget of rx_skb_list, tx_queue_list and
# tx_xmit_list
of_count_rx_skb_list = 0; # overflow count
tx_queue_list = []; # list of packets which pass through dev_queue_xmit
of_count_tx_queue_list = 0; # overflow count
tx_xmit_list = []; # list of packets which pass through dev_hard_start_xmit
of_count_tx_xmit_list = 0; # overflow count
tx_free_list = []; # list of packets which is freed
# options
show_tx = 0;
show_rx = 0;
dev = 0; # store a name of device specified by option "dev="
debug = 0;
# indices of event_info tuple
EINFO_IDX_NAME= 0
EINFO_IDX_CONTEXT=1
EINFO_IDX_CPU= 2
EINFO_IDX_TIME= 3
EINFO_IDX_PID= 4
EINFO_IDX_COMM= 5
# Calculate a time interval(msec) from src(nsec) to dst(nsec)
def diff_msec(src, dst):
return (dst - src) / 1000000.0
# Display a process of transmitting a packet
def print_transmit(hunk):
if dev != 0 and hunk['dev'].find(dev) < 0:
return
print "%7s %5d %6d.%06dsec %12.3fmsec %12.3fmsec" % \
(hunk['dev'], hunk['len'],
nsecs_secs(hunk['queue_t']),
nsecs_nsecs(hunk['queue_t'])/1000,
diff_msec(hunk['queue_t'], hunk['xmit_t']),
diff_msec(hunk['xmit_t'], hunk['free_t']))
# Format for displaying rx packet processing
PF_IRQ_ENTRY= " irq_entry(+%.3fmsec irq=%d:%s)"
PF_SOFT_ENTRY=" softirq_entry(+%.3fmsec)"
PF_NAPI_POLL= " napi_poll_exit(+%.3fmsec %s)"
PF_JOINT= " |"
PF_WJOINT= " | |"
PF_NET_RECV= " |---netif_receive_skb(+%.3fmsec skb=%x len=%d)"
PF_NET_RX= " |---netif_rx(+%.3fmsec skb=%x)"
PF_CPY_DGRAM= " | skb_copy_datagram_iovec(+%.3fmsec %d:%s)"
PF_KFREE_SKB= " | kfree_skb(+%.3fmsec location=%x)"
PF_CONS_SKB= " | consume_skb(+%.3fmsec)"
# Display the processing of received packets and the interrupts associated
# with a NET_RX softirq
def print_receive(hunk):
show_hunk = 0
irq_list = hunk['irq_list']
cpu = irq_list[0]['cpu']
base_t = irq_list[0]['irq_ent_t']
# check if this hunk should be showed
if dev != 0:
for i in range(len(irq_list)):
if irq_list[i]['name'].find(dev) >= 0:
show_hunk = 1
break
else:
show_hunk = 1
if show_hunk == 0:
return
print "%d.%06dsec cpu=%d" % \
(nsecs_secs(base_t), nsecs_nsecs(base_t)/1000, cpu)
for i in range(len(irq_list)):
print PF_IRQ_ENTRY % \
(diff_msec(base_t, irq_list[i]['irq_ent_t']),
irq_list[i]['irq'], irq_list[i]['name'])
print PF_JOINT
irq_event_list = irq_list[i]['event_list']
for j in range(len(irq_event_list)):
irq_event = irq_event_list[j]
if irq_event['event'] == 'netif_rx':
print PF_NET_RX % \
(diff_msec(base_t, irq_event['time']),
irq_event['skbaddr'])
print PF_JOINT
print PF_SOFT_ENTRY % \
diff_msec(base_t, hunk['sirq_ent_t'])
print PF_JOINT
event_list = hunk['event_list']
for i in range(len(event_list)):
event = event_list[i]
if event['event_name'] == 'napi_poll':
print PF_NAPI_POLL % \
(diff_msec(base_t, event['event_t']), event['dev'])
if i == len(event_list) - 1:
print ""
else:
print PF_JOINT
else:
print PF_NET_RECV % \
(diff_msec(base_t, event['event_t']), event['skbaddr'],
event['len'])
if 'comm' in event.keys():
print PF_WJOINT
print PF_CPY_DGRAM % \
(diff_msec(base_t, event['comm_t']),
event['pid'], event['comm'])
elif 'handle' in event.keys():
print PF_WJOINT
if event['handle'] == "kfree_skb":
print PF_KFREE_SKB % \
(diff_msec(base_t,
event['comm_t']),
event['location'])
elif event['handle'] == "consume_skb":
print PF_CONS_SKB % \
diff_msec(base_t,
event['comm_t'])
print PF_JOINT
def trace_begin():
global show_tx
global show_rx
global dev
global debug
for i in range(len(sys.argv)):
if i == 0:
continue
arg = sys.argv[i]
if arg == 'tx':
show_tx = 1
elif arg =='rx':
show_rx = 1
elif arg.find('dev=',0, 4) >= 0:
dev = arg[4:]
elif arg == 'debug':
debug = 1
if show_tx == 0 and show_rx == 0:
show_tx = 1
show_rx = 1
def trace_end():
# order all events in time
all_event_list.sort(lambda a,b :cmp(a[EINFO_IDX_TIME],
b[EINFO_IDX_TIME]))
# process all events
for i in range(len(all_event_list)):
event_info = all_event_list[i]
name = event_info[EINFO_IDX_NAME]
if name == 'irq__softirq_exit':
handle_irq_softirq_exit(event_info)
elif name == 'irq__softirq_entry':
handle_irq_softirq_entry(event_info)
elif name == 'irq__softirq_raise':
handle_irq_softirq_raise(event_info)
elif name == 'irq__irq_handler_entry':
handle_irq_handler_entry(event_info)
elif name == 'irq__irq_handler_exit':
handle_irq_handler_exit(event_info)
elif name == 'napi__napi_poll':
handle_napi_poll(event_info)
elif name == 'net__netif_receive_skb':
handle_netif_receive_skb(event_info)
elif name == 'net__netif_rx':
handle_netif_rx(event_info)
elif name == 'skb__skb_copy_datagram_iovec':
handle_skb_copy_datagram_iovec(event_info)
elif name == 'net__net_dev_queue':
handle_net_dev_queue(event_info)
elif name == 'net__net_dev_xmit':
handle_net_dev_xmit(event_info)
elif name == 'skb__kfree_skb':
handle_kfree_skb(event_info)
elif name == 'skb__consume_skb':
handle_consume_skb(event_info)
# display receive hunks
if show_rx:
for i in range(len(receive_hunk_list)):
print_receive(receive_hunk_list[i])
# display transmit hunks
if show_tx:
print " dev len Qdisc " \
" netdevice free"
for i in range(len(tx_free_list)):
print_transmit(tx_free_list[i])
if debug:
print "debug buffer status"
print "----------------------------"
print "xmit Qdisc:remain:%d overflow:%d" % \
(len(tx_queue_list), of_count_tx_queue_list)
print "xmit netdevice:remain:%d overflow:%d" % \
(len(tx_xmit_list), of_count_tx_xmit_list)
print "receive:remain:%d overflow:%d" % \
(len(rx_skb_list), of_count_rx_skb_list)
# called from perf when it finds a corresponding event
def irq__softirq_entry(name, context, cpu, sec, nsec, pid, comm, vec):
if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
return
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
all_event_list.append(event_info)
def irq__softirq_exit(name, context, cpu, sec, nsec, pid, comm, vec):
if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
return
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
all_event_list.append(event_info)
def irq__softirq_raise(name, context, cpu, sec, nsec, pid, comm, vec):
if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
return
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
all_event_list.append(event_info)
def irq__irq_handler_entry(name, context, cpu, sec, nsec, pid, comm,
irq, irq_name):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
irq, irq_name)
all_event_list.append(event_info)
def irq__irq_handler_exit(name, context, cpu, sec, nsec, pid, comm, irq, ret):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, irq, ret)
all_event_list.append(event_info)
def napi__napi_poll(name, context, cpu, sec, nsec, pid, comm, napi, dev_name):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
napi, dev_name)
all_event_list.append(event_info)
def net__netif_receive_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr,
skblen, dev_name):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr, skblen, dev_name)
all_event_list.append(event_info)
def net__netif_rx(name, context, cpu, sec, nsec, pid, comm, skbaddr,
skblen, dev_name):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr, skblen, dev_name)
all_event_list.append(event_info)
def net__net_dev_queue(name, context, cpu, sec, nsec, pid, comm,
skbaddr, skblen, dev_name):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr, skblen, dev_name)
all_event_list.append(event_info)
def net__net_dev_xmit(name, context, cpu, sec, nsec, pid, comm,
skbaddr, skblen, rc, dev_name):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr, skblen, rc ,dev_name)
all_event_list.append(event_info)
def skb__kfree_skb(name, context, cpu, sec, nsec, pid, comm,
skbaddr, protocol, location):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr, protocol, location)
all_event_list.append(event_info)
def skb__consume_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr)
all_event_list.append(event_info)
def skb__skb_copy_datagram_iovec(name, context, cpu, sec, nsec, pid, comm,
skbaddr, skblen):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr, skblen)
all_event_list.append(event_info)
def handle_irq_handler_entry(event_info):
(name, context, cpu, time, pid, comm, irq, irq_name) = event_info
if cpu not in irq_dic.keys():
irq_dic[cpu] = []
irq_record = {'irq':irq, 'name':irq_name, 'cpu':cpu, 'irq_ent_t':time}
irq_dic[cpu].append(irq_record)
def handle_irq_handler_exit(event_info):
(name, context, cpu, time, pid, comm, irq, ret) = event_info
if cpu not in irq_dic.keys():
return
irq_record = irq_dic[cpu].pop()
if irq != irq_record['irq']:
return
irq_record.update({'irq_ext_t':time})
# if an irq doesn't include NET_RX softirq, drop.
if 'event_list' in irq_record.keys():
irq_dic[cpu].append(irq_record)
def handle_irq_softirq_raise(event_info):
(name, context, cpu, time, pid, comm, vec) = event_info
if cpu not in irq_dic.keys() \
or len(irq_dic[cpu]) == 0:
return
irq_record = irq_dic[cpu].pop()
if 'event_list' in irq_record.keys():
irq_event_list = irq_record['event_list']
else:
irq_event_list = []
irq_event_list.append({'time':time, 'event':'sirq_raise'})
irq_record.update({'event_list':irq_event_list})
irq_dic[cpu].append(irq_record)
def handle_irq_softirq_entry(event_info):
(name, context, cpu, time, pid, comm, vec) = event_info
net_rx_dic[cpu] = {'sirq_ent_t':time, 'event_list':[]}
def handle_irq_softirq_exit(event_info):
(name, context, cpu, time, pid, comm, vec) = event_info
irq_list = []
event_list = 0
if cpu in irq_dic.keys():
irq_list = irq_dic[cpu]
del irq_dic[cpu]
if cpu in net_rx_dic.keys():
sirq_ent_t = net_rx_dic[cpu]['sirq_ent_t']
event_list = net_rx_dic[cpu]['event_list']
del net_rx_dic[cpu]
if irq_list == [] or event_list == 0:
return
rec_data = {'sirq_ent_t':sirq_ent_t, 'sirq_ext_t':time,
'irq_list':irq_list, 'event_list':event_list}
	# merge information related to a NET_RX softirq
receive_hunk_list.append(rec_data)
def handle_napi_poll(event_info):
(name, context, cpu, time, pid, comm, napi, dev_name) = event_info
if cpu in net_rx_dic.keys():
event_list = net_rx_dic[cpu]['event_list']
rec_data = {'event_name':'napi_poll',
'dev':dev_name, 'event_t':time}
event_list.append(rec_data)
def handle_netif_rx(event_info):
(name, context, cpu, time, pid, comm,
skbaddr, skblen, dev_name) = event_info
if cpu not in irq_dic.keys() \
or len(irq_dic[cpu]) == 0:
return
irq_record = irq_dic[cpu].pop()
if 'event_list' in irq_record.keys():
irq_event_list = irq_record['event_list']
else:
irq_event_list = []
irq_event_list.append({'time':time, 'event':'netif_rx',
'skbaddr':skbaddr, 'skblen':skblen, 'dev_name':dev_name})
irq_record.update({'event_list':irq_event_list})
irq_dic[cpu].append(irq_record)
def handle_netif_receive_skb(event_info):
global of_count_rx_skb_list
(name, context, cpu, time, pid, comm,
skbaddr, skblen, dev_name) = event_info
if cpu in net_rx_dic.keys():
rec_data = {'event_name':'netif_receive_skb',
'event_t':time, 'skbaddr':skbaddr, 'len':skblen}
event_list = net_rx_dic[cpu]['event_list']
event_list.append(rec_data)
rx_skb_list.insert(0, rec_data)
if len(rx_skb_list) > buffer_budget:
rx_skb_list.pop()
of_count_rx_skb_list += 1
def handle_net_dev_queue(event_info):
global of_count_tx_queue_list
(name, context, cpu, time, pid, comm,
skbaddr, skblen, dev_name) = event_info
skb = {'dev':dev_name, 'skbaddr':skbaddr, 'len':skblen, 'queue_t':time}
tx_queue_list.insert(0, skb)
if len(tx_queue_list) > buffer_budget:
tx_queue_list.pop()
of_count_tx_queue_list += 1
def handle_net_dev_xmit(event_info):
global of_count_tx_xmit_list
(name, context, cpu, time, pid, comm,
skbaddr, skblen, rc, dev_name) = event_info
if rc == 0: # NETDEV_TX_OK
for i in range(len(tx_queue_list)):
skb = tx_queue_list[i]
if skb['skbaddr'] == skbaddr:
skb['xmit_t'] = time
tx_xmit_list.insert(0, skb)
del tx_queue_list[i]
if len(tx_xmit_list) > buffer_budget:
tx_xmit_list.pop()
of_count_tx_xmit_list += 1
return
def handle_kfree_skb(event_info):
(name, context, cpu, time, pid, comm,
skbaddr, protocol, location) = event_info
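	# A freed skb may be a packet dropped while still queued, a transmitted
	# packet being released, or a received packet; check each list in turn.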
for i in range(len(tx_queue_list)):
skb = tx_queue_list[i]
if skb['skbaddr'] == skbaddr:
del tx_queue_list[i]
return
for i in range(len(tx_xmit_list)):
skb = tx_xmit_list[i]
if skb['skbaddr'] == skbaddr:
skb['free_t'] = time
tx_free_list.append(skb)
del tx_xmit_list[i]
return
for i in range(len(rx_skb_list)):
rec_data = rx_skb_list[i]
if rec_data['skbaddr'] == skbaddr:
rec_data.update({'handle':"kfree_skb",
'comm':comm, 'pid':pid, 'comm_t':time})
del rx_skb_list[i]
return
def handle_consume_skb(event_info):
(name, context, cpu, time, pid, comm, skbaddr) = event_info
for i in range(len(tx_xmit_list)):
skb = tx_xmit_list[i]
if skb['skbaddr'] == skbaddr:
skb['free_t'] = time
tx_free_list.append(skb)
del tx_xmit_list[i]
return
def handle_skb_copy_datagram_iovec(event_info):
(name, context, cpu, time, pid, comm, skbaddr, skblen) = event_info
for i in range(len(rx_skb_list)):
rec_data = rx_skb_list[i]
if skbaddr == rec_data['skbaddr']:
rec_data.update({'handle':"skb_copy_datagram_iovec",
'comm':comm, 'pid':pid, 'comm_t':time})
del rx_skb_list[i]
return
| gpl-2.0 |
lopezloo/mtasa-blue | vendor/google-breakpad/src/tools/python/deps-to-manifest.py | 19 | 4857 | #!/usr/bin/python
# Copyright 2016 Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Convert gclient's DEPS file to repo's manifest xml file."""
from __future__ import print_function
import argparse
import os
import sys
REMOTES = {
'chromium': 'https://chromium.googlesource.com/',
'github': 'https://github.com/',
}
REVIEWS = {
'chromium': 'https://chromium-review.googlesource.com',
}
MANIFEST_HEAD = """<?xml version='1.0' encoding='UTF-8'?>
<!-- AUTOGENERATED BY %(prog)s; DO NOT EDIT -->
<manifest>
<default revision='refs/heads/master'
remote='chromium'
sync-c='true'
sync-j='8' />
"""
MANIFEST_REMOTE = """
<remote name='%(name)s'
fetch='%(fetch)s'
review='%(review)s' />
"""
MANIFEST_PROJECT = """
<project path='%(path)s'
name='%(name)s'
revision='%(revision)s'
remote='%(remote)s' />
"""
MANIFEST_TAIL = """
</manifest>
"""
def ConvertDepsToManifest(deps, manifest):
"""Convert the |deps| file to the |manifest|."""
# Load the DEPS file data.
ctx = {}
execfile(deps, ctx)
new_contents = ''
# Write out the common header.
data = {
'prog': os.path.basename(__file__),
}
new_contents += MANIFEST_HEAD % data
# Write out the <remote> sections.
for name, fetch in REMOTES.items():
data = {
'name': name,
'fetch': fetch,
'review': REVIEWS.get(name, ''),
}
new_contents += MANIFEST_REMOTE % data
# Write out the main repo itself.
data = {
'path': 'src',
'name': 'breakpad/breakpad',
'revision': 'refs/heads/master',
'remote': 'chromium',
}
new_contents += MANIFEST_PROJECT % data
# Write out the <project> sections.
for path, url in ctx['deps'].items():
for name, fetch in REMOTES.items():
if url.startswith(fetch):
remote = name
break
else:
raise ValueError('Unknown DEPS remote: %s: %s' % (path, url))
# The DEPS url will look like:
# https://chromium.googlesource.com/external/gyp/@e8ab0833a42691cd2
remote_path, rev = url.split('@')
remote_path = remote_path[len(fetch):]
# If it's not a revision, assume it's a tag. Repo wants full ref names.
if len(rev) != 40:
rev = 'refs/tags/%s' % rev
data = {
'path': path,
'name': remote_path,
'revision': rev,
'remote': remote,
}
new_contents += MANIFEST_PROJECT % data
# Write out the common footer.
new_contents += MANIFEST_TAIL
# See if the manifest has actually changed contents to avoid thrashing.
try:
old_contents = open(manifest).read()
except IOError:
# In case the file doesn't exist yet.
old_contents = ''
if old_contents != new_contents:
print('Updating %s due to changed %s' % (manifest, deps))
with open(manifest, 'w') as fp:
fp.write(new_contents)
def GetParser():
"""Return a CLI parser."""
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('deps',
help='The DEPS file to convert')
parser.add_argument('manifest',
help='The manifest xml to generate')
return parser
def main(argv):
"""The main func!"""
parser = GetParser()
opts = parser.parse_args(argv)
ConvertDepsToManifest(opts.deps, opts.manifest)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| gpl-3.0 |
fevxie/sale-workflow | sale_jit_on_services/__openerp__.py | 34 | 1916 | # -*- coding: utf-8 -*-
#
#
# Author: Joël Grand-Guillaume, Yannick Vaucher
# Copyright 2013 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
{
'name': 'Sale Service Just In Time',
'version': '1.0',
'category': 'Generic Modules/Sale',
'description': """
Sale Service Just In Time
=========================
    When you make a SO with products and services, the workflow of the SO will
    not reach the state done until all products are delivered and the
    procurements linked to service products are done.
    Usually, when the MRP scheduler runs, it marks the procurements of service
    lines as done. But you may want to mark them as done immediately, as if
    you were using the mrp_jit module.
    This module provides that feature: it brings the behavior of the mrp_jit
    module, but only for service products.
Contributors
------------
* Joël Grand-Guillaume <[email protected]>
* Yannick Vaucher <[email protected]>
""",
'author': "Camptocamp,Odoo Community Association (OCA)",
'depends': ['procurement'],
'website': 'http://www.camptocamp.com',
'data': [
'procurement_workflow.xml'
],
'test': [
'test/sale_service_jit_test.yml',
],
'demo': [],
'installable': False,
'active': False,
}
| agpl-3.0 |
lezizi/A-Framework | python/local-source/source.py | 1 | 2324 | #!/usr/bin/env python
#
# Copyright (C) 2012 LeZiZi Studio
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class SourceHandler():
'''
Provides basic source handling.
Property:
source: source object
'''
from base import Source
def __init__(self, source=None):
if source is None:
self.source = self.Source()
else:
self.source = source
    def append(self, action):
        '''
        Append an Action to the current source.
        Argument:
            action: An Action.
        Return:
            Boolean. True for success and False when the action already exists.
        '''
        if self.source.list.count(action) == 0:
            self.source.list.append(action)
            return(True)
        else:
            return(False)
    def delete(self, act):
        '''
        Argument:
            act: An Action OR a string of action key.
        Return:
            Boolean. True for success.
        '''
        if self.source.list.count(act) != 0:
            del(self.source.list[self.source.list.index(act)])
            return(True)
        else:
            return(False)
    def join(self, source):
        '''
        Copy actions from another source into the current source.
        '''
        for each in source.list:
            if self.source.list.count(each) == 0:
                self.source.list.append(each)
def match(self,ingroups=[],outgroups=[],implementation=None,key=None):
### NOT YET IMP ##
pass
def test():
from base import Action
b = Action()
b.key = "1"
c = Action()
c.key = "1"
print(cmp(b,c))
a = SourceHandler()
print(a.append(b))
print(a.append(c))
print(a.source.list)
print(a.delete(b))
#for each in dir(a):
# print(getattr(a,each))
# test()
| apache-2.0 |
melvon22/osmc | package/mediacenter-skin-osmc/files/usr/share/kodi/addons/script.module.unidecode/lib/unidecode/x00f.py | 252 | 4061 | data = (
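# ASCII transliterations for Unicode code points U+0F00-U+0FFF (the Tibetan
# block); the tuple is indexed by the low byte of the code point.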
'AUM', # 0x00
'', # 0x01
'', # 0x02
'', # 0x03
'', # 0x04
'', # 0x05
'', # 0x06
'', # 0x07
' // ', # 0x08
' * ', # 0x09
'', # 0x0a
'-', # 0x0b
' / ', # 0x0c
' / ', # 0x0d
' // ', # 0x0e
' -/ ', # 0x0f
' +/ ', # 0x10
' X/ ', # 0x11
' /XX/ ', # 0x12
' /X/ ', # 0x13
', ', # 0x14
'', # 0x15
'', # 0x16
'', # 0x17
'', # 0x18
'', # 0x19
'', # 0x1a
'', # 0x1b
'', # 0x1c
'', # 0x1d
'', # 0x1e
'', # 0x1f
'0', # 0x20
'1', # 0x21
'2', # 0x22
'3', # 0x23
'4', # 0x24
'5', # 0x25
'6', # 0x26
'7', # 0x27
'8', # 0x28
'9', # 0x29
'.5', # 0x2a
'1.5', # 0x2b
'2.5', # 0x2c
'3.5', # 0x2d
'4.5', # 0x2e
'5.5', # 0x2f
'6.5', # 0x30
'7.5', # 0x31
'8.5', # 0x32
'-.5', # 0x33
'+', # 0x34
'*', # 0x35
'^', # 0x36
'_', # 0x37
'', # 0x38
'~', # 0x39
'[?]', # 0x3a
']', # 0x3b
'[[', # 0x3c
']]', # 0x3d
'', # 0x3e
'', # 0x3f
'k', # 0x40
'kh', # 0x41
'g', # 0x42
'gh', # 0x43
'ng', # 0x44
'c', # 0x45
'ch', # 0x46
'j', # 0x47
'[?]', # 0x48
'ny', # 0x49
'tt', # 0x4a
'tth', # 0x4b
'dd', # 0x4c
'ddh', # 0x4d
'nn', # 0x4e
't', # 0x4f
'th', # 0x50
'd', # 0x51
'dh', # 0x52
'n', # 0x53
'p', # 0x54
'ph', # 0x55
'b', # 0x56
'bh', # 0x57
'm', # 0x58
'ts', # 0x59
'tsh', # 0x5a
'dz', # 0x5b
'dzh', # 0x5c
'w', # 0x5d
'zh', # 0x5e
'z', # 0x5f
'\'', # 0x60
'y', # 0x61
'r', # 0x62
'l', # 0x63
'sh', # 0x64
'ssh', # 0x65
's', # 0x66
'h', # 0x67
'a', # 0x68
'kss', # 0x69
'r', # 0x6a
'[?]', # 0x6b
'[?]', # 0x6c
'[?]', # 0x6d
'[?]', # 0x6e
'[?]', # 0x6f
'[?]', # 0x70
'aa', # 0x71
'i', # 0x72
'ii', # 0x73
'u', # 0x74
'uu', # 0x75
'R', # 0x76
'RR', # 0x77
'L', # 0x78
'LL', # 0x79
'e', # 0x7a
'ee', # 0x7b
'o', # 0x7c
'oo', # 0x7d
'M', # 0x7e
'H', # 0x7f
'i', # 0x80
'ii', # 0x81
'', # 0x82
'', # 0x83
'', # 0x84
'', # 0x85
'', # 0x86
'', # 0x87
'', # 0x88
'', # 0x89
'', # 0x8a
'', # 0x8b
'[?]', # 0x8c
'[?]', # 0x8d
'[?]', # 0x8e
'[?]', # 0x8f
'k', # 0x90
'kh', # 0x91
'g', # 0x92
'gh', # 0x93
'ng', # 0x94
'c', # 0x95
'ch', # 0x96
'j', # 0x97
'[?]', # 0x98
'ny', # 0x99
'tt', # 0x9a
'tth', # 0x9b
'dd', # 0x9c
'ddh', # 0x9d
'nn', # 0x9e
't', # 0x9f
'th', # 0xa0
'd', # 0xa1
'dh', # 0xa2
'n', # 0xa3
'p', # 0xa4
'ph', # 0xa5
'b', # 0xa6
'bh', # 0xa7
'm', # 0xa8
'ts', # 0xa9
'tsh', # 0xaa
'dz', # 0xab
'dzh', # 0xac
'w', # 0xad
'zh', # 0xae
'z', # 0xaf
'\'', # 0xb0
'y', # 0xb1
'r', # 0xb2
'l', # 0xb3
'sh', # 0xb4
'ss', # 0xb5
's', # 0xb6
'h', # 0xb7
'a', # 0xb8
'kss', # 0xb9
'w', # 0xba
'y', # 0xbb
'r', # 0xbc
'[?]', # 0xbd
'X', # 0xbe
' :X: ', # 0xbf
' /O/ ', # 0xc0
' /o/ ', # 0xc1
' \\o\\ ', # 0xc2
' (O) ', # 0xc3
'', # 0xc4
'', # 0xc5
'', # 0xc6
'', # 0xc7
'', # 0xc8
'', # 0xc9
'', # 0xca
'', # 0xcb
'', # 0xcc
'[?]', # 0xcd
'[?]', # 0xce
'', # 0xcf
'[?]', # 0xd0
'[?]', # 0xd1
'[?]', # 0xd2
'[?]', # 0xd3
'[?]', # 0xd4
'[?]', # 0xd5
'[?]', # 0xd6
'[?]', # 0xd7
'[?]', # 0xd8
'[?]', # 0xd9
'[?]', # 0xda
'[?]', # 0xdb
'[?]', # 0xdc
'[?]', # 0xdd
'[?]', # 0xde
'[?]', # 0xdf
'[?]', # 0xe0
'[?]', # 0xe1
'[?]', # 0xe2
'[?]', # 0xe3
'[?]', # 0xe4
'[?]', # 0xe5
'[?]', # 0xe6
'[?]', # 0xe7
'[?]', # 0xe8
'[?]', # 0xe9
'[?]', # 0xea
'[?]', # 0xeb
'[?]', # 0xec
'[?]', # 0xed
'[?]', # 0xee
'[?]', # 0xef
'[?]', # 0xf0
'[?]', # 0xf1
'[?]', # 0xf2
'[?]', # 0xf3
'[?]', # 0xf4
'[?]', # 0xf5
'[?]', # 0xf6
'[?]', # 0xf7
'[?]', # 0xf8
'[?]', # 0xf9
'[?]', # 0xfa
'[?]', # 0xfb
'[?]', # 0xfc
'[?]', # 0xfd
'[?]', # 0xfe
)
| gpl-2.0 |
lyndsysimon/hgrid-git-example | app.py | 1 | 1874 | from flask import Flask, jsonify, render_template, request
import json
import os
import tempfile
app = Flask(__name__)
from git_subprocess import Repository
repo_path = '/tmp/test/'
# Set up a git repository for a storage backend
repo = Repository(repo_path or tempfile.mkdtemp())
repo.init()
# Homepage - just render the template
@app.route('/')
def index():
return render_template('index.html')
# DELETE verb
@app.route('/api/files/', methods=['DELETE', ])
def delete_files():
# since multiple items could be deleted at once, iterate the list.
for id in json.loads(request.form.get('ids', '[]')):
repo._rm_file(id)
repo.commit(
author='Internet User <[email protected]>',
message='Deleted file(s)',
)
return jsonify({'deleted': request.form.get('ids')})
# GET verb
@app.route('/api/files/', methods=['GET', ])
def get_files():
return jsonify({
'files': [
_file_dict(f)
for f in os.listdir(repo.path)
if os.path.isfile(os.path.join(repo.path, f))
]
})
# POST verb
@app.route('/api/files/', methods=['POST', ])
def add_file():
f = request.files.get('file')
# write the file out to its new location
new_path = os.path.join(repo.path, f.filename)
with open(new_path, 'w') as outfile:
outfile.write(f.read())
# add it to git and commit
repo.add_file(
file_path=f.filename,
commit_author='Internet User <[email protected]>',
commit_message='Commited file {}'.format(f.filename)
)
    # respond with metadata for the new file, keyed by bare filename to match
    # the listing returned by get_files()
    return json.dumps([_file_dict(f.filename), ])
def _file_dict(f):
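    # Describe a stored file as the dict the JSON file-listing API returns.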
return {
'uid': f,
'name': f,
'size': os.path.getsize(os.path.join(repo.path, f)),
'type': 'file',
'parent_uid': 'null'
}
if __name__ == '__main__':
app.run(debug=True, port=5000)
| bsd-2-clause |
junrao/kafka | tests/kafkatest/tests/replication_test.py | 4 | 6417 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ducktape.utils.util import wait_until
from ducktape.mark import matrix
from kafkatest.services.zookeeper import ZookeeperService
from kafkatest.services.kafka import KafkaService
from kafkatest.services.verifiable_producer import VerifiableProducer
from kafkatest.services.console_consumer import ConsoleConsumer, is_int
from kafkatest.tests.produce_consume_validate import ProduceConsumeValidateTest
import signal
def clean_shutdown(test):
"""Discover leader node for our topic and shut it down cleanly."""
test.kafka.signal_leader(test.topic, partition=0, sig=signal.SIGTERM)
def hard_shutdown(test):
"""Discover leader node for our topic and shut it down with a hard kill."""
test.kafka.signal_leader(test.topic, partition=0, sig=signal.SIGKILL)
def clean_bounce(test):
"""Chase the leader of one partition and restart it cleanly."""
for i in range(5):
prev_leader_node = test.kafka.leader(topic=test.topic, partition=0)
test.kafka.restart_node(prev_leader_node, clean_shutdown=True)
def hard_bounce(test):
"""Chase the leader and restart it with a hard kill."""
for i in range(5):
prev_leader_node = test.kafka.leader(topic=test.topic, partition=0)
test.kafka.signal_node(prev_leader_node, sig=signal.SIGKILL)
# Since this is a hard kill, we need to make sure the process is down and that
# zookeeper and the broker cluster have registered the loss of the leader.
# Waiting for a new leader to be elected on the topic-partition is a reasonable heuristic for this.
def leader_changed():
current_leader = test.kafka.leader(topic=test.topic, partition=0)
return current_leader is not None and current_leader != prev_leader_node
wait_until(lambda: len(test.kafka.pids(prev_leader_node)) == 0, timeout_sec=5)
wait_until(leader_changed, timeout_sec=10, backoff_sec=.5)
test.kafka.start_node(prev_leader_node)
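# Map each failure_mode test parameter to its fault-injection function.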
failures = {
"clean_shutdown": clean_shutdown,
"hard_shutdown": hard_shutdown,
"clean_bounce": clean_bounce,
"hard_bounce": hard_bounce
}
class ReplicationTest(ProduceConsumeValidateTest):
"""
Note that consuming is a bit tricky, at least with console consumer. The goal is to consume all messages
    (for each partition) in the topic. In this case, waiting for the last message may cause the consumer to stop
too soon since console consumer is consuming multiple partitions from a single thread and therefore we lose
ordering guarantees.
Waiting on a count of consumed messages can be unreliable: if we stop consuming when num_consumed == num_acked,
we might exit early if some messages are duplicated (though not an issue here since producer retries==0)
Therefore rely here on the consumer.timeout.ms setting which times out on the interval between successively
consumed messages. Since we run the producer to completion before running the consumer, this is a reliable
indicator that nothing is left to consume.
"""
def __init__(self, test_context):
""":type test_context: ducktape.tests.test.TestContext"""
super(ReplicationTest, self).__init__(test_context=test_context)
self.topic = "test_topic"
self.zk = ZookeeperService(test_context, num_nodes=1)
self.kafka = KafkaService(test_context, num_nodes=3, zk=self.zk, topics={self.topic: {
"partitions": 3,
"replication-factor": 3,
"min.insync.replicas": 2}
})
self.producer_throughput = 10000
self.num_producers = 1
self.num_consumers = 1
def setUp(self):
self.zk.start()
def min_cluster_size(self):
"""Override this since we're adding services outside of the constructor"""
return super(ReplicationTest, self).min_cluster_size() + self.num_producers + self.num_consumers
@matrix(failure_mode=["clean_shutdown", "hard_shutdown", "clean_bounce", "hard_bounce"],
security_protocol=["PLAINTEXT", "SSL", "SASL_PLAINTEXT", "SASL_SSL"])
def test_replication_with_broker_failure(self, failure_mode, security_protocol):
"""Replication tests.
These tests verify that replication provides simple durability guarantees by checking that data acked by
brokers is still available for consumption in the face of various failure scenarios.
Setup: 1 zk, 3 kafka nodes, 1 topic with partitions=3, replication-factor=3, and min.insync.replicas=2
- Produce messages in the background
- Consume messages in the background
- Drive broker failures (shutdown, or bounce repeatedly with kill -15 or kill -9)
- When done driving failures, stop producing, and finish consuming
- Validate that every acked message was consumed
"""
self.kafka.security_protocol = 'PLAINTEXT'
self.kafka.interbroker_security_protocol = security_protocol
self.producer = VerifiableProducer(self.test_context, self.num_producers, self.kafka, self.topic, throughput=self.producer_throughput)
self.consumer = ConsoleConsumer(self.test_context, self.num_consumers, self.kafka, self.topic, consumer_timeout_ms=60000, message_validator=is_int)
self.kafka.start()
self.run_produce_consume_validate(core_test_action=lambda: failures[failure_mode](self))
| apache-2.0 |
sarvex/tensorflow | tensorflow/python/data/util/traverse_test.py | 8 | 4741 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for utilities for traversing the dataset construction graph."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
from tensorflow.python.data.experimental.ops import data_service_ops
from tensorflow.python.data.kernel_tests import test_base
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.data.util import traverse
from tensorflow.python.framework import combinations
from tensorflow.python.ops import gen_dataset_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import test
class _TestDataset(dataset_ops.UnaryUnchangedStructureDataset):
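  # Wraps an input dataset with prefetch + model ops so that constructing it
  # creates more than one variant tensor, exercising multi-tensor traversal.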
def __init__(self, input_dataset):
self._input_dataset = input_dataset
temp_variant_tensor = gen_dataset_ops.prefetch_dataset(
input_dataset._variant_tensor,
buffer_size=1,
**self._flat_structure)
variant_tensor = gen_dataset_ops.model_dataset(
temp_variant_tensor, **self._flat_structure)
super(_TestDataset, self).__init__(input_dataset, variant_tensor)
class TraverseTest(test_base.DatasetTestBase, parameterized.TestCase):
@combinations.generate(test_base.graph_only_combinations())
def testOnlySource(self):
ds = dataset_ops.Dataset.range(10)
variant_tensor_ops = traverse.obtain_all_variant_tensor_ops(ds)
self.assertAllEqual(["RangeDataset"], [x.name for x in variant_tensor_ops])
@combinations.generate(test_base.graph_only_combinations())
def testSimplePipeline(self):
ds = dataset_ops.Dataset.range(10).map(math_ops.square)
variant_tensor_ops = traverse.obtain_all_variant_tensor_ops(ds)
self.assertSetEqual(
set(["MapDataset", "RangeDataset"]),
set(x.name for x in variant_tensor_ops))
@combinations.generate(test_base.graph_only_combinations())
def testConcat(self):
ds1 = dataset_ops.Dataset.range(10)
ds2 = dataset_ops.Dataset.range(10)
ds = ds1.concatenate(ds2)
variant_tensor_ops = traverse.obtain_all_variant_tensor_ops(ds)
self.assertSetEqual(
set(["ConcatenateDataset", "RangeDataset", "RangeDataset_1"]),
set(x.name for x in variant_tensor_ops))
@combinations.generate(test_base.graph_only_combinations())
def testZip(self):
ds1 = dataset_ops.Dataset.range(10)
ds2 = dataset_ops.Dataset.range(10)
ds = dataset_ops.Dataset.zip((ds1, ds2))
variant_tensor_ops = traverse.obtain_all_variant_tensor_ops(ds)
self.assertSetEqual(
set(["ZipDataset", "RangeDataset", "RangeDataset_1"]),
set(x.name for x in variant_tensor_ops))
@combinations.generate(test_base.graph_only_combinations())
def testMultipleVariantTensors(self):
ds = dataset_ops.Dataset.range(10)
ds = _TestDataset(ds)
variant_tensor_ops = traverse.obtain_all_variant_tensor_ops(ds)
self.assertSetEqual(
set(["RangeDataset", "ModelDataset", "PrefetchDataset"]),
set(x.name for x in variant_tensor_ops))
@combinations.generate(test_base.graph_only_combinations())
def testFlatMap(self):
ds1 = dataset_ops.Dataset.range(10).repeat(10)
def map_fn(ds):
def _map(x):
return ds.batch(x)
return _map
ds2 = dataset_ops.Dataset.range(20).prefetch(1)
ds2 = ds2.flat_map(map_fn(ds1))
variant_tensor_ops = traverse.obtain_all_variant_tensor_ops(ds2)
self.assertSetEqual(
set([
"FlatMapDataset", "PrefetchDataset", "RepeatDataset",
"RangeDataset", "RangeDataset_1"
]), set(x.name for x in variant_tensor_ops))
@combinations.generate(test_base.graph_only_combinations())
def testTfDataService(self):
ds = dataset_ops.Dataset.range(10)
ds = ds.apply(
data_service_ops.distribute("parallel_epochs", "grpc://foo:0"))
ops = traverse.obtain_capture_by_value_ops(ds)
self.assertContainsSubset(
["RangeDataset", "DataServiceDatasetV2", "DummyIterationCounter"],
set(x.name for x in ops))
if __name__ == "__main__":
test.main()
| apache-2.0 |
foss-transportationmodeling/rettina-server | flask/lib/python2.7/site-packages/sqlalchemy/dialects/__init__.py | 21 | 1027 | # dialects/__init__.py
# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
__all__ = (
'drizzle',
'firebird',
'mssql',
'mysql',
'oracle',
'postgresql',
'sqlite',
'sybase',
)
from .. import util
def _auto_fn(name):
"""default dialect importer.
plugs into the :class:`.PluginLoader`
as a first-hit system.
"""
if "." in name:
dialect, driver = name.split(".")
else:
dialect = name
driver = "base"
try:
module = __import__('sqlalchemy.dialects.%s' % (dialect, )).dialects
except ImportError:
return None
module = getattr(module, dialect)
if hasattr(module, driver):
module = getattr(module, driver)
return lambda: module.dialect
else:
return None
registry = util.PluginLoader("sqlalchemy.dialects", auto_fn=_auto_fn)
| apache-2.0 |
pombredanne/RESTandra | drivers/py/cql/connection.py | 2 | 7350 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from os.path import exists, abspath, dirname, join
from thrift.transport import TTransport, TSocket
from thrift.protocol import TBinaryProtocol
from thrift.Thrift import TApplicationException
from errors import CQLException, InvalidCompressionScheme
from marshal import prepare
from decoders import SchemaDecoder
from results import RowsProxy
import zlib, re
try:
from cassandra import Cassandra
from cassandra.ttypes import Compression, InvalidRequestException, \
CqlResultType, AuthenticationRequest
except ImportError:
# Hack to run from a source tree
import sys
sys.path.append(join(abspath(dirname(__file__)),
'..',
'..',
'..',
'interface',
'thrift',
'gen-py'))
from cassandra import Cassandra
from cassandra.ttypes import Compression, InvalidRequestException, \
CqlResultType, AuthenticationRequest
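# Supported client-side query compression schemes; queries are compressed
# before being sent to the node over Thrift.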
COMPRESSION_SCHEMES = ['GZIP']
DEFAULT_COMPRESSION = 'GZIP'
__all__ = ['COMPRESSION_SCHEMES', 'DEFAULT_COMPRESSION', 'Connection']
class Connection(object):
"""
CQL connection object.
Example usage:
>>> conn = Connection("localhost", keyspace="Keyspace1")
>>> r = conn.execute('SELECT "age" FROM Users')
>>> for row in r.rows:
... for column in row.columns:
... print "%s is %s years of age" % (r.key, column.age)
"""
_keyspace_re = re.compile("USE (\w+);?", re.I | re.M)
_cfamily_re = re.compile("SELECT\s+.+\s+FROM\s+(\w+)", re.I | re.M)
def __init__(self, host, port=9160, keyspace=None, username=None,
password=None, decoder=None):
"""
Params:
* host .........: hostname of Cassandra node.
* port .........: port number to connect to (optional).
* keyspace .....: keyspace name (optional).
* username .....: username used in authentication (optional).
* password .....: password used in authentication (optional).
* decoder ......: result decoder instance (optional, defaults to none).
"""
socket = TSocket.TSocket(host, port)
self.transport = TTransport.TFramedTransport(socket)
protocol = TBinaryProtocol.TBinaryProtocolAccelerated(self.transport)
self.client = Cassandra.Client(protocol)
socket.open()
# XXX: "current" is probably a misnomer.
self._cur_keyspace = None
self._cur_column_family = None
if username and password:
credentials = {"username": username, "password": password}
self.client.login(AuthenticationRequest(credentials=credentials))
if keyspace:
self.execute('USE %s;' % keyspace)
self._cur_keyspace = keyspace
if not decoder:
self.decoder = SchemaDecoder(self.__get_schema())
else:
self.decoder = decoder
def __get_schema(self):
def columns(metadata):
results = {}
for col in metadata:
results[col.name] = col.validation_class
return results
def column_families(cf_defs):
cfresults = {}
for cf in cf_defs:
cfresults[cf.name] = {"comparator": cf.comparator_type}
cfresults[cf.name]["default_validation_class"] = \
cf.default_validation_class
cfresults[cf.name]["columns"] = columns(cf.column_metadata)
return cfresults
schema = {}
for ksdef in self.client.describe_keyspaces():
schema[ksdef.name] = column_families(ksdef.cf_defs)
return schema
def prepare(self, query, *args):
prepared_query = prepare(query, *args)
# Snag the keyspace or column family and stash it for later use in
# decoding columns. These regexes don't match every query, but the
# current column family only needs to be current for SELECTs.
match = Connection._cfamily_re.match(prepared_query)
if match:
self._cur_column_family = match.group(1)
else:
match = Connection._keyspace_re.match(prepared_query)
if match:
self._cur_keyspace = match.group(1)
return prepared_query
def execute(self, query, *args, **kwargs):
"""
Execute a CQL query on a remote node.
Params:
* query .........: CQL query string.
* args ..........: Query parameters.
* compression ...: Query compression type (optional).
"""
if kwargs.has_key("compression"):
compress = kwargs.get("compression").upper()
else:
compress = DEFAULT_COMPRESSION
compressed_query = Connection.compress_query(self.prepare(query, *args),
compress)
request_compression = getattr(Compression, compress)
try:
response = self.client.execute_cql_query(compressed_query,
request_compression)
except InvalidRequestException, ire:
raise CQLException("Bad Request: %s" % ire.why)
except TApplicationException, tapp:
raise CQLException("Internal application error")
except Exception, exc:
raise CQLException(exc)
if response.type == CqlResultType.ROWS:
return RowsProxy(response.rows,
self._cur_keyspace,
self._cur_column_family,
self.decoder)
if response.type == CqlResultType.INT:
return response.num
return None
def close(self):
self.transport.close()
def is_open(self):
return self.transport.isOpen()
@classmethod
def compress_query(cls, query, compression):
"""
Returns a query string compressed with the specified compression type.
Params:
* query .........: The query string to compress.
* compression ...: Type of compression to use.
"""
if not compression in COMPRESSION_SCHEMES:
raise InvalidCompressionScheme(compression)
if compression == 'GZIP':
return zlib.compress(query)
# vi: ai ts=4 tw=0 sw=4 et
| apache-2.0 |
Michaelmwirigi/lynsays | node_modules/grunt-docker/node_modules/docker/node_modules/pygmentize-bundled/vendor/pygments/build-3.3/pygments/lexers/_postgres_builtins.py | 95 | 9692 | # -*- coding: utf-8 -*-
"""
pygments.lexers._postgres_builtins
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Self-updating data files for PostgreSQL lexer.
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
import urllib.request, urllib.parse, urllib.error
# One man's constant is another man's variable.
SOURCE_URL = 'https://github.com/postgres/postgres/raw/master'
KEYWORDS_URL = SOURCE_URL + '/doc/src/sgml/keywords.sgml'
DATATYPES_URL = SOURCE_URL + '/doc/src/sgml/datatype.sgml'
def update_myself():
data_file = list(fetch(DATATYPES_URL))
datatypes = parse_datatypes(data_file)
pseudos = parse_pseudos(data_file)
keywords = parse_keywords(fetch(KEYWORDS_URL))
update_consts(__file__, 'DATATYPES', datatypes)
update_consts(__file__, 'PSEUDO_TYPES', pseudos)
update_consts(__file__, 'KEYWORDS', keywords)
def parse_keywords(f):
kw = []
for m in re.finditer(
r'\s*<entry><token>([^<]+)</token></entry>\s*'
r'<entry>([^<]+)</entry>', f.read()):
kw.append(m.group(1))
if not kw:
raise ValueError('no keyword found')
kw.sort()
return kw
def parse_datatypes(f):
dt = set()
for line in f:
if '<sect1' in line:
break
if '<entry><type>' not in line:
continue
# Parse a string such as
# time [ (<replaceable>p</replaceable>) ] [ without time zone ]
# into types "time" and "without time zone"
# remove all the tags
line = re.sub("<replaceable>[^<]+</replaceable>", "", line)
line = re.sub("<[^>]+>", "", line)
# Drop the parts containing braces
for tmp in [t for tmp in line.split('[')
for t in tmp.split(']') if "(" not in t]:
for t in tmp.split(','):
t = t.strip()
if not t: continue
dt.add(" ".join(t.split()))
dt = list(dt)
dt.sort()
return dt
def parse_pseudos(f):
dt = []
re_start = re.compile(r'\s*<table id="datatype-pseudotypes-table">')
re_entry = re.compile(r'\s*<entry><type>([^<]+)</></entry>')
re_end = re.compile(r'\s*</table>')
f = iter(f)
for line in f:
if re_start.match(line) is not None:
break
else:
raise ValueError('pseudo datatypes table not found')
for line in f:
m = re_entry.match(line)
if m is not None:
dt.append(m.group(1))
if re_end.match(line) is not None:
break
else:
raise ValueError('end of pseudo datatypes table not found')
if not dt:
raise ValueError('pseudo datatypes not found')
return dt
def fetch(url):
return urllib.request.urlopen(url)
def update_consts(filename, constname, content):
f = open(filename)
lines = f.readlines()
f.close()
# Line to start/end inserting
re_start = re.compile(r'^%s\s*=\s*\[\s*$' % constname)
re_end = re.compile(r'^\s*\]\s*$')
start = [ n for n, l in enumerate(lines) if re_start.match(l) ]
if not start:
raise ValueError("couldn't find line containing '%s = ['" % constname)
if len(start) > 1:
raise ValueError("too many lines containing '%s = ['" % constname)
start = start[0] + 1
end = [ n for n, l in enumerate(lines) if n >= start and re_end.match(l) ]
if not end:
raise ValueError("couldn't find line containing ']' after %s " % constname)
end = end[0]
# Pack the new content in lines not too long
content = [repr(item) for item in content ]
new_lines = [[]]
for item in content:
if sum(map(len, new_lines[-1])) + 2 * len(new_lines[-1]) + len(item) + 4 > 75:
new_lines.append([])
new_lines[-1].append(item)
lines[start:end] = [ " %s,\n" % ", ".join(items) for items in new_lines ]
f = open(filename, 'w')
f.write(''.join(lines))
f.close()
# Autogenerated: please edit them if you like wasting your time.
KEYWORDS = [
'ABORT', 'ABSOLUTE', 'ACCESS', 'ACTION', 'ADD', 'ADMIN', 'AFTER',
'AGGREGATE', 'ALL', 'ALSO', 'ALTER', 'ALWAYS', 'ANALYSE', 'ANALYZE',
'AND', 'ANY', 'ARRAY', 'AS', 'ASC', 'ASSERTION', 'ASSIGNMENT',
'ASYMMETRIC', 'AT', 'ATTRIBUTE', 'AUTHORIZATION', 'BACKWARD', 'BEFORE',
'BEGIN', 'BETWEEN', 'BIGINT', 'BINARY', 'BIT', 'BOOLEAN', 'BOTH', 'BY',
'CACHE', 'CALLED', 'CASCADE', 'CASCADED', 'CASE', 'CAST', 'CATALOG',
'CHAIN', 'CHAR', 'CHARACTER', 'CHARACTERISTICS', 'CHECK', 'CHECKPOINT',
'CLASS', 'CLOSE', 'CLUSTER', 'COALESCE', 'COLLATE', 'COLLATION',
'COLUMN', 'COMMENT', 'COMMENTS', 'COMMIT', 'COMMITTED', 'CONCURRENTLY',
'CONFIGURATION', 'CONNECTION', 'CONSTRAINT', 'CONSTRAINTS', 'CONTENT',
'CONTINUE', 'CONVERSION', 'COPY', 'COST', 'CREATE', 'CROSS', 'CSV',
'CURRENT', 'CURRENT_CATALOG', 'CURRENT_DATE', 'CURRENT_ROLE',
'CURRENT_SCHEMA', 'CURRENT_TIME', 'CURRENT_TIMESTAMP', 'CURRENT_USER',
'CURSOR', 'CYCLE', 'DATA', 'DATABASE', 'DAY', 'DEALLOCATE', 'DEC',
'DECIMAL', 'DECLARE', 'DEFAULT', 'DEFAULTS', 'DEFERRABLE', 'DEFERRED',
'DEFINER', 'DELETE', 'DELIMITER', 'DELIMITERS', 'DESC', 'DICTIONARY',
'DISABLE', 'DISCARD', 'DISTINCT', 'DO', 'DOCUMENT', 'DOMAIN', 'DOUBLE',
'DROP', 'EACH', 'ELSE', 'ENABLE', 'ENCODING', 'ENCRYPTED', 'END',
'ENUM', 'ESCAPE', 'EXCEPT', 'EXCLUDE', 'EXCLUDING', 'EXCLUSIVE',
'EXECUTE', 'EXISTS', 'EXPLAIN', 'EXTENSION', 'EXTERNAL', 'EXTRACT',
'FALSE', 'FAMILY', 'FETCH', 'FIRST', 'FLOAT', 'FOLLOWING', 'FOR',
'FORCE', 'FOREIGN', 'FORWARD', 'FREEZE', 'FROM', 'FULL', 'FUNCTION',
'FUNCTIONS', 'GLOBAL', 'GRANT', 'GRANTED', 'GREATEST', 'GROUP',
'HANDLER', 'HAVING', 'HEADER', 'HOLD', 'HOUR', 'IDENTITY', 'IF',
'ILIKE', 'IMMEDIATE', 'IMMUTABLE', 'IMPLICIT', 'IN', 'INCLUDING',
'INCREMENT', 'INDEX', 'INDEXES', 'INHERIT', 'INHERITS', 'INITIALLY',
'INLINE', 'INNER', 'INOUT', 'INPUT', 'INSENSITIVE', 'INSERT', 'INSTEAD',
'INT', 'INTEGER', 'INTERSECT', 'INTERVAL', 'INTO', 'INVOKER', 'IS',
'ISNULL', 'ISOLATION', 'JOIN', 'KEY', 'LABEL', 'LANGUAGE', 'LARGE',
'LAST', 'LC_COLLATE', 'LC_CTYPE', 'LEADING', 'LEAST', 'LEFT', 'LEVEL',
'LIKE', 'LIMIT', 'LISTEN', 'LOAD', 'LOCAL', 'LOCALTIME',
'LOCALTIMESTAMP', 'LOCATION', 'LOCK', 'MAPPING', 'MATCH', 'MAXVALUE',
'MINUTE', 'MINVALUE', 'MODE', 'MONTH', 'MOVE', 'NAME', 'NAMES',
'NATIONAL', 'NATURAL', 'NCHAR', 'NEXT', 'NO', 'NONE', 'NOT', 'NOTHING',
'NOTIFY', 'NOTNULL', 'NOWAIT', 'NULL', 'NULLIF', 'NULLS', 'NUMERIC',
'OBJECT', 'OF', 'OFF', 'OFFSET', 'OIDS', 'ON', 'ONLY', 'OPERATOR',
'OPTION', 'OPTIONS', 'OR', 'ORDER', 'OUT', 'OUTER', 'OVER', 'OVERLAPS',
'OVERLAY', 'OWNED', 'OWNER', 'PARSER', 'PARTIAL', 'PARTITION',
'PASSING', 'PASSWORD', 'PLACING', 'PLANS', 'POSITION', 'PRECEDING',
'PRECISION', 'PREPARE', 'PREPARED', 'PRESERVE', 'PRIMARY', 'PRIOR',
'PRIVILEGES', 'PROCEDURAL', 'PROCEDURE', 'QUOTE', 'RANGE', 'READ',
'REAL', 'REASSIGN', 'RECHECK', 'RECURSIVE', 'REF', 'REFERENCES',
'REINDEX', 'RELATIVE', 'RELEASE', 'RENAME', 'REPEATABLE', 'REPLACE',
'REPLICA', 'RESET', 'RESTART', 'RESTRICT', 'RETURNING', 'RETURNS',
'REVOKE', 'RIGHT', 'ROLE', 'ROLLBACK', 'ROW', 'ROWS', 'RULE',
'SAVEPOINT', 'SCHEMA', 'SCROLL', 'SEARCH', 'SECOND', 'SECURITY',
'SELECT', 'SEQUENCE', 'SEQUENCES', 'SERIALIZABLE', 'SERVER', 'SESSION',
'SESSION_USER', 'SET', 'SETOF', 'SHARE', 'SHOW', 'SIMILAR', 'SIMPLE',
'SMALLINT', 'SOME', 'STABLE', 'STANDALONE', 'START', 'STATEMENT',
'STATISTICS', 'STDIN', 'STDOUT', 'STORAGE', 'STRICT', 'STRIP',
'SUBSTRING', 'SYMMETRIC', 'SYSID', 'SYSTEM', 'TABLE', 'TABLES',
'TABLESPACE', 'TEMP', 'TEMPLATE', 'TEMPORARY', 'TEXT', 'THEN', 'TIME',
'TIMESTAMP', 'TO', 'TRAILING', 'TRANSACTION', 'TREAT', 'TRIGGER',
'TRIM', 'TRUE', 'TRUNCATE', 'TRUSTED', 'TYPE', 'UNBOUNDED',
'UNCOMMITTED', 'UNENCRYPTED', 'UNION', 'UNIQUE', 'UNKNOWN', 'UNLISTEN',
'UNLOGGED', 'UNTIL', 'UPDATE', 'USER', 'USING', 'VACUUM', 'VALID',
'VALIDATE', 'VALIDATOR', 'VALUE', 'VALUES', 'VARCHAR', 'VARIADIC',
'VARYING', 'VERBOSE', 'VERSION', 'VIEW', 'VOLATILE', 'WHEN', 'WHERE',
'WHITESPACE', 'WINDOW', 'WITH', 'WITHOUT', 'WORK', 'WRAPPER', 'WRITE',
'XML', 'XMLATTRIBUTES', 'XMLCONCAT', 'XMLELEMENT', 'XMLEXISTS',
'XMLFOREST', 'XMLPARSE', 'XMLPI', 'XMLROOT', 'XMLSERIALIZE', 'YEAR',
'YES', 'ZONE',
]
DATATYPES = [
'bigint', 'bigserial', 'bit', 'bit varying', 'bool', 'boolean', 'box',
'bytea', 'char', 'character', 'character varying', 'cidr', 'circle',
'date', 'decimal', 'double precision', 'float4', 'float8', 'inet',
'int', 'int2', 'int4', 'int8', 'integer', 'interval', 'json', 'line',
'lseg', 'macaddr', 'money', 'numeric', 'path', 'point', 'polygon',
'real', 'serial', 'serial2', 'serial4', 'serial8', 'smallint',
'smallserial', 'text', 'time', 'timestamp', 'timestamptz', 'timetz',
'tsquery', 'tsvector', 'txid_snapshot', 'uuid', 'varbit', 'varchar',
'with time zone', 'without time zone', 'xml',
]
PSEUDO_TYPES = [
'any', 'anyelement', 'anyarray', 'anynonarray', 'anyenum', 'anyrange',
'cstring', 'internal', 'language_handler', 'fdw_handler', 'record',
'trigger', 'void', 'opaque',
]
# Remove pseudo-types that are also keywords (e.g. 'any', 'trigger')
PSEUDO_TYPES = sorted(set(PSEUDO_TYPES) - set(map(str.lower, KEYWORDS)))
PLPGSQL_KEYWORDS = [
'ALIAS', 'CONSTANT', 'DIAGNOSTICS', 'ELSIF', 'EXCEPTION', 'EXIT',
'FOREACH', 'GET', 'LOOP', 'NOTICE', 'OPEN', 'PERFORM', 'QUERY', 'RAISE',
'RETURN', 'REVERSE', 'SQLSTATE', 'WHILE',
]
if __name__ == '__main__':
update_myself()
| mit |
autopulated/ninja | misc/ninja_syntax_test.py | 24 | 6158 | #!/usr/bin/env python
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
import ninja_syntax
LONGWORD = 'a' * 10
LONGWORDWITHSPACES = 'a'*5 + '$ ' + 'a'*5
INDENT = ' '
class TestLineWordWrap(unittest.TestCase):
def setUp(self):
self.out = StringIO()
self.n = ninja_syntax.Writer(self.out, width=8)
def test_single_long_word(self):
# We shouldn't wrap a single long word.
self.n._line(LONGWORD)
self.assertEqual(LONGWORD + '\n', self.out.getvalue())
def test_few_long_words(self):
# We should wrap a line where the second word is overlong.
self.n._line(' '.join(['x', LONGWORD, 'y']))
self.assertEqual(' $\n'.join(['x',
INDENT + LONGWORD,
INDENT + 'y']) + '\n',
self.out.getvalue())
def test_short_words_indented(self):
        # Test that indent is taken into account when breaking subsequent lines.
# The second line should not be ' to tree', as that's longer than the
# test layout width of 8.
self.n._line('line_one to tree')
self.assertEqual('''\
line_one $
to $
tree
''',
self.out.getvalue())
def test_few_long_words_indented(self):
# Check wrapping in the presence of indenting.
self.n._line(' '.join(['x', LONGWORD, 'y']), indent=1)
self.assertEqual(' $\n'.join([' ' + 'x',
' ' + INDENT + LONGWORD,
' ' + INDENT + 'y']) + '\n',
self.out.getvalue())
def test_escaped_spaces(self):
self.n._line(' '.join(['x', LONGWORDWITHSPACES, 'y']))
self.assertEqual(' $\n'.join(['x',
INDENT + LONGWORDWITHSPACES,
INDENT + 'y']) + '\n',
self.out.getvalue())
def test_fit_many_words(self):
self.n = ninja_syntax.Writer(self.out, width=78)
self.n._line('command = cd ../../chrome; python ../tools/grit/grit/format/repack.py ../out/Debug/obj/chrome/chrome_dll.gen/repack/theme_resources_large.pak ../out/Debug/gen/chrome/theme_resources_large.pak', 1)
self.assertEqual('''\
command = cd ../../chrome; python ../tools/grit/grit/format/repack.py $
../out/Debug/obj/chrome/chrome_dll.gen/repack/theme_resources_large.pak $
../out/Debug/gen/chrome/theme_resources_large.pak
''',
self.out.getvalue())
def test_leading_space(self):
self.n = ninja_syntax.Writer(self.out, width=14) # force wrapping
self.n.variable('foo', ['', '-bar', '-somethinglong'], 0)
self.assertEqual('''\
foo = -bar $
-somethinglong
''',
self.out.getvalue())
def test_embedded_dollar_dollar(self):
self.n = ninja_syntax.Writer(self.out, width=15) # force wrapping
self.n.variable('foo', ['a$$b', '-somethinglong'], 0)
self.assertEqual('''\
foo = a$$b $
-somethinglong
''',
self.out.getvalue())
def test_two_embedded_dollar_dollars(self):
self.n = ninja_syntax.Writer(self.out, width=17) # force wrapping
self.n.variable('foo', ['a$$b', '-somethinglong'], 0)
self.assertEqual('''\
foo = a$$b $
-somethinglong
''',
self.out.getvalue())
def test_leading_dollar_dollar(self):
self.n = ninja_syntax.Writer(self.out, width=14) # force wrapping
self.n.variable('foo', ['$$b', '-somethinglong'], 0)
self.assertEqual('''\
foo = $$b $
-somethinglong
''',
self.out.getvalue())
def test_trailing_dollar_dollar(self):
self.n = ninja_syntax.Writer(self.out, width=14) # force wrapping
self.n.variable('foo', ['a$$', '-somethinglong'], 0)
self.assertEqual('''\
foo = a$$ $
-somethinglong
''',
self.out.getvalue())
class TestBuild(unittest.TestCase):
def setUp(self):
self.out = StringIO()
self.n = ninja_syntax.Writer(self.out)
def test_variables_dict(self):
self.n.build('out', 'cc', 'in', variables={'name': 'value'})
self.assertEqual('''\
build out: cc in
name = value
''',
self.out.getvalue())
def test_variables_list(self):
self.n.build('out', 'cc', 'in', variables=[('name', 'value')])
self.assertEqual('''\
build out: cc in
name = value
''',
self.out.getvalue())
class TestExpand(unittest.TestCase):
def test_basic(self):
vars = {'x': 'X'}
self.assertEqual('foo', ninja_syntax.expand('foo', vars))
def test_var(self):
vars = {'xyz': 'XYZ'}
self.assertEqual('fooXYZ', ninja_syntax.expand('foo$xyz', vars))
def test_vars(self):
vars = {'x': 'X', 'y': 'YYY'}
self.assertEqual('XYYY', ninja_syntax.expand('$x$y', vars))
def test_space(self):
vars = {}
self.assertEqual('x y z', ninja_syntax.expand('x$ y$ z', vars))
def test_locals(self):
vars = {'x': 'a'}
local_vars = {'x': 'b'}
self.assertEqual('a', ninja_syntax.expand('$x', vars))
self.assertEqual('b', ninja_syntax.expand('$x', vars, local_vars))
def test_double(self):
self.assertEqual('a b$c', ninja_syntax.expand('a$ b$$c', {}))
if __name__ == '__main__':
unittest.main()
| apache-2.0 |
lliss/tr-55 | tr55/model.py | 1 | 14151 | # -*- coding: utf-8 -*-
"""
TR-55 Model Implementation
A mapping between variable/parameter names found in the TR-55 document
and variables used in this program is as follows:
* `precip` is referred to as P in the report
* `runoff` is Q
* `evaptrans` maps to ET, the evapotranspiration
* `inf` is the amount of water that infiltrates into the soil (in inches)
* `init_abs` is Ia, the initial abstraction, another form of infiltration
"""
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
import copy
from tr55.tablelookup import lookup_cn, lookup_bmp_infiltration, \
lookup_ki, is_bmp, is_built_type, make_precolumbian, get_pollutants
from tr55.water_quality import get_volume_of_runoff, get_pollutant_load
from tr55.operations import dict_plus
def runoff_pitt(precip, land_use):
"""
The Pitt Small Storm Hydrology method. The output is a runoff
value in inches.
"""
c1 = +3.638858398e-2
c2 = -1.243464039e-1
c3 = +1.295682223e-1
c4 = +9.375868043e-1
c5 = -2.235170859e-2
c6 = +0.170228067e+0
c7 = -3.971810782e-1
c8 = +3.887275538e-1
c9 = -2.289321859e-2
p4 = pow(precip, 4)
p3 = pow(precip, 3)
p2 = pow(precip, 2)
impervious = (c1 * p3) + (c2 * p2) + (c3 * precip) + c4
urb_grass = (c5 * p4) + (c6 * p3) + (c7 * p2) + (c8 * precip) + c9
runoff_vals = {
'open_water': impervious,
'developed_low': 0.20 * impervious + 0.80 * urb_grass,
'cluster_housing': 0.20 * impervious + 0.80 * urb_grass,
'developed_med': 0.65 * impervious + 0.35 * urb_grass,
'developed_high': impervious,
'developed_open': urb_grass
}
if land_use not in runoff_vals:
raise Exception('Land use %s not a built-type.' % land_use)
else:
return min(runoff_vals[land_use], precip)
def nrcs_cutoff(precip, curve_number):
"""
A function to find the cutoff between precipitation/curve number
pairs that have zero runoff by definition, and those that do not.
"""
if precip <= -1 * (2 * (curve_number - 100.0) / curve_number):
return True
else:
return False
def runoff_nrcs(precip, evaptrans, soil_type, land_use):
"""
The runoff equation from the TR-55 document. The output is a
runoff value in inches.
"""
if land_use == 'cluster_housing':
land_use = 'developed_low'
curve_number = lookup_cn(soil_type, land_use)
if nrcs_cutoff(precip, curve_number):
return 0.0
potential_retention = (1000.0 / curve_number) - 10
initial_abs = 0.2 * potential_retention
precip_minus_initial_abs = precip - initial_abs
numerator = pow(precip_minus_initial_abs, 2)
denominator = (precip_minus_initial_abs + potential_retention)
runoff = numerator / denominator
return min(runoff, precip - evaptrans)
def simulate_cell_day(precip, evaptrans, cell, cell_count):
"""
Simulate a bunch of cells of the same type during a one-day event.
`precip` is the amount of precipitation in inches.
`evaptrans` is evapotranspiration.
`cell` is a string which contains a soil type and land use
separated by a colon.
`cell_count` is the number of cells to simulate.
The return value is a dictionary of runoff, evapotranspiration, and
infiltration as volumes of water.
"""
def clamp(runoff, et, inf, precip):
"""
        This clamp ensures that runoff + et + inf <= precip.
NOTE: infiltration is normally independent of the
precipitation level, but this function introduces a slight
dependency (that is, at very low levels of precipitation, this
function can cause infiltration to be smaller than it
        ordinarily would be).
"""
total = runoff + et + inf
if (total > precip):
scale = precip / total
runoff *= scale
et *= scale
inf *= scale
return (runoff, et, inf)
precip = max(0.0, precip)
soil_type, land_use, bmp = cell.lower().split(':')
# If there is no precipitation, then there is no runoff or
# infiltration. There is evapotranspiration, however (it is
# understood that over a period of time, this can lead to the sum
# of the three values exceeding the total precipitation).
if precip == 0.0:
return {
'runoff-vol': 0.0,
# 'et-vol': cell_count * evaptrans,
'et-vol': 0.0,
'inf-vol': 0.0,
}
# Deal with the Best Management Practices (BMPs). For most BMPs,
# the infiltration is read from the table and the runoff is what
# is left over after infiltration and evapotranspiration. Rain
# gardens are treated differently.
if bmp and is_bmp(bmp) and bmp != 'rain_garden':
inf = lookup_bmp_infiltration(soil_type, bmp) # infiltration
runoff = max(0.0, precip - (evaptrans + inf)) # runoff
(runoff, evaptrans, inf) = clamp(runoff, evaptrans, inf, precip)
return {
'runoff-vol': cell_count * runoff,
'et-vol': cell_count * evaptrans,
'inf-vol': cell_count * inf
}
elif bmp and bmp == 'rain_garden':
# Here, return a mixture of 20% ideal rain garden and 80%
# high-intensity residential.
inf = lookup_bmp_infiltration(soil_type, bmp)
runoff = max(0.0, precip - (evaptrans + inf))
hi_res_cell = soil_type + ':developed_med:'
hi_res = simulate_cell_day(precip, evaptrans, hi_res_cell, 1)
hir_run = hi_res['runoff-vol']
hir_et = hi_res['et-vol']
hir_inf = hi_res['inf-vol']
final_runoff = (0.2 * runoff + 0.8 * hir_run)
final_et = (0.2 * evaptrans + 0.8 * hir_et)
final_inf = (0.2 * inf + 0.8 * hir_inf)
final = clamp(final_runoff, final_et, final_inf, precip)
(final_runoff, final_et, final_inf) = final
return {
'runoff-vol': cell_count * final_runoff,
'et-vol': cell_count * final_et,
'inf-vol': cell_count * final_inf
}
# At this point, if the `bmp` string has non-zero length, it is
# equal to either 'no_till' or 'cluster_housing'.
if bmp and bmp != 'no_till' and bmp != 'cluster_housing':
raise KeyError('Unexpected BMP: %s' % bmp)
land_use = bmp or land_use
# When the land use is a built-type and the level of precipitation
# is two inches or less, use the Pitt Small Storm Hydrology Model.
# When the land use is a built-type but the level of precipitation
# is higher, the runoff is the larger of that predicted by the
# Pitt model and NRCS model. Otherwise, return the NRCS amount.
if is_built_type(land_use) and precip <= 2.0:
runoff = runoff_pitt(precip, land_use)
elif is_built_type(land_use):
pitt_runoff = runoff_pitt(2.0, land_use)
nrcs_runoff = runoff_nrcs(precip, evaptrans, soil_type, land_use)
runoff = max(pitt_runoff, nrcs_runoff)
else:
runoff = runoff_nrcs(precip, evaptrans, soil_type, land_use)
inf = max(0.0, precip - (evaptrans + runoff))
(runoff, evaptrans, inf) = clamp(runoff, evaptrans, inf, precip)
return {
'runoff-vol': cell_count * runoff,
'et-vol': cell_count * evaptrans,
'inf-vol': cell_count * inf,
}
def create_unmodified_census(census):
"""
This creates a cell census, ignoring any modifications. The
output is suitable for use with `simulate_water_quality`.
"""
unmod = copy.deepcopy(census)
unmod.pop('modifications', None)
return unmod
def create_modified_census(census):
"""
This creates a cell census, with modifications, that is suitable
for use with `simulate_water_quality`.
For every type of cell that undergoes modification, the
modifications are indicated with a sub-distribution under that
cell type.
"""
mod = copy.deepcopy(census)
mod.pop('modifications', None)
for (cell, subcensus) in mod['distribution'].items():
n = subcensus['cell_count']
changes = {
'distribution': {
cell: {
'distribution': {
cell: {'cell_count': n}
}
}
}
}
mod = dict_plus(mod, changes)
for modification in (census.get('modifications') or []):
for (orig_cell, subcensus) in modification['distribution'].items():
n = subcensus['cell_count']
soil1, land1 = orig_cell.split(':')
soil2, land2, bmp = modification['change'].split(':')
changed_cell = '%s:%s:%s' % (soil2 or soil1, land2 or land1, bmp)
changes = {
'distribution': {
orig_cell: {
'distribution': {
orig_cell: {'cell_count': -n},
changed_cell: {'cell_count': n}
}
}
}
}
mod = dict_plus(mod, changes)
return mod
def simulate_water_quality(tree, cell_res, fn,
current_cell=None, precolumbian=False):
"""
Perform a water quality simulation by doing simulations on each of
the cell types (leaves), then adding them together by summing the
values of a node's subtrees and storing them at that node.
`tree` is the (sub)tree of cell distributions that is currently
under consideration.
`cell_res` is the size of each cell (used for turning inches of
water into volumes of water).
`fn` is a function that takes a cell type and a number of cells
and returns a dictionary containing runoff, et, and inf as
volumes.
`current_cell` is the cell type for the present node.
"""
# Internal node.
if 'cell_count' in tree and 'distribution' in tree:
n = tree['cell_count']
# simulate subtrees
if n != 0:
tally = {}
for cell, subtree in tree['distribution'].items():
simulate_water_quality(subtree, cell_res, fn,
cell, precolumbian)
subtree_ex_dist = subtree.copy()
subtree_ex_dist.pop('distribution', None)
tally = dict_plus(tally, subtree_ex_dist)
tree.update(tally) # update this node
# effectively a leaf
elif n == 0:
for pol in get_pollutants():
tree[pol] = 0.0
# Leaf node.
elif 'cell_count' in tree and 'distribution' not in tree:
# the number of cells covered by this leaf
n = tree['cell_count']
# canonicalize the current_cell string
split = current_cell.split(':')
if (len(split) == 2):
split.append('')
if precolumbian:
split[1] = make_precolumbian(split[1])
current_cell = '%s:%s:%s' % tuple(split)
# run the runoff model on this leaf
result = fn(current_cell, n) # runoff, et, inf
tree.update(result)
# perform water quality calculation
if n != 0:
soil_type, land_use, bmp = split
runoff_per_cell = result['runoff-vol'] / n
liters = get_volume_of_runoff(runoff_per_cell, n, cell_res)
for pol in get_pollutants():
tree[pol] = get_pollutant_load(land_use, pol, liters)
def postpass(tree):
"""
Remove volume units and replace them with inches.
"""
if 'cell_count' in tree:
if tree['cell_count'] > 0:
n = tree['cell_count']
tree['runoff'] = tree['runoff-vol'] / n
tree['et'] = tree['et-vol'] / n
tree['inf'] = tree['inf-vol'] / n
else:
tree['runoff'] = 0
tree['et'] = 0
tree['inf'] = 0
tree.pop('runoff-vol', None)
tree.pop('et-vol', None)
tree.pop('inf-vol', None)
if 'distribution' in tree:
for subtree in tree['distribution'].values():
postpass(subtree)
def simulate_modifications(census, fn, cell_res, precolumbian=False):
"""
Simulate effects of modifications.
`census` contains a distribution of cell-types in the area of interest.
`fn` is as described in `simulate_water_quality`.
`cell_res` is as described in `simulate_water_quality`.
"""
mod = create_modified_census(census)
simulate_water_quality(mod, cell_res, fn, precolumbian=precolumbian)
postpass(mod)
unmod = create_unmodified_census(census)
simulate_water_quality(unmod, cell_res, fn, precolumbian=precolumbian)
postpass(unmod)
return {
'unmodified': unmod,
'modified': mod
}
def simulate_day(census, precip, cell_res=10, precolumbian=False):
"""
Simulate a day, including water quality effects of modifications.
`census` contains a distribution of cell-types in the area of interest.
`cell_res` is as described in `simulate_water_quality`.
`precolumbian` indicates that artificial types should be turned
into forest.
"""
et_max = 0.207
if 'modifications' in census:
verify_census(census)
def fn(cell, cell_count):
# Compute et for cell type
split = cell.split(':')
if (len(split) == 2):
(land_use, bmp) = split
else:
(_, land_use, bmp) = split
et = et_max * lookup_ki(bmp or land_use)
# Simulate the cell for one day
return simulate_cell_day(precip, et, cell, cell_count)
return simulate_modifications(census, fn, cell_res, precolumbian)
def verify_census(census):
"""
Assures that there is no soil type/land cover pair
in a modification census that isn't in the AoI census.
"""
for modification in census['modifications']:
for land_cover in modification['distribution']:
if land_cover not in census['distribution']:
raise ValueError("Invalid modification census")
| apache-2.0 |
spaceof7/QGIS | python/plugins/processing/algs/qgis/HubDistanceLines.py | 6 | 6462 | # -*- coding: utf-8 -*-
"""
***************************************************************************
HubDistanceLines.py
---------------------
Date : May 2010
Copyright : (C) 2010 by Michael Minn
Email : pyqgis at michaelminn dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Michael Minn'
__date__ = 'May 2010'
__copyright__ = '(C) 2010, Michael Minn'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from qgis.PyQt.QtCore import QVariant
from qgis.core import (QgsField,
QgsGeometry,
QgsDistanceArea,
QgsFeature,
QgsFeatureSink,
QgsFeatureRequest,
QgsWkbTypes,
QgsUnitTypes,
QgsProcessing,
QgsProcessingParameterFeatureSource,
QgsProcessingParameterField,
QgsProcessingParameterEnum,
QgsProcessingParameterFeatureSink,
QgsProcessingException,
QgsSpatialIndex)
from processing.algs.qgis.QgisAlgorithm import QgisAlgorithm
from math import sqrt
class HubDistanceLines(QgisAlgorithm):
INPUT = 'INPUT'
HUBS = 'HUBS'
FIELD = 'FIELD'
UNIT = 'UNIT'
OUTPUT = 'OUTPUT'
LAYER_UNITS = 'LAYER_UNITS'
UNITS = [QgsUnitTypes.DistanceMeters,
QgsUnitTypes.DistanceFeet,
QgsUnitTypes.DistanceMiles,
QgsUnitTypes.DistanceKilometers,
LAYER_UNITS
]
def group(self):
return self.tr('Vector analysis')
def groupId(self):
return 'vectoranalysis'
def __init__(self):
super().__init__()
def initAlgorithm(self, config=None):
self.units = [self.tr('Meters'),
self.tr('Feet'),
self.tr('Miles'),
self.tr('Kilometers'),
self.tr('Layer units')]
self.addParameter(QgsProcessingParameterFeatureSource(self.INPUT,
self.tr('Source points layer')))
self.addParameter(QgsProcessingParameterFeatureSource(self.HUBS,
self.tr('Destination hubs layer')))
self.addParameter(QgsProcessingParameterField(self.FIELD,
self.tr('Hub layer name attribute'), parentLayerParameterName=self.HUBS))
self.addParameter(QgsProcessingParameterEnum(self.UNIT,
self.tr('Measurement unit'), self.units))
self.addParameter(QgsProcessingParameterFeatureSink(self.OUTPUT, self.tr('Hub distance'), QgsProcessing.TypeVectorLine))
def name(self):
return 'distancetonearesthublinetohub'
def displayName(self):
return self.tr('Distance to nearest hub (line to hub)')
def processAlgorithm(self, parameters, context, feedback):
if parameters[self.INPUT] == parameters[self.HUBS]:
raise QgsProcessingException(
self.tr('Same layer given for both hubs and spokes'))
point_source = self.parameterAsSource(parameters, self.INPUT, context)
hub_source = self.parameterAsSource(parameters, self.HUBS, context)
fieldName = self.parameterAsString(parameters, self.FIELD, context)
units = self.UNITS[self.parameterAsEnum(parameters, self.UNIT, context)]
fields = point_source.fields()
fields.append(QgsField('HubName', QVariant.String))
fields.append(QgsField('HubDist', QVariant.Double))
(sink, dest_id) = self.parameterAsSink(parameters, self.OUTPUT, context,
fields, QgsWkbTypes.LineString, point_source.sourceCrs())
index = QgsSpatialIndex(hub_source.getFeatures(QgsFeatureRequest().setSubsetOfAttributes([]).setDestinationCrs(point_source.sourceCrs(), context.transformContext())))
distance = QgsDistanceArea()
distance.setSourceCrs(point_source.sourceCrs(), context.transformContext())
distance.setEllipsoid(context.project().ellipsoid())
# Scan source points, find nearest hub, and write to output file
features = point_source.getFeatures()
total = 100.0 / point_source.featureCount() if point_source.featureCount() else 0
for current, f in enumerate(features):
if feedback.isCanceled():
break
if not f.hasGeometry():
sink.addFeature(f, QgsFeatureSink.FastInsert)
continue
src = f.geometry().boundingBox().center()
neighbors = index.nearestNeighbor(src, 1)
ft = next(hub_source.getFeatures(QgsFeatureRequest().setFilterFid(neighbors[0]).setSubsetOfAttributes([fieldName], hub_source.fields()).setDestinationCrs(point_source.sourceCrs(), context.transformContext())))
closest = ft.geometry().boundingBox().center()
hubDist = distance.measureLine(src, closest)
if units != self.LAYER_UNITS:
hub_dist_in_desired_units = distance.convertLengthMeasurement(hubDist, units)
else:
hub_dist_in_desired_units = hubDist
attributes = f.attributes()
attributes.append(ft[fieldName])
attributes.append(hub_dist_in_desired_units)
feat = QgsFeature()
feat.setAttributes(attributes)
feat.setGeometry(QgsGeometry.fromPolylineXY([src, closest]))
sink.addFeature(feat, QgsFeatureSink.FastInsert)
feedback.setProgress(int(current * total))
return {self.OUTPUT: dest_id}
| gpl-2.0 |
resmo/cloudstack | test/integration/smoke/test_affinity_groups_projects.py | 3 | 6214 | #!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from marvin.codes import FAILED
from marvin.cloudstackTestCase import *
from marvin.cloudstackAPI import *
from marvin.lib.utils import *
from marvin.lib.base import *
from marvin.lib.common import *
from marvin.sshClient import SshClient
from nose.plugins.attrib import attr
class TestDeployVmWithAffinityGroup(cloudstackTestCase):
"""
This test deploys a virtual machine for a project
using the small service offering and builtin template
"""
@classmethod
def setUpClass(cls):
cls.testClient = super(TestDeployVmWithAffinityGroup, cls).getClsTestClient()
zone_name = cls.testClient.getZoneForTests()
cls.apiclient = cls.testClient.getApiClient()
cls.domain = get_domain(cls.apiclient)
cls.services = cls.testClient.getParsedTestDataConfig()
# Get Zone, Domain and templates
cls.zone = get_zone(cls.apiclient, cls.testClient.getZoneForTests())
cls.template = get_template(
cls.apiclient,
cls.zone.id,
cls.services["ostype"]
)
if cls.template == FAILED:
assert False, "get_template() failed to return template with description %s" % cls.services["ostype"]
cls.services["virtual_machine"]["zoneid"] = cls.zone.id
cls.services["template"] = cls.template.id
cls.services["zoneid"] = cls.zone.id
cls.account = Account.create(
cls.apiclient,
cls.services["account"],
domainid=cls.domain.id
)
projectData = {
"name": "Project",
"displaytext": "Test project",
}
cls.project = Project.create(
cls.apiclient,
projectData,
account=cls.account.name,
domainid=cls.account.domainid
)
# Add user to the project
cls.project.addAccount(
cls.apiclient,
cls.account.name
)
cls.service_offering = ServiceOffering.create(
cls.apiclient,
cls.services["service_offerings"]["tiny"]
)
cls.ag = AffinityGroup.create(cls.apiclient, cls.services["virtual_machine"]["affinity"],projectid=cls.project.id)
cls._cleanup = [
cls.service_offering,
cls.ag,
cls.project,
cls.account,
]
return
@attr(tags=["basic", "advanced", "multihost"], required_hardware="false")
def test_DeployVmAntiAffinityGroup_in_project(self):
"""
test DeployVM in anti-affinity groups for project
deploy VM1 and VM2 in the same host-anti-affinity groups
Verify that the vms are deployed on separate hosts
"""
#deploy VM1 in affinity group created in setUp
vm1 = VirtualMachine.create(
self.apiclient,
self.services["virtual_machine"],
templateid=self.template.id,
projectid=self.project.id,
serviceofferingid=self.service_offering.id,
affinitygroupnames=[self.ag.name]
)
list_vm1 = list_virtual_machines(
self.apiclient,
id=vm1.id
)
self.assertEqual(
isinstance(list_vm1, list),
True,
"Check list response returns a valid list"
)
self.assertNotEqual(
len(list_vm1),
0,
"Check VM available in List Virtual Machines"
)
vm1_response = list_vm1[0]
self.assertEqual(
vm1_response.state,
'Running',
msg="VM is not in Running state"
)
self.assertEqual(
vm1_response.projectid,
self.project.id,
msg="VM1 is not deployed in project"
)
host_of_vm1 = vm1_response.hostid
#deploy VM2 in affinity group created in setUp
vm2 = VirtualMachine.create(
self.apiclient,
self.services["virtual_machine"],
templateid=self.template.id,
projectid=self.project.id,
serviceofferingid=self.service_offering.id,
affinitygroupnames=[self.ag.name]
)
list_vm2 = list_virtual_machines(
self.apiclient,
id=vm2.id
)
self.assertEqual(
isinstance(list_vm2, list),
True,
"Check list response returns a valid list"
)
self.assertNotEqual(
len(list_vm2),
0,
"Check VM available in List Virtual Machines"
)
vm2_response = list_vm2[0]
self.assertEqual(
vm2_response.state,
'Running',
msg="VM is not in Running state"
)
self.assertEqual(
vm2_response.projectid,
self.project.id,
msg="VM2 is not deployed in project"
)
host_of_vm2 = vm2_response.hostid
self.assertNotEqual(host_of_vm1, host_of_vm2,
msg="Both VMs of affinity group %s are on the same host" % self.ag.name)
@classmethod
def tearDownClass(cls):
try:
#Clean up, terminate the created templates
cleanup_resources(cls.apiclient, cls._cleanup)
except Exception as e:
raise Exception("Warning: Exception during cleanup : %s" % e)
| apache-2.0 |
ryanmockabee/golfr | flask/lib/python3.6/site-packages/jinja2/idtracking.py | 130 | 8760 | from jinja2.visitor import NodeVisitor
from jinja2._compat import iteritems
VAR_LOAD_PARAMETER = 'param'
VAR_LOAD_RESOLVE = 'resolve'
VAR_LOAD_ALIAS = 'alias'
VAR_LOAD_UNDEFINED = 'undefined'
def find_symbols(nodes, parent_symbols=None):
sym = Symbols(parent=parent_symbols)
visitor = FrameSymbolVisitor(sym)
for node in nodes:
visitor.visit(node)
return sym
def symbols_for_node(node, parent_symbols=None):
sym = Symbols(parent=parent_symbols)
sym.analyze_node(node)
return sym
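# Rough sketch of what this module computes: given a template AST node,
# symbols_for_node() records, per scope level, which names are stored,
# loaded, resolved or aliased; each name is rewritten to an identifier of
# the form 'l_<level>_<name>' (see Symbols._define_ref below).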
class Symbols(object):
def __init__(self, parent=None):
if parent is None:
self.level = 0
else:
self.level = parent.level + 1
self.parent = parent
self.refs = {}
self.loads = {}
self.stores = set()
def analyze_node(self, node, **kwargs):
visitor = RootVisitor(self)
visitor.visit(node, **kwargs)
def _define_ref(self, name, load=None):
ident = 'l_%d_%s' % (self.level, name)
self.refs[name] = ident
if load is not None:
self.loads[ident] = load
return ident
def find_load(self, target):
if target in self.loads:
return self.loads[target]
if self.parent is not None:
return self.parent.find_load(target)
def find_ref(self, name):
if name in self.refs:
return self.refs[name]
if self.parent is not None:
return self.parent.find_ref(name)
def ref(self, name):
rv = self.find_ref(name)
if rv is None:
raise AssertionError('Tried to resolve a name to a reference that '
'was unknown to the frame (%r)' % name)
return rv
def copy(self):
rv = object.__new__(self.__class__)
rv.__dict__.update(self.__dict__)
rv.refs = self.refs.copy()
rv.loads = self.loads.copy()
rv.stores = self.stores.copy()
return rv
def store(self, name):
self.stores.add(name)
        # If we have not seen the name referenced yet, we need to figure
# out what to set it to.
if name not in self.refs:
# If there is a parent scope we check if the name has a
# reference there. If it does it means we might have to alias
# to a variable there.
if self.parent is not None:
outer_ref = self.parent.find_ref(name)
if outer_ref is not None:
self._define_ref(name, load=(VAR_LOAD_ALIAS, outer_ref))
return
# Otherwise we can just set it to undefined.
self._define_ref(name, load=(VAR_LOAD_UNDEFINED, None))
def declare_parameter(self, name):
self.stores.add(name)
return self._define_ref(name, load=(VAR_LOAD_PARAMETER, None))
def load(self, name):
target = self.find_ref(name)
if target is None:
self._define_ref(name, load=(VAR_LOAD_RESOLVE, name))
def branch_update(self, branch_symbols):
stores = {}
for branch in branch_symbols:
for target in branch.stores:
if target in self.stores:
continue
stores[target] = stores.get(target, 0) + 1
for sym in branch_symbols:
self.refs.update(sym.refs)
self.loads.update(sym.loads)
self.stores.update(sym.stores)
for name, branch_count in iteritems(stores):
if branch_count == len(branch_symbols):
continue
target = self.find_ref(name)
assert target is not None, 'should not happen'
if self.parent is not None:
outer_target = self.parent.find_ref(name)
if outer_target is not None:
self.loads[target] = (VAR_LOAD_ALIAS, outer_target)
continue
self.loads[target] = (VAR_LOAD_RESOLVE, name)
def dump_stores(self):
rv = {}
node = self
while node is not None:
for name in node.stores:
if name not in rv:
rv[name] = self.find_ref(name)
node = node.parent
return rv
def dump_param_targets(self):
rv = set()
node = self
while node is not None:
for target, (instr, _) in iteritems(self.loads):
if instr == VAR_LOAD_PARAMETER:
rv.add(target)
node = node.parent
return rv
class RootVisitor(NodeVisitor):
def __init__(self, symbols):
self.sym_visitor = FrameSymbolVisitor(symbols)
def _simple_visit(self, node, **kwargs):
for child in node.iter_child_nodes():
self.sym_visitor.visit(child)
visit_Template = visit_Block = visit_Macro = visit_FilterBlock = \
visit_Scope = visit_If = visit_ScopedEvalContextModifier = \
_simple_visit
def visit_AssignBlock(self, node, **kwargs):
for child in node.body:
self.sym_visitor.visit(child)
def visit_CallBlock(self, node, **kwargs):
for child in node.iter_child_nodes(exclude=('call',)):
self.sym_visitor.visit(child)
def visit_For(self, node, for_branch='body', **kwargs):
if for_branch == 'body':
self.sym_visitor.visit(node.target, store_as_param=True)
branch = node.body
elif for_branch == 'else':
branch = node.else_
elif for_branch == 'test':
self.sym_visitor.visit(node.target, store_as_param=True)
if node.test is not None:
self.sym_visitor.visit(node.test)
return
else:
raise RuntimeError('Unknown for branch')
for item in branch or ():
self.sym_visitor.visit(item)
def visit_With(self, node, **kwargs):
for target in node.targets:
self.sym_visitor.visit(target)
for child in node.body:
self.sym_visitor.visit(child)
def generic_visit(self, node, *args, **kwargs):
raise NotImplementedError('Cannot find symbols for %r' %
node.__class__.__name__)
class FrameSymbolVisitor(NodeVisitor):
"""A visitor for `Frame.inspect`."""
def __init__(self, symbols):
self.symbols = symbols
def visit_Name(self, node, store_as_param=False, **kwargs):
"""All assignments to names go through this function."""
if store_as_param or node.ctx == 'param':
self.symbols.declare_parameter(node.name)
elif node.ctx == 'store':
self.symbols.store(node.name)
elif node.ctx == 'load':
self.symbols.load(node.name)
def visit_If(self, node, **kwargs):
self.visit(node.test, **kwargs)
original_symbols = self.symbols
def inner_visit(nodes):
self.symbols = rv = original_symbols.copy()
for subnode in nodes:
self.visit(subnode, **kwargs)
self.symbols = original_symbols
return rv
body_symbols = inner_visit(node.body)
else_symbols = inner_visit(node.else_ or ())
self.symbols.branch_update([body_symbols, else_symbols])
def visit_Macro(self, node, **kwargs):
self.symbols.store(node.name)
def visit_Import(self, node, **kwargs):
self.generic_visit(node, **kwargs)
self.symbols.store(node.target)
def visit_FromImport(self, node, **kwargs):
self.generic_visit(node, **kwargs)
for name in node.names:
if isinstance(name, tuple):
self.symbols.store(name[1])
else:
self.symbols.store(name)
def visit_Assign(self, node, **kwargs):
"""Visit assignments in the correct order."""
self.visit(node.node, **kwargs)
self.visit(node.target, **kwargs)
def visit_For(self, node, **kwargs):
"""Visiting stops at for blocks. However the block sequence
is visited as part of the outer scope.
"""
self.visit(node.iter, **kwargs)
def visit_CallBlock(self, node, **kwargs):
self.visit(node.call, **kwargs)
def visit_FilterBlock(self, node, **kwargs):
self.visit(node.filter, **kwargs)
def visit_With(self, node, **kwargs):
for target in node.values:
self.visit(target)
def visit_AssignBlock(self, node, **kwargs):
"""Stop visiting at block assigns."""
self.visit(node.target, **kwargs)
def visit_Scope(self, node, **kwargs):
"""Stop visiting at scopes."""
def visit_Block(self, node, **kwargs):
"""Stop visiting at blocks."""
| mit |
SCSSG/Odoo-SCS | addons/base_report_designer/plugin/openerp_report_designer/bin/script/LoginTest.py | 384 | 1320 | #########################################################################
#
# Copyright (c) 2003-2004 Danny Brewer [email protected]
# Copyright (C) 2004-2010 OpenERP SA (<http://openerp.com>).
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# See: http://www.gnu.org/licenses/lgpl.html
#
#############################################################################
if __name__ != "package":
from ServerParameter import *
from lib.gui import *
class LoginTest:
def __init__(self):
if not loginstatus:
Change(None)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
kmonsoor/npyscreen | npyscreen/wgmultilineeditable.py | 15 | 4413 | import curses
from . import wgwidget
from . import wgmultiline
from . import wgtextbox as textbox
from . import wgboxwidget
class MultiLineEditable(wgmultiline.MultiLine):
_contained_widgets = textbox.Textfield
CHECK_VALUE = True
ALLOW_CONTINUE_EDITING = True
CONTINUE_EDITING_AFTER_EDITING_ONE_LINE = True
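    # vi-like editing of a list of text lines (see set_up_handlers below):
    # 'i' inserts at the cursor, 'o' opens a new line underneath,
    # Enter/Space edit the current line, Backspace/Delete remove it.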
def get_new_value(self):
return ''
def check_line_value(self, vl):
if not vl:
return False
else:
return True
def edit_cursor_line_value(self):
if len(self.values) == 0:
self.insert_line_value()
return False
try:
active_line = self._my_widgets[(self.cursor_line-self.start_display_at)]
        except IndexError:
            # Cursor points past the visible widgets; reset it and insert.
            self.cursor_line = 0
self.insert_line_value()
return True
active_line.highlight = False
active_line.edit()
try:
self.values[self.cursor_line] = active_line.value
        except IndexError:
            # The edited value was appended past the end of the list, so
            # move the cursor onto the new last entry.
            self.values.append(active_line.value)
            if not self.cursor_line:
                self.cursor_line = 0
            self.cursor_line = len(self.values) - 1
self.reset_display_cache()
if self.CHECK_VALUE:
if not self.check_line_value(self.values[self.cursor_line]):
self.delete_line_value()
return False
self.display()
return True
def insert_line_value(self):
if self.cursor_line is None:
self.cursor_line = 0
self.values.insert(self.cursor_line, self.get_new_value())
self.display()
cont = self.edit_cursor_line_value()
if cont and self.ALLOW_CONTINUE_EDITING:
self._continue_editing()
def delete_line_value(self):
if len(self.values) > 0:
del self.values[self.cursor_line]
self.display()
def _continue_editing(self):
active_line = self._my_widgets[(self.cursor_line-self.start_display_at)]
continue_editing = self.ALLOW_CONTINUE_EDITING
if hasattr(active_line, 'how_exited'):
while active_line.how_exited == wgwidget.EXITED_DOWN and continue_editing:
self.values.insert(self.cursor_line+1, self.get_new_value())
self.cursor_line += 1
self.display()
continue_editing = self.edit_cursor_line_value()
active_line = self._my_widgets[(self.cursor_line-self.start_display_at)]
def h_insert_next_line(self, ch):
if len(self.values) == self.cursor_line - 1 or len(self.values) == 0:
self.values.append(self.get_new_value())
self.cursor_line += 1
self.display()
cont = self.edit_cursor_line_value()
if cont and self.ALLOW_CONTINUE_EDITING:
self._continue_editing()
else:
self.cursor_line += 1
self.insert_line_value()
def h_edit_cursor_line_value(self, ch):
continue_line = self.edit_cursor_line_value()
if continue_line and self.CONTINUE_EDITING_AFTER_EDITING_ONE_LINE:
self._continue_editing()
def h_insert_value(self, ch):
return self.insert_line_value()
def h_delete_line_value(self, ch):
self.delete_line_value()
def set_up_handlers(self):
super(MultiLineEditable, self).set_up_handlers()
self.handlers.update ( {
ord('i'): self.h_insert_value,
ord('o'): self.h_insert_next_line,
curses.ascii.CR: self.h_edit_cursor_line_value,
curses.ascii.NL: self.h_edit_cursor_line_value,
curses.ascii.SP: self.h_edit_cursor_line_value,
curses.ascii.DEL: self.h_delete_line_value,
curses.ascii.BS: self.h_delete_line_value,
curses.KEY_BACKSPACE: self.h_delete_line_value,
} )
class MultiLineEditableTitle(wgmultiline.TitleMultiLine):
_entry_type = MultiLineEditable
class MultiLineEditableBoxed(wgboxwidget.BoxTitle):
_contained_widget = MultiLineEditable
| bsd-2-clause |
Lukc/ospace-lukc | client-pygame/lib/pygameui/ProgressBar.py | 2 | 1287 | #
# Copyright 2001 - 2006 Ludek Smid [http://www.ospace.net/]
#
# This file is part of Pygame.UI.
#
# Pygame.UI is free software; you can redistribute it and/or modify
# it under the terms of the Lesser GNU General Public License as published by
# the Free Software Foundation; either version 2.1 of the License, or
# (at your option) any later version.
#
# Pygame.UI is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# Lesser GNU General Public License for more details.
#
# You should have received a copy of the Lesser GNU General Public License
# along with Pygame.UI; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
from pygame.locals import *
from Const import *
from Widget import Widget, registerWidget
class ProgressBar(Widget):
def __init__(self, parent, **kwargs):
Widget.__init__(self, parent)
# data
self.min = 0
self.max = 100
self.value = 0
# flags
self.processKWArguments(kwargs)
parent.registerWidget(self)
def draw(self, surface):
self.theme.drawProgressBar(surface, self)
return self.rect
registerWidget(ProgressBar, 'progressbar')
| gpl-2.0 |
kevin-coder/tensorflow-fork | tensorflow/lite/experimental/micro/examples/micro_speech/apollo3/captured_data_to_wav.py | 11 | 1442 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Converts values pulled from the microcontroller into audio files."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import struct
# import matplotlib.pyplot as plt
import numpy as np
import soundfile as sf
def new_data_to_array(fn):
vals = []
with open(fn) as f:
for n, line in enumerate(f):
if n != 0:
vals.extend([int(v, 16) for v in line.split()])
  # Reassemble the byte values; bytes(bytearray(...)) works on Python 2
  # and 3, whereas ''.join(map(chr, vals)) breaks struct.unpack on 3.
  b = bytes(bytearray(vals))
  y = struct.unpack('<' + 'h' * (len(b) // 2), b)
return y
data = 'captured_data.txt'
values = np.array(new_data_to_array(data)).astype(float)
# plt.plot(values, 'o-')
# plt.show(block=False)
wav = values / np.max(np.abs(values))
sf.write('captured_data.wav', wav, 16000)
| apache-2.0 |
luwei0917/awsemmd_script | small_script/computeRg.py | 1 | 2040 | from Bio.PDB.PDBParser import PDBParser
import argparse
parser = argparse.ArgumentParser(description="Compute Rg of pdb")
parser.add_argument("pdb", help="pdb file")
args = parser.parse_args()
def computeRg(pdb_file, chain="A"):
# compute Radius of gyration
# pdb_file = f"/Users/weilu/Research/server/feb_2019/iterative_optimization_new_temp_range/all_simulations/{p}/{p}/crystal_structure.pdb"
chain_name = chain
parser = PDBParser()
structure = parser.get_structure('X', pdb_file)
chain = list(structure[0][chain_name])
all_res = list(structure.get_residues())
# n = len(all_res)
# n = len(chain)
regular_res_list = [res for res in all_res if res.get_id()[0] == ' ']
n = len(regular_res_list)
print("all chains")
cutoff = 15
for residue in regular_res_list:
if residue.get_id()[0] == ' ' and abs(residue["CA"].get_vector()[-1]) < cutoff:
print(residue.get_id()[1])
rg = 0.0
for i, residue_i in enumerate(regular_res_list):
for j, residue_j in enumerate(regular_res_list[i+1:]):
            try:
                r = residue_i["CA"] - residue_j["CA"]
            except KeyError:
                # Residue lacks a CA atom; report it and skip this pair.
                print(residue_i, residue_j)
                continue
            rg += r**2
return (rg/(n**2))**0.5
rg = computeRg(args.pdb)
print(rg)
# NOTE: the helper below uses OpenMM's custom forces. This import is an
# assumption (the original script appears to rely on a star-import made
# elsewhere); it is added so the function is self-contained.
from simtk.openmm import CustomBondForce, CustomCVForce
def cylindrical_rg_bias_term(oa, k_rg=4.184, rg0=0, atomGroup=-1, forceGroup=27):
nres, ca = oa.nres, oa.ca
if atomGroup == -1:
group = list(range(nres))
else:
group = atomGroup # atomGroup = [0, 1, 10, 12] means include residue 1, 2, 11, 13.
n = len(group)
rg_square = CustomBondForce("1/normalization*(x^2+y^2)")
# rg = CustomBondForce("1")
rg_square.addGlobalParameter("normalization", n*n)
for i in group:
for j in group:
if j <= i:
continue
rg_square.addBond(ca[i], ca[j], [])
rg = CustomCVForce(f"{k_rg}*(rg_square^0.5-{rg0})^2")
rg.addCollectiveVariable("rg_square", rg_square)
rg.setForceGroup(forceGroup)
return rg
| mit |
CSC-ORG/Dynamic-Dashboard-2015 | engine/lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/langhungarianmodel.py | 2763 | 12536 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# 255: Control characters that usually do not exist in any text
# 254: Carriage/Return
# 253: symbol (punctuation) that does not belong to word
# 252: 0 - 9
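# Values below 252 are frequency ranks (lower = more frequent): e.g. in
# the table below, Latin2_HungarianCharToOrderMap[ord('e')] == 1, so 'e'
# is among the most frequent Hungarian letters.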
# Character Mapping Table:
Latin2_HungarianCharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47,
46, 71, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253,
253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8,
23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253,
159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,
175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,
191,192,193,194,195,196,197, 75,198,199,200,201,202,203,204,205,
79,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,
221, 51, 81,222, 78,223,224,225,226, 44,227,228,229, 61,230,231,
232,233,234, 58,235, 66, 59,236,237,238, 60, 69, 63,239,240,241,
82, 14, 74,242, 70, 80,243, 72,244, 15, 83, 77, 84, 30, 76, 85,
245,246,247, 25, 73, 42, 24,248,249,250, 31, 56, 29,251,252,253,
)
win1250HungarianCharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47,
46, 72, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253,
253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8,
23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253,
161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,
177,178,179,180, 78,181, 69,182,183,184,185,186,187,188,189,190,
191,192,193,194,195,196,197, 76,198,199,200,201,202,203,204,205,
81,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,
221, 51, 83,222, 80,223,224,225,226, 44,227,228,229, 61,230,231,
232,233,234, 58,235, 66, 59,236,237,238, 60, 70, 63,239,240,241,
84, 14, 75,242, 71, 82,243, 73,244, 15, 85, 79, 86, 30, 77, 87,
245,246,247, 25, 74, 42, 24,248,249,250, 31, 56, 29,251,252,253,
)
# Model Table:
# total sequences: 100%
# first 512 sequences: 94.7368%
# first 1024 sequences: 5.2623%
# rest sequences: 0.8894%
# negative sequences: 0.0009%
HungarianLangModel = (
0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,
3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,2,3,3,1,1,2,2,2,2,2,1,2,
3,2,2,3,3,3,3,3,2,3,3,3,3,3,3,1,2,3,3,3,3,2,3,3,1,1,3,3,0,1,1,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,
3,2,1,3,3,3,3,3,2,3,3,3,3,3,1,1,2,3,3,3,3,3,3,3,1,1,3,2,0,1,1,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,1,1,2,3,3,3,1,3,3,3,3,3,1,3,3,2,2,0,3,2,3,
0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,3,3,2,3,3,2,2,3,2,3,2,0,3,2,2,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,
3,3,3,3,3,3,2,3,3,3,3,3,2,3,3,3,1,2,3,2,2,3,1,2,3,3,2,2,0,3,3,3,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,3,2,3,3,3,3,2,3,3,3,3,0,2,3,2,
0,0,0,1,1,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,1,1,1,3,3,2,1,3,2,2,3,2,1,3,2,2,1,0,3,3,1,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,2,2,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,3,2,2,3,1,1,3,2,0,1,1,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,1,3,3,3,3,3,2,2,1,3,3,3,0,1,1,2,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,2,0,3,2,3,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,1,0,
3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,1,3,2,2,2,3,1,1,3,3,1,1,0,3,3,2,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,2,3,3,3,3,3,1,2,3,2,2,0,2,2,2,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,2,2,2,3,1,3,3,2,2,1,3,3,3,1,1,3,1,2,3,2,3,2,2,2,1,0,2,2,2,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,
3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,2,2,3,2,1,0,3,2,0,1,1,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,1,0,3,3,3,3,0,2,3,0,0,2,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,2,3,3,2,2,2,2,3,3,0,1,2,3,2,3,2,2,3,2,1,2,0,2,2,2,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,
3,3,3,3,3,3,1,2,3,3,3,2,1,2,3,3,2,2,2,3,2,3,3,1,3,3,1,1,0,2,3,2,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,1,2,2,2,2,3,3,3,1,1,1,3,3,1,1,3,1,1,3,2,1,2,3,1,1,0,2,2,2,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,2,1,2,1,1,3,3,1,1,1,1,3,3,1,1,2,2,1,2,1,1,2,2,1,1,0,2,2,1,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,1,1,2,1,1,3,3,1,0,1,1,3,3,2,0,1,1,2,3,1,0,2,2,1,0,0,1,3,2,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,2,1,3,3,3,3,3,1,2,3,2,3,3,2,1,1,3,2,3,2,1,2,2,0,1,2,1,0,0,1,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,2,2,2,2,3,1,2,2,1,1,3,3,0,3,2,1,2,3,2,1,3,3,1,1,0,2,1,3,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,2,2,2,3,2,3,3,3,2,1,1,3,3,1,1,1,2,2,3,2,3,2,2,2,1,0,2,2,1,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
1,0,0,3,3,3,3,3,0,0,3,3,2,3,0,0,0,2,3,3,1,0,1,2,0,0,1,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,2,3,3,3,3,3,1,2,3,3,2,2,1,1,0,3,3,2,2,1,2,2,1,0,2,2,0,1,1,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,2,2,1,3,1,2,3,3,2,2,1,1,2,2,1,1,1,1,3,2,1,1,1,1,2,1,0,1,2,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
2,3,3,1,1,1,1,1,3,3,3,0,1,1,3,3,1,1,1,1,1,2,2,0,3,1,1,2,0,2,1,1,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,1,0,1,2,1,2,2,0,1,2,3,1,2,0,0,0,2,1,1,1,1,1,2,0,0,1,1,0,0,0,0,
1,2,1,2,2,2,1,2,1,2,0,2,0,2,2,1,1,2,1,1,2,1,1,1,0,1,0,0,0,1,1,0,
1,1,1,2,3,2,3,3,0,1,2,2,3,1,0,1,0,2,1,2,2,0,1,1,0,0,1,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,3,3,2,2,1,0,0,3,2,3,2,0,0,0,1,1,3,0,0,1,1,0,0,2,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,1,2,2,3,3,1,0,1,3,2,3,1,1,1,0,1,1,1,1,1,3,1,0,0,2,2,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,1,1,2,2,2,1,0,1,2,3,3,2,0,0,0,2,1,1,1,2,1,1,1,0,1,1,1,0,0,0,
1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,2,1,1,1,1,1,1,0,1,1,1,0,0,1,1,
3,2,2,1,0,0,1,1,2,2,0,3,0,1,2,1,1,0,0,1,1,1,0,1,1,1,1,0,2,1,1,1,
2,2,1,1,1,2,1,2,1,1,1,1,1,1,1,2,1,1,1,2,3,1,1,1,1,1,1,1,1,1,0,1,
2,3,3,0,1,0,0,0,3,3,1,0,0,1,2,2,1,0,0,0,0,2,0,0,1,1,1,0,2,1,1,1,
2,1,1,1,1,1,1,2,1,1,0,1,1,0,1,1,1,0,1,2,1,1,0,1,1,1,1,1,1,1,0,1,
2,3,3,0,1,0,0,0,2,2,0,0,0,0,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,1,0,
2,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1,
3,2,2,0,1,0,1,0,2,3,2,0,0,1,2,2,1,0,0,1,1,1,0,0,2,1,0,1,2,2,1,1,
2,1,1,1,1,1,1,2,1,1,1,1,1,1,0,2,1,0,1,1,0,1,1,1,0,1,1,2,1,1,0,1,
2,2,2,0,0,1,0,0,2,2,1,1,0,0,2,1,1,0,0,0,1,2,0,0,2,1,0,0,2,1,1,1,
2,1,1,1,1,2,1,2,1,1,1,2,2,1,1,2,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,
1,2,3,0,0,0,1,0,3,2,1,0,0,1,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,2,1,
1,1,0,0,0,1,0,1,1,1,1,1,2,0,0,1,0,0,0,2,0,0,1,1,1,1,1,1,1,1,0,1,
3,0,0,2,1,2,2,1,0,0,2,1,2,2,0,0,0,2,1,1,1,0,1,1,0,0,1,1,2,0,0,0,
1,2,1,2,2,1,1,2,1,2,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,0,0,1,
1,3,2,0,0,0,1,0,2,2,2,0,0,0,2,2,1,0,0,0,0,3,1,1,1,1,0,0,2,1,1,1,
2,1,0,1,1,1,0,1,1,1,1,1,1,1,0,2,1,0,0,1,0,1,1,0,1,1,1,1,1,1,0,1,
2,3,2,0,0,0,1,0,2,2,0,0,0,0,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,1,0,
2,1,1,1,1,2,1,2,1,2,0,1,1,1,0,2,1,1,1,2,1,1,1,1,0,1,1,1,1,1,0,1,
3,1,1,2,2,2,3,2,1,1,2,2,1,1,0,1,0,2,2,1,1,1,1,1,0,0,1,1,0,1,1,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,0,0,0,0,0,2,2,0,0,0,0,2,2,1,0,0,0,1,1,0,0,1,2,0,0,2,1,1,1,
2,2,1,1,1,2,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,1,1,0,1,2,1,1,1,0,1,
1,0,0,1,2,3,2,1,0,0,2,0,1,1,0,0,0,1,1,1,1,0,1,1,0,0,1,0,0,0,0,0,
1,2,1,2,1,2,1,1,1,2,0,2,1,1,1,0,1,2,0,0,1,1,1,0,0,0,0,0,0,0,0,0,
2,3,2,0,0,0,0,0,1,1,2,1,0,0,1,1,1,0,0,0,0,2,0,0,1,1,0,0,2,1,1,1,
2,1,1,1,1,1,1,2,1,0,1,1,1,1,0,2,1,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1,
1,2,2,0,1,1,1,0,2,2,2,0,0,0,3,2,1,0,0,0,1,1,0,0,1,1,0,1,1,1,0,0,
1,1,0,1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,0,0,1,1,1,0,1,0,1,
2,1,0,2,1,1,2,2,1,1,2,1,1,1,0,0,0,1,1,0,1,1,1,1,0,0,1,1,1,0,0,0,
1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,1,0,
1,2,3,0,0,0,1,0,2,2,0,0,0,0,2,2,0,0,0,0,0,1,0,0,1,0,0,0,2,0,1,0,
2,1,1,1,1,1,0,2,0,0,0,1,2,1,1,1,1,0,1,2,0,1,0,1,0,1,1,1,0,1,0,1,
2,2,2,0,0,0,1,0,2,1,2,0,0,0,1,1,2,0,0,0,0,1,0,0,1,1,0,0,2,1,0,1,
2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1,
1,2,2,0,0,0,1,0,2,2,2,0,0,0,1,1,0,0,0,0,0,1,1,0,2,0,0,1,1,1,0,1,
1,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,0,0,1,1,0,1,0,1,1,1,1,1,0,0,0,1,
1,0,0,1,0,1,2,1,0,0,1,1,1,2,0,0,0,1,1,0,1,0,1,1,0,0,1,0,0,0,0,0,
0,2,1,2,1,1,1,1,1,2,0,2,0,1,1,0,1,2,1,0,1,1,1,0,0,0,0,0,0,1,0,0,
2,1,1,0,1,2,0,0,1,1,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,2,1,0,1,
2,2,1,1,1,1,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,0,1,0,1,1,1,1,1,0,1,
1,2,2,0,0,0,0,0,1,1,0,0,0,0,2,1,0,0,0,0,0,2,0,0,2,2,0,0,2,0,0,1,
2,1,1,1,1,1,1,1,0,1,1,0,1,1,0,1,0,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,
1,1,2,0,0,3,1,0,2,1,1,1,0,0,1,1,1,0,0,0,1,1,0,0,0,1,0,0,1,0,1,0,
1,2,1,0,1,1,1,2,1,1,0,1,1,1,1,1,0,0,0,1,1,1,1,1,0,1,0,0,0,1,0,0,
2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,2,0,0,0,
2,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,1,0,1,
2,1,1,1,2,1,1,1,0,1,1,2,1,0,0,0,0,1,1,1,1,0,1,0,0,0,0,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,1,0,1,1,1,1,1,0,0,1,1,2,1,0,0,0,1,1,0,0,0,1,1,0,0,1,0,1,0,0,0,
1,2,1,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,
2,0,0,0,1,1,1,1,0,0,1,1,0,0,0,0,0,1,1,1,2,0,0,1,0,0,1,0,1,0,0,0,
0,1,1,1,1,1,1,1,1,2,0,1,1,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0,
1,0,0,1,1,1,1,1,0,0,2,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,
0,1,1,1,1,1,1,0,1,1,0,1,0,1,1,0,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,0,
1,0,0,1,1,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
0,1,1,1,1,1,0,0,1,1,0,1,0,1,0,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0,
0,0,0,1,0,0,0,0,0,0,1,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,1,1,1,0,1,0,0,1,1,0,1,0,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0,
2,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,0,1,0,0,1,0,1,0,1,1,1,0,0,1,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,1,1,1,1,0,0,0,1,1,1,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,
0,1,1,1,1,1,1,0,1,1,0,1,0,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,
)
Latin2HungarianModel = {
'charToOrderMap': Latin2_HungarianCharToOrderMap,
'precedenceMatrix': HungarianLangModel,
'mTypicalPositiveRatio': 0.947368,
'keepEnglishLetter': True,
'charsetName': "ISO-8859-2"
}
Win1250HungarianModel = {
'charToOrderMap': win1250HungarianCharToOrderMap,
'precedenceMatrix': HungarianLangModel,
'mTypicalPositiveRatio': 0.947368,
'keepEnglishLetter': True,
'charsetName': "windows-1250"
}
# flake8: noqa
| mit |
batisteo/pasportaservo | hosting/migrations/0042_create_visibility.py | 4 | 3311 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-07-17 10:47
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    dependencies = [
        # needed for the ForeignKey to contenttypes.ContentType below
        ('contenttypes', '0002_remove_content_type_name'),
        ('hosting', '0041_auto_20170929_1743'),
    ]
operations = [
migrations.CreateModel(
name='VisibilitySettings',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('model_type', models.CharField(default='Unknown', max_length=25, verbose_name='type')),
('model_id', models.PositiveIntegerField(null=True)),
('content_type', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType')),
('visible_online_public', models.BooleanField(verbose_name='visible online for all')),
('visible_online_authed', models.BooleanField(verbose_name='visible online w/authorization')),
('visible_in_book', models.BooleanField(verbose_name='visible in the book')),
],
options={'verbose_name': 'visibility settings', 'verbose_name_plural': 'visibility settings'},
),
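        # The proxy models below share the VisibilitySettings table; one
        # proxy per related field lets each field carry its own rules.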
migrations.CreateModel(
name='VisibilitySettingsForPublicEmail',
fields=[
],
options={
'proxy': True,
},
bases=('hosting.visibilitysettings',),
),
migrations.CreateModel(
name='VisibilitySettingsForPhone',
fields=[
],
options={
'proxy': True,
},
bases=('hosting.visibilitysettings',),
),
migrations.CreateModel(
name='VisibilitySettingsForPlace',
fields=[
],
options={
'proxy': True,
},
bases=('hosting.visibilitysettings',),
),
migrations.CreateModel(
name='VisibilitySettingsForFamilyMembers',
fields=[
],
options={
'proxy': True,
},
bases=('hosting.visibilitysettings',),
),
migrations.AddField(
model_name='profile',
name='email_visibility',
field=models.OneToOneField(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to='hosting.VisibilitySettings'),
),
migrations.AddField(
model_name='phone',
name='visibility',
field=models.OneToOneField(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to='hosting.VisibilitySettings'),
),
migrations.AddField(
model_name='place',
name='visibility',
field=models.OneToOneField(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to='hosting.VisibilitySettings'),
),
migrations.AddField(
model_name='place',
name='family_members_visibility',
field=models.OneToOneField(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to='hosting.VisibilitySettings'),
),
]
| agpl-3.0 |
freakynit/kaggle-ndsb | configurations/featharalick_bagged_convroll4_big_wd_maxout512.py | 6 | 2632 | import numpy as np
import theano
import theano.tensor as T
import lasagne as nn
import data
import load
import nn_plankton
import dihedral
import tmp_dnn
import tta
features = [
# "hu",
# "tutorial",
"haralick",
# "aaronmoments",
# "lbp",
# "pftas",
# "zernike_moments",
# "image_size",
]
batch_size = 128
chunk_size = 32768
num_chunks_train = 240
momentum = 0.9
learning_rate_schedule = {
0: 0.001,
100: 0.0001,
200: 0.00001,
}
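# keys are training chunk indices; the learning rate switches to the given
# value once that chunk is reached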
validate_every = 40
save_every = 240
if save_every > num_chunks_train:
    print "\n" * 5
    print "WARNING: NOT SAVING METADATA!!!"
    print "\n" * 5
sdir = "/mnt/storage/users/sedielem/git/kaggle-plankton/predictions/"
train_pred_file = ""
valid_pred_file = ""
test_pred_file = sdir+"bagged--test--convroll4_big_wd_maxout512--avg-probs.npy"
data_loader = load.PredictionsWithFeaturesDataLoader(
features = features,
train_pred_file=train_pred_file,
valid_pred_file=valid_pred_file,
test_pred_file=test_pred_file,
num_chunks_train=num_chunks_train,
chunk_size=chunk_size)
create_train_gen = lambda: data_loader.create_random_gen()
create_eval_train_gen = lambda: data_loader.create_fixed_gen("train")
create_eval_valid_gen = lambda: data_loader.create_fixed_gen("valid")
create_eval_test_gen = lambda: data_loader.create_fixed_gen("test")
def build_model():
l0 = nn.layers.InputLayer((batch_size, data.num_classes))
l0_size = nn.layers.InputLayer((batch_size, 52))
l1_size = nn.layers.DenseLayer(l0_size, num_units=80, W=nn_plankton.Orthogonal('relu'), b=nn.init.Constant(0.1))
l2_size = nn.layers.DenseLayer(l1_size, num_units=80, W=nn_plankton.Orthogonal('relu'), b=nn.init.Constant(0.1))
l3_size = nn.layers.DenseLayer(l2_size, num_units=data.num_classes, W=nn_plankton.Orthogonal(), b=nn.init.Constant(0.1), nonlinearity=None)
l1 = nn_plankton.NonlinLayer(l0, T.log)
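    # add the feature-network logits to the log of the bagged predictions,
    # then renormalize with a softmax below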
ltot = nn.layers.ElemwiseSumLayer([l1, l3_size])
# norm_by_sum = lambda x: x / x.sum(1).dimshuffle(0, "x")
lout = nn_plankton.NonlinLayer(ltot, nonlinearity=T.nnet.softmax)
return [l0, l0_size], lout
def build_objective(l_ins, l_out):
reg_param = 0.0002
alpha = 0. # 0 -> L2 1-> L1
print "regu", reg_param, alpha
# lambda_reg = 0.005
params = nn.layers.get_all_non_bias_params(l_out)
# reg_term = sum(T.sum(p**2) for p in params)
L2 = sum(T.sum(p**2) for p in params)
L1 = sum(T.sum(T.abs_(p)) for p in params)
def loss(y, t):
return nn_plankton.log_loss(y, t) + reg_param*(alpha * L1 + (1-alpha) * L2)
return nn.objectives.Objective(l_out, loss_function=loss) | mit |
thomsonreuters/electron | script/dump-symbols.py | 6 | 1697 | #!/usr/bin/env python
import os
import sys
from lib.config import PLATFORM
from lib.util import electron_gyp, execute, rm_rf
SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
DIST_DIR = os.path.join(SOURCE_ROOT, 'dist')
OUT_DIR = os.path.join(SOURCE_ROOT, 'out', 'R')
CHROMIUM_DIR = os.path.join(SOURCE_ROOT, 'vendor', 'download',
'libchromiumcontent', 'static_library')
def main(destination):
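  # rebuild the breakpad symbols directory from scratch for the built binary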
rm_rf(destination)
(project_name, product_name) = get_names_from_gyp()
if PLATFORM in ['darwin', 'linux']:
generate_breakpad_symbols = os.path.join(SOURCE_ROOT, 'tools', 'posix',
'generate_breakpad_symbols.py')
if PLATFORM == 'darwin':
start = os.path.join(OUT_DIR, '{0}.app'.format(product_name), 'Contents',
'MacOS', product_name)
else:
start = os.path.join(OUT_DIR, project_name)
args = [
'--build-dir={0}'.format(OUT_DIR),
'--binary={0}'.format(start),
'--symbols-dir={0}'.format(destination),
'--libchromiumcontent-dir={0}'.format(CHROMIUM_DIR),
'--clear',
'--jobs=16',
]
else:
generate_breakpad_symbols = os.path.join(SOURCE_ROOT, 'tools', 'win',
'generate_breakpad_symbols.py')
args = [
'--symbols-dir={0}'.format(destination),
'--jobs=16',
os.path.relpath(OUT_DIR),
]
execute([sys.executable, generate_breakpad_symbols] + args)
def get_names_from_gyp():
variables = electron_gyp()
return (variables['project_name%'], variables['product_name%'])
if __name__ == '__main__':
sys.exit(main(sys.argv[1]))
| mit |
Jai-Chaudhary/gensim | gensim/corpora/wikicorpus.py | 4 | 12990 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2010 Radim Rehurek <[email protected]>
# Copyright (C) 2012 Lars Buitinck <[email protected]>
# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html
"""
Construct a corpus from a Wikipedia (or other MediaWiki-based) database dump.
If you have the `pattern` package installed, this module will use a fancy
lemmatization to get a lemma of each token (instead of plain alphabetic
tokenizer). The package is available at https://github.com/clips/pattern .
See scripts/process_wiki.py for a canned (example) script based on this
module.
"""
import bz2
import logging
import re
from xml.etree.cElementTree import iterparse # LXML isn't faster, so let's go with the built-in solution
import multiprocessing
from gensim import utils
# cannot import whole gensim.corpora, because that imports wikicorpus...
from gensim.corpora.dictionary import Dictionary
from gensim.corpora.textcorpus import TextCorpus
logger = logging.getLogger('gensim.corpora.wikicorpus')
# ignore articles shorter than ARTICLE_MIN_WORDS characters (after full preprocessing)
ARTICLE_MIN_WORDS = 50
RE_P0 = re.compile('<!--.*?-->', re.DOTALL | re.UNICODE) # comments
RE_P1 = re.compile('<ref([> ].*?)(</ref>|/>)', re.DOTALL | re.UNICODE) # footnotes
RE_P2 = re.compile("(\n\[\[[a-z][a-z][\w-]*:[^:\]]+\]\])+$", re.UNICODE) # links to languages
RE_P3 = re.compile("{{([^}{]*)}}", re.DOTALL | re.UNICODE) # template
RE_P4 = re.compile("{{([^}]*)}}", re.DOTALL | re.UNICODE) # template
RE_P5 = re.compile('\[(\w+):\/\/(.*?)(( (.*?))|())\]', re.UNICODE) # remove URL, keep description
RE_P6 = re.compile("\[([^][]*)\|([^][]*)\]", re.DOTALL | re.UNICODE) # simplify links, keep description
RE_P7 = re.compile('\n\[\[[iI]mage(.*?)(\|.*?)*\|(.*?)\]\]', re.UNICODE) # keep description of images
RE_P8 = re.compile('\n\[\[[fF]ile(.*?)(\|.*?)*\|(.*?)\]\]', re.UNICODE) # keep description of files
RE_P9 = re.compile('<nowiki([> ].*?)(</nowiki>|/>)', re.DOTALL | re.UNICODE) # outside links
RE_P10 = re.compile('<math([> ].*?)(</math>|/>)', re.DOTALL | re.UNICODE) # math content
RE_P11 = re.compile('<(.*?)>', re.DOTALL | re.UNICODE) # all other tags
RE_P12 = re.compile('\n(({\|)|(\|-)|(\|}))(.*?)(?=\n)', re.UNICODE) # table formatting
RE_P13 = re.compile('\n(\||\!)(.*?\|)*([^|]*?)', re.UNICODE) # table cell formatting
RE_P14 = re.compile('\[\[Category:[^][]*\]\]', re.UNICODE) # categories
# Remove File and Image template
RE_P15 = re.compile('\[\[([fF]ile:|[iI]mage)[^]]*(\]\])', re.UNICODE)
def filter_wiki(raw):
"""
Filter out wiki mark-up from `raw`, leaving only text. `raw` is either unicode
or utf-8 encoded string.
"""
# parsing of the wiki markup is not perfect, but sufficient for our purposes
# contributions to improving this code are welcome :)
text = utils.to_unicode(raw, 'utf8', errors='ignore')
text = utils.decode_htmlentities(text) # '&nbsp;' --> '\xa0'
return remove_markup(text)
def remove_markup(text):
text = re.sub(RE_P2, "", text) # remove the last list (=languages)
# the wiki markup is recursive (markup inside markup etc)
# instead of writing a recursive grammar, here we deal with that by removing
# markup in a loop, starting with inner-most expressions and working outwards,
# for as long as something changes.
text = remove_template(text)
text = remove_file(text)
iters = 0
while True:
old, iters = text, iters + 1
text = re.sub(RE_P0, "", text) # remove comments
text = re.sub(RE_P1, '', text) # remove footnotes
text = re.sub(RE_P9, "", text) # remove outside links
text = re.sub(RE_P10, "", text) # remove math content
text = re.sub(RE_P11, "", text) # remove all remaining tags
text = re.sub(RE_P14, '', text) # remove categories
text = re.sub(RE_P5, '\\3', text) # remove urls, keep description
text = re.sub(RE_P6, '\\2', text) # simplify links, keep description only
# remove table markup
text = text.replace('||', '\n|') # each table cell on a separate line
text = re.sub(RE_P12, '\n', text) # remove formatting lines
text = re.sub(RE_P13, '\n\\3', text) # leave only cell content
# remove empty mark-up
text = text.replace('[]', '')
if old == text or iters > 2: # stop if nothing changed between two iterations or after a fixed number of iterations
break
# the following is needed to make the tokenizer see '[[socialist]]s' as a single word 'socialists'
# TODO is this really desirable?
text = text.replace('[', '').replace(']', '') # promote all remaining markup to plain text
return text
def remove_template(s):
"""Remove template wikimedia markup.
Return a copy of `s` with all the wikimedia markup template removed. See
http://meta.wikimedia.org/wiki/Help:Template for wikimedia templates
details.
    Note: Since templates can be nested, it is difficult to remove them using
    regular expressions.
"""
# Find the start and end position of each template by finding the opening
# '{{' and closing '}}'
n_open, n_close = 0, 0
starts, ends = [], []
in_template = False
prev_c = None
for i, c in enumerate(iter(s)):
if not in_template:
if c == '{' and c == prev_c:
starts.append(i - 1)
in_template = True
n_open = 1
if in_template:
if c == '{':
n_open += 1
elif c == '}':
n_close += 1
if n_open == n_close:
ends.append(i)
in_template = False
n_open, n_close = 0, 0
prev_c = c
# Remove all the templates
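    # s[end + 1:start] keeps the text between one template's closing '}}' and
    # the next template's opening '{{'; the sentinels None and -1 cover the
    # leading and trailing segments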
s = ''.join([s[end + 1:start] for start, end in
zip(starts + [None], [-1] + ends)])
return s
def remove_file(s):
"""Remove the 'File:' and 'Image:' markup, keeping the file caption.
Return a copy of `s` with all the 'File:' and 'Image:' markup replaced by
their corresponding captions. See http://www.mediawiki.org/wiki/Help:Images
for the markup details.
"""
# The regex RE_P15 match a File: or Image: markup
for match in re.finditer(RE_P15, s):
m = match.group(0)
caption = m[:-2].split('|')[-1]
s = s.replace(m, caption, 1)
return s
def tokenize(content):
"""
Tokenize a piece of text from wikipedia. The input string `content` is assumed
to be mark-up free (see `filter_wiki()`).
    Return list of tokens as utf8 bytestrings. Ignore words shorter than 2 or longer
    than 15 characters (not bytes!).
"""
# TODO maybe ignore tokens with non-latin characters? (no chinese, arabic, russian etc.)
return [token.encode('utf8') for token in utils.tokenize(content, lower=True, errors='ignore')
if 2 <= len(token) <= 15 and not token.startswith('_')]
def get_namespace(tag):
"""Returns the namespace of tag."""
m = re.match("^{(.*?)}", tag)
namespace = m.group(1) if m else ""
if not namespace.startswith("http://www.mediawiki.org/xml/export-"):
raise ValueError("%s not recognized as MediaWiki dump namespace"
% namespace)
return namespace
_get_namespace = get_namespace
def extract_pages(f, filter_namespaces=False):
"""
Extract pages from MediaWiki database dump.
Return an iterable over (str, str) which generates (title, content) pairs.
"""
elems = (elem for _, elem in iterparse(f, events=("end",)))
    # We can't rely on the namespace for database dumps, since it changes
    # every time a small modification to the format is made. So, determine
# those from the first element we find, which will be part of the metadata,
# and construct element paths.
elem = next(elems)
namespace = get_namespace(elem.tag)
ns_mapping = {"ns": namespace}
page_tag = "{%(ns)s}page" % ns_mapping
text_path = "./{%(ns)s}revision/{%(ns)s}text" % ns_mapping
title_path = "./{%(ns)s}title" % ns_mapping
ns_path = "./{%(ns)s}ns" % ns_mapping
pageid_path = "./{%(ns)s}id" % ns_mapping
for elem in elems:
if elem.tag == page_tag:
title = elem.find(title_path).text
text = elem.find(text_path).text
ns = elem.find(ns_path).text
if filter_namespaces and ns not in filter_namespaces:
text = None
pageid = elem.find(pageid_path).text
            yield title, text or "", pageid  # an empty page yields "" rather than None
# Prune the element tree, as per
# http://www.ibm.com/developerworks/xml/library/x-hiperfparse/
# except that we don't need to prune backlinks from the parent
# because we don't use LXML.
# We do this only for <page>s, since we need to inspect the
# ./revision/text element. The pages comprise the bulk of the
# file, so in practice we prune away enough.
elem.clear()
_extract_pages = extract_pages # for backward compatibility
def process_article(args):
"""
Parse a wikipedia article, returning its content as a list of tokens
(utf8-encoded strings).
"""
text, lemmatize, title, pageid = args
text = filter_wiki(text)
if lemmatize:
result = utils.lemmatize(text)
else:
result = tokenize(text)
return result, title, pageid
class WikiCorpus(TextCorpus):
"""
Treat a wikipedia articles dump (\*articles.xml.bz2) as a (read-only) corpus.
The documents are extracted on-the-fly, so that the whole (massive) dump
can stay compressed on disk.
>>> wiki = WikiCorpus('enwiki-20100622-pages-articles.xml.bz2') # create word->word_id mapping, takes almost 8h
>>> wiki.saveAsText('wiki_en_vocab200k') # another 8h, creates a file in MatrixMarket format plus file with id->word
"""
def __init__(self, fname, processes=None, lemmatize=utils.HAS_PATTERN, dictionary=None, filter_namespaces=('0',)):
"""
Initialize the corpus. Unless a dictionary is provided, this scans the
corpus once, to determine its vocabulary.
If `pattern` package is installed, use fancier shallow parsing to get
token lemmas. Otherwise, use simple regexp tokenization. You can override
this automatic logic by forcing the `lemmatize` parameter explicitly.
"""
self.fname = fname
self.filter_namespaces = filter_namespaces
self.metadata = False
if processes is None:
processes = max(1, multiprocessing.cpu_count() - 1)
self.processes = processes
self.lemmatize = lemmatize
if dictionary is None:
self.dictionary = Dictionary(self.get_texts())
else:
self.dictionary = dictionary
def get_texts(self):
"""
Iterate over the dump, returning text version of each article as a list
of tokens.
Only articles of sufficient length are returned (short articles & redirects
etc are ignored).
Note that this iterates over the **texts**; if you want vectors, just use
the standard corpus interface instead of this function::
>>> for vec in wiki_corpus:
>>> print(vec)
"""
articles, articles_all = 0, 0
positions, positions_all = 0, 0
texts = ((text, self.lemmatize, title, pageid) for title, text, pageid in extract_pages(bz2.BZ2File(self.fname), self.filter_namespaces))
pool = multiprocessing.Pool(self.processes)
# process the corpus in smaller chunks of docs, because multiprocessing.Pool
# is dumb and would load the entire input into RAM at once...
ignore_namespaces = 'Wikipedia Category File Portal Template MediaWiki User Help Book Draft'.split()
for group in utils.chunkize(texts, chunksize=10 * self.processes, maxsize=1):
for tokens, title, pageid in pool.imap(process_article, group): # chunksize=10):
articles_all += 1
positions_all += len(tokens)
# article redirects and short stubs are pruned here
if len(tokens) < ARTICLE_MIN_WORDS or any(title.startswith(ignore + ':') for ignore in ignore_namespaces):
continue
articles += 1
positions += len(tokens)
if self.metadata:
yield (tokens, (pageid, title))
else:
yield tokens
pool.terminate()
logger.info("finished iterating over Wikipedia corpus of %i documents with %i positions"
" (total %i articles, %i positions before pruning articles shorter than %i words)" %
(articles, positions, articles_all, positions_all, ARTICLE_MIN_WORDS))
self.length = articles # cache corpus length
# endclass WikiCorpus
| gpl-3.0 |
pylixm/sae-django-demo | django1.7-sae/site-packages/django/core/files/uploadedfile.py | 91 | 4334 | """
Classes representing uploaded files.
"""
import errno
import os
from io import BytesIO
from django.conf import settings
from django.core.files.base import File
from django.core.files import temp as tempfile
from django.utils.encoding import force_str
__all__ = ('UploadedFile', 'TemporaryUploadedFile', 'InMemoryUploadedFile',
'SimpleUploadedFile')
class UploadedFile(File):
"""
    An abstract uploaded file (``TemporaryUploadedFile`` and
``InMemoryUploadedFile`` are the built-in concrete subclasses).
An ``UploadedFile`` object behaves somewhat like a file object and
represents some file data that the user submitted with a form.
"""
DEFAULT_CHUNK_SIZE = 64 * 2 ** 10
def __init__(self, file=None, name=None, content_type=None, size=None, charset=None, content_type_extra=None):
super(UploadedFile, self).__init__(file, name)
self.size = size
self.content_type = content_type
self.charset = charset
self.content_type_extra = content_type_extra
def __repr__(self):
return force_str("<%s: %s (%s)>" % (
self.__class__.__name__, self.name, self.content_type))
def _get_name(self):
return self._name
def _set_name(self, name):
# Sanitize the file name so that it can't be dangerous.
if name is not None:
# Just use the basename of the file -- anything else is dangerous.
name = os.path.basename(name)
# File names longer than 255 characters can cause problems on older OSes.
if len(name) > 255:
name, ext = os.path.splitext(name)
ext = ext[:255]
name = name[:255 - len(ext)] + ext
self._name = name
name = property(_get_name, _set_name)
class TemporaryUploadedFile(UploadedFile):
"""
A file uploaded to a temporary location (i.e. stream-to-disk).
"""
def __init__(self, name, content_type, size, charset, content_type_extra=None):
if settings.FILE_UPLOAD_TEMP_DIR:
file = tempfile.NamedTemporaryFile(suffix='.upload',
dir=settings.FILE_UPLOAD_TEMP_DIR)
else:
file = tempfile.NamedTemporaryFile(suffix='.upload')
super(TemporaryUploadedFile, self).__init__(file, name, content_type, size, charset, content_type_extra)
def temporary_file_path(self):
"""
Returns the full path of this file.
"""
return self.file.name
def close(self):
try:
return self.file.close()
except OSError as e:
if e.errno != errno.ENOENT:
# Means the file was moved or deleted before the tempfile
# could unlink it. Still sets self.file.close_called and
# calls self.file.file.close() before the exception
raise
class InMemoryUploadedFile(UploadedFile):
"""
A file uploaded into memory (i.e. stream-to-memory).
"""
def __init__(self, file, field_name, name, content_type, size, charset, content_type_extra=None):
super(InMemoryUploadedFile, self).__init__(file, name, content_type, size, charset, content_type_extra)
self.field_name = field_name
def open(self, mode=None):
self.file.seek(0)
def chunks(self, chunk_size=None):
self.file.seek(0)
yield self.read()
def multiple_chunks(self, chunk_size=None):
# Since it's in memory, we'll never have multiple chunks.
return False
class SimpleUploadedFile(InMemoryUploadedFile):
"""
A simple representation of a file, which just has content, size, and a name.
"""
def __init__(self, name, content, content_type='text/plain'):
content = content or b''
super(SimpleUploadedFile, self).__init__(BytesIO(content), None, name,
content_type, len(content), None, None)
@classmethod
def from_dict(cls, file_dict):
"""
Creates a SimpleUploadedFile object from
a dictionary object with the following keys:
- filename
- content-type
- content
"""
return cls(file_dict['filename'],
file_dict['content'],
file_dict.get('content-type', 'text/plain'))
| apache-2.0 |
Soovox/django-socialregistration | socialregistration/contrib/openid/storage.py | 10 | 2208 | import base64
from openid.association import Association
from openid.store.interface import OpenIDStore
from socialregistration.contrib.openid.models import (OpenIDNonce,
OpenIDStore as OpenIDStoreModel)
class OpenIDStore(OpenIDStore):
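    # Subclasses (and intentionally shadows) the python-openid OpenIDStore
    # interface imported above, backed by Django models.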
max_nonce_age = 6 * 60 * 60
def storeAssociation(self, server_url, assoc=None):
stored_assoc = OpenIDStoreModel.objects.create(
server_url=server_url,
handle=assoc.handle,
secret=base64.encodestring(assoc.secret),
issued=assoc.issued,
            lifetime=assoc.lifetime,
assoc_type=assoc.assoc_type
)
def getAssociation(self, server_url, handle=None):
stored_assocs = OpenIDStoreModel.objects.filter(
server_url=server_url
)
if handle:
stored_assocs = stored_assocs.filter(handle=handle)
        stored_assocs = stored_assocs.order_by('-issued')  # order_by returns a new queryset
if stored_assocs.count() == 0:
return None
return_val = None
for stored_assoc in stored_assocs:
assoc = Association(
stored_assoc.handle, base64.decodestring(stored_assoc.secret),
stored_assoc.issued, stored_assoc.lifetime, stored_assoc.assoc_type
)
if assoc.getExpiresIn() == 0:
stored_assoc.delete()
else:
if return_val is None:
return_val = assoc
return return_val
def removeAssociation(self, server_url, handle):
stored_assocs = OpenIDStoreModel.objects.filter(
server_url=server_url
)
if handle:
stored_assocs = stored_assocs.filter(handle=handle)
stored_assocs.delete()
def useNonce(self, server_url, timestamp, salt):
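        # a nonce may be consumed only once: if it already exists this is a
        # replay and we return False; otherwise record it and return True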
try:
nonce = OpenIDNonce.objects.get(
server_url=server_url,
timestamp=timestamp,
salt=salt
)
except OpenIDNonce.DoesNotExist:
nonce = OpenIDNonce.objects.create(
server_url=server_url,
timestamp=timestamp,
salt=salt
)
return True
return False
| mit |
s-hertel/ansible | lib/ansible/cli/arguments/option_helpers.py | 12 | 17132 | # Copyright: (c) 2018, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import copy
import operator
import argparse
import os
import os.path
import sys
import time
import yaml
try:
import _yaml
HAS_LIBYAML = True
except ImportError:
HAS_LIBYAML = False
from jinja2 import __version__ as j2_version
import ansible
from ansible import constants as C
from ansible.module_utils._text import to_native
from ansible.release import __version__
from ansible.utils.path import unfrackpath
#
# Special purpose OptionParsers
#
class SortingHelpFormatter(argparse.HelpFormatter):
def add_arguments(self, actions):
actions = sorted(actions, key=operator.attrgetter('option_strings'))
super(SortingHelpFormatter, self).add_arguments(actions)
class AnsibleVersion(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
ansible_version = to_native(version(getattr(parser, 'prog')))
print(ansible_version)
parser.exit()
class UnrecognizedArgument(argparse.Action):
def __init__(self, option_strings, dest, const=True, default=None, required=False, help=None, metavar=None, nargs=0):
super(UnrecognizedArgument, self).__init__(option_strings=option_strings, dest=dest, nargs=nargs, const=const,
default=default, required=required, help=help)
def __call__(self, parser, namespace, values, option_string=None):
parser.error('unrecognized arguments: %s' % option_string)
class PrependListAction(argparse.Action):
"""A near clone of ``argparse._AppendAction``, but designed to prepend list values
instead of appending.
"""
def __init__(self, option_strings, dest, nargs=None, const=None, default=None, type=None,
choices=None, required=False, help=None, metavar=None):
if nargs == 0:
raise ValueError('nargs for append actions must be > 0; if arg '
'strings are not supplying the value to append, '
'the append const action may be more appropriate')
if const is not None and nargs != argparse.OPTIONAL:
raise ValueError('nargs must be %r to supply const' % argparse.OPTIONAL)
super(PrependListAction, self).__init__(
option_strings=option_strings,
dest=dest,
nargs=nargs,
const=const,
default=default,
type=type,
choices=choices,
required=required,
help=help,
metavar=metavar
)
def __call__(self, parser, namespace, values, option_string=None):
items = copy.copy(ensure_value(namespace, self.dest, []))
items[0:0] = values
setattr(namespace, self.dest, items)
def ensure_value(namespace, name, value):
if getattr(namespace, name, None) is None:
setattr(namespace, name, value)
return getattr(namespace, name)
#
# Callbacks to validate and normalize Options
#
def unfrack_path(pathsep=False):
"""Turn an Option's data into a single path in Ansible locations"""
def inner(value):
if pathsep:
return [unfrackpath(x) for x in value.split(os.pathsep) if x]
if value == '-':
return value
return unfrackpath(value)
return inner
def _git_repo_info(repo_path):
""" returns a string containing git branch, commit id and commit date """
result = None
if os.path.exists(repo_path):
# Check if the .git is a file. If it is a file, it means that we are in a submodule structure.
if os.path.isfile(repo_path):
try:
gitdir = yaml.safe_load(open(repo_path)).get('gitdir')
# There is a possibility the .git file to have an absolute path.
if os.path.isabs(gitdir):
repo_path = gitdir
else:
repo_path = os.path.join(repo_path[:-4], gitdir)
except (IOError, AttributeError):
return ''
with open(os.path.join(repo_path, "HEAD")) as f:
line = f.readline().rstrip("\n")
if line.startswith("ref:"):
branch_path = os.path.join(repo_path, line[5:])
else:
branch_path = None
if branch_path and os.path.exists(branch_path):
branch = '/'.join(line.split('/')[2:])
with open(branch_path) as f:
commit = f.readline()[:10]
else:
# detached HEAD
commit = line[:10]
branch = 'detached HEAD'
branch_path = os.path.join(repo_path, "HEAD")
date = time.localtime(os.stat(branch_path).st_mtime)
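        # local UTC offset in seconds (use the DST offset when the zone defines one)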
if time.daylight == 0:
offset = time.timezone
else:
offset = time.altzone
result = "({0} {1}) last updated {2} (GMT {3:+04d})".format(branch, commit, time.strftime("%Y/%m/%d %H:%M:%S", date), int(offset / -36))
else:
result = ''
return result
def _gitinfo():
basedir = os.path.normpath(os.path.join(os.path.dirname(__file__), '..', '..', '..', '..'))
repo_path = os.path.join(basedir, '.git')
return _git_repo_info(repo_path)
def version(prog=None):
""" return ansible version """
if prog:
result = [" ".join((prog, __version__))]
else:
result = [__version__]
gitinfo = _gitinfo()
if gitinfo:
result[0] = "{0} {1}".format(result[0], gitinfo)
result.append(" config file = %s" % C.CONFIG_FILE)
if C.DEFAULT_MODULE_PATH is None:
cpath = "Default w/o overrides"
else:
cpath = C.DEFAULT_MODULE_PATH
result.append(" configured module search path = %s" % cpath)
result.append(" ansible python module location = %s" % ':'.join(ansible.__path__))
result.append(" ansible collection location = %s" % ':'.join(C.COLLECTIONS_PATHS))
result.append(" executable location = %s" % sys.argv[0])
result.append(" python version = %s" % ''.join(sys.version.splitlines()))
result.append(" jinja version = %s" % j2_version)
result.append(" libyaml = %s" % HAS_LIBYAML)
return "\n".join(result)
#
# Functions to add pre-canned options to an OptionParser
#
def create_base_parser(prog, usage="", desc=None, epilog=None):
"""
Create an options parser for all ansible scripts
"""
# base opts
parser = argparse.ArgumentParser(
prog=prog,
formatter_class=SortingHelpFormatter,
epilog=epilog,
description=desc,
conflict_handler='resolve',
)
version_help = "show program's version number, config file location, configured module search path," \
" module location, executable location and exit"
parser.add_argument('--version', action=AnsibleVersion, nargs=0, help=version_help)
add_verbosity_options(parser)
return parser
def add_verbosity_options(parser):
"""Add options for verbosity"""
parser.add_argument('-v', '--verbose', dest='verbosity', default=C.DEFAULT_VERBOSITY, action="count",
help="verbose mode (-vvv for more, -vvvv to enable connection debugging)")
def add_async_options(parser):
"""Add options for commands which can launch async tasks"""
parser.add_argument('-P', '--poll', default=C.DEFAULT_POLL_INTERVAL, type=int, dest='poll_interval',
help="set the poll interval if using -B (default=%s)" % C.DEFAULT_POLL_INTERVAL)
parser.add_argument('-B', '--background', dest='seconds', type=int, default=0,
help='run asynchronously, failing after X seconds (default=N/A)')
def add_basedir_options(parser):
"""Add options for commands which can set a playbook basedir"""
parser.add_argument('--playbook-dir', default=C.config.get_config_value('PLAYBOOK_DIR'), dest='basedir', action='store',
help="Since this tool does not use playbooks, use this as a substitute playbook directory."
"This sets the relative path for many features including roles/ group_vars/ etc.",
type=unfrack_path())
def add_check_options(parser):
"""Add options for commands which can run with diagnostic information of tasks"""
parser.add_argument("-C", "--check", default=False, dest='check', action='store_true',
help="don't make any changes; instead, try to predict some of the changes that may occur")
parser.add_argument('--syntax-check', dest='syntax', action='store_true',
help="perform a syntax check on the playbook, but do not execute it")
parser.add_argument("-D", "--diff", default=C.DIFF_ALWAYS, dest='diff', action='store_true',
help="when changing (small) files and templates, show the differences in those"
" files; works great with --check")
def add_connect_options(parser):
"""Add options for commands which need to connection to other hosts"""
connect_group = parser.add_argument_group("Connection Options", "control as whom and how to connect to hosts")
connect_group.add_argument('-k', '--ask-pass', default=C.DEFAULT_ASK_PASS, dest='ask_pass', action='store_true',
help='ask for connection password')
connect_group.add_argument('--private-key', '--key-file', default=C.DEFAULT_PRIVATE_KEY_FILE, dest='private_key_file',
help='use this file to authenticate the connection', type=unfrack_path())
connect_group.add_argument('-u', '--user', default=C.DEFAULT_REMOTE_USER, dest='remote_user',
help='connect as this user (default=%s)' % C.DEFAULT_REMOTE_USER)
connect_group.add_argument('-c', '--connection', dest='connection', default=C.DEFAULT_TRANSPORT,
help="connection type to use (default=%s)" % C.DEFAULT_TRANSPORT)
connect_group.add_argument('-T', '--timeout', default=C.DEFAULT_TIMEOUT, type=int, dest='timeout',
help="override the connection timeout in seconds (default=%s)" % C.DEFAULT_TIMEOUT)
connect_group.add_argument('--ssh-common-args', default='', dest='ssh_common_args',
help="specify common arguments to pass to sftp/scp/ssh (e.g. ProxyCommand)")
connect_group.add_argument('--sftp-extra-args', default='', dest='sftp_extra_args',
help="specify extra arguments to pass to sftp only (e.g. -f, -l)")
connect_group.add_argument('--scp-extra-args', default='', dest='scp_extra_args',
help="specify extra arguments to pass to scp only (e.g. -l)")
connect_group.add_argument('--ssh-extra-args', default='', dest='ssh_extra_args',
help="specify extra arguments to pass to ssh only (e.g. -R)")
parser.add_argument_group(connect_group)
def add_fork_options(parser):
"""Add options for commands that can fork worker processes"""
parser.add_argument('-f', '--forks', dest='forks', default=C.DEFAULT_FORKS, type=int,
help="specify number of parallel processes to use (default=%s)" % C.DEFAULT_FORKS)
def add_inventory_options(parser):
"""Add options for commands that utilize inventory"""
parser.add_argument('-i', '--inventory', '--inventory-file', dest='inventory', action="append",
help="specify inventory host path or comma separated host list. --inventory-file is deprecated")
parser.add_argument('--list-hosts', dest='listhosts', action='store_true',
help='outputs a list of matching hosts; does not execute anything else')
parser.add_argument('-l', '--limit', default=C.DEFAULT_SUBSET, dest='subset',
help='further limit selected hosts to an additional pattern')
def add_meta_options(parser):
"""Add options for commands which can launch meta tasks from the command line"""
parser.add_argument('--force-handlers', default=C.DEFAULT_FORCE_HANDLERS, dest='force_handlers', action='store_true',
help="run handlers even if a task fails")
parser.add_argument('--flush-cache', dest='flush_cache', action='store_true',
help="clear the fact cache for every host in inventory")
def add_module_options(parser):
"""Add options for commands that load modules"""
module_path = C.config.get_configuration_definition('DEFAULT_MODULE_PATH').get('default', '')
parser.add_argument('-M', '--module-path', dest='module_path', default=None,
help="prepend colon-separated path(s) to module library (default=%s)" % module_path,
type=unfrack_path(pathsep=True), action=PrependListAction)
def add_output_options(parser):
"""Add options for commands which can change their output"""
parser.add_argument('-o', '--one-line', dest='one_line', action='store_true',
help='condense output')
parser.add_argument('-t', '--tree', dest='tree', default=None,
help='log output to this directory')
def add_runas_options(parser):
"""
Add options for commands which can run tasks as another user
Note that this includes the options from add_runas_prompt_options(). Only one of these
functions should be used.
"""
runas_group = parser.add_argument_group("Privilege Escalation Options", "control how and which user you become as on target hosts")
# consolidated privilege escalation (become)
runas_group.add_argument("-b", "--become", default=C.DEFAULT_BECOME, action="store_true", dest='become',
help="run operations with become (does not imply password prompting)")
runas_group.add_argument('--become-method', dest='become_method', default=C.DEFAULT_BECOME_METHOD,
help='privilege escalation method to use (default=%s)' % C.DEFAULT_BECOME_METHOD +
', use `ansible-doc -t become -l` to list valid choices.')
runas_group.add_argument('--become-user', default=None, dest='become_user', type=str,
help='run operations as this user (default=%s)' % C.DEFAULT_BECOME_USER)
add_runas_prompt_options(parser, runas_group=runas_group)
def add_runas_prompt_options(parser, runas_group=None):
"""
Add options for commands which need to prompt for privilege escalation credentials
Note that add_runas_options() includes these options already. Only one of the two functions
should be used.
"""
if runas_group is None:
runas_group = parser.add_argument_group("Privilege Escalation Options",
"control how and which user you become as on target hosts")
runas_group.add_argument('-K', '--ask-become-pass', dest='become_ask_pass', action='store_true',
default=C.DEFAULT_BECOME_ASK_PASS,
help='ask for privilege escalation password')
parser.add_argument_group(runas_group)
def add_runtask_options(parser):
"""Add options for commands that run a task"""
parser.add_argument('-e', '--extra-vars', dest="extra_vars", action="append",
help="set additional variables as key=value or YAML/JSON, if filename prepend with @", default=[])
def add_tasknoplay_options(parser):
"""Add options for commands that run a task w/o a defined play"""
parser.add_argument('--task-timeout', type=int, dest="task_timeout", action="store", default=C.TASK_TIMEOUT,
help="set task timeout limit in seconds, must be positive integer.")
def add_subset_options(parser):
"""Add options for commands which can run a subset of tasks"""
parser.add_argument('-t', '--tags', dest='tags', default=C.TAGS_RUN, action='append',
help="only run plays and tasks tagged with these values")
parser.add_argument('--skip-tags', dest='skip_tags', default=C.TAGS_SKIP, action='append',
help="only run plays and tasks whose tags do not match these values")
def add_vault_options(parser):
"""Add options for loading vault files"""
parser.add_argument('--vault-id', default=[], dest='vault_ids', action='append', type=str,
help='the vault identity to use')
base_group = parser.add_mutually_exclusive_group()
base_group.add_argument('--ask-vault-password', '--ask-vault-pass', default=C.DEFAULT_ASK_VAULT_PASS, dest='ask_vault_pass', action='store_true',
help='ask for vault password')
base_group.add_argument('--vault-password-file', '--vault-pass-file', default=[], dest='vault_password_files',
help="vault password file", type=unfrack_path(), action='append')
| gpl-3.0 |
brandonPurvis/osf.io | scripts/migrate_categories.py | 44 | 2635 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
from tests.base import OsfTestCase
from tests.factories import NodeFactory
from website.app import init_app
from website.project.model import Node
logger = logging.getLogger(__name__)
# legacy => new category
MIGRATE_MAP = {
'category': '',
'measure': 'methods and measures',
}
def main():
init_app()
migrate_nodes()
def migrate_category(node):
"""Migrate legacy, invalid category to new, valid category. Return whether
the node was changed.
"""
if node.category not in Node.CATEGORY_MAP.keys(): # invalid category
node.category = MIGRATE_MAP.get(node.category, 'other')
return True
return False
def migrate_nodes():
migrated_count = 0
for node in Node.find():
was_migrated = migrate_category(node)
if was_migrated:
node.save()
logger.info('Migrated {0}'.format(node._id))
migrated_count += 1
logger.info('Finished migrating {0} nodes.'.format(migrated_count))
class TestMigratingCategories(OsfTestCase):
def test_migrate_category(self):
node = NodeFactory(category='category')
was_migrated = migrate_category(node)
assert was_migrated is True
node.save()
assert node.category == ''
def test_migrate_measure(self):
node = NodeFactory(category='measure')
migrate_category(node)
node.save()
assert node.category == 'methods and measures'
def test_everything_else_is_migrated_to_other(self):
node1 = NodeFactory(category='background')
migrate_category(node1)
node1.save()
assert node1.category == 'other'
node2 = NodeFactory(category=u'プロジェクト')
migrate_category(node2)
node2.save()
assert node2.category == 'other'
def test_valid_categories_not_migrated(self):
node1 = NodeFactory(category='project')
node2 = NodeFactory(category='hypothesis')
was_migrated1 = migrate_category(node1)
was_migrated2 = migrate_category(node2)
node1.save()
node2.save()
assert was_migrated1 is False
assert was_migrated2 is False
assert node1.category == 'project'
assert node2.category == 'hypothesis'
class TestMigrateAll(OsfTestCase):
def test_migrate_categories_all(self):
n1 = NodeFactory(category='hypothesis')
n2 = NodeFactory(category='category')
migrate_nodes()
assert n1.category == 'hypothesis'
assert n2.category == ''
if __name__ == '__main__':
main()
| apache-2.0 |
akretion/purchase-workflow | purchase_requisition_delivery_address/__openerp__.py | 4 | 1242 | # -*- coding: utf-8 -*-
#
#
# Author: Yannick Vaucher
# Copyright 2014 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
{'name': "Purchase Requisition - Delivery Address",
'summary': "Adds delivery address on Purchase requisition",
'version': "0.1",
'author': "Camptocamp",
'category': "Purchase Management",
'license': "AGPL-3",
'complexity': "easy",
'images': [],
'depends': ['purchase_requisition',
'stock_dropshipping',
],
'demo': [],
'data': ['view/purchase_requisition.xml'],
'test': [],
'installable': True,
'auto_install': False,
}
| agpl-3.0 |
abtreece/ansible | lib/ansible/plugins/connection/jail.py | 8 | 8028 | # Based on local.py (c) 2012, Michael DeHaan <[email protected]>
# and chroot.py (c) 2013, Maykel Moya <[email protected]>
# (c) 2013, Michael Scherer <[email protected]>
# (c) 2015, Toshio Kuratomi <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import distutils.spawn
import os
import os.path
import pipes
import subprocess
import traceback
from ansible.errors import AnsibleError
from ansible.module_utils._text import to_bytes
from ansible.plugins.connection import ConnectionBase, BUFSIZE
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class Connection(ConnectionBase):
''' Local BSD Jail based connections '''
transport = 'jail'
# Pipelining may work. Someone needs to test by setting this to True and
# having pipelining=True in their ansible.cfg
has_pipelining = True
# Some become_methods may work in v2 (sudo works for other chroot-based
# plugins while su seems to be failing). If some work, check chroot.py to
# see how to disable just some methods.
become_methods = frozenset()
def __init__(self, play_context, new_stdin, *args, **kwargs):
super(Connection, self).__init__(play_context, new_stdin, *args, **kwargs)
self.jail = self._play_context.remote_addr
if os.geteuid() != 0:
raise AnsibleError("jail connection requires running as root")
self.jls_cmd = self._search_executable('jls')
self.jexec_cmd = self._search_executable('jexec')
if self.jail not in self.list_jails():
raise AnsibleError("incorrect jail name %s" % self.jail)
@staticmethod
def _search_executable(executable):
cmd = distutils.spawn.find_executable(executable)
if not cmd:
raise AnsibleError("%s command not found in PATH" % executable)
return cmd
def list_jails(self):
p = subprocess.Popen([self.jls_cmd, '-q', 'name'],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
return stdout.split()
def get_jail_path(self):
p = subprocess.Popen([self.jls_cmd, '-j', to_bytes(self.jail), '-q', 'path'],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
# remove \n
return stdout[:-1]
def _connect(self):
''' connect to the jail; nothing to do here '''
super(Connection, self)._connect()
if not self._connected:
display.vvv(u"ESTABLISH JAIL CONNECTION FOR USER: {0}".format(self._play_context.remote_user), host=self.jail)
self._connected = True
def _buffered_exec_command(self, cmd, stdin=subprocess.PIPE):
''' run a command on the jail. This is only needed for implementing
put_file() get_file() so that we don't have to read the whole file
into memory.
        compared to exec_command() it loses some niceties like being able to
return the process's exit code immediately.
'''
local_cmd = [self.jexec_cmd]
set_env = ''
if self._play_context.remote_user is not None:
local_cmd += ['-U', self._play_context.remote_user]
# update HOME since -U does not update the jail environment
set_env = 'HOME=~' + self._play_context.remote_user + ' '
local_cmd += [self.jail, self._play_context.executable, '-c', set_env + cmd]
display.vvv("EXEC %s" % (local_cmd,), host=self.jail)
local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd]
p = subprocess.Popen(local_cmd, shell=False, stdin=stdin,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
return p
def exec_command(self, cmd, in_data=None, sudoable=False):
''' run a command on the jail '''
super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)
p = self._buffered_exec_command(cmd)
stdout, stderr = p.communicate(in_data)
return (p.returncode, stdout, stderr)
def _prefix_login_path(self, remote_path):
''' Make sure that we put files into a standard path
If a path is relative, then we need to choose where to put it.
ssh chooses $HOME but we aren't guaranteed that a home dir will
exist in any given chroot. So for now we're choosing "/" instead.
This also happens to be the former default.
Can revisit using $HOME instead if it's a problem
'''
if not remote_path.startswith(os.path.sep):
remote_path = os.path.join(os.path.sep, remote_path)
return os.path.normpath(remote_path)
def put_file(self, in_path, out_path):
''' transfer a file from local to jail '''
super(Connection, self).put_file(in_path, out_path)
display.vvv("PUT %s TO %s" % (in_path, out_path), host=self.jail)
out_path = pipes.quote(self._prefix_login_path(out_path))
try:
with open(to_bytes(in_path, errors='surrogate_or_strict'), 'rb') as in_file:
try:
p = self._buffered_exec_command('dd of=%s bs=%s' % (out_path, BUFSIZE), stdin=in_file)
except OSError:
raise AnsibleError("jail connection requires dd command in the jail")
try:
stdout, stderr = p.communicate()
                except Exception:
traceback.print_exc()
raise AnsibleError("failed to transfer file %s to %s" % (in_path, out_path))
if p.returncode != 0:
raise AnsibleError("failed to transfer file %s to %s:\n%s\n%s" % (in_path, out_path, stdout, stderr))
except IOError:
raise AnsibleError("file or module does not exist at: %s" % in_path)
def fetch_file(self, in_path, out_path):
''' fetch a file from jail to local '''
super(Connection, self).fetch_file(in_path, out_path)
display.vvv("FETCH %s TO %s" % (in_path, out_path), host=self.jail)
in_path = pipes.quote(self._prefix_login_path(in_path))
try:
p = self._buffered_exec_command('dd if=%s bs=%s' % (in_path, BUFSIZE))
except OSError:
raise AnsibleError("jail connection requires dd command in the jail")
with open(to_bytes(out_path, errors='surrogate_or_strict'), 'wb+') as out_file:
try:
chunk = p.stdout.read(BUFSIZE)
while chunk:
out_file.write(chunk)
chunk = p.stdout.read(BUFSIZE)
            except Exception:
traceback.print_exc()
raise AnsibleError("failed to transfer file %s to %s" % (in_path, out_path))
stdout, stderr = p.communicate()
if p.returncode != 0:
raise AnsibleError("failed to transfer file %s to %s:\n%s\n%s" % (in_path, out_path, stdout, stderr))
def close(self):
''' terminate the connection; nothing to do here '''
super(Connection, self).close()
self._connected = False
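# --- Illustrative sketch, not part of the upstream plugin ---
# Exercises the path rule from _prefix_login_path without a live jail.
# Running this file directly assumes Ansible is importable, since the
# module-level imports above require it; the sample paths are made up.
if __name__ == '__main__':
    for sample in ('etc/motd', '/etc/motd', 'var/../etc/motd'):
        anchored = sample if sample.startswith(os.path.sep) else os.path.join(os.path.sep, sample)
        print('%s -> %s' % (sample, os.path.normpath(anchored)))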
| mit |
Maximilian-Reuter/SickRage-1 | lib/guessit/rules/properties/video_codec.py | 19 | 2897 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
video_codec and video_profile property
"""
from rebulk.remodule import re
from rebulk import Rebulk, Rule, RemoveMatch
from ..common import dash
from ..common.validators import seps_after, seps_before, seps_surround
def video_codec():
"""
Builder for rebulk object.
:return: Created Rebulk object
:rtype: Rebulk
"""
rebulk = Rebulk().regex_defaults(flags=re.IGNORECASE, abbreviations=[dash]).string_defaults(ignore_case=True)
rebulk.defaults(name="video_codec")
rebulk.regex(r"Rv\d{2}", value="Real")
rebulk.regex("Mpeg2", value="Mpeg2")
rebulk.regex("DVDivX", "DivX", value="DivX")
rebulk.regex("XviD", value="XviD")
rebulk.regex("[hx]-?264(?:-?AVC(HD)?)?", "MPEG-?4(?:-?AVC(HD)?)", "AVCHD", value="h264")
rebulk.regex("[hx]-?265(?:-?HEVC)?", "HEVC", value="h265")
# http://blog.mediacoderhq.com/h264-profiles-and-levels/
# http://fr.wikipedia.org/wiki/H.264
rebulk.defaults(name="video_profile", validator=seps_surround)
rebulk.regex('10.?bit', 'Hi10P', value='10bit')
rebulk.regex('8.?bit', value='8bit')
rebulk.string('BP', value='BP', tags='video_profile.rule')
rebulk.string('XP', 'EP', value='XP', tags='video_profile.rule')
rebulk.string('MP', value='MP', tags='video_profile.rule')
rebulk.string('HP', 'HiP', value='HP', tags='video_profile.rule')
rebulk.regex('Hi422P', value='Hi422P', tags='video_profile.rule')
rebulk.regex('Hi444PP', value='Hi444PP', tags='video_profile.rule')
rebulk.string('DXVA', value='DXVA', name='video_api')
rebulk.rules(ValidateVideoCodec, VideoProfileRule)
return rebulk
class ValidateVideoCodec(Rule):
"""
    Validate that video_codec matches are preceded by separators or a format
    match, and followed by separators.
"""
priority = 64
consequence = RemoveMatch
def when(self, matches, context):
ret = []
for codec in matches.named('video_codec'):
if not seps_before(codec) and \
not matches.at_index(codec.start - 1, lambda match: match.name == 'format'):
ret.append(codec)
continue
if not seps_after(codec):
ret.append(codec)
continue
return ret
class VideoProfileRule(Rule):
"""
Rule to validate video_profile
"""
consequence = RemoveMatch
def when(self, matches, context):
profile_list = matches.named('video_profile', lambda match: 'video_profile.rule' in match.tags)
ret = []
for profile in profile_list:
codec = matches.previous(profile, lambda match: match.name == 'video_codec')
if not codec:
codec = matches.next(profile, lambda match: match.name == 'video_codec')
if not codec:
ret.append(profile)
return ret
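# Illustrative usage, not part of the module. Because of the relative
# imports above this file cannot be run directly, but once the package is
# importable the builder can be exercised like this (the release name is a
# made-up sample):
#
#     from guessit.rules.properties.video_codec import video_codec
#     for match in video_codec().matches('Show.S01E01.1080p.x264-GRP'):
#         print(match.name, match.value)   # e.g. video_codec h264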
| gpl-3.0 |
nttks/jenkins-test | cms/djangoapps/contentstore/git_export_utils.py | 12 | 7181 | """
Utilities for exporting a course's XML into a git repository,
committing and pushing the changes.
"""
import logging
import os
import subprocess
from urlparse import urlparse
from django.conf import settings
from django.contrib.auth.models import User
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from xmodule.contentstore.django import contentstore
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.xml_exporter import export_to_xml
log = logging.getLogger(__name__)
GIT_REPO_EXPORT_DIR = getattr(settings, 'GIT_REPO_EXPORT_DIR', None)
GIT_EXPORT_DEFAULT_IDENT = getattr(settings, 'GIT_EXPORT_DEFAULT_IDENT',
{'name': 'STUDIO_EXPORT_TO_GIT',
'email': '[email protected]'})
class GitExportError(Exception):
"""
Convenience exception class for git export error conditions.
"""
NO_EXPORT_DIR = _("GIT_REPO_EXPORT_DIR not set or path {0} doesn't exist, "
"please create it, or configure a different path with "
"GIT_REPO_EXPORT_DIR".format(GIT_REPO_EXPORT_DIR))
    URL_BAD = _('Non-writable git URL provided. Expecting something like:'
                ' [email protected]:mitocw/edx4edx_lite.git')
    URL_NO_AUTH = _('If using HTTP URLs, you must provide the username '
                    'and password in the URL. Similar to '
                    'https://user:[email protected]/user/course.')
DETACHED_HEAD = _('Unable to determine branch, repo in detached HEAD mode')
CANNOT_PULL = _('Unable to update or clone git repository.')
XML_EXPORT_FAIL = _('Unable to export course to xml.')
CONFIG_ERROR = _('Unable to configure git username and password')
CANNOT_COMMIT = _('Unable to commit changes. This is usually '
'because there are no changes to be committed')
CANNOT_PUSH = _('Unable to push changes. This is usually '
'because the remote repository cannot be contacted')
BAD_COURSE = _('Bad course location provided')
MISSING_BRANCH = _('Missing branch on fresh clone')
def cmd_log(cmd, cwd):
"""
Helper function to redirect stderr to stdout and log the command
used along with the output. Will raise subprocess.CalledProcessError if
command doesn't return 0, and returns the command's output.
"""
output = subprocess.check_output(cmd, cwd=cwd, stderr=subprocess.STDOUT)
log.debug('Command was: {0!r}. '
'Working directory was: {1!r}'.format(' '.join(cmd), cwd))
log.debug('Command output was: {0!r}'.format(output))
return output
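# For example (illustrative; the checkout path is hypothetical):
#
#     branch = cmd_log(['git', 'symbolic-ref', '--short', 'HEAD'],
#                      cwd='/edx/export/myrepo').strip('\n')
#
# A non-zero exit status raises subprocess.CalledProcessError rather than
# returning output.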
def export_to_git(course_id, repo, user='', rdir=None):
"""Export a course to git."""
# pylint: disable=too-many-statements
if not GIT_REPO_EXPORT_DIR:
raise GitExportError(GitExportError.NO_EXPORT_DIR)
if not os.path.isdir(GIT_REPO_EXPORT_DIR):
raise GitExportError(GitExportError.NO_EXPORT_DIR)
# Check for valid writable git url
if not (repo.endswith('.git') or
repo.startswith(('http:', 'https:', 'file:'))):
raise GitExportError(GitExportError.URL_BAD)
# Check for username and password if using http[s]
if repo.startswith('http:') or repo.startswith('https:'):
parsed = urlparse(repo)
if parsed.username is None or parsed.password is None:
raise GitExportError(GitExportError.URL_NO_AUTH)
if rdir:
rdir = os.path.basename(rdir)
else:
rdir = repo.rsplit('/', 1)[-1].rsplit('.git', 1)[0]
log.debug("rdir = %s", rdir)
# Pull or clone repo before exporting to xml
# and update url in case origin changed.
rdirp = '{0}/{1}'.format(GIT_REPO_EXPORT_DIR, rdir)
branch = None
if os.path.exists(rdirp):
log.info('Directory already exists, doing a git reset and pull '
'instead of git clone.')
cwd = rdirp
# Get current branch
cmd = ['git', 'symbolic-ref', '--short', 'HEAD']
try:
branch = cmd_log(cmd, cwd).strip('\n')
except subprocess.CalledProcessError as ex:
log.exception('Failed to get branch: %r', ex.output)
raise GitExportError(GitExportError.DETACHED_HEAD)
cmds = [
['git', 'remote', 'set-url', 'origin', repo],
['git', 'fetch', 'origin'],
['git', 'reset', '--hard', 'origin/{0}'.format(branch)],
['git', 'pull'],
['git', 'clean', '-d', '-f'],
]
else:
cmds = [['git', 'clone', repo]]
cwd = GIT_REPO_EXPORT_DIR
cwd = os.path.abspath(cwd)
for cmd in cmds:
try:
cmd_log(cmd, cwd)
except subprocess.CalledProcessError as ex:
log.exception('Failed to pull git repository: %r', ex.output)
raise GitExportError(GitExportError.CANNOT_PULL)
    # export course as xml before committing and pushing
root_dir = os.path.dirname(rdirp)
course_dir = os.path.basename(rdirp).rsplit('.git', 1)[0]
try:
export_to_xml(modulestore(), contentstore(), course_id,
root_dir, course_dir)
except (EnvironmentError, AttributeError):
log.exception('Failed export to xml')
raise GitExportError(GitExportError.XML_EXPORT_FAIL)
# Get current branch if not already set
if not branch:
cmd = ['git', 'symbolic-ref', '--short', 'HEAD']
try:
branch = cmd_log(cmd, os.path.abspath(rdirp)).strip('\n')
except subprocess.CalledProcessError as ex:
log.exception('Failed to get branch from freshly cloned repo: %r',
ex.output)
raise GitExportError(GitExportError.MISSING_BRANCH)
# Now that we have fresh xml exported, set identity, add
# everything to git, commit, and push to the right branch.
ident = {}
try:
user = User.objects.get(username=user)
ident['name'] = user.username
ident['email'] = user.email
except User.DoesNotExist:
# That's ok, just use default ident
ident = GIT_EXPORT_DEFAULT_IDENT
time_stamp = timezone.now()
cwd = os.path.abspath(rdirp)
    commit_msg = 'Export from Studio by {0} at {1}'.format(user, time_stamp)
try:
cmd_log(['git', 'config', 'user.email', ident['email']], cwd)
cmd_log(['git', 'config', 'user.name', ident['name']], cwd)
except subprocess.CalledProcessError as ex:
log.exception('Error running git configure commands: %r', ex.output)
raise GitExportError(GitExportError.CONFIG_ERROR)
try:
cmd_log(['git', 'add', '.'], cwd)
cmd_log(['git', 'commit', '-a', '-m', commit_msg], cwd)
except subprocess.CalledProcessError as ex:
log.exception('Unable to commit changes: %r', ex.output)
raise GitExportError(GitExportError.CANNOT_COMMIT)
try:
cmd_log(['git', 'push', '-q', 'origin', branch], cwd)
except subprocess.CalledProcessError as ex:
log.exception('Error running git push command: %r', ex.output)
raise GitExportError(GitExportError.CANNOT_PUSH)
| agpl-3.0 |
marrocamp/weevely3 | modules/file/tar.py | 16 | 1981 | from core.vectors import PhpFile, ModuleExec
from core.module import Module
from core import messages
from core import modules
from core.loggers import log
import os
class Tar(Module):
"""Compress or expand tar archives."""
aliases = [ 'tar' ]
def init(self):
self.register_info(
{
'author': [
'Emilio Pinna'
],
'license': 'GPLv3'
}
)
self.register_vectors(
[
PhpFile(
payload_path = os.path.join(self.folder, 'php_tar.tpl'),
name = 'php_tar',
)
]
)
self.register_arguments([
{ 'name' : 'rtar', 'help' : 'Remote Tar file' },
{ 'name' : 'rfiles', 'help' : 'Remote files to compress. If decompressing, set destination folder.', 'nargs' : '+' },
{ 'name' : '--decompress', 'action' : 'store_true', 'default' : False, 'help' : 'Simulate tar -x' },
{ 'name' : '-z', 'action' : 'store_true', 'default' : False, 'help' : 'Simulate tar -xz for gzip compressed archives' },
{ 'name' : '-j', 'action' : 'store_true', 'default' : False, 'help' : 'Simulate tar -xj for bzip2 compressed archives' },
])
def run(self):
if self.args.get('z'):
ModuleExec('file_gzip', [ '--keep', '--decompress', self.args['rtar'] ]).run()
self.args['rtar'] = '.'.join(self.args['rtar'].split('.')[:-1])
elif self.args.get('j'):
ModuleExec('file_bzip2', [ '--keep', '--decompress', self.args['rtar'] ]).run()
self.args['rtar'] = '.'.join(self.args['rtar'].split('.')[:-1])
# The correct execution returns something only on errors
result_err = self.vectors.get_result(
name = 'php_tar',
format_args = self.args,
)
if result_err:
log.warn(result_err)
return
return True
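# Illustrative terminal usage (the module name follows weevely's
# folder-based naming and is assumed here to be :file_tar):
#
#     :file_tar /tmp/archive.tar /tmp/extracted --decompress
#     :file_tar /tmp/archive.tar.gz /tmp/extracted --decompress -z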
| gpl-3.0 |
mfherbst/spack | var/spack/repos/builtin/packages/mvapich2/package.py | 2 | 10014 | ##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import sys
from spack import *
from spack.error import SpackError
def _process_manager_validator(values):
if len(values) > 1 and 'slurm' in values:
raise SpackError(
'slurm cannot be activated along with other process managers'
)
class Mvapich2(AutotoolsPackage):
"""MVAPICH2 is an MPI implementation for Infiniband networks."""
homepage = "http://mvapich.cse.ohio-state.edu/"
url = "http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-2.2.tar.gz"
list_url = "http://mvapich.cse.ohio-state.edu/downloads/"
version('2.3rc2', '6fcf22fe2a16023b462ef57614daa357')
version('2.3rc1', '386d79ae36b2136d203826465ad8b6cc')
version('2.3a', '87c3fbf8a755b53806fa9ecb21453445')
# Prefer the latest stable release
version('2.3', sha256='01d5fb592454ddd9ecc17e91c8983b6aea0e7559aa38f410b111c8ef385b50dd', preferred=True)
version('2.2', '939b65ebe5b89a5bc822cdab0f31f96e')
version('2.1', '0095ceecb19bbb7fb262131cb9c2cdd6')
version('2.0', '9fbb68a4111a8b6338e476dc657388b4')
provides('mpi')
provides('mpi@:3.0')
variant('debug', default=False,
description='Enable debug info and error messages at run-time')
variant('cuda', default=False,
description='Enable CUDA extension')
variant('regcache', default=True,
description='Enable memory registration cache')
# Accepted values are:
# single - No threads (MPI_THREAD_SINGLE)
# funneled - Only the main thread calls MPI (MPI_THREAD_FUNNELED)
# serialized - User serializes calls to MPI (MPI_THREAD_SERIALIZED)
# multiple - Fully multi-threaded (MPI_THREAD_MULTIPLE)
# runtime - Alias to "multiple"
variant(
'threads',
default='multiple',
values=('single', 'funneled', 'serialized', 'multiple'),
multi=False,
description='Control the level of thread support'
)
# 32 is needed when job size exceeds 32768 cores
variant(
'ch3_rank_bits',
default='32',
values=('16', '32'),
multi=False,
description='Number of bits allocated to the rank field (16 or 32)'
)
variant(
'process_managers',
description='List of the process managers to activate',
values=('slurm', 'hydra', 'gforker', 'remshell'),
multi=True,
validator=_process_manager_validator
)
variant(
'fabrics',
description='The fabric enabled for this build',
default='psm',
values=(
'psm', 'sock', 'nemesisib', 'nemesis', 'mrail', 'nemesisibtcp',
'nemesistcpib'
)
)
variant(
'alloca',
default=False,
description='Use alloca to allocate temporary memory if available'
)
variant(
'file_systems',
description='List of the ROMIO file systems to activate',
values=('lustre', 'gpfs', 'nfs', 'ufs'),
multi=True
)
depends_on('bison', type='build')
depends_on('libpciaccess', when=(sys.platform != 'darwin'))
depends_on('cuda', when='+cuda')
depends_on('psm', when='fabrics=psm')
filter_compiler_wrappers(
'mpicc', 'mpicxx', 'mpif77', 'mpif90', 'mpifort', relative_root='bin'
)
@property
def libs(self):
query_parameters = self.spec.last_query.extra_parameters
libraries = ['libmpi']
if 'cxx' in query_parameters:
libraries = ['libmpicxx'] + libraries
return find_libraries(
libraries, root=self.prefix, shared=True, recursive=True
)
@property
def process_manager_options(self):
spec = self.spec
other_pms = []
for x in ('hydra', 'gforker', 'remshell'):
if 'process_managers={0}'.format(x) in spec:
other_pms.append(x)
opts = []
if len(other_pms) > 0:
opts = ['--with-pm=%s' % ':'.join(other_pms)]
# See: http://slurm.schedmd.com/mpi_guide.html#mvapich2
if 'process_managers=slurm' in spec:
opts = [
'--with-pmi=pmi2',
'--with-pm=slurm'
]
return opts
@property
def network_options(self):
opts = []
# From here on I can suppose that only one variant has been selected
if 'fabrics=psm' in self.spec:
opts = [
"--with-device=ch3:psm",
"--with-psm={0}".format(self.spec['psm'].prefix)
]
elif 'fabrics=sock' in self.spec:
opts = ["--with-device=ch3:sock"]
elif 'fabrics=nemesistcpib' in self.spec:
opts = ["--with-device=ch3:nemesis:tcp,ib"]
elif 'fabrics=nemesisibtcp' in self.spec:
opts = ["--with-device=ch3:nemesis:ib,tcp"]
elif 'fabrics=nemesisib' in self.spec:
opts = ["--with-device=ch3:nemesis:ib"]
elif 'fabrics=nemesis' in self.spec:
opts = ["--with-device=ch3:nemesis"]
elif 'fabrics=mrail' in self.spec:
opts = ["--with-device=ch3:mrail", "--with-rdma=gen2"]
return opts
@property
def file_system_options(self):
spec = self.spec
fs = []
for x in ('lustre', 'gpfs', 'nfs', 'ufs'):
if 'file_systems={0}'.format(x) in spec:
fs.append(x)
opts = []
if len(fs) > 0:
opts.append('--with-file-system=%s' % '+'.join(fs))
return opts
def setup_environment(self, spack_env, run_env):
spec = self.spec
# mvapich2 configure fails when F90 and F90FLAGS are set
spack_env.unset('F90')
spack_env.unset('F90FLAGS')
if 'process_managers=slurm' in spec:
run_env.set('SLURM_MPI_TYPE', 'pmi2')
def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
spack_env.set('MPICC', join_path(self.prefix.bin, 'mpicc'))
spack_env.set('MPICXX', join_path(self.prefix.bin, 'mpicxx'))
spack_env.set('MPIF77', join_path(self.prefix.bin, 'mpif77'))
spack_env.set('MPIF90', join_path(self.prefix.bin, 'mpif90'))
spack_env.set('MPICH_CC', spack_cc)
spack_env.set('MPICH_CXX', spack_cxx)
spack_env.set('MPICH_F77', spack_f77)
spack_env.set('MPICH_F90', spack_fc)
spack_env.set('MPICH_FC', spack_fc)
def setup_dependent_package(self, module, dependent_spec):
self.spec.mpicc = join_path(self.prefix.bin, 'mpicc')
self.spec.mpicxx = join_path(self.prefix.bin, 'mpicxx')
self.spec.mpifc = join_path(self.prefix.bin, 'mpif90')
self.spec.mpif77 = join_path(self.prefix.bin, 'mpif77')
self.spec.mpicxx_shared_libs = [
join_path(self.prefix.lib, 'libmpicxx.{0}'.format(dso_suffix)),
join_path(self.prefix.lib, 'libmpi.{0}'.format(dso_suffix))
]
@run_before('configure')
def die_without_fortran(self):
# Until we can pass variants such as +fortran through virtual
# dependencies depends_on('mpi'), require Fortran compiler to
# avoid delayed build errors in dependents.
if (self.compiler.f77 is None) or (self.compiler.fc is None):
raise InstallError(
'Mvapich2 requires both C and Fortran compilers!'
)
def configure_args(self):
spec = self.spec
args = [
'--enable-shared',
'--enable-romio',
'--disable-silent-rules',
'--disable-new-dtags',
'--enable-fortran=all',
"--enable-threads={0}".format(spec.variants['threads'].value),
"--with-ch3-rank-bits={0}".format(
spec.variants['ch3_rank_bits'].value),
]
args.extend(self.enable_or_disable('alloca'))
if '+debug' in self.spec:
args.extend([
'--disable-fast',
'--enable-error-checking=runtime',
'--enable-error-messages=all',
# Permits debugging with TotalView
'--enable-g=dbg',
'--enable-debuginfo'
])
else:
args.append('--enable-fast=all')
if '+cuda' in self.spec:
args.extend([
'--enable-cuda',
'--with-cuda={0}'.format(spec['cuda'].prefix)
])
else:
args.append('--disable-cuda')
if '+regcache' in self.spec:
args.append('--enable-registration-cache')
else:
args.append('--disable-registration-cache')
args.extend(self.process_manager_options)
args.extend(self.network_options)
args.extend(self.file_system_options)
return args
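# Illustrative specs only: the variants above combine on the Spack command
# line, e.g.
#
#     spack install mvapich2@2.3 fabrics=mrail process_managers=slurm \
#         threads=multiple file_systems=lustre,nfs
#
# Multi-valued variants such as file_systems take comma-separated values.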
| lgpl-2.1 |
ernestask/meson | tools/ac_converter.py | 8 | 19281 | #!/usr/bin/env python3
# Copyright 2015 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
help_message = """Usage: %s <config.h.meson>
This script reads config.h.meson, looks for header
checks and writes the corresponding meson declaration.
Copy config.h.in to config.h.meson, replace #undef
with #mesondefine and run this. We can't do this automatically
because some configure scripts have #undef statements
that are unrelated to configure checks.
"""
import sys
# Add stuff here as it is encountered.
function_data = \
{'HAVE_FEENABLEEXCEPT': ('feenableexcept', 'fenv.h'),
'HAVE_FECLEAREXCEPT': ('feclearexcept', 'fenv.h'),
'HAVE_FEDISABLEEXCEPT': ('fedisableexcept', 'fenv.h'),
'HAVE_MMAP': ('mmap', 'sys/mman.h'),
'HAVE_GETPAGESIZE': ('getpagesize', 'unistd.h'),
'HAVE_GETISAX': ('getisax', 'sys/auxv.h'),
'HAVE_GETTIMEOFDAY': ('gettimeofday', 'sys/time.h'),
'HAVE_MPROTECT': ('mprotect', 'sys/mman.h'),
'HAVE_POSIX_MEMALIGN': ('posix_memalign', 'stdlib.h'),
'HAVE_SIGACTION': ('sigaction', 'signal.h'),
'HAVE_ALARM': ('alarm', 'unistd.h'),
'HAVE_CTIME_R': ('ctime_r', 'time.h'),
'HAVE_DRAND48': ('drand48', 'stdlib.h'),
'HAVE_FLOCKFILE': ('flockfile', 'stdio.h'),
'HAVE_FORK': ('fork', 'unistd.h'),
'HAVE_FUNLOCKFILE': ('funlockfile', 'stdio.h'),
'HAVE_GETLINE': ('getline', 'stdio.h'),
'HAVE_LINK': ('link', 'unistd.h'),
'HAVE_RAISE': ('raise', 'signal.h'),
'HAVE_STRNDUP': ('strndup', 'string.h'),
'HAVE_SCHED_GETAFFINITY': ('sched_getaffinity', 'sched.h'),
'HAVE_WAITPID': ('waitpid', 'sys/wait.h'),
'HAVE_XRENDERCREATECONICALGRADIENT': ('XRenderCreateConicalGradient', 'xcb/render.h'),
'HAVE_XRENDERCREATELINEARGRADIENT': ('XRenderCreateLinearGradient', 'xcb/render.h'),
'HAVE_XRENDERCREATERADIALGRADIENT': ('XRenderCreateRadialGradient', 'xcb/render.h'),
'HAVE_XRENDERCREATESOLIDFILL': ('XRenderCreateSolidFill', 'xcb/render.h'),
'HAVE_DCGETTEXT': ('dcgettext', 'libintl.h'),
'HAVE_ENDMNTENT': ('endmntent', 'mntent.h'),
'HAVE_ENDSERVENT': ('endservent', 'netdb.h'),
'HAVE_EVENTFD': ('eventfd', 'sys/eventfd.h'),
'HAVE_FALLOCATE': ('fallocate', 'fcntl.h'),
'HAVE_FCHMOD': ('fchmod', 'sys/stat.h'),
'HAVE_FCHOWN': ('fchown', 'unistd.h'),
'HAVE_FDWALK': ('fdwalk', 'stdlib.h'),
'HAVE_FSYNC': ('fsync', 'unistd.h'),
'HAVE_GETC_UNLOCKED': ('getc_unlocked', 'stdio.h'),
'HAVE_GETFSSTAT': ('getfsstat', 'sys/mount.h'),
'HAVE_GETMNTENT_R': ('getmntent_r', 'mntent.h'),
'HAVE_GETPROTOBYNAME_R': ('getprotobyname_r', 'netdb.h'),
'HAVE_GETRESUID': ('getresuid', 'unistd.h'),
'HAVE_GETVFSSTAT': ('getvfsstat', 'sys/statvfs.h'),
'HAVE_GMTIME_R': ('gmtime_r', 'time.h'),
'HAVE_HASMNTOPT': ('hasmntopt', 'mntent.h'),
'HAVE_IF_INDEXTONAME': ('if_indextoname', 'net/if.h'),
'HAVE_IF_NAMETOINDEX': ('if_nametoindex', 'net/if.h'),
'HAVE_INOTIFY_INIT1': ('inotify_init1', 'sys/inotify.h'),
'HAVE_ISSETUGID': ('issetugid', 'unistd.h'),
'HAVE_KEVENT': ('kevent', 'sys/event.h'),
'HAVE_KQUEUE': ('kqueue', 'sys/event.h'),
'HAVE_LCHMOD': ('lchmod', 'sys/stat.h'),
'HAVE_LCHOWN': ('lchown', 'unistd.h'),
'HAVE_LSTAT': ('lstat', 'sys/stat.h'),
'HAVE_MEMCPY': ('memcpy', 'string.h'),
'HAVE_MEMALIGN': ('memalign', 'stdlib.h'),
'HAVE_MEMMEM': ('memmem', 'string.h'),
'HAVE_NEWLOCALE': ('newlocale', 'locale.h'),
'HAVE_PIPE2': ('pipe2', 'fcntl.h'),
'HAVE_POLL': ('poll', 'poll.h'),
'HAVE_PRLIMIT': ('prlimit', 'sys/resource.h'),
'HAVE_PTHREAD_ATTR_SETSTACKSIZE': ('pthread_attr_setstacksize', 'pthread.h'),
'HAVE_PTHREAD_CONDATTR_SETCLOCK': ('pthread_condattr_setclock', 'pthread.h'),
'HAVE_PTHREAD_COND_TIMEDWAIT_RELATIVE_NP': ('pthread_cond_timedwait_relative_np', 'pthread.h'),
'HAVE_READLINK': ('readlink', 'unistd.h'),
'HAVE_RES_INIT': ('res_init', 'resolv.h'),
'HAVE_SENDMMSG': ('sendmmsg', 'sys/socket.h'),
'HAVE_SOCKET': ('socket', 'sys/socket.h'),
'HAVE_GETENV': ('getenv', 'stdlib.h'),
'HAVE_SETENV': ('setenv', 'stdlib.h'),
'HAVE_PUTENV': ('putenv', 'stdlib.h'),
'HAVE_UNSETENV': ('unsetenv', 'stdlib.h'),
'HAVE_SETMNTENT': ('setmntent', 'mntent.h'),
'HAVE_SNPRINTF': ('snprintf', 'stdio.h'),
'HAVE_SPLICE': ('splice', 'fcntl.h'),
'HAVE_STATFS': ('statfs', 'mount.h'),
'HAVE_STATVFS': ('statvfs', 'sys/statvfs.h'),
 'HAVE_STPCPY': ('stpcpy', 'string.h'),
'HAVE_STRCASECMP': ('strcasecmp', 'strings.h'),
'HAVE_STRLCPY': ('strlcpy', 'string.h'),
'HAVE_STRNCASECMP': ('strncasecmp', 'strings.h'),
'HAVE_STRSIGNAL': ('strsignal', 'signal.h'),
'HAVE_STRTOD_L': ('strtod_l', 'stdlib.h'),
'HAVE_STRTOLL_L': ('strtoll_l', 'stdlib.h'),
'HAVE_STRTOULL_L': ('strtoull_l', 'stdlib.h'),
'HAVE_SYMLINK': ('symlink', 'unistd.h'),
'HAVE_SYSCTLBYNAME': ('sysctlbyname', 'sys/sysctl.h'),
'HAVE_TIMEGM': ('timegm', 'time.h'),
'HAVE_USELOCALE': ('uselocale', 'xlocale.h'),
'HAVE_UTIMES': ('utimes', 'sys/time.h'),
'HAVE_VALLOC': ('valloc', 'stdlib.h'),
'HAVE_VASPRINTF': ('vasprintf', 'stdio.h'),
'HAVE_VSNPRINTF': ('vsnprintf', 'stdio.h'),
'HAVE_BCOPY': ('bcopy', 'strings.h'),
'HAVE_STRERROR': ('strerror', 'string.h'),
'HAVE_MEMMOVE': ('memmove', 'string.h'),
'HAVE_STRTOIMAX': ('strtoimax', 'inttypes.h'),
'HAVE_STRTOLL': ('strtoll', 'stdlib.h'),
'HAVE_STRTOQ': ('strtoq', 'stdlib.h'),
'HAVE_ACCEPT4': ('accept4', 'sys/socket.h'),
'HAVE_CHMOD': ('chmod', 'sys/stat.h'),
'HAVE_CHOWN': ('chown', 'unistd.h'),
'HAVE_FSTAT': ('fstat', 'sys/stat.h'),
'HAVE_GETADDRINFO': ('getaddrinfo', 'netdb.h'),
'HAVE_GETGRGID_R': ('getgrgid_r', 'grp.h'),
'HAVE_GETGRNAM_R': ('getgrnam_r', 'grp.h'),
'HAVE_GETGROUPS': ('getgroups', 'grp.h'),
'HAVE_GETOPT_LONG': ('getopt_long', 'getopt.h'),
 'HAVE_GETPWNAM_R': ('getpwnam_r', 'pwd.h'),
'HAVE_GETPWUID_R': ('getpwuid_r', 'pwd.h'),
'HAVE_GETUID': ('getuid', 'unistd.h'),
'HAVE_LRINTF': ('lrintf', 'math.h'),
'HAVE_DECL_ISNAN': ('isnan', 'math.h'),
'HAVE_DECL_ISINF': ('isinf', 'math.h'),
'HAVE_ROUND': ('round', 'math.h'),
'HAVE_NEARBYINT': ('nearbyint', 'math.h'),
'HAVE_RINT': ('rint', 'math.h'),
'HAVE_MKFIFO': ('mkfifo', 'sys/stat.h'),
'HAVE_MLOCK': ('mlock', 'sys/mman.h'),
'HAVE_NANOSLEEP': ('nanosleep', 'time.h'),
'HAVE_PIPE': ('pipe', 'unistd.h'),
'HAVE_PPOLL': ('ppoll', 'poll.h'),
'HAVE_REGEXEC': ('regexec', 'regex.h'),
'HAVE_SETEGID': ('setegid', 'unistd.h'),
'HAVE_SETEUID': ('seteuid', 'unistd.h'),
'HAVE_SETPGID': ('setpgid', 'unistd.h'),
'HAVE_SETREGID': ('setregid', 'unistd.h'),
'HAVE_SETRESGID': ('setresgid', 'unistd.h'),
'HAVE_SETRESUID': ('setresuid', 'unistd.h'),
'HAVE_SHM_OPEN': ('shm_open', 'fcntl.h'),
'HAVE_SLEEP': ('sleep', 'unistd.h'),
'HAVE_STRERROR_R': ('strerror_r', 'string.h'),
'HAVE_STRTOF': ('strtof', 'stdlib.h'),
'HAVE_SYSCONF': ('sysconf', 'unistd.h'),
'HAVE_USLEEP': ('usleep', 'unistd.h'),
'HAVE_VFORK': ('vfork', 'unistd.h'),
'HAVE_MALLOC': ('malloc', 'stdlib.h'),
'HAVE_CALLOC': ('calloc', 'stdlib.h'),
'HAVE_REALLOC': ('realloc', 'stdlib.h'),
'HAVE_FREE': ('free', 'stdlib.h'),
'HAVE_ALLOCA': ('alloca', 'alloca.h'),
'HAVE_QSORT': ('qsort', 'stdlib.h'),
'HAVE_ABS': ('abs', 'stdlib.h'),
'HAVE_MEMSET': ('memset', 'string.h'),
'HAVE_MEMCMP': ('memcmp', 'string.h'),
'HAVE_STRLEN': ('strlen', 'string.h'),
'HAVE_STRLCAT': ('strlcat', 'string.h'),
'HAVE_STRDUP': ('strdup', 'string.h'),
'HAVE__STRREV': ('_strrev', 'string.h'),
'HAVE__STRUPR': ('_strupr', 'string.h'),
'HAVE__STRLWR': ('_strlwr', 'string.h'),
'HAVE_INDEX': ('index', 'strings.h'),
'HAVE_RINDEX': ('rindex', 'strings.h'),
'HAVE_STRCHR': ('strchr', 'string.h'),
'HAVE_STRRCHR': ('strrchr', 'string.h'),
'HAVE_STRSTR': ('strstr', 'string.h'),
'HAVE_STRTOL': ('strtol', 'stdlib.h'),
'HAVE_STRTOUL': ('strtoul', 'stdlib.h'),
'HAVE_STRTOULL': ('strtoull', 'stdlib.h'),
'HAVE_STRTOD': ('strtod', 'stdlib.h'),
'HAVE_ATOI': ('atoi', 'stdlib.h'),
'HAVE_ATOF': ('atof', 'stdlib.h'),
'HAVE_STRCMP': ('strcmp', 'string.h'),
'HAVE_STRNCMP': ('strncmp', 'string.h'),
'HAVE_VSSCANF': ('vsscanf', 'stdio.h'),
'HAVE_CHROOT': ('chroot', 'unistd.h'),
'HAVE_CLOCK': ('clock', 'time.h'),
'HAVE_CLOCK_GETRES': ('clock_getres', 'time.h'),
'HAVE_CLOCK_GETTIME': ('clock_gettime', 'time.h'),
'HAVE_CLOCK_SETTIME': ('clock_settime', 'time.h'),
'HAVE_CONFSTR': ('confstr', 'time.h'),
'HAVE_CTERMID': ('ctermid', 'stdio.h'),
'HAVE_DIRFD': ('dirfd', 'dirent.h'),
'HAVE_DLOPEN': ('dlopen', 'dlfcn.h'),
'HAVE_DUP2': ('dup2', 'unistd.h'),
'HAVE_DUP3': ('dup3', 'unistd.h'),
'HAVE_EPOLL_CREATE1': ('epoll_create1', 'sys/epoll.h'),
'HAVE_ERF': ('erf', 'math.h'),
'HAVE_ERFC': ('erfc', 'math.h'),
'HAVE_EXECV': ('execv', 'unistd.h'),
'HAVE_FACCESSAT': ('faccessat', 'unistd.h'),
'HAVE_FCHDIR': ('fchdir', 'unistd.h'),
'HAVE_FCHMODAT': ('fchmodat', 'sys/stat.h'),
'HAVE_FDATASYNC': ('fdatasync', 'unistd.h'),
'HAVE_FDOPENDIR': ('fdopendir', 'dirent.h'),
'HAVE_FEXECVE': ('fexecve', 'unistd.h'),
'HAVE_FLOCK': ('flock', 'sys/file.h'),
'HAVE_FORKPTY': ('forkpty', 'pty.h'),
'HAVE_FPATHCONF': ('fpathconf', 'unistd.h'),
'HAVE_FSTATAT': ('fstatat', 'unistd.h'),
'HAVE_FSTATVFS': ('fstatvfs', 'sys/statvfs.h'),
'HAVE_FTELLO': ('ftello', 'stdio.h'),
'HAVE_FTIME': ('ftime', 'sys/timeb.h'),
'HAVE_FTRUNCATE': ('ftruncate', 'unistd.h'),
'HAVE_FUTIMENS': ('futimens', 'sys/stat.h'),
'HAVE_FUTIMES': ('futimes', 'sys/time.h'),
'HAVE_GAI_STRERROR': ('gai_strerror', 'netdb.h'),
'HAVE_GETGROUPLIST': ('getgrouplist', 'grp.h'),
'HAVE_GETHOSTBYNAME': ('gethostbyname', 'netdb.h'),
'HAVE_GETHOSTBYNAME_R': ('gethostbyname_r', 'netdb.h'),
'HAVE_GETITIMER': ('getitimer', 'sys/time.h'),
'HAVE_GETLOADAVG': ('getloadavg', 'stdlib.h'),
'HAVE_GETLOGIN': ('getlogin', 'unistd.h'),
'HAVE_GETNAMEINFO': ('getnameinfo', 'netdb.h'),
'HAVE_GETPEERNAME': ('getpeername', 'sys/socket.h'),
'HAVE_GETPGID': ('getpgid', 'unistd.h'),
'HAVE_GETPGRP': ('getpgrp', 'unistd.h'),
'HAVE_GETPID': ('getpid', 'unistd.h'),
'HAVE_GETPRIORITY': ('getpriority', 'sys/resource.h'),
'HAVE_GETPWENT': ('getpwent', 'pwd.h'),
'HAVE_GETRANDOM': ('getrandom', 'linux/random.h'),
'HAVE_GETRESGID': ('getresgid', 'unistd.h'),
'HAVE_GETSID': ('getsid', 'unistd.h'),
'HAVE_GETSPENT': ('getspent', 'shadow.h'),
'HAVE_GETSPNAM': ('getspnam', 'shadow.h'),
'HAVE_GETWD': ('getwd', 'unistd.h'),
'HAVE_HSTRERROR': ('hstrerror', 'netdb.h'),
'HAVE_HTOLE64': ('htole64', 'endian.h'),
'HAVE_IF_NAMEINDEX': ('if_nameindex', 'net/if.h'),
'HAVE_INET_ATON': ('inet_aton', 'arpa/inet.h'),
'HAVE_INET_PTON': ('inet_pton', 'arpa/inet.h'),
'HAVE_INITGROUPS': ('initgroups', 'grp.h'),
'HAVE_KILL': ('kill', 'signal.h'),
'HAVE_KILLPG': ('killpg', 'signal.h'),
'HAVE_LINKAT': ('linkat', 'unistd.h'),
'HAVE_LOCKF': ('lockf', 'unistd.h'),
'HAVE_LUTIMES': ('lutimes', 'sys/time.h'),
'HAVE_MAKEDEV': ('makedev', 'sys/sysmacros.h'),
'HAVE_MBRTOWC': ('mbrtowc', 'wchar.h'),
'HAVE_MEMRCHR': ('memrchr', 'string.h'),
'HAVE_MKDIRAT': ('mkdirat', 'sys/stat.h'),
'HAVE_MKFIFOAT': ('mkfifoat', 'sys/stat.h'),
'HAVE_MKNOD': ('mknod', 'unistd.h'),
'HAVE_MKNODAT': ('mknodat', 'unistd.h'),
'HAVE_MKTIME': ('mktime', 'unistd.h'),
 'HAVE_MREMAP': ('mremap', 'sys/mman.h'),
'HAVE_NICE': ('nice', 'unistd.h'),
'HAVE_OPENAT': ('openat', 'fcntl.h'),
'HAVE_OPENPTY': ('openpty', 'pty.h'),
'HAVE_PATHCONF': ('pathconf', 'unistd.h'),
'HAVE_PAUSE': ('pause', 'unistd.h'),
'HAVE_PREAD': ('pread', 'unistd.h'),
'HAVE_PTHREAD_KILL': ('pthread_kill', 'signal.h'),
'HAVE_PTHREAD_SIGMASK': ('pthread_sigmask', 'signal.h'),
'HAVE_PWRITE': ('pwrite', 'unistd.h'),
'HAVE_READLINKAT': ('readlinkat', 'unistd.h'),
'HAVE_READV': ('readv', 'sys/uio.h'),
 'HAVE_RENAMEAT': ('renameat', 'stdio.h'),
'HAVE_SCHED_GET_PRIORITY_MAX': ('sched_get_priority_max', 'sched.h'),
'HAVE_SCHED_RR_GET_INTERVAL': ('sched_rr_get_interval', 'sched.h'),
'HAVE_SCHED_SETAFFINITY': ('sched_setaffinity', 'sched.h'),
'HAVE_SCHED_SETPARAM': ('sched_setparam', 'sched.h'),
'HAVE_SCHED_SETSCHEDULER': ('sched_setscheduler', 'sched.h'),
'HAVE_SELECT': ('select', 'sys/select.h'),
'HAVE_SEM_GETVALUE': ('sem_getvalue', 'semaphore.h'),
'HAVE_SEM_OPEN': ('sem_open', 'semaphore.h'),
'HAVE_SEM_TIMEDWAIT': ('sem_timedwait', 'semaphore.h'),
'HAVE_SEM_UNLINK': ('sem_unlink', 'semaphore.h'),
'HAVE_SENDFILE': ('sendfile', 'sys/sendfile.h'),
'HAVE_SETGID': ('setgid', 'unistd.h'),
'HAVE_SETGROUPS': ('setgroups', 'grp.h'),
'HAVE_SETHOSTNAME': ('sethostname', 'unistd.h'),
'HAVE_SETITIMER': ('setitimer', 'sys/time.h'),
'HAVE_SETLOCALE': ('setlocale', 'locale.h'),
'HAVE_SETPGRP': ('setpgrp', 'unistd.h'),
'HAVE_SETPRIORITY': ('setpriority', 'sys/resource.h'),
'HAVE_SETREUID': ('setreuid', 'unistd.h'),
'HAVE_SETSID': ('setsid', 'unistd.h'),
'HAVE_SETUID': ('setuid', 'unistd.h'),
'HAVE_SETVBUF': ('setvbuf', 'unistd.h'),
'HAVE_SIGALTSTACK': ('sigaltstack', 'signal.h'),
'HAVE_SIGINTERRUPT': ('siginterrupt', 'signal.h'),
'HAVE_SIGPENDING': ('sigpending', 'signal.h'),
'HAVE_SIGRELSE': ('sigrelse', 'signal.h'),
'HAVE_SIGTIMEDWAIT': ('sigtimedwait', 'signal.h'),
'HAVE_SIGWAIT': ('sigwait', 'signal.h'),
'HAVE_SIGWAITINFO': ('sigwaitinfo', 'signal.h'),
'HAVE_SOCKETPAIR': ('socketpair', 'sys/socket.h'),
'HAVE_STRFTIME': ('strftime', 'time.h'),
'HAVE_SYMLINKAT': ('symlinkat', 'unistd.h'),
'HAVE_SYNC': ('sync', 'unistd.h'),
'HAVE_TCGETPGRP': ('tcgetpgrp', 'unistd.h'),
'HAVE_TCSETPGRP': ('tcsetpgrp', 'unistd.h'),
'HAVE_TEMPNAM': ('tempnam', 'stdio.h'),
'HAVE_TIMES': ('times', 'sys/times.h'),
'HAVE_TEMPFILE': ('tempfile', 'stdio.h'),
'HAVE_TMPNAM': ('tmpnam', 'stdio.h'),
'HAVE_TMPNAM_R': ('tmpnam_r', 'stdio.h'),
'HAVE_TRUNCATE': ('truncate', 'unistd.h'),
'HAVE_TZNAME': ('tzname', 'time.h'),
'HAVE_UNAME': ('uname', 'sys/utsname.h'),
'HAVE_UNLINKAT': ('unlinkat', 'unistd.h'),
'HAVE_UTIMENSAT': ('utimensat', 'sys/stat.h'),
'HAVE_WAIT3': ('wait3', 'sys/wait.h'),
'HAVE_WAIT4': ('wait4', 'sys/wait.h'),
'HAVE_WAITID': ('waitid', 'sys/wait.h'),
'HAVE_WRITEV': ('writev', 'sys/uio.h'),
'HAVE_WMEMCMP': ('wmemcmp', 'wchar.h'),
'HAVE_ATAN': ('atan', 'math.h'),
'HAVE_ATAN2': ('atan2', 'math.h'),
'HAVE_ACOS': ('acos', 'math.h'),
'HAVE_ACOSH': ('acosh', 'math.h'),
'HAVE_ASIN': ('asin', 'math.h'),
'HAVE_ASINH': ('asinh', 'math.h'),
'HAVE_ATANH': ('atanh', 'math.h'),
'HAVE_CEIL': ('ceil', 'math.h'),
'HAVE_COPYSIGN': ('copysign', 'math.h'),
'HAVE_COS': ('cos', 'math.h'),
'HAVE_COSH': ('cosh', 'math.h'),
'HAVE_COSF': ('cosf', 'math.h'),
'HAVE_EXPM1': ('expm1', 'math.h'),
'HAVE_FABS': ('fabs', 'math.h'),
'HAVE_FINITE': ('finite', 'math.h'),
'HAVE_FLOOR': ('floor', 'math.h'),
'HAVE_GAMMA': ('gamma', 'math.h'),
'HAVE_HYPOT': ('hypot', 'math.h'),
'HAVE_ISINF': ('isinf', 'math.h'),
'HAVE_LOG': ('log', 'math.h'),
'HAVE_LOG1P': ('log1p', 'math.h'),
'HAVE_LOG2': ('log2', 'math.h'),
'HAVE_LGAMMA': ('lgamma', 'math.h'),
'HAVE_POW': ('pow', 'math.h'),
'HAVE_SCALBN': ('scalbn', 'math.h'),
'HAVE_SIN': ('sin', 'math.h'),
'HAVE_SINF': ('sinf', 'math.h'),
'HAVE_SINH': ('sinh', 'math.h'),
'HAVE_SQRT': ('sqrt', 'math.h'),
'HAVE_TGAMMA': ('tgamma', 'math.h'),
'HAVE_FSEEKO': ('fseeko', 'stdio.h'),
'HAVE_FSEEKO64': ('fseeko64', 'stdio.h'),
'HAVE_SETJMP': ('setjmp', 'setjmp.h'),
'HAVE_PTHREAD_SETNAME_NP': ('pthread_setname_np', 'pthread.h'),
'HAVE_PTHREAD_SET_NAME_NP': ('pthread_set_name_np', 'pthread.h'),
}
headers = []
functions = []
sizes = []
if len(sys.argv) != 2:
print(help_message % sys.argv[0])
sys.exit(0)
with open(sys.argv[1]) as f:
for line in f:
line = line.strip()
arr = line.split()
# Check for headers.
if line.startswith('#mesondefine') and line.endswith('_H'):
token = line.split()[1]
tarr = token.split('_')[1:-1]
tarr = [x.lower() for x in tarr]
hname = '/'.join(tarr) + '.h'
headers.append((token, hname))
# Check for functions.
try:
token = arr[1]
if token in function_data:
fdata = function_data[token]
functions.append((token, fdata[0], fdata[1]))
elif token.startswith('HAVE_') and not token.endswith('_H'):
functions.append((token, ))
except Exception:
pass
# Check for sizeof tests.
if len(arr) != 2:
continue
elem = arr[1]
if elem.startswith('SIZEOF_'):
typename = elem.split('_', 1)[1] \
.replace('_P', '*') \
.replace('_', ' ') \
.lower() \
.replace('size t', 'size_t')
sizes.append((elem, typename))
print('''cc = meson.get_compiler('c')
cdata = configuration_data()''')
# Convert header checks.
print('check_headers = [')
for token, hname in headers:
print(" ['%s', '%s']," % (token, hname))
print(']\n')
print('''foreach h : check_headers
if cc.has_header(h.get(1))
cdata.set(h.get(0), 1)
endif
endforeach
''')
# Convert function checks.
print('check_functions = [')
for token in functions:
if len(token) == 3:
token, fdata0, fdata1 = token
print(" ['%s', '%s', '#include<%s>']," % (token, fdata0, fdata1))
else:
print('# check token', token)
print(']\n')
print('''foreach f : check_functions
if cc.has_function(f.get(1), prefix : f.get(2))
cdata.set(f.get(0), 1)
endif
endforeach
''')
# Convert sizeof checks.
for elem, typename in sizes:
print("cdata.set('%s', cc.sizeof('%s'))" % (elem, typename))
print('''
configure_file(input : 'config.h.meson',
output : 'config.h',
configuration : cdata)''')
| apache-2.0 |
alrusdi/lettuce | tests/integration/lib/Django-1.3/django/core/files/move.py | 403 | 2931 | """
Move a file in the safest way possible::
>>> from django.core.files.move import file_move_safe
>>> file_move_safe("/tmp/old_file", "/tmp/new_file")
"""
import os
from django.core.files import locks
try:
from shutil import copystat
except ImportError:
import stat
def copystat(src, dst):
"""Copy all stat info (mode bits, atime and mtime) from src to dst"""
st = os.stat(src)
mode = stat.S_IMODE(st.st_mode)
if hasattr(os, 'utime'):
os.utime(dst, (st.st_atime, st.st_mtime))
if hasattr(os, 'chmod'):
os.chmod(dst, mode)
__all__ = ['file_move_safe']
def _samefile(src, dst):
# Macintosh, Unix.
if hasattr(os.path,'samefile'):
try:
return os.path.samefile(src, dst)
except OSError:
return False
# All other platforms: check for same pathname.
return (os.path.normcase(os.path.abspath(src)) ==
os.path.normcase(os.path.abspath(dst)))
def file_move_safe(old_file_name, new_file_name, chunk_size = 1024*64, allow_overwrite=False):
"""
Moves a file from one location to another in the safest way possible.
First, tries ``os.rename``, which is simple but will break across filesystems.
If that fails, streams manually from one file to another in pure Python.
If the destination file exists and ``allow_overwrite`` is ``False``, this
function will throw an ``IOError``.
"""
# There's no reason to move if we don't have to.
if _samefile(old_file_name, new_file_name):
return
try:
os.rename(old_file_name, new_file_name)
return
except OSError:
# This will happen with os.rename if moving to another filesystem
# or when moving opened files on certain operating systems
pass
# first open the old file, so that it won't go away
old_file = open(old_file_name, 'rb')
try:
# now open the new file, not forgetting allow_overwrite
fd = os.open(new_file_name, os.O_WRONLY | os.O_CREAT | getattr(os, 'O_BINARY', 0) |
(not allow_overwrite and os.O_EXCL or 0))
try:
locks.lock(fd, locks.LOCK_EX)
current_chunk = None
while current_chunk != '':
current_chunk = old_file.read(chunk_size)
os.write(fd, current_chunk)
finally:
locks.unlock(fd)
os.close(fd)
finally:
old_file.close()
copystat(old_file_name, new_file_name)
try:
os.remove(old_file_name)
except OSError, e:
# Certain operating systems (Cygwin and Windows)
# fail when deleting opened files, ignore it. (For the
# systems where this happens, temporary files will be auto-deleted
# on close anyway.)
if getattr(e, 'winerror', 0) != 32 and getattr(e, 'errno', 0) != 13:
raise
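# Further illustration of the overwrite behaviour (paths are examples):
#
#     file_move_safe('/tmp/old', '/tmp/new')                        # errors if /tmp/new exists
#     file_move_safe('/tmp/old', '/tmp/new', allow_overwrite=True)  # replaces /tmp/new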
| gpl-3.0 |
morenopc/edx-platform | lms/djangoapps/courseware/migrations/0003_done_grade_cache.py | 194 | 8745 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# NOTE (vshnayder): This constraint has the wrong field order, so it doesn't actually
# do anything in sqlite. Migration 0004 actually removes this index for sqlite.
# Removing unique constraint on 'StudentModule', fields ['module_id', 'module_type', 'student']
db.delete_unique('courseware_studentmodule', ['module_id', 'module_type', 'student_id'])
# Adding field 'StudentModule.max_grade'
db.add_column('courseware_studentmodule', 'max_grade', self.gf('django.db.models.fields.FloatField')(null=True, blank=True), keep_default=False)
# Adding field 'StudentModule.done'
db.add_column('courseware_studentmodule', 'done', self.gf('django.db.models.fields.CharField')(default='na', max_length=8, db_index=True), keep_default=False)
# Adding unique constraint on 'StudentModule', fields ['module_id', 'student']
db.create_unique('courseware_studentmodule', ['module_id', 'student_id'])
def backwards(self, orm):
# Removing unique constraint on 'StudentModule', fields ['module_id', 'student']
db.delete_unique('courseware_studentmodule', ['module_id', 'student_id'])
# Deleting field 'StudentModule.max_grade'
db.delete_column('courseware_studentmodule', 'max_grade')
# Deleting field 'StudentModule.done'
db.delete_column('courseware_studentmodule', 'done')
# Adding unique constraint on 'StudentModule', fields ['module_id', 'module_type', 'student']
db.create_unique('courseware_studentmodule', ['module_id', 'module_type', 'student_id'])
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'about': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'avatar_type': ('django.db.models.fields.CharField', [], {'default': "'n'", 'max_length': '1'}),
'bronze': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'consecutive_days_visit_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'country': ('django_countries.fields.CountryField', [], {'max_length': '2', 'blank': 'True'}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'display_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'email_isvalid': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'email_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'email_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'gold': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'gravatar': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ignored_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'interesting_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'new_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'questions_per_page': ('django.db.models.fields.SmallIntegerField', [], {'default': '10'}),
'real_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'reputation': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'seen_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'show_country': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'silver': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'w'", 'max_length': '2'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'courseware.studentmodule': {
'Meta': {'unique_together': "(('student', 'module_id'),)", 'object_name': 'StudentModule'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'done': ('django.db.models.fields.CharField', [], {'default': "'na'", 'max_length': '8', 'db_index': 'True'}),
'grade': ('django.db.models.fields.FloatField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'max_grade': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'module_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'module_type': ('django.db.models.fields.CharField', [], {'default': "'problem'", 'max_length': '32', 'db_index': 'True'}),
'state': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'student': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
}
}
complete_apps = ['courseware']
| agpl-3.0 |
VitalPet/odoo | addons/email_template/__init__.py | 65 | 1126 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2009 Sharoon Thomas
# Copyright (C) 2010-Today OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
import email_template
import wizard
import res_partner
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
thegrill/checkin-control | docs/source/conf.py | 1 | 6111 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# grill-checkin-control documentation build configuration file, created by
# sphinx-quickstart on Sun Jun 25 22:20:49 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
# 'sphinx.ext.imgmath',
# 'sphinx.ext.ifconfig',
'sphinx.ext.viewcode',
# 'sphinx.ext.githubpages',
'sphinx.ext.graphviz',
'sphinx.ext.inheritance_diagram',
'sphinx_autodoc_typehints']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'checkin-control'
copyright = '2017, Christian Lopez Barron'
author = 'Christian Lopez Barron'
# inheritance_graph_attrs = dict(rankdir="LR", size='"6.0, 8.0"',fontsize=14, ratio='compress')
inheritance_graph_attrs = dict(rankdir="TB", bgcolor='transparent')
# inheritance_node_attrs = dict(shape='Mrecord', fontsize=14, height=0.75, color='dodgerblue1', style='filled')
inheritance_node_attrs = dict(shape='Mrecord', color='"#2573a7"', style='filled', fillcolor='"#eaf4fa"',
size='"6.0, 8.0"')
inheritance_edge_attrs = dict(color='"#123a54"')
autodoc_member_order = 'groupwise'
autodoc_default_flags = ['members', 'show-inheritance']
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'grill-checkin-controldoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'grill-checkin-control.tex', 'grill-checkin-control Documentation',
'Christian Lopez Barron', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'grill-checkin-control', 'grill-checkin-control Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'grill-checkin-control', 'grill-checkin-control Documentation',
author, 'checkin-control', 'One line description of project.',
'Miscellaneous'),
]
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'python': ('https://docs.python.org/3.6', None),
'fs': ('https://pyfilesystem2.readthedocs.io/en/latest/', None)}
| mit |
lahnerml/espresso | maintainer/check_features.py | 9 | 1064 | # Copyright (C) 2013,2014,2015,2016 The ESPResSo project
# Copyright (C) 2012 Olaf Lenz
#
# This file is part of ESPResSo.
#
# ESPResSo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Check whether all features used in the code are defined
#
from __future__ import print_function
import sys, os
sys.path.append(os.path.join(sys.path[0], '..', 'config'))
import featuredefs
if len(sys.argv) != 2:
print("Usage: %s FILE" % sys.argv[0])
exit(2)
fdefs = featuredefs.defs(sys.argv[1])
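# Typical invocation (illustrative path; the features definition file is
# assumed to live in the source tree):
#
#     python maintainer/check_features.py src/features.def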
| gpl-3.0 |
waldenner/robotframework | install.py | 6 | 3087 | #!/usr/bin/env python
"""Custom Robot Framework installation script.
Usage: python install.py [ in(stall) | un(install) | re(install) ]
Using `python install.py install` simply runs `python setup.py install`
internally. You need to use `setup.py` directly, if you want to alter the
default installation somehow.
To install with Jython or IronPython instead of Python, replace `python`
with `jython` or `ipy`, respectively.
For more information about installation in general see
http://code.google.com/p/robotframework/wiki/Installation
"""
import glob
import os
import shutil
import sys
def install():
_remove(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'build'))
print 'Installing Robot Framework...'
setup = os.path.join(os.path.dirname(sys.argv[0]), 'setup.py')
rc = os.system('"%s" %s install' % (sys.executable, setup))
if rc != 0:
print 'Installation failed.'
sys.exit(rc)
print 'Installation was successful.'
def uninstall():
print 'Uninstalling Robot Framework...'
try:
instdir = _get_installation_directory()
except Exception:
print 'Robot Framework is not installed or the installation is corrupted.'
sys.exit(1)
_remove(instdir)
if not 'robotframework' in instdir:
_remove_egg_info(instdir)
_remove_runners()
print 'Uninstallation was successful.'
def reinstall():
uninstall()
install()
def _get_installation_directory():
import robot
# Ensure we got correct robot module
if 'Robot' not in robot.pythonpathsetter.__doc__:
raise TypeError
robot_dir = os.path.dirname(robot.__file__)
parent_dir = os.path.dirname(robot_dir)
if 'robotframework' in os.path.basename(parent_dir):
return parent_dir
return robot_dir
def _remove_runners():
runners = ['pybot', 'jybot', 'ipybot', 'rebot', 'jyrebot', 'ipyrebot']
if os.sep == '\\':
runners = [r + '.bat' for r in runners]
for name in runners:
if os.name == 'java':
_remove(os.path.join(sys.prefix, 'bin', name))
elif os.sep == '\\':
_remove(os.path.join(sys.prefix, 'Scripts', name))
else:
for dirpath in ['/bin', '/usr/bin/', '/usr/local/bin']:
_remove(os.path.join(dirpath, name))
def _remove_egg_info(instdir):
pattern = os.path.join(os.path.dirname(instdir), 'robotframework-*.egg-info')
for path in glob.glob(pattern):
_remove(path)
def _remove(path):
if not os.path.exists(path):
return
try:
if os.path.isdir(path):
shutil.rmtree(path)
else:
os.remove(path)
except Exception, err:
print "Removing '%s' failed: %s" % (path, err)
else:
print "Removed '%s'" % path
if __name__ == '__main__':
actions = {'install': install, 'in': install,
'uninstall': uninstall, 'un': uninstall,
'reinstall': reinstall, 're': reinstall}
try:
actions[sys.argv[1]]()
except (KeyError, IndexError):
print __doc__
| apache-2.0 |
ProcessOut/processout-python | processout/token.py | 1 | 10402 | try:
from urllib.parse import quote_plus
except ImportError:
from urllib import quote_plus
import processout
from processout.networking.request import Request
from processout.networking.response import Response
# The content of this file was automatically generated
class Token(object):
def __init__(self, client, prefill = None):
self._client = client
self._id = None
self._customer = None
self._customer_id = None
self._gateway_configuration = None
self._gateway_configuration_id = None
self._card = None
self._card_id = None
self._type = None
self._metadata = None
self._is_subscription_only = None
self._is_default = None
self._created_at = None
if prefill != None:
self.fill_with_data(prefill)
@property
def id(self):
"""Get id"""
return self._id
@id.setter
def id(self, val):
"""Set id
Keyword argument:
val -- New id value"""
self._id = val
return self
@property
def customer(self):
"""Get customer"""
return self._customer
@customer.setter
def customer(self, val):
"""Set customer
Keyword argument:
val -- New customer value"""
if val is None:
self._customer = val
return self
if isinstance(val, dict):
obj = processout.Customer(self._client)
obj.fill_with_data(val)
self._customer = obj
else:
self._customer = val
return self
@property
def customer_id(self):
"""Get customer_id"""
return self._customer_id
@customer_id.setter
def customer_id(self, val):
"""Set customer_id
Keyword argument:
val -- New customer_id value"""
self._customer_id = val
return self
@property
def gateway_configuration(self):
"""Get gateway_configuration"""
return self._gateway_configuration
@gateway_configuration.setter
def gateway_configuration(self, val):
"""Set gateway_configuration
Keyword argument:
val -- New gateway_configuration value"""
if val is None:
self._gateway_configuration = val
return self
if isinstance(val, dict):
obj = processout.GatewayConfiguration(self._client)
obj.fill_with_data(val)
self._gateway_configuration = obj
else:
self._gateway_configuration = val
return self
@property
def gateway_configuration_id(self):
"""Get gateway_configuration_id"""
return self._gateway_configuration_id
@gateway_configuration_id.setter
def gateway_configuration_id(self, val):
"""Set gateway_configuration_id
Keyword argument:
val -- New gateway_configuration_id value"""
self._gateway_configuration_id = val
return self
@property
def card(self):
"""Get card"""
return self._card
@card.setter
def card(self, val):
"""Set card
Keyword argument:
val -- New card value"""
if val is None:
self._card = val
return self
if isinstance(val, dict):
obj = processout.Card(self._client)
obj.fill_with_data(val)
self._card = obj
else:
self._card = val
return self
@property
def card_id(self):
"""Get card_id"""
return self._card_id
@card_id.setter
def card_id(self, val):
"""Set card_id
Keyword argument:
val -- New card_id value"""
self._card_id = val
return self
@property
def type(self):
"""Get type"""
return self._type
@type.setter
def type(self, val):
"""Set type
Keyword argument:
val -- New type value"""
self._type = val
return self
@property
def metadata(self):
"""Get metadata"""
return self._metadata
@metadata.setter
def metadata(self, val):
"""Set metadata
Keyword argument:
val -- New metadata value"""
self._metadata = val
return self
@property
def is_subscription_only(self):
"""Get is_subscription_only"""
return self._is_subscription_only
@is_subscription_only.setter
def is_subscription_only(self, val):
"""Set is_subscription_only
Keyword argument:
val -- New is_subscription_only value"""
self._is_subscription_only = val
return self
@property
def is_default(self):
"""Get is_default"""
return self._is_default
@is_default.setter
def is_default(self, val):
"""Set is_default
Keyword argument:
val -- New is_default value"""
self._is_default = val
return self
@property
def created_at(self):
"""Get created_at"""
return self._created_at
@created_at.setter
def created_at(self, val):
"""Set created_at
Keyword argument:
val -- New created_at value"""
self._created_at = val
return self
def fill_with_data(self, data):
"""Fill the current object with the new values pulled from data
Keyword argument:
data -- The data from which to pull the new values"""
if "id" in data.keys():
self.id = data["id"]
if "customer" in data.keys():
self.customer = data["customer"]
if "customer_id" in data.keys():
self.customer_id = data["customer_id"]
if "gateway_configuration" in data.keys():
self.gateway_configuration = data["gateway_configuration"]
if "gateway_configuration_id" in data.keys():
self.gateway_configuration_id = data["gateway_configuration_id"]
if "card" in data.keys():
self.card = data["card"]
if "card_id" in data.keys():
self.card_id = data["card_id"]
if "type" in data.keys():
self.type = data["type"]
if "metadata" in data.keys():
self.metadata = data["metadata"]
if "is_subscription_only" in data.keys():
self.is_subscription_only = data["is_subscription_only"]
if "is_default" in data.keys():
self.is_default = data["is_default"]
if "created_at" in data.keys():
self.created_at = data["created_at"]
return self
def verify(self, options = {}):
"""Verify a customer token's card is valid.
Keyword argument:
options -- Options for the request"""
self.fill_with_data(options)
request = Request(self._client)
path = "/customers/" + quote_plus(self.customer_id) + "/tokens/" + quote_plus(self.id) + "/verify"
data = {
}
response = Response(request.post(path, data, options))
return_values = []
return_values.append(response.success)
return return_values[0]
def fetch_customer_tokens(self, customer_id, options = {}):
"""Get the customer's tokens.
Keyword argument:
customer_id -- ID of the customer
options -- Options for the request"""
self.fill_with_data(options)
request = Request(self._client)
path = "/customers/" + quote_plus(customer_id) + "/tokens"
data = {
}
response = Response(request.get(path, data, options))
return_values = []
a = []
body = response.body
for v in body['tokens']:
tmp = processout.Token(self._client)
tmp.fill_with_data(v)
a.append(tmp)
return_values.append(a)
return return_values[0]
def find(self, customer_id, token_id, options = {}):
"""Find a customer's token by its ID.
Keyword argument:
customer_id -- ID of the customer
token_id -- ID of the token
options -- Options for the request"""
self.fill_with_data(options)
request = Request(self._client)
path = "/customers/" + quote_plus(customer_id) + "/tokens/" + quote_plus(token_id) + ""
data = {
}
response = Response(request.get(path, data, options))
return_values = []
body = response.body
body = body["token"]
obj = processout.Token(self._client)
return_values.append(obj.fill_with_data(body))
return return_values[0]
def create(self, options = {}):
"""Create a new token for the given customer ID.
Keyword argument:
options -- Options for the request"""
self.fill_with_data(options)
request = Request(self._client)
path = "/customers/" + quote_plus(self.customer_id) + "/tokens"
data = {
'metadata': self.metadata,
'source': options.get("source"),
'settings': options.get("settings"),
'target': options.get("target"),
'verify': options.get("verify"),
'verify_metadata': options.get("verify_metadata"),
'set_default': options.get("set_default")
}
response = Response(request.post(path, data, options))
return_values = []
body = response.body
body = body["token"]
return_values.append(self.fill_with_data(body))
return return_values[0]
def delete(self, options = {}):
"""Delete a customer token
Keyword argument:
options -- Options for the request"""
self.fill_with_data(options)
request = Request(self._client)
path = "/customers/" + quote_plus(self.customer_id) + "/tokens/" + quote_plus(self.id) + ""
data = {
}
response = Response(request.delete(path, data, options))
return_values = []
return_values.append(response.success)
return return_values[0]
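# Illustrative usage sketch (commented out; not part of the generated code).
# The client constructor shown and every identifier below ("proj-id",
# "cust_abc123", "card_def456") are hypothetical placeholders:
#
#   import processout
#   client = processout.ProcessOut("proj-id", "proj-secret")
#   token = processout.Token(client).create({
#       "customer_id": "cust_abc123",
#       "source": "card_def456",
#   })
#   print(token.id)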
| mit |
BDAsdeCorazones/TestAirlines | tabo/cherrypy/cherrypy/test/test_refleaks.py | 22 | 1438 | """Tests for refleaks."""
from cherrypy._cpcompat import HTTPConnection, HTTPSConnection, ntob
import threading
import cherrypy
data = object()
from cherrypy.test import helper
class ReferenceTests(helper.CPWebCase):
def setup_server():
class Root:
def index(self, *args, **kwargs):
cherrypy.request.thing = data
return "Hello world!"
index.exposed = True
cherrypy.tree.mount(Root())
setup_server = staticmethod(setup_server)
def test_threadlocal_garbage(self):
success = []
def getpage():
host = '%s:%s' % (self.interface(), self.PORT)
if self.scheme == 'https':
c = HTTPSConnection(host)
else:
c = HTTPConnection(host)
try:
c.putrequest('GET', '/')
c.endheaders()
response = c.getresponse()
body = response.read()
self.assertEqual(response.status, 200)
self.assertEqual(body, ntob("Hello world!"))
finally:
c.close()
success.append(True)
ITERATIONS = 25
ts = []
for _ in range(ITERATIONS):
t = threading.Thread(target=getpage)
ts.append(t)
t.start()
for t in ts:
t.join()
self.assertEqual(len(success), ITERATIONS)
| gpl-2.0 |
Airbitz/airbitz-ofx | qbo.py | 1 | 7851 | #####################################################################
# #
# File: qbo.py #
# Developer: Justin Leto #
# #
# qbo class provides an interface from main csv iterator method #
# to handle qbo formatting, validations, and writing to file. #
# #
# Usage: python csvtoqbo.py <options> <csvfiles> #
# #
#####################################################################
import sys, traceback
import os
from datetime import datetime
import logging
import qboconst
class qbo:
# Holds a list of valid transactions via the addTransaction() method
__transactions = list()
    # The full QBO document built from constants and transactions
__document = None
# Flag indicating whether the QBO document is valid
__isValid = None
# constructor
def __init__(self):
# Reads in constant values from file, set to private (const) variables
self.__HEADER = qboconst.HEADER
self.__FOOTER = qboconst.FOOTER
self.__DATE_START = qboconst.DATE_START
self.__DATE_END = qboconst.DATE_END
self.__BANKTRANLIST_START = qboconst.BANKTRANLIST_START
self.__BANKTRANLIST_END = qboconst.BANKTRANLIST_END
self.__TRANSACTION_START = qboconst.TRANSACTION_START
self.__TRANSACTION_END = qboconst.TRANSACTION_END
        # Set document to valid
        self.__isValid = True
        # Use a per-instance transaction list and document buffer so that
        # separate qbo objects do not share state through the class-level
        # defaults declared above.
        self.__transactions = list()
        self.__document = None
# PUBLIC GET METHODS for constant values - used in unit testing.
#
#
def getHEADER(self):
return self.__HEADER
def getFOOTER(self):
return self.__FOOTER
def getDATE_START(self):
return self.__DATE_START
def getDATE_END(self):
return self.__DATE_END
def getBANKTRANLIST_START(self):
return self.__BANKTRANLIST_START
def getBANKTRANLIST_END(self):
return self.__BANKTRANLIST_END
def getTRANSACTION_START(self):
return self.__TRANSACTION_START
def getTRANSACTION_END(self):
return self.__TRANSACTION_END
# method to validate paramters used to submit transactions
def validateTransaction(self, status, date_posted, txn_type, to_from_flag, txn_amount, txn_exrate, name):
# if str.lower(status) != 'completed':
# #log status failure
# logging.info("Transaction status [" + status + "] invalid.")
# raise Exception("Transaction status [" + status + "] invalid.")
#
#if type(datetime.strptime(str(date_posted), '%m/%d/%Y')) is not datetime:
# logging.info("Transaction posted date [" + date_posted + "] invalid.")
# raise Exception("Transaction posted date [" + date_posted + "] invalid.")
# if str.lower(txn_type) not in ('payment','refund','withdrawal', 'withdraw funds', 'send', 'receive'):
# logging.info("Transaction type [" + str(txn_type) + "] not 'Payment', 'Refund', 'Withdraw Funds', or 'Withdrawal'.")
# raise Exception("Transaction type [" + str(txn_type) + "] not 'Payment', 'Refund', 'Withdraw Funds', or 'Withdrawal'.")
#
# if str.lower(to_from_flag) not in ('to', 'from'):
# logging.info("Transaction 'To/From' field [" + to_from_flag + "] invalid.")
# raise Exception("Transaction 'To/From' field [" + to_from_flag + "] invalid.")
#
# #logical test of txn_type and to_from_flag
# if ((str.lower(txn_type) == 'refund' and str.lower(to_from_flag) != 'to') or (str.lower(txn_type) == 'payment' and str.lower(to_from_flag) != 'from')):
# logging.info("Transaction type inconsistent with 'To/From' field.")
# raise Exception("Transaction type inconsistent with 'To/From' field.")
#
        if not name:
logging.info("Transaction name empty or null.")
raise Exception("Transaction name empty or null.")
return True
    # addTransaction takes in the param values, applies the required QBO
    # transaction formatting, and pushes the result onto the transaction list
def addTransaction(self, denom, date_posted, txn_memo, txn_id, txn_amount, txn_curamt, txn_category, name):
# try:
# # Validating param values prior to committing transaction
# self.validateTransaction(status, date_posted, txn_type, txn_id, txn_amount, name)
# except:
# raise Exception
# Construct QBO formatted transaction
transaction = ""
day = ""
month = ""
date_array = date_posted.split('-')
day = date_array[2]
month = date_array[1]
year = date_array[0]
if len(day) == 1:
day = "0"+day
if len(month) ==1:
month = "0"+month
rec_date = datetime.strptime(year+"/"+month+"/"+day, '%Y/%m/%d')
rec_date = rec_date.strftime('%Y%m%d%H%M%S') + '.000'
dtposted = ' <DTPOSTED>' + rec_date
if float(txn_amount) > 0:
trtype = ' <TRNTYPE>CREDIT'
else:
trtype = ' <TRNTYPE>DEBIT'
#
# if str.lower(txn_type) == 'receive':
# trtype = '<TRNTYPE>CREDIT'
# elif str.lower(txn_type) == 'send':
# trtype = '<TRNTYPE>DEBIT'
# if str.lower(txn_type) in ('refund', 'withdrawal', 'withdraw funds'):
# tramt = '<TRNAMT>-' + str(txn_amount).replace('$','')
# else:
# tramt = '<TRNAMT>' + str(txn_amount).replace('$','')
tramtbits = float(txn_amount) * denom
tramt = ' <TRNAMT>' + str(tramtbits)
if name:
trname = ' <NAME>' + str(name) + "\n"
else:
trname = ''
exrate = float(txn_curamt) / (tramtbits)
curamt = "{0:0.2f}".format(abs(float(txn_curamt)))
fmtexrate = "{0:0.6f}".format(float(exrate))
rawmemo = 'Rate=' + fmtexrate + " USD=" + curamt + " category=\"" + str(txn_category) + "\" memo=\"" + str(txn_memo)
memo = ' <MEMO>' + rawmemo[:253] + "\"\n"
fitid = ' <FITID>' + str(txn_id)
exrate = ' <CURRATE>' + fmtexrate
transaction = ("" + self.__TRANSACTION_START + "\n"
"" + trtype + "\n"
"" + dtposted + "\n"
"" + tramt + "\n"
"" + fitid + "\n"
"" + trname +
"" + memo +
"" + " <CURRENCY>" + "\n"
"" + exrate + "\n"
"" + " <CURSYM>USD" + "\n"
"" + " </CURRENCY>" + "\n"
"" + self.__TRANSACTION_END + "\n")
# Commit transaction to the document by adding to private member list object
self.__transactions.append(transaction)
logging.info("Transaction [" + str(self.getCount()) + "] Accepted.")
return True
# get the current number of valid committed transactions
def getCount(self):
return len(self.__transactions)
# get the valid status of the document
def isValid(self):
# If number of valid transactions are 0 document is invalid
if self.getCount() == 0:
self.__isValid = False
return self.__isValid
# get the text of the document
def getDocument(self):
self.Build()
return self.__document
# Construct the document, add the transactions
# save str into private member variable __document
def Build(self):
if not self.isValid():
logging.info("Error: QBO document is not valid.")
raise Exception("Error: QBO document is not valid.")
self.__document = ("" + self.__HEADER + "\n"
"" + self.__BANKTRANLIST_START + "\n"
"" + self.__DATE_START + "\n"
"" + self.__DATE_END + "\n")
for txn in self.__transactions:
self.__document = self.__document + str(txn)
self.__document = self.__document + ("" + self.__BANKTRANLIST_END + "\n"
"" + self.__FOOTER + "")
# Write QBO document to file
def Write(self, filename):
try:
with open(filename, 'w') as f:
# getDocument method will build document
# test for validity and return string for write
f.write(self.getDocument())
return True
except:
#log io error return False
exc_type, exc_value, exc_traceback = sys.exc_info()
lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
print(''.join('!! ' + line for line in lines))
            logging.info('qbo.Write() method: ' + ''.join('!! ' + line for line in lines))
return False
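if __name__ == '__main__':
    # Minimal illustrative sketch; csvtoqbo.py is the real entry point, and
    # every value below (denomination factor, date, amounts, ids, names) is
    # hypothetical, chosen only to exercise the class.
    logging.basicConfig(level=logging.INFO)
    doc = qbo()
    doc.addTransaction(denom=1000000,           # hypothetical denomination factor
                       date_posted='2014-05-01',
                       txn_memo='coffee',
                       txn_id='TXN-0001',
                       txn_amount='0.015',
                       txn_curamt='7.50',
                       txn_category='Food',
                       name='Corner Cafe')
    doc.Write('example.qbo')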
| mit |
jamesmarva/d3status | d3status/libs/options.py | 3 | 1419 | ## -*- coding: utf-8 -*-
#
# Copyright (c) 2012 feilong.me. All rights reserved.
#
# @author: Felinx Lee <[email protected]>
# Created on Jun 30, 2012
#
import logging
import os
from tornado.options import parse_command_line, options, define
def parse_config_file(path):
"""Rewrite tornado default parse_config_file.
Parses and loads the Python config file at the given path.
    This version additionally allows a configuration file to define new
    options that were not declared beforehand.
"""
config = {}
execfile(path, config, config)
for name in config:
if name in options:
options[name].set(config[name])
else:
define(name, config[name])
def parse_options():
_root = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..")
_settings = os.path.join(_root, "settings.py")
_settings_local = os.path.join(_root, "settings_local.py")
try:
parse_config_file(_settings)
logging.info("Using settings.py as default settings.")
except Exception, e:
logging.error("No any default settings, are you sure? Exception: %s" % e)
try:
parse_config_file(_settings_local)
logging.info("Override some settings with local settings.")
except Exception, e:
logging.error("No local settings. Exception: %s" % e)
parse_command_line()
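# Illustrative sketch of a settings file consumed by parse_config_file()
# above. The option names and values here are hypothetical; any name that
# was not previously define()d is created on the fly:
#
#   # settings_local.py
#   port = 8888
#   debug = True
#   mongodb_uri = "mongodb://localhost:27017/d3status"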
| apache-2.0 |
zhaodelong/django | django/template/backends/dummy.py | 480 | 2037 | # Since this package contains a "django" module, this is required on Python 2.
from __future__ import absolute_import
import errno
import io
import string
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.template import Origin, TemplateDoesNotExist
from django.utils.html import conditional_escape
from .base import BaseEngine
from .utils import csrf_input_lazy, csrf_token_lazy
class TemplateStrings(BaseEngine):
app_dirname = 'template_strings'
def __init__(self, params):
params = params.copy()
options = params.pop('OPTIONS').copy()
if options:
raise ImproperlyConfigured(
"Unknown options: {}".format(", ".join(options)))
super(TemplateStrings, self).__init__(params)
def from_string(self, template_code):
return Template(template_code)
def get_template(self, template_name):
tried = []
for template_file in self.iter_template_filenames(template_name):
try:
with io.open(template_file, encoding=settings.FILE_CHARSET) as fp:
template_code = fp.read()
except IOError as e:
if e.errno == errno.ENOENT:
tried.append((
Origin(template_file, template_name, self),
'Source does not exist',
))
continue
raise
return Template(template_code)
else:
raise TemplateDoesNotExist(template_name, tried=tried, backend=self)
class Template(string.Template):
def render(self, context=None, request=None):
if context is None:
context = {}
else:
context = {k: conditional_escape(v) for k, v in context.items()}
if request is not None:
context['csrf_input'] = csrf_input_lazy(request)
context['csrf_token'] = csrf_token_lazy(request)
return self.safe_substitute(context)
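# Doctest-style sketch of this backend's behavior. The engine params are the
# minimal set BaseEngine expects, and context values are escaped on render:
#
#   >>> engine = TemplateStrings({
#   ...     'NAME': 'strings', 'DIRS': [], 'APP_DIRS': False, 'OPTIONS': {},
#   ... })
#   >>> engine.from_string('Hello, $name!').render({'name': '<b>world</b>'})
#   'Hello, &lt;b&gt;world&lt;/b&gt;!'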
| bsd-3-clause |
jnewland/home-assistant | homeassistant/components/toon/climate.py | 6 | 4030 | """Support for Toon thermostat."""
from datetime import timedelta
import logging
from typing import Any, Dict, List
from homeassistant.components.climate import ClimateDevice
from homeassistant.components.climate.const import (
STATE_AUTO, STATE_COOL, STATE_ECO, STATE_HEAT, SUPPORT_OPERATION_MODE,
SUPPORT_TARGET_TEMPERATURE)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_TEMPERATURE, TEMP_CELSIUS
from homeassistant.helpers.typing import HomeAssistantType
from . import ToonDisplayDeviceEntity
from .const import DATA_TOON_CLIENT, DEFAULT_MAX_TEMP, DEFAULT_MIN_TEMP, DOMAIN
_LOGGER = logging.getLogger(__name__)
SUPPORT_FLAGS = SUPPORT_TARGET_TEMPERATURE | SUPPORT_OPERATION_MODE
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=5)
SCAN_INTERVAL = timedelta(seconds=300)
HA_TOON = {
STATE_AUTO: 'Comfort',
STATE_HEAT: 'Home',
STATE_ECO: 'Away',
STATE_COOL: 'Sleep',
}
TOON_HA = {value: key for key, value in HA_TOON.items()}
async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry,
async_add_entities) -> None:
"""Set up a Toon binary sensors based on a config entry."""
toon = hass.data[DATA_TOON_CLIENT][entry.entry_id]
async_add_entities([ToonThermostatDevice(toon)], True)
class ToonThermostatDevice(ToonDisplayDeviceEntity, ClimateDevice):
"""Representation of a Toon climate device."""
def __init__(self, toon) -> None:
"""Initialize the Toon climate device."""
self._state = None
self._current_temperature = None
self._target_temperature = None
self._next_target_temperature = None
self._heating_type = None
super().__init__(toon, "Toon Thermostat", 'mdi:thermostat')
@property
def unique_id(self) -> str:
"""Return the unique ID for this thermostat."""
return '_'.join([DOMAIN, self.toon.agreement.id, 'climate'])
@property
def supported_features(self) -> int:
"""Return the list of supported features."""
return SUPPORT_FLAGS
@property
def temperature_unit(self) -> str:
"""Return the unit of measurement."""
return TEMP_CELSIUS
@property
def current_operation(self) -> str:
"""Return current operation i.e. comfort, home, away."""
return TOON_HA.get(self._state)
@property
def operation_list(self) -> List[str]:
"""Return a list of available operation modes."""
return list(HA_TOON.keys())
@property
def current_temperature(self) -> float:
"""Return the current temperature."""
return self._current_temperature
@property
def target_temperature(self) -> float:
"""Return the temperature we try to reach."""
return self._target_temperature
@property
def min_temp(self) -> float:
"""Return the minimum temperature."""
return DEFAULT_MIN_TEMP
@property
def max_temp(self) -> float:
"""Return the maximum temperature."""
return DEFAULT_MAX_TEMP
@property
def device_state_attributes(self) -> Dict[str, Any]:
"""Return the current state of the burner."""
return {
'heating_type': self._heating_type,
}
def set_temperature(self, **kwargs) -> None:
"""Change the setpoint of the thermostat."""
temperature = kwargs.get(ATTR_TEMPERATURE)
self.toon.thermostat = temperature
def set_operation_mode(self, operation_mode: str) -> None:
"""Set new operation mode."""
self.toon.thermostat_state = HA_TOON[operation_mode]
def update(self) -> None:
"""Update local state."""
if self.toon.thermostat_state is None:
self._state = None
else:
self._state = self.toon.thermostat_state.name
self._current_temperature = self.toon.temperature
self._target_temperature = self.toon.thermostat
self._heating_type = self.toon.agreement.heating_type
| apache-2.0 |
arostm/mbed-os | features/FEATURE_LWIP/TESTS/mbedmicro-net/host_tests/udp_shotgun.py | 39 | 4553 | """
mbed SDK
Copyright (c) 2011-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sys
import socket
import json
import random
import itertools
import time
from sys import stdout
from threading import Thread
from SocketServer import BaseRequestHandler, UDPServer
from mbed_host_tests import BaseHostTest, event_callback
class UDPEchoClientHandler(BaseRequestHandler):
def handle(self):
""" UDP packet handler. Responds with multiple simultaneous packets
"""
data, sock = self.request
pattern = [ord(d) << 4 for d in data]
        # Each byte in the request indicates the size of the packet to receive.
        # Each packet size is shifted over by 4 bits to fit in a byte, which
        # avoids any issues with endianness or decoding.
for packet in pattern:
data = [random.randint(0, 255) for _ in range(packet-1)]
data.append(reduce(lambda a,b: a^b, data))
data = ''.join(map(chr, data))
sock.sendto(data, self.client_address)
# Sleep a tiny bit to compensate for local network
time.sleep(0.01)
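# Worked example of the wire format handled above (numbers are hypothetical):
# a request byte of 0x02 asks for a reply packet of 0x02 << 4 = 32 bytes,
# sent as 31 random bytes plus one trailing XOR-checksum byte. A receiver can
# therefore validate a packet by XOR-ing all of its bytes together and
# checking that the result is 0:
#
#   payload = [0x12, 0x34, 0x56]                  # random bytes (31 on the wire)
#   payload.append(reduce(lambda a, b: a ^ b, payload))
#   assert reduce(lambda a, b: a ^ b, payload) == 0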
class UDPEchoClientTest(BaseHostTest):
def __init__(self):
"""
Initialise test parameters.
:return:
"""
BaseHostTest.__init__(self)
self.SERVER_IP = None # Will be determined after knowing the target IP
        self.SERVER_PORT = 0             # Let UDPServer choose an arbitrary port
self.server = None
self.server_thread = None
self.target_ip = None
@staticmethod
def find_interface_to_target_addr(target_ip):
"""
Finds IP address of the interface through which it is connected to the target.
:return:
"""
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
try:
s.connect((target_ip, 0)) # Target IP, any port
except socket.error:
s.connect((target_ip, 8000)) # Target IP, 'random' port
ip = s.getsockname()[0]
s.close()
return ip
def setup_udp_server(self):
"""
sets up a UDP server for target to connect and send test data.
:return:
"""
        # !NOTE: There should be a mechanism to assert/fail from within the host test
if self.SERVER_IP is None:
self.log("setup_udp_server() called before determining server IP!")
self.notify_complete(False)
# Returning none will suppress host test from printing success code
self.server = UDPServer((self.SERVER_IP, self.SERVER_PORT), UDPEchoClientHandler)
ip, port = self.server.server_address
self.SERVER_PORT = port
self.server.allow_reuse_address = True
self.log("HOST: Listening for UDP packets: " + self.SERVER_IP + ":" + str(self.SERVER_PORT))
self.server_thread = Thread(target=UDPEchoClientTest.server_thread_func, args=(self,))
self.server_thread.start()
@staticmethod
def server_thread_func(this):
"""
        Thread function to run the UDP server forever.
:param this:
:return:
"""
this.server.serve_forever()
@event_callback("target_ip")
def _callback_target_ip(self, key, value, timestamp):
"""
Callback to handle reception of target's IP address.
:param key:
:param value:
:param timestamp:
:return:
"""
self.target_ip = value
self.SERVER_IP = self.find_interface_to_target_addr(self.target_ip)
self.setup_udp_server()
@event_callback("host_ip")
def _callback_host_ip(self, key, value, timestamp):
"""
Callback for request for host IP Addr
"""
self.send_kv("host_ip", self.SERVER_IP)
@event_callback("host_port")
def _callback_host_port(self, key, value, timestamp):
"""
Callback for request for host port
"""
self.send_kv("host_port", self.SERVER_PORT)
def teardown(self):
if self.server:
self.server.shutdown()
self.server_thread.join()
| apache-2.0 |
franosincic/edx-platform | openedx/core/djangoapps/programs/tests/test_models.py | 8 | 4051 | """Tests for models supporting Program-related functionality."""
import ddt
from django.test import TestCase
import mock
from openedx.core.djangoapps.programs.models import ProgramsApiConfig
from openedx.core.djangoapps.programs.tests.mixins import ProgramsApiConfigMixin
@ddt.ddt
# ConfigurationModels use the cache. Make every cache get a miss.
@mock.patch('config_models.models.cache.get', return_value=None)
class TestProgramsApiConfig(ProgramsApiConfigMixin, TestCase):
"""Tests covering the ProgramsApiConfig model."""
def test_url_construction(self, _mock_cache):
"""Verify that URLs returned by the model are constructed correctly."""
programs_config = self.create_programs_config()
self.assertEqual(
programs_config.internal_api_url,
programs_config.internal_service_url.strip('/') + '/api/v{}/'.format(programs_config.api_version_number)
)
self.assertEqual(
programs_config.public_api_url,
programs_config.public_service_url.strip('/') + '/api/v{}/'.format(programs_config.api_version_number)
)
authoring_app_config = programs_config.authoring_app_config
self.assertEqual(
authoring_app_config.js_url,
programs_config.public_service_url.strip('/') + programs_config.authoring_app_js_path
)
self.assertEqual(
authoring_app_config.css_url,
programs_config.public_service_url.strip('/') + programs_config.authoring_app_css_path
)
@ddt.data(
(0, False),
(1, True),
)
@ddt.unpack
def test_cache_control(self, cache_ttl, is_cache_enabled, _mock_cache):
"""Verify the behavior of the property controlling whether API responses are cached."""
programs_config = self.create_programs_config(cache_ttl=cache_ttl)
self.assertEqual(programs_config.is_cache_enabled, is_cache_enabled)
def test_is_student_dashboard_enabled(self, _mock_cache):
"""
Verify that the property controlling display on the student dashboard is only True
when configuration is enabled and all required configuration is provided.
"""
programs_config = self.create_programs_config(enabled=False)
self.assertFalse(programs_config.is_student_dashboard_enabled)
programs_config = self.create_programs_config(enable_student_dashboard=False)
self.assertFalse(programs_config.is_student_dashboard_enabled)
programs_config = self.create_programs_config()
self.assertTrue(programs_config.is_student_dashboard_enabled)
def test_is_studio_tab_enabled(self, _mock_cache):
"""
Verify that the property controlling display of the Studio tab is only True
when configuration is enabled and all required configuration is provided.
"""
programs_config = self.create_programs_config(enabled=False)
self.assertFalse(programs_config.is_studio_tab_enabled)
programs_config = self.create_programs_config(enable_studio_tab=False)
self.assertFalse(programs_config.is_studio_tab_enabled)
programs_config = self.create_programs_config(authoring_app_js_path='', authoring_app_css_path='')
self.assertFalse(programs_config.is_studio_tab_enabled)
programs_config = self.create_programs_config()
self.assertTrue(programs_config.is_studio_tab_enabled)
def test_is_certification_enabled(self, _mock_cache):
"""
Verify that the property controlling certification-related functionality
for Programs behaves as expected.
"""
programs_config = self.create_programs_config(enabled=False)
self.assertFalse(programs_config.is_certification_enabled)
programs_config = self.create_programs_config(enable_certification=False)
self.assertFalse(programs_config.is_certification_enabled)
programs_config = self.create_programs_config()
self.assertTrue(programs_config.is_certification_enabled)
| agpl-3.0 |
yashu-seth/networkx | networkx/release.py | 24 | 7759 | """Release data for NetworkX.
When NetworkX is imported a number of steps are followed to determine
the version information.
1) If the release is not a development release (dev=False), then version
information is read from version.py, a file containing statically
defined version information. This file should exist on every
downloadable release of NetworkX since setup.py creates it during
packaging/installation. However, version.py might not exist if one
is running NetworkX from the mercurial repository. In the event that
version.py does not exist, then no vcs information will be available.
2) If the release is a development release, then version information
is read dynamically, when possible. If no dynamic information can be
read, then an attempt is made to read the information from version.py.
If version.py does not exist, then no vcs information will be available.
Clarification:
version.py is created only by setup.py
When setup.py creates version.py, it does so before packaging/installation.
So the created file is included in the source distribution. When a user
downloads a tar.gz file and extracts the files, the files will not be in a
live version control repository. So when the user runs setup.py to install
NetworkX, we must make sure write_versionfile() does not overwrite the
revision information contained in the version.py that was included in the
tar.gz file. This is why write_versionfile() includes an early escape.
"""
# Copyright (C) 2004-2015 by
# Aric Hagberg <[email protected]>
# Dan Schult <[email protected]>
# Pieter Swart <[email protected]>
# All rights reserved.
# BSD license.
from __future__ import absolute_import
import os
import sys
import time
import datetime
basedir = os.path.abspath(os.path.split(__file__)[0])
def write_versionfile():
"""Creates a static file containing version information."""
versionfile = os.path.join(basedir, 'version.py')
text = '''"""
Version information for NetworkX, created during installation.
Do not add this file to the repository.
"""
import datetime
version = %(version)r
date = %(date)r
# Was NetworkX built from a development version? If so, remember that the major
# and minor versions reference the "target" (rather than "current") release.
dev = %(dev)r
# Format: (name, major, min, revision)
version_info = %(version_info)r
# Format: a 'datetime.datetime' instance
date_info = %(date_info)r
# Format: (vcs, vcs_tuple)
vcs_info = %(vcs_info)r
'''
# Try to update all information
date, date_info, version, version_info, vcs_info = get_info(dynamic=True)
def writefile():
fh = open(versionfile, 'w')
subs = {
'dev' : dev,
'version': version,
'version_info': version_info,
'date': date,
'date_info': date_info,
'vcs_info': vcs_info
}
fh.write(text % subs)
fh.close()
if vcs_info[0] == 'mercurial':
# Then, we want to update version.py.
writefile()
else:
if os.path.isfile(versionfile):
# This is *good*, and the most likely place users will be when
# running setup.py. We do not want to overwrite version.py.
# Grab the version so that setup can use it.
sys.path.insert(0, basedir)
from version import version
del sys.path[0]
else:
# This is *bad*. It means the user might have a tarball that
# does not include version.py. Let this error raise so we can
# fix the tarball.
##raise Exception('version.py not found!')
# We no longer require that prepared tarballs include a version.py
            # So we use the possibly truncated value from get_info()
# Then we write a new file.
writefile()
return version
def get_revision():
"""Returns revision and vcs information, dynamically obtained."""
vcs, revision, tag = None, None, None
hgdir = os.path.join(basedir, '..', '.hg')
gitdir = os.path.join(basedir, '..', '.git')
if os.path.isdir(gitdir):
vcs = 'git'
# For now, we are not bothering with revision and tag.
vcs_info = (vcs, (revision, tag))
return revision, vcs_info
def get_info(dynamic=True):
## Date information
date_info = datetime.datetime.now()
date = time.asctime(date_info.timetuple())
revision, version, version_info, vcs_info = None, None, None, None
import_failed = False
dynamic_failed = False
if dynamic:
revision, vcs_info = get_revision()
if revision is None:
dynamic_failed = True
if dynamic_failed or not dynamic:
# This is where most final releases of NetworkX will be.
# All info should come from version.py. If it does not exist, then
# no vcs information will be provided.
sys.path.insert(0, basedir)
try:
from version import date, date_info, version, version_info, vcs_info
except ImportError:
import_failed = True
vcs_info = (None, (None, None))
else:
revision = vcs_info[1][0]
del sys.path[0]
if import_failed or (dynamic and not dynamic_failed):
# We are here if:
# we failed to determine static versioning info, or
# we successfully obtained dynamic revision info
version = ''.join([str(major), '.', str(minor)])
if dev:
version += '.dev_' + date_info.strftime("%Y%m%d%H%M%S")
version_info = (name, major, minor, revision)
return date, date_info, version, version_info, vcs_info
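# Illustrative sketch of what get_info() returns for a development tree that
# has no generated version.py (the concrete values below are made up):
#
#   date          -> 'Mon Jun  1 12:00:00 2015'
#   version       -> '2.0.dev_20150601120000'
#   version_info  -> ('networkx', '2', '0', None)
#   vcs_info      -> (None, (None, None))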
## Version information
name = 'networkx'
major = "2"
minor = "0"
## Declare current release as a development release.
## Change to False before tagging a release; then change back.
dev = True
description = "Python package for creating and manipulating graphs and networks"
long_description = \
"""
NetworkX is a Python package for the creation, manipulation, and
study of the structure, dynamics, and functions of complex networks.
"""
license = 'BSD'
authors = {'Hagberg' : ('Aric Hagberg','[email protected]'),
'Schult' : ('Dan Schult','[email protected]'),
'Swart' : ('Pieter Swart','[email protected]')
}
maintainer = "NetworkX Developers"
maintainer_email = "[email protected]"
url = 'http://networkx.github.io/'
download_url= 'https://pypi.python.org/pypi/networkx/'
platforms = ['Linux','Mac OSX','Windows','Unix']
keywords = ['Networks', 'Graph Theory', 'Mathematics', 'network', 'graph', 'discrete mathematics', 'math']
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Scientific/Engineering :: Bio-Informatics',
'Topic :: Scientific/Engineering :: Information Analysis',
'Topic :: Scientific/Engineering :: Mathematics',
'Topic :: Scientific/Engineering :: Physics']
date, date_info, version, version_info, vcs_info = get_info()
if __name__ == '__main__':
# Write versionfile for nightly snapshots.
write_versionfile()
| bsd-3-clause |
guettli/django | tests/m2m_through_regress/tests.py | 31 | 10463 | from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.core import management
from django.test import TestCase
from django.utils.six import StringIO
from .models import (
Car, CarDriver, Driver, Group, Membership, Person, UserMembership,
)
class M2MThroughTestCase(TestCase):
@classmethod
def setUpTestData(cls):
cls.bob = Person.objects.create(name="Bob")
cls.jim = Person.objects.create(name="Jim")
cls.rock = Group.objects.create(name="Rock")
cls.roll = Group.objects.create(name="Roll")
cls.frank = User.objects.create_user("frank", "[email protected]", "password")
cls.jane = User.objects.create_user("jane", "[email protected]", "password")
# normal intermediate model
cls.bob_rock = Membership.objects.create(person=cls.bob, group=cls.rock)
cls.bob_roll = Membership.objects.create(person=cls.bob, group=cls.roll, price=50)
cls.jim_rock = Membership.objects.create(person=cls.jim, group=cls.rock, price=50)
# intermediate model with custom id column
cls.frank_rock = UserMembership.objects.create(user=cls.frank, group=cls.rock)
cls.frank_roll = UserMembership.objects.create(user=cls.frank, group=cls.roll)
cls.jane_rock = UserMembership.objects.create(user=cls.jane, group=cls.rock)
def test_retrieve_reverse_m2m_items(self):
self.assertQuerysetEqual(
self.bob.group_set.all(), [
"<Group: Rock>",
"<Group: Roll>",
],
ordered=False
)
def test_retrieve_forward_m2m_items(self):
self.assertQuerysetEqual(
self.roll.members.all(), [
"<Person: Bob>",
]
)
def test_cannot_use_setattr_on_reverse_m2m_with_intermediary_model(self):
msg = (
"Cannot set values on a ManyToManyField which specifies an "
"intermediary model. Use m2m_through_regress.Membership's Manager "
"instead."
)
with self.assertRaisesMessage(AttributeError, msg):
self.bob.group_set.set([])
def test_cannot_use_setattr_on_forward_m2m_with_intermediary_model(self):
msg = (
"Cannot set values on a ManyToManyField which specifies an "
"intermediary model. Use m2m_through_regress.Membership's Manager "
"instead."
)
with self.assertRaisesMessage(AttributeError, msg):
self.roll.members.set([])
def test_cannot_use_create_on_m2m_with_intermediary_model(self):
with self.assertRaises(AttributeError):
self.rock.members.create(name="Anne")
def test_cannot_use_create_on_reverse_m2m_with_intermediary_model(self):
with self.assertRaises(AttributeError):
self.bob.group_set.create(name="Funk")
def test_retrieve_reverse_m2m_items_via_custom_id_intermediary(self):
self.assertQuerysetEqual(
self.frank.group_set.all(), [
"<Group: Rock>",
"<Group: Roll>",
],
ordered=False
)
def test_retrieve_forward_m2m_items_via_custom_id_intermediary(self):
self.assertQuerysetEqual(
self.roll.user_members.all(), [
"<User: frank>",
]
)
def test_join_trimming_forwards(self):
"Check that we don't involve too many copies of the intermediate table when doing a join. Refs #8046, #8254"
self.assertQuerysetEqual(
self.rock.members.filter(membership__price=50), [
"<Person: Jim>",
]
)
def test_join_trimming_reverse(self):
self.assertQuerysetEqual(
self.bob.group_set.filter(membership__price=50), [
"<Group: Roll>",
]
)
class M2MThroughSerializationTestCase(TestCase):
@classmethod
def setUpTestData(cls):
cls.bob = Person.objects.create(name="Bob")
cls.roll = Group.objects.create(name="Roll")
cls.bob_roll = Membership.objects.create(person=cls.bob, group=cls.roll)
def test_serialization(self):
"m2m-through models aren't serialized as m2m fields. Refs #8134"
pks = {"p_pk": self.bob.pk, "g_pk": self.roll.pk, "m_pk": self.bob_roll.pk}
out = StringIO()
management.call_command("dumpdata", "m2m_through_regress", format="json", stdout=out)
self.assertJSONEqual(
out.getvalue().strip(),
'[{"pk": %(m_pk)s, "model": "m2m_through_regress.membership", "fields": {"person": %(p_pk)s, "price": '
'100, "group": %(g_pk)s}}, {"pk": %(p_pk)s, "model": "m2m_through_regress.person", "fields": {"name": '
'"Bob"}}, {"pk": %(g_pk)s, "model": "m2m_through_regress.group", "fields": {"name": "Roll"}}]'
% pks
)
out = StringIO()
management.call_command("dumpdata", "m2m_through_regress", format="xml", indent=2, stdout=out)
self.assertXMLEqual(out.getvalue().strip(), """
<?xml version="1.0" encoding="utf-8"?>
<django-objects version="1.0">
<object pk="%(m_pk)s" model="m2m_through_regress.membership">
<field to="m2m_through_regress.person" name="person" rel="ManyToOneRel">%(p_pk)s</field>
<field to="m2m_through_regress.group" name="group" rel="ManyToOneRel">%(g_pk)s</field>
<field type="IntegerField" name="price">100</field>
</object>
<object pk="%(p_pk)s" model="m2m_through_regress.person">
<field type="CharField" name="name">Bob</field>
</object>
<object pk="%(g_pk)s" model="m2m_through_regress.group">
<field type="CharField" name="name">Roll</field>
</object>
</django-objects>
""".strip() % pks)
class ToFieldThroughTests(TestCase):
def setUp(self):
self.car = Car.objects.create(make="Toyota")
self.driver = Driver.objects.create(name="Ryan Briscoe")
CarDriver.objects.create(car=self.car, driver=self.driver)
# We are testing if wrong objects get deleted due to using wrong
# field value in m2m queries. So, it is essential that the pk
# numberings do not match.
# Create one intentionally unused driver to mix up the autonumbering
self.unused_driver = Driver.objects.create(name="Barney Gumble")
# And two intentionally unused cars.
self.unused_car1 = Car.objects.create(make="Trabant")
self.unused_car2 = Car.objects.create(make="Wartburg")
def test_to_field(self):
self.assertQuerysetEqual(
self.car.drivers.all(),
["<Driver: Ryan Briscoe>"]
)
def test_to_field_reverse(self):
self.assertQuerysetEqual(
self.driver.car_set.all(),
["<Car: Toyota>"]
)
def test_to_field_clear_reverse(self):
self.driver.car_set.clear()
self.assertQuerysetEqual(
self.driver.car_set.all(), [])
def test_to_field_clear(self):
self.car.drivers.clear()
self.assertQuerysetEqual(
self.car.drivers.all(), [])
# Low level tests for _add_items and _remove_items. We test these methods
# because .add/.remove aren't available for m2m fields with through, but
# through is the only way to set to_field currently. We do want to make
# sure these methods are ready if the ability to use .add or .remove with
# to_field relations is added some day.
def test_add(self):
self.assertQuerysetEqual(
self.car.drivers.all(),
["<Driver: Ryan Briscoe>"]
)
# Yikes - barney is going to drive...
self.car.drivers._add_items('car', 'driver', self.unused_driver)
self.assertQuerysetEqual(
self.car.drivers.all(),
["<Driver: Barney Gumble>", "<Driver: Ryan Briscoe>"]
)
def test_add_null(self):
nullcar = Car.objects.create(make=None)
with self.assertRaises(ValueError):
nullcar.drivers._add_items('car', 'driver', self.unused_driver)
def test_add_related_null(self):
nulldriver = Driver.objects.create(name=None)
with self.assertRaises(ValueError):
self.car.drivers._add_items('car', 'driver', nulldriver)
def test_add_reverse(self):
car2 = Car.objects.create(make="Honda")
self.assertQuerysetEqual(
self.driver.car_set.all(),
["<Car: Toyota>"]
)
self.driver.car_set._add_items('driver', 'car', car2)
self.assertQuerysetEqual(
self.driver.car_set.all(),
["<Car: Toyota>", "<Car: Honda>"],
ordered=False
)
def test_add_null_reverse(self):
nullcar = Car.objects.create(make=None)
with self.assertRaises(ValueError):
self.driver.car_set._add_items('driver', 'car', nullcar)
def test_add_null_reverse_related(self):
nulldriver = Driver.objects.create(name=None)
with self.assertRaises(ValueError):
nulldriver.car_set._add_items('driver', 'car', self.car)
def test_remove(self):
self.assertQuerysetEqual(
self.car.drivers.all(),
["<Driver: Ryan Briscoe>"]
)
self.car.drivers._remove_items('car', 'driver', self.driver)
self.assertQuerysetEqual(
self.car.drivers.all(), [])
def test_remove_reverse(self):
self.assertQuerysetEqual(
self.driver.car_set.all(),
["<Car: Toyota>"]
)
self.driver.car_set._remove_items('driver', 'car', self.car)
self.assertQuerysetEqual(
self.driver.car_set.all(), [])
class ThroughLoadDataTestCase(TestCase):
fixtures = ["m2m_through"]
def test_sequence_creation(self):
"""
Sequences on an m2m_through are created for the through model, not a
phantom auto-generated m2m table (#11107).
"""
out = StringIO()
management.call_command("dumpdata", "m2m_through_regress", format="json", stdout=out)
self.assertJSONEqual(
out.getvalue().strip(),
'[{"pk": 1, "model": "m2m_through_regress.usermembership", "fields": {"price": 100, "group": 1, "user"'
': 1}}, {"pk": 1, "model": "m2m_through_regress.person", "fields": {"name": "Guido"}}, {"pk": 1, '
'"model": "m2m_through_regress.group", "fields": {"name": "Python Core Group"}}]'
)
| bsd-3-clause |
CARTAvis/carta | carta/html5/common/skel/source/class/skel/simulation/tStack.py | 3 | 9489 | import unittest
import Util
import time
import selectBrowser
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
#Stack functionality.
class tStack(unittest.TestCase):
def setUp(self):
browser = selectBrowser._getBrowser()
Util.setUp(self, browser)
def verifyCompositionMode(self, driver, mode):
print "verifying mode=", mode
combineCombo = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.ID, "layerCompositionMode")))
driver.execute_script( "arguments[0].scrollIntoView(true);", combineCombo )
combineText = combineCombo.find_element_by_xpath( ".//div/div")
combMode = combineText.text
print "Combine mode=",combMode
self.assertTrue( mode==combMode, "Combine modes not as expected")
def _isColorChecked(self, colorBox ):
colorBorder = colorBox.get_attribute( "class")
checked = False
if colorBorder == "qx-line-border":
checked = True
return checked
def _testColor(self, colorBoxId, colorExpected, colorStr, driver ):
filterBox = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.ID, colorBoxId)))
driver.execute_script( "arguments[0].scrollIntoView(true);", filterBox )
colorChecked = self._isColorChecked( filterBox )
print "Color checked=", colorChecked
colorCheckExpected = True
if colorExpected == 0:
colorCheckExpected = False
self.assertEqual( colorChecked, colorCheckExpected, colorStr + " amount is not correct")
def verifyColor(self, driver, redExpected, greenExpected, blueExpected ):
self._testColor( "filterRedBox", redExpected, "Red", driver )
self._testColor( "filterGreenBox", greenExpected, "Green", driver)
self._testColor( "filterBlueBox", blueExpected, "Blue", driver )
def _verifyRGB(self, driver, imageName, rgbStr ):
xPath = "//div[@qxclass='skel.widgets.Image.Stack.TreeItem']/div[text()='" + imageName + "']/../div[@qxclass='skel.widgets.Image.Stack.CustomIcon']"
item = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.XPATH, xPath)))
styleStr = item.get_attribute("style")
print "Style=",styleStr
rgb = item.value_of_css_property( 'background-color')
print "RGB color=",rgb
print "RGBSTR=", rgbStr
        self.assertTrue( rgb==rgbStr, "Icon is not the expected color")
# Load 3 images
# Hide the second image; check the count goes down to 2
# Show the second image; check the count goes up to 3
def test_hideShow(self):
driver = self.driver
timeout = selectBrowser._getSleep()
#Load images
Util.load_image( self, driver, "Default")
Util.load_image( self, driver, "aJ.fits")
Util.load_image( self, driver, "aH.fits")
#Verify the image animator sees three images.
Util.verifyAnimatorUpperBound( self, driver, 2, "Image" )
#Open the image settings
#Open the stack tab
Util.openSettings( self, driver, "Image", True )
Util.clickTab( driver, "Stack" )
#Turn off auto select
autoSelectCheck = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.ID, "autoSelectImages")))
ActionChains(driver).click( autoSelectCheck ).perform()
#Hide the second image
secondItem = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.XPATH, "//div[@qxclass='skel.widgets.Image.Stack.TreeItem']/div[text()='aJ.fits']/..")))
ActionChains(driver).context_click( secondItem ).perform()
ActionChains(driver).send_keys( Keys.ARROW_DOWN ).send_keys( Keys.ARROW_DOWN).send_keys( Keys.ENTER ).perform()
#Verify the animator sees two images
time.sleep( 2 )
Util.verifyAnimatorUpperBound(self, driver, 1, "Image" )
#Show the second image
secondItem = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.XPATH, "//div[@qxclass='skel.widgets.Image.Stack.TreeItem']/div[text()='aJ.fits']/..")))
ActionChains(driver).context_click( secondItem ).perform()
ActionChains(driver).send_keys( Keys.ARROW_DOWN ).send_keys( Keys.ARROW_DOWN).send_keys( Keys.ENTER ).perform()
#Verify the animator sees three images
time.sleep( 2 )
Util.verifyAnimatorUpperBound( self, driver, 2, "Image")
# Load 3 images
# Test that we can group the images into an RGB layer.
# Test that we can ungroup the images.
def test_groupUngroup(self):
driver = self.driver
timeout = selectBrowser._getSleep()
#Load images
Util.load_image( self, driver, "Orion.methanol.cbc.contsub.image.fits")
Util.load_image( self, driver, "Orion.cont.image.fits")
Util.load_image( self, driver, "TWHydra_CO2_1line.image.fits")
time.sleep( 2 )
#Open the image settings
#Open the stack tab
Util.openSettings( self, driver, "Image", True )
Util.clickTab( driver, "Stack" )
#Turn off auto select
autoSelectCheck = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.ID, "autoSelectImages")))
ActionChains(driver).click( autoSelectCheck ).perform()
#Select all images (The third should already be selected so selecting
#the first with a shift should do it).
firstItem = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.XPATH, "//div[@qxclass='skel.widgets.Image.Stack.TreeItem']/div[text()='Orion.methanol.cbc.contsub.image.fits']/..")))
actions = ActionChains( driver).key_down(Keys.SHIFT).click( firstItem )
actions.key_up( Keys.SHIFT ).perform()
#Click the group check box.
groupCheck = WebDriverWait( driver, 10).until( EC.presence_of_element_located((By.ID, "stackGroupImages")))
ActionChains(driver).click( groupCheck ).perform()
time.sleep(2)
#Verify that the images now have RGB boxes.
self._verifyRGB( driver, "Orion.methanol.cbc.contsub.image.fits", "rgba(255, 0, 0, 1)")
self._verifyRGB( driver, "Orion.cont.image.fits", "rgba(0, 255, 0, 1)")
self._verifyRGB( driver, "TWHydra_CO2_1line.image.fits", "rgba(0, 0, 255, 1)")
#Ungroup the images.
groupCheck = WebDriverWait( driver, 10).until( EC.presence_of_element_located((By.ID, "stackGroupImages")))
ActionChains(driver).click( groupCheck ).perform()
time.sleep(2)
#Verify the images have transparent RGB boxes.
self._verifyRGB( driver, "Orion.methanol.cbc.contsub.image.fits", "rgba(0, 0, 0, 0)")
self._verifyRGB( driver, "Orion.cont.image.fits", "rgba(0, 0, 0, 0)")
self._verifyRGB( driver, "TWHydra_CO2_1line.image.fits", "rgba(0, 0, 0, 0)")
#Test that we can rename a group.
def test_groupRename(self):
driver = self.driver
timeout = selectBrowser._getSleep()
#Load images
Util.load_image( self, driver, "Orion.methanol.cbc.contsub.image.fits")
Util.load_image( self, driver, "Orion.cont.image.fits")
Util.load_image( self, driver, "TWHydra_CO2_1line.image.fits")
time.sleep( 2 )
#Open the image settings
#Open the stack tab
Util.openSettings( self, driver, "Image", True )
time.sleep(4)
Util.clickTab( driver, "Stack" )
#Turn off auto select
autoSelectCheck = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.ID, "autoSelectImages")))
ActionChains(driver).click( autoSelectCheck ).perform()
#Group the bottom two images.
secondItem = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.XPATH, "//div[@qxclass='skel.widgets.Image.Stack.TreeItem']/div[text()='Orion.cont.image.fits']/..")))
actions = ActionChains( driver).key_down(Keys.SHIFT).click( secondItem )
actions.key_up( Keys.SHIFT ).perform()
#Click the group check box.
groupCheck = WebDriverWait( driver, 10).until( EC.presence_of_element_located((By.ID, "stackGroupImages")))
ActionChains(driver).click( groupCheck ).perform()
time.sleep(2)
#Change the name of the group to twoImageRGB & verify that there is a tree node with that name..
nameText = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.XPATH, "//input[starts-with(@id, 'stackLayerName')]")))
Util._changeElementText(self, driver, nameText, "twoImageRGB")
WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.XPATH, "//div[contains(text(),'twoImageRGB')]")))
def tearDown(self):
#Close the browser
self.driver.close()
#Allow browser to fully close before continuing
time.sleep(2)
#Close the session and delete temporary files
self.driver.quit()
if __name__ == "__main__":
    unittest.main()
| gpl-3.0 |
poondog/kangaroo-m7-mkv | tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/Core.py | 11088 | 3246 | # Core.py - Python extension for perf script, core functions
#
# Copyright (C) 2010 by Tom Zanussi <[email protected]>
#
# This software may be distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
from collections import defaultdict
def autodict():
return defaultdict(autodict)
flag_fields = autodict()
symbolic_fields = autodict()
def define_flag_field(event_name, field_name, delim):
flag_fields[event_name][field_name]['delim'] = delim
def define_flag_value(event_name, field_name, value, field_str):
flag_fields[event_name][field_name]['values'][value] = field_str
def define_symbolic_field(event_name, field_name):
# nothing to do, really
pass
def define_symbolic_value(event_name, field_name, value, field_str):
symbolic_fields[event_name][field_name]['values'][value] = field_str
def flag_str(event_name, field_name, value):
string = ""
if flag_fields[event_name][field_name]:
print_delim = 0
keys = flag_fields[event_name][field_name]['values'].keys()
keys.sort()
for idx in keys:
if not value and not idx:
string += flag_fields[event_name][field_name]['values'][idx]
break
if idx and (value & idx) == idx:
if print_delim and flag_fields[event_name][field_name]['delim']:
string += " " + flag_fields[event_name][field_name]['delim'] + " "
string += flag_fields[event_name][field_name]['values'][idx]
print_delim = 1
value &= ~idx
return string
def symbol_str(event_name, field_name, value):
string = ""
if symbolic_fields[event_name][field_name]:
keys = symbolic_fields[event_name][field_name]['values'].keys()
keys.sort()
for idx in keys:
if not value and not idx:
string = symbolic_fields[event_name][field_name]['values'][idx]
break
if (value == idx):
string = symbolic_fields[event_name][field_name]['values'][idx]
break
return string
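# Editor's illustration (not part of the original perf script): how the
# flag registry above is meant to be used. A script registers the numeric
# flag values for an event field once, then renders raw values with
# flag_str(). The event and field names here are hypothetical.
def _example_flag_registry():
    define_flag_field('hypothetical_event', 'flags', '|')
    define_flag_value('hypothetical_event', 'flags', 0x1, 'PENDING')
    define_flag_value('hypothetical_event', 'flags', 0x2, 'MASKED')
    # 0x3 has both bits set, so this returns "PENDING | MASKED"
    return flag_str('hypothetical_event', 'flags', 0x3)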
trace_flags = { 0x00: "NONE", \
0x01: "IRQS_OFF", \
0x02: "IRQS_NOSUPPORT", \
0x04: "NEED_RESCHED", \
0x08: "HARDIRQ", \
0x10: "SOFTIRQ" }
def trace_flag_str(value):
string = ""
print_delim = 0
keys = trace_flags.keys()
for idx in keys:
if not value and not idx:
string += "NONE"
break
if idx and (value & idx) == idx:
if print_delim:
string += " | ";
string += trace_flags[idx]
print_delim = 1
value &= ~idx
return string
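# Editor's note, a quick worked example for trace_flag_str(): 0x04 sets
# only the NEED_RESCHED bit, so it decodes to "NEED_RESCHED", and 0x00
# decodes to "NONE". With several bits set, the fragments are joined by
# " | ", but their order follows dict iteration order here (unlike
# flag_str() above, which sorts its keys first).
def _example_trace_flags():
    return trace_flag_str(0x04), trace_flag_str(0x00)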
def taskState(state):
states = {
0 : "R",
1 : "S",
2 : "D",
64: "DEAD"
}
if state not in states:
return "Unknown"
return states[state]
class EventHeaders:
def __init__(self, common_cpu, common_secs, common_nsecs,
common_pid, common_comm):
self.cpu = common_cpu
self.secs = common_secs
self.nsecs = common_nsecs
self.pid = common_pid
self.comm = common_comm
def ts(self):
return (self.secs * (10 ** 9)) + self.nsecs
def ts_format(self):
return "%d.%d" % (self.secs, int(self.nsecs / 1000))
| gpl-2.0 |
LIKAIMO/MissionPlanner | ExtLibs/Mavlink/mavlink.py | 34 | 259175 | '''
MAVLink protocol implementation (auto-generated by mavgen.py)
Generated from: ardupilotmega.xml,common.xml
Note: this file has been auto-generated. DO NOT EDIT
'''
import struct, array, mavutil, time
WIRE_PROTOCOL_VERSION = "0.9"
class MAVLink_header(object):
'''MAVLink message header'''
def __init__(self, msgId, mlen=0, seq=0, srcSystem=0, srcComponent=0):
self.mlen = mlen
self.seq = seq
self.srcSystem = srcSystem
self.srcComponent = srcComponent
self.msgId = msgId
def pack(self):
return struct.pack('BBBBBB', 85, self.mlen, self.seq,
self.srcSystem, self.srcComponent, self.msgId)
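# Editor's note (illustration, not generated code): the six header bytes
# packed above are, in order, the 0x55 (85) start-of-frame marker used by
# MAVLink 0.9, the payload length, the sequence number, the source system
# id, the source component id and the message id.
def _example_header_layout():
    # msgId 0 is HEARTBEAT (see the message IDs below); mlen=3, seq=7
    return MAVLink_header(0, mlen=3, seq=7).pack()  # '\x55\x03\x07\x00\x00\x00'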
class MAVLink_message(object):
'''base MAVLink message class'''
def __init__(self, msgId, name):
self._header = MAVLink_header(msgId)
self._payload = None
self._msgbuf = None
self._crc = None
self._fieldnames = []
self._type = name
def get_msgbuf(self):
return self._msgbuf
def get_header(self):
return self._header
def get_payload(self):
return self._payload
def get_crc(self):
return self._crc
def get_fieldnames(self):
return self._fieldnames
def get_type(self):
return self._type
def get_msgId(self):
return self._header.msgId
def get_srcSystem(self):
return self._header.srcSystem
def get_srcComponent(self):
return self._header.srcComponent
def get_seq(self):
return self._header.seq
def __str__(self):
ret = '%s {' % self._type
for a in self._fieldnames:
v = getattr(self, a)
ret += '%s : %s, ' % (a, v)
ret = ret[0:-2] + '}'
return ret
def pack(self, mav, crc_extra, payload):
self._payload = payload
self._header = MAVLink_header(self._header.msgId, len(payload), mav.seq,
mav.srcSystem, mav.srcComponent)
self._msgbuf = self._header.pack() + payload
crc = mavutil.x25crc(self._msgbuf[1:])
if False: # using CRC extra
crc.accumulate(chr(crc_extra))
self._crc = crc.crc
self._msgbuf += struct.pack('<H', self._crc)
return self._msgbuf
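# Editor's sketch (not generated code): the complete 0.9 wire frame that
# pack() returns is the 6-byte header, the big-endian ('>') payload, then
# a 2-byte X25 CRC-16 computed over everything after the 0x55 start marker
# (the CRC-extra byte is compiled out above via `if False`, matching
# protocol 0.9). The `mav` argument only needs seq/srcSystem/srcComponent,
# supplied here by a stand-in object; the real connection class appears
# later in this file.
def _example_wire_frame():
    class _FakeLink(object):
        seq, srcSystem, srcComponent = 0, 1, 1
    # type=1 / autopilot=3 are illustrative enum values for a HEARTBEAT
    frame = MAVLink_heartbeat_message(1, 3, 2).pack(_FakeLink())
    assert len(frame) == 6 + 3 + 2  # header + 'BBB' payload + CRC
    return frame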
# enums
# MAV_MOUNT_MODE
MAV_MOUNT_MODE_RETRACT = 0 # Load and keep safe position (Roll,Pitch,Yaw) from EEPROM and stop
# stabilization
MAV_MOUNT_MODE_NEUTRAL = 1 # Load and keep neutral position (Roll,Pitch,Yaw) from EEPROM.
MAV_MOUNT_MODE_MAVLINK_TARGETING = 2 # Load neutral position and start MAVLink Roll,Pitch,Yaw control with
# stabilization
MAV_MOUNT_MODE_RC_TARGETING = 3 # Load neutral position and start RC Roll,Pitch,Yaw control with
# stabilization
MAV_MOUNT_MODE_GPS_POINT = 4 # Load neutral position and start to point to Lat,Lon,Alt
MAV_MOUNT_MODE_ENUM_END = 5 #
# MAV_CMD
MAV_CMD_NAV_WAYPOINT = 16 # Navigate to waypoint.
MAV_CMD_NAV_LOITER_UNLIM = 17 # Loiter around this waypoint an unlimited amount of time
MAV_CMD_NAV_LOITER_TURNS = 18 # Loiter around this waypoint for X turns
MAV_CMD_NAV_LOITER_TIME = 19 # Loiter around this waypoint for X seconds
MAV_CMD_NAV_RETURN_TO_LAUNCH = 20 # Return to launch location
MAV_CMD_NAV_LAND = 21 # Land at location
MAV_CMD_NAV_TAKEOFF = 22 # Takeoff from ground / hand
MAV_CMD_NAV_ROI = 80 # Sets the region of interest (ROI) for a sensor set or the
# vehicle itself. This can then be used by the
                                         # vehicle's control system to
# control the vehicle attitude and the
# attitude of various sensors such
# as cameras.
MAV_CMD_NAV_PATHPLANNING = 81 # Control autonomous path planning on the MAV.
MAV_CMD_NAV_LAST = 95 # NOP - This command is only used to mark the upper limit of the
# NAV/ACTION commands in the enumeration
MAV_CMD_CONDITION_DELAY = 112 # Delay mission state machine.
MAV_CMD_CONDITION_CHANGE_ALT = 113 # Ascend/descend at rate. Delay mission state machine until desired
# altitude reached.
MAV_CMD_CONDITION_DISTANCE = 114 # Delay mission state machine until within desired distance of next NAV
# point.
MAV_CMD_CONDITION_YAW = 115 # Reach a certain target angle.
MAV_CMD_CONDITION_LAST = 159 # NOP - This command is only used to mark the upper limit of the
# CONDITION commands in the enumeration
MAV_CMD_DO_SET_MODE = 176 # Set system mode.
MAV_CMD_DO_JUMP = 177 # Jump to the desired command in the mission list. Repeat this action
# only the specified number of times
MAV_CMD_DO_CHANGE_SPEED = 178 # Change speed and/or throttle set points.
MAV_CMD_DO_SET_HOME = 179 # Changes the home location either to the current location or a
# specified location.
MAV_CMD_DO_SET_PARAMETER = 180 # Set a system parameter. Caution! Use of this command requires
# knowledge of the numeric enumeration value
# of the parameter.
MAV_CMD_DO_SET_RELAY = 181 # Set a relay to a condition.
MAV_CMD_DO_REPEAT_RELAY = 182 # Cycle a relay on and off for a desired number of cyles with a desired
# period.
MAV_CMD_DO_SET_SERVO = 183 # Set a servo to a desired PWM value.
MAV_CMD_DO_REPEAT_SERVO = 184 # Cycle a between its nominal setting and a desired PWM for a desired
# number of cycles with a desired period.
MAV_CMD_DO_CONTROL_VIDEO = 200 # Control onboard camera capturing.
MAV_CMD_DO_SET_ROI = 201 # Sets the region of interest (ROI) for a sensor set or the
# vehicle itself. This can then be used by the
                                         # vehicle's control system
# to control the vehicle attitude and the
# attitude of various
# devices such as cameras.
MAV_CMD_DO_DIGICAM_CONFIGURE = 202 # Mission command to configure an on-board camera controller system.
MAV_CMD_DO_DIGICAM_CONTROL = 203 # Mission command to control an on-board camera controller system.
MAV_CMD_DO_MOUNT_CONFIGURE = 204 # Mission command to configure a camera or antenna mount
MAV_CMD_DO_MOUNT_CONTROL = 205 # Mission command to control a camera or antenna mount
MAV_CMD_DO_LAST = 240 # NOP - This command is only used to mark the upper limit of the DO
# commands in the enumeration
MAV_CMD_PREFLIGHT_CALIBRATION = 241 # Trigger calibration. This command will be only accepted if in pre-
# flight mode.
MAV_CMD_PREFLIGHT_STORAGE = 245 # Request storage of different parameter values and logs. This command
# will be only accepted if in pre-flight mode.
MAV_CMD_ENUM_END = 246 #
# MAV_DATA_STREAM
MAV_DATA_STREAM_ALL = 0 # Enable all data streams
MAV_DATA_STREAM_RAW_SENSORS = 1 # Enable IMU_RAW, GPS_RAW, GPS_STATUS packets.
MAV_DATA_STREAM_EXTENDED_STATUS = 2 # Enable GPS_STATUS, CONTROL_STATUS, AUX_STATUS
MAV_DATA_STREAM_RC_CHANNELS = 3 # Enable RC_CHANNELS_SCALED, RC_CHANNELS_RAW, SERVO_OUTPUT_RAW
MAV_DATA_STREAM_RAW_CONTROLLER = 4 # Enable ATTITUDE_CONTROLLER_OUTPUT, POSITION_CONTROLLER_OUTPUT,
# NAV_CONTROLLER_OUTPUT.
MAV_DATA_STREAM_POSITION = 6 # Enable LOCAL_POSITION, GLOBAL_POSITION/GLOBAL_POSITION_INT messages.
MAV_DATA_STREAM_EXTRA1 = 10 # Dependent on the autopilot
MAV_DATA_STREAM_EXTRA2 = 11 # Dependent on the autopilot
MAV_DATA_STREAM_EXTRA3 = 12 # Dependent on the autopilot
MAV_DATA_STREAM_ENUM_END = 13 #
# MAV_ROI
MAV_ROI_NONE = 0 # No region of interest.
MAV_ROI_WPNEXT = 1 # Point toward next waypoint.
MAV_ROI_WPINDEX = 2 # Point toward given waypoint.
MAV_ROI_LOCATION = 3 # Point toward fixed location.
MAV_ROI_TARGET = 4 # Point toward of given id.
MAV_ROI_ENUM_END = 5 #
# message IDs
MAVLINK_MSG_ID_BAD_DATA = -1
MAVLINK_MSG_ID_SENSOR_OFFSETS = 150
MAVLINK_MSG_ID_SET_MAG_OFFSETS = 151
MAVLINK_MSG_ID_MEMINFO = 152
MAVLINK_MSG_ID_AP_ADC = 153
MAVLINK_MSG_ID_DIGICAM_CONFIGURE = 154
MAVLINK_MSG_ID_DIGICAM_CONTROL = 155
MAVLINK_MSG_ID_MOUNT_CONFIGURE = 156
MAVLINK_MSG_ID_MOUNT_CONTROL = 157
MAVLINK_MSG_ID_MOUNT_STATUS = 158
MAVLINK_MSG_ID_HEARTBEAT = 0
MAVLINK_MSG_ID_BOOT = 1
MAVLINK_MSG_ID_SYSTEM_TIME = 2
MAVLINK_MSG_ID_PING = 3
MAVLINK_MSG_ID_SYSTEM_TIME_UTC = 4
MAVLINK_MSG_ID_CHANGE_OPERATOR_CONTROL = 5
MAVLINK_MSG_ID_CHANGE_OPERATOR_CONTROL_ACK = 6
MAVLINK_MSG_ID_AUTH_KEY = 7
MAVLINK_MSG_ID_ACTION_ACK = 9
MAVLINK_MSG_ID_ACTION = 10
MAVLINK_MSG_ID_SET_MODE = 11
MAVLINK_MSG_ID_SET_NAV_MODE = 12
MAVLINK_MSG_ID_PARAM_REQUEST_READ = 20
MAVLINK_MSG_ID_PARAM_REQUEST_LIST = 21
MAVLINK_MSG_ID_PARAM_VALUE = 22
MAVLINK_MSG_ID_PARAM_SET = 23
MAVLINK_MSG_ID_GPS_RAW_INT = 25
MAVLINK_MSG_ID_SCALED_IMU = 26
MAVLINK_MSG_ID_GPS_STATUS = 27
MAVLINK_MSG_ID_RAW_IMU = 28
MAVLINK_MSG_ID_RAW_PRESSURE = 29
MAVLINK_MSG_ID_SCALED_PRESSURE = 38
MAVLINK_MSG_ID_ATTITUDE = 30
MAVLINK_MSG_ID_LOCAL_POSITION = 31
MAVLINK_MSG_ID_GLOBAL_POSITION = 33
MAVLINK_MSG_ID_GPS_RAW = 32
MAVLINK_MSG_ID_SYS_STATUS = 34
MAVLINK_MSG_ID_RC_CHANNELS_RAW = 35
MAVLINK_MSG_ID_RC_CHANNELS_SCALED = 36
MAVLINK_MSG_ID_SERVO_OUTPUT_RAW = 37
MAVLINK_MSG_ID_WAYPOINT = 39
MAVLINK_MSG_ID_WAYPOINT_REQUEST = 40
MAVLINK_MSG_ID_WAYPOINT_SET_CURRENT = 41
MAVLINK_MSG_ID_WAYPOINT_CURRENT = 42
MAVLINK_MSG_ID_WAYPOINT_REQUEST_LIST = 43
MAVLINK_MSG_ID_WAYPOINT_COUNT = 44
MAVLINK_MSG_ID_WAYPOINT_CLEAR_ALL = 45
MAVLINK_MSG_ID_WAYPOINT_REACHED = 46
MAVLINK_MSG_ID_WAYPOINT_ACK = 47
MAVLINK_MSG_ID_GPS_SET_GLOBAL_ORIGIN = 48
MAVLINK_MSG_ID_GPS_LOCAL_ORIGIN_SET = 49
MAVLINK_MSG_ID_LOCAL_POSITION_SETPOINT_SET = 50
MAVLINK_MSG_ID_LOCAL_POSITION_SETPOINT = 51
MAVLINK_MSG_ID_CONTROL_STATUS = 52
MAVLINK_MSG_ID_SAFETY_SET_ALLOWED_AREA = 53
MAVLINK_MSG_ID_SAFETY_ALLOWED_AREA = 54
MAVLINK_MSG_ID_SET_ROLL_PITCH_YAW_THRUST = 55
MAVLINK_MSG_ID_SET_ROLL_PITCH_YAW_SPEED_THRUST = 56
MAVLINK_MSG_ID_ROLL_PITCH_YAW_THRUST_SETPOINT = 57
MAVLINK_MSG_ID_ROLL_PITCH_YAW_SPEED_THRUST_SETPOINT = 58
MAVLINK_MSG_ID_NAV_CONTROLLER_OUTPUT = 62
MAVLINK_MSG_ID_POSITION_TARGET = 63
MAVLINK_MSG_ID_STATE_CORRECTION = 64
MAVLINK_MSG_ID_SET_ALTITUDE = 65
MAVLINK_MSG_ID_REQUEST_DATA_STREAM = 66
MAVLINK_MSG_ID_HIL_STATE = 67
MAVLINK_MSG_ID_HIL_CONTROLS = 68
MAVLINK_MSG_ID_MANUAL_CONTROL = 69
MAVLINK_MSG_ID_RC_CHANNELS_OVERRIDE = 70
MAVLINK_MSG_ID_GLOBAL_POSITION_INT = 73
MAVLINK_MSG_ID_VFR_HUD = 74
MAVLINK_MSG_ID_COMMAND = 75
MAVLINK_MSG_ID_COMMAND_ACK = 76
MAVLINK_MSG_ID_OPTICAL_FLOW = 100
MAVLINK_MSG_ID_OBJECT_DETECTION_EVENT = 140
MAVLINK_MSG_ID_DEBUG_VECT = 251
MAVLINK_MSG_ID_NAMED_VALUE_FLOAT = 252
MAVLINK_MSG_ID_NAMED_VALUE_INT = 253
MAVLINK_MSG_ID_STATUSTEXT = 254
MAVLINK_MSG_ID_DEBUG = 255
class MAVLink_sensor_offsets_message(MAVLink_message):
'''
Offsets and calibrations values for hardware sensors.
This makes it easier to debug the calibration process.
'''
def __init__(self, mag_ofs_x, mag_ofs_y, mag_ofs_z, mag_declination, raw_press, raw_temp, gyro_cal_x, gyro_cal_y, gyro_cal_z, accel_cal_x, accel_cal_y, accel_cal_z):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_SENSOR_OFFSETS, 'SENSOR_OFFSETS')
self._fieldnames = ['mag_ofs_x', 'mag_ofs_y', 'mag_ofs_z', 'mag_declination', 'raw_press', 'raw_temp', 'gyro_cal_x', 'gyro_cal_y', 'gyro_cal_z', 'accel_cal_x', 'accel_cal_y', 'accel_cal_z']
self.mag_ofs_x = mag_ofs_x
self.mag_ofs_y = mag_ofs_y
self.mag_ofs_z = mag_ofs_z
self.mag_declination = mag_declination
self.raw_press = raw_press
self.raw_temp = raw_temp
self.gyro_cal_x = gyro_cal_x
self.gyro_cal_y = gyro_cal_y
self.gyro_cal_z = gyro_cal_z
self.accel_cal_x = accel_cal_x
self.accel_cal_y = accel_cal_y
self.accel_cal_z = accel_cal_z
def pack(self, mav):
return MAVLink_message.pack(self, mav, 143, struct.pack('>hhhfiiffffff', self.mag_ofs_x, self.mag_ofs_y, self.mag_ofs_z, self.mag_declination, self.raw_press, self.raw_temp, self.gyro_cal_x, self.gyro_cal_y, self.gyro_cal_z, self.accel_cal_x, self.accel_cal_y, self.accel_cal_z))
class MAVLink_set_mag_offsets_message(MAVLink_message):
'''
set the magnetometer offsets
'''
def __init__(self, target_system, target_component, mag_ofs_x, mag_ofs_y, mag_ofs_z):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_SET_MAG_OFFSETS, 'SET_MAG_OFFSETS')
self._fieldnames = ['target_system', 'target_component', 'mag_ofs_x', 'mag_ofs_y', 'mag_ofs_z']
self.target_system = target_system
self.target_component = target_component
self.mag_ofs_x = mag_ofs_x
self.mag_ofs_y = mag_ofs_y
self.mag_ofs_z = mag_ofs_z
def pack(self, mav):
return MAVLink_message.pack(self, mav, 29, struct.pack('>BBhhh', self.target_system, self.target_component, self.mag_ofs_x, self.mag_ofs_y, self.mag_ofs_z))
class MAVLink_meminfo_message(MAVLink_message):
'''
state of APM memory
'''
def __init__(self, brkval, freemem):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_MEMINFO, 'MEMINFO')
self._fieldnames = ['brkval', 'freemem']
self.brkval = brkval
self.freemem = freemem
def pack(self, mav):
return MAVLink_message.pack(self, mav, 208, struct.pack('>HH', self.brkval, self.freemem))
class MAVLink_ap_adc_message(MAVLink_message):
'''
raw ADC output
'''
def __init__(self, adc1, adc2, adc3, adc4, adc5, adc6):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_AP_ADC, 'AP_ADC')
self._fieldnames = ['adc1', 'adc2', 'adc3', 'adc4', 'adc5', 'adc6']
self.adc1 = adc1
self.adc2 = adc2
self.adc3 = adc3
self.adc4 = adc4
self.adc5 = adc5
self.adc6 = adc6
def pack(self, mav):
return MAVLink_message.pack(self, mav, 188, struct.pack('>HHHHHH', self.adc1, self.adc2, self.adc3, self.adc4, self.adc5, self.adc6))
class MAVLink_digicam_configure_message(MAVLink_message):
'''
Configure on-board Camera Control System.
'''
def __init__(self, target_system, target_component, mode, shutter_speed, aperture, iso, exposure_type, command_id, engine_cut_off, extra_param, extra_value):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_DIGICAM_CONFIGURE, 'DIGICAM_CONFIGURE')
self._fieldnames = ['target_system', 'target_component', 'mode', 'shutter_speed', 'aperture', 'iso', 'exposure_type', 'command_id', 'engine_cut_off', 'extra_param', 'extra_value']
self.target_system = target_system
self.target_component = target_component
self.mode = mode
self.shutter_speed = shutter_speed
self.aperture = aperture
self.iso = iso
self.exposure_type = exposure_type
self.command_id = command_id
self.engine_cut_off = engine_cut_off
self.extra_param = extra_param
self.extra_value = extra_value
def pack(self, mav):
return MAVLink_message.pack(self, mav, 118, struct.pack('>BBBHBBBBBBf', self.target_system, self.target_component, self.mode, self.shutter_speed, self.aperture, self.iso, self.exposure_type, self.command_id, self.engine_cut_off, self.extra_param, self.extra_value))
class MAVLink_digicam_control_message(MAVLink_message):
'''
Control on-board Camera Control System to take shots.
'''
def __init__(self, target_system, target_component, session, zoom_pos, zoom_step, focus_lock, shot, command_id, extra_param, extra_value):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_DIGICAM_CONTROL, 'DIGICAM_CONTROL')
self._fieldnames = ['target_system', 'target_component', 'session', 'zoom_pos', 'zoom_step', 'focus_lock', 'shot', 'command_id', 'extra_param', 'extra_value']
self.target_system = target_system
self.target_component = target_component
self.session = session
self.zoom_pos = zoom_pos
self.zoom_step = zoom_step
self.focus_lock = focus_lock
self.shot = shot
self.command_id = command_id
self.extra_param = extra_param
self.extra_value = extra_value
def pack(self, mav):
return MAVLink_message.pack(self, mav, 242, struct.pack('>BBBBbBBBBf', self.target_system, self.target_component, self.session, self.zoom_pos, self.zoom_step, self.focus_lock, self.shot, self.command_id, self.extra_param, self.extra_value))
class MAVLink_mount_configure_message(MAVLink_message):
'''
Message to configure a camera mount, directional antenna, etc.
'''
def __init__(self, target_system, target_component, mount_mode, stab_roll, stab_pitch, stab_yaw):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_MOUNT_CONFIGURE, 'MOUNT_CONFIGURE')
self._fieldnames = ['target_system', 'target_component', 'mount_mode', 'stab_roll', 'stab_pitch', 'stab_yaw']
self.target_system = target_system
self.target_component = target_component
self.mount_mode = mount_mode
self.stab_roll = stab_roll
self.stab_pitch = stab_pitch
self.stab_yaw = stab_yaw
def pack(self, mav):
return MAVLink_message.pack(self, mav, 19, struct.pack('>BBBBBB', self.target_system, self.target_component, self.mount_mode, self.stab_roll, self.stab_pitch, self.stab_yaw))
class MAVLink_mount_control_message(MAVLink_message):
'''
Message to control a camera mount, directional antenna, etc.
'''
def __init__(self, target_system, target_component, input_a, input_b, input_c, save_position):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_MOUNT_CONTROL, 'MOUNT_CONTROL')
self._fieldnames = ['target_system', 'target_component', 'input_a', 'input_b', 'input_c', 'save_position']
self.target_system = target_system
self.target_component = target_component
self.input_a = input_a
self.input_b = input_b
self.input_c = input_c
self.save_position = save_position
def pack(self, mav):
return MAVLink_message.pack(self, mav, 97, struct.pack('>BBiiiB', self.target_system, self.target_component, self.input_a, self.input_b, self.input_c, self.save_position))
class MAVLink_mount_status_message(MAVLink_message):
'''
Message with some status from APM to GCS about camera or
antenna mount
'''
def __init__(self, target_system, target_component, pointing_a, pointing_b, pointing_c):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_MOUNT_STATUS, 'MOUNT_STATUS')
self._fieldnames = ['target_system', 'target_component', 'pointing_a', 'pointing_b', 'pointing_c']
self.target_system = target_system
self.target_component = target_component
self.pointing_a = pointing_a
self.pointing_b = pointing_b
self.pointing_c = pointing_c
def pack(self, mav):
return MAVLink_message.pack(self, mav, 233, struct.pack('>BBiii', self.target_system, self.target_component, self.pointing_a, self.pointing_b, self.pointing_c))
class MAVLink_heartbeat_message(MAVLink_message):
'''
The heartbeat message shows that a system is present and
responding. The type of the MAV and Autopilot hardware allow
the receiving system to treat further messages from this
system appropriate (e.g. by laying out the user interface
based on the autopilot).
'''
def __init__(self, type, autopilot, mavlink_version):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_HEARTBEAT, 'HEARTBEAT')
self._fieldnames = ['type', 'autopilot', 'mavlink_version']
self.type = type
self.autopilot = autopilot
self.mavlink_version = mavlink_version
def pack(self, mav):
return MAVLink_message.pack(self, mav, 72, struct.pack('>BBB', self.type, self.autopilot, self.mavlink_version))
class MAVLink_boot_message(MAVLink_message):
'''
The boot message indicates that a system is starting. The
    onboard software version allows keeping track of onboard
    software/firmware revisions.
'''
def __init__(self, version):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_BOOT, 'BOOT')
self._fieldnames = ['version']
self.version = version
def pack(self, mav):
return MAVLink_message.pack(self, mav, 39, struct.pack('>I', self.version))
class MAVLink_system_time_message(MAVLink_message):
'''
The system time is the time of the master clock, typically the
computer clock of the main onboard computer.
'''
def __init__(self, time_usec):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_SYSTEM_TIME, 'SYSTEM_TIME')
self._fieldnames = ['time_usec']
self.time_usec = time_usec
def pack(self, mav):
return MAVLink_message.pack(self, mav, 190, struct.pack('>Q', self.time_usec))
class MAVLink_ping_message(MAVLink_message):
'''
A ping message either requesting or responding to a ping. This
    allows measuring system latencies, including serial port,
radio modem and UDP connections.
'''
def __init__(self, seq, target_system, target_component, time):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_PING, 'PING')
self._fieldnames = ['seq', 'target_system', 'target_component', 'time']
self.seq = seq
self.target_system = target_system
self.target_component = target_component
self.time = time
def pack(self, mav):
return MAVLink_message.pack(self, mav, 92, struct.pack('>IBBQ', self.seq, self.target_system, self.target_component, self.time))
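# Editor's illustration of the latency measurement described above: the
# sender stamps a PING with its current time in microseconds, the peer
# echoes seq and time back, and the round trip is the difference on
# receipt. `send` and `now_usec` are hypothetical callbacks, not part of
# this file's API.
def _example_ping_roundtrip(send, now_usec):
    send(MAVLink_ping_message(seq=42, target_system=1, target_component=1,
                              time=now_usec()))
    # ... later, when the echoed PING with seq 42 arrives as `echo`:
    #     rtt_usec = now_usec() - echo.time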
class MAVLink_system_time_utc_message(MAVLink_message):
'''
UTC date and time from GPS module
'''
def __init__(self, utc_date, utc_time):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_SYSTEM_TIME_UTC, 'SYSTEM_TIME_UTC')
self._fieldnames = ['utc_date', 'utc_time']
self.utc_date = utc_date
self.utc_time = utc_time
def pack(self, mav):
return MAVLink_message.pack(self, mav, 191, struct.pack('>II', self.utc_date, self.utc_time))
class MAVLink_change_operator_control_message(MAVLink_message):
'''
Request to control this MAV
'''
def __init__(self, target_system, control_request, version, passkey):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_CHANGE_OPERATOR_CONTROL, 'CHANGE_OPERATOR_CONTROL')
self._fieldnames = ['target_system', 'control_request', 'version', 'passkey']
self.target_system = target_system
self.control_request = control_request
self.version = version
self.passkey = passkey
def pack(self, mav):
return MAVLink_message.pack(self, mav, 217, struct.pack('>BBB25s', self.target_system, self.control_request, self.version, self.passkey))
class MAVLink_change_operator_control_ack_message(MAVLink_message):
'''
Accept / deny control of this MAV
'''
def __init__(self, gcs_system_id, control_request, ack):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_CHANGE_OPERATOR_CONTROL_ACK, 'CHANGE_OPERATOR_CONTROL_ACK')
self._fieldnames = ['gcs_system_id', 'control_request', 'ack']
self.gcs_system_id = gcs_system_id
self.control_request = control_request
self.ack = ack
def pack(self, mav):
return MAVLink_message.pack(self, mav, 104, struct.pack('>BBB', self.gcs_system_id, self.control_request, self.ack))
class MAVLink_auth_key_message(MAVLink_message):
'''
Emit an encrypted signature / key identifying this system.
PLEASE NOTE: This protocol has been kept simple, so
transmitting the key requires an encrypted channel for true
safety.
'''
def __init__(self, key):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_AUTH_KEY, 'AUTH_KEY')
self._fieldnames = ['key']
self.key = key
def pack(self, mav):
return MAVLink_message.pack(self, mav, 119, struct.pack('>32s', self.key))
class MAVLink_action_ack_message(MAVLink_message):
'''
This message acknowledges an action. IMPORTANT: The
    acknowledgement can also be negative, e.g. the MAV rejects a
reset message because it is in-flight. The action ids are
defined in ENUM MAV_ACTION in mavlink/include/mavlink_types.h
'''
def __init__(self, action, result):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_ACTION_ACK, 'ACTION_ACK')
self._fieldnames = ['action', 'result']
self.action = action
self.result = result
def pack(self, mav):
return MAVLink_message.pack(self, mav, 219, struct.pack('>BB', self.action, self.result))
class MAVLink_action_message(MAVLink_message):
'''
    An action message allows executing a certain onboard action.
    These include liftoff, land, storing parameters to EEPROM,
    shutdown, etc. The action ids are defined in ENUM MAV_ACTION
in mavlink/include/mavlink_types.h
'''
def __init__(self, target, target_component, action):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_ACTION, 'ACTION')
self._fieldnames = ['target', 'target_component', 'action']
self.target = target
self.target_component = target_component
self.action = action
def pack(self, mav):
return MAVLink_message.pack(self, mav, 60, struct.pack('>BBB', self.target, self.target_component, self.action))
class MAVLink_set_mode_message(MAVLink_message):
'''
Set the system mode, as defined by enum MAV_MODE in
mavlink/include/mavlink_types.h. There is no target component
id as the mode is by definition for the overall aircraft, not
only for one component.
'''
def __init__(self, target, mode):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_SET_MODE, 'SET_MODE')
self._fieldnames = ['target', 'mode']
self.target = target
self.mode = mode
def pack(self, mav):
return MAVLink_message.pack(self, mav, 186, struct.pack('>BB', self.target, self.mode))
class MAVLink_set_nav_mode_message(MAVLink_message):
'''
Set the system navigation mode, as defined by enum
MAV_NAV_MODE in mavlink/include/mavlink_types.h. The
navigation mode applies to the whole aircraft and thus all
components.
'''
def __init__(self, target, nav_mode):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_SET_NAV_MODE, 'SET_NAV_MODE')
self._fieldnames = ['target', 'nav_mode']
self.target = target
self.nav_mode = nav_mode
def pack(self, mav):
return MAVLink_message.pack(self, mav, 10, struct.pack('>BB', self.target, self.nav_mode))
class MAVLink_param_request_read_message(MAVLink_message):
'''
Request to read the onboard parameter with the param_id string
id. Onboard parameters are stored as key[const char*] ->
    value[float]. This allows sending a parameter to any other
    component (such as the GCS) without prior knowledge of
    possible parameter names. Thus the same GCS can
store different parameters for different autopilots. See also
http://qgroundcontrol.org/parameter_interface for a full
documentation of QGroundControl and IMU code.
'''
def __init__(self, target_system, target_component, param_id, param_index):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_PARAM_REQUEST_READ, 'PARAM_REQUEST_READ')
self._fieldnames = ['target_system', 'target_component', 'param_id', 'param_index']
self.target_system = target_system
self.target_component = target_component
self.param_id = param_id
self.param_index = param_index
def pack(self, mav):
return MAVLink_message.pack(self, mav, 89, struct.pack('>BB15sh', self.target_system, self.target_component, self.param_id, self.param_index))
class MAVLink_param_request_list_message(MAVLink_message):
'''
    Request all parameters of this component. After this request,
all parameters are emitted.
'''
def __init__(self, target_system, target_component):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_PARAM_REQUEST_LIST, 'PARAM_REQUEST_LIST')
self._fieldnames = ['target_system', 'target_component']
self.target_system = target_system
self.target_component = target_component
def pack(self, mav):
return MAVLink_message.pack(self, mav, 159, struct.pack('>BB', self.target_system, self.target_component))
class MAVLink_param_value_message(MAVLink_message):
'''
    Emit the value of an onboard parameter. The inclusion of
    param_count and param_index in the message allows the
    recipient to keep track of received parameters and to
    re-request missing parameters after a loss or timeout.
'''
def __init__(self, param_id, param_value, param_count, param_index):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_PARAM_VALUE, 'PARAM_VALUE')
self._fieldnames = ['param_id', 'param_value', 'param_count', 'param_index']
self.param_id = param_id
self.param_value = param_value
self.param_count = param_count
self.param_index = param_index
def pack(self, mav):
return MAVLink_message.pack(self, mav, 162, struct.pack('>15sfHH', self.param_id, self.param_value, self.param_count, self.param_index))
class MAVLink_param_set_message(MAVLink_message):
'''
Set a parameter value TEMPORARILY to RAM. It will be reset to
default on system reboot. Send the ACTION
MAV_ACTION_STORAGE_WRITE to PERMANENTLY write the RAM contents
to EEPROM. IMPORTANT: The receiving component should
acknowledge the new parameter value by sending a param_value
message to all communication partners. This will also ensure
that multiple GCS all have an up-to-date list of all
parameters. If the sending GCS did not receive a PARAM_VALUE
message within its timeout time, it should re-send the
PARAM_SET message.
'''
def __init__(self, target_system, target_component, param_id, param_value):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_PARAM_SET, 'PARAM_SET')
self._fieldnames = ['target_system', 'target_component', 'param_id', 'param_value']
self.target_system = target_system
self.target_component = target_component
self.param_id = param_id
self.param_value = param_value
def pack(self, mav):
return MAVLink_message.pack(self, mav, 121, struct.pack('>BB15sf', self.target_system, self.target_component, self.param_id, self.param_value))
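# Editor's sketch of the parameter round trip described in the docstrings
# above: the GCS sends PARAM_SET, and the receiving component is expected
# to answer with a PARAM_VALUE carrying the new value, which also keeps
# any other listening GCS current; if no PARAM_VALUE arrives before the
# timeout, the GCS re-sends the PARAM_SET. `send` is a hypothetical
# callback and 'RC1_TRIM' an illustrative parameter name.
def _example_param_set(send):
    send(MAVLink_param_set_message(1, 1, 'RC1_TRIM', 1500.0))
    # ... wait for a matching MAVLink_param_value_message, else re-send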
class MAVLink_gps_raw_int_message(MAVLink_message):
'''
The global position, as returned by the Global Positioning
System (GPS). This is NOT the global position estimate of the
    system, but rather a RAW sensor value. See message
GLOBAL_POSITION for the global position estimate. Coordinate
frame is right-handed, Z-axis up (GPS frame)
'''
def __init__(self, usec, fix_type, lat, lon, alt, eph, epv, v, hdg):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_GPS_RAW_INT, 'GPS_RAW_INT')
self._fieldnames = ['usec', 'fix_type', 'lat', 'lon', 'alt', 'eph', 'epv', 'v', 'hdg']
self.usec = usec
self.fix_type = fix_type
self.lat = lat
self.lon = lon
self.alt = alt
self.eph = eph
self.epv = epv
self.v = v
self.hdg = hdg
def pack(self, mav):
return MAVLink_message.pack(self, mav, 149, struct.pack('>QBiiiffff', self.usec, self.fix_type, self.lat, self.lon, self.alt, self.eph, self.epv, self.v, self.hdg))
class MAVLink_scaled_imu_message(MAVLink_message):
'''
The RAW IMU readings for the usual 9DOF sensor setup. This
    message should contain the values scaled to the described
    units.
'''
def __init__(self, usec, xacc, yacc, zacc, xgyro, ygyro, zgyro, xmag, ymag, zmag):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_SCALED_IMU, 'SCALED_IMU')
self._fieldnames = ['usec', 'xacc', 'yacc', 'zacc', 'xgyro', 'ygyro', 'zgyro', 'xmag', 'ymag', 'zmag']
self.usec = usec
self.xacc = xacc
self.yacc = yacc
self.zacc = zacc
self.xgyro = xgyro
self.ygyro = ygyro
self.zgyro = zgyro
self.xmag = xmag
self.ymag = ymag
self.zmag = zmag
def pack(self, mav):
return MAVLink_message.pack(self, mav, 222, struct.pack('>Qhhhhhhhhh', self.usec, self.xacc, self.yacc, self.zacc, self.xgyro, self.ygyro, self.zgyro, self.xmag, self.ymag, self.zmag))
class MAVLink_gps_status_message(MAVLink_message):
'''
The positioning status, as reported by GPS. This message is
intended to display status information about each satellite
visible to the receiver. See message GLOBAL_POSITION for the
global position estimate. This message can contain information
for up to 20 satellites.
'''
def __init__(self, satellites_visible, satellite_prn, satellite_used, satellite_elevation, satellite_azimuth, satellite_snr):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_GPS_STATUS, 'GPS_STATUS')
self._fieldnames = ['satellites_visible', 'satellite_prn', 'satellite_used', 'satellite_elevation', 'satellite_azimuth', 'satellite_snr']
self.satellites_visible = satellites_visible
self.satellite_prn = satellite_prn
self.satellite_used = satellite_used
self.satellite_elevation = satellite_elevation
self.satellite_azimuth = satellite_azimuth
self.satellite_snr = satellite_snr
def pack(self, mav):
return MAVLink_message.pack(self, mav, 110, struct.pack('>B20s20s20s20s20s', self.satellites_visible, self.satellite_prn, self.satellite_used, self.satellite_elevation, self.satellite_azimuth, self.satellite_snr))
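# Editor's note: each satellite_* field above is packed as a fixed
# 20-byte string ('20s' in the struct format), one byte per satellite
# slot, rather than as a list. Under Python 2 byte strings, a receiver
# reads slot i as sketched below (hypothetical helper).
def _example_satellite_snr(msg, i):
    return ord(msg.satellite_snr[i])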
class MAVLink_raw_imu_message(MAVLink_message):
'''
The RAW IMU readings for the usual 9DOF sensor setup. This
message should always contain the true raw values without any
scaling to allow data capture and system debugging.
'''
def __init__(self, usec, xacc, yacc, zacc, xgyro, ygyro, zgyro, xmag, ymag, zmag):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_RAW_IMU, 'RAW_IMU')
self._fieldnames = ['usec', 'xacc', 'yacc', 'zacc', 'xgyro', 'ygyro', 'zgyro', 'xmag', 'ymag', 'zmag']
self.usec = usec
self.xacc = xacc
self.yacc = yacc
self.zacc = zacc
self.xgyro = xgyro
self.ygyro = ygyro
self.zgyro = zgyro
self.xmag = xmag
self.ymag = ymag
self.zmag = zmag
def pack(self, mav):
return MAVLink_message.pack(self, mav, 179, struct.pack('>Qhhhhhhhhh', self.usec, self.xacc, self.yacc, self.zacc, self.xgyro, self.ygyro, self.zgyro, self.xmag, self.ymag, self.zmag))
class MAVLink_raw_pressure_message(MAVLink_message):
'''
The RAW pressure readings for the typical setup of one
absolute pressure and one differential pressure sensor. The
sensor values should be the raw, UNSCALED ADC values.
'''
def __init__(self, usec, press_abs, press_diff1, press_diff2, temperature):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_RAW_PRESSURE, 'RAW_PRESSURE')
self._fieldnames = ['usec', 'press_abs', 'press_diff1', 'press_diff2', 'temperature']
self.usec = usec
self.press_abs = press_abs
self.press_diff1 = press_diff1
self.press_diff2 = press_diff2
self.temperature = temperature
def pack(self, mav):
return MAVLink_message.pack(self, mav, 136, struct.pack('>Qhhhh', self.usec, self.press_abs, self.press_diff1, self.press_diff2, self.temperature))
class MAVLink_scaled_pressure_message(MAVLink_message):
'''
The pressure readings for the typical setup of one absolute
and differential pressure sensor. The units are as specified
in each field.
'''
def __init__(self, usec, press_abs, press_diff, temperature):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_SCALED_PRESSURE, 'SCALED_PRESSURE')
self._fieldnames = ['usec', 'press_abs', 'press_diff', 'temperature']
self.usec = usec
self.press_abs = press_abs
self.press_diff = press_diff
self.temperature = temperature
def pack(self, mav):
return MAVLink_message.pack(self, mav, 229, struct.pack('>Qffh', self.usec, self.press_abs, self.press_diff, self.temperature))
class MAVLink_attitude_message(MAVLink_message):
'''
The attitude in the aeronautical frame (right-handed, Z-down,
X-front, Y-right).
'''
def __init__(self, usec, roll, pitch, yaw, rollspeed, pitchspeed, yawspeed):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_ATTITUDE, 'ATTITUDE')
self._fieldnames = ['usec', 'roll', 'pitch', 'yaw', 'rollspeed', 'pitchspeed', 'yawspeed']
self.usec = usec
self.roll = roll
self.pitch = pitch
self.yaw = yaw
self.rollspeed = rollspeed
self.pitchspeed = pitchspeed
self.yawspeed = yawspeed
def pack(self, mav):
return MAVLink_message.pack(self, mav, 66, struct.pack('>Qffffff', self.usec, self.roll, self.pitch, self.yaw, self.rollspeed, self.pitchspeed, self.yawspeed))
class MAVLink_local_position_message(MAVLink_message):
'''
The filtered local position (e.g. fused computer vision and
accelerometers). Coordinate frame is right-handed, Z-axis down
(aeronautical frame)
'''
def __init__(self, usec, x, y, z, vx, vy, vz):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_LOCAL_POSITION, 'LOCAL_POSITION')
self._fieldnames = ['usec', 'x', 'y', 'z', 'vx', 'vy', 'vz']
self.usec = usec
self.x = x
self.y = y
self.z = z
self.vx = vx
self.vy = vy
self.vz = vz
def pack(self, mav):
return MAVLink_message.pack(self, mav, 126, struct.pack('>Qffffff', self.usec, self.x, self.y, self.z, self.vx, self.vy, self.vz))
class MAVLink_global_position_message(MAVLink_message):
'''
The filtered global position (e.g. fused GPS and
accelerometers). Coordinate frame is right-handed, Z-axis up
(GPS frame)
'''
def __init__(self, usec, lat, lon, alt, vx, vy, vz):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_GLOBAL_POSITION, 'GLOBAL_POSITION')
self._fieldnames = ['usec', 'lat', 'lon', 'alt', 'vx', 'vy', 'vz']
self.usec = usec
self.lat = lat
self.lon = lon
self.alt = alt
self.vx = vx
self.vy = vy
self.vz = vz
def pack(self, mav):
return MAVLink_message.pack(self, mav, 147, struct.pack('>Qffffff', self.usec, self.lat, self.lon, self.alt, self.vx, self.vy, self.vz))
class MAVLink_gps_raw_message(MAVLink_message):
'''
The global position, as returned by the Global Positioning
System (GPS). This is NOT the global position estimate of the
    system, but rather a RAW sensor value. See message
GLOBAL_POSITION for the global position estimate. Coordinate
frame is right-handed, Z-axis up (GPS frame)
'''
def __init__(self, usec, fix_type, lat, lon, alt, eph, epv, v, hdg):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_GPS_RAW, 'GPS_RAW')
self._fieldnames = ['usec', 'fix_type', 'lat', 'lon', 'alt', 'eph', 'epv', 'v', 'hdg']
self.usec = usec
self.fix_type = fix_type
self.lat = lat
self.lon = lon
self.alt = alt
self.eph = eph
self.epv = epv
self.v = v
self.hdg = hdg
def pack(self, mav):
return MAVLink_message.pack(self, mav, 185, struct.pack('>QBfffffff', self.usec, self.fix_type, self.lat, self.lon, self.alt, self.eph, self.epv, self.v, self.hdg))
class MAVLink_sys_status_message(MAVLink_message):
'''
The general system state. If the system is following the
MAVLink standard, the system state is mainly defined by three
orthogonal states/modes: The system mode, which is either
LOCKED (motors shut down and locked), MANUAL (system under RC
control), GUIDED (system with autonomous position control,
position setpoint controlled manually) or AUTO (system guided
    by path/waypoint planner). The NAV_MODE defines the current
    flight state: LIFTOFF (often an open-loop maneuver), LANDING,
    WAYPOINTS or VECTOR. This represents the internal navigation
    state machine. The system status shows whether the system is
    currently active or not and if an emergency occurred. During
    the CRITICAL and EMERGENCY states the MAV is still considered
    to be active, but should start emergency procedures
    autonomously. After a failure occurred it should first move
from active to critical to allow manual intervention and then
move to emergency after a certain timeout.
'''
def __init__(self, mode, nav_mode, status, load, vbat, battery_remaining, packet_drop):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_SYS_STATUS, 'SYS_STATUS')
self._fieldnames = ['mode', 'nav_mode', 'status', 'load', 'vbat', 'battery_remaining', 'packet_drop']
self.mode = mode
self.nav_mode = nav_mode
self.status = status
self.load = load
self.vbat = vbat
self.battery_remaining = battery_remaining
self.packet_drop = packet_drop
def pack(self, mav):
return MAVLink_message.pack(self, mav, 112, struct.pack('>BBBHHHH', self.mode, self.nav_mode, self.status, self.load, self.vbat, self.battery_remaining, self.packet_drop))
class MAVLink_rc_channels_raw_message(MAVLink_message):
'''
The RAW values of the RC channels received. The standard PPM
modulation is as follows: 1000 microseconds: 0%, 2000
microseconds: 100%. Individual receivers/transmitters might
violate this specification.
'''
def __init__(self, chan1_raw, chan2_raw, chan3_raw, chan4_raw, chan5_raw, chan6_raw, chan7_raw, chan8_raw, rssi):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_RC_CHANNELS_RAW, 'RC_CHANNELS_RAW')
self._fieldnames = ['chan1_raw', 'chan2_raw', 'chan3_raw', 'chan4_raw', 'chan5_raw', 'chan6_raw', 'chan7_raw', 'chan8_raw', 'rssi']
self.chan1_raw = chan1_raw
self.chan2_raw = chan2_raw
self.chan3_raw = chan3_raw
self.chan4_raw = chan4_raw
self.chan5_raw = chan5_raw
self.chan6_raw = chan6_raw
self.chan7_raw = chan7_raw
self.chan8_raw = chan8_raw
self.rssi = rssi
def pack(self, mav):
return MAVLink_message.pack(self, mav, 252, struct.pack('>HHHHHHHHB', self.chan1_raw, self.chan2_raw, self.chan3_raw, self.chan4_raw, self.chan5_raw, self.chan6_raw, self.chan7_raw, self.chan8_raw, self.rssi))
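# Editor's note: under the PPM convention stated above (1000 us = 0%,
# 2000 us = 100%), a raw channel value converts to percent as
# (raw - 1000) / 10.0, so e.g. a chan1_raw of 1500 is mid-stick (50%).
def _example_ppm_percent(raw_usec):
    return (raw_usec - 1000) / 10.0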
class MAVLink_rc_channels_scaled_message(MAVLink_message):
'''
The scaled values of the RC channels received. (-100%) -10000,
(0%) 0, (100%) 10000
'''
def __init__(self, chan1_scaled, chan2_scaled, chan3_scaled, chan4_scaled, chan5_scaled, chan6_scaled, chan7_scaled, chan8_scaled, rssi):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_RC_CHANNELS_SCALED, 'RC_CHANNELS_SCALED')
self._fieldnames = ['chan1_scaled', 'chan2_scaled', 'chan3_scaled', 'chan4_scaled', 'chan5_scaled', 'chan6_scaled', 'chan7_scaled', 'chan8_scaled', 'rssi']
self.chan1_scaled = chan1_scaled
self.chan2_scaled = chan2_scaled
self.chan3_scaled = chan3_scaled
self.chan4_scaled = chan4_scaled
self.chan5_scaled = chan5_scaled
self.chan6_scaled = chan6_scaled
self.chan7_scaled = chan7_scaled
self.chan8_scaled = chan8_scaled
self.rssi = rssi
def pack(self, mav):
return MAVLink_message.pack(self, mav, 162, struct.pack('>hhhhhhhhB', self.chan1_scaled, self.chan2_scaled, self.chan3_scaled, self.chan4_scaled, self.chan5_scaled, self.chan6_scaled, self.chan7_scaled, self.chan8_scaled, self.rssi))
class MAVLink_servo_output_raw_message(MAVLink_message):
'''
The RAW values of the servo outputs (for RC input from the
remote, use the RC_CHANNELS messages). The standard PPM
modulation is as follows: 1000 microseconds: 0%, 2000
microseconds: 100%.
'''
def __init__(self, servo1_raw, servo2_raw, servo3_raw, servo4_raw, servo5_raw, servo6_raw, servo7_raw, servo8_raw):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_SERVO_OUTPUT_RAW, 'SERVO_OUTPUT_RAW')
self._fieldnames = ['servo1_raw', 'servo2_raw', 'servo3_raw', 'servo4_raw', 'servo5_raw', 'servo6_raw', 'servo7_raw', 'servo8_raw']
self.servo1_raw = servo1_raw
self.servo2_raw = servo2_raw
self.servo3_raw = servo3_raw
self.servo4_raw = servo4_raw
self.servo5_raw = servo5_raw
self.servo6_raw = servo6_raw
self.servo7_raw = servo7_raw
self.servo8_raw = servo8_raw
def pack(self, mav):
return MAVLink_message.pack(self, mav, 215, struct.pack('>HHHHHHHH', self.servo1_raw, self.servo2_raw, self.servo3_raw, self.servo4_raw, self.servo5_raw, self.servo6_raw, self.servo7_raw, self.servo8_raw))
class MAVLink_waypoint_message(MAVLink_message):
'''
Message encoding a waypoint. This message is emitted to
announce the presence of a waypoint and to set a waypoint
on the system. The waypoint can be either in x, y, z meters
(type: LOCAL) or x:lat, y:lon, z:altitude. Local frame is
Z-down, right handed, global frame is Z-up, right handed
'''
def __init__(self, target_system, target_component, seq, frame, command, current, autocontinue, param1, param2, param3, param4, x, y, z):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_WAYPOINT, 'WAYPOINT')
self._fieldnames = ['target_system', 'target_component', 'seq', 'frame', 'command', 'current', 'autocontinue', 'param1', 'param2', 'param3', 'param4', 'x', 'y', 'z']
self.target_system = target_system
self.target_component = target_component
self.seq = seq
self.frame = frame
self.command = command
self.current = current
self.autocontinue = autocontinue
self.param1 = param1
self.param2 = param2
self.param3 = param3
self.param4 = param4
self.x = x
self.y = y
self.z = z
def pack(self, mav):
return MAVLink_message.pack(self, mav, 128, struct.pack('>BBHBBBBfffffff', self.target_system, self.target_component, self.seq, self.frame, self.command, self.current, self.autocontinue, self.param1, self.param2, self.param3, self.param4, self.x, self.y, self.z))
class MAVLink_waypoint_request_message(MAVLink_message):
'''
Request the information of the waypoint with the sequence
number seq. The response of the system to this message should
be a WAYPOINT message.
'''
def __init__(self, target_system, target_component, seq):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_WAYPOINT_REQUEST, 'WAYPOINT_REQUEST')
self._fieldnames = ['target_system', 'target_component', 'seq']
self.target_system = target_system
self.target_component = target_component
self.seq = seq
def pack(self, mav):
return MAVLink_message.pack(self, mav, 9, struct.pack('>BBH', self.target_system, self.target_component, self.seq))
class MAVLink_waypoint_set_current_message(MAVLink_message):
'''
Set the waypoint with sequence number seq as current waypoint.
This means that the MAV will continue to this waypoint on the
shortest path (not following the waypoints in-between).
'''
def __init__(self, target_system, target_component, seq):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_WAYPOINT_SET_CURRENT, 'WAYPOINT_SET_CURRENT')
self._fieldnames = ['target_system', 'target_component', 'seq']
self.target_system = target_system
self.target_component = target_component
self.seq = seq
def pack(self, mav):
return MAVLink_message.pack(self, mav, 106, struct.pack('>BBH', self.target_system, self.target_component, self.seq))
class MAVLink_waypoint_current_message(MAVLink_message):
'''
Message that announces the sequence number of the current
active waypoint. The MAV will fly towards this waypoint.
'''
def __init__(self, seq):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_WAYPOINT_CURRENT, 'WAYPOINT_CURRENT')
self._fieldnames = ['seq']
self.seq = seq
def pack(self, mav):
return MAVLink_message.pack(self, mav, 101, struct.pack('>H', self.seq))
class MAVLink_waypoint_request_list_message(MAVLink_message):
'''
Request the overall list of waypoints from the
system/component.
'''
def __init__(self, target_system, target_component):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_WAYPOINT_REQUEST_LIST, 'WAYPOINT_REQUEST_LIST')
self._fieldnames = ['target_system', 'target_component']
self.target_system = target_system
self.target_component = target_component
def pack(self, mav):
return MAVLink_message.pack(self, mav, 213, struct.pack('>BB', self.target_system, self.target_component))
class MAVLink_waypoint_count_message(MAVLink_message):
'''
This message is emitted as response to WAYPOINT_REQUEST_LIST
by the MAV. The GCS can then request the individual waypoints
based on the knowledge of the total number of waypoints.
'''
def __init__(self, target_system, target_component, count):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_WAYPOINT_COUNT, 'WAYPOINT_COUNT')
self._fieldnames = ['target_system', 'target_component', 'count']
self.target_system = target_system
self.target_component = target_component
self.count = count
def pack(self, mav):
return MAVLink_message.pack(self, mav, 4, struct.pack('>BBH', self.target_system, self.target_component, self.count))
class MAVLink_waypoint_clear_all_message(MAVLink_message):
'''
Delete all waypoints at once.
'''
def __init__(self, target_system, target_component):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_WAYPOINT_CLEAR_ALL, 'WAYPOINT_CLEAR_ALL')
self._fieldnames = ['target_system', 'target_component']
self.target_system = target_system
self.target_component = target_component
def pack(self, mav):
return MAVLink_message.pack(self, mav, 229, struct.pack('>BB', self.target_system, self.target_component))
class MAVLink_waypoint_reached_message(MAVLink_message):
'''
A certain waypoint has been reached. The system will either
hold this position (or circle on the orbit) or (if the
autocontinue on the WP was set) continue to the next waypoint.
'''
def __init__(self, seq):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_WAYPOINT_REACHED, 'WAYPOINT_REACHED')
self._fieldnames = ['seq']
self.seq = seq
def pack(self, mav):
return MAVLink_message.pack(self, mav, 21, struct.pack('>H', self.seq))
class MAVLink_waypoint_ack_message(MAVLink_message):
'''
Ack message during waypoint handling. The type field states if
this message is a positive ack (type=0) or if an error
happened (type=non-zero).
'''
def __init__(self, target_system, target_component, type):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_WAYPOINT_ACK, 'WAYPOINT_ACK')
self._fieldnames = ['target_system', 'target_component', 'type']
self.target_system = target_system
self.target_component = target_component
self.type = type
def pack(self, mav):
return MAVLink_message.pack(self, mav, 214, struct.pack('>BBB', self.target_system, self.target_component, self.type))
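# Editor's sketch of the waypoint download handshake implied by the
# docstrings above, from the GCS side with a hypothetical `send` callback:
# WAYPOINT_REQUEST_LIST is answered by WAYPOINT_COUNT, each
# WAYPOINT_REQUEST by the corresponding WAYPOINT, and the GCS closes the
# transaction with a WAYPOINT_ACK (type=0 for success).
def _example_waypoint_download(send, count):
    send(MAVLink_waypoint_request_list_message(1, 1))
    # ... the MAV replies with a MAVLink_waypoint_count_message; then:
    for seq in range(count):
        send(MAVLink_waypoint_request_message(1, 1, seq))
        # ... the MAV replies with the MAVLink_waypoint_message for `seq`
    send(MAVLink_waypoint_ack_message(1, 1, 0))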
class MAVLink_gps_set_global_origin_message(MAVLink_message):
'''
    As local waypoints exist, the global waypoint reference allows
    transforming between the local coordinate frame and the global
    (GPS) coordinate frame. This can be necessary when e.g. indoor
    and outdoor settings are connected and the MAV should move
    from indoors to outdoors.
'''
def __init__(self, target_system, target_component, latitude, longitude, altitude):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_GPS_SET_GLOBAL_ORIGIN, 'GPS_SET_GLOBAL_ORIGIN')
self._fieldnames = ['target_system', 'target_component', 'latitude', 'longitude', 'altitude']
self.target_system = target_system
self.target_component = target_component
self.latitude = latitude
self.longitude = longitude
self.altitude = altitude
def pack(self, mav):
return MAVLink_message.pack(self, mav, 215, struct.pack('>BBiii', self.target_system, self.target_component, self.latitude, self.longitude, self.altitude))
class MAVLink_gps_local_origin_set_message(MAVLink_message):
'''
Once the MAV sets a new GPS-Local correspondence, this message
announces the origin (0,0,0) position
'''
def __init__(self, latitude, longitude, altitude):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_GPS_LOCAL_ORIGIN_SET, 'GPS_LOCAL_ORIGIN_SET')
self._fieldnames = ['latitude', 'longitude', 'altitude']
self.latitude = latitude
self.longitude = longitude
self.altitude = altitude
def pack(self, mav):
return MAVLink_message.pack(self, mav, 14, struct.pack('>iii', self.latitude, self.longitude, self.altitude))
class MAVLink_local_position_setpoint_set_message(MAVLink_message):
'''
Set the setpoint for a local position controller. This is the
position in local coordinates the MAV should fly to. This
message is sent by the path/waypoint planner to the onboard
position controller. As some MAVs have a degree of freedom in
yaw (e.g. all helicopters/quadrotors), the desired yaw angle
is part of the message.
'''
def __init__(self, target_system, target_component, x, y, z, yaw):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_LOCAL_POSITION_SETPOINT_SET, 'LOCAL_POSITION_SETPOINT_SET')
self._fieldnames = ['target_system', 'target_component', 'x', 'y', 'z', 'yaw']
self.target_system = target_system
self.target_component = target_component
self.x = x
self.y = y
self.z = z
self.yaw = yaw
def pack(self, mav):
return MAVLink_message.pack(self, mav, 206, struct.pack('>BBffff', self.target_system, self.target_component, self.x, self.y, self.z, self.yaw))
class MAVLink_local_position_setpoint_message(MAVLink_message):
'''
Transmit the current local setpoint of the controller to other
MAVs (collision avoidance) and to the GCS.
'''
def __init__(self, x, y, z, yaw):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_LOCAL_POSITION_SETPOINT, 'LOCAL_POSITION_SETPOINT')
self._fieldnames = ['x', 'y', 'z', 'yaw']
self.x = x
self.y = y
self.z = z
self.yaw = yaw
def pack(self, mav):
return MAVLink_message.pack(self, mav, 50, struct.pack('>ffff', self.x, self.y, self.z, self.yaw))
class MAVLink_control_status_message(MAVLink_message):
'''
'''
def __init__(self, position_fix, vision_fix, gps_fix, ahrs_health, control_att, control_pos_xy, control_pos_z, control_pos_yaw):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_CONTROL_STATUS, 'CONTROL_STATUS')
self._fieldnames = ['position_fix', 'vision_fix', 'gps_fix', 'ahrs_health', 'control_att', 'control_pos_xy', 'control_pos_z', 'control_pos_yaw']
self.position_fix = position_fix
self.vision_fix = vision_fix
self.gps_fix = gps_fix
self.ahrs_health = ahrs_health
self.control_att = control_att
self.control_pos_xy = control_pos_xy
self.control_pos_z = control_pos_z
self.control_pos_yaw = control_pos_yaw
def pack(self, mav):
return MAVLink_message.pack(self, mav, 157, struct.pack('>BBBBBBBB', self.position_fix, self.vision_fix, self.gps_fix, self.ahrs_health, self.control_att, self.control_pos_xy, self.control_pos_z, self.control_pos_yaw))
class MAVLink_safety_set_allowed_area_message(MAVLink_message):
'''
Set a safety zone (volume), which is defined by two corners of
a cube. This message can be used to tell the MAV which
setpoints/waypoints to accept and which to reject. Safety
areas are often enforced by national or competition
regulations.
'''
def __init__(self, target_system, target_component, frame, p1x, p1y, p1z, p2x, p2y, p2z):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_SAFETY_SET_ALLOWED_AREA, 'SAFETY_SET_ALLOWED_AREA')
self._fieldnames = ['target_system', 'target_component', 'frame', 'p1x', 'p1y', 'p1z', 'p2x', 'p2y', 'p2z']
self.target_system = target_system
self.target_component = target_component
self.frame = frame
self.p1x = p1x
self.p1y = p1y
self.p1z = p1z
self.p2x = p2x
self.p2y = p2y
self.p2z = p2z
def pack(self, mav):
return MAVLink_message.pack(self, mav, 126, struct.pack('>BBBffffff', self.target_system, self.target_component, self.frame, self.p1x, self.p1y, self.p1z, self.p2x, self.p2y, self.p2z))
class MAVLink_safety_allowed_area_message(MAVLink_message):
'''
Read out the safety zone the MAV currently assumes.
'''
def __init__(self, frame, p1x, p1y, p1z, p2x, p2y, p2z):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_SAFETY_ALLOWED_AREA, 'SAFETY_ALLOWED_AREA')
self._fieldnames = ['frame', 'p1x', 'p1y', 'p1z', 'p2x', 'p2y', 'p2z']
self.frame = frame
self.p1x = p1x
self.p1y = p1y
self.p1z = p1z
self.p2x = p2x
self.p2y = p2y
self.p2z = p2z
def pack(self, mav):
return MAVLink_message.pack(self, mav, 108, struct.pack('>Bffffff', self.frame, self.p1x, self.p1y, self.p1z, self.p2x, self.p2y, self.p2z))
class MAVLink_set_roll_pitch_yaw_thrust_message(MAVLink_message):
'''
Set roll, pitch and yaw.
'''
def __init__(self, target_system, target_component, roll, pitch, yaw, thrust):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_SET_ROLL_PITCH_YAW_THRUST, 'SET_ROLL_PITCH_YAW_THRUST')
self._fieldnames = ['target_system', 'target_component', 'roll', 'pitch', 'yaw', 'thrust']
self.target_system = target_system
self.target_component = target_component
self.roll = roll
self.pitch = pitch
self.yaw = yaw
self.thrust = thrust
def pack(self, mav):
return MAVLink_message.pack(self, mav, 213, struct.pack('>BBffff', self.target_system, self.target_component, self.roll, self.pitch, self.yaw, self.thrust))
class MAVLink_set_roll_pitch_yaw_speed_thrust_message(MAVLink_message):
'''
Set roll, pitch and yaw.
'''
def __init__(self, target_system, target_component, roll_speed, pitch_speed, yaw_speed, thrust):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_SET_ROLL_PITCH_YAW_SPEED_THRUST, 'SET_ROLL_PITCH_YAW_SPEED_THRUST')
self._fieldnames = ['target_system', 'target_component', 'roll_speed', 'pitch_speed', 'yaw_speed', 'thrust']
self.target_system = target_system
self.target_component = target_component
self.roll_speed = roll_speed
self.pitch_speed = pitch_speed
self.yaw_speed = yaw_speed
self.thrust = thrust
def pack(self, mav):
return MAVLink_message.pack(self, mav, 95, struct.pack('>BBffff', self.target_system, self.target_component, self.roll_speed, self.pitch_speed, self.yaw_speed, self.thrust))
class MAVLink_roll_pitch_yaw_thrust_setpoint_message(MAVLink_message):
'''
Setpoint in roll, pitch, yaw currently active on the system.
'''
def __init__(self, time_us, roll, pitch, yaw, thrust):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_ROLL_PITCH_YAW_THRUST_SETPOINT, 'ROLL_PITCH_YAW_THRUST_SETPOINT')
self._fieldnames = ['time_us', 'roll', 'pitch', 'yaw', 'thrust']
self.time_us = time_us
self.roll = roll
self.pitch = pitch
self.yaw = yaw
self.thrust = thrust
def pack(self, mav):
return MAVLink_message.pack(self, mav, 5, struct.pack('>Qffff', self.time_us, self.roll, self.pitch, self.yaw, self.thrust))
class MAVLink_roll_pitch_yaw_speed_thrust_setpoint_message(MAVLink_message):
'''
Setpoint in rollspeed, pitchspeed, yawspeed currently active
on the system.
'''
def __init__(self, time_us, roll_speed, pitch_speed, yaw_speed, thrust):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_ROLL_PITCH_YAW_SPEED_THRUST_SETPOINT, 'ROLL_PITCH_YAW_SPEED_THRUST_SETPOINT')
self._fieldnames = ['time_us', 'roll_speed', 'pitch_speed', 'yaw_speed', 'thrust']
self.time_us = time_us
self.roll_speed = roll_speed
self.pitch_speed = pitch_speed
self.yaw_speed = yaw_speed
self.thrust = thrust
def pack(self, mav):
return MAVLink_message.pack(self, mav, 127, struct.pack('>Qffff', self.time_us, self.roll_speed, self.pitch_speed, self.yaw_speed, self.thrust))
class MAVLink_nav_controller_output_message(MAVLink_message):
'''
Outputs of the APM navigation controller. The primary use of
this message is to check the response and signs of the
controller before actual flight and to assist with tuning
controller parameters
'''
def __init__(self, nav_roll, nav_pitch, nav_bearing, target_bearing, wp_dist, alt_error, aspd_error, xtrack_error):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_NAV_CONTROLLER_OUTPUT, 'NAV_CONTROLLER_OUTPUT')
self._fieldnames = ['nav_roll', 'nav_pitch', 'nav_bearing', 'target_bearing', 'wp_dist', 'alt_error', 'aspd_error', 'xtrack_error']
self.nav_roll = nav_roll
self.nav_pitch = nav_pitch
self.nav_bearing = nav_bearing
self.target_bearing = target_bearing
self.wp_dist = wp_dist
self.alt_error = alt_error
self.aspd_error = aspd_error
self.xtrack_error = xtrack_error
def pack(self, mav):
return MAVLink_message.pack(self, mav, 57, struct.pack('>ffhhHfff', self.nav_roll, self.nav_pitch, self.nav_bearing, self.target_bearing, self.wp_dist, self.alt_error, self.aspd_error, self.xtrack_error))
class MAVLink_position_target_message(MAVLink_message):
'''
The goal position of the system. This position is the input to
any navigation or path planning algorithm and does NOT
represent the current controller setpoint.
'''
def __init__(self, x, y, z, yaw):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_POSITION_TARGET, 'POSITION_TARGET')
self._fieldnames = ['x', 'y', 'z', 'yaw']
self.x = x
self.y = y
self.z = z
self.yaw = yaw
def pack(self, mav):
return MAVLink_message.pack(self, mav, 126, struct.pack('>ffff', self.x, self.y, self.z, self.yaw))
class MAVLink_state_correction_message(MAVLink_message):
'''
Corrects the systems state by adding an error correction term
to the position and velocity, and by rotating the attitude by
a correction angle.
'''
def __init__(self, xErr, yErr, zErr, rollErr, pitchErr, yawErr, vxErr, vyErr, vzErr):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_STATE_CORRECTION, 'STATE_CORRECTION')
self._fieldnames = ['xErr', 'yErr', 'zErr', 'rollErr', 'pitchErr', 'yawErr', 'vxErr', 'vyErr', 'vzErr']
self.xErr = xErr
self.yErr = yErr
self.zErr = zErr
self.rollErr = rollErr
self.pitchErr = pitchErr
self.yawErr = yawErr
self.vxErr = vxErr
self.vyErr = vyErr
self.vzErr = vzErr
def pack(self, mav):
return MAVLink_message.pack(self, mav, 130, struct.pack('>fffffffff', self.xErr, self.yErr, self.zErr, self.rollErr, self.pitchErr, self.yawErr, self.vxErr, self.vyErr, self.vzErr))
class MAVLink_set_altitude_message(MAVLink_message):
'''
'''
def __init__(self, target, mode):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_SET_ALTITUDE, 'SET_ALTITUDE')
self._fieldnames = ['target', 'mode']
self.target = target
self.mode = mode
def pack(self, mav):
return MAVLink_message.pack(self, mav, 119, struct.pack('>BI', self.target, self.mode))
class MAVLink_request_data_stream_message(MAVLink_message):
'''
'''
def __init__(self, target_system, target_component, req_stream_id, req_message_rate, start_stop):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_REQUEST_DATA_STREAM, 'REQUEST_DATA_STREAM')
self._fieldnames = ['target_system', 'target_component', 'req_stream_id', 'req_message_rate', 'start_stop']
self.target_system = target_system
self.target_component = target_component
self.req_stream_id = req_stream_id
self.req_message_rate = req_message_rate
self.start_stop = start_stop
def pack(self, mav):
return MAVLink_message.pack(self, mav, 193, struct.pack('>BBBHB', self.target_system, self.target_component, self.req_stream_id, self.req_message_rate, self.start_stop))
class MAVLink_hil_state_message(MAVLink_message):
'''
This packet is useful for high throughput
applications such as hardware in the loop simulations.
'''
def __init__(self, usec, roll, pitch, yaw, rollspeed, pitchspeed, yawspeed, lat, lon, alt, vx, vy, vz, xacc, yacc, zacc):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_HIL_STATE, 'HIL_STATE')
self._fieldnames = ['usec', 'roll', 'pitch', 'yaw', 'rollspeed', 'pitchspeed', 'yawspeed', 'lat', 'lon', 'alt', 'vx', 'vy', 'vz', 'xacc', 'yacc', 'zacc']
self.usec = usec
self.roll = roll
self.pitch = pitch
self.yaw = yaw
self.rollspeed = rollspeed
self.pitchspeed = pitchspeed
self.yawspeed = yawspeed
self.lat = lat
self.lon = lon
self.alt = alt
self.vx = vx
self.vy = vy
self.vz = vz
self.xacc = xacc
self.yacc = yacc
self.zacc = zacc
def pack(self, mav):
return MAVLink_message.pack(self, mav, 191, struct.pack('>Qffffffiiihhhhhh', self.usec, self.roll, self.pitch, self.yaw, self.rollspeed, self.pitchspeed, self.yawspeed, self.lat, self.lon, self.alt, self.vx, self.vy, self.vz, self.xacc, self.yacc, self.zacc))
class MAVLink_hil_controls_message(MAVLink_message):
'''
Hardware in the loop control outputs
'''
def __init__(self, time_us, roll_ailerons, pitch_elevator, yaw_rudder, throttle, mode, nav_mode):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_HIL_CONTROLS, 'HIL_CONTROLS')
self._fieldnames = ['time_us', 'roll_ailerons', 'pitch_elevator', 'yaw_rudder', 'throttle', 'mode', 'nav_mode']
self.time_us = time_us
self.roll_ailerons = roll_ailerons
self.pitch_elevator = pitch_elevator
self.yaw_rudder = yaw_rudder
self.throttle = throttle
self.mode = mode
self.nav_mode = nav_mode
def pack(self, mav):
return MAVLink_message.pack(self, mav, 236, struct.pack('>QffffBB', self.time_us, self.roll_ailerons, self.pitch_elevator, self.yaw_rudder, self.throttle, self.mode, self.nav_mode))
class MAVLink_manual_control_message(MAVLink_message):
'''
'''
def __init__(self, target, roll, pitch, yaw, thrust, roll_manual, pitch_manual, yaw_manual, thrust_manual):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_MANUAL_CONTROL, 'MANUAL_CONTROL')
self._fieldnames = ['target', 'roll', 'pitch', 'yaw', 'thrust', 'roll_manual', 'pitch_manual', 'yaw_manual', 'thrust_manual']
self.target = target
self.roll = roll
self.pitch = pitch
self.yaw = yaw
self.thrust = thrust
self.roll_manual = roll_manual
self.pitch_manual = pitch_manual
self.yaw_manual = yaw_manual
self.thrust_manual = thrust_manual
def pack(self, mav):
return MAVLink_message.pack(self, mav, 158, struct.pack('>BffffBBBB', self.target, self.roll, self.pitch, self.yaw, self.thrust, self.roll_manual, self.pitch_manual, self.yaw_manual, self.thrust_manual))
class MAVLink_rc_channels_override_message(MAVLink_message):
'''
The RAW values of the RC channels sent to the MAV to override
info received from the RC radio. A value of -1 means no change
to that channel. A value of 0 means control of that channel
should be released back to the RC radio. The standard PPM
modulation is as follows: 1000 microseconds: 0%, 2000
microseconds: 100%. Individual receivers/transmitters might
violate this specification.
'''
def __init__(self, target_system, target_component, chan1_raw, chan2_raw, chan3_raw, chan4_raw, chan5_raw, chan6_raw, chan7_raw, chan8_raw):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_RC_CHANNELS_OVERRIDE, 'RC_CHANNELS_OVERRIDE')
self._fieldnames = ['target_system', 'target_component', 'chan1_raw', 'chan2_raw', 'chan3_raw', 'chan4_raw', 'chan5_raw', 'chan6_raw', 'chan7_raw', 'chan8_raw']
self.target_system = target_system
self.target_component = target_component
self.chan1_raw = chan1_raw
self.chan2_raw = chan2_raw
self.chan3_raw = chan3_raw
self.chan4_raw = chan4_raw
self.chan5_raw = chan5_raw
self.chan6_raw = chan6_raw
self.chan7_raw = chan7_raw
self.chan8_raw = chan8_raw
def pack(self, mav):
return MAVLink_message.pack(self, mav, 143, struct.pack('>BBHHHHHHHH', self.target_system, self.target_component, self.chan1_raw, self.chan2_raw, self.chan3_raw, self.chan4_raw, self.chan5_raw, self.chan6_raw, self.chan7_raw, self.chan8_raw))
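# Illustrative helper for the PPM scaling described in the docstring above
# (an added sketch, not part of the generated bindings): 1000 us maps to 0%
# and 2000 us to 100%, so channel percentages convert linearly.
def _percent_to_ppm_us(percent):
    '''sketch: convert a 0-100 channel percentage to a raw PPM microsecond value'''
    return int(1000 + 10 * percent)  # e.g. 50 -> 1500 us (mid-stick)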
class MAVLink_global_position_int_message(MAVLink_message):
'''
The filtered global position (e.g. fused GPS and
accelerometers). The position is in GPS-frame (right-handed,
Z-up)
'''
def __init__(self, lat, lon, alt, vx, vy, vz):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_GLOBAL_POSITION_INT, 'GLOBAL_POSITION_INT')
self._fieldnames = ['lat', 'lon', 'alt', 'vx', 'vy', 'vz']
self.lat = lat
self.lon = lon
self.alt = alt
self.vx = vx
self.vy = vy
self.vz = vz
def pack(self, mav):
return MAVLink_message.pack(self, mav, 104, struct.pack('>iiihhh', self.lat, self.lon, self.alt, self.vx, self.vy, self.vz))
class MAVLink_vfr_hud_message(MAVLink_message):
'''
Metrics typically displayed on a HUD for fixed wing aircraft
'''
def __init__(self, airspeed, groundspeed, heading, throttle, alt, climb):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_VFR_HUD, 'VFR_HUD')
self._fieldnames = ['airspeed', 'groundspeed', 'heading', 'throttle', 'alt', 'climb']
self.airspeed = airspeed
self.groundspeed = groundspeed
self.heading = heading
self.throttle = throttle
self.alt = alt
self.climb = climb
def pack(self, mav):
return MAVLink_message.pack(self, mav, 123, struct.pack('>ffhHff', self.airspeed, self.groundspeed, self.heading, self.throttle, self.alt, self.climb))
class MAVLink_command_message(MAVLink_message):
'''
Send a command with up to four parameters to the MAV
'''
def __init__(self, target_system, target_component, command, confirmation, param1, param2, param3, param4):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_COMMAND, 'COMMAND')
self._fieldnames = ['target_system', 'target_component', 'command', 'confirmation', 'param1', 'param2', 'param3', 'param4']
self.target_system = target_system
self.target_component = target_component
self.command = command
self.confirmation = confirmation
self.param1 = param1
self.param2 = param2
self.param3 = param3
self.param4 = param4
def pack(self, mav):
return MAVLink_message.pack(self, mav, 131, struct.pack('>BBBBffff', self.target_system, self.target_component, self.command, self.confirmation, self.param1, self.param2, self.param3, self.param4))
class MAVLink_command_ack_message(MAVLink_message):
'''
    Report status of a command. Includes feedback whether the
command was executed
'''
def __init__(self, command, result):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_COMMAND_ACK, 'COMMAND_ACK')
self._fieldnames = ['command', 'result']
self.command = command
self.result = result
def pack(self, mav):
return MAVLink_message.pack(self, mav, 8, struct.pack('>ff', self.command, self.result))
class MAVLink_optical_flow_message(MAVLink_message):
'''
Optical flow from a flow sensor (e.g. optical mouse sensor)
'''
def __init__(self, time, sensor_id, flow_x, flow_y, quality, ground_distance):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_OPTICAL_FLOW, 'OPTICAL_FLOW')
self._fieldnames = ['time', 'sensor_id', 'flow_x', 'flow_y', 'quality', 'ground_distance']
self.time = time
self.sensor_id = sensor_id
self.flow_x = flow_x
self.flow_y = flow_y
self.quality = quality
self.ground_distance = ground_distance
def pack(self, mav):
return MAVLink_message.pack(self, mav, 174, struct.pack('>QBhhBf', self.time, self.sensor_id, self.flow_x, self.flow_y, self.quality, self.ground_distance))
class MAVLink_object_detection_event_message(MAVLink_message):
'''
Object has been detected
'''
def __init__(self, time, object_id, type, name, quality, bearing, distance):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_OBJECT_DETECTION_EVENT, 'OBJECT_DETECTION_EVENT')
self._fieldnames = ['time', 'object_id', 'type', 'name', 'quality', 'bearing', 'distance']
self.time = time
self.object_id = object_id
self.type = type
self.name = name
self.quality = quality
self.bearing = bearing
self.distance = distance
def pack(self, mav):
return MAVLink_message.pack(self, mav, 155, struct.pack('>IHB20sBff', self.time, self.object_id, self.type, self.name, self.quality, self.bearing, self.distance))
class MAVLink_debug_vect_message(MAVLink_message):
'''
'''
def __init__(self, name, usec, x, y, z):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_DEBUG_VECT, 'DEBUG_VECT')
self._fieldnames = ['name', 'usec', 'x', 'y', 'z']
self.name = name
self.usec = usec
self.x = x
self.y = y
self.z = z
def pack(self, mav):
return MAVLink_message.pack(self, mav, 178, struct.pack('>10sQfff', self.name, self.usec, self.x, self.y, self.z))
class MAVLink_named_value_float_message(MAVLink_message):
'''
Send a key-value pair as float. The use of this message is
    discouraged for normal packets, but it is quite efficient
    for testing new messages and getting experimental debug output.
'''
def __init__(self, name, value):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_NAMED_VALUE_FLOAT, 'NAMED_VALUE_FLOAT')
self._fieldnames = ['name', 'value']
self.name = name
self.value = value
def pack(self, mav):
return MAVLink_message.pack(self, mav, 224, struct.pack('>10sf', self.name, self.value))
class MAVLink_named_value_int_message(MAVLink_message):
'''
Send a key-value pair as integer. The use of this message is
    discouraged for normal packets, but it is quite efficient
    for testing new messages and getting experimental debug output.
'''
def __init__(self, name, value):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_NAMED_VALUE_INT, 'NAMED_VALUE_INT')
self._fieldnames = ['name', 'value']
self.name = name
self.value = value
def pack(self, mav):
return MAVLink_message.pack(self, mav, 60, struct.pack('>10si', self.name, self.value))
class MAVLink_statustext_message(MAVLink_message):
'''
Status text message. These messages are printed in yellow in
the COMM console of QGroundControl. WARNING: They consume
quite some bandwidth, so use only for important status and
error messages. If implemented wisely, these messages are
buffered on the MCU and sent only at a limited rate (e.g. 10
Hz).
'''
def __init__(self, severity, text):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_STATUSTEXT, 'STATUSTEXT')
self._fieldnames = ['severity', 'text']
self.severity = severity
self.text = text
def pack(self, mav):
return MAVLink_message.pack(self, mav, 106, struct.pack('>B50s', self.severity, self.text))
class MAVLink_debug_message(MAVLink_message):
'''
Send a debug value. The index is used to discriminate between
values. These values show up in the plot of QGroundControl as
DEBUG N.
'''
def __init__(self, ind, value):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_DEBUG, 'DEBUG')
self._fieldnames = ['ind', 'value']
self.ind = ind
self.value = value
def pack(self, mav):
return MAVLink_message.pack(self, mav, 7, struct.pack('>Bf', self.ind, self.value))
mavlink_map = {
MAVLINK_MSG_ID_SENSOR_OFFSETS : ( '>hhhfiiffffff', MAVLink_sensor_offsets_message, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11], 143 ),
MAVLINK_MSG_ID_SET_MAG_OFFSETS : ( '>BBhhh', MAVLink_set_mag_offsets_message, [0, 1, 2, 3, 4], 29 ),
MAVLINK_MSG_ID_MEMINFO : ( '>HH', MAVLink_meminfo_message, [0, 1], 208 ),
MAVLINK_MSG_ID_AP_ADC : ( '>HHHHHH', MAVLink_ap_adc_message, [0, 1, 2, 3, 4, 5], 188 ),
MAVLINK_MSG_ID_DIGICAM_CONFIGURE : ( '>BBBHBBBBBBf', MAVLink_digicam_configure_message, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 118 ),
MAVLINK_MSG_ID_DIGICAM_CONTROL : ( '>BBBBbBBBBf', MAVLink_digicam_control_message, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9], 242 ),
MAVLINK_MSG_ID_MOUNT_CONFIGURE : ( '>BBBBBB', MAVLink_mount_configure_message, [0, 1, 2, 3, 4, 5], 19 ),
MAVLINK_MSG_ID_MOUNT_CONTROL : ( '>BBiiiB', MAVLink_mount_control_message, [0, 1, 2, 3, 4, 5], 97 ),
MAVLINK_MSG_ID_MOUNT_STATUS : ( '>BBiii', MAVLink_mount_status_message, [0, 1, 2, 3, 4], 233 ),
MAVLINK_MSG_ID_HEARTBEAT : ( '>BBB', MAVLink_heartbeat_message, [0, 1, 2], 72 ),
MAVLINK_MSG_ID_BOOT : ( '>I', MAVLink_boot_message, [0], 39 ),
MAVLINK_MSG_ID_SYSTEM_TIME : ( '>Q', MAVLink_system_time_message, [0], 190 ),
MAVLINK_MSG_ID_PING : ( '>IBBQ', MAVLink_ping_message, [0, 1, 2, 3], 92 ),
MAVLINK_MSG_ID_SYSTEM_TIME_UTC : ( '>II', MAVLink_system_time_utc_message, [0, 1], 191 ),
MAVLINK_MSG_ID_CHANGE_OPERATOR_CONTROL : ( '>BBB25s', MAVLink_change_operator_control_message, [0, 1, 2, 3], 217 ),
MAVLINK_MSG_ID_CHANGE_OPERATOR_CONTROL_ACK : ( '>BBB', MAVLink_change_operator_control_ack_message, [0, 1, 2], 104 ),
MAVLINK_MSG_ID_AUTH_KEY : ( '>32s', MAVLink_auth_key_message, [0], 119 ),
MAVLINK_MSG_ID_ACTION_ACK : ( '>BB', MAVLink_action_ack_message, [0, 1], 219 ),
MAVLINK_MSG_ID_ACTION : ( '>BBB', MAVLink_action_message, [0, 1, 2], 60 ),
MAVLINK_MSG_ID_SET_MODE : ( '>BB', MAVLink_set_mode_message, [0, 1], 186 ),
MAVLINK_MSG_ID_SET_NAV_MODE : ( '>BB', MAVLink_set_nav_mode_message, [0, 1], 10 ),
MAVLINK_MSG_ID_PARAM_REQUEST_READ : ( '>BB15sh', MAVLink_param_request_read_message, [0, 1, 2, 3], 89 ),
MAVLINK_MSG_ID_PARAM_REQUEST_LIST : ( '>BB', MAVLink_param_request_list_message, [0, 1], 159 ),
MAVLINK_MSG_ID_PARAM_VALUE : ( '>15sfHH', MAVLink_param_value_message, [0, 1, 2, 3], 162 ),
MAVLINK_MSG_ID_PARAM_SET : ( '>BB15sf', MAVLink_param_set_message, [0, 1, 2, 3], 121 ),
MAVLINK_MSG_ID_GPS_RAW_INT : ( '>QBiiiffff', MAVLink_gps_raw_int_message, [0, 1, 2, 3, 4, 5, 6, 7, 8], 149 ),
MAVLINK_MSG_ID_SCALED_IMU : ( '>Qhhhhhhhhh', MAVLink_scaled_imu_message, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9], 222 ),
MAVLINK_MSG_ID_GPS_STATUS : ( '>B20s20s20s20s20s', MAVLink_gps_status_message, [0, 1, 2, 3, 4, 5], 110 ),
MAVLINK_MSG_ID_RAW_IMU : ( '>Qhhhhhhhhh', MAVLink_raw_imu_message, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9], 179 ),
MAVLINK_MSG_ID_RAW_PRESSURE : ( '>Qhhhh', MAVLink_raw_pressure_message, [0, 1, 2, 3, 4], 136 ),
MAVLINK_MSG_ID_SCALED_PRESSURE : ( '>Qffh', MAVLink_scaled_pressure_message, [0, 1, 2, 3], 229 ),
MAVLINK_MSG_ID_ATTITUDE : ( '>Qffffff', MAVLink_attitude_message, [0, 1, 2, 3, 4, 5, 6], 66 ),
MAVLINK_MSG_ID_LOCAL_POSITION : ( '>Qffffff', MAVLink_local_position_message, [0, 1, 2, 3, 4, 5, 6], 126 ),
MAVLINK_MSG_ID_GLOBAL_POSITION : ( '>Qffffff', MAVLink_global_position_message, [0, 1, 2, 3, 4, 5, 6], 147 ),
MAVLINK_MSG_ID_GPS_RAW : ( '>QBfffffff', MAVLink_gps_raw_message, [0, 1, 2, 3, 4, 5, 6, 7, 8], 185 ),
MAVLINK_MSG_ID_SYS_STATUS : ( '>BBBHHHH', MAVLink_sys_status_message, [0, 1, 2, 3, 4, 5, 6], 112 ),
MAVLINK_MSG_ID_RC_CHANNELS_RAW : ( '>HHHHHHHHB', MAVLink_rc_channels_raw_message, [0, 1, 2, 3, 4, 5, 6, 7, 8], 252 ),
MAVLINK_MSG_ID_RC_CHANNELS_SCALED : ( '>hhhhhhhhB', MAVLink_rc_channels_scaled_message, [0, 1, 2, 3, 4, 5, 6, 7, 8], 162 ),
MAVLINK_MSG_ID_SERVO_OUTPUT_RAW : ( '>HHHHHHHH', MAVLink_servo_output_raw_message, [0, 1, 2, 3, 4, 5, 6, 7], 215 ),
MAVLINK_MSG_ID_WAYPOINT : ( '>BBHBBBBfffffff', MAVLink_waypoint_message, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13], 128 ),
MAVLINK_MSG_ID_WAYPOINT_REQUEST : ( '>BBH', MAVLink_waypoint_request_message, [0, 1, 2], 9 ),
MAVLINK_MSG_ID_WAYPOINT_SET_CURRENT : ( '>BBH', MAVLink_waypoint_set_current_message, [0, 1, 2], 106 ),
MAVLINK_MSG_ID_WAYPOINT_CURRENT : ( '>H', MAVLink_waypoint_current_message, [0], 101 ),
MAVLINK_MSG_ID_WAYPOINT_REQUEST_LIST : ( '>BB', MAVLink_waypoint_request_list_message, [0, 1], 213 ),
MAVLINK_MSG_ID_WAYPOINT_COUNT : ( '>BBH', MAVLink_waypoint_count_message, [0, 1, 2], 4 ),
MAVLINK_MSG_ID_WAYPOINT_CLEAR_ALL : ( '>BB', MAVLink_waypoint_clear_all_message, [0, 1], 229 ),
MAVLINK_MSG_ID_WAYPOINT_REACHED : ( '>H', MAVLink_waypoint_reached_message, [0], 21 ),
MAVLINK_MSG_ID_WAYPOINT_ACK : ( '>BBB', MAVLink_waypoint_ack_message, [0, 1, 2], 214 ),
MAVLINK_MSG_ID_GPS_SET_GLOBAL_ORIGIN : ( '>BBiii', MAVLink_gps_set_global_origin_message, [0, 1, 2, 3, 4], 215 ),
MAVLINK_MSG_ID_GPS_LOCAL_ORIGIN_SET : ( '>iii', MAVLink_gps_local_origin_set_message, [0, 1, 2], 14 ),
MAVLINK_MSG_ID_LOCAL_POSITION_SETPOINT_SET : ( '>BBffff', MAVLink_local_position_setpoint_set_message, [0, 1, 2, 3, 4, 5], 206 ),
MAVLINK_MSG_ID_LOCAL_POSITION_SETPOINT : ( '>ffff', MAVLink_local_position_setpoint_message, [0, 1, 2, 3], 50 ),
MAVLINK_MSG_ID_CONTROL_STATUS : ( '>BBBBBBBB', MAVLink_control_status_message, [0, 1, 2, 3, 4, 5, 6, 7], 157 ),
MAVLINK_MSG_ID_SAFETY_SET_ALLOWED_AREA : ( '>BBBffffff', MAVLink_safety_set_allowed_area_message, [0, 1, 2, 3, 4, 5, 6, 7, 8], 126 ),
MAVLINK_MSG_ID_SAFETY_ALLOWED_AREA : ( '>Bffffff', MAVLink_safety_allowed_area_message, [0, 1, 2, 3, 4, 5, 6], 108 ),
MAVLINK_MSG_ID_SET_ROLL_PITCH_YAW_THRUST : ( '>BBffff', MAVLink_set_roll_pitch_yaw_thrust_message, [0, 1, 2, 3, 4, 5], 213 ),
MAVLINK_MSG_ID_SET_ROLL_PITCH_YAW_SPEED_THRUST : ( '>BBffff', MAVLink_set_roll_pitch_yaw_speed_thrust_message, [0, 1, 2, 3, 4, 5], 95 ),
MAVLINK_MSG_ID_ROLL_PITCH_YAW_THRUST_SETPOINT : ( '>Qffff', MAVLink_roll_pitch_yaw_thrust_setpoint_message, [0, 1, 2, 3, 4], 5 ),
MAVLINK_MSG_ID_ROLL_PITCH_YAW_SPEED_THRUST_SETPOINT : ( '>Qffff', MAVLink_roll_pitch_yaw_speed_thrust_setpoint_message, [0, 1, 2, 3, 4], 127 ),
MAVLINK_MSG_ID_NAV_CONTROLLER_OUTPUT : ( '>ffhhHfff', MAVLink_nav_controller_output_message, [0, 1, 2, 3, 4, 5, 6, 7], 57 ),
MAVLINK_MSG_ID_POSITION_TARGET : ( '>ffff', MAVLink_position_target_message, [0, 1, 2, 3], 126 ),
MAVLINK_MSG_ID_STATE_CORRECTION : ( '>fffffffff', MAVLink_state_correction_message, [0, 1, 2, 3, 4, 5, 6, 7, 8], 130 ),
MAVLINK_MSG_ID_SET_ALTITUDE : ( '>BI', MAVLink_set_altitude_message, [0, 1], 119 ),
MAVLINK_MSG_ID_REQUEST_DATA_STREAM : ( '>BBBHB', MAVLink_request_data_stream_message, [0, 1, 2, 3, 4], 193 ),
MAVLINK_MSG_ID_HIL_STATE : ( '>Qffffffiiihhhhhh', MAVLink_hil_state_message, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15], 191 ),
MAVLINK_MSG_ID_HIL_CONTROLS : ( '>QffffBB', MAVLink_hil_controls_message, [0, 1, 2, 3, 4, 5, 6], 236 ),
MAVLINK_MSG_ID_MANUAL_CONTROL : ( '>BffffBBBB', MAVLink_manual_control_message, [0, 1, 2, 3, 4, 5, 6, 7, 8], 158 ),
MAVLINK_MSG_ID_RC_CHANNELS_OVERRIDE : ( '>BBHHHHHHHH', MAVLink_rc_channels_override_message, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9], 143 ),
MAVLINK_MSG_ID_GLOBAL_POSITION_INT : ( '>iiihhh', MAVLink_global_position_int_message, [0, 1, 2, 3, 4, 5], 104 ),
MAVLINK_MSG_ID_VFR_HUD : ( '>ffhHff', MAVLink_vfr_hud_message, [0, 1, 2, 3, 4, 5], 123 ),
MAVLINK_MSG_ID_COMMAND : ( '>BBBBffff', MAVLink_command_message, [0, 1, 2, 3, 4, 5, 6, 7], 131 ),
MAVLINK_MSG_ID_COMMAND_ACK : ( '>ff', MAVLink_command_ack_message, [0, 1], 8 ),
MAVLINK_MSG_ID_OPTICAL_FLOW : ( '>QBhhBf', MAVLink_optical_flow_message, [0, 1, 2, 3, 4, 5], 174 ),
MAVLINK_MSG_ID_OBJECT_DETECTION_EVENT : ( '>IHB20sBff', MAVLink_object_detection_event_message, [0, 1, 2, 3, 4, 5, 6], 155 ),
MAVLINK_MSG_ID_DEBUG_VECT : ( '>10sQfff', MAVLink_debug_vect_message, [0, 1, 2, 3, 4], 178 ),
MAVLINK_MSG_ID_NAMED_VALUE_FLOAT : ( '>10sf', MAVLink_named_value_float_message, [0, 1], 224 ),
MAVLINK_MSG_ID_NAMED_VALUE_INT : ( '>10si', MAVLink_named_value_int_message, [0, 1], 60 ),
MAVLINK_MSG_ID_STATUSTEXT : ( '>B50s', MAVLink_statustext_message, [0, 1], 106 ),
MAVLINK_MSG_ID_DEBUG : ( '>Bf', MAVLink_debug_message, [0, 1], 7 ),
}
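# Illustrative note (an added sketch, not part of the generated table): each
# mavlink_map entry is (struct format, message class, field order map,
# CRC-extra value), so the expected payload length of a message id follows
# directly from the format string.
def _payload_length(msgid):
    '''sketch: payload size in bytes for a given message id'''
    (fmt, msgclass, order_map, crc_extra) = mavlink_map[msgid]
    return struct.calcsize(fmt)  # e.g. MAVLINK_MSG_ID_DEBUG ('>Bf') -> 5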
class MAVError(Exception):
'''MAVLink error class'''
def __init__(self, msg):
Exception.__init__(self, msg)
self.message = msg
class MAVString(str):
'''NUL terminated string'''
def __init__(self, s):
str.__init__(self)
def __str__(self):
i = self.find(chr(0))
if i == -1:
return self[:]
return self[0:i]
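# Illustrative sketch (not part of the generated bindings): MAVString trims at
# the first NUL byte, which matters for fixed-width string fields such as the
# '10s'/'15s'/'25s' formats above, since they arrive zero-padded.
def _mavstring_example():
    '''sketch: a zero-padded '10s' field prints without its padding'''
    s = MAVString('GPS' + chr(0) * 7)  # as unpacked from a '10s' field
    return str(s)  # -> 'GPS'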
class MAVLink_bad_data(MAVLink_message):
'''
a piece of bad data in a mavlink stream
'''
def __init__(self, data, reason):
MAVLink_message.__init__(self, MAVLINK_MSG_ID_BAD_DATA, 'BAD_DATA')
self._fieldnames = ['data', 'reason']
self.data = data
self.reason = reason
self._msgbuf = data
class MAVLink(object):
'''MAVLink protocol handling class'''
def __init__(self, file, srcSystem=0, srcComponent=0):
self.seq = 0
self.file = file
self.srcSystem = srcSystem
self.srcComponent = srcComponent
self.callback = None
self.callback_args = None
self.callback_kwargs = None
self.buf = array.array('B')
self.expected_length = 6
self.have_prefix_error = False
self.robust_parsing = False
self.protocol_marker = 85
self.little_endian = False
self.crc_extra = False
self.sort_fields = False
self.total_packets_sent = 0
self.total_bytes_sent = 0
self.total_packets_received = 0
self.total_bytes_received = 0
self.total_receive_errors = 0
self.startup_time = time.time()
def set_callback(self, callback, *args, **kwargs):
self.callback = callback
self.callback_args = args
self.callback_kwargs = kwargs
def send(self, mavmsg):
'''send a MAVLink message'''
buf = mavmsg.pack(self)
self.file.write(buf)
        self.seq = (self.seq + 1) % 256  # seq is packed as a uint8; wrap at 256 (255 would skip one value)
self.total_packets_sent += 1
self.total_bytes_sent += len(buf)
def bytes_needed(self):
'''return number of bytes needed for next parsing stage'''
ret = self.expected_length - len(self.buf)
if ret <= 0:
return 1
return ret
def parse_char(self, c):
'''input some data bytes, possibly returning a new message'''
if isinstance(c, str):
self.buf.fromstring(c)
else:
self.buf.extend(c)
self.total_bytes_received += len(c)
if len(self.buf) >= 1 and self.buf[0] != 85:
magic = self.buf[0]
self.buf = self.buf[1:]
if self.robust_parsing:
m = MAVLink_bad_data(chr(magic), "Bad prefix")
if self.callback:
self.callback(m, *self.callback_args, **self.callback_kwargs)
self.expected_length = 6
self.total_receive_errors += 1
return m
if self.have_prefix_error:
return None
self.have_prefix_error = True
self.total_receive_errors += 1
raise MAVError("invalid MAVLink prefix '%s'" % magic)
self.have_prefix_error = False
if len(self.buf) >= 2:
(magic, self.expected_length) = struct.unpack('BB', self.buf[0:2])
self.expected_length += 8
if self.expected_length >= 8 and len(self.buf) >= self.expected_length:
mbuf = self.buf[0:self.expected_length]
self.buf = self.buf[self.expected_length:]
self.expected_length = 6
if self.robust_parsing:
try:
m = self.decode(mbuf)
self.total_packets_received += 1
except MAVError as reason:
m = MAVLink_bad_data(mbuf, reason.message)
self.total_receive_errors += 1
else:
m = self.decode(mbuf)
self.total_packets_received += 1
if self.callback:
self.callback(m, *self.callback_args, **self.callback_kwargs)
return m
return None
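# Illustrative usage sketch for the parser above (comments only, as this sits
# inside the generated class; `port` is assumed to be any file-like object
# with read(), e.g. a pyserial Serial instance):
#
#   mav = MAVLink(port)
#   mav.robust_parsing = True  # surface bad data as MAVLink_bad_data instead of raising
#   while True:
#       m = mav.parse_char(port.read(mav.bytes_needed()))
#       if m is not None:
#           print(m)  # a decoded MAVLink_*_message instance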
def parse_buffer(self, s):
'''input some data bytes, possibly returning a list of new messages'''
m = self.parse_char(s)
if m is None:
return None
ret = [m]
while True:
m = self.parse_char("")
if m is None:
return ret
ret.append(m)
return ret
def decode(self, msgbuf):
'''decode a buffer as a MAVLink message'''
# decode the header
try:
magic, mlen, seq, srcSystem, srcComponent, msgId = struct.unpack('cBBBBB', msgbuf[:6])
except struct.error as emsg:
raise MAVError('Unable to unpack MAVLink header: %s' % emsg)
if ord(magic) != 85:
raise MAVError("invalid MAVLink prefix '%s'" % magic)
if mlen != len(msgbuf)-8:
raise MAVError('invalid MAVLink message length. Got %u expected %u, msgId=%u' % (len(msgbuf)-8, mlen, msgId))
        if msgId not in mavlink_map:
raise MAVError('unknown MAVLink message ID %u' % msgId)
# decode the payload
(fmt, type, order_map, crc_extra) = mavlink_map[msgId]
# decode the checksum
try:
crc, = struct.unpack('<H', msgbuf[-2:])
except struct.error as emsg:
raise MAVError('Unable to unpack MAVLink CRC: %s' % emsg)
crc2 = mavutil.x25crc(msgbuf[1:-2])
if False: # using CRC extra
crc2.accumulate(chr(crc_extra))
if crc != crc2.crc:
raise MAVError('invalid MAVLink CRC in msgID %u 0x%04x should be 0x%04x' % (msgId, crc, crc2.crc))
try:
t = struct.unpack(fmt, msgbuf[6:-2])
except struct.error as emsg:
raise MAVError('Unable to unpack MAVLink payload type=%s fmt=%s payloadLength=%u: %s' % (
type, fmt, len(msgbuf[6:-2]), emsg))
tlist = list(t)
# handle sorted fields
if False:
t = tlist[:]
for i in range(0, len(tlist)):
tlist[i] = t[order_map[i]]
# terminate any strings
for i in range(0, len(tlist)):
if isinstance(tlist[i], str):
tlist[i] = MAVString(tlist[i])
t = tuple(tlist)
# construct the message object
try:
m = type(*t)
except Exception as emsg:
raise MAVError('Unable to instantiate MAVLink message of type %s : %s' % (type, emsg))
m._msgbuf = msgbuf
m._payload = msgbuf[6:-2]
m._crc = crc
m._header = MAVLink_header(msgId, mlen, seq, srcSystem, srcComponent)
return m
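# Illustrative pack/decode round trip (comments only; it relies only on what
# this class defines -- pack() returns the wire bytes, as send() shows):
#
#   class _Null(object):
#       def write(self, buf): pass
#   mav = MAVLink(_Null())
#   buf = mav.heartbeat_encode(1, 3).pack(mav)  # type/autopilot are placeholder values
#   m = mav.decode(buf)  # m is a MAVLink_heartbeat_message with the same fields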
def sensor_offsets_encode(self, mag_ofs_x, mag_ofs_y, mag_ofs_z, mag_declination, raw_press, raw_temp, gyro_cal_x, gyro_cal_y, gyro_cal_z, accel_cal_x, accel_cal_y, accel_cal_z):
'''
Offsets and calibrations values for hardware sensors. This
makes it easier to debug the calibration process.
mag_ofs_x : magnetometer X offset (int16_t)
mag_ofs_y : magnetometer Y offset (int16_t)
mag_ofs_z : magnetometer Z offset (int16_t)
mag_declination : magnetic declination (radians) (float)
raw_press : raw pressure from barometer (int32_t)
raw_temp : raw temperature from barometer (int32_t)
gyro_cal_x : gyro X calibration (float)
gyro_cal_y : gyro Y calibration (float)
gyro_cal_z : gyro Z calibration (float)
accel_cal_x : accel X calibration (float)
accel_cal_y : accel Y calibration (float)
accel_cal_z : accel Z calibration (float)
'''
msg = MAVLink_sensor_offsets_message(mag_ofs_x, mag_ofs_y, mag_ofs_z, mag_declination, raw_press, raw_temp, gyro_cal_x, gyro_cal_y, gyro_cal_z, accel_cal_x, accel_cal_y, accel_cal_z)
msg.pack(self)
return msg
def sensor_offsets_send(self, mag_ofs_x, mag_ofs_y, mag_ofs_z, mag_declination, raw_press, raw_temp, gyro_cal_x, gyro_cal_y, gyro_cal_z, accel_cal_x, accel_cal_y, accel_cal_z):
'''
Offsets and calibrations values for hardware sensors. This
makes it easier to debug the calibration process.
mag_ofs_x : magnetometer X offset (int16_t)
mag_ofs_y : magnetometer Y offset (int16_t)
mag_ofs_z : magnetometer Z offset (int16_t)
mag_declination : magnetic declination (radians) (float)
raw_press : raw pressure from barometer (int32_t)
raw_temp : raw temperature from barometer (int32_t)
gyro_cal_x : gyro X calibration (float)
gyro_cal_y : gyro Y calibration (float)
gyro_cal_z : gyro Z calibration (float)
accel_cal_x : accel X calibration (float)
accel_cal_y : accel Y calibration (float)
accel_cal_z : accel Z calibration (float)
'''
return self.send(self.sensor_offsets_encode(mag_ofs_x, mag_ofs_y, mag_ofs_z, mag_declination, raw_press, raw_temp, gyro_cal_x, gyro_cal_y, gyro_cal_z, accel_cal_x, accel_cal_y, accel_cal_z))
def set_mag_offsets_encode(self, target_system, target_component, mag_ofs_x, mag_ofs_y, mag_ofs_z):
'''
set the magnetometer offsets
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
mag_ofs_x : magnetometer X offset (int16_t)
mag_ofs_y : magnetometer Y offset (int16_t)
mag_ofs_z : magnetometer Z offset (int16_t)
'''
msg = MAVLink_set_mag_offsets_message(target_system, target_component, mag_ofs_x, mag_ofs_y, mag_ofs_z)
msg.pack(self)
return msg
def set_mag_offsets_send(self, target_system, target_component, mag_ofs_x, mag_ofs_y, mag_ofs_z):
'''
set the magnetometer offsets
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
mag_ofs_x : magnetometer X offset (int16_t)
mag_ofs_y : magnetometer Y offset (int16_t)
mag_ofs_z : magnetometer Z offset (int16_t)
'''
return self.send(self.set_mag_offsets_encode(target_system, target_component, mag_ofs_x, mag_ofs_y, mag_ofs_z))
def meminfo_encode(self, brkval, freemem):
'''
state of APM memory
brkval : heap top (uint16_t)
freemem : free memory (uint16_t)
'''
msg = MAVLink_meminfo_message(brkval, freemem)
msg.pack(self)
return msg
def meminfo_send(self, brkval, freemem):
'''
state of APM memory
brkval : heap top (uint16_t)
freemem : free memory (uint16_t)
'''
return self.send(self.meminfo_encode(brkval, freemem))
def ap_adc_encode(self, adc1, adc2, adc3, adc4, adc5, adc6):
'''
raw ADC output
adc1 : ADC output 1 (uint16_t)
adc2 : ADC output 2 (uint16_t)
adc3 : ADC output 3 (uint16_t)
adc4 : ADC output 4 (uint16_t)
adc5 : ADC output 5 (uint16_t)
adc6 : ADC output 6 (uint16_t)
'''
msg = MAVLink_ap_adc_message(adc1, adc2, adc3, adc4, adc5, adc6)
msg.pack(self)
return msg
def ap_adc_send(self, adc1, adc2, adc3, adc4, adc5, adc6):
'''
raw ADC output
adc1 : ADC output 1 (uint16_t)
adc2 : ADC output 2 (uint16_t)
adc3 : ADC output 3 (uint16_t)
adc4 : ADC output 4 (uint16_t)
adc5 : ADC output 5 (uint16_t)
adc6 : ADC output 6 (uint16_t)
'''
return self.send(self.ap_adc_encode(adc1, adc2, adc3, adc4, adc5, adc6))
def digicam_configure_encode(self, target_system, target_component, mode, shutter_speed, aperture, iso, exposure_type, command_id, engine_cut_off, extra_param, extra_value):
'''
Configure on-board Camera Control System.
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
mode : Mode enumeration from 1 to N //P, TV, AV, M, Etc (0 means ignore) (uint8_t)
shutter_speed : Divisor number //e.g. 1000 means 1/1000 (0 means ignore) (uint16_t)
aperture : F stop number x 10 //e.g. 28 means 2.8 (0 means ignore) (uint8_t)
iso : ISO enumeration from 1 to N //e.g. 80, 100, 200, Etc (0 means ignore) (uint8_t)
exposure_type : Exposure type enumeration from 1 to N (0 means ignore) (uint8_t)
command_id : Command Identity (incremental loop: 0 to 255)//A command sent multiple times will be executed or pooled just once (uint8_t)
engine_cut_off : Main engine cut-off time before camera trigger in seconds/10 (0 means no cut-off) (uint8_t)
extra_param : Extra parameters enumeration (0 means ignore) (uint8_t)
extra_value : Correspondent value to given extra_param (float)
'''
msg = MAVLink_digicam_configure_message(target_system, target_component, mode, shutter_speed, aperture, iso, exposure_type, command_id, engine_cut_off, extra_param, extra_value)
msg.pack(self)
return msg
def digicam_configure_send(self, target_system, target_component, mode, shutter_speed, aperture, iso, exposure_type, command_id, engine_cut_off, extra_param, extra_value):
'''
Configure on-board Camera Control System.
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
mode : Mode enumeration from 1 to N //P, TV, AV, M, Etc (0 means ignore) (uint8_t)
shutter_speed : Divisor number //e.g. 1000 means 1/1000 (0 means ignore) (uint16_t)
aperture : F stop number x 10 //e.g. 28 means 2.8 (0 means ignore) (uint8_t)
iso : ISO enumeration from 1 to N //e.g. 80, 100, 200, Etc (0 means ignore) (uint8_t)
exposure_type : Exposure type enumeration from 1 to N (0 means ignore) (uint8_t)
command_id : Command Identity (incremental loop: 0 to 255)//A command sent multiple times will be executed or pooled just once (uint8_t)
engine_cut_off : Main engine cut-off time before camera trigger in seconds/10 (0 means no cut-off) (uint8_t)
extra_param : Extra parameters enumeration (0 means ignore) (uint8_t)
extra_value : Correspondent value to given extra_param (float)
'''
return self.send(self.digicam_configure_encode(target_system, target_component, mode, shutter_speed, aperture, iso, exposure_type, command_id, engine_cut_off, extra_param, extra_value))
def digicam_control_encode(self, target_system, target_component, session, zoom_pos, zoom_step, focus_lock, shot, command_id, extra_param, extra_value):
'''
Control on-board Camera Control System to take shots.
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
session : 0: stop, 1: start or keep it up //Session control e.g. show/hide lens (uint8_t)
zoom_pos : 1 to N //Zoom's absolute position (0 means ignore) (uint8_t)
zoom_step : -100 to 100 //Zooming step value to offset zoom from the current position (int8_t)
focus_lock : 0: unlock focus or keep unlocked, 1: lock focus or keep locked, 3: re-lock focus (uint8_t)
shot : 0: ignore, 1: shot or start filming (uint8_t)
command_id : Command Identity (incremental loop: 0 to 255)//A command sent multiple times will be executed or pooled just once (uint8_t)
extra_param : Extra parameters enumeration (0 means ignore) (uint8_t)
extra_value : Correspondent value to given extra_param (float)
'''
msg = MAVLink_digicam_control_message(target_system, target_component, session, zoom_pos, zoom_step, focus_lock, shot, command_id, extra_param, extra_value)
msg.pack(self)
return msg
def digicam_control_send(self, target_system, target_component, session, zoom_pos, zoom_step, focus_lock, shot, command_id, extra_param, extra_value):
'''
Control on-board Camera Control System to take shots.
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
session : 0: stop, 1: start or keep it up //Session control e.g. show/hide lens (uint8_t)
zoom_pos : 1 to N //Zoom's absolute position (0 means ignore) (uint8_t)
zoom_step : -100 to 100 //Zooming step value to offset zoom from the current position (int8_t)
focus_lock : 0: unlock focus or keep unlocked, 1: lock focus or keep locked, 3: re-lock focus (uint8_t)
shot : 0: ignore, 1: shot or start filming (uint8_t)
command_id : Command Identity (incremental loop: 0 to 255)//A command sent multiple times will be executed or pooled just once (uint8_t)
extra_param : Extra parameters enumeration (0 means ignore) (uint8_t)
extra_value : Correspondent value to given extra_param (float)
'''
return self.send(self.digicam_control_encode(target_system, target_component, session, zoom_pos, zoom_step, focus_lock, shot, command_id, extra_param, extra_value))
def mount_configure_encode(self, target_system, target_component, mount_mode, stab_roll, stab_pitch, stab_yaw):
'''
Message to configure a camera mount, directional antenna, etc.
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
mount_mode : mount operating mode (see MAV_MOUNT_MODE enum) (uint8_t)
stab_roll : (1 = yes, 0 = no) (uint8_t)
stab_pitch : (1 = yes, 0 = no) (uint8_t)
stab_yaw : (1 = yes, 0 = no) (uint8_t)
'''
msg = MAVLink_mount_configure_message(target_system, target_component, mount_mode, stab_roll, stab_pitch, stab_yaw)
msg.pack(self)
return msg
def mount_configure_send(self, target_system, target_component, mount_mode, stab_roll, stab_pitch, stab_yaw):
'''
Message to configure a camera mount, directional antenna, etc.
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
mount_mode : mount operating mode (see MAV_MOUNT_MODE enum) (uint8_t)
stab_roll : (1 = yes, 0 = no) (uint8_t)
stab_pitch : (1 = yes, 0 = no) (uint8_t)
stab_yaw : (1 = yes, 0 = no) (uint8_t)
'''
return self.send(self.mount_configure_encode(target_system, target_component, mount_mode, stab_roll, stab_pitch, stab_yaw))
def mount_control_encode(self, target_system, target_component, input_a, input_b, input_c, save_position):
'''
Message to control a camera mount, directional antenna, etc.
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
input_a : pitch(deg*100) or lat, depending on mount mode (int32_t)
input_b : roll(deg*100) or lon depending on mount mode (int32_t)
input_c : yaw(deg*100) or alt (in cm) depending on mount mode (int32_t)
                save_position             : if "1" it will save the current trimmed position to EEPROM (only valid for NEUTRAL and LANDING) (uint8_t)
'''
msg = MAVLink_mount_control_message(target_system, target_component, input_a, input_b, input_c, save_position)
msg.pack(self)
return msg
def mount_control_send(self, target_system, target_component, input_a, input_b, input_c, save_position):
'''
Message to control a camera mount, directional antenna, etc.
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
input_a : pitch(deg*100) or lat, depending on mount mode (int32_t)
input_b : roll(deg*100) or lon depending on mount mode (int32_t)
input_c : yaw(deg*100) or alt (in cm) depending on mount mode (int32_t)
                save_position             : if "1" it will save the current trimmed position to EEPROM (only valid for NEUTRAL and LANDING) (uint8_t)
'''
return self.send(self.mount_control_encode(target_system, target_component, input_a, input_b, input_c, save_position))
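# Illustrative scaling for the deg*100 fields above (comments only; target ids
# and angles are placeholder values, not part of the generated code):
#
#   mav.mount_control_send(1, 1,             # target system/component
#                          int(12.5 * 100),  # pitch: 12.5 deg -> 1250
#                          int(-3.0 * 100),  # roll: -3.0 deg -> -300
#                          0,                # yaw (deg*100)
#                          0)                # save_position: do not store to EEPROM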
def mount_status_encode(self, target_system, target_component, pointing_a, pointing_b, pointing_c):
'''
Message with some status from APM to GCS about camera or antenna mount
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
pointing_a : pitch(deg*100) or lat, depending on mount mode (int32_t)
pointing_b : roll(deg*100) or lon depending on mount mode (int32_t)
pointing_c : yaw(deg*100) or alt (in cm) depending on mount mode (int32_t)
'''
msg = MAVLink_mount_status_message(target_system, target_component, pointing_a, pointing_b, pointing_c)
msg.pack(self)
return msg
def mount_status_send(self, target_system, target_component, pointing_a, pointing_b, pointing_c):
'''
Message with some status from APM to GCS about camera or antenna mount
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
pointing_a : pitch(deg*100) or lat, depending on mount mode (int32_t)
pointing_b : roll(deg*100) or lon depending on mount mode (int32_t)
pointing_c : yaw(deg*100) or alt (in cm) depending on mount mode (int32_t)
'''
return self.send(self.mount_status_encode(target_system, target_component, pointing_a, pointing_b, pointing_c))
def heartbeat_encode(self, type, autopilot, mavlink_version=2):
'''
The heartbeat message shows that a system is present and responding.
                The type of the MAV and Autopilot hardware allows the
                receiving system to treat further messages from this
                system appropriately (e.g. by laying out the user
                interface based on the autopilot).
type : Type of the MAV (quadrotor, helicopter, etc., up to 15 types, defined in MAV_TYPE ENUM) (uint8_t)
autopilot : Type of the Autopilot: 0: Generic, 1: PIXHAWK, 2: SLUGS, 3: Ardupilot (up to 15 types), defined in MAV_AUTOPILOT_TYPE ENUM (uint8_t)
mavlink_version : MAVLink version (uint8_t)
'''
msg = MAVLink_heartbeat_message(type, autopilot, mavlink_version)
msg.pack(self)
return msg
def heartbeat_send(self, type, autopilot, mavlink_version=2):
'''
The heartbeat message shows that a system is present and responding.
                The type of the MAV and Autopilot hardware allows the
                receiving system to treat further messages from this
                system appropriately (e.g. by laying out the user
                interface based on the autopilot).
type : Type of the MAV (quadrotor, helicopter, etc., up to 15 types, defined in MAV_TYPE ENUM) (uint8_t)
autopilot : Type of the Autopilot: 0: Generic, 1: PIXHAWK, 2: SLUGS, 3: Ardupilot (up to 15 types), defined in MAV_AUTOPILOT_TYPE ENUM (uint8_t)
mavlink_version : MAVLink version (uint8_t)
'''
return self.send(self.heartbeat_encode(type, autopilot, mavlink_version))
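# Illustrative usage sketch (comments only; io.BytesIO stands in for a real
# serial or UDP transport, and the enum values are placeholders):
#
#   import io
#   transport = io.BytesIO()
#   mav = MAVLink(transport, srcSystem=255)  # 255 is a typical GCS system id
#   mav.heartbeat_send(6, 0)                 # placeholder type/autopilot values
#   wire = transport.getvalue()              # 6-byte header + 3-byte payload + 2-byte CRC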
def boot_encode(self, version):
'''
The boot message indicates that a system is starting. The onboard
                software version allows keeping track of onboard
                software/firmware revisions.
version : The onboard software version (uint32_t)
'''
msg = MAVLink_boot_message(version)
msg.pack(self)
return msg
def boot_send(self, version):
'''
The boot message indicates that a system is starting. The onboard
                software version allows keeping track of onboard
                software/firmware revisions.
version : The onboard software version (uint32_t)
'''
return self.send(self.boot_encode(version))
def system_time_encode(self, time_usec):
'''
The system time is the time of the master clock, typically the
computer clock of the main onboard computer.
time_usec : Timestamp of the master clock in microseconds since UNIX epoch. (uint64_t)
'''
msg = MAVLink_system_time_message(time_usec)
msg.pack(self)
return msg
def system_time_send(self, time_usec):
'''
The system time is the time of the master clock, typically the
computer clock of the main onboard computer.
time_usec : Timestamp of the master clock in microseconds since UNIX epoch. (uint64_t)
'''
return self.send(self.system_time_encode(time_usec))
def ping_encode(self, seq, target_system, target_component, time):
'''
                A ping message either requesting or responding to a ping. This allows
                measuring system latencies, including serial
                port, radio modem and UDP connections.
seq : PING sequence (uint32_t)
target_system : 0: request ping from all receiving systems, if greater than 0: message is a ping response and number is the system id of the requesting system (uint8_t)
target_component : 0: request ping from all receiving components, if greater than 0: message is a ping response and number is the system id of the requesting system (uint8_t)
time : Unix timestamp in microseconds (uint64_t)
'''
msg = MAVLink_ping_message(seq, target_system, target_component, time)
msg.pack(self)
return msg
def ping_send(self, seq, target_system, target_component, time):
'''
                A ping message either requesting or responding to a ping. This allows
                measuring system latencies, including serial
                port, radio modem and UDP connections.
seq : PING sequence (uint32_t)
target_system : 0: request ping from all receiving systems, if greater than 0: message is a ping response and number is the system id of the requesting system (uint8_t)
target_component : 0: request ping from all receiving components, if greater than 0: message is a ping response and number is the system id of the requesting system (uint8_t)
time : Unix timestamp in microseconds (uint64_t)
'''
return self.send(self.ping_encode(seq, target_system, target_component, time))
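# Illustrative latency measurement per the description above (comments only;
# `wait_ping_reply` is a user-supplied helper, not part of this class):
#
#   t0 = time.time()
#   mav.ping_send(seq, 0, 0, int(t0 * 1e6))  # 0, 0 = request a ping response from all systems
#   reply = wait_ping_reply(seq)             # block until the response or a timeout
#   latency = time.time() - t0               # round-trip latency in seconds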
def system_time_utc_encode(self, utc_date, utc_time):
'''
UTC date and time from GPS module
utc_date : GPS UTC date ddmmyy (uint32_t)
utc_time : GPS UTC time hhmmss (uint32_t)
'''
msg = MAVLink_system_time_utc_message(utc_date, utc_time)
msg.pack(self)
return msg
def system_time_utc_send(self, utc_date, utc_time):
'''
UTC date and time from GPS module
utc_date : GPS UTC date ddmmyy (uint32_t)
utc_time : GPS UTC time hhmmss (uint32_t)
'''
return self.send(self.system_time_utc_encode(utc_date, utc_time))
def change_operator_control_encode(self, target_system, control_request, version, passkey):
'''
Request to control this MAV
target_system : System the GCS requests control for (uint8_t)
control_request : 0: request control of this MAV, 1: Release control of this MAV (uint8_t)
version : 0: key as plaintext, 1-255: future, different hashing/encryption variants. The GCS should in general use the safest mode possible initially and then gradually move down the encryption level if it gets a NACK message indicating an encryption mismatch. (uint8_t)
passkey : Password / Key, depending on version plaintext or encrypted. 25 or less characters, NULL terminated. The characters may involve A-Z, a-z, 0-9, and "!?,.-" (char)
'''
msg = MAVLink_change_operator_control_message(target_system, control_request, version, passkey)
msg.pack(self)
return msg
def change_operator_control_send(self, target_system, control_request, version, passkey):
'''
Request to control this MAV
target_system : System the GCS requests control for (uint8_t)
control_request : 0: request control of this MAV, 1: Release control of this MAV (uint8_t)
version : 0: key as plaintext, 1-255: future, different hashing/encryption variants. The GCS should in general use the safest mode possible initially and then gradually move down the encryption level if it gets a NACK message indicating an encryption mismatch. (uint8_t)
passkey : Password / Key, depending on version plaintext or encrypted. 25 or less characters, NULL terminated. The characters may involve A-Z, a-z, 0-9, and "!?,.-" (char)
'''
return self.send(self.change_operator_control_encode(target_system, control_request, version, passkey))
def change_operator_control_ack_encode(self, gcs_system_id, control_request, ack):
'''
Accept / deny control of this MAV
                gcs_system_id             : ID of the GCS this message is addressed to (uint8_t)
control_request : 0: request control of this MAV, 1: Release control of this MAV (uint8_t)
ack : 0: ACK, 1: NACK: Wrong passkey, 2: NACK: Unsupported passkey encryption method, 3: NACK: Already under control (uint8_t)
'''
msg = MAVLink_change_operator_control_ack_message(gcs_system_id, control_request, ack)
msg.pack(self)
return msg
def change_operator_control_ack_send(self, gcs_system_id, control_request, ack):
'''
Accept / deny control of this MAV
                gcs_system_id             : ID of the GCS this message is addressed to (uint8_t)
control_request : 0: request control of this MAV, 1: Release control of this MAV (uint8_t)
ack : 0: ACK, 1: NACK: Wrong passkey, 2: NACK: Unsupported passkey encryption method, 3: NACK: Already under control (uint8_t)
'''
return self.send(self.change_operator_control_ack_encode(gcs_system_id, control_request, ack))
def auth_key_encode(self, key):
'''
Emit an encrypted signature / key identifying this system. PLEASE
NOTE: This protocol has been kept simple, so
transmitting the key requires an encrypted channel for
true safety.
key : key (char)
'''
msg = MAVLink_auth_key_message(key)
msg.pack(self)
return msg
def auth_key_send(self, key):
'''
Emit an encrypted signature / key identifying this system. PLEASE
NOTE: This protocol has been kept simple, so
transmitting the key requires an encrypted channel for
true safety.
key : key (char)
'''
return self.send(self.auth_key_encode(key))
def action_ack_encode(self, action, result):
'''
This message acknowledges an action. IMPORTANT: The acknowledgement
                can also be negative, e.g. the MAV rejects a reset
message because it is in-flight. The action ids are
defined in ENUM MAV_ACTION in
mavlink/include/mavlink_types.h
action : The action id (uint8_t)
result : 0: Action DENIED, 1: Action executed (uint8_t)
'''
msg = MAVLink_action_ack_message(action, result)
msg.pack(self)
return msg
def action_ack_send(self, action, result):
'''
This message acknowledges an action. IMPORTANT: The acknowledgement
                can also be negative, e.g. the MAV rejects a reset
message because it is in-flight. The action ids are
defined in ENUM MAV_ACTION in
mavlink/include/mavlink_types.h
action : The action id (uint8_t)
result : 0: Action DENIED, 1: Action executed (uint8_t)
'''
return self.send(self.action_ack_encode(action, result))
def action_encode(self, target, target_component, action):
'''
                An action message allows executing a certain onboard action. These
                include liftoff, land, storing parameters to EEPROM,
                shutdown, etc. The action ids are defined in ENUM
MAV_ACTION in mavlink/include/mavlink_types.h
target : The system executing the action (uint8_t)
target_component : The component executing the action (uint8_t)
action : The action id (uint8_t)
'''
msg = MAVLink_action_message(target, target_component, action)
msg.pack(self)
return msg
def action_send(self, target, target_component, action):
'''
                An action message allows executing a certain onboard action. These
                include liftoff, land, storing parameters to EEPROM,
                shutdown, etc. The action ids are defined in ENUM
MAV_ACTION in mavlink/include/mavlink_types.h
target : The system executing the action (uint8_t)
target_component : The component executing the action (uint8_t)
action : The action id (uint8_t)
'''
return self.send(self.action_encode(target, target_component, action))
def set_mode_encode(self, target, mode):
'''
Set the system mode, as defined by enum MAV_MODE in
mavlink/include/mavlink_types.h. There is no target
component id as the mode is by definition for the
overall aircraft, not only for one component.
target : The system setting the mode (uint8_t)
mode : The new mode (uint8_t)
'''
msg = MAVLink_set_mode_message(target, mode)
msg.pack(self)
return msg
def set_mode_send(self, target, mode):
'''
Set the system mode, as defined by enum MAV_MODE in
mavlink/include/mavlink_types.h. There is no target
component id as the mode is by definition for the
overall aircraft, not only for one component.
target : The system setting the mode (uint8_t)
mode : The new mode (uint8_t)
'''
return self.send(self.set_mode_encode(target, mode))
def set_nav_mode_encode(self, target, nav_mode):
'''
Set the system navigation mode, as defined by enum MAV_NAV_MODE in
mavlink/include/mavlink_types.h. The navigation mode
applies to the whole aircraft and thus all components.
target : The system setting the mode (uint8_t)
nav_mode : The new navigation mode (uint8_t)
'''
msg = MAVLink_set_nav_mode_message(target, nav_mode)
msg.pack(self)
return msg
def set_nav_mode_send(self, target, nav_mode):
'''
Set the system navigation mode, as defined by enum MAV_NAV_MODE in
mavlink/include/mavlink_types.h. The navigation mode
applies to the whole aircraft and thus all components.
target : The system setting the mode (uint8_t)
nav_mode : The new navigation mode (uint8_t)
'''
return self.send(self.set_nav_mode_encode(target, nav_mode))
def param_request_read_encode(self, target_system, target_component, param_id, param_index):
'''
Request to read the onboard parameter with the param_id string id.
Onboard parameters are stored as key[const char*] ->
                value[float]. This allows sending a parameter to any
                other component (such as the GCS) without prior
                knowledge of possible parameter names. Thus
the same GCS can store different parameters for
different autopilots. See also
http://qgroundcontrol.org/parameter_interface for a
full documentation of QGroundControl and IMU code.
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
param_id : Onboard parameter id (int8_t)
param_index : Parameter index. Send -1 to use the param ID field as identifier (int16_t)
'''
msg = MAVLink_param_request_read_message(target_system, target_component, param_id, param_index)
msg.pack(self)
return msg
def param_request_read_send(self, target_system, target_component, param_id, param_index):
'''
Request to read the onboard parameter with the param_id string id.
Onboard parameters are stored as key[const char*] ->
                value[float]. This allows sending a parameter to any
                other component (such as the GCS) without prior
                knowledge of possible parameter names. Thus
the same GCS can store different parameters for
different autopilots. See also
http://qgroundcontrol.org/parameter_interface for a
full documentation of QGroundControl and IMU code.
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
param_id : Onboard parameter id (int8_t)
param_index : Parameter index. Send -1 to use the param ID field as identifier (int16_t)
'''
return self.send(self.param_request_read_encode(target_system, target_component, param_id, param_index))
def param_request_list_encode(self, target_system, target_component):
'''
                Request all parameters of this component. After this request, all
parameters are emitted.
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
'''
msg = MAVLink_param_request_list_message(target_system, target_component)
msg.pack(self)
return msg
def param_request_list_send(self, target_system, target_component):
'''
                Request all parameters of this component. After this request, all
parameters are emitted.
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
'''
return self.send(self.param_request_list_encode(target_system, target_component))
def param_value_encode(self, param_id, param_value, param_count, param_index):
'''
                Emit the value of an onboard parameter. The inclusion of param_count
                and param_index in the message allows the recipient to
                keep track of received parameters and to re-request
                missing parameters after a loss or timeout.
param_id : Onboard parameter id (int8_t)
param_value : Onboard parameter value (float)
param_count : Total number of onboard parameters (uint16_t)
param_index : Index of this onboard parameter (uint16_t)
'''
msg = MAVLink_param_value_message(param_id, param_value, param_count, param_index)
msg.pack(self)
return msg
def param_value_send(self, param_id, param_value, param_count, param_index):
'''
                Emit the value of an onboard parameter. The inclusion of param_count
                and param_index in the message allows the recipient to
                keep track of received parameters and to re-request
                missing parameters after a loss or timeout.
param_id : Onboard parameter id (int8_t)
param_value : Onboard parameter value (float)
param_count : Total number of onboard parameters (uint16_t)
param_index : Index of this onboard parameter (uint16_t)
'''
return self.send(self.param_value_encode(param_id, param_value, param_count, param_index))
def param_set_encode(self, target_system, target_component, param_id, param_value):
'''
Set a parameter value TEMPORARILY to RAM. It will be reset to default
on system reboot. Send the ACTION
MAV_ACTION_STORAGE_WRITE to PERMANENTLY write the RAM
contents to EEPROM. IMPORTANT: The receiving component
should acknowledge the new parameter value by sending
a param_value message to all communication partners.
This will also ensure that multiple GCS all have an
up-to-date list of all parameters. If the sending GCS
did not receive a PARAM_VALUE message within its
timeout time, it should re-send the PARAM_SET message.
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
param_id : Onboard parameter id (int8_t)
param_value : Onboard parameter value (float)
'''
msg = MAVLink_param_set_message(target_system, target_component, param_id, param_value)
msg.pack(self)
return msg
def param_set_send(self, target_system, target_component, param_id, param_value):
'''
Set a parameter value TEMPORARILY to RAM. It will be reset to default
on system reboot. Send the ACTION
MAV_ACTION_STORAGE_WRITE to PERMANENTLY write the RAM
contents to EEPROM. IMPORTANT: The receiving component
should acknowledge the new parameter value by sending
a param_value message to all communication partners.
This will also ensure that multiple GCS all have an
up-to-date list of all parameters. If the sending GCS
did not receive a PARAM_VALUE message within its
timeout time, it should re-send the PARAM_SET message.
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
param_id : Onboard parameter id (int8_t)
param_value : Onboard parameter value (float)
'''
return self.send(self.param_set_encode(target_system, target_component, param_id, param_value))
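# Illustrative retry loop for the rule described above (comments only;
# `wait_param_value` is a user-supplied helper returning the echoed
# PARAM_VALUE message or None on timeout -- it is not part of this class):
#
#   for _ in range(3):
#       mav.param_set_send(1, 1, param_id, value)  # placeholder target system/component
#       ack = wait_param_value(param_id)
#       if ack is not None:
#           break  # echo received: the new value was accepted
#   else:
#       raise MAVError('no PARAM_VALUE echo for %s' % param_id)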
def gps_raw_int_encode(self, usec, fix_type, lat, lon, alt, eph, epv, v, hdg):
'''
The global position, as returned by the Global Positioning System
(GPS). This is NOT the global position estimate of the
sytem, but rather a RAW sensor value. See message
GLOBAL_POSITION for the global position estimate.
Coordinate frame is right-handed, Z-axis up (GPS
frame)
usec : Timestamp (microseconds since UNIX epoch or microseconds since system boot) (uint64_t)
fix_type : 0-1: no fix, 2: 2D fix, 3: 3D fix. Some applications will not use the value of this field unless it is at least two, so always correctly fill in the fix. (uint8_t)
lat : Latitude in 1E7 degrees (int32_t)
lon : Longitude in 1E7 degrees (int32_t)
alt : Altitude in 1E3 meters (millimeters) (int32_t)
eph : GPS HDOP (float)
epv : GPS VDOP (float)
v : GPS ground speed (m/s) (float)
hdg : Compass heading in degrees, 0..360 degrees (float)
'''
msg = MAVLink_gps_raw_int_message(usec, fix_type, lat, lon, alt, eph, epv, v, hdg)
msg.pack(self)
return msg
def gps_raw_int_send(self, usec, fix_type, lat, lon, alt, eph, epv, v, hdg):
'''
The global position, as returned by the Global Positioning System
(GPS). This is NOT the global position estimate of the
                        system, but rather a RAW sensor value. See message
GLOBAL_POSITION for the global position estimate.
Coordinate frame is right-handed, Z-axis up (GPS
frame)
usec : Timestamp (microseconds since UNIX epoch or microseconds since system boot) (uint64_t)
fix_type : 0-1: no fix, 2: 2D fix, 3: 3D fix. Some applications will not use the value of this field unless it is at least two, so always correctly fill in the fix. (uint8_t)
lat : Latitude in 1E7 degrees (int32_t)
lon : Longitude in 1E7 degrees (int32_t)
alt : Altitude in 1E3 meters (millimeters) (int32_t)
eph : GPS HDOP (float)
epv : GPS VDOP (float)
v : GPS ground speed (m/s) (float)
hdg : Compass heading in degrees, 0..360 degrees (float)
'''
return self.send(self.gps_raw_int_encode(usec, fix_type, lat, lon, alt, eph, epv, v, hdg))
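        # Scaling sketch (editor's addition): GPS_RAW_INT packs degrees and
        # meters as integers per the field docs above. The floating-point
        # inputs and the remaining variable names here are hypothetical.
        #
        #   lat_deg, lon_deg, alt_m = 47.3977, 8.5456, 488.0
        #   mav.gps_raw_int_send(usec, 3,
        #                        int(lat_deg * 1e7), int(lon_deg * 1e7),
        #                        int(alt_m * 1e3), eph, epv, v, hdg)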
def scaled_imu_encode(self, usec, xacc, yacc, zacc, xgyro, ygyro, zgyro, xmag, ymag, zmag):
'''
        The RAW IMU readings for the usual 9DOF sensor setup. This message
                        should contain the values scaled to the described
                        units.
usec : Timestamp (microseconds since UNIX epoch or microseconds since system boot) (uint64_t)
xacc : X acceleration (mg) (int16_t)
yacc : Y acceleration (mg) (int16_t)
zacc : Z acceleration (mg) (int16_t)
xgyro : Angular speed around X axis (millirad /sec) (int16_t)
ygyro : Angular speed around Y axis (millirad /sec) (int16_t)
zgyro : Angular speed around Z axis (millirad /sec) (int16_t)
xmag : X Magnetic field (milli tesla) (int16_t)
ymag : Y Magnetic field (milli tesla) (int16_t)
zmag : Z Magnetic field (milli tesla) (int16_t)
'''
msg = MAVLink_scaled_imu_message(usec, xacc, yacc, zacc, xgyro, ygyro, zgyro, xmag, ymag, zmag)
msg.pack(self)
return msg
def scaled_imu_send(self, usec, xacc, yacc, zacc, xgyro, ygyro, zgyro, xmag, ymag, zmag):
'''
        The RAW IMU readings for the usual 9DOF sensor setup. This message
                        should contain the values scaled to the described
                        units.
usec : Timestamp (microseconds since UNIX epoch or microseconds since system boot) (uint64_t)
xacc : X acceleration (mg) (int16_t)
yacc : Y acceleration (mg) (int16_t)
zacc : Z acceleration (mg) (int16_t)
xgyro : Angular speed around X axis (millirad /sec) (int16_t)
ygyro : Angular speed around Y axis (millirad /sec) (int16_t)
zgyro : Angular speed around Z axis (millirad /sec) (int16_t)
xmag : X Magnetic field (milli tesla) (int16_t)
ymag : Y Magnetic field (milli tesla) (int16_t)
zmag : Z Magnetic field (milli tesla) (int16_t)
'''
return self.send(self.scaled_imu_encode(usec, xacc, yacc, zacc, xgyro, ygyro, zgyro, xmag, ymag, zmag))
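        # Unit-conversion sketch (editor's addition): SCALED_IMU carries
        # milli-g, millirad/s and milli-tesla as int16. Assuming hypothetical
        # SI-unit sensor inputs, the X-axis fields would be prepared as:
        #
        #   xacc_mg   = int(ax_ms2 / 9.81 * 1000)   # m/s^2 -> milli-g
        #   xgyro_mrs = int(gx_rads * 1000)         # rad/s  -> millirad/s
        #   xmag_mt   = int(bx_tesla * 1000)        # tesla  -> milli-tesla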
def gps_status_encode(self, satellites_visible, satellite_prn, satellite_used, satellite_elevation, satellite_azimuth, satellite_snr):
'''
The positioning status, as reported by GPS. This message is intended
to display status information about each satellite
visible to the receiver. See message GLOBAL_POSITION
for the global position estimate. This message can
contain information for up to 20 satellites.
satellites_visible : Number of satellites visible (uint8_t)
satellite_prn : Global satellite ID (int8_t)
satellite_used : 0: Satellite not used, 1: used for localization (int8_t)
satellite_elevation : Elevation (0: right on top of receiver, 90: on the horizon) of satellite (int8_t)
satellite_azimuth : Direction of satellite, 0: 0 deg, 255: 360 deg. (int8_t)
satellite_snr : Signal to noise ratio of satellite (int8_t)
'''
msg = MAVLink_gps_status_message(satellites_visible, satellite_prn, satellite_used, satellite_elevation, satellite_azimuth, satellite_snr)
msg.pack(self)
return msg
def gps_status_send(self, satellites_visible, satellite_prn, satellite_used, satellite_elevation, satellite_azimuth, satellite_snr):
'''
The positioning status, as reported by GPS. This message is intended
to display status information about each satellite
visible to the receiver. See message GLOBAL_POSITION
for the global position estimate. This message can
contain information for up to 20 satellites.
satellites_visible : Number of satellites visible (uint8_t)
satellite_prn : Global satellite ID (int8_t)
satellite_used : 0: Satellite not used, 1: used for localization (int8_t)
satellite_elevation : Elevation (0: right on top of receiver, 90: on the horizon) of satellite (int8_t)
satellite_azimuth : Direction of satellite, 0: 0 deg, 255: 360 deg. (int8_t)
satellite_snr : Signal to noise ratio of satellite (int8_t)
'''
return self.send(self.gps_status_encode(satellites_visible, satellite_prn, satellite_used, satellite_elevation, satellite_azimuth, satellite_snr))
def raw_imu_encode(self, usec, xacc, yacc, zacc, xgyro, ygyro, zgyro, xmag, ymag, zmag):
'''
The RAW IMU readings for the usual 9DOF sensor setup. This message
should always contain the true raw values without any
scaling to allow data capture and system debugging.
usec : Timestamp (microseconds since UNIX epoch or microseconds since system boot) (uint64_t)
xacc : X acceleration (raw) (int16_t)
yacc : Y acceleration (raw) (int16_t)
zacc : Z acceleration (raw) (int16_t)
xgyro : Angular speed around X axis (raw) (int16_t)
ygyro : Angular speed around Y axis (raw) (int16_t)
zgyro : Angular speed around Z axis (raw) (int16_t)
xmag : X Magnetic field (raw) (int16_t)
ymag : Y Magnetic field (raw) (int16_t)
zmag : Z Magnetic field (raw) (int16_t)
'''
msg = MAVLink_raw_imu_message(usec, xacc, yacc, zacc, xgyro, ygyro, zgyro, xmag, ymag, zmag)
msg.pack(self)
return msg
def raw_imu_send(self, usec, xacc, yacc, zacc, xgyro, ygyro, zgyro, xmag, ymag, zmag):
'''
The RAW IMU readings for the usual 9DOF sensor setup. This message
should always contain the true raw values without any
scaling to allow data capture and system debugging.
usec : Timestamp (microseconds since UNIX epoch or microseconds since system boot) (uint64_t)
xacc : X acceleration (raw) (int16_t)
yacc : Y acceleration (raw) (int16_t)
zacc : Z acceleration (raw) (int16_t)
xgyro : Angular speed around X axis (raw) (int16_t)
ygyro : Angular speed around Y axis (raw) (int16_t)
zgyro : Angular speed around Z axis (raw) (int16_t)
xmag : X Magnetic field (raw) (int16_t)
ymag : Y Magnetic field (raw) (int16_t)
zmag : Z Magnetic field (raw) (int16_t)
'''
return self.send(self.raw_imu_encode(usec, xacc, yacc, zacc, xgyro, ygyro, zgyro, xmag, ymag, zmag))
def raw_pressure_encode(self, usec, press_abs, press_diff1, press_diff2, temperature):
'''
The RAW pressure readings for the typical setup of one absolute
pressure and one differential pressure sensor. The
sensor values should be the raw, UNSCALED ADC values.
usec : Timestamp (microseconds since UNIX epoch or microseconds since system boot) (uint64_t)
press_abs : Absolute pressure (raw) (int16_t)
press_diff1 : Differential pressure 1 (raw) (int16_t)
press_diff2 : Differential pressure 2 (raw) (int16_t)
temperature : Raw Temperature measurement (raw) (int16_t)
'''
msg = MAVLink_raw_pressure_message(usec, press_abs, press_diff1, press_diff2, temperature)
msg.pack(self)
return msg
def raw_pressure_send(self, usec, press_abs, press_diff1, press_diff2, temperature):
'''
The RAW pressure readings for the typical setup of one absolute
pressure and one differential pressure sensor. The
sensor values should be the raw, UNSCALED ADC values.
usec : Timestamp (microseconds since UNIX epoch or microseconds since system boot) (uint64_t)
press_abs : Absolute pressure (raw) (int16_t)
press_diff1 : Differential pressure 1 (raw) (int16_t)
press_diff2 : Differential pressure 2 (raw) (int16_t)
temperature : Raw Temperature measurement (raw) (int16_t)
'''
return self.send(self.raw_pressure_encode(usec, press_abs, press_diff1, press_diff2, temperature))
def scaled_pressure_encode(self, usec, press_abs, press_diff, temperature):
'''
        The pressure readings for the typical setup of one absolute and
                        one differential pressure sensor. The units are as
                        specified in each field.
usec : Timestamp (microseconds since UNIX epoch or microseconds since system boot) (uint64_t)
press_abs : Absolute pressure (hectopascal) (float)
press_diff : Differential pressure 1 (hectopascal) (float)
temperature : Temperature measurement (0.01 degrees celsius) (int16_t)
'''
msg = MAVLink_scaled_pressure_message(usec, press_abs, press_diff, temperature)
msg.pack(self)
return msg
def scaled_pressure_send(self, usec, press_abs, press_diff, temperature):
'''
        The pressure readings for the typical setup of one absolute and
                        one differential pressure sensor. The units are as
                        specified in each field.
usec : Timestamp (microseconds since UNIX epoch or microseconds since system boot) (uint64_t)
press_abs : Absolute pressure (hectopascal) (float)
press_diff : Differential pressure 1 (hectopascal) (float)
temperature : Temperature measurement (0.01 degrees celsius) (int16_t)
'''
return self.send(self.scaled_pressure_encode(usec, press_abs, press_diff, temperature))
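        # Unit sketch (editor's addition): SCALED_PRESSURE uses hectopascal
        # floats and temperature in hundredths of a degree Celsius. Assuming
        # hypothetical SI sensor values:
        #
        #   press_hpa = press_pa / 100.0            # Pa   -> hPa
        #   temp_cdeg = int(temp_celsius * 100)     # degC -> 0.01 degC
        #   mav.scaled_pressure_send(usec, press_hpa, diff_hpa, temp_cdeg)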
def attitude_encode(self, usec, roll, pitch, yaw, rollspeed, pitchspeed, yawspeed):
'''
The attitude in the aeronautical frame (right-handed, Z-down, X-front,
Y-right).
usec : Timestamp (microseconds since UNIX epoch or microseconds since system boot) (uint64_t)
roll : Roll angle (rad) (float)
pitch : Pitch angle (rad) (float)
yaw : Yaw angle (rad) (float)
rollspeed : Roll angular speed (rad/s) (float)
pitchspeed : Pitch angular speed (rad/s) (float)
yawspeed : Yaw angular speed (rad/s) (float)
'''
msg = MAVLink_attitude_message(usec, roll, pitch, yaw, rollspeed, pitchspeed, yawspeed)
msg.pack(self)
return msg
def attitude_send(self, usec, roll, pitch, yaw, rollspeed, pitchspeed, yawspeed):
'''
The attitude in the aeronautical frame (right-handed, Z-down, X-front,
Y-right).
usec : Timestamp (microseconds since UNIX epoch or microseconds since system boot) (uint64_t)
roll : Roll angle (rad) (float)
pitch : Pitch angle (rad) (float)
yaw : Yaw angle (rad) (float)
rollspeed : Roll angular speed (rad/s) (float)
pitchspeed : Pitch angular speed (rad/s) (float)
yawspeed : Yaw angular speed (rad/s) (float)
'''
return self.send(self.attitude_encode(usec, roll, pitch, yaw, rollspeed, pitchspeed, yawspeed))
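        # Example (editor's sketch): ATTITUDE expects radians, so degree
        # inputs must be converted first. All variable names are hypothetical.
        #
        #   import math
        #   mav.attitude_send(usec,
        #                     math.radians(roll_deg),
        #                     math.radians(pitch_deg),
        #                     math.radians(yaw_deg),
        #                     0.0, 0.0, 0.0)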
def local_position_encode(self, usec, x, y, z, vx, vy, vz):
'''
The filtered local position (e.g. fused computer vision and
accelerometers). Coordinate frame is right-handed,
Z-axis down (aeronautical frame)
usec : Timestamp (microseconds since UNIX epoch or microseconds since system boot) (uint64_t)
x : X Position (float)
y : Y Position (float)
z : Z Position (float)
vx : X Speed (float)
vy : Y Speed (float)
vz : Z Speed (float)
'''
msg = MAVLink_local_position_message(usec, x, y, z, vx, vy, vz)
msg.pack(self)
return msg
def local_position_send(self, usec, x, y, z, vx, vy, vz):
'''
The filtered local position (e.g. fused computer vision and
accelerometers). Coordinate frame is right-handed,
Z-axis down (aeronautical frame)
usec : Timestamp (microseconds since UNIX epoch or microseconds since system boot) (uint64_t)
x : X Position (float)
y : Y Position (float)
z : Z Position (float)
vx : X Speed (float)
vy : Y Speed (float)
vz : Z Speed (float)
'''
return self.send(self.local_position_encode(usec, x, y, z, vx, vy, vz))
def global_position_encode(self, usec, lat, lon, alt, vx, vy, vz):
'''
The filtered global position (e.g. fused GPS and accelerometers).
Coordinate frame is right-handed, Z-axis up (GPS
frame)
                usec                      : Timestamp (microseconds since UNIX epoch) (uint64_t)
lat : Latitude, in degrees (float)
lon : Longitude, in degrees (float)
alt : Absolute altitude, in meters (float)
vx : X Speed (in Latitude direction, positive: going north) (float)
vy : Y Speed (in Longitude direction, positive: going east) (float)
vz : Z Speed (in Altitude direction, positive: going up) (float)
'''
msg = MAVLink_global_position_message(usec, lat, lon, alt, vx, vy, vz)
msg.pack(self)
return msg
def global_position_send(self, usec, lat, lon, alt, vx, vy, vz):
'''
The filtered global position (e.g. fused GPS and accelerometers).
Coordinate frame is right-handed, Z-axis up (GPS
frame)
                usec                      : Timestamp (microseconds since UNIX epoch) (uint64_t)
lat : Latitude, in degrees (float)
lon : Longitude, in degrees (float)
alt : Absolute altitude, in meters (float)
vx : X Speed (in Latitude direction, positive: going north) (float)
vy : Y Speed (in Longitude direction, positive: going east) (float)
vz : Z Speed (in Altitude direction, positive: going up) (float)
'''
return self.send(self.global_position_encode(usec, lat, lon, alt, vx, vy, vz))
def gps_raw_encode(self, usec, fix_type, lat, lon, alt, eph, epv, v, hdg):
'''
The global position, as returned by the Global Positioning System
(GPS). This is NOT the global position estimate of the
                        system, but rather a RAW sensor value. See message
GLOBAL_POSITION for the global position estimate.
Coordinate frame is right-handed, Z-axis up (GPS
frame)
usec : Timestamp (microseconds since UNIX epoch or microseconds since system boot) (uint64_t)
fix_type : 0-1: no fix, 2: 2D fix, 3: 3D fix. Some applications will not use the value of this field unless it is at least two, so always correctly fill in the fix. (uint8_t)
lat : Latitude in degrees (float)
lon : Longitude in degrees (float)
alt : Altitude in meters (float)
eph : GPS HDOP (float)
epv : GPS VDOP (float)
v : GPS ground speed (float)
hdg : Compass heading in degrees, 0..360 degrees (float)
'''
msg = MAVLink_gps_raw_message(usec, fix_type, lat, lon, alt, eph, epv, v, hdg)
msg.pack(self)
return msg
def gps_raw_send(self, usec, fix_type, lat, lon, alt, eph, epv, v, hdg):
'''
The global position, as returned by the Global Positioning System
(GPS). This is NOT the global position estimate of the
                        system, but rather a RAW sensor value. See message
GLOBAL_POSITION for the global position estimate.
Coordinate frame is right-handed, Z-axis up (GPS
frame)
usec : Timestamp (microseconds since UNIX epoch or microseconds since system boot) (uint64_t)
fix_type : 0-1: no fix, 2: 2D fix, 3: 3D fix. Some applications will not use the value of this field unless it is at least two, so always correctly fill in the fix. (uint8_t)
lat : Latitude in degrees (float)
lon : Longitude in degrees (float)
alt : Altitude in meters (float)
eph : GPS HDOP (float)
epv : GPS VDOP (float)
v : GPS ground speed (float)
hdg : Compass heading in degrees, 0..360 degrees (float)
'''
return self.send(self.gps_raw_encode(usec, fix_type, lat, lon, alt, eph, epv, v, hdg))
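        # Note + sketch (editor's addition): GPS_RAW is the floating-point
        # sibling of GPS_RAW_INT above -- here lat/lon/alt are plain floats in
        # degrees and meters, with no 1E7/1E3 integer scaling. The trailing
        # argument names are hypothetical:
        #
        #   mav.gps_raw_send(usec, 3, 47.3977, 8.5456, 488.0, eph, epv, v, hdg)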
def sys_status_encode(self, mode, nav_mode, status, load, vbat, battery_remaining, packet_drop):
'''
The general system state. If the system is following the MAVLink
standard, the system state is mainly defined by three
orthogonal states/modes: The system mode, which is
either LOCKED (motors shut down and locked), MANUAL
(system under RC control), GUIDED (system with
autonomous position control, position setpoint
controlled manually) or AUTO (system guided by
                        path/waypoint planner). The NAV_MODE defines the
                        current flight state: LIFTOFF (often an open-loop
                        maneuver), LANDING, WAYPOINTS or VECTOR. This
                        represents the internal navigation state machine. The
                        system status shows whether the system is currently
                        active or not and if an emergency occurred. During the
                        CRITICAL and EMERGENCY states the MAV is still
                        considered to be active, but should start emergency
                        procedures autonomously. After a failure occurred it
                        should first move from active to critical to allow
                        manual intervention and then move to emergency after a
                        certain timeout.
mode : System mode, see MAV_MODE ENUM in mavlink/include/mavlink_types.h (uint8_t)
nav_mode : Navigation mode, see MAV_NAV_MODE ENUM (uint8_t)
status : System status flag, see MAV_STATUS ENUM (uint8_t)
load : Maximum usage in percent of the mainloop time, (0%: 0, 100%: 1000) should be always below 1000 (uint16_t)
vbat : Battery voltage, in millivolts (1 = 1 millivolt) (uint16_t)
battery_remaining : Remaining battery energy: (0%: 0, 100%: 1000) (uint16_t)
packet_drop : Dropped packets (packets that were corrupted on reception on the MAV) (uint16_t)
'''
msg = MAVLink_sys_status_message(mode, nav_mode, status, load, vbat, battery_remaining, packet_drop)
msg.pack(self)
return msg
def sys_status_send(self, mode, nav_mode, status, load, vbat, battery_remaining, packet_drop):
'''
The general system state. If the system is following the MAVLink
standard, the system state is mainly defined by three
orthogonal states/modes: The system mode, which is
either LOCKED (motors shut down and locked), MANUAL
(system under RC control), GUIDED (system with
autonomous position control, position setpoint
controlled manually) or AUTO (system guided by
                        path/waypoint planner). The NAV_MODE defines the
                        current flight state: LIFTOFF (often an open-loop
                        maneuver), LANDING, WAYPOINTS or VECTOR. This
                        represents the internal navigation state machine. The
                        system status shows whether the system is currently
                        active or not and if an emergency occurred. During the
                        CRITICAL and EMERGENCY states the MAV is still
                        considered to be active, but should start emergency
                        procedures autonomously. After a failure occurred it
                        should first move from active to critical to allow
                        manual intervention and then move to emergency after a
                        certain timeout.
mode : System mode, see MAV_MODE ENUM in mavlink/include/mavlink_types.h (uint8_t)
nav_mode : Navigation mode, see MAV_NAV_MODE ENUM (uint8_t)
status : System status flag, see MAV_STATUS ENUM (uint8_t)
load : Maximum usage in percent of the mainloop time, (0%: 0, 100%: 1000) should be always below 1000 (uint16_t)
vbat : Battery voltage, in millivolts (1 = 1 millivolt) (uint16_t)
battery_remaining : Remaining battery energy: (0%: 0, 100%: 1000) (uint16_t)
packet_drop : Dropped packets (packets that were corrupted on reception on the MAV) (uint16_t)
'''
return self.send(self.sys_status_encode(mode, nav_mode, status, load, vbat, battery_remaining, packet_drop))
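        # Scaling sketch (editor's addition): load and battery_remaining are
        # expressed on a 0..1000 scale and vbat in millivolts. Assuming
        # hypothetical fractional inputs:
        #
        #   mav.sys_status_send(mode, nav_mode, status,
        #                       load=int(cpu_fraction * 1000),          # 0.42  -> 420
        #                       vbat=int(battery_volts * 1000),         # 11.1 V -> 11100
        #                       battery_remaining=int(batt_frac * 1000),
        #                       packet_drop=0)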
def rc_channels_raw_encode(self, chan1_raw, chan2_raw, chan3_raw, chan4_raw, chan5_raw, chan6_raw, chan7_raw, chan8_raw, rssi):
'''
The RAW values of the RC channels received. The standard PPM
modulation is as follows: 1000 microseconds: 0%, 2000
microseconds: 100%. Individual receivers/transmitters
might violate this specification.
chan1_raw : RC channel 1 value, in microseconds (uint16_t)
chan2_raw : RC channel 2 value, in microseconds (uint16_t)
chan3_raw : RC channel 3 value, in microseconds (uint16_t)
chan4_raw : RC channel 4 value, in microseconds (uint16_t)
chan5_raw : RC channel 5 value, in microseconds (uint16_t)
chan6_raw : RC channel 6 value, in microseconds (uint16_t)
chan7_raw : RC channel 7 value, in microseconds (uint16_t)
chan8_raw : RC channel 8 value, in microseconds (uint16_t)
rssi : Receive signal strength indicator, 0: 0%, 255: 100% (uint8_t)
'''
msg = MAVLink_rc_channels_raw_message(chan1_raw, chan2_raw, chan3_raw, chan4_raw, chan5_raw, chan6_raw, chan7_raw, chan8_raw, rssi)
msg.pack(self)
return msg
def rc_channels_raw_send(self, chan1_raw, chan2_raw, chan3_raw, chan4_raw, chan5_raw, chan6_raw, chan7_raw, chan8_raw, rssi):
'''
The RAW values of the RC channels received. The standard PPM
modulation is as follows: 1000 microseconds: 0%, 2000
microseconds: 100%. Individual receivers/transmitters
might violate this specification.
chan1_raw : RC channel 1 value, in microseconds (uint16_t)
chan2_raw : RC channel 2 value, in microseconds (uint16_t)
chan3_raw : RC channel 3 value, in microseconds (uint16_t)
chan4_raw : RC channel 4 value, in microseconds (uint16_t)
chan5_raw : RC channel 5 value, in microseconds (uint16_t)
chan6_raw : RC channel 6 value, in microseconds (uint16_t)
chan7_raw : RC channel 7 value, in microseconds (uint16_t)
chan8_raw : RC channel 8 value, in microseconds (uint16_t)
rssi : Receive signal strength indicator, 0: 0%, 255: 100% (uint8_t)
'''
return self.send(self.rc_channels_raw_encode(chan1_raw, chan2_raw, chan3_raw, chan4_raw, chan5_raw, chan6_raw, chan7_raw, chan8_raw, rssi))
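        # Conversion sketch (editor's addition): mapping a raw PPM value back
        # to a stick percentage under the standard 1000..2000 microsecond
        # convention described above (individual receivers may deviate):
        #
        #   percent = (chan1_raw - 1000) / 10.0     # 1500 us -> 50.0 %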
def rc_channels_scaled_encode(self, chan1_scaled, chan2_scaled, chan3_scaled, chan4_scaled, chan5_scaled, chan6_scaled, chan7_scaled, chan8_scaled, rssi):
'''
The scaled values of the RC channels received. (-100%) -10000, (0%) 0,
(100%) 10000
chan1_scaled : RC channel 1 value scaled, (-100%) -10000, (0%) 0, (100%) 10000 (int16_t)
chan2_scaled : RC channel 2 value scaled, (-100%) -10000, (0%) 0, (100%) 10000 (int16_t)
chan3_scaled : RC channel 3 value scaled, (-100%) -10000, (0%) 0, (100%) 10000 (int16_t)
chan4_scaled : RC channel 4 value scaled, (-100%) -10000, (0%) 0, (100%) 10000 (int16_t)
chan5_scaled : RC channel 5 value scaled, (-100%) -10000, (0%) 0, (100%) 10000 (int16_t)
chan6_scaled : RC channel 6 value scaled, (-100%) -10000, (0%) 0, (100%) 10000 (int16_t)
chan7_scaled : RC channel 7 value scaled, (-100%) -10000, (0%) 0, (100%) 10000 (int16_t)
chan8_scaled : RC channel 8 value scaled, (-100%) -10000, (0%) 0, (100%) 10000 (int16_t)
rssi : Receive signal strength indicator, 0: 0%, 255: 100% (uint8_t)
'''
msg = MAVLink_rc_channels_scaled_message(chan1_scaled, chan2_scaled, chan3_scaled, chan4_scaled, chan5_scaled, chan6_scaled, chan7_scaled, chan8_scaled, rssi)
msg.pack(self)
return msg
def rc_channels_scaled_send(self, chan1_scaled, chan2_scaled, chan3_scaled, chan4_scaled, chan5_scaled, chan6_scaled, chan7_scaled, chan8_scaled, rssi):
'''
The scaled values of the RC channels received. (-100%) -10000, (0%) 0,
(100%) 10000
chan1_scaled : RC channel 1 value scaled, (-100%) -10000, (0%) 0, (100%) 10000 (int16_t)
chan2_scaled : RC channel 2 value scaled, (-100%) -10000, (0%) 0, (100%) 10000 (int16_t)
chan3_scaled : RC channel 3 value scaled, (-100%) -10000, (0%) 0, (100%) 10000 (int16_t)
chan4_scaled : RC channel 4 value scaled, (-100%) -10000, (0%) 0, (100%) 10000 (int16_t)
chan5_scaled : RC channel 5 value scaled, (-100%) -10000, (0%) 0, (100%) 10000 (int16_t)
chan6_scaled : RC channel 6 value scaled, (-100%) -10000, (0%) 0, (100%) 10000 (int16_t)
chan7_scaled : RC channel 7 value scaled, (-100%) -10000, (0%) 0, (100%) 10000 (int16_t)
chan8_scaled : RC channel 8 value scaled, (-100%) -10000, (0%) 0, (100%) 10000 (int16_t)
rssi : Receive signal strength indicator, 0: 0%, 255: 100% (uint8_t)
'''
return self.send(self.rc_channels_scaled_encode(chan1_scaled, chan2_scaled, chan3_scaled, chan4_scaled, chan5_scaled, chan6_scaled, chan7_scaled, chan8_scaled, rssi))
def servo_output_raw_encode(self, servo1_raw, servo2_raw, servo3_raw, servo4_raw, servo5_raw, servo6_raw, servo7_raw, servo8_raw):
'''
The RAW values of the servo outputs (for RC input from the remote, use
the RC_CHANNELS messages). The standard PPM modulation
is as follows: 1000 microseconds: 0%, 2000
microseconds: 100%.
servo1_raw : Servo output 1 value, in microseconds (uint16_t)
servo2_raw : Servo output 2 value, in microseconds (uint16_t)
servo3_raw : Servo output 3 value, in microseconds (uint16_t)
servo4_raw : Servo output 4 value, in microseconds (uint16_t)
servo5_raw : Servo output 5 value, in microseconds (uint16_t)
servo6_raw : Servo output 6 value, in microseconds (uint16_t)
servo7_raw : Servo output 7 value, in microseconds (uint16_t)
servo8_raw : Servo output 8 value, in microseconds (uint16_t)
'''
msg = MAVLink_servo_output_raw_message(servo1_raw, servo2_raw, servo3_raw, servo4_raw, servo5_raw, servo6_raw, servo7_raw, servo8_raw)
msg.pack(self)
return msg
def servo_output_raw_send(self, servo1_raw, servo2_raw, servo3_raw, servo4_raw, servo5_raw, servo6_raw, servo7_raw, servo8_raw):
'''
The RAW values of the servo outputs (for RC input from the remote, use
the RC_CHANNELS messages). The standard PPM modulation
is as follows: 1000 microseconds: 0%, 2000
microseconds: 100%.
servo1_raw : Servo output 1 value, in microseconds (uint16_t)
servo2_raw : Servo output 2 value, in microseconds (uint16_t)
servo3_raw : Servo output 3 value, in microseconds (uint16_t)
servo4_raw : Servo output 4 value, in microseconds (uint16_t)
servo5_raw : Servo output 5 value, in microseconds (uint16_t)
servo6_raw : Servo output 6 value, in microseconds (uint16_t)
servo7_raw : Servo output 7 value, in microseconds (uint16_t)
servo8_raw : Servo output 8 value, in microseconds (uint16_t)
'''
return self.send(self.servo_output_raw_encode(servo1_raw, servo2_raw, servo3_raw, servo4_raw, servo5_raw, servo6_raw, servo7_raw, servo8_raw))
def waypoint_encode(self, target_system, target_component, seq, frame, command, current, autocontinue, param1, param2, param3, param4, x, y, z):
'''
Message encoding a waypoint. This message is emitted to announce
the presence of a waypoint and to set a waypoint on
the system. The waypoint can be either in x, y, z
meters (type: LOCAL) or x:lat, y:lon, z:altitude.
Local frame is Z-down, right handed, global frame is
Z-up, right handed
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
seq : Sequence (uint16_t)
frame : The coordinate system of the waypoint. see MAV_FRAME in mavlink_types.h (uint8_t)
command : The scheduled action for the waypoint. see MAV_COMMAND in common.xml MAVLink specs (uint8_t)
current : false:0, true:1 (uint8_t)
autocontinue : autocontinue to next wp (uint8_t)
param1 : PARAM1 / For NAV command waypoints: Radius in which the waypoint is accepted as reached, in meters (float)
param2 : PARAM2 / For NAV command waypoints: Time that the MAV should stay inside the PARAM1 radius before advancing, in milliseconds (float)
param3 : PARAM3 / For LOITER command waypoints: Orbit to circle around the waypoint, in meters. If positive the orbit direction should be clockwise, if negative the orbit direction should be counter-clockwise. (float)
param4 : PARAM4 / For NAV and LOITER command waypoints: Yaw orientation in degrees, [0..360] 0 = NORTH (float)
x : PARAM5 / local: x position, global: latitude (float)
                y                         : PARAM6 / local: y position, global: longitude (float)
                z                         : PARAM7 / local: z position, global: altitude (float)
'''
msg = MAVLink_waypoint_message(target_system, target_component, seq, frame, command, current, autocontinue, param1, param2, param3, param4, x, y, z)
msg.pack(self)
return msg
def waypoint_send(self, target_system, target_component, seq, frame, command, current, autocontinue, param1, param2, param3, param4, x, y, z):
'''
Message encoding a waypoint. This message is emitted to announce
the presence of a waypoint and to set a waypoint on
the system. The waypoint can be either in x, y, z
meters (type: LOCAL) or x:lat, y:lon, z:altitude.
Local frame is Z-down, right handed, global frame is
Z-up, right handed
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
seq : Sequence (uint16_t)
frame : The coordinate system of the waypoint. see MAV_FRAME in mavlink_types.h (uint8_t)
command : The scheduled action for the waypoint. see MAV_COMMAND in common.xml MAVLink specs (uint8_t)
current : false:0, true:1 (uint8_t)
autocontinue : autocontinue to next wp (uint8_t)
param1 : PARAM1 / For NAV command waypoints: Radius in which the waypoint is accepted as reached, in meters (float)
param2 : PARAM2 / For NAV command waypoints: Time that the MAV should stay inside the PARAM1 radius before advancing, in milliseconds (float)
param3 : PARAM3 / For LOITER command waypoints: Orbit to circle around the waypoint, in meters. If positive the orbit direction should be clockwise, if negative the orbit direction should be counter-clockwise. (float)
param4 : PARAM4 / For NAV and LOITER command waypoints: Yaw orientation in degrees, [0..360] 0 = NORTH (float)
x : PARAM5 / local: x position, global: latitude (float)
                y                         : PARAM6 / local: y position, global: longitude (float)
                z                         : PARAM7 / local: z position, global: altitude (float)
'''
return self.send(self.waypoint_encode(target_system, target_component, seq, frame, command, current, autocontinue, param1, param2, param3, param4, x, y, z))
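        # Usage sketch (editor's addition): a single global NAV waypoint with
        # a 10 m acceptance radius. The command value 16 is assumed to be
        # MAV_CMD_NAV_WAYPOINT and frame=0 a global frame, per the enums the
        # docstring references; verify against the actual headers.
        #
        #   mav.waypoint_send(1, 1, seq=0, frame=0, command=16,
        #                     current=1, autocontinue=1,
        #                     param1=10.0, param2=0.0, param3=0.0, param4=0.0,
        #                     x=47.3977, y=8.5456, z=120.0)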
def waypoint_request_encode(self, target_system, target_component, seq):
'''
Request the information of the waypoint with the sequence number seq.
The response of the system to this message should be a
WAYPOINT message.
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
seq : Sequence (uint16_t)
'''
msg = MAVLink_waypoint_request_message(target_system, target_component, seq)
msg.pack(self)
return msg
def waypoint_request_send(self, target_system, target_component, seq):
'''
Request the information of the waypoint with the sequence number seq.
The response of the system to this message should be a
WAYPOINT message.
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
seq : Sequence (uint16_t)
'''
return self.send(self.waypoint_request_encode(target_system, target_component, seq))
def waypoint_set_current_encode(self, target_system, target_component, seq):
'''
Set the waypoint with sequence number seq as current waypoint. This
means that the MAV will continue to this waypoint on
the shortest path (not following the waypoints in-
between).
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
seq : Sequence (uint16_t)
'''
msg = MAVLink_waypoint_set_current_message(target_system, target_component, seq)
msg.pack(self)
return msg
def waypoint_set_current_send(self, target_system, target_component, seq):
'''
Set the waypoint with sequence number seq as current waypoint. This
means that the MAV will continue to this waypoint on
the shortest path (not following the waypoints in-
between).
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
seq : Sequence (uint16_t)
'''
return self.send(self.waypoint_set_current_encode(target_system, target_component, seq))
def waypoint_current_encode(self, seq):
'''
        Message that announces the sequence number of the currently active
waypoint. The MAV will fly towards this waypoint.
seq : Sequence (uint16_t)
'''
msg = MAVLink_waypoint_current_message(seq)
msg.pack(self)
return msg
def waypoint_current_send(self, seq):
'''
        Message that announces the sequence number of the currently active
waypoint. The MAV will fly towards this waypoint.
seq : Sequence (uint16_t)
'''
return self.send(self.waypoint_current_encode(seq))
def waypoint_request_list_encode(self, target_system, target_component):
'''
Request the overall list of waypoints from the system/component.
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
'''
msg = MAVLink_waypoint_request_list_message(target_system, target_component)
msg.pack(self)
return msg
def waypoint_request_list_send(self, target_system, target_component):
'''
Request the overall list of waypoints from the system/component.
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
'''
return self.send(self.waypoint_request_list_encode(target_system, target_component))
def waypoint_count_encode(self, target_system, target_component, count):
'''
        This message is emitted in response to WAYPOINT_REQUEST_LIST by the
MAV. The GCS can then request the individual waypoints
based on the knowledge of the total number of
waypoints.
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
count : Number of Waypoints in the Sequence (uint16_t)
'''
msg = MAVLink_waypoint_count_message(target_system, target_component, count)
msg.pack(self)
return msg
def waypoint_count_send(self, target_system, target_component, count):
'''
        This message is emitted in response to WAYPOINT_REQUEST_LIST by the
MAV. The GCS can then request the individual waypoints
based on the knowledge of the total number of
waypoints.
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
count : Number of Waypoints in the Sequence (uint16_t)
'''
return self.send(self.waypoint_count_encode(target_system, target_component, count))
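        # Protocol sketch (editor's addition): how a GCS typically drives a
        # waypoint download using the messages above. The receive/match helper
        # 'wait_for' is hypothetical glue, not part of this class.
        #
        #   mav.waypoint_request_list_send(1, 1)
        #   count = wait_for('WAYPOINT_COUNT').count
        #   for seq in range(count):
        #       mav.waypoint_request_send(1, 1, seq)
        #       wp = wait_for('WAYPOINT')
        #   mav.waypoint_ack_send(1, 1, type=0)   # 0: OK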
def waypoint_clear_all_encode(self, target_system, target_component):
'''
Delete all waypoints at once.
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
'''
msg = MAVLink_waypoint_clear_all_message(target_system, target_component)
msg.pack(self)
return msg
def waypoint_clear_all_send(self, target_system, target_component):
'''
Delete all waypoints at once.
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
'''
return self.send(self.waypoint_clear_all_encode(target_system, target_component))
def waypoint_reached_encode(self, seq):
'''
A certain waypoint has been reached. The system will either hold this
position (or circle on the orbit) or (if the
autocontinue on the WP was set) continue to the next
waypoint.
seq : Sequence (uint16_t)
'''
msg = MAVLink_waypoint_reached_message(seq)
msg.pack(self)
return msg
def waypoint_reached_send(self, seq):
'''
A certain waypoint has been reached. The system will either hold this
position (or circle on the orbit) or (if the
autocontinue on the WP was set) continue to the next
waypoint.
seq : Sequence (uint16_t)
'''
return self.send(self.waypoint_reached_encode(seq))
def waypoint_ack_encode(self, target_system, target_component, type):
'''
Ack message during waypoint handling. The type field states if this
message is a positive ack (type=0) or if an error
happened (type=non-zero).
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
type : 0: OK, 1: Error (uint8_t)
'''
msg = MAVLink_waypoint_ack_message(target_system, target_component, type)
msg.pack(self)
return msg
def waypoint_ack_send(self, target_system, target_component, type):
'''
Ack message during waypoint handling. The type field states if this
message is a positive ack (type=0) or if an error
happened (type=non-zero).
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
type : 0: OK, 1: Error (uint8_t)
'''
return self.send(self.waypoint_ack_encode(target_system, target_component, type))
def gps_set_global_origin_encode(self, target_system, target_component, latitude, longitude, altitude):
'''
        As local waypoints exist, the global waypoint reference makes it
                        possible to transform between the local coordinate
                        frame and the global (GPS) coordinate frame. This can
                        be necessary when e.g. indoor and outdoor settings are
                        connected and the MAV should move from indoor to
                        outdoor.
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
latitude : global position * 1E7 (int32_t)
longitude : global position * 1E7 (int32_t)
altitude : global position * 1000 (int32_t)
'''
msg = MAVLink_gps_set_global_origin_message(target_system, target_component, latitude, longitude, altitude)
msg.pack(self)
return msg
def gps_set_global_origin_send(self, target_system, target_component, latitude, longitude, altitude):
'''
        As local waypoints exist, the global waypoint reference makes it
                        possible to transform between the local coordinate
                        frame and the global (GPS) coordinate frame. This can
                        be necessary when e.g. indoor and outdoor settings are
                        connected and the MAV should move from indoor to
                        outdoor.
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
latitude : global position * 1E7 (int32_t)
longitude : global position * 1E7 (int32_t)
altitude : global position * 1000 (int32_t)
'''
return self.send(self.gps_set_global_origin_encode(target_system, target_component, latitude, longitude, altitude))
def gps_local_origin_set_encode(self, latitude, longitude, altitude):
'''
Once the MAV sets a new GPS-Local correspondence, this message
announces the origin (0,0,0) position
latitude : Latitude (WGS84), expressed as * 1E7 (int32_t)
longitude : Longitude (WGS84), expressed as * 1E7 (int32_t)
altitude : Altitude(WGS84), expressed as * 1000 (int32_t)
'''
msg = MAVLink_gps_local_origin_set_message(latitude, longitude, altitude)
msg.pack(self)
return msg
def gps_local_origin_set_send(self, latitude, longitude, altitude):
'''
Once the MAV sets a new GPS-Local correspondence, this message
announces the origin (0,0,0) position
latitude : Latitude (WGS84), expressed as * 1E7 (int32_t)
longitude : Longitude (WGS84), expressed as * 1E7 (int32_t)
altitude : Altitude(WGS84), expressed as * 1000 (int32_t)
'''
return self.send(self.gps_local_origin_set_encode(latitude, longitude, altitude))
def local_position_setpoint_set_encode(self, target_system, target_component, x, y, z, yaw):
'''
Set the setpoint for a local position controller. This is the position
in local coordinates the MAV should fly to. This
message is sent by the path/waypoint planner to the
onboard position controller. As some MAVs have a
degree of freedom in yaw (e.g. all
helicopters/quadrotors), the desired yaw angle is part
of the message.
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
x : x position (float)
y : y position (float)
z : z position (float)
yaw : Desired yaw angle (float)
'''
msg = MAVLink_local_position_setpoint_set_message(target_system, target_component, x, y, z, yaw)
msg.pack(self)
return msg
def local_position_setpoint_set_send(self, target_system, target_component, x, y, z, yaw):
'''
Set the setpoint for a local position controller. This is the position
in local coordinates the MAV should fly to. This
message is sent by the path/waypoint planner to the
onboard position controller. As some MAVs have a
degree of freedom in yaw (e.g. all
helicopters/quadrotors), the desired yaw angle is part
of the message.
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
x : x position (float)
y : y position (float)
z : z position (float)
yaw : Desired yaw angle (float)
'''
return self.send(self.local_position_setpoint_set_encode(target_system, target_component, x, y, z, yaw))
def local_position_setpoint_encode(self, x, y, z, yaw):
'''
Transmit the current local setpoint of the controller to other MAVs
(collision avoidance) and to the GCS.
x : x position (float)
y : y position (float)
z : z position (float)
yaw : Desired yaw angle (float)
'''
msg = MAVLink_local_position_setpoint_message(x, y, z, yaw)
msg.pack(self)
return msg
def local_position_setpoint_send(self, x, y, z, yaw):
'''
Transmit the current local setpoint of the controller to other MAVs
(collision avoidance) and to the GCS.
x : x position (float)
y : y position (float)
z : z position (float)
yaw : Desired yaw angle (float)
'''
return self.send(self.local_position_setpoint_encode(x, y, z, yaw))
def control_status_encode(self, position_fix, vision_fix, gps_fix, ahrs_health, control_att, control_pos_xy, control_pos_z, control_pos_yaw):
'''
position_fix : Position fix: 0: lost, 2: 2D position fix, 3: 3D position fix (uint8_t)
vision_fix : Vision position fix: 0: lost, 1: 2D local position hold, 2: 2D global position fix, 3: 3D global position fix (uint8_t)
gps_fix : GPS position fix: 0: no reception, 1: Minimum 1 satellite, but no position fix, 2: 2D position fix, 3: 3D position fix (uint8_t)
ahrs_health : Attitude estimation health: 0: poor, 255: excellent (uint8_t)
control_att : 0: Attitude control disabled, 1: enabled (uint8_t)
control_pos_xy : 0: X, Y position control disabled, 1: enabled (uint8_t)
control_pos_z : 0: Z position control disabled, 1: enabled (uint8_t)
control_pos_yaw : 0: Yaw angle control disabled, 1: enabled (uint8_t)
'''
msg = MAVLink_control_status_message(position_fix, vision_fix, gps_fix, ahrs_health, control_att, control_pos_xy, control_pos_z, control_pos_yaw)
msg.pack(self)
return msg
def control_status_send(self, position_fix, vision_fix, gps_fix, ahrs_health, control_att, control_pos_xy, control_pos_z, control_pos_yaw):
'''
position_fix : Position fix: 0: lost, 2: 2D position fix, 3: 3D position fix (uint8_t)
vision_fix : Vision position fix: 0: lost, 1: 2D local position hold, 2: 2D global position fix, 3: 3D global position fix (uint8_t)
gps_fix : GPS position fix: 0: no reception, 1: Minimum 1 satellite, but no position fix, 2: 2D position fix, 3: 3D position fix (uint8_t)
ahrs_health : Attitude estimation health: 0: poor, 255: excellent (uint8_t)
control_att : 0: Attitude control disabled, 1: enabled (uint8_t)
control_pos_xy : 0: X, Y position control disabled, 1: enabled (uint8_t)
control_pos_z : 0: Z position control disabled, 1: enabled (uint8_t)
control_pos_yaw : 0: Yaw angle control disabled, 1: enabled (uint8_t)
'''
return self.send(self.control_status_encode(position_fix, vision_fix, gps_fix, ahrs_health, control_att, control_pos_xy, control_pos_z, control_pos_yaw))
def safety_set_allowed_area_encode(self, target_system, target_component, frame, p1x, p1y, p1z, p2x, p2y, p2z):
'''
Set a safety zone (volume), which is defined by two corners of a cube.
This message can be used to tell the MAV which
setpoints/waypoints to accept and which to reject.
Safety areas are often enforced by national or
competition regulations.
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
frame : Coordinate frame, as defined by MAV_FRAME enum in mavlink_types.h. Can be either global, GPS, right-handed with Z axis up or local, right handed, Z axis down. (uint8_t)
p1x : x position 1 / Latitude 1 (float)
p1y : y position 1 / Longitude 1 (float)
p1z : z position 1 / Altitude 1 (float)
p2x : x position 2 / Latitude 2 (float)
p2y : y position 2 / Longitude 2 (float)
p2z : z position 2 / Altitude 2 (float)
'''
msg = MAVLink_safety_set_allowed_area_message(target_system, target_component, frame, p1x, p1y, p1z, p2x, p2y, p2z)
msg.pack(self)
return msg
def safety_set_allowed_area_send(self, target_system, target_component, frame, p1x, p1y, p1z, p2x, p2y, p2z):
'''
Set a safety zone (volume), which is defined by two corners of a cube.
This message can be used to tell the MAV which
setpoints/waypoints to accept and which to reject.
Safety areas are often enforced by national or
competition regulations.
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
frame : Coordinate frame, as defined by MAV_FRAME enum in mavlink_types.h. Can be either global, GPS, right-handed with Z axis up or local, right handed, Z axis down. (uint8_t)
p1x : x position 1 / Latitude 1 (float)
p1y : y position 1 / Longitude 1 (float)
p1z : z position 1 / Altitude 1 (float)
p2x : x position 2 / Latitude 2 (float)
p2y : y position 2 / Longitude 2 (float)
p2z : z position 2 / Altitude 2 (float)
'''
return self.send(self.safety_set_allowed_area_encode(target_system, target_component, frame, p1x, p1y, p1z, p2x, p2y, p2z))
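        # Usage sketch (editor's addition): a local-frame safety cube from the
        # origin to (100, 100, -50). With the local frame's Z axis pointing
        # down, p2z=-50 corresponds to 50 m above the origin. frame=1 (local)
        # is an assumption about the MAV_FRAME enum referenced above.
        #
        #   mav.safety_set_allowed_area_send(1, 1, frame=1,
        #                                    p1x=0.0, p1y=0.0, p1z=0.0,
        #                                    p2x=100.0, p2y=100.0, p2z=-50.0)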
def safety_allowed_area_encode(self, frame, p1x, p1y, p1z, p2x, p2y, p2z):
'''
Read out the safety zone the MAV currently assumes.
frame : Coordinate frame, as defined by MAV_FRAME enum in mavlink_types.h. Can be either global, GPS, right-handed with Z axis up or local, right handed, Z axis down. (uint8_t)
p1x : x position 1 / Latitude 1 (float)
p1y : y position 1 / Longitude 1 (float)
p1z : z position 1 / Altitude 1 (float)
p2x : x position 2 / Latitude 2 (float)
p2y : y position 2 / Longitude 2 (float)
p2z : z position 2 / Altitude 2 (float)
'''
msg = MAVLink_safety_allowed_area_message(frame, p1x, p1y, p1z, p2x, p2y, p2z)
msg.pack(self)
return msg
def safety_allowed_area_send(self, frame, p1x, p1y, p1z, p2x, p2y, p2z):
'''
Read out the safety zone the MAV currently assumes.
frame : Coordinate frame, as defined by MAV_FRAME enum in mavlink_types.h. Can be either global, GPS, right-handed with Z axis up or local, right handed, Z axis down. (uint8_t)
p1x : x position 1 / Latitude 1 (float)
p1y : y position 1 / Longitude 1 (float)
p1z : z position 1 / Altitude 1 (float)
p2x : x position 2 / Latitude 2 (float)
p2y : y position 2 / Longitude 2 (float)
p2z : z position 2 / Altitude 2 (float)
'''
return self.send(self.safety_allowed_area_encode(frame, p1x, p1y, p1z, p2x, p2y, p2z))
def set_roll_pitch_yaw_thrust_encode(self, target_system, target_component, roll, pitch, yaw, thrust):
'''
Set roll, pitch and yaw.
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
roll : Desired roll angle in radians (float)
pitch : Desired pitch angle in radians (float)
yaw : Desired yaw angle in radians (float)
thrust : Collective thrust, normalized to 0 .. 1 (float)
'''
msg = MAVLink_set_roll_pitch_yaw_thrust_message(target_system, target_component, roll, pitch, yaw, thrust)
msg.pack(self)
return msg
def set_roll_pitch_yaw_thrust_send(self, target_system, target_component, roll, pitch, yaw, thrust):
'''
Set roll, pitch and yaw.
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
roll : Desired roll angle in radians (float)
pitch : Desired pitch angle in radians (float)
yaw : Desired yaw angle in radians (float)
thrust : Collective thrust, normalized to 0 .. 1 (float)
'''
return self.send(self.set_roll_pitch_yaw_thrust_encode(target_system, target_component, roll, pitch, yaw, thrust))
def set_roll_pitch_yaw_speed_thrust_encode(self, target_system, target_component, roll_speed, pitch_speed, yaw_speed, thrust):
'''
        Set roll, pitch and yaw angular speeds.
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
roll_speed : Desired roll angular speed in rad/s (float)
pitch_speed : Desired pitch angular speed in rad/s (float)
yaw_speed : Desired yaw angular speed in rad/s (float)
thrust : Collective thrust, normalized to 0 .. 1 (float)
'''
msg = MAVLink_set_roll_pitch_yaw_speed_thrust_message(target_system, target_component, roll_speed, pitch_speed, yaw_speed, thrust)
msg.pack(self)
return msg
def set_roll_pitch_yaw_speed_thrust_send(self, target_system, target_component, roll_speed, pitch_speed, yaw_speed, thrust):
'''
        Set roll, pitch and yaw angular speeds.
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
roll_speed : Desired roll angular speed in rad/s (float)
pitch_speed : Desired pitch angular speed in rad/s (float)
yaw_speed : Desired yaw angular speed in rad/s (float)
thrust : Collective thrust, normalized to 0 .. 1 (float)
'''
return self.send(self.set_roll_pitch_yaw_speed_thrust_encode(target_system, target_component, roll_speed, pitch_speed, yaw_speed, thrust))
def roll_pitch_yaw_thrust_setpoint_encode(self, time_us, roll, pitch, yaw, thrust):
'''
Setpoint in roll, pitch, yaw currently active on the system.
                time_us                   : Timestamp in microseconds since UNIX epoch (uint64_t)
roll : Desired roll angle in radians (float)
pitch : Desired pitch angle in radians (float)
yaw : Desired yaw angle in radians (float)
thrust : Collective thrust, normalized to 0 .. 1 (float)
'''
msg = MAVLink_roll_pitch_yaw_thrust_setpoint_message(time_us, roll, pitch, yaw, thrust)
msg.pack(self)
return msg
def roll_pitch_yaw_thrust_setpoint_send(self, time_us, roll, pitch, yaw, thrust):
'''
Setpoint in roll, pitch, yaw currently active on the system.
                time_us                   : Timestamp in microseconds since UNIX epoch (uint64_t)
roll : Desired roll angle in radians (float)
pitch : Desired pitch angle in radians (float)
yaw : Desired yaw angle in radians (float)
thrust : Collective thrust, normalized to 0 .. 1 (float)
'''
return self.send(self.roll_pitch_yaw_thrust_setpoint_encode(time_us, roll, pitch, yaw, thrust))
def roll_pitch_yaw_speed_thrust_setpoint_encode(self, time_us, roll_speed, pitch_speed, yaw_speed, thrust):
'''
Setpoint in rollspeed, pitchspeed, yawspeed currently active on the
system.
                time_us                   : Timestamp in microseconds since UNIX epoch (uint64_t)
roll_speed : Desired roll angular speed in rad/s (float)
pitch_speed : Desired pitch angular speed in rad/s (float)
yaw_speed : Desired yaw angular speed in rad/s (float)
thrust : Collective thrust, normalized to 0 .. 1 (float)
'''
msg = MAVLink_roll_pitch_yaw_speed_thrust_setpoint_message(time_us, roll_speed, pitch_speed, yaw_speed, thrust)
msg.pack(self)
return msg
def roll_pitch_yaw_speed_thrust_setpoint_send(self, time_us, roll_speed, pitch_speed, yaw_speed, thrust):
'''
Setpoint in rollspeed, pitchspeed, yawspeed currently active on the
system.
                time_us                   : Timestamp in microseconds since UNIX epoch (uint64_t)
roll_speed : Desired roll angular speed in rad/s (float)
pitch_speed : Desired pitch angular speed in rad/s (float)
yaw_speed : Desired yaw angular speed in rad/s (float)
thrust : Collective thrust, normalized to 0 .. 1 (float)
'''
return self.send(self.roll_pitch_yaw_speed_thrust_setpoint_encode(time_us, roll_speed, pitch_speed, yaw_speed, thrust))
def nav_controller_output_encode(self, nav_roll, nav_pitch, nav_bearing, target_bearing, wp_dist, alt_error, aspd_error, xtrack_error):
'''
Outputs of the APM navigation controller. The primary use of this
message is to check the response and signs of the
controller before actual flight and to assist with
tuning controller parameters
nav_roll : Current desired roll in degrees (float)
nav_pitch : Current desired pitch in degrees (float)
nav_bearing : Current desired heading in degrees (int16_t)
target_bearing : Bearing to current waypoint/target in degrees (int16_t)
wp_dist : Distance to active waypoint in meters (uint16_t)
alt_error : Current altitude error in meters (float)
aspd_error : Current airspeed error in meters/second (float)
xtrack_error : Current crosstrack error on x-y plane in meters (float)
'''
msg = MAVLink_nav_controller_output_message(nav_roll, nav_pitch, nav_bearing, target_bearing, wp_dist, alt_error, aspd_error, xtrack_error)
msg.pack(self)
return msg
def nav_controller_output_send(self, nav_roll, nav_pitch, nav_bearing, target_bearing, wp_dist, alt_error, aspd_error, xtrack_error):
'''
Outputs of the APM navigation controller. The primary use of this
message is to check the response and signs of the
controller before actual flight and to assist with
tuning controller parameters
nav_roll : Current desired roll in degrees (float)
nav_pitch : Current desired pitch in degrees (float)
nav_bearing : Current desired heading in degrees (int16_t)
target_bearing : Bearing to current waypoint/target in degrees (int16_t)
wp_dist : Distance to active waypoint in meters (uint16_t)
alt_error : Current altitude error in meters (float)
aspd_error : Current airspeed error in meters/second (float)
xtrack_error : Current crosstrack error on x-y plane in meters (float)
'''
return self.send(self.nav_controller_output_encode(nav_roll, nav_pitch, nav_bearing, target_bearing, wp_dist, alt_error, aspd_error, xtrack_error))
def position_target_encode(self, x, y, z, yaw):
'''
The goal position of the system. This position is the input to any
navigation or path planning algorithm and does NOT
represent the current controller setpoint.
x : x position (float)
y : y position (float)
z : z position (float)
yaw : yaw orientation in radians, 0 = NORTH (float)
'''
msg = MAVLink_position_target_message(x, y, z, yaw)
msg.pack(self)
return msg
def position_target_send(self, x, y, z, yaw):
'''
The goal position of the system. This position is the input to any
navigation or path planning algorithm and does NOT
represent the current controller setpoint.
x : x position (float)
y : y position (float)
z : z position (float)
yaw : yaw orientation in radians, 0 = NORTH (float)
'''
return self.send(self.position_target_encode(x, y, z, yaw))
def state_correction_encode(self, xErr, yErr, zErr, rollErr, pitchErr, yawErr, vxErr, vyErr, vzErr):
'''
        Corrects the system's state by adding an error correction term to the
position and velocity, and by rotating the attitude by
a correction angle.
xErr : x position error (float)
yErr : y position error (float)
zErr : z position error (float)
rollErr : roll error (radians) (float)
pitchErr : pitch error (radians) (float)
yawErr : yaw error (radians) (float)
vxErr : x velocity (float)
vyErr : y velocity (float)
vzErr : z velocity (float)
'''
msg = MAVLink_state_correction_message(xErr, yErr, zErr, rollErr, pitchErr, yawErr, vxErr, vyErr, vzErr)
msg.pack(self)
return msg
def state_correction_send(self, xErr, yErr, zErr, rollErr, pitchErr, yawErr, vxErr, vyErr, vzErr):
'''
        Corrects the system's state by adding an error correction term to the
position and velocity, and by rotating the attitude by
a correction angle.
xErr : x position error (float)
yErr : y position error (float)
zErr : z position error (float)
rollErr : roll error (radians) (float)
pitchErr : pitch error (radians) (float)
yawErr : yaw error (radians) (float)
vxErr : x velocity (float)
vyErr : y velocity (float)
vzErr : z velocity (float)
'''
return self.send(self.state_correction_encode(xErr, yErr, zErr, rollErr, pitchErr, yawErr, vxErr, vyErr, vzErr))
def set_altitude_encode(self, target, mode):
'''
target : The system setting the altitude (uint8_t)
mode : The new altitude in meters (uint32_t)
'''
msg = MAVLink_set_altitude_message(target, mode)
msg.pack(self)
return msg
def set_altitude_send(self, target, mode):
'''
target : The system setting the altitude (uint8_t)
mode : The new altitude in meters (uint32_t)
'''
return self.send(self.set_altitude_encode(target, mode))
def request_data_stream_encode(self, target_system, target_component, req_stream_id, req_message_rate, start_stop):
'''
target_system : The target requested to send the message stream. (uint8_t)
target_component : The target requested to send the message stream. (uint8_t)
req_stream_id : The ID of the requested message type (uint8_t)
req_message_rate : Update rate in Hertz (uint16_t)
start_stop : 1 to start sending, 0 to stop sending. (uint8_t)
'''
msg = MAVLink_request_data_stream_message(target_system, target_component, req_stream_id, req_message_rate, start_stop)
msg.pack(self)
return msg
def request_data_stream_send(self, target_system, target_component, req_stream_id, req_message_rate, start_stop):
'''
target_system : The target requested to send the message stream. (uint8_t)
target_component : The target requested to send the message stream. (uint8_t)
req_stream_id : The ID of the requested message type (uint8_t)
req_message_rate : Update rate in Hertz (uint16_t)
start_stop : 1 to start sending, 0 to stop sending. (uint8_t)
'''
return self.send(self.request_data_stream_encode(target_system, target_component, req_stream_id, req_message_rate, start_stop))
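        # Usage sketch (editor's addition): ask system 1 / component 1 to
        # start a message stream at 10 Hz, then stop it. The stream id value
        # is hypothetical and depends on the target autopilot.
        #
        #   mav.request_data_stream_send(1, 1, req_stream_id=2,
        #                                req_message_rate=10, start_stop=1)
        #   ...
        #   mav.request_data_stream_send(1, 1, 2, 0, 0)   # stop the stream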
def hil_state_encode(self, usec, roll, pitch, yaw, rollspeed, pitchspeed, yawspeed, lat, lon, alt, vx, vy, vz, xacc, yacc, zacc):
'''
This packet is useful for high throughput applications
such as hardware in the loop simulations.
usec : Timestamp (microseconds since UNIX epoch or microseconds since system boot) (uint64_t)
roll : Roll angle (rad) (float)
pitch : Pitch angle (rad) (float)
yaw : Yaw angle (rad) (float)
rollspeed : Roll angular speed (rad/s) (float)
pitchspeed : Pitch angular speed (rad/s) (float)
yawspeed : Yaw angular speed (rad/s) (float)
lat : Latitude, expressed as * 1E7 (int32_t)
lon : Longitude, expressed as * 1E7 (int32_t)
alt : Altitude in meters, expressed as * 1000 (millimeters) (int32_t)
vx : Ground X Speed (Latitude), expressed as m/s * 100 (int16_t)
vy : Ground Y Speed (Longitude), expressed as m/s * 100 (int16_t)
vz : Ground Z Speed (Altitude), expressed as m/s * 100 (int16_t)
xacc : X acceleration (mg) (int16_t)
yacc : Y acceleration (mg) (int16_t)
zacc : Z acceleration (mg) (int16_t)
'''
msg = MAVLink_hil_state_message(usec, roll, pitch, yaw, rollspeed, pitchspeed, yawspeed, lat, lon, alt, vx, vy, vz, xacc, yacc, zacc)
msg.pack(self)
return msg
def hil_state_send(self, usec, roll, pitch, yaw, rollspeed, pitchspeed, yawspeed, lat, lon, alt, vx, vy, vz, xacc, yacc, zacc):
'''
This packet is useful for high throughput applications
such as hardware in the loop simulations.
usec : Timestamp (microseconds since UNIX epoch or microseconds since system boot) (uint64_t)
roll : Roll angle (rad) (float)
pitch : Pitch angle (rad) (float)
yaw : Yaw angle (rad) (float)
rollspeed : Roll angular speed (rad/s) (float)
pitchspeed : Pitch angular speed (rad/s) (float)
yawspeed : Yaw angular speed (rad/s) (float)
lat : Latitude, expressed as * 1E7 (int32_t)
lon : Longitude, expressed as * 1E7 (int32_t)
alt : Altitude in meters, expressed as * 1000 (millimeters) (int32_t)
vx : Ground X Speed (Latitude), expressed as m/s * 100 (int16_t)
vy : Ground Y Speed (Longitude), expressed as m/s * 100 (int16_t)
vz : Ground Z Speed (Altitude), expressed as m/s * 100 (int16_t)
xacc : X acceleration (mg) (int16_t)
yacc : Y acceleration (mg) (int16_t)
zacc : Z acceleration (mg) (int16_t)
'''
return self.send(self.hil_state_encode(usec, roll, pitch, yaw, rollspeed, pitchspeed, yawspeed, lat, lon, alt, vx, vy, vz, xacc, yacc, zacc))
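        # HIL sketch (editor's addition): a simulator feeding HIL_STATE each
        # physics step, applying the integer scalings documented above. The
        # simulator object and all of its fields are hypothetical.
        #
        #   mav.hil_state_send(usec, sim.roll, sim.pitch, sim.yaw,
        #                      sim.p, sim.q, sim.r,
        #                      int(sim.lat_deg * 1e7), int(sim.lon_deg * 1e7),
        #                      int(sim.alt_m * 1e3),
        #                      int(sim.vx * 100), int(sim.vy * 100), int(sim.vz * 100),
        #                      int(sim.ax_g * 1000), int(sim.ay_g * 1000), int(sim.az_g * 1000))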
def hil_controls_encode(self, time_us, roll_ailerons, pitch_elevator, yaw_rudder, throttle, mode, nav_mode):
'''
Hardware in the loop control outputs
time_us : Timestamp (microseconds since UNIX epoch or microseconds since system boot) (uint64_t)
        roll_ailerons             : Control output -1 .. 1 (float)
pitch_elevator : Control output -1 .. 1 (float)
yaw_rudder : Control output -1 .. 1 (float)
throttle : Throttle 0 .. 1 (float)
mode : System mode (MAV_MODE) (uint8_t)
nav_mode : Navigation mode (MAV_NAV_MODE) (uint8_t)
'''
msg = MAVLink_hil_controls_message(time_us, roll_ailerons, pitch_elevator, yaw_rudder, throttle, mode, nav_mode)
msg.pack(self)
return msg
def hil_controls_send(self, time_us, roll_ailerons, pitch_elevator, yaw_rudder, throttle, mode, nav_mode):
'''
Hardware in the loop control outputs
time_us : Timestamp (microseconds since UNIX epoch or microseconds since system boot) (uint64_t)
        roll_ailerons             : Control output -1 .. 1 (float)
pitch_elevator : Control output -1 .. 1 (float)
yaw_rudder : Control output -1 .. 1 (float)
throttle : Throttle 0 .. 1 (float)
mode : System mode (MAV_MODE) (uint8_t)
nav_mode : Navigation mode (MAV_NAV_MODE) (uint8_t)
'''
return self.send(self.hil_controls_encode(time_us, roll_ailerons, pitch_elevator, yaw_rudder, throttle, mode, nav_mode))
def manual_control_encode(self, target, roll, pitch, yaw, thrust, roll_manual, pitch_manual, yaw_manual, thrust_manual):
'''
target : The system to be controlled (uint8_t)
roll : roll (float)
pitch : pitch (float)
yaw : yaw (float)
thrust : thrust (float)
roll_manual : roll control enabled auto:0, manual:1 (uint8_t)
pitch_manual : pitch auto:0, manual:1 (uint8_t)
yaw_manual : yaw auto:0, manual:1 (uint8_t)
thrust_manual : thrust auto:0, manual:1 (uint8_t)
'''
msg = MAVLink_manual_control_message(target, roll, pitch, yaw, thrust, roll_manual, pitch_manual, yaw_manual, thrust_manual)
msg.pack(self)
return msg
def manual_control_send(self, target, roll, pitch, yaw, thrust, roll_manual, pitch_manual, yaw_manual, thrust_manual):
'''
target : The system to be controlled (uint8_t)
roll : roll (float)
pitch : pitch (float)
yaw : yaw (float)
thrust : thrust (float)
roll_manual : roll control enabled auto:0, manual:1 (uint8_t)
pitch_manual : pitch auto:0, manual:1 (uint8_t)
yaw_manual : yaw auto:0, manual:1 (uint8_t)
thrust_manual : thrust auto:0, manual:1 (uint8_t)
'''
return self.send(self.manual_control_encode(target, roll, pitch, yaw, thrust, roll_manual, pitch_manual, yaw_manual, thrust_manual))
def rc_channels_override_encode(self, target_system, target_component, chan1_raw, chan2_raw, chan3_raw, chan4_raw, chan5_raw, chan6_raw, chan7_raw, chan8_raw):
'''
The RAW values of the RC channels sent to the MAV to override info
received from the RC radio. A value of -1 means no
change to that channel. A value of 0 means control of
that channel should be released back to the RC radio.
The standard PPM modulation is as follows: 1000
microseconds: 0%, 2000 microseconds: 100%. Individual
receivers/transmitters might violate this
specification.
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
chan1_raw : RC channel 1 value, in microseconds (uint16_t)
chan2_raw : RC channel 2 value, in microseconds (uint16_t)
chan3_raw : RC channel 3 value, in microseconds (uint16_t)
chan4_raw : RC channel 4 value, in microseconds (uint16_t)
chan5_raw : RC channel 5 value, in microseconds (uint16_t)
chan6_raw : RC channel 6 value, in microseconds (uint16_t)
chan7_raw : RC channel 7 value, in microseconds (uint16_t)
chan8_raw : RC channel 8 value, in microseconds (uint16_t)
'''
msg = MAVLink_rc_channels_override_message(target_system, target_component, chan1_raw, chan2_raw, chan3_raw, chan4_raw, chan5_raw, chan6_raw, chan7_raw, chan8_raw)
msg.pack(self)
return msg
def rc_channels_override_send(self, target_system, target_component, chan1_raw, chan2_raw, chan3_raw, chan4_raw, chan5_raw, chan6_raw, chan7_raw, chan8_raw):
'''
The RAW values of the RC channels sent to the MAV to override info
received from the RC radio. A value of -1 means no
change to that channel. A value of 0 means control of
that channel should be released back to the RC radio.
The standard PPM modulation is as follows: 1000
microseconds: 0%, 2000 microseconds: 100%. Individual
receivers/transmitters might violate this
specification.
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
chan1_raw : RC channel 1 value, in microseconds (uint16_t)
chan2_raw : RC channel 2 value, in microseconds (uint16_t)
chan3_raw : RC channel 3 value, in microseconds (uint16_t)
chan4_raw : RC channel 4 value, in microseconds (uint16_t)
chan5_raw : RC channel 5 value, in microseconds (uint16_t)
chan6_raw : RC channel 6 value, in microseconds (uint16_t)
chan7_raw : RC channel 7 value, in microseconds (uint16_t)
chan8_raw : RC channel 8 value, in microseconds (uint16_t)
'''
return self.send(self.rc_channels_override_encode(target_system, target_component, chan1_raw, chan2_raw, chan3_raw, chan4_raw, chan5_raw, chan6_raw, chan7_raw, chan8_raw))
def global_position_int_encode(self, lat, lon, alt, vx, vy, vz):
'''
The filtered global position (e.g. fused GPS and accelerometers). The
position is in GPS-frame (right-handed, Z-up)
lat : Latitude, expressed as * 1E7 (int32_t)
lon : Longitude, expressed as * 1E7 (int32_t)
alt : Altitude in meters, expressed as * 1000 (millimeters) (int32_t)
vx : Ground X Speed (Latitude), expressed as m/s * 100 (int16_t)
vy : Ground Y Speed (Longitude), expressed as m/s * 100 (int16_t)
vz : Ground Z Speed (Altitude), expressed as m/s * 100 (int16_t)
'''
msg = MAVLink_global_position_int_message(lat, lon, alt, vx, vy, vz)
msg.pack(self)
return msg
def global_position_int_send(self, lat, lon, alt, vx, vy, vz):
'''
The filtered global position (e.g. fused GPS and accelerometers). The
position is in GPS-frame (right-handed, Z-up)
lat : Latitude, expressed as * 1E7 (int32_t)
lon : Longitude, expressed as * 1E7 (int32_t)
alt : Altitude in meters, expressed as * 1000 (millimeters) (int32_t)
vx : Ground X Speed (Latitude), expressed as m/s * 100 (int16_t)
vy : Ground Y Speed (Longitude), expressed as m/s * 100 (int16_t)
vz : Ground Z Speed (Altitude), expressed as m/s * 100 (int16_t)
'''
return self.send(self.global_position_int_encode(lat, lon, alt, vx, vy, vz))
def vfr_hud_encode(self, airspeed, groundspeed, heading, throttle, alt, climb):
'''
Metrics typically displayed on a HUD for fixed wing aircraft
airspeed : Current airspeed in m/s (float)
groundspeed : Current ground speed in m/s (float)
heading : Current heading in degrees, in compass units (0..360, 0=north) (int16_t)
throttle : Current throttle setting in integer percent, 0 to 100 (uint16_t)
alt : Current altitude (MSL), in meters (float)
climb : Current climb rate in meters/second (float)
'''
msg = MAVLink_vfr_hud_message(airspeed, groundspeed, heading, throttle, alt, climb)
msg.pack(self)
return msg
def vfr_hud_send(self, airspeed, groundspeed, heading, throttle, alt, climb):
'''
Metrics typically displayed on a HUD for fixed wing aircraft
airspeed : Current airspeed in m/s (float)
groundspeed : Current ground speed in m/s (float)
heading : Current heading in degrees, in compass units (0..360, 0=north) (int16_t)
throttle : Current throttle setting in integer percent, 0 to 100 (uint16_t)
alt : Current altitude (MSL), in meters (float)
climb : Current climb rate in meters/second (float)
'''
return self.send(self.vfr_hud_encode(airspeed, groundspeed, heading, throttle, alt, climb))
def command_encode(self, target_system, target_component, command, confirmation, param1, param2, param3, param4):
'''
Send a command with up to four parameters to the MAV
target_system : System which should execute the command (uint8_t)
target_component : Component which should execute the command, 0 for all components (uint8_t)
command : Command ID, as defined by MAV_CMD enum. (uint8_t)
confirmation : 0: First transmission of this command. 1-255: Confirmation transmissions (e.g. for kill command) (uint8_t)
param1 : Parameter 1, as defined by MAV_CMD enum. (float)
param2 : Parameter 2, as defined by MAV_CMD enum. (float)
param3 : Parameter 3, as defined by MAV_CMD enum. (float)
param4 : Parameter 4, as defined by MAV_CMD enum. (float)
'''
msg = MAVLink_command_message(target_system, target_component, command, confirmation, param1, param2, param3, param4)
msg.pack(self)
return msg
def command_send(self, target_system, target_component, command, confirmation, param1, param2, param3, param4):
'''
Send a command with up to four parameters to the MAV
target_system : System which should execute the command (uint8_t)
target_component : Component which should execute the command, 0 for all components (uint8_t)
command : Command ID, as defined by MAV_CMD enum. (uint8_t)
confirmation : 0: First transmission of this command. 1-255: Confirmation transmissions (e.g. for kill command) (uint8_t)
param1 : Parameter 1, as defined by MAV_CMD enum. (float)
param2 : Parameter 2, as defined by MAV_CMD enum. (float)
param3 : Parameter 3, as defined by MAV_CMD enum. (float)
param4 : Parameter 4, as defined by MAV_CMD enum. (float)
'''
return self.send(self.command_encode(target_system, target_component, command, confirmation, param1, param2, param3, param4))
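    # Example (editor's sketch, hypothetical IDs): in the common dialect
    # MAV_CMD_NAV_TAKEOFF is command 22, so a takeoff request to system 1,
    # component 1, on first transmission and with zeroed parameters, would
    # look like:
    #
    #   mav.command_send(1, 1, 22, 0, 0.0, 0.0, 0.0, 0.0)
    #
    # where `mav` is an instance of this class bound to a transport.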
def command_ack_encode(self, command, result):
'''
        Report status of a command. Includes feedback on whether the command
        was executed.
        command                   : Command ID, as defined by MAV_CMD enum. (float)
        result                    : 0: Action ACCEPTED and EXECUTED, 1: Action TEMPORARY REJECTED/DENIED, 2: Action PERMANENTLY DENIED, 3: Action UNKNOWN/UNSUPPORTED, 4: Requesting CONFIRMATION (float)
'''
msg = MAVLink_command_ack_message(command, result)
msg.pack(self)
return msg
def command_ack_send(self, command, result):
'''
        Report status of a command. Includes feedback on whether the command
        was executed.
        command                   : Command ID, as defined by MAV_CMD enum. (float)
        result                    : 0: Action ACCEPTED and EXECUTED, 1: Action TEMPORARY REJECTED/DENIED, 2: Action PERMANENTLY DENIED, 3: Action UNKNOWN/UNSUPPORTED, 4: Requesting CONFIRMATION (float)
'''
return self.send(self.command_ack_encode(command, result))
def optical_flow_encode(self, time, sensor_id, flow_x, flow_y, quality, ground_distance):
'''
Optical flow from a flow sensor (e.g. optical mouse sensor)
time : Timestamp (UNIX) (uint64_t)
sensor_id : Sensor ID (uint8_t)
flow_x : Flow in pixels in x-sensor direction (int16_t)
flow_y : Flow in pixels in y-sensor direction (int16_t)
quality : Optical flow quality / confidence. 0: bad, 255: maximum quality (uint8_t)
ground_distance : Ground distance in meters (float)
'''
msg = MAVLink_optical_flow_message(time, sensor_id, flow_x, flow_y, quality, ground_distance)
msg.pack(self)
return msg
def optical_flow_send(self, time, sensor_id, flow_x, flow_y, quality, ground_distance):
'''
Optical flow from a flow sensor (e.g. optical mouse sensor)
time : Timestamp (UNIX) (uint64_t)
sensor_id : Sensor ID (uint8_t)
flow_x : Flow in pixels in x-sensor direction (int16_t)
flow_y : Flow in pixels in y-sensor direction (int16_t)
quality : Optical flow quality / confidence. 0: bad, 255: maximum quality (uint8_t)
ground_distance : Ground distance in meters (float)
'''
return self.send(self.optical_flow_encode(time, sensor_id, flow_x, flow_y, quality, ground_distance))
def object_detection_event_encode(self, time, object_id, type, name, quality, bearing, distance):
'''
Object has been detected
time : Timestamp in milliseconds since system boot (uint32_t)
object_id : Object ID (uint16_t)
type : Object type: 0: image, 1: letter, 2: ground vehicle, 3: air vehicle, 4: surface vehicle, 5: sub-surface vehicle, 6: human, 7: animal (uint8_t)
name : Name of the object as defined by the detector (char)
quality : Detection quality / confidence. 0: bad, 255: maximum confidence (uint8_t)
bearing : Angle of the object with respect to the body frame in NED coordinates in radians. 0: front (float)
distance : Ground distance in meters (float)
'''
msg = MAVLink_object_detection_event_message(time, object_id, type, name, quality, bearing, distance)
msg.pack(self)
return msg
def object_detection_event_send(self, time, object_id, type, name, quality, bearing, distance):
'''
Object has been detected
time : Timestamp in milliseconds since system boot (uint32_t)
object_id : Object ID (uint16_t)
type : Object type: 0: image, 1: letter, 2: ground vehicle, 3: air vehicle, 4: surface vehicle, 5: sub-surface vehicle, 6: human, 7: animal (uint8_t)
name : Name of the object as defined by the detector (char)
quality : Detection quality / confidence. 0: bad, 255: maximum confidence (uint8_t)
bearing : Angle of the object with respect to the body frame in NED coordinates in radians. 0: front (float)
distance : Ground distance in meters (float)
'''
return self.send(self.object_detection_event_encode(time, object_id, type, name, quality, bearing, distance))
def debug_vect_encode(self, name, usec, x, y, z):
'''
name : Name (char)
usec : Timestamp (uint64_t)
x : x (float)
y : y (float)
z : z (float)
'''
msg = MAVLink_debug_vect_message(name, usec, x, y, z)
msg.pack(self)
return msg
def debug_vect_send(self, name, usec, x, y, z):
'''
name : Name (char)
usec : Timestamp (uint64_t)
x : x (float)
y : y (float)
z : z (float)
'''
return self.send(self.debug_vect_encode(name, usec, x, y, z))
def named_value_float_encode(self, name, value):
'''
Send a key-value pair as float. The use of this message is discouraged
for normal packets, but a quite efficient way for
testing new messages and getting experimental debug
output.
name : Name of the debug variable (char)
value : Floating point value (float)
'''
msg = MAVLink_named_value_float_message(name, value)
msg.pack(self)
return msg
def named_value_float_send(self, name, value):
'''
Send a key-value pair as float. The use of this message is discouraged
for normal packets, but a quite efficient way for
testing new messages and getting experimental debug
output.
name : Name of the debug variable (char)
value : Floating point value (float)
'''
return self.send(self.named_value_float_encode(name, value))
def named_value_int_encode(self, name, value):
'''
Send a key-value pair as integer. The use of this message is
discouraged for normal packets, but a quite efficient
way for testing new messages and getting experimental
debug output.
name : Name of the debug variable (char)
value : Signed integer value (int32_t)
'''
msg = MAVLink_named_value_int_message(name, value)
msg.pack(self)
return msg
def named_value_int_send(self, name, value):
'''
Send a key-value pair as integer. The use of this message is
discouraged for normal packets, but a quite efficient
way for testing new messages and getting experimental
debug output.
name : Name of the debug variable (char)
value : Signed integer value (int32_t)
'''
return self.send(self.named_value_int_encode(name, value))
def statustext_encode(self, severity, text):
'''
Status text message. These messages are printed in yellow in the COMM
console of QGroundControl. WARNING: They consume quite
some bandwidth, so use only for important status and
error messages. If implemented wisely, these messages
are buffered on the MCU and sent only at a limited
rate (e.g. 10 Hz).
severity : Severity of status, 0 = info message, 255 = critical fault (uint8_t)
text : Status text message, without null termination character (int8_t)
'''
msg = MAVLink_statustext_message(severity, text)
msg.pack(self)
return msg
def statustext_send(self, severity, text):
'''
Status text message. These messages are printed in yellow in the COMM
console of QGroundControl. WARNING: They consume quite
some bandwidth, so use only for important status and
error messages. If implemented wisely, these messages
are buffered on the MCU and sent only at a limited
rate (e.g. 10 Hz).
severity : Severity of status, 0 = info message, 255 = critical fault (uint8_t)
text : Status text message, without null termination character (int8_t)
'''
return self.send(self.statustext_encode(severity, text))
def debug_encode(self, ind, value):
'''
Send a debug value. The index is used to discriminate between values.
These values show up in the plot of QGroundControl as
DEBUG N.
ind : index of debug variable (uint8_t)
value : DEBUG value (float)
'''
msg = MAVLink_debug_message(ind, value)
msg.pack(self)
return msg
def debug_send(self, ind, value):
'''
Send a debug value. The index is used to discriminate between values.
These values show up in the plot of QGroundControl as
DEBUG N.
ind : index of debug variable (uint8_t)
value : DEBUG value (float)
'''
return self.send(self.debug_encode(ind, value))
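# --- Usage sketch (editor's addition, not part of the generated bindings) ---
# A minimal, hypothetical way to drive the senders above; the serial port
# path, baud rate and target IDs (system 1 / component 1) are placeholders:
#
#   import serial
#   port = serial.Serial('/dev/ttyUSB0', baudrate=57600)
#   mav = MAVLink(port)
#
#   # Ask the target to stream all data at 10 Hz:
#   mav.request_data_stream_send(1, 1, req_stream_id=0,
#                                req_message_rate=10, start_stop=1)
#
#   # Override RC channels 1-4; a value of 0 releases a channel back to the
#   # RC radio, per the docstring above:
#   mav.rc_channels_override_send(1, 1, 1500, 1500, 1000, 1500, 0, 0, 0, 0)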
| gpl-3.0 |
fast-project/fast-lib | vendor/yaml-cpp/test/gmock-1.7.0/gtest/scripts/fuse_gtest_files.py | 2577 | 8813 | #!/usr/bin/env python
#
# Copyright 2009, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""fuse_gtest_files.py v0.2.0
Fuses Google Test source code into a .h file and a .cc file.
SYNOPSIS
fuse_gtest_files.py [GTEST_ROOT_DIR] OUTPUT_DIR
Scans GTEST_ROOT_DIR for Google Test source code, and generates
two files: OUTPUT_DIR/gtest/gtest.h and OUTPUT_DIR/gtest/gtest-all.cc.
Then you can build your tests by adding OUTPUT_DIR to the include
search path and linking with OUTPUT_DIR/gtest/gtest-all.cc. These
two files contain everything you need to use Google Test. Hence
you can "install" Google Test by copying them to wherever you want.
GTEST_ROOT_DIR can be omitted and defaults to the parent
directory of the directory holding this script.
EXAMPLES
./fuse_gtest_files.py fused_gtest
./fuse_gtest_files.py path/to/unpacked/gtest fused_gtest
This tool is experimental. In particular, it assumes that there is no
conditional inclusion of Google Test headers. Please report any
problems to [email protected]. You can read
http://code.google.com/p/googletest/wiki/GoogleTestAdvancedGuide for
more information.
"""
__author__ = '[email protected] (Zhanyong Wan)'
import os
import re
import sets
import sys
# We assume that this file is in the scripts/ directory in the Google
# Test root directory.
DEFAULT_GTEST_ROOT_DIR = os.path.join(os.path.dirname(__file__), '..')
# Regex for matching '#include "gtest/..."'.
INCLUDE_GTEST_FILE_REGEX = re.compile(r'^\s*#\s*include\s*"(gtest/.+)"')
# Regex for matching '#include "src/..."'.
INCLUDE_SRC_FILE_REGEX = re.compile(r'^\s*#\s*include\s*"(src/.+)"')
# Where to find the source seed files.
GTEST_H_SEED = 'include/gtest/gtest.h'
GTEST_SPI_H_SEED = 'include/gtest/gtest-spi.h'
GTEST_ALL_CC_SEED = 'src/gtest-all.cc'
# Where to put the generated files.
GTEST_H_OUTPUT = 'gtest/gtest.h'
GTEST_ALL_CC_OUTPUT = 'gtest/gtest-all.cc'
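# Example layout (editor's sketch): fusing an unpacked checkout into ./fused/
# yields the two files below, which a test binary can then build against:
#
#   fused/gtest/gtest.h        all public headers, fused
#   fused/gtest/gtest-all.cc   all sources, fused
#
#   g++ -I fused my_test.cc fused/gtest/gtest-all.cc -lpthread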
def VerifyFileExists(directory, relative_path):
"""Verifies that the given file exists; aborts on failure.
relative_path is the file path relative to the given directory.
"""
if not os.path.isfile(os.path.join(directory, relative_path)):
print 'ERROR: Cannot find %s in directory %s.' % (relative_path,
directory)
print ('Please either specify a valid project root directory '
'or omit it on the command line.')
sys.exit(1)
def ValidateGTestRootDir(gtest_root):
"""Makes sure gtest_root points to a valid gtest root directory.
The function aborts the program on failure.
"""
VerifyFileExists(gtest_root, GTEST_H_SEED)
VerifyFileExists(gtest_root, GTEST_ALL_CC_SEED)
def VerifyOutputFile(output_dir, relative_path):
"""Verifies that the given output file path is valid.
relative_path is relative to the output_dir directory.
"""
# Makes sure the output file either doesn't exist or can be overwritten.
output_file = os.path.join(output_dir, relative_path)
if os.path.exists(output_file):
# TODO([email protected]): The following user-interaction doesn't
# work with automated processes. We should provide a way for the
# Makefile to force overwriting the files.
print ('%s already exists in directory %s - overwrite it? (y/N) ' %
(relative_path, output_dir))
answer = sys.stdin.readline().strip()
if answer not in ['y', 'Y']:
print 'ABORTED.'
sys.exit(1)
# Makes sure the directory holding the output file exists; creates
# it and all its ancestors if necessary.
parent_directory = os.path.dirname(output_file)
if not os.path.isdir(parent_directory):
os.makedirs(parent_directory)
def ValidateOutputDir(output_dir):
"""Makes sure output_dir points to a valid output directory.
The function aborts the program on failure.
"""
VerifyOutputFile(output_dir, GTEST_H_OUTPUT)
VerifyOutputFile(output_dir, GTEST_ALL_CC_OUTPUT)
def FuseGTestH(gtest_root, output_dir):
"""Scans folder gtest_root to generate gtest/gtest.h in output_dir."""
output_file = file(os.path.join(output_dir, GTEST_H_OUTPUT), 'w')
processed_files = sets.Set() # Holds all gtest headers we've processed.
def ProcessFile(gtest_header_path):
"""Processes the given gtest header file."""
# We don't process the same header twice.
if gtest_header_path in processed_files:
return
processed_files.add(gtest_header_path)
# Reads each line in the given gtest header.
for line in file(os.path.join(gtest_root, gtest_header_path), 'r'):
m = INCLUDE_GTEST_FILE_REGEX.match(line)
if m:
# It's '#include "gtest/..."' - let's process it recursively.
ProcessFile('include/' + m.group(1))
else:
# Otherwise we copy the line unchanged to the output file.
output_file.write(line)
ProcessFile(GTEST_H_SEED)
output_file.close()
def FuseGTestAllCcToFile(gtest_root, output_file):
"""Scans folder gtest_root to generate gtest/gtest-all.cc in output_file."""
processed_files = sets.Set()
def ProcessFile(gtest_source_file):
"""Processes the given gtest source file."""
# We don't process the same #included file twice.
if gtest_source_file in processed_files:
return
processed_files.add(gtest_source_file)
# Reads each line in the given gtest source file.
for line in file(os.path.join(gtest_root, gtest_source_file), 'r'):
m = INCLUDE_GTEST_FILE_REGEX.match(line)
if m:
if 'include/' + m.group(1) == GTEST_SPI_H_SEED:
# It's '#include "gtest/gtest-spi.h"'. This file is not
# #included by "gtest/gtest.h", so we need to process it.
ProcessFile(GTEST_SPI_H_SEED)
else:
# It's '#include "gtest/foo.h"' where foo is not gtest-spi.
# We treat it as '#include "gtest/gtest.h"', as all other
# gtest headers are being fused into gtest.h and cannot be
# #included directly.
# There is no need to #include "gtest/gtest.h" more than once.
if not GTEST_H_SEED in processed_files:
processed_files.add(GTEST_H_SEED)
output_file.write('#include "%s"\n' % (GTEST_H_OUTPUT,))
else:
m = INCLUDE_SRC_FILE_REGEX.match(line)
if m:
# It's '#include "src/foo"' - let's process it recursively.
ProcessFile(m.group(1))
else:
output_file.write(line)
ProcessFile(GTEST_ALL_CC_SEED)
def FuseGTestAllCc(gtest_root, output_dir):
"""Scans folder gtest_root to generate gtest/gtest-all.cc in output_dir."""
output_file = file(os.path.join(output_dir, GTEST_ALL_CC_OUTPUT), 'w')
FuseGTestAllCcToFile(gtest_root, output_file)
output_file.close()
def FuseGTest(gtest_root, output_dir):
"""Fuses gtest.h and gtest-all.cc."""
ValidateGTestRootDir(gtest_root)
ValidateOutputDir(output_dir)
FuseGTestH(gtest_root, output_dir)
FuseGTestAllCc(gtest_root, output_dir)
def main():
argc = len(sys.argv)
if argc == 2:
# fuse_gtest_files.py OUTPUT_DIR
FuseGTest(DEFAULT_GTEST_ROOT_DIR, sys.argv[1])
elif argc == 3:
# fuse_gtest_files.py GTEST_ROOT_DIR OUTPUT_DIR
FuseGTest(sys.argv[1], sys.argv[2])
else:
print __doc__
sys.exit(1)
if __name__ == '__main__':
main()
| lgpl-3.0 |
Teamxrtc/webrtc-streaming-node | third_party/depot_tools/external_bin/gsutil/gsutil_4.15/gsutil/gslib/commands/help.py | 25 | 8710 | # -*- coding: utf-8 -*-
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of gsutil help command."""
from __future__ import absolute_import
import itertools
import os
import pkgutil
import re
from subprocess import PIPE
from subprocess import Popen
import gslib.addlhelp
from gslib.command import Command
from gslib.command import OLD_ALIAS_MAP
import gslib.commands
from gslib.exception import CommandException
from gslib.help_provider import HelpProvider
from gslib.help_provider import MAX_HELP_NAME_LEN
from gslib.util import IsRunningInteractively
_SYNOPSIS = """
gsutil help [command or topic]
"""
_DETAILED_HELP_TEXT = ("""
<B>SYNOPSIS</B>
""" + _SYNOPSIS + """
<B>DESCRIPTION</B>
Running:
gsutil help
will provide a summary of all commands and additional topics on which
help is available.
Running:
gsutil help command or topic
will provide help about the specified command or topic.
Running:
gsutil help command sub-command
will provide help about the specified sub-command. For example, running:
gsutil help acl set
will provide help about the "set" subcommand of the "acl" command.
If you set the PAGER environment variable to the path to a pager program
(such as /bin/less on Linux), long help sections will be piped through
the specified pager.
""")
top_level_usage_string = (
'Usage: gsutil [-D] [-DD] [-h header]... '
'[-m] [-o] [-q] [command [opts...] args...]'
)
class HelpCommand(Command):
"""Implementation of gsutil help command."""
# Command specification. See base class for documentation.
command_spec = Command.CreateCommandSpec(
'help',
command_name_aliases=['?', 'man'],
usage_synopsis=_SYNOPSIS,
min_args=0,
max_args=2,
supported_sub_args='',
file_url_ok=True,
provider_url_ok=False,
urls_start_arg=0,
)
# Help specification. See help_provider.py for documentation.
help_spec = Command.HelpSpec(
help_name='help',
help_name_aliases=['?'],
help_type='command_help',
help_one_line_summary='Get help about commands and topics',
help_text=_DETAILED_HELP_TEXT,
subcommand_help_text={},
)
def RunCommand(self):
"""Command entry point for the help command."""
(help_type_map, help_name_map) = self._LoadHelpMaps()
output = []
if not self.args:
output.append('%s\nAvailable commands:\n' % top_level_usage_string)
format_str = ' %-' + str(MAX_HELP_NAME_LEN) + 's%s\n'
for help_prov in sorted(help_type_map['command_help'],
key=lambda hp: hp.help_spec.help_name):
output.append(format_str % (
help_prov.help_spec.help_name,
help_prov.help_spec.help_one_line_summary))
output.append('\nAdditional help topics:\n')
for help_prov in sorted(help_type_map['additional_help'],
key=lambda hp: hp.help_spec.help_name):
output.append(format_str % (
help_prov.help_spec.help_name,
help_prov.help_spec.help_one_line_summary))
output.append('\nUse gsutil help <command or topic> for detailed help.')
else:
invalid_subcommand = False
arg = self.args[0]
if arg not in help_name_map:
output.append('No help available for "%s"' % arg)
else:
help_prov = help_name_map[arg]
help_name = None
if len(self.args) > 1: # We also have a subcommand argument.
subcommand_map = help_prov.help_spec.subcommand_help_text
if subcommand_map and self.args[1] in subcommand_map:
help_name = arg + ' ' + self.args[1]
help_text = subcommand_map[self.args[1]]
else:
invalid_subcommand = True
if not subcommand_map:
output.append((
'The "%s" command has no subcommands. You can ask for the '
'full help by running:\n\n\tgsutil help %s\n') %
(arg, arg))
else:
subcommand_examples = []
for subcommand in subcommand_map:
subcommand_examples.append(
'\tgsutil help %s %s' % (arg, subcommand))
output.append(
('Subcommand "%s" does not exist for command "%s".\n'
'You can either ask for the full help about the command by '
'running:\n\n\tgsutil help %s\n\n'
'Or you can ask for help about one of the subcommands:\n\n%s'
) % (self.args[1], arg, arg, '\n'.join(subcommand_examples)))
if not invalid_subcommand:
if not help_name: # No subcommand or invalid subcommand.
help_name = help_prov.help_spec.help_name
help_text = help_prov.help_spec.help_text
output.append('<B>NAME</B>\n')
output.append(' %s - %s\n' % (
help_name, help_prov.help_spec.help_one_line_summary))
output.append('\n\n')
output.append(help_text.strip('\n'))
new_alias = OLD_ALIAS_MAP.get(arg, [None])[0]
if new_alias:
deprecation_warning = """
The "%s" alias is deprecated, and will eventually be removed completely.
Please use the "%s" command instead.""" % (arg, new_alias)
output.append('\n\n\n<B>DEPRECATION WARNING</B>\n')
output.append(deprecation_warning)
self._OutputHelp(''.join(output))
return 0
def _OutputHelp(self, help_str):
"""Outputs simply formatted string.
This function paginates if the string is too long, PAGER is defined, and
the output is a tty.
Args:
help_str: String to format.
"""
# Replace <B> and </B> with terminal formatting strings if connected to tty.
if not IsRunningInteractively():
help_str = re.sub('<B>', '', help_str)
help_str = re.sub('</B>', '', help_str)
print help_str
return
help_str = re.sub('<B>', '\033[1m', help_str)
help_str = re.sub('</B>', '\033[0;0m', help_str)
num_lines = len(help_str.split('\n'))
if 'PAGER' in os.environ and num_lines >= gslib.util.GetTermLines():
# Use -r option for less to make bolding work right.
pager = os.environ['PAGER'].split(' ')
if pager[0].endswith('less'):
pager.append('-r')
try:
Popen(pager, stdin=PIPE).communicate(input=help_str)
except OSError, e:
raise CommandException('Unable to open pager (%s): %s' %
(' '.join(pager), e))
else:
print help_str
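    # Example (editor's sketch): with PAGER=/bin/less and a help topic longer
    # than the terminal, the effective pipeline is roughly
    #
    #   <formatted help text> | /bin/less -r
    #
    # where -r lets less render the ANSI bold escapes substituted above.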
def _LoadHelpMaps(self):
"""Returns tuple of help type and help name.
help type is a dict with key: help type
value: list of HelpProviders
help name is a dict with key: help command name or alias
value: HelpProvider
Returns:
(help type, help name)
"""
# Import all gslib.commands submodules.
for _, module_name, _ in pkgutil.iter_modules(gslib.commands.__path__):
__import__('gslib.commands.%s' % module_name)
# Import all gslib.addlhelp submodules.
for _, module_name, _ in pkgutil.iter_modules(gslib.addlhelp.__path__):
__import__('gslib.addlhelp.%s' % module_name)
help_type_map = {}
help_name_map = {}
for s in gslib.help_provider.ALL_HELP_TYPES:
help_type_map[s] = []
# Only include HelpProvider subclasses in the dict.
for help_prov in itertools.chain(
HelpProvider.__subclasses__(), Command.__subclasses__()):
if help_prov is Command:
# Skip the Command base class itself; we just want its subclasses,
# where the help command text lives (in addition to non-Command
# HelpProviders, like naming.py).
continue
gslib.help_provider.SanityCheck(help_prov, help_name_map)
help_name_map[help_prov.help_spec.help_name] = help_prov
for help_name_aliases in help_prov.help_spec.help_name_aliases:
help_name_map[help_name_aliases] = help_prov
help_type_map[help_prov.help_spec.help_type].append(help_prov)
return (help_type_map, help_name_map)
| mit |
rackerlabs/cache-busters | cache_buster/test/test_driver.py | 1 | 5320 | """
Copyright 2013 Rackspace
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import pretend
from twisted.internet.defer import Deferred, succeed, fail
from twisted.python.failure import Failure
from twisted.trial import unittest
from cache_buster.driver import Driver, count_cache_results
from cache_buster.keys import FormattingKeyMaker
from cache_buster.test.doubles import DummyLogger
class DriverTests(unittest.TestCase):
def test_construct(self):
Driver(FormattingKeyMaker({}), None, None)
def test_invalidate_row_calls_cache_delete(self):
cache = pretend.stub(
delete=pretend.call_recorder(lambda key: succeed(None))
)
d = Driver(FormattingKeyMaker({
"foo_table": ["bar", "baz"]
}), cache, DummyLogger())
d.invalidate_row("foo_table", {})
self.assertEqual(cache.delete.calls, [
pretend.call("bar"), pretend.call("baz")
])
def test_invalidate_row_returns_deferred(self):
d = Driver(FormattingKeyMaker({}), None, DummyLogger())
res = self.successResultOf(d.invalidate_row("foo_table", {}))
self.assertIs(res, None)
def test_invalidate_row_waits_for_cache_delete(self):
d1 = Deferred()
cache = pretend.stub(
delete=lambda key: d1,
)
d = Driver(FormattingKeyMaker({
"foo_table": ["bar"]
}), cache, DummyLogger())
invalidate_d = d.invalidate_row("foo_table", {})
self.assertNoResult(invalidate_d)
d1.callback(None)
res = self.successResultOf(invalidate_d)
self.assertIs(res, None)
def test_invalidate_row_succeeds_on_cache_delete_failure(self):
cache = pretend.stub(
delete=lambda key: fail(Exception()),
)
d = Driver(FormattingKeyMaker({
"foo_table": ["bar"]
}), cache, DummyLogger())
invalidate_d = d.invalidate_row("foo_table", {})
res = self.successResultOf(invalidate_d)
self.assertIs(res, None)
def test_invalidate_row_logs_on_cache_delete_failure(self):
f = Failure(Exception())
cache = pretend.stub(
delete=lambda key: fail(f),
)
logger = pretend.stub(
msg=lambda s, **kwargs: None,
err=pretend.call_recorder(lambda failure, table, key: None)
)
d = Driver(FormattingKeyMaker({
"foo_table": ["bar"]
}), cache, logger)
d.invalidate_row("foo_table", {})
self.assertEqual(logger.err.calls, [
pretend.call(f, table="foo_table", key="bar")
])
def test_invalidate_row_logs_counts(self):
cache = pretend.stub(
delete=lambda key: succeed(True)
)
logger = pretend.stub(
err=None,
msg=pretend.call_recorder(lambda *args, **kwargs: None),
)
d = Driver(FormattingKeyMaker({
"foo_table": ["bar", "baz"]
}), cache, logger)
d.invalidate_row("foo_table", {})
self.assertEqual(logger.msg.calls, [
pretend.call("cache_buster.driver.invalidated_rows",
deletes=2, nonexistant=0, failures=0,
)
])
def test_invalidate_row_logs_nonexistant_counts(self):
cache = pretend.stub(
delete=lambda key: succeed(False)
)
logger = pretend.stub(
err=None,
msg=pretend.call_recorder(lambda *args, **kwargs: None)
)
d = Driver(FormattingKeyMaker({
"foo_table": ["bar"]
}), cache, logger)
d.invalidate_row("foo_table", {})
self.assertEqual(logger.msg.calls, [
pretend.call("cache_buster.driver.invalidated_rows",
deletes=0, nonexistant=1, failures=0,
)
])
def test_invalidate_row_logs_failure_counts(self):
cache = pretend.stub(
delete=lambda key: fail(Exception())
)
logger = pretend.stub(
err=lambda failure, table, key: None,
msg=pretend.call_recorder(lambda *args, **kwargs: None)
)
d = Driver(FormattingKeyMaker({
"foo_table": ["bar"]
}), cache, logger)
d.invalidate_row("foo_table", {})
self.assertEqual(logger.msg.calls, [
pretend.call("cache_buster.driver.invalidated_rows",
deletes=0, nonexistant=0, failures=1,
)
])
class CountCacheResultsTests(unittest.TestCase):
def test_many_results(self):
deletes, nonexistant, failures = count_cache_results([
True,
False,
None,
False,
True
])
self.assertEqual(deletes, 2)
self.assertEqual(nonexistant, 2)
self.assertEqual(failures, 1)
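# --- Usage sketch (editor's addition) ---
# Outside the test doubles above, a Driver is wired up roughly as below; the
# key-template map and the `cache`/`logger` objects are hypothetical
# stand-ins, not part of this test module:
#
#   from cache_buster.driver import Driver
#   from cache_buster.keys import FormattingKeyMaker
#
#   key_maker = FormattingKeyMaker({"users": ["user:{id}", "user-list"]})
#   driver = Driver(key_maker, cache, logger)
#   driver.invalidate_row("users", {"id": 42})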
| apache-2.0 |
Azure/azure-sdk-for-python | sdk/core/azure-servicemanagement-legacy/tests/test_legacy_mgmt_sqldatabase.py | 12 | 9488 | # coding: utf-8
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
import unittest
from azure.servicemanagement import (
EventLog,
ServerQuota,
Server,
Servers,
ServiceObjective,
Database,
FirewallRule,
SqlDatabaseManagementService,
)
from testutils.common_recordingtestcase import (
TestMode,
record,
)
from tests.legacy_mgmt_testcase import LegacyMgmtTestCase
class LegacyMgmtSqlDatabaseTest(LegacyMgmtTestCase):
def setUp(self):
super(LegacyMgmtSqlDatabaseTest, self).setUp()
self.sqlms = self.create_service_management(SqlDatabaseManagementService)
self.created_server = None
def tearDown(self):
if not self.is_playback():
if self.created_server:
try:
self.sqlms.delete_server(self.created_server)
except:
pass
return super(LegacyMgmtSqlDatabaseTest, self).tearDown()
#--Helpers-----------------------------------------------------------------
def _create_server(self):
result = self.sqlms.create_server('azuredb', 'T5ii-B48x', 'West US')
self.created_server = result.server_name
def _server_exists(self, server_name):
result = self.sqlms.list_servers()
match = [s for s in result if s.name == server_name]
return len(match) == 1
def _create_database(self, name):
result = self.sqlms.create_database(
self.created_server,
name,
'dd6d99bb-f193-4ec1-86f2-43d3bccbc49c',
edition='Basic'
)
#--Operations for servers -------------------------------------------------
@record
def test_create_server(self):
# Arrange
# Act
result = self.sqlms.create_server('azuredb', 'T5ii-B48x', 'West US')
self.created_server = result.server_name
# Assert
self.assertGreater(len(result.server_name), 0)
self.assertGreater(len(result.fully_qualified_domain_name), 0)
self.assertTrue(self._server_exists(self.created_server))
@record
def test_set_server_admin_password(self):
# Arrange
self._create_server()
# Act
result = self.sqlms.set_server_admin_password(self.created_server, 'U6jj-C59y')
# Assert
self.assertIsNone(result)
@record
def test_delete_server(self):
# Arrange
self._create_server()
# Act
result = self.sqlms.delete_server(self.created_server)
# Assert
self.assertIsNone(result)
self.assertFalse(self._server_exists(self.created_server))
@record
def test_list_servers(self):
# Arrange
self._create_server()
# Act
result = self.sqlms.list_servers()
# Assert
self.assertIsNotNone(result)
self.assertIsInstance(result, Servers)
for server in result:
self.assertIsInstance(server, Server)
match = [s for s in result if s.name == self.created_server][0]
self.assertEqual(match.name, self.created_server)
self.assertEqual(match.administrator_login, 'azuredb')
self.assertEqual(match.location, 'West US')
self.assertEqual(match.geo_paired_region, '')
self.assertTrue(match.fully_qualified_domain_name.startswith(self.created_server))
self.assertGreater(len(match.version), 0)
@record
def test_list_quotas(self):
# Arrange
self._create_server()
# Act
result = self.sqlms.list_quotas(self.created_server)
# Assert
self.assertIsNotNone(result)
self.assertIsInstance(result, list)
for quota in result:
self.assertIsInstance(quota, ServerQuota)
self.assertGreater(len(quota.name), 0)
self.assertGreater(quota.value, 0)
#--Operations for firewall rules ------------------------------------------
@record
def test_create_firewall_rule(self):
# Arrange
self._create_server()
# Act
result = self.sqlms.create_firewall_rule(self.created_server,
'AllowAll',
'192.168.144.0',
'192.168.144.255')
# Assert
self.assertIsNone(result)
@record
def test_delete_firewall_rule(self):
# Arrange
self._create_server()
result = self.sqlms.create_firewall_rule(self.created_server,
'AllowAll',
'192.168.144.0',
'192.168.144.255')
# Act
result = self.sqlms.delete_firewall_rule(self.created_server,
'AllowAll')
# Assert
self.assertIsNone(result)
@record
def test_update_firewall_rule(self):
# Arrange
self._create_server()
result = self.sqlms.create_firewall_rule(self.created_server,
'AllowAll',
'192.168.144.0',
'192.168.144.255')
# Act
result = self.sqlms.update_firewall_rule(self.created_server,
'AllowAll',
'192.168.116.0',
'192.168.116.255')
# Assert
self.assertIsNone(result)
@record
def test_list_firewall_rules(self):
# Arrange
self._create_server()
result = self.sqlms.create_firewall_rule(self.created_server,
'AllowAll',
'192.168.144.0',
'192.168.144.255')
# Act
result = self.sqlms.list_firewall_rules(self.created_server)
# Assert
self.assertIsNotNone(result)
self.assertIsInstance(result, list)
for rule in result:
self.assertIsInstance(rule, FirewallRule)
@record
def test_list_service_level_objectives(self):
# Arrange
self._create_server()
# Act
result = self.sqlms.list_service_level_objectives(self.created_server)
# Assert
self.assertIsNotNone(result)
self.assertIsInstance(result, list)
for rule in result:
self.assertIsInstance(rule, ServiceObjective)
@record
def test_create_database(self):
# Arrange
self._create_server()
# Act
result = self.sqlms.create_database(
self.created_server,
'testdb',
'dd6d99bb-f193-4ec1-86f2-43d3bccbc49c',
edition='Basic'
)
# Assert
self.assertIsNone(result)
@record
def test_delete_database(self):
# Arrange
self._create_server()
self._create_database('temp')
# Act
result = self.sqlms.delete_database(self.created_server, 'temp')
# Assert
result = self.sqlms.list_databases(self.created_server)
match = [d for d in result if d.name == 'temp']
self.assertEqual(len(match), 0)
@record
def test_update_database(self):
# Arrange
self._create_server()
self._create_database('temp')
# Act
result = self.sqlms.update_database(self.created_server,
'temp',
'newname')
# Assert
result = self.sqlms.list_databases(self.created_server)
match = [d for d in result if d.name == 'newname']
self.assertEqual(len(match), 1)
@record
def test_list_databases(self):
# Arrange
self._create_server()
self._create_database('temp')
# Act
result = self.sqlms.list_databases(self.created_server)
# Assert
self.assertIsNotNone(result)
self.assertIsInstance(result, list)
for db in result:
self.assertIsInstance(db, Database)
match = [d for d in result if d.name == 'temp'][0]
self.assertEqual(match.name, 'temp')
self.assertEqual(match.state, 'Normal')
self.assertGreater(match.max_size_bytes, 0)
self.assertGreater(match.id, 0)
self.assertGreater(len(match.edition), 0)
self.assertGreater(len(match.collation_name), 0)
#------------------------------------------------------------------------------
if __name__ == '__main__':
unittest.main()
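# --- Usage sketch (editor's addition, hypothetical credentials) ---
# Outside the test harness the service is driven directly, e.g.:
#
#   from azure.servicemanagement import SqlDatabaseManagementService
#   sqlms = SqlDatabaseManagementService('<subscription-id>', '<cert.pem>')
#   server = sqlms.create_server('azuredb', 'T5ii-B48x', 'West US')
#   sqlms.create_firewall_rule(server.server_name, 'office',
#                              '203.0.113.0', '203.0.113.255')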
| mit |
Nikea/VisTrails | vistrails/packages/controlflow/__init__.py | 2 | 2025 | ###############################################################################
##
## Copyright (C) 2011-2014, NYU-Poly.
## Copyright (C) 2006-2011, University of Utah.
## All rights reserved.
## Contact: [email protected]
##
## This file is part of VisTrails.
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are met:
##
## - Redistributions of source code must retain the above copyright notice,
## this list of conditions and the following disclaimer.
## - Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in the
## documentation and/or other materials provided with the distribution.
## - Neither the name of the University of Utah nor the names of its
## contributors may be used to endorse or promote products derived from
## this software without specific prior written permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
##
###############################################################################
identifier="org.vistrails.vistrails.control_flow"
name="Control Flow"
version="0.2.4"
old_identifiers = ["edu.utah.sci.vistrails.control_flow"]
| bsd-3-clause |
devttys0/binwalk | src/binwalk/modules/compression.py | 2 | 10003 | # Performs raw decompression of various compression algorithms (currently,
# only deflate).
import os
import zlib
import struct
import binwalk.core.compat
import binwalk.core.common
from binwalk.core.module import Option, Kwarg, Module
try:
import lzma
except ImportError:
from backports import lzma
class LZMAHeader(object):
def __init__(self, **kwargs):
for (k, v) in binwalk.core.compat.iterator(kwargs):
setattr(self, k, v)
class LZMA(object):
DESCRIPTION = "Raw LZMA compression stream"
COMMON_PROPERTIES = [0x5D, 0x6E]
MAX_PROP = ((4 * 5 + 4) * 9 + 8)
BLOCK_SIZE = 32 * 1024
def __init__(self, module):
self.module = module
self.properties = None
self.build_properties()
self.build_dictionaries()
self.build_headers()
# Add an extraction rule
if self.module.extractor.enabled:
self.module.extractor.add_rule(regex='^%s' % self.DESCRIPTION.lower(), extension="7z", cmd=self.extractor)
def extractor(self, file_name):
# Open and read the file containing the raw compressed data.
# This is not terribly efficient, especially for large files...
compressed_data = binwalk.core.common.BlockFile(file_name).read()
# Re-run self.decompress to detect the properties for this compressed
# data (stored in self.properties)
if self.decompress(compressed_data[:self.BLOCK_SIZE]):
# Build an LZMA header on top of the raw compressed data and write it back to disk.
# Header consists of the detected properties values, the largest possible dictionary size,
# and a fake output file size field.
header = chr(self.properties) + \
self.dictionaries[-1] + ("\xFF" * 8)
binwalk.core.common.BlockFile(file_name, "wb").write(header + compressed_data)
# Try to extract it with all the normal lzma extractors until one
# works
for exrule in self.module.extractor.match("lzma compressed data"):
if self.module.extractor.execute(exrule['cmd'], file_name) == True:
break
def build_property(self, pb, lp, lc):
prop = (((pb * 5) + lp) * 9) + lc
if prop > self.MAX_PROP:
return None
return int(prop)
def parse_property(self, prop):
prop = int(ord(prop))
if prop > self.MAX_PROP:
return None
pb = prop / (9 * 5)
prop -= pb * 9 * 5
lp = prop / 9
lc = prop - lp * 9
return (pb, lp, lc)
def parse_header(self, header):
(pb, lp, lc) = self.parse_property(header[0])
dictionary = struct.unpack("<I", binwalk.core.compat.str2bytes(header[1:5]))[0]
return LZMAHeader(pb=pb, lp=lp, lc=lc, dictionary=dictionary)
def build_properties(self):
self.properties = set()
if self.module.partial_scan == True:
# For partial scans, only check the most common properties values
for prop in self.COMMON_PROPERTIES:
self.properties.add(chr(prop))
else:
for pb in range(0, 9):
for lp in range(0, 5):
for lc in range(0, 5):
prop = self.build_property(pb, lp, lc)
if prop is not None:
self.properties.add(chr(prop))
def build_dictionaries(self):
self.dictionaries = []
if self.module.partial_scan == True:
# For partial scans, only use the largest dictionary value
self.dictionaries.append(binwalk.core.compat.bytes2str(struct.pack("<I", 2 ** 25)))
else:
for n in range(16, 26):
self.dictionaries.append(binwalk.core.compat.bytes2str(struct.pack("<I", 2 ** n)))
def build_headers(self):
self.headers = set()
for prop in self.properties:
for dictionary in self.dictionaries:
self.headers.add(prop + dictionary + ("\xFF" * 8))
def decompress(self, data):
result = None
description = None
for header in self.headers:
# The only acceptable exceptions are those indicating that the
# input data was truncated.
try:
final_data = binwalk.core.compat.str2bytes(header + data)
lzma.decompress(final_data)
result = self.parse_header(header)
break
except IOError as e:
# The Python2 module gives this error on truncated input data.
if str(e) == "unknown BUF error":
result = self.parse_header(header)
break
except Exception as e:
# The Python3 module gives this error on truncated input data.
# The inconsistency between modules is a bit worrisome.
if str(e) == "Compressed data ended before the end-of-stream marker was reached":
result = self.parse_header(header)
break
if result is not None:
self.properties = self.build_property(result.pb, result.lp, result.lc)
description = "%s, properties: 0x%.2X [pb: %d, lp: %d, lc: %d], dictionary size: %d" % (self.DESCRIPTION,
self.properties,
result.pb,
result.lp,
result.lc,
result.dictionary)
return description
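# For reference: the 13-byte legacy .lzma header that build_headers() above
# synthesizes is laid out as
#
#   byte 0       properties = (pb * 5 + lp) * 9 + lc
#   bytes 1-4    dictionary size, little-endian uint32
#   bytes 5-12   uncompressed size, little-endian uint64 (all 0xFF = unknown)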
class Deflate(object):
'''
Finds and extracts raw deflate compression streams.
'''
ENABLED = False
BLOCK_SIZE = 33 * 1024
DESCRIPTION = "Raw deflate compression stream"
def __init__(self, module):
self.module = module
# Add an extraction rule
if self.module.extractor.enabled:
self.module.extractor.add_rule(regex='^%s' % self.DESCRIPTION.lower(), extension="deflate", cmd=self.extractor)
def extractor(self, file_name):
in_data = ""
out_data = ""
retval = False
out_file = os.path.splitext(file_name)[0]
with binwalk.core.common.BlockFile(file_name, 'r') as fp_in:
while True:
(data, dlen) = fp_in.read_block()
if not data or dlen == 0:
break
else:
in_data += data[:dlen]
try:
out_data = zlib.decompress(binwalk.core.compat.str2bytes(in_data), -15)
with binwalk.core.common.BlockFile(out_file, 'w') as fp_out:
fp_out.write(out_data)
retval = True
break
except zlib.error as e:
pass
return retval
def decompress(self, data):
# Looking for either a valid decompression, or an error indicating
# truncated input data
try:
# Negative window size (e.g., -15) indicates that raw decompression
# should be performed
zlib.decompress(binwalk.core.compat.str2bytes(data), -15)
except zlib.error as e:
if not str(e).startswith("Error -5"):
# Bad data.
return None
return self.DESCRIPTION
class RawCompression(Module):
TITLE = 'Raw Compression'
CLI = [
Option(short='X',
long='deflate',
kwargs={'enabled': True, 'scan_for_deflate': True},
description='Scan for raw deflate compression streams'),
Option(short='Z',
long='lzma',
kwargs={'enabled': True, 'scan_for_lzma': True},
description='Scan for raw LZMA compression streams'),
Option(short='P',
long='partial',
kwargs={'partial_scan': True},
description='Perform a superficial, but faster, scan'),
Option(short='S',
long='stop',
kwargs={'stop_on_first_hit': True},
description='Stop after the first result'),
]
KWARGS = [
Kwarg(name='enabled', default=False),
Kwarg(name='partial_scan', default=False),
Kwarg(name='stop_on_first_hit', default=False),
Kwarg(name='scan_for_deflate', default=False),
Kwarg(name='scan_for_lzma', default=False),
]
def init(self):
self.decompressors = []
if self.scan_for_deflate:
self.decompressors.append(Deflate(self))
if self.scan_for_lzma:
self.decompressors.append(LZMA(self))
def run(self):
for fp in iter(self.next_file, None):
file_done = False
self.header()
while not file_done:
(data, dlen) = fp.read_block()
if dlen < 1:
break
for i in range(0, dlen):
for decompressor in self.decompressors:
description = decompressor.decompress(data[i:i + decompressor.BLOCK_SIZE])
if description:
self.result(description=description, file=fp, offset=fp.tell() - dlen + i)
if self.stop_on_first_hit:
file_done = True
break
if file_done:
break
self.status.completed += 1
self.status.completed = fp.tell() - fp.offset
self.footer()
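# Usage sketch (editor's addition): the options registered above map to, e.g.,
#
#   binwalk -X firmware.bin          # scan for raw deflate streams
#   binwalk -Z -P -S firmware.bin    # raw LZMA, superficial scan, stop on
#                                    # the first hit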
| mit |
berkerpeksag/pythondotorg | pydotorg/settings/base.py | 1 | 5943 | import os
import dj_database_url
### Basic config
BASE = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..'))
DEBUG = TEMPLATE_DEBUG = True
SITE_ID = 1
SECRET_KEY = 'its-a-secret-to-everybody'
# Until Sentry works on Py3, do errors the old-fashioned way.
ADMINS = []
# General project information
# These are available in the template as SITE_INFO.<title>
SITE_VARIABLES = {
'site_name': 'Python.org',
'site_descript': 'The official home of the Python Programming Language',
}
### Databases
DATABASES = {
'default': dj_database_url.config(default='postgres:///python.org')
}
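# Example: with DATABASE_URL set in the environment, e.g.
#
#   DATABASE_URL=postgres://user:password@localhost:5432/pythondotorg
#
# dj_database_url.config() expands it into the usual Django settings dict
# (ENGINE, NAME, USER, PASSWORD, HOST, PORT); the default above applies when
# the variable is absent.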
### Locale settings
TIME_ZONE = 'UTC'
LANGUAGE_CODE = 'en-us'
USE_I18N = True
USE_L10N = True
USE_TZ = True
DATE_FORMAT = 'Y-m-d'
### Files (media and static)
MEDIA_ROOT = os.path.join(BASE, 'media')
MEDIA_URL = '/m/'
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = os.path.join(BASE, 'static-root')
STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE, 'static'),
]
STATICFILES_STORAGE = 'pipeline.storage.PipelineStorage'
### Authentication
AUTHENTICATION_BACKENDS = (
# Needed to login by username in Django admin, regardless of `allauth`
"django.contrib.auth.backends.ModelBackend",
# `allauth` specific authentication methods, such as login by e-mail
"allauth.account.auth_backends.AuthenticationBackend",
)
LOGIN_REDIRECT_URL = 'home'
ACCOUNT_LOGOUT_REDIRECT_URL = 'home'
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_UNIQUE_EMAIL = True
ACCOUNT_EMAIL_VERIFICATION = 'mandatory'
SOCIALACCOUNT_EMAIL_REQUIRED = True
SOCIALACCOUNT_EMAIL_VERIFICATION = True
SOCIALACCOUNT_QUERY_EMAIL = True
### Templates
TEMPLATE_DIRS = [
os.path.join(BASE, 'templates')
]
TEMPLATE_CONTEXT_PROCESSORS = [
"django.contrib.auth.context_processors.auth",
"django.core.context_processors.debug",
"django.core.context_processors.i18n",
"django.core.context_processors.media",
"django.core.context_processors.static",
"django.core.context_processors.tz",
"django.core.context_processors.request",
"allauth.account.context_processors.account",
"allauth.socialaccount.context_processors.socialaccount",
"django.contrib.messages.context_processors.messages",
"pydotorg.context_processors.site_info",
"pydotorg.context_processors.url_name",
]
### URLs, WSGI, middleware, etc.
ROOT_URLCONF = 'pydotorg.urls'
MIDDLEWARE_CLASSES = (
'pydotorg.middleware.AdminNoCaching',
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'pages.middleware.PageFallbackMiddleware',
'django.contrib.redirects.middleware.RedirectFallbackMiddleware',
)
AUTH_USER_MODEL = 'users.User'
WSGI_APPLICATION = 'pydotorg.wsgi.application'
### Apps
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.redirects',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.comments',
'django.contrib.admin',
'django.contrib.admindocs',
'django_comments_xtd',
'jsonfield',
'pipeline',
'sitetree',
'timedelta',
'imagekit',
'haystack',
'honeypot',
'users',
'boxes',
'cms',
'companies',
'feedbacks',
'community',
'jobs',
'pages',
'sponsors',
'successstories',
'events',
'minutes',
'peps',
'blogs',
'downloads',
'codesamples',
'allauth',
'allauth.account',
'allauth.socialaccount',
#'allauth.socialaccount.providers.facebook',
#'allauth.socialaccount.providers.github',
#'allauth.socialaccount.providers.openid',
#'allauth.socialaccount.providers.twitter',
# Tastypie needs the `users` app to be already loaded.
'tastypie',
]
# Fixtures
FIXTURE_DIRS = (
os.path.join(BASE, 'fixtures'),
)
### Testing
SKIP_NETWORK_TESTS = True
### Logging
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
### Development
DEV_FIXTURE_URL = 'https://www.python.org/m/fixtures/dev-fixtures.json.gz'
### Comments
COMMENTS_APP = 'django_comments_xtd'
COMMENTS_XTD_MAX_THREAD_LEVEL = 0
COMMENTS_XTD_FORM_CLASS = "jobs.forms.JobCommentForm"
### Honeypot
HONEYPOT_FIELD_NAME = 'email_body_text'
HONEYPOT_VALUE = 'write your message'
### Blog Feed URL
PYTHON_BLOG_FEED_URL = "http://feeds.feedburner.com/PythonInsider"
PYTHON_BLOG_URL = "http://blog.python.org"
### Registration mailing lists
MAILING_LIST_PSF_MEMBERS = "[email protected]"
### PEP Repo Location
PEP_REPO_PATH = ''
### Fastly ###
FASTLY_API_KEY = False # Set to Fastly API key in production to allow pages to
# be purged on save
### Jobs
JOB_THRESHOLD_DAYS = 90
JOB_FROM_EMAIL = '[email protected]'
### Pipeline
from .pipeline import (
PIPELINE_CSS, PIPELINE_JS,
PIPELINE_COMPILERS,
PIPELINE_SASS_BINARY, PIPELINE_SASS_ARGUMENTS,
PIPELINE_CSS_COMPRESSOR, PIPELINE_JS_COMPRESSOR,
)
| apache-2.0 |
soravux/deap | examples/es/cma_bipop.py | 11 | 8270 | # This file is part of DEAP.
#
# DEAP is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 3 of
# the License, or (at your option) any later version.
#
# DEAP is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with DEAP. If not, see <http://www.gnu.org/licenses/>.
"""Implementation of the BI-Population CMA-ES algorithm. As presented in
*Hansen, 2009, Benchmarking a BI-Population CMA-ES on the BBOB-2009 Function
Testbed* with the exception of the modifications to the original CMA-ES
parameters mentionned at the end of section 2's first paragraph.
"""
from collections import deque
import numpy
from deap import algorithms
from deap import base
from deap import benchmarks
from deap import cma
from deap import creator
from deap import tools
# Problem size
N = 30
creator.create("FitnessMin", base.Fitness, weights=(-1.0,))
creator.create("Individual", list, fitness=creator.FitnessMin)
def main(verbose=True):
NRESTARTS = 10 # Initialization + 9 I-POP restarts
SIGMA0 = 2.0 # 1/5th of the domain [-5 5]
toolbox = base.Toolbox()
toolbox.register("evaluate", benchmarks.rastrigin)
halloffame = tools.HallOfFame(1)
stats = tools.Statistics(lambda ind: ind.fitness.values)
stats.register("avg", numpy.mean)
stats.register("std", numpy.std)
stats.register("min", numpy.min)
stats.register("max", numpy.max)
logbooks = list()
nsmallpopruns = 0
smallbudget = list()
largebudget = list()
lambda0 = 4 + int(3 * numpy.log(N))
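    # Worked example for the default N = 30 (illustration): numpy.log(30) is
    # about 3.401, so lambda0 = 4 + int(3 * 3.401) = 4 + 10 = 14 individuals.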
regime = 1
i = 0
while i < (NRESTARTS + nsmallpopruns):
# The first regime is enforced on the first and last restart
# The second regime is run if its allocated budget is smaller than the allocated
# large population regime budget
if i > 0 and i < (NRESTARTS + nsmallpopruns) - 1 and sum(smallbudget) < sum(largebudget):
lambda_ = int(lambda0 * (0.5 * (2**(i - nsmallpopruns) * lambda0) / lambda0)**(numpy.random.rand()**2))
sigma = 2 * 10**(-2 * numpy.random.rand())
nsmallpopruns += 1
regime = 2
smallbudget += [0]
else:
lambda_ = 2**(i - nsmallpopruns) * lambda0
sigma = SIGMA0
regime = 1
largebudget += [0]
t = 0
# Set the termination criterion constants
if regime == 1:
MAXITER = 100 + 50 * (N + 3)**2 / numpy.sqrt(lambda_)
elif regime == 2:
MAXITER = 0.5 * largebudget[-1] / lambda_
TOLHISTFUN = 10**-12
TOLHISTFUN_ITER = 10 + int(numpy.ceil(30. * N / lambda_))
EQUALFUNVALS = 1. / 3.
EQUALFUNVALS_K = int(numpy.ceil(0.1 + lambda_ / 4.))
TOLX = 10**-12
TOLUPSIGMA = 10**20
CONDITIONCOV = 10**14
STAGNATION_ITER = int(numpy.ceil(0.2 * t + 120 + 30. * N / lambda_))
NOEFFECTAXIS_INDEX = t % N
equalfunvalues = list()
bestvalues = list()
medianvalues = list()
mins = deque(maxlen=TOLHISTFUN_ITER)
        # We start with a centroid in [-4, 4]**N
strategy = cma.Strategy(centroid=numpy.random.uniform(-4, 4, N), sigma=sigma, lambda_=lambda_)
toolbox.register("generate", strategy.generate, creator.Individual)
toolbox.register("update", strategy.update)
logbooks.append(tools.Logbook())
logbooks[-1].header = "gen", "evals", "restart", "regime", "std", "min", "avg", "max"
conditions = {"MaxIter" : False, "TolHistFun" : False, "EqualFunVals" : False,
"TolX" : False, "TolUpSigma" : False, "Stagnation" : False,
"ConditionCov" : False, "NoEffectAxis" : False, "NoEffectCoor" : False}
# Run the current regime until one of the following is true:
## Note that the algorithm won't stop by itself on the optimum (0.0 on rastrigin).
while not any(conditions.values()):
# Generate a new population
population = toolbox.generate()
# Evaluate the individuals
fitnesses = toolbox.map(toolbox.evaluate, population)
for ind, fit in zip(population, fitnesses):
ind.fitness.values = fit
halloffame.update(population)
record = stats.compile(population)
logbooks[-1].record(gen=t, evals=lambda_, restart=i, regime=regime, **record)
if verbose:
print(logbooks[-1].stream)
# Update the strategy with the evaluated individuals
toolbox.update(population)
# Count the number of times the k'th best solution is equal to the best solution
# At this point the population is sorted (method update)
            if population[-1].fitness == population[-EQUALFUNVALS_K].fitness:
                equalfunvalues.append(1)
            else:
                equalfunvalues.append(0)
# Log the best and median value of this population
bestvalues.append(population[-1].fitness.values)
medianvalues.append(population[int(round(len(population)/2.))].fitness.values)
# First run does not count into the budget
if regime == 1 and i > 0:
largebudget[-1] += lambda_
elif regime == 2:
smallbudget[-1] += lambda_
t += 1
STAGNATION_ITER = int(numpy.ceil(0.2 * t + 120 + 30. * N / lambda_))
NOEFFECTAXIS_INDEX = t % N
if t >= MAXITER:
                # The maximum number of iterations per CMA-ES run
conditions["MaxIter"] = True
mins.append(record["min"])
if (len(mins) == mins.maxlen) and max(mins) - min(mins) < TOLHISTFUN:
# The range of the best values is smaller than the threshold
conditions["TolHistFun"] = True
if t > N and sum(equalfunvalues[-N:]) / float(N) > EQUALFUNVALS:
# In 1/3rd of the last N iterations the best and k'th best solutions are equal
conditions["EqualFunVals"] = True
if all(strategy.pc < TOLX) and all(numpy.sqrt(numpy.diag(strategy.C)) < TOLX):
# All components of pc and sqrt(diag(C)) are smaller than the threshold
conditions["TolX"] = True
if strategy.sigma / sigma > strategy.diagD[-1]**2 * TOLUPSIGMA:
# The sigma ratio is bigger than a threshold
conditions["TolUpSigma"] = True
if len(bestvalues) > STAGNATION_ITER and len(medianvalues) > STAGNATION_ITER and \
numpy.median(bestvalues[-20:]) >= numpy.median(bestvalues[-STAGNATION_ITER:-STAGNATION_ITER + 20]) and \
numpy.median(medianvalues[-20:]) >= numpy.median(medianvalues[-STAGNATION_ITER:-STAGNATION_ITER + 20]):
                # Stagnation occurred
conditions["Stagnation"] = True
            if strategy.cond > CONDITIONCOV:
# The condition number is bigger than a threshold
conditions["ConditionCov"] = True
if all(strategy.centroid == strategy.centroid + 0.1 * strategy.sigma * strategy.diagD[-NOEFFECTAXIS_INDEX] * strategy.B[-NOEFFECTAXIS_INDEX]):
# The coordinate axis std is too low
conditions["NoEffectAxis"] = True
if any(strategy.centroid == strategy.centroid + 0.2 * strategy.sigma * numpy.diag(strategy.C)):
# The main axis std has no effect
conditions["NoEffectCoor"] = True
stop_causes = [k for k, v in conditions.items() if v]
print("Stopped because of condition%s %s" % ((":" if len(stop_causes) == 1 else "s:"), ",".join(stop_causes)))
i += 1
return halloffame
if __name__ == "__main__":
main()
| lgpl-3.0 |
neuroidss/nupic | src/nupic/datafiles/extra/gym/raw/makeDataset.py | 27 | 8637 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2010-2015, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""Unify the various Gym CSV files to a single coherent CSV file
The Gym dataset has two file types:
1. Hourly attendance data per gym
2. KW consumption in 15-minute intervals
The makeDataset() function merges the two file types and creates
a single CSV file with hourly data. Each record contains the following fields:
Gym name, Date, Hour, # Attendees, KW consumption
"""
import os
import sys
import fileinput
import glob
import operator
import datetime
from nupic.data.file import File
months = 'Jan Feb Mar Apr May Jun Jul Aug Sep Oct Nov Dec'.split()
class Record(object):
def __init__(self):
self.club = ''
self.date = None
self.time = 0
self.KW = 0
self.attendeeCount = 0
self.consumption = 0
class Club(object):
def __init__(self, name):
self.name = name
self.records = {}
def processAttendance(self, f):
# Skip first two
line = f.next()
assert line == ',,,,,,,,,,,,,,,,,,,\n'
line = f.next()
assert line == 'Date Of Swipe, < 6 am,6-7 am,7-8 am,8-9 am,9-10 am,10-11 am,11-12 am,12-1 pm,1-2 pm,2-3 pm,3-4 pm,4-5 pm,5-6 pm,6-7 pm,7-8 pm,8-9 pm,9-10 pm,> 10 pm,Totals\n'
for i, line in enumerate(f):
      # Check whether we're done with this club
if line == ',,,,,,,,,,,,,,,,,,,\n':
# skip next two lines
line = f.next()
assert line.startswith('Club Totals:')
line = f.next()
assert line == ',,,,,,,,,,,,,,,,,,,\n'
return
else:
self.addRecord(line)
def addRecord(self, line):
fields = line.split(',')
assert len(fields) == 20
date = fields[0].split('-')
# Convert day to 'dd'
dd = int(date[0])
mm = months.index(date[1]) + 1
assert mm in (9, 10)
# Convert year from 'yy' to 'yyyy'
yyyy = 2000 + int(date[2])
date = (yyyy, mm, dd)
    # Add 0 for hours without attendees (12AM-4AM and 11PM)
attendance = [0] * 5 + fields[1:19] + [0]
assert len(attendance) == 24
# Create a record for each hour of the day.
for i, a in enumerate(attendance):
r = Record()
r.club = self.name
r.timestamp = datetime.datetime(yyyy, mm, dd, i)
#r.time = i
r.attendeeCount = a
self.records[(date, i)] = r
def updateRecord(self, date, t, consumption):
# Get rid of time and AM/PM if needed
date = date.split()[0]
# Convert to (yyyy, mmm, dd)
date = date.split('/')
# Convert day to 'dd'
dd = int(date[0])
    # Convert month to 'mm'
mm = int(date[1])
yyyy = int(date[2])
# Locate record
key = ((yyyy, mm, dd), t)
if not key in self.records:
print self.name, 'is missing attendance data for', key
else:
r = self.records[key]
r.consumption = consumption
def processClubAttendance(f, clubs):
"""Process the attendance data of one club
  If the club already exists in the dict, update its data.
  If the club is new, create a new Club object and add it to the dict.
The next step is to iterate over all the lines and add a record for each line.
When reaching an empty line it means there are no more records for this club.
Along the way some redundant lines are skipped. When the file ends the f.next()
call raises a StopIteration exception and that's the sign to return False,
which indicates to the caller that there are no more clubs to process.
"""
try:
# Skip as many empty lines as necessary (file format inconsistent)
line = f.next()
while line == ',,,,,,,,,,,,,,,,,,,\n':
line = f.next()
# The first non-empty line should have the name as the first field
name = line.split(',')[0]
# Create a new club object if needed
if name not in clubs:
clubs[name] = Club(name)
# Get the named club
c = clubs[name]
c.processAttendance(f)
return True
except StopIteration:
return False
def processClubConsumption(f, clubs):
"""Process the consumption a club
- Skip the header line
- Iterate over lines
- Read 4 records at a time
- Parse each line: club, date, time, consumption
- Get club object from dictionary if needed
- Aggregate consumption
- Call club.processConsumption() with data
"""
try:
# Skip header line
line = f.next()
assert line.endswith('" ","SITE_LOCATION_NAME","TIMESTAMP","TOTAL_KWH"\n')
valid_times = range(24)
t = 0 # used to track time
club = None
clubName = None
lastDate = None
while True:
assert t in valid_times
consumption = 0
for x in range(4):
# Read the line and get rid of the newline character
line = f.next()[:-1]
fields = line.split(',')
assert len(fields) == 4
for i, field in enumerate(fields):
# Strip the redundant double quotes
assert field[0] == '"' and field[-1] == '"'
fields[i] = field[1:-1]
# Ignoring field 0, which is just a running count
# Get the club name
name = fields[1]
# Hack to fix inconsistent club names like: "Melbourne CBD - Melbourne Central" vs. "Melbourne Central"
partialNames = ('Melbourne Central', 'North Sydney', 'Park St', 'Pitt St')
for pn in partialNames:
if pn in name:
name = pn
        # Locate the club if needed (it may be the same club as the previous row)
if name != clubName:
clubName = name
club = clubs[name]
# Split the date (time is counted using the t variable)
tokens = fields[2].split()
# Verify that t == 0 and consumption == 0 when there is no time in the file
if len(tokens) == 1:
assert consumption == 0 and t == 0
# The first (and sometimes only) token is the date
date = tokens[0]
# Aggregate the consumption
consumption += float(fields[3])
# Update the Club object after aggregating the consumption of 4 lines
club.updateRecord(date, t, consumption)
# Increment time
t += 1
t %= 24
except StopIteration:
return
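# Note on the aggregation above (an assumption inferred from the "4 records at
# a time" logic): the consumption files carry 15-minute KWH readings, so one
# hourly value is the sum of four consecutive rows, e.g.
#   3.1 + 2.9 + 3.0 + 3.2  ->  12.2 KWH recorded against hour t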
def processAttendanceFiles():
files = glob.glob('Attendance*.csv')
f = fileinput.input(files=files)
# Process the input files and create a dictionary of Club objects
clubs = {}
while processClubAttendance(f, clubs):
pass
return clubs
def processConsumptionFiles(clubs):
"""
"""
files = glob.glob('all_group*detail.csv')
f = fileinput.input(files=files)
# Process the input files and create a dictionary of Club objects
while processClubConsumption(f, clubs):
pass
return clubs
def makeDataset():
"""
"""
clubs = processAttendanceFiles()
clubs = processConsumptionFiles(clubs)
fields = [('gym', 'string', 'S'),
('timestamp', 'datetime', 'T'),
('attendeeCount', 'int', ''),
('consumption', 'float', ''),
]
with File('gym.csv', fields) as f:
## write header
#f.write('Gym Name,Date,Time,Attendee Count,Consumption (KWH)\n')
for c in clubs.values():
for k, r in sorted(c.records.iteritems(), key=operator.itemgetter(0)):
#dd = r.date[2]
#mm = r.date[1]
#yyyy = r.date[0]
#line = ','.join(str(x) for x in
# (c.name, '%d-%s-%d' % (dd, mmm, yyyy), r.time, r.attendeeCount, r.consumption))
#f.write(line + '\n')
f.write([r.club, r.timestamp, r.attendeeCount, r.consumption])
if __name__=='__main__':
makeDataset()
print 'Done.'
| agpl-3.0 |
abhik/pebl | src/pebl/learner/simanneal.py | 4 | 4032 | """Classes and functions for Simulated Annealing learner"""
from math import exp
import random
from pebl import network, result, evaluator, config
from pebl.learner.base import *
class SALearnerStatistics:
def __init__(self, starting_temp, delta_temp, max_iterations_at_temp):
self.temp = starting_temp
self.iterations_at_temp = 0
self.max_iterations_at_temp = max_iterations_at_temp
self.delta_temp = delta_temp
self.iterations = 0
self.best_score = 0
self.current_score = 0
def update(self):
self.iterations += 1
self.iterations_at_temp += 1
if self.iterations_at_temp >= self.max_iterations_at_temp:
self.temp *= self.delta_temp
self.iterations_at_temp = 0
class SimulatedAnnealingLearner(Learner):
#
# Parameters
#
_params = (
config.FloatParameter(
'simanneal.start_temp',
"Starting temperature for a run.",
config.atleast(0.0),
default=100.0
),
config.FloatParameter(
'simanneal.delta_temp',
'Change in temp between steps.',
config.atleast(0.0),
default=0.5
),
config.IntParameter(
'simanneal.max_iters_at_temp',
'Max iterations at any temperature.',
config.atleast(0),
default=100
),
config.StringParameter(
'simanneal.seed',
'Starting network for a greedy search.',
default=''
)
)
def __init__(self, data_=None, prior_=None, **options):
"""Create a Simulated Aneaaling learner.
For more information about Simulated Annealing algorithms, consult:
1. http://en.wikipedia.org/wiki/Simulated_annealing
2. D. Heckerman. A Tutorial on Learning with Bayesian Networks.
Microsoft Technical Report MSR-TR-95-06, 1995. p.35-36.
Any config param for 'simanneal' can be passed in via options.
Use just the option part of the parameter name.
"""
super(SimulatedAnnealingLearner,self).__init__(data_, prior_)
config.setparams(self, options)
if not isinstance(self.seed, network.Network):
self.seed = network.Network(self.data.variables, self.seed)
def run(self):
"""Run the learner."""
self.stats = SALearnerStatistics(self.start_temp, self.delta_temp,
self.max_iters_at_temp)
self.result = result.LearnerResult(self)
self.evaluator = evaluator.fromconfig(self.data, self.seed, self.prior)
self.evaluator.score_network(self.seed.copy())
self.result.start_run()
curscore = self.evaluator.score_network()
# temperature decays exponentially, so we'll never get to 0.
# So, we continue until temp < 1
while self.stats.temp >= 1:
try:
newscore = self._alter_network_randomly_and_score()
except CannotAlterNetworkException:
return
self.result.add_network(self.evaluator.network, newscore)
if self._accept(newscore):
# set current score
self.stats.current_score = newscore
if self.stats.current_score > self.stats.best_score:
self.stats.best_score = self.stats.current_score
else:
# undo network alteration
self.evaluator.restore_network()
            # temp is not updated every iteration, just whenever the criteria are met.
self.stats.update()
self.result.stop_run()
return self.result
def _accept(self, newscore):
oldscore = self.stats.current_score
if newscore >= oldscore:
return True
elif random.random() < exp((newscore - oldscore)/self.stats.temp):
return True
else:
return False
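# Worked example of the Metropolis acceptance rule in _accept (illustration
# only, with made-up scores): at temp 50, moving from a current score of -105
# to a candidate score of -110 is kept with probability
#   exp((-110 - (-105)) / 50) = exp(-0.1), roughly 0.905.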
| mit |