code | repo_name | path | language | license | size
---|---|---|---|---|---
stringlengths 2–1.05M | stringlengths 5–104 | stringlengths 4–251 | stringclasses (1 value) | stringclasses (15 values) | int32 2–1.05M
# -*- coding: utf-8 -*-
from time import time
from module.common.json_layer import json_loads
from module.plugins.Account import Account
class MyfastfileCom(Account):
__name__ = "MyfastfileCom"
__type__ = "account"
__version__ = "0.02"
__description__ = """Myfastfile.com account plugin"""
__license__ = "GPLv3"
__authors__ = [("stickell", "[email protected]")]
def loadAccountInfo(self, user, req):
if 'days_left' in self.json_data:
validuntil = int(time() + self.json_data['days_left'] * 24 * 60 * 60)
return {"premium": True, "validuntil": validuntil, "trafficleft": -1}
else:
self.logError(_("Unable to get account information"))
def login(self, user, data, req):
# Password to use is the API-Password written in http://myfastfile.com/myaccount
html = req.load("http://myfastfile.com/api.php",
get={"user": user, "pass": data['password']})
self.logDebug("JSON data: " + html)
self.json_data = json_loads(html)
if self.json_data['status'] != 'ok':
self.logError(_('Invalid login. The password to use is the API-Password you find in your "My Account" page'))
self.wrongPassword()
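# A minimal sketch of the expiry arithmetic used by loadAccountInfo above;
# the payload below is a hypothetical example of what the myfastfile.com API
# might return, not a recorded response.
if __name__ == "__main__":
    sample = {"status": "ok", "days_left": 30}
    print("valid until (unix time): %d" % int(time() + sample["days_left"] * 24 * 60 * 60))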
| sebdelsol/pyload | module/plugins/accounts/MyfastfileCom.py | Python | gpl-3.0 | 1,284 |
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from buildbot.db import builders
from buildbot.test.fake import fakedb
from buildbot.test.fake import fakemaster
from buildbot.test.util import connector_component
from buildbot.test.util import interfaces
from buildbot.test.util import validation
from twisted.internet import defer
from twisted.trial import unittest
class Tests(interfaces.InterfaceTests):
# common sample data
builder_row = [
fakedb.Builder(id=7, name="some:builder"),
]
# tests
def test_signature_findBuilderId(self):
@self.assertArgSpecMatches(self.db.builders.findBuilderId)
def findBuilderId(self, name):
pass
def test_signature_addBuilderMaster(self):
@self.assertArgSpecMatches(self.db.builders.addBuilderMaster)
def addBuilderMaster(self, builderid=None, masterid=None):
pass
def test_signature_removeBuilderMaster(self):
@self.assertArgSpecMatches(self.db.builders.removeBuilderMaster)
def removeBuilderMaster(self, builderid=None, masterid=None):
pass
def test_signature_getBuilder(self):
@self.assertArgSpecMatches(self.db.builders.getBuilder)
def getBuilder(self, builderid):
pass
def test_signature_getBuilders(self):
@self.assertArgSpecMatches(self.db.builders.getBuilders)
def getBuilders(self, masterid=None):
pass
@defer.inlineCallbacks
def test_findBuilderId_new(self):
id = yield self.db.builders.findBuilderId('some:builder')
builderdict = yield self.db.builders.getBuilder(id)
self.assertEqual(builderdict,
dict(id=id, name='some:builder', masterids=[]))
@defer.inlineCallbacks
def test_findBuilderId_exists(self):
yield self.insertTestData([
fakedb.Builder(id=7, name='some:builder'),
])
id = yield self.db.builders.findBuilderId('some:builder')
self.assertEqual(id, 7)
@defer.inlineCallbacks
def test_addBuilderMaster(self):
yield self.insertTestData([
fakedb.Builder(id=7),
fakedb.Master(id=9, name='abc'),
fakedb.Master(id=10, name='def'),
fakedb.BuilderMaster(builderid=7, masterid=10),
])
yield self.db.builders.addBuilderMaster(builderid=7, masterid=9)
builderdict = yield self.db.builders.getBuilder(7)
validation.verifyDbDict(self, 'builderdict', builderdict)
self.assertEqual(builderdict,
dict(id=7, name='some:builder', masterids=[9, 10]))
@defer.inlineCallbacks
def test_addBuilderMaster_already_present(self):
yield self.insertTestData([
fakedb.Builder(id=7),
fakedb.Master(id=9, name='abc'),
fakedb.Master(id=10, name='def'),
fakedb.BuilderMaster(builderid=7, masterid=9),
])
yield self.db.builders.addBuilderMaster(builderid=7, masterid=9)
builderdict = yield self.db.builders.getBuilder(7)
validation.verifyDbDict(self, 'builderdict', builderdict)
self.assertEqual(builderdict,
dict(id=7, name='some:builder', masterids=[9]))
@defer.inlineCallbacks
def test_removeBuilderMaster(self):
yield self.insertTestData([
fakedb.Builder(id=7),
fakedb.Master(id=9, name='some:master'),
fakedb.Master(id=10, name='other:master'),
fakedb.BuilderMaster(builderid=7, masterid=9),
fakedb.BuilderMaster(builderid=7, masterid=10),
])
yield self.db.builders.removeBuilderMaster(builderid=7, masterid=9)
builderdict = yield self.db.builders.getBuilder(7)
validation.verifyDbDict(self, 'builderdict', builderdict)
self.assertEqual(builderdict,
dict(id=7, name='some:builder', masterids=[10]))
@defer.inlineCallbacks
def test_getBuilder_no_masters(self):
yield self.insertTestData([
fakedb.Builder(id=7, name='some:builder'),
])
builderdict = yield self.db.builders.getBuilder(7)
validation.verifyDbDict(self, 'builderdict', builderdict)
self.assertEqual(builderdict,
dict(id=7, name='some:builder', masterids=[]))
@defer.inlineCallbacks
def test_getBuilder_with_masters(self):
yield self.insertTestData([
fakedb.Builder(id=7, name='some:builder'),
fakedb.Master(id=3, name='m1'),
fakedb.Master(id=4, name='m2'),
fakedb.BuilderMaster(builderid=7, masterid=3),
fakedb.BuilderMaster(builderid=7, masterid=4),
])
builderdict = yield self.db.builders.getBuilder(7)
validation.verifyDbDict(self, 'builderdict', builderdict)
self.assertEqual(builderdict,
dict(id=7, name='some:builder', masterids=[3, 4]))
@defer.inlineCallbacks
def test_getBuilder_missing(self):
builderdict = yield self.db.builders.getBuilder(7)
self.assertEqual(builderdict, None)
@defer.inlineCallbacks
def test_getBuilders(self):
yield self.insertTestData([
fakedb.Builder(id=7, name='some:builder'),
fakedb.Builder(id=8, name='other:builder'),
fakedb.Builder(id=9, name='third:builder'),
fakedb.Master(id=3, name='m1'),
fakedb.Master(id=4, name='m2'),
fakedb.BuilderMaster(builderid=7, masterid=3),
fakedb.BuilderMaster(builderid=8, masterid=3),
fakedb.BuilderMaster(builderid=8, masterid=4),
])
builderlist = yield self.db.builders.getBuilders()
for builderdict in builderlist:
validation.verifyDbDict(self, 'builderdict', builderdict)
self.assertEqual(sorted(builderlist), sorted([
dict(id=7, name='some:builder', masterids=[3]),
dict(id=8, name='other:builder', masterids=[3, 4]),
dict(id=9, name='third:builder', masterids=[]),
]))
@defer.inlineCallbacks
def test_getBuilders_masterid(self):
yield self.insertTestData([
fakedb.Builder(id=7, name='some:builder'),
fakedb.Builder(id=8, name='other:builder'),
fakedb.Builder(id=9, name='third:builder'),
fakedb.Master(id=3, name='m1'),
fakedb.Master(id=4, name='m2'),
fakedb.BuilderMaster(builderid=7, masterid=3),
fakedb.BuilderMaster(builderid=8, masterid=3),
fakedb.BuilderMaster(builderid=8, masterid=4),
])
builderlist = yield self.db.builders.getBuilders(masterid=3)
for builderdict in builderlist:
validation.verifyDbDict(self, 'builderdict', builderdict)
self.assertEqual(sorted(builderlist), sorted([
dict(id=7, name='some:builder', masterids=[3]),
dict(id=8, name='other:builder', masterids=[3, 4]),
]))
@defer.inlineCallbacks
def test_getBuilders_empty(self):
builderlist = yield self.db.builders.getBuilders()
self.assertEqual(sorted(builderlist), [])
class RealTests(Tests):
# tests that only "real" implementations will pass
pass
class TestFakeDB(unittest.TestCase, Tests):
def setUp(self):
self.master = fakemaster.make_master()
self.db = fakedb.FakeDBConnector(self.master, self)
self.db.checkForeignKeys = True
self.insertTestData = self.db.insertTestData
class TestRealDB(unittest.TestCase,
connector_component.ConnectorComponentMixin,
RealTests):
def setUp(self):
d = self.setUpConnectorComponent(
table_names=['builders', 'masters', 'builder_masters'])
@d.addCallback
def finish_setup(_):
self.db.builders = builders.BuildersConnectorComponent(self.db)
return d
def tearDown(self):
return self.tearDownConnectorComponent()
| zozo123/buildbot | master/buildbot/test/unit/test_db_builders.py | Python | gpl-3.0 | 8,680 |
# -*- coding: utf-8 -*-
import os
import pipeline_item
class Serialize(pipeline_item.pipeline_stage):
def stage(self, pipeline_value):
storage_path = "%s/%s" % (self.pipeline_storage_prefix, self.attributes['toFile'])
if self.pipeline_storage_prefix is None:
storage_path = self.attributes['toFile']
self.storage[storage_path] = str(pipeline_value)
return pipeline_value
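# A rough usage sketch: the pipeline_stage base class is defined elsewhere, so
# the wiring below (attributes, storage, pipeline_storage_prefix) is an
# assumption about the interface this stage relies on, not the real API:
#
#   stage.attributes = {"toFile": "out.xml"}
#   stage.pipeline_storage_prefix = "job-1234"
#   stage.storage = {}                       # dict-like store
#   stage.stage("<xml/>")                    # stores "<xml/>" at "job-1234/out.xml"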
| Br3nda/docvert | core/pipeline_type/serialize.py | Python | gpl-3.0 | 422 |
# This file is part of HamsiManager.
#
# Copyright (c) 2010 - 2015 Murat Demir <[email protected]>
#
# Hamsi Manager is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Hamsi Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HamsiManager; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
from Core import Universals as uni
import FileUtils as fu
from Databases import sqlite, getDefaultConnection, correctForSql, getAmendedSQLInsertOrUpdateQueries
tableName = "bookmarksOfDirectories"
tableVersion = 2
allForFetch = None
def fetchAll():
global allForFetch
if allForFetch is None:
con = getDefaultConnection()
cur = con.cursor()
cur.execute("SELECT * FROM " + tableName)
allForFetch = cur.fetchall()
return allForFetch
def fetch(_id):
con = getDefaultConnection()
cur = con.cursor()
cur.execute("SELECT * FROM " + tableName + " where id=" + str(int(_id)))
return cur.fetchall()
def checkValues(_bookmark, _value, _type):
if len(_bookmark) == 0 or len(_value) == 0:
return False
return True
def insert(_bookmark, _value, _type=""):
global allForFetch
if checkValues(_bookmark, _value, _type):
allForFetch = None
con = getDefaultConnection()
cur = con.cursor()
sqlQueries = getAmendedSQLInsertOrUpdateQueries(tableName, {"bookmark": "'" + correctForSql(_bookmark) + "'",
"value": "'" + correctForSql(_value) + "'",
"type": "'" + correctForSql(_type) + "'"},
["value"])
cur.execute(sqlQueries[0])
cur.execute(sqlQueries[1])
con.commit()
cur.execute("SELECT last_insert_rowid();")
return cur.fetchall()[0][0]
return None
def update(_id, _bookmark, _value, _type=""):
global allForFetch
if checkValues(_bookmark, _value, _type):
allForFetch = None
con = getDefaultConnection()
cur = con.cursor()
cur.execute(str(
"update " + tableName + " set bookmark='" + correctForSql(_bookmark) + "', value='" + correctForSql(
_value) + "', type='" + correctForSql(_type) + "' where id=" + str(int(_id))))
con.commit()
def delete(_id):
global allForFetch
allForFetch = None
con = getDefaultConnection()
cur = con.cursor()
cur.execute("delete from " + tableName + " where id=" + str(int(_id)))
con.commit()
def getTableCreateQuery():
return "CREATE TABLE IF NOT EXISTS " + tableName + " ('id' INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,'bookmark' TEXT,'value' TEXT,'type' TEXT)"
def getDeleteTableQuery():
return "DELETE FROM " + tableName
def getDefaultsQueries():
sqlQueries = []
sqlQueries += getAmendedSQLInsertOrUpdateQueries(tableName,
{"bookmark": "'Home'", "value": "'" + fu.userDirectoryPath + "'",
"type": "''"}, ["value"])
if uni.isWindows:
sqlQueries += getAmendedSQLInsertOrUpdateQueries(tableName,
{"bookmark": "'C:\\'", "value": "'C:\\'", "type": "''"},
["value"])
else:
sqlQueries += getAmendedSQLInsertOrUpdateQueries(tableName,
{"bookmark": "'MNT'", "value": "'/mnt'", "type": "''"},
["value"])
sqlQueries += getAmendedSQLInsertOrUpdateQueries(tableName,
{"bookmark": "'MEDIA'", "value": "'/media'", "type": "''"},
["value"])
return sqlQueries
def checkUpdates(_oldVersion):
if _oldVersion < 2:
con = getDefaultConnection()
cur = con.cursor()
cur.execute(str("DROP TABLE " + tableName + ";"))
con.commit()
cur.execute(getTableCreateQuery())
con.commit()
for sqlCommand in getDefaultsQueries():
cur = con.cursor()
cur.execute(str(sqlCommand))
con.commit()
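# A brief usage sketch, assuming a default SQLite connection has already been
# configured by the Databases module (the bookmark values are illustrative):
#
#   new_id = insert("Projects", "/home/user/projects")   # returns the row id
#   update(new_id, "Projects", "/srv/projects")
#   for row in fetch(new_id):
#       print(row)
#   delete(new_id)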
| supermurat/hamsi-manager | Databases/BookmarksOfDirectories.py | Python | gpl-3.0 | 4,852 |
from iHunterModel.models import DomainObject, Organization
from django.db import models
# Class for specifying different types of educational organizations, e.g. schools, colleges, universities.
class EducationalOrganizationType(DomainObject):
name = models.CharField(max_length=255, null=False, blank=False)
description = models.TextField()
def __str__(self):
return self.name
class EducationalOrganization(Organization):
# Relationship. nullable is true.
category = models.ForeignKey(EducationalOrganizationType, on_delete=models.CASCADE, blank=True, null=True)
def __str__(self):
        return self.name
| kumarsandeep91/Russet.iHunter.Model | iHunterModel/models/EducationalOrganization.py | Python | gpl-3.0 | 634 |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015-2017 Bitergia
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335, USA.
#
# Authors:
# Santiago Dueñas <[email protected]>
#
import argparse
import hashlib
import importlib
import json
import logging
import os
import pkgutil
import sys
from datetime import datetime as dt
from grimoirelab.toolkit.introspect import find_signature_parameters
from grimoirelab.toolkit.datetime import str_to_datetime
from .archive import Archive, ArchiveManager
from .errors import ArchiveError
from ._version import __version__
logger = logging.getLogger(__name__)
ARCHIVES_DEFAULT_PATH = '~/.perceval/archives/'
class Backend:
"""Abstract class for backends.
Base class to fetch data from a repository. This repository
will be named as 'origin'. During the initialization, an `Archive`
object can be provided for archiving raw data from the repositories.
    Derived classes have to implement `fetch_items`, `has_archiving` and
    `has_resuming` methods; otherwise, a `NotImplementedError`
    exception will be raised. The metadata decorator can be used together with
fetch methods but requires the implementation of `metadata_id`,
`metadata_updated_on` and `metadata_category` static methods.
The fetched items can be tagged using the `tag` parameter. It will
be useful to trace data. When it is set to `None` or to an empty
string, the tag will be the same that the `origin` attribute.
To track which version of the backend was used during the fetching
process, this class provides a `version` attribute that each backend
may override.
:param origin: identifier of the repository
:param tag: tag items using this label
:param archive: archive to store/retrieve data
:raises ValueError: raised when `archive` is not an instance of
`Archive` class
"""
version = '0.6.0'
def __init__(self, origin, tag=None, archive=None):
self._origin = origin
self.tag = tag if tag else origin
self.archive = archive or None
@property
def origin(self):
return self._origin
@property
def archive(self):
return self._archive
@archive.setter
def archive(self, obj):
if obj and not isinstance(obj, Archive):
msg = "obj is not an instance of Archive. %s object given" \
% (str(type(obj)))
raise ValueError(msg)
self._archive = obj
def fetch_items(self, **kwargs):
raise NotImplementedError
def fetch(self, category, **kwargs):
"""Fetch items from the repository.
The method retrieves items from a repository.
:param category: the category of the items fetched
:param kwargs: a list of other parameters (e.g., from_date, offset, etc.
specific for each backend)
:returns: a generator of items
"""
if self.archive:
self.archive.init_metadata(self.origin, self.__class__.__name__, self.version, category,
kwargs)
self.client = self._init_client()
for item in self.fetch_items(**kwargs):
yield self.metadata(item)
def fetch_from_archive(self):
"""Fetch the questions from an archive.
It returns the items stored within an archive. If this method is called but
no archive was provided, the method will raise a `ArchiveError` exception.
:returns: a generator of items
:raises ArchiveError: raised when an error occurs accessing an archive
"""
if not self.archive:
raise ArchiveError(cause="archive instance was not provided")
self.client = self._init_client(from_archive=True)
for item in self.fetch_items(**self.archive.backend_params):
yield self.metadata(item)
def metadata(self, item):
"""Add metadata to an item.
It adds metadata to a given item such as how and
when it was fetched. The contents from the original item will
be stored under the 'data' keyword.
:param item: an item fetched by a backend
"""
item = {
'backend_name': self.__class__.__name__,
'backend_version': self.version,
'perceval_version': __version__,
'timestamp': dt.utcnow().timestamp(),
'origin': self.origin,
'uuid': uuid(self.origin, self.metadata_id(item)),
'updated_on': self.metadata_updated_on(item),
'category': self.metadata_category(item),
'tag': self.tag,
'data': item,
}
return item
@classmethod
def has_archiving(cls):
raise NotImplementedError
@classmethod
def has_resuming(cls):
raise NotImplementedError
@staticmethod
def metadata_id(item):
raise NotImplementedError
@staticmethod
def metadata_updated_on(item):
raise NotImplementedError
@staticmethod
def metadata_category(item):
raise NotImplementedError
def _init_client(self, from_archive=False):
raise NotImplementedError
class BackendCommandArgumentParser:
"""Manage and parse backend command arguments.
This class defines and parses a set of arguments common to
backends commands. Some parameters like archive or the different
types of authentication can be set during the initialization
of the instance.
:param from_date: set from_date argument
:param to_date: set to_date argument
:param offset: set offset argument
:param basic_auth: set basic authentication arguments
:param token_auth: set token/key authentication arguments
:param archive: set archiving arguments
:param aliases: define aliases for parsed arguments
    :raises AttributeError: when both `from_date` and `offset` are set
to `True`
"""
def __init__(self, from_date=False, to_date=False, offset=False,
basic_auth=False, token_auth=False, archive=False,
aliases=None):
self._from_date = from_date
self._to_date = to_date
self._archive = archive
self.aliases = aliases or {}
self.parser = argparse.ArgumentParser()
group = self.parser.add_argument_group('general arguments')
group.add_argument('--category', dest='category',
help="type of the items to fetch")
group.add_argument('--tag', dest='tag',
help="tag the items generated during the fetching process")
if (from_date or to_date) and offset:
raise AttributeError("date and offset parameters are incompatible")
if from_date:
group.add_argument('--from-date', dest='from_date',
default='1970-01-01',
help="fetch items updated since this date")
if to_date:
group.add_argument('--to-date', dest='to_date',
help="fetch items updated before this date")
if offset:
group.add_argument('--offset', dest='offset',
type=int, default=0,
help="offset to start fetching items")
if basic_auth or token_auth:
self._set_auth_arguments(basic_auth=basic_auth,
token_auth=token_auth)
if archive:
self._set_archive_arguments()
self._set_output_arguments()
def parse(self, *args):
"""Parse a list of arguments.
Parse argument strings needed to run a backend command. The result
        will be an `argparse.Namespace` object populated with the values
obtained after the validation of the parameters.
:param args: argument strings
:result: an object with the parsed values
"""
parsed_args = self.parser.parse_args(args)
if self._from_date:
parsed_args.from_date = str_to_datetime(parsed_args.from_date)
if self._to_date and parsed_args.to_date:
parsed_args.to_date = str_to_datetime(parsed_args.to_date)
if self._archive and parsed_args.archived_since:
parsed_args.archived_since = str_to_datetime(parsed_args.archived_since)
if self._archive and parsed_args.fetch_archive and parsed_args.no_archive:
raise AttributeError("fetch-archive and no-archive arguments are not compatible")
if self._archive and parsed_args.fetch_archive and not parsed_args.category:
raise AttributeError("fetch-archive needs a category to work with")
# Set aliases
for alias, arg in self.aliases.items():
if (alias not in parsed_args) and (arg in parsed_args):
value = getattr(parsed_args, arg, None)
setattr(parsed_args, alias, value)
return parsed_args
def _set_auth_arguments(self, basic_auth=True, token_auth=False):
"""Activate authentication arguments parsing"""
group = self.parser.add_argument_group('authentication arguments')
if basic_auth:
group.add_argument('-u', '--backend-user', dest='user',
help="backend user")
group.add_argument('-p', '--backend-password', dest='password',
help="backend password")
if token_auth:
group.add_argument('-t', '--api-token', dest='api_token',
help="backend authentication token / API key")
def _set_archive_arguments(self):
"""Activate archive arguments parsing"""
group = self.parser.add_argument_group('archive arguments')
group.add_argument('--archive-path', dest='archive_path', default=None,
help="directory path to the archives")
group.add_argument('--no-archive', dest='no_archive', action='store_true',
help="do not archive data")
group.add_argument('--fetch-archive', dest='fetch_archive', action='store_true',
help="fetch data from the archives")
group.add_argument('--archived-since', dest='archived_since', default='1970-01-01',
help="retrieve items archived since the given date")
def _set_output_arguments(self):
"""Activate output arguments parsing"""
group = self.parser.add_argument_group('output arguments')
group.add_argument('-o', '--output', type=argparse.FileType('w'),
dest='outfile', default=sys.stdout,
help="output file")
class BackendCommand:
"""Abstract class to run backends from the command line.
    When the class is initialized, it parses the given arguments using
    the argument parser defined in the `setup_cmd_parser` method. Those
    arguments will be stored in the attribute `parsed_args`.
    The arguments will be used to initialize and run the `Backend` object
    assigned to this command. The backend used to run the command is stored
    under the `BACKEND` class attribute. Any class derived from this one must
    set its own `Backend` class.
    Moreover, the method `setup_cmd_parser` must be implemented to execute
    the backend.
"""
BACKEND = None
def __init__(self, *args):
parser = self.setup_cmd_parser()
self.parsed_args = parser.parse(*args)
self.archive_manager = None
self._pre_init()
self._initialize_archive()
self._post_init()
self.outfile = self.parsed_args.outfile
def run(self):
"""Fetch and write items.
This method runs the backend to fetch the items from the given
origin. Items are converted to JSON objects and written to the
defined output.
If `fetch-archive` parameter was given as an argument during
        the initialization of the instance, the items will be retrieved
using the archive manager.
"""
backend_args = vars(self.parsed_args)
if self.archive_manager and self.parsed_args.fetch_archive:
items = fetch_from_archive(self.BACKEND, backend_args,
self.archive_manager,
self.parsed_args.category,
self.parsed_args.archived_since)
else:
items = fetch(self.BACKEND, backend_args,
manager=self.archive_manager)
try:
for item in items:
obj = json.dumps(item, indent=4, sort_keys=True)
self.outfile.write(obj)
self.outfile.write('\n')
        except Exception as e:
            raise RuntimeError(str(e))
def _pre_init(self):
"""Override to execute before backend is initialized."""
pass
def _post_init(self):
"""Override to execute after backend is initialized."""
pass
def _initialize_archive(self):
"""Initialize archive based on the parsed parameters"""
if 'archive_path' not in self.parsed_args:
manager = None
elif self.parsed_args.no_archive:
manager = None
else:
if not self.parsed_args.archive_path:
archive_path = os.path.expanduser(ARCHIVES_DEFAULT_PATH)
else:
archive_path = self.parsed_args.archive_path
manager = ArchiveManager(archive_path)
self.archive_manager = manager
@staticmethod
def setup_cmd_parser():
raise NotImplementedError
def uuid(*args):
"""Generate a UUID based on the given parameters.
The UUID will be the SHA1 of the concatenation of the values
    from the list. The separator between these values is ':'.
Each value must be a non-empty string, otherwise, the function
will raise an exception.
:param *args: list of arguments used to generate the UUID
:returns: a universal unique identifier
:raises ValueError: when anyone of the values is not a string,
is empty or `None`.
"""
def check_value(v):
if not isinstance(v, str):
raise ValueError("%s value is not a string instance" % str(v))
elif not v:
raise ValueError("value cannot be None or empty")
else:
return v
s = ':'.join(map(check_value, args))
sha1 = hashlib.sha1(s.encode('utf-8', errors='surrogateescape'))
uuid_sha1 = sha1.hexdigest()
return uuid_sha1
def fetch(backend_class, backend_args, manager=None):
"""Fetch items using the given backend.
Generator to get items using the given backend class. When
an archive manager is given, this function will store
the fetched items in an `Archive`. If an exception is raised,
this archive will be removed to avoid corrupted archives.
The parameters needed to initialize the `backend` class and
get the items are given using `backend_args` dict parameter.
:param backend_class: backend class to fetch items
:param backend_args: dict of arguments needed to fetch the items
:param manager: archive manager needed to store the items
:returns: a generator of items
"""
init_args = find_signature_parameters(backend_class.__init__,
backend_args)
archive = manager.create_archive() if manager else None
init_args['archive'] = archive
backend = backend_class(**init_args)
fetch_args = find_signature_parameters(backend.fetch,
backend_args)
items = backend.fetch(**fetch_args)
try:
for item in items:
yield item
except Exception as e:
if manager:
archive_path = archive.archive_path
manager.remove_archive(archive_path)
raise e
def fetch_from_archive(backend_class, backend_args, manager,
category, archived_after):
"""Fetch items from an archive manager.
Generator to get the items of a category (previously fetched
by the given backend class) from an archive manager. Only those
items archived after the given date will be returned.
The parameters needed to initialize `backend` and get the
items are given using `backend_args` dict parameter.
    :param backend_class: backend class to retrieve items
:param backend_args: dict of arguments needed to retrieve the items
:param manager: archive manager where the items will be retrieved
:param category: category of the items to retrieve
:param archived_after: return items archived after this date
:returns: a generator of archived items
"""
init_args = find_signature_parameters(backend_class.__init__,
backend_args)
backend = backend_class(**init_args)
filepaths = manager.search(backend.origin,
backend.__class__.__name__,
category,
archived_after)
for filepath in filepaths:
backend.archive = Archive(filepath)
items = backend.fetch_from_archive()
try:
for item in items:
yield item
except ArchiveError as e:
logger.warning("Ignoring %s archive due to: %s", filepath, str(e))
def find_backends(top_package):
"""Find available backends.
Look for the Perceval backends and commands under `top_package`
and its sub-packages. When `top_package` defines a namespace,
backends under that same namespace will be found too.
:param top_package: package storing backends
:returns: a tuple with two dicts: one with `Backend` classes and one
with `BackendCommand` classes
"""
candidates = pkgutil.walk_packages(top_package.__path__,
prefix=top_package.__name__ + '.')
modules = [name for _, name, is_pkg in candidates if not is_pkg]
return _import_backends(modules)
def _import_backends(modules):
for module in modules:
importlib.import_module(module)
bkls = _find_classes(Backend, modules)
ckls = _find_classes(BackendCommand, modules)
backends = {name: kls for name, kls in bkls}
commands = {name: klass for name, klass in ckls}
return backends, commands
def _find_classes(parent, modules):
parents = parent.__subclasses__()
while parents:
kls = parents.pop()
m = kls.__module__
if m not in modules:
continue
name = m.split('.')[-1]
parents.extend(kls.__subclasses__())
yield name, kls
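if __name__ == "__main__":
    # A minimal sketch of a backend implementing the abstract interface
    # described in the Backend docstring above; the item layout and the
    # 'dummy' category are invented for illustration only.
    class DummyBackend(Backend):
        version = '0.1.0'

        def fetch_items(self, **kwargs):
            for i in range(3):
                yield {'id': str(i), 'updated': float(i)}

        @classmethod
        def has_archiving(cls):
            return False

        @classmethod
        def has_resuming(cls):
            return False

        @staticmethod
        def metadata_id(item):
            return item['id']

        @staticmethod
        def metadata_updated_on(item):
            return item['updated']

        @staticmethod
        def metadata_category(item):
            return 'dummy'

        def _init_client(self, from_archive=False):
            return None

    for fetched in DummyBackend('http://example.com').fetch('dummy'):
        print(fetched['uuid'], fetched['updated_on'])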
| sduenas/perceval | perceval/backend.py | Python | gpl-3.0 | 19,497 |
#!/usr/bin/env python
from __future__ import print_function
from glob import glob
from tempfile import NamedTemporaryFile
import sys
from alibuild_helpers.log import debug, error, info
from os import remove
from alibuild_helpers.utilities import format
from alibuild_helpers.cmd import execute
from alibuild_helpers.utilities import detectArch
from alibuild_helpers.utilities import parseRecipe, getRecipeReader
def deps(recipesDir, topPackage, outFile, buildRequires, transitiveRed, disable):
dot = {}
keys = [ "requires" ]
if buildRequires:
keys.append("build_requires")
for p in glob("%s/*.sh" % recipesDir):
debug(format("Reading file %(filename)s", filename=p))
try:
err, recipe, _ = parseRecipe(getRecipeReader(p))
name = recipe["package"]
if name in disable:
debug("Ignoring %s, disabled explicitly" % name)
continue
except Exception as e:
error(format("Error reading recipe %(filename)s: %(type)s: %(msg)s",
filename=p, type=type(e).__name__, msg=str(e)))
sys.exit(1)
dot[name] = dot.get(name, [])
for k in keys:
for d in recipe.get(k, []):
d = d.split(":")[0]
        if d not in disable:
          dot[name].append(d)
selected = None
if topPackage != "all":
if not topPackage in dot:
error(format("Package %(topPackage)s does not exist", topPackage=topPackage))
return False
selected = [ topPackage ]
olen = 0
while len(selected) != olen:
olen = len(selected)
selected += [ x
for s in selected if s in dot
for x in dot[s] if not x in selected ]
selected.sort()
result = "digraph {\n"
for p,deps in list(dot.items()):
if selected and not p in selected: continue
result += " \"%s\";\n" % p
for d in deps:
result += " \"%s\" -> \"%s\";\n" % (p,d)
result += "}\n"
  with NamedTemporaryFile("w", delete=False) as fp:
fp.write(result)
try:
if transitiveRed:
execute(format("tred %(dotFile)s > %(dotFile)s.0 && mv %(dotFile)s.0 %(dotFile)s",
dotFile=fp.name))
execute(["dot", fp.name, "-Tpdf", "-o", outFile])
except Exception as e:
error(format("Error generating dependencies with dot: %(type)s: %(msg)s",
type=type(e).__name__, msg=str(e)))
else:
info(format("Dependencies graph generated: %(outFile)s", outFile=outFile))
remove(fp.name)
return True
def depsArgsParser(parser):
parser.add_argument("topPackage")
parser.add_argument("-a", "--architecture", help="force architecture",
dest="architecture", default=detectArch())
parser.add_argument("--dist", dest="distDir", default="alidist",
help="Recipes directory")
parser.add_argument("--output-file", "-o", dest="outFile", default="dist.pdf",
help="Output file (PDF format)")
parser.add_argument("--debug", "-d", dest="debug", action="store_true", default=False,
help="Debug output")
parser.add_argument("--build-requires", "-b", dest="buildRequires", action="store_true",
default=False, help="Debug output")
parser.add_argument("--neat", dest="neat", action="store_true", default=False,
help="Neat graph with transitive reduction")
parser.add_argument("--disable", dest="disable", default=[],
help="List of packages to ignore")
return parser
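if __name__ == "__main__":
  # Hedged example invocation (paths are placeholders): graph every package
  # found under ./alidist into deps.pdf, applying transitive reduction.
  deps("alidist", "all", "deps.pdf",
       buildRequires=False, transitiveRed=True, disable=[])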
| dberzano/alibuild | alibuild_helpers/deps.py | Python | gpl-3.0 | 3,451 |
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
from caed import views
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'project.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^$',views.last_incidents,name='index'),
)
| julianofischer/caederm | project/urls.py | Python | gpl-3.0 | 377 |
#!/usr/bin/env python3
# Version 1.0
# Author Alexis Blanchet-Cohen
# Date: 09/06/2014
import argparse
import glob
import os
import subprocess
import util
# Read the command line arguments.
parser = argparse.ArgumentParser(description="Generates Picard target list scripts.")
parser.add_argument("-s", "--scriptsDirectory", help="Scripts directory. DEFAULT=collectinsertsizemetrics", default="collectinsertsizemetrics")
parser.add_argument("-i", "--inputDirectory", help="Input directory with BAM files. DEFAULT=../results/bamtools_merge", default="../results/star")
parser.add_argument("-o", "--outputDirectory", help="Output directory with filterd BAM files. DEFAULT=../results/collectinsertsizemetrics", default="../results/collectinsertsizemetrics")
parser.add_argument("-q", "--submitJobsToQueue", help="Submit jobs to queue immediately.", choices=["yes", "no", "y", "n"], default="no")
args = parser.parse_args()
# If not in the scripts directory, cd to the scripts directory
util.cdMainScriptsDirectory()
# Process the command line arguments.
inputDirectory = os.path.abspath(args.inputDirectory)
outputDirectory = os.path.abspath(args.outputDirectory)
scriptsDirectory = os.path.abspath(args.scriptsDirectory)
# Read configuration files
config = util.readConfigurationFiles()
header = config.getboolean("server", "PBS_header")
picard_folder = config.get("picard", "folder")
genome = config.get("project", "genome")
genomeFile = config.get(genome, "genomeFile")
# Read samples file.
samplesFile = util.readSamplesFile()
samples = samplesFile["sample"]
# Create script and output directories, if they do not exist yet.
util.makeDirectory(outputDirectory)
util.makeDirectory(scriptsDirectory)
# CD to scripts directory
os.chdir(scriptsDirectory)
# Write scripts
for sample in samples:
scriptName = "collectinsertsizemetrics_" + sample + ".sh"
script = open(scriptName, "w")
if header:
util.writeHeader(script, config, "collectinsertsizemetrics")
    # CollectInsertSizeMetrics command
script.write("java -Xmx4g -jar " + os.path.join(picard_folder, "CollectInsertSizeMetrics.jar") + " \\\n")
script.write("VALIDATION_STRINGENCY=LENIENT " + "\\\n")
script.write("HISTOGRAM_FILE=" + os.path.join(outputDirectory, sample + "_picard_insert_size_plot.pdf") + " \\\n")
script.write("INPUT=" + os.path.join(inputDirectory, sample + ".filtered.bam") + " \\\n")
script.write("OUTPUT=" + os.path.join(outputDirectory, sample + "_picard_insert_size_metrics.txt") + " \\\n")
script.write("&> " + scriptName + ".log")
script.close()
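# For reference, each emitted shell script looks roughly like the following
# ("sample1" is a hypothetical sample name; the Picard folder and directories
# come from the configuration files and the defaults above):
#
#   java -Xmx4g -jar <picard_folder>/CollectInsertSizeMetrics.jar \
#   VALIDATION_STRINGENCY=LENIENT \
#   HISTOGRAM_FILE=../results/collectinsertsizemetrics/sample1_picard_insert_size_plot.pdf \
#   INPUT=../results/star/sample1.filtered.bam \
#   OUTPUT=../results/collectinsertsizemetrics/sample1_picard_insert_size_metrics.txt \
#   &> collectinsertsizemetrics_sample1.sh.log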
| blancha/abcngspipelines | utils/collectinsertsizemetrics.py | Python | gpl-3.0 | 2,567 |
"""Current-flow betweenness centrality measures for subsets of nodes."""
import networkx as nx
from networkx.algorithms.centrality.flow_matrix import flow_matrix_row
from networkx.utils import not_implemented_for, reverse_cuthill_mckee_ordering
__all__ = [
"current_flow_betweenness_centrality_subset",
"edge_current_flow_betweenness_centrality_subset",
]
@not_implemented_for("directed")
def current_flow_betweenness_centrality_subset(
G, sources, targets, normalized=True, weight=None, dtype=float, solver="lu"
):
r"""Compute current-flow betweenness centrality for subsets of nodes.
Current-flow betweenness centrality uses an electrical current
model for information spreading in contrast to betweenness
centrality which uses shortest paths.
Current-flow betweenness centrality is also known as
random-walk betweenness centrality [2]_.
Parameters
----------
G : graph
A NetworkX graph
sources: list of nodes
Nodes to use as sources for current
targets: list of nodes
Nodes to use as sinks for current
normalized : bool, optional (default=True)
If True the betweenness values are normalized by b=b/(n-1)(n-2) where
n is the number of nodes in G.
weight : string or None, optional (default=None)
Key for edge data used as the edge weight.
If None, then use 1 as each edge weight.
dtype: data type (float)
Default data type for internal matrices.
Set to np.float32 for lower memory consumption.
solver: string (default='lu')
Type of linear solver to use for computing the flow matrix.
Options are "full" (uses most memory), "lu" (recommended), and
"cg" (uses least memory).
Returns
-------
nodes : dictionary
Dictionary of nodes with betweenness centrality as the value.
See Also
--------
approximate_current_flow_betweenness_centrality
betweenness_centrality
edge_betweenness_centrality
edge_current_flow_betweenness_centrality
Notes
-----
Current-flow betweenness can be computed in $O(I(n-1)+mn \log n)$
time [1]_, where $I(n-1)$ is the time needed to compute the
inverse Laplacian. For a full matrix this is $O(n^3)$ but using
sparse methods you can achieve $O(nm{\sqrt k})$ where $k$ is the
Laplacian matrix condition number.
The space required is $O(nw)$ where $w$ is the width of the sparse
Laplacian matrix. Worse case is $w=n$ for $O(n^2)$.
If the edges have a 'weight' attribute they will be used as
weights in this algorithm. Unspecified weights are set to 1.
References
----------
.. [1] Centrality Measures Based on Current Flow.
Ulrik Brandes and Daniel Fleischer,
Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
http://algo.uni-konstanz.de/publications/bf-cmbcf-05.pdf
.. [2] A measure of betweenness centrality based on random walks,
M. E. J. Newman, Social Networks 27, 39-54 (2005).
"""
try:
import numpy as np
except ImportError as e:
raise ImportError(
"current_flow_betweenness_centrality requires NumPy ", "http://numpy.org/"
) from e
if not nx.is_connected(G):
raise nx.NetworkXError("Graph not connected.")
n = G.number_of_nodes()
ordering = list(reverse_cuthill_mckee_ordering(G))
# make a copy with integer labels according to rcm ordering
# this could be done without a copy if we really wanted to
mapping = dict(zip(ordering, range(n)))
H = nx.relabel_nodes(G, mapping)
betweenness = dict.fromkeys(H, 0.0) # b[v]=0 for v in H
for row, (s, t) in flow_matrix_row(H, weight=weight, dtype=dtype, solver=solver):
for ss in sources:
i = mapping[ss]
for tt in targets:
j = mapping[tt]
betweenness[s] += 0.5 * np.abs(row[i] - row[j])
betweenness[t] += 0.5 * np.abs(row[i] - row[j])
if normalized:
nb = (n - 1.0) * (n - 2.0) # normalization factor
else:
nb = 2.0
for v in H:
betweenness[v] = betweenness[v] / nb + 1.0 / (2 - n)
return {ordering[k]: v for k, v in betweenness.items()}
@not_implemented_for("directed")
def edge_current_flow_betweenness_centrality_subset(
G, sources, targets, normalized=True, weight=None, dtype=float, solver="lu"
):
r"""Compute current-flow betweenness centrality for edges using subsets
of nodes.
Current-flow betweenness centrality uses an electrical current
model for information spreading in contrast to betweenness
centrality which uses shortest paths.
Current-flow betweenness centrality is also known as
random-walk betweenness centrality [2]_.
Parameters
----------
G : graph
A NetworkX graph
sources: list of nodes
Nodes to use as sources for current
targets: list of nodes
Nodes to use as sinks for current
normalized : bool, optional (default=True)
If True the betweenness values are normalized by b=b/(n-1)(n-2) where
n is the number of nodes in G.
weight : string or None, optional (default=None)
Key for edge data used as the edge weight.
If None, then use 1 as each edge weight.
dtype: data type (float)
Default data type for internal matrices.
Set to np.float32 for lower memory consumption.
solver: string (default='lu')
Type of linear solver to use for computing the flow matrix.
Options are "full" (uses most memory), "lu" (recommended), and
"cg" (uses least memory).
Returns
-------
nodes : dict
Dictionary of edge tuples with betweenness centrality as the value.
See Also
--------
betweenness_centrality
edge_betweenness_centrality
current_flow_betweenness_centrality
Notes
-----
Current-flow betweenness can be computed in $O(I(n-1)+mn \log n)$
time [1]_, where $I(n-1)$ is the time needed to compute the
inverse Laplacian. For a full matrix this is $O(n^3)$ but using
sparse methods you can achieve $O(nm{\sqrt k})$ where $k$ is the
Laplacian matrix condition number.
The space required is $O(nw)$ where $w$ is the width of the sparse
Laplacian matrix. Worse case is $w=n$ for $O(n^2)$.
If the edges have a 'weight' attribute they will be used as
weights in this algorithm. Unspecified weights are set to 1.
References
----------
.. [1] Centrality Measures Based on Current Flow.
Ulrik Brandes and Daniel Fleischer,
Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
http://algo.uni-konstanz.de/publications/bf-cmbcf-05.pdf
.. [2] A measure of betweenness centrality based on random walks,
M. E. J. Newman, Social Networks 27, 39-54 (2005).
"""
try:
import numpy as np
except ImportError as e:
raise ImportError(
"current_flow_betweenness_centrality requires NumPy " "http://numpy.org/"
) from e
if not nx.is_connected(G):
raise nx.NetworkXError("Graph not connected.")
n = G.number_of_nodes()
ordering = list(reverse_cuthill_mckee_ordering(G))
# make a copy with integer labels according to rcm ordering
# this could be done without a copy if we really wanted to
mapping = dict(zip(ordering, range(n)))
H = nx.relabel_nodes(G, mapping)
edges = (tuple(sorted((u, v))) for u, v in H.edges())
betweenness = dict.fromkeys(edges, 0.0)
if normalized:
nb = (n - 1.0) * (n - 2.0) # normalization factor
else:
nb = 2.0
for row, (e) in flow_matrix_row(H, weight=weight, dtype=dtype, solver=solver):
for ss in sources:
i = mapping[ss]
for tt in targets:
j = mapping[tt]
betweenness[e] += 0.5 * np.abs(row[i] - row[j])
betweenness[e] /= nb
return {(ordering[s], ordering[t]): v for (s, t), v in betweenness.items()}
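if __name__ == "__main__":
    # Small self-contained demo on a 3x3 grid graph; the source/target choice
    # is arbitrary and only illustrates the subset-restricted measure.
    G = nx.grid_2d_graph(3, 3)
    bc = current_flow_betweenness_centrality_subset(
        G, sources=[(0, 0)], targets=[(2, 2)]
    )
    print(sorted(bc.items(), key=lambda kv: -kv[1])[:3])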
| SpaceGroupUCL/qgisSpaceSyntaxToolkit | esstoolkit/external/networkx/algorithms/centrality/current_flow_betweenness_subset.py | Python | gpl-3.0 | 8,195 |
from ert_gui.models import ErtConnector
from ert_gui.models.mixins import SpinnerModelMixin
class LocalMaxRunning(ErtConnector, SpinnerModelMixin):
def getMaxValue(self):
""" @rtype: int """
return 1000
def getMinValue(self):
""" @rtype: int """
return 1
def getSpinnerValue(self):
""" @rtype: int """
return self.ert().siteConfig().getMaxRunningLocal()
def setSpinnerValue(self, value):
self.ert().siteConfig().setMaxRunningLocal(value)
| iLoop2/ResInsight | ThirdParty/Ert/devel/python/python/ert_gui/models/connectors/queue_system/local_max_running.py | Python | gpl-3.0 | 519 |
from PyQt5 import QtCore
import subprocess
from pyconrad import *
import numpy as np
import jpype
import time
class forward_project_thread(QtCore.QThread):
forward_project_finsihed = QtCore.pyqtSignal(str)
def init(self, use_cl, ForwardProj, Phantom):
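        # Note: deliberately a plain init() rather than __init__(), presumably
        # so QThread's own constructor stays untouched; callers are expected
        # to invoke this once before start().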
self.use_cl = use_cl
self.ForwardProj = ForwardProj
self.Phantom = Phantom
def get_fanogram(self):
return self.fanogram
def run(self):
jpype.attachThreadToJVM()
if self.use_cl:
self.fanogram = self.ForwardProj.projectRayDrivenCL(self.Phantom.grid)
else:
self.fanogram = self.ForwardProj.projectRayDriven(self.Phantom)
# time.sleep(5)
jpype.detachThreadFromJVM()
        self.forward_project_finsihed.emit('finished')
| alPreuhs/InteractiveReconstruction | Threads/forward_projection_thread.py | Python | gpl-3.0 | 783 |
#!/usr/bin/env python
# encoding: utf-8
#
# Copyright 2009 !j Incorporated
#
# This file is part of Canner.
#
# Canner is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Canner is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Canner. If not, see <http://www.gnu.org/licenses/>.
#
"""
parse-config.py
"""
import sys, os, re
from canner import taglib
def main(filename):
lines = open(filename, 'rU')
n = 0
ssh_version = None
ssh_enable = False
for line in lines:
n += 1
# hostname
m = re.match(r'set hostname ([\w\d.-]+)', line)
if m:
host = m.group(1)
taglib.tag("hostname", host).implied_by(taglib.env_tags.device, line=n)
continue
# time
m = re.match(r'set ntp server( backup\d)? "?([\w\d.-]+)"?', line)
if m:
server = m.group(2)
if not server == '0.0.0.0':
taglib.tag("NTP server", server).implied_by(taglib.env_tags.device, line=n)
continue
# dns
m = re.match(r'set domain ([\w\d.-]+)', line)
if m:
domain = m.group(1)
taglib.tag("domain name", domain).implied_by(taglib.env_tags.device, line=n)
continue
m = re.match(r'set dns host dns\d ([\w\d.-]+)', line)
if m:
server = m.group(1)
taglib.tag("name server", server).implied_by(taglib.env_tags.device, line=n)
continue
m = re.match(r'set xauth ([\w\d.-]+) dns\d ([\w\d.-]+)', line)
if m:
server = m.group(2)
taglib.tag("name server", server).implied_by(taglib.env_tags.device, line=n)
continue
m = re.match(r'set l2tp dns\d ([\w\d.-]+)', line)
if m:
server = m.group(1)
taglib.tag("name server", server).implied_by(taglib.env_tags.device, line=n)
continue
# interfaces
m = re.match(r'set interface ([\w\d]+) ip ([\d.]+)/([\d.]+)( secondary)?', line)
if m:
name, ipaddress, plen, secondary = m.groups()
address = ipaddress + "/" + plen
ifaddr_tag = taglib.ip_address_tag(address, "interface address")
address_tag = taglib.ip_address_tag(address)
subnet_tag = taglib.ip_subnet_tag(address)
name_tag = taglib.tag("interface", "%s %s" % (taglib.env_tags.device.name, name))
name_tag.implied_by(taglib.env_tags.snapshot, line=n)
name_tag.implies(taglib.env_tags.device, line=n)
name_tag.implies(ifaddr_tag, line=n)
ifaddr_tag.implies(address_tag, line=n)
address_tag.implies(subnet_tag, line=n)
continue
# accounts
m = re.match(r'set admin user "?([\w\d.-]+)"?\s+.*', line)
if m:
account = m.group(1)
taglib.tag("user", account).implied_by(taglib.env_tags.device, line=n)
continue
# services
m = re.match(r'set ssh version ([\w\d]+)', line)
if m:
ssh_version = m.group(1)
ssh_version_line = n
continue
m = re.match(r'set ssh enable', line)
if m:
ssh_enable = True
taglib.tag("service", 'SSH').implied_by(taglib.env_tags.device, n)
continue
m = re.match(r'set scp enable', line)
if m:
taglib.tag("service", 'SCP').implied_by(taglib.env_tags.device, n)
continue
# post parse phase
if ssh_enable:
if ssh_version:
taglib.tag("service", 'SSH' + ssh_version).implied_by(taglib.env_tags.device, ssh_version_line)
if __name__ == '__main__':
main(taglib.default_filename)
taglib.output_tagging_log()
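# For reference, hypothetical ScreenOS configuration lines that the regexes
# above would match (not taken from a real device):
#
#   set hostname fw01
#   set ntp server "10.0.0.1"
#   set domain example.com
#   set interface trust ip 192.168.1.1/24
#   set admin user "admin" password "..."
#   set ssh version v2
#   set ssh enable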
| pusateri/canner | taggers/OS--ScreenOS/file--config.netscreen/parse-config.py | Python | gpl-3.0 | 4,229 |
# Copyright (C) 2006-2007 Red Hat, Inc.
# Copyright (C) 2010 Collabora Ltd. <http://www.collabora.co.uk/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
from gi.repository import GObject
import dbus
from gi.repository import TelepathyGLib
CONNECTION = TelepathyGLib.IFACE_CONNECTION
CONNECTION_STATUS_CONNECTED = TelepathyGLib.ConnectionStatus.CONNECTED
from sugar3 import profile
from jarabe.util.telepathy import connection_watcher
CONNECTION_INTERFACE_BUDDY_INFO = 'org.laptop.Telepathy.BuddyInfo'
_owner_instance = None
class BaseBuddyModel(GObject.GObject):
__gtype_name__ = 'SugarBaseBuddyModel'
def __init__(self, **kwargs):
self._key = None
self._nick = None
self._color = None
self._tags = None
self._current_activity = None
GObject.GObject.__init__(self, **kwargs)
def get_nick(self):
return self._nick
def set_nick(self, nick):
self._nick = nick
nick = GObject.Property(type=object, getter=get_nick, setter=set_nick)
def get_key(self):
return self._key
def set_key(self, key):
if isinstance(key, bytes):
key = key.decode('utf-8')
self._key = key
key = GObject.Property(type=object, getter=get_key, setter=set_key)
def get_color(self):
return self._color
def set_color(self, color):
self._color = color
color = GObject.Property(type=object, getter=get_color, setter=set_color)
def get_tags(self):
return self._tags
tags = GObject.Property(type=object, getter=get_tags)
def get_current_activity(self):
return self._current_activity
def set_current_activity(self, current_activity):
if self._current_activity != current_activity:
self._current_activity = current_activity
self.notify('current-activity')
current_activity = GObject.Property(type=object,
getter=get_current_activity,
setter=set_current_activity)
def is_owner(self):
raise NotImplementedError
class OwnerBuddyModel(BaseBuddyModel):
__gtype_name__ = 'SugarOwnerBuddyModel'
def __init__(self):
BaseBuddyModel.__init__(self)
self.props.nick = profile.get_nick_name()
self.props.color = profile.get_color()
self.props.key = profile.get_profile().pubkey
self.connect('notify::nick', self.__property_changed_cb)
self.connect('notify::color', self.__property_changed_cb)
bus = dbus.SessionBus()
bus.add_signal_receiver(
self.__name_owner_changed_cb,
signal_name='NameOwnerChanged',
dbus_interface='org.freedesktop.DBus')
bus_object = bus.get_object(dbus.BUS_DAEMON_NAME, dbus.BUS_DAEMON_PATH)
for service in bus_object.ListNames(
dbus_interface=dbus.BUS_DAEMON_IFACE):
if service.startswith(CONNECTION + '.'):
path = '/%s' % service.replace('.', '/')
conn_proxy = bus.get_object(service, path)
self._prepare_conn(path, conn_proxy)
def _prepare_conn(self, object_path, conn_proxy):
self.connection = {}
self.object_path = object_path
self.conn_proxy = conn_proxy
self.conn_ready = False
self.connection[CONNECTION] = \
dbus.Interface(self.conn_proxy, CONNECTION)
self.connection[CONNECTION].GetInterfaces(
reply_handler=self.__conn_get_interfaces_reply_cb,
error_handler=self.__error_handler_cb)
def __conn_get_interfaces_reply_cb(self, interfaces):
for interface in interfaces:
self.connection[interface] = dbus.Interface(
self.conn_proxy, interface)
self.conn_ready = True
self.__connection_ready_cb(self.connection)
def __connection_ready_cb(self, connection):
if not self.conn_ready:
return
self._sync_properties_on_connection(connection)
def __name_owner_changed_cb(self, name, old, new):
if name.startswith(CONNECTION + '.') and not old and new:
path = '/' + name.replace('.', '/')
self.conn_proxy = dbus.Bus().get_object(name, path)
self._prepare_conn(path, self.conn_proxy)
def __property_changed_cb(self, buddy, pspec):
self._sync_properties()
def _sync_properties(self):
conn_watcher = connection_watcher.get_instance()
for connection in conn_watcher.get_connections():
self._sync_properties_on_connection(connection)
def _sync_properties_on_connection(self, connection):
if CONNECTION_INTERFACE_BUDDY_INFO in connection:
properties = {}
if self.props.key is not None:
properties['key'] = dbus.ByteArray(
self.props.key.encode('utf-8'))
if self.props.color is not None:
properties['color'] = self.props.color.to_string()
logging.debug('calling SetProperties with %r', properties)
connection[CONNECTION_INTERFACE_BUDDY_INFO].SetProperties(
properties,
reply_handler=self.__set_properties_cb,
error_handler=self.__error_handler_cb)
def __set_properties_cb(self):
logging.debug('__set_properties_cb')
def __error_handler_cb(self, error):
raise RuntimeError(error)
def is_owner(self):
return True
def get_owner_instance():
global _owner_instance
if _owner_instance is None:
_owner_instance = OwnerBuddyModel()
return _owner_instance
class BuddyModel(BaseBuddyModel):
__gtype_name__ = 'SugarBuddyModel'
def __init__(self, **kwargs):
self._account = None
self._contact_id = None
self._handle = None
BaseBuddyModel.__init__(self, **kwargs)
def is_owner(self):
return False
def get_account(self):
return self._account
def set_account(self, account):
self._account = account
account = GObject.Property(type=object, getter=get_account,
setter=set_account)
def get_contact_id(self):
return self._contact_id
def set_contact_id(self, contact_id):
self._contact_id = contact_id
contact_id = GObject.Property(type=object, getter=get_contact_id,
setter=set_contact_id)
def get_handle(self):
return self._handle
def set_handle(self, handle):
self._handle = handle
handle = GObject.Property(type=object, getter=get_handle,
setter=set_handle)
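# A rough usage sketch: both buddy models are ordinary GObject classes, so
# their properties can be read and watched the usual way (the values below
# are illustrative, not from a real Telepathy session):
#
#   buddy = BuddyModel(nick='alice', key='00ff')
#   buddy.connect('notify::current-activity',
#                 lambda obj, pspec: print(obj.props.current_activity))
#   buddy.props.current_activity = 'org.example.SomeActivity'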
| sugarlabs/sugar | src/jarabe/model/buddy.py | Python | gpl-3.0 | 7,314 |
"""
DFO-GN
====================
A derivative-free solver for least squares minimisation with bound constraints.
This version has resampling (not part of main package).
This file is a modified version of DFOGN which allows resampling and restarts,
to better cope with noisy problems.
Lindon Roberts, 2017
Call structure is:
x, f, nf, exit_flag, exit_str = dfogn(objfun, x0, lower, upper,
maxfun, init_tr_radius, rhoend=1e-8)
Required inputs:
objfun Objective function, callable as: residual_vector = objfun(x)
x0 Initial starting point, NumPy ndarray
Optional inputs:
lower, upper Lower and upper bound constraints (lower <= x <= upper),
must be NumPy ndarrays of same size as x0 (default +/-1e20)
    maxfun Maximum number of allowable function evaluations (default 1000)
    init_tr_radius Initial trust region radius (default 0.1*max(1, ||x0||_infty))
rhoend Termination condition on trust region radius (default 1e-8)
Outputs:
x Estimate of minimiser
f Value of least squares objective at x (f = ||objfun(x)||^2)
nf Number of objective evaluations used to find x
    exit_flag Integer flag indicating termination criterion (see the list below the imports)
exit_str String with more detailed termination message
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
The development of this software was sponsored by NAG Ltd. (http://www.nag.co.uk)
and the EPSRC Centre For Doctoral Training in Industrially Focused Mathematical
Modelling (EP/L015803/1) at the University of Oxford. Please contact NAG for
alternative licensing.
Copyright 2017, Lindon Roberts
"""
# Ensure compatibility with Python 2
from __future__ import absolute_import, division, print_function, unicode_literals
import numpy as np
import scipy.linalg as sp_linalg
from math import sqrt
import logging
from .util import *
from .trust_region import *
from .alternative_move import *
#######################
# Exit codes
EXIT_SUCCESS = 0 # successful finish (rho=rhoend or sufficient objective reduction)
EXIT_INPUT_ERROR = 1 # error, bad inputs
EXIT_MAXFUN_WARNING = 2 # warning, reached max function evals
EXIT_TR_INCREASE_ERROR = 3 # error, trust region step increased model value
EXIT_LINALG_ERROR = 4 # error, linalg error (singular matrix encountered)
EXIT_ALTMOV_MEMORY_ERROR = 5 # error, stpsav issue in ALTMOV
# Errors for which we can do a restart (not including rho=rhoend in EXIT_SUCCESS)
DO_RESTART_ERRORS = [EXIT_TR_INCREASE_ERROR, EXIT_LINALG_ERROR, EXIT_ALTMOV_MEMORY_ERROR]
#######################
#######################
# Sampling scenarios
SCEN_PRELIM = 1 # during prelim
SCEN_GROWING_NEW_DIRECTION = 2 # adding new direction while growing
SCEN_TR_STEP = 3 # sampling xk+sk from successful trust region step
SCEN_GEOM_UPDATE = 4 # calling altmov for geometry fixing
SCEN_RHOEND_REACHED = 5 # reached rhoend in unsuccessful TR step
#######################
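# The nsamples argument of dfogn_resampling (defined below) is a callable
# nsamples(delta, rho, current_iter, nruns_so_far, scenario) returning how many
# evaluations to average at each point; the scenario codes above let a policy
# adapt to the sampling context.  A sketch of such a policy (entirely
# hypothetical, not part of this module):
#
#     def my_nsamples(delta, rho, current_iter, nruns_so_far, scenario):
#         if scenario == SCEN_PRELIM:
#             return 1  # keep building the initial set cheap
#         return min(10, 1 + nruns_so_far)  # average more after each restart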
class Model:
def __init__(self, n, m, npt, x0, xl, xu):
assert npt==n+1, "Require strictly linear model"
# Problem sizes
self.n = n
self.m = m
self.npt = npt
self.npt_so_far = 0 # how many points have we added so far (for growing initial set)
# Actual model info
# Here, the model for each residual is centred around xbase
# m(x) = model_const_term + gqv*(x-xbase)
self.kbase = 0 # index of base point
self.xbase = x0 # base point
self.xl = xl # lower bounds (absolute terms)
self.xu = xu # upper bounds (absolute terms)
self.sl = xl - x0 # lower bounds (adjusted for xbase), should be -ve (actually < -rhobeg)
self.su = xu - x0 # upper bounds (adjusted for xbase), should be +ve (actually > rhobeg)
self.xpt = np.zeros((npt, n)) # interpolation points
self.fval_v = np.zeros((npt, m)) # residual vectors at each xpt(+xbase)
self.fval = np.zeros((npt, )) # total sum of squares at each xpt(+xbase)
self.model_const_term_v = np.zeros((m,)) # constant term of each mini-model
self.gqv = np.zeros((n, m)) # interpolated gradients for each mini-model
self.kopt = None # index of current best x
self.fbeg = None # initial sum of squares at x0
self.xsave = None # possible final return value (abs coords)
self.fsave = None # sum of squares for final return value
self.lu = None # LU decomp of interp matrix
self.piv = None # pivots for LU decomposition of interp matrix
self.lu_current = False # whether current LU factorisation of interp matrix is up-to-date or not
self.EXACT_CONST_TERM = True # use exact c=r(xopt) for interpolation (improve conditioning)
# Affects mini-model interpolation / interpolation matrix, but also geometry updating
        self.nsamples = np.zeros((npt,), dtype=int)  # how many samples we have averaged to get fval_v, where fval = sumsq(avg fval_v); plain int as np.int is removed in modern NumPy
def x_within_bounds(self, k=None, x=None):
# Get x value for k-th point or x vector (in absolute terms, force within bounds)
if k is not None:
return np.minimum(np.maximum(self.xl, self.xbase + self.xpt[k, :]), self.xu)
elif x is not None:
return np.minimum(np.maximum(self.xl, self.xbase + x), self.xu)
else:
return None
def xopt(self):
# Current best x (relative to xbase)
return self.xpt[self.kopt, :].copy()
def fval_v_opt(self):
return self.fval_v[self.kopt,:]
def fval_opt(self):
return self.fval[self.kopt]
def update_point(self, knew, xnew, v_err, f):
if knew >= self.npt_so_far and self.npt_so_far < self.npt:
# when still growing, need to append in correct order
assert knew == self.npt_so_far, "Updating new index too far along (%g when should be %g)" % (knew, self.npt_so_far)
self.npt_so_far += 1
# Add point xnew with objective vector v_err (full objective f) at the knew-th index
self.xpt[knew,:] = xnew
self.fval_v[knew, :] = v_err
self.fval[knew] = f
self.nsamples[knew] = 1
# Update XOPT, GOPT and KOPT if the new calculated F is less than FOPT.
if f < self.fval_opt():
self.kopt = knew
self.lu_current = False
return
def add_point_resample(self, knew, v_err_new):
# We have resampled point knew and got a new fval_v = v_err_new
# Update our estimates of fval_v
assert knew < self.npt_to_use(), "Invalid knew"
t = float(self.nsamples[knew]) / float(self.nsamples[knew] + 1)
self.fval_v[knew, :] = t * self.fval_v[knew, :] + (1 - t) * v_err_new
self.fval[knew] = sumsq(self.fval_v[knew, :])
self.nsamples[knew] += 1
if self.fval[knew] < self.fval_opt():
self.kopt = knew
return
def npt_to_use(self):
# Number of points to use when building interpolation system
return min(self.npt_so_far, self.npt) # depends on whether we have a full set yet (or not)
def gqv_at_xopt(self):
return self.gqv
def shift_base(self, xbase_shift):
for m1 in range(self.m):
self.model_const_term_v[m1] += np.dot(self.gqv[:, m1], xbase_shift)
# The main updates
for k in range(self.npt):
self.xpt[k, :] = self.xpt[k, :] - xbase_shift
self.xbase += xbase_shift
self.sl = self.sl - xbase_shift
self.su = self.su - xbase_shift
self.lu_current = False
self.factorise_LU()
return
def interpolate_mini_models(self):
# Build interpolation matrix and factorise (in self.lu, self.piv)
self.factorise_LU()
try:
if self.EXACT_CONST_TERM:
idx_to_use = [k for k in range(self.npt) if k != self.kopt]
for m1 in range(self.m):
rhs = np.zeros((self.n,))
for i in range(self.n):
k = idx_to_use[i]
rhs[i] = self.fval_v[k, m1] - self.fval_v[self.kopt, m1] - \
np.dot(self.gqv[:, m1], self.xpt[k, :] - self.xopt())
soln = sp_linalg.lu_solve((self.lu, self.piv), rhs)
self.gqv[:, m1] += soln # whole solution is gradient
# shift constant term back
self.model_const_term_v = self.fval_v[self.kopt, :] - np.dot(self.gqv.T, self.xopt())
return True # flag ok
else:
model_values_v = np.zeros((self.npt, self.m))
for k in range(self.npt):
model_values_v[k, :] = self.predicted_values(self.xpt[k, :], d_based_at_xopt=False,
with_const_term=True)
# Sometimes when things get too close to a solution, we can get NaNs in model_values - flag error & quit
if np.any(np.isnan(model_values_v)):
self.gqv = None
return False # flag error
for m1 in range(self.m):
rhs = self.fval_v[:, m1] - model_values_v[:, m1]
soln = sp_linalg.lu_solve((self.lu, self.piv), rhs)
self.model_const_term_v[m1] += soln[0]
self.gqv[:, m1] += soln[1:] # first term is constant, rest is gradient term
return True # flag ok
except np.linalg.LinAlgError:
self.gqv = None
return False # flag error
except ValueError: # happens when LU decomposition has Inf or NaN
self.gqv = None
return False # flag error
def factorise_LU(self):
if not self.lu_current:
Wmat = self.build_interp_matrix()
self.lu, self.piv = sp_linalg.lu_factor(Wmat) # LU has L and U parts, piv indicates row swaps for pivoting
self.lu_current = True
return
def solve_LU(self, rhs):
# If lu_current, use that, otherwise revert to generic solver
if self.lu_current:
if self.EXACT_CONST_TERM:
return sp_linalg.lu_solve((self.lu, self.piv), rhs) # only get gradient (no const term)
else:
return sp_linalg.lu_solve((self.lu, self.piv), rhs)[1:] # only return gradient (1st term is constant)
else:
logging.warning("model.solve_LU not using factorisation")
Wmat = self.build_interp_matrix()
if self.EXACT_CONST_TERM:
return np.linalg.solve(Wmat, rhs) # only get gradient (no const term)
else:
return np.linalg.solve(Wmat, rhs)[1:] # only return gradient (1st term is constant)
def get_final_results(self):
# Called when about to exit BOBYQB
# Return x and fval for optimal point (either from xsave+fsave or kopt)
if self.fsave is None or self.fval_opt() <= self.fsave: # optimal has changed since xsave+fsave were last set
x = self.x_within_bounds(k=self.kopt)
f = self.fval_opt()
else:
x = self.xsave
f = self.fsave
return x, f
def build_full_model(self):
# Build full least squares objective model from mini-models
# Centred around xopt = xpt[kopt, :]
v_temp = self.fval_v_opt() # m-vector
gqv_xopt = self.gqv_at_xopt() # J^T (transpose of Jacobian) at xopt, rather than xbase
# Use the gradient at xopt to formulate \sum_i (2*f_i \nabla f_i) = 2 J^t m(x_opt)
gopt = np.dot(gqv_xopt, v_temp) # n-vector (gqv = J^T)
# Gauss-Newton part of Hessian
hq = to_upper_triangular_vector(np.dot(gqv_xopt, gqv_xopt.T))
# Apply scaling based on convention for objective - this code uses sumsq(r_i) not 0.5*sumsq(r_i)
gopt = 2.0 * gopt
hq = 2.0 * hq
return gopt, hq
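    # (Added note) This is the standard Gauss-Newton construction: for
    # f(x) = sum_i r_i(x)^2 the exact derivatives are grad f = 2 J^T r and
    # Hess f = 2 J^T J + 2 sum_i r_i(x) Hess r_i(x); the model above keeps only
    # the 2 J^T J term, which is exact when the residuals are linear.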
def build_interp_matrix(self):
if self.EXACT_CONST_TERM:
Wmat = np.zeros((self.n, self.n))
idx_to_use = [k for k in range(self.npt) if k != self.kopt]
for i in range(self.n):
Wmat[i,:] = self.xpt[idx_to_use[i], :] - self.xopt()
else:
Wmat = np.zeros((self.n + 1, self.n + 1))
Wmat[:, 0] = 1.0
Wmat[:, 1:] = self.xpt # size npt * n
return Wmat
def predicted_values(self, d, d_based_at_xopt=True, with_const_term=False):
if d_based_at_xopt:
Jd = np.dot(self.gqv.T, d + self.xopt()) # J^T * d (where Jacobian J = self.gqv^T)
else: # d based at xbase
Jd = np.dot(self.gqv.T, d) # J^T * d (where Jacobian J = self.gqv^T)
return Jd + (self.model_const_term_v if with_const_term else 0.0)
def square_distances_to_xopt(self):
sq_distances = np.zeros((self.npt,))
for k in range(self.npt):
sq_distances[k] = sumsq(self.xpt[k, :] - self.xopt())
return sq_distances
def min_objective_value(self, abs_tol=1.0e-12, rel_tol=1.0e-20):
# Set a minimum value so that if the full objective falls below it, we immediately finish
if self.fbeg is not None:
return max(abs_tol, rel_tol * self.fbeg)
else:
return abs_tol
def sample_objective(m, objfun, x, nf, nx, maxfun, min_obj_value, nsamples=1):
# Sample from objective function several times, keeping track of maxfun and min_obj_value throughout
if m is None:
# Don't initialise v_err_list yet
v_err_list = None
else:
v_err_list = np.zeros((nsamples, m))
f_list = np.zeros((nsamples,))
exit_flag = None
exit_str = None
nsamples_run = 0
for i in range(nsamples):
if nf >= maxfun:
exit_flag = EXIT_MAXFUN_WARNING
exit_str = "Objective has been called MAXFUN times"
break # quit
nf += 1
this_v_err, f_list[i] = eval_least_squares_objective_v2(objfun, x, eval_num=nf, pt_num=nx+1, full_x_thresh=6)
if m is None:
m = len(this_v_err)
v_err_list = np.zeros((nsamples, m))
v_err_list[i, :] = this_v_err
nsamples_run += 1
if f_list[i] <= min_obj_value:
# Force model.get_final_results() to return this new point if it's better than xopt, then quit
exit_flag = EXIT_SUCCESS
exit_str = "Objective is sufficiently small"
break # quit
return v_err_list, f_list, nf, nx+1, nsamples_run, exit_flag, exit_str
def build_initial_set(objfun, x0, xl, xu, rhobeg, maxfun, nsamples, nf_so_far, nx_so_far, ndirs_initial, nruns_so_far,
m=None, random_initial_directions=False):
n = np.size(x0)
npt = n + 1
if m is not None:
# Initialise model (sets x0 as base point and xpt = zeros, so xpt[0,:] = x0)
model = Model(n, m, npt, x0, xl, xu)
model.kopt = 0
minval = model.min_objective_value()
else:
# If we don't yet have m, wait until we have done a function evaluation before initialising model
model = None
minval = -1.0
    assert 1 <= ndirs_initial < np.size(x0)+1, "build_initial_set: must have 1 <= ndirs_initial < n+1"
nx = nx_so_far
nf = nf_so_far
# For calling nsamples:
delta = rhobeg
rho = rhobeg
current_iter = 0
# Evaluate at initial point (also gets us m in the first run through)
nsamples_to_use = nsamples(delta, rho, current_iter, nruns_so_far, SCEN_PRELIM)
v_err_list, f_list, nf, nx, nsamples_run, exit_flag, exit_str = sample_objective(m, objfun, x0,
nf, nx, maxfun,
minval,
nsamples=nsamples_to_use)
# If we have just learned m, initialise model (sets x0 as base point and xpt = zeros, so xpt[0,:] = x0)
if model is None:
# Now we know m = v_err_list.shape[1]
model = Model(n, v_err_list.shape[1], npt, x0, xl, xu)
model.kopt = 0
f0 = sumsq(np.mean(v_err_list[:nsamples_run, :], axis=0)) # estimate actual objective value
# Handle exit conditions (f < min obj value or maxfun reached)
if exit_flag is not None: # then exit_str is also set
if nsamples_run > 0:
fmin = np.min(f_list[:nsamples_run])
if model.fsave is None or fmin < model.fsave:
model.xsave = x0
model.fsave = fmin
return_to_new_tr_iteration = False # return and quit
return model, nf, nx, return_to_new_tr_iteration, exit_flag, exit_str
# Otherwise, add new results (increments model.npt_so_far)
model.update_point(0, model.xpt[0, :], v_err_list[0, :], f_list[0])
for i in range(1, nsamples_run):
model.add_point_resample(0, v_err_list[i, :]) # add new info
# Add results of objective evaluation at x0
model.fbeg = f0
model.xsave = x0.copy()
model.fsave = f0
# Build initial sample set either using random orthogonal directions, or coordinate directions
if random_initial_directions:
# Get ndirs_initial random orthogonal directions
A = np.random.randn(n, ndirs_initial) # Standard Gaussian n*ndirs_initial
Q = np.linalg.qr(A)[0] # Q is n*ndirs_initial with orthonormal columns
# Now add the random directions
for ndirns in range(ndirs_initial):
dirn = Q[:, ndirns]
# Scale direction to ensure the new point lies within initial trust region, satisfies constraints
scale_factor = rhobeg / np.linalg.norm(dirn)
for j in range(n):
if dirn[j] < 0.0:
scale_factor = min(scale_factor, model.sl[j] / dirn[j])
elif dirn[j] > 0.0:
scale_factor = min(scale_factor, model.su[j] / dirn[j])
model.xpt[1 + ndirns, :] = scale_factor * dirn
else:
at_upper_boundary = (model.su < 0.01 * rhobeg) # su = xu - x0, should be +ve, actually > rhobeg
for k in range(ndirs_initial):
step_size = (rhobeg if not at_upper_boundary[k] else -rhobeg)
model.xpt[k+1, k] = step_size
# Evaluate objective at each point in the initial sample set
for k in range(1, ndirs_initial):
x = model.x_within_bounds(k=k)
nsamples_to_use = nsamples(delta, rho, current_iter, nruns_so_far, SCEN_PRELIM)
v_err_list, f_list, nf, nx, nsamples_run, exit_flag, exit_str = sample_objective(model.m, objfun, x,
nf, nx, maxfun,
model.min_objective_value(),
nsamples=nsamples_to_use)
# f = sumsq(np.mean(v_err_list[:nsamples_run, :], axis=0)) # estimate actual objective value
# Handle exit conditions (f < min obj value or maxfun reached)
if exit_flag is not None: # then exit_str is also set
if nsamples_run > 0:
fmin = np.min(f_list[:nsamples_run])
if model.fsave is None or fmin < model.fsave:
model.xsave = x
model.fsave = fmin
return_to_new_tr_iteration = False # return and quit
return model, nf, nx, return_to_new_tr_iteration, exit_flag, exit_str
# Otherwise, add new results (increments model.npt_so_far)
model.update_point(k, model.xpt[k, :], v_err_list[0, :], f_list[0])
for i in range(1, nsamples_run):
model.add_point_resample(k, v_err_list[i, :]) # add new info
return_to_new_tr_iteration = True # return and continue
exit_flag = None
exit_str = None
return model, nf, nx, return_to_new_tr_iteration, exit_flag, exit_str
def get_new_orthogonal_directions(model, adelt, num_steps=1):
# Step from xopt along a random direction orthogonal to other yt (or multiple mutually orthogonal steps)
    for i in range(20):  # allow several tries, in case we choose a point in the subspace of (yt-xk) [very unlucky]
A = np.random.randn(model.n, num_steps)
# (modified) Gram-Schmidt to orthogonalise
for k in range(min(model.npt_so_far, model.npt)):
if k == model.kopt:
continue
yk = model.xpt[k,:] - model.xopt()
for j in range(num_steps):
A[:,j] = A[:,j] - (np.dot(A[:,j], yk) / np.dot(yk, yk)) * yk
# continue if every column sufficiently large
all_cols_ok = True
for j in range(num_steps):
if np.linalg.norm(A[:,j]) < 1e-8:
all_cols_ok = False
if all_cols_ok:
break
# Scale appropriately so within bounds and ||d|| <= adelt
Q = np.linalg.qr(A)[0] # Q is n*ndirs with orthonormal columns
for j in range(num_steps):
scale_factor = adelt / np.linalg.norm(Q[:,j])
for i in range(model.n):
if Q[i,j] < 0.0:
scale_factor = min(scale_factor, (model.sl[i] - model.xopt()[i]) / Q[i,j])
elif Q[i,j] > 0.0:
scale_factor = min(scale_factor, (model.su[i] - model.xopt()[i]) / Q[i,j])
Q[:,j] = Q[:,j] * scale_factor
# Finished!
return Q
def altmov_wrapper(model, knew, adelt):
model.factorise_LU()
# First need to get knew-th column of H matrix
if model.EXACT_CONST_TERM:
if knew == model.kopt:
ek = -np.ones((model.n, )) # matrix based on (y-xk), so different geom structure for kopt
else:
ek = np.zeros((model.n, ))
if knew < model.kopt:
ek[knew] = 1.0
else:
ek[knew - 1] = 1.0
H_knew = model.solve_LU(ek)
else:
ek = np.zeros((model.n + 1,))
ek[knew] = 1.0
H_knew = model.solve_LU(ek)
xnew, xalt, cauchy, abs_denom = altmov(model.xpt, model.sl, model.su, model.kopt,
model.xopt(), knew, adelt, H_knew)
# abs_denom is Lagrange_knew evaluated at xnew
return xnew, xalt, cauchy, abs_denom
def choose_knew(model, delta, xnew, skip_kopt=True):
# in model, uses: n, npt, xpt, kopt/xopt, build_interp_matrix()
# model unchanged by this method
    # Criterion to maximise: max(1, ||yt-xk||^4/Delta^4) * abs(Lagrange_t(xnew))
# skip_kopt determines whether to check t=kopt as a possible candidate or not
model.factorise_LU() # Prep for linear solves
delsq = delta ** 2
scaden = -1.0
    knew = None  # NB: may remain None only if the loop below never improves scaden
try:
for k in range(model.npt):
if skip_kopt and k == model.kopt:
continue # next k in this inner loop
if model.EXACT_CONST_TERM:
if k == model.kopt:
ek = -np.ones((model.n,)) # matrix based on (y-xk), so different geom structure for kopt
else:
ek = np.zeros((model.n, ))
if k < model.kopt:
ek[k] = 1.0
else:
ek[k-1] = 1.0
Hk = model.solve_LU(ek)
else:
ek = np.zeros((model.n + 1,))
ek[k] = 1.0
Hk = model.solve_LU(ek) # k-th column of H, except 1st entry (i.e. Lagrange polynomial gradient)
lagrange_k_at_d = 1.0 + np.dot(xnew-model.xpt[k, :], Hk)
distsq = sumsq(model.xpt[k, :] - model.xopt())
temp = max(1.0, (distsq / delsq) ** 2)
if temp * abs(lagrange_k_at_d) > scaden:
scaden = temp * abs(lagrange_k_at_d)
knew = k
linalg_error = False
except np.linalg.LinAlgError:
linalg_error = True
return knew, linalg_error
def trust_region_subproblem_least_squares(model, delta):
# in model, uses: n, npt, xpt, kopt/xopt, sl, su, build_full_model()
# model unchanged by this method
# Build model for full least squares objectives
gopt, hq = model.build_full_model()
# Call original BOBYQA trsbox function
d, gnew, crvmin = trsbox(model.xopt(), gopt, hq, model.sl, model.su, delta)
return d, gopt, hq, gnew, crvmin
def done_with_current_rho(model, current_iter, last_successful_iter, rho, diffs, xnew, gnew, hq, crvmin):
# in model, uses: n, sl, su
# model unchanged by this method
if current_iter <= last_successful_iter + 2:
return False
errbig = max(diffs)
frhosq = 0.125 * rho ** 2
if crvmin > 0.0 and errbig > frhosq * crvmin:
return False
bdtol = errbig / rho
for j in range(model.n):
bdtest = bdtol
if xnew[j] == model.sl[j]:
bdtest = gnew[j]
if xnew[j] == model.su[j]:
bdtest = -gnew[j]
if bdtest < bdtol:
curv = get_hessian_element(model.n, hq, j, j) # curv = Hessian(j, j)
bdtest += 0.5 * curv * rho
if bdtest < bdtol:
return False
return True
def reduce_rho(old_rho, rhoend):
ratio = old_rho/rhoend
if ratio <= 16.0:
new_rho = rhoend
elif ratio <= 250.0:
new_rho = sqrt(ratio)*rhoend
else:
new_rho = 0.1*old_rho
delta = max(0.5*old_rho, new_rho)
return delta, new_rho
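# Worked example of the schedule above with rhoend = 1e-8:
#   old_rho = 1e-4 -> ratio = 1e4, > 250       -> new_rho = 0.1*old_rho = 1e-5
#   old_rho = 1e-6 -> ratio = 1e2, in (16,250] -> new_rho = sqrt(ratio)*rhoend = 1e-7
#   old_rho = 1e-7 -> ratio = 10,  <= 16       -> new_rho = rhoend = 1e-8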
def check_and_fix_geometry(model, objfun, distsq, delta, rho, dnorm, diffs, nf, nx, current_iter, last_successful_iter,
maxfun, nsamples, rounding_error_const, nruns_so_far, update_delta=True):
# [Fortran label 650]
# If any xpt more than distsq away from xopt, fix geometry
knew_tmp, distsq_tmp = get_vector_max(all_square_distances(model.xpt, model.xopt()))
if distsq_tmp > distsq: # fix geometry and quit
knew = knew_tmp
distsq = distsq_tmp
dist = sqrt(distsq)
if update_delta: # optional
delta = max(min(0.1 * delta, 0.5 * dist), 1.5 * rho) # use 0.5*dist, within range [0.1*delta, 1.5*rho]
adelt = max(min(0.1 * dist, delta), rho)
if adelt ** 2 <= rounding_error_const * sumsq(model.xopt()):
model.shift_base(model.xopt())
model, nf, nx, last_successful_iter, diffs, return_to_new_tr_iteration, exit_flag, exit_str \
= fix_geometry(model, objfun, knew, delta, adelt, rho, dnorm, diffs, nf, nx, current_iter,
last_successful_iter, maxfun, nsamples, nruns_so_far)
return model, delta, nf, nx, last_successful_iter, diffs, return_to_new_tr_iteration, exit_flag, exit_str
else:
# Do nothing, just quit
# return_to_new_tr_iteration = None when didn't fix geometry
return model, delta, nf, nx, last_successful_iter, diffs, None, None, None
def fix_geometry(model, objfun, knew, delta, adelt, rho, dnorm, diffs, nf, nx, current_iter, last_successful_iter,
maxfun, nsamples, nruns_so_far):
# in model, uses: n, npt, xpt, sl, su, kopt/xopt, build_interp_metrix, and others
# model is changed by this function: gqv from interp_mini_models, and others
try:
xnew, xalt, cauchy, denom = altmov_wrapper(model, knew, adelt)
except np.linalg.LinAlgError:
exit_flag = EXIT_LINALG_ERROR
exit_str = "Singular matrix encountered in ALTMOV"
return_to_new_tr_iteration = False # return and quit
return model, nf, nx, last_successful_iter, diffs, return_to_new_tr_iteration, exit_flag, exit_str
if xnew is None: # issue with stpsav occurred, quit DFOGN
exit_flag = EXIT_ALTMOV_MEMORY_ERROR
exit_str = "Error in ALTMOV - stpsav undefined"
return_to_new_tr_iteration = False # return and quit
return model, nf, nx, last_successful_iter, diffs, return_to_new_tr_iteration, exit_flag, exit_str
if denom < cauchy and cauchy > 0.0:
xnew = xalt.copy()
d = xnew - model.xopt()
# [Fortran label 360]
x = model.x_within_bounds(x=xnew)
nsamples_to_use = nsamples(delta, rho, current_iter, nruns_so_far, SCEN_GEOM_UPDATE)
v_err_list, f_list, nf, nx, nsamples_run, exit_flag, exit_str = sample_objective(model.m, objfun, x, nf, nx, maxfun,
model.min_objective_value(),
nsamples=nsamples_to_use)
# Handle exit conditions (f < min obj value or maxfun reached)
if exit_flag is not None: # then exit_str is also set
if nsamples_run > 0:
fmin = np.min(f_list[:nsamples_run])
if fmin < model.fsave:
model.xsave = x
model.fsave = fmin
return_to_new_tr_iteration = False # return and quit
return model, nf, nx, last_successful_iter, diffs, return_to_new_tr_iteration, exit_flag, exit_str
# Otherwise, add new results
model.update_point(knew, xnew, v_err_list[0, :], f_list[0]) # increments model.npt_so_far, if still growing
for i in range(1, nsamples_run):
model.add_point_resample(knew, v_err_list[i, :]) # add new info
# Estimate actual reduction to add to diffs vector
f = sumsq(np.mean(v_err_list[:nsamples_run, :], axis=0)) # estimate actual objective value
# Use the quadratic model to predict the change in F due to the step D,
# and set DIFF to the error of this prediction.
gopt, hq = model.build_full_model()
if gopt is None: # Use this to indicate linalg error
if f < model.fval_opt():
# Force model.get_final_results() to return this new point if it's better than xopt, then quit
model.xsave = x
model.fsave = f
exit_flag = EXIT_LINALG_ERROR
exit_str = "Singular matrix encountered in FIX_GEOMETRY (full model interpolation step)"
return_to_new_tr_iteration = False # return and quit
return model, nf, nx, last_successful_iter, diffs, return_to_new_tr_iteration, exit_flag, exit_str
pred_reduction = - calculate_model_value(gopt, hq, d)
actual_reduction = model.fval_opt() - f
diffs = [abs(pred_reduction - actual_reduction), diffs[0], diffs[1]]
if dnorm > rho:
last_successful_iter = current_iter
exit_flag = None
exit_str = None
return_to_new_tr_iteration = True # return and start new trust region iteration (label 60)
return model, nf, nx, last_successful_iter, diffs, return_to_new_tr_iteration, exit_flag, exit_str
def dfogn_main(objfun, x0, xl, xu, rhobeg, rhoend, maxfun, nsamples, m=None, delta_scale_for_new_dirns_when_growing=1.0,
use_random_initial_directions=False, ndirs_initial=None, num_geom_steps_when_growing=1, nf_so_far=0,
nx_so_far=0, nruns_so_far=0):
exit_flag = None
exit_str = None
# One variable in BOBYQB depends on which code form we are using
if zhang_code_structure:
rounding_error_const = 0.1 # Zhang code
else:
rounding_error_const = 1.0e-3 # BOBYQA
###########################################################
# Set up initial interpolation set
###########################################################
# It shouldn't ever happen, but make sure ndirs_initial is not None
if ndirs_initial is None:
ndirs_initial = np.size(x0)
model, nf, nx, return_to_new_tr_iteration, exit_flag, exit_str = \
build_initial_set(objfun, x0, xl, xu, rhobeg, maxfun, nsamples, nf_so_far, nx_so_far, ndirs_initial,
nruns_so_far, m=m, random_initial_directions=use_random_initial_directions)
if not return_to_new_tr_iteration:
x, f = model.get_final_results()
return x, f, nf, nx, exit_flag, exit_str, model.m
###########################################################
# Set other variables before begin iterations
###########################################################
finished_main_loop = False
(rho, delta) = (rhobeg, rhobeg)
diffs = [0.0, 0.0, 0.0] # (diffa, diffb, diffc) in Fortran code, used in done_with_current_rho()
###########################################################
# Start of main loop [Fortran label 60]
###########################################################
current_iter = -1
last_successful_iter = 0
while not finished_main_loop:
current_iter += 1
logging.debug("Iter %g (last successful %g) with delta = %g and rho = %g" % (
current_iter, last_successful_iter, delta, rho))
# Interpolate each mini-model
interp_ok = model.interpolate_mini_models()
if not interp_ok:
exit_flag = EXIT_LINALG_ERROR
exit_str = "Singular matrix in mini-model interpolation (main loop)"
finished_main_loop = True
break # quit
# Solve trust region subproblem to get tentative step d
# Model for full least squares objective is given by (gopt, hq)
d, gopt, hq, gnew, crvmin = trust_region_subproblem_least_squares(model, delta)
logging.debug("Trust region step is d = " + str(d))
xnew = model.xopt() + d
dsq = sumsq(d)
dnorm = min(delta, sqrt(dsq))
if dnorm < 0.5 * rho and model.npt_so_far < model.n + 1:
# Failed TR step during initial phase - add a point and see if that helps
logging.debug("Failed trust region step during growing phase - adding new directions")
dnew_matrix = get_new_orthogonal_directions(model, delta_scale_for_new_dirns_when_growing * delta,
num_steps=num_geom_steps_when_growing)
break_main_loop = False # the internal breaks only quit this inner loop!
for j in range(num_geom_steps_when_growing):
xnew = model.xopt() + dnew_matrix[:, j]
logging.debug("Growing: compulsory geometry improving step xnew = %s" % str(xnew))
x = model.x_within_bounds(x=xnew)
nsamples_to_use = nsamples(delta, rho, current_iter, nruns_so_far, SCEN_GROWING_NEW_DIRECTION)
v_err_list, f_list, nf, nx, nsamples_run, exit_flag, exit_str = \
sample_objective(model.m, objfun, x, nf, nx, maxfun, model.min_objective_value(),
nsamples=nsamples_to_use)
# Handle exit conditions (f < min obj value or maxfun reached)
if exit_flag is not None: # then exit_str is also set
if nsamples_run > 0:
fmin = np.min(f_list[:nsamples_run])
if fmin < model.fsave:
model.xsave = x
model.fsave = fmin
break_main_loop = True
break # quit inner loop over j, then quit main iteration
if model.npt_so_far < model.npt: # still growing
kmin = model.npt_so_far
logging.debug("Updating point kmin=%g, since still growing" % kmin)
else: # full set
kmin, linalg_error = choose_knew(model, delta, xnew, skip_kopt=True)
if linalg_error:
exit_flag = EXIT_LINALG_ERROR
exit_str = "Singular matrix when finding kmin (in main loop)"
break_main_loop = True
break # quit inner loop over j, then quit main iteration
logging.debug("Updating point kmin=%g, chosen in usual way" % kmin)
# Otherwise, add new results, incrementing model.npt_so_far (if still growing)
model.update_point(kmin, xnew, v_err_list[0, :], f_list[0])
for i in range(1, nsamples_run):
model.add_point_resample(kmin, v_err_list[i, :]) # add new info
# Finished adding new directions - restart main trust region iteration (if no errors encountered)
if break_main_loop:
finished_main_loop = True
break # quit
else:
finished_main_loop = False
continue # next trust region step
elif dnorm < 0.5 * rho:
###################
# Start failed TR step
###################
logging.debug("Failed trust region step (main phase)")
if not done_with_current_rho(model, current_iter, last_successful_iter, rho, diffs, xnew, gnew, hq,
crvmin):
# [Fortran label 650]
distsq = (10.0 * rho) ** 2
model, delta, nf, nx, last_successful_iter, diffs, return_to_new_tr_iteration, geom_exit_flag, geom_exit_str = \
check_and_fix_geometry(model, objfun, distsq, delta, rho, dnorm, diffs, nf, nx, current_iter,
last_successful_iter, maxfun, nsamples, rounding_error_const, nruns_so_far, update_delta=True)
if return_to_new_tr_iteration is not None: # i.e. if we did actually fix geometry
if return_to_new_tr_iteration:
finished_main_loop = False
continue # next trust region step
else: # quit
exit_flag = geom_exit_flag
exit_str = geom_exit_str
finished_main_loop = True
break # quit
# If we didn't fix geometry, reduce rho as below
# otherwise, if we are done with current rho, reduce rho as below
# Reduce rho and continue [Fortran label 680]
if rho > rhoend:
delta, rho = reduce_rho(rho, rhoend)
logging.info("New rho = %g after %i function evaluations" % (rho, nf))
logging.debug("Best so far: f = %.15g at x = " % (model.fval_opt()) + str(model.xbase + model.xopt()))
last_successful_iter = current_iter
finished_main_loop = False
continue # next trust region step
else:
# Cannot reduce rho, so check xnew and quit
x = model.x_within_bounds(x=xnew)
nsamples_to_use = nsamples(delta, rho, current_iter, nruns_so_far, SCEN_RHOEND_REACHED)
v_err_list, f_list, nf, nx, nsamples_run, exit_flag, exit_str = \
sample_objective(model.m, objfun, x, nf, nx, maxfun, model.min_objective_value(),
nsamples=nsamples_to_use)
# Handle exit conditions (f < min obj value or maxfun reached)
if exit_flag is not None: # then exit_str is also set
if nsamples_run > 0:
fmin = np.min(f_list[:nsamples_run])
if fmin < model.fsave:
model.xsave = x
model.fsave = fmin
finished_main_loop = True
break # quit
# Force model.get_final_results() to return this new point if it's better than xopt, then quit
model.xsave = x
model.fsave = np.min(f_list[:nsamples_run])
exit_flag = EXIT_SUCCESS
exit_str = "rho has reached rhoend"
finished_main_loop = True
break # quit
###################
# End failed TR step
###################
else:
###################
# Start successful TR step
###################
logging.debug("Successful trust region step")
# Severe cancellation is likely to occur if XOPT is too far from XBASE. [Fortran label 90]
if dsq <= rounding_error_const * sumsq(model.xopt()):
model.shift_base(model.xopt()) # includes a re-factorisation of the interpolation matrix
xnew = xnew - model.xopt()
# Set KNEW to the index of the next interpolation point to be deleted to make room for a trust
# region step. Again RESCUE may be called if rounding errors have damaged
# the chosen denominator, which is the reason for attempting to select
# KNEW before calculating the next value of the objective function.
knew, linalg_error = choose_knew(model, delta, xnew, skip_kopt=True)
if linalg_error:
exit_flag = EXIT_LINALG_ERROR
exit_str = "Singular matrix when finding knew (in main loop)"
finished_main_loop = True
break # quit
# Calculate the value of the objective function at XBASE+XNEW, unless
# the limit on the number of calculations of F has been reached.
# [Fortran label 360, with ntrits > 0]
x = model.x_within_bounds(x=xnew)
nsamples_to_use = nsamples(delta, rho, current_iter, nruns_so_far, SCEN_TR_STEP)
v_err_list, f_list, nf, nx, nsamples_run, exit_flag, exit_str = \
sample_objective(model.m, objfun, x, nf, nx, maxfun, model.min_objective_value(),
nsamples=nsamples_to_use)
# Estimate f in order to compute 'actual reduction'
f = sumsq(np.mean(v_err_list[:nsamples_run, :], axis=0)) # estimate actual objective value
# Handle exit conditions (f < min obj value or maxfun reached)
if exit_flag is not None: # then exit_str is also set
if nsamples_run > 0:
fmin = np.min(f_list[:nsamples_run])
if fmin < model.fsave:
model.xsave = x
model.fsave = fmin
finished_main_loop = True
break # quit
# Use the quadratic model to predict the change in F due to the step D,
# and set DIFF to the error of this prediction.
pred_reduction = - calculate_model_value(gopt, hq, d)
actual_reduction = model.fval_opt() - f
diffs = [abs(pred_reduction - actual_reduction), diffs[0], diffs[1]]
if dnorm > rho:
last_successful_iter = current_iter
if pred_reduction < 0.0:
exit_flag = EXIT_TR_INCREASE_ERROR
exit_str = "Trust region step gave model increase"
finished_main_loop = True
break # quit
# Pick the next value of DELTA after a trust region step.
# Update trust region radius
ratio = actual_reduction / pred_reduction
if ratio <= 0.1:
delta = min(0.5 * delta, dnorm)
elif ratio <= 0.7:
delta = max(0.5 * delta, dnorm)
else: # (ratio > 0.7) Different updates depending on which code version we're using
if zhang_code_structure:
delta = min(max(2.0 * delta, 4.0 * dnorm), 1.0e10) # DFBOLS code version
elif bbqtr:
delta = max(0.5 * delta, 2.0 * dnorm) # BOBYQA version
else:
delta = max(delta, 2.0 * dnorm) # Zhang paper version
if delta <= 1.5 * rho: # cap trust region radius at rho
delta = rho
logging.debug("New delta = %g (rho = %g) from ratio %g" % (delta, rho, ratio))
# Recalculate KNEW and DENOM if the new F is less than FOPT.
if actual_reduction > 0.0: # f < model.fval_opt()
knew, linalg_error = choose_knew(model, delta, xnew, skip_kopt=False)
if linalg_error:
exit_flag = EXIT_LINALG_ERROR
exit_str = "Singular matrix when finding knew (in main loop, second time)"
finished_main_loop = True
break # quit
# Updating...
logging.debug("Updating with knew = %i" % knew)
model.update_point(knew, xnew, v_err_list[0, :], f_list[0]) # increments model.npt_so_far, if still growing
for i in range(1, nsamples_run):
model.add_point_resample(knew, v_err_list[i, :]) # add new info
# When growing and don't yet have a full set of directions, we always need a geometry improving step
if model.npt_so_far <= model.n + 1: # even after npt function evaluations, still one direction short
dnew_matrix = get_new_orthogonal_directions(model, delta_scale_for_new_dirns_when_growing * delta,
num_steps=num_geom_steps_when_growing)
# breaks below only stop num_geom_steps_when_growing loop, check if need to quit main loop too
break_main_loop = False
for j in range(num_geom_steps_when_growing):
xnew = model.xopt() + dnew_matrix[:, j]
logging.debug("Growing: compulsory geometry improving step xnew = %s" % str(xnew))
x = model.x_within_bounds(x=xnew)
nsamples_to_use = nsamples(delta, rho, current_iter, nruns_so_far, SCEN_GROWING_NEW_DIRECTION)
v_err_list, f_list, nf, nx, nsamples_run, exit_flag, exit_str = \
sample_objective(model.m, objfun, x, nf, nx, maxfun, model.min_objective_value(),
nsamples=nsamples_to_use)
# Handle exit conditions (f < min obj value or maxfun reached)
if exit_flag is not None: # then exit_str is also set
if nsamples_run > 0:
fmin = np.min(f_list[:nsamples_run])
if fmin < model.fsave:
model.xsave = x
model.fsave = fmin
finished_main_loop = True
break_main_loop = True
break # quit
if model.npt_so_far < model.npt: # still growing
kmin = model.npt_so_far
logging.debug("Updating point kmin=%g, since still growing" % kmin)
else: # full set
kmin, linalg_error = choose_knew(model, delta, xnew, skip_kopt=True)
if linalg_error:
exit_flag = EXIT_LINALG_ERROR
exit_str = "Singular matrix when finding kmin (in main loop)"
finished_main_loop = True
break_main_loop = True
break # quit
logging.debug("Updating point kmin=%g, chosen in usual way" % kmin)
# Otherwise, add new results, incrementing model.npt_so_far (if still growing)
model.update_point(kmin, xnew, v_err_list[0, :], f_list[0])
for i in range(1, nsamples_run):
model.add_point_resample(kmin, v_err_list[i, :]) # add new info
# Finished adding new directions - restart main trust region iteration (if no errors encountered)
if break_main_loop:
finished_main_loop = True
break # quit
# If a trust region step has provided a sufficient decrease in F, then
# branch for another trust region calculation.
if ratio >= 0.1:
finished_main_loop = False
continue # next trust region step
# Alternatively, find out if the interpolation points are close enough
# to the best point so far.
# [Fortran label 650]
distsq = max((2.0 * delta) ** 2, (10.0 * rho) ** 2)
model, delta, nf, nx, last_successful_iter, diffs, return_to_new_tr_iteration, geom_exit_flag, geom_exit_str = \
check_and_fix_geometry(model, objfun, distsq, delta, rho, dnorm, diffs, nf, nx, current_iter,
last_successful_iter, maxfun, nsamples, rounding_error_const, nruns_so_far,
update_delta=False) # don't update delta when ntrits > 0
if return_to_new_tr_iteration is not None: # i.e. if we did actually fix geometry
if return_to_new_tr_iteration:
finished_main_loop = False
continue # next trust region step
else: # quit
exit_flag = geom_exit_flag
exit_str = geom_exit_str
finished_main_loop = True
break # quit
# If we didn't fix geometry, reduce rho [Fortran label 680]
if ratio > 0.0:
finished_main_loop = False
continue # next trust region step
if max(delta, dnorm) > rho:
finished_main_loop = False
continue # next trust region step
# Reduce rho and continue [Fortran label 680]
if rho > rhoend:
delta, rho = reduce_rho(rho, rhoend)
logging.info("New rho = %g after %i function evaluations" % (rho, nf))
logging.debug("Best so far: f = %.15g at x = " % (model.fval_opt()) + str(model.xbase + model.xopt()))
last_successful_iter = current_iter
finished_main_loop = False
continue # next trust region step
else:
# Cannot reduce rho further
exit_flag = EXIT_SUCCESS
exit_str = "rho has reached rhoend"
finished_main_loop = True
break # quit
###################
# End successful TR step
###################
#############################
# End this iteration of main loop - take next TR step
#############################
###########################################################
# End of main loop [Fortran label 720]
###########################################################
x, f = model.get_final_results()
logging.debug("At return from DFOGN, number of function evals = %i" % nf)
logging.debug("Smallest objective value = %.15g at x = " % f + str(x))
return x, f, nf, nx, exit_flag, exit_str, model.m
def dfogn_resampling(objfun, x0, lower=None, upper=None, maxfun=1000, nsamples=None, init_tr_radius=None, rhoend=1e-8,
delta_scale_for_new_dirns_when_growing=1.0, use_random_initial_directions=False,
ndirs_initial='n', num_geom_steps_when_growing=1, use_restarts=True,
max_unsuccessful_restarts=10):
# If bounds not provided, set to something large
xl = (lower if lower is not None else -1.0e20 * np.ones(x0.shape))
xu = (upper if upper is not None else 1.0e20 * np.ones(x0.shape))
# Set default value of rhobeg to something sensible
rhobeg = (init_tr_radius if init_tr_radius is not None else 0.1 * max(np.max(np.abs(x0)), 1.0))
# Set default number of samples to be 1 for every evaluation
if nsamples is None:
        nsamples_to_use = lambda delta, rho, current_iter, nruns_so_far, scenario: 1
else:
nsamples_to_use = nsamples
n = np.size(x0)
assert (rhobeg > 0.0), "rhobeg must be strictly positive"
assert (rhoend > 0.0), "rhoend must be strictly positive"
assert (rhoend < rhobeg), "rhoend must be less than rhobeg"
assert (maxfun > 0), "maxfun must be strictly positive"
assert (np.shape(x0) == (n,)), "x0 must be a vector"
assert (np.shape(x0) == np.shape(xl)), "xl must have same shape as x0"
    assert (np.shape(x0) == np.shape(xu)), "xu must have same shape as x0"
assert (np.all(xu-xl >= 2.0*rhobeg)), "gap between xl and xu must be at least 2*rhobeg"
if maxfun <= n+1:
logging.warning("Warning (maxfun <= n+1): Are you sure your budget is large enough?")
# Parse string arguments: number of geometry steps to take at each growing iteration of main TR loop
n_extra_steps_to_use = None
if type(num_geom_steps_when_growing) == int:
n_extra_steps_to_use = num_geom_steps_when_growing
elif type(num_geom_steps_when_growing) == str:
if num_geom_steps_when_growing == 'tenthn':
n_extra_steps_to_use = int(x0.size // 10)
elif num_geom_steps_when_growing == 'fifthn':
n_extra_steps_to_use = int(x0.size // 5)
elif num_geom_steps_when_growing == 'qtrn':
n_extra_steps_to_use = int(x0.size // 4)
assert n_extra_steps_to_use is not None, "Unknown num_geom_steps_when_growing: " + str(
num_geom_steps_when_growing)
n_extra_steps_to_use = max(n_extra_steps_to_use, 1) # floor at 1
# Parse string arguments: number of initial directions to add before beginning main TR loop
ndirs_initial_val = None
if type(ndirs_initial) == int:
ndirs_initial_val = ndirs_initial
elif type(ndirs_initial) == str:
if ndirs_initial == 'tenthn':
ndirs_initial_val = int(n // 10)
elif ndirs_initial == 'fifthn':
ndirs_initial_val = int(n // 5)
elif ndirs_initial == 'qtrn':
ndirs_initial_val = int(n // 4)
elif ndirs_initial == 'halfn':
ndirs_initial_val = int(n // 2)
elif ndirs_initial == 'n':
ndirs_initial_val = n
elif ndirs_initial == '2n':
ndirs_initial_val = 2 * n
elif ndirs_initial == 'nsq':
ndirs_initial_val = (n + 1) * (n + 2) // 2 - 1
assert ndirs_initial_val is not None, "Unknown ndirs_initial: " + str(ndirs_initial)
assert ndirs_initial_val == n, "Must have n initial directions (build_interp_matrix assumes this)"
ndirs_initial_val = max(ndirs_initial_val, 1) # floor at 1
# Enforce lower bounds on x0 (ideally with gap of at least rhobeg)
idx = (xl < x0) & (x0 <= xl+rhobeg)
x0[idx] = xl[idx] + rhobeg
idx = (x0 <= xl)
x0[idx] = xl[idx]
# Enforce upper bounds on x0 (ideally with gap of at least rhobeg)
idx = (xu-rhobeg <= x0) & (x0 < xu)
x0[idx] = xu[idx] - rhobeg
idx = (x0 >= xu)
x0[idx] = xu[idx]
# First run
x, f, nf, nx, exit_flag, exit_str, m = \
dfogn_main(objfun, x0, xl, xu, rhobeg, rhoend, maxfun, nsamples_to_use, m=None,
delta_scale_for_new_dirns_when_growing=delta_scale_for_new_dirns_when_growing,
use_random_initial_directions=use_random_initial_directions, ndirs_initial=ndirs_initial_val,
num_geom_steps_when_growing=n_extra_steps_to_use,
nf_so_far=0, nx_so_far=0, nruns_so_far=0)
# Now do repeats
nruns_so_far = 1
reduction_last_run = True # did the last run give us a reduction?
rhobeg_to_use = rhobeg
rhoend_to_use = rhoend
last_successful_run = 1
while use_restarts and nf < maxfun and nruns_so_far - last_successful_run < max_unsuccessful_restarts and \
((exit_flag == EXIT_SUCCESS and 'rho' in exit_str) or exit_flag in DO_RESTART_ERRORS):
if reduction_last_run:
rhobeg_to_use = max(0.1 * max(np.max(np.abs(x)), 1.0), 10 * rhoend_to_use)
rhoend_to_use = 1.0 * rhoend_to_use
else:
# Reduce initial TR radius when things have been going badly
rhobeg_to_use = max(0.5 * rhobeg_to_use, 10 * rhoend_to_use)
logging.info(
"Restarting from finish point (f = %g) after %g function evals; new rhobeg = %g and rhoend = %g" % (
f, nf, rhobeg_to_use, rhoend_to_use))
x2, f2, nf, nx, exit_flag, exit_str, m_tmp = \
dfogn_main(objfun, x, xl, xu, rhobeg_to_use, rhoend_to_use, maxfun, nsamples_to_use, m=m,
delta_scale_for_new_dirns_when_growing=delta_scale_for_new_dirns_when_growing,
use_random_initial_directions=use_random_initial_directions, ndirs_initial=ndirs_initial_val,
num_geom_steps_when_growing=n_extra_steps_to_use,
nf_so_far=nf, nx_so_far=nx, nruns_so_far=nruns_so_far)
nruns_so_far += 1
if f2 < f or np.isnan(f):
logging.info("Successful run with new f = %s compared to old f = %s" % (f2, f))
last_successful_run = nruns_so_far
x = x2
f = f2
reduction_last_run = True
else:
logging.info("Unsuccessful run with new f = %s compared to old f = %s" % (f2, f))
reduction_last_run = False
logging.info("Finished after a total of %g runs" % nruns_so_far)
# Clean up exit_str to have better information:
if exit_flag == EXIT_SUCCESS:
exit_str = "Success: " + exit_str
elif exit_flag == EXIT_MAXFUN_WARNING:
exit_str = "Warning: " + exit_str
elif exit_flag == EXIT_INPUT_ERROR:
exit_str = "Input error: " + exit_str
elif exit_flag == EXIT_TR_INCREASE_ERROR:
exit_str = "Trust region subproblem error: " + exit_str
elif exit_flag == EXIT_LINALG_ERROR:
exit_str = "Linear algebra error: " + exit_str
elif exit_flag == EXIT_ALTMOV_MEMORY_ERROR:
exit_str = "ALTMOV memory error: " + exit_str
else:
exit_str = "Unknown exit flag " + str(exit_flag) + " with message " + exit_str
return x, f, nf, exit_flag, exit_str
| numericalalgorithmsgroup/dfogn | dfogn/dfogn_resampling.py | Python | gpl-3.0 | 58,645 |
# -*- coding: utf-8 -*-
# Copyright (C) Duncan Macleod (2014-2020)
#
# This file is part of GWpy.
#
# GWpy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# GWpy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GWpy. If not, see <http://www.gnu.org/licenses/>.
"""Basic utilities for reading/writing LIGO_LW-format XML files.
All specific unified input/output for class objects should be placed in
an 'io' subdirectory of the containing directory for that class.
"""
import os
import os.path
from contextlib import contextmanager
from functools import wraps
from importlib import import_module
import numpy
try:
from ligo.lw.ligolw import (
ElementError as LigolwElementError,
LIGOLWContentHandler,
)
except ImportError: # no ligo.lw
LigolwElementError = None
LIGOLWContentHandler = None
from .utils import (file_list, FILE_LIKE)
from ..utils.decorators import deprecated_function
__author__ = 'Duncan Macleod <[email protected]>'
# XML elements
XML_SIGNATURE = b'<?xml'
LIGOLW_SIGNATURE = b'<!doctype ligo_lw'
LIGOLW_ELEMENT = b'<ligo_lw>'
# -- hack around TypeError from LIGOTimeGPS(numpy.int32(...)) ----------------
def _ligotimegps(s, ns=0):
"""Catch TypeError and cast `s` and `ns` to `int`
"""
from lal import LIGOTimeGPS
try:
return LIGOTimeGPS(s, ns)
except TypeError:
return LIGOTimeGPS(int(s), int(ns))
@contextmanager
def patch_ligotimegps(module="ligo.lw.lsctables"):
"""Context manager to on-the-fly patch LIGOTimeGPS to accept all int types
"""
module = import_module(module)
orig = module.LIGOTimeGPS
module.LIGOTimeGPS = _ligotimegps
try:
yield
finally:
module.LIGOTimeGPS = orig
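# Example usage (a sketch, file name hypothetical): parse a document whose GPS
# times arrive as numpy integer types without tripping LIGOTimeGPS:
#
#     with patch_ligotimegps():
#         events = read_table("events.xml.gz", tablename="sngl_burst")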
# -- content handling ---------------------------------------------------------
def strip_ilwdchar(_ContentHandler):
"""Wrap a LIGO_LW content handler to swap ilwdchar for int on-the-fly
when reading a document
This is adapted from :func:`ligo.skymap.utils.ilwd`, copyright
Leo Singer (GPL-3.0-or-later).
"""
from ligo.lw.lsctables import TableByName
from ligo.lw.table import (Column, TableStream)
from ligo.lw.types import (FromPyType, ToPyType)
class IlwdMapContentHandler(_ContentHandler):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._idconverter = {}
@wraps(_ContentHandler.startColumn)
def startColumn(self, parent, attrs):
result = super().startColumn(parent, attrs)
# if an old ID type, convert type definition to an int
if result.Type == "ilwd:char":
old_type = ToPyType[result.Type]
def converter(old):
return int(old_type(old))
self._idconverter[(id(parent), result.Name)] = converter
result.Type = FromPyType[int]
try:
validcolumns = TableByName[parent.Name].validcolumns
except KeyError: # parent.Name not in TableByName
return result
if result.Name not in validcolumns:
stripped_column_to_valid_column = {
Column.ColumnName(name): name
for name in validcolumns
}
if result.Name in stripped_column_to_valid_column:
result.setAttribute(
'Name',
stripped_column_to_valid_column[result.Name],
)
return result
@wraps(_ContentHandler.startStream)
def startStream(self, parent, attrs):
result = super().startStream(parent, attrs)
if isinstance(result, TableStream):
loadcolumns = set(parent.columnnames)
if parent.loadcolumns is not None:
loadcolumns &= set(parent.loadcolumns)
pid = id(parent)
result._tokenizer.set_types([
self._idconverter.pop((pid, colname), pytype)
if colname in loadcolumns else None
for pytype, colname in zip(
parent.columnpytypes,
parent.columnnames,
)
])
return result
return IlwdMapContentHandler
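# For example (a sketch of the effect), a row ID stored as the old ilwd:char
# string "process:process_id:10" is loaded by the wrapped handler as the plain
# int 10, and the column's declared Type is rewritten from "ilwd:char" to the
# integer type.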
def _wrap_content_handler(contenthandler):
from ligo.lw.lsctables import use_in
@strip_ilwdchar
@use_in
class ContentHandler(contenthandler):
pass
return ContentHandler
def default_content_handler():
"""Return a standard content handler to read LIGO_LW documents
This handler knows how to parse LSCTables, and automatically converts
old-style ilwdchar ID types to `int`.
Returns
-------
contenthandler : subclass of `ligo.lw.ligolw.LIGOLWContentHandler`
"""
from ligo.lw.ligolw import LIGOLWContentHandler
return _wrap_content_handler(LIGOLWContentHandler)
def get_partial_contenthandler(element):
"""Build a `PartialLIGOLWContentHandler` to read only this element
Parameters
----------
element : `type`, subclass of :class:`~ligo.lw.ligolw.Element`
the element class to be read
Returns
-------
contenthandler : `type`
a subclass of `~ligo.lw.ligolw.PartialLIGOLWContentHandler`
to read only the given `element`
"""
from ligo.lw.ligolw import PartialLIGOLWContentHandler
from ligo.lw.table import Table
if issubclass(element, Table):
def _element_filter(name, attrs):
return element.CheckProperties(name, attrs)
else:
def _element_filter(name, _):
return name == element.tagName
return build_content_handler(PartialLIGOLWContentHandler, _element_filter)
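# Example usage (a sketch, file name hypothetical): parse only sngl_burst
# tables and skip every other element in the document:
#
#     from ligo.lw.lsctables import SnglBurstTable
#     handler = get_partial_contenthandler(SnglBurstTable)
#     xmldoc = read_ligolw("events.xml.gz", contenthandler=handler)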
def get_filtering_contenthandler(element):
"""Build a `FilteringLIGOLWContentHandler` to exclude this element
Parameters
----------
element : `type`, subclass of :class:`~ligo.lw.ligolw.Element`
the element to exclude (and its children)
Returns
-------
contenthandler : `type`
a subclass of `~ligo.lw.ligolw.FilteringLIGOLWContentHandler`
to exclude an element and its children
"""
from ligo.lw.ligolw import FilteringLIGOLWContentHandler
from ligo.lw.table import Table
if issubclass(element, Table):
def _element_filter(name, attrs):
return ~element.CheckProperties(name, attrs)
else:
def _element_filter(name, _):
# pylint: disable=unused-argument
return name != element.tagName
return build_content_handler(
FilteringLIGOLWContentHandler,
_element_filter,
)
def build_content_handler(parent, filter_func):
"""Build a `~xml.sax.handler.ContentHandler` with a given filter
Parameters
----------
parent : `type`, subclass of `xml.sax.handler.ContentHandler`
a class of contenthandler to use
filter_func : `callable`
the filter function to pass to the content handler creation
Returns
-------
contenthandler : subclass of ``parent``
a new content handler that applies the filter function and the
default parsing extras from :func:`_wrap_content_handler`.
"""
class ContentHandler(parent):
# pylint: disable=too-few-public-methods
def __init__(self, document):
super().__init__(document, filter_func)
return _wrap_content_handler(ContentHandler)
# -- reading ------------------------------------------------------------------
def read_ligolw(source, contenthandler=None, **kwargs):
"""Read one or more LIGO_LW format files
Parameters
----------
source : `str`, `file`
the open file or file path to read
contenthandler : `~xml.sax.handler.ContentHandler`, optional
content handler used to parse document
verbose : `bool`, optional
be verbose when reading files, default: `False`
Returns
-------
xmldoc : :class:`~ligo.lw.ligolw.Document`
the document object as parsed from the file(s)
"""
from ligo.lw.ligolw import Document
from ligo.lw import types
from ligo.lw.utils import (load_url, ligolw_add)
# mock ToPyType to link to numpy dtypes
topytype = types.ToPyType.copy()
for key in types.ToPyType:
if key in types.ToNumPyType:
types.ToPyType[key] = numpy.dtype(types.ToNumPyType[key]).type
# set contenthandler
if contenthandler is None:
contenthandler = default_content_handler()
# read one or more files into a single Document
source = file_list(source)
try:
if len(source) == 1:
return load_url(
source[0],
contenthandler=contenthandler,
**kwargs
)
return ligolw_add.ligolw_add(
Document(),
source,
contenthandler=contenthandler,
**kwargs
)
finally: # replace ToPyType
types.ToPyType = topytype
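# Example usage (a sketch, file names hypothetical); a list of files is merged
# into a single Document via ligolw_add:
#
#     xmldoc = read_ligolw("part-0.xml.gz")
#     xmldoc = read_ligolw(["part-0.xml.gz", "part-1.xml.gz"], verbose=True)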
def read_table(
source,
tablename=None,
columns=None,
contenthandler=None,
**kwargs,
):
"""Read a :class:`~ligo.lw.table.Table` from one or more LIGO_LW files
Parameters
----------
source : `Document`, `file`, `str`, `CacheEntry`, `list`
object representing one or more files. One of
- a LIGO_LW :class:`~ligo.lw.ligolw.Document`
- an open `file`
- a `str` pointing to a file path on disk
- a formatted :class:`~lal.utils.CacheEntry` representing one file
- a `list` of `str` file paths or :class:`~lal.utils.CacheEntry`
tablename : `str`
name of the table to read.
columns : `list`, optional
list of column name strings to read, default all.
contenthandler : `~xml.sax.handler.ContentHandler`, optional
SAX content handler for parsing LIGO_LW documents.
**kwargs
other keyword arguments are passed to `~gwpy.io.ligolw.read_ligolw`
Returns
-------
table : :class:`~ligo.lw.table.Table`
`Table` of data
"""
from ligo.lw.ligolw import Document
from ligo.lw import (table, lsctables)
# get content handler to read only this table (if given)
if tablename is not None:
tableclass = lsctables.TableByName[
table.Table.TableName(tablename)
]
if contenthandler is None:
contenthandler = get_partial_contenthandler(tableclass)
# overwrite loading column names to get just what was asked for
_oldcols = tableclass.loadcolumns
if columns is not None:
tableclass.loadcolumns = columns
# read document
if isinstance(source, Document):
xmldoc = source
else:
if contenthandler is None:
contenthandler = default_content_handler()
try:
xmldoc = read_ligolw(
source,
contenthandler=contenthandler,
**kwargs)
finally: # reinstate original set of loading column names
if tablename is not None:
tableclass.loadcolumns = _oldcols
# now find the right table
if tablename is None:
tables = list_tables(xmldoc)
if not tables:
raise ValueError("No tables found in LIGO_LW document(s)")
if len(tables) > 1:
raise ValueError(
"Multiple tables found in LIGO_LW document(s), please specify "
"the table to read via the ``tablename=`` keyword argument. "
"The following tables were found: "
"'{}'".format("', '".join(tables)),
)
tableclass = lsctables.TableByName[table.Table.TableName(tables[0])]
# extract table
return tableclass.get_table(xmldoc)
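# Example usage (a sketch): read two columns of the sngl_burst table from a
# single file (file name as in the list_tables example below):
#
#     events = read_table(
#         "H1-LDAS_STRAIN-968654552-10.xml.gz",
#         tablename="sngl_burst",
#         columns=["peak_time", "snr"],
#     )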
# -- writing ------------------------------------------------------------------
def open_xmldoc(fobj, contenthandler=None, **kwargs):
"""Try and open an existing LIGO_LW-format file, or create a new Document
Parameters
----------
fobj : `str`, `file`
file path or open file object to read
contenthandler : `~xml.sax.handler.ContentHandler`, optional
the content handler with which to parse the document, if not given
a default handler will be created using
:func:`default_content_handler`.
**kwargs
other keyword arguments to pass to
:func:`~ligo.lw.utils.load_fileobj` as appropriate
Returns
    -------
xmldoc : :class:`~ligo.lw.ligolw.Document`
either the `Document` as parsed from an existing file, or a new, empty
`Document`
"""
from ligo.lw.ligolw import Document
from ligo.lw.utils import load_fileobj
if contenthandler is None:
contenthandler = default_content_handler()
# read from an existing Path/filename
if not isinstance(fobj, FILE_LIKE):
try:
with open(fobj, "rb") as fobj2:
return open_xmldoc(
fobj2,
contenthandler=contenthandler,
**kwargs,
)
except (OSError, IOError):
# or just create a new Document
return Document()
return load_fileobj(
fobj,
contenthandler=contenthandler,
**kwargs,
)
def get_ligolw_element(xmldoc):
"""Find an existing <LIGO_LW> element in this XML Document
"""
from ligo.lw.ligolw import (LIGO_LW, WalkChildren)
if isinstance(xmldoc, LIGO_LW):
return xmldoc
for elem in WalkChildren(xmldoc):
if isinstance(elem, LIGO_LW):
return elem
raise ValueError("Cannot find LIGO_LW element in XML Document")
def write_tables_to_document(xmldoc, tables, overwrite=False):
"""Write the given LIGO_LW table into a :class:`Document`
Parameters
----------
xmldoc : :class:`~ligo.lw.ligolw.Document`
the document to write into
tables : `list` of :class:`~ligo.lw.table.Table`
the set of tables to write
overwrite : `bool`, optional, default: `False`
if `True`, delete an existing instance of the table type, otherwise
append new rows
"""
from ligo.lw.ligolw import LIGO_LW
from ligo.lw import lsctables
# find or create LIGO_LW tag
try:
llw = get_ligolw_element(xmldoc)
except ValueError:
llw = LIGO_LW()
xmldoc.appendChild(llw)
for table in tables:
try: # append new data to existing table
old = lsctables.TableByName[
table.TableName(table.Name)].get_table(xmldoc)
except ValueError: # or create a new table
llw.appendChild(table)
else:
if overwrite:
llw.removeChild(old)
old.unlink()
llw.appendChild(table)
else:
old.extend(table)
return xmldoc
def write_tables(
target,
tables,
append=False,
overwrite=False,
contenthandler=None,
**kwargs,
):
"""Write an LIGO_LW table to file
Parameters
----------
target : `str`, `file`, :class:`~ligo.lw.ligolw.Document`
the file or document to write into
tables : `list`, `tuple` of :class:`~ligo.lw.table.Table`
the tables to write
append : `bool`, optional, default: `False`
if `True`, append to an existing file/table, otherwise `overwrite`
overwrite : `bool`, optional, default: `False`
if `True`, delete an existing instance of the table type, otherwise
append new rows
contenthandler : `~xml.sax.handler.ContentHandler`, optional
the content handler with which to parse the document, if not given
a default handler will be created using
:func:`default_content_handler`.
**kwargs
other keyword arguments to pass to
:func:`~ligo.lw.utils.load_fileobj` as appropriate
"""
from ligo.lw.ligolw import Document, LIGO_LW
from ligo.lw import utils as ligolw_utils
# allow writing directly to XML
if isinstance(target, (Document, LIGO_LW)):
xmldoc = target
# open existing document, if possible
elif append:
if contenthandler is None:
contenthandler = default_content_handler()
xmldoc = open_xmldoc(
target,
contenthandler=contenthandler,
)
# fail on existing document and not overwriting
elif (
not overwrite
and isinstance(target, (str, os.PathLike))
and os.path.exists(target)
):
raise IOError(f"File exists: {target}")
else: # or create a new document
xmldoc = Document()
# convert table to format
write_tables_to_document(xmldoc, tables, overwrite=overwrite)
# find writer function and target filename
if isinstance(target, FILE_LIKE):
writer = ligolw_utils.write_fileobj
name = target.name
else:
writer = ligolw_utils.write_filename
name = target = str(target)
# handle gzip compression kwargs
if name.endswith('.gz'):
kwargs.setdefault('compress', 'gz')
# write XML
writer(xmldoc, target, **kwargs)
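# Editor's sketch (illustrative, not part of the original module): a minimal
# write_tables call, assuming the ``ligo.lw`` package is available; the table
# construction below is an assumption for demonstration only.
#     from ligo.lw.lsctables import New, SnglBurstTable
#     tbl = New(SnglBurstTable)
#     write_tables("events.xml.gz", [tbl])  # '.gz' suffix selects gzip output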
# -- utilities ----------------------------------------------------------------
def iter_tables(source):
"""Iterate over all tables in the given document(s)
Parameters
----------
source : `file`, `str`, :class:`~ligo.lw.ligolw.Document`, `list`
one or more open files, file paths, or LIGO_LW `Document`s
Yields
------
ligo.lw.table.Table
a table structure from the document(s)
"""
from ligo.lw.ligolw import (Element, Stream, WalkChildren)
# get LIGO_LW object
if not isinstance(source, Element):
filt = get_filtering_contenthandler(Stream)
source = read_ligolw(source, contenthandler=filt)
llw = get_ligolw_element(source)
# yield tables
for elem in WalkChildren(llw):
if elem.tagName == "Table":
yield elem
def list_tables(source):
"""List the names of all tables in this file(s)
Parameters
----------
source : `file`, `str`, :class:`~ligo.lw.ligolw.Document`, `list`
one or more open files, file paths, or LIGO_LW `Document`s
Examples
--------
>>> from gwpy.io.ligolw import list_tables
>>> print(list_tables('H1-LDAS_STRAIN-968654552-10.xml.gz'))
['process', 'process_params', 'sngl_burst', 'search_summary', 'segment_definer', 'segment_summary', 'segment']
""" # noqa: E501
return [tbl.TableName(tbl.Name) for tbl in iter_tables(source)]
def to_table_type(val, cls, colname):
"""Cast a value to the correct type for inclusion in a LIGO_LW table
This method returns the input unmodified if a type mapping for ``colname``
isn't found.
Parameters
----------
val : `object`
The input object to convert, of any type
cls : `type`, subclass of :class:`~ligo.lw.table.Table`
the table class to map against
colname : `str`
The name of the mapping column
Returns
-------
obj : `object`
The input ``val`` cast to the correct type
Examples
--------
>>> from gwpy.io.ligolw import to_table_type as to_ligolw_type
>>> from ligo.lw.lsctables import SnglBurstTable
>>> x = to_ligolw_type(1.0, SnglBurstTable, 'central_freq'))
>>> print(type(x), x)
<class 'numpy.float32'> 1.0
"""
from ligo.lw.types import (
ToNumPyType as numpytypes,
ToPyType as pytypes,
)
# if nothing to do...
if val is None or colname not in cls.validcolumns:
return val
llwtype = cls.validcolumns[colname]
# map to numpy or python types
try:
return numpy.sctypeDict[numpytypes[llwtype]](val)
except KeyError:
return pytypes[llwtype](val)
# -- identify -----------------------------------------------------------------
def is_ligolw(origin, filepath, fileobj, *args, **kwargs):
"""Identify a file object as LIGO_LW-format XML
"""
# pylint: disable=unused-argument
if fileobj is not None:
loc = fileobj.tell()
fileobj.seek(0)
try:
line1 = fileobj.readline().lower()
line2 = fileobj.readline().lower()
try:
return (
line1.startswith(XML_SIGNATURE)
and line2.startswith((LIGOLW_SIGNATURE, LIGOLW_ELEMENT))
)
except TypeError: # bytes vs str
return (
line1.startswith(XML_SIGNATURE.decode('utf-8'))
and line2.startswith((
LIGOLW_SIGNATURE.decode('utf-8'),
LIGOLW_ELEMENT.decode('utf-8'),
))
)
finally:
fileobj.seek(loc)
try:
from ligo.lw.ligolw import Element
except ImportError:
return
return len(args) > 0 and isinstance(args[0], Element)
@deprecated_function
def is_xml(origin, filepath, fileobj, *args, **kwargs): # pragma: no cover
"""Identify a file object as XML (any format)
"""
# pylint: disable=unused-argument
if fileobj is not None:
loc = fileobj.tell()
fileobj.seek(0)
try:
sig = fileobj.read(5).lower()
return sig == XML_SIGNATURE
finally:
fileobj.seek(loc)
elif filepath is not None:
return filepath.endswith(('.xml', '.xml.gz'))
| gwpy/gwpy | gwpy/io/ligolw.py | Python | gpl-3.0 | 22,098 |
#################################################################################
# Copyright 2014 See AUTHORS file.
#
# Licensed under the GNU General Public License Version 3.0 (the "LICENSE");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.gnu.org/licenses/gpl-3.0.txt
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#################################################################################
bl_info = {
"name": "LibGDX G3D Exporter",
"author": "Danilo Costa Viana",
"blender": (2,6,9),
"version": (0,1,0),
"location": "File > Import-Export",
"description": "Export scene to G3D (LibGDX) format",
"category": "Import-Export"
}
import bpy
from io_scene_g3d.export_g3d import G3DExporter
class Mesh(object):
def __init__(self, s):
self.s = s
def __repr__(self):
return '<Mesh(%s)>' % self.s
def menu_func(self, context):
self.layout.operator(G3DExporter.bl_idname, text="LibGDX G3D text format (.g3dj)")
def register():
bpy.utils.register_module(__name__)
bpy.types.INFO_MT_file_export.append(menu_func)
def unregister():
bpy.utils.unregister_module(__name__)
bpy.types.INFO_MT_file_export.remove(menu_func)
if __name__ == "__main__":
register()
| blackears/libgdx_blender_g3d_exporter | io_scene_g3d/__init__.py | Python | gpl-3.0 | 1,616 |
#
# Copyright 2017 Russell Smiley
#
# This file is part of timetools.
#
# timetools is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# timetools is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with timetools. If not, see <http://www.gnu.org/licenses/>.
#
import numpy
import unittest
import timetools.signalProcessing.tolerance as spt
import timetools.synchronization.intervals as si
class TestIntervals (unittest.TestCase):
def testCalculateLogIntervalScale1 (self):
minValue = 10
maxValue = 120
numberPoints = 11
result = si.generateLogIntervalScale(minValue, maxValue, numberPoints)
self.assertTrue(len(result) == numberPoints, 'Result does not have correct length')
minValueTolerance = spt.ToleranceValue(minValue, 0.1, spt.ToleranceUnit['percent'])
maxValueTolerance = spt.ToleranceValue(maxValue, 0.1, spt.ToleranceUnit['percent'])
self.assertTrue((minValueTolerance.isWithinTolerance(result[0]) and maxValueTolerance.isWithinTolerance(result[-1])), 'Incorrect endpoints')
# A logarithmic sequence will be evenly spaced in the logarithmic domain
logIntervals = numpy.diff(numpy.log10(result))
intervalTolerance = spt.ToleranceValue(numpy.mean(logIntervals), 0.1, spt.ToleranceUnit['percent'])
self.assertTrue(numpy.all(intervalTolerance.isWithinTolerance(logIntervals)), 'Intervals are not logarithmic')
def testGenerateMonotonicLogScale1 (self):
minValue = 10
maxValue = 25
numberPoints = 12
expectedSequence = numpy.array([10, 11, 12, 13, 14, 15, 16, 17, 19, 21, 23, 25])
thisArray = si.generateLogIntervalScale(minValue, maxValue, numberPoints)
thisIntegerArray = numpy.floor(thisArray)
monotonicIntervals = si.generateMonotonicLogScale(thisIntegerArray)
self.assertTrue(isinstance(monotonicIntervals[0], numpy.intp), '')
self.assertTrue(len(monotonicIntervals) == len(expectedSequence), 'Incorrect length of monotonic sequence')
self.assertTrue(numpy.all(monotonicIntervals == expectedSequence), 'Monotonicity failed')
if __name__ == "__main__":
unittest.main()
| blueskyjunkie/timeTools | timetools/synchronization/tests/testIntervals.py | Python | gpl-3.0 | 2,719 |
from Collisions.Collidable import Collidable
from Collisions.TypeBasedCollision import TypeBasedCollision
from Drawing.Drawable import Drawable
from Drawing.DrawPlatform import DrawPlatform
from Movement.Movable import Movable, Movable2D
from Movement.State import State2D
from Utility.Entity import Entity
################################################################################
################################################################################
class Platform(Entity, Movable, Drawable, Collidable):
##############################################################################
def __init__(self, Window, xPosition, yPosition, TileType, Length = 1):
Entity.__init__(self)
InitialState = \
State2D(xPosition, yPosition, Width = Length * 64, Height = 64)
Movable.__init__(self, Movable2D(InitialState))
DrawFunctor = \
DrawPlatform(Window, "Images/platformPack/PNG/Tiles/" + TileType, Length)
Drawable.__init__(self, DrawFunctor, InitialState)
Collidable.__init__(self, TypeBasedCollision(), InitialState)
################################################################################
################################################################################
| dloman/FiestaMonsterz | Entities/Platform.py | Python | gpl-3.0 | 1,236 |
## awg_iq class
# two channels of awgs and a local oscillator used to feed a single iq mixer
# maximum settings of the current mixer that should not be exceeded
import numpy as np
import logging
from .save_pkl import *
from .config import get_config
from matplotlib import pyplot as plt
class awg_iq:
"""Interface for IQ modulation of RF signals wth two AWG channels.
IQ modulation requires two low (intermediate) frequency arbitrary waveform generators for the I and Q
connectors of the mixer and one for the LO input connector.
Modulated signal is output at RF (radio frequency).
Attributes:
awg_I (:obj:`awg`): Instance of an AWG class (for example, Tektronix AWG5014C or AWG500)
that is connected to the I input of the mixer. Should implement the methods get_nop, get_clock,
set_clock, set_waveform, set_status, set_trigger_mode, run and stop.
awg_Q (:obj:`awg`): Instance of an AWG class that is connected to the Q input of the mixer.
        awg_I and awg_Q are normally the same device (I and Q are connected to different channels of the same device).
awg_ch_I (int): Channel id of the device awg_I that is connected to the I connector of the mixer.
awg_ch_Q (int): Channel id of the device awg_I that is connected to the Q connector of the mixer.
        lo (:obj:`psg`): Instance of a sinusoidal signal generator. Should implement the methods get_frequency and set_frequency.
"""
def __init__(self, awg_I, awg_Q, awg_ch_I, awg_ch_Q, lo):#, mixer):
"""
"""
self.awg_I = awg_I
self.awg_Q = awg_Q
self.awg_ch_I = awg_ch_I
self.awg_ch_Q = awg_ch_Q
self.lo = lo
self._if = 0
self.frequency = lo.get_frequency()
self.calibrations = {}
self.sideband_id = 0
self.ignore_calibration_drift = False
self.frozen = False
#self.mixer = mixer
#@property
def get_nop(self):
"""int: Number of samples in segment."""
I_nop = self.awg_I.get_nop()
Q_nop = self.awg_Q.get_nop()
if I_nop != Q_nop:
raise ValueError('Number of points in I channel awg and Q channel should coincide')
return I_nop
def get_clock(self):
"""int: Sample rate of I and Q channels (complex amplitude envelope)."""
I_clock = self.awg_I.get_clock()
Q_clock = self.awg_Q.get_clock()
if I_clock != Q_clock:
raise ValueError('Clock rate in I channel awg and Q channel should coincide')
return I_clock
def set_nop(self, nop):
"""int: Sets number of samples in segment."""
self.awg_I.set_nop(nop)
self.awg_Q.set_nop(nop)
def set_clock(self, clock):
"""Sets sampling rate."""
self.awg_I.set_clock(clock)
self.awg_Q.set_clock(clock)
def set_status(self, status):
"""Turns on and off the lo and awg channels."""
self.lo.set_status(status)
self.awg_I.set_status(status, channel=self.awg_ch_I)
self.awg_Q.set_status(status, channel=self.awg_ch_Q)
def set_waveform_IQ_cmplx_raw(self, waveform_cmplx):
self.__set_waveform_IQ_cmplx(waveform_cmplx)
def __set_waveform_IQ_cmplx(self, waveform_cmplx):
"""Sets the real part of the waveform on the I channel and the imaginary part of the
waveform on the Q channel.
        No intermediate-frequency multiplication or mixer-calibration corrections are performed.
        This is a low-level function that is normally only called for debugging purposes. Pulse
        sequence generators do not normally call this function, but rather set_waveform."""
waveform_I = np.real(waveform_cmplx)
waveform_Q = np.imag(waveform_cmplx)
self.awg_I.stop()
if self.awg_I != self.awg_Q:
self.awg_Q.stop()
self.awg_I.set_waveform(waveform_I, channel=self.awg_ch_I)
self.awg_Q.set_waveform(waveform_Q, channel=self.awg_ch_Q)
self.awg_I.run()
if self.awg_I != self.awg_Q:
self.awg_Q.run()
#import matplotlib.pyplot as plt
#plt.plot(waveform_I)
#plt.plot(waveform_Q)
#if np.any(np.abs(waveform_I)>1.0) or np.any(np.abs(waveform_Q)>1.0):
#logging.warning('Waveform clipped!')
def calib(self, cname):
if self.ignore_calibration_drift:
if cname not in self.calibrations:
c = [calib for calib in self.calibrations.values()]
return c[0]
if cname not in self.calibrations:
print ('Calibration not loaded. Use ignore_calibration_drift to use any calibration.')
return self.calibrations[cname]
def set_waveform(self, waveform_cmplx):
"""Sets the real part of the waveform on the I channel and the imaginary part of the
waveform on the Q channel.
This function multiplies the waveform with the intermediate frequency oscillation and sideband
        calibration amplitudes. The function accepts a complex waveform envelope and effectively forms an
        RF output waveform of the given envelope at the frequency given by the frequency attribute."""
t = np.linspace(0, self.get_nop()/self.get_clock(), self.get_nop(), endpoint=False)
waveform_if = waveform_cmplx*np.exp(1j*2*np.pi*t*self.get_if())
waveform_I = np.real(self.calib(self.cname())['I']*waveform_if)+np.real(self.calib(self.cname())['dc'])
waveform_Q = np.imag(self.calib(self.cname())['Q']*waveform_if)+np.imag(self.calib(self.cname())['dc'])
self.__set_waveform_IQ_cmplx(waveform_I+1j*waveform_Q)
self.waveform = waveform_cmplx
return np.max([np.max(np.abs(waveform_I)), np.max(np.abs(waveform_Q))])
def get_waveform(self):
return self.waveform
def set_trigger_mode(self, mode):
self.awg_I.set_trigger_mode(mode)
self.awg_Q.set_trigger_mode(mode)
# clip DC to prevent mixer damage
def clip_dc(self, x):
"""Clips the dc complonent of the output at both channels of the AWG to prevent mixer damage."""
x = [np.real(x), np.imag(x)]
for c in (0,1):
if x[c] < -0.5:
x[c] = -0.5
if x[c] > 0.5:
x[c] = 0.5
x = x[0] + 1j * x[1]
return x
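    # Editor's illustration (derived from the method above): clipping acts on
    # the real and imaginary parts independently, e.g.
    #     clip_dc(0.7 - 0.2j)  ->  (0.5 - 0.2j)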
def _set_dc(self, x):
"""Clips the dc complonent of the output at both channels of the AWG to prevent mixer damage."""
x = self.clip_dc(x)
self.__set_waveform_IQ_cmplx([x]*self.get_nop())
def _set_if_cw(self, dc, I, Q):
"""Sets a CW with arbitrary calibration. This functions is invoked by _calibrate_sa
to find the optimal values of the I and Q complex amplitudes and dc offsets that correspond
to the minimum SFDR."""
t = np.linspace(0, self.get_nop()/self.get_clock(), self.get_nop(), endpoint=False)
dc = self.clip_dc(dc)
waveform_I = np.real(I*np.exp(2*np.pi*1j*t*self.get_if()))+np.real(dc)
waveform_Q = np.imag(Q*np.exp(2*np.pi*1j*t*self.get_if()))+np.imag(dc)
self.__set_waveform_IQ_cmplx(waveform_I+1j*waveform_Q)
return np.max([np.max(np.abs(waveform_I)), np.max(np.abs(waveform_Q))])
def cname(self):
return ('if', self.get_if()), ('frequency', self.get_frequency()), ('sideband_id', self.sideband_id)
def get_calibration(self, sa=None):
"""User-level function to sort out mxer calibration matters. Checks if there is a saved calibration for the given
LO and IF frequencies and loads it.
When invoked with a spectrum analyzer instance as an argument it perform and save the calibration with the current
frequencies.
"""
calibration_path = get_config()['datadir']+'/calibrations/'
filename = 'IQ-if{0:5.3g}-rf{1:5.3g}-sb-{2}'.format(self.get_if(), self.get_frequency(), self.sideband_id)
try:
self.calibrations[self.cname()] = load_pkl(filename, location=calibration_path)
except Exception as e:
if not sa:
logging.error('No ready calibration found and no spectrum analyzer to calibrate')
else:
self._calibrate_cw_sa(sa)
self.save_calibration()
return self.calibrations[self.cname()]
def save_calibration(self):
calibration_path = get_config()['datadir']+'/calibrations/'
print (calibration_path)
filename = 'IQ-if{0:5.3g}-rf{1:5.3g}-sb-{2}'.format(self.get_if(), self.get_frequency(), self.sideband_id)
save_pkl(None, self.calibrations[self.cname()], location=calibration_path, filename=filename, plot=False)
def _calibrate_cw_sa(self, sa, num_sidebands = 7):
"""Performs IQ mixer calibration with the spectrum analyzer sa with the intermediate frequency."""
from scipy.optimize import fmin
import time
dc = self._calibrate_zero_sa(sa)
res_bw = 1e5
video_bw = 1e4
if hasattr(sa, 'set_nop'):
sa.set_centerfreq(self.lo.get_frequency())
sa.set_span((num_sidebands-1)*self.get_if())
sa.set_nop(num_sidebands)
sa.set_detector('POS')
sa.set_res_bw(res_bw)
sa.set_video_bw(video_bw)
self.set_trigger_mode('CONT')
else:
sa.set_detector('rms')
sa.set_res_bw(res_bw)
sa.set_video_bw(video_bw)
sa.set_span(res_bw)
sa.set_sweep_time_auto(1)
self.lo.set_status(True)
sideband_ids = np.asarray(np.linspace(-(num_sidebands-1)/2, (num_sidebands-1)/2, num_sidebands), dtype=int)
self.awg_I.run()
self.awg_Q.run()
solution = [np.real(dc), np.imag(dc), 0.5, 0.5, 0.5, 0.5]
for iter_id in range(1):
def tfunc(x):
dc = x[0] + x[1]*1j
I = x[2] + x[3]*1j
Q = x[4] + x[5]*1j
max_amplitude = self._set_if_cw(dc, I, Q)
if max_amplitude < 1:
clipping = 0
else:
clipping = (max_amplitude-1)
# if we can measure all sidebands in a single sweep, do it
if hasattr(sa, 'set_nop'):
result = sa.measure()['Power'].ravel()
else:
# otherwise, sweep through each sideband
result = []
for sideband_id in range(num_sidebands):
sa.set_centerfreq(self.lo.get_frequency()+(sideband_id-(num_sidebands-1)/2.)*self.get_if())
print (sa.get_centerfreq())
#time.sleep(0.1)
#result.append(np.log10(np.sum(10**(sa.measure()['Power']/10)))*10)
result.append(np.log10(np.sum(sa.measure()['Power']))*10)
#time.sleep(0.1)
result = np.asarray(result)
bad_power = np.sum(10**((result[sideband_ids != self.sideband_id])/20))
good_power = np.sum(10**((result[sideband_ids==self.sideband_id])/20))
bad_power_dbm = np.log10(bad_power)*20
good_power_dbm = np.log10(good_power)*20
print ('dc: {0: 4.2e}\tI: {1: 4.2e}\tQ:{2: 4.2e}\tB: {3:4.2f} G: {4:4.2f}, C:{5:4.2f}\r'.format(dc, I, Q, bad_power_dbm, good_power_dbm, clipping))
print (result)
return -good_power/bad_power+np.abs(good_power/bad_power)*10*clipping
solution = fmin(tfunc, solution, maxiter=75, xtol=2**(-14))
score = tfunc(solution)
self.calibrations[self.cname()] = {'dc': self.clip_dc(solution[0]+solution[1]*1j),
'I': solution[2]+solution[3]*1j,
'Q': solution[4]+solution[5]*1j,
'score': score,
'num_sidebands': num_sidebands}
return self.calibrations[self.cname()]
def _calibrate_zero_sa(self, sa):
"""Performs IQ mixer calibration for DC signals at the I and Q inputs."""
import time
from scipy.optimize import fmin
print(self.lo.get_frequency())
res_bw = 1e5
video_bw = 1e4
sa.set_res_bw(res_bw)
sa.set_video_bw(video_bw)
sa.set_detector('rms')
sa.set_centerfreq(self.lo.get_frequency())
sa.set_sweep_time(1e-3)
#time.sleep(0.1)
if hasattr(sa, 'set_nop'):
sa.set_span(0)
sa.set_nop(1)
self.set_trigger_mode('CONT')
else:
sa.set_span(res_bw)
self.lo.set_status(True)
def tfunc(x):
self.awg_I.stop()
self.awg_Q.stop()
self._set_dc(x[0]+x[1]*1j)
self.awg_I.run()
self.awg_Q.run()
if hasattr(sa, 'set_nop'):
result = sa.measure()['Power'].ravel()[0]
else:
#result = np.log10(np.sum(10**(sa.measure()['Power']/10)))*10
result = np.log10(np.sum(sa.measure()['Power']))*10
print (x, result)
return result
solution = fmin(tfunc, [0.3,0.3], maxiter=30, xtol=2**(-14))
x = self.clip_dc(solution[0]+1j*solution[1])
self.zero = x
return x
def set_if(self, _if):
self._if = _if
def get_if(self):
return self._if
def set_sideband_id(self, sideband_id):
self.sideband_id = sideband_id
def get_sideband_id(self):
return self.sideband_id
def set_frequency(self, frequency):
self.lo.set_frequency(frequency-self.get_sideband_id()*self.get_if())
self.frequency = self.lo.get_frequency()+self.get_sideband_id()*self.get_if()
def set_uncal_frequency(self, frequency):
self.lo.set_frequency(frequency-self.get_sideband_id()*self.get_if())
def get_frequency(self):
return self.frequency
def freeze(self):
self.frozen = True
def unfreeze(self):
if self.frozen:
self.frozen = False
        #self.assemble_waveform()
| ooovector/qtlab_replacement | awg_iq.py | Python | gpl-3.0 | 12,232 |
#!/usr/bin/env python
# Bootstrap installation of Distribute
from importlib import import_module
import os, sys
from distutils.core import setup
from distutils.command.install import install
from setuptools.command.develop import develop
import subprocess
# from subprocess import call
# command = partial(subprocess.call, shell=True, stdout=sys.stdout, stdin=sys.stdin)
def package_env(file_name, strict=False):
file_path = os.path.join(os.path.dirname(__file__),file_name)
if os.path.exists(file_path) or strict:
return open(file_path).read()
else:
return u''
PACKAGE = u'public-forms'
PROJECT = u'public_forms'
PROJECT_SLUG = u'public_forms'
VERSION = package_env('VERSION')
URL = package_env('URL')
AUTHOR_AND_EMAIL = [v.strip('>').strip() for v \
in package_env('AUTHOR').split('<mailto:')]
if len(AUTHOR_AND_EMAIL)==2:
AUTHOR, AUTHOR_EMAIL = AUTHOR_AND_EMAIL
else:
    AUTHOR = AUTHOR_AND_EMAIL[0]
AUTHOR_EMAIL = u''
DESC = "feincms extension templated from django.contrib.skeleton.application"
PACKAGE_NAMESPACE = [s for s in 'feincms.page.extensions'.strip()\
.strip('"')\
.strip("'")\
.strip()\
.split('.') if s]
NSLIST = lambda sep:(sep.join(PACKAGE_NAMESPACE[:i+1]) for i,n in enumerate(PACKAGE_NAMESPACE))
PACKAGE_NAMESPACE_WITH_PACKAGE = PACKAGE_NAMESPACE + [PROJECT_SLUG,]
NSLIST_WITH_PACKAGE = lambda sep:(sep.join(PACKAGE_NAMESPACE_WITH_PACKAGE[:i+1]) \
for i,n in enumerate(PACKAGE_NAMESPACE_WITH_PACKAGE))
PACKAGE_DIRS = dict(zip(NSLIST_WITH_PACKAGE('.'),
NSLIST_WITH_PACKAGE('/')))
class install_requirements(object):
def install_requirements(self):
if os.environ.get('INSTALL_SKELETON_REQUIREMENTS', False):
for r in self.requirements:
if os.path.exists(r):
subprocess.call('pip install -r %s'%r,
shell=True,
stdout=sys.stdout,
stderr=sys.stderr)
class post_install(install, install_requirements):
requirements = ['requirements.txt']
def run(self):
install.run(self)
self.install_requirements()
class post_develop(develop, install_requirements):
requirements = ['requirements.txt', 'requirements.dev.txt']
def run(self):
develop.run(self)
self.install_requirements()
if __name__ == '__main__':
setup(
cmdclass={"install": post_install,
"develop": post_develop,},
name=PROJECT,
version=VERSION,
description=DESC,
long_description=package_env('README.rst'),
author=AUTHOR,
author_email=AUTHOR_EMAIL,
url=URL,
license=package_env('LICENSE'),
namespace_packages=list(NSLIST('.')),
packages=list(NSLIST_WITH_PACKAGE('.')),
package_dir=PACKAGE_DIRS,
include_package_data=True,
zip_safe=False,
test_suite = 'tests',
# install_requires=['argparse.extra',],
classifiers=[
'License :: OSI Approved',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Framework :: Django',
],
) | lamerzan/public-forms | setup.py | Python | gpl-3.0 | 3,570 |
import logging
import os
import sys
from logging import handlers
sys.path.append(os.path.dirname(__file__) + '/../src/')
sys.path.append(os.path.dirname(__file__) + '/../lib/')
from vision import playlist
from vision import playout
from vision import playoutweb
from vision.configuration import configuration
def sanity_test():
# TODO: Create a proper sanity test
if not os.path.isdir("cache/dailyplan"):
print("The directory 'cache/dailyplan' has to exist in work directory")
sys.exit(1)
if not os.path.isdir("cache/logs"):
print("The directory 'cache/logs' has to exist in work directory")
sys.exit(1)
"""
# Not needed yet
if not os.path.isdir("cache/screenshots"):
print "The directory 'cache/screenshots' has to exist in work directory"
sys.exit(1)
if not os.path.isdir("cache/overlays"):
print "The directory 'cache/screenhots' has to exist in work directory"
sys.exit(1)
"""
def logging_excepthook(type, value, tb):
"Exception handler that logs"
logging.debug("Unhandled exception", exc_info=(type, value, tb))
# continue processing the exception
sys.__excepthook__(type, value, tb)
def setup_logging():
log_fmt = ("%(asctime)s %(levelname)s:%(name)s "
"%(filename)s:%(lineno)d %(message)s")
logging.basicConfig(level=logging.DEBUG, format=log_fmt)
logger = logging.getLogger()
#ch = logging.StreamHandler()
#ch.setLevel(logging.DEBUG)
handler = handlers.TimedRotatingFileHandler(
"cache/logs/integrated_playout.log", when="D")
handler.setLevel(logging.DEBUG)
handler.setFormatter(logging.Formatter(log_fmt))
logger.addHandler(handler)
sys.excepthook = logging_excepthook
if __name__ == "__main__":
sanity_test()
setup_logging()
logging.info("FK Integrated Playout started")
logging.info("Configuration details:\n%s" % configuration.config_strings())
# Create today's schedule
schedule = playlist.Schedule()
schedule.update_from_pg_cache(days=14)
# Start the player
playout_service = playout.PlayoutService()
playout = playout.Playout(service=playout_service)
# Start Web
playoutweb.start_web(None, playout_service, playout, schedule=schedule)
# Setting the schedule starts playback
playout.set_schedule(schedule)
# Heat up the reactor
from twisted.internet import reactor
reactor.run()
| Frikanalen/mltplayout | bin/integrated.py | Python | gpl-3.0 | 2,450 |
"""
:copyright: (c) 2015 by OpenCredo.
:license: GPLv3, see LICENSE for more details.
"""
import logging
import json
log = logging.getLogger(__name__)
redis_server = None
redis_master_server = None
def get_redis_slave():
return redis_server
def get_redis_master():
return redis_master_server
class QueueIterator(object):
def __init__(self, queue, start=0):
self.q = queue
self.iter = iter(range(start, len(self.q)))
def __iter__(self):
return self
def next(self):
"""
Retrieve the next message in the queue.
"""
i = self.iter.next()
return self.q.get_item(i)
class Queue(object):
def __init__(self, name, server=None):
self.name = name
self.server = server or redis_server
def size(self):
return sum([len(self.server.lindex(self.name, m)) for m in range(0, len(self))])
def put_raw(self, data):
self.server.rpush(self.name, data)
def get_raw(self):
return self.server.lpop(self.name)
def put(self, msg):
json_msg = json.dumps(msg)
self.server.rpush(self.name, json_msg)
def get(self, timeout=None):
if timeout:
result = self.server.blpop(self.name, timeout)
if result:
result = result[1]
else:
result = self.server.lpop(self.name)
if result:
result = json.loads(result)
return result
def get_item(self, index):
msg = self.server.lindex(self.name, index)
if msg:
msg = json.loads(msg)
return msg
def __len__(self):
return self.server.llen(self.name)
def __iter__(self):
return QueueIterator(self)
def delete(self):
return self.server.delete(self.name)
class String(object):
def __init__(self, server=None, ttl=None):
self.server = server or redis_server
self.ttl = ttl or 24 * 7 * 3600 # 7 days
def set_raw(self, key, msg):
self.server.setex(key, msg, self.ttl)
def get_raw(self, key):
return self.server.get(key)
def get(self, key):
try:
return json.loads(self.get_raw(key))
except TypeError:
return None
def set(self, key, msg):
self.set_raw(key, json.dumps(msg))
def delete(self, key):
return self.server.delete(key)
class Hash(object):
def __init__(self, server=None):
self.server = server or redis_server
def get(self, name, key):
try:
return json.loads(self.get_raw(name, key))
except TypeError:
return None
def set(self, name, key, msg):
return self.set_raw(name, key, json.dumps(msg))
def set_raw(self, name, key, msg):
return self.server.hset(name, key, msg)
def incr(self, name, key, amount=1):
return self.server.hincrby(name, key, amount=amount)
def get_raw(self, name, key):
return self.server.hget(name, key)
def get_all_raw(self, name):
return self.server.hgetall(name)
def get_all(self, name):
return dict((k, json.loads(v)) for k, v in self.server.hgetall(
name).iteritems())
def keys(self, name):
return self.server.hkeys(name)
def values(self, name):
return [json.loads(x) for x in self.server.hvals(name)]
def delete(self, name, *keys):
"""
        delete the given key(s) from the hash
"""
return self.server.hdel(name, *keys)
def remove(self, name):
"""
delete the hash
"""
return self.server.delete(name)
def exists(self, name, key):
return self.server.hexists(name, key)
def get_queue(q=None):
q = q or Queue
return q
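# Usage sketch (editor's illustration, not part of the original module; it
# assumes application setup has assigned a live redis client to
# ``redis_server`` before these helpers are used):
#     q = Queue('jobs')
#     q.put({'id': 1})
#     assert q.get() == {'id': 1}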
| rusenask/stubo-app | stubo/cache/queue.py | Python | gpl-3.0 | 3,776 |
# This file is part of barrioSquare.
#
# barrioSquare is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# barrioSquare is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with barrioSquare. If not, see <http://www.gnu.org/licenses/>.
# or write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
QPUSHBUTTON_DEFAULT = 'QPushButton { color: #000000; border-radius: 6px; border: 2px solid #8f8f91; background-color: qlineargradient(x1: 0, y1: 0, x2: 0, y2: 1, stop: 0 #f6f7fa, stop: 1 #dadbde); } QPushButton:pressed {background-color: qlineargradient(x1: 0, y1: 0, x2: 0, y2: 1, stop: 0 #7ec2cc, stop: 1 #defbff); }'
QPUSHBUTTON_HIGHLIGHT = 'QPushButton { color: #000000; border-radius: 6px; border: 2px solid #8f8f91; background-color: qlineargradient(x1: 0, y1: 0, x2: 0, y2: 1, stop: 0 #7ec2cc, stop: 1 #defbff); } QPushButton:pressed {background-color: qlineargradient(x1: 0, y1: 0, x2: 0, y2: 1, stop: 0 #cccccc, stop: 1 #f6f7fa); }'
QLISTWIDGET_DEFAULT = 'QListWidget { color: #000000; background-color: #ffffff; } QListView { color: #000000; background-color: #ffffff; }'
| chilitechno/barrioSquare | barrioStyles.py | Python | gpl-3.0 | 1,574 |
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
import json
import bpy
import mathutils
import bmesh as bm
import numpy as np
from bpy.props import StringProperty
from sverchok.node_tree import SverchCustomTreeNode
from sverchok.data_structure import updateNode
callback_id = 'node.callback_execnodemod'
lines = """\
for i, i2 in zip(V1, V2):
append([x + y for x, y in zip(i, i2)])
""".strip().split('\n')
def update_wrapper(self, context):
try:
updateNode(context.node, context)
except:
...
class SvExecNodeDynaStringItem(bpy.types.PropertyGroup):
line = bpy.props.StringProperty(name="line to eval", default="", update=update_wrapper)
class SvExecNodeModCallback(bpy.types.Operator):
bl_idname = callback_id
bl_label = "generic callback"
cmd = bpy.props.StringProperty(default='')
idx = bpy.props.IntProperty(default=-1)
form = bpy.props.StringProperty(default='')
def execute(self, context):
getattr(context.node, self.cmd)(self)
return {'FINISHED'}
class SvExecNodeMod(bpy.types.Node, SverchCustomTreeNode):
''' Exec Node Mod'''
bl_idname = 'SvExecNodeMod'
bl_label = 'Exec Node Mod'
bl_icon = 'CONSOLE'
text = StringProperty(default='', update=updateNode)
dynamic_strings = bpy.props.CollectionProperty(type=SvExecNodeDynaStringItem)
def draw_buttons(self, context, layout):
row = layout.row(align=True)
# add() remove() clear() move()
row.operator(callback_id, text='', icon='ZOOMIN').cmd = 'add_new_line'
row.operator(callback_id, text='', icon='ZOOMOUT').cmd = 'remove_last_line'
row.operator(callback_id, text='', icon='TRIA_UP').cmd = 'shift_up'
row.operator(callback_id, text='', icon='TRIA_DOWN').cmd = 'shift_down'
row.operator(callback_id, text='', icon='SNAP_ON').cmd = 'delete_blank'
row.operator(callback_id, text='', icon='SNAP_OFF').cmd = 'insert_blank'
if len(self.dynamic_strings) == 0:
return
if not context.active_node == self:
b = layout.box()
col = b.column(align=True)
for idx, line in enumerate(self.dynamic_strings):
col.prop(self.dynamic_strings[idx], "line", text="", emboss=False)
else:
col = layout.column(align=True)
for idx, line in enumerate(self.dynamic_strings):
row = col.row(align=True)
row.prop(self.dynamic_strings[idx], "line", text="")
                # node is active in the UI: expose per-line insert buttons (below / above)
opp = row.operator(callback_id, text='', icon='TRIA_DOWN_BAR')
opp.cmd = 'insert_line'
opp.form = 'below'
opp.idx = idx
opp2 = row.operator(callback_id, text='', icon='TRIA_UP_BAR')
opp2.cmd = 'insert_line'
opp2.form = 'above'
opp2.idx = idx
def draw_buttons_ext(self, context, layout):
col = layout.column(align=True)
col.operator(callback_id, text='copy to node').cmd = 'copy_from_text'
col.prop_search(self, 'text', bpy.data, "texts", text="")
row = layout.row()
col.operator(callback_id, text='cc code to clipboard').cmd = 'copy_node_text_to_clipboard'
def add_new_line(self, context):
self.dynamic_strings.add().line = ""
def remove_last_line(self, context):
if len(self.dynamic_strings) > 1:
self.dynamic_strings.remove(len(self.dynamic_strings)-1)
def shift_up(self, context):
sds = self.dynamic_strings
for i in range(len(sds)):
sds.move(i+1, i)
def shift_down(self, context):
sds = self.dynamic_strings
L = len(sds)
for i in range(L):
sds.move(L-i, i-1)
def delete_blank(self, context):
sds = self.dynamic_strings
Lines = [i.line for i in sds if i.line != ""]
sds.clear()
for i in Lines:
sds.add().line = i
def insert_blank(self, context):
sds = self.dynamic_strings
Lines = [i.line for i in sds]
sds.clear()
for i in Lines:
sds.add().line = i
if i != "":
sds.add().line = ""
def copy_from_text(self, context):
""" make sure self.dynamic_strings has enough strings to do this """
slines = bpy.data.texts[self.text].lines
while len(self.dynamic_strings) < len(slines):
self.dynamic_strings.add()
for i, i2 in zip(self.dynamic_strings, slines):
i.line = i2.body
def copy_node_text_to_clipboard(self, context):
lines = [d.line for d in self.dynamic_strings]
if not lines:
return
str_lines = "\n".join(lines)
bpy.context.window_manager.clipboard = str_lines
def insert_line(self, op_props):
sds = self.dynamic_strings
Lines = [i.line for i in sds]
sds.clear()
for tidx, i in enumerate(Lines):
if op_props.form == 'below':
sds.add().line = i
if op_props.idx == tidx:
sds.add().line = ""
else:
if op_props.idx == tidx:
sds.add().line = ""
sds.add().line = i
def sv_init(self, context):
self.inputs.new('StringsSocket', 'V1')
self.inputs.new('StringsSocket', 'V2')
self.inputs.new('StringsSocket', 'V3')
self.outputs.new('StringsSocket', 'out')
# add default strings
self.dynamic_strings.add().line = lines[0]
self.dynamic_strings.add().line = lines[1]
self.dynamic_strings.add().line = ""
self.width = 289
def process(self):
v1, v2, v3 = self.inputs
V1, V2, V3 = v1.sv_get(0), v2.sv_get(0), v3.sv_get(0)
out = []
extend = out.extend
append = out.append
exec('\n'.join([j.line for j in self.dynamic_strings]))
self.outputs[0].sv_set(out)
def storage_set_data(self, storage):
strings_json = storage['string_storage']
lines_list = json.loads(strings_json)['lines']
self.id_data.freeze(hard=True)
self.dynamic_strings.clear()
for line in lines_list:
self.dynamic_strings.add().line = line
self.id_data.unfreeze(hard=True)
def storage_get_data(self, node_dict):
local_storage = {'lines': []}
for item in self.dynamic_strings:
local_storage['lines'].append(item.line)
node_dict['string_storage'] = json.dumps(local_storage)
def register():
bpy.utils.register_class(SvExecNodeDynaStringItem)
bpy.utils.register_class(SvExecNodeMod)
bpy.utils.register_class(SvExecNodeModCallback)
def unregister():
bpy.utils.unregister_class(SvExecNodeModCallback)
bpy.utils.unregister_class(SvExecNodeMod)
bpy.utils.unregister_class(SvExecNodeDynaStringItem)
| elfnor/sverchok | nodes/script/multi_exec.py | Python | gpl-3.0 | 7,709 |
import requests
import re
def request_by_ip(ip):
response = {}
request = requests.get("http://www.fairplay.ac/lookup/address/{}".format(ip))
assert request.status_code == 200
output = re.findall("Fairplay Guid: ([a-zA-Z0-9]{5})", request.text)
if len(output) == 0:
return None
response["guid"] = output[0]
response["fairshots"] = re.findall("reportFairshot\('(.*?)','(.*?)','(.*?)','(.*?)','(.*?)'", request.text)
return response | ohad258/sof2utils | FairplayRequester.py | Python | gpl-3.0 | 471 |
"""
"""
from __future__ import unicode_literals
from django.conf import settings
def fikoStatik(request):
"""
    Returns the Fiko static files base URL (``FIKO_STATIK_URL``).
    Example:
    <script src="{{ FIKO_STATIK_URL }}/pkt/angular/angular.js"></script>
    For this context processor to be active, adapt the following snippets into your project's urls.py and settings.py files.
```urls.py
urlpatterns = [
...
url(r'^fikoStatik/', include(fikoStatik.urls)),
...
]
```
``` settings.py
TEMPLATES = [
{
...
'OPTIONS': {
'context_processors': [
...
'fikoStatik.icerikIsleyici.fikoStatik.fikoStatik'
...
],
},
},
]
```
"""
return dict(
FIKO_STATIK_URL=settings.FIKO_STATIK_URL if hasattr(settings, "FIKO_STATIK_URL") else "fikoStatik"
)
| fikri007/django-fikoStatik | fikoStatik/icerikIsleyici/fikoStatik.py | Python | gpl-3.0 | 1,063 |
# Copyright (C) 2011-2012 Patrick Totzke <[email protected]>
# This file is released under the GNU GPL, version 3 or a later revision.
# For further details see the COPYING file
from __future__ import absolute_import
import argparse
import glob
import logging
import os
import re
from ..settings.const import settings
from ..helper import split_commandstring, string_decode
class Command(object):
"""base class for commands"""
repeatable = False
def __init__(self):
self.prehook = None
self.posthook = None
self.undoable = False
self.help = self.__doc__
def apply(self, caller):
"""code that gets executed when this command is applied"""
pass
class CommandCanceled(Exception):
""" Exception triggered when an interactive command has been cancelled
"""
pass
COMMANDS = {
'search': {},
'envelope': {},
'bufferlist': {},
'taglist': {},
'thread': {},
'global': {},
}
def lookup_command(cmdname, mode):
"""
returns commandclass, argparser and forced parameters used to construct
a command for `cmdname` when called in `mode`.
:param cmdname: name of the command to look up
:type cmdname: str
:param mode: mode identifier
:type mode: str
:rtype: (:class:`Command`, :class:`~argparse.ArgumentParser`,
dict(str->dict))
"""
if cmdname in COMMANDS[mode]:
return COMMANDS[mode][cmdname]
elif cmdname in COMMANDS['global']:
return COMMANDS['global'][cmdname]
else:
return None, None, None
def lookup_parser(cmdname, mode):
"""
returns the :class:`CommandArgumentParser` used to construct a
command for `cmdname` when called in `mode`.
"""
return lookup_command(cmdname, mode)[1]
class CommandParseError(Exception):
"""could not parse commandline string"""
pass
class CommandArgumentParser(argparse.ArgumentParser):
"""
:class:`~argparse.ArgumentParser` that raises :class:`CommandParseError`
instead of printing to `sys.stderr`"""
def exit(self, message):
raise CommandParseError(message)
def error(self, message):
raise CommandParseError(message)
class registerCommand(object):
"""
Decorator used to register a :class:`Command` as
handler for command `name` in `mode` so that it
can be looked up later using :func:`lookup_command`.
Consider this example that shows how a :class:`Command` class
definition is decorated to register it as handler for
'save' in mode 'thread' and add boolean and string arguments::
.. code-block::
@registerCommand('thread', 'save', arguments=[
(['--all'], {'action': 'store_true', 'help':'save all'}),
(['path'], {'nargs':'?', 'help':'path to save to'})],
help='save attachment(s)')
class SaveAttachmentCommand(Command):
pass
"""
def __init__(self, mode, name, help=None, usage=None,
forced=None, arguments=None):
"""
:param mode: mode identifier
:type mode: str
:param name: command name to register as
:type name: str
:param help: help string summarizing what this command does
:type help: str
        :param usage: overrides the auto-generated usage string
:type usage: str
:param forced: keyword parameter used for commands constructor
:type forced: dict (str->str)
:param arguments: list of arguments given as pairs (args, kwargs)
accepted by
:meth:`argparse.ArgumentParser.add_argument`.
:type arguments: list of (list of str, dict (str->str)
"""
self.mode = mode
self.name = name
self.help = help
self.usage = usage
self.forced = forced or {}
self.arguments = arguments or []
def __call__(self, klass):
helpstring = self.help or klass.__doc__
argparser = CommandArgumentParser(description=helpstring,
usage=self.usage,
prog=self.name, add_help=False)
for args, kwargs in self.arguments:
argparser.add_argument(*args, **kwargs)
COMMANDS[self.mode][self.name] = (klass, argparser, self.forced)
return klass
def commandfactory(cmdline, mode='global'):
"""
parses `cmdline` and constructs a :class:`Command`.
:param cmdline: command line to interpret
:type cmdline: str
:param mode: mode identifier
:type mode: str
"""
# split commandname and parameters
if not cmdline:
return None
logging.debug('mode:%s got commandline "%s"', mode, cmdline)
# allow to shellescape without a space after '!'
if cmdline.startswith('!'):
cmdline = 'shellescape \'%s\'' % cmdline[1:]
cmdline = re.sub(r'"(.*)"', r'"\\"\1\\""', cmdline)
try:
args = split_commandstring(cmdline)
except ValueError as e:
raise CommandParseError(str(e))
args = [string_decode(x, 'utf-8') for x in args]
logging.debug('ARGS: %s', args)
cmdname = args[0]
args = args[1:]
# unfold aliases
# TODO: read from settingsmanager
# get class, argparser and forced parameter
(cmdclass, parser, forcedparms) = lookup_command(cmdname, mode)
if cmdclass is None:
msg = 'unknown command: %s' % cmdname
logging.debug(msg)
raise CommandParseError(msg)
parms = vars(parser.parse_args(args))
parms.update(forcedparms)
logging.debug('cmd parms %s', parms)
# create Command
cmd = cmdclass(**parms)
# set pre and post command hooks
get_hook = settings.get_hook
cmd.prehook = get_hook('pre_%s_%s' % (mode, cmdname)) or \
get_hook('pre_global_%s' % cmdname)
cmd.posthook = get_hook('post_%s_%s' % (mode, cmdname)) or \
get_hook('post_global_%s' % cmdname)
return cmd
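# Usage sketch (editor's illustration; 'search' is an assumed command name --
# any command registered for the given mode works the same way):
#     cmd = commandfactory('search tag:unread', mode='global')
#     cmd.apply(ui)  # 'ui' stands for whatever caller object the command expects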
pyfiles = glob.glob1(os.path.dirname(__file__), '*.py')
__all__ = list(filename[:-3] for filename in pyfiles)
| geier/alot | alot/commands/__init__.py | Python | gpl-3.0 | 6,105 |
# $Id$
import sys
import random
import math
import numpy
from itcc.core import ctools
__revision__ = '$Rev$'
__all__ = ['length', 'angle', 'torsionangle', 'imptor',
'combinecombine', 'xyzatm', 'minidx', 'maxidx',
'weightedmean', 'weightedsd', 'datafreq',
'random_vector', 'all', 'any',
'dissq', 'lensq', 'distance', 'normal']
def normal(a):
return a / length(a)
def datafreq(data, min_, max_, num):
result = [0] * num
step = float(max_ - min_)/num
for x in data:
type_ = int((x - min_)/step)
if 0 <= type_ < num:
result[type_] += 1
return result
def distance(a, b):
return length(a-b)
def dissq(a, b):
return lensq(a-b)
def length(a):
return math.sqrt(sum(a*a))
def lensq(a):
return sum(a*a)
def angle(a, b, c):
return ctools.angle(tuple(a), tuple(b), tuple(c))
def torsionangle(a, b, c, d):
"""torsionangle(a, b, c, d) -> angle
a, b, c, d are 4 numpy.array
return the torsionangle of a-b-c-d in radian, range is (-pi, pi].
if torsionangle is invalid, for example, a == b or b == c or c == d
or a == c or b == d, then return float("nan").
"""
return ctools.torsionangle(tuple(a), tuple(b),
tuple(c), tuple(d))
def imptor(a, b, c, d):
'''imptor(a, b, c, d) -> angle
a, b, c, d are 4 Scientific.Geometry.Vector
return the imptor of a-b-c-d
imptor(abcd) is the angle between vector ad and plane abc,
crossmulti(ab, ac) is the positive direction.
'''
ad = d - a
ab = b - a
ac = c - a
abc = numpy.cross(ab, ac)
angle_ = ad.angle(abc)
return math.pi - angle_
def combinecombine(cmbs):
if not cmbs:
yield []
return
for x in cmbs[0]:
for cc in combinecombine(cmbs[1:]):
yield [x] + cc
def xyzatm(p1, p2, p3, r, theta, phi):
'''
>>> from Scientific.Geometry import Vector
>>> import math
>>> xyzatm(Vector(0,0,1), Vector(0,0,0), Vector(1,0,0),
... 1, math.radians(90), 0)
Vector(1.0,0.0,0.99999999999999989)
'''
r12 = normal(p1 - p2)
r23 = normal(p2 - p3)
rt = numpy.cross(r23, r12)
cosine = r12 * r23
sine = math.sqrt(max(1.0 - cosine*cosine, 0.0))
rt /= sine
ru = numpy.cross(rt, r12)
ts = math.sin(theta)
tc = math.cos(theta)
ps = math.sin(phi)
pc = math.cos(phi)
return p1 + (ru * (ts * pc) + rt * (ts * ps) - r12 * tc) * r
def minidx(iterable):
iterable = iter(iterable)
idx = 0
item = iterable.next()
for i, x in enumerate(iterable):
if x < item:
idx = i+1
item = x
return idx, item
def maxidx(iterable):
iterable = iter(iterable)
idx = 0
item = iterable.next()
for i, x in enumerate(iterable):
if x > item:
idx = i+1
item = x
return idx, item
def swapaxes(matrix):
rank1 = len(matrix)
if rank1 == 0:
return []
rank2 = len(matrix[0])
for row in matrix:
assert len(row) == rank2
result = [[None] * rank1 for i in range(rank2)]
for i in range(rank2):
for j in range(rank1):
result[i][j] = matrix[j][i]
return result
def weightedmean(datas, weights):
assert len(datas) == len(weights)
sum_ = sum([data * weight for data, weight in zip(datas, weights)])
totalweight = sum(weights)
return sum_/totalweight
def weightedsd(datas, weights):
assert len(datas) == len(weights)
assert len(datas) > 1
mean_ = weightedmean(datas, weights)
sum_ = sum([(data - mean_)**2 * weight \
for data, weight in zip(datas, weights)])
totalweight = sum(weights)
return math.sqrt(sum_/totalweight)
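# Editor's worked example (illustrative): weightedmean([1.0, 3.0], [1, 3])
# evaluates to (1*1 + 3*3) / (1 + 3) = 2.5.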
def any(iterable):
for element in iterable:
if element:
return True
return False
def all(iterable):
for element in iterable:
if not element:
return False
return True
def random_vector(length_=1.0):
z = random.uniform(-length_, length_)
s = math.sqrt(length_*length_ - z*z)
theta = random.uniform(0.0, math.pi*2)
x = s * math.cos(theta)
y = s * math.sin(theta)
return (x, y, z)
def open_file_or_stdin(ifname):
if ifname == '-':
return sys.stdin
else:
return file(ifname)
def sorted_(iterable):
'''python 2.3 does not support sorted'''
res = list(iterable)
res.sort()
return res
def _test():
import doctest
doctest.testmod()
if __name__ == '__main__':
_test()
| lidaobing/itcc | itcc/core/tools.py | Python | gpl-3.0 | 4,560 |
# -*- coding: utf-8 -*-
"""
Copyright 2008 Serge Matveenko
This file is part of PyStarDict.
PyStarDict is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
PyStarDict is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with PyStarDict. If not, see <http://www.gnu.org/licenses/>.
@author: Serge Matveenko <[email protected]>
"""
import datetime
import os
import sys
"""hack in local sources if requested and handle import crash"""
if '--local' in sys.argv:
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'src'))
try:
from pystardict import Dictionary
except ImportError, e:
if __name__ == '__main__':
print Exception('No pystardict in PYTHONPATH. Try --local parameter.')
exit(1)
else:
raise e
def demo():
milestone1 = datetime.datetime.today()
dicts_dir = os.path.join(os.path.dirname(__file__))
dict1 = Dictionary(os.path.join(dicts_dir, 'stardict-quick_eng-rus-2.4.2',
'quick_english-russian'))
dict2 = Dictionary(os.path.join(dicts_dir, 'stardict-quick_rus-eng-2.4.2',
'quick_russian-english'))
milestone2 = datetime.datetime.today()
print '2 dicts load:', milestone2-milestone1
print dict1.idx['test']
print dict2.idx['проверка']
milestone3 = datetime.datetime.today()
print '2 cords getters:', milestone3-milestone2
print dict1.dict['test']
print dict2.dict['проверка']
milestone4 = datetime.datetime.today()
print '2 direct data getters (w\'out cache):', milestone4-milestone3
print dict1['test']
print dict2['проверка']
milestone5 = datetime.datetime.today()
print '2 high level data getters (not cached):', milestone5-milestone4
print dict1['test']
print dict2['проверка']
milestone6 = datetime.datetime.today()
print '2 high level data getters (cached):', milestone6-milestone5
# list dictionary keys and dictionary content according to the key
    for key in dict1.idx.keys():
print dict1.dict[key]
if __name__ == '__main__':
demo()
| lig/pystardict | examples/demo.py | Python | gpl-3.0 | 2,522 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# FIDATA. Open-source system for analysis of financial and economic data
# Copyright © 2012-2013 Basil Peace
# This file is part of FIDATA.
#
# FIDATA is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# FIDATA is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with FIDATA. If not, see <http://www.gnu.org/licenses/>.
import FIDATA.Engine as engine
engine.initArgParser('Installer of database structure', defLogFilename = 'install.log')
engine.argParser.add_argument('--disable-clear',
dest = 'clear', action = 'store_false',
help = "don't clear database structure (only for clean end-user installation, saves some time)"
)
engine.argParser.add_argument('--disable-tests',
dest = 'tests', action = 'store_false',
help = "don't run tests"
)
engine.argParser.add_argument('--disable-predefined-data',
dest = 'predefinedData', action = 'store_false',
help = "don't import predefined data"
)
engine.connect(asAdmin = True)
import logging
from os import path
srcDir = path.split(path.abspath(__file__))[0]
scripts = [
'epic/epic.sql',
'pre.sql',
'common.sql',
'intl.sql',
'fin-system.sql',
'data-sets.sql',
'data-sets.tools.sql',
'procs.sql',
'data-sets.data-set-fields.sql',
'portfolios.sql',
'trade-systems.sql',
'distributions.sql',
'data-sets.data.sql',
'procs.comp-field-functions.sql',
'procs.aggr-functions.sql',
'procs.dyn-show-functions.sql',
'procs.window-functions.sql',
'data-sets.convertors.sql',
'test/test.common.sql',
'test/test.intl.sql',
'test/test.fin-system.sql',
'test/test.data-sets.sql',
'test/test.procs.sql',
'post.sql',
# Internationalization
'intl/intl.intl.sql',
'intl/fin-system.intl.sql',
# TODO: Move it before our own tests (problem with search path)
'epic/test/test_asserts.sql',
'epic/test/test_core.sql',
'epic/test/test_globals.sql',
'epic/test/test_results.sql',
'epic/test/test_timing.sql',
]
if engine.args.clear:
scripts.insert(0, 'clear.sql')
cursor = engine.conn.cursor()
for script in scripts:
logging.info('Importing {:s}'.format(script))
# TODO: psycopg2 methods of import script?
file = open(path.join(srcDir, script), mode = 'r')
scriptText = file.read()
del file
cursor.execute(scriptText)
del scriptText
engine.commit()
if engine.args.tests:
logging.info('Running tests')
modules = [
'common',
'intl',
'fin-system',
'data-sets',
'procs',
]
# Ensure that there is enough space for test's name during output
cursor.execute("SELECT typlen from pg_type where oid = 'name'::regtype")
formatStr = '{0:<'+str(cursor.fetchone()[0])+'s}{1:s}'
for module in modules:
logging.info('TESTING MODULE: {:s}'.format(module))
logging.info(formatStr.format('name', 'result'))
cursor.execute("SELECT name, result, errcode, errmsg FROM test.run_module(%s)", (module,))
for row in cursor:
logging.info(formatStr.format(*row))
if row[2] != '' or row[3] != '':
logging.info('Error code: {2:s}\nError message: {3:s}'.format(*row))
del formatStr
del cursor, engine.conn
# Import of predefined data
if engine.args.predefinedData:
logging.info('Importing predefined data')
from subprocess import call
callArgs = ['python', 'import.py', '--log-filename', engine.args.logFilename]
if engine.args.devDatabase:
callArgs.append('--use-dev-database')
res = call(callArgs, cwd = path.join(srcDir, 'predefined-data'))
if res != 0:
exit(res)
| FIDATA/database-draft | install.py | Python | gpl-3.0 | 3,877 |
# Simple program to add the value of $1 and $5 bills in a wallet
'''
Multiple line comments can be placed between triple quotations.
'''
nFives = 2
nOnes = 3
total = (nFives * 5) + nOnes
print "The total is $" + str(total)
# Simple program to calculate how much you should be paid at work
rate = 10.00
totalHours = 45
regularHours = 40
overTime = totalHours - regularHours
salary = (regularHours * rate) + (overTime * rate * 1.5)
print "You will be paid $" + str(salary)
| geoffmomin/ScratchPad | Python/simplewallet.py | Python | gpl-3.0 | 479 |
import csv
import requests
import pandas as pd
from zipfile import ZipFile
from io import StringIO
URL = 'https://www.quandl.com/api/v3/databases/%(dataset)s/codes'
def dataset_url(dataset):
return URL % {'dataset': dataset}
def download_file(url):
r = requests.get(url)
if r.status_code == 200:
return StringIO(r.text)
def unzip(file_):
d = unzip_files(file_)
return d[list(d.keys())[0]]
def unzip_files(file_):
d = {}
with ZipFile(file_, 'r') as zipfile:
for filename in zipfile.namelist():
d[filename] = str(zipfile.read(filename))
return d
def csv_rows(str):
for row in csv.reader(StringIO(str)):
yield row
def csv_dicts(str, fieldnames=None):
for d in csv.DictReader(StringIO(str), fieldnames=fieldnames):
yield d
def get_symbols_list(dataset):
csv_ = unzip(download_file(dataset_url(dataset)))
return map(lambda x: x[0].replace(dataset + '/', ''), csv_rows(csv_))
def get_symbols_dict(dataset):
csv_ = unzip(download_file(dataset_url(dataset)))
return dict(csv_rows(csv_))
def get_symbols_df(dataset):
csv_ = unzip(download_file(dataset_url(dataset)))
df = pd.read_csv(StringIO(csv_), header=None, names=['symbol', 'company'])
    df.symbol = df.symbol.map(lambda x: x.replace(dataset + '/', ''))
df.company = df.company.map(lambda x: x.replace('Prices, Dividends, Splits and Trading Volume', ''))
return df
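# Usage sketch (editor's illustration; requires network access to quandl.com,
# and the 'WIKI' dataset code is an assumption for demonstration):
#     symbols = get_symbols_dict('WIKI')
#     print(symbols.get('WIKI/AAPL'))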
| briancappello/PyTradeLib | pytradelib/quandl/metadata.py | Python | gpl-3.0 | 1,451 |
# gensim modules
from gensim import utils
from gensim.models.doc2vec import LabeledSentence
from gensim.models import Doc2Vec
# numpy
import numpy
# shuffle
from random import shuffle
# logging
import logging
import os.path
import sys
import cPickle as pickle
program = os.path.basename(sys.argv[0])
logger = logging.getLogger(program)
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s')
logging.root.setLevel(level=logging.INFO)
logger.info("running %s" % ' '.join(sys.argv))
class LabeledLineSentence(object):
def __init__(self, sources):
self.sources = sources
flipped = {}
        # make sure that prefixes are unique
for key, value in sources.items():
if value not in flipped:
flipped[value] = [key]
else:
raise Exception('Non-unique prefix encountered')
def __iter__(self):
for source, prefix in self.sources.items():
with utils.smart_open(source) as fin:
for item_no, line in enumerate(fin):
yield LabeledSentence(utils.to_unicode(line).split(), [prefix + '_%s' % item_no])
def to_array(self):
self.sentences = []
for source, prefix in self.sources.items():
with utils.smart_open(source) as fin:
for item_no, line in enumerate(fin):
self.sentences.append(LabeledSentence(
utils.to_unicode(line).split(), [prefix + '_%s' % item_no]))
return self.sentences
def sentences_perm(self):
shuffle(self.sentences)
return self.sentences
#sources = {'test-neg.txt':'TEST_NEG', 'test-pos.txt':'TEST_POS', 'train-neg.txt':'TRAIN_NEG', 'train-pos.txt':'TRAIN_POS', 'train-unsup.txt':'TRAIN_UNS'}
# model.save('./enwiki_quality_train.d2v')  # saving belongs after training; no model exists yet at this point
model = Doc2Vec.load('./enwiki_quality_train.d2v')
def convert_array_to_string (data):
res = ""
for i in range(len(data)):
res = res + str (data[i])
if (i < len(data) - 1):
res = res + '\t'
return res
def write_array_to_file (file_name, array_data):
f = open (file_name, "w")
for i in range (len(array_data)):
f.write (str(array_data[i]) + "\n")
f.close ()
qualities = ['FA','GA','B','C','START','STUB']
train_labels = [0] * 23577  # preallocated: 23577 = expected number of training documents
train_content_file = "doc2vec_train_content_separated.txt"
train_label_file = "doc2vec_train_label_separated.txt"
train_cnt = 0
for i in range (len(qualities)):
for j in range (30000):
key = 'TRAIN_' + qualities[i] + "_" + str(j)
if key in model.docvecs:
data = model.docvecs[key]
if (len(data) == 500):
with open(train_content_file, "a") as myfile:
myfile.write(convert_array_to_string (data))
myfile.write("\n")
train_labels [train_cnt] = qualities[i]
train_cnt += 1
write_array_to_file (file_name = train_label_file, array_data = train_labels)
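# Reading the emitted files back (sketch): every line of the content file is a
# tab-separated 500-dimensional vector, every line of the label file one of the
# quality classes listed above.
# vectors = [line.strip().split('\t') for line in open(train_content_file)]
# labels = [line.strip() for line in open(train_label_file)]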
| vinhqdang/doc2vec_dnn_wikipedia | code/load_pre_train.py | Python | gpl-3.0 | 3,073 |
import pytest
import nengo
def pytest_funcarg__Simulator(request):
"""the Simulator class being tested.
Please use this, and not nengo.Simulator directly,
unless the test is reference simulator specific.
"""
return nengo.Simulator
def pytest_generate_tests(metafunc):
if "nl" in metafunc.funcargnames:
metafunc.parametrize("nl", [nengo.LIF, nengo.LIFRate, nengo.Direct])
if "nl_nodirect" in metafunc.funcargnames:
metafunc.parametrize("nl_nodirect", [nengo.LIF, nengo.LIFRate])
def pytest_addoption(parser):
parser.addoption('--benchmarks', action='store_true', default=False,
help='Also run benchmarking tests')
parser.addoption('--noexamples', action='store_false', default=True,
help='Do not run examples')
parser.addoption(
'--optional', action='store_true', default=False,
help='Also run optional tests that may use optional packages')
def pytest_runtest_setup(item):
for mark, option, message in [
('benchmark', 'benchmarks', "benchmarks not requested"),
('example', 'noexamples', "examples not requested"),
('optional', 'optional', "optional tests not requested")]:
if getattr(item.obj, mark, None) and not item.config.getvalue(option):
pytest.skip(message)
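# Sketch of a test that consumes these hooks (it would live in a regular test
# module, not in conftest.py; shown here only for illustration):
# def test_neuron_types(Simulator, nl):
#     assert Simulator is nengo.Simulator
#     assert nl in (nengo.LIF, nengo.LIFRate, nengo.Direct)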
| ZeitgeberH/nengo | nengo/tests/conftest.py | Python | gpl-3.0 | 1,348 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
import os
import socket
import sys
from argparse import ArgumentParser
from setproctitle import setproctitle
from amavisvt.config import Configuration
BUFFER_SIZE = 4096
logger = logging.getLogger(__name__)  # module-level logger so execute() also works when imported
class AmavisVTClient(object):
def __init__(self, socket_path):
self.config = Configuration()
self.socket_path = socket_path or self.config.socket_path
def execute(self, command, *arguments):
logger.debug("Executing command '%s' with args: %s", command, arguments)
translate = {
'ping': 'PING',
'scan': 'CONTSCAN',
'report': 'REPORT',
}
sock = None
try:
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.connect(self.socket_path)
# send absolute paths to amavisvtd
absolute_args = [os.path.abspath(p) for p in arguments]
s = "%s %s" % (translate.get(command, command.upper()), ' '.join(absolute_args))
payload = s.strip() + "\n"
sock.sendall(payload.encode('utf-8'))
data = sock.recv(BUFFER_SIZE)
return data.decode('utf-8')
finally:
if sock:
sock.close()
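# Programmatic usage sketch (the socket path below is an assumption; passing
# None or '' falls back to the path from the amavisvt configuration):
# client = AmavisVTClient('/var/run/amavisvtd.sock')
# print(client.execute('ping'))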
if __name__ == "__main__": # pragma: no cover
setproctitle("amavisvtd")
parser = ArgumentParser()
parser.add_argument('-v', '--verbose', action='count', help='Increase verbosity', default=2)
parser.add_argument('-d', '--debug', action='store_true', default=False, help='Send verbose log messages to stdout too')
parser.add_argument('-s', '--socket', help='Socket path')
parser.add_argument('command', choices=('ping', 'scan', 'report'))
parser.add_argument('command_args', nargs='*')
args = parser.parse_args()
logging.basicConfig(
level=logging.FATAL - (10 * args.verbose),
format='%(asctime)s %(levelname)-7s [%(threadName)s] %(message)s',
)
logger = logging.getLogger()
if not args.debug:
for h in logger.handlers:
h.setLevel(logging.ERROR)
if not args.command.lower() in ('ping', 'scan', 'report'):
print("Invalid command: %s" % args.command)
sys.exit(1)
error = False
try:
client = AmavisVTClient(args.socket)
response = client.execute(args.command, *tuple(args.command_args))
error = response.startswith('ERROR:')
print(response)
except Exception as ex:
error = True
logger.exception("Command '%s' failed", args.command)
print(ex)
finally:
sys.exit(int(error))
| ercpe/amavisvt | amavisvt/amavisvtc.py | Python | gpl-3.0 | 2,613 |
from biot import *
# display_wires(N_wires=6, r_wires=r_wires)
display_quiver()
display_particles(mode_name="boris_exact", colormap="Blues")
# display_particles(mode_name="RK4_exact", colormap="Reds")
print("Finished display")
mlab.show()
| StanczakDominik/PythonBiotSavart | plot.py | Python | gpl-3.0 | 240 |
#!/usr/bin/env python3
#
# Copyright (C) 2016 Canonical, Ltd.
# Author: Scott Sweeny <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 3.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
class Rule:
def __init__(self, path):
# Make sure path ends in a separator to make things easier
self.path = os.path.join(path, '')
def get_file_list(self):
'''Return a list of files in the snap'''
file_list = []
for root, dirs, files in os.walk(self.path):
for f in files:
file_list.append(os.path.relpath(os.path.join(root, f),
self.path))
return file_list
def get_dir_list(self):
'''Return a list of directories in the snap'''
dir_list = []
for root, dirs, files in os.walk(self.path):
for d in dirs:
dir_list.append(os.path.relpath(os.path.join(root, d),
self.path))
return dir_list
def scan(self):
'''Override this method to implement your rule checking logic'''
pass
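# Minimal concrete rule (sketch; 'NoPycRule' is hypothetical and not part of
# snaplint itself): it reuses get_file_list() to flag compiled Python files.
class NoPycRule(Rule):
    def scan(self):
        '''Return the .pyc files accidentally shipped in the snap'''
        return [f for f in self.get_file_list() if f.endswith('.pyc')]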
| ssweeny/snaplint | snaplint/_rule.py | Python | gpl-3.0 | 1,643 |
# -*- coding: utf-8 -*-
import numpy as np
import config
from feature.feature_multi import FeatureMulti
from similarity.similarity_base import SimiliarityBase
class SimilarityStyle(SimiliarityBase):
def calculate(self, image1, image2):
# 获取特征
multi_feature_extractor = FeatureMulti()
luminance_sample_p, mu_p, sigma_p = multi_feature_extractor.extract(image1)
luminance_sample_s, mu_s, sigma_s = multi_feature_extractor.extract(image2)
# 实际相似度计算
return self.__class__.calculate_inner(luminance_sample_p, luminance_sample_s, mu_p, mu_s, sigma_p, sigma_s)
@classmethod
def calculate_inner(cls, luminance_sample_p, luminance_sample_s, mu_p, mu_s, sigma_p, sigma_s):
# 参数
lambda_l = config.style_ranking['lambda_l']
lambda_c = config.style_ranking['lambda_c']
epsilon = config.style_ranking['epsilon']
# 求亮度之间的欧式距离
de = np.power(np.linalg.norm(luminance_sample_p - luminance_sample_s, 2), 2)
# 求色彩之间的距离
mu = np.matrix(np.abs(mu_p - mu_s) + epsilon).T
sigma = np.matrix((sigma_p + sigma_s) / 2)
dh_1 = np.power(np.linalg.norm(sigma_s.dot(sigma_p), 1), 1 / 4) / (np.power(np.linalg.norm(sigma, 1), 1 / 2))
dh_2 = (-1 / 8) * mu.T * np.linalg.inv(sigma) * mu
dh = 1 - dh_1 * np.exp(dh_2)
ans = np.exp(-de / lambda_l) * np.exp(-np.power(dh, 2) / lambda_c)
# 因为ans是一个 1x1 Matrix,所以必须弄成一个值
return np.max(ans)
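# Usage sketch with synthetic features (the shapes are assumptions; the real
# ones come from FeatureMulti.extract): flat luminance sample vectors plus
# 3-channel colour mean vectors and covariance matrices.
# lum_p, lum_s = np.random.rand(64), np.random.rand(64)
# mu_p, mu_s = np.random.rand(3), np.random.rand(3)
# score = SimilarityStyle.calculate_inner(lum_p, lum_s, mu_p, mu_s, np.eye(3), np.eye(3))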
| jinyu121/ACACTS | similarity/similarity_style.py | Python | gpl-3.0 | 1,581 |
#!/usr/bin/env python3
"""
Copyright 2020 Paul Willworth <[email protected]>
This file is part of Galaxy Harvester.
Galaxy Harvester is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Galaxy Harvester is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with Galaxy Harvester. If not, see <http://www.gnu.org/licenses/>.
"""
import os
import sys
import dbSession
import pymysql
import cgi
from http import cookies
import ghShared
import ghLists
import dbShared
#
form = cgi.FieldStorage()
C = cookies.SimpleCookie()
errorstr = ''
try:
C.load(os.environ['HTTP_COOKIE'])
except KeyError:
errorstr = 'no cookies\n'
if errorstr == '':
try:
currentUser = C['userID'].value
except KeyError:
currentUser = ''
try:
sid = C['gh_sid'].value
except KeyError:
sid = form.getfirst('gh_sid', '')
else:
currentUser = ''
sid = form.getfirst('gh_sid', '')
# Get a session
logged_state = 0
sess = dbSession.getSession(sid)
if (sess != ''):
logged_state = 1
currentUser = sess
groupType = form.getfirst('groupType', '')
profession = form.getfirst('profession', '')
craftingTab = form.getfirst('craftingTab', '')
resType = form.getfirst('resType', '')
resSecondary = form.getfirst('resSecondary', '')
resGroup = form.getfirst('resGroup', '')
selectSchematic = form.getfirst('selectSchematic', '')
listFormat = form.getfirst('listFormat', 'list')
galaxy = form.getfirst('galaxy', '')
# escape input to prevent sql injection
groupType = dbShared.dbInsertSafe(groupType)
profession = dbShared.dbInsertSafe(profession)
craftingTab = dbShared.dbInsertSafe(craftingTab)
resType = dbShared.dbInsertSafe(resType)
resSecondary = dbShared.dbInsertSafe(resSecondary)
resGroup = dbShared.dbInsertSafe(resGroup)
selectSchematic = dbShared.dbInsertSafe(selectSchematic)
listFormat = dbShared.dbInsertSafe(listFormat)
# groupType determines the filtering method for narrowing down schematic list
filterStr = ''
joinStr = ''
if (groupType == 'prof'):
if (profession.isdigit()):
filterStr = ' WHERE tSkillGroup.profID = ' + str(profession)
elif (groupType == 'tab'):
if (craftingTab.isdigit()):
filterStr = ' WHERE tSchematic.craftingTab = ' + str(craftingTab)
elif (groupType == 'res'):
if (resGroup != '' and resGroup != None):
if resSecondary == '1':
joinStr = ' INNER JOIN (SELECT resourceType FROM tResourceTypeGroup WHERE resourceGroup="' + resGroup + '") rtg ON ingredientObject = rtg.resourceType'
else:
filterStr = ' WHERE ingredientObject = "' + resGroup + '"'
else:
if resSecondary == '1':
filterStr = ' WHERE ingredientObject IN (SELECT tResourceTypeGroup.resourceGroup FROM tResourceTypeGroup INNER JOIN tResourceGroup ON tResourceTypeGroup.resourceGroup=tResourceGroup.resourceGroup WHERE resourceType="' + resType + '" AND groupLevel=(SELECT Max(rg.groupLevel) FROM tResourceTypeGroup rtg INNER JOIN tResourceGroup rg ON rtg.resourceGroup = rg.resourceGroup WHERE rtg.resourceType="' + resType + '") GROUP BY tResourceTypeGroup.resourceGroup)'
joinStr = ' INNER JOIN (SELECT tResourceTypeGroup.resourceGroup FROM tResourceTypeGroup INNER JOIN tResourceGroup ON tResourceTypeGroup.resourceGroup=tResourceGroup.resourceGroup WHERE resourceType="' + resType + '" AND groupLevel=(SELECT Max(rg.groupLevel) FROM tResourceTypeGroup rtg INNER JOIN tResourceGroup rg ON rtg.resourceGroup = rg.resourceGroup WHERE rtg.resourceType="' + resType + '") GROUP BY tResourceTypeGroup.resourceGroup) rtgg ON ingredientObject = rtgg.resourceGroup'
else:
filterStr = ' WHERE ingredientObject = "' + resType + '"'
elif (groupType == 'favorite'):
filterStr = ' WHERE tFavorites.userID = "' + currentUser + '" AND favType = 4'
joinStr = ' INNER JOIN tFavorites ON tSchematic.schematicID = tFavorites.favGroup'
conn = dbShared.ghConn()
# Some schematics are custom entered per galaxy but those with galaxyID 0 are for all
if galaxy.isdigit():
baseProfs = '0, 1337'
checkCursor = conn.cursor()
if (checkCursor):
checkCursor.execute('SELECT galaxyNGE FROM tGalaxy WHERE galaxyID={0};'.format(str(galaxy)))
checkRow = checkCursor.fetchone()
if (checkRow != None) and (checkRow[0] > 0):
baseProfs = '-1, 1337'
checkCursor.close()
filterStr = filterStr + ' AND tSchematic.galaxy IN ({1}, {0}) AND tSchematic.schematicID NOT IN (SELECT schematicID FROM tSchematicOverrides WHERE galaxyID={0})'.format(galaxy, baseProfs)
# We output an unordered list or a bunch of select element options depending on listFormat
currentGroup = ''
currentIngredient = ''
print('Content-type: text/html\n')
if listFormat != 'option':
print(' <ul class="schematics">')
cursor = conn.cursor()
if (cursor):
if (groupType == 'tab' or groupType == 'favorite'):
sqlStr1 = 'SELECT schematicID, tSchematic.craftingTab, typeName, schematicName FROM tSchematic INNER JOIN tObjectType ON tSchematic.objectType = tObjectType.objectType' + joinStr + filterStr + ' ORDER BY craftingTab, typeName, schematicName'
elif (groupType == 'res'):
sqlStr1 = 'SELECT DISTINCT tSchematic.schematicID, tSchematic.craftingTab, typeName, schematicName, ingredientObject, res.resName FROM tSchematic INNER JOIN tObjectType ON tSchematic.objectType = tObjectType.objectType INNER JOIN tSchematicIngredients ON tSchematic.schematicID = tSchematicIngredients.schematicID' + joinStr + ' LEFT JOIN (SELECT resourceGroup AS resID, groupName AS resName FROM tResourceGroup UNION ALL SELECT resourceType, resourceTypeName FROM tResourceType) res ON ingredientObject = res.resID' + filterStr + ' ORDER BY res.resName, craftingTab, typeName, schematicName'
else:
sqlStr1 = 'SELECT schematicID, profName, skillGroupName, schematicName FROM tSchematic INNER JOIN tSkillGroup ON tSchematic.skillGroup = tSkillGroup.skillGroup LEFT JOIN tProfession ON tSkillGroup.profID = tProfession.profID' + joinStr + filterStr
if listFormat == 'option':
sqlStr1 += ' ORDER BY schematicName'
else:
sqlStr1 += ' ORDER BY profName, skillGroupName, schematicName'
cursor.execute(sqlStr1)
row = cursor.fetchone()
if (row == None):
print(' <li><h3>No Schematics Found</h3></li>')
while (row != None):
if listFormat == 'option':
print('<option value="' + row[0] + '">' + row[3] + '</option>')
else:
if (groupType == 'res'):
if (currentIngredient != row[5]):
print(' </ul>')
print(' <div style="margin-top:14px;"><a class="bigLink" href="' + ghShared.BASE_SCRIPT_URL + 'resourceType.py/' + str(row[4]) + '">' + str(row[5]) + '</a></div>')
print(' <ul class="schematics">')
currentIngredient = row[5]
currentGroup = ''
if (currentGroup != row[2]):
print(' <li><h3>' + row[2] + '</h3></li>')
currentGroup = row[2]
if row[0] == selectSchematic:
print(' <li class="listSelected"><a href="' + ghShared.BASE_SCRIPT_URL + 'schematics.py/' + row[0] + '">' + row[3] + '</a></li>')
else:
print(' <li><a href="' + ghShared.BASE_SCRIPT_URL + 'schematics.py/' + row[0] + '">' + row[3] + '</a></li>')
row = cursor.fetchone()
cursor.close()
conn.close()
if listFormat != 'option':
print(' </ul>')
| pwillworth/galaxyharvester | html/getSchematicList.py | Python | gpl-3.0 | 7,494 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# -*- Python -*-
"""
@file outTemp.py
@brief ModuleDescription
@date $Date$
"""
import sys
import time
sys.path.append(".")
# Import RTM module
import RTC
import OpenRTM_aist
# Import Service implementation class
# <rtc-template block="service_impl">
# </rtc-template>
# Import Service stub modules
# <rtc-template block="consumer_import">
# </rtc-template>
# This module's specification
# <rtc-template block="module_spec">
outtemp_spec = ["implementation_id", "outTemp",
"type_name", "outTemp",
"description", "ModuleDescription",
"version", "1.0.0",
"vendor", "VenderName",
"category", "Category",
"activity_type", "STATIC",
"max_instance", "1",
"language", "Python",
"lang_type", "SCRIPT",
""]
# </rtc-template>
##
# @class outTemp
# @brief ModuleDescription
#
# Gets the original temperature and outputs it in Celsius.
#
#
class outTemp(OpenRTM_aist.DataFlowComponentBase):
##
# @brief constructor
    # @param manager Manager Object
#
def __init__(self, manager):
OpenRTM_aist.DataFlowComponentBase.__init__(self, manager)
origin_Temp_arg = [None] * ((len(RTC._d_TimedDouble) - 4) / 2)
self._d_origin_Temp = RTC.TimedDouble(*origin_Temp_arg)
"""
"""
self._origin_TempIn = OpenRTM_aist.InPort("origin_Temp", self._d_origin_Temp,OpenRTM_aist.RingBuffer(1))
# initialize of configuration-data.
# <rtc-template block="init_conf_param">
# </rtc-template>
##
#
# The initialize action (on CREATED->ALIVE transition)
    # former rtc_init_entry()
#
# @return RTC::ReturnCode_t
#
#
def onInitialize(self):
        # Bind variables and configuration variables
print "onInitialize"
print
# Set InPort buffers
self.addInPort("origin_Temp",self._origin_TempIn)
# Set OutPort buffers
# Set service provider to Ports
# Set service consumers to Ports
# Set CORBA Service Ports
return RTC.RTC_OK
# ##
# #
# # The finalize action (on ALIVE->END transition)
    # # former rtc_exiting_entry()
# #
# # @return RTC::ReturnCode_t
#
# #
#def onFinalize(self):
#
# return RTC.RTC_OK
# ##
# #
# # The startup action when ExecutionContext startup
# # former rtc_starting_entry()
# #
# # @param ec_id target ExecutionContext Id
# #
# # @return RTC::ReturnCode_t
# #
# #
#def onStartup(self, ec_id):
#
# return RTC.RTC_OK
# ##
# #
# # The shutdown action when ExecutionContext stop
# # former rtc_stopping_entry()
# #
# # @param ec_id target ExecutionContext Id
# #
# # @return RTC::ReturnCode_t
# #
# #
#def onShutdown(self, ec_id):
#
# return RTC.RTC_OK
# ##
# #
# # The activated action (Active state entry action)
# # former rtc_active_entry()
# #
# # @param ec_id target ExecutionContext Id
# #
# # @return RTC::ReturnCode_t
# #
# #
#def onActivated(self, ec_id):
#
# return RTC.RTC_OK
# ##
# #
# # The deactivated action (Active state exit action)
# # former rtc_active_exit()
# #
# # @param ec_id target ExecutionContext Id
# #
# # @return RTC::ReturnCode_t
# #
# #
#def onDeactivated(self, ec_id):
#
# return RTC.RTC_OK
##
#
# The execution action that is invoked periodically
# former rtc_active_do()
#
# @param ec_id target ExecutionContext Id
#
# @return RTC::ReturnCode_t
#
#
def onExecute(self, ec_id):
        # if(self._origin_TempIn.isNew()):
        self._d_origin_Temp = self._origin_TempIn.read()
temp = self._d_origin_Temp.data
#print "Temp: %4.2lf" % self._d_origin_Temp.data
#print self._d_origin_Temp.data
print temp
#else:
# print "no new data"
time.sleep(5)
return RTC.RTC_OK
# ##
# #
# # The aborting action when main logic error occurred.
# # former rtc_aborting_entry()
# #
# # @param ec_id target ExecutionContext Id
# #
# # @return RTC::ReturnCode_t
# #
# #
#def onAborting(self, ec_id):
#
# return RTC.RTC_OK
# ##
# #
# # The error action in ERROR state
# # former rtc_error_do()
# #
# # @param ec_id target ExecutionContext Id
# #
# # @return RTC::ReturnCode_t
# #
# #
#def onError(self, ec_id):
#
# return RTC.RTC_OK
# ##
# #
# # The reset action that is invoked resetting
# # This is same but different the former rtc_init_entry()
# #
# # @param ec_id target ExecutionContext Id
# #
# # @return RTC::ReturnCode_t
# #
# #
#def onReset(self, ec_id):
#
# return RTC.RTC_OK
# ##
# #
# # The state update action that is invoked after onExecute() action
# # no corresponding operation exists in OpenRTm-aist-0.2.0
# #
# # @param ec_id target ExecutionContext Id
# #
# # @return RTC::ReturnCode_t
# #
# #
#def onStateUpdate(self, ec_id):
#
# return RTC.RTC_OK
# ##
# #
# # The action that is invoked when execution context's rate is changed
# # no corresponding operation exists in OpenRTm-aist-0.2.0
# #
# # @param ec_id target ExecutionContext Id
# #
# # @return RTC::ReturnCode_t
# #
# #
#def onRateChanged(self, ec_id):
#
# return RTC.RTC_OK
def outTempInit(manager):
profile = OpenRTM_aist.Properties(defaults_str=outtemp_spec)
manager.registerFactory(profile,
outTemp,
OpenRTM_aist.Delete)
def MyModuleInit(manager):
outTempInit(manager)
# Create a component
comp = manager.createComponent("outTemp")
def main():
mgr = OpenRTM_aist.Manager.init(sys.argv)
mgr.setModuleInitProc(MyModuleInit)
mgr.activateManager()
mgr.runManager()
if __name__ == "__main__":
main()
| max-koara/OutTemp | outTemp.py | Python | gpl-3.0 | 5,559 |
# Paperwork - Using OCR to grep dead trees the easy way
# Copyright (C) 2014 Jerome Flesch
#
# Paperwork is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Paperwork is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Paperwork. If not, see <http://www.gnu.org/licenses/>.
import os
import logging
from gi.repository import Gdk
from gi.repository import GdkPixbuf
from gi.repository import Gtk
from paperwork.backend.labels import Label
from paperwork.frontend.util import load_uifile
from paperwork.frontend.util.actions import SimpleAction
logger = logging.getLogger(__name__)
DROPPER_BITS = (
"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\377\377\377\377\377\377\377\377\377\377"
"\377\377\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\377\377\377\377\0\0\0\377"
"\0\0\0\377\0\0\0\377\377\377\377\377\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\377\377\377"
"\377\0\0\0\377\0\0\0\377\0\0\0\377\0\0\0\377\0\0\0\377\377\377\377\377"
"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\377"
"\377\377\377\377\377\377\377\377\377\377\377\0\0\0\377\0\0\0\377\0\0"
"\0\377\0\0\0\377\0\0\0\377\377\377\377\377\0\0\0\0\0\0\0\0\0\0\0\0\0"
"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\377\377\377\377\0\0\0\377\0\0\0\377\0"
"\0\0\377\0\0\0\377\0\0\0\377\0\0\0\377\0\0\0\377\0\0\0\377\377\377\377"
"\377\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
"\377\377\377\377\0\0\0\377\0\0\0\377\0\0\0\377\0\0\0\377\0\0\0\377\0"
"\0\0\377\377\377\377\377\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\377\377\377\377\377\0\0\0\377\0\0"
"\0\377\0\0\0\377\377\377\377\377\377\377\377\377\0\0\0\0\0\0\0\0\0\0"
"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\377\377\377"
"\377\377\377\377\377\377\377\377\377\377\0\0\0\377\0\0\0\377\377\377"
"\377\377\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
"\0\0\0\0\0\0\0\0\0\377\377\377\377\377\377\377\377\377\377\377\377\377"
"\0\0\0\377\377\377\377\377\0\0\0\377\377\377\377\377\0\0\0\0\0\0\0\0"
"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\377\377\377\377"
"\377\377\377\377\377\377\377\377\377\0\0\0\377\0\0\0\0\0\0\0\0\377\377"
"\377\377\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
"\0\0\0\0\0\377\377\377\377\377\377\377\377\377\377\377\377\377\0\0\0"
"\377\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\377\377\377\377\377\377\377\377\377\377"
"\377\377\377\0\0\0\377\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\377\377\377\377\377"
"\377\377\377\377\377\377\377\377\0\0\0\377\0\0\0\0\0\0\0\0\0\0\0\0\0"
"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
"\377\377\377\377\377\377\377\377\377\377\377\377\377\0\0\0\377\0\0\0"
"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\377\377\377\377\377\377\377\377\377\0\0"
"\0\377\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\377\0\0\0\0\0\0\0\377\0\0\0"
"\377\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\377\0\0\0\0\0"
"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0"
)
DROPPER_WIDTH = 17
DROPPER_HEIGHT = 17
DROPPER_X_HOT = 2
DROPPER_Y_HOT = 16
class PickColorAction(SimpleAction):
"""
Hack taken from libgtk3/gtk/deprecated/gtkcolorsel.c
"""
def __init__(self, label_editor):
super(PickColorAction, self).__init__("Pick color")
self.__editor = label_editor
self.__dropper_grab_widget = None
self.__grab_time = None
self.__has_grab = False
self.__pointer_device = None
def do(self):
self._get_screen_color()
def _make_picker_cursor(self, display):
        # XXX(Jflesch): ... does not work
try:
return Gdk.Cursor.new_from_name(display, "color-picker")
except TypeError:
pass
try:
# happens when new_from_name returns NULL at C level
pixbuf = GdkPixbuf.Pixbuf.new_from_data(
DROPPER_BITS, GdkPixbuf.Colorspace.RGB, True, 8,
DROPPER_WIDTH, DROPPER_HEIGHT,
DROPPER_WIDTH * 4
)
cursor = Gdk.Cursor.new_from_pixbuf(display, pixbuf,
DROPPER_X_HOT, DROPPER_Y_HOT)
return cursor
except TypeError:
pass
return None
def _get_screen_color(self):
time = Gtk.get_current_event_time()
screen = self.__editor._pick_button.get_screen()
display = self.__editor._pick_button.get_display()
# XXX(JFlesch): Assumption: mouse is used
pointer_device = Gtk.get_current_event_device()
if not self.__dropper_grab_widget:
self.__dropper_grab_widget = Gtk.Window.new(Gtk.WindowType.POPUP)
self.__dropper_grab_widget.set_screen(screen)
self.__dropper_grab_widget.resize(1, 1)
self.__dropper_grab_widget.move(-100, -100)
self.__dropper_grab_widget.show()
self.__dropper_grab_widget.add_events(
Gdk.EventMask.BUTTON_RELEASE_MASK
)
toplevel = self.__editor._pick_button.get_toplevel()
if isinstance(toplevel, Gtk.Window):
if toplevel.has_group():
toplevel.get_group().add_window(self.__dropper_grab_widget)
window = self.__dropper_grab_widget.get_window()
picker_cursor = self._make_picker_cursor(display)
if (pointer_device.grab(
window,
Gdk.GrabOwnership.APPLICATION, False,
Gdk.EventMask.BUTTON_RELEASE_MASK,
picker_cursor, time) != Gdk.GrabStatus.SUCCESS):
logger.warning("Pointer device grab failed !")
return
Gtk.device_grab_add(self.__dropper_grab_widget, pointer_device, True)
self.__grab_time = time
self.__pointer_device = pointer_device
self.__has_grab = True
self.__dropper_grab_widget.connect("button-release-event",
self._on_mouse_release)
def _grab_color_at_pointer(self, screen, device, x, y):
root_window = screen.get_root_window()
pixbuf = Gdk.pixbuf_get_from_window(root_window, x, y, 1, 1)
# XXX(Jflesch): bad shortcut here ...
pixels = pixbuf.get_pixels()
rgb = (
float(ord(pixels[0]) * 0x101) / 65535,
float(ord(pixels[1]) * 0x101) / 65535,
float(ord(pixels[2]) * 0x101) / 65535,
)
logger.info("Picked color: %s" % str(rgb))
return rgb
def _on_mouse_release(self, invisible_widget, event):
if not self.__has_grab:
return
try:
color = self._grab_color_at_pointer(
event.get_screen(), event.get_device(),
event.x_root, event.y_root
)
self.__editor._color_chooser.set_rgba(
Gdk.RGBA(
red=color[0],
green=color[1],
blue=color[2],
alpha=1.0
)
)
finally:
self.__pointer_device.ungrab(self.__grab_time)
Gtk.device_grab_remove(self.__dropper_grab_widget,
self.__pointer_device)
self.__has_grab = False
self.__pointer_device = None
class LabelEditor(object):
"""
Dialog to create / edit labels
"""
def __init__(self, label_to_edit=None):
if label_to_edit is None:
label_to_edit = Label()
self.label = label_to_edit
self.__ok_button = None
def edit(self, main_window):
"""
Open the edit dialog, and update the label according to user changes
"""
widget_tree = load_uifile(
os.path.join("labeleditor", "labeleditor.glade"))
dialog = widget_tree.get_object("dialogLabelEditor")
dialog.set_transient_for(main_window)
self.__ok_button = widget_tree.get_object("buttonOk")
self._pick_button = widget_tree.get_object("buttonPickColor")
PickColorAction(self).connect([self._pick_button])
self._color_chooser = widget_tree.get_object("labelColorChooser")
self._color_chooser.set_rgba(self.label.color)
name_entry = widget_tree.get_object("entryLabelName")
name_entry.connect("changed", self.__on_label_entry_changed)
name_entry.set_text(self.label.name)
response = dialog.run()
if (response == Gtk.ResponseType.OK
and name_entry.get_text().strip() == ""):
response = Gtk.ResponseType.CANCEL
if (response == Gtk.ResponseType.OK):
logger.info("Label validated")
self.label.name = unicode(name_entry.get_text(), encoding='utf-8')
self.label.color = self._color_chooser.get_rgba()
else:
logger.info("Label editing cancelled")
dialog.destroy()
logger.info("Label after editing: %s" % self.label)
return (response == Gtk.ResponseType.OK)
def __on_label_entry_changed(self, label_entry):
txt = unicode(label_entry.get_text(), encoding='utf-8').strip()
ok_enabled = True
ok_enabled = ok_enabled and txt != u""
ok_enabled = ok_enabled and u"," not in txt
self.__ok_button.set_sensitive(ok_enabled)
| kschwank/paperwork | src/paperwork/frontend/labeleditor/__init__.py | Python | gpl-3.0 | 10,471 |
""" SicPy
"""
# __version__ = '0.1'
# __author__ = 'disrupts'
# __license__ = 'GPLv3'
# Cryptobox should only be used to implement ciphers
#from sicpy.cryptobox import Cryptobox
# Ciphers are imported directly with sicpy
# not requiring an aditional import
from sicpy.ciphers.caesar import Caesar
from sicpy.ciphers.vigenere import Vigenere
from sicpy.ciphers.railfence import RailFence
# Alphabet can be used to build alphabets easily
# >>> myalpha = Alphabet('ABSDE')
from sicpy.alphabet import Alphabet
# LATIN is imported by cryptobox.py
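# Usage sketch (hedged): pass a custom alphabet to a cipher; the 'alphabet'
# keyword is an assumption about the cipher constructors, for illustration only.
# >>> caesar = Caesar(alphabet=Alphabet('ABCDE'))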
| disrupts/SicPy | sicpy/__init__.py | Python | gpl-3.0 | 557 |
# Copyright 2014-2020 by Christopher C. Little.
# This file is part of Abydos.
#
# Abydos is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Abydos is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Abydos. If not, see <http://www.gnu.org/licenses/>.
"""abydos.tests.distance.test_distance_ncd_bwtrle.
This module contains unit tests for abydos.distance.NCDbwtrle
"""
import unittest
from abydos.distance import NCDbwtrle
class NCDbwtrleTestCases(unittest.TestCase):
"""Test compression distance functions.
abydos.distance.NCDbwtrle
"""
cmp = NCDbwtrle()
def test_ncd_bwtrle_dist(self):
"""Test abydos.distance.NCDbwtrle.dist."""
self.assertEqual(self.cmp.dist('', ''), 0)
self.assertGreater(self.cmp.dist('a', ''), 0)
self.assertGreater(self.cmp.dist('abcdefg', 'fg'), 0)
self.assertAlmostEqual(self.cmp.dist('abc', 'abc'), 0)
self.assertAlmostEqual(self.cmp.dist('abc', 'def'), 0.75)
self.assertAlmostEqual(
self.cmp.dist('banana', 'banane'), 0.57142857142
)
self.assertAlmostEqual(self.cmp.dist('bananas', 'bananen'), 0.5)
def test_ncd_bwtrle_sim(self):
"""Test abydos.distance.NCDbwtrle.sim."""
self.assertEqual(self.cmp.sim('', ''), 1)
self.assertLess(self.cmp.sim('a', ''), 1)
self.assertLess(self.cmp.sim('abcdefg', 'fg'), 1)
self.assertAlmostEqual(self.cmp.sim('abc', 'abc'), 1)
self.assertAlmostEqual(self.cmp.sim('abc', 'def'), 0.25)
self.assertAlmostEqual(self.cmp.sim('banana', 'banane'), 0.42857142857)
self.assertAlmostEqual(self.cmp.sim('bananas', 'bananen'), 0.5)
if __name__ == '__main__':
unittest.main()
| chrislit/abydos | tests/distance/test_distance_ncd_bwtrle.py | Python | gpl-3.0 | 2,159 |
# -*- coding: utf-8 -*-
# This file is part of BBFlux (BackBox XFCE -> FluxBox Menu Automatic Update Daemon).
#
# Copyright(c) 2010-2011 Simone Margaritelli
# [email protected] - [email protected]
# http://www.evilsocket.net
# http://www.backbox.org
#
# This file may be licensed under the terms of of the
# GNU General Public License Version 2 (the ``GPL'').
#
# Software distributed under the License is distributed
# on an ``AS IS'' basis, WITHOUT WARRANTY OF ANY KIND, either
# express or implied. See the GPL for the specific language
# governing rights and limitations.
#
# You should have received a copy of the GPL along with this
# program. If not, go to http://www.gnu.org/licenses/gpl.html
# or write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import os
import fnmatch
class IconParser:
__instance = None
def __init__(self):
self.cache = {}
def __findIcon( self, path, pattern ):
for root, dirnames, filenames in os.walk( path ):
for filename in fnmatch.filter( filenames, pattern ):
return os.path.join(root, filename)
return None
    def getIconByName( self, name ):
        if name is None or name == '':
            return None
        name = name.replace( '.png', '' )
        if name[0] == '/':
return name
elif self.cache.has_key(name):
return self.cache[name]
else:
if os.path.exists( '/usr/share/pixmaps/' + name + '.png' ):
self.cache[name] = '/usr/share/pixmaps/' + name + '.png'
return '/usr/share/pixmaps/' + name + '.png'
elif os.path.exists( '/usr/share/pixmaps/' + name + '.xpm' ):
self.cache[name] = '/usr/share/pixmaps/' + name + '.xpm'
return '/usr/share/pixmaps/' + name + '.xpm'
else:
icon = self.__findIcon( '/usr/share/icons', name + '.png' )
if icon is not None:
self.cache[name] = icon
else:
icon = self.__findIcon( '/usr/share/icons', name + '.xpm' )
if icon is not None:
self.cache[name] = icon
return icon
@classmethod
def getInstance(cls):
if cls.__instance is None:
cls.__instance = IconParser()
return cls.__instance | evilsocket/BBFlux | parsers/IconParser.py | Python | gpl-3.0 | 2,214 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Test thorns.waves module.
"""
from __future__ import division, absolute_import, print_function
__author__ = "Marek Rudnicki"
import numpy as np
from numpy.testing import assert_equal
import thorns.waves as wv
def test_electrical_pulse_charge():
durations = [1, 1, 1]
amplitudes = [-1, 2, -1]
fs = 100
pulse = wv.electrical_pulse(
fs=fs,
amplitudes=amplitudes,
durations=durations,
charge=1
)
charge = np.sum(np.abs(pulse))/fs
assert_equal(charge, 1)
def test_electrical_amplitudes_2():
durations = [1, 0.5]
amplitudes = wv.electrical_amplitudes(
durations=durations,
polarity=1,
)
assert_equal(amplitudes, [0.5, -1])
def test_electrical_amplitudes_3():
durations = [0.5, 1, 0.5]
ratio = 0.3
polarity = 1
amplitudes = wv.electrical_amplitudes(
durations=durations,
polarity=polarity,
ratio=ratio
)
assert_equal(amplitudes, [0.3, -0.5, 0.7])
| timtammittee/thorns | tests/test_waves.py | Python | gpl-3.0 | 1,056 |
# -*- coding: utf-8 -*-
# Copyright (C) 2005 Osmo Salomaa
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import gaupol
from gi.repository import Gtk
class _TestPositionTransformDialog(gaupol.TestCase):
def run_dialog(self):
self.dialog.run()
self.dialog.destroy()
def test__on_response(self):
self.dialog.response(Gtk.ResponseType.OK)
class TestFrameTransformDialog(_TestPositionTransformDialog):
def setup_method(self, method):
self.application = self.new_application()
self.dialog = gaupol.FrameTransformDialog(
self.application.window, self.application)
self.dialog.show()
class TestTimeTransformDialog(_TestPositionTransformDialog):
def setup_method(self, method):
self.application = self.new_application()
self.dialog = gaupol.TimeTransformDialog(
self.application.window, self.application)
self.dialog.show()
| otsaloma/gaupol | gaupol/dialogs/test/test_position_transform.py | Python | gpl-3.0 | 1,527 |
import odoo.tests
@odoo.tests.common.at_install(False)
@odoo.tests.common.post_install(True)
class TestUi(odoo.tests.HttpCase):
def test_admin(self):
self.phantom_js("/", "odoo.__DEBUG__.services['web.Tour'].run('event_buy_tickets', 'test')", "odoo.__DEBUG__.services['web.Tour'].tours.event_buy_tickets", login="admin")
def test_demo(self):
self.phantom_js("/", "odoo.__DEBUG__.services['web.Tour'].run('event_buy_tickets', 'test')", "odoo.__DEBUG__.services['web.Tour'].tours.event_buy_tickets", login="demo")
def test_public(self):
self.phantom_js("/", "odoo.__DEBUG__.services['web.Tour'].run('event_buy_tickets', 'test')", "odoo.__DEBUG__.services['web.Tour'].tours.event_buy_tickets")
| ChawalitK/odoo | addons/website_event_sale/tests/test_ui.py | Python | gpl-3.0 | 731 |
#!/usr/bin/python
#
# Copyright 2016 Red Hat | Ansible
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: docker_container
short_description: manage docker containers
description:
- Manage the life cycle of docker containers.
- Supports check mode. Run with C(--check) and C(--diff) to view config difference and list of actions to be taken.
version_added: "2.1"
notes:
- For most config changes, the container needs to be recreated, i.e. the existing container has to be destroyed and
a new one created. This can cause unexpected data loss and downtime. You can use the I(comparisons) option to
prevent this.
- If the module needs to recreate the container, it will only use the options provided to the module to create the
new container (except I(image)). Therefore, always specify *all* options relevant to the container.
- When I(restart) is set to C(true), the module will only restart the container if no config changes are detected.
Please note that several options have default values; if the container to be restarted uses different values for
these options, it will be recreated instead. The options with default values which can cause this are I(auto_remove),
I(detach), I(init), I(interactive), I(memory), I(paused), I(privileged), I(read_only) and I(tty). This behavior
can be changed by setting I(container_default_behavior) to C(no_defaults), which will be the default value from
Ansible 2.14 on.
options:
auto_remove:
description:
- Enable auto-removal of the container on daemon side when the container's process exits.
      - If I(container_default_behavior) is set to C(compatibility) (the default value), this
option has a default of C(no).
type: bool
version_added: "2.4"
blkio_weight:
description:
- Block IO (relative weight), between 10 and 1000.
type: int
capabilities:
description:
- List of capabilities to add to the container.
type: list
elements: str
cap_drop:
description:
- List of capabilities to drop from the container.
type: list
elements: str
version_added: "2.7"
cleanup:
description:
- Use with I(detach=false) to remove the container after successful execution.
type: bool
default: no
version_added: "2.2"
command:
description:
- Command to execute when the container starts. A command may be either a string or a list.
- Prior to version 2.4, strings were split on commas.
type: raw
comparisons:
description:
- Allows to specify how properties of existing containers are compared with
module options to decide whether the container should be recreated / updated
or not.
- Only options which correspond to the state of a container as handled by the
Docker daemon can be specified, as well as C(networks).
- Must be a dictionary specifying for an option one of the keys C(strict), C(ignore)
and C(allow_more_present).
- If C(strict) is specified, values are tested for equality, and changes always
result in updating or restarting. If C(ignore) is specified, changes are ignored.
- C(allow_more_present) is allowed only for lists, sets and dicts. If it is
specified for lists or sets, the container will only be updated or restarted if
the module option contains a value which is not present in the container's
options. If the option is specified for a dict, the container will only be updated
or restarted if the module option contains a key which isn't present in the
container's option, or if the value of a key present differs.
- The wildcard option C(*) can be used to set one of the default values C(strict)
or C(ignore) to *all* comparisons which are not explicitly set to other values.
- See the examples for details.
type: dict
version_added: "2.8"
container_default_behavior:
description:
- Various module options used to have default values. This causes problems with
containers which use different values for these options.
- The default value is C(compatibility), which will ensure that the default values
are used when the values are not explicitly specified by the user.
- From Ansible 2.14 on, the default value will switch to C(no_defaults). To avoid
deprecation warnings, please set I(container_default_behavior) to an explicit
value.
- This affects the I(auto_remove), I(detach), I(init), I(interactive), I(memory),
I(paused), I(privileged), I(read_only) and I(tty) options.
type: str
choices:
- compatibility
- no_defaults
version_added: "2.10"
cpu_period:
description:
- Limit CPU CFS (Completely Fair Scheduler) period.
- See I(cpus) for an easier to use alternative.
type: int
cpu_quota:
description:
- Limit CPU CFS (Completely Fair Scheduler) quota.
- See I(cpus) for an easier to use alternative.
type: int
cpus:
description:
- Specify how much of the available CPU resources a container can use.
- A value of C(1.5) means that at most one and a half CPU (core) will be used.
type: float
version_added: '2.10'
cpuset_cpus:
description:
- CPUs in which to allow execution C(1,3) or C(1-3).
type: str
cpuset_mems:
description:
- Memory nodes (MEMs) in which to allow execution C(0-3) or C(0,1).
type: str
cpu_shares:
description:
- CPU shares (relative weight).
type: int
detach:
description:
- Enable detached mode to leave the container running in background.
- If disabled, the task will reflect the status of the container run (failed if the command failed).
      - If I(container_default_behavior) is set to C(compatibility) (the default value), this
option has a default of C(yes).
type: bool
devices:
description:
- List of host device bindings to add to the container.
- "Each binding is a mapping expressed in the format C(<path_on_host>:<path_in_container>:<cgroup_permissions>)."
type: list
elements: str
device_read_bps:
description:
- "List of device path and read rate (bytes per second) from device."
type: list
elements: dict
suboptions:
path:
description:
- Device path in the container.
type: str
required: yes
rate:
description:
- "Device read limit in format C(<number>[<unit>])."
- "Number is a positive integer. Unit can be one of C(B) (byte), C(K) (kibibyte, 1024B), C(M) (mebibyte), C(G) (gibibyte),
C(T) (tebibyte), or C(P) (pebibyte)."
- "Omitting the unit defaults to bytes."
type: str
required: yes
version_added: "2.8"
device_write_bps:
description:
- "List of device and write rate (bytes per second) to device."
type: list
elements: dict
suboptions:
path:
description:
- Device path in the container.
type: str
required: yes
rate:
description:
- "Device read limit in format C(<number>[<unit>])."
- "Number is a positive integer. Unit can be one of C(B) (byte), C(K) (kibibyte, 1024B), C(M) (mebibyte), C(G) (gibibyte),
C(T) (tebibyte), or C(P) (pebibyte)."
- "Omitting the unit defaults to bytes."
type: str
required: yes
version_added: "2.8"
device_read_iops:
description:
- "List of device and read rate (IO per second) from device."
type: list
elements: dict
suboptions:
path:
description:
- Device path in the container.
type: str
required: yes
rate:
description:
- "Device read limit."
- "Must be a positive integer."
type: int
required: yes
version_added: "2.8"
device_write_iops:
description:
- "List of device and write rate (IO per second) to device."
type: list
elements: dict
suboptions:
path:
description:
- Device path in the container.
type: str
required: yes
rate:
description:
- "Device read limit."
- "Must be a positive integer."
type: int
required: yes
version_added: "2.8"
dns_opts:
description:
- List of DNS options.
type: list
elements: str
dns_servers:
description:
- List of custom DNS servers.
type: list
elements: str
dns_search_domains:
description:
- List of custom DNS search domains.
type: list
elements: str
domainname:
description:
- Container domainname.
type: str
version_added: "2.5"
env:
description:
- Dictionary of key,value pairs.
- Values which might be parsed as numbers, booleans or other types by the YAML parser must be quoted (e.g. C("true")) in order to avoid data loss.
type: dict
env_file:
description:
- Path to a file, present on the target, containing environment variables I(FOO=BAR).
- If variable also present in I(env), then the I(env) value will override.
type: path
version_added: "2.2"
entrypoint:
description:
- Command that overwrites the default C(ENTRYPOINT) of the image.
type: list
elements: str
etc_hosts:
description:
- Dict of host-to-IP mappings, where each host name is a key in the dictionary.
Each host name will be added to the container's C(/etc/hosts) file.
type: dict
exposed_ports:
description:
- List of additional container ports which informs Docker that the container
listens on the specified network ports at runtime.
- If the port is already exposed using C(EXPOSE) in a Dockerfile, it does not
need to be exposed again.
type: list
elements: str
aliases:
- exposed
- expose
force_kill:
description:
- Use the kill command when stopping a running container.
type: bool
default: no
aliases:
- forcekill
groups:
description:
- List of additional group names and/or IDs that the container process will run as.
type: list
elements: str
healthcheck:
description:
- Configure a check that is run to determine whether or not containers for this service are "healthy".
- "See the docs for the L(HEALTHCHECK Dockerfile instruction,https://docs.docker.com/engine/reference/builder/#healthcheck)
for details on how healthchecks work."
- "I(interval), I(timeout) and I(start_period) are specified as durations. They accept duration as a string in a format
that look like: C(5h34m56s), C(1m30s) etc. The supported units are C(us), C(ms), C(s), C(m) and C(h)."
type: dict
suboptions:
test:
description:
- Command to run to check health.
- Must be either a string or a list. If it is a list, the first item must be one of C(NONE), C(CMD) or C(CMD-SHELL).
type: raw
interval:
description:
- Time between running the check.
- The default used by the Docker daemon is C(30s).
type: str
timeout:
description:
- Maximum time to allow one check to run.
- The default used by the Docker daemon is C(30s).
type: str
retries:
description:
- Consecutive number of failures needed to report unhealthy.
- The default used by the Docker daemon is C(3).
type: int
start_period:
description:
- Start period for the container to initialize before starting health-retries countdown.
- The default used by the Docker daemon is C(0s).
type: str
version_added: "2.8"
hostname:
description:
- The container's hostname.
type: str
ignore_image:
description:
- When I(state) is C(present) or C(started), the module compares the configuration of an existing
container to requested configuration. The evaluation includes the image version. If the image
version in the registry does not match the container, the container will be recreated. You can
stop this behavior by setting I(ignore_image) to C(True).
- "*Warning:* This option is ignored if C(image: ignore) or C(*: ignore) is specified in the
I(comparisons) option."
type: bool
default: no
version_added: "2.2"
image:
description:
- Repository path and tag used to create the container. If an image is not found or pull is true, the image
will be pulled from the registry. If no tag is included, C(latest) will be used.
- Can also be an image ID. If this is the case, the image is assumed to be available locally.
The I(pull) option is ignored for this case.
type: str
init:
description:
- Run an init inside the container that forwards signals and reaps processes.
- This option requires Docker API >= 1.25.
      - If I(container_default_behavior) is set to C(compatibility) (the default value), this
option has a default of C(no).
type: bool
version_added: "2.6"
interactive:
description:
- Keep stdin open after a container is launched, even if not attached.
      - If I(container_default_behavior) is set to C(compatibility) (the default value), this
option has a default of C(no).
type: bool
ipc_mode:
description:
- Set the IPC mode for the container.
- Can be one of C(container:<name|id>) to reuse another container's IPC namespace or C(host) to use
the host's IPC namespace within the container.
type: str
keep_volumes:
description:
- Retain volumes associated with a removed container.
type: bool
default: yes
kill_signal:
description:
- Override default signal used to kill a running container.
type: str
kernel_memory:
description:
- "Kernel memory limit in format C(<number>[<unit>]). Number is a positive integer.
Unit can be C(B) (byte), C(K) (kibibyte, 1024B), C(M) (mebibyte), C(G) (gibibyte),
C(T) (tebibyte), or C(P) (pebibyte). Minimum is C(4M)."
- Omitting the unit defaults to bytes.
type: str
labels:
description:
- Dictionary of key value pairs.
type: dict
links:
description:
- List of name aliases for linked containers in the format C(container_name:alias).
- Setting this will force container to be restarted.
type: list
elements: str
log_driver:
description:
- Specify the logging driver. Docker uses C(json-file) by default.
- See L(here,https://docs.docker.com/config/containers/logging/configure/) for possible choices.
type: str
log_options:
description:
- Dictionary of options specific to the chosen I(log_driver).
- See U(https://docs.docker.com/engine/admin/logging/overview/) for details.
type: dict
aliases:
- log_opt
mac_address:
description:
- Container MAC address (e.g. 92:d0:c6:0a:29:33).
type: str
memory:
description:
- "Memory limit in format C(<number>[<unit>]). Number is a positive integer.
Unit can be C(B) (byte), C(K) (kibibyte, 1024B), C(M) (mebibyte), C(G) (gibibyte),
C(T) (tebibyte), or C(P) (pebibyte)."
- Omitting the unit defaults to bytes.
      - If I(container_default_behavior) is set to C(compatibility) (the default value), this
option has a default of C("0").
type: str
memory_reservation:
description:
- "Memory soft limit in format C(<number>[<unit>]). Number is a positive integer.
Unit can be C(B) (byte), C(K) (kibibyte, 1024B), C(M) (mebibyte), C(G) (gibibyte),
C(T) (tebibyte), or C(P) (pebibyte)."
- Omitting the unit defaults to bytes.
type: str
memory_swap:
description:
- "Total memory limit (memory + swap) in format C(<number>[<unit>]).
Number is a positive integer. Unit can be C(B) (byte), C(K) (kibibyte, 1024B),
C(M) (mebibyte), C(G) (gibibyte), C(T) (tebibyte), or C(P) (pebibyte)."
- Omitting the unit defaults to bytes.
type: str
memory_swappiness:
description:
- Tune a container's memory swappiness behavior. Accepts an integer between 0 and 100.
- If not set, the value will be remain the same if container exists and will be inherited
from the host machine if it is (re-)created.
type: int
mounts:
version_added: "2.9"
type: list
elements: dict
description:
- Specification for mounts to be added to the container. More powerful alternative to I(volumes).
suboptions:
target:
description:
- Path inside the container.
type: str
required: true
source:
description:
- Mount source (e.g. a volume name or a host path).
type: str
type:
description:
- The mount type.
- Note that C(npipe) is only supported by Docker for Windows.
type: str
choices:
- bind
- npipe
- tmpfs
- volume
default: volume
read_only:
description:
- Whether the mount should be read-only.
type: bool
consistency:
description:
- The consistency requirement for the mount.
type: str
choices:
- cached
- consistent
- default
- delegated
propagation:
description:
- Propagation mode. Only valid for the C(bind) type.
type: str
choices:
- private
- rprivate
- shared
- rshared
- slave
- rslave
no_copy:
description:
- False if the volume should be populated with the data from the target. Only valid for the C(volume) type.
- The default value is C(false).
type: bool
labels:
description:
- User-defined name and labels for the volume. Only valid for the C(volume) type.
type: dict
volume_driver:
description:
- Specify the volume driver. Only valid for the C(volume) type.
- See L(here,https://docs.docker.com/storage/volumes/#use-a-volume-driver) for details.
type: str
volume_options:
description:
- Dictionary of options specific to the chosen volume_driver. See
L(here,https://docs.docker.com/storage/volumes/#use-a-volume-driver) for details.
type: dict
tmpfs_size:
description:
- "The size for the tmpfs mount in bytes in format <number>[<unit>]."
- "Number is a positive integer. Unit can be one of C(B) (byte), C(K) (kibibyte, 1024B), C(M) (mebibyte), C(G) (gibibyte),
C(T) (tebibyte), or C(P) (pebibyte)."
- "Omitting the unit defaults to bytes."
type: str
tmpfs_mode:
description:
- The permission mode for the tmpfs mount.
type: str
name:
description:
- Assign a name to a new container or match an existing container.
- When identifying an existing container name may be a name or a long or short container ID.
type: str
required: yes
network_mode:
description:
- Connect the container to a network. Choices are C(bridge), C(host), C(none), C(container:<name|id>), C(<network_name>) or C(default).
- "*Note* that from Ansible 2.14 on, if I(networks_cli_compatible) is C(true) and I(networks) contains at least one network,
the default value for I(network_mode) will be the name of the first network in the I(networks) list. You can prevent this
by explicitly specifying a value for I(network_mode), like the default value C(default) which will be used by Docker if
I(network_mode) is not specified."
type: str
userns_mode:
description:
      - Set the user namespace mode for the container. Currently, the only valid values are C(host) and the empty string.
type: str
version_added: "2.5"
networks:
description:
- List of networks the container belongs to.
- For examples of the data structure and usage see EXAMPLES below.
- To remove a container from one or more networks, use the I(purge_networks) option.
- Note that as opposed to C(docker run ...), M(docker_container) does not remove the default
network if I(networks) is specified. You need to explicitly use I(purge_networks) to enforce
the removal of the default network (and all other networks not explicitly mentioned in I(networks)).
Alternatively, use the I(networks_cli_compatible) option, which will be enabled by default from Ansible 2.12 on.
type: list
elements: dict
suboptions:
name:
description:
- The network's name.
type: str
required: yes
ipv4_address:
description:
- The container's IPv4 address in this network.
type: str
ipv6_address:
description:
- The container's IPv6 address in this network.
type: str
links:
description:
- A list of containers to link to.
type: list
elements: str
aliases:
description:
- List of aliases for this container in this network. These names
can be used in the network to reach this container.
type: list
elements: str
version_added: "2.2"
networks_cli_compatible:
description:
- "When networks are provided to the module via the I(networks) option, the module
behaves differently than C(docker run --network): C(docker run --network other)
will create a container with network C(other) attached, but the default network
not attached. This module with I(networks: {name: other}) will create a container
with both C(default) and C(other) attached. If I(purge_networks) is set to C(yes),
the C(default) network will be removed afterwards."
- "If I(networks_cli_compatible) is set to C(yes), this module will behave as
C(docker run --network) and will *not* add the default network if I(networks) is
specified. If I(networks) is not specified, the default network will be attached."
- "*Note* that docker CLI also sets I(network_mode) to the name of the first network
added if C(--network) is specified. For more compatibility with docker CLI, you
explicitly have to set I(network_mode) to the name of the first network you're
adding. This behavior will change for Ansible 2.14: then I(network_mode) will
automatically be set to the first network name in I(networks) if I(network_mode)
is not specified, I(networks) has at least one entry and I(networks_cli_compatible)
is C(true)."
      - The current default value is C(no). A new default of C(yes) will be set in Ansible 2.12.
type: bool
version_added: "2.8"
oom_killer:
description:
- Whether or not to disable OOM Killer for the container.
type: bool
oom_score_adj:
description:
- An integer value containing the score given to the container in order to tune
OOM killer preferences.
type: int
version_added: "2.2"
output_logs:
description:
- If set to true, output of the container command will be printed.
- Only effective when I(log_driver) is set to C(json-file) or C(journald).
type: bool
default: no
version_added: "2.7"
paused:
description:
- Use with the started state to pause running processes inside the container.
      - If I(container_default_behavior) is set to C(compatibility) (the default value), this
        option has a default of C(no).
type: bool
pid_mode:
description:
- Set the PID namespace mode for the container.
- Note that Docker SDK for Python < 2.0 only supports C(host). Newer versions of the
Docker SDK for Python (docker) allow all values supported by the Docker daemon.
type: str
pids_limit:
description:
- Set PIDs limit for the container. It accepts an integer value.
- Set C(-1) for unlimited PIDs.
type: int
version_added: "2.8"
privileged:
description:
- Give extended privileges to the container.
      - If I(container_default_behavior) is set to C(compatibility) (the default value), this
        option has a default of C(no).
type: bool
published_ports:
description:
- List of ports to publish from the container to the host.
- "Use docker CLI syntax: C(8000), C(9000:8000), or C(0.0.0.0:9000:8000), where 8000 is a
container port, 9000 is a host port, and 0.0.0.0 is a host interface."
- Port ranges can be used for source and destination ports. If two ranges with
different lengths are specified, the shorter range will be used.
- "Bind addresses must be either IPv4 or IPv6 addresses. Hostnames are *not* allowed. This
is different from the C(docker) command line utility. Use the L(dig lookup,../lookup/dig.html)
to resolve hostnames."
- A value of C(all) will publish all exposed container ports to random host ports, ignoring
any other mappings.
- If I(networks) parameter is provided, will inspect each network to see if there exists
a bridge network with optional parameter C(com.docker.network.bridge.host_binding_ipv4).
If such a network is found, then published ports where no host IP address is specified
will be bound to the host IP pointed to by C(com.docker.network.bridge.host_binding_ipv4).
Note that the first bridge network with a C(com.docker.network.bridge.host_binding_ipv4)
value encountered in the list of I(networks) is the one that will be used.
type: list
elements: str
aliases:
- ports
pull:
description:
- If true, always pull the latest version of an image. Otherwise, will only pull an image
when missing.
- "*Note:* images are only pulled when specified by name. If the image is specified
as a image ID (hash), it cannot be pulled."
type: bool
default: no
purge_networks:
description:
- Remove the container from ALL networks not included in I(networks) parameter.
- Any default networks such as C(bridge), if not found in I(networks), will be removed as well.
type: bool
default: no
version_added: "2.2"
read_only:
description:
- Mount the container's root file system as read-only.
      - If I(container_default_behavior) is set to C(compatibility) (the default value), this
        option has a default of C(no).
type: bool
recreate:
description:
- Use with present and started states to force the re-creation of an existing container.
type: bool
default: no
restart:
description:
- Use with started state to force a matching container to be stopped and restarted.
type: bool
default: no
restart_policy:
description:
- Container restart policy.
      - Place quotes around the C(no) option, so that it is not interpreted as a boolean by the YAML parser.
type: str
choices:
- 'no'
- 'on-failure'
- 'always'
- 'unless-stopped'
restart_retries:
description:
      - Use with I(restart_policy) to control the maximum number of restart attempts.
type: int
runtime:
description:
- Runtime to use for the container.
type: str
version_added: "2.8"
shm_size:
description:
- "Size of C(/dev/shm) in format C(<number>[<unit>]). Number is positive integer.
Unit can be C(B) (byte), C(K) (kibibyte, 1024B), C(M) (mebibyte), C(G) (gibibyte),
C(T) (tebibyte), or C(P) (pebibyte)."
- Omitting the unit defaults to bytes. If you omit the size entirely, Docker daemon uses C(64M).
type: str
security_opts:
description:
- List of security options in the form of C("label:user:User").
type: list
elements: str
state:
description:
- 'C(absent) - A container matching the specified name will be stopped and removed. Use I(force_kill) to kill the container
rather than stopping it. Use I(keep_volumes) to retain volumes associated with the removed container.'
- 'C(present) - Asserts the existence of a container matching the name and any provided configuration parameters. If no
container matches the name, a container will be created. If a container matches the name but the provided configuration
does not match, the container will be updated, if it can be. If it cannot be updated, it will be removed and re-created
with the requested config.'
- 'C(started) - Asserts that the container is first C(present), and then if the container is not running moves it to a running
state. Use I(restart) to force a matching container to be stopped and restarted.'
- 'C(stopped) - Asserts that the container is first C(present), and then if the container is running moves it to a stopped
state.'
- To control what will be taken into account when comparing configuration, see the I(comparisons) option. To avoid that the
image version will be taken into account, you can also use the I(ignore_image) option.
- Use the I(recreate) option to always force re-creation of a matching container, even if it is running.
- If the container should be killed instead of stopped in case it needs to be stopped for recreation, or because I(state) is
C(stopped), please use the I(force_kill) option. Use I(keep_volumes) to retain volumes associated with a removed container.
type: str
default: started
choices:
- absent
- present
- stopped
- started
stop_signal:
description:
- Override default signal used to stop the container.
type: str
stop_timeout:
description:
- Number of seconds to wait for the container to stop before sending C(SIGKILL).
When the container is created by this module, its C(StopTimeout) configuration
will be set to this value.
- When the container is stopped, will be used as a timeout for stopping the
container. In case the container has a custom C(StopTimeout) configuration,
the behavior depends on the version of the docker daemon. New versions of
the docker daemon will always use the container's configured C(StopTimeout)
value if it has been configured.
type: int
trust_image_content:
description:
- If C(yes), skip image verification.
- The option has never been used by the module. It will be removed in Ansible 2.14.
type: bool
default: no
tmpfs:
description:
- Mount a tmpfs directory.
type: list
elements: str
    version_added: "2.4"
tty:
description:
- Allocate a pseudo-TTY.
      - If I(container_default_behavior) is set to C(compatibility) (the default value), this
        option has a default of C(no).
type: bool
ulimits:
description:
- "List of ulimit options. A ulimit is specified as C(nofile:262144:262144)."
type: list
elements: str
sysctls:
description:
      - Dictionary of key-value pairs.
type: dict
    version_added: "2.4"
user:
description:
- Sets the username or UID used and optionally the groupname or GID for the specified command.
- "Can be of the forms C(user), C(user:group), C(uid), C(uid:gid), C(user:gid) or C(uid:group)."
type: str
uts:
description:
- Set the UTS namespace mode for the container.
type: str
volumes:
description:
- List of volumes to mount within the container.
- "Use docker CLI-style syntax: C(/host:/container[:mode])"
- "Mount modes can be a comma-separated list of various modes such as C(ro), C(rw), C(consistent),
C(delegated), C(cached), C(rprivate), C(private), C(rshared), C(shared), C(rslave), C(slave), and
C(nocopy). Note that the docker daemon might not support all modes and combinations of such modes."
- SELinux hosts can additionally use C(z) or C(Z) to use a shared or private label for the volume.
- "Note that Ansible 2.7 and earlier only supported one mode, which had to be one of C(ro), C(rw),
C(z), and C(Z)."
type: list
elements: str
volume_driver:
description:
- The container volume driver.
type: str
volumes_from:
description:
- List of container names or IDs to get volumes from.
type: list
elements: str
working_dir:
description:
- Path to the working directory.
type: str
version_added: "2.4"
extends_documentation_fragment:
- docker
- docker.docker_py_1_documentation
author:
- "Cove Schneider (@cove)"
- "Joshua Conner (@joshuaconner)"
- "Pavel Antonov (@softzilla)"
- "Thomas Steinbach (@ThomasSteinbach)"
- "Philippe Jandot (@zfil)"
- "Daan Oosterveld (@dusdanig)"
- "Chris Houseknecht (@chouseknecht)"
- "Kassian Sun (@kassiansun)"
- "Felix Fontein (@felixfontein)"
requirements:
- "L(Docker SDK for Python,https://docker-py.readthedocs.io/en/stable/) >= 1.8.0 (use L(docker-py,https://pypi.org/project/docker-py/) for Python 2.6)"
- "Docker API >= 1.20"
'''
EXAMPLES = '''
- name: Create a data container
docker_container:
name: mydata
image: busybox
volumes:
- /data
- name: Re-create a redis container
docker_container:
name: myredis
image: redis
command: redis-server --appendonly yes
state: present
recreate: yes
exposed_ports:
- 6379
volumes_from:
- mydata
- name: Restart a container
docker_container:
name: myapplication
image: someuser/appimage
state: started
restart: yes
links:
- "myredis:aliasedredis"
devices:
- "/dev/sda:/dev/xvda:rwm"
ports:
- "8080:9000"
- "127.0.0.1:8081:9001/udp"
env:
SECRET_KEY: "ssssh"
# Values which might be parsed as numbers, booleans or other types by the YAML parser need to be quoted
BOOLEAN_KEY: "yes"
- name: Container present
docker_container:
name: mycontainer
state: present
image: ubuntu:14.04
command: sleep infinity
- name: Stop a container
docker_container:
name: mycontainer
state: stopped
- name: Start 4 load-balanced containers
docker_container:
name: "container{{ item }}"
recreate: yes
image: someuser/anotherappimage
command: sleep 1d
with_sequence: count=4
- name: Remove a container
docker_container:
name: ohno
state: absent
- name: Syslogging output
docker_container:
name: myservice
image: busybox
log_driver: syslog
log_options:
syslog-address: tcp://my-syslog-server:514
syslog-facility: daemon
      # NOTE: in Docker 1.13+, the "syslog-tag" option was renamed to "tag".
      # For older docker installs, use "syslog-tag" instead.
tag: myservice
- name: Create db container and connect to network
docker_container:
name: db_test
image: "postgres:latest"
networks:
- name: "{{ docker_network_name }}"
- name: Start container, connect to network and link
docker_container:
name: sleeper
image: ubuntu:14.04
networks:
- name: TestingNet
ipv4_address: "172.1.1.100"
aliases:
- sleepyzz
links:
- db_test:db
- name: TestingNet2
- name: Start a container with a command
docker_container:
name: sleepy
image: ubuntu:14.04
command: ["sleep", "infinity"]
- name: Add container to networks
docker_container:
name: sleepy
networks:
- name: TestingNet
ipv4_address: 172.1.1.18
links:
- sleeper
- name: TestingNet2
ipv4_address: 172.1.10.20
- name: Update network with aliases
docker_container:
name: sleepy
networks:
- name: TestingNet
aliases:
- sleepyz
- zzzz
- name: Remove container from one network
docker_container:
name: sleepy
networks:
- name: TestingNet2
purge_networks: yes
- name: Remove container from all networks
docker_container:
name: sleepy
purge_networks: yes
- name: Start a container and use an env file
docker_container:
name: agent
image: jenkinsci/ssh-slave
env_file: /var/tmp/jenkins/agent.env
- name: Create a container with limited capabilities
docker_container:
name: sleepy
image: ubuntu:16.04
command: sleep infinity
capabilities:
- sys_time
cap_drop:
- all
- name: Finer container restart/update control
docker_container:
name: test
image: ubuntu:18.04
env:
arg1: "true"
arg2: "whatever"
volumes:
- /tmp:/tmp
comparisons:
image: ignore # don't restart containers with older versions of the image
env: strict # we want precisely this environment
volumes: allow_more_present # if there are more volumes, that's ok, as long as `/tmp:/tmp` is there
- name: Finer container restart/update control II
docker_container:
name: test
image: ubuntu:18.04
env:
arg1: "true"
arg2: "whatever"
comparisons:
'*': ignore # by default, ignore *all* options (including image)
env: strict # except for environment variables; there, we want to be strict
- name: Start container with healthstatus
docker_container:
name: nginx-proxy
image: nginx:1.13
state: started
healthcheck:
# Check if nginx server is healthy by curl'ing the server.
      # If this fails or times out, the healthcheck fails.
test: ["CMD", "curl", "--fail", "http://nginx.host.com"]
interval: 1m30s
timeout: 10s
retries: 3
start_period: 30s
- name: Remove healthcheck from container
docker_container:
name: nginx-proxy
image: nginx:1.13
state: started
healthcheck:
# The "NONE" check needs to be specified
test: ["NONE"]
- name: Start container with block device read limits
docker_container:
name: test
image: ubuntu:18.04
state: started
device_read_bps:
# Limit read rate for /dev/sda to 20 mebibytes per second
- path: /dev/sda
rate: 20M
device_read_iops:
# Limit read rate for /dev/sdb to 300 IO per second
- path: /dev/sdb
rate: 300
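# The following tasks are illustrative sketches; container names, image tags
# and sizes are placeholders chosen for the example.
- name: Publish a port range and an extra UDP port
  docker_container:
    name: webapp
    image: nginx:1.13
    state: started
    published_ports:
      # Ranges of equal length are mapped pairwise (8080->80, 8081->81, 8082->82)
      - "8080-8082:80-82"
      - "127.0.0.1:5353:53/udp"
- name: Start a container with tmpfs, sysctls and ulimits
  docker_container:
    name: tuned
    image: ubuntu:18.04
    command: sleep infinity
    tmpfs:
      - "/run:rw,size=64m"
    sysctls:
      net.core.somaxconn: "1024"
    ulimits:
      - "nofile:262144:262144"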
'''
RETURN = '''
container:
description:
- Facts representing the current state of the container. Matches the docker inspection output.
- Note that facts are part of the registered vars since Ansible 2.8. For compatibility reasons, the facts
are also accessible directly as C(docker_container). Note that the returned fact will be removed in Ansible 2.12.
- Before 2.3 this was C(ansible_docker_container) but was renamed in 2.3 to C(docker_container) due to
conflicts with the connection plugin.
      - Empty if I(state) is C(absent).
      - If I(detach) is C(false), will include C(Output) attribute containing any output from container run.
returned: always
type: dict
sample: '{
"AppArmorProfile": "",
"Args": [],
"Config": {
"AttachStderr": false,
"AttachStdin": false,
"AttachStdout": false,
"Cmd": [
"/usr/bin/supervisord"
],
"Domainname": "",
"Entrypoint": null,
"Env": [
"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
],
"ExposedPorts": {
"443/tcp": {},
"80/tcp": {}
},
"Hostname": "8e47bf643eb9",
"Image": "lnmp_nginx:v1",
"Labels": {},
"OnBuild": null,
"OpenStdin": false,
"StdinOnce": false,
"Tty": false,
"User": "",
"Volumes": {
"/tmp/lnmp/nginx-sites/logs/": {}
},
...
}'
'''
import os
import re
import shlex
import traceback
from distutils.version import LooseVersion
from ansible.module_utils.common.text.formatters import human_to_bytes
from ansible.module_utils.docker.common import (
AnsibleDockerClient,
DifferenceTracker,
DockerBaseClass,
compare_generic,
is_image_name_id,
sanitize_result,
clean_dict_booleans_for_docker_api,
omit_none_from_dict,
parse_healthcheck,
DOCKER_COMMON_ARGS,
RequestException,
)
from ansible.module_utils.six import string_types
try:
from docker import utils
from ansible.module_utils.docker.common import docker_version
if LooseVersion(docker_version) >= LooseVersion('1.10.0'):
from docker.types import Ulimit, LogConfig
from docker import types as docker_types
else:
from docker.utils.types import Ulimit, LogConfig
from docker.errors import DockerException, APIError, NotFound
except Exception:
# missing Docker SDK for Python handled in ansible.module_utils.docker.common
pass
REQUIRES_CONVERSION_TO_BYTES = [
'kernel_memory',
'memory',
'memory_reservation',
'memory_swap',
'shm_size'
]
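# These options accept docker CLI-style size strings; human_to_bytes()
# converts them, e.g. (illustrative) human_to_bytes('64M') -> 67108864 (64 MiB).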
def is_volume_permissions(mode):
for part in mode.split(','):
if part not in ('rw', 'ro', 'z', 'Z', 'consistent', 'delegated', 'cached', 'rprivate', 'private', 'rshared', 'shared', 'rslave', 'slave', 'nocopy'):
return False
return True
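# Illustrative behavior of is_volume_permissions() above:
#   is_volume_permissions('ro,z')     -> True  (all parts are known modes)
#   is_volume_permissions('ro,bogus') -> False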
def parse_port_range(range_or_port, client):
'''
Parses a string containing either a single port or a range of ports.
Returns a list of integers for each port in the list.
'''
if '-' in range_or_port:
try:
start, end = [int(port) for port in range_or_port.split('-')]
except Exception:
client.fail('Invalid port range: "{0}"'.format(range_or_port))
if end < start:
client.fail('Invalid port range: "{0}"'.format(range_or_port))
return list(range(start, end + 1))
else:
try:
return [int(range_or_port)]
except Exception:
client.fail('Invalid port: "{0}"'.format(range_or_port))
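# Illustrative behavior of parse_port_range() above:
#   parse_port_range('8000', client)      -> [8000]
#   parse_port_range('8000-8002', client) -> [8000, 8001, 8002]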
def split_colon_ipv6(text, client):
'''
Split string by ':', while keeping IPv6 addresses in square brackets in one component.
'''
if '[' not in text:
return text.split(':')
start = 0
result = []
while start < len(text):
i = text.find('[', start)
if i < 0:
result.extend(text[start:].split(':'))
break
j = text.find(']', i)
if j < 0:
client.fail('Cannot find closing "]" in input "{0}" for opening "[" at index {1}!'.format(text, i + 1))
result.extend(text[start:i].split(':'))
k = text.find(':', j)
if k < 0:
result[-1] += text[i:]
start = len(text)
else:
result[-1] += text[i:k]
if k == len(text):
result.append('')
break
start = k + 1
return result
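# Illustrative behavior of split_colon_ipv6() above (addresses are placeholders):
#   split_colon_ipv6('0.0.0.0:9000:8000', client)       -> ['0.0.0.0', '9000', '8000']
#   split_colon_ipv6('[2001:db8::1]:9000:8000', client) -> ['[2001:db8::1]', '9000', '8000']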
class TaskParameters(DockerBaseClass):
'''
Access and parse module parameters
'''
def __init__(self, client):
super(TaskParameters, self).__init__()
self.client = client
self.auto_remove = None
self.blkio_weight = None
self.capabilities = None
self.cap_drop = None
self.cleanup = None
self.command = None
self.cpu_period = None
self.cpu_quota = None
self.cpus = None
self.cpuset_cpus = None
self.cpuset_mems = None
self.cpu_shares = None
self.detach = None
self.debug = None
self.devices = None
self.device_read_bps = None
self.device_write_bps = None
self.device_read_iops = None
self.device_write_iops = None
self.dns_servers = None
self.dns_opts = None
self.dns_search_domains = None
self.domainname = None
self.env = None
self.env_file = None
self.entrypoint = None
self.etc_hosts = None
self.exposed_ports = None
self.force_kill = None
self.groups = None
self.healthcheck = None
self.hostname = None
self.ignore_image = None
self.image = None
self.init = None
self.interactive = None
self.ipc_mode = None
self.keep_volumes = None
self.kernel_memory = None
self.kill_signal = None
self.labels = None
self.links = None
self.log_driver = None
self.output_logs = None
self.log_options = None
self.mac_address = None
self.memory = None
self.memory_reservation = None
self.memory_swap = None
self.memory_swappiness = None
self.mounts = None
self.name = None
self.network_mode = None
self.userns_mode = None
self.networks = None
self.networks_cli_compatible = None
self.oom_killer = None
self.oom_score_adj = None
self.paused = None
self.pid_mode = None
self.pids_limit = None
self.privileged = None
self.purge_networks = None
self.pull = None
self.read_only = None
self.recreate = None
self.restart = None
self.restart_retries = None
self.restart_policy = None
self.runtime = None
self.shm_size = None
self.security_opts = None
self.state = None
self.stop_signal = None
self.stop_timeout = None
self.tmpfs = None
self.trust_image_content = None
self.tty = None
self.user = None
self.uts = None
self.volumes = None
self.volume_binds = dict()
self.volumes_from = None
self.volume_driver = None
self.working_dir = None
for key, value in client.module.params.items():
setattr(self, key, value)
self.comparisons = client.comparisons
# If state is 'absent', parameters do not have to be parsed or interpreted.
# Only the container's name is needed.
if self.state == 'absent':
return
if self.cpus is not None:
self.cpus = int(round(self.cpus * 1E9))
if self.groups:
# In case integers are passed as groups, we need to convert them to
# strings as docker internally treats them as strings.
self.groups = [str(g) for g in self.groups]
for param_name in REQUIRES_CONVERSION_TO_BYTES:
if client.module.params.get(param_name):
try:
setattr(self, param_name, human_to_bytes(client.module.params.get(param_name)))
except ValueError as exc:
self.fail("Failed to convert %s to bytes: %s" % (param_name, exc))
self.publish_all_ports = False
self.published_ports = self._parse_publish_ports()
if self.published_ports in ('all', 'ALL'):
self.publish_all_ports = True
self.published_ports = None
self.ports = self._parse_exposed_ports(self.published_ports)
self.log("expose ports:")
self.log(self.ports, pretty_print=True)
self.links = self._parse_links(self.links)
if self.volumes:
self.volumes = self._expand_host_paths()
self.tmpfs = self._parse_tmpfs()
self.env = self._get_environment()
self.ulimits = self._parse_ulimits()
self.sysctls = self._parse_sysctls()
self.log_config = self._parse_log_config()
try:
self.healthcheck, self.disable_healthcheck = parse_healthcheck(self.healthcheck)
except ValueError as e:
self.fail(str(e))
self.exp_links = None
self.volume_binds = self._get_volume_binds(self.volumes)
self.pid_mode = self._replace_container_names(self.pid_mode)
self.ipc_mode = self._replace_container_names(self.ipc_mode)
self.network_mode = self._replace_container_names(self.network_mode)
self.log("volumes:")
self.log(self.volumes, pretty_print=True)
self.log("volume binds:")
self.log(self.volume_binds, pretty_print=True)
if self.networks:
for network in self.networks:
network['id'] = self._get_network_id(network['name'])
if not network['id']:
self.fail("Parameter error: network named %s could not be found. Does it exist?" % network['name'])
if network.get('links'):
network['links'] = self._parse_links(network['links'])
if self.mac_address:
# Ensure the MAC address uses colons instead of hyphens for later comparison
self.mac_address = self.mac_address.replace('-', ':')
if self.entrypoint:
# convert from list to str.
self.entrypoint = ' '.join([str(x) for x in self.entrypoint])
if self.command:
# convert from list to str
if isinstance(self.command, list):
self.command = ' '.join([str(x) for x in self.command])
self.mounts_opt, self.expected_mounts = self._process_mounts()
self._check_mount_target_collisions()
for param_name in ["device_read_bps", "device_write_bps"]:
if client.module.params.get(param_name):
self._process_rate_bps(option=param_name)
for param_name in ["device_read_iops", "device_write_iops"]:
if client.module.params.get(param_name):
self._process_rate_iops(option=param_name)
def fail(self, msg):
self.client.fail(msg)
@property
def update_parameters(self):
'''
Returns parameters used to update a container
'''
update_parameters = dict(
blkio_weight='blkio_weight',
cpu_period='cpu_period',
cpu_quota='cpu_quota',
cpu_shares='cpu_shares',
cpuset_cpus='cpuset_cpus',
cpuset_mems='cpuset_mems',
mem_limit='memory',
mem_reservation='memory_reservation',
memswap_limit='memory_swap',
kernel_memory='kernel_memory',
)
result = dict()
for key, value in update_parameters.items():
if getattr(self, value, None) is not None:
if self.client.option_minimal_versions[value]['supported']:
result[key] = getattr(self, value)
return result
@property
def create_parameters(self):
'''
Returns parameters used to create a container
'''
create_params = dict(
command='command',
domainname='domainname',
hostname='hostname',
user='user',
detach='detach',
stdin_open='interactive',
tty='tty',
ports='ports',
environment='env',
name='name',
entrypoint='entrypoint',
mac_address='mac_address',
labels='labels',
stop_signal='stop_signal',
working_dir='working_dir',
stop_timeout='stop_timeout',
healthcheck='healthcheck',
)
if self.client.docker_py_version < LooseVersion('3.0'):
# cpu_shares and volume_driver moved to create_host_config in > 3
create_params['cpu_shares'] = 'cpu_shares'
create_params['volume_driver'] = 'volume_driver'
result = dict(
host_config=self._host_config(),
volumes=self._get_mounts(),
)
for key, value in create_params.items():
if getattr(self, value, None) is not None:
if self.client.option_minimal_versions[value]['supported']:
result[key] = getattr(self, value)
if self.networks_cli_compatible and self.networks:
network = self.networks[0]
params = dict()
for para in ('ipv4_address', 'ipv6_address', 'links', 'aliases'):
if network.get(para):
params[para] = network[para]
network_config = dict()
network_config[network['name']] = self.client.create_endpoint_config(**params)
result['networking_config'] = self.client.create_networking_config(network_config)
return result
def _expand_host_paths(self):
new_vols = []
for vol in self.volumes:
if ':' in vol:
if len(vol.split(':')) == 3:
host, container, mode = vol.split(':')
if not is_volume_permissions(mode):
self.fail('Found invalid volumes mode: {0}'.format(mode))
if re.match(r'[.~]', host):
host = os.path.abspath(os.path.expanduser(host))
new_vols.append("%s:%s:%s" % (host, container, mode))
continue
elif len(vol.split(':')) == 2:
parts = vol.split(':')
if not is_volume_permissions(parts[1]) and re.match(r'[.~]', parts[0]):
host = os.path.abspath(os.path.expanduser(parts[0]))
new_vols.append("%s:%s:rw" % (host, parts[1]))
continue
new_vols.append(vol)
return new_vols
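    # Illustrative behavior of _expand_host_paths() above (paths are
    # placeholders): ['./data:/data', '~/logs:/logs:ro'] expands the host
    # sides to absolute paths and yields
    # ['/abs/cwd/data:/data:rw', '/home/user/logs:/logs:ro'].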
def _get_mounts(self):
'''
Return a list of container mounts.
:return:
'''
result = []
if self.volumes:
for vol in self.volumes:
if ':' in vol:
if len(vol.split(':')) == 3:
dummy, container, dummy = vol.split(':')
result.append(container)
continue
if len(vol.split(':')) == 2:
parts = vol.split(':')
if not is_volume_permissions(parts[1]):
result.append(parts[1])
continue
result.append(vol)
self.log("mounts:")
self.log(result, pretty_print=True)
return result
def _host_config(self):
'''
Returns parameters used to create a HostConfig object
'''
host_config_params = dict(
port_bindings='published_ports',
publish_all_ports='publish_all_ports',
links='links',
privileged='privileged',
dns='dns_servers',
dns_opt='dns_opts',
dns_search='dns_search_domains',
binds='volume_binds',
volumes_from='volumes_from',
network_mode='network_mode',
userns_mode='userns_mode',
cap_add='capabilities',
cap_drop='cap_drop',
extra_hosts='etc_hosts',
read_only='read_only',
ipc_mode='ipc_mode',
security_opt='security_opts',
ulimits='ulimits',
sysctls='sysctls',
log_config='log_config',
mem_limit='memory',
memswap_limit='memory_swap',
mem_swappiness='memory_swappiness',
oom_score_adj='oom_score_adj',
oom_kill_disable='oom_killer',
shm_size='shm_size',
group_add='groups',
devices='devices',
pid_mode='pid_mode',
tmpfs='tmpfs',
init='init',
uts_mode='uts',
runtime='runtime',
auto_remove='auto_remove',
device_read_bps='device_read_bps',
device_write_bps='device_write_bps',
device_read_iops='device_read_iops',
device_write_iops='device_write_iops',
pids_limit='pids_limit',
mounts='mounts',
nano_cpus='cpus',
)
if self.client.docker_py_version >= LooseVersion('1.9') and self.client.docker_api_version >= LooseVersion('1.22'):
# blkio_weight can always be updated, but can only be set on creation
# when Docker SDK for Python and Docker API are new enough
host_config_params['blkio_weight'] = 'blkio_weight'
if self.client.docker_py_version >= LooseVersion('3.0'):
# cpu_shares and volume_driver moved to create_host_config in > 3
host_config_params['cpu_shares'] = 'cpu_shares'
host_config_params['volume_driver'] = 'volume_driver'
params = dict()
for key, value in host_config_params.items():
if getattr(self, value, None) is not None:
if self.client.option_minimal_versions[value]['supported']:
params[key] = getattr(self, value)
if self.restart_policy:
params['restart_policy'] = dict(Name=self.restart_policy,
MaximumRetryCount=self.restart_retries)
if 'mounts' in params:
params['mounts'] = self.mounts_opt
return self.client.create_host_config(**params)
@property
def default_host_ip(self):
ip = '0.0.0.0'
if not self.networks:
return ip
for net in self.networks:
if net.get('name'):
try:
network = self.client.inspect_network(net['name'])
if network.get('Driver') == 'bridge' and \
network.get('Options', {}).get('com.docker.network.bridge.host_binding_ipv4'):
ip = network['Options']['com.docker.network.bridge.host_binding_ipv4']
break
except NotFound as nfe:
self.client.fail(
"Cannot inspect the network '{0}' to determine the default IP: {1}".format(net['name'], nfe),
exception=traceback.format_exc()
)
return ip
def _parse_publish_ports(self):
'''
Parse ports from docker CLI syntax
'''
if self.published_ports is None:
return None
if 'all' in self.published_ports:
return 'all'
default_ip = self.default_host_ip
binds = {}
for port in self.published_ports:
parts = split_colon_ipv6(str(port), self.client)
container_port = parts[-1]
protocol = ''
if '/' in container_port:
container_port, protocol = parts[-1].split('/')
container_ports = parse_port_range(container_port, self.client)
p_len = len(parts)
if p_len == 1:
port_binds = len(container_ports) * [(default_ip,)]
elif p_len == 2:
port_binds = [(default_ip, port) for port in parse_port_range(parts[0], self.client)]
elif p_len == 3:
# We only allow IPv4 and IPv6 addresses for the bind address
ipaddr = parts[0]
if not re.match(r'^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$', parts[0]) and not re.match(r'^\[[0-9a-fA-F:]+\]$', ipaddr):
self.fail(('Bind addresses for published ports must be IPv4 or IPv6 addresses, not hostnames. '
'Use the dig lookup to resolve hostnames. (Found hostname: {0})').format(ipaddr))
if re.match(r'^\[[0-9a-fA-F:]+\]$', ipaddr):
ipaddr = ipaddr[1:-1]
if parts[1]:
port_binds = [(ipaddr, port) for port in parse_port_range(parts[1], self.client)]
else:
port_binds = len(container_ports) * [(ipaddr,)]
for bind, container_port in zip(port_binds, container_ports):
idx = '{0}/{1}'.format(container_port, protocol) if protocol else container_port
if idx in binds:
old_bind = binds[idx]
if isinstance(old_bind, list):
old_bind.append(bind)
else:
binds[idx] = [old_bind, bind]
else:
binds[idx] = bind
return binds
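    # Illustrative result of _parse_publish_ports() above (address and ports
    # are placeholders): ['127.0.0.1:8080:80/tcp'] -> {'80/tcp': ('127.0.0.1', 8080)}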
def _get_volume_binds(self, volumes):
'''
Extract host bindings, if any, from list of volume mapping strings.
:return: dictionary of bind mappings
'''
result = dict()
if volumes:
for vol in volumes:
host = None
if ':' in vol:
parts = vol.split(':')
if len(parts) == 3:
host, container, mode = parts
if not is_volume_permissions(mode):
self.fail('Found invalid volumes mode: {0}'.format(mode))
elif len(parts) == 2:
if not is_volume_permissions(parts[1]):
host, container, mode = (vol.split(':') + ['rw'])
if host is not None:
result[host] = dict(
bind=container,
mode=mode
)
return result
def _parse_exposed_ports(self, published_ports):
'''
Parse exposed ports from docker CLI-style ports syntax.
'''
exposed = []
if self.exposed_ports:
for port in self.exposed_ports:
port = str(port).strip()
protocol = 'tcp'
match = re.search(r'(/.+$)', port)
if match:
protocol = match.group(1).replace('/', '')
port = re.sub(r'/.+$', '', port)
exposed.append((port, protocol))
if published_ports:
# Any published port should also be exposed
for publish_port in published_ports:
match = False
if isinstance(publish_port, string_types) and '/' in publish_port:
port, protocol = publish_port.split('/')
port = int(port)
else:
protocol = 'tcp'
port = int(publish_port)
for exposed_port in exposed:
if exposed_port[1] != protocol:
continue
if isinstance(exposed_port[0], string_types) and '-' in exposed_port[0]:
start_port, end_port = exposed_port[0].split('-')
if int(start_port) <= port <= int(end_port):
match = True
elif exposed_port[0] == port:
match = True
if not match:
exposed.append((port, protocol))
return exposed
@staticmethod
def _parse_links(links):
'''
        Turn links into a list of (name, alias) tuples
'''
if links is None:
return None
result = []
for link in links:
parsed_link = link.split(':', 1)
if len(parsed_link) == 2:
result.append((parsed_link[0], parsed_link[1]))
else:
result.append((parsed_link[0], parsed_link[0]))
return result
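    # Illustrative behavior of _parse_links() above:
    #   ['db:database', 'cache'] -> [('db', 'database'), ('cache', 'cache')]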
def _parse_ulimits(self):
'''
Turn ulimits into an array of Ulimit objects
'''
if self.ulimits is None:
return None
results = []
for limit in self.ulimits:
limits = dict()
pieces = limit.split(':')
if len(pieces) >= 2:
limits['name'] = pieces[0]
limits['soft'] = int(pieces[1])
limits['hard'] = int(pieces[1])
if len(pieces) == 3:
limits['hard'] = int(pieces[2])
try:
results.append(Ulimit(**limits))
except ValueError as exc:
self.fail("Error parsing ulimits value %s - %s" % (limit, exc))
return results
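    # Illustrative behavior of _parse_ulimits() above: 'nofile:262144:262144'
    # becomes Ulimit(name='nofile', soft=262144, hard=262144).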
def _parse_sysctls(self):
'''
        Return the sysctls dict as-is; no conversion is required here
'''
return self.sysctls
def _parse_log_config(self):
'''
Create a LogConfig object
'''
if self.log_driver is None:
return None
options = dict(
Type=self.log_driver,
Config=dict()
)
if self.log_options is not None:
options['Config'] = dict()
for k, v in self.log_options.items():
if not isinstance(v, string_types):
self.client.module.warn(
"Non-string value found for log_options option '%s'. The value is automatically converted to '%s'. "
"If this is not correct, or you want to avoid such warnings, please quote the value." % (k, str(v))
)
v = str(v)
self.log_options[k] = v
options['Config'][k] = v
try:
return LogConfig(**options)
except ValueError as exc:
self.fail('Error parsing logging options - %s' % (exc))
def _parse_tmpfs(self):
'''
        Turn the tmpfs list into a dict mapping mount paths to their options string
'''
result = dict()
if self.tmpfs is None:
return result
for tmpfs_spec in self.tmpfs:
split_spec = tmpfs_spec.split(":", 1)
if len(split_spec) > 1:
result[split_spec[0]] = split_spec[1]
else:
result[split_spec[0]] = ""
return result
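    # Illustrative behavior of _parse_tmpfs() above:
    #   ['/run:rw,size=64m', '/tmp'] -> {'/run': 'rw,size=64m', '/tmp': ''}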
def _get_environment(self):
"""
If environment file is combined with explicit environment variables, the explicit environment variables
take precedence.
"""
final_env = {}
if self.env_file:
parsed_env_file = utils.parse_env_file(self.env_file)
for name, value in parsed_env_file.items():
final_env[name] = str(value)
if self.env:
for name, value in self.env.items():
if not isinstance(value, string_types):
self.fail("Non-string value found for env option. Ambiguous env options must be "
"wrapped in quotes to avoid them being interpreted. Key: %s" % (name, ))
final_env[name] = str(value)
return final_env
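    # Illustrative precedence in _get_environment() above (names are
    # placeholders): if the env file sets FOO=from_file and I(env) sets
    # FOO=from_play, the result contains FOO=from_play.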
def _get_network_id(self, network_name):
network_id = None
try:
for network in self.client.networks(names=[network_name]):
if network['Name'] == network_name:
network_id = network['Id']
break
except Exception as exc:
self.fail("Error getting network id for %s - %s" % (network_name, str(exc)))
return network_id
def _process_mounts(self):
if self.mounts is None:
return None, None
mounts_list = []
mounts_expected = []
for mount in self.mounts:
target = mount['target']
datatype = mount['type']
mount_dict = dict(mount)
# Sanity checks (so we don't wait for docker-py to barf on input)
if mount_dict.get('source') is None and datatype != 'tmpfs':
self.client.fail('source must be specified for mount "{0}" of type "{1}"'.format(target, datatype))
mount_option_types = dict(
volume_driver='volume',
volume_options='volume',
propagation='bind',
no_copy='volume',
labels='volume',
tmpfs_size='tmpfs',
tmpfs_mode='tmpfs',
)
for option, req_datatype in mount_option_types.items():
if mount_dict.get(option) is not None and datatype != req_datatype:
self.client.fail('{0} cannot be specified for mount "{1}" of type "{2}" (needs type "{3}")'.format(option, target, datatype, req_datatype))
# Handle volume_driver and volume_options
volume_driver = mount_dict.pop('volume_driver')
volume_options = mount_dict.pop('volume_options')
if volume_driver:
if volume_options:
volume_options = clean_dict_booleans_for_docker_api(volume_options)
mount_dict['driver_config'] = docker_types.DriverConfig(name=volume_driver, options=volume_options)
if mount_dict['labels']:
mount_dict['labels'] = clean_dict_booleans_for_docker_api(mount_dict['labels'])
if mount_dict.get('tmpfs_size') is not None:
try:
mount_dict['tmpfs_size'] = human_to_bytes(mount_dict['tmpfs_size'])
except ValueError as exc:
self.fail('Failed to convert tmpfs_size of mount "{0}" to bytes: {1}'.format(target, exc))
if mount_dict.get('tmpfs_mode') is not None:
try:
mount_dict['tmpfs_mode'] = int(mount_dict['tmpfs_mode'], 8)
except Exception as dummy:
                    self.client.fail('tmpfs_mode of mount "{0}" is not an octal string!'.format(target))
# Fill expected mount dict
mount_expected = dict(mount)
mount_expected['tmpfs_size'] = mount_dict['tmpfs_size']
mount_expected['tmpfs_mode'] = mount_dict['tmpfs_mode']
# Add result to lists
mounts_list.append(docker_types.Mount(**mount_dict))
mounts_expected.append(omit_none_from_dict(mount_expected))
return mounts_list, mounts_expected
def _process_rate_bps(self, option):
"""
Format device_read_bps and device_write_bps option
"""
devices_list = []
for v in getattr(self, option):
device_dict = dict((x.title(), y) for x, y in v.items())
device_dict['Rate'] = human_to_bytes(device_dict['Rate'])
devices_list.append(device_dict)
setattr(self, option, devices_list)
def _process_rate_iops(self, option):
"""
Format device_read_iops and device_write_iops option
"""
devices_list = []
for v in getattr(self, option):
device_dict = dict((x.title(), y) for x, y in v.items())
devices_list.append(device_dict)
setattr(self, option, devices_list)
def _replace_container_names(self, mode):
"""
Parse IPC and PID modes. If they contain a container name, replace
with the container's ID.
"""
if mode is None or not mode.startswith('container:'):
return mode
container_name = mode[len('container:'):]
# Try to inspect container to see whether this is an ID or a
        # name (and in the latter case, retrieve its ID)
container = self.client.get_container(container_name)
if container is None:
# If we can't find the container, issue a warning and continue with
# what the user specified.
self.client.module.warn('Cannot find a container with name or ID "{0}"'.format(container_name))
return mode
return 'container:{0}'.format(container['Id'])
def _check_mount_target_collisions(self):
last = dict()
def f(t, name):
if t in last:
if name == last[t]:
self.client.fail('The mount point "{0}" appears twice in the {1} option'.format(t, name))
else:
self.client.fail('The mount point "{0}" appears both in the {1} and {2} option'.format(t, name, last[t]))
last[t] = name
if self.expected_mounts:
for t in [m['target'] for m in self.expected_mounts]:
f(t, 'mounts')
if self.volumes:
for v in self.volumes:
vs = v.split(':')
f(vs[0 if len(vs) == 1 else 1], 'volumes')
class Container(DockerBaseClass):
def __init__(self, container, parameters):
super(Container, self).__init__()
self.raw = container
self.Id = None
self.container = container
if container:
self.Id = container['Id']
self.Image = container['Image']
self.log(self.container, pretty_print=True)
self.parameters = parameters
self.parameters.expected_links = None
self.parameters.expected_ports = None
self.parameters.expected_exposed = None
self.parameters.expected_volumes = None
self.parameters.expected_ulimits = None
self.parameters.expected_sysctls = None
self.parameters.expected_etc_hosts = None
self.parameters.expected_env = None
self.parameters_map = dict()
self.parameters_map['expected_links'] = 'links'
self.parameters_map['expected_ports'] = 'expected_ports'
self.parameters_map['expected_exposed'] = 'exposed_ports'
self.parameters_map['expected_volumes'] = 'volumes'
self.parameters_map['expected_ulimits'] = 'ulimits'
self.parameters_map['expected_sysctls'] = 'sysctls'
self.parameters_map['expected_etc_hosts'] = 'etc_hosts'
self.parameters_map['expected_env'] = 'env'
self.parameters_map['expected_entrypoint'] = 'entrypoint'
self.parameters_map['expected_binds'] = 'volumes'
self.parameters_map['expected_cmd'] = 'command'
self.parameters_map['expected_devices'] = 'devices'
self.parameters_map['expected_healthcheck'] = 'healthcheck'
self.parameters_map['expected_mounts'] = 'mounts'
def fail(self, msg):
self.parameters.client.fail(msg)
@property
def exists(self):
return True if self.container else False
@property
def running(self):
if self.container and self.container.get('State'):
if self.container['State'].get('Running') and not self.container['State'].get('Ghost', False):
return True
return False
@property
def paused(self):
if self.container and self.container.get('State'):
return self.container['State'].get('Paused', False)
return False
def _compare(self, a, b, compare):
'''
Compare values a and b as described in compare.
'''
return compare_generic(a, b, compare['comparison'], compare['type'])
def _decode_mounts(self, mounts):
if not mounts:
return mounts
result = []
empty_dict = dict()
for mount in mounts:
res = dict()
res['type'] = mount.get('Type')
res['source'] = mount.get('Source')
res['target'] = mount.get('Target')
res['read_only'] = mount.get('ReadOnly', False) # golang's omitempty for bool returns None for False
res['consistency'] = mount.get('Consistency')
res['propagation'] = mount.get('BindOptions', empty_dict).get('Propagation')
res['no_copy'] = mount.get('VolumeOptions', empty_dict).get('NoCopy', False)
res['labels'] = mount.get('VolumeOptions', empty_dict).get('Labels', empty_dict)
res['volume_driver'] = mount.get('VolumeOptions', empty_dict).get('DriverConfig', empty_dict).get('Name')
res['volume_options'] = mount.get('VolumeOptions', empty_dict).get('DriverConfig', empty_dict).get('Options', empty_dict)
res['tmpfs_size'] = mount.get('TmpfsOptions', empty_dict).get('SizeBytes')
res['tmpfs_mode'] = mount.get('TmpfsOptions', empty_dict).get('Mode')
result.append(res)
return result
def has_different_configuration(self, image):
'''
Diff parameters vs existing container config. Returns tuple: (True | False, List of differences)
'''
self.log('Starting has_different_configuration')
self.parameters.expected_entrypoint = self._get_expected_entrypoint()
self.parameters.expected_links = self._get_expected_links()
self.parameters.expected_ports = self._get_expected_ports()
self.parameters.expected_exposed = self._get_expected_exposed(image)
self.parameters.expected_volumes = self._get_expected_volumes(image)
self.parameters.expected_binds = self._get_expected_binds(image)
self.parameters.expected_ulimits = self._get_expected_ulimits(self.parameters.ulimits)
self.parameters.expected_sysctls = self._get_expected_sysctls(self.parameters.sysctls)
self.parameters.expected_etc_hosts = self._convert_simple_dict_to_list('etc_hosts')
self.parameters.expected_env = self._get_expected_env(image)
self.parameters.expected_cmd = self._get_expected_cmd()
self.parameters.expected_devices = self._get_expected_devices()
self.parameters.expected_healthcheck = self._get_expected_healthcheck()
if not self.container.get('HostConfig'):
self.fail("has_config_diff: Error parsing container properties. HostConfig missing.")
if not self.container.get('Config'):
self.fail("has_config_diff: Error parsing container properties. Config missing.")
if not self.container.get('NetworkSettings'):
self.fail("has_config_diff: Error parsing container properties. NetworkSettings missing.")
host_config = self.container['HostConfig']
log_config = host_config.get('LogConfig', dict())
restart_policy = host_config.get('RestartPolicy', dict())
config = self.container['Config']
network = self.container['NetworkSettings']
# The previous version of the docker module ignored the detach state by
# assuming if the container was running, it must have been detached.
detach = not (config.get('AttachStderr') and config.get('AttachStdout'))
# "ExposedPorts": null returns None type & causes AttributeError - PR #5517
if config.get('ExposedPorts') is not None:
expected_exposed = [self._normalize_port(p) for p in config.get('ExposedPorts', dict()).keys()]
else:
expected_exposed = []
# Map parameters to container inspect results
config_mapping = dict(
expected_cmd=config.get('Cmd'),
domainname=config.get('Domainname'),
hostname=config.get('Hostname'),
user=config.get('User'),
detach=detach,
init=host_config.get('Init'),
interactive=config.get('OpenStdin'),
capabilities=host_config.get('CapAdd'),
cap_drop=host_config.get('CapDrop'),
expected_devices=host_config.get('Devices'),
dns_servers=host_config.get('Dns'),
dns_opts=host_config.get('DnsOptions'),
dns_search_domains=host_config.get('DnsSearch'),
expected_env=(config.get('Env') or []),
expected_entrypoint=config.get('Entrypoint'),
expected_etc_hosts=host_config['ExtraHosts'],
expected_exposed=expected_exposed,
groups=host_config.get('GroupAdd'),
ipc_mode=host_config.get("IpcMode"),
labels=config.get('Labels'),
expected_links=host_config.get('Links'),
mac_address=network.get('MacAddress'),
memory_swappiness=host_config.get('MemorySwappiness'),
network_mode=host_config.get('NetworkMode'),
userns_mode=host_config.get('UsernsMode'),
oom_killer=host_config.get('OomKillDisable'),
oom_score_adj=host_config.get('OomScoreAdj'),
pid_mode=host_config.get('PidMode'),
privileged=host_config.get('Privileged'),
expected_ports=host_config.get('PortBindings'),
read_only=host_config.get('ReadonlyRootfs'),
restart_policy=restart_policy.get('Name'),
runtime=host_config.get('Runtime'),
shm_size=host_config.get('ShmSize'),
security_opts=host_config.get("SecurityOpt"),
stop_signal=config.get("StopSignal"),
tmpfs=host_config.get('Tmpfs'),
tty=config.get('Tty'),
expected_ulimits=host_config.get('Ulimits'),
expected_sysctls=host_config.get('Sysctls'),
uts=host_config.get('UTSMode'),
expected_volumes=config.get('Volumes'),
expected_binds=host_config.get('Binds'),
volume_driver=host_config.get('VolumeDriver'),
volumes_from=host_config.get('VolumesFrom'),
working_dir=config.get('WorkingDir'),
publish_all_ports=host_config.get('PublishAllPorts'),
expected_healthcheck=config.get('Healthcheck'),
disable_healthcheck=(not config.get('Healthcheck') or config.get('Healthcheck').get('Test') == ['NONE']),
device_read_bps=host_config.get('BlkioDeviceReadBps'),
device_write_bps=host_config.get('BlkioDeviceWriteBps'),
device_read_iops=host_config.get('BlkioDeviceReadIOps'),
device_write_iops=host_config.get('BlkioDeviceWriteIOps'),
pids_limit=host_config.get('PidsLimit'),
# According to https://github.com/moby/moby/, support for HostConfig.Mounts
# has been included at least since v17.03.0-ce, which has API version 1.26.
# The previous tag, v1.9.1, has API version 1.21 and does not have
            # HostConfig.Mounts. It is unclear whether API version 1.25 supports it.
expected_mounts=self._decode_mounts(host_config.get('Mounts')),
cpus=host_config.get('NanoCpus'),
)
# Options which don't make sense without their accompanying option
if self.parameters.restart_policy:
config_mapping['restart_retries'] = restart_policy.get('MaximumRetryCount')
if self.parameters.log_driver:
config_mapping['log_driver'] = log_config.get('Type')
config_mapping['log_options'] = log_config.get('Config')
if self.parameters.client.option_minimal_versions['auto_remove']['supported']:
# auto_remove is only supported in Docker SDK for Python >= 2.0.0; unfortunately
# it has a default value, that's why we have to jump through the hoops here
config_mapping['auto_remove'] = host_config.get('AutoRemove')
if self.parameters.client.option_minimal_versions['stop_timeout']['supported']:
# stop_timeout is only supported in Docker SDK for Python >= 2.1. Note that
# stop_timeout has a hybrid role, in that it used to be something only used
# for stopping containers, and is now also used as a container property.
# That's why it needs special handling here.
config_mapping['stop_timeout'] = config.get('StopTimeout')
if self.parameters.client.docker_api_version < LooseVersion('1.22'):
# For docker API < 1.22, update_container() is not supported. Thus
# we need to handle all limits which are usually handled by
# update_container() as configuration changes which require a container
# restart.
config_mapping.update(dict(
blkio_weight=host_config.get('BlkioWeight'),
cpu_period=host_config.get('CpuPeriod'),
cpu_quota=host_config.get('CpuQuota'),
cpu_shares=host_config.get('CpuShares'),
cpuset_cpus=host_config.get('CpusetCpus'),
cpuset_mems=host_config.get('CpusetMems'),
kernel_memory=host_config.get("KernelMemory"),
memory=host_config.get('Memory'),
memory_reservation=host_config.get('MemoryReservation'),
memory_swap=host_config.get('MemorySwap'),
))
differences = DifferenceTracker()
for key, value in config_mapping.items():
minimal_version = self.parameters.client.option_minimal_versions.get(key, {})
if not minimal_version.get('supported', True):
continue
compare = self.parameters.client.comparisons[self.parameters_map.get(key, key)]
self.log('check differences %s %s vs %s (%s)' % (key, getattr(self.parameters, key), str(value), compare))
if getattr(self.parameters, key, None) is not None:
match = self._compare(getattr(self.parameters, key), value, compare)
if not match:
# no match. record the differences
p = getattr(self.parameters, key)
c = value
if compare['type'] == 'set':
# Since the order does not matter, sort so that the diff output is better.
if p is not None:
p = sorted(p)
if c is not None:
c = sorted(c)
elif compare['type'] == 'set(dict)':
# Since the order does not matter, sort so that the diff output is better.
if key == 'expected_mounts':
# For selected values, use one entry as key
def sort_key_fn(x):
return x['target']
else:
# We sort the list of dictionaries by using the sorted items of a dict as its key.
def sort_key_fn(x):
return sorted((a, str(b)) for a, b in x.items())
if p is not None:
p = sorted(p, key=sort_key_fn)
if c is not None:
c = sorted(c, key=sort_key_fn)
differences.add(key, parameter=p, active=c)
has_differences = not differences.empty
return has_differences, differences
def has_different_resource_limits(self):
'''
Diff parameters and container resource limits
'''
if not self.container.get('HostConfig'):
self.fail("limits_differ_from_container: Error parsing container properties. HostConfig missing.")
if self.parameters.client.docker_api_version < LooseVersion('1.22'):
# update_container() call not supported
return False, []
host_config = self.container['HostConfig']
config_mapping = dict(
blkio_weight=host_config.get('BlkioWeight'),
cpu_period=host_config.get('CpuPeriod'),
cpu_quota=host_config.get('CpuQuota'),
cpu_shares=host_config.get('CpuShares'),
cpuset_cpus=host_config.get('CpusetCpus'),
cpuset_mems=host_config.get('CpusetMems'),
kernel_memory=host_config.get("KernelMemory"),
memory=host_config.get('Memory'),
memory_reservation=host_config.get('MemoryReservation'),
memory_swap=host_config.get('MemorySwap'),
)
differences = DifferenceTracker()
for key, value in config_mapping.items():
if getattr(self.parameters, key, None):
compare = self.parameters.client.comparisons[self.parameters_map.get(key, key)]
match = self._compare(getattr(self.parameters, key), value, compare)
if not match:
# no match. record the differences
differences.add(key, parameter=getattr(self.parameters, key), active=value)
different = not differences.empty
return different, differences
def has_network_differences(self):
'''
Check if the container is connected to requested networks with expected options: links, aliases, ipv4, ipv6
'''
different = False
differences = []
if not self.parameters.networks:
return different, differences
if not self.container.get('NetworkSettings'):
self.fail("has_missing_networks: Error parsing container properties. NetworkSettings missing.")
connected_networks = self.container['NetworkSettings']['Networks']
for network in self.parameters.networks:
network_info = connected_networks.get(network['name'])
if network_info is None:
different = True
differences.append(dict(
parameter=network,
container=None
))
else:
diff = False
network_info_ipam = network_info.get('IPAMConfig') or {}
if network.get('ipv4_address') and network['ipv4_address'] != network_info_ipam.get('IPv4Address'):
diff = True
if network.get('ipv6_address') and network['ipv6_address'] != network_info_ipam.get('IPv6Address'):
diff = True
if network.get('aliases'):
if not compare_generic(network['aliases'], network_info.get('Aliases'), 'allow_more_present', 'set'):
diff = True
if network.get('links'):
expected_links = []
for link, alias in network['links']:
expected_links.append("%s:%s" % (link, alias))
if not compare_generic(expected_links, network_info.get('Links'), 'allow_more_present', 'set'):
diff = True
if diff:
different = True
differences.append(dict(
parameter=network,
container=dict(
name=network['name'],
ipv4_address=network_info_ipam.get('IPv4Address'),
ipv6_address=network_info_ipam.get('IPv6Address'),
aliases=network_info.get('Aliases'),
links=network_info.get('Links')
)
))
return different, differences
def has_extra_networks(self):
'''
Check if the container is connected to non-requested networks
'''
extra_networks = []
extra = False
if not self.container.get('NetworkSettings'):
self.fail("has_extra_networks: Error parsing container properties. NetworkSettings missing.")
connected_networks = self.container['NetworkSettings'].get('Networks')
if connected_networks:
for network, network_config in connected_networks.items():
keep = False
if self.parameters.networks:
for expected_network in self.parameters.networks:
if expected_network['name'] == network:
keep = True
if not keep:
extra = True
extra_networks.append(dict(name=network, id=network_config['NetworkID']))
return extra, extra_networks
def _get_expected_devices(self):
if not self.parameters.devices:
return None
expected_devices = []
for device in self.parameters.devices:
parts = device.split(':')
if len(parts) == 1:
expected_devices.append(
dict(
CgroupPermissions='rwm',
PathInContainer=parts[0],
PathOnHost=parts[0]
))
elif len(parts) == 2:
parts = device.split(':')
expected_devices.append(
dict(
CgroupPermissions='rwm',
PathInContainer=parts[1],
PathOnHost=parts[0]
)
)
else:
expected_devices.append(
dict(
CgroupPermissions=parts[2],
PathInContainer=parts[1],
PathOnHost=parts[0]
))
return expected_devices
def _get_expected_entrypoint(self):
if not self.parameters.entrypoint:
return None
return shlex.split(self.parameters.entrypoint)
def _get_expected_ports(self):
if not self.parameters.published_ports:
return None
expected_bound_ports = {}
for container_port, config in self.parameters.published_ports.items():
if isinstance(container_port, int):
container_port = "%s/tcp" % container_port
if len(config) == 1:
if isinstance(config[0], int):
expected_bound_ports[container_port] = [{'HostIp': "0.0.0.0", 'HostPort': config[0]}]
else:
expected_bound_ports[container_port] = [{'HostIp': config[0], 'HostPort': ""}]
elif isinstance(config[0], tuple):
expected_bound_ports[container_port] = []
for host_ip, host_port in config:
expected_bound_ports[container_port].append({'HostIp': host_ip, 'HostPort': str(host_port)})
else:
expected_bound_ports[container_port] = [{'HostIp': config[0], 'HostPort': str(config[1])}]
return expected_bound_ports
def _get_expected_links(self):
if self.parameters.links is None:
return None
self.log('parameter links:')
self.log(self.parameters.links, pretty_print=True)
exp_links = []
for link, alias in self.parameters.links:
exp_links.append("/%s:%s/%s" % (link, ('/' + self.parameters.name), alias))
return exp_links
def _get_expected_binds(self, image):
self.log('_get_expected_binds')
image_vols = []
if image:
image_vols = self._get_image_binds(image[self.parameters.client.image_inspect_source].get('Volumes'))
param_vols = []
if self.parameters.volumes:
for vol in self.parameters.volumes:
host = None
if ':' in vol:
if len(vol.split(':')) == 3:
host, container, mode = vol.split(':')
if not is_volume_permissions(mode):
self.fail('Found invalid volumes mode: {0}'.format(mode))
if len(vol.split(':')) == 2:
parts = vol.split(':')
if not is_volume_permissions(parts[1]):
host, container, mode = vol.split(':') + ['rw']
if host:
param_vols.append("%s:%s:%s" % (host, container, mode))
result = list(set(image_vols + param_vols))
self.log("expected_binds:")
self.log(result, pretty_print=True)
return result
def _get_image_binds(self, volumes):
'''
Convert array of binds to array of strings with format host_path:container_path:mode
:param volumes: array of bind dicts
:return: array of strings
'''
results = []
if isinstance(volumes, dict):
results += self._get_bind_from_dict(volumes)
elif isinstance(volumes, list):
for vol in volumes:
results += self._get_bind_from_dict(vol)
return results
@staticmethod
def _get_bind_from_dict(volume_dict):
results = []
if volume_dict:
for host_path, config in volume_dict.items():
if isinstance(config, dict) and config.get('bind'):
container_path = config.get('bind')
mode = config.get('mode', 'rw')
results.append("%s:%s:%s" % (host_path, container_path, mode))
return results
def _get_expected_volumes(self, image):
self.log('_get_expected_volumes')
expected_vols = dict()
if image and image[self.parameters.client.image_inspect_source].get('Volumes'):
expected_vols.update(image[self.parameters.client.image_inspect_source].get('Volumes'))
if self.parameters.volumes:
for vol in self.parameters.volumes:
container = None
if ':' in vol:
if len(vol.split(':')) == 3:
dummy, container, mode = vol.split(':')
if not is_volume_permissions(mode):
self.fail('Found invalid volumes mode: {0}'.format(mode))
if len(vol.split(':')) == 2:
parts = vol.split(':')
if not is_volume_permissions(parts[1]):
dummy, container, mode = vol.split(':') + ['rw']
new_vol = dict()
if container:
new_vol[container] = dict()
else:
new_vol[vol] = dict()
expected_vols.update(new_vol)
if not expected_vols:
expected_vols = None
self.log("expected_volumes:")
self.log(expected_vols, pretty_print=True)
return expected_vols
def _get_expected_env(self, image):
self.log('_get_expected_env')
expected_env = dict()
if image and image[self.parameters.client.image_inspect_source].get('Env'):
for env_var in image[self.parameters.client.image_inspect_source]['Env']:
parts = env_var.split('=', 1)
expected_env[parts[0]] = parts[1]
if self.parameters.env:
expected_env.update(self.parameters.env)
param_env = []
for key, value in expected_env.items():
param_env.append("%s=%s" % (key, value))
return param_env
def _get_expected_exposed(self, image):
self.log('_get_expected_exposed')
image_ports = []
if image:
image_exposed_ports = image[self.parameters.client.image_inspect_source].get('ExposedPorts') or {}
image_ports = [self._normalize_port(p) for p in image_exposed_ports.keys()]
param_ports = []
if self.parameters.ports:
param_ports = [str(p[0]) + '/' + p[1] for p in self.parameters.ports]
result = list(set(image_ports + param_ports))
self.log(result, pretty_print=True)
return result
def _get_expected_ulimits(self, config_ulimits):
self.log('_get_expected_ulimits')
if config_ulimits is None:
return None
results = []
for limit in config_ulimits:
results.append(dict(
Name=limit.name,
Soft=limit.soft,
Hard=limit.hard
))
return results
def _get_expected_sysctls(self, config_sysctls):
self.log('_get_expected_sysctls')
if config_sysctls is None:
return None
result = dict()
for key, value in config_sysctls.items():
result[key] = str(value)
return result
def _get_expected_cmd(self):
self.log('_get_expected_cmd')
if not self.parameters.command:
return None
return shlex.split(self.parameters.command)
def _convert_simple_dict_to_list(self, param_name, join_with=':'):
if getattr(self.parameters, param_name, None) is None:
return None
results = []
for key, value in getattr(self.parameters, param_name).items():
results.append("%s%s%s" % (key, join_with, value))
return results
def _normalize_port(self, port):
if '/' not in port:
return port + '/tcp'
return port
def _get_expected_healthcheck(self):
self.log('_get_expected_healthcheck')
expected_healthcheck = dict()
if self.parameters.healthcheck:
expected_healthcheck.update([(k.title().replace("_", ""), v)
for k, v in self.parameters.healthcheck.items()])
return expected_healthcheck
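# Illustrative sketch (helper name and sample values are made up): the
# healthcheck normalization in _get_expected_healthcheck() above turns the
# module's snake_case option names into the CamelCase keys Docker reports,
# e.g. {'test': ['CMD', 'true'], 'start_period': '5s'} becomes
# {'Test': ['CMD', 'true'], 'StartPeriod': '5s'}. The same transform,
# standalone:
def _demo_healthcheck_key_normalization(healthcheck):
    return dict((k.title().replace("_", ""), v) for k, v in healthcheck.items())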
class ContainerManager(DockerBaseClass):
'''
Perform container management tasks
'''
def __init__(self, client):
super(ContainerManager, self).__init__()
if client.module.params.get('log_options') and not client.module.params.get('log_driver'):
client.module.warn('log_options is ignored when log_driver is not specified')
if client.module.params.get('healthcheck') and not client.module.params.get('healthcheck').get('test'):
client.module.warn('healthcheck is ignored when test is not specified')
if client.module.params.get('restart_retries') is not None and not client.module.params.get('restart_policy'):
client.module.warn('restart_retries is ignored when restart_policy is not specified')
self.client = client
self.parameters = TaskParameters(client)
self.check_mode = self.client.check_mode
self.results = {'changed': False, 'actions': []}
self.diff = {}
self.diff_tracker = DifferenceTracker()
self.facts = {}
state = self.parameters.state
if state in ('stopped', 'started', 'present'):
self.present(state)
elif state == 'absent':
self.absent()
if not self.check_mode and not self.parameters.debug:
self.results.pop('actions')
if self.client.module._diff or self.parameters.debug:
self.diff['before'], self.diff['after'] = self.diff_tracker.get_before_after()
self.results['diff'] = self.diff
if self.facts:
self.results['ansible_facts'] = {'docker_container': self.facts}
self.results['container'] = self.facts
def present(self, state):
container = self._get_container(self.parameters.name)
was_running = container.running
was_paused = container.paused
container_created = False
        # If the image parameter was passed, we need to deal with the image
        # version comparison. Otherwise, the decision depends on whether the
        # container already exists; if it has to be recreated, we reuse the
        # existing container's image ID.
image = self._get_image()
self.log(image, pretty_print=True)
if not container.exists:
# New container
self.log('No container found')
if not self.parameters.image:
self.fail('Cannot create container when image is not specified!')
self.diff_tracker.add('exists', parameter=True, active=False)
new_container = self.container_create(self.parameters.image, self.parameters.create_parameters)
if new_container:
container = new_container
container_created = True
else:
# Existing container
different, differences = container.has_different_configuration(image)
image_different = False
if self.parameters.comparisons['image']['comparison'] == 'strict':
image_different = self._image_is_different(image, container)
if image_different or different or self.parameters.recreate:
self.diff_tracker.merge(differences)
self.diff['differences'] = differences.get_legacy_docker_container_diffs()
if image_different:
self.diff['image_different'] = True
self.log("differences")
self.log(differences.get_legacy_docker_container_diffs(), pretty_print=True)
image_to_use = self.parameters.image
if not image_to_use and container and container.Image:
image_to_use = container.Image
if not image_to_use:
self.fail('Cannot recreate container when image is not specified or cannot be extracted from current container!')
if container.running:
self.container_stop(container.Id)
self.container_remove(container.Id)
new_container = self.container_create(image_to_use, self.parameters.create_parameters)
if new_container:
container = new_container
container_created = True
if container and container.exists:
container = self.update_limits(container)
container = self.update_networks(container, container_created)
if state == 'started' and not container.running:
self.diff_tracker.add('running', parameter=True, active=was_running)
container = self.container_start(container.Id)
elif state == 'started' and self.parameters.restart:
self.diff_tracker.add('running', parameter=True, active=was_running)
self.diff_tracker.add('restarted', parameter=True, active=False)
container = self.container_restart(container.Id)
elif state == 'stopped' and container.running:
self.diff_tracker.add('running', parameter=False, active=was_running)
self.container_stop(container.Id)
container = self._get_container(container.Id)
if state == 'started' and container.paused is not None and container.paused != self.parameters.paused:
self.diff_tracker.add('paused', parameter=self.parameters.paused, active=was_paused)
if not self.check_mode:
try:
if self.parameters.paused:
self.client.pause(container=container.Id)
else:
self.client.unpause(container=container.Id)
except Exception as exc:
self.fail("Error %s container %s: %s" % (
"pausing" if self.parameters.paused else "unpausing", container.Id, str(exc)
))
container = self._get_container(container.Id)
self.results['changed'] = True
self.results['actions'].append(dict(set_paused=self.parameters.paused))
self.facts = container.raw
def absent(self):
container = self._get_container(self.parameters.name)
if container.exists:
if container.running:
self.diff_tracker.add('running', parameter=False, active=True)
self.container_stop(container.Id)
self.diff_tracker.add('exists', parameter=False, active=True)
self.container_remove(container.Id)
def fail(self, msg, **kwargs):
self.client.fail(msg, **kwargs)
def _output_logs(self, msg):
self.client.module.log(msg=msg)
def _get_container(self, container):
'''
Expects container ID or Name. Returns a container object
'''
return Container(self.client.get_container(container), self.parameters)
def _get_image(self):
if not self.parameters.image:
self.log('No image specified')
return None
if is_image_name_id(self.parameters.image):
image = self.client.find_image_by_id(self.parameters.image)
else:
repository, tag = utils.parse_repository_tag(self.parameters.image)
if not tag:
tag = "latest"
image = self.client.find_image(repository, tag)
if not image or self.parameters.pull:
if not self.check_mode:
self.log("Pull the image.")
image, alreadyToLatest = self.client.pull_image(repository, tag)
if alreadyToLatest:
self.results['changed'] = False
else:
self.results['changed'] = True
self.results['actions'].append(dict(pulled_image="%s:%s" % (repository, tag)))
elif not image:
# If the image isn't there, claim we'll pull.
# (Implicitly: if the image is there, claim it already was latest.)
self.results['changed'] = True
self.results['actions'].append(dict(pulled_image="%s:%s" % (repository, tag)))
self.log("image")
self.log(image, pretty_print=True)
return image
def _image_is_different(self, image, container):
if image and image.get('Id'):
if container and container.Image:
if image.get('Id') != container.Image:
self.diff_tracker.add('image', parameter=image.get('Id'), active=container.Image)
return True
return False
def update_limits(self, container):
limits_differ, different_limits = container.has_different_resource_limits()
if limits_differ:
self.log("limit differences:")
self.log(different_limits.get_legacy_docker_container_diffs(), pretty_print=True)
self.diff_tracker.merge(different_limits)
if limits_differ and not self.check_mode:
self.container_update(container.Id, self.parameters.update_parameters)
return self._get_container(container.Id)
return container
def update_networks(self, container, container_created):
updated_container = container
if self.parameters.comparisons['networks']['comparison'] != 'ignore' or container_created:
has_network_differences, network_differences = container.has_network_differences()
if has_network_differences:
if self.diff.get('differences'):
self.diff['differences'].append(dict(network_differences=network_differences))
else:
self.diff['differences'] = [dict(network_differences=network_differences)]
for netdiff in network_differences:
self.diff_tracker.add(
'network.{0}'.format(netdiff['parameter']['name']),
parameter=netdiff['parameter'],
active=netdiff['container']
)
self.results['changed'] = True
updated_container = self._add_networks(container, network_differences)
if (self.parameters.comparisons['networks']['comparison'] == 'strict' and self.parameters.networks is not None) or self.parameters.purge_networks:
has_extra_networks, extra_networks = container.has_extra_networks()
if has_extra_networks:
if self.diff.get('differences'):
self.diff['differences'].append(dict(purge_networks=extra_networks))
else:
self.diff['differences'] = [dict(purge_networks=extra_networks)]
for extra_network in extra_networks:
self.diff_tracker.add(
'network.{0}'.format(extra_network['name']),
active=extra_network
)
self.results['changed'] = True
updated_container = self._purge_networks(container, extra_networks)
return updated_container
def _add_networks(self, container, differences):
for diff in differences:
# remove the container from the network, if connected
if diff.get('container'):
self.results['actions'].append(dict(removed_from_network=diff['parameter']['name']))
if not self.check_mode:
try:
self.client.disconnect_container_from_network(container.Id, diff['parameter']['id'])
except Exception as exc:
self.fail("Error disconnecting container from network %s - %s" % (diff['parameter']['name'],
str(exc)))
# connect to the network
params = dict()
for para in ('ipv4_address', 'ipv6_address', 'links', 'aliases'):
if diff['parameter'].get(para):
params[para] = diff['parameter'][para]
self.results['actions'].append(dict(added_to_network=diff['parameter']['name'], network_parameters=params))
if not self.check_mode:
try:
self.log("Connecting container to network %s" % diff['parameter']['id'])
self.log(params, pretty_print=True)
self.client.connect_container_to_network(container.Id, diff['parameter']['id'], **params)
except Exception as exc:
self.fail("Error connecting container to network %s - %s" % (diff['parameter']['name'], str(exc)))
return self._get_container(container.Id)
def _purge_networks(self, container, networks):
for network in networks:
self.results['actions'].append(dict(removed_from_network=network['name']))
if not self.check_mode:
try:
self.client.disconnect_container_from_network(container.Id, network['name'])
except Exception as exc:
self.fail("Error disconnecting container from network %s - %s" % (network['name'],
str(exc)))
return self._get_container(container.Id)
def container_create(self, image, create_parameters):
self.log("create container")
self.log("image: %s parameters:" % image)
self.log(create_parameters, pretty_print=True)
self.results['actions'].append(dict(created="Created container", create_parameters=create_parameters))
self.results['changed'] = True
new_container = None
if not self.check_mode:
try:
new_container = self.client.create_container(image, **create_parameters)
self.client.report_warnings(new_container)
except Exception as exc:
self.fail("Error creating container: %s" % str(exc))
return self._get_container(new_container['Id'])
return new_container
def container_start(self, container_id):
self.log("start container %s" % (container_id))
self.results['actions'].append(dict(started=container_id))
self.results['changed'] = True
if not self.check_mode:
try:
self.client.start(container=container_id)
except Exception as exc:
self.fail("Error starting container %s: %s" % (container_id, str(exc)))
if self.parameters.detach is False:
if self.client.docker_py_version >= LooseVersion('3.0'):
status = self.client.wait(container_id)['StatusCode']
else:
status = self.client.wait(container_id)
if self.parameters.auto_remove:
output = "Cannot retrieve result as auto_remove is enabled"
if self.parameters.output_logs:
self.client.module.warn('Cannot output_logs if auto_remove is enabled!')
else:
config = self.client.inspect_container(container_id)
logging_driver = config['HostConfig']['LogConfig']['Type']
if logging_driver in ('json-file', 'journald'):
output = self.client.logs(container_id, stdout=True, stderr=True, stream=False, timestamps=False)
if self.parameters.output_logs:
self._output_logs(msg=output)
else:
output = "Result logged using `%s` driver" % logging_driver
if status != 0:
self.fail(output, status=status)
if self.parameters.cleanup:
self.container_remove(container_id, force=True)
insp = self._get_container(container_id)
if insp.raw:
insp.raw['Output'] = output
else:
insp.raw = dict(Output=output)
return insp
return self._get_container(container_id)
def container_remove(self, container_id, link=False, force=False):
volume_state = (not self.parameters.keep_volumes)
self.log("remove container container:%s v:%s link:%s force%s" % (container_id, volume_state, link, force))
self.results['actions'].append(dict(removed=container_id, volume_state=volume_state, link=link, force=force))
self.results['changed'] = True
response = None
if not self.check_mode:
count = 0
while True:
try:
response = self.client.remove_container(container_id, v=volume_state, link=link, force=force)
except NotFound as dummy:
pass
except APIError as exc:
if 'Unpause the container before stopping or killing' in exc.explanation:
# New docker daemon versions do not allow containers to be removed
# if they are paused. Make sure we don't end up in an infinite loop.
if count == 3:
self.fail("Error removing container %s (tried to unpause three times): %s" % (container_id, str(exc)))
count += 1
# Unpause
try:
self.client.unpause(container=container_id)
except Exception as exc2:
self.fail("Error unpausing container %s for removal: %s" % (container_id, str(exc2)))
# Now try again
continue
if 'removal of container ' in exc.explanation and ' is already in progress' in exc.explanation:
pass
else:
self.fail("Error removing container %s: %s" % (container_id, str(exc)))
except Exception as exc:
self.fail("Error removing container %s: %s" % (container_id, str(exc)))
# We only loop when explicitly requested by 'continue'
break
return response
def container_update(self, container_id, update_parameters):
if update_parameters:
self.log("update container %s" % (container_id))
self.log(update_parameters, pretty_print=True)
self.results['actions'].append(dict(updated=container_id, update_parameters=update_parameters))
self.results['changed'] = True
if not self.check_mode and callable(getattr(self.client, 'update_container')):
try:
result = self.client.update_container(container_id, **update_parameters)
self.client.report_warnings(result)
except Exception as exc:
self.fail("Error updating container %s: %s" % (container_id, str(exc)))
return self._get_container(container_id)
def container_kill(self, container_id):
self.results['actions'].append(dict(killed=container_id, signal=self.parameters.kill_signal))
self.results['changed'] = True
response = None
if not self.check_mode:
try:
if self.parameters.kill_signal:
response = self.client.kill(container_id, signal=self.parameters.kill_signal)
else:
response = self.client.kill(container_id)
except Exception as exc:
self.fail("Error killing container %s: %s" % (container_id, exc))
return response
def container_restart(self, container_id):
self.results['actions'].append(dict(restarted=container_id, timeout=self.parameters.stop_timeout))
self.results['changed'] = True
if not self.check_mode:
try:
if self.parameters.stop_timeout:
dummy = self.client.restart(container_id, timeout=self.parameters.stop_timeout)
else:
dummy = self.client.restart(container_id)
except Exception as exc:
self.fail("Error restarting container %s: %s" % (container_id, str(exc)))
return self._get_container(container_id)
def container_stop(self, container_id):
if self.parameters.force_kill:
self.container_kill(container_id)
return
self.results['actions'].append(dict(stopped=container_id, timeout=self.parameters.stop_timeout))
self.results['changed'] = True
response = None
if not self.check_mode:
count = 0
while True:
try:
if self.parameters.stop_timeout:
response = self.client.stop(container_id, timeout=self.parameters.stop_timeout)
else:
response = self.client.stop(container_id)
except APIError as exc:
if 'Unpause the container before stopping or killing' in exc.explanation:
                        # New docker daemon versions do not allow containers to be stopped
                        # if they are paused. Make sure we don't end up in an infinite loop.
                        if count == 3:
                            self.fail("Error stopping container %s (tried to unpause three times): %s" % (container_id, str(exc)))
count += 1
# Unpause
try:
self.client.unpause(container=container_id)
except Exception as exc2:
self.fail("Error unpausing container %s for removal: %s" % (container_id, str(exc2)))
# Now try again
continue
self.fail("Error stopping container %s: %s" % (container_id, str(exc)))
except Exception as exc:
self.fail("Error stopping container %s: %s" % (container_id, str(exc)))
# We only loop when explicitly requested by 'continue'
break
return response
def detect_ipvX_address_usage(client):
'''
Helper function to detect whether any specified network uses ipv4_address or ipv6_address
'''
for network in client.module.params.get("networks") or []:
if network.get('ipv4_address') is not None or network.get('ipv6_address') is not None:
return True
return False
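# Illustrative sketch (the fake client/module classes and the network values
# are made up): how the usage detector above is meant to be driven — it only
# reports True when some network pins a static IPv4/IPv6 address, so the
# minimum docker-py/API version is enforced only when actually needed.
def _demo_ipvX_usage_detection():
    class _FakeModule(object):
        params = {'networks': [{'name': 'app-net', 'ipv4_address': '172.20.0.10'}]}
    class _FakeClient(object):
        module = _FakeModule()
    return detect_ipvX_address_usage(_FakeClient())  # returns True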
class AnsibleDockerClientContainer(AnsibleDockerClient):
# A list of module options which are not docker container properties
__NON_CONTAINER_PROPERTY_OPTIONS = tuple([
'env_file', 'force_kill', 'keep_volumes', 'ignore_image', 'name', 'pull', 'purge_networks',
'recreate', 'restart', 'state', 'trust_image_content', 'networks', 'cleanup', 'kill_signal',
'output_logs', 'paused'
] + list(DOCKER_COMMON_ARGS.keys()))
def _parse_comparisons(self):
comparisons = {}
comp_aliases = {}
# Put in defaults
explicit_types = dict(
command='list',
devices='set(dict)',
dns_search_domains='list',
dns_servers='list',
env='set',
entrypoint='list',
etc_hosts='set',
mounts='set(dict)',
networks='set(dict)',
ulimits='set(dict)',
device_read_bps='set(dict)',
device_write_bps='set(dict)',
device_read_iops='set(dict)',
device_write_iops='set(dict)',
)
all_options = set() # this is for improving user feedback when a wrong option was specified for comparison
default_values = dict(
stop_timeout='ignore',
)
for option, data in self.module.argument_spec.items():
all_options.add(option)
for alias in data.get('aliases', []):
all_options.add(alias)
# Ignore options which aren't used as container properties
if option in self.__NON_CONTAINER_PROPERTY_OPTIONS and option != 'networks':
continue
# Determine option type
if option in explicit_types:
datatype = explicit_types[option]
elif data['type'] == 'list':
datatype = 'set'
elif data['type'] == 'dict':
datatype = 'dict'
else:
datatype = 'value'
# Determine comparison type
if option in default_values:
comparison = default_values[option]
elif datatype in ('list', 'value'):
comparison = 'strict'
else:
comparison = 'allow_more_present'
comparisons[option] = dict(type=datatype, comparison=comparison, name=option)
# Keep track of aliases
comp_aliases[option] = option
for alias in data.get('aliases', []):
comp_aliases[alias] = option
# Process legacy ignore options
if self.module.params['ignore_image']:
comparisons['image']['comparison'] = 'ignore'
if self.module.params['purge_networks']:
comparisons['networks']['comparison'] = 'strict'
# Process options
if self.module.params.get('comparisons'):
# If '*' appears in comparisons, process it first
if '*' in self.module.params['comparisons']:
value = self.module.params['comparisons']['*']
if value not in ('strict', 'ignore'):
self.fail("The wildcard can only be used with comparison modes 'strict' and 'ignore'!")
for option, v in comparisons.items():
if option == 'networks':
# `networks` is special: only update if
# some value is actually specified
if self.module.params['networks'] is None:
continue
v['comparison'] = value
# Now process all other comparisons.
comp_aliases_used = {}
for key, value in self.module.params['comparisons'].items():
if key == '*':
continue
# Find main key
key_main = comp_aliases.get(key)
if key_main is None:
                    if key in all_options:
self.fail("The module option '%s' cannot be specified in the comparisons dict, "
"since it does not correspond to container's state!" % key)
self.fail("Unknown module option '%s' in comparisons dict!" % key)
if key_main in comp_aliases_used:
self.fail("Both '%s' and '%s' (aliases of %s) are specified in comparisons dict!" % (key, comp_aliases_used[key_main], key_main))
comp_aliases_used[key_main] = key
# Check value and update accordingly
if value in ('strict', 'ignore'):
comparisons[key_main]['comparison'] = value
elif value == 'allow_more_present':
if comparisons[key_main]['type'] == 'value':
self.fail("Option '%s' is a value and not a set/list/dict, so its comparison cannot be %s" % (key, value))
comparisons[key_main]['comparison'] = value
else:
self.fail("Unknown comparison mode '%s'!" % value)
# Add implicit options
comparisons['publish_all_ports'] = dict(type='value', comparison='strict', name='published_ports')
comparisons['expected_ports'] = dict(type='dict', comparison=comparisons['published_ports']['comparison'], name='expected_ports')
comparisons['disable_healthcheck'] = dict(type='value',
comparison='ignore' if comparisons['healthcheck']['comparison'] == 'ignore' else 'strict',
name='disable_healthcheck')
# Check legacy values
if self.module.params['ignore_image'] and comparisons['image']['comparison'] != 'ignore':
self.module.warn('The ignore_image option has been overridden by the comparisons option!')
if self.module.params['purge_networks'] and comparisons['networks']['comparison'] != 'strict':
self.module.warn('The purge_networks option has been overridden by the comparisons option!')
self.comparisons = comparisons
def _get_additional_minimal_versions(self):
stop_timeout_supported = self.docker_api_version >= LooseVersion('1.25')
stop_timeout_needed_for_update = self.module.params.get("stop_timeout") is not None and self.module.params.get('state') != 'absent'
if stop_timeout_supported:
stop_timeout_supported = self.docker_py_version >= LooseVersion('2.1')
if stop_timeout_needed_for_update and not stop_timeout_supported:
# We warn (instead of fail) since in older versions, stop_timeout was not used
# to update the container's configuration, but only when stopping a container.
self.module.warn("Docker SDK for Python's version is %s. Minimum version required is 2.1 to update "
"the container's stop_timeout configuration. "
"If you use the 'docker-py' module, you have to switch to the 'docker' Python package." % (docker_version,))
else:
if stop_timeout_needed_for_update and not stop_timeout_supported:
# We warn (instead of fail) since in older versions, stop_timeout was not used
# to update the container's configuration, but only when stopping a container.
self.module.warn("Docker API version is %s. Minimum version required is 1.25 to set or "
"update the container's stop_timeout configuration." % (self.docker_api_version_str,))
self.option_minimal_versions['stop_timeout']['supported'] = stop_timeout_supported
def __init__(self, **kwargs):
option_minimal_versions = dict(
# internal options
log_config=dict(),
publish_all_ports=dict(),
ports=dict(),
volume_binds=dict(),
name=dict(),
# normal options
device_read_bps=dict(docker_py_version='1.9.0', docker_api_version='1.22'),
device_read_iops=dict(docker_py_version='1.9.0', docker_api_version='1.22'),
device_write_bps=dict(docker_py_version='1.9.0', docker_api_version='1.22'),
device_write_iops=dict(docker_py_version='1.9.0', docker_api_version='1.22'),
dns_opts=dict(docker_api_version='1.21', docker_py_version='1.10.0'),
ipc_mode=dict(docker_api_version='1.25'),
mac_address=dict(docker_api_version='1.25'),
oom_score_adj=dict(docker_api_version='1.22'),
shm_size=dict(docker_api_version='1.22'),
stop_signal=dict(docker_api_version='1.21'),
tmpfs=dict(docker_api_version='1.22'),
volume_driver=dict(docker_api_version='1.21'),
memory_reservation=dict(docker_api_version='1.21'),
kernel_memory=dict(docker_api_version='1.21'),
auto_remove=dict(docker_py_version='2.1.0', docker_api_version='1.25'),
healthcheck=dict(docker_py_version='2.0.0', docker_api_version='1.24'),
init=dict(docker_py_version='2.2.0', docker_api_version='1.25'),
runtime=dict(docker_py_version='2.4.0', docker_api_version='1.25'),
sysctls=dict(docker_py_version='1.10.0', docker_api_version='1.24'),
userns_mode=dict(docker_py_version='1.10.0', docker_api_version='1.23'),
uts=dict(docker_py_version='3.5.0', docker_api_version='1.25'),
pids_limit=dict(docker_py_version='1.10.0', docker_api_version='1.23'),
mounts=dict(docker_py_version='2.6.0', docker_api_version='1.25'),
cpus=dict(docker_py_version='2.3.0', docker_api_version='1.25'),
# specials
ipvX_address_supported=dict(docker_py_version='1.9.0', docker_api_version='1.22',
detect_usage=detect_ipvX_address_usage,
usage_msg='ipv4_address or ipv6_address in networks'),
stop_timeout=dict(), # see _get_additional_minimal_versions()
)
super(AnsibleDockerClientContainer, self).__init__(
option_minimal_versions=option_minimal_versions,
option_minimal_versions_ignore_params=self.__NON_CONTAINER_PROPERTY_OPTIONS,
**kwargs
)
self.image_inspect_source = 'Config'
if self.docker_api_version < LooseVersion('1.21'):
self.image_inspect_source = 'ContainerConfig'
self._get_additional_minimal_versions()
self._parse_comparisons()
if self.module.params['container_default_behavior'] is None:
self.module.params['container_default_behavior'] = 'compatibility'
self.module.deprecate(
'The container_default_behavior option will change its default value from "compatibility" to '
'"no_defaults" in Ansible 2.14. To remove this warning, please specify an explicit value for it now',
version='2.14'
)
if self.module.params['container_default_behavior'] == 'compatibility':
old_default_values = dict(
auto_remove=False,
detach=True,
init=False,
interactive=False,
memory="0",
paused=False,
privileged=False,
read_only=False,
tty=False,
)
for param, value in old_default_values.items():
if self.module.params[param] is None:
self.module.params[param] = value
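# Illustrative sketch (the two options shown and their modes are examples
# only): the shape of the entries _parse_comparisons() builds — each option
# gets a datatype, a comparison mode and its canonical name.
_EXAMPLE_COMPARISON_ENTRIES = {
    'image': dict(type='value', comparison='strict', name='image'),
    'env': dict(type='set', comparison='allow_more_present', name='env'),
}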
def main():
argument_spec = dict(
auto_remove=dict(type='bool'),
blkio_weight=dict(type='int'),
capabilities=dict(type='list', elements='str'),
cap_drop=dict(type='list', elements='str'),
cleanup=dict(type='bool', default=False),
command=dict(type='raw'),
comparisons=dict(type='dict'),
container_default_behavior=dict(type='str', choices=['compatibility', 'no_defaults']),
cpu_period=dict(type='int'),
cpu_quota=dict(type='int'),
cpus=dict(type='float'),
cpuset_cpus=dict(type='str'),
cpuset_mems=dict(type='str'),
cpu_shares=dict(type='int'),
detach=dict(type='bool'),
devices=dict(type='list', elements='str'),
device_read_bps=dict(type='list', elements='dict', options=dict(
path=dict(required=True, type='str'),
rate=dict(required=True, type='str'),
)),
device_write_bps=dict(type='list', elements='dict', options=dict(
path=dict(required=True, type='str'),
rate=dict(required=True, type='str'),
)),
device_read_iops=dict(type='list', elements='dict', options=dict(
path=dict(required=True, type='str'),
rate=dict(required=True, type='int'),
)),
device_write_iops=dict(type='list', elements='dict', options=dict(
path=dict(required=True, type='str'),
rate=dict(required=True, type='int'),
)),
dns_servers=dict(type='list', elements='str'),
dns_opts=dict(type='list', elements='str'),
dns_search_domains=dict(type='list', elements='str'),
domainname=dict(type='str'),
entrypoint=dict(type='list', elements='str'),
env=dict(type='dict'),
env_file=dict(type='path'),
etc_hosts=dict(type='dict'),
exposed_ports=dict(type='list', elements='str', aliases=['exposed', 'expose']),
force_kill=dict(type='bool', default=False, aliases=['forcekill']),
groups=dict(type='list', elements='str'),
healthcheck=dict(type='dict', options=dict(
test=dict(type='raw'),
interval=dict(type='str'),
timeout=dict(type='str'),
start_period=dict(type='str'),
retries=dict(type='int'),
)),
hostname=dict(type='str'),
ignore_image=dict(type='bool', default=False),
image=dict(type='str'),
init=dict(type='bool'),
interactive=dict(type='bool'),
ipc_mode=dict(type='str'),
keep_volumes=dict(type='bool', default=True),
kernel_memory=dict(type='str'),
kill_signal=dict(type='str'),
labels=dict(type='dict'),
links=dict(type='list', elements='str'),
log_driver=dict(type='str'),
log_options=dict(type='dict', aliases=['log_opt']),
mac_address=dict(type='str'),
memory=dict(type='str'),
memory_reservation=dict(type='str'),
memory_swap=dict(type='str'),
memory_swappiness=dict(type='int'),
mounts=dict(type='list', elements='dict', options=dict(
target=dict(type='str', required=True),
source=dict(type='str'),
type=dict(type='str', choices=['bind', 'volume', 'tmpfs', 'npipe'], default='volume'),
read_only=dict(type='bool'),
consistency=dict(type='str', choices=['default', 'consistent', 'cached', 'delegated']),
propagation=dict(type='str', choices=['private', 'rprivate', 'shared', 'rshared', 'slave', 'rslave']),
no_copy=dict(type='bool'),
labels=dict(type='dict'),
volume_driver=dict(type='str'),
volume_options=dict(type='dict'),
tmpfs_size=dict(type='str'),
tmpfs_mode=dict(type='str'),
)),
name=dict(type='str', required=True),
network_mode=dict(type='str'),
networks=dict(type='list', elements='dict', options=dict(
name=dict(type='str', required=True),
ipv4_address=dict(type='str'),
ipv6_address=dict(type='str'),
aliases=dict(type='list', elements='str'),
links=dict(type='list', elements='str'),
)),
networks_cli_compatible=dict(type='bool'),
oom_killer=dict(type='bool'),
oom_score_adj=dict(type='int'),
output_logs=dict(type='bool', default=False),
paused=dict(type='bool'),
pid_mode=dict(type='str'),
pids_limit=dict(type='int'),
privileged=dict(type='bool'),
published_ports=dict(type='list', elements='str', aliases=['ports']),
pull=dict(type='bool', default=False),
purge_networks=dict(type='bool', default=False),
read_only=dict(type='bool'),
recreate=dict(type='bool', default=False),
restart=dict(type='bool', default=False),
restart_policy=dict(type='str', choices=['no', 'on-failure', 'always', 'unless-stopped']),
restart_retries=dict(type='int'),
runtime=dict(type='str'),
security_opts=dict(type='list', elements='str'),
shm_size=dict(type='str'),
state=dict(type='str', default='started', choices=['absent', 'present', 'started', 'stopped']),
stop_signal=dict(type='str'),
stop_timeout=dict(type='int'),
sysctls=dict(type='dict'),
tmpfs=dict(type='list', elements='str'),
trust_image_content=dict(type='bool', default=False, removed_in_version='2.14'),
tty=dict(type='bool'),
ulimits=dict(type='list', elements='str'),
user=dict(type='str'),
userns_mode=dict(type='str'),
uts=dict(type='str'),
volume_driver=dict(type='str'),
volumes=dict(type='list', elements='str'),
volumes_from=dict(type='list', elements='str'),
working_dir=dict(type='str'),
)
required_if = [
('state', 'present', ['image'])
]
client = AnsibleDockerClientContainer(
argument_spec=argument_spec,
required_if=required_if,
supports_check_mode=True,
min_docker_api_version='1.20',
)
if client.module.params['networks_cli_compatible'] is None and client.module.params['networks']:
client.module.deprecate(
'Please note that docker_container handles networks slightly different than docker CLI. '
'If you specify networks, the default network will still be attached as the first network. '
'(You can specify purge_networks to remove all networks not explicitly listed.) '
'This behavior will change in Ansible 2.12. You can change the behavior now by setting '
'the new `networks_cli_compatible` option to `yes`, and remove this warning by setting '
'it to `no`',
version='2.12'
)
if client.module.params['networks_cli_compatible'] is True and client.module.params['networks'] and client.module.params['network_mode'] is None:
client.module.deprecate(
'Please note that the default value for `network_mode` will change from not specified '
'(which is equal to `default`) to the name of the first network in `networks` if '
'`networks` has at least one entry and `networks_cli_compatible` is `true`. You can '
'change the behavior now by explicitly setting `network_mode` to the name of the first '
'network in `networks`, and remove this warning by setting `network_mode` to `default`. '
'Please make sure that the value you set to `network_mode` equals the inspection result '
'for existing containers, otherwise the module will recreate them. You can find out the '
'correct value by running "docker inspect --format \'{{.HostConfig.NetworkMode}}\' <container_name>"',
version='2.14'
)
try:
cm = ContainerManager(client)
client.module.exit_json(**sanitize_result(cm.results))
except DockerException as e:
client.fail('An unexpected docker error occurred: {0}'.format(e), exception=traceback.format_exc())
except RequestException as e:
client.fail('An unexpected requests error occurred when docker-py tried to talk to the docker daemon: {0}'.format(e), exception=traceback.format_exc())
if __name__ == '__main__':
main()
| Lujeni/ansible | lib/ansible/modules/cloud/docker/docker_container.py | Python | gpl-3.0 | 143,393 |
# This file is part of GxSubOS.
# Copyright (C) 2014 Christopher Kyle Horton <[email protected]>
# GxSubOS is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# GxSubOS is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with GxSubOS. If not, see <http://www.gnu.org/licenses/>.
import sys, pygame
from indicator import Indicator
import glass, shadow
transparent = pygame.color.Color(0, 0, 0, 0)
class IndicatorTray():
'''A class implementing a tray where various Indicators are displayed.'''
  # Copy the glass color for the opaque fallback; plain assignment would
  # alias the same Color object, so setting the alpha below would make the
  # "opaque" color transparent too.
  tray_color = glass.glass_color
  tray_color_opaque = pygame.color.Color(glass.glass_color.r, glass.glass_color.g, glass.glass_color.b)
  tray_color.a = glass.glass_alpha
tray_height = 24
shadow_height = 10
def __init__(self, screenw, screenh, wm=None):
self.indicator_list = []
self.surface = glass.MakeTransparentSurface(screenw, self.tray_height + self.shadow_height)
if glass.enable_transparency:
self.surface.fill(self.tray_color)
self.color_surface = pygame.Surface((screenw, self.tray_height), pygame.SRCALPHA)
self.color_surface.fill(self.tray_color)
else:
self.surface.fill(self.tray_color_opaque)
self.color_surface = pygame.Surface((screenw, self.tray_height))
self.color_surface.fill(self.tray_color_opaque)
self.update_rect = pygame.Rect(0, 0, 0, 0)
self.wm = wm
def SetWindowManager(self, windowmanager):
'''Sets the WindowManager that this IndicatorTray will connect to.'''
self.wm = windowmanager
def GetIndicatorsWidth(self):
'''Returns the total width in pixels of all the Indicators currently stored
in the indicator_list.'''
width = 0
for indicator in self.indicator_list:
width += indicator.width
return width
def UpdateIndicatorPositions(self):
'''Updates the positions of all the Indicators in the list.'''
next_right = 0
for indicator in self.indicator_list:
new_x = pygame.display.Info().current_w - next_right - indicator.width
self.update_rect.union_ip(indicator.UpdatePosition(new_x))
next_right += indicator.width
def RedrawBackground(self, screen):
'''Redraw the background behind the Indicators.'''
tray_width = self.GetIndicatorsWidth()
tray_left = pygame.display.Info().current_w - tray_width
glass.DrawBackground(screen, self.surface, self.surface.get_rect())
if glass.enable_transparency:
self.surface = glass.Blur(self.surface)
self.surface.blit(self.color_surface, [0, 0, 0, 0])
triangle_points = [(tray_left - self.tray_height, 0), (tray_left - self.tray_height, self.tray_height), (tray_left, self.tray_height)]
pygame.draw.polygon(self.surface, transparent, triangle_points)
pygame.draw.rect(self.surface, transparent, pygame.Rect(0, 0, tray_left - self.tray_height, self.tray_height))
pygame.draw.rect(self.surface, transparent, pygame.Rect(0, self.tray_height, self.surface.get_width(), self.surface.get_height() - self.tray_height))
shadow.DrawIndicatorTrayShadow(self)
def DrawTray(self, screen):
'''Draws this IndicatorTray onto the provided Surface. Returns a Rect
containing the area which was drawn to.'''
screen.blit(self.surface, (0, 0))
for indicator in self.indicator_list:
screen.blit(indicator.image, indicator.rect)
return self.update_rect
def UpdateWholeTray(self, screen):
'''Update the whole IndicatorTray and its Indicators.'''
self.UpdateIndicatorPositions()
for indicator in self.indicator_list:
indicator.RunFrameCode()
self.RemoveClosedIndicators()
self.RedrawBackground(screen)
def AddIndicator(self, indicator_name):
'''Adds the given Indicator based on the provided name.
Returns a reference to the Indicator added.'''
indicator = Indicator(len(self.indicator_list), indicator_name, self.wm)
self.indicator_list.append(indicator)
return indicator
  def RemoveClosedIndicators(self):
    '''Removes any Indicators which indicate that they are closed.'''
    # Iterate over a copy: calling list.remove() while iterating over the
    # same list skips elements.
    for indicator in list(self.indicator_list):
      if indicator.closed:
        self.indicator_list.remove(indicator)
    # Maintain indicator order
    new_number = 0
    for indicator in self.indicator_list:
      indicator.number = new_number
      new_number += 1
def HandleMouseButtonDownEvent(self, mouse_event, mouse_button):
    '''Pass MOUSEBUTTONDOWN events to the Indicators this holds.'''
for indicator in self.indicator_list:
indicator.HandleMouseButtonDownEvent(mouse_event, mouse_button)
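# Minimal usage sketch (the screen surface, window manager and the indicator
# name are assumptions): create a tray, add one Indicator and draw a frame.
def _example_tray_frame(screen, window_manager):
  tray = IndicatorTray(screen.get_width(), screen.get_height(), wm=window_manager)
  tray.AddIndicator("clock")
  tray.UpdateWholeTray(screen)
  return tray.DrawTray(screen)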
| WarriorIng64/GxSubOS | indicatortray.py | Python | gpl-3.0 | 4,871 |
# Copyright (C) 2015-2016 Daniel Sel
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#
__all__ = [
"__author__", "__copyright__", "__email__", "__license__", "__summary__",
"__title__", "__uri__", "__version__",
]
__version__ = "0.0.1"
__title__ = "KNOCK Server"
__uri__ = "https://github.com/DanielSel/knock"
__summary__ = "Transparent server service for the secure authenticated scalable port-knocking implementation \"KNOCK\""
__author__ = "Daniel Sel"
__license__ = "GNU General Public License"
__copyright__ = "Copyright 2015-2016 {0}".format(__author__)
| tumi8/sKnock | server/version.py | Python | gpl-3.0 | 1,225 |
#!/usr/bin/env python
# coding=utf-8
# Copyright (C) 2014 by Serge Poltavski #
# [email protected] #
# #
# This program is free software; you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation; either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/> #
from __future__ import print_function
__author__ = 'Serge Poltavski'
class PdPainter(object):
def draw_canvas(self, canvas):
pass
def draw_comment(self, comment):
print("Draw comment: #", comment.text())
def draw_message(self, message):
print("Draw message [id:%i]: %s" % (message.id, message.to_string()))
def draw_object(self, obj):
print("Draw object: [id:%i] [%s]" % (obj.id, " ".join(obj.args)))
def draw_core_gui(self, gui):
print("Draw core GUI: [id:%i] [%s]" % (gui.id, gui.name))
def draw_subpatch(self, subpatch):
print("Draw subpatch: [pd {0:s}]".format(subpatch.name))
def draw_graph(self, graph):
print("Draw graph ")
def draw_connections(self, canvas):
print("Draw connections ")
def draw_poly(self, vertexes, **kwargs):
print("Draw poly:", vertexes)
def draw_text(self, x, y, text, **kwargs):
print("Draw text:", text)
def draw_inlets(self, inlets, x, y, width):
print("Draw inlets:", inlets)
def draw_outlets(self, outlets, x, y, width):
print("Draw outlets:", outlets)
def draw_circle(self, x, y, width, **kwargs):
print("Draw circle")
def draw_arc(self, x, y, radius, start_angle, end_angle, **kwargs):
print("Draw arc")
def draw_line(self, x0, y0, x1, y1, **kwargs):
print("Draw line")
def draw_rect(self, x, y, w, h, **kwargs):
print("Draw rect")
| uliss/pddoc | pddoc/pdpainter.py | Python | gpl-3.0 | 2,655 |
# SSH proxy forward and remote shell
__author__ = "Frederico Martins"
__license__ = "GPLv3"
__version__ = 1
from getpass import getpass
from paramiko import AutoAddPolicy, SSHClient, ssh_exception
class SSH(object):
    '''SSH remote shell that can optionally be tunnelled through a proxy
    (jump) host: call forward() first, then execute() commands on the
    target.'''
    proxy = None
def __init__(self, host, user, password=None, port=22):
self.host = host
self.port = port
self.user = user
self.password = password or getpass(prompt="Network password: ")
def forward(self, host, user=None, password=None, port=22):
self._proxy = SSHClient()
user = user or self.user
password = password or self.password
self._proxy.set_missing_host_key_policy(AutoAddPolicy())
try:
self._proxy.connect(host, port=port, username=user, password=password)
except ssh_exception.AuthenticationException:
print("Error: Authentication failed for user {0}".format(self.user))
exit(-1)
transport = self._proxy.get_transport()
self.proxy = transport.open_channel("direct-tcpip", (self.host, self.port), (host, port))
def execute(self, command):
if not hasattr(self, '_ssh'):
self._ssh = SSHClient()
self._ssh.set_missing_host_key_policy(AutoAddPolicy())
self._ssh.connect(self.host, username=self.user, password=self.password, sock=self.proxy)
return self._ssh.exec_command(command)
def close(self):
if hasattr(self, '_ssh'):
self._ssh.close()
if hasattr(self, '_proxy'):
self._proxy.close()
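# Minimal usage sketch (all host and user names are assumptions): hop through
# a bastion host, run one command on the target, then clean up.
if __name__ == '__main__':
    session = SSH('internal-host.example.com', 'admin')
    session.forward('bastion.example.com')
    dummy_stdin, stdout, stderr = session.execute('uptime')
    print(stdout.read())
    session.close()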
| flippym/toolbox | ssh-streaming.py | Python | gpl-3.0 | 1,573 |
import os, json, random
from utils import *
import collections
class FileDataBaseException(Exception): pass
def update_dict_recursively(d, u):
    '''Recursively merge mapping u into dict d in place and return d.'''
for k, v in u.iteritems():
if isinstance(v, collections.Mapping):
r = update_dict_recursively(d.get(k, {}), v)
d[k] = r
else:
d[k] = u[k]
return d
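# Illustrative helper (name and sample values are made up): nested mappings
# are merged key by key instead of being replaced wholesale.
def _demo_update_dict_recursively():
    # {'a': {'x': 1}} merged with {'a': {'y': 2}} -> {'a': {'x': 1, 'y': 2}}
    return update_dict_recursively({'a': {'x': 1}}, {'a': {'y': 2}})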
class FileDataBase:
def __init__(self):
self.files = {}
self.last_id = 0
self.shuffled_keys = None
self.shuffled_last = 0
self._init = False
self._files_saved = False
def IsInitialized(self):
return self._init
def ScanDirectoryRecursively(self, watch_dir, extension, exclude, myFileInfoExtractor=None, renewFiles=True):
changed = 0
count = 0
exist = 0
excluded = 0
path2file = {self.files[i]['path']: i for i in self.files}
files_ok = {}
for root, _, files in os.walk(watch_dir):
for filename in files:
# skip excluded files
if filename in exclude:
excluded += 1
continue
if filename.endswith(extension):
path = root + '/' + filename
# find duplicates
added = False
value = {'path': path}
if myFileInfoExtractor:
try: info = myFileInfoExtractor(path)
except: info = None
if info is None:
excluded += 1
continue
value.update(info)
if path in path2file:
id_ = path2file[path]
files_ok[id_] = value
changed += self.files[id_] != value
added = True
exist += 1
if not added:
files_ok[str(self.last_id)] = value
self.last_id += 1
count += 1
if renewFiles:
self.files = files_ok
else:
self.files.update(files_ok)
self._init = True
self._files_saved = False if count > 0 or changed > 0 else True
return count, exist, excluded
def ShuffleFiles(self):
self.shuffled_keys = self.files.keys()
myRandom = random.Random(42)
myRandom.shuffle(self.shuffled_keys)
def GetFilesPortion(self, count, count_is_percent=True):
if self.shuffled_keys is None:
raise FileDataBaseException('Use ShuffleFiles before GetFilesPortion')
        if count_is_percent and count is not None:
            count *= self.GetFilesNumber()
start = self.shuffled_last
end = None if count is None else int(self.shuffled_last+count)
self.shuffled_last = end
return self.shuffled_keys[start:end]
def GetFilesPortions(self, count_list):
return [self.GetFilesPortion(c) for c in count_list]
def ResetShuffle(self):
self.shuffled_keys = None
self.shuffled_last = 0
def GetFile(self, _id):
return self.files[_id]
    def GetPath(self, _id):
        # Accept either a file id (numeric string) or an already-resolved path.
        try: int(_id)
        except: return _id
        else: return self.files[_id]['path']
def GetPathes(self, ids_or_ids_list):
if isinstance(ids_or_ids_list, list):
if isinstance(ids_or_ids_list[0], list):
return [[self.GetPath(i) for i in ids] for ids in ids_or_ids_list]
return [self.GetPath(i) for i in ids_or_ids_list]
def SetFiles(self, other_filedb):
self.files = other_filedb.files
self._init = other_filedb._init
self._files_saved = other_filedb._files_saved
def GetFiles(self):
return self.files
def GetFilesNumber(self):
return len(self.files)
def GetFileBasename2id(self):
return {os.path.basename(self.GetPath(_id)): _id for _id in self.GetAllIds()}
def SaveFiles(self, filename):
if not self._files_saved:
try:
filename = os.path.normpath(filename)
json.dump(self.files, open(filename, 'w'))
self._files_saved = True
# log('FileDB saved to:', filename)
return True
except: return False
else:
return False
def GetAllIds(self):
return self.files.keys()
def GetPathes2IdsMap(self):
return {self.files[i]['path']: i for i in self.files}
def LoadFiles(self, filename):
try:
self._init = False
self._files_saved = False
self.files = json.load(open(filename))
self.last_id = max([int(i) for i in self.files.keys()])+1
self._init = True
self._files_saved = True
except: return False
return True
class MetaDataBase:
def __init__(self):
self.meta = {}
self.filedb = None
def SetFileDB(self, filedb):
self.filedb = filedb
def SaveMeta(self, filename):
# save json with meta info
tmp = json.encoder.FLOAT_REPR
json.encoder.FLOAT_REPR = lambda o: format(o, '.6f')
json.dump(self.meta, open(filename, 'w'), sort_keys=True)
json.encoder.FLOAT_REPR = tmp
return True
def LoadMeta(self, filename):
try: self.meta = json.load(open(filename))
except: return False
return True
def SetMeta(self, _id, data):
if not isinstance(data, dict): raise FileDataBaseException('data must be a dict '+str(data))
self.meta[_id] = data
def AddMeta(self, _id, data):
if not isinstance(data, dict): raise FileDataBaseException('data must be a dict '+str(data))
if _id in self.meta: self.meta[_id] = update_dict_recursively(self.meta[_id], data)
else: self.meta[_id] = data
def GetMeta(self, _id):
if not _id in self.meta: raise FileDataBaseException('incorrect id ' + str(_id))
return self.meta[_id]
def GetAllIds(self):
return self.meta.keys() | makseq/testarium | testarium/filedb.py | Python | gpl-3.0 | 6,170 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015-2017 Stephane Caron <[email protected]>
#
# This file is part of fip-walkgen
# <https://github.com/stephane-caron/fip-walkgen>.
#
# fip-walkgen is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# fip-walkgen is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# fip-walkgen. If not, see <http://www.gnu.org/licenses/>.
from cop_nmpc import COPPredictiveController
from double_support import DoubleSupportController
from fip_nmpc import FIPPredictiveController
from regulation import FIPRegulator
from wrench_nmpc import WrenchPredictiveController
__all__ = [
'COPPredictiveController',
'DoubleSupportController',
'FIPPredictiveController',
'FIPRegulator',
'WrenchPredictiveController',
]
| stephane-caron/dynamic-walking | wpg/com_control/__init__.py | Python | gpl-3.0 | 1,231 |
# THIS FILE IS PART OF THE CYLC WORKFLOW ENGINE.
# Copyright (C) NIWA & British Crown (Met Office) & Contributors.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""General utilities for the integration test infrastructure.
These utilities are not intended for direct use by tests
(hence the underscore function names).
Use the fixtures provided in the conftest instead.
"""
import asyncio
def _rm_if_empty(path):
"""Convenience wrapper for removing empty directories."""
try:
path.rmdir()
except OSError:
return False
return True
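# Illustrative sketch (helper name is made up; assumes `path` is a
# pathlib.Path): prune empty directories from the inside out, stopping at the
# first non-empty parent.
def _example_prune_upwards(path):
    while _rm_if_empty(path):
        path = path.parent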
| oliver-sanders/cylc | tests/integration/utils/__init__.py | Python | gpl-3.0 | 1,157 |
# Qt library
#
# Notes:
# There's no performance penalty for importing all Qt modules into whichever modules
# need access to at least some Qt modules, so for simplicity's sake that's what we'll do.
from util import RequiredImportError
from constants import PYSIDE, PYQT4
import qt_helper
_qtLib = qt_helper.qtLib
def QtLib():
"""Returns PYSIDE or PYQT4, whichever is being used by the program."""
return _qtLib
if _qtLib == PYSIDE:
from PySide import QtGui, QtCore, QtSql
from PySide.QtGui import *
from PySide.QtCore import *
from PySide.QtSql import *
from PySide.phonon import Phonon
elif _qtLib == PYQT4:
import sip
sip.setapi('QString', 2) # Prevent QString from being returned by PyQt4 functions.
sip.setapi('QVariant', 2) # Prevent QVariant from being returned by PyQt4 functions.
from PyQt4 import QtGui, QtCore, QtSql
from PyQt4.QtGui import *
from PyQt4.QtCore import *
from PyQt4.QtSql import *
from PyQt4.phonon import Phonon
else:
raise RequiredImportError('No Qt library found.') | pylonsoflight/kea | qt.py | Python | gpl-3.0 | 1,023 |
#!/usr/bin/python -tt
# An incredibly simple agent. All we do is find the closest enemy tank, drive
# towards it, and shoot. Note that if friendly fire is allowed, you will very
# often kill your own tanks with this code.
#################################################################
# NOTE TO STUDENTS
# This is a starting point for you. You will need to greatly
# modify this code if you want to do anything useful. But this
# should help you to know how to interact with BZRC in order to
# get the information you need.
#
# After starting the bzrflag server, this is one way to start
# this code:
# python agent0.py [hostname] [port]
#
# Often this translates to something like the following (with the
# port name being printed out by the bzrflag server):
# python agent0.py localhost 49857
#################################################################
import sys
import math
import time
from bzrc import BZRC, Command
class Agent(object):
"""Class handles all command and control logic for a teams tanks."""
def __init__(self, bzrc):
self.go_straight = True
self.bzrc = bzrc
self.constants = self.bzrc.get_constants()
self.commands = []
    def tick(self, time_diff, shoot=False):
        """Some time has passed; decide what to do next."""
        print 'time_diff', time_diff
mytanks, othertanks, flags, shots = self.bzrc.get_lots_o_stuff()
self.mytanks = mytanks
self.othertanks = othertanks
self.flags = flags
self.shots = shots
self.enemies = [tank for tank in othertanks if tank.color !=
self.constants['team']]
self.commands = []
if shoot:
print 'shooting'
for tank in mytanks:
self.tanks_shoot(tank)
else:
print 'go straight' if self.go_straight else 'turn instead'
for tank in mytanks:
self.testing_tanks(tank)
self.go_straight = not self.go_straight
results = self.bzrc.do_commands(self.commands)
return self.go_straight
def tanks_shoot(self, tank):
self.bzrc.shoot(tank.index)
def testing_tanks(self, tank):
if self.go_straight:
command = Command(tank.index, 1, 0, 0)
else:
command = Command(tank.index, 0, 0.6, 0)
self.commands.append(command)
def attack_enemies(self, tank):
"""Find the closest enemy and chase it, shooting as you go."""
best_enemy = None
best_dist = 2 * float(self.constants['worldsize'])
for enemy in self.enemies:
if enemy.status != 'alive':
continue
dist = math.sqrt((enemy.x - tank.x)**2 + (enemy.y - tank.y)**2)
if dist < best_dist:
best_dist = dist
best_enemy = enemy
if best_enemy is None:
command = Command(tank.index, 0, 0, False)
self.commands.append(command)
else:
self.move_to_position(tank, best_enemy.x, best_enemy.y)
def move_to_position(self, tank, target_x, target_y):
"""Set command to move to given coordinates."""
target_angle = math.atan2(target_y - tank.y,
target_x - tank.x)
relative_angle = self.normalize_angle(target_angle - tank.angle)
command = Command(tank.index, 1, 2 * relative_angle, True)
self.commands.append(command)
def normalize_angle(self, angle):
"""Make any angle be between +/- pi."""
        angle -= 2 * math.pi * int(angle / (2 * math.pi))
if angle <= -math.pi:
angle += 2 * math.pi
elif angle > math.pi:
angle -= 2 * math.pi
return angle
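# Worked example (helper name is made up): the same wrapping math as
# Agent.normalize_angle, shown standalone. 5*pi/2 maps to pi/2 and -3*pi/2
# also maps to pi/2, i.e. every angle lands in (-pi, pi].
def _demo_normalize_angle(angle):
    angle -= 2 * math.pi * int(angle / (2 * math.pi))
    if angle <= -math.pi:
        angle += 2 * math.pi
    elif angle > math.pi:
        angle -= 2 * math.pi
    return angle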
def main():
# Process CLI arguments.
try:
execname, host, port = sys.argv
except ValueError:
execname = sys.argv[0]
print >>sys.stderr, '%s: incorrect number of arguments' % execname
print >>sys.stderr, 'usage: %s hostname port' % sys.argv[0]
sys.exit(-1)
# Connect.
#bzrc = BZRC(host, int(port), debug=True)
bzrc = BZRC(host, int(port))
agent = Agent(bzrc)
prev_time = time.time()
prev_time_shoot = time.time()
wait = 8
# Run the agent
try:
while True:
if time.time() > prev_time_shoot + 2:
agent.tick(time.time() - prev_time_shoot, True)
prev_time_shoot = time.time()
if time.time() > prev_time + wait:
went_straight = agent.tick(time.time() - prev_time)
wait = 3 if went_straight else 8
prev_time = time.time()
except KeyboardInterrupt:
print "Exiting due to keyboard interrupt."
bzrc.close()
if __name__ == '__main__':
main()
# vim: et sw=4 sts=4
| sm-github/bzrflag | bzagents/dumb_agent.py | Python | gpl-3.0 | 4,903 |
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2015-2021 Florian Bruhin (The Compiler) <[email protected]>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <https://www.gnu.org/licenses/>.
"""Tests for qutebrowser.misc.ipc."""
import os
import pathlib
import getpass
import logging
import json
import hashlib
import dataclasses
from unittest import mock
from typing import Optional, List
import pytest
from PyQt5.QtCore import pyqtSignal, QObject
from PyQt5.QtNetwork import QLocalServer, QLocalSocket, QAbstractSocket
from PyQt5.QtTest import QSignalSpy
import qutebrowser
from qutebrowser.misc import ipc
from qutebrowser.utils import standarddir, utils
from helpers import stubs, utils as testutils
pytestmark = pytest.mark.usefixtures('qapp')
@pytest.fixture(autouse=True)
def shutdown_server():
"""If ipc.send_or_listen was called, make sure to shut server down."""
yield
if ipc.server is not None:
ipc.server.shutdown()
@pytest.fixture
def ipc_server(qapp, qtbot):
server = ipc.IPCServer('qute-test')
yield server
if (server._socket is not None and
server._socket.state() != QLocalSocket.UnconnectedState):
with qtbot.waitSignal(server._socket.disconnected, raising=False):
server._socket.abort()
try:
server.shutdown()
except ipc.Error:
pass
@pytest.fixture
def qlocalserver(qapp):
server = QLocalServer()
yield server
server.close()
server.deleteLater()
@pytest.fixture
def qlocalsocket(qapp):
socket = QLocalSocket()
yield socket
socket.disconnectFromServer()
if socket.state() != QLocalSocket.UnconnectedState:
socket.waitForDisconnected(1000)
@pytest.fixture(autouse=True)
def fake_runtime_dir(monkeypatch, short_tmpdir):
monkeypatch.setenv('XDG_RUNTIME_DIR', str(short_tmpdir))
standarddir._init_runtime(args=None)
return short_tmpdir
class FakeSocket(QObject):
"""A stub for a QLocalSocket.
Args:
_can_read_line_val: The value returned for canReadLine().
_error_val: The value returned for error().
_state_val: The value returned for state().
_connect_successful: The value returned for waitForConnected().
"""
readyRead = pyqtSignal() # noqa: N815
disconnected = pyqtSignal()
def __init__(self, *, error=QLocalSocket.UnknownSocketError, state=None,
data=None, connect_successful=True, parent=None):
super().__init__(parent)
self._error_val = error
self._state_val = state
self._data = data
self._connect_successful = connect_successful
self.error = stubs.FakeSignal('error', func=self._error)
def _error(self):
return self._error_val
def state(self):
return self._state_val
def canReadLine(self):
return bool(self._data)
def readLine(self):
firstline, mid, rest = self._data.partition(b'\n')
self._data = rest
return firstline + mid
def errorString(self):
return "Error string"
def abort(self):
self.disconnected.emit()
def disconnectFromServer(self):
pass
def connectToServer(self, _name):
pass
def waitForConnected(self, _time):
return self._connect_successful
def writeData(self, _data):
pass
def waitForBytesWritten(self, _time):
pass
def waitForDisconnected(self, _time):
pass
class FakeServer:
def __init__(self, socket):
self._socket = socket
def nextPendingConnection(self):
socket = self._socket
self._socket = None
return socket
def close(self):
pass
def deleteLater(self):
pass
def test_getpass_getuser():
"""Make sure getpass.getuser() returns something sensible."""
assert getpass.getuser()
def md5(inp):
return hashlib.md5(inp.encode('utf-8')).hexdigest()
class TestSocketName:
WINDOWS_TESTS = [
(None, 'qutebrowser-testusername'),
('/x', 'qutebrowser-testusername-{}'.format(md5('/x'))),
]
@pytest.fixture(autouse=True)
def patch_user(self, monkeypatch):
monkeypatch.setattr(ipc.getpass, 'getuser', lambda: 'testusername')
@pytest.mark.parametrize('basedir, expected', WINDOWS_TESTS)
@pytest.mark.windows
def test_windows(self, basedir, expected):
socketname = ipc._get_socketname(basedir)
assert socketname == expected
@pytest.mark.parametrize('basedir, expected', WINDOWS_TESTS)
def test_windows_on_posix(self, basedir, expected):
socketname = ipc._get_socketname_windows(basedir)
assert socketname == expected
def test_windows_broken_getpass(self, monkeypatch):
def _fake_username():
raise ImportError
monkeypatch.setattr(ipc.getpass, 'getuser', _fake_username)
with pytest.raises(ipc.Error, match='USERNAME'):
ipc._get_socketname_windows(basedir=None)
@pytest.mark.mac
@pytest.mark.parametrize('basedir, expected', [
(None, 'i-{}'.format(md5('testusername'))),
('/x', 'i-{}'.format(md5('testusername-/x'))),
])
def test_mac(self, basedir, expected):
socketname = ipc._get_socketname(basedir)
parts = socketname.split(os.sep)
assert parts[-2] == 'qutebrowser'
assert parts[-1] == expected
@pytest.mark.linux
@pytest.mark.parametrize('basedir, expected', [
(None, 'ipc-{}'.format(md5('testusername'))),
('/x', 'ipc-{}'.format(md5('testusername-/x'))),
])
def test_linux(self, basedir, fake_runtime_dir, expected):
socketname = ipc._get_socketname(basedir)
expected_path = str(fake_runtime_dir / 'qutebrowser' / expected)
assert socketname == expected_path
def test_other_unix(self):
"""Fake test for POSIX systems which aren't Linux/macOS.
We probably would adjust the code first to make it work on that
platform.
"""
if utils.is_windows:
pass
elif utils.is_mac:
pass
elif utils.is_linux:
pass
else:
raise Exception("Unexpected platform!")
class TestExceptions:
def test_listen_error(self, qlocalserver):
qlocalserver.listen(None)
exc = ipc.ListenError(qlocalserver)
assert exc.code == 2
assert exc.message == "QLocalServer::listen: Name error"
msg = ("Error while listening to IPC server: QLocalServer::listen: "
"Name error (error 2)")
assert str(exc) == msg
with pytest.raises(ipc.Error):
raise exc
def test_socket_error(self, qlocalserver):
socket = FakeSocket(error=QLocalSocket.ConnectionRefusedError)
exc = ipc.SocketError("testing", socket)
assert exc.code == QLocalSocket.ConnectionRefusedError
assert exc.message == "Error string"
assert str(exc) == "Error while testing: Error string (error 0)"
with pytest.raises(ipc.Error):
raise exc
class TestListen:
@pytest.mark.posix
def test_remove_error(self, ipc_server, monkeypatch):
"""Simulate an error in _remove_server."""
monkeypatch.setattr(ipc_server, '_socketname', None)
with pytest.raises(ipc.Error,
match="Error while removing server None!"):
ipc_server.listen()
def test_error(self, ipc_server, monkeypatch):
"""Simulate an error while listening."""
monkeypatch.setattr(ipc.QLocalServer, 'removeServer',
lambda self: True)
monkeypatch.setattr(ipc_server, '_socketname', None)
with pytest.raises(ipc.ListenError):
ipc_server.listen()
@pytest.mark.posix
def test_in_use(self, qlocalserver, ipc_server, monkeypatch):
monkeypatch.setattr(ipc.QLocalServer, 'removeServer',
lambda self: True)
qlocalserver.listen('qute-test')
with pytest.raises(ipc.AddressInUseError):
ipc_server.listen()
def test_successful(self, ipc_server):
ipc_server.listen()
@pytest.mark.windows
def test_permissions_windows(self, ipc_server):
opts = ipc_server._server.socketOptions()
assert opts == QLocalServer.UserAccessOption
@pytest.mark.posix
def test_permissions_posix(self, ipc_server):
ipc_server.listen()
sockfile = ipc_server._server.fullServerName()
sockdir = pathlib.Path(sockfile).parent
file_stat = os.stat(sockfile)
dir_stat = sockdir.stat()
# pylint: disable=no-member,useless-suppression
file_owner_ok = file_stat.st_uid == os.getuid()
dir_owner_ok = dir_stat.st_uid == os.getuid()
# pylint: enable=no-member,useless-suppression
file_mode_ok = file_stat.st_mode & 0o777 == 0o700
dir_mode_ok = dir_stat.st_mode & 0o777 == 0o700
print('sockdir: {} / owner {} / mode {:o}'.format(
sockdir, dir_stat.st_uid, dir_stat.st_mode))
print('sockfile: {} / owner {} / mode {:o}'.format(
sockfile, file_stat.st_uid, file_stat.st_mode))
assert file_owner_ok or dir_owner_ok
assert file_mode_ok or dir_mode_ok
@pytest.mark.posix
def test_atime_update(self, qtbot, ipc_server):
ipc_server._atime_timer.setInterval(500) # We don't want to wait
ipc_server.listen()
old_atime = os.stat(ipc_server._server.fullServerName()).st_atime_ns
with qtbot.waitSignal(ipc_server._atime_timer.timeout, timeout=2000):
pass
# Make sure the timer is not singleShot
with qtbot.waitSignal(ipc_server._atime_timer.timeout, timeout=2000):
pass
new_atime = os.stat(ipc_server._server.fullServerName()).st_atime_ns
assert old_atime != new_atime
@pytest.mark.posix
def test_atime_update_no_name(self, qtbot, caplog, ipc_server):
with caplog.at_level(logging.ERROR):
ipc_server.update_atime()
assert caplog.messages[-1] == "In update_atime with no server path!"
@pytest.mark.posix
def test_atime_shutdown_typeerror(self, qtbot, ipc_server):
"""This should never happen, but let's handle it gracefully."""
ipc_server._atime_timer.timeout.disconnect(ipc_server.update_atime)
ipc_server.shutdown()
@pytest.mark.posix
def test_vanished_runtime_file(self, qtbot, caplog, ipc_server):
ipc_server._atime_timer.setInterval(500) # We don't want to wait
ipc_server.listen()
sockfile = pathlib.Path(ipc_server._server.fullServerName())
sockfile.unlink()
with caplog.at_level(logging.ERROR):
with qtbot.waitSignal(ipc_server._atime_timer.timeout,
timeout=2000):
pass
msg = 'Failed to update IPC socket, trying to re-listen...'
assert caplog.messages[-1] == msg
assert ipc_server._server.isListening()
assert sockfile.exists()
class TestOnError:
def test_closed(self, ipc_server):
ipc_server._socket = QLocalSocket()
ipc_server._timer.timeout.disconnect()
ipc_server._timer.start()
ipc_server.on_error(QLocalSocket.PeerClosedError)
assert not ipc_server._timer.isActive()
def test_other_error(self, ipc_server, monkeypatch):
socket = QLocalSocket()
ipc_server._socket = socket
monkeypatch.setattr(socket, 'error',
lambda: QLocalSocket.ConnectionRefusedError)
monkeypatch.setattr(socket, 'errorString',
lambda: "Connection refused")
socket.setErrorString("Connection refused.")
with pytest.raises(ipc.Error, match=r"Error while handling IPC "
r"connection: Connection refused \(error 0\)"):
ipc_server.on_error(QLocalSocket.ConnectionRefusedError)
class TestHandleConnection:
def test_ignored(self, ipc_server, monkeypatch):
m = mock.Mock(spec=[])
monkeypatch.setattr(ipc_server._server, 'nextPendingConnection', m)
ipc_server.ignored = True
ipc_server.handle_connection()
m.assert_not_called()
def test_no_connection(self, ipc_server, caplog):
ipc_server.handle_connection()
assert caplog.messages[-1] == "No new connection to handle."
def test_double_connection(self, qlocalsocket, ipc_server, caplog):
ipc_server._socket = qlocalsocket
ipc_server.handle_connection()
msg = ("Got new connection but ignoring it because we're still "
"handling another one")
assert any(message.startswith(msg) for message in caplog.messages)
def test_disconnected_immediately(self, ipc_server, caplog):
socket = FakeSocket(state=QLocalSocket.UnconnectedState)
ipc_server._server = FakeServer(socket)
ipc_server.handle_connection()
assert "Socket was disconnected immediately." in caplog.messages
def test_error_immediately(self, ipc_server, caplog):
socket = FakeSocket(error=QLocalSocket.ConnectionError)
ipc_server._server = FakeServer(socket)
with pytest.raises(ipc.Error, match=r"Error while handling IPC "
r"connection: Error string \(error 7\)"):
ipc_server.handle_connection()
assert "We got an error immediately." in caplog.messages
def test_read_line_immediately(self, qtbot, ipc_server, caplog):
data = ('{{"args": ["foo"], "target_arg": "tab", '
'"protocol_version": {}}}\n'.format(ipc.PROTOCOL_VERSION))
socket = FakeSocket(data=data.encode('utf-8'))
ipc_server._server = FakeServer(socket)
with qtbot.waitSignal(ipc_server.got_args) as blocker:
ipc_server.handle_connection()
assert blocker.args == [['foo'], 'tab', '']
assert "We can read a line immediately." in caplog.messages
@pytest.fixture
def connected_socket(qtbot, qlocalsocket, ipc_server):
if utils.is_mac:
pytest.skip("Skipping connected_socket test - "
"https://github.com/qutebrowser/qutebrowser/issues/1045")
ipc_server.listen()
with qtbot.waitSignal(ipc_server._server.newConnection):
qlocalsocket.connectToServer('qute-test')
yield qlocalsocket
qlocalsocket.disconnectFromServer()
def test_disconnected_without_data(qtbot, connected_socket,
ipc_server, caplog):
"""Disconnect without sending data.
This means self._socket will be None on on_disconnected.
"""
connected_socket.disconnectFromServer()
def test_partial_line(connected_socket):
connected_socket.write(b'foo')
OLD_VERSION = str(ipc.PROTOCOL_VERSION - 1).encode('utf-8')
NEW_VERSION = str(ipc.PROTOCOL_VERSION + 1).encode('utf-8')
@pytest.mark.parametrize('data, msg', [
(b'\x80\n', 'invalid utf-8'),
(b'\n', 'invalid json'),
(b'{"is this invalid json?": true\n', 'invalid json'),
(b'{"valid json without args": true}\n', 'Missing args'),
(b'{"args": []}\n', 'Missing target_arg'),
(b'{"args": [], "target_arg": null, "protocol_version": ' + OLD_VERSION +
b'}\n', 'incompatible version'),
(b'{"args": [], "target_arg": null, "protocol_version": ' + NEW_VERSION +
b'}\n', 'incompatible version'),
(b'{"args": [], "target_arg": null, "protocol_version": "foo"}\n',
'invalid version'),
(b'{"args": [], "target_arg": null}\n', 'invalid version'),
])
def test_invalid_data(qtbot, ipc_server, connected_socket, caplog, data, msg):
signals = [ipc_server.got_invalid_data, connected_socket.disconnected]
with caplog.at_level(logging.ERROR):
with qtbot.assertNotEmitted(ipc_server.got_args):
with qtbot.waitSignals(signals, order='strict'):
connected_socket.write(data)
invalid_msg = 'Ignoring invalid IPC data from socket '
assert caplog.messages[-1].startswith(invalid_msg)
assert caplog.messages[-2].startswith(msg)
def test_multiline(qtbot, ipc_server, connected_socket):
spy = QSignalSpy(ipc_server.got_args)
data = ('{{"args": ["one"], "target_arg": "tab",'
' "protocol_version": {version}}}\n'
'{{"args": ["two"], "target_arg": null,'
' "protocol_version": {version}}}\n'.format(
version=ipc.PROTOCOL_VERSION))
with qtbot.assertNotEmitted(ipc_server.got_invalid_data):
with qtbot.waitSignals([ipc_server.got_args, ipc_server.got_args],
order='strict'):
connected_socket.write(data.encode('utf-8'))
assert len(spy) == 2
assert spy[0] == [['one'], 'tab', '']
assert spy[1] == [['two'], '', '']
class TestSendToRunningInstance:
def test_no_server(self, caplog):
sent = ipc.send_to_running_instance('qute-test', [], None)
assert not sent
assert caplog.messages[-1] == "No existing instance present (error 2)"
@pytest.mark.parametrize('has_cwd', [True, False])
@pytest.mark.linux(reason="Causes random trouble on Windows and macOS")
def test_normal(self, qtbot, tmp_path, ipc_server, mocker, has_cwd):
ipc_server.listen()
with qtbot.assertNotEmitted(ipc_server.got_invalid_data):
with qtbot.waitSignal(ipc_server.got_args,
timeout=5000) as blocker:
with qtbot.waitSignal(ipc_server.got_raw,
timeout=5000) as raw_blocker:
with testutils.change_cwd(tmp_path):
if not has_cwd:
m = mocker.patch('qutebrowser.misc.ipc.os')
m.getcwd.side_effect = OSError
sent = ipc.send_to_running_instance(
'qute-test', ['foo'], None)
assert sent
expected_cwd = str(tmp_path) if has_cwd else ''
assert blocker.args == [['foo'], '', expected_cwd]
raw_expected = {'args': ['foo'], 'target_arg': None,
'version': qutebrowser.__version__,
'protocol_version': ipc.PROTOCOL_VERSION}
if has_cwd:
raw_expected['cwd'] = str(tmp_path)
assert len(raw_blocker.args) == 1
parsed = json.loads(raw_blocker.args[0].decode('utf-8'))
assert parsed == raw_expected
def test_socket_error(self):
socket = FakeSocket(error=QLocalSocket.ConnectionError)
with pytest.raises(ipc.Error, match=r"Error while writing to running "
r"instance: Error string \(error 7\)"):
ipc.send_to_running_instance('qute-test', [], None, socket=socket)
def test_not_disconnected_immediately(self):
socket = FakeSocket()
ipc.send_to_running_instance('qute-test', [], None, socket=socket)
def test_socket_error_no_server(self):
socket = FakeSocket(error=QLocalSocket.ConnectionError,
connect_successful=False)
with pytest.raises(ipc.Error, match=r"Error while connecting to "
r"running instance: Error string \(error 7\)"):
ipc.send_to_running_instance('qute-test', [], None, socket=socket)
@pytest.mark.not_mac(reason="https://github.com/qutebrowser/qutebrowser/"
"issues/975")
def test_timeout(qtbot, caplog, qlocalsocket, ipc_server):
ipc_server._timer.setInterval(100)
ipc_server.listen()
with qtbot.waitSignal(ipc_server._server.newConnection):
qlocalsocket.connectToServer('qute-test')
with caplog.at_level(logging.ERROR):
with qtbot.waitSignal(qlocalsocket.disconnected, timeout=5000):
pass
assert caplog.messages[-1].startswith("IPC connection timed out")
def test_ipcserver_socket_none_readyread(ipc_server, caplog):
assert ipc_server._socket is None
assert ipc_server._old_socket is None
with caplog.at_level(logging.WARNING):
ipc_server.on_ready_read()
msg = "In on_ready_read with None socket and old_socket!"
assert msg in caplog.messages
@pytest.mark.posix
def test_ipcserver_socket_none_error(ipc_server, caplog):
assert ipc_server._socket is None
ipc_server.on_error(0)
msg = "In on_error with None socket!"
assert msg in caplog.messages
class TestSendOrListen:
@dataclasses.dataclass
class Args:
no_err_windows: bool
basedir: str
command: List[str]
target: Optional[str]
@pytest.fixture
def args(self):
return self.Args(no_err_windows=True, basedir='/basedir/for/testing',
command=['test'], target=None)
@pytest.fixture
def qlocalserver_mock(self, mocker):
m = mocker.patch('qutebrowser.misc.ipc.QLocalServer', autospec=True)
m().errorString.return_value = "Error string"
m().newConnection = stubs.FakeSignal()
return m
@pytest.fixture
def qlocalsocket_mock(self, mocker):
m = mocker.patch('qutebrowser.misc.ipc.QLocalSocket', autospec=True)
m().errorString.return_value = "Error string"
for name in ['UnknownSocketError', 'UnconnectedState',
'ConnectionRefusedError', 'ServerNotFoundError',
'PeerClosedError']:
setattr(m, name, getattr(QLocalSocket, name))
return m
@pytest.mark.linux(reason="Flaky on Windows and macOS")
def test_normal_connection(self, caplog, qtbot, args):
ret_server = ipc.send_or_listen(args)
assert isinstance(ret_server, ipc.IPCServer)
assert "Starting IPC server..." in caplog.messages
assert ret_server is ipc.server
with qtbot.waitSignal(ret_server.got_args):
ret_client = ipc.send_or_listen(args)
assert ret_client is None
@pytest.mark.posix(reason="Unneeded on Windows")
def test_correct_socket_name(self, args):
server = ipc.send_or_listen(args)
expected_dir = ipc._get_socketname(args.basedir)
assert '/' in expected_dir
assert server._socketname == expected_dir
def test_address_in_use_ok(self, qlocalserver_mock, qlocalsocket_mock,
stubs, caplog, args):
"""Test the following scenario.
- First call to send_to_running_instance:
-> could not connect (server not found)
- Trying to set up a server and listen
-> AddressInUseError
- Second call to send_to_running_instance:
-> success
"""
qlocalserver_mock().listen.return_value = False
err = QAbstractSocket.AddressInUseError
qlocalserver_mock().serverError.return_value = err
qlocalsocket_mock().waitForConnected.side_effect = [False, True]
qlocalsocket_mock().error.side_effect = [
QLocalSocket.ServerNotFoundError,
QLocalSocket.UnknownSocketError,
QLocalSocket.UnknownSocketError, # error() gets called twice
]
ret = ipc.send_or_listen(args)
assert ret is None
assert "Got AddressInUseError, trying again." in caplog.messages
@pytest.mark.parametrize('has_error, exc_name, exc_msg', [
(True, 'SocketError',
'Error while writing to running instance: Error string (error 0)'),
(False, 'AddressInUseError',
'Error while listening to IPC server: Error string (error 8)'),
])
def test_address_in_use_error(self, qlocalserver_mock, qlocalsocket_mock,
stubs, caplog, args, has_error, exc_name,
exc_msg):
"""Test the following scenario.
- First call to send_to_running_instance:
-> could not connect (server not found)
- Trying to set up a server and listen
-> AddressInUseError
- Second call to send_to_running_instance:
-> not sent / error
"""
qlocalserver_mock().listen.return_value = False
err = QAbstractSocket.AddressInUseError
qlocalserver_mock().serverError.return_value = err
# If the second connection succeeds, we will have an error later.
# If it fails, that's the "not sent" case above.
qlocalsocket_mock().waitForConnected.side_effect = [False, has_error]
qlocalsocket_mock().error.side_effect = [
QLocalSocket.ServerNotFoundError,
QLocalSocket.ServerNotFoundError,
QLocalSocket.ConnectionRefusedError,
QLocalSocket.ConnectionRefusedError, # error() gets called twice
]
with caplog.at_level(logging.ERROR):
with pytest.raises(ipc.Error):
ipc.send_or_listen(args)
error_msgs = [
'Handling fatal misc.ipc.{} with --no-err-windows!'.format(
exc_name),
'',
'title: Error while connecting to running instance!',
'pre_text: ',
'post_text: ',
'exception text: {}'.format(exc_msg),
]
assert caplog.messages == ['\n'.join(error_msgs)]
@pytest.mark.posix(reason="Flaky on Windows")
def test_error_while_listening(self, qlocalserver_mock, caplog, args):
"""Test an error with the first listen call."""
qlocalserver_mock().listen.return_value = False
err = QAbstractSocket.SocketResourceError
qlocalserver_mock().serverError.return_value = err
with caplog.at_level(logging.ERROR):
with pytest.raises(ipc.Error):
ipc.send_or_listen(args)
error_msgs = [
'Handling fatal misc.ipc.ListenError with --no-err-windows!',
'',
'title: Error while connecting to running instance!',
'pre_text: ',
'post_text: ',
('exception text: Error while listening to IPC server: Error '
'string (error 4)'),
]
assert caplog.messages[-1] == '\n'.join(error_msgs)
@pytest.mark.windows
@pytest.mark.mac
def test_long_username(monkeypatch):
"""See https://github.com/qutebrowser/qutebrowser/issues/888."""
username = 'alexandercogneau'
basedir = '/this_is_a_long_basedir'
monkeypatch.setattr(getpass, 'getuser', lambda: username)
name = ipc._get_socketname(basedir=basedir)
server = ipc.IPCServer(name)
expected_md5 = md5('{}-{}'.format(username, basedir))
assert expected_md5 in server._socketname
try:
server.listen()
finally:
server.shutdown()
def test_connect_inexistent(qlocalsocket):
"""Make sure connecting to an inexistent server fails immediately.
If this test fails, our connection logic checking for the old naming scheme
would not work properly.
"""
qlocalsocket.connectToServer('qute-test-inexistent')
assert qlocalsocket.error() == QLocalSocket.ServerNotFoundError
@pytest.mark.posix
def test_socket_options_address_in_use_problem(qlocalserver, short_tmpdir):
"""Qt seems to ignore AddressInUseError when using socketOptions.
With this test we verify this bug still exists. If it fails, we can
probably start using setSocketOptions again.
"""
servername = str(short_tmpdir / 'x')
s1 = QLocalServer()
ok = s1.listen(servername)
assert ok
s2 = QLocalServer()
s2.setSocketOptions(QLocalServer.UserAccessOption)
ok = s2.listen(servername)
print(s2.errorString())
# We actually would expect ok == False here - but we want the test to fail
# when the Qt bug is fixed.
assert ok
| fiete201/qutebrowser | tests/unit/misc/test_ipc.py | Python | gpl-3.0 | 28,342 |
#!/usr/bin/env python
if __name__ == '__main__':
from anoisetools.scripts.dispatch import main
main()
| fhcrc/ampliconnoise | anoise.py | Python | gpl-3.0 | 111 |
#!/usr/bin/env python
"""
Seshat
Web App/API framework built on top of gevent
modifying decorators for HTTP method functions
For more information, see: https://github.com/JoshAshby/
http://xkcd.com/353/
Josh Ashby
2014
http://joshashby.com
[email protected]
"""
import json
import seshat_addons.utils.patch_json
from ..view.template import template
import seshat.actions as actions
def HTML(f):
def wrapper(*args, **kwargs):
self = args[0]
data = {"title": self._title if self._title else "Untitled"}
self.view = template(self._tmpl, self.request, data)
res = f(*args, **kwargs)
if isinstance(res, actions.BaseAction):
return res
if type(res) is dict:
self.view.data = res
res = self.view
if isinstance(res, template):
string = res.render()
else:
string = res
del res
self.head.add_header("Content-Type", "text/html")
return string
return wrapper
def JSON(f):
def wrapper(*args, **kwargs):
self = args[0]
res = f(*args, **kwargs)
if isinstance(res, actions.BaseAction):
return res
if type(res) is not list:
res = [res]
self.head.add_header("Content-Type", "application/json")
return json.dumps(res)
return wrapper
def Guess(f):
def wrapper(*args, **kwargs):
self = args[0]
res = f(*args, **kwargs)
if isinstance(res, actions.BaseAction):
return res
if self.request.accepts("html") or self.request.accepts("*/*"):
self.head.add_header("Content-Type", "text/html")
data = {"title": self._title if self._title else "Untitled"}
data.update(res)
view = template(self._tmpl, self.request, data).render()
del res
return view
if self.request.accepts("json") or self.request.accepts("*/*"):
self.head.add_header("Content-Type", "application/json")
t_res = type(res)
if t_res is dict:
final_res = json.dumps([res])
elif t_res is list:
final_res = json.dumps(res)
del res
return final_res
else:
return unicode(res)
return wrapper
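# Hedged usage sketch (the controller class and its base name below are
# assumptions for illustration, not part of this module):
#
# class Status(SomeBaseController):
#     @JSON
#     def get(self):
#         # JSON() wraps the dict in a list and returns json.dumps([...]),
#         # setting the Content-Type header to application/json.
#         return {"status": "ok"}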
| JoshAshby/seshat_addons | seshat_addons/seshat/func_mods.py | Python | gpl-3.0 | 2,354 |
# -*- coding: utf-8 -*-
# XMPPVOX: XMPP client for DOSVOX.
# Copyright (C) 2012 Rodolfo Henrique Carvalho
#
# This file is part of XMPPVOX.
#
# XMPPVOX is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
u"""
XMPPVOX - módulo servidor
Este módulo implementa um servidor compatível com o protocolo usado pelo Papovox
e Sítiovox.
"""
import socket
import struct
import textwrap
import time
import sys
from cStringIO import StringIO
from xmppvox import commands
from xmppvox.strings import safe_unicode, get_string as S
import logging
log = logging.getLogger(__name__)
# Constants -------------------------------------------------------------------#
# All strings passed to Papovox must be encoded using the default DOSVOX
# encoding, ISO-8859-1, also known as Latin-1.
SYSTEM_ENCODING = 'ISO-8859-1'
#------------------------------------------------------------------------------#
class PapovoxLikeServer(object):
u"""Um servidor compatível com o Papovox."""
DEFAULT_HOST = '127.0.0.1' # Escuta apenas conexões locais
PORTA_PAPOVOX = 1963
#PORTA_URGENTE = 1964
#PORTA_NOMES = 1956
    DADOTECLADO = 1  # message text (no trailing tab, LF or CR)
    TAMANHO_DO_BUFFER = 4096  # See C:\winvox\Fontes\tradutor\DVINET.PAS
TAMANHO_MAXIMO_MSG = 255
def __init__(self, host=None, port=None):
u"""Cria servidor compatível com o Papovox."""
# Socket do servidor
self.server_socket = None
# Host/interface de escuta
self.host = host or self.DEFAULT_HOST
# Porta do servidor
self.port = port or self.PORTA_PAPOVOX
# Socket do cliente
self.socket = None
# Endereço do cliente
self.addr = None
# Apelido
self.nickname = u""
def connect(self):
u"""Conecta ao Papovox via socket.
Retorna booleano indicando se a conexão foi bem-sucedida.
Bloqueia aguardando Papovox conectar.
Define atributos:
self.server_socket
self.socket
self.addr
self.nickname
"""
try:
self.server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            # Reuse an already open port
#self.server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.server_socket.bind((self.host, self.port))
self.server_socket.listen(1)
log.debug(u"XMPPVOX servindo em %s:%s", self.host, self.port)
            # Connect to Papovox ----------------------------------------------#
try:
self._accept()
except socket.error:
return False
#------------------------------------------------------------------#
return True
except socket.error, e:
log.error(safe_unicode(e.message) or u" ".join(map(safe_unicode, e.args)))
sys.exit(1)
def _accept(self):
ur"""Aceita uma conexão via socket com o Papovox.
Ver 'C:\winvox\Fontes\PAPOVOX\PPLIGA.PAS' e
'C:\winvox\Fontes\SITIOVOX\SVPROC.PAS'.
"""
log.info(u"Aguardando Papovox conectar...")
self.socket, self.addr = self.server_socket.accept()
self.sendline(u"+OK - %s:%s conectado" % self.addr)
self.nickname = self.recvline(self.TAMANHO_DO_BUFFER)
self.sendline(u"+OK")
        # Wait until Papovox is ready to receive messages.
        #
        # The wait is necessary because if messages are sent right away Papovox
        # ignores them, probably related to some timing issue or a wait on
        # reading some buffer or the state of the global variable 'conversando'.
        # SítioVox waits 100ms (file SVPROC.PAS).
time.sleep(0.1)
log.info(u"Conectado ao Papovox.")
log.debug(u"Apelido: %s", self.nickname)
def disconnect(self):
u"""Desliga a conexão com o Papovox."""
log.debug(u"Encerrando conexão com o Papovox...")
try:
self.socket.shutdown(socket.SHUT_RDWR)
self.socket.close()
except socket.error, e:
log.debug("Client socket: %s", safe_unicode(e))
try:
self.server_socket.shutdown(socket.SHUT_RDWR)
self.server_socket.close()
except socket.error, e:
log.debug("Server socket: %s", safe_unicode(e))
    # XMPP client integration functions ----------------------------------------#
def process(self, xmpp):
u"""Processa mensagens do Papovox para a rede XMPP.
Mensagens podem conter comandos para o XMPPVOX.
Nota: esta função só termina caso ocorra algum erro ou a conexão com o
Papovox seja perdida.
"""
try:
while True:
data = self.recvmessage()
                # Try to execute a command contained in the message.
                if commands.process_command(xmpp, data, self):
                    # If a command was executed, leave this iteration and move
                    # on to the next message.
                    continue
                else:
                    # Otherwise, send the message to the XMPP network.
self.send_xmpp_message(xmpp, data)
except socket.error, e:
log.debug(safe_unicode(e))
finally:
log.info(u"Conexão com o Papovox encerrada.")
def show_online_contacts(self, xmpp):
u"""Envia para o Papovox informação sobre contatos disponíveis."""
online_contacts_count = len(commands.enumerate_online_roster(xmpp))
if online_contacts_count == 0:
online_contacts_count = "nenhum"
contacts = u"contato disponível"
elif online_contacts_count == 1:
contacts = u"contato disponível"
else:
contacts = u"contatos disponíveis"
self.sendmessage(S.ONLINE_CONTACTS_INFO.format(amount=online_contacts_count,
contacts=contacts))
def send_xmpp_message(self, xmpp, mbody):
u"""Envia mensagem XMPP para quem está conversando comigo."""
if xmpp.talking_to is not None:
mto = xmpp.talking_to
            # Send the XMPP message.
            xmpp.send_message(mto=mto, mbody=mbody, mtype='chat')
            # Repeat the message I am sending so Papovox speaks it.
            self.send_chat_message(u"eu", mbody)
            # Warn if the contact is offline.
bare_jid = xmpp.get_bare_jid(mto)
roster = xmpp.client_roster
if bare_jid in roster and not roster[bare_jid].resources:
name = xmpp.get_chatty_name(mto)
self.sendmessage(S.WARN_MSG_TO_OFFLINE_USER.format(name=name))
else:
mto = u"ninguém"
self.sendmessage(S.WARN_MSG_TO_NOBODY)
log.debug(u"-> %(mto)s: %(mbody)s", locals())
    # Functions for sending data to Papovox ------------------------------------#
def sendline(self, line):
u"""Codifica e envia texto via socket pelo protocolo do Papovox.
Uma quebra de linha é adicionada automaticamente ao fim da mensagem.
Nota: esta função *não* deve ser usada para enviar mensagens. Use apenas
para transmitir dados brutos de comunicação.
"""
log.debug(u"> %s", line)
line = line.encode(SYSTEM_ENCODING, 'replace')
self.socket.sendall("%s\r\n" % (line,))
def sendmessage(self, msg):
u"""Codifica e envia uma mensagem via socket pelo protocolo do Papovox."""
log.debug(u">> %s", msg)
msg = msg.encode(SYSTEM_ENCODING, 'replace')
        # Although the Papovox protocol theoretically supports messages of up
        # to 65535 (2^16 - 1) characters, in practice only the first 255 are
        # displayed and the rest end up ignored.
        # Therefore, large messages must be broken into several smaller ones.
chunks = textwrap.wrap(
text=msg,
width=self.TAMANHO_MAXIMO_MSG,
expand_tabs=False,
replace_whitespace=False,
drop_whitespace=False,
)
def sendmsg(msg):
self.socket.sendall("%s%s" % (struct.pack('<BH', self.DADOTECLADO, len(msg)), msg))
        # Send one or more messages through the socket
map(sendmsg, chunks)
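    # Wire format sketch, as implied by the struct.pack call above (each chunk
    # travels as a 3-byte header followed by its payload):
    #   offset 0: 1 byte  - data type (DADOTECLADO == 1)
    #   offset 1: 2 bytes - payload length, little-endian unsigned short
    #   offset 3: payload - message text encoded as ISO-8859-1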
def send_chat_message(self, sender, body, _state={}):
u"""Formata e envia uma mensagem de bate-papo via socket.
Use esta função para enviar uma mensagem para o Papovox sintetizar.
"""
        # Maximum time between two messages for them to be considered part of
        # the same conversation, in seconds.
        TIMEOUT = 90
        # Restore state.
last_sender = _state.get('last_sender')
        last_timestamp = _state.get('last_timestamp', 0)  # in seconds
timestamp = time.time()
timed_out = (time.time() - last_timestamp) > TIMEOUT
if sender == last_sender and not timed_out:
msg = S.MSG
else:
msg = S.MSG_FROM
self.sendmessage(msg.format(**locals()))
        # Save state to be used on the next call.
_state['last_sender'] = sender
_state['last_timestamp'] = timestamp
def signal_error(self, msg):
u"""Sinaliza erro para o Papovox e termina a conexão."""
# Avisa Papovox sobre o erro.
self.sendmessage(msg)
# Encerra conexão com o Papovox.
self.disconnect()
    # Functions for receiving data from Papovox --------------------------------#
def recv(self, size):
u"""Recebe dados via socket.
Use esta função para receber do socket `size' bytes ou menos.
Levanta uma exceção caso nenhum byte seja recebido.
Nota: em geral, use esta função ao invés do método 'socket.recv'.
Veja também a função 'recvall'.
"""
data = self.socket.recv(size)
if not data and size:
raise socket.error(u"Nenhum dado recebido do socket, conexão perdida.")
return data
def recvline(self, size):
u"""Recebe uma linha via socket.
A string é retornada em unicode e não contém \r nem \n.
"""
        # Assume only one line is available on the socket.
data = self.recv(size).rstrip('\r\n')
data = data.decode(SYSTEM_ENCODING)
if any(c in data for c in '\r\n'):
log.warning("Mais que uma linha recebida!")
return data
def recvall(self, size):
u"""Recebe dados exaustivamente via socket.
Use esta função para receber do socket exatamente `size' bytes.
Levanta uma exceção caso nenhum byte seja recebido.
Nota: em geral, use esta função ou 'recv' ao invés do método 'socket.recv'.
"""
data = StringIO()
while data.tell() < size:
data.write(self.recv(size - data.tell()))
data_str = data.getvalue()
data.close()
return data_str
def recvmessage(self):
u"""Recebe uma mensagem via socket pelo protocolo do Papovox.
A mensagem é retornada em unicode.
Se uma exceção não for levantada, esta função sempre retorna uma mensagem.
"""
        # Try to receive a message until it succeeds.
while True:
datatype, datalen = struct.unpack('<BH', self.recvall(3))
            # Reject data from Papovox that is not of type DADOTECLADO
if datatype != self.DADOTECLADO:
log.warning(u"Tipo de dado desconhecido: (%d).", datatype)
continue
            # Ignore empty messages
if datalen == 0:
log.debug(u"Mensagem vazia ignorada")
continue
            # Receive data/message from Papovox
data = self.recvall(datalen)
data = data.decode(SYSTEM_ENCODING)
return data
    # FIXME Remove hard-coded references to commands and the command prefix.
| rhcarvalho/xmppvox | xmppvox/server.py | Python | gpl-3.0 | 12,781 |
import urllib
import urllib2
import json
import time
import hmac,hashlib
def createTimeStamp(datestr, format="%Y-%m-%d %H:%M:%S"):
return time.mktime(time.strptime(datestr, format))
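# Example (illustrative values): createTimeStamp("2014-02-19 03:44:59") returns
# the corresponding Unix timestamp, interpreted in the local timezone.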
class poloniex:
def __init__(self, APIKey, Secret):
self.APIKey = APIKey
self.Secret = Secret
def post_process(self, before):
after = before
        # Add a timestamp if there isn't one but there is a datetime
if('return' in after):
if(isinstance(after['return'], list)):
for x in xrange(0, len(after['return'])):
if(isinstance(after['return'][x], dict)):
if('datetime' in after['return'][x] and 'timestamp' not in after['return'][x]):
after['return'][x]['timestamp'] = float(createTimeStamp(after['return'][x]['datetime']))
return after
def api_query(self, command, req={}):
if(command == "returnTicker" or command == "return24Volume"):
ret = urllib2.urlopen(urllib2.Request('https://poloniex.com/public?command=' + command))
return json.loads(ret.read())
elif(command == "returnChartData"):
            ret = urllib2.urlopen(urllib2.Request('https://poloniex.com/public?command=' + command + '&currencyPair=' + str(req['currencyPair'] + '&start=' + str((int(time.time()) - req['chartDataAge'])) + '&period=' + str(300))))
return json.loads(ret.read())
elif(command == "returnOrderBook"):
            ret = urllib2.urlopen(urllib2.Request('https://poloniex.com/public?command=' + command + '&currencyPair=' + str(req['currencyPair'])))
return json.loads(ret.read())
elif(command == "returnMarketTradeHistory"):
ret = urllib2.urlopen(urllib2.Request('https://poloniex.com/public?command=' + "returnTradeHistory" + '¤cyPair=' + str(req['currencyPair'])))
return json.loads(ret.read())
else:
req['command'] = command
req['nonce'] = int(time.time()*1000)
post_data = urllib.urlencode(req)
sign = hmac.new(self.Secret, post_data, hashlib.sha512).hexdigest()
headers = {
'Sign': sign,
'Key': self.APIKey
}
ret = urllib2.urlopen(urllib2.Request('https://poloniex.com/tradingApi', post_data, headers))
jsonRet = json.loads(ret.read())
return self.post_process(jsonRet)
def returnTicker(self):
return self.api_query("returnTicker")
def returnChartData(self, currencyPair, chartDataAge = 300):
return self.api_query("returnChartData", {'currencyPair': currencyPair, 'chartDataAge' : chartDataAge})
def return24Volume(self):
return self.api_query("return24Volume")
def returnOrderBook (self, currencyPair):
return self.api_query("returnOrderBook", {'currencyPair': currencyPair})
def returnMarketTradeHistory (self, currencyPair):
return self.api_query("returnMarketTradeHistory", {'currencyPair': currencyPair})
# Returns all of your balances.
# Outputs:
# {"BTC":"0.59098578","LTC":"3.31117268", ... }
def returnBalances(self):
return self.api_query('returnBalances')
# Returns your open orders for a given market, specified by the "currencyPair" POST parameter, e.g. "BTC_XCP"
# Inputs:
# currencyPair The currency pair e.g. "BTC_XCP"
# Outputs:
# orderNumber The order number
# type sell or buy
# rate Price the order is selling or buying at
# Amount Quantity of order
# total Total value of order (price * quantity)
def returnOpenOrders(self,currencyPair):
return self.api_query('returnOpenOrders',{"currencyPair":currencyPair})
# Returns your trade history for a given market, specified by the "currencyPair" POST parameter
# Inputs:
# currencyPair The currency pair e.g. "BTC_XCP"
# Outputs:
# date Date in the form: "2014-02-19 03:44:59"
# rate Price the order is selling or buying at
# amount Quantity of order
# total Total value of order (price * quantity)
# type sell or buy
def returnTradeHistory(self,currencyPair):
return self.api_query('returnTradeHistory',{"currencyPair":currencyPair})
# Places a buy order in a given market. Required POST parameters are "currencyPair", "rate", and "amount". If successful, the method will return the order number.
# Inputs:
    # currencyPair      The currency pair
# rate price the order is buying at
# amount Amount of coins to buy
# Outputs:
# orderNumber The order number
def buy(self,currencyPair,rate,amount):
return self.api_query('buy',{"currencyPair":currencyPair,"rate":rate,"amount":amount})
# Places a sell order in a given market. Required POST parameters are "currencyPair", "rate", and "amount". If successful, the method will return the order number.
# Inputs:
    # currencyPair      The currency pair
# rate price the order is selling at
# amount Amount of coins to sell
# Outputs:
# orderNumber The order number
def sell(self,currencyPair,rate,amount):
return self.api_query('sell',{"currencyPair":currencyPair,"rate":rate,"amount":amount})
# Cancels an order you have placed in a given market. Required POST parameters are "currencyPair" and "orderNumber".
# Inputs:
    # currencyPair      The currency pair
# orderNumber The order number to cancel
# Outputs:
    # success           1 or 0
def cancel(self,currencyPair,orderNumber):
return self.api_query('cancelOrder',{"currencyPair":currencyPair,"orderNumber":orderNumber})
# Immediately places a withdrawal for a given currency, with no email confirmation. In order to use this method, the withdrawal privilege must be enabled for your API key. Required POST parameters are "currency", "amount", and "address". Sample output: {"response":"Withdrew 2398 NXT."}
# Inputs:
# currency The currency to withdraw
# amount The amount of this coin to withdraw
# address The withdrawal address
# Outputs:
# response Text containing message about the withdrawal
def withdraw(self, currency, amount, address):
return self.api_query('withdraw',{"currency":currency, "amount":amount, "address":address})
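# Hedged usage sketch (the key/secret strings are placeholders, not part of
# this module; the private calls below require valid Poloniex API credentials):
#
# polo = poloniex('API_KEY', 'API_SECRET')
# ticker = polo.returnTicker()        # public endpoint, no signing needed
# balances = polo.returnBalances()    # signed request via api_query
# polo.buy('BTC_LTC', rate='0.0251', amount='10')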
| Vadus/eldo | src/poloniex/poloniex.py | Python | gpl-3.0 | 6,502 |
from pygame import USEREVENT
# Events
SONG_END_EVENT = USEREVENT + 1
ENEMY_DESTROYED_EVENT = USEREVENT + 2
BOSS_BATTLE_EVENT = USEREVENT + 3
DISPLAY_MESSAGE_EVENT = USEREVENT + 4
END_LEVEL_EVENT = USEREVENT + 5
END_GAME_EVENT = USEREVENT + 6
# Constants
SCREEN_SIZE = (800, 600)
# Colors
COLOR_BLACK = (0, 0, 0)
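# Usage sketch (assumes pygame is imported and its event loop is running;
# not part of the original file):
#
# pygame.event.post(pygame.event.Event(SONG_END_EVENT))
# for event in pygame.event.get():
#     if event.type == SONG_END_EVENT:
#         pass  # e.g. queue the next track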
| juanjosegzl/learningpygame | constants.py | Python | gpl-3.0 | 315 |
""" This is a test of the chain
ReportsClient -> ReportsGeneratorHandler -> AccountingDB
It assumes that the DB is present and that the service is running.
The DataStore service also has to be up and running.
This is pytest!
"""
# pylint: disable=invalid-name,wrong-import-position
import datetime
import DIRAC
DIRAC.initialize() # Initialize configuration
from DIRAC import gLogger
from DIRAC.AccountingSystem.Client.DataStoreClient import gDataStoreClient
from DIRAC.AccountingSystem.Client.ReportsClient import ReportsClient
from DIRAC.tests.Utilities.Accounting import createDataOperationAccountingRecord
from DIRAC.tests.Utilities.Accounting import createStorageOccupancyAccountingRecord
gLogger.setLevel("DEBUG")
def test_addAndRemoveDataOperation():
# just inserting one record
record = createDataOperationAccountingRecord()
record.setStartTime()
record.setEndTime()
res = gDataStoreClient.addRegister(record)
assert res["OK"]
res = gDataStoreClient.commit()
assert res["OK"]
rc = ReportsClient()
res = rc.listReports("DataOperation")
assert res["OK"]
res = rc.listUniqueKeyValues("DataOperation")
assert res["OK"]
res = rc.getReport(
"DataOperation",
"Successful transfers",
datetime.datetime.utcnow(),
datetime.datetime.utcnow(),
{},
"Destination",
)
assert res["OK"]
# now removing that record
res = gDataStoreClient.remove(record)
assert res["OK"]
def test_addAndRemoveStorageOccupancy():
# just inserting one record
record = createStorageOccupancyAccountingRecord()
record.setStartTime()
record.setEndTime()
res = gDataStoreClient.addRegister(record)
assert res["OK"]
res = gDataStoreClient.commit()
assert res["OK"]
rc = ReportsClient()
res = rc.listReports("StorageOccupancy")
assert res["OK"]
res = rc.listUniqueKeyValues("StorageOccupancy")
assert res["OK"]
res = rc.getReport(
"StorageOccupancy",
"Free and Used Space",
datetime.datetime.utcnow(),
datetime.datetime.utcnow(),
{},
"StorageElement",
)
assert res["OK"]
# now removing that record
res = gDataStoreClient.remove(record)
assert res["OK"]
| DIRACGrid/DIRAC | tests/Integration/AccountingSystem/Test_ReportsClient.py | Python | gpl-3.0 | 2,308 |
'''Language module; allows the user to change the language on demand'''
# -*- coding: utf-8 -*-
# This file is part of emesene.
#
# emesene is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# emesene is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with emesene; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import os
import gettext
import locale
import glob
class Language(object):
"""
A class for language management
"""
NAME = 'Language'
DESCRIPTION = 'Language management module'
AUTHOR = 'Lucas F. Ottaviano (lfottaviano)'
WEBSITE = 'www.emesene.org'
LANGUAGES_DICT = {'af':'Afrikaans',
'ar':'\xd8\xa7\xd9\x84\xd8\xb9\xd8\xb1\xd8\xa8\xd9\x8a\xd8\xa9',
'ast':'Asturianu',
'az':'\xd8\xa2\xd8\xb0\xd8\xb1\xd8\xa8\xd8\xa7\xdb\x8c\xd8\xac\xd8\xa7\xd9\x86 \xd8\xaf\xdb\x8c\xd9\x84\xdb\x8c',
'bg':'\xd0\x91\xd1\x8a\xd0\xbb\xd0\xb3\xd0\xb0\xd1\x80\xd1\x81\xd0\xba\xd0\xb8 \xd0\xb5\xd0\xb7\xd0\xb8\xd0\xba',
'bn':'\xe0\xa6\xac\xe0\xa6\xbe\xe0\xa6\x82\xe0\xa6\xb2\xe0\xa6\xbe',
'bs':'\xd0\xb1\xd0\xbe\xd1\x81\xd0\xb0\xd0\xbd\xd1\x81\xd0\xba\xd0\xb8',
'ca':'Catal\xc3\xa0',
'cs':'\xc4\x8de\xc5\xa1tina',
'da':'Danish',
'de':'Deutsch',
'dv':'\xde\x8b\xde\xa8\xde\x88\xde\xac\xde\x80\xde\xa8',
'el':'\xce\x95\xce\xbb\xce\xbb\xce\xb7\xce\xbd\xce\xb9\xce\xba\xce\xac',
'en':'English',
'en_AU':'English (Australia)',
'en_CA':'English (Canada)',
'en_GB':'English (United Kingdom)',
'eo':'Esperanto',
'es':'Espa\xc3\xb1ol',
'et':'Eesti keel',
'eu':'Euskara',
'fi':'Suomi',
'fil':'Filipino',
'fo':'F\xc3\xb8royskt',
'fr':'Fran\xc3\xa7ais',
'ga':'Gaeilge',
'gl':'Galego',
'gv':'Gaelg',
'he':'\xd7\xa2\xd6\xb4\xd7\x91\xd6\xb0\xd7\xa8\xd6\xb4\xd7\x99\xd7\xaa',
'hr':'Hrvatski',
'hu':'Magyar',
'ia':'Interlingua',
'id':'Bahasa Indonesia',
'is':'\xc3\x8dslenska',
'it':'Italiano',
'ja':'\xe6\x97\xa5\xe6\x9c\xac\xe8\xaa\x9e',
'kab':'Taqbaylit',
'kn':'Kanna\xe1\xb8\x8da',
'ko':'\xed\x95\x9c\xea\xb5\xad\xec\x96\xb4/\xec\xa1\xb0\xec\x84\xa0\xeb\xa7\x90',
'ku':'\xda\xa9\xd9\x88\xd8\xb1\xd8\xaf\xdb\x8c',
'la':'Lat\xc4\xabna',
'lb':'L\xc3\xabtzebuergesch',
'lt':'Lietuvi\xc5\xb3',
'lv':'Latvie\xc5\xa1u valoda',
'mk':'\xd0\x9c\xd0\xb0\xd0\xba\xd0\xb5\xd0\xb4\xd0\xbe\xd0\xbd\xd1\x81\xd0\xba\xd0\xb8 \xd1\x98\xd0\xb0\xd0\xb7\xd0\xb8\xd0\xba',
'ms':'\xd8\xa8\xd9\x87\xd8\xa7\xd8\xb3 \xd9\x85\xd9\x84\xd8\xa7\xd9\x8a\xd9\x88',
'nan':'\xe9\x96\xa9\xe5\x8d\x97\xe8\xaa\x9e / \xe9\x97\xbd\xe5\x8d\x97\xe8\xaf\xad',
'nb':'Norwegian Bokm\xc3\xa5l',
'nds':'Plattd\xc3\xbc\xc3\xbctsch',
'nl':'Nederlands',
'nn':'Norwegian Nynorsk',
'oc':'Occitan (post 1500)',
'pl':'J\xc4\x99zyk Polski',
'pt':'Portugu\xc3\xaas',
'pt_BR':'Portugu\xc3\xaas Brasileiro',
'ro':'Rom\xc3\xa2n\xc4\x83',
'ru':'\xd1\x80\xd1\x83\xd1\x81\xd1\x81\xd0\xba\xd0\xb8\xd0\xb9 \xd1\x8f\xd0\xb7\xd1\x8b\xd0\xba',
'sk':'Sloven\xc4\x8dina',
'sl':'Sloven\xc5\xa1\xc4\x8dina',
'sq':'Shqip',
'sr':'\xd1\x81\xd1\x80\xd0\xbf\xd1\x81\xd0\xba\xd0\xb8',
'sv':'Svenska',
'ta':'\xe0\xae\xa4\xe0\xae\xae\xe0\xae\xbf\xe0\xae\xb4\xe0\xaf\x8d',
'th':'\xe0\xb8\xa0\xe0\xb8\xb2\xe0\xb8\xa9\xe0\xb8\xb2\xe0\xb9\x84\xe0\xb8\x97\xe0\xb8\xa2',
'tr':'T\xc3\xbcrk\xc3\xa7e',
'uk':'\xd1\x83\xd0\xba\xd1\x80\xd0\xb0\xd1\x97\xcc\x81\xd0\xbd\xd1\x81\xd1\x8c\xd0\xba\xd0\xb0 \xd0\xbc\xd0\xbe\xcc\x81\xd0\xb2\xd0\xb0',
'vec':'V\xc3\xa8neto',
'zh_CN':'\xe7\xae\x80\xe4\xbd\x93\xe5\xad\x97',
'zh_HK':'\xe6\xb1\x89\xe8\xaf\xad/\xe6\xbc\xa2\xe8\xaa\x9e (\xe9\xa6\x99\xe6\xb8\xaf\xe4\xba\xba)',
'zh_TW':'\xe7\xb9\x81\xe9\xab\x94\xe4\xb8\xad\xe6\x96\x87'}
def __init__(self):
""" constructor """
self._languages = None
self._default_locale = locale.getdefaultlocale()[0]
self._lang = os.getenv('LANGUAGE') or self._default_locale
self._locales_path = 'po/' if os.path.exists('po/') else None
self._get_languages_list()
def install_desired_translation(self, language):
"""
        installs the translation for the given @language
@language, a string with the language code or None
"""
if language is not None:
#if default_locale is something like es_UY or en_XX, strip the end
#if it's not in LANGUAGES_DICT
if language not in self.LANGUAGES_DICT.keys():
language = language.split("_")[0]
self._lang = language
os.putenv('LANGUAGE', language)
# gettext.translation() receives a _list_ of languages, so make it a list.
language = [language]
#now it's a nice language in LANGUAGE_DICT or, if not, it's english or
#some unsupported translation so we fall back to english in those cases
translation = gettext.translation('emesene',
localedir=self._locales_path,
languages=language,
fallback=True)
if not isinstance(translation, gettext.GNUTranslations):
self._lang = 'en'
translation.install()
def install_default_translation(self):
"""
installs a translation relative to system enviroment
"""
language = os.getenv('LANGUAGE') or self._default_locale
self.install_desired_translation(language)
# Getters
def get_default_locale(self):
"""
        returns the default locale obtained from the locale python module,
        assigned only on object instantiation
"""
return self._default_locale
def get_lang(self):
"""
returns the current language code that has been used for translation
"""
return self._lang
def get_locales_path(self):
"""
returns the locales path
"""
return self._locales_path
def get_available_languages(self):
""" returns a list of available languages """
return self._get_languages_list()
def _get_languages_list(self):
""" fills languages list"""
if self._languages is None:
paths = glob.glob(os.path.join(self._locales_path, '*',
'LC_MESSAGES', 'emesene.mo'))
self._languages = [path.split(os.path.sep)[-3] for path in paths]
self._languages.append('en')
self._languages.sort()
return self._languages
_instance = None
def get_language_manager():
'''instance Language object, if needed. otherwise, return it'''
global _instance
if _instance:
return _instance
_instance = Language()
return _instance
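# Hedged usage sketch (assumes compiled 'emesene' .mo catalogs exist under the
# locales path; otherwise gettext falls back to English):
#
# manager = get_language_manager()
# manager.install_desired_translation('pt_BR')  # installs _() as a builtin
# print _('some translatable string')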
| emesene/emesene | emesene/Language.py | Python | gpl-3.0 | 7,488 |
import os
import pytest
import perun.utils.helpers as helpers
import perun.utils.streams as streams
import perun.logic.store as store
import perun.logic.index as index
import perun.utils.exceptions as exceptions
import perun.utils.timestamps as timestamps
__author__ = 'Tomas Fiedor'
@pytest.mark.usefixtures('cleandir')
def test_malformed_indexes(tmpdir, monkeypatch, capsys):
"""Tests malformed indexes"""
index_file = os.path.join(str(tmpdir), "index")
index.touch_index(index_file)
# Try different number of stuff
old_read_int = store.read_int_from_handle
def mocked_read_int(_):
return 2
monkeypatch.setattr('perun.logic.store.read_int_from_handle', mocked_read_int)
with open(index_file, 'rb') as index_handle:
with pytest.raises(SystemExit):
print(list(index.walk_index(index_handle)))
_, err = capsys.readouterr()
assert "fatal: malformed index file: too many or too few objects registered in index" in err
monkeypatch.setattr('perun.logic.store.read_int_from_handle', old_read_int)
monkeypatch.setattr('perun.logic.index.INDEX_VERSION', index.INDEX_VERSION - 1)
with open(index_file, 'rb') as index_handle:
with pytest.raises(exceptions.MalformedIndexFileException) as exc:
index.print_index_from_handle(index_handle)
assert "different index version" in str(exc.value)
index_file = os.path.join(str(tmpdir), "index2")
index.touch_index(index_file)
monkeypatch.setattr('perun.logic.index.INDEX_MAGIC_PREFIX', index.INDEX_MAGIC_PREFIX.upper())
with open(index_file, 'rb') as index_handle:
with pytest.raises(exceptions.MalformedIndexFileException) as exc:
index.print_index_from_handle(index_handle)
assert "not an index file" in str(exc.value)
@pytest.mark.usefixtures('cleandir')
def test_correct_index(tmpdir):
"""Test correct working with index"""
index_file = os.path.join(str(tmpdir), "index")
index.touch_index(index_file)
index.print_index(index_file)
@pytest.mark.usefixtures('cleandir')
def test_versions(tmpdir, monkeypatch):
"""Test correct working with index"""
monkeypatch.setattr('perun.logic.index.INDEX_VERSION', index.IndexVersion.SlowLorris.value)
pool_path = os.path.join(os.path.split(__file__)[0], 'profiles', 'degradation_profiles')
profile_name = os.path.join(pool_path, 'linear_base.perf')
profile = store.load_profile_from_file(profile_name, True)
index_file = os.path.join(str(tmpdir), "index")
index.touch_index(index_file)
st = timestamps.timestamp_to_str(os.stat(profile_name).st_mtime)
sha = store.compute_checksum("Wow, such checksum".encode('utf-8'))
basic_entry = index.BasicIndexEntry(st, sha, profile_name, index.INDEX_ENTRIES_START_OFFSET)
index.write_entry_to_index(index_file, basic_entry)
with pytest.raises(SystemExit):
with open(index_file, 'rb+') as index_handle:
index.BasicIndexEntry.read_from(index_handle, index.IndexVersion.FastSloth)
with open(index_file, 'rb+') as index_handle:
index_handle.seek(index.INDEX_ENTRIES_START_OFFSET)
entry = index.BasicIndexEntry.read_from(index_handle, index.IndexVersion.SlowLorris)
assert entry == basic_entry
index.print_index(index_file)
# Test update to version 2.0 index
with open(index_file, 'rb+') as index_handle:
index_handle.seek(4)
version = store.read_int_from_handle(index_handle)
assert version == index.IndexVersion.SlowLorris.value
monkeypatch.setattr('perun.logic.index.INDEX_VERSION', index.IndexVersion.FastSloth.value)
monkeypatch.setattr('perun.logic.store.split_object_name', lambda _, __: (None, index_file))
monkeypatch.setattr('perun.logic.index.walk_index', lambda _: [])
index.get_profile_list_for_minor(os.getcwd(), index_file)
with open(index_file, 'rb+') as index_handle:
index_handle.seek(4)
version = store.read_int_from_handle(index_handle)
assert version == index.IndexVersion.FastSloth.value
# Test version 2 index
monkeypatch.setattr('perun.logic.index.INDEX_VERSION', index.IndexVersion.FastSloth.value)
index_v2_file = os.path.join(str(tmpdir), "index_v2")
index.touch_index(index_v2_file)
extended_entry = index.ExtendedIndexEntry(st, sha, profile_name, index.INDEX_ENTRIES_START_OFFSET, profile)
index.write_entry_to_index(index_v2_file, extended_entry)
with open(index_v2_file, 'rb+') as index_handle:
index_handle.seek(index.INDEX_ENTRIES_START_OFFSET)
stored = index.ExtendedIndexEntry.read_from(index_handle, index.IndexVersion.FastSloth)
assert stored == extended_entry
index.print_index(index_v2_file)
# Test FastSloth with SlowLorris
monkeypatch.setattr('perun.logic.index.INDEX_VERSION', index.IndexVersion.FastSloth.value)
monkeypatch.setattr('perun.logic.pcs.get_object_directory', lambda: '')
monkeypatch.setattr('perun.logic.store.load_profile_from_file', lambda *_, **__: profile)
index_v1_2_file = os.path.join(str(tmpdir), "index_v1_2")
index.touch_index(index_v1_2_file)
index.write_entry_to_index(index_v1_2_file, basic_entry)
with open(index_v1_2_file, 'rb+') as index_handle:
index_handle.seek(index.INDEX_ENTRIES_START_OFFSET)
stored = index.ExtendedIndexEntry.read_from(index_handle, index.IndexVersion.SlowLorris)
assert stored.__dict__ == extended_entry.__dict__
@pytest.mark.usefixtures('cleandir')
def test_helpers(tmpdir):
index_file = os.path.join(str(tmpdir), "index")
index.touch_index(index_file)
with open(index_file, 'rb+') as index_handle:
store.write_string_to_handle(index_handle, "Hello Dolly!")
index_handle.seek(0)
stored_string = store.read_string_from_handle(index_handle)
assert stored_string == "Hello Dolly!"
current_position = index_handle.tell()
store.write_list_to_handle(index_handle, ['hello', 'dolly'])
index_handle.seek(current_position)
stored_list = store.read_list_from_handle(index_handle)
assert stored_list == ['hello', 'dolly']
@pytest.mark.usefixtures('cleandir')
def test_streams(tmpdir, monkeypatch):
"""Test various untested behaviour"""
    # Loading from a nonexistent file
yaml = streams.safely_load_yaml_from_file("nonexistant")
assert yaml == {}
# Load file with incorrect encoding
tmp_file = tmpdir.mkdir("tmp").join("tmp.file")
with open(tmp_file, 'wb') as tmp:
tmp.write(bytearray("hello šunte", "windows-1252"))
file = streams.safely_load_file(tmp_file)
assert file == []
# Safely load from string
yaml = streams.safely_load_yaml_from_stream('"root: 1"')
assert yaml == {'root': 1}
# Bad yaml
yaml = streams.safely_load_yaml_from_stream('"root: "1 "')
assert yaml == {}
    # Nonexistent file
with pytest.raises(exceptions.IncorrectProfileFormatException):
store.load_profile_from_file("nonexistant", False)
monkeypatch.setattr("perun.logic.store.read_and_deflate_chunk", lambda _: "p mixed 1\0tmp")
with pytest.raises(exceptions.IncorrectProfileFormatException):
store.load_profile_from_file(tmp_file, False)
| tfiedor/perun | tests/test_store.py | Python | gpl-3.0 | 7,270 |
""" Copied from https://bitcointalk.org/index.php?topic=1026.0 (public domain) """
from hashlib import sha256
if str != bytes:
def ord(c):
# Python 3.x
return c
def chr(n):
return bytes((n,))
__b58chars = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
__b58base = len(__b58chars)
def b58encode(v):
""" encode v, which is a string of bytes, to base58. """
long_value = 0
for (i, c) in enumerate(v[::-1]):
long_value += (256 ** i) * ord(c)
result = ''
while long_value >= __b58base:
div, mod = divmod(long_value, __b58base)
result = __b58chars[mod] + result
long_value = div
result = __b58chars[long_value] + result
# Bitcoin does a little leading-zero-compression:
# leading 0-bytes in the input become leading-1s
    nPad = 0
    for c in v:
        # ord() keeps this working for both py2 str and py3 bytes input
        if ord(c) == 0:
            nPad += 1
        else:
            break
return (__b58chars[0]*nPad) + result
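# Illustrative encode (traced through the logic above): a leading zero byte
# becomes a leading '1' in the output, so b58encode(b'\x00\x01') == '12'.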
def b58decode(v, length):
""" decode v into a string of len bytes """
long_value = 0
for (i, c) in enumerate(v[::-1]):
long_value += __b58chars.find(c) * (__b58base**i)
result = bytes()
while long_value >= 256:
div, mod = divmod(long_value, 256)
result = chr(mod) + result
long_value = div
result = chr(long_value) + result
nPad = 0
for c in v:
if c == __b58chars[0]:
nPad += 1
else:
break
result = chr(0) * nPad + result
if length is not None and len(result) != length:
return None
return result
def _parse_address(str_address):
raw = b58decode(str_address, 25)
if raw is None:
raise AttributeError("'{}' is invalid base58 of decoded length 25"
.format(str_address))
version = raw[0]
checksum = raw[-4:]
vh160 = raw[:-4] # Version plus hash160 is what is checksummed
h3 = sha256(sha256(vh160).digest()).digest()
if h3[0:4] != checksum:
raise AttributeError("'{}' has an invalid address checksum"
.format(str_address))
return ord(version), raw[1:-4]
def get_bcaddress_version(str_address):
""" Reverse compatibility non-python implementation """
try:
return _parse_address(str_address)[0]
except AttributeError:
return None
def get_bcaddress(str_address):
""" Reverse compatibility non-python implementation """
try:
return _parse_address(str_address)[1]
except AttributeError:
return None
def address_version(str_address):
return _parse_address(str_address)[0]
def address_bytes(str_address):
return _parse_address(str_address)[1]
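if __name__ == '__main__':
    # Minimal round-trip self-check (illustrative only; the payload is a
    # made-up all-zero hash160 behind a 0x00 version byte, not a real address).
    payload = chr(0) * 21
    check = sha256(sha256(payload).digest()).digest()[:4]
    address = b58encode(payload + check)
    assert b58decode(address, 25) == payload + check
    assert address_version(address) == 0
    print('base58 round-trip OK: %s' % address)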
| simplecrypto/cryptokit | cryptokit/base58.py | Python | gpl-3.0 | 2,723 |
# -*- coding: utf-8 -*-
#
# Copyright © 2012 - 2019 Michal Čihař <[email protected]>
#
# This file is part of Weblate <https://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
from __future__ import unicode_literals
import gettext
import re
from collections import defaultdict
from itertools import chain
from appconf import AppConf
from django.conf import settings
from django.db import models, transaction
from django.db.models import Q
from django.db.utils import OperationalError
from django.urls import reverse
from django.utils.encoding import force_text, python_2_unicode_compatible
from django.utils.functional import cached_property
from django.utils.safestring import mark_safe
from django.utils.translation import pgettext_lazy
from django.utils.translation import ugettext as _
from django.utils.translation import ugettext_lazy
from weblate.lang import data
from weblate.langdata import languages
from weblate.logger import LOGGER
from weblate.trans.util import sort_objects
from weblate.utils.stats import LanguageStats
from weblate.utils.validators import validate_pluraleq
PLURAL_RE = re.compile(
r'\s*nplurals\s*=\s*([0-9]+)\s*;\s*plural\s*=\s*([()n0-9!=|&<>+*/%\s?:-]+)'
)
PLURAL_TITLE = '''
{name} <i class="fa fa-question-circle text-primary" title="{examples}"></i>
'''
COPY_RE = re.compile(r'\([0-9]+\)')
def get_plural_type(base_code, pluralequation):
"""Get correct plural type for language."""
    # Strip the trailing semicolon if present
if pluralequation[-1] == ';':
pluralequation = pluralequation[:-1]
# No plural
if pluralequation == '0':
return data.PLURAL_NONE
# Standard plural equations
for mapping in data.PLURAL_MAPPINGS:
if pluralequation in mapping[0]:
return mapping[1]
# Arabic special case
if base_code in ('ar',):
return data.PLURAL_ARABIC
    # Log an error in case of an unknown mapping
LOGGER.error(
'Can not guess type of plural for %s: %s', base_code, pluralequation
)
return data.PLURAL_UNKNOWN
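# Illustrative calls (the exact constants depend on data.PLURAL_MAPPINGS, so
# treat these as a sketch rather than a guarantee):
#   get_plural_type('en', 'n != 1')   # a standard two-form mapping
#   get_plural_type('xx', 'n % 7')    # no mapping found -> data.PLURAL_UNKNOWN, logged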
def get_english_lang():
"""Return object ID for English language"""
try:
return Language.objects.get_default().id
except (Language.DoesNotExist, OperationalError):
return 65535
class LanguageQuerySet(models.QuerySet):
# pylint: disable=no-init
def get_default(self):
"""Return default source language object."""
return self.get(code='en')
def try_get(self, *args, **kwargs):
"""Try to get language by code."""
try:
return self.get(*args, **kwargs)
except (Language.DoesNotExist, Language.MultipleObjectsReturned):
return None
def parse_lang_country(self, code):
"""Parse language and country from locale code."""
# Parse private use subtag
subtag_pos = code.find('-x-')
if subtag_pos != -1:
subtag = code[subtag_pos:]
code = code[:subtag_pos]
else:
subtag = ''
# Parse the string
if '-' in code:
lang, country = code.split('-', 1)
# Android regional locales
if len(country) > 2 and country[0] == 'r':
country = country[1:]
elif '_' in code:
lang, country = code.split('_', 1)
elif '+' in code:
lang, country = code.split('+', 1)
else:
lang = code
country = None
return lang, country, subtag
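    # Illustrative parses (not exhaustive):
    #   'pt_BR'    -> ('pt', 'BR', '')
    #   'zh-rTW'   -> ('zh', 'TW', '')        # Android regional locale
    #   'sr+latin' -> ('sr', 'latin', '')
    #   'en-x-foo' -> ('en', None, '-x-foo')  # private use subtag kept aside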
@staticmethod
def sanitize_code(code):
"""Language code sanitization."""
# Strip b+ prefix from Android
if code.startswith('b+'):
code = code[2:]
        # Handle duplicate language files, e.g. "cs (2)"
code = COPY_RE.sub('', code)
# Remove some unwanted characters
code = code.replace(' ', '').replace('(', '').replace(')', '')
# Strip leading and trailing .
code = code.strip('.')
return code
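    # Illustrative sanitizations:
    #   'b+sr+Latn' -> 'sr+Latn'   (Android b+ prefix stripped)
    #   'cs (2)'    -> 'cs'        (duplicate-file suffix removed)
    #   '.de.'      -> 'de'        (stray dots stripped)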
def aliases_get(self, code):
code = code.lower()
codes = (
code,
code.replace('+', '_'),
code.replace('-', '_'),
code.replace('-r', '_'),
code.replace('_r', '_')
)
for newcode in codes:
if newcode in languages.ALIASES:
newcode = languages.ALIASES[newcode]
ret = self.try_get(code=newcode)
if ret is not None:
return ret
return None
def fuzzy_get(self, code, strict=False):
"""Get matching language for code (the code does not have to be exactly
same, cs_CZ is same as cs-CZ) or returns None
It also handles Android special naming of regional locales like pt-rBR
"""
code = self.sanitize_code(code)
lookups = [
# First try getting language as is
Q(code__iexact=code),
# Replace dash with underscore (for things as zh_Hant)
Q(code__iexact=code.replace('-', '_')),
# Try using name
Q(name__iexact=code),
]
for lookup in lookups:
            # Try each lookup in turn
ret = self.try_get(lookup)
if ret is not None:
return ret
# Handle aliases
ret = self.aliases_get(code)
if ret is not None:
return ret
# Parse the string
lang, country, subtags = self.parse_lang_country(code)
# Try "corrected" code
if country is not None:
if '@' in country:
region, variant = country.split('@', 1)
country = '{0}@{1}'.format(region.upper(), variant.lower())
elif '_' in country:
# Xliff way of defining variants
region, variant = country.split('_', 1)
country = '{0}@{1}'.format(region.upper(), variant.lower())
else:
country = country.upper()
newcode = '{0}_{1}'.format(lang.lower(), country)
else:
newcode = lang.lower()
if subtags:
newcode += subtags
ret = self.try_get(code__iexact=newcode)
if ret is not None:
return ret
# Try canonical variant
if (settings.SIMPLIFY_LANGUAGES
and newcode.lower() in data.DEFAULT_LANGS):
ret = self.try_get(code=lang.lower())
if ret is not None:
return ret
return None if strict else newcode
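    # Illustrative behaviour (assuming neither code is in the database):
    #   fuzzy_get('qq-ww')              -> 'qq_WW'  (the "corrected" code string)
    #   fuzzy_get('qq-ww', strict=True) -> None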
def auto_get_or_create(self, code, create=True):
"""Try to get language using fuzzy_get and create it if that fails."""
ret = self.fuzzy_get(code)
if isinstance(ret, Language):
return ret
# Create new one
return self.auto_create(ret, create)
def auto_create(self, code, create=True):
"""Automatically create new language based on code and best guess
of parameters.
"""
# Create standard language
name = '{0} (generated)'.format(code)
if create:
lang = self.get_or_create(
code=code,
defaults={'name': name},
)[0]
else:
lang = Language(code=code, name=name)
baselang = None
# Check for different variant
if baselang is None and '@' in code:
parts = code.split('@')
baselang = self.fuzzy_get(code=parts[0], strict=True)
# Check for different country
        if baselang is None and ('_' in code or '-' in code):
parts = code.replace('-', '_').split('_')
baselang = self.fuzzy_get(code=parts[0], strict=True)
if baselang is not None:
lang.name = baselang.name
lang.direction = baselang.direction
if create:
lang.save()
baseplural = baselang.plural
lang.plural_set.create(
source=Plural.SOURCE_DEFAULT,
number=baseplural.number,
equation=baseplural.equation,
)
elif create:
lang.plural_set.create(
source=Plural.SOURCE_DEFAULT,
number=2,
equation='n != 1',
)
return lang
def setup(self, update):
"""Create basic set of languages based on languages defined in the
languages-data repo.
"""
# Create Weblate languages
for code, name, nplurals, pluraleq in languages.LANGUAGES:
lang, created = self.get_or_create(
code=code, defaults={'name': name}
)
# Get plural type
plural_type = get_plural_type(lang.base_code, pluraleq)
# Should we update existing?
if update:
lang.name = name
lang.save()
plural_data = {
'type': plural_type,
'number': nplurals,
'equation': pluraleq,
}
try:
plural, created = lang.plural_set.get_or_create(
source=Plural.SOURCE_DEFAULT,
language=lang,
defaults=plural_data,
)
if not created:
modified = False
for item in plural_data:
if getattr(plural, item) != plural_data[item]:
modified = True
setattr(plural, item, plural_data[item])
if modified:
plural.save()
except Plural.MultipleObjectsReturned:
continue
        # Create additional plurals
for code, _unused, nplurals, pluraleq in languages.EXTRAPLURALS:
lang = self.get(code=code)
# Get plural type
plural_type = get_plural_type(lang.base_code, pluraleq)
plural_data = {
'type': plural_type,
}
plural, created = lang.plural_set.get_or_create(
source=Plural.SOURCE_GETTEXT,
language=lang,
number=nplurals,
equation=pluraleq,
defaults=plural_data,
)
if not created:
modified = False
for item in plural_data:
if getattr(plural, item) != plural_data[item]:
modified = True
setattr(plural, item, plural_data[item])
if modified:
plural.save()
def have_translation(self):
"""Return list of languages which have at least one translation."""
return self.filter(translation__pk__gt=0).distinct().order()
def order(self):
return self.order_by('name')
def order_translated(self):
return sort_objects(self)
def setup_lang(sender, **kwargs):
"""Hook for creating basic set of languages on database migration."""
with transaction.atomic():
Language.objects.setup(False)
@python_2_unicode_compatible
class Language(models.Model):
code = models.SlugField(
unique=True,
verbose_name=ugettext_lazy('Language code'),
)
name = models.CharField(
max_length=100,
verbose_name=ugettext_lazy('Language name'),
)
direction = models.CharField(
verbose_name=ugettext_lazy('Text direction'),
max_length=3,
default='ltr',
choices=(
('ltr', ugettext_lazy('Left to right')),
('rtl', ugettext_lazy('Right to left'))
),
)
objects = LanguageQuerySet.as_manager()
class Meta(object):
verbose_name = ugettext_lazy('Language')
verbose_name_plural = ugettext_lazy('Languages')
def __init__(self, *args, **kwargs):
"""Constructor to initialize some cache properties."""
super(Language, self).__init__(*args, **kwargs)
self._plural_examples = {}
self.stats = LanguageStats(self)
def __str__(self):
if self.show_language_code:
return '{0} ({1})'.format(
_(self.name), self.code
)
return _(self.name)
@property
def show_language_code(self):
return self.code not in data.NO_CODE_LANGUAGES
def get_absolute_url(self):
return reverse('show_language', kwargs={'lang': self.code})
def get_html(self):
"""Return html attributes for markup in this language, includes
language and direction.
"""
return mark_safe(
'lang="{0}" dir="{1}"'.format(self.code, self.direction)
)
def save(self, *args, **kwargs):
"""Set default direction for language."""
if self.base_code in data.RTL_LANGS:
self.direction = 'rtl'
else:
self.direction = 'ltr'
return super(Language, self).save(*args, **kwargs)
@cached_property
def base_code(self):
return self.code.replace('_', '-').split('-')[0]
def uses_ngram(self):
return self.base_code in ('ja', 'zh', 'ko')
@cached_property
def plural(self):
return self.plural_set.filter(source=Plural.SOURCE_DEFAULT)[0]
class PluralQuerySet(models.QuerySet):
def order(self):
return self.order_by('source')
@python_2_unicode_compatible
class Plural(models.Model):
PLURAL_CHOICES = (
(
data.PLURAL_NONE,
pgettext_lazy('Plural type', 'None')
),
(
data.PLURAL_ONE_OTHER,
pgettext_lazy('Plural type', 'One/other (classic plural)')
),
(
data.PLURAL_ONE_FEW_OTHER,
pgettext_lazy('Plural type', 'One/few/other (Slavic languages)')
),
(
data.PLURAL_ARABIC,
pgettext_lazy('Plural type', 'Arabic languages')
),
(
data.PLURAL_ZERO_ONE_OTHER,
pgettext_lazy('Plural type', 'Zero/one/other')
),
(
data.PLURAL_ONE_TWO_OTHER,
pgettext_lazy('Plural type', 'One/two/other')
),
(
data.PLURAL_ONE_OTHER_TWO,
pgettext_lazy('Plural type', 'One/other/two')
),
(
data.PLURAL_ONE_TWO_FEW_OTHER,
pgettext_lazy('Plural type', 'One/two/few/other')
),
(
data.PLURAL_OTHER_ONE_TWO_FEW,
pgettext_lazy('Plural type', 'Other/one/two/few')
),
(
data.PLURAL_ONE_TWO_THREE_OTHER,
pgettext_lazy('Plural type', 'One/two/three/other')
),
(
data.PLURAL_ONE_OTHER_ZERO,
pgettext_lazy('Plural type', 'One/other/zero')
),
(
data.PLURAL_ONE_FEW_MANY_OTHER,
pgettext_lazy('Plural type', 'One/few/many/other')
),
(
data.PLURAL_TWO_OTHER,
pgettext_lazy('Plural type', 'Two/other')
),
(
data.PLURAL_ONE_TWO_FEW_MANY_OTHER,
pgettext_lazy('Plural type', 'One/two/few/many/other')
),
(
data.PLURAL_UNKNOWN,
pgettext_lazy('Plural type', 'Unknown')
),
(
data.PLURAL_ZERO_ONE_TWO_THREE_SIX_OTHER,
pgettext_lazy('Plural type', 'Zero/one/two/three/six/other')
),
)
SOURCE_DEFAULT = 0
SOURCE_GETTEXT = 1
SOURCE_MANUAL = 2
source = models.SmallIntegerField(
default=SOURCE_DEFAULT,
verbose_name=ugettext_lazy('Plural definition source'),
choices=(
(SOURCE_DEFAULT, ugettext_lazy('Default plural')),
(SOURCE_GETTEXT, ugettext_lazy('Plural gettext formula')),
(SOURCE_MANUAL, ugettext_lazy('Manually entered formula')),
),
)
number = models.SmallIntegerField(
default=2,
verbose_name=ugettext_lazy('Number of plurals'),
)
equation = models.CharField(
max_length=400,
default='n != 1',
validators=[validate_pluraleq],
blank=False,
verbose_name=ugettext_lazy('Plural equation'),
)
type = models.IntegerField(
choices=PLURAL_CHOICES,
default=data.PLURAL_ONE_OTHER,
verbose_name=ugettext_lazy('Plural type'),
editable=False,
)
language = models.ForeignKey(Language, on_delete=models.deletion.CASCADE)
objects = PluralQuerySet.as_manager()
class Meta(object):
verbose_name = ugettext_lazy('Plural form')
verbose_name_plural = ugettext_lazy('Plural forms')
def __str__(self):
return self.get_type_display()
@cached_property
def plural_form(self):
return 'nplurals={0:d}; plural={1};'.format(
self.number, self.equation
)
@cached_property
def plural_function(self):
return gettext.c2py(
self.equation if self.equation else '0'
)
@cached_property
def examples(self):
result = defaultdict(list)
func = self.plural_function
for i in chain(range(0, 10000), range(10000, 2000001, 1000)):
ret = func(i)
if len(result[ret]) >= 10:
continue
result[ret].append(str(i))
return result
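    # Shape of the result (illustrative, for the two-form 'n != 1' equation):
    #   {0: ['1'], 1: ['0', '2', '3', ...]} -- each plural-form index maps to
    #   at most ten example counts.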
@staticmethod
def parse_formula(plurals):
matches = PLURAL_RE.match(plurals)
if matches is None:
raise ValueError('Failed to parse formula')
number = int(matches.group(1))
formula = matches.group(2)
if not formula:
formula = '0'
# Try to parse the formula
gettext.c2py(formula)
return number, formula
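    # Illustrative parse:
    #   parse_formula('nplurals=2; plural=(n != 1);')  -> (2, '(n != 1)')
    # Anything that does not match PLURAL_RE raises ValueError instead.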
def same_plural(self, number, equation):
"""Compare whether given plurals formula matches"""
if number != self.number or not equation:
return False
# Convert formulas to functions
ours = self.plural_function
theirs = gettext.c2py(equation)
# Compare equation results
# It would be better to compare formulas,
# but this was easier to implement and the performance
# is still okay.
for i in range(-10, 200):
if ours(i) != theirs(i):
return False
return True
def get_plural_label(self, idx):
"""Return label for plural form."""
return PLURAL_TITLE.format(
name=self.get_plural_name(idx),
# Translators: Label for plurals with example counts
examples=_('For example: {0}').format(
', '.join(self.examples.get(idx, []))
)
)
def get_plural_name(self, idx):
"""Return name for plural form."""
try:
return force_text(data.PLURAL_NAMES[self.type][idx])
except (IndexError, KeyError):
if idx == 0:
return _('Singular')
if idx == 1:
return _('Plural')
return _('Plural form %d') % idx
def list_plurals(self):
for i in range(self.number):
yield {
'index': i,
'name': self.get_plural_name(i),
'examples': ', '.join(self.examples.get(i, []))
}
def save(self, *args, **kwargs):
self.type = get_plural_type(self.language.base_code, self.equation)
# Try to calculate based on equation
if self.type == data.PLURAL_UNKNOWN:
for equations, plural in data.PLURAL_MAPPINGS:
for equation in equations:
if self.same_plural(self.number, equation):
self.type = plural
break
if self.type != data.PLURAL_UNKNOWN:
break
super(Plural, self).save(*args, **kwargs)
def get_absolute_url(self):
return '{}#information'.format(
reverse('show_language', kwargs={'lang': self.language.code})
)
class WeblateLanguagesConf(AppConf):
"""Languages settings."""
# Use simple language codes for default language/country combinations
SIMPLIFY_LANGUAGES = True
class Meta(object):
prefix = ''
| dontnod/weblate | weblate/lang/models.py | Python | gpl-3.0 | 20,870 |
# -*- coding: utf-8 -*-
"""
HipparchiaServer: an interface to a database of Greek and Latin texts
Copyright: E Gunderson 2016-21
License: GNU GENERAL PUBLIC LICENSE 3
(see LICENSE in the top level directory of the distribution)
"""
import re
from collections import defaultdict
try:
from rich.progress import track
except ImportError:
track = None
from server.formatting.miscformatting import timedecorator
from server.formatting.wordformatting import stripaccents
"""
simple loaders called when HipparchiaServer launches
these lists will contain (more or less...) globally available values
the main point is to avoid constant calls to the DB
for commonly used info
"""
def buildaugenresdict(authordict: dict) -> dict:
"""
build lists of author genres: [ g1, g2, ...]
do this by corpus and tag it accordingly
:param authordict:
:return:
"""
gklist = list()
ltlist = list()
inlist = list()
dplist = list()
chlist = list()
genresdict = {'gr': gklist, 'lt': ltlist, 'in': inlist, 'dp': dplist, 'ch': chlist}
for a in authordict:
if authordict[a].genres and authordict[a].genres != '':
g = authordict[a].genres.split(',')
l = authordict[a].universalid[0:2]
genresdict[l] += g
for l in ['gr', 'lt', 'in', 'dp', 'ch']:
genresdict[l] = list(set(genresdict[l]))
genresdict[l] = [re.sub(r'^\s|\s$', '', x) for x in genresdict[l]]
genresdict[l].sort()
return genresdict
def buildworkgenresdict(workdict: dict) -> dict:
"""
load up the list of work genres: [ g1, g2, ...]
this will see heavy use throughout the world of 'views.py'
:param workdict:
:return:
"""
gklist = list()
ltlist = list()
inlist = list()
dplist = list()
chlist = list()
genresdict = {'gr': gklist, 'lt': ltlist, 'in': inlist, 'dp': dplist, 'ch': chlist}
for w in workdict:
if workdict[w].workgenre and workdict[w].workgenre != '':
g = workdict[w].workgenre.split(',')
lg = workdict[w].universalid[0:2]
genresdict[lg] += g
for lg in ['gr', 'lt', 'in', 'dp', 'ch']:
genresdict[lg] = list(set(genresdict[lg]))
genresdict[lg] = [re.sub(r'^\s|\s$', '', x) for x in genresdict[lg]]
genresdict[lg].sort()
return genresdict
def buildauthorlocationdict(authordict: dict) -> dict:
"""
build lists of author locations: [ g1, g2, ...]
do this by corpus and tag it accordingly
:param authordict:
:return:
"""
gklist = list()
ltlist = list()
inlist = list()
dplist = list()
chlist = list()
locationdict = {'gr': gklist, 'lt': ltlist, 'in': inlist, 'dp': dplist, 'ch': chlist}
for a in authordict:
if authordict[a].location and authordict[a].location != '':
# think about what happens if the location looks like 'Italy, Africa and the West [Chr.]'...
loc = authordict[a].location.split(',')
lg = authordict[a].universalid[0:2]
locationdict[lg] += loc
for lg in ['gr', 'lt', 'in', 'dp', 'ch']:
locationdict[lg] = list(set(locationdict[lg]))
locationdict[lg] = [re.sub(r'\[.*?\]', '', x) for x in locationdict[lg]]
locationdict[lg] = [re.sub(r'^\s|\s$', '', x) for x in locationdict[lg]]
locationdict[lg].sort()
return locationdict
def buildworkprovenancedict(workdict: dict) -> dict:
"""
load up the list of work provenances
used in offerprovenancehints()
:param workdict:
:return:
"""
gklist = list()
ltlist = list()
inlist = list()
dplist = list()
chlist = list()
locationdict = {'gr': gklist, 'lt': ltlist, 'in': inlist, 'dp': dplist, 'ch': chlist }
for w in workdict:
if workdict[w].provenance and workdict[w].provenance != '':
loc = workdict[w].provenance.split(',')
lg = workdict[w].universalid[0:2]
locationdict[lg] += loc
for lg in ['gr', 'lt', 'in', 'dp', 'ch']:
locationdict[lg] = list(set(locationdict[lg]))
locationdict[lg] = [re.sub(r'^\s|\s$', '', x) for x in locationdict[lg]]
locationdict[lg].sort()
return locationdict
@timedecorator
def buildkeyedlemmata(listofentries: list) -> defaultdict:
"""
    a list of 140k words is too long to send to 'getlemmahint' without offering quicker access,
    so build a dict keyed on the normalized first two letters of each entry
:param listofentries:
:return:
"""
invals = u'jvσς'
outvals = u'iuϲϲ'
keyedlemmata = defaultdict(list)
if track:
iterable = track(listofentries, description='building keyedlemmata', transient=True)
else:
print('building keyedlemmata', end=str())
iterable = listofentries
for e in iterable:
try:
# might IndexError here...
bag = e[0:2]
key = stripaccents(bag.translate(str.maketrans(invals, outvals)))
try:
keyedlemmata[key].append(e)
except KeyError:
keyedlemmata[key] = [e]
except IndexError:
pass
    if not track:
        print(' ... done')
return keyedlemmata
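# Illustrative key derivation (hypothetical entries, not from a real word list):
#   'jussum' -> first two letters 'ju' -> 'iu' after the jvσς -> iuϲϲ translation,
#   so it would land in keyedlemmata['iu']; a Greek 'σφίγξ' maps to key 'ϲφ'.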
| e-gun/HipparchiaServer | server/listsandsession/sessiondicts.py | Python | gpl-3.0 | 4,686 |
#! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
import copy,re,os
from waflib import Task,Utils,Logs,Errors,ConfigSet,Node
feats=Utils.defaultdict(set)
class task_gen(object):
mappings={}
prec=Utils.defaultdict(list)
def __init__(self,*k,**kw):
self.source=''
self.target=''
self.meths=[]
self.prec=Utils.defaultdict(list)
self.mappings={}
self.features=[]
self.tasks=[]
if not'bld'in kw:
self.env=ConfigSet.ConfigSet()
self.idx=0
self.path=None
else:
self.bld=kw['bld']
self.env=self.bld.env.derive()
self.path=self.bld.path
try:
self.idx=self.bld.idx[id(self.path)]=self.bld.idx.get(id(self.path),0)+1
except AttributeError:
self.bld.idx={}
self.idx=self.bld.idx[id(self.path)]=1
for key,val in kw.items():
setattr(self,key,val)
def __str__(self):
return"<task_gen %r declared in %s>"%(self.name,self.path.abspath())
def __repr__(self):
lst=[]
for x in self.__dict__.keys():
if x not in['env','bld','compiled_tasks','tasks']:
lst.append("%s=%s"%(x,repr(getattr(self,x))))
return"bld(%s) in %s"%(", ".join(lst),self.path.abspath())
def get_name(self):
try:
return self._name
except AttributeError:
if isinstance(self.target,list):
lst=[str(x)for x in self.target]
name=self._name=','.join(lst)
else:
name=self._name=str(self.target)
return name
def set_name(self,name):
self._name=name
name=property(get_name,set_name)
def to_list(self,val):
if isinstance(val,str):return val.split()
else:return val
def post(self):
if getattr(self,'posted',None):
return False
self.posted=True
keys=set(self.meths)
self.features=Utils.to_list(self.features)
for x in self.features+['*']:
st=feats[x]
if not st:
if not x in Task.classes:
Logs.warn('feature %r does not exist - bind at least one method to it'%x)
keys.update(list(st))
prec={}
prec_tbl=self.prec or task_gen.prec
for x in prec_tbl:
if x in keys:
prec[x]=prec_tbl[x]
tmp=[]
for a in keys:
for x in prec.values():
if a in x:break
else:
tmp.append(a)
tmp.sort()
out=[]
while tmp:
e=tmp.pop()
if e in keys:out.append(e)
try:
nlst=prec[e]
except KeyError:
pass
else:
del prec[e]
for x in nlst:
for y in prec:
if x in prec[y]:
break
else:
tmp.append(x)
if prec:
raise Errors.WafError('Cycle detected in the method execution %r'%prec)
out.reverse()
self.meths=out
Logs.debug('task_gen: posting %s %d'%(self,id(self)))
for x in out:
try:
v=getattr(self,x)
except AttributeError:
raise Errors.WafError('%r is not a valid task generator method'%x)
Logs.debug('task_gen: -> %s (%d)'%(x,id(self)))
v()
Logs.debug('task_gen: posted %s'%self.name)
return True
def get_hook(self,node):
name=node.name
for k in self.mappings:
if name.endswith(k):
return self.mappings[k]
for k in task_gen.mappings:
if name.endswith(k):
return task_gen.mappings[k]
raise Errors.WafError("File %r has no mapping in %r (did you forget to load a waf tool?)"%(node,task_gen.mappings.keys()))
def create_task(self,name,src=None,tgt=None):
task=Task.classes[name](env=self.env.derive(),generator=self)
if src:
task.set_inputs(src)
if tgt:
task.set_outputs(tgt)
self.tasks.append(task)
return task
def clone(self,env):
newobj=self.bld()
for x in self.__dict__:
if x in['env','bld']:
continue
elif x in['path','features']:
setattr(newobj,x,getattr(self,x))
else:
setattr(newobj,x,copy.copy(getattr(self,x)))
newobj.posted=False
if isinstance(env,str):
newobj.env=self.bld.all_envs[env].derive()
else:
newobj.env=env.derive()
return newobj
def declare_chain(name='',rule=None,reentrant=None,color='BLUE',ext_in=[],ext_out=[],before=[],after=[],decider=None,scan=None,install_path=None,shell=False):
ext_in=Utils.to_list(ext_in)
ext_out=Utils.to_list(ext_out)
if not name:
name=rule
cls=Task.task_factory(name,rule,color=color,ext_in=ext_in,ext_out=ext_out,before=before,after=after,scan=scan,shell=shell)
def x_file(self,node):
ext=decider and decider(self,node)or cls.ext_out
if ext_in:
_ext_in=ext_in[0]
tsk=self.create_task(name,node)
cnt=0
keys=list(self.mappings.keys())+list(self.__class__.mappings.keys())
for x in ext:
k=node.change_ext(x,ext_in=_ext_in)
tsk.outputs.append(k)
if reentrant!=None:
if cnt<int(reentrant):
self.source.append(k)
else:
for y in keys:
if k.name.endswith(y):
self.source.append(k)
break
cnt+=1
if install_path:
self.bld.install_files(install_path,tsk.outputs)
return tsk
for x in cls.ext_in:
task_gen.mappings[x]=x_file
return x_file
def taskgen_method(func):
setattr(task_gen,func.__name__,func)
return func
def feature(*k):
def deco(func):
setattr(task_gen,func.__name__,func)
for name in k:
feats[name].update([func.__name__])
return func
return deco
def before_method(*k):
def deco(func):
setattr(task_gen,func.__name__,func)
for fun_name in k:
if not func.__name__ in task_gen.prec[fun_name]:
task_gen.prec[fun_name].append(func.__name__)
return func
return deco
before=before_method
def after_method(*k):
def deco(func):
setattr(task_gen,func.__name__,func)
for fun_name in k:
if not fun_name in task_gen.prec[func.__name__]:
task_gen.prec[func.__name__].append(fun_name)
return func
return deco
after=after_method
def extension(*k):
def deco(func):
setattr(task_gen,func.__name__,func)
for x in k:
task_gen.mappings[x]=func
return func
return deco
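# Illustrative use of the decorator (a hypothetical tool, not part of this file):
#
#     @extension('.coco')
#     def process_coco(self, node):
#         self.create_task('coco2c', node, node.change_ext('.c'))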
@taskgen_method
def to_nodes(self,lst,path=None):
tmp=[]
path=path or self.path
find=path.find_resource
if isinstance(lst,self.path.__class__):
lst=[lst]
for x in Utils.to_list(lst):
if isinstance(x,str):
node=find(x)
else:
node=x
if not node:
raise Errors.WafError("source not found: %r in %r"%(x,self))
tmp.append(node)
return tmp
@feature('*')
def process_source(self):
self.source=self.to_nodes(getattr(self,'source',[]))
for node in self.source:
self.get_hook(node)(self,node)
@feature('*')
@before_method('process_source')
def process_rule(self):
if not getattr(self,'rule',None):
return
name=str(getattr(self,'name',None)or self.target or self.rule)
try:
cache=self.bld.cache_rule_attr
except AttributeError:
cache=self.bld.cache_rule_attr={}
cls=None
if getattr(self,'cache_rule','True'):
try:
cls=cache[(name,self.rule)]
except KeyError:
pass
if not cls:
cls=Task.task_factory(name,self.rule,getattr(self,'vars',[]),shell=getattr(self,'shell',True),color=getattr(self,'color','BLUE'),scan=getattr(self,'scan',None))
if getattr(self,'scan',None):
cls.scan=self.scan
elif getattr(self,'deps',None):
def scan(self):
nodes=[]
for x in self.generator.to_list(getattr(self.generator,'deps',None)):
node=self.generator.path.find_resource(x)
if not node:
self.generator.bld.fatal('Could not find %r (was it declared?)'%x)
nodes.append(node)
return[nodes,[]]
cls.scan=scan
if getattr(self,'update_outputs',None):
Task.update_outputs(cls)
if getattr(self,'always',None):
Task.always_run(cls)
for x in['after','before','ext_in','ext_out']:
setattr(cls,x,getattr(self,x,[]))
if getattr(self,'cache_rule','True'):
cache[(name,self.rule)]=cls
tsk=self.create_task(name)
if getattr(self,'target',None):
if isinstance(self.target,str):
self.target=self.target.split()
if not isinstance(self.target,list):
self.target=[self.target]
for x in self.target:
if isinstance(x,str):
tsk.outputs.append(self.path.find_or_declare(x))
else:
x.parent.mkdir()
tsk.outputs.append(x)
if getattr(self,'install_path',None):
self.bld.install_files(self.install_path,tsk.outputs)
if getattr(self,'source',None):
tsk.inputs=self.to_nodes(self.source)
self.source=[]
if getattr(self,'cwd',None):
tsk.cwd=self.cwd
@feature('seq')
def sequence_order(self):
if self.meths and self.meths[-1]!='sequence_order':
self.meths.append('sequence_order')
return
if getattr(self,'seq_start',None):
return
if getattr(self.bld,'prev',None):
self.bld.prev.post()
for x in self.bld.prev.tasks:
for y in self.tasks:
y.set_run_after(x)
self.bld.prev=self
re_m4=re.compile('@(\w+)@',re.M)
class subst_pc(Task.Task):
def run(self):
if getattr(self.generator,'is_copy',None):
self.outputs[0].write(self.inputs[0].read('rb'),'wb')
if getattr(self.generator,'chmod',None):
os.chmod(self.outputs[0].abspath(),self.generator.chmod)
return
code=self.inputs[0].read(encoding=getattr(self.generator,'encoding','ISO8859-1'))
code=code.replace('%','%%')
lst=[]
def repl(match):
g=match.group
if g(1):
lst.append(g(1))
return"%%(%s)s"%g(1)
return''
code=re_m4.sub(repl,code)
try:
d=self.generator.dct
except AttributeError:
d={}
for x in lst:
tmp=getattr(self.generator,x,'')or self.env.get_flat(x)or self.env.get_flat(x.upper())
d[x]=str(tmp)
code=code%d
self.outputs[0].write(code,encoding=getattr(self.generator,'encoding','ISO8859-1'))
self.generator.bld.raw_deps[self.uid()]=self.dep_vars=lst
try:delattr(self,'cache_sig')
except AttributeError:pass
if getattr(self.generator,'chmod',None):
os.chmod(self.outputs[0].abspath(),self.generator.chmod)
def sig_vars(self):
bld=self.generator.bld
env=self.env
upd=self.m.update
vars=self.generator.bld.raw_deps.get(self.uid(),[])
act_sig=bld.hash_env_vars(env,vars)
upd(act_sig)
lst=[getattr(self.generator,x,'')for x in vars]
upd(Utils.h_list(lst))
return self.m.digest()
@extension('.pc.in')
def add_pcfile(self,node):
tsk=self.create_task('subst_pc',node,node.change_ext('.pc','.pc.in'))
self.bld.install_files(getattr(self,'install_path','${LIBDIR}/pkgconfig/'),tsk.outputs)
class subst(subst_pc):
pass
@feature('subst')
@before_method('process_source','process_rule')
def process_subst(self):
src=Utils.to_list(getattr(self,'source',[]))
if isinstance(src,Node.Node):
src=[src]
tgt=Utils.to_list(getattr(self,'target',[]))
if isinstance(tgt,Node.Node):
tgt=[tgt]
if len(src)!=len(tgt):
raise Errors.WafError('invalid number of source/target for %r'%self)
for x,y in zip(src,tgt):
if not x or not y:
raise Errors.WafError('null source or target for %r'%self)
a,b=None,None
if isinstance(x,str)and isinstance(y,str)and x==y:
a=self.path.find_node(x)
b=self.path.get_bld().make_node(y)
if not os.path.isfile(b.abspath()):
b.sig=None
b.parent.mkdir()
else:
if isinstance(x,str):
a=self.path.find_resource(x)
elif isinstance(x,Node.Node):
a=x
if isinstance(y,str):
b=self.path.find_or_declare(y)
elif isinstance(y,Node.Node):
b=y
if not a:
raise Errors.WafError('cound not find %r for %r'%(x,self))
has_constraints=False
tsk=self.create_task('subst',a,b)
for k in('after','before','ext_in','ext_out'):
val=getattr(self,k,None)
if val:
has_constraints=True
setattr(tsk,k,val)
if not has_constraints and b.name.endswith('.h'):
tsk.before=[k for k in('c','cxx')if k in Task.classes]
inst_to=getattr(self,'install_path',None)
if inst_to:
self.bld.install_files(inst_to,b,chmod=getattr(self,'chmod',Utils.O644))
self.source=[]
| arnov-sinha/sFFT-OpenACC-Library | tools/.waf-1.7.5-47d5afdb5e7e2856f7f46a7101914026/waflib/TaskGen.py | Python | gpl-3.0 | 11,393 |
from umlfri2.ufl.components.base.componenttype import ComponentType
from umlfri2.types.image import Image
from .componentloader import ComponentLoader
from ....constants import ADDON_NAMESPACE, ADDON_SCHEMA
from .structureloader import UflStructureLoader
from umlfri2.ufl.components.valueproviders import ConstantValueProvider, DynamicValueProvider
from umlfri2.ufl.components.text import TextContainerComponent
from umlfri2.metamodel import DiagramType
class DiagramTypeLoader:
def __init__(self, storage, xmlroot, file_name, elements, connections):
self.__storage = storage
self.__xmlroot = xmlroot
self.__file_name = file_name
if not ADDON_SCHEMA.validate(xmlroot):
raise Exception("Cannot load diagram type: {0}".format(ADDON_SCHEMA.error_log.last_error))
self.__elements = elements
self.__connections = connections
def load(self):
id = self.__xmlroot.attrib["id"]
icon = None
ufl_type = None
display_name = None
background = None
connections = []
elements = []
for child in self.__xmlroot:
if child.tag == "{{{0}}}Icon".format(ADDON_NAMESPACE):
icon_path = child.attrib["path"]
if not self.__storage.exists(icon_path):
raise Exception("Unknown icon {0}".format(icon_path))
icon = Image(self.__storage, icon_path)
elif child.tag == "{{{0}}}Structure".format(ADDON_NAMESPACE):
ufl_type = UflStructureLoader(child, self.__file_name).load()
elif child.tag == "{{{0}}}DisplayName".format(ADDON_NAMESPACE):
display_name = TextContainerComponent(
ComponentLoader(child, ComponentType.text, self.__file_name).load()
)
elif child.tag == "{{{0}}}Connections".format(ADDON_NAMESPACE):
for childchild in child:
connections.append(childchild.attrib["id"])
elif child.tag == "{{{0}}}Elements".format(ADDON_NAMESPACE):
for childchild in child:
elements.append(childchild.attrib["id"])
elif child.tag == "{{{0}}}Appearance".format(ADDON_NAMESPACE):
for childchild in child:
if childchild.tag == "{{{0}}}Background".format(ADDON_NAMESPACE):
attrvalue = childchild.attrib["color"]
if attrvalue.startswith("##"):
background = ConstantValueProvider(attrvalue[1:])
elif attrvalue.startswith("#"):
background = DynamicValueProvider(attrvalue[1:])
else:
background = ConstantValueProvider(attrvalue)
else:
                        raise Exception("Unknown appearance element: {0}".format(childchild.tag))
elements = tuple(self.__elements[id] for id in elements)
connections = tuple(self.__connections[id] for id in connections)
return DiagramType(id, icon, ufl_type, display_name, elements, connections, background)
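# Sketch of the XML children this loader expects under the diagram-type root
# (tag names inside Elements/Connections and all attribute values are
# placeholders inferred from the parser above):
#   <Icon path="..."/>, <Structure>...</Structure>, <DisplayName>...</DisplayName>,
#   <Elements> and <Connections> holding id-carrying children, and
#   <Appearance><Background color="#..."/></Appearance>
# All tags live in the ADDON_NAMESPACE.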
| umlfri/umlfri2 | umlfri2/datalayer/loaders/addon/metamodel/diagramtypeloader.py | Python | gpl-3.0 | 3,129 |
"""
sample AWS SQS enqueue an item
"""
import boto3 # AWS API python module
import json
# open a connection to the SQS service
# (using ~/.aws/credentials for access info)
sqs = boto3.resource('sqs')
# print queues in existence
#for queue in sqs.queues.all():
# print(queue.url)
# create (or return existing) queue of this name
try:
queue = sqs.get_queue_by_name(QueueName='sample_queue')
except:
try:
queue = sqs.create_queue(QueueName='sample_queue', Attributes={'DelaySeconds': '5'})
except:
# print exception in case sqs could not create the queue
import sys, traceback, uuid, datetime
# API is "exc_traceback = sys.exc_info(out exc_type, out exc_value)"
# next stmt: out parameters come before equal return variable
exc_type, exc_value, exc_traceback = sys.exc_info()
e = traceback.format_exception(exc_type, exc_value, exc_traceback)
e = ''.join(e)
uniqueErrorId = uuid.uuid4()
data = {'status': 'E', 'uuid': uniqueErrorId, 'exception': str(e), 'now': datetime.datetime.utcnow()}
print("Exception caught during SQS.create_queue: '{0}'".format(data))
sys.exit(0) # forces script exit without a traceback printed
# print queue url for diagnostics
print(queue.url)
# in a real program one would not purge the queue, but here is how to do it
try:
queue.purge()
except:
    # SQS allows purging a queue only once every 60 seconds...
pass # <<== this is a noop stmt in python; this will "eat" the exception and continue execution
# format the message to be sent
msgBody = """
{
"verb": "SCAN",
"TCPIP": "97.80.230.155"
}
"""
print('original msg: "{0}"'.format(msgBody))
body = json.loads(msgBody)
print('msg body after json.loads: "{0}"'.format(json.dumps(body)))
# send the message
string_msg = json.dumps(body)
string_msgAttributes = {
'Author': {
'StringValue': __file__, # <== "sample_post_sqs.py"
'DataType': 'String'
}
}
try:
queue.send_message(MessageBody=string_msg,
MessageAttributes=string_msgAttributes)
except:
import sys, traceback, uuid, datetime
# API is "exc_traceback = sys.exc_info(out exc_type, out exc_value)"
# next stmt: out parameters come before equal return variable
exc_type, exc_value, exc_traceback = sys.exc_info()
e = traceback.format_exception(exc_type, exc_value, exc_traceback)
e = ''.join(e)
uniqueErrorId = uuid.uuid4()
data = {'status': 'E', 'uuid': uniqueErrorId, 'exception': str(e), 'now': datetime.datetime.utcnow()}
print("Exception caught during SQS.create_queue: '{0}'".format(data))
sys.exit(0) # forces script exit without a traceback printed
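# For completeness, a receive-side sketch (assumed usage mirroring the enqueue
# above; not part of the original sample):
#
#     for message in queue.receive_messages(MessageAttributeNames=['Author'],
#                                           MaxNumberOfMessages=1,
#                                           WaitTimeSeconds=10):
#         payload = json.loads(message.body)
#         print('received verb "{0}" for {1}'.format(payload['verb'], payload['TCPIP']))
#         message.delete()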
| opedroso/aws-python-examples | sqs/sample_post_sqs.py | Python | gpl-3.0 | 2,736 |
from Products.CMFCore.utils import getToolByName
from cStringIO import StringIO
from Products.OpenPlans.Extensions.Install import installZ3Types
from Products.OpenPlans.Extensions.setup import migrate_listen_member_lookup
from Products.OpenPlans.Extensions.setup import reinstallSubskins
def migrate_listen(self):
out = StringIO()
# set listen to the active property configuration
portal_setup_tool = getToolByName(self, 'portal_setup')
portal_setup_tool.setImportContext('profile-listen:default')
out.write('updated generic setup tool properties\n')
# run the listen import step
portal_setup_tool.runImportStep('listen-various', True)
out.write('Listen import step run\n')
# remove the open mailing list fti
portal_types = getToolByName(self, 'portal_types')
portal_types.manage_delObjects(['Open Mailing List'])
out.write('open mailing list fti removed\n')
# now run the install z3types to add it back
installZ3Types(self, out)
out.write('z3 types installed (Open Mailing List fti)\n')
# migrate the listen local utility
# self, self?
migrate_listen_member_lookup(self, self)
out.write('listen member lookup utility migrated\n')
# remove openplans skins
portal_skins = getToolByName(self, 'portal_skins')
skins_to_remove = ['openplans', 'openplans_images', 'openplans_patches', 'openplans_richwidget', 'openplans_styles']
portal_skins.manage_delObjects(skins_to_remove)
out.write('removed openplans skins: %s\n' % ', '.join(skins_to_remove))
# reinstall openplans skins
portal_quickinstaller = getToolByName(self, 'portal_quickinstaller')
portal_quickinstaller.reinstallProducts(['opencore.siteui'])
out.write('reinstall opencore.siteui - openplans skins\n')
# reinstall subskins
reinstallSubskins(self, self)
out.write('subskins reinstalled\n')
# run listen migration?
return out.getvalue()
| socialplanning/opencore | Products/OpenPlans/Extensions/migrate_listen.py | Python | gpl-3.0 | 1,938 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2016-11-13 09:57
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('db', '0077_auto_20161113_2049'),
]
operations = [
migrations.AlterModelOptions(
name='learningresource',
options={'verbose_name': 'lr', 'verbose_name_plural': 'lr'},
),
]
| caw/curriculum | db/migrations/0078_auto_20161113_2057.py | Python | gpl-3.0 | 444 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-02-21 05:36
from __future__ import unicode_literals
import datetime
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='QuizInfo',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('quiz_title', models.CharField(max_length=100)),
('quizDate', models.DateField(default=datetime.date.today)),
('quizTime', models.TimeField(default=datetime.time)),
],
),
migrations.CreateModel(
name='QuizQuestions',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('question', models.CharField(max_length=200)),
('isMultipleChoice', models.CharField(max_length=1)),
('choice_1', models.CharField(blank=True, max_length=10, null=True)),
('choice_2', models.CharField(blank=True, max_length=10, null=True)),
('choice_3', models.CharField(blank=True, max_length=10, null=True)),
('choice_4', models.CharField(blank=True, max_length=10, null=True)),
('answer', models.CharField(blank=True, max_length=200, null=True)),
('quiz', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='quiz.QuizInfo')),
],
),
migrations.CreateModel(
name='StudentProfile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('school', models.CharField(blank=True, max_length=200, null=True)),
('standard', models.PositiveSmallIntegerField(blank=True, null=True)),
('doj', models.DateField(blank=True, default=datetime.date.today, null=True)),
('student', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, to_field='username', unique=True)),
],
),
migrations.CreateModel(
name='StudentQuizAttempts',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('attempt_date', models.DateField(default=datetime.date.today)),
('score', models.PositiveSmallIntegerField(default=0)),
('quiz', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='quiz.QuizInfo')),
('student', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='quiz.StudentProfile', to_field='student')),
],
),
migrations.CreateModel(
name='StudentResponses',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('response', models.CharField(blank=True, max_length=200, null=True)),
('question', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='quiz.QuizQuestions')),
('quiz', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='quiz.QuizInfo')),
('student', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='quiz.StudentProfile', to_field='student')),
],
),
migrations.CreateModel(
name='TeacherProfile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('doj', models.DateField(blank=True, default=datetime.date.today, null=True)),
('teacher', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, to_field='username')),
],
),
migrations.CreateModel(
name='TeacherS',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('student', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='quiz.StudentProfile')),
('teacher', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='quiz.TeacherProfile')),
],
),
migrations.AlterUniqueTogether(
name='teachers',
unique_together=set([('teacher', 'student')]),
),
migrations.AlterUniqueTogether(
name='studentresponses',
unique_together=set([('student', 'quiz', 'question')]),
),
migrations.AlterUniqueTogether(
name='studentquizattempts',
unique_together=set([('student', 'quiz')]),
),
]
| rubyAce71697/QMS | quiz_system/quiz/migrations/0001_initial.py | Python | gpl-3.0 | 5,170 |
#!/usr/bin/env python
"""
Copyright (C) 2015 Ivan Gregor
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
The master script of the Snowball gene assembler. Module version 1.2
"""
import os
import sys
import platform
import tempfile
import shutil
import argparse
import multiprocessing as mp
from algbioi.com import fq
from algbioi.com import parallel
from algbioi.hsim import comh
from algbioi.haplo import hmain
from algbioi.haplo import hio
def mainSnowball(fq1Path, fq2Path, profileHmmFile, insertSize, readLen=None, outFile=None, outFormat='fna',
workingDir=None, hmmsearchPath=None, pfamMinScore=None, pOverlap=None, overlapLen=None,
outAnnot=False, cleanUp=False, processors=mp.cpu_count()):
"""
Main function, the interface of the Snowball gene assembler.
@param fq1Path: FASTQ 1 file path (containing first ends of Illumina paired-end reads)
@param fq2Path: FASTQ 2 file path (containing second ends)
@param profileHmmFile: profile HMMs file, containing models generated by the HMMER 3 software
@param insertSize: mean insert size used for the library preparation (i.e. read generation)
@param readLen: read length (if None, it will be derived from the FASTQ file)
@param outFile: output file (if None, it will be derived from the FASTQ 1 file)
@param outFormat: output file format 'fna' or 'fq'
@param workingDir: temporary files will be stored here (if None, a temporary directory will be created/deleted)
@param hmmsearchPath: path to the HMMER hmmsearch command (if None, it will take version that is in the PATH)
@param pfamMinScore: minimum score for the hmmsearch (if None, use default)
@param pOverlap: minimum overlap probability for the Snowball algorithm
@param overlapLen: minimum overlap length for the Snowball algorithm
@param outAnnot: if true, additional annotation will be stored along with the resulting contigs
@param cleanUp: if true, delete temporary files at the end
@param processors: Number of processors (default: use all processors available)
@type fq1Path: str
@type fq2Path: str
@type profileHmmFile: str
@type insertSize: int
    @type readLen: int
@type outFile: str
@type outFormat: str
@type workingDir: str
@type hmmsearchPath: str
@type pfamMinScore: int
@type pOverlap: float
@type overlapLen: float
@type outAnnot: bool
@type cleanUp: bool
@type processors: int
"""
assert os.name == 'posix', 'Snowball runs only on "posix" systems, your system is: %s' % os.name
# checking input parameters
assert os.path.isfile(fq1Path), 'File does not exist: %s' % fq1Path
assert os.path.isfile(fq2Path), 'File does not exist: %s' % fq2Path
# derive the read length
if readLen is None:
for name, dna, p, qs in fq.ReadFqGen(fq1Path):
readLen = len(dna)
assert readLen == len(qs), 'File corrupted %s' % fq1Path
break
assert readLen is not None, 'Cannot derive read length from %s' % fq1Path
assert readLen <= insertSize < 2 * readLen, 'Invalid read length (%s) and insert size (%s) combination' \
% (readLen, insertSize)
assert os.path.isfile(profileHmmFile), 'File does not exist: %s' % profileHmmFile
outFormat = outFormat.strip()
assert outFormat == 'fna' or outFormat == 'fq', 'Invalid output format: %s' % outFormat
# checking the output file
if outFile is None:
c = 0
while True:
outFile = fq1Path + '_%s.%s.gz' % (c, outFormat)
if not os.path.isfile(outFile):
break
c += 1
else:
outFileDir = os.path.dirname(outFile)
assert os.path.basename(outFile) != '', 'Output file name is empty'
assert outFileDir == '' or os.path.isdir(outFileDir), 'Invalid output directory: %s' % outFileDir
outFile = outFile.strip()
if not outFile.endswith('.gz'):
outFile += '.gz'
print('The name of the output file was modified to:\n\t%s' % outFile)
# Looking for the hmmsearch binaries
if hmmsearchPath is None:
hmmsearchPath = os.popen("which hmmsearch").read().strip()
if hmmsearchPath != '':
print('This hmmsearch binary will be used:\n\t%s' % hmmsearchPath)
assert os.path.isfile(hmmsearchPath), 'Path for (hmmsearch) is invalid: %s' % hmmsearchPath
# creates a temporary working directory
if workingDir is None:
workingDir = tempfile.mkdtemp(prefix='snowball_')
assert os.path.isdir(workingDir), 'Cannot create temporary working directory (%s)' % workingDir
        cleanUpTmpWorkingDir = True
print('Using temporary directory:\n\t%s' % workingDir)
else:
        cleanUpTmpWorkingDir = False
assert os.path.isdir(workingDir), 'Working directory does not exist:\n\t%s' % workingDir
assert not os.listdir(workingDir), 'Working directory must be empty:\n\t%s' % workingDir
# set the number of processor cores to be used
comh.MAX_PROC = max(1, min(processors, mp.cpu_count()))
# set assembly parameters or use defaults
if pfamMinScore is not None:
comh.SAMPLES_PFAM_EVAN_MIN_SCORE = pfamMinScore
if pOverlap is not None:
comh.ASSEMBLY_POVERLAP = (pOverlap,)
if overlapLen is not None:
comh.ASSEMBLY_OVERLAP_LEN = (overlapLen,)
# creates a temporary directory for the sample strains
strainsDir = os.path.join(workingDir, 'strains')
if not os.path.isdir(strainsDir):
os.mkdir(strainsDir)
assert os.path.isdir(strainsDir), 'Cannot create temporary directory:\n\t%s' % strainsDir
os.symlink(fq1Path, os.path.join(strainsDir, '0_pair1.fq.gz'))
os.symlink(fq2Path, os.path.join(strainsDir, '0_pair2.fq.gz'))
# Start of the algorithm
print('Running on: %s (%s)' % (' '.join(platform.dist()), sys.platform))
print('Using %s processors' % comh.MAX_PROC)
print('Settings:\n\tRead length: %s\n\tInsert size: %s\n\tMin. overlap probability: %s\n\tMin. overlap length: %s'
'\n\tMin. HMM score: %s'
% (readLen, insertSize, comh.ASSEMBLY_POVERLAP[0], comh.ASSEMBLY_OVERLAP_LEN[0],
comh.SAMPLES_PFAM_EVAN_MIN_SCORE))
# file with joined consensus reads
fqJoinPath = os.path.join(strainsDir, '0_join.fq.gz')
# join paired-end reads
if True: # to skip this step, set to False (e.g. resume processing after OS/HW failure)
print('Joining paired-end reads into consensus reads, loading reads from:\n\t%s\n\t%s' % (fq1Path, fq2Path))
r = fq.joinPairEnd([(fq1Path, fq2Path, fqJoinPath, readLen, insertSize, None, 60)],
minOverlap=comh.SAMPLES_PAIRED_END_JOIN_MIN_OVERLAP,
minOverlapIdentity=comh.SAMPLES_PAIRED_END_JOIN_MIN_OVERLAP_IDENTITY,
maxCpu=comh.MAX_PROC)
print("Filtered out: %s %% reads" % r)
# Translate consensus reads into protein sequences, run hmmsearch
if True: # to skip this step, set to False (e.g. resume processing after OS/HW failure)
print("Translating reads to protein sequences")
# file with protein consensus read sequences
joinFastaProtGzip = os.path.join(strainsDir, '0_join_prot.fna.gz')
fq.readsToProt(fqJoinPath, joinFastaProtGzip, comh.TRANSLATION_TABLE)
print("Running HMMER (hmmsearch)")
domOut = os.path.join(strainsDir, '0_join_prot.domtblout')
joinFastaProt = joinFastaProtGzip[:-3]
cmd = 'zcat %s > %s;%s -o /dev/null --noali --domtblout %s -E 0.01 ' \
'--cpu %s %s %s;rm %s;gzip -f %s' % (joinFastaProtGzip, joinFastaProt, hmmsearchPath, domOut, comh.MAX_PROC,
profileHmmFile, joinFastaProt, joinFastaProt, domOut)
assert parallel.reportFailedCmd(parallel.runCmdSerial([parallel.TaskCmd(cmd, strainsDir)])) is None
# Assign consensus reads to individual gene domains
if True: # to skip this step, set to False (e.g. resume processing after OS/HW failure)
print("Assigning consensus reads to gene domains")
hio.partitionReads(workingDir, comh.SAMPLES_PFAM_EVAN_MIN_SCORE, comh.SAMPLES_PFAM_EVAN_MIN_ACCURACY,
comh.SAMPLES_SHUFFLE_RAND_SEED, comh.SAMPLES_PFAM_PARTITIONED_DIR, True, False)
partitionedDir = os.path.join(workingDir, comh.SAMPLES_PFAM_PARTITIONED_DIR)
# Run Assembly
if True: # to skip this step, set to False (e.g. resume processing after OS/HW failure)
print("Running Snowball assembly")
# collect tasks for each gene domain
taskList = []
assert os.path.isdir(partitionedDir), 'Temporary directory does not exist:\n\t%s' % partitionedDir
for f in os.listdir(partitionedDir):
fPath = os.path.join(partitionedDir, f)
if f.endswith('join.fq.gz') and os.path.isfile(fPath):
base = fPath[:-6]
inFq = fPath
inDomtblout = '%s_prot.domtblout.gz' % base
inProtFna = '%s_prot.fna.gz' % base
outPath = '%s_read_rec.pkl.gz' % base
taskList.append(parallel.TaskThread(hmain.buildSuperReads,
(inFq, inDomtblout, inProtFna, outPath,
comh.ASSEMBLY_CONSIDER_PROT_COMP,
comh.ASSEMBLY_ONLY_POVERLAP,
comh.ASSEMBLY_POVERLAP,
comh.ASSEMBLY_OVERLAP_LEN,
comh.ASSEMBLY_OVERLAP_ANNOT_LEN,
comh.ASSEMBLY_STOP_OVERLAP_MISMATCH,
comh.ASSEMBLY_MAX_LOOPS,
comh.TRANSLATION_TABLE)))
# run tasks in parallel
parallel.runThreadParallel(taskList, comh.MAX_PROC, keepRetValues=False)
# Creates the output file
if True: # to skip this step, set to False (e.g. resume processing after OS/HW failure)
print('Creating output file:\n\t%s' % outFile)
counter = 0
out = fq.WriteFq(outFile)
for f in os.listdir(partitionedDir):
fPath = os.path.join(partitionedDir, f)
if f.endswith('.pkl.gz') and os.path.isfile(fPath):
domName = f[2:-23]
for rec in hio.loadReadRec(fPath):
counter += 1
contigName = 'contig_%s_%s' % (counter, domName)
dnaSeq = rec.dnaSeq
# get the quality score string
if outAnnot or outFormat == 'fq':
qs = rec.qsArray.getQSStr(dnaSeq)
else:
qs = None
# get the contig annotations
if outAnnot:
assert qs is not None
codingStart = rec.annotStart
codingLen = rec.annotLen
posCov = ','.join(map(lambda x: str(int(x)), rec.getPosCovArray()))
annotStr = 'domName:%s|codingStart:%s|codingLen:%s|qs:%s|posCov:%s' % (domName, codingStart,
codingLen, qs, posCov)
else:
annotStr = ''
# write an entry to the output file
if outFormat == 'fq':
out.writeFqEntry('@' + contigName, dnaSeq, qs, annotStr)
else:
assert outFormat == 'fna'
if outAnnot:
annotStr = '|' + annotStr
out.write('>%s%s\n%s\n' % (contigName, annotStr, dnaSeq))
# close output file
out.close()
# Clean up the working directory
    if cleanUpTmpWorkingDir:
# clean up the temporary directory
print('Cleaning up temporary directory')
assert os.path.isdir(workingDir), 'Directory to be cleaned does not exist:\n%s' % workingDir
shutil.rmtree(workingDir)
elif cleanUp:
# clean up the user defined working directory
if os.path.isdir(workingDir):
print('Cleaning up working directory:\n\t%s' % workingDir)
shutil.rmtree(os.path.join(workingDir, comh.SAMPLES_PFAM_PARTITIONED_DIR))
shutil.rmtree(strainsDir)
print('Done')
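# Hypothetical invocation sketch (all paths below are placeholders):
#
#     mainSnowball('sample_pair1.fq.gz', 'sample_pair2.fq.gz', 'Pfam-A.hmm',
#                  insertSize=225, outFile='contigs.fna.gz', outFormat='fna',
#                  processors=8)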
def _main():
"""
Main function of the master script.
"""
# Command line parameters
parser = argparse.ArgumentParser(
description = 'Snowball gene assembler for Metagenomes (version 1.2).',
epilog='This software is distributed under the GNU General Public License version 3 (http://www.gnu.org/licenses/).')
parser.add_argument('-f', '--fq-1-file', nargs=1, type=file, required=True,
                        help='FASTQ 1 file path containing first read-ends of Illumina paired-end reads.',
metavar='pair1.fq.gz',
dest='fq1File')
parser.add_argument('-s', '--fq-2-file', nargs=1, type=file, required=True,
                        help='FASTQ 2 file path containing second read-ends of Illumina paired-end reads.',
metavar='pair2.fq.gz',
dest='fq2File')
parser.add_argument('-m', '--profile-hmm-file', nargs=1, type=file, required=True,
help='Profile HMMs file containing models of individual gene domains '
'(this file is generated by the HMMER 3.0 software).',
metavar='profile.hmm',
dest='profileHmmFile')
parser.add_argument('-i', '--insert-size', nargs=1, type=int, required=True,
help='Mean insert size used for the library preparation (i.e. read generation).',
metavar='225',
dest='insertSize')
parser.add_argument('-r', '--read-length', nargs=1, type=int, required=False,
help='Read length of the read-ends (Default: read length will be derived from the input files).',
metavar='150',
dest='readLen')
parser.add_argument('-o', '--output-file', nargs=1, type=str, required=False,
help='Output FASTA or FASTQ file containing assembled contigs '
'(Default: the file name will be derived from the input file names).',
metavar='contigs.fna.gz',
dest='outFile')
parser.add_argument('-t', '--output-format', nargs=1, type=str, required=False,
choices=['fna', 'fq'],
help='Format of the output file, supported: fna, fq (Default: fna).',
metavar='fna',
dest='outFormat')
parser.add_argument('-w', '--working-directory', nargs=1, type=str, required=False,
help='Working directory (Default: a temporary working directory will be automatically '
'created and removed).',
metavar='wd',
dest='workingDir')
parser.add_argument('-e', '--hmmsearch-path', nargs=1, type=file, required=False,
help='Path to the HMMER hmmsearch command (Default: the version in the PATH will be used).',
metavar='/usr/bin/hmmsearch',
dest='hmmsearchPath')
parser.add_argument('-n', '--hmmsearch-min-score', nargs=1, type=int, required=False,
help='Minimum score for the reads to gene domains assignments (Default: 40).',
metavar='40',
dest='pfamMinScore')
parser.add_argument('-v', '--minimum-overlap-probability', nargs=1, type=float, required=False,
help='Minimum overlap probability parameter of the Snowball algorithm (Default: 0.8).',
metavar='0.8',
dest='pOverlap')
parser.add_argument('-l', '--min-overlap-length', nargs=1, type=float, required=False,
help='Minimum overlap length parameter of the Snowball algorithm (Default: 0.5).',
metavar='0.5',
dest='overlapLen')
parser.add_argument('-a', '--output-contig-annotation', action='store_true', required=False,
help='Store additional contig annotation along with the contigs.'
' (Default: no annotation is stored).',
dest='outAnnot')
parser.add_argument('-c', '--clean-up', action='store_true', required=False,
help='Clean up the working directory. If a temporary working directory is automatically created, '
'it will always be cleaned up. (Default: no clean up is performed).',
dest='cleanUp')
parser.add_argument('-p', '--processors', nargs=1, type=int, required=False,
help='Number of processors to be used (Default: all available processors will be used).',
metavar='60',
dest='processors')
args = parser.parse_args()
# Values that must be defined
fq1File = None
fq2File = None
profileHmmFile = None
insertSize = None
# Default values
outFormat = 'fna'
pfamMinScore = 40
pOverlap = 0.8
overlapLen = 0.5
outAnnot = False
cleanUp = False
processors = mp.cpu_count()
readLen = None
outFile = None
workingDir = None
hmmsearchPath = None
# reading arguments
if args.fq1File:
fq1File = normPath(args.fq1File[0].name)
if args.fq2File:
fq2File = normPath(args.fq2File[0].name)
if args.profileHmmFile:
profileHmmFile = normPath(args.profileHmmFile[0].name)
if args.insertSize:
insertSize = int(args.insertSize[0])
if args.readLen:
readLen = int(args.readLen[0])
if args.outFile:
outFile = normPath(args.outFile[0])
if args.outFormat:
outFormat = args.outFormat[0]
if args.workingDir:
workingDir = normPath(args.workingDir[0])
if args.hmmsearchPath:
hmmsearchPath = normPath(args.hmmsearchPath[0].name)
if args.pfamMinScore:
pfamMinScore = int(args.pfamMinScore[0])
if args.pOverlap:
pOverlap = float(args.pOverlap[0])
if args.overlapLen:
overlapLen = float(args.overlapLen[0])
if args.outAnnot:
outAnnot = args.outAnnot
if args.cleanUp:
cleanUp = args.cleanUp
if args.processors:
processors = int(args.processors[0])
# Printing for debugging.
if False: # set to True to print input parameters
print('fq1File: %s' % fq1File)
print('fq2File: %s' % fq2File)
print('profileHmmFile: %s' % profileHmmFile)
print('insertSize: %s' % insertSize)
print('readLen: %s' % readLen)
print('outFile: %s' % outFile)
print('outFormat: %s' % outFormat)
print('workingDir: %s' % workingDir)
print('hmmsearchPath: %s' % hmmsearchPath)
print('pfamMinScore: %s' % pfamMinScore)
print('pOverlap: %s' % pOverlap)
print('overlapLen: %s' % overlapLen)
print('outAnnot: %s' % outAnnot)
print('cleanUp: %s' % cleanUp)
print('processors: %s' % processors)
# Run Snowball
mainSnowball(fq1File, fq2File, profileHmmFile, insertSize,
readLen=readLen, outFile=outFile, outFormat=outFormat, workingDir=workingDir,
hmmsearchPath=hmmsearchPath, pfamMinScore=pfamMinScore, pOverlap=pOverlap, overlapLen=overlapLen,
outAnnot=outAnnot, cleanUp=cleanUp, processors=processors)
def normPath(path):
if os.path.isabs(path) or path.strip().startswith('~'):
return path
else:
return os.path.abspath(path)
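# Illustrative behaviour of normPath (examples assumed, derived from the code
# above): absolute and '~'-prefixed paths are returned unchanged, e.g.
#   normPath('~/reads.fq.gz') -> '~/reads.fq.gz'
# while relative paths are resolved against the current working directory:
#   normPath('reads.fq.gz')   -> os.path.abspath('reads.fq.gz')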
# TESTS ---------------------------------------------------
def _test():
fq1File = '/home/igregor/Documents/work/release_snow/input/10_1_NZ_AIGN00000000_p1.fq.gz'
fq2File = '/home/igregor/Documents/work/release_snow/input/10_1_NZ_AIGN00000000_p2.fq.gz'
profileHmmFile = '/home/igregor/Documents/work/db/pfamV27/Pfam-A_and_Amphora2.hmm'
insertSize = 225
outFile = '/home/igregor/Documents/work/release_snow/out.fna'
outFormat = 'fna' # or 'fq'
workingDir = '/home/igregor/Documents/work/release_snow/working'
hmmsearchPath = '/home/igregor/Documents/work/tools/hmmer-3.0/binaries/hmmsearch'
mainSnowball(fq1File, fq2File, profileHmmFile, insertSize, readLen=None, outFile=outFile, outFormat=outFormat,
workingDir=workingDir, hmmsearchPath=hmmsearchPath, pfamMinScore=None, outAnnot=False, cleanUp=False,
processors=mp.cpu_count())
if __name__ == "__main__":
_main()
# _test()
| algbioi/snowball | algbioi/ga/run.py | Python | gpl-3.0 | 21,873 |
# deprecated?
class HostControlWrapper(object):
def __init__(self):
self.id = None
self.idx = None
self.label = None
self.btn = None
self.tile_bg = None
self.host = None
| wackerl91/luna | resources/lib/model/hostcontrolwrapper.py | Python | gpl-3.0 | 222 |
import requests
from requests.auth import HTTPDigestAuth
import time
import json
import sys
class IMT550C():
def __init__(self):
params = self.configure()
self.uri = params["uri"]
self.ip = params["IP"]
self.user = params["username"]
self.password = params["password"]
self.sample_rate = params["sample_rate"]
self.points = [
{"name": "cooling_setpoint", "unit": "F", "data_type": "double",
"OID": "4.1.6", "range": (45.0,95.0), "access": 6,
"devtosmap": lambda x: x/10, "smaptodev": lambda x: x*10}, #thermSetbackCool
{"name": "fan_state", "unit": "Mode", "data_type": "long",
"OID": "4.1.4", "range": [0,1], "access": 4,
"devtosmap": lambda x: {0:0, 1:0, 2:1}[x], "smaptodev": lambda x: {x:x}[x]}, # thermFanState
{"name": "heating_setpoint", "unit": "F", "data_type": "double",
"OID": "4.1.5", "range": (45.0,95.0), "access": 6,
"devtosmap": lambda x: x/10, "smaptodev": lambda x: x*10}, #thermSetbackHeat
{"name": "mode", "unit": "Mode", "data_type": "long",
"OID": "4.1.1", "range": [0,1,2,3], "access": 6,
"devtosmap": lambda x: x-1, "smaptodev": lambda x: x+1}, # thermHvacMode
{"name": "override", "unit": "Mode", "data_type": "long",
"OID": "4.1.9", "range": [0,1], "access": 6,
"devtosmap": lambda x: {1:0, 3:1, 2:0}[x], "smaptodev": lambda x: {0:1, 1:3}[x]}, # hold/override
{"name": "relative_humidity", "unit": "%RH", "data_type": "double",
"OID": "4.1.14", "range": (0,95), "access": 0,
"devtosmap": lambda x: x, "smaptodev": lambda x: x}, #thermRelativeHumidity
{"name": "state", "unit": "Mode", "data_type": "long",
"OID": "4.1.2", "range": [0,1,2], "access": 4,
"devtosmap": lambda x: {1:0, 2:0, 3:1, 4:1, 5:1, 6:2, 7:2, 8:0, 9:0}[x],
"smaptodev": lambda x: {x:x}[x]}, # thermHvacState
{"name": "temperature", "unit": "F", "data_type": "double",
"OID": "4.1.13", "range": (-30.0,200.0), "access": 4,
"devtosmap": lambda x: x/10, "smaptodev": lambda x: x*10}, # thermAverageTemp
{"name": "fan_mode", "unit": "Mode", "data_type": "long",
"OID": "4.1.3", "range": [1,2,3], "access": 6,
"devtosmap": lambda x: x, "smaptodev": lambda x: x} # thermFanMode
]
def get_state(self):
data = {}
for p in self.points:
url = "http://%s/get?OID%s" % (self.ip, p["OID"])
r = requests.get(url, auth=HTTPDigestAuth(self.user, self.password))
val = r.content.split('=')[-1]
if p["data_type"] == "long":
data[p["name"]] = p["devtosmap"](long(val))
else:
data[p["name"]] = p["devtosmap"](float(val))
data["time"] = int(time.time()*1e9)
return data
def set_state(self, request):
for p in self.points:
key = p["name"]
if key in request:
payload = {"OID"+p["OID"]: int(p["smaptodev"](request[key])), "submit": "Submit"}
r = requests.get('http://'+self.ip+"/pdp/", auth=HTTPDigestAuth(self.user, self.password), params=payload)
if not r.ok:
print r.content
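    # Illustrative sketch (not from the original driver): a request such as
    #   thermostat.set_state({"heating_setpoint": 72.0})
    # builds the payload {"OID4.1.5": 720, "submit": "Submit"}, because the
    # smaptodev lambda for that point multiplies by 10; get_state() applies
    # the inverse devtosmap conversion when reading values back.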
def configure(self):
params = None
with open("params.json") as f:
try:
params = json.loads(f.read())
except ValueError as e:
print "Invalid parameter file"
sys.exit(1)
return dict(params)
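    # Illustrative params.json layout (key names taken from __init__ above;
    # the values shown are placeholders, not defaults shipped with the driver):
    #   {"uri": "...", "IP": "192.168.1.50", "username": "admin",
    #    "password": "secret", "sample_rate": 30}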
if __name__ == '__main__':
thermostat = IMT550C()
while True:
print thermostat.get_state()
print
| SoftwareDefinedBuildings/bw2-contrib | driver/imt550c/smap.py | Python | gpl-3.0 | 3,893 |
# nnmware(c)2012-2020
from __future__ import unicode_literals
from django import forms
from django.contrib.admin.widgets import AdminTimeWidget
from django.contrib.auth import get_user_model
from django.core.exceptions import ValidationError
from django.core.validators import validate_email
from django.utils.timezone import now
from django.utils.translation import gettext as _, get_language
from nnmware.apps.booking.models import Hotel, Room, Booking, Discount, DISCOUNT_SPECIAL
from nnmware.apps.booking.models import RequestAddHotel, DISCOUNT_NOREFUND, DISCOUNT_CREDITCARD, \
DISCOUNT_EARLY, DISCOUNT_LATER, DISCOUNT_PERIOD, DISCOUNT_PACKAGE, DISCOUNT_NORMAL, DISCOUNT_HOLIDAY, \
DISCOUNT_LAST_MINUTE
from nnmware.apps.money.models import Bill
from nnmware.core.fields import ReCaptchaField
from nnmware.core.forms import UserFromRequestForm
class LocaleNamedForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(LocaleNamedForm, self).__init__(*args, **kwargs)
if get_language() == 'ru':
name = self.instance.name
description = self.instance.description
else:
name = self.instance.name_en
description = self.instance.description_en
self.fields['name'] = forms.CharField(widget=forms.TextInput(attrs={'size': '25'}), initial=name)
self.fields['description'] = forms.CharField(required=False, widget=forms.Textarea(attrs={'class': 'wide',
'rows': '5'}),
initial=description)
def save(self, commit=True):
if get_language() == 'ru':
self.instance.name = self.cleaned_data['name']
self.instance.description = self.cleaned_data['description']
else:
self.instance.name_en = self.cleaned_data['name']
self.instance.description_en = self.cleaned_data['description']
return super(LocaleNamedForm, self).save(commit=commit)
class CabinetInfoForm(UserFromRequestForm, LocaleNamedForm):
time_on = forms.CharField(widget=AdminTimeWidget(), required=False)
time_off = forms.CharField(widget=AdminTimeWidget(), required=False)
class Meta:
model = Hotel
fields = ('option', 'time_on', 'time_off')
def __init__(self, *args, **kwargs):
super(CabinetInfoForm, self).__init__(*args, **kwargs)
if get_language() == 'ru':
schema_transit = self.instance.schema_transit
paid_services = self.instance.paid_services
else:
schema_transit = self.instance.schema_transit_en
paid_services = self.instance.paid_services_en
self.fields['schema_transit'] = forms.CharField(required=False, widget=forms.Textarea(attrs={'class': 'wide',
'rows': '5'}),
initial=schema_transit)
self.fields['paid_services'] = forms.CharField(required=False, widget=forms.Textarea(attrs={'class': 'wide',
'rows': '5'}),
initial=paid_services)
if not self._user.is_superuser:
self.fields['name'].widget.attrs['readonly'] = True
def clean_name(self):
if self._user.is_superuser:
return self.cleaned_data['name']
if get_language() == 'ru':
return self.instance.name
else:
return self.instance.name_en
def save(self, commit=True):
if get_language() == 'ru':
self.instance.schema_transit = self.cleaned_data['schema_transit']
self.instance.paid_services = self.cleaned_data['paid_services']
else:
self.instance.schema_transit_en = self.cleaned_data['schema_transit']
self.instance.paid_services_en = self.cleaned_data['paid_services']
return super(CabinetInfoForm, self).save(commit=commit)
class CabinetRoomForm(LocaleNamedForm):
class Meta:
model = Room
fields = ('option', 'typefood', 'surface_area')
widgets = {
'typefood': forms.RadioSelect(),
}
class CabinetEditBillForm(forms.ModelForm):
class Meta:
model = Bill
fields = ('date_billed', 'status', 'description_small', 'invoice_number', 'amount')
class RequestAddHotelForm(forms.ModelForm):
city = forms.CharField(required=False, widget=forms.TextInput(attrs={'size': '35'}))
address = forms.CharField(required=False, widget=forms.TextInput(attrs={'size': '35'}))
name = forms.CharField(widget=forms.TextInput(attrs={'size': '35'}))
email = forms.CharField(required=False, widget=forms.TextInput(attrs={'size': '35'}))
phone = forms.CharField(required=False, widget=forms.TextInput(attrs={'size': '35'}))
fax = forms.CharField(required=False, widget=forms.TextInput(attrs={'size': '35'}))
contact_email = forms.CharField(required=False, widget=forms.TextInput(attrs={'size': '35'}))
website = forms.CharField(required=False, widget=forms.TextInput(attrs={'size': '35'}))
rooms_count = forms.CharField(required=False, widget=forms.TextInput(attrs={'size': '35'}))
class Meta:
model = RequestAddHotel
fields = ('city', 'address', 'name', 'email', 'phone', 'fax', 'contact_email',
'website', 'rooms_count', 'starcount')
def __init__(self, *args, **kwargs):
user = kwargs.pop('user')
super(RequestAddHotelForm, self).__init__(*args, **kwargs)
if not user.is_authenticated:
self.fields['recaptcha'] = ReCaptchaField(error_messages={'required': _('This field is required'),
'invalid': _('Answer is wrong')})
class UserCabinetInfoForm(UserFromRequestForm):
class Meta:
model = get_user_model()
fields = ('first_name', 'last_name', 'subscribe')
class BookingAddForm(UserFromRequestForm):
room_id = forms.CharField(max_length=30, required=False)
settlement = forms.CharField(max_length=30, required=False)
hid_method = forms.CharField(max_length=30, required=False)
class Meta:
model = Booking
fields = (
'from_date', 'to_date', 'first_name', 'middle_name', 'last_name', 'phone', 'email', 'guests', 'comment')
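    # hid_method is presumably a honeypot field: clean_hid_method below rejects
    # any submission that fills it in, treating it as spam.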
def clean_hid_method(self):
m = self.cleaned_data.get('hid_method')
if m:
raise forms.ValidationError(_("Spam."))
return None
def clean_phone(self):
phone = self.cleaned_data.get('phone')
if not phone:
raise forms.ValidationError(_("Phone is required"))
return phone
def clean_email(self):
email = self.cleaned_data.get('email')
if not email:
raise forms.ValidationError(_("Email is required"))
try:
validate_email(email)
        except ValidationError:
            raise forms.ValidationError(_("Email is wrong"))
return email
def clean(self):
cleaned_data = super(BookingAddForm, self).clean()
if not self._user.is_authenticated:
email = cleaned_data.get('email')
if get_user_model().objects.filter(email=email).exists():
raise forms.ValidationError(_("Email already registered, please sign-in."))
return cleaned_data
class AddDiscountForm(LocaleNamedForm):
# TODO - Not used now(future)
class Meta:
model = Discount
fields = ('name', 'choice', 'time_on', 'time_off', 'days', 'at_price_days', 'percentage', 'apply_norefund',
'apply_creditcard', 'apply_package', 'apply_period')
def __init__(self, *args, **kwargs):
super(AddDiscountForm, self).__init__(*args, **kwargs)
self.fields['name'].required = False
def clean_choice(self):
choice = self.cleaned_data.get('choice')
if choice == 0:
raise forms.ValidationError(_("Discount not set"))
return choice
def clean_name(self):
name = self.cleaned_data.get('name')
        if len(name.strip()) == 0:
name = _("New discount from ") + now().strftime("%d.%m.%Y")
return name
def clean(self):
cleaned_data = super(AddDiscountForm, self).clean()
choice = cleaned_data.get("choice")
need_del = []
if choice == DISCOUNT_NOREFUND or choice == DISCOUNT_CREDITCARD:
need_del = ['time_on', 'time_off', 'days', 'at_price_days', 'apply_norefund', 'apply_creditcard',
'apply_package', 'apply_period']
elif choice == DISCOUNT_EARLY:
need_del = ['time_on', 'time_off', 'at_price_days', 'apply_creditcard']
elif choice == DISCOUNT_LATER:
need_del = ['time_off', 'at_price_days']
elif choice == DISCOUNT_PERIOD:
need_del = ['time_on', 'time_off', 'at_price_days', 'apply_package', 'apply_period']
elif choice == DISCOUNT_PACKAGE:
need_del = ['time_on', 'time_off', 'apply_norefund', 'apply_creditcard']
elif choice == DISCOUNT_HOLIDAY or choice == DISCOUNT_SPECIAL or choice == DISCOUNT_NORMAL:
need_del = ['time_on', 'time_off', 'days', 'at_price_days', 'apply_package', 'apply_period']
elif choice == DISCOUNT_LAST_MINUTE:
need_del = ['days', 'at_price_days', 'apply_norefund', 'apply_creditcard', 'apply_package', 'apply_period']
for i in need_del:
del cleaned_data[i]
| nnmware/nnmware | apps/booking/forms.py | Python | gpl-3.0 | 9,755 |
from __future__ import print_function, division
INLINE_LABEL_STYLE = {
'display': 'inline-block',
}
GRAPH_GLOBAL_CONFIG = {
'displaylogo': False,
'modeBarButtonsToRemove': ['sendDataToCloud'],
}
AXIS_OPTIONS = ({
'label': 'linear',
'value': 'linear',
}, {
'label': 'log',
'value': 'log',
})
ERRORBAR_OPTIONS = ({
'label': 'True',
'value': True,
}, {
'label': 'False',
'value': False,
})
LINE_STYLE = {
'width': 1.0,
}
XLABEL = {
'linear': r'Scattering Vector, $q$ $(\text{\AA}^{-1})$',
'log': r'Scattering Vector, $q$ $(\text{\AA}^{-1}) (log scale)$',
'guinier': r'$q^2$ $(\text{\AA}^{-2})$',
'kratky': r'Scattering Vector, $q$ $(\text{\AA}^{-1})$',
'porod': r'$q^4$ $(\text{\AA}^{-4})$',
'pdf': r'pair distance ($\text{\AA}$)',
}
YLABEL = {
'linear': 'Intensity (arb. units.)',
'log': r'$\log(I)$',
'guinier': r'$\ln(I(q))$',
'kratky': r'$I(q) \cdot q^2$',
'porod': r'$I(q) \cdot q^4$',
'relative_diff': 'Relative Ratio (%)',
'absolute_diff': 'Absolute Difference (arb. units.)',
'error': 'Error',
'error_relative_diff': 'Error Relative Ratio (%)',
'pdf': 'P(r)',
}
TITLE = {
'sasprofile': 'Subtracted Profiles',
'guinier': 'Guinier Profiles',
'kratky': 'Kratky Profiles',
'porod': 'Porod Profiles',
'relative_diff': 'Relative Difference Profiles',
'absolute_diff': 'Absolute Difference Profiles',
'error': 'Error Profile',
'error_relative_diff': 'Error Relative Difference Profile',
'pdf': 'Pair-wise Distribution',
'fitting': 'P(r) Distribution Fitting',
}
| lqhuang/SAXS-tools | dashboard/layouts/style.py | Python | gpl-3.0 | 1,620 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.3 on 2016-07-01 17:09
from __future__ import unicode_literals
from django.db import migrations
import django_fsm
class Migration(migrations.Migration):
dependencies = [
('invoice', '0002_billordering'),
]
operations = [
migrations.AlterField(
model_name='bill',
name='status',
field=django_fsm.FSMIntegerField(choices=[(0, 'building'), (1, 'valid'), (2, 'cancel'), (3, 'archive')], db_index=True, default=0, verbose_name='status'),
),
]
| Diacamma2/financial | diacamma/invoice/migrations/0003_bill_status.py | Python | gpl-3.0 | 568 |
from setuptools import setup, find_packages
DESCRIPTION = "Implementation of Huff et. al. (2011) Estimation of Recent Shared Ancestry "
LONG_DESCRIPTION = "`ersa` estimates the combined number of generations between pairs of " \
"individuals using a " \
"`Germline <http://www1.cs.columbia.edu/~gusev/germline/>`_ " \
"matchfile as input. It is an implementation of " \
"`Huff et. al. (2011) Maximum-Likelihood estimation of recent shared ancenstry (ERSA) <http://genome.cshlp.org/content/21/5/768.full>`_ " \
"and `Li et. al. (2014) Relationship Estimation from Whole-Genome Sequence Data <http://journals.plos.org/plosgenetics/article?id=10.1371/journal.pgen.1004144>`_ ."
NAME = "ersa"
AUTHOR = "Richard Munoz, Jie Yuan, Yaniv Erlich"
AUTHOR_EMAIL = "[email protected], [email protected], [email protected]"
MAINTAINER = "Richard Munoz"
MAINTAINER_EMAIL = "[email protected]"
DOWNLOAD_URL = 'http://github.com/rmunoz12/ersa'
LICENSE = 'GNU General Public License v3 (GPLv3)'
VERSION = '1.1.2'
setup(name=NAME,
version=VERSION,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
maintainer=MAINTAINER,
maintainer_email=MAINTAINER_EMAIL,
url=DOWNLOAD_URL,
download_url=DOWNLOAD_URL,
license=LICENSE,
packages=find_packages(),
test_suite='tests',
      entry_points={
"console_scripts": ['ersa = ersa.ersa:main']
},
install_requires=['sqlalchemy', 'inflect', 'pytest', 'scipy'],
classifiers=['Development Status :: 4 - Beta',
'Intended Audience :: Science/Research',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3 :: Only',
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
'Operating System :: OS Independent',
'Topic :: Scientific/Engineering :: Bio-Informatics',
'Topic :: Scientific/Engineering :: Information Analysis',
'Topic :: Sociology :: Genealogy']
)
| rmunoz12/ersa | setup.py | Python | gpl-3.0 | 2,290 |
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# (C) British Crown Copyright 2012-6 Met Office.
#
# This file is part of Rose, a framework for meteorological suites.
#
# Rose is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Rose is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Rose. If not, see <http://www.gnu.org/licenses/>.
# -----------------------------------------------------------------------------
import os
import time
import webbrowser
import pygtk
pygtk.require('2.0')
import gtk
import rose.config
import rose.config_editor.util
import rose.gtk.dialog
class NavPanelHandler(object):
"""Handles the navigation panel menu."""
def __init__(self, data, util, reporter, mainwindow,
undo_stack, redo_stack, add_config_func,
group_ops_inst, section_ops_inst, variable_ops_inst,
kill_page_func, reload_ns_tree_func, transform_default_func,
graph_ns_func):
self.data = data
self.util = util
self.reporter = reporter
self.mainwindow = mainwindow
self.undo_stack = undo_stack
self.redo_stack = redo_stack
self.group_ops = group_ops_inst
self.sect_ops = section_ops_inst
self.var_ops = variable_ops_inst
self._add_config = add_config_func
self.kill_page_func = kill_page_func
self.reload_ns_tree_func = reload_ns_tree_func
self._transform_default_func = transform_default_func
self._graph_ns_func = graph_ns_func
def add_dialog(self, base_ns):
"""Handle an add section dialog and request."""
if base_ns is not None and '/' in base_ns:
config_name, subsp = self.util.split_full_ns(self.data, base_ns)
config_data = self.data.config[config_name]
if config_name == base_ns:
help_str = ''
else:
sections = self.data.helper.get_sections_from_namespace(
base_ns)
if sections == []:
help_str = subsp.replace('/', ':')
else:
help_str = sections[0]
help_str = help_str.split(':', 1)[0]
for config_section in (config_data.sections.now.keys() +
config_data.sections.latent.keys()):
if config_section.startswith(help_str + ":"):
help_str = help_str + ":"
else:
help_str = None
config_name = None
choices_help = self.data.helper.get_missing_sections(config_name)
config_names = [
n for n in self.data.config if not self.ask_is_preview(n)]
config_names.sort(lambda x, y: (y == config_name) - (x == config_name))
config_name, section = self.mainwindow.launch_add_dialog(
config_names, choices_help, help_str)
if config_name in self.data.config and section is not None:
self.sect_ops.add_section(config_name, section, page_launch=True)
    def ask_is_preview(self, base_ns):
        namespace = "/" + base_ns.lstrip("/")
        try:
            config_name = self.util.split_full_ns(self.data, namespace)[0]
            config_data = self.data.config[config_name]
            return config_data.is_preview
        except KeyError:
            # the namespace (or its config name) could not be resolved
            return False
def copy_request(self, base_ns, new_section=None, skip_update=False):
"""Handle a copy request for a section and its options."""
namespace = "/" + base_ns.lstrip("/")
sections = self.data.helper.get_sections_from_namespace(namespace)
if len(sections) != 1:
return False
section = sections.pop()
config_name = self.util.split_full_ns(self.data, namespace)[0]
config_data = self.data.config[config_name]
return self.group_ops.copy_section(config_name, section,
skip_update=skip_update)
def create_request(self):
"""Handle a create configuration request."""
if not any([v.config_type == rose.TOP_CONFIG_NAME
for v in self.data.config.values()]):
text = rose.config_editor.WARNING_APP_CONFIG_CREATE
title = rose.config_editor.WARNING_APP_CONFIG_CREATE_TITLE
rose.gtk.dialog.run_dialog(rose.gtk.dialog.DIALOG_TYPE_ERROR,
text, title)
return False
# Need an application configuration to be created.
root = os.path.join(self.data.top_level_directory,
rose.SUB_CONFIGS_DIR)
name, meta = self.mainwindow.launch_new_config_dialog(root)
if name is None:
return False
config_name = "/" + name
self._add_config(config_name, meta)
def ignore_request(self, base_ns, is_ignored):
"""Handle an ignore or enable section request."""
config_names = self.data.config.keys()
if base_ns is not None and '/' in base_ns:
config_name, subsp = self.util.split_full_ns(self.data, base_ns)
prefer_name_sections = {
config_name:
self.data.helper.get_sections_from_namespace(base_ns)}
else:
prefer_name_sections = {}
config_sect_dict = {}
sorter = rose.config.sort_settings
for config_name in config_names:
config_data = self.data.config[config_name]
config_sect_dict[config_name] = []
sect_and_data = list(config_data.sections.now.items())
for v_sect in config_data.vars.now:
sect_data = config_data.sections.now[v_sect]
sect_and_data.append((v_sect, sect_data))
for section, sect_data in sect_and_data:
if section not in config_sect_dict[config_name]:
if sect_data.ignored_reason:
if is_ignored:
continue
if not is_ignored:
co = sect_data.metadata.get(rose.META_PROP_COMPULSORY)
if (not sect_data.ignored_reason or
co == rose.META_PROP_VALUE_TRUE):
continue
config_sect_dict[config_name].append(section)
config_sect_dict[config_name].sort(rose.config.sort_settings)
if config_name in prefer_name_sections:
prefer_name_sections[config_name].sort(
rose.config.sort_settings)
config_name, section = self.mainwindow.launch_ignore_dialog(
config_sect_dict, prefer_name_sections, is_ignored)
if config_name in self.data.config and section is not None:
self.sect_ops.ignore_section(config_name, section, is_ignored)
def edit_request(self, base_ns):
"""Handle a request for editing section comments."""
if base_ns is None:
return False
base_ns = "/" + base_ns.lstrip("/")
config_name, subsp = self.util.split_full_ns(self.data, base_ns)
config_data = self.data.config[config_name]
sections = self.data.helper.get_sections_from_namespace(base_ns)
for section in list(sections):
if section not in config_data.sections.now:
sections.remove(section)
if not sections:
return False
if len(sections) > 1:
section = rose.gtk.dialog.run_choices_dialog(
rose.config_editor.DIALOG_LABEL_CHOOSE_SECTION_EDIT,
sections,
rose.config_editor.DIALOG_TITLE_CHOOSE_SECTION)
else:
section = sections[0]
if section is None:
return False
title = rose.config_editor.DIALOG_TITLE_EDIT_COMMENTS.format(section)
text = "\n".join(config_data.sections.now[section].comments)
finish = lambda t: self.sect_ops.set_section_comments(
config_name, section, t.splitlines())
rose.gtk.dialog.run_edit_dialog(text, finish_hook=finish, title=title)
def fix_request(self, base_ns):
"""Handle a request to auto-fix a configuration."""
if base_ns is None:
return False
base_ns = "/" + base_ns.lstrip("/")
config_name, subsp = self.util.split_full_ns(self.data, base_ns)
self._transform_default_func(only_this_config=config_name)
def get_ns_metadata_and_comments(self, namespace):
"""Return metadata dict and comments list."""
namespace = "/" + namespace.lstrip("/")
metadata = {}
comments = ""
if namespace is None:
return metadata, comments
metadata = self.data.namespace_meta_lookup.get(namespace, {})
comments = self.data.helper.get_ns_comment_string(namespace)
return metadata, comments
def info_request(self, namespace):
"""Handle a request for namespace info."""
if namespace is None:
return False
config_name, subsp = self.util.split_full_ns(self.data, namespace)
config_data = self.data.config[config_name]
sections = self.data.helper.get_sections_from_namespace(namespace)
search_function = lambda i: self.search_request(namespace, i)
for section in sections:
sect_data = config_data.sections.now.get(section)
if sect_data is not None:
rose.config_editor.util.launch_node_info_dialog(
sect_data, "", search_function)
def graph_request(self, namespace):
"""Handle a graph request for namespace info."""
self._graph_ns_func(namespace)
def remove_request(self, base_ns):
"""Handle a delete section request."""
config_names = self.data.config.keys()
if base_ns is not None and '/' in base_ns:
config_name, subsp = self.util.split_full_ns(self.data, base_ns)
prefer_name_sections = {
config_name:
self.data.helper.get_sections_from_namespace(base_ns)}
else:
prefer_name_sections = {}
config_sect_dict = {}
sorter = rose.config.sort_settings
for config_name in config_names:
config_data = self.data.config[config_name]
config_sect_dict[config_name] = config_data.sections.now.keys()
config_sect_dict[config_name].sort(rose.config.sort_settings)
if config_name in prefer_name_sections:
prefer_name_sections[config_name].sort(
rose.config.sort_settings)
config_name, section = self.mainwindow.launch_remove_dialog(
config_sect_dict, prefer_name_sections)
if config_name in self.data.config and section is not None:
start_stack_index = len(self.undo_stack)
group = (
rose.config_editor.STACK_GROUP_DELETE + "-" + str(time.time()))
config_data = self.data.config[config_name]
sect_data = config_data.sections.now[section]
ns = sect_data.metadata["full_ns"]
variable_sorter = lambda v, w: rose.config.sort_settings(
v.metadata['id'], w.metadata['id'])
variables = list(config_data.vars.now.get(section, []))
variables.sort(variable_sorter)
variables.reverse()
for variable in variables:
self.var_ops.remove_var(variable)
self.sect_ops.remove_section(config_name, section)
for stack_item in self.undo_stack[start_stack_index:]:
stack_item.group = group
def search_request(self, namespace, setting_id):
"""Handle a search for an id (hyperlink)."""
config_name, subsp = self.util.split_full_ns(self.data, namespace)
self.var_ops.search_for_var(config_name, setting_id)
def popup_panel_menu(self, base_ns, event):
"""Popup a page menu on the navigation panel."""
if base_ns is None:
namespace = None
else:
namespace = "/" + base_ns.lstrip("/")
ui_config_string = """<ui> <popup name='Popup'>"""
actions = [('New', gtk.STOCK_NEW,
rose.config_editor.TREE_PANEL_NEW_CONFIG),
('Add', gtk.STOCK_ADD,
rose.config_editor.TREE_PANEL_ADD_GENERIC),
('Add section', gtk.STOCK_ADD,
rose.config_editor.TREE_PANEL_ADD_SECTION),
('Autofix', gtk.STOCK_CONVERT,
rose.config_editor.TREE_PANEL_AUTOFIX_CONFIG),
('Clone', gtk.STOCK_COPY,
rose.config_editor.TREE_PANEL_CLONE_SECTION),
('Edit', gtk.STOCK_EDIT,
rose.config_editor.TREE_PANEL_EDIT_SECTION),
('Enable', gtk.STOCK_YES,
rose.config_editor.TREE_PANEL_ENABLE_GENERIC),
('Enable section', gtk.STOCK_YES,
rose.config_editor.TREE_PANEL_ENABLE_SECTION),
('Graph', gtk.STOCK_SORT_ASCENDING,
rose.config_editor.TREE_PANEL_GRAPH_SECTION),
('Ignore', gtk.STOCK_NO,
rose.config_editor.TREE_PANEL_IGNORE_GENERIC),
('Ignore section', gtk.STOCK_NO,
rose.config_editor.TREE_PANEL_IGNORE_SECTION),
('Info', gtk.STOCK_INFO,
rose.config_editor.TREE_PANEL_INFO_SECTION),
('Help', gtk.STOCK_HELP,
rose.config_editor.TREE_PANEL_HELP_SECTION),
('URL', gtk.STOCK_HOME,
rose.config_editor.TREE_PANEL_URL_SECTION),
('Remove', gtk.STOCK_DELETE,
rose.config_editor.TREE_PANEL_REMOVE_GENERIC),
('Remove section', gtk.STOCK_DELETE,
rose.config_editor.TREE_PANEL_REMOVE_SECTION)]
url = None
help = None
is_empty = (not self.data.config)
if namespace is not None:
config_name = self.util.split_full_ns(self.data, namespace)[0]
if self.data.config[config_name].is_preview:
return False
cloneable = self.is_ns_duplicate(namespace)
is_top = (namespace in self.data.config.keys())
is_fixable = bool(self.get_ns_errors(namespace))
has_content = self.data.helper.is_ns_content(namespace)
is_unsaved = self.data.helper.get_config_has_unsaved_changes(
config_name)
ignored_sections = self.data.helper.get_ignored_sections(namespace)
enabled_sections = self.data.helper.get_ignored_sections(
namespace, get_enabled=True)
is_latent = self.data.helper.get_ns_latent_status(namespace)
latent_sections = self.data.helper.get_latent_sections(namespace)
metadata, comments = self.get_ns_metadata_and_comments(namespace)
if is_latent:
for i, section in enumerate(latent_sections):
action_name = "Add {0}".format(i)
ui_config_string += '<menuitem action="{0}"/>'.format(
action_name)
actions.append(
(action_name, gtk.STOCK_ADD,
rose.config_editor.TREE_PANEL_ADD_SECTION.format(
section.replace("_", "__")))
)
ui_config_string += '<separator name="addlatentsep"/>'
ui_config_string += '<menuitem action="Add"/>'
if cloneable:
ui_config_string += '<separator name="clonesep"/>'
ui_config_string += '<menuitem action="Clone"/>'
ui_config_string += '<separator name="ignoresep"/>'
ui_config_string += '<menuitem action="Enable"/>'
ui_config_string += '<menuitem action="Ignore"/>'
ui_config_string += '<separator name="infosep"/>'
if has_content:
ui_config_string += '<menuitem action="Info"/>'
ui_config_string += '<menuitem action="Edit"/>'
ui_config_string += '<separator name="graphsep"/>'
ui_config_string += '<menuitem action="Graph"/>'
url = metadata.get(rose.META_PROP_URL)
help = metadata.get(rose.META_PROP_HELP)
if url is not None or help is not None:
ui_config_string += '<separator name="helpsep"/>'
if url is not None:
ui_config_string += '<menuitem action="URL"/>'
if help is not None:
ui_config_string += '<menuitem action="Help"/>'
if not is_empty:
ui_config_string += """<separator name="removesep"/>"""
ui_config_string += """<menuitem action="Remove"/>"""
if is_fixable:
ui_config_string += """<separator name="sepauto"/>
<menuitem action="Autofix"/>"""
else:
ui_config_string += '<menuitem action="Add"/>'
ui_config_string += '<separator name="ignoresep"/>'
ui_config_string += '<menuitem action="Enable"/>'
ui_config_string += '<menuitem action="Ignore"/>'
if namespace is None or (is_top or is_empty):
ui_config_string += """<separator name="newconfigsep"/>
<menuitem action="New"/>"""
ui_config_string += """</popup> </ui>"""
uimanager = gtk.UIManager()
actiongroup = gtk.ActionGroup('Popup')
actiongroup.add_actions(actions)
uimanager.insert_action_group(actiongroup, pos=0)
uimanager.add_ui_from_string(ui_config_string)
if namespace is None or (is_top or is_empty):
new_item = uimanager.get_widget('/Popup/New')
new_item.connect("activate", lambda b: self.create_request())
new_item.set_sensitive(not is_empty)
add_item = uimanager.get_widget('/Popup/Add')
add_item.connect("activate", lambda b: self.add_dialog(namespace))
add_item.set_sensitive(not is_empty)
enable_item = uimanager.get_widget('/Popup/Enable')
enable_item.connect(
"activate", lambda b: self.ignore_request(namespace, False))
enable_item.set_sensitive(not is_empty)
ignore_item = uimanager.get_widget('/Popup/Ignore')
ignore_item.connect(
"activate", lambda b: self.ignore_request(namespace, True))
ignore_item.set_sensitive(not is_empty)
if namespace is not None:
if is_latent:
for i, section in enumerate(latent_sections):
action_name = "Add {0}".format(i)
add_item = uimanager.get_widget("/Popup/" + action_name)
add_item._section = section
add_item.connect(
"activate",
lambda b: self.sect_ops.add_section(
config_name, b._section))
if cloneable:
clone_item = uimanager.get_widget('/Popup/Clone')
clone_item.connect("activate",
lambda b: self.copy_request(namespace))
if has_content:
edit_item = uimanager.get_widget('/Popup/Edit')
edit_item.connect("activate",
lambda b: self.edit_request(namespace))
info_item = uimanager.get_widget('/Popup/Info')
info_item.connect("activate",
lambda b: self.info_request(namespace))
graph_item = uimanager.get_widget("/Popup/Graph")
graph_item.connect("activate",
lambda b: self.graph_request(namespace))
if is_unsaved:
graph_item.set_sensitive(False)
if help is not None:
help_item = uimanager.get_widget('/Popup/Help')
help_title = namespace.split('/')[1:]
help_title = rose.config_editor.DIALOG_HELP_TITLE.format(
help_title)
search_function = lambda i: self.search_request(namespace, i)
help_item.connect(
"activate",
lambda b: rose.gtk.dialog.run_hyperlink_dialog(
gtk.STOCK_DIALOG_INFO, help, help_title,
search_function))
if url is not None:
url_item = uimanager.get_widget('/Popup/URL')
url_item.connect(
"activate", lambda b: webbrowser.open(url))
if is_fixable:
autofix_item = uimanager.get_widget('/Popup/Autofix')
autofix_item.connect("activate",
lambda b: self.fix_request(namespace))
remove_section_item = uimanager.get_widget('/Popup/Remove')
remove_section_item.connect(
"activate", lambda b: self.remove_request(namespace))
menu = uimanager.get_widget('/Popup')
menu.popup(None, None, None, event.button, event.time)
return False
def is_ns_duplicate(self, namespace):
"""Lookup whether a page can be cloned, via the metadata."""
sections = self.data.helper.get_sections_from_namespace(namespace)
if len(sections) != 1:
return False
section = sections.pop()
config_name = self.util.split_full_ns(self.data, namespace)[0]
sect_data = self.data.config[config_name].sections.now.get(section)
if sect_data is None:
return False
return (sect_data.metadata.get(rose.META_PROP_DUPLICATE) ==
rose.META_PROP_VALUE_TRUE)
def get_ns_errors(self, namespace):
"""Count the number of errors in a namespace."""
config_name = self.util.split_full_ns(self.data, namespace)[0]
config_data = self.data.config[config_name]
sections = self.data.helper.get_sections_from_namespace(namespace)
errors = 0
for section in sections:
errors += len(config_data.sections.get_sect(section).error)
real_data, latent_data = self.data.helper.get_data_for_namespace(
namespace)
errors += sum([len(v.error) for v in real_data + latent_data])
return errors
def get_ns_ignored(self, base_ns):
"""Lookup the ignored status of a namespace's data."""
namespace = "/" + base_ns.lstrip("/")
return self.data.helper.get_ns_ignored_status(namespace)
def get_can_show_page(self, latent_status, ignored_status, has_error):
"""Lookup whether to display a page based on the data status."""
if has_error or (not ignored_status and not latent_status):
# Always show this.
return True
show_ignored = self.data.page_ns_show_modes[
rose.config_editor.SHOW_MODE_IGNORED]
show_user_ignored = self.data.page_ns_show_modes[
rose.config_editor.SHOW_MODE_USER_IGNORED]
show_latent = self.data.page_ns_show_modes[
rose.config_editor.SHOW_MODE_LATENT]
if latent_status:
if not show_latent:
# Latent page, no latent pages allowed.
return False
# Latent page, latent pages allowed (but may be ignored...).
if ignored_status:
if ignored_status == rose.config.ConfigNode.STATE_USER_IGNORED:
if show_ignored or show_user_ignored:
# This is an allowed user-ignored page.
return True
# This is a user-ignored page that isn't allowed.
return False
# This is a trigger-ignored page that may be allowed.
return show_ignored
# This is a latent page that isn't ignored, latent pages allowed.
return True
| kaday/rose | lib/python/rose/config_editor/nav_panel_menu.py | Python | gpl-3.0 | 24,642 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-11-30 03:32
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('administrador', '0011_auto_20171129_2215'),
]
operations = [
migrations.AlterField(
model_name='respuesta',
name='fecha',
field=models.DateTimeField(blank=True, default=datetime.datetime(2017, 11, 29, 22, 32, 35, 304385), null=True),
),
]
| adbetin/organico-cooperativas | administrador/migrations/0012_auto_20171129_2232.py | Python | gpl-3.0 | 548 |
from django.db import models
from wagtail.core.models import Page
from wagtail.admin.edit_handlers import FieldPanel, StreamFieldPanel
from wagtail.core.fields import StreamField
from wagtail.core import blocks
from wagtail.images.blocks import ImageChooserBlock
from test.anotherapp.models import HomePage
class AnotherTest(Page):
"""
This model is entirely NOT translated.
This could be normal for some kind of page that's individual to every
language.
"""
field_that_is_not_translated = models.CharField(max_length=255, null=True, blank=True)
content_panels = [
FieldPanel("field_that_is_not_translated"),
StreamFieldPanel("body"),
]
body = StreamField(
[
("heading", blocks.CharBlock(classname="full title")),
("paragraph", blocks.RichTextBlock()),
("image", ImageChooserBlock()),
],
verbose_name="body",
blank=True,
help_text="The main contents of the page",
)
def get_context(self, request):
context = super().get_context(request)
context['test'] = self.get_children().live()
return context
class Test(Page):
"""
This model is translated
"""
field_that_is_translated = models.CharField(max_length=255, null=True, blank=True)
field_that_is_not_translated = models.CharField(max_length=255, null=True, blank=True)
class InheritsFromHomePageTranslated(HomePage):
"""
This model is translated but inherits from a non-translated parent.
This could be normal for some kind of page that's individual to every
language.
"""
field_that_is_translated = models.CharField(max_length=255, null=True, blank=True)
field_that_is_not_translated = models.CharField(max_length=255, null=True, blank=True)
class InheritsFromPageTranslated(Page):
"""
This model is translated but inherits from a non-translated parent.
This could be normal for some kind of page that's individual to every
language.
"""
field_that_is_translated = models.CharField(max_length=255, null=True, blank=True)
field_that_is_not_translated = models.CharField(max_length=255, null=True, blank=True)
| benjaoming/django-modeltranslation-wagtail | test/transapp/models.py | Python | gpl-3.0 | 2,238 |
# daisy-extract
# Copyright (C) 2016 James Scholes
# This program is free software, licensed under the terms of the GNU General Public License (version 3 or later).
# See the file LICENSE for more details.
from collections import namedtuple
import argparse
import glob
import logging
import os
import platform
import shutil
import sys
from bs4 import BeautifulSoup
from natsort import natsorted
__version__ = '0.1'
is_windows = 'windows' in platform.system().lower()
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
log_stream = logging.StreamHandler(sys.stdout)
log_stream.setFormatter(logging.Formatter('%(levelname)s: %(message)s'))
logger.addHandler(log_stream)
HTML_PARSER = 'html.parser'
NCC_FILENAME = 'NCC.HTML'
MASTER_SMIL_FILENAME = 'MASTER.SMIL'
SMIL_GLOB = '*.[sS][mM][iI][lL]'
BookMetadata = namedtuple('BookMetadata', ('authors', 'title'))
class InvalidDAISYBookError(Exception):
pass
class ExtractMetadataError(Exception):
pass
def main():
logger.info('daisy-extract version {0}'.format(__version__))
cli_args = parse_command_line()
if cli_args.debug:
logger.setLevel(logging.DEBUG)
encoding = getattr(cli_args, 'encoding', 'utf-8')
input_directory = os.path.abspath(cli_args.input_directory)
output_directory = os.path.abspath(cli_args.output_directory)
if not os.path.exists(input_directory) or not os.path.isdir(input_directory):
exit_with_error('{0} does not exist or is not a directory'.format(input_directory))
try:
metadata = create_metadata_object_from_ncc(find_ncc_path(input_directory), encoding=encoding)
except InvalidDAISYBookError as e:
exit_with_error('The contents of {0} don\'t seem to be a valid DAISY 2.02 book: {1}'.format(input_directory, str(e)))
except ExtractMetadataError as e:
exit_with_error(str(e))
output_directory = os.path.join(output_directory, make_safe_filename(metadata.authors), make_safe_filename(metadata.title))
logger.info('Extracting content of book: {0} by {1} from {2} to {3}'.format(metadata.title, metadata.authors, input_directory, output_directory))
source_audio_files = []
destination_audio_files = []
for doc in find_smil_documents(input_directory):
parsed_doc = parse_smil_document(doc, encoding=encoding)
try:
section_title = find_document_title(parsed_doc)
logger.debug('Found SMIL document: {0}'.format(section_title))
except ExtractMetadataError as e:
            exit_with_error('Could not retrieve metadata from SMIL document ({0}): {1}'.format(doc, str(e)))
section_audio_files = get_audio_filenames_from_smil(parsed_doc)
logger.debug('SMIL document spans {0} audio file(s)'.format(len(section_audio_files)))
for audio_file in section_audio_files:
source_audio_files.append((section_title, os.path.join(input_directory, audio_file)))
logger.info('Copying {0} audio files'.format(len(source_audio_files)))
try:
os.makedirs(output_directory)
logger.debug('Created directory: {0}'.format(output_directory))
except (FileExistsError, PermissionError):
pass
track_number = 1
for section_name, file_path in source_audio_files:
destination_filename = '{0:02d} - {1}.{2}'.format(track_number, make_safe_filename(section_name), os.path.splitext(file_path)[-1][1:].lower())
destination_path = os.path.join(output_directory, destination_filename)
logger.debug('Copying file: {0} to: {1}'.format(file_path, destination_path))
if is_windows:
destination_path = add_path_prefix(destination_path)
shutil.copyfile(file_path, destination_path)
destination_audio_files.append(os.path.split(destination_path)[-1])
track_number += 1
logger.info('Creating M3U playlist')
playlist_filename = '{0}.m3u'.format(make_safe_filename(metadata.title))
playlist_path = os.path.join(output_directory, playlist_filename)
logger.debug('M3U playlist path: {0}'.format(playlist_path))
if is_windows:
playlist_path = add_path_prefix(playlist_path)
with open(playlist_path, 'w', newline=None) as f:
f.write('\n'.join(destination_audio_files))
logger.info('Done!')
def parse_command_line():
parser = argparse.ArgumentParser()
parser.add_argument('-i', '--input-directory', nargs='?', required=True)
parser.add_argument('-o', '--output-directory', nargs='?', required=True)
parser.add_argument('-e', '--encoding', nargs='?', required=False)
parser.add_argument('-d', '--debug', dest='debug', action='store_true', default=False, help='Enable debug logging')
args = parser.parse_args()
return args
def exit_with_error(message):
logger.error(message)
sys.exit(1)
def find_ncc_path(directory):
filenames = (NCC_FILENAME, NCC_FILENAME.lower())
for filename in filenames:
path = os.path.join(directory, filename)
if os.path.exists(path) and os.path.isfile(path):
logger.debug('Found NCC file: {0}'.format(path))
return path
raise InvalidDAISYBookError('Could not find NCC file')
def find_smil_documents(directory):
documents = list(filter(lambda smil: not smil.upper().endswith(MASTER_SMIL_FILENAME), glob.iglob(os.path.join(directory, SMIL_GLOB))))
if documents:
logger.debug('Found {0} SMIL documents in directory'.format(len(documents)))
return natsorted(documents)
else:
raise InvalidDAISYBookError('No SMIL documents found')
def create_metadata_object_from_ncc(ncc_path, encoding='utf-8'):
with open(ncc_path, 'r', encoding=encoding) as f:
ncc = BeautifulSoup(f, HTML_PARSER)
title_tag = ncc.find('meta', attrs={'name': 'dc:title'})
if title_tag is None:
raise ExtractMetadataError('The title of the DAISY book could not be found')
title = title_tag.attrs.get('content')
if not title:
raise ExtractMetadataError('The title of the DAISY book is blank')
creator_tags = ncc.find_all('meta', attrs={'name': 'dc:creator'})
if not creator_tags:
raise ExtractMetadataError('No authors are listed in the DAISY book')
authors = ', '.join([tag.attrs.get('content') for tag in creator_tags])
return BookMetadata(authors, title)
def parse_smil_document(path, encoding='utf-8'):
logger.debug('Parsing SMIL document: {0}'.format(os.path.split(path)[-1]))
with open(path, 'r', encoding=encoding) as f:
return BeautifulSoup(f, HTML_PARSER)
def find_document_title(doc):
title_tag = doc.find('meta', attrs={'name': 'title'})
if title_tag is None:
raise ExtractMetadataError('Unable to extract title from SMIL document')
title = title_tag.attrs.get('content')
if not title:
raise ExtractMetadataError('SMIL document has no title')
return title
def get_audio_filenames_from_smil(smil):
audio_files = [audio.attrs.get('src') for audio in smil.find_all('audio')]
unique_audio_files = []
for file in audio_files:
if file not in unique_audio_files:
unique_audio_files.append(file)
return tuple(unique_audio_files)
def add_path_prefix(path):
return '\\\\?\\{0}'.format(path)
def make_safe_filename(filename):
# strip out any disallowed chars and replace with underscores
disallowed_ascii = [chr(i) for i in range(0, 32)]
disallowed_chars = '<>:"/\\|?*^{0}'.format(''.join(disallowed_ascii))
translator = dict((ord(char), '_') for char in disallowed_chars)
safe_filename = filename.replace(': ', ' - ').translate(translator).rstrip('. ')
return safe_filename
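# Illustrative example, worked through by hand from the rules above:
#   make_safe_filename('Chapter 1: The End?') -> 'Chapter 1 - The End_'
# (': ' becomes ' - ', the disallowed '?' becomes '_', and any trailing
# dots or spaces are stripped).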
if __name__ == '__main__':
main()
| jscholes/daisy-extract | extract.py | Python | gpl-3.0 | 7,728 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
This file is part of XBMC Mega Pack Addon.
Copyright (C) 2014 Wolverine ([email protected])
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program. If not, see http://www.gnu.org/licenses/gpl-3.0.html
"""
class Countries_Tajikistan():
'''Class that manages this specific menu context.'''
def open(self, plugin, menu):
menu.add_xplugins(plugin.get_xplugins(dictionaries=["Channels",
"Events", "Live", "Movies", "Sports", "TVShows"],
countries=["Tajikistan"])) | xbmcmegapack/plugin.video.megapack.dev | resources/lib/menus/home_countries_tajikistan.py | Python | gpl-3.0 | 1,117 |
from core.testcases import APIModelViewSetTestCase, ModelViewSetTestCase, get_permissions_from_compact
from authentication.models import User, UserType
class UserViewSetTestCase(APIModelViewSetTestCase):
model = User
permissions = get_permissions_from_compact({
'list': '...a', # Only admin can list
'retrieve': '.u.a', # Only user and admin can retrieve
# 'update': '.u.a', # TODO Only user and admin can update ??
})
    def create_object(self, user: User = None) -> User:
return self.users['user']
class UserTypeViewSetTestCase(ModelViewSetTestCase):
model = UserType
permissions = get_permissions_from_compact({
'list': 'puoa', # Everyone can list
'retrieve': 'puoa', # Everyone can retrieve
'create': '...a', # Only admin can create
'update': '...a', # Only admin can update
'delete': '...a', # Only admin can delete
})
# TODO Test user retrieval from API
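# Note on the compact permission strings above (an assumed reading based on the
# letters used): each of the four positions appears to map to public / user /
# other / admin access, with '.' denying that role, so '.u.a' grants an action
# to the owning user and admins only.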
| simde-utc/woolly-api | authentication/tests.py | Python | gpl-3.0 | 999 |
# from mainsite.models import Web_Region
context = {'title': 'my static title',
'description': 'my static description',
'data': 'my static data',
}
def get_context(request):
# region_list = Web_Region.objects.values_list('region_name', flat=True)
context.update({'data': 'my dynamic data'})
return context | sstacha/uweb-vagrant | files/docroot/files/test.data.py | Python | gpl-3.0 | 364 |
#!/usr/bin/python
"""
DES-CHAN: A Framework for Channel Assignment Algorithms for Testbeds
This module provides a class to represent network graphs and conflict graphs.
Authors: Matthias Philipp <[email protected]>,
Felix Juraschek <[email protected]>
Copyright 2008-2013, Freie Universitaet Berlin (FUB). All rights reserved.
These sources were developed at the Freie Universitaet Berlin,
Computer Systems and Telematics / Distributed, embedded Systems (DES) group
(http://cst.mi.fu-berlin.de, http://www.des-testbed.net)
-------------------------------------------------------------------------------
This program is free software: you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation, either version 3 of the License, or (at your option) any later
version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with
this program. If not, see http://www.gnu.org/licenses/ .
--------------------------------------------------------------------------------
For further information and questions please use the web site
http://www.des-testbed.net
"""
import re
import sys
class Graph:
def __init__(self, vertices=[]):
# initialize internal data structures
self.values = dict()
self.distances = dict()
# set vertex names and distances
for vertex in vertices:
self.add_vertex(vertex)
def add_vertex(self, new_vertex):
"""Adds the given vertex to the graph.
"""
# do nothing if vertex already exists
if new_vertex in self.get_vertices():
return
# otherwise set up data structures for new vertex
self.values[new_vertex] = dict()
self.distances[new_vertex] = dict()
for old_vertex in self.get_vertices():
self.set_edge_value((old_vertex, new_vertex), None, False)
self.set_distance(old_vertex, new_vertex, sys.maxint)
# distance to itself is 0
self.set_distance(new_vertex, new_vertex, 0)
def remove_vertex(self, vertex):
"""Removes the given vertex from the graph.
"""
del self.values[vertex]
del self.distances[vertex]
for v in self.get_vertices():
del self.values[v][vertex]
del self.distances[v][vertex]
def get_vertices(self):
"""Returns a list that contains all vertices of the graph.
"""
return set(self.values.keys())
def set_edge_value(self, edge, value, update=True):
"""Sets the value of the given edge. The edge is represented by a tuple
of two vertices.
"""
v1, v2 = edge
self.values[v1][v2] = value
# we implement an undirected graph
self.values[v2][v1] = value
# update distance information
# None, "", False, and 0 correspond to no edge
if value:
self.set_distance(v1, v2, 1)
# other shortest paths may have changed
if update:
self.update_distances()
def set_distance(self, v1, v2, d):
"""Sets the distance between the two vertices.
"""
self.distances[v1][v2] = d
# we implement an undirected graph
self.distances[v2][v1] = d
def get_edge_value(self, edge):
"""Returns the value of the given edge. The edge is represented by a tuple
of two vertices.
"""
v1, v2 = edge
return self.values[v1][v2]
def get_distance(self, v1, v2):
"""Returns the distance between v1 and v2.
"""
return self.distances[v1][v2]
    def get_edges(self, get_all=False):
        """Returns a dictionary that contains all edges as keys and the
        corresponding edge values as values. Only edges that have a value are
        returned. By default the graph is assumed to be undirected; duplicate
        (reversed) edges are only included if the optional parameter get_all
        is True.
        """
        edges = dict()
        remaining_vertices = self.get_vertices()
        for v1 in self.get_vertices():
            for v2 in remaining_vertices:
                value = self.get_edge_value((v1, v2))
                # None, "", False, and 0 correspond to no edge
                if value:
                    edges[(v1, v2)] = value
            # graph is assumed to be undirected, therefore discard duplicate
            # edges if not explicitly requested; remove v1 only after the
            # inner loop so the set is not modified while being iterated
            if not get_all:
                remaining_vertices.remove(v1)
        return edges
def merge(self, graph):
"""Merges the current graph with the specified graph. The new graph
contains the union of both vertex sets and the corresponding edge
values.
"""
# add missing vertices
for vertex in graph.get_vertices() - self.get_vertices():
self.add_vertex(vertex)
# set edge values
for edge, edge_value in graph.get_edges().items():
self.set_edge_value(edge, edge_value, False)
self.update_distances()
def get_adjacency_matrix(self):
"""Returns the graph's adjacency matrix as a formatted string.
"""
vertices = self.values.keys()
maxlen = 4
# get maximum length of vertex names for proper layout
for vertex in vertices:
if len(str(vertex)) > maxlen:
maxlen = len(str(vertex))
# print column heads
matrix = "".rjust(maxlen) + " |"
for vertex in vertices:
matrix += " " + str(vertex).rjust(maxlen) + " |"
# print without trailing |
matrix = matrix[:-1] + "\n"
# generate row separator
matrix += "-" * maxlen + "-+"
for i in range(len(vertices)):
matrix += "-" + "-" * maxlen + "-+"
# print without trailing +
matrix = matrix[:-1] + "\n"
# print rows
for v1 in vertices:
matrix += str(v1).ljust(maxlen) + " |"
for v2 in vertices:
matrix += " " + self._get_edge_value_as_text((v1, v2)).rjust(maxlen) + " |"
# print without trailing |
matrix = matrix[:-1] + "\n"
return matrix
def get_graphviz(self, label=""):
"""Returns a string representation of the graph in the dot language from
the graphviz project.
"""
left_vertices = set(self.values.keys())
graph = "Graph G {\n"
if label != "":
graph += "\tgraph [label = \"%s\", labelloc=t]\n" % label
for v1 in self.values.keys():
for v2 in left_vertices:
if self.get_edge_value((v1, v2)):
graph += "\t\"" + str(v1) + "\" -- \"" + str(v2) + "\" "
graph += "[label = \"" + str(self.get_edge_value((v1, v2))) + "\"]\n"
# undirected graph, therefore discard double connections
left_vertices.remove(v1)
graph += "}\n"
return graph
def write_to_file(self, file_name, use_graphviz=False):
"""Writes a textual representation of the graph to the specified file.
If the optional parameter use_graphviz is True, the graph is represented
in the dot language from the graphviz project.
"""
file = open(file_name, 'w')
if use_graphviz:
file.write(self.get_graphviz())
else:
file.write(self.get_adjacency_matrix())
file.close()
def read_from_dotfile(self, file_name):
"""Reads the graph from a graphviz dot file.
"""
file = open(file_name, 'r')
# clear current data
self.__init__()
# match following lines
# "t9-035" -- "t9-146" [label = "1"]
        edge_re = re.compile(r'\s*"(.+)" -- "(.+)" \[label = "(.+)"\]')
for line in file:
edge_ma = edge_re.match(line)
if edge_ma:
v1 = edge_ma.group(1)
v2 = edge_ma.group(2)
value = edge_ma.group(3)
self.add_vertex(v1)
self.add_vertex(v2)
self.set_edge_value((v1, v2), value, False)
file.close()
self.update_distances()
def read_from_file(self, file_name):
"""Reads the graph from a file containing an adjacency matrix as
generated by get_adjacency_matrix() or write_to_file(). The dot format
is not supported.
"""
file = open(file_name, 'r')
# line counter
        i = 0
vertices = list()
for line in file:
i += 1
# first line contains the vertex names
if i == 1:
# first element is empty, therefore discard it
for vertex in line.split("|")[1:]:
vertices.append(vertex.strip())
# clear current data and set new vertices
self.__init__(vertices)
if i > 2:
row = line.split("|")
# first element is the vertex name
v1 = row[0].strip()
# remaining elements are edge values
row = row[1:]
for v2 in vertices:
value = row[vertices.index(v2)].strip()
if value == '':
value = None
self.set_edge_value((v1, v2), value, False)
file.close()
self.update_distances()
def update_distances(self):
"""Updates the distance matrix with the number of hops between all
vertex pairs.
"""
# Floyd Warshall algorithm
# calculate all shortest paths
for k in self.get_vertices():
remaining_vertices = self.get_vertices()
for v1 in self.get_vertices():
for v2 in remaining_vertices:
d = min(self.get_distance(v1, v2),
self.get_distance(v1, k) + self.get_distance(k, v2))
self.set_distance(v1, v2, d)
remaining_vertices.remove(v1)
def get_neighbors(self, vertex):
"""Returns a set that contains all direct neighbors of the given vertex.
"""
neighbors = set()
for v1, v2 in self.get_edges().keys():
if v1 == vertex:
neighbors.add(v2)
elif v2 == vertex:
neighbors.add(v1)
return neighbors
def copy(self):
"""Returns a new Graph object that contains the same vertices and edges.
"""
remaining_vertices = self.get_vertices()
g = Graph(list(remaining_vertices))
for v1 in self.get_vertices():
for v2 in remaining_vertices:
g.set_edge_value((v1, v2), self.get_edge_value((v1, v2)))
g.set_distance(v1, v2, self.get_distance(v1, v2))
remaining_vertices.remove(v1)
return g
    def copy_fast(self):
        """Returns a new Graph object that contains the same vertices and
        edges. Faster than copy() because the distance matrix is copied
        instead of being recomputed.
        """
        g = Graph(list(self.get_vertices()))
        for (v1, v2), value in self.get_edges().iteritems():
            g.set_edge_value((v1, v2), value, update=False)
        # copy the complete distance matrix so that multi-hop distances
        # remain correct without rerunning update_distances()
        for v1 in self.get_vertices():
            for v2 in self.get_vertices():
                g.set_distance(v1, v2, self.get_distance(v1, v2))
        return g
def _get_edge_value_as_text(self, edge):
"""Returns a textual representation of the value of the given edge. The
edge is represented by a tuple of two vertices.
"""
v1, v2 = edge
if not self.values[v1][v2]:
return ""
else:
return str(self.values[v1][v2])
class ConflictGraphVertex:
def __init__(self, conflict_graph, nw_graph_edge):
self.conflict_graph = conflict_graph
self.nw_graph_edge = nw_graph_edge
self.channels = None
def __str__(self):
return "%s_%s" % (self.nw_graph_edge)
def get_channel(self):
"""Returns the channel of the link in the network graph that corresponds
to this vertex.
"""
return int(self.conflict_graph.network_graph.get_edge_value(self.nw_graph_edge))
def set_channel(self, channel):
"""Sets the channel in the network graph and computes the resulting
conflict graph.
"""
# update network graph
self.conflict_graph.network_graph.set_edge_value(self.nw_graph_edge,
str(channel))
# update conflict graph
        # NOTE the change: we do not have to recalculate ALL edges; updating
        # just the ones adjacent to the changed edge is enough. This gives us
        # O(n) instead of O(n*n).
self.conflict_graph.update_edge(self)
# self.conflict_graph.update_edges()
def get_nw_graph_neighbor(self, node_name):
"""Returns the neigbor in the network graph corresponding to the link.
"""
if node_name not in self.nw_graph_edge:
return None
if self.nw_graph_edge[0] == node_name:
return self.nw_graph_edge[1]
else:
return self.nw_graph_edge[0]
class ConflictGraph(Graph):
def __init__(self, network_graph, interference_model):
# store the original network graph for later reference
self.network_graph = network_graph
self.interference_model = interference_model
vertices = set()
# each edge in the network graph corresponds to a vertex in the conflict
# graph
for edge in network_graph.get_edges().keys():
vertices.add(ConflictGraphVertex(self, edge))
# call constructor of the super-class with new vertex set
Graph.__init__(self, vertices)
# set edges according to interference model
self.update_edges()
def update_edges(self):
"""Updates all edges of the ConflictGraph regarding the current channel
assignment and the applied interference model.
"""
remaining_vertices = self.get_vertices()
for v1 in self.get_vertices():
for v2 in remaining_vertices:
# get edge value according to the interference model
value = self.interference_model.get_interference(self.network_graph,
v1.nw_graph_edge,
v2.nw_graph_edge)
self.set_edge_value((v1, v2), value, False)
# graph is undirected
remaining_vertices.remove(v1)
def update_edge(self, cg_vertex):
"""Updates all edges that are adjacent to the supplied cg_vertex.
"""
        for v2 in self.get_vertices():
# get edge value according to the interference model
value = self.interference_model.get_interference(self.network_graph,
cg_vertex.nw_graph_edge,
v2.nw_graph_edge)
self.set_edge_value((cg_vertex, v2), value, False)
def get_vertices_for_node(self, node_name):
"""Returns a set containing all vertices that correspond to links that
are incident to the given node.
"""
vertices = set()
for vertex in self.get_vertices():
if vertex.nw_graph_edge[0] == node_name or \
vertex.nw_graph_edge[1] == node_name:
vertices.add(vertex)
return vertices
def get_vertex(self, node1, node2):
"""Returns the vertex that corresponds to the link between the two given
node names, or None, if such vertex does not exist.
"""
for vertex in self.get_vertices():
if (vertex.nw_graph_edge[0] == node1 and \
vertex.nw_graph_edge[1] == node2) or \
(vertex.nw_graph_edge[0] == node2 and \
vertex.nw_graph_edge[1] == node1):
return vertex
return None
def get_interference_sum(self):
"""Returns the overall interference which is calculated by summing up
all edge values.
"""
        total = 0
        for value in self.get_edges().values():
            total += value
        return total
def get_vertex_names(self):
vertex_names = set()
for vertex in self.get_vertices():
vertex_names.add(str(vertex))
return vertex_names
def update(self, network_graph):
old_edges = self.network_graph.get_edges()
new_edges = network_graph.get_edges()
# do nothing if graphs are equal
if new_edges == old_edges:
return
old_edges_keys = set(old_edges.keys())
new_edges_keys = set(new_edges.keys())
# assign new network graph
self.network_graph = network_graph
# update conflict graph
for new_edge in new_edges_keys - old_edges_keys:
# create a new conflict graph vertex for each new network graph edge
self.add_vertex(ConflictGraphVertex(self, new_edge))
for obsolete_edge in old_edges_keys - new_edges_keys:
# remove conflict graph vertices for obsolete edges
self.remove_vertex(self.get_vertex(obsolete_edge[0],
obsolete_edge[1]))
self.update_edges()
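# Usage sketch for ConflictGraph (illustrative only; any object exposing
# get_interference(network_graph, edge1, edge2) can serve as the
# interference model; the one below is hypothetical):
#
#     class UnitInterference(object):
#         def get_interference(self, nw_graph, edge1, edge2):
#             # hypothetical model: distinct links sharing a node interfere
#             if edge1 != edge2 and set(edge1) & set(edge2):
#                 return 1
#             return 0
#
#     nw = Graph(["n1", "n2", "n3"])
#     nw.set_edge_value(("n1", "n2"), "40")
#     nw.set_edge_value(("n2", "n3"), "44")
#     cg = ConflictGraph(nw, UnitInterference())
#     print cg.get_interference_sum()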
# this only runs if the module was *not* imported
if __name__ == '__main__':
g = Graph(["a", "b", "c", "d", "e"])
g.set_edge_value(("a", "b"), 40)
g.set_edge_value(("b", "c"), 40)
g.set_edge_value(("c", "d"), 40)
g.set_edge_value(("d", "e"), 40)
print g.get_adjacency_matrix()
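    # The distance matrix is maintained with Floyd-Warshall, so multi-hop
    # hop counts are available immediately after the edges are set
    # (illustrative checks):
    print g.get_distance("a", "e")
    print g.get_neighbors("c")
    # dot-language export, e.g. for rendering with `dot -Tpng`:
    print g.get_graphviz(label="chain example")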
| des-testbed/des_chan | graph.py | Python | gpl-3.0 | 18,151 |
#!/usr/bin/env python
"""Publish coverage results online via coveralls.io
Puts your coverage results on coveralls.io for everyone to see.
It makes a custom report for the data generated by the coverage.py package and sends it to the `json API`_ of the coveralls.io service.
All python files in your coverage analysis are posted to this service along with coverage stats,
so please make sure you're not ruining your own security!
Usage:
coveralls [options]
coveralls debug [options]
Debug mode doesn't send anything, just outputs json to stdout, useful for development.
It also forces verbose output.
Global options:
-h --help Display this help
-v --verbose Print extra info, True for debug command
Example:
$ coveralls
Submitting coverage to coveralls.io...
Coverage submitted!
Job #38.1
https://coveralls.io/jobs/92059
"""
import logging
from docopt import docopt
from coveralls import Coveralls
from coveralls.api import CoverallsException
log = logging.getLogger('coveralls')
def main(argv=None):
options = docopt(__doc__, argv=argv)
if options['debug']:
options['--verbose'] = True
level = logging.DEBUG if options['--verbose'] else logging.INFO
log.addHandler(logging.StreamHandler())
log.setLevel(level)
    try:
        coveralls_api = Coveralls()
        if not options['debug']:
            log.info("Submitting coverage to coveralls.io...")
            result = coveralls_api.wear()
            log.info("Coverage submitted!")
            log.info(result['message'])
            log.info(result['url'])
            log.debug(result)
        else:
            log.info("Testing coveralls-python...")
            coveralls_api.wear(dry_run=True)
except KeyboardInterrupt: # pragma: no cover
log.info('Aborted')
except CoverallsException as e:
log.error(e)
except Exception: # pragma: no cover
raise
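# Entry-point guard (illustrative; the installed `coveralls` console script
# normally provides the command-line entry point):
if __name__ == '__main__':
    main()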
| phimpme/generator | Phimpme/site-packages/coveralls/cli.py | Python | gpl-3.0 | 1,907 |
# -*- coding: utf8 -*-
from yanntricks import *
def UQZooGFLNEq():
pspict,fig = SinglePicture("UQZooGFLNEq")
pspict.dilatation_X(1)
pspict.dilatation_Y(1)
mx=-5
Mx=5
x=var('x')
f=phyFunction( arctan(x) ).graph(mx,Mx)
seg1=Segment( Point(mx,pi/2),Point(Mx,pi/2) )
seg2=Segment( Point(mx,-pi/2),Point(Mx,-pi/2) )
seg1.parameters.color="red"
seg2.parameters.color="red"
seg1.parameters.style="dashed"
seg2.parameters.style="dashed"
pspict.DrawGraphs(f,seg1,seg2)
pspict.axes.single_axeY.axes_unit=AxesUnit(pi/2,'')
pspict.DrawDefaultAxes()
fig.no_figure()
fig.conclude()
fig.write_the_file()
| LaurentClaessens/mazhe | src_yanntricks/yanntricksUQZooGFLNEq.py | Python | gpl-3.0 | 671 |
import argparse
from plotWheels.helical_wheel import helical_wheel
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Generate Helical Wheel")
parser.add_argument("--sequence",dest="sequence",type=str)
parser.add_argument("--seqRange",dest="seqRange",type=int,default=1)
parser.add_argument("--t_size",dest="t_size",type=int,default=32)
parser.add_argument("--rotation",dest="rotation",type=int,default=90)
parser.add_argument("--numbering",action="store_true",help="numbering for helical wheel")
    parser.add_argument("--output", dest="output", type=argparse.FileType("wb"), default="_helicalwheel.png")
#### circle colors
parser.add_argument("--f_A",dest="f_A", default="#ffcc33")
parser.add_argument("--f_C",dest="f_C",default="#b5b5b5")
parser.add_argument("--f_D",dest="f_D",default="#db270f")
parser.add_argument("--f_E",dest="f_E",default="#db270f")
parser.add_argument("--f_F",dest="f_F",default="#ffcc33")
parser.add_argument("--f_G",dest="f_G",default="#b5b5b5")
parser.add_argument("--f_H",dest="f_H",default="#12d5fc")
parser.add_argument("--f_I",dest="f_I",default="#ffcc33")
parser.add_argument("--f_K",dest="f_K",default="#12d5fc")
parser.add_argument("--f_L",dest="f_L",default="#ffcc33")
parser.add_argument("--f_M",dest="f_M",default="#ffcc33")
parser.add_argument("--f_N",dest="f_N",default="#b5b5b5")
parser.add_argument("--f_P",dest="f_P",default="#ffcc33")
parser.add_argument("--f_Q",dest="f_Q",default="#b5b5b5")
parser.add_argument("--f_R",dest="f_R",default="#12d5fc")
parser.add_argument("--f_S",dest="f_S",default="#b5b5b5")
parser.add_argument("--f_T",dest="f_T",default="#b5b5b5")
parser.add_argument("--f_V",dest="f_V",default="#ffcc33")
parser.add_argument("--f_W",dest="f_W",default="#ffcc33")
parser.add_argument("--f_Y",dest="f_Y",default="#b5b5b5")
### text colors
parser.add_argument("--t_A",dest="t_A",default="k")
parser.add_argument("--t_C",dest="t_C",default="k")
parser.add_argument("--t_D",dest="t_D",default="w")
parser.add_argument("--t_E",dest="t_E",default="w")
parser.add_argument("--t_F",dest="t_F",default="k")
parser.add_argument("--t_G",dest="t_G",default="k")
parser.add_argument("--t_H",dest="t_H",default="k")
parser.add_argument("--t_I",dest="t_I",default="k")
parser.add_argument("--t_K",dest="t_K",default="k")
parser.add_argument("--t_L",dest="t_L",default="k")
parser.add_argument("--t_M",dest="t_M",default="k")
parser.add_argument("--t_N",dest="t_N",default="k")
parser.add_argument("--t_P",dest="t_P",default="k")
parser.add_argument("--t_Q",dest="t_Q",default="k")
parser.add_argument("--t_R",dest="t_R",default="k")
parser.add_argument("--t_S",dest="t_S",default="k")
parser.add_argument("--t_T",dest="t_T",default="k")
parser.add_argument("--t_V",dest="t_V",default="k")
parser.add_argument("--t_W",dest="t_W",default="k")
parser.add_argument("--t_Y",dest="t_Y",default="k")
args = parser.parse_args()
f_colors = [args.f_A,args.f_C,args.f_D,args.f_E,args.f_F,args.f_G,args.f_H,args.f_I,args.f_K,
args.f_L,args.f_M,args.f_N,args.f_P,args.f_Q,args.f_R,args.f_S,args.f_T,args.f_V,
args.f_W,args.f_Y]
t_colors = [args.t_A,args.t_C,args.t_D,args.t_E,args.t_F,args.t_G,args.t_H,args.t_I,args.t_K,
args.t_L,args.t_M,args.t_N,args.t_P,args.t_Q,args.t_R,args.t_S,args.t_T,args.t_V,
args.t_W,args.t_Y]
colors = [f_colors, t_colors]
tmp_file = "./tmp.png"
helical_wheel(sequence=args.sequence,
colorcoding=colors[0],
text_color=colors[1],
seqRange=args.seqRange,
t_size=args.t_size,
rot=args.rotation,
numbering=args.numbering,
filename=tmp_file
)
    with open(tmp_file, "rb") as f:
for line in f:
args.output.write(line)
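# Example invocation (hypothetical sequence and output name):
#
#     python generateHelicalWheel.py --sequence KLALKLALKALKAALKLA \
#         --rotation 90 --numbering --output wheel.png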
| TAMU-CPT/galaxy-tools | tools/helicalWheel/generateHelicalWheel.py | Python | gpl-3.0 | 4,144 |
# -*- coding: utf-8 -*-
from django.db import models
from django.utils.translation import ugettext_lazy as _
from redsolutioncms.models import CMSSettings, BaseSettings, BaseSettingsManager
FIELD_TYPES = (
('BooleanField', _('Checkbox')),
('CharField', _('Character field')),
    ('TextField', _('Text area')),
('EmailField', _('Email field')),
)
class FeedbackSettingsManager(BaseSettingsManager):
def get_settings(self):
if self.get_query_set().count():
return self.get_query_set()[0]
else:
feedback_settings = self.get_query_set().create()
return feedback_settings
class FeedbackSettings(BaseSettings):
# use_direct_view = models.BooleanField(
# verbose_name=_('Use dedicated view to render feedback page'),
# default=True
# )
    # Temporarily hardcoded
use_direct_view = True
use_custom_form = models.BooleanField(verbose_name=_('Use custom feedback form'),
default=False)
objects = FeedbackSettingsManager()
class FormField(models.Model):
feedback_settings = models.ForeignKey('FeedbackSettings')
field_type = models.CharField(verbose_name=_('Type of field'),
choices=FIELD_TYPES, max_length=255)
field_name = models.CharField(verbose_name=_('Verbose name of field'),
max_length=255)
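# Usage sketch (illustrative; assumes a configured Django project with this
# app installed):
#
#     feedback_settings = FeedbackSettings.objects.get_settings()
#     feedback_settings.formfield_set.create(field_type='CharField',
#                                            field_name='Your name')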
| redsolution/django-simple-feedback | feedback/redsolution_setup/models.py | Python | gpl-3.0 | 1,391 |
# -*- coding: utf-8 -*-
"""
oauthlib.oauth1.rfc5849
~~~~~~~~~~~~~~~~~~~~~~~
This module is an implementation of various logic needed
for signing and checking OAuth 1.0 RFC 5849 requests.
"""
from __future__ import absolute_import, unicode_literals
import logging
import sys
import time
try:
    import urlparse
except ImportError:
    import urllib.parse as urlparse
from ...common import Request, urlencode, generate_nonce
from ...common import generate_timestamp, to_unicode
from . import parameters, signature, utils
log = logging.getLogger("oauthlib")
if sys.version_info[0] == 3:
    bytes_type = bytes
else:
    bytes_type = str
SIGNATURE_HMAC = "HMAC-SHA1"
SIGNATURE_RSA = "RSA-SHA1"
SIGNATURE_PLAINTEXT = "PLAINTEXT"
SIGNATURE_METHODS = (SIGNATURE_HMAC, SIGNATURE_RSA, SIGNATURE_PLAINTEXT)
SIGNATURE_TYPE_AUTH_HEADER = 'AUTH_HEADER'
SIGNATURE_TYPE_QUERY = 'QUERY'
SIGNATURE_TYPE_BODY = 'BODY'
CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded'
class Client(object):
"""A client used to sign OAuth 1.0 RFC 5849 requests"""
def __init__(self, client_key,
client_secret=None,
resource_owner_key=None,
resource_owner_secret=None,
callback_uri=None,
signature_method=SIGNATURE_HMAC,
signature_type=SIGNATURE_TYPE_AUTH_HEADER,
rsa_key=None, verifier=None, realm=None,
encoding='utf-8', decoding=None,
nonce=None, timestamp=None):
"""Create an OAuth 1 client.
        :param client_key: Client key (consumer key), mandatory.
        :param client_secret: Client secret (consumer secret).
        :param resource_owner_key: Resource owner key (oauth token).
:param resource_owner_secret: Resource owner secret (oauth token secret).
:param callback_uri: Callback used when obtaining request token.
:param signature_method: SIGNATURE_HMAC, SIGNATURE_RSA or SIGNATURE_PLAINTEXT.
:param signature_type: SIGNATURE_TYPE_AUTH_HEADER (default),
SIGNATURE_TYPE_QUERY or SIGNATURE_TYPE_BODY
depending on where you want to embed the oauth
credentials.
:param rsa_key: RSA key used with SIGNATURE_RSA.
:param verifier: Verifier used when obtaining an access token.
:param realm: Realm (scope) to which access is being requested.
:param encoding: If you provide non-unicode input you may use this
to have oauthlib automatically convert.
:param decoding: If you wish that the returned uri, headers and body
from sign be encoded back from unicode, then set
decoding to your preferred encoding, i.e. utf-8.
:param nonce: Use this nonce instead of generating one. (Mainly for testing)
:param timestamp: Use this timestamp instead of using current. (Mainly for testing)
"""
# Convert to unicode using encoding if given, else assume unicode
encode = lambda x: to_unicode(x, encoding) if encoding else x
self.client_key = encode(client_key)
self.client_secret = encode(client_secret)
self.resource_owner_key = encode(resource_owner_key)
self.resource_owner_secret = encode(resource_owner_secret)
self.signature_method = encode(signature_method)
self.signature_type = encode(signature_type)
self.callback_uri = encode(callback_uri)
self.rsa_key = encode(rsa_key)
self.verifier = encode(verifier)
self.realm = encode(realm)
self.encoding = encode(encoding)
self.decoding = encode(decoding)
self.nonce = encode(nonce)
self.timestamp = encode(timestamp)
if self.signature_method == SIGNATURE_RSA and self.rsa_key is None:
raise ValueError('rsa_key is required when using RSA signature method.')
def get_oauth_signature(self, request):
"""Get an OAuth signature to be used in signing a request
"""
if self.signature_method == SIGNATURE_PLAINTEXT:
# fast-path
return signature.sign_plaintext(self.client_secret,
self.resource_owner_secret)
uri, headers, body = self._render(request)
collected_params = signature.collect_parameters(
uri_query=urlparse.urlparse(uri).query,
body=body,
headers=headers)
log.debug("Collected params: {0}".format(collected_params))
normalized_params = signature.normalize_parameters(collected_params)
normalized_uri = signature.normalize_base_string_uri(request.uri)
log.debug("Normalized params: {0}".format(normalized_params))
log.debug("Normalized URI: {0}".format(normalized_uri))
base_string = signature.construct_base_string(request.http_method,
normalized_uri, normalized_params)
log.debug("Base signing string: {0}".format(base_string))
if self.signature_method == SIGNATURE_HMAC:
sig = signature.sign_hmac_sha1(base_string, self.client_secret,
self.resource_owner_secret)
elif self.signature_method == SIGNATURE_RSA:
sig = signature.sign_rsa_sha1(base_string, self.rsa_key)
else:
sig = signature.sign_plaintext(self.client_secret,
self.resource_owner_secret)
log.debug("Signature: {0}".format(sig))
return sig
def get_oauth_params(self):
"""Get the basic OAuth parameters to be used in generating a signature.
"""
nonce = (generate_nonce()
if self.nonce is None else self.nonce)
timestamp = (generate_timestamp()
if self.timestamp is None else self.timestamp)
params = [
('oauth_nonce', nonce),
('oauth_timestamp', timestamp),
('oauth_version', '1.0'),
('oauth_signature_method', self.signature_method),
('oauth_consumer_key', self.client_key),
]
if self.resource_owner_key:
params.append(('oauth_token', self.resource_owner_key))
if self.callback_uri:
params.append(('oauth_callback', self.callback_uri))
if self.verifier:
params.append(('oauth_verifier', self.verifier))
return params
def _render(self, request, formencode=False, realm=None):
"""Render a signed request according to signature type
Returns a 3-tuple containing the request URI, headers, and body.
If the formencode argument is True and the body contains parameters, it
is escaped and returned as a valid formencoded string.
"""
# TODO what if there are body params on a header-type auth?
# TODO what if there are query params on a body-type auth?
uri, headers, body = request.uri, request.headers, request.body
# TODO: right now these prepare_* methods are very narrow in scope--they
# only affect their little thing. In some cases (for example, with
# header auth) it might be advantageous to allow these methods to touch
# other parts of the request, like the headers—so the prepare_headers
# method could also set the Content-Type header to x-www-form-urlencoded
# like the spec requires. This would be a fundamental change though, and
# I'm not sure how I feel about it.
if self.signature_type == SIGNATURE_TYPE_AUTH_HEADER:
headers = parameters.prepare_headers(request.oauth_params, request.headers, realm=realm)
elif self.signature_type == SIGNATURE_TYPE_BODY and request.decoded_body is not None:
body = parameters.prepare_form_encoded_body(request.oauth_params, request.decoded_body)
if formencode:
body = urlencode(body)
headers['Content-Type'] = 'application/x-www-form-urlencoded'
elif self.signature_type == SIGNATURE_TYPE_QUERY:
uri = parameters.prepare_request_uri_query(request.oauth_params, request.uri)
else:
raise ValueError('Unknown signature type specified.')
return uri, headers, body
def sign(self, uri, http_method='GET', body=None, headers=None, realm=None):
"""Sign a request
Signs an HTTP request with the specified parts.
Returns a 3-tuple of the signed request's URI, headers, and body.
Note that http_method is not returned as it is unaffected by the OAuth
signing process. Also worth noting is that duplicate parameters
will be included in the signature, regardless of where they are
specified (query, body).
The body argument may be a dict, a list of 2-tuples, or a formencoded
string. The Content-Type header must be 'application/x-www-form-urlencoded'
if it is present.
If the body argument is not one of the above, it will be returned
verbatim as it is unaffected by the OAuth signing process. Attempting to
sign a request with non-formencoded data using the OAuth body signature
type is invalid and will raise an exception.
If the body does contain parameters, it will be returned as a properly-
formatted formencoded string.
Body may not be included if the http_method is either GET or HEAD as
this changes the semantic meaning of the request.
All string data MUST be unicode or be encoded with the same encoding
scheme supplied to the Client constructor, default utf-8. This includes
strings inside body dicts, for example.
"""
# normalize request data
request = Request(uri, http_method, body, headers,
encoding=self.encoding)
# sanity check
content_type = request.headers.get('Content-Type', None)
multipart = content_type and content_type.startswith('multipart/')
should_have_params = content_type == CONTENT_TYPE_FORM_URLENCODED
has_params = request.decoded_body is not None
# 3.4.1.3.1. Parameter Sources
# [Parameters are collected from the HTTP request entity-body, but only
# if [...]:
# * The entity-body is single-part.
if multipart and has_params:
raise ValueError("Headers indicate a multipart body but body contains parameters.")
# * The entity-body follows the encoding requirements of the
# "application/x-www-form-urlencoded" content-type as defined by
# [W3C.REC-html40-19980424].
elif should_have_params and not has_params:
raise ValueError("Headers indicate a formencoded body but body was not decodable.")
# * The HTTP request entity-header includes the "Content-Type"
# header field set to "application/x-www-form-urlencoded".
elif not should_have_params and has_params:
raise ValueError("Body contains parameters but Content-Type header was not set.")
# 3.5.2. Form-Encoded Body
# Protocol parameters can be transmitted in the HTTP request entity-
# body, but only if the following REQUIRED conditions are met:
# o The entity-body is single-part.
# o The entity-body follows the encoding requirements of the
# "application/x-www-form-urlencoded" content-type as defined by
# [W3C.REC-html40-19980424].
# o The HTTP request entity-header includes the "Content-Type" header
# field set to "application/x-www-form-urlencoded".
elif self.signature_type == SIGNATURE_TYPE_BODY and not (
should_have_params and has_params and not multipart):
raise ValueError('Body signatures may only be used with form-urlencoded content')
# We amend http://tools.ietf.org/html/rfc5849#section-3.4.1.3.1
# with the clause that parameters from body should only be included
# in non GET or HEAD requests. Extracting the request body parameters
# and including them in the signature base string would give semantic
# meaning to the body, which it should not have according to the
# HTTP 1.1 spec.
elif http_method.upper() in ('GET', 'HEAD') and has_params:
raise ValueError('GET/HEAD requests should not include body.')
# generate the basic OAuth parameters
request.oauth_params = self.get_oauth_params()
# generate the signature
request.oauth_params.append(('oauth_signature', self.get_oauth_signature(request)))
# render the signed request and return it
uri, headers, body = self._render(request, formencode=True,
realm=(realm or self.realm))
if self.decoding:
log.debug('Encoding URI, headers and body to %s.', self.decoding)
uri = uri.encode(self.decoding)
body = body.encode(self.decoding) if body else body
new_headers = {}
for k, v in headers.items():
new_headers[k.encode(self.decoding)] = v.encode(self.decoding)
headers = new_headers
return uri, headers, body
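# Minimal signing example (illustrative; the credentials are made up, and
# SIGNATURE_TYPE_AUTH_HEADER is the default, so the OAuth parameters end up
# in the Authorization header):
#
#     client = Client(u'your_client_key', client_secret=u'your_client_secret')
#     uri, headers, body = client.sign(u'https://api.example.com/resource')
#     # pass uri, headers and body on to the HTTP library of your choice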
class Server(object):
"""A server base class used to verify OAuth 1.0 RFC 5849 requests
OAuth providers should inherit from Server and implement the methods
and properties outlined below. Further details are provided in the
documentation for each method and property.
Methods used to check the format of input parameters. Common tests include
length, character set, membership, range or pattern. These tests are
    referred to as `whitelisting or blacklisting`_. Whitelisting is better
    but blacklisting can be useful to spot malicious activity.
    The following methods have a default implementation:
- check_client_key
- check_request_token
- check_access_token
- check_nonce
- check_verifier
- check_realm
The methods above default to whitelist input parameters, checking that they
are alphanumerical and between a minimum and maximum length. Rather than
overloading the methods a few properties can be used to configure these
methods.
* @safe_characters -> (character set)
* @client_key_length -> (min, max)
* @request_token_length -> (min, max)
* @access_token_length -> (min, max)
* @nonce_length -> (min, max)
* @verifier_length -> (min, max)
* @realms -> [list, of, realms]
Methods used to validate input parameters. These checks usually hit either
persistent or temporary storage such as databases or the filesystem. See
each methods documentation for detailed usage.
The following methods must be implemented:
- validate_client_key
- validate_request_token
- validate_access_token
- validate_timestamp_and_nonce
- validate_redirect_uri
- validate_requested_realm
- validate_realm
- validate_verifier
Method used to retrieve sensitive information from storage.
The following methods must be implemented:
- get_client_secret
- get_request_token_secret
- get_access_token_secret
- get_rsa_key
To prevent timing attacks it is necessary to not exit early even if the
client key or resource owner key is invalid. Instead dummy values should
be used during the remaining verification process. It is very important
that the dummy client and token are valid input parameters to the methods
get_client_secret, get_rsa_key and get_(access/request)_token_secret and
that the running time of those methods when given a dummy value remain
equivalent to the running time when given a valid client/resource owner.
The following properties must be implemented:
* @dummy_client
* @dummy_request_token
* @dummy_access_token
Example implementations have been provided, note that the database used is
a simple dictionary and serves only an illustrative purpose. Use whichever
database suits your project and how to access it is entirely up to you.
The methods are introduced in an order which should make understanding
their use more straightforward and as such it could be worth reading what
follows in chronological order.
.. _`whitelisting or blacklisting`: http://www.schneier.com/blog/archives/2011/01/whitelisting_vs.html
"""
def __init__(self):
pass
@property
def allowed_signature_methods(self):
return SIGNATURE_METHODS
@property
def safe_characters(self):
return set(utils.UNICODE_ASCII_CHARACTER_SET)
@property
def client_key_length(self):
return 20, 30
@property
def request_token_length(self):
return 20, 30
@property
def access_token_length(self):
return 20, 30
@property
def timestamp_lifetime(self):
return 600
@property
def nonce_length(self):
return 20, 30
@property
def verifier_length(self):
return 20, 30
@property
def realms(self):
return []
@property
def enforce_ssl(self):
return True
def check_client_key(self, client_key):
"""Check that the client key only contains safe characters
and is no shorter than lower and no longer than upper.
"""
lower, upper = self.client_key_length
return (set(client_key) <= self.safe_characters and
lower <= len(client_key) <= upper)
def check_request_token(self, request_token):
"""Checks that the request token contains only safe characters
and is no shorter than lower and no longer than upper.
"""
lower, upper = self.request_token_length
return (set(request_token) <= self.safe_characters and
lower <= len(request_token) <= upper)
def check_access_token(self, request_token):
"""Checks that the token contains only safe characters
and is no shorter than lower and no longer than upper.
"""
lower, upper = self.access_token_length
return (set(request_token) <= self.safe_characters and
lower <= len(request_token) <= upper)
def check_nonce(self, nonce):
"""Checks that the nonce only contains only safe characters
and is no shorter than lower and no longer than upper.
"""
lower, upper = self.nonce_length
return (set(nonce) <= self.safe_characters and
lower <= len(nonce) <= upper)
def check_verifier(self, verifier):
"""Checks that the verifier contains only safe characters
and is no shorter than lower and no longer than upper.
"""
lower, upper = self.verifier_length
return (set(verifier) <= self.safe_characters and
lower <= len(verifier) <= upper)
def check_realm(self, realm):
"""Check that the realm is one of a set allowed realms.
"""
return realm in self.realms
def get_client_secret(self, client_key):
"""Retrieves the client secret associated with the client key.
This method must allow the use of a dummy client_key value.
Fetching the secret using the dummy key must take the same amount of
time as fetching a secret for a valid client::
# Unlikely to be near constant time as it uses two database
# lookups for a valid client, and only one for an invalid.
from your_datastore import ClientSecret
if ClientSecret.has(client_key):
return ClientSecret.get(client_key)
else:
return 'dummy'
# Aim to mimic number of latency inducing operations no matter
# whether the client is valid or not.
from your_datastore import ClientSecret
return ClientSecret.get(client_key, 'dummy')
Note that the returned key must be in plaintext.
"""
raise NotImplementedError("Subclasses must implement this function.")
@property
def dummy_client(self):
"""Dummy client used when an invalid client key is supplied.
The dummy client should be associated with either a client secret,
a rsa key or both depending on which signature methods are supported.
Providers should make sure that
get_client_secret(dummy_client)
get_rsa_key(dummy_client)
return a valid secret or key for the dummy client.
"""
raise NotImplementedError("Subclasses must implement this function.")
def get_request_token_secret(self, client_key, request_token):
"""Retrieves the shared secret associated with the request token.
This method must allow the use of a dummy values and the running time
must be roughly equivalent to that of the running time of valid values::
# Unlikely to be near constant time as it uses two database
# lookups for a valid client, and only one for an invalid.
from your_datastore import RequestTokenSecret
if RequestTokenSecret.has(client_key):
return RequestTokenSecret.get((client_key, request_token))
else:
return 'dummy'
# Aim to mimic number of latency inducing operations no matter
# whether the client is valid or not.
from your_datastore import RequestTokenSecret
return ClientSecret.get((client_key, request_token), 'dummy')
Note that the returned key must be in plaintext.
"""
raise NotImplementedError("Subclasses must implement this function.")
def get_access_token_secret(self, client_key, access_token):
"""Retrieves the shared secret associated with the access token.
This method must allow the use of a dummy values and the running time
must be roughly equivalent to that of the running time of valid values::
# Unlikely to be near constant time as it uses two database
# lookups for a valid client, and only one for an invalid.
from your_datastore import AccessTokenSecret
if AccessTokenSecret.has(client_key):
return AccessTokenSecret.get((client_key, request_token))
else:
return 'dummy'
# Aim to mimic number of latency inducing operations no matter
# whether the client is valid or not.
from your_datastore import AccessTokenSecret
return ClientSecret.get((client_key, request_token), 'dummy')
Note that the returned key must be in plaintext.
"""
raise NotImplementedError("Subclasses must implement this function.")
@property
def dummy_request_token(self):
"""Dummy request token used when an invalid token was supplied.
The dummy request token should be associated with a request token
secret such that get_request_token_secret(.., dummy_request_token)
returns a valid secret.
"""
raise NotImplementedError("Subclasses must implement this function.")
@property
def dummy_access_token(self):
"""Dummy access token used when an invalid token was supplied.
The dummy access token should be associated with an access token
secret such that get_access_token_secret(.., dummy_access_token)
returns a valid secret.
"""
raise NotImplementedError("Subclasses must implement this function.")
def get_rsa_key(self, client_key):
"""Retrieves a previously stored client provided RSA key.
This method must allow the use of a dummy client_key value. Fetching
the rsa key using the dummy key must take the same amount of time
as fetching a key for a valid client. The dummy key must also be of
the same bit length as client keys.
Note that the key must be returned in plaintext.
"""
raise NotImplementedError("Subclasses must implement this function.")
def _get_signature_type_and_params(self, request):
"""Extracts parameters from query, headers and body. Signature type
is set to the source in which parameters were found.
"""
# Per RFC5849, only the Authorization header may contain the 'realm' optional parameter.
header_params = signature.collect_parameters(headers=request.headers,
exclude_oauth_signature=False, with_realm=True)
body_params = signature.collect_parameters(body=request.body,
exclude_oauth_signature=False)
query_params = signature.collect_parameters(uri_query=request.uri_query,
exclude_oauth_signature=False)
params = []
params.extend(header_params)
params.extend(body_params)
params.extend(query_params)
signature_types_with_oauth_params = list(filter(lambda s: s[2], (
(SIGNATURE_TYPE_AUTH_HEADER, params,
utils.filter_oauth_params(header_params)),
(SIGNATURE_TYPE_BODY, params,
utils.filter_oauth_params(body_params)),
(SIGNATURE_TYPE_QUERY, params,
utils.filter_oauth_params(query_params))
)))
if len(signature_types_with_oauth_params) > 1:
raise ValueError('oauth_ params must come from only 1 signature type but were found in %s' % ', '.join(
[s[0] for s in signature_types_with_oauth_params]))
try:
signature_type, params, oauth_params = signature_types_with_oauth_params[0]
except IndexError:
raise ValueError('oauth_ params are missing. Could not determine signature type.')
return signature_type, params, oauth_params
def validate_client_key(self, client_key):
"""Validates that supplied client key is a registered and valid client.
        Note that if the dummy client is supplied it should validate in the
        same or nearly the same amount of time as a valid one.
        Ensure latency-inducing tasks are mimicked even for dummy clients.
For example, use::
from your_datastore import Client
try:
return Client.exists(client_key, access_token)
except DoesNotExist:
return False
Rather than::
from your_datastore import Client
if access_token == self.dummy_access_token:
return False
else:
return Client.exists(client_key, access_token)
"""
raise NotImplementedError("Subclasses must implement this function.")
def validate_request_token(self, client_key, request_token):
"""Validates that supplied request token is registered and valid.
        Note that if the dummy request_token is supplied it should validate in
        nearly the same amount of time as a valid one.
        Ensure latency-inducing tasks are mimicked even for dummy clients.
For example, use::
from your_datastore import RequestToken
try:
return RequestToken.exists(client_key, access_token)
except DoesNotExist:
return False
Rather than::
from your_datastore import RequestToken
if access_token == self.dummy_access_token:
return False
else:
return RequestToken.exists(client_key, access_token)
"""
raise NotImplementedError("Subclasses must implement this function.")
def validate_access_token(self, client_key, access_token):
"""Validates that supplied access token is registered and valid.
Note that if the dummy access token is supplied it should validate in
the same or nearly the same amount of time as a valid one.
        Ensure latency-inducing tasks are mimicked even for dummy clients.
For example, use::
from your_datastore import AccessToken
try:
return AccessToken.exists(client_key, access_token)
except DoesNotExist:
return False
Rather than::
from your_datastore import AccessToken
if access_token == self.dummy_access_token:
return False
else:
return AccessToken.exists(client_key, access_token)
"""
raise NotImplementedError("Subclasses must implement this function.")
def validate_timestamp_and_nonce(self, client_key, timestamp, nonce,
request_token=None, access_token=None):
"""Validates that the nonce has not been used before.
Per `Section 3.3`_ of the spec.
"A nonce is a random string, uniquely generated by the client to allow
the server to verify that a request has never been made before and
helps prevent replay attacks when requests are made over a non-secure
channel. The nonce value MUST be unique across all requests with the
same timestamp, client credentials, and token combinations."
.. _`Section 3.3`: http://tools.ietf.org/html/rfc5849#section-3.3
One of the first validation checks that will be made is for the validity
of the nonce and timestamp, which are associated with a client key and
possibly a token. If invalid then immediately fail the request
        by returning False. If the nonce/timestamp pair has been used before,
        you may just have detected a replay attack. Therefore it is an essential
part of OAuth security that you not allow nonce/timestamp reuse.
Note that this validation check is done before checking the validity of
the client and token.::
nonces_and_timestamps_database = [
(u'foo', 1234567890, u'rannoMstrInghere', u'bar')
]
def validate_timestamp_and_nonce(self, client_key, timestamp, nonce,
request_token=None, access_token=None):
return ((client_key, timestamp, nonce, request_token or access_token)
in self.nonces_and_timestamps_database)
"""
raise NotImplementedError("Subclasses must implement this function.")
def validate_redirect_uri(self, client_key, redirect_uri):
"""Validates the client supplied redirection URI.
It is highly recommended that OAuth providers require their clients
to register all redirection URIs prior to using them in requests and
register them as absolute URIs. See `CWE-601`_ for more information
about open redirection attacks.
By requiring registration of all redirection URIs it should be
straightforward for the provider to verify whether the supplied
redirect_uri is valid or not.
Alternatively per `Section 2.1`_ of the spec:
"If the client is unable to receive callbacks or a callback URI has
been established via other means, the parameter value MUST be set to
"oob" (case sensitive), to indicate an out-of-band configuration."
.. _`CWE-601`: http://cwe.mitre.org/top25/index.html#CWE-601
.. _`Section 2.1`: https://tools.ietf.org/html/rfc5849#section-2.1
"""
raise NotImplementedError("Subclasses must implement this function.")
def validate_requested_realm(self, client_key, realm):
"""Validates that the client may request access to the realm.
This method is invoked when obtaining a request token and should
tie a realm to the request token and after user authorization
this realm restriction should transfer to the access token.
"""
raise NotImplementedError("Subclasses must implement this function.")
def validate_realm(self, client_key, access_token, uri=None,
required_realm=None):
"""Validates access to the request realm.
How providers choose to use the realm parameter is outside the OAuth
specification but it is commonly used to restrict access to a subset
of protected resources such as "photos".
required_realm is a convenience parameter which can be used to provide
a per view method pre-defined list of allowed realms.
"""
raise NotImplementedError("Subclasses must implement this function.")
def validate_verifier(self, client_key, request_token, verifier):
"""Validates a verification code.
OAuth providers issue a verification code to clients after the
resource owner authorizes access. This code is used by the client to
obtain token credentials and the provider must verify that the
verifier is valid and associated with the client as well as the
resource owner.
Verifier validation should be done in near constant time
(to avoid verifier enumeration). To achieve this we need a
constant time string comparison which is provided by OAuthLib
in ``oauthlib.common.safe_string_equals``::
from your_datastore import Verifier
correct_verifier = Verifier.get(client_key, request_token)
from oauthlib.common import safe_string_equals
return safe_string_equals(verifier, correct_verifier)
"""
raise NotImplementedError("Subclasses must implement this function.")
def verify_request_token_request(self, uri, http_method='GET', body=None,
headers=None):
"""Verify the initial request in the OAuth workflow.
During this step the client obtains a request token for use during
resource owner authorization (which is outside the scope of oauthlib).
"""
return self.verify_request(uri, http_method=http_method, body=body,
headers=headers, require_resource_owner=False,
require_realm=True, require_callback=True)
def verify_access_token_request(self, uri, http_method='GET', body=None,
headers=None):
"""Verify the second request in the OAuth workflow.
During this step the client obtains the access token for use when
accessing protected resources.
"""
return self.verify_request(uri, http_method=http_method, body=body,
headers=headers, require_verifier=True)
def verify_request(self, uri, http_method='GET', body=None,
headers=None, require_resource_owner=True, require_verifier=False,
require_realm=False, required_realm=None, require_callback=False):
"""Verifies a request ensuring that the following is true:
Per `section 3.2`_ of the spec.
- all mandated OAuth parameters are supplied
- parameters are only supplied in one source which may be the URI
query, the Authorization header or the body
- all parameters are checked and validated, see comments and the
methods and properties of this class for further details.
- the supplied signature is verified against a recalculated one
A ValueError will be raised if any parameter is missing,
supplied twice or invalid. A HTTP 400 Response should be returned
upon catching an exception.
A HTTP 401 Response should be returned if verify_request returns False.
`Timing attacks`_ are prevented through the use of dummy credentials to
create near constant time verification even if an invalid credential
is used. Early exit on invalid credentials would enable attackers
to perform `enumeration attacks`_. Near constant time string comparison
is used to prevent secret key guessing. Note that timing attacks can
only be prevented through near constant time execution, not by adding
a random delay which would only require more samples to be gathered.
.. _`section 3.2`: http://tools.ietf.org/html/rfc5849#section-3.2
.. _`Timing attacks`: http://rdist.root.org/2010/07/19/exploiting-remote-timing-attacks/
.. _`enumeration attacks`: http://www.sans.edu/research/security-laboratory/article/attacks-browsing
"""
# Only include body data from x-www-form-urlencoded requests
headers = headers or {}
if ("Content-Type" in headers and
headers["Content-Type"] == CONTENT_TYPE_FORM_URLENCODED):
request = Request(uri, http_method, body, headers)
else:
request = Request(uri, http_method, '', headers)
if self.enforce_ssl and not request.uri.lower().startswith("https://"):
raise ValueError("Insecure transport, only HTTPS is allowed.")
signature_type, params, oauth_params = self._get_signature_type_and_params(request)
# The server SHOULD return a 400 (Bad Request) status code when
# receiving a request with duplicated protocol parameters.
if len(dict(oauth_params)) != len(oauth_params):
raise ValueError("Duplicate OAuth entries.")
oauth_params = dict(oauth_params)
request.signature = oauth_params.get('oauth_signature')
request.client_key = oauth_params.get('oauth_consumer_key')
request.resource_owner_key = oauth_params.get('oauth_token')
request.nonce = oauth_params.get('oauth_nonce')
request.timestamp = oauth_params.get('oauth_timestamp')
request.callback_uri = oauth_params.get('oauth_callback')
request.verifier = oauth_params.get('oauth_verifier')
request.signature_method = oauth_params.get('oauth_signature_method')
request.realm = dict(params).get('realm')
# The server SHOULD return a 400 (Bad Request) status code when
# receiving a request with missing parameters.
if not all((request.signature, request.client_key,
request.nonce, request.timestamp,
request.signature_method)):
raise ValueError("Missing OAuth parameters.")
# OAuth does not mandate a particular signature method, as each
# implementation can have its own unique requirements. Servers are
# free to implement and document their own custom methods.
# Recommending any particular method is beyond the scope of this
# specification. Implementers should review the Security
# Considerations section (`Section 4`_) before deciding on which
# method to support.
# .. _`Section 4`: http://tools.ietf.org/html/rfc5849#section-4
        if request.signature_method not in self.allowed_signature_methods:
raise ValueError("Invalid signature method.")
# Servers receiving an authenticated request MUST validate it by:
# If the "oauth_version" parameter is present, ensuring its value is
# "1.0".
if ('oauth_version' in request.oauth_params and
request.oauth_params['oauth_version'] != '1.0'):
raise ValueError("Invalid OAuth version.")
# The timestamp value MUST be a positive integer. Unless otherwise
# specified by the server's documentation, the timestamp is expressed
# in the number of seconds since January 1, 1970 00:00:00 GMT.
if len(request.timestamp) != 10:
raise ValueError("Invalid timestamp size")
try:
ts = int(request.timestamp)
except ValueError:
raise ValueError("Timestamp must be an integer")
else:
# To avoid the need to retain an infinite number of nonce values for
# future checks, servers MAY choose to restrict the time period after
# which a request with an old timestamp is rejected.
if time.time() - ts > self.timestamp_lifetime:
                raise ValueError("Request too old, over %d seconds." % self.timestamp_lifetime)
# Provider specific validation of parameters, used to enforce
# restrictions such as character set and length.
if not self.check_client_key(request.client_key):
raise ValueError("Invalid client key.")
if not request.resource_owner_key and require_resource_owner:
raise ValueError("Missing resource owner.")
if (require_resource_owner and not require_verifier and
not self.check_access_token(request.resource_owner_key)):
raise ValueError("Invalid resource owner key.")
if (require_resource_owner and require_verifier and
not self.check_request_token(request.resource_owner_key)):
raise ValueError("Invalid resource owner key.")
if not self.check_nonce(request.nonce):
raise ValueError("Invalid nonce.")
if request.realm and not self.check_realm(request.realm):
raise ValueError("Invalid realm. Allowed are %s" % self.realms)
if not request.verifier and require_verifier:
raise ValueError("Missing verifier.")
if require_verifier and not self.check_verifier(request.verifier):
raise ValueError("Invalid verifier.")
if require_callback and not request.callback_uri:
raise ValueError("Missing callback URI.")
# Servers receiving an authenticated request MUST validate it by:
# If using the "HMAC-SHA1" or "RSA-SHA1" signature methods, ensuring
# that the combination of nonce/timestamp/token (if present)
# received from the client has not been used before in a previous
# request (the server MAY reject requests with stale timestamps as
# described in `Section 3.3`_).
# .._`Section 3.3`: http://tools.ietf.org/html/rfc5849#section-3.3
#
# We check this before validating client and resource owner for
# increased security and performance, both gained by doing less work.
if require_verifier:
token = {"request_token": request.resource_owner_key}
else:
token = {"access_token": request.resource_owner_key}
if not self.validate_timestamp_and_nonce(request.client_key,
request.timestamp, request.nonce, **token):
return False, request
# The server SHOULD return a 401 (Unauthorized) status code when
# receiving a request with invalid client credentials.
# Note: This is postponed in order to avoid timing attacks, instead
# a dummy client is assigned and used to maintain near constant
# time request verification.
#
# Note that early exit would enable client enumeration
valid_client = self.validate_client_key(request.client_key)
if not valid_client:
request.client_key = self.dummy_client
# Callback is normally never required, except for requests for
# a Temporary Credential as described in `Section 2.1`_
# .._`Section 2.1`: http://tools.ietf.org/html/rfc5849#section-2.1
if require_callback:
valid_redirect = self.validate_redirect_uri(request.client_key,
request.callback_uri)
else:
valid_redirect = True
# The server SHOULD return a 401 (Unauthorized) status code when
# receiving a request with invalid or expired token.
# Note: This is postponed in order to avoid timing attacks, instead
# a dummy token is assigned and used to maintain near constant
# time request verification.
#
# Note that early exit would enable resource owner enumeration
if request.resource_owner_key:
if require_verifier:
valid_resource_owner = self.validate_request_token(
request.client_key, request.resource_owner_key)
if not valid_resource_owner:
request.resource_owner_key = self.dummy_request_token
else:
valid_resource_owner = self.validate_access_token(
request.client_key, request.resource_owner_key)
if not valid_resource_owner:
request.resource_owner_key = self.dummy_access_token
else:
valid_resource_owner = True
        # Note that `realm`_ is only used in authorization headers and how
        # it should be interpreted is not included in the OAuth spec.
# However they could be seen as a scope or realm to which the
# client has access and as such every client should be checked
# to ensure it is authorized access to that scope or realm.
# .. _`realm`: http://tools.ietf.org/html/rfc2617#section-1.2
#
# Note that early exit would enable client realm access enumeration.
#
# The require_realm indicates this is the first step in the OAuth
# workflow where a client requests access to a specific realm.
# This first step (obtaining request token) need not require a realm
# and can then be identified by checking the require_resource_owner
        # flag and absence of realm.
#
# Clients obtaining an access token will not supply a realm and it will
# not be checked. Instead the previously requested realm should be
# transferred from the request token to the access token.
#
# Access to protected resources will always validate the realm but note
# that the realm is now tied to the access token and not provided by
# the client.
if ((require_realm and not request.resource_owner_key) or
(not require_resource_owner and not request.realm)):
valid_realm = self.validate_requested_realm(request.client_key,
request.realm)
elif require_verifier:
valid_realm = True
else:
valid_realm = self.validate_realm(request.client_key,
request.resource_owner_key, uri=request.uri,
required_realm=required_realm)
# The server MUST verify (Section 3.2) the validity of the request,
# ensure that the resource owner has authorized the provisioning of
# token credentials to the client, and ensure that the temporary
# credentials have not expired or been used before. The server MUST
# also verify the verification code received from the client.
# .. _`Section 3.2`: http://tools.ietf.org/html/rfc5849#section-3.2
#
# Note that early exit would enable resource owner authorization
        # verifier enumeration.
if request.verifier:
valid_verifier = self.validate_verifier(request.client_key,
request.resource_owner_key, request.verifier)
else:
valid_verifier = True
# Parameters to Client depend on signature method which may vary
# for each request. Note that HMAC-SHA1 and PLAINTEXT share parameters
request.params = filter(lambda x: x[0] not in ("oauth_signature", "realm"), params)
# ---- RSA Signature verification ----
if request.signature_method == SIGNATURE_RSA:
# The server verifies the signature per `[RFC3447] section 8.2.2`_
            # .. _`[RFC3447] section 8.2.2`: http://tools.ietf.org/html/rfc3447#section-8.2.2
rsa_key = self.get_rsa_key(request.client_key)
valid_signature = signature.verify_rsa_sha1(request, rsa_key)
# ---- HMAC or Plaintext Signature verification ----
else:
# Servers receiving an authenticated request MUST validate it by:
# Recalculating the request signature independently as described in
# `Section 3.4`_ and comparing it to the value received from the
# client via the "oauth_signature" parameter.
# .. _`Section 3.4`: http://tools.ietf.org/html/rfc5849#section-3.4
client_secret = self.get_client_secret(request.client_key)
resource_owner_secret = None
if require_resource_owner:
if require_verifier:
resource_owner_secret = self.get_request_token_secret(
request.client_key, request.resource_owner_key)
else:
resource_owner_secret = self.get_access_token_secret(
request.client_key, request.resource_owner_key)
if request.signature_method == SIGNATURE_HMAC:
valid_signature = signature.verify_hmac_sha1(request,
client_secret, resource_owner_secret)
else:
valid_signature = signature.verify_plaintext(request,
client_secret, resource_owner_secret)
# We delay checking validity until the very end, using dummy values for
# calculations and fetching secrets/keys to ensure the flow of every
# request remains almost identical regardless of whether valid values
# have been supplied. This ensures near constant time execution and
        # prevents malicious users from guessing sensitive information.
v = all((valid_client, valid_resource_owner, valid_realm,
valid_redirect, valid_verifier, valid_signature))
if not v:
log.info("[Failure] OAuthLib request verification failed.")
log.info("Valid client:\t%s" % valid_client)
log.info("Valid token:\t%s\t(Required: %s" % (valid_resource_owner, require_resource_owner))
log.info("Valid realm:\t%s\t(Required: %s)" % (valid_realm, require_realm))
log.info("Valid callback:\t%s" % valid_redirect)
log.info("Valid verifier:\t%s\t(Required: %s)" % (valid_verifier, require_verifier))
log.info("Valid signature:\t%s" % valid_signature)
return v, request
| raqqun/tweetcommander | packages/oauthlib/oauth1/rfc5849/__init__.py | Python | gpl-3.0 | 49,724 |
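A minimal standalone sketch of the HMAC-SHA1 recomputation the HMAC branch above describes, assuming the signature base string of RFC 5849 section 3.4.1 has already been built; the argument values below are placeholders, not real credentials.

import base64
import hmac
from hashlib import sha1
from urllib import quote  # Python 2, matching the module above

def hmac_sha1_signature(base_string, client_secret, resource_owner_secret):
    # Per RFC 5849 section 3.4.2 the signing key is the percent-encoded
    # client (consumer) secret and token secret joined by '&'; either
    # part may be the empty string.
    key = '&'.join((quote(client_secret or '', safe=''),
                    quote(resource_owner_secret or '', safe='')))
    return base64.b64encode(hmac.new(key, base_string, sha1).digest())

# The server recomputes the signature and compares it, ideally in constant
# time, to the oauth_signature parameter sent by the client.
expected = hmac_sha1_signature('GET&http%3A%2F%2Fexample.com%2F&oauth_nonce%3Dabc',
                               'client-secret', 'token-secret')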
"""Feat components."""
from component_objects import Component, Element
class FeatAddModal(Component):
"""Definition of feat add modal component."""
modal_div_id = 'addFeat'
name_id = 'featAddNameInput'
description_id = 'featAddDescriptionTextarea'
tracked_id = 'featAddTrackedCheckbox'
max_id = 'featAddMaxInput'
short_rest_id = 'featAddShortRestInput'
long_rest_id = 'featAddLongRestInput'
add_id = 'featAddAddButton'
modal_div = Element(id_=modal_div_id)
name = Element(id_=name_id)
description = Element(id_=description_id)
tracked = Element(id_=tracked_id)
max_ = Element(id_=max_id)
short_rest = Element(id_=short_rest_id)
long_rest = Element(id_=long_rest_id)
add = Element(id_=add_id)
class FeatEditModal(Component):
"""Definition of feat edit modal component."""
modal_div_id = 'viewWeapon'
name_id = 'featEditNameInput'
description_id = 'featEditDescriptionTextarea'
tracked_id = 'featEditTrackedCheckbox'
max_id = 'featEditMaxInput'
short_rest_id = 'featEditShortRestInput'
long_rest_id = 'featEditLongRestInput'
done_id = 'featEditDoneButton'
modal_div = Element(id_=modal_div_id)
name = Element(id_=name_id)
description = Element(id_=description_id)
tracked = Element(id_=tracked_id)
max_ = Element(id_=max_id)
short_rest = Element(id_=short_rest_id)
long_rest = Element(id_=long_rest_id)
done = Element(id_=done_id)
class FeatModalTabs(Component):
"""Definition of feat modal tabs component."""
preview_id = 'featModalPreview'
edit_id = 'featModalEdit'
preview = Element(id_=preview_id)
edit = Element(id_=edit_id)
class FeatsTable(Component):
"""Definition of feats edit modal componenet."""
add_id = 'featAddIcon'
table_id = 'featTable'
add = Element(id_=add_id)
table = Element(id_=table_id)
| adventurerscodex/uat | components/core/character/feats.py | Python | gpl-3.0 | 1,902 |
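The classes above are page-object-style bundles of DOM ids. A sketch of how a UAT test might drive the add-feat modal with Selenium; only the id constants come from the components above, while the driver setup and page URL are assumptions for the example.

from selenium import webdriver
from selenium.webdriver.common.by import By

from components.core.character.feats import FeatAddModal, FeatsTable

driver = webdriver.Firefox()
driver.get('http://localhost:8080/charactersheet/')  # assumed URL

# Open the modal via the add icon, then fill in and submit the form.
driver.find_element(By.ID, FeatsTable.add_id).click()
driver.find_element(By.ID, FeatAddModal.name_id).send_keys('Sneak Attack')
driver.find_element(By.ID, FeatAddModal.description_id).send_keys('Extra damage once per turn.')
driver.find_element(By.ID, FeatAddModal.add_id).click()
driver.quit()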
from datetime import datetime
import json
import traceback
from django.http import HttpResponse
from django.template import loader, Context
from django.views.decorators.csrf import csrf_exempt
from events.models import ScriptEvent, MessageEvent
from profiles.models import PhoneNumber, format_phone
from sms_messages.models import ScheduledScript, ScriptVariable
from services.models import AppApi
@csrf_exempt
def callback(request):
response = {}
response['success'] = False
response['error'] = { 'description': 'Not yet implemented' }
response['parameters'] = {}
if request.method == 'POST':
message = json.loads(request.POST['message'])
if message['action'] == 'fetch_clarification':
lang_code = ''
phone_number = message['parameters']['recipient']
phone_number = format_phone(phone_number)
phone_objs = PhoneNumber.objects.filter(value=phone_number, active=True).order_by('priority')
if phone_objs.count() > 0:
lang_code = phone_objs[0].profile.primary_language
template = None
try:
template = loader.get_template('clarification_' + lang_code + '.txt')
except:
template = loader.get_template('clarification.txt')
c = Context({ 'message': message })
response['parameters']['clarification'] = template.render(c)
response['success'] = True
elif message['action'] == 'fetch_unsolicited_response':
lang_code = ''
phone_number = message['parameters']['recipient']
phone_number = format_phone(phone_number)
phone_objs = PhoneNumber.objects.filter(value=phone_number, active=True).order_by('priority')
if phone_objs.count() > 0:
lang_code = phone_objs[0].profile.primary_language
template = None
try:
template = loader.get_template('unsolicited_response_' + lang_code + '.txt')
except:
template = loader.get_template('unsolicited_response.txt')
c = Context({ 'message': message })
response['parameters']['response'] = template.render(c)
response['success'] = True
elif message['action'] == 'set_value':
script = ScheduledScript.objects.get(session=message['parameters']['session'])
event = ScriptEvent(script=script, event='script_session_variable_set',
value=json.dumps(message['parameters']))
event.save()
variable = ScriptVariable(script=script, key=message['parameters']['key'],
value=message['parameters']['value'])
variable.save()
response['success'] = True
response['error']['description'] = ''
elif message['action'] == 'log_session_started':
script = ScheduledScript.objects.get(session=message['parameters']['session'])
script.confirmed_date = datetime.now()
script.save()
event = ScriptEvent(script=script, event='script_session_started')
event.save()
response['success'] = True
response['error']['description'] = ''
elif message['action'] == 'log_receive':
event = MessageEvent(type='receive', sender=message['parameters']['sender'],
message=message['parameters']['message'])
event.save()
response['success'] = True
response['error']['description'] = ''
for app in AppApi.objects.all():
try:
api = __import__(app.app_name + '.api', globals(), locals(), ['on_receive'], -1)
api.on_receive(message['parameters']['sender'], message['parameters']['message'])
except:
traceback.print_exc()
elif message['action'] == 'log_send':
event = MessageEvent(type='send', recipient=message['parameters']['recipient'],
message=message['parameters']['message'])
event.save()
response['success'] = True
response['error']['description'] = ''
else:
request.META['wsgi.errors'].write('TODO: HANDLE ' + message['action'])
response['success'] = True
return HttpResponse(json.dumps(response), mimetype='application/json') | audaciouscode/SMSBot | smsbot_django/services/views.py | Python | gpl-3.0 | 4,783 |
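The callback view above dispatches on a JSON document posted in a `message` form field. A sketch of a `set_value` call from the client side, assuming the view is routed at a local URL; the parameter names mirror the handler above and the values are examples.

import json
import urllib
import urllib2  # Python 2, matching the view above

message = {
    'action': 'set_value',
    'parameters': {
        'session': 'abc123',  # an existing ScheduledScript.session (example)
        'key': 'weight',
        'value': '72',
    },
}
data = urllib.urlencode({'message': json.dumps(message)})
reply = json.loads(urllib2.urlopen('http://localhost:8000/services/callback/', data).read())
assert reply['success']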
# -*- coding: utf-8 -*-
from ui import ui_elements
from ui import ui_handler
class my_text(ui_elements.Text):
def start(self, args):
self.set_text(args["text"])
self.set_pos((100, 100))
self.set_size(60)
self.set_color((0, 0, 255))
def mouse_enter(self):
self.set_color((255, 0, 0))
def mouse_leave(self):
self.set_color((0, 0, 255))
#pos = self.get_pos()
#new_pos = (pos[0] + 1, pos[1] + 1)
#self.set_pos(new_pos)
def mouse_down(self, buttons):
self.set_color((0, 255, 0))
def mouse_up(self, buttons):
self.set_color((0, 0, 255))
def init(screen):
MyText = my_text(text="Just a Test UI element")
my_handler = ui_handler.Handler(screen)
my_handler.register(MyText)
return my_handler | c-michi/pygame_ui | test_ui.py | Python | gpl-3.0 | 722 |
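A sketch of the pygame loop that would drive init() above. The pygame calls are standard; the per-event method on the handler is an assumption, since ui_handler.Handler's interface is not shown here.

import pygame
import test_ui

pygame.init()
screen = pygame.display.set_mode((640, 480))
handler = test_ui.init(screen)

running = True
while running:
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            running = False
        else:
            handler.handle_event(event)  # assumed Handler method name
    pygame.display.flip()
pygame.quit()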
"""
Copyright (c) 2012-2014 RockStor, Inc. <http://rockstor.com>
This file is part of RockStor.
RockStor is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published
by the Free Software Foundation; either version 2 of the License,
or (at your option) any later version.
RockStor is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from django.db import models
class NetatalkShare(models.Model):
YES = 'yes'
NO = 'no'
"""share that is exported"""
share = models.OneToOneField('Share', related_name='netatalkshare')
"""mount point of the share"""
path = models.CharField(max_length=4096, unique=True)
description = models.CharField(max_length=1024, default='afp on rockstor')
BOOLEAN_CHOICES = (
(YES, 'yes'),
(NO, 'no'),
)
time_machine = models.CharField(max_length=3, choices=BOOLEAN_CHOICES,
default=YES)
def share_name(self, *args, **kwargs):
return self.share.name
def share_id(self, *args, **kwargs):
return self.share.id
@property
def vol_size(self):
return self.share.size
class Meta:
app_label = 'storageadmin'
| schakrava/rockstor-core | src/rockstor/storageadmin/models/netatalk_share.py | Python | gpl-3.0 | 1,546 |
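A sketch of querying the model above from a Django shell for this project; only names defined in the model are used, and the output format is illustrative.

from storageadmin.models import NetatalkShare

# List every AFP export advertised as a Time Machine target.
for export in NetatalkShare.objects.filter(time_machine=NetatalkShare.YES):
    print export.share_name(), export.path, export.vol_size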