Dataset schema:

  repo_name        string   (length 5–92)
  path             string   (length 4–221)
  copies           string   (19 classes)
  size             string   (length 4–6)
  content          string   (length 766–896k)
  license          string   (15 classes)
  hash             int64    (-9,223,277,421,539,062,000 to 9,223,102,107B)
  line_mean        float64  (6.51 to 99.9)
  line_max         int64    (32 to 997)
  alpha_frac       float64  (0.25 to 0.96)
  autogenerated    bool     (1 class)
  ratio            float64  (1.5 to 13.6)
  config_test      bool     (2 classes)
  has_no_keywords  bool     (2 classes)
  few_assignments  bool     (1 class)
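A minimal usage sketch, assuming this table is hosted as a Hugging Face dataset loadable with the `datasets` library: the path "user/python-code-dump" is a placeholder, not a real hub location, and the filter logic assumes the column types listed above (booleans for the flag columns, `size` stored as a string).

from datasets import load_dataset

# Placeholder dataset path -- substitute the actual hub location.
ds = load_dataset("user/python-code-dump", split="train")

# Keep human-written, non-test files in a moderate size range.
# `size` is a string column per the schema, so cast before comparing.
keep = ds.filter(
    lambda row: not row["autogenerated"]
    and not row["config_test"]
    and 1000 <= int(row["size"]) <= 50000
)

print(keep[0]["repo_name"], keep[0]["path"], keep[0]["license"])
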
repo_name: fast90/christian
path: modules/hq.py
copies: 1
size: 1507
content:

from datetime import datetime


class HQ(object):
    def __init__(self):
        self.people_in_hq = 0
        self.keys_in_hq = 0
        self.joined_users = []
        self.hq_status = 'unknown'
        self.status_since = datetime.now().strftime('%Y-%m-%d %H:%M')
        self.is_clean = True
        self.joined_keys = []

    def update_time(self):
        self.status_since = datetime.now().strftime('%Y-%m-%d %H:%M')

    def hq_open(self):
        self.hq_status = 'open'
        self.update_time()

    def hq_close(self):
        self.hq_status = 'closed'
        self.update_time()
        self.people_in_hq = 0
        del(self.joined_users[:])
        del(self.joined_keys[:])

    def hq_private(self):
        self.hq_status = 'private'
        self.update_time()

    def hq_clean(self):
        self.is_clean = True

    def hq_dirty(self):
        self.is_clean = False

    def hq_join(self, user):
        self.people_in_hq += 1
        self.joined_users.append(user)

    def hq_leave(self, user):
        self.people_in_hq -= 1
        self.joined_users.remove(user)

    def hq_keyjoin(self, user):
        self.keys_in_hq += 1
        self.joined_keys.append(user)

    def hq_keyleave(self, user):
        self.keys_in_hq -= 1
        self.joined_keys.remove(user)

    def get_hq_status(self):
        return ('HQ is {} since {}. {} Members are here'
                .format(self.hq_status, self.status_since, self.people_in_hq))

    def get_hq_clean(self):
        return self.is_clean
license: gpl-3.0
hash: 6,626,186,680,984,100,000
line_mean: 24.542373
line_max: 78
alpha_frac: 0.558062
autogenerated: false
ratio: 3.24086
config_test: false
has_no_keywords: false
few_assignments: false

repo_name: micbou/YouCompleteMe
path: python/ycm/client/completer_available_request.py
copies: 2
size: 1716
content:

# Copyright (C) 2013 Google Inc.
#
# This file is part of YouCompleteMe.
#
# YouCompleteMe is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# YouCompleteMe is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with YouCompleteMe. If not, see <http://www.gnu.org/licenses/>.

from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
# Not installing aliases from python-future; it's unreliable and slow.
from builtins import *  # noqa

from ycm.client.base_request import BaseRequest, BuildRequestData


class CompleterAvailableRequest( BaseRequest ):
  def __init__( self, filetypes ):
    super( CompleterAvailableRequest, self ).__init__()
    self.filetypes = filetypes
    self._response = None

  def Start( self ):
    request_data = BuildRequestData()
    request_data.update( { 'filetypes': self.filetypes } )
    self._response = self.PostDataToHandler( request_data,
                                             'semantic_completion_available' )

  def Response( self ):
    return self._response


def SendCompleterAvailableRequest( filetypes ):
  request = CompleterAvailableRequest( filetypes )
  # This is a blocking call.
  request.Start()
  return request.Response()
license: gpl-3.0
hash: -4,551,042,928,777,884,700
line_mean: 33.32
line_max: 78
alpha_frac: 0.726107
autogenerated: false
ratio: 4.115108
config_test: false
has_no_keywords: false
few_assignments: false

repo_name: vivisect/synapse
path: synapse/cryotank.py
copies: 1
size: 49542
content:
import os import types import shutil import struct import logging import threading import contextlib from functools import partial, wraps from collections import defaultdict import lmdb # type: ignore import synapse.lib.cell as s_cell import synapse.lib.lmdb as s_lmdb import synapse.lib.queue as s_queue import synapse.lib.config as s_config import synapse.lib.msgpack as s_msgpack import synapse.lib.threads as s_threads import synapse.lib.datapath as s_datapath import synapse.exc as s_exc import synapse.glob as s_glob import synapse.common as s_common import synapse.eventbus as s_eventbus import synapse.datamodel as s_datamodel logger = logging.getLogger(__name__) class CryoTank(s_config.Config): ''' A CryoTank implements a stream of structured data. ''' def __init__(self, dirn, conf=None): s_config.Config.__init__(self, conf) self.path = s_common.gendir(dirn) path = s_common.gendir(self.path, 'cryo.lmdb') mapsize = self.getConfOpt('mapsize') self.lmdb = lmdb.open(path, writemap=True, max_dbs=128) self.lmdb.set_mapsize(mapsize) self.lmdb_items = self.lmdb.open_db(b'items') self.lmdb_metrics = self.lmdb.open_db(b'metrics') noindex = self.getConfOpt('noindex') self.indexer = None if noindex else CryoTankIndexer(self) with self.lmdb.begin() as xact: self.items_indx = xact.stat(self.lmdb_items)['entries'] self.metrics_indx = xact.stat(self.lmdb_metrics)['entries'] def fini(): self.lmdb.sync() self.lmdb.close() self.onfini(fini) @staticmethod @s_config.confdef(name='cryotank') def _cryotank_confdefs(): defs = ( ('mapsize', {'type': 'int', 'doc': 'LMDB mapsize value', 'defval': s_lmdb.DEFAULT_MAP_SIZE}), ('noindex', {'type': 'bool', 'doc': 'Disable indexing', 'defval': 0}), ) return defs def last(self): ''' Return the last item stored in this CryoTank. ''' with self.lmdb.begin() as xact: with xact.cursor(db=self.lmdb_items) as curs: if not curs.last(): return None indx = struct.unpack('>Q', curs.key())[0] return indx, s_msgpack.un(curs.value()) def puts(self, items): ''' Add the structured data from items to the CryoTank. Args: items (list): A list of objects to store in the CryoTank. Returns: int: The index that the item storage began at. ''' itembyts = [s_msgpack.en(i) for i in items] tick = s_common.now() bytesize = sum([len(b) for b in itembyts]) with self.lmdb.begin(db=self.lmdb_items, write=True) as xact: retn = self.items_indx todo = [] for byts in itembyts: todo.append((struct.pack('>Q', self.items_indx), byts)) self.items_indx += 1 with xact.cursor() as curs: curs.putmulti(todo, append=True) took = s_common.now() - tick with xact.cursor(db=self.lmdb_metrics) as curs: lkey = struct.pack('>Q', self.metrics_indx) self.metrics_indx += 1 info = {'time': tick, 'count': len(items), 'size': bytesize, 'took': took} curs.put(lkey, s_msgpack.en(info), append=True) self.fire('cryotank:puts', numrecords=len(itembyts)) return retn def metrics(self, offs, size=None): ''' Yield metrics rows starting at offset. Args: offs (int): The index offset. size (int): The maximum number of records to yield. Yields: ((int, dict)): An index offset, info tuple for metrics. ''' mink = struct.pack('>Q', offs) with self.lmdb.begin() as xact: with xact.cursor(db=self.lmdb_metrics) as curs: if not curs.set_range(mink): return for i, (lkey, lval) in enumerate(curs): if size is not None and i >= size: return indx = struct.unpack('>Q', lkey)[0] item = s_msgpack.un(lval) yield indx, item def slice(self, offs, size): ''' Yield a number of items from the CryoTank starting at a given offset. 
Args: offs (int): The index of the desired datum (starts at 0) size (int): The max number of items to yield. Notes: This API performs msgpack unpacking on the bytes, and could be slow to call remotely. Yields: ((index, object)): Index and item values. ''' lmin = struct.pack('>Q', offs) with self.lmdb.begin() as xact: with xact.cursor(db=self.lmdb_items) as curs: if not curs.set_range(lmin): return for i, (lkey, lval) in enumerate(curs): if i >= size: return indx = struct.unpack('>Q', lkey)[0] yield indx, s_msgpack.un(lval) def rows(self, offs, size): ''' Yield a number of raw items from the CryoTank starting at a given offset. Args: offs (int): The index of the desired datum (starts at 0) size (int): The max number of items to yield. Yields: ((indx, bytes)): Index and msgpacked bytes. ''' lmin = struct.pack('>Q', offs) imax = offs + size # time slice the items from the cryo tank with self.lmdb.begin() as xact: with xact.cursor(db=self.lmdb_items) as curs: if not curs.set_range(lmin): return for lkey, lval in curs: indx = struct.unpack('>Q', lkey)[0] if indx >= imax: break yield indx, lval def info(self): ''' Returns information about the CryoTank instance. Returns: dict: A dict containing items and metrics indexes. ''' return {'indx': self.items_indx, 'metrics': self.metrics_indx, 'stat': self.lmdb.stat()} class CryoCell(s_cell.Cell): def postCell(self): ''' CryoCell initialization routines. ''' self.names = self.getCellDict('cryo:names') self.confs = self.getCellDict('cryo:confs') self.tanks = s_eventbus.BusRef() for name, iden in self.names.items(): logger.info('Bringing tank [%s][%s] online', name, iden) path = self.getCellPath('tanks', iden) conf = self.confs.get(name) tank = CryoTank(path, conf) self.tanks.put(name, tank) def initConfDefs(self): super().initConfDefs() self.addConfDefs(( ('defvals', {'defval': {}, 'ex': '{"mapsize": 1000000000}', 'doc': 'Default settings for cryotanks created by the cell.', 'asloc': 'tank_defaults'}), )) def finiCell(self): ''' Fini handlers for the CryoCell ''' self.tanks.fini() def handlers(self): ''' CryoCell message handlers. 
''' cryo_handlers = { 'cryo:init': self._onCryoInit, 'cryo:list': self._onCryoList, 'cryo:puts': self._onCryoPuts, 'cryo:dele': self._onCryoDele, 'cryo:last': partial(self._onGeneric, CryoTank.last), 'cryo:rows': partial(self._onGeneric, CryoTank.rows), 'cryo:slice': partial(self._onGeneric, CryoTank.slice), 'cryo:metrics': partial(self._onGeneric, CryoTank.metrics), } indexer_calls = { 'cryo:indx:add': CryoTankIndexer.addIndex, 'cryo:indx:del': CryoTankIndexer.delIndex, 'cryo:indx:pause': CryoTankIndexer.pauseIndex, 'cryo:indx:resume': CryoTankIndexer.resumeIndex, 'cryo:indx:stat': CryoTankIndexer.getIndices, 'cryo:indx:querynormvalu': CryoTankIndexer.queryNormValu, 'cryo:indx:querynormrecords': CryoTankIndexer.queryNormRecords, 'cryo:indx:queryrows': CryoTankIndexer.queryRows } cryo_handlers.update({k: partial(self._onCryoIndex, v) for k, v in indexer_calls.items()}) return cryo_handlers def _standard_return(self, chan, subfunc, *args, **kwargs): ''' Calls a function and returns the return value or exception back through the channel ''' try: rv = subfunc(*args, **kwargs) except Exception as e: retn = s_common.getexcfo(e) return chan.tx((False, retn)) if isinstance(rv, types.GeneratorType): chan.setq() chan.tx((True, True)) genr = s_common.chunks(rv, 1000) chan.txwind(genr, 100, timeout=30) return return chan.tx((True, rv)) @s_glob.inpool def _onGeneric(self, method, chan, mesg): ''' Generic handler that looks up tank in name field and passes it to method of cryotank ''' cmdstr, kwargs = mesg name = kwargs.pop('name') tank = self.tanks.get(name) with chan: if tank is None: return chan.tx((False, ('NoSuchName', {'name': name}))) return self._standard_return(chan, method, tank, **kwargs) @s_glob.inpool def _onCryoIndex(self, subfunc, chan, mesg): cmdstr, kwargs = mesg name = kwargs.pop('name') tank = self.tanks.get(name) with chan: if tank is None: return chan.tx((False, ('NoSuchName', {'name': name}))) indexer = tank.indexer if indexer is None: return chan.tx((False, ('IndexingDisabled', {'name': name}))) return self._standard_return(chan, subfunc, indexer, **kwargs) def genCryoTank(self, name, conf=None): ''' Generate a new CryoTank with a given name or get an reference to an existing CryoTank. Args: name (str): Name of the CryoTank. Returns: CryoTank: A CryoTank instance. ''' tank = self.tanks.get(name) if tank is not None: return tank iden = s_common.guid() logger.info('Creating new tank: %s', name) path = self.getCellPath('tanks', iden) mergeconf = self.tank_defaults.copy() if conf is not None: mergeconf.update(conf) tank = CryoTank(path, mergeconf) self.names.set(name, iden) self.confs.set(name, conf) self.tanks.put(name, tank) return tank def getCryoList(self): ''' Get a list of (name, info) tuples for the CryoTanks. Returns: list: A list of tufos. 
''' return [(name, tank.info()) for (name, tank) in self.tanks.items()] def _onCryoList(self, chan, mesg): chan.txfini((True, self.getCryoList())) @s_glob.inpool def _onCryoDele(self, chan, mesg): name = mesg[1].get('name') logger.info('Deleting tank: %s' % (name,)) with chan: tank = self.tanks.pop(name) # type: CryoTank if tank is None: return chan.tx((True, False)) self.names.pop(name) tank.fini() shutil.rmtree(tank.path, ignore_errors=True) return chan.tx((True, True)) @s_glob.inpool def _onCryoPuts(self, chan, mesg): name = mesg[1].get('name') chan.setq() chan.tx(True) with chan: size = 0 tank = self.genCryoTank(name) for items in chan.rxwind(timeout=30): tank.puts(items) size += len(items) chan.txok(size) @s_glob.inpool def _onCryoInit(self, chan, mesg): with chan: tank = self.tanks.get(mesg[1].get('name')) if tank: return chan.tx((True, False)) return self._standard_return(chan, lambda **kwargs: bool(self.genCryoTank(**kwargs)), **mesg[1]) class CryoClient: ''' Client-side helper for interacting with a CryoCell which hosts CryoTanks. Args: auth ((str, dict)): A user auth tufo addr ((str, int)): The address / port tuple. timeout (int): Connect timeout ''' _chunksize = 10000 def _remotecall(self, name, cmd_str, timeout=None, **kwargs): ''' Handles all non-generator remote calls ''' kwargs['name'] = name ok, retn = self.sess.call((cmd_str, kwargs), timeout=timeout) return s_common.reqok(ok, retn) def _genremotecall(self, name, cmd_str, timeout=None, **kwargs): ''' Handles all generator function remote calls ''' kwargs['name'] = name with self.sess.task((cmd_str, kwargs), timeout=timeout) as chan: ok, retn = chan.next(timeout=timeout) s_common.reqok(ok, retn) for bloc in chan.rxwind(timeout=timeout): for item in bloc: yield item def __init__(self, sess): self.sess = sess def puts(self, name, items, timeout=None): ''' Add data to the named remote CryoTank by consuming from items. Args: name (str): The name of the remote CryoTank. items (iter): An iterable of data items to load. timeout (float/int): The maximum timeout for an ack. Returns: None ''' with self.sess.task(('cryo:puts', {'name': name})) as chan: if not chan.next(timeout=timeout): return False genr = s_common.chunks(items, self._chunksize) chan.txwind(genr, 100, timeout=timeout) return chan.next(timeout=timeout) def last(self, name, timeout=None): ''' Return the last entry in the named CryoTank. Args: name (str): The name of the remote CryoTank. timeout (int): Request timeout Returns: ((int, object)): The last entry index and object from the CryoTank. ''' return self._remotecall(name, cmd_str='cryo:last', timeout=timeout) def delete(self, name, timeout=None): ''' Delete a named CryoTank. Args: name (str): The name of the remote CryoTank. timeout (int): Request timeout Returns: bool: True if the CryoTank was deleted, False if it was not deleted. ''' return self._remotecall(name, cmd_str='cryo:dele', timeout=timeout) def list(self, timeout=None): ''' Get a list of the remote CryoTanks. Args: timeout (int): Request timeout Returns: tuple: A tuple containing name, info tufos for the remote CryoTanks. ''' ok, retn = self.sess.call(('cryo:list', {}), timeout=timeout) return s_common.reqok(ok, retn) def slice(self, name, offs, size, timeout=None): ''' Slice and return a section from the named CryoTank. Args: name (str): The name of the remote CryoTank. offs (int): The offset to begin the slice. size (int): The number of records to slice. timeout (int): Request timeout Yields: (int, obj): (indx, item) tuples for the sliced range. 
''' return self._genremotecall(name, offs=offs, size=size, cmd_str='cryo:slice', timeout=timeout) def rows(self, name, offs, size, timeout=None): ''' Retrieve raw rows from a section of the named CryoTank. Args: name (str): The name of the remote CryoTank. offs (int): The offset to begin the row retrieval from. size (int): The number of records to retrieve. timeout (int): Request timeout. Notes: This returns msgpack encoded records. It is the callers responsibility to decode them. Yields: (int, bytes): (indx, bytes) tuples for the rows in range. ''' return self._genremotecall(name, offs=offs, size=size, cmd_str='cryo:rows', timeout=timeout) def metrics(self, name, offs, size=None, timeout=None): ''' Carve a slice of metrics data from the named CryoTank. Args: name (str): The name of the remote CryoTank. offs (int): The index offset. timeout (int): Request timeout Returns: tuple: A tuple containing metrics tufos for the named CryoTank. ''' return self._genremotecall(name, offs=offs, size=size, cmd_str='cryo:metrics', timeout=timeout) def init(self, name, conf=None, timeout=None): ''' Create a new named Cryotank. Args: name (str): Name of the Cryotank to make. conf (dict): Additional configable options for the Cryotank. timeout (int): Request timeout Returns: True if the tank was created, False if the tank existed or there was an error during CryoTank creation. ''' return self._remotecall(name, conf=conf, cmd_str='cryo:init', timeout=timeout) def addIndex(self, name, prop, syntype, datapaths, timeout=None): ''' Add an index to the cryotank Args: name (str): name of the Cryotank. prop (str): the name of the property this will be stored as in the normalized record syntype (str): the synapse type this will be interpreted as datapaths(Iterable[str]): datapath specs against which the raw record is run to extract a single field that is passed to the type normalizer. These will be tried in order until one succeeds. At least one must be present. timeout (Optional[float]): the maximum timeout for an ack Returns: None Note: Additional datapaths will only be tried if prior datapaths are not present, and *not* if the normalization fails. ''' if not len(datapaths): raise s_exc.BadOperArg(mesg='datapaths must have at least one entry') return self._remotecall(name, prop=prop, syntype=syntype, datapaths=datapaths, cmd_str='cryo:indx:add', timeout=timeout) def delIndex(self, name, prop, timeout=None): ''' Delete an index Args: name (str): name of the Cryotank prop (str): the (normalized) property name timeout (Optional[float]): the maximum timeout for an ack Returns: None ''' return self._remotecall(name, prop=prop, cmd_str='cryo:indx:del', timeout=timeout) def pauseIndex(self, name, prop=None, timeout=None): ''' Temporarily stop indexing one or all indices Args: name (str): name of the Cryotank prop: (Optional[str]): the index to stop indexing, or if None, indicate to stop all indices timeout (Optional[float]): the maximum timeout for an ack Returns: None Note: Pausing is not persistent. Restarting the process will resume indexing. 
''' return self._remotecall(name, prop=prop, cmd_str='cryo:indx:pause', timeout=timeout) def resumeIndex(self, name, prop=None, timeout=None): ''' Undo a pauseIndex Args: name (str): name of the Cryotank prop (Optional[str]): the index to start indexing, or if None, indicate to resume all indices timeout (Optional[float]): the maximum timeout for an ack Returns: None ''' return self._remotecall(name, prop=prop, cmd_str='cryo:indx:resume', timeout=timeout) def getIndices(self, name, timeout=None): ''' Get information about all the indices Args: name (str): name of the Cryotank timeout (Optional[float]): the maximum timeout for an ack Returns: List[Dict[str: Any]]: all the indices with progress and statistics ''' return self._remotecall(name, cmd_str='cryo:indx:stat', timeout=timeout) def queryNormValu(self, name, prop, valu=None, exact=False, timeout=None): ''' Query for normalized individual property values Args: name (str): name of the Cryotank prop (str): The name of the indexed property valu (Optional[Union[int, str]]): The normalized value. If not present, all records with prop present, sorted by prop will be returned. It will be considered a prefix if exact is False. exact (bool): Indicates that the result must match exactly. Conversely, if False, indicates a prefix match. timeout (Optional[float]): the maximum timeout for an ack Returns: Iterable[Tuple[int, Union[str, int]]]: A generator of offset, normalized value tuples. ''' return self._genremotecall(name, prop=prop, valu=valu, exact=exact, cmd_str='cryo:indx:querynormvalu', timeout=timeout) def queryNormRecords(self, name, prop, valu=None, exact=False, timeout=None): ''' Query for normalized property values grouped together in dicts Args: name (str): name of the Cryotank prop (str): The name of the indexed property valu (Optional[Union[int, str]]): The normalized value. If not present, all records with prop present, sorted by prop will be returned. It will be considered a prefix if exact is False. exact (bool): Indicates that the result must match exactly. Conversely, if False, indicates a prefix match. timeout (Optional[float]): the maximum timeout for an ack Returns: Iterable[Tuple[int, Dict[str, Union[str, int]]]]: A generator of offset, dictionary tuples ''' return self._genremotecall(name, prop=prop, valu=valu, exact=exact, cmd_str='cryo:indx:querynormrecords', timeout=timeout) def queryRows(self, name, prop, valu=None, exact=False, timeout=None): ''' Query for raw (i.e. from the cryotank itself) records Args: name (str): name of the Cryotank prop (str): The name of the indexed property valu (Optional[Union[int, str]]): The normalized value. If not present, all records with prop present, sorted by prop will be returned. It will be considered a prefix if exact is False. exact (bool): Indicates that the result must match exactly. Conversely, if False, indicates a prefix match. timeout (Optional[float]): The maximum timeout for an ack Returns: Iterable[Tuple[int, bytes]]: A generator of tuple (offset, messagepack encoded) raw records ''' return self._genremotecall(name, prop=prop, valu=valu, exact=exact, cmd_str='cryo:indx:queryrows', timeout=timeout) # TODO: what to do with subprops returned from getTypeNorm class _MetaEntry: ''' Describes a single CryoTank index in the system. 
''' def __init__(self, propname: str, syntype: str, datapaths) -> None: ''' Makes a MetaEntry Args: propname: The name of the key in the normalized dictionary syntype: The synapse type name against which the data will be normalized datapath (Iterable[str]) One or more datapath strings that will be used to find the field in a raw record ''' self.propname = propname self.syntype = syntype self.datapaths = tuple(s_datapath.DataPath(d) for d in datapaths) def en(self): ''' Encodes a MetaEntry for storage ''' return s_msgpack.en(self.asdict()) def asdict(self): ''' Returns a MetaEntry as a dictionary ''' return {'propname': self.propname, 'syntype': self.syntype, 'datapaths': tuple(d.path for d in self.datapaths)} # Big-endian 64-bit integer encoder _Int64be = struct.Struct('>Q') class _IndexMeta: ''' Manages persistence of CryoTank index metadata with an in-memory copy "Schema": b'indices' key has msgpack encoded dict of { 'present': [8238483: {'propname': 'foo:bar', 'syntype': type, 'datapaths': (datapath, datapath2)}, ...], 'deleting': [8238483, ...] } b'progress' key has mesgpack encoded dict of { 8328483: {nextoffset, ngood, nnormfail}, ... _present_ contains the encoding information about the current indices _deleting_ contains the indices currently being deleted (but aren't done) _progress_ contains how far each index has gotten, how many successful props were indexed (which might be different because of missing properties), and how many normalizations failed. It is separate because it gets updated a lot more. ''' def __init__(self, dbenv: lmdb.Environment) -> None: ''' Creates metadata for all the indices. Args: dbenv (lmdb.Environment): the lmdb instance in which to store the metadata. Returns: None ''' self._dbenv = dbenv # The table in the database file (N.B. in LMDB speak, this is called a database) self._metatbl = dbenv.open_db(b'meta') is_new_db = False with dbenv.begin(db=self._metatbl, buffers=True) as txn: indices_enc = txn.get(b'indices') progress_enc = txn.get(b'progress') if indices_enc is None or progress_enc is None: if indices_enc is None and progress_enc is None: is_new_db = True indices_enc = s_msgpack.en({'present': {}, 'deleting': []}) progress_enc = s_msgpack.en({}) else: raise s_exc.CorruptDatabase('missing meta information in index meta') # pragma: no cover indices = s_msgpack.un(indices_enc) # The details about what the indices are actually indexing: the datapath and type. self.indices = {k: _MetaEntry(**s_msgpack.un(v)) for k, v in indices.get('present', {}).items()} self.deleting = list(indices.get('deleting', ())) # Keeps track (non-persistently) of which indices have been paused self.asleep = defaultdict(bool) # type: ignore # How far each index has progressed as well as statistics self.progresses = s_msgpack.un(progress_enc) if not all(p in self.indices for p in self.deleting): raise s_exc.CorruptDatabase( 'index meta table: deleting entry with unrecognized property name') # pragma: no cover if not all(p in self.indices for p in self.progresses): raise s_exc.CorruptDatabase( 'index meta table: progress entry with unrecognized property name') # pragma: no cover if is_new_db: self.persist() def persist(self, progressonly=False, txn=None): ''' Persists the index info to the database Args: progressonly (bool): if True, only persists the progress (i.e. more dynamic) information txn (Optional[lmdb.Transaction]): if not None, will use that transaction to record data. txn is not committed. 
Returns: None ''' d = {'delete': self.deleting, 'present': {k: metaentry.en() for k, metaentry in self.indices.items()}} with contextlib.ExitStack() as stack: if txn is None: txn = stack.enter_context(self._dbenv.begin(db=self._metatbl, buffers=True, write=True)) if not progressonly: txn.put(b'indices', s_msgpack.en(d), db=self._metatbl) txn.put(b'progress', s_msgpack.en(self.progresses), db=self._metatbl) def lowestProgress(self): ''' Returns: int: The next offset that should be indexed, based on active indices. ''' nextoffsets = [p['nextoffset'] for iid, p in self.progresses.items() if not self.asleep[iid]] return min(nextoffsets) if nextoffsets else s_lmdb.MAX_INT_VAL def iidFromProp(self, prop): ''' Retrieve the random index ID from the property name Args: prop (str) The name of the indexed property Returns: int: the index id for the propname, None if not found ''' return next((k for k, idx in self.indices.items() if idx.propname == prop), None) def addIndex(self, prop, syntype, datapaths): ''' Add an index to the cryotank Args: prop (str): the name of the property this will be stored as in the normalized record syntype (str): the synapse type this will be interpreted as datapaths (Iterable[str]): datapaths that will be tried in order. Returns: None Note: Additional datapaths will only be tried if prior datapaths are not present, and *not* if the normalization fails. ''' if self.iidFromProp(prop) is not None: raise s_exc.DupIndx(mesg='Index already exists', index=prop) if not len(datapaths): raise s_exc.BadOperArg(mesg='datapaths must have at least one entry') s_datamodel.tlib.reqDataType(syntype) iid = int.from_bytes(os.urandom(8), 'little') self.indices[iid] = _MetaEntry(propname=prop, syntype=syntype, datapaths=datapaths) self.progresses[iid] = {'nextoffset': 0, 'ngood': 0, 'nnormfail': 0} self.persist() def delIndex(self, prop): ''' Delete an index Args: prop (str): the (normalized) property name Returns: None ''' iid = self.iidFromProp(prop) if iid is None: raise s_exc.NoSuchIndx(mesg='No such index', index=prop) del self.indices[iid] self.deleting.append(iid) # remove the progress entry in case a new index with the same propname gets added later del self.progresses[iid] self.persist() def pauseIndex(self, prop): ''' Temporarily stop indexing one or all indices Args: prop: (Optional[str]): the index to stop indexing, or if None, indicate to stop all indices Returns: None Note: Pausing is not persistent. Restarting the process will resume indexing. 
''' for iid, idx in self.indices.items(): if prop is None or prop == idx.propname: self.asleep[iid] = True def resumeIndex(self, prop): ''' Undo a pauseIndex Args: prop (Optional[str]): the index to start indexing, or if None, indicate to resume all indices Returns: None ''' for iid, idx in self.indices.items(): if prop is None or prop == idx.propname: self.asleep[iid] = False def markDeleteComplete(self, iid): ''' Indicates that deletion of a single index is complete Args: iid (int): The index ID to mark as deleted ''' self.deleting.remove(iid) self.persist() _Int64le = struct.Struct('<Q') def _iid_en(iid): ''' Encode a little endian 64-bit integer ''' return _Int64le.pack(iid) def _iid_un(iid): ''' Decode a little endian 64-bit integer ''' return _Int64le.unpack(iid)[0] def _inWorker(callback): ''' Queue the the decorated function to the indexing worker to run in his thread Args: callback: the function to wrap Returns: the wrapped function (Just like inpool for the worker) ''' @wraps(callback) def wrap(self, *args, **kwargs): with s_threads.RetnWait() as retn: self._workq.put((retn, callback, (self, ) + args, kwargs)) succ, rv = retn.wait(timeout=self.MAX_WAIT_S) if succ: if isinstance(rv, Exception): raise rv return rv raise s_exc.TimeOut() return wrap class CryoTankIndexer: ''' Manages indexing of a single cryotank's records This implements a lazy indexer that indexes a cryotank in a separate thread. Cryotank entries are msgpack-encoded values. An index consists of a property name, one or more datapaths (i.e. what field out of the entry), and a synapse type. The type specifies the function that normalizes the output of the datapath query into a string or integer. Indices can be added and deleted asynchronously from the indexing thread via CryotankIndexer.addIndex and CryotankIndexer.delIndex. Indexes can be queried with queryNormValu, queryNormRecords, queryRows. To harmonize with LMDB requirements, writing only occurs on a singular indexing thread. Reading indices takes place in the caller's thread. Both reading and writing index metadata (that is, information about which indices are running) take place on the indexer's thread. Note: The indexer cannot detect when a type has changed from underneath itself. Operators must explicitly delete and re-add the index to avoid mixed normalized data. 
''' MAX_WAIT_S = 10 def __init__(self, cryotank): ''' Create an indexer Args: cryotank: the cryotank to index Returns: None ''' self.cryotank = cryotank ebus = cryotank self._worker = threading.Thread(target=self._workerloop, name='CryoTankIndexer') path = s_common.gendir(cryotank.path, 'cryo_index.lmdb') cryotank_map_size = cryotank.lmdb.info()['map_size'] self._dbenv = lmdb.open(path, writemap=True, metasync=False, max_readers=8, max_dbs=4, map_size=cryotank_map_size) # iid, v -> offset table self._idxtbl = self._dbenv.open_db(b'indices', dupsort=True) # offset, iid -> normalized prop self._normtbl = self._dbenv.open_db(b'norms') self._to_delete = {} # type: Dict[str, int] self._workq = s_queue.Queue() # A dict of propname -> MetaEntry self._meta = _IndexMeta(self._dbenv) self._next_offset = self._meta.lowestProgress() self._chunk_sz = 1000 # < How many records to read at a time self._remove_chunk_sz = 1000 # < How many index entries to remove at a time ebus.on('cryotank:puts', self._onData) self._worker.start() def _onfini(): self._workq.done() self._worker.join(self.MAX_WAIT_S) self._dbenv.close() ebus.onfini(_onfini) def _onData(self, unused): ''' Wake up the index worker if he already doesn't have a reason to be awake ''' if 0 == len(self._workq): self._workq.put((None, lambda: None, None, None)) def _removeSome(self): ''' Make some progress on removing deleted indices ''' left = self._remove_chunk_sz for iid in self._meta.deleting: if not left: break iid_enc = _iid_en(iid) with self._dbenv.begin(db=self._idxtbl, buffers=True, write=True) as txn, txn.cursor() as curs: if curs.set_range(iid_enc): for k, offset_enc in curs.iternext(): if k[:len(iid_enc)] != iid_enc: break if not curs.delete(): raise s_exc.CorruptDatabase('delete failure') # pragma: no cover txn.delete(offset_enc, iid_enc, db=self._normtbl) left -= 1 if not left: break if not left: break self._meta.markDeleteComplete(iid) def _normalize_records(self, raw_records): ''' Yield stream of normalized fields Args: raw_records(Iterable[Tuple[int, Dict[int, str]]]) generator of tuples of offset/decoded raw cryotank record Returns: Iterable[Tuple[int, int, Union[str, int]]]: generator of tuples of offset, index ID, normalized property value ''' for offset, record in raw_records: self._next_offset = offset + 1 dp = s_datapath.initelem(s_msgpack.un(record)) for iid, idx in ((k, v) for k, v in self._meta.indices.items() if not self._meta.asleep[k]): if self._meta.progresses[iid]['nextoffset'] > offset: continue try: self._meta.progresses[iid]['nextoffset'] = offset + 1 for datapath in idx.datapaths: field = dp.valu(datapath) if field is None: continue # TODO : what to do with subprops? 
break else: # logger.debug('Datapaths %s yield nothing for offset %d', # [d.path for d in idx.datapaths], offset) continue normval, _ = s_datamodel.getTypeNorm(idx.syntype, field) except (s_exc.NoSuchType, s_exc.BadTypeValu): # logger.debug('Norm fail', exc_info=True) self._meta.progresses[iid]['nnormfail'] += 1 continue self._meta.progresses[iid]['ngood'] += 1 yield offset, iid, normval def _writeIndices(self, rows): ''' Persist actual indexing to disk Args: rows(Iterable[Tuple[int, int, Union[str, int]]]): generators of tuples of offset, index ID, normalized property value Returns: int: the next cryotank offset that should be indexed ''' count = -1 with self._dbenv.begin(db=self._idxtbl, buffers=True, write=True) as txn: for count, (offset, iid, normval) in enumerate(rows): offset_enc = _Int64be.pack(offset) iid_enc = _iid_en(iid) valkey_enc = s_lmdb.encodeValAsKey(normval) txn.put(iid_enc + valkey_enc, offset_enc) txn.put(offset_enc + iid_enc, s_msgpack.en(normval), db=self._normtbl) self._meta.persist(progressonly=True, txn=txn) return count + 1 def _workerloop(self): ''' Actually do the indexing Runs as separate thread. ''' stillworktodo = True last_callback = 'None' while True: # Run the outstanding commands recalc = False while True: try: job = self._workq.get(timeout=0 if stillworktodo else None) stillworktodo = True retn, callback, args, kwargs = job try: if retn is not None: last_callback = callback.__name__ retn.retn(callback(*args, **kwargs)) recalc = True except Exception as e: if retn is None: raise else: # Not using errx because I want the exception object itself retn.retn(e) except s_exc.IsFini: return except s_exc.TimeOut: break if recalc: # Recalculate the next offset to index, since we may have a new index self._next_offset = self._meta.lowestProgress() record_tuples = self.cryotank.rows(self._next_offset, self._chunk_sz) norm_gen = self._normalize_records(record_tuples) rowcount = self._writeIndices(norm_gen) self._removeSome() if not rowcount and not self._meta.deleting: if stillworktodo is True: self.cryotank.fire('cryotank:indexer:noworkleft:' + last_callback) last_callback = 'None' stillworktodo = False else: stillworktodo = True @_inWorker def addIndex(self, prop, syntype, datapaths): ''' Add an index to the cryotank Args: prop (str): the name of the property this will be stored as in the normalized record syntype (str): the synapse type this will be interpreted as datapaths(Iterable[str]): datapath specs against which the raw record is run to extract a single field that is passed to the type normalizer. These will be tried in order until one succeeds. At least one must be present. Returns: None Note: Additional datapaths will only be tried if prior datapaths are not present, and *not* if the normalization fails. ''' return self._meta.addIndex(prop, syntype, datapaths) @_inWorker def delIndex(self, prop): ''' Delete an index Args: prop (str): the (normalized) property name Returns: None ''' return self._meta.delIndex(prop) @_inWorker def pauseIndex(self, prop=None): ''' Temporarily stop indexing one or all indices. Args: prop: (Optional[str]): the index to stop indexing, or if None, indicate to stop all indices Returns: None Note: Pausing is not persistent. Restarting the process will resume indexing. 
''' return self._meta.pauseIndex(prop) @_inWorker def resumeIndex(self, prop=None): ''' Undo a pauseIndex Args: prop: (Optional[str]): the index to start indexing, or if None, indicate to resume all indices Returns: None ''' return self._meta.resumeIndex(prop) @_inWorker def getIndices(self): ''' Get information about all the indices Args: None Returns: List[Dict[str: Any]]: all the indices with progress and statistics ''' idxs = {iid: dict(metaentry.asdict()) for iid, metaentry in self._meta.indices.items()} for iid in idxs: idxs[iid].update(self._meta.progresses.get(iid, {})) return list(idxs.values()) def _iterrows(self, prop, valu, exact=False): ''' Query against an index. Args: prop (str): The name of the indexed property valu (Optional[Union[int, str]]): The normalized value. If not present, all records with prop present, sorted by prop will be returned. It will be considered prefix if exact is False. exact (bool): Indicates that the result must match exactly. Conversly, if False, indicates a prefix match. Returns: Iterable[Tuple[int, bytes, bytes, lmdb.Transaction]: a generator of a Tuple of the offset, the encoded offset, the encoded index ID, and the LMDB read transaction. Note: Ordering of Tuples disregard everything after the first 128 bytes of a property. ''' iid = self._meta.iidFromProp(prop) if iid is None: raise s_exc.NoSuchIndx(mesg='No such index', index=prop) iidenc = _iid_en(iid) islarge = valu is not None and isinstance(valu, str) and len(valu) >= s_lmdb.LARGE_STRING_SIZE if islarge and not exact: raise s_exc.BadOperArg(mesg='prefix search valu cannot exceed 128 characters') if islarge and exact: key = iidenc + s_lmdb.encodeValAsKey(valu) elif valu is None: key = iidenc else: key = iidenc + s_lmdb.encodeValAsKey(valu, isprefix=not exact) with self._dbenv.begin(db=self._idxtbl, buffers=True) as txn, txn.cursor() as curs: if exact: rv = curs.set_key(key) else: rv = curs.set_range(key) if not rv: return while True: rv = [] curkey, offset_enc = curs.item() if (not exact and not curkey[:len(key)] == key) or (exact and curkey != key): return offset = _Int64be.unpack(offset_enc)[0] yield (offset, offset_enc, iidenc, txn) if not curs.next(): return def queryNormValu(self, prop, valu=None, exact=False): ''' Query for normalized individual property values Args: prop (str): The name of the indexed property valu (Optional[Union[int, str]]): The normalized value. If not present, all records with prop present, sorted by prop will be returned. It will be considered a prefix if exact is False. exact (bool): Indicates that the result must match exactly. Conversely, if False, indicates a prefix match. Returns: Iterable[Tuple[int, Union[str, int]]]: A generator of offset, normalized value tuples. ''' if not exact and valu is not None and isinstance(valu, str) and len(valu) >= s_lmdb.LARGE_STRING_SIZE: raise s_exc.BadOperArg(mesg='prefix search valu cannot exceed 128 characters') for (offset, offset_enc, iidenc, txn) in self._iterrows(prop, valu, exact): rv = txn.get(bytes(offset_enc) + iidenc, None, db=self._normtbl) if rv is None: raise s_exc.CorruptDatabase('Missing normalized record') # pragma: no cover yield offset, s_msgpack.un(rv) def queryNormRecords(self, prop, valu=None, exact=False): ''' Query for normalized property values grouped together in dicts Args: prop (str): The name of the indexed property valu (Optional[Union[int, str]]): The normalized value. If not present, all records with prop present, sorted by prop will be returned. 
It will be considered a prefix if exact is False. exact (bool): Indicates that the result must match exactly. Conversely, if False, indicates a prefix match. Returns: Iterable[Tuple[int, Dict[str, Union[str, int]]]]: A generator of offset, dictionary tuples ''' if not exact and valu is not None and isinstance(valu, str) and len(valu) >= s_lmdb.LARGE_STRING_SIZE: raise s_exc.BadOperArg(mesg='prefix search valu cannot exceed 128 characters') for offset, offset_enc, _, txn in self._iterrows(prop, valu, exact): norm = {} olen = len(offset_enc) with txn.cursor(db=self._normtbl) as curs: if not curs.set_range(offset_enc): raise s_exc.CorruptDatabase('Missing normalized record') # pragma: no cover while True: curkey, norm_enc = curs.item() if curkey[:olen] != offset_enc: break iid = _iid_un(curkey[olen:]) # this is racy with the worker, but it is still safe idx = self._meta.indices.get(iid) if idx is not None: norm[idx.propname] = s_msgpack.un(norm_enc) if not curs.next(): break yield offset, norm def queryRows(self, prop, valu=None, exact=False): ''' Query for raw (i.e. from the cryotank itself) records Args: prop (str): The name of the indexed property valu (Optional[Union[int, str]]): The normalized value. If not present, all records with prop present, sorted by prop will be returned. It will be considered a prefix if exact is False. exact (bool): Indicates that the result must match exactly. Conversely, if False, indicates a prefix match. Returns: Iterable[Tuple[int, bytes]]: A generator of tuple (offset, messagepack encoded) raw records ''' if not exact and valu is not None and isinstance(valu, str) and len(valu) >= s_lmdb.LARGE_STRING_SIZE: raise s_exc.BadOperArg(mesg='prefix search valu cannot exceed 128 characters') for offset, _, _, txn in self._iterrows(prop, valu, exact): yield next(self.cryotank.rows(offset, 1))
license: apache-2.0
hash: -7,920,170,919,440,636,000
line_mean: 35.214912
line_max: 120
alpha_frac: 0.568669
autogenerated: false
ratio: 3.987605
config_test: false
has_no_keywords: false
few_assignments: false

repo_name: Tufin/pytos
path: pytos/securetrack/xml_objects/rest/nat_rules.py
copies: 1
size: 13490
content:

import logging

from pytos.common.base_types import XML_List, XML_Object_Base, Comparable
from pytos.common.definitions.xml_tags import Elements
from pytos.common.logging.definitions import XML_LOGGER_NAME
from pytos.common.functions import get_xml_int_value, get_xml_text_value, get_xml_node, create_tagless_xml_objects_list, str_to_bool
from pytos.securetrack.xml_objects.base_types import Base_Object

logger = logging.getLogger(XML_LOGGER_NAME)


class NatRules(XML_List):
    def __init__(self, nat_rules):
        super().__init__(Elements.NAT_RULES, nat_rules)

    @classmethod
    def from_xml_node(cls, xml_node):
        rules = []
        for nat_rule in xml_node.iter(tag=Elements.NAT_RULE):
            rules.append(NatRule.from_xml_node(nat_rule))
        return cls(rules)


class NatRule(XML_Object_Base, Comparable):
    def __init__(self, binding, num_id, order, uid, auto_nat, disabled, dst_nat_method,
                 enable_net4tonet6, enable_route_lookup, orig_dst_network, orig_service,
                 orig_src_network, egress_interface, rule_number, service_nat_method,
                 src_nat_method, translated_service, translated_dst_network,
                 translated_src_network, nat_type):
        self.binding = binding
        self.id = num_id
        self.order = order
        self.uid = uid
        self.autoNat = auto_nat
        self.disabled = disabled
        self.dstNatMethod = dst_nat_method
        self.enable_net4tonet6 = enable_net4tonet6
        self.enable_route_lookup = enable_route_lookup
        self.orig_dst_network = orig_dst_network
        self.orig_service = orig_service
        self.orig_src_network = orig_src_network
        self.egress_interface = egress_interface
        self.ruleNumber = rule_number
        self.serviceNatMethod = service_nat_method
        self.srcNatMethod = src_nat_method
        self.translated_service = translated_service
        self.translated_dst_network = translated_dst_network
        self.translated_src_network = translated_src_network
        self.type = nat_type
        super().__init__(Elements.NAT_RULE)

    def _key(self):
        hash_keys = [self.id, self.uid]
        if self.binding:
            try:
                hash_keys.append(self.binding.uid)
            except AttributeError:
                pass
        return tuple(hash_keys)

    def __str__(self):
        return "ORIGINAL: (src={} dst={} srv={}); TRANSLATED: (src={} dst={} srv={})".format(
            self.orig_src_network, self.orig_dst_network, self.orig_service,
            self.translated_src_network, self.translated_dst_network, self.translated_service
        )

    def is_enabled(self):
        return str_to_bool(self.disabled)

    @classmethod
    def from_xml_node(cls, xml_node):
        num_id = get_xml_int_value(xml_node, Elements.ID)
        order = get_xml_text_value(xml_node, Elements.ORDER)
        uid = get_xml_text_value(xml_node, Elements.UID)
        auto_nat = get_xml_text_value(xml_node, Elements.AUTONAT)
        disabled = get_xml_text_value(xml_node, Elements.DISABLED)
        dst_nat_method = get_xml_text_value(xml_node, Elements.DST_NAT_METHOD)
        enable_net4tonet6 = get_xml_text_value(xml_node, Elements.ENABLE_NET_4_TO_NET_6)
        enable_route_lookup = get_xml_text_value(xml_node, Elements.ENABLE_ROUTE_LOOKUP)
        rule_number = get_xml_text_value(xml_node, Elements.RULENUMBER)
        service_nat_method = get_xml_text_value(xml_node, Elements.SERVICENATMETHOD)
        src_nat_method = get_xml_text_value(xml_node, Elements.SRCNATMETHOD)
        nat_type = get_xml_text_value(xml_node, Elements.TYPE)
        binding = create_tagless_xml_objects_list(xml_node, Elements.BINDING, NatRuleBinding)[0]
        orig_dst_network = create_tagless_xml_objects_list(xml_node, Elements.ORIG_DST_NETWORK, OrigDstNetwork)[0]
        orig_service = create_tagless_xml_objects_list(xml_node, Elements.ORIG_SERVICE, OrigService)[0]
        orig_src_network = create_tagless_xml_objects_list(xml_node, Elements.ORIG_SRC_NETWORK, OrigSrcNetwork)[0]
        egress_interface_node = get_xml_node(xml_node, Elements.ENGRESS_INTERFACE)
        egress_interface = EgressInterface.from_xml_node(egress_interface_node) if egress_interface_node else None
        translated_service = create_tagless_xml_objects_list(xml_node, Elements.TRANSLATED_SERVICE, TranslatedService)[0]
        translated_dst_network = create_tagless_xml_objects_list(xml_node, Elements.TRANSLATED_DST_NETWORK, TranslatedDstNetwork)[0]
        translated_src_network = create_tagless_xml_objects_list(xml_node, Elements.TRANSLATED_SRC_NETWORK, TranslatedSrcNetwork)[0]
        return cls(binding, num_id, order, uid, auto_nat, disabled, dst_nat_method,
                   enable_net4tonet6, enable_route_lookup, orig_dst_network, orig_service,
                   orig_src_network, egress_interface, rule_number, service_nat_method,
                   src_nat_method, translated_service, translated_dst_network,
                   translated_src_network, nat_type)


class NatRuleBinding(XML_Object_Base):
    def __init__(self, default, postnat_iface, prenat_iface, rule_count, security_rule_count, uid):
        self.default = default
        self.postnat_iface = postnat_iface
        self.prenat_iface = prenat_iface
        self.rule_count = rule_count
        self.security_rule_count = security_rule_count
        self.uid = uid
        super().__init__(Elements.BINDING)

    @classmethod
    def from_xml_node(cls, xml_node):
        """
        Initialize the object from a XML node.
        :param xml_node: The XML node from which all necessary parameters will be parsed.
        :type xml_node: xml.etree.Element
        """
        default = get_xml_text_value(xml_node, Elements.DEFAULT)
        postnat_iface = get_xml_text_value(xml_node, Elements.POSTNAT_IFACE)
        prenat_iface = get_xml_text_value(xml_node, Elements.PRENAT_IFACE)
        rule_count = get_xml_text_value(xml_node, Elements.RULE_COUNT)
        security_rule_count = get_xml_text_value(xml_node, Elements.SECURITY_RULE_COUNT)
        uid = get_xml_text_value(xml_node, Elements.UID)
        return cls(default, postnat_iface, prenat_iface, rule_count, security_rule_count, uid)


class OrigDstNetwork(Base_Object):
    def __init__(self, id, uid, display_name, name):
        super().__init__(Elements.ORIG_DST_NETWORK, name, display_name, id, uid)

    @classmethod
    def from_xml_node(cls, xml_node):
        """
        Initialize the object from a XML node.
        :param xml_node: The XML node from which all necessary parameters will be parsed.
        :type xml_node: xml.etree.Element
        """
        id = get_xml_int_value(xml_node, Elements.ID)
        uid = get_xml_text_value(xml_node, Elements.UID)
        display_name = get_xml_text_value(xml_node, Elements.DISPLAY_NAME)
        name = get_xml_text_value(xml_node, Elements.NAME)
        return cls(id, uid, display_name, name)


class OrigService(Base_Object):
    def __init__(self, id, uid, display_name, name):
        super().__init__(Elements.DST_SERVICE, name, display_name, id, uid)

    @classmethod
    def from_xml_node(cls, xml_node):
        id = get_xml_int_value(xml_node, Elements.ID)
        uid = get_xml_text_value(xml_node, Elements.UID)
        display_name = get_xml_text_value(xml_node, Elements.DISPLAY_NAME)
        name = get_xml_text_value(xml_node, Elements.NAME)
        return cls(id, uid, display_name, name)


class OrigSrcNetwork(Base_Object):
    def __init__(self, id, uid, display_name, name):
        super().__init__(Elements.ORIG_SRC_NETWORK, name, display_name, id, uid)

    @classmethod
    def from_xml_node(cls, xml_node):
        """
        Initialize the object from a XML node.
        :param xml_node: The XML node from which all necessary parameters will be parsed.
        :type xml_node: xml.etree.Element
        """
        id = get_xml_int_value(xml_node, Elements.ID)
        uid = get_xml_text_value(xml_node, Elements.UID)
        display_name = get_xml_text_value(xml_node, Elements.DISPLAY_NAME)
        name = get_xml_text_value(xml_node, Elements.NAME)
        return cls(id, uid, display_name, name)


class TranslatedService(Base_Object):
    def __init__(self, id, uid, display_name, name):
        super().__init__(Elements.TRANSLATED_SERVICE, name, display_name, id, uid)

    @classmethod
    def from_xml_node(cls, xml_node):
        """
        Initialize the object from a XML node.
        :param xml_node: The XML node from which all necessary parameters will be parsed.
        :type xml_node: xml.etree.Element
        """
        id = get_xml_int_value(xml_node, Elements.ID)
        uid = get_xml_text_value(xml_node, Elements.UID)
        display_name = get_xml_text_value(xml_node, Elements.DISPLAY_NAME)
        name = get_xml_text_value(xml_node, Elements.NAME)
        return cls(id, uid, display_name, name)


class TranslatedSrcNetwork(Base_Object):
    def __init__(self, id, uid, display_name, name):
        super().__init__(Elements.TRANSLATED_SRC_NETWORK, name, display_name, id, uid)

    @classmethod
    def from_xml_node(cls, xml_node):
        """
        Initialize the object from a XML node.
        :param xml_node: The XML node from which all necessary parameters will be parsed.
        :type xml_node: xml.etree.Element
        """
        id = get_xml_int_value(xml_node, Elements.ID)
        uid = get_xml_text_value(xml_node, Elements.UID)
        display_name = get_xml_text_value(xml_node, Elements.DISPLAY_NAME)
        name = get_xml_text_value(xml_node, Elements.NAME)
        return cls(id, uid, display_name, name)


class TranslatedDstNetwork(Base_Object):
    def __init__(self, id, uid, display_name, name, dm_inline_members):
        super().__init__(Elements.TRANSLATED_DST_NETWORK, name, display_name, id, uid)
        if dm_inline_members is not None:
            self.dm_inline_members = dm_inline_members

    @classmethod
    def from_xml_node(cls, xml_node):
        """
        Initialize the object from a XML node.
        :param xml_node: The XML node from which all necessary parameters will be parsed.
        :type xml_node: xml.etree.Element
        """
        id = get_xml_int_value(xml_node, Elements.ID)
        uid = get_xml_text_value(xml_node, Elements.UID)
        display_name = get_xml_text_value(xml_node, Elements.DISPLAY_NAME)
        name = get_xml_text_value(xml_node, Elements.NAME)
        dm_inline_members_node = get_xml_node(xml_node, Elements.DM_INLINE_MEMBRES, True)
        if dm_inline_members_node:
            dm_inline_members = XML_List.from_xml_node_by_tags(xml_node, Elements.DM_INLINE_MEMBRES,
                                                               Elements.MEMBER, DmInlineMember)
        else:
            dm_inline_members = None
        return cls(id, uid, display_name, name, dm_inline_members)


class DmInlineMember(Base_Object):
    def __init__(self, id, uid, display_name, name):
        super().__init__(Elements.MEMBER, name, display_name, id, uid)

    @classmethod
    def from_xml_node(cls, xml_node):
        """
        Initialize the object from a XML node.
        :param xml_node: The XML node from which all necessary parameters will be parsed.
        :type xml_node: xml.etree.Element
        """
        id = get_xml_int_value(xml_node, Elements.ID)
        uid = get_xml_text_value(xml_node, Elements.UID)
        display_name = get_xml_text_value(xml_node, Elements.DISPLAY_NAME)
        name = get_xml_text_value(xml_node, Elements.NAME)
        return cls(id, uid, display_name, name)


class EgressInterface(XML_Object_Base):
    def __init__(self, name, id, direction, device_id, acl_name, is_global, interface_ips):
        self.name = name
        self.id = id
        self.direction = direction
        self.device_id = device_id
        self.acl_name = acl_name
        self.is_global = is_global
        self.interface_ips = interface_ips
        super().__init__(Elements.ENGRESS_INTERFACE)

    @classmethod
    def from_xml_node(cls, xml_node):
        """
        Initialize the object from a XML node.
        :param xml_node: The XML node from which all necessary parameters will be parsed.
        :type xml_node: xml.etree.Element
        """
        name = get_xml_text_value(xml_node, Elements.NAME)
        id = get_xml_int_value(xml_node, Elements.ID)
        direction = get_xml_text_value(xml_node, Elements.DIRECTION)
        device_id = get_xml_text_value(xml_node, Elements.DEVICE_ID)
        acl_name = get_xml_text_value(xml_node, Elements.ACL_NAME)
        is_global = get_xml_text_value(xml_node, Elements.GLOBAL)
        interface_ips_node = get_xml_node(xml_node, Elements.INTERFACE_IPS, True)
        if interface_ips_node:
            interface_ips = XML_List.from_xml_node_by_tags(xml_node, Elements.INTERFACE_IPS,
                                                           Elements.INTERFACE_IP, NatInterfaceIP)
        else:
            interface_ips = None
        return cls(name, id, direction, device_id, acl_name, is_global, interface_ips)


class NatInterfaceIP(XML_Object_Base):
    def __init__(self, ip, netmask):
        self.ip = ip
        self.netmask = netmask
        super().__init__(Elements.INTERFACE_IP)

    @classmethod
    def from_xml_node(cls, xml_node):
        ip = get_xml_text_value(xml_node, Elements.IP)
        netmask = get_xml_text_value(xml_node, Elements.NETMASK)
        return cls(ip, netmask)
license: apache-2.0
hash: -5,468,314,753,821,417,000
line_mean: 43.820598
line_max: 132
alpha_frac: 0.646256
autogenerated: false
ratio: 3.437819
config_test: false
has_no_keywords: false
few_assignments: false

repo_name: brandonw/photo-album
path: photo_album/rotate_and_thumbs.py
copies: 1
size: 1245
content:

import os, sys

from PIL import Image, ExifTags

size = (128, 128)

for infile in os.listdir(sys.argv[1]):
    inpath = os.path.join(sys.argv[1], infile)
    pieces = os.path.splitext(inpath)
    outpath = pieces[0] + ".thumb" + pieces[1]
    if (inpath != outpath and not os.path.exists(outpath)
            and 'thumb' not in infile):
        try:
            image = Image.open(inpath)
            for orientation in ExifTags.TAGS.keys():
                if ExifTags.TAGS[orientation] == 'Orientation':
                    break
            e = image._getexif()
            if e is not None:
                exif = dict(e.items())
                if orientation in exif:
                    if exif[orientation] == 3:
                        image = image.transpose(Image.ROTATE_180)
                    elif exif[orientation] == 6:
                        image = image.transpose(Image.ROTATE_270)
                    elif exif[orientation] == 8:
                        image = image.transpose(Image.ROTATE_90)
                    image.save(inpath)
            image.thumbnail(size, Image.ANTIALIAS)
            image.save(outpath, 'JPEG')
        except IOError as ex:
            print('cannot create thumbnail for ' + infile + ' -- ' + ex.strerror)
license: bsd-3-clause
hash: 8,823,710,597,784,660,000
line_mean: 36.727273
line_max: 81
alpha_frac: 0.526104
autogenerated: false
ratio: 3.939873
config_test: false
has_no_keywords: false
few_assignments: false

innovation-cat/DeepLearningBook
cifar10 classification/py3/softmax.py
1
4468
# coding: utf-8
#
# softmax.py
#
# Author: Huang Anbu
# Date: 2017.3
#
# Description: Implementation of softmax classification
#
# Copyright©2017. All Rights Reserved.
# ===============================================================================================
from __future__ import print_function, division
from basiclib import *

# Model definition
class SoftmaxLayer:
    def __init__(self, input, n_input, n_output):
        self.input = input
        self.n_input = n_input
        self.n_output = n_output

        # Initialize the weight parameters with zeros
        self.W = theano.shared(
            value = numpy.zeros(shape=(n_input, n_output)).astype(theano.config.floatX),
            name = "W",
            borrow = True
        )
        self.b = theano.shared(
            value = numpy.zeros(shape=(n_output, )).astype(theano.config.floatX),
            name = 'b',
            borrow = True
        )
        self.params = [self.W, self.b]

        # Output matrix: row-wise softmax probabilities
        self.p_y_given_x = T.nnet.softmax(T.dot(self.input, self.W)+self.b)

        # Predicted labels
        self.p_pred = T.argmax(self.p_y_given_x, axis=1)

    def cross_entropy(self, y):
        # Cross-entropy loss function
        return -T.mean(T.log(self.p_y_given_x)[T.arange(y.shape[0]), y])

    def get_cost_updates(self, y, lr, reg, optimizer_fun):
        # Loss plus L2 weight regularization
        cost = self.cross_entropy(y) + 0.5*reg*((self.W**2).sum())
        try:
            updates = optimizer_fun(cost, self.params, lr)
        except:
            print("Error: no optimizer function")
        else:
            return (cost, updates)

    def error_rate(self, y):
        # Classification error rate
        return T.mean(T.neq(self.p_pred, y))

if __name__ == "__main__":
    # Load the input data
    train_x, train_y = load_cifar10_dataset(r"./dataset/cifar-10-batches-py/data_batch_*")
    test_x, test_y = load_cifar10_dataset(r"./dataset/cifar-10-batches-py/test_batch")

    train_x = train_x / 255.0
    test_x = test_x / 255.0

    train_set_size, col = train_x.shape
    test_set_size, _ = test_x.shape

    # Set up the symbolic tensor variables
    x = T.matrix('x').astype(theano.config.floatX)
    y = T.ivector('y')
    index = T.iscalar('index')
    lr = T.scalar('lr', dtype=theano.config.floatX)
    reg = T.scalar('reg', dtype=theano.config.floatX)

    batch_size = options['batch_size']
    n_train_batch = train_set_size//batch_size
    n_test_batch = test_set_size//batch_size

    model = SoftmaxLayer(x, col, options['n_output'])
    cost, updates = model.get_cost_updates(y, lr, reg, optimizer[options["optimizer"]])

    # Build the training function
    train_model = theano.function(inputs = [x, y, lr, reg], outputs = cost, updates = updates)

    # Build the evaluation functions
    train_err = theano.function(inputs = [x, y, lr, reg], outputs = model.error_rate(y), on_unused_input = 'ignore')
    test_err = theano.function(inputs = [x, y, lr, reg], outputs = model.error_rate(y), on_unused_input = 'ignore')

    idx = numpy.arange(train_set_size)
    train_num = 0
    best_err = 1.0
    error_output = open("softmax.txt", "w")

    with open("model_softmax.npz", "wb") as fout:
        for epoch in range(options["n_epoch"]):
            # Shuffle the training set at the start of each epoch
            numpy.random.shuffle(idx)
            new_train_x = [train_x[i] for i in idx]
            new_train_y = [train_y[i] for i in idx]

            for n_batch_index in range(n_train_batch):
                c = train_model(
                    new_train_x[n_batch_index*batch_size:(n_batch_index+1)*batch_size],
                    new_train_y[n_batch_index*batch_size:(n_batch_index+1)*batch_size], 0.0001, 0.0
                )
                train_num = train_num + 1
                if train_num%options["print_freq"]==0:
                    print("train num: %d, cost: %lf"%(train_num, c))

                if train_num%options["valid_freq"]==0:
                    train_errors = [train_err(train_x[n_train_index*batch_size:(n_train_index+1)*batch_size], train_y[n_train_index*batch_size:(n_train_index+1)*batch_size], 0.00000001, 0.0) for n_train_index in range(n_train_batch)]
                    test_errors = [test_err(test_x[n_test_index*batch_size:(n_test_index+1)*batch_size], test_y[n_test_index*batch_size:(n_test_index+1)*batch_size], 0.00000001, 0.0) for n_test_index in range(n_test_batch)]
                    if numpy.mean(test_errors) < best_err:
                        best_err = numpy.mean(test_errors)
                        # Save the best parameters seen so far
                        params = dict([(p.name, p.get_value()) for p in model.params])
                        numpy.savez(fout, params)
                        print("train num: %d, best train error: %lf, best test error: %lf"%(train_num, numpy.mean(train_errors), numpy.mean(test_errors)))
            print("epoch %d end" % epoch)
            test_errors = [test_err(test_x[n_test_index*batch_size:(n_test_index+1)*batch_size], test_y[n_test_index*batch_size:(n_test_index+1)*batch_size], 0.00000001, 0.0) for n_test_index in range(n_test_batch)]
            print("%lf" % numpy.mean(test_errors), file=error_output)
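# --- Editor's appendix: an illustrative sketch, not part of the original script. ---
# For intuition, this re-derives in plain NumPy the two quantities SoftmaxLayer
# builds symbolically: the row-wise softmax matrix (p_y_given_x) and the mean
# cross-entropy loss. The names `scores` and `labels` are hypothetical; `numpy`
# is assumed to come from basiclib's star import, as in the script above.
def softmax_cross_entropy_check(scores, labels):
    # Shift each row by its max before exponentiating; softmax is invariant
    # to this shift and it avoids overflow.
    shifted = scores - scores.max(axis=1, keepdims=True)
    probs = numpy.exp(shifted)
    probs /= probs.sum(axis=1, keepdims=True)  # rows now sum to 1
    # Mean negative log-likelihood of the true labels, matching cross_entropy().
    return -numpy.mean(numpy.log(probs[numpy.arange(len(labels)), labels]))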
mit
-4,965,939,120,042,824,000
33.393701
218
0.63499
false
2.549329
true
false
false
sthesing/Podstatty
db.py
1
4858
# -*- coding: utf8 -*-

## Copyright (c) 2013 Stefan Thesing
##
##This file is part of Podstatty.
##
##Podstatty is free software: you can redistribute it and/or modify
##it under the terms of the GNU General Public License as published by
##the Free Software Foundation, either version 3 of the License, or
##(at your option) any later version.
##
##Podstatty is distributed in the hope that it will be useful,
##but WITHOUT ANY WARRANTY; without even the implied warranty of
##MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
##GNU General Public License for more details.
##
##You should have received a copy of the GNU General Public License
##along with Podstatty. If not, see http://www.gnu.org/licenses/.

from storm.locals import Storm, Int, Unicode, ReferenceSet
import requests

class Db:
    """
    A class intended to provide handy control over the database.
    """

    def __init__(self, store, base_url):
        self.store = store
        self.base_url = base_url

    def add_file(self, filename, exclude_strings):
        """
        Processes a prepared logfile and stores the data into the
        database.
        """
        log = open(filename)
        date = filename.split("access_log_")[1]
        date = date.replace("_filtered.txt", "")
        if self.store.find(Stats, Stats.date_time_string == unicode(date)).count():
            print "A logfile for this date has already been processed."
            return None
        stats = []
        for line in log:
            # In the settings file, users can specify strings that are
            # used as filter criteria. If the line contains this string,
            # it won't be processed.
            # In the beginning, we assume the line will be processed.
            line_shall_be_processed = True
            # 'exclude_strings' is a list of the filter criteria.
            # If the line contains one of those strings, the line will
            # not be processed.
            for string in exclude_strings:
                if string in line:
                    line_shall_be_processed = False
            if line_shall_be_processed:
                split_line = line.split()
                stat = Stats(unicode(split_line[0]), int(split_line[1]), unicode(date))
                stats.append(stat)
        urls = []
        for stat in stats:
            if stat.url not in urls:
                urls.append(stat.url)
        for url in urls:
            new_stat = Stats(url, 0, unicode(date))
            for stat in stats:
                if stat.url == url:
                    new_stat.traffic = new_stat.traffic+stat.traffic
            self.store.add(new_stat)
            # check if all URLs are already in table "filesizes"; if not,
            # get the filesize and write it into that table
            self.check_url(url)
        self.store.flush()
        self.store.commit()

    def check_url(self, url):
        """
        Checks if the filesize of the file found behind this url is
        already stored in the database. If not, it tries to retrieve
        the filesize by making an HTTP HEAD request and stores it into
        the database.
        """
        # if the url is not yet in the "Filesizes" table
        if not self.store.find(Filesizes, Filesizes.url == url).count():
            # Get the filesize from the server
            # TODO Implement error routine
            r = requests.head(self.base_url + url)
            # Files no longer present on the server are removed, for now.
            # TODO Maybe add an "else"-condition here and ask the user what to do?
            # What about files that are no longer there but you still want to
            # have them in your statistics?
if r.status_code != 404:
                size = int(r.headers['Content-Length'])
                # Write the URL and its filesize to the database
                self.store.add(Filesizes(url, size))

class Stats(Storm):
    """
    The table containing the actual numbers

    'CREATE TABLE stats (id INTEGER PRIMARY KEY, url VARCHAR, traffic INTEGER, date_time_string VARCHAR)'
    """
    __storm_table__ = "stats"
    id = Int(primary=True)
    url = Unicode()
    traffic = Int()
    date_time_string = Unicode()

    def __init__(self, url, traffic, date_time_string):
        self.url = url
        self.traffic = traffic
        self.date_time_string = date_time_string

class Filesizes(Storm):
    """
    The table containing the filesizes for each URL

    'CREATE TABLE filesizes (url VARCHAR PRIMARY KEY, filesize INTEGER)'
    """
    __storm_table__ = "filesizes"
    url = Unicode(primary=True)
    filesize = Int()

    def __init__(self, url, filesize):
        self.url = url
        self.filesize = filesize
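# --- Editor's appendix: an illustrative sketch, not part of the original module. ---
# Minimal wiring of the Db class against a local SQLite database via Storm.
# The database file, base URL, logfile name, and filter strings below are all
# hypothetical; the CREATE TABLE statements mirror the docstrings above.
if __name__ == '__main__':
    from storm.locals import create_database, Store
    database = create_database('sqlite:podstatty.db')
    store = Store(database)
    store.execute("CREATE TABLE IF NOT EXISTS stats "
                  "(id INTEGER PRIMARY KEY, url VARCHAR, traffic INTEGER, date_time_string VARCHAR)")
    store.execute("CREATE TABLE IF NOT EXISTS filesizes "
                  "(url VARCHAR PRIMARY KEY, filesize INTEGER)")
    db = Db(store, 'http://example.com/podcast/')
    db.add_file('access_log_2013-05-01_filtered.txt', ['bot'])
    print "Stats rows:", store.find(Stats).count()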
gpl-3.0
-6,791,800,694,305,659,000
37.251969
87
0.594072
false
4.299115
false
false
false
SabaFar/plc
examples/plc-ccphy-example.py
1
4082
# -*- Mode:Python; -*- # /* # * Copyright (c) 2010 INRIA # * # * This program is free software; you can redistribute it and/or modify # * it under the terms of the GNU General Public License version 2 as # * published by the Free Software Foundation; # * # * This program is distributed in the hope that it will be useful, # * but WITHOUT ANY WARRANTY; without even the implied warranty of # * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # * GNU General Public License for more details. # * # * You should have received a copy of the GNU General Public License # * along with this program; if not, write to the Free Software # * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # * # * Authors: Alexander Schloegl <[email protected]> # */ # Chase combining phy example import ns.plc import ns.core import ns.spectrum import ns.network def startTx(phy,p): phy.StartTx(p) def sendRedundancy(phy): phy.SendRedundancy() def receiveSuccess(packet): print "\n*** Packet received ***\n" def main(dummy_argv): ## Enable logging ns.core.LogComponentEnableAll(ns.core.LOG_PREFIX_TIME) ns.core.LogComponentEnable('PLC_Phy', ns.core.LOG_LEVEL_FUNCTION) # ns.core.LogComponentEnable('PLC_LinkPerformanceModel', ns.core.LOG_LEVEL_LOGIC) # ns.core.LogComponentEnable('PLC_Interference', ns.core.LOG_LEVEL_LOGIC) ## Enable packet printing ns.network.Packet.EnablePrinting() ## Define spectrum model sm = ns.plc.PLC_SpectrumModelHelper().GetG3SpectrumModel() ## Define time model, mains frequency: 60Hz, OFDM symbol duration: 2240us ns.plc.PLC_Time.SetTimeModel(60, ns.core.MicroSeconds(2240)) ## Define transmit power spectral density txPsd = ns.spectrum.SpectrumValue(sm) txPsd += 1e-8; ## Create nodes n1 = ns.plc.PLC_Node() n2 = ns.plc.PLC_Node() n1.SetPosition(0,0,0) n2.SetPosition(1000,0,0) n1.SetName('Node1') n2.SetName('Node2') nodes = [n1,n2] ## Create cable type cable = ns.plc.PLC_NAYY50SE_Cable(sm) ## Link nodes ns.plc.PLC_Line(cable,n1,n2) ## Setup channel channelHelper = ns.plc.PLC_ChannelHelper() channelHelper.Install(nodes) channel = channelHelper.GetChannel() ## Create outlets o1 = ns.plc.PLC_Outlet(n1) o2 = ns.plc.PLC_Outlet(n2) ## Create PHYs phy1 = ns.plc.PLC_ChaseCombiningPhy() phy2 = ns.plc.PLC_ChaseCombiningPhy() ## Define RX/TX impedances txImp = ns.plc.PLC_ConstImpedance(sm, 50) rxImp = ns.plc.PLC_ConstImpedance(sm, 50) ## Create interfaces phy1.CreateInterfaces(o1, txPsd, txImp, rxImp) phy2.CreateInterfaces(o2, txPsd, txImp, rxImp) ## Set background noise noiseFloor = ns.plc.PLC_ColoredNoiseFloor(-140,38.75,-0.72,sm).GetNoisePsd() noiseFloor += 1e-7 phy1.SetNoiseFloor(noiseFloor) phy2.SetNoiseFloor(noiseFloor) ## Set modulation and coding scheme phy1.SetHeaderModulationAndCodingScheme(ns.plc.BPSK_1_2) phy2.SetHeaderModulationAndCodingScheme(ns.plc.BPSK_1_2) phy1.SetPayloadModulationAndCodingScheme(ns.plc.QAM64_16_21) phy2.SetPayloadModulationAndCodingScheme(ns.plc.QAM64_16_21) ## Aggregate RX-Interfaces to ns3 nodes phy1.GetRxInterface().AggregateObject(ns.network.Node()) phy2.GetRxInterface().AggregateObject(ns.network.Node()) ## Set the function to be called after successful packet reception by phy2 phy2.SetReceiveSuccessCallback(receiveSuccess) ## Calculate channels channel.InitTransmissionChannels() channel.CalcTransmissionChannels() ## Create packet to send p = ns.network.Packet(128) ## Schedule chase combining transmissions ns.core.Simulator.Schedule(ns.core.Seconds(1), startTx, phy1, p) for i in range(1,11): ns.core.Simulator.Schedule(ns.core.Seconds(i), 
sendRedundancy, phy1) ## Start simulation ns.core.Simulator.Run() ## Cleanup simulation ns.core.Simulator.Destroy() if __name__ == '__main__': import sys main(sys.argv)
gpl-3.0
-16,086,139,063,123,222
29.691729
84
0.695492
false
3.104183
false
false
false
ayepezv/GAD_ERP
addons/website_sale/models/product.py
1
8355
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. from odoo import api, fields, models, tools, _ class ProductStyle(models.Model): _name = "product.style" name = fields.Char(string='Style Name', required=True) html_class = fields.Char(string='HTML Classes') class ProductPricelist(models.Model): _inherit = "product.pricelist" code = fields.Char(string='E-commerce Promotional Code') class ProductPublicCategory(models.Model): _name = "product.public.category" _inherit = ["website.seo.metadata"] _description = "Website Product Category" _order = "sequence, name" name = fields.Char(required=True, translate=True) parent_id = fields.Many2one('product.public.category', string='Parent Category', index=True) child_id = fields.One2many('product.public.category', 'parent_id', string='Children Categories') sequence = fields.Integer(help="Gives the sequence order when displaying a list of product categories.") # NOTE: there is no 'default image', because by default we don't show # thumbnails for categories. However if we have a thumbnail for at least one # category, then we display a default image on the other, so that the # buttons have consistent styling. # In this case, the default image is set by the js code. image = fields.Binary(attachment=True, help="This field holds the image used as image for the category, limited to 1024x1024px.") image_medium = fields.Binary(string='Medium-sized image', attachment=True, help="Medium-sized image of the category. It is automatically " "resized as a 128x128px image, with aspect ratio preserved. " "Use this field in form views or some kanban views.") image_small = fields.Binary(string='Small-sized image', attachment=True, help="Small-sized image of the category. It is automatically " "resized as a 64x64px image, with aspect ratio preserved. " "Use this field anywhere a small image is required.") @api.model def create(self, vals): tools.image_resize_images(vals) return super(ProductPublicCategory, self).create(vals) @api.multi def write(self, vals): tools.image_resize_images(vals) return super(ProductPublicCategory, self).write(vals) @api.constrains('parent_id') def check_parent_id(self): if not self._check_recursion(): raise ValueError(_('Error ! 
You cannot create recursive categories.'))

    @api.multi
    def name_get(self):
        res = []
        for category in self:
            names = [category.name]
            parent_category = category.parent_id
            while parent_category:
                names.append(parent_category.name)
                parent_category = parent_category.parent_id
            res.append((category.id, ' / '.join(reversed(names))))
        return res


class ProductTemplate(models.Model):
    _inherit = ["product.template", "website.seo.metadata", 'website.published.mixin', 'rating.mixin']
    _order = 'website_published desc, website_sequence desc, name'
    _name = 'product.template'
    _mail_post_access = 'read'

    website_message_ids = fields.One2many(
        'mail.message', 'res_id',
        domain=lambda self: ['&', ('model', '=', self._name), ('message_type', '=', 'comment')],
        string='Website Comments',
    )
    website_description = fields.Html('Description for the website', sanitize=False, translate=True)
    alternative_product_ids = fields.Many2many('product.template', 'product_alternative_rel', 'src_id', 'dest_id',
                                               string='Suggested Products', help='Appear on the product page')
    accessory_product_ids = fields.Many2many('product.product', 'product_accessory_rel', 'src_id', 'dest_id',
                                             string='Accessory Products', help='Appear on the shopping cart')
    website_size_x = fields.Integer('Size X', default=1)
    website_size_y = fields.Integer('Size Y', default=1)
    website_style_ids = fields.Many2many('product.style', string='Styles')
    website_sequence = fields.Integer('Website Sequence', help="Determine the display order in the Website E-commerce",
                                      default=lambda self: self._default_website_sequence())
    public_categ_ids = fields.Many2many('product.public.category', string='Website Product Category',
                                        help="Those categories are used to group similar products for e-commerce.")
    availability = fields.Selection([
        ('empty', 'Display Nothing'),
        ('in_stock', 'In Stock'),
        ('warning', 'Warning'),
    ], "Availability", default='empty', help="This field is used to display an availability banner with a message on the ecommerce")
    availability_warning = fields.Text("Availability Warning", translate=True)

    def _default_website_sequence(self):
        self._cr.execute("SELECT MIN(website_sequence) FROM %s" % self._table)
        min_sequence = self._cr.fetchone()[0]
        return min_sequence and min_sequence - 1 or 10

    def set_sequence_top(self):
        self.website_sequence = self.sudo().search([], order='website_sequence desc', limit=1).website_sequence + 1

    def set_sequence_bottom(self):
        self.website_sequence = self.sudo().search([], order='website_sequence', limit=1).website_sequence - 1

    def set_sequence_up(self):
        previous_product_tmpl = self.sudo().search(
            [('website_sequence', '>', self.website_sequence),
             ('website_published', '=', self.website_published)], order='website_sequence', limit=1)
        if previous_product_tmpl:
            previous_product_tmpl.website_sequence, self.website_sequence = self.website_sequence, previous_product_tmpl.website_sequence
        else:
            self.set_sequence_top()

    def set_sequence_down(self):
        next_product_tmpl = self.search([('website_sequence', '<', self.website_sequence),
                                         ('website_published', '=', self.website_published)], order='website_sequence desc', limit=1)
        if next_product_tmpl:
            next_product_tmpl.website_sequence, self.website_sequence = self.website_sequence, next_product_tmpl.website_sequence
        else:
            return self.set_sequence_bottom()

    @api.multi
    def _compute_website_url(self):
        super(ProductTemplate, self)._compute_website_url()
        for product in self:
            product.website_url = "/shop/product/%s" % (product.id,)

    @api.multi
    def display_price(self, pricelist, qty=1, public=False, **kw):
        self.ensure_one()
        return self.product_variant_ids and self.product_variant_ids[0].display_price(pricelist, qty=qty, public=public) or 0


class Product(models.Model):
    _inherit = "product.product"

    @api.multi
    def website_publish_button(self):
        self.ensure_one()
        return self.product_tmpl_id.website_publish_button()

    @api.multi
    def display_price(self, pricelist, qty=1, public=False, **kw):
        self.ensure_one()
        partner = self.env.user.partner_id
        context = {
            'pricelist': pricelist.id,
            'quantity': qty,
            'partner': partner
        }
        ret = self.env.user.has_group('sale.group_show_price_subtotal') and 'total_excluded' or 'total_included'
        taxes = partner.property_account_position_id.map_tax(self.taxes_id)
        return taxes.compute_all(public and self.lst_price or self.with_context(context).price, pricelist.currency_id, qty, product=self, partner=partner)[ret]


class ProductAttribute(models.Model):
    _inherit = "product.attribute"

    type = fields.Selection([('radio', 'Radio'), ('select', 'Select'), ('color', 'Color'), ('hidden', 'Hidden')], default='radio')


class ProductAttributeValue(models.Model):
    _inherit = "product.attribute.value"

    html_color = fields.Char(string='HTML Color Index', oldname='color', help="Here you can set a "
                             "specific HTML color index (e.g. #ff0000) to display the color on the website if the "
                             "attribute type is 'Color'.")
gpl-3.0
7,746,958,657,846,388,000
47.017241
183
0.641412
false
4.07959
false
false
false
poldracklab/cogat
cognitive/apps/atlas/forms.py
1
18517
from django import forms from django.core.exceptions import ValidationError from django.urls import reverse from crispy_forms.helper import FormHelper from crispy_forms.layout import Div, Field, HTML, Layout, Reset, Submit from cognitive.apps.atlas.query import Assertion, Disorder, Task, Battery, ConceptClass, Concept import cognitive.apps.atlas.query as query def set_field_html_name(cls, new_name): """ This creates wrapper around the normal widget rendering, allowing for a custom field name (new_name). """ old_render = cls.widget.render def _widget_render_wrapper(name, value, attrs=None): return old_render(new_name, value, attrs) cls.widget.render = _widget_render_wrapper class TaskForm(forms.Form): term_name = forms.CharField(required=True) definition_text = forms.CharField(required=True) class ConceptForm(forms.Form): name = forms.CharField(required=True, label="Term:") definition_text = forms.CharField(required=True, widget=forms.Textarea(), label="Your Definition:") concept_class = ConceptClass() choices = [(x['id'], "-yes- " + str(x['name'])) for x in concept_class.all()] choices.insert(0, (None, "-no-")) cc_label = "In your opinion, does this concept belong to a larger class of concepts?" concept_class = forms.ChoiceField( choices=choices, label=cc_label, required=False) def __init__(self, concept_id, *args, **kwargs): if not args or not args[0].get('submit'): concept = Concept() con_class = concept.get_relation(concept_id, "CLASSIFIEDUNDER", label="concept_class") if con_class: args[0]['concept_class'] = con_class[0]['id'] super(ConceptForm, self).__init__(*args, **kwargs) self.helper = FormHelper() self.helper.form_action = reverse( 'update_concept', kwargs={'uid': concept_id, }) self.helper.layout = Layout( Div( Field('name'), Field('definition_text'), Field('concept_class'), Submit('submit', 'Submit'), Reset('concept-cancel', 'Cancel', type="reset"), css_class="formline", ) ) class ContrastForm(forms.Form): name = forms.CharField(required=True) description = forms.CharField(required=True) class ConditionForm(forms.Form): condition_text = forms.CharField(required=True) condition_description = forms.CharField(required=True) class WeightForm(forms.Form): weight = forms.FloatField() def __init__(self, cond_id, label, *args, **kwargs): super().__init__(*args, **kwargs) self.helper = FormHelper() self.helper.form_tag = False self.weight_name = cond_id self.fields['weight'].label = label set_field_html_name(self.fields['weight'], self.weight_name) def clean_weight(self): data = self.data['weight'] if not data: raise ValidationError('Missing input') return data class ImplementationForm(forms.Form): implementation_uri = forms.URLField(required=True) implementation_name = forms.CharField(required=True) implementation_description = forms.CharField(required=True) def __init__(self, *args, **kwargs): super(ImplementationForm, self).__init__(*args, **kwargs) self.helper = FormHelper() self.helper.form_tag = False self.helper.add_input(Submit('submit', 'Submit')) self.helper.add_input(Reset('implementation-cancel', 'Cancel')) class ExternalDatasetForm(forms.Form): dataset_name = forms.CharField(required=True) dataset_uri = forms.URLField(required=True) def __init__(self, *args, **kwargs): super(ExternalDatasetForm, self).__init__(*args, **kwargs) self.helper = FormHelper() self.helper.form_tag = False self.helper.add_input(Submit('submit', 'Submit')) self.helper.add_input(Reset('dataset-cancel', 'Cancel')) class IndicatorForm(forms.Form): type = forms.CharField(required=True) def 
__init__(self, *args, **kwargs): super(IndicatorForm, self).__init__(*args, **kwargs) self.helper = FormHelper() self.helper.form_tag = False self.helper.add_input(Submit('submit', 'Submit')) self.helper.add_input(Reset('indicator-cancel', 'Cancel')) class CitationForm(forms.Form): citation_url = forms.URLField(required=True) citation_comment = forms.CharField(required=False) citation_desc = forms.CharField(required=True) citation_authors = forms.CharField(required=False) citation_type = forms.CharField(required=False) citation_pubname = forms.CharField(required=False) citation_pubdate = forms.CharField(required=False) citation_pmid = forms.CharField(required=False) citation_source = forms.CharField(required=False) doi = forms.CharField(required=False) def __init__(self, *args, **kwargs): super(CitationForm, self).__init__(*args, **kwargs) self.helper = FormHelper() self.helper.form_tag = False self.helper.add_input(Submit('submit', 'Submit')) self.helper.add_input(Reset('citation-cancel', 'Cancel')) class DisorderForm(forms.Form): name = forms.CharField(required=True) definition = forms.CharField(required=True, widget=forms.Textarea()) def __init__(self, *args, **kwargs): super(DisorderForm, self).__init__(*args, **kwargs) self.helper = FormHelper() self.helper.form_tag = False self.helper.add_input(Submit('submit', 'Submit')) self.helper.add_input(Reset('disorder-cancel', 'Cancel')) class TheoryAssertionForm(forms.Form): def __init__(self, *args, **kwargs): super(TheoryAssertionForm, self).__init__(*args, **kwargs) assertions = Assertion() choices = [(x['id'], x['name']) for x in assertions.all()] self.fields['assertions'] = forms.ChoiceField(choices=choices) self.helper = FormHelper() self.helper.form_tag = False self.helper.add_input(Submit('submit', 'Submit')) self.helper.add_input(Reset('theory-assertion-cancel', 'Cancel')) class TaskDisorderForm(forms.Form): def __init__(self, task_id, *args, **kwargs): super(TaskDisorderForm, self).__init__(*args, **kwargs) disorders = Disorder() behaviors = query.Behavior() traits = query.Trait() tasks = Task() contrasts = tasks.get_relation(task_id, "HASCONTRAST") cont_choices = [(x['id'], x['name']) for x in contrasts] self.fields['contrasts'] = forms.ChoiceField(choices=cont_choices) pheno_choices = [] pheno_choices.extend( [(x['id'], ''.join([x['name'], " (Disorder)"])) for x in disorders.all()]) pheno_choices.extend( [(x['id'], ''.join([x['name'], " (Behavior)"])) for x in behaviors.all()]) pheno_choices.extend( [(x['id'], ''.join([x['name'], " (Trait)"])) for x in traits.all()]) self.fields['disorders'] = forms.ChoiceField(choices=pheno_choices) self.helper = FormHelper() self.helper.form_tag = False self.helper.add_input(Submit('submit', 'Submit')) self.helper.add_input(Reset('task-disorder-cancel', 'Cancel')) class TaskConceptForm(forms.Form): def __init__(self, task_id, *args, **kwargs): super(TaskConceptForm, self).__init__(*args, **kwargs) concept = Concept() tasks = Task() contrasts = tasks.get_relation(task_id, "HASCONTRAST") cont_choices = [(x['id'], x['name']) for x in contrasts] self.fields['concept-contrasts'] = forms.ChoiceField( choices=cont_choices) concept_choices = [(x['id'], x['name']) for x in concept.all()] self.fields['concept'] = forms.ChoiceField(choices=concept_choices) self.helper = FormHelper() self.helper.attrs = {'id': 'concept-form'} self.helper.form_class = "hidden" self.helper.form_action = reverse('add_task_concept', kwargs={'uid': task_id}) self.helper.add_input(Submit('submit', 'Submit')) 
self.helper.add_input(Reset('task-concept-cancel', 'Cancel')) class TheoryForm(forms.Form): label = "Enter the name of the theory collection you wish to add: " name = forms.CharField(required=True, label=label) def __init__(self, *args, **kwargs): super(TheoryForm, self).__init__(*args, **kwargs) self.helper = FormHelper() self.helper.attrs = {'id': 'theory-form'} self.helper.form_class = "hidden" self.helper.form_action = reverse('add_theory') self.helper.add_input(Submit('submit', 'Submit')) self.helper.add_input(Reset('theory-cancel', 'Cancel')) class BatteryForm(forms.Form): label = "Enter the name of the task collection you wish to add: " name = forms.CharField(required=True, label=label) def __init__(self, *args, **kwargs): super(BatteryForm, self).__init__(*args, **kwargs) self.helper = FormHelper() self.helper.attrs = {'id': 'battery-form'} self.helper.form_class = "hidden" self.helper.form_action = reverse('add_battery') self.helper.add_input(Submit('submit', 'Submit')) self.helper.add_input(Reset('battery-cancel', 'Cancel', type="button")) class ConceptTaskForm(forms.Form): def __init__(self, *args, **kwargs): super(ConceptTaskForm, self).__init__(*args, **kwargs) tasks = Task() choices = [(x['id'], x['name']) for x in tasks.all()] self.fields['tasks'] = forms.ChoiceField(choices=choices) self.helper = FormHelper() self.helper.form_class = "hidden" self.helper.add_input(Submit('submit', 'Submit')) self.helper.add_input(Reset('battery-cancel', 'Cancel', type="button")) class BatteryBatteryForm(forms.Form): def __init__(self, *args, **kwargs): super(BatteryBatteryForm, self).__init__(*args, **kwargs) batteries = Battery() choices = [(x['id'], x['name']) for x in batteries.all()] self.fields['batteries'] = forms.ChoiceField(choices=choices) self.helper = FormHelper() self.helper.form_tag = False self.helper.add_input(Submit('submit', 'Submit')) self.helper.add_input(Reset('battery-cancel', 'Cancel', type="button")) class BatteryTaskForm(forms.Form): def __init__(self, *args, **kwargs): super(BatteryTaskForm, self).__init__(*args, **kwargs) tasks = Task() choices = [(x['id'], x['name']) for x in tasks.all()] self.fields['tasks'] = forms.ChoiceField(choices=choices) self.helper = FormHelper() self.helper.form_tag = False self.helper.add_input(Submit('submit', 'Submit')) self.helper.add_input(Reset('battery-task-cancel', 'Cancel', type="button")) class ConceptContrastForm(forms.Form): def __init__(self, task_id, concept_id, *args, **kwargs): super(ConceptContrastForm, self).__init__(*args, **kwargs) tasks = Task() contrasts = tasks.get_relation(task_id, "HASCONTRAST") choices = [(x['id'], x['name']) for x in contrasts] self.fields['contrasts'] = forms.ChoiceField(choices=choices) self.helper = FormHelper() self.helper.add_input(Submit('submit', 'Submit')) self.helper.add_input(Reset('battery-cancel', 'Cancel', type="button")) self.helper.form_action = reverse('add_concept_contrast', kwargs={'uid': concept_id, 'tid': task_id}) class DisorderDisorderForm(forms.Form): ''' form for relating disorders to themselves ''' type = forms.ChoiceField( choices=[('parent', 'Parent'), ('child', 'Child')]) def __init__(self, name=None, *args, **kwargs): super(DisorderDisorderForm, self).__init__(*args, **kwargs) name = (name if name is not None else '') disorders = Disorder() type_choices = [ ('parent', '{} is a kind of <selected disorder>'.format(name)), ('child', '<selected disorder> is a kind of {}'.format(name)) ] dis_choices = [(x['id'], x['name']) for x in disorders.all()] self.fields['type'] = 
forms.ChoiceField(choices=type_choices) self.fields['disorders'] = forms.ChoiceField(choices=dis_choices) self.helper = FormHelper() self.helper.form_tag = False self.helper.add_input(Submit('submit', 'Submit')) self.helper.add_input(Reset('disorder-disorder-cancel', 'Cancel')) class ExternalLinkForm(forms.Form): ''' an external link for a node. For disorders this link may describe the disorder in more detail''' uri = forms.URLField( required=True, label="Enter the full URL for the link") def __init__(self, *args, **kwargs): super(ExternalLinkForm, self).__init__(*args, **kwargs) self.helper = FormHelper() self.helper.form_tag = False self.helper.add_input(Submit('submit', 'Submit')) self.helper.add_input(Reset('link-cancel', 'Cancel')) class ConceptClassForm(forms.Form): name = forms.CharField() def __init__(self, *args, **kwargs): super(ConceptClassForm, self).__init__(*args, **kwargs) self.helper = FormHelper() self.helper.add_input(Submit('submit', 'Submit')) self.helper.add_input(Reset('concept-class-cancel', 'Cancel')) self.helper.form_action = reverse('add_concept_class') class DisambiguationForm(forms.Form): term1_name = forms.CharField(label="") term1_name_ext = forms.CharField(label="") term1_definition = forms.CharField(required=True, widget=forms.Textarea(), label="Original Term Description") term2_name = forms.CharField(label="") term2_name_ext = forms.CharField(label="") term2_definition = forms.CharField(required=True, widget=forms.Textarea(), label="New Term Description") def __init__(self, label, uid, term=None, *args, **kwargs): super(DisambiguationForm, self).__init__(*args, **kwargs) if term is not None: self.initial = { 'term1_name': term['name'], 'term2_name': term['name'], 'term1_definition': term['definition_text'] } self.helper = FormHelper() self.helper.add_input(Reset('disambiguate_cancel_button', 'Cancel')) self.helper.add_input(Submit('submit', 'Submit')) self.helper.form_action = reverse('add_disambiguation', kwargs={'label': label, 'uid': uid}) self.helper.layout = Layout( Div( Div( Field('term1_name', css_class='disam-name'), HTML('('), Field('term1_name_ext', css_class='disam-name-ext'), HTML(')'), css_class='name-ext-inputs' ), Field('term1_definition', css_class='disam-def'), Div( Field('term2_name', css_class='disam-name'), HTML('('), Field('term2_name_ext', css_class='disam-name-ext'), HTML(')'), css_class='name-ext-inputs' ), Field('term2_definition', css_class='disam-def'), css_class='popstar', ) ) class PhenotypeForm(forms.Form): name = forms.CharField(required=True, label="Phenotype Name:") definition = forms.CharField(required=True, widget=forms.Textarea(), label="Description:") choices = (("disorder", "Disorder"), ("trait", "Trait"), ("behavior", "Behavior")) type = forms.ChoiceField( choices=choices, label="Phenotype classification", required=False) def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.helper = FormHelper() self.helper.add_input(Submit('submit', 'Submit')) self.helper.add_input(Reset('phenotype-cancel-button', 'Cancel')) self.helper.form_action = reverse('add_phenotype') class TraitForm(forms.Form): name = forms.CharField(required=True, label="Phenotype Name:") definition = forms.CharField(required=True, widget=forms.Textarea(), label="Description:") def __init__(self, uid, trait=None, *args, **kwargs): super().__init__(*args, **kwargs) if trait is not None: self.initial = { 'name': trait['name'], 'definition': trait['definition'] } self.helper = FormHelper() self.helper.add_input(Submit('submit', 'Submit')) 
self.helper.add_input(Reset('trait_cancel_button', 'Cancel')) self.helper.form_action = reverse('update_trait', kwargs={'uid': uid}) class BehaviorForm(forms.Form): name = forms.CharField(required=True, label="Phenotype Name:") definition = forms.CharField(required=True, widget=forms.Textarea(), label="Description:") def __init__(self, uid, behavior=None, *args, **kwargs): super().__init__(*args, **kwargs) if behavior is not None: self.initial = { 'name': behavior['name'], 'definition': behavior['definition'] } self.helper = FormHelper() self.helper.add_input(Submit('submit', 'Submit')) self.helper.add_input(Reset('behavior_cancel_button', 'Cancel')) self.helper.form_action = reverse( 'update_behavior', kwargs={'uid': uid}) class DoiForm(forms.Form): doi = forms.CharField(required=True, label="DOI:") def __init__(self, uid, label, *args, **kwargs): super().__init__(*args, **kwargs) self.helper = FormHelper() self.helper.add_input(Submit('submit', 'Submit')) self.helper.add_input(Reset('doi-cancel-button', 'Cancel')) self.helper.form_action = reverse('add_citation_doi', kwargs={'label': label, 'uid': uid})
mit
-3,327,605,317,850,539,000
38.736052
96
0.596155
false
3.850489
false
false
false
CSAILVision/sceneparsing
evaluationCode/utils_eval.py
1
1826
import numpy as np

# This function takes the prediction and label of a single image, returns intersection and union areas for each class
# To compute over many images do:
# for i in range(Nimages):
#     (area_intersection[:,i], area_union[:,i]) = intersectionAndUnion(imPred[i], imLab[i], numClass)
# IoU = 1.0 * np.sum(area_intersection, axis=1) / np.sum(np.spacing(1)+area_union, axis=1)
def intersectionAndUnion(imPred, imLab, numClass):
    imPred = np.asarray(imPred)
    imLab = np.asarray(imLab)

    # Remove classes from unlabeled pixels in gt image.
    # We should not penalize detections in unlabeled portions of the image.
    imPred = imPred * (imLab>0)

    # Compute area intersection:
    intersection = imPred * (imPred==imLab)
    (area_intersection,_) = np.histogram(intersection, bins=numClass, range=(1, numClass))

    # Compute area union:
    (area_pred,_) = np.histogram(imPred, bins=numClass, range=(1, numClass))
    (area_lab,_) = np.histogram(imLab, bins=numClass, range=(1, numClass))
    area_union = area_pred + area_lab - area_intersection

    return (area_intersection, area_union)

# This function takes the prediction and label of a single image, returns pixel-wise accuracy
# To compute over many images do:
# for i in range(Nimages):
#     (pixel_accuracy[i], pixel_correct[i], pixel_labeled[i]) = pixelAccuracy(imPred[i], imLab[i])
# mean_pixel_accuracy = 1.0 * np.sum(pixel_correct) / (np.spacing(1) + np.sum(pixel_labeled))
def pixelAccuracy(imPred, imLab):
    imPred = np.asarray(imPred)
    imLab = np.asarray(imLab)

    # Remove classes from unlabeled pixels in gt image.
    # We should not penalize detections in unlabeled portions of the image.
    pixel_labeled = np.sum(imLab>0)
    pixel_correct = np.sum((imPred==imLab)*(imLab>0))
    pixel_accuracy = 1.0 * pixel_correct / pixel_labeled
    return (pixel_accuracy, pixel_correct, pixel_labeled)
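# --- Editor's appendix: an illustrative sketch, not part of the original module. ---
# Aggregates both metrics over a small batch of random predictions and labels,
# following the recipes in the comments above. Labels use 0 for unlabeled
# pixels and 1..numClass for classes; all sizes here are hypothetical.
if __name__ == '__main__':
    numClass, Nimages = 5, 4
    imPred = np.random.randint(1, numClass + 1, size=(Nimages, 32, 32))
    imLab = np.random.randint(0, numClass + 1, size=(Nimages, 32, 32))
    area_intersection = np.zeros((numClass, Nimages))
    area_union = np.zeros((numClass, Nimages))
    pixel_correct = np.zeros(Nimages)
    pixel_labeled = np.zeros(Nimages)
    for i in range(Nimages):
        (area_intersection[:, i], area_union[:, i]) = intersectionAndUnion(imPred[i], imLab[i], numClass)
        (_, pixel_correct[i], pixel_labeled[i]) = pixelAccuracy(imPred[i], imLab[i])
    IoU = 1.0 * np.sum(area_intersection, axis=1) / np.sum(np.spacing(1) + area_union, axis=1)
    mean_pixel_accuracy = 1.0 * np.sum(pixel_correct) / (np.spacing(1) + np.sum(pixel_labeled))
    print('mean IoU: %.4f  mean pixel accuracy: %.4f' % (IoU.mean(), mean_pixel_accuracy))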
bsd-3-clause
-2,552,586,764,081,443,000
42.47619
117
0.727273
false
2.98366
false
false
false
hexid/WordGenerator
GenerateChain.py
1
1975
#!/usr/bin/env python
# usage: depth inFile [outFile]

def generateChain(depth, inFile):
  import collections, re
  numChar, endChar = '#', '.'
  regexWord = re.compile('^[a-z]+$')
  depthRange = range(depth - 1)
  padStr = ' ' * (depth - 1)
  chars = collections.deque(maxlen = depth) # limit to depth chars

  def NestedDict(): return collections.defaultdict(NestedDict)
  rootNode = NestedDict() # create a tree of dictionaries
  rootNode['depth'] = depth # set the depth of the chain

  curNode, curChar = None, None
  with open(inFile, 'r') as f:
    for word in f.read().split():
      if regexWord.match(word):
        chars.extend(padStr) # reset chars for the new word
        for curChar in "%s%s" % (word, endChar):
          chars.append(curChar) # add the next character
          curNode = rootNode # start at the root of the tree
          for n in depthRange: # traverse down the tree
            curNode = curNode[chars[n]]
          # increment the total for the leaves on the branch
          curNode[numChar] = curNode.get(numChar, 0) + 1
          # increment the total for the current leaf
          curNode[curChar] = curNode.get(curChar, 0) + 1
  return rootNode

def writeToFile(chain, outFile):
  with open(outFile, 'w') as f:
    import json # write the json data to outFile
    # the json data will be sorted and compressed to save space
    f.write(json.dumps(chain, sort_keys=True, separators=(',',':')))

def main():
  import argparse
  parser = argparse.ArgumentParser()
  parser.add_argument('depth', metavar='depth', type=int, help='The length of any given chain')
  parser.add_argument('inFile', type=str, help='Input dictionary file')
  parser.add_argument('outFile', type=str, nargs='?', default='_markov.json', help='Output JSON file (default = _markov.json)')
  (args, unknown) = parser.parse_known_args()

  chain = generateChain(args.depth, args.inFile)
  writeToFile(chain, args.outFile)

if __name__ == "__main__":
  main()
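# --- Editor's appendix: an illustrative sketch, not part of the original script. ---
# One way to consume the generated tree: walk down `depth - 1` context
# characters, then draw the next character in proportion to the stored counts,
# using the same '#' (branch total) and '.' (end-of-word) markers as above.
def sampleWord(chain):
  import random
  depth = chain['depth']
  context = [' '] * (depth - 1) # same padding the generator uses
  word = ''
  while True:
    node = chain
    for ch in context: # walk down to the counts for this context
      node = node[ch]
    pick = random.uniform(0, node['#'])
    for ch, count in sorted(node.items()):
      if ch == '#':
        continue
      pick -= count
      if pick <= 0:
        break
    if ch == '.': # end-of-word marker reached
      return word
    word += ch
    context = context[1:] + [ch]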
mit
-1,638,003,219,230,873,600
36.980769
127
0.658734
false
3.604015
false
false
false
smartforceplus/SmartForceplus
.local/share/Odoo/addons/8.0/builder/models/demo/base.py
1
5437
import json import pickle import os import random from openerp import models, api, fields, _ class GeneratorInterface(models.AbstractModel): _name = 'builder.ir.model.demo.generator.base' _description = 'Generator Interface' @api.multi def get_generator(self, field): raise NotImplementedError @api.multi def action_save(self): return {'type': 'ir.actions.act_window_close'} _demo_data = {} @api.model def get_demo_data(self, filename=None, dataFormat='json'): if filename is None: filename = "{name}.json".format(name=self.subclass_model) if filename not in self._demo_data: fullname = os.path.abspath(os.path.join(os.path.dirname(__file__), 'data', filename)) if os.path.exists(fullname): try: if dataFormat == 'json': self._demo_data[filename] = json.loads(open(fullname).read()) else: self._demo_data[filename] = open(fullname).read() except Exception, e: return {} return self._demo_data.get(filename, {}) class Generator(models.Model): _name = 'builder.ir.model.demo.generator' _description = 'Generic Generator' _inherit = ['ir.mixin.polymorphism.superclass', 'builder.ir.model.demo.generator.base'] _order = 'module_id asc, model_id asc' _target_type = 'char' model_id = fields.Many2one('builder.ir.model', ondelete='cascade') module_id = fields.Many2one('builder.ir.module.module', 'Module', related='model_id.module_id', ondelete='cascade', store=True) type = fields.Char('Type', compute='_compute_type') target_fields_type = fields.Char('Target Fields Type', compute='_compute_target_fields_type') field_ids = fields.Many2many( comodel_name='builder.ir.model.fields', relation='builder_model_demo_fields_rel', column1='generator_id', column2='field_id', string='Fields', ) field_names = fields.Char('Field Names', compute='_compute_field_names', store=True) allow_nulls = fields.Boolean('Allow Null Values', help='If the field is not required allow to generate null values for them.') _defaults = { 'subclass_model': lambda s, c, u, cxt=None: s._name } @api.multi def generate_null_values(self, field): if self.allow_nulls and not field.required: return random.random() <= (1.0 / (self.model_id.demo_records + 1)) return False @api.one @api.depends('subclass_model') def _compute_type(self): data = dict(self.get_generators()) self.type = data.get(self.subclass_model, _('Unknown')) @api.one @api.depends('field_ids.name') def _compute_field_names(self): self.field_names = ', '.join([field.name for field in self.field_ids]) @api.one @api.depends('subclass_model') def _compute_target_fields_type(self): self.target_fields_type = self.env[self.subclass_model]._model._target_type @api.model def get_generators(self): ms = self.env['ir.model'].search([ ('model', 'ilike', 'builder.ir.model.demo.generator.%'), ('model', 'not in', ['builder.ir.model.demo.generator.base', 'builder.ir.model.demo.generator']) ]) return [ (model.model, model.name) for model in ms ] @api.one def get_generator(self, field): return self.get_instance().get_generator(field) @api.multi def action_open_view(self): model = self._model action = model.get_formview_action(self.env.cr, self.env.uid, self.ids, self.env.context) action.update({'target': 'new'}) return action class IrModel(models.Model): _name = 'builder.ir.model' _inherit = ['builder.ir.model'] demo_records = fields.Integer('Demo Records') demo_data_ids = fields.One2many( comodel_name='builder.ir.model.demo.generator', inverse_name='model_id', string='Demo Data', copy=True, ) demo_xml_id_sample = fields.Text(compute='_compute_demo_xml_id_sample', store=True) @api.one @api.depends('demo_records', 
'model') def _compute_demo_xml_id_sample(self): tmpl = '{model}_'.format(model=self.model.lower().replace('.', '_')) + '{id}' if self.model else 'model_' self.demo_xml_id_sample = pickle.dumps([tmpl.format(id=i) for i in xrange(self.demo_records)]) @api.multi def demo_xml_id(self, index): return pickle.loads(self.demo_xml_id_sample)[index] _field_generators = None @property def field_generators(self, reload=False): if not self._field_generators or reload: result = {} for generator in self.demo_data_ids: for field in generator.field_ids: if field.name not in result: result[field.name] = generator.instance.get_generator(field) self._field_generators = result return self._field_generators class IrModule(models.Model): _name = 'builder.ir.module.module' _inherit = ['builder.ir.module.module'] demo_data_ids = fields.One2many( comodel_name='builder.ir.model.demo.generator', inverse_name='module_id', string='Demo Data', copy=True, )
agpl-3.0
1,007,184,603,457,756,800
32.98125
130
0.606401
false
3.693614
false
false
false
marmarek/qubes-core-admin
qubes/vm/adminvm.py
1
9608
# # The Qubes OS Project, https://www.qubes-os.org/ # # Copyright (C) 2010-2015 Joanna Rutkowska <[email protected]> # Copyright (C) 2013-2015 Marek Marczykowski-Górecki # <[email protected]> # Copyright (C) 2014-2015 Wojtek Porczyk <[email protected]> # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, see <https://www.gnu.org/licenses/>. # ''' This module contains the AdminVM implementation ''' import asyncio import subprocess import libvirt import qubes import qubes.exc import qubes.vm from qubes.vm.qubesvm import _setter_kbd_layout class AdminVM(qubes.vm.BaseVM): '''Dom0''' dir_path = None name = qubes.property('name', default='dom0', setter=qubes.property.forbidden) qid = qubes.property('qid', default=0, type=int, setter=qubes.property.forbidden) uuid = qubes.property('uuid', default='00000000-0000-0000-0000-000000000000', setter=qubes.property.forbidden) default_dispvm = qubes.VMProperty('default_dispvm', load_stage=4, allow_none=True, default=(lambda self: self.app.default_dispvm), doc='Default VM to be used as Disposable VM for service calls.') include_in_backups = qubes.property('include_in_backups', default=True, type=bool, doc='If this domain is to be included in default backup.') updateable = qubes.property('updateable', default=True, type=bool, setter=qubes.property.forbidden, doc='True if this machine may be updated on its own.') # for changes in keyboard_layout, see also the same property in QubesVM keyboard_layout = qubes.property( 'keyboard_layout', type=str, setter=_setter_kbd_layout, default='us++', doc='Keyboard layout for this VM') def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self._qdb_connection = None self._libvirt_domain = None if not self.app.vmm.offline_mode: self.start_qdb_watch() def __str__(self): return self.name def __lt__(self, other): # order dom0 before anything return self.name != other.name @property def attached_volumes(self): return [] @property def xid(self): '''Always ``0``. .. seealso: :py:attr:`qubes.vm.qubesvm.QubesVM.xid` ''' return 0 @qubes.stateless_property def icon(self): # pylint: disable=no-self-use """freedesktop icon name, suitable for use in :py:meth:`PyQt4.QtGui.QIcon.fromTheme`""" return 'adminvm-black' @property def libvirt_domain(self): '''Libvirt object for dom0. .. seealso: :py:attr:`qubes.vm.qubesvm.QubesVM.libvirt_domain` ''' if self._libvirt_domain is None: self._libvirt_domain = self.app.vmm.libvirt_conn.lookupByID(0) return self._libvirt_domain @staticmethod def is_running(): '''Always :py:obj:`True`. .. seealso: :py:meth:`qubes.vm.qubesvm.QubesVM.is_running` ''' return True @staticmethod def is_halted(): '''Always :py:obj:`False`. .. seealso: :py:meth:`qubes.vm.qubesvm.QubesVM.is_halted` ''' return False @staticmethod def get_power_state(): '''Always ``'Running'``. .. seealso: :py:meth:`qubes.vm.qubesvm.QubesVM.get_power_state` ''' return 'Running' @staticmethod def get_mem(): '''Get current memory usage of Dom0. Unit is KiB. .. 
seealso: :py:meth:`qubes.vm.qubesvm.QubesVM.get_mem` ''' # return psutil.virtual_memory().total/1024 with open('/proc/meminfo') as file: for line in file: if line.startswith('MemTotal:'): return int(line.split(':')[1].strip().split()[0]) raise NotImplementedError() def get_mem_static_max(self): '''Get maximum memory available to Dom0. .. seealso: :py:meth:`qubes.vm.qubesvm.QubesVM.get_mem_static_max` ''' if self.app.vmm.offline_mode: # default value passed on xen cmdline return 4096 try: return self.app.vmm.libvirt_conn.getInfo()[1] except libvirt.libvirtError as e: self.log.warning('Failed to get memory limit for dom0: %s', e) return 4096 def get_cputime(self): '''Get total CPU time burned by Dom0 since start. .. seealso: :py:meth:`qubes.vm.qubesvm.QubesVM.get_cputime` ''' try: return self.libvirt_domain.info()[4] except libvirt.libvirtError as e: self.log.warning('Failed to get CPU time for dom0: %s', e) return 0 def verify_files(self): '''Always :py:obj:`True` .. seealso: :py:meth:`qubes.vm.qubesvm.QubesVM.verify_files` ''' # pylint: disable=no-self-use return True def start(self, start_guid=True, notify_function=None, mem_required=None): '''Always raises an exception. .. seealso: :py:meth:`qubes.vm.qubesvm.QubesVM.start` ''' # pylint: disable=unused-argument,arguments-differ raise qubes.exc.QubesVMNotHaltedError( self, 'Cannot start Dom0 fake domain!') def suspend(self): '''Does nothing. .. seealso: :py:meth:`qubes.vm.qubesvm.QubesVM.suspend` ''' raise qubes.exc.QubesVMError(self, 'Cannot suspend Dom0 fake domain!') def shutdown(self): '''Does nothing. .. seealso: :py:meth:`qubes.vm.qubesvm.QubesVM.shutdown` ''' raise qubes.exc.QubesVMError(self, 'Cannot shutdown Dom0 fake domain!') def kill(self): '''Does nothing. .. seealso: :py:meth:`qubes.vm.qubesvm.QubesVM.kill` ''' raise qubes.exc.QubesVMError(self, 'Cannot kill Dom0 fake domain!') @property def untrusted_qdb(self): '''QubesDB handle for this domain.''' if self._qdb_connection is None: import qubesdb # pylint: disable=import-error self._qdb_connection = qubesdb.QubesDB(self.name) return self._qdb_connection async def run_service(self, service, source=None, user=None, filter_esc=False, autostart=False, gui=False, **kwargs): '''Run service on this VM :param str service: service name :param qubes.vm.qubesvm.QubesVM source: source domain as presented to this VM :param str user: username to run service as :param bool filter_esc: filter escape sequences to protect terminal \ emulator :param bool autostart: if :py:obj:`True`, machine will be started if \ it is not running :param bool gui: when autostarting, also start gui daemon :rtype: asyncio.subprocess.Process .. note:: User ``root`` is redefined to ``SYSTEM`` in the Windows agent code ''' # pylint: disable=unused-argument source = 'dom0' if source is None else self.app.domains[source].name if filter_esc: raise NotImplementedError( 'filter_esc=True not supported on calls to dom0') if user is None: user = 'root' await self.fire_event_async('domain-cmd-pre-run', pre_event=True, start_guid=gui) if user != 'root': cmd = ['runuser', '-u', user, '--'] else: cmd = [] cmd.extend([ qubes.config.system_path['qrexec_rpc_multiplexer'], service, source, 'name', self.name, ]) return (await asyncio.create_subprocess_exec( *cmd, **kwargs)) async def run_service_for_stdio(self, *args, input=None, **kwargs): '''Run a service, pass an optional input and return (stdout, stderr). Raises an exception if return code != 0. *args* and *kwargs* are passed verbatim to :py:meth:`run_service`. .. 
warning::

            There are some combinations of stdio-related *kwargs*, which are
            not filtered for problems originating between the keyboard and
            the chair.
        '''
        # pylint: disable=redefined-builtin
        kwargs.setdefault('stdin', subprocess.PIPE)
        kwargs.setdefault('stdout', subprocess.PIPE)
        kwargs.setdefault('stderr', subprocess.PIPE)
        p = await self.run_service(*args, **kwargs)
        # this one is actually a tuple, but there is no need to unpack it
        stdouterr = await p.communicate(input=input)
        if p.returncode:
            raise subprocess.CalledProcessError(p.returncode,
                args[0], *stdouterr)
        return stdouterr
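# --- Editor's appendix: an illustrative sketch, not part of the original module. ---
# How the stdio helper above might be driven from dom0-side tooling. `app` is
# assumed to be a qubes.Qubes() instance and the coroutine to run inside dom0;
# qubes.VMShell is the stock qrexec service that executes shell input.
async def dom0_uptime(app):
    dom0 = app.domains['dom0']
    stdout, _stderr = await dom0.run_service_for_stdio(
        'qubes.VMShell', input=b'uptime\n')
    return stdout.decode('ascii', 'replace')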
lgpl-2.1
412,797,591,439,523,840
29.791667
79
0.596128
false
3.80626
false
false
false
danielsunzhongyuan/my_leetcode_in_python
lowest_common_ancestor_of_a_binary_tree_236.py
1
1928
# Definition for a binary tree node. # class TreeNode(object): # def __init__(self, x): # self.val = x # self.left = None # self.right = None class Solution(object): def lowestCommonAncestor(self, root, p, q): """ :type root: TreeNode :type p: TreeNode :type q: TreeNode :rtype: TreeNode """ # Solution One: 99ms # stack = [root] # parent = {root: None} # while p not in parent or q not in parent: # node = stack.pop() # if node.left: # parent[node.left] = node # stack.append(node.left) # if node.right: # parent[node.right] = node # stack.append(node.right) # ancestor_of_p = [] # while p: # ancestor_of_p.append(p) # p = parent[p] # while q not in ancestor_of_p: # q = parent[q] # return q # Solution Two: if root in (None, p, q): return root left, right = (self.lowestCommonAncestor(kid, p, q) for kid in (root.left, root.right)) return root if left and right else left or right def lowestCommonAncestor2(self, root, p, q): """ :type root: TreeNode :type p: TreeNode :type q: TreeNode :rtype: TreeNode """ stack = [root] parent = {root: None} while p not in parent or q not in parent: node = stack.pop() if node.left: parent[node.left] = node stack.append(node.left) if node.right: parent[node.right] = node stack.append(node.right) ancestor_of_p = [] while p: ancestor_of_p.append(p) p = parent[p] while q not in ancestor_of_p: q = parent[q] return q
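# --- Editor's appendix: an illustrative sketch, not part of the original solution. ---
# A minimal check that both implementations agree on a three-node tree
# (root 3 with children 5 and 1). The TreeNode class is re-stated from the
# commented-out definition above.
if __name__ == '__main__':
    class TreeNode(object):
        def __init__(self, x):
            self.val = x
            self.left = None
            self.right = None

    root, p, q = TreeNode(3), TreeNode(5), TreeNode(1)
    root.left, root.right = p, q
    s = Solution()
    assert s.lowestCommonAncestor(root, p, q) is root
    assert s.lowestCommonAncestor2(root, p, q) is root
    print('LCA of 5 and 1 is %d' % s.lowestCommonAncestor(root, p, q).val)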
apache-2.0
-4,629,820,916,250,762,000
28.661538
95
0.48029
false
3.750973
false
false
false
beyondvalence/scratch
chp01.py
1
1697
#!/usr/bin/env python27
# -*- coding: utf-8 -*-
"""
Created on Fri Dec  2 16:15:29 2016

@author: waynetliu
"""
#%%
from __future__ import division

users = [
    { "id": 0, "name": "Hero" },
    { "id": 1, "name": "Dunn" },
    { "id": 2, "name": "Sue" },
    { "id": 3, "name": "Chi" },
    { "id": 4, "name": "Thor" },
    { "id": 5, "name": "Clive" },
    { "id": 6, "name": "Hicks" },
    { "id": 7, "name": "Devin" },
    { "id": 8, "name": "Kate" },
    { "id": 9, "name": "Klein" }
]

print users

#%%
friendships = [(0, 1), (0, 2), (1, 2), (1, 3), (2, 3), (3, 4),
               (4, 5), (5, 6), (5, 7), (6, 8), (7, 8), (8, 9)]

#%%
for user in users:
    user["friends"] = []

#%% appends together, not separately
for i, j in friendships:
    users[i]["friends"].append(users[j]) # adds j as a friend of i
    users[j]["friends"].append(users[i]) # adds i as a friend of j

print "\n", "users", "\n\n", users

#%%
# for first two friendship tuples
# hero - dunn, sue
# dunn - hero
# sue - hero (dunn)

def number_of_friends(user):
    """how many friends does user have?"""
    return len(user["friends"])  # length of the friends list

total_connections = sum(number_of_friends(user) for user in users)  # 24
print "total connections: ", total_connections

num_users = len(users)
avg_connections = total_connections / num_users  # 2.4

# create a list of (id, number of friends)
num_friends_by_id = [(user["id"], number_of_friends(user)) for user in users]

# sorted() returns a new list, so keep the result
num_friends_by_id = sorted(num_friends_by_id,
                           key=lambda (user_id, num_friends): num_friends,
                           reverse=True)

print("\n", num_friends_by_id)
bsd-2-clause
5,344,159,609,228,425,000
27.283333
71
0.524455
false
2.809603
false
false
false
arcyfelix/ML-DL-AI
Supervised Learning/GANs/GAN.py
1
3364
# -*- coding: utf-8 -*- """ GAN Example Use a generative adversarial network (GAN) to generate digit images from a noise distribution. References: - Generative adversarial nets. I Goodfellow, J Pouget-Abadie, M Mirza, B Xu, D Warde-Farley, S Ozair, Y. Bengio. Advances in neural information processing systems, 2672-2680. Links: - [GAN Paper](https://arxiv.org/pdf/1406.2661.pdf). """ from __future__ import division, print_function, absolute_import import matplotlib.pyplot as plt import numpy as np import tensorflow as tf import tflearn # Data loading and preprocessing import tflearn.datasets.mnist as mnist X, Y, testX, testY = mnist.load_data() image_dim = 784 # 28*28 pixels z_dim = 200 # Noise data points total_samples = len(X) # Generator def generator(x, reuse=False): with tf.variable_scope('Generator', reuse=reuse): x = tflearn.fully_connected(x, 256, activation='relu') x = tflearn.fully_connected(x, image_dim, activation='sigmoid') return x # Discriminator def discriminator(x, reuse=False): with tf.variable_scope('Discriminator', reuse=reuse): x = tflearn.fully_connected(x, 256, activation='relu') x = tflearn.fully_connected(x, 1, activation='sigmoid') return x # Build Networks gen_input = tflearn.input_data(shape=[None, z_dim], name='input_noise') disc_input = tflearn.input_data(shape=[None, 784], name='disc_input') gen_sample = generator(gen_input) disc_real = discriminator(disc_input) disc_fake = discriminator(gen_sample, reuse=True) # Define Loss disc_loss = -tf.reduce_mean(tf.log(disc_real) + tf.log(1. - disc_fake)) gen_loss = -tf.reduce_mean(tf.log(disc_fake)) # Build Training Ops for both Generator and Discriminator. # Each network optimization should only update its own variable, thus we need # to retrieve each network variables (with get_layer_variables_by_scope) and set # 'placeholder=None' because we do not need to feed any target. gen_vars = tflearn.get_layer_variables_by_scope('Generator') gen_model = tflearn.regression(gen_sample, placeholder=None, optimizer='adam', loss=gen_loss, trainable_vars=gen_vars, batch_size=64, name='target_gen', op_name='GEN') disc_vars = tflearn.get_layer_variables_by_scope('Discriminator') disc_model = tflearn.regression(disc_real, placeholder=None, optimizer='adam', loss=disc_loss, trainable_vars=disc_vars, batch_size=64, name='target_disc', op_name='DISC') # Define GAN model, that output the generated images. gan = tflearn.DNN(gen_model) # Training # Generate noise to feed to the generator z = np.random.uniform(-1., 1., size=[total_samples, z_dim]) # Start training, feed both noise and real images. gan.fit(X_inputs={gen_input: z, disc_input: X}, Y_targets=None, n_epoch=100) # Generate images from noise, using the generator network. f, a = plt.subplots(2, 10, figsize=(10, 4)) for i in range(10): for j in range(2): # Noise input. z = np.random.uniform(-1., 1., size=[1, z_dim]) # Generate image from noise. Extend to 3 channels for matplot figure. temp = [[ii, ii, ii] for ii in list(gan.predict([z])[0])] a[j][i].imshow(np.reshape(temp, (28, 28, 3))) f.show() plt.draw() plt.waitforbuttonpress()
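# --- Editor's appendix: an illustrative sketch, not part of the original script. ---
# The two objectives above, restated in plain NumPy for intuition: the
# discriminator minimizes -[log D(x) + log(1 - D(G(z)))] and the generator
# minimizes -log D(G(z)). The arguments are hypothetical discriminator
# outputs, i.e. arrays of probabilities in (0, 1).
def gan_losses(disc_real_p, disc_fake_p):
    disc_loss = -np.mean(np.log(disc_real_p) + np.log(1. - disc_fake_p))
    gen_loss = -np.mean(np.log(disc_fake_p))
    return disc_loss, gen_loss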
apache-2.0
2,824,382,926,397,684,000
36.388889
82
0.678062
false
3.250242
false
false
false
TransportLayer/mc-id2name
id2name.py
1
45678
############################################################################### # Minecraft ID to Friendly Name # # Copyright (C) 2016 TransportLayer # # # # This program is free software: you can redistribute it and/or modify # # it under the terms of the GNU General Public License as published by # # the Free Software Foundation, either version 3 of the License, or # # (at your option) any later version. # # # # This program is distributed in the hope that it will be useful, # # but WITHOUT ANY WARRANTY; without even the implied warranty of # # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # # GNU General Public License for more details. # # # # You should have received a copy of the GNU General Public License # # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################### items = { 'minecraft': { '__VERSION__': 1.10, '__LANGUAGE__': 'en_US', 'stone': { 'id': 1, 'category': 'Building Blocks', 0: 'Stone', 1: 'Granite', 2: 'Polished Granite', 3: 'Diorite', 4: 'Polished Diorite', 5: 'Andesite', 6: 'Polished Andesite' }, 'grass': { 'id': 2, 'category': 'Building Blocks', 0: 'Grass Block' }, 'dirt': { 'id': 3, 'category': 'Building Blocks', 0: 'Dirt', 1: 'Coarse Dirt', 2: 'Podzol' }, 'cobblestone': { 'id': 4, 'category': 'Building Blocks', 0: 'Cobblestone' }, 'planks': { 'id': 5, 'category': 'Building Blocks', 0: 'Oak Wood Planks', 1: 'Spruce Wood Planks', 2: 'Birch Wood Planks', 3: 'Jungle Wood Planks', 4: 'Acacia Wood Planks', 5: 'Dark Oak Wood Planks' }, 'sapling': { 'id': 6, 'category': 'Decoration Blocks', 0: 'Oak Sapling', 1: 'Spruce Sapling', 2: 'Birch Sapling', 3: 'Jungle Sapling', 4: 'Acacia Sapling', 5: 'Dark Oak Sapling' }, 'bedrock': { 'id': 7, 'category': 'Building Blocks', 0: 'Bedrock' }, # No item 8? # No item 9? # No item 10? # No item 11? 'sand': { 'id': 12, 'category': 'Building Blocks', 0: 'Sand', 1: 'Red Sand' }, 'gravel': { 'id': 13, 'category': 'Building Blocks', 0: 'Gravel' }, 'gold_ore': { 'id': 14, 'category': 'Building Blocks', 0: 'Gold Ore' }, 'iron_ore': { 'id': 15, 'category': 'Building Blocks', 0: 'Iron Ore' }, 'coal_ore': { 'id': 16, 'category': 'Building Blocks', 0: 'Coal Ore' }, 'log': { 'id': 17, 'category': 'Building Blocks', 0: 'Oak Wood', 1: 'Spruce Wood', 2: 'Birch Wood', 3: 'Jungle Wood' }, 'leaves': { 'id': 18, 'category': 'Decoration Blocks', 0: 'Oak Leaves', 1: 'Spruce Leaves', 2: 'Birch Leaves', 3: 'Jungle Leaves' }, 'sponge': { 'id': 19, 'category': 'Building Blocks', 0: 'Sponge', 1: 'Wet Sponge' }, 'glass': { 'id': 20, 'category': 'Building Blocks', 0: 'Glass' }, 'lapis_ore': { 'id': 21, 'category': 'Building Blocks', 0: 'Lapis Lazuli Ore' }, 'lapis_block': { 'id': 22, 'category': 'Building Blocks', 0: 'Lapis Lazuli Block' }, 'dispenser': { 'id': 23, 'category': 'Redstone', 0: 'Dispenser' }, 'sandstone': { 'id': 24, 'category': 'Building Blocks', 0: 'Sandstone', 1: 'Chiseled Sandstone', 2: 'Smooth Sandstone' }, 'noteblock': { 'id': 25, 'category': 'Redstone', 0: 'Note Block' }, # No item 26? 'golden_rail': { 'id': 27, 'category': 'Transportation', 0: 'Powered Rail' }, 'detector_rail': { 'id': 28, 'category': 'Transportation', 0: 'Detector Rail' }, 'sticky_piston': { 'id': 29, 'category': 'Redstone', 0: 'Sticky Piston' }, 'web': { 'id': 30, 'category': 'Decoration Blocks', 0: 'Cobweb' }, 'tallgrass': { 'id': 31, 'category': 'Decoration Blocks', # Missing DV 0? 
1: 'Grass', 2: 'Fern' }, 'deadbush': { 'id': 32, 'category': 'Decoration Blocks', 0: 'Dead Bush' }, 'piston': { 'id': 33, 'category': 'Redstone', 0: 'Piston' }, # No item 34? 'wool': { 'id': 35, 'category': 'Building Blocks', 0: 'Wool', 1: 'Orange Wool', 2: 'Magenta Wool', 3: 'Light Blue Wool', 4: 'Yellow Wool', 5: 'Lime Wool', 6: 'Pink Wool', 7: 'Gray Wool', 8: 'Light Gray Wool', 9: 'Cyan Wool', 10: 'Purple Wool', 11: 'Blue Wool', 12: 'Brown Wool', 13: 'Green Wool', 14: 'Red Wool', 15: 'Black Wool' }, # No item 36? 'yellow_flower': { 'id': 37, 'category': 'Decoration Blocks', 0: 'Dandelion' # Marked for more DVs. }, 'red_flower': { 'id': 38, 'category': 'Decoration Blocks', 0: 'Poppy', 1: 'Blue Orchid', # Not red. 2: 'Allium', # Also not red. 3: 'Azure Bluet', # Still not red. 4: 'Red Tulip', # Wow, good job, this one's red. 5: 'Orange Tulip', # Closer to red...? 6: 'White Tulip', # Farther from red. 7: 'Pink Tulip', # Ah, there we go, back on track. 8: 'Oxeye Daisy' # I give up at this point. }, 'brown_mushroom': { 'id': 39, 'category': 'Decoration Blocks', 0: 'Mushroom' }, 'red_mushroom': { 'id': 40, 'category': 'Decoration Blocks', 0: 'Mushroom' }, 'gold_block': { 'id': 41, 'category': 'Building Blocks', 0: 'Block of Gold' }, 'iron_block': { 'id': 42, 'category': 'Building Blocks', 0: 'Block of Iron' }, # No item 43? 'stone_slab': { 'id': 44, 'category': 'Building Blocks', 0: 'Stone Slab', 1: 'Sandstone Slab', # No DV 2? 3: 'Cobblestone Slab', 4: 'Bricks Slab', 5: 'Stone Bricks Slab', 6: 'Nether Brick Slab', 7: 'Quartz Slab' }, 'brick_block': { 'id': 45, 'category': 'Building Blocks', 0: 'Bricks' }, 'tnt': { 'id': 46, 'category': 'Redstone', 0: 'TNT' }, 'bookshelf': { 'id': 47, 'category': 'Building Blocks', 0: 'Bookshelf' }, 'mossy_cobblestone': { 'id': 48, 'category': 'Building Blocks', 0: 'Moss Stone' }, 'obsidian': { 'id': 49, 'category': 'Building Blocks', 0: 'Obsidian' }, 'torch': { 'id': 50, 'category': 'Decoration Blocks', 0: 'Torch' }, # No item 51? # No item 52? 'oak_stairs': { 'id': 53, 'category': 'Building Blocks', 0: 'Oak Wood Stairs' }, 'chest': { 'id': 54, 'category': 'Decoration Blocks', 0: 'Chest' }, # No item 55? 'diamond_ore': { 'id': 56, 'category': 'Building Blocks', 0: 'Diamond Ore' }, 'diamond_block': { 'id': 57, 'category': 'Building Blocks', 0: 'Block of Diamond' }, 'crafting_table': { 'id': 58, 'category': 'Decoration Blocks', 0: 'Crafting Table' }, # No item 59? # No item 60? 'furnace': { 'id': 61, 'category': 'Decoration Blocks', 0: 'Furnace' }, # No item 62? # No item 63? # No item 64? 'ladder': { 'id': 65, 'category': 'Decoration Blocks', 0: 'Ladder' }, 'rail': { 'id': 66, 'category': 'Transportation', 0: 'Rail' }, 'stone_stairs': { 'id': 67, 'category': 'Building Blocks', 0: 'Cobblestone Stairs' }, # No item 68? 'lever': { 'id': 69, 'category': 'Redstone', 0: 'Lever' }, 'stone_pressure_plate': { 'id': 70, 'category': 'Redstone', 0: 'Stone Pressure Plate' }, # No item 71? 'wooden_pressure_plate': { 'id': 72, 'category': 'Redstone', 0: 'Wooden Pressure Plate' }, 'redstone_ore': { 'id': 73, 'category': 'Building Blocks', 0: 'Redstone Ore' }, # No item 74? # No item 75? 'redstone_torch': { 'id': 76, 'category': 'Redstone', 0: 'Redstone Torch' }, 'stone_button': { 'id': 77, 'category': 'Redstone', 0: 'Button' }, 'snow_layer': { 'id': 78, 'category': 'Decoration Blocks', 0: 'Snow' # Marked for more DVs. 
}, 'ice': { 'id': 79, 'category': 'Building Blocks', 0: 'Ice' }, 'snow': { 'id': 80, 'category': 'Building Blocks', 0: 'Snow' }, 'cactus': { 'id': 81, 'category': 'Decoration Blocks', 0: 'Cactus' }, 'clay': { 'id': 82, 'category': 'Building Blocks', 0: 'Clay' }, # No item 83? 'jukebox': { 'id': 84, 'category': 'Decoration Blocks', 0: 'Jukebox' }, 'fence': { 'id': 85, 'category': 'Decoration Blocks', 0: 'Oak Fence' }, 'pumpkin': { 'id': 86, 'category': 'Building Blocks', 0: 'Pumpkin' }, 'netherrack': { 'id': 87, 'category': 'Building Blocks', 0: 'Netherrack' }, 'soul_sand': { 'id': 88, 'category': 'Building Blocks', 0: 'Soul Sand' }, 'glowstone': { 'id': 89, 'category': 'Building Blocks', 0: 'Glowstone' }, # No item 90? 'lit_pumpkin': { 'id': 91, 'category': 'Building Blocks', 0: 'Jack o\'Lantern' }, # No item 92? # No item 93? # No item 94? 'stained_glass': { 'id': 95, 'category': 'Building Blocks', 0: 'White Stained Glass', 1: 'Orange Stained Glass', 2: 'Magenta Stained Glass', 3: 'Light Blue Stained Glass', 4: 'Yellow Stained Glass', 5: 'Lime Stained Glass', 6: 'Pink Stained Glass', 7: 'Gray Stained Glass', 8: 'Light Gray Stained Glass', 9: 'Cyan Stained Glass', 10: 'Purple Stained Glass', 11: 'Blue Stained Glass', 12: 'Brown Stained Glass', 13: 'Green Stained Glass', 14: 'Red Stained Glass', 15: 'Black Stained Glass' }, 'trapdoor': { 'id': 96, 'category': 'Redstone', 0: 'Wooden Trapdoor' }, 'monster_egg': { 'id': 97, 'category': 'Decoration Blocks', 0: 'Stone Monster Egg', 1: 'Cobblestone Monster Egg', 2: 'Stone Brick Monster Egg', 3: 'Mossy Stone Brick Monster Egg', 4: 'Cracked Stone Brick Monster Egg', 5: 'Chiseled Stone Brick Monster Egg' }, 'stonebrick': { 'id': 98, 'category': 'Building Blocks', 0: 'Stone Bricks', 1: 'Mossy Stone Bricks', 2: 'Cracked Stone Bricks', 3: 'Chiseled Stone Bricks' }, # No item 99? # No item 100? 'iron_bars': { 'id': 101, 'category': 'Decoration Blocks', 0: 'Iron Bars' }, 'glass_pane': { 'id': 102, 'category': 'Decoration Blocks', 0: 'Glass Pane' }, 'melon_block': { 'id': 103, 'category': 'Building Blocks', 0: 'Melon' }, # No item 104? # No item 105? 'vine': { 'id': 106, 'category': 'Decoration Blocks', 0: 'Vines' }, 'fence_gate': { 'id': 107, 'category': 'Redstone', 0: 'Oak Fence Gate' }, 'brick_stairs': { 'id': 108, 'category': 'Building Blocks', 0: 'Brick Stairs' }, 'stone_brick_stairs': { 'id': 109, 'category': 'Building Blocks', 0: 'Stone Brick Stairs' }, 'mycelium': { 'id': 110, 'category': 'Building Blocks', 0: 'Mycelium' }, 'waterlily': { 'id': 111, 'category': 'Decoration Blocks', 0: 'Lily Pad' }, 'nether_brick': { 'id': 112, 'category': 'Building Blocks', 0: 'Nether Brick' }, 'nether_brick_fence': { 'id': 113, 'category': 'Decoration Blocks', 0: 'Nether Brick Fence' }, 'nether_brick_stairs': { 'id': 114, 'category': 'Building Blocks', 0: 'Nether Brick Stairs' }, # No item 115? 'enchanting_table': { 'id': 116, 'category': 'Decoration Blocks', 0: 'Enchantment Table' }, # No item 117? # No item 118? # No item 119? 'end_portal_frame': { 'id': 120, 'category': 'Decoration Blocks', 0: 'End Portal' }, 'end_stone': { 'id': 121, 'category': 'Building Blocks', 0: 'End Stone' }, 'redstone_lamp': { 'id': 123, 'category': 'Redstone', 0: 'Redstone Lamp' }, # No item 124? # No item 125? 'wooden_slab': { 'id': 126, 'category': 'Building Blocks', 0: 'Oak Wood Slab', 1: 'Spruce Wood Slab', 2: 'Birch Wood Slab', 3: 'Jungle Wood Slab', 4: 'Acacia Wood Slab', 5: 'Dark Oak Wood Slab' }, # No item 127? 
'sandstone_stairs': { 'id': 128, 'category': 'Building Blocks', 0: 'Sandstone Stairs' }, 'emerald_ore': { 'id': 129, 'category': 'Building Blocks', 0: 'Emerald Ore' }, 'ender_chest': { 'id': 130, 'category': 'Decoration Blocks', 0: 'Ender Chest' }, 'tripwire_hook': { 'id': 131, 'category': 'Redstone', 0: 'Tripwire Hook' }, # No item 132? 'emerald_block': { 'id': 133, 'category': 'Building Blocks', 0: 'Block of Emerald' }, 'spruce_stairs': { 'id': 134, 'category': 'Building Blocks', 0: 'Spruce Wood Stairs' }, 'birch_stairs': { 'id': 135, 'category': 'Building Blocks', 0: 'Birch Wood Stairs' }, 'jungle_stairs': { 'id': 136, 'category': 'Building Blocks', 0: 'Jungle Wood Stairs' }, # No item 137? 'beacon': { 'id': 138, 'category': 'Miscellaneous', 0: 'Beacon' }, 'cobblestone_wall': { 'id': 139, 'category': 'Building Blocks', 0: 'Cobblestone Wall', 1: 'Mossy Cobblestone Wall' }, # No item 140? # No item 141? # No item 142? 'wooden_button': { 'id': 143, 'category': 'Redstone', 0: 'Button' }, # No item 144? 'anvil': { 'id': 145, 'category': 'Decoration Blocks', 0: 'Anvil', 1: 'Slightly Damaged Anvil', 2: 'Very Damaged Anvil' }, 'trapped_chest': { 'id': 146, 'category': 'Redstone', 0: 'Trapped Chest' }, 'light_weighted_pressure_plate': { 'id': 147, 'category': 'Redstone', 0: 'Weighted Pressure Plate (Light)' }, 'heavy_weighted_pressure_plate': { 'id': 148, 'category': 'Redstone', 0: 'Weighted Pressure Plate (Heavy)' }, # No item 149? # No item 150? 'daylight_detector': { 'id': 151, 'category': 'Redstone', 0: 'Daylight Sensor' }, 'redstone_block': { 'id': 152, 'category': 'Redstone', 0: 'Block of Redstone' }, 'quartz_ore': { 'id': 153, 'category': 'Building Blocks', 0: 'Nether Quartz Ore' }, 'hopper': { 'id': 154, 'category': 'Redstone', 0: 'Hopper' }, 'quartz_block': { 'id': 155, 'category': 'Building Blocks', 0: 'Block of Quartz', 1: 'Chiseled Quartz Block', 2: 'Pillar Quartz Block' }, 'quartz_stairs': { 'id': 156, 'category': 'Building Blocks', 0: 'Quartz Stairs' }, 'activator_rail': { 'id': 157, 'category': 'Transportation', 0: 'Activator Rail' }, 'dropper': { 'id': 158, 'category': 'Redstone', 0: 'Dropper' }, 'stained_hardened_clay': { 'id': 159, 'category': 'Building Blocks', 0: 'White Hardened Clay', 1: 'Orange Hardened Clay', 2: 'Magenta Hardened Clay', 3: 'Light Blue Hardened Clay', 4: 'Yellow Hardened Clay', 5: 'Lime Hardened Clay', 6: 'Pink Hardened Clay', 7: 'Gray Hardened Clay', 8: 'Light Gray Hardened Clay', 9: 'Cyan Hardened Clay', 10: 'Purple Hardened Clay', 11: 'Blue Hardened Clay', 12: 'Brown Hardened Clay', 13: 'Green Hardened Clay', 14: 'Red Hardened Clay', 15: 'Black Hardened Clay' }, 'stained_glass_pane': { 'id': 160, 'category': 'Decoration Blocks', 0: 'White Stained Glass Pane', 1: 'Orange Stained Glass Pane', 2: 'Magenta Stained Glass Pane', 3: 'Light Blue Stained Glass Pane', 4: 'Yellow Stained Glass Pane', 5: 'Lime Stained Glass Pane', 6: 'Pink Stained Glass Pane', 7: 'Gray Stained Glass Pane', 8: 'Light Gray Stained Glass Pane', 9: 'Cyan Stained Glass Pane', 10: 'Purple Stained Glass Pane', 11: 'Blue Stained Glass Pane', 12: 'Brown Stained Glass Pane', 13: 'Green Stained Glass Pane', 14: 'Red Stained Glass Pane', 15: 'Black Stained Glass Pane' }, 'leaves2': { 'id': 161, 'category': 'Decoration Blocks', 0: 'Acacia Leaves', 1: 'Dark Oak Leaves' }, 'log2': { 'id': 162, 'category': 'Building Blocks', 0: 'Acacia Wood', 1: 'Dark Oak Wood' }, 'acacia_stairs': { 'id': 163, 'category': 'Building Blocks', 0: 'Acacia Wood Stairs' }, 'dark_oak_stairs': { 'id': 164, 'category': 
'Building Blocks', 0: 'Dark Oak Wood Stairs' }, 'slime': { 'id': 165, 'category': 'Decoration Blocks', 0: 'Slime Block' }, 'iron_trapdoor': { 'id': 167, 'category': 'Redstone', 0: 'Iron Trapdoor' }, 'prismarine': { 'id': 168, 'category': 'Building Blocks', 0: 'Prismarine', 1: 'Prismarine Bricks', 2: 'Dark Prismarine' }, 'sea_lantern': { 'id': 169, 'category': 'Building Blocks', 0: 'Sea Lantern' }, 'hay_block': { 'id': 170, 'category': 'Building Blocks', 0: 'Hay Bale' }, 'carpet': { 'id': 171, 'category': 'Decoration Blocks', 0: 'Carpet', 1: 'Orange Carpet', 2: 'Magenta Carpet', 3: 'Light Blue Carpet', 4: 'Yellow Carpet', 5: 'Lime Carpet', 6: 'Pink Carpet', 7: 'Gray Carpet', 8: 'Light Gray Carpet', 9: 'Cyan Carpet', 10: 'Purple Carpet', 11: 'Blue Carpet', 12: 'Brown Carpet', 13: 'Green Carpet', 14: 'Red Carpet', 15: 'Black Carpet' }, 'hardened_clay': { 'id': 172, 'category': 'Building Blocks', 0: 'Hardened Clay' }, 'coal_block': { 'id': 173, 'category': 'Building Blocks', 0: 'Block of Coal' }, 'packed_ice': { 'id': 174, 'category': 'Building Blocks', 0: 'Packed Ice' }, 'double_plant': { 'id': 175, 'category': 'Decoration Blocks', 0: 'Sunflower', 1: 'Lilac', 2: 'Double Tallgrass', 3: 'Large Fern', 4: 'Rose Bush', 5: 'Peony' }, # No item 176? # No item 177? # No item 178? 'red_sandstone': { 'id': 179, 'category': 'Building Blocks', 0: 'Red Sandstone', 1: 'Chiseled Red Sandstone', 2: 'Smooth Red Sandstone' }, 'red_sandstone_stairs': { 'id': 180, 'category': 'Building Blocks', 0: 'Red Sandstone Stairs' }, # No item 181? 'stone_slab2': { 'id': 182, 'category': 'Building Blocks', 0: 'Red Sandstone Slab' # Marked for more DVs. }, 'spruce_fence_gate': { 'id': 183, 'category': 'Redstone', 0: 'Spruce Fence Gate' }, 'birch_fence_gate': { 'id': 184, 'category': 'Redstone', 0: 'Birch Fence Gate' }, 'jungle_fence_gate': { 'id': 185, 'category': 'Redstone', 0: 'Jungle Fence Gate' }, 'dark_oak_fence_gate': { 'id': 186, 'category': 'Redstone', 0: 'Dark Oak Fence Gate' }, 'acacia_fence_gate': { 'id': 187, 'category': 'Redstone', 0: 'Acacia Fence Gate' }, 'spruce_fence': { 'id': 188, 'category': 'Decoration Blocks', 0: 'Spruce Fence' }, 'birch_fence': { 'id': 189, 'category': 'Decoration Blocks', 0: 'Birch Fence' }, 'jungle_fence': { 'id': 190, 'category': 'Decoration Blocks', 0: 'Jungle Fence' }, 'dark_oak_fence': { 'id': 191, 'category': 'Decoration Blocks', 0: 'Dark Oak Fence' }, 'acacia_fence': { 'id': 192, 'category': 'Decoration Blocks', 0: 'Acacia Fence' }, # No item 193? # No item 194? # No item 195? # No item 196? # No item 197? 'end_rod': { 'id': 198, 'category': 'Decoration Blocks', 0: 'End Rod' }, 'chorus_plant': { 'id': 199, 'category': 'Decoration Blocks', 0: 'Chorus Plant' }, 'chorus_flower': { 'id': 200, 'category': 'Decoration Blocks', 0: 'Chorus Flower' }, 'purpur_block': { 'id': 201, 'category': 'Building Blocks', 0: 'Purpur Block' }, 'purpur_pillar': { 'id': 202, 'category': 'Building Blocks', 0: 'Purpur Pillar' }, 'purpur_stairs': { 'id': 203, 'category': 'Building Blocks', 0: 'Purpur Stairs' }, # No item 204? 'purpur_slab': { 'id': 205, 'category': 'Building Blocks', 0: 'Purpur Slab' # Marked for more DVs. }, 'end_bricks': { 'id': 206, 'category': 'Building Blocks', 0: 'End Stone Bricks' }, # No item 207? # No item 208? # No item 209? # No item 210? # No item 211? # No item 212? 
'magma': { 'id': 213, 'category': 'Building Blocks', 0: 'Magma Block' }, 'nether_wart_block': { 'id': 214, 'category': 'Building Blocks', 0: 'Nether Wart Block' }, 'red_nether_brick': { 'id': 215, 'category': 'Building Blocks', 0: 'Red Nether Brick' }, 'bone_block': { 'id': 216, 'category': 'Building Blocks', 0: 'Bone Block' }, # No item... # ... # Start of 256 block. 'iron_shovel': { 'id': 256, 'category': 'Tools', 'name': 'Iron Shovel', 'uses': 251 }, 'iron_pickaxe': { 'id': 257, 'category': 'Tools', 'name': 'Iron Pickaxe', 'uses': 251 }, 'iron_axe': { 'id': 258, 'category': 'Tools', 'name': 'Iron Axe', 'uses': 251 }, 'flint_and_steel': { 'id': 259, 'category': 'Tools', 'name': 'Flint and Steel', 'uses': 65 }, 'apple': { 'id': 260, 'category': 'Foodstuffs', 0: 'Apple' }, 'bow': { 'id': 261, 'category': 'Combat', 'name': 'Bow', 'uses': 385 }, 'arrow': { 'id': 262, 'category': 'Combat', 0: 'Arrow' }, 'coal': { 'id': 263, 'category': 'Materials', 0: 'Coal', 1: 'Charcoal' }, 'diamond': { 'id': 264, 'category': 'Materials', 0: 'Diamond' }, 'iron_ingot': { 'id': 265, 'category': 'Materials', 0: 'Iron Ingot' }, 'gold_ingot': { 'id': 266, 'category': 'Materials', 0: 'Gold Ingot' }, 'iron_sword': { 'id': 267, 'category': 'Combat', 'name': 'Iron Sword', 'uses': 251 }, 'wooden_sword': { 'id': 268, 'category': 'Combat', 'name': 'Wooden Sword', 'uses': 60 }, 'wooden_shovel': { 'id': 269, 'category': 'Tools', 'name': 'Wooden Shovel', 'uses': 60 }, 'wooden_pickaxe': { 'id': 270, 'category': 'Tools', 'name': 'Wooden Pickaxe', 'uses': 60 }, 'wooden_axe': { 'id': 271, 'category': 'Tools', 'name': 'Wooden Axe', 'uses': 60 }, 'stone_sword': { 'id': 272, 'category': 'Combat', 'name': 'Stone Sword', 'uses': 132 }, 'stone_shovel': { 'id': 273, 'category': 'Tools', 'name': 'Stone Shovel', 'uses': 132 }, 'stone_pickaxe': { 'id': 274, 'category': 'Tools', 'name': 'Stone Pickaxe', 'uses': 132 }, 'stone_axe': { 'id': 275, 'category': 'Tools', 'name': 'Stone Axe', 'uses': 132 }, 'diamond_sword': { 'id': 276, 'category': 'Combat', 'name': 'Diamond Sword', 'uses': 1562 }, 'diamond_shovel': { 'id': 277, 'category': 'Tools', 'name': 'Diamond Shovel', 'uses': 1562 }, 'diamond_pickaxe': { 'id': 278, 'category': 'Tools', 'name': 'Diamond Pickaxe', 'uses': 1562 }, 'diamond_axe': { 'id': 279, 'category': 'Tools', 'name': 'Diamond Axe', 'uses': 1562 }, 'stick': { 'id': 280, 'category': 'Materials', 0: 'Stick' }, 'bowl': { 'id': 281, 'category': 'Materials', 0: 'Bowl' }, 'mushroom_stew': { 'id': 282, 'category': 'Foodstuffs', 0: 'Mushroom Stew' }, 'golden_sword': { 'id': 283, 'category': 'Combat', 'name': 'Golden Sword', 'uses': 33 }, 'golden_shovel': { 'id': 284, 'category': 'Tools', 'name': 'Golden Shovel', 'uses': 33 }, 'golden_pickaxe': { 'id': 285, 'category': 'Tools', 'name': 'Golden Pickaxe', 'uses': 33 }, 'golden_axe': { 'id': 286, 'category': 'Tools', 'name': 'Golden Axe', 'uses': 33 }, 'string': { 'id': 287, 'category': 'Materials', 0: 'String' }, 'feather': { 'id': 288, 'category': 'Materials', 0: 'Feather' }, 'gunpowder': { 'id': 289, 'category': 'Materials', 0: 'Gunpowder' }, 'wooden_hoe': { 'id': 290, 'category': 'Tools', 'name': 'Wooden Hoe', 'uses': 60 }, 'stone_hoe': { 'id': 291, 'category': 'Tools', 'name': 'Stone Hoe', 'uses': 132 }, 'iron_hoe': { 'id': 292, 'category': 'Tools', 'name': 'Iron Hoe', 'uses': 251 }, 'diamond_hoe': { 'id': 293, 'category': 'Tools', 'name': 'Diamond Hoe', 'uses': 1562 }, 'golden_hoe': { 'id': 294, 'category': 'Tools', 'name': 'Golden Hoe', 'uses': 33 }, 'wheat_seeds': { 'id':
295, 'category': 'Materials', 0: 'Seeds' }, 'wheat': { 'id': 296, 'category': 'Materials', 0: 'Wheat' }, 'bread': { 'id': 297, 'category': 'Foodstuffs', 0: 'Bread' }, 'leather_helmet': { 'id': 298, 'category': 'Combat', 'name': 'Leather Cap', 'uses': 56, 'armor': 1, 'toughness': 0 }, 'leather_chestplate': { 'id': 299, 'category': 'Combat', 'name': 'Leather Tunic', 'uses': 81, 'armor': 3, 'toughness': 0 }, 'leather_leggings': { 'id': 300, 'category': 'Combat', 'name': 'Leather Pants', 'uses': 76, 'armor': 2, 'toughness': 0 }, 'leather_boots': { 'id': 301, 'category': 'Combat', 'name': 'Leather Boots', 'uses': 66, 'armor': 1, 'toughness': 0 }, 'chainmail_helmet': { 'id': 302, 'category': 'Combat', 'name': 'Chain Helmet', 'uses': 166, 'armor': 2, 'toughness': 0 }, 'chainmail_chestplate': { 'id': 303, 'category': 'Combat', 'name': 'Chain Chestplate', 'uses': 241, 'armor': 5, 'toughness': 0 }, 'chainmail_leggings': { 'id': 304, 'category': 'Combat', 'name': 'Chain Leggings', 'uses': 226, 'armor': 4, 'toughness': 0 }, 'chainmail_boots': { 'id': 305, 'category': 'Combat', 'name': 'Chain Boots', 'uses': 196, 'armor': 1, 'toughness': 0 }, 'iron_helmet': { 'id': 306, 'category': 'Combat', 'name': 'Iron Helmet', 'uses': 166, 'armor': 2, 'toughness': 0 }, 'iron_chestplate': { 'id': 307, 'category': 'Combat', 'name': 'Iron Chestplate', 'uses': 241, 'armor': 6, 'toughness': 0 }, 'iron_leggings': { 'id': 308, 'category': 'Combat', 'name': 'Iron Leggings', 'uses': 226, 'armor': 5, 'toughness': 0 }, 'iron_boots': { 'id': 309, 'category': 'Combat', 'name': 'Iron Boots', 'uses': 196, 'armor': 2, 'toughness': 0 }, 'diamond_helmet': { 'id': 310, 'category': 'Combat', 'name': 'Diamond Helmet', 'uses': 364, 'armor': 3, 'toughness': 2 }, 'diamond_chestplate': { 'id': 311, 'category': 'Combat', 'name': 'Diamond Chestplate', 'uses': 529, 'armor': 8, 'toughness': 2 }, 'diamond_leggings': { 'id': 312, 'category': 'Combat', 'name': 'Diamond Leggings', 'uses': 496, 'armor': 6, 'toughness': 2 }, 'diamond_boots': { 'id': 313, 'category': 'Combat', 'name': 'Diamond Boots', 'uses': 430, 'armor': 3, 'toughness': 2 }, 'golden_helmet': { 'id': 314, 'category': 'Combat', 'name': 'Golden Helmet', 'uses': 78, 'armor': 2, 'toughness': 0 }, 'golden_chestplate': { 'id': 315, 'category': 'Combat', 'name': 'Golden Chestplate', 'uses': 113, 'armor': 5, 'toughness': 0 }, 'golden_leggings': { 'id': 316, 'category': 'Combat', 'name': 'Golden Leggings', 'uses': 106, 'armor': 3, 'toughness': 0 }, 'golden_boots': { 'id': 317, 'category': 'Combat', 'name': 'Golden Boots', 'uses': 92, 'armor': 1, 'toughness': 0 }, 'flint': { 'id': 318, 'category': 'Materials', 0: 'Flint' }, 'porkchop': { 'id': 319, 'category': 'Foodstuffs', 0: 'Raw Porkchop' }, 'cooked_porkchop': { 'id': 320, 'category': 'Foodstuffs', 0: 'Cooked Porkchop' }, 'painting': { 'id': 321, 'category': 'Decoration Blocks', 0: 'Painting' }, 'golden_apple': { 'id': 322, 'category': 'Foodstuffs', 0: 'Golden Apple', # Regular. 1: 'Golden Apple' # Notch Apple. 
}, 'sign': { 'id': 323, 'category': 'Decoration Blocks', 0: 'Sign' }, 'wooden_door': { 'id': 324, 'category': 'Redstone', 0: 'Oak Door' }, 'bucket': { 'id': 325, 'category': 'Miscellaneous', 0: 'Bucket' }, 'water_bucket': { 'id': 326, 'category': 'Miscellaneous', 0: 'Water Bucket' }, 'lava_bucket': { 'id': 327, 'category': 'Miscellaneous', 0: 'Lava Bucket' }, 'minecart': { 'id': 328, 'category': 'Transportation', 0: 'Minecart' }, 'saddle': { 'id': 329, 'category': 'Transportation', 0: 'Saddle' }, 'iron_door': { 'id': 330, 'category': 'Redstone', 0: 'Iron Door' }, 'redstone': { 'id': 331, 'category': 'Redstone', 0: 'Redstone' }, 'snowball': { 'id': 332, 'category': 'Miscellaneous', 0: 'Snowball' }, 'boat': { 'id': 333, 'category': 'Transportation', 0: 'Oak Boat' }, 'leather': { 'id': 334, 'category': 'Materials', 0: 'Leather' }, 'milk_bucket': { 'id': 335, 'category': 'Miscellaneous', 0: 'Milk' }, 'brick': { 'id': 336, 'category': 'Materials', 0: 'Brick' }, 'clay_ball': { 'id': 337, 'category': 'Materials', 0: 'Clay' }, 'reeds': { 'id': 338, 'category': 'Materials', 0: 'Sugar Canes' }, 'paper': { 'id': 339, 'category': 'Miscellaneous', 0: 'Paper' }, 'book': { 'id': 340, 'category': 'Miscellaneous', 0: 'Book' }, 'slime_ball': { 'id': 341, 'category': 'Miscellaneous', 0: 'Slimeball' }, 'chest_minecart': { 'id': 342, 'category': 'Transportation', 0: 'Minecart with Chest' }, 'furnace_minecart': { 'id': 343, 'category': 'Transportation', 0: 'Minecart with Furnace' }, 'egg': { 'id': 344, 'category': 'Materials', 0: 'Egg' }, 'compass': { 'id': 345, 'category': 'Tools', 0: 'Compass' }, 'fishing_rod': { 'id': 346, 'category': 'Tools', 'name': 'Fishing Rod', 'uses': 65 }, 'clock': { 'id': 347, 'category': 'Tools', 0: 'Clock' }, 'glowstone_dust': { 'id': 348, 'category': 'Materials', 0: 'Glowstone Dust' }, 'fish': { 'id': 349, 'category': 'Foodstuffs', 0: 'Raw Fish', 1: 'Raw Salmon', 2: 'Clownfish', 3: 'Pufferfish' }, 'cooked_fish': { 'id': 350, 'category': 'Foodstuffs', 0: 'Cooked Fish', 1: 'Cooked Salmon' }, 'dye': { 'id': 351, 'category': 'Materials', 0: 'Ink Sac', 1: 'Rose Red', 2: 'Cactus Green', 3: 'Cocoa Beans', 4: 'Lapis Lazuli', 5: 'Purple Dye', 6: 'Cyan Dye', 7: 'Light Gray Dye', 8: 'Gray Dye', 9: 'Pink Dye', 10: 'Lime Dye', 11: 'Dandelion Yellow', 12: 'Light Blue Dye', 13: 'Magenta Dye', 14: 'Orange Dye', 15: 'Bone Meal' }, 'bone': { 'id': 352, 'category': 'Miscellaneous', 0: 'Bone' }, 'sugar': { 'id': 353, 'category': 'Materials', 0: 'Sugar' }, 'cake': { 'id': 354, 'category': 'Foodstuffs', 0: 'Cake' }, 'bed': { 'id': 355, 'category': 'Decoration Blocks', 0: 'Bed' }, 'repeater': { 'id': 356, 'category': 'Redstone', 0: 'Redstone Repeater' }, 'cookie': { 'id': 357, 'category': 'Foodstuffs', 0: 'Cookie' }, # No item 358?
'shears': { 'id': 359, 'category': 'Tools', 'name': 'Shears', 'uses': 238 }, 'melon': { 'id': 360, 'category': 'Foodstuffs', 0: 'Melon' }, 'pumpkin_seeds': { 'id': 361, 'category': 'Materials', 0: 'Pumpkin Seeds' }, 'melon_seeds': { 'id': 362, 'category': 'Materials', 0: 'Melon Seeds' }, 'beef': { 'id': 363, 'category': 'Foodstuffs', 0: 'Raw Beef' }, 'cooked_beef': { 'id': 364, 'category': 'Foodstuffs', 0: 'Steak' }, 'chicken': { 'id': 365, 'category': 'Foodstuffs', 0: 'Raw Chicken' }, 'cooked_chicken': { 'id': 366, 'category': 'Foodstuffs', 0: 'Cooked Chicken' }, 'rotten_flesh': { 'id': 367, 'category': 'Foodstuffs', 0: 'Rotten Flesh' }, 'ender_pearl': { 'id': 368, 'category': 'Miscellaneous', 0: 'Ender Pearl' }, 'blaze_rod': { 'id': 369, 'category': 'Materials', 0: 'Blaze Rod' }, 'ghast_tear': { 'id': 370, 'category': 'Brewing', 0: 'Ghast Tear' }, 'gold_nugget': { 'id': 371, 'category': 'Materials', 0: 'Gold Nugget' }, 'nether_wart': { 'id': 372, 'category': 'Materials', 0: 'Nether Wart' }, 'potion': { 'id': 373, 'category': 'Brewing', 0: 'Potion' # Potions are stored as NBT data. }, 'glass_bottle': { 'id': 374, 'category': 'Brewing', 0: 'Glass Bottle' }, 'spider_eye': { 'id': 375, 'category': 'Foodstuffs', 0: 'Spider Eye' }, 'fermented_spider_eye': { 'id': 376, 'category': 'Brewing', 0: 'Fermented Spider Eye' }, 'blaze_powder': { 'id': 377, 'category': 'Brewing', 0: 'Blaze Powder' }, 'magma_cream': { 'id': 378, 'category': 'Brewing', 0: 'Magma Cream' }, 'brewing_stand': { 'id': 379, 'category': 'Brewing', 0: 'Brewing Stand' }, 'cauldron': { 'id': 380, 'category': 'Brewing', 0: 'Cauldron' }, 'ender_eye': { 'id': 381, 'category': 'Miscellaneous', 0: 'Eye of Ender' }, 'speckled_melon': { 'id': 382, 'category': 'Brewing', 0: 'Glistering Melon' }, 'spawn_egg': { 'id': 383, 'category': 'Miscellaneous', 0: 'Spawn Egg' # Entity data is stored as NBT data. }, 'experience_bottle': { 'id': 384, 'category': 'Miscellaneous', 0: 'Bottle o\' Enchanting' }, 'fire_charge': { 'id': 385, 'category': 'Miscellaneous', 0: 'Fire Charge' }, 'writable_book': { 'id': 386, 'category': 'Miscellaneous', 0: 'Book and Quill' }, # No item 387? 'emerald': { 'id': 388, 'category': 'Materials', 0: 'Emerald' }, 'item_frame': { 'id': 389, 'category': 'Decoration Blocks', 0: 'Item Frame' }, 'flower_pot': { 'id': 390, 'category': 'Decoration Blocks', 0: 'Flower Pot' }, 'carrot': { 'id': 391, 'category': 'Foodstuffs', 0: 'Carrot' }, 'potato': { 'id': 392, 'category': 'Foodstuffs', 0: 'Potato' }, 'baked_potato': { 'id': 393, 'category': 'Foodstuffs', 0: 'Baked Potato' }, 'poisonous_potato': { 'id': 394, 'category': 'Foodstuffs', 0: 'Poisonous Potato' }, 'map': { 'id': 395, 'category': 'Miscellaneous', 0: 'Empty Map' }, 'golden_carrot': { 'id': 396, 'category': 'Brewing', 0: 'Golden Carrot' }, 'skull': { 'id': 397, 'category': 'Decoration Blocks', 0: 'Skeleton Skull', 1: 'Wither Skeleton Skull', 2: 'Zombie Head', 3: 'Head', 4: 'Creeper Head', 5: 'Dragon Head' }, 'carrot_on_a_stick': { 'id': 398, 'category': 'Transportation', 'name': 'Carrot on a Stick', 'uses': 26 }, 'nether_star': { 'id': 399, 'category': 'Materials', 0: 'Nether Star' }, 'pumpkin_pie': { 'id': 400, 'category': 'Foodstuffs', 0: 'Pumpkin Pie' }, # No item 401? 'firework_charge': { 'id': 402, 'category': 'Miscellaneous', 0: 'Firework Star' }, 'enchanted_book': { 'id': 403, 'category': 'Miscellaneous', # Category changes based on enchant. 0: 'Enchanted Book' # Enchant is stored as NBT data. }, 'comparator': { 'id': 404, # If you make an HTTP joke you will be slapped.
'category': 'Redstone', 0: 'Redstone Comparator' }, 'netherbrick': { 'id': 405, 'category': 'Materials', 0: 'Nether Brick' }, 'quartz': { 'id': 406, 'category': 'Materials', 0: 'Nether Quartz' }, 'tnt_minecart': { 'id': 407, 'category': 'Transportation', 0: 'Minecart with TNT' }, 'hopper_minecart': { 'id': 408, 'category': 'Transportation', 0: 'Minecart with Hopper' }, 'prismarine_shard': { 'id': 409, 'category': 'Materials', 0: 'Prismarine Shard' }, 'prismarine_crystals': { 'id': 410, 'category': 'Materials', 0: 'Prismarine Crystals' }, 'rabbit': { 'id': 411, 'category': 'Foodstuffs', 0: 'Raw Rabbit' }, 'cooked_rabbit': { 'id': 412, 'category': 'Foodstuffs', 0: 'Cooked Rabbit' }, 'rabbit_stew': { 'id': 413, 'category': 'Foodstuffs', 0: 'Rabbit Stew' }, 'rabbit_foot': { 'id': 414, 'category': 'Brewing', 0: 'Rabbit\'s Foot' }, 'rabbit_hide': { 'id': 415, 'category': 'Materials', 0: 'Rabbit Hide' }, 'armor_stand': { 'id': 416, 'category': 'Decoration Blocks', 0: 'Armor Stand' }, 'iron_horse_armor': { 'id': 417, 'category': 'Miscellaneous', 0: 'Iron Horse Armor' }, 'golden_horse_armor': { 'id': 418, 'category': 'Miscellaneous', 0: 'Gold Horse Armor' }, 'diamond_horse_armor': { 'id': 419, 'category': 'Miscellaneous', 0: 'Diamond Horse Armor' }, 'lead': { 'id': 420, 'category': 'Tools', 0: 'Lead' }, 'name_tag': { 'id': 421, 'category': 'Tools', 0: 'Name Tag' }, # No item 422? 'mutton': { 'id': 423, 'category': 'Foodstuffs', 0: 'Raw Mutton' }, 'cooked_mutton': { 'id': 424, 'category': 'Foodstuffs', 0: 'Cooked Mutton' }, 'banner': { 'id': 425, 'category': 'Decoration Blocks', 0: 'Black Banner', # Colours are in reverse order...? 1: 'Red Banner', 2: 'Green Banner', 3: 'Brown Banner', 4: 'Blue Banner', 5: 'Purple Banner', 6: 'Cyan Banner', 7: 'Light Gray Banner', 8: 'Gray Banner', 9: 'Pink Banner', 10: 'Lime Banner', 11: 'Yellow Banner', 12: 'Light Blue Banner', 13: 'Magenta Banner', 14: 'Orange Banner', 15: 'White Banner' }, 'end_crystal': { 'id': 426, 'category': 'Decoration Blocks', 0: 'End Crystal' }, 'spruce_door': { 'id': 427, 'category': 'Redstone', 0: 'Spruce Door' }, 'birch_door': { 'id': 428, 'category': 'Redstone', 0: 'Birch Door' }, 'jungle_door': { 'id': 429, 'category': 'Redstone', 0: 'Jungle Door' }, 'acacia_door': { 'id': 430, 'category': 'Redstone', 0: 'Acacia Door' }, 'dark_oak_door': { 'id': 431, 'category': 'Redstone', 0: 'Dark Oak Door' }, 'chorus_fruit': { 'id': 432, 'category': 'Materials', 0: 'Chorus Fruit' }, 'chorus_fruit_popped': { 'id': 433, 'category': 'Materials', 0: 'Popped Chorus Fruit' }, 'beetroot': { 'id': 434, 'category': 'Foodstuffs', 0: 'Beetroot' }, 'beetroot_seeds': { 'id': 435, 'category': 'Materials', 0: 'Beetroot Seeds' }, 'beetroot_soup': { 'id': 436, 'category': 'Foodstuffs', 0: 'Beetroot Soup' }, 'dragon_breath': { 'id': 437, 'category': 'Brewing', 0: 'Dragon\'s Breath' }, 'splash_potion': { 'id': 438, 'category': 'Brewing', 0: 'Splash Potion' # Potion is stored as NBT data. }, 'spectral_arrow': { 'id': 439, 'category': 'Combat', 0: 'Spectral Arrow' }, 'tipped_arrow': { 'id': 440, 'category': 'Combat', 0: 'Tipped Arrow' # Arrow type is stored as NBT data. }, 'lingering_potion': { 'id': 441, 'category': 'Brewing', 0: 'Lingering Potion' # Potion is stored as NBT data. 
}, 'shield': { 'id': 442, 'category': 'Combat', 'name': 'Shield', 'uses': 337 }, 'elytra': { 'id': 443, 'category': 'Transportation', 'name': 'Elytra', 'uses': 431 }, 'spruce_boat': { 'id': 444, 'category': 'Transportation', 0: 'Spruce Boat' }, 'birch_boat': { 'id': 445, 'category': 'Transportation', 0: 'Birch Boat' }, 'jungle_boat': { 'id': 446, 'category': 'Transportation', 0: 'Jungle Boat' }, 'acacia_boat': { 'id': 447, 'category': 'Transportation', 0: 'Acacia Boat' }, 'dark_oak_boat': { 'id': 448, 'category': 'Transportation', 0: 'Dark Oak Boat' }, # Missing item... # ... # Start of 2256 block. 'record_13': { 'id': 2256, 'category': 'Miscellaneous', 0: 'Music Disc' }, 'record_cat': { 'id': 2257, 'category': 'Miscellaneous', 0: 'Music Disc' }, 'record_blocks': { 'id': 2258, 'category': 'Miscellaneous', 0: 'Music Disc' }, 'record_chirp': { 'id': 2259, 'category': 'Miscellaneous', 0: 'Music Disc' }, 'record_far': { 'id': 2260, 'category': 'Miscellaneous', 0: 'Music Disc' }, 'record_mall': { 'id': 2261, 'category': 'Miscellaneous', 0: 'Music Disc' }, 'record_mellohi': { 'id': 2262, 'category': 'Miscellaneous', 0: 'Music Disc' }, 'record_stal': { 'id': 2263, 'category': 'Miscellaneous', 0: 'Music Disc' }, 'record_strad': { 'id': 2264, 'category': 'Miscellaneous', 0: 'Music Disc' }, 'record_ward': { 'id': 2265, 'category': 'Miscellaneous', 0: 'Music Disc' }, 'record_11': { 'id': 2266, 'category': 'Miscellaneous', 0: 'Music Disc' }, 'record_wait': { 'id': 2267, 'category': 'Miscellaneous', 0: 'Music Disc' } } } enchantments = { 'minecraft': { '__VERSION__': 1.10, '__LANGUAGE__': 'en_US', # Begin Armour Block. 'protection': { 'id': 0, 'name': 'Protection' }, 'fire_protection': { 'id': 1, 'name': 'Fire Protection' }, 'feather_falling': { 'id': 2, 'name': 'Feather Falling' }, 'blast_protection': { 'id': 3, 'name': 'Blast Protection' }, 'projectile_protection': { 'id': 4, 'name': 'Projectile Protection' }, 'respiration': { 'id': 5, 'name': 'Respiration' }, 'aqua_affinity': { 'id': 6, 'name': 'Aqua Affinity' }, 'thorns': { 'id': 7, 'name': 'Thorns' }, 'depth_strider': { 'id': 8, 'name': 'Depth Strider' }, 'frost_walker': { 'id': 9, 'name': 'Frost Walker' }, # End Armour Block. # Begin Sword Block. 'sharpness': { 'id': 16, 'name': 'Sharpness' }, 'smite': { 'id': 17, 'name': 'Smite' }, 'bane_of_arthropods': { 'id': 18, 'name': 'Bane of Arthropods' }, 'knockback': { 'id': 19, 'name': 'Knockback' }, 'fire_aspect': { 'id': 20, 'name': 'Fire Aspect' }, 'looting': { 'id': 21, 'name': 'Looting' }, # End Sword Block. # Begin Tools Block. 'efficiency': { 'id': 32, 'name': 'Efficiency' }, 'silk_touch': { 'id': 33, 'name': 'Silk Touch' }, 'unbreaking': { 'id': 34, 'name': 'Unbreaking' }, 'fortune': { 'id': 35, 'name': 'Fortune' }, # End Tools Block. # Begin Bows Block. 'power': { 'id': 48, 'name': 'Power' }, 'punch': { 'id': 49, 'name': 'Punch' }, 'flame': { 'id': 50, 'name': 'Flame' }, 'infinity': { 'id': 51, 'name': 'Infinity' }, # End Bows Block. # Begin Fishing Rods Block. 'luck_of_the_sea': { 'id': 61, 'name': 'Luck of the Sea' }, 'lure': { 'id': 62, 'name': 'Lure' }, # End Fishing Rods Block. # Begin Misc Block. 'mending': { 'id': 70, 'name': 'Mending' } # End Misc Block.
} } # Roman Numeral Conversion # Inspired by: https://stackoverflow.com/a/28777781 romanNumerals = ( (1000, 'M'), (900, 'CM'), (500, 'D'), (400, 'CD'), (100, 'C'), (90, 'XC'), (50, 'L'), (40, 'XL'), (10, 'X'), (9, 'IX'), (5, 'V'), (4, 'IV'), (1, 'I') ) def intToRoman(number): romanString = '' for romanTuple in romanNumerals: div, number = divmod(number, romanTuple[0]) romanString += romanTuple[1] * div return romanString def lookupItem(item, damage=0): mod, item = item.split(':') result = [None, None, None, None] if mod in items and item in items[mod]: if damage in items[mod][item]: result[0] = items[mod][item][damage] elif 'name' in items[mod][item]: result[0] = items[mod][item]['name'] else: result[0] = '[Unknown Name]' if 'uses' in items[mod][item]: result[1] = '{:.1%}'.format((items[mod][item]['uses'] - damage) / float(items[mod][item]['uses'])) if 'armor' in items[mod][item]: result[2] = items[mod][item]['armor'] if 'toughness' in items[mod][item]: result[3] = items[mod][item]['toughness'] else: result[0] = '[Item Not Found]' return result def lookupNumericItem(itemNumeric, damage=0): print('WARNING: Item numeric IDs are deprecated. Please use text IDs.') result = [None, None, None, None] for mod in items.values(): for item in mod.values(): if type(item) is dict and item['id'] == itemNumeric: if damage in item: result[0] = item[damage] elif 'name' in item: result[0] = item['name'] else: result[0] = '[Unknown Name]' if 'uses' in item: result[1] = '{:.1%}'.format((item['uses'] - damage) / float(item['uses'])) if 'armor' in item: result[2] = item['armor'] if 'toughness' in item: result[3] = item['toughness'] break if not result[0]: result[0] = '[Item Not Found]' return result def lookupEnchant(enchant, level=None): mod, enchant = enchant.split(':') result = [None, None] if mod in enchantments and enchant in enchantments[mod]: if 'name' in enchantments[mod][enchant]: result[0] = enchantments[mod][enchant]['name'] else: result[0] = '[Unknown Name]' else: result[0] = '[Enchantment Not Found]' if level: result[1] = intToRoman(level) return result def lookupNumericEnchant(enchantNumeric, level=None): result = [None, None] for mod in enchantments.values(): for enchant in mod.values(): if type(enchant) is dict and enchant['id'] == enchantNumeric: if 'name' in enchant: result[0] = enchant['name'] else: result[0] = '[Unknown Name]' break if not result[0]: result[0] = '[Enchantment Not Found]' if level: result[1] = intToRoman(level) return result
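# Illustrative usage sketch: the expected results noted below follow directly
# from the lookup tables and helpers above (item keys are 'mod:item' strings).
if __name__ == "__main__":
    print(lookupItem('minecraft:stone', 3))           # ['Diorite', None, None, None]
    print(lookupItem('minecraft:iron_sword', 100))    # ['Iron Sword', '60.2%', None, None]
    print(lookupEnchant('minecraft:sharpness', 4))    # ['Sharpness', 'IV']
    print(intToRoman(2016))                           # MMXVI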
gpl-3.0
3,909,941,655,184,969,700
18.160235
101
0.520557
false
2.331938
false
false
false
IZSVenezie/VetEpiGIS-Tool
plugin/xitem_dialog.py
1
1545
# -*- coding: utf-8 -*-

# Form implementation generated from reading ui file 'xitem_dialog_base.ui'
#
# Created by: PyQt5 UI code generator 5.5.1
#
# WARNING! All changes made in this file will be lost!

from PyQt5 import QtCore, QtGui, QtWidgets


class Ui_Dialog(object):
    def setupUi(self, Dialog):
        Dialog.setObjectName("Dialog")
        Dialog.resize(400, 78)
        self.gridLayout = QtWidgets.QGridLayout(Dialog)
        self.gridLayout.setObjectName("gridLayout")
        self.label = QtWidgets.QLabel(Dialog)
        self.label.setObjectName("label")
        self.gridLayout.addWidget(self.label, 0, 0, 1, 1)
        self.lineEdit = QtWidgets.QLineEdit(Dialog)
        self.lineEdit.setObjectName("lineEdit")
        self.gridLayout.addWidget(self.lineEdit, 0, 1, 1, 1)
        self.buttonBox = QtWidgets.QDialogButtonBox(Dialog)
        self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
        self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Cancel|QtWidgets.QDialogButtonBox.Save)
        self.buttonBox.setObjectName("buttonBox")
        self.gridLayout.addWidget(self.buttonBox, 1, 0, 1, 2)

        self.retranslateUi(Dialog)
        self.buttonBox.accepted.connect(Dialog.accept)
        self.buttonBox.rejected.connect(Dialog.reject)
        QtCore.QMetaObject.connectSlotsByName(Dialog)

    def retranslateUi(self, Dialog):
        _translate = QtCore.QCoreApplication.translate
        Dialog.setWindowTitle(_translate("Dialog", "Dialog"))
        self.label.setText(_translate("Dialog", "Item:"))
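# Minimal launcher sketch: the conventional way a pyuic5-generated Ui_Dialog is
# wired onto a QDialog. This block is illustrative only and is not emitted by pyuic5.
if __name__ == "__main__":
    import sys
    app = QtWidgets.QApplication(sys.argv)
    Dialog = QtWidgets.QDialog()
    ui = Ui_Dialog()
    ui.setupUi(Dialog)
    Dialog.show()
    sys.exit(app.exec_())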
gpl-2.0
-5,246,522,193,695,220,000
39.657895
108
0.698382
false
3.951407
false
false
false
hfiguiere/abiword
tools/build_osx_release.py
2
12008
#!/usr/bin/env python # Copyright (C) 2011 Fabiano Fidencio # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 2 # of the License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301 USA. from os import chdir, environ, getcwd, listdir, mkdir, path from shutil import copy2, rmtree from subprocess import PIPE, Popen from sys import argv, exit from argparse import ArgumentParser contents_path = "abiword/AbiWord.app/Contents" def environment_prepare(): abisource_path="/tmp/abisource" mkdir(abisource_path, 0755) path = getcwd() chdir(abisource_path) return path def environment_clean(path): chdir(path) abisource_path="/tmp/abisource" rmtree(abisource_path) def _macports_source_get(): source = "https://distfiles.macports.org/MacPorts/MacPorts-2.0.0.tar.gz" cmd = "curl -O %s" % source p = Popen(cmd, shell=True) p.wait() def _macports_source_extract(): cmd = "tar xzvpf MacPorts-2.0.0.tar.gz" p = Popen(cmd, shell=True) p.wait() def _macports_install(): current_dir = getcwd() chdir("MacPorts-2.0.0") cmd = "./configure --prefix=/tmp/abisource/macports \ && make \ && sudo make install" p = Popen(cmd, shell=True) p.wait() chdir(current_dir) def _macports_env(): macports_path = "/tmp/abisource/macports/" envs = environ env = "%s/bin:%s/sbin:%s" % (macports_path, macports_path, envs["PATH"]) return env def _macports_sync(): envs = _macports_env() cmd = "sudo port -v selfupdate" p = Popen(cmd, shell=True, env={"PATH":envs}) p.wait() def macports_install(): _macports_source_get() _macports_source_extract() _macports_install() _macports_sync() def dependencies_install(): envs = _macports_env() pkgs = "cairo +quartz+no_x11 \ pango +quartz+no_x11 \ fribidi \ libgsf +no_gnome \ redland \ wv +no_x11 \ enchant \ boost" cmd = "sudo port install %s" % pkgs p = Popen(cmd, shell=True, env={"PATH":envs}) p.wait() def _abiword_source_get(): cmd = "svn co http://svn.abisource.com/abiword/trunk abiword" p = Popen(cmd, shell=True) p.wait() def _abiword_fix_macports_path(): cmd = "sed -i -e \ 's/\\/opt\\/local/\\/tmp\\/abisource\\/macports/g' \ configure.in" p = Popen(cmd, shell=True) p.wait() def _abiword_install(): envs = _macports_env() current_dir = getcwd() chdir("abiword") _abiword_fix_macports_path() cmd = "./autogen.sh \ --with-darwinports \ --enable-maintainer-mode \ --disable-static \ --enable-shared \ --enable-plugins=\"docbook epub latex openwriter openxml opml\" \ && make && DESTDIR=`pwd` make install" p = Popen(cmd, shell=True, env={"PATH":envs}) p.wait() chdir(current_dir) def abiword_install(): _abiword_source_get() _abiword_install() def _dep_list_get(lib): #otool -L path cmd = "otool -L %s " %lib #get all .dylib from otool -L cmd += "| grep macports | sed -e 's/.dylib.*$/.dylib/'" #remove white spaces before and after the lib path/name cmd += "| sed 's/^[ \t]*//;s/[ \t]*$//'" p = Popen(cmd, shell=True, stdout=PIPE) p.wait() stdout = p.communicate() return stdout[0].split('\n')[:-1] def _rdeps_get(): libabiword = "" libabiword_deps = [] 
for content in listdir(contents_path + "/Frameworks"): if content.endswith(".dylib"): libabiword = contents_path + "/Frameworks/" + content libabiword_deps = _dep_list_get(libabiword) break plugins = [] plugins_deps = [] for content in listdir(contents_path + "/PlugIns"): if content.endswith(".so"): plugin = contents_path + "/PlugIns/" + content plugins.append(plugin) plugins_deps = _dep_list_get(plugin) abiword = contents_path + "/MacOS/AbiWord" abiword_deps = _dep_list_get(abiword) rdeps = [] for lib in libabiword_deps: rdeps.append(lib) for lib in plugins_deps: if lib not in rdeps: rdeps.append(lib) for lib in abiword_deps: if lib not in rdeps: rdeps.append(lib) rdeps_deps = [] for lib in rdeps: rdeps_deps += _dep_list_get(lib) for lib in rdeps_deps: if lib not in rdeps: rdeps.append(lib) return rdeps, libabiword, abiword, plugins def _rdeps_copy(rdeps): rdeps_path = contents_path + "/Frameworks/rdeps" mkdir(rdeps_path, 0755) n_rdeps = [] for dep in rdeps: dep_path, dep_name = path.split(dep) copy2(dep, rdeps_path) d = "%s/%s" % (rdeps_path, dep_name) cmd = "chmod 755 " + d n_rdeps.append(d) p = Popen(cmd, shell=True) p.wait() return n_rdeps def _fix(lib, new): dep_list = _dep_list_get(lib) for d in dep_list: d_path, d_name = path.split(d) n = "@executable_path/../Frameworks/rdeps/" + d_name cmd = "install_name_tool -change %s %s %s" % (d, n, lib) p = Popen(cmd, shell=True) p.wait() lib_path, lib_name = path.split(lib) cmd = "install_name_tool -id %s %s" % (new, lib) p = Popen(cmd, shell=True) p.wait() def _rdeps_fix(rdeps): for r in rdeps: file_path, file_name = path.split(r) new = "@executable_path/../Frameworks/rdeps/" + file_name _fix(r, new) def _libabiword_fix(libabiword): file_path, file_name = path.split(libabiword) new = "@executable_path/../Frameworks/" + file_name _fix(libabiword, new) def _abiword_fix(abiword): file_path, file_name = path.split(abiword) new = "@executable_path/" + file_name _fix(abiword, new) def _plugins_fix(plugins): for p in plugins: file_path, file_name = path.split(p) new = "@executable_path/../PlugIns/" + file_name _fix(p, new) def do_app(): rdeps, libabiword, abiword, plugins = _rdeps_get() n_rdeps = _rdeps_copy(rdeps) _rdeps_fix(n_rdeps) _libabiword_fix(libabiword) _abiword_fix(abiword) _plugins_fix(plugins) def do_dmg(): mkdir("dmg", 0755) cmd = "cp -a abiword/AbiWord.app dmg/" p = Popen(cmd, shell = True) p.wait() cmd = "ln -s /Applications dmg/" p = Popen(cmd, shell=True) p.wait() cmd = "hdiutil create \ -srcfolder \"dmg\" \ -volname \"AbiWord\" \ -fs HFS+ \ -fsargs \"-c c=64,a=16,e=16\" \ -format UDRW \"AbiWord.dmg\"" p = Popen(cmd, shell=True) p.wait() rmtree("dmg") copy2("AbiWord.dmg", environ["HOME"] + "/Desktop/") if __name__ == "__main__": parser = ArgumentParser(description="Automated dmg generator") parser.add_argument("--macports_path", action="store", dest="macports_path", help="This option will use your current macports' \ installation from MACPORTS_PATH.\n\ ATTENTION: Without this option, macports will \ be downloaded and installed in: \ /tmp/abisource/macports") parser.add_argument("--abisource_path", action="store", dest="abi_path", default=False, help="This option will consider that you have \ AbiWord's sources in your computer, located at \ ABISOURCE_PATH and want to build it and NOT a \ specific version from our SVN.") parser.add_argument("--abisource_revision", action="store", dest="abi_rev", help="This option will get a specific revision from \ AbiWord's SVN.
\ ATTENTION: If this option isn't passed, SVN's \ trunk will be used.") parser.add_argument("--abiword_version", action="store", dest="abi_version", help="This option will get a specific version from \ AbiWord's SVN. \ ATTENTION: If this option isn't passed, SVN's \ trunk will be used.") parser.add_argument("--no_deps", action="store_true", dest="no_deps", default=False, help="This option won't install AbiWord's \ dependencies in your computer. So, it is YOUR \ JOB to install all needed dependencies. Of \ course, you'll need to install macports beforehand.") parser.add_argument("--start_from_build", action="store_true", dest="start_from_build", default=False, help="This option will consider that you have \ macports and all AbiWord's dependencies \ installed. \ ATTENTION: This option will build AbiWord and \ create a dmg file. So, it is REALLY NECESSARY \ that you pass the --abisource_path option.") parser.add_argument("--start_from_app", action="store", dest="start_from_app", help="This option will use a generated .app file \ to fix all linkage and put all needed libs \ into .app in a specific folder. After that a \ dmg file will be created(Don't put '/' at the end of .app package path). \ ATTENTION: It is REALLY NECESSARY that you pass \ the --macports_path option. Eg: python build_script.py --start_from_app /Users/abi/Abiword.app") parser.add_argument("--start_from_linkage_fixed", action="store", dest="start_from_linkage_fixed", help="This option will use a generated .app file \ with linkage working properly to create a \ .dmg file.\ ATTENTION: It is REALLY NECESSARY that you pass \ the --macports_path option.") if len(argv) < 2: parser.print_help() exit() else: args = parser.parse_args() # print args current_dir = getcwd() def cleanAndPrint(): environment_clean(current_dir) print "****************************************************" print "* AbiWord.dmg was created in your ~/Desktop. Enjoy! *" print "****************************************************" dict_args=vars(args) print dict_args if dict_args['start_from_app'] != None: contents_path = dict_args['start_from_app'] + "/Contents" do_app() do_dmg() cleanAndPrint() exit() else: environment_prepare() macports_install() dependencies_install() abiword_install() do_app() do_dmg() cleanAndPrint()
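# Illustrative invocations (the .app path is a hypothetical example):
#   python build_osx_release.py
#       -> bootstraps MacPorts under /tmp/abisource, installs the dependencies,
#          builds AbiWord, and copies AbiWord.dmg to ~/Desktop
#   python build_osx_release.py --start_from_app /Users/abi/AbiWord.app
#       -> only fixes the library linkage inside the .app and repackages the dmg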
gpl-2.0
9,047,993,009,526,091,000
31.193029
122
0.541889
false
3.674419
false
false
false
Valchris/IEEEXtreme_WorkingAsIntended
2012/AA_Alex.py
1
1107
__author__ = 'alexander'
import sys

initial_bunnies = long(sys.stdin.readline())

bunnies = dict()
bunnies['adults'] = initial_bunnies
bunnies['babies'] = long(0)
bunnies['juveniles'] = long(0)
bunnies['juveniles2'] = long(0)

for i in range(0, 365, 15):
    if i % 2 == 0:
        bunnies['babies'] = long(bunnies['babies'] * 0.75)  # Death to flu
        bunnies['juveniles'] = long(bunnies['juveniles'] * 0.75)  # Death to flu
        bunnies['juveniles2'] = long(bunnies['juveniles2'] * 0.75)  # Death to flu
        bunnies['adults'] = long(bunnies['adults'] * 0.75)  # Death to flu
    bunnies['adults'] += long(bunnies['juveniles2'] * 0.70)  # Forest migration
    if i == 0:
        continue
    bunnies['juveniles2'] = bunnies['juveniles']  # Juveniles growing
    bunnies['juveniles'] = long(bunnies['babies'])  # Babies growing
    bunnies['babies'] = long(bunnies['adults'] * 0.90)  # Babies being born / 10% of babies die at birth
    if bunnies['adults'] == 0 and bunnies['babies'] == 0 and bunnies['juveniles'] == 0:
        break

print long(bunnies['adults'] + bunnies['babies'] + bunnies['juveniles'])
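# Illustrative run (the initial adult population is read from stdin; the value
# 1000 is a placeholder):
#   $ echo 1000 | python AA_Alex.py
# prints the total surviving population after the 365-day simulation.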
mit
-221,296,799,781,316,100
33.625
101
0.633243
false
2.427632
false
false
false
RRSCDS/douban-mining
src/Python/api-oauth/oauthx.py
1
2742
# -*- coding: utf8 -*-
import urllib, urllib2
import json

# The API key and secret are obtained by creating a Douban app (no review needed):
# http://developers.douban.com/apikey/
APIKEY = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
SECRET = 'xxxxxxxxxxxxxxxx'
CALLBACK_URL = 'http://www.douban.com'
GETTOKEN_URL = 'https://www.douban.com/service/auth2/token'

def getToken(code):
    postParams = {
        'client_id': APIKEY,
        'client_secret': SECRET,
        'redirect_uri': CALLBACK_URL,
        'grant_type': 'authorization_code',
        'code': code
    }
    # These headers may not be strictly necessary
    headers = {
        'Host': 'www.douban.com',
        'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:34.0) Gecko/20100101 Firefox/34.0',
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
        'Connection': 'keep-alive'
    }
    req = urllib2.Request(
        url = GETTOKEN_URL,
        data = urllib.urlencode(postParams),
        headers = headers
    )
    # Get the access token
    try:
        r = urllib2.urlopen(req).read()
        print r
        return json.loads(r)['access_token']
    # Get detailed error msg if 400 bad request occurs:
    except urllib2.HTTPError as e:
        print 'Error: ' + e.read()
        raise SystemExit(e)

# Authorization code can be obtained manually using browser,
# see http://developers.douban.com/wiki/?title=oauth2 (the "obtaining the
# authorization_code" section)
# Each code can only be used once to get an access token (?)
# Tokens are relatively long-lived - no need to get a code every time
def apiTest(user, count=1, until_id=''):
    # Use old token from file if there is one, otherwise get new token
    f = open('token.txt', 'a+')
    tok = f.read()
    if len(tok) == 0:
        tok = getToken(raw_input('input code here:'))  # input code manually
        f.write(tok)
    f.close()
    print 'Current token:', tok

    # Request url and headers
    url = 'https://api.douban.com/shuo/v2/statuses/user_timeline/'
    url = url + user + '?count=%s&until_id=%s'%(count, until_id)
    headers = {'Authorization': 'Bearer '+tok}

    # Get data
    try:
        req2 = urllib2.Request(url=url, headers=headers)
        resp2 = urllib2.urlopen(req2)
        rj = resp2.read()  # JSON-formatted data
        print rj
        r = json.loads(rj)  # Convert to a Python list; each status is a dict object
        print '%s statuses loaded' % len(r)
    except urllib2.HTTPError as e:
        print 'Error: ' + e.read()
        raise SystemExit(e)

if __name__ == "__main__":
    apiTest('homeland', 5, '1605326442')

# Note that contrary to what douban api help says, until_id is NOT inclusive,
# i.e. only statuses with id < until_id will be loaded.
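# Sketch of the manual authorization step referenced above (endpoint and
# parameters per the linked OAuth2 wiki; the values shown are placeholders):
#   https://www.douban.com/service/auth2/auth?client_id=<APIKEY>
#       &redirect_uri=http://www.douban.com&response_type=code
# After granting access, the browser is redirected to CALLBACK_URL with a
# ?code=... parameter, which is the one-off code that getToken() exchanges
# for an access token.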
mit
1,631,758,914,176,409,900
29.193182
101
0.615587
false
3.106433
false
false
false
thesealion/writelightly
writelightly/main.py
1
5266
import curses import datetime import sys from writelightly.calendar import Calendar from writelightly.conf import Config from writelightly.edit import edit_date, get_edits, clean_tmp, show_edits from writelightly.metadata import Metadata from writelightly.screen import ScreenManager, TextArea from writelightly.tags import show_tags, show_tag from writelightly.utils import entry_exists, parse_date, WLError, WLQuit import locale locale.setlocale(locale.LC_ALL, ('en_US', 'UTF-8')) def show_calendar(): """Show an interactive calendar. Show the calendar on the left side of the screen and some metadata about the selected date on the right. Any entry can be edited in external editor. """ today = datetime.date.today() year, month = today.year, today.month cal = Calendar(year, month, today.day, entry_exists) metadata = Metadata.get(year, month) text_area = TextArea() ScreenManager.draw_all() d = cal.get_current_date() text_area.show_text(metadata.text(d.day)) keys = Config.calendar_keys while 1: try: kn = curses.keyname(cal.window.getch()) except KeyboardInterrupt: break except ValueError: continue if kn in Config.general_keys['quit']: raise WLQuit if kn in Config.general_keys['quit_mode']: break if kn == 'KEY_RESIZE': ScreenManager.resize() if cal.hidden: continue if kn in keys['left']: moved = cal.move_left() if not moved: cal = cal.get_previous_calendar() cal.draw() metadata = Metadata.get(cal.year, cal.month) text_area.show_text(metadata.text(cal.get_current_day())) elif kn in keys['right']: moved = cal.move_right() if not moved: cal = cal.get_next_calendar() cal.draw() metadata = Metadata.get(cal.year, cal.month) text_area.show_text(metadata.text(cal.get_current_day())) elif kn in keys['down']: cal.move_down() text_area.show_text(metadata.text(cal.get_current_day())) elif kn in keys['up']: cal.move_up() text_area.show_text(metadata.text(cal.get_current_day())) elif kn in keys['edit']: date = cal.get_current_date() edit_date(date) metadata.load_day(date.day) cal.set_active(entry_exists(date)) text_area.show_text(metadata.text(date.day)) elif kn in keys['tags']: show_tags(cal.area_id, text_area) ScreenManager.restore_area(cal.area_id) cal.reinit() text_area.set_title() text_area.show_text(metadata.text(cal.get_current_day())) elif kn in keys['edits']: date = cal.get_current_date() edits = get_edits(date) if edits: show_edits(date, edits, text_area.area_id) ScreenManager.restore_area(text_area.area_id) text_area.show_text(metadata.text(date.day)) elif kn in keys['prev_month']: cal = cal.get_previous_calendar(cal.get_current_day()) cal.draw() metadata = Metadata.get(cal.year, cal.month) text_area.show_text(metadata.text(cal.get_current_day())) elif kn in keys['next_month']: cal = cal.get_next_calendar(cal.get_current_day()) cal.draw() metadata = Metadata.get(cal.year, cal.month) text_area.show_text(metadata.text(cal.get_current_day())) Metadata.write_all() clean_tmp() def edit_single_date(date): """Edit a single entry in external editor without initializing screen.""" date = parse_date(date) if not date: raise WLError('Unrecognised date format\n') edit_date(date) metadata = Metadata(date.year, date.month) metadata.load_day(date.day) metadata.write() usage = '''Usage: %(name)s %(name)s ( <date> | today | yesterday ) %(name)s -t [<tag>] ''' % {'name': sys.argv[0]} def wrapper(func, with_screen=False): if with_screen: ScreenManager.init() error = None try: func() except WLQuit: pass except WLError as exc: error = exc finally: if with_screen: ScreenManager.quit() if error is not None: 
sys.stderr.write('%s\n' % error) def main(): from getopt import getopt, GetoptError from functools import partial try: options, args = getopt(sys.argv[1:], 'th', ['help']) except GetoptError as exc: sys.stderr.write('%s\nTry `%s -h` for help\n' % (exc, sys.argv[0])) sys.exit(1) init_screen = True option_names = [o[0] for o in options] if '-h' in option_names or '--help' in option_names: print usage sys.exit() if options: if args: func = partial(show_tag, args[0]) else: func = show_tags else: if args: func = partial(edit_single_date, args[0]) init_screen = False else: func = show_calendar wrapper(func, init_screen)
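# Illustrative invocations, mirroring the usage string above ('sometag' is a
# placeholder value):
#   python writelightly/main.py              # interactive calendar
#   python writelightly/main.py today        # edit today's entry directly
#   python writelightly/main.py -t sometag   # show entries tagged 'sometag'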
mit
-884,814,466,088,044,500
32.75641
79
0.583175
false
3.690259
false
false
false
allofhercats/whiskey
proto/literal_print.py
1
1544
def get_n_digits(value, base):
    n = 0
    if value == 0:
        return 1
    else:
        while value > 0:
            value //= base
            n += 1
    return n

def literal_int_to_string(value, base, width = 0, pad = '0', prefix = True):
    rtn = ""
    if base == 2 and prefix:
        rtn = "0b"
    elif base == 8 and prefix:
        rtn = "0"
    elif base == 16 and prefix:
        rtn = "0x"
    n_digits = get_n_digits(value, base)
    if width > 0 and width > n_digits:
        i = 0
        while i < width - n_digits:
            rtn += pad
            i += 1
    fac = base ** (n_digits - 1)
    i = 0
    while i < n_digits:
        digit = (value // fac) % base
        if base == 2:
            rtn += "01"[digit]
        elif base == 8:
            rtn += "01234567"[digit]
        elif base == 10:
            rtn += "0123456789"[digit]
        elif base == 16:
            rtn += "0123456789abcdef"[digit]
        else:
            raise NotImplementedError()
        fac //= base
        i += 1
    return rtn

def literal_float_to_string(value, precision = 5, strip = True):
    if value < 0.0:
        return "-" + literal_float_to_string(-value, precision, strip)
    rtn = literal_int_to_string(int(value), 10)
    rtn += "."
    value1 = value - int(value)
    n_stripped = 0
    i = 0
    while i < precision:
        value1 *= 10.0
        digit = int(value1) % 10
        if digit == 0:
            n_stripped += 1
        else:
            n_stripped = 0
        if not strip:
            rtn += "0123456789"[digit]
        i += 1
    if strip:
        value1 = value - int(value)
        i = 0
        while i < precision - n_stripped:
            value1 *= 10.0
            digit = int(value1) % 10
            rtn += "0123456789"[digit]
            i += 1
    return rtn

if __name__ == "__main__":
    print(literal_float_to_string(3.1400000000001, 20))
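# A few spot checks (illustrative; each result can be traced through the digit
# loops above):
#   literal_int_to_string(255, 16)          -> '0xff'
#   literal_int_to_string(5, 2)             -> '0b101'
#   literal_int_to_string(42, 10, width=5)  -> '00042'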
mit
2,214,180,999,662,640,600
17.60241
76
0.577073
false
2.573333
false
false
false
ngageoint/scale
scale/scale/settings.py
1
14209
""" Django settings for scale_test project. For more information on this file, see https://docs.djangoproject.com/en/1.7/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.7/ref/settings/ """ import os import scale import sys import dj_database_url def get_env_boolean(variable_name, default=False): return os.getenv(variable_name, str(default)).lower() in ('yes', 'true', 't', '1') # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(__file__)) # Project version VERSION = scale.__version__ DOCKER_VERSION = scale.__docker_version__ # Mesos connection information. Default for -m # This can be something like "127.0.0.1:5050" # or a zookeeper url like 'zk://host1:port1,host2:port2,.../path` MESOS_MASTER = os.getenv('MESOS_MASTER', 'zk://leader.mesos:2181/mesos') # We by default, use the '*' role, meaning all resources are unreserved offers are received # By default, use the '*' role, meaning all resources are unreserved offers are received MESOS_ROLE = os.getenv('MESOS_ROLE', '*') # Used to set the user that Mesos tasks are launched by Docker. This should NEVER be set to root # and must be a user name NOT a Linux UID. Mesos chokes on UIDs. CONTAINER_PROCESS_OWNER = os.getenv('CONTAINER_PROCESS_OWNER', 'nobody') # By default, the accepted resources match reservations to the MESOS_ROLE ACCEPTED_RESOURCE_ROLE = os.getenv('ACCEPTED_RESOURCE_ROLE', MESOS_ROLE) # By default, all API calls require authentication. PUBLIC_READ_API = get_env_boolean('PUBLIC_READ_API') # Placeholder for service secret that will be overridden in local_settings_docker SERVICE_SECRET = None # Zookeeper URL for scheduler leader election. If this is None, only a single scheduler is used. SCHEDULER_ZK = None # The full name for the Scale Docker image (without version tag) SCALE_DOCKER_IMAGE = 'geoint/scale' # The location of the config file containing Docker credentials # The URI value should point to an externally hosted location such as a webserver or hosted S3 bucket. # The value will be an http URL such as 'http://static.mysite.com/foo/.dockercfg' CONFIG_URI = None # Directory for rotating metrics storage METRICS_DIR = None # fluentd warning levels, or -1 to disable warnings FLUENTD_BUFFER_WARN = int(os.environ.get('FLUENTD_BUFFER_WARN', -1)) FLUENTD_BUFFER_SIZE_WARN = int(os.environ.get('FLUENTD_BUFFER_SIZE_WARN', -1)) # URL for fluentd, or None to disable fluentd LOGGING_ADDRESS = os.environ.get('LOGGING_ADDRESS') LOGGING_HEALTH_ADDRESS = os.environ.get('LOGGING_HEALTH_ADDRESS') # Base URL of elasticsearch nodes ELASTICSEARCH_URL = os.environ.get('ELASTICSEARCH_URL') # Placeholder for elasticsearch version. Supplied in production by local_settings_docker.py ELASTICSEARCH_VERSION = None # Placeholder for Elasticsearch object. Needed for unit tests. ELASTICSEARCH = None DATABASE_URL = os.getenv('DATABASE_URL') #root url for scale installation SCALE_VHOST = os.getenv('SCALE_VHOST', 'localhost:8000') # Broker URL for connection to messaging backend BROKER_URL = 'amqp://guest:guest@localhost:5672//' QUEUE_NAME = 'scale-command-messages' MESSSAGE_QUEUE_DEPTH_WARN = int(os.environ.get('MESSSAGE_QUEUE_DEPTH_WARN', -1)) # Queue limit SCHEDULER_QUEUE_LIMIT = int(os.environ.get('SCHEDULER_QUEUE_LIMIT', 500)) # The max number of times the scheduler will try to reconnect to # mesos if disconnected. 
SCHEDULER_MAX_RECONNECT = int(os.environ.get('SCHEDULER_MAX_RECONNECT', 3)) # Base URL of vault or DCOS secrets store, or None to disable secrets SECRETS_URL = None # Public token if DCOS secrets store, or privleged token for vault SECRETS_TOKEN = None # DCOS service account name, or None if not DCOS secrets store DCOS_SERVICE_ACCOUNT = None # Flag for raising SSL warnings associated with secrets transactions. SECRETS_SSL_WARNINGS = True # SECURITY WARNING: keep the secret key used in production secret! INSECURE_DEFAULT_KEY = 'this-key-is-insecure-and-should-never-be-used-in-production' SECRET_KEY = INSECURE_DEFAULT_KEY # Used to write the superuser password MESOS_SANDBOX = os.getenv('MESOS_SANDBOX') # Security settings for production SECURE_CONTENT_TYPE_NOSNIFF = True SECURE_BROWSER_XSS_FILTER = True SESSION_COOKIE_SECURE = get_env_boolean('SESSION_COOKIE_SECURE', True) X_FRAME_OPTIONS = 'DENY' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = False # authentication toggle, to be used for testing AUTHENTICATION_ENABLED = get_env_boolean('AUTHENTICATION_ENABLED', True) ALLOWED_HOSTS = ['localhost', '127.0.0.1'] # used primarily by debug-toolbar to dictate what client url has access if os.environ.get('INTERNAL_IP'): INTERNAL_IPS = [os.environ.get('INTERNAL_IP')] # Application definition INSTALLED_APPS = ( 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'django.contrib.gis', 'rest_framework', 'rest_framework.authtoken', 'debug_toolbar', ############### # Social Auth # ############### 'oauth2_provider', 'social_django', 'rest_framework_social_oauth2', # Scale apps 'accounts', 'batch', 'cli', 'data', 'diagnostic', 'error', 'ingest', 'job', 'mesos_api', 'messaging', 'metrics', 'node', 'product', 'queue', 'recipe', 'scheduler', 'shared_resource', 'source', 'storage', 'trigger', 'util', 'vault' ) MIDDLEWARE = [ 'debug_toolbar.middleware.DebugToolbarMiddleware', 'util.middleware.MultipleProxyMiddleware', 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', 'util.middleware.ExceptionLoggingMiddleware', ] TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'debug': False, 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ############### # Social Auth # ############### 'social_django.context_processors.backends', 'social_django.context_processors.login_redirect', ], }, }, ] AUTHENTICATION_BACKENDS = [ 'django.contrib.auth.backends.ModelBackend', ] REST_FRAMEWORK = { 'DEFAULT_FILTER_BACKENDS': ( 'django_filters.rest_framework.DjangoFilterBackend', ), 'DEFAULT_PAGINATION_CLASS': 'util.rest.DefaultPagination', 'DEFAULT_RENDERER_CLASSES': ( 'rest_framework.renderers.JSONRenderer', 'rest_framework.renderers.BrowsableAPIRenderer', 'rest_framework.renderers.AdminRenderer', ), 'ALLOWED_VERSIONS': ('v6', 'v7'), 'DEFAULT_VERSION': 'v6', 'DEFAULT_VERSIONING_CLASS': 'rest_framework.versioning.NamespaceVersioning', } if 
AUTHENTICATION_ENABLED: REST_FRAMEWORK['DEFAULT_AUTHENTICATION_CLASSES'] = ( 'rest_framework.authentication.SessionAuthentication', 'rest_framework.authentication.TokenAuthentication', ############### # Social Auth # ############### 'oauth2_provider.contrib.rest_framework.OAuth2Authentication', 'rest_framework_social_oauth2.authentication.SocialAuthentication', ) REST_FRAMEWORK['DEFAULT_PERMISSION_CLASSES'] = ( 'util.rest.ScaleAPIPermissions', ) else: REST_FRAMEWORK['DEFAULT_AUTHENTICATION_CLASSES'] = () REST_FRAMEWORK['DEFAULT_PERMISSION_CLASSES'] = () REST_FRAMEWORK['UNAUTHENTICATED_USER'] = None ROOT_URLCONF = 'scale.urls' WSGI_APPLICATION = 'scale.wsgi.application' # Database # https://docs.djangoproject.com/en/1.7/ref/settings/#databases DATABASES = { 'default': dj_database_url.config(default='sqlite://%s' % os.path.join(BASE_DIR, 'db.sqlite3')) } # Internationalization # https://docs.djangoproject.com/en/1.7/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True LOGIN_REDIRECT_URL = '/' ############################# # GEOAxIS specific settings # ############################# SOCIAL_AUTH_NEW_USER_REDIRECT_URL = '/' # Redirect after directly hitting login endpoint SOCIAL_AUTH_LOGIN_REDIRECT_URL = '/' DEFAULT_AUTH_PIPELINE = ( 'social_core.pipeline.social_auth.social_details', 'social_core.pipeline.social_auth.social_uid', 'social_core.pipeline.social_auth.auth_allowed', 'social_core.pipeline.social_auth.social_user', 'social_core.pipeline.user.get_username', 'social_core.pipeline.mail.mail_validation', 'social_core.pipeline.social_auth.associate_by_email', 'social_core.pipeline.user.create_user', 'social_core.pipeline.social_auth.associate_user', 'social_core.pipeline.social_auth.load_extra_data', 'social_core.pipeline.user.user_details' ) SOCIAL_AUTH_GEOAXIS_KEY = os.getenv('GEOAXIS_KEY') SOCIAL_AUTH_GEOAXIS_SECRET = os.getenv('GEOAXIS_SECRET') SOCIAL_AUTH_GEOAXIS_HOST = os.getenv('GEOAXIS_HOST', 'geoaxis.gxaccess.com') OAUTH_GEOAXIS_USER_FIELDS = os.getenv( 'GEOAXIS_USER_FIELDS', 'username, email, last_name, first_name') SOCIAL_AUTH_GEOAXIS_USER_FIELDS = map( str.strip, OAUTH_GEOAXIS_USER_FIELDS.split(',')) OAUTH_GEOAXIS_SCOPES = os.getenv('GEOAXIS_SCOPES', 'UserProfile.me') SOCIAL_AUTH_GEOAXIS_SCOPE = map(str.strip, OAUTH_GEOAXIS_SCOPES.split(',')) # GeoAxisOAuth2 will cause all login attempt to fail if # SOCIAL_AUTH_GEOAXIS_HOST is None GEOAXIS_ENABLED = False if SOCIAL_AUTH_GEOAXIS_KEY and len(SOCIAL_AUTH_GEOAXIS_KEY) > 0: GEOAXIS_ENABLED = True AUTHENTICATION_BACKENDS += ( 'django_geoaxis.backends.geoaxis.GeoAxisOAuth2', ) # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.11/howto/static-files/ STATIC_URL = '/static/' STATIC_ROOT = 'static/' STATICFILES_DIRS = () STATICFILES_FINDERS = ( 'django.contrib.staticfiles.finders.FileSystemFinder', 'django.contrib.staticfiles.finders.AppDirectoriesFinder', ) # Logging configuration LOG_DIR = os.path.join(BASE_DIR, 'logs') if not os.path.exists(LOG_DIR): os.makedirs(LOG_DIR) LOG_NAME = 'scale' LOG_FORMATTERS = { 'standard': { 'format': ('%(asctime)s %(levelname)s ' + '[%(name)s(%(lineno)s)] %(message)s'), 'datefmt': '%Y-%m-%d %H:%M:%S', }, 'db-standard': { 'format': ('[%(name)s(%(lineno)s)] %(message)s'), } } LOG_FILTERS = { 'require_debug_false': { '()': 'django.utils.log.RequireDebugFalse' }, 'debug_info_only':{ '()':'scale.custom_logging.UserFilter', } } LOG_HANDLERS = { 'null': { 'level': 'DEBUG', 'class': 'logging.NullHandler', }, 'mesoshttp' : { 'level': 
'DEBUG', 'class': 'logging.StreamHandler', 'formatter': 'standard', 'stream': sys.stdout }, 'console': { 'level': 'DEBUG', 'class': 'logging.StreamHandler', 'formatter': 'standard', 'stream': sys.stdout }, 'console-stderr': { 'level': 'WARNING', 'class': 'logging.StreamHandler', 'formatter': 'standard', 'stream': sys.stderr }, 'console-stdout': { 'level': 'DEBUG', 'class': 'logging.StreamHandler', 'formatter': 'standard', 'stream': sys.stdout, 'filters':['debug_info_only'] }, 'log-db': { 'level': 'WARNING', 'class': 'error.handlers.DatabaseLogHandler', 'formatter': 'db-standard', 'model': 'error.models.LogEntry', }, } LOG_CONSOLE_DEBUG = { 'version': 1, 'formatters': LOG_FORMATTERS, 'filters': LOG_FILTERS, 'handlers': LOG_HANDLERS, 'loggers': { '': { 'handlers': ['console-stdout','console-stderr'], 'level': 'DEBUG', }, }, } LOG_CONSOLE_INFO = { 'version': 1, 'formatters': LOG_FORMATTERS, 'filters': LOG_FILTERS, 'handlers': LOG_HANDLERS, 'loggers': { '': { 'handlers': ['console-stdout','console-stderr'], 'level': 'INFO', }, }, } LOG_CONSOLE_ERROR = { 'version': 1, 'formatters': LOG_FORMATTERS, 'filters': LOG_FILTERS, 'handlers': LOG_HANDLERS, 'loggers': { '': { 'handlers': ['console-stderr'], 'level': 'ERROR', }, }, } LOG_CONSOLE_WARNING = { 'version': 1, 'formatters': LOG_FORMATTERS, 'filters': LOG_FILTERS, 'handlers': LOG_HANDLERS, 'loggers': { '': { 'handlers': ['console-stderr'], 'level': 'WARNING', }, }, } LOG_CONSOLE_CRITICAL = { 'version': 1, 'formatters': LOG_FORMATTERS, 'filters': LOG_FILTERS, 'handlers': LOG_HANDLERS, 'loggers': { '': { 'handlers': ['console-stderr'], 'level': 'CRITICAL', }, }, } LOGGING = LOG_CONSOLE_DEBUG # Hack to fix ISO8601 for datetime filters. # This should be taken care of by a future django fix. And might even be handled # by a newer version of django-rest-framework. Unfortunately, both of these solutions # will accept datetimes without timezone information which we do not want to allow # see https://code.djangoproject.com/tickets/23448 # Solution modified from http://akinfold.blogspot.com/2012/12/datetimefield-doesnt-accept-iso-8601.html from django.forms import fields from util.parse import parse_datetime fields.DateTimeField.strptime = lambda _self, datetime_string, _format: parse_datetime(datetime_string)
apache-2.0
-3,697,003,481,016,563,700
29.622845
103
0.661553
false
3.472385
false
false
false
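Most of the toggles in the settings module above funnel through its get_env_boolean helper. A minimal, self-contained sketch of that parsing rule; the function body is copied from the record, while the PUBLIC_READ_API sample value set here is hypothetical:

import os

def get_env_boolean(variable_name, default=False):
    # 'yes', 'true', 't', '1' (any case) are truthy; anything else is falsy.
    return os.getenv(variable_name, str(default)).lower() in ('yes', 'true', 't', '1')

os.environ['PUBLIC_READ_API'] = 'TRUE'
assert get_env_boolean('PUBLIC_READ_API') is True
assert get_env_boolean('MISSING_FLAG') is False            # unset -> default False
assert get_env_boolean('MISSING_FLAG', default=True) is True

Note that the default is round-tripped through str(), which is why passing default=True works: 'True'.lower() is 'true'.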
TeamADEA/Hunger_Games
HG_Code/Model.py
1
7064
import numpy as np import copy import time from Kat import Kat from Visualize import Visualizer from SimManager import sim_manager from hg_settings import * from Hunger_Grid import hunger_grid import sys import os STEP_SIZE = 10 # 0 = only last frame, # 1 = every frame, # N = every N frames # -1 = don't show tki_breakdown = np.zeros(NUM_OF_GENERATIONS*6).reshape(NUM_OF_GENERATIONS, 6) full_graph = np.zeros(NUM_OF_SPECIES*NUM_OF_GENERATIONS).reshape(NUM_OF_SPECIES, NUM_OF_GENERATIONS) full_graph_bk = np.zeros(NUM_OF_SPECIES*2).reshape(NUM_OF_SPECIES, 2) def run_model(from_lava = .02, to_lava = .02, from_berry = .05, to_berry = .05\ , from_mut=10, to_mut=10, from_gen = 33, to_gen = 33, \ t_name = 'Default', frames = -1): global STEP_SIZE STEP_SIZE = frames progenitor = Kat(0,0) grid = hunger_grid() vis = Visualizer(grid) start_time = time.time() # Calculate the seed settings for each specie. This is used to run multiple # species in a row without needing to manually set it def calc_steps(from_num, to_num): array = np.arange(1, NUM_OF_SPECIES+1, dtype='float') if(from_num == to_num): # If values match. fill with only 1 value array[:] = from_num else: # fill with incemental steps inc = (float(to_num) - from_num) / float(NUM_OF_SPECIES) array = np.arange(from_num, to_num, inc, dtype='float') return copy.deepcopy(array) #Fill arrays for each specie, these array determine % of land for each specie lava_chance_array = calc_steps(from_lava, to_lava) berry_chance_array = calc_steps(from_berry, to_berry) mutate_chance_array = calc_steps(from_mut, to_mut) generate_chance_array = calc_steps(from_gen, to_gen) #open output file, file will be named after the test given name. file_name = t_name + '.txt' __location__ = os.path.realpath(os.path.join(os.getcwd(), os.path.dirname(__file__))) out_file = open(os.path.join(__location__,file_name), 'a') print "\n", generate_chance_array print mutate_chance_array for i in range(NUM_OF_SPECIES): # MAIN LOOP OF SIMULATION RUNNING mutation_var = [mutate_chance_array[i]] mutation_var.append(generate_chance_array[i]) grid = hunger_grid(lava_chance_array[i], berry_chance_array[i]) full_graph[i] = model(progenitor, vis, grid, i, mutation_var,t_name, out_file) full_graph_bk[i] = [grid.lava_chance, grid.berry_chance] #close output file out_file.close() # DISPLAY VARIOUS GRAPHS AND PLOTS tki_breakdown[:] /= NUM_OF_SPECIES vis.graph(full_graph, full_graph_bk, t_name) vis.ins_graph(tki_breakdown, t_name) vis.chance_vs_fitness(full_graph, full_graph_bk, mutate_chance_array, generate_chance_array,t_name) print("--- %s MODEL COMPLETE ---" % (t_name)) print("--- TIME TO COMPLETE MODEL: %s seconds ---" % (time.time() - start_time)) vis.show_plots() def one_sim(seed_kat, grid, mut ,gen, out_file, multi_cat=False): """Run one simulation of number of time steps (default: 300) First initialize a sim_manager with first Kat agent. Then update at each time steps, finally taking the top Kat and top fitness score, returns it. 
""" if not multi_cat: sim_temp = sim_manager(seed_kat, grid, mut) top_kat = seed_kat else: sim_temp = sim_manager(seed_kat, grid, mut, multi_cat=True) top_kat = seed_kat[0] for i in range(NUM_OF_INDIVIDUALS): sim_temp.clear_grid(grid) sim_temp.start_kat(i) for j in range(STEPS_PER_SIM): if(sim_temp.kats[i].dead == False): sim_temp.update(i, j) else: break avg_fitness = sim_temp.average_fitness() top_kats = sim_temp.top_kats() # ARRAY FOR DISPLAYING FITNESS tki_breakdown[gen] += sim_temp.tk_breakdown() # FOR BREAKDOWN OF INSTRUCTIONS #file output for k in top_kats: out_file.write("\nFitness: ") out_file.write(str(k.calculate_fitness())) out_file.write(k.print_ins_1(False)) for kat in top_kats: kat.reset() kat_temp, score_temp = sim_temp.top_kat() return copy.deepcopy(kat_temp), score_temp, sim_temp.return_playback(),\ avg_fitness, copy.deepcopy(top_kats) def playback(vis, pb, best_kats, gen, specie, t_name): if (STEP_SIZE == -1): return if (STEP_SIZE == 0): vis.show(pb[-1], best_kats, gen, specie, t_name) else: for i in np.arange(0,len(pb), STEP_SIZE): vis.show(pb[i], copy.deepcopy(best_kats), gen, specie, t_name) def model(seed_kat, vis, grid, specie, mut,t_name, out_file): """Run multiple simulation of number of time steps each, (default: 300 simulations). In a loop, keep running each simulation of 300 number of time steps, append the top fitness score, and after loops ended, graph the fitness score over generations (simulations). """ top_kats = [] avg_kats = [] print "Species:",specie,"| Gen: 1" seed_kat, fit_score, play, avg_fitness, seed_kats = one_sim(seed_kat, grid, mut, 0,out_file) top_kats.append(fit_score) avg_kats.append(avg_fitness) playback(vis, play, seed_kat, 1, specie+1, t_name) #flie output out_file.write("Species:") out_file.write(str(specie)) out_file.write(" | Gen: 1\n") if (NUM_OF_SPECIES > 1): for i in np.arange(2, (NUM_OF_GENERATIONS+1)): #file output out_file.write("\nMODEL NAME: %s" % (t_name)) out_file.write("\n######### START: Species:") out_file.write(str(specie+1)) out_file.write(" | Gen:") out_file.write(str(i)) out_file.write("###########") print "\nMODEL NAME: %s" % (t_name) print "\n############### START: Species:",specie+1," OF ", NUM_OF_SPECIES ," | Gen:",i, "#######################\n" temp_top = seed_kats seed_kat, fit_score, play, avg_fitness, seed_kats = one_sim(seed_kats, grid, mut, (i-1),out_file, multi_cat=True) if fit_score < top_kats[-1]: seed_kats = temp_top top_kats.append(top_kats[-1]) else: top_kats.append(fit_score) avg_kats.append(avg_fitness) playback(vis, play,copy.deepcopy(seed_kats),i, specie+1, t_name) print "\n############### END: Species:",specie+1," OF ", NUM_OF_SPECIES ," | Gen:",i, "#######################\n" #file output out_file.write("######### END: Species:") out_file.write(str(specie+1)) out_file.write(" OF ") out_file.write(str(NUM_OF_SPECIES)) out_file.write(" | Gen:") out_file.write(str(i)) out_file.write("###########\n") return copy.deepcopy(list(top_kats))
mit
4,120,856,940,087,549,000
37.601093
127
0.587061
false
3.09553
false
false
false
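The parameter sweeps in run_model above all go through calc_steps, which spreads a from/to range linearly across the species being simulated. A standalone sketch of that interpolation, assuming NUM_OF_SPECIES = 5 in place of the hg_settings import:

import copy
import numpy as np

NUM_OF_SPECIES = 5  # assumption: stands in for the hg_settings constant

def calc_steps(from_num, to_num):
    array = np.arange(1, NUM_OF_SPECIES + 1, dtype='float')
    if from_num == to_num:
        array[:] = from_num          # flat sweep: every species gets the same value
    else:
        inc = (float(to_num) - from_num) / float(NUM_OF_SPECIES)
        array = np.arange(from_num, to_num, inc, dtype='float')
    return copy.deepcopy(array)

print(calc_steps(0.02, 0.02))   # [0.02 0.02 0.02 0.02 0.02]
print(calc_steps(0.02, 0.07))   # ~[0.02 0.03 0.04 0.05 0.06]

One caveat the original inherits: np.arange with a float step can gain or lose an element at the boundary, so the swept array is not guaranteed to have exactly NUM_OF_SPECIES entries; np.linspace(from_num, to_num, NUM_OF_SPECIES) would be the robust spelling.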
chapware/aircrack
scripts/airdrop-ng/install.py
1
3800
#!/usr/bin/env python
__version__ = "1.13.2010.21:00"
__author__ = "Bryan Chapman <[email protected]>"
'''
This is the installer file for airdrop-ng. It first checks
for different dependencies, such as make, svn, etc.
'''
import os, sys
from shutil import rmtree

if os.geteuid() != 0:
    print "Installer must be root to run. \nPlease 'su' or 'sudo -i' and try again. \nExiting..."
    sys.exit(1)

class checkDepend:
    def __init__(self):
        clear = "\n" * 100
        print clear
        print "Checking for dependencies used by the installer..."
        self.a = 0
        self.deps = ["make", "svn", "tar", "gcc"]
        for depends in self.deps:
            if (os.path.isfile("/usr/bin/" + depends) or
                    os.path.isfile("/usr/sbin/" + depends) or
                    os.path.isfile("/usr/local/bin/" + depends) or
                    os.path.isfile("/usr/local/sbin/" + depends) or
                    os.path.isfile("/bin/" + depends)) == True:
                pass
            else:
                self.a = 1
                print depends + " not installed."
        if self.a == 0:
            print "All dependencies installed! Continuing...\n"
            print "#### NOTE: For Ubuntu based distros, \npython2.6-dev must be installed. Please \nmake sure it is installed before continuing!\n"
        else:
            print "Please install dependencies. Exiting...\n\n"
            exit()

class installAirdrop:
    def __init__(self):
        print "Welcome to the airdrop-ng installer!\nYou will be prompted for installing\nAirdrop-ng, lorcon, and pylorcon.\n"
        yno = raw_input("Continue with installer? (y/n): ")
        if yno == "y":
            pass
        else:
            print "Fine, be that way. Exiting..."
            exit()
        yno = raw_input("Install airdrop-ng? (y/n): ")
        if yno == "y":
            self.install()
        else:
            print "airdrop-ng not installed. Continuing..."
            pass

    def install(self):
        print "Build exist? "
        if os.path.isdir("build"):
            rmtree("build")  # imported from shutil, i.e. shutil.rmtree()
            print "File exists. Cleaning it..."
            os.mkdir("build")
        else:
            os.mkdir("build")
            print "Didn't exist. Creating..."
        # moves everything to build/. This is to keep everything clean,
        # and not clutter up the directory.
        os.system("cp airdrop-ng.py build/ && cp -r lib build/ && cp docs/airdrop-ng.1 build/")
        print "Files copied. Now, moving to directory..."
        os.chdir("build")
        if os.path.isdir("/usr/lib/airdrop-ng") == True:
            rmtree("/usr/lib/airdrop-ng")
        print "Moving airdrop-ng to /usr/bin, lib to \n/usr/lib/airdrop-ng, and installing man pages..."
        os.system("cp airdrop-ng.py /usr/bin/airdrop-ng && cp -r lib /usr/lib/airdrop-ng && cp airdrop-ng.1 /usr/share/man/man1/")
        #os.chdir ("..")
        print "airdrop-ng installed! =)"

class installLorcon:
    def __init__(self):
        yno = raw_input("Would you like to install lorcon? (y/n): ")
        if yno == "y":
            print "Running svn co http://802.11ninja.net/svn/lorcon/branch/lorcon-old. This may take a while..."
            os.system("svn co http://802.11ninja.net/svn/lorcon/branch/lorcon-old")
            os.chdir("lorcon-old")
            os.system("./configure && make && make install")
            print "Creating symlinks..."
            os.system("ln -s /usr/local/lib/liborcon-1.0.0.so /usr/lib")
            os.chdir("..")
        else:
            print "Lorcon wasn't installed. "

class installPylorcon:
    def __init__(self):
        yno = raw_input("Would you like to install pylorcon? (y/n): ")
        if yno == "y":
            import urllib
            urllib.urlretrieve("http://pylorcon.googlecode.com/files/pylorcon-3.tar.bz2", "pylorcon-3.tar.bz2")
            os.system("tar -xvf pylorcon-3.tar.bz2")
            os.chdir("pylorcon")
            os.system("python setup.py install")
            os.chdir("..")

# What actually runs the classes
checkDepend()
installAirdrop()
installLorcon()
installPylorcon()
yno = raw_input("Clean up? (y/n): ")
if yno == "y":
    os.chdir("..")
    if os.path.isdir("build") == True:
        rmtree("build")
print "Operation(s) complete! May the source be with you. =) "
sys.exit()
gpl-2.0
3,112,824,450,673,922,600
30.147541
230
0.649211
false
2.810651
false
false
false
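checkDepend above reduces to one question per tool: is this name a regular file in any of the usual bin directories? A compact Python 3 restatement of that probe (the record itself is Python 2); the directory list is copied from the record, and the function name check_dependencies is ours:

import os

BIN_DIRS = ('/usr/bin', '/usr/sbin', '/usr/local/bin', '/usr/local/sbin', '/bin')

def check_dependencies(deps, search_dirs=BIN_DIRS):
    # Return the subset of deps not found as a file in any search directory.
    return [dep for dep in deps
            if not any(os.path.isfile(os.path.join(d, dep)) for d in search_dirs)]

print(check_dependencies(['make', 'svn', 'tar', 'gcc']))

On a modern system shutil.which('make') performs the same lookup against the real PATH and is the usual replacement for hand-rolled directory probing.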
ppries/tensorflow
tensorflow/contrib/framework/python/ops/variables.py
1
25287
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Variable functions. """ from __future__ import absolute_import from __future__ import division from __future__ import print_function import re from tensorflow.contrib.framework.python.ops import add_arg_scope as contrib_add_arg_scope from tensorflow.contrib.framework.python.ops import gen_variable_ops from tensorflow.contrib.util import loader from tensorflow.python import pywrap_tensorflow from tensorflow.python.framework import device as tf_device from tensorflow.python.framework import dtypes from tensorflow.python.framework import ops from tensorflow.python.ops import array_ops from tensorflow.python.ops import control_flow_ops from tensorflow.python.ops import init_ops from tensorflow.python.ops import variable_scope from tensorflow.python.ops import variables from tensorflow.python.ops import gen_state_ops from tensorflow.python.platform import tf_logging as logging from tensorflow.python.platform import resource_loader from tensorflow.python.training import saver as tf_saver from tensorflow.python.training import training_util __all__ = ['add_model_variable', 'assert_global_step', 'assert_or_get_global_step', 'assign_from_checkpoint', 'assign_from_checkpoint_fn', 'assign_from_values', 'assign_from_values_fn', 'create_global_step', 'filter_variables', 'get_global_step', 'get_or_create_global_step', 'get_local_variables', 'get_model_variables', 'get_unique_variable', 'get_variables_by_name', 'get_variables_by_suffix', 'get_variables_to_restore', 'get_variables', 'local_variable', 'model_variable', 'variable', 'VariableDeviceChooser', 'zero_initializer'] def zero_initializer(ref, use_locking=True, name="zero_initializer"): """Initialize 'ref' with all zeros, ref tensor should be uninitialized. If already initialized, you will get ValueError. This op is intended to save memory during initialization. Args: ref: ref of the tensor need to be zero initialized. name: optional name for this operation. Returns: ref that initialized. Raises: ValueError: If ref tensor is initialized. """ loader.load_op_library( resource_loader.get_path_to_datafile("_variable_ops.so")) return gen_variable_ops.zero_initializer(ref, name=name) def assert_global_step(global_step_tensor): training_util.assert_global_step(global_step_tensor) def assert_or_get_global_step(graph=None, global_step_tensor=None): """Verifies that a global step tensor is valid or gets one if None is given. If `global_step_tensor` is not None, check that it is a valid global step tensor (using `assert_global_step`). Otherwise find a global step tensor using `get_global_step` and return it. Args: graph: The graph to find the global step tensor for. global_step_tensor: The tensor to check for suitability as a global step. If None is given (the default), find a global step tensor. 
Returns: A tensor suitable as a global step, or `None` if none was provided and none was found. """ if global_step_tensor is None: # Get the global step tensor the same way the supervisor would. global_step_tensor = get_global_step(graph) else: assert_global_step(global_step_tensor) return global_step_tensor def get_global_step(graph=None): return training_util.get_global_step(graph) def create_global_step(graph=None): """Create global step tensor in graph. Args: graph: The graph in which to create the global step. If missing, use default graph. Returns: Global step tensor. Raises: ValueError: if global step key is already defined. """ graph = ops.get_default_graph() if graph is None else graph if get_global_step(graph) is not None: raise ValueError('"global_step" already exists.') # Create in proper graph and base name_scope. with graph.as_default() as g, g.name_scope(None): collections = [ops.GraphKeys.GLOBAL_VARIABLES, ops.GraphKeys.GLOBAL_STEP] return variable(ops.GraphKeys.GLOBAL_STEP, shape=[], dtype=dtypes.int64, initializer=init_ops.zeros_initializer, trainable=False, collections=collections) def get_or_create_global_step(graph=None): """Returns and create (if necessary) the global step variable. Args: graph: The graph in which to create the global step. If missing, use default graph. Returns: the tensor representing the global step variable. """ graph = ops.get_default_graph() if graph is None else graph globalstep = get_global_step(graph) if globalstep is None: globalstep = create_global_step(graph) return globalstep def local_variable(initial_value, validate_shape=True, name=None): """Create variable and add it to `GraphKeys.LOCAL_VARIABLES` collection. Args: initial_value: See variables.Variable.__init__. validate_shape: See variables.Variable.__init__. name: See variables.Variable.__init__. Returns: New variable. """ return variables.Variable( initial_value, trainable=False, collections=[ops.GraphKeys.LOCAL_VARIABLES], validate_shape=validate_shape, name=name) @contrib_add_arg_scope def variable(name, shape=None, dtype=None, initializer=None, regularizer=None, trainable=True, collections=None, caching_device=None, device=None, partitioner=None, custom_getter=None): """Gets an existing variable with these parameters or creates a new one. Args: name: the name of the new or existing variable. shape: shape of the new or existing variable. dtype: type of the new or existing variable (defaults to `DT_FLOAT`). initializer: initializer for the variable if one is created. regularizer: a (Tensor -> Tensor or None) function; the result of applying it on a newly created variable will be added to the collection GraphKeys.REGULARIZATION_LOSSES and can be used for regularization. trainable: If `True` also add the variable to the graph collection `GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`). collections: A list of collection names to which the Variable will be added. If None it would default to `tf.GraphKeys.GLOBAL_VARIABLES`. caching_device: Optional device string or function describing where the Variable should be cached for reading. Defaults to the Variable's device. device: Optional device to place the variable. It can be an string or a function that is called to get the device for the variable. partitioner: Optional callable that accepts a fully defined `TensorShape` and dtype of the `Variable` to be created, and returns a list of partitions for each axis (currently only one axis can be partitioned). 
custom_getter: Callable that allows overwriting the internal get_variable method and has to have the same signature. Returns: The created or existing variable. """ collections = list(collections or [ops.GraphKeys.GLOBAL_VARIABLES]) # Remove duplicates collections = set(collections) getter = variable_scope.get_variable if custom_getter is not None: getter = custom_getter with ops.device(device or ''): return getter(name, shape=shape, dtype=dtype, initializer=initializer, regularizer=regularizer, trainable=trainable, collections=collections, caching_device=caching_device, partitioner=partitioner) @contrib_add_arg_scope def model_variable(name, shape=None, dtype=dtypes.float32, initializer=None, regularizer=None, trainable=True, collections=None, caching_device=None, device=None, partitioner=None, custom_getter=None): """Gets an existing model variable with these parameters or creates a new one. Args: name: the name of the new or existing variable. shape: shape of the new or existing variable. dtype: type of the new or existing variable (defaults to `DT_FLOAT`). initializer: initializer for the variable if one is created. regularizer: a (Tensor -> Tensor or None) function; the result of applying it on a newly created variable will be added to the collection GraphKeys.REGULARIZATION_LOSSES and can be used for regularization. trainable: If `True` also add the variable to the graph collection `GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`). collections: A list of collection names to which the Variable will be added. Note that the variable is always also added to the `GraphKeys.GLOBAL_VARIABLES` and `GraphKeys.MODEL_VARIABLES` collections. caching_device: Optional device string or function describing where the Variable should be cached for reading. Defaults to the Variable's device. device: Optional device to place the variable. It can be an string or a function that is called to get the device for the variable. partitioner: Optional callable that accepts a fully defined `TensorShape` and dtype of the `Variable` to be created, and returns a list of partitions for each axis (currently only one axis can be partitioned). custom_getter: Callable that allows overwriting the internal get_variable method and has to have the same signature. Returns: The created or existing variable. """ collections = list(collections or []) collections += [ops.GraphKeys.GLOBAL_VARIABLES, ops.GraphKeys.MODEL_VARIABLES] var = variable(name, shape=shape, dtype=dtype, initializer=initializer, regularizer=regularizer, trainable=trainable, collections=collections, caching_device=caching_device, device=device, partitioner=partitioner, custom_getter=custom_getter) return var def add_model_variable(var): """Adds a variable to the `GraphKeys.MODEL_VARIABLES` collection. Args: var: a variable. """ if var not in ops.get_collection(ops.GraphKeys.MODEL_VARIABLES): ops.add_to_collection(ops.GraphKeys.MODEL_VARIABLES, var) def get_variables(scope=None, suffix=None, collection=ops.GraphKeys.GLOBAL_VARIABLES): """Gets the list of variables, filtered by scope and/or suffix. Args: scope: an optional scope for filtering the variables to return. Can be a variable scope or a string. suffix: an optional suffix for filtering the variables to return. collection: in which collection search for. Defaults to `GraphKeys.GLOBAL_VARIABLES`. Returns: a list of variables in collection with scope and suffix. 
""" if isinstance(scope, variable_scope.VariableScope): scope = scope.name if suffix is not None: if ':' not in suffix: suffix += ':' scope = (scope or '') + '.*' + suffix return ops.get_collection(collection, scope) def get_model_variables(scope=None, suffix=None): """Gets the list of model variables, filtered by scope and/or suffix. Args: scope: an optional scope for filtering the variables to return. suffix: an optional suffix for filtering the variables to return. Returns: a list of variables in collection with scope and suffix. """ return get_variables(scope, suffix, ops.GraphKeys.MODEL_VARIABLES) def get_local_variables(scope=None, suffix=None): """Gets the list of local variables, filtered by scope and/or suffix. Args: scope: an optional scope for filtering the variables to return. suffix: an optional suffix for filtering the variables to return. Returns: a list of variables in collection with scope and suffix. """ return get_variables(scope, suffix, ops.GraphKeys.LOCAL_VARIABLES) def get_variables_to_restore(include=None, exclude=None): """Gets the list of the variables to restore. Args: include: an optional list/tuple of scope strings for filtering which variables from the VARIABLES collection to include. None would include all the variables. exclude: an optional list/tuple of scope strings for filtering which variables from the VARIABLES collection to exclude. None it would not exclude any. Returns: a list of variables to restore. Raises: TypeError: include or exclude is provided but is not a list or a tuple. """ if include is None: # Include all variables. vars_to_include = get_variables() else: if not isinstance(include, (list, tuple)): raise TypeError('include is provided but is not a list or a tuple.') vars_to_include = [] for scope in include: vars_to_include += get_variables(scope) vars_to_exclude = set() if exclude is not None: if not isinstance(exclude, (list, tuple)): raise TypeError('exclude is provided but is not a list or a tuple.') for scope in exclude: vars_to_exclude |= set(get_variables(scope)) # Exclude the variables in vars_to_exclude return [v for v in vars_to_include if v not in vars_to_exclude] def get_variables_by_suffix(suffix, scope=None): """Gets the list of variables that end with the given suffix. Args: suffix: suffix for filtering the variables to return. scope: an optional scope for filtering the variables to return. Returns: a copied list of variables with the given name and prefix. """ return get_variables(scope=scope, suffix=suffix) def get_variables_by_name(given_name, scope=None): """Gets the list of variables that were given that name. Args: given_name: name given to the variable without any scope. scope: an optional scope for filtering the variables to return. Returns: a copied list of variables with the given name and scope. """ suffix = '/' + given_name + ':|^' + given_name + ':' return get_variables(scope=scope, suffix=suffix) def get_unique_variable(var_op_name): """Gets the variable uniquely identified by that var_op_name. Args: var_op_name: the full name of the variable op, including the scope. Returns: a tensorflow variable. Raises: ValueError: if no variable uniquely identified by the name exists. 
""" candidates = get_variables(scope=var_op_name) if not candidates: raise ValueError('Couldnt find variable %s' % var_op_name) for candidate in candidates: if candidate.op.name == var_op_name: return candidate raise ValueError('Variable %s does not uniquely identify a variable', var_op_name) def assign_from_values(var_names_to_values): """Creates an assignment operation from a given mapping. This function provides a mechanism for performing assignment of variables to values in a way that does not fill the graph with large assignment values. Args: var_names_to_values: A map from variable names to values. Returns: assign_op: An `Operation` that assigns each of the given variables to the requested values. feed_dict: The feed dictionary to use when evaluating `assign_op`. Raises: ValueError: if any of the given variable names were not found. """ feed_dict = {} assign_ops = [] for var_name in var_names_to_values: var_value = var_names_to_values[var_name] var = ops.get_collection(ops.GraphKeys.GLOBAL_VARIABLES, var_name) if not var: raise ValueError('Variable %s wasnt found', var_name) elif len(var) > 1: # tf.get_collection is just a filter on the prefix: find the exact match: found = False for v in var: if v.op.name == var_name: var = v found = True break if not found: raise ValueError('Variable %s doesnt uniquely identify a variable', var_name) else: var = var[0] # TODO(nsilberman): ensure placeholder and assign are on the same device. # Assign a placeholder to the value that will be filled later. placeholder_name = 'placeholder/' + var.op.name placeholder_value = array_ops.placeholder( dtype=var.dtype.base_dtype, shape=var.get_shape(), name=placeholder_name) assign_ops.append(var.assign(placeholder_value)) feed_dict[placeholder_value] = var_value.reshape(var.get_shape()) assign_op = control_flow_ops.group(*assign_ops) return assign_op, feed_dict def assign_from_values_fn(var_names_to_values): """Returns a function that assigns specific variables from the given values. This function provides a mechanism for performing assignment of variables to values in a way that does not fill the graph with large assignment values. Args: var_names_to_values: A map from variable names to values. Returns: A function that takes a single argument, a `tf.Session`, that applies the assignment operation. Raises: ValueError: if any of the given variable names were not found. """ assign_op, feed_dict = assign_from_values(var_names_to_values) def callback(session): return session.run(assign_op, feed_dict) return callback # TODO(nsilberman): add flag to load exponential moving averages instead def assign_from_checkpoint(model_path, var_list): """Creates an operation to assign specific variables from a checkpoint. Args: model_path: The full path to the model checkpoint. To get latest checkpoint use `model_path = tf.train.latest_checkpoint(checkpoint_dir)` var_list: A list of `Variable` objects or a dictionary mapping names in the checkpoint to the corresponding variables to initialize. If empty or None, it would return no_op(), None. Returns: the restore_op and the feed_dict that need to be run to restore var_list. Raises: ValueError: If the checkpoint specified at `model_path` is missing one of the variables in `var_list`. 
""" reader = pywrap_tensorflow.NewCheckpointReader(model_path) if isinstance(var_list, (tuple, list)): var_list = {var.op.name: var for var in var_list} feed_dict = {} assign_ops = [] for checkpoint_var_name in var_list: var = var_list[checkpoint_var_name] if not reader.has_tensor(checkpoint_var_name): raise ValueError( 'Checkpoint is missing variable [%s]' % checkpoint_var_name) var_value = reader.get_tensor(checkpoint_var_name) placeholder_name = 'placeholder/' + var.op.name placeholder_value = array_ops.placeholder( dtype=var.dtype.base_dtype, shape=var.get_shape(), name=placeholder_name) assign_ops.append(var.assign(placeholder_value)) if var.get_shape() != var_value.shape: raise ValueError( 'Total size of new array must be unchanged for %s ' 'lh_shape: [%s], rh_shape: [%s]' % (checkpoint_var_name, str(var_value.shape), str(var.get_shape()))) feed_dict[placeholder_value] = var_value.reshape(var.get_shape()) assign_op = control_flow_ops.group(*assign_ops) return assign_op, feed_dict def assign_from_checkpoint_fn(model_path, var_list, ignore_missing_vars=False, reshape_variables=False): """Returns a function that assigns specific variables from a checkpoint. Args: model_path: The full path to the model checkpoint. To get latest checkpoint use `model_path = tf.train.latest_checkpoint(checkpoint_dir)` var_list: A list of `Variable` objects or a dictionary mapping names in the checkpoint to the correspoing variables to initialize. If empty or None, it would return no_op(), None. ignore_missing_vars: Boolean, if True it would ignore variables missing in the checkpoint with a warning instead of failing. reshape_variables: Boolean, if True it would automatically reshape variables which are of different shape then the ones stored in the checkpoint but which have the same number of elements. Returns: A function that takes a single argument, a `tf.Session`, that applies the assignment operation. Raises: ValueError: If the checkpoint specified at `model_path` is missing one of the variables in `var_list`. """ if ignore_missing_vars: reader = pywrap_tensorflow.NewCheckpointReader(model_path) if isinstance(var_list, dict): var_dict = var_list else: var_dict = {var.op.name: var for var in var_list} available_vars = {} for var in var_dict: if reader.has_tensor(var): available_vars[var] = var_dict[var] else: logging.warning( 'Variable %s missing in checkpoint %s', var, model_path) var_list = available_vars saver = tf_saver.Saver(var_list, reshape=reshape_variables) def callback(session): saver.restore(session, model_path) return callback class VariableDeviceChooser(object): """Device chooser for variables. When using a parameter server it will assign them in a round-robin fashion. When not using a parameter server it allows GPU or CPU placement. """ def __init__(self, num_tasks=0, job_name='ps', device_type='CPU', device_index=0): """Initialize VariableDeviceChooser. Usage: To use with 2 parameter servers: VariableDeviceChooser(2) To use without parameter servers: VariableDeviceChooser() VariableDeviceChooser(device_type='GPU') # For GPU placement Args: num_tasks: number of tasks. job_name: String, a name for the parameter server job. device_type: Optional device type string (e.g. "CPU" or "GPU") device_index: int. Optional device index. If left unspecified, device represents 'any' device_index. 
""" self._job_name = job_name self._device_type = device_type self._device_index = device_index self._num_tasks = num_tasks self._next_task_id = 0 def __call__(self, op): device_spec = tf_device.DeviceSpec(device_type=self._device_type, device_index=self._device_index) if self._num_tasks > 0: task_id = self._next_task_id self._next_task_id = (self._next_task_id + 1) % self._num_tasks device_spec.job = self._job_name device_spec.task = task_id return device_spec.to_string() def filter_variables(var_list, include_patterns=None, exclude_patterns=None, reg_search=True): """Filter a list of variables using regular expressions. First includes variables according to the list of include_patterns. Afterwards, eliminates variables according to the list of exclude_patterns. For example, one can obtain a list of variables with the weights of all convolutional layers (depending on the network definition) by: ```python variables = tf.contrib.framework.get_model_variables() conv_weight_variables = tf.contrib.framework.filter_variables( variables, include_patterns=['Conv'], exclude_patterns=['biases', 'Logits']) ``` Args: var_list: list of variables. include_patterns: list of regular expressions to include. Defaults to None, which means all variables are selected according to the include rules. A variable is included if it matches any of the include_patterns. exclude_patterns: list of regular expressions to exclude. Defaults to None, which means all variables are selected according to the exclude rules. A variable is excluded if it matches any of the exclude_patterns. reg_search: boolean. If True (default), performs re.search to find matches (i.e. pattern can match any substring of the variable name). If False, performs re.match (i.e. regexp should match from the beginning of the variable name). Returns: filtered list of variables. """ if reg_search: reg_exp_func = re.search else: reg_exp_func = re.match # First include variables. if include_patterns is None: included_variables = list(var_list) else: included_variables = [] for var in var_list: if any(reg_exp_func(ptrn, var.name) for ptrn in include_patterns): included_variables.append(var) # Afterwards, exclude variables. if exclude_patterns is None: filtered_variables = included_variables else: filtered_variables = [] for var in included_variables: if not any(reg_exp_func(ptrn, var.name) for ptrn in exclude_patterns): filtered_variables.append(var) return filtered_variables
apache-2.0
-135,211,928,952,083,820
35.701016
90
0.693835
false
4.16384
false
false
false
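filter_variables at the end of the record is plain two-pass regex filtering: include first, then exclude. The same logic restated over bare strings so it runs without TensorFlow; the reg_search switch between re.search and re.match mirrors the record, and the sample names are hypothetical:

import re

def filter_names(names, include_patterns=None, exclude_patterns=None, reg_search=True):
    match = re.search if reg_search else re.match
    if include_patterns is not None:
        names = [n for n in names if any(match(p, n) for p in include_patterns)]
    if exclude_patterns is not None:
        names = [n for n in names if not any(match(p, n) for p in exclude_patterns)]
    return names

names = ['Conv1/weights:0', 'Conv1/biases:0', 'Logits/weights:0']
print(filter_names(names, include_patterns=['Conv'], exclude_patterns=['biases']))
# -> ['Conv1/weights:0']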
Daniel-Brosnan-Blazquez/DIT-100
debugging/trajectory_planning_profiles/trapezoidal-profile.py
1
7690
import numpy import time from matplotlib import pyplot def main (params): angle = params['p0'] vel = params['v0'] sign = params['sign'] # Plan the trajectory if it is not planned T = 0 Ta = 0 Td = 0 dt = params['dt'] if not params['trajectory']: # Maximum acceleration and velocity values in degrees/s^2 and # degrees/s respectively amax = params['acc_limit_d']*sign*(-1) vmax = params['vel_limit']*sign*(-1) v0 = vel h = angle vlim = vmax # Check if the trajectory is feasible print "abs (amax*h) >= v0**2/2.0 = %s" % (abs (amax*h) >= v0**2/2.0) if abs (amax*h) >= v0**2/2.0: # The trajectory is feasible # Check if the maximum value of velocity can be reached if abs (h*amax) > vmax**2 - v0**2/2.0: # The maximum value of velocity can be reached Ta = (vmax - v0)/amax Td = vmax/amax term1 = abs (h/vmax) term2 = (vmax/(2*amax)) * (1 - (v0/vmax))**2 term3 = (vmax/(2*amax)) T = term1 + term2 + term3 else: # The maximum value of velocity can't be reached vlim = ((abs (h * amax) + v0**2/2.0)**(1/2.0))*sign*(-1) Ta = abs ((vlim - v0)/amax) Td = abs (vlim/amax) T = Ta + Td # end if # The time has to be positive Ta = abs (Ta) Td = abs (Td) T = abs (T) print "Ta = %s, Td = %s" % (Ta, Td) params['trajectory'] = True params['T'] = T params['Ta'] = Ta params['Td'] = Td params['T_sign'] = sign*(-1) params['vv'] = vlim # if Ta > dt and Td > dt: # params['trajectory'] = True # params['T'] = T # params['Ta'] = Ta # params['Td'] = Td # params['T_sign'] = sign*(-1) # params['vv'] = vlim # else: # Ta = 0 # Td = 0 # T = 0 # end if # end if return def plot (params): t = 0 interval = params['dt'] # Sign sign = params['T_sign'] # Maximum values amax = params['acc_limit_d']*sign vmax = params['vel_limit']*sign # Buffers to store the motion positions = [] vels = [] accs = [] # Initial values of the motion v0 = params['v0'] p0 = params['p0'] vv = params['vv'] T = params['T'] Ta = params['Ta'] Td = params['Td'] # Acceleration phase while t < Ta: # Position pos = p0 + v0*t + ((vv - v0)/(2*Ta))*t**2 positions.append (pos) # Velocity vel = v0 + ((vv - v0)/(Ta))*t vels.append (vel) # Acceleration acc = (vv - v0)/Ta accs.append (acc) t += interval # end while # Constant velocity phase while t < (T - Td): # Position pos = p0 + v0*(Ta/2.0) + vv*(t-(Ta/2.0)) positions.append (pos) # Velocity vel = vv vels.append (vel) # Acceleration acc = 0 accs.append (acc) t += interval # end while # Deceleration phase while t < T: # Position pos = 0 - (vv/(2*Td))*(T-t)**2 positions.append (pos) # Velocity vel = (vv/Td)*(T-t) vels.append (vel) # Acceleration acc = -(vv/Td) accs.append (acc) t += interval # end while fig = pyplot.figure (1, figsize = (20,10)) s = fig.add_subplot (311) p, = s.plot(positions) s.grid (True) s.set_title ("position") s = fig.add_subplot (312) p, = s.plot(vels) s.grid (True) s.set_title ("velocity") s = fig.add_subplot (313) p, = s.plot(accs) s.grid (True) s.set_title ("acceleration") pyplot.show () pyplot.close (1) return if __name__ == "__main__": params = {} # Period params['dt'] = 0.015 # Flag to indicate if it is necessary to compute the trajectory # (not needed here) params['trajectory'] = False # Velocity, acceleration and jerk limits in degrees/s^2 params['vel_limit'] = 150.0 rad_to_degrees = 180.0/numpy.pi radius = 0.3 # m/s^2 params['acc_limit'] = 7.5 # degrees/s^2 params['acc_limit_d'] = (params['acc_limit']*rad_to_degrees)/radius # # p0 = 0. Checked, trajectory unfeasible # # p0 # params['p0'] = 0.0 # # v0 # params['v0'] = 100.0 # p0 > 50 v0 = 0. 
Checked, trajectory feasible # p0 params['p0'] = 80.0 # v0 params['v0'] = 0.0 # # p0 > 50 v0 < limit. Checked, trajectory feasible # # p0 # params['p0'] = 80.0 # # v0 # params['v0'] = 50.0 # # p0 > 50 v0 = limit. Checked, trajectory feasible # # p0 # params['p0'] = 80.0 # # v0 # params['v0'] = 100.0 # # p0 > 50 v0 > limit. Checked, trajectory feasible # # p0 # params['p0'] = 80.0 # # v0 # params['v0'] = -150.0 # # p0 < 50 p0 > 0 v0 = 0. Checked, trajectory feasible # # p0 # params['p0'] = 20.0 # # v0 # params['v0'] = 0.0 # # p0 < 50 p0 > 0 v0 < limit. REVIEW IT!!!!!!!!! # # p0 # params['p0'] = 20.0 # # v0 # params['v0'] = 50.0 # # p0 < 50 p0 > 0 v0 = limit. Checked, trajectory feasible # # p0 # params['p0'] = 20.0 # # v0 # params['v0'] = 100.0 # # p0 < 50 p0 > 0 v0 > limit. Checked, trajectory feasible # # p0 # params['p0'] = 20.0 # # v0 # params['v0'] = 150.0 # # p0 < -50 v0 = 0. Checked, trajectory feasible # # p0 # params['p0'] = -80.0 # # v0 # params['v0'] = 0.0 # # p0 < -50 v0 < limit. Checked, trajectory feasible # # p0 # params['p0'] = -80.0 # # v0 # params['v0'] = 50.0 # # p0 < -50 v0 = limit. Checked, trajectory feasible # # p0 # params['p0'] = -80.0 # # v0 # params['v0'] = 100.0 # # p0 < -50 v0 > limit. Checked, trajectory feasible # # p0 # params['p0'] = -80.0 # # v0 # params['v0'] = 150.0 # # p0 > -50 p0 < 0 v0 = 0. Checked, trajectory feasible # # p0 # params['p0'] = -20.0 # # v0 # params['v0'] = 0.0 # # p0 > -50 p0 < 0 v0 < limit. Checked, trajectory feasible # # p0 # params['p0'] = -20.0 # # v0 # params['v0'] = -50.0 # # p0 > -50 p0 < 0 v0 = limit. Checked, trajectory feasible # # p0 # params['p0'] = -20.0 # # v0 # params['v0'] = 100.0 # # p0 > -50 p0 < 0 v0 > limit. Checked, trajectory feasible # # p0 # params['p0'] = -20.0 # # v0 # params['v0'] = 150.0 # # p0 > -50 p0 < 0 v0 > limit. Checked, trajectory feasible # # p0 # params['p0'] = -20.0 # # v0 # params['v0'] = 200.0 # sign params['sign'] = 1 # params['sign'] = -1 # # p0 # params['p0'] = 11.0962258945 # # params['p0'] = 22.0 # # v0 # params['v0'] = 71.19 # # params['v0'] = 0.0 main(params) print "Trajectory performed: %s" % params['trajectory'] if params['trajectory']: T = params['T'] Ta = params['Ta'] Td = params['Td'] print "T = %s, Ta = %s, Td = %s" %(T, Ta, Td) plot (params)
gpl-3.0
-1,022,844,989,781,801,000
23.258675
76
0.447854
false
3.067411
false
false
false
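For a rest-to-rest move (v0 = 0) the planner's equations above simplify: a cruise phase exists when h*amax > vmax**2, otherwise the profile degenerates to a triangle with peak velocity sqrt(h*amax). A worked check using the record's own limits (7.5 rad/s^2 at a 0.3 m radius, 150 deg/s cap) and a hypothetical 80-degree move:

import numpy

rad_to_degrees = 180.0 / numpy.pi
amax = (7.5 * rad_to_degrees) / 0.3    # ~1432.4 deg/s^2, as in the record
vmax = 150.0                           # deg/s
h = 80.0                               # hypothetical travel, degrees

if h * amax > vmax**2:                 # trapezoidal: vmax is actually reached
    Ta = Td = vmax / amax
    T = h / vmax + vmax / amax         # the record's three-term T with v0 = 0
else:                                  # triangular: peak velocity stays below vmax
    vlim = (h * amax) ** 0.5
    Ta = Td = vlim / amax
    T = Ta + Td

print(Ta, Td, T)                       # ~0.105 s, ~0.105 s, ~0.638 s

With v0 = 0 the record's T = |h/vmax| + (vmax/(2*amax))*(1 - v0/vmax)**2 + vmax/(2*amax) collapses to h/vmax + vmax/amax, which is what the trapezoidal branch computes.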
Oreder/PythonSelfStudy
Exe_18.py
1
8002
# Creating class # # +++ Syntax +++ # class ClassName: # 'Optional class documentation string' # class_suite # class Employee: 'common base class for all employees' empCount = 0 def __init__(self, name, salary): self.name = name self.salary = salary Employee.empCount += 1 def displayCount(self): print("Total Employee:", empCount) def display(self): print("Name:", self.name, "with Salary:", self.salary) # Here, #  The variable empCount is a class variable whose value is shared among all # instances of a this class. This can be accessed as Employee.empCount from # inside the class or outside the class. #  The first method __init__() is a special method, which is called class # constructor or initialization method that Python calls when you create a new # instance of this class. #  You declare other class methods like normal functions with the exception that # the first argument to each method is self. Python adds the self argument to # the list for you; you do not need to include it when you call the methods. # # Creating Instance Objects "This would create first object of Employee class" emp1 = Employee("Zara", 2000) "This would create second object of Employee class" emp2 = Employee("Manni", 5000) # Accessing Attributes emp1.display() emp2.display() print("Total Employee:", Employee.empCount) # We can add, remove, or modify attributes of classes and objects at any time emp1.age = 7 # Add an 'age' attribute print(emp1.age) emp1.age = 8 # Modify 'age' attribute print(emp1.age) del emp1.age # Delete 'age' attribute # Instead of using the normal statements to access attributes, we can use the # following functions: # #  The getattr(obj, name[, default]) : to access the attribute of object. #  The hasattr(obj,name) : to check if an attribute exists or not. #  The setattr(obj,name,value) : to set an attribute. # If attribute does not exist, then it would be created. #  The delattr(obj, name) : to delete an attribute. print(hasattr(emp1, 'age')) # Returns true if 'age' attribute exists print(setattr(emp1, 'age', 8)) # Set attribute 'age' at 8 print(getattr(emp1, 'age')) # Returns value of 'age' attribute print(delattr(emp1, 'age')) # Delete attribute 'age' # +++ Built-In Class Attributes # Every Python class keeps following built-in attributes and they can be accessed using # dot operator like any other attribute: #  __dict__: Dictionary containing the class's namespace. #  __doc__: Class documentation string or none, if undefined. #  __name__: Class name. #  __module__: Module name in which the class is defined. This attribute is # "__main__" in interactive mode. #  __bases__: A possibly empty tuple containing the base classes, in the order # of their occurrence in the base class list. print("Employee.__doc__:", Employee.__doc__) print("Employee.__name__:", Employee.__name__) print("Employee.__module__:", Employee.__module__) print("Employee.__bases__:", Employee.__bases__) print("Employee.__dict__:", Employee.__dict__) # +++ Destroying Objects (Garbage Collection) # The __del__() destructor prints the class name of an instance that is about to be destroyed. 
class Point: def __init__(self, x = 0, y = 0): self.x = x self.y = y def __del__(self): class_name = self.__class__.__name__ print(class_name, "is destroyed!") p1 = Point() p2 = p1 p3 = p1 print("Id(P1):", id(p1)) print("Id(P2):", id(p2)) print("Id(P3):", id(p3)) del p1 del p2 del p3 # +++ Class Inheritance +++ # --------------------------------------------------------- # Syntax # class SubClassName (ParentClass1[, ParentClass2, ...]): # 'Optional class documentation string' # class_suite # --------------------------------------------------------- class Parent: # define parent class parentAttr = 100 def __init__(self): print("Calling parent constructor") def parentMethod(self): print('Calling parent method') def setAttr(self, attr): Parent.parentAttr = attr def getAttr(self): print("Parent attribute:", Parent.parentAttr) class Child(Parent): # define child class def __init__(self): print("Calling child constructor") def childMethod(self): print('Calling child method') c = Child() # instance of child c.childMethod() # child calls its method c.parentMethod() # calls parent's method c.setAttr(200) # again call parent's method c.getAttr() # again call parent's method # Similar way, we can drive a class from multiple parent classes as follows: # ----------------------------------------------- # class A: # define class A | # ..... | # class B: # define class B | # ..... | # class C(A, B): # subclass of A and B | # ..... | # ----------------------------------------------- # +++ Overriding Methods +++ class Parent: def myMethod(self): print("Calling parent method") class Child(Parent): def myMethod(self): print("Calling child method") c = Child() c.myMethod() # +++ Base Overloading Methods # =========================================================== # Sr. No. # Method, Description and Sample Call # # =========================================================== # # __init__ ( self [,args...] ) # # 1 # Constructor (with any optional arguments) # # # Sample Call : obj = className(args) # # ----------------------------------------------------------- # # __del__( self ) # # 2 # Destructor, deletes an object # # # Sample Call : del obj # # ----------------------------------------------------------- # # __repr__( self ) # # 3 # Evaluatable string representation # # # Sample Call : repr(obj) # # ----------------------------------------------------------- # # __str__( self ) # # 4 # Printable string representation # # # Sample Call : str(obj) # # ----------------------------------------------------------- # # __cmp__ ( self, x ) # # 5 # Object comparison # # # Sample Call : cmp(obj, x) # # =========================================================== # +++ Overloading Operators: using __add__ method class Vector: def __init__(self, a, b): self.a = a self.b = b def __str__(self): return 'Vector (%d, %d)' % (self.a, self.b) def __add__(self, other): return Vector(self.a + other.a, self.b + other.b) v1 = Vector(2, 10) v2 = Vector(5, -2) print(v1 + v2) # Data Hiding class JustCounter: __secretCount = 0 def count(self): self.__secretCount += 1 print(self.__secretCount) counter = JustCounter() counter.count() counter.count() print(counter.__secretCount) # Error! # When the above code is executed, it produces the following result: # 1 # 2 # Traceback (most recent call last): # File "Exe_18.py", line 225, in <module> # print counter.__secretCount # AttributeError: JustCounter instance has no attribute '__secretCount' # # Python protects those members by internally changing the name to include the class # name. 
# We can access such attributes as
#     object._className__attrName
# If we replace our last line with the following, then it works for us:
print(counter._JustCounter__secretCount) # Worked!
mit
4,672,089,774,815,598,000
32.241667
94
0.543369
false
3.916544
false
false
false
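The AttributeError promised at the end of the record is Python's name mangling: inside a class body, __secretCount is rewritten to _JustCounter__secretCount, so only the mangled spelling is reachable from outside. A Python 3 rerun of exactly that point:

class JustCounter:
    __secretCount = 0

    def count(self):
        self.__secretCount += 1   # mangled to self._JustCounter__secretCount

c = JustCounter()
c.count()
print(c._JustCounter__secretCount)    # 1 -- works via the mangled name
try:
    print(c.__secretCount)            # no mangling outside the class body
except AttributeError as exc:
    print('AttributeError:', exc)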
ess-dmsc/do-ess-data-simulator
DonkiPlayer/DonkiOrchestraLib.py
1
7360
import zmq
import traceback
import socket
import time


class CommunicationClass:
    def __init__(self, name='director'):
        self.context = zmq.Context()
        self.poller = zmq.Poller()
        self.pub_sock = None
        self.sub_socks = {}
        self.pub_tag = name
        # self.create_pub_socket()

    #---------------------------------------------------------------------------------
    # create_pub_socket:
    #
    #---------------------------------------------------------------------------------
    def create_pub_socket(self):
        try:
            self.pub_sock = self.context.socket(zmq.PUB)
            self.pub_port = self.pub_sock.bind_to_random_port("tcp://0.0.0.0")
            print "PUB " + "tcp://" + str(self.pub_port)
        except:
            traceback.print_exc()
            self.pub_sock = None

    #---------------------------------------------------------------------------------
    # create_sub_socket:
    #
    #---------------------------------------------------------------------------------
    def create_sub_socket(self, name, url):
        try:
            if name in self.sub_socks:
                self.poller.unregister(self.sub_socks[name])
                self.sub_socks[name].close()
            self.sub_socks[name] = self.context.socket(zmq.SUB)
            self.sub_socks[name].setsockopt(zmq.SUBSCRIBE, '')
            self.sub_socks[name].connect("tcp://" + str(url))
            self.poller.register(self.sub_socks[name], zmq.POLLIN)
            #print "SUB TO " + "tcp://" + str(url), self.sub_socks[name]
        except:
            traceback.print_exc()
            print "tcp://" + str(url)
            del self.sub_socks[name]
            return False
        return True

    #---------------------------------------------------------------------------------
    # my_pub_socket_info :
    #
    #---------------------------------------------------------------------------------
    def my_pub_socket_info(self):
        return socket.gethostname() + ":" + str(self.pub_port)

    #---------------------------------------------------------------------------------
    # publish_ack :
    #
    #---------------------------------------------------------------------------------
    def publish_ack(self, ack_tag, trg_start, trg_stop):
        # At the moment just use send_pyobj
        self.pub_sock.send_pyobj([ack_tag, trg_start, trg_stop])

    #---------------------------------------------------------------------------------
    # publish_data :
    #
    #---------------------------------------------------------------------------------
    def publish_data(self, tag, trg_start, trg_stop, data_value):
        # At the moment just use send_pyobj
        self.pub_sock.send_pyobj(['data', tag.lower(), trg_start, trg_stop, data_value])

    #---------------------------------------------------------------------------------
    # publish_info :
    #
    #---------------------------------------------------------------------------------
    def publish_info(self, priority=-1, data_names=[]):
        # At the moment just use send_pyobj
        self.pub_sock.send_pyobj(['info', {'prio': priority, 'data': data_names}])

    #---------------------------------------------------------------------------------
    # ask_for_info :
    #
    #---------------------------------------------------------------------------------
    def ask_for_info(self, srv_name, timeout_sec=1):
        # At the moment just use send_pyobj
        self.pub_sock.send_pyobj(["info", srv_name])
        msg = []
        sub_socket = self.sub_socks[srv_name]
        max_retries = 5
        retry = 0
        while retry < max_retries and msg == []:
            socks = dict(self.poller.poll((1000. / max_retries) * timeout_sec))
            #if len(socks) == 0:
            #    return msg
            if sub_socket in socks and socks[sub_socket] == zmq.POLLIN:
                try:
                    reply = sub_socket.recv_pyobj()
                    if reply[0] == 'info':
                        msg = reply[1]
                except:
                    traceback.print_exc()
                    msg = []
            retry += 1
        return msg

    #---------------------------------------------------------------------------------
    # ask_for_log :
    #
    #---------------------------------------------------------------------------------
    def ask_for_log(self, srv_name, timeout_sec=1):
        # At the moment just use send_pyobj
        self.pub_sock.send_pyobj(["playerlog", srv_name])
        msg = []
        sub_socket = self.sub_socks[srv_name]
        max_retries = 5
        retry = 0
        while retry < max_retries and msg == []:
            socks = dict(self.poller.poll((1000. / max_retries) * timeout_sec))
            #if len(socks) == 0:
            #    return msg
            if sub_socket in socks and socks[sub_socket] == zmq.POLLIN:
                try:
                    reply = sub_socket.recv_pyobj()
                    if reply[0] == 'data' and reply[1] == 'playerlog':
                        msg = reply[4]
                except:
                    traceback.print_exc()
                    msg = []
            retry += 1
        return msg

    #---------------------------------------------------------------------------------
    # wait_message :
    #
    #---------------------------------------------------------------------------------
    def wait_message(self, srv_names, timeout_sec=1):
        try:
            msg = {}
            socks = dict(self.poller.poll(1000 * timeout_sec))
            if len(socks) == 0:
                return msg
            for sn in srv_names:
                s = self.sub_socks[sn]
                if s in socks and socks[s] == zmq.POLLIN:
                    recv_msg = s.recv_pyobj()
                    msg[sn] = recv_msg
        except:
            traceback.print_exc()
            msg = None
        return msg

    #---------------------------------------------------------------------------------
    # publish_command :
    #
    #---------------------------------------------------------------------------------
    def publish_command(self, command, srv_name, argin=None, timeout_sec=1):
        # At the moment just use send_pyobj
        self.pub_sock.send_pyobj([command, srv_name, argin])
        print "Sent command:", command, srv_name, argin
        msg = []
        sub_socket = self.sub_socks[srv_name]
        max_retries = 5
        retry = 0
        while retry < max_retries and msg == []:
            socks = dict(self.poller.poll((1000. / max_retries) * timeout_sec))
            if sub_socket in socks and socks[sub_socket] == zmq.POLLIN:
                try:
                    reply = sub_socket.recv_pyobj()
                    if reply[0] == command and reply[1] == reply[2] == -1:
                        return True
                except:
                    traceback.print_exc()
                    return False
            retry += 1
        return False

    #---------------------------------------------------------------------------------
    # publish_trigger :
    #
    #---------------------------------------------------------------------------------
    def publish_trigger(self, trigger_value, priority):
        # At the moment just use send_pyobj
        self.pub_sock.send_pyobj(["trigger", trigger_value, priority])
bsd-2-clause
-3,240,108,281,225,997,000
37.134715
85
0.38125
false
4.6494
false
false
false
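Editor's illustration (not part of the dataset row): a minimal director-side sketch of how the CommunicationClass above might be wired up. The player name and its PUB endpoint are hypothetical placeholders; a real DonkiOrchestra player would have to be listening there.

from DonkiOrchestraLib import CommunicationClass

comm = CommunicationClass(name='director')
comm.create_pub_socket()                         # bind the PUB socket to a random port
print "Announce to players:", comm.my_pub_socket_info()
if comm.create_sub_socket('player1', 'localhost:50001'):   # hypothetical player URL
    info = comm.ask_for_info('player1', timeout_sec=2)     # retries up to 5 times
    print "player1 info:", info
    comm.publish_trigger(1, priority=0)                    # broadcast trigger #1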
ragupta-git/ImcSdk
imcsdk/mometa/storage/StorageFlexFlashVirtualDrive.py
1
7941
"""This module contains the general information for StorageFlexFlashVirtualDrive ManagedObject.""" from ...imcmo import ManagedObject from ...imccoremeta import MoPropertyMeta, MoMeta from ...imcmeta import VersionMeta class StorageFlexFlashVirtualDriveConsts: ADMIN_ACTION_DISABLE_VD = "disable-vd" ADMIN_ACTION_ENABLE_VD = "enable-vd" ADMIN_ACTION_ERASE_VD = "erase-vd" ADMIN_ACTION_SYNC_VD = "sync-vd" ADMIN_ACTION_UPDATE_VD = "update-vd" class StorageFlexFlashVirtualDrive(ManagedObject): """This is StorageFlexFlashVirtualDrive class.""" consts = StorageFlexFlashVirtualDriveConsts() naming_props = set([u'partitionId']) mo_meta = { "classic": MoMeta("StorageFlexFlashVirtualDrive", "storageFlexFlashVirtualDrive", "vd-[partition_id]", VersionMeta.Version202c, "InputOutput", 0x1f, [], ["admin", "read-only", "user"], [u'storageFlexFlashController'], [u'faultInst'], ["Get", "Set"]), "modular": MoMeta("StorageFlexFlashVirtualDrive", "storageFlexFlashVirtualDrive", "vd-[partition_id]", VersionMeta.Version2013e, "InputOutput", 0x1f, [], ["admin", "read-only", "user"], [u'storageFlexFlashController'], [u'faultInst'], ["Get", "Set"]) } prop_meta = { "classic": { "admin_action": MoPropertyMeta("admin_action", "adminAction", "string", VersionMeta.Version202c, MoPropertyMeta.READ_WRITE, 0x2, 0, 510, None, ["disable-vd", "enable-vd", "erase-vd", "sync-vd", "update-vd"], []), "dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version202c, MoPropertyMeta.READ_WRITE, 0x4, 0, 255, None, [], []), "rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version202c, MoPropertyMeta.READ_WRITE, 0x8, 0, 255, None, [], []), "status": MoPropertyMeta("status", "status", "string", VersionMeta.Version202c, MoPropertyMeta.READ_WRITE, 0x10, None, None, None, ["", "created", "deleted", "modified", "removed"], []), "child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version202c, MoPropertyMeta.INTERNAL, None, None, None, None, [], []), "drive_scope": MoPropertyMeta("drive_scope", "driveScope", "string", VersionMeta.Version202c, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []), "drive_status": MoPropertyMeta("drive_status", "driveStatus", "string", VersionMeta.Version202c, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []), "drive_type": MoPropertyMeta("drive_type", "driveType", "string", VersionMeta.Version202c, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []), "host_accessible": MoPropertyMeta("host_accessible", "hostAccessible", "string", VersionMeta.Version202c, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []), "last_operation_status": MoPropertyMeta("last_operation_status", "lastOperationStatus", "string", VersionMeta.Version202c, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []), "operation_in_progress": MoPropertyMeta("operation_in_progress", "operationInProgress", "string", VersionMeta.Version202c, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []), "partition_id": MoPropertyMeta("partition_id", "partitionId", "string", VersionMeta.Version202c, MoPropertyMeta.NAMING, None, 0, 510, None, [], []), "size": MoPropertyMeta("size", "size", "string", VersionMeta.Version202c, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []), "virtual_drive": MoPropertyMeta("virtual_drive", "virtualDrive", "string", VersionMeta.Version202c, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []), }, "modular": { "admin_action": MoPropertyMeta("admin_action", "adminAction", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x2, 0, 510, None, ["disable-vd", "enable-vd", 
"erase-vd", "sync-vd", "update-vd"], []), "dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x4, 0, 255, None, [], []), "rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x8, 0, 255, None, [], []), "status": MoPropertyMeta("status", "status", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x10, None, None, None, ["", "created", "deleted", "modified", "removed"], []), "child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version2013e, MoPropertyMeta.INTERNAL, None, None, None, None, [], []), "drive_scope": MoPropertyMeta("drive_scope", "driveScope", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []), "drive_status": MoPropertyMeta("drive_status", "driveStatus", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []), "drive_type": MoPropertyMeta("drive_type", "driveType", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []), "host_accessible": MoPropertyMeta("host_accessible", "hostAccessible", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []), "last_operation_status": MoPropertyMeta("last_operation_status", "lastOperationStatus", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []), "operation_in_progress": MoPropertyMeta("operation_in_progress", "operationInProgress", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []), "partition_id": MoPropertyMeta("partition_id", "partitionId", "string", VersionMeta.Version2013e, MoPropertyMeta.NAMING, None, 0, 510, None, [], []), "size": MoPropertyMeta("size", "size", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []), "virtual_drive": MoPropertyMeta("virtual_drive", "virtualDrive", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []), }, } prop_map = { "classic": { "adminAction": "admin_action", "dn": "dn", "rn": "rn", "status": "status", "childAction": "child_action", "driveScope": "drive_scope", "driveStatus": "drive_status", "driveType": "drive_type", "hostAccessible": "host_accessible", "lastOperationStatus": "last_operation_status", "operationInProgress": "operation_in_progress", "partitionId": "partition_id", "size": "size", "virtualDrive": "virtual_drive", }, "modular": { "adminAction": "admin_action", "dn": "dn", "rn": "rn", "status": "status", "childAction": "child_action", "driveScope": "drive_scope", "driveStatus": "drive_status", "driveType": "drive_type", "hostAccessible": "host_accessible", "lastOperationStatus": "last_operation_status", "operationInProgress": "operation_in_progress", "partitionId": "partition_id", "size": "size", "virtualDrive": "virtual_drive", }, } def __init__(self, parent_mo_or_dn, partition_id, **kwargs): self._dirty_mask = 0 self.partition_id = partition_id self.admin_action = None self.status = None self.child_action = None self.drive_scope = None self.drive_status = None self.drive_type = None self.host_accessible = None self.last_operation_status = None self.operation_in_progress = None self.size = None self.virtual_drive = None ManagedObject.__init__(self, "StorageFlexFlashVirtualDrive", parent_mo_or_dn, **kwargs)
apache-2.0
-984,964,799,914,068,200
65.175
258
0.625866
false
3.607905
false
false
false
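Editor's illustration (not part of the dataset row): constructing the managed object above offline just to stage an admin action. The parent DN is a hypothetical placeholder, and a live flow would push the change to the endpoint through an imcsdk session handle rather than printing it.

from imcsdk.mometa.storage.StorageFlexFlashVirtualDrive import (
    StorageFlexFlashVirtualDrive, StorageFlexFlashVirtualDriveConsts)

vd = StorageFlexFlashVirtualDrive(
    parent_mo_or_dn="sys/rack-unit-1/board/storage-flexflash-FlexFlash-0",  # hypothetical DN
    partition_id="SCU")
# admin_action must be one of the values enumerated in the Consts class / prop_meta
vd.admin_action = StorageFlexFlashVirtualDriveConsts.ADMIN_ACTION_ENABLE_VD
print(vd.partition_id, vd.admin_action)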
ErnieAllen/qpid-dispatch
tests/system_tests_protocol_settings.py
1
16207
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
#

from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function

import unittest2 as unittest
from system_test import TestCase, Qdrouterd, main_module
from proton.utils import BlockingConnection
import subprocess

X86_64_ARCH = "x86_64"

skip_test = True
# Don't skip tests on 64 bit architectures.
p = subprocess.Popen("uname -m", shell=True, stdout=subprocess.PIPE,
                     universal_newlines=True)
if X86_64_ARCH in p.communicate()[0]:
    skip_test = False


class MaxFrameMaxSessionFramesTest(TestCase):
    """System tests setting proton negotiated size max-frame-size and incoming-window"""
    @classmethod
    def setUpClass(cls):
        '''Start a router'''
        super(MaxFrameMaxSessionFramesTest, cls).setUpClass()
        name = "MaxFrameMaxSessionFrames"
        config = Qdrouterd.Config([
            ('router', {'mode': 'standalone', 'id': 'QDR'}),
            ('listener', {'host': '0.0.0.0', 'port': cls.tester.get_port(),
                          'maxFrameSize': '2048', 'maxSessionFrames': '10'}),
        ])
        cls.router = cls.tester.qdrouterd(name, config)
        cls.router.wait_ready()
        cls.address = cls.router.addresses[0]

    def test_max_frame_max_session_frames__max_sessions_default(self):
        # Set up a connection to get the Open and a receiver to get a Begin frame in the log
        bc = BlockingConnection(self.router.addresses[0])
        bc.create_receiver("xxx")
        bc.close()
        with open('../setUpClass/MaxFrameMaxSessionFrames.log', 'r') as router_log:
            log_lines = router_log.read().split("\n")
            open_lines = [s for s in log_lines if "-> @open" in s]
            # max-frame is from the config
            self.assertTrue(' max-frame-size=2048,' in open_lines[0])
            # channel-max is default
            self.assertTrue(" channel-max=32767" in open_lines[0])
            begin_lines = [s for s in log_lines if "-> @begin" in s]
            # incoming-window is from the config
            self.assertTrue(" incoming-window=10," in begin_lines[0])


class MaxSessionsTest(TestCase):
    """System tests setting proton channel-max"""
    @classmethod
    def setUpClass(cls):
        """Start a router and a messenger"""
        super(MaxSessionsTest, cls).setUpClass()
        name = "MaxSessions"
        config = Qdrouterd.Config([
            ('router', {'mode': 'standalone', 'id': 'QDR'}),
            ('listener', {'host': '0.0.0.0', 'port': cls.tester.get_port(),
                          'maxSessions': '10'}),
        ])
        cls.router = cls.tester.qdrouterd(name, config)
        cls.router.wait_ready()
        cls.address = cls.router.addresses[0]

    def test_max_sessions(self):
        # Set up a connection to get the Open and a receiver to get a Begin frame in the log
        bc = BlockingConnection(self.router.addresses[0])
        bc.create_receiver("xxx")
        bc.close()
        with open('../setUpClass/MaxSessions.log', 'r') as router_log:
            log_lines = router_log.read().split("\n")
            open_lines = [s for s in log_lines if "-> @open" in s]
            # channel-max is 10; advertised value is one less (channels are 0-based)
            self.assertTrue(" channel-max=9" in open_lines[0])


class MaxSessionsZeroTest(TestCase):
    """System tests setting proton channel-max"""
    @classmethod
    def setUpClass(cls):
        """Start a router and a messenger"""
        super(MaxSessionsZeroTest, cls).setUpClass()
        name = "MaxSessionsZero"
        config = Qdrouterd.Config([
            ('router', {'mode': 'standalone', 'id': 'QDR'}),
            ('listener', {'host': '0.0.0.0', 'port': cls.tester.get_port(),
                          'maxSessions': '0'}),
        ])
        cls.router = cls.tester.qdrouterd(name, config)
        cls.router.wait_ready()
        cls.address = cls.router.addresses[0]

    def test_max_sessions_zero(self):
        # Set up a connection to get the Open and a receiver to get a Begin frame in the log
        bc = BlockingConnection(self.router.addresses[0])
        bc.create_receiver("xxx")
        bc.close()
        with open('../setUpClass/MaxSessionsZero.log', 'r') as router_log:
            log_lines = router_log.read().split("\n")
            open_lines = [s for s in log_lines if "-> @open" in s]
            # channel-max is 0. Should get proton default 32767
            self.assertTrue(" channel-max=32767" in open_lines[0])


class MaxSessionsLargeTest(TestCase):
    """System tests setting proton channel-max"""
    @classmethod
    def setUpClass(cls):
        """Start a router and a messenger"""
        super(MaxSessionsLargeTest, cls).setUpClass()
        name = "MaxSessionsLarge"
        config = Qdrouterd.Config([
            ('router', {'mode': 'standalone', 'id': 'QDR'}),
            ('listener', {'host': '0.0.0.0', 'port': cls.tester.get_port(),
                          'maxSessions': '500000'}),
        ])
        cls.router = cls.tester.qdrouterd(name, config)
        cls.router.wait_ready()
        cls.address = cls.router.addresses[0]

    def test_max_sessions_large(self):
        # Set up a connection to get the Open and a receiver to get a Begin frame in the log
        bc = BlockingConnection(self.router.addresses[0])
        bc.create_receiver("xxx")
        bc.close()
        with open('../setUpClass/MaxSessionsLarge.log', 'r') as router_log:
            log_lines = router_log.read().split("\n")
            open_lines = [s for s in log_lines if "-> @open" in s]
            # channel-max of 500000 is too large; expect the proton default 32767
            self.assertTrue(" channel-max=32767" in open_lines[0])


class MaxFrameSmallTest(TestCase):
    """System tests setting proton max-frame-size"""
    @classmethod
    def setUpClass(cls):
        """Start a router and a messenger"""
        super(MaxFrameSmallTest, cls).setUpClass()
        name = "MaxFrameSmall"
        config = Qdrouterd.Config([
            ('router', {'mode': 'standalone', 'id': 'QDR'}),
            ('listener', {'host': '0.0.0.0', 'port': cls.tester.get_port(),
                          'maxFrameSize': '2'}),
        ])
        cls.router = cls.tester.qdrouterd(name, config)
        cls.router.wait_ready()
        cls.address = cls.router.addresses[0]

    def test_max_frame_small(self):
        # Set up a connection to get the Open and a receiver to get a Begin frame in the log
        bc = BlockingConnection(self.router.addresses[0])
        bc.create_receiver("xxx")
        bc.close()
        with open('../setUpClass/MaxFrameSmall.log', 'r') as router_log:
            log_lines = router_log.read().split("\n")
            open_lines = [s for s in log_lines if "-> @open" in s]
            # if frame size <= 512 proton sets the minimum of 512
            self.assertTrue(" max-frame-size=512" in open_lines[0])


class MaxFrameDefaultTest(TestCase):
    """System tests setting proton max-frame-size"""
    @classmethod
    def setUpClass(cls):
        """Start a router and a messenger"""
        super(MaxFrameDefaultTest, cls).setUpClass()
        name = "MaxFrameDefault"
        config = Qdrouterd.Config([
            ('router', {'mode': 'standalone', 'id': 'QDR'}),
            ('listener', {'host': '0.0.0.0', 'port': cls.tester.get_port()}),
        ])
        cls.router = cls.tester.qdrouterd(name, config)
        cls.router.wait_ready()
        cls.address = cls.router.addresses[0]

    def test_max_frame_default(self):
        # Set up a connection to get the Open and a receiver to get a Begin frame in the log
        bc = BlockingConnection(self.router.addresses[0])
        bc.create_receiver("xxx")
        bc.close()
        with open('../setUpClass/MaxFrameDefault.log', 'r') as router_log:
            log_lines = router_log.read().split("\n")
            open_lines = [s for s in log_lines if "-> @open" in s]
            # if frame size not set then a default is used
            self.assertTrue(" max-frame-size=16384" in open_lines[0])


class MaxSessionFramesDefaultTest(TestCase):
    """System tests setting proton max-frame-size and incoming-window defaults"""
    @classmethod
    def setUpClass(cls):
        """Start a router and a messenger"""
        super(MaxSessionFramesDefaultTest, cls).setUpClass()
        name = "MaxSessionFramesDefault"
        config = Qdrouterd.Config([
            ('router', {'mode': 'standalone', 'id': 'QDR'}),
            ('listener', {'host': '0.0.0.0', 'port': cls.tester.get_port()}),
        ])
        cls.router = cls.tester.qdrouterd(name, config)
        cls.router.wait_ready()
        cls.address = cls.router.addresses[0]

    def test_max_session_frames_default(self):
        # Set up a connection to get the Open and a receiver to get a Begin frame in the log
        if skip_test:
            return self.skipTest("Test skipped on non-64 bit architectures")

        bc = BlockingConnection(self.router.addresses[0])
        bc.create_receiver("xxx")
        bc.close()
        with open('../setUpClass/MaxSessionFramesDefault.log', 'r') as router_log:
            log_lines = router_log.read().split("\n")
            open_lines = [s for s in log_lines if "-> @open" in s]
            # if frame size not set then a default is used
            self.assertTrue(" max-frame-size=16384" in open_lines[0])
            begin_lines = [s for s in log_lines if "-> @begin" in s]
            # incoming-window defaults to 2^31-1
            self.assertTrue(" incoming-window=2147483647," in begin_lines[0])


class MaxFrameMaxSessionFramesZeroTest(TestCase):
    """
    System tests setting proton negotiated size max-frame-size and incoming-window
    when they are both zero. Frame size is bumped up to the minimum and capacity
    is bumped up to have an incoming window of 1
    """
    @classmethod
    def setUpClass(cls):
        '''Start a router'''
        super(MaxFrameMaxSessionFramesZeroTest, cls).setUpClass()
        name = "MaxFrameMaxSessionFramesZero"
        config = Qdrouterd.Config([
            ('router', {'mode': 'standalone', 'id': 'QDR'}),
            ('listener', {'host': '0.0.0.0', 'port': cls.tester.get_port(),
                          'maxFrameSize': '0', 'maxSessionFrames': '0'}),
        ])
        cls.router = cls.tester.qdrouterd(name, config)
        cls.router.wait_ready()
        cls.address = cls.router.addresses[0]

    def test_max_frame_max_session_zero(self):
        # Set up a connection to get the Open and a receiver to get a Begin frame in the log
        if skip_test:
            return self.skipTest("Test disabled on non-64 bit architectures")

        bc = BlockingConnection(self.router.addresses[0])
        bc.create_receiver("xxx")
        bc.close()
        with open('../setUpClass/MaxFrameMaxSessionFramesZero.log', 'r') as router_log:
            log_lines = router_log.read().split("\n")
            open_lines = [s for s in log_lines if "-> @open" in s]
            # max-frame gets set to protocol min
            self.assertTrue(' max-frame-size=512,' in open_lines[0])
            begin_lines = [s for s in log_lines if "-> @begin" in s]
            # incoming-window is defaulted to 2^31-1
            self.assertTrue(" incoming-window=2147483647," in begin_lines[0])


class ConnectorSettingsDefaultTest(TestCase):
    """
    The internal logic for protocol settings in listener and connector is common code.
    This test makes sure that defaults in the connector config make it to the wire.
    """
    inter_router_port = None

    @staticmethod
    def ssl_config(client_server, connection):
        return []  # Over-ridden by RouterTestSsl

    @classmethod
    def setUpClass(cls):
        """Start two routers"""
        super(ConnectorSettingsDefaultTest, cls).setUpClass()

        def router(name, client_server, connection):
            config = cls.ssl_config(client_server, connection) + [
                ('router', {'mode': 'interior', 'id': 'QDR.%s' % name}),
                ('listener', {'port': cls.tester.get_port()}),
                connection
            ]

            config = Qdrouterd.Config(config)
            cls.routers.append(cls.tester.qdrouterd(name, config, wait=True))

        cls.routers = []

        inter_router_port = cls.tester.get_port()

        router('A', 'server',
               ('listener', {'role': 'inter-router', 'port': inter_router_port}))
        router('B', 'client',
               ('connector', {'name': 'connectorToA', 'role': 'inter-router',
                              'port': inter_router_port,
                              'verifyHostname': 'no'}))

        cls.routers[0].wait_router_connected('QDR.B')
        cls.routers[1].wait_router_connected('QDR.A')

    def test_connector_default(self):
        if skip_test:
            return self.skipTest("Test disabled on non-64 bit architectures")

        with open('../setUpClass/A.log', 'r') as router_log:
            log_lines = router_log.read().split("\n")
            open_lines = [s for s in log_lines if "<- @open" in s]
            # defaults
            self.assertTrue(' max-frame-size=16384,' in open_lines[0])
            self.assertTrue(' channel-max=32767,' in open_lines[0])
            begin_lines = [s for s in log_lines if "<- @begin" in s]
            # defaults
            self.assertTrue(" incoming-window=2147483647," in begin_lines[0])


class ConnectorSettingsNondefaultTest(TestCase):
    """
    The internal logic for protocol settings in listener and connector is common code.
    This test makes sure that settings in the connector config make it to the wire.
    The listener tests test the setting logic.
    """
    inter_router_port = None

    @staticmethod
    def ssl_config(client_server, connection):
        return []  # Over-ridden by RouterTestSsl

    @classmethod
    def setUpClass(cls):
        """Start two routers"""
        super(ConnectorSettingsNondefaultTest, cls).setUpClass()

        def router(name, client_server, connection):
            config = cls.ssl_config(client_server, connection) + [
                ('router', {'mode': 'interior', 'id': 'QDR.%s' % name}),
                ('listener', {'port': cls.tester.get_port()}),
                connection
            ]

            config = Qdrouterd.Config(config)
            cls.routers.append(cls.tester.qdrouterd(name, config, wait=True))

        cls.routers = []

        inter_router_port = cls.tester.get_port()

        router('A', 'server',
               ('listener', {'role': 'inter-router', 'port': inter_router_port}))
        router('B', 'client',
               ('connector', {'name': 'connectorToA', 'role': 'inter-router',
                              'port': inter_router_port,
                              'maxFrameSize': '2048', 'maxSessionFrames': '10',
                              'maxSessions': '20',
                              'verifyHostname': 'no'}))

        cls.routers[0].wait_router_connected('QDR.B')
        cls.routers[1].wait_router_connected('QDR.A')

    def test_connector_default(self):
        with open('../setUpClass/A.log', 'r') as router_log:
            log_lines = router_log.read().split("\n")
            open_lines = [s for s in log_lines if "<- @open" in s]
            # nondefaults
            self.assertTrue(' max-frame-size=2048,' in open_lines[0])
            self.assertTrue(' channel-max=19,' in open_lines[0])
            begin_lines = [s for s in log_lines if "<- @begin" in s]
            # nondefaults
            self.assertTrue(" incoming-window=10," in begin_lines[0])


if __name__ == '__main__':
    unittest.main(main_module())
apache-2.0
-6,041,756,895,692,285,000
39.31592
127
0.604554
false
3.805353
true
false
false
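Editor's illustration (not part of the dataset row): the tests above all share one pattern, grep the router log for the first AMQP performative and assert on a field. A tiny helper capturing that pattern, with a hypothetical log path in the usage comment:

def first_frame_has(log_path, frame_marker, field):
    # e.g. frame_marker='-> @open', field=' channel-max=32767'
    with open(log_path, 'r') as router_log:
        lines = router_log.read().split("\n")
    frames = [s for s in lines if frame_marker in s]
    return bool(frames) and field in frames[0]

# assert first_frame_has('../setUpClass/MaxSessions.log', '-> @open', ' channel-max=9')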
acressity/acressity
narratives/forms.py
1
2019
from datetime import date

from django import forms
from narratives.models import Narrative
from django.forms.extras.widgets import SelectDateWidget
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from django.http import HttpResponse
from django.core.exceptions import PermissionDenied

TRANSFER_ACTION_CHOICES = (
    ('', '-----'),
    (1, _('Transfer')),
    (2, _('Copy')),
)


class NarrativeForm(forms.ModelForm):
    date_created = forms.DateField(widget=SelectDateWidget(years=range(timezone.now().year, timezone.now().year - 110, -1)), required=False)
    title = forms.CharField(widget=forms.TextInput(attrs={'class': 'larger', 'onfocus': 'if($(this).val()==this.defaultValue){$(this).val("")};', 'onblur': 'if($(this).val()==""){$(this).val(this.defaultValue)};'}))  # default value moved to views.py

    class Meta:
        model = Narrative
        exclude = ('gallery', 'author')

    def __init__(self, *args, **kwargs):
        self.author = kwargs.pop('author', None)
        super(NarrativeForm, self).__init__(*args, **kwargs)
        self.fields['experience'].queryset = self.author.experiences.all()

    def save(self, commit=True):
        instance = super(NarrativeForm, self).save(commit=False)
        if self.author:
            instance.author = self.author
        if commit:
            instance.save()
        return instance

    def clean_date_created(self):
        date_created = self.cleaned_data.get('date_created')
        if not date_created:
            date_created = timezone.now()
        return date_created

    def clean_body(self):
        body = self.cleaned_data.get('body')
        if len(body) < 3:
            raise forms.ValidationError('The narrative body needs a little more extrapolation')
        return body


class NarrativeTransferForm(forms.ModelForm):
    potential_actions = forms.ChoiceField(choices=TRANSFER_ACTION_CHOICES, required=False)

    class Meta:
        model = Narrative
        fields = ('title',)
gpl-3.0
-5,569,418,809,155,219,000
34.421053
250
0.65577
false
3.867816
false
false
false
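Editor's illustration (not part of the dataset row): a minimal view sketch showing how NarrativeForm's `author` kwarg is meant to be supplied. The view name, template path, and the use of `request.user` as the author object (it must expose an `experiences` manager) are assumptions.

from django.shortcuts import redirect, render
from narratives.forms import NarrativeForm

def narrative_create(request):
    # the form pops 'author' in __init__ and uses it to filter the experience queryset
    form = NarrativeForm(request.POST or None, author=request.user)
    if request.method == 'POST' and form.is_valid():
        narrative = form.save()  # save() stamps instance.author before committing
        return redirect(narrative)
    return render(request, 'narratives/narrative_form.html', {'form': form})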
rigetticomputing/grove
grove/tomography/state_tomography.py
1
11664
##############################################################################
# Copyright 2017-2018 Rigetti Computing
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################

import logging

import numpy as np
import matplotlib.pyplot as plt
from pyquil.quilbase import Pragma
from scipy.sparse import csr_matrix, coo_matrix
from pyquil.quil import Program

import grove.tomography.operator_utils
from grove.tomography.tomography import TomographyBase, TomographySettings, DEFAULT_SOLVER_KWARGS
from grove.tomography import tomography
import grove.tomography.utils as ut
import grove.tomography.operator_utils as o_ut

_log = logging.getLogger(__name__)

qt = ut.import_qutip()
cvxpy = ut.import_cvxpy()

UNIT_TRACE = 'unit_trace'
POSITIVE = 'positive'
DEFAULT_STATE_TOMO_SETTINGS = TomographySettings(
    constraints={UNIT_TRACE},
    solver_kwargs=DEFAULT_SOLVER_KWARGS
)


def _prepare_c_jk_m(readout_povm, pauli_basis, channel_ops):
    """
    Prepare the coefficient matrix for state tomography. This function uses sparse matrices
    for much greater efficiency. The coefficient matrix is defined as:

    .. math::

        C_{(jk)m} = \tr{\Pi_{s_j} \Lambda_k(P_m)} = \sum_{r} \pi_{jr} (\mathcal{R}_{k})_{rm}

    where :math:`\Lambda_k(\cdot)` is the quantum map corresponding to the k-th pre-measurement
    channel, i.e., :math:`\Lambda_k(\rho) = E_k \rho E_k^\dagger` where :math:`E_k` is the k-th
    channel operator. This map can also be represented via its transfer matrix
    :math:`\mathcal{R}_{k}`. In that case one also requires the overlap between the (generalized)
    Pauli basis ops and the projection operators
    :math:`\pi_{jl} := \sbraket{\Pi_j}{P_l} = \tr{\Pi_j P_l}`.
    See the grove documentation on tomography for detailed information.

    :param DiagonalPOVM readout_povm: The POVM corresponding to the readout plus classifier.
    :param OperatorBasis pauli_basis: The (generalized) Pauli basis employed in the estimation.
    :param list channel_ops: The pre-measurement channel operators as `qutip.Qobj`
    :return: The coefficient matrix necessary to set up the binomial state tomography problem.
    :rtype: scipy.sparse.csr_matrix
    """
    channel_transfer_matrices = [pauli_basis.transfer_matrix(qt.to_super(ek))
                                 for ek in channel_ops]

    # This bit could be more efficient but does not run super long and is thus preserved for
    # readability.
    pi_jr = csr_matrix(
        [pauli_basis.project_op(n_j).toarray().ravel()
         for n_j in readout_povm.ops])

    # Dict used for constructing our sparse matrix, keys are tuples (row_index, col_index),
    # values are the non-zero elements of the final matrix.
    c_jk_m_elms = {}

    # This explicitly exploits the sparsity of all operators involved
    for k in range(len(channel_ops)):
        pi_jr__rk_rm = (pi_jr * channel_transfer_matrices[k]).tocoo()
        for (j, m, val) in ut.izip(pi_jr__rk_rm.row, pi_jr__rk_rm.col, pi_jr__rk_rm.data):
            # The multi-index (j,k) is enumerated in column-major ordering (like Fortran arrays)
            c_jk_m_elms[(j + k * readout_povm.pi_basis.dim, m)] = val.real

    # create sparse matrix from COO-format (see scipy.sparse docs)
    _keys, _values = ut.izip(*c_jk_m_elms.items())
    _rows, _cols = ut.izip(*_keys)
    c_jk_m = coo_matrix((list(_values), (list(_rows), list(_cols))),
                        shape=(readout_povm.pi_basis.dim * len(channel_ops),
                               pauli_basis.dim)).tocsr()
    return c_jk_m


class StateTomography(TomographyBase):
    """
    A StateTomography object encapsulates the result of quantum state estimation from
    tomographic data. It provides convenience functions for visualization and computing
    state fidelities.
    """
    __tomography_type__ = "STATE"

    @staticmethod
    def estimate_from_ssr(histograms, readout_povm, channel_ops, settings):
        """
        Estimate a density matrix from single shot histograms obtained by measuring bitstrings
        in the Z-eigenbasis after application of given channel operators.

        :param numpy.ndarray histograms: The single shot histograms, `shape=(n_channels, dim)`.
        :param DiagonalPOVM readout_povm: The POVM corresponding to the readout plus classifier.
        :param list channel_ops: The tomography measurement channels as `qutip.Qobj`'s.
        :param TomographySettings settings: The solver and estimation settings.
        :return: The generated StateTomography object.
        :rtype: StateTomography
        """
        nqc = len(channel_ops[0].dims[0])
        pauli_basis = grove.tomography.operator_utils.PAULI_BASIS ** nqc
        pi_basis = readout_povm.pi_basis

        if not histograms.shape[1] == pi_basis.dim:  # pragma no coverage
            raise ValueError("Currently tomography is only implemented for two-level systems.")

        # prepare the log-likelihood function parameters, see documentation
        n_kj = np.asarray(histograms)
        c_jk_m = _prepare_c_jk_m(readout_povm, pauli_basis, channel_ops)
        rho_m = cvxpy.Variable(pauli_basis.dim)
        p_jk = c_jk_m * rho_m
        obj = -n_kj.ravel() * cvxpy.log(p_jk)

        p_jk_mat = cvxpy.reshape(p_jk, pi_basis.dim, len(channel_ops))  # cvxpy has col-major order

        # Default constraints:
        # MLE must describe valid probability distribution
        # i.e., for each k, p_jk must sum to one and be element-wise non-negative:
        # 1. \sum_j p_jk == 1  for all k
        # 2. p_jk >= 0         for all j, k
        # where p_jk = \sum_m c_jk_m rho_m
        constraints = [
            p_jk >= 0,
            np.matrix(np.ones((1, pi_basis.dim))) * p_jk_mat == 1,
        ]

        rho_m_real_imag = sum((rm * o_ut.to_realimag(Pm)
                               for (rm, Pm) in ut.izip(rho_m, pauli_basis.ops)), 0)

        if POSITIVE in settings.constraints:
            if tomography._SDP_SOLVER.is_functional():
                constraints.append(rho_m_real_imag >> 0)
            else:  # pragma no coverage
                _log.warning("No convex solver capable of semi-definite problems installed.\n"
                             "Dropping the positivity constraint on the density matrix.")

        if UNIT_TRACE in settings.constraints:
            # this assumes that the first element of the Pauli basis is always proportional to
            # the identity
            constraints.append(rho_m[0, 0] == 1. / pauli_basis.ops[0].tr().real)

        prob = cvxpy.Problem(cvxpy.Minimize(obj), constraints)

        _log.info("Starting convex solver")
        prob.solve(solver=tomography.SOLVER, **settings.solver_kwargs)
        if prob.status != cvxpy.OPTIMAL:  # pragma no coverage
            _log.warning("Problem did not converge to optimal solution. "
                         "Solver settings: {}".format(settings.solver_kwargs))

        return StateTomography(np.array(rho_m.value).ravel(), pauli_basis, settings)

    def __init__(self, rho_coeffs, pauli_basis, settings):
        """
        Construct a StateTomography to encapsulate the result of estimating the quantum state
        from a quantum tomography measurement.

        :param numpy.ndarray rho_coeffs: The estimated quantum state represented in a given
        (generalized) Pauli basis.
        :param OperatorBasis pauli_basis: The employed (generalized) Pauli basis.
        :param TomographySettings settings: The settings used to estimate the state.
        """
        self.rho_coeffs = rho_coeffs
        self.pauli_basis = pauli_basis
        self.rho_est = sum((r_m * p_m for r_m, p_m in ut.izip(rho_coeffs, pauli_basis.ops)))
        self.settings = settings

    def fidelity(self, other):
        """
        Compute the quantum state fidelity of the estimated state with another state.

        :param qutip.Qobj other: The other quantum state.
        :return: The fidelity, a real number between 0 and 1.
        :rtype: float
        """
        return qt.fidelity(self.rho_est, other)

    def plot_state_histogram(self, ax):
        """
        Visualize the complex matrix elements of the estimated state.

        :param matplotlib.Axes ax: A matplotlib Axes object to plot into.
        """
        title = "Estimated state"
        nqc = int(round(np.log2(self.rho_est.data.shape[0])))
        labels = ut.basis_labels(nqc)
        return ut.state_histogram(self.rho_est, ax, title)

    def plot(self):
        """
        Visualize the state.

        :return: The generated figure.
        :rtype: matplotlib.Figure
        """
        width = 10
        # The pleasing golden ratio.
        height = width / 1.618
        f = plt.figure(figsize=(width, height))
        ax = f.add_subplot(111, projection="3d")
        self.plot_state_histogram(ax)
        return f


def state_tomography_programs(state_prep, qubits=None,
                              rotation_generator=tomography.default_rotations):
    """
    Yield tomographic sequences that prepare a state with Quil program `state_prep` and then
    append tomographic rotations on the specified `qubits`. If `qubits is None`, it assumes all
    qubits in the program should be tomographically rotated.

    :param Program state_prep: The program to prepare the state to be tomographed.
    :param list|NoneType qubits: A list of Qubits or Numbers, to perform the tomography on. If
    `None`, performs it on all in state_prep.
    :param generator rotation_generator: A generator that yields tomography rotations to perform.
    :return: Program for state tomography.
    :rtype: Program
    """
    if qubits is None:
        qubits = state_prep.get_qubits()
    for tomography_program in rotation_generator(*qubits):
        state_tomography_program = Program(Pragma("PRESERVE_BLOCK"))
        state_tomography_program.inst(state_prep)
        state_tomography_program.inst(tomography_program)
        state_tomography_program.inst(Pragma("END_PRESERVE_BLOCK"))
        yield state_tomography_program


def do_state_tomography(preparation_program, nsamples, cxn, qubits=None, use_run=False):
    """
    Method to perform both a QPU and QVM state tomography, and use the latter as a reference to
    calculate the fidelity of the former.

    :param Program preparation_program: Program to execute.
    :param int nsamples: Number of samples to take for the program.
    :param QVMConnection|QPUConnection cxn: Connection on which to run the program.
    :param list qubits: List of qubits for the program to use in the tomography analysis.
    :param bool use_run: If ``True``, append measurements on all qubits and use ``cxn.run``
    instead of ``cxn.run_and_measure``.
    :return: The state tomogram.
    :rtype: StateTomography
    """
    return tomography._do_tomography(preparation_program, nsamples, cxn, qubits,
                                     tomography.MAX_QUBITS_STATE_TOMO,
                                     StateTomography, state_tomography_programs,
                                     DEFAULT_STATE_TOMO_SETTINGS, use_run=use_run)
apache-2.0
-8,882,595,617,667,638,000
43.015094
100
0.650034
false
3.650704
false
false
false
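Editor's illustration (not part of the dataset row): driving the module above end to end against a QVM. The connection setup and sample count are assumptions, and depending on the grove version the call may return a (tomogram, assignment_probs, histograms) triple rather than the bare tomogram the docstring describes, so the sketch handles both shapes.

from pyquil.api import QVMConnection
from pyquil.quil import Program
from pyquil.gates import H, CNOT
from grove.tomography.state_tomography import do_state_tomography

qvm = QVMConnection()                      # assumes a local QVM endpoint
prep = Program(H(0), CNOT(0, 1))           # prepare a two-qubit Bell state
result = do_state_tomography(prep, nsamples=1000, cxn=qvm, qubits=[0, 1])
tomo = result[0] if isinstance(result, tuple) else result
print(tomo.rho_est)                        # estimated density matrix (qutip.Qobj)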
ecohealthalliance/eidr-connect
.scripts/utils.py
1
1603
import re
import requests
import os
import functools
import json
try:
    from functools import lru_cache
except ImportError:
    from backports.functools_lru_cache import lru_cache

GRITS_URL = os.environ.get("GRITS_URL", "https://grits.eha.io")


def clean(s):
    return re.sub(r"\s+", " ", s).strip()


def clean_disease_name(s):
    # Modifiers that make case counts more specific need to be treated
    # specially because constraining counts for the general disease cannot be
    # created from them.
    # s = re.sub(r"^(Highly Pathogenic|Virulent|Suspected)", "", s, re.I)
    # s = re.sub(" Serotype .+$", "", s, re.I)
    # Remove hyphens
    s = re.sub(r"\-", "", s)
    s = re.sub(r"\(.*\)", "", s)
    s = re.sub(r"\[.*\]", "", s)
    return clean(s)


@lru_cache()
def lookup_geoname(name):
    resp = requests.get(GRITS_URL + "/api/geoname_lookup/api/lookup", params={
        "q": name
    })
    result = json.loads(resp.text)["hits"][0]["_source"]
    del result["alternateNames"]
    del result["rawNames"]
    del result["asciiName"]
    del result["cc2"]
    del result["elevation"]
    del result["dem"]
    del result["timezone"]
    del result["modificationDate"]
    return result


@lru_cache()
def lookup_disease(name):
    if len(name) == 0:
        return None
    resp = requests.get(GRITS_URL + "/api/v1/disease_ontology/lookup", params={
        "q": name
    })
    result = resp.json()
    first_result = next(iter(result["result"]), None)
    if first_result:
        return {
            "id": first_result["id"],
            "text": first_result["label"]
        }
apache-2.0
-169,896,742,788,247,500
24.870968
79
0.601996
false
3.238384
false
false
false
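Editor's illustration (not part of the dataset row): exercising the helpers above. The lookups hit the live GRITS endpoint, so network access is required and the returned records are not guaranteed.

from utils import clean_disease_name, lookup_disease, lookup_geoname

print(clean_disease_name("Influenza - H5N1 (HPAI)"))   # -> "Influenza H5N1"
disease = lookup_disease("measles")    # {'id': ..., 'text': ...} or None
place = lookup_geoname("Berlin")       # geoname dict with bulky keys stripped
print(disease, place.get("name") if place else None)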
kevin-intel/scikit-learn
sklearn/datasets/_openml.py
2
34451
import gzip import json import os import shutil import hashlib from os.path import join from warnings import warn from contextlib import closing from functools import wraps from typing import Callable, Optional, Dict, Tuple, List, Any, Union import itertools from collections.abc import Generator from collections import OrderedDict from functools import partial from urllib.request import urlopen, Request import numpy as np import scipy.sparse from ..externals import _arff from ..externals._arff import ArffSparseDataType, ArffContainerType from . import get_data_home from urllib.error import HTTPError from ..utils import Bunch from ..utils import is_scalar_nan from ..utils import get_chunk_n_rows from ..utils import _chunk_generator from ..utils import check_pandas_support # noqa __all__ = ['fetch_openml'] _OPENML_PREFIX = "https://openml.org/" _SEARCH_NAME = "api/v1/json/data/list/data_name/{}/limit/2" _DATA_INFO = "api/v1/json/data/{}" _DATA_FEATURES = "api/v1/json/data/features/{}" _DATA_QUALITIES = "api/v1/json/data/qualities/{}" _DATA_FILE = "data/v1/download/{}" OpenmlQualitiesType = List[Dict[str, str]] OpenmlFeaturesType = List[Dict[str, str]] def _get_local_path(openml_path: str, data_home: str) -> str: return os.path.join(data_home, 'openml.org', openml_path + ".gz") def _retry_with_clean_cache( openml_path: str, data_home: Optional[str] ) -> Callable: """If the first call to the decorated function fails, the local cached file is removed, and the function is called again. If ``data_home`` is ``None``, then the function is called once. """ def decorator(f): @wraps(f) def wrapper(*args, **kw): if data_home is None: return f(*args, **kw) try: return f(*args, **kw) except HTTPError: raise except Exception: warn("Invalid cache, redownloading file", RuntimeWarning) local_path = _get_local_path(openml_path, data_home) if os.path.exists(local_path): os.unlink(local_path) return f(*args, **kw) return wrapper return decorator def _open_openml_url(openml_path: str, data_home: Optional[str]): """ Returns a resource from OpenML.org. Caches it to data_home if required. Parameters ---------- openml_path : str OpenML URL that will be accessed. This will be prefixes with _OPENML_PREFIX data_home : str Directory to which the files will be cached. If None, no caching will be applied. 
Returns ------- result : stream A stream to the OpenML resource """ def is_gzip_encoded(_fsrc): return _fsrc.info().get('Content-Encoding', '') == 'gzip' req = Request(_OPENML_PREFIX + openml_path) req.add_header('Accept-encoding', 'gzip') if data_home is None: fsrc = urlopen(req) if is_gzip_encoded(fsrc): return gzip.GzipFile(fileobj=fsrc, mode='rb') return fsrc local_path = _get_local_path(openml_path, data_home) if not os.path.exists(local_path): try: os.makedirs(os.path.dirname(local_path)) except OSError: # potentially, the directory has been created already pass try: with closing(urlopen(req)) as fsrc: opener: Callable if is_gzip_encoded(fsrc): opener = open else: opener = gzip.GzipFile with opener(local_path, 'wb') as fdst: shutil.copyfileobj(fsrc, fdst) except Exception: if os.path.exists(local_path): os.unlink(local_path) raise # XXX: First time, decompression will not be necessary (by using fsrc), but # it will happen nonetheless return gzip.GzipFile(local_path, 'rb') class OpenMLError(ValueError): """HTTP 412 is a specific OpenML error code, indicating a generic error""" pass def _get_json_content_from_openml_api( url: str, error_message: Optional[str], data_home: Optional[str] ) -> Dict: """ Loads json data from the openml api Parameters ---------- url : str The URL to load from. Should be an official OpenML endpoint error_message : str or None The error message to raise if an acceptable OpenML error is thrown (acceptable error is, e.g., data id not found. Other errors, like 404's will throw the native error message) data_home : str or None Location to cache the response. None if no cache is required. Returns ------- json_data : json the json result from the OpenML server if the call was successful. An exception otherwise. """ @_retry_with_clean_cache(url, data_home) def _load_json(): with closing(_open_openml_url(url, data_home)) as response: return json.loads(response.read().decode("utf-8")) try: return _load_json() except HTTPError as error: # 412 is an OpenML specific error code, indicating a generic error # (e.g., data not found) if error.code != 412: raise error # 412 error, not in except for nicer traceback raise OpenMLError(error_message) def _split_sparse_columns( arff_data: ArffSparseDataType, include_columns: List ) -> ArffSparseDataType: """ obtains several columns from sparse arff representation. Additionally, the column indices are re-labelled, given the columns that are not included. (e.g., when including [1, 2, 3], the columns will be relabelled to [0, 1, 2]) Parameters ---------- arff_data : tuple A tuple of three lists of equal size; first list indicating the value, second the x coordinate and the third the y coordinate. include_columns : list A list of columns to include. Returns ------- arff_data_new : tuple Subset of arff data with only the include columns indicated by the include_columns argument. 
""" arff_data_new: ArffSparseDataType = (list(), list(), list()) reindexed_columns = {column_idx: array_idx for array_idx, column_idx in enumerate(include_columns)} for val, row_idx, col_idx in zip(arff_data[0], arff_data[1], arff_data[2]): if col_idx in include_columns: arff_data_new[0].append(val) arff_data_new[1].append(row_idx) arff_data_new[2].append(reindexed_columns[col_idx]) return arff_data_new def _sparse_data_to_array( arff_data: ArffSparseDataType, include_columns: List ) -> np.ndarray: # turns the sparse data back into an array (can't use toarray() function, # as this does only work on numeric data) num_obs = max(arff_data[1]) + 1 y_shape = (num_obs, len(include_columns)) reindexed_columns = {column_idx: array_idx for array_idx, column_idx in enumerate(include_columns)} # TODO: improve for efficiency y = np.empty(y_shape, dtype=np.float64) for val, row_idx, col_idx in zip(arff_data[0], arff_data[1], arff_data[2]): if col_idx in include_columns: y[row_idx, reindexed_columns[col_idx]] = val return y def _convert_arff_data( arff: ArffContainerType, col_slice_x: List[int], col_slice_y: List[int], shape: Optional[Tuple] = None ) -> Tuple: """ converts the arff object into the appropriate matrix type (np.array or scipy.sparse.csr_matrix) based on the 'data part' (i.e., in the liac-arff dict, the object from the 'data' key) Parameters ---------- arff : dict As obtained from liac-arff object. col_slice_x : list The column indices that are sliced from the original array to return as X data col_slice_y : list The column indices that are sliced from the original array to return as y data Returns ------- X : np.array or scipy.sparse.csr_matrix y : np.array """ arff_data = arff['data'] if isinstance(arff_data, Generator): if shape is None: raise ValueError( "shape must be provided when arr['data'] is a Generator" ) if shape[0] == -1: count = -1 else: count = shape[0] * shape[1] data = np.fromiter(itertools.chain.from_iterable(arff_data), dtype='float64', count=count) data = data.reshape(*shape) X = data[:, col_slice_x] y = data[:, col_slice_y] return X, y elif isinstance(arff_data, tuple): arff_data_X = _split_sparse_columns(arff_data, col_slice_x) num_obs = max(arff_data[1]) + 1 X_shape = (num_obs, len(col_slice_x)) X = scipy.sparse.coo_matrix( (arff_data_X[0], (arff_data_X[1], arff_data_X[2])), shape=X_shape, dtype=np.float64) X = X.tocsr() y = _sparse_data_to_array(arff_data, col_slice_y) return X, y else: # This should never happen raise ValueError('Unexpected Data Type obtained from arff.') def _feature_to_dtype(feature: Dict[str, str]): """Map feature to dtype for pandas DataFrame """ if feature['data_type'] == 'string': return object elif feature['data_type'] == 'nominal': return 'category' # only numeric, integer, real are left elif (feature['number_of_missing_values'] != '0' or feature['data_type'] in ['numeric', 'real']): # cast to floats when there are any missing values return np.float64 elif feature['data_type'] == 'integer': return np.int64 raise ValueError('Unsupported feature: {}'.format(feature)) def _convert_arff_data_dataframe( arff: ArffContainerType, columns: List, features_dict: Dict[str, Any] ) -> Tuple: """Convert the ARFF object into a pandas DataFrame. Parameters ---------- arff : dict As obtained from liac-arff object. columns : list Columns from dataframe to return. features_dict : dict Maps feature name to feature info from openml. 
Returns ------- result : tuple tuple with the resulting dataframe """ pd = check_pandas_support('fetch_openml with as_frame=True') attributes = OrderedDict(arff['attributes']) arff_columns = list(attributes) if not isinstance(arff['data'], Generator): raise ValueError( "arff['data'] must be a generator when converting to pd.DataFrame." ) # calculate chunksize first_row = next(arff['data']) first_df = pd.DataFrame([first_row], columns=arff_columns) row_bytes = first_df.memory_usage(deep=True).sum() chunksize = get_chunk_n_rows(row_bytes) # read arff data with chunks columns_to_keep = [col for col in arff_columns if col in columns] dfs = [] dfs.append(first_df[columns_to_keep]) for data in _chunk_generator(arff['data'], chunksize): dfs.append(pd.DataFrame(data, columns=arff_columns)[columns_to_keep]) df = pd.concat(dfs, ignore_index=True) for column in columns_to_keep: dtype = _feature_to_dtype(features_dict[column]) if dtype == 'category': cats_without_missing = [cat for cat in attributes[column] if cat is not None and not is_scalar_nan(cat)] dtype = pd.api.types.CategoricalDtype(cats_without_missing) df[column] = df[column].astype(dtype, copy=False) return (df, ) def _get_data_info_by_name( name: str, version: Union[int, str], data_home: Optional[str] ): """ Utilizes the openml dataset listing api to find a dataset by name/version OpenML api function: https://www.openml.org/api_docs#!/data/get_data_list_data_name_data_name Parameters ---------- name : str name of the dataset version : int or str If version is an integer, the exact name/version will be obtained from OpenML. If version is a string (value: "active") it will take the first version from OpenML that is annotated as active. Any other string values except "active" are treated as integer. data_home : str or None Location to cache the response. None if no cache is required. Returns ------- first_dataset : json json representation of the first dataset object that adhired to the search criteria """ if version == "active": # situation in which we return the oldest active version url = _SEARCH_NAME.format(name) + "/status/active/" error_msg = "No active dataset {} found.".format(name) json_data = _get_json_content_from_openml_api( url, error_msg, data_home=data_home ) res = json_data['data']['dataset'] if len(res) > 1: warn("Multiple active versions of the dataset matching the name" " {name} exist. Versions may be fundamentally different, " "returning version" " {version}.".format(name=name, version=res[0]['version'])) return res[0] # an integer version has been provided url = (_SEARCH_NAME + "/data_version/{}").format(name, version) try: json_data = _get_json_content_from_openml_api( url, error_message=None, data_home=data_home ) except OpenMLError: # we can do this in 1 function call if OpenML does not require the # specification of the dataset status (i.e., return datasets with a # given name / version regardless of active, deactivated, etc. ) # TODO: feature request OpenML. 
url += "/status/deactivated" error_msg = "Dataset {} with version {} not found.".format(name, version) json_data = _get_json_content_from_openml_api( url, error_msg, data_home=data_home ) return json_data['data']['dataset'][0] def _get_data_description_by_id( data_id: int, data_home: Optional[str] ) -> Dict[str, Any]: # OpenML API function: https://www.openml.org/api_docs#!/data/get_data_id url = _DATA_INFO.format(data_id) error_message = "Dataset with data_id {} not found.".format(data_id) json_data = _get_json_content_from_openml_api( url, error_message, data_home=data_home ) return json_data['data_set_description'] def _get_data_features( data_id: int, data_home: Optional[str] ) -> OpenmlFeaturesType: # OpenML function: # https://www.openml.org/api_docs#!/data/get_data_features_id url = _DATA_FEATURES.format(data_id) error_message = "Dataset with data_id {} not found.".format(data_id) json_data = _get_json_content_from_openml_api( url, error_message, data_home=data_home ) return json_data['data_features']['feature'] def _get_data_qualities( data_id: int, data_home: Optional[str] ) -> OpenmlQualitiesType: # OpenML API function: # https://www.openml.org/api_docs#!/data/get_data_qualities_id url = _DATA_QUALITIES.format(data_id) error_message = "Dataset with data_id {} not found.".format(data_id) json_data = _get_json_content_from_openml_api( url, error_message, data_home=data_home ) # the qualities might not be available, but we still try to process # the data return json_data.get('data_qualities', {}).get('quality', []) def _get_num_samples(data_qualities: OpenmlQualitiesType) -> int: """Get the number of samples from data qualities. Parameters ---------- data_qualities : list of dict Used to retrieve the number of instances (samples) in the dataset. Returns ------- n_samples : int The number of samples in the dataset or -1 if data qualities are unavailable. """ # If the data qualities are unavailable, we return -1 default_n_samples = -1 qualities = {d['name']: d['value'] for d in data_qualities} return int(float(qualities.get('NumberOfInstances', default_n_samples))) def _load_arff_response( url: str, data_home: Optional[str], return_type, encode_nominal: bool, parse_arff: Callable[[ArffContainerType], Tuple], md5_checksum: str ) -> Tuple: """Load arff data with url and parses arff response with parse_arff""" response = _open_openml_url(url, data_home) with closing(response): # Note that if the data is dense, no reading is done until the data # generator is iterated. actual_md5_checksum = hashlib.md5() def _stream_checksum_generator(response): for line in response: actual_md5_checksum.update(line) yield line.decode('utf-8') stream = _stream_checksum_generator(response) arff = _arff.load(stream, return_type=return_type, encode_nominal=encode_nominal) parsed_arff = parse_arff(arff) # consume remaining stream, if early exited for _ in stream: pass if actual_md5_checksum.hexdigest() != md5_checksum: raise ValueError("md5 checksum of local file for " + url + " does not match description. " "Downloaded file could have been modified / " "corrupted, clean cache and retry...") return parsed_arff def _download_data_to_bunch( url: str, sparse: bool, data_home: Optional[str], *, as_frame: bool, features_list: List, data_columns: List[int], target_columns: List, shape: Optional[Tuple[int, int]], md5_checksum: str ): """Download OpenML ARFF and convert to Bunch of data """ # NB: this function is long in order to handle retry for any failure # during the streaming parse of the ARFF. 
# Prepare which columns and data types should be returned for the X and y features_dict = {feature['name']: feature for feature in features_list} # XXX: col_slice_y should be all nominal or all numeric _verify_target_data_type(features_dict, target_columns) col_slice_y = [int(features_dict[col_name]['index']) for col_name in target_columns] col_slice_x = [int(features_dict[col_name]['index']) for col_name in data_columns] for col_idx in col_slice_y: feat = features_list[col_idx] nr_missing = int(feat['number_of_missing_values']) if nr_missing > 0: raise ValueError('Target column {} has {} missing values. ' 'Missing values are not supported for target ' 'columns. '.format(feat['name'], nr_missing)) # Access an ARFF file on the OpenML server. Documentation: # https://www.openml.org/api_data_docs#!/data/get_download_id if sparse is True: return_type = _arff.COO else: return_type = _arff.DENSE_GEN frame = nominal_attributes = None parse_arff: Callable postprocess: Callable if as_frame: columns = data_columns + target_columns parse_arff = partial(_convert_arff_data_dataframe, columns=columns, features_dict=features_dict) def postprocess(frame): X = frame[data_columns] if len(target_columns) >= 2: y = frame[target_columns] elif len(target_columns) == 1: y = frame[target_columns[0]] else: y = None return X, y, frame, nominal_attributes else: def parse_arff(arff): X, y = _convert_arff_data(arff, col_slice_x, col_slice_y, shape) # nominal attributes is a dict mapping from the attribute name to # the possible values. Includes also the target column (which will # be popped off below, before it will be packed in the Bunch # object) nominal_attributes = {k: v for k, v in arff['attributes'] if isinstance(v, list) and k in data_columns + target_columns} return X, y, nominal_attributes def postprocess(X, y, nominal_attributes): is_classification = {col_name in nominal_attributes for col_name in target_columns} if not is_classification: # No target pass elif all(is_classification): y = np.hstack([ np.take( np.asarray(nominal_attributes.pop(col_name), dtype='O'), y[:, i:i + 1].astype(int, copy=False)) for i, col_name in enumerate(target_columns) ]) elif any(is_classification): raise ValueError('Mix of nominal and non-nominal targets is ' 'not currently supported') # reshape y back to 1-D array, if there is only 1 target column; # back to None if there are not target columns if y.shape[1] == 1: y = y.reshape((-1,)) elif y.shape[1] == 0: y = None return X, y, frame, nominal_attributes out = _retry_with_clean_cache(url, data_home)( _load_arff_response)(url, data_home, return_type=return_type, encode_nominal=not as_frame, parse_arff=parse_arff, md5_checksum=md5_checksum) X, y, frame, nominal_attributes = postprocess(*out) return Bunch(data=X, target=y, frame=frame, categories=nominal_attributes, feature_names=data_columns, target_names=target_columns) def _verify_target_data_type(features_dict, target_columns): # verifies the data type of the y array in case there are multiple targets # (throws an error if these targets do not comply with sklearn support) if not isinstance(target_columns, list): raise ValueError('target_column should be list, ' 'got: %s' % type(target_columns)) found_types = set() for target_column in target_columns: if target_column not in features_dict: raise KeyError('Could not find target_column={}') if features_dict[target_column]['data_type'] == "numeric": found_types.add(np.float64) else: found_types.add(object) # note: we compare to a string, not boolean if 
features_dict[target_column]['is_ignore'] == 'true': warn('target_column={} has flag is_ignore.'.format( target_column)) if features_dict[target_column]['is_row_identifier'] == 'true': warn('target_column={} has flag is_row_identifier.'.format( target_column)) if len(found_types) > 1: raise ValueError('Can only handle homogeneous multi-target datasets, ' 'i.e., all targets are either numeric or ' 'categorical.') def _valid_data_column_names(features_list, target_columns): # logic for determining on which columns can be learned. Note that from the # OpenML guide follows that columns that have the `is_row_identifier` or # `is_ignore` flag, these can not be learned on. Also target columns are # excluded. valid_data_column_names = [] for feature in features_list: if (feature['name'] not in target_columns and feature['is_ignore'] != 'true' and feature['is_row_identifier'] != 'true'): valid_data_column_names.append(feature['name']) return valid_data_column_names def fetch_openml( name: Optional[str] = None, *, version: Union[str, int] = 'active', data_id: Optional[int] = None, data_home: Optional[str] = None, target_column: Optional[Union[str, List]] = 'default-target', cache: bool = True, return_X_y: bool = False, as_frame: Union[str, bool] = 'auto' ): """Fetch dataset from openml by name or dataset id. Datasets are uniquely identified by either an integer ID or by a combination of name and version (i.e. there might be multiple versions of the 'iris' dataset). Please give either name or data_id (not both). In case a name is given, a version can also be provided. Read more in the :ref:`User Guide <openml>`. .. versionadded:: 0.20 .. note:: EXPERIMENTAL The API is experimental (particularly the return value structure), and might have small backward-incompatible changes without notice or warning in future releases. Parameters ---------- name : str, default=None String identifier of the dataset. Note that OpenML can have multiple datasets with the same name. version : int or 'active', default='active' Version of the dataset. Can only be provided if also ``name`` is given. If 'active' the oldest version that's still active is used. Since there may be more than one active version of a dataset, and those versions may fundamentally be different from one another, setting an exact version is highly recommended. data_id : int, default=None OpenML ID of the dataset. The most specific way of retrieving a dataset. If data_id is not given, name (and potential version) are used to obtain a dataset. data_home : str, default=None Specify another download and cache folder for the data sets. By default all scikit-learn data is stored in '~/scikit_learn_data' subfolders. target_column : str, list or None, default='default-target' Specify the column name in the data to use as target. If 'default-target', the standard target column a stored on the server is used. If ``None``, all columns are returned as data and the target is ``None``. If list (of strings), all columns with these names are returned as multi-target (Note: not all scikit-learn classifiers can handle all types of multi-output combinations) cache : bool, default=True Whether to cache downloaded datasets using joblib. return_X_y : bool, default=False If True, returns ``(data, target)`` instead of a Bunch object. See below for more information about the `data` and `target` objects. as_frame : bool or 'auto', default='auto' If True, the data is a pandas DataFrame including columns with appropriate dtypes (numeric, string or categorical). 
The target is a pandas DataFrame or Series depending on the number of target_columns. The Bunch will contain a ``frame`` attribute with the target and the data. If ``return_X_y`` is True, then ``(data, target)`` will be pandas DataFrames or Series as describe above. If as_frame is 'auto', the data and target will be converted to DataFrame or Series as if as_frame is set to True, unless the dataset is stored in sparse format. .. versionchanged:: 0.24 The default value of `as_frame` changed from `False` to `'auto'` in 0.24. Returns ------- data : :class:`~sklearn.utils.Bunch` Dictionary-like object, with the following attributes. data : np.array, scipy.sparse.csr_matrix of floats, or pandas DataFrame The feature matrix. Categorical features are encoded as ordinals. target : np.array, pandas Series or DataFrame The regression target or classification labels, if applicable. Dtype is float if numeric, and object if categorical. If ``as_frame`` is True, ``target`` is a pandas object. DESCR : str The full description of the dataset feature_names : list The names of the dataset columns target_names: list The names of the target columns .. versionadded:: 0.22 categories : dict or None Maps each categorical feature name to a list of values, such that the value encoded as i is ith in the list. If ``as_frame`` is True, this is None. details : dict More metadata from OpenML frame : pandas DataFrame Only present when `as_frame=True`. DataFrame with ``data`` and ``target``. (data, target) : tuple if ``return_X_y`` is True .. note:: EXPERIMENTAL This interface is **experimental** and subsequent releases may change attributes without notice (although there should only be minor changes to ``data`` and ``target``). Missing values in the 'data' are represented as NaN's. Missing values in 'target' are represented as NaN's (numerical target) or None (categorical target) """ if cache is False: # no caching will be applied data_home = None else: data_home = get_data_home(data_home=data_home) data_home = join(data_home, 'openml') # check valid function arguments. data_id XOR (name, version) should be # provided if name is not None: # OpenML is case-insensitive, but the caching mechanism is not # convert all data names (str) to lower case name = name.lower() if data_id is not None: raise ValueError( "Dataset data_id={} and name={} passed, but you can only " "specify a numeric data_id or a name, not " "both.".format(data_id, name)) data_info = _get_data_info_by_name(name, version, data_home) data_id = data_info['did'] elif data_id is not None: # from the previous if statement, it is given that name is None if version != "active": raise ValueError( "Dataset data_id={} and version={} passed, but you can only " "specify a numeric data_id or a version, not " "both.".format(data_id, version)) else: raise ValueError( "Neither name nor data_id are provided. Please provide name or " "data_id.") data_description = _get_data_description_by_id(data_id, data_home) if data_description['status'] != "active": warn("Version {} of dataset {} is inactive, meaning that issues have " "been found in the dataset. Try using a newer version from " "this URL: {}".format( data_description['version'], data_description['name'], data_description['url'])) if 'error' in data_description: warn("OpenML registered a problem with the dataset. It might be " "unusable. Error: {}".format(data_description['error'])) if 'warning' in data_description: warn("OpenML raised a warning on the dataset. It might be " "unusable. 
Warning: {}".format(data_description['warning'])) return_sparse = False if data_description['format'].lower() == 'sparse_arff': return_sparse = True if as_frame == 'auto': as_frame = not return_sparse if as_frame and return_sparse: raise ValueError('Cannot return dataframe with sparse data') # download data features, meta-info about column types features_list = _get_data_features(data_id, data_home) if not as_frame: for feature in features_list: if 'true' in (feature['is_ignore'], feature['is_row_identifier']): continue if feature['data_type'] == 'string': raise ValueError('STRING attributes are not supported for ' 'array representation. Try as_frame=True') if target_column == "default-target": # determines the default target based on the data feature results # (which is currently more reliable than the data description; # see issue: https://github.com/openml/OpenML/issues/768) target_columns = [feature['name'] for feature in features_list if feature['is_target'] == 'true'] elif isinstance(target_column, str): # for code-simplicity, make target_column by default a list target_columns = [target_column] elif target_column is None: target_columns = [] elif isinstance(target_column, list): target_columns = target_column else: raise TypeError("Did not recognize type of target_column" "Should be str, list or None. Got: " "{}".format(type(target_column))) data_columns = _valid_data_column_names(features_list, target_columns) shape: Optional[Tuple[int, int]] # determine arff encoding to return if not return_sparse: # The shape must include the ignored features to keep the right indexes # during the arff data conversion. data_qualities = _get_data_qualities(data_id, data_home) shape = _get_num_samples(data_qualities), len(features_list) else: shape = None # obtain the data url = _DATA_FILE.format(data_description['file_id']) bunch = _download_data_to_bunch(url, return_sparse, data_home, as_frame=bool(as_frame), features_list=features_list, shape=shape, target_columns=target_columns, data_columns=data_columns, md5_checksum=data_description[ "md5_checksum"]) if return_X_y: return bunch.data, bunch.target description = "{}\n\nDownloaded from openml.org.".format( data_description.pop('description')) bunch.update( DESCR=description, details=data_description, url="https://www.openml.org/d/{}".format(data_id)) return bunch
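# A minimal, hypothetical usage sketch of the fetch_openml API defined above.
# It assumes network access to openml.org; 'iris' (version 1) and data_id 61
# are well-known OpenML identifiers used only for illustration.
from sklearn.datasets import fetch_openml

# Fetch by name and version; returns a Bunch with .data, .target, .DESCR, ...
iris = fetch_openml(name='iris', version=1, as_frame=True)
print(iris.data.shape, iris.target.shape)

# Fetch by numeric OpenML id and unpack directly into (X, y) arrays.
X, y = fetch_openml(data_id=61, as_frame=False, return_X_y=True)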
bsd-3-clause
66,852,054,886,861,020
35.845989
79
0.604017
false
4.03313
false
false
false
yoazmenda/Hearthstone_deck_builder
run_games.py
1
1724
import json
from hearthbreaker.agents.basic_agents import RandomAgent
from hearthbreaker.cards.heroes import hero_for_class
from hearthbreaker.constants import CHARACTER_CLASS
from hearthbreaker.engine import Game, Deck, card_lookup
from hearthbreaker.cards import *
import timeit


def load_deck(filename):
    cards = []
    character_class = CHARACTER_CLASS.MAGE

    with open(filename, "r") as deck_file:
        contents = deck_file.read()
        items = contents.splitlines()
        for line in items:
            # each line reads "<count> <card name>"
            parts = line.split(" ", 1)
            count = int(parts[0])
            for i in range(0, count):
                card = card_lookup(parts[1])
                if card.character_class != CHARACTER_CLASS.ALL:
                    character_class = card.character_class
                cards.append(card)

    if len(cards) > 30:
        # TODO: Hearthstone decks hold at most 30 cards; this size check is
        # unfinished and oversized decks currently pass through.
        pass

    return Deck(cards, hero_for_class(character_class))


def do_stuff():
    _count = 0

    def play_game():
        nonlocal _count
        _count += 1
        new_game = game.copy()
        try:
            new_game.start()
        except Exception as e:
            print(json.dumps(new_game.__to_json__(), default=lambda o: o.__to_json__(), indent=1))
            print(new_game._all_cards_played)
            raise e

        # the winner could be inspected here, e.g.:
        # print(new_game.players[0].hero.dead)
        del new_game
        if _count % 1000 == 0:
            print("---- game #{} ----".format(_count))

    deck1 = load_deck("zoo.hsdeck")
    deck2 = load_deck("zoo.hsdeck")
    game = Game([deck1, deck2], [RandomAgent(), RandomAgent()])

    print(timeit.timeit(play_game, 'gc.enable()', number=2000))
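# Illustrative sketch of the "zoo.hsdeck" file format that load_deck() above
# expects: one entry per line, "<count> <card name>". The card names here are
# assumptions for illustration; any name resolvable by card_lookup() works.
deck_text = """2 Flame Imp
2 Voidwalker
2 Soulfire
"""
with open("zoo.hsdeck", "w") as deck_file:
    deck_file.write(deck_text)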
mit
-2,154,089,804,845,221,400
27.733333
98
0.563805
false
3.723542
false
false
false
donlorenzo/AdvancedConfigParser
src/AdvancedConfigParser.py
1
17728
# -*- coding: utf-8 -*-

# Copyright (c) 2010, 2014 Lorenz Quack
# This code is released under the MIT License:
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

u"""
AdvancedConfigParser parses config files written in a .ini-file like style.
In addition to ini files this module has the following advanced features:
 * arbitrarily nested subsections
 * various (nested) types including int, float, str, list, dict
 * various calculations in values
 * refer to other keys in values

Example:
global_var = True
[Section_1]
pi = 3.141
[[Sub_Sec_1]]
tau = 2 * pi
[whatever]
foo = [Section_1.pi, Section_1.Sub_Sec_1.tau, global_var]
bar = max(foo)
baz = foo if Section_1.pi < 2**2 < Section_1.Sub_Sec_1.tau/2 or True else bar

Configuration can be loaded from strings (parse_string()), files
(parse_file()) or file-like objects (parse_stream()).

Access to the sections and options is done by attribute access:
>>> config = AdvancedConfigParser.parse_file("filename")
>>> print(config.global_var)
>>> print(config.Section_1.pi + config.whatever.bar)
"""

try:
    import __builtin__ as builtins
except ImportError:
    import builtins
import io
import re
import ast
import operator

from ast_to_src import ast_to_src


def parse_file(filename):
    with open(filename) as f:
        return parse_stream(f)


def parse_string(s):
    return parse_stream(io.StringIO(s))


def parse_stream(stream):
    """
    parse the stream into a hierarchical tree of (sub-)sections and options.
    return the root/global section.
""" root = current_section = Section() current_section._acp_name = "<global>" current_nesting_level = 0 line = 0 while True: buf = "" tmp = stream.readline() line += 1 if tmp == "": break buf += tmp stripped_buf = buf.strip() # preserve empty lines if not stripped_buf: current_section._acp_add_empty_line() # ignore comments elif stripped_buf.startswith("#"): current_section._acp_add_comment(stripped_buf) # handle section header elif stripped_buf.startswith("["): result = re.match(r"(\[+)([^\d\W]\w*)(\]+)", stripped_buf) if result is None: msg = "malformed section header in line {line}:\n{tmp}" raise SyntaxError(msg.format(**locals())) if len(result.group(1)) != len(result.group(3)): msg = "section braket mismatch in line {line}:\n{tmp}" raise SyntaxError(msg.format(**locals())) level = min(len(result.group(1)), len(result.group(3))) if level > current_nesting_level + 1: msg = "wrong section nesting in line {line}" raise SyntaxError(msg.format(**locals())) while current_nesting_level >= level: current_section = current_section._acp_parent current_nesting_level -= 1 section_name = ast.parse(result.group(2)).body[0].value.id if section_name in list(current_section._acp_section_names()): msg = 'duplicate section "{section_name}".'.format(**locals()) raise SyntaxError(msg) new_section = Section() new_section._acp_name = section_name current_section._acp_add_child(new_section) current_section = new_section current_nesting_level += 1 # handle options else: node = None while node is None and tmp != "": try: node = ast.parse(stripped_buf) except SyntaxError: tmp = stream.readline() buf += tmp stripped_buf = buf.strip() node = node.body[0] assert isinstance(node, ast.Assign) option_name = node.targets[0].id if option_name in list(current_section._acp_option_names()): msg = ('duplicate option "{option_name}" in ' 'section "{current_section._acp_name}".') raise SyntaxError(msg.format(**locals())) new_option = Option() new_option._acp_name = option_name new_option._acp_value = node.value current_section._acp_add_child(new_option) return root class Section(object): """ Section objects allow access to their sub-sections and options via attribute access and subscript. new sections and options may be added via "_acp_add_child()". 
""" def __init__(self): self.__dict__["_acp_name"] = "" self.__dict__["_acp_parent"] = None self.__dict__["_acp_order"] = [] self.__dict__["_acp_nesting_level"] = 0 def __str__(self): return '<Section "{self._acp_name}">'.format(**locals()) __repr__ = __str__ def __setattr__(self, attr, val): obj = object.__getattribute__(self, attr) if isinstance(obj, Option): obj._acp_value = val else: super(Section, self).__setattr__(attr, val) def __getattribute__(self, attr, raw=False): obj = super(Section, self).__getattribute__(attr) if isinstance(obj, Option) and not raw: return obj._acp_value else: return obj def __getitem__(self, key): try: return getattr(self, key) except AttributeError as e: raise KeyError(str(e)) def _acp_add_child(self, child): child._acp_nesting_level = self._acp_nesting_level + 1 if child._acp_parent is None: child._acp_parent = self if child._acp_name in self.__dict__: msg = "duplicate object: {child_name}" raise SyntaxError(msg.format(child_name=child._acp_name)) self.__dict__[child._acp_name] = child self.__dict__["_acp_order"].append(child._acp_name) def _acp_add_empty_line(self): self.__dict__["_acp_order"].append("\n") def _acp_add_comment(self, comment): self.__dict__["_acp_order"].append(comment) def _acp_sections(self): for section in (section for section in self.__dict__.values() if isinstance(section, Section)): yield section def _acp_section_names(self): for section_name in (sn for (sn, s) in self.__dict__.items() if isinstance(s, Section)): yield section_name def _acp_options(self): for option in (option for option in self.__dict__.values() if isinstance(option, Option)): yield option def _acp_option_names(self): for option_name in (o_name for o_name, option in self.__dict__.items() if isinstance(option, Option)): yield option_name def _acp_children(self): for child in (child for child in self.__dict__.values() if isinstance(child, (Section, Option))): yield child def dump(self): return self.pretty_print(do_indent=False) def pretty_print(self, indent=0, do_indent=True): if self._acp_name != "<global>": template = "{indentation}{left}{section_name}{right}\n" s = template.format(indentation=" " * indent, left="[" * self._acp_nesting_level, right="]" * self._acp_nesting_level, section_name=self._acp_name) if do_indent: indent += 1 else: s = "" for child_name in self._acp_order: if child_name == "\n": s += "\n" elif child_name.strip().startswith("#"): s += "{indent}{comment}\n".format(indent=" " * indent, comment=child_name) else: child = getattr(self, child_name) if isinstance(child, Section): s += child.pretty_print(indent) else: child_raw = self._acp_get_raw_option(child_name) template = "{indentation}{option_name} = {option_raw}\n" s += template.format(indentation=" " * indent, option_name=child_name, option_raw=child_raw) return s def _acp_get_raw_option(self, option_name): return self.__getattribute__(option_name, True)._acp_raw_value class LazyEval(object): """ evaluates the ast nodes lazy when used as a descriptor. when we find that all involved ast-nodes are static we cache the result. 
""" def __init__(self): self.cache = {} def __get__(self, instance, owner): # see if we already cached the result from a previous evaluation if instance in self.cache: return self.cache[instance] # dynamically evaluate the ast-nodes val, has_refs = self._acp_eval(instance._acp_parent, instance._acp_ast_node) # if the ast-nodes have no external references cache the result if not has_refs: self.cache[instance] = val return val def __set__(self, instance, value): # if value is a ast-node invalidate the cache if isinstance(value, ast.AST): instance._acp_ast_node = value try: del self.cache[instance] except KeyError: pass # else it is a static value which can be put directly into the cache else: self.cache[instance] = value def _acp_eval(self, parent, node): """ dynamically and recursively evaluate the ast-nodes. returns a 2-tuple. first is the actual value, second a bool indicating if this ast-node has external dependencies and should not be cached. """ # first try simple conversion of literals try: return ast.literal_eval(node), False except (SyntaxError, ValueError): pass # handle external references if isinstance(node, (ast.Name, ast.Attribute)): ref = "" while isinstance(node, ast.Attribute): ref = "." + node.attr + ref node = node.value ref = node.id + ref return self._acp_resolve_reference(ref, parent), True # handle lists, tuples and dicts elif isinstance(node, (ast.List, ast.Tuple, ast.Dict)): vals = [] has_refs = False for child_node in ast.iter_child_nodes(node): tmp = self._acp_eval(parent, child_node) if not tmp: continue vals.append(tmp[0]) has_refs = tmp[1] if isinstance(node, ast.List): return list(vals), has_refs elif isinstance(node, ast.Tuple): return tuple(vals), has_refs return vals, has_refs # handle the following math operators +, -, *, /, //, %, **, |, &, ^ elif isinstance(node, ast.BinOp): lhs, lhs_has_refs = self._acp_eval(parent, node.left) rhs, rhs_has_refs = self._acp_eval(parent, node.right) ops = {ast.Add: operator.add, ast.Sub: operator.sub, ast.Mult: operator.mul, ast.Div: operator.truediv, ast.FloorDiv: operator.floordiv, ast.Mod: operator.mod, ast.Pow: operator.pow, ast.LShift: operator.lshift, ast.RShift: operator.rshift, ast.BitOr: operator.or_, ast.BitXor: operator.xor, ast.BitAnd: operator.and_,} if node.op.__class__ in ops: return (ops[node.op.__class__](lhs, rhs), lhs_has_refs | rhs_has_refs) else: msg = 'op "{op_name}" not supported yet' raise SyntaxError(msg.format(op_name=str(node.op.__class__))) # handle calls to some selected builtin functions elif isinstance(node, ast.Call): if node.func.id in ("abs", "all", "any", "bin", "bool", "chr", "complex", "dict", "divmod", "enumerate", "float", "hex", "int", "len", "list", "max", "min", "oct", "ord", "pow", "range", "reversed", "round", "set", "sorted", "str", "sum", "tuple", "type", "unichr", "zip", ): has_refs = False args = [] for arg_node in node.args: arg, temp_has_refs = self._acp_eval(parent, arg_node) args.append(arg) has_refs |= temp_has_refs kwargs = {} for keyword_node in node.keywords: kwargs[keyword_node.arg], temp_has_refs = self._acp_eval(parent, keyword_node.value) has_refs |= temp_has_refs return (builtins.__dict__[node.func.id](*args, **kwargs), has_refs) # handle ternary if operator elif isinstance(node, ast.IfExp): test, test_has_refs = self._acp_eval(parent, node.test) if test: result, has_refs = self._acp_eval(parent, node.body) else: result, has_refs = self._acp_eval(parent, node.orelse) return result, has_refs | test_has_refs # handle compares elif isinstance(node, ast.Compare): 
astOp2FuncOp = {ast.Eq: operator.eq, ast.NotEq: operator.ne, ast.Lt: operator.lt, ast.LtE: operator.le, ast.Gt: operator.gt, ast.GtE: operator.ge, ast.Is: operator.is_, ast.IsNot: operator.is_not, # don't use contains because arguments are reversed ast.In: lambda a, b: a in b, ast.NotIn: lambda a, b: a not in b} left, left_has_refs = self._acp_eval(parent, node.left) has_refs = left_has_refs for ast_op, ast_right in zip(node.ops, node.comparators): right, right_has_refs = self._acp_eval(parent, ast_right) has_refs |= right_has_refs op = astOp2FuncOp[ast_op.__class__] if op(left, right): left = right else: return False, has_refs return True, has_refs # handle boolean operators elif isinstance(node, ast.BoolOp): has_refs = False if node.op.__class__ == ast.And: for value in node.values: v, value_has_refs = self._acp_eval(parent, value) has_refs |= value_has_refs if not v: return False, has_refs return True, has_refs elif node.op.__class__ == ast.Or: for value in node.values: v, value_has_refs = self._acp_eval(parent, value) has_refs |= value_has_refs if v: return True, has_refs return False, has_refs raise RuntimeError("unreachable") # not sure what this is about... elif isinstance(node, ast.Load): pass else: raise RuntimeError("unhandled node: " + str(node)) @classmethod def _acp_resolve_reference(cls, ref, parent): """ resolves external references by walking up the tree until we find a complete match """ attrs = ref.split(".") while parent is not None: try: obj = parent for attr in attrs: obj = getattr(obj, attr) return obj except (KeyError, AttributeError): parent = parent._acp_parent raise AttributeError(ref) class Option(object): def __init__(self): self._acp_name = "" self._acp_parent = None self._acp_has_refs = True self._acp_nesting_level = 0 self._acp_ast_node = None def _acp_get_raw_value(self): return ast_to_src(self._acp_ast_node) _acp_value = LazyEval() _acp_raw_value = property(_acp_get_raw_value) def __str__(self): return '<Option {self._acp_name}>'.format(**locals()) __repr__ = __str__
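# A small usage sketch of the parser defined above; the config text and the
# option/section names are made up. It demonstrates cross-section references
# and computed options (io.StringIO, used by parse_string, wants unicode text).
import AdvancedConfigParser

cfg = AdvancedConfigParser.parse_string(u"pi = 3.141\n"
                                        u"[numbers]\n"
                                        u"tau = 2 * pi\n"
                                        u"biggest = max([pi, tau])\n")
print(cfg.pi)               # 3.141
print(cfg.numbers.tau)      # 6.282
print(cfg.numbers.biggest)  # 6.282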
mit
-2,125,530,160,059,667,500
38.838202
104
0.542137
false
4.147871
true
false
false
rmanoni/mi-instrument
mi/instrument/kut/ek60/ooicore/driver.py
1
39253
""" @package mi.instrument.kut.ek60.ooicore.driver @file /mi/instrument/kut/ek60/ooicore/driver.py @author Richard Han @brief Driver for the ooicore Release notes: This Driver supports the Kongsberg UnderWater Technology's EK60 Instrument. """ __author__ = 'Richard Han & Craig Risien' __license__ = 'Apache 2.0' import ftplib import json import tempfile import urllib2 import yaml from mi.core.common import BaseEnum from mi.core.exceptions import InstrumentParameterException, InstrumentException, SampleException from mi.core.exceptions import InstrumentConnectionException from mi.core.instrument.data_particle import DataParticle, CommonDataParticleType, DataParticleKey from mi.core.instrument.driver_dict import DriverDictKey from mi.core.instrument.instrument_driver import SingleConnectionInstrumentDriver from mi.core.instrument.instrument_driver import DriverEvent from mi.core.instrument.instrument_driver import DriverAsyncEvent from mi.core.instrument.instrument_driver import DriverProtocolState from mi.core.instrument.instrument_driver import DriverParameter from mi.core.instrument.instrument_driver import ResourceAgentState from mi.core.instrument.instrument_fsm import ThreadSafeFSM from mi.core.instrument.instrument_protocol import CommandResponseInstrumentProtocol from mi.core.instrument.protocol_param_dict import ParameterDictType from mi.core.log import get_logger from mi.core.log import get_logging_metaclass log = get_logger() # newline. NEWLINE = '\r\n' # Default Instrument's IP Address DEFAULT_HOST = "128.193.64.201" YAML_FILE_NAME = "driver_schedule.yaml" DEFAULT_PORT = "80" USER_NAME = "ooi" PASSWORD = "994ef22" DEFAULT_CONFIG = { 'file_prefix': "Driver DEFAULT CONFIG_PREFIX", 'file_path': "DEFAULT_FILE_PATH", # relative to filesystem_root/data 'max_file_size': 288, # 50MB in bytes: 50 * 1024 * 1024 'intervals': [{ 'name': "default", 'type': "constant", 'start_at': "00:00", 'duration': "00:15:00", 'repeat_every': "01:00", 'stop_repeating_at': "23:55", 'interval': 1000, 'max_range': 80, 'frequency': { 38000: { 'mode': 'active', 'power': 100, 'pulse_length': 256 }, 120000: { 'mode': 'active', 'power': 100, 'pulse_length': 64 }, 200000: { 'mode': 'active', 'power': 120, 'pulse_length': 64 } } }] } ### # Driver Constant Definitions ### # String constants CONNECTED = "connected" CURRENT_RAW_FILENAME = "current_raw_filename" CURRENT_RAW_FILESIZE = "current_raw_filesize" CURRENT_RUNNING_INTERVAL = "current_running_interval" CURRENT_UTC_TIME = "current_utc_time" DURATION = "duration" ER60_CHANNELS = "er60_channels" ER60_STATUS = "er60_status" EXECUTABLE = "executable" FILE_PATH = "file_path" FILE_PREFIX = "file_prefix" FREQUENCY = "frequency" FREQ_120K = "120000" FREQ_200K = "200000" FREQ_38K = "38000" FS_ROOT = "fs_root" GPTS_ENABLED = "gpts_enabled" HOST = "host" INTERVAL = "interval" INTERVALS = "intervals" RAW_OUTPUT = "raw_output" MAX_FILE_SIZE = "max_file_size" MAX_RANGE = "max_range" MODE = "mode" NAME = "name" NEXT_SCHEDULED_INTERVAL = "next_scheduled_interval" PID = "pid" PORT = "port" POWER = "power" PULSE_LENGTH = "pulse_length" SAMPLE_INTERVAL = "sample_interval" SAMPLE_RANGE = "sample_range" SAVE_INDEX = "save_index" SAVE_BOTTOM = "save_bottom" SAVE_RAW = "save_raw" SCHEDULE = "schedule" SCHEDULE_FILENAME = "schedule_filename" SCHEDULED_INTERVALS_REMAINING = "scheduled_intervals_remaining" START_AT = "start_at" STOP_REPEATING_AT = "stop_repeating_at" TYPE = "type" class DataParticleType(BaseEnum): """ Data particle types produced by this driver """ RAW = 
CommonDataParticleType.RAW ZPLSC_STATUS = 'zplsc_status' class ProtocolState(BaseEnum): """ Instrument protocol states """ UNKNOWN = DriverProtocolState.UNKNOWN COMMAND = DriverProtocolState.COMMAND AUTOSAMPLE = DriverProtocolState.AUTOSAMPLE class ProtocolEvent(BaseEnum): """ Protocol events """ ENTER = DriverEvent.ENTER EXIT = DriverEvent.EXIT GET = DriverEvent.GET SET = DriverEvent.SET DISCOVER = DriverEvent.DISCOVER START_AUTOSAMPLE = DriverEvent.START_AUTOSAMPLE STOP_AUTOSAMPLE = DriverEvent.STOP_AUTOSAMPLE ACQUIRE_STATUS = DriverEvent.ACQUIRE_STATUS class Capability(BaseEnum): """ Protocol events that should be exposed to users (subset of above). """ START_AUTOSAMPLE = ProtocolEvent.START_AUTOSAMPLE STOP_AUTOSAMPLE = ProtocolEvent.STOP_AUTOSAMPLE ACQUIRE_STATUS = ProtocolEvent.ACQUIRE_STATUS GET = ProtocolEvent.GET SET = ProtocolEvent.SET class Parameter(DriverParameter): """ Device specific parameters. """ SCHEDULE = "schedule" FTP_IP_ADDRESS = "ftp_ip_address" FTP_USERNAME = "ftp_username" FTP_PASSWORD = "ftp_password" FTP_PORT = "ftp_port" class Prompt(BaseEnum): """ Device i/o prompts.. """ class Command(BaseEnum): """ Instrument command strings """ ACQUIRE_STATUS = 'acquire_status' START_AUTOSAMPLE = 'start_autosample' STOP_AUTOSAMPLE = 'stop_autosample' GET = 'get_param' SET = 'set_param' ############################################################################### # Data Particles ############################################################################### class ZPLSCStatusParticleKey(BaseEnum): ZPLSC_CONNECTED = "zplsc_connected" # Connected to a running ER 60 instance ZPLSC_ACTIVE_38K_MODE = "zplsc_active_38k_mode" # 38K Transducer transmit mode ZPLSC_ACTIVE_38K_POWER = "zplsc_active_38k_power" # 38K Transducer transmit power in W ZPLSC_ACTIVE_38K_PULSE_LENGTH = "zplsc_active_38k_pulse_length" # 38K Transducer transmit pulse length in seconds ZPLSC_ACTIVE_38K_SAMPLE_INTERVAL = "zplsc_active_38k_sample_interval" # Sample interval in seconds ZPLSC_ACTIVE_120K_MODE = "zplsc_active_120k_mode" # 120K Transducer transmit mode ZPLSC_ACTIVE_120K_POWER = "zplsc_active_120k_power" # 120K Transducer transmit power in W ZPLSC_ACTIVE_120K_PULSE_LENGTH = "zplsc_active_120k_pulse_length" # 120K Transducer Transmit pulse length in seconds ZPLSC_ACTIVE_120K_SAMPLE_INTERVAL = "zplsc_active_120k_sample_interval" # 120K Sample Interval ZPLSC_ACTIVE_200K_MODE = "zplsc_active_200k_mode" # 200K Transducer transmit mode ZPLSC_ACTIVE_200K_POWER = "zplsc_active_200k_power" # 200K Transducer transmit power in W ZPLSC_ACTIVE_200K_PULSE_LENGTH = "zplsc_active_200k_pulse_length" # 200K Transducer transmit pulse length in seconds ZPLSC_ACTIVE_200K_SAMPLE_INTERVAL = "zplsc_active_200k_sample_interval" # 200K Transducer sample interval ZPLSC_CURRENT_UTC_TIME = "zplsc_current_utc_time" # Current UTC Time ZPLSC_EXECUTABLE = "zplsc_executable" # Executable used to launch ER60 ZPLSC_FS_ROOT = "zplsc_fs_root" # Root directory where data/logs/configs are stored ZPLSC_NEXT_SCHEDULED_INTERVAL = "zplsc_next_scheduled_interval" # UTC time of next scheduled interval ZPLSC_HOST = "zplsc_host" # Host IP Address ZPLSC_PID = "zplsc_pid" # PID of running ER60 process ZPLSC_PORT = "zplsc_port" # Host port number ZPLSC_CURRENT_RAW_FILENAME = "zplsc_current_raw_filename" # File name of the current .raw file ZPLSC_CURRENT_RAW_FILESIZE = "zplsc_current_raw_filesize" # File size of current .raw file ZPLSC_FILE_PATH = "zplsc_file_path" # File storage path ZPLSC_FILE_PREFIX = "zplsc_file_prefix" # Current file prefix 
ZPLSC_MAX_FILE_SIZE = "zplsc_max_file_size" # Maximum file size ZPLSC_SAMPLE_RANGE = "zplsc_sample_range" # Recording range ZPLSC_SAVE_BOTTOM = "zplsc_save_bottom" # Save bottom file ZPLSC_SAVE_INDEX = "zplsc_save_index" # Save index file ZPLSC_SAVE_RAW = "zplsc_save_raw" # Save raw file ZPLSC_SCHEDULED_INTERVALS_REMAINING = "zplsc_scheduled_intervals_remaining" # Number of intervals remaining in running schedule ZPLSC_GPTS_ENABLED = "zplsc_gpts_enabled" # GPTs enabled ZPLSC_SCHEDULE_FILENAME = "zplsc_schedule_filename" # Filename for .yaml schedule file class ZPLSCStatusParticle(DataParticle): """ Routines for parsing raw data into a status particle structure. Override the building of values, and the rest should come along for free. Sample: {'connected': True, 'er60_channels': {'GPT 38 kHz 00907207b7b1 6-1 OOI.38|200': {'frequency': 38000, 'mode': 'active', 'power': 100.0, 'pulse_length': 0.000256, 'sample_interval': 6.4e-05}, 'GPT 120 kHz 00907207b7dc 1-1 ES120-7CD': {'frequency': 120000, 'mode': 'active', 'power': 100.0, 'pulse_length': 6.4e-05, 'sample_interval': 1.6e-05}, 'GPT 200 kHz 00907207b7b1 6-2 OOI38|200': {'frequency': 200000, 'mode': 'active', 'power': 120.0, 'pulse_length': 6.4e-05, 'sample_interval': 1.6e-05}}, 'er60_status': {'current_running_interval': None, 'current_utc_time': '2014-07-08 22:34:18.667000', 'executable': 'c:/users/ooi/desktop/er60.lnk', 'fs_root': 'D:/', 'host': '157.237.15.100', 'next_scheduled_interval': None, 'pid': 1864, 'port': 56635, 'raw_output': {'current_raw_filename': 'OOI-D20140707-T214500.raw', 'current_raw_filesize': None, 'file_path': 'D:\\data\\QCT_1', 'file_prefix': 'OOI', 'max_file_size': 52428800, 'sample_range': 220.0, 'save_bottom': True, 'save_index': True, 'save_raw': True}, 'scheduled_intervals_remaining': 0}, 'gpts_enabled': False, 'schedule': {}, 'schedule_filename': 'qct_configuration_example_1.yaml'} """ __metaclass__ = get_logging_metaclass(log_level='trace') _data_particle_type = DataParticleType.ZPLSC_STATUS def _encode_value(self, name, value, encoding_function): """ Encode a value using the encoding function, if it fails store the error in a queue Override to handle None values. """ encoded_val = None if value is not None: try: encoded_val = encoding_function(value) except Exception: log.error("Data particle error encoding. 
Name:%s Value:%s", name, value) self._encoding_errors.append({name: value}) return {DataParticleKey.VALUE_ID: name, DataParticleKey.VALUE: encoded_val} def _build_parsed_values(self): """ Parse ZPLSC Status response and return the ZPLSC Status particles @throws SampleException If there is a problem with sample """ try: log.debug("status raw_data = %s", self.raw_data) config = self.raw_data if not isinstance(config, dict): raise SampleException("ZPLSC status data is not a dictionary" % self.raw_data) active_200k_mode = None active_200k_power = None active_200k_pulse_length = None active_200k_sample_interval = None active_120k_mode = None active_120k_power = None active_120k_pulse_length = None active_120k_sample_interval = None active_38k_mode = None active_38k_power = None active_38k_pulse_length = None active_38k_sample_interval = None connected = config.get(CONNECTED) er60_channels = config.get(ER60_CHANNELS) if er60_channels is not None: for key in er60_channels: if '200 kHz' in key: active_200k_mode = er60_channels[key].get(MODE) active_200k_power = er60_channels[key].get(POWER) active_200k_pulse_length = er60_channels[key].get(PULSE_LENGTH) active_200k_sample_interval = er60_channels[key].get(SAMPLE_INTERVAL) elif '120 kHz' in key: active_120k_mode = er60_channels[key].get(MODE) active_120k_power = er60_channels[key].get(POWER) active_120k_pulse_length = er60_channels[key].get(PULSE_LENGTH) active_120k_sample_interval = er60_channels[key].get(SAMPLE_INTERVAL) elif '38 kHz' in key: active_38k_mode = er60_channels[key].get(MODE) active_38k_power = er60_channels[key].get(POWER) active_38k_pulse_length = er60_channels[key].get(PULSE_LENGTH) active_38k_sample_interval = er60_channels[key].get(SAMPLE_INTERVAL) current_utc_time = None executable = None fs_root = None next_scheduled_interval = 'None' host = None pid = '0' port = None current_raw_filename = None current_raw_filesize = 0 file_path = None file_prefix = None max_file_size = None sample_range = None save_bottom = None save_index = None save_raw = None scheduled_intervals_remaining = None er60_status = config.get(ER60_STATUS) if er60_status is not None: current_utc_time = er60_status.get(CURRENT_UTC_TIME) executable = er60_status.get(EXECUTABLE) fs_root = er60_status.get(FS_ROOT) if er60_status.get(NEXT_SCHEDULED_INTERVAL) is not None: next_scheduled_interval = er60_status.get(NEXT_SCHEDULED_INTERVAL) host = er60_status.get(HOST) if er60_status.get(PID) is not None: pid = er60_status.get(PID) port = er60_status.get(PORT) raw_output = er60_status.get(RAW_OUTPUT) if raw_output is not None: current_raw_filename = raw_output.get(CURRENT_RAW_FILENAME) if raw_output.get(CURRENT_RAW_FILESIZE) is not None: current_raw_filesize = raw_output.get(CURRENT_RAW_FILESIZE) file_path = raw_output.get(FILE_PATH) file_prefix = raw_output.get(FILE_PREFIX) max_file_size = raw_output.get(MAX_FILE_SIZE) sample_range = raw_output.get(SAMPLE_RANGE) save_bottom = raw_output.get(SAVE_BOTTOM) save_index = raw_output.get(SAVE_INDEX) save_raw = raw_output.get(SAVE_RAW) scheduled_intervals_remaining = er60_status.get(SCHEDULED_INTERVALS_REMAINING) gpts_enabled = config.get(GPTS_ENABLED) schedule_filename = config.get(SCHEDULE_FILENAME) except KeyError: raise SampleException("ValueError while converting ZPLSC Status: [%s]" % self.raw_data) result = [ self._encode_value(ZPLSCStatusParticleKey.ZPLSC_CONNECTED, connected, int), self._encode_value(ZPLSCStatusParticleKey.ZPLSC_ACTIVE_200K_MODE, active_200k_mode, str), 
self._encode_value(ZPLSCStatusParticleKey.ZPLSC_ACTIVE_200K_POWER, active_200k_power, float), self._encode_value(ZPLSCStatusParticleKey.ZPLSC_ACTIVE_200K_PULSE_LENGTH, active_200k_pulse_length, float), self._encode_value(ZPLSCStatusParticleKey.ZPLSC_ACTIVE_200K_SAMPLE_INTERVAL, active_200k_sample_interval, float), self._encode_value(ZPLSCStatusParticleKey.ZPLSC_ACTIVE_120K_MODE, active_120k_mode, str), self._encode_value(ZPLSCStatusParticleKey.ZPLSC_ACTIVE_120K_POWER, active_120k_power, float), self._encode_value(ZPLSCStatusParticleKey.ZPLSC_ACTIVE_120K_PULSE_LENGTH, active_120k_pulse_length, float), self._encode_value(ZPLSCStatusParticleKey.ZPLSC_ACTIVE_120K_SAMPLE_INTERVAL, active_120k_sample_interval, float), self._encode_value(ZPLSCStatusParticleKey.ZPLSC_ACTIVE_38K_MODE, active_38k_mode, str), self._encode_value(ZPLSCStatusParticleKey.ZPLSC_ACTIVE_38K_POWER, active_38k_power, float), self._encode_value(ZPLSCStatusParticleKey.ZPLSC_ACTIVE_38K_PULSE_LENGTH, active_38k_pulse_length, float), self._encode_value(ZPLSCStatusParticleKey.ZPLSC_ACTIVE_38K_SAMPLE_INTERVAL, active_38k_sample_interval, float), self._encode_value(ZPLSCStatusParticleKey.ZPLSC_CURRENT_UTC_TIME, current_utc_time, str), self._encode_value(ZPLSCStatusParticleKey.ZPLSC_EXECUTABLE, executable, str), self._encode_value(ZPLSCStatusParticleKey.ZPLSC_FS_ROOT, fs_root, str), self._encode_value(ZPLSCStatusParticleKey.ZPLSC_NEXT_SCHEDULED_INTERVAL, next_scheduled_interval, str), self._encode_value(ZPLSCStatusParticleKey.ZPLSC_HOST, host, str), self._encode_value(ZPLSCStatusParticleKey.ZPLSC_PID, pid, str), self._encode_value(ZPLSCStatusParticleKey.ZPLSC_PORT, port, int), self._encode_value(ZPLSCStatusParticleKey.ZPLSC_CURRENT_RAW_FILENAME, current_raw_filename, str), self._encode_value(ZPLSCStatusParticleKey.ZPLSC_CURRENT_RAW_FILESIZE, current_raw_filesize, int), self._encode_value(ZPLSCStatusParticleKey.ZPLSC_FILE_PATH, file_path, str), self._encode_value(ZPLSCStatusParticleKey.ZPLSC_FILE_PREFIX, file_prefix, str), self._encode_value(ZPLSCStatusParticleKey.ZPLSC_MAX_FILE_SIZE, max_file_size, int), self._encode_value(ZPLSCStatusParticleKey.ZPLSC_SAMPLE_RANGE, sample_range, float), self._encode_value(ZPLSCStatusParticleKey.ZPLSC_SAVE_BOTTOM, save_bottom, int), self._encode_value(ZPLSCStatusParticleKey.ZPLSC_SAVE_INDEX, save_index, int), self._encode_value(ZPLSCStatusParticleKey.ZPLSC_SAVE_RAW, save_raw, int), self._encode_value(ZPLSCStatusParticleKey.ZPLSC_SCHEDULED_INTERVALS_REMAINING, scheduled_intervals_remaining, int), self._encode_value(ZPLSCStatusParticleKey.ZPLSC_GPTS_ENABLED, gpts_enabled, int), self._encode_value(ZPLSCStatusParticleKey.ZPLSC_SCHEDULE_FILENAME, schedule_filename, str) ] log.debug("build_parsed_value: %s", result) return result ############################################################################### # Driver ############################################################################### class InstrumentDriver(SingleConnectionInstrumentDriver): """ InstrumentDriver subclass Subclasses SingleConnectionInstrumentDriver with connection state machine. """ ######################################################################## # Protocol builder. ######################################################################## def _build_protocol(self): """ Construct the driver protocol state machine. 
""" self._protocol = Protocol(Prompt, NEWLINE, self._driver_event) ########################################################################### # Protocol ########################################################################### class Protocol(CommandResponseInstrumentProtocol): """ Instrument protocol class Subclasses CommandResponseInstrumentProtocol """ __metaclass__ = get_logging_metaclass(log_level='trace') def __init__(self, prompts, newline, driver_event): """ Protocol constructor. @param prompts A BaseEnum class containing instrument prompts. @param newline The newline. @param driver_event Driver process event callback. """ # Construct protocol superclass. CommandResponseInstrumentProtocol.__init__(self, prompts, newline, driver_event) # Build protocol state machine. self._protocol_fsm = ThreadSafeFSM(ProtocolState, ProtocolEvent, ProtocolEvent.ENTER, ProtocolEvent.EXIT) # Add event handlers for protocol state machine. self._protocol_fsm.add_handler(ProtocolState.UNKNOWN, ProtocolEvent.ENTER, self._handler_unknown_enter) self._protocol_fsm.add_handler(ProtocolState.UNKNOWN, ProtocolEvent.DISCOVER, self._handler_unknown_discover) self._protocol_fsm.add_handler(ProtocolState.COMMAND, ProtocolEvent.ENTER, self._handler_command_enter) self._protocol_fsm.add_handler(ProtocolState.COMMAND, ProtocolEvent.START_AUTOSAMPLE, self._handler_command_autosample) self._protocol_fsm.add_handler(ProtocolState.COMMAND, ProtocolEvent.ACQUIRE_STATUS, self._handler_command_acquire_status) self._protocol_fsm.add_handler(ProtocolState.COMMAND, ProtocolEvent.GET, self._handler_command_get) self._protocol_fsm.add_handler(ProtocolState.COMMAND, ProtocolEvent.SET, self._handler_command_set) self._protocol_fsm.add_handler(ProtocolState.AUTOSAMPLE, ProtocolEvent.STOP_AUTOSAMPLE, self._handler_autosample_stop) # Construct the parameter dictionary containing device parameters, # current parameter values, and set formatting functions. self._build_driver_dict() self._build_command_dict() self._build_param_dict() # Add sample handlers. # State state machine in UNKNOWN state. self._protocol_fsm.start(ProtocolState.UNKNOWN) # commands sent sent to device to be filtered in responses for telnet DA self._sent_cmds = [] def _build_param_dict(self): """ Populate the parameter dictionary with parameters. For each parameter key, add match string, match lambda function, and value formatting function for set commands. 
""" self._param_dict.add(Parameter.SCHEDULE, r'schedule:\s+(.*)', lambda match: match.group(1), str, type=ParameterDictType.STRING, display_name="Schedule", description="Large block of text used to create the .yaml file defining the sampling schedule.", startup_param=True, default_value=yaml.dump(DEFAULT_CONFIG, default_flow_style=False)) self._param_dict.add(Parameter.FTP_IP_ADDRESS, r'ftp address:\s+(\d\d\d\d\.\d\d\d\d\.\d\d\d\d\.\d\d\d)', lambda match: match.group(1), str, type=ParameterDictType.STRING, display_name="FTP IP Address", description="IP address the driver uses to connect to the instrument FTP server.", startup_param=True, default_value=DEFAULT_HOST) self._param_dict.add(Parameter.FTP_USERNAME, r'username:(.*)', lambda match: match.group(1), str, type=ParameterDictType.STRING, display_name="FTP User Name", description="Username used to connect to the FTP server.", startup_param=True, default_value=USER_NAME) self._param_dict.add(Parameter.FTP_PASSWORD, r'password:(.*)', lambda match: match.group(1), str, type=ParameterDictType.STRING, display_name="FTP Password", description="Password used to connect to the FTP server.", startup_param=True, default_value=PASSWORD) self._param_dict.add(Parameter.FTP_PORT, r'port:(.*)', lambda match: match.group(1), str, type=ParameterDictType.STRING, display_name="FTP Port", description="Location on the OOI infrastructure where .raw files and echogram images will be stored.", startup_param=True, default_value=DEFAULT_PORT) def _build_driver_dict(self): """ Populate the driver dictionary with options """ self._driver_dict.add(DriverDictKey.VENDOR_SW_COMPATIBLE, True) def _build_command_dict(self): """ Populate the command dictionary with command. """ self._cmd_dict.add(Capability.START_AUTOSAMPLE, display_name="Start Autosample") self._cmd_dict.add(Capability.STOP_AUTOSAMPLE, display_name="Stop Autosample") self._cmd_dict.add(Capability.ACQUIRE_STATUS, display_name="Acquire Status") def _filter_capabilities(self, events): """ Return a list of currently available capabilities. """ return [x for x in events if Capability.has(x)] ######################################################################## # Unknown handlers. ######################################################################## def _handler_unknown_enter(self, *args, **kwargs): """ Enter unknown state. """ # Tell driver superclass to send a state change event. # Superclass will query the state. self._driver_event(DriverAsyncEvent.STATE_CHANGE) def _handler_unknown_exit(self, *args, **kwargs): """ Exit unknown state. """ pass def _handler_unknown_discover(self, *args, **kwargs): """ Discover current state @retval (next_state, next_agent_state) """ # Try to get the status to check if the instrument is alive host = self._param_dict.get_config_value(Parameter.FTP_IP_ADDRESS) port = self._param_dict.get_config_value(Parameter.FTP_PORT) response = self._url_request(host, port, '/status.json') if response is None: error_msg = "_handler_unknown_discover: Unable to connect to host: %s" % host log.error(error_msg) raise InstrumentConnectionException(error_msg) return ProtocolState.COMMAND, ResourceAgentState.IDLE ######################################################################## # Command handlers. ######################################################################## def _handler_command_enter(self, *args, **kwargs): """ Enter command state. @throws InstrumentTimeoutException if the device cannot be woken. 
@throws InstrumentProtocolException if the update commands and not recognized. """ self._init_params() # Tell driver superclass to send a state change event. # Superclass will query the state. self._driver_event(DriverAsyncEvent.STATE_CHANGE) def _handler_command_exit(self, *args, **kwargs): """ Exit command state. """ pass def _handler_command_get(self, *args, **kwargs): """ Get parameters while in the command state. @param params List of the parameters to pass to the state @retval returns (next_state, result) where result is a dict {}. No agent state changes happening with Get, so no next_agent_state @throw InstrumentParameterException for invalid parameter """ result_vals = {} # Retrieve required parameter. # Raise if no parameter provided, or not a dict. try: params = args[0] except IndexError: raise InstrumentParameterException('_handler_command_get requires a parameter dict.') if Parameter.ALL in params: log.debug("Parameter ALL in params") params = Parameter.list() params.remove(Parameter.ALL) log.debug("_handler_command_get: params = %s", params) if params is None or not isinstance(params, list): raise InstrumentParameterException("GET parameter list not a list!") # fill the return values from the update for param in params: if not Parameter.has(param): raise InstrumentParameterException("Invalid parameter!") result_vals[param] = self._param_dict.get(param) self._param_dict.get_config_value(param) result = result_vals log.debug("Get finished, next_state: %s, result: %s", None, result) return None, result def _handler_command_set(self, *args, **kwargs): """ Set parameter @retval next state, result """ startup = False try: params = args[0] except IndexError: raise InstrumentParameterException('_handler_command_set: command requires a parameter dict.') try: startup = args[1] except IndexError: pass if not isinstance(params, dict): raise InstrumentParameterException('Set parameters not a dict.') # For each key, val in the params, set the param dictionary. 
old_config = self._param_dict.get_config() self._set_params(params, startup) new_config = self._param_dict.get_config() if old_config != new_config: self._driver_event(DriverAsyncEvent.CONFIG_CHANGE) return None, None def _set_params(self, *args, **kwargs): """ Issue commands to the instrument to set various parameters """ try: params = args[0] except IndexError: raise InstrumentParameterException('Set command requires a parameter dict.') # verify param is not readonly param self._verify_not_readonly(*args, **kwargs) for key, val in params.iteritems(): log.debug("KEY = %s VALUE = %s", key, val) self._param_dict.set_value(key, val) if key == Parameter.SCHEDULE: self._ftp_schedule_file() # Load the schedule file host = self._param_dict.get(Parameter.FTP_IP_ADDRESS) port = self._param_dict.get_config_value(Parameter.FTP_PORT) log.debug("_set_params: stop the current schedule file") self._url_request(host, port, '/stop_schedule', data={}) log.debug("_set_params: upload driver YAML file to host %s", host) res = self._url_request(host, port, '/load_schedule', data=json.dumps({'filename': YAML_FILE_NAME})) log.debug("_set_params: result from load = %s", res) log.debug("set complete, update params") def _ftp_schedule_file(self): """ Construct a YAML schedule file and ftp the file to the Instrument server """ # Create a temporary file and write the schedule YAML information to the file try: config_file = tempfile.TemporaryFile() log.debug("temporary file created") if config_file is None or not isinstance(config_file, file): raise InstrumentException("config_file is not a temp file!") config_file.write(self._param_dict.get(Parameter.SCHEDULE)) config_file.seek(0) log.debug("finished writing config file:\n%r", self._param_dict.get(Parameter.SCHEDULE)) except Exception as e: log.error("Create schedule YAML file exception: %s", e) raise e # FTP the schedule file to the ZPLSC server host = '' try: log.debug("Create a ftp session") host = self._param_dict.get_config_value(Parameter.FTP_IP_ADDRESS) log.debug("Got host ip address %s", host) ftp_session = ftplib.FTP() ftp_session.connect(host) ftp_session.login(USER_NAME, PASSWORD) log.debug("ftp session was created...") ftp_session.set_pasv(False) ftp_session.cwd("config") ftp_session.storlines('STOR ' + YAML_FILE_NAME, config_file) files = ftp_session.dir() log.debug("*** Config yaml file sent: %s", files) ftp_session.quit() config_file.close() except (ftplib.socket.error, ftplib.socket.gaierror), e: log.error("ERROR: cannot reach FTP Host %s: %s ", host, e) raise InstrumentException("ERROR: cannot reach FTP Host %s " % host) log.debug("*** FTP %s to ftp host %s successfully", YAML_FILE_NAME, host) def _url_request(self, host, port, page, data=None): """ Loads a schedule file previously uploaded to the instrument and sets it as the active instrument configuration """ result = None url = "https://%s:%d/%s" % (host, port, page) try: if data is not None: log.debug("Request data: %s", data) req = urllib2.Request(url, data=data, headers={'Content-Type': 'application/json'}) else: log.debug("No request data") req = urllib2.Request(url) log.debug("Request url: %s", req.__dict__) f = urllib2.urlopen(req, timeout=10) res = f.read() f.close() except urllib2.HTTPError as e: log.error("Failed to open url %s. %s", url, e) return result except urllib2.URLError as e: log.error("Failed to open url %s. 
%s", url, e) return result try: result = json.loads(res) except ValueError: log.error("Request from url %s is not in valid json format, returned: %s.", url, res) return result def _handler_command_autosample(self, *args, **kwargs): """ Start autosample mode @retval next_state, (next_resource_state, result) tuple """ # FTP the driver schedule file to the instrument server self._ftp_schedule_file() # Stop the current running schedule file just in case one is running and # load the driver schedule file host = self._param_dict.get(Parameter.FTP_IP_ADDRESS) port = self._param_dict.get_config_value(Parameter.FTP_PORT) log.debug("_handler_command_autosample: stop the current schedule file") self._url_request(host, port, '/stop_schedule', data={}) log.debug("_handler_command_autosample: upload driver YAML file to host %s", host) res = self._url_request(host, port, '/load_schedule', data=json.dumps({'filename': YAML_FILE_NAME})) log.debug(" result from load = %s", res) if res.get('result') != 'OK': raise InstrumentException('_handler_command_autosample: Load Instrument Schedule File Error.') res = self._url_request(host, port, '/start_schedule', data={}) if res.get('result') != 'OK': raise InstrumentException('_handler_command_autosample: Start Schedule File Error.') return ProtocolState.AUTOSAMPLE, (ResourceAgentState.STREAMING, None) def _handler_command_acquire_status(self, *args, **kwargs): """ Acquire status from the instrument @retval next_state, (next_resource_state, result) tuple """ host = self._param_dict.get_config_value(Parameter.FTP_IP_ADDRESS) port = self._param_dict.get_config_value(Parameter.FTP_PORT) response = self._url_request(host, port, '/status.json') if response: log.debug("_handler_command_acquire_status: response from status = %r", response) particle = ZPLSCStatusParticle(response, port_timestamp=self._param_dict.get_current_timestamp()) self._driver_event(DriverAsyncEvent.SAMPLE, particle.generate()) else: log.error("_handler_command_acquire_status: Failed to acquire status from instrument.") return None, (None, None) ######################################################################## # Autosample handlers ######################################################################## def _handler_autosample_enter(self, *args, **kwargs): """ Enter autosample mode """ self._driver_event(DriverAsyncEvent.STATE_CHANGE) def _handler_autosample_stop(self): """ Stop autosample mode @retval next_state, (next_resource_state, result) tuple """ host = self._param_dict.get_config_value(Parameter.FTP_IP_ADDRESS) port = self._param_dict.get_config_value(Parameter.FTP_PORT) log.debug("_handler_autosample_stop: stop the current schedule file") res = self._url_request(host, port, '/stop_schedule', data={}) log.debug("handler_autosample_stop: stop schedule returns %r", res) return ProtocolState.COMMAND, (ResourceAgentState.COMMAND, None)
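# Hedged sketch: Parameter.SCHEDULE above defaults to the YAML dump of
# DEFAULT_CONFIG, which is the text _ftp_schedule_file() uploads to the
# instrument. This round-trips that default schedule; the import path is
# assumed from this module's location in the mi-instrument tree.
import yaml

from mi.instrument.kut.ek60.ooicore.driver import DEFAULT_CONFIG

schedule_text = yaml.dump(DEFAULT_CONFIG, default_flow_style=False)
parsed = yaml.safe_load(schedule_text)
assert parsed['intervals'][0]['max_range'] == 80
assert 38000 in parsed['intervals'][0]['frequency']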
bsd-2-clause
1,710,098,018,871,872,300
42.567148
132
0.572542
false
4.178518
true
false
false
EmbodiedCognition/pagoda
pagoda/physics.py
1
47156
'''This module contains convenience wrappers for ODE objects.''' from __future__ import division import collections import numpy as np import ode BodyState = collections.namedtuple( 'BodyState', 'name position quaternion linear_velocity angular_velocity') class Registrar(type): '''A metaclass that builds a registry of its subclasses.''' def __init__(cls, name, bases, dct): if not hasattr(cls, '_registry'): cls._registry = {} else: key = name.lower() for i in range(3, len(name) + 1): cls._registry[key[:i]] = cls super(Registrar, cls).__init__(name, bases, dct) def build(cls, key, *args, **kwargs): return cls._registry[key.lower()](*args, **kwargs) class Body(Registrar(str('Base'), (), {})): '''This class wraps things that participate in the ODE physics simulation. This class basically provides lots of Python-specific properties that call the equivalent ODE getters and setters for things like position, rotation, etc. ''' def __init__(self, name, world, density=1000., mass=None, **shape): self.name = name self.world = world self.shape = shape m = ode.Mass() self.init_mass(m, density, mass) self.ode_body = ode.Body(world.ode_world) self.ode_body.setMass(m) self.ode_geom = getattr(ode, 'Geom%s' % self.__class__.__name__)( world.ode_space, **shape) self.ode_geom.setBody(self.ode_body) def __str__(self): return '{0.__class__.__name__} {0.name} at {1}'.format( self, self.position.round(3)) @property def mass(self): '''The ODE mass object for this body.''' return self.ode_body.getMass() @property def state(self): '''The state of this body includes: - name of the body (str) - position (3-tuple) - quaternion (4-tuple) - linear velocity (3-tuple) - angular velocity (3-tuple) ''' return BodyState(self.name, tuple(self.position), tuple(self.quaternion), tuple(self.linear_velocity), tuple(self.angular_velocity)) @state.setter def state(self, state): '''Set the state of this body. Parameters ---------- state : BodyState tuple The desired state of the body. ''' assert self.name == state.name, \ 'state name "{}" != body name "{}"'.format(state.name, self.name) self.position = state.position self.quaternion = state.quaternion self.linear_velocity = state.linear_velocity self.angular_velocity = state.angular_velocity @property def position(self): '''The (x, y, z) coordinates of the center of this body.''' return np.array(self.ode_body.getPosition()) @position.setter def position(self, position): '''Set the (x, y, z) coordinates of the center of this body. Parameters ---------- position : 3-tuple of float The coordinates of the desired center of this body. ''' self.ode_body.setPosition(tuple(position)) @property def rotation(self): '''The rotation matrix for this body.''' return np.array(self.ode_body.getRotation()).reshape((3, 3)) @rotation.setter def rotation(self, rotation): '''Set the rotation of this body using a rotation matrix. Parameters ---------- rotation : sequence of 9 floats The desired rotation matrix for this body. 
''' if isinstance(rotation, np.ndarray): rotation = rotation.ravel() self.ode_body.setRotation(tuple(rotation)) @property def quaternion(self): '''The (w, x, y, z) rotation quaternion for this body.''' return np.array(self.ode_body.getQuaternion()) @quaternion.setter def quaternion(self, quaternion): self.ode_body.setQuaternion(tuple(quaternion)) @property def linear_velocity(self): '''Current linear velocity of this body (in world coordinates).''' return np.array(self.ode_body.getLinearVel()) @linear_velocity.setter def linear_velocity(self, velocity): '''Set the linear velocity for this body. Parameters ---------- velocity : 3-tuple of float The desired velocity for this body, in world coordinates. ''' self.ode_body.setLinearVel(tuple(velocity)) @property def angular_velocity(self): '''Current angular velocity of this body (in world coordinates).''' return np.array(self.ode_body.getAngularVel()) @angular_velocity.setter def angular_velocity(self, velocity): '''Set the angular velocity for this body. Parameters ---------- velocity : 3-tuple of float The desired angular velocity for this body, in world coordinates. ''' self.ode_body.setAngularVel(tuple(velocity)) @property def force(self): '''Current net force acting on this body (in world coordinates).''' return np.array(self.ode_body.getForce()) @force.setter def force(self, force): '''Set the force acting on this body. Parameters ---------- force : 3-tuple of float The desired force acting on this body, in world coordinates. ''' self.ode_body.setForce(tuple(force)) @property def torque(self): '''Current net torque acting on this body (in world coordinates).''' return np.array(self.ode_body.getTorque()) @torque.setter def torque(self, torque): '''Set the torque acting on this body. Parameters ---------- torque : 3-tuple of float The desired torque acting on this body, in world coordinates. ''' self.ode_body.setTorque(tuple(torque)) @property def is_kinematic(self): '''True iff this body is kinematic.''' return self.ode_body.isKinematic() @is_kinematic.setter def is_kinematic(self, is_kinematic): '''Set the kinematic/dynamic attribute for this body. In pagoda, kinematic bodies have infinite mass and do interact with other bodies via collisions. Parameters ---------- is_kinematic : bool If True, this body will be set to kinematic. If False, it will be set to dynamic. ''' if is_kinematic: self.ode_body.setKinematic() else: self.ode_body.setDynamic() @property def follows_gravity(self): '''True iff this body follows gravity.''' return self.ode_body.getGravityMode() @follows_gravity.setter def follows_gravity(self, follows_gravity): '''Set whether this body follows gravity. Parameters ---------- follows_gravity : bool This body will follow gravity iff this parameter is True. ''' self.ode_body.setGravityMode(follows_gravity) def rotate_to_body(self, x): '''Rotate the given vector to the same orientation as this body. Parameters ---------- x : 3-tuple of float A point in three dimensions. Returns ------- xrot : 3-tuple of float The same point after rotation into the orientation of this body. ''' return np.dot(x, self.rotation) def body_to_world(self, position): '''Convert a body-relative offset to world coordinates. Parameters ---------- position : 3-tuple of float A tuple giving body-relative offsets. Returns ------- position : 3-tuple of float A tuple giving the world coordinates of the given offset. 
''' return np.array(self.ode_body.getRelPointPos(tuple(position))) def world_to_body(self, position): '''Convert a point in world coordinates to a body-relative offset. Parameters ---------- position : 3-tuple of float A world coordinates position. Returns ------- offset : 3-tuple of float A tuple giving the body-relative offset of the given position. ''' return np.array(self.ode_body.getPosRelPoint(tuple(position))) def relative_offset_to_world(self, offset): '''Convert a relative body offset to world coordinates. Parameters ---------- offset : 3-tuple of float The offset of the desired point, given as a relative fraction of the size of this body. For example, offset (0, 0, 0) is the center of the body, while (0.5, -0.2, 0.1) describes a point halfway from the center towards the maximum x-extent of the body, 20% of the way from the center towards the minimum y-extent, and 10% of the way from the center towards the maximum z-extent. Returns ------- position : 3-tuple of float A position in world coordinates of the given body offset. ''' return np.array(self.body_to_world(offset * self.dimensions / 2)) def add_force(self, force, relative=False, position=None, relative_position=None): '''Add a force to this body. Parameters ---------- force : 3-tuple of float A vector giving the forces along each world or body coordinate axis. relative : bool, optional If False, the force values are assumed to be given in the world coordinate frame. If True, they are assumed to be given in the body-relative coordinate frame. Defaults to False. position : 3-tuple of float, optional If given, apply the force at this location in world coordinates. Defaults to the current position of the body. relative_position : 3-tuple of float, optional If given, apply the force at this relative location on the body. If given, this method ignores the ``position`` parameter. ''' b = self.ode_body if relative_position is not None: op = b.addRelForceAtRelPos if relative else b.addForceAtRelPos op(force, relative_position) elif position is not None: op = b.addRelForceAtPos if relative else b.addForceAtPos op(force, position) else: op = b.addRelForce if relative else b.addForce op(force) def add_torque(self, torque, relative=False): '''Add a torque to this body. Parameters ---------- force : 3-tuple of float A vector giving the torque along each world or body coordinate axis. relative : bool, optional If False, the torque values are assumed to be given in the world coordinate frame. If True, they are assumed to be given in the body-relative coordinate frame. Defaults to False. ''' op = self.ode_body.addRelTorque if relative else self.ode_body.addTorque op(torque) def join_to(self, joint, other_body=None, **kwargs): '''Connect this body to another one using a joint. This method creates a joint to fasten this body to the other one. See :func:`World.join`. Parameters ---------- joint : str The type of joint to use when connecting these bodies. other_body : :class:`Body` or str, optional The other body to join with this one. If not given, connects this body to the world. ''' self.world.join(joint, self, other_body, **kwargs) def connect_to(self, joint, other_body, offset=(0, 0, 0), other_offset=(0, 0, 0), **kwargs): '''Move another body next to this one and join them together. This method will move the ``other_body`` so that the anchor points for the joint coincide. It then creates a joint to fasten the two bodies together. See :func:`World.move_next_to` and :func:`World.join`. 
Parameters ---------- joint : str The type of joint to use when connecting these bodies. other_body : :class:`Body` or str The other body to join with this one. offset : 3-tuple of float, optional The body-relative offset where the anchor for the joint should be placed. Defaults to (0, 0, 0). See :func:`World.move_next_to` for a description of how offsets are specified. other_offset : 3-tuple of float, optional The offset on the second body where the joint anchor should be placed. Defaults to (0, 0, 0). Like ``offset``, this is given as an offset relative to the size and shape of ``other_body``. ''' anchor = self.world.move_next_to(self, other_body, offset, other_offset) self.world.join(joint, self, other_body, anchor=anchor, **kwargs) class Box(Body): @property def lengths(self): return self.shape['lengths'] @property def dimensions(self): return np.array(self.lengths).squeeze() @property def volume(self): return np.prod(self.lengths) def init_mass(self, m, density, mass): if mass: density = mass / self.volume m.setBox(density, *self.lengths) class Sphere(Body): @property def radius(self): return self.shape['radius'] @property def dimensions(self): d = 2 * self.radius return np.array([d, d, d]).squeeze() @property def volume(self): return 4 / 3 * np.pi * self.radius ** 3 def init_mass(self, m, density, mass): if mass: density = mass / self.volume m.setSphere(density, self.radius) class Cylinder(Body): @property def radius(self): return self.shape['radius'] @property def length(self): return self.shape['length'] @property def dimensions(self): d = 2 * self.radius return np.array([d, d, self.length]).squeeze() @property def volume(self): return self.length * np.pi * self.radius ** 2 def init_mass(self, m, density, mass): if mass: density = mass / self.volume m.setCylinder(density, 3, self.radius, self.length) class Capsule(Body): @property def radius(self): return self.shape['radius'] @property def length(self): return self.shape['length'] @property def dimensions(self): d = 2 * self.radius return np.array([d, d, d + self.length]).squeeze() @property def volume(self): return 4 / 3 * np.pi * self.radius ** 3 + \ self.length * np.pi * self.radius ** 2 def init_mass(self, m, density, mass): if mass: density = mass / self.volume m.setCapsule(density, 3, self.radius, self.length) def _get_params(target, param, dof): '''Get the given param from each of the DOFs for a joint.''' return [target.getParam(getattr(ode, 'Param{}{}'.format(param, s))) for s in ['', '2', '3'][:dof]] def _set_params(target, param, values, dof): '''Set the given param for each of the DOFs for a joint.''' if not isinstance(values, (list, tuple, np.ndarray)): values = [values] * dof assert dof == len(values) for s, value in zip(['', '2', '3'][:dof], values): target.setParam(getattr(ode, 'Param{}{}'.format(param, s)), value) class Joint(Registrar(str('Base'), (), {})): '''Base class for joints connecting two bodies. In ODE, :class:`Body` objects represent mass/inertia properties, while :class:`Joint` and :class:`Motor` objects represent mathematical constraints that govern how specific pairs of bodies interact. For example, a :class:`BallJoint` that connects two bodies will force the anchor point for those two bodies to remain in the same location in world coordinates -- any linear force that displaces one of the bodies will also cause a force to be applied to the second body, because of the constraint imposed by the ball joint. 
As another example, a :class:`Slider` that connects two bodies allows those two bodies to displace relative to one another along a single axis, but not to rotate with respect to one another -- any torque applied to one body will also cause a torque to be applied to the other body. Constraints can be applied to angular degrees of freedom (e.g., :class:`AMotor`), linear degrees of freedom (e.g., :class:`BallJoint`, :class:`LMotor`), or both (e.g., :class:`PistonJoint`). Both joints and motors apply constraints to pairs of bodies, but they are quite different in many ways and so are represented using specific subclasses. This superclass is just a mixin to avoid repeating the getters and setters that are common between motors and joints. ''' ADOF = 0 LDOF = 0 @property def feedback(self): '''Feedback buffer (list of 3-tuples) for this ODE motor/joint.''' return self.ode_obj.getFeedback() @property def positions(self): '''List of positions for linear degrees of freedom.''' return [self.ode_obj.getPosition(i) for i in range(self.LDOF)] @property def position_rates(self): '''List of position rates for linear degrees of freedom.''' return [self.ode_obj.getPositionRate(i) for i in range(self.LDOF)] @property def angles(self): '''List of angles for rotational degrees of freedom.''' return [self.ode_obj.getAngle(i) for i in range(self.ADOF)] @property def angle_rates(self): '''List of angle rates for rotational degrees of freedom.''' return [self.ode_obj.getAngleRate(i) for i in range(self.ADOF)] @property def axes(self): '''List of axes for this object's degrees of freedom.''' return [np.array(self.ode_obj.getAxis(i)) for i in range(self.ADOF or self.LDOF)] @axes.setter def axes(self, axes): '''Set the axes for this object's degrees of freedom. Parameters ---------- axes : list of axes specifications A list of axis values to set. This list must have the same number of elements as the degrees of freedom of the underlying ODE object. Each element can be (a) None, which has no effect on the corresponding axis, or (b) three floats specifying the axis to set. ''' assert self.ADOF == len(axes) or self.LDOF == len(axes) for i, axis in enumerate(axes): if axis is not None: self.ode_obj.setAxis(i, 0, axis) @property def lo_stops(self): '''List of lo stop values for this object's degrees of freedom.''' return _get_params(self.ode_obj, 'LoStop', self.ADOF + self.LDOF) @lo_stops.setter def lo_stops(self, lo_stops): '''Set the lo stop values for this object's degrees of freedom. Parameters ---------- lo_stops : float or sequence of float A lo stop value to set on all degrees of freedom, or a list containing one such value for each degree of freedom. For rotational degrees of freedom, these values must be in radians. ''' _set_params(self.ode_obj, 'LoStop', lo_stops, self.ADOF + self.LDOF) @property def hi_stops(self): '''List of hi stop values for this object's degrees of freedom.''' return _get_params(self.ode_obj, 'HiStop', self.ADOF + self.LDOF) @hi_stops.setter def hi_stops(self, hi_stops): '''Set the hi stop values for this object's degrees of freedom. Parameters ---------- hi_stops : float or sequence of float A hi stop value to set on all degrees of freedom, or a list containing one such value for each degree of freedom. For rotational degrees of freedom, these values must be in radians. 
''' _set_params(self.ode_obj, 'HiStop', hi_stops, self.ADOF + self.LDOF) @property def velocities(self): '''List of target velocity values for rotational degrees of freedom.''' return _get_params(self.ode_obj, 'Vel', self.ADOF + self.LDOF) @velocities.setter def velocities(self, velocities): '''Set the target velocities for this object's degrees of freedom. Parameters ---------- velocities : float or sequence of float A target velocity value to set on all degrees of freedom, or a list containing one such value for each degree of freedom. For rotational degrees of freedom, these values must be in radians / second. ''' _set_params(self.ode_obj, 'Vel', velocities, self.ADOF + self.LDOF) @property def max_forces(self): '''List of max force values for rotational degrees of freedom.''' return _get_params(self.ode_obj, 'FMax', self.ADOF + self.LDOF) @max_forces.setter def max_forces(self, max_forces): '''Set the maximum forces for this object's degrees of freedom. Parameters ---------- max_forces : float or sequence of float A maximum force value to set on all degrees of freedom, or a list containing one such value for each degree of freedom. ''' _set_params(self.ode_obj, 'FMax', max_forces, self.ADOF + self.LDOF) @property def erps(self): '''List of ERP values for this object's degrees of freedom.''' return _get_params(self.ode_obj, 'ERP', self.ADOF + self.LDOF) @erps.setter def erps(self, erps): '''Set the ERP values for this object's degrees of freedom. Parameters ---------- erps : float or sequence of float An ERP value to set on all degrees of freedom, or a list containing one such value for each degree of freedom. ''' _set_params(self.ode_obj, 'ERP', erps, self.ADOF + self.LDOF) @property def cfms(self): '''List of CFM values for this object's degrees of freedom.''' return _get_params(self.ode_obj, 'CFM', self.ADOF + self.LDOF) @cfms.setter def cfms(self, cfms): '''Set the CFM values for this object's degrees of freedom. Parameters ---------- cfms : float or sequence of float A CFM value to set on all degrees of freedom, or a list containing one such value for each degree of freedom. ''' _set_params(self.ode_obj, 'CFM', cfms, self.ADOF + self.LDOF) @property def stop_cfms(self): '''List of lo/hi stop CFM values.''' return _get_params(self.ode_obj, 'StopCFM', self.ADOF + self.LDOF) @stop_cfms.setter def stop_cfms(self, stop_cfms): '''Set the CFM values for this object's DOF limits. Parameters ---------- stop_cfms : float or sequence of float A CFM value to set on all degrees of freedom limits, or a list containing one such value for each degree of freedom limit. ''' _set_params(self.ode_obj, 'StopCFM', stop_cfms, self.ADOF + self.LDOF) @property def stop_erps(self): '''List of lo/hi stop ERP values.''' return _get_params(self.ode_obj, 'StopERP', self.ADOF + self.LDOF) @stop_erps.setter def stop_erps(self, stop_erps): '''Set the ERP values for this object's DOF limits. Parameters ---------- stop_erps : float or sequence of float An ERP value to set on all degrees of freedom limits, or a list containing one such value for each degree of freedom limit. ''' _set_params(self.ode_obj, 'StopERP', stop_erps, self.ADOF + self.LDOF) def enable_feedback(self): '''Enable feedback on this ODE object.''' self.ode_obj.setFeedback(True) def disable_feedback(self): '''Disable feedback on this ODE object.''' self.ode_obj.setFeedback(False) class Dynamic(Joint): '''This class wraps an ODE motor -- either an LMotor or an AMotor. Parameters ---------- name : str A name for this object in the world. 
world : :class:`World` A world object to which this motor belongs. body_a : :class:`Body` A first body connected to this joint. body_b : :class:`Body`, optional A second body connected to this joint. If not given, the joint will connect the first body to the world. feedback : bool, optional Feedback will be enabled on this motor iff this is True. Defaults to False. dof : int, optional Number of degrees of freedom in this motor. Defaults to 3. jointgroup : ode.JointGroup, optional A joint group to which this motor belongs. Defaults to the default joint group in the world. ''' def __init__(self, name, world, body_a, body_b=None, feedback=False, dof=3, jointgroup=None): self.name = name self.ode_obj = self.MOTOR_FACTORY(world.ode_world, jointgroup=jointgroup) self.ode_obj.attach(body_a.ode_body, body_b.ode_body if body_b else None) self.ode_obj.setNumAxes(dof) self.cfms = 1e-8 if feedback: self.enable_feedback() else: self.disable_feedback() class AMotor(Dynamic): '''An angular motor applies torques to change an angle in the physics world. AMotors can be created in "user" mode---in which case the user must supply all axis and angle values---or, for 3-DOF motors, in "euler" mode---in which case the first and last axes must be specified, and ODE computes the middle axis automatically. ''' MOTOR_FACTORY = ode.AMotor def __init__(self, *args, **kwargs): mode = kwargs.pop('mode', 'user') if isinstance(mode, str): mode = ode.AMotorEuler if mode.lower() == 'euler' else ode.AMotorUser super(AMotor, self).__init__(*args, **kwargs) self.ode_obj.setMode(mode) @property def ADOF(self): '''Number of angular degrees of freedom for this motor.''' return self.ode_obj.getNumAxes() @property def axes(self): '''List of axes for this object's degrees of freedom.''' return [np.array(self.ode_obj.getAxis(i)) for i in range(self.ADOF)] @axes.setter def axes(self, axes): '''Set the axes for this object's degrees of freedom. Parameters ---------- axes : list of axis parameters A list of axis values to set. This list must have the same number of elements as the degrees of freedom of the underlying ODE object. Each element can be (a) None, which has no effect on the corresponding axis, or (b) three floats specifying the axis to set, or (c) a dictionary with an "axis" key specifying the axis to set and an optional "rel" key (defaults to 0) specifying the relative body to set the axis on. ''' assert len(axes) == self.ADOF for i, ax in enumerate(axes): if ax is None: continue if not isinstance(ax, dict): ax = dict(axis=ax) self.ode_obj.setAxis(i, ax.get('rel', 0), ax['axis']) def add_torques(self, torques): '''Add the given torques along this motor's axes. Parameters ---------- torques : sequence of float A sequence of torque values to apply to this motor's axes. ''' self.ode_obj.addTorques(*torques) class LMotor(Dynamic): '''An LMotor applies forces to change a position in the physics world.''' MOTOR_FACTORY = ode.LMotor @property def LDOF(self): '''Number of linear degrees of freedom for this motor.''' return self.ode_obj.getNumAxes() class Kinematic(Joint): '''This class wraps kinematic ODE joints with some Python properties. Parameters ---------- name : str Name of the joint to create. This is only to make the joint discoverable in the world. world : :class:`World` Wrapper for the world in which this joint exists. body_a : :class:`Body` Wrapper for the first body that this joint connects. body_b : :class:`Body`, optional Wrapper for the second body that this joint connects. 
If this is None, the joint will connect ``body_a`` to the ``world``. anchor : 3-tuple of floats, optional Anchor in world coordinates for the joint. Optional for :class:`Fixed` joint. feedback : bool, optional If this is True, a force feedback structure will be enabled for this joint, which will make it possible to record the forces that this joint exerts on its two bodies. By default, no structure will be allocated. jointgroup : ODE joint group, optional Add the joint to this group. Defaults to the default world joint group. ''' def __init__(self, name, world, body_a, body_b=None, anchor=None, feedback=False, jointgroup=None, amotor=True, lmotor=True): self.name = name build = getattr(ode, '{}Joint'.format(self.__class__.__name__)) self.ode_obj = build(world.ode_world, jointgroup=jointgroup) self.ode_obj.attach(body_a.ode_body, body_b.ode_body if body_b else None) if anchor is not None: self.ode_obj.setAnchor(tuple(anchor)) self.ode_obj.setParam(ode.ParamCFM, 0) self.amotor = None if self.ADOF > 0 and amotor: self.amotor = AMotor(name=name + ':amotor', world=world, body_a=body_a, body_b=body_b, feedback=feedback, jointgroup=jointgroup, dof=self.ADOF, mode='euler' if self.ADOF == 3 else 'user') self.lmotor = None if self.LDOF > 0 and lmotor: self.lmotor = LMotor(name=name + ':lmotor', world=world, body_a=body_a, body_b=body_b, feedback=feedback, jointgroup=jointgroup, dof=self.LDOF) if feedback: self.enable_feedback() else: self.disable_feedback() def __str__(self): return self.name @property def anchor(self): '''3-tuple specifying location of this joint's anchor.''' return np.array(self.ode_obj.getAnchor()) @property def anchor2(self): '''3-tuple specifying location of the anchor on the second body.''' return np.array(self.ode_obj.getAnchor2()) def add_torques(self, *torques): '''Add the given torques along this joint's axes. Parameters ---------- torques : sequence of float A sequence of torque values to apply to this motor's axes. ''' self.amotor.add_torques(*torques) class Fixed(Kinematic): ADOF = 0 LDOF = 0 class Slider(Kinematic): ADOF = 0 LDOF = 1 @property def positions(self): '''List of positions for this joint's linear degrees of freedom.''' return [self.ode_obj.getPosition()] @property def position_rates(self): '''List of position rates for this joint's degrees of freedom.''' return [self.ode_obj.getPositionRate()] @property def axes(self): '''Axis of displacement for this joint.''' return [np.array(self.ode_obj.getAxis())] @axes.setter def axes(self, axes): '''Set the linear axis of displacement for this joint. Parameters ---------- axes : list containing one 3-tuple of floats A list of the axes for this joint. For a slider joint, which has one degree of freedom, this must contain one 3-tuple specifying the X, Y, and Z axis for the joint. ''' self.lmotor.axes = [axes[0]] self.ode_obj.setAxis(tuple(axes[0])) class Hinge(Kinematic): ADOF = 1 LDOF = 0 @property def angles(self): '''List of angles for this joint's rotational degrees of freedom.''' return [self.ode_obj.getAngle()] @property def angle_rates(self): '''List of angle rates for this joint's degrees of freedom.''' return [self.ode_obj.getAngleRate()] @property def axes(self): '''Axis of rotation for this joint.''' return [np.array(self.ode_obj.getAxis())] @axes.setter def axes(self, axes): '''Set the angular axis of rotation for this joint. Parameters ---------- axes : list containing one 3-tuple of floats A list of the axes for this joint. 
For a hinge joint, which has one degree of freedom, this must contain one 3-tuple specifying the X, Y, and Z axis for the joint. ''' self.amotor.axes = [axes[0]] self.ode_obj.setAxis(tuple(axes[0])) class Piston(Kinematic): ADOF = 1 LDOF = 1 @property def axes(self): '''Axis of rotation and displacement for this joint.''' return [np.array(self.ode_obj.getAxis())] @axes.setter def axes(self, axes): self.amotor.axes = [axes[0]] self.lmotor.axes = [axes[0]] self.ode_obj.setAxis(axes[0]) class Universal(Kinematic): ADOF = 2 LDOF = 0 @property def axes(self): '''A list of axes of rotation for this joint.''' return [np.array(self.ode_obj.getAxis1()), np.array(self.ode_obj.getAxis2())] @axes.setter def axes(self, axes): self.amotor.axes = [axes[0], axes[1]] setters = [self.ode_obj.setAxis1, self.ode_obj.setAxis2] for axis, setter in zip(axes, setters): if axis is not None: setter(tuple(axis)) @property def angles(self): '''A list of two angles for this joint's degrees of freedom.''' return [self.ode_obj.getAngle1(), self.ode_obj.getAngle2()] @property def angle_rates(self): '''A list of two angle rates for this joint's degrees of freedom.''' return [self.ode_obj.getAngle1Rate(), self.ode_obj.getAngle2Rate()] class Ball(Kinematic): ADOF = 3 LDOF = 0 def __init__(self, name, *args, **kwargs): super(Ball, self).__init__(name, *args, **kwargs) # we augment ball joints with an additional motor that allows us to set # rotation limits. keys = 'name world body_a body_b feedback dof jointgroup'.split() self.alimit = AMotor(name + ':alimit', *args, dof=self.ADOF, mode='euler', **{k: v for k, v in kwargs.items() if k in keys}) @property def angles(self): return self.alimit.angles @property def angle_rates(self): return self.alimit.angle_rates @property def axes(self): return self.alimit.axes @axes.setter def axes(self, axes): if len(axes) == 2: axes = dict(rel=1, axis=axes[0]), None, dict(rel=2, axis=axes[1]) self.amotor.axes = axes self.alimit.axes = axes @property def lo_stops(self): return self.alimit.lo_stops @lo_stops.setter def lo_stops(self, lo_stops): self.alimit.lo_stops = lo_stops @property def hi_stops(self): return self.alimit.hi_stops @hi_stops.setter def hi_stops(self, hi_stops): self.alimit.hi_stops = hi_stops def make_quaternion(theta, *axis): '''Given an angle and an axis, create a quaternion.''' x, y, z = axis r = np.sqrt(x * x + y * y + z * z) st = np.sin(theta / 2.) ct = np.cos(theta / 2.) return [x * st / r, y * st / r, z * st / r, ct] def center_of_mass(bodies): '''Given a set of bodies, compute their center of mass in world coordinates. ''' x = np.zeros(3.) t = 0. for b in bodies: m = b.mass x += b.body_to_world(m.c) * m.mass t += m.mass return x / t class World(object): '''A wrapper for an ODE World object, for running in a simulator.''' def __init__(self, dt=1. / 60, max_angular_speed=20): self.ode_world = ode.World() self.ode_world.setMaxAngularSpeed(max_angular_speed) self.ode_space = ode.QuadTreeSpace((0, 0, 0), (100, 100, 20), 10) self.ode_floor = ode.GeomPlane(self.ode_space, (0, 0, 1), 0) self.ode_contactgroup = ode.JointGroup() self.frame_no = 0 self.dt = dt self.elasticity = 0.1 self.friction = 2000 self.gravity = 0, 0, -9.81 self.cfm = 1e-6 self.erp = 0.7 self._bodies = {} self._joints = {} @property def gravity(self): '''Current gravity vector in the world.''' return self.ode_world.getGravity() @gravity.setter def gravity(self, gravity): '''Set the gravity vector in the world. Parameters ---------- gravity : 3-tuple of float The vector where gravity should point. 
''' return self.ode_world.setGravity(gravity) @property def cfm(self): '''Current global CFM value.''' return self.ode_world.getCFM() @cfm.setter def cfm(self, cfm): '''Set the global CFM value. Parameters ---------- cfm : float The desired global CFM value. ''' return self.ode_world.setCFM(cfm) @property def erp(self): '''Current global ERP value.''' return self.ode_world.getERP() @erp.setter def erp(self, erp): '''Set the global ERP value. Parameters ---------- erp : float The desired global ERP value. ''' return self.ode_world.setERP(erp) @property def bodies(self): '''Sequence of all bodies in the world, sorted by name.''' for k in sorted(self._bodies): yield self._bodies[k] @property def joints(self): '''Sequence of all joints in the world, sorted by name.''' for k in sorted(self._joints): yield self._joints[k] def get_body(self, key): '''Get a body by key. Parameters ---------- key : str, None, or :class:`Body` The key for looking up a body. If this is None or a :class:`Body` instance, the key itself will be returned. Returns ------- body : :class:`Body` The body in the world with the given key. ''' return self._bodies.get(key, key) def get_joint(self, key): '''Get a joint by key. Parameters ---------- key : str The key for a joint to look up. Returns ------- joint : :class:`Joint` The joint in the world with the given key, or None if there is no such joint. ''' return self._joints.get(key, None) def create_body(self, shape, name=None, **kwargs): '''Create a new body. Parameters ---------- shape : str The "shape" of the body to be created. This should name a type of body object, e.g., "box" or "cap". name : str, optional The name to use for this body. If not given, a default name will be constructed of the form "{shape}{# of objects in the world}". Returns ------- body : :class:`Body` The created body object. ''' shape = shape.lower() if name is None: for i in range(1 + len(self._bodies)): name = '{}{}'.format(shape, i) if name not in self._bodies: break self._bodies[name] = Body.build(shape, name, self, **kwargs) return self._bodies[name] def join(self, shape, body_a, body_b=None, name=None, **kwargs): '''Create a new joint that connects two bodies together. Parameters ---------- shape : str The "shape" of the joint to use for joining together two bodies. This should name a type of joint, such as "ball" or "piston". body_a : str or :class:`Body` The first body to join together with this joint. If a string is given, it will be used as the name of a body to look up in the world. body_b : str or :class:`Body`, optional If given, identifies the second body to join together with ``body_a``. If not given, ``body_a`` is joined to the world. name : str, optional If given, use this name for the created joint. If not given, a name will be constructed of the form "{body_a.name}^{shape}^{body_b.name}". Returns ------- joint : :class:`Joint` The joint object that was created. ''' ba = self.get_body(body_a) bb = self.get_body(body_b) shape = shape.lower() if name is None: name = '{}^{}^{}'.format(ba.name, shape, bb.name if bb else '') self._joints[name] = Joint.build( shape, name, self, body_a=ba, body_b=bb, **kwargs) return self._joints[name] def move_next_to(self, body_a, body_b, offset_a, offset_b): '''Move one body to be near another one. After moving, the location described by ``offset_a`` on ``body_a`` will be coincident with the location described by ``offset_b`` on ``body_b``. Parameters ---------- body_a : str or :class:`Body` The body to use as a reference for moving the other body. 
If this is a string, it is treated as the name of a body to look up in the world. body_b : str or :class:`Body` The body to move next to ``body_a``. If this is a string, it is treated as the name of a body to look up in the world. offset_a : 3-tuple of float The offset of the anchor point, given as a relative fraction of the size of ``body_a``. See :func:`Body.relative_offset_to_world`. offset_b : 3-tuple of float The offset of the anchor point, given as a relative fraction of the size of ``body_b``. Returns ------- anchor : 3-tuple of float The location of the shared point, which is often useful to use as a joint anchor. ''' ba = self.get_body(body_a) bb = self.get_body(body_b) if ba is None: return bb.relative_offset_to_world(offset_b) if bb is None: return ba.relative_offset_to_world(offset_a) anchor = ba.relative_offset_to_world(offset_a) offset = bb.relative_offset_to_world(offset_b) bb.position = bb.position + anchor - offset return anchor def get_body_states(self): '''Return the complete state of all bodies in the world. Returns ------- states : list of state information tuples A list of body state information for each body in the world. See :func:`Body.state`. ''' return [b.state for b in self.bodies] def set_body_states(self, states): '''Set the states of some bodies in the world. Parameters ---------- states : sequence of states A complete state tuple for one or more bodies in the world. See :func:`get_body_states`. ''' for state in states: self.get_body(state.name).state = state def step(self, substeps=2): '''Step the world forward by one frame. Parameters ---------- substeps : int, optional Split the step into this many sub-steps. This helps to prevent the time delta for an update from being too large. ''' self.frame_no += 1 dt = self.dt / substeps for _ in range(substeps): self.ode_contactgroup.empty() self.ode_space.collide(None, self.on_collision) self.ode_world.step(dt) def needs_reset(self): '''Return True iff the world needs to be reset.''' return False def reset(self): '''Reset the state of the world.''' pass def on_key_press(self, key, modifiers, keymap): '''Handle an otherwise unhandled keypress event (from a GUI).''' if key == keymap.ENTER: self.reset() return True def are_connected(self, body_a, body_b): '''Determine whether the given bodies are currently connected. Parameters ---------- body_a : str or :class:`Body` One body to test for connectedness. If this is a string, it is treated as the name of a body to look up. body_b : str or :class:`Body` One body to test for connectedness. If this is a string, it is treated as the name of a body to look up. Returns ------- connected : bool Return True iff the two bodies are connected. ''' return bool(ode.areConnected( self.get_body(body_a).ode_body, self.get_body(body_b).ode_body)) def on_collision(self, args, geom_a, geom_b): '''Callback function for the collide() method. Parameters ---------- args : None Arguments passed when the callback was registered. Not used. geom_a : ODE geometry The geometry object of one of the bodies that has collided. geom_b : ODE geometry The geometry object of one of the bodies that has collided. ''' body_a = geom_a.getBody() body_b = geom_b.getBody() if ode.areConnected(body_a, body_b) or \ (body_a and body_a.isKinematic()) or \ (body_b and body_b.isKinematic()): return for c in ode.collide(geom_a, geom_b): c.setBounce(self.elasticity) c.setMu(self.friction) ode.ContactJoint(self.ode_world, self.ode_contactgroup, c).attach( geom_a.getBody(), geom_b.getBody())
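# ---------------------------------------------------------------------------
# Editor's note: the following usage sketch is NOT part of the original
# module. It is a hedged illustration of how the wrappers above fit
# together, assuming pyODE is installed and the module is importable (the
# import name `cooper` below is hypothetical). Shape and joint names are
# resolved through the Registrar metaclass, so any unambiguous prefix of at
# least three characters works ('box', 'sph', 'cap', 'hinge', ...).
#
#     import cooper  # hypothetical name for the module above
#
#     world = cooper.World(dt=1. / 120)
#
#     # Bodies get default names of the form "{shape}{index}".
#     box = world.create_body('box', lengths=(0.5, 0.3, 0.2))   # -> 'box0'
#     ball = world.create_body('sphere', radius=0.1)            # -> 'sphere0'
#     box.position = (0, 0, 2)
#
#     # Move the sphere flush against the +x face of the box and fasten the
#     # two with a hinge anchored at the shared point (see connect_to()).
#     box.connect_to('hinge', ball, (1, 0, 0), (-1, 0, 0))
#     joint = world.get_joint('box0^hinge^sphere0')  # name convention from join()
#     joint.axes = [(0, 1, 0)]
#
#     for _ in range(100):
#         world.step()
# ---------------------------------------------------------------------------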
mit
-2,953,832,453,207,946,000
32.349364
86
0.58192
false
4.007479
false
false
false
IQSS/geoconnect
gc_apps/gis_shapefiles/views_02_visualize.py
1
4711
from __future__ import print_function

import logging

from django.http import HttpResponse
from django.views.generic import View
from django.template.loader import render_to_string
from django.conf import settings

from gc_apps.gis_shapefiles.models import ShapefileInfo
from gc_apps.worldmap_layers.models import WorldMapLayerInfo
from gc_apps.worldmap_connect.send_shapefile_service import SendShapefileService

from gc_apps.geo_utils.geoconnect_step_names import GEOCONNECT_STEP_KEY, STEP2_STYLE,\
    PANEL_TITLE_MAP_DATA_FILE, PANEL_TITLE_STYLE_MAP

from shared_dataverse_information.layer_classification.forms import\
    ClassifyLayerForm, ATTRIBUTE_VALUE_DELIMITER

from gc_apps.geo_utils.message_helper_json import MessageHelperJSON
from gc_apps.gis_tabular.views import build_map_html

LOGGER = logging.getLogger(__name__)
from gc_apps.geo_utils.msg_util import msg, msgt

"""
Handle AJAX requests to Visualize a Layer
    - Upon successful visualization, several pieces of the page are updated, including:
        - page title
        - breadcrumb
        - main content panel
"""

def render_ajax_basic_err_msg(err_note, shapefile_info=None):
    """Convenience method for returning an error message via AJAX"""
    d = { 'DATAVERSE_SERVER_URL' : settings.DATAVERSE_SERVER_URL\
        , 'ERR_NOTE' : err_note\
        , 'shapefile_info' : shapefile_info\
        }
    return render_to_string('gis_shapefiles/view_02_ajax_basic_err.html', d)

class ViewAjaxVisualizeShapefile(View):
    """
    Given the md5 of a ShapefileInfo, attempt to visualize the file on WorldMap
    Return a JSON response
    """
    def get(self, request, shp_md5):
        """Use the SendShapefileService to create a map from a shapefile.
        - SendShapefileService takes care of details starting with
        retrieving the ShapefileInfo object
        """
        # OK if shp_md5 is None, SendShapefileService creates error message
        #
        send_shp_service = SendShapefileService(**dict(shp_md5=shp_md5))

        # Send the shapefile to WorldMap
        #
        success = send_shp_service.send_shapefile_to_worldmap()

        # -----------------------------------
        # Did it work?  NOPE!  Failed along the way!
        # -----------------------------------
        if not success:
            err_note = ('Sorry! The shapefile mapping did not work.'
                        '<br /><span class="small">{0}</span>').format(\
                        '<br />'.join(send_shp_service.err_msgs))
            LOGGER.error(err_note)
            err_note_html = render_ajax_basic_err_msg(err_note,\
                                send_shp_service.shapefile_info)
            json_msg = MessageHelperJSON.get_json_fail_msg(err_note_html,
                            dict(id_main_panel_content=err_note_html))
            return HttpResponse(json_msg, content_type="application/json", status=200)

        # -----------------------------------
        # Yes! We have a new map layer
        # -----------------------------------
        worldmap_shapefile_layerinfo = send_shp_service.get_worldmap_layerinfo()
        # Guard *before* using the object, otherwise a None result would raise
        # an AttributeError before this assert could fire
        assert worldmap_shapefile_layerinfo is not None,\
            "Failure in SendShapefileService! Said success but no worldmap_layerinfo (WorldMapShapefileLayerInfo)"
        shapefile_info = worldmap_shapefile_layerinfo.get_gis_data_info()

        # -----------------------------------------
        # Build the Map HTML to replace the form
        # -----------------------------------------
        map_html, user_message_html = build_map_html(request, worldmap_shapefile_layerinfo)
        if map_html is None:    # Failed!  Send an error
            LOGGER.error("Failed to create map HTML using WorldMapShapefileLayerInfo: %s (%d)",\
                worldmap_shapefile_layerinfo, worldmap_shapefile_layerinfo.id)
            user_msg = 'Sorry! Failed to create map. Please try again. (code: s3)'
            json_msg = MessageHelperJSON.get_json_fail_msg(user_msg)
            return HttpResponse(json_msg, content_type="application/json", status=200)

        # -----------------------------------------
        # Looks good.  In the JSON response, send
        #   back the map HTML
        # -----------------------------------------
        data_dict = dict(\
            map_html=map_html,
            user_message_html=user_message_html,
            id_main_panel_title=PANEL_TITLE_STYLE_MAP,
            message='Success! The shapefile was successfully mapped!')

        json_msg = MessageHelperJSON.get_json_success_msg("great job", data_dict=data_dict)

        return HttpResponse(json_msg, content_type="application/json", status=200)
apache-2.0
1,538,729,477,637,049,300
37.300813
116
0.612397
false
3.968829
false
false
false
nickmilon/milonpy
milonpy/utils/basic2.py
1
14147
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
#######################################################
'''
module: utilities.basic2
Created:Aug 21, 2012
author: nickmilon
Description: Simple utilities (2) and Vars - Very Limited IMPORTS HERE !
'''
#######################################################

from sys import stdout
from datetime import datetime , timedelta
from basic import FMT_dtGen,FMT_tGen, color_txt ,color_switch_txt,dictDot
from time import sleep ,time,mktime
import re
from random import random

def re_is_sameLen(txt,rexp):return len(txt)==len(rexp.findall(txt))
def re_is_same(txt,rexp):return txt==u''.join(rexp.findall(txt))
def re_diff(txt,rexp):return ''.join(list(set([c for c in txt]) - set(rexp.findall(txt))))
#re_gr=re.compile(ur'[\u03AC-\u03CE]|[;\s]', re.IGNORECASE| re.VERBOSE| re.UNICODE |re.MULTILINE)

def lst_randomize(lst):
    "returns list in random order"
    # sort the [random_key, item] pairs so the random keys actually shuffle the items
    return [i[1] for i in sorted([[random(),i] for i in lst])]

def time_seconds_since_epoch(dt=None):
    if dt is None:dt=datetime.utcnow()
    return mktime(dt.timetuple())+1e-6*dt.microsecond

def autoRetry(exceptionOrTuple,retries=3,sleepSeconds=1, BackOfFactor=1,loggerFun=None):
    """ exceptionOrTuple= exception or tuple of exceptions,BackOfFactor=factor to back off on each retry
        loggerFun i.e. logger.info
    """
    def wrapper(func):
        def fun_call(*args, **kwargs):
            tries = 0
            while tries < retries:
                try:
                    return func(*args, **kwargs)
                except exceptionOrTuple, e:
                    tries += 1
                    if loggerFun:loggerFun("exception [%s] e=[%s] handled tries :%d sleeping[%f]" % (exceptionOrTuple ,e,tries,sleepSeconds * tries * BackOfFactor) )
                    sleep(sleepSeconds * tries * BackOfFactor)
            raise
        return fun_call
    return wrapper

def parseJSfunFromFile(filepath,functionName):
    """ helper function to get a js function string from a file containing js functions.
        Function must be named starting in the first column and the file must end with //eof//
    """
    with open( filepath) as fin:
        r=re.search("(^.*?)(?P<fun>function\s+?%s.*?)(^fun|//eof//)" % functionName,fin.read(),re.MULTILINE|re.DOTALL)
        return r.group('fun').strip() if r else False

def stdout_switchColor(color):
    stdout.write (color_switch_txt(color))

def stdout_write_flush(txt,stAfter="\r",color=None):
    if color:txt= color_txt(color,txt)
    stdout.write("%s%s" %(txt,stAfter) )
    stdout.flush()

class timeElapsed(object):
    """
    overwrite str_dtimes str_tdeltas to return "" to exclude these from the output string
    @todo: logging handler
    """
    def __init__(self, cnt_max=1,name_str=""):
        self.name_str=name_str
        self.cnt_max= cnt_max
        self.dt_start=datetime.utcnow()
        self.dt_last=self.dt_start
        self.dt_current=self.dt_start
        self.cnt=0
        self.cnt_last=0
        self.cnt_last_dif=0
        self.perc_done=0.0
        self.time_elapsed_since_start=timedelta(0)
        self.time_elapsed_since_last=timedelta(0)
        self.time_remaining =timedelta(0)
        self.units=['sec','min','hour']
        self.set_cnt_max(cnt_max)
    def set_cnt_max(self,val):
        self.cnt_max=val
        self.frmt_str="%s%d%s" %("%",len(str(val)),"d" )
    def set_auto_unit(self,velocity,unit_idx=0):
        if velocity < 1 and unit_idx < 2:
            velocity=velocity * 60
            unit_idx+=1
            return self.set_auto_unit(velocity, unit_idx)
        else:
            return velocity, self.units[unit_idx]
    def frmt_max(self,val):
        return self.frmt_str % val
    def update(self,cur_val,getStr=True):
        cur_val=float(cur_val)
        if cur_val > self.cnt_max:self.set_cnt_max(self.cnt_max+int(cur_val/10))
        self.dt_current=datetime.utcnow()
        self.time_elapsed_since_start = self.dt_current- self.dt_start
        self.time_elapsed_since_last=self.dt_current- self.dt_last
        self.cnt_last_dif=cur_val-self.cnt_last   # units processed since last update
        self.perc_done=cur_val/self.cnt_max
        self.time_remaining =timedelta(seconds=int ( self.time_elapsed_since_start.total_seconds() * ( (1-self.perc_done)/self.perc_done)))
        self.cnt=cur_val
        self.v_start= self.cnt/self.time_elapsed_since_start.total_seconds()
        self.v_last= self.cnt_last_dif/self.time_elapsed_since_last.total_seconds()
        self.dt_last=self.dt_current
        self.cnt_last=cur_val
        return self.toStr() if getStr else True
    def update_last(self,cur_val,getStr=True):
        self.cnt_max=cur_val
        return self.update(cur_val,getStr)
    def str_counters(self):
        return u"|%s of %s" %(self.frmt_max(self.cnt), self.frmt_max(self.cnt_max))
    def str_dtimes(self):
        return u"⌚ %s %s %s" % (self.dt_start.strftime(FMT_dtGen),self.dt_current.strftime(FMT_tGen), (self.dt_current+self.time_remaining).strftime(FMT_tGen))
    def str_tdeltas(self):
        return u"⌛ %s %s %s" %(self._str_tdelta(self.time_elapsed_since_start),self._str_tdelta(self.time_elapsed_since_last), self._str_tdelta(self.time_remaining) )
    @staticmethod
    def _str_tdelta(tdelta):
        str_td=str(tdelta)
        tmp=str_td.find(".")
        if tmp !=-1 : str_td= str_td[:tmp]
        return u"%8s" % str_td
    def toStr(self):
        return u"[%s:%6.2f%%%s%s%s]" %(self.name_str,100* self.perc_done, self.str_counters(), self.str_tdeltas(),self.str_dtimes() )

class SubToEvent(object):
    '''
    lightweight Event handler modeled after Peter Thatcher's http://www.valuedlessons.com/2008/04/events-in-python.html
    usage:
        watcher = SubToEvent()
        def log_docs(doc):print doc
        watcher += log_docs
        watcher += lambda x:str(x)
        watcher("some doc")   # fires all handlers
    '''
    def __init__(self,channelName=''):
        self.channelName=channelName
        self.handlers = set()
    def handle(self, handler):
        self.handlers.add(handler)
        return self
    def unhandle(self, handler):
        try:
            self.handlers.remove(handler)
        except:
            raise ValueError("No_such_handler")
        return self
    def fire(self, *args, **kargs):
        for handler in self.handlers:
            handler(*args, **kargs)
    def fireTopic(self,topic=None,verb=None,payload=None):
        self.fire ((self.channelName,topic,verb,payload))
    def getHandlerCount(self):
        return len(self.handlers)
    __iadd__ = handle
    __isub__ = unhandle
    __call__ = fire
    __len__ = getHandlerCount

class multiOrderedDict(object):
    '''
    deletes can't be multi
    '''
    def __init__(self,lst):
        self.lstDic=lst
    def __getitem__ (self,key):
        return self._getOrSetDictItem(key)
    def __setitem__(self, key, val):
        return self._getOrSetDictItem(key,True,val)
    def __delitem__ (self, key):
        return self._getOrSetDictItem(key,delete=True)
    def get(self,key,orVal=None):
        try:
            return self[key]
        except KeyError:
            return orVal
    def keys(self):
        return [i[0] for i in self.lstDic if self.isKey(i[0])]
    def values(self):
        return [self[i] for i in self.keys()]
    def isKey(self,k):
        return True
    def _getOrSetDictItem (self,key,setVal=False,newVal=None,multi=False,delete=False):
        idx=[]
        for n,i in enumerate(self.lstDic):
            if i[0]==key and self.isKey(i[0]):
                idx.append(n)
                if setVal:self.lstDic[n]=[i[0],newVal]
                if not multi: break
        if len(idx)>0:
            if delete:
                self.lstDic.pop(idx[0]) #can't be multi
                return None
            rt= [self.lstDic[i][1:] for i in idx ]
            if multi: return rt
            else: return rt[0][0]
        else:
            if setVal:
                self.lstDic.append([key,newVal])
                return newVal
            else:
                raise KeyError (key)
    def toDict(self):
        return dict(zip(self.keys(),self.values()))
    def toString(self):
        return str(self.toDict())
    __str__ = toString

class confFileDict(multiOrderedDict):
    def __init__(self,path,skipBlanks=True,skipRemarks=True):
        self.path=path
        with open(self.path) as fin:
            rlines=fin.readlines()
        if skipBlanks:rlines=[i for i in rlines if not i=='\n']
        if skipRemarks:rlines=[i for i in rlines if not i.startswith("#")]
        lstDic=[ map(lambda x: x.strip(), i.split("=") ) for i in rlines]
        super(confFileDict, self).__init__(lstDic)
    def isKey(self,key):
        return key !='' and not key.startswith("#")
    def toStr(self):
        s=''
        for i in self.lstDic:
            s+= "=".join(i)+'\n'
        return s.rstrip()
    def toFile(self,path=None):
        if not path:path=self.path
        with open(path, 'w') as fl:
            fl.write(self.toStr())   # call toStr(); writing the bound method was a bug

def PrintTiming(func):
    """set up a decorator function for timing"""
    # module-level `time` is the time() *function* (imported from time above),
    # so pull in the stdlib module under another name for time/strftime/gmtime
    import time as _time
    def wrapper(*args, **kargs):
        t1 = _time.time()
        res = func(*args, **kargs)
        tel = _time.time()-t1
        timeformated = _time.strftime( "%H:%M:%S",_time.gmtime(tel))
        print '-'*5 + '%s took %0.3f ms' % (func.func_name + str(kargs) + str(args), (tel)*1000.0) + '|' + timeformated + '|'+ '-'*10
        return res
    return wrapper

def totalsVertical(orgD,resD,funct,initFunc):
    '''Apply funct to resD dict values by orgD values, creates keys in resD if they do not exist
       useful for vertical percentages and totals. Attention: it is destructive, replacing resD with results
       i.e: to incr resD values by orgD values: totalsVertical(orgDict,resultsDict,lambda x,y:x+y, lambda x:0)
       to find perc of org: totalsVertical(res,dorg[0].value,lambda x,y:100*y/x if x!=0 else None,None)
    '''
    for k in orgD.keys():
        if isinstance(orgD[k],dict):
            if resD.get(k):
                totalsVertical(orgD[k],resD[k],funct,initFunc)
            else:
                if initFunc:
                    resD[k]=totalsVertical(orgD[k],dictDot({}),funct,initFunc)
                else:
                    continue
        elif isinstance(orgD[k],(float,int)):
            if resD.get(k,False) is False :
                if initFunc:
                    resD[k]=initFunc(orgD[k])
                else:
                    continue
            resD[k] = funct(orgD[k],resD[k])
        else:
            if initFunc:resD[k]=orgD[k]
    return resD

def totalsVertSimple(orgD,resD,funct):
    ''' simplified and faster version of totalsVertical assumes all key/values of orgD are present in resD '''
    for k in orgD.keys():
        if isinstance(orgD[k],dict):totalsVertSimple(orgD[k],resD[k],funct)
        elif isinstance(orgD[k],(float,int)):orgD[k]=funct(orgD[k],resD[k])
    return orgD

def totalsHorizontal(value,a_dict,funct=lambda x,y:100*x/y):
    for k in a_dict.keys():
        if isinstance(a_dict[k],dict):totalsHorizontal(value,a_dict[k],funct)   # pass funct down; recursing with the default was a bug
        elif isinstance(a_dict[k],(float,int)):a_dict[k]=funct(a_dict[k],value)
    return a_dict

class TextWrapper(object):
    ''' http://jrgraphix.net/r/Unicode/ '''
    elipsis=u"\u2026" # "…"
    numbers=u"₀₁₂₃₄₅₆₇₈₉₊₋₌₍₎"
    def __init__(self, maxLen=140,minLen=100, contChr=u'⎘',inclNumbers=True,strFlag=u'',strFirst=u'',strRest=u'',strAll=u''):
        self.contChr=contChr
        self.inlNumbers=inclNumbers
        self.strFlag=strFlag
        self.strFirst=strFirst
        self.strRest=strRest
        self.strAll=strAll
        self.maxLen=maxLen
        self.minLen=minLen
    def compineStr(self,s,cnt,totalCnt=None):
        return "%s%s%s%s%s" %(self.strFlag,self.formatNumOfTotal(cnt+1,totalCnt) if self.inlNumbers else u'', self.strAll, self.strFirst if cnt==0 else self.strRest,s)
    def splits(self,astr):
        n=self.maxLen-1- len(self.contChr)
        minLen=self.minLen
        cnt=0
        s=self.compineStr(astr, cnt)
        while len(s) > n:
            cnt+=1
            rf=s[0:n].rfind(u'\n',minLen)
            if rf == -1:rf=s[0:n].rfind(u'.',minLen)
            if rf == -1:rf=s[0:n].rfind(u' ',minLen)
            spltn = rf+1 if rf !=-1 else n
            #print "(%3d) %3d %3d %3d [%s]" %(cnt, rf,n,spltn,s[0:n])
            rt=s[:spltn].rstrip()
            remainingStr=s[spltn:]
            if self.contChr !=u'':
                if len(remainingStr)>1:rt+=self.contChr
                else:
                    rt+=remainingStr
                    remainingStr=u''
            yield rt
            s=self.compineStr(remainingStr, cnt) if remainingStr !=u'' else u''
        yield s
    def formatNumOfTotal(self,cnt, totalCnt=None):
        return u"%s∕%s" %(self.formatNum(cnt),u'??' if totalCnt is None else self.formatNum(totalCnt)) #'∕' is not '/' but math '\u2215'
    def formatNum(self,num):
        header=map(int,str(num))
        rt=[self.numbers[i] for i in header]
        return ''.join(rt)
    def format(self,text):
        rt=[]
        for i in self.splits(text):
            if i !=u'':rt.append(i)
        if self.inlNumbers:
            rt2=[]
            maxCnt=len(rt)
            for cnt,vl in enumerate(rt):
                old= self.formatNumOfTotal(cnt+1,None)
                new= u'' if maxCnt == 1 else self.formatNumOfTotal(cnt+1,maxCnt)
                if new !=u'':new += u' '* (len(old)-len(new))
                rt2.append(vl.replace(old, new , 1))
            return rt2
        return rt

################## tests
def test_timeElapsed(x):
    et=timeElapsed(x,"foo")
    for i in range(1,x):
        sleep(1)
        print et.update(i, True)
    print et.update_last(i)
###################
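# ---------------------------------------------------------------------------
# Editor's note: hedged usage sketch, NOT part of the original file. It shows
# how the main helpers above are meant to be called (Python 2, as the file
# uses print statements and `except X, e` syntax); `flaky_fetch` and
# `do_work` are placeholder names, not real functions.
#
#     @autoRetry(IOError, retries=5, sleepSeconds=0.5, BackOfFactor=2)
#     def flaky_fetch(url):
#         ...  # may raise IOError; retried, sleeping sleepSeconds*tries*BackOfFactor
#
#     et = timeElapsed(1000, "load")
#     for n in range(1, 1001):
#         do_work(n)
#         print et.update(n)       # percentage, counters, elapsed/remaining times
#
#     events = SubToEvent('imports')
#     events += lambda payload: stdout_write_flush(str(payload))
#     events.fireTopic(topic='file', verb='done', payload=42)
# ---------------------------------------------------------------------------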
apache-2.0
-7,230,937,652,603,792,000
37.752747
168
0.57072
false
3.306376
false
false
false
AnthonyCheetham/naco_ispy
data_handling_scripts/queue_cal_analysis.py
1
1931
# -*- coding: utf-8 -*- """ Created on Fri Nov 11 10:41:32 2016 Program to run through the calibrations folders and queue all data for analysis It isn't yet smart enough to check which ones are done already @author: cheetham """ import naco_ispy,subprocess,os,argparse,glob parser = argparse.ArgumentParser(description='This program queues up all unprocessed NACO ISPY calibration data for analysis.') parser.add_argument('-dry_run', dest="dry_run",action='store_const',const=True, default=False, help='Dont actually queue the analysis, but print the commands it will do') parser.add_argument('--num', action="store", dest="num", type=int, default=-1, help='Maximum number of datasets to process') # Get the input arguments args = parser.parse_args() num = args.num data_folder = '/data/NACO/' # db_filename = '/data/NACO/calib_table.dat' # data_folder='/Users/cheetham/data/naco_data/GTO/' #db_filename='/Users/cheetham/data/data_archive/GTO/obs_table.dat' dry_run = args.dry_run # First, load the target database # calib_db = naco_ispy.databases.calib_table(filename=db_filename, data_folder=data_folder) scripts_directory = os.path.expanduser('~/code/naco_ispy/processing_scripts/') # Instead of using the database, use glob to find all folders all_folders = glob.glob(data_folder+'Calib/*/') # Loop through the targets in the database for targ_ix,targ_folder in enumerate(all_folders[0:num]): # Check what we want to process process_script = scripts_directory+'naco_calibrations.slurm' # The command to run: cmd = "echo 'bash "+process_script+"' | at -q b now" # Change to the right directory os.chdir(targ_folder) if dry_run: print('Queueing analysis for '+targ_folder) print(' '+cmd) else: # Execute the processing command subprocess.call(cmd,shell=True)
gpl-3.0
-3,123,554,130,894,556,000
32.310345
127
0.684102
false
3.523723
false
false
false
Maronato/SpottedBot
custom_auth/facebook_methods.py
1
4677
import facebook
from django.conf import settings
from django.shortcuts import reverse
from urllib.parse import urlencode, quote, unquote
from django.contrib.auth import login
from django.contrib import messages

app_id = settings.SOCIAL_FACEBOOK_KEY
app_secret = settings.SOCIAL_FACEBOOK_SECRET


def get_graph():
    """Get App Graph Object.

    returns a graph object containing an app token from the registered facebook app
    """
    graph = facebook.GraphAPI(version='3.1')
    graph.access_token = graph.get_app_access_token(app_id, app_secret)
    return graph


def canv_url(request):
    """Return Canvas URL.

    Generates the canvas_url used by facebook to redirect after auth
    """
    # Check whether the last call was secure and use its protocol
    if request.is_secure():
        return 'https://' + request.get_host() + reverse('social_login:facebook_login_response')
    else:
        return 'http://' + request.get_host() + reverse('social_login:facebook_login_response')


def auth_url(request):
    """Auth URL.

    Returns the facebook auth url using the current app's domain
    """
    canvas_url = canv_url(request)
    # Permissions set by user. Default is none
    perms = settings.SOCIAL_FACEBOOK_PERMISSIONS
    url = "https://www.facebook.com/dialog/oauth?"
    # Payload
    kvps = {'client_id': app_id, 'redirect_uri': canvas_url}

    # Add 'next' as state if provided
    next_param = f"next_url={quote(request.GET.get('next', ''))}"
    # Add 'redirected' as state if provided
    redirected_param = f"redirected={request.GET.get('redirected', '')}"

    if request.GET.get('next', False):
        kvps['state'] = next_param
        redirected_param = f',{redirected_param}'

    if request.GET.get('redirected', False):
        kvps['state'] = kvps.get('state', '') + redirected_param

    # Format permissions if needed
    if perms:
        kvps['scope'] = ",".join(perms)
    # Return the url
    return url + urlencode(kvps)


def debug_token(token):
    """Debug Token.

    Returns debug string from token
    """
    return get_graph().debug_access_token(token, app_id, app_secret)


def login_successful(code, request):
    """Login Successful.

    Process successful login by creating or updating a user using Facebook's response
    """
    canvas_url = canv_url(request)
    graph = get_graph()
    # Get token info from user
    try:
        token_info = graph.get_access_token_from_code(code, canvas_url, app_id, app_secret)
    except facebook.GraphAPIError:
        # For some reason, the auth code has already been used, redirect to login again
        return 'auth code used'
    # Extract token from token info
    access_token = token_info['access_token']
    # Debug the token, as per documentation
    debug = debug_token(access_token)['data']
    # Get the user's scope ID from debug data
    social_id = debug['user_id']
    token_expires = debug.get('expires_at') - debug.get('issued_at')
    if debug.get('expires_at') == 0:
        token_expires = 99999999
    scopes = debug.get('scopes', [])

    # Get some user info like name and url
    extra_data = graph.get_object(str(social_id) + '/?fields=name,first_name,last_name,link')
    name = extra_data['name']
    first_name = extra_data['first_name']
    last_name = extra_data['last_name']
    link = extra_data.get('link', '')

    # Call FacebookUser's method to create or update based on social_id, which returns a FacebookUser object
    from .models import FacebookUser
    new = FacebookUser.create_or_update(social_id, access_token, token_expires, first_name, last_name, name, link, scopes)

    # Try to log in the user
    if new.user.is_active:
        login(request, new.user)
        messages.add_message(request, messages.SUCCESS, 'Olá, ' + first_name + '!')
    else:
        messages.add_message(request, messages.ERROR, 'Essa conta foi desativada!')
    return request


def login_canceled(request):
    # If the user has canceled the login process, or something else happened, do nothing and display error message
    messages.add_message(request, messages.ERROR, 'Oops! Algo de errado aconteceu :( Se isso se repetir, fale conosco!')
    return request


def decode_state_data(state):
    if not state:
        return {}
    parts = state.split(',')
    data = {}
    for part in parts:
        p = part.split('=')
        data[p[0]] = unquote(p[1])
    return data


def code_already_used_url(next_url, redirected):
    state = {}
    if next_url:
        state['next'] = next_url
    state['redirected'] = int(redirected) + 1 if redirected else 0
    return reverse('social_login:facebook_login') + '?' + urlencode(state)
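# ---------------------------------------------------------------------------
# Editor's note: hedged illustration, NOT part of the original file. The
# OAuth `state` built in auth_url() is a comma-separated list of key=value
# pairs with URL-quoted values, which decode_state_data() reverses:
#
#     decode_state_data('next_url=%2Fdashboard%2F,redirected=1')
#     # -> {'next_url': '/dashboard/', 'redirected': '1'}
#
# Note the key asymmetry: the login view reads `next` from the query string
# but stores it in `state` under `next_url`, so consumers of the decoded
# dict must look for `next_url`, while code_already_used_url() re-emits it
# as a plain `next` query parameter.
# ---------------------------------------------------------------------------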
agpl-3.0
7,889,079,061,397,053,000
30.38255
122
0.663388
false
3.676101
false
false
false
LegoStormtroopr/canard
SQBLWidgets/sqblUI/statementText.py
1
1584
# -*- coding: utf-8 -*- # Form implementation generated from reading ui file 'ui/statementText.ui' # # Created: Sat Jul 25 12:17:11 2015 # by: PyQt4 UI code generator 4.9.1 # # WARNING! All changes made in this file will be lost! from PyQt4 import QtCore, QtGui try: _fromUtf8 = QtCore.QString.fromUtf8 except AttributeError: _fromUtf8 = lambda s: s class Ui_Form(object): def setupUi(self, Form): Form.setObjectName(_fromUtf8("Form")) Form.resize(400, 300) self.verticalLayout = QtGui.QVBoxLayout(Form) self.verticalLayout.setObjectName(_fromUtf8("verticalLayout")) self.label = QtGui.QLabel(Form) self.label.setWordWrap(True) self.label.setObjectName(_fromUtf8("label")) self.verticalLayout.addWidget(self.label) self.statementText = QtGui.QTextEdit(Form) self.statementText.setStyleSheet(_fromUtf8("margin-left:8px;")) self.statementText.setObjectName(_fromUtf8("statementText")) self.verticalLayout.addWidget(self.statementText) self.label.setBuddy(self.statementText) self.retranslateUi(Form) QtCore.QMetaObject.connectSlotsByName(Form) def retranslateUi(self, Form): Form.setWindowTitle(QtGui.QApplication.translate("Form", "Form", None, QtGui.QApplication.UnicodeUTF8)) self.label.setText(QtGui.QApplication.translate("Form", "<html><head/><body><p><span style=\" font-weight:600;\">Statement Text</span> - <small>The text shown to a respondent.</small></p></body></html>", None, QtGui.QApplication.UnicodeUTF8))
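# ---------------------------------------------------------------------------
# Editor's note: hedged usage sketch, NOT part of the generated file (which
# should never be edited by hand, per the warning above). pyuic4-generated
# Ui_* classes are applied to a live widget like this:
#
#     import sys
#     from PyQt4 import QtGui
#
#     app = QtGui.QApplication(sys.argv)
#     form = QtGui.QWidget()
#     ui = Ui_Form()
#     ui.setupUi(form)
#     ui.statementText.setPlainText("Text shown to a respondent")
#     form.show()
#     sys.exit(app.exec_())
# ---------------------------------------------------------------------------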
gpl-3.0
-4,289,232,995,782,310,000
39.615385
250
0.696338
false
3.789474
false
false
false
samueldeng/crosslan
headquarter/iptman.py
1
2445
import iptc import logging logging.basicConfig(format='[%(levelname)s]\t%(asctime)s\t%(message)s', datefmt='%m/%d/%Y %I:%M:%S %p', level=logging.DEBUG) log = logging.getLogger("iptman") class IptMan(): def __init__(self): pass @staticmethod def insert_rule(port): try: # New Rule. rule = iptc.Rule() rule.protocol = "tcp" # Add match to the rule. match = iptc.Match(rule, "tcp") match.sport = str(port) rule.add_match(match) # Add target to the rule. target = iptc.Target(rule, "ACCEPT") rule.target = target # Insert rule to the OUTPUT chain in filter Table. output_chain = iptc.Chain(iptc.Table(iptc.Table.FILTER), "OUTPUT") output_chain.insert_rule(rule) except Exception, e: raise e @staticmethod def delete_rule(port): try: filter_table = iptc.Table(iptc.Table.FILTER) output_chain = iptc.Chain(filter_table, "OUTPUT") rule_del = None for rule in output_chain.rules: sport = str(rule.matches[0].parameters["sport"]) if sport == str(port): rule_del = rule break if rule_del is not None: output_chain.delete_rule(rule_del) except Exception, e: raise e @staticmethod def get_rule_counter(port): try: filter_table = iptc.Table(iptc.Table.FILTER) filter_table.refresh() output_chain = iptc.Chain(filter_table, "OUTPUT") bytes_counts = None for rule in output_chain.rules: sport = str(rule.matches[0].parameters["sport"]) # log.debug(rule.get_counters()) if sport == str(port): counter = rule.get_counters() packets = counter[0] bytes_counts = counter[1] log.debug("packet #:" + str(packets)) log.debug("bytes #:" + str(bytes_counts)) break if bytes_counts is None: raise Exception("NotFoundPort") return bytes_counts except Exception, e: raise e def unit_test(): pass if __name__ == "__main__": unit_test()
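# ---------------------------------------------------------------------------
# Editor's note: hedged usage sketch, NOT part of the original file. The
# unit_test() stub above does nothing; a manual exercise of the API (needs
# root privileges and python-iptables) might look like:
#
#     IptMan.insert_rule(8080)               # ACCEPT tcp sport 8080 on OUTPUT
#     sent = IptMan.get_rule_counter(8080)   # bytes matched so far
#     IptMan.delete_rule(8080)
#
# get_rule_counter() raises Exception("NotFoundPort") when no OUTPUT rule
# matches the port, so callers should be prepared to catch it.
# ---------------------------------------------------------------------------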
gpl-2.0
5,179,356,324,944,784,000
27.44186
103
0.503476
false
4.179487
false
false
false
google-research/falken
service/generated_flatbuffers/tflite/LessEqualOptions.py
1
2218
# Copyright 2021 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # automatically generated by the FlatBuffers compiler, do not modify # namespace: tflite import flatbuffers from flatbuffers.compat import import_numpy np = import_numpy() class LessEqualOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAsLessEqualOptions(cls, buf, offset): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = LessEqualOptions() x.Init(buf, n + offset) return x @classmethod def LessEqualOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) # LessEqualOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) def LessEqualOptionsStart(builder): builder.StartObject(0) def LessEqualOptionsEnd(builder): return builder.EndObject() class LessEqualOptionsT(object): # LessEqualOptionsT def __init__(self): pass @classmethod def InitFromBuf(cls, buf, pos): lessEqualOptions = LessEqualOptions() lessEqualOptions.Init(buf, pos) return cls.InitFromObj(lessEqualOptions) @classmethod def InitFromObj(cls, lessEqualOptions): x = LessEqualOptionsT() x._UnPack(lessEqualOptions) return x # LessEqualOptionsT def _UnPack(self, lessEqualOptions): if lessEqualOptions is None: return # LessEqualOptionsT def Pack(self, builder): LessEqualOptionsStart(builder) lessEqualOptions = LessEqualOptionsEnd(builder) return lessEqualOptions
apache-2.0
3,640,308,164,800,651,300
29.805556
114
0.708747
false
3.857391
false
false
false
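Since `LessEqualOptions` carries no fields, serializing it is just a start/end pair. A minimal round-trip sketch with the `flatbuffers` package, assuming the import path implied by the record:

import flatbuffers
from tflite.LessEqualOptions import (LessEqualOptions, LessEqualOptionsStart,
                                     LessEqualOptionsEnd)

builder = flatbuffers.Builder(0)
LessEqualOptionsStart(builder)        # the table defines no fields to add
offset = LessEqualOptionsEnd(builder)
builder.Finish(offset)

# Read the (empty) table back out of the finished buffer.
table = LessEqualOptions.GetRootAsLessEqualOptions(builder.Output(), 0)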
Southpaw-TACTIC/Team
src/python/Lib/site-packages/PySide/examples/itemviews/addressbook/addresswidget.py
1
10279
#!/usr/bin/python

"""**************************************************************************
**
** Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies).
** All rights reserved.
** Contact: Nokia Corporation ([email protected])
**
** This file is part of the examples of the Qt Toolkit.
**
** You may use this file under the terms of the BSD license as follows:
**
** "Redistribution and use in source and binary forms, with or without
** modification, are permitted provided that the following conditions are
** met:
**   * Redistributions of source code must retain the above copyright
**     notice, this list of conditions and the following disclaimer.
**   * Redistributions in binary form must reproduce the above copyright
**     notice, this list of conditions and the following disclaimer in
**     the documentation and/or other materials provided with the
**     distribution.
**   * Neither the name of Nokia Corporation and its Subsidiary(-ies) nor
**     the names of its contributors may be used to endorse or promote
**     products derived from this software without specific prior written
**     permission.
**
** THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
** "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
** LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
** A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
** OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
** SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
** LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
** DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
** THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
** (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
** OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
**
*****************************************************************************
** February 2011
** - addressbook example ported to PySide by Arun Srinivasan
**   <[email protected]>
**************************************************************************"""

try:
    import cPickle as pickle
except ImportError:
    import pickle

from PySide.QtCore import (Qt, Signal, QRegExp, QModelIndex)
from PySide.QtGui import (QWidget, QTabWidget, QItemSelectionModel,
                          QMessageBox, QTableView, QSortFilterProxyModel,
                          QAbstractItemView, QItemSelection)

from tablemodel import TableModel
from newaddresstab import NewAddressTab
from adddialogwidget import AddDialogWidget


class AddressWidget(QTabWidget):
    """ The central widget of the application. Most of the addressbook's
        functionality is contained in this class.
    """

    selectionChanged = Signal(QItemSelection)

    def __init__(self, parent=None):
        """ Initialize the AddressWidget. """
        super(AddressWidget, self).__init__(parent)

        self.tableModel = TableModel()
        self.newAddressTab = NewAddressTab()
        self.newAddressTab.sendDetails.connect(self.addEntry)

        self.addTab(self.newAddressTab, "Address Book")
        self.setupTabs()

    def addEntry(self, name=None, address=None):
        """ Add an entry to the addressbook. """
        if name is None and address is None:
            addDialog = AddDialogWidget()

            if addDialog.exec_():
                name = addDialog.name
                address = addDialog.address

        address = {"name": name, "address": address}
        addresses = self.tableModel.addresses[:]

        # The QT docs for this example state that what we're doing here
        # is checking if the entered name already exists. What they
        # (and we here) are actually doing is checking if the whole
        # name/address pair exists already - ok for the purposes of this
        # example, but obviously not how a real addressbook application
        # should behave.
        try:
            addresses.remove(address)
            QMessageBox.information(self, "Duplicate Name",
                    "The name \"%s\" already exists." % name)
        except ValueError:
            # The address didn't already exist, so let's add it to the model.

            # Step 1: create the row
            self.tableModel.insertRows(0)

            # Step 2: get the index of the newly created row and use it
            # to set the name.
            ix = self.tableModel.index(0, 0, QModelIndex())
            self.tableModel.setData(ix, address["name"], Qt.EditRole)

            # Step 3: lather, rinse, repeat for the address.
            ix = self.tableModel.index(0, 1, QModelIndex())
            self.tableModel.setData(ix, address["address"], Qt.EditRole)

            # Remove the newAddressTab, as we now have at least one
            # address in the model.
            self.removeTab(self.indexOf(self.newAddressTab))

            # The screenshot for the QT example shows nicely formatted
            # multiline cells, but the actual application doesn't behave
            # quite so nicely, at least on Ubuntu. Here we resize the newly
            # created row so that multiline addresses look reasonable.
            tableView = self.currentWidget()
            tableView.resizeRowToContents(ix.row())

    def editEntry(self):
        """ Edit an entry in the addressbook. """
        tableView = self.currentWidget()
        proxyModel = tableView.model()
        selectionModel = tableView.selectionModel()

        # Get the name and address of the currently selected row.
        indexes = selectionModel.selectedRows()

        for index in indexes:
            row = proxyModel.mapToSource(index).row()
            ix = self.tableModel.index(row, 0, QModelIndex())
            name = self.tableModel.data(ix, Qt.DisplayRole)
            ix = self.tableModel.index(row, 1, QModelIndex())
            address = self.tableModel.data(ix, Qt.DisplayRole)

        # Open an addDialogWidget, and only allow the user to edit the address.
        addDialog = AddDialogWidget()
        addDialog.setWindowTitle("Edit a Contact")

        addDialog.nameText.setReadOnly(True)
        addDialog.nameText.setText(name)
        addDialog.addressText.setText(address)

        # If the address is different, add it to the model.
        if addDialog.exec_():
            newAddress = addDialog.address
            if newAddress != address:
                ix = self.tableModel.index(row, 1, QModelIndex())
                self.tableModel.setData(ix, newAddress, Qt.EditRole)

    def removeEntry(self):
        """ Remove an entry from the addressbook. """
        tableView = self.currentWidget()
        proxyModel = tableView.model()
        selectionModel = tableView.selectionModel()

        # Just like editEntry, but this time remove the selected row.
        indexes = selectionModel.selectedRows()

        for index in indexes:
            row = proxyModel.mapToSource(index).row()
            self.tableModel.removeRows(row)

        # If we've removed the last address in the model, display the
        # newAddressTab
        if self.tableModel.rowCount() == 0:
            self.insertTab(0, self.newAddressTab, "Address Book")

    def setupTabs(self):
        """ Setup the various tabs in the AddressWidget. """
        groups = ["ABC", "DEF", "GHI", "JKL", "MNO", "PQR", "STU", "VW", "XYZ"]

        for group in groups:
            proxyModel = QSortFilterProxyModel(self)
            proxyModel.setSourceModel(self.tableModel)
            proxyModel.setDynamicSortFilter(True)

            tableView = QTableView()
            tableView.setModel(proxyModel)
            tableView.setSortingEnabled(True)
            tableView.setSelectionBehavior(QAbstractItemView.SelectRows)
            tableView.horizontalHeader().setStretchLastSection(True)
            tableView.verticalHeader().hide()
            tableView.setEditTriggers(QAbstractItemView.NoEditTriggers)
            tableView.setSelectionMode(QAbstractItemView.SingleSelection)

            # This here be the magic: we use the group name (e.g. "ABC") to
            # build the regex for the QSortFilterProxyModel for the group's
            # tab. The regex will end up looking like "^[ABC].*", only
            # allowing this tab to display items where the name starts with
            # "A", "B", or "C". Notice that we set it to be case-insensitive.
            reFilter = "^[%s].*" % group

            proxyModel.setFilterRegExp(QRegExp(reFilter, Qt.CaseInsensitive))
            proxyModel.setFilterKeyColumn(0)  # Filter on the "name" column
            proxyModel.sort(0, Qt.AscendingOrder)

            tableView.selectionModel().selectionChanged.connect(self.selectionChanged)

            self.addTab(tableView, group)

    # Note: the QT example uses a QDataStream for the saving and loading.
    # Here we're using a python dictionary to store the addresses, which
    # can't be streamed using QDataStream, so we just use cPickle for this
    # example.
    def readFromFile(self, filename):
        """ Read contacts in from a file. """
        try:
            f = open(filename, "rb")
            addresses = pickle.load(f)
            f.close()
        except IOError:
            # QMessageBox.information needs a title as well as the text.
            QMessageBox.information(self, "Unable to open file",
                    "Unable to open file: %s" % filename)
            return

        if len(addresses) == 0:
            QMessageBox.information(self, "No contacts",
                    "No contacts in file: %s" % filename)
        else:
            for address in addresses:
                self.addEntry(address["name"], address["address"])

    def writeToFile(self, filename):
        """ Save all contacts in the model to a file. """
        try:
            f = open(filename, "wb")
            pickle.dump(self.tableModel.addresses, f)
            f.close()
        except IOError:
            QMessageBox.information(self, "Unable to open file",
                    "Unable to open file: %s" % filename)


if __name__ == "__main__":
    import sys
    from PySide.QtGui import QApplication

    app = QApplication(sys.argv)
    addressWidget = AddressWidget()
    addressWidget.show()
    sys.exit(app.exec_())
epl-1.0
-137,049,467,144,635,650
40.447581
86
0.628369
false
4.394613
false
false
false
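A hedged sketch of driving the persistence methods defined above; the module and file names are assumptions for illustration.

from PySide.QtGui import QApplication
from addresswidget import AddressWidget  # module name from the record path

app = QApplication([])
widget = AddressWidget()
widget.addEntry("Ada Lovelace", "12 Analytical Row\nLondon")
widget.writeToFile("contacts.pkl")   # pickles tableModel.addresses
widget.readFromFile("contacts.pkl")  # re-adds every stored entry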
googleapis/python-grafeas
grafeas/grafeas_v1/services/grafeas/transports/grpc_asyncio.py
1
26599
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union

from google.api_core import gapic_v1  # type: ignore
from google.api_core import grpc_helpers_async  # type: ignore
from google.auth import credentials as ga_credentials  # type: ignore
from google.auth.transport.grpc import SslCredentials  # type: ignore
import packaging.version

import grpc  # type: ignore
from grpc.experimental import aio  # type: ignore

from google.protobuf import empty_pb2  # type: ignore
from grafeas.grafeas_v1.types import grafeas
from .base import GrafeasTransport, DEFAULT_CLIENT_INFO
from .grpc import GrafeasGrpcTransport


class GrafeasGrpcAsyncIOTransport(GrafeasTransport):
    """gRPC AsyncIO backend transport for Grafeas.

    `Grafeas <https://grafeas.io>`__ API.

    Retrieves analysis results of Cloud components such as Docker
    container images.

    Analysis results are stored as a series of occurrences. An
    ``Occurrence`` contains information about a specific analysis
    instance on a resource. An occurrence refers to a ``Note``. A note
    contains details describing the analysis and is generally stored in
    a separate project, called a ``Provider``. Multiple occurrences can
    refer to the same note.

    For example, an SSL vulnerability could affect multiple images. In
    this case, there would be one note for the vulnerability and an
    occurrence for each image with the vulnerability referring to that
    note.

    This class defines the same methods as the primary client, so the
    primary client can load the underlying transport implementation
    and call it.

    It sends protocol buffers over the wire using gRPC (which is built on
    top of HTTP/2); the ``grpcio`` package must be installed.
    """

    _grpc_channel: aio.Channel
    _stubs: Dict[str, Callable] = {}

    @classmethod
    def create_channel(
        cls,
        host: str = "",
        credentials: ga_credentials.Credentials = None,
        credentials_file: Optional[str] = None,
        scopes: Optional[Sequence[str]] = None,
        quota_project_id: Optional[str] = None,
        **kwargs,
    ) -> aio.Channel:
        """Create and return a gRPC AsyncIO channel object.

        Args:
            host (Optional[str]): The host for the channel to use.
            credentials (Optional[~.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify this application to the service. If
                none are specified, the client will attempt to ascertain
                the credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is ignored if ``channel`` is provided.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
                service. These are only used when credentials are not specified and
                are passed to :func:`google.auth.default`.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            kwargs (Optional[dict]): Keyword arguments, which are passed to the
                channel creation.
        Returns:
            aio.Channel: A gRPC AsyncIO channel object.
        """
        return grpc_helpers_async.create_channel(
            host,
            credentials=credentials,
            credentials_file=credentials_file,
            quota_project_id=quota_project_id,
            default_scopes=cls.AUTH_SCOPES,
            scopes=scopes,
            default_host=cls.DEFAULT_HOST,
            **kwargs,
        )

    def __init__(
        self,
        *,
        host: str = "",
        credentials: ga_credentials.Credentials = None,
        credentials_file: Optional[str] = None,
        scopes: Optional[Sequence[str]] = None,
        channel: aio.Channel = None,
        api_mtls_endpoint: str = None,
        client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
        ssl_channel_credentials: grpc.ChannelCredentials = None,
        client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
        quota_project_id=None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
    ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]): The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
                This argument is ignored if ``channel`` is provided.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is ignored if ``channel`` is provided.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
                service. These are only used when credentials are not specified and
                are passed to :func:`google.auth.default`.
            channel (Optional[aio.Channel]): A ``Channel`` instance through
                which to make calls.
            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
                If provided, it overrides the ``host`` argument and tries to create
                a mutual TLS channel with client SSL credentials from
                ``client_cert_source`` or application default SSL credentials.
            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
                Deprecated. A callback to provide client SSL certificate bytes and
                private key bytes, both in PEM format. It is ignored if
                ``api_mtls_endpoint`` is None.
            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
                for the grpc channel. It is ignored if ``channel`` is provided.
            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
                A callback to provide client certificate bytes and private key bytes,
                both in PEM format. It is used to configure a mutual TLS channel. It is
                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.

        Raises:
            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
                creation failed for any reason.
            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
                and ``credentials_file`` are passed.
        """
        self._grpc_channel = None
        self._ssl_channel_credentials = ssl_channel_credentials
        self._stubs: Dict[str, Callable] = {}

        if api_mtls_endpoint:
            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
        if client_cert_source:
            warnings.warn("client_cert_source is deprecated", DeprecationWarning)

        if channel:
            # Ignore credentials if a channel was passed.
            credentials = False
            # If a channel was explicitly provided, set it.
self._grpc_channel = channel self._ssl_channel_credentials = None else: if api_mtls_endpoint: host = api_mtls_endpoint # Create SSL credentials with client_cert_source or application # default SSL credentials. if client_cert_source: cert, key = client_cert_source() self._ssl_channel_credentials = grpc.ssl_channel_credentials( certificate_chain=cert, private_key=key ) else: self._ssl_channel_credentials = SslCredentials().ssl_credentials else: if client_cert_source_for_mtls and not ssl_channel_credentials: cert, key = client_cert_source_for_mtls() self._ssl_channel_credentials = grpc.ssl_channel_credentials( certificate_chain=cert, private_key=key ) # The base transport sets the host, credentials and scopes super().__init__( host=host, credentials=credentials, credentials_file=credentials_file, scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=True, ) if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, credentials=self._credentials, credentials_file=credentials_file, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), ], ) # Wrap messages. This must be done after self._grpc_channel exists self._prep_wrapped_messages(client_info) @property def grpc_channel(self) -> aio.Channel: """Create the channel designed to connect to this service. This property caches on the instance; repeated calls return the same channel. """ # Return the channel from cache. return self._grpc_channel @property def get_occurrence( self, ) -> Callable[[grafeas.GetOccurrenceRequest], Awaitable[grafeas.Occurrence]]: r"""Return a callable for the get occurrence method over gRPC. Gets the specified occurrence. Returns: Callable[[~.GetOccurrenceRequest], Awaitable[~.Occurrence]]: A function that, when called, will call the underlying RPC on the server. """ # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_occurrence" not in self._stubs: self._stubs["get_occurrence"] = self.grpc_channel.unary_unary( "/grafeas.v1.Grafeas/GetOccurrence", request_serializer=grafeas.GetOccurrenceRequest.serialize, response_deserializer=grafeas.Occurrence.deserialize, ) return self._stubs["get_occurrence"] @property def list_occurrences( self, ) -> Callable[ [grafeas.ListOccurrencesRequest], Awaitable[grafeas.ListOccurrencesResponse] ]: r"""Return a callable for the list occurrences method over gRPC. Lists occurrences for the specified project. Returns: Callable[[~.ListOccurrencesRequest], Awaitable[~.ListOccurrencesResponse]]: A function that, when called, will call the underlying RPC on the server. """ # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_occurrences" not in self._stubs: self._stubs["list_occurrences"] = self.grpc_channel.unary_unary( "/grafeas.v1.Grafeas/ListOccurrences", request_serializer=grafeas.ListOccurrencesRequest.serialize, response_deserializer=grafeas.ListOccurrencesResponse.deserialize, ) return self._stubs["list_occurrences"] @property def delete_occurrence( self, ) -> Callable[[grafeas.DeleteOccurrenceRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete occurrence method over gRPC. 
Deletes the specified occurrence. For example, use this method to delete an occurrence when the occurrence is no longer applicable for the given resource. Returns: Callable[[~.DeleteOccurrenceRequest], Awaitable[~.Empty]]: A function that, when called, will call the underlying RPC on the server. """ # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_occurrence" not in self._stubs: self._stubs["delete_occurrence"] = self.grpc_channel.unary_unary( "/grafeas.v1.Grafeas/DeleteOccurrence", request_serializer=grafeas.DeleteOccurrenceRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_occurrence"] @property def create_occurrence( self, ) -> Callable[[grafeas.CreateOccurrenceRequest], Awaitable[grafeas.Occurrence]]: r"""Return a callable for the create occurrence method over gRPC. Creates a new occurrence. Returns: Callable[[~.CreateOccurrenceRequest], Awaitable[~.Occurrence]]: A function that, when called, will call the underlying RPC on the server. """ # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_occurrence" not in self._stubs: self._stubs["create_occurrence"] = self.grpc_channel.unary_unary( "/grafeas.v1.Grafeas/CreateOccurrence", request_serializer=grafeas.CreateOccurrenceRequest.serialize, response_deserializer=grafeas.Occurrence.deserialize, ) return self._stubs["create_occurrence"] @property def batch_create_occurrences( self, ) -> Callable[ [grafeas.BatchCreateOccurrencesRequest], Awaitable[grafeas.BatchCreateOccurrencesResponse], ]: r"""Return a callable for the batch create occurrences method over gRPC. Creates new occurrences in batch. Returns: Callable[[~.BatchCreateOccurrencesRequest], Awaitable[~.BatchCreateOccurrencesResponse]]: A function that, when called, will call the underlying RPC on the server. """ # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "batch_create_occurrences" not in self._stubs: self._stubs["batch_create_occurrences"] = self.grpc_channel.unary_unary( "/grafeas.v1.Grafeas/BatchCreateOccurrences", request_serializer=grafeas.BatchCreateOccurrencesRequest.serialize, response_deserializer=grafeas.BatchCreateOccurrencesResponse.deserialize, ) return self._stubs["batch_create_occurrences"] @property def update_occurrence( self, ) -> Callable[[grafeas.UpdateOccurrenceRequest], Awaitable[grafeas.Occurrence]]: r"""Return a callable for the update occurrence method over gRPC. Updates the specified occurrence. Returns: Callable[[~.UpdateOccurrenceRequest], Awaitable[~.Occurrence]]: A function that, when called, will call the underlying RPC on the server. """ # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "update_occurrence" not in self._stubs: self._stubs["update_occurrence"] = self.grpc_channel.unary_unary( "/grafeas.v1.Grafeas/UpdateOccurrence", request_serializer=grafeas.UpdateOccurrenceRequest.serialize, response_deserializer=grafeas.Occurrence.deserialize, ) return self._stubs["update_occurrence"] @property def get_occurrence_note( self, ) -> Callable[[grafeas.GetOccurrenceNoteRequest], Awaitable[grafeas.Note]]: r"""Return a callable for the get occurrence note method over gRPC. Gets the note attached to the specified occurrence. Consumer projects can use this method to get a note that belongs to a provider project. Returns: Callable[[~.GetOccurrenceNoteRequest], Awaitable[~.Note]]: A function that, when called, will call the underlying RPC on the server. """ # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_occurrence_note" not in self._stubs: self._stubs["get_occurrence_note"] = self.grpc_channel.unary_unary( "/grafeas.v1.Grafeas/GetOccurrenceNote", request_serializer=grafeas.GetOccurrenceNoteRequest.serialize, response_deserializer=grafeas.Note.deserialize, ) return self._stubs["get_occurrence_note"] @property def get_note(self) -> Callable[[grafeas.GetNoteRequest], Awaitable[grafeas.Note]]: r"""Return a callable for the get note method over gRPC. Gets the specified note. Returns: Callable[[~.GetNoteRequest], Awaitable[~.Note]]: A function that, when called, will call the underlying RPC on the server. """ # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_note" not in self._stubs: self._stubs["get_note"] = self.grpc_channel.unary_unary( "/grafeas.v1.Grafeas/GetNote", request_serializer=grafeas.GetNoteRequest.serialize, response_deserializer=grafeas.Note.deserialize, ) return self._stubs["get_note"] @property def list_notes( self, ) -> Callable[[grafeas.ListNotesRequest], Awaitable[grafeas.ListNotesResponse]]: r"""Return a callable for the list notes method over gRPC. Lists notes for the specified project. Returns: Callable[[~.ListNotesRequest], Awaitable[~.ListNotesResponse]]: A function that, when called, will call the underlying RPC on the server. """ # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_notes" not in self._stubs: self._stubs["list_notes"] = self.grpc_channel.unary_unary( "/grafeas.v1.Grafeas/ListNotes", request_serializer=grafeas.ListNotesRequest.serialize, response_deserializer=grafeas.ListNotesResponse.deserialize, ) return self._stubs["list_notes"] @property def delete_note( self, ) -> Callable[[grafeas.DeleteNoteRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete note method over gRPC. Deletes the specified note. Returns: Callable[[~.DeleteNoteRequest], Awaitable[~.Empty]]: A function that, when called, will call the underlying RPC on the server. """ # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "delete_note" not in self._stubs: self._stubs["delete_note"] = self.grpc_channel.unary_unary( "/grafeas.v1.Grafeas/DeleteNote", request_serializer=grafeas.DeleteNoteRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_note"] @property def create_note( self, ) -> Callable[[grafeas.CreateNoteRequest], Awaitable[grafeas.Note]]: r"""Return a callable for the create note method over gRPC. Creates a new note. Returns: Callable[[~.CreateNoteRequest], Awaitable[~.Note]]: A function that, when called, will call the underlying RPC on the server. """ # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_note" not in self._stubs: self._stubs["create_note"] = self.grpc_channel.unary_unary( "/grafeas.v1.Grafeas/CreateNote", request_serializer=grafeas.CreateNoteRequest.serialize, response_deserializer=grafeas.Note.deserialize, ) return self._stubs["create_note"] @property def batch_create_notes( self, ) -> Callable[ [grafeas.BatchCreateNotesRequest], Awaitable[grafeas.BatchCreateNotesResponse] ]: r"""Return a callable for the batch create notes method over gRPC. Creates new notes in batch. Returns: Callable[[~.BatchCreateNotesRequest], Awaitable[~.BatchCreateNotesResponse]]: A function that, when called, will call the underlying RPC on the server. """ # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "batch_create_notes" not in self._stubs: self._stubs["batch_create_notes"] = self.grpc_channel.unary_unary( "/grafeas.v1.Grafeas/BatchCreateNotes", request_serializer=grafeas.BatchCreateNotesRequest.serialize, response_deserializer=grafeas.BatchCreateNotesResponse.deserialize, ) return self._stubs["batch_create_notes"] @property def update_note( self, ) -> Callable[[grafeas.UpdateNoteRequest], Awaitable[grafeas.Note]]: r"""Return a callable for the update note method over gRPC. Updates the specified note. Returns: Callable[[~.UpdateNoteRequest], Awaitable[~.Note]]: A function that, when called, will call the underlying RPC on the server. """ # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_note" not in self._stubs: self._stubs["update_note"] = self.grpc_channel.unary_unary( "/grafeas.v1.Grafeas/UpdateNote", request_serializer=grafeas.UpdateNoteRequest.serialize, response_deserializer=grafeas.Note.deserialize, ) return self._stubs["update_note"] @property def list_note_occurrences( self, ) -> Callable[ [grafeas.ListNoteOccurrencesRequest], Awaitable[grafeas.ListNoteOccurrencesResponse], ]: r"""Return a callable for the list note occurrences method over gRPC. Lists occurrences referencing the specified note. Provider projects can use this method to get all occurrences across consumer projects referencing the specified note. Returns: Callable[[~.ListNoteOccurrencesRequest], Awaitable[~.ListNoteOccurrencesResponse]]: A function that, when called, will call the underlying RPC on the server. """ # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_note_occurrences" not in self._stubs: self._stubs["list_note_occurrences"] = self.grpc_channel.unary_unary( "/grafeas.v1.Grafeas/ListNoteOccurrences", request_serializer=grafeas.ListNoteOccurrencesRequest.serialize, response_deserializer=grafeas.ListNoteOccurrencesResponse.deserialize, ) return self._stubs["list_note_occurrences"] __all__ = ("GrafeasGrpcAsyncIOTransport",)
apache-2.0
-3,522,779,740,541,284,000
41.626603
89
0.617956
false
4.548393
false
false
false
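A hedged sketch of constructing the AsyncIO transport directly and invoking one of its lazily-built stubs. The host, parent value, and ambient credentials are assumptions; in practice the generated Grafeas client wires the transport up itself.

import asyncio
from grafeas.grafeas_v1.services.grafeas.transports.grpc_asyncio import (
    GrafeasGrpcAsyncIOTransport,
)
from grafeas.grafeas_v1.types import grafeas

async def main():
    # Placeholder endpoint; credentials are resolved from the environment.
    transport = GrafeasGrpcAsyncIOTransport(host="grafeas.example.com")
    rpc = transport.list_occurrences  # stub is built on first access, then cached
    response = await rpc(grafeas.ListOccurrencesRequest(parent="projects/p1"))

asyncio.run(main())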
terranum-ch/GraphLink
graphlink/ui/gkui_node_manager.py
1
2716
#!/usr/bin/python

import os

import wx

from ..core.gk_node import GKNode
from .gkui_node_dlg import GKUINodeEditDialog


class GKUINodeManager(object):
    def __init__(self, parentframe, listctrl):
        self.m_listctrl = listctrl
        assert (self.m_listctrl is not None), "listctrl is None!"
        self.m_parent_frame = parentframe
        self.m_nodes = []
        self.m_node_paths = []

    def add_node_path(self, nodepath):
        """specify search path for nodes"""
        if nodepath not in self.m_node_paths:
            self.m_node_paths.append(nodepath)

    def has_node_paths(self):
        """return True if some node paths are defined"""
        if len(self.m_node_paths) == 0:
            return False
        return True

    def add_node_to_list(self, node):
        """add node to the internal list if it isn't already present"""
        if node not in self.m_nodes:
            self.m_nodes.append(node)

    def get_node_count(self):
        """get the number of nodes"""
        return len(self.m_nodes)

    def reload_path(self):
        """clear the list ctrl and parse the node paths"""
        for path in self.m_node_paths:
            if os.path.exists(path) is False:
                wx.LogError("{} didn't exist!".format(path))
            else:
                for myfile in os.listdir(path):
                    if myfile.endswith(".gkn"):  # node files
                        node = GKNode()
                        # os.listdir only returns bare file names, so join
                        # them with the search path before loading.
                        nodefile = os.path.join(path, myfile)
                        if node.load_from_file(nodefile) is False:
                            wx.LogWarning("Error loading: {}".format(nodefile))
                        else:
                            self.add_node_to_list(node)

        # reload the node list
        self.reload_list()

    def reload_list(self):
        """reload the node list"""
        self.m_listctrl.DeleteAllItems()
        for index, node in enumerate(self.m_nodes):
            self.m_listctrl.Append([index + 1, node.m_name])

    def add_node_dialog(self):
        """display the add node dialog"""
        mynode = GKNode()
        myDlg = GKUINodeEditDialog(self.m_parent_frame, mynode)
        if myDlg.ShowModal() == wx.ID_SAVE:
            self.add_node_to_list(mynode)
            self.reload_list()

    def edit_node_dialog(self):
        """display the edit node dialog"""
        my_node_index = self.m_listctrl.GetFirstSelected()
        if my_node_index == -1:
            wx.LogWarning("Nothing selected, select a node first!")
            return False

        my_node = self.m_nodes[my_node_index]
        assert(my_node)
        myDlg = GKUINodeEditDialog(self.m_parent_frame, my_node)
        if myDlg.ShowModal() == wx.ID_SAVE:
            self.reload_list()
apache-2.0
-1,177,501,102,955,131,400
31.710843
77
0.559116
false
3.71409
false
false
false
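A hedged sketch of wiring `GKUINodeManager` into a wx frame; the widget layout and node directory are assumptions, not part of the module above.

import wx
from graphlink.ui.gkui_node_manager import GKUINodeManager

app = wx.App()
frame = wx.Frame(None, title="GraphLink nodes")
listctrl = wx.ListCtrl(frame, style=wx.LC_REPORT)
listctrl.InsertColumn(0, "#")
listctrl.InsertColumn(1, "Name")
manager = GKUINodeManager(frame, listctrl)
manager.add_node_path("/path/to/nodes")  # directory holding *.gkn files
manager.reload_path()                    # loads the nodes and fills the list
frame.Show()
app.MainLoop()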
chrplr/AIP2015
resources/python-scripts/dualscope.py
1
28502
#!/usr/bin/env python
"""
Oscilloscope + spectrum analyser in Python.
------------------------------------------------------------
Copyright (C) 2008, Roger Fearick, University of Cape Town

This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
------------------------------------------------------------

Version 0.7c

Dependencies:
numpy         -- numerics, fft
PyQt4, PyQwt5 -- gui, graphics
pyaudio       -- sound card -- Enthought unstable branch!

This code provides an oscilloscope and spectrum analyzer using the PC
sound card as input.

The interface, based on qwt, uses a familiar 'knob based' layout so
that it approximates an analogue scope.

Two traces are provided with input via the sound card "line in" jack.
Traces can be averaged to reduce the influence of noise.
The cross-correlation between the inputs can be computed.
The spectrum analyser has both log (dB) scale and linear scale.
A cross hair status display permits the reading of values off the screen.
Printing is provided.
"""
# dualscope6.py derived from dualscopy5.py 11/8/05
# adds autocorrelation
# Update for Qt4: 4-11/10/2007 rwf
# dualscope7.py: use pyaudio 27/2/08 rwf

import sys
from PyQt4 import Qt
from PyQt4 import Qwt5 as Qwt
from numpy import *
import numpy.fft as FFT
import pyaudio
import icons  # part of this package -- toolbar icons

# audio setup
CHUNK = 8192              # input buffer size in frames
FORMAT = pyaudio.paInt16
CHANNELS = 2
RATE = 48000              # depends on sound card: 96000 might be possible

# scope configuration
BOTHLR=0
LEFT=1
RIGHT=2
soundbuffersize=CHUNK
samplerate=float(RATE)
scopeheight=350
LRchannel=BOTHLR
PENWIDTH=2

# status messages
freezeInfo = 'Freeze: Press mouse button and drag'
cursorInfo = 'Cursor Pos: Press mouse button in plot region'

# utility classes
class LogKnob(Qwt.QwtKnob):
    """
    Provide knob with log scale
    """
    def __init__(self, *args):
        apply(Qwt.QwtKnob.__init__, (self,) + args)
        self.setScaleEngine(Qwt.QwtLog10ScaleEngine())

    def setRange(self,minR,maxR):
        self.setScale(minR,maxR)
        Qwt.QwtKnob.setRange(self, log10(minR), log10(maxR), 0.333333)

    def setValue(self,val):
        Qwt.QwtKnob.setValue(self,log10(val))


class LblKnob:
    """
    Provide knob with a label
    """
    def __init__(self, wgt, x,y, name, logscale=0):
        if logscale:
            self.knob=LogKnob(wgt)
        else:
            self.knob=Qwt.QwtKnob(wgt)
        color=Qt.QColor(200,200,210)
        self.knob.palette().setColor(Qt.QPalette.Active,
                                     Qt.QPalette.Button, color )
        self.lbl=Qt.QLabel(name, wgt)
        self.knob.setGeometry(x, y, 140, 100)
        # oooh, eliminate this ...
if name[0]=='o': self.knob.setKnobWidth(40) self.lbl.setGeometry(x, y+90, 140, 15) self.lbl.setAlignment(Qt.Qt.AlignCenter) def setRange(self,*args): apply(self.knob.setRange, args) def setValue(self,*args): apply(self.knob.setValue, args) def setScaleMaxMajor(self,*args): apply(self.knob.setScaleMaxMajor, args) class Scope(Qwt.QwtPlot): """ Oscilloscope display widget """ def __init__(self, *args): apply(Qwt.QwtPlot.__init__, (self,) + args) self.setTitle('Scope'); self.setCanvasBackground(Qt.Qt.white) # grid self.grid = Qwt.QwtPlotGrid() self.grid.enableXMin(True) self.grid.setMajPen(Qt.QPen(Qt.Qt.gray, 0, Qt.Qt.SolidLine)) self.grid.attach(self) # axes self.enableAxis(Qwt.QwtPlot.yRight); self.setAxisTitle(Qwt.QwtPlot.xBottom, 'Time [s]'); self.setAxisTitle(Qwt.QwtPlot.yLeft, 'Amplitude [V]'); self.setAxisMaxMajor(Qwt.QwtPlot.xBottom, 10); self.setAxisMaxMinor(Qwt.QwtPlot.xBottom, 0); self.setAxisScaleEngine(Qwt.QwtPlot.yRight, Qwt.QwtLinearScaleEngine()); self.setAxisMaxMajor(Qwt.QwtPlot.yLeft, 10); self.setAxisMaxMinor(Qwt.QwtPlot.yLeft, 0); self.setAxisMaxMajor(Qwt.QwtPlot.yRight, 10); self.setAxisMaxMinor(Qwt.QwtPlot.yRight, 0); # curves for scope traces: 2 first so 1 is on top self.curve2 = Qwt.QwtPlotCurve('Trace2') self.curve2.setPen(Qt.QPen(Qt.Qt.magenta,PENWIDTH)) self.curve2.setYAxis(Qwt.QwtPlot.yRight) self.curve2.attach(self) self.curve1 = Qwt.QwtPlotCurve('Trace1') self.curve1.setPen(Qt.QPen(Qt.Qt.blue,PENWIDTH)) self.curve1.setYAxis(Qwt.QwtPlot.yLeft) self.curve1.attach(self) # default settings self.triggerval=0.0 self.maxamp=1.0 self.maxamp2=1.0 self.freeze=0 self.average=0 self.autocorrelation=0 self.avcount=0 self.datastream = None self.offset1=0.0 self.offset2=0.0 # set data # NumPy: f, g, a and p are arrays! self.dt=1.0/samplerate self.f = arange(0.0, 1.0, self.dt) self.a1 = 0.0*self.f self.a2 = 0.0*self.f self.curve1.setData(self.f, self.a1) self.curve2.setData(self.f, self.a2) # start self.timerEvent() callbacks running self.startTimer(100) # plot self.replot() # convenience methods for knob callbacks def setMaxAmp(self, val): self.maxamp=val def setMaxAmp2(self, val): self.maxamp2=val def setMaxTime(self, val): self.maxtime=val def setOffset1(self, val): self.offset1=val def setOffset2(self, val): self.offset2=val def setTriggerLevel(self, val): self.triggerval=val # plot scope traces def setDisplay(self): l=len(self.a1) if LRchannel==BOTHLR: self.curve1.setData(self.f[0:l], self.a1[:l]+self.offset1*self.maxamp) self.curve2.setData(self.f[0:l], self.a2[:l]+self.offset2*self.maxamp2) elif LRchannel==RIGHT: self.curve1.setData([0.0,0.0], [0.0,0.0]) self.curve2.setData(self.f[0:l], self.a2[:l]+self.offset2*self.maxamp2) elif LRchannel==LEFT: self.curve1.setData(self.f[0:l], self.a1[:l]+self.offset1*self.maxamp) self.curve2.setData([0.0,0.0], [0.0,0.0]) self.replot() def getValue(self, index): return self.f[index],self.a[index] def setAverage(self, state): self.average = state self.avcount=0 def setAutoc(self, state): self.autocorrelation = state self.avcount=0 def setFreeze(self, freeze): self.freeze = 1-self.freeze def setDatastream(self, datastream): self.datastream = datastream # timer callback that does the work def timerEvent(self,e): # Scope if self.datastream == None: return x=self.datastream.read(CHUNK) if self.freeze==1 or self.avcount>16: return X=fromstring(x,dtype='h') if len(X) == 0: return P=array(X,dtype='d')/32768.0 val=self.triggerval*self.maxamp i=0 R=P[0::2] L=P[1::2] if self.autocorrelation: lenX=len(R) if lenX == 0: return if 
lenX!=soundbuffersize: print lenX window=blackman(lenX) A1=FFT.fft(R*window) #lenX A2=FFT.fft(L*window) #lenX B2=(A1*conjugate(A2))/10.0 R=FFT.ifft(B2).real else: # normal scope # set trigger levels for i in range(len(R)-1): if R[i]<val and R[i+1]>=val: break if i > len(R)-2: i=0 R=R[i:] L=L[i:] if self.average == 0: self.a1=R self.a2=L else: self.avcount+=1 if self.avcount==1: self.sumR=R self.sumL=L else: lp=min(len(R),len(self.sumR)) self.sumR=self.sumR[:lp]+R[:lp] self.sumL=self.sumL[:lp]+L[:lp] self.a1=self.sumR/self.avcount self.a2=self.sumL/self.avcount self.setDisplay() inittime=0.01 initamp=0.1 class ScopeFrame(Qt.QFrame): """ Oscilloscope widget --- contains controls + display """ def __init__(self, *args): apply(Qt.QFrame.__init__, (self,) + args) # the following: setPal.. doesn't seem to work on Win try: self.setPaletteBackgroundColor( QColor(240,240,245)) except: pass knobpos=scopeheight+30 self.setFixedSize(700, scopeheight+150) self.freezeState = 0 self.knbLevel = LblKnob(self,560,50,"Trigger level") self.knbTime = LblKnob(self,560, 220,"Time", 1) self.knbSignal = LblKnob(self,150, knobpos, "Signal1",1) self.knbSignal2 = LblKnob(self,450, knobpos, "Signal2",1) self.knbOffset1=LblKnob(self,10, knobpos,"offset1") self.knbOffset2=LblKnob(self,310, knobpos,"offset2") self.knbTime.setRange(0.0001, 1.0) self.knbTime.setValue(0.01) self.knbSignal.setRange(0.0001, 1.0) self.knbSignal.setValue(0.1) self.knbSignal2.setRange(0.0001, 1.0) self.knbSignal2.setValue(0.1) self.knbOffset2.setRange(-1.0, 1.0, 0.001) self.knbOffset2.setValue(0.0) self.knbOffset1.setRange(-1.0, 1.0, 0.001) self.knbOffset1.setValue(0.0) self.knbLevel.setRange(-1.0, 1.0, 0.001) self.knbLevel.setValue(0.1) self.knbLevel.setScaleMaxMajor(10) self.plot = Scope(self) self.plot.setGeometry(10, 10, 550, scopeheight) self.picker = Qwt.QwtPlotPicker( Qwt.QwtPlot.xBottom, Qwt.QwtPlot.yLeft, Qwt.QwtPicker.PointSelection | Qwt.QwtPicker.DragSelection, Qwt.QwtPlotPicker.CrossRubberBand, Qwt.QwtPicker.ActiveOnly, #AlwaysOn, self.plot.canvas()) self.picker.setRubberBandPen(Qt.QPen(Qt.Qt.green)) self.picker.setTrackerPen(Qt.QPen(Qt.Qt.cyan)) self.connect(self.knbTime.knob, Qt.SIGNAL("valueChanged(double)"), self.setTimebase) self.knbTime.setValue(0.01) self.connect(self.knbSignal.knob, Qt.SIGNAL("valueChanged(double)"), self.setAmplitude) self.connect(self.knbSignal2.knob, Qt.SIGNAL("valueChanged(double)"), self.setAmplitude2) self.knbSignal.setValue(0.1) self.connect(self.knbLevel.knob, Qt.SIGNAL("valueChanged(double)"), self.setTriggerlevel) self.connect(self.knbOffset1.knob, Qt.SIGNAL("valueChanged(double)"), self.plot.setOffset1) self.connect(self.knbOffset2.knob, Qt.SIGNAL("valueChanged(double)"), self.plot.setOffset2) self.knbLevel.setValue(0.1) self.plot.setAxisScale( Qwt.QwtPlot.xBottom, 0.0, 10.0*inittime) self.plot.setAxisScale( Qwt.QwtPlot.yLeft, -5.0*initamp, 5.0*initamp) self.plot.setAxisScale( Qwt.QwtPlot.yRight, -5.0*initamp, 5.0*initamp) self.plot.show() def _calcKnobVal(self,val): ival=floor(val) frac=val-ival if frac >=0.9: frac=1.0 elif frac>=0.66: frac=log10(5.0) elif frac>=log10(2.0): frac=log10(2.0) else: frac=0.0 dt=10**frac*10**ival return dt def setTimebase(self, val): dt=self._calcKnobVal(val) self.plot.setAxisScale( Qwt.QwtPlot.xBottom, 0.0, 10.0*dt) self.plot.replot() def setAmplitude(self, val): dt=self._calcKnobVal(val) self.plot.setAxisScale( Qwt.QwtPlot.yLeft, -5.0*dt, 5.0*dt) self.plot.setMaxAmp( 5.0*dt ) self.plot.replot() def setAmplitude2(self, val): dt=self._calcKnobVal(val) 
        self.plot.setAxisScale( Qwt.QwtPlot.yRight, -5.0*dt, 5.0*dt)
        self.plot.setMaxAmp2( 5.0*dt )
        self.plot.replot()

    def setTriggerlevel(self, val):
        self.plot.setTriggerLevel(val)
        self.plot.setDisplay()

#--------------------------------------------------------------------
class FScope(Qwt.QwtPlot):
    """
    Power spectrum display widget
    """
    def __init__(self, *args):
        apply(Qwt.QwtPlot.__init__, (self,) + args)

        self.setTitle('Power spectrum');
        self.setCanvasBackground(Qt.Qt.white)

        # grid
        self.grid = Qwt.QwtPlotGrid()
        self.grid.enableXMin(True)
        self.grid.setMajPen(Qt.QPen(Qt.Qt.gray, 0, Qt.Qt.SolidLine));
        self.grid.attach(self)

        # axes
        self.setAxisTitle(Qwt.QwtPlot.xBottom, 'Frequency [Hz]');
        self.setAxisTitle(Qwt.QwtPlot.yLeft, 'Power [dB]');
        self.setAxisMaxMajor(Qwt.QwtPlot.xBottom, 10);
        self.setAxisMaxMinor(Qwt.QwtPlot.xBottom, 0);
        self.setAxisMaxMajor(Qwt.QwtPlot.yLeft, 10);
        self.setAxisMaxMinor(Qwt.QwtPlot.yLeft, 0);

        # curves
        self.curve2 = Qwt.QwtPlotCurve('PSTrace2')
        self.curve2.setPen(Qt.QPen(Qt.Qt.magenta,PENWIDTH))
        self.curve2.setYAxis(Qwt.QwtPlot.yLeft)
        self.curve2.attach(self)

        self.curve1 = Qwt.QwtPlotCurve('PSTrace1')
        self.curve1.setPen(Qt.QPen(Qt.Qt.blue,PENWIDTH))
        self.curve1.setYAxis(Qwt.QwtPlot.yLeft)
        self.curve1.attach(self)

        self.triggerval=0.0
        self.maxamp=1.0
        self.freeze=0
        self.average=0
        self.avcount=0
        self.logy=1
        self.datastream=None
        self.dt=1.0/samplerate
        self.df=1.0/(soundbuffersize*self.dt)
        self.f = arange(0.0, samplerate, self.df)
        self.a = 0.0*self.f
        self.p = 0.0*self.f
        self.curve1.setData(self.f, self.a)
        self.setAxisScale( Qwt.QwtPlot.xBottom, 0.0, 10.0*initfreq)
        self.setAxisScale( Qwt.QwtPlot.yLeft, -120.0, 0.0)
        self.startTimer(100)
        self.replot()

    def resetBuffer(self):
        self.df=1.0/(soundbuffersize*self.dt)
        # numpy's arange replaces the old Numeric arrayrange, and setData
        # takes just the x and y arrays.
        self.f = arange(0.0, 20000.0, self.df)
        self.a = 0.0*self.f
        self.p = 0.0*self.f
        self.curve1.setData(self.f, self.a)

    def setMaxAmp(self, val):
        if val>0.6:
            self.setAxisScale( Qwt.QwtPlot.yLeft, -120.0, 0.0)
            self.logy=1
        else:
            self.setAxisScale( Qwt.QwtPlot.yLeft, 0.0, 10.0*val)
            self.logy=0
        self.maxamp=val

    def setMaxTime(self, val):
        self.maxtime=val

    def setTriggerLevel(self, val):
        self.triggerval=val

    def setDisplay(self):
        n=soundbuffersize/2
        if LRchannel==BOTHLR:
            self.curve1.setData(self.f[0:n], self.a[:n])
            self.curve2.setData(self.f[0:n], self.a2[:n])
        elif LRchannel==RIGHT:
            self.curve1.setData([0.0,0.0], [0.0,0.0])
            self.curve2.setData(self.f[0:n], self.a2[:n])
        elif LRchannel==LEFT:
            self.curve1.setData(self.f[0:n], self.a[:n])
            self.curve2.setData([0.0,0.0], [0.0,0.0])
        self.replot()

    def getValue(self, index):
        return self.f[index],self.a[index]

    def setAverage(self, state):
        self.average = state
        self.avcount=0

    def setFreeze(self, freeze):
        self.freeze = 1-self.freeze

    def setDatastream(self, datastream):
        self.datastream = datastream

    def timerEvent(self,e):
        # FFT
        if self.datastream == None:
            return
        x=self.datastream.read(CHUNK)
        if self.freeze==1:
            return
        X=fromstring(x,dtype='h')
        if len(X) == 0:
            return
        P=array(X,dtype='d')/32768.0
        val=self.triggerval*self.maxamp
        i=0
        R=P[0::2]
        L=P[1::2]
        lenX=len(R)
        if lenX == 0:
            return
        if lenX!=(CHUNK):
            print 'size fail',lenX
        window=blackman(lenX)
        sumw=sum(window*window)
        A=FFT.fft(R*window)   #lenX
        B=(A*conjugate(A)).real
        A=FFT.fft(L*window)   #lenX
        B2=(A*conjugate(A)).real
        sumw*=2.0             # sym about Nyquist (*4); use rms (/2)
        sumw/=self.dt         # sample rate
        B=B/sumw
        B2=B2/sumw
        if self.logy:
            P1=log10(B)*10.0+20.0#60.0
            P2=log10(B2)*10.0+20.0#60.0
        else:
            P1=B
            P2=B2
        if self.average == 0:
            self.a=P1
            self.a2=P2
        else:
            self.avcount+=1
            if self.avcount==1:
                self.sumP1=P1
                self.sumP2=P2
            else:
                self.sumP1=self.sumP1+P1
                self.sumP2=self.sumP2+P2
            self.a=self.sumP1/self.avcount
            self.a2=self.sumP2/self.avcount
        self.setDisplay()

initfreq=100.0

class FScopeFrame(Qt.QFrame):
    """
    Power spectrum widget --- contains controls + display
    """
    def __init__(self , *args):
        apply(Qt.QFrame.__init__, (self,) + args)
        knobpos=scopeheight+30
        # the following: setPal.. doesn't seem to work on Win
        try:
            self.setPaletteBackgroundColor( QColor(240,240,245))
        except:
            pass
        self.setFixedSize(700, scopeheight+150)
        self.freezeState = 0
        self.knbSignal = LblKnob(self,160, knobpos, "Signal",1)
        self.knbTime = LblKnob(self,310, knobpos,"Frequency", 1)
        self.knbTime.setRange(1.0, 2000.0)
        self.knbSignal.setRange(0.0000001, 1.0)
        self.plot = FScope(self)
        self.plot.setGeometry(10, 10, 500, scopeheight)
        self.picker = Qwt.QwtPlotPicker(
            Qwt.QwtPlot.xBottom,
            Qwt.QwtPlot.yLeft,
            Qwt.QwtPicker.PointSelection | Qwt.QwtPicker.DragSelection,
            Qwt.QwtPlotPicker.CrossRubberBand,
            Qwt.QwtPicker.ActiveOnly,  #AlwaysOn,
            self.plot.canvas())
        self.picker.setRubberBandPen(Qt.QPen(Qt.Qt.green))
        self.picker.setTrackerPen(Qt.QPen(Qt.Qt.cyan))
        self.connect(self.knbTime.knob, Qt.SIGNAL("valueChanged(double)"),
                     self.setTimebase)
        self.knbTime.setValue(1000.0)
        self.connect(self.knbSignal.knob, Qt.SIGNAL("valueChanged(double)"),
                     self.setAmplitude)
        self.knbSignal.setValue(1.0)
        self.plot.show()

    def _calcKnobVal(self,val):
        ival=floor(val)
        frac=val-ival
        if frac >=0.9:
            frac=1.0
        elif frac>=0.66:
            frac=log10(5.0)
        elif frac>=log10(2.0):
            frac=log10(2.0)
        else:
            frac=0.0
        dt=10**frac*10**ival
        return dt

    def setTimebase(self, val):
        dt=self._calcKnobVal(val)
        self.plot.setAxisScale( Qwt.QwtPlot.xBottom, 0.0, 10.0*dt)
        self.plot.replot()

    def setAmplitude(self, val):
        dt=self._calcKnobVal(val)
        self.plot.setMaxAmp( dt )
        self.plot.replot()

#---------------------------------------------------------------------
class FScopeDemo(Qt.QMainWindow):
    """
    Application container widget

    Contains scope and power spectrum analyser in tabbed windows.
    Enables switching between the two.
    Handles toolbar and status.
    """
    def __init__(self, *args):
        apply(Qt.QMainWindow.__init__, (self,) + args)

        self.freezeState = 0
        self.changeState = 0
        self.averageState = 0
        self.autocState = 0

        self.scope = ScopeFrame(self)
        self.current = self.scope
        self.pwspec = FScopeFrame(self)
        self.pwspec.hide()

        self.stack=Qt.QTabWidget(self)
        self.stack.addTab(self.scope,"scope")
        self.stack.addTab(self.pwspec,"fft")
        self.setCentralWidget(self.stack)

        toolBar = Qt.QToolBar(self)
        self.addToolBar(toolBar)
        sb=self.statusBar()
        sbfont=Qt.QFont("Helvetica",12)
        sb.setFont(sbfont)

        self.btnFreeze = Qt.QToolButton(toolBar)
        self.btnFreeze.setText("Freeze")
        self.btnFreeze.setIcon(Qt.QIcon(Qt.QPixmap(icons.stopicon)))
        self.btnFreeze.setCheckable(True)
        self.btnFreeze.setToolButtonStyle(Qt.Qt.ToolButtonTextUnderIcon)
        toolBar.addWidget(self.btnFreeze)

        self.btnPrint = Qt.QToolButton(toolBar)
        self.btnPrint.setText("Print")
        self.btnPrint.setIcon(Qt.QIcon(Qt.QPixmap(icons.print_xpm)))
        self.btnPrint.setToolButtonStyle(Qt.Qt.ToolButtonTextUnderIcon)
        toolBar.addWidget(self.btnPrint)

        self.btnMode = Qt.QToolButton(toolBar)
        self.btnMode.setText("fft")
        self.btnMode.setIcon(Qt.QIcon(Qt.QPixmap(icons.pwspec)))
        self.btnMode.setCheckable(True)
        self.btnMode.setToolButtonStyle(Qt.Qt.ToolButtonTextUnderIcon)
        toolBar.addWidget(self.btnMode)

        self.btnAvge = Qt.QToolButton(toolBar)
        self.btnAvge.setText("average")
        self.btnAvge.setIcon(Qt.QIcon(Qt.QPixmap(icons.avge)))
        self.btnAvge.setCheckable(True)
        self.btnAvge.setToolButtonStyle(Qt.Qt.ToolButtonTextUnderIcon)
        toolBar.addWidget(self.btnAvge)

        self.btnAutoc = Qt.QToolButton(toolBar)
        self.btnAutoc.setText("correlate")
        self.btnAutoc.setIcon(Qt.QIcon(Qt.QPixmap(icons.avge)))
        self.btnAutoc.setCheckable(True)
        self.btnAutoc.setToolButtonStyle(Qt.Qt.ToolButtonTextUnderIcon)
        toolBar.addWidget(self.btnAutoc)

        self.lstLabl = Qt.QLabel("Buffer:",toolBar)
        toolBar.addWidget(self.lstLabl)
        self.lstChan = Qt.QComboBox(toolBar)
        self.lstChan.insertItem(0,"8192")
        self.lstChan.insertItem(1,"16k")
        self.lstChan.insertItem(2,"32k")
        toolBar.addWidget(self.lstChan)

        self.lstLR = Qt.QLabel("Channels:",toolBar)
        toolBar.addWidget(self.lstLR)
        self.lstLRmode = Qt.QComboBox(toolBar)
        self.lstLRmode.insertItem(0,"LR")
        self.lstLRmode.insertItem(1,"L")
        self.lstLRmode.insertItem(2,"R")
        toolBar.addWidget(self.lstLRmode)

        self.connect(self.btnPrint, Qt.SIGNAL('clicked()'),
                     self.printPlot)
        self.connect(self.btnFreeze, Qt.SIGNAL('toggled(bool)'),
                     self.freeze)
        self.connect(self.btnMode, Qt.SIGNAL('toggled(bool)'),
                     self.mode)
        self.connect(self.btnAvge, Qt.SIGNAL('toggled(bool)'),
                     self.average)
        self.connect(self.btnAutoc, Qt.SIGNAL('toggled(bool)'),
                     self.autocorrelation)
        self.connect(self.lstChan, Qt.SIGNAL('activated(int)'),
                     self.fftsize)
        self.connect(self.lstLRmode, Qt.SIGNAL('activated(int)'),
                     self.channel)
        self.connect(self.scope.picker, Qt.SIGNAL('moved(const QPoint&)'),
                     self.moved)
        self.connect(self.scope.picker, Qt.SIGNAL('appended(const QPoint&)'),
                     self.appended)
        self.connect(self.pwspec.picker, Qt.SIGNAL('moved(const QPoint&)'),
                     self.moved)
        self.connect(self.pwspec.picker, Qt.SIGNAL('appended(const QPoint&)'),
                     self.appended)
        self.connect(self.stack, Qt.SIGNAL('currentChanged(int)'),
                     self.mode)

        self.showInfo(cursorInfo)

    def showInfo(self, text):
        self.statusBar().showMessage(text)

    def printPlot(self):
        # Printing via PyQt4's QPrinter/QPrintDialog; PyQwt5 exposes the
        # plot's print method as print_.
        p = Qt.QPrinter()
        dialog = Qt.QPrintDialog(p)
        if dialog.exec_():
            self.current.plot.print_(p)

    def fftsize(self, item):
        pass
##        global s, soundbuffersize
##        s.stop()
##        s.close()
##        if item==2:
##            soundbuffersize=8192*3
##        elif item==1:
##            soundbuffersize=8192*2
##        else:
##            soundbuffersize=8192
##        s=f.stream(48000,2,'int16',soundbuffersize,1)
##        s.open()
##        s.start()
##        self.pwspec.plot.resetBuffer()
##        if self.current==self.pwspec:
##            self.pwspec.plot.setDatastream(s)
##            self.pwspec.plot.avcount=0
##        else:
##            self.scope.plot.setDatastream(s)

    def channel(self, item):
        global LRchannel
        if item==2:
            LRchannel=RIGHT
        elif item==1:
            LRchannel=LEFT
        else:
            LRchannel=BOTHLR

    def freeze(self, on):
        if on:
            self.freezeState = 1
            self.btnFreeze.setText("Run")
            self.btnFreeze.setIcon(Qt.QIcon(Qt.QPixmap(icons.goicon)))
        else:
            self.freezeState = 0
            self.btnFreeze.setText("Freeze")
            self.btnFreeze.setIcon(Qt.QIcon(Qt.QPixmap(icons.stopicon)))
        self.scope.plot.setFreeze(self.freezeState)
        self.pwspec.plot.setFreeze(self.freezeState)

    def average(self, on):
        if on:
            self.averageState = 1
            self.btnAvge.setText("single")
            self.btnAvge.setIcon(Qt.QIcon(Qt.QPixmap(icons.single)))
        else:
            self.averageState = 0
            self.btnAvge.setText("average")
            self.btnAvge.setIcon(Qt.QIcon(Qt.QPixmap(icons.avge)))
        self.scope.plot.setAverage(self.averageState)
        self.pwspec.plot.setAverage(self.averageState)

    def autocorrelation(self, on):
        if on:
            self.autocState = 1
            self.btnAutoc.setText("normal")
            self.btnAutoc.setIcon(Qt.QIcon(Qt.QPixmap(icons.single)))
        else:
            self.autocState = 0
            self.btnAutoc.setText("correlate")
            self.btnAutoc.setIcon(Qt.QIcon(Qt.QPixmap(icons.avge)))
        self.scope.plot.setAutoc(self.autocState)

    def mode(self, on):
        if on:
            self.changeState=1
            self.current=self.pwspec
            self.btnMode.setText("scope")
            self.btnMode.setIcon(Qt.QIcon(Qt.QPixmap(icons.scope)))
        else:
            self.changeState=0
            self.current=self.scope
            self.btnMode.setText("fft")
            self.btnMode.setIcon(Qt.QIcon(Qt.QPixmap(icons.pwspec)))
        if self.changeState==1:
            self.stack.setCurrentIndex(self.changeState)
            self.scope.plot.setDatastream(None)
            self.pwspec.plot.setDatastream(stream)
        else:
            self.stack.setCurrentIndex(self.changeState)
            self.pwspec.plot.setDatastream(None)
            self.scope.plot.setDatastream(stream)

    def moved(self, e):
        if self.changeState==1:
            name='Freq'
        else:
            name='Time'
        frequency = self.current.plot.invTransform(Qwt.QwtPlot.xBottom, e.x())
        amplitude = self.current.plot.invTransform(Qwt.QwtPlot.yLeft, e.y())
        if name=='Time':
            df=self.scope.plot.dt
            i=int(frequency/df)
            ampa=self.scope.plot.a1[i]
            ampb=self.scope.plot.a2[i]
        else:
            df=self.pwspec.plot.df
            i=int(frequency/df)
            ampa=self.pwspec.plot.a[i]
            ampb=self.pwspec.plot.a2[i]
        self.showInfo('%s=%g, cursor=%g, A=%g, B=%g' %
                      (name,frequency, amplitude,ampa,ampb))

    def appended(self, e):
        # Python semantics: self.pos = e.pos() does not work; force a copy
        self.xpos = e.x()
        self.ypos = e.y()
        self.moved(e)  # fake a mouse move to show the cursor position


# open sound card data stream
p = pyaudio.PyAudio()
stream = p.open(format = FORMAT,
                channels = CHANNELS,
                rate = RATE,
                input = True,
                frames_per_buffer = CHUNK)

# Admire!
app = Qt.QApplication(sys.argv)
demo=FScopeDemo()
demo.scope.plot.setDatastream(stream)
demo.show()
app.exec_()

stream.stop_stream()
stream.close()
p.terminate()
gpl-2.0
2,410,628,168,513,785,300
32.026651
83
0.586941
false
3.21439
false
false
false
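The scope above boils down to a single pyaudio capture pattern; this minimal sketch isolates it, mirroring the module's constants and channel slicing.

import numpy, pyaudio

CHUNK, RATE = 8192, 48000                # same values as the module
pa = pyaudio.PyAudio()
stream = pa.open(format=pyaudio.paInt16, channels=2, rate=RATE,
                 input=True, frames_per_buffer=CHUNK)
raw = stream.read(CHUNK)                 # one buffer of interleaved samples
samples = numpy.fromstring(raw, dtype='h') / 32768.0
right, left = samples[0::2], samples[1::2]  # same de-interleave as the scope
stream.stop_stream(); stream.close(); pa.terminate()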
alexgorban/models
official/modeling/tf_utils.py
1
5438
# Copyright 2019 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Common TF utilities.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import six import tensorflow as tf from tensorflow.python.util import deprecation from official.modeling import activations @deprecation.deprecated( None, "tf.keras.layers.Layer supports multiple positional args and kwargs as " "input tensors. pack/unpack inputs to override __call__ is no longer " "needed." ) def pack_inputs(inputs): """Pack a list of `inputs` tensors to a tuple. Args: inputs: a list of tensors. Returns: a tuple of tensors. if any input is None, replace it with a special constant tensor. """ inputs = tf.nest.flatten(inputs) outputs = [] for x in inputs: if x is None: outputs.append(tf.constant(0, shape=[], dtype=tf.int32)) else: outputs.append(x) return tuple(outputs) @deprecation.deprecated( None, "tf.keras.layers.Layer supports multiple positional args and kwargs as " "input tensors. pack/unpack inputs to override __call__ is no longer " "needed." ) def unpack_inputs(inputs): """unpack a tuple of `inputs` tensors to a tuple. Args: inputs: a list of tensors. Returns: a tuple of tensors. if any input is a special constant tensor, replace it with None. """ inputs = tf.nest.flatten(inputs) outputs = [] for x in inputs: if is_special_none_tensor(x): outputs.append(None) else: outputs.append(x) x = tuple(outputs) # To trick the very pointless 'unbalanced-tuple-unpacking' pylint check # from triggering. if len(x) == 1: return x[0] return tuple(outputs) def is_special_none_tensor(tensor): """Checks if a tensor is a special None Tensor.""" return tensor.shape.ndims == 0 and tensor.dtype == tf.int32 # TODO(hongkuny): consider moving custom string-map lookup to keras api. def get_activation(identifier): """Maps a identifier to a Python function, e.g., "relu" => `tf.nn.relu`. It checks string first and if it is one of customized activation not in TF, the corresponding activation will be returned. For non-customized activation names and callable identifiers, always fallback to tf.keras.activations.get. Args: identifier: String name of the activation function or callable. Returns: A Python function corresponding to the activation function. """ if isinstance(identifier, six.string_types): name_to_fn = { "gelu": activations.gelu, "simple_swish": activations.simple_swish, "hard_swish": activations.hard_swish, "identity": activations.identity, } identifier = str(identifier).lower() if identifier in name_to_fn: return tf.keras.activations.get(name_to_fn[identifier]) return tf.keras.activations.get(identifier) def get_shape_list(tensor, expected_rank=None, name=None): """Returns a list of the shape of tensor, preferring static dimensions. Args: tensor: A tf.Tensor object to find the shape of. expected_rank: (optional) int. The expected rank of `tensor`. 
If this is specified and the `tensor` has a different rank, an exception
      will be thrown.
    name: Optional name of the tensor for the error message.

  Returns:
    A list of dimensions of the shape of tensor. All static dimensions will
    be returned as python integers, and dynamic dimensions will be returned
    as tf.Tensor scalars.
  """
  if expected_rank is not None:
    assert_rank(tensor, expected_rank, name)

  shape = tensor.shape.as_list()

  non_static_indexes = []
  for (index, dim) in enumerate(shape):
    if dim is None:
      non_static_indexes.append(index)

  if not non_static_indexes:
    return shape

  dyn_shape = tf.shape(tensor)
  for index in non_static_indexes:
    shape[index] = dyn_shape[index]
  return shape


def assert_rank(tensor, expected_rank, name=None):
  """Raises an exception if the tensor rank is not of the expected rank.

  Args:
    tensor: A tf.Tensor to check the rank of.
    expected_rank: Python integer or list of integers, expected rank.
    name: Optional name of the tensor for the error message.

  Raises:
    ValueError: If the expected shape doesn't match the actual shape.
  """
  expected_rank_dict = {}
  if isinstance(expected_rank, six.integer_types):
    expected_rank_dict[expected_rank] = True
  else:
    for x in expected_rank:
      expected_rank_dict[x] = True

  actual_rank = tensor.shape.ndims
  if actual_rank not in expected_rank_dict:
    raise ValueError(
        "For the tensor `%s`, the actual tensor rank `%d` (shape = %s) is not "
        "equal to the expected tensor rank `%s`" %
        (name, actual_rank, str(tensor.shape), str(expected_rank)))
apache-2.0
7,756,442,032,010,609,000
30.074286
80
0.690143
false
3.926354
false
false
false
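A minimal usage sketch for the `get_shape_list` helper above, assuming TensorFlow 2.x; only `tf_utils.get_shape_list` comes from the source, the function name and signature below are illustrative:

import tensorflow as tf
from official.modeling import tf_utils

@tf.function(input_signature=[tf.TensorSpec([None, 128], tf.int32)])
def batch_size_of(batch):
    # rank is verified via assert_rank; the static dim 128 comes back as a
    # Python int, while the dynamic batch dim comes back as a scalar tf.Tensor
    dims = tf_utils.get_shape_list(batch, expected_rank=2, name="batch")
    return dims[0]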
PyFilesystem/pyfilesystem
fs/rpcfs.py
1
11326
""" fs.rpcfs ======== This module provides the class 'RPCFS' to access a remote FS object over XML-RPC. You probably want to use this in conjunction with the 'RPCFSServer' class from the :mod:`~fs.expose.xmlrpc` module. """ import xmlrpclib import socket import base64 from fs.base import * from fs.errors import * from fs.path import * from fs import iotools from fs.filelike import StringIO import six from six import PY3, b def re_raise_faults(func): """Decorator to re-raise XML-RPC faults as proper exceptions.""" def wrapper(*args, **kwds): try: return func(*args, **kwds) except (xmlrpclib.Fault), f: #raise # Make sure it's in a form we can handle print f.faultString bits = f.faultString.split(" ") if bits[0] not in ["<type", "<class"]: raise f # Find the class/type object bits = " ".join(bits[1:]).split(">:") cls = bits[0] msg = ">:".join(bits[1:]) cls = cls.strip('\'') print "-" + cls cls = _object_by_name(cls) # Re-raise using the remainder of the fault code as message if cls: if issubclass(cls, FSError): raise cls('', msg=msg) else: raise cls(msg) raise f except socket.error, e: raise RemoteConnectionError(str(e), details=e) return wrapper def _object_by_name(name, root=None): """Look up an object by dotted-name notation.""" bits = name.split(".") if root is None: try: obj = globals()[bits[0]] except KeyError: try: obj = __builtins__[bits[0]] except KeyError: obj = __import__(bits[0], globals()) else: obj = getattr(root, bits[0]) if len(bits) > 1: return _object_by_name(".".join(bits[1:]), obj) else: return obj class ReRaiseFaults: """XML-RPC proxy wrapper that re-raises Faults as proper Exceptions.""" def __init__(self, obj): self._obj = obj def __getattr__(self, attr): val = getattr(self._obj, attr) if callable(val): val = re_raise_faults(val) self.__dict__[attr] = val return val class RPCFS(FS): """Access a filesystem exposed via XML-RPC. This class provides the client-side logic for accessing a remote FS object, and is dual to the RPCFSServer class defined in fs.expose.xmlrpc. Example:: fs = RPCFS("http://my.server.com/filesystem/location/") """ _meta = {'thread_safe' : True, 'virtual': False, 'network' : True, } def __init__(self, uri, transport=None): """Constructor for RPCFS objects. The only required argument is the URI of the server to connect to. This will be passed to the underlying XML-RPC server proxy object, along with the 'transport' argument if it is provided. :param uri: address of the server """ super(RPCFS, self).__init__(thread_synchronize=True) self.uri = uri self._transport = transport self.proxy = self._make_proxy() self.isdir('/') @synchronize def _make_proxy(self): kwds = dict(allow_none=True, use_datetime=True) if self._transport is not None: proxy = xmlrpclib.ServerProxy(self.uri, self._transport, **kwds) else: proxy = xmlrpclib.ServerProxy(self.uri, **kwds) return ReRaiseFaults(proxy) def __str__(self): return '<RPCFS: %s>' % (self.uri,) def __repr__(self): return '<RPCFS: %s>' % (self.uri,) @synchronize def __getstate__(self): state = super(RPCFS, self).__getstate__() try: del state['proxy'] except KeyError: pass return state def __setstate__(self, state): super(RPCFS, self).__setstate__(state) self.proxy = self._make_proxy() def encode_path(self, path): """Encode a filesystem path for sending over the wire. Unfortunately XMLRPC only supports ASCII strings, so this method must return something that can be represented in ASCII. The default is base64-encoded UTF8. 
""" return six.text_type(base64.b64encode(path.encode("utf8")), 'ascii') def decode_path(self, path): """Decode paths arriving over the wire.""" return six.text_type(base64.b64decode(path.encode('ascii')), 'utf8') @synchronize def getmeta(self, meta_name, default=NoDefaultMeta): if default is NoDefaultMeta: meta = self.proxy.getmeta(meta_name) else: meta = self.proxy.getmeta_default(meta_name, default) if isinstance(meta, basestring): # To allow transport of meta with invalid xml chars (like null) meta = self.encode_path(meta) return meta @synchronize def hasmeta(self, meta_name): return self.proxy.hasmeta(meta_name) @synchronize @iotools.filelike_to_stream def open(self, path, mode='r', buffering=-1, encoding=None, errors=None, newline=None, line_buffering=False, **kwargs): # TODO: chunked transport of large files epath = self.encode_path(path) if "w" in mode: self.proxy.set_contents(epath, xmlrpclib.Binary(b(""))) if "r" in mode or "a" in mode or "+" in mode: try: data = self.proxy.get_contents(epath, "rb").data except IOError: if "w" not in mode and "a" not in mode: raise ResourceNotFoundError(path) if not self.isdir(dirname(path)): raise ParentDirectoryMissingError(path) self.proxy.set_contents(path, xmlrpclib.Binary(b(""))) else: data = b("") f = StringIO(data) if "a" not in mode: f.seek(0, 0) else: f.seek(0, 2) oldflush = f.flush oldclose = f.close oldtruncate = f.truncate def newflush(): self._lock.acquire() try: oldflush() self.proxy.set_contents(epath, xmlrpclib.Binary(f.getvalue())) finally: self._lock.release() def newclose(): self._lock.acquire() try: f.flush() oldclose() finally: self._lock.release() def newtruncate(size=None): self._lock.acquire() try: oldtruncate(size) f.flush() finally: self._lock.release() f.flush = newflush f.close = newclose f.truncate = newtruncate return f @synchronize def exists(self, path): path = self.encode_path(path) return self.proxy.exists(path) @synchronize def isdir(self, path): path = self.encode_path(path) return self.proxy.isdir(path) @synchronize def isfile(self, path): path = self.encode_path(path) return self.proxy.isfile(path) @synchronize def listdir(self, path="./", wildcard=None, full=False, absolute=False, dirs_only=False, files_only=False): enc_path = self.encode_path(path) if not callable(wildcard): entries = self.proxy.listdir(enc_path, wildcard, full, absolute, dirs_only, files_only) entries = [self.decode_path(e) for e in entries] else: entries = self.proxy.listdir(enc_path, None, False, False, dirs_only, files_only) entries = [self.decode_path(e) for e in entries] entries = [e for e in entries if wildcard(e)] if full: entries = [relpath(pathjoin(path, e)) for e in entries] elif absolute: entries = [abspath(pathjoin(path, e)) for e in entries] return entries @synchronize def makedir(self, path, recursive=False, allow_recreate=False): path = self.encode_path(path) return self.proxy.makedir(path, recursive, allow_recreate) @synchronize def remove(self, path): path = self.encode_path(path) return self.proxy.remove(path) @synchronize def removedir(self, path, recursive=False, force=False): path = self.encode_path(path) return self.proxy.removedir(path, recursive, force) @synchronize def rename(self, src, dst): src = self.encode_path(src) dst = self.encode_path(dst) return self.proxy.rename(src, dst) @synchronize def settimes(self, path, accessed_time, modified_time): path = self.encode_path(path) return self.proxy.settimes(path, accessed_time, modified_time) @synchronize def getinfo(self, path): path = self.encode_path(path) info = 
self.proxy.getinfo(path)
        return info

    @synchronize
    def desc(self, path):
        path = self.encode_path(path)
        return self.proxy.desc(path)

    # The xattr methods below originally called self.fs, but RPCFS stores the
    # server proxy as self.proxy (there is no self.fs attribute), so they
    # would raise AttributeError; route them through the proxy like every
    # other remote call.
    @synchronize
    def getxattr(self, path, attr, default=None):
        path = self.encode_path(path)
        attr = self.encode_path(attr)
        return self.proxy.getxattr(path, attr, default)

    @synchronize
    def setxattr(self, path, attr, value):
        path = self.encode_path(path)
        attr = self.encode_path(attr)
        return self.proxy.setxattr(path, attr, value)

    @synchronize
    def delxattr(self, path, attr):
        path = self.encode_path(path)
        attr = self.encode_path(attr)
        return self.proxy.delxattr(path, attr)

    @synchronize
    def listxattrs(self, path):
        path = self.encode_path(path)
        return [self.decode_path(a) for a in self.proxy.listxattrs(path)]

    @synchronize
    def copy(self, src, dst, overwrite=False, chunk_size=16384):
        src = self.encode_path(src)
        dst = self.encode_path(dst)
        return self.proxy.copy(src, dst, overwrite, chunk_size)

    @synchronize
    def move(self, src, dst, overwrite=False, chunk_size=16384):
        src = self.encode_path(src)
        dst = self.encode_path(dst)
        return self.proxy.move(src, dst, overwrite, chunk_size)

    @synchronize
    def movedir(self, src, dst, overwrite=False, ignore_errors=False, chunk_size=16384):
        src = self.encode_path(src)
        dst = self.encode_path(dst)
        return self.proxy.movedir(src, dst, overwrite, ignore_errors, chunk_size)

    @synchronize
    def copydir(self, src, dst, overwrite=False, ignore_errors=False, chunk_size=16384):
        src = self.encode_path(src)
        dst = self.encode_path(dst)
        return self.proxy.copydir(src, dst, overwrite, ignore_errors, chunk_size)
bsd-3-clause
6,380,686,112,742,397,000
30.373961
123
0.555183
false
4.05659
false
false
false
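A hedged client-side sketch for the RPCFS class above, in the module's own Python 2 style; the URL is hypothetical and assumes an RPCFSServer (from fs.expose.xmlrpc, per the module docstring) is already listening there:

from fs.rpcfs import RPCFS

fs = RPCFS("http://127.0.0.1:8000/")
if not fs.exists("notes"):
    fs.makedir("notes")
f = fs.open("notes/hello.txt", "w")   # buffered locally, pushed to the server on flush/close
f.write(b"hello remote world")
f.close()
print fs.listdir("notes")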
orlenko/bccf
src/pybb/permissions.py
1
6478
# -*- coding: utf-8 -*- """ Extensible permission system for pybbm """ from django.utils.importlib import import_module from django.db.models import Q from pybb import defaults def _resolve_class(name): """ resolves a class function given as string, returning the function """ if not name: return False modname, funcname = name.rsplit('.', 1) return getattr(import_module(modname), funcname)() class DefaultPermissionHandler(object): """ Default Permission handler. If you want to implement custom permissions (for example, private forums based on some application-specific settings), you can inherit from this class and override any of the `filter_*` and `may_*` methods. Methods starting with `may` are expected to return `True` or `False`, whereas methods starting with `filter_*` should filter the queryset they receive, and return a new queryset containing only the objects the user is allowed to see. To activate your custom permission handler, set `settings.PYBB_PERMISSION_HANDLER` to the full qualified name of your class, e.g. "`myapp.pybb_adapter.MyPermissionHandler`". """ # # permission checks on categories # def filter_categories(self, user, qs): """ return a queryset with categories `user` is allowed to see """ return qs.filter(hidden=False) if not user.is_staff else qs def may_view_category(self, user, category): """ return True if `user` may view this category, False if not """ return user.is_staff or not category.hidden # # permission checks on forums # def filter_forums(self, user, qs): """ return a queryset with forums `user` is allowed to see """ return qs.filter(Q(hidden=False) & Q(category__hidden=False)) if not user.is_staff else qs def may_view_forum(self, user, forum): """ return True if user may view this forum, False if not """ return user.is_staff or ( forum.hidden == False and forum.category.hidden == False ) def may_create_topic(self, user, forum): """ return True if `user` is allowed to create a new topic in `forum` """ return user.has_perm('pybb.add_post') # # permission checks on topics # def filter_topics(self, user, qs): """ return a queryset with topics `user` is allowed to see """ if not user.is_staff: qs = qs.filter(Q(forum__hidden=False) & Q(forum__category__hidden=False)) if not user.is_superuser: if user.is_authenticated(): qs = qs.filter(Q(forum__moderators=user) | Q(user=user) | Q(on_moderation=False)).distinct() else: qs = qs.filter(on_moderation=False) return qs def may_view_topic(self, user, topic): """ return True if user may view this topic, False otherwise """ if user.is_superuser: return True if not user.is_staff and (topic.forum.hidden or topic.forum.category.hidden): return False # only staff may see hidden forum / category if topic.on_moderation: return user.is_authenticated() and (user == topic.user or user in topic.forum.moderators) return True def may_moderate_topic(self, user, topic): return user.is_superuser or user in topic.forum.moderators.all() def may_close_topic(self, user, topic): """ return True if `user` may close `topic` """ return self.may_moderate_topic(user, topic) def may_open_topic(self, user, topic): """ return True if `user` may open `topic` """ return self.may_moderate_topic(user, topic) def may_stick_topic(self, user, topic): """ return True if `user` may stick `topic` """ return self.may_moderate_topic(user, topic) def may_unstick_topic(self, user, topic): """ return True if `user` may unstick `topic` """ return self.may_moderate_topic(user, topic) def may_create_post(self, user, topic): """ return True if `user` is allowed to 
create a new post in `topic` """ if topic.forum.hidden and (not user.is_staff): # if topic is hidden, only staff may post return False if topic.closed and (not user.is_staff): # if topic is closed, only staff may post return False # only user which have 'pybb.add_post' permission may post return user.has_perm('pybb.add_post') def may_post_as_admin(self, user): """ return True if `user` may post as admin """ return user.is_staff # # permission checks on posts # def filter_posts(self, user, qs): """ return a queryset with posts `user` is allowed to see """ # first filter by topic availability if not user.is_staff: qs = qs.filter(Q(topic__forum__hidden=False) & Q(topic__forum__category__hidden=False)) if not defaults.PYBB_PREMODERATION or user.is_superuser: # superuser may see all posts, also if premoderation is turned off moderation # flag is ignored return qs elif user.is_authenticated(): # post is visible if user is author, post is not on moderation, or user is moderator # for this forum qs = qs.filter(Q(user=user) | Q(on_moderation=False) | Q(topic__forum__moderators=user)) else: # anonymous user may not see posts which are on moderation qs = qs.filter(on_moderation=False) return qs def may_view_post(self, user, post): """ return True if `user` may view `post`, False otherwise """ if user.is_superuser: return True if post.on_moderation: return post.user == user or user in post.topic.forum.moderators.all() return True def may_edit_post(self, user, post): """ return True if `user` may edit `post` """ return user.is_superuser or post.user == user or self.may_moderate_topic(user, post.topic) def may_delete_post(self, user, post): """ return True if `user` may delete `post` """ return self.may_moderate_topic(user, post.topic) # # permission checks on users # def may_block_user(self, user, user_to_block): """ return True if `user` may block `user_to_block` """ return user.has_perm('pybb.block_users') perms = _resolve_class(defaults.PYBB_PERMISSION_HANDLER)
unlicense
-5,898,669,304,835,996,000
38.506098
108
0.629824
false
3.926061
false
false
false
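Following the DefaultPermissionHandler docstring above, a hedged sketch of a custom handler; the `staff_only` forum field is hypothetical, and the class would be activated via settings.PYBB_PERMISSION_HANDLER = "myapp.pybb_adapter.MyPermissionHandler":

from pybb.permissions import DefaultPermissionHandler

class MyPermissionHandler(DefaultPermissionHandler):
    """ hide forums flagged with a (hypothetical) staff_only field """

    def filter_forums(self, user, qs):
        qs = super(MyPermissionHandler, self).filter_forums(user, qs)
        return qs if user.is_staff else qs.filter(staff_only=False)

    def may_view_forum(self, user, forum):
        allowed = super(MyPermissionHandler, self).may_view_forum(user, forum)
        return allowed and (user.is_staff or not forum.staff_only)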
ktok07b6/polyphony
polyphony/compiler/libs.py
1
3330
single_port_ram = """module SinglePortRam # ( parameter DATA_WIDTH = 8, parameter ADDR_WIDTH = 4, parameter RAM_DEPTH = 1 << ADDR_WIDTH ) ( input clk, input rst, input [ADDR_WIDTH-1:0] ram_addr, input [DATA_WIDTH-1:0] ram_d, input ram_we, output [DATA_WIDTH-1:0] ram_q ); reg [DATA_WIDTH-1:0] mem [0:RAM_DEPTH-1]; reg [ADDR_WIDTH-1:0] read_addr; assign ram_q = mem[read_addr]; always @ (posedge clk) begin if (ram_we) mem[ram_addr] <= ram_d; read_addr <= ram_addr; end endmodule """ bidirectional_single_port_ram = """module BidirectionalSinglePortRam # ( parameter DATA_WIDTH = 8, parameter ADDR_WIDTH = 4, parameter RAM_LENGTH = 16, parameter RAM_DEPTH = 1 << (ADDR_WIDTH-1) ) ( input clk, input rst, input [ADDR_WIDTH-1:0] ram_addr, input [DATA_WIDTH-1:0] ram_d, input ram_we, output [DATA_WIDTH-1:0] ram_q, output [ADDR_WIDTH-1:0] ram_len ); reg [DATA_WIDTH-1:0] mem [0:RAM_DEPTH-1]; reg [ADDR_WIDTH-1:0] read_addr; /* integer i; initial begin for (i = 0; i < RAM_DEPTH; i = i + 1) mem[i] = 0; end */ function [ADDR_WIDTH-1:0] address ( input [ADDR_WIDTH-1:0] in_addr ); begin if (in_addr[ADDR_WIDTH-1] == 1'b1) begin address = RAM_LENGTH + in_addr; end else begin address = in_addr; end end endfunction // address wire [ADDR_WIDTH-1:0] a; assign a = address(ram_addr); assign ram_q = mem[read_addr]; assign ram_len = RAM_LENGTH; always @ (posedge clk) begin if (ram_we) mem[a] <= ram_d; read_addr <= a; end endmodule """ fifo = """module FIFO # ( parameter integer DATA_WIDTH = 32, parameter integer ADDR_WIDTH = 2, parameter integer LENGTH = 4 ) ( input clk, input rst, input [DATA_WIDTH - 1 : 0] din, input write, output full, output [DATA_WIDTH - 1 : 0] dout, input read, output empty, output will_full, output will_empty ); reg [ADDR_WIDTH - 1 : 0] head; reg [ADDR_WIDTH - 1 : 0] tail; reg [ADDR_WIDTH : 0] count; wire we; assign we = write && !full; reg [DATA_WIDTH - 1 : 0] mem [0 : LENGTH - 1]; initial begin : initialize_mem integer i; for (i = 0; i < LENGTH; i = i + 1) begin mem[i] = 0; end end always @(posedge clk) begin if (we) mem[head] <= din; end assign dout = mem[tail]; assign full = count >= LENGTH; assign empty = count == 0; assign will_full = write && !read && count == LENGTH-1; assign will_empty = read && !write && count == 1; always @(posedge clk) begin if (rst == 1) begin head <= 0; tail <= 0; count <= 0; end else begin if (write && read) begin if (count == LENGTH) begin count <= count - 1; tail <= (tail == (LENGTH - 1)) ? 0 : tail + 1; end else if (count == 0) begin count <= count + 1; head <= (head == (LENGTH - 1)) ? 0 : head + 1; end else begin count <= count; head <= (head == (LENGTH - 1)) ? 0 : head + 1; tail <= (tail == (LENGTH - 1)) ? 0 : tail + 1; end end else if (write) begin if (count < LENGTH) begin count <= count + 1; head <= (head == (LENGTH - 1)) ? 0 : head + 1; end end else if (read) begin if (count > 0) begin count <= count - 1; tail <= (tail == (LENGTH - 1)) ? 0 : tail + 1; end end end end endmodule """
mit
682,272,391,786,811,300
20.907895
70
0.571471
false
2.878133
false
false
false
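The strings above are self-contained Verilog modules, so a hedged consumer sketch is plain concatenation; the output filename is illustrative. Widths and depths are overridden per instance through Verilog parameters, e.g. SinglePortRam #(.DATA_WIDTH(16), .ADDR_WIDTH(10)) ram (...):

from polyphony.compiler.libs import single_port_ram, fifo

with open('polyphony_libs.v', 'w') as out:  # emit the library once per design
    out.write(single_port_ram)
    out.write(fifo)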
tonioo/modoboa
modoboa/core/forms.py
1
4606
# -*- coding: utf-8 -*- """Core forms.""" from __future__ import unicode_literals from django import forms from django.contrib.auth import ( forms as auth_forms, get_user_model, password_validation ) from django.db.models import Q from django.utils.translation import ugettext as _, ugettext_lazy from modoboa.core.models import User from modoboa.parameters import tools as param_tools class LoginForm(forms.Form): """User login form.""" username = forms.CharField( label=ugettext_lazy("Username"), widget=forms.TextInput(attrs={"class": "form-control"}) ) password = forms.CharField( label=ugettext_lazy("Password"), widget=forms.PasswordInput(attrs={"class": "form-control"}) ) rememberme = forms.BooleanField( initial=False, required=False ) class ProfileForm(forms.ModelForm): """Form to update User profile.""" oldpassword = forms.CharField( label=ugettext_lazy("Old password"), required=False, widget=forms.PasswordInput(attrs={"class": "form-control"}) ) newpassword = forms.CharField( label=ugettext_lazy("New password"), required=False, widget=forms.PasswordInput(attrs={"class": "form-control"}) ) confirmation = forms.CharField( label=ugettext_lazy("Confirmation"), required=False, widget=forms.PasswordInput(attrs={"class": "form-control"}) ) class Meta(object): model = User fields = ("first_name", "last_name", "language", "phone_number", "secondary_email") widgets = { "first_name": forms.TextInput(attrs={"class": "form-control"}), "last_name": forms.TextInput(attrs={"class": "form-control"}) } def __init__(self, update_password, *args, **kwargs): super(ProfileForm, self).__init__(*args, **kwargs) if not update_password: del self.fields["oldpassword"] del self.fields["newpassword"] del self.fields["confirmation"] def clean_oldpassword(self): if self.cleaned_data["oldpassword"] == "": return self.cleaned_data["oldpassword"] if param_tools.get_global_parameter("authentication_type") != "local": return self.cleaned_data["oldpassword"] if not self.instance.check_password(self.cleaned_data["oldpassword"]): raise forms.ValidationError(_("Old password mismatchs")) return self.cleaned_data["oldpassword"] def clean_confirmation(self): newpassword = self.cleaned_data["newpassword"] confirmation = self.cleaned_data["confirmation"] if not newpassword and not confirmation: return confirmation if newpassword != confirmation: raise forms.ValidationError(_("Passwords mismatch")) password_validation.validate_password(confirmation, self.instance) return confirmation def save(self, commit=True): user = super(ProfileForm, self).save(commit=False) if commit: if self.cleaned_data.get("confirmation", "") != "": user.set_password( self.cleaned_data["confirmation"], self.cleaned_data["oldpassword"] ) user.save() return user class APIAccessForm(forms.Form): """Form to control API access.""" enable_api_access = forms.BooleanField( label=ugettext_lazy("Enable API access"), required=False) def __init__(self, *args, **kwargs): """Initialize form.""" user = kwargs.pop("user") super(APIAccessForm, self).__init__(*args, **kwargs) self.fields["enable_api_access"].initial = hasattr(user, "auth_token") class PasswordResetForm(auth_forms.PasswordResetForm): """Custom password reset form.""" def get_users(self, email): """Return matching user(s) who should receive a reset.""" return ( get_user_model()._default_manager.filter( email__iexact=email, is_active=True) .exclude(Q(secondary_email__isnull=True) | Q(secondary_email="")) ) def send_mail(self, subject_template_name, email_template_name, context, from_email, to_email, 
html_email_template_name=None): """Send message to secondary email instead.""" to_email = context["user"].secondary_email super(PasswordResetForm, self).send_mail( subject_template_name, email_template_name, context, from_email, to_email, html_email_template_name)
isc
-1,737,334,883,102,439,700
34.160305
78
0.621146
false
4.276695
false
false
false
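A hedged view-side sketch of the `update_password` switch in ProfileForm above: passing False strips the three password fields from the form entirely. The policy check shown is illustrative, not part of modoboa:

can_change = not request.user.is_superuser  # hypothetical site policy
form = ProfileForm(can_change, request.POST or None, instance=request.user)
if request.method == "POST" and form.is_valid():
    form.save()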
praekelt/txtalert
txtalert/apps/bookings/views.py
1
7598
from django.http import HttpResponse, Http404, HttpResponseRedirect from django.template import RequestContext from django.shortcuts import render_to_response, get_object_or_404 from django.contrib import messages from django.contrib.auth.decorators import login_required from django.core.urlresolvers import reverse from django.core.paginator import Paginator import logging from django.utils import timezone from txtalert.core.models import Visit, PleaseCallMe, MSISDN, AuthProfile, Patient from txtalert.core.forms import RequestCallForm from txtalert.core.utils import normalize_msisdn from datetime import date, datetime from functools import wraps def effective_page_range_for(page,paginator,delta=3): return [p for p in range(page.number-delta,page.number+delta+1) if (p > 0 and p <= paginator.num_pages)] def auth_profile_required(func): @wraps(func) def wrapper(request, *args, **kwargs): try: return func(request, *args, **kwargs) except AuthProfile.DoesNotExist: return render_to_response('auth_profile_error.html', { }, context_instance = RequestContext(request)) return wrapper @login_required @auth_profile_required def index(request): profile = request.user.get_profile() return render_to_response("index.html", { 'profile': profile, 'patient': profile.patient, }, context_instance = RequestContext(request)) @login_required def appointment_change(request, visit_id): profile = request.user.get_profile() visit = get_object_or_404(Visit, pk=visit_id) change_requested = request.POST.get('when') if change_requested == 'later': visit.reschedule_later() messages.add_message(request, messages.INFO, "Your request to change the appointment has been sent to " \ "the clinic. You will be notified as soon as possible.") elif change_requested == 'earlier': visit.reschedule_earlier() messages.add_message(request, messages.INFO, "Your request to change the appointment has been sent to " \ "the clinic. 
You will be notified as soon as possible.") return render_to_response("appointment/change.html", { 'profile': profile, 'patient': profile.patient, 'visit': visit, 'change_requested': change_requested, }, context_instance = RequestContext(request)) @login_required def appointment_upcoming(request): profile = request.user.get_profile() patient = profile.patient paginator = Paginator(patient.visit_set.upcoming(), 5) page = paginator.page(request.GET.get('p', 1)) return render_to_response("appointment/upcoming.html", { 'profile': profile, 'patient': patient, 'paginator': paginator, 'page': page, 'effective_page_range': effective_page_range_for(page, paginator) }, context_instance = RequestContext(request)) @login_required def appointment_history(request): profile = request.user.get_profile() patient = profile.patient paginator = Paginator(patient.visit_set.past().order_by('-date'), 5) page = paginator.page(request.GET.get('p', 1)) return render_to_response("appointment/history.html", { 'profile': profile, 'patient': profile.patient, 'paginator': paginator, 'page': page, 'effective_page_range': effective_page_range_for(page, paginator) }, context_instance=RequestContext(request)) @login_required def attendance_barometer(request): profile = request.user.get_profile() patient = profile.patient visits = patient.visit_set.all() attended = visits.filter(status='a').count() missed = visits.filter(status='m').count() total = visits.filter(date__lt=date.today()).count() if total: attendance = int(float(attended) / float(total) * 100) else: attendance = 0.0 return render_to_response("attendance_barometer.html", { 'profile': profile, 'patient': patient, 'attendance': attendance, 'attended': attended, 'missed': missed, 'total': total }, context_instance=RequestContext(request)) def request_call(request): if request.POST: form = RequestCallForm(request.POST) if form.is_valid(): clinic = form.cleaned_data['clinic'] # normalize msisdn = normalize_msisdn(form.cleaned_data['msisdn']) # orm object msisdn_record, _ = MSISDN.objects.get_or_create(msisdn=msisdn) pcm = PleaseCallMe(user=clinic.user, clinic=clinic, msisdn=msisdn_record, timestamp=timezone.now(), message='Please call me!', notes='Call request issued via txtAlert Bookings') pcm.save() messages.add_message(request, messages.INFO, 'Your call request has been registered. 
'\ 'The clinic will call you back as soon as possible.') return HttpResponseRedirect(reverse('bookings:request_call')) else: form = RequestCallForm(initial={ 'msisdn': '' if request.user.is_anonymous() else request.user.username }) if request.user.is_anonymous(): profile = patient = None else: profile = request.user.get_profile() patient = profile.patient return render_to_response('request_call.html', { 'profile': profile, 'patient': patient, 'form': form, }, context_instance=RequestContext(request)) def widget_landing(request): if 'patient_id' in request.GET \ and 'msisdn' in request.GET: try: msisdn = normalize_msisdn(request.GET.get('msisdn')) patient_id = request.GET.get('patient_id') patient = Patient.objects.get(active_msisdn__msisdn=msisdn, te_id=patient_id) try: visit = patient.next_visit() except Visit.DoesNotExist: visit = None visits = patient.visit_set.all() context = { 'msisdn': msisdn, 'patient_id': patient_id, 'patient': patient, 'name': patient.name, 'surname': patient.surname, 'next_appointment': visit.date if visit else '', 'visit_id': visit.pk if visit else '', 'clinic': visit.clinic.name if visit else '', 'attendance': int((1.0 - patient.risk_profile) * 100), 'total': visits.count(), 'attended': visits.filter(status='a').count(), 'rescheduled': visits.filter(status='r').count(), 'missed': visits.filter(status='m').count(), } except Patient.DoesNotExist: context = { 'patient_id': patient_id, 'msisdn': msisdn, } else: context = { 'patient_id': request.GET.get('patient_id', ''), 'msisdn': request.GET.get('msisdn', ''), } print context return render_to_response('widget_landing.html', context, context_instance=RequestContext(request)) def todo(request): """Anything that resolves to here still needs to be completed""" return HttpResponse("This still needs to be implemented.") def not_found(request): """test 404 template rendering""" raise Http404 def server_error(request): """test 500 template rendering""" raise Exception, '500 testing'
gpl-3.0
-763,102,905,537,551,700
36.995
93
0.622664
false
4.007384
false
false
false
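A quick, hedged check of effective_page_range_for above with throwaway stand-ins for Django's Page and Paginator (only .number and .num_pages are consulted):

class FakePaginator(object):
    num_pages = 10

class FakePage(object):
    number = 5

assert effective_page_range_for(FakePage(), FakePaginator()) == [2, 3, 4, 5, 6, 7, 8]
assert effective_page_range_for(FakePage(), FakePaginator(), delta=1) == [4, 5, 6]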
Flamacue/pretix
src/tests/plugins/test_ticketoutputpdf.py
2
1826
from datetime import timedelta from decimal import Decimal from io import BytesIO import pytest from django.utils.timezone import now from PyPDF2 import PdfFileReader from pretix.base.models import ( Event, Item, ItemVariation, Order, OrderPosition, Organizer, ) from pretix.plugins.ticketoutputpdf.ticketoutput import PdfTicketOutput @pytest.fixture def env(): o = Organizer.objects.create(name='Dummy', slug='dummy') event = Event.objects.create( organizer=o, name='Dummy', slug='dummy', date_from=now(), live=True ) o1 = Order.objects.create( code='FOOBAR', event=event, email='[email protected]', status=Order.STATUS_PENDING, datetime=now(), expires=now() + timedelta(days=10), total=Decimal('13.37'), payment_provider='banktransfer' ) shirt = Item.objects.create(event=event, name='T-Shirt', default_price=12) shirt_red = ItemVariation.objects.create(item=shirt, default_price=14, value="Red") OrderPosition.objects.create( order=o1, item=shirt, variation=shirt_red, price=12, attendee_name=None, secret='1234' ) OrderPosition.objects.create( order=o1, item=shirt, variation=shirt_red, price=12, attendee_name=None, secret='5678' ) return event, o1 @pytest.mark.django_db def test_generate_pdf(env, mocker): mocked = mocker.patch('reportlab.pdfgen.canvas.Canvas.drawString') event, order = env event.settings.set('ticketoutput_pdf_code_x', 30) event.settings.set('ticketoutput_pdf_code_y', 50) event.settings.set('ticketoutput_pdf_code_s', 2) o = PdfTicketOutput(event) fname, ftype, buf = o.generate(order.positions.first()) assert ftype == 'application/pdf' pdf = PdfFileReader(BytesIO(buf)) assert pdf.numPages == 1 assert mocked.called
apache-2.0
6,230,654,048,277,624,000
33.45283
87
0.693866
false
3.406716
false
false
false
abusesa/abusehelper
abusehelper/core/rules/classifier.py
1
1033
class Classifier(object): def __init__(self): self._rules = dict() def inc(self, rule, class_id): classes = self._rules.get(rule, None) if classes is None: classes = dict() self._rules[rule] = classes classes[class_id] = classes.get(class_id, 0) + 1 def dec(self, rule, class_id): classes = self._rules.get(rule, None) if classes is None: return count = classes.get(class_id, 0) - 1 if count > 0: classes[class_id] = count else: classes.pop(class_id, None) if not classes: self._rules.pop(rule, None) def classify(self, obj): result = set() cache = dict() for rule, classes in self._rules.iteritems(): if result.issuperset(classes): continue if rule.match(obj, cache): result.update(classes) return result def is_empty(self): return not self._rules
mit
496,745,086,639,479,040
25.487179
56
0.516941
false
4.066929
false
false
false
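A hedged usage sketch for the reference-counted Classifier above; the rule object is an illustrative stand-in for anything hashable with a match(obj, cache) method, and the Python 2 prints match the module:

class ContainsRule(object):  # illustrative stand-in
    def __init__(self, key):
        self.key = key

    def match(self, obj, cache):
        return self.key in obj

c = Classifier()
rule = ContainsRule("malware")
c.inc(rule, "mal-feed")
c.inc(rule, "mal-feed")            # counted twice, so two dec() calls are needed
print c.classify({"malware": 1})   # -> set(['mal-feed'])
c.dec(rule, "mal-feed")
c.dec(rule, "mal-feed")
print c.is_empty()                 # -> True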
geobricks/pgeo
pgeo/metadata/metadata.py
1
2602
import os
import json
from pgeo.utils.json import dict_merge_and_convert_dates
from pgeo.metadata.db_metadata import DBMetadata
from pgeo.metadata.search import MongoSearch
from pgeo.utils import log
from pgeo.config.metadata.core import template as core_template
from pgeo.config.metadata.raster import template as raster_template

log = log.logger(__name__)

# REMOVE EXAMPLE
#db.layer.remove({'meContent.seCoverage.coverageSector.codes.code': {'$in': ['EARTHSTAT']}})
#db.layer.remove({'meContent.seCoverage.coverageSector.codes.code': {'$in': ['TRMM']}})
# db.layer.remove({'meContent.seCoverage.coverageSector.codes.code': {'$in': ['MODIS']}})
# db.layer.find({'meContent.seCoverage.coverageSector.codes.code': {'$in': ['MODIS']}})
#db.layer.remove({'meContent.seCoverage.coverageSector.codes.code': {'$in': ['MODIS-SADC']}})
#db.layer.remove({'meContent.seCoverage.coverageSector.codes.code': {'$in': ['MODIS_TEST']}})
#db.layer.remove({'meContent.seCoverage.coverageSector.codes.code': {'$in': ['Doukkala-Seasonal-wheat']}})
#db.layer.remove({'meContent.seCoverage.coverageSector.codes.code': {'$in': ['Doukkala - actual evapotransipiration']}})

# with Regular expression
#db.layer.remove({'meContent.seCoverage.coverageSector.codes.code': {$regex: /^MOROCCO/}})
#db.layer.remove({'meContent.seCoverage.coverageSector.codes.code': {$regex: /^JRC/}})
#db.layer.find({'meContent.seCoverage.coverageSector.codes.code': {$regex: /^UMD/}})
#db.layer.find({'uid': {$regex: /^UMD/}})


class Metadata:

    db_metadata = None
    search = None

    def __init__(self, settings):
        self.settings = settings
        log.info(settings)
        self.db_metadata = DBMetadata(settings["db"]["metadata"])
        self.search = MongoSearch(settings["db"]["metadata"]['connection'],
                                  settings["db"]["metadata"]["database"],
                                  settings["db"]["metadata"]['document']['layer'])
        log.info("---Metadata initialization---")
        log.info(self.db_metadata)
        log.info(self.search)

    def merge_layer_metadata(self, template_name, data):
        """
        Merge user's data with the core metadata and the selected template
        @param template_name: Name of the template, e.g. 'modis'
        @param data: User data, in JSON format
        @return: Merged JSON
        """
        # `out` was previously unbound on the "vector" path (a NameError at
        # runtime); fall back to the core template until a vector template
        # is available.
        out = core_template
        if template_name == "raster":
            out = dict_merge_and_convert_dates(core_template, raster_template)
        elif template_name == "vector":
            log.error("TODO: vector template")
        out = dict_merge_and_convert_dates(out, data)
        #log.info(out)
        return out
gpl-2.0
-6,503,222,040,618,752,000
40.31746
164
0.673328
false
3.423684
false
false
false
habalux/pglog2grok
pglog2grok.py
1
4033
#!/usr/bin/env python # # Small script for generating a logstash grok filter and patterns for postgresql # using a non-default log_line_prefix setting. # # Output of this script has NOT been tested in any production environment as of yet. # # Copyright (c) 2014, Teemu Haapoja <[email protected]> # All rights reserved. # # Redistribution and use in source and binary forms, with or without modification, # are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING # IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY # OF SUCH DAMAGE. # Custom patterns # PGLOG_TZ is a modified TZ pattern (original didn't recognize "EET" as valid) pg_patterns = """ PGLOG_TZ (?:[PMCE][SDE]T|UTC) PGLOG_APPLICATION_NAME .*? PGLOG_USER_NAME .*? PGLOG_DATABASE_NAME .*? PGLOG_REMOTE_HOST_PORT (\[local\]|%{IP:host}\(%{POSINT:port}\)) PGLOG_REMOTE_HOST (\[local\]|%{IP:host}) PGLOG_PROCESS_ID %{POSINT} PGLOG_TIMESTAMP %{TIMESTAMP_ISO8601} %{PGLOG_TZ:TZ} PGLOG_COMMAND_TAG .*? PGLOG_SQL_STATE .*? 
PGLOG_SESSION_ID [0-9\.A-Fa-f]+ PGLOG_SESSION_LINE_NUMBER %{POSINT} PGLOG_SESSION_START_TIMESTAMP %{PGLOG_TIMESTAMP} PGLOG_VIRTUAL_TRANSACTION_ID ([\/0-9A-Fa-f]+) PGLOG_TRANSACTION_ID ([0-9A-Fa-f])+ PGLOG_LOGLEVEL (DEBUG[1-5]|INFO|NOTICE|WARNING|ERROR|LOG|FATAL|PANIC|DETAIL) PGLOG_MESSAGE .* """ def prefix_to_grok(pr): replace_map = { r'%a' : "%{PGLOG_APPLICATION_NAME:application_name}", r'%u' : "%{PGLOG_USER_NAME:user_name}", r'%d' : "%{PGLOG_DATABASE_NAME:database_name}", r'%r' : "%{PGLOG_REMOTE_HOST_PORT:remote_host_port}", r'%h' : "%{PGLOG_REMOTE_HOST:remote_host}", r'%p' : "%{PGLOG_PROCESS_ID:process_id}", r'%t' : "%{PGLOG_TIMESTAMP}", r'%m' : "%{PGLOG_TIMESTAMP}", r'%i' : "%{PGLOG_COMMAND_TAG:command_tag}", r'%e' : "%{PGLOG_SQL_STATE:sql_state}", r'%c' : "%{PGLOG_SESSION_ID:session_id}", r'%l' : "%{PGLOG_SESSION_LINE_NUMBER:session_line_number}", r'%s' : "%{PGLOG_SESSION_START_TIMESTAMP:session_start_timestamp}", r'%v' : "%{PGLOG_VIRTUAL_TRANSACTION_ID:virtual_transaction_id}", r'%x' : "%{PGLOG_TRANSACTION_ID:transaction_id}", r'%q' : "", } pr = pr.replace(r'%%',r'%') for k,v in replace_map.items(): pr = pr.replace(k,v) return "%s%%{PGLOG_LOGLEVEL:loglevel}: %%{PGLOG_MESSAGE:message}"%(pr) if __name__ == "__main__": import argparse parser = argparse.ArgumentParser(description="Create a grok pattern for your postgresql configuration") parser.add_argument('-q','--quiet', help="Be quiet, only output the grok pattern", action='store_const', const=True) parser.add_argument('-p', '--prefix', help="log_line_prefix from YOUR postgresql.conf", required=True) args = parser.parse_args() if args.quiet: print prefix_to_grok(args.prefix) else: print "You need to add these patterns to your logstash patterns_dir: " print "> ==== snip === <" print pg_patterns print "> ==== snip === <" print "" print "This is the filter for your log_line_prefix:\n\n%s"%(prefix_to_grok(args.prefix))
bsd-2-clause
-7,059,104,021,549,284,000
38.539216
117
0.706422
false
3.046073
false
false
false
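A hedged example of what the generator above emits; the log_line_prefix value is illustrative, and the output follows directly from prefix_to_grok's replace map:

$ ./pglog2grok.py -q -p '%t [%p]: '
%{PGLOG_TIMESTAMP} [%{PGLOG_PROCESS_ID:process_id}]: %{PGLOG_LOGLEVEL:loglevel}: %{PGLOG_MESSAGE:message}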
TeMPO-Consulting/mediadrop
mediacore/controllers/login.py
1
4471
# This file is a part of MediaDrop (http://www.mediadrop.net), # Copyright 2009-2013 MediaCore Inc., Felix Schwarz and other contributors. # For the exact contribution history, see the git revision log. # The source code contained in this file is licensed under the GPLv3 or # (at your option) any later version. # See LICENSE.txt in the main project directory, for more information. from formencode import Invalid from pylons import request, tmpl_context from mediacore.forms.login import LoginForm from mediacore.lib.base import BaseController from mediacore.lib.helpers import redirect, url_for from mediacore.lib.i18n import _ from mediacore.lib.decorators import expose, observable from mediacore.plugin import events import logging log = logging.getLogger(__name__) login_form = LoginForm() class LoginController(BaseController): @expose('login.html') @observable(events.LoginController.login) def login(self, came_from=None, **kwargs): if request.environ.get('repoze.who.identity'): redirect(came_from or '/') # the friendlyform plugin requires that these values are set in the # query string form_url = url_for('/login/submit', came_from=(came_from or '').encode('utf-8'), __logins=str(self._is_failed_login())) login_errors = None if self._is_failed_login(): login_errors = Invalid('dummy', None, {}, error_dict={ '_form': Invalid(_('Invalid username or password.'), None, {}), 'login': Invalid('dummy', None, {}), 'password': Invalid('dummy', None, {}), }) return dict( login_form = login_form, form_action = form_url, form_values = kwargs, login_errors = login_errors, ) @expose() def login_handler(self): """This is a dummy method. Without a dummy method, Routes will throw a NotImplemented exception. Calls that would route to this method are intercepted by repoze.who, as defined in mediacore.lib.auth """ pass @expose() def logout_handler(self): """This is a dummy method. Without a dummy method, Routes will throw a NotImplemented exception. Calls that would route to this method are intercepted by repoze.who, as defined in mediacore.lib.auth """ pass @expose() @observable(events.LoginController.post_login) def post_login(self, came_from=None, **kwargs): if not request.identity: # The FriendlyForm plugin will always issue a redirect to # /login/continue (post login url) even for failed logins. # If 'came_from' is a protected page (i.e. /admin) we could just # redirect there and the login form will be displayed again with # our login error message. # However if the user tried to login from the front page, this # mechanism doesn't work so go to the login method directly here. self._increase_number_of_failed_logins() return self.login(came_from=came_from) if came_from: redirect(came_from) # It is important to return absolute URLs (if app mounted in subdirectory) if request.perm.contains_permission(u'edit') or request.perm.contains_permission(u'admin'): redirect(url_for('/admin', qualified=True)) redirect(url_for('/', qualified=True)) @expose() @observable(events.LoginController.post_logout) def post_logout(self, came_from=None, **kwargs): redirect('/') def _is_failed_login(self): # repoze.who.logins will always be an integer even if the HTTP login # counter variable contained a non-digit string return (request.environ.get('repoze.who.logins', 0) > 0) def _increase_number_of_failed_logins(self): request.environ['repoze.who.logins'] += 1 def __call__(self, environ, start_response): """Invoke the Controller""" # BaseController.__call__ dispatches to the Controller method # the request is routed to. 
This routing information is # available in environ['pylons.routes_dict'] request.identity = request.environ.get('repoze.who.identity') tmpl_context.identity = request.identity return BaseController.__call__(self, environ, start_response)
gpl-3.0
-8,258,448,679,975,395,000
39.645455
99
0.641244
false
4.198122
false
false
false
cryptica/slapnet
benchmarks/scalable/PhilosophersCM84/make_net.py
1
2045
#!/usr/bin/python3 import sys print(""" petri net "The drinking philosophers for n=2" { places { p1h p1e p2h p2e req1p1 req1p2 req2p1 req2p2 fork1p1 fork1p2 fork2p1 fork2p2 fork1clean fork1dirty fork2clean fork2dirty } transitions { p1req1 p1req2 p1give1 p1give2 p1eat p1done p2req1 p2req2 p2give1 p2give2 p2eat p2done //p1done(2),p1eat(2),p1give1,p1give2,p1req1,p1req2, //p2give1,p2give2,p2req1,p2req2 } arcs { { p1h req1p1 fork1p2 } -> p1req1 -> { p1h req1p2 fork1p2 } { p1h req2p1 fork2p2 } -> p1req2 -> { p1h req2p2 fork2p2 } { p1h req1p1 fork1p1 fork1dirty } -> p1give1 -> { p1h req1p1 fork1p2 fork1clean } { p1h req2p1 fork2p1 fork2dirty } -> p1give2 -> { p1h req2p1 fork2p2 fork2clean } { p1h fork1p1 fork2p1 fork1clean fork2clean } -> p1eat -> { p1e fork1p1 fork2p1 fork1dirty fork2dirty } { p1e } -> p1done -> { p1h } { p2h req1p2 fork1p1 } -> p2req1 -> { p2h req1p1 fork1p1 } { p2h req2p2 fork2p1 } -> p2req2 -> { p2h req2p1 fork2p1 } { p2h req1p2 fork1p2 fork1dirty } -> p2give1 -> { p2h req1p2 fork1p1 fork1clean } { p2h req2p2 fork2p2 fork2dirty } -> p2give2 -> { p2h req2p2 fork2p1 fork2clean } { p2h fork1p2 fork2p2 fork1clean fork2clean } -> p2eat -> { p2e fork1p2 fork2p2 fork1dirty fork2dirty } { p2e } -> p2done -> { p2h } } initial { p1h p2h fork1dirty fork2dirty fork1p1 fork2p1 req1p2 req2p2 } } liveness property "philosopher 1 does not starve" { p1req1 + p1req2 + p1give1 + p1give2 + p1eat + p1done > 0 && p2req1 + p2req2 + p2give1 + p2give2 + p2eat + p2done > 0 && p1eat = 0 } liveness property "philosopher 2 does not starve" { p1req1 + p1req2 + p1give1 + p1give2 + p1eat + p1done > 0 && p2req1 + p2req2 + p2give1 + p2give2 + p2eat + p2done > 0 && p2eat = 0 } safety property "mutual exclusion" { p1e >= 1 && p2e >= 1 } """)
gpl-3.0
-4,088,092,992,242,717,000
33.083333
112
0.605868
false
2.310734
false
false
false
uwosh/uwosh.intranet.policy
uwosh/intranet/policy/importexport.py
1
5459
from Products.CMFCore.utils import getToolByName from Products.LDAPMultiPlugins import manage_addLDAPMultiPlugin def install(context): if not context.readDataFile('uwosh.intranet.policy.txt'): return setupLDAPPlugin(context) def setupLDAPPlugin(context): ldap_plugin_id = 'ldap_authentication' SUBTREE = 2 # this value comes from the zmi "Add LDAP Multi Plugin" html source acl_users = context.getSite().acl_users if hasattr(acl_users, ldap_plugin_id): logger = context.getLogger('uwosh.intranet.policy') logger.warning('Not configuring LDAP plugin, because "acl_users.%s" already exists.' % ldap_plugin_id) return manage_addLDAPMultiPlugin( acl_users, id=ldap_plugin_id, title='LDAP Authentication', LDAP_server='ldap.uwosh.edu:389', login_attr='uid', uid_attr='uid', users_base='ou=people,o=uwosh.edu,dc=uwosh,dc=edu', users_scope=SUBTREE, roles='Anonymous', groups_base='ou=groups,o=uwosh.edu,dc=uwosh,dc=edu', groups_scope=SUBTREE, binduid='', bindpwd='', binduid_usage=False, rdn_attr='uid', local_groups=False, use_ssl=False, encryption='SHA', read_only=True ) ldap_auth = getattr(acl_users, ldap_plugin_id) ldap_schema = { 'cn': { 'public_name': 'fullname', 'binary': False, 'ldap_name': 'cn', 'friendly_name': 'Canonical Name', 'multivalued': False }, 'mail': { 'public_name': 'email', 'binary': False, 'ldap_name': 'mail', 'friendly_name': 'Email Address', 'multivalued': False }, 'sn': { 'public_name': 'lastname', 'binary': False, 'ldap_name': 'sn', 'friendly_name': 'Last Name', 'multivalued': False }, 'givenName': { 'public_name': 'firstname', 'binary': False, 'ldap_name': 'givenName', 'friendly_name': 'First Name', 'multivalued': False }, 'uid': { 'public_name': '', 'binary': False, 'ldap_name': 'uid', 'friendly_name': 'uid', 'multivalued': False }, 'eduPersonAffiliation': { 'public_name': 'eduPersonAffiliation', 'binary': False, 'ldap_name': 'eduPersonAffiliation', 'friendly_name': 'eduPersonAffiliation', 'multivalued': True }, 'eduPersonPrimaryAffiliation': { 'public_name': 'eduPersonPrimaryAffiliation', 'binary': False, 'ldap_name': 'eduPersonPrimaryAffiliation', 'friendly_name': 'eduPersonPrimaryAffiliation', 'multivalued': False }, 'ou': { 'public_name': 'ou', 'binary': False, 'ldap_name': 'ou', 'friendly_name': 'Organizational Unit', 'multivalued': False }, 'uwodepartmentassoc': { 'public_name': 'uwodepartmentassoc', 'binary': False, 'ldap_name': 'uwodepartmentassoc', 'friendly_name': 'UWO Department Association', 'multivalued': False }, 'l': { 'public_name': 'location', 'binary': False, 'ldap_name': 'l', 'friendly_name': 'Location', 'multivalued': False }, 'telephoneNumber': { 'public_name': 'phone', 'binary': False, 'ldap_name': 'telephoneNumber', 'friendly_name': 'Phone Number', 'multivalued': False }, 'mailUserStatus': { 'public_name': 'mailUserStatus', 'binary': False, 'ldap_name': 'mailUserStatus', 'friendly_name': 'Mail User Status', 'multivalued': False }, 'uwomailstop': { 'public_name': 'uwomailstop', 'binary': False, 'ldap_name': 'uwomailstop', 'friendly_name': 'UWO Mail Stop', 'multivalued': False }, 'displayName': { 'public_name': 'displayname', 'binary': False, 'ldap_name': 'displayName', 'friendly_name': 'Display Name', 'multivalued': False }, } ldap_auth.acl_users.setSchemaConfig(ldap_schema) ldap_auth.acl_users._user_objclasses = ['inetOrgPerson'] ldap_auth.manage_activateInterfaces(['IUserEnumerationPlugin', 'IPropertiesPlugin', 'IAuthenticationPlugin']) movePluginToHeadOfList(acl_users.plugins, 'IPropertiesPlugin', ldap_plugin_id) def movePluginToHeadOfList(plugin_registry, 
plugin_type, plugin_id): interface = plugin_registry._getInterfaceFromName(plugin_type) index = plugin_registry._getPlugins(interface).index(plugin_id) while index > 0: plugin_registry.movePluginsUp(interface, [plugin_id]) new_index = plugin_registry._getPlugins(interface).index(plugin_id) if new_index >= index: # The plugin didn't move up. We calmly sidestep the infinite loop. break index = new_index
gpl-2.0
-1,045,420,193,652,714,100
32.913043
113
0.52702
false
4.067809
false
false
false
realopenit/bubble
bubble/commands/cmd_examples.py
1
1437
# -*- coding: utf-8 -*- # Part of bubble. See LICENSE file for full copyright and licensing details. import click from ..cli import pass_bubble from ..util.examples import all_examples_functions @click.command('examples', short_help='Show example for doing some task in bubble(experimental)') @click.option('--name', '-n', default=None, help='show the example with the name') @click.option('--all', '-a', is_flag=True, default=False, help='show all the examples') @pass_bubble def cli(ctx, name,all): """Show example for doing some task in bubble(experimental)""" ctx.gbc.say('all_example_functions',stuff=all_examples_functions, verbosity=1000) for example in all_examples_functions: if all or (name and example['name'] == name): if all: ctx.gbc.say('example',stuff=example, verbosity=100) name = example['name'] #click.echo_via_pager(example['fun']()) click.echo("#"*80) click.echo("### start of bubble example: "+name) click.echo("#"*80) click.echo(example['fun']()) click.echo("#"*80) click.echo("### end of bubble example: "+name) click.echo("#"*80) click.echo() else: click.echo("available example: " + example['name'])
gpl-3.0
-2,532,512,840,913,987,000
33.214286
85
0.557411
false
4.047887
false
false
false
yw374cornell/e-mission-server
emission/core/wrapper/stop.py
1
1696
import logging import emission.core.wrapper.wrapperbase as ecwb class Stop(ecwb.WrapperBase): props = {"trip_id": ecwb.WrapperBase.Access.WORM, # trip_id of the parent trip "enter_ts": ecwb.WrapperBase.Access.WORM, # the timestamp of entry (in secs) "enter_local_dt": ecwb.WrapperBase.Access.WORM, # searchable datetime in timezone of entry "enter_fmt_time": ecwb.WrapperBase.Access.WORM, # formatted entry time in timezone of place "exit_ts": ecwb.WrapperBase.Access.WORM, # the timestamp of exit (in secs) "exit_local_dt": ecwb.WrapperBase.Access.WORM, # searchable datetime in timezone of exit "exit_fmt_time": ecwb.WrapperBase.Access.WORM, # formatted time in timezone of place "ending_section": ecwb.WrapperBase.Access.WORM, # the id of the trip just before this "starting_section": ecwb.WrapperBase.Access.WORM, # the id of the trip just after this "enter_loc": ecwb.WrapperBase.Access.WORM, # the location in geojson format "exit_loc": ecwb.WrapperBase.Access.WORM, # the location in geojson format "source": ecwb.WrapperBase.Access.WORM, # the method used to generate this place "duration": ecwb.WrapperBase.Access.WORM} # the duration for which we were in this place enums = {} geojson = ["enter_loc", "exit_loc"] nullable = ["enter_ts", "enter_fmt_time", "ending_section", # for the start of a chain "exit_ts", "exit_fmt_time", "starting_section"] # for the end of a chain local_dates = ['enter_local_dt', 'exit_local_dt'] def _populateDependencies(self): pass
bsd-3-clause
-4,098,555,770,510,610,400
64.230769
104
0.653892
false
3.608511
false
false
false
RNAcentral/rnacentral-import-pipeline
tests/cli/pdb_test.py
1
1261
# -*- coding: utf-8 -*- """ Copyright [2009-2018] EMBL-European Bioinformatics Institute Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import json import pytest from click.testing import CliRunner from rnacentral_pipeline.cli import pdb @pytest.mark.parametrize( "command,output,pdbs", [ ("data", "pdb.json", ("1S72",)), ("extra", "pdb-extra.json", ("1S72",)), ], ) def test_can_fetch_expected_data(command, output, pdbs): runner = CliRunner() with runner.isolated_filesystem(): args = [command, output] args.extend(pdbs) result = runner.invoke(pdb.cli, args) assert result.exit_code == 0, result.output assert not result.exception with open(output, "rb") as raw: assert raw.read()
apache-2.0
7,726,899,241,946,992,000
29.756098
72
0.694687
false
3.730769
false
false
false
ekapujiw2002/kweb
kwebhelper.py
1
34980
#!/usr/bin/env python # -*- coding: utf-8 -*- # helper file for kweb Minimal Kiosk Browser # Copyright 2013-2014 by Guenter Kreidl # free software without any warranty # you can do with it what you like # version 1.4 import os,urllib,sys,subprocess,threading,time import Tkinter as tk # GLOBAL OPTIONS # use external settings file, if not empty #settings = '' settings = '/usr/local/bin/kwebhelper_settings.py' # where the downloads, PDF files etc. go, make sure a "Downloads" folder exists there #homedir = '/media/volume' homedir = '' # if empty, the user's home dir will be taken # OMXPLAYER AUDIO VIDEO OPTIONS omxoptions = [] # for selecting the sound output, uncomment one of these: #omxoptions = ['-o','hdmi'] #omxoptions = ['-o','local'] # more options are also possible of course # special options for watching live tv streams (omxplayer > 0.32) omx_livetv_options = ['--live'] # add the start of your live tv stream links to this list to enable live tv options live_tv = [] # like this: #live_tv = ['http://192.168.0.5:9082'] # set this to false, if you want to allow more than one omxplayer instance kill_omxplayer = True #kill_omxplayer = False # mimetypes: if given, this will restrict what omxplayer will be given to play: mimetypes = [] # normally omxplayer is started from a terminal (xterm), to clear the screen and get full keyboard control # Set the following to "False" to use omxplayer without starting a terminal first omxplayer_in_terminal_for_video = True #omxplayer_in_terminal_for_video = False omxplayer_in_terminal_for_audio = True #omxplayer_in_terminal_for_audio = False # options for m3u playlists, to check that they contain only audio files or streams audioextensions = ['mp3','aac','flac','wav','wma','cda','ogg','ogm','ac3','ape'] try_stream_as_audio = False # if set to "True", the following list will be used for checking for video files videoextensions = ['asf','avi','mpg','mp4','mpeg','m2v','m1v','vob','divx','xvid','mov','m4v','m2p','mkv','m2ts','ts','mts','wmv','webm'] # Play audio files or playlists that contain only audio files in omxaudioplayer GUI: useAudioplayer = True # options for omxplayer to be used when playing audio omxaudiooptions = [] # volume setting when starting omxaudioplayer ranging from -20 to 4 ( -60 to +12 db) defaultaudiovolume = 0 # start playing and close after playing last song automatically (if "True", set to "False" to disable) autoplay = True autofinish = True # Interface settings for omxaudioplayer: # The font to be used for playlist and buttons fontname = 'SansSerif' # value between 10 and 22, will also determine the size of the GUI window: fontheight = 14 # number of entries displayed in playlist window, between 5 and 25: maxlines = 8 # width of the window, value between 40 and 80, defines the minimum number of characters of the song name # displayed in the songlist (usually much more are shown!) 
lwidth = 40 # if the following is set to "True", vlc will be used to play audio files and playlists (audio only) useVLC = False #useVLC = True #COMMAND EXECUTION OPTIONS # if this is set to "True", all Desktop (GUI) programs will be executed without starting a terminal first check_desktop = True #check_desktop = False # direct commands will be executed without starting a terminal first # use it for background commands or programs with a GUI that are not desktop programs or if check_desktop is set to "False" direct_commands = ['kwebhelper.py','omxplayer'] # preferred terminal to run commands in, must be set preferred_terminal = 'lxterminal' #preferred_terminal = 'xterm' formdata_in_terminal = False #formdata_in_terminal = True # set the following to "True", if you want to spare memory overhead (but you'll get more disk write accesses) run_as_script = False #run_as_script = True # PDF OPTIONS # preferred pdf reader; both must be set or emtpy pdfprogpath = '' pdfprog = '' #pdfprogpath = '/usr/bin/mupdf' #pdfprog = 'mupdf' # additional options for pdf program (must match the selected program!): pdfoptions = [] #pdfoptions = ['-fullscreen'] # this will allow to open pdf files on a local server as files instead of downloading them first; # will only work with "http://localhost" links pdfpathreplacements = {} #pdfpathreplacements = {'http://localhost:8073/Ebooks1':'file:///var/www/Ebooks1'} # DOWNLOAD OPTIONS #download options for external download mode, enable one of these options: show_download_in_terminal = True #show_download_in_terminal = False # ONLINE VIDEO OPTIONS # options for pages containing video, either HTML5 video tags or all websites supported by youtube-dl # if html5 video tags include more than one source format, select the preferred one here preferred_html5_video_format = '.mp4' # Choose, if HTML5 URL extraction is tried first and youtube-dl extraction afterwards or vice versa html5_first = True #html5_first = False #additional youtube-dl options, e. g. 
selecting a resolution or file format youtube_dl_options = [] #youtube_dl_options = ['-f','37/22/18'] # special omxplayer options for web video youtube_omxoptions = [] # to use the same options as for other video, set #youtube_omxoptions = omxoptions ### end of global settings # take settings from separate file: if settings and os.path.exists(settings): try: execfile(settings) except: pass if not homedir: homedir = os.path.expanduser('~') dldir = homedir +'/Downloads' if not os.path.exists(dldir): os.mkdir(dldir) # helper functions def get_opt(options): if '--win' in options: pos = options.index('--win') if pos < (len(options) -2): options[pos+1] = '"' + options[pos+1] + '"' return ' '.join(options) def get_playlist(url, audio_as_stream): playlist = [] fn = '' audioonly = True go = False if url.startswith('http://'): try: fn,h = urllib.urlretrieve(url) go = True except: pass elif url.startswith('file://'): fn = url.replace('file://','').replace('%20',' ') fn = urllib.unquote(fn) if os.path.exists(fn): go = True elif os.path.exists(url): fn = url go = True if go: f = file(fn,'rb') pl = f.read() f.close() if url.startswith('http://'): os.remove(fn) pll = pl.split('\n') if url.lower().endswith('.m3u') or url.lower().endswith('.m3u8'): for s in pll: if s != '' and not s.startswith('#'): if s.split('.')[-1].lower() in audioextensions: pass elif audio_as_stream and s.split('.')[-1].lower() not in videoextensions: pass else: audioonly = False playlist.append(s) elif url.lower().endswith('.pls'): for s in pll: if s.startswith('File'): aurl = s.split('=')[1].strip() playlist.append(aurl) return (audioonly, playlist) def video_tag_extractor(url): result = [] if url.startswith('file://'): fpath = url.replace('file://','').replace('%20',' ') else: try: fpath,h = urllib.urlretrieve(url) except: return result f = file(fpath,'rb') html = f.read() f.close() if '<video ' in html: htl = html.split('<video') for ind in range(1,len(htl)): if not 'src="' in htl[ind]: continue vtl = htl[ind].split('src="') if len(vtl) > 2: links = [] for l in vtl[1:]: pos = l.find('"') links.append(l[0:pos]) link = links[0] for li in links: if preferred_html5_video_format and li.lower().endswith(preferred_html5_video_format): link = li else: vt = vtl[1] pos = vt.find('"') link = vt[0:pos] if link.startswith('http://') or link.startswith('https://') or link.startswith('rtsp://') or link.startswith('rtmp://'): result.append(link) elif link.startswith('file://'): newlink = '"'+link.replace('file://','').replace('%20',' ')+'"' result.append(newlink) else: urll = url.split('/') if link.startswith('/'): newlink = '/'.join(urll[0:3]+[link[1:]]) else: relcount = len(urll) - 1 - link.count('../') newlink = '/'.join(urll[0:relcount]+[link.replace('../','')]) if newlink.startswith('file://'): newlink = '"'+newlink.replace('file://','').replace('%20',' ')+'"' result.append(newlink) return result def play_ytdl(res): vlist = res.split('\n') if (len(vlist) == 1) or (len(vlist) == 2 and vlist[1] == ''): vurl = vlist[0] if kill_omxplayer: dummy = os.system('killall omxplayer.bin > /dev/null 2>&1') pargs = ["xterm","-fn","fixed","-fullscreen", "-maximized", "-bg", "black", "-fg", "black", "-e",'omxplayer']+youtube_omxoptions+[vurl]+['>', '/dev/null', '2>&1'] os.execv("/usr/bin/xterm",pargs) else: if kill_omxplayer: script = '#!/bin/bash\nkillall omxplayer.bin > /dev/null 2>&1\n' else: script = '#!/bin/bash\n' for vurl in vlist: if vurl != '': script += 'omxplayer ' + get_opt(youtube_omxoptions) + ' "' + vurl + '" > /dev/null 2>&1\n' f = 
file(dldir+os.sep+'playall.sh','wb') f.write(script) f.close() os.chmod(dldir+os.sep+'playall.sh',511) os.execl("/usr/bin/xterm","xterm","-fn","fixed","-fullscreen", "-maximized", "-bg", "black", "-fg", "black", "-e",dldir+os.sep+'playall.sh') def play_html5(tags): if len(tags) == 1: if kill_omxplayer: dummy = os.system('killall omxplayer.bin > /dev/null 2>&1') pargs = ["xterm","-fn","fixed","-fullscreen", "-maximized", "-bg", "black", "-fg", "black", "-e",'omxplayer']+youtube_omxoptions+[tags[0]]+['>', '/dev/null', '2>&1'] os.execv("/usr/bin/xterm",pargs) else: if kill_omxplayer: script = '#!/bin/bash\nkillall omxplayer.bin > /dev/null 2>&1\n' else: script = '#!/bin/bash\n' for t in tags: script += 'omxplayer ' + get_opt(youtube_omxoptions) + ' ' + t + ' > /dev/null 2>&1\n' f = file(dldir+os.sep+'playall.sh','wb') f.write(script) f.close() os.chmod(dldir+os.sep+'playall.sh',511) os.execl("/usr/bin/xterm","xterm","-fn","fixed","-fullscreen", "-maximized", "-bg", "black", "-fg", "black", "-e",dldir+os.sep+'playall.sh') # omxaudioplayer GUI class omxaudioplayer(tk.Frame): def __init__(self, master=None, playlist=[],mode='simple',autofinish=True,volume=0,omxoptions=[], fontheight=14,fontname='SansSerif',maxlines=8,width=40,autoplay=True): tk.Frame.__init__(self, master) self.set_defaults() self.fontheight = min([max([fontheight,10]),22]) self.fontname = fontname try: self.font = (self.fontname,str(self.fontheight),'bold') except: self.font = ('SansSerif',str(self.fontheight),'bold') self.maxlines = min([max([maxlines,5]),25]) self.defaultwidth = min([max([width,40]),80]) self.root = master self.root.bind("<<finished>>",self.on_finished) self.root.protocol('WM_DELETE_WINDOW', self.on_close) self.root.title("omxaudioplayer") self.root.resizable(False,False) for keysym in self.keybindings: self.root.bind(keysym,self.keyp_handler) self.grid() self.omxoptions = omxoptions self.autofinish = autofinish self.playlist = playlist self.autoplay = autoplay self.mode = mode self.status = 'stopped' self.omxprocess = None self.omxwatcher = None self.songpointer = 0 self.listpointer = 0 self.currentvolume = min([max([volume,-20]),4]) self.changedvolume = tk.IntVar() self.changedvolume.set(volume) self.playcontent = tk.StringVar() self.playcontent.set(self.playstring) self.createwidgets() if self.playlist and self.autoplay: self.playsong(0) def set_defaults(self): self.playstring = '>' self.pausestring = '||' self.stopstring = '[]' self.rewstring = '←' self.fwdstring = '→' self.prevstring = '↑' self.nextstring = '↓' self.vchdelay = 0.05 self.keybindings = ['<KeyPress-Down>','<KeyPress-Up>','<KeyPress-space>','<KeyPress-q>','<KeyPress-Escape>', '<KeyPress-plus>','<KeyPress-minus>','<KeyPress-Left>','<KeyPress-Right>','<KeyPress-Return>', '<KeyPress-KP_Enter>','<KeyPress-KP_Add>','<KeyPress-KP_Subtract>'] def keyp_handler(self, event): if event.keysym in ['space','Return','KP_Enter']: self.playpause() elif event.keysym in ['q','Escape']: self.stop() elif event.keysym == 'Down': while self.nextbutton['state'] == tk.DISABLED: time.sleep(0.1) self.nextsong() elif event.keysym == 'Up': while self.prevbutton['state'] == tk.DISABLED: time.sleep(0.1) self.prevsong() elif event.keysym == 'Left': self.sendcommand('\x1b\x5b\x44') elif event.keysym == 'Right': self.sendcommand('\x1b\x5b\x43') else: av = 0 if event.keysym in ['plus','KP_Add']: av = 1 elif event.keysym in ['minus','KP_Subtract']: av = -1 if av != 0: nv = self.changedvolume.get() + av if nv in range(-20,5): self.changedvolume.set(nv) self.vol_changed(nv) def 
playsong(self, index): if not self.omxprocess: self.prevbutton['state'] = tk.DISABLED self.nextbutton['state'] = tk.DISABLED self.songpointer = index pargs = ['omxplayer', '--vol', str(self.currentvolume*300)] + self.omxoptions + [self.playlist[index]] self.omxprocess = subprocess.Popen(pargs,stdin=subprocess.PIPE,stdout=file('/dev/null','wa')) self.omxwatcher = threading.Timer(0,self.watch) self.omxwatcher.start() self.status = 'playing' self.playcontent.set(self.pausestring) selection = self.playlistwindow.curselection() if not selection or index != int(selection[0]): self.listpointer = index self.playlistwindow.selection_clear(0, len(self.playlist)-1) self.playlistwindow.selection_set(index) self.playlistwindow.see(index) time.sleep(0.3) self.prevbutton['state'] = tk.NORMAL self.nextbutton['state'] = tk.NORMAL def on_close(self): if self.omxprocess: self.status='closing' self.sendcommand('q') time.sleep(0.1) if self.omxprocess: try: self.omxprocess.terminate() time.sleep(0.1) except: pass if self.omxprocess: try: self.omxprocess.kill() time.sleep(0.1) except: pass self.root.destroy() def on_finished(self, *args): stat = self.status self.status = 'stopped' self.playcontent.set(self.playstring) if stat != 'finished': if self.songpointer == self.listpointer: self.nextsong() else: self.songpointer = self.listpointer self.playsong(self.songpointer) def watch(self): if self.omxprocess: try: dummy = self.omxprocess.wait() except: pass self.omxprocess = None if self.status != 'closing': self.root.event_generate("<<finished>>") def sendcommand(self, cmd): if self.omxprocess: try: self.omxprocess.stdin.write(cmd) except: pass def playpause(self): if self.status in ['stopped','finished']: self.songpointer = self.listpointer self.playsong(self.songpointer) elif self.status == 'paused': self.sendcommand('p') self.status = 'playing' self.playcontent.set(self.pausestring) elif self.status == 'playing': self.sendcommand('p') self.status = 'paused' self.playcontent.set(self.playstring) def stop(self,stat='finished'): if self.omxprocess: self.status = stat self.sendcommand('q') else: self.playcontent.set(self.playstring) self.status = 'stopped' def rewind(self): self.sendcommand('\x1b\x5b\x44') def forward(self): self.sendcommand('\x1b\x5b\x43') def prevsong(self): if self.listpointer != self.songpointer and self.status != 'stopped': self.stop('stopped') elif self.listpointer > 0: self.listpointer = self.listpointer - 1 self.playlistwindow.selection_clear(0, len(self.playlist)-1) self.playlistwindow.selection_set(self.listpointer) if self.status == 'stopped': self.playsong(self.listpointer) else: self.stop('stopped') def nextsong(self): if self.listpointer != self.songpointer and self.status != 'stopped': self.stop('stopped') elif self.listpointer < len(self.playlist)-1: self.listpointer = self.listpointer + 1 self.playlistwindow.selection_clear(0, len(self.playlist)-1) self.playlistwindow.selection_set(self.listpointer) if self.status == 'stopped': self.playsong(self.listpointer) else: self.stop('stopped') elif self.autofinish: self.on_close() def vol_changed(self, volume): vol = int(volume) if self.status != 'stopped': if vol > self.currentvolume: diff = vol - self.currentvolume self.currentvolume = vol for k in range(0,diff): self.sendcommand('+') time.sleep(self.vchdelay) elif vol < self.currentvolume: diff = self.currentvolume - vol self.currentvolume = vol for k in range(0,diff): self.sendcommand('-') time.sleep(self.vchdelay) else: self.currentvolume = vol def on_listbox_select(self,event): sel = 
self.playlistwindow.curselection() if sel: self.listpointer = int(sel[0]) def on_listbox_double(self,event): self.on_listbox_select(event) if self.status != 'stopped': if self.songpointer == self.listpointer: self.stop() self.playsong(self.listpointer) else: self.stop('stopped') else: self.playsong(self.listpointer) def focus_out(self, event): self.root.focus_set() def createwidgets(self): if len(self.playlist) > self.maxlines: self.yScroll = tk.Scrollbar(self, orient=tk.VERTICAL) self.yScroll['width'] = int(self.yScroll['width']) + (self.fontheight-10) hg = self.maxlines else: hg = len(self.playlist) self.playlistwindow = tk.Listbox(self, takefocus=0, selectmode = 'single', width = self.defaultwidth, height = hg, font=self.font,activestyle='none',bg='#000', fg = '#ddd', selectbackground='#60c', selectforeground='#ffffd0') for url in self.playlist: song = url.split('/')[-1] self.playlistwindow.insert(tk.END, urllib.unquote(song).replace('%20',' ')) self.playlistwindow.selection_set(self.songpointer) self.playlistwindow.bind("<<ListboxSelect>>", self.on_listbox_select) self.playlistwindow.bind("<Double-Button-1>",self.on_listbox_double) self.playlistwindow.bind("<FocusIn>",self.focus_out) self.playlistwindow.grid(row=0,column=0,columnspan=7, sticky=tk.N+tk.S+tk.E+tk.W) if len(self.playlist) > self.maxlines: self.playlistwindow.configure(yscrollcommand=self.yScroll.set) self.yScroll['command'] = self.playlistwindow.yview self.yScroll.grid(row=0,column=7, sticky=tk.N+tk.S) self.playbutton = tk.Button(self, command=self.playpause, font=self.font, textvariable = self.playcontent, width = 3, justify = tk.CENTER) self.playbutton.grid(row=1,column=0) self.stopbutton = tk.Button(self, command=self.stop, font=self.font, text = self.stopstring, width = 3, justify = tk.CENTER) self.stopbutton.grid(row=1,column=1) self.prevbutton = tk.Button(self, command=self.rewind, font=self.font, text = self.rewstring, width = 3, justify = tk.CENTER) self.prevbutton.grid(row=1,column=2) self.nextbutton = tk.Button(self, command=self.forward, font=self.font, text = self.fwdstring, width = 3, justify = tk.CENTER) self.nextbutton.grid(row=1,column=3) self.prevbutton = tk.Button(self, command=self.prevsong, font=self.font, text = self.prevstring, width = 3, justify = tk.CENTER) self.prevbutton.grid(row=1,column=4) self.nextbutton = tk.Button(self, command=self.nextsong, font=self.font, text = self.nextstring, width = 3, justify = tk.CENTER) self.nextbutton.grid(row=1,column=5) self.volume = tk.Scale(self, command=self.vol_changed, font=self.font, length=str((self.fontheight-2)*(self.defaultwidth-30))+'p', from_ = -20, to=4, variable=self.changedvolume ,orient=tk.HORIZONTAL, resolution=1, showvalue=0) self.volume.grid(row=1,column=6) # main script function args = sys.argv if len(args) > 2: mode = args[1] url = args[2] mimetype = '' # media section: play audio, video, m3u playlists and streams if mode == 'av': mtflag = True if len(args) > 3: mimetype = args[3] if mimetypes and mimetype not in mimetypes: mtflag = False url_extension = url.lower().split('.')[-1] if url_extension in ['m3u','m3u8','pls'] and mtflag: audioonly, playlist = get_playlist(url,try_stream_as_audio) if playlist: if audioonly and useVLC: os.execl("/usr/bin/vlc","vlc",url) elif audioonly and useAudioplayer: if kill_omxplayer: dummy = os.system('killall omxplayer.bin > /dev/null 2>&1') root = tk.Tk() player = omxaudioplayer(master=root, playlist=playlist,volume=defaultaudiovolume,omxoptions=omxaudiooptions, 
autofinish=autofinish,fontheight=fontheight,fontname=fontname,maxlines=maxlines, autoplay=autoplay,width=lwidth) player.mainloop() else: if audioonly: options = omxaudiooptions else: options = omxoptions if kill_omxplayer: script = '#!/bin/bash\nkillall omxplayer.bin > /dev/null 2>&1\n' else: script = '#!/bin/bash\n' for s in playlist: if audioonly and omxplayer_in_terminal_for_audio: script += 'echo "now playing: '+ urllib.unquote(s.split('/')[-1]) +'"\n' script += 'omxplayer ' + get_opt(options) + ' "' + s + '" > /dev/null 2>&1\n' f = file(dldir+os.sep+'playall.sh','wb') f.write(script) f.close() os.chmod(dldir+os.sep+'playall.sh',511) if omxplayer_in_terminal_for_audio and audioonly: os.execlp(preferred_terminal,preferred_terminal,"-e",dldir+os.sep+'playall.sh') elif omxplayer_in_terminal_for_video and not audioonly: os.execl("/usr/bin/xterm","xterm","-fn","fixed","-fullscreen", "-maximized", "-bg", "black", "-fg", "black", "-e",dldir+os.sep+'playall.sh') else: os.execl(dldir+os.sep+'playall.sh','playall.sh') elif mtflag: url_valid = True if url.startswith('file://'): url = url.replace('file://','').replace('%20',' ') url = urllib.unquote(url) if not os.path.exists(url): url_valid = False elif not url.startswith('http'): if not os.path.exists(url): url_valid = False if url_valid: if url_extension in audioextensions or (try_stream_as_audio and not url_extension in videoextensions): if useVLC: os.execl("/usr/bin/vlc","vlc",url) elif useAudioplayer: if kill_omxplayer: dummy = os.system('killall omxplayer.bin > /dev/null 2>&1') root = tk.Tk() player = omxaudioplayer(master=root, playlist=[url],volume=defaultaudiovolume,omxoptions=omxaudiooptions, autofinish=autofinish,fontheight=fontheight,fontname=fontname,maxlines=maxlines, autoplay=autoplay,width=lwidth) player.mainloop() else: if kill_omxplayer: dummy = os.system('killall omxplayer.bin > /dev/null 2>&1') if omxplayer_in_terminal_for_audio: pargs = [preferred_terminal,'-e','omxplayer'] + omxaudiooptions + [url] os.execvp(preferred_terminal,pargs) else: pargs = ['omxplayer'] + omxaudiooptions + [url] os.execvp('omxplayer',pargs) else: if kill_omxplayer: dummy = os.system('killall omxplayer.bin > /dev/null 2>&1') options = omxoptions if live_tv: for lt in live_tv: if url.startswith(lt): options = omx_livetv_options break if omxplayer_in_terminal_for_video: pargs = ["xterm","-fn","fixed","-fullscreen", "-maximized", "-bg", "black", "-fg", "black", "-e",'omxplayer']+options+[url]+['>', '/dev/null', '2>&1'] os.execv("/usr/bin/xterm",pargs) else: pargs = ['omxplayer'] + omxoptions + [url] os.execvp('omxplayer',pargs) # end of media section # pdf section (download - if needed - and open pdf file) elif mode == 'pdf': if not (pdfprogpath and pdfprog): if os.path.exists('/usr/bin/xpdf'): pdfprogpath = '/usr/bin/xpdf' pdfprog = 'xpdf' else: pdfprogpath = '/usr/bin/mupdf' pdfprog = 'mupdf' go = False # option to open pdf as files from http://localhost instead of downloading them first if pdfpathreplacements and url.startswith('http://localhost'): for k,v in pdfpathreplacements.iteritems(): if url.startswith(k): nurl = url.replace(k,v) if os.path.exists(urllib.unquote(nurl.replace('file://','').replace('%20',' ').split('#')[0])): url = nurl break if url.startswith('file://'): url = url.replace('file://','').replace('%20',' ') url = urllib.unquote(url) urll = url.split('#page=') f = urll[0] if os.path.exists(f): if len(urll) > 1: page = urll[1].split('&')[0] os.execv(pdfprogpath,[pdfprog]+pdfoptions+[f,page]) else: 
os.execv(pdfprogpath,[pdfprog]+pdfoptions+[f]) else: if url.endswith('.pdf') or url.endswith('.PDF') or '.pdf#page' in url.lower(): urll = url.split('#page=') fname = urllib.unquote(urll[0].split('/')[-1].replace('%20',' ')) f = dldir+os.sep+urllib.unquote(urll[0].split('/')[-1].replace('%20',' ')) if os.path.exists(f): go = True else: try: fn,h = urllib.urlretrieve(urll[0],f) go = True except: pass if go: if len(urll) > 1: page = urll[1].split('&')[0] os.execv(pdfprogpath,[pdfprog]+pdfoptions+[f,page]) else: os.execv(pdfprogpath,[pdfprog]+pdfoptions+[f]) # end of pdf section # download section elif mode == 'dl': # download file using wget if show_download_in_terminal: os.execlp(preferred_terminal,preferred_terminal,'-e', "wget", "-P", dldir,"--no-clobber","--adjust-extension","--content-disposition",url,"--load-cookies",homedir + "/.web_cookie_jar","--no-check-certificate") else: os.execl("/usr/bin/wget", "wget", "-P", dldir,"--no-clobber","--adjust-extension","--content-disposition",url,"--load-cookies",homedir + "/.web_cookie_jar","--no-check-certificate") #end of download section # command execution section elif mode == 'cmd': cmd = '' formdata = False cpage = 'file:///homepage.html?cmd=' url = url.decode('utf-8') if url.startswith('#'): cmd = url[1:] elif url.startswith(cpage): cmd = url.replace(cpage,'') if not cmd.startswith('formdata'): cmd = urllib.unquote_plus(cmd).replace('%20',' ') elif url.startswith('http://localhost') and ('/homepage.html?cmd=' in url): cmd = url.split('/homepage.html?cmd=')[1] if not cmd.startswith('formdata'): cmd = urllib.unquote_plus(cmd).replace('%20',' ') if cmd: if cmd.startswith('formdata'): formdata = True cmd = cmd.split('formdata')[1].strip() if '&' in cmd: cmdargs = cmd.split('&') for ind in range(0,len(cmdargs)): if '=' in cmdargs[ind]: cargl = cmdargs[ind].split('=') if cargl[0].startswith('quoted') and cargl[1] != '': cmdargs[ind] = " '" + urllib.unquote_plus(cargl[1]) + "'" elif cargl[0].startswith('dquoted') and cargl[1] != '': cmdargs[ind] = ' "' + urllib.unquote_plus(cargl[1]) + '"' elif cargl[1] != '': cmdargs[ind] = ' ' + urllib.unquote_plus(cargl[1]) else: cmdargs[ind] = '' else: cmdargs[ind] = ' ' + urllib.unquote_plus(cmdargs[ind]).strip() cmd = ''.join(cmdargs).strip() else: cmd = urllib.unquote_plus(cmd).strip() cmdl = cmd.split(' ') if len(cmdl)>1 and cmdl[0] == 'sudo': realname = cmdl[1] else: realname = cmdl[0] desktop_app = False if check_desktop and '/' not in realname: if os.path.exists('/usr/share/applications/'+realname+'.desktop'): desktop_app = True if desktop_app or (realname in direct_commands) or (formdata and not formdata_in_terminal): cmdline = cmd.encode('utf-8') else: cmdline = preferred_terminal + ' -e '+cmd.encode('utf-8') if run_as_script: dmcount = 0 scpath = dldir+os.sep+'temp'+str(dmcount)+'.sh' while os.path.exists(scpath): dmcount += 1 scpath = dldir+os.sep+'temp'+str(dmcount)+'.sh' f = file(scpath,'wb') f.write('#!/bin/bash\n'+cmdline+'\nrm '+scpath+'\n') f.close() os.chmod(scpath,511) os.execl(scpath,scpath) else: try: dummy = os.system(cmdline) except: pass # end of command execution section # web video section (HTML5 and all websites supported by youtube-dl) elif mode == 'ytdl' and os.path.exists('/usr/bin/youtube-dl'): #youtube and HTML5 videos if html5_first: tags = video_tag_extractor(url) if tags: #extract embedded html5 video play_html5(tags) else: yta = ['youtube-dl', '-g']+youtube_dl_options+[url] yt = subprocess.Popen(yta,stdout=subprocess.PIPE,stderr=subprocess.PIPE) (res,err) = yt.communicate() 
if res and not err: play_ytdl(res) else: yta = ['youtube-dl', '-g']+youtube_dl_options+[url] yt = subprocess.Popen(yta,stdout=subprocess.PIPE,stderr=subprocess.PIPE) (res,err) = yt.communicate() if res and not err: play_ytdl(res) else: tags = video_tag_extractor(url) if tags: #extract embedded html5 video play_html5(tags) # end of web video section
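# ---------------------------------------------------------------------------
# Quick reference (a summary of the dispatch above, no new behaviour):
# kweb invokes this helper as  kwebhelper.py MODE URL [MIMETYPE]  where MODE is
#   av   - play audio/video files, streams and m3u/pls playlists via
#          omxplayer, the Tk audio player GUI, or vlc (per the options above)
#   pdf  - fetch the PDF if necessary and open it in xpdf/mupdf, honouring
#          '#page=' fragments in the URL
#   dl   - download URL into the Downloads folder with wget
#   cmd  - execute a command encoded in a '#...' or homepage.html?cmd= link
#   ytdl - play web video found via HTML5 <video> tags or youtube-dl
# Example invocation (illustrative URL only):
#   kwebhelper.py av http://example.com/radio.m3u audio/x-mpegurl
# Fewer than two arguments after the script name and the helper does nothing.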
gpl-3.0
2,244,612,217,892,959,500
41.185766
235
0.53966
false
3.798414
false
false
false
bwohlberg/sporco
sporco/dictlrn/cbpdndl.py
1
18601
# -*- coding: utf-8 -*-
# Copyright (C) 2015-2020 by Brendt Wohlberg <[email protected]>
# All rights reserved. BSD 3-clause License.
# This file is part of the SPORCO package. Details of the copyright
# and user license can be found in the 'LICENSE.txt' file distributed
# with the package.

"""Dictionary learning based on CBPDN sparse coding"""

from __future__ import print_function, absolute_import

import copy

import numpy as np

import sporco.cnvrep as cr
import sporco.admm.cbpdn as admm_cbpdn
import sporco.admm.ccmod as admm_ccmod
import sporco.pgm.cbpdn as pgm_cbpdn
import sporco.pgm.ccmod as pgm_ccmod
from sporco.dictlrn import dictlrn
import sporco.dictlrn.common as dc
from sporco.common import _fix_dynamic_class_lookup
from sporco.linalg import inner
from sporco.fft import (rfftn, irfftn, rfl2norm2)

__author__ = """Brendt Wohlberg <[email protected]>"""


def cbpdn_class_label_lookup(label):
    """Get a CBPDN class from a label string."""

    clsmod = {'admm': admm_cbpdn.ConvBPDN,
              'pgm': pgm_cbpdn.ConvBPDN}
    if label in clsmod:
        return clsmod[label]
    else:
        raise ValueError('Unknown ConvBPDN solver method %s' % label)


def ConvBPDNOptionsDefaults(method='admm'):
    """Get defaults dict for the ConvBPDN class specified by the
    ``method`` parameter.
    """

    dflt = copy.deepcopy(cbpdn_class_label_lookup(method).Options.defaults)
    if method == 'admm':
        dflt.update({'MaxMainIter': 1, 'AutoRho':
                     {'Period': 10, 'AutoScaling': False, 'RsdlRatio': 10.0,
                      'Scaling': 2.0, 'RsdlTarget': 1.0}})
    else:
        dflt.update({'MaxMainIter': 1})
    return dflt


def ConvBPDNOptions(opt=None, method='admm'):
    """A wrapper function that dynamically defines a class derived from
    the Options class associated with one of the implementations of
    the Convolutional BPDN problem, and returns an object
    instantiated with the provided parameters. The wrapper is designed
    to allow the appropriate object to be created by calling this
    function using the same syntax as would be used if it were a
    class. The specific implementation is selected by use of an
    additional keyword argument 'method'. Valid values are as
    specified in the documentation for :func:`ConvBPDN`.
    """

    # Assign base class depending on method selection argument
    base = cbpdn_class_label_lookup(method).Options

    # Nested class with dynamically determined inheritance
    class ConvBPDNOptions(base):
        def __init__(self, opt):
            super(ConvBPDNOptions, self).__init__(opt)

    # Allow pickling of objects of type ConvBPDNOptions
    _fix_dynamic_class_lookup(ConvBPDNOptions, method)

    # Return object of the nested class type
    return ConvBPDNOptions(opt)


def ConvBPDN(*args, **kwargs):
    """A wrapper function that dynamically defines a class derived from
    one of the implementations of the Convolutional BPDN problem, and
    returns an object instantiated with the provided parameters. The
    wrapper is designed to allow the appropriate object to be created
    by calling this function using the same syntax as would be used if
    it were a class. The specific implementation is selected by use of
    an additional keyword argument 'method'. Valid values are:

    - ``'admm'`` : Use the implementation defined in
      :class:`.admm.cbpdn.ConvBPDN`.
    - ``'pgm'`` : Use the implementation defined in
      :class:`.pgm.cbpdn.ConvBPDN`.

    The default value is ``'admm'``.
    """

    # Extract method selection argument or set default
    method = kwargs.pop('method', 'admm')

    # Assign base class depending on method selection argument
    base = cbpdn_class_label_lookup(method)

    # Nested class with dynamically determined inheritance
    class ConvBPDN(base):
        def __init__(self, *args, **kwargs):
            super(ConvBPDN, self).__init__(*args, **kwargs)

    # Allow pickling of objects of type ConvBPDN
    _fix_dynamic_class_lookup(ConvBPDN, method)

    # Return object of the nested class type
    return ConvBPDN(*args, **kwargs)


def ccmod_class_label_lookup(label):
    """Get a CCMOD class from a label string."""

    clsmod = {'ism': admm_ccmod.ConvCnstrMOD_IterSM,
              'cg': admm_ccmod.ConvCnstrMOD_CG,
              'cns': admm_ccmod.ConvCnstrMOD_Consensus,
              'pgm': pgm_ccmod.ConvCnstrMOD}
    if label in clsmod:
        return clsmod[label]
    else:
        raise ValueError('Unknown ConvCnstrMOD solver method %s' % label)


def ConvCnstrMODOptionsDefaults(method='pgm'):
    """Get defaults dict for the ConvCnstrMOD class specified by the
    ``method`` parameter.
    """

    dflt = copy.deepcopy(ccmod_class_label_lookup(method).Options.defaults)
    if method == 'pgm':
        dflt.update({'MaxMainIter': 1})
    else:
        dflt.update({'MaxMainIter': 1, 'AutoRho':
                     {'Period': 10, 'AutoScaling': False, 'RsdlRatio': 10.0,
                      'Scaling': 2.0, 'RsdlTarget': 1.0}})
    return dflt


def ConvCnstrMODOptions(opt=None, method='pgm'):
    """A wrapper function that dynamically defines a class derived from
    the Options class associated with one of the implementations of
    the Convolutional Constrained MOD problem, and returns an object
    instantiated with the provided parameters. The wrapper is designed
    to allow the appropriate object to be created by calling this
    function using the same syntax as would be used if it were a
    class. The specific implementation is selected by use of an
    additional keyword argument 'method'. Valid values are as
    specified in the documentation for :func:`ConvCnstrMOD`.
    """

    # Assign base class depending on method selection argument
    base = ccmod_class_label_lookup(method).Options

    # Nested class with dynamically determined inheritance
    class ConvCnstrMODOptions(base):
        def __init__(self, opt):
            super(ConvCnstrMODOptions, self).__init__(opt)

    # Allow pickling of objects of type ConvCnstrMODOptions
    _fix_dynamic_class_lookup(ConvCnstrMODOptions, method)

    # Return object of the nested class type
    return ConvCnstrMODOptions(opt)


def ConvCnstrMOD(*args, **kwargs):
    """A wrapper function that dynamically defines a class derived from
    one of the implementations of the Convolutional Constrained MOD
    problems, and returns an object instantiated with the provided
    parameters. The wrapper is designed to allow the appropriate
    object to be created by calling this function using the same
    syntax as would be used if it were a class. The specific
    implementation is selected by use of an additional keyword
    argument 'method'. Valid values are:

    - ``'ism'`` : Use the implementation defined in
      :class:`.ConvCnstrMOD_IterSM`. This method works well for a
      small number of training images, but is very slow for larger
      training sets.
    - ``'cg'`` : Use the implementation defined in
      :class:`.ConvCnstrMOD_CG`. This method is slower than ``'ism'``
      for small training sets, but has better run time scaling as the
      training set grows.
    - ``'cns'`` : Use the implementation defined in
      :class:`.ConvCnstrMOD_Consensus`. This method is a good choice
      for large training sets.
    - ``'pgm'`` : Use the implementation defined in
      :class:`.pgm.ccmod.ConvCnstrMOD`. This method is the best choice
      for large training sets.

    The default value is ``'pgm'``.
    """

    # Extract method selection argument or set default
    method = kwargs.pop('method', 'pgm')

    # Assign base class depending on method selection argument
    base = ccmod_class_label_lookup(method)

    # Nested class with dynamically determined inheritance
    class ConvCnstrMOD(base):
        def __init__(self, *args, **kwargs):
            super(ConvCnstrMOD, self).__init__(*args, **kwargs)

    # Allow pickling of objects of type ConvCnstrMOD
    _fix_dynamic_class_lookup(ConvCnstrMOD, method)

    # Return object of the nested class type
    return ConvCnstrMOD(*args, **kwargs)


class ConvBPDNDictLearn(dictlrn.DictLearn):
    r"""
    Dictionary learning by alternating between sparse coding and
    dictionary update stages.

    |

    .. inheritance-diagram:: ConvBPDNDictLearn
       :parts: 2

    |

    The sparse coding is performed using
    :class:`.admm.cbpdn.ConvBPDN` (see :cite:`wohlberg-2014-efficient`) or
    :class:`.pgm.cbpdn.ConvBPDN` (see :cite:`chalasani-2013-fast` and
    :cite:`wohlberg-2016-efficient`), and the dictionary update is computed
    using :class:`.pgm.ccmod.ConvCnstrMOD` (see
    :cite:`garcia-2018-convolutional1`) or one of the solver classes in
    :mod:`.admm.ccmod` (see :cite:`wohlberg-2016-efficient` and
    :cite:`sorel-2016-fast`). The coupling between sparse coding and
    dictionary update stages is as in :cite:`garcia-2017-subproblem`.

    Solve the optimisation problem

    .. math::
       \mathrm{argmin}_{\mathbf{d}, \mathbf{x}} \; (1/2) \sum_k \left \|
       \sum_m \mathbf{d}_m * \mathbf{x}_{k,m} - \mathbf{s}_k \right \|_2^2
       + \lambda \sum_k \sum_m \| \mathbf{x}_{k,m} \|_1 \quad
       \text{such that} \quad \mathbf{d}_m \in C \;\; \forall m \;,

    where :math:`C` is the feasible set consisting of filters with unit
    norm and constrained support, via interleaved alternation between the
    ADMM steps of the :class:`.admm.cbpdn.ConvBPDN` and
    :func:`.ConvCnstrMOD` problems. Multi-channel variants
    :cite:`wohlberg-2016-convolutional` are also supported.

    After termination of the :meth:`solve` method, attribute :attr:`itstat`
    is a list of tuples representing statistics of each iteration. The
    fields of the named tuple ``IterationStats`` are:

       ``Iter`` : Iteration number

       ``ObjFun`` : Objective function value

       ``DFid`` : Value of data fidelity term :math:`(1/2) \sum_k \|
       \sum_m \mathbf{d}_m * \mathbf{x}_{k,m} - \mathbf{s}_k \|_2^2`

       ``RegL1`` : Value of regularisation term :math:`\sum_k \sum_m
       \| \mathbf{x}_{k,m} \|_1`

       ``Cnstr`` : Constraint violation measure

       *If the ADMM solver is selected for sparse coding:*

       ``XPrRsdl`` : Norm of X primal residual

       ``XDlRsdl`` : Norm of X dual residual

       ``XRho`` : X penalty parameter

       *If the PGM solver is selected for sparse coding:*

       ``X_F_Btrack`` : Value of objective function for CSC problem

       ``X_Q_Btrack`` : Value of quadratic approximation for CSC problem

       ``X_ItBt`` : Number of iterations in backtracking for CSC problem

       ``X_L`` : Inverse of gradient step parameter for CSC problem

       *If an ADMM solver is selected for the dictionary update:*

       ``DPrRsdl`` : Norm of D primal residual

       ``DDlRsdl`` : Norm of D dual residual

       ``DRho`` : D penalty parameter

       *If the PGM solver is selected for the dictionary update:*

       ``D_F_Btrack`` : Value of objective function for CDU problem

       ``D_Q_Btrack`` : Value of quadratic approximation for CDU problem

       ``D_ItBt`` : Number of iterations in backtracking for CDU problem

       ``D_L`` : Inverse of gradient step parameter for CDU problem

       ``Time`` : Cumulative run time
    """

    class Options(dictlrn.DictLearn.Options):
        """CBPDN dictionary learning algorithm options.

        Options include all of those defined in
        :class:`.dictlrn.DictLearn.Options`, together with additional
        options:

          ``AccurateDFid`` : Flag determining whether data fidelity term is
          estimated from the value computed in the X update (``False``) or
          is computed after every outer iteration over an X update and a D
          update (``True``), which is slower but more accurate.

          ``DictSize`` : Dictionary size vector.

          ``CBPDN`` : An options class appropriate for the selected
          sparse coding solver class

          ``CCMOD`` : An options class appropriate for the selected
          dictionary update solver class
        """

        defaults = copy.deepcopy(dictlrn.DictLearn.Options.defaults)
        defaults.update({'DictSize': None, 'AccurateDFid': False})

        def __init__(self, opt=None, xmethod=None, dmethod=None):
            """
            Valid values for parameters ``xmethod`` and ``dmethod`` are
            documented in functions :func:`.ConvBPDN` and
            :func:`.ConvCnstrMOD` respectively.
            """

            if xmethod is None:
                xmethod = 'admm'
            if dmethod is None:
                dmethod = 'pgm'
            self.xmethod = xmethod
            self.dmethod = dmethod

            self.defaults.update(
                {'CBPDN': ConvBPDNOptionsDefaults(xmethod),
                 'CCMOD': ConvCnstrMODOptionsDefaults(dmethod)})

            # Initialisation of CBPDN and CCMOD keys here is required to
            # ensure that the corresponding options have types appropriate
            # for classes in the cbpdn and ccmod modules, and are not just
            # standard entries in the parent option tree
            dictlrn.DictLearn.Options.__init__(self, {
                'CBPDN': ConvBPDNOptions(self.defaults['CBPDN'],
                                         method=xmethod),
                'CCMOD': ConvCnstrMODOptions(self.defaults['CCMOD'],
                                             method=dmethod)})

            if opt is None:
                opt = {}
            self.update(opt)


    def __init__(self, D0, S, lmbda=None, opt=None, xmethod=None,
                 dmethod=None, dimK=1, dimN=2):
        """
        |

        **Call graph**

        .. image:: ../_static/jonga/cbpdndl_init.svg
           :width: 20%
           :target: ../_static/jonga/cbpdndl_init.svg

        |


        Parameters
        ----------
        D0 : array_like
          Initial dictionary array
        S : array_like
          Signal array
        lmbda : float
          Regularisation parameter
        opt : :class:`ConvBPDNDictLearn.Options` object
          Algorithm options
        xmethod : string, optional (default 'admm')
          String selecting sparse coding solver. Valid values are
          documented in function :func:`.ConvBPDN`.
        dmethod : string, optional (default 'pgm')
          String selecting dictionary update solver. Valid values are
          documented in function :func:`.ConvCnstrMOD`.
        dimK : int, optional (default 1)
          Number of signal dimensions. If there is only a single input
          signal (e.g. if `S` is a 2D array representing a single image)
          `dimK` must be set to 0.
        dimN : int, optional (default 2)
          Number of spatial/temporal dimensions
        """

        if opt is None:
            opt = ConvBPDNDictLearn.Options(xmethod=xmethod,
                                            dmethod=dmethod)
        if xmethod is None:
            xmethod = opt.xmethod
        if dmethod is None:
            dmethod = opt.dmethod
        if opt.xmethod != xmethod or opt.dmethod != dmethod:
            raise ValueError('Parameters xmethod and dmethod must have the '
                             'same values used to initialise the Options '
                             'object')
        self.opt = opt
        self.xmethod = xmethod
        self.dmethod = dmethod

        # Get dictionary size
        if self.opt['DictSize'] is None:
            dsz = D0.shape
        else:
            dsz = self.opt['DictSize']

        # Construct object representing problem dimensions
        cri = cr.CDU_ConvRepIndexing(dsz, S, dimK, dimN)

        # Normalise dictionary
        D0 = cr.Pcn(D0, dsz, cri.Nv, dimN, cri.dimCd, crp=True,
                    zm=opt['CCMOD', 'ZeroMean'])

        # Modify D update options to include initial value for Y
        optname = 'X0' if dmethod == 'pgm' else 'Y0'
        opt['CCMOD'].update({optname: cr.zpad(
            cr.stdformD(D0, cri.Cd, cri.M, dimN), cri.Nv)})

        # Create X update object
        xstep = ConvBPDN(D0, S, lmbda, opt['CBPDN'], method=xmethod,
                         dimK=dimK, dimN=dimN)

        # Create D update object
        dstep = ConvCnstrMOD(None, S, dsz, opt['CCMOD'], method=dmethod,
                             dimK=dimK, dimN=dimN)

        # Configure iteration statistics reporting
        isc = dictlrn.IterStatsConfig(
            isfld=dc.isfld(xmethod, dmethod, opt),
            isxmap=dc.isxmap(xmethod, opt), isdmap=dc.isdmap(dmethod),
            evlmap=dc.evlmap(opt['AccurateDFid']),
            hdrtxt=dc.hdrtxt(xmethod, dmethod, opt),
            hdrmap=dc.hdrmap(xmethod, dmethod, opt),
            fmtmap={'It_X': '%4d', 'It_D': '%4d'})

        # Call parent constructor
        super(ConvBPDNDictLearn, self).__init__(xstep, dstep, opt, isc)


    def getdict(self, crop=True):
        """Get final dictionary. If ``crop`` is ``True``, apply
        :func:`.cnvrep.bcrop` to returned array.
        """

        return self.dstep.getdict(crop=crop)


    def reconstruct(self, D=None, X=None):
        """Reconstruct representation."""

        if D is None:
            D = self.getdict(crop=False)
        if X is None:
            X = self.getcoef()
        Df = rfftn(D, self.xstep.cri.Nv, self.xstep.cri.axisN)
        Xf = rfftn(X, self.xstep.cri.Nv, self.xstep.cri.axisN)
        DXf = inner(Df, Xf, axis=self.xstep.cri.axisM)
        return irfftn(DXf, self.xstep.cri.Nv, self.xstep.cri.axisN)


    def evaluate(self):
        """Evaluate functional value of previous iteration."""

        if self.opt['AccurateDFid']:
            if self.dmethod == 'pgm':
                D = self.dstep.getdict(crop=False)
            else:
                D = self.dstep.var_y()
            if self.xmethod == 'pgm':
                X = self.xstep.getcoef()
            else:
                X = self.xstep.var_y()
            Df = rfftn(D, self.xstep.cri.Nv, self.xstep.cri.axisN)
            Xf = rfftn(X, self.xstep.cri.Nv, self.xstep.cri.axisN)
            Sf = self.xstep.Sf
            Ef = inner(Df, Xf, axis=self.xstep.cri.axisM) - Sf
            dfd = rfl2norm2(Ef, self.xstep.S.shape,
                            axis=self.xstep.cri.axisN) / 2.0
            rl1 = np.sum(np.abs(X))
            return dict(DFid=dfd, RegL1=rl1,
                        ObjFun=dfd + self.xstep.lmbda * rl1)
        else:
            return None
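# ---------------------------------------------------------------------------
# Minimal usage sketch (not part of the original module): learn a small
# dictionary from random training data. The shapes, 'MaxMainIter' and lmbda
# values are illustrative only; see the SPORCO example scripts for realistic
# settings.
if __name__ == '__main__':
    np.random.seed(12345)
    S = np.random.randn(32, 32, 4)    # four 32x32 training images (dimK=1)
    D0 = np.random.randn(8, 8, 16)    # sixteen initial 8x8 filters
    opt = ConvBPDNDictLearn.Options({'MaxMainIter': 10})
    d = ConvBPDNDictLearn(D0, S, lmbda=0.1, opt=opt)
    d.solve()                         # solve() is inherited from DictLearn
    print('Learned dictionary shape:', d.getdict().shape)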
bsd-3-clause
8,305,897,426,569,706,000
34.498092
77
0.626257
false
3.833677
false
false
false
kism/RViProgramLauncher
viinputdaemon.py
1
18123
# Input Daemon for the Visually Impaired
# For use with a device that outputs serial

import uinput #interface between python and the uinput kernel module
import time #for time.sleep()
import serial #the keyboard this program interfaces with uses serial
import os
import sys

# Easier debugging :^)
class termcolour:
    PINK = '\033[95m'
    GREEN = '\033[92m'
    YELLOW = '\033[93m'
    WHITE = '\033[0m'

# Figure out what to do on the keypresses
def sendLetter(letter):
    global caps
    global numb
    print termcolour.GREEN + 'Sent ASCII Char:' + termcolour.WHITE
    if numb == True:
        if letter == 'KEY_A': device.emit_click(uinput.KEY_1)
        if letter == 'KEY_B': device.emit_click(uinput.KEY_2)
        if letter == 'KEY_C': device.emit_click(uinput.KEY_3)
        if letter == 'KEY_D': device.emit_click(uinput.KEY_4)
        if letter == 'KEY_E': device.emit_click(uinput.KEY_5)
        if letter == 'KEY_F': device.emit_click(uinput.KEY_6)
        if letter == 'KEY_G': device.emit_click(uinput.KEY_7)
        if letter == 'KEY_H': device.emit_click(uinput.KEY_8)
        if letter == 'KEY_I': device.emit_click(uinput.KEY_9)
        if letter == 'KEY_J': device.emit_click(uinput.KEY_0)
    else:
        if caps == 0:
            device.emit_click(getattr(uinput,letter))
        if caps == 1:
            caps = 0
            device.emit_combo([
                uinput.KEY_LEFTSHIFT,
                getattr(uinput,letter),
                ])
        if caps == 2:
            device.emit_combo([
                uinput.KEY_LEFTSHIFT,
                getattr(uinput,letter),
                ])

def f1(inProgram):
    print termcolour.PINK + 'F1 Pressed' + termcolour.WHITE
    print termcolour.GREEN + 'Program:' + termcolour.WHITE, inProgram
    if inProgram == 'viui':
        # Open menu item 1
        device.emit_click(uinput.KEY_1)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_ENTER)
        time.sleep(0.01)
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'F1'
    if inProgram == 'nano':
        # Open Help
        device.emit_combo([
            uinput.KEY_LEFTCTRL,
            uinput.KEY_G,
            ])
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Help'
    if inProgram == 'newsbeuter':
        # Open Help
        device.emit_combo([
            uinput.KEY_LEFTSHIFT,
            uinput.KEY_SLASH,
            ])
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Help'
    if inProgram == 'alpine':
        # Open Help
        device.emit_combo([
            uinput.KEY_LEFTSHIFT,
            uinput.KEY_SLASH,
            ])
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Help'
    if inProgram == 'links':
        # Open Help
        device.emit_click(uinput.KEY_F9)
        time.sleep(0.1)
        device.emit_click(uinput.KEY_H)
        time.sleep(0.1)
        device.emit_click(uinput.KEY_M)
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Help'
    if inProgram == 'irssi':
        # Open Help
        device.emit_click(uinput.KEY_SLASH)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_H)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_E)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_L)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_P)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_ENTER)
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Help'
    if inProgram == 'zsh':
        # Go to home directory
        device.emit_combo([
            uinput.KEY_LEFTCTRL,
            uinput.KEY_C,
            ])
        device.emit_click(uinput.KEY_C)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_D)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_ENTER)
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Home'
    if inProgram == 'man':
        # Help
        device.emit_click(uinput.KEY_H)
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Help'

def f2(inProgram):
    print termcolour.PINK + 'F2 Pressed' + termcolour.WHITE
    print termcolour.GREEN + 'Program:' + termcolour.WHITE, inProgram
    if inProgram == 'viui':
        # Open menu item 2
        device.emit_click(uinput.KEY_2)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_ENTER)
        time.sleep(0.01)
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'F2'
    if inProgram == 'nano':
        # Open File
        device.emit_combo([
            uinput.KEY_LEFTCTRL,
            uinput.KEY_R,
            ])
        time.sleep(0.1)
        device.emit_combo([
            uinput.KEY_LEFTCTRL,
            uinput.KEY_T,
            ])
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Open File'
    if inProgram == 'newsbeuter':
        # Open Entry
        device.emit_click(uinput.KEY_ENTER)
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Open Entry'
    if inProgram == 'alpine':
        # Open Index
        device.emit_click(uinput.KEY_I)
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Index'
    if inProgram == 'links':
        # Go to
        device.emit_click(uinput.KEY_G)
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Go to'
    if inProgram == 'irssi':
        # Connect
        device.emit_click(uinput.KEY_SLASH)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_C)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_O)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_N)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_N)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_E)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_C)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_T)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_SPACE)
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Connect'
    if inProgram == 'zsh':
        # Use the mplayer alias
        device.emit_click(uinput.KEY_P)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_L)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_A)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_Y)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_SPACE)
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Play'

def f3(inProgram):
    print termcolour.PINK + 'F3 Pressed' + termcolour.WHITE
    print termcolour.GREEN + 'Program:' + termcolour.WHITE, inProgram
    if inProgram == 'viui':
        # Open menu item 3
        device.emit_click(uinput.KEY_3)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_ENTER)
        time.sleep(0.01)
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'F3'
    if inProgram == 'nano':
        # Save file
        device.emit_combo([
            uinput.KEY_LEFTCTRL,
            uinput.KEY_O,
            ])
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Save File'
    if inProgram == 'newsbeuter':
        # Save entry to file
        device.emit_click(uinput.KEY_S)
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Save Story'
    if inProgram == 'alpine':
        # Compose
        device.emit_click(uinput.KEY_C)
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Compose'
    if inProgram == 'links':
        # Open menu
        device.emit_click(uinput.KEY_F9)
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Menu'
    if inProgram == 'irssi':
        # Join
        device.emit_click(uinput.KEY_SLASH)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_J)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_O)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_I)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_N)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_SPACE)
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Join'

def f4(inProgram):
    print termcolour.PINK + 'F4 Pressed' + termcolour.WHITE
    print termcolour.GREEN + 'Program:' + termcolour.WHITE, inProgram
    if inProgram == 'viui':
        # Open menu item 4
        device.emit_click(uinput.KEY_4)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_ENTER)
        time.sleep(0.01)
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'F4'
    if inProgram == 'nano':
        # Cancel
        device.emit_combo([
            uinput.KEY_LEFTCTRL,
            uinput.KEY_C,
            ])
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Cancel'
    if inProgram == 'alpine':
        # Back
        device.emit_click(uinput.KEY_M)
        time.sleep(0.1)
        device.emit_click(uinput.KEY_COMMA)
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Back'
    if inProgram == 'links':
        # Cancel
        device.emit_click(uinput.KEY_ESC)
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Cancel'
    if inProgram == 'irssi':
        # Part
        device.emit_click(uinput.KEY_SLASH)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_P)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_A)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_R)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_T)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_ENTER)
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Part'
    if inProgram == 'zsh':
        # Cancel
        device.emit_combo([
            uinput.KEY_LEFTCTRL,
            uinput.KEY_C,
            ])
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Cancel'

def f5(inProgram):
    print termcolour.PINK + 'F5 Pressed' + termcolour.WHITE
    print termcolour.GREEN + 'Program:' + termcolour.WHITE, inProgram
    if inProgram == 'viui':
        # Open menu item 5
        device.emit_click(uinput.KEY_5)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_ENTER)
        time.sleep(0.01)
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'F5'
    if inProgram == 'nano':
        # Cut
        device.emit_combo([
            uinput.KEY_LEFTCTRL,
            uinput.KEY_K,
            ])
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Cut'
    if inProgram == 'newsbeuter':
        # Reload
        device.emit_click(uinput.KEY_R)
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Reload'
    if inProgram == 'alpine':
        # Journal
        device.emit_click(uinput.KEY_J)
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Journal'
    if inProgram == 'irssi':
        # Query
        device.emit_click(uinput.KEY_SLASH)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_Q)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_U)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_E)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_R)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_Y)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_SPACE)
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Query'

def f6(inProgram):
    print termcolour.PINK + 'F6 Pressed' + termcolour.WHITE
    print termcolour.GREEN + 'Program:' + termcolour.WHITE, inProgram
    if inProgram == 'viui':
        # Open menu item 6
        device.emit_click(uinput.KEY_6)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_ENTER)
        time.sleep(0.01)
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'F6'
    if inProgram == 'nano':
        # Uncut
        device.emit_combo([
            uinput.KEY_LEFTCTRL,
            uinput.KEY_U,
            ])
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Uncut'
    if inProgram == 'newsbeuter':
        # Open next unread
        device.emit_click(uinput.KEY_N)
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Next Unread'
    if inProgram == 'alpine':
        # Address
        device.emit_click(uinput.KEY_A)
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Address'
    if inProgram == 'irssi':
        # Previous window
        device.emit_combo([
            uinput.KEY_LEFTCTRL,
            uinput.KEY_P,
            ])
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Previous window'

def f7(inProgram):
    print termcolour.PINK + 'F7 Pressed' + termcolour.WHITE
    print termcolour.GREEN + 'Program:' + termcolour.WHITE, inProgram
    if inProgram == 'viui':
        # Open menu item 7
        device.emit_click(uinput.KEY_7)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_ENTER)
        time.sleep(0.01)
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'F7'
    if inProgram == 'nano':
        # Find
        device.emit_combo([
            uinput.KEY_LEFTCTRL,
            uinput.KEY_W,
            ])
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Find'
    if inProgram == 'newsbeuter':
        # Open in browser
        device.emit_click(uinput.KEY_O)
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Open in Browser'
    if inProgram == 'alpine':
        # Setup email
        device.emit_click(uinput.KEY_S)
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Setup'
    if inProgram == 'links':
        # Find on page
        device.emit_click(uinput.KEY_SLASH)
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Find'
    if inProgram == 'irssi':
        # Next window
        device.emit_combo([
            uinput.KEY_LEFTCTRL,
            uinput.KEY_N,
            ])
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Next Window'

def f8(inProgram):
    print termcolour.PINK + 'F8 Pressed' + termcolour.WHITE
    print termcolour.GREEN + 'Program:' + termcolour.WHITE, inProgram
    if inProgram == 'viui':
        # Open menu item 8
        device.emit_click(uinput.KEY_8)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_ENTER)
        time.sleep(0.01)
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'F8'
    if inProgram == 'nano':
        # Exit menu or program
        device.emit_combo([
            uinput.KEY_LEFTCTRL,
            uinput.KEY_X,
            ])
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Quit'
    if inProgram == 'newsbeuter':
        # Quit
        device.emit_click(uinput.KEY_Q)
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Quit'
    if inProgram == 'alpine':
        # Quit
        device.emit_click(uinput.KEY_Q)
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Quit'
    if inProgram == 'links':
        # Quit
        device.emit_click(uinput.KEY_Q)
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Quit'
    if inProgram == 'irssi':
        # Quit
        device.emit_click(uinput.KEY_SLASH)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_Q)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_U)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_I)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_T)
        time.sleep(0.01)
        device.emit_click(uinput.KEY_ENTER)
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Quit'
    if inProgram == 'zsh':
        # Quit
        device.emit_combo([
            uinput.KEY_LEFTCTRL,
            uinput.KEY_D,
            ])
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Quit'
    if inProgram == 'man':
        # Quit
        device.emit_click(uinput.KEY_Q)
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Quit'

# Main Program
print termcolour.PINK + '~ Daemon initialising ~' + termcolour.WHITE

# Check if program was run with an argument
if len(sys.argv) > 1:
    print termcolour.GREEN + 'Argument:' + termcolour.WHITE, str(sys.argv[1])
    program = str(sys.argv[1])
else:
    print termcolour.YELLOW + 'No args, what are you even doing?' + termcolour.WHITE
    program = ''

# Python-uinput is a quality Interface, To find key codes check /usr/include/linux/input.h
device = uinput.Device([
    uinput.KEY_A,
    uinput.KEY_B,
    uinput.KEY_C,
    uinput.KEY_D,
    uinput.KEY_E,
    uinput.KEY_F,
    uinput.KEY_G,
    uinput.KEY_H,
    uinput.KEY_I,
    uinput.KEY_J,
    uinput.KEY_K,
    uinput.KEY_L,
    uinput.KEY_M,
    uinput.KEY_N,
    uinput.KEY_O,
    uinput.KEY_P,
    uinput.KEY_Q,
    uinput.KEY_R,
    uinput.KEY_S,
    uinput.KEY_T,
    uinput.KEY_U,
    uinput.KEY_V,
    uinput.KEY_W,
    uinput.KEY_X,
    uinput.KEY_Y,
    uinput.KEY_Z,
    uinput.KEY_1,
    uinput.KEY_2,
    uinput.KEY_3,
    uinput.KEY_4,
    uinput.KEY_5,
    uinput.KEY_6,
    uinput.KEY_7,
    uinput.KEY_8,
    uinput.KEY_9,
    uinput.KEY_0,
    uinput.KEY_TAB,
    uinput.KEY_ENTER,
    uinput.KEY_SPACE,
    uinput.KEY_DOT,
    uinput.KEY_COMMA,
    uinput.KEY_SLASH,
    uinput.KEY_BACKSLASH,
    uinput.KEY_LEFTCTRL,
    uinput.KEY_LEFTALT,
    uinput.KEY_LEFTSHIFT,
    uinput.KEY_BACKSPACE,
    uinput.KEY_PAGEDOWN,
    uinput.KEY_PAGEUP,
    uinput.KEY_UP,
    uinput.KEY_LEFT,
    uinput.KEY_RIGHT,
    uinput.KEY_DOWN,
    uinput.KEY_ESC,
    uinput.KEY_F1,
    uinput.KEY_F2,
    uinput.KEY_F3,
    uinput.KEY_F4,
    uinput.KEY_F5,
    uinput.KEY_F6,
    uinput.KEY_F7,
    uinput.KEY_F8,
    uinput.KEY_F9,
    uinput.KEY_F10,
    uinput.KEY_F11,
    uinput.KEY_F12,
    uinput.KEY_1,
    uinput.KEY_2,
    uinput.KEY_3,
    uinput.KEY_4,
    uinput.KEY_5,
    uinput.KEY_6,
    uinput.KEY_7,
    uinput.KEY_8,
    uinput.KEY_9,
    uinput.KEY_0,
    ])

# Open serial device
ser = serial.Serial('/dev/ttyUSB0', 115200, timeout = 1)
print termcolour.GREEN + 'Serial device opened:' + termcolour.WHITE, ser.name

# Mad Hacks go here
caps = 0
numb = False
if program == 'newsbeuter':
    time.sleep(2.0)
    device.emit_click(uinput.KEY_R)
    time.sleep(3.0)
    device.emit_click(uinput.KEY_ENTER)

# Polling for input
while 1:
    sbuf = ser.read()
    print 'Buffer Queue =', ser.inWaiting()
    print 'Read =', sbuf

    # All values are in hex, not actual ASCII, lol python
    # Braille Modifier Characters
    if sbuf == '\x01':
        # Caps
        if caps > 1:
            caps = 2
        else:
            caps = caps + 1
        print termcolour.GREEN + 'Caps:' + termcolour.WHITE, caps
    if sbuf == '\x0F':
        # Number
        if numb == True:
            numb = False
        else:
            numb = True
        print termcolour.GREEN + 'Numb:' + termcolour.WHITE, numb

    # Regular Keys
    if sbuf == '\x20': sendLetter('KEY_A')
    if sbuf == '\x30': sendLetter('KEY_B')
    if sbuf == '\x24': sendLetter('KEY_C')
    if sbuf == '\x26': sendLetter('KEY_D')
    if sbuf == '\x22': sendLetter('KEY_E')
    if sbuf == '\x34': sendLetter('KEY_F')
    if sbuf == '\x36': sendLetter('KEY_G')
    if sbuf == '\x32': sendLetter('KEY_H')
    if sbuf == '\x14': sendLetter('KEY_I')
    if sbuf == '\x16': sendLetter('KEY_J')
    if sbuf == '\x28': sendLetter('KEY_K')
    if sbuf == '\x38': sendLetter('KEY_L')
    if sbuf == '\x2C': sendLetter('KEY_M')
    if sbuf == '\x2E': sendLetter('KEY_N')
    if sbuf == '\x2A': sendLetter('KEY_O')
    if sbuf == '\x3C': sendLetter('KEY_P')
    if sbuf == '\x3E': sendLetter('KEY_Q')
    if sbuf == '\x3A': sendLetter('KEY_R')
    if sbuf == '\x1C': sendLetter('KEY_S')
    if sbuf == '\x1E': sendLetter('KEY_T')
    if sbuf == '\x29': sendLetter('KEY_U')
    if sbuf == '\x39': sendLetter('KEY_V')
    if sbuf == '\x17': sendLetter('KEY_W')
    if sbuf == '\x2D': sendLetter('KEY_X')
    if sbuf == '\x2F': sendLetter('KEY_Y')
    if sbuf == '\x2B': sendLetter('KEY_Z')
    if sbuf == '\x10': device.emit_click(uinput.KEY_COMMA)
    if sbuf == '\x13': device.emit_click(uinput.KEY_DOT)
    if sbuf == '\x0C': device.emit_click(uinput.KEY_SLASH)
    if sbuf == '\x60':
        device.emit_click(uinput.KEY_SPACE)
        caps = 0
        numb = 0

    # IBM Compatible PC Keys
    if sbuf == '\x40': device.emit_click(uinput.KEY_ESC)
    if sbuf == '\x41': device.emit_click(uinput.KEY_UP)
    if sbuf == '\x42': device.emit_click(uinput.KEY_LEFT)
    if sbuf == '\x43': device.emit_click(uinput.KEY_RIGHT)
    if sbuf == '\x44': device.emit_click(uinput.KEY_DOWN)
    if sbuf == '\x45': device.emit_click(uinput.KEY_ENTER)
    if sbuf == '\x46': device.emit_click(uinput.KEY_BACKSPACE)
    if sbuf == '\x47': device.emit_click(uinput.KEY_PAGEUP)
    if sbuf == '\x48': device.emit_click(uinput.KEY_PAGEDOWN)

    # Macro Keys
    if sbuf == '\x81': #129
        f1(program)
    if sbuf == '\x82': #130
        f2(program)
    if sbuf == '\x83': #131
        f3(program)
    if sbuf == '\x84': #132
        f4(program)
    if sbuf == '\x85': #133
        f5(program)
    if sbuf == '\x86': #134
        f6(program)
    if sbuf == '\x87': #135
        f7(program)
    if sbuf == '\x88': #136
        f8(program)
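# ---------------------------------------------------------------------------
# Serial protocol quick reference (a summary of the handlers above, no new
# behaviour; byte values are the raw codes sent by the Braille keyboard):
#   0x01        caps modifier (a second press latches caps lock)
#   0x0F        number modifier (the next a..j letters become 1..0)
#   0x60        space, which also resets the caps and number modifiers
#   0x40-0x48   PC keys: Esc, Up, Left, Right, Down, Enter, Backspace,
#               PageUp, PageDown
#   0x81-0x88   macro keys F1..F8, dispatched according to `program`
#   other bytes six-bit Braille cell codes mapped to a..z and , . /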
mit
-6,499,945,008,790,168,000
27.81399
90
0.674668
false
2.501795
false
false
false
JNeiger/robocup-software
soccer/gameplay/plays/no_opponents/offensive_pivot_kick.py
1
1448
import play
import behavior
import robocup
import skills.line_kick
import skills.pivot_kick
import tactics.defense
import main
import constants
import enum
import role_assignment


class OffensivePivotKick(play.Play):
    def __init__(self):
        super().__init__(continuous=False)

        self.add_transition(behavior.Behavior.State.start,
                            behavior.Behavior.State.running, lambda: True,
                            'immediately')

        self.add_transition(
            behavior.Behavior.State.running,
            behavior.Behavior.State.completed,
            lambda: self.has_subbehavior_with_name('kicker') and self.subbehavior_with_name('kicker').is_done_running(),
            "kicker finished")

    def on_enter_running(self):
        kicker = skills.pivot_kick.PivotKick()
        kicker.target = constants.Field.TheirGoalSegment
        kicker.aim_params = {'error_threshold': .01,
                             'desperate_timeout': 10,
                             'max_steady_ang_vel': 4}
        self.add_subbehavior(kicker, 'kicker', required=True, priority=100)

    def on_exit_running(self):
        self.remove_subbehavior('kicker')

    @classmethod
    def score(cls):
        gs = main.game_state()
        # Currently has lower priority than basic_122. Maybe add a check to
        # see if we have all our robots?
        return 15 if gs.is_playing() else float("inf")

    @classmethod
    def handles_goalie(self):
        return False
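# Context note (an assumption based on the comment in score() above, not part
# of the original file): in this gameplay framework a play with a lower
# score() is preferred, so returning 15 keeps this play available but below
# plays that score lower (such as basic_122), while float("inf") effectively
# disables it whenever the game is not in the playing state.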
apache-2.0
-2,246,814,608,390,388,200
31.177778
120
0.627072
false
3.934783
false
false
false
FederatedAI/FATE
examples/benchmark_quality/hetero_nn/fate-hetero_nn.py
1
6666
#
#  Copyright 2019 The FATE Authors. All Rights Reserved.
#
#  Licensed under the Apache License, Version 2.0 (the "License");
#  you may not use this file except in compliance with the License.
#  You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
#  Unless required by applicable law or agreed to in writing, software
#  distributed under the License is distributed on an "AS IS" BASIS,
#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#  See the License for the specific language governing permissions and
#  limitations under the License.
#

import argparse

from tensorflow.keras import initializers
from tensorflow.keras import optimizers
from tensorflow.keras.layers import Dense

from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataIO
from pipeline.component import Evaluation
from pipeline.component import HeteroNN
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data, Model
from pipeline.utils.tools import load_job_config, JobConfig
from pipeline.runtime.entity import JobParameters

from federatedml.evaluation.metrics import classification_metric
from fate_test.utils import extract_data, parse_summary_result


def main(config="../../config.yaml", param="./hetero_nn_breast_config.yaml", namespace=""):
    # obtain config
    if isinstance(config, str):
        config = load_job_config(config)
    if isinstance(param, str):
        param = JobConfig.load_from_file(param)

    parties = config.parties
    guest = parties.guest[0]
    host = parties.host[0]
    backend = config.backend
    work_mode = config.work_mode

    guest_train_data = {"name": param["guest_table_name"], "namespace": f"experiment{namespace}"}
    host_train_data = {"name": param["host_table_name"], "namespace": f"experiment{namespace}"}

    pipeline = PipeLine().set_initiator(role='guest', party_id=guest).set_roles(guest=guest, host=host)

    reader_0 = Reader(name="reader_0")
    reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
    reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)

    dataio_0 = DataIO(name="dataio_0")
    dataio_0.get_party_instance(role='guest', party_id=guest).component_param(with_label=True)
    dataio_0.get_party_instance(role='host', party_id=host).component_param(with_label=False)

    intersection_0 = Intersection(name="intersection_0")

    hetero_nn_0 = HeteroNN(name="hetero_nn_0", epochs=param["epochs"],
                           interactive_layer_lr=param["learning_rate"], batch_size=param["batch_size"],
                           early_stop="diff")
    hetero_nn_0.add_bottom_model(Dense(units=param["bottom_layer_units"], input_shape=(10,), activation="tanh",
                                       kernel_initializer=initializers.RandomUniform(minval=-1, maxval=1, seed=123)))
    hetero_nn_0.set_interactve_layer(
        Dense(units=param["interactive_layer_units"], input_shape=(param["bottom_layer_units"],),
              activation="relu",
              kernel_initializer=initializers.RandomUniform(minval=-1, maxval=1, seed=123)))
    hetero_nn_0.add_top_model(
        Dense(units=param["top_layer_units"], input_shape=(param["interactive_layer_units"],),
              activation=param["top_act"],
              kernel_initializer=initializers.RandomUniform(minval=-1, maxval=1, seed=123)))
    opt = getattr(optimizers, param["opt"])(lr=param["learning_rate"])
    hetero_nn_0.compile(optimizer=opt, metrics=param["metrics"], loss=param["loss"])

    hetero_nn_1 = HeteroNN(name="hetero_nn_1")

    if param["loss"] == "categorical_crossentropy":
        eval_type = "multi"
    else:
        eval_type = "binary"
    evaluation_0 = Evaluation(name="evaluation_0", eval_type=eval_type)

    pipeline.add_component(reader_0)
    pipeline.add_component(dataio_0, data=Data(data=reader_0.output.data))
    pipeline.add_component(intersection_0, data=Data(data=dataio_0.output.data))
    pipeline.add_component(hetero_nn_0, data=Data(train_data=intersection_0.output.data))
    pipeline.add_component(hetero_nn_1, data=Data(test_data=intersection_0.output.data),
                           model=Model(hetero_nn_0.output.model))
    pipeline.add_component(evaluation_0, data=Data(data=hetero_nn_0.output.data))

    pipeline.compile()
    job_parameters = JobParameters(backend=backend, work_mode=work_mode)
    pipeline.fit(job_parameters)

    nn_0_data = pipeline.get_component("hetero_nn_0").get_output_data().get("data")
    nn_1_data = pipeline.get_component("hetero_nn_1").get_output_data().get("data")
    nn_0_score = extract_data(nn_0_data, "predict_result")
    nn_0_label = extract_data(nn_0_data, "label")
    nn_1_score = extract_data(nn_1_data, "predict_result")
    nn_1_label = extract_data(nn_1_data, "label")
    nn_0_score_label = extract_data(nn_0_data, "predict_result", keep_id=True)
    nn_1_score_label = extract_data(nn_1_data, "predict_result", keep_id=True)
    metric_summary = parse_summary_result(pipeline.get_component("evaluation_0").get_summary())
    if eval_type == "binary":
        metric_nn = {
            "score_diversity_ratio": classification_metric.Distribution.compute(nn_0_score_label, nn_1_score_label),
            "ks_2samp": classification_metric.KSTest.compute(nn_0_score, nn_1_score),
            "mAP_D_value": classification_metric.AveragePrecisionScore().compute(nn_0_score, nn_1_score,
                                                                                 nn_0_label, nn_1_label)}
        metric_summary["distribution_metrics"] = {"hetero_nn": metric_nn}
    elif eval_type == "multi":
        metric_nn = {
            "score_diversity_ratio": classification_metric.Distribution.compute(nn_0_score_label, nn_1_score_label)}
        metric_summary["distribution_metrics"] = {"hetero_nn": metric_nn}

    data_summary = {"train": {"guest": guest_train_data["name"], "host": host_train_data["name"]},
                    "test": {"guest": guest_train_data["name"], "host": host_train_data["name"]}
                    }

    return data_summary, metric_summary


if __name__ == "__main__":
    parser = argparse.ArgumentParser("BENCHMARK-QUALITY PIPELINE JOB")
    parser.add_argument("-config", type=str, help="config file")
    parser.add_argument("-param", type=str, help="config file for params")
    args = parser.parse_args()
    if args.config is not None:
        main(args.config, args.param)
    else:
        main()
apache-2.0
4,090,263,162,999,835,600
46.276596
117
0.671467
false
3.547632
true
false
false
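main() above reads a fixed set of keys from the param file. A hypothetical minimal configuration, shown here as the equivalent Python dict (only the key names are taken from the code; the values are illustrative assumptions):

param = {
    "guest_table_name": "breast_hetero_guest",
    "host_table_name": "breast_hetero_host",
    "epochs": 20,
    "learning_rate": 0.01,
    "batch_size": -1,
    "bottom_layer_units": 10,
    "interactive_layer_units": 10,
    "top_layer_units": 1,
    "top_act": "sigmoid",
    "opt": "SGD",
    "loss": "binary_crossentropy",
    "metrics": ["AUC"],
}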
wadobo/GECO
src/gecod/gecod/xmlrpc_frontend.py
1
3242
#!/usr/bin/python
# -*- coding: utf-8 -*-

'''
Provides an xmlrpc frontend to the gecod backend
'''

import backend
import secure_xmlrpc as sxmlrpc

HOST = 'localhost'
PORT = 4343
DATABASE = 'sqlite:///database.sqlite'
KEYFILE = 'certs/key.pem'
CERTFILE = 'certs/cert.pem'


def parseconfig(configfile):
    global HOST, PORT, DATABASE, KEYFILE, CERTFILE
    options = open(configfile).readlines()
    for opt in options:
        k, v = opt.split('=')
        k = k.strip().lower()
        v = v.strip()
        if k == 'host':
            HOST = v
        elif k == 'port':
            PORT = int(v)
        elif k == 'database':
            DATABASE = v
        elif k == 'keyfile':
            KEYFILE = v
        elif k == 'certfile':
            CERTFILE = v


class frontend:
    def __init__(self):
        pass

    def auth(self, user, password):
        '''
        Return the cookie.
        '''
        return backend.auth(user, 'password', password=password)

    def logout(self, cookie):
        backend.logout(cookie)

    def register(self, user, password):
        backend.register(user, password)

    def unregister(self, cookie):
        backend.unregister(cookie)

    def change_password(self, cookie, new_password):
        backend.change_password(cookie, new_password)

    def change_attr(self, cookie, name, args):
        '''
        args is a dict with possible keys:
        type, description, account, expiration, password

        expiration must be a datetime
        '''
        backend.change_attr(cookie, name, **args)

    def check_user_name(self, name):
        return backend.check_user_name(name)

    def set_password(self, cookie, name, password, args):
        '''
        args is a dict with possible keys:
        type, description, account, expiration

        expiration must be an integer (days)
        '''
        backend.set_password(cookie, name, password, **args)

    def del_password(self, cookie, name):
        backend.del_password(cookie, name)

    def get_password(self, cookie, name):
        p = backend.get_password(cookie, name)
        return p

    def get_passwords(self, cookie, args):
        '''
        args is a dict with possible keys:
        name, type, updated, expiration, account
        '''
        p = backend.get_passwords_by(cookie, **args)
        return [i for i in p]

    def get_all_passwords(self, cookie):
        '''
        Return all passwords of user
        '''
        p = backend.get_all_passwords(cookie)
        return [i for i in p]

    def export(self, cookie):
        '''
        Returns a string with all passwords ready to import
        '''
        return backend.export(cookie)

    def restore(self, cookie, data):
        '''
        Restore data from a backup made with export
        '''
        backend.restore(cookie, data)


def start_server():
    sxmlrpc.EasyServer(HOST, PORT, frontend())


def main(config='gecod-xmlrpc.conf'):
    parseconfig(config)
    backend.DATABASE = DATABASE
    sxmlrpc.KEYFILE = KEYFILE
    sxmlrpc.CERTFILE = CERTFILE

    try:
        start_server()
    except KeyboardInterrupt:
        print "Closing"


if __name__ == '__main__':
    main()
gpl-3.0
-6,722,768,706,466,853,000
22.664234
60
0.572795
false
4.124682
true
false
false
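A client would drive this frontend over XML-RPC. A minimal sketch using Python 2's standard xmlrpclib (the host, credentials and password attributes are placeholders):

import xmlrpclib

# Matches the default HOST/PORT above; a real deployment serves TLS via the configured certs.
server = xmlrpclib.ServerProxy('https://localhost:4343')
cookie = server.auth('alice', 'secret')
server.set_password(cookie, 'mail', 'hunter2', {'type': 'imap', 'account': 'alice'})
print server.get_password(cookie, 'mail')
server.logout(cookie)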
prior/webinars
webinars_web/webinars/views/events.py
1
10543
from operator import attrgetter
from django.http import HttpResponse,HttpResponseRedirect,HttpResponseNotFound,HttpResponseForbidden
from django.shortcuts import render_to_response
from django.views.decorators.http import require_GET, require_POST
from webinars_web.webinars.forms.event import EventForm
from marketplace.decorators import marketplace
from sanetime import time
from django.conf import settings
import hapi.leads
#from django.core import management
from django.template import RequestContext
from webinars_web.webinars import utils
import csv
import logging


def bucket_events(hub):
    from webinars_web.webinars import models as wm
    events = wm.Event.objects.filter(
        account__hub=hub, deleted_at__isnull=True).select_related('current_sync','account').extra(
        select={'registrant_count': 'SELECT COUNT(*) FROM webinars_registrant WHERE webinars_registrant.event_id=webinars_event.id'}).extra(
        select={'attendant_count': 'SELECT COUNT(*) FROM webinars_registrant WHERE webinars_registrant.event_id=webinars_event.id AND started_at IS NOT NULL'})
    events = sorted(events, key=attrgetter('starts_at'), reverse=True)

    event_ids_form_ids = [(ef.event_id, ef.cms_form_id) for ef in wm.EventForm.objects.filter(event__in=wm.Event.objects.filter(account__hub=hub, deleted_at__isnull=True), cms_form__is_sync_target=False)]
    event_id_to_form_ids_map = {}
    for event_id, form_id in event_ids_form_ids:
        event_id_to_form_ids_map.setdefault(event_id,[]).append(form_id)
    form_ids_lps = [(lp.cms_form_id, lp) for lp in wm.LandingPage.objects.filter(cms_form__in=set(ef[1] for ef in event_ids_form_ids))]
    form_id_to_lp_map = {}
    for form_id, lp in form_ids_lps:
        form_id_to_lp_map.setdefault(form_id,[]).append(lp)

    for event in events:  # TODO: this is creating a 2N+1 query situation -- need to refactor!
        event.landing_pages = []
        for form_id in event_id_to_form_ids_map.get(event.id,[]):
            event.landing_pages.extend(form_id_to_lp_map[form_id])

    now = time()
    return utils.partition(events, lambda e: (e.ended_at < now), [True,False])


@marketplace
@require_GET
def _list(request, which):
    # returns the partial list requested (future or past) -- used by ajax table replace
    from webinars_web.webinars import models as wm
    hub = wm.Hub.ensure(request.marketplace.hub_id)
    buckets = bucket_events(hub)
    is_future = which.lower()=='future'
    is_past = not is_future
    return render_to_response('events/_list.djml', {
        'events': buckets[is_past],
        'past': is_past,
        'empty_callout': is_future
    }, context_instance=RequestContext(request))


@marketplace
@require_GET
def list(request):
    from webinars_web.webinars import models as wm
    hub = wm.Hub.ensure(request.marketplace.hub_id, select_related=['current_sync','last_sync'])
    buckets = bucket_events(hub)
    return render_to_response('events/list.djml', {
        'future_events': buckets[False],
        'past_events': buckets[True],
        'hub': hub,
    }, context_instance=RequestContext(request))


def filter_registrants(registrants, segment):
    if segment == 'noshows':
        return [r for r in registrants if not r.get('started_at')]
    elif segment == 'attendees':
        return [r for r in registrants if r.get('started_at')]
    else:
        return registrants


@marketplace
@require_GET
def export(request, event_id, segment):
    if segment not in ['noshows', 'attendees', 'registrants']:
        return HttpResponseForbidden()
    attrs = ['first_name', 'last_name', 'email']
    from webinars_web.webinars import models as wm
    registrant_set = wm.Event.objects.filter(pk=event_id)[0].registrant_set.values()
    logging.debug('CSVDEBUG: event=%s, segment=%s' % (event_id, segment))
    name = '%s%s' % (segment, event_id)
    logging.debug('CSVDEBUG: filename=%s' % name)
    people = filter_registrants(registrant_set, segment)

    response = HttpResponse(mimetype='text/csv')
    response['Content-Disposition'] = 'attachment; filename=%s.csv' % name
    writer = csv.writer(response)
    writer.writerow(['FirstName', 'LastName', 'Email'])
    for p in people:
        writer.writerow([p.get(attr).encode('utf-8') for attr in attrs])
    return response


def get_fresh_last_modified_at(hub, guid):
    leads_client = hapi.leads.LeadsClient(settings.HUBSPOT_API_KEY, hub_id=hub.id, env=settings.API_ENV)
    leads = leads_client.get_leads(
        time_pivot = 'lastModifiedAt',
        sort = 'lastModifiedAt',
        dir = 'desc',
        max = 1,
        form_guid = guid)
    if leads:
        return time(us=leads[0]['lastModifiedAt']*1000 + 1000)
    else:
        return time(0)


def new_or_edit(request, event_id=None):
    from webinars_web.webinars import models as wm
    hub = wm.Hub.ensure(request.marketplace.hub_id)
    kwargs = {'hub':hub}
    old_sync_leads_for_all_time = None
    if event_id:
        kwargs['instance']=wm.Event.objects.select_related('account').get(pk=event_id)
        old_sync_leads_for_all_time = kwargs['instance'].sync_leads_for_all_time
    if request.method == 'POST':  # If the form has been submitted...
        form = EventForm(request.POST, **kwargs)  # A form bound to the POST data
        if form.is_valid():  # All validation rules pass
            # Process the data in form.cleaned_data
            # ...
            event = form.save(commit=False)
            tz = kwargs.get('instance') and kwargs['instance'].starts_at.tz or hub.timezone
            event.starts_at = time(form.cleaned_data['starts_at_ndt'], tz)
            event.duration = int(form.cleaned_data['duration'])
            event.ensure_hashcode()
            event.save()
            old_cms_forms = dict((cf.guid, cf) for cf in event.cms_forms.all())
            new_cms_forms = dict((cf.guid, cf) for cf in form.cleaned_data['cms_forms'])
            for guid in (set(new_cms_forms) - set(old_cms_forms)):
                wm.EventForm.objects.create(
                    cms_form=new_cms_forms[guid], event=event,
                    last_last_modified_at = not event.sync_leads_for_all_time and get_fresh_last_modified_at(hub, guid) or 0,
                    converted_at_cutoff = not event.sync_leads_for_all_time and time() or 0)
            for guid in (set(old_cms_forms) - set(new_cms_forms)):
                wm.EventForm.objects.filter(cms_form=old_cms_forms[guid], event=event).delete()
            if old_sync_leads_for_all_time is not None and old_sync_leads_for_all_time != event.sync_leads_for_all_time:
                for event_form in event.eventform_set.all():
                    if event.sync_leads_for_all_time:
                        event_form.last_last_modified_at = 0
                        event_form.converted_at_cutoff = 0
                    # doing the else doesn't really make sense cuz we could've already been syncing before
                    event_form.save()
            return HttpResponseRedirect('%sevents'%request.marketplace.base_url)  # Redirect after POST
    else:
        wm.CmsForm.sync(hub)
        form = EventForm(**kwargs)  # An unbound form

    return render_to_response('events/%s.djml'%(event_id and 'edit' or 'new'), {
        'form': form,
    }, context_instance=RequestContext(request))


@marketplace
def new(request):
    return new_or_edit(request)


@marketplace
def edit(request, event_id):
    return new_or_edit(request, event_id)


@marketplace
@require_POST
def destroy(request, event_id):
    from webinars_web.webinars import models as wm
    try:
        event = wm.Event.objects.get(pk=event_id)
    except Exception:
        return HttpResponseNotFound()
    if event.account.hub_id != request.marketplace.hub_id:
        return HttpResponseForbidden()
    event.deleted_at = time()
    event.save()
    return HttpResponse()


@marketplace
def show(request, event_id):
    from webinars_web.webinars import models as wm
    hub = wm.Hub.ensure(request.marketplace.hub_id)
    try:
        event = wm.Event.objects.select_related('account','account__hub').get(pk=event_id)
    except:
        return HttpResponseNotFound()
    if event.account.hub_id != hub.id:
        return HttpResponseForbidden()
    registrants = event.registrant_set.select_related('cms_form').extra(
        select = { 'durationx': 'IF(ISNULL(stopped_at) OR ISNULL(started_at), NULL, stopped_at-started_at)' },
        order_by = ['-durationx']
    )
    for r in registrants:
        r.event = event
    lps = [lp for lp in wm.LandingPage.objects.filter(cms_form__event=event)]
    forms_to_lps = {}
    for lp in lps:
        forms_to_lps.setdefault(lp.cms_form.guid,[]).append(lp)
    for r in registrants:
        if r.effective_duration:
            if not r.cms_form or r.cms_form.is_sync_target:
                r.landing_pages = []
            else:
                r.landing_pages = forms_to_lps[r.cms_form.guid]
    now = time()
    if event._time_ended_at or event.ends_at < now:
        partitioned_registrants = utils.partition(registrants, lambda r: bool(r.started_at and r.stopped_at), [True, False])
        return render_to_response('events/show.djml', {
            'event': event,
            'future': False,
            'registrants': registrants,
            'registrants_count': len(registrants),
            'attendees': partitioned_registrants[True],
            'attendees_count': len(partitioned_registrants[True]),
            'noshows': partitioned_registrants[False],
            'noshows_count': len(partitioned_registrants[False]),
            'MARKETPLACE_SLUG': settings.MARKETPLACE_SLUG,
        }, context_instance=RequestContext(request))
    else:
        return render_to_response('events/show.djml', {
            'event': event,
            'future': True,
            'registrants': registrants,
            'registrants_count': len(registrants),
            'MARKETPLACE_SLUG': settings.MARKETPLACE_SLUG,
        }, context_instance=RequestContext(request))


def sync(request, event_id):
    from webinars_web.webinars import models as wm
    force = request.REQUEST.get('force') and True or False
    postbin = request.REQUEST.get('postbin') or None
    auto = (request.REQUEST.get('auto') is None or request.REQUEST.get('auto').lower()!='false') and True or False
    event = wm.Event.objects.get(pk=event_id)
    sync_stages = event.trigger_sync(force=force, auto=auto)
    return render_to_response('events/trigger_sync.djml',
        {'event':event, 'sync_stages':sync_stages, 'postbin':postbin},
        context_instance=RequestContext(request))
apache-2.0
-1,903,053,518,049,108,200
44.640693
265
0.656455
false
3.433084
false
false
false
edmundgentle/schoolscript
SchoolScript/bin/Debug/pythonlib/Lib/distutils/command/bdist_dumb.py
1
4801
"""distutils.command.bdist_dumb Implements the Distutils 'bdist_dumb' command (create a "dumb" built distribution -- i.e., just an archive to be unpacked under $prefix or $exec_prefix).""" __revision__ = "$Id$" import os from distutils.core import Command from distutils.util import get_platform from distutils.dir_util import remove_tree, ensure_relative from distutils.errors import * from distutils.sysconfig import get_python_version from distutils import log class bdist_dumb(Command): description = "create a \"dumb\" built distribution" user_options = [('bdist-dir=', 'd', "temporary directory for creating the distribution"), ('plat-name=', 'p', "platform name to embed in generated filenames " "(default: %s)" % get_platform()), ('format=', 'f', "archive format to create (tar, ztar, gztar, zip)"), ('keep-temp', 'k', "keep the pseudo-installation tree around after " + "creating the distribution archive"), ('dist-dir=', 'd', "directory to put final built distributions in"), ('skip-build', None, "skip rebuilding everything (for testing/debugging)"), ('relative', None, "build the archive using relative paths" "(default: false)"), ] boolean_options = ['keep-temp', 'skip-build', 'relative'] default_format = { 'posix': 'gztar', 'nt': 'zip', 'os2': 'zip' } def initialize_options(self): self.bdist_dir = None self.plat_name = None self.format = None self.keep_temp = 0 self.dist_dir = None self.skip_build = 0 self.relative = 0 def finalize_options(self): if self.bdist_dir is None: bdist_base = self.get_finalized_command('bdist').bdist_base self.bdist_dir = os.path.join(bdist_base, 'dumb') if self.format is None: try: self.format = self.default_format[os.name] except KeyError: raise DistutilsPlatformError( "don't know how to create dumb built distributions " "on platform %s" % os.name) self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'), ('plat_name', 'plat_name')) def run(self): if not self.skip_build: self.run_command('build') install = self.reinitialize_command('install', reinit_subcommands=1) install.root = self.bdist_dir install.skip_build = self.skip_build install.warn_dir = 0 log.info("installing to %s" % self.bdist_dir) self.run_command('install') # And make an archive relative to the root of the # pseudo-installation tree. archive_basename = "%s.%s" % (self.distribution.get_fullname(), self.plat_name) # OS/2 objects to any ":" characters in a filename (such as when # a timestamp is used in a version) so change them to hyphens. if os.name == "os2": archive_basename = archive_basename.replace(":", "-") pseudoinstall_root = os.path.join(self.dist_dir, archive_basename) if not self.relative: archive_root = self.bdist_dir else: if (self.distribution.has_ext_modules() and (install.install_base != install.install_platbase)): raise DistutilsPlatformError( "can't make a dumb built distribution where " "base and platbase are different (%s, %s)" % (repr(install.install_base), repr(install.install_platbase))) else: archive_root = os.path.join(self.bdist_dir, ensure_relative(install.install_base)) # Make the archive filename = self.make_archive(pseudoinstall_root, self.format, root_dir=archive_root) if self.distribution.has_ext_modules(): pyversion = get_python_version() else: pyversion = 'any' self.distribution.dist_files.append(('bdist_dumb', pyversion, filename)) if not self.keep_temp: remove_tree(self.bdist_dir, dry_run=self.dry_run)
gpl-2.0
-1,751,620,946,305,370,000
38.008333
76
0.519267
false
4.533522
false
false
false
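As with the other distutils commands, this one is driven through a project's setup.py; for example, to build a zip archive into dist/ without rebuilding (the option names come straight from user_options above):

python setup.py bdist_dumb --format=zip --dist-dir=dist --skip-build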
Fabien-B/Web_ASA_Sourdoire
www/parcelle.py
1
4236
import mysql.connector
import datetime


class Parcelle(object):
    database = 'IENAC14_asa'
    user = 'root'
    password = 'root'
    host = '127.0.0.1'

    def __init__(self, id_parc, compteur=None, nom=None, lat=None, lon=None, altitude=None):
        if id_parc > 0:
            self.load(id_parc)
        else:
            connection = mysql.connector.connect(user=Parcelle.user, password=Parcelle.password, host=Parcelle.host, database=Parcelle.database)
            curseur = connection.cursor()
            requete = 'select max(Id_parcelle) from Parcelle;'
            curseur.execute(requete)
            (maxId,) = curseur.fetchall()[0]
            self.id = maxId + 1
            self.compteur = compteur
            self.nom = nom
            self.lat = lat
            self.lon = lon
            self.altitude = altitude
            curseur.close()
            connection.close()

    def save(self):
        if self.compteur == None:
            raise ParcelleError("compteur missing for create parcelle")
        connection = mysql.connector.connect(user=Parcelle.user, password=Parcelle.password, host=Parcelle.host, database=Parcelle.database)
        curseur = connection.cursor()
        requete = "INSERT INTO Parcelle VALUES ({0},{1},{2},{3},{4},{5});".format(self.id, self.compteur, self.nom, self.lat, self.lon, self.altitude)
        curseur.execute(requete)
        connection.commit()
        curseur.close()
        connection.close()

    def load(self, id_parc):
        connection = mysql.connector.connect(user=Parcelle.user, password=Parcelle.password, host=Parcelle.host, database=Parcelle.database)
        curseur = connection.cursor()
        requete = 'select * from Parcelle where Id_parcelle={};'.format(id_parc)
        curseur.execute(requete)
        try:
            (_, compteur, nom, lat, lon, altitude) = curseur.fetchall()[0]
        except IndexError:
            raise ParcelleError("Parcelle with id {} doesn't exist".format(id_parc))
        curseur.close()
        connection.close()
        self.id = id_parc
        self.compteur = compteur
        self.nom = nom
        self.lat = lat
        self.lon = lon
        self.altitude = altitude

    def release_my_ornot(self, exploitant=0):
        connection = mysql.connector.connect(user=Parcelle.user, password=Parcelle.password, host=Parcelle.host, database=Parcelle.database)
        curseur = connection.cursor()
        actualtime = str(datetime.datetime.now())
        requete = 'UPDATE Propriete SET date_fin="{1}" WHERE Id_parcelle={0} AND date_fin IS NULL;'.format(self.id, actualtime)
        curseur.execute(requete)
        requete = 'select max(Id_propriete) from Propriete;'
        curseur.execute(requete)
        (maxId,) = curseur.fetchall()[0]
        if exploitant == 0:
            requete = 'INSERT INTO Propriete VALUES({2}, {0}, 0, "{1}", NULL);'.format(self.id, actualtime, maxId+1)
        else:
            requete = 'INSERT INTO Propriete VALUES({2}, {0}, {3}, "{1}", NULL);'.format(self.id, actualtime, maxId+1, exploitant.id)
        curseur.execute(requete)
        connection.commit()
        curseur.close()
        connection.close()

    @staticmethod
    def get_exploitant_parcelle_id(id_ex):
        connection = mysql.connector.connect(user=Parcelle.user, password=Parcelle.password, host=Parcelle.host, database=Parcelle.database)
        curseur = connection.cursor()
        if id_ex == 0:
            requete = 'select Id_parcelle FROM Parcelle;'
        elif id_ex == -1:  # free parcels
            requete = 'select Parcelle.Id_parcelle FROM Parcelle,Propriete WHERE Propriete.Id_parcelle = Parcelle.Id_parcelle AND Id_exploitant = 0 AND date_fin IS NULL ORDER BY Parcelle.Id_parcelle;'
        else:
            requete = 'select Parcelle.Id_parcelle FROM Parcelle,Propriete WHERE Propriete.Id_parcelle = Parcelle.Id_parcelle AND Id_exploitant = {0} AND date_fin IS NULL ORDER BY Parcelle.Id_parcelle;'.format(id_ex)
        curseur.execute(requete)
        id_parc = curseur.fetchall()
        curseur.close()
        connection.close()
        id_parc_list = []
        for (id,) in id_parc:
            id_parc_list.append(id)
        return id_parc_list


class ParcelleError(Exception):
    pass
lgpl-3.0
-4,204,626,003,705,896,400
41.37
216
0.628895
false
3.27357
false
false
false
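A usage sketch for the class above (it assumes a reachable MySQL instance with the implied schema; note also that save() interpolates nom into the INSERT without quotes, so a string name would need quoting before this works for real):

p = Parcelle(0, compteur=42, nom='Nord', lat=44.35, lon=0.71, altitude=55)
p.save()
p.release_my_ornot()                            # mark it free (exploitant 0)
print(Parcelle.get_exploitant_parcelle_id(-1))  # ids of all free parcels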
saknis/upelis
logs4.py
1
5178
#!/usr/bin/env python

import base64
import cgi
import datetime
import logging
import os
import time
#from datetime import datetime, date, time
import urllib
import wsgiref.handlers
import string

from google.appengine.api import users
from google.appengine.api.logservice import logservice
from google.appengine.ext import db
#from google.appengine.ext import webapp
import webapp2 as webapp

# This sample gets the app request logs up to the current time, displays 5 logs
# at a time, including all AppLogs, with a Next link to let the user "page"
# through the results, using the RequestLog offset property.

class MainPage(webapp.RequestHandler):

    def get(self):
        logging.info('Starting Main handler')
        # Get the incoming offset param from the Next link to advance through
        # the logs. (The first time the page is loaded, there won't be any offset.)
        start_time_set = False
        try:
            offset = self.request.get('offset') or None
            if offset:
                offset = base64.urlsafe_b64decode(str(offset))
        except TypeError:
            offset = None
        try:
            start_time = self.request.get('start_time') or None
            if start_time:
                start_time = float(base64.urlsafe_b64decode(str(start_time)))
                start_time_set = True
        except TypeError:
            start_time = None
            start_time_set = False
        try:
            filter = str(self.request.get('filter')) or None
        except TypeError:
            filter = None

        # Set up end time for our query.
        # Count specifies the max number of RequestLogs shown at one time.
        # Use a boolean to initially turn off visibility of the "Next" link.
        count = 1000
        show_next = True
        last_offset = 5000

        dt = datetime.datetime.now()
        tt = dt.timetuple()
        year = tt[0]
        month = tt[1]
        ttt = time.strptime((("01 %s %s") % (month, year)), "%d %m %Y")
        if not start_time_set:
            end_time = time.time()
            start_time = time.mktime(ttt)
        else:
            dt2 = datetime.datetime.utcfromtimestamp(float(start_time))
            tt2 = dt2.timetuple()
            year2 = tt2[0]
            month2 = tt2[1]
            month2 = month2 + 1
            if month2 == 13:
                month2 = 1
                year2 = year2 + 1
            ttt2 = time.strptime((("01 %s %s") % (month2, year2)), "%d %m %Y")
            end_time = time.mktime(ttt2)
        dt3 = datetime.datetime.utcfromtimestamp(float(start_time))
        tt3 = dt3.timetuple()
        year3 = tt3[0]
        month3 = tt3[1]
        month3 = month3 - 1
        if month3 == 0:
            month3 = 12
            year3 = year3 - 1
        ttt3 = time.strptime((("01 %s %s") % (month3, year3)), "%d %m %Y")
        start_time_next = time.mktime(ttt3)

        # Iterate through all the RequestLog objects, displaying some fields and
        # iterate through all AppLogs belonging to each RequestLog count times.
        # In each iteration, save the offset to last_offset; the last one when
        # count is reached will be used for the link.
        i = 0
        for req_log in logservice.fetch(start_time=start_time, end_time=end_time,
                                        offset=offset,
                                        minimum_log_level=logservice.LOG_LEVEL_INFO,
                                        include_app_logs=False):
            ip = req_log.ip
            status = str(req_log.status)
            if filter and status and not string.find(status, filter) == -1:
                # self.response.out.write("<br /> REQUEST LOG <br />")
                self.response.out.write("""%s <br />""" % (req_log.combined))
                i += 1
            else:
                if not filter:
                    self.response.out.write("""%s <br />""" % (req_log.combined))
                    i += 1
            # self.response.out.write("""IP: %s <br /> Method: %s <br />
            #     Resource: %s <br />""" % (req_log.ip,
            #     req_log.method, req_log.resource))
            # self.response.out.write("Date: "+datetime.datetime.fromtimestamp(req_log.end_time).strftime('%D %T UTC') +"<br />")
            last_offset = req_log.offset

            for app_log in req_log.app_logs:
                self.response.out.write("<br />APP LOG<br />")
                statslink = ("<a href=\"http://%s/stats/details?time=%s\">%s</a>"
                             % (os.environ['HTTP_HOST'], app_log.time, app_log.time))
                self.response.out.write("<br />STATS DETAILS: %s<br />" % (statslink))
                self.response.out.write("Date: "+datetime.datetime.fromtimestamp(app_log.time).strftime('%Y-%m-%d %H:%M:%S UTC') +"<br />")
                self.response.out.write("<br />Message: "+app_log.message+"<br />")

            if i >= count:
                show_next = True
                break

        # Prepare the offset URL parameters, if any.
        if show_next:
            query = self.request.GET
            query['offset'] = base64.urlsafe_b64encode(last_offset)
            query['start_time'] = base64.urlsafe_b64encode(("%s")%(start_time_next))
            next_link = urllib.urlencode(query)
            self.response.out.write("<a href=\"/logs4?"+next_link+"\">Next</a>")
            self.response.out.write("<br />")


#def main():
logging.getLogger().setLevel(logging.DEBUG)
app = webapp.WSGIApplication([
    ('/logs4', MainPage),
], debug=True)
#    wsgiref.handlers.CGIHandler().run(application)

#if __name__ == '__main__':
#    main()
lgpl-2.1
5,720,461,625,587,779,000
33
131
0.60506
false
3.415567
false
false
false
clearpathrobotics/axis_camera
nodes/axis.py
1
8123
#!/usr/bin/env python
#
# Axis camera image driver. Based on:
# https://code.ros.org/svn/wg-ros-pkg/branches/trunk_cturtle/sandbox/axis_camera
# /axis.py
#

import threading
import urllib2

import rospy
from sensor_msgs.msg import CompressedImage, CameraInfo

import camera_info_manager


class StreamThread(threading.Thread):
    def __init__(self, axis):
        threading.Thread.__init__(self)
        self.axis = axis
        self.daemon = True
        self.timeoutSeconds = 2.5

    def run(self):
        while(True):
            self.stream()

    def stream(self):
        while(True):
            self.formURL()
            self.authenticate()
            if self.openURL():
                self.publishFramesContinuously()
            rospy.sleep(2) # if stream stays intact we shouldn't get to this

    def formURL(self):
        self.url = 'http://%s/mjpg/video.mjpg' % self.axis.hostname
        self.url += "?fps=0&resolution=%dx%d" % (self.axis.width, self.axis.height)

        # support for Axis F34 multicamera switch
        if (self.axis.camera != 0):
            self.url += "&camera=%d" % self.axis.camera

        rospy.logdebug('opening ' + str(self.axis))

    def authenticate(self):
        '''only try to authenticate if user/pass configured.  I have not
        used this method (yet).'''
        if self.axis.password != '' and self.axis.username != '':
            # create a password manager
            password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()

            # Add the username and password, use default realm.
            top_level_url = "http://" + self.axis.hostname
            password_mgr.add_password(None, top_level_url, self.axis.username,
                                      self.axis.password)
            if self.axis.use_encrypted_password:
                handler = urllib2.HTTPDigestAuthHandler(password_mgr)
            else:
                handler = urllib2.HTTPBasicAuthHandler(password_mgr)

            # create "opener" (OpenerDirector instance)
            opener = urllib2.build_opener(handler)

            # ...and install it globally so it can be used with urlopen.
            urllib2.install_opener(opener)

    def openURL(self):
        '''Open connection to Axis camera using http'''
        try:
            self.fp = urllib2.urlopen(self.url, timeout=self.timeoutSeconds)
            return(True)
        except urllib2.URLError, e:
            rospy.logwarn('Error opening URL %s' % (self.url) +
                          'Possible timeout. Looping until camera appears')
            return(False)

    def publishFramesContinuously(self):
        '''Continuous loop to publish images'''
        while(True):
            try:
                self.findBoundary()
                self.getImage()
                self.publishMsg()
                self.publishCameraInfoMsg()
            except:
                rospy.loginfo('Timed out while trying to get message.')
                break

    def findBoundary(self):
        '''The string "--myboundary" is used to denote the start of an image in
        Axis cameras'''
        while(True):
            boundary = self.fp.readline()
            if boundary == '--myboundary\r\n':
                break

    def getImage(self):
        '''Get the image header and image itself'''
        self.getHeader()
        self.getImageData()

    def getHeader(self):
        self.header = {}
        while(True):
            line = self.fp.readline()
            if line == "\r\n":
                break
            line = line.strip()
            parts = line.split(": ", 1)
            try:
                self.header[parts[0]] = parts[1]
            except:
                rospy.logwarn('Problem encountered with image header. Setting '
                              'content_length to zero')
                self.header['Content-Length'] = 0 # set content_length to zero if
                                                  # there is a problem reading header
        self.content_length = int(self.header['Content-Length'])

    def getImageData(self):
        '''Get the binary image data itself (ie. without header)'''
        if self.content_length > 0:
            self.img = self.fp.read(self.content_length)
            self.fp.readline() # Read terminating \r\n and do nothing with it

    def publishMsg(self):
        '''Publish jpeg image as a ROS message'''
        self.msg = CompressedImage()
        self.msg.header.stamp = rospy.Time.now()
        self.msg.header.frame_id = self.axis.frame_id
        self.msg.format = "jpeg"
        self.msg.data = self.img
        self.axis.pub.publish(self.msg)

    def publishCameraInfoMsg(self):
        '''Publish camera info manager message'''
        cimsg = self.axis.cinfo.getCameraInfo()
        cimsg.header.stamp = self.msg.header.stamp
        cimsg.header.frame_id = self.axis.frame_id
        cimsg.width = self.axis.width
        cimsg.height = self.axis.height
        self.axis.caminfo_pub.publish(cimsg)


class Axis:
    def __init__(self, hostname, username, password, width, height, frame_id,
                 camera_info_url, use_encrypted_password, camera):
        self.hostname = hostname
        self.username = username
        self.password = password
        self.width = width
        self.height = height
        self.frame_id = frame_id
        self.camera_info_url = camera_info_url
        self.use_encrypted_password = use_encrypted_password
        self.camera = camera

        # generate a valid camera name based on the hostname
        self.cname = camera_info_manager.genCameraName(self.hostname)
        self.cinfo = camera_info_manager.CameraInfoManager(cname = self.cname,
                                                           url = self.camera_info_url)
        self.cinfo.loadCameraInfo()         # required before getCameraInfo()
        self.st = None
        self.pub = rospy.Publisher("image_raw/compressed", CompressedImage, self, queue_size=1)
        self.caminfo_pub = rospy.Publisher("camera_info", CameraInfo, self, queue_size=1)

    def __str__(self):
        """Return string representation."""
        return(self.hostname + ',' + self.username + ',' + self.password +
               '(' + str(self.width) + 'x' + str(self.height) + ')')

    def peer_subscribe(self, topic_name, topic_publish, peer_publish):
        '''Lazy-start the image-publisher.'''
        if self.st is None:
            self.st = StreamThread(self)
            self.st.start()


def main():
    rospy.init_node("axis_driver")

    arg_defaults = {
        'hostname': '192.168.0.90',       # default IP address
        'username': 'root',               # default login name
        'password': '',
        'width': 640,
        'height': 480,
        'frame_id': 'axis_camera',
        'camera_info_url': '',
        'use_encrypted_password': False,
        'camera': 0}
    args = updateArgs(arg_defaults)
    Axis(**args)
    rospy.spin()


def updateArgs(arg_defaults):
    '''Look up parameters starting in the driver's private parameter space, but
    also searching outer namespaces.  Defining them in a higher namespace allows
    the axis_ptz.py script to share parameters with the driver.'''
    args = {}
    for name, val in arg_defaults.iteritems():
        full_name = rospy.search_param(name)
        if full_name is None:
            args[name] = val
        else:
            args[name] = rospy.get_param(full_name, val)
    # resolve frame_id with tf_prefix (unless already absolute)
    if args['frame_id'][0] != '/':        # not absolute?
        tf_prefix = rospy.search_param('tf_prefix')
        prefix_val = ''
        if tf_prefix is not None:         # prefix defined?
            prefix_val = rospy.get_param(tf_prefix)
            if prefix_val[0] != '/':      # prefix not absolute?
                prefix_val = '/' + prefix_val
        args['frame_id'] = prefix_val + '/' + args['frame_id']
    return(args)


if __name__ == "__main__":
    main()
bsd-3-clause
9,019,188,777,415,011,000
36.43318
95
0.562723
false
4.117081
false
false
false
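Because updateArgs() resolves each key through rospy.search_param, the driver is configured via ROS parameters; a typical invocation using private-parameter remapping might look like this (parameter names taken from arg_defaults above, values illustrative):

rosrun axis_camera axis.py _hostname:=192.168.0.90 _username:=root _width:=640 _height:=480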
jacebrowning/gridcommand
setup.py
1
1810
#!/usr/bin/env python

"""Setup script for the package."""

import os
import sys

import setuptools

PACKAGE_NAME = 'gridcommand'
MINIMUM_PYTHON_VERSION = 3, 5


def check_python_version():
    """Exit when the Python version is too low."""
    if sys.version_info < MINIMUM_PYTHON_VERSION:
        sys.exit("Python {}.{}+ is required.".format(*MINIMUM_PYTHON_VERSION))


def read_package_variable(key):
    """Read the value of a variable from the package without importing."""
    module_path = os.path.join(PACKAGE_NAME, '__init__.py')
    with open(module_path) as module:
        for line in module:
            parts = line.strip().split(' ')
            if parts and parts[0] == key:
                return parts[-1].strip("'")
    assert 0, "'{0}' not found in '{1}'".format(key, module_path)


def read_descriptions():
    """Build a description for the project from documentation files."""
    try:
        readme = open("README.rst").read()
        changelog = open("CHANGELOG.rst").read()
    except IOError:
        return "<placeholder>"
    else:
        return readme + '\n' + changelog


check_python_version()

setuptools.setup(
    name=read_package_variable('__project__'),
    version=read_package_variable('__version__'),
    description="TBD",
    url='https://github.com/jacebrowning/gridcommand',
    author='Jace Browning',
    author_email='[email protected]',
    packages=setuptools.find_packages(),
    entry_points={'console_scripts': []},
    long_description=read_descriptions(),
    license='LGPL',
    classifiers=[
        'Development Status :: 1 - Planning',
        'Natural Language :: English',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 3.5',
    ],
    install_requires=open("requirements.txt").readlines(),
)
lgpl-3.0
-8,046,625,268,878,089,000
25.617647
78
0.628729
false
3.892473
false
false
false
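read_package_variable() above splits on single spaces rather than parsing, so it only matches assignments written exactly as __key__ = 'value'; a quick illustration of the lookup logic it applies to each line:

line = "__version__ = '0.0.0'"
parts = line.strip().split(' ')   # ["__version__", "=", "'0.0.0'"]
assert parts[0] == '__version__'
print(parts[-1].strip("'"))       # -> 0.0.0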
ledbutter/ProjectEulerPython
Problem39.py
1
1375
# If p is the perimeter of a right angle triangle with integral length sides, {a,b,c}, there are exactly three solutions for p = 120.
# {20,48,52}, {24,45,51}, {30,40,50}
# For which value of p <= 1000, is the number of solutions maximised?

# p = 120
# sols = 0
# for a in range(1, p//2):
#     for b in range(a+1, p-a):
#         for c in range(b+1, p-a-b+1):
#             if a**2 + b**2 == c**2 and a+b+c==p:
#                 print(a,b,c)
#                 sols += 1
# print(sols)

#def possible_perimeters(p):

#http://blog.dreamshire.com/2009/04/22/project-euler-problem-39-solution/
# t_max = 0
# p_limit = 1000
# for p in range(p_limit//2, p_limit+1, 2):
#     t = 0;
#     for a in range(2, p//4+1):
#         if p*(p - 2*a) % (2*(p-a)) == 0: t += 1
#     if t > t_max: (t_max, p_max) = (t, p)
# print(p_max) #840

#my original code would have worked but it was incredibly slow,
#this is an optimized version of that code based on the message board

from math import sqrt

max_p = max_solutions = current_solutions = 0
for p in range(500, 1001, 2):
    #print(p)
    current_solutions = 0
    for a in range(1, p//4):
        for b in range(a+1, (p-a)//2):
            c = sqrt(a**2+b**2)
            if a+b+c==p:
                #print(a,b,c)
                current_solutions += 1
    if current_solutions > max_solutions:
        max_p = p
        max_solutions = current_solutions

print(max_p, max_solutions)
mit
9,017,787,300,578,095,000
22.157895
133
0.576727
false
2.468582
false
false
false
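Two notes on the solution above: the comparison a+b+c==p mixes an integer with a float sqrt (exact here because the hits are perfect squares, but fragile in general), and the a-loop bound p//4 is slightly too tight -- it misses triples such as (210, 280, 350) for p = 840, though the argmax is unaffected. An all-integer check of the known answer p = 840, with a generous bound of p//3 on the shortest side:

p = 840
sols = [(a, b, p - a - b)
        for a in range(1, p // 3)
        for b in range(a + 1, (p - a) // 2)
        if a * a + b * b == (p - a - b) ** 2]
print(len(sols))  # -> 8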
google/personfinder
app/indexing.py
1
10056
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Support for approximate string prefix queries.

A hit is defined when the words entered in the query are all prefixes of one
of the words in the given and family names on the record.  For example, a
record with the fields:

    given_name: ABC 123
    family_name: DEF 456

will be retrieved by:

    "ABC 456"
    "45 ED"
    "123 ABC"
    "ABC 123 DEF"

but will not be retrieved by:

    "ABC 1234"
    "ABC 123 DEF 456 789"
"""

from text_query import TextQuery

from google.appengine.ext import db
import unicodedata
import logging
import model
import re
import jautils


def update_index_properties(entity):
    """Finds and updates all prefix-related properties on the given entity."""
    # Using set to make sure I'm not adding the same string more than once.
    names_prefixes = set()
    for property in entity._fields_to_index_properties:
        for value in TextQuery(getattr(entity, property)).query_words:
            if property in entity._fields_to_index_by_prefix_properties:
                for n in xrange(1, len(value) + 1):
                    pref = value[:n]
                    if pref not in names_prefixes:
                        names_prefixes.add(pref)
            else:
                if value not in names_prefixes:
                    names_prefixes.add(value)

    # Add alternate names to the index tokens.  We choose not to index prefixes
    # of alternate names so that we can keep the index size small.
    # TODO(ryok): This strategy works well for Japanese, but how about other
    # languages?
    names_prefixes |= get_alternate_name_tokens(entity)

    # Put a cap on the number of tokens, just as a precaution.
    MAX_TOKENS = 100
    entity.names_prefixes = list(names_prefixes)[:MAX_TOKENS]
    if len(names_prefixes) > MAX_TOKENS:
        logging.debug('MAX_TOKENS exceeded for %s' %
                      ' '.join(list(names_prefixes)))


def get_alternate_name_tokens(person):
    """Returns alternate name tokens and their variations."""
    tokens = set(TextQuery(person.alternate_names).query_words)
    # This is no-op for non-Japanese.
    tokens |= set(jautils.get_additional_tokens(tokens))
    return tokens


class CmpResults():
    def __init__(self, query):
        self.query = query
        self.query_words_set = set(query.words)

    def __call__(self, p1, p2):
        if ((p1.primary_full_name and
             p1.primary_full_name == p2.primary_full_name) or
            ((p1.given_name or p1.family_name) and
             p1.given_name == p2.given_name and
             p1.family_name == p2.family_name)):
            return 0
        self.set_ranking_attr(p1)
        self.set_ranking_attr(p2)
        r1 = self.rank(p1)
        r2 = self.rank(p2)

        if r1 == r2:
            # if rank is the same sort by name so same names will be together
            return cmp(p1._normalized_full_name.normalized,
                       p2._normalized_full_name.normalized)
        else:
            return cmp(r2, r1)

    def set_ranking_attr(self, person):
        """Consider saving these into the db"""
        if not hasattr(person, '_normalized_given_name'):
            person._normalized_given_name = TextQuery(person.given_name)
            person._normalized_family_name = TextQuery(person.family_name)
            person._normalized_full_name = TextQuery(person.full_name)
            person._name_words = set(person._normalized_full_name.words)
            person._alt_name_words = set(
                TextQuery(person.alternate_names).words)

    # TODO(ryok): re-consider the ranking putting more weight on full_name (a
    # required field) instead of given name and family name pair (optional).
    def rank(self, person):
        # The normalized query words, in the order as entered.
        ordered_words = self.query.normalized.split()

        if (ordered_words ==
            person._normalized_given_name.words +
            person._normalized_family_name.words):
            # Matches a Latin name exactly (given name followed by surname).
            return 10

        if (re.match(ur'^[\u3400-\u9fff]$', person.family_name) and
            ordered_words in [
                [person.family_name + person.given_name],
                [person.family_name, person.given_name]
            ]):
            # Matches a CJK name exactly (surname followed by given name).
            return 10

        if (re.match(ur'^[\u3400-\u9fff]+$', person.family_name) and
            ordered_words in [
                [person.family_name + person.given_name],
                [person.family_name, person.given_name]
            ]):
            # Matches a CJK name exactly (surname followed by given name).
            # A multi-character surname is uncommon, so it is ranked a bit
            # lower.
            return 9.5

        if (ordered_words ==
            person._normalized_family_name.words +
            person._normalized_given_name.words):
            # Matches a Latin name with given and family name switched.
            return 9

        if (re.match(ur'^[\u3400-\u9fff]$', person.given_name) and
            ordered_words in [
                [person.given_name + person.family_name],
                [person.given_name, person.family_name]
            ]):
            # Matches a CJK name with surname and given name switched.
            return 9

        if (re.match(ur'^[\u3400-\u9fff]+$', person.given_name) and
            ordered_words in [
                [person.given_name + person.family_name],
                [person.given_name, person.family_name]
            ]):
            # Matches a CJK name with surname and given name switched.
            # A multi-character surname is uncommon, so it's ranked a bit lower.
            return 8.5

        if person._name_words == self.query_words_set:
            # Matches all the words in the given and family name, out of order.
            return 8

        if self.query.normalized in [
            person._normalized_given_name.normalized,
            person._normalized_family_name.normalized,
        ]:
            # Matches the given name exactly or the family name exactly.
            return 7

        if person._name_words.issuperset(self.query_words_set):
            # All words in the query appear somewhere in the name.
            return 6

        # Count the number of words in the query that appear in the name and
        # also in the alternate names.
        matched_words = person._name_words.union(
            person._alt_name_words).intersection(self.query_words_set)
        return min(5, 1 + len(matched_words))


def rank_and_order(results, query, max_results):
    results.sort(CmpResults(query))
    return results[:max_results]


def sort_query_words(query_words):
    """Sort query_words so that the query filters created from query_words are
    more effective and consistent when truncated due to NeedIndexError, and
    return the sorted list."""
    # (1) Sort them lexicographically so that we return consistent search
    #     results for query 'AA BB CC DD' and 'DD AA BB CC' even when filters
    #     are truncated.
    sorted_query_words = sorted(query_words)
    # (2) Sort them according to popularity so that less popular query words,
    #     which are usually more effective filters, come first.
    sorted_query_words = jautils.sorted_by_popularity(sorted_query_words)
    # (3) Sort them according to the lengths so that longer query words,
    #     which are usually more effective filters, come first.
    return sorted(sorted_query_words, key=len, reverse=True)


def search(repo, query_obj, max_results):
    # As there are limits on the number of filters that we can apply and the
    # number of entries we can fetch at once, the order of query words could
    # potentially matter.  In particular, this is the case for most Japanese
    # names, many of which consist of 4 to 6 Chinese characters, each
    # corresponding to an additional filter.
    query_words = sort_query_words(query_obj.query_words)
    logging.debug('query_words: %r' % query_words)

    # First try the query with all the filters, and then keep backing off
    # if we get NeedIndexError.
    fetch_limit = 400
    fetched = []
    filters_to_try = len(query_words)
    while filters_to_try:
        query = model.Person.all_in_repo(repo)
        for word in query_words[:filters_to_try]:
            query.filter('names_prefixes =', word)

        try:
            fetched = query.fetch(fetch_limit)
            logging.debug('query succeeded with %d filters' % filters_to_try)
            break
        except db.NeedIndexError:
            filters_to_try -= 1
            continue

    logging.debug('indexing.search fetched: %d' % len(fetched))

    # Now perform any filtering that App Engine was unable to do for us.
    matched = []
    for result in fetched:
        for word in query_words:
            if word not in result.names_prefixes:
                break
        else:
            matched.append(result)
    logging.debug('indexing.search matched: %d' % len(matched))

    if len(fetched) == fetch_limit and len(matched) < max_results:
        logging.debug('Warning: Fetch reached a limit of %d, but only %d '
                      'exact-matched the query (max_results = %d).' %
                      (fetch_limit, len(matched), max_results))

    # Now rank and order the results.
    return rank_and_order(matched, query_obj, max_results)
apache-2.0
7,982,706,846,564,120,000
38.590551
81
0.625597
false
4.020792
false
false
false
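update_index_properties() above expands each word of the indexed fields into all of its prefixes; a standalone sketch of that expansion for the docstring's example record:

def word_prefixes(words):
    # every non-empty prefix of every word, deduplicated
    return {w[:n] for w in words for n in range(1, len(w) + 1)}

print(sorted(word_prefixes(['abc', '123', 'def', '456'])))
# ['1', '12', '123', '4', '45', '456', 'a', 'ab', 'abc', 'd', 'de', 'def']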
miguelalba89/hfdp-python
combining/observer.py
1
7761
""" Ducks problem with quackologists (observers) Author: m1ge7 Date: 2014/03/24 """ from abc import ABCMeta, abstractmethod ############################################################################### # ############################################################################### class QuackObservable: __metaclass__ = ABCMeta @abstractmethod def register_observer(self, observer): pass @abstractmethod def notify_observers(self): pass class Quackable(QuackObservable): __metaclass__ = ABCMeta @abstractmethod def quack(self): pass class Observable(QuackObservable): def __init__(self, duck): self.__observers = [] self.__duck = duck def register_observer(self, observer): self.__observers.append(observer) def notify_observers(self): for obs in self.__observers: obs.update(self.__duck) def get_observers(self): return self.__observers class Observer: __metaclass__ = ABCMeta @abstractmethod def update(self, duck): pass class Quackologist(Observer): def update(self, duck): print "Quackologist: " + str(duck) + " just quacked." def __str__(self): return "Quackologist" ############################################################################### # Duck concrete classes ############################################################################### class DecoyDuck(Quackable): def __init__(self): self.__observable = Observable(self) def quack(self): print "<< Silence >>" self.notify_observers() def register_observer(self, observer): self.__observable.register_observer(observer) def notify_observers(self): self.__observable.notify_observers() def __str__(self): return "Decoy Duck" class DuckCall(Quackable): def __init__(self): self.__observable = Observable(self) def quack(self): print "Kwak" self.notify_observers() def register_observer(self, observer): self.__observable.register_observer(observer) def notify_observers(self): self.__observable.notify_observers() def __str__(self): return "Duck Call" class MallardDuck(Quackable): def __init__(self): self.__observable = Observable(self) def quack(self): print "Quack" self.notify_observers() def register_observer(self, observer): self.__observable.register_observer(observer) def notify_observers(self): self.__observable.notify_observers() def __str__(self): return "Mallard Duck" class RedheadDuck(Quackable): def __init__(self): self.__observable = Observable(self) def quack(self): print "Quack" self.notify_observers() def register_observer(self, observer): self.__observable.register_observer(observer) def notify_observers(self): self.__observable.notify_observers() def __str__(self): return "Redhead Duck" class RubberDuck(Quackable): def __init__(self): self.__observable = Observable(self) def quack(self): print "Squeak" self.notify_observers() def register_observer(self, observer): self.__observable.register_observer(observer) def notify_observers(self): self.__observable.notify_observers() def __str__(self): return "Rubber Duck" ############################################################################### # Goose classes ############################################################################### class Goose: def honk(self): print "Honk" def __str__(self): return "Goose" class GooseAdapter(Quackable): def __init__(self, goose): self.__goose = goose self.__observable = Observable(self) def quack(self): self.__goose.honk() self.notify_observers() def register_observer(self, observer): self.__observable.register_observer(observer) def notify_observers(self): self.__observable.notify_observers() def __str__(self): return "Goose pretending to be a 
Duck" ############################################################################### # QuackCounter ############################################################################### class QuackCounter(Quackable): number_of_quacks = 0 def __init__(self, duck): self.__duck = duck def quack(self): self.__duck.quack() QuackCounter.number_of_quacks += 1 @staticmethod def get_quacks(): return QuackCounter.number_of_quacks def register_observer(self, observer): self.__duck.register_observer(observer) def notify_observers(self): self.__duck.notify_observers() def __str__(self): return str(self.__duck) ############################################################################### # Factories ############################################################################### class AbstractDuckFactory: __metaclass__ = ABCMeta @abstractmethod def create_mallard_duck(self): pass @abstractmethod def create_redhead_duck(self): pass @abstractmethod def create_duck_call(self): pass @abstractmethod def create_rubber_duck(self): pass class DuckFactory(AbstractDuckFactory): def create_mallard_duck(self): return MallardDuck() def create_redhead_duck(self): return RedheadDuck() def create_duck_call(self): return DuckCall() def create_rubber_duck(self): return RubberDuck() class CountingDuckFactory(AbstractDuckFactory): def create_mallard_duck(self): return QuackCounter(MallardDuck()) def create_redhead_duck(self): return QuackCounter(RedheadDuck()) def create_duck_call(self): return QuackCounter(DuckCall()) def create_rubber_duck(self): return QuackCounter(RubberDuck()) ############################################################################### # Flock ############################################################################### class Flock(Quackable): def __init__(self): self.__ducks = [] def add(self, duck): self.__ducks.append(duck) def quack(self): for duck in self.__ducks: duck.quack() def register_observer(self, observer): for duck in self.__ducks: duck.register_observer(observer) def notify_observers(): pass def __str__(self): return "Flock of Ducks" class DuckSimulator: def simulate_factory(self, duck_factory): print "\nDuck Simulator: With Composite - Flocks" flock_of_ducks = Flock() flock_of_ducks.add(duck_factory.create_redhead_duck()) flock_of_ducks.add(duck_factory.create_duck_call()) flock_of_ducks.add(duck_factory.create_rubber_duck()) flock_of_ducks.add(GooseAdapter(Goose())) flock_of_mallards = Flock() for i in range(4): flock_of_mallards.add(duck_factory.create_mallard_duck()) flock_of_ducks.add(flock_of_mallards) print "\nDuck Simulator: With Observer" quackologist = Quackologist() flock_of_ducks.register_observer(quackologist) self.simulate_duck(flock_of_ducks) print "The ducks quacked " + str(QuackCounter.get_quacks()) + " times" def simulate_duck(self, duck): duck.quack() if __name__ == '__main__': simulator = DuckSimulator() duck_factory = CountingDuckFactory() simulator.simulate_factory(duck_factory)
gpl-3.0
939,939,761,361,892,700
21.365994
79
0.541039
false
3.943598
false
false
false
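The observer wiring above can also be exercised without the simulator, using only the classes as defined:

duck = QuackCounter(MallardDuck())
duck.register_observer(Quackologist())
duck.quack()
# prints:
#   Quack
#   Quackologist: Mallard Duck just quacked.
print "The ducks quacked " + str(QuackCounter.get_quacks()) + " times"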
pattisdr/osf.io
api/meetings/views.py
1
9146
from rest_framework import generics, permissions as drf_permissions from rest_framework.exceptions import NotFound from django.db.models import Q, Count, Subquery, OuterRef, Case, When, Value, CharField, F, IntegerField from django.db.models.functions import Length, Substr, Coalesce from django.contrib.contenttypes.models import ContentType from addons.osfstorage.models import OsfStorageFile from api.base import permissions as base_permissions from api.base.exceptions import InvalidFilterOperator from api.base.filters import ListFilterMixin from api.base.views import JSONAPIBaseView from api.base.utils import get_object_or_error from api.base.versioning import PrivateVersioning from api.meetings.serializers import MeetingSerializer, MeetingSubmissionSerializer from api.meetings.permissions import IsPublic from api.nodes.views import NodeMixin from framework.auth.oauth_scopes import CoreScopes from osf.models import AbstractNode, Conference, Contributor, Tag, PageCounter from website import settings class MeetingMixin(object): """Mixin with convenience method get_meeting """ meeting_lookup_url_kwarg = 'meeting_id' def get_meeting(self): meeting = get_object_or_error( Conference, Q(endpoint=self.kwargs[self.meeting_lookup_url_kwarg]), self.request, display_name='meeting', ) return meeting class BaseMeetingView(JSONAPIBaseView, MeetingMixin): permission_classes = ( drf_permissions.IsAuthenticatedOrReadOnly, base_permissions.TokenHasScope, ) required_read_scopes = [CoreScopes.MEETINGS_READ] required_write_scopes = [CoreScopes.NULL] model = Conference # This view goes under the _/ namespace versioning_class = PrivateVersioning serializer_class = MeetingSerializer view_category = 'meetings' class MeetingList(BaseMeetingView, generics.ListAPIView, ListFilterMixin): view_name = 'meeting-list' ordering = ('-modified', ) # default ordering ordering_fields = ('name', 'submissions_count', 'location', 'start_date',) # overrides ListFilterMixin def get_default_queryset(self): tags = Tag.objects.filter( abstractnode_tagged__is_public=True, abstractnode_tagged__is_deleted=False, ).annotate( num_nodes=Count(F('abstractnode_tagged')), ).filter(name=OuterRef('endpoint')) conferences = Conference.objects.filter(is_meeting=True).annotate( submissions_count=Subquery( tags.values('num_nodes')[:1], output_field=IntegerField(), ), ) return conferences.filter(submissions_count__gte=settings.CONFERENCE_MIN_COUNT) # overrides ListAPIView def get_queryset(self): return self.get_queryset_from_request() class MeetingDetail(BaseMeetingView, generics.RetrieveAPIView): view_name = 'meeting-detail' def get_object(self): # No minimum submissions count for accessing meeting directly return self.get_meeting() class BaseMeetingSubmission(JSONAPIBaseView, MeetingMixin): permission_classes = ( drf_permissions.IsAuthenticatedOrReadOnly, base_permissions.TokenHasScope, IsPublic, ) required_read_scopes = [CoreScopes.MEETINGS_READ, CoreScopes.NODE_BASE_READ] required_write_scopes = [CoreScopes.NULL] model = AbstractNode # This view goes under the _/ namespace versioning_class = PrivateVersioning serializer_class = MeetingSubmissionSerializer view_category = 'meetings' def get_serializer_context(self): context = super(BaseMeetingSubmission, self).get_serializer_context() context['meeting'] = self.get_meeting() return context class MeetingSubmissionList(BaseMeetingSubmission, generics.ListAPIView, ListFilterMixin): view_name = 'meeting-submissions' ordering = ('-created', ) # default ordering ordering_fields = ('title', 
'meeting_category', 'author_name', 'download_count', 'created', ) # overrides ListFilterMixin def get_default_queryset(self): meeting = self.get_meeting() return self.annotate_queryset_for_filtering_and_sorting(meeting, meeting.submissions) # overrides ListAPIView def get_queryset(self): return self.get_queryset_from_request() def build_query_from_field(self, field_name, operation): if field_name == 'author_name': if operation['op'] != 'eq': raise InvalidFilterOperator(value=operation['op'], valid_operators=['eq']) return Q(author_name__icontains=operation['value']) if field_name == 'meeting_category': if operation['op'] != 'eq': raise InvalidFilterOperator(value=operation['op'], valid_operators=['eq']) return Q(meeting_category__icontains=operation['value']) return super(MeetingSubmissionList, self).build_query_from_field(field_name, operation) def annotate_queryset_for_filtering_and_sorting(self, meeting, queryset): queryset = self.annotate_queryset_with_meeting_category(meeting, queryset) queryset = self.annotate_queryset_with_author_name(queryset) queryset = self.annotate_queryset_with_download_count(queryset) return queryset def annotate_queryset_with_meeting_category(self, meeting, queryset): """ Annotates queryset with meeting_category - if submission1 tag exists, use this, otherwise assume default submission2 tag """ # Setup meeting category subquery (really existence of certain tags) category_1 = meeting.field_names.get('submission1', 'poster') category_2 = meeting.field_names.get('submission2', 'talk') tag_subquery = Tag.objects.filter( abstractnode_tagged=OuterRef('pk'), name=category_1, ).values_list('name', flat=True) queryset = queryset.annotate(cat_one_count=Count(Subquery(tag_subquery))).annotate( meeting_category=Case( When(cat_one_count=1, then=Value(category_1)), default=Value(category_2), output_field=CharField(), ), ) return queryset def annotate_queryset_with_author_name(self, queryset): """ Annotates queryset with author_name_category - it is the family_name if it exists, otherwise, the fullname is used """ # Setup author name subquery (really first bibliographic contributor) contributors = Contributor.objects.filter( visible=True, node_id=OuterRef('pk'), ).order_by('_order') queryset = queryset.annotate( author_family_name=Subquery(contributors.values(('user__family_name'))[:1]), author_full_name=Subquery(contributors.values(('user__fullname'))[:1]), author_id=Subquery(contributors.values(('user__guids___id'))[:1]), ).annotate( author_name=Case( When(author_family_name='', then=F('author_full_name')), default=F('author_family_name'), output_field=CharField(), ), ) return queryset def annotate_queryset_with_download_count(self, queryset): """ Annotates queryset with download count of first osfstorage file NOTE: This is a brittle way to do this. PageCounter _ids are of the form <file_action>:<node__id>:<file__id>:<sometimes version>. 
- Assumes the "download" file action is the only action with that many letters - Assumes node and file guids are a consistent length - ENG-122 would get rid of this string matching behavior """ pages = PageCounter.objects.annotate( node_id=Substr('_id', 10, 5), file_id=Substr('_id', 16), _id_length=Length('_id'), ).filter( _id__icontains='download', node_id=OuterRef('guids___id'), file_id=OuterRef('file_id'), ).exclude(_id_length__gt=39) file_subqs = OsfStorageFile.objects.filter( target_content_type_id=ContentType.objects.get_for_model(AbstractNode), target_object_id=OuterRef('pk'), ).order_by('created') queryset = queryset.annotate( file_id=Subquery(file_subqs.values('_id')[:1]), ).annotate( download_count=Coalesce(Subquery(pages.values('total')[:1]), Value(0)), ) return queryset class MeetingSubmissionDetail(BaseMeetingSubmission, generics.RetrieveAPIView, NodeMixin): view_name = 'meeting-submission-detail' serializer_class = MeetingSubmissionSerializer node_lookup_url_kwarg = 'submission_id' def get_object(self): meeting = self.get_meeting() node = self.get_node() # Submission must be associated with the Conference if meeting.endpoint not in node.tags.values_list('name', flat=True): raise NotFound('This is not a submission to {}.'.format(meeting.name)) return node
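# A plain-Python sketch of the PageCounter `_id` parsing that the Substr
# annotations above rely on (Django's Substr is 1-indexed, so
# Substr('_id', 10, 5) maps to [9:14] and Substr('_id', 16) to [15:]);
# the sample id below is hypothetical but follows the documented
# <file_action>:<node__id>:<file__id> shape:
#
#   sample_id = 'download:abcde:vwxyz12345'
#   sample_id[9:14]  # -> 'abcde' (node guid)
#   sample_id[15:]   # -> 'vwxyz12345' (file id)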
apache-2.0
-3,653,973,906,636,123,000
36.63786
104
0.663459
false
4.083036
false
false
false
duonys/deep-learning-chainer
dlchainer/SdA.py
1
5225
#-*- coding: utf-8 -*- from abc import ABCMeta, abstractmethod import copy import numpy as np from sklearn.base import BaseEstimator, ClassifierMixin, RegressorMixin from sklearn.externals.six import with_metaclass from chainer import Variable, FunctionSet, optimizers, cuda import chainer.functions as F from .dA import dA from . import utils class SdAMixin(with_metaclass(ABCMeta, BaseEstimator)): """ Stacked Denoising Autoencoder References: http://deeplearning.net/tutorial/SdA.html https://github.com/pfnet/chainer/blob/master/examples/mnist/train_mnist.py """ def __init__(self, n_input, n_hiddens, n_output, noise_levels=None, dropout_ratios=None, do_pretrain=True, batch_size=100, n_epoch_pretrain=20, n_epoch_finetune=20, optimizer=optimizers.Adam(), activation_func=F.relu, verbose=False, gpu=-1): self.n_input = n_input self.n_hiddens = n_hiddens self.n_output = n_output self.do_pretrain = do_pretrain self.batch_size = batch_size self.n_epoch_pretrain = n_epoch_pretrain self.n_epoch_finetune = n_epoch_finetune self.optimizer = optimizer self.dAs = \ [dA(self.n_input, self.n_hiddens[0], self._check_var(noise_levels, 0), self._check_var(dropout_ratios, 0), self.batch_size, self.n_epoch_pretrain, copy.deepcopy(optimizer), activation_func, verbose, gpu)] + \ [dA(self.n_hiddens[i], self.n_hiddens[i + 1], self._check_var(noise_levels, i + 1), self._check_var(dropout_ratios, i + 1), self.batch_size, self.n_epoch_pretrain, copy.deepcopy(optimizer), activation_func, verbose, gpu) for i in range(len(n_hiddens) - 1)] self.verbose = verbose self.gpu = gpu def _check_var(self, var, index, default_val=0.0): return var[index] if var is not None else default_val def fit(self, X, y): if self.do_pretrain: self._pretrain(X) self._finetune(X, y) def _pretrain(self, X): for layer, dA in enumerate(self.dAs): utils.disp('*** pretrain layer: {} ***'.format(layer + 1), self.verbose) if layer == 0: layer_input = X else: layer_input = self.dAs[layer - 1].encode(Variable(layer_input), train=False).data dA.fit(layer_input) def _finetune(self, X, y): utils.disp('*** finetune ***', self.verbose) # construct model and setup optimizer params = {'l{}'.format(layer + 1): dA.encoder for layer, dA in enumerate(self.dAs)} params.update({'l{}'.format(len(self.dAs) + 1): F.Linear(self.dAs[-1].n_hidden, self.n_output)}) self.model = FunctionSet(**params) self.optimizer.setup(self.model) if self.gpu >= 0: cuda.get_device(self.gpu).use() self.model.to_gpu() xp = cuda.cupy if self.gpu >= 0 else np n = len(X) for epoch in range(self.n_epoch_finetune): utils.disp('epoch: {}'.format(epoch + 1), self.verbose) perm = np.random.permutation(n) sum_loss = 0 for i in range(0, n, self.batch_size): X_batch = xp.asarray(X[perm[i: i + self.batch_size]]) y_batch = xp.asarray(y[perm[i: i + self.batch_size]]) self.optimizer.zero_grads() y_var = self._forward(X_batch) loss = self._loss_func(y_var, Variable(y_batch)) loss.backward() self.optimizer.update() sum_loss += float(loss.data) * len(X_batch) utils.disp('fine tune mean loss={}'.format(sum_loss / n), self.verbose) def _forward(self, X, train=True): X_var = Variable(X) output = X_var for dA in self.dAs: output = dA.encode(output, train) y_var = self.model['l{}'.format(len(self.dAs) + 1)](output) return y_var @abstractmethod def _loss_func(self, y_var, t_var): pass class SdAClassifier(SdAMixin, ClassifierMixin): """ References: http://scikit-learn.org/stable/developers/#rolling-your-own-estimator """ def _loss_func(self, y_var, t_var): return F.softmax_cross_entropy(y_var, t_var) def fit(self, X, 
y): assert X.dtype == np.float32 and y.dtype == np.int32 super().fit(X, y) def transform(self, X): return self._forward(X, train=False).data def predict(self, X): return np.apply_along_axis(lambda x: np.argmax(x), arr=self.transform(X), axis=1) class SdARegressor(SdAMixin, RegressorMixin): """ References: http://scikit-learn.org/stable/developers/#rolling-your-own-estimator """ def _loss_func(self, y_var, t_var): y_var = F.reshape(y_var, [len(y_var)]) return F.mean_squared_error(y_var, t_var) def fit(self, X, y): assert X.dtype == np.float32 and y.dtype == np.float32 super().fit(X, y) def transform(self, X): return self._forward(X, train=False).data def predict(self, X): return self.transform(X)
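# A minimal smoke-test sketch for the estimators above (run via
# `python -m dlchainer.SdA` so the relative imports resolve); the random
# data, layer sizes and epoch counts are illustrative only, and a
# Chainer 1.x environment is assumed.
if __name__ == '__main__':
    X = np.random.rand(200, 64).astype(np.float32)     # fit() asserts float32 features
    y = np.random.randint(0, 3, 200).astype(np.int32)  # ...and int32 labels
    clf = SdAClassifier(n_input=64, n_hiddens=[32, 16], n_output=3,
                        n_epoch_pretrain=2, n_epoch_finetune=2, verbose=True)
    clf.fit(X, y)              # layer-wise pretraining, then supervised fine-tuning
    print(clf.predict(X)[:10])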
mit
-6,527,209,594,631,249,000
32.280255
110
0.587368
false
3.347213
false
false
false
SecPi/SecPi
worker/temperature_sensor.py
1
2642
from tools.sensor import Sensor
import glob
import logging
import os
import threading
import time

class TemperatureSensor(Sensor):
    # DS18B20 digital temperature sensor

    def __init__(self, id, params, worker):
        super(TemperatureSensor, self).__init__(id, params, worker)
        # self.active = False
        try:
            self.min = int(params["min"])
            self.max = int(params["max"])
            self.bouncetime = int(params["bouncetime"])
            self.device_id = params["device_id"]
        except ValueError as ve:  # one of the configuration parameters can't be parsed as int
            logging.error("TemperatureSensor: Wasn't able to initialize the sensor, please check your configuration: %s" % ve)
            self.corrupted = True
            return
        except KeyError as ke:  # a required config parameter is missing
            logging.error("TemperatureSensor: Wasn't able to initialize the sensor, it seems there is a config parameter missing: %s" % ke)
            self.corrupted = True
            return

        os.system('modprobe w1-gpio')
        os.system('modprobe w1-therm')
        base_dir = '/sys/bus/w1/devices/'
        # device_folder = glob.glob(base_dir + '28*')[0]
        self.device_file = base_dir + self.device_id + '/w1_slave'
        if not os.path.isfile(self.device_file):  # no slave file which contains the temperature
            self.corrupted = True
            logging.error("TemperatureSensor: Wasn't able to find temperature file at %s" % self.device_file)
            return

        logging.debug("TemperatureSensor: Sensor initialized")

    def activate(self):
        if not self.corrupted:
            self.stop_thread = False
            self.checker_thread = threading.Thread(name="thread-checker-%s" % self.device_id, target=self.check_temperature)
            self.checker_thread.start()
        else:
            logging.error("TemperatureSensor: Sensor couldn't be activated")

    def deactivate(self):
        if not self.corrupted:
            self.stop_thread = True
        else:
            logging.error("TemperatureSensor: Sensor couldn't be deactivated")

    def check_temperature(self):
        while True:
            if self.stop_thread:  # exit thread when flag is set
                return
            current_temp = self.read_temp()
            if current_temp is None:  # transient read failure; retry on the next pass
                time.sleep(3)
                continue
            if current_temp < self.min or current_temp > self.max:
                self.alarm("Temperature is not in valid range: %s" % current_temp)
                time.sleep(self.bouncetime)
                continue
            time.sleep(3)

    def read_temp_raw(self):
        with open(self.device_file, 'r') as f:
            return f.readlines()

    def read_temp(self):
        lines = self.read_temp_raw()
        while lines[0].strip()[-3:] != 'YES':  # wait until the CRC check passes
            time.sleep(0.2)
            lines = self.read_temp_raw()
        equals_pos = lines[1].find('t=')
        if equals_pos != -1:
            temp_string = lines[1][equals_pos + 2:]
            temp_c = float(temp_string) / 1000.00
            return temp_c
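if __name__ == '__main__':
    # Offline sanity check of the 't=' parsing used by read_temp(); the two
    # sample lines mimic a typical DS18B20 w1_slave file (hex payload and
    # temperature value are illustrative only).
    sample = ["73 01 4b 46 7f ff 0d 10 41 : crc=41 YES\n",
              "73 01 4b 46 7f ff 0d 10 41 t=23187\n"]
    equals_pos = sample[1].find('t=')
    print(float(sample[1][equals_pos + 2:]) / 1000.00)  # -> 23.187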
gpl-3.0
729,324,569,803,099,900
30.452381
130
0.696064
false
3.214112
false
false
false
openstates/openstates
openstates/ct/events.py
1
1993
import datetime import json from pupa.scrape import Scraper, Event import pytz from .utils import open_csv class CTEventScraper(Scraper): _tz = pytz.timezone("US/Eastern") def __init__(self, *args, **kwargs): super(CTEventScraper, self).__init__(*args, **kwargs) def scrape(self): for (code, name) in self.get_comm_codes(): yield from self.scrape_committee_events(code, name) def scrape_committee_events(self, code, name): events_url = ( "http://www.cga.ct.gov/basin/fullcalendar/commevents.php?" "comm_code={}".format(code) ) events_data = self.get(events_url).text events = json.loads(events_data) DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%SZ" for info in events: if info["title"] is None: self.warning("Event found with no title; it will be skipped") continue elif info["title"].startswith("CANCELLED:"): self.info( "Cancelled event found; it will be skipped: {}".format( info["title"] ) ) continue when = datetime.datetime.strptime(info["start"], DATETIME_FORMAT) # end = datetime.datetime.strptime(info['end'], DATETIME_FORMAT) where = "{0} {1}".format(info["building"].strip(), info["location"].strip()) # end_time=self._tz.localize(end), event = Event( start_date=self._tz.localize(when), location_name=where, name=info["title"], description=info["title"], ) event.add_source(events_url) yield event def get_comm_codes(self): url = "ftp://ftp.cga.ct.gov/pub/data/committee.csv" page = self.get(url) page = open_csv(page) return [(row["comm_code"].strip(), row["comm_name"].strip()) for row in page]
gpl-3.0
-1,077,329,166,467,352,700
31.145161
88
0.537883
false
3.869903
false
false
false
frink182/stevostat
pir.py
1
1322
#!/usr/bin/env python
from time import sleep
from time import strftime
import RPi.GPIO as GPIO
import os
import paho.mqtt.publish as publish
from datetime import datetime

PIR = 26
SCREEN_TIMEOUT = 300
SCREEN = '/sys/class/backlight/rpi_backlight/bl_power'
ON = 0   # bl_power: 0 powers the backlight on
OFF = 1  # ...and any non-zero value powers it off
TOPIC = "presence/PIR"

GPIO.setmode(GPIO.BCM)
GPIO.setup(PIR, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)

def my_callback(channel):
    sleep(0.5)  # confirm the movement by waiting 0.5 sec
    if GPIO.input(PIR):  # and check again the input
        publishMqtt()
        screenOn()
        # stop detection for a while
        GPIO.remove_event_detect(PIR)
        sleep(60)
        GPIO.add_event_detect(PIR, GPIO.RISING, callback=my_callback, bouncetime=300)

GPIO.add_event_detect(PIR, GPIO.RISING, callback=my_callback, bouncetime=300)

def publishMqtt():
    message = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    publish.single(TOPIC, message, qos=0, retain=True, hostname="slug")

def screenOn():
    toggleScreen(ON)

def screenOff():
    toggleScreen(OFF)

def toggleScreen(value):
    # read the current power state first so we only write on changes
    with open(SCREEN, 'r') as f:
        current_status = int(f.read(1))
    if current_status != value:
        # print strftime("%d %b %H:%M:%S") + " toggle screen to " + str(value)
        os.system("echo " + str(value) + " > " + SCREEN)

# you can continue doing other stuff here
while True:
    sleep(60)
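# A matching subscriber sketch for the retained presence messages published
# above, runnable on any box that can reach the same "slug" broker (kept as
# a comment since the loop above never returns):
#
#   import paho.mqtt.client as mqtt
#
#   def on_message(client, userdata, msg):
#       print(msg.topic + " " + msg.payload.decode())
#
#   client = mqtt.Client()
#   client.on_message = on_message
#   client.connect("slug")
#   client.subscribe("presence/PIR")
#   client.loop_forever()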
gpl-2.0
-4,928,873,503,737,569,000
23.036364
79
0.712557
false
2.794926
false
false
false
rane-hs/fabric-py3
tests/Python26SocketServer.py
1
22074
"""Generic socket server classes. This module tries to capture the various aspects of defining a server: For socket-based servers: - address family: - AF_INET{,6}: IP (Internet Protocol) sockets (default) - AF_UNIX: Unix domain sockets - others, e.g. AF_DECNET are conceivable (see <socket.h> - socket type: - SOCK_STREAM (reliable stream, e.g. TCP) - SOCK_DGRAM (datagrams, e.g. UDP) For request-based servers (including socket-based): - client address verification before further looking at the request (This is actually a hook for any processing that needs to look at the request before anything else, e.g. logging) - how to handle multiple requests: - synchronous (one request is handled at a time) - forking (each request is handled by a new process) - threading (each request is handled by a new thread) The classes in this module favor the server type that is simplest to write: a synchronous TCP/IP server. This is bad class design, but save some typing. (There's also the issue that a deep class hierarchy slows down method lookups.) There are five classes in an inheritance diagram, four of which represent synchronous servers of four types: +------------+ | BaseServer | +------------+ | v +-----------+ +------------------+ | TCPServer |------->| UnixStreamServer | +-----------+ +------------------+ | v +-----------+ +--------------------+ | UDPServer |------->| UnixDatagramServer | +-----------+ +--------------------+ Note that UnixDatagramServer derives from UDPServer, not from UnixStreamServer -- the only difference between an IP and a Unix stream server is the address family, which is simply repeated in both unix server classes. Forking and threading versions of each type of server can be created using the ForkingMixIn and ThreadingMixIn mix-in classes. For instance, a threading UDP server class is created as follows: class ThreadingUDPServer(ThreadingMixIn, UDPServer): pass The Mix-in class must come first, since it overrides a method defined in UDPServer! Setting the various member variables also changes the behavior of the underlying server mechanism. To implement a service, you must derive a class from BaseRequestHandler and redefine its handle() method. You can then run various versions of the service by combining one of the server classes with your request handler class. The request handler class must be different for datagram or stream services. This can be hidden by using the request handler subclasses StreamRequestHandler or DatagramRequestHandler. Of course, you still have to use your head! For instance, it makes no sense to use a forking server if the service contains state in memory that can be modified by requests (since the modifications in the child process would never reach the initial state kept in the parent process and passed to each child). In this case, you can use a threading server, but you will probably have to use locks to avoid two requests that come in nearly simultaneous to apply conflicting changes to the server state. On the other hand, if you are building e.g. an HTTP server, where all data is stored externally (e.g. in the file system), a synchronous class will essentially render the service "deaf" while one request is being handled -- which may be for a very long time if a client is slow to reqd all the data it has requested. Here a threading or forking server is appropriate. In some cases, it may be appropriate to process part of a request synchronously, but to finish processing in a forked child depending on the request data. 
This can be implemented by using a synchronous server and doing an explicit fork in the request handler class handle() method. Another approach to handling multiple simultaneous requests in an environment that supports neither threads nor fork (or where these are too expensive or inappropriate for the service) is to maintain an explicit table of partially finished requests and to use select() to decide which request to work on next (or whether to handle a new incoming request). This is particularly important for stream services where each client can potentially be connected for a long time (if threads or subprocesses cannot be used). Future work: - Standard classes for Sun RPC (which uses either UDP or TCP) - Standard mix-in classes to implement various authentication and encryption schemes - Standard framework for select-based multiplexing XXX Open problems: - What to do with out-of-band data? BaseServer: - split generic "request" functionality out into BaseServer class. Copyright (C) 2000 Luke Kenneth Casson Leighton <[email protected]> example: read entries from a SQL database (requires overriding get_request() to return a table entry from the database). entry is processed by a RequestHandlerClass. """ # This file copyright (c) 2001-2015 Python Software Foundation; All Rights Reserved # Author of the BaseServer patch: Luke Kenneth Casson Leighton # XXX Warning! # There is a test suite for this module, but it cannot be run by the # standard regression test. # To run it manually, run Lib/test/test_socketserver.py. __version__ = "0.4" import socket import select import sys import os try: import threading except ImportError: import dummy_threading as threading __all__ = ["TCPServer", "UDPServer", "ForkingUDPServer", "ForkingTCPServer", "ThreadingUDPServer", "ThreadingTCPServer", "BaseRequestHandler", "StreamRequestHandler", "DatagramRequestHandler", "ThreadingMixIn", "ForkingMixIn"] if hasattr(socket, "AF_UNIX"): __all__.extend(["UnixStreamServer", "UnixDatagramServer", "ThreadingUnixStreamServer", "ThreadingUnixDatagramServer"]) class BaseServer: """Base class for server classes. Methods for the caller: - __init__(server_address, RequestHandlerClass) - serve_forever(poll_interval=0.5) - shutdown() - handle_request() # if you do not use serve_forever() - fileno() -> int # for select() Methods that may be overridden: - server_bind() - server_activate() - get_request() -> request, client_address - handle_timeout() - verify_request(request, client_address) - server_close() - process_request(request, client_address) - close_request(request) - handle_error() Methods for derived classes: - finish_request(request, client_address) Class variables that may be overridden by derived classes or instances: - timeout - address_family - socket_type - allow_reuse_address Instance variables: - RequestHandlerClass - socket """ timeout = None def __init__(self, server_address, RequestHandlerClass): """Constructor. May be extended, do not override.""" self.server_address = server_address self.RequestHandlerClass = RequestHandlerClass self.__is_shut_down = threading.Event() self.__serving = False def server_activate(self): """Called by constructor to activate the server. May be overridden. """ pass def serve_forever(self, poll_interval=0.5): """Handle one request at a time until shutdown. Polls for shutdown every poll_interval seconds. Ignores self.timeout. If you need to do periodic tasks, do them in another thread. 
""" self.__serving = True self.__is_shut_down.clear() while self.__serving: # XXX: Consider using another file descriptor or # connecting to the socket to wake this up instead of # polling. Polling reduces our responsiveness to a # shutdown request and wastes cpu at all other times. r, w, e = select.select([self], [], [], poll_interval) if r: self._handle_request_noblock() self.__is_shut_down.set() def shutdown(self): """Stops the serve_forever loop. Blocks until the loop has finished. This must be called while serve_forever() is running in another thread, or it will deadlock. """ self.__serving = False self.__is_shut_down.wait() # The distinction between handling, getting, processing and # finishing a request is fairly arbitrary. Remember: # # - handle_request() is the top-level call. It calls # select, get_request(), verify_request() and process_request() # - get_request() is different for stream or datagram sockets # - process_request() is the place that may fork a new process # or create a new thread to finish the request # - finish_request() instantiates the request handler class; # this constructor will handle the request all by itself def handle_request(self): """Handle one request, possibly blocking. Respects self.timeout. """ # Support people who used socket.settimeout() to escape # handle_request before self.timeout was available. timeout = self.socket.gettimeout() if timeout is None: timeout = self.timeout elif self.timeout is not None: timeout = min(timeout, self.timeout) fd_sets = select.select([self], [], [], timeout) if not fd_sets[0]: self.handle_timeout() return self._handle_request_noblock() def _handle_request_noblock(self): """Handle one request, without blocking. I assume that select.select has returned that the socket is readable before this function was called, so there should be no risk of blocking in get_request(). """ try: request, client_address = self.get_request() except socket.error: return if self.verify_request(request, client_address): try: self.process_request(request, client_address) except: self.handle_error(request, client_address) self.close_request(request) def handle_timeout(self): """Called if no new request arrives within self.timeout. Overridden by ForkingMixIn. """ pass def verify_request(self, request, client_address): """Verify the request. May be overridden. Return True if we should proceed with this request. """ return True def process_request(self, request, client_address): """Call finish_request. Overridden by ForkingMixIn and ThreadingMixIn. """ self.finish_request(request, client_address) self.close_request(request) def server_close(self): """Called to clean-up the server. May be overridden. """ pass def finish_request(self, request, client_address): """Finish one request by instantiating RequestHandlerClass.""" self.RequestHandlerClass(request, client_address, self) def close_request(self, request): """Called to clean up an individual request.""" pass def handle_error(self, request, client_address): """Handle an error gracefully. May be overridden. The default is to print a traceback and continue. """ print(('-' * 40)) print(('Exception happened during processing of request from %s' % (client_address,))) import traceback traceback.print_exc() # XXX But this goes to stderr! print(('-' * 40)) class TCPServer(BaseServer): """Base class for various socket-based server classes. Defaults to synchronous IP stream (i.e., TCP). 
Methods for the caller: - __init__(server_address, RequestHandlerClass, bind_and_activate=True) - serve_forever(poll_interval=0.5) - shutdown() - handle_request() # if you don't use serve_forever() - fileno() -> int # for select() Methods that may be overridden: - server_bind() - server_activate() - get_request() -> request, client_address - handle_timeout() - verify_request(request, client_address) - process_request(request, client_address) - close_request(request) - handle_error() Methods for derived classes: - finish_request(request, client_address) Class variables that may be overridden by derived classes or instances: - timeout - address_family - socket_type - request_queue_size (only for stream sockets) - allow_reuse_address Instance variables: - server_address - RequestHandlerClass - socket """ address_family = socket.AF_INET socket_type = socket.SOCK_STREAM request_queue_size = 5 allow_reuse_address = False def __init__(self, server_address, RequestHandlerClass, bind_and_activate=True): """Constructor. May be extended, do not override.""" BaseServer.__init__(self, server_address, RequestHandlerClass) self.socket = socket.socket(self.address_family, self.socket_type) if bind_and_activate: self.server_bind() self.server_activate() def server_bind(self): """Called by constructor to bind the socket. May be overridden. """ if self.allow_reuse_address: self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) self.socket.bind(self.server_address) self.server_address = self.socket.getsockname() def server_activate(self): """Called by constructor to activate the server. May be overridden. """ self.socket.listen(self.request_queue_size) def server_close(self): """Called to clean-up the server. May be overridden. """ self.socket.close() def fileno(self): """Return socket file number. Interface required by select(). """ return self.socket.fileno() def get_request(self): """Get the request and client address from the socket. May be overridden. """ return self.socket.accept() def close_request(self, request): """Called to clean up an individual request.""" request.close() class UDPServer(TCPServer): """UDP server class.""" allow_reuse_address = False socket_type = socket.SOCK_DGRAM max_packet_size = 8192 def get_request(self): data, client_addr = self.socket.recvfrom(self.max_packet_size) return (data, self.socket), client_addr def server_activate(self): # No need to call listen() for UDP. pass def close_request(self, request): # No need to close anything. pass class ForkingMixIn: """Mix-in class to handle each request in a new process.""" timeout = 300 active_children = None max_children = 40 def collect_children(self): """Internal routine to wait for children that have exited.""" if self.active_children is None: return while len(self.active_children) >= self.max_children: # XXX: This will wait for any child process, not just ones # spawned by this library. This could confuse other # libraries that expect to be able to wait for their own # children. try: pid, status = os.waitpid(0, 0) except os.error: pid = None if pid not in self.active_children: continue self.active_children.remove(pid) # XXX: This loop runs more system calls than it ought # to. There should be a way to put the active_children into a # process group and then use os.waitpid(-pgid) to wait for any # of that set, but I couldn't find a way to allocate pgids # that couldn't collide. 
        for child in self.active_children:
            try:
                pid, status = os.waitpid(child, os.WNOHANG)
            except os.error:
                pid = None
            if not pid:
                continue
            try:
                self.active_children.remove(pid)
            except ValueError as e:
                raise ValueError('%s. x=%d and list=%r' %
                                 (str(e), pid, self.active_children))

    def handle_timeout(self):
        """Wait for zombies after self.timeout seconds of inactivity.

        May be extended, do not override.
        """
        self.collect_children()

    def process_request(self, request, client_address):
        """Fork a new subprocess to process the request."""
        self.collect_children()
        pid = os.fork()
        if pid:
            # Parent process
            if self.active_children is None:
                self.active_children = []
            self.active_children.append(pid)
            self.close_request(request)
            return
        else:
            # Child process.
            # This must never return, hence os._exit()!
            try:
                self.finish_request(request, client_address)
                os._exit(0)
            except:
                try:
                    self.handle_error(request, client_address)
                finally:
                    os._exit(1)


class ThreadingMixIn:
    """Mix-in class to handle each request in a new thread."""

    # Decides how threads will act upon termination of the
    # main process
    daemon_threads = False

    def process_request_thread(self, request, client_address):
        """Same as in BaseServer but as a thread.

        In addition, exception handling is done here.

        """
        try:
            self.finish_request(request, client_address)
            self.close_request(request)
        except:
            self.handle_error(request, client_address)
            self.close_request(request)

    def process_request(self, request, client_address):
        """Start a new thread to process the request."""
        t = threading.Thread(target=self.process_request_thread,
                             args=(request, client_address))
        if self.daemon_threads:
            t.setDaemon(1)
        t.start()


class ForkingUDPServer(ForkingMixIn, UDPServer):
    pass


class ForkingTCPServer(ForkingMixIn, TCPServer):
    pass


class ThreadingUDPServer(ThreadingMixIn, UDPServer):
    pass


class ThreadingTCPServer(ThreadingMixIn, TCPServer):
    pass


if hasattr(socket, 'AF_UNIX'):

    class UnixStreamServer(TCPServer):
        address_family = socket.AF_UNIX

    class UnixDatagramServer(UDPServer):
        address_family = socket.AF_UNIX

    class ThreadingUnixStreamServer(ThreadingMixIn, UnixStreamServer):
        pass

    class ThreadingUnixDatagramServer(ThreadingMixIn, UnixDatagramServer):
        pass


class BaseRequestHandler:
    """Base class for request handler classes.

    This class is instantiated for each request to be handled.  The
    constructor sets the instance variables request, client_address
    and server, and then calls the handle() method.  To implement a
    specific service, all you need to do is to derive a class which
    defines a handle() method.

    The handle() method can find the request as self.request, the
    client address as self.client_address, and the server (in case it
    needs access to per-server information) as self.server.  Since a
    separate instance is created for each request, the handle() method
    can define arbitrary other instance variables.

    """

    def __init__(self, request, client_address, server):
        self.request = request
        self.client_address = client_address
        self.server = server
        # The Python 2 version cleared sys.exc_traceback here to help
        # garbage collection; Python 3 needs no equivalent cleanup (and
        # the sys.exc_info() tuple cannot be assigned into).
        self.setup()
        self.handle()
        self.finish()

    def setup(self):
        pass

    def handle(self):
        pass

    def finish(self):
        pass


# The following two classes make it possible to use the same service
# class for stream or datagram servers.
# Each class sets up these instance variables:
# - rfile: a file object from which the request is read
# - wfile: a file object to which the reply is written
# When the handle() method returns, wfile is flushed properly


class StreamRequestHandler(BaseRequestHandler):
    """Define self.rfile and self.wfile for stream sockets."""

    # Default buffer sizes for rfile, wfile.
    # We default rfile to buffered because otherwise it could be
    # really slow for large data (a getc() call per byte); we make
    # wfile unbuffered because (a) often after a write() we want to
    # read and we need to flush the line; (b) big writes to unbuffered
    # files are typically optimized by stdio even when big reads
    # aren't.
    rbufsize = -1
    wbufsize = 0

    def setup(self):
        self.connection = self.request
        self.rfile = self.connection.makefile('rb', self.rbufsize)
        self.wfile = self.connection.makefile('wb', self.wbufsize)

    def finish(self):
        if not self.wfile.closed:
            self.wfile.flush()
        self.wfile.close()
        self.rfile.close()


class DatagramRequestHandler(BaseRequestHandler):

    # XXX Regrettably, I cannot get this working on Linux;
    # s.recvfrom() doesn't return a meaningful client address.

    """Define self.rfile and self.wfile for datagram sockets."""

    def setup(self):
        # UDP payloads arrive as bytes under Python 3, so buffer them with
        # BytesIO (StringIO would reject bytes input).
        from io import BytesIO
        self.packet, self.socket = self.request
        self.rfile = BytesIO(self.packet)
        self.wfile = BytesIO()

    def finish(self):
        self.socket.sendto(self.wfile.getvalue(), self.client_address)
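# A minimal echo service wired together from the classes above, following
# the mix-in recipe described in the module docstring; the listen address
# is illustrative.
if __name__ == '__main__':
    class EchoHandler(StreamRequestHandler):
        def handle(self):
            # rfile/wfile are created by StreamRequestHandler.setup()
            for line in self.rfile:
                self.wfile.write(line)

    server = ThreadingTCPServer(('127.0.0.1', 9999), EchoHandler)
    server.serve_forever()  # each connection is handled in its own thread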
bsd-2-clause
8,849,255,045,800,621,000
30.670014
94
0.642339
false
4.378893
false
false
false
StefGou/Kijiji-Repost-Headless
kijiji_repost_headless/kijiji_cmd.py
1
4487
import argparse import os import sys from time import sleep import kijiji_api if sys.version_info < (3, 0): raise Exception("This program requires Python 3.0 or greater") def main(): ##Start here #Takes: config(user/pass) #One of: #post adPostingFile #show #delete adId #show #repost adPostingFile parser = argparse.ArgumentParser( description="Post ads on Kijiji") parser.add_argument('-u', '--username', help='username of your kijiji account') parser.add_argument('-p', '--password', help='password of your kijiji account') subparsers = parser.add_subparsers(help ='sub-command help') postParser = subparsers.add_parser('post', help='post a new ad') postParser.add_argument('inf_file', type=str, help='.inf file containing posting details') postParser.set_defaults(function=post_ad) folderParser = subparsers.add_parser('folder', help='post ad from folder') folderParser.add_argument('folderName', type=str, help='folder containing ad details') folderParser.set_defaults(function=post_folder) repostFolderParser = subparsers.add_parser('repost_folder', help='post ad from folder') repostFolderParser.add_argument('folderName', type=str, help='folder containing ad details') repostFolderParser.set_defaults(function=repost_folder) showParser = subparsers.add_parser('show', help='show currently listed ads') showParser.set_defaults(function=show_ads) deleteParser = subparsers.add_parser('delete', help='delete a listed ad') deleteParser.add_argument('id',type=str, help='id of the ad you wish to delete') deleteParser.set_defaults(function=delete_ad) nukeParser = subparsers.add_parser('nuke', help='delete all ads') nukeParser.set_defaults(function=nuke) repostParser = subparsers.add_parser('repost', help='repost an existing ad') repostParser.add_argument('inf_file', type = str,help = '.inf file containing posting details') repostParser.set_defaults(function=repost_ad) args = parser.parse_args() #try: args.function(args) #except AttributeError as err: # print(err) # parser.print_help() #HELPER FUNCTIONS def get_folder_data(args): args.inf_file = "item.inf" cred_file = args.folderName+"/login.inf" f = open(cred_file, 'r') creds = [line.strip() for line in f] args.username = creds[0] args.password = creds[1] def get_inf_details(inf_file): with open(inf_file, 'rt') as infFileLines: data = {key: val for line in infFileLines for (key, val) in (line.strip().split("="),)} files = [open(picture, 'rb').read() for picture in data['imageCsv'].split(",")] return [data, files] ##Actual Functions called from main def post_folder(args): get_folder_data(args) os.chdir(args.folderName) post_ad(args) def post_ad(args): [data, imageFiles] = get_inf_details(args.inf_file) api = kijiji_api.KijijiApi() api.login(args.username, args.password) api.post_ad_using_data(data, imageFiles) def show_ads(args): api = kijiji_api.KijijiApi() api.login(args.username, args.password) [print("{} '{}'".format(adId, adName)) for adName, adId in api.get_all_ads()] def delete_ad(args): api = kijiji_api.KijijiApi() api.login(args.username, args.password) api.delete_ad(args.id) def delete_ad_using_title(name): api = kijiji_api.KijijiApi() api.delete_ad_using_title(name) #Try to delete ad with same name if possible #post new ad def repost_ad(args): api = kijiji_api.KijijiApi() api.login(args.username, args.password) delAdName = "" for line in open(args.inf_file, 'rt'): [key, val] = line.strip().rstrip("\n").split("=") if key =='postAdForm.title': delAdName = val try: api.delete_ad_using_title(delAdName) print("Existing ad deleted before reposting") except 
kijiji_api.DeleteAdException: print("Did not find an existing ad with matching title, skipping ad deletion") pass # Must wait a bit before posting the same ad even after deleting it, otherwise Kijiji will automatically remove it sleep(180) post_ad(args) def repost_folder(args): get_folder_data(args) os.chdir(args.folderName) repost_ad(args) def nuke(args): api = kijiji_api.KijijiApi() api.login(args.username, args.password) allAds = api.get_all_ads() [api.delete_ad(adId) for adName, adId in allAds] if __name__ == "__main__": main()
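# For reference, get_inf_details() above expects `item.inf` to contain
# KEY=VALUE lines, with `imageCsv` naming the picture files to upload.
# Beyond postAdForm.title and imageCsv (which the code reads by name),
# the keys are dictated by Kijiji's posting form, so the sample below is
# only a placeholder sketch:
#
#   postAdForm.title=Example ad title
#   imageCsv=pics/one.jpg,pics/two.jpg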
mit
-5,459,380,662,855,144,000
32.485075
118
0.682416
false
3.253807
false
false
false
Ophiuchus1312/enigma2-master
Navigation.py
1
7217
from enigma import eServiceCenter, eServiceReference, eTimer, pNavigation, getBestPlayableServiceReference, iPlayableService from Components.ParentalControl import parentalControl from Components.config import config from Tools.BoundFunction import boundFunction from Tools.StbHardware import setFPWakeuptime, getFPWakeuptime, getFPWasTimerWakeup from time import time import RecordTimer import PowerTimer import Screens.Standby import NavigationInstance import ServiceReference from Screens.InfoBar import InfoBar, MoviePlayer from os import path # TODO: remove pNavgation, eNavigation and rewrite this stuff in python. class Navigation: def __init__(self, nextRecordTimerAfterEventActionAuto=False, nextPowerManagerAfterEventActionAuto=False): if NavigationInstance.instance is not None: raise NavigationInstance.instance NavigationInstance.instance = self self.ServiceHandler = eServiceCenter.getInstance() import Navigation as Nav Nav.navcore = self self.pnav = pNavigation() self.pnav.m_event.get().append(self.dispatchEvent) self.pnav.m_record_event.get().append(self.dispatchRecordEvent) self.event = [ ] self.record_event = [ ] self.currentlyPlayingServiceReference = None self.currentlyPlayingServiceOrGroup = None self.currentlyPlayingService = None self.RecordTimer = RecordTimer.RecordTimer() self.PowerTimer = PowerTimer.PowerTimer() self.__wasTimerWakeup = False self.__wasRecTimerWakeup = False self.__wasPowerTimerWakeup = False if getFPWasTimerWakeup(): self.__wasTimerWakeup = True if nextRecordTimerAfterEventActionAuto and abs(self.RecordTimer.getNextRecordingTime() - time()) <= 360: self.__wasRecTimerWakeup = True print 'RECTIMER: wakeup to standby detected.' f = open("/tmp/was_rectimer_wakeup", "w") f.write('1') f.close() # as we woke the box to record, place the box in standby. self.standbytimer = eTimer() self.standbytimer.callback.append(self.gotostandby) self.standbytimer.start(15000, True) elif nextPowerManagerAfterEventActionAuto: self.__wasPowerTimerWakeup = True print 'POWERTIMER: wakeup to standby detected.' f = open("/tmp/was_powertimer_wakeup", "w") f.write('1') f.close() # as a PowerTimer WakeToStandby was actiond to it. 
self.standbytimer = eTimer() self.standbytimer.callback.append(self.gotostandby) self.standbytimer.start(15000, True) def wasTimerWakeup(self): return self.__wasTimerWakeup def wasRecTimerWakeup(self): return self.__wasRecTimerWakeup def wasPowerTimerWakeup(self): return self.__wasPowerTimerWakeup def gotostandby(self): print 'TIMER: now entering standby' from Tools import Notifications Notifications.AddNotification(Screens.Standby.Standby) def dispatchEvent(self, i): for x in self.event: x(i) if i == iPlayableService.evEnd: self.currentlyPlayingServiceReference = None self.currentlyPlayingServiceOrGroup = None self.currentlyPlayingService = None def dispatchRecordEvent(self, rec_service, event): # print "record_event", rec_service, event for x in self.record_event: x(rec_service, event) def playService(self, ref, checkParentalControl = True, forceRestart = False): oldref = self.currentlyPlayingServiceReference if ref and oldref and ref == oldref and not forceRestart: print "ignore request to play already running service(1)" return 0 print "playing", ref and ref.toString() if path.exists("/proc/stb/lcd/symbol_signal") and config.lcd.mode.getValue() == '1': try: if ref.toString().find('0:0:0:0:0:0:0:0:0') == -1: signal = 1 else: signal = 0 f = open("/proc/stb/lcd/symbol_signal", "w") f.write(str(signal)) f.close() except: f = open("/proc/stb/lcd/symbol_signal", "w") f.write("0") f.close() elif path.exists("/proc/stb/lcd/symbol_signal") and config.lcd.mode.getValue() == '0': f = open("/proc/stb/lcd/symbol_signal", "w") f.write("0") f.close() if ref is None: self.stopService() return 0 InfoBarInstance = InfoBar.instance if not checkParentalControl or parentalControl.isServicePlayable(ref, boundFunction(self.playService, checkParentalControl = False)): if ref.flags & eServiceReference.isGroup: if not oldref: oldref = eServiceReference() playref = getBestPlayableServiceReference(ref, oldref) print "playref", playref if playref and oldref and playref == oldref and not forceRestart: print "ignore request to play already running service(2)" return 0 if not playref or (checkParentalControl and not parentalControl.isServicePlayable(playref, boundFunction(self.playService, checkParentalControl = False))): self.stopService() return 0 else: playref = ref if self.pnav: self.pnav.stopService() self.currentlyPlayingServiceReference = playref self.currentlyPlayingServiceOrGroup = ref if InfoBarInstance is not None: InfoBarInstance.servicelist.servicelist.setCurrent(ref) if self.pnav.playService(playref): print "Failed to start", playref self.currentlyPlayingServiceReference = None self.currentlyPlayingServiceOrGroup = None return 0 elif oldref: InfoBarInstance.servicelist.servicelist.setCurrent(oldref) return 1 def getCurrentlyPlayingServiceReference(self): return self.currentlyPlayingServiceReference def getCurrentlyPlayingServiceOrGroup(self): return self.currentlyPlayingServiceOrGroup def isMovieplayerActive(self): MoviePlayerInstance = MoviePlayer.instance if MoviePlayerInstance is not None and self.currentlyPlayingServiceReference.toString().find('0:0:0:0:0:0:0:0:0') != -1: from Screens.InfoBarGenerics import setResumePoint setResumePoint(MoviePlayer.instance.session) MoviePlayerInstance.close() def recordService(self, ref, simulate=False): service = None if not simulate: print "recording service: %s" % (str(ref)) if isinstance(ref, ServiceReference.ServiceReference): ref = ref.ref if ref: if ref.flags & eServiceReference.isGroup: ref = getBestPlayableServiceReference(ref, eServiceReference(), 
simulate) service = ref and self.pnav and self.pnav.recordService(ref, simulate) if service is None: print "record returned non-zero" return service def stopRecordService(self, service): ret = self.pnav and self.pnav.stopRecordService(service) return ret def getRecordings(self, simulate=False): return self.pnav and self.pnav.getRecordings(simulate) def getCurrentService(self): if not self.currentlyPlayingService: self.currentlyPlayingService = self.pnav and self.pnav.getCurrentService() return self.currentlyPlayingService def stopService(self): if self.pnav: self.pnav.stopService() self.currentlyPlayingServiceReference = None self.currentlyPlayingServiceOrGroup = None if path.exists("/proc/stb/lcd/symbol_signal"): f = open("/proc/stb/lcd/symbol_signal", "w") f.write("0") f.close() def pause(self, p): return self.pnav and self.pnav.pause(p) def shutdown(self): self.RecordTimer.shutdown() self.PowerTimer.shutdown() self.ServiceHandler = None self.pnav = None def stopUserServices(self): self.stopService()
gpl-2.0
6,904,893,535,751,126,000
34.033981
159
0.749758
false
3.361435
false
false
false
john5223/bottle-auth
auth/controllers/user_controller.py
1
2915
import logging
logger = logging.getLogger(__name__)

from bottle import route, get, post, delete
from bottle import request, response

def error(code, message):
    response.status = code
    message['status'] = code
    return message

get_user_table = lambda db: db.get_table('users', primary_id='userid', primary_type='String(100)')

@get('/users/<userid>')
def get_user(db, userid):
    user_table = get_user_table(db)
    user = user_table.find_one(userid=userid)
    if not user:
        return error(404, {'error': 'Not a valid user'})
    else:
        group_table = db.get_table('groups')
        groups = group_table.distinct('name', userid=userid)
        user['groups'] = sorted([x['name'] for x in groups])
        return user

@delete('/users/<userid>')
def delete_user(db, userid):
    user_table = get_user_table(db)
    user = user_table.find_one(userid=userid)
    if not user:
        return error(404, {'error': 'userid not found'})
    else:
        user_table.delete(userid=userid)
        return {'status': 200}

@route('/users/<userid>', method=['POST', 'PUT'])
def create_update_user(db, userid):
    data = request.json
    data_keys = data.keys()
    required_fields = ['first_name', 'last_name', 'userid', 'groups']
    missing_fields = [x for x in required_fields if x not in data_keys]
    extra_fields = [x for x in data_keys if x not in required_fields]
    if missing_fields:
        return error(400, {'error': 'Missing fields (%s)' % (','.join(missing_fields))})
    if extra_fields:
        return error(400, {'error': 'Extra fields (%s)' % (','.join(extra_fields))})

    user_table = get_user_table(db)
    existing_user = user_table.find_one(userid=data['userid'])
    if request.method == 'POST' and existing_user:
        return error(409, {'error': 'User already exists'})
    if request.method == 'PUT' and not existing_user:
        return error(404, {'error': 'User does not exist'})

    # update this user's group membership
    userid = data.get('userid')
    groups = data.pop('groups')
    groups_table = db.get_table('groups')
    if request.method == 'POST':
        user_table.insert(data)
    elif request.method == 'PUT':
        user_table.update(data, ['userid'])
    for name in groups:
        groups_table.upsert(dict(name=name, userid=userid), ['name', 'userid'])

    if request.method == 'PUT':
        # get rid of any old groups for this user; build one named bind
        # parameter per group because this raw execute() call can't bind a
        # Python list directly (note this assumes `groups` is non-empty,
        # which the required-fields check above does not enforce)
        params = {}
        for counter, group in enumerate(groups, 1):
            params["group_name" + str(counter)] = group
        where_clause = 'name NOT IN(:' + ",:".join(params.keys()) + ')'
        params['userid'] = userid
        q = '''DELETE FROM groups WHERE userid=:userid AND ''' + where_clause
        db.executable.execute(q, params)

    return {'status': 200, 'user': get_user(db, userid)}
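if __name__ == '__main__':
    # Illustrative client call for the handlers above; assumes the app is
    # already being served on localhost:8080 with a dataset-style `db`
    # plugin installed, and that the `requests` library is available.
    import requests
    payload = {"userid": "jdoe", "first_name": "Jane",
               "last_name": "Doe", "groups": ["admins", "users"]}
    r = requests.post("http://localhost:8080/users/jdoe", json=payload)
    print(r.json())  # {'status': 200, 'user': {...}} on success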
gpl-2.0
-8,540,600,454,075,682,000
34.120482
107
0.616467
false
3.576687
false
false
false
ProfessorX/Config
.PyCharm30/system/python_stubs/-1247971765/PyKDE4/kdeui/KPixmapCache.py
1
2878
# encoding: utf-8 # module PyKDE4.kdeui # from /usr/lib/python3/dist-packages/PyKDE4/kdeui.cpython-34m-x86_64-linux-gnu.so # by generator 1.135 # no doc # imports import PyKDE4.kdecore as __PyKDE4_kdecore import PyQt4.QtCore as __PyQt4_QtCore import PyQt4.QtGui as __PyQt4_QtGui import PyQt4.QtSvg as __PyQt4_QtSvg class KPixmapCache(): # skipped bases: <class 'sip.wrapper'> # no doc def cacheLimit(self, *args, **kwargs): # real signature unknown pass def deleteCache(self, *args, **kwargs): # real signature unknown pass def discard(self, *args, **kwargs): # real signature unknown pass def ensureInited(self, *args, **kwargs): # real signature unknown pass def find(self, *args, **kwargs): # real signature unknown pass def insert(self, *args, **kwargs): # real signature unknown pass def isEnabled(self, *args, **kwargs): # real signature unknown pass def isValid(self, *args, **kwargs): # real signature unknown pass def loadCustomData(self, *args, **kwargs): # real signature unknown pass def loadCustomIndexHeader(self, *args, **kwargs): # real signature unknown pass def loadFromFile(self, *args, **kwargs): # real signature unknown pass def loadFromSvg(self, *args, **kwargs): # real signature unknown pass def recreateCacheFiles(self, *args, **kwargs): # real signature unknown pass def removeEntries(self, *args, **kwargs): # real signature unknown pass def removeEntryStrategy(self, *args, **kwargs): # real signature unknown pass def setCacheLimit(self, *args, **kwargs): # real signature unknown pass def setRemoveEntryStrategy(self, *args, **kwargs): # real signature unknown pass def setTimestamp(self, *args, **kwargs): # real signature unknown pass def setUseQPixmapCache(self, *args, **kwargs): # real signature unknown pass def setValid(self, *args, **kwargs): # real signature unknown pass def size(self, *args, **kwargs): # real signature unknown pass def timestamp(self, *args, **kwargs): # real signature unknown pass def useQPixmapCache(self, *args, **kwargs): # real signature unknown pass def writeCustomData(self, *args, **kwargs): # real signature unknown pass def writeCustomIndexHeader(self, *args, **kwargs): # real signature unknown pass def __init__(self, *args, **kwargs): # real signature unknown pass __weakref__ = property(lambda self: object(), lambda self, v: None, lambda self: None) # default """list of weak references to the object (if defined)""" RemoveLeastRecentlyUsed = 2 RemoveOldest = 0 RemoveSeldomUsed = 1 RemoveStrategy = None # (!) real value is ''
gpl-2.0
-1,374,738,902,669,333,000
26.941748
101
0.646977
false
3.915646
false
false
false
EclipseXuLu/DataHouse
DataHouse/crawler/university_spider.py
1
3941
import requests from bs4 import BeautifulSoup from lxml import etree import pandas as pd from io import StringIO, BytesIO university_list = [] class University(): def __init__(self, name='', is_985=False, is_211=False, has_institute=False, location='', orgnization='', education_level='', education_type='', university_type=''): self.name = name self.is_985 = is_985 self.is_211 = is_211 self.has_institute = has_institute self.location = location self.orgnization = orgnization self.education_level = education_level self.education_type = education_type self.university_type = university_type def __str__(self): return "{ " + str(self.name) + " ;" + str(self.is_985) + " ;" + str(self.is_211) + " ;" + str( self.has_institute) + " ;" + self.location + " ;" + self.orgnization + " ;" + self.education_level + " ;" \ + self.education_type + " ;" + self.university_type + " }" def crawl(page_url): headers = { 'Host': 'gaokao.chsi.com.cn', 'Upgrade-Insecure-Requests': '1', 'Referer': 'http://gaokao.chsi.com.cn/sch/search--ss-on,searchType-1,option-qg,start-0.dhtml', 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) ' 'Chrome/59.0.3071.115 Safari/537.36' } response = requests.get(page_url, timeout=20, headers=headers) if response.status_code == 200: html_raw = response.text soup = BeautifulSoup(html_raw, 'html5lib') parser = etree.HTMLParser() tree = etree.parse(StringIO(html_raw), parser) for tr in soup.find_all(bgcolor="#E1E1E1")[0].find_all('tr', attrs={'bgcolor': '#FFFFFF'}): try: name = tr.td.a.text.strip() # 大学名称 detail_url = 'http://gaokao.chsi.com.cn' + tr.td.a['href'] # 详情信息页面 is_985 = True if tr.td.find(class_='a211985 span985') is not None else False # 985 is_211 = True if tr.td.find(class_='a211985 span211') is not None else False # 211 has_institute = True if tr.td.find(class_='a211985 spanyan') is not None else False # 研究生院 location = tr.find_all('td')[1].get_text().strip() # 学校地址 orgnization = tr.find_all('td')[2].get_text().strip() # 所属机构 education_level = tr.find_all('td')[3].get_text().strip() # 学历层次 education_type = tr.find_all('td')[4].get_text().strip() # 办学类型 university_type = tr.find_all('td')[5].get_text().strip() # 院校类型 university = University(name, is_985, is_211, has_institute, location, orgnization, education_level, education_type, university_type) print(university) university_list.append([name, is_985, is_211, has_institute, location, orgnization, education_level, education_type, university_type]) except: pass else: print('Error!!') def output(some_list, filepath): col = [ u'院校名称', u'985', u'211', u'研究生院', u'所在地', u'院校隶属', u'学历层次', u'办学类型', u'院校类型'] df = pd.DataFrame(some_list, columns=col) df.to_excel(filepath, '大学', index=False) if __name__ == '__main__': page_urllist = ['http://gaokao.chsi.com.cn/sch/search--ss-on,searchType-1,option-qg,start-%d.dhtml' % _ for _ in range(0, 2660, 20)] # crawl('http://gaokao.chsi.com.cn/sch/search--ss-on,searchType-1,option-qg,start-0.dhtml') for page_url in page_urllist: crawl(page_url) output(university_list, './大学.xlsx')
mit
-7,579,131,018,589,852,000
40.423913
119
0.558908
false
3.000787
false
false
false
vinhqdang/algorithms_for_interviews
chapter1/problem1_3.py
1
1291
# Given a sorted array and a key k, return the index of the first
# occurrence of k (the boundary behaviour is pinned down by the tests
# below); return -1 if k lies outside the array's value range.

def test_find_first_larger_1 ():
    assert (find_first_larger([1,2,3,4,5,5,6],7) == -1)

def test_find_first_larger_2 ():
    assert (find_first_larger([1,2,3,4,5,6,7,8],4) == 3)

def test_find_first_larger_3 ():
    assert (find_first_larger([1,2,3,4,4,4,4,4,4,4,4,4,4,4,4,4,4,5,6,7,8],4) == 3)

def test_find_first_larger_4 ():
    assert (find_first_larger([1,2,3,4,4,4,4,4,4,4,4,4,4,4,4,4,4,5,6,7,8],5) == 17)

def find_first_larger (_arr, _k):
    if (len(_arr) == 0):
        return -1
    if _k < _arr[0] or _k > _arr[-1]:
        return -1
    l = 0
    u = len(_arr) - 1
    # Search for _k - 0.5 instead of _k: that value can never equal an
    # integer element, so the bisection converges on the boundary just
    # before the run of _k's rather than landing somewhere inside it.
    new_k = _k - 0.5
    while u > l:
        m = l + int((u - l) / 2)
        if _arr[m] < new_k:
            l = m + 1
        else:  # _arr[m] > new_k; equality is impossible with the half offset
            u = m - 1
    m = l + int((u - l) / 2)
    # m now sits next to the boundary; one comparison against the real key
    # decides which side of it we landed on.
    if _arr[m] == _k:
        return m
    if _arr[m] < _k:
        return m + 1
    if _arr[m] > _k:
        return m - 1

if __name__ == '__main__':
    print (find_first_larger([1,2,3,4,5,6,7,8],4) == 3)
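# A worked boundary case for the half-offset trick above: inside a run of
# equal keys the search converges on the run's left edge (index 3 here,
# not any of the later 4's), which is exactly what the tests pin down.
if __name__ == '__main__':
    assert find_first_larger([1, 2, 3, 4, 4, 5], 4) == 3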
gpl-3.0
55,772,695,387,996,910
24.333333
83
0.492641
false
2.338768
false
false
false
iynaix/manga-downloader-flask
manga/spiders/animea.py
1
1663
import datetime from scrapy.selector import Selector from .base import BaseSpider as Spider from manga.items import MangaItem, MangaChapterItem from utils import extract_link class AnimeA(Spider): name = "animea" allowed_domains = ["animea.net"] start_urls = [ "http://manga.animea.net/series_old.php", ] def parse(self, resp): hxs = Selector(resp) for manga in hxs.css("a.tooltip_manga"): item = MangaItem() item['name'], item['link'] = extract_link(manga) yield item class AnimeAChapterSpider(Spider): name = "animea_chapter" allowed_domains = ["animea.net"] # parses the date format def parsedate(self, s): # date is in number of days / weeks / months / years ago s = s.strip().lower().split() val = int(s[0]) unit = s[1] if "day" in unit: delta = val elif "week" in unit: delta = val * 7 elif "month" in unit: delta = val * 30 elif "year" in unit: delta = val * 365 else: raise ValueError("Unrecognised unit: %s" % unit) return datetime.date.today() - datetime.timedelta(delta) def parse(self, resp): hxs = Selector(resp) for row in hxs.css("ul.chapterlistfull > li"): item = MangaChapterItem() try: item["name"], item["link"] = extract_link(row.xpath("a")[0]) dt = row.css("span.date::text") item["date"] = self.parsedate(dt.extract()[0]) except IndexError: continue yield item
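# parsedate() never touches `self`, so its relative-date conversion can be
# sanity-checked from a shell with the package importable (under Python 3,
# where an unbound method is a plain function), e.g.:
#
#   spider_cls = AnimeAChapterSpider
#   today = datetime.date.today()
#   spider_cls.parsedate(None, " 3 Weeks ago ")  # -> today - 21 days
#   spider_cls.parsedate(None, "2 months ago")   # -> today - 60 days (months approximated as 30 days)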
mit
-8,047,761,475,780,702,000
26.716667
76
0.549008
false
3.662996
false
false
false
libravatar/libravatar
libravatar/account/urls.py
1
5296
# Copyright (C) 2011, 2013, 2015, 2016 Francois Marier <[email protected]> # Copyright (C) 2010 Francois Marier <[email protected]> # Jonathan Harker <[email protected]> # Brett Wilkins <[email protected]> # # This file is part of Libravatar # # Libravatar is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published # by the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Libravatar is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with Libravatar. If not, see <http://www.gnu.org/licenses/>. from django.conf.urls import url, patterns # pylint: disable=invalid-name urlpatterns = patterns('', url('login/$', 'django.contrib.auth.views.login', {'template_name': 'account/login.html'}, name='login'), url('logout/$', 'django.contrib.auth.views.logout', {'next_page': '/'}, name='logout'), # must be the last pattern using this view! url('password_change/$', 'django.contrib.auth.views.password_change', {'template_name': 'account/password_change.html'}, name='password_change'), url('password_change_done/$', 'django.contrib.auth.views.password_change_done', {'template_name': 'account/password_change_done.html'}, name='password_change_done'), url('password_set/$', 'libravatar.account.views.password_set'), url('add_email/$', 'libravatar.account.views.add_email'), url('add_openid/$', 'libravatar.account.views.add_openid'), url('confirm_email/$', 'libravatar.account.views.confirm_email'), url(r'^(?P<openid_id>\d+)/confirm_openid/$', 'libravatar.account.views.confirm_openid'), url(r'^(?P<openid_id>\d+)/redirect_openid/$', 'libravatar.account.views.redirect_openid'), url(r'^(?P<email_id>\d+)/remove_confirmed_email/$', 'libravatar.account.views.remove_confirmed_email'), url(r'^(?P<email_id>\d+)/remove_unconfirmed_email/$', 'libravatar.account.views.remove_unconfirmed_email'), url(r'^(?P<openid_id>\d+)/remove_confirmed_openid/$', 'libravatar.account.views.remove_confirmed_openid'), url(r'^(?P<openid_id>\d+)/remove_unconfirmed_openid/$', 'libravatar.account.views.remove_unconfirmed_openid'), url('delete/$', 'libravatar.account.views.delete'), url('export/$', 'libravatar.account.views.export'), url('new/$', 'libravatar.account.views.new'), url('password_reset/$', 'libravatar.account.views.password_reset', name='password_reset'), url('password_reset_confirm/$', 'libravatar.account.views.password_reset_confirm', name='password_reset_confirm'), url('profile/$', 'libravatar.account.views.profile'), url('profile_success/$', 'libravatar.account.views.successfully_authenticated'), url(r'^(?P<email_id>\d+)/assign_photo_email/$', 'libravatar.account.views.assign_photo_email'), url(r'^(?P<openid_id>\d+)/assign_photo_openid/$', 'libravatar.account.views.assign_photo_openid'), url(r'^(?P<user_id>\d+)/import_photo/$', 'libravatar.account.views.import_photo'), url('upload_photo/$', 'libravatar.account.views.upload_photo'), url('crop_photo/$', 'libravatar.account.views.crop_photo'), url(r'^(?P<photo_id>\d+)/crop_photo/?$', 'libravatar.account.views.crop_photo'), url(r'^(?P<photo_id>\d+)/auto_crop/?$', 'libravatar.account.views.auto_crop'), url(r'^(?P<photo_id>\d+)/delete_photo/$', 'libravatar.account.views.delete_photo'), # Default page 
url(r'^$', 'libravatar.account.views.profile'), )
agpl-3.0
-5,768,908,466,926,725,000
55.946237
87
0.498301
false
4.52263
false
false
false
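The `patterns()` helper in the urls.py above was deprecated in Django 1.8 and removed in 1.10; on later versions the same routes are declared as a plain list of `url()` entries with view callables instead of dotted strings. A minimal migration sketch, assuming the `libravatar.account.views` module exposes these functions as importable callables (the module layout is taken from the dotted paths above, not verified against the repository):

# Hypothetical migration sketch for Django >= 1.10: urlpatterns becomes a
# plain list and views are passed as callables rather than dotted strings.
from django.conf.urls import url
from django.contrib.auth import views as auth_views
from libravatar.account import views as account_views

urlpatterns = [
    url(r'^login/$', auth_views.login,
        {'template_name': 'account/login.html'}, name='login'),
    url(r'^logout/$', auth_views.logout, {'next_page': '/'}, name='logout'),
    url(r'^profile/$', account_views.profile),
    # ... the remaining routes follow the same callable-based pattern
]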
natsheh/semantic_query
api.py
1
4747
# -*- coding: utf-8 -*- # # This file is part of semantic_query. # Copyright (C) 2016 CIAPPLE. # # This is a free software; you can redistribute it and/or modify it # under the terms of the Revised BSD License; see LICENSE file for # more details. # Semantic Query API # Author: Hussein AL-NATSHEH <[email protected]> # Affiliation: CIAPPLE, Jordan import os, argparse, pickle, json import numpy as np from sklearn.feature_extraction.text import TfidfVectorizer from sklearn.decomposition import TruncatedSVD from sklearn.metrics.pairwise import cosine_similarity from sklearn.pipeline import Pipeline from collections import OrderedDict from itertools import islice from bs4 import BeautifulSoup from flask import Flask, request from flask_httpauth import HTTPBasicAuth from flask_restful import Resource, Api, reqparse def top(n, sorted_results): # Take the n highest-scoring (doc_index, score) pairs. return list(islice(sorted_results.iteritems(), n)) def query_by_text(transformer, transformed, documents, index, query_text, url, n_results=10): # sklearn transformers expect an iterable of documents, so wrap a bare # string; BeautifulSoup .contents already arrives as a list. if isinstance(query_text, basestring): query_text = [query_text] query = transformer.transform(query_text) sims = cosine_similarity(query.reshape(1, -1), transformed) scores = sims[0][:].reshape(-1, 1) results = dict() for i in range(len(transformed)): results[i] = scores[i] sorted_results = OrderedDict(sorted(results.items(), key=lambda k: k[1], reverse=True)) topn = top(n_results, sorted_results) results = np.array(range(n_results), dtype=np.object) for rank, (answer, score) in enumerate(topn): title = documents[answer].split('\n__')[0] title_t = title.replace(" ", "_") doc_id = str(index[answer]) reference = url + title_t results[rank] = {'reference': reference, 'score': str(score), 'doc_id': doc_id, 'title': title, 'answer': documents[answer]} return results.tolist() class Query(Resource): def post(self): try: parser = reqparse.RequestParser() parser.add_argument('question', type=str, required=True, help='Query text') parser.add_argument('userId', type=str, required=False, help='User ID') parser.add_argument('questionId', type=str, required=False, help='Question ID') parser.add_argument('limit', type=int, required=False, help='Size of the returned results') args = parser.parse_args() q = request.args.get('question') question = BeautifulSoup(q, "lxml").p.contents try: n_results = int(request.args.get('limit')) if n_results > 100: n_results = 100 except (TypeError, ValueError): n_results = 3 user_id = request.args.get('userId') question_id = request.args.get('questionId') response = {} response['userId'] = user_id response['questionId'] = question_id response['limit'] = n_results response['interesteId'] = 'future_feature' response['results'] = query_by_text(transformer, transformed, documents, index, question, url, n_results=n_results) if isinstance(question, list): question = question[0] response['question'] = question # Flask-RESTful serializes a returned dict to JSON itself; the # make_response() object built here previously was never returned # (and assigning a dict to resp.data would raise), so it is dropped. # CORS headers belong in an after_request hook instead. return response except Exception as e: return {'error': str(e)} def get(self): try: q = request.args.get('question') question = BeautifulSoup(q, "lxml").p.contents # request.args.get() returns None rather than raising, so apply # the intended defaults explicitly instead of via try/except. user_id = request.args.get('userId') or 'uid1' question_id = request.args.get('questionId') or 'qid1' try: n_results = int(request.args.get('limit')) if n_results > 100: n_results = 100 except (TypeError, ValueError): n_results = 3 response = dict() response['userId'] = user_id response['questionId'] = question_id response['limit'] = n_results response['interesteId'] = 'future_feature' results = query_by_text(transformer, transformed, documents, index, question, url, n_results=n_results) response['results'] = results if isinstance(question, list): question = question[0] response['question'] = question return response except Exception as e: return {'error': str(e)} app = Flask(__name__, static_url_path="") auth = HTTPBasicAuth() api = Api(app) api.add_resource(Query, '/Query/') if __name__ == '__main__': transformed_file = 'transformed.pickle' docs_file = 'documents.pickle' index_file = 'index.pickle' transformer_file = 'transformer.pickle' transformed = np.load(transformed_file) index = pickle.load(open(index_file, 'rb')) documents = pickle.load(open(docs_file, 'rb')) print 'number of documents :', len(index) transformer = pickle.load(open(transformer_file, 'rb')) url_config = json.load(open('url_config.json', 'r')) url = url_config['url'] print 'Ready to call!!' app.run(host='0.0.0.0', threaded=True)
bsd-3-clause
6,488,388,960,674,293,000
30.230263
127
0.688224
false
3.168892
false
false
false
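api.py imports TfidfVectorizer, TruncatedSVD, and Pipeline but never calls them; they belong to the offline step that produces the transformer.pickle and transformed.pickle artifacts the API loads at startup. A minimal sketch of that step, assuming `documents` is the same list the API later loads (the hyperparameter values are illustrative assumptions, not taken from the repository, and the companion index.pickle build is omitted):

# Hypothetical offline build for the artifacts api.py loads at startup.
import pickle
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.decomposition import TruncatedSVD
from sklearn.pipeline import Pipeline

documents = pickle.load(open('documents.pickle', 'rb'))
transformer = Pipeline([
    ('tfidf', TfidfVectorizer(max_features=100000)),
    ('svd', TruncatedSVD(n_components=300)),  # LSA-style dense doc vectors
])
transformed = transformer.fit_transform(documents)  # shape: (n_docs, 300)

pickle.dump(transformer, open('transformer.pickle', 'wb'))
transformed.dump('transformed.pickle')  # readable back via np.load()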
SitiBanc/1061_NCTU_IOMDS
1108/HW7/HW7.py
1
3177
#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Wed Nov 8 20:25:18 2017 @author: sitibanc """ import numpy as np from scipy import signal from PIL import Image def gen2DGaussian(stdv, mean, h, w): # Build an h x w Gaussian kernel on a [-1, 1] x [-1, 1] grid. x, y = np.meshgrid(np.linspace(-1, 1, w), np.linspace(-1, 1, h)) d = np.sqrt(x ** 2 + y ** 2) sigma, mu = stdv, mean g = np.exp(-((d - mu) ** 2 / (2.0 * sigma ** 2))) return g def applyMask(M, I_array): # Convolve each RGB channel with the mask M. R = I_array[:, :, 0] R2 = signal.convolve2d(R, M, mode = 'same', boundary = 'symm') G = I_array[:, :, 1] G2 = signal.convolve2d(G, M, mode = 'same', boundary = 'symm') B = I_array[:, :, 2] B2 = signal.convolve2d(B, M, mode = 'same', boundary = 'symm') data = I_array.copy() # Clip before casting: convolution results can leave [0, 255], and a # bare astype('uint8') would wrap around instead of saturating. data[:, :, 0] = np.clip(R2, 0, 255).astype('uint8') data[:, :, 1] = np.clip(G2, 0, 255).astype('uint8') data[:, :, 2] = np.clip(B2, 0, 255).astype('uint8') return data # Load the image I = Image.open('sample.jpg') data = np.asarray(I) # ============================================================================= # HW7-1: Gaussian Blur # ============================================================================= # Generate 2D Gaussian Array M1 = gen2DGaussian(1.0, 0.0, 10, 10) M1 = M1 / M1.sum() # Apply Mask masked1 = applyMask(M1, data) I1 = Image.fromarray(masked1.astype('uint8'), 'RGB') I1.show() # ============================================================================= # HW7-2: Motion Blur # ============================================================================= M2 = np.ones((20, 1)) M2 = M2 / M2.sum() # Apply Mask masked2 = applyMask(M2, data) I2 = Image.fromarray(masked2.astype('uint8'), 'RGB') I2.show() # ============================================================================= # HW7-3: Sharpen filter -- difference of two Gaussians with different # standard deviations # ============================================================================= # Generate Mask #sig1 = gen2DGaussian(1.0, 0.0, 3, 3) #sig2 = gen2DGaussian(2.0, 0.0, 3, 3) #M3 = sig1 - sig2 #M3 = M3 / M3.sum() # Another Mask M3 = np.array([[-1, -1, -1], [-1, 16, -1], [-1, -1, -1]]) M3 = M3 / 8 # Apply Mask masked3 = applyMask(M3, data) I3 = Image.fromarray(masked3.astype('uint8'), 'RGB') I3.show() # ============================================================================= # HW7-4: Sobel filter (edge enhancement, sketch-like result) # ============================================================================= # Gray-scale image I0 = I.convert('L') data0 = np.asarray(I0) # Generate Mask sobel_x = np.array([[-1, 0, 1], [-2, 0, 2], [-1, 0, 1]]) sobel_y = np.array([[1, 2, 1], [0, 0, 0], [-1, -2, -1]]) # Apply Mask Ix = signal.convolve2d(data0, sobel_x, mode = 'same', boundary = 'symm') Iy = signal.convolve2d(data0, sobel_y, mode = 'same', boundary = 'symm') masked4 = Ix ** 2 + Iy ** 2 # squared gradient magnitude # Adjust Color: the strongest 20% of edges become black, the rest white tmp = masked4.flatten() tmp[::-1].sort() # in-place sort of the reversed view -> descending order n = 0.2 idx = int(len(tmp) * n) for h in range(masked4.shape[0]): for w in range(masked4.shape[1]): if masked4[h, w] >= tmp[idx]: masked4[h, w] = 0 else: masked4[h, w] = 255 I4 = Image.fromarray(masked4.astype('uint8'), 'L') I4.show()
apache-2.0
7,698,695,683,274,534,000
30.2
79
0.460083
false
2.820072
false
false
false
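The per-pixel double loop at the end of HW7-4 can be collapsed into a single vectorized comparison. A sketch of the equivalent NumPy form, using the same `masked4`, `tmp`, and `idx` as in the script above:

# Equivalent, vectorized form of the thresholding loop: pixels whose
# squared gradient magnitude falls in the top 20% become black (0),
# everything else white (255).
threshold = tmp[idx]
masked4 = np.where(masked4 >= threshold, 0, 255)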
wufangjie/leetcode
015. 3Sum.py
1
1747
''' Given an array S of n integers, are there elements a, b, c in S such that a + b + c = 0? Find all unique triplets in the array which gives the sum of zero. Note: The solution set must not contain duplicate triplets. For example, given array S = [-1, 0, 1, 2, -1, -4], A solution set is: [ [-1, 0, 1], [-1, -1, 2] ] ''' def _move_right(sorted_list, lo, hi, val): # Advance lo past duplicates of val. while lo < hi: lo += 1 if sorted_list[lo] != val: break return lo def _move_left(sorted_list, lo, hi, val): # Retreat hi past duplicates of val. while lo < hi: hi -= 1 if sorted_list[hi] != val: break return hi def twoSum(sorted_list, lo, hi, theSum): # Two-pointer scan over sorted_list[lo..hi] for pairs summing to theSum. while lo < hi: test = sorted_list[lo] + sorted_list[hi] if test == theSum: yield [sorted_list[lo], sorted_list[hi]] lo = _move_right(sorted_list, lo, hi, sorted_list[lo]) hi = _move_left(sorted_list, lo, hi, sorted_list[hi]) elif test > theSum: hi = _move_left(sorted_list, lo, hi, sorted_list[hi]) else: lo = _move_right(sorted_list, lo, hi, sorted_list[lo]) class Solution(object): def threeSum(self, nums): """ :type nums: List[int] :rtype: List[List[int]] """ nums = sorted(nums) theMax = len(nums) - 1 pre = float('inf') results = [] for i, a in enumerate(nums[:-2], 1): if a > 0: break if a != pre: pre = a for comb in twoSum(nums, i, theMax, -a): results.append([a] + comb) return results if __name__ == '__main__': # The scan emits triplets in sorted-array order ([-1, -1, 2] before # [-1, 0, 1]), so compare order-insensitively; the original assert used # the problem statement's sample ordering and always failed. assert sorted(Solution().threeSum([-1, 0, 1, 2, -1, -4])) == [[-1, -1, 2], [-1, 0, 1]]
gpl-3.0
6,233,434,110,019,889,000
25.876923
155
0.502003
false
3.253259
false
false
false
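The two-pointer scan above runs in O(n^2) after the O(n log n) sort. A quick way to gain confidence in the duplicate-skipping helpers is to cross-check against a brute-force enumeration on random inputs; a sketch (set comparison, since triplet ordering is implementation-defined, and the input sizes are arbitrary choices):

# Hypothetical sanity check: compare against O(n^3) brute force on random inputs.
import random
from itertools import combinations

def brute_force(nums):
    # Every position triple summing to zero, deduplicated as sorted tuples.
    return {tuple(sorted(c)) for c in combinations(nums, 3) if sum(c) == 0}

for _ in range(100):
    nums = [random.randint(-10, 10) for _ in range(15)]
    fast = {tuple(t) for t in Solution().threeSum(nums)}  # triplets already sorted
    assert fast == brute_force(nums)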