code | repo_name | path | language | license | size
---|---|---|---|---|---
from time import time
from gi.repository import GLib, GObject
from pychess.Utils.const import WHITE, BLACK
from pychess.System.Log import log
class TimeModel(GObject.GObject):
__gsignals__ = {
"player_changed": (GObject.SignalFlags.RUN_FIRST, None, ()),
"time_changed": (GObject.SignalFlags.RUN_FIRST, None, ()),
"zero_reached": (GObject.SignalFlags.RUN_FIRST, None, (int, )),
"pause_changed": (GObject.SignalFlags.RUN_FIRST, None, (bool, ))
}
############################################################################
# Initing #
############################################################################
def __init__(self, secs=0, gain=0, bsecs=-1, minutes=-1):
GObject.GObject.__init__(self)
if bsecs < 0:
bsecs = secs
if minutes < 0:
minutes = secs / 60
self.minutes = minutes # The number of minutes for the original starting
# time control (not necessarily where the game was resumed,
# i.e. self.intervals[0][0])
self.intervals = [[secs], [bsecs]]
self.gain = gain
self.secs = secs
# in FICS games we don't count gain
self.handle_gain = True
self.paused = False
        # The number of seconds left at the time pause was turned on
self.pauseInterval = 0
self.counter = None
self.started = False
self.ended = False
self.movingColor = WHITE
self.connect('time_changed', self.__zerolistener, 'time_changed')
self.connect('player_changed', self.__zerolistener, 'player_changed')
self.connect('pause_changed', self.__zerolistener, 'pause_changed')
self.zero_listener_id = None
self.zero_listener_time = 0
self.zero_listener_source = None
def __repr__(self):
text = "<TimeModel object at %s (White: %s Black: %s ended=%s)>" % \
(id(self), str(self.getPlayerTime(WHITE)),
str(self.getPlayerTime(BLACK)), self.ended)
return text
def __zerolistener(self, *args):
if self.ended:
return False
cur_time = time()
whites_time = cur_time + self.getPlayerTime(WHITE)
blacks_time = cur_time + self.getPlayerTime(BLACK)
if whites_time <= blacks_time:
the_time = whites_time
color = WHITE
else:
the_time = blacks_time
color = BLACK
remaining_time = the_time - cur_time + 0.01
if remaining_time > 0 and remaining_time != self.zero_listener_time:
if (self.zero_listener_id is not None) and \
(self.zero_listener_source is not None) and \
not self.zero_listener_source.is_destroyed():
GLib.source_remove(self.zero_listener_id)
self.zero_listener_time = remaining_time
self.zero_listener_id = GLib.timeout_add(10, self.__checkzero,
color)
default_context = GLib.main_context_get_thread_default(
) or GLib.main_context_default()
if hasattr(default_context, "find_source_by_id"):
self.zero_listener_source = default_context.find_source_by_id(
self.zero_listener_id)
def __checkzero(self, color):
if self.getPlayerTime(color) <= 0 and self.started:
self.emit('zero_reached', color)
return False
return True
############################################################################
# Interacting #
############################################################################
def setMovingColor(self, movingColor):
self.movingColor = movingColor
self.emit("player_changed")
def tap(self):
if self.paused:
return
gain = self.gain if self.handle_gain else 0
ticker = self.intervals[self.movingColor][-1] + gain
if self.started:
if self.counter is not None:
ticker -= time() - self.counter
else:
# FICS rule
if self.ply >= 1:
self.started = True
self.intervals[self.movingColor].append(ticker)
self.movingColor = 1 - self.movingColor
if self.started:
self.counter = time()
self.emit("time_changed")
self.emit("player_changed")
def start(self):
if self.started:
return
self.counter = time()
self.emit("time_changed")
def end(self):
log.debug("TimeModel.end: self=%s" % self)
self.pause()
self.ended = True
if (self.zero_listener_id is not None) and \
(self.zero_listener_source is not None) and \
not self.zero_listener_source.is_destroyed():
GLib.source_remove(self.zero_listener_id)
def pause(self):
log.debug("TimeModel.pause: self=%s" % self)
if self.paused:
return
self.paused = True
if self.counter is not None:
self.pauseInterval = time() - self.counter
self.counter = None
self.emit("time_changed")
self.emit("pause_changed", True)
def resume(self):
log.debug("TimeModel.resume: self=%s" % self)
if not self.paused:
return
self.paused = False
self.counter = time() - self.pauseInterval
self.emit("pause_changed", False)
############################################################################
# Undo and redo in TimeModel #
############################################################################
def undoMoves(self, moves):
""" Sets time and color to move, to the values they were having in the
beginning of the ply before the current.
his move.
Example:
White intervals (is thinking): [120, 130, ...]
Black intervals: [120, 115]
Is undoed to:
White intervals: [120, 130]
Black intervals (is thinking): [120, ...] """
if not self.started:
self.start()
for move in range(moves):
self.movingColor = 1 - self.movingColor
del self.intervals[self.movingColor][-1]
if len(self.intervals[0]) + len(self.intervals[1]) >= 4:
self.counter = time()
else:
self.started = False
self.counter = None
self.emit("time_changed")
self.emit("player_changed")
############################################################################
# Updating #
############################################################################
def updatePlayer(self, color, secs):
self.intervals[color][-1] = secs
if color == self.movingColor and self.started:
self.counter = secs + time() - self.intervals[color][-1]
self.emit("time_changed")
############################################################################
# Info #
############################################################################
def getPlayerTime(self, color, movecount=-1):
if color == self.movingColor and self.started and movecount == -1:
if self.paused:
return self.intervals[color][movecount] - self.pauseInterval
elif self.counter:
return self.intervals[color][movecount] - (time() -
self.counter)
return self.intervals[color][movecount]
def getInitialTime(self):
return self.intervals[WHITE][0]
def getElapsedMoveTime(self, ply):
movecount, color = divmod(ply + 1, 2)
gain = self.gain if ply > 2 else 0
if len(self.intervals[color]) > movecount:
return self.intervals[color][movecount - 1] - self.intervals[
color][movecount] + gain if movecount > 1 else 0
else:
return 0
@property
def display_text(self):
text = ("%d " % self.minutes) + _("min")
if self.gain != 0:
text += (" + %d " % self.gain) + _("sec")
return text
@property
def hasTimes(self):
return len(self.intervals[0]) > 1
@property
def ply(self):
return len(self.intervals[BLACK]) + len(self.intervals[WHITE]) - 2
def hasBWTimes(self, bmovecount, wmovecount):
return len(self.intervals[BLACK]) > bmovecount and len(self.intervals[
WHITE]) > wmovecount
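# Illustrative usage sketch (added; not part of the original module). A
# five-minute clock with a three-second increment, driven by hand; the
# values are hypothetical, and a running GLib main loop is assumed so the
# 'zero_reached' timeout can actually fire:
#
#   tm = TimeModel(secs=300, gain=3)
#   tm.start()
#   tm.tap()                        # White pressed the clock; Black to move
#   print(tm.getPlayerTime(WHITE))  # seconds left on White's clock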
| cajone/pychess | lib/pychess/Utils/TimeModel.py | Python | gpl-3.0 | 8,916 |
# -*- coding: utf-8 -*-
__author__ = 'ogaidukov'
import os.path
import argparse
import tornado.ioloop
import tornado.httpserver
import tornado.web
from commonlib import configparser, logmaker
from rotabanner.lib import route
import redis
import pygeoip
config = None
logger = None
args = None
redisdb = None
geoip = None
def init_application():
# TODO refactor these global variables
global config, logger, args, redisdb, geoip
arg_parser = argparse.ArgumentParser(description="Ad Serving daemon which built on top of Tornado")
arg_parser.add_argument('config', help="Daemon's config file")
arg_parser.add_argument('-D', '--debug', help="Debug mode", action='store_true')
arg_parser.add_argument('-A', '--listen_addr', help="Listen address or '0.0.0.0'")
arg_parser.add_argument('-L', '--listen_port', help="Listen TCP port")
args = arg_parser.parse_args()
config = configparser.parse_config(args.config)
# TODO make use of standard Tornado logging streams 'tornado.access', 'tornado.application' and 'tornado.general'
logger = logmaker.logger_from_config(config)
redis_config = configparser.config_section_obj(config, 'redis')
redisdb = redis.StrictRedis(host=redis_config.host, port=int(redis_config.port),
db=redis_config.db, password=redis_config.password)
geoip = pygeoip.GeoIP('../me-advert/rotabanner/data/GeoIPCity.dat', pygeoip.MEMORY_CACHE)
import handlers # Important! Initialize url handler classes by importing
def run_application():
init_application()
listen_port = int(args.listen_port)
listen_addr = args.listen_addr
    template_path = os.path.join(os.path.dirname(__file__), "templates")
application = TornadoApp(debug=args.debug, template_path=template_path)
http_server = tornado.httpserver.HTTPServer(application)
http_server.listen(listen_port, address=listen_addr)
tornado.ioloop.IOLoop.instance().start()
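# Example invocation (added; config file name, address and port are
# hypothetical):
#
#   python application.py daemon.conf -A 0.0.0.0 -L 8080 --debug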
class TornadoApp(tornado.web.Application):
def __init__(self, **settings):
logger.info("Starting Tornado application instance")
        tornado.web.Application.__init__(self, route.url_handlers, **settings)
| olegvg/me-advert | me-advert/rotabanner/application.py | Python | gpl-3.0 | 2,183 |
from collections import defaultdict
from math import log2 as log
from gui.transcriptions import STANDARD_SYMBOLS
from imports import (QDialog, QHBoxLayout, QVBoxLayout, QGroupBox, QRadioButton, QButtonGroup, QPushButton,
QStackedWidget, QWidget, QComboBox, QMessageBox, QLabel, QLineEdit, QTableWidget, QTableWidgetItem)
class FunctionalLoadDialog(QDialog):
def __init__(self, corpus):
super().__init__()
self.corpus = corpus
self.results = list()
self.setWindowTitle('Functional Load')
layout = QVBoxLayout()
#Set up top row of radio button options
contrastBox = QGroupBox('Contrast')
contrastLayout = QHBoxLayout()
self.contrastGroup = QButtonGroup()
flexionOption = QRadioButton('Degrees of flexion')
flexionOption.click()
ductionOption = QRadioButton('Degree of duction')
oppositionOption = QRadioButton('Thumb opposition')
contactOption = QRadioButton('Thumb/finger contact')
customOption = QRadioButton('Custom options')
self.contrastGroup.addButton(flexionOption, id=0)
self.contrastGroup.addButton(ductionOption, id=1)
self.contrastGroup.addButton(oppositionOption, id=2)
self.contrastGroup.addButton(contactOption, id=3)
self.contrastGroup.addButton(customOption, id=4)
contrastLayout.addWidget(flexionOption)
contrastLayout.addWidget(ductionOption)
contrastLayout.addWidget(oppositionOption)
contrastLayout.addWidget(contactOption)
contrastLayout.addWidget(customOption)
contrastBox.setLayout(contrastLayout)
#set up stacked widgets
self.middleWidget = QStackedWidget()
        #Collapse degrees of flexion
flexionWidget = QWidget()
flexionLayout = QHBoxLayout()
self.flexionFingerSelection = QComboBox()
self.flexionFingerSelection.addItems(['Thumb', 'Index', 'Middle', 'Pinky', 'Ring', 'All'])
self.flexionJointSelection = QComboBox()
self.flexionJointSelection.addItems(['Proximal', 'Medial', 'Distal', 'All'])
#note: Thumb+Proximal not possible, and there's an alert window that will pop up if this combination is chosen
flexionLayout.addWidget(self.flexionFingerSelection)
flexionLayout.addWidget(self.flexionJointSelection)
flexionWidget.setLayout(flexionLayout)
#Collapse degrees of duction
ductionWidget = QWidget()
ductionLayout = QHBoxLayout()
self.ductionFingerSelection = QComboBox()
self.ductionFingerSelection.addItems(['Thumb/Finger', 'Index/Middle', 'Middle/Ring', 'Ring/Pinky', 'All'])
ductionLayout.addWidget(self.ductionFingerSelection)
ductionWidget.setLayout(ductionLayout)
#Collapse thumb opposition
oppositionWidget = QWidget()
oppositionLayout = QHBoxLayout()
oppositionWidget.setLayout(oppositionLayout)
#Collapse thumb/finger contact
contactWidget = QWidget()
contactLayout = QHBoxLayout()
contactWidget.setLayout(contactLayout)
#Collapse custom slots
customWidget = QWidget()
customLayout = QHBoxLayout()
customLayout.addWidget(QLabel('Merge this symbol: '))
        self.customSymbolA = QComboBox()
        self.customSymbolA.addItem('')
        self.customSymbolA.addItems(STANDARD_SYMBOLS)
        self.customSymbolA.setEditable(True)
        customLayout.addWidget(self.customSymbolA)
customLayout.addWidget(QLabel('with this symbol: '))
self.customSymbolB = QComboBox()
self.customSymbolB.addItem('')
self.customSymbolB.addItems(STANDARD_SYMBOLS)
self.customSymbolB.setEditable(True)
customLayout.addWidget(self.customSymbolB)
customLayout.addWidget(QLabel('in these slots: '))
self.customSlots = QLineEdit()
customLayout.addWidget(self.customSlots)
customLayout.addWidget(QLabel('(separate numbers with commas, leave blank to merge symbols everywhere)'))
customWidget.setLayout(customLayout)
#Build up middle widget
self.middleWidget.addWidget(flexionWidget)
self.middleWidget.addWidget(ductionWidget)
self.middleWidget.addWidget(oppositionWidget)
self.middleWidget.addWidget(contactWidget)
self.middleWidget.addWidget(customWidget)
#Connect slots and signals
flexionOption.clicked.connect(self.changeMiddleWidget)
ductionOption.clicked.connect(self.changeMiddleWidget)
oppositionOption.clicked.connect(self.changeMiddleWidget)
contactOption.clicked.connect(self.changeMiddleWidget)
customOption.clicked.connect(self.changeMiddleWidget)
#Bottom buttons (OK/Cancel)
buttonLayout = QHBoxLayout()
ok = QPushButton('OK')
ok.clicked.connect(self.accept)
cancel = QPushButton('Cancel')
cancel.clicked.connect(self.reject)
buttonLayout.addWidget(ok)
buttonLayout.addWidget(cancel)
layout.addWidget(contrastBox)
layout.addWidget(self.middleWidget)
layout.addLayout(buttonLayout)
self.setLayout(layout)
def changeMiddleWidget(self, e):
self.middleWidget.setCurrentIndex(self.contrastGroup.id(self.sender()))
def accept(self):
index = self.middleWidget.currentIndex()
if index == 0:
if (self.flexionFingerSelection.currentText() == 'Thumb'
and self.flexionJointSelection.currentText() == 'Proximal'):
alert = QMessageBox()
alert.setWindowTitle('Incompatible Options')
alert.setText('Thumbs cannot be selected for proximal joint. Choose either "Medial" or "Distal"')
alert.exec_()
return
self.calcByFlexion()
elif index == 1:
self.calcByDuction()
elif index == 4:
slots = self.customSlots.text()
alert = QMessageBox()
alert.setWindowTitle('Invalid slot numbers')
alert.setText('Slot numbers must be between 1 and 34 (inclusive)')
try:
slots = [int(x.strip()) for x in slots.split(',')]
except ValueError:
alert.exec_()
return
if any(n > 34 or n < 1 for n in slots):
alert.exec_()
return
self.calcCustom(slots)
super().accept()
def calculateEntropy(self, corpus=None):
corpus_size = len(corpus) if corpus is not None else len(self.corpus)
return corpus_size, sum([1 / corpus_size * log(1 / corpus_size) for n in range(corpus_size)]) * -1
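    # Note (added): with every form equally probable, the sum above reduces
    # to -N * (1/N) * log2(1/N) == log2(N), so e.g. a corpus of 8 distinct
    # forms has entropy log2(8) == 3 bits.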
def calcByDuction(self):
corpus_size, starting_h = self.calculateEntropy()
duction = self.ductionFingerSelection.currentText()
if duction == 'Thumb/Finger':
slot = 3
elif duction == 'Index/Middle':
slot = 19
elif duction == 'Middle/Ring':
slot = 24
elif duction == 'Ring/Pinky':
slot = 29
elif duction == 'All':
slot = -1
if slot > 1:
print('{} DUCTION'.format(duction.upper()))
print('Starting size = {}\nStarting entropy = {}'.format(corpus_size, starting_h))
new_corpus = defaultdict(int)
for word in self.corpus:
ch = word.config1hand1.copy()
ch[slot] = 'X'
new_corpus[''.join(ch)] += 1
new_corpus_size, ending_h = self.calculateEntropy(new_corpus)
print('After merging size = {}\nAfter merging entropy = {}'.format(len(new_corpus), ending_h))
print('Change in entropy = {}\n'.format(starting_h - ending_h))
else:
print('{} DUCTION'.format(duction.upper()))
print('Starting size = {}\nStarting entropy = {}'.format(corpus_size, starting_h))
new_corpus = defaultdict(int)
for word in self.corpus:
ch = word.config1hand1.copy()
ch[2] = 'X'
ch[19] = 'X'
ch[24] = 'X'
ch[29] = 'X'
new_corpus[''.join(ch)] += 1
new_corpus_size, ending_h = self.calculateEntropy(new_corpus)
print('After merging size = {}\nAfter merging entropy = {}'.format(len(new_corpus), ending_h))
print('Change in entropy = {}\n'.format(starting_h - ending_h))
result = [corpus_size, starting_h, new_corpus_size, ending_h, starting_h-ending_h]
self.results = [result]
def calcCustom(self, slots):
corpus_size, starting_h = self.calculateEntropy()
slots = [n-1 for n in slots]
# minus 1 because slot numbers starts at 1 but list indices start at 0
        symbolA = self.customSymbolA.currentText()
symbolB = self.customSymbolB.currentText()
print('Merging {} and {}'.format(symbolA, symbolB))
print('Starting size = {}\nStarting entropy = {}'.format(corpus_size, starting_h))
new_corpus = defaultdict(int)
for word in self.corpus:
ch = word.config1hand1.copy()
for slot in slots:
if ch[slot] in [symbolA, symbolB]:
ch[slot] = 'X'
new_corpus[''.join(ch)] += 1
new_corpus_size, ending_h = self.calculateEntropy(new_corpus)
print('After merging size = {}\nAfter merging entropy = {}'.format(len(new_corpus), ending_h))
print('Change in entropy = {}\n'.format(starting_h - ending_h))
result = [corpus_size, starting_h, new_corpus_size, ending_h, starting_h-ending_h]
self.results = [result]
def calcByFlexion(self):
corpus_size, starting_h = self.calculateEntropy()
finger = self.flexionFingerSelection.currentText()
joint = self.flexionJointSelection.currentText()
jointDict = {'Proximal': 0,
'Medial': 1,
'Distal': 2,
'All': -1}
fingerDict = {'Thumb':2,
'Index': 16,
'Middle': 21,
'Ring': 26,
'Pinky': 31,
'All': -1}
offset = jointDict[joint]
slot = fingerDict[finger]
slot += offset
        if slot > 0:  # user chose particular fingers
print('{} {} JOINTS'.format(finger.upper(), joint.upper()))
print('Starting size = {}\nStarting entropy = {}'.format(corpus_size, starting_h))
new_corpus = defaultdict(int)
for word in self.corpus:
ch = word.config1hand1.copy()
ch[slot] = 'X'
new_corpus[''.join(ch)] += 1
new_corpus_size, ending_h = self.calculateEntropy(new_corpus)
print('After merging size = {}\nAfter merging entropy = {}'.format(len(new_corpus), ending_h))
print('Change in entropy = {}\n'.format(starting_h - ending_h))
self.results = [[corpus_size, starting_h, new_corpus_size, ending_h, starting_h-ending_h]]
        else:  # user chose an "All" option
if joint == 'All' and finger != 'All':
#all the joints on a particular finger
slot = fingerDict[finger]
print('ALL {} JOINTS'.format(finger.upper()))
print('Starting size = {}\nStarting entropy = {}'.format(corpus_size, starting_h))
new_corpus = defaultdict(int)
for word in self.corpus:
ch = word.config1hand1.copy()
ch[slot] = 'X' #proximal
ch[slot+1] = 'X' #medial
                if finger != 'Thumb':
ch[slot+2] = 'X' #distal
new_corpus[''.join(ch)] += 1
new_corpus_size, ending_h = self.calculateEntropy(new_corpus)
print('After merging size = {}\nAfter merging entropy = {}'.format(len(new_corpus), ending_h))
print('Change in entropy = {}\n'.format(starting_h-ending_h))
self.results = [[corpus_size, starting_h, new_corpus_size, ending_h, starting_h-ending_h]]
elif finger == 'All' and joint != 'All':
#a particular joint on all the fingers
if joint == 'Proximal':
slot = 17
elif joint == 'Medial':
slot = 18
elif joint == 'Distal':
slot = 19
print('ALL {} JOINTS'.format(joint.upper()))
print('Starting size = {}\nStarting entropy = {}'.format(corpus_size, starting_h))
# for finger,slot in [('INDEX', 17), ('MIDDLE',22), ('RING',27), ('PINKY',32)]:
new_corpus = defaultdict(int)
for word in self.corpus:
ch = word.config1hand1.copy()
ch[slot] = 'X'
ch[slot+5] = 'X'
ch[slot+10] = 'X'
ch[slot+15] = 'X'
new_corpus[''.join(ch)] += 1
new_corpus_size, ending_h = self.calculateEntropy(new_corpus)
print('After merging size = {}\nAfter merging entropy = {}'.format(len(new_corpus), ending_h))
print('Change in entropy = {}\n'.format(starting_h-ending_h))
self.results = [[corpus_size, starting_h, new_corpus_size, ending_h, starting_h-ending_h]]
elif finger == 'All' and joint == 'All':
results = list()
for finger, slot in [('THUMB', 2), ('INDEX', 17), ('MIDDLE', 22), ('RING', 27), ('PINKY', 31)]:
print('ALL {} JOINTS'.format(joint.upper()))
print('Starting size = {}\nStarting entropy = {}'.format(corpus_size, starting_h))
new_corpus = defaultdict(int)
for word in self.corpus:
ch = word.config1hand1.copy()
ch[slot] = 'X'
ch[slot+1] = 'X'
                    if finger != 'THUMB':
ch[slot+2] = 'X'
new_corpus[''.join(ch)] += 1
new_corpus_size, ending_h = self.calculateEntropy(new_corpus)
print('After merging size = {}\nAfter merging entropy = {}'.format(len(new_corpus), ending_h))
print('Change in entropy = {}\n'.format(starting_h-ending_h))
results.append([corpus_size, starting_h, new_corpus_size, ending_h, starting_h-ending_h])
self.results = results
class FunctionalLoadResultsTable(QDialog):
def __init__(self, results):
super().__init__()
layout = QHBoxLayout()
table = QTableWidget()
table.setColumnCount(5)
table.setHorizontalHeaderLabels(['Starting corpus size', 'Starting entropy',
'Ending corpus size', 'Ending entropy', 'Change in entropy'])
for result in results:
table.insertRow(table.rowCount())
for i, item in enumerate(result):
newItem = QTableWidgetItem(str(item))
table.setItem(table.rowCount()-1, i, newItem)
layout.addWidget(table)
self.setLayout(layout)
| PhonologicalCorpusTools/SLP-Annotator | slpa/gui/functional_load.py | Python | gpl-3.0 | 15,434 |
# encoding=utf8
# pylint: disable=W0611
""" The utility
Author: lipixun
Created Time : Sun 2/12 14:14:50 2017
File Name: utils.py
Description:
"""
from spec import DataPath
# Import json
try:
import simplejson as json
except ImportError:
import json
# NLTK
import nltk
nltk.data.path = [DataPath]
| lipixun/newsanalyzer4w | newsanalyzer/utils.py | Python | gpl-3.0 | 329 |
"""
WSGI config for antibiobank project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "antibiobank.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
| dridk/antibiobank | antibiobank/wsgi.py | Python | gpl-3.0 | 397 |
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright (c) 2015 Peter Sprygada, <[email protected]>
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import traceback
import json
from ansible.module_utils._text import to_text, to_native
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.basic import env_fallback
from ansible.module_utils.connection import Connection, ConnectionError
from ansible.module_utils.network.common.netconf import NetconfConnection
from ansible.module_utils.network.common.parsing import Cli
from ansible.module_utils.six import iteritems
NET_TRANSPORT_ARGS = dict(
host=dict(required=True),
port=dict(type='int'),
username=dict(fallback=(env_fallback, ['ANSIBLE_NET_USERNAME'])),
password=dict(no_log=True, fallback=(env_fallback, ['ANSIBLE_NET_PASSWORD'])),
ssh_keyfile=dict(fallback=(env_fallback, ['ANSIBLE_NET_SSH_KEYFILE']), type='path'),
authorize=dict(default=False, fallback=(env_fallback, ['ANSIBLE_NET_AUTHORIZE']), type='bool'),
auth_pass=dict(no_log=True, fallback=(env_fallback, ['ANSIBLE_NET_AUTH_PASS'])),
provider=dict(type='dict', no_log=True),
transport=dict(choices=list()),
timeout=dict(default=10, type='int')
)
NET_CONNECTION_ARGS = dict()
NET_CONNECTIONS = dict()
def _transitional_argument_spec():
argument_spec = {}
for key, value in iteritems(NET_TRANSPORT_ARGS):
value['required'] = False
argument_spec[key] = value
return argument_spec
def to_list(val):
if isinstance(val, (list, tuple)):
return list(val)
elif val is not None:
return [val]
else:
return list()
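# Examples (added):
#   to_list('x')        -> ['x']
#   to_list(('a', 'b')) -> ['a', 'b']
#   to_list(None)       -> []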
class ModuleStub(object):
def __init__(self, argument_spec, fail_json):
self.params = dict()
for key, value in argument_spec.items():
self.params[key] = value.get('default')
self.fail_json = fail_json
class NetworkError(Exception):
def __init__(self, msg, **kwargs):
super(NetworkError, self).__init__(msg)
self.kwargs = kwargs
class Config(object):
def __init__(self, connection):
self.connection = connection
def __call__(self, commands, **kwargs):
lines = to_list(commands)
return self.connection.configure(lines, **kwargs)
def load_config(self, commands, **kwargs):
commands = to_list(commands)
return self.connection.load_config(commands, **kwargs)
def get_config(self, **kwargs):
return self.connection.get_config(**kwargs)
def save_config(self):
return self.connection.save_config()
class NetworkModule(AnsibleModule):
def __init__(self, *args, **kwargs):
connect_on_load = kwargs.pop('connect_on_load', True)
argument_spec = NET_TRANSPORT_ARGS.copy()
argument_spec['transport']['choices'] = NET_CONNECTIONS.keys()
argument_spec.update(NET_CONNECTION_ARGS.copy())
if kwargs.get('argument_spec'):
argument_spec.update(kwargs['argument_spec'])
kwargs['argument_spec'] = argument_spec
super(NetworkModule, self).__init__(*args, **kwargs)
self.connection = None
self._cli = None
self._config = None
try:
transport = self.params['transport'] or '__default__'
cls = NET_CONNECTIONS[transport]
self.connection = cls()
except KeyError:
self.fail_json(msg='Unknown transport or no default transport specified')
except (TypeError, NetworkError) as exc:
self.fail_json(msg=to_native(exc), exception=traceback.format_exc())
if connect_on_load:
self.connect()
@property
def cli(self):
if not self.connected:
self.connect()
if self._cli:
return self._cli
self._cli = Cli(self.connection)
return self._cli
@property
def config(self):
if not self.connected:
self.connect()
if self._config:
return self._config
self._config = Config(self.connection)
return self._config
@property
def connected(self):
return self.connection._connected
def _load_params(self):
super(NetworkModule, self)._load_params()
provider = self.params.get('provider') or dict()
for key, value in provider.items():
for args in [NET_TRANSPORT_ARGS, NET_CONNECTION_ARGS]:
if key in args:
if self.params.get(key) is None and value is not None:
self.params[key] = value
def connect(self):
try:
if not self.connected:
self.connection.connect(self.params)
if self.params['authorize']:
self.connection.authorize(self.params)
self.log('connected to %s:%s using %s' % (self.params['host'],
self.params['port'], self.params['transport']))
except NetworkError as exc:
self.fail_json(msg=to_native(exc), exception=traceback.format_exc())
def disconnect(self):
try:
if self.connected:
self.connection.disconnect()
self.log('disconnected from %s' % self.params['host'])
except NetworkError as exc:
self.fail_json(msg=to_native(exc), exception=traceback.format_exc())
def register_transport(transport, default=False):
def register(cls):
NET_CONNECTIONS[transport] = cls
if default:
NET_CONNECTIONS['__default__'] = cls
return cls
return register
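# Illustrative use of the decorator (added; the transport name and class
# are hypothetical):
#
#   @register_transport('cli', default=True)
#   class CliConnection(object):
#       ...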
def add_argument(key, value):
NET_CONNECTION_ARGS[key] = value
def get_resource_connection(module):
if hasattr(module, '_connection'):
return module._connection
capabilities = get_capabilities(module)
network_api = capabilities.get('network_api')
if network_api == 'cliconf':
module._connection = Connection(module._socket_path)
elif network_api == 'netconf':
module._connection = NetconfConnection(module._socket_path)
else:
module.fail_json(msg='Invalid connection type {0!s}'.format(network_api))
return module._connection
def get_capabilities(module):
    if hasattr(module, '_capabilities'):
return module._capabilities
try:
capabilities = Connection(module._socket_path).get_capabilities()
except ConnectionError as exc:
module.fail_json(msg=to_text(exc, errors='surrogate_then_replace'))
module._capabilities = json.loads(capabilities)
return module._capabilities
| pgmillon/ansible | lib/ansible/module_utils/network/common/network.py | Python | gpl-3.0 | 8,128 |
#!/usr/bin/python3
import sqlite3
import os, sys, time, datetime, random, string
import urllib.request, urllib.error
import configparser
from flask import Flask, request, session, redirect
from flask import render_template, g, flash, url_for
from contextlib import closing
from .modules import Pagi
from pxeat import app
from config import *
def prt_help():
print("To start the service:\n\n\t" + sys.argv[0] + " server\n")
print("Listen to \"localhost:5000\" by default, \nDeploy on production (Apache, Nginx...) with \"pxeat.wsgi\"")
def chk_args():
if len(sys.argv) == 2:
if sys.argv[1] == 'server':
if not os.path.isfile(DATABASE):
print("Database is not available!\nCreate with --initdb")
sys.exit()
if not os.path.isfile(PXE_FILE):
print("PXE file is not available!\nPlease check the configuration")
sys.exit()
if not os.path.isfile("./config.py"):
print("PXEAT Config file is missing!")
sys.exit()
elif sys.argv[1] == '--initdb':
init_db()
else:
prt_help()
sys.exit()
else:
prt_help()
sys.exit()
# Defaults
items_num = int(ITEMS_NUM)
form_default = ["", \
"http://", \
REPO_KERNEL_DEFAULT, \
REPO_INITRD_DEFAULT, \
"def", \
""]
loader_dir = TFTP_ROOT + LOADER_PATH
postfix_kernelfn = '-0'
postfix_initrdfn = '-1'
items = {}
def chk_input(chk_string, chk_type):
if chk_type == 'pxe_title':
if chk_string == '':
raise ValueError("The title can not be empty!")
return
elif chk_type == 'file_path':
if chk_string[0] != '/' or chk_string[-1] == '/':
raise ValueError("Path format is invalid!")
return
elif chk_type == 'repo_url':
chk_elements = chk_string.split('//')
        if len(chk_elements) < 2 or chk_elements[0] not in ['http:', 'https:']:
            raise ValueError("Invalid format!"
                             " (Only http:// and https:// are supported)")
        if chk_elements[1] == '':
            raise ValueError("The repository cannot be empty!")
        return
else:
sys.exit("chk_type error!")
def grab_file(base_url, file_path, saved_file):
errmsg0 = "<br />Something wrong, please contact the administrator."
errmsg1 = "<br />Something wrong, please check the repository link \
and kernel/initrd file path."
dbginfo_local = "Debug info: Configuration error! \
Failed to open/write local kernel&initrd file. \
Check your \'LOADER_PATH\' setting in config file. \
Make sure the path exist and you have permission to write.\n\
Current path: " + saved_file
file_url = base_url + file_path
try:
f = urllib.request.urlopen(file_url)
except urllib.error.HTTPError as e:
return str(e.code) + " " + str(e.reason) + str(errmsg1)
except:
return str(errmsg0)
try:
local_file = open(saved_file, "wb")
local_file.write(f.read())
except:
print(dbginfo_local)
return str(errmsg0)
local_file.close()
def boot_opts_gen(opt_flag):
if opt_flag == "vnc":
return(DEFAULT_BOOT_OPTS + \
" console=ttyS0 vnc=1 vncpassword=" + \
VNC_PASSWD)
elif opt_flag == "ssh":
return(DEFAULT_BOOT_OPTS + \
" console=ttyS0 usessh=1 sshpassword=" + \
SSH_PASSWD)
else:
return(DEFAULT_BOOT_OPTS)
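# Example (added; assumes DEFAULT_BOOT_OPTS = 'splash=silent' in
# config.py): boot_opts_gen('vnc') would then return
# 'splash=silent console=ttyS0 vnc=1 vncpassword=<VNC_PASSWD>'.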
def connect_db():
return sqlite3.connect(DATABASE)
def init_db():
with closing(connect_db()) as db:
with app.open_resource('schema.sql', mode='r') as f:
db.cursor().executescript(f.read())
db.commit()
@app.before_request
def before_request():
g.db = connect_db()
@app.teardown_request
def teardown_request(exception):
db = getattr(g, 'db', None)
if db is not None:
db.close()
@app.route('/')
def form():
default_val = {}
for i,k in enumerate(['title', \
'repo_url', \
'repo_kernel', \
'repo_initrd', \
'inst_method', \
'comment']):
default_val[k] = form_default[i]
return render_template('form.html', default_val=default_val)
@app.route('/history/', defaults={'page': 1})
@app.route('/history/page/<int:page>')
def history(page):
count = g.db.execute('select count(*) from pxeitems').fetchone()[0]
per_page = 10
pagination = Pagi(page, per_page, count)
try:
cur = g.db.execute('select id,\
pxe_title,\
repo_url,\
repo_kernel,\
repo_initrd,\
pxe_comment,\
unix_time,\
inst_flag from pxeitems order by id desc')
except sqlite3.Error as e:
return render_template('failed.html', \
failed_msg = "Database error: "+str(e))
history_entries = [ dict(pxe_id=row[0], \
pxe_title=row[1], \
repo_url=row[2], \
repo_kernel=row[3], \
repo_initrd=row[4], \
pxe_comment=row[5], \
unix_time=datetime.datetime.fromtimestamp(int(row[6])), \
inst_flag=row[7]) \
for row in cur.fetchall()[(page-1)*per_page:page*per_page]\
]
if not history_entries and page != 1:
#Shoud do something here other than pass or abort(404)
pass
return render_template('history.html',\
pagination=pagination,\
history_entries=history_entries)
@app.route('/clone/<int:clone_id>')
def clone(clone_id):
row = g.db.execute('select pxe_title,\
repo_url,\
repo_kernel,\
repo_initrd,\
inst_flag,\
pxe_comment from pxeitems where id=?',[clone_id]).fetchone()
default_val = {}
for i,k in enumerate(['title', \
'repo_url', \
'repo_kernel', \
'repo_initrd', \
'inst_method', \
'comment']):
default_val[k] = row[i]
flash(u'Cloned Entry!','green')
return render_template('form.html', default_val=default_val)
@app.route('/about')
@app.route('/about/')
def about():
return render_template('about.html')
# For the pagination
def url_for_other_page(page):
args = request.view_args.copy()
args['page'] = page
return url_for(request.endpoint, **args)
app.jinja_env.globals['url_for_other_page'] = url_for_other_page
@app.route('/confirm', methods=['POST'])
def confirm_entry():
#Input checking
try:
for x,y in [[request.form['pxe_title'],'pxe_title'], \
[request.form['repo_url'], 'repo_url'], \
[request.form['repo_kernel'], 'file_path'], \
[request.form['repo_initrd'], 'file_path']]:
chk_input(x,y)
except ValueError as e:
flash(e.args[0],'error')
return redirect(url_for('form'))
# Assign to the dictionary
items['repo_kernel'] = request.form['repo_kernel']
items['repo_url'] = request.form['repo_url']
items['repo_initrd'] = request.form['repo_initrd']
items['pxe_title'] = request.form['pxe_title']
items['pxe_comment'] = request.form['pxe_comment']
items['inst_flag'] = request.form['inst_method']
# Generate a random string
items['random_str'] = ''.join(random.choice(string.ascii_lowercase) for _ in range(4))
items['unix_time'] = ''
# Show the entry which will be generated on the confirm page
gen_format = ["menu label ^a - " + items['pxe_title'], \
"kernel " + LOADER_PATH + "[random]" + postfix_kernelfn, \
"append initrd=" + LOADER_PATH + "[random]" + postfix_initrdfn + " " + \
boot_opts_gen(items['inst_flag']) + " " + \
"install=" + items['repo_url']]
return render_template('confirm.html', cfm_entries=items, cfm_fmt=gen_format)
@app.route('/add', methods=['POST'])
def add_entry():
items['unix_time'] = str(int(time.time()))
id_random = items['unix_time'] + items['random_str']
# Get kernel and initrd file
for f_name,i in [[items['repo_kernel'], postfix_kernelfn],\
[items['repo_initrd'], postfix_initrdfn]]:
ret = grab_file(items['repo_url'],\
f_name,\
loader_dir + id_random + i)
if ret:
return render_template('failed.html',\
failed_msg = f_name + ": " + str(ret))
else:
pass
# Add new entry to database
try:
g.db.execute('INSERT INTO pxeitems (\
pxe_title, \
repo_url, \
repo_kernel, \
repo_initrd, \
pxe_comment, \
unix_time, \
random_str, \
inst_flag) values (?, ?, ?, ?, ?, ?, ?, ?)', \
[items['pxe_title'], \
items['repo_url'], \
items['repo_kernel'], \
items['repo_initrd'], \
items['pxe_comment'], \
items['unix_time'], \
items['random_str'], \
items['inst_flag']\
])
except sqlite3.Error as e:
#Remove downloaded files here
for i in (postfix_kernelfn, postfix_initrdfn):
os.remove(TFTP_ROOT + LOADER_PATH + id_random + i)
return render_template('failed.html', \
failed_msg = "Database error: " + str(e))
g.db.commit()
# Fetch first items_num of entires from the database
cur = g.db.execute('SELECT pxe_title,\
repo_url,\
repo_kernel,\
repo_initrd,\
inst_flag FROM pxeitems order by id desc')
pxe_entries = [ dict(pxe_title=row[0], \
repo_url=row[1], \
repo_kernel=row[2], \
repo_initrd=row[3], \
inst_flag=row[4]) for row in cur.fetchall()[:items_num]\
]
# Write the entries to PXE configure file
try:
fpxe = open(PXE_FILE,'w')
except IOError as e:
for i in ("0","1"):
os.remove(TFTP_ROOT + LOADER_PATH + id_random + "-" + i)
g.db.execute('DELETE FROM pxeitems WHERE id = (SELECT max(id) FROM pxeitems)')
return render_template('failed.html', failed_msg = e)
fpxe.write(PXE_HEADER + '\n')
pxe_index = 'a'
for pxe_entry in pxe_entries:
fpxe.write('label {0}\n menu label ^{0} - {1}\n menu indent 2\n kernel {2}\n append initrd={3} {4} install={5}\n\n'.format(\
pxe_index,\
pxe_entry['pxe_title'],\
LOADER_PATH + items['unix_time'] + items['random_str'] + postfix_kernelfn, \
LOADER_PATH + items['unix_time'] + items['random_str'] + postfix_initrdfn, \
boot_opts_gen(pxe_entry['inst_flag']),items['repo_url']))
pxe_index = chr(ord(pxe_index)+1)
fpxe.write(PXE_FOOTER + '\n')
    fpxe.close()
# Remove the out of service kernel&initrd files
for root, dirs, files in os.walk(loader_dir, topdown=False):
names=sorted(files,reverse=True)
for i in names[items_num*2:]:
os.remove(os.path.join(root,i))
flash(u'New entry was successfully posted','green')
return redirect(url_for('form'))
| wnereiz/pxeat | pxeat/views.py | Python | gpl-3.0 | 12,105 |
import pigpio
import time
class LeftEncoder:
def __init__(self, pin=24):
self.pi = pigpio.pi()
self.pin = pin
self.pi.set_mode(pin, pigpio.INPUT)
self.pi.set_pull_up_down(pin, pigpio.PUD_UP)
cb1 = self.pi.callback(pin, pigpio.EITHER_EDGE, self.cbf)
self.tick = 0
def cbf(self, gpio, level, tick):
# print(gpio, level, tick)
print(self.tick)
self.tick += 1
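# Usage note (added): pigpio.pi() talks to the pigpio daemon, so
# `sudo pigpiod` must be running first. The callback is registered for
# EITHER_EDGE, so a single encoder pulse advances the counter twice
# (once per edge).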
e = LeftEncoder()
while True:
    time.sleep(.01)
| MrYsLab/razmq | hardware_baseline/encoders/left_encoder.py | Python | gpl-3.0 | 491 |
#http://informatics.mccme.ru/mod/statements/view3.php?id=22783&chapterid=113362#1
n = int(input())
def sum_kv_cifr(x):
su = 0
for i in str(x):
su += int(i)*int(i)
return su
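# Worked example (added): sum_kv_cifr(23) == 2*2 + 3*3 == 13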
maxi_power = 0
for i in range(1, n//2+1):
print('______',i)
for k in range(n//i, 0, -1):
power = sum_kv_cifr(i * k)
print('_', k, power)
if power > maxi_power:
maxi_power = power
print(maxi_power)
| dichenko/kpk2016 | Diff/dra.py | Python | gpl-3.0 | 442 |
import unittest
from .Weather_analyzer import is_not_number
class WeatherAnalyzerTestCase(unittest.TestCase):
    def test_checking_of_input_in_form(self):
        value = 46
        answer = is_not_number(value)
        self.assertEqual(answer, False)
| AntonKuksov/Weather_analyzer | test_form.py | Python | gpl-3.0 | 300 |
#!/usr/bin/env python
# Copyright (C) 2012,2013,2015(H),2016
# Max Planck Institute for Polymer Research
# Copyright (C) 2008,2009,2010,2011
# Max-Planck-Institute for Polymer Research & Fraunhofer SCAI
#
# This file is part of ESPResSo++.
#
# ESPResSo++ is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo++ is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import argparse
import math
import re
def convertTable(gro_in_file, esp_out_file, sigma=1.0, epsilon=1.0, c6=1.0, c12=1.0):
"""Convert GROMACS tabulated file into ESPResSo++ tabulated file (new file
is created). First column of input file can be either distance or angle.
For non-bonded files, c6 and c12 can be provided. Default value for sigma, epsilon,
c6 and c12 is 1.0. Electrostatics are not taken into account (f and fd columns).
Keyword arguments:
gro_in_file -- the GROMACS tabulated file name (bonded, nonbonded, angle
or dihedral).
esp_out_file -- filename of the ESPResSo++ tabulated file to be written.
sigma -- optional, depending on whether you want to convert units or not.
epsilon -- optional, depending on whether you want to convert units or not.
c6 -- optional
c12 -- optional
"""
# determine file type
bonded, angle, dihedral = False, False, False
re_bond = re.compile('.*_b[0-9]+.*')
re_angle = re.compile('.*_a[0-9]+.*')
re_dihedral = re.compile('.*_d[0-9]+.*')
if re.match(re_bond, gro_in_file):
bonded = True
elif re.match(re_angle, gro_in_file):
angle = True
bonded = True
elif re.match(re_dihedral, gro_in_file):
dihedral = True
bonded = True
fin = open(gro_in_file, 'r')
fout = open(esp_out_file, 'w')
if bonded: # bonded has 3 columns
for line in fin:
if line[0] == "#": # skip comment lines
continue
columns = line.split()
            r = float(columns[0])
            f = float(columns[1])   # energy
            fd = float(columns[2])  # force
            # convert units
            if angle or dihedral:  # degrees to radians
                r = math.radians(r)
                fd = fd * 180 / math.pi
else:
r = r / sigma
e = f / epsilon
f = fd*sigma / epsilon
if (not angle and not dihedral and r != 0) or \
(angle and r <= math.pi and r > 0) or \
(dihedral and r >= -math.pi and r <= math.pi):
fout.write("%15.8g %15.8g %15.8g\n" % (r, e, f))
else: # non-bonded has 7 columns
for line in fin:
if line.startswith('#'): # skip comment lines
continue
columns = line.split()
            r = float(columns[0])
            g = float(columns[3])   # dispersion
            gd = float(columns[4])
            h = float(columns[5])   # repulsion
            hd = float(columns[6])
            e = c6*g + c12*h
            f = c6*gd + c12*hd
# convert units
r = r / sigma
e = e / epsilon
f = f*sigma / epsilon
if r != 0: # skip 0
fout.write("%15.8g %15.8g %15.8g\n" % (r, e, f))
fin.close()
fout.close()
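# Example invocation (added; file names are hypothetical): convert a
# GROMACS bonded table to the ESPResSo++ format without unit conversion:
#
#   convertTable('table_b0.xvg', 'table_b0.tab')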
def _args():
parser = argparse.ArgumentParser()
parser.add_argument('in_file')
parser.add_argument('out_file')
return parser
def main():
args = _args().parse_args()
convertTable(args.in_file, args.out_file)
if __name__ == '__main__':
main()
| cgchemlab/chemlab | tools/convert_gromacs2espp.py | Python | gpl-3.0 | 4,036 |
# Teacher Quiz - Python Code - Elizabeth Tweedale
import csv, random
def askName(): # askName function returns the name of the student
print("Welcome to the Super Python Quiz!")
yourName = input("What is your name? ")
print ("Hello",str(yourName))
return yourName
def getQuestions(): # getQuestions reads in the questions from a CSV file
questions = [] # this creates an empty list for adding the questions to
with open("SuperPythonQuiz.csv", mode="r", encoding="utf-8") as myFile:
myQuiz = csv.reader(myFile)
for row in myQuiz:
questions.append(row)
return questions
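# Expected row layout, inferred from askQuestion() below (the content is
# hypothetical): question text first, the correct answer last, and the
# choices in between, e.g.
#   What is 2+2?,A. 3,B. 4,C. 5,B. 4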
def askQuestion(question,score): # askQuestion prints the question and choices to the screen then checks the answer
print(question[0]) # print the question - this is in the [0] position of the row
for eachChoice in question[1:-1]: # print each choice from [1] to the last position [-1]
print("{0:>5}{1}".format("", eachChoice))
answer = input("Please select an answer: ") # get the student's answer
if answer == question[-1]: # check if the answer matches the last position in the question, the correct answer
print("Correct!") # if it's correct, tell the user and add one to the score
score += 1
else: # if it's incorrect, tell the user what the correct answer was
print("Incorrect, the correct answer was {0}.".format(question[-1]))
return score # return the score
def recordScore(studentName, score):
with open("QuizResults.txt", mode="a+",encoding="utf-8") as myFile: # note the '+' sign after the a means if the file does not exist, then create it
myFile.write(str(studentName) + "," + str(score) + "\n") # write name,score to the file
# "\n" will add a new line to the file so that it's ready for the next name
def main():
studentName = askName() # call the askName function
questions = getQuestions() # call the getQuestions function
score = 0 # initialise the score to 0
number = len(questions) # use the number to keep track of the total number of questions - which is the length of the 'questions' list
    for eachQuestion in range(number): # repeat for each question
question = random.choice(questions) # choose a random question from the questions list
score = askQuestion(question,score) # ask the question and update the score
questions.remove(question) # remove the current question from the list so that you don't ask it again
print("Your final score is:", score, "out of:", number) # tell the user what their final score is
recordScore(studentName, score) # call the recordScore function
main()
| elizabethtweedale/HowToCode2 | SuperSkill-08-Teacher/Teacher-Quiz.py | Python | gpl-3.0 | 3,594 |
"""
WSGI config for mng_files project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/howto/deployment/wsgi/
"""
import os
import sys
path = os.path.abspath(__file__+'/../..')
if path not in sys.path:
sys.path.append(path)
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mng_files.settings")
application = get_wsgi_application()
| idjung96/mng_files | mng_files/wsgi.py | Python | gpl-3.0 | 498 |
#!/usr/bin/env python2.7
# coding=utf-8
# Author: Dustyn Gibson <[email protected]>
# URL: http://github.com/SickRage/SickRage
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
"""
Test sickbeard.helpers
Methods:
fixGlob
indentXML
remove_non_release_groups
isMediaFile
isRarFile
isBeingWritten
remove_file_failed
makeDir
searchIndexerForShowID
listMediaFiles
copyFile
moveFile
link
hardlinkFile
symlink
moveAndSymlinkFile
make_dirs
rename_ep_file
delete_empty_folders
fileBitFilter
chmodAsParent
fixSetGroupID
is_anime_in_show_list
update_anime_support
get_absolute_number_from_season_and_episode
get_all_episodes_from_absolute_number
sanitizeSceneName
arithmeticEval
create_https_certificates
backupVersionedFile
restoreVersionedFile
md5_for_file
get_lan_ip
check_url
anon_url
encrypt
decrypt
full_sanitizeSceneName
_check_against_names
get_show
is_hidden_folder
real_path
validateShow
set_up_anidb_connection
makeZip
extractZip
backupConfigZip
restoreConfigZip
mapIndexersToShow
touchFile
_getTempDir
_setUpSession
getURL
download_file
get_size
generateApiKey
remove_article
generateCookieSecret
verify_freespace
pretty_time_delta
isFileLocked
getDiskSpaceUsage
"""
import os.path
import sys
import unittest
sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../lib')))
sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
from sickbeard.helpers import remove_non_release_groups
TEST_RESULT = 'Show.Name.S01E01.HDTV.x264-RLSGROUP'
TEST_CASES = {
'removewords': [
TEST_RESULT,
'Show.Name.S01E01.HDTV.x264-RLSGROUP[cttv]',
'Show.Name.S01E01.HDTV.x264-RLSGROUP.RiPSaLoT',
'Show.Name.S01E01.HDTV.x264-RLSGROUP[GloDLS]',
'Show.Name.S01E01.HDTV.x264-RLSGROUP[EtHD]',
'Show.Name.S01E01.HDTV.x264-RLSGROUP-20-40',
'Show.Name.S01E01.HDTV.x264-RLSGROUP[NO-RAR] - [ www.torrentday.com ]',
'Show.Name.S01E01.HDTV.x264-RLSGROUP[rarbg]',
'Show.Name.S01E01.HDTV.x264-RLSGROUP[Seedbox]',
'{ www.SceneTime.com } - Show.Name.S01E01.HDTV.x264-RLSGROUP',
'].[www.tensiontorrent.com] - Show.Name.S01E01.HDTV.x264-RLSGROUP',
'[ www.TorrentDay.com ] - Show.Name.S01E01.HDTV.x264-RLSGROUP',
'Show.Name.S01E01.HDTV.x264-RLSGROUP[silv4]',
'Show.Name.S01E01.HDTV.x264-RLSGROUP[AndroidTwoU]',
'[www.newpct1.com]Show.Name.S01E01.HDTV.x264-RLSGROUP',
'Show.Name.S01E01.HDTV.x264-RLSGROUP-NZBGEEK',
'.www.Cpasbien.pwShow.Name.S01E01.HDTV.x264-RLSGROUP',
'Show.Name.S01E01.HDTV.x264-RLSGROUP [1044]',
'[ www.Cpasbien.pw ] Show.Name.S01E01.HDTV.x264-RLSGROUP',
'Show.Name.S01E01.HDTV.x264-RLSGROUP.[BT]',
'Show.Name.S01E01.HDTV.x264-RLSGROUP[vtv]',
'Show.Name.S01E01.HDTV.x264-RLSGROUP.[www.usabit.com]',
'[www.Cpasbien.com] Show.Name.S01E01.HDTV.x264-RLSGROUP',
'Show.Name.S01E01.HDTV.x264-RLSGROUP[ettv]',
'Show.Name.S01E01.HDTV.x264-RLSGROUP[rartv]',
'Show.Name.S01E01.HDTV.x264-RLSGROUP-Siklopentan',
'Show.Name.S01E01.HDTV.x264-RLSGROUP-RP',
'Show.Name.S01E01.HDTV.x264-RLSGROUP[PublicHD]',
'[www.Cpasbien.pe] Show.Name.S01E01.HDTV.x264-RLSGROUP',
'Show.Name.S01E01.HDTV.x264-RLSGROUP[eztv]',
'Show.Name.S01E01.HDTV.x264-RLSGROUP-[SpastikusTV]',
'].[ www.tensiontorrent.com ] - Show.Name.S01E01.HDTV.x264-RLSGROUP',
'[ www.Cpasbien.com ] Show.Name.S01E01.HDTV.x264-RLSGROUP',
'Show.Name.S01E01.HDTV.x264-RLSGROUP- { www.SceneTime.com }',
'Show.Name.S01E01.HDTV.x264-RLSGROUP- [ www.torrentday.com ]',
'Show.Name.S01E01.HDTV.x264-RLSGROUP.Renc'
]
}
class HelpersTests(unittest.TestCase):
"""
Test using test generator
"""
def __init__(self, *args, **kwargs):
"""
Initialize test
"""
super(HelpersTests, self).__init__(*args, **kwargs)
def test_generator(test_strings):
"""
Generate tests from test strings
:param test_strings: to generate tests from
:return: test
"""
def _test(self):
"""
Generate tests
:param self:
:return: test to run
"""
for test_string in test_strings:
self.assertEqual(remove_non_release_groups(test_string), TEST_RESULT)
return _test
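# Note (added): test_generator builds one test method per list in
# TEST_CASES; the __main__ block at the bottom attaches each generated
# method to HelpersTests via setattr before the suite is loaded.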
class HelpersZipTests(unittest.TestCase):
"""
Test zip methods
"""
@unittest.skip('Not yet implemented')
def test_make_zip(self):
"""
Test makeZip
"""
pass
@unittest.skip('Not yet implemented')
def test_extract_zip(self):
"""
Test extractZip
"""
pass
@unittest.skip('Not yet implemented')
def test_backup_config_zip(self):
"""
Test backupConfigZip
"""
pass
@unittest.skip('Not yet implemented')
def test_restore_config_zip(self):
"""
Test restoreConfigZip
"""
pass
@unittest.skip('Not yet implemented')
def test_is_rar_file(self):
"""
Test isRarFile
"""
pass
class HelpersDirectoryTests(unittest.TestCase):
"""
Test directory methods
"""
@unittest.skip('Not yet implemented')
def test_make_dirs(self):
"""
Test make_dirs
"""
pass
@unittest.skip('Not yet implemented')
def test_delete_empty_folders(self):
"""
Test delete_empty_folders
"""
pass
@unittest.skip('Not yet implemented')
def test_make_dir(self):
"""
Test makeDir
"""
pass
@unittest.skip('Not yet implemented')
def test_get_temp_dir(self):
"""
Test _getTempDir
"""
pass
@unittest.skip('Not yet implemented')
def test_is_hidden_folder(self):
"""
Test is_hidden_folder
"""
pass
@unittest.skip('Not yet implemented')
def test_real_path(self):
"""
Test real_path
"""
pass
class HelpersFileTests(unittest.TestCase):
"""
Test file helpers
"""
@unittest.skip('Not yet implemented')
def test_is_media_file(self):
"""
Test isMediaFile
"""
pass
@unittest.skip('Not yet implemented')
def test_is_file_locked(self):
"""
Test isFileLocked
"""
pass
@unittest.skip('Not yet implemented')
def test_is_being_written(self):
"""
Test isBeingWritten
"""
pass
@unittest.skip('Not yet implemented')
def test_remove_file_failed(self):
"""
Test remove_file_failed
"""
pass
@unittest.skip('Not yet implemented')
def test_list_media_files(self):
"""
Test listMediaFiles
"""
pass
@unittest.skip('Not yet implemented')
def test_copy_file(self):
"""
Test copyFile
"""
pass
@unittest.skip('Not yet implemented')
def test_move_file(self):
"""
Test moveFile
"""
pass
@unittest.skip('Not yet implemented')
def test_rename_ep_file(self):
"""
Test rename_ep_file
"""
pass
@unittest.skip('Not yet implemented')
def test_file_bit_filter(self):
"""
Test fileBitFilter
"""
pass
@unittest.skip('Not yet implemented')
def test_chmod_as_parent(self):
"""
Test chmodAsParent
"""
pass
@unittest.skip('Not yet implemented')
def test_backup_versioned_file(self):
"""
Test backupVersionedFile
"""
pass
@unittest.skip('Not yet implemented')
def test_restore_versioned_file(self):
"""
Test restoreVersionedFile
"""
pass
@unittest.skip('Not yet implemented')
def test_verify_free_space(self):
"""
Test verify_freespace
"""
pass
@unittest.skip('Not yet implemented')
def test_get_disk_space_usage(self):
"""
Test getDiskSpaceUsage
"""
pass
@unittest.skip('Not yet implemented')
def test_download_file(self):
"""
Test download_file
"""
pass
@unittest.skip('Not yet implemented')
def test_get_size(self):
"""
Test get_size
"""
pass
@unittest.skip('Not yet implemented')
def test_md5_for_file(self):
"""
Test md5_for_file
"""
pass
@unittest.skip('Not yet implemented')
def test_touch_file(self):
"""
Test touchFile
"""
pass
class HelpersFileLinksTests(unittest.TestCase):
"""
Test sym and hard links
"""
@unittest.skip('Not yet implemented')
def test_link(self):
"""
Test link
"""
pass
@unittest.skip('Not yet implemented')
def test_hardlink_file(self):
"""
Test hardlinkFile
"""
pass
@unittest.skip('Not yet implemented')
def test_symlink(self):
"""
Test symlink
"""
pass
@unittest.skip('Not yet implemented')
def test_move_and_symlink_file(self):
"""
Test moveAndSymlinkFile
"""
pass
class HelpersEncryptionTests(unittest.TestCase):
"""
Test encryption and decryption
"""
@unittest.skip('Not yet implemented')
def test_create_https_certificates(self):
"""
Test create_https_certificates
"""
pass
@unittest.skip('Not yet implemented')
def test_encrypt(self):
"""
Test encrypt
"""
pass
@unittest.skip('Not yet implemented')
def test_decrypt(self):
"""
Test decrypt
"""
pass
@unittest.skip('Not yet implemented')
def test_generate_cookie_secret(self):
"""
Test generateCookieSecret
"""
pass
class HelpersShowTests(unittest.TestCase):
"""
Test show methods
"""
@unittest.skip('Not yet implemented')
def test_search_indexer_for_show_id(self):
"""
Test searchIndexerForShowID
"""
pass
@unittest.skip('Not yet implemented')
def test_is_anime_in_show_list(self):
"""
Test is_anime_in_show_list
"""
pass
@unittest.skip('Not yet implemented')
def test_check_against_names(self):
"""
Test _check_against_names
"""
pass
@unittest.skip('Not yet implemented')
def test_get_show(self):
"""
Test get_show
"""
pass
@unittest.skip('Not yet implemented')
def test_validate_show(self):
"""
Test validateShow
"""
pass
@unittest.skip('Not yet implemented')
def test_map_indexers_to_show(self):
"""
Test mapIndexersToShow
"""
pass
@unittest.skip('Not yet implemented')
def test_get_abs_no_from_s_and_e(self):
"""
Test get_absolute_number_from_season_and_episode
"""
pass
@unittest.skip('Not yet implemented')
def test_get_all_eps_from_abs_no(self):
"""
Test get_all_episodes_from_absolute_number
"""
pass
class HelpersConnectionTests(unittest.TestCase):
"""
Test connections
"""
@unittest.skip('Not yet implemented')
def test_get_lan_ip(self):
"""
Test get_lan_ip
"""
pass
@unittest.skip('Not yet implemented')
def test_check_url(self):
"""
Test check_url
"""
pass
@unittest.skip('Not yet implemented')
def test_anon_url(self):
"""
Test anon_url
"""
pass
@unittest.skip('Not yet implemented')
def test_set_up_anidb_connection(self):
"""
Test set_up_anidb_connection
"""
pass
@unittest.skip('Not yet implemented')
def test_set_up_session(self):
"""
Test _setUpSession
"""
pass
@unittest.skip('Not yet implemented')
def test_get_url(self):
"""
Test getURL
"""
pass
@unittest.skip('Not yet implemented')
def test_generate_api_key(self):
"""
Test generateApiKey
"""
pass
class HelpersMiscTests(unittest.TestCase):
"""
Test misc helper methods
"""
@unittest.skip('Not yet implemented')
def test_fix_glob(self):
"""
Test fixGlob
"""
pass
@unittest.skip('Not yet implemented')
def test_indent_xml(self):
"""
Test indentXML
"""
pass
@unittest.skip('Not yet implemented')
def test_remove_non_release_groups(self):
"""
Test remove_non_release_groups
"""
pass
@unittest.skip('Not yet implemented')
def test_fix_set_group_id(self):
"""
Test fixSetGroupID
"""
pass
@unittest.skip('Not yet implemented')
def test_update_anime_support(self):
"""
Test update_anime_support
"""
pass
@unittest.skip('Not yet implemented')
def test_sanitize_scene_name(self):
"""
Test sanitizeSceneName
"""
pass
@unittest.skip('Not yet implemented')
def test_arithmetic_eval(self):
"""
Test arithmeticEval
"""
pass
@unittest.skip('Not yet implemented')
def test_full_sanitize_scene_name(self):
"""
Test full_sanitizeSceneName
"""
pass
@unittest.skip('Not yet implemented')
def test_remove_article(self):
"""
Test remove_article
"""
pass
@unittest.skip('Not yet implemented')
def test_pretty_time_delta(self):
"""
Test pretty_time_delta
"""
pass
if __name__ == '__main__':
print "=================="
print "STARTING - Helpers TESTS"
print "=================="
print "######################################################################"
for name, test_data in TEST_CASES.items():
test_name = 'test_%s' % name
test = test_generator(test_data)
setattr(HelpersTests, test_name, test)
SUITE = unittest.TestLoader().loadTestsFromTestCase(HelpersTests)
unittest.TextTestRunner(verbosity=2).run(SUITE)
SUITE = unittest.TestLoader().loadTestsFromTestCase(HelpersConnectionTests)
unittest.TextTestRunner(verbosity=2).run(SUITE)
SUITE = unittest.TestLoader().loadTestsFromTestCase(HelpersDirectoryTests)
unittest.TextTestRunner(verbosity=2).run(SUITE)
SUITE = unittest.TestLoader().loadTestsFromTestCase(HelpersEncryptionTests)
unittest.TextTestRunner(verbosity=2).run(SUITE)
SUITE = unittest.TestLoader().loadTestsFromTestCase(HelpersFileLinksTests)
unittest.TextTestRunner(verbosity=2).run(SUITE)
SUITE = unittest.TestLoader().loadTestsFromTestCase(HelpersFileTests)
unittest.TextTestRunner(verbosity=2).run(SUITE)
SUITE = unittest.TestLoader().loadTestsFromTestCase(HelpersMiscTests)
unittest.TextTestRunner(verbosity=2).run(SUITE)
SUITE = unittest.TestLoader().loadTestsFromTestCase(HelpersShowTests)
unittest.TextTestRunner(verbosity=2).run(SUITE)
SUITE = unittest.TestLoader().loadTestsFromTestCase(HelpersZipTests)
unittest.TextTestRunner(verbosity=2).run(SUITE)
| hernandito/SickRage | tests/helpers_tests.py | Python | gpl-3.0 | 16,369 |
'''
Decomposition
-------------
The core of sector decomposition. This module implements
the actual decomposition routines.
Common
~~~~~~
This module collects routines that are used by
multiple decomposition modules.
.. autoclass:: pySecDec.decomposition.Sector
.. autofunction:: pySecDec.decomposition.squash_symmetry_redundant_sectors_sort
.. autofunction:: pySecDec.decomposition.squash_symmetry_redundant_sectors_dreadnaut
Iterative
~~~~~~~~~
.. automodule:: pySecDec.decomposition.iterative
:members:
Geometric
~~~~~~~~~
.. automodule:: pySecDec.decomposition.geometric
:members:
Splitting
~~~~~~~~~
.. automodule:: pySecDec.decomposition.splitting
:members:
'''
from . import iterative, geometric, splitting
from .common import *
| mppmu/secdec | pySecDec/decomposition/__init__.py | Python | gpl-3.0 | 758 |
# -*- encoding: utf-8 -*-
from robottelo.constants import FILTER, FOREMAN_PROVIDERS
from nailgun import entities
from robottelo.ui.base import Base, UINoSuchElementError, UIError
from robottelo.ui.locators import common_locators, locators, tab_locators
from robottelo.ui.navigator import Navigator
class ResourceProfileFormBase(object):
"""Base class for compute resources profiles forms"""
_page = None
    # some fields are two-panel selectors where entries are moved from the
    # left panel to the right one (e.g. user groups and roles);
    # see how security_groups is implemented in ResourceProfileFormEC2
selector_fields = []
    # some fields belong to repeatable sections that can be added on the fly,
    # like storage and networks; see how network_interfaces and storage are
    # implemented in ResourceProfileFormRHEV
group_fields_locators = {}
fetch_values_locators = {}
def __init__(self, page):
"""Initiate compute resource profile form
:type page: ComputeProfile
:param page: The compute profile object ComputeProfile or
ComputeResource
"""
self._page = page
@property
def page(self):
"""Return the current page ComputeResource or ComputeProfile"""
return self._page
def _clean_value(self, name, value):
"""Check some values and correct them accordingly"""
if name in self.selector_fields:
if not isinstance(value, (list, tuple)):
value = [value]
return value
def _assign_locator_value(self, target, value):
"""Assign provided value to page element depending on the type of that
element
"""
target_type = self.page.element_type(target)
        if (target_type == 'span' or
                target_type == 'select') and ' (' in value:
            # workaround for searchable select lists
            self.page.click(target)
            # type the entity value without its parenthesised part
            self.page.assign_value(
                common_locators['select_list_search_box'],
                value.split(' (')[0])
            # select the value by its name without the parenthesised part
            self.page.click(
                common_locators['entity_select_list_vmware'] %
                value.split(' (')[0])
        else:
            self.page.assign_value(target, value)
def set_value(self, name, value):
"""Set the value of the corresponding field in UI"""
locator_attr = '{0}_locator'.format(name)
locator = getattr(self, locator_attr, None)
if locator is None and name not in self.group_fields_locators:
raise UIError('Field name: {0} not supported'.format(name))
value = self._clean_value(name, value)
if name in self.selector_fields:
self.page.configure_entity(value, locator)
elif name in self.group_fields_locators:
field_index = 0
group_fields_locators = self.group_fields_locators[name]
add_node_locator = group_fields_locators['_add_node']
for group_field in value:
if group_field is not None:
for field_key, field_value in group_field.items():
field_locator = group_fields_locators.get(field_key)
available_fields = self.page.find_elements(
field_locator)
if len(available_fields) - 1 < field_index:
self.page.click(add_node_locator)
available_fields = self.page.find_elements(
field_locator)
self._assign_locator_value(
available_fields[field_index], field_value)
field_index += 1
else:
self._assign_locator_value(locator, value)
def set_values(self, **kwargs):
"""Set the values of the corresponding fields in UI"""
for key, value in kwargs.items():
self.set_value(key, value)
def get_values(self, params_names):
"""Get the values of the corresponding fields in UI"""
return_dict = {}
for param_name in params_names:
locator_attr = 'fetch_{0}_locator'.format(param_name)
if locator_attr not in self.fetch_values_locators:
raise UIError(
'Field name: {0} not supported'.format(param_name))
field_locator = self.fetch_values_locators[locator_attr]
return_dict[param_name] = self.page.get_element_value(
field_locator)
return return_dict
def submit(self):
"""Press the submit form button"""
self.page.click(common_locators['submit'])
class ResourceProfileFormEC2(ResourceProfileFormBase):
"""Implement EC2 compute resource profile form"""
flavor_locator = locators["resource.compute_profile.ec2_flavor"]
image_locator = locators["resource.compute_profile.ec2_image"]
subnet_locator = locators["resource.compute_profile.ec2_subnet"]
managed_ip_locator = locators["resource.compute_profile.ec2_managed_ip"]
availability_zone_locator = locators[
"resource.compute_profile.ec2_availability_zone"]
security_groups_locator = FILTER['ec2_security_groups']
selector_fields = ['security_groups']
def _clean_value(self, name, value):
"""Check some values and correct them accordingly"""
value = ResourceProfileFormBase._clean_value(self, name, value)
if not value:
if name == 'availability_zone':
value = 'No preference'
elif name == 'subnet':
value = 'EC2'
elif name == 'security_groups':
value = []
return value
class ResourceProfileFormRHEV(ResourceProfileFormBase):
"""Implement RHEV compute resource profile form"""
cluster_locator = locators["resource.compute_profile.rhev_cluster"]
template_locator = locators["resource.compute_profile.rhev_template"]
cores_locator = locators["resource.compute_profile.rhev_cores"]
memory_locator = locators["resource.compute_profile.rhev_memory"]
group_fields_locators = dict(
network_interfaces=dict(
_add_node=locators[
"resource.compute_profile.interface_add_node"],
name=locators["resource.compute_profile.rhev_interface_name"],
network=locators["resource.compute_profile.rhev_interface_network"]
),
storage=dict(
_add_node=locators[
"resource.compute_profile.storage_add_node"],
size=locators["resource.compute_profile.rhev_storage_size"],
storage_domain=locators[
"resource.compute_profile.rhev_storage_domain"],
preallocate_disk=locators[
"resource.compute_profile.rhev_storage_preallocate"],
bootable=locators["resource.compute_profile.rhev_storage_bootable"]
),
)
fetch_values_locators = dict(
fetch_cluster_locator=locators[
"resource.compute_profile.fetch_rhev_cluster"],
fetch_cores_locator=locators["resource.compute_profile.rhev_cores"],
fetch_memory_locator=locators["resource.compute_profile.rhev_memory"],
fetch_size_locator=locators[
"resource.compute_profile.rhev_storage_size"],
fetch_storage_domain_locator=locators[
"resource.compute_profile.fetch_rhev_storage_domain"],
fetch_bootable_locator=locators[
"resource.compute_profile.rhev_storage_bootable"],
fetch_preallocate_disk_locator=locators[
"resource.compute_profile.rhev_storage_preallocate"],
)
def set_values(self, **kwargs):
"""Set the values of the corresponding fields in UI"""
        # if a template is among the fields to set, it is set first, since
        # selecting a template loads configuration data into the UI
template_key = 'template'
template = kwargs.get(template_key)
if template is not None:
self.set_value(template_key, template)
del kwargs[template_key]
        # setting the memory value does not fire the change event that
        # performs the necessary validation and updates the hidden memory
        # field; without this event the memory value cannot be saved
memory_key = 'memory'
memory = kwargs.get(memory_key)
if memory is not None:
memory_input = self.page.wait_until_element(self.memory_locator)
self._assign_locator_value(memory_input, memory)
# explicitly fire change event, as seems not fired by send keys
self.page.browser.execute_script(
"arguments[0].dispatchEvent(new Event('change'));",
memory_input,
)
del kwargs[memory_key]
ResourceProfileFormBase.set_values(self, **kwargs)
class ResourceProfileFormVMware(ResourceProfileFormBase):
"""Implement VMware compute resource profile form"""
cpus_locator = locators["resource.compute_profile.vmware_cpus"]
corespersocket_locator = locators[
"resource.compute_profile.vmware_corespersocket"]
memory_locator = locators["resource.compute_profile.vmware_memory"]
cluster_locator = locators["resource.compute_profile.vmware_cluster"]
folder_locator = locators["resource.compute_profile.vmware_folder"]
guest_os_locator = locators["resource.compute_profile.vmware_guest_os"]
scsicontroller_locator = locators[
"resource.compute_profile.vmware_scsicontroller"]
virtualhw_version_locator = locators[
"resource.compute_profile.vmware_virtualhw_version"]
memory_hotadd_locator = locators[
"resource.compute_profile.vmware_memory_hotadd"]
cpu_hotadd_locator = locators[
"resource.compute_profile.vmware_cpu_hotadd"]
cdrom_drive_locator = locators[
"resource.compute_profile.vmware_cdrom_drive"]
annotation_notes_locator = locators[
"resource.compute_profile.vmware_annotation_notes"]
image_locator = locators["resource.compute_profile.rhev_image"]
pool_locator = locators[
"resource.compute_profile.vmware_resource_pool"]
group_fields_locators = dict(
network_interfaces=dict(
_add_node=locators[
"resource.compute_profile.interface_add_node"],
name=locators["resource.compute_profile.vmware_interface_name"],
network=locators[
"resource.compute_profile.vmware_interface_network"]
),
storage=dict(
_add_node=locators[
"resource.compute_profile.storage_add_node"],
datastore=locators[
"resource.compute_profile.vmware_storage_datastore"],
size=locators["resource.compute_profile.vmware_storage_size"],
thin_provision=locators[
"resource.compute_profile.vmware_storage_thin_provision"],
eager_zero=locators[
"resource.compute_profile.vmware_storage_eager_zero"],
disk_mode=locators["resource.compute_profile.vmware_disk_mode"]
),
)
_compute_resource_profiles = {
FOREMAN_PROVIDERS['ec2']: ResourceProfileFormEC2,
FOREMAN_PROVIDERS['rhev']: ResourceProfileFormRHEV,
FOREMAN_PROVIDERS['vmware']: ResourceProfileFormVMware,
}
def get_compute_resource_profile(page, res_type=None):
"""Return the corresponding instance compute resource profile form object
"""
resource_profile_class = _compute_resource_profiles.get(res_type)
if not resource_profile_class:
raise UIError(
'Resource profile for resource type: {0}'
' not supported'.format(res_type)
)
return resource_profile_class(page)
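# Illustrative usage sketch (not part of the original module). Given a page
# object such as a ComputeResource instance and a provider type, the factory
# above returns the matching profile form; the field values below are
# hypothetical examples:
#
#     form = get_compute_resource_profile(page, FOREMAN_PROVIDERS['rhev'])
#     form.set_values(cluster='Default', cores='2', memory='2 GB')
#     form.submit()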
class ComputeResource(Base):
"""Provides the CRUD functionality for Compute Resources."""
def navigate_to_entity(self):
"""Navigate to Compute Resource entity page"""
Navigator(self.browser).go_to_compute_resources()
def _search_locator(self):
"""Specify locator for Compute Resource entity search procedure"""
return locators['resource.select_name']
def _configure_resource_provider(
self, provider_type=None, parameter_list=None):
"""Provide configuration capabilities for compute resource provider.
All values should be passed in absolute correspondence to UI. For
example, we need to input some data to 'URL' field, select checkbox
'Console Passwords' and choose 'SPICE' value from select list, so next
parameter list should be passed::
[
['URL', libvirt_url, 'field'],
['Display Type', 'SPICE', 'select'],
['Console passwords', False, 'checkbox']
]
We have cases when it is necessary to push a button to populate values
for select list. For such scenarios we have 'special select' parameter
type. For example, for 'RHEV' provider, we need to click 'Load
Datacenters' button to get values for 'Datacenter' list::
[
['Description', 'My_Test', 'field'],
['URL', libvirt_url, 'field'],
['Username', 'admin', 'field'],
['Password', 'test', 'field'],
['X509 Certification Authorities', 'test', 'field'],
['Datacenter', 'test', 'special select'],
]
"""
if provider_type:
self.select(locators['resource.provider_type'], provider_type)
if parameter_list is None:
return
for parameter_name, parameter_value, parameter_type in parameter_list:
if parameter_name.find('/') >= 0:
_, parameter_name = parameter_name.split('/')
param_locator = '.'.join((
'resource',
(parameter_name.lower()).replace(' ', '_')
))
if parameter_type != 'special select':
self.assign_value(
locators[param_locator], parameter_value)
else:
button_locator = '.'.join((
'resource',
(parameter_name.lower()).replace(' ', '_'),
'button'
))
self.click(locators[button_locator])
self.assign_value(locators[param_locator], parameter_value)
def _configure_orgs(self, orgs, org_select):
"""Provides configuration capabilities for compute resource
organization. The following format should be used::
orgs=['Aoes6V', 'JIFNPC'], org_select=True
"""
self.configure_entity(
orgs,
FILTER['cr_org'],
tab_locator=tab_locators['tab_org'],
entity_select=org_select
)
def _configure_locations(self, locations, loc_select):
"""Provides configuration capabilities for compute resource location
The following format should be used::
locations=['Default Location'], loc_select=True
"""
self.configure_entity(
locations,
FILTER['cr_loc'],
tab_locator=tab_locators['tab_loc'],
entity_select=loc_select
)
def create(self, name, provider_type, parameter_list,
orgs=None, org_select=None, locations=None, loc_select=None):
"""Creates a compute resource."""
self.click(locators['resource.new'])
self.assign_value(locators['resource.name'], name)
self._configure_resource_provider(provider_type, parameter_list)
if locations:
self._configure_locations(locations, loc_select)
if orgs:
self._configure_orgs(orgs, org_select)
self.click(common_locators['submit'])
def update(self, name, newname=None, parameter_list=None,
orgs=None, org_select=None, locations=None, loc_select=None):
"""Updates compute resource entity."""
element = self.search(name)
if element is None:
raise UINoSuchElementError(
'Could not find the resource {0}'.format(name))
self.click(locators['resource.edit'] % name)
self.wait_until_element(locators['resource.name'])
if newname:
self.assign_value(locators['resource.name'], newname)
self._configure_resource_provider(parameter_list=parameter_list)
if locations:
self._configure_locations(locations, loc_select)
if orgs:
self._configure_orgs(orgs, org_select)
self.click(common_locators['submit'])
def search_container(self, cr_name, container_name):
"""Searches for specific container located in compute resource under
'Containers' tab
"""
self.search_and_click(cr_name)
self.click(tab_locators['resource.tab_containers'])
self.assign_value(
locators['resource.filter_containers'], container_name)
return self.wait_until_element(
locators['resource.select_container'] % container_name)
def list_vms(self, res_name):
"""Lists vms of a particular compute resource.
Note: Currently lists only vms that show up on the first page.
"""
self.search_and_click(res_name)
self.click(tab_locators['resource.tab_virtual_machines'])
vm_elements = self.find_elements(locators['resource.vm_list'])
return [vm.text for vm in vm_elements]
def add_image(self, res_name, parameter_list):
"""Adds an image to a compute resource."""
self.search_and_click(res_name)
self.click(locators['resource.image_add'])
self.wait_until_element(locators['resource.image_name'])
for parameter_name, parameter_value in parameter_list:
param_locator = '_'.join((
'resource.image',
(parameter_name.lower())
))
self.assign_value(locators[param_locator], parameter_value)
self.click(locators['resource.image_submit'])
def list_images(self, res_name):
"""Lists images on Compute Resource.
Note: Currently lists only images that show up on the first page.
"""
self.search_and_click(res_name)
self.click(tab_locators['resource.tab_images'])
image_elements = self.find_elements(locators['resource.image_list'])
return [image.text for image in image_elements]
def vm_action_toggle(self, res_name, vm_name, really):
"""Toggle power status of a vm on the compute resource."""
self.search_and_click(res_name)
self.click(tab_locators['resource.tab_virtual_machines'])
button = self.find_element(
locators['resource.vm_power_button'] % vm_name
)
self.click(button)
if "Off" in button.text:
self.handle_alert(really)
def vm_delete(self, res_name, vm_name, really):
"""Removes a vm from the compute resource."""
self.search_and_click(res_name)
self.click(tab_locators['resource.tab_virtual_machines'])
for locator in [locators['resource.vm_delete_button_dropdown'],
locators['resource.vm_delete_button']]:
self.click(locator % vm_name)
self.handle_alert(really)
def search_vm(self, resource_name, vm_name):
"""Searches for existing Virtual machine from particular compute resource. It
is necessary to use custom search here as we need to select compute
resource tab before searching for particular Virtual machine and also,
there is no search button to click
"""
self.search_and_click(resource_name)
self.click(tab_locators['resource.tab_virtual_machines'])
self.assign_value(
locators['resource.search_filter'], vm_name)
strategy, value = self._search_locator()
return self.wait_until_element((strategy, value % vm_name))
def power_on_status(self, resource_name, vm_name):
"""Return the compute resource virtual machine power status
:param resource_name: The compute resource name
:param vm_name: the virtual machine name
:return: on or off
"""
element = self.search_vm(resource_name, vm_name)
if element is None:
raise UIError(
'Could not find Virtual machine "{0}"'.format(vm_name))
return self.wait_until_element(
locators['resource.power_status']).text.lower()
def set_power_status(self, resource_name, vm_name, power_on=None):
"""Perform power on or power off for VM's
:param bool power_on: True - for On, False - for Off
"""
status = None
locator_status = locators['resource.power_status']
element = self.search_vm(resource_name, vm_name)
if element is None:
raise UIError(
'Could not find Virtual machine "{0}"'.format(vm_name))
button = self.find_element(
locators['resource.vm_power_button']
)
if power_on is True:
if 'On' not in button.text:
raise UIError(
'Could not start VM {0}. VM is running'.format(vm_name)
)
self.click(button)
self.search_vm(resource_name, vm_name)
status = self.wait_until_element(locator_status).text
elif power_on is False:
if 'Off' not in button.text:
raise UIError(
'Could not stop VM {0}. VM is not running'.format(vm_name)
)
self.click(button, wait_for_ajax=False)
self.handle_alert(True)
self.search_vm(resource_name, vm_name)
status = self.wait_until_element(locator_status).text
return status
def select_profile(self, resource_name, profile_name):
"""Select the compute profile of a specific compute resource
:param resource_name: Name of compute resource to select from the list
:param profile_name: Name of profile that contains required compute
resource (e.g. '2-Medium' or '1-Small')
:return: resource type and the resource profile form element
:returns: tuple
"""
resource_element = self.search(resource_name)
resource_type = self.wait_until_element(
locators['resource.resource_type'] % resource_name).text
self.click(resource_element)
self.click(tab_locators['resource.tab_compute_profiles'])
self.click(locators["resource.compute_profile"] % profile_name)
return (resource_type,
self.wait_until_element(locators['profile.resource_form']))
def get_profile_values(self, resource_name, profile_name, params_name):
"""Fetch provided compute profile parameters values
:param resource_name: Name of compute resource to select from the list
:param profile_name: Name of profile that contains required compute
resource (e.g. '2-Medium' or '1-Small')
:param params_name: the compute resource profile configuration
properties fields to get
:return: Dictionary of parameters names and their corresponding values
"""
resource_type, _ = self.select_profile(resource_name, profile_name)
resource_profile_form = get_compute_resource_profile(
self, resource_type)
return resource_profile_form.get_values(params_name)
def set_profile_values(self, resource_name, profile_name, **kwargs):
"""Fill and Submit the compute resource profile form configuration
properties
:param resource_name: Name of compute resource to select from the list
:param profile_name: Name of profile that contains required compute
resource (e.g. '2-Medium' or '1-Small')
:param kwargs: the compute resource profile configuration properties
fields to be set
"""
resource_type, _ = self.select_profile(resource_name, profile_name)
resource_profile_form = get_compute_resource_profile(
self, resource_type)
resource_profile_form.set_values(**kwargs)
resource_profile_form.submit()
def check_image_os(self, os_name):
"""Check if the OS is present, if not create the required OS
:param os_name: OS name to check, and create
:return: Created os
"""
# Check if OS that image needs is present or no, If not create the OS
result = entities.OperatingSystem().search(query={
u'search': u'title="{0}"'.format(os_name)
})
if result:
os = result[0]
else:
os = entities.OperatingSystem(
name=os_name.split(' ')[0],
major=os_name.split(' ')[1].split('.')[0],
minor=os_name.split(' ')[1].split('.')[1],
).create()
return os
| sghai/robottelo | robottelo/ui/computeresource.py | Python | gpl-3.0 | 25,187 |
# Copyright (C) 2001 Steve Howell
# You must read the file called INFO.txt before distributing this code.
# ---
# Worlds for Karel are defined with simple text files that we parse out
# in this module. See the worlds folder for examples.
from world import NORTH,SOUTH,EAST,WEST
from utils import trace_error
import re, gettext, sys  # sys is needed for the stderr message below
# Use this to be able to extract strings for translation by pygettext.py
try:
#print "current _", _
old_ = _
except Exception,info:
print >> sys.stderr, "in gvrparser locale switch:\n",info
_ = gettext.gettext
KEYWORDS = (
_('ROBOT'),
_('WALL'),
_('BEEPERS'),
_('SIZE'))
DIRECTIONS = (NORTH,SOUTH,EAST,WEST)
####################### Start I18N part #####################################
# Now we install a gettext file in the builtin namespace
# If this fails the bogus '_()' function is used and we end up with an
# English - English look-up table :-(
# A possible solution would be to test for locales, but I think it won't
# matter much in speed.
_ = old_
#print _
# get a list with the translated strings
trans_commands, org_commands = [],[]
words = KEYWORDS
for i in words:
trans_commands.append(_(i))
org_commands.append(i) # this is the english one
# With this we build a look-up dictionary that is used in the Program class.
# The look-up dict: {'beweeg':'move','rechtsaf':turnright',....}
# the keys are the gettext strings and the vals are the original names.
lookup_dict = {}
for k,v in map(None,trans_commands,org_commands):
lookup_dict[k] = v
lookup_dir_dict = {_('N'): 'N', _('S'): 'S', _('E'): 'E', _('W'): 'W'}
class WorldMapException(Exception):
def __init__(self, line, str):
self.line = line
self.str = str
def __str__(self): return self.str
def checkDirection(line, dir):
if dir not in lookup_dir_dict.values():
raise WorldMapException(line,
_("In line %d:\n%s is not a valid direction -- use N, S, E, or W")
% (line, dir))
def removeComment(line):
foundComment = False
for i in range(len(line)):
if line[i] == "#":
foundComment = True
break
if foundComment:
return line[:i]
else:
return line
def readWorld(lines, world):
definedRobot = 0
useGuido = False
linenumber = 0
worldSize = None
#print "worldMap lines",lines
for line in lines:
linenumber += 1
try:
if re.search("\S", line) and not re.match("\s*#", line):
line = removeComment(line)
tokens = line.split()
tokens = [x.upper() for x in tokens]
keyword = tokens[0]
if lookup_dict.has_key(keyword):
keyword = lookup_dict[keyword]
dir = tokens[3]
if lookup_dir_dict.has_key(dir):
dir = lookup_dir_dict[dir]
tokens[3] = dir
                if keyword == _('WALL') or keyword == 'WALL':
tokens[0] = keyword
#print "wall",tokens
checkDirection(linenumber, dir)
#print "tokens",tokens
world.setWall(*tokens[1:])
elif keyword == _('ROBOT') or keyword == 'ROBOT':
if definedRobot:
raise WorldMapException(linenumber, _('You may only have one robot definition.'))
definedRobot = 1
tokens = [x.upper() for x in tokens]
if len(tokens) == 5:
x, y, dir, numBeepers = tokens[1:]
else:
x, y, dir = tokens[1:]
numBeepers = 0
robotX, robotY = int(x), int(y)
world.positionRobot(robotX, robotY, dir)
if numBeepers == "unlimited":
world.unlimitedBeepers = True
numBeepers = 0
world.setRobotBeepers(int(numBeepers))
elif keyword == _('BEEPERS') or keyword == 'BEEPERS':
x, y, numBeepers = tokens[1:]
world.setBeepers(int(x), int(y), int(numBeepers))
elif keyword == 'BDFL':
useGuido = True
elif keyword == _('SIZE') or keyword == 'SIZE':
if worldSize:
raise WorldMapException(linenumber,
_('You may only have one size statement'))
try:
avenues, streets = [int(coord) for coord in tokens[1:]]
except ValueError:
raise WorldMapException(linenumber,
_('Size statement should have 2 integers'))
if avenues < 7 or streets < 7:
raise WorldMapException(linenumber,
_('Size coordinates must be at least 7'))
worldSize = (avenues, streets)
else:
raise WorldMapException(linenumber,_("Cannot understand: %s") % line)
except Exception,info:
info = _("Error in line %s:\n%s\nCheck your world file for syntax errors") % (linenumber,line)
raise WorldMapException(linenumber, info)
if not definedRobot:
raise WorldMapException(linenumber, _("The world map seems to be missing information."))
world.useGuido = useGuido
return worldSize
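# Illustrative example (not part of the original module): a minimal world map
# in the text format parsed by readWorld above, assuming `world` is an
# instance of the World class imported from the world module:
#
#     lines = """
#     SIZE 10 10
#     ROBOT 3 4 E 1
#     WALL 5 5 N
#     BEEPERS 3 5 2
#     """.strip().splitlines()
#     size = readWorld(lines, world)   # returns (10, 10)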
| cristian99garcia/guido-van-robot-activity | worldMap.py | Python | gpl-3.0 | 5,535 |
from aquarius.objects.Book import Book
class GetBookByTitleAndAuthor(object):
def __init__(self, connection):
self.__connection = connection
def execute(self, book):
b = Book()
sql = "SELECT Id, Title, Author FROM Book WHERE Title=? AND Author=?"
r = list(self.__connection.execute_sql_fetch_all_with_params(sql, (book.title, book.author)))
if len(r) > 0:
self.map_resultset_to_book(b, r)
return b
def map_resultset_to_book(self, book, resultset):
book.id = resultset[0][0]
book.title = resultset[0][1]
book.author = resultset[0][2]
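# Illustrative usage (assumes a connection object exposing the
# execute_sql_fetch_all_with_params method used above):
#
#     wanted = Book()
#     wanted.title, wanted.author = 'Dune', 'Frank Herbert'
#     found = GetBookByTitleAndAuthor(connection).execute(wanted)
#     # found.id is populated when a matching row exists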
| jeroanan/Aquarius | aquarius/persistence/sqlitepersistence/GetBookByTitleAndAuthor.py | Python | gpl-3.0 | 634 |
from django.contrib import admin
from comments.models import Comment
# Register your models here.
admin.site.register(Comment)
| awwong1/CMPUT410-Project | dogenode/comments/admin.py | Python | gpl-3.0 | 130 |
# -*- coding: utf-8 -*-
"""
Created on Tue Dec 08 13:25:40 2015
@author: J. Alejandro Cardona
"""
from Board import *
import pygame
UP, LEFT, DOWN, RIGHT = 1, 2, 3, 4
juego = Board()
_2 = pygame.image.load("2.jpg"); _2re = _2.get_rect()
_4 = pygame.image.load("4.jpg"); _4re = _4.get_rect()
_8 = pygame.image.load("8.jpg"); _8re = _8.get_rect()
_16 = pygame.image.load("16.jpg"); _16re = _16.get_rect()
_32 = pygame.image.load("32.jpg"); _32re = _32.get_rect()
_64 = pygame.image.load("64.jpg"); _64re = _64.get_rect()
_128 = pygame.image.load("128.jpg"); _128re = _128.get_rect()
_256 = pygame.image.load("256.jpg"); _256re = _256.get_rect()
_512 = pygame.image.load("512.jpg"); _512re = _512.get_rect()
_1024 = pygame.image.load("1024.jpg"); _1024re = _1024.get_rect()
_2048 = pygame.image.load("2048.jpg"); _2048re = _2048.get_rect()
figs = {2:(_2, _2re), 4:(_4,_4re), 8:(_8,_8re), 16:(_16,_16re),
32:(_32,_32re), 64:(_64,_64re), 128:(_128,_128re), 256:(_256,_256re),
512:(_512,_512re), 1024:(_1024,_1024re), 2048:(_2048,_2048re)}
def read_key(key):
    # This method is only used when playing in console mode
if key == 'w':
juego.move(UP)
elif key == 's':
juego.move(DOWN)
elif key == 'a':
juego.move(LEFT)
elif key == 'd':
juego.move(RIGHT) | Alecardv/College-projects | 2048/Control.py | Python | gpl-3.0 | 1,336 |
from __future__ import division
import numpy as np
import argparse
import matplotlib.pyplot as plt
from matplotlib.colors import colorConverter
from mpl_toolkits.axes_grid1 import ImageGrid
import matplotlib.cm as cm
from amitgroup.stats import bernoullimm
def main(args):
means = np.load('%s_means.npy' % args.save_path)
S_clusters = np.load('%s_S_clusters.npy' % args.save_path)
    means = means.reshape(*(S_clusters.shape + (int(means.size / S_clusters.size),)))
if True:
plt.close('all')
fig = plt.figure(1, (6, 6))
grid = ImageGrid(fig, 111, # similar to subplot(111)
nrows_ncols = (means.shape[0],means.shape[-1]+1 ), # creates 2x2 grid of axes
axes_pad=0.001, # pad between axes in inch.
)
ncols = means.shape[-1] + 1
for i in xrange(S_clusters.shape[0]):
try:
grid[i*ncols].imshow(S_clusters[i],cmap=cm.binary,interpolation='nearest')
grid[i*ncols].spines['bottom'].set_color('red')
grid[i*ncols].spines['top'].set_color('red')
grid[i*ncols].spines['left'].set_color('red')
grid[i*ncols].spines['right'].set_color('red')
for a in grid[i*ncols].axis.values():
a.toggle(all=False)
except:
import pdb; pdb.set_trace()
for j in xrange(ncols-1):
try:
grid[i*ncols+j+1].imshow(means[i,:,:,j],cmap=cm.binary,interpolation='nearest')
grid[i*ncols+j+1].spines['bottom'].set_color('red')
grid[i*ncols+j+1].spines['top'].set_color('red')
grid[i*ncols+j+1].spines['left'].set_color('red')
grid[i*ncols+j+1].spines['right'].set_color('red')
for a in grid[i*ncols+j+1].axis.values():
a.toggle(all=False)
except:
import pdb; pdb.set_trace()
        plt.savefig('%s' % args.display_bernoulli_parts,
                    bbox_inches='tight')
if __name__ == "__main__":
parser = argparse.ArgumentParser("""Clustering and output for bernoulli data with paired spectrum data""")
parser.add_argument('--save_path',type=str,default='',help='path to save the trained models to')
parser.add_argument('--patches',type=str,default='',help='path to the patches')
parser.add_argument('--spec_patches',type=str,default='',help='path to the spectrogram patches')
parser.add_argument('--n_components',type=int,default=20,help='number of parts')
parser.add_argument('-v',action='store_true',help='whether it is verbose')
parser.add_argument('--display_bernoulli_parts',type=str,default=None,help='path to save the learned parts to')
main(parser.parse_args())
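# Example invocation (illustrative; the file names are placeholders). The
# script expects <save_path>_means.npy and <save_path>_S_clusters.npy to
# exist, and writes the figure to the --display_bernoulli_parts path:
#
#     python display_bernoulli_model.py --save_path mymodel \
#         --display_bernoulli_parts parts.png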
| markstoehr/spectral_features | local/display_bernoulli_model.py | Python | gpl-3.0 | 2,936 |
import os
from django import template
from django.conf import settings
from django.utils.safestring import mark_safe
register = template.Library()
@register.simple_tag()
def custom_css():
theme_path = os.path.join(
settings.MEDIA_ROOT,
"overrides.css"
)
if os.path.exists(theme_path):
return mark_safe(
'<link rel="stylesheet" type="text/css" href="{}" />'.format(
os.path.join(settings.MEDIA_URL, "overrides.css")
)
)
return ""
@register.simple_tag()
def custom_js():
theme_path = os.path.join(
settings.MEDIA_ROOT,
"overrides.js"
)
if os.path.exists(theme_path):
return mark_safe(
'<script src="{}"></script>'.format(
os.path.join(settings.MEDIA_URL, "overrides.js")
)
)
return ""
| danielquinn/paperless | src/documents/templatetags/customisation.py | Python | gpl-3.0 | 865 |
"""
Given a 2D grid, each cell is either a wall 'W',
an enemy 'E' or empty '0' (the number zero),
return the maximum enemies you can kill using one bomb.
The bomb kills all the enemies in the same row and column from
the planted point until it hits the wall since the wall is too strong
to be destroyed.
Note that you can only put the bomb at an empty cell.
Example:
For the given grid
0 E 0 0
E 0 W E
0 E 0 0
return 3. (Placing a bomb at (1,1) kills 3 enemies)
"""
def max_killed_enemies(grid):
if not grid: return 0
m, n = len(grid), len(grid[0])
max_killed = 0
row_e, col_e = 0, [0] * n
# iterates over all cells in the grid
for i in range(m):
for j in range(n):
            # recompute row kills at the start of a row segment
            # (first column or right after a wall)
if j == 0 or grid[i][j - 1] == 'W':
row_e = row_kills(grid, i, j)
            # recompute column kills at the start of a column segment
            # (first row or right after a wall)
if i == 0 or grid[i - 1][j] == 'W':
col_e[j] = col_kills(grid, i, j)
# makes sure the cell contains a 0
if grid[i][j] == '0':
# updates the variable
max_killed = max(max_killed, row_e + col_e[j])
return max_killed
# calculate killed enemies for row i from column j
def row_kills(grid, i, j):
num = 0
len_row = len(grid[0])
while j < len_row and grid[i][j] != 'W':
if grid[i][j] == 'E':
num += 1
j += 1
return num
# calculate killed enemies for column j from row i
def col_kills(grid, i, j):
num = 0
len_col = len(grid)
while i < len_col and grid[i][j] != 'W':
if grid[i][j] == 'E':
num += 1
i += 1
return num
# ----------------- TESTS -------------------------
"""
Testsuite for the project
"""
import unittest
class TestBombEnemy(unittest.TestCase):
def test_3x4(self):
grid1 = [["0", "E", "0", "0"],
["E", "0", "W", "E"],
["0", "E", "0", "0"]]
self.assertEqual(3, max_killed_enemies(grid1))
def test_4x4(self):
grid1 = [
["0", "E", "0", "E"],
["E", "E", "E", "0"],
["E", "0", "W", "E"],
["0", "E", "0", "0"]]
grid2 = [
["0", "0", "0", "E"],
["E", "0", "0", "0"],
["E", "0", "W", "E"],
["0", "E", "0", "0"]]
self.assertEqual(5, max_killed_enemies(grid1))
self.assertEqual(3, max_killed_enemies(grid2))
if __name__ == "__main__":
unittest.main()
| marcosfede/algorithms | matrix/bomb_enemy.py | Python | gpl-3.0 | 2,524 |
from distutils.core import setup
# Dummy setup.py to install libtorrent for python 2.7 using pip
setup(
name='libtorrent',
version='1.0.9',
packages=['libtorrent',],
data_files=[('Lib', ['libtorrent/libtorrent.pyd']),],
)
# Install in "editable mode" for development:
# pip install -e .
| overfl0/Bulletproof-Arma-Launcher | dependencies/libtorrent/setup.py | Python | gpl-3.0 | 303 |
from pyFTS.common import FuzzySet, Membership
import numpy as np
from scipy.spatial import KDTree
import matplotlib.pylab as plt
import logging
class Partitioner(object):
"""
    Universe of Discourse partitioner. Splits the data into several fuzzy sets
"""
def __init__(self, **kwargs):
"""
        Universe of Discourse partitioner scheme. Splits the data into several fuzzy sets
"""
self.name = kwargs.get('name',"")
"""partitioner name"""
self.partitions = kwargs.get('npart', 10)
"""The number of universe of discourse partitions, i.e., the number of fuzzy sets that will be created"""
self.sets = {}
self.membership_function = kwargs.get('func', Membership.trimf)
"""Fuzzy membership function (pyFTS.common.Membership)"""
self.setnames = kwargs.get('names', None)
"""list of partitions names. If None is given the partitions will be auto named with prefix"""
self.prefix = kwargs.get('prefix', 'A')
"""prefix of auto generated partition names"""
self.transformation = kwargs.get('transformation', None)
"""data transformation to be applied on data"""
self.indexer = kwargs.get('indexer', None)
self.variable = kwargs.get('variable', None)
"""In a multivariate context, the variable that contains this partitioner"""
self.type = kwargs.get('type', 'common')
"""The type of fuzzy sets that are generated by this partitioner"""
self.ordered_sets = None
"""A ordered list of the fuzzy sets names, sorted by their middle point"""
self.kdtree = None
"""A spatial index to help in fuzzyfication"""
self.margin = kwargs.get("margin", 0.1)
"""The upper and lower exceeding margins for the known UoD. The default value is .1"""
self.lower_margin = kwargs.get("lower_margin", self.margin)
"""Specific lower exceeding margins for the known UoD. The default value is the self.margin parameter"""
self.upper_margin = kwargs.get("lower_margin", self.margin)
"""Specific upper exceeding margins for the known UoD. The default value is the self.margin parameter"""
if kwargs.get('preprocess',True):
data = kwargs.get('data',[None])
if self.indexer is not None:
ndata = self.indexer.get_data(data)
else:
ndata = data
if self.transformation is not None:
ndata = self.transformation.apply(ndata)
else:
ndata = data
if self.indexer is not None:
ndata = self.indexer.get_data(ndata)
_min = np.nanmin(ndata)
if _min == -np.inf:
ndata[ndata == -np.inf] = 0
_min = np.nanmin(ndata)
self.min = float(_min * (1 + self.lower_margin) if _min < 0 else _min * (1 - self.lower_margin))
_max = np.nanmax(ndata)
self.max = float(_max * (1 + self.upper_margin) if _max > 0 else _max * (1 - self.upper_margin))
self.sets = self.build(ndata)
self.partitions = len(self.sets)
if self.ordered_sets is None and self.setnames is not None:
self.ordered_sets = self.setnames[:len(self.sets)]
else:
self.ordered_sets = FuzzySet.set_ordered(self.sets)
del(ndata)
def extractor(self,x):
"""Extract a single primitive type from an structured instance"""
return x
def build(self, data):
"""
Perform the partitioning of the Universe of Discourse
:param data: training data
:return:
"""
pass
def get_name(self, counter):
"""
Find the name of the fuzzy set given its counter id.
:param counter: The number of the fuzzy set
:return: String
"""
return self.prefix + str(counter) if self.setnames is None else self.setnames[counter]
def lower_set(self):
"""
Return the fuzzy set on lower bound of the universe of discourse.
:return: Fuzzy Set
"""
return self.sets[self.ordered_sets[0]]
def upper_set(self):
"""
Return the fuzzy set on upper bound of the universe of discourse.
:return: Fuzzy Set
"""
return self.sets[self.ordered_sets[-1]]
def build_index(self):
points = []
#self.index = {}
for ct, key in enumerate(self.ordered_sets):
fset = self.sets[key]
points.append([fset.lower, fset.centroid, fset.upper])
#self.index[ct] = fset.name
import sys
sys.setrecursionlimit(100000)
self.kdtree = KDTree(points)
sys.setrecursionlimit(1000)
def fuzzyfy(self, data, **kwargs):
"""
Fuzzyfy the input data according to this partitioner fuzzy sets.
:param data: input value to be fuzzyfied
:keyword alpha_cut: the minimal membership value to be considered on fuzzyfication (only for mode='sets')
:keyword method: the fuzzyfication method (fuzzy: all fuzzy memberships, maximum: only the maximum membership)
:keyword mode: the fuzzyfication mode (sets: return the fuzzy sets names, vector: return a vector with the membership
values for all fuzzy sets, both: return a list with tuples (fuzzy set, membership value) )
        :returns: a list with the fuzzyfied values, depending on the mode
"""
if isinstance(data, (tuple, list, np.ndarray)):
ret = []
for inst in data:
mv = self.fuzzyfy(inst, **kwargs)
ret.append(mv)
return ret
alpha_cut = kwargs.get('alpha_cut', 0.)
mode = kwargs.get('mode', 'sets')
method = kwargs.get('method', 'fuzzy')
nearest = self.search(data, type='index')
mv = np.zeros(self.partitions)
for ix in nearest:
tmp = self[ix].membership(data)
mv[ix] = tmp if tmp >= alpha_cut else 0.
ix = np.ravel(np.argwhere(mv > 0.))
if ix.size == 0:
mv[self.check_bounds(data)] = 1.
if method == 'fuzzy' and mode == 'vector':
return mv
elif method == 'fuzzy' and mode == 'sets':
try:
ix = np.ravel(np.argwhere(mv > 0.))
sets = [self.ordered_sets[i] for i in ix if i < self.partitions]
return sets
except Exception as ex:
return None
elif method == 'maximum' and mode == 'sets':
mx = max(mv)
ix = np.ravel(np.argwhere(mv == mx))
return self.ordered_sets[ix[0]]
elif mode == 'both':
ix = np.ravel(np.argwhere(mv > 0.))
sets = [(self.ordered_sets[i], mv[i]) for i in ix]
return sets
def defuzzyfy(self, values, mode='both'):
if not isinstance(values, list):
values = [values]
num = []
den = []
for val in values:
fset = val[0]
mv = val[1]
if mode == 'both':
num.append( self.sets[fset].centroid * mv )
den.append(mv)
elif mode == 'sets':
num.append(self.sets[fset].centroid)
elif mode == 'vector':
num.append(self.sets[self.ordered_sets[fset]].centroid * mv)
den.append(mv)
else:
                raise Exception('Unknown defuzzyfication mode')
if mode in ('both','vector'):
return np.nansum(num) / np.nansum(den)
else:
return np.nanmean(num)
def check_bounds(self, data):
"""
Check if the input data is outside the known Universe of Discourse and, if it is, round it to the closest
fuzzy set.
:param data: input data to be verified
        :return: the index of the closest fuzzy set when data is outside the
        universe of discourse, or None if the data is inside the UoD.
"""
if data < self.min:
return 0
elif data > self.max:
return self.partitions-1
def search(self, data, **kwargs):
"""
        Perform a search for the nearest fuzzy sets of the point 'data'. This function was designed to work with several
overlapped fuzzy sets.
:param data: the value to search for the nearest fuzzy sets
:param type: the return type: 'index' for the fuzzy set indexes or 'name' for fuzzy set names.
:param results: the number of nearest fuzzy sets to return
:return: a list with the nearest fuzzy sets
"""
if self.kdtree is None:
self.build_index()
type = kwargs.get('type','index')
results = kwargs.get('results', 3)
_, ix = self.kdtree.query([data, data, data], results)
if type == 'name':
return [self.ordered_sets[k] for k in sorted(ix)]
else:
return sorted(ix)
def plot(self, ax, rounding=0):
"""
Plot the partitioning using the Matplotlib axis ax
:param ax: Matplotlib axis
"""
ax.set_title(self.name)
ax.set_ylim([0, 1.1])
ax.set_xlim([self.min, self.max])
ticks = []
x = []
for key in self.sets.keys():
s = self.sets[key]
if s.type == 'common':
self.plot_set(ax, s)
elif s.type == 'composite':
for ss in s.sets:
self.plot_set(ax, ss)
ticks.append(str(round(s.centroid,rounding))+'\n'+s.name)
x.append(s.centroid)
ax.xaxis.set_ticklabels(ticks)
ax.xaxis.set_ticks(x)
def plot_set(self, ax, s):
"""
Plot an isolate fuzzy set on Matplotlib axis
:param ax: Matplotlib axis
:param s: Fuzzy Set
"""
if s.mf == Membership.trimf:
ax.plot([s.parameters[0], s.parameters[1], s.parameters[2]], [0, s.alpha, 0])
elif s.mf in (Membership.gaussmf, Membership.bellmf, Membership.sigmf):
tmpx = np.linspace(s.lower, s.upper, 100)
tmpy = [s.membership(kk) for kk in tmpx]
ax.plot(tmpx, tmpy)
elif s.mf == Membership.trapmf:
ax.plot(s.parameters, [0, s.alpha, s.alpha, 0])
elif s.mf == Membership.singleton:
ax.plot([s.parameters[0],s.parameters[0]], [0, s.alpha])
def __str__(self):
"""
Return a string representation of the partitioner, the list of fuzzy sets and their parameters
:return:
"""
tmp = self.name + ":\n"
for key in self.sets.keys():
tmp += str(self.sets[key])+ "\n"
return tmp
def __len__(self):
"""
Return the number of partitions
:return: number of partitions
"""
return self.partitions
def __getitem__(self, item):
"""
Return a fuzzy set by its order or its name.
        :param item: if item is an integer then it represents the fuzzy set
        index (order); if it is a string then it represents the fuzzy set name.
:return: the fuzzy set
"""
if isinstance(item, (int, np.int, np.int8, np.int16, np.int32, np.int64)):
if item < 0 or item >= self.partitions:
raise ValueError("The fuzzy set index must be between 0 and {}.".format(self.partitions))
return self.sets[self.ordered_sets[item]]
elif isinstance(item, str):
if item not in self.sets:
raise ValueError("The fuzzy set with name {} does not exist.".format(item))
return self.sets[item]
else:
raise ValueError("The parameter 'item' must be an integer or a string and the value informed was {} of type {}!".format(item, type(item)))
def __iter__(self):
"""
Iterate over the fuzzy sets, ordered by its midpoints.
:return: An iterator over the fuzzy sets.
"""
for key in self.ordered_sets:
yield self.sets[key]
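# Illustrative usage sketch (not part of the original module). Partitioner is
# an abstract base: a concrete subclass must implement build(), so the
# SomeGridPartitioner name below is an assumption for demonstration only:
#
#     part = SomeGridPartitioner(data=train_data, npart=10, prefix='A')
#     names = part.fuzzyfy(train_data, method='fuzzy', mode='sets')
#     memberships = part.fuzzyfy(3.14, method='fuzzy', mode='vector')
#     value = part.defuzzyfy(part.fuzzyfy(3.14, mode='both'), mode='both')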
| petroniocandido/pyFTS | pyFTS/partitioners/partitioner.py | Python | gpl-3.0 | 12,188 |
'''
command_line.py
Utility functions for reading command line arguments.
Author:
Martin Norbury
November 2013
'''
import inspect
import argparse
def command_line(fn):
'''
    A decorator for functions intended to be run from the command line.
    This decorator introspects the method signature of the wrapped function
    to configure and parse command line arguments. Positional arguments
    translate to required command line arguments. Arguments with defaults
    supplied are assumed to be optional, e.g.
def myfunction(a,b=1):
...
Can be called from the command line as:-
> myfunction <a> [--b=value]
All arguments are assumed to be strings at this point.
'''
def wrapper_fn(*args, **kwargs):
# Get the original function's method signature
arguments, varargs, kwargs, defaults = inspect.getargspec(fn)
# Get required and optional arguments
required_length = -len(defaults) if defaults else len(arguments)
required_arguments = arguments[:required_length]
optional_arguments = arguments[required_length:]
# Create a list of optional arguments of the form (name, value)
optional_arguments_with_defaults = []
if optional_arguments:
optional_arguments_with_defaults = zip(optional_arguments, defaults)
# Create a command line parser
parser = argparse.ArgumentParser()
# Configure required arguments
for argument in required_arguments:
parser.add_argument('{0}'.format(argument))
# Configure optional arguments, setting defaults appropriately.
for argument, default in optional_arguments_with_defaults:
parser.add_argument('--{0}'.format(argument), type=type(default), default=default)
# Parse the command line arguments
args = parser.parse_args()
# Call the original function with command line supplied arguments
result = fn(**dict(args._get_kwargs()))
return result
return wrapper_fn
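if __name__ == '__main__':
    # Minimal usage sketch (not part of the original module): wrapping a
    # function turns its signature into a CLI, invoked e.g. as
    # `python command_line.py World --times=3`.
    @command_line
    def greet(name, times=1):
        # the decorator parses sys.argv and calls us with typed arguments
        for _ in range(times):
            print('Hello, {0}!'.format(name))
    greet()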
| mnorbury/scriptutil | src/scriptutil/command_line.py | Python | gpl-3.0 | 2,055 |
from django.http import HttpResponseRedirect, JsonResponse
from django.views.generic import CreateView, UpdateView
from django.contrib.messages.views import SuccessMessageMixin
from .models import HistoriaClinica, Patologia
from .forms import HistoriaClinicaForms
from apps.afiliados.models import Titular, Adherente
from apps.personas.models import Persona
class HistoriaClinicaCreate(SuccessMessageMixin, CreateView):
model = HistoriaClinica
form_class = HistoriaClinicaForms
template_name = 'historias_clinicas/historia_clinica_form.html'
success_url = '/historia/clinica/alta/'
success_message = 'La historia clínica se guardo con exito'
def form_valid(self, form):
form.instance.persona = Persona.objects.get(pk=self.kwargs['pk'])
self.success_url = '/historia/clinica/redireccion/%s' % str(Persona.objects.get(pk=self.kwargs['pk']).id)
return super(HistoriaClinicaCreate, self).form_valid(form)
def get_context_data(self, **kwargs):
context = super(HistoriaClinicaCreate, self).get_context_data(**kwargs)
context['persona'] = Persona.objects.get(pk=self.kwargs['pk'])
return context
def redireccion(request, id):
titular = Titular.objects.filter(id=id)
if titular.exists():
persona = Persona.objects.filter(titular=titular)
historia_clinica = HistoriaClinica.objects.filter(persona=persona)
if historia_clinica.exists():
return HttpResponseRedirect('/historia/clinica/modi/' + str(historia_clinica[0].id))
else:
return HttpResponseRedirect('/historia/clinica/alta/' + str(persona[0].id))
else:
adherente = Adherente.objects.filter(id=id)
if adherente.exists():
persona = Persona.objects.filter(adherente=adherente)
historia_clinica = HistoriaClinica.objects.filter(persona=persona)
if historia_clinica.exists():
return HttpResponseRedirect('/historia/clinica/modi/' + str(historia_clinica[0].id))
else:
return HttpResponseRedirect('/historia/clinica/alta/' + str(persona[0].id))
class HistoriaClinicaUpdate(SuccessMessageMixin, UpdateView):
model = HistoriaClinica
form_class = HistoriaClinicaForms
template_name = 'historias_clinicas/historia_clinica_form.html'
success_url = '/historia/clinica/alta/'
success_message = 'La historia clínica se guardo con exito'
def form_valid(self, form):
form.instance.persona = Persona.objects.get(pk=HistoriaClinica.objects.get(pk=self.kwargs['pk']).persona.id)
self.success_url = '/historia/clinica/redireccion/%s' % str(HistoriaClinica.objects.get(
pk=self.kwargs['pk']).persona.id)
return super(HistoriaClinicaUpdate, self).form_valid(form)
def get_context_data(self, **kwargs):
context = super(HistoriaClinicaUpdate, self).get_context_data(**kwargs)
context['persona'] = Persona.objects.get(pk=HistoriaClinica.objects.get(pk=self.kwargs['pk']).persona.id)
return context
# Ajax ######
def patologia_create_ajax(request):
if request.method == 'POST':
if request.is_ajax():
id = ''
patologia = Patologia(nombre_enfermedad=request.POST.get('id_nombre_enfermedad_patologia_ajax'),
fecha_deteccion=request.POST.get('id_fecha_deteccion_patologia_ajax'))
patologia.save()
patologia = Patologia.objects.filter(
nombre_enfermedad=request.POST.get('id_nombre_enfermedad_patologia_ajax'),
fecha_deteccion=request.POST.get('id_fecha_deteccion_patologia_ajax'))
for indice in patologia.values('id'):
for valor in indice:
id = indice[valor]
for indice in patologia.values('nombre_enfermedad'):
for valor in indice:
nombre_enfermedad = indice[valor]
return JsonResponse({'id': id, 'nombre_enfermedad': nombre_enfermedad})
| montenegroariel/sigos | apps/historias_clinicas/views.py | Python | gpl-3.0 | 4,039 |
#!/usr/bin/env python
# _*_ coding:utf-8 _*-_
############################
# File Name: demo.py
# Author: lza
# Created Time: 2016-08-30 16:29:35
############################
import dns.resolver
domain = raw_input('Please input a domain: ')  # prompt for the domain name to query
MX = dns.resolver.query(domain, "MX")  # set the query type to MX records
for i in MX:  # iterate over the answers, printing each MX record's preference and exchanger
print 'MX preference =', i.preference, 'mail exchanger =', i.exchange
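# Illustrative follow-up (same dnspython API): querying A records works the
# same way, with each answer exposing an `address` attribute:
#
#     A = dns.resolver.query(domain, "A")
#     for rdata in A:
#         print 'A record address =', rdata.address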
if __name__ == "__main__":
pass
| zhengjue/mytornado | study/1/dnspython/demo1.py | Python | gpl-3.0 | 537 |
# -*- coding: utf-8 -*-
# Copyright 2014-2016 Akretion (http://www.akretion.com)
# @author Alexis de Lattre <[email protected]>
# Copyright 2016 Sodexis (http://sodexis.com)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from openerp import models, fields, api, _
from openerp.exceptions import ValidationError
class ProductProduct(models.Model):
_inherit = 'product.product'
# Link rental service -> rented HW product
rented_product_id = fields.Many2one(
'product.product', string='Related Rented Product',
domain=[('type', 'in', ('product', 'consu'))])
# Link rented HW product -> rental service
rental_service_ids = fields.One2many(
'product.product', 'rented_product_id',
string='Related Rental Services')
@api.one
@api.constrains('rented_product_id', 'must_have_dates', 'type', 'uom_id')
def _check_rental(self):
if self.rented_product_id and self.type != 'service':
raise ValidationError(_(
"The rental product '%s' must be of type 'Service'.")
% self.name)
if self.rented_product_id and not self.must_have_dates:
raise ValidationError(_(
"The rental product '%s' must have the option "
"'Must Have Start and End Dates' checked.")
% self.name)
# In the future, we would like to support all time UoMs
        # but it is more complex and requires additional development
day_uom = self.env.ref('product.product_uom_day')
if self.rented_product_id and self.uom_id != day_uom:
raise ValidationError(_(
"The unit of measure of the rental product '%s' must "
"be 'Day'.") % self.name)
@api.multi
def _need_procurement(self):
# Missing self.ensure_one() in the native code !
res = super(ProductProduct, self)._need_procurement()
if not res:
for product in self:
if product.type == 'service' and product.rented_product_id:
return True
        # TODO: find a replacement for soline.rental_type == 'new_rental'
return res
| stellaf/sales_rental | sale_rental/models/product.py | Python | gpl-3.0 | 2,194 |
# coding=utf-8
Version = "0.2.0"
Description = "LOTRO/DDO Launcher"
Author = "Alan Jackson"
Email = "[email protected]"
WebSite = "http://www.lotrolinux.com"
LongDescription = "Lord of the Rings Online and Dungeons & Dragons Online\nLauncher for Linux & Mac OS X"
Copyright=" (C) 2009-2010 AJackson"
CLIReference = "Based on CLI launcher for LOTRO\n(C) 2007-2010 SNy"
LotROLinuxReference = "Based on LotROLinux\n(C) 2007-2009 AJackson"
| Lynx3d/pylotro | PyLotROLauncher/Information.py | Python | gpl-3.0 | 441 |
n=int(input('Enter any number: '))
if n%2!=0:
n=n+1
for i in range(n):
for j in range(n):
if (i==int(n/2)) or j==int(n/2) or ((i==0)and (j>=int(n/2))) or ((j==0)and (i<=int(n/2))) or ((j==n-1)and (i>=int(n/2))) or ((i==n-1)and (j<=int(n/2))):
print('*',end='')
else:
print(' ',end='')
print()
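# Illustrative run (not part of the original script): the nested loops draw a
# pinwheel figure from the middle row, middle column and four edge segments.
# For input 6 the output is (trailing spaces omitted):
#
#     *  ***
#     *  *
#     *  *
#     ******
#        * *
#     **** *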
| rohitjogson/pythonwork | assign27.09.py | Python | gpl-3.0 | 355 |
import numpy, cairo, math
from scipy import ndimage
from .object3d import Object3d
from .point3d import Point3d
from .polygon3d import Polygon3d
from .draw_utils import *
from .colors import hsv_to_rgb, rgb_to_hsv
def surface2array(surface):
data = surface.get_data()
if not data:
return None
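    # adding 0 makes an independent copy, so the array no longer aliases the
    # cairo surface's data buffer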
rgb_array = 0+numpy.frombuffer(surface.get_data(), numpy.uint8)
rgb_array.shape = (surface.get_height(), surface.get_width(), 4)
#rgb_array = rgb_array[:,:,[2,1,0,3]]
#rgb_array = rgb_array[:,:, :3]
return rgb_array
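# Illustrative usage (plain pycairo; sizes are arbitrary):
#
#     surf = cairo.ImageSurface(cairo.FORMAT_ARGB32, 64, 64)
#     arr = surface2array(surf)   # -> uint8 array of shape (64, 64, 4)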
class Camera3d(Object3d):
def __init__(self, viewer=(0,0,0)):
super(Camera3d, self).__init__()
self.viewer = Point3d.create_if_needed(viewer)
self.sorted_items = []
self.mat_params = None
self.hit_alpha = 0
self.convolve_kernel = 0
self.hsv_coef = None
def project_point_values(self, point_values):
point_values = self.forward_transform_point_values(point_values)
return self.viewer_point_values(point_values)
def viewer_point_values(self, point_values):
if self.viewer.get_z() != 0:
ratio = self.viewer.get_z()/point_values[:, 2]
x_values = (ratio*point_values[:,0]) - self.viewer.get_x()
y_values = (ratio*point_values[:,1]) - self.viewer.get_y()
return numpy.stack((x_values, y_values), axis=1)
else:
return point_values[:, [0,1]]
def reverse_project_point_value(self, point_value, z_depth):
real_point_value = Point3d(x=point_value[0], y=point_value[1], z=z_depth)
if self.viewer.get_z() != 0:
ratio = z_depth/self.viewer.get_z()
real_point_value.values[0] = (point_value[0] + self.viewer.get_x())/ratio
real_point_value.values[1] = (point_value[1] + self.viewer.get_y())/ratio
real_point_value.values = self.reverse_transform_point_values(real_point_value.values)
return real_point_value.values
def sort_items(self, items=None):
polygons = []
if items is None:
polygons.extend(Polygon3d.Items)
else:
for item in items:
if not hasattr(item, "polygons"):
continue
polygons.extend(item.polygons)
self.sorted_items = sorted(polygons, key=self.z_depth_sort_key)
self.poly_face_params = None
for item in self.sorted_items:
params = numpy.array([item.plane_params_normalized[self]])
if self.poly_face_params is None:
self.poly_face_params = params
else:
self.poly_face_params = numpy.concatenate(
(self.poly_face_params, params), axis=0)
def z_depth_sort_key(self, ob):
return ob.z_depths[self]
def get_image_canvas(self, left, top, width, height, border_color=None, border_width=None, scale=.5):
left = math.floor(left)
top = math.floor(top)
width = int(width)
height = int(height)
if border_width>0:
border_width = max(border_width*scale, 1)
min_depth = -100000
canvas_width = int(width*scale)
canvas_height = int(height*scale)
pixel_count = canvas_width*canvas_height
canvas_surf = cairo.ImageSurface(cairo.FORMAT_ARGB32, canvas_width, canvas_height)
canvas_surf_array = surface2array(canvas_surf)
canvas_z_depths =numpy.repeat(min_depth, pixel_count)
canvas_z_depths = canvas_z_depths.astype("f").reshape(canvas_height, canvas_width)
        obj_pad = max((border_width or 0) * 4, 0)  # border_width may be None
for object_3d in self.sorted_items:
if object_3d.border_width:
pad = max(obj_pad, object_3d.border_width*2)
else:
pad = obj_pad
brect = object_3d.bounding_rect[self]
bleft, btop = int(math.ceil(brect[0][0])), int(math.ceil(brect[0][1]))
bright, bbottom = int(math.ceil(brect[1][0])), int(math.ceil(brect[1][1]))
if bleft>left+width or bright<left or \
btop>top+height or bbottom<top:
continue
bleft -= pad
bright += pad
btop -= pad
bbottom += pad
sleft = max(left, bleft)
stop = max(top, btop)
sright = min(left+width, bright)
sbottom = min(top+height, bbottom)
#if sleft>=sright or stop>=sbottom:
# continue
sw = int(math.ceil(sright-sleft))
sh = int(math.ceil(sbottom-stop))
if sw<=0 or sh<=0:
continue
poly_canvas_width = int(sw*scale)
poly_canvas_height = int(sh*scale)
cleft = int((sleft-left)*scale)
cright = min(int((sright-left)*scale), canvas_width)
ctop = int((stop-top)*scale)
cbottom = int((sbottom-top)*scale)
            if (cbottom - ctop != poly_canvas_height):
cbottom=poly_canvas_height+ctop
if cbottom>canvas_height:
cbottom = canvas_height
ctop = cbottom-poly_canvas_height
if (cright-cleft!=poly_canvas_width):
cright=poly_canvas_width+cleft
if cright>canvas_width:
cright = canvas_width
cleft = cright-poly_canvas_width
#print "poly_canvas_height", poly_canvas_height, "poly_canvas_width", poly_canvas_width
#print "cbottom-ctop", cbottom-ctop, "cright-cleft", cright-cleft
#print "canvas_width, canvas_height", canvas_width, canvas_height
#print "cbottom, ctop", cbottom, ctop, "cright, cleft", cright, cleft
poly_surf = cairo.ImageSurface(cairo.FORMAT_ARGB32, poly_canvas_width, poly_canvas_height)
poly_ctx = cairo.Context(poly_surf)
poly_ctx.scale(scale, scale)
set_default_line_style(poly_ctx)
poly_ctx.rectangle(0, 0, sw, sh)
poly_ctx.set_source_rgba(1, 0, 0, 0)
poly_ctx.fill()
poly_ctx.translate(-bleft, -btop)
poly_ctx.translate(-(sleft-bleft), -(stop-btop))
object_3d.draw(poly_ctx, self, border_color=border_color, border_width=border_width)
surfacearray = surface2array(poly_surf)
if surfacearray is None:
continue
area_cond = (surfacearray[:, :, 3]<=self.hit_alpha)
xs = numpy.linspace(sleft, sright, poly_canvas_width)
xcount = len(xs)
ys = numpy.linspace(stop, sbottom, poly_canvas_height)
ycount = len(ys)
xs, ys = numpy.meshgrid(xs, ys)
coords = numpy.vstack((xs.flatten(), ys.flatten()))
coords = coords.T#.reshape((ycount, xcount, 2))
coords.shape = (xcount*ycount, 2)
vz = self.viewer.get_z()
if vz == 0:
coords_depths = numpy.matmul(object_3d.plane_params_normalized[self],
numpy.concatenate((coords.T, [numpy.ones(coords.shape[0])]), axis=0))
else:
vx = self.viewer.get_x()
vy = self.viewer.get_y()
pp = object_3d.plane_params_normalized[self]
coords_depths = pp[2]*vz/(-pp[0]*(coords[:, 0]+vx)-pp[1]*(coords[:, 1]+vy)+vz)
            coords_depths.shape = (ycount, xcount)
blank_depths = numpy.repeat(min_depth+1, ycount*xcount)
blank_depths.shape = coords_depths.shape
coords_depths = numpy.where(area_cond, blank_depths, coords_depths)
pre_depths = canvas_z_depths[ctop:cbottom, cleft:cright]
pre_depths.shape = (cbottom-ctop, cright-cleft)
            depths_cond = pre_depths < coords_depths  # higher depths come out on top
new_depths = numpy.where(depths_cond, coords_depths, pre_depths)
canvas_z_depths[ctop:cbottom, cleft:cright] = new_depths
pre_colors = canvas_surf_array[ctop:cbottom, cleft:cright, :]
pre_colors.shape = (cbottom-ctop, cright-cleft, 4)
depths_cond_multi = numpy.repeat(depths_cond, 4)
depths_cond_multi.shape = (depths_cond.shape[0], depths_cond.shape[1], 4)
new_colors = numpy.where(depths_cond_multi, surfacearray, pre_colors)
canvas_surf_array[ctop:cbottom, cleft:cright, :] = new_colors
"""
cond = (canvas_surf_array[:, :, 3]!=255)
cond_multi = numpy.repeat(cond, 4)
cond_multi.shape = (cond.shape[0], cond.shape[1], 4)
filled_values = numpy.repeat([[255, 0, 0, 255]], height*width, axis=0 ).astype(numpy.uint8)
filled_values.shape= (cond.shape[0], cond.shape[1], 4)
canvas_surf_array = numpy.where(cond_multi, filled_values, canvas_surf_array)
"""
canvas = cairo.ImageSurface.create_for_data(
canvas_surf_array, cairo.FORMAT_ARGB32, canvas_width, canvas_height)
if scale != 1:
enlarged_canvas = cairo.ImageSurface(cairo.FORMAT_ARGB32, width, height)
ctx = cairo.Context(enlarged_canvas)
ctx.rectangle(0, 0, width, height)
ctx.scale(1./scale, 1./scale)
ctx.set_source_surface(canvas)
ctx.paint()
canvas = enlarged_canvas
return canvas
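    # Standalone sketch of the depth merge above (made-up 1x2 buffers):
    # a fragment wins wherever its depth is higher -- the same
    # numpy.where(depths_cond, new, old) pattern is applied to both the
    # z-buffer and the color planes.
    #   pre_d = numpy.array([[0.0, 5.0]])
    #   new_d = numpy.array([[3.0, 1.0]])
    #   numpy.where(pre_d < new_d, new_d, pre_d)   # -> [[3.0, 5.0]]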
def draw(self, ctx, border_color, border_width):
for object_3d in self.sorted_items:
object_3d.draw(ctx, self, border_color=border_color, border_width=border_width)
def get_image_canvas_high_quality(self,
container,
canvas_width, canvas_height, premat,
left, top, width, height,
border_color=None, border_width=None):
min_depth = -100000
canvas_width = int(canvas_width)
canvas_height = int(canvas_height)
canvas_surf = cairo.ImageSurface(cairo.FORMAT_ARGB32, canvas_width, canvas_height)
ctx = cairo.Context(canvas_surf)
ctx.rectangle(0, 0, canvas_width, canvas_height)
draw_fill(ctx, "00000000")
canvas_surf_array = surface2array(canvas_surf)
canvas_z_depths = numpy.zeros(canvas_surf_array.shape[:2], dtype="f")
canvas_z_depths.fill(min_depth)
canvas_normals = numpy.zeros(
(canvas_surf_array.shape[0],
canvas_surf_array.shape[1],
3), dtype="f")
canvas_normals.fill(0)
canvas_points = numpy.zeros(
(canvas_surf_array.shape[0],
canvas_surf_array.shape[1],
3), dtype="f")
canvas_points.fill(0)
invert = cairo.Matrix().multiply(premat)
invert.invert()
xx, yx, xy, yy, x0, y0 = invert
numpy_pre_invert_mat = numpy.array([[xx, xy, x0], [yx, yy, y0]])
span_y = max(-top, top+height)
lights = container.get_lights()
        lights = sorted(lights, key=self.z_depth_sort_key)
for object_3d in self.sorted_items:
brect = object_3d.bounding_rect[self]
bleft, btop = brect[0][0], brect[0][1]
bright, bbottom = brect[1][0], brect[1][1]
if bleft>left+width or bright<left or \
btop>span_y or bbottom<-span_y:
continue
sleft = max(left, bleft)
stop = btop#max(top, btop)
sright = min(left+width, bright+1)
sbottom = bbottom#min(top+height, bbottom+1)
sw = sright-sleft
sh = sbottom-stop
if sw<=0 or sh<=0:
continue
poly_surf = cairo.ImageSurface(cairo.FORMAT_ARGB32, canvas_width, canvas_height)
poly_ctx = cairo.Context(poly_surf)
set_default_line_style(poly_ctx)
poly_ctx.rectangle(0, 0, canvas_width, canvas_height)
poly_ctx.set_source_rgba(1, 0, 0, 0)
poly_ctx.fill()
poly_ctx.set_matrix(premat)
object_3d.draw(poly_ctx, self, border_color=border_color, border_width=border_width)
del poly_ctx
surfacearray = surface2array(poly_surf)
xs = numpy.arange(0, canvas_width, step=1)
xcount = len(xs)
ys = numpy.arange(0, canvas_height, step=1)
ycount = len(ys)
xs, ys = numpy.meshgrid(xs, ys)
surface_grid = numpy.vstack((xs.flatten(), ys.flatten(), numpy.ones(xcount*ycount)))
surface_grid.shape = (3, ycount*xcount)
del xs, ys
hit_area_cond = (surfacearray[:, :, 3]>self.hit_alpha)
hit_area_cond.shape = (ycount*xcount,)
canvas_poly_coords = surface_grid[:, hit_area_cond]
canvas_poly_coords.shape = (3, -1)
del hit_area_cond
poly_coor_x = canvas_poly_coords[0, :].astype(numpy.uint32)
poly_coor_y = canvas_poly_coords[1, :].astype(numpy.uint32)
coords = numpy.matmul(numpy_pre_invert_mat, canvas_poly_coords)
coords.shape = (2, -1)
del canvas_poly_coords
coords_depths = numpy.matmul(object_3d.plane_params_normalized[self],
numpy.concatenate((coords, [numpy.ones(coords.shape[1])]), axis=0))
surface_points = numpy.concatenate((coords, [coords_depths]), axis=0).T
del coords
canvas_points[poly_coor_y, poly_coor_x, :] = surface_points
canvas_normals[poly_coor_y, poly_coor_x, :] = object_3d.plane_normals[self].copy()
pre_depths = canvas_z_depths[poly_coor_y, poly_coor_x]
depths_cond = pre_depths<coords_depths
new_depths = numpy.where(depths_cond, coords_depths, pre_depths)
canvas_z_depths[poly_coor_y, poly_coor_x] = new_depths
del pre_depths, new_depths
pre_colors = canvas_surf_array[poly_coor_y, poly_coor_x, :]
pre_colors.shape = (-1, 4)
depths_cond_multi = numpy.repeat(depths_cond, 4)
depths_cond_multi.shape = (depths_cond.shape[0], 4)
picked_surface = surfacearray[poly_coor_y, poly_coor_x]
picked_surface.shape = (-1, 4)
for light in lights:
light_depths_cond = ((coords_depths<light.z_depths[self]) & (coords_depths>min_depth))
light_vectors = light.rel_location_values[self][0][:3]-surface_points[light_depths_cond, :]
if len(light_vectors)==0:
continue
surface_normals = object_3d.plane_normals[self]
norm = numpy.linalg.norm(light_vectors, axis=1)
norm = numpy.repeat(norm, 3)
norm.shape = (-1,3)
light_vectors = light_vectors/norm
cosines = numpy.sum(light_vectors*surface_normals, axis=1)
cosines = numpy.abs(cosines)
cosines_multi = numpy.repeat(cosines, 4)
cosines_multi.shape = (cosines.shape[0],4)
if hasattr(light, "normal"):
cosines = numpy.sum(-light_vectors*light.normal.values[:3], axis=1)
cosines = numpy.abs(cosines)
                    damp = numpy.exp(-light.decay*(numpy.abs(cosines-light.cone_cosine)))
                    # spotlight cone falloff: full intensity inside the cone, damped outside
                    cosines = numpy.where(cosines < light.cone_cosine, damp, 1)
colors = numpy.repeat([light.color.to_255()], len(light_vectors), axis=0)
colors.shape = (-1, 4)
colors[:,3] = colors[:,3]*cosines
else:
colors = numpy.repeat([light.color.to_255()], len(light_vectors), axis=0)
pre_surf_colors = picked_surface[light_depths_cond, :].copy()
picked_surface[light_depths_cond, :] = (pre_surf_colors*(1-cosines_multi) + \
colors*cosines_multi).astype(numpy.uint8)
new_colors = numpy.where(depths_cond_multi, picked_surface, pre_colors)
new_colors.shape = (-1, 4)
del depths_cond_multi, pre_colors, picked_surface
canvas_surf_array[poly_coor_y, poly_coor_x, :] = new_colors
del poly_coor_x, poly_coor_y
if self.convolve_kernel:
n=self.convolve_kernel
kernel = numpy.ones(n*n).reshape(n,n)*1./(n*n)
for i in xrange(4):
canvas_surf_array[:,:,i] = ndimage.convolve(
canvas_surf_array[:,:,i], kernel, mode="constant")
if self.hsv_coef:
hsv = rgb_to_hsv(canvas_surf_array[:,:,:3].copy())
hsv[:, :, 0] = (hsv[:, :,0]*self.hsv_coef[0])
hsv[:, :, 1] = (hsv[:, :,1]*self.hsv_coef[1])
hsv[:, :, 2] = (hsv[:, :,2]*self.hsv_coef[2])
canvas_surf_array[:, :, :3] = hsv_to_rgb(hsv)
"""
for light in lights:
depths_cond = ((canvas_z_depths<light.z_depths[self]) & (canvas_z_depths>min_depth))
light_vectors = light.rel_location_values[self][0][:3]-canvas_points[depths_cond, :]
if len(light_vectors)==0:
continue
surface_normals = canvas_normals[depths_cond, :]
norm = numpy.linalg.norm(light_vectors, axis=1)
norm = numpy.repeat(norm, 3)
norm.shape = (-1,3)
light_vectors = light_vectors/norm
cosines = numpy.sum(light_vectors*surface_normals, axis=1)
cosines = numpy.abs(cosines)
cosines_multi = numpy.repeat(cosines, 4)
cosines_multi.shape = (cosines.shape[0],4)
if hasattr(light, "normal"):
cosines = numpy.sum(-light_vectors*light.normal.values[:3], axis=1)
cosines = numpy.abs(cosines)
damp = numpy.exp(-light.decay*(numpy.abs(cosines-light.cone_cosine)))
cosines = numpy.where(cosines<light.cone_cosine, damp, 1)
colors = numpy.repeat([light.color.to_255()], len(light_vectors), axis=0)
colors.shape = (-1, 4)
colors[:,3] = colors[:,3]*cosines
else:
colors = numpy.repeat([light.color.to_255()], len(light_vectors), axis=0)
pre_colors = canvas_surf_array[depths_cond, :].copy()
canvas_surf_array[depths_cond, :] = (pre_colors*(1-cosines_multi) + \
colors*cosines_multi).astype(numpy.uint8)
"""
canvas = cairo.ImageSurface.create_for_data(
numpy.getbuffer(canvas_surf_array), cairo.FORMAT_ARGB32, canvas_width, canvas_height)
return canvas
def draw_objects(self, ctx, left, top, width, height):
image_canvas = self.get_image_canvas(left, top, width, height)
ctx.set_source_surface(image_canvas, left, top)
ctx.get_source().set_filter(cairo.FILTER_FAST)
ctx.paint()
"""
for object_3d in self.get_sorted_items():
object_3d.draw_bounding_rect(self, ctx)
"""
| sujoykroy/motion-picture | editor/MotionPicture/commons/camera3d.py | Python | gpl-3.0 | 18,923 |
#!/usr/bin/env python
# Copyright (C) 2014 Equinor ASA, Norway.
#
# The file 'test_grid.py' is part of ERT - Ensemble based Reservoir Tool.
#
# ERT is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ERT is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE.
#
# See the GNU General Public License at <http://www.gnu.org/licenses/gpl.html>
# for more details.
import os.path
import six
from unittest import skipIf, skip
import time
import itertools
from numpy import linspace
from ecl.util.util import IntVector
from ecl import EclDataType, EclUnitTypeEnum
from ecl.eclfile import EclKW, EclFile
from ecl.grid import EclGrid
from ecl.grid import EclGridGenerator as GridGen
from ecl.grid.faults import Layer , FaultCollection
from ecl.util.test import TestAreaContext
from numpy.testing import assert_allclose
from tests import EclTest
# This dict is used to verify that corners are mapped to the correct
# cell with respect to containment.
CORNER_HOME = {
(0, 0, 0) : 0, (0, 0, 1) : 9, (0, 0, 2) : 18, (0, 0, 3) : 18,
(0, 1, 0) : 3, (0, 1, 1) : 12, (0, 1, 2) : 21, (0, 1, 3) : 21,
(0, 2, 0) : 6, (0, 2, 1) : 15, (0, 2, 2) : 24, (0, 2, 3) : 24,
(0, 3, 0) : 6, (0, 3, 1) : 15, (0, 3, 2) : 24, (0, 3, 3) : 24,
(1, 0, 0) : 1, (1, 0, 1) : 10, (1, 0, 2) : 19, (1, 0, 3) : 19,
(1, 1, 0) : 4, (1, 1, 1) : 13, (1, 1, 2) : 22, (1, 1, 3) : 22,
(1, 2, 0) : 7, (1, 2, 1) : 16, (1, 2, 2) : 25, (1, 2, 3) : 25,
(1, 3, 0) : 7, (1, 3, 1) : 16, (1, 3, 2) : 25, (1, 3, 3) : 25,
(2, 0, 0) : 2, (2, 0, 1) : 11, (2, 0, 2) : 20, (2, 0, 3) : 20,
(2, 1, 0) : 5, (2, 1, 1) : 14, (2, 1, 2) : 23, (2, 1, 3) : 23,
(2, 2, 0) : 8, (2, 2, 1) : 17, (2, 2, 2) : 26, (2, 2, 3) : 26,
(2, 3, 0) : 8, (2, 3, 1) : 17, (2, 3, 2) : 26, (2, 3, 3) : 26,
(3, 0, 0) : 2, (3, 0, 1) : 11, (3, 0, 2) : 20, (3, 0, 3) : 20,
(3, 1, 0) : 5, (3, 1, 1) : 14, (3, 1, 2) : 23, (3, 1, 3) : 23,
(3, 2, 0) : 8, (3, 2, 1) : 17, (3, 2, 2) : 26, (3, 2, 3) : 26,
(3, 3, 0) : 8, (3, 3, 1) : 17, (3, 3, 2) : 26, (3, 3, 3) : 26
}
def createVolumeTestGridBase(dim, dV, offset=1):
return [
GridGen.create_grid(dim, dV, offset=0),
GridGen.create_grid(dim, dV, offset=offset),
GridGen.create_grid(dim, dV, offset=offset, irregular_offset=True),
GridGen.create_grid(dim, dV, offset=offset, concave=True),
GridGen.create_grid(dim, dV, offset=offset, irregular=True),
GridGen.create_grid(dim, dV, offset=offset, concave=True, irregular=True),
GridGen.create_grid(dim, dV, offset=offset, irregular_offset=True, concave=True),
GridGen.create_grid(dim, dV, offset=0, faults=True),
GridGen.create_grid(dim, dV, offset=offset, faults=True),
GridGen.create_grid(dim, dV, escape_origo_shift=(100, 100, 0), scale=2),
GridGen.create_grid(dim, dV, escape_origo_shift=(100, 100, 0), scale=0.5),
GridGen.create_grid(dim, dV, escape_origo_shift=(100, 100, 0), translation=(50,50,0)),
GridGen.create_grid(dim, dV, escape_origo_shift=(100, 100, 0), rotate=True),
GridGen.create_grid(dim, dV, escape_origo_shift=(100, 100, 0), misalign=True),
GridGen.create_grid(dim, dV, offset=offset, escape_origo_shift=(100, 100, 0),
irregular_offset=True, concave=True, irregular=True,
scale=1.5, translation=(5,5,0), rotate=True,
misalign=True)
]
def createContainmentTestBase():
return [
(3, GridGen.create_grid((6,6,6), (1,1,1), offset=1)),
(10, GridGen.create_grid((3,3,3), (1,1,1), offset=1, concave=True)),
(4, GridGen.create_grid((10,10,1), (1,1,1), offset=0., misalign=True)),
(3,
GridGen.create_grid((6,6,6), (1,1,1), offset=0.,
escape_origo_shift=(100, 100, 0),
irregular_offset=True, concave=True, irregular=True,
scale=1.5, translation=(5,5,0),
misalign=True
)
)
]
def getMinMaxValue(grid):
corners = [
grid.getCellCorner(i, cell)
for i in range(8)
for cell in range(grid.getGlobalSize())
]
return [(min(values), max(values)) for values in zip(*corners)]
def createWrapperGrid(grid):
"""
Creates a grid that occupies the same space as the given grid,
but that consists of a single cell.
"""
x, y, z = grid.getNX()-1, grid.getNY()-1, grid.getNZ()-1
corner_pos = [
(0, 0, 0), (x, 0, 0), (0, y, 0), (x, y, 0),
(0, 0, z), (x, 0, z), (0, y, z), (x, y, z)
]
corners = [
grid.getCellCorner(i, ijk=pos)
for i, pos in enumerate(corner_pos)
]
return GridGen.create_single_cell_grid(corners)
def average(points):
    p = six.moves.reduce(
        lambda a, b: (a[0] + b[0], a[1] + b[1], a[2] + b[2]), points)
return [elem/float(len(points)) for elem in p]
# This test class should only have test cases which do not require
# external test data. Tests involving Equinor test data are in the
# test_grid_equinor module.
class GridTest(EclTest):
def test_oom_grid(self):
nx = 2000
ny = 2000
nz = 2000
with self.assertRaises(MemoryError):
grid = GridGen.createRectangular( (nx,ny,nz), (1,1,1))
def test_posXYEdge(self):
nx = 10
ny = 11
grid = GridGen.createRectangular( (nx,ny,1) , (1,1,1) )
self.assertEqual( grid.findCellCornerXY(0,0,0) , 0 )
self.assertEqual( grid.findCellCornerXY(nx,0,0) , nx)
self.assertEqual( grid.findCellCornerXY(0 , ny , 0) , (nx + 1 ) * ny )
self.assertEqual( grid.findCellCornerXY(nx,ny,0) , (nx + 1 ) * (ny + 1) - 1)
self.assertEqual( grid.findCellCornerXY(0.25,0,0) , 0 )
self.assertEqual( grid.findCellCornerXY(0,0.25,0) , 0 )
self.assertEqual( grid.findCellCornerXY(nx - 0.25,0,0) , nx )
self.assertEqual( grid.findCellCornerXY(nx , 0.25,0) , nx )
self.assertEqual( grid.findCellCornerXY(0 , ny - 0.25, 0) , (nx + 1 ) * ny )
self.assertEqual( grid.findCellCornerXY(0.25 , ny , 0) , (nx + 1 ) * ny )
self.assertEqual( grid.findCellCornerXY(nx -0.25 ,ny,0) , (nx + 1 ) * (ny + 1) - 1)
self.assertEqual( grid.findCellCornerXY(nx , ny - 0.25,0) , (nx + 1 ) * (ny + 1) - 1)
def test_dims(self):
grid = GridGen.createRectangular( (10,20,30) , (1,1,1) )
self.assertEqual( grid.getNX() , 10 )
self.assertEqual( grid.getNY() , 20 )
self.assertEqual( grid.getNZ() , 30 )
self.assertEqual( grid.getGlobalSize() , 30*10*20 )
self.assertEqual( grid.getDims() , (10,20,30,6000) )
def test_create(self):
with self.assertRaises(ValueError):
grid = GridGen.createRectangular( (10,20,30) , (1,1,1) , actnum = [0,1,1,2])
with self.assertRaises(ValueError):
grid = GridGen.createRectangular( (10,20,30) , (1,1,1) , actnum = IntVector(initial_size = 10))
grid = GridGen.createRectangular( (10,20,30) , (1,1,1) ) # actnum=None -> all active
self.assertEqual( grid.getNumActive( ) , 30*20*10)
actnum = IntVector(default_value = 1 , initial_size = 6000)
actnum[0] = 0
actnum[1] = 0
grid = GridGen.createRectangular( (10,20,30) , (1,1,1) , actnum = actnum)
self.assertEqual( grid.getNumActive( ) , 30*20*10 - 2)
def test_all_iters(self):
fk = self.createTestPath('local/ECLIPSE/faarikaal/faarikaal1.EGRID')
grid = EclGrid(fk)
cell = grid[3455]
self.assertEqual(3455, cell.global_index)
cell = grid[(4,1,82)]
self.assertEqual(3455, cell.global_index)
self.assertEqual(grid.cell(global_index=3455),
grid.cell(active_index=2000))
self.assertEqual(grid.cell(global_index=3455),
grid.cell(i=4, j=1, k=82))
na = grid.get_num_active()
self.assertEqual(na, 4160)
cnt = 0
for c in grid.cells(active=True):
cnt += 1
self.assertTrue(c.active)
self.assertEqual(cnt, 4160)
cnt = len([c for c in grid.cells()])
self.assertEqual(cnt, len(grid))
def test_repr_and_name(self):
grid = GridGen.createRectangular((2,2,2), (10,10,10), actnum=[0,0,0,0,1,1,1,1])
pfx = 'EclGrid('
rep = repr(grid)
self.assertEqual(pfx, rep[:len(pfx)])
self.assertEqual(type(rep), type(''))
self.assertEqual(type(grid.getName()), type(''))
with TestAreaContext("python/ecl_grid/repr"):
grid.save_EGRID("CASE.EGRID")
g2 = EclGrid("CASE.EGRID")
r2 = repr(g2)
self.assertEqual(pfx, r2[:len(pfx)])
self.assertEqual(type(r2), type(''))
self.assertEqual(type(g2.getName()), type(''))
def test_node_pos(self):
grid = GridGen.createRectangular( (10,20,30) , (1,1,1) )
with self.assertRaises(IndexError):
grid.getNodePos(-1,0,0)
with self.assertRaises(IndexError):
grid.getNodePos(11,0,0)
p0 = grid.getNodePos(0,0,0)
self.assertEqual( p0 , (0,0,0))
p7 = grid.getNodePos(10,20,30)
self.assertEqual( p7 , (10,20,30))
# The broken file was previously handled by the ecl_file_open() call internally
# in the ecl_grid implementation. That will now not fail for a broken file, and then
# the grid class needs to do more/better checking itself.
@skip("Needs better error checking inside in the ecl_grid")
def test_truncated_file(self):
grid = GridGen.createRectangular( (10,20,30) , (1,1,1) )
with TestAreaContext("python/ecl_grid/truncated"):
grid.save_EGRID( "TEST.EGRID")
size = os.path.getsize( "TEST.EGRID")
with open("TEST.EGRID" , "r+") as f:
                f.truncate( size // 2 )
with self.assertRaises(IOError):
EclGrid("TEST.EGRID")
def test_posXY1(self):
nx = 4
ny = 1
nz = 1
grid = GridGen.createRectangular( (nx,ny,nz) , (1,1,1) )
(i,j) = grid.findCellXY( 0.5 , 0.5, 0 )
self.assertEqual(i , 0)
self.assertEqual(j , 0)
(i,j) = grid.findCellXY( 3.5 , 0.5, 0 )
self.assertEqual(i , 3)
self.assertEqual(j , 0)
def test_init_ACTNUM(self):
nx = 10
ny = 23
nz = 7
grid = GridGen.createRectangular( (nx,ny,nz) , (1,1,1) )
actnum = grid.exportACTNUM()
self.assertEqual( len(actnum) , nx*ny*nz )
self.assertEqual( actnum[0] , 1 )
self.assertEqual( actnum[nx*ny*nz - 1] , 1 )
actnum_kw = grid.exportACTNUMKw( )
self.assertEqual(len(actnum_kw) , len(actnum))
for a1,a2 in zip(actnum, actnum_kw):
self.assertEqual(a1, a2)
def test_posXY(self):
nx = 10
ny = 23
nz = 7
grid = GridGen.createRectangular( (nx,ny,nz) , (1,1,1) )
with self.assertRaises(IndexError):
grid.findCellXY( 1 , 1, -1 )
with self.assertRaises(IndexError):
grid.findCellXY( 1 , 1, nz + 1 )
with self.assertRaises(ValueError):
grid.findCellXY(15 , 78 , 2)
i,j = grid.findCellXY( 1.5 , 1.5 , 2 )
self.assertEqual(i , 1)
self.assertEqual(j , 1)
for i in range(nx):
for j in range(ny):
p = grid.findCellXY(i + 0.5 , j+ 0.5 , 0)
self.assertEqual( p[0] , i )
self.assertEqual( p[1] , j )
c = grid.findCellCornerXY( 0.10 , 0.10 , 0 )
self.assertEqual(c , 0)
c = grid.findCellCornerXY( 0.90 , 0.90 , 0 )
self.assertEqual( c , (nx + 1) + 1 )
c = grid.findCellCornerXY( 0.10 , 0.90 , 0 )
self.assertEqual( c , (nx + 1) )
c = grid.findCellCornerXY( 0.90 , 0.90 , 0 )
self.assertEqual( c , (nx + 1) + 1 )
c = grid.findCellCornerXY( 0.90 , 0.10 , 0 )
self.assertEqual( c , 1 )
def test_compressed_copy(self):
nx = 10
ny = 10
nz = 10
grid = GridGen.createRectangular( (nx,ny,nz) , (1,1,1) )
kw1 = EclKW("KW" , 1001 , EclDataType.ECL_INT )
with self.assertRaises(ValueError):
cp = grid.compressedKWCopy( kw1 )
def test_dxdydz(self):
nx = 10
ny = 10
nz = 10
grid = GridGen.createRectangular( (nx,ny,nz) , (2,3,4) )
(dx,dy,dz) = grid.getCellDims( active_index = 0 )
self.assertEqual( dx , 2 )
self.assertEqual( dy , 3 )
self.assertEqual( dz , 4 )
def test_create_3d_is_create_kw_inverse(self):
nx = 10
ny = 7
nz = 5
grid = GridGen.create_rectangular((nx, ny, nz), (1, 1, 1))
kw1 = EclKW("SWAT", nx * ny * nz, EclDataType.ECL_FLOAT)
for k, j, i in itertools.product(range(nz), range(ny), range(nx)):
kw1[i + j * nx + nx * ny * k] = i * j * k
numpy_3d = grid.create3D(kw1)
kw2 = grid.create_kw(numpy_3d, "SWAT", False)
self.assertEqual(kw2.name, "SWAT")
assert_allclose(grid.create3D(kw2), numpy_3d)
def test_create_3d_agrees_with_get_value(self):
nx = 5
ny = 3
nz = 2
grid = GridGen.createRectangular((nx, ny, nz), (1, 1, 1))
kw = EclKW("SWAT", nx * ny * nz, EclDataType.ECL_FLOAT)
for k, j, i in itertools.product(range(nz), range(ny), range(nx)):
kw[i + j * nx + nx * ny * k] = i * j * k
numpy_3d = grid.create3D(kw)
for k, j, i in itertools.product(range(nz), range(ny), range(nx)):
self.assertAlmostEqual(numpy_3d[i, j, k], grid.grid_value(kw, i, j, k))
def test_len(self):
nx = 10
ny = 11
nz = 12
actnum = EclKW( "ACTNUM" , nx*ny*nz , EclDataType.ECL_INT )
actnum[0] = 1
actnum[1] = 1
actnum[2] = 1
actnum[3] = 1
grid = GridGen.createRectangular( (nx,ny,nz) , (1,1,1), actnum = actnum)
self.assertEqual( len(grid) , nx*ny*nz )
self.assertEqual( grid.getNumActive( ) , 4 )
def test_export(self):
dims = (3, 3, 3)
coord = GridGen.create_coord(dims, (1,1,1))
zcorn = GridGen.create_zcorn(dims, (1,1,1), offset=0)
grid = EclGrid.create(dims, zcorn, coord, None)
self.assertEqual(zcorn, grid.export_zcorn())
self.assertEqual(coord, grid.export_coord())
def test_output_units(self):
n = 10
a = 1
grid = GridGen.createRectangular( (n,n,n), (a,a,a))
with TestAreaContext("python/ecl_grid/units"):
grid.save_EGRID( "CASE.EGRID" , output_unit = EclUnitTypeEnum.ECL_FIELD_UNITS )
f = EclFile("CASE.EGRID")
g = f["GRIDUNIT"][0]
self.assertEqual( g[0].strip( ) , "FEET" )
g2 = EclGrid("CASE.EGRID")
self.assertFloatEqual( g2.cell_volume( global_index = 0 ) , 3.28084*3.28084*3.28084)
grid.save_EGRID( "CASE.EGRID" )
f = EclFile("CASE.EGRID")
g = f["GRIDUNIT"][0]
self.assertEqual( g[0].strip( ) , "METRES" )
grid.save_EGRID( "CASE.EGRID" , output_unit = EclUnitTypeEnum.ECL_LAB_UNITS)
f = EclFile("CASE.EGRID")
g = f["GRIDUNIT"][0]
self.assertEqual( g[0].strip() , "CM" )
g2 = EclGrid("CASE.EGRID")
self.assertFloatEqual( g2.cell_volume( global_index = 0 ) , 100*100*100 )
def test_volume(self):
dim = (10,10,10)
dV = (2,2,2)
grids = createVolumeTestGridBase(dim, dV)
for grid in grids:
tot_vol = createWrapperGrid(grid).cell_volume(0)
cell_volumes = [grid.cell_volume(i) for i in range(grid.getGlobalSize())]
self.assertTrue(min(cell_volumes) >= 0)
self.assertFloatEqual(sum(cell_volumes), tot_vol)
def test_unique_containment(self):
test_base = createContainmentTestBase()
for steps_per_unit, grid in test_base:
wgrid = createWrapperGrid(grid)
(xmin, xmax), (ymin, ymax), (zmin, zmax) = getMinMaxValue(wgrid)
x_space = linspace(
xmin - 1, xmax + 1, int(xmax - xmin + 2) * steps_per_unit + 1
)
y_space = linspace(
ymin - 1, ymax + 1, int(ymax - ymin + 2) * steps_per_unit + 1
)
z_space = linspace(
zmin - 1, zmax + 1, int(zmax - zmin + 2) * steps_per_unit + 1
)
# limit amount of points tested by
# only testing every 3rd point
x_space = x_space[0:-1:3]
y_space = y_space[0:-1:3]
z_space = z_space[0:-1:3]
for x, y, z in itertools.product(x_space, y_space, z_space):
hits = [
grid.cell_contains(x, y, z, i)
for i in range(grid.getGlobalSize())
].count(True)
self.assertIn(hits, [0, 1])
expected = 1 if wgrid.cell_contains(x, y, z, 0) else 0
self.assertEqual(
expected,
hits,
'Expected %d for (%g,%g,%g), got %d' % (expected, x, y, z, hits)
)
def test_cell_corner_containment(self):
n = 4
d = 10
grid = GridGen.createRectangular( (n, n, n), (d, d, d))
for x, y, z in itertools.product(range(0, n*d+1, d), repeat=3):
self.assertEqual(
1,
[grid.cell_contains(x, y, z, i) for i in range(n**3)].count(True)
)
def test_cell_corner_containment_compatability(self):
grid = GridGen.createRectangular( (3,3,3), (1,1,1) )
for x, y, z in itertools.product(range(4), repeat=3):
for i in range(27):
if grid.cell_contains(x, y, z, i):
self.assertEqual(
CORNER_HOME[(x,y,z)],
i
)
def test_cell_face_containment(self):
n = 4
d = 10
grid = GridGen.createRectangular( (n, n, n), (d, d, d))
for x, y, z in itertools.product(range(d//2, n*d, d), repeat=3):
for axis, direction in itertools.product(range(3), [-1, 1]):
p = [x, y, z]
p[axis] = p[axis] + direction*d/2
self.assertEqual(
1,
[grid.cell_contains(p[0], p[1], p[2], i) for i in range(n**3)].count(True)
)
# This test generates a cell that is concave on ALL 6 sides
def test_concave_cell_containment(self):
points = [
(5, 5, 5),
(20, 10, 10),
(10, 20, 10),
(25, 25, 5),
(10, 10, 20),
(25, 5, 25),
(5, 25, 25),
(20, 20, 20)
]
grid = GridGen.create_single_cell_grid(points)
assertPoint = lambda p : self.assertTrue(
grid.cell_contains(p[0], p[1], p[2], 0)
)
assertNotPoint = lambda p : self.assertFalse(
grid.cell_contains(p[0], p[1], p[2], 0)
)
# Cell center
        assertPoint(average(points))
# "Side" center
assertNotPoint(average(points[0:4:]))
assertNotPoint(average(points[4:8:]))
assertNotPoint(average(points[1:8:2]))
assertNotPoint(average(points[0:8:2]))
assertNotPoint(average(points[0:8:4] + points[1:8:4]))
assertNotPoint(average(points[2:8:4] + points[3:8:4]))
# Corners
for p in points:
assertPoint(p)
# Edges
edges = ([(i, i+1) for i in range(0, 8, 2)] +
[(i, i+2) for i in [0, 1, 4, 5]] +
[(i, i+4) for i in range(4)] +
[(1,2), (2,7), (1,7), (4,7), (2,4), (4,1)])
for a,b in edges:
assertPoint(average([points[a], points[b]]))
# Epsilon inside from corners
middle_point = average(points)
for p in points:
assertPoint(average(20*[p] + [middle_point]))
        # Epsilon outside
middle_point[2] = 0
for p in points[0:4:]:
assertNotPoint(average(20*[p] + [middle_point]))
middle_point[2] = 30
for p in points[4:8:]:
assertNotPoint(average(20*[p] + [middle_point]))
# This test generates a cell that is strictly convex on ALL 6 sides
    def test_convex_cell_containment(self):
points = [
(10, 10, 10),
(25, 5, 5),
(5, 25, 5),
(20, 20, 10),
(5, 5, 25),
(20, 10, 20),
(10, 20, 20),
(25, 25, 25)
]
grid = GridGen.create_single_cell_grid(points)
assertPoint = lambda p : self.assertTrue(
grid.cell_contains(p[0], p[1], p[2], 0)
)
assertNotPoint = lambda p : self.assertFalse(
grid.cell_contains(p[0], p[1], p[2], 0)
)
# Cell center
        assertPoint(average(points))
# "Side" center
assertPoint(average(points[0:4:]))
assertPoint(average(points[4:8:]))
assertPoint(average(points[1:8:2]))
assertPoint(average(points[0:8:2]))
assertPoint(average(points[0:8:4] + points[1:8:4]))
assertPoint(average(points[2:8:4] + points[3:8:4]))
# Corners
for p in points:
assertPoint(p)
# Edges
edges = ([(i, i+1) for i in range(0, 8, 2)] +
[(i, i+2) for i in [0, 1, 4, 5]] +
[(i, i+4) for i in range(4)] +
[(1,2), (2,7), (1,7), (4,7), (2,4), (4,1)])
for a,b in edges:
assertPoint(average([points[a], points[b]]))
# Epsilon inside from corners
middle_point = average(points)
for p in points:
assertPoint(average(20*[p] + [middle_point]))
        # Epsilon outside
middle_point[2] = 0
for p in points[0:4:]:
assertNotPoint(average(20*[p] + [middle_point]))
middle_point[2] = 30
for p in points[4:8:]:
assertNotPoint(average(20*[p] + [middle_point]))
| Statoil/libecl | python/tests/ecl_tests/test_grid.py | Python | gpl-3.0 | 23,060 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-08-15 09:46
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('scoping', '0128_auto_20170808_0954'),
]
operations = [
migrations.CreateModel(
name='ProjectRoles',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('role', models.CharField(choices=[('OW', 'Owner'), ('AD', 'Admin'), ('RE', 'Reviewer'), ('VE', 'Viewer')], max_length=2)),
('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.RemoveField(
model_name='project',
name='owner',
),
migrations.AddField(
model_name='projectroles',
name='project',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='scoping.Project'),
),
migrations.AddField(
model_name='project',
name='users',
field=models.ManyToManyField(through='scoping.ProjectRoles', to=settings.AUTH_USER_MODEL),
),
]
| mcallaghan/tmv | BasicBrowser/scoping/migrations/0129_auto_20170815_0946.py | Python | gpl-3.0 | 1,435 |
'''
Created on Mar 20, 2011
@author: frederikns
'''
from model.flyweight import Flyweight
from collections import namedtuple
from model.static.database import database
from model.dynamic.inventory.item import Item
class SchematicTypeMap(Flyweight):
def __init__(self,schematic_id):
#prevents reinitializing
if "_inited" in self.__dict__:
return
self._inited = None
#prevents reinitializing
self.schematic_id = schematic_id
cursor = database.get_cursor(
"select * from planetSchematicTypeMap where schematicID={};".format(self.schematic_id))
types = list()
schematic_type = namedtuple("schematic_type", "item, is_input")
for row in cursor:
types.append(schematic_type(
item=Item(row["typeID"], row["quantity"]),
is_input=(row["isInput"])))
        cursor.close()
        # keep the parsed input/output items; the list was previously discarded
        self.types = types
| Iconik/eve-suite | src/model/static/planet/schematic_type_map.py | Python | gpl-3.0 | 914 |
#!/usr/bin/env python
# vim: set expandtab shiftwidth=4:
# http://www.voip-info.org/wiki/view/asterisk+manager+events
import sys,time
import simplejson as json
from stompy.simple import Client
import ConfigParser
config = ConfigParser.ConfigParser()
devel_config = ConfigParser.ConfigParser()
config.read('/opt/ucall/etc/config.ini')
devel_config.read('/opt/ucall/etc/devel_config.ini')
stomp_host = config.get('STOMP', 'host')
stomp_username = config.get('STOMP', 'username')
stomp_password = config.get('STOMP', 'password')
stomp_queue = "/queue/messages/" + devel_config.get('GENERAL', 'agent')
print '='*80
print 'Stomp host:', stomp_host
print 'Stomp username:', stomp_username
print 'Stomp password:', stomp_password
print 'Stomp queue:', stomp_queue
print '='*80
stomp = Client(stomp_host)
stomp.connect(stomp_username, stomp_password)
stomp.subscribe("jms.queue.msg.ctrl")
try:
    while True:
        message = stomp.get()
        print message.body
finally:
    stomp.disconnect()
| gryzz/uCall | utils/asterisk-connector/ami2stomp-get.py | Python | gpl-3.0 | 972 |
import re
import os
import pytz
from PIL import Image
from dateutil.parser import parse
from datetime import datetime
from decimal import Decimal
from django.template import Library
from django.conf import settings
from django.template.defaultfilters import stringfilter
from django.utils import formats
from django.utils.safestring import mark_safe
from django.utils.html import conditional_escape, strip_tags, urlize
from django.contrib.auth.models import AnonymousUser
from django.core.files.storage import default_storage
register = Library()
@register.filter(name="localize_date")
def localize_date(value, to_tz=None):
from timezones.utils import adjust_datetime_to_timezone
try:
if to_tz is None:
to_tz = settings.UI_TIME_ZONE
from_tz = settings.TIME_ZONE
return adjust_datetime_to_timezone(value, from_tz=from_tz, to_tz=to_tz)
except AttributeError:
return ''
localize_date.is_safe = True
@register.filter_function
def date_short(value, arg=None):
"""Formats a date according to the given format."""
from django.utils.dateformat import format
from tendenci.apps.site_settings.utils import get_setting
if not value:
return u''
if arg is None:
s_date_format = get_setting('site', 'global', 'dateformat')
if s_date_format:
arg = s_date_format
else:
arg = settings.SHORT_DATETIME_FORMAT
try:
return formats.date_format(value, arg)
except AttributeError:
try:
return format(value, arg)
except AttributeError:
return ''
date_short.is_safe = False
@register.filter_function
def date_long(value, arg=None):
"""Formats a date according to the given format."""
from django.utils.dateformat import format
from tendenci.apps.site_settings.utils import get_setting
if not value:
return u''
if arg is None:
s_date_format = get_setting('site', 'global', 'dateformatlong')
if s_date_format:
arg = s_date_format
else:
arg = settings.DATETIME_FORMAT
try:
return formats.date_format(value, arg)
except AttributeError:
try:
return format(value, arg)
except AttributeError:
return ''
date_long.is_safe = False
@register.filter_function
def date(value, arg=None):
"""Formats a date according to the given format."""
from django.utils.dateformat import format
if not value:
return u''
if arg is None:
arg = settings.DATETIME_FORMAT
else:
if arg == 'long':
return date_long(value)
if arg == 'short':
return date_short(value)
try:
return formats.date_format(value, arg)
except AttributeError:
try:
return format(value, arg)
except AttributeError:
return ''
date.is_safe = False
@register.filter_function
def order_by(queryset, args):
args = [x.strip() for x in args.split(',')]
return queryset.order_by(*args)
@register.filter_function
def str_to_date(string, args=None):
"""Takes a string and converts it to a datetime object"""
date = parse(string)
if date:
return date
return ''
@register.filter_function
def exif_to_date(s, fmt='%Y:%m:%d %H:%M:%S'):
"""
The format of datetime in exif is as follows:
%Y:%m:%d %H:%M:%S
Convert the string with this format to a datetime object.
"""
if not s:
return None
try:
return datetime.strptime(s, fmt)
except ValueError:
return None
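# e.g. exif_to_date('2014:03:20 15:30:00') -> datetime(2014, 3, 20, 15, 30)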
@register.filter_function
def in_group(user, group):
if group:
if isinstance(user, AnonymousUser):
return False
return group in [dict['pk'] for dict in user.group_set.values('pk')]
else:
return False
@register.filter
def domain(link):
from urlparse import urlparse
link = urlparse(link)
return link.hostname
@register.filter
def strip_template_tags(string):
p = re.compile('{[#{%][^#}%]+[%}#]}')
return re.sub(p, '', string)
@register.filter
@stringfilter
def stripentities(value):
"""Strips all [X]HTML tags."""
from django.utils.html import strip_entities
return strip_entities(value)
stripentities.is_safe = True
@register.filter
def format_currency(value):
"""format currency"""
from tendenci.apps.base.utils import tcurrency
return tcurrency(value)
format_currency.is_safe = True
@register.filter
def get_object(obj):
"""return obj.object if this obj has the attribute of object"""
if hasattr(obj, 'object'):
return obj.object
else:
return obj
@register.filter
def scope(object):
return dir(object)
@register.filter
def obj_type(object):
"""
Return object type
"""
return type(object)
@register.filter
def is_iterable(object):
"""
Return boolean
Is the object iterable or not
"""
try:
iter(object)
return True
except TypeError:
return False
@register.filter
@stringfilter
def basename(path):
from os.path import basename
return basename(path)
@register.filter
def date_diff(value, date_to_compare=None):
"""Compare two dates and return the difference in days"""
import datetime
if not isinstance(value, datetime.datetime):
return 0
if not isinstance(date_to_compare, datetime.datetime):
date_to_compare = datetime.datetime.now()
return (date_to_compare - value).days
@register.filter
def first_chars(string, arg):
""" returns the first x characters from a string """
string = str(string)
if arg:
if not arg.isdigit():
return string
return string[:int(arg)]
    return string
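# e.g. first_chars('template', '4') -> 'temp'; a non-digit arg such as
# first_chars('template', 'x') returns the string unchanged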
@register.filter
def rss_date(value, arg=None):
"""Formats a date according to the given format."""
from django.utils import formats
from django.utils.dateformat import format
from datetime import datetime
if not value:
return u''
else:
value = datetime(*value[:-3])
if arg is None:
arg = settings.DATE_FORMAT
try:
return formats.date_format(value, arg)
except AttributeError:
try:
return format(value, arg)
except AttributeError:
return ''
rss_date.is_safe = False
@register.filter()
def obfuscate_email(email, linktext=None, autoescape=None):
"""
Given a string representing an email address,
returns a mailto link with rot13 JavaScript obfuscation.
Accepts an optional argument to use as the link text;
otherwise uses the email address itself.
"""
if autoescape:
esc = conditional_escape
else:
esc = lambda x: x
email = re.sub('@', '\\\\100', re.sub('\.', '\\\\056', \
esc(email))).encode('rot13')
if linktext:
linktext = esc(linktext).encode('rot13')
else:
linktext = email
rotten_link = """<script type="text/javascript">document.write \
("<n uers=\\\"znvygb:%s\\\">%s<\\057n>".replace(/[a-zA-Z]/g, \
function(c){return String.fromCharCode((c<="Z"?90:122)>=\
(c=c.charCodeAt(0)+13)?c:c-26);}));</script>""" % (email, linktext)
return mark_safe(rotten_link)
obfuscate_email.needs_autoescape = True
@register.filter_function
def split_str(s, args):
"""
Split a string using the python string split method
"""
if args:
if isinstance(s, str):
splitter = args[0]
return s.split(splitter)
return s
return s
@register.filter_function
def str_basename(s):
"""
Get the basename using the python basename method
"""
return basename(s)
@register.filter
@stringfilter
def twitterize(value, autoescape=None):
value = strip_tags(value)
# Link URLs
value = urlize(value, nofollow=False, autoescape=autoescape)
# Link twitter usernames for the first person
value = re.sub(r'(^[^:]+)', r'<a href="http://twitter.com/\1">\1</a>', value)
# Link twitter usernames prefixed with @
value = re.sub(r'(\s+|\A)@([a-zA-Z0-9\-_]*)\b', r'\1<a href="http://twitter.com/\2">@\2</a>', value)
# Link hash tags
value = re.sub(r'(\s+|\A)#([a-zA-Z0-9\-_]*)\b', r'\1<a href="http://search.twitter.com/search?q=%23\2">#\2</a>', value)
return mark_safe(value)
twitterize.is_safe = True
twitterize.needs_autoescape = True
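# e.g. twitterize('@alice posts #django tips') links "@alice" to the
# twitter.com profile and "#django" to a search.twitter.com query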
@register.filter
@stringfilter
def twitterdate(value):
from datetime import datetime, timedelta
time = value.replace(" +0000", "")
dt = datetime.strptime(time, "%a, %d %b %Y %H:%M:%S")
return dt + timedelta(hours=-6)
@register.filter
def thumbnail(file, size='200x200'):
# defining the size
x, y = [int(x) for x in size.split('x')]
# defining the filename and the miniature filename
filehead, filetail = os.path.split(file.name)
basename, format = os.path.splitext(filetail)
miniature = basename + '_' + size + format
filename = file.name
miniature_filename = os.path.join(filehead, miniature)
filehead, filetail = os.path.split(file.url)
miniature_url = filehead + '/' + miniature
thumbnail_exist = False
if default_storage.exists(miniature_filename):
mt_filename = default_storage.modified_time(filename)
mt_miniature_filename = default_storage.modified_time(
miniature_filename)
if mt_filename > mt_miniature_filename:
# remove the miniature
default_storage.delete(miniature_filename)
else:
thumbnail_exist = True
# if the image wasn't already resized, resize it
if not thumbnail_exist:
if not default_storage.exists(filename):
return u''
image = Image.open(default_storage.open(filename))
image.thumbnail([x, y], Image.ANTIALIAS)
f = default_storage.open(miniature_filename, 'w')
image.save(f, image.format, quality=90, optimize=1)
f.close()
return miniature_url
@register.filter_function
def datedelta(dt, range_):
from datetime import timedelta
range_type = 'add'
# parse the range
if '+' in range_:
range_ = range_[1:len(range_)]
if '-' in range_:
range_type = 'subtract'
range_ = range_[1:len(range_)]
k, v = range_.split('=')
set_range = {
str(k): int(v)
}
# set the date
if range_type == 'add':
dt = dt + timedelta(**set_range)
if range_type == 'subtract':
dt = dt - timedelta(**set_range)
return dt
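# e.g. datedelta(dt, '+days=3') -> dt + 3 days, datedelta(dt, '-hours=2')
# -> dt - 2 hours; the leading sign picks addition or subtraction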
@register.filter
def split(str, splitter):
return str.split(splitter)
@register.filter
def tag_split(str):
str = "".join(str)
str = str.replace(", ", ",")
return str.split(",")
@register.filter
def make_range(value):
try:
value = int(value)
if value > 0:
return range(int(value))
return []
except:
return []
@register.filter
def underscore_space(value):
return value.replace("_", " ")
@register.filter
def format_string(value, arg):
return arg % value
@register.filter
def md5_gs(value, arg=None):
import hashlib
from datetime import datetime, timedelta
hashdt = ''
if arg and int(arg):
timestamp = datetime.now() + timedelta(hours=int(arg))
hashdt = hashlib.md5(timestamp.strftime("%Y;%m;%d;%H;%M").replace(';0', ';')).hexdigest()
return ''.join([value, hashdt])
@register.filter
def multiply(value, arg):
return Decimal(str(value)) * Decimal(str(arg))
@register.filter
def add_decimal(value, arg):
return Decimal(str(value)) + Decimal(str(arg))
@register.filter
def phonenumber(value):
if value:
# split number from extension or any text
x = re.split(r'([a-zA-Z]+)', value)
# clean number
y = ''.join(i for i in x[0] if i.isdigit())
if len(y) > 10: # has country code
code = y[:len(y)-10]
number = y[len(y)-10:]
if code == '1':
number = "(%s) %s-%s" %(number[:3], number[3:6], number[6:])
else:
number = "+%s %s %s %s" %(code, number[:3], number[3:6], number[6:])
else: # no country code
number = "(%s) %s-%s" %(y[:3], y[3:6], y[6:])
# attach additional text extension
ext = ''
for i in xrange(1, len(x)):
ext = ''.join((ext, x[i]))
if ext:
return ' '.join((number, ext))
else:
return number
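# e.g. phonenumber('15551234567ext9') -> '(555) 123-4567 ext9' (leading US
# country code stripped); phonenumber('5551234567') -> '(555) 123-4567'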
@register.filter
def timezone_label(value):
try:
now = datetime.now(pytz.timezone(value))
tzinfo = now.strftime("%z")
return "(GMT%s) %s" %(tzinfo, value)
except:
return ""
@register.filter
def field_to_string(value):
if isinstance(value, str) or isinstance(value, unicode):
return value
if isinstance(value, list):
if len(value) == 0:
return ""
if len(value) == 1:
return str(value[0])
if len(value) == 2:
return "%s and %s" % (value[0], value[1])
return ", ".join(value)
return str(value)
| alirizakeles/tendenci | tendenci/apps/base/templatetags/base_filters.py | Python | gpl-3.0 | 13,196 |
# Copyright (C) 2019 The ESPResSo project
#
# This file is part of ESPResSo.
#
# ESPResSo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import unittest as ut
import importlib_wrapper
tutorial, skipIfMissingFeatures = importlib_wrapper.configure_and_import(
"@TUTORIALS_DIR@/04-lattice_boltzmann/04-lattice_boltzmann_part2.py",
gpu=True, loops=400)
@skipIfMissingFeatures
class Tutorial(ut.TestCase):
system = tutorial.system
if __name__ == "__main__":
ut.main()
| mkuron/espresso | testsuite/scripts/tutorials/test_04-lattice_boltzmann_part2.py | Python | gpl-3.0 | 1,053 |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ProxyServe.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| nanshihui/ipProxyDec | ProxyServe/manage.py | Python | gpl-3.0 | 253 |
from __future__ import division
from math import sqrt, pi
import unittest
from sapphire import clusters
class SimpleClusterTest(unittest.TestCase):
def setUp(self):
self.cluster = clusters.SimpleCluster(size=100)
def test_station_positions_and_angles(self):
a = sqrt(100 ** 2 - 50 ** 2)
expected = [(0, 2 * a / 3, 0, 0), (0, 0, 0, 0),
(-50, -a / 3, 0, 2 * pi / 3), (50, -a / 3, 0, -2 * pi / 3)]
actual = [(station.x[0], station.y[0], station.z[0], station.angle[0])
for station in self.cluster.stations]
for actual_value, expected_value in zip(actual, expected):
self.assert_tuple_almost_equal(actual_value, expected_value)
def test_get_detector_coordinates(self):
for station in self.cluster.stations:
for detector in station.detectors:
detector.get_xy_coordinates()
def assert_tuple_almost_equal(self, actual, expected):
self.assertIsInstance(actual, tuple)
self.assertIsInstance(expected, tuple)
msg = "Tuples differ: %s != %s" % (str(actual), str(expected))
for actual_value, expected_value in zip(actual, expected):
self.assertAlmostEqual(actual_value, expected_value, msg=msg)
if __name__ == '__main__':
unittest.main()
| tomkooij/sapphire | sapphire/tests/test_clusters_acceptance.py | Python | gpl-3.0 | 1,327 |
from math import *
def f(e, x):
    # |expression| evaluated at x: '^' becomes Python's '**' and every 'x'
    # is substituted with the numeric value (wrapped in parentheses)
    return abs(eval(e.replace('^', '**').replace('x', '('+str(x)+')')))
def solve(e, a, b):
    # composite Simpson's rule over 2*N subintervals:
    # (b-a)/(6*N) * [f(a) + 4*sum(odd nodes) + 2*sum(even nodes) + f(b)]
    N = 1999
    t = f(e, a) + f(e, b)
    for i in range(1, 2*N):
        if i % 2 == 0:
            t += 2*f(e, a + (b-a)*i/2/N)
        else:
            t += 4*f(e, a + (b-a)*i/2/N)
    return (b-a)*t/6/N
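# Sanity check: Simpson's rule is exact for polynomials of degree <= 3,
# so solve('x^2', 0, 3) returns the integral of |x^2| over [0, 3],
# i.e. 9.0 up to floating-point error.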
def main():
##
with open('input.txt', 'r') as f:
data = f.read().splitlines()
a, b = map(int, data[0].split())
e = data[1]
##
# a, b = map(int, input().split())
# e = input()
ans = solve(e, a, b)
# print(ans)
##
ans = str(ans)
with open('output.txt', 'w') as f:
f.write(ans)
print('Done:')
if len(ans) > 500:
print(ans[:200] + '...')
else:
print(ans)
##
main()
| Lipen/LipenDev | Azeroth/Northrend/TP Olymp/TaskE.py | Python | gpl-3.0 | 687 |
"""Utility functions"""
import os
import difflib
def get_diff(str1, str2):
    """Returns git-diff-like diff between two strings"""
    expected = str1.splitlines(1)
    actual = str2.splitlines(1)
    # the inputs keep their newlines (splitlines(1)), so the default
    # lineterm='\n' is what we want; n=0 emits no context lines
    diff = difflib.unified_diff(expected, actual, n=0)
    return ''.join(diff)
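# e.g. get_diff('a\nb\n', 'a\nc\n') produces the '--- ' / '+++ ' headers
# followed by '@@ -2 +2 @@', '-b' and '+c', each on its own line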
def ensure_directory(path):
"""Creates the given directory, if not existing"""
os.makedirs(path, exist_ok=True)
def ensure_directory_of_file(file_path):
"""Creates the parent directory of a given file path, if not existing"""
ensure_directory(os.path.dirname(file_path))
def check_service_name(service_name):
"""Raises an exception if service_name is not valid"""
service_name_errors = get_service_name_errors(service_name)
if service_name_errors:
raise Exception('errors: %s' % str(service_name_errors))
def get_service_name_errors(service_name):
"""Checks if service_name is valid and returns errors if it is not.
Returns None if service_name is valid"""
errors = []
legal_characters = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789\\'
for index in range(len(service_name)):
if not service_name[index] in legal_characters:
errors.append('Illegal character in service name: %s at position %s'
% (service_name[index], index))
return errors
| perfalle/smartbox | common/utils.py | Python | gpl-3.0 | 1,360 |
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2011 NovaPoint Group LLC (<http://www.novapointgroup.com>)
# Copyright (C) 2004-2010 OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
from . import api
class stock_packages(osv.osv):
_inherit = "stock.packages"
def cancel_postage(self, cr, uid, ids, context=None):
for package in self.browse(cr, uid, ids, context=context):
if package.shipping_company_name.lower() != "usps":
continue
usps_config = api.v1.get_config(cr, uid, sale=package.pick_id.sale_id, context=context)
test = package.pick_id.logis_company.test_mode
if hasattr(package, "tracking_no") and package.tracking_no:
try:
response = api.v1.cancel_shipping(usps_config, package, shipper=None, test=test)
except Exception, e:
self.pool.get('stock.packages').write(cr, uid, package.id, {'ship_message': str(e)}, context=context)
return {
'type': 'ir.actions.client',
'tag': 'action_warn',
'name': _('Exception'),
'params': {'title': _('Exception'), 'text': str(e), 'sticky': True}
}
if hasattr(response, "error") or not response.refunds[0].refunded:
err = response.error if hasattr(response, "error") else response.refunds[0].message
self.pool.get('stock.packages').write(cr, uid, package.id, {'ship_message': err}, context=context)
return {
'type': 'ir.actions.client',
'tag': 'action_warn',
'name': _('Failure'),
'params': {
'title': _('Package #%s Cancellation Failed') % package.packge_no,
'text': err,
'sticky': True
}
}
else:
self.pool.get('stock.packages').write(cr, uid, package.id, {
'ship_message' : 'Shipment Cancelled', 'tracking_no': ''
}, context=context)
return super(stock_packages, self).cancel_postage(cr, uid, ids, context=context)
stock_packages()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: | lercloud/shipping_api_usps | stock_packages.py | Python | gpl-3.0 | 3,338 |
# Copyright (C) 2018 Philipp Hörist <philipp AT hoerist.com>
#
# This file is part of nbxmpp.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; If not, see <http://www.gnu.org/licenses/>.
from nbxmpp.namespaces import Namespace
from nbxmpp.protocol import NodeProcessed
from nbxmpp.structs import StanzaHandler
from nbxmpp.task import iq_request_task
from nbxmpp.errors import MalformedStanzaError
from nbxmpp.modules.base import BaseModule
from nbxmpp.modules.util import raise_if_error
from nbxmpp.modules.bookmarks.util import parse_bookmarks
from nbxmpp.modules.bookmarks.util import build_storage_node
BOOKMARK_OPTIONS = {
'pubsub#persist_items': 'true',
'pubsub#access_model': 'whitelist',
}
class PEPBookmarks(BaseModule):
_depends = {
'publish': 'PubSub',
'request_items': 'PubSub',
}
def __init__(self, client):
BaseModule.__init__(self, client)
self._client = client
self.handlers = [
StanzaHandler(name='message',
callback=self._process_pubsub_bookmarks,
ns=Namespace.PUBSUB_EVENT,
priority=16),
]
def _process_pubsub_bookmarks(self, _client, stanza, properties):
if not properties.is_pubsub_event:
return
if properties.pubsub_event.node != Namespace.BOOKMARKS:
return
item = properties.pubsub_event.item
if item is None:
# Retract, Deleted or Purged
return
try:
bookmarks = parse_bookmarks(item, self._log)
except MalformedStanzaError as error:
self._log.warning(error)
self._log.warning(stanza)
raise NodeProcessed
if not bookmarks:
self._log.info('Bookmarks removed')
return
pubsub_event = properties.pubsub_event._replace(data=bookmarks)
self._log.info('Received bookmarks from: %s', properties.jid)
for bookmark in bookmarks:
self._log.info(bookmark)
properties.pubsub_event = pubsub_event
@iq_request_task
def request_bookmarks(self):
_task = yield
items = yield self.request_items(Namespace.BOOKMARKS, max_items=1)
raise_if_error(items)
if not items:
yield []
bookmarks = parse_bookmarks(items[0], self._log)
for bookmark in bookmarks:
self._log.info(bookmark)
yield bookmarks
@iq_request_task
def store_bookmarks(self, bookmarks):
_task = yield
self._log.info('Store Bookmarks')
self.publish(Namespace.BOOKMARKS,
build_storage_node(bookmarks),
id_='current',
options=BOOKMARK_OPTIONS,
force_node_options=True)
| gajim/python-nbxmpp | nbxmpp/modules/bookmarks/pep_bookmarks.py | Python | gpl-3.0 | 3,379 |
# Copyright (C) 2014 - Oscar Campos <[email protected]>
# This program is Free Software see LICENSE file for details
import os
import json
import platform
from collections import defaultdict
from anaconda_go.lib import go
from anaconda_go.lib.plugin import typing
cachepath = {
'linux': os.path.join('~', '.local', 'share', 'anaconda', 'cache'),
'darwin': os.path.join('~', 'Library', 'Cache', 'anaconda'),
'windows': os.path.join(os.getenv('APPDATA') or '~', 'Anaconda', 'Cache')
}
cache_directory = os.path.expanduser(
cachepath.get(platform.system().lower())
)
PACKAGES_CACHE = defaultdict(lambda: [])
def append(package: typing.Dict) -> None:
"""Append the given package into the cache
"""
global PACKAGES_CACHE
if not package_in_cache(package):
PACKAGES_CACHE[go.GOROOT].append(package)
def package_in_cache(package: typing.Dict) -> bool:
"""Look for the given package in the cache and return true if is there
"""
for pkg in PACKAGES_CACHE[go.GOROOT]:
if pkg['ImportPath'] == package['ImportPath']:
return True
return False
def lookup(node_name: str='') -> typing.Dict:
"""Lookup the given node_name in the cache and return it back
"""
node = {}
if node_name == '':
node = PACKAGES_CACHE[go.GOROOT]
else:
for pkg in PACKAGES_CACHE[go.GOROOT]:
guru = pkg.get('Guru')
if guru is None:
continue
path = guru['package'].get('path')
if path is not None and path == node_name:
node = guru
break
for member in guru['package'].get('members', []):
if member.get('name') == node_name:
node = member
break
for method in member.get('methods', []):
if method['name'] == node_name:
node = method
break
return node
def persist_package_cache() -> None:
"""Write the contents of the package cache for this GOROOT into the disk
"""
gopath = go.GOPATH.replace(os.path.sep, '_')
cachefile = os.path.join(cache_directory, gopath, 'packages.cache')
if not os.path.exists(os.path.dirname(cachefile)):
os.makedirs(os.path.dirname(cachefile))
with open(cachefile, 'w') as fd:
json.dump(PACKAGES_CACHE[go.GOROOT], fd)
def load_package_cache() -> typing.List:
"""Load a previously stores package cache file
"""
global PACKAGES_CACHE
gopath = go.GOPATH.replace(os.path.sep, '_')
cachefile = os.path.join(cache_directory, gopath, 'packages.cache')
try:
with open(cachefile, 'r') as fd:
PACKAGES_CACHE[go.GOROOT] = json.load(fd)
except FileNotFoundError:
pass
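# Round-trip sketch (hypothetical package dict; assumes go.GOPATH/go.GOROOT
# are configured):
#   append({'ImportPath': 'fmt'})
#   persist_package_cache()    # writes packages.cache for this GOPATH
#   load_package_cache()       # restores PACKAGES_CACHE[go.GOROOT]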
| DamnWidget/anaconda_go | lib/cache.py | Python | gpl-3.0 | 2,829 |
from rest_framework import status
from rest_framework.test import APITestCase, APIClient
from django.core.urlresolvers import reverse
from cherrymusic.apps.core.models import User, Track
from cherrymusic.apps.api.v1.serializers import TrackSerializer
from cherrymusic.apps.api.v1.tests.views import UNAUTHENTICATED_RESPONSE
class TestTrackView(APITestCase):
fixtures = ['directory', 'file', 'playlist', 'track', 'user']
def setUp(self):
self.user = User.objects.get(pk=1)
self.client = APIClient(enforce_csrf_checks=True)
self.client.force_authenticate(user=self.user)
self.serializer = TrackSerializer()
def test_unauthenticated_track_query(self):
url = reverse('api:track-list')
client = APIClient()
response = client.get(url)
self.assertEqual(response.data, UNAUTHENTICATED_RESPONSE)
def test_track_query(self):
url = reverse('api:track-list')
response = self.client.get(url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
tracks = Track.objects.all()
tracks_json = [self.serializer.to_representation(track) for track in tracks]
self.assertEqual(response.data, tracks_json)
def test_track_detailed(self):
pk = 1
url = reverse('api:track-detail', args=[pk])
response = self.client.get(url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
track = Track.objects.get(pk=pk)
track_json = self.serializer.to_representation(track)
        self.assertEqual(response.data, track_json)
| pando85/cherrymusic | web/cherrymusic/apps/api/v1/tests/views/test_track_view.py | Python | gpl-3.0 | 1,605 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# tifffile.py
# Copyright (c) 2008-2014, Christoph Gohlke
# Copyright (c) 2008-2014, The Regents of the University of California
# Produced at the Laboratory for Fluorescence Dynamics
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holders nor the names of any
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Read and write image data from and to TIFF files.
Image and metadata can be read from TIFF, BigTIFF, OME-TIFF, STK, LSM, NIH,
SGI, ImageJ, MicroManager, FluoView, SEQ and GEL files.
Only a subset of the TIFF specification is supported, mainly uncompressed
and losslessly compressed 2**(0 to 6) bit integer, 16, 32 and 64-bit float,
grayscale and RGB(A) images, which are commonly used in bio-scientific imaging.
Specifically, reading JPEG and CCITT compressed image data or EXIF, IPTC, GPS,
and XMP metadata is not implemented.
Only primary info records are read for STK, FluoView, MicroManager, and
NIH image formats.
TIFF, the Tagged Image File Format, is under the control of Adobe Systems.
BigTIFF allows for files greater than 4 GB. STK, LSM, FluoView, SGI, SEQ, GEL,
and OME-TIFF, are custom extensions defined by Molecular Devices (Universal
Imaging Corporation), Carl Zeiss MicroImaging, Olympus, Silicon Graphics
International, Media Cybernetics, Molecular Dynamics, and the Open Microscopy
Environment consortium respectively.
For command line usage run ``python tifffile.py --help``
:Author:
`Christoph Gohlke <http://www.lfd.uci.edu/~gohlke/>`_
:Organization:
Laboratory for Fluorescence Dynamics, University of California, Irvine
:Version: 2014.08.24
Requirements
------------
* `CPython 2.7 or 3.4 <http://www.python.org>`_
* `Numpy 1.8.2 <http://www.numpy.org>`_
* `Matplotlib 1.4 <http://www.matplotlib.org>`_ (optional for plotting)
* `Tifffile.c 2013.11.05 <http://www.lfd.uci.edu/~gohlke/>`_
(recommended for faster decoding of PackBits and LZW encoded strings)
Notes
-----
The API is not stable yet and might change between revisions.
Tested on little-endian platforms only.
Other Python packages and modules for reading bio-scientific TIFF files:
* `Imread <http://luispedro.org/software/imread>`_
* `PyLibTiff <http://code.google.com/p/pylibtiff>`_
* `SimpleITK <http://www.simpleitk.org>`_
* `PyLSM <https://launchpad.net/pylsm>`_
* `PyMca.TiffIO.py <http://pymca.sourceforge.net/>`_ (same as fabio.TiffIO)
* `BioImageXD.Readers <http://www.bioimagexd.net/>`_
* `Cellcognition.io <http://cellcognition.org/>`_
* `CellProfiler.bioformats
<https://github.com/CellProfiler/python-bioformats>`_
Acknowledgements
----------------
* Egor Zindy, University of Manchester, for cz_lsm_scan_info specifics.
* Wim Lewis for a bug fix and some read_cz_lsm functions.
* Hadrien Mary for help on reading MicroManager files.
References
----------
(1) TIFF 6.0 Specification and Supplements. Adobe Systems Incorporated.
http://partners.adobe.com/public/developer/tiff/
(2) TIFF File Format FAQ. http://www.awaresystems.be/imaging/tiff/faq.html
(3) MetaMorph Stack (STK) Image File Format.
http://support.meta.moleculardevices.com/docs/t10243.pdf
(4) Image File Format Description LSM 5/7 Release 6.0 (ZEN 2010).
Carl Zeiss MicroImaging GmbH. BioSciences. May 10, 2011
(5) File Format Description - LSM 5xx Release 2.0.
http://ibb.gsf.de/homepage/karsten.rodenacker/IDL/Lsmfile.doc
(6) The OME-TIFF format.
http://www.openmicroscopy.org/site/support/file-formats/ome-tiff
(7) UltraQuant(r) Version 6.0 for Windows Start-Up Guide.
http://www.ultralum.com/images%20ultralum/pdf/UQStart%20Up%20Guide.pdf
(8) Micro-Manager File Formats.
http://www.micro-manager.org/wiki/Micro-Manager_File_Formats
(9) Tags for TIFF and Related Specifications. Digital Preservation.
http://www.digitalpreservation.gov/formats/content/tiff_tags.shtml
Examples
--------
>>> data = numpy.random.rand(5, 301, 219)
>>> imsave('temp.tif', data)
>>> image = imread('temp.tif')
>>> numpy.testing.assert_array_equal(image, data)
>>> with TiffFile('temp.tif') as tif:
... images = tif.asarray()
... for page in tif:
... for tag in page.tags.values():
... t = tag.name, tag.value
... image = page.asarray()
"""
import sys
import os
import re
import glob
import math
import zlib
import time
import json
import struct
import warnings
import tempfile
import datetime
import collections
from fractions import Fraction
from xml.etree import cElementTree as etree
import numpy
try:
import _tifffile
except ImportError:
warnings.warn(
"failed to import the optional _tifffile C extension module.\n"
"Loading of some compressed images will be slow.\n"
"Tifffile.c can be obtained at http://www.lfd.uci.edu/~gohlke/")
__version__ = '2014.08.24'
__docformat__ = 'restructuredtext en'
__all__ = ('imsave', 'imread', 'imshow', 'TiffFile', 'TiffWriter',
'TiffSequence')
def imsave(filename, data, **kwargs):
"""Write image data to TIFF file.
Refer to the TiffWriter class and member functions for documentation.
Parameters
----------
filename : str
Name of file to write.
data : array_like
Input image. The last dimensions are assumed to be image depth,
height, width, and samples.
kwargs : dict
Parameters 'byteorder', 'bigtiff', and 'software' are passed to
the TiffWriter class.
Parameters 'photometric', 'planarconfig', 'resolution',
'description', 'compress', 'volume', and 'extratags' are passed to
the TiffWriter.save function.
Examples
--------
>>> data = numpy.random.rand(2, 5, 3, 301, 219)
>>> description = '{"shape": %s}' % str(list(data.shape))
>>> imsave('temp.tif', data, compress=6,
... extratags=[(270, 's', 0, description, True)])
"""
tifargs = {}
for key in ('byteorder', 'bigtiff', 'software', 'writeshape'):
if key in kwargs:
tifargs[key] = kwargs[key]
del kwargs[key]
if 'writeshape' not in kwargs:
kwargs['writeshape'] = True
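    # switch to BigTIFF automatically when the raw data alone approaches the
    # classic-TIFF 2 GiB file limit (2000 * 2**20 bytes, leaving headroom
    # for tags and directory structures)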
if 'bigtiff' not in tifargs and data.size * \
data.dtype.itemsize > 2000 * 2 ** 20:
tifargs['bigtiff'] = True
with TiffWriter(filename, **tifargs) as tif:
tif.save(data, **kwargs)
class TiffWriter(object):
"""Write image data to TIFF file.
TiffWriter instances must be closed using the close method, which is
automatically called when using the 'with' statement.
Examples
--------
>>> data = numpy.random.rand(2, 5, 3, 301, 219)
>>> with TiffWriter('temp.tif', bigtiff=True) as tif:
... for i in range(data.shape[0]):
... tif.save(data[i], compress=6)
"""
TYPES = {'B': 1, 's': 2, 'H': 3, 'I': 4, '2I': 5, 'b': 6,
'h': 8, 'i': 9, 'f': 11, 'd': 12, 'Q': 16, 'q': 17}
TAGS = {
'new_subfile_type': 254, 'subfile_type': 255,
'image_width': 256, 'image_length': 257, 'bits_per_sample': 258,
'compression': 259, 'photometric': 262, 'fill_order': 266,
'document_name': 269, 'image_description': 270, 'strip_offsets': 273,
'orientation': 274, 'samples_per_pixel': 277, 'rows_per_strip': 278,
'strip_byte_counts': 279, 'x_resolution': 282, 'y_resolution': 283,
'planar_configuration': 284, 'page_name': 285, 'resolution_unit': 296,
'software': 305, 'datetime': 306, 'predictor': 317, 'color_map': 320,
'tile_width': 322, 'tile_length': 323, 'tile_offsets': 324,
'tile_byte_counts': 325, 'extra_samples': 338, 'sample_format': 339,
'image_depth': 32997, 'tile_depth': 32998}
def __init__(self, filename, bigtiff=False, byteorder=None,
software='tifffile.py'):
"""Create a new TIFF file for writing.
Use bigtiff=True when creating files greater than 2 GB.
Parameters
----------
filename : str
Name of file to write.
bigtiff : bool
If True, the BigTIFF format is used.
byteorder : {'<', '>'}
The endianness of the data in the file.
By default this is the system's native byte order.
software : str
Name of the software used to create the image.
Saved with the first page only.
"""
if byteorder not in (None, '<', '>'):
raise ValueError("invalid byteorder %s" % byteorder)
if byteorder is None:
byteorder = '<' if sys.byteorder == 'little' else '>'
self._byteorder = byteorder
self._software = software
self._fh = open(filename, 'wb')
self._fh.write({'<': b'II', '>': b'MM'}[byteorder])
if bigtiff:
self._bigtiff = True
self._offset_size = 8
self._tag_size = 20
self._numtag_format = 'Q'
self._offset_format = 'Q'
self._val_format = '8s'
self._fh.write(struct.pack(byteorder + 'HHH', 43, 8, 0))
else:
self._bigtiff = False
self._offset_size = 4
self._tag_size = 12
self._numtag_format = 'H'
self._offset_format = 'I'
self._val_format = '4s'
self._fh.write(struct.pack(byteorder + 'H', 42))
# first IFD
self._ifd_offset = self._fh.tell()
self._fh.write(struct.pack(byteorder + self._offset_format, 0))
def save(self, data, photometric=None, planarconfig=None, resolution=None,
description=None, volume=False, writeshape=False, compress=0,
extratags=()):
"""Write image data to TIFF file.
Image data are written in one stripe per plane.
Dimensions larger than 2 to 4 (depending on photometric mode, planar
configuration, and SGI mode) are flattened and saved as separate pages.
The 'sample_format' and 'bits_per_sample' TIFF tags are derived from
the data type.
Parameters
----------
data : array_like
Input image. The last dimensions are assumed to be image depth,
height, width, and samples.
photometric : {'minisblack', 'miniswhite', 'rgb'}
The color space of the image data.
By default this setting is inferred from the data shape.
planarconfig : {'contig', 'planar'}
Specifies if samples are stored contiguous or in separate planes.
By default this setting is inferred from the data shape.
'contig': last dimension contains samples.
'planar': third last dimension contains samples.
resolution : (float, float) or ((int, int), (int, int))
X and Y resolution in dots per inch as float or rational numbers.
description : str
The subject of the image. Saved with the first page only.
compress : int
Values from 0 to 9 controlling the level of zlib compression.
If 0, data are written uncompressed (default).
volume : bool
If True, volume data are stored in one tile (if applicable) using
the SGI image_depth and tile_depth tags.
Image width and depth must be multiple of 16.
Few software can read this format, e.g. MeVisLab.
writeshape : bool
If True, write the data shape to the image_description tag
if necessary and no other description is given.
extratags: sequence of tuples
Additional tags as [(code, dtype, count, value, writeonce)].
code : int
The TIFF tag Id.
dtype : str
Data type of items in 'value' in Python struct format.
One of B, s, H, I, 2I, b, h, i, f, d, Q, or q.
count : int
Number of data values. Not used for string values.
value : sequence
'Count' values compatible with 'dtype'.
writeonce : bool
If True, the tag is written to the first page only.
"""
if photometric not in (None, 'minisblack', 'miniswhite', 'rgb'):
raise ValueError("invalid photometric %s" % photometric)
if planarconfig not in (None, 'contig', 'planar'):
raise ValueError("invalid planarconfig %s" % planarconfig)
if not 0 <= compress <= 9:
raise ValueError("invalid compression level %s" % compress)
fh = self._fh
byteorder = self._byteorder
numtag_format = self._numtag_format
val_format = self._val_format
offset_format = self._offset_format
offset_size = self._offset_size
tag_size = self._tag_size
data = numpy.asarray(
data,
dtype=byteorder +
data.dtype.char,
order='C')
data_shape = shape = data.shape
data = numpy.atleast_2d(data)
# normalize shape of data
samplesperpixel = 1
extrasamples = 0
if volume and data.ndim < 3:
volume = False
if photometric is None:
if planarconfig:
photometric = 'rgb'
elif data.ndim > 2 and shape[-1] in (3, 4):
photometric = 'rgb'
elif volume and data.ndim > 3 and shape[-4] in (3, 4):
photometric = 'rgb'
elif data.ndim > 2 and shape[-3] in (3, 4):
photometric = 'rgb'
else:
photometric = 'minisblack'
if planarconfig and len(shape) <= (3 if volume else 2):
planarconfig = None
photometric = 'minisblack'
if photometric == 'rgb':
if len(shape) < 3:
raise ValueError("not a RGB(A) image")
if len(shape) < 4:
volume = False
if planarconfig is None:
if shape[-1] in (3, 4):
planarconfig = 'contig'
elif shape[-4 if volume else -3] in (3, 4):
planarconfig = 'planar'
elif shape[-1] > shape[-4 if volume else -3]:
planarconfig = 'planar'
else:
planarconfig = 'contig'
if planarconfig == 'contig':
data = data.reshape((-1, 1) + shape[(-4 if volume else -3):])
samplesperpixel = data.shape[-1]
else:
data = data.reshape(
(-1,) + shape[(-4 if volume else -3):] + (1,))
samplesperpixel = data.shape[1]
if samplesperpixel > 3:
extrasamples = samplesperpixel - 3
elif planarconfig and len(shape) > (3 if volume else 2):
if planarconfig == 'contig':
data = data.reshape((-1, 1) + shape[(-4 if volume else -3):])
samplesperpixel = data.shape[-1]
else:
data = data.reshape(
(-1,) + shape[(-4 if volume else -3):] + (1,))
samplesperpixel = data.shape[1]
extrasamples = samplesperpixel - 1
else:
planarconfig = None
# remove trailing 1s
while len(shape) > 2 and shape[-1] == 1:
shape = shape[:-1]
if len(shape) < 3:
volume = False
if False and (
len(shape) > (3 if volume else 2) and shape[-1] < 5 and
all(shape[-1] < i
for i in shape[(-4 if volume else -3):-1])):
# DISABLED: non-standard TIFF, e.g. (220, 320, 2)
planarconfig = 'contig'
samplesperpixel = shape[-1]
data = data.reshape((-1, 1) + shape[(-4 if volume else -3):])
else:
data = data.reshape(
(-1, 1) + shape[(-3 if volume else -2):] + (1,))
if samplesperpixel == 2:
warnings.warn("writing non-standard TIFF (samplesperpixel 2)")
if volume and (data.shape[-2] % 16 or data.shape[-3] % 16):
warnings.warn("volume width or length are not multiple of 16")
volume = False
data = numpy.swapaxes(data, 1, 2)
data = data.reshape(
(data.shape[0] * data.shape[1],) + data.shape[2:])
# data.shape is now normalized 5D or 6D, depending on volume
# (pages, planar_samples, (depth,) height, width, contig_samples)
assert len(data.shape) in (5, 6)
shape = data.shape
bytestr = bytes if sys.version[0] == '2' else (
lambda x: bytes(x) if isinstance(x, str) else x)
tags = [] # list of (code, ifdentry, ifdvalue, writeonce)
if volume:
# use tiles to save volume data
tag_byte_counts = TiffWriter.TAGS['tile_byte_counts']
tag_offsets = TiffWriter.TAGS['tile_offsets']
else:
# else use strips
tag_byte_counts = TiffWriter.TAGS['strip_byte_counts']
tag_offsets = TiffWriter.TAGS['strip_offsets']
def pack(fmt, *val):
return struct.pack(byteorder + fmt, *val)
def addtag(code, dtype, count, value, writeonce=False):
# Compute ifdentry & ifdvalue bytes from code, dtype, count, value.
# Append (code, ifdentry, ifdvalue, writeonce) to tags list.
code = int(TiffWriter.TAGS.get(code, code))
try:
tifftype = TiffWriter.TYPES[dtype]
except KeyError:
raise ValueError("unknown dtype %s" % dtype)
rawcount = count
if dtype == 's':
value = bytestr(value) + b'\0'
count = rawcount = len(value)
value = (value, )
if len(dtype) > 1:
count *= int(dtype[:-1])
dtype = dtype[-1]
ifdentry = [pack('HH', code, tifftype),
pack(offset_format, rawcount)]
ifdvalue = None
if count == 1:
if isinstance(value, (tuple, list)):
value = value[0]
ifdentry.append(pack(val_format, pack(dtype, value)))
elif struct.calcsize(dtype) * count <= offset_size:
ifdentry.append(pack(val_format,
pack(str(count) + dtype, *value)))
else:
ifdentry.append(pack(offset_format, 0))
ifdvalue = pack(str(count) + dtype, *value)
tags.append((code, b''.join(ifdentry), ifdvalue, writeonce))
def rational(arg, max_denominator=1000000):
            # return numerator and denominator from a float or two integers
try:
f = Fraction.from_float(arg)
except TypeError:
f = Fraction(arg[0], arg[1])
f = f.limit_denominator(max_denominator)
return f.numerator, f.denominator
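        # rational() examples (illustrative): rational(0.3) -> (3, 10),
        # rational((72, 2)) -> (36, 1)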
if self._software:
addtag('software', 's', 0, self._software, writeonce=True)
self._software = None # only save to first page
if description:
addtag('image_description', 's', 0, description, writeonce=True)
elif writeshape and shape[0] > 1 and shape != data_shape:
addtag('image_description', 's', 0,
"shape=(%s)" % (",".join('%i' % i for i in data_shape)),
writeonce=True)
addtag('datetime', 's', 0,
datetime.datetime.now().strftime("%Y:%m:%d %H:%M:%S"),
writeonce=True)
addtag('compression', 'H', 1, 32946 if compress else 1)
addtag('orientation', 'H', 1, 1)
addtag('image_width', 'I', 1, shape[-2])
addtag('image_length', 'I', 1, shape[-3])
if volume:
addtag('image_depth', 'I', 1, shape[-4])
addtag('tile_depth', 'I', 1, shape[-4])
addtag('tile_width', 'I', 1, shape[-2])
addtag('tile_length', 'I', 1, shape[-3])
addtag('new_subfile_type', 'I', 1, 0 if shape[0] == 1 else 2)
addtag('sample_format', 'H', 1,
{'u': 1, 'i': 2, 'f': 3, 'c': 6}[data.dtype.kind])
addtag('photometric', 'H', 1,
{'miniswhite': 0, 'minisblack': 1, 'rgb': 2}[photometric])
addtag('samples_per_pixel', 'H', 1, samplesperpixel)
if planarconfig and samplesperpixel > 1:
addtag('planar_configuration', 'H', 1, 1
if planarconfig == 'contig' else 2)
addtag('bits_per_sample', 'H', samplesperpixel,
(data.dtype.itemsize * 8, ) * samplesperpixel)
else:
addtag('bits_per_sample', 'H', 1, data.dtype.itemsize * 8)
if extrasamples:
if photometric == 'rgb' and extrasamples == 1:
addtag('extra_samples', 'H', 1, 1) # associated alpha channel
else:
addtag('extra_samples', 'H', extrasamples, (0,) * extrasamples)
if resolution:
addtag('x_resolution', '2I', 1, rational(resolution[0]))
addtag('y_resolution', '2I', 1, rational(resolution[1]))
addtag('resolution_unit', 'H', 1, 2)
addtag('rows_per_strip', 'I', 1,
shape[-3] * (shape[-4] if volume else 1))
# use one strip or tile per plane
strip_byte_counts = (data[0, 0].size * data.dtype.itemsize,) * shape[1]
addtag(tag_byte_counts, offset_format, shape[1], strip_byte_counts)
addtag(tag_offsets, offset_format, shape[1], (0, ) * shape[1])
# add extra tags from users
for t in extratags:
addtag(*t)
# the entries in an IFD must be sorted in ascending order by tag code
tags = sorted(tags, key=lambda x: x[0])
if not self._bigtiff and (fh.tell() + data.size * data.dtype.itemsize
> 2 ** 31 - 1):
raise ValueError("data too large for non-bigtiff file")
for pageindex in range(shape[0]):
# update pointer at ifd_offset
pos = fh.tell()
fh.seek(self._ifd_offset)
fh.write(pack(offset_format, pos))
fh.seek(pos)
# write ifdentries
fh.write(pack(numtag_format, len(tags)))
tag_offset = fh.tell()
fh.write(b''.join(t[1] for t in tags))
self._ifd_offset = fh.tell()
fh.write(pack(offset_format, 0)) # offset to next IFD
# write tag values and patch offsets in ifdentries, if necessary
for tagindex, tag in enumerate(tags):
if tag[2]:
pos = fh.tell()
fh.seek(tag_offset + tagindex * tag_size + offset_size + 4)
fh.write(pack(offset_format, pos))
fh.seek(pos)
if tag[0] == tag_offsets:
strip_offsets_offset = pos
elif tag[0] == tag_byte_counts:
strip_byte_counts_offset = pos
fh.write(tag[2])
# write image data
data_offset = fh.tell()
if compress:
strip_byte_counts = []
for plane in data[pageindex]:
plane = zlib.compress(plane, compress)
strip_byte_counts.append(len(plane))
fh.write(plane)
else:
                # if this fails, try updating Python/numpy
data[pageindex].tofile(fh)
fh.flush()
# update strip and tile offsets and byte_counts if necessary
pos = fh.tell()
for tagindex, tag in enumerate(tags):
if tag[0] == tag_offsets: # strip or tile offsets
if tag[2]:
fh.seek(strip_offsets_offset)
strip_offset = data_offset
for size in strip_byte_counts:
fh.write(pack(offset_format, strip_offset))
strip_offset += size
else:
fh.seek(tag_offset + tagindex * tag_size +
offset_size + 4)
fh.write(pack(offset_format, data_offset))
elif tag[0] == tag_byte_counts: # strip or tile byte_counts
if compress:
if tag[2]:
fh.seek(strip_byte_counts_offset)
for size in strip_byte_counts:
fh.write(pack(offset_format, size))
else:
fh.seek(tag_offset + tagindex * tag_size +
offset_size + 4)
fh.write(pack(offset_format, strip_byte_counts[0]))
break
fh.seek(pos)
fh.flush()
# remove tags that should be written only once
if pageindex == 0:
tags = [t for t in tags if not t[-1]]
def close(self):
self._fh.close()
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
self.close()
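# For reference, the header bytes TiffWriter emits can be reproduced with the
# struct module directly (illustrative doctest):
# >>> struct.pack('<2sH', b'II', 42)               # little-endian classic TIFF
# b'II*\x00'
# >>> struct.pack('>2sHHH', b'MM', 43, 8, 0)[:4]   # big-endian BigTIFF prefix
# b'MM\x00+'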
def imread(files, **kwargs):
"""Return image data from TIFF file(s) as numpy array.
The first image series is returned if no arguments are provided.
Parameters
----------
files : str or list
File name, glob pattern, or list of file names.
key : int, slice, or sequence of page indices
Defines which pages to return as array.
series : int
Defines which series of pages in file to return as array.
multifile : bool
If True (default), OME-TIFF data may include pages from multiple files.
pattern : str
Regular expression pattern that matches axes names and indices in
file names.
kwargs : dict
Additional parameters passed to the TiffFile or TiffSequence asarray
function.
Examples
--------
>>> im = imread('test.tif', key=0)
>>> im.shape
(256, 256, 4)
>>> ims = imread(['test.tif', 'test.tif'])
>>> ims.shape
(2, 256, 256, 4)
"""
kwargs_file = {}
if 'multifile' in kwargs:
kwargs_file['multifile'] = kwargs['multifile']
del kwargs['multifile']
else:
kwargs_file['multifile'] = True
kwargs_seq = {}
if 'pattern' in kwargs:
kwargs_seq['pattern'] = kwargs['pattern']
del kwargs['pattern']
if isinstance(files, str) and any(i in files for i in '?*'):
files = glob.glob(files)
if not files:
raise ValueError('no files found')
if len(files) == 1:
files = files[0]
if isinstance(files, str):
with TiffFile(files, **kwargs_file) as tif:
return tif.asarray(**kwargs)
else:
with TiffSequence(files, **kwargs_seq) as imseq:
return imseq.asarray(**kwargs)
class lazyattr(object):
"""Lazy object attribute whose value is computed on first access."""
__slots__ = ('func', )
def __init__(self, func):
self.func = func
def __get__(self, instance, owner):
if instance is None:
return self
value = self.func(instance)
if value is NotImplemented:
return getattr(super(owner, instance), self.func.__name__)
setattr(instance, self.func.__name__, value)
return value
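# A minimal illustrative sketch (demo names are assumptions, not part of the
# original module): the first access runs the decorated function, after which
# setattr caches the result on the instance and the descriptor is bypassed.
class _LazyAttrDemo(object):
    @lazyattr
    def answer(self):
        # computed on first access only, then shadowed by an instance attribute
        return 6 * 7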
class TiffFile(object):
"""Read image and metadata from TIFF, STK, LSM, and FluoView files.
TiffFile instances must be closed using the close method, which is
automatically called when using the 'with' statement.
Attributes
----------
pages : list
All TIFF pages in file.
series : list of Records(shape, dtype, axes, TiffPages)
TIFF pages with compatible shapes and types.
micromanager_metadata: dict
Extra MicroManager non-TIFF metadata in the file, if exists.
All attributes are read-only.
Examples
--------
>>> with TiffFile('test.tif') as tif:
... data = tif.asarray()
... data.shape
(256, 256, 4)
"""
def __init__(self, arg, name=None, offset=None, size=None,
multifile=True, multifile_close=True):
"""Initialize instance from file.
Parameters
----------
arg : str or open file
Name of file or open file object.
The file objects are closed in TiffFile.close().
name : str
Optional name of file in case 'arg' is a file handle.
offset : int
Optional start position of embedded file. By default this is
the current file position.
size : int
Optional size of embedded file. By default this is the number
of bytes from the 'offset' to the end of the file.
multifile : bool
If True (default), series may include pages from multiple files.
Currently applies to OME-TIFF only.
multifile_close : bool
If True (default), keep the handles of other files in multifile
series closed. This is inefficient when few files refer to
many pages. If False, the C runtime may run out of resources.
"""
self._fh = FileHandle(arg, name=name, offset=offset, size=size)
self.offset_size = None
self.pages = []
self._multifile = bool(multifile)
self._multifile_close = bool(multifile_close)
self._files = {self._fh.name: self} # cache of TiffFiles
try:
self._fromfile()
except Exception:
self._fh.close()
raise
@property
def filehandle(self):
"""Return file handle."""
return self._fh
@property
def filename(self):
"""Return name of file handle."""
return self._fh.name
def close(self):
"""Close open file handle(s)."""
for tif in self._files.values():
tif._fh.close()
self._files = {}
def _fromfile(self):
"""Read TIFF header and all page records from file."""
self._fh.seek(0)
try:
self.byteorder = {b'II': '<', b'MM': '>'}[self._fh.read(2)]
except KeyError:
raise ValueError("not a valid TIFF file")
version = struct.unpack(self.byteorder + 'H', self._fh.read(2))[0]
if version == 43: # BigTiff
self.offset_size, zero = struct.unpack(self.byteorder + 'HH',
self._fh.read(4))
if zero or self.offset_size != 8:
raise ValueError("not a valid BigTIFF file")
elif version == 42:
self.offset_size = 4
else:
raise ValueError("not a TIFF file")
self.pages = []
while True:
try:
page = TiffPage(self)
self.pages.append(page)
except StopIteration:
break
if not self.pages:
raise ValueError("empty TIFF file")
if self.is_micromanager:
# MicroManager files contain metadata not stored in TIFF tags.
self.micromanager_metadata = read_micromanager_metadata(self._fh)
if self.is_lsm:
self._fix_lsm_strip_offsets()
self._fix_lsm_strip_byte_counts()
def _fix_lsm_strip_offsets(self):
"""Unwrap strip offsets for LSM files greater than 4 GB."""
for series in self.series:
wrap = 0
previous_offset = 0
for page in series.pages:
strip_offsets = []
for current_offset in page.strip_offsets:
if current_offset < previous_offset:
wrap += 2 ** 32
strip_offsets.append(current_offset + wrap)
previous_offset = current_offset
page.strip_offsets = tuple(strip_offsets)
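    # Illustrative example: wrapped 32-bit strip offsets (4294967000, 200, 400)
    # unwrap to (4294967000, 200 + 2**32, 400 + 2**32).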
def _fix_lsm_strip_byte_counts(self):
"""Set strip_byte_counts to size of compressed data.
The strip_byte_counts tag in LSM files contains the number of bytes
for the uncompressed data.
"""
if not self.pages:
return
strips = {}
for page in self.pages:
assert len(page.strip_offsets) == len(page.strip_byte_counts)
for offset, bytecount in zip(page.strip_offsets,
page.strip_byte_counts):
strips[offset] = bytecount
offsets = sorted(strips.keys())
offsets.append(min(offsets[-1] + strips[offsets[-1]], self._fh.size))
for i, offset in enumerate(offsets[:-1]):
strips[offset] = min(strips[offset], offsets[i + 1] - offset)
for page in self.pages:
if page.compression:
page.strip_byte_counts = tuple(
strips[offset] for offset in page.strip_offsets)
@lazyattr
def series(self):
"""Return series of TiffPage with compatible shape and properties."""
if not self.pages:
return []
series = []
page0 = self.pages[0]
if self.is_ome:
series = self._omeseries()
elif self.is_fluoview:
dims = {b'X': 'X', b'Y': 'Y', b'Z': 'Z', b'T': 'T',
b'WAVELENGTH': 'C', b'TIME': 'T', b'XY': 'R',
b'EVENT': 'V', b'EXPOSURE': 'L'}
mmhd = list(reversed(page0.mm_header.dimensions))
series = [Record(
axes=''.join(dims.get(i[0].strip().upper(), 'Q')
for i in mmhd if i[1] > 1),
shape=tuple(int(i[1]) for i in mmhd if i[1] > 1),
pages=self.pages, dtype=numpy.dtype(page0.dtype))]
elif self.is_lsm:
lsmi = page0.cz_lsm_info
axes = CZ_SCAN_TYPES[lsmi.scan_type]
if page0.is_rgb:
axes = axes.replace('C', '').replace('XY', 'XYC')
axes = axes[::-1]
shape = tuple(getattr(lsmi, CZ_DIMENSIONS[i]) for i in axes)
pages = [p for p in self.pages if not p.is_reduced]
series = [Record(axes=axes, shape=shape, pages=pages,
dtype=numpy.dtype(pages[0].dtype))]
if len(pages) != len(self.pages): # reduced RGB pages
pages = [p for p in self.pages if p.is_reduced]
cp = 1
i = 0
while cp < len(pages) and i < len(shape) - 2:
cp *= shape[i]
i += 1
shape = shape[:i] + pages[0].shape
axes = axes[:i] + 'CYX'
series.append(Record(axes=axes, shape=shape, pages=pages,
dtype=numpy.dtype(pages[0].dtype)))
elif self.is_imagej:
shape = []
axes = []
ij = page0.imagej_tags
if 'frames' in ij:
shape.append(ij['frames'])
axes.append('T')
if 'slices' in ij:
shape.append(ij['slices'])
axes.append('Z')
if 'channels' in ij and not self.is_rgb:
shape.append(ij['channels'])
axes.append('C')
remain = len(self.pages) // (product(shape) if shape else 1)
if remain > 1:
shape.append(remain)
axes.append('I')
shape.extend(page0.shape)
axes.extend(page0.axes)
axes = ''.join(axes)
series = [Record(pages=self.pages, shape=tuple(shape), axes=axes,
dtype=numpy.dtype(page0.dtype))]
elif self.is_nih:
if len(self.pages) == 1:
shape = page0.shape
axes = page0.axes
else:
shape = (len(self.pages),) + page0.shape
axes = 'I' + page0.axes
series = [Record(pages=self.pages, shape=shape, axes=axes,
dtype=numpy.dtype(page0.dtype))]
elif page0.is_shaped:
# TODO: shaped files can contain multiple series
shape = page0.tags['image_description'].value[7:-1]
shape = tuple(int(i) for i in shape.split(b','))
series = [Record(pages=self.pages, shape=shape,
axes='Q' * len(shape),
dtype=numpy.dtype(page0.dtype))]
# generic detection of series
if not series:
shapes = []
pages = {}
for page in self.pages:
if not page.shape:
continue
shape = page.shape + (page.axes,
page.compression in TIFF_DECOMPESSORS)
if shape not in pages:
shapes.append(shape)
pages[shape] = [page]
else:
pages[shape].append(page)
series = [Record(pages=pages[s],
axes=(('I' + s[-2])
if len(pages[s]) > 1 else s[-2]),
dtype=numpy.dtype(pages[s][0].dtype),
shape=((len(pages[s]), ) + s[:-2]
if len(pages[s]) > 1 else s[:-2]))
for s in shapes]
# remove empty series, e.g. in MD Gel files
series = [s for s in series if sum(s.shape) > 0]
return series
def asarray(self, key=None, series=None, memmap=False):
"""Return image data from multiple TIFF pages as numpy array.
By default the first image series is returned.
Parameters
----------
key : int, slice, or sequence of page indices
Defines which pages to return as array.
series : int
Defines which series of pages to return as array.
memmap : bool
If True, return an array stored in a binary file on disk
if possible.
"""
if key is None and series is None:
series = 0
if series is not None:
pages = self.series[series].pages
else:
pages = self.pages
if key is None:
pass
elif isinstance(key, int):
pages = [pages[key]]
elif isinstance(key, slice):
pages = pages[key]
elif isinstance(key, collections.Iterable):
pages = [pages[k] for k in key]
else:
raise TypeError("key must be an int, slice, or sequence")
if not len(pages):
raise ValueError("no pages selected")
if self.is_nih:
if pages[0].is_palette:
result = stack_pages(pages, colormapped=False, squeeze=False)
result = numpy.take(pages[0].color_map, result, axis=1)
result = numpy.swapaxes(result, 0, 1)
else:
result = stack_pages(pages, memmap=memmap,
colormapped=False, squeeze=False)
elif len(pages) == 1:
return pages[0].asarray(memmap=memmap)
elif self.is_ome:
assert not self.is_palette, "color mapping disabled for ome-tiff"
if any(p is None for p in pages):
# zero out missing pages
firstpage = next(p for p in pages if p)
nopage = numpy.zeros_like(
firstpage.asarray(memmap=False))
s = self.series[series]
if memmap:
with tempfile.NamedTemporaryFile() as fh:
result = numpy.memmap(fh, dtype=s.dtype, shape=s.shape)
result = result.reshape(-1)
else:
result = numpy.empty(s.shape, s.dtype).reshape(-1)
index = 0
class KeepOpen:
# keep Tiff files open between consecutive pages
def __init__(self, parent, close):
self.master = parent
self.parent = parent
self._close = close
def open(self, page):
if self._close and page and page.parent != self.parent:
if self.parent != self.master:
self.parent.filehandle.close()
self.parent = page.parent
self.parent.filehandle.open()
def close(self):
if self._close and self.parent != self.master:
self.parent.filehandle.close()
keep = KeepOpen(self, self._multifile_close)
for page in pages:
keep.open(page)
if page:
a = page.asarray(memmap=False, colormapped=False,
reopen=False)
else:
a = nopage
try:
result[index:index + a.size] = a.reshape(-1)
except ValueError as e:
warnings.warn("ome-tiff: %s" % e)
break
index += a.size
keep.close()
else:
result = stack_pages(pages, memmap=memmap)
if key is None:
try:
result.shape = self.series[series].shape
except ValueError:
try:
warnings.warn("failed to reshape %s to %s" % (
result.shape, self.series[series].shape))
# try series of expected shapes
result.shape = (-1,) + self.series[series].shape
except ValueError:
# revert to generic shape
result.shape = (-1,) + pages[0].shape
else:
result.shape = (-1,) + pages[0].shape
return result
def _omeseries(self):
"""Return image series in OME-TIFF file(s)."""
root = etree.fromstring(self.pages[0].tags['image_description'].value)
uuid = root.attrib.get('UUID', None)
self._files = {uuid: self}
dirname = self._fh.dirname
modulo = {}
result = []
for element in root:
if element.tag.endswith('BinaryOnly'):
warnings.warn("ome-xml: not an ome-tiff master file")
break
if element.tag.endswith('StructuredAnnotations'):
for annot in element:
if not annot.attrib.get('Namespace',
'').endswith('modulo'):
continue
for value in annot:
for modul in value:
for along in modul:
if not along.tag[:-1].endswith('Along'):
continue
axis = along.tag[-1]
newaxis = along.attrib.get('Type', 'other')
newaxis = AXES_LABELS[newaxis]
if 'Start' in along.attrib:
labels = range(
int(along.attrib['Start']),
int(along.attrib['End']) + 1,
int(along.attrib.get('Step', 1)))
else:
labels = [label.text for label in along
if label.tag.endswith('Label')]
modulo[axis] = (newaxis, labels)
if not element.tag.endswith('Image'):
continue
for pixels in element:
if not pixels.tag.endswith('Pixels'):
continue
atr = pixels.attrib
dtype = atr.get('Type', None)
axes = ''.join(reversed(atr['DimensionOrder']))
shape = list(int(atr['Size' + ax]) for ax in axes)
size = product(shape[:-2])
ifds = [None] * size
for data in pixels:
if not data.tag.endswith('TiffData'):
continue
atr = data.attrib
ifd = int(atr.get('IFD', 0))
num = int(atr.get('NumPlanes', 1 if 'IFD' in atr else 0))
num = int(atr.get('PlaneCount', num))
idx = [int(atr.get('First' + ax, 0)) for ax in axes[:-2]]
try:
idx = numpy.ravel_multi_index(idx, shape[:-2])
except ValueError:
# ImageJ produces invalid ome-xml when cropping
warnings.warn("ome-xml: invalid TiffData index")
continue
for uuid in data:
if not uuid.tag.endswith('UUID'):
continue
if uuid.text not in self._files:
if not self._multifile:
# abort reading multifile OME series
# and fall back to generic series
return []
fname = uuid.attrib['FileName']
try:
tif = TiffFile(os.path.join(dirname, fname))
except (IOError, ValueError):
warnings.warn(
"ome-xml: failed to read '%s'" % fname)
break
self._files[uuid.text] = tif
if self._multifile_close:
tif.close()
pages = self._files[uuid.text].pages
try:
for i in range(num if num else len(pages)):
ifds[idx + i] = pages[ifd + i]
except IndexError:
warnings.warn("ome-xml: index out of range")
# only process first uuid
break
else:
pages = self.pages
try:
for i in range(num if num else len(pages)):
ifds[idx + i] = pages[ifd + i]
except IndexError:
warnings.warn("ome-xml: index out of range")
if all(i is None for i in ifds):
# skip images without data
continue
dtype = next(i for i in ifds if i).dtype
result.append(Record(axes=axes, shape=shape, pages=ifds,
dtype=numpy.dtype(dtype)))
for record in result:
for axis, (newaxis, labels) in modulo.items():
i = record.axes.index(axis)
size = len(labels)
if record.shape[i] == size:
record.axes = record.axes.replace(axis, newaxis, 1)
else:
record.shape[i] //= size
record.shape.insert(i + 1, size)
record.axes = record.axes.replace(axis, axis + newaxis, 1)
record.shape = tuple(record.shape)
# squeeze dimensions
for record in result:
record.shape, record.axes = squeeze_axes(record.shape, record.axes)
return result
def __len__(self):
"""Return number of image pages in file."""
return len(self.pages)
def __getitem__(self, key):
"""Return specified page."""
return self.pages[key]
def __iter__(self):
"""Return iterator over pages."""
return iter(self.pages)
def __str__(self):
"""Return string containing information about file."""
result = [
self._fh.name.capitalize(),
format_size(self._fh.size),
{'<': 'little endian', '>': 'big endian'}[self.byteorder]]
if self.is_bigtiff:
result.append("bigtiff")
if len(self.pages) > 1:
result.append("%i pages" % len(self.pages))
if len(self.series) > 1:
result.append("%i series" % len(self.series))
if len(self._files) > 1:
result.append("%i files" % (len(self._files)))
return ", ".join(result)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
self.close()
@lazyattr
def fstat(self):
try:
return os.fstat(self._fh.fileno())
except Exception: # io.UnsupportedOperation
return None
@lazyattr
def is_bigtiff(self):
return self.offset_size != 4
@lazyattr
def is_rgb(self):
return all(p.is_rgb for p in self.pages)
@lazyattr
def is_palette(self):
return all(p.is_palette for p in self.pages)
@lazyattr
def is_mdgel(self):
return any(p.is_mdgel for p in self.pages)
@lazyattr
def is_mediacy(self):
return any(p.is_mediacy for p in self.pages)
@lazyattr
def is_stk(self):
return all(p.is_stk for p in self.pages)
@lazyattr
def is_lsm(self):
return self.pages[0].is_lsm
@lazyattr
def is_imagej(self):
return self.pages[0].is_imagej
@lazyattr
def is_micromanager(self):
return self.pages[0].is_micromanager
@lazyattr
def is_nih(self):
return self.pages[0].is_nih
@lazyattr
def is_fluoview(self):
return self.pages[0].is_fluoview
@lazyattr
def is_ome(self):
return self.pages[0].is_ome
class TiffPage(object):
"""A TIFF image file directory (IFD).
Attributes
----------
index : int
Index of page in file.
dtype : str {TIFF_SAMPLE_DTYPES}
Data type of image, colormapped if applicable.
shape : tuple
Dimensions of the image array in TIFF page,
colormapped and with one alpha channel if applicable.
axes : str
Axes label codes:
'X' width, 'Y' height, 'S' sample, 'I' image series|page|plane,
'Z' depth, 'C' color|em-wavelength|channel, 'E' ex-wavelength|lambda,
'T' time, 'R' region|tile, 'A' angle, 'P' phase, 'H' lifetime,
'L' exposure, 'V' event, 'Q' unknown, '_' missing
tags : TiffTags
Dictionary of tags in page.
Tag values are also directly accessible as attributes.
color_map : numpy array
Color look up table, if exists.
cz_lsm_scan_info: Record(dict)
LSM scan info attributes, if exists.
imagej_tags: Record(dict)
Consolidated ImageJ description and metadata tags, if exists.
uic_tags: Record(dict)
Consolidated MetaMorph STK/UIC tags, if exists.
All attributes are read-only.
Notes
-----
The internal, normalized '_shape' attribute is 6 dimensional:
0. number planes (stk)
1. planar samples_per_pixel
2. image_depth Z (sgi)
3. image_length Y
4. image_width X
5. contig samples_per_pixel
"""
def __init__(self, parent):
"""Initialize instance from file."""
self.parent = parent
self.index = len(parent.pages)
self.shape = self._shape = ()
self.dtype = self._dtype = None
self.axes = ""
self.tags = TiffTags()
self._fromfile()
self._process_tags()
def _fromfile(self):
"""Read TIFF IFD structure and its tags from file.
File cursor must be at storage position of IFD offset and is left at
offset to next IFD.
Raises StopIteration if offset (first bytes read) is 0.
"""
fh = self.parent.filehandle
byteorder = self.parent.byteorder
offset_size = self.parent.offset_size
fmt = {4: 'I', 8: 'Q'}[offset_size]
offset = struct.unpack(byteorder + fmt, fh.read(offset_size))[0]
if not offset:
raise StopIteration()
# read standard tags
tags = self.tags
fh.seek(offset)
fmt, size = {4: ('H', 2), 8: ('Q', 8)}[offset_size]
try:
numtags = struct.unpack(byteorder + fmt, fh.read(size))[0]
except Exception:
warnings.warn("corrupted page list")
raise StopIteration()
tagcode = 0
for _ in range(numtags):
try:
tag = TiffTag(self.parent)
# print(tag)
except TiffTag.Error as e:
warnings.warn(str(e))
continue
if tagcode > tag.code:
# expected for early LSM and tifffile versions
warnings.warn("tags are not ordered by code")
tagcode = tag.code
if tag.name not in tags:
tags[tag.name] = tag
else:
# some files contain multiple IFD with same code
# e.g. MicroManager files contain two image_description
i = 1
while True:
name = "%s_%i" % (tag.name, i)
                    if name not in tags:
                        tags[name] = tag
                        break
                    i += 1
pos = fh.tell()
if self.is_lsm or (self.index and self.parent.is_lsm):
# correct non standard LSM bitspersample tags
self.tags['bits_per_sample']._correct_lsm_bitspersample(self)
if self.is_lsm:
# read LSM info subrecords
for name, reader in CZ_LSM_INFO_READERS.items():
try:
offset = self.cz_lsm_info['offset_' + name]
except KeyError:
continue
if offset < 8:
# older LSM revision
continue
fh.seek(offset)
try:
setattr(self, 'cz_lsm_' + name, reader(fh))
except ValueError:
pass
elif self.is_stk and 'uic1tag' in tags and not tags['uic1tag'].value:
# read uic1tag now that plane count is known
uic1tag = tags['uic1tag']
fh.seek(uic1tag.value_offset)
tags['uic1tag'].value = Record(
read_uic1tag(fh, byteorder, uic1tag.dtype, uic1tag.count,
tags['uic2tag'].count))
fh.seek(pos)
def _process_tags(self):
"""Validate standard tags and initialize attributes.
Raise ValueError if tag values are not supported.
"""
tags = self.tags
for code, (name, default, dtype, count, validate) in TIFF_TAGS.items():
if not (name in tags or default is None):
tags[name] = TiffTag(code, dtype=dtype, count=count,
value=default, name=name)
if name in tags and validate:
try:
if tags[name].count == 1:
setattr(self, name, validate[tags[name].value])
else:
setattr(self, name, tuple(
validate[value] for value in tags[name].value))
except KeyError:
raise ValueError("%s.value (%s) not supported" %
(name, tags[name].value))
tag = tags['bits_per_sample']
if tag.count == 1:
self.bits_per_sample = tag.value
else:
# LSM might list more items than samples_per_pixel
value = tag.value[:self.samples_per_pixel]
if any((v - value[0] for v in value)):
self.bits_per_sample = value
else:
self.bits_per_sample = value[0]
tag = tags['sample_format']
if tag.count == 1:
self.sample_format = TIFF_SAMPLE_FORMATS[tag.value]
else:
value = tag.value[:self.samples_per_pixel]
if any((v - value[0] for v in value)):
self.sample_format = [TIFF_SAMPLE_FORMATS[v] for v in value]
else:
self.sample_format = TIFF_SAMPLE_FORMATS[value[0]]
if 'photometric' not in tags:
self.photometric = None
if 'image_depth' not in tags:
self.image_depth = 1
if 'image_length' in tags:
self.strips_per_image = int(math.floor(
float(self.image_length + self.rows_per_strip - 1) /
self.rows_per_strip))
else:
self.strips_per_image = 0
key = (self.sample_format, self.bits_per_sample)
self.dtype = self._dtype = TIFF_SAMPLE_DTYPES.get(key, None)
if 'image_length' not in self.tags or 'image_width' not in self.tags:
# some GEL file pages are missing image data
self.image_length = 0
self.image_width = 0
self.image_depth = 0
self.strip_offsets = 0
self._shape = ()
self.shape = ()
self.axes = ''
if self.is_palette:
self.dtype = self.tags['color_map'].dtype[1]
self.color_map = numpy.array(self.color_map, self.dtype)
dmax = self.color_map.max()
if dmax < 256:
self.dtype = numpy.uint8
self.color_map = self.color_map.astype(self.dtype)
# else:
# self.dtype = numpy.uint8
# self.color_map >>= 8
# self.color_map = self.color_map.astype(self.dtype)
self.color_map.shape = (3, -1)
# determine shape of data
image_length = self.image_length
image_width = self.image_width
image_depth = self.image_depth
samples_per_pixel = self.samples_per_pixel
if self.is_stk:
assert self.image_depth == 1
planes = self.tags['uic2tag'].count
if self.is_contig:
self._shape = (planes, 1, 1, image_length, image_width,
samples_per_pixel)
if samples_per_pixel == 1:
self.shape = (planes, image_length, image_width)
self.axes = 'YX'
else:
self.shape = (planes, image_length, image_width,
samples_per_pixel)
self.axes = 'YXS'
else:
self._shape = (planes, samples_per_pixel, 1, image_length,
image_width, 1)
if samples_per_pixel == 1:
self.shape = (planes, image_length, image_width)
self.axes = 'YX'
else:
self.shape = (planes, samples_per_pixel, image_length,
image_width)
self.axes = 'SYX'
# detect type of series
if planes == 1:
self.shape = self.shape[1:]
elif numpy.all(self.uic2tag.z_distance != 0):
self.axes = 'Z' + self.axes
elif numpy.all(numpy.diff(self.uic2tag.time_created) != 0):
self.axes = 'T' + self.axes
else:
self.axes = 'I' + self.axes
# DISABLED
if self.is_palette:
assert False, "color mapping disabled for stk"
elif self.is_palette:
samples = 1
if 'extra_samples' in self.tags:
samples += len(self.extra_samples)
if self.is_contig:
self._shape = (1, 1, image_depth, image_length, image_width,
samples)
else:
self._shape = (1, samples, image_depth, image_length,
image_width, 1)
if self.color_map.shape[1] >= 2 ** self.bits_per_sample:
if image_depth == 1:
self.shape = (3, image_length, image_width)
self.axes = 'CYX'
else:
self.shape = (3, image_depth, image_length, image_width)
self.axes = 'CZYX'
else:
warnings.warn("palette cannot be applied")
self.is_palette = False
if image_depth == 1:
self.shape = (image_length, image_width)
self.axes = 'YX'
else:
self.shape = (image_depth, image_length, image_width)
self.axes = 'ZYX'
elif self.is_rgb or samples_per_pixel > 1:
if self.is_contig:
self._shape = (1, 1, image_depth, image_length, image_width,
samples_per_pixel)
if image_depth == 1:
self.shape = (image_length, image_width, samples_per_pixel)
self.axes = 'YXS'
else:
self.shape = (image_depth, image_length, image_width,
samples_per_pixel)
self.axes = 'ZYXS'
else:
self._shape = (1, samples_per_pixel, image_depth,
image_length, image_width, 1)
if image_depth == 1:
self.shape = (samples_per_pixel, image_length, image_width)
self.axes = 'SYX'
else:
self.shape = (samples_per_pixel, image_depth,
image_length, image_width)
self.axes = 'SZYX'
if False and self.is_rgb and 'extra_samples' in self.tags:
# DISABLED: only use RGB and first alpha channel if exists
extra_samples = self.extra_samples
if self.tags['extra_samples'].count == 1:
extra_samples = (extra_samples, )
for exs in extra_samples:
if exs in ('unassalpha', 'assocalpha', 'unspecified'):
if self.is_contig:
self.shape = self.shape[:-1] + (4,)
else:
self.shape = (4,) + self.shape[1:]
break
else:
self._shape = (1, 1, image_depth, image_length, image_width, 1)
if image_depth == 1:
self.shape = (image_length, image_width)
self.axes = 'YX'
else:
self.shape = (image_depth, image_length, image_width)
self.axes = 'ZYX'
if not self.compression and 'strip_byte_counts' not in tags:
self.strip_byte_counts = (
product(self.shape) * (self.bits_per_sample // 8), )
assert len(self.shape) == len(self.axes)
def asarray(self, squeeze=True, colormapped=True, rgbonly=False,
scale_mdgel=False, memmap=False, reopen=True):
"""Read image data from file and return as numpy array.
Raise ValueError if format is unsupported.
If any of 'squeeze', 'colormapped', or 'rgbonly' are not the default,
the shape of the returned array might be different from the page shape.
Parameters
----------
squeeze : bool
If True, all length-1 dimensions (except X and Y) are
squeezed out from result.
colormapped : bool
If True, color mapping is applied for palette-indexed images.
rgbonly : bool
If True, return RGB(A) image without additional extra samples.
memmap : bool
If True, use numpy.memmap to read arrays from file if possible.
For use on 64 bit systems and files with few huge contiguous data.
reopen : bool
If True and the parent file handle is closed, the file is
temporarily re-opened (and closed if no exception occurs).
scale_mdgel : bool
If True, MD Gel data will be scaled according to the private
metadata in the second TIFF page. The dtype will be float32.
"""
if not self._shape:
return
if self.dtype is None:
raise ValueError("data type not supported: %s%i" % (
self.sample_format, self.bits_per_sample))
if self.compression not in TIFF_DECOMPESSORS:
raise ValueError("cannot decompress %s" % self.compression)
tag = self.tags['sample_format']
if tag.count != 1 and any((i - tag.value[0] for i in tag.value)):
raise ValueError("sample formats don't match %s" % str(tag.value))
fh = self.parent.filehandle
closed = fh.closed
if closed:
if reopen:
fh.open()
else:
raise IOError("file handle is closed")
dtype = self._dtype
shape = self._shape
image_width = self.image_width
image_length = self.image_length
image_depth = self.image_depth
typecode = self.parent.byteorder + dtype
bits_per_sample = self.bits_per_sample
if self.is_tiled:
if 'tile_offsets' in self.tags:
byte_counts = self.tile_byte_counts
offsets = self.tile_offsets
else:
byte_counts = self.strip_byte_counts
offsets = self.strip_offsets
tile_width = self.tile_width
tile_length = self.tile_length
tile_depth = self.tile_depth if 'tile_depth' in self.tags else 1
tw = (image_width + tile_width - 1) // tile_width
tl = (image_length + tile_length - 1) // tile_length
td = (image_depth + tile_depth - 1) // tile_depth
shape = (shape[0], shape[1],
td * tile_depth, tl * tile_length, tw * tile_width, shape[-1])
tile_shape = (tile_depth, tile_length, tile_width, shape[-1])
runlen = tile_width
else:
byte_counts = self.strip_byte_counts
offsets = self.strip_offsets
runlen = image_width
if any(o < 2 for o in offsets):
raise ValueError("corrupted page")
if memmap and self._is_memmappable(rgbonly, colormapped):
result = fh.memmap_array(typecode, shape, offset=offsets[0])
elif self.is_contiguous:
fh.seek(offsets[0])
result = fh.read_array(typecode, product(shape))
result = result.astype('=' + dtype)
else:
if self.is_contig:
runlen *= self.samples_per_pixel
if bits_per_sample in (8, 16, 32, 64, 128):
if (bits_per_sample * runlen) % 8:
raise ValueError("data and sample size mismatch")
def unpack(x):
try:
return numpy.fromstring(x, typecode)
except ValueError as e:
# strips may be missing EOI
warnings.warn("unpack: %s" % e)
xlen = ((len(x) // (bits_per_sample // 8))
* (bits_per_sample // 8))
return numpy.fromstring(x[:xlen], typecode)
elif isinstance(bits_per_sample, tuple):
def unpack(x):
return unpackrgb(x, typecode, bits_per_sample)
else:
def unpack(x):
return unpackints(x, typecode, bits_per_sample, runlen)
decompress = TIFF_DECOMPESSORS[self.compression]
if self.compression == 'jpeg':
table = self.jpeg_tables if 'jpeg_tables' in self.tags else b''
decompress = lambda x: decodejpg(x, table, self.photometric)
if self.is_tiled:
result = numpy.empty(shape, dtype)
tw, tl, td, pl = 0, 0, 0, 0
for offset, bytecount in zip(offsets, byte_counts):
fh.seek(offset)
tile = unpack(decompress(fh.read(bytecount)))
tile.shape = tile_shape
if self.predictor == 'horizontal':
numpy.cumsum(tile, axis=-2, dtype=dtype, out=tile)
result[0, pl, td:td + tile_depth,
tl:tl + tile_length, tw:tw + tile_width, :] = tile
del tile
tw += tile_width
if tw >= shape[4]:
tw, tl = 0, tl + tile_length
if tl >= shape[3]:
tl, td = 0, td + tile_depth
if td >= shape[2]:
td, pl = 0, pl + 1
result = result[...,
:image_depth, :image_length, :image_width, :]
else:
strip_size = (self.rows_per_strip * self.image_width *
self.samples_per_pixel)
result = numpy.empty(shape, dtype).reshape(-1)
index = 0
for offset, bytecount in zip(offsets, byte_counts):
fh.seek(offset)
strip = fh.read(bytecount)
strip = decompress(strip)
strip = unpack(strip)
size = min(result.size, strip.size, strip_size,
result.size - index)
result[index:index + size] = strip[:size]
del strip
index += size
result.shape = self._shape
if self.predictor == 'horizontal' and not (self.is_tiled and not
self.is_contiguous):
# work around bug in LSM510 software
if not (self.parent.is_lsm and not self.compression):
numpy.cumsum(result, axis=-2, dtype=dtype, out=result)
if colormapped and self.is_palette:
if self.color_map.shape[1] >= 2 ** bits_per_sample:
# FluoView and LSM might fail here
result = numpy.take(self.color_map,
result[:, 0, :, :, :, 0], axis=1)
elif rgbonly and self.is_rgb and 'extra_samples' in self.tags:
# return only RGB and first alpha channel if exists
extra_samples = self.extra_samples
if self.tags['extra_samples'].count == 1:
extra_samples = (extra_samples, )
for i, exs in enumerate(extra_samples):
if exs in ('unassalpha', 'assocalpha', 'unspecified'):
if self.is_contig:
result = result[..., [0, 1, 2, 3 + i]]
else:
result = result[:, [0, 1, 2, 3 + i]]
break
else:
if self.is_contig:
result = result[..., :3]
else:
result = result[:, :3]
if squeeze:
try:
result.shape = self.shape
except ValueError:
warnings.warn("failed to reshape from %s to %s" % (
str(result.shape), str(self.shape)))
if scale_mdgel and self.parent.is_mdgel:
# MD Gel stores private metadata in the second page
tags = self.parent.pages[1]
if tags.md_file_tag in (2, 128):
scale = tags.md_scale_pixel
scale = scale[0] / scale[1] # rational
result = result.astype('float32')
if tags.md_file_tag == 2:
                result **= 2  # square root data format; undo by squaring
result *= scale
if closed:
# TODO: file remains open if an exception occurred above
fh.close()
return result
def _is_memmappable(self, rgbonly, colormapped):
"""Return if image data in file can be memory mapped."""
if not self.parent.filehandle.is_file or not self.is_contiguous:
return False
return not (self.predictor or
(rgbonly and 'extra_samples' in self.tags) or
(colormapped and self.is_palette) or
({'big': '>', 'little': '<'}[sys.byteorder] !=
self.parent.byteorder))
@lazyattr
def is_contiguous(self):
"""Return offset and size of contiguous data, else None.
Excludes prediction and colormapping.
"""
if self.compression or self.bits_per_sample not in (8, 16, 32, 64):
return
if self.is_tiled:
if (self.image_width != self.tile_width or
self.image_length % self.tile_length or
self.tile_width % 16 or self.tile_length % 16):
return
if ('image_depth' in self.tags and 'tile_depth' in self.tags and
(self.image_length != self.tile_length or
self.image_depth % self.tile_depth)):
return
offsets = self.tile_offsets
byte_counts = self.tile_byte_counts
else:
offsets = self.strip_offsets
byte_counts = self.strip_byte_counts
if len(offsets) == 1:
return offsets[0], byte_counts[0]
if self.is_stk or all(offsets[i] + byte_counts[i] == offsets[i + 1]
# no data/ignore offset
or byte_counts[i + 1] == 0
for i in range(len(offsets) - 1)):
return offsets[0], sum(byte_counts)
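    # Illustrative example: strips at offsets (8, 108, 208) with byte counts
    # (100, 100, 100) are back to back, so is_contiguous returns (8, 300).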
def __str__(self):
"""Return string containing information about page."""
s = ', '.join(s for s in (
' x '.join(str(i) for i in self.shape),
str(numpy.dtype(self.dtype)),
'%s bit' % str(self.bits_per_sample),
self.photometric if 'photometric' in self.tags else '',
self.compression if self.compression else 'raw',
'|'.join(t[3:] for t in (
'is_stk', 'is_lsm', 'is_nih', 'is_ome', 'is_imagej',
'is_micromanager', 'is_fluoview', 'is_mdgel', 'is_mediacy',
'is_sgi', 'is_reduced', 'is_tiled',
'is_contiguous') if getattr(self, t))) if s)
return "Page %i: %s" % (self.index, s)
def __getattr__(self, name):
"""Return tag value."""
if name in self.tags:
value = self.tags[name].value
setattr(self, name, value)
return value
raise AttributeError(name)
@lazyattr
def uic_tags(self):
"""Consolidate UIC tags."""
if not self.is_stk:
raise AttributeError("uic_tags")
tags = self.tags
result = Record()
result.number_planes = tags['uic2tag'].count
if 'image_description' in tags:
result.plane_descriptions = self.image_description.split(b'\x00')
if 'uic1tag' in tags:
result.update(tags['uic1tag'].value)
if 'uic3tag' in tags:
result.update(tags['uic3tag'].value) # wavelengths
if 'uic4tag' in tags:
result.update(tags['uic4tag'].value) # override uic1 tags
uic2tag = tags['uic2tag'].value
result.z_distance = uic2tag.z_distance
result.time_created = uic2tag.time_created
result.time_modified = uic2tag.time_modified
try:
result.datetime_created = [
julian_datetime(*dt) for dt in
zip(uic2tag.date_created, uic2tag.time_created)]
result.datetime_modified = [
julian_datetime(*dt) for dt in
zip(uic2tag.date_modified, uic2tag.time_modified)]
except ValueError as e:
warnings.warn("uic_tags: %s" % e)
return result
@lazyattr
def imagej_tags(self):
"""Consolidate ImageJ metadata."""
if not self.is_imagej:
raise AttributeError("imagej_tags")
tags = self.tags
if 'image_description_1' in tags:
# MicroManager
result = imagej_description(tags['image_description_1'].value)
else:
result = imagej_description(tags['image_description'].value)
if 'imagej_metadata' in tags:
try:
result.update(imagej_metadata(
tags['imagej_metadata'].value,
tags['imagej_byte_counts'].value,
self.parent.byteorder))
except Exception as e:
warnings.warn(str(e))
return Record(result)
@lazyattr
def is_rgb(self):
"""True if page contains a RGB image."""
return ('photometric' in self.tags and
self.tags['photometric'].value == 2)
@lazyattr
def is_contig(self):
"""True if page contains a contiguous image."""
return ('planar_configuration' in self.tags and
self.tags['planar_configuration'].value == 1)
@lazyattr
def is_palette(self):
"""True if page contains a palette-colored image and not OME or STK."""
try:
# turn off color mapping for OME-TIFF and STK
if self.is_stk or self.is_ome or self.parent.is_ome:
return False
except IndexError:
pass # OME-XML not found in first page
return ('photometric' in self.tags and
self.tags['photometric'].value == 3)
@lazyattr
def is_tiled(self):
"""True if page contains tiled image."""
return 'tile_width' in self.tags
@lazyattr
def is_reduced(self):
"""True if page is a reduced image of another image."""
return bool(self.tags['new_subfile_type'].value & 1)
@lazyattr
def is_mdgel(self):
"""True if page contains md_file_tag tag."""
return 'md_file_tag' in self.tags
@lazyattr
def is_mediacy(self):
"""True if page contains Media Cybernetics Id tag."""
return ('mc_id' in self.tags and
self.tags['mc_id'].value.startswith(b'MC TIFF'))
@lazyattr
def is_stk(self):
"""True if page contains UIC2Tag tag."""
return 'uic2tag' in self.tags
@lazyattr
def is_lsm(self):
"""True if page contains LSM CZ_LSM_INFO tag."""
return 'cz_lsm_info' in self.tags
@lazyattr
def is_fluoview(self):
"""True if page contains FluoView MM_STAMP tag."""
return 'mm_stamp' in self.tags
@lazyattr
def is_nih(self):
"""True if page contains NIH image header."""
return 'nih_image_header' in self.tags
@lazyattr
def is_sgi(self):
"""True if page contains SGI image and tile depth tags."""
return 'image_depth' in self.tags and 'tile_depth' in self.tags
@lazyattr
def is_ome(self):
"""True if page contains OME-XML in image_description tag."""
return ('image_description' in self.tags and self.tags[
'image_description'].value.startswith(b'<?xml version='))
@lazyattr
def is_shaped(self):
"""True if page contains shape in image_description tag."""
return ('image_description' in self.tags and self.tags[
'image_description'].value.startswith(b'shape=('))
@lazyattr
def is_imagej(self):
"""True if page contains ImageJ description."""
return (
('image_description' in self.tags and
self.tags['image_description'].value.startswith(b'ImageJ=')) or
('image_description_1' in self.tags and # Micromanager
self.tags['image_description_1'].value.startswith(b'ImageJ=')))
@lazyattr
def is_micromanager(self):
"""True if page contains Micro-Manager metadata."""
return 'micromanager_metadata' in self.tags
class TiffTag(object):
"""A TIFF tag structure.
Attributes
----------
name : string
Attribute name of tag.
code : int
Decimal code of tag.
dtype : str
Datatype of tag data. One of TIFF_DATA_TYPES.
count : int
Number of values.
value : various types
Tag data as Python object.
value_offset : int
Location of value in file, if any.
All attributes are read-only.
"""
__slots__ = ('code', 'name', 'count', 'dtype', 'value', 'value_offset',
'_offset', '_value', '_type')
class Error(Exception):
pass
def __init__(self, arg, **kwargs):
"""Initialize instance from file or arguments."""
self._offset = None
if hasattr(arg, '_fh'):
self._fromfile(arg)
else:
self._fromdata(arg, **kwargs)
def _fromdata(self, code, dtype, count, value, name=None):
"""Initialize instance from arguments."""
self.code = int(code)
self.name = name if name else str(code)
self.dtype = TIFF_DATA_TYPES[dtype]
self.count = int(count)
self.value = value
self._value = value
self._type = dtype
def _fromfile(self, parent):
"""Read tag structure from open file. Advance file cursor."""
fh = parent.filehandle
byteorder = parent.byteorder
self._offset = fh.tell()
self.value_offset = self._offset + parent.offset_size + 4
fmt, size = {4: ('HHI4s', 12), 8: ('HHQ8s', 20)}[parent.offset_size]
data = fh.read(size)
code, dtype = struct.unpack(byteorder + fmt[:2], data[:4])
count, value = struct.unpack(byteorder + fmt[2:], data[4:])
self._value = value
self._type = dtype
if code in TIFF_TAGS:
name = TIFF_TAGS[code][0]
elif code in CUSTOM_TAGS:
name = CUSTOM_TAGS[code][0]
else:
name = str(code)
try:
dtype = TIFF_DATA_TYPES[self._type]
except KeyError:
raise TiffTag.Error("unknown tag data type %i" % self._type)
fmt = '%s%i%s' % (byteorder, count * int(dtype[0]), dtype[1])
size = struct.calcsize(fmt)
if size > parent.offset_size or code in CUSTOM_TAGS:
pos = fh.tell()
tof = {4: 'I', 8: 'Q'}[parent.offset_size]
self.value_offset = offset = struct.unpack(byteorder + tof, value)[0]
if offset < 0 or offset > parent.filehandle.size:
raise TiffTag.Error("corrupt file - invalid tag value offset")
elif offset < 4:
raise TiffTag.Error("corrupt value offset for tag %i" % code)
fh.seek(offset)
if code in CUSTOM_TAGS:
readfunc = CUSTOM_TAGS[code][1]
value = readfunc(fh, byteorder, dtype, count)
if isinstance(value, dict): # numpy.core.records.record
value = Record(value)
elif code in TIFF_TAGS or dtype[-1] == 's':
value = struct.unpack(fmt, fh.read(size))
else:
value = read_numpy(fh, byteorder, dtype, count)
fh.seek(pos)
else:
value = struct.unpack(fmt, value[:size])
if code not in CUSTOM_TAGS and code not in (273, 279, 324, 325):
# scalar value if not strip/tile offsets/byte_counts
if len(value) == 1:
value = value[0]
if (dtype.endswith('s') and isinstance(value, bytes)
and self._type != 7):
# TIFF ASCII fields can contain multiple strings,
# each terminated with a NUL
value = stripascii(value)
self.code = code
self.name = name
self.dtype = dtype
self.count = count
self.value = value
def _correct_lsm_bitspersample(self, parent):
"""Correct LSM bitspersample tag.
Old LSM writers may use a separate region for two 16-bit values,
although the values fit into the tag's value field.
"""
if self.code == 258 and self.count == 2:
# TODO: test this. Need example file.
warnings.warn("correcting LSM bitspersample tag")
fh = parent.filehandle
tof = {4: '<I', 8: '<Q'}[parent.offset_size]
self.value_offset = struct.unpack(tof, self._value)[0]
fh.seek(self.value_offset)
self.value = struct.unpack("<HH", fh.read(4))
def as_str(self):
"""Return value as human readable string."""
return '<undefined>' if self._type == 7 else str(self.value).split('\n', 1)[0]
def __str__(self):
"""Return string containing information about tag."""
return ' '.join(str(getattr(self, s)) for s in self.__slots__)
class TiffSequence(object):
"""Sequence of image files.
The data shape and dtype of all files must match.
Attributes
----------
files : list
List of file names.
shape : tuple
Shape of image sequence.
axes : str
Labels of axes in shape.
Examples
--------
>>> tifs = TiffSequence("test.oif.files/*.tif")
>>> tifs.shape, tifs.axes
((2, 100), 'CT')
>>> data = tifs.asarray()
>>> data.shape
(2, 100, 256, 256)
"""
_patterns = {
'axes': r"""
# matches Olympus OIF and Leica TIFF series
_?(?:(q|l|p|a|c|t|x|y|z|ch|tp)(\d{1,4}))
_?(?:(q|l|p|a|c|t|x|y|z|ch|tp)(\d{1,4}))?
_?(?:(q|l|p|a|c|t|x|y|z|ch|tp)(\d{1,4}))?
_?(?:(q|l|p|a|c|t|x|y|z|ch|tp)(\d{1,4}))?
_?(?:(q|l|p|a|c|t|x|y|z|ch|tp)(\d{1,4}))?
_?(?:(q|l|p|a|c|t|x|y|z|ch|tp)(\d{1,4}))?
_?(?:(q|l|p|a|c|t|x|y|z|ch|tp)(\d{1,4}))?
"""}
class ParseError(Exception):
pass
def __init__(self, files, imread=TiffFile, pattern='axes',
*args, **kwargs):
"""Initialize instance from multiple files.
Parameters
----------
files : str, or sequence of str
Glob pattern or sequence of file names.
imread : function or class
Image read function or class with asarray function returning numpy
array from single file.
pattern : str
Regular expression pattern that matches axes names and sequence
indices in file names.
By default this matches Olympus OIF and Leica TIFF series.
"""
if isinstance(files, str):
files = natural_sorted(glob.glob(files))
files = list(files)
if not files:
raise ValueError("no files found")
# if not os.path.isfile(files[0]):
# raise ValueError("file not found")
self.files = files
if hasattr(imread, 'asarray'):
# redefine imread
_imread = imread
def imread(fname, *args, **kwargs):
with _imread(fname) as im:
return im.asarray(*args, **kwargs)
self.imread = imread
self.pattern = self._patterns.get(pattern, pattern)
try:
self._parse()
if not self.axes:
self.axes = 'I'
except self.ParseError:
self.axes = 'I'
self.shape = (len(files),)
self._start_index = (0,)
self._indices = tuple((i,) for i in range(len(files)))
def __str__(self):
"""Return string with information about image sequence."""
return "\n".join([
self.files[0],
'* files: %i' % len(self.files),
'* axes: %s' % self.axes,
'* shape: %s' % str(self.shape)])
def __len__(self):
return len(self.files)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
self.close()
def close(self):
pass
def asarray(self, memmap=False, *args, **kwargs):
"""Read image data from all files and return as single numpy array.
If memmap is True, return an array stored in a binary file on disk.
The args and kwargs parameters are passed to the imread function.
Raise IndexError or ValueError if image shapes don't match.
"""
im = self.imread(self.files[0], *args, **kwargs)
shape = self.shape + im.shape
if memmap:
with tempfile.NamedTemporaryFile() as fh:
result = numpy.memmap(fh, dtype=im.dtype, shape=shape)
else:
result = numpy.zeros(shape, dtype=im.dtype)
result = result.reshape(-1, *im.shape)
for index, fname in zip(self._indices, self.files):
index = [i - j for i, j in zip(index, self._start_index)]
index = numpy.ravel_multi_index(index, self.shape)
im = self.imread(fname, *args, **kwargs)
result[index] = im
result.shape = shape
return result
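# Usage sketch (illustrative, file names hypothetical):
#     tifs = TiffSequence('folder/sample_c*_t*.tif')
#     data = tifs.asarray(memmap=True)  # backed by a temporary file on disk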
def _parse(self):
"""Get axes and shape from file names."""
if not self.pattern:
raise self.ParseError("invalid pattern")
pattern = re.compile(self.pattern, re.IGNORECASE | re.VERBOSE)
matches = pattern.findall(self.files[0])
if not matches:
raise self.ParseError("pattern doesn't match file names")
matches = matches[-1]
if len(matches) % 2:
raise self.ParseError("pattern doesn't match axis name and index")
axes = ''.join(m for m in matches[::2] if m)
if not axes:
raise self.ParseError("pattern doesn't match file names")
indices = []
for fname in self.files:
matches = pattern.findall(fname)[-1]
if axes != ''.join(m for m in matches[::2] if m):
raise ValueError("axes don't match within the image sequence")
indices.append([int(m) for m in matches[1::2] if m])
shape = tuple(numpy.max(indices, axis=0))
start_index = tuple(numpy.min(indices, axis=0))
shape = tuple(i - j + 1 for i, j in zip(shape, start_index))
if product(shape) != len(self.files):
warnings.warn("files are missing. Missing data are zeroed")
self.axes = axes.upper()
self.shape = shape
self._indices = indices
self._start_index = start_index
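# Example of the default 'axes' pattern (illustrative file names):
#     'img_c001_t0001.tif', 'img_c001_t0002.tif', 'img_c002_t0002.tif', ...
# parse to axes 'CT'; per-file indices are (1, 1), (1, 2), (2, 2), ...
# and shape spans the min..max index of each axis, zero-filling gaps.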
class Record(dict):
"""Dictionary with attribute access.
Can also be initialized with numpy.core.records.record.
"""
__slots__ = ()
def __init__(self, arg=None, **kwargs):
if kwargs:
arg = kwargs
elif arg is None:
arg = {}
try:
dict.__init__(self, arg)
except (TypeError, ValueError):
for i, name in enumerate(arg.dtype.names):
v = arg[i]
self[name] = v if v.dtype.char != 'S' else stripnull(v)
def __getattr__(self, name):
return self[name]
def __setattr__(self, name, value):
self.__setitem__(name, value)
def __str__(self):
"""Pretty print Record."""
s = []
lists = []
for k in sorted(self):
try:
if k.startswith('_'): # fails with bytes keys
continue
except AttributeError:
pass
v = self[k]
if isinstance(v, (list, tuple)) and len(v):
if isinstance(v[0], Record):
lists.append((k, v))
continue
elif isinstance(v[0], TiffPage):
v = [i.index for i in v if i]
s.append(
("* %s: %s" % (k, str(v))).split("\n", 1)[0]
[:PRINT_LINE_LEN].rstrip())
for k, v in lists:
lines = []
for i, w in enumerate(v):
lines.append("* %s[%i]\n %s" % (k, i, str(w).replace("\n", "\n ")))
s.append('\n'.join(lines))
return '\n'.join(s)
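# Usage sketch (illustrative):
#     r = Record(shape=(2, 3), axes='YX')
#     r.shape  # -> (2, 3), equivalent to r['shape']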
class TiffTags(Record):
"""Dictionary of TiffTag with attribute access."""
def __str__(self):
"""Return string with information about all tags."""
s = []
for tag in sorted(self.values(), key=lambda x: x.code):
typecode = "%i%s" % (tag.count * int(tag.dtype[0]), tag.dtype[1])
line = "* %i %s (%s) %s" % (
tag.code, tag.name, typecode, tag.as_str())
s.append(line[:PRINT_LINE_LEN].lstrip())
return '\n'.join(s)
class FileHandle(object):
"""Binary file handle.
* Handle embedded files (for CZI within CZI files).
* Allow to re-open closed files (for multi file formats such as OME-TIFF).
* Read numpy arrays and records from file like objects.
Only binary read, seek, tell, and close are supported on embedded files.
When initialized from another file handle, do not use it unless this
FileHandle is closed.
Attributes
----------
name : str
Name of the file.
path : str
Absolute path to file.
size : int
Size of file in bytes.
is_file : bool
If True, file has a fileno and can be memory mapped.
All attributes are read-only.
"""
__slots__ = ('_fh', '_arg', '_mode', '_name', '_dir',
'_offset', '_size', '_close', 'is_file')
def __init__(self, arg, mode='rb', name=None, offset=None, size=None):
"""Initialize file handle from file name or another file handle.
Parameters
----------
arg : str, File, or FileHandle
File name or open file handle.
mode : str
File open mode in case 'arg' is a file name.
name : str
Optional name of file in case 'arg' is a file handle.
offset : int
Optional start position of embedded file. By default this is
the current file position.
size : int
Optional size of embedded file. By default this is the number
of bytes from the 'offset' to the end of the file.
"""
self._fh = None
self._arg = arg
self._mode = mode
self._name = name
self._dir = ''
self._offset = offset
self._size = size
self._close = True
self.is_file = False
self.open()
def open(self):
"""Open or re-open file."""
if self._fh:
return # file is open
if isinstance(self._arg, str):
# file name
self._arg = os.path.abspath(self._arg)
self._dir, self._name = os.path.split(self._arg)
self._fh = open(self._arg, self._mode)
self._close = True
if self._offset is None:
self._offset = 0
elif isinstance(self._arg, FileHandle):
# FileHandle
self._fh = self._arg._fh
if self._offset is None:
self._offset = 0
self._offset += self._arg._offset
self._close = False
if not self._name:
if self._offset:
name, ext = os.path.splitext(self._arg._name)
self._name = "%s@%i%s" % (name, self._offset, ext)
else:
self._name = self._arg._name
self._dir = self._arg._dir
else:
# open file object
self._fh = self._arg
if self._offset is None:
self._offset = self._arg.tell()
self._close = False
if not self._name:
try:
self._dir, self._name = os.path.split(self._fh.name)
except AttributeError:
self._name = "Unnamed stream"
if self._offset:
self._fh.seek(self._offset)
if self._size is None:
pos = self._fh.tell()
self._fh.seek(self._offset, 2)
self._size = self._fh.tell()
self._fh.seek(pos)
try:
self._fh.fileno()
self.is_file = True
except Exception:
self.is_file = False
def read(self, size=-1):
"""Read 'size' bytes from file, or until EOF is reached."""
if size < 0 and self._offset:
size = self._size
return self._fh.read(size)
def memmap_array(self, dtype, shape, offset=0, mode='r', order='C'):
"""Return numpy.memmap of data stored in file."""
if not self.is_file:
raise ValueError("Can not memory map file without fileno.")
return numpy.memmap(self._fh, dtype=dtype, mode=mode,
offset=self._offset + offset,
shape=shape, order=order)
def read_array(self, dtype, count=-1, sep=""):
"""Return numpy array from file.
Work around numpy issue #2230, "numpy.fromfile does not accept
StringIO object" https://github.com/numpy/numpy/issues/2230.
"""
try:
return numpy.fromfile(self._fh, dtype, count, sep)
except IOError:
if count < 0:
size = self._size
else:
size = count * numpy.dtype(dtype).itemsize
data = self._fh.read(size)
return numpy.fromstring(data, dtype, count, sep)
def read_record(self, dtype, shape=1, byteorder=None):
"""Return numpy record from file."""
try:
rec = numpy.rec.fromfile(self._fh, dtype, shape,
byteorder=byteorder)
except Exception:
dtype = numpy.dtype(dtype)
if shape is None:
shape = self._size // dtype.itemsize
size = product(sequence(shape)) * dtype.itemsize
data = self._fh.read(size)
return numpy.rec.fromstring(data, dtype, shape,
byteorder=byteorder)
return rec[0] if shape == 1 else rec
def tell(self):
"""Return file's current position."""
return self._fh.tell() - self._offset
def seek(self, offset, whence=0):
"""Set file's current position."""
if self._offset:
if whence == 0:
self._fh.seek(self._offset + offset, whence)
return
elif whence == 2:
self._fh.seek(self._offset + self._size + offset, 0)
return
self._fh.seek(offset, whence)
def close(self):
"""Close file."""
if self._close and self._fh:
self._fh.close()
self._fh = None
self.is_file = False
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
self.close()
def __getattr__(self, name):
"""Return attribute from underlying file object."""
if self._offset:
warnings.warn(
"FileHandle: '%s' not implemented for embedded files" % name)
return getattr(self._fh, name)
@property
def name(self):
return self._name
@property
def dirname(self):
return self._dir
@property
def path(self):
return os.path.join(self._dir, self._name)
@property
def size(self):
return self._size
@property
def closed(self):
return self._fh is None
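# Usage sketch (illustrative, file name hypothetical): expose a region of
# an open file as an embedded read-only file:
#     outer = FileHandle('container.bin')
#     inner = FileHandle(outer, offset=1024, size=4096)
#     inner.read(16)  # reads bytes 1024..1039 of container.bin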
def read_bytes(fh, byteorder, dtype, count):
"""Read tag data from file and return as byte string."""
dtype = 'b' if dtype[-1] == 's' else byteorder + dtype[-1]
return fh.read_array(dtype, count).tostring()
def read_numpy(fh, byteorder, dtype, count):
"""Read tag data from file and return as numpy array."""
dtype = 'b' if dtype[-1] == 's' else byteorder + dtype[-1]
return fh.read_array(dtype, count)
def read_json(fh, byteorder, dtype, count):
"""Read JSON tag data from file and return as object."""
data = fh.read(count)
try:
return json.loads(str(stripnull(data), 'utf-8'))
except ValueError:
warnings.warn("invalid JSON `%s`" % data)
def read_mm_header(fh, byteorder, dtype, count):
"""Read MM_HEADER tag from file and return as numpy.rec.array."""
return fh.read_record(MM_HEADER, byteorder=byteorder)
def read_mm_stamp(fh, byteorder, dtype, count):
"""Read MM_STAMP tag from file and return as numpy.array."""
return fh.read_array(byteorder + 'f8', 8)
def read_uic1tag(fh, byteorder, dtype, count, plane_count=None):
"""Read MetaMorph STK UIC1Tag from file and return as dictionary.
Return empty dictionary if plane_count is unknown.
"""
assert dtype in ('2I', '1I') and byteorder == '<'
result = {}
if dtype == '2I':
# pre MetaMorph 2.5 (not tested)
values = fh.read_array('<u4', 2 * count).reshape(count, 2)
result = {'z_distance': values[:, 0] / values[:, 1]}
elif plane_count:
for i in range(count):
tagid = struct.unpack('<I', fh.read(4))[0]
if tagid in (28, 29, 37, 40, 41):
# silently skip unexpected tags
fh.read(4)
continue
name, value = read_uic_tag(fh, tagid, plane_count, offset=True)
result[name] = value
return result
def read_uic2tag(fh, byteorder, dtype, plane_count):
"""Read MetaMorph STK UIC2Tag from file and return as dictionary."""
assert dtype == '2I' and byteorder == '<'
values = fh.read_array('<u4', 6 * plane_count).reshape(plane_count, 6)
return {
'z_distance': values[:, 0] / values[:, 1],
'date_created': values[:, 2], # julian days
'time_created': values[:, 3], # milliseconds
'date_modified': values[:, 4], # julian days
'time_modified': values[:, 5], # milliseconds
}
def read_uic3tag(fh, byteorder, dtype, plane_count):
"""Read MetaMorph STK UIC3Tag from file and return as dictionary."""
assert dtype == '2I' and byteorder == '<'
values = fh.read_array('<u4', 2 * plane_count).reshape(plane_count, 2)
return {'wavelengths': values[:, 0] / values[:, 1]}
def read_uic4tag(fh, byteorder, dtype, plane_count):
"""Read MetaMorph STK UIC4Tag from file and return as dictionary."""
assert dtype == '1I' and byteorder == '<'
result = {}
while True:
tagid = struct.unpack('<H', fh.read(2))[0]
if tagid == 0:
break
name, value = read_uic_tag(fh, tagid, plane_count, offset=False)
result[name] = value
return result
def read_uic_tag(fh, tagid, plane_count, offset):
"""Read a single UIC tag value from file and return tag name and value.
UIC1Tags use an offset.
"""
def read_int(count=1):
value = struct.unpack('<%iI' % count, fh.read(4 * count))
return value[0] if count == 1 else value
try:
name, dtype = UIC_TAGS[tagid]
except KeyError:
# unknown tag
return '_tagid_%i' % tagid, read_int()
if offset:
pos = fh.tell()
if dtype not in (int, None):
off = read_int()
if off < 8:
warnings.warn("invalid offset for uic tag '%s': %i"
% (name, off))
return name, off
fh.seek(off)
if dtype is None:
# skip
name = '_' + name
value = read_int()
elif dtype is int:
# int
value = read_int()
elif dtype is Fraction:
# fraction
value = read_int(2)
value = value[0] / value[1]
elif dtype is julian_datetime:
# datetime
value = julian_datetime(*read_int(2))
elif dtype is read_uic_image_property:
# ImagePropertyEx
value = read_uic_image_property(fh)
elif dtype is str:
# pascal string
size = read_int()
if 0 <= size < 2 ** 10:
value = struct.unpack('%is' % size, fh.read(size))[0][:-1]
value = stripnull(value)
elif offset:
value = ''
warnings.warn("corrupt string in uic tag '%s'" % name)
else:
raise ValueError("invalid string size %i" % size)
elif dtype == '%ip':
# sequence of pascal strings
value = []
for i in range(plane_count):
size = read_int()
if 0 <= size < 2 ** 10:
string = struct.unpack('%is' % size, fh.read(size))[0][:-1]
string = stripnull(string)
value.append(string)
elif offset:
warnings.warn("corrupt string in uic tag '%s'" % name)
else:
raise ValueError("invalid string size %i" % size)
else:
# struct or numpy type
dtype = '<' + dtype
if '%i' in dtype:
dtype = dtype % plane_count
if '(' in dtype:
# numpy type
value = fh.read_array(dtype, 1)[0]
if value.shape[-1] == 2:
# assume fractions
value = value[..., 0] / value[..., 1]
else:
# struct format
value = struct.unpack(dtype, fh.read(struct.calcsize(dtype)))
if len(value) == 1:
value = value[0]
if offset:
fh.seek(pos + 4)
return name, value
def read_uic_image_property(fh):
"""Read UIC ImagePropertyEx tag from file and return as dict."""
# TODO: test this
size = struct.unpack('B', fh.read(1))[0]
name = struct.unpack('%is' % size, fh.read(size))[0][:-1]
flags, prop = struct.unpack('<IB', fh.read(5))
if prop == 1:
value = struct.unpack('<II', fh.read(8))
value = value[0] / value[1]
else:
size = struct.unpack('B', fh.read(1))[0]
value = struct.unpack('%is' % size, fh.read(size))[0]
return dict(name=name, flags=flags, value=value)
def read_cz_lsm_info(fh, byteorder, dtype, count):
"""Read CS_LSM_INFO tag from file and return as numpy.rec.array."""
assert byteorder == '<'
magic_number, structure_size = struct.unpack('<II', fh.read(8))
if magic_number not in (50350412, 67127628):
raise ValueError("not a valid CS_LSM_INFO structure")
fh.seek(-8, 1)
if structure_size < numpy.dtype(CZ_LSM_INFO).itemsize:
# adjust structure according to structure_size
cz_lsm_info = []
size = 0
for name, dtype in CZ_LSM_INFO:
size += numpy.dtype(dtype).itemsize
if size > structure_size:
break
cz_lsm_info.append((name, dtype))
else:
cz_lsm_info = CZ_LSM_INFO
return fh.read_record(cz_lsm_info, byteorder=byteorder)
def read_cz_lsm_floatpairs(fh):
"""Read LSM sequence of float pairs from file and return as list."""
size = struct.unpack('<i', fh.read(4))[0]
return fh.read_array('<2f8', count=size)
def read_cz_lsm_positions(fh):
"""Read LSM positions from file and return as list."""
size = struct.unpack('<I', fh.read(4))[0]
return fh.read_array('<2f8', count=size)
def read_cz_lsm_time_stamps(fh):
"""Read LSM time stamps from file and return as list."""
size, count = struct.unpack('<ii', fh.read(8))
if size != (8 + 8 * count):
raise ValueError("lsm_time_stamps block is too short")
# return struct.unpack('<%dd' % count, fh.read(8*count))
return fh.read_array('<f8', count=count)
def read_cz_lsm_event_list(fh):
"""Read LSM events from file and return as list of (time, type, text)."""
count = struct.unpack('<II', fh.read(8))[1]
events = []
while count > 0:
esize, etime, etype = struct.unpack('<IdI', fh.read(16))
etext = stripnull(fh.read(esize - 16))
events.append((etime, etype, etext))
count -= 1
return events
def read_cz_lsm_scan_info(fh):
"""Read LSM scan information from file and return as Record."""
block = Record()
blocks = [block]
unpack = struct.unpack
if 0x10000000 != struct.unpack('<I', fh.read(4))[0]:
# not a Recording sub block
raise ValueError("not a lsm_scan_info structure")
fh.read(8)
while True:
entry, dtype, size = unpack('<III', fh.read(12))
if dtype == 2:
# ascii
value = stripnull(fh.read(size))
elif dtype == 4:
# long
value = unpack('<i', fh.read(4))[0]
elif dtype == 5:
# rational
value = unpack('<d', fh.read(8))[0]
else:
value = 0
if entry in CZ_LSM_SCAN_INFO_ARRAYS:
blocks.append(block)
name = CZ_LSM_SCAN_INFO_ARRAYS[entry]
newobj = []
setattr(block, name, newobj)
block = newobj
elif entry in CZ_LSM_SCAN_INFO_STRUCTS:
blocks.append(block)
newobj = Record()
block.append(newobj)
block = newobj
elif entry in CZ_LSM_SCAN_INFO_ATTRIBUTES:
name = CZ_LSM_SCAN_INFO_ATTRIBUTES[entry]
setattr(block, name, value)
elif entry == 0xffffffff:
# end sub block
block = blocks.pop()
else:
# unknown entry
setattr(block, "entry_0x%x" % entry, value)
if not blocks:
break
return block
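# The returned Record mirrors the nested sub-block structure; a typical
# (illustrative) access path is
#     info = read_cz_lsm_scan_info(fh)
#     info.tracks[0].detection_channels[0].detector_gain_first
# Names come from the CZ_LSM_SCAN_INFO_* tables below; which entries are
# present depends on the file.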
def read_nih_image_header(fh, byteorder, dtype, count):
"""Read NIH_IMAGE_HEADER tag from file and return as numpy.rec.array."""
a = fh.read_record(NIH_IMAGE_HEADER, byteorder=byteorder)
a = a.newbyteorder(byteorder)
a.xunit = a.xunit[:a._xunit_len]
a.um = a.um[:a._um_len]
return a
def read_micromanager_metadata(fh):
"""Read MicroManager non-TIFF settings from open file and return as dict.
The settings can be used to read image data without parsing the TIFF file.
Raise ValueError if file does not contain valid MicroManager metadata.
"""
fh.seek(0)
try:
byteorder = {b'II': '<', b'MM': '>'}[fh.read(2)]
except KeyError:
raise ValueError("not a MicroManager TIFF file")
results = {}
fh.seek(8)
(index_header, index_offset, display_header, display_offset,
comments_header, comments_offset, summary_header, summary_length
) = struct.unpack(byteorder + "IIIIIIII", fh.read(32))
if summary_header != 2355492:
raise ValueError("invalid MicroManager summary_header")
results['summary'] = read_json(fh, byteorder, None, summary_length)
if index_header != 54773648:
raise ValueError("invalid MicroManager index_header")
fh.seek(index_offset)
header, count = struct.unpack(byteorder + "II", fh.read(8))
if header != 3453623:
raise ValueError("invalid MicroManager index_header")
data = struct.unpack(byteorder + "IIIII" * count, fh.read(20 * count))
results['index_map'] = {
'channel': data[::5], 'slice': data[1::5], 'frame': data[2::5],
'position': data[3::5], 'offset': data[4::5]}
if display_header != 483765892:
raise ValueError("invalid MicroManager display_header")
fh.seek(display_offset)
header, count = struct.unpack(byteorder + "II", fh.read(8))
if header != 347834724:
raise ValueError("invalid MicroManager display_header")
results['display_settings'] = read_json(fh, byteorder, None, count)
if comments_header != 99384722:
raise ValueError("invalid MicroManager comments_header")
fh.seek(comments_offset)
header, count = struct.unpack(byteorder + "II", fh.read(8))
if header != 84720485:
raise ValueError("invalid MicroManager comments_header")
results['comments'] = read_json(fh, byteorder, None, count)
return results
def imagej_metadata(data, bytecounts, byteorder):
"""Return dict from ImageJ metadata tag value."""
_str = str if sys.version_info[0] < 3 else lambda x: str(x, 'cp1252')
def read_string(data, byteorder):
return _str(stripnull(data[0 if byteorder == '<' else 1::2]))
def read_double(data, byteorder):
return struct.unpack(byteorder + ('d' * (len(data) // 8)), data)
def read_bytes(data, byteorder):
# return struct.unpack('b' * len(data), data)
return numpy.fromstring(data, 'uint8')
metadata_types = { # big endian
b'info': ('info', read_string),
b'labl': ('labels', read_string),
b'rang': ('ranges', read_double),
b'luts': ('luts', read_bytes),
b'roi ': ('roi', read_bytes),
b'over': ('overlays', read_bytes)}
metadata_types.update( # little endian
dict((k[::-1], v) for k, v in metadata_types.items()))
if not bytecounts:
raise ValueError("no ImageJ metadata")
if data[:4] not in (b'IJIJ', b'JIJI'):
raise ValueError("invalid ImageJ metadata")
header_size = bytecounts[0]
if header_size < 12 or header_size > 804:
raise ValueError("invalid ImageJ metadata header size")
ntypes = (header_size - 4) // 8
header = struct.unpack(byteorder + '4sI' * ntypes, data[4:4 + ntypes * 8])
pos = 4 + ntypes * 8
counter = 0
result = {}
for mtype, count in zip(header[::2], header[1::2]):
values = []
name, func = metadata_types.get(mtype, (_str(mtype), read_bytes))
for _ in range(count):
counter += 1
pos1 = pos + bytecounts[counter]
values.append(func(data[pos:pos1], byteorder))
pos = pos1
result[name.strip()] = values[0] if count == 1 else values
return result
def imagej_description(description):
"""Return dict from ImageJ image_description tag."""
def _bool(val):
return {b'true': True, b'false': False}[val.lower()]
_str = str if sys.version_info[0] < 3 else lambda x: str(x, 'cp1252')
result = {}
for line in description.splitlines():
try:
key, val = line.split(b'=')
except Exception:
continue
key = key.strip()
val = val.strip()
for dtype in (int, float, _bool, _str):
try:
val = dtype(val)
break
except Exception:
pass
result[_str(key)] = val
return result
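# Example (illustrative): a description such as
#     b'ImageJ=1.11a\nimages=510\nhyperstack=true\nunit=micron\n'
# parses to
#     {'ImageJ': '1.11a', 'images': 510, 'hyperstack': True,
#      'unit': 'micron'}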
def _replace_by(module_function, package=None, warn=False):
"""Try replace decorated function by module.function."""
try:
from importlib import import_module
except ImportError:
warnings.warn('could not import module importlib')
return lambda func: func
def decorate(func, module_function=module_function, warn=warn):
try:
module, function = module_function.split('.')
if not package:
module = import_module(module)
else:
module = import_module('.' + module, package=package)
func, oldfunc = getattr(module, function), func
globals()['__old_' + func.__name__] = oldfunc
except Exception:
if warn:
warnings.warn("failed to import %s" % module_function)
return func
return decorate
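# Usage sketch: the decorator swaps a pure-Python codec for its C version
# from the optional _tifffile extension module when that module imports:
#     @_replace_by('_tifffile.decodepackbits')
#     def decodepackbits(encoded):
#         ...
# The pure-Python implementations below serve as fallbacks.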
def decodejpg(encoded, tables=b'', photometric=None,
ycbcr_subsampling=None, ycbcr_positioning=None):
"""Decode JPEG encoded byte string (using _czifile extension module)."""
import _czifile
image = _czifile.decodejpg(encoded, tables)
if photometric == 'rgb' and ycbcr_subsampling and ycbcr_positioning:
# TODO: convert YCbCr to RGB
pass
return image.tostring()
@_replace_by('_tifffile.decodepackbits')
def decodepackbits(encoded):
"""Decompress PackBits encoded byte string.
PackBits is a simple byte-oriented run-length compression scheme.
"""
func = ord if sys.version[0] == '2' else lambda x: x
result = []
result_extend = result.extend
i = 0
try:
while True:
n = func(encoded[i]) + 1
i += 1
if n < 129:
result_extend(encoded[i:i + n])
i += n
elif n > 129:
result_extend(encoded[i:i + 1] * (258 - n))
i += 1
except IndexError:
pass
return b''.join(result) if sys.version[0] == '2' else bytes(result)
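# Example (illustrative): header byte 0xfe replicates the next byte
# 258 - 255 = 3 times; header byte 0x02 copies the next 3 literal bytes:
#     decodepackbits(b'\xfe\xaa\x02abc')  # -> b'\xaa\xaa\xaaabc'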
@_replace_by('_tifffile.decodelzw')
def decodelzw(encoded):
"""Decompress LZW (Lempel-Ziv-Welch) encoded TIFF strip (byte string).
The strip must begin with a CLEAR code and end with an EOI code.
This is an implementation of the LZW decoding algorithm described in (1).
It is not compatible with old style LZW compressed files like quad-lzw.tif.
"""
len_encoded = len(encoded)
bitcount_max = len_encoded * 8
unpack = struct.unpack
if sys.version[0] == '2':
newtable = [chr(i) for i in range(256)]
else:
newtable = [bytes([i]) for i in range(256)]
newtable.extend((0, 0))
def next_code():
"""Return integer of `bitw` bits at `bitcount` position in encoded."""
start = bitcount // 8
s = encoded[start:start + 4]
try:
code = unpack('>I', s)[0]
except Exception:
code = unpack('>I', s + b'\x00' * (4 - len(s)))[0]
code <<= bitcount % 8
code &= mask
return code >> shr
switchbitch = { # code: bit-width, shr-bits, bit-mask
255: (9, 23, int(9 * '1' + '0' * 23, 2)),
511: (10, 22, int(10 * '1' + '0' * 22, 2)),
1023: (11, 21, int(11 * '1' + '0' * 21, 2)),
2047: (12, 20, int(12 * '1' + '0' * 20, 2)), }
bitw, shr, mask = switchbitch[255]
bitcount = 0
if len_encoded < 4:
raise ValueError("strip must be at least 4 characters long")
if next_code() != 256:
raise ValueError("strip must begin with CLEAR code")
code = 0
oldcode = 0
result = []
result_append = result.append
while True:
code = next_code() # ~5% faster when inlining this function
bitcount += bitw
if code == 257 or bitcount >= bitcount_max: # EOI
break
if code == 256: # CLEAR
table = newtable[:]
table_append = table.append
lentable = 258
bitw, shr, mask = switchbitch[255]
code = next_code()
bitcount += bitw
if code == 257: # EOI
break
result_append(table[code])
else:
if code < lentable:
decoded = table[code]
newcode = table[oldcode] + decoded[:1]
else:
newcode = table[oldcode]
newcode += newcode[:1]
decoded = newcode
result_append(decoded)
table_append(newcode)
lentable += 1
oldcode = code
if lentable in switchbitch:
bitw, shr, mask = switchbitch[lentable]
if code != 257:
warnings.warn("unexpected end of lzw stream (code %i)" % code)
return b''.join(result)
@_replace_by('_tifffile.unpackints')
def unpackints(data, dtype, itemsize, runlen=0):
"""Decompress byte string to array of integers of any bit size <= 32.
Parameters
----------
data : byte str
Data to decompress.
dtype : numpy.dtype or str
A numpy boolean or integer type.
itemsize : int
Number of bits per integer.
runlen : int
Number of consecutive integers, after which to start at next byte.
"""
if itemsize == 1: # bitarray
data = numpy.fromstring(data, '|B')
data = numpy.unpackbits(data)
if runlen % 8:
data = data.reshape(-1, runlen + (8 - runlen % 8))
data = data[:, :runlen].reshape(-1)
return data.astype(dtype)
dtype = numpy.dtype(dtype)
if itemsize in (8, 16, 32, 64):
return numpy.fromstring(data, dtype)
if itemsize < 1 or itemsize > 32:
raise ValueError("itemsize out of range: %i" % itemsize)
if dtype.kind not in "biu":
raise ValueError("invalid dtype")
itembytes = next(i for i in (1, 2, 4, 8) if 8 * i >= itemsize)
if itembytes != dtype.itemsize:
raise ValueError("dtype.itemsize too small")
if runlen == 0:
runlen = len(data) // itembytes
skipbits = runlen * itemsize % 8
if skipbits:
skipbits = 8 - skipbits
shrbits = itembytes * 8 - itemsize
bitmask = int(itemsize * '1' + '0' * shrbits, 2)
dtypestr = '>' + dtype.char # dtype always big endian?
unpack = struct.unpack
length = runlen * (len(data) * 8 // (runlen * itemsize + skipbits))
result = numpy.empty((length, ), dtype)
bitcount = 0
for i in range(len(result)):
start = bitcount // 8
s = data[start:start + itembytes]
try:
code = unpack(dtypestr, s)[0]
except Exception:
code = unpack(dtypestr, s + b'\x00' * (itembytes - len(s)))[0]
code <<= bitcount % 8
code &= bitmask
result[i] = code >> shrbits
bitcount += itemsize
if (i + 1) % runlen == 0:
bitcount += skipbits
return result
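# Example (illustrative): two 4-bit samples packed into one byte,
#     unpackints(b'\xab', 'u1', 4, runlen=2)  # -> array([10, 11], 'u1')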
def unpackrgb(data, dtype='<B', bitspersample=(5, 6, 5), rescale=True):
"""Return array from byte string containing packed samples.
Use to unpack RGB565 or RGB555 to RGB888 format.
Parameters
----------
data : byte str
The data to be decoded. Samples in each pixel are stored consecutively.
Pixels are aligned to 8, 16, or 32 bit boundaries.
dtype : numpy.dtype
The sample data type. The byteorder applies also to the data stream.
bitspersample : tuple
Number of bits for each sample in a pixel.
rescale : bool
Upscale samples to the number of bits in dtype.
Returns
-------
result : ndarray
Flattened array of unpacked samples of native dtype.
Examples
--------
>>> data = struct.pack('BBBB', 0x21, 0x08, 0xff, 0xff)
>>> print(unpackrgb(data, '<B', (5, 6, 5), False))
[ 1 1 1 31 63 31]
>>> print(unpackrgb(data, '<B', (5, 6, 5)))
[ 8 4 8 255 255 255]
>>> print(unpackrgb(data, '<B', (5, 5, 5)))
[ 16 8 8 255 255 255]
"""
dtype = numpy.dtype(dtype)
bits = int(numpy.sum(bitspersample))
if not (bits <= 32 and all(
i <= dtype.itemsize * 8 for i in bitspersample)):
raise ValueError("sample size not supported %s" % str(bitspersample))
dt = next(i for i in 'BHI' if numpy.dtype(i).itemsize * 8 >= bits)
data = numpy.fromstring(data, dtype.byteorder + dt)
result = numpy.empty((data.size, len(bitspersample)), dtype.char)
for i, bps in enumerate(bitspersample):
t = data >> int(numpy.sum(bitspersample[i + 1:]))
t &= int('0b' + '1' * bps, 2)
if rescale:
o = ((dtype.itemsize * 8) // bps + 1) * bps
if o > data.dtype.itemsize * 8:
t = t.astype('I')
t *= (2 ** o - 1) // (2 ** bps - 1)
t //= 2 ** (o - (dtype.itemsize * 8))
result[:, i] = t
return result.reshape(-1)
def reorient(image, orientation):
"""Return reoriented view of image array.
Parameters
----------
image : numpy array
Non-squeezed output of asarray() functions.
Axes -3 and -2 must be image length and width respectively.
orientation : int or str
One of TIFF_ORIENTATIONS keys or values.
"""
o = TIFF_ORIENTATIONS.get(orientation, orientation)
if o == 'top_left':
return image
elif o == 'top_right':
return image[..., ::-1, :]
elif o == 'bottom_left':
return image[..., ::-1, :, :]
elif o == 'bottom_right':
return image[..., ::-1, ::-1, :]
elif o == 'left_top':
return numpy.swapaxes(image, -3, -2)
elif o == 'right_top':
return numpy.swapaxes(image, -3, -2)[..., ::-1, :]
elif o == 'left_bottom':
return numpy.swapaxes(image, -3, -2)[..., ::-1, :, :]
elif o == 'right_bottom':
return numpy.swapaxes(image, -3, -2)[..., ::-1, ::-1, :]
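# Example (illustrative): orientation 2 ('top_right') mirrors the width
# axis, so reorient(image, 2) equals image[..., ::-1, :], given axes -3
# and -2 are length and width as documented above.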
def squeeze_axes(shape, axes, skip='XY'):
"""Return shape and axes with single-dimensional entries removed.
Remove unused dimensions unless their axes are listed in 'skip'.
>>> squeeze_axes((5, 1, 2, 1, 1), 'TZYXC')
((5, 2, 1), 'TYX')
"""
if len(shape) != len(axes):
raise ValueError("dimensions of axes and shape don't match")
shape, axes = zip(*(i for i in zip(shape, axes)
if i[0] > 1 or i[1] in skip))
return shape, ''.join(axes)
def transpose_axes(data, axes, asaxes='CTZYX'):
"""Return data with its axes permuted to match specified axes.
A view is returned if possible.
>>> transpose_axes(numpy.zeros((2, 3, 4, 5)), 'TYXC', asaxes='CTZYX').shape
(5, 2, 1, 3, 4)
"""
for ax in axes:
if ax not in asaxes:
raise ValueError("unknown axis %s" % ax)
# add missing axes to data
shape = data.shape
for ax in reversed(asaxes):
if ax not in axes:
axes = ax + axes
shape = (1,) + shape
data = data.reshape(shape)
# transpose axes
data = data.transpose([axes.index(ax) for ax in asaxes])
return data
def stack_pages(pages, memmap=False, *args, **kwargs):
"""Read data from sequence of TiffPage and stack them vertically.
If memmap is True, return an array stored in a binary file on disk.
Additional parameters are passed to the page asarray function.
"""
if len(pages) == 0:
raise ValueError("no pages")
if len(pages) == 1:
return pages[0].asarray(memmap=memmap, *args, **kwargs)
result = pages[0].asarray(*args, **kwargs)
shape = (len(pages),) + result.shape
if memmap:
with tempfile.NamedTemporaryFile() as fh:
result = numpy.memmap(fh, dtype=result.dtype, shape=shape)
else:
result = numpy.empty(shape, dtype=result.dtype)
for i, page in enumerate(pages):
result[i] = page.asarray(*args, **kwargs)
return result
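# Usage sketch (illustrative, file name hypothetical):
#     with TiffFile('stack.tif') as tif:
#         data = stack_pages(tif.pages)  # shape (len(pages),) + page shape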
def stripnull(string):
"""Return string truncated at first null character.
Clean NULL terminated C strings.
>>> stripnull(b'string\\x00')
b'string'
"""
i = string.find(b'\x00')
return string if (i < 0) else string[:i]
def stripascii(string):
"""Return string truncated at last byte that is 7bit ASCII.
Clean NULL separated and terminated TIFF strings.
>>> stripascii(b'string\\x00string\\n\\x01\\x00')
b'string\\x00string\\n'
>>> stripascii(b'\\x00')
b''
"""
# TODO: pythonize this
ord_ = ord if sys.version_info[0] < 3 else lambda x: x
i = len(string)
while i:
i -= 1
if 8 < ord_(string[i]) < 127:
break
else:
i = -1
return string[:i + 1]
def format_size(size):
"""Return file size as string from byte size."""
for unit in ('B', 'KB', 'MB', 'GB', 'TB'):
if size < 2048:
return "%.f %s" % (size, unit)
size /= 1024.0
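# Example (illustrative): format_size(123456) -> '121 KB'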
def sequence(value):
"""Return tuple containing value if value is not a sequence.
>>> sequence(1)
(1,)
>>> sequence([1])
[1]
"""
try:
len(value)
return value
except TypeError:
return value,
def product(iterable):
"""Return product of sequence of numbers.
Equivalent of functools.reduce(operator.mul, iterable, 1).
>>> product([2**8, 2**30])
274877906944
>>> product([])
1
"""
prod = 1
for i in iterable:
prod *= i
return prod
def natural_sorted(iterable):
"""Return human sorted list of strings.
E.g. for sorting file names.
>>> natural_sorted(['f1', 'f2', 'f10'])
['f1', 'f2', 'f10']
"""
def sortkey(x):
return [(int(c) if c.isdigit() else c) for c in re.split(numbers, x)]
numbers = re.compile(r'(\d+)')
return sorted(iterable, key=sortkey)
def excel_datetime(timestamp, epoch=datetime.datetime.fromordinal(693594)):
"""Return datetime object from timestamp in Excel serial format.
Convert LSM time stamps.
>>> excel_datetime(40237.029999999795)
datetime.datetime(2010, 2, 28, 0, 43, 11, 999982)
"""
return epoch + datetime.timedelta(timestamp)
def julian_datetime(julianday, milisecond=0):
"""Return datetime from days since 1/1/4713 BC and ms since midnight.
Convert Julian dates according to MetaMorph.
>>> julian_datetime(2451576, 54362783)
datetime.datetime(2000, 2, 2, 15, 6, 2, 783)
"""
if julianday <= 1721423:
# no datetime before year 1
return None
a = julianday + 1
if a > 2299160:
alpha = math.trunc((a - 1867216.25) / 36524.25)
a += 1 + alpha - alpha // 4
b = a + (1524 if a > 1721423 else 1158)
c = math.trunc((b - 122.1) / 365.25)
d = math.trunc(365.25 * c)
e = math.trunc((b - d) / 30.6001)
day = b - d - math.trunc(30.6001 * e)
month = e - (1 if e < 13.5 else 13)
year = c - (4716 if month > 2.5 else 4715)
hour, milisecond = divmod(milisecond, 1000 * 60 * 60)
minute, milisecond = divmod(milisecond, 1000 * 60)
second, milisecond = divmod(milisecond, 1000)
return datetime.datetime(year, month, day,
hour, minute, second, milisecond)
def test_tifffile(directory='testimages', verbose=True):
"""Read all images in directory.
Print error message on failure.
>>> test_tifffile(verbose=False)
"""
successful = 0
failed = 0
start = time.time()
for f in glob.glob(os.path.join(directory, '*.*')):
if verbose:
print("\n%s>\n" % f.lower(), end='')
t0 = time.time()
try:
tif = TiffFile(f, multifile=True)
except Exception as e:
if not verbose:
print(f, end=' ')
print("ERROR:", e)
failed += 1
continue
try:
img = tif.asarray()
except ValueError:
try:
img = tif[0].asarray()
except Exception as e:
if not verbose:
print(f, end=' ')
print("ERROR:", e)
failed += 1
continue
finally:
tif.close()
successful += 1
if verbose:
print("%s, %s %s, %s, %.0f ms" % (
str(tif), str(img.shape), img.dtype, tif[0].compression,
(time.time() - t0) * 1e3))
if verbose:
print("\nSuccessfully read %i of %i files in %.3f s\n" % (
successful, successful + failed, time.time() - start))
class TIFF_SUBFILE_TYPES(object):
def __getitem__(self, key):
result = []
if key & 1:
result.append('reduced_image')
if key & 2:
result.append('page')
if key & 4:
result.append('mask')
return tuple(result)
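# Example (illustrative): TIFF_SUBFILE_TYPES()[3] -> ('reduced_image', 'page')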
TIFF_PHOTOMETRICS = {
0: 'miniswhite',
1: 'minisblack',
2: 'rgb',
3: 'palette',
4: 'mask',
5: 'separated', # CMYK
6: 'ycbcr',
8: 'cielab',
9: 'icclab',
10: 'itulab',
32803: 'cfa', # Color Filter Array
32844: 'logl',
32845: 'logluv',
34892: 'linear_raw'
}
TIFF_COMPESSIONS = {
1: None,
2: 'ccittrle',
3: 'ccittfax3',
4: 'ccittfax4',
5: 'lzw',
6: 'ojpeg',
7: 'jpeg',
8: 'adobe_deflate',
9: 't85',
10: 't43',
32766: 'next',
32771: 'ccittrlew',
32773: 'packbits',
32809: 'thunderscan',
32895: 'it8ctpad',
32896: 'it8lw',
32897: 'it8mp',
32898: 'it8bl',
32908: 'pixarfilm',
32909: 'pixarlog',
32946: 'deflate',
32947: 'dcs',
34661: 'jbig',
34676: 'sgilog',
34677: 'sgilog24',
34712: 'jp2000',
34713: 'nef',
}
TIFF_DECOMPESSORS = {
None: lambda x: x,
'adobe_deflate': zlib.decompress,
'deflate': zlib.decompress,
'packbits': decodepackbits,
'lzw': decodelzw,
# 'jpeg': decodejpg
}
TIFF_DATA_TYPES = {
1: '1B', # BYTE 8-bit unsigned integer.
2: '1s', # ASCII 8-bit byte that contains a 7-bit ASCII code;
# the last byte must be NULL (binary zero).
3: '1H', # SHORT 16-bit (2-byte) unsigned integer
4: '1I', # LONG 32-bit (4-byte) unsigned integer.
5: '2I', # RATIONAL Two LONGs: the first represents the numerator of
# a fraction; the second, the denominator.
6: '1b', # SBYTE An 8-bit signed (twos-complement) integer.
7: '1s', # UNDEFINED An 8-bit byte that may contain anything,
# depending on the definition of the field.
8: '1h', # SSHORT A 16-bit (2-byte) signed (twos-complement) integer.
9: '1i', # SLONG A 32-bit (4-byte) signed (twos-complement) integer.
10: '2i', # SRATIONAL Two SLONGs: the first represents the numerator
# of a fraction, the second the denominator.
11: '1f', # FLOAT Single precision (4-byte) IEEE format.
12: '1d', # DOUBLE Double precision (8-byte) IEEE format.
13: '1I', # IFD unsigned 4 byte IFD offset.
# 14: '', # UNICODE
# 15: '', # COMPLEX
16: '1Q', # LONG8 unsigned 8 byte integer (BigTiff)
17: '1q', # SLONG8 signed 8 byte integer (BigTiff)
18: '1Q', # IFD8 unsigned 8 byte IFD offset (BigTiff)
}
TIFF_SAMPLE_FORMATS = {
1: 'uint',
2: 'int',
3: 'float',
# 4: 'void',
# 5: 'complex_int',
6: 'complex',
}
TIFF_SAMPLE_DTYPES = {
('uint', 1): '?', # bitmap
('uint', 2): 'B',
('uint', 3): 'B',
('uint', 4): 'B',
('uint', 5): 'B',
('uint', 6): 'B',
('uint', 7): 'B',
('uint', 8): 'B',
('uint', 9): 'H',
('uint', 10): 'H',
('uint', 11): 'H',
('uint', 12): 'H',
('uint', 13): 'H',
('uint', 14): 'H',
('uint', 15): 'H',
('uint', 16): 'H',
('uint', 17): 'I',
('uint', 18): 'I',
('uint', 19): 'I',
('uint', 20): 'I',
('uint', 21): 'I',
('uint', 22): 'I',
('uint', 23): 'I',
('uint', 24): 'I',
('uint', 25): 'I',
('uint', 26): 'I',
('uint', 27): 'I',
('uint', 28): 'I',
('uint', 29): 'I',
('uint', 30): 'I',
('uint', 31): 'I',
('uint', 32): 'I',
('uint', 64): 'Q',
('int', 8): 'b',
('int', 16): 'h',
('int', 32): 'i',
('int', 64): 'q',
('float', 16): 'e',
('float', 32): 'f',
('float', 64): 'd',
('complex', 64): 'F',
('complex', 128): 'D',
('uint', (5, 6, 5)): 'B',
}
TIFF_ORIENTATIONS = {
1: 'top_left',
2: 'top_right',
3: 'bottom_right',
4: 'bottom_left',
5: 'left_top',
6: 'right_top',
7: 'right_bottom',
8: 'left_bottom',
}
# TODO: is there a standard for character axes labels?
AXES_LABELS = {
'X': 'width',
'Y': 'height',
'Z': 'depth',
'S': 'sample', # rgb(a)
'I': 'series', # general sequence, plane, page, IFD
'T': 'time',
'C': 'channel', # color, emission wavelength
'A': 'angle',
'P': 'phase', # formerly F # P is Position in LSM!
'R': 'tile', # region, point, mosaic
'H': 'lifetime', # histogram
'E': 'lambda', # excitation wavelength
'L': 'exposure', # lux
'V': 'event',
'Q': 'other',
# 'M': 'mosaic', # LSM 6
}
AXES_LABELS.update(dict((v, k) for k, v in AXES_LABELS.items()))
# Map OME pixel types to numpy dtype
OME_PIXEL_TYPES = {
'int8': 'i1',
'int16': 'i2',
'int32': 'i4',
'uint8': 'u1',
'uint16': 'u2',
'uint32': 'u4',
'float': 'f4',
# 'bit': 'bit',
'double': 'f8',
'complex': 'c8',
'double-complex': 'c16',
}
# NIH Image PicHeader v1.63
NIH_IMAGE_HEADER = [
('fileid', 'a8'),
('nlines', 'i2'),
('pixelsperline', 'i2'),
('version', 'i2'),
('oldlutmode', 'i2'),
('oldncolors', 'i2'),
('colors', 'u1', (3, 32)),
('oldcolorstart', 'i2'),
('colorwidth', 'i2'),
('extracolors', 'u2', (6, 3)),
('nextracolors', 'i2'),
('foregroundindex', 'i2'),
('backgroundindex', 'i2'),
('xscale', 'f8'),
('_x0', 'i2'),
('_x1', 'i2'),
('units_t', 'i2'), # NIH_UNITS_TYPE
('p1', [('x', 'i2'), ('y', 'i2')]),
('p2', [('x', 'i2'), ('y', 'i2')]),
('curvefit_t', 'i2'), # NIH_CURVEFIT_TYPE
('ncoefficients', 'i2'),
('coeff', 'f8', 6),
('_um_len', 'u1'),
('um', 'a15'),
('_x2', 'u1'),
('binarypic', 'b1'),
('slicestart', 'i2'),
('sliceend', 'i2'),
('scalemagnification', 'f4'),
('nslices', 'i2'),
('slicespacing', 'f4'),
('currentslice', 'i2'),
('frameinterval', 'f4'),
('pixelaspectratio', 'f4'),
('colorstart', 'i2'),
('colorend', 'i2'),
('ncolors', 'i2'),
('fill1', '3u2'),
('fill2', '3u2'),
('colortable_t', 'u1'), # NIH_COLORTABLE_TYPE
('lutmode_t', 'u1'), # NIH_LUTMODE_TYPE
('invertedtable', 'b1'),
('zeroclip', 'b1'),
('_xunit_len', 'u1'),
('xunit', 'a11'),
('stacktype_t', 'i2'), # NIH_STACKTYPE_TYPE
]
NIH_COLORTABLE_TYPE = (
'CustomTable', 'AppleDefault', 'Pseudo20', 'Pseudo32', 'Rainbow',
'Fire1', 'Fire2', 'Ice', 'Grays', 'Spectrum')
NIH_LUTMODE_TYPE = (
'PseudoColor', 'OldAppleDefault', 'OldSpectrum', 'GrayScale',
'ColorLut', 'CustomGrayscale')
NIH_CURVEFIT_TYPE = (
'StraightLine', 'Poly2', 'Poly3', 'Poly4', 'Poly5', 'ExpoFit',
'PowerFit', 'LogFit', 'RodbardFit', 'SpareFit1', 'Uncalibrated',
'UncalibratedOD')
NIH_UNITS_TYPE = (
'Nanometers', 'Micrometers', 'Millimeters', 'Centimeters', 'Meters',
'Kilometers', 'Inches', 'Feet', 'Miles', 'Pixels', 'OtherUnits')
NIH_STACKTYPE_TYPE = (
'VolumeStack', 'RGBStack', 'MovieStack', 'HSVStack')
# Map Universal Imaging Corporation MetaMorph internal tag ids to name and type
UIC_TAGS = {
0: ('auto_scale', int),
1: ('min_scale', int),
2: ('max_scale', int),
3: ('spatial_calibration', int),
4: ('x_calibration', Fraction),
5: ('y_calibration', Fraction),
6: ('calibration_units', str),
7: ('name', str),
8: ('thresh_state', int),
9: ('thresh_state_red', int),
10: ('tagid_10', None), # undefined
11: ('thresh_state_green', int),
12: ('thresh_state_blue', int),
13: ('thresh_state_lo', int),
14: ('thresh_state_hi', int),
15: ('zoom', int),
16: ('create_time', julian_datetime),
17: ('last_saved_time', julian_datetime),
18: ('current_buffer', int),
19: ('gray_fit', None),
20: ('gray_point_count', None),
21: ('gray_x', Fraction),
22: ('gray_y', Fraction),
23: ('gray_min', Fraction),
24: ('gray_max', Fraction),
25: ('gray_unit_name', str),
26: ('standard_lut', int),
27: ('wavelength', int),
28: ('stage_position', '(%i,2,2)u4'), # N xy positions as fractions
29: ('camera_chip_offset', '(%i,2,2)u4'), # N xy offsets as fractions
30: ('overlay_mask', None),
31: ('overlay_compress', None),
32: ('overlay', None),
33: ('special_overlay_mask', None),
34: ('special_overlay_compress', None),
35: ('special_overlay', None),
36: ('image_property', read_uic_image_property),
37: ('stage_label', '%ip'), # N str
38: ('autoscale_lo_info', Fraction),
39: ('autoscale_hi_info', Fraction),
40: ('absolute_z', '(%i,2)u4'), # N fractions
41: ('absolute_z_valid', '(%i,)u4'), # N long
42: ('gamma', int),
43: ('gamma_red', int),
44: ('gamma_green', int),
45: ('gamma_blue', int),
46: ('camera_bin', int),
47: ('new_lut', int),
48: ('image_property_ex', None),
49: ('plane_property', int),
50: ('user_lut_table', '(256,3)u1'),
51: ('red_autoscale_info', int),
52: ('red_autoscale_lo_info', Fraction),
53: ('red_autoscale_hi_info', Fraction),
54: ('red_minscale_info', int),
55: ('red_maxscale_info', int),
56: ('green_autoscale_info', int),
57: ('green_autoscale_lo_info', Fraction),
58: ('green_autoscale_hi_info', Fraction),
59: ('green_minscale_info', int),
60: ('green_maxscale_info', int),
61: ('blue_autoscale_info', int),
62: ('blue_autoscale_lo_info', Fraction),
63: ('blue_autoscale_hi_info', Fraction),
64: ('blue_min_scale_info', int),
65: ('blue_max_scale_info', int),
# 66: ('overlay_plane_color', read_uic_overlay_plane_color),
}
# Olympus FluoView
MM_DIMENSION = [
('name', 'a16'),
('size', 'i4'),
('origin', 'f8'),
('resolution', 'f8'),
('unit', 'a64'),
]
MM_HEADER = [
('header_flag', 'i2'),
('image_type', 'u1'),
('image_name', 'a257'),
('offset_data', 'u4'),
('palette_size', 'i4'),
('offset_palette0', 'u4'),
('offset_palette1', 'u4'),
('comment_size', 'i4'),
('offset_comment', 'u4'),
('dimensions', MM_DIMENSION, 10),
('offset_position', 'u4'),
('map_type', 'i2'),
('map_min', 'f8'),
('map_max', 'f8'),
('min_value', 'f8'),
('max_value', 'f8'),
('offset_map', 'u4'),
('gamma', 'f8'),
('offset', 'f8'),
('gray_channel', MM_DIMENSION),
('offset_thumbnail', 'u4'),
('voice_field', 'i4'),
('offset_voice_field', 'u4'),
]
# Carl Zeiss LSM
CZ_LSM_INFO = [
('magic_number', 'u4'),
('structure_size', 'i4'),
('dimension_x', 'i4'),
('dimension_y', 'i4'),
('dimension_z', 'i4'),
('dimension_channels', 'i4'),
('dimension_time', 'i4'),
('data_type', 'i4'), # CZ_DATA_TYPES
('thumbnail_x', 'i4'),
('thumbnail_y', 'i4'),
('voxel_size_x', 'f8'),
('voxel_size_y', 'f8'),
('voxel_size_z', 'f8'),
('origin_x', 'f8'),
('origin_y', 'f8'),
('origin_z', 'f8'),
('scan_type', 'u2'),
('spectral_scan', 'u2'),
('type_of_data', 'u4'), # CZ_TYPE_OF_DATA
('offset_vector_overlay', 'u4'),
('offset_input_lut', 'u4'),
('offset_output_lut', 'u4'),
('offset_channel_colors', 'u4'),
('time_interval', 'f8'),
('offset_channel_data_types', 'u4'),
('offset_scan_info', 'u4'), # CZ_LSM_SCAN_INFO
('offset_ks_data', 'u4'),
('offset_time_stamps', 'u4'),
('offset_event_list', 'u4'),
('offset_roi', 'u4'),
('offset_bleach_roi', 'u4'),
('offset_next_recording', 'u4'),
# LSM 2.0 ends here
('display_aspect_x', 'f8'),
('display_aspect_y', 'f8'),
('display_aspect_z', 'f8'),
('display_aspect_time', 'f8'),
('offset_mean_of_roi_overlay', 'u4'),
('offset_topo_isoline_overlay', 'u4'),
('offset_topo_profile_overlay', 'u4'),
('offset_linescan_overlay', 'u4'),
('offset_toolbar_flags', 'u4'),
('offset_channel_wavelength', 'u4'),
('offset_channel_factors', 'u4'),
('objective_sphere_correction', 'f8'),
('offset_unmix_parameters', 'u4'),
# LSM 3.2, 4.0 end here
('offset_acquisition_parameters', 'u4'),
('offset_characteristics', 'u4'),
('offset_palette', 'u4'),
('time_difference_x', 'f8'),
('time_difference_y', 'f8'),
('time_difference_z', 'f8'),
('internal_use_1', 'u4'),
('dimension_p', 'i4'),
('dimension_m', 'i4'),
('dimensions_reserved', '16i4'),
('offset_tile_positions', 'u4'),
('reserved_1', '9u4'),
('offset_positions', 'u4'),
('reserved_2', '21u4'), # must be 0
]
# Import functions for LSM_INFO sub-records
CZ_LSM_INFO_READERS = {
'scan_info': read_cz_lsm_scan_info,
'time_stamps': read_cz_lsm_time_stamps,
'event_list': read_cz_lsm_event_list,
'channel_colors': read_cz_lsm_floatpairs,
'positions': read_cz_lsm_floatpairs,
'tile_positions': read_cz_lsm_floatpairs,
}
# Map cz_lsm_info.scan_type to dimension order
CZ_SCAN_TYPES = {
0: 'XYZCT', # x-y-z scan
1: 'XYZCT', # z scan (x-z plane)
2: 'XYZCT', # line scan
3: 'XYTCZ', # time series x-y
4: 'XYZTC', # time series x-z
5: 'XYTCZ', # time series 'Mean of ROIs'
6: 'XYZTC', # time series x-y-z
7: 'XYCTZ', # spline scan
8: 'XYCZT', # spline scan x-z
9: 'XYTCZ', # time series spline plane x-z
10: 'XYZCT', # point mode
}
# Map dimension codes to cz_lsm_info attribute
CZ_DIMENSIONS = {
'X': 'dimension_x',
'Y': 'dimension_y',
'Z': 'dimension_z',
'C': 'dimension_channels',
'T': 'dimension_time',
}
# Description of cz_lsm_info.data_type
CZ_DATA_TYPES = {
0: 'varying data types',
1: '8 bit unsigned integer',
2: '12 bit unsigned integer',
5: '32 bit float',
}
# Description of cz_lsm_info.type_of_data
CZ_TYPE_OF_DATA = {
0: 'Original scan data',
1: 'Calculated data',
2: '3D reconstruction',
3: 'Topography height map',
}
CZ_LSM_SCAN_INFO_ARRAYS = {
0x20000000: "tracks",
0x30000000: "lasers",
0x60000000: "detection_channels",
0x80000000: "illumination_channels",
0xa0000000: "beam_splitters",
0xc0000000: "data_channels",
0x11000000: "timers",
0x13000000: "markers",
}
CZ_LSM_SCAN_INFO_STRUCTS = {
# 0x10000000: "recording",
0x40000000: "track",
0x50000000: "laser",
0x70000000: "detection_channel",
0x90000000: "illumination_channel",
0xb0000000: "beam_splitter",
0xd0000000: "data_channel",
0x12000000: "timer",
0x14000000: "marker",
}
CZ_LSM_SCAN_INFO_ATTRIBUTES = {
# recording
0x10000001: "name",
0x10000002: "description",
0x10000003: "notes",
0x10000004: "objective",
0x10000005: "processing_summary",
0x10000006: "special_scan_mode",
0x10000007: "scan_type",
0x10000008: "scan_mode",
0x10000009: "number_of_stacks",
0x1000000a: "lines_per_plane",
0x1000000b: "samples_per_line",
0x1000000c: "planes_per_volume",
0x1000000d: "images_width",
0x1000000e: "images_height",
0x1000000f: "images_number_planes",
0x10000010: "images_number_stacks",
0x10000011: "images_number_channels",
0x10000012: "linscan_xy_size",
0x10000013: "scan_direction",
0x10000014: "time_series",
0x10000015: "original_scan_data",
0x10000016: "zoom_x",
0x10000017: "zoom_y",
0x10000018: "zoom_z",
0x10000019: "sample_0x",
0x1000001a: "sample_0y",
0x1000001b: "sample_0z",
0x1000001c: "sample_spacing",
0x1000001d: "line_spacing",
0x1000001e: "plane_spacing",
0x1000001f: "plane_width",
0x10000020: "plane_height",
0x10000021: "volume_depth",
0x10000023: "nutation",
0x10000034: "rotation",
0x10000035: "precession",
0x10000036: "sample_0time",
0x10000037: "start_scan_trigger_in",
0x10000038: "start_scan_trigger_out",
0x10000039: "start_scan_event",
0x10000040: "start_scan_time",
0x10000041: "stop_scan_trigger_in",
0x10000042: "stop_scan_trigger_out",
0x10000043: "stop_scan_event",
0x10000044: "stop_scan_time",
0x10000045: "use_rois",
0x10000046: "use_reduced_memory_rois",
0x10000047: "user",
0x10000048: "use_bc_correction",
0x10000049: "position_bc_correction1",
0x10000050: "position_bc_correction2",
0x10000051: "interpolation_y",
0x10000052: "camera_binning",
0x10000053: "camera_supersampling",
0x10000054: "camera_frame_width",
0x10000055: "camera_frame_height",
0x10000056: "camera_offset_x",
0x10000057: "camera_offset_y",
0x10000059: "rt_binning",
0x1000005a: "rt_frame_width",
0x1000005b: "rt_frame_height",
0x1000005c: "rt_region_width",
0x1000005d: "rt_region_height",
0x1000005e: "rt_offset_x",
0x1000005f: "rt_offset_y",
0x10000060: "rt_zoom",
0x10000061: "rt_line_period",
0x10000062: "prescan",
0x10000063: "scan_direction_z",
# track
0x40000001: "multiplex_type", # 0 after line; 1 after frame
0x40000002: "multiplex_order",
0x40000003: "sampling_mode", # 0 sample; 1 line average; 2 frame average
0x40000004: "sampling_method", # 1 mean; 2 sum
0x40000005: "sampling_number",
0x40000006: "acquire",
0x40000007: "sample_observation_time",
0x4000000b: "time_between_stacks",
0x4000000c: "name",
0x4000000d: "collimator1_name",
0x4000000e: "collimator1_position",
0x4000000f: "collimator2_name",
0x40000010: "collimator2_position",
0x40000011: "is_bleach_track",
0x40000012: "is_bleach_after_scan_number",
0x40000013: "bleach_scan_number",
0x40000014: "trigger_in",
0x40000015: "trigger_out",
0x40000016: "is_ratio_track",
0x40000017: "bleach_count",
0x40000018: "spi_center_wavelength",
0x40000019: "pixel_time",
0x40000021: "condensor_frontlens",
0x40000023: "field_stop_value",
0x40000024: "id_condensor_aperture",
0x40000025: "condensor_aperture",
0x40000026: "id_condensor_revolver",
0x40000027: "condensor_filter",
0x40000028: "id_transmission_filter1",
0x40000029: "id_transmission1",
0x40000030: "id_transmission_filter2",
0x40000031: "id_transmission2",
0x40000032: "repeat_bleach",
0x40000033: "enable_spot_bleach_pos",
0x40000034: "spot_bleach_posx",
0x40000035: "spot_bleach_posy",
0x40000036: "spot_bleach_posz",
0x40000037: "id_tubelens",
0x40000038: "id_tubelens_position",
0x40000039: "transmitted_light",
0x4000003a: "reflected_light",
0x4000003b: "simultan_grab_and_bleach",
0x4000003c: "bleach_pixel_time",
# laser
0x50000001: "name",
0x50000002: "acquire",
0x50000003: "power",
# detection_channel
0x70000001: "integration_mode",
0x70000002: "special_mode",
0x70000003: "detector_gain_first",
0x70000004: "detector_gain_last",
0x70000005: "amplifier_gain_first",
0x70000006: "amplifier_gain_last",
0x70000007: "amplifier_offs_first",
0x70000008: "amplifier_offs_last",
0x70000009: "pinhole_diameter",
0x7000000a: "counting_trigger",
0x7000000b: "acquire",
0x7000000c: "point_detector_name",
0x7000000d: "amplifier_name",
0x7000000e: "pinhole_name",
0x7000000f: "filter_set_name",
0x70000010: "filter_name",
0x70000013: "integrator_name",
0x70000014: "channel_name",
0x70000015: "detector_gain_bc1",
0x70000016: "detector_gain_bc2",
0x70000017: "amplifier_gain_bc1",
0x70000018: "amplifier_gain_bc2",
0x70000019: "amplifier_offset_bc1",
0x70000020: "amplifier_offset_bc2",
0x70000021: "spectral_scan_channels",
0x70000022: "spi_wavelength_start",
0x70000023: "spi_wavelength_stop",
0x70000026: "dye_name",
0x70000027: "dye_folder",
# illumination_channel
0x90000001: "name",
0x90000002: "power",
0x90000003: "wavelength",
0x90000004: "aquire",
0x90000005: "detchannel_name",
0x90000006: "power_bc1",
0x90000007: "power_bc2",
# beam_splitter
0xb0000001: "filter_set",
0xb0000002: "filter",
0xb0000003: "name",
# data_channel
0xd0000001: "name",
0xd0000003: "acquire",
0xd0000004: "color",
0xd0000005: "sample_type",
0xd0000006: "bits_per_sample",
0xd0000007: "ratio_type",
0xd0000008: "ratio_track1",
0xd0000009: "ratio_track2",
0xd000000a: "ratio_channel1",
0xd000000b: "ratio_channel2",
0xd000000c: "ratio_const1",
0xd000000d: "ratio_const2",
0xd000000e: "ratio_const3",
0xd000000f: "ratio_const4",
0xd0000010: "ratio_const5",
0xd0000011: "ratio_const6",
0xd0000012: "ratio_first_images1",
0xd0000013: "ratio_first_images2",
0xd0000014: "dye_name",
0xd0000015: "dye_folder",
0xd0000016: "spectrum",
0xd0000017: "acquire",
# timer
0x12000001: "name",
0x12000002: "description",
0x12000003: "interval",
0x12000004: "trigger_in",
0x12000005: "trigger_out",
0x12000006: "activation_time",
0x12000007: "activation_number",
# marker
0x14000001: "name",
0x14000002: "description",
0x14000003: "trigger_in",
0x14000004: "trigger_out",
}
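# Note (informal): in the codes above, the high-order byte selects the entity
# (0x10 recording, 0x40 track, 0x50 laser, 0x70 detection channel, ...) and
# the low bytes select the attribute within it, e.g.:
#     CZ_LSM_SCAN_INFO_ATTRIBUTES[0x50000001]    # -> 'name' (of a laser)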
# Map TIFF tag code to attribute name, default value, type, count, validator
TIFF_TAGS = {
254: ('new_subfile_type', 0, 4, 1, TIFF_SUBFILE_TYPES()),
255: ('subfile_type', None, 3, 1,
{0: 'undefined', 1: 'image', 2: 'reduced_image', 3: 'page'}),
256: ('image_width', None, 4, 1, None),
257: ('image_length', None, 4, 1, None),
258: ('bits_per_sample', 1, 3, 1, None),
259: ('compression', 1, 3, 1, TIFF_COMPESSIONS),
262: ('photometric', None, 3, 1, TIFF_PHOTOMETRICS),
266: ('fill_order', 1, 3, 1, {1: 'msb2lsb', 2: 'lsb2msb'}),
269: ('document_name', None, 2, None, None),
270: ('image_description', None, 2, None, None),
271: ('make', None, 2, None, None),
272: ('model', None, 2, None, None),
273: ('strip_offsets', None, 4, None, None),
274: ('orientation', 1, 3, 1, TIFF_ORIENTATIONS),
277: ('samples_per_pixel', 1, 3, 1, None),
278: ('rows_per_strip', 2 ** 32 - 1, 4, 1, None),
279: ('strip_byte_counts', None, 4, None, None),
280: ('min_sample_value', None, 3, None, None),
281: ('max_sample_value', None, 3, None, None), # 2**bits_per_sample
282: ('x_resolution', None, 5, 1, None),
283: ('y_resolution', None, 5, 1, None),
284: ('planar_configuration', 1, 3, 1, {1: 'contig', 2: 'separate'}),
285: ('page_name', None, 2, None, None),
286: ('x_position', None, 5, 1, None),
287: ('y_position', None, 5, 1, None),
296: ('resolution_unit', 2, 4, 1, {1: 'none', 2: 'inch', 3: 'centimeter'}),
297: ('page_number', None, 3, 2, None),
305: ('software', None, 2, None, None),
306: ('datetime', None, 2, None, None),
315: ('artist', None, 2, None, None),
316: ('host_computer', None, 2, None, None),
317: ('predictor', 1, 3, 1, {1: None, 2: 'horizontal'}),
318: ('white_point', None, 5, 2, None),
319: ('primary_chromaticities', None, 5, 6, None),
320: ('color_map', None, 3, None, None),
322: ('tile_width', None, 4, 1, None),
323: ('tile_length', None, 4, 1, None),
324: ('tile_offsets', None, 4, None, None),
325: ('tile_byte_counts', None, 4, None, None),
338: ('extra_samples', None, 3, None,
{0: 'unspecified', 1: 'assocalpha', 2: 'unassalpha'}),
339: ('sample_format', 1, 3, 1, TIFF_SAMPLE_FORMATS),
340: ('smin_sample_value', None, None, None, None),
341: ('smax_sample_value', None, None, None, None),
347: ('jpeg_tables', None, 7, None, None),
530: ('ycbcr_subsampling', 1, 3, 2, None),
531: ('ycbcr_positioning', 1, 3, 1, None),
32996: ('sgi_datatype', None, None, 1, None), # use sample_format
32997: ('image_depth', None, 4, 1, None),
32998: ('tile_depth', None, 4, 1, None),
33432: ('copyright', None, 1, None, None),
33445: ('md_file_tag', None, 4, 1, None),
33446: ('md_scale_pixel', None, 5, 1, None),
33447: ('md_color_table', None, 3, None, None),
33448: ('md_lab_name', None, 2, None, None),
33449: ('md_sample_info', None, 2, None, None),
33450: ('md_prep_date', None, 2, None, None),
33451: ('md_prep_time', None, 2, None, None),
33452: ('md_file_units', None, 2, None, None),
33550: ('model_pixel_scale', None, 12, 3, None),
33922: ('model_tie_point', None, 12, None, None),
34665: ('exif_ifd', None, None, 1, None),
34735: ('geo_key_directory', None, 3, None, None),
34736: ('geo_double_params', None, 12, None, None),
34737: ('geo_ascii_params', None, 2, None, None),
34853: ('gps_ifd', None, None, 1, None),
37510: ('user_comment', None, None, None, None),
42112: ('gdal_metadata', None, 2, None, None),
42113: ('gdal_nodata', None, 2, None, None),
50289: ('mc_xy_position', None, 12, 2, None),
50290: ('mc_z_position', None, 12, 1, None),
50291: ('mc_xy_calibration', None, 12, 3, None),
50292: ('mc_lens_lem_na_n', None, 12, 3, None),
50293: ('mc_channel_name', None, 1, None, None),
50294: ('mc_ex_wavelength', None, 12, 1, None),
50295: ('mc_time_stamp', None, 12, 1, None),
50838: ('imagej_byte_counts', None, None, None, None),
65200: ('flex_xml', None, 2, None, None),
# code: (attribute name, default value, type, count, validator)
}
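# Sketch of how a TIFF_TAGS entry might be consumed (the unpacked names below
# are illustrative, not part of this module):
#     name, default, dtype, count, validator = TIFF_TAGS[259]
#     # -> ('compression', 1, 3, 1, TIFF_COMPESSIONS)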
# Map custom TIFF tag codes to attribute names and import functions
CUSTOM_TAGS = {
700: ('xmp', read_bytes),
34377: ('photoshop', read_numpy),
33723: ('iptc', read_bytes),
34675: ('icc_profile', read_bytes),
33628: ('uic1tag', read_uic1tag), # Universal Imaging Corporation STK
33629: ('uic2tag', read_uic2tag),
33630: ('uic3tag', read_uic3tag),
33631: ('uic4tag', read_uic4tag),
34361: ('mm_header', read_mm_header), # Olympus FluoView
34362: ('mm_stamp', read_mm_stamp),
34386: ('mm_user_block', read_bytes),
34412: ('cz_lsm_info', read_cz_lsm_info), # Carl Zeiss LSM
43314: ('nih_image_header', read_nih_image_header),
# 40001: ('mc_ipwinscal', read_bytes),
40100: ('mc_id_old', read_bytes),
50288: ('mc_id', read_bytes),
50296: ('mc_frame_properties', read_bytes),
50839: ('imagej_metadata', read_bytes),
51123: ('micromanager_metadata', read_json),
}
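# Dispatch sketch (illustrative): a tag reader could route a custom tag code
# through CUSTOM_TAGS; `code` and `read_args` are hypothetical names here:
#     if code in CUSTOM_TAGS:
#         name, reader = CUSTOM_TAGS[code]
#         value = reader(*read_args)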
# Max line length of printed output
PRINT_LINE_LEN = 79
def imshow(data, title=None, vmin=0, vmax=None, cmap=None,
bitspersample=None, photometric='rgb', interpolation='nearest',
dpi=96, figure=None, subplot=111, maxdim=8192, **kwargs):
"""Plot n-dimensional images using matplotlib.pyplot.
Return figure, subplot and plot axis.
    Requires pyplot to have been imported already: ``from matplotlib import pyplot``.
Parameters
----------
bitspersample : int or None
Number of bits per channel in integer RGB images.
photometric : {'miniswhite', 'minisblack', 'rgb', or 'palette'}
The color space of the image data.
title : str
Window and subplot title.
    figure : matplotlib.figure.Figure (optional)
        Matplotlib figure to use for plotting.
subplot : int
A matplotlib.pyplot.subplot axis.
    maxdim : int
        Maximum image size in any dimension.
kwargs : optional
Arguments for matplotlib.pyplot.imshow.
"""
# if photometric not in ('miniswhite', 'minisblack', 'rgb', 'palette'):
# raise ValueError("Can't handle %s photometrics" % photometric)
# TODO: handle photometric == 'separated' (CMYK)
isrgb = photometric in ('rgb', 'palette')
data = numpy.atleast_2d(data.squeeze())
data = data[(slice(0, maxdim), ) * len(data.shape)]
dims = data.ndim
if dims < 2:
raise ValueError("not an image")
elif dims == 2:
dims = 0
isrgb = False
else:
if isrgb and data.shape[-3] in (3, 4):
data = numpy.swapaxes(data, -3, -2)
data = numpy.swapaxes(data, -2, -1)
elif not isrgb and (data.shape[-1] < data.shape[-2] // 16 and
data.shape[-1] < data.shape[-3] // 16 and
data.shape[-1] < 5):
data = numpy.swapaxes(data, -3, -1)
data = numpy.swapaxes(data, -2, -1)
isrgb = isrgb and data.shape[-1] in (3, 4)
dims -= 3 if isrgb else 2
if photometric == 'palette' and isrgb:
datamax = data.max()
if datamax > 255:
data >>= 8 # possible precision loss
data = data.astype('B')
elif data.dtype.kind in 'ui':
if not (isrgb and data.dtype.itemsize <= 1) or bitspersample is None:
try:
bitspersample = int(math.ceil(math.log(data.max(), 2)))
except Exception:
bitspersample = data.dtype.itemsize * 8
elif not isinstance(bitspersample, int):
# bitspersample can be tuple, e.g. (5, 6, 5)
bitspersample = data.dtype.itemsize * 8
datamax = 2 ** bitspersample
if isrgb:
if bitspersample < 8:
data <<= 8 - bitspersample
elif bitspersample > 8:
data >>= bitspersample - 8 # precision loss
data = data.astype('B')
elif data.dtype.kind == 'f':
datamax = data.max()
if isrgb and datamax > 1.0:
if data.dtype.char == 'd':
data = data.astype('f')
data /= datamax
elif data.dtype.kind == 'b':
datamax = 1
elif data.dtype.kind == 'c':
raise NotImplementedError("complex type") # TODO: handle complex types
if not isrgb:
if vmax is None:
vmax = datamax
if vmin is None:
if data.dtype.kind == 'i':
dtmin = numpy.iinfo(data.dtype).min
vmin = numpy.min(data)
if vmin == dtmin:
                    vmin = numpy.min(data[data > dtmin])
if data.dtype.kind == 'f':
dtmin = numpy.finfo(data.dtype).min
vmin = numpy.min(data)
if vmin == dtmin:
                    vmin = numpy.min(data[data > dtmin])
else:
vmin = 0
pyplot = sys.modules['matplotlib.pyplot']
if figure is None:
pyplot.rc('font', family='sans-serif', weight='normal', size=8)
figure = pyplot.figure(dpi=dpi, figsize=(10.3, 6.3), frameon=True,
facecolor='1.0', edgecolor='w')
try:
figure.canvas.manager.window.title(title)
except Exception:
pass
pyplot.subplots_adjust(bottom=0.03 * (dims + 2), top=0.9,
left=0.1, right=0.95, hspace=0.05, wspace=0.0)
subplot = pyplot.subplot(subplot)
if title:
try:
title = str(title, 'Windows-1252')
except TypeError:
pass
pyplot.title(title, size=11)
if cmap is None:
if data.dtype.kind in 'ubf' or vmin == 0:
cmap = 'cubehelix'
else:
cmap = 'coolwarm'
if photometric == 'miniswhite':
cmap += '_r'
image = pyplot.imshow(data[(0, ) * dims].squeeze(), vmin=vmin, vmax=vmax,
cmap=cmap, interpolation=interpolation, **kwargs)
if not isrgb:
pyplot.colorbar() # panchor=(0.55, 0.5), fraction=0.05
def format_coord(x, y):
# callback function to format coordinate display in toolbar
x = int(x + 0.5)
y = int(y + 0.5)
try:
if dims:
return "%s @ %s [%4i, %4i]" % (cur_ax_dat[1][y, x],
current, x, y)
else:
return "%s @ [%4i, %4i]" % (data[y, x], x, y)
except IndexError:
return ""
pyplot.gca().format_coord = format_coord
if dims:
current = list((0, ) * dims)
cur_ax_dat = [0, data[tuple(current)].squeeze()]
sliders = [pyplot.Slider(
pyplot.axes([0.125, 0.03 * (axis + 1), 0.725, 0.025]),
'Dimension %i' % axis, 0, data.shape[axis] - 1, 0, facecolor='0.5',
valfmt='%%.0f [%i]' % data.shape[axis]) for axis in range(dims)]
for slider in sliders:
slider.drawon = False
def set_image(current, sliders=sliders, data=data):
# change image and redraw canvas
cur_ax_dat[1] = data[tuple(current)].squeeze()
image.set_data(cur_ax_dat[1])
for ctrl, index in zip(sliders, current):
ctrl.eventson = False
ctrl.set_val(index)
ctrl.eventson = True
figure.canvas.draw()
def on_changed(index, axis, data=data, current=current):
# callback function for slider change event
index = int(round(index))
cur_ax_dat[0] = axis
if index == current[axis]:
return
if index >= data.shape[axis]:
index = 0
elif index < 0:
index = data.shape[axis] - 1
current[axis] = index
set_image(current)
def on_keypressed(event, data=data, current=current):
# callback function for key press event
key = event.key
axis = cur_ax_dat[0]
if str(key) in '0123456789':
on_changed(key, axis)
elif key == 'right':
on_changed(current[axis] + 1, axis)
elif key == 'left':
on_changed(current[axis] - 1, axis)
elif key == 'up':
cur_ax_dat[0] = 0 if axis == len(data.shape) - 1 else axis + 1
elif key == 'down':
cur_ax_dat[0] = len(data.shape) - 1 if axis == 0 else axis - 1
elif key == 'end':
on_changed(data.shape[axis] - 1, axis)
elif key == 'home':
on_changed(0, axis)
figure.canvas.mpl_connect('key_press_event', on_keypressed)
for axis, ctrl in enumerate(sliders):
ctrl.on_changed(lambda k, a=axis: on_changed(k, a))
return figure, subplot, image
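# Minimal usage sketch for imshow (assumes matplotlib is installed; the file
# name 'example.tif' is hypothetical):
#     from matplotlib import pyplot
#     data = TiffFile('example.tif').asarray()
#     figure, subplot, image = imshow(data, title='example.tif')
#     pyplot.show()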
def _app_show():
"""Block the GUI. For use as skimage plugin."""
pyplot = sys.modules['matplotlib.pyplot']
pyplot.show()
def main(argv=None):
"""Command line usage main function."""
    if sys.version_info < (2, 6):
print("This script requires Python version 2.6 or better.")
print("This is Python version %s" % sys.version)
return 0
if argv is None:
argv = sys.argv
import optparse
parser = optparse.OptionParser(
usage="usage: %prog [options] path",
description="Display image data in TIFF files.",
version="%%prog %s" % __version__)
opt = parser.add_option
opt('-p', '--page', dest='page', type='int', default=-1,
help="display single page")
opt('-s', '--series', dest='series', type='int', default=-1,
help="display series of pages of same shape")
opt('--nomultifile', dest='nomultifile', action='store_true',
default=False, help="don't read OME series from multiple files")
opt('--noplot', dest='noplot', action='store_true', default=False,
help="don't display images")
opt('--interpol', dest='interpol', metavar='INTERPOL', default='bilinear',
help="image interpolation method")
opt('--dpi', dest='dpi', type='int', default=96,
help="set plot resolution")
opt('--debug', dest='debug', action='store_true', default=False,
help="raise exception on failures")
    opt('--test', dest='test', action='store_true', default=False,
        help="try to read all images in path")
    opt('--doctest', dest='doctest', action='store_true', default=False,
        help="run the docstring examples")
opt('-v', '--verbose', dest='verbose', action='store_true', default=True)
opt('-q', '--quiet', dest='verbose', action='store_false')
settings, path = parser.parse_args()
path = ' '.join(path)
if settings.doctest:
import doctest
doctest.testmod()
return 0
if not path:
parser.error("No file specified")
if settings.test:
test_tifffile(path, settings.verbose)
return 0
if any(i in path for i in '?*'):
path = glob.glob(path)
if not path:
print('no files match the pattern')
return 0
# TODO: handle image sequences
# if len(path) == 1:
path = path[0]
print("Reading file structure...", end=' ')
start = time.time()
try:
tif = TiffFile(path, multifile=not settings.nomultifile)
except Exception as e:
if settings.debug:
raise
else:
print("\n", e)
sys.exit(0)
print("%.3f ms" % ((time.time() - start) * 1e3))
if tif.is_ome:
settings.norgb = True
images = [(None, tif[0 if settings.page < 0 else settings.page])]
if not settings.noplot:
print("Reading image data... ", end=' ')
def notnone(x):
return next(i for i in x if i is not None)
start = time.time()
try:
if settings.page >= 0:
images = [(tif.asarray(key=settings.page),
tif[settings.page])]
elif settings.series >= 0:
images = [(tif.asarray(series=settings.series),
notnone(tif.series[settings.series].pages))]
else:
images = []
for i, s in enumerate(tif.series):
try:
images.append(
(tif.asarray(series=i), notnone(s.pages)))
except ValueError as e:
images.append((None, notnone(s.pages)))
if settings.debug:
raise
else:
print("\n* series %i failed: %s... " % (i, e),
end='')
print("%.3f ms" % ((time.time() - start) * 1e3))
except Exception as e:
if settings.debug:
raise
else:
print(e)
tif.close()
print("\nTIFF file:", tif)
print()
for i, s in enumerate(tif.series):
print("Series %i" % i)
print(s)
print()
for i, page in images:
print(page)
print(page.tags)
if page.is_palette:
print("\nColor Map:", page.color_map.shape, page.color_map.dtype)
for attr in ('cz_lsm_info', 'cz_lsm_scan_info', 'uic_tags',
'mm_header', 'imagej_tags', 'micromanager_metadata',
'nih_image_header'):
if hasattr(page, attr):
print("", attr.upper(), Record(getattr(page, attr)), sep="\n")
print()
if page.is_micromanager:
print('MICROMANAGER_FILE_METADATA')
print(Record(tif.micromanager_metadata))
if images and not settings.noplot:
try:
import matplotlib
matplotlib.use('TkAgg')
from matplotlib import pyplot
except ImportError as e:
warnings.warn("failed to import matplotlib.\n%s" % e)
else:
for img, page in images:
if img is None:
continue
vmin, vmax = None, None
if 'gdal_nodata' in page.tags:
try:
vmin = numpy.min(img[img > float(page.gdal_nodata)])
except ValueError:
pass
if page.is_stk:
try:
vmin = page.uic_tags['min_scale']
vmax = page.uic_tags['max_scale']
except KeyError:
pass
else:
if vmax <= vmin:
vmin, vmax = None, None
title = "%s\n %s" % (str(tif), str(page))
imshow(img, title=title, vmin=vmin, vmax=vmax,
bitspersample=page.bits_per_sample,
photometric=page.photometric,
interpolation=settings.interpol,
dpi=settings.dpi)
pyplot.show()
TIFFfile = TiffFile # backwards compatibility
if sys.version_info[0] > 2:
basestring = str, bytes
unicode = str
if __name__ == "__main__":
sys.exit(main())
| to266/hyperspy | hyperspy/external/tifffile.py | Python | gpl-3.0 | 172,696 |
import socketserver
from configparser import ConfigParser
"""
Byte
[1] = Action 255 items
"""
class BOBServer(socketserver.BaseRequestHandler):
"""
The request handler class for our server.
It is instantiated once per connection to the server, and must
override the handle() method to implement communication to the
client.
"""
config_filename = 'bobsrv-config.ini'
def handle(self):
# self.request is the TCP socket connected to the client
self.data = self.request.recv(1024)
print("{} wrote:".format(self.client_address[0]))
print(self.data)
        # echo the same data back to the client
        self.request.sendall(self.data)
    @staticmethod
    def create_init_configfile():
        config = ConfigParser()
        # ConfigParser option values must be strings
        config['Server'] = {'port': '7766',
                            'host': 'localhost'}
        with open(BOBServer.config_filename, 'w') as configfile:
            config.write(configfile)
        return config
"""
Load's the server config file. If it cannot find one, it will create a basic one
"""
def load_config_file():
config = ConfigParser()
try:
config.read_file(open(BOBServer.config_filename))
return config
except FileNotFoundError:
return BOBServer.crete_init_configfile();
if __name__ == "__main__":
"""
Let's setup the configuration
"""
config = loadConfigFile()
HOST, PORT = config['server']['host'], config['server']['port']
# Create the server, binding to localhost on port 9999
with socketserver.TCPServer((HOST, PORT), BOBServer) as server:
# Activate the server; this will keep running until you
# interrupt the program with Ctrl-C
server.serve_forever()
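# Hypothetical client sketch (not part of the original file): sends one message
# to the echo server above and prints the reply; host and port match the
# defaults written by create_init_configfile().
#
#     import socket
#
#     with socket.create_connection(('localhost', 7766)) as sock:
#         sock.sendall(b'hello')
#         print(sock.recv(1024))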
| arkalon76/bob | bobserver.py | Python | gpl-3.0 | 1,846 |
"""
test-gvar.py
"""
# Copyright (c) 2012-20 G. Peter Lepage.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version (see <http://www.gnu.org/licenses/>).
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
import os
import unittest
import collections
import math
import pickle
import numpy
import numpy as np
import random
import warnings
import gvar as gv
from gvar import *
try:
import vegas
have_vegas = True
except ImportError:
    have_vegas = False
FAST = False
class ArrayTests(object):
def __init__(self):
pass
def assert_gvclose(self,x,y,rtol=1e-5,atol=1e-8,prt=False):
""" asserts that the means and sdevs of all x and y are close """
if hasattr(x,'keys') and hasattr(y,'keys'):
if sorted(x.keys())==sorted(y.keys()):
for k in x:
self.assert_gvclose(x[k],y[k],rtol=rtol,atol=atol)
return
else:
raise ValueError("x and y have mismatched keys")
self.assertSequenceEqual(np.shape(x),np.shape(y))
x = np.asarray(x).flat
y = np.asarray(y).flat
if prt:
print(np.array(x))
print(np.array(y))
for xi,yi in zip(x,y):
self.assertGreater(atol+rtol*abs(yi.mean),abs(xi.mean-yi.mean))
self.assertGreater(10*(atol+rtol*abs(yi.sdev)),abs(xi.sdev-yi.sdev))
def assert_arraysclose(self,x,y,rtol=1e-5,prt=False):
self.assertSequenceEqual(np.shape(x),np.shape(y))
x = np.array(x).flatten()
y = np.array(y).flatten()
max_val = max(np.abs(list(x)+list(y)))
max_rdiff = max(np.abs(x-y))/max_val
if prt:
print(x)
print(y)
print(max_val,max_rdiff,rtol)
self.assertAlmostEqual(max_rdiff,0.0,delta=rtol)
def assert_arraysequal(self,x,y):
self.assertSequenceEqual(np.shape(x),np.shape(y))
x = [float(xi) for xi in np.array(x).flatten()]
y = [float(yi) for yi in np.array(y).flatten()]
self.assertSequenceEqual(x,y)
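# Background sketch for readers of these tests (not used by the suite): GVars
# track correlations through arithmetic, which is what many assertions below
# exercise:
#     x = gv.gvar(1.0, 0.1)    # mean 1.0, sdev 0.1
#     y = 2 * x                # perfectly correlated with x
#     y - 2 * x                # -> 0(0); the correlated errors cancel exactly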
class test_svec(unittest.TestCase,ArrayTests):
def test_v(self):
""" svec svec.assign svec.toarray """
v = svec(3) # [1,2,0,0,3]
v.assign([1.,3.,2.],[0,4,1])
self.assert_arraysequal(v.toarray(),[1.,2.,0.,0.,3.])
def test_null_v(self):
""" svec(0) """
v = svec(0)
self.assertEqual(len(v.toarray()),0)
self.assertEqual(len(v.clone().toarray()),0)
self.assertEqual(len(v.mul(10.).toarray()),0)
u = svec(1)
u.assign([1],[0])
self.assertEqual(v.dot(u),0.0)
self.assertEqual(u.dot(v),0.0)
self.assert_arraysequal(u.add(v).toarray(),v.add(u).toarray())
def test_v_clone(self):
""" svec.clone """
v1 = svec(3) # [1,2,0,0,3]
v1.assign([1.,3.,2.],[0,4,1])
        v2 = v1.clone()  # starts as a copy of v1
self.assert_arraysequal(v1.toarray(),v2.toarray())
v2.assign([10.,20.,30.],[0,1,2])
self.assert_arraysequal(v2.toarray(),[10.,20.,30.])
def test_v_dot(self):
""" svec.dot """
v1 = svec(3) # [1,2,0,0,3]
v1.assign([1.,3.,2.],[0,4,1])
v2 = svec(2)
v2.assign([10.,20.],[1,4])
self.assertEqual(v1.dot(v2),v2.dot(v1))
self.assertEqual(v1.dot(v2),80.)
v1 = svec(3)
v1.assign([1,2,3],[0,1,2])
v2 = svec(2)
v2.assign([4,5],[3,4])
self.assertEqual(v1.dot(v2),v2.dot(v1))
self.assertEqual(v1.dot(v2),0.0)
def test_v_add(self):
""" svec.add """
v1 = svec(3) # [1,2,0,0,3]
v1.assign([1.,3.,2.],[0,4,1])
v2 = svec(2) # [0,10,0,0,20]
v2.assign([10.,20.],[1,4])
self.assert_arraysequal(v1.add(v2).toarray(),v2.add(v1).toarray())
self.assert_arraysequal(v1.add(v2).toarray(),[1,12,0,0,23])
self.assert_arraysequal(v1.add(v2,10,100).toarray(),[10.,1020.,0,0,2030.])
self.assert_arraysequal(v2.add(v1,100,10).toarray(),[10.,1020.,0,0,2030.])
v1 = svec(2) # overlapping
v1.assign([1,2],[0,1])
v2.assign([3,4],[1,2])
self.assert_arraysequal(v1.add(v2,5,7).toarray(),[5.,31.,28.])
self.assert_arraysequal(v2.add(v1,7,5).toarray(),[5.,31.,28.])
v1 = svec(3)
v2 = svec(3)
v1.assign([1,2,3],[0,1,2])
v2.assign([10,20,30],[1,2,3])
self.assert_arraysequal(v1.add(v2,5,7).toarray(),[5.,80.,155.,210.])
self.assert_arraysequal(v2.add(v1,7,5).toarray(),[5.,80.,155.,210.])
v1 = svec(2)
v2 = svec(2)
v1.assign([1,2],[0,1]) # non-overlapping
v2.assign([3,4],[2,3])
self.assert_arraysequal(v1.add(v2,5,7).toarray(),[5.,10.,21.,28.])
self.assert_arraysequal(v2.add(v1,7,5).toarray(),[5.,10.,21.,28.])
v1 = svec(4) # one encompasses the other
v1.assign([1,2,3,4],[0,1,2,3])
v2.assign([10,20],[1,2])
self.assert_arraysequal(v1.add(v2,5,7).toarray(),[5.,80.,155.,20.])
self.assert_arraysequal(v2.add(v1,7,5).toarray(),[5.,80.,155.,20.])
def test_v_mul(self):
""" svec.mul """
v1 = svec(3) # [1,2,0,0,3]
v1.assign([1.,3.,2.],[0,4,1])
self.assert_arraysequal(v1.mul(10).toarray(),[10,20,0,0,30])
def test_pickle(self):
v = svec(4)
v.assign([1.,2.,5.,22], [3,5,1,0])
with open('outputfile.p', 'wb') as ofile:
pickle.dump(v, ofile)
with open('outputfile.p', 'rb') as ifile:
newv = pickle.load(ifile)
self.assertEqual(type(v), type(newv))
self.assertTrue(np.all(v.toarray() == newv.toarray()))
os.remove('outputfile.p')
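# Aside (informal): svec is gvar's sparse derivative vector. assign(values,
# indices) scatters values into the given slots, so the dense comparisons above
# follow directly, e.g.:
#     v = svec(2); v.assign([1., 3.], [0, 4])    # dense form: [1, 0, 0, 0, 3]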
class test_smat(unittest.TestCase,ArrayTests):
def setUp(self):
""" make mats for tests """
global smat_m,np_m
smat_m = smat()
smat_m.append_diag(np.array([0.,10.,200.]))
smat_m.append_diag_m(np.array([[1.,2.],[2.,1.]]))
smat_m.append_diag(np.array([4.,5.]))
smat_m.append_diag_m(np.array([[3.]]))
np_m = np.array([[ 0., 0., 0., 0., 0., 0., 0., 0.],
[ 0., 10., 0., 0., 0., 0., 0., 0.],
[ 0., 0., 200., 0., 0., 0., 0., 0.],
[ 0., 0., 0., 1., 2., 0., 0., 0.],
[ 0., 0., 0., 2., 1., 0., 0., 0.],
[ 0., 0., 0., 0., 0., 4., 0., 0.],
[ 0., 0., 0., 0., 0., 0., 5., 0.],
[ 0., 0., 0., 0., 0., 0., 0., 3.]])
def tearDown(self):
global smat_m,np_m
smat_m = None
np_m = None
def test_m_append(self):
""" smat.append_diag smat.append_diag_m smat.append_row smat.toarray"""
self.assert_arraysequal(smat_m.toarray(),np_m)
def test_m_dot(self):
""" smat.dot """
global smat_m,np_m
v = svec(2)
v.assign([10,100],[1,4])
np_v = v.toarray()
nv = len(np_v)
self.assert_arraysequal(smat_m.dot(v).toarray(),np.dot(np_m[:nv,:nv],np_v))
self.assert_arraysequal(smat_m.dot(v).toarray(),[0.,100.,0.,200.,100.])
self.assertEqual(smat_m.dot(v).dot(v),np.dot(np.dot(np_m[:nv,:nv],np_v),np_v))
self.assertEqual(smat_m.dot(v).size,3)
def test_m_expval(self):
""" smat.expval """
global smat_m,np_m
v = svec(2)
v.assign([10.,100.],[1,4])
np_v = v.toarray()
nv = len(np_v)
self.assertEqual(smat_m.expval(v),np.dot(np.dot(np_m[:nv,:nv],np_v),np_v))
def test_pickle(self):
""" pickle.dump(smat, outfile) """
global smat_m
with open('outputfile.p', 'wb') as ofile:
pickle.dump(smat_m, ofile)
with open('outputfile.p', 'rb') as ifile:
m = pickle.load(ifile)
self.assertEqual(type(smat_m), type(m))
self.assertTrue(np.all(smat_m.toarray() == m.toarray()))
os.remove('outputfile.p')
class test_smask(unittest.TestCase):
def test_smask(self):
def _test(imask):
mask = smask(imask)
np.testing.assert_array_equal(sum(imask[mask.starti:mask.stopi]), mask.len)
np.testing.assert_array_equal(imask, np.asarray(mask.mask))
np.testing.assert_array_equal(np.asarray(mask.map)[imask != 0], np.arange(mask.len))
np.testing.assert_array_equal(np.cumsum(imask[imask != 0]) - 1, np.asarray(mask.map)[imask != 0])
g = gvar([1, 2, 3], [4, 5, 6])
gvar(1,0)
imask = np.array(g[0].der + g[2].der, dtype=np.int8)
_test(imask)
def test_masked_ved(self):
def _test(imask, g):
mask = smask(imask)
vec = g.internaldata[1].masked_vec(mask)
np.testing.assert_array_equal(vec, g.der[imask!=0])
g = gvar([1, 2, 3], [4, 5, 6])
gvar(1,0)
imask = np.array(g[0].der + g[1].der, dtype=np.int8)
g[1:] += g[:-1]
g2 = g**2
_test(imask, g2[0])
_test(imask, g2[1])
_test(imask, g2[2])
def test_masked_mat(self):
a = np.random.rand(3,3)
g = gvar([1, 2, 3], a.dot(a.T))
imask = np.array((g[0].der + g[2].der) != 0, dtype=np.int8)
cov = evalcov([g[0], g[2]])
gvar(1,0)
mask = smask(imask)
np.testing.assert_allclose(cov, g[1].cov.masked_mat(mask))
class test_gvar1(unittest.TestCase,ArrayTests):
""" gvar1 - part 1 """
def setUp(self):
""" setup for tests """
global x,xmean,xsdev,gvar
# NB - powers of two important
xmean = 0.5
xsdev = 0.25
gvar = switch_gvar()
x = gvar(xmean,xsdev)
# ranseed((1968,1972,1972,1978,1980))
# random.seed(1952)
def tearDown(self):
""" cleanup after tests """
global x,gvar
gvar = restore_gvar()
x = None
def test_str(self):
""" str(x) """
global x,xmean,xsdev,gvar
self.assertEqual(str(x), x.fmt())
def test_call(self):
""" x() """
global x,xmean,xsdev,gvar
n = 10000
fac = 5. # 4 sigma
xlist = [x() for i in range(n)]
avg = np.average(xlist)
std = np.std(xlist)
self.assertAlmostEqual(avg,x.mean,delta=fac*x.sdev/n**0.5)
self.assertAlmostEqual(std,(1-1./n)**0.5*xsdev,delta=fac*x.sdev/(2*n)**0.5)
def test_cmp(self):
""" x==y x!=y x>y x<y"""
global x,xmean,xsdev,gvar
x = gvar(1, 10)
y = gvar(2, 20)
self.assertTrue(y!=x and 2*x==y and x==1 and y!=1 and 1==x and 1!=y)
self.assertTrue(
not y==x and not 2*x!=y and not x!=1 and not y==1
and not 1!=x and not 1==y
)
self.assertTrue(y>x and x<y and y>=x and x<=y and y>=2*x and 2*x<=y)
self.assertTrue(not y<x and not x>y and not y<=x and not x>=y)
self.assertTrue(y>1 and x<2 and y>=1 and x<=2 and y>=2 and 2*x<=2)
self.assertTrue(not y<1 and not x>2 and not y<=1 and not x>=2)
self.assertTrue(1<y and 2>x and 1<=y and 2>=x and 2<=y and 2>=2*x)
self.assertTrue(not 1>y and not 2<x and not 1>=y and not 2<=x)
def test_neg(self):
""" -x """
global x,xmean,xsdev,gvar
z = -x
self.assertEqual(x.mean,-z.mean)
self.assertEqual(x.var,z.var)
def test_pos(self):
""" +x """
global x,xmean,xsdev,gvar
z = +x
self.assertEqual(x.mean, z.mean)
        self.assertEqual(x.var, z.var)
class test_gvar2(unittest.TestCase,ArrayTests):
""" gvar - part 2 """
def setUp(self):
global x,y,gvar
# NB x.mean < 1 and x.var < 1 and y.var > 1 (assumed below)
# and powers of 2 important
gvar = switch_gvar()
x,y = gvar([0.125,4.],[[0.25,0.0625],[0.0625,1.]])
# ranseed((1968,1972,1972,1978,1980))
# random.seed(1952)
self.label = None
def tearDown(self):
""" cleanup after tests """
global x,y,gvar
x = None
y = None
gvar = restore_gvar()
# if self.label is not None:
# print self.label
def test_add(self):
""" x+y """
global x,y,gvar
z = x+y
cov = evalcov([x,y])
self.assertEqual(z.mean,x.mean+y.mean)
self.assertEqual(z.var,cov[0,0]+cov[1,1]+2*cov[0,1])
z = x + y.mean
self.assertEqual(z.mean,x.mean+y.mean)
self.assertEqual(z.var,x.var)
z = y.mean + x
self.assertEqual(z.mean,x.mean+y.mean)
self.assertEqual(z.var,x.var)
def test_sub(self):
""" x-y """
global x,y,gvar
z = x-y
cov = evalcov([x,y])
self.assertEqual(z.mean,x.mean-y.mean)
self.assertEqual(z.var,cov[0,0]+cov[1,1]-2*cov[0,1])
z = x - y.mean
self.assertEqual(z.mean,x.mean-y.mean)
self.assertEqual(z.var,x.var)
z = y.mean - x
self.assertEqual(z.mean,y.mean-x.mean)
self.assertEqual(z.var,x.var)
def test_mul(self):
""" x*y """
z = x*y
dz = [y.mean,x.mean]
cov = evalcov([x,y])
self.assertEqual(z.mean,x.mean*y.mean)
self.assertEqual(z.var,np.dot(dz,np.dot(cov,dz)))
z = x * y.mean
dz = [y.mean,0.]
self.assertEqual(z.mean,x.mean*y.mean)
self.assertEqual(z.var,np.dot(dz,np.dot(cov,dz)))
z = y.mean * x
self.assertEqual(z.mean,x.mean*y.mean)
self.assertEqual(z.var,np.dot(dz,np.dot(cov,dz)))
def test_div(self):
""" x/y """
z = x/y
dz = [1./y.mean,-x.mean/y.mean**2]
cov = evalcov([x,y])
self.assertEqual(z.mean,x.mean/y.mean)
self.assertEqual(z.var,np.dot(dz,np.dot(cov,dz)))
z = x / y.mean
dz = [1./y.mean,0.]
self.assertEqual(z.mean,x.mean/y.mean)
self.assertEqual(z.var,np.dot(dz,np.dot(cov,dz)))
z = y.mean / x
dz = [-y.mean/x.mean**2,0.]
self.assertEqual(z.mean,y.mean/x.mean)
self.assertEqual(z.var,np.dot(dz,np.dot(cov,dz)))
def test_pow(self):
""" x**y """
z = x**y
dz = [y.mean*x.mean**(y.mean-1),x.mean**y.mean*log(x.mean)]
cov = evalcov([x,y])
self.assertEqual(z.mean,x.mean**y.mean)
self.assertEqual(z.var,np.dot(dz,np.dot(cov,dz)))
z = x ** y.mean
dz = [y.mean*x.mean**(y.mean-1),0.]
self.assertEqual(z.mean,x.mean**y.mean)
self.assertEqual(z.var,np.dot(dz,np.dot(cov,dz)))
z = y.mean ** x
dz = [y.mean**x.mean*log(y.mean),0.]
self.assertEqual(z.mean,y.mean**x.mean)
self.assertEqual(z.var,np.dot(dz,np.dot(cov,dz)))
def t_fcn(self,f,df):
""" tester for test_fcn """
gdict = dict(globals())
gdict['x'] = x # with GVar
fx = eval(f,gdict)
gdict['x'] = x.mean # with float
fxm = eval(f,gdict)
dfxm = eval(df,gdict)
self.assertAlmostEqual(fx.mean,fxm)
self.assertAlmostEqual(fx.var,x.var*dfxm**2)
def test_fcn(self):
""" f(x) """
flist = [
("sin(x)","cos(x)"), ("cos(x)","-sin(x)"), ("tan(x)","1 + tan(x)**2"),
("arcsin(x)","(1 - x**2)**(-1./2.)"), ("arccos(x)","-1/(1 - x**2)**(1./2.)"),
("arctan(x)","1/(1 + x**2)"),
("sinh(x)","cosh(x)"), ("cosh(x)","sinh(x)"), ("tanh(x)","1 - tanh(x)**2"),
("arcsinh(x)","1./sqrt(x**2+1.)"),("arccosh(1+x)","1./sqrt(x**2+2*x)"),
("arctanh(x)","1./(1-x**2)"),
("exp(x)","exp(x)"), ("log(x)","1/x"), ("sqrt(x)","1./(2*x**(1./2.))")
]
for f,df in flist:
self.label = f
self.t_fcn(f,df)
# arctan2 tests
x = gvar('0.5(0.5)')
y = gvar('2(2)')
f = arctan2(y, x)
fc = arctan(y / x)
self.assertAlmostEqual(f.mean, fc.mean)
self.assertAlmostEqual(f.sdev, fc.sdev)
self.assertAlmostEqual(arctan2(y, x).mean, numpy.arctan2(y.mean, x.mean))
self.assertAlmostEqual(arctan2(y, -x).mean, numpy.arctan2(y.mean, -x.mean))
self.assertAlmostEqual(arctan2(-y, -x).mean, numpy.arctan2(-y.mean, -x.mean))
self.assertAlmostEqual(arctan2(-y, x).mean, numpy.arctan2(-y.mean, x.mean))
self.assertAlmostEqual(arctan2(y, x*0).mean, numpy.arctan2(y.mean, 0))
self.assertAlmostEqual(arctan2(-y, x*0).mean, numpy.arctan2(-y.mean, 0))
def test_gvar_function(self):
""" gvar_function(x, f, dfdx) """
x = sqrt(gvar(0.1, 0.1) + gvar(0.2, 0.5))
def fcn(x):
return sin(x + x**2)
def dfcn_dx(x):
return cos(x + x**2) * (1 + 2*x)
f = fcn(x).mean
dfdx = dfcn_dx(x).mean
diff = gvar_function(x, f, dfdx) - fcn(x)
self.assertAlmostEqual(diff.mean, 0.0)
self.assertAlmostEqual(diff.sdev, 0.0)
diff = gvar_function([x, x + gvar(2,2)], f, [dfdx, 0]) - fcn(x)
self.assertAlmostEqual(diff.mean, 0.0)
self.assertAlmostEqual(diff.sdev, 0.0)
x = gvar(dict(a='1(1)', b=['2(2)', '3(3)']))
z = gvar(1,1)
def fcn(x):
return sin(x['a'] * x['b'][0]) * x['b'][1]
f = fcn(x)
dfdx = dict(a=f.deriv(x['a']), b=[f.deriv(x['b'][0]), f.deriv(x['b'][1])])
f = f.mean
diff = gvar_function(x, f, dfdx) - fcn(x)
self.assertAlmostEqual(diff.mean, 0.0)
self.assertAlmostEqual(diff.sdev, 0.0)
x = gvar(['1(1)', '2(2)', '3(3)'])
def fcn(x):
return sin(x[0] + x[1]) * x[2]
f = fcn(x)
dfdx = np.array([f.deriv(x[0]), f.deriv(x[1]), f.deriv(x[2])])
f = f.mean
diff = gvar_function(x, f, dfdx) - fcn(x)
self.assertAlmostEqual(diff.mean, 0.0)
self.assertAlmostEqual(diff.sdev, 0.0)
def test_wsum_der(self):
""" wsum_der """
gv = GVarFactory()
x = gv([1,2],[[3,4],[4,5]])
self.assert_arraysequal(wsum_der(np.array([10.,100]),x),[10.,100.])
def test_wsum_gvar(self):
""" wsum_gvar """
gv = GVarFactory()
x = gv([1,2],[[3,4],[4,5]])
v = np.array([10.,100.])
ws = wsum_gvar(v,x)
self.assertAlmostEqual(ws.val,np.dot(v,mean(x)))
self.assert_arraysclose(ws.der,wsum_der(v,x))
def test_dotder(self):
""" GVar.dotder """
gv = GVarFactory()
x = gv([1,2],[[3,4],[4,5]])*2
v = np.array([10.,100.])
self.assertAlmostEqual(x[0].dotder(v),20.)
self.assertAlmostEqual(x[1].dotder(v),200.)
def test_fmt(self):
""" x.fmt """
self.assertEqual(x.fmt(None), x.fmt(2))
self.assertEqual(x.fmt(3),"%.3f(%d)"%(x.mean,round(x.sdev*1000)))
self.assertEqual(y.fmt(3),"%.3f(%.3f)"%(y.mean,round(y.sdev,3)))
self.assertEqual(gvar(".1234(341)").fmt(), "0.123(34)")
self.assertEqual(gvar(" .1234(341)").fmt(), "0.123(34)")
self.assertEqual(gvar(".1234(341) ").fmt(), "0.123(34)")
self.assertEqual(gvar(".1234(341)").fmt(1), "0.1(0)")
self.assertEqual(gvar(".1234(341)").fmt(5), "0.12340(3410)")
self.assertEqual(gvar(".1234(0)").fmt(), "0.1234(0)")
self.assertEqual(gvar("-.1234(341)").fmt(), "-0.123(34)")
self.assertEqual(gvar("+.1234(341)").fmt(), "0.123(34)")
self.assertEqual(gvar("-0.1234(341)").fmt(), "-0.123(34)")
self.assertEqual(gvar("10(1.3)").fmt(), "10.0(1.3)")
self.assertEqual(gvar("10.2(1.3)").fmt(), "10.2(1.3)")
self.assertEqual(gvar("-10.2(1.3)").fmt(), "-10.2(1.3)")
self.assertEqual(gvar("10(1.3)").fmt(0),"10(1)")
self.assertEqual(gvar("1e-9 +- 1.23e-12").fmt(), "1.0000(12)e-09")
self.assertEqual(gvar("1e-9 +- 1.23e-6").fmt(), '1(1230)e-09')
self.assertEqual(gvar("1e+9 +- 1.23e+6").fmt(), "1.0000(12)e+09")
self.assertEqual(gvar("1e-9 +- 0").fmt(), "1(0)e-09")
self.assertEqual(gvar("0(0)").fmt(), "0(0)")
self.assertEqual(gvar("1.234e-9 +- 0.129").fmt(), '1e-09 +- 0.13')
self.assertEqual(gvar("1.23(4)e-9").fmt(), "1.230(40)e-09")
self.assertEqual(gvar("1.23 +- 1.23e-12").fmt(), "1.2300000000000(12)")
self.assertEqual(gvar("1.23 +- 1.23e-6").fmt(), "1.2300000(12)")
self.assertEqual(gvar("1.23456 +- inf").fmt(3), "1.235 +- inf")
self.assertEqual(gvar("1.23456 +- inf").fmt(), str(1.23456) + " +- inf")
self.assertEqual(gvar("10.23 +- 1e-10").fmt(), "10.23000000000(10)")
self.assertEqual(gvar("10.23(5.1)").fmt(), "10.2(5.1)")
self.assertEqual(gvar("10.23(5.1)").fmt(-1),"10.23 +- 5.1")
self.assertEqual(gvar(0.021, 0.18).fmt(), '0.02(18)')
self.assertEqual(gvar(0.18, 0.021).fmt(), '0.180(21)')
# boundary cases
self.assertEqual(gvar(0.096, 9).fmt(), '0.1(9.0)')
self.assertEqual(gvar(0.094, 9).fmt(), '0.09(9.00)')
self.assertEqual(gvar(0.96, 9).fmt(), '1.0(9.0)')
self.assertEqual(gvar(0.94, 9).fmt(), '0.9(9.0)')
self.assertEqual(gvar(-0.96, 9).fmt(), '-1.0(9.0)')
self.assertEqual(gvar(-0.94, 9).fmt(), '-0.9(9.0)')
self.assertEqual(gvar(9.6, 90).fmt(), '10(90)')
self.assertEqual(gvar(9.4, 90).fmt(), '9(90)')
self.assertEqual(gvar(99.6, 91).fmt(), '100(91)')
self.assertEqual(gvar(99.4, 91).fmt(), '99(91)')
self.assertEqual(gvar(0.1, 0.0996).fmt(), '0.10(10)')
self.assertEqual(gvar(0.1, 0.0994).fmt(), '0.100(99)')
self.assertEqual(gvar(0.1, 0.994).fmt(), '0.10(99)')
self.assertEqual(gvar(0.1, 0.996).fmt(), '0.1(1.0)')
self.assertEqual(gvar(12.3, 9.96).fmt(), '12(10)')
self.assertEqual(gvar(12.3, 9.94).fmt(), '12.3(9.9)')
# 0 +- stuff
self.assertEqual(gvar(0, 0).fmt(), '0(0)')
self.assertEqual(gvar(0, 99.6).fmt(), '0(100)')
self.assertEqual(gvar(0, 99.4).fmt(), '0(99)')
self.assertEqual(gvar(0, 9.96).fmt(), '0(10)')
self.assertEqual(gvar(0, 9.94).fmt(), '0.0(9.9)')
self.assertEqual(gvar(0, 0.996).fmt(), '0.0(1.0)')
self.assertEqual(gvar(0, 0.994).fmt(), '0.00(99)')
self.assertEqual(gvar(0, 1e5).fmt(), '0.0(1.0)e+05')
self.assertEqual(gvar(0, 1e4).fmt(), '0(10000)')
self.assertEqual(gvar(0, 1e-5).fmt(), '0.0(1.0)e-05')
self.assertEqual(gvar(0, 1e-4).fmt(), '0.00000(10)')
def test_fmt2(self):
""" fmt(x) """
g1 = gvar(1.5,0.5)
self.assertEqual(fmt(g1),g1.fmt())
g2 = [g1,2*g1]
fmtg2 = fmt(g2)
self.assertEqual(fmtg2[0],g2[0].fmt())
self.assertEqual(fmtg2[1],g2[1].fmt())
g3 = dict(g1=g1,g2=g2)
fmtg3 = fmt(g3)
self.assertEqual(fmtg3['g1'],g1.fmt())
self.assertEqual(fmtg3['g2'][0],g2[0].fmt())
self.assertEqual(fmtg3['g2'][1],g2[1].fmt())
def test_tabulate(self):
""" tabulate(g) """
g = BufferDict()
g['scalar'] = gv.gvar('10.3(1.2)')
g['vector'] = gv.gvar(['0.52(3)', '0.09(10)', '1.2(1)'])
g['tensor'] = gv.gvar([
['0.01(50)', '0.001(20)', '0.033(15)'],
['0.001(20)', '2.00(5)', '0.12(52)'],
['0.007(45)', '0.237(4)', '10.23(75)'],
])
table = gv.tabulate(g, ncol=2)
correct = '\n'. join([
' key/index value key/index value',
'--------------------------- ---------------------------',
' scalar 10.3 (1.2) 1,0 0.001 (20)',
' vector 0 0.520 (30) 1,1 2.000 (50)',
' 1 0.09 (10) 1,2 0.12 (52)',
' 2 1.20 (10) 2,0 0.007 (45)',
' tensor 0,0 0.01 (50) 2,1 0.2370 (40)',
' 0,1 0.001 (20) 2,2 10.23 (75)',
' 0,2 0.033 (15)',
])
self.assertEqual(table, correct, 'tabulate wrong')
def test_partialvar(self):
""" x.partialvar x.partialsdev fmt_errorbudget """
gvar = gvar_factory()
## test basic functionality ##
x = gvar(1,2)
y = gvar(3,4)
a,b = gvar([1,2],[[4,5],[5,16]])
z = x+y+2*a+3*b
self.assertEqual(z.var,x.var+y.var
+np.dot([2.,3.],np.dot(evalcov([a,b]),[2.,3.])))
self.assertEqual(z.partialvar(x,y),x.var+y.var)
self.assertEqual(z.partialvar(x,a),
x.var+np.dot([2.,3.],np.dot(evalcov([a,b]),[2.,3.])))
self.assertEqual(z.partialvar(a),z.partialvar(a))
##
## test different arg types, fmt_errorbudget, fmt_values ##
s = gvar(1,2)
a = np.array([[gvar(3,4),gvar(5,6)]])
d = BufferDict(s=gvar(7,8),v=[gvar(9,10),gvar(10,11)])
z = s + sum(a.flat) + sum(d.flat)
self.assertEqual(z.partialvar(s,a,d),z.var)
self.assertEqual(z.partialvar(s),s.var)
self.assertEqual(z.partialvar(a),sum(var(a).flat))
self.assertEqual(z.partialvar(d),sum(var(d).flat))
self.assertAlmostEqual(z.partialsdev(s,a,d),z.sdev)
tmp = fmt_errorbudget(
outputs=dict(z=z),
inputs=collections.OrderedDict([
('a', a), ('s', s), ('d', d),
('ad', [a,d]), ('sa', [s,a]), ('sd', [s,d]), ('sad', [s,a,d])
]),
ndecimal=1
)
out = "\n".join([
"Partial % Errors:",
" z",
"--------------------",
" a: 20.6",
" s: 5.7",
" d: 48.2",
" ad: 52.5",
" sa: 21.4",
" sd: 48.6",
" sad: 52.8",
"--------------------",
" total: 52.8",
""
])
self.assertEqual(tmp,out,"fmt_errorbudget output wrong")
tmp = fmt_errorbudget(
outputs=dict(z=z),
inputs=collections.OrderedDict([
('a', a), ('s', s), ('d', d),
('ad', [a,d]), ('sa', [s,a]), ('sd', [s,d]), ('sad', [s,a,d])
]),
ndecimal=1, colwidth=25
)
out = "\n".join([
"Partial % Errors:",
" z",
"--------------------------------------------------",
" a: 20.6",
" s: 5.7",
" d: 48.2",
" ad: 52.5",
" sa: 21.4",
" sd: 48.6",
" sad: 52.8",
"--------------------------------------------------",
" total: 52.8",
""
])
self.assertEqual(tmp,out,"fmt_errorbudget output wrong (with colwidth)")
tmp = fmt_values(outputs=collections.OrderedDict([('s',s),('z',z)]),ndecimal=1)
out = "\n".join([
"Values:",
" s: 1.0(2.0) ",
" z: 35.0(18.5) ",
""
])
self.assertEqual(tmp,out,"fmt_value output wrong")
def test_errorbudget_warnings(self):
""" fmt_errorbudget(...verify=True) """
a, b, c = gvar(3 * ['1.0(1)'])
        b, c = (b + c) / 2., (b - c) / 2.
outputs = dict(sum=a+b+c)
warnings.simplefilter('error')
fmt_errorbudget(outputs=outputs, inputs=dict(a=a, b=b), verify=True)
with self.assertRaises(UserWarning):
fmt_errorbudget(outputs=outputs, inputs=dict(a=a, b=b, c=c), verify=True)
with self.assertRaises(UserWarning):
fmt_errorbudget(outputs=outputs, inputs=dict(a=a), verify=True)
def test_der(self):
""" x.der """
global x,y
self.assert_arraysequal(x.der,[1.,0.])
self.assert_arraysequal(y.der,[0.,1.])
z = x*y**2
self.assert_arraysequal(z.der,[y.mean**2,2*x.mean*y.mean])
def test_construct_gvar(self):
""" construct_gvar """
v = 2.0
dv = np.array([0.,1.])
cov = smat()
cov.append_diag_m(np.array([[2.,4.],[4.,16.]]))
y = gvar(v,np.array([1,0.]),cov)
z = gvar(v,dv,cov)
cov = evalcov([y,z])
self.assertEqual(z.mean,v)
self.assert_arraysequal(z.der,dv)
self.assertEqual(z.var,np.dot(dv,np.dot(cov,dv)))
self.assertEqual(z.sdev,sqrt(z.var))
cov = smat()
cov.append_diag_m(np.array([[2.,4.],[4.,16.]]))
y = gvar(v,([1.], [0]), cov)
z = gvar(v, ([1.], [1]), cov)
cov = evalcov([y,z])
self.assertEqual(z.mean,v)
self.assert_arraysequal(z.der,dv)
self.assertEqual(z.var,np.dot(dv,np.dot(cov,dv)))
self.assertEqual(z.sdev,sqrt(z.var))
# zero covariance
x = gvar([1.], [[0.]])
self.assertEqual(str(x), '[1(0)]')
x = gvar(1, 0.)
self.assertEqual(str(x), '1(0)')
def t_gvar(self,args,xm,dx,xcov,xder):
""" worker for test_gvar """
gvar = gvar_factory()
x = gvar(*args)
self.assertEqual(x.mean,xm)
self.assertEqual(x.sdev,dx)
self.assert_arraysequal(evalcov([x]),xcov)
self.assert_arraysequal(x.der,xder)
def test_gvar(self):
""" gvar """
## tests for arguments corresponding to a single gvar ##
cov = smat()
cov.append_diag_m(np.array([[1.,2.],[2.,16.]]))
x = gvar(2.,np.array([0.,1.]),cov)
arglist = [(["4.125(250)"],4.125,0.25,[[.25**2]],[1.0],'"x(dx)"'), #]
(["-4.125(2.0)"],-4.125,2.0,[[2.**2]],[1.0],'"x(dx)"'),
(["4.125 +- 0.5"],4.125,0.5,[[0.5**2]],[1.0],'"x +- dx"'),
([x],x.mean,x.sdev,evalcov([x]),x.der,"x"),
([2.0],2.0,0.0,[[0.0]],[1.0],"x"),
([(2.0,4.0)],2.,4.,[[4.**2]],[1.0],"(x,dx)"),
([2.0,4.0],2.,4.,[[4.**2]],[1.0],"x,dx"),
([x.mean,x.der,x.cov],x.mean,x.sdev,evalcov([x]),x.der,"x,der,cov")
]
for a in arglist:
self.label = "gvar(%s)" % a[0]
self.t_gvar(a[0],a[1],a[2],a[3],a[4])
# tests involving a single argument that is sequence
x = gvar([[(0,1),(1,2)],[(3,4),(5,6)],[(7,8),(9,10)]])
y = np.array([[gvar(0,1),gvar(1,2)],[gvar(3,4),gvar(5,6)],
[gvar(7,8),gvar(9,10)]])
self.assert_gvclose(x,y)
x = gvar([[["0(1)"],["2(3)"]]])
y = np.array([[[gvar(0,1)],[gvar(2,3)]]])
self.assert_gvclose(x,y)
x = gvar([[1.,2.],[3.,4.]])
y = np.array([[gvar(1.,0),gvar(2.,0)],[gvar(3.,0),gvar(4.,0)]])
self.assert_gvclose(x,y)
x = gvar([gvar(0,1),gvar(2,3)])
y = np.array([gvar(0,1),gvar(2,3)])
self.assert_gvclose(x,y)
# tests involving dictionary arguments
x = gvar(dict(a=1,b=[2,3]), dict(a=10, b=[20,30]))
y = dict(a=gvar(1,10), b=[gvar(2,20), gvar(3,30)])
self.assert_gvclose(x,y)
a, b = gvar([1,2],[10,20])
a, b = a+b, a-b
x = gvar([a, a+b, b, b-a])
y = BufferDict()
y['a'] = [a, a+b]
y['b'] = [b, b-a]
self.assert_gvclose(y.flat, x)
z = gvar(mean(y), evalcov(y))
self.assert_gvclose(z.flat, y.flat)
self.assert_arraysclose(evalcov(z.flat), evalcov(x))
def _tst_compare_evalcovs(self):
" evalcov evalcov_blocks evalcov_blocks_dense agree "
def reconstruct(x, blocks, compress):
ans = np.zeros((len(x), len(x)), float)
if compress:
idx, sdev = blocks[0]
ans[idx, idx] = sdev ** 2
n = (len(idx), len(blocks))
blocks = blocks[1:]
else:
n = len(blocks)
for idx, bcov in blocks:
ans[idx[:,None], idx[:]] = bcov
return ans, n
for setup, compress in [
("N=10; a=np.random.rand(N,N); x=gv.gvar(N*[1.],a.dot(a.T)); x=a.dot(x);", True),
("N=10; a=np.random.rand(N,N); x=gv.gvar(N*[1.],a.dot(a.T)); x=a.dot(x);", False),
("N=10; x=gv.gvar(N*[1.],N*[1.]);", True),
("N=10; x=gv.gvar(N*[1.],N*[1.]);", False),
("N=10; x=gv.gvar(N*[1.],N*[1.]); x[1:] += x[:-1];", True),
("N=10; x=gv.gvar(N*[1.],N*[1.]); x[1:] += x[:-1];", False),
("N=10; a=np.random.rand(N,N); x=gv.gvar(N*[1.],a.dot(a.T));", True),
("N=10; a=np.random.rand(N,N); x=gv.gvar(N*[1.],a.dot(a.T));", False),
]:
tmp = locals()
exec(setup, globals(), tmp)
x = tmp['x']
ec = gv.evalcov(x)
ecb, necb = reconstruct(x, gv.evalcov_blocks(x, compress=compress), compress)
ecbd, necbd = reconstruct(x, gv.evalcov_blocks_dense(x, compress=compress), compress)
np.testing.assert_allclose(ec, ecbd)
np.testing.assert_allclose(ec, ecb)
self.assertEqual(necb, necbd)
# print(necb)
def test_compare_evalcovs(self):
" evalcov evalcov_blocks evalcov_blocks_dense agree "
self._tst_compare_evalcovs()
tmp, gv._CONFIG['evalcov_blocks'] = gv._CONFIG['evalcov_blocks'], 1
self._tst_compare_evalcovs()
gv._CONFIG['evalcov_blocks'] = tmp
def test_gvar_blocks(self):
" block structure created by gvar.gvar "
def blockid(g):
return g.cov.blockid(g.internaldata[1].indices()[0])
x = gvar([1., 2., 3.], [1., 1., 1.])
id = [blockid(xi) for xi in x]
self.assertNotEqual(id[0], id[1])
self.assertNotEqual(id[0], id[2])
self.assertNotEqual(id[1], id[2])
idlast = max(id)
x = gvar([1., 2., 3.], [[1., 0., 0.], [0., 1., 0.], [0., 0., 1.]], fast=False)
id = [blockid(xi) for xi in x]
self.assertEqual(min(id), idlast + 1)
self.assertNotEqual(id[0], id[1])
self.assertNotEqual(id[0], id[2])
self.assertNotEqual(id[1], id[2])
idlast = max(id)
x = gvar([1., 2., 3.], [[1., 0.1, 0.], [0.1, 1., 0.], [0., 0., 1.]], fast=False)
id = [blockid(xi) for xi in x]
self.assertEqual(min(id), idlast + 1)
self.assertEqual(id[0], id[1])
self.assertNotEqual(id[0], id[2])
idlast = max(id)
x = gvar([1., 2., 3.], [[1., 0., 0.1], [0.0, 1., 0.0], [0.1, 0., 1.]], fast=False)
id = [blockid(xi) for xi in x]
self.assertEqual(min(id), idlast + 1)
self.assertEqual(id[0], id[2])
self.assertNotEqual(id[0], id[1])
idlast = max(id)
x = gvar([1., 2., 3.], [[1., 0., 0.0], [0.0, 1., 0.1], [0.0, 0.1, 1.]], fast=False)
id = [blockid(xi) for xi in x]
self.assertEqual(min(id), idlast + 1)
self.assertEqual(id[1], id[2])
self.assertNotEqual(id[0], id[1])
idlast = max(id)
x = gvar([1., 2., 3.], [[1., 0.1, 0.0], [0.1, 1., 0.1], [0.0, 0.1, 1.]], fast=False)
id = [blockid(xi) for xi in x]
self.assertEqual(min(id), idlast + 1)
self.assertEqual(id[1], id[2])
self.assertEqual(id[0], id[1])
idlast = max(id)
x = gvar([1., 2., 3.], [[1., 0.1, 0.1], [0.1, 1., 0.1], [0.1, 0.1, 1.]], fast=False)
id = [blockid(xi) for xi in x]
self.assertEqual(min(id), idlast + 1)
self.assertEqual(id[1], id[2])
self.assertEqual(id[0], id[1])
def test_gvar_verify(self):
" gvar(x, xx, verify=True) "
# case that does not generate an error
gvar([1., 2.], [[1., 2./10], [2./10., 1.]])
with self.assertRaises(ValueError):
gvar([1., 2.], [[1., .5], [.6, 1.]])
# cases that do generate errors
for a,b in [
(1., -1.), ([1., 2.], [2., -2.]),
([1., 2.], [[1., 2.], [2., 1.]]),
]:
with self.assertRaises(ValueError):
gvar(a, b, verify=True)
def test_asgvar(self):
""" gvar functions as asgvar """
z = gvar(x)
self.assertTrue(z is x)
z = gvar("2.00(25)")
self.assertEqual(z.mean,2.0)
self.assertEqual(z.sdev,0.25)
def test_basis5(self):
""" gvar(x,dx) """
xa = np.array([[2.,4.]])
dxa = np.array([[16.,64.]])
x = gvar(xa,dxa)
xcov = evalcov(x)
self.assertEqual(xcov.shape,2*x.shape)
for xai,dxai,xi in zip(xa.flat,dxa.flat,x.flat):
self.assertEqual(xai,xi.mean)
self.assertEqual(dxai,xi.sdev)
self.assertEqual(np.shape(xa),np.shape(x))
xcov = xcov.reshape((2,2))
self.assert_arraysequal(xcov.diagonal(),[dxa[0,0]**2,dxa[0,1]**2])
def test_basis6(self):
""" gvar(x,cov) """
xa = np.array([2.,4.])
cov = np.array([[16.,64.],[64.,4.]])
x = gvar(xa,cov)
xcov = evalcov(x)
for xai,dxai2,xi in zip(xa.flat,cov.diagonal().flat,x.flat):
self.assertEqual(xai,xi.mean)
self.assertEqual(dxai2,xi.sdev**2)
self.assertEqual(np.shape(xa),np.shape(x))
self.assert_arraysequal(xcov,cov.reshape((2,2)))
def test_mean_sdev_var(self):
""" mean(g) sdev(g) var(g) """
def compare(x,y):
self.assertEqual(set(x.keys()),set(y.keys()))
for k in x:
self.assertEqual(np.shape(x[k]),np.shape(y[k]))
if np.shape(x[k])==():
self.assertEqual(x[k],y[k])
else:
self.assertTrue(all(x[k]==y[k]))
# dictionaries of GVars
a = dict(x=gvar(1,2),y=np.array([gvar(3,4),gvar(5,6)]))
a_mean = dict(x=1.,y=np.array([3.,5.]))
a_sdev = dict(x=2.,y=np.array([4.,6.]))
a_var = dict(x=2.**2,y=np.array([4.**2,6.**2]))
compare(a_mean,mean(a))
compare(a_sdev,sdev(a))
compare(a_var,var(a))
# arrays of GVars
b = np.array([gvar(1,2),gvar(3,4),gvar(5,6)])
b_mean = np.array([1.,3.,5.])
b_sdev = np.array([2.,4.,6.])
self.assertTrue(all(b_mean==mean(b)))
self.assertTrue(all(b_sdev==sdev(b)))
self.assertTrue(all(b_sdev**2==var(b)))
# single GVar
self.assertEqual(mean(gvar(1,2)),1.)
self.assertEqual(sdev(gvar(1,2)),2.)
self.assertEqual(var(gvar(1,2)),4.)
# single non-GVar
self.assertEqual(mean(1.25), 1.25)
self.assertEqual(sdev(1.25), 0.0)
self.assertEqual(var(1.25), 0.0)
b = np.array([gvar(1,2), 3.0, gvar(5,6)])
self.assertTrue(all(mean(b)==[1., 3., 5.]))
self.assertTrue(all(sdev(b)==[2., 0., 6.]))
self.assertTrue(all(var(b)==[4., 0., 36.]))
def test_sdev_var(self):
" sdev var from covariance matrices "
a = np.random.rand(10, 10)
cov = a.dot(a.T)
x = gvar(cov.shape[0] * [1], cov)
xd = gvar(cov.shape[0] * [1], cov.diagonal() ** 0.5)
xt = a.dot(x)
covt = a.dot(cov.dot(a.T))
for nthreshold in [1, 1000]:
tmp, gv._CONFIG['var'] = gv._CONFIG['var'], nthreshold
numpy.testing.assert_allclose(var(x), cov.diagonal())
numpy.testing.assert_allclose(sdev(x), cov.diagonal() ** 0.5)
numpy.testing.assert_allclose(var(xd), cov.diagonal())
numpy.testing.assert_allclose(sdev(xd), cov.diagonal() ** 0.5)
numpy.testing.assert_allclose(var(xt), covt.diagonal())
numpy.testing.assert_allclose(sdev(xt), covt.diagonal() ** 0.5)
gv._CONFIG['var'] = tmp
def test_uncorrelated(self):
""" uncorrelated(g1, g2) """
a = dict(x=gvar(1,2),y=np.array([gvar(3,4),gvar(5,6)]))
b = dict(x=gvar(1,2),y=np.array([gvar(3,4),gvar(5,6)]))
c = np.array([gvar(1,2),gvar(3,4),gvar(5,6)])
d = np.array([gvar(1,2),gvar(3,4),gvar(5,6)])
self.assertTrue(uncorrelated(a,b))
self.assertTrue(not uncorrelated(a,a))
self.assertTrue(uncorrelated(a['x'],a['y']))
self.assertTrue(not uncorrelated(a['x'],a))
self.assertTrue(uncorrelated(a,c))
self.assertTrue(uncorrelated(c,a))
self.assertTrue(uncorrelated(c,d))
self.assertTrue(not uncorrelated(c,c))
a['x'] += b['x']
self.assertTrue(not uncorrelated(a,b))
d += c[0]
self.assertTrue(not uncorrelated(c,d))
self.assertTrue(not uncorrelated(a,b['x']))
a, b = gvar([1,2],[[1,.1],[.1,4]])
c = 2*a
self.assertTrue(not uncorrelated(a,c))
self.assertTrue(not uncorrelated(b,c))
self.assertTrue(not uncorrelated(a,b))
def test_deriv(self):
global x, y, gvar
f = 2 * x ** 2. + 3 * y
self.assertEqual(deriv(f, x), 4. * x.mean)
self.assertEqual(deriv(f, y), 3.)
with self.assertRaises(ValueError):
deriv(f, x+y)
self.assertEqual(f.deriv(x), 4. * x.mean)
self.assertEqual(f.deriv(y), 3.)
with self.assertRaises(ValueError):
f.deriv(x+y)
self.assertEqual(deriv(f, [x, y]).tolist(), [4. * x.mean, 3.])
self.assertEqual(deriv(f, [[x], [y]]).tolist(), [[4. * x.mean], [3.]])
self.assertEqual(deriv([f], [x, y]).tolist(), [[4. * x.mean, 3.]])
f = [2 * x + 3 * y, 4 * x]
self.assertEqual(deriv(f, x).tolist(), [2., 4.])
self.assertEqual(deriv(f, y).tolist(), [3., 0.])
with self.assertRaises(ValueError):
deriv(f, x+y)
df = deriv(f, [[x, y]])
self.assertEqual(df.tolist(), [[[2., 3.]], [[4., 0.]]])
f = BufferDict([('a', 2 * x + 3 * y), ('b', 4 * x)])
self.assertEqual(deriv(f, x), BufferDict([('a',2.), ('b',4.)]))
self.assertEqual(deriv(f, y), BufferDict([('a',3.), ('b',0.)]))
df = deriv(f, [x, y])
self.assertEqual(df['a'].tolist(), [2., 3.])
self.assertEqual(df['b'].tolist(), [4., 0.])
with self.assertRaises(ValueError):
deriv(f, x+y)
def test_correlate(self):
" correlate(g, corr) "
x = gvar([1., 2.], [[64., 4.], [4., 16.]])
xmean = mean(x)
xsdev = sdev(x)
xx = correlate(gvar(xmean, xsdev), evalcorr(x))
self.assert_arraysequal(xmean, mean(xx))
self.assert_arraysequal(evalcov(x), evalcov(xx))
# with upper, verify
corr = evalcorr(x)
corr[1, 0] = 0.
corr[1, 1] = 10.
with self.assertRaises(ValueError):
xx = correlate(gvar(xmean, xsdev), corr, upper=False, verify=True)
xx = correlate(gvar(xmean, xsdev), corr, upper=True, verify=True)
self.assert_arraysequal(xmean, mean(xx))
self.assert_arraysequal(evalcov(x), evalcov(xx))
# with lower, verify
corr = evalcorr(x)
corr[0, 1] = 0.
corr[0, 0] = 0.
with self.assertRaises(ValueError):
xx = correlate(gvar(xmean, xsdev), corr, lower=False, verify=True)
xx = correlate(gvar(xmean, xsdev), corr, lower=True, verify=True)
self.assert_arraysequal(xmean, mean(xx))
self.assert_arraysequal(evalcov(x), evalcov(xx))
# matrix
x.shape = (2, 1)
xmean = mean(x)
xsdev = sdev(x)
xx = correlate(gvar(xmean, xsdev), evalcorr(x))
self.assert_arraysequal(xmean, mean(xx))
self.assert_arraysequal(evalcov(x), evalcov(xx))
# dict
y = BufferDict()
y['a'] = x[0, 0]
y['b'] = x
ymean = mean(y)
ysdev = sdev(y)
yy = correlate(gvar(ymean, ysdev), evalcorr(y))
for k in y:
self.assert_arraysequal(mean(y[k]), mean(yy[k]))
ycov = evalcov(y)
yycov = evalcov(yy)
for k in ycov:
self.assert_arraysequal(ycov[k], yycov[k])
def test_evalcorr(self):
" evalcorr(array) "
x = gvar([1., 2.], [[64., 4.], [4., 16.]])
a, b = x
c = evalcorr([a, b])
self.assertEqual(corr(a,b), 1/8.)
self.assert_arraysequal(c, [[1., 1/8.], [1/8., 1.]])
c = evalcorr(x.reshape(2, 1))
self.assertEqual(c.shape, 2 * (2, 1))
self.assert_arraysequal(c.reshape(2,2), [[1., 1/8.], [1/8., 1.]])
y = dict(a=x[0], b=x)
c = evalcorr(y)
self.assertEqual(c['a', 'a'], [[1]])
self.assert_arraysequal(c['a', 'b'], [[1., 1/8.]])
self.assert_arraysequal(c['b', 'a'], [[1.], [1./8.]])
self.assert_arraysequal(c['b', 'b'], [[1., 1/8.], [1/8., 1.]])
def _tst_evalcov1(self):
""" evalcov(array) """
a,b = gvar([1.,2.],[[64.,4.],[4.,36.]])
c = evalcov([a,b/2])
self.assert_arraysequal(c,[[ 64.,2.],[ 2.,9.]])
self.assertEqual(cov(a, b/2), 2.)
c = evalcov([a/2,b])
self.assert_arraysequal(c,[[ 16.,2.],[ 2.,36.]])
z = gvar(8.,32.)
c = evalcov([x,y,z])
self.assert_arraysequal(c[:2,:2],evalcov([x,y]))
self.assertEqual(c[2,2],z.var)
self.assert_arraysequal(c[:2,2],np.zeros(np.shape(c[:2,2])))
self.assert_arraysequal(c[2,:2],np.zeros(np.shape(c[2,:2])))
rc = evalcov([x+y/2,2*x-y])
rotn = np.array([[1.,1/2.],[2.,-1.]])
self.assert_arraysequal(rc,np.dot(rotn,np.dot(c[:2,:2],rotn.transpose())))
def test_evalcov1(self):
""" evalcov(array) """
self._tst_evalcov1()
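        # rerun with gv._CONFIG['evalcov'] = 1 -- presumably a size threshold that selects an
        # alternative evalcov code path -- then restore the original setting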
tmp, gv._CONFIG['evalcov'] = gv._CONFIG['evalcov'], 1
self._tst_evalcov1()
gv._CONFIG['evalcov'] = tmp
def _tst_evalcov2(self):
""" evalcov(dict) """
c = evalcov({0:x + y / 2, 1:2 * x - y})
rotn = np.array([[1., 1/2.], [2., -1.]])
cz = np.dot(rotn, np.dot(evalcov([x, y]), rotn.transpose()))
c = [[c[0,0][0,0], c[0,1][0,0]], [c[1,0][0,0], c[1,1][0,0]]]
self.assert_arraysequal(c, cz)
c = evalcov(dict(x=x, y=[x, y]))
self.assert_arraysequal(c['y','y'], evalcov([x, y]))
self.assertEqual(c['x','x'], [[x.var]])
self.assert_arraysequal(c['x','y'], [[x.var, evalcov([x,y])[0,1]]])
self.assert_arraysequal(c['y','x'], c['x','y'].T)
def test_evalcov2(self):
""" evalcov(dict) """
self._tst_evalcov2()
tmp, gv._CONFIG['evalcov'] = gv._CONFIG['evalcov'], 1
self._tst_evalcov2()
gv._CONFIG['evalcov'] = tmp
def test_sample(self):
" sample(g) "
glist = [
gvar('1(2)'), gv.gvar(['10(2)', '20(2)']) * gv.gvar('1(1)'),
gv.gvar(dict(a='100(2)', b=['200(2)', '300(2)'])),
]
for g in glist:
ranseed(12)
svdcut = 0.9
s1 = sample(g, svdcut=svdcut)
ranseed(12)
s2 = next(raniter(g, svdcut=svdcut))
self.assertEqual(str(s1), str(s2))
ranseed(12)
eps = 0.9
s1 = sample(g, eps=eps)
ranseed(12)
s2 = next(raniter(g, eps=eps))
self.assertEqual(str(s1), str(s2))
@unittest.skipIf(FAST,"skipping test_raniter for speed")
def test_raniter(self):
""" raniter """
global x,y,gvar
n = 1000
rtol = 5./n**0.5
x = gvar(x.mean, x.sdev)
y = gvar(y.mean, y.sdev)
f = raniter([x,y],n)
ans = [fi for fi in f]
# print(x, y, evalcov([x,y]))
# print (ans)
ans = np.array(ans).transpose()
self.assertAlmostEqual(ans[0].mean(),x.mean,delta=x.sdev*rtol)
self.assertAlmostEqual(ans[1].mean(),y.mean,delta=y.sdev*rtol)
self.assert_arraysclose(np.cov(ans[0],ans[1]),evalcov([x,y]),rtol=rtol)
@unittest.skipIf(FAST,"skipping test_raniter2 for speed")
def test_raniter2(self):
""" raniter & svd """
for svdcut in [1e-20,1e-2]:
pr = BufferDict()
pr[0] = gvar(1,1)
pr[1] = pr[0]+gvar(0.1,1e-4)
a0 = []
da = []
n = 10000
rtol = 5./n**0.5 # 5 sigma
for p in raniter(pr,n,svdcut=svdcut):
a0.append(p[0])
da.append(p[1]-p[0])
a0 = np.array(a0)
da = np.array(da)
dda = max(2*svdcut**0.5,1e-4) # largest eig is 2 -- hence 2*sqrt(svdcut)
self.assertAlmostEqual(da.std(),dda,delta=rtol*dda)
self.assertAlmostEqual(a0.mean(),1.,delta=rtol)
self.assertAlmostEqual(da.mean(),0.1,delta=rtol*da.std())
def test_bootstrap_iter(self):
""" bootstrap_iter """
p = gvar(1,1)*np.array([1,1])+gvar(0.1,1e-4)*np.array([1,-1])
p_sw = np.array([p[0]+p[1],p[0]-p[1]])/2.
p_cov = evalcov(p_sw.flat)
p_mean = mean(p_sw.flat)
        p_sdev = sdev(p_sw.flat)
for pb in bootstrap_iter(p,3,svdcut=1e-20):
pb_sw = np.array([pb[0]+pb[1],pb[0]-pb[1]])/2.
self.assert_arraysclose(p_cov,evalcov(pb_sw.flat))
dp = np.abs(mean(pb_sw.flat)-p_mean)
self.assertGreater(p_sdev[0]*5,dp[0])
self.assertGreater(p_sdev[1]*5,dp[1])
for pb in bootstrap_iter(p,3,svdcut=1e-2):
pb_sw = np.array([pb[0]+pb[1],pb[0]-pb[1]])/2.
pb_mean = mean(pb_sw.flat)
pb_sdev = sdev(pb_sw.flat)
self.assertAlmostEqual(pb_sdev[0],p_sdev[0])
self.assertAlmostEqual(pb_sdev[1],p_sdev[0]/10.)
dp = abs(pb_mean-p_mean)
self.assertGreater(p_sdev[0]*5,dp[0])
self.assertGreater(p_sdev[0]*5./10.,dp[1])
def test_raniter3(self):
""" raniter & BufferDict """
pr = BufferDict()
pr['s'] = gvar(2.,4.)
pr['v'] = [gvar(4.,8.),gvar(8.,16.)]
pr['t'] = [[gvar(16.,32.),gvar(32.,64.)],[gvar(64.,128.),gvar(128.,256.)]]
pr['ps'] = gvar(256.,512.)
nran = 49
delta = 5./nran**0.5 # 5 sigma
prmean = mean(pr)
prsdev = sdev(pr)
ans = dict((k,[]) for k in pr)
for p in raniter(pr,nran):
for k in p:
ans[k].append(p[k])
for k in p:
ansmean = np.mean(ans[k],axis=0)
anssdev = np.std(ans[k],axis=0)
pkmean = prmean[k]
pksdev = prsdev[k]
self.assertAlmostEqual(np.max(np.abs((pkmean-ansmean)/pksdev)),0.0,delta=delta)
self.assertAlmostEqual(np.max(np.abs((pksdev-anssdev)/pksdev)),0.0,delta=delta)
def test_SVD(self):
""" SVD """
# error system
with self.assertRaises(ValueError):
SVD([1,2])
# non-singular
x,y = gvar([1,1],[1,4])
cov = evalcov([(x+y)/2**0.5,(x-y)/2**0.5])
s = SVD(cov)
e = s.val
v = s.vec
k = s.kappa
self.assert_arraysclose(e,[1.,16.],rtol=1e-6)
self.assert_arraysclose(e[0]/e[1],1./16.,rtol=1e-6)
self.assert_arraysclose(np.dot(cov,v[0]),e[0]*v[0],rtol=1e-6)
self.assert_arraysclose(np.dot(cov,v[1]),e[1]*v[1],rtol=1e-6)
self.assertTrue(np.allclose([np.dot(v[0],v[0]),np.dot(v[1],v[1]),np.dot(v[0],v[1])],
[1.,1.,0],rtol=1e-6))
self.assert_arraysclose(sum(np.outer(vi,vi)*ei for ei,vi in zip(e,v)),
cov,rtol=1e-6)
self.assertAlmostEqual(s.kappa,1/16.)
# on-axis 0
cov = np.array([[4.,0.0], [0.0, 0.0]])
s = SVD(cov, rescale=False, svdcut=None)
self.assertTrue(np.all(s.val == [0., 4.]))
# singular case
cov = evalcov([(x+y)/2**0.5,(x-y)/2**0.5,x,y])
s = SVD(cov)
e,v,k = s.val,s.vec,s.kappa
self.assert_arraysclose(e,[0,0,2.,32.],rtol=1e-6)
self.assert_arraysclose(sum(np.outer(vi,vi)*ei for ei,vi in zip(e,v)),
cov,rtol=1e-6)
s = SVD(cov,svdcut=1e-4,compute_delta=True)
e,v,k,d = s.val,s.vec,s.kappa,s.delta
self.assert_arraysclose(e,[32*1e-4,32*1e-4,2.,32.],rtol=1e-6)
ncov = sum(np.outer(vi,vi)*ei for ei,vi in zip(e,v))-evalcov(d)
self.assert_arraysclose(cov,ncov,rtol=1e-6)
s = SVD(cov,svdnum=2,compute_delta=True)
e,v,k,d = s.val,s.vec,s.kappa,s.delta
self.assert_arraysclose(e,[2.,32.],rtol=1e-6)
self.assertTrue(d is None)
s = SVD(cov,svdnum=3,svdcut=1e-4,compute_delta=True)
e,v,k,d = s.val,s.vec,s.kappa,s.delta
self.assert_arraysclose(e,[32*1e-4,2.,32.],rtol=1e-6)
# s.delta s.decomp
for rescale in [False,True]:
mat = [[1.,.25],[.25,2.]]
s = SVD(mat,rescale=rescale)
if rescale==False:
self.assertTrue(s.D is None)
else:
diag = np.diag(s.D)
self.assert_arraysclose(np.diag(np.dot(diag,np.dot(mat,diag))),
[1.,1.])
self.assert_arraysclose(mat, sum(np.outer(wj,wj) for wj in s.decomp(1)))
s = SVD(mat,svdcut=0.9,compute_delta=True,rescale=rescale)
mout = sum(np.outer(wj,wj) for wj in s.decomp(1))
self.assert_arraysclose(mat+evalcov(s.delta),mout)
self.assertTrue(not np.allclose(mat,mout))
s = SVD(mat,rescale=rescale)
minv = sum(np.outer(wj,wj) for wj in s.decomp(-1))
self.assert_arraysclose([[1.,0.],[0.,1.]],np.dot(mat,minv))
if rescale==False:
m2 = sum(np.outer(wj,wj) for wj in s.decomp(2))
self.assert_arraysclose(mat,np.dot(m2,minv))
def test_diagonal_blocks(self):
""" find_diagonal_blocks """
def make_blocks(*m_list):
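            # assemble a block-diagonal covariance matrix from the given blocks and
            # return correlated gvars built on it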
m_list = [np.asarray(m, float) for m in m_list]
n = sum([m.shape[0] for m in m_list])
ans = np.zeros((n,n), float)
i = 0
for m in m_list:
j = i + m.shape[0]
ans[i:j, i:j] = m
i = j
# mean is irrelevant
return gvar(ans[0], ans)
def compare_blocks(b1, b2):
s1 = set([tuple(list(b1i)) for b1i in b1])
s2 = set([tuple(list(b2i)) for b2i in b2])
self.assertEqual(s1, s2)
m = make_blocks(
[[1]],
[[1, 1], [1, 1]],
[[1]]
)
idx = [idx.tolist() for idx,bcov in evalcov_blocks(m)]
compare_blocks(idx, [[0], [3], [1, 2]])
m = make_blocks(
[[1, 0, 1], [0, 1, 0], [1, 0, 1]],
[[1, 1], [1, 1]],
[[1]],
[[1]]
)
idx = [idx.tolist() for idx,bcov in evalcov_blocks(m)]
compare_blocks(idx, [[1], [5], [6], [0, 2], [3, 4]])
m = make_blocks(
[[1, 0, 1, 1],
[0, 1, 0, 1],
[1, 0, 1, 1],
[1, 1, 1, 1]],
[[1, 1], [1, 1]],
[[1]],
[[1]]
)
idx = [idx.tolist() for idx,bcov in evalcov_blocks(m)]
compare_blocks(idx, [[6], [7], [0, 1, 2, 3] , [4, 5]])
def test_evalcov_blocks(self):
def test_cov(g):
if hasattr(g, 'keys'):
g = BufferDict(g)
g = g.flat[:]
cov = np.zeros((len(g), len(g)), dtype=float)
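            # rebuild the full covariance matrix from the blocks returned by
            # evalcov_blocks and check it against evalcov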
for idx, bcov in evalcov_blocks(g):
cov[idx[:,None], idx] = bcov
self.assertEqual(str(evalcov(g)), str(cov))
g = gv.gvar(5 * ['1(1)'])
test_cov(g)
g[-1] = g[0] + g[1]
test_cov(g)
test_cov(g * gv.gvar('2(1)'))
g = gv.gvar(5 * ['1(1)'])
g[0] = g[-1] + g[-2]
test_cov(g)
def test_evalcov_blocks_compress(self):
def test_cov(g):
if hasattr(g, 'keys'):
g = BufferDict(g)
blocks = evalcov_blocks(g, compress=True)
g = g.flat[:]
cov = np.zeros((len(g), len(g)), dtype=float)
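            # with compress=True the first entry is (indices, sdevs) for the uncorrelated
            # gvars; the remaining entries are (indices, covariance block) pairs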
idx, bsdev = blocks[0]
if len(idx) > 0:
cov[idx, idx] = bsdev ** 2
for idx, bcov in blocks[1:]:
cov[idx[:,None], idx] = bcov
self.assertEqual(str(evalcov(g)), str(cov))
g = gv.gvar(5 * ['1(1)'])
test_cov(g)
test_cov(dict(g=g))
g[-1] = g[0] + g[1]
test_cov(g)
test_cov(dict(g=g))
test_cov(g * gv.gvar('2(1)'))
g = gv.gvar(5 * ['1(1)'])
g[0] = g[-1] + g[-2]
test_cov(g)
test_cov(dict(g=g))
g[1:] += g[:-1]
test_cov(g)
test_cov(dict(g=g))
def test_svd(self):
""" svd """
def make_mat(wlist, n):
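            # rebuild an (inverse) covariance matrix from the weight vectors returned by
            # svd(..., wgts=-1): the first entry holds diagonal weights, later entries
            # add outer products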
ans = np.zeros((n,n), float)
i, wgts = wlist[0]
if len(i) > 0:
ans[i, i] = np.array(wgts) ** 2
for i, wgts in wlist[1:]:
for w in wgts:
ans[i, i[:, None]] += np.outer(w, w)
return ans
def test_gvar(a, b):
self.assertEqual(a.fmt(4), b.fmt(4))
def test_cov(wgts, cov, atol=1e-7):
invcov = make_mat(wgts, cov.shape[0])
np.testing.assert_allclose(
invcov.dot(cov), np.eye(*cov.shape), atol=atol
)
np.testing.assert_allclose(svd.logdet, np.log(np.linalg.det(cov)), atol=atol)
# diagonal
f = gvar(['1(2)', '3(4)'])
g, wgts = svd(f, svdcut=0.9, wgts=-1)
test_gvar(g[0], f[0])
test_gvar(g[1], f[1])
test_cov(wgts, evalcov(g))
self.assertEqual(svd.nmod, 0)
self.assertEqual(svd.eigen_range, 1.)
# degenerate
g, wgts = svd(3 * [gvar('1(1)')], svdcut=1e-10, wgts=-1)
test_cov(wgts, evalcov(g), atol=1e-4)
self.assertEqual(svd.nmod, 2)
self.assertAlmostEqual(svd.eigen_range, 0.0)
# blocks
x = gvar(10 * ['1(1)'])
x[:5] += gvar('1(1)') # half are correlated
g = svd(x, svdcut=0.5)
self.assertEqual(svd.nmod, 4)
p = np.random.permutation(10)
gp = svd(x[p], svdcut=0.5)
self.assertEqual(svd.nmod, 4)
invp = np.argsort(p)
np.testing.assert_allclose(evalcov(g), evalcov(gp[invp]), atol=1e-7)
np.testing.assert_allclose(mean(g), mean(gp[invp]), atol=1e-7)
# cov[i,i] independent of i, cov[i,j] != 0
x, dx = gvar(['1(1)', '0.01(1)'])
g, wgts = svd([(x+dx)/2, (x-dx)/2.], svdcut=0.2 ** 2, wgts=-1)
y = g[0] + g[1]
dy = g[0] - g[1]
test_gvar(y, x)
test_gvar(dy, gvar('0.01(20)'))
test_cov(wgts, evalcov(g))
self.assertEqual(svd.nmod, 1)
self.assertAlmostEqual(svd.eigen_range, 0.01**2)
# negative svdcut
x, dx = gvar(['1(1)', '0.01(1)'])
g, wgts = svd([(x+dx)/2, (x-dx)/20.], svdcut=-0.2 ** 2, wgts=-1)
y = g[0] + g[1] * 10
dy = g[0] - g[1] * 10
np.testing.assert_allclose(evalcov([y, dy]), [[1, 0], [0, 0]], atol=1e-7)
test_gvar(y, x)
test_gvar(dy, gvar('0(0)'))
self.assertEqual(svd.dof, 1)
self.assertAlmostEqual(svd.eigen_range, 0.01**2)
# cov[i,i] independent of i, cov[i,j] != 0 --- cut too small
x, dx = gvar(['1(1)', '0.01(1)'])
g, wgts = svd([(x+dx)/2, (x-dx)/2.], svdcut=0.0099999** 2, wgts=-1)
y = g[0] + g[1]
dy = g[0] - g[1]
test_gvar(y, x)
test_gvar(dy, dx)
test_cov(wgts, evalcov(g))
self.assertEqual(svd.nmod, 0)
self.assertAlmostEqual(svd.eigen_range, 0.01**2)
# cov[i,i] independent of i after rescaling, cov[i,j] != 0
# rescaling turns this into the previous case
g, wgts = svd([(x+dx)/2., (x-dx)/20.], svdcut=0.2 ** 2, wgts=-1)
y = g[0] + g[1] * 10.
dy = g[0] - g[1] * 10.
test_gvar(y, x)
test_gvar(dy, gvar('0.01(20)'))
test_cov(wgts, evalcov(g))
self.assertEqual(svd.nmod, 1)
self.assertAlmostEqual(svd.eigen_range, 0.01**2)
# dispersed correlations
g2, g4 = gvar(['2(2)', '4(4)'])
orig_g = np.array([g2, (x+dx)/2., g4, (x-dx)/20.])
g, wgts = svd(orig_g, svdcut=0.2 ** 2, wgts=-1)
y = g[1] + g[3] * 10.
dy = g[1] - g[3] * 10.
test_gvar(y, x)
test_gvar(dy, gvar('0.01(20)'))
test_gvar(g[0], g2)
test_gvar(g[2], g4)
test_cov(wgts, evalcov(g))
self.assertEqual(svd.nmod, 1)
self.assertAlmostEqual(svd.eigen_range, 0.01**2)
self.assertEqual(svd.nblocks[1], 2)
self.assertEqual(svd.nblocks[2], 1)
# remove svd correction
g -= g.correction
y = g[1] + g[3] * 10.
dy = g[1] - g[3] * 10.
test_gvar(y, x)
test_gvar(dy, dx)
test_gvar(g[0], g2)
test_gvar(g[2], g4)
np.testing.assert_allclose(evalcov(g.flat), evalcov(orig_g), atol=1e-7)
# noise=True
x, dx = gvar(['1(1)', '0.01(1)'])
g, wgts = svd([(x+dx)/2, (x-dx)/2.], svdcut=0.2 ** 2, wgts=-1, noise=True)
y = g[0] + g[1]
dy = g[0] - g[1]
offsets = mean(g.correction)
self.assertEqual(g.nmod, 1)
self.assertAlmostEqual(offsets[0], -offsets[1])
self.assertGreater(chi2(g.correction[0]).Q, 0.01)
self.assertLess(chi2(g.correction[0]).Q, 0.99)
with self.assertRaises(AssertionError):
test_gvar(y, x)
test_gvar(dy, gvar('0.01(20)'))
self.assertTrue(equivalent(
g - g.correction, [(x+dx)/2, (x-dx)/2.]
))
self.assertTrue(not equivalent(
g, [(x+dx)/2, (x-dx)/2.]
))
# bufferdict
g = {}
g[0] = (x+dx)/2.
g[1] = (x-dx)/20.
g, wgts = svd({0:(x+dx)/2., 1:(x-dx)/20.}, svdcut=0.2 ** 2, wgts=-1)
assert isinstance(g, BufferDict)
y = g[0] + g[1] * 10.
dy = g[0] - g[1] * 10.
test_gvar(y, x)
test_gvar(dy, gvar('0.01(20)'))
test_cov(wgts, evalcov(g.flat))
self.assertEqual(svd.nmod, 1)
self.assertAlmostEqual(svd.eigen_range, 0.01**2)
self.assertTrue(equivalent(
g - g.correction, {0:(x+dx)/2, 1:(x-dx)/20.}
))
self.assertTrue(not equivalent(
g, {0:(x+dx)/2, 1:(x-dx)/20.}
))
    def test_valder(self):
        """ valder """
alist = [[1.,2.,3.]]
a = valder([[1.,2.,3.]])
alist = np.array(alist)
self.assertEqual(np.shape(a),np.shape(alist))
na = len(alist.flat)
for i,(ai,ali) in enumerate(zip(a.flat,alist.flat)):
der = np.zeros(na,float)
der[i] = 1.
self.assert_arraysequal(ai.der,der)
self.assertEqual(ai.val,ali)
def test_ranseed(self):
""" ranseed """
f = raniter([x,y])
ranseed((1,2))
f1 = next(f)
x1 = x()
y1 = y()
ranseed((1,2))
self.assert_arraysequal(f1,next(f))
self.assertEqual(x1,x())
self.assertEqual(y1,y())
# default initialization
ranseed()
f1 = next(f)
ranseed(ranseed.seed)
self.assert_arraysequal(f1, next(f))
def test_rebuild(self):
""" rebuild """
gvar = gvar_factory()
a = gvar([1.,2.],[[4.,2.],[2.,16.]])
b = a*gvar(1.,10.)
c = rebuild(b)
self.assert_arraysequal(c[0].der[-2:],b[0].der[:-1])
self.assert_arraysclose(evalcov(c),evalcov(b))
gvar = gvar_factory()
c = rebuild({0:b[0],1:b[1]},gvar=gvar)
c = np.array([c[0],c[1]])
self.assert_arraysequal(c[0].der,b[0].der[:-1])
self.assert_arraysclose(evalcov(c),evalcov(b) )
def test_chi2(self):
""" chi2(g1, g2) """
# uncorrelated
g = gvar([1., 2.], [1., 2.])
x = [2., 4.]
ans = chi2(x, g)
self.assertAlmostEqual(ans, 2., places=5)
self.assertEqual(ans.dof, 2)
self.assertAlmostEqual(ans.Q, 0.36787944, places=2)
# correlated
g = np.array([g[0]+g[1], g[0]-g[1]])
x = np.array([x[0]+x[1], x[0]-x[1]])
ans = chi2(x, g)
self.assertAlmostEqual(ans, 2., places=5)
self.assertEqual(ans.dof, 2)
self.assertAlmostEqual(ans.Q, 0.36787944, places=2)
# correlated with 0 mode and svdcut < 0
g = np.array([g[0], g[1], g[0]+g[1]])
x = np.array([x[0], x[1], x[0]+x[1]])
ans = chi2(x, g, svdcut=-1e-10)
self.assertAlmostEqual(ans, 2., places=5)
self.assertEqual(ans.dof, 2)
self.assertAlmostEqual(ans.Q, 0.36787944, places=2)
# dictionaries with different keys
g = dict(a=gvar(1,1), b=[[gvar(2,2)], [gvar(3,3)], [gvar(4,4)]], c=gvar(5,5))
x = dict(a=2., b=[[4.], [6.]])
ans = chi2(x,g)
self.assertAlmostEqual(ans, 3.)
self.assertEqual(ans.dof, 3)
self.assertAlmostEqual(ans.Q, 0.3916252, places=2)
ans = chi2(g,x)
self.assertAlmostEqual(ans, 3.)
self.assertEqual(ans.dof, 3)
self.assertAlmostEqual(ans.Q, 0.3916252, places=2)
ans = chi2(2., gvar(1,1))
self.assertAlmostEqual(ans, 1.)
self.assertEqual(ans.dof, 1)
self.assertAlmostEqual(ans.Q, 0.31731051, places=2)
# two dictionaries
g1 = dict(a=gvar(1, 1), b=[gvar(2, 2)])
g2 = dict(a=gvar(2, 2), b=[gvar(4, 4)])
ans = chi2(g1, g2)
self.assertAlmostEqual(ans, 0.2 + 0.2)
self.assertEqual(ans.dof, 2)
self.assertAlmostEqual(ans.Q, 0.81873075, places=2)
def test_corr(self):
""" rebuild (corr!=0) """
a = gvar([1., 2.], [3., 4.])
corr = 1.
b = rebuild(a, corr=corr)
self.assert_arraysclose(evalcov(a).diagonal(),evalcov(b).diagonal())
bcov = evalcov(b)
self.assert_arraysclose(bcov[0,1],corr*(bcov[0,0]*bcov[1,1])**0.5)
self.assert_arraysclose(bcov[1,0],bcov[0,1])
self.assert_arraysclose((b[1]-b[0]).sdev,1.0)
self.assert_arraysclose((a[1]-a[0]).sdev,5.0)
def test_filter(self):
g = collections.OrderedDict([('a', 2.3), ('b', [gv.gvar('12(2)'), 3.]), ('c', 'string')])
gm = collections.OrderedDict([('a', 2.3), ('b', [2., 3.]), ('c', 'string')])
self.assertEqual(str(gv.filter(g, gv.sdev)), str(gm))
def test_pickle(self):
""" pickle strategies """
for g in [
'1(5)',
[['2(1)'], ['3(2)']],
dict(a='4(2)', b=[['5(5)', '6(9)']]),
]:
g1 = gvar(g)
gtuple = (mean(g1), evalcov(g1))
gpickle = pickle.dumps(gtuple)
g2 = gvar(pickle.loads(gpickle))
self.assertEqual(str(g1), str(g2))
self.assertEqual(str(evalcov(g1)), str(evalcov(g2)))
def test_dump_load(self):
dict = collections.OrderedDict
gs = gv.gvar('1(2)') * gv.gvar('3(2)')
ga = gv.gvar([2, 3], [[5., 1.], [1., 10.]])
gd = gv.gvar(dict(s='1(2)', v=['2(2)', '3(3)'], g='4(4)'))
gd['v'] += gv.gvar('0(1)')
gd[(1,3)] = gv.gvar('13(13)')
gd['v'] = 1 / gd['v']
def _test(g, outputfile=None, test_cov=True):
s = dump(g, outputfile=outputfile)
d = load(s if outputfile is None else outputfile)
self.assertEqual( str(g), str(d))
if test_cov:
self.assertEqual( str(gv.evalcov(g)), str(gv.evalcov(d)))
# cleanup
if isinstance(outputfile, str):
os.remove(outputfile)
return d
for g in [gs, ga, gd]:
_test(g)
_test(g, outputfile='xxx.pickle')
_test(g, outputfile='xxx')
gd['x'] = 5.0
_test(gd, test_cov=False)
_test(gd, outputfile='xxx', test_cov=False)
for g in [gs, ga, gd]:
g = gv.mean(g)
_test(g, test_cov=False)
# misc types
g = dict(
s=set([1,2,12.2]),
a=1,
b=[1,[gv.gvar('3(1)') * gv.gvar('2(1)'), 4]],
c=dict(a=gv.gvar(5 * ['1(2)']), b=np.array([[4]])),
d=collections.deque([1., 2, gv.gvar('4(1)')]),
e='a string',
g=(3, 'hi', gv.gvar('-1(2)')),
)
g['f'] = ['str', g['b'][1][0] * gv.gvar('5(2)')]
d = _test(g, outputfile='xxx', test_cov=False)
# dumping classes, without and with special methods
g['C'] = C(gv.gvar(2 * ['3(4)']) * gv.gvar('10(1)'), 'str', (1,2,gv.gvar('2(1)')))
d = _test(g, test_cov=False)
self.assertEqual(str(gv.evalcov(d['C'].x)), str(gv.evalcov(g['C'].x)))
g['C'] = CC(gv.gvar(2 * ['3(4)']) * gv.gvar('10(1)'), 'str', 12.)
d = gv.loads(gv.dumps(g))
self.assertEqual(d['C'].z, None)
self.assertEqual(g['C'].z, 12.)
self.assertEqual(str(gv.evalcov(d['C'].x)), str(gv.evalcov(g['C'].x)))
def test_dump_load_errbudget(self):
dict = collections.OrderedDict
def _test(d, add_dependencies=False):
d = gv.BufferDict(d)
newd = loads(dumps(d, add_dependencies=add_dependencies))
str1 = str(d) + fmt_errorbudget(
outputs=dict(a=d['a'], b=d['b']),
inputs=dict(x=d['x'], y=d['y'], z=d['z']),
)
d = newd
str2 = str(d) + fmt_errorbudget(
outputs=dict(a=d['a'], b=d['b']),
inputs=dict(x=d['x'], y=d['y'], z=d['z']),
)
self.assertEqual(str1, str2)
# all primaries included
x = gv.gvar('1(2)')
y = gv.gvar('2(3)') ** 2
z = gv.gvar('3(4)') ** 0.5
u = gv.gvar([2, 3], [[5., 1.], [1., 10.]])
a = x*y
b = x*y - z
d = dict(a=a, b=b, x=x, y=y, z=z, u=u, uu=u*gv.gvar('1(1)'), xx=x)
_test(d)
del d['xx']
_test(d)
# a,b are primaries
a, b = gvar(mean([d['a'], d['b']]), evalcov([d['a'], d['b']]))
d['a'] = a
d['b'] = b
_test(d)
# no primaries included explicitly
x = gv.gvar('1(2)') + gv.gvar('1(2)')
y = gv.gvar('2(3)') ** 2 + gv.gvar('3(1)')
z = gv.gvar('3(4)') ** 0.5 + gv.gvar('4(1)')
a = x*y
b = x*y - z + gv.gvar('10(1)')
d = dict(a=a, b=b, x=x, y=y, z=z, uu=u*gv.gvar('1(1)'), xx=x)
_test(d, add_dependencies=True)
# mixture
x = gv.gvar('1(2)')
y = gv.gvar('2(3)') ** 2 + gv.gvar('3(1)')
z = gv.gvar('3(4)') ** 0.5 + gv.gvar('4(1)')
a = x*y
b = x*y - z + gv.gvar('10(1)')
d = dict(a=a, b=b, x=x, y=y, z=z, u=u, uu=u*gv.gvar('1(1)'), xx=x)
_test(d, add_dependencies=True)
    def test_more_dump(self):
        " check on a particular issue "
x = gv.gvar(4 * ['1(2)'])
x[0] -= x[1] * gv.gvar('1(10)')
x[2] += x[1]
str1 = str(x) + str(evalcov(x))
x = loads(dumps(x))
str2 = str(x) + str(evalcov(x))
self.assertEqual(str1, str2)
def test_dumps_loads(self):
dict = collections.OrderedDict
gs = gv.gvar('1(2)')
ga = (gv.gvar(['2(2)', '3(3)']) + gv.gvar('0(1)') )
gd = gv.gvar(dict(s='1(2)', v=['2(2)', '3(3)'], g='4(4)'))
gd['v'] += gv.gvar('0(1)')
gd[(1,3)] = gv.gvar('13(13)')
gd['v'] = 1 / gd['v']
def _test(g):
s = dumps(g)
d = loads(s)
self.assertEqual( str(g), str(d))
self.assertEqual( str(gv.evalcov(g)), str(gv.evalcov(d)))
for g in [gs, ga, gd]:
_test(g)
###############
def test_gdump_gload(self):
gs = gv.gvar('1(2)') * gv.gvar('3(2)')
ga = gv.gvar([2, 3], [[5., 1.], [1., 10.]])
gd = gv.gvar(dict(s='1(2)', v=['2(2)', '3(3)'], g='4(4)'))
gd['v'] += gv.gvar('0(1)')
gd[(1,3)] = gv.gvar('13(13)')
gd['v'] = 1 / gd['v']
def _test(g, outputfile=None, method=None):
s = gdump(g, outputfile=outputfile, method=method)
d = gload(s if outputfile is None else outputfile, method=method)
self.assertEqual( str(g), str(d))
self.assertEqual( str(gv.evalcov(g)), str(gv.evalcov(d)))
# cleanup
if isinstance(outputfile, str):
os.remove(outputfile)
for g in [gs, ga, gd]:
_test(g)
_test(g, outputfile='xxx.json')
_test(g, outputfile='xxx.pickle')
_test(g, outputfile='xxx')
_test(g, outputfile='xxx', method='pickle')
_test(g, method='json')
_test(g, method='pickle')
_test(g, method='dict')
def test_gdump_gload_errbudget(self):
def _test(d, add_dependencies=False):
d = gv.BufferDict(d)
newd = gloads(gdumps(d, add_dependencies=add_dependencies))
str1 = str(d) + fmt_errorbudget(
outputs=dict(a=d['a'], b=d['b']),
inputs=dict(x=d['x'], y=d['y'], z=d['z']),
)
d = newd
str2 = str(d) + fmt_errorbudget(
outputs=dict(a=d['a'], b=d['b']),
inputs=dict(x=d['x'], y=d['y'], z=d['z']),
)
self.assertEqual(str1, str2)
# all primaries included
x = gv.gvar('1(2)')
y = gv.gvar('2(3)') ** 2
z = gv.gvar('3(4)') ** 0.5
u = gv.gvar([2, 3], [[5., 1.], [1., 10.]])
a = x*y
b = x*y - z
d = dict(a=a, b=b, x=x, y=y, z=z, u=u, uu=u*gv.gvar('1(1)'), xx=x)
_test(d)
del d['xx']
_test(d)
# a,b are primaries
a, b = gvar(mean([d['a'], d['b']]), evalcov([d['a'], d['b']]))
d['a'] = a
d['b'] = b
_test(d)
# no primaries included explicitly
x = gv.gvar('1(2)') + gv.gvar('1(2)')
y = gv.gvar('2(3)') ** 2 + gv.gvar('3(1)')
z = gv.gvar('3(4)') ** 0.5 + gv.gvar('4(1)')
a = x*y
b = x*y - z + gv.gvar('10(1)')
d = dict(a=a, b=b, x=x, y=y, z=z, uu=u*gv.gvar('1(1)'), xx=x)
_test(d, add_dependencies=True)
# mixture
x = gv.gvar('1(2)')
y = gv.gvar('2(3)') ** 2 + gv.gvar('3(1)')
z = gv.gvar('3(4)') ** 0.5 + gv.gvar('4(1)')
a = x*y
b = x*y - z + gv.gvar('10(1)')
d = dict(a=a, b=b, x=x, y=y, z=z, u=u, uu=u*gv.gvar('1(1)'), xx=x)
_test(d, add_dependencies=True)
    def test_more_gdump(self):
        " check on a particular issue "
x = gv.gvar(4 * ['1(2)'])
x[0] -= x[1] * gv.gvar('1(10)')
x[2] += x[1]
str1 = str(x) + str(evalcov(x))
x = gloads(gdumps(x))
str2 = str(x) + str(evalcov(x))
self.assertEqual(str1, str2)
def test_gdumps_gloads(self):
gs = gv.gvar('1(2)')
ga = (gv.gvar(['2(2)', '3(3)']) + gv.gvar('0(1)') )
gd = gv.gvar(dict(s='1(2)', v=['2(2)', '3(3)'], g='4(4)'))
gd['v'] += gv.gvar('0(1)')
gd[(1,3)] = gv.gvar('13(13)')
gd['v'] = 1 / gd['v']
# json (implicit)
def _test(g):
s = gdumps(g)
d = gloads(s)
self.assertEqual( str(g), str(d))
self.assertEqual( str(gv.evalcov(g)), str(gv.evalcov(d)))
for g in [gs, ga, gd]:
_test(g)
# json
def _test(g):
s = gdumps(g, method='json')
d = gloads(s)
self.assertEqual( str(g), str(d))
self.assertEqual( str(gv.evalcov(g)), str(gv.evalcov(d)))
for g in [gs, ga, gd]:
_test(g)
# pickle
def _test(g):
s = gdumps(g, method='pickle')
d = gloads(s)
self.assertEqual( str(g), str(d))
self.assertEqual( str(gv.evalcov(g)), str(gv.evalcov(d)))
for g in [gs, ga, gd]:
_test(g)
################
def test_oldload(self):
gd = gv.gvar(dict(s='1(2)', v=['2(2)', '3(3)'], g='4(4)'))
for g in [gd, gd['s'], gd['v']]:
with warnings.catch_warnings():
warnings.simplefilter("ignore")
g = gv.gvar(dict(s='1(2)', v=['2(2)', '3(3)'], g='4(4)'))
olddump(g, 'xxx.p')
d = load('xxx.p')
assert str(g) == str(d)
assert str(gv.evalcov(g)) == str(gv.evalcov(d))
olddump(g, 'xxx.json', method='json')
d = load('xxx.json', method='json')
assert str(g) == str(d)
assert str(gv.evalcov(g)) == str(gv.evalcov(d))
def test_dependencies(self):
def _test(g):
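            # a derived gvar should be recoverable from the primaries returned by
            # dependencies(): its mean plus a linear term for each dependency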
dep = dependencies(g)
new_g = g.mean + sum(dep * g.deriv(dep))
self.assertEqual(str(new_g - g), str(gvar('0(0)')))
self.assertTrue(equivalent(g, new_g))
x = gv.gvar('1(2)')
y = gv.gvar('2(3)') ** 2
z = gv.gvar('3(4)') ** 0.5 * y
_test(x * y)
_test(x * y - z)
self.assertEqual(len(dependencies([y, x])), 0)
self.assertEqual(len(dependencies([y, 'string', x])), 0)
self.assertEqual(len(dependencies([y, x, x**2, 2*y])), 0)
self.assertEqual(len(dependencies([x*y, x])), 1)
self.assertEqual(len(dependencies([x*y, x, x, x])), 1)
self.assertEqual(len(dependencies([x*y, x], all=True)), 2)
self.assertEqual(len(dependencies([x*y, x, 'string'], all=True)), 2)
self.assertEqual(len(dependencies([x*y, x, x, x], all=True)), 2)
self.assertTrue(missing_dependencies([x*y, x]))
self.assertTrue(missing_dependencies([x*y, x+y, x, x]))
self.assertTrue(not missing_dependencies([y, x]))
self.assertTrue(not missing_dependencies([x*y, x, y]))
def test_gammaQ(self):
" gammaQ(a, x) "
cases = [
(2.371, 5.243, 0.05371580082389009, 0.9266599665892222),
(20.12, 20.3, 0.4544782602230986, 0.4864172139106905),
(100.1, 105.2, 0.29649013488390663, 0.6818457585776236),
(1004., 1006., 0.4706659307021259, 0.5209695379094582),
]
for a, x, gax, gxa in cases:
np.testing.assert_allclose(gax, gv._utilities.gammaQ(a, x), rtol=0.01)
np.testing.assert_allclose(gxa, gv._utilities.gammaQ(x, a), rtol=0.01)
def test_erf(self):
" erf(x) "
for x in [-1.1, 0.2]:
self.assertAlmostEqual(erf(x), math.erf(x))
x = [[-1.1], [0.2]]
np.testing.assert_allclose(erf(x), [[math.erf(-1.1)], [math.erf(0.2)]])
x = gv.gvar('0(2)')
erfx = erf(x)
self.assertAlmostEqual(erfx.mean, math.erf(0))
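        # the propagated error should equal x.sdev (= 2) times erf'(0), estimated
        # here by a central finite difference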
self.assertAlmostEqual(
erfx.sdev,
2 * (math.erf(1e-10) - math.erf(-1e-10)) / 2e-10
)
x = gv.gvar('1.5(2)')
self.assertAlmostEqual(erf(x).mean, math.erf(x.mean))
x = gv.gvar(['0(2)', '1.5(2)'])
erfx = erf(x)
self.assertAlmostEqual(erfx[0].mean, math.erf(x[0].mean))
self.assertAlmostEqual(erfx[1].mean, math.erf(x[1].mean))
def test_equivalent(self):
" equivalent(g1, g2) "
x = gvar(['1(1)', '2(2)'])
y = gvar(['1(1)', '2(2)'])
u = 2 ** 0.5 * np.array([[0.5, 0.5],[-0.5, 0.5]])
ux = u.dot(x)
uTy = u.T.dot(y)
ux_y = ux + y
xnew = u.T.dot(ux_y) - uTy
self.assertTrue(equivalent(x, xnew))
self.assertTrue(not equivalent(x, y))
self.assertTrue(equivalent(x[0], xnew[0]))
d = dict(x=x, y0=y[0])
dnew = dict(x=xnew, y0=y[0])
self.assertTrue(equivalent(d, dnew))
dnew = dict(x=y, y0=y[0])
self.assertTrue(not equivalent(d, dnew))
dnew = dict(x=xnew, y0=x[0])
self.assertTrue(not equivalent(d, dnew))
def test_is_primary(self):
" is_primary(g) "
self.assertTrue(gvar('1(1)').is_primary())
self.assertTrue((2 * gvar('1(1)')).is_primary())
self.assertFalse((gvar('2(1)') * gvar('1(1)')).is_primary())
gs = gvar('1(1)')
ga = gvar(2 * [3 * ['1(1)']])
gd = dict(s=gs, a=ga)
self.assertEqual(is_primary(gs), True)
self.assertEqual(is_primary(ga).tolist(), 2 * [3 * [True]])
self.assertEqual(is_primary(gd).buf.tolist(), 7 * [True])
self.assertEqual(is_primary([gs, gs]).tolist(), [True, False])
gs = gs + gvar('1(1)')
ga[0, 0] += gvar('2(1)')
ga[1, 0] *= 5.
gd = BufferDict()
gd['s'] = gs
gd['a'] = ga
self.assertEqual(is_primary(gs), False)
self.assertEqual(is_primary(ga).tolist(), [[False, True, True], [True, True, True]])
self.assertEqual(is_primary(gd).buf.tolist(), [False, False] + 5 * [True])
def test_disassemble(self):
" d = disassemble(g); reassemble(d) "
# gvar
g = gvar('1(2)')
gn = reassemble(disassemble(g), gvar.cov)
d = gn - g
self.assertEqual(d.mean, 0.0)
self.assertEqual(d.sdev, 0.0)
# array
g = gvar([['1(2)', '2(3)'], ['3(4)', '4(5)']])
gn = reassemble(disassemble(g), gvar.cov)
self.assertEqual(g.shape, gn.shape)
d = gn - g
self.assertTrue(np.all(gv.mean(d) == 0.0))
self.assertTrue(np.all(gv.sdev(d) == 0.0))
# dict
g = gvar(
dict(s=gvar('1(2)'), a=gvar([['1(2)', '2(3)'], ['3(4)', '4(5)']]))
)
gn = reassemble(disassemble(g), gvar.cov)
for k in g:
d = gn[k] - g[k]
self.assertTrue(np.all(gv.mean(d) == 0.0))
self.assertTrue(np.all(gv.mean(d) == 0.0))
@unittest.skipIf(FAST,"skipping test_pdfstats for speed")
def test_pdfstats(self):
" PDFStatistics(moments) "
x = gv.gvar('3.0(4)')
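        # estimate mean/sdev/skew/excess kurtosis from sampled moments ten times and
        # require agreement with x within 5 sigma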
avgs = np.zeros((10,4), float)
for i in range(10):
moments = np.zeros(4, float)
for xi in gv.raniter(x, 100):
moments += xi ** np.arange(1, 5)
s = PDFStatistics(moments / 100.)
avgs[i] = [s.mean, s.sdev, s.skew, s.ex_kurt]
mean = np.mean(avgs,axis=0)
sdev = np.std(avgs, axis=0)
diff = gvar(mean, sdev) - [x.mean, x.sdev, 0., 0.]
self.assertTrue(
np.all(np.fabs(gv.mean(diff)) < 5 * gv.sdev(diff))
)
@unittest.skipIf(not have_vegas, "vegas not installed")
@unittest.skipIf(FAST,"skipping test_pdfstatshist for speed")
def test_pdfstatshist(self):
" PDFStatistics(histogram) "
g = gv.gvar('2(1.0)')
hist = PDFHistogram(g + 0.1, nbin=50, binwidth=0.2)
integ = vegas.PDFIntegrator(g)
integ(neval=1000, nitn=5)
def f(p):
return hist.count(p)
results = integ(f, neval=1000, nitn=5,adapt=False)
for stats in [
PDFStatistics(histogram=(hist.bins, results)),
hist.analyze(results).stats
]:
self.assertTrue(
abs(stats.median.mean - g.mean) < 5 * stats.median.sdev
)
self.assertTrue(
abs(stats.plus.mean - g.sdev) < 5 * stats.plus.sdev
)
self.assertTrue(
abs(stats.minus.mean - g.sdev) < 5 * stats.minus.sdev
)
def test_regulate(self):
D = np.array([1., 2., 3.])
corr = np.array([[1., .1, .2], [.1, 1., .3], [.2, .3, 1.]])
cov = D[:, None] * corr * D[None, :]
g1 = gvar(1, 10)
g2 = gvar(3 * [2], cov)
g3 = gvar(3 * [3], 2 * cov)
g = np.concatenate(([g1], g2, g3))
cov = evalcov(g)
eps = 0.25
norm = np.linalg.norm(evalcorr(g), np.inf)
gr = regulate(g, eps=eps / norm, wgts=False)
self.assertTrue(gv.equivalent(gr - gr.correction, g))
self.assertEqual(g.size, gr.dof)
self.assertEqual(g.size - 1, gr.nmod)
self.assertAlmostEqual(gr.eps, eps / norm)
self.assertEqual(gr.svdcut, None)
covr = evalcov(gr)
np.testing.assert_allclose(covr[0, :], cov[0, :])
np.testing.assert_allclose(covr[:, 0], cov[:, 0])
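        # regulate should only have inflated the diagonal of the correlated sub-block
        # by eps; undo that and the rest must match the original covariance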
covr[1:, 1:][np.diag_indices_from(covr[1:, 1:])] -= eps * cov[1:, 1:].diagonal()
np.testing.assert_allclose(covr[1:, 1:], cov[1:, 1:])
gr, dummy = regulate(g, eps=eps / norm, wgts=True)
self.assertTrue(gv.equivalent(gr - gr.correction, g))
self.assertEqual(g.size - 1, gr.nmod)
self.assertEqual(g.size, gr.dof)
covr = evalcov(gr)
np.testing.assert_allclose(covr[0, :], cov[0, :])
np.testing.assert_allclose(covr[:, 0], cov[:, 0])
covr[1:, 1:][np.diag_indices_from(covr[1:, 1:])] -= eps * cov[1:, 1:].diagonal()
np.testing.assert_allclose(covr[1:, 1:], cov[1:, 1:])
def test_regulate_svdcut(self):
" regulate -> svd "
D = np.array([1., 2., 3.])
corr = np.array([[1., .1, .2], [.1, 1., .3], [.2, .3, 1.]])
cov = D[:, None] * corr * D[None, :]
g1 = gvar(1, 10)
g2 = gvar(3 * [2], cov)
g3 = gvar(3 * [3], 2 * cov)
g = np.concatenate(([g1], g2, g3))
svdcut = 0.25
# verify that svd is being called in each case
gr = regulate(g, svdcut=svdcut, wgts=False)
self.assertTrue(gv.equivalent(gr - gr.correction, g))
self.assertEqual(gr.svdcut, svdcut)
self.assertEqual(gr.eps, None)
gr = regulate(g, wgts=False)
self.assertTrue(gv.equivalent(gr - gr.correction, g))
self.assertEqual(gr.svdcut, 1e-12) # default
self.assertEqual(gr.eps, None)
gr = regulate(g, svdcut=svdcut, eps=svdcut, wgts=False)
self.assertTrue(gv.equivalent(gr - gr.correction, g))
self.assertEqual(gr.svdcut, svdcut)
self.assertEqual(gr.eps, None)
def test_regulate_singular(self):
D = np.array([1., 2., 3.])
# two zero eigenvalues
corr = np.array([[1., 1., 1.], [1., 1., 1.], [1.,1.,1.]])
cov = D[:, None] * corr * D[None, :]
g1 = gvar(1, 10)
g2 = gvar(3 * [2], cov)
g3 = gvar(3 * [3], 2 * cov)
g = np.concatenate(([g1], g2, g3))
cov = evalcov(g)
corr = evalcorr(g)
eps = 0.1
norm = np.linalg.norm(evalcorr(g), np.inf)
gr = regulate(g, eps=eps / norm, wgts=False)
self.assertTrue(gv.equivalent(gr - gr.correction, g))
covr = evalcov(gr)
np.testing.assert_allclose(covr[0, :], cov[0, :])
np.testing.assert_allclose(covr[:, 0], cov[:, 0])
covr[1:, 1:][np.diag_indices_from(covr[1:, 1:])] -= eps * cov[1:, 1:].diagonal()
np.testing.assert_allclose(covr[1:, 1:], cov[1:, 1:])
gr, dummy = regulate(g, eps=eps / norm, wgts=True)
self.assertTrue(gv.equivalent(gr - gr.correction, g))
covr = evalcov(gr)
np.testing.assert_allclose(covr[0, :], cov[0, :])
np.testing.assert_allclose(covr[:, 0], cov[:, 0])
covr[1:, 1:][np.diag_indices_from(covr[1:, 1:])] -= eps * cov[1:, 1:].diagonal()
np.testing.assert_allclose(covr[1:, 1:], cov[1:, 1:])
with self.assertRaises(np.linalg.LinAlgError):
# det(corr)=0, so this should trigger an error
gr, dummy = regulate(g, eps=0, wgts=True)
def test_regulate_dict(self):
D = np.array([1., 2., 3.])
corr = np.array([[1., .1, .2], [.1, 1., .3], [.2, .3, 1.]])
cov = D[:, None] * corr * D[None, :]
g = BufferDict()
g[1] = gvar(1, 10)
g[2] = gvar(3 * [2], cov)
g[3] = gvar(3 * [3], 2 * cov)
cov = evalcov(g.flat)
eps = 0.1
norm = np.linalg.norm(evalcorr(g.flat), np.inf)
gr = regulate(g, eps=eps / norm, wgts=False)
self.assertTrue(gv.equivalent(gr - gr.correction, g))
covr = evalcov(gr.flat)
np.testing.assert_allclose(covr[0, :], cov[0, :])
np.testing.assert_allclose(covr[:, 0], cov[:, 0])
covr[1:, 1:][np.diag_indices_from(covr[1:, 1:])] -= eps * cov[1:, 1:].diagonal()
np.testing.assert_allclose(covr[1:, 1:], cov[1:, 1:])
gr, dummy = regulate(g, eps=eps / norm, wgts=True)
self.assertTrue(gv.equivalent(gr - gr.correction, g))
covr = evalcov(gr.flat)
np.testing.assert_allclose(covr[0, :], cov[0, :])
np.testing.assert_allclose(covr[:, 0], cov[:, 0])
covr[1:, 1:][np.diag_indices_from(covr[1:, 1:])] -= eps * cov[1:, 1:].diagonal()
np.testing.assert_allclose(covr[1:, 1:], cov[1:, 1:])
def test_regulate_wgts(self):
D = np.array([1., 2., 3.])
corr = np.array([[1., .1, .2], [.1, 1., .3], [.2, .3, 1.]])
cov = D[:, None] * corr * D[None, :]
g1 = gvar(1, 10)
g2 = gvar(3 * [2], cov)
g3 = gvar(3 * [3], 2 * cov)
g = np.concatenate(([g1], g2, g3))
gr, i_wgts = regulate(g, eps=1e-15, wgts=1)
covr = np.zeros((g.size, g.size), dtype=float)
i, wgts = i_wgts[0]
if len(i) > 0:
covr[i, i] = np.array(wgts) ** 2
for i, wgts in i_wgts[1:]:
covr[i[:, None], i] = (wgts.T).dot(wgts) # wgts.T @ wgts
np.testing.assert_allclose(numpy.log(numpy.linalg.det(covr)), gr.logdet)
self.assertEqual(gr.nmod, 6)
np.testing.assert_allclose(covr[0,0], 100.)
np.testing.assert_allclose(covr[1:4,1:4], cov)
np.testing.assert_allclose(covr[4:7,4:7], 2 * cov)
gr, i_wgts = regulate(g, eps=1e-15, wgts=-1)
invcovr = np.zeros((g.size, g.size), dtype=float)
i, wgts = i_wgts[0]
if len(i) > 0:
invcovr[i, i] = np.array(wgts) ** 2
for i, wgts in i_wgts[1:]:
invcovr[i[:, None], i] += (wgts.T).dot(wgts) # wgts.T @ wgts
np.testing.assert_allclose(invcovr[0,0], 1/100.)
np.testing.assert_allclose(invcovr[1:4,1:4], np.linalg.inv(cov))
np.testing.assert_allclose(invcovr[4:7,4:7], 0.5 * np.linalg.inv(cov))
class C:
def __init__(self, x, y, z):
self.x = x
self.y = y
self.z = z
def __str__(self):
return str(self.__dict__)
def __repr__(self):
return str(self.__dict__)
class CC:
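    # like C, but with the _remove_gvars/_distribute_gvars hooks that let dump/load
    # handle the GVars it contains; z is deliberately dropped in the round trip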
def __init__(self, x, y, z):
self.x = x
self.y = y
self.z = z
def __str__(self):
return str(self.__dict__)
def __repr__(self):
return str(self.__dict__)
def _remove_gvars(self, gvlist):
c = copy.copy(self)
c.z = None
        c.__dict__ = gv.remove_gvars(c.__dict__, gvlist)
return c
def _distribute_gvars(self, gvlist):
self.__dict__ = gv.distribute_gvars(self.__dict__, gvlist)
return self
if __name__ == '__main__':
unittest.main()
| gplepage/gvar | tests/test_gvar.py | Python | gpl-3.0 | 91,671 |
# -*- coding: utf-8 -*-
# * Authors:
# * TJEBBES Gaston <[email protected]>
# * Arezki Feth <[email protected]>;
# * Miotte Julien <[email protected]>;
import pytest
from autonomie.tests.tools import Dummy
def test_default_disable():
from autonomie.forms.user.user import deferred_company_disable_default
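    # a company with several employees should not be disabled by default; one whose
    # only employee is this user should be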
companies = [Dummy(employees=range(2))]
user = Dummy(companies=companies)
req = Dummy(context=user)
assert not deferred_company_disable_default("", {'request': req})
companies = [Dummy(employees=[1])]
user = Dummy(companies=companies)
req = Dummy(context=user)
    assert deferred_company_disable_default("", {'request': req})
def test_user_add_schema(pyramid_request):
import colander
from autonomie.forms.user.user import get_add_edit_schema
appstruct = {
'civilite': u'Monsieur',
'lastname': u'Test lastname',
'firstname': u"Firstname",
'email': "[email protected]",
'add_login': "0",
}
schema = get_add_edit_schema()
schema = schema.bind(request=pyramid_request)
result = schema.deserialize(appstruct)
assert 'email' in result
# civilite
with pytest.raises(colander.Invalid):
appstruct = {
'civilite': u"Not a valid one",
'lastname': u'Test lastname',
'firstname': u"Firstname",
'email': "[email protected]",
'add_login': "0",
}
schema.deserialize(appstruct)
# lastname
with pytest.raises(colander.Invalid):
appstruct = {
'civilite': u'Monsieur',
'firstname': u"Firstname",
'email': "[email protected]",
'add_login': "0",
}
schema.deserialize(appstruct)
# firstname
with pytest.raises(colander.Invalid):
appstruct = {
'civilite': u'Monsieur',
'lastname': u'Test lastname',
'email': "[email protected]",
'add_login': "0",
}
schema.deserialize(appstruct)
# email
with pytest.raises(colander.Invalid):
appstruct = {
'civilite': u'Monsieur',
'lastname': u'Test lastname',
'firstname': u"Firstname",
'add_login': "0",
}
schema.deserialize(appstruct)
with pytest.raises(colander.Invalid):
appstruct = {
'civilite': u'Monsieur',
'lastname': u'Test lastname',
'firstname': u"Firstname",
'email': "notanemail",
'add_login': "0",
}
schema.deserialize(appstruct)
| CroissanceCommune/autonomie | autonomie/tests/forms/user/test_user.py | Python | gpl-3.0 | 2,558 |
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D  # noqa: F401  (registers the '3d' projection on older matplotlib)
from h5py import File
from numpy import array
def launch_plots(): # TODO set activation of different plots
plot3d = plt.figure('Plot 3D')
xy_plane = plt.figure('XY')
xz_plane = plt.figure('XZ')
yz_plane = plt.figure('YZ')
ax_plot3d = plot3d.add_subplot(111, projection='3d')
ax_xy = xy_plane.add_subplot(111)
ax_xz = xz_plane.add_subplot(111)
ax_yz = yz_plane.add_subplot(111)
ax_plot3d.set_title('3D')
ax_plot3d._axis3don = False
ax_xy.set_ylabel('y')
ax_xy.set_xlabel('x')
ax_xz.set_ylabel('z')
ax_xz.set_xlabel('x')
ax_yz.set_ylabel('z')
ax_yz.set_xlabel('y')
fh5 = File('data.h5', 'r')
total_particles = len(list(fh5['/particles'])) + 1
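    # particle groups are numbered from 1; each trajectory is drawn as a straight
    # segment from its initial to its final position, styled by the stored 'trace' value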
for particle_count in range(1, total_particles):
route = '/particles/' + str(particle_count) + '/'
trace = fh5[route + 'trace'].value[0]
initial_position = fh5[route + 'initial_position']
final_position = fh5[route + 'final_position']
xs = array([initial_position[0], final_position[0]])
ys = array([initial_position[1], final_position[1]])
zs = array([initial_position[2], final_position[2]])
ax_plot3d.plot(xs, ys, zs, trace)
ax_xy.plot(xs, ys, trace)
ax_xz.plot(xs, zs, trace)
ax_yz.plot(ys, zs, trace)
xy_plane.savefig('XY.jpg')
xz_plane.savefig('XZ.jpg')
yz_plane.savefig('YZ.jpg')
plt.show()
| Neluso/SIFPAF | plot.py | Python | gpl-3.0 | 1,454 |
# Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source
# & Institut Laue - Langevin
# SPDX - License - Identifier: GPL - 3.0 +
#pylint: disable=no-init,attribute-defined-outside-init
import systemtesting
from mantid.simpleapi import *
from reduction_workflow.instruments.sans.sns_command_interface import *
from reduction_workflow.instruments.sans.hfir_command_interface import *
FILE_LOCATION = "/SNS/EQSANS/IPTS-5636/data/"
class EQSANSFlatTest(systemtesting.MantidSystemTest):
def requiredFiles(self):
files = []
files.append(FILE_LOCATION+"EQSANS_5704_event.nxs")
files.append(FILE_LOCATION+"EQSANS_5734_event.nxs")
files.append(FILE_LOCATION+"EQSANS_5732_event.nxs")
files.append(FILE_LOCATION+"EQSANS_5738_event.nxs")
files.append(FILE_LOCATION+"EQSANS_5729_event.nxs")
files.append(FILE_LOCATION+"EQSANS_5737_event.nxs")
files.append(FILE_LOCATION+"EQSANS_5703_event.nxs")
files.append("bl6_flux_at_sample")
return files
def runTest(self):
"""
System test for EQSANS.
This test is meant to be run at SNS and takes a long time.
It is used to verify that the complete reduction chain works
and reproduces reference results.
"""
configI = ConfigService.Instance()
configI["facilityName"]='SNS'
EQSANS()
SolidAngle()
DarkCurrent(FILE_LOCATION+"EQSANS_5704_event.nxs")
TotalChargeNormalization(beam_file="bl6_flux_at_sample")
AzimuthalAverage(n_bins=100, n_subpix=1, log_binning=False)
IQxQy(nbins=100)
UseConfigTOFTailsCutoff(True)
PerformFlightPathCorrection(True)
UseConfigMask(True)
SetBeamCenter(89.6749, 129.693)
SensitivityCorrection(FILE_LOCATION+'EQSANS_5703_event.nxs',
min_sensitivity=0.5,
max_sensitivity=1.5, use_sample_dc=True)
DirectBeamTransmission(FILE_LOCATION+"EQSANS_5734_event.nxs",
FILE_LOCATION+"EQSANS_5738_event.nxs", beam_radius=3)
ThetaDependentTransmission(False)
AppendDataFile([FILE_LOCATION+"EQSANS_5729_event.nxs"])
CombineTransmissionFits(True)
Background(FILE_LOCATION+"EQSANS_5732_event.nxs")
BckDirectBeamTransmission(FILE_LOCATION+"EQSANS_5737_event.nxs",
FILE_LOCATION+"EQSANS_5738_event.nxs", beam_radius=3)
BckThetaDependentTransmission(False)
BckCombineTransmissionFits(True)
SaveIqAscii(process='None')
SetAbsoluteScale(277.781)
Reduce1D()
# This reference is old, ignore the first non-zero point and
# give the comparison a reasonable tolerance (less than 0.5%).
mtd['EQSANS_5729_event_frame1_Iq'].dataY(0)[1] = 856.30028119108
def validate(self):
self.tolerance = 5.0
self.disableChecking.append('Instrument')
self.disableChecking.append('Sample')
self.disableChecking.append('SpectraMap')
self.disableChecking.append('Axes')
return "EQSANS_5729_event_frame1_Iq", 'EQSANSFlatTest.nxs'
| mganeva/mantid | Testing/SystemTests/tests/analysis/EQSANSFlatTestAPIv2.py | Python | gpl-3.0 | 3,330 |
import time
from datetime import datetime
from pytz import timezone
from dateutil.relativedelta import relativedelta
import openerp
from openerp.report.interface import report_rml
from openerp.tools import to_xml
from openerp.report import report_sxw
from openerp.tools.translate import _
from openerp.osv import osv, orm, fields
import math
import re
class edukits_total_retail(report_rml):
def create_xml(self,cr,uid,ids,datas,context={}):
def _thousand_separator(decimal,amount):
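            # recursively insert '.' thousand separators into the integer part;
            # 'decimal' is a printf-style format string applied to float amounts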
if not amount:
amount = 0.0
if type(amount) is float :
amount = str(decimal%amount)
else :
amount = str(amount)
if (amount == '0'):
return ' '
orig = amount
            new = re.sub(r"^(-?\d+)(\d{3})", r"\g<1>.\g<2>", amount)
if orig == new:
return new
else:
return _thousand_separator(decimal,new)
pool = openerp.registry(cr.dbname)
order_obj = pool.get('sale.order')
wh_obj = pool.get('stock.warehouse')
session_obj = pool.get('pos.session')
user_obj = pool.get('res.users')
users = user_obj.browse(cr,uid,uid)
warehouse_ids = datas['form']['warehouse_ids'] or wh_obj.search(cr, uid, [])
company = users.company_id
rml_parser = report_sxw.rml_parse(cr, uid, 'edukits_total_retail', context=context)
rml = """
<document filename="test.pdf">
<template pageSize="(21.0cm,29.7cm)" title="Total Retail Report" author="SGEEDE" allowSplitting="20">
<pageTemplate id="first">
<frame id="first" x1="50.0" y1="0.0" width="500" height="830"/>
</pageTemplate>
</template>
<stylesheet>
<blockTableStyle id="Table1">
<blockAlignment value="LEFT"/>
<blockValign value="TOP"/>
<lineStyle kind="LINEBEFORE" colorName="#000000" start="0,0" stop="-1,-1"/>
<lineStyle kind="LINEBELOW" colorName="#000000" start="0,0" stop="-1,-1"/>
<lineStyle kind="LINEABOVE" colorName="#000000" start="0,0" stop="-1,-1"/>
<lineStyle kind="LINEAFTER" colorName="#000000" start="0,0" stop="-1,-1"/>
</blockTableStyle>
<blockTableStyle id="parent_table">
<blockAlignment value="LEFT"/>
<blockLeftPadding start="0,0" length="0.1cm"/>
<blockRightPadding start="0,0" length="0.1cm"/>
<blockTopPadding start="0,0" length="0.15cm"/>
<blockBottomPadding start="0,0" length="0.15cm"/>
</blockTableStyle>
<blockTableStyle id="Table2">
<blockAlignment value="LEFT"/>
<blockValign value="TOP"/>
<lineStyle kind="LINEBEFORE" colorName="#000000" start="0,0" stop="-1,-1"/>
<lineStyle kind="LINEBELOW" colorName="#000000" start="0,0" stop="-1,-1"/>
<lineStyle kind="LINEABOVE" colorName="#000000" start="0,0" stop="-1,-1"/>
<lineStyle kind="LINEAFTER" colorName="#000000" start="0,0" stop="-1,-1"/>
</blockTableStyle>
<blockTableStyle id="Table3">
<blockAlignment value="LEFT"/>
<blockValign value="TOP"/>
</blockTableStyle>
<blockTableStyle id="Table3_Normal">
<blockAlignment value="LEFT"/>
<blockValign value="TOP"/>
<blockTopPadding start="0,0" length="-0.15cm"/>
<lineStyle kind="LINEBEFORE" colorName="#000000" start="0,0" stop="-1,-1"/>
<lineStyle kind="LINEBELOW" colorName="#000000" start="0,1" stop="0,1"/>
<lineStyle kind="LINEABOVE" colorName="#000000" start="0,0" stop="0,0"/>
<lineStyle kind="LINEAFTER" colorName="#000000" start="0,0" stop="-1,-1"/>
</blockTableStyle>
<blockTableStyle id="Table3_PARENT">
<blockAlignment value="CENTER"/>
<blockValign value="TOP"/>
</blockTableStyle>
"""
for warehouse in wh_obj.browse(cr,uid,warehouse_ids):
if warehouse.color:
rml += """
<blockTableStyle id="Table3""" + to_xml(str(warehouse.color.name)) + """">
<blockBackground colorName="#"""+ to_xml(str(warehouse.color.color)) + """" start="0,0" stop="0,-1"/>
<blockAlignment value="LEFT"/>
<blockValign value="TOP"/>
<blockTopPadding start="0,0" length="0.1cm"/>
<lineStyle kind="LINEBEFORE" colorName="#000000" start="0,0" stop="-1,-1"/>
<lineStyle kind="LINEBELOW" colorName="#000000" start="0,1" stop="0,1"/>
<lineStyle kind="LINEABOVE" colorName="#000000" start="0,0" stop="0,0"/>
<lineStyle kind="LINEAFTER" colorName="#000000" start="0,0" stop="-1,-1"/>
</blockTableStyle>
"""
if not warehouse.color:
rml += """
<blockTableStyle id="Table3False">
<blockAlignment value="LEFT"/>
<blockValign value="TOP"/>
<blockTopPadding start="0,0" length="0.1cm"/>
<lineStyle kind="LINEBEFORE" colorName="#000000" start="0,0" stop="-1,-1"/>
<lineStyle kind="LINEBELOW" colorName="#000000" start="0,1" stop="0,1"/>
<lineStyle kind="LINEABOVE" colorName="#000000" start="0,0" stop="0,0"/>
<lineStyle kind="LINEAFTER" colorName="#000000" start="0,0" stop="-1,-1"/>
</blockTableStyle>
"""
rml += """
<blockTableStyle id="Table3_LINE">
<blockAlignment value="LEFT"/>
<blockValign value="TOP"/>
<lineStyle kind="LINEBELOW" colorName="#000000" start="2,0" stop="2,3"/>
</blockTableStyle>
<blockTableStyle id="Table3_LINE2">
<blockAlignment value="LEFT"/>
<blockValign value="TOP"/>
</blockTableStyle>
<blockTableStyle id="Table3_LINE2W">
<blockBackground colorName="white"/>
<blockAlignment value="LEFT"/>
<blockValign value="TOP"/>
</blockTableStyle>
<blockTableStyle id="Table1_line">
<blockAlignment value="LEFT"/>
<blockValign value="TOP"/>
<lineStyle kind="LINEBELOW" colorName="#000000" start="0,0" stop="2,0"/>
<lineStyle kind="LINEABOVE" colorName="#000000" start="0,0" stop="2,0"/>
</blockTableStyle>
<blockTableStyle id="Table1_lines">
<blockBackground colorName="white"/>
<blockAlignment value="LEFT"/>
<blockValign value="TOP"/>
<lineStyle kind="LINEBELOW" colorName="#000000" start="0,0" stop="2,0"/>
<lineStyle kind="LINEABOVE" colorName="#000000" start="0,0" stop="2,0"/>
<lineStyle kind="LINEBEFORE" colorName="#000000" start="0,0" stop="2,0"/>
<lineStyle kind="LINEAFTER" colorName="#000000" start="0,0" stop="2,0"/>
</blockTableStyle>
<initialize>
<paraStyle name="all" alignment="justify"/>
</initialize>
<paraStyle name="P1" fontName="Helvetica" fontSize="9.0" leading="11" alignment="CENTER" spaceBefore="0.0" spaceAfter="0.0"/>
<paraStyle name="P2" fontName="Helvetica-Bold" fontSize="14.0" leading="17" alignment="RIGHT" spaceBefore="0.0" spaceAfter="0.0"/>
<paraStyle name="P3" fontName="Times-Roman" fontSize="11.0" leading="10" alignment="LEFT" spaceBefore="0.0" spaceAfter="0.0"/>
<paraStyle name="P4" fontName="Times-Roman" fontSize="11.0" leading="10" alignment="RIGHT" spaceBefore="0.0" spaceAfter="0.0"/>
<paraStyle name="P5" fontName="Times-Roman" fontSize="11.0" leading="10" alignment="LEFT" spaceBefore="0.0" spaceAfter="0.0"/>
<paraStyle name="P6" fontName="Helvetica" fontSize="9.0" leading="11" alignment="LEFT" spaceBefore="0.0" spaceAfter="6.0"/>
<paraStyle name="P7" fontName="Helvetica" fontSize="9.0" leading="11" alignment="CENTER" spaceBefore="0.0" spaceAfter="6.0"/>
<paraStyle name="P8" fontName="Helvetica" fontSize="8.0" leading="10" alignment="LEFT" spaceBefore="0.0" spaceAfter="6.0"/>
<paraStyle name="P9" fontName="Times-Roman" fontSize="11.0" leading="14" alignment="LEFT" spaceBefore="0.0" spaceAfter="0.0"/>
<paraStyle name="P10" fontName="Times-Roman" fontSize="11.0" leading="14" alignment="RIGHT" spaceBefore="0.0" spaceAfter="0.0"/>
<paraStyle name="P11" fontName="Times-Roman" fontSize="11.0" leading="14" alignment="LEFT" spaceBefore="0.0" spaceAfter="0.0"/>
<paraStyle name="P12" fontName="Helvetica" fontSize="8.0" leading="10" alignment="LEFT" spaceBefore="0.0" spaceAfter="6.0"/>
<paraStyle name="P13" fontName="Helvetica" fontSize="8.0" leading="10" alignment="LEFT" spaceBefore="0.0" spaceAfter="0.0"/>
<paraStyle name="P14" fontName="Helvetica-Bold" fontSize="12.0" leading="11" alignment="LEFT" spaceBefore="0.0" spaceAfter="1.0"/>
<paraStyle name="P15" textColor="black" fontName="Helvetica" fontSize="10.0" leading="11" alignment="LEFT" spaceBefore="0.0" spaceAfter="1.0"/>
<paraStyle name="P15_W" textColor="white" fontName="Helvetica" fontSize="10.0" leading="11" alignment="LEFT" spaceBefore="0.0" spaceAfter="1.0"/>
<paraStyle name="P15_RIGHT" textColor="black" fontName="Helvetica" fontSize="10.0" leading="11" alignment="RIGHT" spaceBefore="0.0" spaceAfter="1.0"/>
<paraStyle name="P15_CENTER" textColor="black" fontName="Helvetica-Bold" fontSize="12.0" leading="11" alignment="CENTER" spaceBefore="0.0" spaceAfter="1.0"/>
<paraStyle name="P15_CENTER_2" textColor="black" fontName="Helvetica-Bold" fontSize="14.0" leading="11" alignment="CENTER" spaceBefore="0.0" spaceAfter="1.0"/>
<paraStyle name="P16" fontName="Helvetica" fontSize="9.0" leading="11" alignment="LEFT" spaceBefore="0.0" spaceAfter="0.0"/>
<paraStyle name="P17" fontName="Times-Roman" fontSize="8.0" leading="11" alignment="LEFT" spaceBefore="0.0" spaceAfter="1.0"/>
<paraStyle name="P19" rightIndent="0.0" leftIndent="0.0" fontName="Times-Roman" fontSize="10.0" leading="11" alignment="LEFT" spaceBefore="0.0" spaceAfter="0.0"/>
<paraStyle name="P20" rightIndent="0.0" leftIndent="0.0" fontName="Helvetica" fontSize="12.0" leading="11" alignment="CENTER" spaceBefore="0.0" spaceAfter="0.0"/>
<paraStyle name="Standard" fontName="Times-Roman"/>
<paraStyle name="Text body" fontName="Times-Roman" spaceBefore="0.0" spaceAfter="6.0"/>
<paraStyle name="List" fontName="Times-Roman" spaceBefore="0.0" spaceAfter="6.0"/>
<paraStyle name="Table Contents" fontName="Times-Roman" spaceBefore="0.0" spaceAfter="6.0"/>
<paraStyle name="Table Heading" fontName="Times-Roman" alignment="CENTER" spaceBefore="0.0" spaceAfter="6.0"/>
<paraStyle name="Caption" fontName="Times-Roman" fontSize="10.0" leading="13" spaceBefore="6.0" spaceAfter="6.0"/>
<paraStyle name="Index" fontName="Times-Roman"/>
<paraStyle name="Heading" fontName="Helvetica" fontSize="15.0" leading="19" spaceBefore="12.0" spaceAfter="6.0"/>
<paraStyle name="Footer" fontName="Times-Roman"/>
<paraStyle name="Horizontal Line" fontName="Times-Roman" fontSize="6.0" leading="8" spaceBefore="0.0" spaceAfter="14.0"/>
<paraStyle name="terp_header" fontName="Helvetica-Bold" fontSize="15.0" leading="19" alignment="LEFT" spaceBefore="12.0" spaceAfter="6.0"/>
<paraStyle name="Heading 9" fontName="Helvetica-Bold" fontSize="75%" leading="NaN" spaceBefore="12.0" spaceAfter="6.0"/>
<paraStyle name="terp_tblheader_General" fontName="Helvetica-Bold" fontSize="8.0" leading="10" alignment="LEFT" spaceBefore="6.0" spaceAfter="6.0"/>
<paraStyle name="terp_tblheader_Details" fontName="Helvetica-Bold" fontSize="9.0" leading="11" alignment="LEFT" spaceBefore="6.0" spaceAfter="6.0"/>
<paraStyle name="terp_default_8" fontName="Helvetica" fontSize="9.0" leading="10" alignment="LEFT" spaceBefore="0.0" spaceAfter="0.0"/>
<paraStyle name="terp_default_Bold_8" fontName="Helvetica-Bold" fontSize="8.0" leading="10" alignment="LEFT" spaceBefore="0.0" spaceAfter="0.0"/>
<paraStyle name="terp_tblheader_General_Centre" fontName="Helvetica-Bold" fontSize="8.0" leading="10" alignment="CENTER" spaceBefore="6.0" spaceAfter="6.0"/>
<paraStyle name="terp_tblheader_General_Right" fontName="Helvetica-Bold" fontSize="8.0" leading="10" alignment="RIGHT" spaceBefore="6.0" spaceAfter="6.0"/>
<paraStyle name="terp_tblheader_Details_Centre" fontName="Helvetica-Bold" fontSize="9.0" leading="11" alignment="CENTER" spaceBefore="6.0" spaceAfter="6.0"/>
<paraStyle name="terp_tblheader_Details_Right" fontName="Helvetica-Bold" fontSize="9.0" leading="11" alignment="RIGHT" spaceBefore="6.0" spaceAfter="6.0"/>
<paraStyle name="terp_default_Right_8" fontName="Helvetica" fontSize="8.0" leading="10" alignment="RIGHT" spaceBefore="0.0" spaceAfter="0.0"/>
<paraStyle name="terp_default_Centre_8" fontName="Helvetica" fontSize="8.0" leading="10" alignment="CENTER" spaceBefore="0.0" spaceAfter="0.0"/>
<paraStyle name="terp_header_Right" fontName="Helvetica-Bold" fontSize="15.0" leading="19" alignment="LEFT" spaceBefore="12.0" spaceAfter="6.0"/>
<paraStyle name="terp_header_Centre" fontName="Helvetica-Bold" fontSize="15.0" leading="19" alignment="CENTER" spaceBefore="12.0" spaceAfter="6.0"/>
<paraStyle name="terp_header_Centre2" fontName="Helvetica-Bold" fontSize="12.0" leading="19" alignment="CENTER" spaceBefore="12.0" spaceAfter="6.0"/>
<paraStyle name="terp_header_Centre3" fontName="Helvetica-Bold" fontSize="12.0" leading="19" alignment="LEFT" spaceBefore="12.0" spaceAfter="6.0"/>
<paraStyle name="terp_default_address" fontName="Helvetica" fontSize="10.0" leading="13" alignment="LEFT" spaceBefore="0.0" spaceAfter="0.0"/>
<paraStyle name="terp_default_9" fontName="Helvetica" fontSize="9.0" leading="11" alignment="LEFT" spaceBefore="0.0" spaceAfter="0.0"/>
<paraStyle name="terp_default_12" fontName="Helvetica" fontSize="12.0" leading="11" alignment="LEFT" spaceBefore="0.0" spaceAfter="0.0"/>
<paraStyle name="terp_default_Bold_9" fontName="Helvetica-Bold" fontSize="9.0" leading="11" alignment="LEFT" spaceBefore="0.0" spaceAfter="0.0"/>
<paraStyle name="terp_default_Bold_9_Right" fontName="Helvetica-Bold" fontSize="9.0" leading="11" alignment="RIGHT" spaceBefore="0.0" spaceAfter="0.0"/>
<paraStyle name="terp_default_Centre_9" fontName="Helvetica" fontSize="9.0" leading="11" alignment="CENTER" spaceBefore="0.0" spaceAfter="0.0"/>
<paraStyle name="terp_default_Right_9" fontName="Helvetica" fontSize="9.0" leading="11" alignment="RIGHT" spaceBefore="0.0" spaceAfter="0.0"/>
<paraStyle name="Heading 1" fontName="Times-Bold" fontSize="24.0" leading="29" spaceBefore="0.0" spaceAfter="0.0"/>
<paraStyle name="Heading 2" fontName="Times-Bold" fontSize="20.0" leading="29" spaceBefore="0.0" spaceAfter="0.0"/>
<images/>
</stylesheet>
<story>
"""
no_total = 1
rml += """
<blockTable colWidths="250,250" style="Table3_PARENT">
"""
# Day transaction for batamcentre
center = False
currency_amount = 0
currency_symbol =''
bank_ids = []
date_end = datetime.strptime(datas['form']['date_end'],"%Y-%m-%d")
# Normal transaction
for warehouse in wh_obj.browse(cr,uid,warehouse_ids):
currency_amount = warehouse.currency_id.rate_silent
location_id = warehouse.lot_stock_id.id
results = []
total_bank = 0.0
if warehouse.is_split:
date_start_day = datetime.strptime(datas['form']['date_end']+ ' 00:00:00',"%Y-%m-%d %H:%M:%S")
date_stop_day = datetime.strptime(datas['form']['date_end']+ ' 17:59:59',"%Y-%m-%d %H:%M:%S")
date_start = datetime.strptime(datas['form']['date_end']+ ' 18:00:00',"%Y-%m-%d %H:%M:%S")
date_stop = datetime.strptime(datas['form']['date_end']+ ' 23:59:59',"%Y-%m-%d %H:%M:%S")
sessions_ids = session_obj.search(cr,uid,[('stock_location_rel','=',location_id),('stop_at','!=',False)])
session_night_ids = []
session_day_ids = []
for sessions in session_obj.browse(cr,uid,sessions_ids):
                    stop_temp = datetime.strptime(sessions.stop_at, "%Y-%m-%d %H:%M:%S")
                    # Derive the user's UTC offset from the tz_offset string
                    # (e.g. "+0700"): the first three characters carry the sign
                    # and hours, the next two the minutes (collected here but
                    # not applied to stop_at).
                    tz_count = 0
                    hour_offset = ""
                    minute_offset = ""
                    for tz_offset in users.tz_offset:
                        tz_count += 1
                        if tz_count <= 3:
                            hour_offset += tz_offset
                        elif tz_count <= 5:
                            minute_offset += tz_offset
                    stop_at = stop_temp + relativedelta(hours=int(hour_offset))
if (stop_at >= date_start) and (stop_at <= date_stop):
session_night_ids.append(sessions.id)
if (stop_at >= date_start_day) and (stop_at <= date_stop_day):
session_day_ids.append(sessions.id)
# if not warehouse.is_split:
session_ids = session_obj.search(cr,uid,[('stop_at','>=',datas['form']['date_end']+ ' 00:00:00'),('stop_at','<=',datas['form']['date_end']+ ' 23:59:59'),('stock_location_rel','=',location_id)])
if len(warehouse_ids) == 1:
rml += """
<tr>
<td>
"""
elif no_total % 2 == 0:
rml += """<td>"""
else:
rml += """
<tr>
<td>
"""
if warehouse.color:
rml += """
<blockTable colWidths="210" style="Table3">
"""
if not warehouse.color:
rml += """
<blockTable colWidths="210" style="Table3_Normal">
"""
rml += """
<tr>
</tr>
<tr>
<td>
<blockTable rowHeights="38" colWidths="198" style="Table3""" + to_xml(str(warehouse.color.name)) + """">
<tr>
<td>
<para style="P15_CENTER_2">"""+ to_xml(str(warehouse.name)) + """</para>
</td>
</tr>
</blockTable>
<blockTable colWidths="198" style="Table1_lines">
<tr>
<td>
<para style="P15">TGL: """+ to_xml(str(format(date_end,'%d-%B-%y')))+"""</para>
</td>
</tr>
</blockTable>
<blockTable rowHeights="17" colWidths="198" style="Table3""" + to_xml(str(warehouse.color.name)) + """">
<tr>
<td background="pink">
<para style="P15_CENTER">SETORAN</para>
</td>
</tr>
</blockTable>
<blockTable colWidths="198" style="Table1_lines">
<tr>
<td>
"""
total_card = 0.0
# if not session_ids:
# rml +="""
# <para style="P15">-</para>
# """
total_amount = 0.0
total_amount_night = 0.0
            # Day-shift transactions when the report is split into day/night
if warehouse.is_split:
for session in session_obj.browse(cr,uid,session_day_ids):
for bank in session.statement_ids:
if bank.journal_id.type == 'bank':
total_card +=bank.balance_end
if session.cashier_deposit_ids:
for cashier in session.cashier_deposit_ids:
total_amount += cashier.amount_total
else:
for session in session_obj.browse(cr,uid,session_ids):
for bank in session.statement_ids:
if bank.journal_id.type == 'bank':
total_card +=bank.balance_end
if session.cashier_deposit_ids:
for cashier in session.cashier_deposit_ids:
total_amount += cashier.amount_total
rml += """
<para style="P15">""" + rml_parser.formatLang(total_amount+0, currency_obj=company.currency_id) + """</para>
"""
# if warehouse.is_split:
if session_ids:
sessions = session_obj.browse(cr,uid,session_ids[0])
if warehouse.is_split:
rml += """
</td>
</tr>
</blockTable>
<blockTable rowHeights="17" colWidths="198" style="Table3""" + to_xml(str(warehouse.color.name)) + """">
<tr>
<td background="pink">
<para style="P15_CENTER">SETORAN (Malam)</para>
</td>
</tr>
</blockTable>
<blockTable colWidths="198" style="Table1_lines">
<tr>
<td>
"""
for session in session_obj.browse(cr,uid,session_night_ids):
for bank in session.statement_ids:
if bank.journal_id.type == 'bank':
total_card +=bank.balance_end
if session.cashier_deposit_ids:
for cashier in session.cashier_deposit_ids:
total_amount_night += cashier.amount_total
rml += """
<para style="P15">""" + rml_parser.formatLang(total_amount_night+0, currency_obj=company.currency_id) + """</para>
"""
# if not session_night_ids:
# rml +="""
# <para style="P15">-</para>
# """
            # Normal transactions
rml += """
</td>
</tr>
</blockTable>
<blockTable rowHeights="17" colWidths="198" style="Table3""" + to_xml(str(warehouse.color.name)) + """">
<tr>
<td background="pink">
<para style="P15_CENTER">CC and DC</para>
</td>
</tr>
</blockTable>
<blockTable colWidths="100,98" style="Table1_lines">
<tr>
<td>
"""
if not session_ids:
rml +="""
<para style="P15">-</para>
"""
session_list = []
bank_ids = []
for session in session_obj.browse(cr,uid,session_ids):
session_list.append(session.id)
# for bank in session.statement_ids:
# if bank.journal_id.type == 'bank':
# rml +="""
# <para style="P15">""" + to_xml(str(bank.journal_id.name)) + """</para>
# """
if len(session_list) == 1:
cr.execute(""" SELECT sum(abs.balance_end), aj.name from account_bank_statement abs inner join account_journal aj on abs.journal_id = aj.id where pos_session_id = %s and aj.type != 'cash' group by aj.name; """ % (tuple(session_list)[0],))
bank_ids = cr.fetchall()
if len(session_list) > 1:
cr.execute(""" SELECT sum(abs.balance_end), aj.name from account_bank_statement abs inner join account_journal aj on abs.journal_id = aj.id where pos_session_id in %s and aj.type != 'cash' group by aj.name; """ % (tuple(session_list),))
bank_ids = cr.fetchall()
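            # A hedged aside, not in the original flow: with psycopg2-style
            # cursors the interpolated query above could instead bind the id
            # tuple as a query parameter, e.g.
            #     cr.execute("SELECT sum(abs.balance_end), aj.name "
            #                "FROM account_bank_statement abs "
            #                "JOIN account_journal aj ON abs.journal_id = aj.id "
            #                "WHERE pos_session_id IN %s AND aj.type != 'cash' "
            #                "GROUP BY aj.name", (tuple(session_list),))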
if bank_ids:
for edukits_bank in bank_ids:
rml +="""
<para style="P15">""" + to_xml(str(edukits_bank[1])) + """</para>
"""
rml +="""
</td>
<td>
"""
if not session_ids:
rml +="""
<para style="P15">-</para>
"""
if bank_ids:
for edukits_bank in bank_ids:
total_bank_amount = 0
if edukits_bank[0]:
total_bank_amount = edukits_bank[0]
total_bank += edukits_bank[0]
rml +="""
<para style="P15">""" + rml_parser.formatLang(total_bank_amount+0,currency_obj=company.currency_id) + """</para>
"""
rml +="""
</td>
</tr>
</blockTable>
<blockTable rowHeights="17" colWidths="198" style="Table3""" + to_xml(str(warehouse.color.name)) + """">
<tr>
<td background="pink">
<para style="P15_CENTER">PENGELUARAN</para>
</td>
</tr>
</blockTable>
<blockTable colWidths="198" style="Table1_lines">
<tr>
<td background="pink">
<para style="P15_W">Table</para>
</td>
</tr>
</blockTable>
<blockTable colWidths="198" style="Table1_lines">
<tr>
<td background="pink">
<para style="P15_W">Table</para>
</td>
</tr>
</blockTable>
<blockTable colWidths="80,118" style="Table1_lines">
<tr>
<td>
<para style="P15">MAITRI</para>
</td>
<td>
<para style="P15_RIGHT"></para>
<para style="P15_RIGHT">""" + rml_parser.formatLang(total_amount +total_amount_night+ total_bank+0, currency_obj=company.currency_id) +"""</para>
</td>
</tr>
</blockTable>
<blockTable colWidths="80,118" style="Table1_lines">
<tr>
<td>
                                    <para style="P15">KURS: """ + rml_parser.formatLang(currency_amount) + """</para>
</td>
<td>
<para style="P15_RIGHT">""" + rml_parser.formatLang( (total_amount+total_amount_night)*currency_amount, currency_obj=warehouse.currency_id) +"""</para>
</td>
</tr>
</blockTable>
<blockTable colWidths="80,5,110" style="Table3_LINE2">
<tr>
<td>
<para style="P15"></para>
</td>
<td>
<para style="P15"></para>
</td>
<td>
<para style="P15_CENTER"></para>
</td>
</tr>
</blockTable>
</td>
</tr>
</blockTable>
<spacer length="0.5cm"/>"""
rml += """
</td>
"""
if center:
if len(warehouse_ids) == 1:
rml += """<td></td>"""
rml += """
</tr>
"""
            elif (no_total % 2 == 1) and (len(warehouse_ids) + 1 == no_total):
rml += """<td></td>"""
rml += """
</tr>
"""
elif no_total % 2 == 0:
rml += """
</tr>
"""
else:
if len(warehouse_ids)+1 == no_total:
rml += """
</tr>
"""
else:
if len(warehouse_ids) == 1:
rml += """<td></td>"""
rml += """
</tr>
"""
            elif (no_total % 2 == 1) and (len(warehouse_ids) == no_total):
rml += """<td></td>"""
rml += """
</tr>
"""
elif no_total % 2 == 0:
rml += """
</tr>
"""
else:
if len(warehouse_ids) == no_total:
rml += """
</tr>
"""
no_total += 1
rml += """
</blockTable>
</story>
</document>"""
date_cur = time.strftime('%Y-%m-%d %H:%M:%S')
return rml
edukits_total_retail('report.edukits.total.retail', 'pos.session', '', '') | davidsetiyadi/draft_python | new_edukits/edukits_total_retail_report.py | Python | gpl-3.0 | 24,578 |
# Copyright (C) 2005-2010 MISG/ICTI/EIA-FR
# See LICENSE for details.
"""
Factories for AMQ clients, Thrift clients and SMAC Clients and servers.
@author: Jonathan Stoppani <[email protected]>
"""
import weakref
from twisted.internet.protocol import ReconnectingClientFactory
from twisted.internet import defer, error
from txamqp.protocol import AMQClient
from txamqp.contrib.thrift.client import ThriftTwistedDelegate
from txamqp.queue import TimeoutDeferredQueue, Closed
from txamqp.contrib.thrift.transport import TwistedAMQPTransport
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol
from smac.python import log
from smac.amqp.models import Exchange, Queue, IAddress
from smac.conf import topology
from smac.modules import utils
class SMACServerFactory(object):
iprot_factory = TBinaryProtocol.TBinaryProtocolFactory()
oprot_factory = TBinaryProtocol.TBinaryProtocolFactory()
def __init__(self, client, channel=None):
self.client = client
self.channel = channel or 1
        if client.check_0_8():
            # the 0-8 spec spells the basic property with a space,
            # later revisions use a dash
            self.reply_to = "reply to"
        else:
            self.reply_to = "reply-to"
@defer.inlineCallbacks
def build_server(self, delegate, processor, handler, address, queues=None, standalone=True):
processor_name = processor.__name__
log.debug("Creating new server for {0} with ID {1}".format(
processor_name, address.instance))
address = IAddress(address)
if not queues:
queues = topology.queues
if isinstance(self.channel, int):
channel = yield self.client.channel(self.channel)
yield channel.channel_open()
else:
# Assume it's already open!
channel = self.channel
deferreds = []
# Declare all exchanges
exchanges = {}
for k, e in topology.exchanges.iteritems():
e = Exchange(channel, **e)
e.format_name(**dict(address))
e.declare()
exchanges[k] = e
self.responses = Exchange(channel, **topology.exchanges['responses'])
# Declare all queues
qs = []
for q in queues:
q = q.copy()
bindings = q.pop('bindings')
q = Queue(channel, **q)
q.format_name(**dict(address))
q.declare()
deferreds += [q.bind(exchanges[e], k.format(**dict(address))) for e, k in bindings]
qs.append(q)
# Wait for declarations and bindings
yield defer.DeferredList(deferreds)
log.debug("All queues and needed exchanges declared and bound, start listening")
tags = []
for queue in qs:
tag = yield queue.consume()
tags.append(tag)
@defer.inlineCallbacks
def destroy(ref):
log.debug("Server for {0} garbage collected, removing " \
"subscriptions".format(processor_name))
            try:
                yield defer.DeferredList([channel.basic_cancel(t) for t in tags])
            except Exception:
                # best-effort cleanup; the channel may already be gone
                pass
if not standalone:
handler = weakref.proxy(handler, destroy)
processor = processor.Processor(handler)
for tag in tags:
queue = yield self.client.queue(tag)
self.get_next_message(channel, queue, processor, delegate)
def parse_message(self, msg, channel, queue, processor, delegate):
tag = msg.delivery_tag
try:
sender = msg.content[self.reply_to]
except KeyError:
sender = None
transport_in = TTransport.TMemoryBuffer(msg.content.body)
transport_out = TwistedAMQPTransport(channel, str(self.responses), sender)
iprot = self.iprot_factory.getProtocol(transport_in)
oprot = self.oprot_factory.getProtocol(transport_out)
d = processor.process(iprot, oprot)
d.addErrback(delegate.processing_error)
channel.basic_ack(tag, True)
self.get_next_message(channel, queue, processor, delegate)
def get_next_message(self, channel, queue, processor, delegate):
d = queue.get()
d.addCallback(self.parse_message, channel, queue, processor, delegate)
d.addErrback(self.catch_closed_queue, delegate)
d.addErrback(delegate.queue_error)
def catch_closed_queue(self, failure, delegate):
failure.trap(Closed)
delegate.queue_closed(failure)
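

# A minimal, hedged usage sketch (not part of the original module): wiring a
# Thrift processor to the broker through SMACServerFactory. The `client`,
# `delegate`, `processor`, `handler` and `address` arguments are assumptions:
# an open AMQClient, an object exposing the error callbacks build_server uses
# (processing_error, queue_error, queue_closed), a Thrift-generated service
# module, an implementation of its interface, and anything adaptable to
# IAddress.
@defer.inlineCallbacks
def example_build_server(client, delegate, processor, handler, address):
    factory = SMACServerFactory(client)
    yield factory.build_server(delegate, processor, handler, address)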
class SMACClientFactory(object):
iprot_factory = TBinaryProtocol.TBinaryProtocolFactory()
oprot_factory = TBinaryProtocol.TBinaryProtocolFactory()
def __init__(self, client, channel=None):
self.client = client
self.client_lock = defer.DeferredLock()
self.clients = {}
        if client.check_0_8():
            # the 0-8 spec spells the basic property with a space,
            # later revisions use a dash
            self.reply_to = "reply to"
        else:
            self.reply_to = "reply-to"
self.channel = channel or 1
@defer.inlineCallbacks
def build_client(self, address, service=None, distribution=None, cache=True):
yield self.client_lock.acquire()
try:
address = IAddress(address)
if not service:
service = utils.get_module_from_address(address)
service_name = service.__name__ + address.routing_key
distribution = distribution or address.distribution
if not distribution:
raise ValueError("The distribution mode was not defined and " \
"could not be inferred from the address.")
key = (service, address.routing_key, distribution)
try:
client = self.clients[key]
except KeyError:
log.debug("Creating new client for {0} with routing key {1} and distribution {2}".format(
service.__name__, address.routing_key, distribution))
if isinstance(self.channel, int):
channel = yield self.client.channel(self.channel)
yield channel.channel_open()
else:
# Assume it's already open!
channel = self.channel
response_exchange = Exchange(channel, **topology.exchanges['responses'])
response_queue = Queue(channel, exclusive=True, auto_delete=True)
yield response_queue.declare()
yield response_queue.bind(response_exchange)
consumer_tag = yield response_queue.consume()
service_exchange = Exchange(channel, **topology.exchanges[distribution])
service_exchange.format_name(**dict(address))
yield service_exchange.declare()
amqp_transport = TwistedAMQPTransport(channel, str(service_exchange),
address.routing_key, service_name,
str(response_queue), self.reply_to)
client = service.Client(amqp_transport, self.oprot_factory)
client.address = address
client.factory = self
if cache:
weak_client = client
self.clients[key] = client
else:
@defer.inlineCallbacks
def destroy(ref):
log.debug("Client for {0} garbage collected, removing " \
"subscriptions".format(service_name))
                        try:
                            yield channel.basic_cancel(consumer_tag)
                        except Exception:
                            # best-effort cleanup; the channel may already be gone
                            pass
weak_client = weakref.proxy(client, destroy)
queue = yield self.client.queue(consumer_tag)
self.get_next_message(channel, queue, weak_client)
queue = yield self.client.get_return_queue(service_name)
self.get_next_unroutable_message(channel, queue, weak_client)
else:
log.debug("Using cached client for {0} with routing key {1} and distribution {2}".format(
service.__name__, address.routing_key, distribution))
finally:
self.client_lock.release()
defer.returnValue(client)
def parse_message(self, msg, channel, queue, client):
tag = msg.delivery_tag
transport = TTransport.TMemoryBuffer(msg.content.body)
iprot = self.iprot_factory.getProtocol(transport)
(fname, mtype, rseqid) = iprot.readMessageBegin()
if rseqid not in client._reqs:
log.warn('Missing rseqid! fname = %r, rseqid = %s, mtype = %r, routing key = %r, client = %r, msg.content.body = %r' % (fname, rseqid, mtype, msg.routing_key, client, msg.content.body))
method = getattr(client, 'recv_' + fname)
method(iprot, mtype, rseqid)
channel.basic_ack(tag, True)
self.get_next_message(channel, queue, client)
def unrouteable_message(self, msg, channel, queue, client):
transport = TTransport.TMemoryBuffer(msg.content.body)
iprot = self.iprot_factory.getProtocol(transport)
(fname, mtype, rseqid) = iprot.readMessageBegin()
try:
d = client._reqs.pop(rseqid)
except KeyError:
# KeyError will occur if the remote Thrift method is oneway,
# since there is no outstanding local request deferred for
# oneway calls.
pass
else:
            error_type = TTransport.TTransportException.NOT_OPEN
            error_msg = 'Unrouteable message, routing key = %r calling function %r' % (msg.routing_key, fname)
            d.errback(TTransport.TTransportException(error_type, error_msg))
self.get_next_unroutable_message(channel, queue, client)
def get_next_unroutable_message(self, channel, queue, client):
d = queue.get()
d.addCallback(self.unrouteable_message, channel, queue, client)
d.addErrback(self.catch_closed_queue)
d.addErrback(self.handle_queue_error)
def get_next_message(self, channel, queue, client):
d = queue.get()
d.addCallback(self.parse_message, channel, queue, client)
d.addErrback(self.catch_closed_queue)
d.addErrback(self.handle_queue_error)
def catch_closed_queue(self, failure):
failure.trap(Closed)
self.handle_closed_queue(failure)
def handle_queue_error(self, failure):
        log.err("Error in queue")
        log.err(failure)
def handle_closed_queue(self, failure):
log.debug("Queue closed")
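

# A minimal, hedged usage sketch (not part of the original module): obtaining
# a client stub over AMQP and calling it. `client` is an open AMQ client,
# `service` a Thrift-generated service module and `address` anything adaptable
# to IAddress; `ping` is a purely hypothetical remote method.
@defer.inlineCallbacks
def example_call_service(client, service, address):
    factory = SMACClientFactory(client)
    stub = yield factory.build_client(address, service)
    result = yield stub.ping()
    defer.returnValue(result)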
class ThriftAMQClient(AMQClient, object):
def __init__(self, *args, **kwargs):
super(ThriftAMQClient, self).__init__(*args, **kwargs)
self.return_queues_lock = defer.DeferredLock()
self.return_queues = {}
@defer.inlineCallbacks
def get_return_queue(self, key):
yield self.return_queues_lock.acquire()
try:
try:
q = self.return_queues[key]
except KeyError:
q = TimeoutDeferredQueue()
self.return_queues[key] = q
finally:
self.return_queues_lock.release()
defer.returnValue(q)
thriftBasicReturnQueue = get_return_queue # compatibility with
# ThriftTwistedDelegate
class AMQClientFactory(ReconnectingClientFactory, object):
"""
Factory for AMQP connections intended to be used by thrift clients.
Overriding the C{protocol} property with a more general C{AMQClient} class
should allow a more generic use of the factory.
"""
protocol = ThriftAMQClient
def __init__(self, spec, vhost):
self.spec = spec
self.vhost = vhost
self.closed = False
def buildProtocol(self, _):
client = self.protocol(ThriftTwistedDelegate(), self.vhost, self.spec)
client.factory = self
return client
def clientConnectionLost(self, connector, reason):
if self.closed:
log.info("Connection to the AMQP broker closed.")
return
log.error('Connection to AMQP broker lost. Reason {0}'.format(reason))
super(AMQClientFactory, self).clientConnectionLost(connector, reason)
def clientConnectionFailed(self, connector, reason):
log.error('Connection to AMQP broker failed. Reason {0}'.format(reason))
super(AMQClientFactory, self).clientConnectionFailed(connector, reason)
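

# A hedged connection sketch (not part of the original module): plugging the
# factory into the Twisted reactor; host, port and vhost are illustrative
# assumptions.
def example_connect(reactor, spec, vhost='/', host='localhost', port=5672):
    factory = AMQClientFactory(spec, vhost)
    reactor.connectTCP(host, port, factory)
    return factory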
| SMAC/corelib | smac/amqp/protocol.py | Python | gpl-3.0 | 13,170 |
###
# #%L
# Artimate Model Compiler
# %%
# Copyright (C) 2011 - 2012 INRIA
# %%
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/gpl-3.0.html>.
# #L%
###
import re
class Segmentation:
    def __init__(self, lab_file=None):
        try:
            self.segments = self.parse(lab_file)
        except TypeError:
            # no file supplied: lab_file is None and cannot be iterated
            self.segments = []
def parse(self, lab_file):
header = True
segments = []
start = 0
for line in lab_file:
if line.strip() == '#':
header = False
continue
if header:
continue
            match = re.match(r'\s*(?P<end>\d+(\.\d+)?)\s+\d+\s+(?P<label>.*)\s*', line)
            if match is None:
                # tolerate lines that do not match the "<end> <number> <label>" layout
                continue
            segment = Segment(start, match.group('end'), match.group('label'))
            segments.append(segment)
            start = match.group('end')
return segments
def __str__(self):
return '\n'.join(['start\tend\tlabel'] + [str(segment) for segment in self.segments])
class Segment:
def __init__(self, start, end, label):
self.start = float(start)
self.end = float(end)
self.label = label
def startframe(self):
# TODO set this from context
return int(self.start * 200.0)
startframe = property(startframe)
def endframe(self):
# TODO set this from context
return int(self.end * 200.0)
endframe = property(endframe)
def __str__(self):
return "%s\t%s\t%s" % (self.start, self.end, self.label)
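

# A hedged usage sketch (not part of the original script): parsing a lab file
# and printing the segment table; the file name is an illustrative assumption.
if __name__ == '__main__':
    with open('example.lab') as lab_file:
        print(Segmentation(lab_file))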
| psibre/artimate | artimate-model/src/main/resources/archetype-resources/__rootArtifactId__-model-animator/src/main/scripts/lab.py | Python | gpl-3.0 | 2,097 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-12-17 20:50
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('jordbruksmark', '0002_auto_20161217_2140'),
]
operations = [
migrations.AlterModelOptions(
name='wochen_menge',
options={'verbose_name': 'Wochen Menge', 'verbose_name_plural': 'Wochen Mengen'},
),
]
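
# A hedged sketch (not part of the generated migration) of the Meta options
# this migration leaves on the model; the class name and fields are assumed:
#
#     class WochenMenge(models.Model):
#         class Meta:
#             verbose_name = 'Wochen Menge'
#             verbose_name_plural = 'Wochen Mengen'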
| ortoloco/jordbruksmark | jordbruksmark/migrations/0003_auto_20161217_2150.py | Python | gpl-3.0 | 472 |
# "$Name: $";
# "$Header: $";
# ============================================================================
#
# file : TestServer.py
#
# description : Python source for the TestServer and its commands.
# The class is derived from Device. It represents the
# CORBA servant object which will be accessed from the
# network. All commands which can be executed on the
# TestServer are implemented in this file.
#
# project : TANGO Device Server
#
# $Author: $
#
# $Revision: $
#
# $Log: $
#
# copyleft : European Synchrotron Radiation Facility
# BP 220, Grenoble 38043
# FRANCE
#
# ============================================================================
# This file is generated by POGO
# (Program Obviously used to Generate tango Object)
#
# (c) - Software Engineering Group - ESRF
# ============================================================================
#
import PyTango
import sys
import numpy
import struct
import pickle
if sys.version_info > (3,):
long = int
# unicode = str
else:
bytes = str
# =================================================================
# TestServer Class Description:
#
# My Simple Server
#
# =================================================================
# Device States Description:
#
# DevState.ON : Server On
# =================================================================
class TestServer(PyTango.Device_4Impl):
# -------- Add you global variables here --------------------------
# -----------------------------------------------------------------
# Device constructor
# -----------------------------------------------------------------
def __init__(self, cl, name):
PyTango.Device_4Impl.__init__(self, cl, name)
self.defaults = {}
self.defaults["ScalarBoolean"] = [
True, PyTango.SCALAR, PyTango.DevBoolean]
self.defaults["ScalarUChar"] = [
12, PyTango.SCALAR, PyTango.DevUChar]
self.defaults["ScalarShort"] = [
12, PyTango.SCALAR, PyTango.DevShort]
self.defaults["ScalarUShort"] = [
12, PyTango.SCALAR, PyTango.DevUShort]
self.defaults["ScalarLong"] = [
123, PyTango.SCALAR, PyTango.DevLong]
self.defaults["ScalarULong"] = [
123, PyTango.SCALAR, PyTango.DevULong]
self.defaults["ScalarLong64"] = [
123, PyTango.SCALAR, PyTango.DevLong64]
self.defaults["ScalarULong64"] = [
123, PyTango.SCALAR, PyTango.DevULong64]
self.defaults["ScalarFloat"] = [
-1.23, PyTango.SCALAR, PyTango.DevFloat]
self.defaults["ScalarDouble"] = [
123.45, PyTango.SCALAR, PyTango.DevDouble]
self.defaults["ScalarString"] = [
"Hello!", PyTango.SCALAR, PyTango.DevString]
self.defaults["ScalarEncoded"] = [
("UTF8", b"Hello UTF8! Pr\xc3\xb3ba \xe6\xb5\x8b"),
PyTango.SCALAR, PyTango.DevEncoded]
self.dtype = None
self.attr_ScalarBoolean = True
self.attr_ScalarUChar = 12
self.attr_ScalarShort = 12
self.attr_ScalarUShort = 12
self.attr_ScalarLong = 123
self.attr_ScalarULong = 123
self.attr_ScalarLong64 = 123
self.attr_ScalarULong64 = 123
self.attr_ScalarFloat = -1.23
self.attr_ScalarDouble = 1.233
self.attr_ScalarString = "Hello!"
self.attr_ScalarEncoded = \
"UTF8", b"Hello UTF8! Pr\xc3\xb3ba \xe6\xb5\x8b"
self.attr_SpectrumBoolean = [True, False]
self.attr_SpectrumUChar = [1, 2]
self.attr_SpectrumShort = [1, -3, 4]
self.attr_SpectrumUShort = [1, 4, 5, 6]
self.attr_SpectrumULong = numpy.array(
[1234, 5678, 45, 345], dtype='uint32')
self.attr_SpectrumLong = [1123, -435, 35, -6345]
self.attr_SpectrumLong64 = [1123, -435, 35, -6345]
self.attr_SpectrumULong64 = [1123, 23435, 35, 3345]
self.attr_SpectrumFloat = [11.23, -4.35, 3.5, -634.5]
self.attr_SpectrumDouble = [1.123, 23.435, 3.5, 3.345]
self.attr_SpectrumString = ["Hello", "Word", "!", "!!"]
        # placeholder buffer, immediately replaced by the real encoding below
        self.attr_SpectrumEncoded = [
            "INT32", b"\x00\x01\x03\x04\x20\x31\x43\x54\x10\x11\x13\x14"]
        self.attr_SpectrumEncoded = self.encodeSpectrum()
self.attr_ImageBoolean = numpy.array([[True]], dtype='int16')
self.attr_ImageUChar = numpy.array([[2, 5], [3, 4]], dtype='uint8')
self.attr_ImageShort = numpy.array([[2, 5], [3, 4]], dtype='int16')
self.attr_ImageUShort = numpy.array([[2, 5], [3, 4]], dtype='uint16')
self.attr_ImageLong = numpy.array([[2, 5], [3, 4]], dtype='int32')
self.attr_ImageULong = numpy.array([[2, 5], [3, 4]], dtype='uint32')
self.attr_ImageLong64 = numpy.array([[2, 5], [3, 4]], dtype='int64')
self.attr_ImageULong64 = numpy.array([[2, 5], [3, 4]], dtype='uint64')
self.attr_ImageFloat = numpy.array([[2., 5.], [3., 4.]],
dtype='float32')
self.attr_ImageDouble = numpy.array([[2.4, 5.45], [3.4, 4.45]],
dtype='double')
self.attr_ImageString = [['True']]
self.attr_ImageEncoded = self.encodeImage()
self.attr_value = ""
TestServer.init_device(self)
    def encodeSpectrum(self):
        format = 'INT32'
        # struct format codes for other element widths:
        # uint8 -> 'B' (mode 0), uint16 -> 'H' (mode 1), uint32 -> 'I' (mode 2)
        fspectrum = numpy.array(self.attr_SpectrumULong, dtype='int32')
        ibuffer = bytes(struct.pack('i' * fspectrum.size, *fspectrum))
        return [format, ibuffer]
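
    # A hedged decoding sketch (not in the original server): the inverse of
    # encodeSpectrum, unpacking the 'INT32' buffer back into a list of ints.
    @staticmethod
    def decodeSpectrum(encoded):
        fmt, buf = encoded
        assert fmt == 'INT32'
        count = len(buf) // struct.calcsize('i')
        return list(struct.unpack('i' * count, buf))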
    def encodeImage(self):
        format = 'VIDEO_IMAGE'
        # pixel depth: mode 0 -> uint8 ('B'), mode 1 -> uint16 ('H')
        mode = 0
        width, height = self.attr_ImageUChar.shape
        version = 1
        endian = sys.byteorder == u'big'
        # endian = ord(str(struct.pack('=H', 1)[-1]))
        hsize = struct.calcsize('!IHHqiiHHHH')
        header = struct.pack(
            '!IHHqiiHHHH', 0x5644454f, version, mode, -1,
            width, height, endian, hsize, 0, 0)
        fimage = self.attr_ImageUChar.flatten()
        ibuffer = struct.pack('B' * fimage.size, *fimage)
        return [format, bytes(header + ibuffer)]
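
    # A hedged decoding sketch (not in the original server): unpacking the
    # VIDEO_IMAGE header written by encodeImage; the field names mirror the
    # pack() call above and are assumptions about the wire format.
    @staticmethod
    def decodeImageHeader(encoded):
        _, data = encoded
        hsize = struct.calcsize('!IHHqiiHHHH')
        (magic, version, mode, frame, width, height,
         endian, header_size, _pad1, _pad2) = struct.unpack(
            '!IHHqiiHHHH', data[:hsize])
        return dict(magic=magic, version=version, mode=mode, frame=frame,
                    width=width, height=height, endian=endian,
                    header_size=header_size)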
# -----------------------------------------------------------------
# Device destructor
# -----------------------------------------------------------------
def delete_device(self):
""" """
# -----------------------------------------------------------------
# Device initialization
# -----------------------------------------------------------------
def init_device(self):
self.set_state(PyTango.DevState.ON)
self.get_device_properties(self.get_device_class())
env = {'new': {'ActiveMntGrp': 'nxsmntgrp',
'DataCompressionRank': 0,
'NeXusSelectorDevice': u'p09/nxsrecselector/1',
'ScanDir': u'/tmp/',
'ScanFile': [u'sar4r.nxs'],
'ScanID': 192,
'_ViewOptions': {'ShowDial': True}}}
self.attr_Environment = ("pickle", pickle.dumps(env, protocol=2))
self.ChangeValueType("ScalarDouble")
self.attr_DoorList = ['test/door/1', 'test/door/2']
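        # A hedged client-side note (not executed here): Environment is a
        # DevEncoded attribute tagged "pickle"; a reader would do roughly
        #     fmt, blob = proxy.Environment
        #     env = pickle.loads(blob) if fmt == "pickle" else None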
# -----------------------------------------------------------------
    # Always executed hook method
# -----------------------------------------------------------------
def always_executed_hook(self):
pass
# print "In ", self.get_name(), "::always_excuted_hook()"
#
# =================================================================
#
# TestServer read/write attribute methods
#
# =================================================================
#
# -----------------------------------------------------------------
# Read DoorList attribute
# -----------------------------------------------------------------
def read_DoorList(self, attr):
# Add your own code here
attr.set_value(self.attr_DoorList)
# -----------------------------------------------------------------
# Write DoorList attribute
# -----------------------------------------------------------------
def write_DoorList(self, attr):
# Add your own code here
self.attr_DoorList = attr.get_write_value()
# -----------------------------------------------------------------
# Read Environment attribute
# -----------------------------------------------------------------
def read_Environment(self, attr):
# Add your own code here
attr.set_value(self.attr_Environment[0], self.attr_Environment[1])
# -----------------------------------------------------------------
# Write Environment attribute
# -----------------------------------------------------------------
def write_Environment(self, attr):
# Add your own code here
self.attr_Environment = attr.get_write_value()
# -----------------------------------------------------------------
# Read Value attribute
# -----------------------------------------------------------------
def read_Value(self, attr):
# Add your own code here
attr.set_value(self.defaults[self.dtype][0])
# -----------------------------------------------------------------
# Write Value attribute
# -----------------------------------------------------------------
def write_Value(self, attr):
# Add your own code here
self.defaults[self.dtype][0] = attr.get_write_value()
# =================================================================
#
# TestServer command methods
#
# =================================================================
#
# -----------------------------------------------------------------
# SetState command:
#
# Description: Set state of tango device
#
# argin: DevString tango state
# -----------------------------------------------------------------
def SetState(self, state):
if state == "RUNNING":
self.set_state(PyTango.DevState.RUNNING)
elif state == "FAULT":
self.set_state(PyTango.DevState.FAULT)
elif state == "ALARM":
self.set_state(PyTango.DevState.ALARM)
else:
self.set_state(PyTango.DevState.ON)
# -----------------------------------------------------------------
# ChangeValueType command:
#
# Description: Set state of tango device
#
# argin: DevString tango state
# -----------------------------------------------------------------
def ChangeValueType(self, dtype):
if dtype in self.defaults.keys():
if self.dtype is not None:
self.remove_attribute("Value")
self.dtype = dtype
dev_class = self.get_device_class()
attr_data = PyTango.AttrData(
"Value", dev_class.get_name(),
[
[
self.defaults[self.dtype][2],
self.defaults[self.dtype][1],
PyTango.READ_WRITE
],
{
'description': "dynamic attribute",
}
]
)
self.add_attribute(attr_data,
r_meth=self.read_Value,
w_meth=self.write_Value)
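
    # A hedged client-side sketch (not part of the server): switching the
    # dynamic attribute's type and using it; the device name is an
    # illustrative assumption.
    #
    #     proxy = PyTango.DeviceProxy("test/testserver/1")
    #     proxy.ChangeValueType("ScalarLong")
    #     proxy.write_attribute("Value", 42)
    #     print(proxy.read_attribute("Value").value)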
# -----------------------------------------------------------------
# Read ScalarLong attribute
# -----------------------------------------------------------------
def read_ScalarLong(self, attr):
# Add your own code here
attr.set_value(self.attr_ScalarLong)
# -----------------------------------------------------------------
# Write ScalarLong attribute
# -----------------------------------------------------------------
def write_ScalarLong(self, attr):
# Add your own code here
self.attr_ScalarLong = attr.get_write_value()
# -----------------------------------------------------------------
# Read ScalarBoolean attribute
# -----------------------------------------------------------------
def read_ScalarBoolean(self, attr):
# Add your own code here
attr.set_value(self.attr_ScalarBoolean)
# -----------------------------------------------------------------
# Write ScalarBoolean attribute
# -----------------------------------------------------------------
def write_ScalarBoolean(self, attr):
# Add your own code here
self.attr_ScalarBoolean = attr.get_write_value()
# -----------------------------------------------------------------
# Read ScalarShort attribute
# -----------------------------------------------------------------
def read_ScalarShort(self, attr):
# Add your own code here
attr.set_value(self.attr_ScalarShort)
# -----------------------------------------------------------------
# Write ScalarShort attribute
# -----------------------------------------------------------------
def write_ScalarShort(self, attr):
# Add your own code here
self.attr_ScalarShort = attr.get_write_value()
# -----------------------------------------------------------------
# Read ScalarUShort attribute
# -----------------------------------------------------------------
def read_ScalarUShort(self, attr):
# Add your own code here
attr.set_value(self.attr_ScalarUShort)
# -----------------------------------------------------------------
# Write ScalarUShort attribute
# -----------------------------------------------------------------
def write_ScalarUShort(self, attr):
# Add your own code here
self.attr_ScalarUShort = attr.get_write_value()
# -----------------------------------------------------------------
# Read ScalarULong attribute
# -----------------------------------------------------------------
def read_ScalarULong(self, attr):
# Add your own code here
attr.set_value(self.attr_ScalarULong)
# -----------------------------------------------------------------
# Write ScalarULong attribute
# -----------------------------------------------------------------
def write_ScalarULong(self, attr):
# Add your own code here
self.attr_ScalarULong = attr.get_write_value()
# -----------------------------------------------------------------
# Read ScalarLong64 attribute
# -----------------------------------------------------------------
def read_ScalarLong64(self, attr):
# Add your own code here
attr.set_value(self.attr_ScalarLong64)
# -----------------------------------------------------------------
# Write ScalarLong64 attribute
# -----------------------------------------------------------------
def write_ScalarLong64(self, attr):
# Add your own code here
self.attr_ScalarLong64 = attr.get_write_value()
# -----------------------------------------------------------------
# Read ScalarULong64 attribute
# -----------------------------------------------------------------
def read_ScalarULong64(self, attr):
# Add your own code here
attr.set_value(long(self.attr_ScalarULong64))
        # the raw value does not work as well, hence the long() cast
# -----------------------------------------------------------------
# Write ScalarULong64 attribute
# -----------------------------------------------------------------
def write_ScalarULong64(self, attr):
# Add your own code here
self.attr_ScalarULong64 = attr.get_write_value()
# -----------------------------------------------------------------
# Read ScalarFloat attribute
# -----------------------------------------------------------------
def read_ScalarFloat(self, attr):
# Add your own code here
attr.set_value(self.attr_ScalarFloat)
# -----------------------------------------------------------------
# Write ScalarFloat attribute
# -----------------------------------------------------------------
def write_ScalarFloat(self, attr):
# Add your own code here
self.attr_ScalarFloat = attr.get_write_value()
# -----------------------------------------------------------------
# Read ScalarDouble attribute
# -----------------------------------------------------------------
def read_ScalarDouble(self, attr):
# Add your own code here
attr.set_value(self.attr_ScalarDouble)
# -----------------------------------------------------------------
# Write ScalarDouble attribute
# -----------------------------------------------------------------
def write_ScalarDouble(self, attr):
# Add your own code here
self.attr_ScalarDouble = attr.get_write_value()
# -----------------------------------------------------------------
# Read ScalarString attribute
# -----------------------------------------------------------------
def read_ScalarString(self, attr):
# Add your own code here
attr.set_value(self.attr_ScalarString)
# -----------------------------------------------------------------
# Write ScalarString attribute
# -----------------------------------------------------------------
def write_ScalarString(self, attr):
# Add your own code here
self.attr_ScalarString = attr.get_write_value()
# -----------------------------------------------------------------
# Read ScalarEncoded attribute
# -----------------------------------------------------------------
def read_ScalarEncoded(self, attr):
# Add your own code here
attr.set_value(self.attr_ScalarEncoded[0], self.attr_ScalarEncoded[1])
# -----------------------------------------------------------------
# Write ScalarEncoded attribute
# -----------------------------------------------------------------
def write_ScalarEncoded(self, attr):
# Add your own code here
self.attr_ScalarEncoded = attr.get_write_value()
# -----------------------------------------------------------------
# Read ScalarUChar attribute
# -----------------------------------------------------------------
def read_ScalarUChar(self, attr):
# Add your own code here
attr.set_value(self.attr_ScalarUChar)
# -----------------------------------------------------------------
# Write ScalarUChar attribute
# -----------------------------------------------------------------
def write_ScalarUChar(self, attr):
# Add your own code here
self.attr_ScalarUChar = attr.get_write_value()
# -----------------------------------------------------------------
# Read SpectrumEncoded attribute
# -----------------------------------------------------------------
def read_SpectrumEncoded(self, attr):
# Add your own code here
self.attr_SpectrumEncoded = self.encodeSpectrum()
attr.set_value(self.attr_SpectrumEncoded[0],
self.attr_SpectrumEncoded[1])
# -----------------------------------------------------------------
# Write SpectrumEncoded attribute
# -----------------------------------------------------------------
def write_SpectrumEncoded(self, attr):
# Add your own code here
self.attr_SpectrumEncoded = attr.get_write_value()
# -----------------------------------------------------------------
# Read ImageEncoded attribute
# -----------------------------------------------------------------
def read_ImageEncoded(self, attr):
# Add your own code here
self.attr_ImageEncoded = self.encodeImage()
attr.set_value(self.attr_ImageEncoded[0], self.attr_ImageEncoded[1])
# -----------------------------------------------------------------
# Write ImageEncoded attribute
# -----------------------------------------------------------------
def write_ImageEncoded(self, attr):
# Add your own code here
self.attr_ImageEncoded = attr.get_write_value()
# -----------------------------------------------------------------
# Read SpectrumBoolean attribute
# -----------------------------------------------------------------
def read_SpectrumBoolean(self, attr):
# Add your own code here
attr.set_value(self.attr_SpectrumBoolean)
# -----------------------------------------------------------------
# Write SpectrumBoolean attribute
# -----------------------------------------------------------------
def write_SpectrumBoolean(self, attr):
# Add your own code here
self.attr_SpectrumBoolean = attr.get_write_value()
# -----------------------------------------------------------------
# Read SpectrumUChar attribute
# -----------------------------------------------------------------
def read_SpectrumUChar(self, attr):
# Add your own code here
attr.set_value(self.attr_SpectrumUChar)
# -----------------------------------------------------------------
# Write SpectrumUChar attribute
# -----------------------------------------------------------------
def write_SpectrumUChar(self, attr):
# Add your own code here
self.attr_SpectrumUChar = attr.get_write_value()
# -----------------------------------------------------------------
# Read SpectrumShort attribute
# -----------------------------------------------------------------
def read_SpectrumShort(self, attr):
# Add your own code here
attr.set_value(self.attr_SpectrumShort)
# -----------------------------------------------------------------
# Write SpectrumShort attribute
# -----------------------------------------------------------------
def write_SpectrumShort(self, attr):
# Add your own code here
self.attr_SpectrumShort = attr.get_write_value()
# -----------------------------------------------------------------
# Read SpectrumUShort attribute
# -----------------------------------------------------------------
def read_SpectrumUShort(self, attr):
# Add your own code here
attr.set_value(self.attr_SpectrumUShort)
# -----------------------------------------------------------------
# Write SpectrumUShort attribute
# -----------------------------------------------------------------
def write_SpectrumUShort(self, attr):
# Add your own code here
self.attr_SpectrumUShort = attr.get_write_value()
# -----------------------------------------------------------------
# Read SpectrumLong attribute
# -----------------------------------------------------------------
def read_SpectrumLong(self, attr):
# Add your own code here
attr.set_value(self.attr_SpectrumLong)
# -----------------------------------------------------------------
# Write SpectrumLong attribute
# -----------------------------------------------------------------
def write_SpectrumLong(self, attr):
# Add your own code here
self.attr_SpectrumLong = attr.get_write_value()
# -----------------------------------------------------------------
# Read SpectrumULong attribute
# -----------------------------------------------------------------
def read_SpectrumULong(self, attr):
# Add your own code here
attr.set_value(self.attr_SpectrumULong)
# -----------------------------------------------------------------
# Write SpectrumULong attribute
# -----------------------------------------------------------------
def write_SpectrumULong(self, attr):
# Add your own code here
self.attr_SpectrumULong = attr.get_write_value()
# -----------------------------------------------------------------
# Read SpectrumLong64 attribute
# -----------------------------------------------------------------
def read_SpectrumLong64(self, attr):
# Add your own code here
attr.set_value(self.attr_SpectrumLong64)
# -----------------------------------------------------------------
# Write SpectrumLong64 attribute
# -----------------------------------------------------------------
def write_SpectrumLong64(self, attr):
# Add your own code here
self.attr_SpectrumLong64 = attr.get_write_value()
# -----------------------------------------------------------------
# Read SpectrumULong64 attribute
# -----------------------------------------------------------------
def read_SpectrumULong64(self, attr):
# Add your own code here
attr.set_value(self.attr_SpectrumULong64)
# -----------------------------------------------------------------
# Write SpectrumULong64 attribute
# -----------------------------------------------------------------
def write_SpectrumULong64(self, attr):
# Add your own code here
self.attr_SpectrumULong64 = attr.get_write_value()
# -----------------------------------------------------------------
# Read SpectrumFloat attribute
# -----------------------------------------------------------------
def read_SpectrumFloat(self, attr):
# Add your own code here
attr.set_value(self.attr_SpectrumFloat)
# -----------------------------------------------------------------
# Write SpectrumFloat attribute
# -----------------------------------------------------------------
def write_SpectrumFloat(self, attr):
# Add your own code here
self.attr_SpectrumFloat = attr.get_write_value()
# -----------------------------------------------------------------
# Read SpectrumDouble attribute
# -----------------------------------------------------------------
def read_SpectrumDouble(self, attr):
# Add your own code here
attr.set_value(self.attr_SpectrumDouble)
# -----------------------------------------------------------------
# Write SpectrumDouble attribute
# -----------------------------------------------------------------
def write_SpectrumDouble(self, attr):
# Add your own code here
self.attr_SpectrumDouble = attr.get_write_value()
# -----------------------------------------------------------------
# Read SpectrumString attribute
# -----------------------------------------------------------------
def read_SpectrumString(self, attr):
# Add your own code here
attr.set_value(self.attr_SpectrumString)
# -----------------------------------------------------------------
# Write SpectrumString attribute
# -----------------------------------------------------------------
def write_SpectrumString(self, attr):
# Add your own code here
self.attr_SpectrumString = attr.get_write_value()
# -----------------------------------------------------------------
# Read ImageBoolean attribute
# -----------------------------------------------------------------
def read_ImageBoolean(self, attr):
# Add your own code here
attr.set_value(self.attr_ImageBoolean)
# -----------------------------------------------------------------
# Write ImageBoolean attribute
# -----------------------------------------------------------------
def write_ImageBoolean(self, attr):
# Add your own code here
self.attr_ImageBoolean = attr.get_write_value()
# -----------------------------------------------------------------
# Read ImageUChar attribute
# -----------------------------------------------------------------
def read_ImageUChar(self, attr):
# Add your own code here
attr.set_value(self.attr_ImageUChar)
# -----------------------------------------------------------------
# Write ImageUChar attribute
# -----------------------------------------------------------------
def write_ImageUChar(self, attr):
self.attr_ImageUChar = attr.get_write_value()
# Add your own code here
# -----------------------------------------------------------------
# Read ImageShort attribute
# -----------------------------------------------------------------
def read_ImageShort(self, attr):
# Add your own code here
attr.set_value(self.attr_ImageShort)
# -----------------------------------------------------------------
# Write ImageShort attribute
# -----------------------------------------------------------------
def write_ImageShort(self, attr):
# Add your own code here
self.attr_ImageShort = attr.get_write_value()
# -----------------------------------------------------------------
# Read ImageUShort attribute
# -----------------------------------------------------------------
def read_ImageUShort(self, attr):
# Add your own code here
attr.set_value(self.attr_ImageUShort)
# -----------------------------------------------------------------
# Write ImageUShort attribute
# -----------------------------------------------------------------
def write_ImageUShort(self, attr):
# Add your own code here
self.attr_ImageUShort = attr.get_write_value()
# -----------------------------------------------------------------
# Read ImageLong attribute
# -----------------------------------------------------------------
def read_ImageLong(self, attr):
# Add your own code here
attr.set_value(self.attr_ImageLong)
# -----------------------------------------------------------------
# Write ImageLong attribute
# -----------------------------------------------------------------
def write_ImageLong(self, attr):
# Add your own code here
self.attr_ImageLong = attr.get_write_value()
# -----------------------------------------------------------------
# Read ImageULong attribute
# -----------------------------------------------------------------
def read_ImageULong(self, attr):
# Add your own code here
attr.set_value(self.attr_ImageULong)
# -----------------------------------------------------------------
# Write ImageULong attribute
# -----------------------------------------------------------------
def write_ImageULong(self, attr):
# Add your own code here
self.attr_ImageULong = attr.get_write_value()
# -----------------------------------------------------------------
# Read ImageLong64 attribute
# -----------------------------------------------------------------
def read_ImageLong64(self, attr):
# Add your own code here
attr.set_value(self.attr_ImageLong64)
# -----------------------------------------------------------------
# Write ImageLong64 attribute
# -----------------------------------------------------------------
def write_ImageLong64(self, attr):
# Add your own code here
self.attr_ImageLong64 = attr.get_write_value()
# -----------------------------------------------------------------
# Read ImageULong64 attribute
# -----------------------------------------------------------------
def read_ImageULong64(self, attr):
# Add your own code here
attr.set_value(self.attr_ImageULong64)
# -----------------------------------------------------------------
# Write ImageULong64 attribute
# -----------------------------------------------------------------
def write_ImageULong64(self, attr):
# Add your own code here
self.attr_ImageULong64 = attr.get_write_value()
# -----------------------------------------------------------------
# Read ImageFloat attribute
# -----------------------------------------------------------------
def read_ImageFloat(self, attr):
# Add your own code here
attr.set_value(self.attr_ImageFloat)
# -----------------------------------------------------------------
# Write ImageFloat attribute
# -----------------------------------------------------------------
def write_ImageFloat(self, attr):
# Add your own code here
self.attr_ImageFloat = attr.get_write_value()
# -----------------------------------------------------------------
# Read ImageDouble attribute
# -----------------------------------------------------------------
def read_ImageDouble(self, attr):
# Add your own code here
attr.set_value(self.attr_ImageDouble)
# -----------------------------------------------------------------
# Write ImageDouble attribute
# -----------------------------------------------------------------
def write_ImageDouble(self, attr):
# Add your own code here
self.attr_ImageDouble = attr.get_write_value()
# -----------------------------------------------------------------
# Read ImageString attribute
# -----------------------------------------------------------------
def read_ImageString(self, attr):
# Add your own code here
attr.set_value(self.attr_ImageString)
# -----------------------------------------------------------------
# Write ImageString attribute
# -----------------------------------------------------------------
def write_ImageString(self, attr):
# Add your own code here
self.attr_ImageString = attr.get_write_value()
# =================================================================
#
# SimpleServer command methods
#
# =================================================================
#
# -----------------------------------------------------------------
# GetBoolean command:
#
# Description: Returns ScalarBoolean
#
# argout: DevBoolean ScalarBoolean
# -----------------------------------------------------------------
def GetBoolean(self):
# Add your own code here
return self.attr_ScalarBoolean
# -----------------------------------------------------------------
# GetShort command:
#
# Description: Returns ScalarShort
#
# argout: DevShort ScalarShort
# -----------------------------------------------------------------
def GetShort(self):
# Add your own code here
return self.attr_ScalarShort
# -----------------------------------------------------------------
# GetLong command:
#
# Description: Returns ScalarLong
#
# argout: DevLong ScalarLong
# -----------------------------------------------------------------
def GetLong(self):
# Add your own code here
return self.attr_ScalarLong
# -----------------------------------------------------------------
# GetLong64 command:
#
# Description: Returns ScalarLong64
#
# argout: DevLong64 ScalarLong64
# -----------------------------------------------------------------
def GetLong64(self):
# Add your own code here
return self.attr_ScalarLong64
# -----------------------------------------------------------------
# GetFloat command:
#
# Description: Returns ScalarFloat
#
# argout: DevFloat ScalarFloat
# -----------------------------------------------------------------
def GetFloat(self):
# Add your own code here
return self.attr_ScalarFloat
# -----------------------------------------------------------------
# GetDouble command:
#
# Description: Returns ScalarDouble
#
# argout: DevDouble ScalarDouble
# -----------------------------------------------------------------
def GetDouble(self):
# Add your own code here
return self.attr_ScalarDouble
# -----------------------------------------------------------------
# GetUShort command:
#
# Description: Returns ScalarUShort
#
# argout: DevUShort ScalarUShort
# -----------------------------------------------------------------
def GetUShort(self):
# Add your own code here
return self.attr_ScalarUShort
# -----------------------------------------------------------------
# GetULong command:
#
# Description: Returns ScalarULong
#
# argout: DevULong ScalarULong
# -----------------------------------------------------------------
def GetULong(self):
# Add your own code here
return self.attr_ScalarULong
# -----------------------------------------------------------------
# GetULong64 command:
#
# Description: Returns ScalarULong64
#
# argout: DevULong64 ScalarULong64
# -----------------------------------------------------------------
def GetULong64(self):
# Add your own code here
return self.attr_ScalarULong64
# -----------------------------------------------------------------
# GetString command:
#
# Description: Returns ScalarString
#
# argout: DevString ScalarString
# -----------------------------------------------------------------
def GetString(self):
# Add your own code here
return self.attr_ScalarString
# -----------------------------------------------------------------
# CreateDataSource command:
#
# -----------------------------------------------------------------
def CreateAttribute(self, name):
# Add your own code here
attr = PyTango.Attr(name, PyTango.DevString, PyTango.READ_WRITE)
self.add_attribute(attr, self.read_General, self.write_General)
def read_General(self, attr):
attr.set_value(self.attr_value)
def write_General(self, attr):
self.attr_value = attr.get_write_value()
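
    # A hedged client-side sketch (not part of the server): creating a string
    # attribute at runtime; the attribute name is an illustrative assumption.
    #
    #     proxy.CreateAttribute("MyDynamicString")
    #     proxy.write_attribute("MyDynamicString", "hello")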
# =================================================================
#
# TestServerClass class definition
#
# =================================================================
class TestServerClass(PyTango.DeviceClass):
# Class Properties
class_property_list = {
}
# Device Properties
device_property_list = {
'StringList':
[PyTango.DevVarStringArray,
"element names",
[]],
}
# Command definitions
cmd_list = {
'SetState':
[[PyTango.DevString, "ScalarString"],
[PyTango.DevVoid, ""]],
'CreateAttribute':
[[PyTango.DevString, "ScalarString"],
[PyTango.DevVoid, ""]],
'ChangeValueType':
[[PyTango.DevString, "ScalarString"],
[PyTango.DevVoid, ""]],
'GetBoolean':
[[PyTango.DevVoid, ""],
[PyTango.DevBoolean, "ScalarBoolean"]],
'GetShort':
[[PyTango.DevVoid, ""],
[PyTango.DevShort, "ScalarShort"]],
'GetLong':
[[PyTango.DevVoid, ""],
[PyTango.DevLong, "ScalarLong"]],
'GetLong64':
[[PyTango.DevVoid, ""],
[PyTango.DevLong64, "ScalarLong64"]],
'GetFloat':
[[PyTango.DevVoid, ""],
[PyTango.DevFloat, "ScalarFloat"]],
'GetDouble':
[[PyTango.DevVoid, ""],
[PyTango.DevDouble, "ScalarDouble"]],
'GetUShort':
[[PyTango.DevVoid, ""],
[PyTango.DevUShort, "ScalarUShort"]],
'GetULong':
[[PyTango.DevVoid, ""],
[PyTango.DevULong, "ScalarULong"]],
'GetULong64':
[[PyTango.DevVoid, ""],
[PyTango.DevULong64, "ScalarULong64"]],
'GetString':
[[PyTango.DevVoid, ""],
[PyTango.DevString, "ScalarString"]],
}
# Attribute definitions
attr_list = {
'ScalarLong':
[[PyTango.DevLong,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'description': "test long scalar attribute",
}],
'ScalarBoolean':
[[PyTango.DevBoolean,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'description': "test scalar bool attribute",
}],
'ScalarShort':
[[PyTango.DevShort,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'description': "Scalar Short attribute",
}],
'ScalarUShort':
[[PyTango.DevUShort,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'description': "ScalarUShort attribute",
}],
'ScalarULong':
[[PyTango.DevULong,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'description': "ScalarULong attribute",
}],
'ScalarLong64':
[[PyTango.DevLong64,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'description': "ScalarLong64 attribute",
}],
'ScalarULong64':
[[PyTango.DevULong64,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'description': "ScalarULong64 attribute",
}],
'ScalarFloat':
[[PyTango.DevFloat,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'description': "ScalarFloat attribute",
}],
'ScalarDouble':
[[PyTango.DevDouble,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'description': "ScalarDouble attribute",
}],
'ScalarString':
[[PyTango.DevString,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'description': "ScalarString attribute",
}],
'ScalarEncoded':
[[PyTango.DevEncoded,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'description': "ScalarEncoded attribute",
}],
'ScalarUChar':
[[PyTango.DevUChar,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'description': "ScalarUChar attribute",
}],
'SpectrumEncoded':
[[PyTango.DevEncoded,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'description': "SpectrumEncoded attribute",
}],
'ImageEncoded':
[[PyTango.DevEncoded,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'description': "ImageEncoded attribute",
}],
'SpectrumBoolean':
[[PyTango.DevBoolean,
PyTango.SPECTRUM,
PyTango.READ_WRITE, 4096],
{
'description': "SpectrumBoolean attribute",
}],
'SpectrumUChar':
[[PyTango.DevUChar,
PyTango.SPECTRUM,
PyTango.READ_WRITE, 4096],
{
'description': "SpectrumUChar attribute",
}],
'SpectrumShort':
[[PyTango.DevShort,
PyTango.SPECTRUM,
PyTango.READ_WRITE, 4096],
{
'description': "SpectrumShort attribute",
}],
'SpectrumUShort':
[[PyTango.DevUShort,
PyTango.SPECTRUM,
PyTango.READ_WRITE, 4096],
{
'description': "SpectrumUShort",
}],
'SpectrumLong':
[[PyTango.DevLong,
PyTango.SPECTRUM,
PyTango.READ_WRITE, 4096],
{
'description': "SpectrumLong attribute",
}],
'SpectrumULong':
[[PyTango.DevULong,
PyTango.SPECTRUM,
PyTango.READ_WRITE, 4096],
{
'description': "SpectrumULong attribute",
}],
'SpectrumLong64':
[[PyTango.DevLong64,
PyTango.SPECTRUM,
PyTango.READ_WRITE, 4096],
{
'description': "SpectrumLong64 attribute",
}],
'SpectrumULong64':
[[PyTango.DevULong64,
PyTango.SPECTRUM,
PyTango.READ_WRITE, 4096],
{
'description': "SpectrumULong64 attribute",
}],
'SpectrumFloat':
[[PyTango.DevFloat,
PyTango.SPECTRUM,
PyTango.READ_WRITE, 4096],
{
'description': "SpectrumFloat attribute",
}],
'SpectrumDouble':
[[PyTango.DevDouble,
PyTango.SPECTRUM,
PyTango.READ_WRITE, 4096],
{
'description': "SpectrumDouble attribute",
}],
'SpectrumString':
[[PyTango.DevString,
PyTango.SPECTRUM,
PyTango.READ_WRITE, 4096],
{
'description': "SpectrumString attribute",
}],
'ImageBoolean':
[[PyTango.DevBoolean,
PyTango.IMAGE,
PyTango.READ_WRITE, 4096, 4096],
{
'description': "ImageBoolean attribute",
}],
'ImageUChar':
[[PyTango.DevUChar,
PyTango.IMAGE,
PyTango.READ_WRITE, 4096, 4096],
{
'description': "ImageUChar attribute",
}],
'ImageShort':
[[PyTango.DevShort,
PyTango.IMAGE,
PyTango.READ_WRITE, 4096, 4096],
{
'description': "ImageShort attribute",
}],
'ImageUShort':
[[PyTango.DevUShort,
PyTango.IMAGE,
PyTango.READ_WRITE, 4096, 4096],
{
'description': "ImageUShort attribute",
}],
'ImageLong':
[[PyTango.DevLong,
PyTango.IMAGE,
PyTango.READ_WRITE, 4096, 4096],
{
'description': "ImageLong attribute",
}],
'ImageULong':
[[PyTango.DevULong,
PyTango.IMAGE,
PyTango.READ_WRITE, 4096, 4096],
{
'description': "ImageULong attribute",
}],
'ImageLong64':
[[PyTango.DevLong64,
PyTango.IMAGE,
PyTango.READ_WRITE, 4096, 4096],
{
'description': "ImageLong64 attribute",
}],
'ImageULong64':
[[PyTango.DevULong64,
PyTango.IMAGE,
PyTango.READ_WRITE, 4096, 4096],
{
'description': "ImageULong64 attribute",
}],
'ImageFloat':
[[PyTango.DevFloat,
PyTango.IMAGE,
PyTango.READ_WRITE, 4096, 4096],
{
'description': "ImageFloat attribute",
}],
'ImageDouble':
[[PyTango.DevDouble,
PyTango.IMAGE,
PyTango.READ_WRITE, 4096, 4096],
{
'description': "ImageDouble attribute",
}],
'ImageString':
[[PyTango.DevString,
PyTango.IMAGE,
PyTango.READ_WRITE, 4096, 4096],
{
'description': "ImageString attribute",
}],
'Environment':
[[PyTango.DevEncoded,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'description': "Environment attribute",
}],
'DoorList':
[[PyTango.DevString,
PyTango.SPECTRUM,
PyTango.READ_WRITE,
256],
{
'description': "Environment attribute",
}],
}
# -----------------------------------------------------------------
# TestServerClass Constructor
# -----------------------------------------------------------------
def __init__(self, name):
PyTango.DeviceClass.__init__(self, name)
self.set_type(name)
# =================================================================
#
# TestServer class main method
#
# =================================================================
if __name__ == '__main__':
try:
py = PyTango.Util(sys.argv)
py.add_class(TestServerClass, TestServer, 'TestServer')
U = PyTango.Util.instance()
U.server_init()
U.server_run()
except PyTango.DevFailed as e:
print('-------> Received a DevFailed exception: %s' % e)
except Exception as e:
print('-------> An unforeseen exception occurred: %s' % e)
| nexdatas/recselector | test/TestServer.py | Python | gpl-3.0 | 49,959 |
# coding: utf8
# winetheme.py
# 9/29/2013 jichi
if __name__ == '__main__':
import debug
debug.initenv()
import features
if features.WINE:
from sakurakit.skdebug import dwarn
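# Each value below is an "R G B" decimal triple, the string format Windows
# uses for the color entries under HKEY_CURRENT_USER\Control Panel\Colors.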
MAC_THEME = {
'ActiveBorder' : "240 240 240",
'ActiveTitle' : "240 240 240",
'AppWorkSpace' : "198 198 191",
'Background' : "0 0 0",
'ButtonAlternativeFace' : "216 216 216",
'ButtonDkShadow' : "85 85 82",
'ButtonFace' : "240 240 240",
'ButtonHilight' : "255 255 255",
'ButtonLight' : "255 255 255",
'ButtonShadow' : "198 198 191",
'ButtonText' : "0 0 0",
'GradientActiveTitle' : "240 240 240",
'GradientInactiveTitle' : "240 240 240",
'GrayText' : "198 198 191",
'Hilight' : "119 153 221",
'HilightText' : "0 0 0",
'InactiveBorder' : "240 240 240",
'InactiveTitle' : "240 240 240",
'InactiveTitleText' : "255 255 255",
'InfoText' : "0 0 0",
'InfoWindow' : "216 216 216",
'Menu' : "240 240 240",
'MenuBar' : "0 0 0",
'MenuHilight' : "179 145 105",
'MenuText' : "0 0 0",
'Scrollbar' : "240 240 240",
'TitleText' : "255 255 255",
'Window' : "255 255 255",
'WindowFrame' : "0 0 0",
'WindowText' : "0 0 0",
}
def dump():
theme = MAC_THEME
USERDIC_REG_PATH = r"Control Panel\Colors"
import _winreg
hk = _winreg.HKEY_CURRENT_USER
try:
with _winreg.ConnectRegistry(None, hk) as reg: # computer_name = None
with _winreg.OpenKey(reg, USERDIC_REG_PATH) as path:
for k in theme.iterkeys():
try:
v = _winreg.QueryValueEx(path, k)[0]
print k, "=", v
except WindowsError:
print k, "=", None
except (WindowsError, TypeError, AttributeError), e: dwarn(e)
# FIXME 9/29/2013: WindowsError 5: permission denied on Wine!
def install():
theme = MAC_THEME
USERDIC_REG_PATH = r"Control Panel\Colors"
import _winreg
hk = _winreg.HKEY_CURRENT_USER
try:
with _winreg.ConnectRegistry(None, hk) as reg: # computer_name = None
with _winreg.OpenKey(reg, USERDIC_REG_PATH, _winreg.KEY_SET_VALUE) as path:
for k,v in theme.iteritems():
_winreg.SetValueEx(path, k, 0, _winreg.REG_SZ, v)
except (WindowsError, TypeError, AttributeError), e: dwarn(e)
# FIXME 9/29/2013: WindowsError 5: permission denied on Wine!
def uninstall():
theme = MAC_THEME
USERDIC_REG_PATH = r"Control Panel\Colors"
import _winreg
hk = _winreg.HKEY_CURRENT_USER
try:
with _winreg.ConnectRegistry(None, hk) as reg: # computer_name = None
with _winreg.OpenKey(reg, USERDIC_REG_PATH, _winreg.KEY_SET_VALUE) as path:
for k in theme.iterkeys():
try: _winreg.DeleteValue(path, k) # entries were created with SetValueEx, so remove values (DeleteKeyEx targets subkeys)
except WindowsError: pass
except (WindowsError, TypeError, AttributeError), e: dwarn(e)
else:
def dump(): pass
def install(): pass
def uninstall(): pass
if __name__ == '__main__':
dump()
install()
#uninstall()
# EOF
| Dangetsu/vnr | Frameworks/Sakura/py/apps/reader/TRASH/winetheme.py | Python | gpl-3.0 | 3,050 |
__all__ = ["core"] | alfiyansys/a-reconsidered-sign | algorithms/__init__.py | Python | gpl-3.0 | 18 |
# implement samba_tool drs commands
#
# Copyright Andrew Tridgell 2010
# Copyright Andrew Bartlett 2017
#
# based on C implementation by Kamen Mazdrashki <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import print_function
import samba.getopt as options
import ldb
import logging
from . import common
import json
from samba.auth import system_session
from samba.netcmd import (
Command,
CommandError,
Option,
SuperCommand,
)
from samba.samdb import SamDB
from samba import drs_utils, nttime2string, dsdb
from samba.dcerpc import drsuapi, misc
from samba.join import join_clone
from samba import colour
from samba.uptodateness import (
get_partition_maps,
get_utdv_edges,
get_utdv_distances,
get_utdv_summary,
get_kcc_and_dsas,
)
from samba.common import get_string
from samba.samdb import get_default_backend_store
def drsuapi_connect(ctx):
'''make a DRSUAPI connection to the server'''
try:
(ctx.drsuapi, ctx.drsuapi_handle, ctx.bind_supported_extensions) = drs_utils.drsuapi_connect(ctx.server, ctx.lp, ctx.creds)
except Exception as e:
raise CommandError("DRS connection to %s failed" % ctx.server, e)
def samdb_connect(ctx):
'''make a ldap connection to the server'''
try:
ctx.samdb = SamDB(url="ldap://%s" % ctx.server,
session_info=system_session(),
credentials=ctx.creds, lp=ctx.lp)
except Exception as e:
raise CommandError("LDAP connection to %s failed" % ctx.server, e)
def drs_errmsg(werr):
'''return "was successful" or an error string'''
(ecode, estring) = werr
if ecode == 0:
return "was successful"
return "failed, result %u (%s)" % (ecode, estring)
def attr_default(msg, attrname, default):
'''get an attribute from a ldap msg with a default'''
if attrname in msg:
return msg[attrname][0]
return default
def drs_parse_ntds_dn(ntds_dn):
'''parse a NTDS DN returning a site and server'''
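# Illustrative (hypothetical DN): "CN=NTDS Settings,CN=DC07,CN=Servers,CN=Site-A,CN=Sites,..."
# parses to ("Site-A", "DC07").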
a = ntds_dn.split(',')
if a[0] != "CN=NTDS Settings" or a[2] != "CN=Servers" or a[4] != 'CN=Sites':
raise RuntimeError("bad NTDS DN %s" % ntds_dn)
server = a[1].split('=')[1]
site = a[3].split('=')[1]
return (site, server)
DEFAULT_SHOWREPL_FORMAT = 'classic'
class cmd_drs_showrepl(Command):
"""Show replication status."""
synopsis = "%prog [<DC>] [options]"
takes_optiongroups = {
"sambaopts": options.SambaOptions,
"versionopts": options.VersionOptions,
"credopts": options.CredentialsOptions,
}
takes_options = [
Option("--json", help="replication details in JSON format",
dest='format', action='store_const', const='json'),
Option("--summary", help=("summarize overall DRS health as seen "
"from this server"),
dest='format', action='store_const', const='summary'),
Option("--pull-summary", help=("Have we successfully replicated "
"from all relevent servers?"),
dest='format', action='store_const', const='pull_summary'),
Option("--notify-summary", action='store_const',
const='notify_summary', dest='format',
help=("Have we successfully notified all relevent servers of "
"local changes, and did they say they successfully "
"replicated?")),
Option("--classic", help="print local replication details",
dest='format', action='store_const', const='classic',
default=DEFAULT_SHOWREPL_FORMAT),
Option("-v", "--verbose", help="Be verbose", action="store_true"),
Option("--color", help="Use colour output (yes|no|auto)",
default='no'),
]
takes_args = ["DC?"]
def parse_neighbour(self, n):
"""Convert an ldb neighbour object into a python dictionary"""
dsa_objectguid = str(n.source_dsa_obj_guid)
d = {
'NC dn': n.naming_context_dn,
"DSA objectGUID": dsa_objectguid,
"last attempt time": nttime2string(n.last_attempt),
"last attempt message": drs_errmsg(n.result_last_attempt),
"consecutive failures": n.consecutive_sync_failures,
"last success": nttime2string(n.last_success),
"NTDS DN": str(n.source_dsa_obj_dn),
'is deleted': False
}
try:
self.samdb.search(base="<GUID=%s>" % dsa_objectguid,
scope=ldb.SCOPE_BASE,
attrs=[])
except ldb.LdbError as e:
(errno, _) = e.args
if errno == ldb.ERR_NO_SUCH_OBJECT:
d['is deleted'] = True
else:
raise
try:
(site, server) = drs_parse_ntds_dn(n.source_dsa_obj_dn)
d["DSA"] = "%s\\%s" % (site, server)
except RuntimeError:
pass
return d
def print_neighbour(self, d):
'''print one set of neighbour information'''
self.message("%s" % d['NC dn'])
if 'DSA' in d:
self.message("\t%s via RPC" % d['DSA'])
else:
self.message("\tNTDS DN: %s" % d['NTDS DN'])
self.message("\t\tDSA object GUID: %s" % d['DSA objectGUID'])
self.message("\t\tLast attempt @ %s %s" % (d['last attempt time'],
d['last attempt message']))
self.message("\t\t%u consecutive failure(s)." %
d['consecutive failures'])
self.message("\t\tLast success @ %s" % d['last success'])
self.message("")
def get_neighbours(self, info_type):
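"""Call DsReplicaGetInfo for the given info type and return the
neighbours as a list of dictionaries."""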
req1 = drsuapi.DsReplicaGetInfoRequest1()
req1.info_type = info_type
try:
(info_type, info) = self.drsuapi.DsReplicaGetInfo(
self.drsuapi_handle, 1, req1)
except Exception as e:
raise CommandError("DsReplicaGetInfo of type %u failed" % info_type, e)
reps = [self.parse_neighbour(n) for n in info.array]
return reps
def run(self, DC=None, sambaopts=None,
credopts=None, versionopts=None,
format=DEFAULT_SHOWREPL_FORMAT,
verbose=False, color='no'):
self.apply_colour_choice(color)
self.lp = sambaopts.get_loadparm()
if DC is None:
DC = common.netcmd_dnsname(self.lp)
self.server = DC
self.creds = credopts.get_credentials(self.lp, fallback_machine=True)
self.verbose = verbose
output_function = {
'summary': self.summary_output,
'notify_summary': self.notify_summary_output,
'pull_summary': self.pull_summary_output,
'json': self.json_output,
'classic': self.classic_output,
}.get(format)
if output_function is None:
raise CommandError("unknown showrepl format %s" % format)
return output_function()
def json_output(self):
data = self.get_local_repl_data()
del data['site']
del data['server']
json.dump(data, self.outf, indent=2)
def summary_output_handler(self, typeof_output):
"""Print a short message if every seems fine, but print details of any
links that seem broken."""
failing_repsto = []
failing_repsfrom = []
local_data = self.get_local_repl_data()
if typeof_output != "pull_summary":
for rep in local_data['repsTo']:
if rep['is deleted']:
continue
if rep["consecutive failures"] != 0 or rep["last success"] == 0:
failing_repsto.append(rep)
if typeof_output != "notify_summary":
for rep in local_data['repsFrom']:
if rep['is deleted']:
continue
if rep["consecutive failures"] != 0 or rep["last success"] == 0:
failing_repsfrom.append(rep)
if failing_repsto or failing_repsfrom:
self.message(colour.c_RED("There are failing connections"))
if failing_repsto:
self.message(colour.c_RED("Failing outbound connections:"))
for rep in failing_repsto:
self.print_neighbour(rep)
if failing_repsfrom:
self.message(colour.c_RED("Failing inbound connection:"))
for rep in failing_repsfrom:
self.print_neighbour(rep)
return 1
self.message(colour.c_GREEN("[ALL GOOD]"))
def summary_output(self):
return self.summary_output_handler("summary")
def notify_summary_output(self):
return self.summary_output_handler("notify_summary")
def pull_summary_output(self):
return self.summary_output_handler("pull_summary")
def get_local_repl_data(self):
drsuapi_connect(self)
samdb_connect(self)
# show domain information
ntds_dn = self.samdb.get_dsServiceName()
(site, server) = drs_parse_ntds_dn(ntds_dn)
try:
ntds = self.samdb.search(base=ntds_dn, scope=ldb.SCOPE_BASE, attrs=['options', 'objectGUID', 'invocationId'])
except Exception as e:
raise CommandError("Failed to search NTDS DN %s" % ntds_dn)
dsa_details = {
"options": int(attr_default(ntds[0], "options", 0)),
"objectGUID": get_string(self.samdb.schema_format_value(
"objectGUID", ntds[0]["objectGUID"][0])),
"invocationId": get_string(self.samdb.schema_format_value(
"objectGUID", ntds[0]["invocationId"][0]))
}
conn = self.samdb.search(base=ntds_dn, expression="(objectClass=nTDSConnection)")
repsfrom = self.get_neighbours(drsuapi.DRSUAPI_DS_REPLICA_INFO_NEIGHBORS)
repsto = self.get_neighbours(drsuapi.DRSUAPI_DS_REPLICA_INFO_REPSTO)
conn_details = []
for c in conn:
c_rdn, sep, c_server_dn = str(c['fromServer'][0]).partition(',')
d = {
'name': str(c['name']),
'remote DN': str(c['fromServer'][0]),
'options': int(attr_default(c, 'options', 0)),
'enabled': (get_string(attr_default(c, 'enabledConnection',
'TRUE')).upper() == 'TRUE')
}
conn_details.append(d)
try:
c_server_res = self.samdb.search(base=c_server_dn,
scope=ldb.SCOPE_BASE,
attrs=["dnsHostName"])
d['dns name'] = str(c_server_res[0]["dnsHostName"][0])
except ldb.LdbError as e:
(errno, _) = e.args
if errno == ldb.ERR_NO_SUCH_OBJECT:
d['is deleted'] = True
except (KeyError, IndexError):
pass
d['replicates NC'] = []
for r in c.get('mS-DS-ReplicatesNCReason', []):
a = str(r).split(':')
d['replicates NC'].append((a[3], int(a[2])))
return {
'dsa': dsa_details,
'repsFrom': repsfrom,
'repsTo': repsto,
'NTDSConnections': conn_details,
'site': site,
'server': server
}
def classic_output(self):
data = self.get_local_repl_data()
dsa_details = data['dsa']
repsfrom = data['repsFrom']
repsto = data['repsTo']
conn_details = data['NTDSConnections']
site = data['site']
server = data['server']
self.message("%s\\%s" % (site, server))
self.message("DSA Options: 0x%08x" % dsa_details["options"])
self.message("DSA object GUID: %s" % dsa_details["objectGUID"])
self.message("DSA invocationId: %s\n" % dsa_details["invocationId"])
self.message("==== INBOUND NEIGHBORS ====\n")
for n in repsfrom:
self.print_neighbour(n)
self.message("==== OUTBOUND NEIGHBORS ====\n")
for n in repsto:
self.print_neighbour(n)
reasons = ['NTDSCONN_KCC_GC_TOPOLOGY',
'NTDSCONN_KCC_RING_TOPOLOGY',
'NTDSCONN_KCC_MINIMIZE_HOPS_TOPOLOGY',
'NTDSCONN_KCC_STALE_SERVERS_TOPOLOGY',
'NTDSCONN_KCC_OSCILLATING_CONNECTION_TOPOLOGY',
'NTDSCONN_KCC_INTERSITE_GC_TOPOLOGY',
'NTDSCONN_KCC_INTERSITE_TOPOLOGY',
'NTDSCONN_KCC_SERVER_FAILOVER_TOPOLOGY',
'NTDSCONN_KCC_SITE_FAILOVER_TOPOLOGY',
'NTDSCONN_KCC_REDUNDANT_SERVER_TOPOLOGY']
self.message("==== KCC CONNECTION OBJECTS ====\n")
for d in conn_details:
self.message("Connection --")
if d.get('is deleted'):
self.message("\tWARNING: Connection to DELETED server!")
self.message("\tConnection name: %s" % d['name'])
self.message("\tEnabled : %s" % str(d['enabled']).upper())
self.message("\tServer DNS name : %s" % d.get('dns name'))
self.message("\tServer DN name : %s" % d['remote DN'])
self.message("\t\tTransportType: RPC")
self.message("\t\toptions: 0x%08X" % d['options'])
if d['replicates NC']:
for nc, reason in d['replicates NC']:
self.message("\t\tReplicatesNC: %s" % nc)
self.message("\t\tReason: 0x%08x" % reason)
for s in reasons:
if getattr(dsdb, s, 0) & reason:
self.message("\t\t\t%s" % s)
else:
self.message("Warning: No NC replicated for Connection!")
class cmd_drs_kcc(Command):
"""Trigger knowledge consistency center run."""
synopsis = "%prog [<DC>] [options]"
takes_optiongroups = {
"sambaopts": options.SambaOptions,
"versionopts": options.VersionOptions,
"credopts": options.CredentialsOptions,
}
takes_args = ["DC?"]
def run(self, DC=None, sambaopts=None,
credopts=None, versionopts=None):
self.lp = sambaopts.get_loadparm()
if DC is None:
DC = common.netcmd_dnsname(self.lp)
self.server = DC
self.creds = credopts.get_credentials(self.lp, fallback_machine=True)
drsuapi_connect(self)
req1 = drsuapi.DsExecuteKCC1()
try:
self.drsuapi.DsExecuteKCC(self.drsuapi_handle, 1, req1)
except Exception as e:
raise CommandError("DsExecuteKCC failed", e)
self.message("Consistency check on %s successful." % DC)
class cmd_drs_replicate(Command):
"""Replicate a naming context between two DCs."""
synopsis = "%prog <destinationDC> <sourceDC> <NC> [options]"
takes_optiongroups = {
"sambaopts": options.SambaOptions,
"versionopts": options.VersionOptions,
"credopts": options.CredentialsOptions,
}
takes_args = ["DEST_DC", "SOURCE_DC", "NC"]
takes_options = [
Option("--add-ref", help="use ADD_REF to add to repsTo on source", action="store_true"),
Option("--sync-forced", help="use SYNC_FORCED to force inbound replication", action="store_true"),
Option("--sync-all", help="use SYNC_ALL to replicate from all DCs", action="store_true"),
Option("--full-sync", help="resync all objects", action="store_true"),
Option("--local", help="pull changes directly into the local database (destination DC is ignored)", action="store_true"),
Option("--local-online", help="pull changes into the local database (destination DC is ignored) as a normal online replication", action="store_true"),
Option("--async-op", help="use ASYNC_OP for the replication", action="store_true"),
Option("--single-object", help="Replicate only the object specified, instead of the whole Naming Context (only with --local)", action="store_true"),
]
def drs_local_replicate(self, SOURCE_DC, NC, full_sync=False,
single_object=False,
sync_forced=False):
'''replicate from a source DC to the local SAM'''
self.server = SOURCE_DC
drsuapi_connect(self)
# Override the default flag LDB_FLG_DONT_CREATE_DB
self.local_samdb = SamDB(session_info=system_session(), url=None,
credentials=self.creds, lp=self.lp,
flags=0)
self.samdb = SamDB(url="ldap://%s" % self.server,
session_info=system_session(),
credentials=self.creds, lp=self.lp)
# work out the source and destination GUIDs
res = self.local_samdb.search(base="", scope=ldb.SCOPE_BASE,
attrs=["dsServiceName"])
self.ntds_dn = res[0]["dsServiceName"][0]
res = self.local_samdb.search(base=self.ntds_dn, scope=ldb.SCOPE_BASE,
attrs=["objectGUID"])
self.ntds_guid = misc.GUID(
self.samdb.schema_format_value("objectGUID",
res[0]["objectGUID"][0]))
source_dsa_invocation_id = misc.GUID(self.samdb.get_invocation_id())
dest_dsa_invocation_id = misc.GUID(self.local_samdb.get_invocation_id())
destination_dsa_guid = self.ntds_guid
exop = drsuapi.DRSUAPI_EXOP_NONE
if single_object:
exop = drsuapi.DRSUAPI_EXOP_REPL_OBJ
full_sync = True
self.samdb.transaction_start()
repl = drs_utils.drs_Replicate("ncacn_ip_tcp:%s[seal]" % self.server,
self.lp,
self.creds, self.local_samdb,
dest_dsa_invocation_id)
# Work out if we are an RODC, so that a forced local replicate
# with the admin pw does not sync passwords
rodc = self.local_samdb.am_rodc()
try:
(num_objects, num_links) = repl.replicate(NC,
source_dsa_invocation_id,
destination_dsa_guid,
rodc=rodc,
full_sync=full_sync,
exop=exop,
sync_forced=sync_forced)
except Exception as e:
raise CommandError("Error replicating DN %s" % NC, e)
self.samdb.transaction_commit()
if full_sync:
self.message("Full Replication of all %d objects and %d links "
"from %s to %s was successful." %
(num_objects, num_links, SOURCE_DC,
self.local_samdb.url))
else:
self.message("Incremental replication of %d objects and %d links "
"from %s to %s was successful." %
(num_objects, num_links, SOURCE_DC,
self.local_samdb.url))
def run(self, DEST_DC, SOURCE_DC, NC,
add_ref=False, sync_forced=False, sync_all=False, full_sync=False,
local=False, local_online=False, async_op=False, single_object=False,
sambaopts=None, credopts=None, versionopts=None):
self.server = DEST_DC
self.lp = sambaopts.get_loadparm()
self.creds = credopts.get_credentials(self.lp, fallback_machine=True)
if local:
self.drs_local_replicate(SOURCE_DC, NC, full_sync=full_sync,
single_object=single_object,
sync_forced=sync_forced)
return
if local_online:
server_bind = drsuapi.drsuapi("irpc:dreplsrv", lp_ctx=self.lp)
server_bind_handle = misc.policy_handle()
else:
drsuapi_connect(self)
server_bind = self.drsuapi
server_bind_handle = self.drsuapi_handle
if not async_op:
# Give the sync replication 5 minutes time
server_bind.request_timeout = 5 * 60
samdb_connect(self)
# we need to find the NTDS GUID of the source DC
msg = self.samdb.search(base=self.samdb.get_config_basedn(),
expression="(&(objectCategory=server)(|(name=%s)(dNSHostName=%s)))" % (
ldb.binary_encode(SOURCE_DC),
ldb.binary_encode(SOURCE_DC)),
attrs=[])
if len(msg) == 0:
raise CommandError("Failed to find source DC %s" % SOURCE_DC)
server_dn = msg[0]['dn']
msg = self.samdb.search(base=server_dn, scope=ldb.SCOPE_ONELEVEL,
expression="(|(objectCategory=nTDSDSA)(objectCategory=nTDSDSARO))",
attrs=['objectGUID', 'options'])
if len(msg) == 0:
raise CommandError("Failed to find source NTDS DN %s" % SOURCE_DC)
source_dsa_guid = msg[0]['objectGUID'][0]
dsa_options = int(attr_default(msg[0], 'options', 0))
req_options = 0
if not (dsa_options & dsdb.DS_NTDSDSA_OPT_DISABLE_OUTBOUND_REPL):
req_options |= drsuapi.DRSUAPI_DRS_WRIT_REP
if add_ref:
req_options |= drsuapi.DRSUAPI_DRS_ADD_REF
if sync_forced:
req_options |= drsuapi.DRSUAPI_DRS_SYNC_FORCED
if sync_all:
req_options |= drsuapi.DRSUAPI_DRS_SYNC_ALL
if full_sync:
req_options |= drsuapi.DRSUAPI_DRS_FULL_SYNC_NOW
if async_op:
req_options |= drsuapi.DRSUAPI_DRS_ASYNC_OP
try:
drs_utils.sendDsReplicaSync(server_bind, server_bind_handle, source_dsa_guid, NC, req_options)
except drs_utils.drsException as estr:
raise CommandError("DsReplicaSync failed", estr)
if async_op:
self.message("Replicate from %s to %s was started." % (SOURCE_DC, DEST_DC))
else:
self.message("Replicate from %s to %s was successful." % (SOURCE_DC, DEST_DC))
class cmd_drs_bind(Command):
"""Show DRS capabilities of a server."""
synopsis = "%prog [<DC>] [options]"
takes_optiongroups = {
"sambaopts": options.SambaOptions,
"versionopts": options.VersionOptions,
"credopts": options.CredentialsOptions,
}
takes_args = ["DC?"]
def run(self, DC=None, sambaopts=None,
credopts=None, versionopts=None):
self.lp = sambaopts.get_loadparm()
if DC is None:
DC = common.netcmd_dnsname(self.lp)
self.server = DC
self.creds = credopts.get_credentials(self.lp, fallback_machine=True)
drsuapi_connect(self)
bind_info = drsuapi.DsBindInfoCtr()
bind_info.length = 28
bind_info.info = drsuapi.DsBindInfo28()
(info, handle) = self.drsuapi.DsBind(misc.GUID(drsuapi.DRSUAPI_DS_BIND_GUID), bind_info)
optmap = [
("DRSUAPI_SUPPORTED_EXTENSION_BASE", "DRS_EXT_BASE"),
("DRSUAPI_SUPPORTED_EXTENSION_ASYNC_REPLICATION", "DRS_EXT_ASYNCREPL"),
("DRSUAPI_SUPPORTED_EXTENSION_REMOVEAPI", "DRS_EXT_REMOVEAPI"),
("DRSUAPI_SUPPORTED_EXTENSION_MOVEREQ_V2", "DRS_EXT_MOVEREQ_V2"),
("DRSUAPI_SUPPORTED_EXTENSION_GETCHG_COMPRESS", "DRS_EXT_GETCHG_DEFLATE"),
("DRSUAPI_SUPPORTED_EXTENSION_DCINFO_V1", "DRS_EXT_DCINFO_V1"),
("DRSUAPI_SUPPORTED_EXTENSION_RESTORE_USN_OPTIMIZATION", "DRS_EXT_RESTORE_USN_OPTIMIZATION"),
("DRSUAPI_SUPPORTED_EXTENSION_ADDENTRY", "DRS_EXT_ADDENTRY"),
("DRSUAPI_SUPPORTED_EXTENSION_KCC_EXECUTE", "DRS_EXT_KCC_EXECUTE"),
("DRSUAPI_SUPPORTED_EXTENSION_ADDENTRY_V2", "DRS_EXT_ADDENTRY_V2"),
("DRSUAPI_SUPPORTED_EXTENSION_LINKED_VALUE_REPLICATION", "DRS_EXT_LINKED_VALUE_REPLICATION"),
("DRSUAPI_SUPPORTED_EXTENSION_DCINFO_V2", "DRS_EXT_DCINFO_V2"),
("DRSUAPI_SUPPORTED_EXTENSION_INSTANCE_TYPE_NOT_REQ_ON_MOD", "DRS_EXT_INSTANCE_TYPE_NOT_REQ_ON_MOD"),
("DRSUAPI_SUPPORTED_EXTENSION_CRYPTO_BIND", "DRS_EXT_CRYPTO_BIND"),
("DRSUAPI_SUPPORTED_EXTENSION_GET_REPL_INFO", "DRS_EXT_GET_REPL_INFO"),
("DRSUAPI_SUPPORTED_EXTENSION_STRONG_ENCRYPTION", "DRS_EXT_STRONG_ENCRYPTION"),
("DRSUAPI_SUPPORTED_EXTENSION_DCINFO_V01", "DRS_EXT_DCINFO_VFFFFFFFF"),
("DRSUAPI_SUPPORTED_EXTENSION_TRANSITIVE_MEMBERSHIP", "DRS_EXT_TRANSITIVE_MEMBERSHIP"),
("DRSUAPI_SUPPORTED_EXTENSION_ADD_SID_HISTORY", "DRS_EXT_ADD_SID_HISTORY"),
("DRSUAPI_SUPPORTED_EXTENSION_POST_BETA3", "DRS_EXT_POST_BETA3"),
("DRSUAPI_SUPPORTED_EXTENSION_GETCHGREQ_V5", "DRS_EXT_GETCHGREQ_V5"),
("DRSUAPI_SUPPORTED_EXTENSION_GET_MEMBERSHIPS2", "DRS_EXT_GETMEMBERSHIPS2"),
("DRSUAPI_SUPPORTED_EXTENSION_GETCHGREQ_V6", "DRS_EXT_GETCHGREQ_V6"),
("DRSUAPI_SUPPORTED_EXTENSION_NONDOMAIN_NCS", "DRS_EXT_NONDOMAIN_NCS"),
("DRSUAPI_SUPPORTED_EXTENSION_GETCHGREQ_V8", "DRS_EXT_GETCHGREQ_V8"),
("DRSUAPI_SUPPORTED_EXTENSION_GETCHGREPLY_V5", "DRS_EXT_GETCHGREPLY_V5"),
("DRSUAPI_SUPPORTED_EXTENSION_GETCHGREPLY_V6", "DRS_EXT_GETCHGREPLY_V6"),
("DRSUAPI_SUPPORTED_EXTENSION_ADDENTRYREPLY_V3", "DRS_EXT_WHISTLER_BETA3"),
("DRSUAPI_SUPPORTED_EXTENSION_GETCHGREPLY_V7", "DRS_EXT_WHISTLER_BETA3"),
("DRSUAPI_SUPPORTED_EXTENSION_VERIFY_OBJECT", "DRS_EXT_WHISTLER_BETA3"),
("DRSUAPI_SUPPORTED_EXTENSION_XPRESS_COMPRESS", "DRS_EXT_W2K3_DEFLATE"),
("DRSUAPI_SUPPORTED_EXTENSION_GETCHGREQ_V10", "DRS_EXT_GETCHGREQ_V10"),
("DRSUAPI_SUPPORTED_EXTENSION_RESERVED_PART2", "DRS_EXT_RESERVED_FOR_WIN2K_OR_DOTNET_PART2"),
("DRSUAPI_SUPPORTED_EXTENSION_RESERVED_PART3", "DRS_EXT_RESERVED_FOR_WIN2K_OR_DOTNET_PART3")
]
optmap_ext = [
("DRSUAPI_SUPPORTED_EXTENSION_ADAM", "DRS_EXT_ADAM"),
("DRSUAPI_SUPPORTED_EXTENSION_LH_BETA2", "DRS_EXT_LH_BETA2"),
("DRSUAPI_SUPPORTED_EXTENSION_RECYCLE_BIN", "DRS_EXT_RECYCLE_BIN")]
self.message("Bind to %s succeeded." % DC)
self.message("Extensions supported:")
for (opt, desc) in optmap:
optval = getattr(drsuapi, opt, 0)
if info.info.supported_extensions & optval:
yesno = "Yes"
else:
yesno = "No "
self.message(" %-60s: %s (%s)" % (opt, yesno, desc))
if isinstance(info.info, drsuapi.DsBindInfo48):
self.message("\nExtended Extensions supported:")
for (opt, desc) in optmap_ext:
optval = getattr(drsuapi, opt, 0)
if info.info.supported_extensions_ext & optval:
yesno = "Yes"
else:
yesno = "No "
self.message(" %-60s: %s (%s)" % (opt, yesno, desc))
self.message("\nSite GUID: %s" % info.info.site_guid)
self.message("Repl epoch: %u" % info.info.repl_epoch)
if isinstance(info.info, drsuapi.DsBindInfo48):
self.message("Forest GUID: %s" % info.info.config_dn_guid)
class cmd_drs_options(Command):
"""Query or change 'options' for NTDS Settings object of a Domain Controller."""
synopsis = "%prog [<DC>] [options]"
takes_optiongroups = {
"sambaopts": options.SambaOptions,
"versionopts": options.VersionOptions,
"credopts": options.CredentialsOptions,
}
takes_args = ["DC?"]
takes_options = [
Option("--dsa-option", help="DSA option to enable/disable", type="str",
metavar="{+|-}IS_GC | {+|-}DISABLE_INBOUND_REPL | {+|-}DISABLE_OUTBOUND_REPL | {+|-}DISABLE_NTDSCONN_XLATE"),
]
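# Bit values of the nTDSDSA object's 'options' attribute, matching the
# flag names accepted by --dsa-option.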
option_map = {"IS_GC": 0x00000001,
"DISABLE_INBOUND_REPL": 0x00000002,
"DISABLE_OUTBOUND_REPL": 0x00000004,
"DISABLE_NTDSCONN_XLATE": 0x00000008}
def run(self, DC=None, dsa_option=None,
sambaopts=None, credopts=None, versionopts=None):
self.lp = sambaopts.get_loadparm()
if DC is None:
DC = common.netcmd_dnsname(self.lp)
self.server = DC
self.creds = credopts.get_credentials(self.lp, fallback_machine=True)
samdb_connect(self)
ntds_dn = self.samdb.get_dsServiceName()
res = self.samdb.search(base=ntds_dn, scope=ldb.SCOPE_BASE, attrs=["options"])
dsa_opts = int(res[0]["options"][0])
# print out current DSA options
cur_opts = [x for x in self.option_map if self.option_map[x] & dsa_opts]
self.message("Current DSA options: " + ", ".join(cur_opts))
# modify options
if dsa_option:
if dsa_option[:1] not in ("+", "-"):
raise CommandError("Unknown option %s" % dsa_option)
flag = dsa_option[1:]
if flag not in self.option_map.keys():
raise CommandError("Unknown option %s" % dsa_option)
if dsa_option[:1] == "+":
dsa_opts |= self.option_map[flag]
else:
dsa_opts &= ~self.option_map[flag]
# save new options
m = ldb.Message()
m.dn = ldb.Dn(self.samdb, ntds_dn)
m["options"] = ldb.MessageElement(str(dsa_opts), ldb.FLAG_MOD_REPLACE, "options")
self.samdb.modify(m)
# print out new DSA options
cur_opts = [x for x in self.option_map if self.option_map[x] & dsa_opts]
self.message("New DSA options: " + ", ".join(cur_opts))
class cmd_drs_clone_dc_database(Command):
"""Replicate an initial clone of domain, but DO NOT JOIN it."""
synopsis = "%prog <dnsdomain> [options]"
takes_optiongroups = {
"sambaopts": options.SambaOptions,
"versionopts": options.VersionOptions,
"credopts": options.CredentialsOptions,
}
takes_options = [
Option("--server", help="DC to join", type=str),
Option("--targetdir", help="where to store provision (required)", type=str),
Option("-q", "--quiet", help="Be quiet", action="store_true"),
Option("--include-secrets", help="Also replicate secret values", action="store_true"),
Option("--backend-store", type="choice", metavar="BACKENDSTORE",
choices=["tdb", "mdb"],
help="Specify the database backend to be used "
"(default is %s)" % get_default_backend_store()),
Option("--backend-store-size", type="bytes", metavar="SIZE",
help="Specify the size of the backend database, currently" +
"only supported by lmdb backends (default is 8 Gb).")
]
takes_args = ["domain"]
def run(self, domain, sambaopts=None, credopts=None,
versionopts=None, server=None, targetdir=None,
quiet=False, verbose=False, include_secrets=False,
backend_store=None, backend_store_size=None):
lp = sambaopts.get_loadparm()
creds = credopts.get_credentials(lp)
logger = self.get_logger(verbose=verbose, quiet=quiet)
if targetdir is None:
raise CommandError("--targetdir option must be specified")
join_clone(logger=logger, server=server, creds=creds, lp=lp,
domain=domain, dns_backend='SAMBA_INTERNAL',
targetdir=targetdir, include_secrets=include_secrets,
backend_store=backend_store,
backend_store_size=backend_store_size)
class cmd_drs_uptodateness(Command):
"""Show uptodateness status"""
synopsis = "%prog [options]"
takes_optiongroups = {
"sambaopts": options.SambaOptions,
"versionopts": options.VersionOptions,
"credopts": options.CredentialsOptions,
}
takes_options = [
Option("-H", "--URL", metavar="URL", dest="H",
help="LDB URL for database or target server"),
Option("-p", "--partition",
help="restrict to this partition"),
Option("--json", action='store_true',
help="Print data in json format"),
Option("--maximum", action='store_true',
help="Print maximum out-of-date-ness only"),
Option("--median", action='store_true',
help="Print median out-of-date-ness only"),
Option("--full", action='store_true',
help="Print full out-of-date-ness data"),
]
def format_as_json(self, partitions_summaries):
return json.dumps(partitions_summaries, indent=2)
def format_as_text(self, partitions_summaries):
lines = []
for part_name, summary in partitions_summaries.items():
items = ['%s: %s' % (k, v) for k, v in summary.items()]
line = '%-15s %s' % (part_name, ' '.join(items))
lines.append(line)
return '\n'.join(lines)
def run(self, H=None, partition=None,
json=False, maximum=False, median=False, full=False,
sambaopts=None, credopts=None, versionopts=None,
quiet=False, verbose=False):
lp = sambaopts.get_loadparm()
creds = credopts.get_credentials(lp, fallback_machine=True)
local_kcc, dsas = get_kcc_and_dsas(H, lp, creds)
samdb = local_kcc.samdb
short_partitions, _ = get_partition_maps(samdb)
if partition:
if partition in short_partitions:
part_dn = short_partitions[partition]
# narrow down to specified partition only
short_partitions = {partition: part_dn}
else:
raise CommandError("unknown partition %s" % partition)
filters = []
if maximum:
filters.append('maximum')
if median:
filters.append('median')
partitions_distances = {}
partitions_summaries = {}
for part_name, part_dn in short_partitions.items():
utdv_edges = get_utdv_edges(local_kcc, dsas, part_dn, lp, creds)
distances = get_utdv_distances(utdv_edges, dsas)
summary = get_utdv_summary(distances, filters=filters)
partitions_distances[part_name] = distances
partitions_summaries[part_name] = summary
if full:
# always print json format
output = self.format_as_json(partitions_distances)
else:
if json:
output = self.format_as_json(partitions_summaries)
else:
output = self.format_as_text(partitions_summaries)
print(output, file=self.outf)
class cmd_drs(SuperCommand):
"""Directory Replication Services (DRS) management."""
subcommands = {}
subcommands["bind"] = cmd_drs_bind()
subcommands["kcc"] = cmd_drs_kcc()
subcommands["replicate"] = cmd_drs_replicate()
subcommands["showrepl"] = cmd_drs_showrepl()
subcommands["options"] = cmd_drs_options()
subcommands["clone-dc-database"] = cmd_drs_clone_dc_database()
subcommands["uptodateness"] = cmd_drs_uptodateness()
| kernevil/samba | python/samba/netcmd/drs.py | Python | gpl-3.0 | 36,173 |
from setuptools import setup
version = 'y.dev0'
long_description = '\n\n'.join([
open('README.rst').read(),
open('TODO.rst').read(),
open('CREDITS.rst').read(),
open('CHANGES.rst').read(),
])
install_requires = [
'pkginfo',
'setuptools',
'nens',
]
tests_require = [
]
setup(name='timeseries',
version=version,
description="Package to implement time series and generic operations on time series.",
long_description=long_description,
# Get strings from http://www.python.org/pypi?%3Aaction=list_classifiers
classifiers=[],
keywords=[],
author='Pieter Swinkels',
author_email='[email protected]',
url='',
license='GPL',
packages=['timeseries'],
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
tests_require=tests_require,
extras_require = {'test': tests_require},
entry_points={
'console_scripts': [
'ziprelease = adapter.ziprelease:main',
]},
)
| nens/timeseries | setup.py | Python | gpl-3.0 | 1,079 |
import abjad
def test_LilyPondParser__functions__transpose_01():
pitches = ["e'", "gs'", "b'", "e''"]
maker = abjad.NoteMaker()
target = abjad.Staff(maker(pitches, (1, 4)))
key_signature = abjad.KeySignature("e", "major")
abjad.attach(key_signature, target[0])
assert abjad.lilypond(target) == abjad.string.normalize(
r"""
\new Staff
{
\key e \major
e'4
gs'4
b'4
e''4
}
"""
)
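# \transpose d e shifts the parsed \relative music up a major second
# (d to e), which should reproduce the E major target above.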
string = r"\transpose d e \relative c' \new Staff { \key d \major d4 fs a d }"
parser = abjad.parser.LilyPondParser()
result = parser(string)
assert abjad.lilypond(target) == abjad.lilypond(result) and target is not result
def test_LilyPondParser__functions__transpose_02():
pitches = ["ef'", "f'", "g'", "bf'"]
maker = abjad.NoteMaker()
target = abjad.Staff(maker(pitches, (1, 4)))
key_signature = abjad.KeySignature("ef", "major")
abjad.attach(key_signature, target[0])
assert abjad.lilypond(target) == abjad.string.normalize(
r"""
\new Staff
{
\key ef \major
ef'4
f'4
g'4
bf'4
}
"""
)
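# \transpose a c' shifts the C major music up a minor third (a to c'),
# which should reproduce the E-flat major target above.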
string = r"\transpose a c' \relative c' \new Staff { \key c \major c4 d e g }"
parser = abjad.parser.LilyPondParser()
result = parser(string)
assert abjad.lilypond(target) == abjad.lilypond(result) and target is not result
def test_LilyPondParser__functions__transpose_03():
maker = abjad.NoteMaker()
target = abjad.Staff(
[
abjad.Container(maker(["cs'", "ds'", "es'", "fs'"], (1, 4))),
abjad.Container(maker(["df'", "ef'", "f'", "gf'"], (1, 4))),
]
)
assert abjad.lilypond(target) == abjad.string.normalize(
r"""
\new Staff
{
{
cs'4
ds'4
es'4
fs'4
}
{
df'4
ef'4
f'4
gf'4
}
}
"""
)
string = r"""music = \relative c' { c d e f }
\new Staff {
\transpose c cs \music
\transpose c df \music
}
"""
parser = abjad.parser.LilyPondParser()
result = parser(string)
assert abjad.lilypond(target) == abjad.lilypond(result) and target is not result
| Abjad/abjad | tests/test_LilyPondParser__functions__transpose.py | Python | gpl-3.0 | 2,425 |
import numpy as np
import scipy.spatial.distance as dist
from matplotlib.backends.backend_pdf import PdfPages
import matplotlib.pyplot as plt
import matplotlib.lines as mplines
import scipy.cluster.hierarchy as clust
import os
def kabsch(coord, ref,app):
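"""Kabsch algorithm: find the optimal rotation aligning coord onto ref
(both assumed centred) via SVD of their covariance matrix; the
determinant check rejects improper rotations (reflections). The
rotation is then applied to app, so a bead subset can drive the
alignment of a full model."""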
C = np.dot(np.transpose(coord), ref)
V, S, W = np.linalg.svd(C)
#print("VSW", V,S,W)
d = (np.linalg.det(V) * np.linalg.det(W)) < 0.0
if d:
S[-1] = -S[-1]
V[:,-1] = -V[:,-1]
# Create Rotation matrix U
U = np.dot(V, W)
# Rotate coord
kcoord = np.dot(app, U)
return kcoord
def rmsd(coord, ref):
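"""Root-mean-square deviation between two equal-shape coordinate arrays."""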
sd = (coord - ref)**2
ssd = np.mean(sd)
rmsd = np.sqrt(ssd)
return rmsd
#colors = [(1,.4,.4),(.4,.4,1),(.4,1,.4),(1,.4,1),(.4,1,1),(1,.7,.4),(1,.4,.7)]
colors = [(0,.6,.6),(1,0,.5),(1,1,.2),(1,1,.2),(.8,.4,0),(.6,1,1),(.8,0,.8),(0,.9,0),(0,.6,.6),(1,0,.5),(1,1,.2),(1,1,.2),(.8,.4,0),(.6,1,1),(.8,0,.8),(0,.9,0),(0,.6,.6),(1,0,.5),(1,1,.2),(1,1,.2),(.8,.4,0),(.6,1,1),(.8,0,.8),(0,.9,0)]
def writepym(i,coords,radii):
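"""Write <i>.pym, a PyMOL CGO script that draws each coordinate as a
sphere of the matching radius, cycling through the color table above."""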
pymfilename= i + ".pym"
pymfile=open(pymfilename, "w")
pymfile.write('from pymol.cgo import *'+ '\n')
pymfile.write('from pymol import cmd'+ '\n')
pymfile.write('from pymol.vfont import plain' + '\n' + 'data={}' + '\n' + "curdata=[]" + '\n')
#print(x for x in enumerate(coords))
for item in enumerate(coords):
#print(colors[item[0]][0],colors[item[0]][1], colors[item[0]][2])
#print(colors[item[0]][0])
#print(item)
pymfile.write("k='Protein" + str(item[0]) + " geometry'" +'\n'+ "if not k in data.keys():" +'\n'+" data[k]=[]"+'\n'+'curdata=['+'\n'+'COLOR,' + str(colors[item[0]%8][0])+","+str(colors[item[0]%8][1])+","+ str(colors[item[0]%8][2])+"," + '\n' + 'SPHERE,'+ str(item[1][0])+ ','+ str(item[1][1])+',' + str(item[1][2])+','+ str(radii[item[0]]) +'\n')
pymfile.write("]"+"\n"+"k='Protein" + str(item[0]) + " geometry'" + '\n' + "if k in data.keys():" + "\n" + " data[k]= data[k]+curdata"+'\n'+"else:" +'\n' +" data[k]= curdata"+"\n")
pymfile.write("for k in data.keys():" + "\n" + " cmd.load_cgo(data[k], k, 1)" +"\n"+ "data= {}")
pymfile.close()
files=os.listdir(".")
#refs=[x for x in files if x.endswith('1k90_refcoords.npy')]
np.set_printoptions(threshold=1000000)
pdf = PdfPages("corrected.pdf")
# Load the pairwise RMSD distance matrix.
#fname = "corrected-res.csv"
distmat = np.load("rmsdmat.npy")
# Calculate the mean of the pairwise distances.
ii = np.tril_indices(distmat.shape[0], -1)
pwise = distmat[ii]
mdist = np.mean(pwise)
print(mdist)
#print(pwise)
# Generate a histogram of the pairwise distances.
plt.clf()
plt.hist(pwise, 20, color='lightblue')
plt.xlabel("Similarity")#, size=17)
plt.ylabel("Frequency")#, size=17)
pdf.savefig()
# Do the clustering
h = clust.average(distmat)
# Plot the dendrogram
plt.figure(figsize=(16,10))
plt.figure(linewidth=100.0)
plt.clf()
ax = plt.axes()
for pos in 'right','bottom','top':
ax.spines[pos].set_color('none')
ax.xaxis.set_ticks_position('none')
ax.yaxis.set_ticks_position('left')
ax.spines['left'].set_position(('outward', 10))
x=clust.dendrogram(h)
#plt.getp(x)
pdf.savefig()
pdf.close()
#ll = clust.leaves_list(h)
#print(len(ll))
tree = clust.to_tree(h)
#print(tree)
#ctree = clust.cut_tree(h, height = 150)
#print(np.shape(ctree))
ctree = clust.cut_tree(h, n_clusters = 2)
leaves = clust.leaves_list(h)
#print(np.shape(ctree))
ctree = np.reshape(ctree, len(leaves))
#print(np.shape(leaves))
#print(np.shape(ctree))
#print(np.vstack((leaves,ctree)))
files=os.listdir(".")
files=[x for x in files if x.startswith('tetramer_model_')]
print(len(files))
n_clusters = np.max(ctree) + 1
#print(n_clusters)
clusters = [[] for i in range(n_clusters)]
CCC = np.array([2,3,10,11,18,19])
AC3 = np.array([0,2,3,8,10,11,16,18,19])
#MDFG = np.array([4,5,6,7,12,13,14,15,20,21,22,23])
##actually MD
MDFG = np.array([4,5,12,13,20,21])
for i, leaf in enumerate(leaves):
cluster = ctree[i]
structure = np.load("goodmodels0.npy")[i]
# print(len(clusters))
# print(cluster)
clusters[cluster].append(structure)
rmsdlist = []
coordlist = []
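# For each cluster: Kabsch-align every structure (on its CCC beads) to every
# other, collect all pairwise RMSDs, and keep the set aligned against the
# roughly-middle member (o == av) as that cluster's representative coordinates.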
for cluster_structs in clusters:
l = len(cluster_structs)
av = round(l / 2, -1)
av = int(av)
crmsdlist = []
alignedcoordlist = []
for o,st in enumerate(cluster_structs):
strmsdlist = []
stCst = st[CCC]
stC = stCst - np.mean(stCst, axis = 0)
st3 = st - np.mean(st, axis = 0)
#ik = i[np.array([2,7,12])]
#ikm = ik - np.mean(ik, axis = 0)
#im = i - np.mean(i, axis = 0)
#print(i)
for st2 in cluster_structs:
st2Cst = st2[CCC]
st2C = st2Cst - np.mean(st2Cst, axis = 0)
st23 = st2 - np.mean(st2Cst, axis = 0)
k = kabsch(st2C, stC, st23)
k = k - np.mean(k, axis =0)
#r2 = rmsd(k[np.array([3,4,8,9,13,14])], st3[np.array([3,4,8,9,13,14])])
r = rmsd(k, st3)
#print(r, r2)
#r = rmsd(st, k)
strmsdlist.append(r)
if o == av:
alignedcoordlist.append(k)
#print(r)
#jm = j - np.mean(j, axis = 0)
#jk = j[np.array([2,7,12])]
#jkm = jk - np.mean(jk, axis = 0)
#k = kabsch(jkm, ikm, jm)
#k = k - np.mean(k, axis =0)
#r = rmsd(k[np.array([3,4,8,9,13,14])], im[np.array([3,4,8,9,13,14])])
#r2 = rmsd(k[np.array([2,7,12])], im[np.array([2,7,12])])
#print(i)
#print(r, r2)
#rmsdlist1.append(r)
crmsdlist.append(strmsdlist)
#print(alignedcoordlist)
rmsdlist.append(crmsdlist)
coordlist.append(alignedcoordlist)
radii = np.load("radii.npy")
clustcoords = []
for i,item in enumerate(coordlist):
print(np.shape(item))
mean = np.mean(item, axis = 0)
med = round(len(item)/2)
writepym("cluster_mean_"+str(i), mean, radii)
#writepym("cluster_med_"+str(i), item[med],radii)
#print(item))
np.save("cluster_"+str(i)+".npy", item)
#print("std ", np.std(item, axis = 0))
clustcoords.append(mean)
np.save("clust_av_coordsn.npy",clustcoords)
m = []
for cl in rmsdlist:
mean = np.mean(cl)
m.append(mean)
print(mean)
print(np.mean(m))
| jEschweiler/Urease | urease_software/cluster.py | Python | gpl-3.0 | 5,877 |
#!/usr/bin/env python
import VMSYSTEM.libSBTCVM as libSBTCVM
import VMSYSTEM.libbaltcalc as libbaltcalc
import sys
import os
assmoverrun=19683
instcnt=0
txtblk=0
VMSYSROMS=os.path.join("VMSYSTEM", "ROMS")
critcomperr=0
compvers="v2.2.0"
outfile="assmout.trom"
#define IOmaps
IOmapread={"random": "--0------"}
IOmapwrite={}
#populate IOmaps with memory pointers
scratchmap={}
scratchstart="---------"
shortsccnt=1
scratchstop="---++++++"
IOgen=scratchstart
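# Walk the balanced-ternary scratch address range, registering "mem1",
# "mem2", ... shortcuts for successive scratch cells in both the read
# and write IO maps.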
while IOgen!=scratchstop:
#scratchmap[("mem" + str(shortsccnt))] = IOgen
IOmapread[("mem" + str(shortsccnt))] = IOgen
IOmapwrite[("mem" + str(shortsccnt))] = IOgen
IOgen=libSBTCVM.trunkto6(libbaltcalc.btadd(IOgen, "+"))
shortsccnt += 1
#scratchmap[("mem" + str(shortsccnt))] = scratchstop
IOmapread[("mem" + str(shortsccnt))] = scratchstop
IOmapwrite[("mem" + str(shortsccnt))] = scratchstop
def getlinetern(line):
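"""Convert an absolute instruction index into a signed 9-trit
balanced-ternary line address: 9841 = (3**9 - 1) / 2, so subtracting it
centres the 0..19682 range on zero."""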
line=(line-9841)
tline=libSBTCVM.trunkto6(libbaltcalc.DECTOBT(line))
return tline
tracecomp=0
#used to write to the compiler log if the compiler is in tracelog mode
def complog(textis):
if tracecomp==1:
compilerlog.write(textis)
#class used by the goto refrence system
class gotoref:
def __init__(self, line, gtname):
self.line=line
self.tline=getlinetern(line)
self.gtname=gtname
#begin by reading command line arguments
try:
cmd=sys.argv[1]
except:
cmd=None
if "GLOBASMFLG" in globals():
cmd=GLOBASMFLG
if cmd=="-h" or cmd=="--help" or cmd=="help":
print '''This is SBTCVM-asm2.py, SBTCVM Mark 2's assembler.
commands:
SBTCVM-asm2.py -h (--help) (help): this text
SBTCVM-asm2.py -v (--version)
SBTCVM-asm2.py -a (--about): about SBTCVM-asm2.py
SBTCVM-asm2.py -c (--compile) [sourcefile]: build a tasm source into a trom
SBTCVM-asm2.py -t (--tracecompile) [sourcefile]: same as -c but logs the compiling process in detail in the CAP directory.
SBTCVM-asm2.py [sourcefile]: build a tasm source into a trom
'''
elif cmd=="-v" or cmd=="--version":
print ("SBTCVM Assember" + compvers)
elif cmd=="-a" or cmd=="--about":
print '''SBTCVM Assembler 2
''' + compvers + '''
(c)2016-2017 Thomas Leathers and Contributors
SBTCVM Assembler 2 is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
SBTCVM Assembler 2 is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with SBTCVM Assembler 2. If not, see <http://www.gnu.org/licenses/>
'''
elif cmd==None:
print "tip: use SBTCVM-asm2.py -h for help."
elif cmd=="-c" or cmd=="--compile" or cmd[0]!="-" or cmd=="-t" or cmd=="--tracecompile":
print("SBTCVM-asm " + compvers + " starting")
if "GLOBASMFLG" in globals():
arg=GLOBASMFLG
else:
if cmd[0]!="-":
arg=sys.argv[1]
else:
arg=sys.argv[2]
print arg
lowarg=arg.lower()
argisfile=0
argistasm=0
for extq in ["", ".tasm", ".TASM"]:
qarg=(arg + extq)
qlowarg=(lowarg + extq.lower())
print "searching for: \"" + qarg + "\"..."
if os.path.isfile(qarg):
argisfile=1
print "found: " + qarg
elif os.path.isfile(os.path.join("VMSYSTEM", qarg)):
qarg=os.path.join("VMSYSTEM", qarg)
print "found: " + qarg
argisfile=1
elif os.path.isfile(os.path.join(VMSYSROMS, qarg)):
qarg=os.path.join(VMSYSROMS, qarg)
print "found: " + qarg
argisfile=1
elif os.path.isfile(os.path.join("VMUSER", qarg)):
qarg=os.path.join("VMUSER", qarg)
print "found: " + qarg
argisfile=1
elif os.path.isfile(os.path.join("ROMS", qarg)):
qarg=os.path.join("ROMS", qarg)
print "found: " + qarg
argisfile=1
if argisfile==1:
if qlowarg.endswith(".tasm") and os.path.isfile(qarg):
print "tasm source found."
arg=qarg
argistasm=1
break
else:
print "Not valid."
argisfile=0
if argisfile==0 or argistasm==0:
#print "ERROR: file not found, or is not a tasm file STOP"
sys.exit("ERROR: SBTCVM assembler was unable to load the specified filename. STOP")
#generate a name for logs in case its needed
#logsub=arg.replace("/", "-")
#logsub=logsub.replace("~", "")
#logsub=logsub.split(".")
logsub=libSBTCVM.namecrunch(arg, "-tasm-comp.log")
#detect if command line options specify tracelog compile mode:
if cmd=="-t" or cmd=="--tracecompile":
tracecomp=1
compilerlog=open(os.path.join('CAP', logsub), "w")
else:
tracecomp=0
#arg=arg.replace("./", "")
#print arg
complog("starting up compiler...\n")
complog("TASM VERSION: SBTCVM-asm " + compvers + "\n")
complog("source: " + arg + "\n")
complog("---------\n\n")
#open 2 instances of source. one per pass.
sourcefile=open(arg, 'r')
sourcefileB=open(arg, 'r')
#open(arg, 'r') as sourcefile
gotoreflist=list()
print "preforming prescan & prep pass"
complog("preforming prescan & prep pass\n")
srcline=0
for linen in sourcefile:
srcline += 1
lined=linen
linen=linen.replace("\n", "")
linen=linen.replace(" ", "")
linenraw=linen
linen=(linen.split("#"))[0]
linelist=linen.split("|")
if (len(linelist))==2:
instword=(linelist[0])
instdat=(linelist[1])
else:
instword=(linelist[0])
instdat="000000000"
if instword=="textstop":
txtblk=0
complog("TEXTBLOCK END\n")
gtflag=1
if txtblk==1:
for f in lined:
instcnt += 1
elif instword=="textstart":
txtblk=1
complog("TEXTBLOCK START\n")
#raw class
elif instword=="romread1":
instcnt += 1
elif instword=="romread2":
instcnt += 1
elif instword=="IOread1":
instcnt += 1
elif instword=="IOread2":
instcnt += 1
elif instword=="IOwrite1":
instcnt += 1
elif instword=="IOwrite2":
instcnt += 1
elif instword=="regswap":
instcnt += 1
elif instword=="copy1to2":
instcnt += 1
elif instword=="copy2to1":
instcnt += 1
elif instword=="invert1":
instcnt += 1
elif instword=="invert2":
instcnt += 1
elif instword=="add":
instcnt += 1
elif instword=="subtract":
instcnt += 1
elif instword=="multiply":
instcnt += 1
elif instword=="divide":
instcnt += 1
elif instword=="setreg1":
instcnt += 1
elif instword=="setreg2":
instcnt += 1
elif instword=="setinst":
instcnt += 1
elif instword=="setdata":
instcnt += 1
#----jump in used opcodes----
#color drawing
elif instword=="continue":
instcnt += 1
elif instword=="colorpixel":
instcnt += 1
elif instword=="setcolorreg":
instcnt += 1
elif instword=="colorfill":
instcnt += 1
elif instword=="setcolorvect":
instcnt += 1
elif instword=="colorline":
instcnt += 1
elif instword=="colorrect":
instcnt += 1
#mono drawing
elif instword=="monopixel":
instcnt += 1
elif instword=="monofill":
instcnt += 1
elif instword=="setmonovect":
instcnt += 1
elif instword=="monoline":
instcnt += 1
elif instword=="monorect":
instcnt += 1
#----opcode --00-+ unused----
elif instword=="stop":
instcnt += 1
elif instword=="null":
instcnt += 1
elif instword=="gotodata":
instcnt += 1
elif instword=="gotoreg1":
instcnt += 1
elif instword=="gotodataif":
instcnt += 1
elif instword=="wait":
instcnt += 1
elif instword=="YNgoto":
instcnt += 1
elif instword=="userwait":
instcnt += 1
elif instword=="TTYclear":
instcnt += 1
#----gap in used opcodes----
elif instword=="gotoA":
instcnt += 1
autostpflg=1
elif instword=="gotoAif":
instcnt += 1
elif instword=="gotoB":
instcnt += 1
autostpflg=1
elif instword=="gotoBif":
instcnt += 1
elif instword=="gotoC":
instcnt += 1
elif instword=="gotoCif":
instcnt += 1
elif instword=="gotoD":
instcnt += 1
elif instword=="gotoDif":
instcnt += 1
elif instword=="gotoE":
instcnt += 1
elif instword=="gotoEif":
instcnt += 1
elif instword=="gotoF":
instcnt += 1
elif instword=="gotoFif":
instcnt += 1
#----gap in used opcodes----
elif instword=="dumpreg1":
instcnt += 1
elif instword=="dumpreg2":
instcnt += 1
elif instword=="TTYwrite":
instcnt += 1
elif instword=="buzzer":
instcnt += 1
elif instword=="setregset":
instcnt += 1
elif instword=="regset":
instcnt += 1
elif instword=="setkeyint":
instcnt += 1
elif instword=="keyint":
instcnt += 1
elif instword=="offsetlen":
instcnt += 1
elif instword=="clearkeyint":
instcnt += 1
elif instword=="gotoifgreater":
instcnt += 1
elif instword=="TTYbg":
instcnt += 2
elif instword=="TTYlinedraw":
instcnt += 2
elif instword=="TTYmode":
instcnt += 2
elif instword=="threadref":
instcnt += 1
elif instword=="threadstart":
instcnt += 1
elif instword=="threadstop":
instcnt += 1
elif instword=="threadkill":
instcnt += 1
else:
gtflag=0
if gtflag==1 and (txtblk==0 or linenraw=="textstart"):
complog("pass 1: srcline:" + str(srcline) + " instcnt:" + str(instcnt) + " inst:" + instword + " instdat:" + instdat + "\n")
elif gtflag==1 and txtblk==1:
complog("TEXTBLOCK: pass 1 : srcline:" + str(srcline) + " instcnt:" + str(instcnt) + " textline: \"" + linenraw + "\"\n")
if (len(linelist))==3 and gtflag==1 and txtblk==0 and instword[0]!="#":
if instword=="textstart":
instcnt += 1
gtox=gotoref((instcnt - 1), linelist[2])
gotoreflist.extend([gtox])
print ("found gotoref: \"" + linelist[2] + "\", at instruction:\"" + str((instcnt - 1)) + "\", Source line:\"" + str(srcline) + "\"")
complog("found gotoref: \"" + linelist[2] + "\", at instruction:\"" + str((instcnt - 1)) + "\", Source line:\"" + str(srcline) + "\"\n")
if instword=="textstart":
instcnt -= 1
#print gotoreflist
instcnt=0
firstloop=1
srcline=0
for linen in sourcefileB:
srcline += 1
if firstloop==1:
print "preforming compileloop startup..."
complog("\n\npreforming compileloop startup...\n")
assmflename=arg
complog("source file: \"" + assmflename + "\"\n")
assmnamelst=assmflename.rsplit('.', 1)
outfile=(assmnamelst[0] + (".trom"))
complog("output file: \"" + outfile + "\"\n")
outn = open(outfile, 'w')
firstloop=0
print "done. begin compile."
complog("done. begin compile.\n")
lined=linen
linen=linen.replace("\n", "")
linen=linen.replace(" ", "")
linenraw=linen
linen=(linen.split("#"))[0]
linelist=linen.split("|")
autostpflg=0
gtflag=1
if (len(linelist))==2 or (len(linelist))==3:
instword=(linelist[0])
instdat=(linelist[1])
else:
instword=(linelist[0])
instdat="000000000"
if instdat=="":
instdat="000000000"
print "NOTICE: data portion at source line:\"" + str(srcline) + "\" blank, defaulting to ground..."
complog("NOTICE: data portion at source line:\"" + str(srcline) + "\" blank, defaulting to ground...\n")
#if len(instdat)==6 and instdat[0]!=">" and instdat[0]!=":":
# print "Mark 1.x legacy NOTICE: instruction \"" + instword + "\" at \"" + str(srcline) + "\" did not have 9 trits data. it has been padded far from radix. please pad any legacy instructions manually."
# complog("Mark 1.x legacy NOTICE: instruction \"" + instword + "\" at \"" + str(srcline) + "\" did not have 9 trits data. it has been padded far from radix. please pad any legacy instructions manually.\n")
# instdat=("000" + instdat)
if instword=="textstop":
txtblk=0
complog("TEXTBLOCK END\n")
if txtblk==1:
for f in lined:
texchout=libSBTCVM.charlook(f)
texchout=("000" + texchout)
outn.write("--+++0" + (texchout) + "\n")
instcnt += 1
elif instword=="textstart":
txtblk=1
complog("TEXTBLOCK START\n")
#raw class
elif instword=="romread1":
instgpe=instdat.split(">")
if (len(instgpe))==1:
outn.write("------" + instdat + "\n")#
instcnt += 1
autostpflg=1
else:
gtpoint=instgpe[1]
gtmatch=0
instcnt += 1
for fx in gotoreflist:
if fx.gtname==gtpoint:
outn.write("------" + fx.tline + "\n")
gtmatch=1
if gtmatch==0:
#print "ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP"
complog("ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP \n")
sys.exit("ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP")
elif instword=="romread2":
instgpe=instdat.split(">")
if (len(instgpe))==1:
outn.write("-----0" + instdat + "\n")#
instcnt += 1
autostpflg=1
else:
gtpoint=instgpe[1]
gtmatch=0
instcnt += 1
for fx in gotoreflist:
if fx.gtname==gtpoint:
outn.write("-----0" + fx.tline + "\n")
gtmatch=1
if gtmatch==0:
#print "ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP"
complog("ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP \n")
sys.exit("ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP")
elif instword=="IOread1":
instgpe=instdat.split(">")
if (len(instgpe))==1:
outn.write("-----+" + instdat + "\n")#
instcnt += 1
autostpflg=1
else:
try:
IOpnk=IOmapread[instgpe[1]]
outn.write("-----+" + IOpnk + "\n")
except KeyError:
#print "ERROR: IO read shortcut: \"" + instgpe[1] + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP"
complog("ERROR: IO read shortcut: \"" + instgpe[1] + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP \n")
sys.exit("ERROR: IO read shortcut: \"" + instgpe[1] + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP")
instcnt += 1
#outn.write("-----+" + instdat + "\n")
#instcnt += 1
elif instword=="IOread2":
#outn.write("----0-" + instdat + "\n")
instgpe=instdat.split(">")
if (len(instgpe))==1:
outn.write("----0-" + instdat + "\n")#
instcnt += 1
autostpflg=1
else:
try:
IOpnk=IOmapread[instgpe[1]]
outn.write("----0-" + IOpnk + "\n")
except KeyError:
#print "ERROR: IO read shortcut: \"" + instgpe[1] + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP"
complog("ERROR: IO read shortcut: \"" + instgpe[1] + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP \n")
sys.exit("ERROR: IO read shortcut: \"" + instgpe[1] + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP")
instcnt += 1
#instcnt += 1
elif instword=="IOwrite1":
instgpe=instdat.split(">")
if (len(instgpe))==1:
outn.write("----00" + instdat + "\n")#
instcnt += 1
autostpflg=1
else:
try:
IOpnk=IOmapwrite[instgpe[1]]
outn.write("----00" + IOpnk + "\n")
except KeyError:
#print "ERROR: IO write shortcut: \"" + instgpe[1] + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP"
complog("ERROR: IO write shortcut: \"" + instgpe[1] + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP \n")
sys.exit("ERROR: IO write shortcut: \"" + instgpe[1] + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP")
instcnt += 1
#instcnt += 1
elif instword=="IOwrite2":
#outn.write("----0+" + instdat + "\n")
#instcnt += 1
instgpe=instdat.split(">")
if (len(instgpe))==1:
outn.write("----0+" + instdat + "\n")#
instcnt += 1
autostpflg=1
else:
try:
IOpnk=IOmapwrite[instgpe[1]]
outn.write("----0+" + IOpnk + "\n")
except KeyError:
#print "ERROR: IO write shortcut: \"" + instgpe[1] + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP"
complog("ERROR: IO write shortcut: \"" + instgpe[1] + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP \n")
sys.exit("ERROR: IO write shortcut: \"" + instgpe[1] + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP")
instcnt += 1
elif instword=="regswap":
outn.write("----+-" + instdat + "\n")
instcnt += 1
elif instword=="copy1to2":
outn.write("----+0" + instdat + "\n")
instcnt += 1
elif instword=="copy2to1":
outn.write("----++" + instdat + "\n")
instcnt += 1
elif instword=="invert1":
outn.write("---0--" + instdat + "\n")
instcnt += 1
elif instword=="invert2":
outn.write("---0-0" + instdat + "\n")
instcnt += 1
elif instword=="add":
outn.write("---0-+" + instdat + "\n")
instcnt += 1
elif instword=="subtract":
outn.write("---00-" + instdat + "\n")
instcnt += 1
elif instword=="multiply":
outn.write("---000" + instdat + "\n")
instcnt += 1
elif instword=="divide":
outn.write("---00+" + instdat + "\n")
instcnt += 1
elif instword=="setreg1":
outn.write("---0+-" + instdat + "\n")
instcnt += 1
elif instword=="setreg2":
outn.write("---0+0" + instdat + "\n")
instcnt += 1
elif instword=="setinst":
instgpe=instdat.split(">")
if (len(instgpe))==1:
outn.write("---0++" + instdat + "\n")#
instcnt += 1
autostpflg=1
else:
gtpoint=instgpe[1]
gtmatch=0
instcnt += 1
for fx in gotoreflist:
if fx.gtname==gtpoint:
outn.write("---0++" + fx.tline + "\n")
gtmatch=1
if gtmatch==0:
#print "ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP"
complog("ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP \n")
sys.exit("ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP")
elif instword=="setdata":
instgpe=instdat.split(">")
if (len(instgpe))==1:
outn.write("---+--" + instdat + "\n")#
instcnt += 1
autostpflg=1
else:
gtpoint=instgpe[1]
gtmatch=0
instcnt += 1
for fx in gotoreflist:
if fx.gtname==gtpoint:
outn.write("---+--" + fx.tline + "\n")
gtmatch=1
if gtmatch==0:
#print "ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP"
complog("ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP \n")
sys.exit("ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP")
#----jump in used opcodes----
elif instword=="continue":
outn.write("---+++" + instdat + "\n")
instcnt += 1
#color drawing
elif instword=="colorpixel":
outn.write("--0---" + instdat + "\n")
instcnt += 1
elif instword=="setcolorreg":
instclst=instdat.split(',')
if len(instclst)==3:
vxR=libSBTCVM.codeshift(instclst[0])
vxB=libSBTCVM.codeshift(instclst[1])
vxG=libSBTCVM.codeshift(instclst[2])
outn.write("--0--0" + ("000" + vxR + vxB + vxG) + "\n")
else:
outn.write("--0--0" + instdat + "\n")
instcnt += 1
elif instword=="colorfill":
instclst=instdat.split(',')
if len(instclst)==3:
vxR=libSBTCVM.codeshift(instclst[0])
vxB=libSBTCVM.codeshift(instclst[1])
vxG=libSBTCVM.codeshift(instclst[2])
outn.write("--0--+" + ("000" + vxR + vxB + vxG) + "\n")
else:
outn.write("--0--+" + instdat + "\n")
instcnt += 1
elif instword=="setcolorvect":
outn.write("--0-0-" + instdat + "\n")
instcnt += 1
elif instword=="colorline":
outn.write("--0-00" + instdat + "\n")
instcnt += 1
elif instword=="colorrect":
outn.write("--0-0+" + instdat + "\n")
instcnt += 1
#mono drawing
elif instword=="monopixel":
outn.write("--0-+-" + instdat + "\n")
instcnt += 1
elif instword=="monofill":
outn.write("--0-+0" + instdat + "\n")
instcnt += 1
elif instword=="setmonovect":
outn.write("--0-++" + instdat + "\n")
instcnt += 1
elif instword=="monoline":
outn.write("--00--" + instdat + "\n")
instcnt += 1
elif instword=="monorect":
outn.write("--00-0" + instdat + "\n")
instcnt += 1
#----opcode --00-+ unused----
elif instword=="stop":
outn.write("--000-" + instdat + "\n")
instcnt += 1
autostpflg=1
elif instword=="null":
outn.write("000000" + instdat + "\n")
instcnt += 1
elif instword=="gotodata":
instgpe=instdat.split(">")
autostpflg=1
if (len(instgpe))==1:
outn.write("--000+" + instdat + "\n")#
instcnt += 1
else:
gtpoint=instgpe[1]
gtmatch=0
instcnt += 1
for fx in gotoreflist:
if fx.gtname==gtpoint:
outn.write("--000+" + fx.tline + "\n")
gtmatch=1
if gtmatch==0:
#print "ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP"
complog("ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP \n")
sys.exit("ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP")
elif instword=="gotoreg1":
outn.write("--00+-" + instdat + "\n")
instcnt += 1
autostpflg=1
elif instword=="gotodataif":
instgpe=instdat.split(">")
autostpflg=1
if (len(instgpe))==1:
outn.write("--00+0" + instdat + "\n")#
instcnt += 1
else:
gtpoint=instgpe[1]
gtmatch=0
instcnt += 1
for fx in gotoreflist:
if fx.gtname==gtpoint:
outn.write("--00+0" + fx.tline + "\n")
gtmatch=1
if gtmatch==0:
#print "ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP"
complog("ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP \n")
sys.exit("ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP")
elif instword=="gotoifgreater":
instgpe=instdat.split(">")
autostpflg=1
if (len(instgpe))==1:
outn.write("--0+0-" + instdat + "\n")#
instcnt += 1
else:
gtpoint=instgpe[1]
gtmatch=0
instcnt += 1
for fx in gotoreflist:
if fx.gtname==gtpoint:
outn.write("--0+0-" + fx.tline + "\n")
gtmatch=1
if gtmatch==0:
#print "ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP"
complog("ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP \n")
sys.exit("ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP")
#instcnt += 1
elif instword=="wait":
outn.write("--00++" + instdat + "\n")
instcnt += 1
elif instword=="YNgoto":
instgpe=instdat.split(">")
if (len(instgpe))==1:
outn.write("--0+--" + instdat + "\n")#
instcnt += 1
autostpflg=1
else:
gtpoint=instgpe[1]
gtmatch=0
instcnt += 1
for fx in gotoreflist:
if fx.gtname==gtpoint:
outn.write("--0+--" + fx.tline + "\n")
gtmatch=1
if gtmatch==0:
#print "ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP"
complog("ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP \n")
sys.exit("ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP")
elif instword=="userwait":
outn.write("--0+-0" + instdat + "\n")
instcnt += 1
elif instword=="TTYclear":
outn.write("--0+-+" + instdat + "\n")
instcnt += 1
#----gap in used opcodes----
elif instword=="gotoA":
outn.write("--+---" + instdat + "\n")
instcnt += 1
autostpflg=1
elif instword=="gotoAif":
outn.write("--+--0" + instdat + "\n")
instcnt += 1
elif instword=="gotoB":
outn.write("--+--+" + instdat + "\n")
instcnt += 1
autostpflg=1
elif instword=="gotoBif":
outn.write("--+-0-" + instdat + "\n")
instcnt += 1
elif instword=="gotoC":
outn.write("--+-00" + instdat + "\n")
instcnt += 1
autostpflg=1
elif instword=="gotoCif":
outn.write("--+-0+" + instdat + "\n")
instcnt += 1
elif instword=="gotoD":
outn.write("--+-+-" + instdat + "\n")
instcnt += 1
autostpflg=1
elif instword=="gotoDif":
outn.write("--+-+0" + instdat + "\n")
instcnt += 1
elif instword=="gotoE":
outn.write("--+-++" + instdat + "\n")
instcnt += 1
autostpflg=1
elif instword=="gotoEif":
outn.write("--+0--" + instdat + "\n")
instcnt += 1
elif instword=="gotoF":
outn.write("--+0-0" + instdat + "\n")
instcnt += 1
autostpflg=1
elif instword=="gotoFif":
outn.write("--+0-+" + instdat + "\n")
instcnt += 1
#----gap in used opcodes----
elif instword=="dumpreg1":
outn.write("--++0+" + instdat + "\n")
instcnt += 1
elif instword=="dumpreg2":
outn.write("--+++-" + instdat + "\n")
instcnt += 1
elif instword=="TTYwrite":
#outn.write("--+++0" + instdat + "\n")
#instcnt += 1
instgpe=instdat.split(":")
if (len(instgpe))==1:
outn.write("--+++0" + instdat + "\n")
instcnt += 1
else:
if instgpe[1]=="enter":
ksc=" "
elif instgpe[1]=="space":
ksc="\n"
else:
ksc=(instgpe[1])[0]
outn.write("--+++0" + "000" + (libSBTCVM.charlook(ksc)) + "\n")
instcnt += 1
elif instword=="buzzer":
outn.write("--++++" + instdat + "\n")
instcnt += 1
elif instword=="setregset":
outn.write("-0-000" + instdat + "\n")
instcnt += 1
elif instword=="regset":
outn.write("-0-00+" + instdat + "\n")
instcnt += 1
elif instword=="setkeyint":
instgpe=instdat.split(":")
if (len(instgpe))==1:
outn.write("-0-+++" + instdat + "\n")
instcnt += 1
else:
if instgpe[1]=="space":
ksc=" "
elif instgpe[1]=="enter":
ksc="\n"
else:
ksc=(instgpe[1])[0]
outn.write("-0-+++" + "00000" + (libSBTCVM.texttoscan[ksc]) + "\n")
instcnt += 1
elif instword=="keyint":
instgpe=instdat.split(">")
if (len(instgpe))==1:
outn.write("-00---" + instdat + "\n")#
instcnt += 1
else:
gtpoint=instgpe[1]
gtmatch=0
instcnt += 1
for fx in gotoreflist:
if fx.gtname==gtpoint:
outn.write("-00---" + fx.tline + "\n")
gtmatch=1
if gtmatch==0:
#print "ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP"
complog("ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP \n")
sys.exit("ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP")
elif instword=="clearkeyint":
outn.write("-00--0" + instdat + "\n")
instcnt += 1
elif instword=="offsetlen":
instclst=instdat.split(",")
if len(instclst)==3:
tritgnd=instclst[0]
tritoffset=int(instclst[1])
tritlen=int(instclst[2])
if tritgnd=="on":
tritgndpar="+"
else:
tritgndpar="0"
if tritoffset==0:
tritoffsetpar="--"
elif tritoffset==1:
tritoffsetpar="-0"
elif tritoffset==2:
tritoffsetpar="-+"
elif tritoffset==3:
tritoffsetpar="0-"
elif tritoffset==4:
tritoffsetpar="00"
elif tritoffset==5:
tritoffsetpar="0+"
elif tritoffset==6:
tritoffsetpar="+-"
elif tritoffset==7:
tritoffsetpar="+0"
elif tritoffset==8:
tritoffsetpar="++"
else:
tritoffsetpar="--"
if tritlen==1:
tritlenpar="--"
elif tritlen==2:
tritlenpar="-0"
elif tritlen==3:
tritlenpar="-+"
elif tritlen==4:
tritlenpar="0-"
elif tritlen==5:
tritlenpar="00"
elif tritlen==6:
tritlenpar="0+"
elif tritlen==7:
tritlenpar="+-"
elif tritlen==8:
tritlenpar="+0"
elif tritlen==9:
tritlenpar="++"
else:
tritlenpar="++"
outn.write("-0-++0" + "0000" + tritgndpar + tritoffsetpar + tritlenpar + "\n")
else:
outn.write("-0-++0" + instdat + "\n")
instcnt += 1
#special regset shortcut commands
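		#each of these shortcuts expands to two ROM words: a setregset
		#("-0-000") selecting the register address, followed by a regset
		#("-0-00+") carrying the value - hence instcnt += 2.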
elif instword=="TTYbg":
instclst=instdat.split(",")
if len(instclst)==3:
vxR=libSBTCVM.codeshift(instclst[0])
vxB=libSBTCVM.codeshift(instclst[1])
vxG=libSBTCVM.codeshift(instclst[2])
outn.write("-0-000" + "---------" + "\n")
outn.write("-0-00+" + ("000" + vxR + vxB + vxG) + "\n")
else:
outn.write("-0-000" + "---------" + "\n")
outn.write("-0-00+" + instdat + "\n")
instcnt += 2
elif instword=="TTYlinedraw":
if instdat=="on":
outn.write("-0-000" + "--------0" + "\n")
outn.write("-0-00+" + "00000000+" + "\n")
elif instdat=="off":
outn.write("-0-000" + "--------0" + "\n")
outn.write("-0-00+" + "000000000" + "\n")
else:
outn.write("-0-000" + "--------0" + "\n")
outn.write("-0-00+" + "00000000+" + "\n")
instcnt += 2
elif instword=="TTYmode":
if instdat=="27":
outn.write("-0-000" + "--------+" + "\n")
outn.write("-0-00+" + "00000000+" + "\n")
elif instdat=="54":
outn.write("-0-000" + "--------+" + "\n")
outn.write("-0-00+" + "000000000" + "\n")
else:
outn.write("-0-000" + "--------+" + "\n")
outn.write("-0-00+" + "000000000" + "\n")
instcnt += 2
elif instword=="threadref":
instcnt += 1
if len(instdat)==2:
outn.write("--+00-" + "0000000" + instdat + "\n")
else:
outn.write("--+00-" + instdat + "\n")
elif instword=="threadstart":
instgpe=instdat.split(">")
if (len(instgpe))==1:
outn.write("--+000" + instdat + "\n")#
instcnt += 1
autostpflg=1
else:
gtpoint=instgpe[1]
gtmatch=0
instcnt += 1
for fx in gotoreflist:
if fx.gtname==gtpoint:
outn.write("--+000" + fx.tline + "\n")
gtmatch=1
if gtmatch==0:
#print "ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP"
complog("ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP \n")
sys.exit("ERROR: pointer: \"" + gtpoint + "\" Pointed at by: \"" + instword + "\" At line: \"" + str(srcline) + "\", not found. STOP")
elif instword=="threadstop":
instcnt += 1
outn.write("--+00+" + instdat + "\n")
elif instword=="threadkill":
instcnt += 1
outn.write("--+0+-" + instdat + "\n")
else:
gtflag=0
if gtflag==1 and (txtblk==0 or linenraw=="textstart"):
complog("pass 2: srcline:" + str(srcline) + " instcnt:" + str(instcnt) + " inst:" + instword + " instdat:" + instdat + "\n")
elif gtflag==1 and txtblk==1:
complog("TEXTBLOCK: pass 2 : srcline:" + str(srcline) + " instcnt:" + str(instcnt) + " textline: \"" + linenraw + "\"\n")
if instcnt>assmoverrun:
#print("ERROR!: assembler has exceded rom size limit of 19683!")
complog("ERROR!: assembler has exceded rom size limit of 19683! \n")
sys.exit("ERROR!: assembler has exceded rom size limit of 19683!")
if txtblk==1:
print "WARNING: unclosed Text block!"
complog("WARNING: unclosed Text block!\n")
if instcnt==0:
#print "ERROR: No instructions found. nothing to compile."
complog("ERROR: No instructions found. nothing to compile. /n")
sys.exit("ERROR: No instructions found. nothing to compile.")
if autostpflg==0 and instcnt<19683:
print "NOTICE: no explicit goto or stop instruction at end of program. SBTCVM-asm will add a stop automatically."
complog("NOTICE: no explicit goto or stop instruction at end of program. SBTCVM-asm will add a stop automatically.\n")
outn.write("--000-" + "000000000" + "\n")
instcnt += 1
instpad=instcnt
while instpad!=19683 and instcnt<19684:
outn.write("000000" + "000000000" + "\n")
instpad += 1
outn.close()
instextra=(instpad - instcnt)
print ("SBTCVM Mk 2 assembly file \"" + assmflename + "\" has been compiled into: \"" + outfile + "\"")
complog("SBTCVM Mk 2 assembly file \"" + assmflename + "\" has been compiled into: \"" + outfile + "\"\n")
if tracecomp==1:
print "tracelog enabled. log file: \"" + (os.path.join('CAP', logsub)) + "\""
print ("total instructions: " + str(instcnt))
complog("total instructions: " + str(instcnt) + "\n")
print ("extra space: " + str(instextra))
complog ("extra space: " + str(instextra) + "\n")
else:
print "tip: use SBTCVM-asm2.py -h for help."
| ThomasTheSpaceFox/SBTCVM-Mark-2 | SBTCVM-asm2.py | Python | gpl-3.0 | 33,216 |
#!/usr/bin/python
#
# generate_nametags_with_barcodes.py
# Copyright (C) 2016 Sandeep M
#
# every year an elementary school in california runs a festival where families
# sign up for parties and events, as well as bid for auctions and donations.
# each family is issued some stickers with unique barcode to make it easier
# to sign up.
#
# i couldn't figure out how to get avery on-line mailmerge to do all i wanted
# (scale fonts to fit, conditionally print parent's names, repeat labels etc)
# so here we are.
#
# uses:
# pylabels, a Python library to create PDFs for printing labels.
# Copyright (C) 2012, 2013, 2014 Blair Bonnett
#
# ReportLab open-source PDF Toolkit
# (C) Copyright ReportLab Europe Ltd. 2000-2015
#
# openpyxl, a Python library to read/write Excel 2010 xlsx/xlsm/xltx/xltm files.
#
# generate_nametags_with_barcodes.py is free software:
# you can redistribute it and/or modify it under the terms of the
# GNU General Public License as published by the Free Software Foundation,
# either version 3 of the License, or (at your option) any later version.
#
# generate_nametags_with_barcodes.py is distributed in the hope that it
# will be useful, but WITHOUT ANY # WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
# ok, here we go:
from reportlab.graphics import renderPDF
from reportlab.graphics import shapes
from reportlab.graphics.barcode import code39, code128, code93
from reportlab.graphics.barcode import eanbc, qr, usps
from reportlab.graphics.shapes import Drawing
from reportlab.lib import colors
from reportlab.lib.pagesizes import letter
from reportlab.lib.units import mm, inch
from reportlab.pdfbase.pdfmetrics import registerFont, stringWidth
from reportlab.pdfbase.ttfonts import TTFont
from reportlab.pdfgen import canvas
from reportlab.graphics.barcode import getCodes, getCodeNames, createBarcodeDrawing
import labels
import os.path
import random
random.seed(187459)
# for excel reading
from openpyxl import load_workbook
from pprint import pprint
# for utils
from collections import OrderedDict
import re
#----------------------------------------------------------------------
# Create a page based on Avery 5160:
# portrait (8.5" X 11") sheets with 3 columns and 10 rows of labels.
#
#----------------------------------------------------------------------
def createAvery5160Spec():
f = 25.4 # conversion factor from inch to mm
# Compulsory arguments.
sheet_width = 8.5 * f
sheet_height = 11.0 * f
columns = 3
rows = 10
label_width = 2.63 * f
label_height = 1.00 * f
# Optional arguments; missing ones will be computed later.
left_margin = 0.19 * f
column_gap = 0.12 * f
right_margin = 0
top_margin = 0.50 * f
row_gap = 0
bottom_margin = 0
# Optional arguments with default values.
left_padding = 1
right_padding = 1
top_padding = 1
bottom_padding = 1
corner_radius = 2
padding_radius = 0
background_filename="bg.png"
#specs = labels.Specification(210, 297, 3, 8, 65, 25, corner_radius=2)
# units = mm !
specs = labels.Specification(
sheet_width, sheet_height,
columns, rows,
label_width, label_height,
left_margin = left_margin ,
column_gap = column_gap ,
# right_margin = right_margin ,
top_margin = top_margin ,
row_gap = row_gap ,
# bottom_margin = bottom_margin ,
left_padding = left_padding ,
right_padding = right_padding ,
top_padding = top_padding ,
bottom_padding = bottom_padding ,
corner_radius = corner_radius ,
padding_radius = padding_radius ,
#background_filename=background_filename,
)
return specs
#----------------------------------------------------------------------
# adjust fontsize down until it fits a width/height limit
# should really search for the value directly instead of timidly creeping towards the target
#----------------------------------------------------------------------
def fit_text_in_area(the_text,font_name,text_width_limit,text_height_limit):
font_size = text_height_limit
text_width = stringWidth(the_text, font_name, font_size)
while ((text_width > text_width_limit) or (font_size > text_height_limit)):
font_size *= 0.95
text_width = stringWidth(the_text, font_name, font_size)
s = shapes.String(0, 0, the_text, fontName=font_name, fontSize=font_size, textAnchor="start")
#pprint("text_height_limit = " + str(text_height_limit))
#pprint(s.dumpProperties())
#pprint(s)
return s
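    # A possible closed-form alternative (untested sketch): ReportLab string
    # widths scale linearly with font size, so a single division could replace
    # the 5% shrink loop above:
    #   w = stringWidth(the_text, font_name, text_height_limit)
    #   font_size = min(text_height_limit,
    #                   text_height_limit * text_width_limit / w)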
#----------------------------------------------------------------------
# generate strings of family name from line data
#----------------------------------------------------------------------
def get_labels_from_data (data):
# special pattern to produce blank barcodes
pattern_to_blank = "Zzzzzzz"
#print("write_data")
#pprint(data)
# section1: the actual barcode
num1 = data['parent_id_for_sticker'][0]
#if (num1 > 10000): num1 -= 10000 # DEBUG
# WORKAROUND FOR BUG: the id sometimes has a 0.5 at the end because of the way records were split
#num1 = int(num1) + 1
# section2: family name
str1 = data['child_last_name'][0]
if (pattern_to_blank in str1): str1 = " "
# section3: parent names with & as joiner
str2 = conjunction(data['parent_first_name'])
if (pattern_to_blank in str2): str2 = " "
# section4: child's names
str3 = conjunction(data['child_first_name'])
if (pattern_to_blank in str3): str3 = " "
# section 4 : label number
#str4 = str(data['index']+1) + "/" + str(data['number_of_stickers'] )
str4 = " "
return (num1, str1, str2, str3, str4)
#----------------------------------------------------------------------
# http://stackoverflow.com/questions/21217846/python-join-list-of-strings-with-comma-but-with-some-conditions-code-refractor
#----------------------------------------------------------------------
def conjunction(l, threshold = 5):
    length = len(l)
    l = list(map(str, l))
    if length <= 2: return " & ".join(l)
    elif length < threshold: return ", ".join(l[:-1]) + " & " + l[-1]
    elif length == threshold: return ", ".join(l[:-1]) + " & 1 other"
    else: return ", ".join(l[:threshold-1]) + " & +{} others".format(length - (threshold - 1))
#----------------------------------------------------------------------
# adjust str height if there are any low-hanging letters (ie decenders)
#----------------------------------------------------------------------
def get_font_height(size,str):
pattern = re.compile(r'[gjpqy]')
if pattern.findall(str):
size *= 1.1
return size
#----------------------------------------------------------------------
# Create a callback function to draw each label.
# This will be given the ReportLab drawing object to draw on,
# the dimensions in points, and the data to put on the nametag
#----------------------------------------------------------------------
def write_data(label, width, height, data):
(num1, str1, str2, str3, str4) = get_labels_from_data(data)
pad = 10;
# section 1 : barcode
D = Drawing(width,height)
d = createBarcodeDrawing('Code128', value=num1, barHeight=0.4*inch, humanReadable=True, quiet=False)
#d = createBarcodeDrawing('I2of5', value=the_num, barHeight=10*mm, humanReadable=True)
barcode_width = d.width
barcode_height = d.height
#d.rotate(-90)
#d.translate( - barcode_height ,pad) # translate
d.translate( width-barcode_width-pad/2.0 ,0) # translate
#pprint(d.dumpProperties())
#D.add(d)
#label.add(D)
label.add(d)
rect = shapes.Rect(0, pad, barcode_width + pad, barcode_height+pad)
rect.fillColor = None
rect.strokeColor = random.choice((colors.blue, colors.red, colors.green))
#rect.strokeWidth = d.borderStrokeWidth
#label.add(rect)
# section 2 : room number
#the_text = "gr" + str(data['youngest_child_grade']) + " rm" + str(data['youngest_child_room'])
#label.add(shapes.String(15, height-15, the_text, fontName="Judson Bold", fontSize=8, textAnchor="start"))
# section2: family name
# Measure the width of the name and shrink the font size until it fits.
font_name = "Judson Bold"
font_name = "PermanentMarker"
# Measure the width of the name and shrink the font size until it fits.
# try out 2 options and select the one that gives a taller font
text_width_limit = width - barcode_width - pad
text_height_limit = height / 2.0;
s1 = fit_text_in_area(str1,font_name,text_width_limit,text_height_limit)
text_width_limit = width - pad
text_height_limit = height - barcode_height
s2 = fit_text_in_area(str1,font_name,text_width_limit,text_height_limit)
if (s1.fontSize >= s2.fontSize): s = s1
else: s = s2
s.x = pad/2.0
s.y = height - s.fontSize + pad / 2.0
s.textAnchor = "start"
label.add(s)
family_name_height = get_font_height(s.fontSize,str1)
family_name_width = stringWidth(str1,font_name,s.fontSize)
# section3: parent names
text_width_limit = width - barcode_width - 2 * pad
text_height_limit = (height - family_name_height)/2.0
font_name = "Judson Bold"
s = fit_text_in_area(str2,font_name,text_width_limit,text_height_limit)
s.x = pad/2.0
s.y = height - family_name_height - s.fontSize + pad/2.0
s.textAnchor = "start"
label.add(s)
parent_name_height = get_font_height(s.fontSize,str2)
# section4: child's names
text_width_limit = width - barcode_width - 2 * pad
text_height_limit = height - family_name_height - parent_name_height
font_name = "Judson Bold"
s = fit_text_in_area(str3,font_name,text_width_limit,text_height_limit)
s.x = pad/2.0
s.y = height - family_name_height - parent_name_height - s.fontSize + pad/2.0
s.textAnchor = "start"
label.add(s)
child_name_height = s.fontSize
# section 4 : label number
font_name = "Judson Bold"
font_size = 5
s = shapes.String(width, height - font_size, str4, fontName=font_name, fontSize=font_size, textAnchor="end")
#s.x = width
#s.y = 0
#s.textAnchor = "start"
#label.add(s)
# section 5 : logo
s = shapes.Image(0, 0, 25, 25, "logo.jpg")
s.x = width - barcode_width - (barcode_width-25)/2.0 + 1
s.y = height - pad - 15
# enough space?
if ((width - family_name_width - pad) > barcode_width):
label.add(s)
# section 6 : "anon" label for WHP
#if (num1 == 6710):
# s = shapes.Image(0, 0, 57, 34, "whp-logo.png")
# s.x = barcode_width + pad/2.0
# s.y = pad
# #label.add(s)
#----------------------------------------------------------------------
# helper to catch blank fields in excel file
#----------------------------------------------------------------------
def is_number(s):
if (s is None):
return False
try:
float(s)
return True
except ValueError:
pass
try:
import unicodedata
unicodedata.numeric(s)
return True
except (TypeError, ValueError):
pass
return False
#----------------------------------------------------------------------
#
# create a dict from excel row, assuming all the headers match up order below
#
#----------------------------------------------------------------------
def process_one_record(tag,k,v):
if (len(v) >= 11):
LABELS = """
grade
child_last_name
child_first_name
parent_last_name
parent_first_name
parent_id_for_sticker
phone
email
teacher
room
number_of_stickers
"""
labels=LABELS.split()
line_item = dict(zip(labels,v))
print("one item: len = ", len(v) )
pprint(line_item)
id = line_item['parent_id_for_sticker']
# only store lines with valid id
if (not is_number(id)):
return
if (id == 0):
return
items = {}
if (tag.get(id) is None):
for key in labels:
items[key] = [line_item[key]]
else:
old_items = tag[id]
for key in labels:
items[key] = old_items[key] + [line_item[key]]
tag[id] = items
return
#----------------------------------------------------------------------
#
# slurp in the excel file and return a dict for easy processing
#
#----------------------------------------------------------------------
def print_one_tag(items):
sheet.add_label(items)
# # only print record with > 0 number of stickers
# # otherwise, print a minimum of 3 labels
# # align number of stickers to be easily cut, ie, multiples of 3
#
#
# # see http://stackoverflow.com/questions/9810391/round-to-the-nearest-500-python
# line_item['number_of_stickers'] = 1 # DEBUG OVERRIDE
# c = 3 # number of columns
# x = line_item.get('number_of_stickers')
# if (is_number(x) and (x != 0)):
# if (x < c ): x = c
# else: x = x + (c - x) % c
# line_item['number_of_stickers'] = x
#
#
#----------------------------------------------------------------------
#
# slurp in the excel file and return a dict for easy processing
#
#----------------------------------------------------------------------
def load_records_from_excel(data_file, sheet_name):
# load excel file--hardcoded name of workbook
wb = load_workbook(filename=data_file, read_only=True)
ws = wb[sheet_name]
# now store this in a dict with row number as the key
records = {}
for row in ws.rows:
        index = tuple( cell.row for cell in row)[3] # pick one col which definitely has a value
records[index] = tuple( cell.value for cell in row)
return records
#----------------------------------------------------------------------
#
# process records using a helper for each one
# collect multiple records that share parent id and produce 1-to-1
# map with labels
#
#----------------------------------------------------------------------
def process_records (records):
tag = {}
record_limit = 1e6 # useful for testing and runaway bugs
count = 0
for k,v in records.items():
process_one_record(tag,k,v)
count += 1
if (count >= record_limit):
break
print("processed " , count, " records ")
return tag
#----------------------------------------------------------------------
#
# check tag id compaction
#
#----------------------------------------------------------------------
def fix_tags (tag):
count = 0
# remove multiple values by ordered uniq list, see:
# http://stackoverflow.com/questions/480214/how-do-you-remove-duplicates-from-a-list-in-python-whilst-preserving-order
for id,items in tag.items():
for k,v in items.items():
items[k] = list(OrderedDict.fromkeys(v))
tag[id] = items
# can we limit id to some threshold?
limit = 10000
for id,items in tag.items():
if (id > limit):
id_short = id - limit
if (tag.get(id_short) is not None):
print("fix_tags: CLASH for ", limit, " for id= ", id)
# validation tests
for id,items in tag.items():
if (len(items['child_last_name']) > 1):
print("fix_tags: entry " , id, " has children with different last name" , items['child_last_name'])
for id,items in tag.items():
if (len(items['parent_first_name']) == 0):
print("fix_tags: entry " , id, " has no parents")
# children sometimes have names like "beiber gomez" which combine
# parent names. we try to catch cases where the child last name is
# completely different from single parent
for id,items in tag.items():
if (len(items['parent_first_name']) == 1):
parent_first_name = items['parent_first_name'][0]
parent_last_name = items['parent_last_name'][0]
child_last_name = items['child_last_name'][0]
if ((parent_last_name not in child_last_name) and
child_last_name not in parent_last_name):
print("fix_tags: entry " , id, " has 1 parent with different last name from child: parent_last_name = " , parent_last_name, " child_last_name = ", child_last_name)
# append single parent last name into first
parent_first_last_name = parent_first_name + " " + parent_last_name
items['parent_first_name'] = [parent_first_last_name]
print("fix_tags: new parent name is ", parent_first_last_name)
tag[id] = items
return tag
#----------------------------------------------------------------------
#
# print by columns
# print out process records (ie tags) sorted by first child's last name
#
#----------------------------------------------------------------------
def print_tags_by_column (tag):
count = 0
# sorting is complex because some last names have multiple words
# and we want to sort by num of stickers then names
sorted_items_list = tag.values()
sorted_items_list = sorted(sorted_items_list, key=lambda items:
items['child_last_name'][0].split()[-1]
)
sorted_items_list = sorted(sorted_items_list, key=lambda items:
items['number_of_stickers']
)
# duplicate entries for tags requiring extra column of stickers
# OVERRIDE number_of_stickers!
# convert number_of_stickers from 15,30,60 to 20,40,80 to make it easier
# to cut: each column of stickers is 10
duplicate_items_list = []
for items in sorted_items_list:
number_of_stickers = max(items['number_of_stickers'])
number_of_column = 1 + int((number_of_stickers) / 15.0)
#if (number_of_column < 1): number_of_column = 1
for i in range(number_of_column):
duplicate_items_list.append(items)
# output stickers 3 at a time across for 10 in a column
number_of_stickers_per_column = 10
number_of_stickers_per_row = 3
for i in range(0,len(duplicate_items_list),number_of_stickers_per_row):
for j in range(number_of_stickers_per_column):
for k in range(number_of_stickers_per_row):
index = i+k
if (index >= len(duplicate_items_list)):
index = len(duplicate_items_list) - 1
print_one_tag(duplicate_items_list[index])
count += 1
print("printed " , count, " stickers")
return count
#----------------------------------------------------------------------
#
# print by row
# print out process records (ie tags) sorted by first child's last name
#
#----------------------------------------------------------------------
def print_tags_by_row (tag):
number_of_stickers_per_column = 10
number_of_stickers_per_row = 3
number_of_stickers_per_page = number_of_stickers_per_column * number_of_stickers_per_row
# sorting is complex because some last names have multiple words
sorted_items_list = sorted(tag.values(), key=lambda
items: items['child_last_name'][0].split()[-1])
# duplicate entries for tags requiring extra column of stickers
# OVERRIDE number_of_stickers!
# convert number_of_stickers from 15,30,60 to 20,40,80 to make it easier
# to cut: each column of stickers is 10
duplicate_items_list = []
for items in sorted_items_list:
number_of_stickers = max(items['number_of_stickers'])
number_of_stickers = number_of_stickers_per_row * int(number_of_stickers / number_of_stickers_per_row)
if (number_of_stickers < number_of_stickers_per_row): number_of_stickers = number_of_stickers_per_row
for i in range(number_of_stickers):
duplicate_items_list.append(items)
# output stickers 3 at a time across for 10 in a column
sticker_count = 0
for items in duplicate_items_list:
print_one_tag(items)
sticker_count += 1
page_count = sticker_count / number_of_stickers_per_page
print("printed " , sticker_count, " stickers in ", page_count , " pages")
return sticker_count
#----------------------------------------------------------------------
#
# single tags for debug
#
#----------------------------------------------------------------------
def debug_print_tags (tag):
count = 0
sorted_items_list = sorted(tag.values(), key=lambda
items: items['child_last_name'][0].split()[-1])
for items in sorted_items_list:
print_one_tag(items)
count += 1
print("printed " , count, " stickers")
return count
#----------------------------------------------------------------------
#
# main
#
#----------------------------------------------------------------------
# single barcode per person or actually follow number_of_barcodes
DEBUG_PRINT = 0
PRINT_BY_ROW = 0
# register some fonts, assumed to be in the same dir as this script
base_path = os.path.dirname(__file__)
font_path = os.path.join(base_path, "fonts")
registerFont(TTFont('Judson Bold', os.path.join(font_path, 'Judson-Bold.ttf')))
registerFont(TTFont('KatamotzIkasi', os.path.join(font_path, 'KatamotzIkasi.ttf')))
registerFont(TTFont('Magnus Cederholm', os.path.join(font_path, 'FFF_Tusj.ttf')))
registerFont(TTFont('PermanentMarker', os.path.join(font_path, 'PermanentMarker.ttf')))
# load excel and loop through rows
data_file = 'Fallfest Barcode File.xlsx'
sheet_name = 'Barcodes'
# parse data and create
records = load_records_from_excel(data_file, sheet_name)
tag = process_records(records)
tag = fix_tags(tag)
# create the sheet with callback function write_data to process each record
specs = createAvery5160Spec()
sheet = labels.Sheet(specs, write_data, border=True)
if (DEBUG_PRINT):
debug_print_tags(tag)
else:
if (PRINT_BY_ROW):
print_tags_by_row(tag)
else:
print_tags_by_column(tag)
#endif
#endif
# save results in pdf
sheet.save('nametags.pdf')
print("{0:d} label(s) output on {1:d} page(s).".format(sheet.label_count, sheet.page_count))
| d-e-e-p/generate_nametags_with_barcodes | generate_nametags_with_barcodes.py | Python | gpl-3.0 | 22,514 |
#!/usr/bin/env python3
'''Test on server shutdown when a zone transaction is open.'''
import psutil
from dnstest.libknot import libknot
from dnstest.test import Test
from dnstest.utils import *
t = Test()
knot = t.server("knot")
zone = t.zone("example.com.")
t.link(zone, knot)
ctl = libknot.control.KnotCtl()
t.start()
ctl.connect(os.path.join(knot.dir, "knot.sock"))
ctl.send_block(cmd="zone-begin", zone=zone[0].name)
ctl.receive_block()
ctl.send(libknot.control.KnotCtlType.END)
ctl.close()
knot.stop()
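# The open zone transaction must not block shutdown: after a short grace
# period the daemon process should have exited on its own.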
t.sleep(1)
if psutil.pid_exists(knot.proc.pid):
set_err("Server still running")
t.end()
| CZ-NIC/knot | tests-extra/tests/ctl/shutdown/test.py | Python | gpl-3.0 | 609 |
# Author: echel0n <[email protected]>
# URL: https://sickrage.ca
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
class SickRageException(Exception):
"""
Generic SiCKRAGE Exception - should never be thrown, only sub-classed
"""
class AuthException(SickRageException):
"""
    Your authentication information is incorrect
"""
class CantRefreshShowException(SickRageException):
"""
The show can't be refreshed right now
"""
class CantRemoveShowException(SickRageException):
"""
    The show can't be removed right now
"""
class CantUpdateShowException(SickRageException):
"""
The show can't be updated right now
"""
class EpisodeDeletedException(SickRageException):
"""
This episode has been deleted
"""
class EpisodeNotFoundException(SickRageException):
"""
The episode wasn't found on the Indexer
"""
class EpisodePostProcessingFailedException(SickRageException):
"""
The episode post-processing failed
"""
class EpisodeDirectoryNotFoundException(SickRageException):
"""
The episode directory was not found
"""
class FailedPostProcessingFailedException(SickRageException):
"""
The failed post-processing failed
"""
class MultipleEpisodesInDatabaseException(SickRageException):
"""
Multiple episodes were found in the database! The database must be fixed first
"""
class MultipleShowsInDatabaseException(SickRageException):
"""
Multiple shows were found in the database! The database must be fixed first
"""
class MultipleShowObjectsException(SickRageException):
"""
Multiple objects for the same show were found! Something is very wrong
"""
class NoNFOException(SickRageException):
"""
No NFO was found
"""
class ShowNotFoundException(SickRageException):
"""
The show wasn't found on the Indexer
"""
| gborri/SickRage | sickrage/core/exceptions/__init__.py | Python | gpl-3.0 | 2,553 |
import re
import asyncio
import threading
from collections import defaultdict
def connector(bot, dispatcher, NICK, CHANNELS, PASSWORD=None):
@bot.on('client_connect')
async def connect(**kwargs):
bot.send('USER', user=NICK, realname=NICK)
if PASSWORD:
bot.send('PASS', password=PASSWORD)
bot.send('NICK', nick=NICK)
# Don't try to join channels until the server has
# sent the MOTD, or signaled that there's no MOTD.
done, pending = await asyncio.wait(
[bot.wait("RPL_ENDOFMOTD"),
bot.wait("ERR_NOMOTD")],
loop=bot.loop,
return_when=asyncio.FIRST_COMPLETED
)
# Cancel whichever waiter's event didn't come in.
for future in pending:
future.cancel()
for channel in CHANNELS:
bot.send('JOIN', channel=channel)
@bot.on('client_disconnect')
async def reconnect(**kwargs):
        # Wait a few seconds so we don't flood the server with reconnects
await asyncio.sleep(5, loop=bot.loop)
# Schedule a connection when the loop's next available
bot.loop.create_task(bot.connect())
# Wait until client_connect has triggered
await bot.wait("client_connect")
@bot.on('ping')
def keepalive(message, **kwargs):
bot.send('PONG', message=message)
@bot.on('privmsg')
def message(host, target, message, **kwargs):
if host == NICK:
# don't process messages from the bot itself
return
if target == NICK:
# private message
dispatcher.handle_private_message(host, message)
else:
# channel message
dispatcher.handle_channel_message(host, target, message)
class Dispatcher(object):
def __init__(self, client):
self.client = client
self._callbacks = []
self.register_callbacks()
def _register_callbacks(self, callbacks):
"""\
Hook for registering custom callbacks for dispatch patterns
"""
self._callbacks.extend(callbacks)
def register_callbacks(self):
"""\
Hook for registering callbacks with connection -- handled by __init__()
"""
self._register_callbacks((
(re.compile(pattern), callback)
for pattern, callback in self.command_patterns()
))
def _process_command(self, nick, message, channel):
results = []
for pattern, callback in self._callbacks:
match = pattern.search(message) or pattern.search('/privmsg')
if match:
results.append(
callback(nick, message, channel, **match.groupdict()))
return results
def handle_private_message(self, nick, message):
for result in self._process_command(nick, message, None):
if result:
self.respond(result, nick=nick)
def handle_channel_message(self, nick, channel, message):
for result in self._process_command(nick, message, channel):
if result:
self.respond(result, channel=channel)
def command_patterns(self):
"""\
Hook for defining callbacks, stored as a tuple of 2-tuples:
return (
('/join', self.room_greeter),
            ('!find (?P<query>\S+)', self.handle_find),
)
"""
raise NotImplementedError
def respond(self, message, channel=None, nick=None):
"""\
Multipurpose method for sending responses to channel or via message to
a single user
"""
if channel:
if not channel.startswith('#'):
channel = '#%s' % channel
self.client.send('PRIVMSG', target=channel, message=message)
elif nick:
self.client.send('PRIVMSG', target=nick, message=message)
class Locker(object):
def __init__(self, delay=None, user=""):
self.delay = delay if delay or delay == 0 and type(delay) == int else 5
self.locked = False
def lock(self):
if not self.locked:
if self.delay > 0:
self.locked = True
t = threading.Timer(self.delay, self.unlock, ())
t.daemon = True
t.start()
return self.locked
def unlock(self):
self.locked = False
return self.locked
def cooldown(delay):
def decorator(func):
if not hasattr(func, "__cooldowns"):
func.__cooldowns = defaultdict(lambda: Locker(delay))
def inner(*args, **kwargs):
nick = args[1]
user_cd = func.__cooldowns[nick]
if user_cd.locked:
return
ret = func(*args, **kwargs)
user_cd.lock()
return ret
return inner
return decorator
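# Example wiring (hypothetical subclass and handler names), rate-limiting a
# command to one reply per nick every 30 seconds:
#
#     class MyDispatcher(Dispatcher):
#         def command_patterns(self):
#             return (
#                 ('^!hello', self.handle_hello),
#             )
#
#         @cooldown(30)
#         def handle_hello(self, nick, message, channel):
#             return 'hi, %s!' % nick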
| AiAeGames/DaniBot | dispatcher.py | Python | gpl-3.0 | 4,840 |
import unittest
from aiourlstatus import app
class TestEmpty(unittest.TestCase):
def test_no_urls(self):
data = ''
urls, len_urls = app.find_sort_urls(data)
self.assertEqual(urls, [])
self.assertEqual(len_urls, 0)
class TestTXT(unittest.TestCase):
def test_parse_text(self):
with open('tests/retest.txt') as f:
data = f.read()
urls, len_urls = app.find_sort_urls(data)
url_list = [['http://en.wikipedia.org/wiki/Body_image', 'http://en.wikipedia.org/wiki/Identity_formation',
'http://en.wikipedia.org/wiki/Self-confidence', 'http://en.wikipedia.org/wiki/Self-esteem'],
['http://www.bbc.com/sport/0/'], ['http://www.haskell.org/'], ['http://lxer.com/'],
['http://www.find-happiness.com/definition-of-happiness.html'],
['http://www.wikihow.com/Elevate-Your-Self-Esteem']]
self.assertCountEqual(urls, url_list)
if __name__ == '__main__':
unittest.main()
| riverrun/aiourlstatus | tests/parse_test.py | Python | gpl-3.0 | 990 |
# -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
# Copyright (C) Zing contributors.
#
# This file is a part of the Zing project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
import json
import operator
from functools import reduce
from django.core.exceptions import PermissionDenied
from django.db.models import ProtectedError, Q
from django.forms.models import modelform_factory
from django.http import Http404
from django.shortcuts import get_object_or_404
from django.utils.functional import cached_property
from django.views.generic import View
from pootle.core.http import (
JsonResponse, JsonResponseBadRequest, JsonResponseForbidden,
JsonResponseNotFound
)
class JSONDecodeError(ValueError):
pass
class APIView(View):
"""View to implement internal RESTful APIs.
Based on djangbone https://github.com/af/djangbone
"""
# Model on which this view operates. Setting this is required
model = None
# Base queryset for accessing data. If `None`, model's default manager will
# be used
base_queryset = None
# Set this to restrict the view to a subset of the available methods
restrict_to_methods = None
# Field names to be included
fields = ()
# Individual forms to use for each method. By default it'll auto-populate
# model forms built using `self.model` and `self.fields`
add_form_class = None
edit_form_class = None
# Permission classes implement logic to determine whether the request
# should be permitted. Empty list means no permission-checking.
permission_classes = []
# Tuple of sensitive field names that will be excluded from any serialized
# responses
sensitive_field_names = ('password', 'pw')
# Set to an integer to enable GET pagination
page_size = None
# HTTP GET parameter to use for accessing pages
page_param_name = 'p'
# HTTP GET parameter to use for search queries
search_param_name = 'q'
# Field names in which searching will be allowed
search_fields = None
@property
def allowed_methods(self):
        methods = [m for m in self.http_method_names if hasattr(self, m)]
        if self.restrict_to_methods is not None:
            restricted_to = [m.lower() for m in self.restrict_to_methods]
            methods = [m for m in methods if m in restricted_to]
        return methods
def __init__(self, *args, **kwargs):
if self.model is None:
raise ValueError('No model class specified.')
self.pk_field_name = self.model._meta.pk.name
if self.base_queryset is None:
self.base_queryset = self.model._default_manager
self._init_fields()
self._init_forms()
return super(APIView, self).__init__(*args, **kwargs)
def _init_fields(self):
if len(self.fields) < 1:
form = self.add_form_class or self.edit_form_class
if form is not None:
self.fields = form._meta.fields
        else: # Assume all fields by default
            self.fields = [f.name for f in self.model._meta.fields]
        self.serialize_fields = [f for f in self.fields if
                                 f not in self.sensitive_field_names]
def _init_forms(self):
if 'post' in self.allowed_methods and self.add_form_class is None:
self.add_form_class = modelform_factory(self.model,
fields=self.fields)
if 'put' in self.allowed_methods and self.edit_form_class is None:
self.edit_form_class = modelform_factory(self.model,
fields=self.fields)
@cached_property
def request_data(self):
try:
return json.loads(self.request.body)
except ValueError:
raise JSONDecodeError
def get_permissions(self):
"""Returns permission handler instances required for a particular view."""
return [permission() for permission in self.permission_classes]
def check_permissions(self, request):
"""Checks whether the view is allowed to process the request or not.
"""
for permission in self.get_permissions():
if not permission.has_permission(request, self):
raise PermissionDenied
def check_object_permissions(self, request, obj):
for permission in self.get_permissions():
if not permission.has_object_permission(request, self, obj):
raise PermissionDenied
def handle_exception(self, exc):
"""Handles response exceptions."""
if isinstance(exc, Http404):
return JsonResponseNotFound({
'msg': 'Not found',
})
if isinstance(exc, PermissionDenied):
return JsonResponseForbidden({
'msg': 'Permission denied.',
})
if isinstance(exc, JSONDecodeError):
return JsonResponseBadRequest({
'msg': 'Invalid JSON data',
})
raise
def dispatch(self, request, *args, **kwargs):
try:
self.check_permissions(request)
if request.method.lower() in self.allowed_methods:
handler = getattr(self, request.method.lower(),
self.http_method_not_allowed)
else:
handler = self.http_method_not_allowed
return handler(request, *args, **kwargs)
except Exception as exc:
return self.handle_exception(exc)
def get(self, request, *args, **kwargs):
"""GET handler."""
if self.kwargs.get(self.pk_field_name, None) is not None:
object = self.get_object()
return JsonResponse(self.object_to_values(object))
return self.get_collection(request, *args, **kwargs)
def get_object(self):
"""Returns a single model instance."""
obj = get_object_or_404(
self.base_queryset, pk=self.kwargs[self.pk_field_name],
)
self.check_object_permissions(self.request, obj)
return obj
def get_collection(self, request, *args, **kwargs):
"""Retrieve a full collection."""
return JsonResponse(self.qs_to_values(self.base_queryset))
def get_form_kwargs(self):
kwargs = {
'data': self.request_data,
}
if (self.pk_field_name in self.kwargs and
self.kwargs[self.pk_field_name] is not None):
kwargs.update({
'instance': self.get_object(),
})
return kwargs
def post(self, request, *args, **kwargs):
"""Creates a new model instance.
The form to be used can be customized by setting
`self.add_form_class`. By default a model form will be used with
the fields from `self.fields`.
"""
form = self.add_form_class(**self.get_form_kwargs())
if form.is_valid():
new_object = form.save()
return JsonResponse(self.object_to_values(new_object))
return self.form_invalid(form)
def put(self, request, *args, **kwargs):
"""Update the current model."""
if self.pk_field_name not in self.kwargs:
return self.status_msg('PUT is not supported for collections',
status=405)
form = self.edit_form_class(**self.get_form_kwargs())
if form.is_valid():
updated_object = form.save()
return JsonResponse(self.object_to_values(updated_object))
return self.form_invalid(form)
def delete(self, request, *args, **kwargs):
"""Delete the model and return its JSON representation."""
if self.pk_field_name not in kwargs:
return self.status_msg('DELETE is not supported for collections',
status=405)
obj = self.get_object()
try:
obj.delete()
return JsonResponse({})
        except ProtectedError as e:
            return self.status_msg(e.args[0], status=405)
def object_to_values(self, object):
"""Convert an object to values for serialization."""
return {
field: getattr(object, field) for field in self.serialize_fields
}
def qs_to_values(self, queryset):
"""Convert a queryset to values for further serialization.
An array of objects in `models` and the total object count in
`count` is returned.
"""
search_keyword = self.request.GET.get(self.search_param_name, None)
if search_keyword is not None:
filter_by = self.get_search_filter(search_keyword)
queryset = queryset.filter(filter_by)
values = queryset.values(*self.serialize_fields)
# Process pagination options if they are enabled
if isinstance(self.page_size, int):
try:
page_param = self.request.GET.get(self.page_param_name, 1)
page_number = int(page_param)
offset = (page_number - 1) * self.page_size
except ValueError:
offset = 0
values = values[offset:offset+self.page_size]
return_values = {
'models': list(values),
'count': queryset.count(),
}
return return_values
def get_search_filter(self, keyword):
search_fields = getattr(self, 'search_fields', None)
if search_fields is None:
search_fields = self.fields # Assume all fields
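        # Build one `<field>__icontains=<keyword>` lookup per searchable
        # field, then OR the resulting Q objects into a single filter.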
field_queries = list(
zip(map(lambda x: '%s__icontains' % x, search_fields),
(keyword,)*len(search_fields))
)
lookups = [Q(x) for x in field_queries]
return reduce(operator.or_, lookups)
def status_msg(self, msg, status=400):
return JsonResponse({'msg': msg}, status=status)
def form_invalid(self, form):
return JsonResponse({'errors': form.errors}, status=400)
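# Minimal usage sketch (hypothetical `Tag` model and URLconf); add/edit forms
# are auto-built from `fields` unless custom form classes are supplied:
#
#     class TagAPIView(APIView):
#         model = Tag
#         fields = ('name',)
#         restrict_to_methods = ('GET', 'POST')
#         page_size = 10
#         search_fields = ('name',)
#
#     urlpatterns = [
#         url(r'^tags/((?P<id>[0-9]+)/)?$', TagAPIView.as_view()),
#     ]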
| iafan/zing | pootle/core/views/api.py | Python | gpl-3.0 | 10,141 |
from datetime import timedelta
from django import template
import ws.utils.dates as date_utils
import ws.utils.perms as perm_utils
from ws import forms
from ws.utils.itinerary import get_cars
register = template.Library()
@register.inclusion_tag('for_templatetags/show_wimp.html')
def show_wimp(wimp):
return {
'participant': wimp,
}
@register.inclusion_tag('for_templatetags/trip_itinerary.html')
def trip_itinerary(trip):
"""Return a stripped form for read-only display.
Drivers will be displayed separately, and the 'accuracy' checkbox
isn't needed for display.
"""
if not trip.info:
return {'info_form': None}
info_form = forms.TripInfoForm(instance=trip.info)
info_form.fields.pop('drivers')
info_form.fields.pop('accurate')
return {'info_form': info_form}
@register.inclusion_tag('for_templatetags/trip_info.html', takes_context=True)
def trip_info(context, trip, show_participants_if_no_itinerary=False):
participant = context['viewing_participant']
# After a sufficiently long waiting period, hide medical information
# (We could need medical info a day or two after a trip was due back)
# Some trips last for multiple days (trip date is Friday, return is Sunday)
# Because we only record a single trip date, give a few extra days' buffer
is_old_trip = date_utils.local_date() > (trip.trip_date + timedelta(days=5))
return {
'trip': trip,
'participants': (
trip.signed_up_participants.filter(signup__on_trip=True).select_related(
'emergency_info__emergency_contact'
)
),
'trip_leaders': (
trip.leaders.select_related('emergency_info__emergency_contact')
),
'cars': get_cars(trip),
'show_participants_if_no_itinerary': show_participants_if_no_itinerary,
'hide_sensitive_info': is_old_trip,
'is_trip_leader': perm_utils.leader_on_trip(participant, trip),
}
| DavidCain/mitoc-trips | ws/templatetags/medical_tags.py | Python | gpl-3.0 | 1,991 |
#!/usr/bin/python
# -*- coding: windows-1252 -*-
import wxversion
wxversion.select('2.8')
import wx
import wx.aui
from id import *
from model import *
from graphic import *
from sql import *
from django import *
import sqlite3
from xml.dom import minidom
class MainFrame(wx.aui.AuiMDIParentFrame):
def __init__(self, app, posx, posy, sizex, sizey):
self.data = {}
self.locale = wx.Locale()
self.locale.AddCatalogLookupPathPrefix('./locale')
if app.config.Read("language"):
if app.config.Read("language") != 'English':
idioma = app.config.Read("language")
else:
idioma = ''
else:
idioma = 'es_ES'
app.config.Write("language", idioma)
app.config.Flush()
self.locale.AddCatalog(idioma)
for key, value in language.iteritems():
if value == idioma:
self.data["idioma"] = key
self.translation = wx.GetTranslation
self.app = app
        #--Start the parent with the Frame's position and title--#
wx.aui.AuiMDIParentFrame.__init__(self, None, -1, self.translation(archivo[TITULO]), pos = (posx, posy), size = (sizex, sizey))
        #--Set the CUC logo as the icon in the window's control box--#
ico = wx.Icon('images/mini_logo_cuc_trans.ico', wx.BITMAP_TYPE_ICO)
self.SetIcon(ico)
        #--Initialize wxPython's OGL library--#
ogl.OGLInitialize()
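        #--OGL must be initialized once before any OGL shapes or canvases are created--#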
#--MENU--#
        #File menu
self.menuFile = wx.Menu()
self.menuFile.Append(ID_CREAR_MODELO, self.translation(archivo[ID_CREAR_MODELO]), self.translation(archivoHelp[ID_CREAR_MODELO]))
self.menuFile.Append(ID_ABRIR_MODELO, self.translation(archivo[ID_ABRIR_MODELO]), self.translation(archivoHelp[ID_ABRIR_MODELO]))
self.menuFile.AppendSeparator()
self.menuFile.Append(ID_GUARDAR_MODELO, self.translation(archivo[ID_GUARDAR_MODELO]), self.translation(archivoHelp[ID_GUARDAR_MODELO]))
self.menuFile.Enable(ID_GUARDAR_MODELO, False)
self.menuFile.Append(ID_GUARDAR_COMO_MODELO, self.translation(archivo[ID_GUARDAR_COMO_MODELO]), self.translation(archivoHelp[ID_GUARDAR_COMO_MODELO]))
self.menuFile.Enable(ID_GUARDAR_COMO_MODELO, False)
self.menuFile.Append(ID_EXPORTAR_MODELO, self.translation(archivo[ID_EXPORTAR_MODELO]), self.translation(archivoHelp[ID_EXPORTAR_MODELO]))
self.menuFile.Enable(ID_EXPORTAR_MODELO, False)
self.menuFile.AppendSeparator()
self.menuFile.Append(ID_CERRAR_APLICACION, self.translation(archivo[ID_CERRAR_APLICACION]), self.translation(archivoHelp[ID_CERRAR_APLICACION]))
        #View menu
self.menuVer = wx.Menu()
self.refrescar = self.menuVer.Append(ID_MENU_VER_REFRESCAR, self.translation(archivo[ID_MENU_VER_REFRESCAR]), self.translation(archivoHelp[ID_MENU_VER_REFRESCAR]))
wx.EVT_MENU(self, ID_MENU_VER_REFRESCAR, self.Actualizar)
self.menuVer.AppendSeparator()
self.menuVerStandard = self.menuVer.Append(ID_MENU_VER_STANDARD, self.translation(archivo[ID_MENU_VER_STANDARD]), self.translation(archivoHelp[ID_MENU_VER_STANDARD]), kind=wx.ITEM_CHECK)
self.menuVerIdef1x = self.menuVer.Append(ID_MENU_VER_IDF1X, self.translation(archivo[ID_MENU_VER_IDF1X]), self.translation(archivoHelp[ID_MENU_VER_IDF1X]), kind=wx.ITEM_CHECK)
self.menuVer.AppendSeparator()
self.menuVerNav = self.menuVer.Append(ID_MENU_VER_NAV, self.translation(archivo[ID_MENU_VER_NAV]), self.translation(archivoHelp[ID_MENU_VER_NAV]), kind=wx.ITEM_CHECK)
self.menuVerCard = self.menuVer.Append(ID_MENU_VER_CARD, self.translation(archivo[ID_MENU_VER_CARD]), self.translation(archivoHelp[ID_MENU_VER_CARD]), kind=wx.ITEM_CHECK)
self.menuVer.AppendSeparator()
self.barraStatus = self.menuVer.Append(ID_MENU_VER_BARRA_ESTADO, self.translation(archivo[ID_MENU_VER_BARRA_ESTADO]), self.translation(archivoHelp[ID_MENU_VER_BARRA_ESTADO]), kind=wx.ITEM_CHECK)
if app.tool:
idf1x, standard, navegador = eval(app.tool)
else:
idf1x, standard, navegador = (True, True, True)
app.config.Write("tool", str( (True, True, True) ))
app.config.Flush()
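        # Note: the persisted "tool" preference is the str() of a boolean
        # (idf1x, standard, navegador) tuple, e.g. "(True, True, True)";
        # the eval() above assumes this locally-written config is trusted.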
self.menuVer.Check(ID_MENU_VER_STANDARD, standard)
self.menuVer.Check(ID_MENU_VER_IDF1X, idf1x)
self.menuVer.Check(ID_MENU_VER_BARRA_ESTADO, True)
self.menuVer.Enable(ID_MENU_VER_REFRESCAR, False)
self.menuVer.Enable(ID_MENU_VER_NAV, False)
self.menuVer.Enable(ID_MENU_VER_CARD, False)
        # Tools menu
self.menuTool = wx.Menu()
self.menuTool.Append(ID_CREAR_ENTIDAD, self.translation(archivo[ID_CREAR_ENTIDAD]), self.translation(archivoHelp[ID_CREAR_ENTIDAD]))
self.menuTool.Enable(ID_CREAR_ENTIDAD, False)
self.menuTool.AppendSeparator()
self.menuTool.Append(ID_RELACION_IDENTIF, self.translation(archivo[ID_RELACION_IDENTIF]), self.translation(archivoHelp[ID_RELACION_IDENTIF]))
self.menuTool.Enable(ID_RELACION_IDENTIF, False)
        self.menuTool.Append(ID_RELACION_NO_IDENTIF, self.translation(archivo[ID_RELACION_NO_IDENTIF]), self.translation(archivoHelp[ID_RELACION_NO_IDENTIF]))
self.menuTool.Enable(ID_RELACION_NO_IDENTIF, False)
self.menuTool.AppendSeparator()
self.menuTool.Append(ID_GENERAR_SCRIPT, self.translation(archivo[ID_GENERAR_SCRIPT]), self.translation(archivoHelp[ID_GENERAR_SCRIPT]))
self.menuTool.Enable(ID_GENERAR_SCRIPT, False)
        self.menuTool.Append(ID_GENERAR_SCRIPT_DJANGO, self.translation(archivo[ID_GENERAR_SCRIPT_DJANGO]), self.translation(archivoHelp[ID_GENERAR_SCRIPT_DJANGO]))
self.menuTool.Enable(ID_GENERAR_SCRIPT_DJANGO, False)
#self.menuTool.Append(ID_GUARDAR_SCRIPT, "Guardar Script SQL", "Guarda el Script SQL del modelo para PostgreSQL")
        # Help menu
self.menuHelp = wx.Menu()
#self.menuLanguage = wx.Menu()
#self.menuLanguage.Append(ID_MENU_HELP_us_US, self.translation(archivo[ID_MENU_HELP_us_US]), self.translation(archivoHelp[ID_MENU_HELP_us_US]), kind=wx.ITEM_RADIO)
#self.menuLanguage.Append(ID_MENU_HELP_es_ES, self.translation(archivo[ID_MENU_HELP_es_ES]), self.translation(archivoHelp[ID_MENU_HELP_es_ES]), kind=wx.ITEM_RADIO).Check(True)
#self.menuLanguage.Append(ID_MENU_HELP_fr_FR, self.translation("frances"), kind=wx.ITEM_RADIO)
#self.menuHelp.AppendMenu(ID_MENU_HELP_LANGUAGE, self.translation(archivo[ID_MENU_HELP_LANGUAGE]), self.menuLanguage)
self.menuHelp.Append(ID_MENU_HELP_LANGUAGE, self.translation(archivo[ID_MENU_HELP_LANGUAGE]), self.translation(archivoHelp[ID_MENU_HELP_LANGUAGE]))
self.menuHelp.Append(ID_MENU_HELP_AYUDA, self.translation(archivo[ID_MENU_HELP_AYUDA]), self.translation(archivoHelp[ID_MENU_HELP_AYUDA]))
self.menuHelp.AppendSeparator()
self.menuHelp.Append(ID_MENU_HELP_LOG, self.translation(archivo[ID_MENU_HELP_LOG]), self.translation(archivoHelp[ID_MENU_HELP_LOG]))
self.menuHelp.Enable(ID_MENU_HELP_LOG, False)
self.menuHelp.AppendSeparator()
self.menuHelp.Append(ID_MENU_HELP_ACERCA_DE, self.translation(archivo[ID_MENU_HELP_ACERCA_DE]), self.translation(archivoHelp[ID_MENU_HELP_ACERCA_DE]))
        #--Add the menus to the menu bar--#
self.menuBar = wx.MenuBar()
self.menuBar.Append(self.menuFile, self.translation(menuBar[0]))
self.menuBar.Append(self.menuVer, self.translation(menuBar[1]))
self.menuBar.Append(self.menuTool, self.translation(menuBar[2]))
self.menuBar.Append(self.menuHelp, self.translation(menuBar[3]))
        #--Attach the menu bar to the frame--#
self.SetMenuBar(self.menuBar)
if not posx:
self.Centre()
#--MENU ToolBar--#
self._mgr = wx.aui.AuiManager()
self._mgr.SetManagedWindow(self)
#self.translationperspectives = []
self.n = 0
self.x = 0
self.toolBarIdef1x = wx.ToolBar(self, -1, wx.DefaultPosition, wx.DefaultSize,
wx.TB_FLAT | wx.TB_NODIVIDER)
self.toolBarIdef1x.SetToolBitmapSize((8, 8))
self.toolBarIdef1x.AddLabelTool(ID_PUNTERO_MOUSE, self.translation(archivo[ID_PUNTERO_MOUSE]), wx.Bitmap('images/Puntero.png'))
self.toolBarIdef1x.AddLabelTool(ID_CREAR_ENTIDAD, self.translation(archivo[ID_CREAR_ENTIDAD]), wx.Bitmap('images/Entidad.png'))
self.toolBarIdef1x.EnableTool(ID_CREAR_ENTIDAD, False)
self.toolBarIdef1x.AddLabelTool(ID_RELACION_IDENTIF, self.translation(archivo[ID_RELACION_IDENTIF]), wx.Bitmap('images/R-identificadora.png'))
self.toolBarIdef1x.EnableTool(ID_RELACION_IDENTIF, False)
self.toolBarIdef1x.AddLabelTool(ID_RELACION_NO_IDENTIF, self.translation(archivo[ID_RELACION_NO_IDENTIF]), wx.Bitmap('images/R-No-identificadora.png'))
self.toolBarIdef1x.EnableTool(ID_RELACION_NO_IDENTIF, False)
self.toolBarIdef1x.Realize()
self._mgr.AddPane(self.toolBarIdef1x, wx.aui.AuiPaneInfo().
Name("toolBarIdef1x").Caption("IDEF1X-Kit").
ToolbarPane().Top().Row(1).
LeftDockable(True).RightDockable(True).CloseButton(False))
if not idf1x:
            panelIdef1x = self._mgr.GetPane("toolBarIdef1x")
panelIdef1x.Hide()
self.toolBarStandard = wx.ToolBar(self, -1, wx.DefaultPosition, wx.DefaultSize,
wx.TB_FLAT | wx.TB_NODIVIDER)
self.toolBarStandard.SetToolBitmapSize(wx.Size(32, 32))
self.toolBarStandard.AddLabelTool(ID_CREAR_MODELO, self.translation(archivo[ID_CREAR_MODELO]), wx.ArtProvider.GetBitmap(wx.ART_NEW, wx.ART_TOOLBAR))
self.toolBarStandard.AddLabelTool(ID_ABRIR_MODELO, self.translation(archivo[ID_ABRIR_MODELO]), wx.ArtProvider.GetBitmap(wx.ART_FILE_OPEN, wx.ART_TOOLBAR))
self.toolBarStandard.AddSeparator()
self.toolBarStandard.AddLabelTool(ID_GUARDAR_MODELO, self.translation(archivo[ID_GUARDAR_MODELO]), wx.ArtProvider.GetBitmap(wx.ART_FLOPPY, wx.ART_TOOLBAR))
self.toolBarStandard.EnableTool(ID_GUARDAR_MODELO, False)
self.toolBarStandard.AddSeparator()
self.toolBarStandard.AddLabelTool(ID_GENERAR_SCRIPT, self.translation(archivo[ID_GENERAR_SCRIPT]), wx.Bitmap('images/2_sqlLogo.png') )
self.toolBarStandard.EnableTool(ID_GENERAR_SCRIPT, False)
        self.toolBarStandard.AddLabelTool(ID_GENERAR_SCRIPT_DJANGO, self.translation(archivo[ID_GENERAR_SCRIPT_DJANGO]), wx.Bitmap('images/django.png'))
self.toolBarStandard.EnableTool(ID_GENERAR_SCRIPT_DJANGO, False)
self.toolBarStandard.Realize()
self._mgr.AddPane(self.toolBarStandard, wx.aui.AuiPaneInfo().
Name("toolBarStandard").Caption("Estandar").
ToolbarPane().Top().Row(1).
LeftDockable(True).RightDockable(True).CloseButton(False))
if not standard:
            panelStandard = self._mgr.GetPane("toolBarStandard")
panelStandard.Hide()
self._mgr.Update()
        #--Status bar--#
self.statusBar = self.CreateStatusBar()
self.SetStatusText("Listo!")
        #--Right-click menu for the Tree--#
self.menu_tree_entidad = wx.Menu()
self.menu_tree_entidad.Append(ID_CREAR_ENTIDAD, self.translation(archivo[ID_CREAR_ENTIDAD]))
self.menu_tree_atributo = wx.Menu()
self.menu_tree_atributo.Append(ID_TREE_MODIFICAR_ATRIBUTO, self.translation(archivo[ID_TREE_MODIFICAR_ATRIBUTO]))
self.menu_tree_atributo.Append(ID_TREE_ELIMINAR_ATRIBUTO, self.translation(archivo[ID_TREE_ELIMINAR_ATRIBUTO]))
self.menu_tree_relacion = wx.Menu()
self.menu_tree_relacion.Append(ID_CREAR_RELACION, self.translation(archivo[ID_CREAR_RELACION]))
        #--Right-click menus for the canvas shapes--#
self.menu_entidad = wx.Menu()
self.menu_entidad.Append(ID_MODIFICAR_ENTIDAD, self.translation(archivo[ID_MODIFICAR_ENTIDAD]))
self.menu_entidad.Append(ID_ELIMINAR_ENTIDAD, self.translation(archivo[ID_ELIMINAR_ENTIDAD]))
self.menu_atributo = wx.Menu()
self.menu_atributo.Append(ID_CREAR_ATRIBUTO, self.translation(archivo[ID_CREAR_ATRIBUTO]))
self.menu_atributo.Append(ID_MODIFICAR_ATRIBUTO, self.translation(archivo[ID_MODIFICAR_ATRIBUTO]))
self.menu_atributo.Append(ID_ELIMINAR_ATRIBUTO, self.translation(archivo[ID_ELIMINAR_ATRIBUTO]))
self.menu_relacion = wx.Menu()
self.menu_relacion.Append(ID_MODIFICAR_RELACION, self.translation(archivo[ID_MODIFICAR_RELACION]))
self.menu_relacion.Append(ID_ELIMINAR_RELACION, self.translation(archivo[ID_ELIMINAR_RELACION]))
self.menu_relacionIdentificadora = wx.Menu()
self.menu_relacionIdentificadora.Append(ID_MODIFICAR_RELACION, self.translation(archivo[ID_MODIFICAR_RELACION]))
self.menu_relacionIdentificadora.Append(ID_ELIMINAR_RELACION, self.translation(archivo[ID_ELIMINAR_RELACION]))
self.menu_relacionNoIdentificadora = wx.Menu()
self.menu_relacionNoIdentificadora.Append(ID_MODIFICAR_RELACION, self.translation(archivo[ID_MODIFICAR_RELACION]))
self.menu_relacionNoIdentificadora.Append(ID_ELIMINAR_RELACION, self.translation(archivo[ID_ELIMINAR_RELACION]))
        #--Event bindings for every button, by ID--#
self.Bind(wx.EVT_MENU, self.CrearModelo, id=ID_CREAR_MODELO)
self.Bind(wx.EVT_MENU, self.GuardarModelo, id=ID_GUARDAR_MODELO)
self.Bind(wx.EVT_MENU, self.GuardarModeloComo, id=ID_GUARDAR_COMO_MODELO)
self.Bind(wx.EVT_MENU, self.AbrirModelo, id=ID_ABRIR_MODELO)
self.Bind(wx.EVT_MENU, self.ExportarModelo, id=ID_EXPORTAR_MODELO)
self.Bind(wx.EVT_MENU, self.OnExit, id=ID_CERRAR_APLICACION )
self.Bind(wx.EVT_MENU, self.ToolBarIdef1xVer, id=ID_MENU_VER_IDF1X)
self.Bind(wx.EVT_MENU, self.NavVer, id=ID_MENU_VER_NAV)
self.Bind(wx.EVT_MENU, self.NavCard, id=ID_MENU_VER_CARD)
self.Bind(wx.EVT_MENU, self.ToolBarStandardVer, id=ID_MENU_VER_STANDARD)
self.Bind(wx.EVT_MENU, self.ToggleStatusBar, id=ID_MENU_VER_BARRA_ESTADO)
self.Bind(wx.EVT_MENU, self.Puntero, id = ID_PUNTERO_MOUSE)
self.Bind(wx.EVT_MENU, self.CrearEntidad, id = ID_CREAR_ENTIDAD)
self.Bind(wx.EVT_MENU, self.ModificarEntidad, id= ID_MODIFICAR_ENTIDAD)
self.Bind(wx.EVT_MENU, self.EliminarEntidad, id= ID_ELIMINAR_ENTIDAD)
self.Bind(wx.EVT_MENU, self.CrearAtributo, id = ID_CREAR_ATRIBUTO)
self.Bind(wx.EVT_MENU, self.ModificarAtributo, id = ID_MODIFICAR_ATRIBUTO)
self.Bind(wx.EVT_MENU, self.EliminarAtributo, id = ID_ELIMINAR_ATRIBUTO)
self.Bind(wx.EVT_MENU, self.TreeModificarAtributo, id = ID_TREE_MODIFICAR_ATRIBUTO)
self.Bind(wx.EVT_MENU, self.TreeEliminarAtributo, id = ID_TREE_ELIMINAR_ATRIBUTO)
self.Bind(wx.EVT_MENU, self.CrearRelacion, id = ID_CREAR_RELACION)
self.Bind(wx.EVT_MENU, self.RelacionIdentificadora, id = ID_RELACION_IDENTIF)
self.Bind(wx.EVT_MENU, self.RelacionNoIdentificadora, id = ID_RELACION_NO_IDENTIF)
self.Bind(wx.EVT_MENU, self.ModificarRelacion, id = ID_MODIFICAR_RELACION)
self.Bind(wx.EVT_MENU, self.EliminarRelacion, id = ID_ELIMINAR_RELACION)
self.Bind(wx.EVT_MENU, self.GenerarScriptSql, id = ID_GENERAR_SCRIPT)
self.Bind(wx.EVT_MENU, self.GenerarScriptDjango, id = ID_GENERAR_SCRIPT_DJANGO)
#self.Bind(wx.EVT_MENU, self.GuardarScriptSql, id = ID_GUARDAR_SCRIPT)
#self.Bind(wx.EVT_MENU, self.ActualizarIdioma, id=ID_MENU_HELP_us_US )
#self.Bind(wx.EVT_MENU, self.ActualizarIdioma, id=ID_MENU_HELP_es_ES )
#self.Bind(wx.EVT_MENU, self.ActualizarIdioma, id=ID_MENU_HELP_fr_FR )
self.Bind(wx.EVT_MENU, self.ActualizarIdioma, id=ID_MENU_HELP_LANGUAGE )
self.Bind(wx.EVT_MENU, self.VerLog, id=ID_MENU_HELP_LOG )
self.Bind(wx.EVT_MENU, self.OnAboutBox, id=ID_MENU_HELP_ACERCA_DE )
        #--Timer that periodically checks and saves the Frame position--#
self.time = wx.Timer(self)
self.Bind(wx.EVT_TIMER, app.SaveConfig, self.time)
self.time.Start(5000)
self.GetMenuBar().Remove(self.GetMenuBar().FindMenu('&Window'))
def CrearModelo(self, evt):
ejecute = Modelo(self)
ejecute.CrearModelo(self)
if ejecute.num == 1:
ejecute.Close(True)
self.GetMenuBar().Remove(self.GetMenuBar().FindMenu('&Window'))
def GuardarModelo(self, evt):
self.GetActiveChild().GuardarModelo()
def GuardarModeloComo(self, evt):
self.GetActiveChild().GuardarModelo(1)
def AbrirModelo(self, evt):
file = wx.FileDialog(self, message=self.Idioma(archivo[ID_MODELO_ABRIR_TITULO]), defaultDir=os.path.expanduser("~"), wildcard=self.Idioma(archivo[ID_MODELO_ABRIR_ARCHIVO]), style=0)
if file.ShowModal() == wx.ID_OK:
ejecute = Modelo(self)
ejecute.AbrirModelo(self, file.GetPath(), file.GetFilename())
if ejecute.num == 1:
dial = wx.MessageDialog(self, self.Idioma(archivo[ID_MODELO_ABRIR_ERROR]), self.Idioma(archivo[ID_MODELO_ABRIR_ERROR_TITULO]), wx.OK | wx.ICON_ERROR)
dial.ShowModal()
ejecute.Close(True)
self.GetMenuBar().Remove(self.GetMenuBar().FindMenu('&Window'))
def AbrirModeloDirecto(self, file):
ejecute = Modelo(self)
ejecute.AbrirModelo(self, file.strip(), "")
if ejecute.num == 1:
dial = wx.MessageDialog(self, self.Idioma(archivo[ID_MODELO_ABRIR_ERROR]), self.Idioma(archivo[ID_MODELO_ABRIR_ERROR_TITULO]), wx.OK | wx.ICON_ERROR)
dial.ShowModal()
ejecute.Close(True)
self.GetMenuBar().Remove(self.GetMenuBar().FindMenu('&Window'))
def ExportarModelo(self, evt):
self.GetActiveChild().ExportarModelo()
    #--Exit the application--#
def OnExit(self, evt):
self.Close(True)
def Actualizar(self, evt):
dc = wx.ClientDC(self.GetActiveChild().canvas)
self.GetActiveChild().canvas.PrepareDC(dc)
self.GetActiveChild().canvas.Redraw(dc)
self.GetActiveChild().canvas.Refresh()
self.Refresh()
def ToolBarIdef1xVer(self, event):
        panelIdef1x = self._mgr.GetPane("toolBarIdef1x")
if self.menuVerIdef1x.IsChecked():
panelIdef1x.Show()
mos = True
else:
panelIdef1x.Hide()
mos = False
self.app.config.Write("tool", str((mos, self.menuVerStandard.IsChecked(), self.menuVerNav.IsChecked())))
self.app.config.Flush()
self._mgr.Update()
def NavVer(self, event):
        panelNav = self.GetActiveChild().nav
if self.menuVerNav.IsChecked() and not panelNav.IsShown():
panelNav.Show()
mos = True
else:
panelNav.Hide()
mos = False
self.menuVer.Check(ID_MENU_VER_NAV, mos)
self.app.config.Write("tool", str((self.menuVerIdef1x.IsChecked(), self.menuVerStandard.IsChecked() , mos)))
self.app.config.Flush()
self.GetActiveChild()._mgr.Update()
def NavCard(self, event):
if self.menuVerCard.IsChecked():
mos = True
else:
mos = False
self.menuVer.Check(ID_MENU_VER_CARD, mos)
for relacion in self.GetActiveChild().relaciones:
relacion.OnCardinalidad()
self.GetActiveChild().canvas.Refresh()
def ToolBarStandardVer(self, event):
        panelStandard = self._mgr.GetPane("toolBarStandard")
if self.menuVerStandard.IsChecked():
panelStandard.Show()
mos = True
else:
panelStandard.Hide()
mos = False
self.app.config.Write("tool", str((self.menuVerIdef1x.IsChecked(), mos, self.menuVerNav.IsChecked())))
self.app.config.Flush()
self._mgr.Update()
def ToggleStatusBar(self, event):
if self.barraStatus.IsChecked():
self.statusBar.Show()
else:
self.statusBar.Hide()
def CrearEntidad(self, evt):
ejecute = Entidad()
#validar = ejecute.CrearEntidad(self, self.GetActiveChild().canvas, self.GetActiveChild().contadorEntidad)
dlg = Dialogos(self, self.Idioma(archivo[ENTIDAD_TITULO]))
dlg.Entidad(ejecute.data)
if dlg.ShowModal() == wx.ID_OK:
for elemento in self.GetActiveChild().entidades:
if elemento.nombre == ejecute.data.get("nombre"):
validar = ejecute.ValidarNombreEntidad(self.GetActiveChild().entidades)
if validar == False:
return 0
else:
return 0
ejecute.CrearEntidad(self, self.GetActiveChild().canvas, self.GetActiveChild().contadorEntidad)
self.GetActiveChild().contadorEntidad += 1
self.GetActiveChild().entidades.append(ejecute)
self.GetActiveChild().canvas.Refresh()
def ModificarEntidad(self, evt):
ejecute = Entidad()
for elemento in self.GetActiveChild().entidades:
if elemento.nombreForma.Selected():
ejecute.editar = 1
ejecute.elemento = elemento
if ejecute.editar == 1:
ejecute.ModificarEntidad(self.GetActiveChild().canvas, ejecute.elemento, self.GetActiveChild().entidades)
"""else:
dlg = wx.TextEntryDialog(None, "cual entidad quiere modificar?", 'Modificar Entidad', '')
if dlg.ShowModal() == wx.ID_OK:
response = dlg.GetValue()
for elemento in self.GetActiveChild().entidades:
if elemento.nombre == response:
ejecute.ModificarEntidad(self.GetActiveChild().canvas, elemento, self.GetActiveChild().entidades)"""
self.GetActiveChild().canvas.Refresh()
def EliminarEntidad(self, evt):
ejecute = Entidad()
for elemento in self.GetActiveChild().entidades:
if elemento.nombreForma.Selected():
ejecute.editar = 1
ejecute.elemento = elemento
if ejecute.editar == 1:
respuesta = ejecute.EliminarEntidad(self.GetActiveChild().canvas, ejecute.elemento, self.GetActiveChild().entidades, self.GetActiveChild())
if respuesta == 1:
self.GetActiveChild().entidades.remove(ejecute.elemento)
"""else:
dlg = wx.TextEntryDialog(None, "cual entidad quiere eliminar?", 'Eliminar Entidad', '')
dlg.SetIcon=(icon)
if dlg.ShowModal() == wx.ID_OK:
response = dlg.GetValue()
for elemento in self.GetActiveChild().entidades:
if elemento.nombre == response:
respuesta = ejecute.EliminarEntidad(self.GetActiveChild().canvas, elemento, self.GetActiveChild().entidades, self.GetActiveChild())
if respuesta == 1:
self.GetActiveChild().entidades.remove(elemento)"""
self.GetActiveChild().canvas.Refresh()
def CrearAtributo(self, evt):
ejecute = Atributo()
for elemento in self.GetActiveChild().entidades:
if elemento.atributosForma.Selected():
ejecute.editar = 1
ejecute.elemento = elemento
if ejecute.editar == 1:
dlg = Dialogos(self.GetActiveChild().canvas.frame, self.Idioma(archivo[ATRIBUTO_TITULO]))
dlg.Atributo(ejecute.data)
if dlg.ShowModal() == wx.ID_OK:
for elemento in ejecute.elemento.atributos:
if elemento.nombre == ejecute.data.get("nombreAtributo"):
validar = ejecute.ValidarNombreAtributo(self.GetActiveChild().canvas.frame, ejecute.elemento.atributos)
if validar == False:
return 0
else:
return 0
ejecute.CrearAtributo(self.GetActiveChild().canvas, ejecute.elemento, self.GetActiveChild().contadorAtributo)
self.GetActiveChild().contadorAtributo += 1
for entidadHija in ejecute.elemento.entidadesHijas:
entidadHija.HeredarAtributos(ejecute.elemento, 1)
"""else:
dlg = wx.TextEntryDialog(None, "cual entidad agregar un atributo?", 'Agregar Atributo', '')
if dlg.ShowModal() == wx.ID_OK:
response = dlg.GetValue()
for elemento in self.GetActiveChild().entidades:
if elemento.nombre == response:
ejecute.CrearAtributo(self.GetActiveChild().canvas, elemento, self.GetActiveChild().contadorAtributo)"""
self.GetActiveChild().canvas.Refresh()
def ModificarAtributo(self, evt):
ejecute = Atributo()
for elemento in self.GetActiveChild().entidades:
if elemento.atributosForma.Selected():
ejecute.editar = 1
ejecute.elemento = elemento
if ejecute.editar == 1:
ejecute.DlgModificarAtributo(self.GetActiveChild().canvas, ejecute.elemento)
"""else:
dlg = wx.TextEntryDialog(None, "cuall entidad agregar un atributo?", 'Agregar Atributo', '')
if dlg.ShowModal() == wx.ID_OK:
response = dlg.GetValue()
for elemento in self.GetActiveChild().entidades:
if elemento.nombre == response:
ejecute.ModificarAtributo(self.GetActiveChild().canvas, elemento)"""
dc = wx.ClientDC(self.GetActiveChild().canvas)
for elemento in self.GetActiveChild().entidades:
ejecute.ModificarAtributosForma(dc, elemento)
self.GetActiveChild().canvas.Refresh()
def EliminarAtributo(self, evt):
ejecute = Atributo()
for elemento in self.GetActiveChild().entidades:
if elemento.atributosForma.Selected():
ejecute.editar = 1
ejecute.elemento = elemento
if ejecute.editar == 1:
ejecute.DlgEliminarAtributo(self.GetActiveChild().canvas, ejecute.elemento)
"""else:
dlg = wx.TextEntryDialog(None, "cual entidad remover un atributo?", 'Eliminar Atributo', '')
if dlg.ShowModal() == wx.ID_OK:
response = dlg.GetValue()
for elemento in self.GetActiveChild().entidades:
if elemento.nombre == response:
ejecute.DlgEliminarAtributo(self.GetActiveChild().canvas, elemento)"""
self.GetActiveChild().canvas.Refresh()
def CrearRelacion(self, evt):
ejecute = Relacion()
ejecute.DlgCrearRelacion(self, self.GetActiveChild().canvas, self.GetActiveChild().entidades)
self.GetActiveChild().contadorRelacion += 1
self.GetActiveChild().canvas.Refresh()
def TreeModificarAtributo(self, evt):
ejecute = Atributo()
ejecute.ModificarAtributo(self.GetActiveChild().canvas, self.atributoAcc.entidad, self.atributoAcc)
self.GetActiveChild().canvas.Refresh()
def TreeEliminarAtributo(self, evt):
if self.atributoAcc.claveForanea == True:
dial = wx.MessageDialog(self, self.Idioma(archivo[ATRIBUTO_ELIMINAR_ERROR]) % self.atributoAcc.nombre, 'Error', wx.OK | wx.ICON_ERROR)
dial.ShowModal()
return
dlg = wx.MessageDialog(self.GetActiveChild().canvas, self.Idioma('Want to remove the attribute %s') % self.atributoAcc.nombre, self.Idioma('Delete Attribute %s') % self.atributoAcc.nombre, wx.YES_NO | wx.ICON_QUESTION)
if dlg.ShowModal() == wx.ID_YES:
ejecute = Atributo()
ejecute.EliminarAtributo(self.GetActiveChild().canvas, self.atributoAcc.entidad, self.atributoAcc)
self.GetActiveChild().canvas.Refresh()
def RelacionIdentificadora(self, evt):
self.GetActiveChild().canvas.SetCursor(wx.CROSS_CURSOR)
self.GetActiveChild().relacion = 1
def RelacionNoIdentificadora(self, evt):
self.GetActiveChild().canvas.SetCursor(wx.CROSS_CURSOR)
self.GetActiveChild().relacion = 2
def ModificarRelacion(self, evt):
ejecute = Relacion()
for elemento in self.GetActiveChild().relaciones:
if elemento.Selected():
ejecute.DlgModificarRelacion(elemento, self, self.GetActiveChild().canvas, self.GetActiveChild().entidades)
def EliminarRelacion(self, evt):
ejecute = Relacion()
for elemento in self.GetActiveChild().relaciones:
if elemento.Selected():
ejecute.EliminarRelacion(elemento, self.GetActiveChild().canvas, self.GetActiveChild(), self.GetActiveChild().entidades)
def GenerarScriptSql(self, evt):
script = SQL().ScriptPostgreSQL(self.GetActiveChild())
dlg = Dialogos(self, "Script SQL")
dlg.ScriptSql(script)
dlg.ShowModal()
def GenerarScriptDjango(self, evt):
script = Django().ScriptDjango(self.GetActiveChild())
dlg = Dialogos(self, "Script Django")
dlg.ScriptSql(script)
dlg.ShowModal()
def GuardarScriptSql(self, evt):
script = SQL().ScriptPostgreSQL(self.GetActiveChild())
tempFile = wx.FileDialog(self, message="Guardar SQL", defaultDir=os.path.expanduser("~"), defaultFile="sofiaSQL", wildcard="Archivos SQL (*.sql)|*.sql", style=wx.FD_SAVE|wx.FD_OVERWRITE_PROMPT)
if tempFile.ShowModal() == wx.ID_OK:
fileSQL = "%s.sql" % tempFile.GetPath()
#nombreArchivoTemporal = tempFile.GetFilename()
file = codecs.open(fileSQL, encoding='UTF-8', mode = 'w+')
file.write(script)
file.close()
def Idioma(self, texto):
if language[self.data["idioma"]] != '':
return self.translation(texto)
else:
return texto
def ActualizarIdioma(self, evt):
dlg = Dialogos(self, self.Idioma("Configuration"))
dlg.Configuracion(self.data)
if dlg.ShowModal() == wx.ID_OK:
countMenuBar = 0
if language[self.data["idioma"]] != '':
self.locale.AddCatalog(language[self.data["idioma"]])
idioma = language[self.data["idioma"]]
                # Retranslate every menu with the newly selected catalog;
                # they all share the same (archivo, archivoHelp) tables.
                for menu_obj in (self.menuFile, self.menuVer, self.menuTool,
                                 self.menuHelp):
                    for menu in menu_obj.GetMenuItems():
                        if menu.GetId() != -2:
                            menu.SetText(self.translation(archivo[menu.GetId()]))
                            menu.SetHelp(self.translation(archivoHelp[menu.GetId()]))
                for menu in self.menuBar.GetMenus():
                    try:
                        menu[0].SetTitle(self.translation(menuBar[countMenuBar]))
                        self.menuBar.Replace(countMenuBar, menu[0], self.translation(menuBar[countMenuBar]))
                        countMenuBar = countMenuBar + 1
                    except:
                        countMenuBar = countMenuBar + 1
                for menu_obj in (self.menu_tree_entidad, self.menu_tree_atributo,
                                 self.menu_tree_relacion, self.menu_entidad,
                                 self.menu_atributo, self.menu_relacion,
                                 self.menu_relacionIdentificadora,
                                 self.menu_relacionNoIdentificadora):
                    for menu in menu_obj.GetMenuItems():
                        if menu.GetId() != -2:
                            menu.SetText(self.translation(archivo[menu.GetId()]))
                            menu.SetHelp(self.translation(archivoHelp[menu.GetId()]))
try:
self.SetTitle(self.translation(archivo[TITULO]))
self.GetActiveChild().lienzo.Caption(self.translation("Canvas"))
self.GetActiveChild().nav.Caption(self.translation("Object Browser"))
except:
pass
else:
idioma = 'English'
                # Restore the untranslated (English) labels from the tables.
                for menu_obj in (self.menuFile, self.menuVer, self.menuTool,
                                 self.menuHelp):
                    for menu in menu_obj.GetMenuItems():
                        if menu.GetId() != -2:
                            menu.SetText(archivo[menu.GetId()])
                            menu.SetHelp(archivoHelp[menu.GetId()])
                for menu in self.menuBar.GetMenus():
                    try:
                        menu[0].SetTitle(menuBar[countMenuBar])
                        self.menuBar.Replace(countMenuBar, menu[0], menuBar[countMenuBar])
                        countMenuBar = countMenuBar + 1
                    except:
                        countMenuBar = countMenuBar + 1
                for menu_obj in (self.menu_tree_entidad, self.menu_tree_atributo,
                                 self.menu_tree_relacion, self.menu_entidad,
                                 self.menu_atributo, self.menu_relacion,
                                 self.menu_relacionIdentificadora,
                                 self.menu_relacionNoIdentificadora):
                    for menu in menu_obj.GetMenuItems():
                        if menu.GetId() != -2:
                            menu.SetText(archivo[menu.GetId()])
                            menu.SetHelp(archivoHelp[menu.GetId()])
self.SetTitle(archivo[TITULO])
try:
self.GetActiveChild().lienzo.Caption("Canvas")
self.GetActiveChild().nav.Caption("Object Browser")
except:
pass
self.app.config.Write("language", idioma)
self.app.config.Flush()
self.Refresh()
def VerLog(self, event):
dlg = Dialogos(self, "Eventos")
dlg.VerLog(self.GetActiveChild().log.VerEventos())
dlg.ShowModal()
    #--Display the About box--#
def OnAboutBox(self, event):
description = """Sofia es una herramienta desarrollada con el lenguaje de programación Python para la modelación de datos, genera el Script SQL para PostgreSQL en esta versión. Es un proyecto de Investigación y Desarrollo del Centro de Investigación en Informatica Aplicada (CENIIA) del Colegio Universitario de Caracas. Creado y dirigido por el Prof. Alejandro Amaro con la colaboración de los estudiantes."""
licence = """Aplicacion liberada bajo la licencia GPLv3, para el uso."""
info = wx.AboutDialogInfo()
info.SetIcon(wx.Icon("images/sofia.png", wx.BITMAP_TYPE_PNG))
info.SetName('Sofia')
info.SetVersion('0.072')
info.SetDescription(description)
info.SetCopyright('(C) 2011 Colegio Universitario de Caracas')
info.SetWebSite('http://www.cuc.edu.ve')
info.SetLicence(licence)
info.AddDeveloper('Prof. Alejandro Amaro - Autor - Tutor')
info.AddDeveloper("Estudiantes de Proyecto Socio-Tecnológico:")
info.AddDeveloper(' Junio 2011 Mayo 2012 - Versión 0.0.7')
info.AddDeveloper(' T.S.U. Arturo Delgado ')
info.AddDeveloper(' T.S.U. Maximo Gonzales ')
info.AddDeveloper(' T.S.U. Alexis Canchica ')
info.AddDeveloper(' Mayo 2010 Mayo 2011 - Versión 0.0.4')
info.AddDeveloper(' Br. Arturo Delgado ')
info.AddDeveloper(' Br. Ruben Rosas ')
info.AddDeveloper(' Br. Carolina Machado')
info.AddDeveloper(' Br. Erik Mejias ')
info.AddDeveloper('Estudiantes Tesistas:')
info.AddDeveloper(' Abril 2009 Junio 2009 - Versión 0.0.1')
info.AddDeveloper(' Br. Dorian Machado ')
info.AddDeveloper(' Br. Daglis Campos ')
info.AddDeveloper(' Br. Felix Rodriguez ')
info.AddDocWriter('Estudiantes de Proyecto Socio-Tecnológico:')
info.AddDocWriter(' Junio 2011 Mayo 2012 - Versión 0.0.7')
info.AddDocWriter(' T.S.U. Arturo Delgado ')
info.AddDocWriter(' T.S.U. Maximo Gonzales ')
info.AddDocWriter(' T.S.U. Alexis Canchica ')
info.AddDocWriter(' Mayo 2010 Mayo 2011 - Versión 0.0.4')
info.AddDocWriter(' Br. Arturo Delgado ')
info.AddDocWriter(' Br. Ruben Rosas ')
info.AddDocWriter(' Br. Carolina Machado')
info.AddDocWriter(' Br. Erik Mejias ')
info.AddArtist('Alumnos del Colegio Universitario de Caracas')
info.AddTranslator('Anonimo')
wx.AboutBox(info)
#dlg = Dialogos(self, "Script SQL")
#dlg.AboutBox()
#dlg.ShowModal()
def Puntero(self, evt):
self.GetActiveChild().canvas.SetCursor(wx.STANDARD_CURSOR)
self.GetActiveChild().click = 0
self.GetActiveChild().relacion = 0 | ajdelgados/Sofia | modules/main.py | Python | gpl-3.0 | 37,233 |
# -*- coding: utf-8 -*-
# __init__.py
# Copyright (C) 2013 LEAP
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Key Manager is a Nicknym agent for the LEAP client.
"""
# let's do a little sanity check to see if we're using the wrong gnupg
import sys
try:
from gnupg.gnupg import GPGUtilities
assert(GPGUtilities) # pyflakes happy
from gnupg import __version__
from distutils.version import LooseVersion as V
assert(V(__version__) >= V('1.2.3'))
except (ImportError, AssertionError):
print "*******"
print "Ooops! It looks like there is a conflict in the installed version "
print "of gnupg."
print
print "Disclaimer: Ideally, we would need to work a patch and propose the "
print "merge to upstream. But until then do: "
print
print "% pip uninstall python-gnupg"
print "% pip install gnupg"
print "*******"
sys.exit(1)
import logging
import requests
from leap.common.check import leap_assert, leap_assert_type
from leap.common.events import signal
from leap.common.events import events_pb2 as proto
from leap.keymanager.errors import KeyNotFound
from leap.keymanager.keys import (
EncryptionKey,
build_key_from_dict,
KEYMANAGER_KEY_TAG,
TAGS_PRIVATE_INDEX,
)
from leap.keymanager.openpgp import (
OpenPGPKey,
OpenPGPScheme,
)
logger = logging.getLogger(__name__)
#
# The Key Manager
#
class KeyManager(object):
#
# server's key storage constants
#
OPENPGP_KEY = 'openpgp'
PUBKEY_KEY = "user[public_key]"
def __init__(self, address, nickserver_uri, soledad, session_id=None,
ca_cert_path=None, api_uri=None, api_version=None, uid=None,
gpgbinary=None):
"""
Initialize a Key Manager for user's C{address} with provider's
nickserver reachable in C{url}.
:param address: The address of the user of this Key Manager.
:type address: str
        :param nickserver_uri: The URI of the nickserver.
        :type nickserver_uri: str
:param soledad: A Soledad instance for local storage of keys.
:type soledad: leap.soledad.Soledad
:param session_id: The session ID for interacting with the webapp API.
:type session_id: str
:param ca_cert_path: The path to the CA certificate.
:type ca_cert_path: str
:param api_uri: The URI of the webapp API.
:type api_uri: str
:param api_version: The version of the webapp API.
:type api_version: str
:param uid: The users' UID.
:type uid: str
:param gpgbinary: Name for GnuPG binary executable.
:type gpgbinary: C{str}
"""
self._address = address
self._nickserver_uri = nickserver_uri
self._soledad = soledad
self._session_id = session_id
self.ca_cert_path = ca_cert_path
self.api_uri = api_uri
self.api_version = api_version
self.uid = uid
# a dict to map key types to their handlers
self._wrapper_map = {
OpenPGPKey: OpenPGPScheme(soledad, gpgbinary=gpgbinary),
# other types of key will be added to this mapper.
}
# the following are used to perform https requests
self._fetcher = requests
self._session = self._fetcher.session()
#
# utilities
#
def _key_class_from_type(self, ktype):
"""
Return key class from string representation of key type.
"""
return filter(
lambda klass: str(klass) == ktype,
self._wrapper_map).pop()
def _get(self, uri, data=None):
"""
Send a GET request to C{uri} containing C{data}.
:param uri: The URI of the request.
:type uri: str
:param data: The body of the request.
:type data: dict, str or file
:return: The response to the request.
:rtype: requests.Response
"""
leap_assert(
self._ca_cert_path is not None,
'We need the CA certificate path!')
res = self._fetcher.get(uri, data=data, verify=self._ca_cert_path)
# Nickserver now returns 404 for key not found and 500 for
# other cases (like key too small), so we are skipping this
# check for the time being
# res.raise_for_status()
# Responses are now text/plain, although it's json anyway, but
# this will fail when it shouldn't
# leap_assert(
# res.headers['content-type'].startswith('application/json'),
# 'Content-type is not JSON.')
return res
def _put(self, uri, data=None):
"""
Send a PUT request to C{uri} containing C{data}.
The request will be sent using the configured CA certificate path to
verify the server certificate and the configured session id for
authentication.
:param uri: The URI of the request.
:type uri: str
:param data: The body of the request.
:type data: dict, str or file
:return: The response to the request.
:rtype: requests.Response
"""
leap_assert(
self._ca_cert_path is not None,
'We need the CA certificate path!')
leap_assert(
self._session_id is not None,
'We need a session_id to interact with webapp!')
res = self._fetcher.put(
uri, data=data, verify=self._ca_cert_path,
cookies={'_session_id': self._session_id})
# assert that the response is valid
res.raise_for_status()
return res
def _fetch_keys_from_server(self, address):
"""
Fetch keys bound to C{address} from nickserver and insert them in
local database.
:param address: The address bound to the keys.
:type address: str
@raise KeyNotFound: If the key was not found on nickserver.
"""
# request keys from the nickserver
res = None
try:
res = self._get(self._nickserver_uri, {'address': address})
server_keys = res.json()
# insert keys in local database
if self.OPENPGP_KEY in server_keys:
self._wrapper_map[OpenPGPKey].put_ascii_key(
server_keys['openpgp'])
except Exception as e:
logger.warning("Error retrieving the keys: %r" % (e,))
if res:
logger.warning("%s" % (res.content,))
#
# key management
#
def send_key(self, ktype):
"""
Send user's key of type C{ktype} to provider.
        The public key bound to the user's address is sent to the provider,
        which will sign it and replace any prior keys for the same address
        in its database.
:param ktype: The type of the key.
:type ktype: KeyType
@raise KeyNotFound: If the key was not found in local database.
"""
leap_assert(
ktype is OpenPGPKey,
'For now we only know how to send OpenPGP public keys.')
# prepare the public key bound to address
pubkey = self.get_key(
self._address, ktype, private=False, fetch_remote=False)
data = {
self.PUBKEY_KEY: pubkey.key_data
}
uri = "%s/%s/users/%s.json" % (
self._api_uri,
self._api_version,
self._uid)
self._put(uri, data)
signal(proto.KEYMANAGER_DONE_UPLOADING_KEYS, self._address)
def get_key(self, address, ktype, private=False, fetch_remote=True):
"""
Return a key of type C{ktype} bound to C{address}.
First, search for the key in local storage. If it is not available,
then try to fetch from nickserver.
:param address: The address bound to the key.
:type address: str
:param ktype: The type of the key.
:type ktype: KeyType
:param private: Look for a private key instead of a public one?
:type private: bool
:return: A key of type C{ktype} bound to C{address}.
:rtype: EncryptionKey
@raise KeyNotFound: If the key was not found both locally and in
keyserver.
"""
leap_assert(
ktype in self._wrapper_map,
            'Unknown key type: %s.' % str(ktype))
try:
signal(proto.KEYMANAGER_LOOKING_FOR_KEY, address)
# return key if it exists in local database
key = self._wrapper_map[ktype].get_key(address, private=private)
signal(proto.KEYMANAGER_KEY_FOUND, address)
return key
except KeyNotFound:
signal(proto.KEYMANAGER_KEY_NOT_FOUND, address)
# we will only try to fetch a key from nickserver if fetch_remote
# is True and the key is not private.
if fetch_remote is False or private is True:
raise
signal(proto.KEYMANAGER_LOOKING_FOR_KEY, address)
self._fetch_keys_from_server(address)
key = self._wrapper_map[ktype].get_key(address, private=False)
signal(proto.KEYMANAGER_KEY_FOUND, address)
return key
def get_all_keys_in_local_db(self, private=False):
"""
Return all keys stored in local database.
:return: A list with all keys in local db.
:rtype: list
"""
return map(
lambda doc: build_key_from_dict(
self._key_class_from_type(doc.content['type']),
doc.content['address'],
doc.content),
self._soledad.get_from_index(
TAGS_PRIVATE_INDEX,
KEYMANAGER_KEY_TAG,
'1' if private else '0'))
def refresh_keys(self):
"""
Fetch keys from nickserver and update them locally.
"""
addresses = set(map(
lambda doc: doc.address,
self.get_all_keys_in_local_db(private=False)))
for address in addresses:
# do not attempt to refresh our own key
if address == self._address:
continue
self._fetch_keys_from_server(address)
def gen_key(self, ktype):
"""
Generate a key of type C{ktype} bound to the user's address.
:param ktype: The type of the key.
:type ktype: KeyType
:return: The generated key.
:rtype: EncryptionKey
"""
signal(proto.KEYMANAGER_STARTED_KEY_GENERATION, self._address)
key = self._wrapper_map[ktype].gen_key(self._address)
signal(proto.KEYMANAGER_FINISHED_KEY_GENERATION, self._address)
return key
#
# Setters/getters
#
def _get_session_id(self):
return self._session_id
def _set_session_id(self, session_id):
self._session_id = session_id
session_id = property(
_get_session_id, _set_session_id, doc='The session id.')
def _get_ca_cert_path(self):
return self._ca_cert_path
def _set_ca_cert_path(self, ca_cert_path):
self._ca_cert_path = ca_cert_path
ca_cert_path = property(
_get_ca_cert_path, _set_ca_cert_path,
doc='The path to the CA certificate.')
def _get_api_uri(self):
return self._api_uri
def _set_api_uri(self, api_uri):
self._api_uri = api_uri
api_uri = property(
_get_api_uri, _set_api_uri, doc='The webapp API URI.')
def _get_api_version(self):
return self._api_version
def _set_api_version(self, api_version):
self._api_version = api_version
api_version = property(
_get_api_version, _set_api_version, doc='The webapp API version.')
def _get_uid(self):
return self._uid
def _set_uid(self, uid):
self._uid = uid
uid = property(
_get_uid, _set_uid, doc='The uid of the user.')
#
# encrypt/decrypt and sign/verify API
#
def encrypt(self, data, pubkey, passphrase=None, sign=None,
cipher_algo='AES256'):
"""
        Encrypt C{data} using public key C{pubkey} and sign with C{sign} key.
:param data: The data to be encrypted.
:type data: str
:param pubkey: The key used to encrypt.
:type pubkey: EncryptionKey
:param sign: The key used for signing.
:type sign: EncryptionKey
:param cipher_algo: The cipher algorithm to use.
:type cipher_algo: str
:return: The encrypted data.
:rtype: str
"""
leap_assert_type(pubkey, EncryptionKey)
leap_assert(pubkey.__class__ in self._wrapper_map, 'Unknown key type.')
leap_assert(pubkey.private is False, 'Key is not public.')
return self._wrapper_map[pubkey.__class__].encrypt(
data, pubkey, passphrase, sign)
def decrypt(self, data, privkey, passphrase=None, verify=None):
"""
        Decrypt C{data} using private key C{privkey} and verify with C{verify} key.
:param data: The data to be decrypted.
:type data: str
:param privkey: The key used to decrypt.
:type privkey: OpenPGPKey
:param verify: The key used to verify a signature.
:type verify: OpenPGPKey
:return: The decrypted data.
:rtype: str
@raise InvalidSignature: Raised if unable to verify the signature with
C{verify} key.
"""
leap_assert_type(privkey, EncryptionKey)
leap_assert(
privkey.__class__ in self._wrapper_map,
'Unknown key type.')
leap_assert(privkey.private is True, 'Key is not private.')
return self._wrapper_map[privkey.__class__].decrypt(
data, privkey, passphrase, verify)
def sign(self, data, privkey, digest_algo='SHA512', clearsign=False,
detach=True, binary=False):
"""
Sign C{data} with C{privkey}.
:param data: The data to be signed.
:type data: str
:param privkey: The private key to be used to sign.
:type privkey: EncryptionKey
:param digest_algo: The hash digest to use.
:type digest_algo: str
:param clearsign: If True, create a cleartext signature.
:type clearsign: bool
:param detach: If True, create a detached signature.
:type detach: bool
:param binary: If True, do not ascii armour the output.
:type binary: bool
:return: The signed data.
:rtype: str
"""
leap_assert_type(privkey, EncryptionKey)
leap_assert(
privkey.__class__ in self._wrapper_map,
'Unknown key type.')
leap_assert(privkey.private is True, 'Key is not private.')
return self._wrapper_map[privkey.__class__].sign(
data, privkey, digest_algo=digest_algo, clearsign=clearsign,
detach=detach, binary=binary)
def verify(self, data, pubkey):
"""
Verify signed C{data} with C{pubkey}.
:param data: The data to be verified.
:type data: str
:param pubkey: The public key to be used on verification.
:type pubkey: EncryptionKey
:return: The signed data.
:rtype: str
"""
leap_assert_type(pubkey, EncryptionKey)
leap_assert(pubkey.__class__ in self._wrapper_map, 'Unknown key type.')
leap_assert(pubkey.private is False, 'Key is not public.')
return self._wrapper_map[pubkey.__class__].verify(data, pubkey)
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
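# A minimal usage sketch (hypothetical addresses, URIs and paths; Soledad
# setup and error handling are outside this module's scope):
#   km = KeyManager('alice@example.org', 'https://nicknym.example.org:6425',
#                   soledad, session_id='...', ca_cert_path='/path/to/ca.crt',
#                   api_uri='https://api.example.org:4430', api_version='1',
#                   uid='...')
#   km.gen_key(OpenPGPKey)           # generate the user's OpenPGP keypair
#   km.send_key(OpenPGPKey)          # upload the public key to the provider
#   bob_key = km.get_key('bob@example.org', OpenPGPKey)
#   ciphertext = km.encrypt('hello bob', bob_key)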
| ivanalejandro0/keymanager | src/leap/keymanager/__init__.py | Python | gpl-3.0 | 16,455 |
import json
import random
import ssl
import string
import threading
import time
import websocket
import settings
from player import Player
class WebsocketPlayerControl(object):
def __init__(self, player, server=settings.WS_SERVER):
websocket.enableTrace(settings.DEBUG)
rand_chars = string.ascii_uppercase + string.digits
self.player_id = ''.join(random.choice(rand_chars) for _ in range(10))
self.player = player
self.ws = websocket.WebSocketApp(server,
on_open=self.on_open,
on_message=self.on_message,
on_error=self.on_error)
player.song_change_callback = self.song_change
def song_change(self, identifier):
data = {
'action': 'song_change',
'player': self.player_id,
'key': settings.CLIENT_TOKEN,
'playlist': settings.PLAYLIST_ID,
'identifier': identifier
}
self.ws.send(json.dumps(data))
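        # Illustrative frame produced above (hypothetical values; the real
        # schema is defined by the server at settings.WS_SERVER):
        #   {"action": "song_change", "player": "A1B2C3D4E5",
        #    "key": "<CLIENT_TOKEN>", "playlist": 1, "identifier": "track-42"}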
def start(self):
while True:
if settings.DEBUG:
print('opening websocket connection...')
sslopt = {"cert_reqs": ssl.CERT_NONE}
self.ws.run_forever(ping_interval=60, sslopt=sslopt)
time.sleep(10)
def quit(self):
self.ws.send("client disconnect")
self.ws.close()
def on_open(self, ws):
try:
name = settings.CLIENT_NAME
except AttributeError:
name = 'Client'
data = {
'action': 'register',
'player': self.player_id,
'key': settings.CLIENT_TOKEN,
'playlist': settings.PLAYLIST_ID,
'name': name
}
ws.send(json.dumps(data))
def on_message(self, ws, message):
if settings.DEBUG:
print('message received:', message)
data = json.loads(message)
if data['action'] == 'play':
self.player.play()
elif data['action'] == 'pause':
self.player.pause()
elif data['action'] == 'update_playlist':
self.player.update_playlist()
elif data['action'] == 'next':
self.player.next()
elif data['action'] == 'play_song':
self.player.play_song(data['identifier'])
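        # Inbound messages this handler understands (illustrative JSON):
        #   {"action": "play"}, {"action": "pause"}, {"action": "next"},
        #   {"action": "update_playlist"},
        #   {"action": "play_song", "identifier": "track-42"}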
def on_error(self, ws, error):
print(error)
def main():
player = Player()
ws = WebsocketPlayerControl(player)
ws_thread = threading.Thread(name='ws', target=ws.start)
try:
ws_thread.start()
player.start()
except KeyboardInterrupt:
player.quit()
ws.quit()
ws_thread.join()
if __name__ == "__main__":
main()
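# A minimal settings.py sketch this module assumes (illustrative values;
# these are the only names referenced here, CLIENT_NAME being optional):
#   WS_SERVER = 'wss://example.org/ws'
#   CLIENT_TOKEN = 'secret-token'
#   PLAYLIST_ID = 1
#   CLIENT_NAME = 'Living room'
#   DEBUG = False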
| Menollo/menosic | client/main.py | Python | gpl-3.0 | 2,756 |
# *- coding: utf-8 -*-
# mailbox.py
# Copyright (C) 2013-2015 LEAP
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
IMAP Mailbox.
"""
import re
import os
import io
import cStringIO
import StringIO
import time
from collections import defaultdict
from email.utils import formatdate
from twisted.internet import defer
from twisted.internet import reactor
from twisted.logger import Logger
from twisted.mail import imap4
from zope.interface import implements
from leap.common.check import leap_assert
from leap.common.check import leap_assert_type
from leap.bitmask.mail.constants import INBOX_NAME, MessageFlags
from leap.bitmask.mail.imap.messages import IMAPMessage
# TODO LIST
# [ ] finish the implementation of IMailboxListener
# [ ] implement the rest of ISearchableMailbox
INIT_FLAGS = (MessageFlags.RECENT_FLAG, MessageFlags.LIST_FLAG)
def make_collection_listener(mailbox):
"""
Wrap a mailbox in a class that can be hashed according to the mailbox name.
This means that dicts or sets will use this new equality rule, so we won't
collect multiple instances of the same mailbox in collections like the
MessageCollection set where we keep track of listeners.
"""
class HashableMailbox(object):
def __init__(self, mbox):
self.mbox = mbox
# See #8083, pixelated adaptor introduces conflicts in the usage
self.mailbox_name = self.mbox.mbox_name + 'IMAP'
def __hash__(self):
return hash(self.mailbox_name)
def __eq__(self, other):
return self.mailbox_name == other.mbox.mbox_name + 'IMAP'
def notify_new(self):
self.mbox.notify_new()
return HashableMailbox(mailbox)
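# Illustrative consequence of the wrapper above (hypothetical mbox object):
# two listeners wrapping mailboxes with the same name hash (and compare)
# equal, so a set keeps only a single entry:
#   listeners = set()
#   listeners.add(make_collection_listener(mbox))
#   listeners.add(make_collection_listener(mbox))
#   assert len(listeners) == 1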
class IMAPMailbox(object):
"""
A Soledad-backed IMAP mailbox.
Implements the high-level method needed for the Mailbox interfaces.
The low-level database methods are contained in the generic
MessageCollection class. We receive an instance of it and it is made
accessible in the `collection` attribute.
"""
implements(
imap4.IMailbox,
imap4.IMailboxInfo,
imap4.ISearchableMailbox,
# XXX I think we do not need to implement CloseableMailbox, do we?
# We could remove ourselves from the collectionListener, although I
# think it simply will be garbage collected.
# imap4.ICloseableMailbox
imap4.IMessageCopier)
init_flags = INIT_FLAGS
CMD_MSG = "MESSAGES"
CMD_RECENT = "RECENT"
CMD_UIDNEXT = "UIDNEXT"
CMD_UIDVALIDITY = "UIDVALIDITY"
CMD_UNSEEN = "UNSEEN"
log = Logger()
# TODO we should turn this into a datastructure with limited capacity
_listeners = defaultdict(set)
def __init__(self, collection, rw=1):
"""
:param collection: instance of MessageCollection
:type collection: MessageCollection
:param rw: read-and-write flag for this mailbox
:type rw: int
"""
self.rw = rw
self._uidvalidity = None
self.collection = collection
self.collection.addListener(make_collection_listener(self))
@property
def mbox_name(self):
return self.collection.mbox_name
@property
def listeners(self):
"""
Returns listeners for this mbox.
The server itself is a listener to the mailbox.
so we can notify it (and should!) after changes in flags
and number of messages.
:rtype: set
"""
return self._listeners[self.mbox_name]
def get_imap_message(self, message):
d = defer.Deferred()
IMAPMessage(message, store=self.collection.store, d=d)
return d
# FIXME this grows too crazily when many instances are fired, like
# during imaptest stress testing. Should have a queue of limited size
# instead.
def addListener(self, listener):
"""
Add a listener to the listeners queue.
The server adds itself as a listener when there is a SELECT,
so it can send EXIST commands.
:param listener: listener to add
:type listener: an object that implements IMailboxListener
"""
listeners = self.listeners
self.log.debug('Adding mailbox listener: %s. Total: %s' % (
listener, len(listeners)))
listeners.add(listener)
def removeListener(self, listener):
"""
Remove a listener from the listeners queue.
:param listener: listener to remove
:type listener: an object that implements IMailboxListener
"""
self.listeners.remove(listener)
def getFlags(self):
"""
Returns the flags defined for this mailbox.
:returns: tuple of flags for this mailbox
:rtype: tuple of str
"""
flags = self.collection.mbox_wrapper.flags
if not flags:
flags = self.init_flags
flags_str = map(str, flags)
return flags_str
def setFlags(self, flags):
"""
Sets flags for this mailbox.
:param flags: a tuple with the flags
:type flags: tuple of str
"""
# XXX this is setting (overriding) old flags.
# Better pass a mode flag
leap_assert(isinstance(flags, tuple),
"flags expected to be a tuple")
return self.collection.set_mbox_attr("flags", flags)
def getUIDValidity(self):
"""
Return the unique validity identifier for this mailbox.
:return: unique validity identifier
:rtype: int
"""
return self.collection.get_mbox_attr("created")
def getUID(self, message_number):
"""
Return the UID of a message in the mailbox
.. note:: this implementation does not make much sense RIGHT NOW,
but in the future will be useful to get absolute UIDs from
message sequence numbers.
:param message: the message sequence number.
:type message: int
:rtype: int
:return: the UID of the message.
"""
# TODO support relative sequences. The (imap) message should
# receive a sequence number attribute: a deferred is not expected
return message_number
def getUIDNext(self):
"""
Return the likely UID for the next message added to this
mailbox. Currently it returns the higher UID incremented by
one.
:return: deferred with int
:rtype: Deferred
"""
d = self.collection.get_uid_next()
return d
def getMessageCount(self):
"""
Returns the total count of messages in this mailbox.
:return: deferred with int
:rtype: Deferred
"""
return self.collection.count()
def getUnseenCount(self):
"""
Returns the number of messages with the 'Unseen' flag.
:return: count of messages flagged `unseen`
:rtype: int
"""
return self.collection.count_unseen()
def getRecentCount(self):
"""
Returns the number of messages with the 'Recent' flag.
:return: count of messages flagged `recent`
:rtype: int
"""
return self.collection.count_recent()
def isWriteable(self):
"""
Get the read/write status of the mailbox.
:return: 1 if mailbox is read-writeable, 0 otherwise.
:rtype: int
"""
# XXX We don't need to store it in the mbox doc, do we?
# return int(self.collection.get_mbox_attr('rw'))
return self.rw
def getHierarchicalDelimiter(self):
"""
        Returns the character used to delimit hierarchies in mailboxes.
:rtype: str
"""
return '/'
def requestStatus(self, names):
"""
Handles a status request by gathering the output of the different
status commands.
:param names: a list of strings containing the status commands
:type names: iter
"""
r = {}
maybe = defer.maybeDeferred
if self.CMD_MSG in names:
r[self.CMD_MSG] = maybe(self.getMessageCount)
if self.CMD_RECENT in names:
r[self.CMD_RECENT] = maybe(self.getRecentCount)
if self.CMD_UIDNEXT in names:
r[self.CMD_UIDNEXT] = maybe(self.getUIDNext)
if self.CMD_UIDVALIDITY in names:
r[self.CMD_UIDVALIDITY] = maybe(self.getUIDValidity)
if self.CMD_UNSEEN in names:
r[self.CMD_UNSEEN] = maybe(self.getUnseenCount)
def as_a_dict(values):
return dict(zip(r.keys(), values))
d = defer.gatherResults(r.values())
d.addCallback(as_a_dict)
return d
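        # Illustrative call (the names come from the IMAP STATUS command);
        # the deferred fires with a dict such as {'MESSAGES': 12, 'UNSEEN': 3}:
        #   d = mailbox.requestStatus(['MESSAGES', 'UNSEEN'])
        #   d.addCallback(handle_status)   # handle_status is hypothetical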
def addMessage(self, message, flags, date=None):
"""
Adds a message to this mailbox.
:param message: the raw message
:type message: str
:param flags: flag list
:type flags: list of str
:param date: timestamp
:type date: str, or None
:return: a deferred that will be triggered with the UID of the added
message.
"""
# TODO should raise ReadOnlyMailbox if not rw.
# TODO have a look at the cases for internal date in the rfc
# XXX we could treat the message as an IMessage from here
# TODO -- fast appends should be definitely solved by Blobs.
# A better solution will probably involve implementing MULTIAPPEND
# extension or patching imap server to support pipelining.
if isinstance(message,
(cStringIO.OutputType, StringIO.StringIO, io.BytesIO)):
message = message.getvalue()
leap_assert_type(message, basestring)
if flags is None:
flags = tuple()
else:
flags = tuple(str(flag) for flag in flags)
if date is None:
date = formatdate(time.time())
d = self.collection.add_msg(message, flags, date=date)
d.addCallback(lambda message: message.get_uid())
d.addErrback(
lambda failure: self.log.failure('Error while adding msg'))
return d
def notify_new(self, *args):
"""
Notify of new messages to all the listeners.
This will be called indirectly by the underlying collection, that will
notify this IMAPMailbox whenever there are changes in the number of
messages in the collection, since we have added ourselves to the
collection listeners.
:param args: ignored.
"""
def cbNotifyNew(result):
exists, recent = result
for listener in self.listeners:
listener.newMessages(exists, recent)
d = self._get_notify_count()
d.addCallback(cbNotifyNew)
d.addCallback(self.collection.cb_signal_unread_to_ui)
d.addErrback(lambda failure: self.log.failure('Error while notify'))
def _get_notify_count(self):
"""
Get message count and recent count for this mailbox.
:return: a deferred that will fire with a tuple, with number of
messages and number of recent messages.
:rtype: Deferred
"""
# XXX this is way too expensive in cases like multiple APPENDS.
# We should have a way of keep a cache or do a self-increment for that
# kind of calls.
d_exists = defer.maybeDeferred(self.getMessageCount)
d_recent = defer.maybeDeferred(self.getRecentCount)
d_list = [d_exists, d_recent]
def log_num_msg(result):
exists, recent = tuple(result)
self.log.debug(
'NOTIFY (%r): there are %s messages, %s recent' % (
self.mbox_name, exists, recent))
return result
d = defer.gatherResults(d_list)
d.addCallback(log_num_msg)
return d
# commands, do not rename methods
def destroy(self):
"""
Called before this mailbox is permanently deleted.
        Should clean up resources, and set the \\Noselect flag
on the mailbox.
"""
# XXX this will overwrite all the existing flags
# should better simply addFlag
self.setFlags((MessageFlags.NOSELECT_FLAG,))
def remove_mbox(_):
uuid = self.collection.mbox_uuid
d = self.collection.mbox_wrapper.delete(self.collection.store)
d.addCallback(
lambda _: self.collection.mbox_indexer.delete_table(uuid))
return d
d = self.deleteAllDocs()
d.addCallback(remove_mbox)
return d
def expunge(self):
"""
Remove all messages flagged \\Deleted
"""
if not self.isWriteable():
raise imap4.ReadOnlyMailbox
return self.collection.delete_all_flagged()
def _get_message_fun(self, uid):
"""
Return the proper method to get a message for this mailbox, depending
on the passed uid flag.
:param uid: If true, the IDs specified in the query are UIDs;
otherwise they are message sequence IDs.
:type uid: bool
:rtype: callable
"""
get_message_fun = [
self.collection.get_message_by_sequence_number,
self.collection.get_message_by_uid][uid]
return get_message_fun
def _get_messages_range(self, messages_asked, uid=True):
def get_range(messages_asked):
return self._filter_msg_seq(messages_asked)
d = self._bound_seq(messages_asked, uid)
if uid:
d.addCallback(get_range)
d.addErrback(
lambda f: self.log.failure('Error getting msg range'))
return d
def _bound_seq(self, messages_asked, uid):
"""
        Put an upper bound on a message sequence if it is open-ended.
:param messages_asked: IDs of the messages.
:type messages_asked: MessageSet
:return: a Deferred that will fire with a MessageSet
"""
def set_last_uid(last_uid):
messages_asked.last = last_uid
return messages_asked
def set_last_seq(all_uid):
messages_asked.last = len(all_uid)
return messages_asked
if not messages_asked.last:
try:
iter(messages_asked)
except TypeError:
# looks like we cannot iterate
if uid:
d = self.collection.get_last_uid()
d.addCallback(set_last_uid)
else:
d = self.collection.all_uid_iter()
d.addCallback(set_last_seq)
return d
return defer.succeed(messages_asked)
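        # Illustrative sketch (assumes twisted.mail.imap4.MessageSet): an
        # open range such as "4:*" arrives with its upper bound unset, and
        # the callbacks above cap it so the set becomes iterable:
        #   from twisted.mail.imap4 import MessageSet
        #   asked = MessageSet(4, None)   # the open range 4:*
        #   asked.last = 10               # what set_last_uid() does
        #   list(asked)                   # -> [4, 5, 6, 7, 8, 9, 10]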
def _filter_msg_seq(self, messages_asked):
"""
        Filter a message sequence, returning only the ones that exist in the
        collection.
:param messages_asked: IDs of the messages.
:type messages_asked: MessageSet
:rtype: set
"""
# TODO we could pass the asked sequence to the indexer
# all_uid_iter, and bound the sql query instead.
def filter_by_asked(all_msg_uid):
set_asked = set(messages_asked)
set_exist = set(all_msg_uid)
return set_asked.intersection(set_exist)
d = self.collection.all_uid_iter()
d.addCallback(filter_by_asked)
return d
def fetch(self, messages_asked, uid):
"""
Retrieve one or more messages in this mailbox.
from rfc 3501: The data items to be fetched can be either a single atom
or a parenthesized list.
:param messages_asked: IDs of the messages to retrieve information
about
:type messages_asked: MessageSet
:param uid: If true, the IDs are UIDs. They are message sequence IDs
otherwise.
:type uid: bool
        :rtype: deferred with a generator that yields two-tuples of message
                sequence number and message object
"""
get_msg_fun = self._get_message_fun(uid)
getimapmsg = self.get_imap_message
def get_imap_messages_for_range(msg_range):
def _get_imap_msg(messages):
d_imapmsg = []
# just in case we got bad data in here
for msg in filter(None, messages):
d_imapmsg.append(getimapmsg(msg))
return defer.gatherResults(d_imapmsg, consumeErrors=True)
def _zip_msgid(imap_messages):
zipped = zip(
list(msg_range), imap_messages)
return (item for item in zipped)
# XXX not called??
def _unset_recent(sequence):
reactor.callLater(0, self.unset_recent_flags, sequence)
return sequence
d_msg = []
for msgid in msg_range:
# XXX We want cdocs because we "probably" are asked for the
# body. We should be smarter at do_FETCH and pass a parameter
# to this method in order not to prefetch cdocs if they're not
# going to be used.
d_msg.append(get_msg_fun(msgid, get_cdocs=True))
d = defer.gatherResults(d_msg, consumeErrors=True)
d.addCallback(_get_imap_msg)
d.addCallback(_zip_msgid)
d.addErrback(
lambda failure: self.log.error(
'Error getting msg for range'))
return d
d = self._get_messages_range(messages_asked, uid)
d.addCallback(get_imap_messages_for_range)
d.addErrback(
lambda failure: self.log.failure('Error on fetch'))
return d
def fetch_flags(self, messages_asked, uid):
"""
        A fast method to fetch all flags, faking just the subset of the
        MIME interface that is needed to satisfy a generic FLAGS query.
Given how LEAP Mail is supposed to work without local cache,
this query is going to be quite common, and also we expect
it to be in the form 1:* at the beginning of a session, so
it's not bad to fetch all the FLAGS docs at once.
:param messages_asked: IDs of the messages to retrieve information
about
:type messages_asked: MessageSet
:param uid: If 1, the IDs are UIDs. They are message sequence IDs
otherwise.
:type uid: int
        :return: A tuple of two-tuples of message sequence numbers and
                 flagsPart, which is only a partial implementation of
                 MessagePart.
:rtype: tuple
"""
# is_sequence = True if uid == 0 else False
        # XXX FIXME -----------------------------------------------------
        # The imap tests, and MUAs like mutt, will choke until we implement
        # sequence numbers. This is an easy hack meanwhile.
is_sequence = False
# ---------------------------------------------------------------
if is_sequence:
raise NotImplementedError(
"FETCH FLAGS NOT IMPLEMENTED FOR MESSAGE SEQUENCE NUMBERS YET")
d = defer.Deferred()
reactor.callLater(0, self._do_fetch_flags, messages_asked, uid, d)
return d
def _do_fetch_flags(self, messages_asked, uid, d):
"""
:param messages_asked: IDs of the messages to retrieve information
about
:type messages_asked: MessageSet
:param uid: If 1, the IDs are UIDs. They are message sequence IDs
otherwise.
:type uid: int
:param d: deferred whose callback will be called with result.
:type d: Deferred
:rtype: A generator that yields two-tuples of message sequence numbers
and flagsPart
"""
class flagsPart(object):
def __init__(self, uid, flags):
self.uid = uid
self.flags = flags
def getUID(self):
return self.uid
def getFlags(self):
return map(str, self.flags)
def pack_flags(result):
_uid, _flags = result
return _uid, flagsPart(_uid, _flags)
def get_flags_for_seq(sequence):
d_all_flags = []
for msgid in sequence:
# TODO implement sequence numbers here too
d_flags_per_uid = self.collection.get_flags_by_uid(msgid)
d_flags_per_uid.addCallback(pack_flags)
d_all_flags.append(d_flags_per_uid)
gotflags = defer.gatherResults(d_all_flags)
gotflags.addCallback(get_uid_flag_generator)
return gotflags
def get_uid_flag_generator(result):
generator = (item for item in result)
d.callback(generator)
d_seq = self._get_messages_range(messages_asked, uid)
d_seq.addCallback(get_flags_for_seq)
return d_seq
@defer.inlineCallbacks
def fetch_headers(self, messages_asked, uid):
"""
        A fast method to fetch all headers, faking just the subset of the
        MIME interface that is needed to satisfy a generic HEADERS query.
Given how LEAP Mail is supposed to work without local cache,
this query is going to be quite common, and also we expect
it to be in the form 1:* at the beginning of a session, so
**MAYBE** it's not too bad to fetch all the HEADERS docs at once.
:param messages_asked: IDs of the messages to retrieve information
about
:type messages_asked: MessageSet
:param uid: If true, the IDs are UIDs. They are message sequence IDs
otherwise.
:type uid: bool
        :return: A tuple of two-tuples of message sequence numbers and
                 headersPart, which is only a partial implementation of
                 MessagePart.
:rtype: tuple
"""
# TODO implement sequences
is_sequence = True if uid == 0 else False
if is_sequence:
raise NotImplementedError(
"FETCH HEADERS NOT IMPLEMENTED FOR SEQUENCE NUMBER YET")
class headersPart(object):
def __init__(self, uid, headers):
self.uid = uid
self.headers = headers
def getUID(self):
return self.uid
def getHeaders(self, _):
return dict(
(str(key), str(value))
for key, value in
self.headers.items())
messages_asked = yield self._bound_seq(messages_asked, uid)
seq_messg = yield self._filter_msg_seq(messages_asked)
result = []
for msgid in seq_messg:
msg = yield self.collection.get_message_by_uid(msgid)
headers = headersPart(msgid, msg.get_headers())
result.append((msgid, headers))
defer.returnValue(iter(result))
def store(self, messages_asked, flags, mode, uid):
"""
Sets the flags of one or more messages.
        :param messages_asked: The identifiers of the messages to set the
                               flags of
        :type messages_asked: A MessageSet object with the list of messages
                              requested
:param flags: The flags to set, unset, or add.
:type flags: sequence of str
:param mode: If mode is -1, these flags should be removed from the
specified messages. If mode is 1, these flags should be
added to the specified messages. If mode is 0, all
existing flags should be cleared and these flags should be
added.
:type mode: -1, 0, or 1
:param uid: If true, the IDs specified in the query are UIDs;
otherwise they are message sequence IDs.
:type uid: bool
:return: A deferred, that will be called with a dict mapping message
sequence numbers to sequences of str representing the flags
set on the message after this operation has been performed.
:rtype: deferred
:raise ReadOnlyMailbox: Raised if this mailbox is not open for
read-write.
"""
if not self.isWriteable():
self.log.info('Read only mailbox!')
raise imap4.ReadOnlyMailbox
d = defer.Deferred()
reactor.callLater(0, self._do_store, messages_asked, flags,
mode, uid, d)
d.addCallback(self.collection.cb_signal_unread_to_ui)
d.addErrback(lambda f: self.log.error('Error on store'))
return d
def _do_store(self, messages_asked, flags, mode, uid, observer):
"""
        Helper method; invokes the update_flags method in the
        IMAPMessageCollection.
See the documentation for the `store` method for the parameters.
:param observer: a deferred that will be called with the dictionary
mapping UIDs to flags after the operation has been
done.
:type observer: deferred
"""
# TODO we should prevent client from setting Recent flag
get_msg_fun = self._get_message_fun(uid)
leap_assert(not isinstance(flags, basestring),
"flags cannot be a string")
flags = tuple(flags)
def set_flags_for_seq(sequence):
def return_result_dict(list_of_flags):
result = dict(zip(list(sequence), list_of_flags))
observer.callback(result)
return result
d_all_set = []
for msgid in sequence:
d = get_msg_fun(msgid)
d.addCallback(lambda msg: self.collection.update_flags(
msg, flags, mode))
d_all_set.append(d)
got_flags_setted = defer.gatherResults(d_all_set)
got_flags_setted.addCallback(return_result_dict)
return got_flags_setted
d_seq = self._get_messages_range(messages_asked, uid)
d_seq.addCallback(set_flags_for_seq)
return d_seq
# ISearchableMailbox
def search(self, query, uid):
"""
Search for messages that meet the given query criteria.
Warning: this is half-baked, and it might give problems since
it offers the SearchableInterface.
We'll be implementing it asap.
:param query: The search criteria
:type query: list
:param uid: If true, the IDs specified in the query are UIDs;
otherwise they are message sequence IDs.
:type uid: bool
:return: A list of message sequence numbers or message UIDs which
match the search criteria or a C{Deferred} whose callback
will be invoked with such a list.
:rtype: C{list} or C{Deferred}
"""
# TODO see if we can raise w/o interrupting flow
# :raise IllegalQueryError: Raised when query is not valid.
# example query:
# ['UNDELETED', 'HEADER', 'Message-ID',
# XXX fixme, does not exist
# '[email protected]']
# TODO hardcoding for now! -- we'll support generic queries later on
# but doing a quickfix for avoiding duplicate saves in the draft
# folder. # See issue #4209
if len(query) > 2:
if query[1] == 'HEADER' and query[2].lower() == "message-id":
msgid = str(query[3]).strip()
self.log.debug('Searching for %s' % (msgid,))
d = self.collection.get_uid_from_msgid(str(msgid))
d.addCallback(lambda result: [result])
return d
# nothing implemented for any other query
self.log.warn('Cannot process query: %s' % (query,))
return []
# IMessageCopier
def copy(self, message):
"""
Copy the given message object into this mailbox.
:param message: an IMessage implementor
:type message: LeapMessage
        :return: a deferred that will be fired with the message
                 uid when the copy succeeds.
:rtype: Deferred
"""
d = self.collection.copy_msg(
message.message, self.collection.mbox_uuid)
return d
# convenience fun
def deleteAllDocs(self):
"""
Delete all docs in this mailbox
"""
# FIXME not implemented
return self.collection.delete_all_docs()
def unset_recent_flags(self, uid_seq):
"""
Unset Recent flag for a sequence of UIDs.
"""
# FIXME not implemented
return self.collection.unset_recent_flags(uid_seq)
def __repr__(self):
"""
Representation string for this mailbox.
"""
return u"<IMAPMailbox: mbox '%s' (%s)>" % (
self.mbox_name, self.collection.count())
_INBOX_RE = re.compile(INBOX_NAME, re.IGNORECASE)
def normalize_mailbox(name):
"""
Return a normalized representation of the mailbox ``name``.
    This function ensures that any initial 'inbox' part of a
    mailbox name is made uppercase.
:param name: the name of the mailbox
:type name: unicode
:rtype: unicode
"""
# XXX maybe it would make sense to normalize common folders too:
# trash, sent, drafts, etc...
if _INBOX_RE.match(name):
        # ensure initial INBOX is uppercase
return INBOX_NAME + name[len(INBOX_NAME):]
return name
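# Illustrative sketch of the normalization above (assumes INBOX_NAME is
# 'INBOX'; inputs hypothetical):
#   normalize_mailbox(u'inbox')       # -> u'INBOX'
#   normalize_mailbox(u'inBox/Work')  # -> u'INBOX/Work'
#   normalize_mailbox(u'Archive')     # -> u'Archive' (unchanged)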
| leapcode/bitmask-dev | src/leap/bitmask/mail/imap/mailbox.py | Python | gpl-3.0 | 30,254 |
# Copyright 2019-2020 by Christopher C. Little.
# This file is part of Abydos.
#
# Abydos is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Abydos is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Abydos. If not, see <http://www.gnu.org/licenses/>.
"""abydos.tests.distance.test_distance__token_distance.
This module contains unit tests for abydos.distance._TokenDistance
"""
import unittest
from collections import Counter
from abydos.distance import (
AverageLinkage,
DamerauLevenshtein,
Jaccard,
JaroWinkler,
SokalMichener,
)
from abydos.stats import ConfusionTable
from abydos.tokenizer import (
CharacterTokenizer,
QSkipgrams,
WhitespaceTokenizer,
)
class TokenDistanceTestCases(unittest.TestCase):
"""Test _TokenDistance functions.
abydos.distance._TokenDistance
"""
cmp_j_crisp = Jaccard(intersection_type='crisp')
cmp_j_soft = Jaccard(intersection_type='soft')
cmp_j_fuzzy = Jaccard(
intersection_type='fuzzy', metric=DamerauLevenshtein(), threshold=0.4
)
cmp_j_linkage = Jaccard(intersection_type='linkage')
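    # The four comparators above exercise every intersection_type the
    # _TokenDistance base class supports: 'crisp' (exact token matching),
    # 'soft', 'fuzzy' (approximate matching governed by a metric and a
    # threshold), and 'linkage' (group linkage); the expected values in the
    # tests below differ only because of these strategies.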
def test_crisp_jaccard_sim(self):
"""Test abydos.distance.Jaccard.sim (crisp)."""
# Base cases
self.assertEqual(self.cmp_j_crisp.sim('', ''), 1.0)
self.assertEqual(self.cmp_j_crisp.sim('a', ''), 0.0)
self.assertEqual(self.cmp_j_crisp.sim('', 'a'), 0.0)
self.assertEqual(self.cmp_j_crisp.sim('abc', ''), 0.0)
self.assertEqual(self.cmp_j_crisp.sim('', 'abc'), 0.0)
self.assertEqual(self.cmp_j_crisp.sim('abc', 'abc'), 1.0)
self.assertEqual(self.cmp_j_crisp.sim('abcd', 'efgh'), 0.0)
self.assertAlmostEqual(
self.cmp_j_crisp.sim('Nigel', 'Niall'), 0.3333333333
)
self.assertAlmostEqual(
self.cmp_j_crisp.sim('Niall', 'Nigel'), 0.3333333333
)
self.assertAlmostEqual(
self.cmp_j_crisp.sim('Colin', 'Coiln'), 0.3333333333
)
self.assertAlmostEqual(
self.cmp_j_crisp.sim('Coiln', 'Colin'), 0.3333333333
)
self.assertAlmostEqual(
self.cmp_j_crisp.sim('ATCAACGAGT', 'AACGATTAG'), 0.5
)
def test_soft_jaccard_sim(self):
"""Test abydos.distance.Jaccard.sim (soft)."""
# Base cases
self.assertEqual(self.cmp_j_soft.sim('', ''), 1.0)
self.assertEqual(self.cmp_j_soft.sim('a', ''), 0.0)
self.assertEqual(self.cmp_j_soft.sim('', 'a'), 0.0)
self.assertEqual(self.cmp_j_soft.sim('abc', ''), 0.0)
self.assertEqual(self.cmp_j_soft.sim('', 'abc'), 0.0)
self.assertEqual(self.cmp_j_soft.sim('abc', 'abc'), 1.0)
self.assertAlmostEqual(self.cmp_j_soft.sim('abcd', 'efgh'), 0.11111111)
self.assertAlmostEqual(self.cmp_j_soft.sim('Nigel', 'Niall'), 0.5)
self.assertAlmostEqual(self.cmp_j_soft.sim('Niall', 'Nigel'), 0.5)
self.assertAlmostEqual(self.cmp_j_soft.sim('Colin', 'Coiln'), 0.6)
self.assertAlmostEqual(self.cmp_j_soft.sim('Coiln', 'Colin'), 0.6)
self.assertAlmostEqual(
self.cmp_j_soft.sim('ATCAACGAGT', 'AACGATTAG'), 0.68
)
self.assertAlmostEqual(
Jaccard(
intersection_type='soft', tokenizer=WhitespaceTokenizer()
).sim('junior system analyst', 'systems analyst'),
0.6190476190476191,
)
self.assertAlmostEqual(
Jaccard(
intersection_type='soft', tokenizer=WhitespaceTokenizer()
).sim('systems analyst', 'junior system analyst'),
0.6190476190476191,
)
with self.assertRaises(TypeError):
Jaccard(
intersection_type='soft',
metric=JaroWinkler(),
tokenizer=WhitespaceTokenizer(),
).sim('junior system analyst', 'systems analyst')
def test_fuzzy_jaccard_sim(self):
"""Test abydos.distance.Jaccard.sim (fuzzy)."""
# Base cases
self.assertEqual(self.cmp_j_fuzzy.sim('', ''), 1.0)
self.assertEqual(self.cmp_j_fuzzy.sim('a', ''), 0.0)
self.assertEqual(self.cmp_j_fuzzy.sim('', 'a'), 0.0)
self.assertEqual(self.cmp_j_fuzzy.sim('abc', ''), 0.0)
self.assertEqual(self.cmp_j_fuzzy.sim('', 'abc'), 0.0)
self.assertEqual(self.cmp_j_fuzzy.sim('abc', 'abc'), 1.0)
self.assertAlmostEqual(
self.cmp_j_fuzzy.sim('abcd', 'efgh'), 0.1111111111111111
)
self.assertAlmostEqual(self.cmp_j_fuzzy.sim('Nigel', 'Niall'), 0.5)
self.assertAlmostEqual(self.cmp_j_fuzzy.sim('Niall', 'Nigel'), 0.5)
self.assertAlmostEqual(self.cmp_j_fuzzy.sim('Colin', 'Coiln'), 0.6)
self.assertAlmostEqual(self.cmp_j_fuzzy.sim('Coiln', 'Colin'), 0.6)
self.assertAlmostEqual(
self.cmp_j_fuzzy.sim('ATCAACGAGT', 'AACGATTAG'), 0.68
)
self.assertEqual(sum(self.cmp_j_fuzzy._union().values()), 11.0)
self.assertAlmostEqual(
Jaccard(intersection_type='fuzzy').sim('synonym', 'antonym'),
0.3333333333333333,
)
def test_linkage_jaccard_sim(self):
"""Test abydos.distance.Jaccard.sim (group linkage)."""
# Base cases
self.assertEqual(self.cmp_j_linkage.sim('', ''), 1.0)
self.assertEqual(self.cmp_j_linkage.sim('a', ''), 0.0)
self.assertEqual(self.cmp_j_linkage.sim('', 'a'), 0.0)
self.assertEqual(self.cmp_j_linkage.sim('abc', ''), 0.0)
self.assertEqual(self.cmp_j_linkage.sim('', 'abc'), 0.0)
self.assertEqual(self.cmp_j_linkage.sim('abc', 'abc'), 1.0)
self.assertAlmostEqual(
self.cmp_j_linkage.sim('abcd', 'efgh'), 0.1111111111111111
)
self.assertAlmostEqual(self.cmp_j_linkage.sim('Nigel', 'Niall'), 0.5)
self.assertAlmostEqual(self.cmp_j_linkage.sim('Niall', 'Nigel'), 0.5)
self.assertAlmostEqual(self.cmp_j_linkage.sim('Colin', 'Coiln'), 0.6)
self.assertAlmostEqual(self.cmp_j_linkage.sim('Coiln', 'Colin'), 0.6)
self.assertAlmostEqual(
self.cmp_j_linkage.sim('ATCAACGAGT', 'AACGATTAG'), 0.68
)
self.assertAlmostEqual(
Jaccard(
intersection_type='linkage',
metric=JaroWinkler(),
threshold=0.2,
).sim('synonym', 'antonym'),
0.6,
)
def test_token_distance(self):
"""Test abydos.distance._TokenDistance members."""
self.assertAlmostEqual(
Jaccard(intersection_type='soft', alphabet=24).sim(
'ATCAACGAGT', 'AACGATTAG'
),
0.68,
)
self.assertAlmostEqual(
Jaccard(qval=1, alphabet='CGAT').sim('ATCAACGAGT', 'AACGATTAG'),
0.9,
)
self.assertAlmostEqual(
Jaccard(tokenizer=QSkipgrams(qval=3), alphabet='CGAT').sim(
'ATCAACGAGT', 'AACGATTAG'
),
0.6372795969773299,
)
self.assertAlmostEqual(
Jaccard(alphabet=None).sim('synonym', 'antonym'),
0.3333333333333333,
)
self.assertAlmostEqual(
Jaccard(tokenizer=QSkipgrams(qval=3)).sim('synonym', 'antonym'),
0.34146341463414637,
)
src_ctr = Counter({'a': 5, 'b': 2, 'c': 10})
tar_ctr = Counter({'a': 2, 'c': 1, 'd': 3, 'e': 12})
self.assertAlmostEqual(Jaccard().sim(src_ctr, tar_ctr), 0.09375)
self.assertAlmostEqual(
SokalMichener(normalizer='proportional').sim('synonym', 'antonym'),
0.984777917351113,
)
self.assertAlmostEqual(
SokalMichener(normalizer='log').sim('synonym', 'antonym'),
1.2385752469545532,
)
self.assertAlmostEqual(
SokalMichener(normalizer='exp', alphabet=0).sim(
'synonym', 'antonym'
),
3.221246147982545e18,
)
self.assertAlmostEqual(
SokalMichener(normalizer='laplace').sim('synonym', 'antonym'),
0.98856416772554,
)
self.assertAlmostEqual(
SokalMichener(normalizer='inverse').sim('synonym', 'antonym'),
197.95790155440417,
)
self.assertAlmostEqual(
SokalMichener(normalizer='complement').sim('synonym', 'antonym'),
1.0204081632653061,
)
self.assertAlmostEqual(
SokalMichener(normalizer='base case').sim('synonym', 'antonym'),
0.9897959183673469,
)
self.assertAlmostEqual(
SokalMichener().sim('synonym', 'antonym'), 0.9897959183673469
)
sm = SokalMichener()
sm._tokenize('synonym', 'antonym') # noqa: SF01
self.assertEqual(
sm._get_tokens(), # noqa: SF01
(
Counter(
{
'$s': 1,
'sy': 1,
'yn': 1,
'no': 1,
'on': 1,
'ny': 1,
'ym': 1,
'm#': 1,
}
),
Counter(
{
'$a': 1,
'an': 1,
'nt': 1,
'to': 1,
'on': 1,
'ny': 1,
'ym': 1,
'm#': 1,
}
),
),
)
self.assertEqual(sm._src_card(), 8) # noqa: SF01
self.assertEqual(sm._tar_card(), 8) # noqa: SF01
self.assertEqual(
sm._symmetric_difference(), # noqa: SF01
Counter(
{
'$s': 1,
'sy': 1,
'yn': 1,
'no': 1,
'$a': 1,
'an': 1,
'nt': 1,
'to': 1,
}
),
)
self.assertEqual(sm._symmetric_difference_card(), 8) # noqa: SF01
self.assertEqual(sm._total_complement_card(), 772) # noqa: SF01
self.assertEqual(sm._population_card(), 788) # noqa: SF01
self.assertEqual(
sm._union(), # noqa: SF01
Counter(
{
'$s': 1,
'sy': 1,
'yn': 1,
'no': 1,
'on': 1,
'ny': 1,
'ym': 1,
'm#': 1,
'$a': 1,
'an': 1,
'nt': 1,
'to': 1,
}
),
)
self.assertEqual(sm._union_card(), 12) # noqa: SF01
self.assertEqual(
sm._difference(), # noqa: SF01
Counter(
{
'$s': 1,
'sy': 1,
'yn': 1,
'no': 1,
'on': 0,
'ny': 0,
'ym': 0,
'm#': 0,
'$a': -1,
'an': -1,
'nt': -1,
'to': -1,
}
),
)
self.assertEqual(
sm._intersection(), # noqa: SF01
Counter({'on': 1, 'ny': 1, 'ym': 1, 'm#': 1}),
)
self.assertEqual(
sm._get_confusion_table(), # noqa: SF01
ConfusionTable(tp=4, tn=772, fp=4, fn=4),
)
sm = SokalMichener(
alphabet=Counter({'C': 20, 'G': 20, 'A': 20, 'T': 20}), qval=1
)
sm._tokenize('ATCAACGAGT', 'AACGATTAG') # noqa: SF01
self.assertEqual(sm._total_complement_card(), 61) # noqa: SF01
self.assertAlmostEqual(
self.cmp_j_linkage.sim('abandonned', 'abandoned'),
0.9090909090909091,
)
self.assertAlmostEqual(
self.cmp_j_linkage.sim('abundacies', 'abundances'),
0.6923076923076923,
)
# Some additional constructors needed to complete test coverage
self.assertAlmostEqual(
Jaccard(alphabet=None, qval=range(2, 4)).sim('abc', 'abcd'),
0.42857142857142855,
)
self.assertAlmostEqual(
AverageLinkage(qval=range(2, 4)).sim('abc', 'abcd'),
0.22558922558922556,
)
self.assertAlmostEqual(
Jaccard(alphabet='abcdefghijklmnop', qval=range(2, 4)).sim(
'abc', 'abcd'
),
0.42857142857142855,
)
self.assertAlmostEqual(
Jaccard(
alphabet='abcdefghijklmnop', tokenizer=WhitespaceTokenizer()
).sim('abc', 'abcd'),
0.0,
)
self.assertAlmostEqual(
Jaccard(alphabet=list('abcdefghijklmnop')).sim('abc', 'abcd'), 0.5
)
self.assertAlmostEqual(
Jaccard(tokenizer=CharacterTokenizer()).sim('abc', 'abcd'), 0.75
)
cmp_j_soft = Jaccard(intersection_type='soft')
self.assertEqual(cmp_j_soft._src_card(), 0) # noqa: SF01
self.assertEqual(cmp_j_soft._tar_card(), 0) # noqa: SF01
self.assertEqual(cmp_j_soft._src_only(), Counter()) # noqa: SF01
self.assertEqual(cmp_j_soft._tar_only(), Counter()) # noqa: SF01
self.assertEqual(cmp_j_soft._total(), Counter()) # noqa: SF01
self.assertEqual(cmp_j_soft._union(), Counter()) # noqa: SF01
self.assertEqual(cmp_j_soft._difference(), Counter()) # noqa: SF01
cmp_j_soft.sim('abcd', 'abcde')
self.assertEqual(cmp_j_soft._src_card(), 5) # noqa: SF01
self.assertEqual(cmp_j_soft._tar_card(), 6) # noqa: SF01
self.assertEqual(
cmp_j_soft._src_only(), Counter({'#': 0.5}) # noqa: SF01
)
self.assertEqual(
cmp_j_soft._tar_only(), Counter({'e#': 1, 'e': 0.5}) # noqa: SF01
)
self.assertEqual(
cmp_j_soft._total(), # noqa: SF01
Counter(
{
'e#': 1,
'e': 0.5,
'#': 0.5,
'$a': 2,
'ab': 2,
'bc': 2,
'cd': 2,
'd': 1.0,
}
),
)
self.assertEqual(
cmp_j_soft._union(), # noqa: SF01
Counter(
{
'e#': 1,
'e': 0.5,
'#': 0.5,
'$a': 1,
'ab': 1,
'bc': 1,
'cd': 1,
'd': 0.5,
}
),
)
self.assertEqual(
cmp_j_soft._difference(), # noqa: SF01
Counter({'#': 0.5, 'e#': -1, 'e': -0.5}),
)
if __name__ == '__main__':
unittest.main()
| chrislit/abydos | tests/distance/test_distance__token_distance.py | Python | gpl-3.0 | 15,537 |
#!/usr/bin/env python
import sys
import re
"""Rewrite the doxygen \\file lines to have the full path to the file."""
def fix(filename):
contents = open(filename, "r").read()
contents = re.sub(
"\\\\file .*\\.h",
"\\\\file " + filename[len("build/include/"):],
contents,
1)
contents = re.sub(
"\\\\file .*/.*\\.h",
"\\\\file " + filename[len("build/include/"):],
contents,
1)
f = open(filename, "wr")
f.truncate()
f.write(contents)
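# Illustrative example (path hypothetical): for the header
# build/include/IMP/algebra/Vector3D.h containing "\file Vector3D.h",
# fix() rewrites that line in place to "\file IMP/algebra/Vector3D.h".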
if __name__ == '__main__':
for f in sys.argv[1:]:
fix(f)
| shanot/imp | tools/maintenance/fix_doxygen_file_lines.py | Python | gpl-3.0 | 593 |
# coding=utf-8
import unittest
"""69. Sqrt(x)
https://leetcode.com/problems/sqrtx/description/
Implement `int sqrt(int x)`.
Compute and return the square root of _x_ , where _x_ is guaranteed to be a
non-negative integer.
Since the return type is an integer, the decimal digits are truncated and only
the integer part of the result is returned.
**Example 1:**
**Input:** 4
**Output:** 2
**Example 2:**
**Input:** 8
**Output:** 2
**Explanation:** The square root of 8 is 2.82842..., and since
the decimal part is truncated, 2 is returned.
Similar Questions:
Pow(x, n) (powx-n)
Valid Perfect Square (valid-perfect-square)
"""
class Solution(object):
    def mySqrt(self, x):
        """
        :type x: int
        :rtype: int
        """
        # Integer Newton iteration: converges to floor(sqrt(x)).
        r = x
        while r * r > x:
            r = (r + x // r) // 2
        return r
    def test(self):
        assert self.mySqrt(0) == 0
        assert self.mySqrt(4) == 2
        assert self.mySqrt(8) == 2
if __name__ == "__main__":
unittest.main()
| openqt/algorithms | leetcode/python/lc069-sqrtx.py | Python | gpl-3.0 | 925 |
from cloudbot import hook
from cloudbot.util import http
api_root = 'http://api.rottentomatoes.com/api/public/v1.0/'
movie_search_url = api_root + 'movies.json'
movie_reviews_url = api_root + 'movies/%s/reviews.json'
@hook.command('rt')
def rottentomatoes(inp, bot=None):
"""rt <title> -- gets ratings for <title> from Rotten Tomatoes"""
api_key = bot.config.get("api_keys", {}).get("rottentomatoes", None)
if not api_key:
return "error: no api key set"
title = inp.strip()
results = http.get_json(movie_search_url, q=title, apikey=api_key)
if results['total'] == 0:
return 'No results.'
movie = results['movies'][0]
title = movie['title']
movie_id = movie['id']
critics_score = movie['ratings']['critics_score']
audience_score = movie['ratings']['audience_score']
url = movie['links']['alternate']
if critics_score == -1:
return
reviews = http.get_json(movie_reviews_url % movie_id, apikey=api_key, review_type='all')
review_count = reviews['total']
    fresh = int(critics_score * review_count / 100)  # whole number of positive reviews
    rotten = review_count - fresh
return "{} - Critics Rating: \x02{}%\x02 ({} liked, {} disliked) " \
"Audience Rating: \x02{}%\x02 - {}".format(title, critics_score, fresh, rotten, audience_score, url)
| Zarthus/CloudBotRefresh | plugins/rottentomatoes.py | Python | gpl-3.0 | 1,312 |
# -*- coding: utf-8 -*-
# This file is part of translate.
#
# translate is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# translate is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# translate. If not, see <http://www.gnu.org/licenses/>.
"""
translate.client.exceptions
~~~~~~~~~~~~~~~~~~~~~~~~~~~
These are exception classes that are used by translate.client.Client. Most of
these classes are simple wrappers, just to differentiate different types of
errors. They can be constructed from a requests response object, or JSON
,returned from an API call.
"""
import json
import logging
log = logging.getLogger(__name__)
class TranslateException(Exception):
"""Mostly empty base class for exceptions relating to translate.
This class is used as a catch-all for exceptions thrown by the server. If
possible, a more specific subclass of this exception will be used.
"""
@classmethod
def from_json(cls, obj, status_code=400):
"""Return the proper exception class from the JSON object returned from
the server.
"""
exceptions = {
429: RateLimitException,
431: SizeLimitException,
452: TranslationException,
453: TranslatorException,
454: BadLanguagePairException
}
try:
code = obj['code'] if ('code' in obj) else status_code
klass = exceptions[code]
return klass.from_json(obj)
except KeyError:
return cls("Unknown error occured: " + repr(obj))
@classmethod
def from_response(cls, resp):
"""Generate a proper exception from the given requests response object
and return it.
"""
try:
obj = json.loads(resp.text)
return TranslateException.from_json(obj, resp.status_code)
except ValueError:
log.error("Was given invalid JSON, bailing...")
return TranslateException.from_json({}, resp.status_code)
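    # Illustrative sketch (payload hypothetical): a response whose body is
    #   '{"code": 429, "details": {"limit": 10, "per": 60, "reset": 0}}'
    # makes from_response() return a RateLimitException via from_json().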
class HTTPException(TranslateException):
"""Raised when an error occurs with the HTTP connection to the server
(e.g. host is not available, doesn't respond, etc.)
"""
pass
class RateLimitException(TranslateException):
"""Exception raised when a client goes over the ratelimit."""
def __init__(self, limit, per, reset):
self.limit = limit
self.per = per
self.reset = reset
@classmethod
def from_json(cls, obj):
try:
details = obj.get('details', {})
return cls(limit=details['limit'], per=details['per'],
reset=details['reset'])
except KeyError:
log.error("Received invalid JSON: " + repr(obj))
return cls(limit=0, per=0, reset=0)
def __str__(self):
return "Rate limit exceeded: {0} reqs / {1}s. Try again at {2}".format(
self.limit, self.per, self.reset)
class SizeLimitException(TranslateException):
"""Exception raised when a client tries to translate a text that is over
the server's size limit.
"""
def __init__(self, len, limit):
self.len = len
self.limit = limit
@classmethod
def from_json(cls, obj):
try:
details = obj['details']
return cls(len=details['len'], limit=details['limit'])
except KeyError:
log.error("Received invalid JSON: %s", repr(obj))
return cls(len=0, limit=0)
def __str__(self):
return "Specified text was too large: %d bytes. Maximum is %d bytes"\
.format(self.len, self.limit)
class TranslationException(TranslateException):
"""Returned on bad parameters to /translate"""
@classmethod
def from_json(cls, obj):
try:
msg = obj['message']
return cls("Bad parameters to translate API method: " + msg)
except KeyError:
log.error("Received invalid JSON: " + repr(obj))
return cls("Bad parameters to translate API method.")
class TranslatorException(TranslateException):
"""Returned when bad parameters are passed to the /translate method. (This
probably indicates some kind of API / Client bug.)
"""
def __init__(self, lang_pair, tried):
self.lang_pair = lang_pair
self.tried = tried
@classmethod
def from_json(cls, obj):
try:
details = obj['details']
pair = (details['from'], details['to'])
return cls(lang_pair=pair, tried=details['tried'])
except KeyError:
log.error("Received invalid JSON: " + repr(obj))
return cls(lang_pair=('unknown', 'unknown'), tried=['unknown'])
def __str__(self):
return "Failed to translate {0} (tried: {1})".format(self.lang_pair,
self.tried)
class BadLanguagePairException(TranslateException):
"""Raised when the client tried to translate using a language pair not
supported by the server
"""
def __init__(self, lang_pair):
self.lang_pair = lang_pair
@classmethod
def from_json(cls, obj):
try:
details = obj['details']
return cls(lang_pair=(details['from'], details['to']))
except KeyError:
log.error("Received invalid JSON: " + repr(obj))
return cls(lang_pair=('unknown', 'unknown'))
def __str__(self):
return "Unsupported language pair: {0}".format(self.lang_pair)
| erik/translate | translate/client/exceptions.py | Python | gpl-3.0 | 5,955 |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'agent_connect_dlg.ui'
#
# Created: Tue Oct 12 14:22:17 2010
# by: PyQt4 UI code generator 4.7.3
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
class Ui_Dialog(object):
def setupUi(self, Dialog):
Dialog.setObjectName("Dialog")
Dialog.resize(253, 111)
self.gridLayout = QtGui.QGridLayout(Dialog)
self.gridLayout.setObjectName("gridLayout")
self.label = QtGui.QLabel(Dialog)
self.label.setObjectName("label")
self.gridLayout.addWidget(self.label, 0, 0, 1, 1)
self.agent_addrinfo = QtGui.QComboBox(Dialog)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.agent_addrinfo.sizePolicy().hasHeightForWidth())
self.agent_addrinfo.setSizePolicy(sizePolicy)
self.agent_addrinfo.setEditable(True)
self.agent_addrinfo.setObjectName("agent_addrinfo")
self.gridLayout.addWidget(self.agent_addrinfo, 0, 1, 1, 1)
self.disconnect_from_server = QtGui.QCheckBox(Dialog)
self.disconnect_from_server.setObjectName("disconnect_from_server")
self.gridLayout.addWidget(self.disconnect_from_server, 1, 0, 1, 2)
self.buttonBox = QtGui.QDialogButtonBox(Dialog)
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Cancel|QtGui.QDialogButtonBox.Ok)
self.buttonBox.setObjectName("buttonBox")
self.gridLayout.addWidget(self.buttonBox, 2, 0, 1, 2)
self.retranslateUi(Dialog)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL("accepted()"), Dialog.accept)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL("rejected()"), Dialog.reject)
QtCore.QMetaObject.connectSlotsByName(Dialog)
def retranslateUi(self, Dialog):
Dialog.setWindowTitle(QtGui.QApplication.translate("Dialog", "Dialog", None, QtGui.QApplication.UnicodeUTF8))
self.label.setText(QtGui.QApplication.translate("Dialog", "Agent", None, QtGui.QApplication.UnicodeUTF8))
self.disconnect_from_server.setText(QtGui.QApplication.translate("Dialog", "Disconnect Clients from server", None, QtGui.QApplication.UnicodeUTF8))
if __name__ == "__main__":
import sys
app = QtGui.QApplication(sys.argv)
Dialog = QtGui.QDialog()
ui = Ui_Dialog()
ui.setupUi(Dialog)
Dialog.show()
sys.exit(app.exec_())
| mnunberg/yobot | py/gui/agent_connect_dlg.py | Python | gpl-3.0 | 2,632 |
from Ponos import init_db
from env_vars import *
import sqlite3
import os
print(DB_PATH)
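# Truncate any existing database file; init_db() then recreates the schema.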
open(DB_PATH, 'w').close()
init_db()
| shadowjig/ponos | initalize_db.py | Python | gpl-3.0 | 127 |
# Adder using += 1
# Read the inputs
a = input("This is an adder!\nEnter a: ")
b = input("Enter b: ")
# Convert the strings to numbers
a = int(a)
b = int(b)
# use a new variable so the inputs stay unchanged
result = a
i = 0
if b > 0:  # if b is greater than zero
    while i < b:  # then run the loop upwards
        result += 1
        i += 1
elif b < 0:  # if b is less than zero
    while i > b:  # then run the loop downwards
        result -= 1
        i -= 1
print("\nThe result is: " + str(result))
| Informatik-AG-KGN-2016/Dokumente | 2016-11-28/aufgabe-addierer.py | Python | gpl-3.0 | 587 |
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import json
import frappe
from erpnext.accounts.party import get_party_account_currency
from erpnext.controllers.accounts_controller import get_taxes_and_charges
from erpnext.setup.utils import get_exchange_rate
from erpnext.stock.get_item_details import get_pos_profile
from frappe import _
from frappe.core.doctype.communication.email import make
from frappe.utils import nowdate, cint
from six import string_types, iteritems
@frappe.whitelist()
def get_pos_data():
doc = frappe.new_doc('Sales Invoice')
doc.is_pos = 1
pos_profile = get_pos_profile(doc.company) or {}
if not pos_profile:
frappe.throw(_("POS Profile is required to use Point-of-Sale"))
if not doc.company:
doc.company = pos_profile.get('company')
doc.update_stock = pos_profile.get('update_stock')
if pos_profile.get('name'):
pos_profile = frappe.get_doc('POS Profile', pos_profile.get('name'))
pos_profile.validate()
company_data = get_company_data(doc.company)
update_pos_profile_data(doc, pos_profile, company_data)
update_multi_mode_option(doc, pos_profile)
default_print_format = pos_profile.get('print_format') or "Point of Sale"
print_template = frappe.db.get_value('Print Format', default_print_format, 'html')
items_list = get_items_list(pos_profile, doc.company)
customers = get_customers_list(pos_profile)
doc.plc_conversion_rate = update_plc_conversion_rate(doc, pos_profile)
return {
'doc': doc,
'default_customer': pos_profile.get('customer'),
'items': items_list,
'item_groups': get_item_groups(pos_profile),
'customers': customers,
'address': get_customers_address(customers),
'contacts': get_contacts(customers),
'serial_no_data': get_serial_no_data(pos_profile, doc.company),
'batch_no_data': get_batch_no_data(),
'barcode_data': get_barcode_data(items_list),
'tax_data': get_item_tax_data(),
'price_list_data': get_price_list_data(doc.selling_price_list, doc.plc_conversion_rate),
'customer_wise_price_list': get_customer_wise_price_list(),
'bin_data': get_bin_data(pos_profile),
'pricing_rules': get_pricing_rule_data(doc),
'print_template': print_template,
'pos_profile': pos_profile,
'meta': get_meta()
}
def update_plc_conversion_rate(doc, pos_profile):
conversion_rate = 1.0
price_list_currency = frappe.get_cached_value("Price List", doc.selling_price_list, "currency")
if pos_profile.get("currency") != price_list_currency:
conversion_rate = get_exchange_rate(price_list_currency,
pos_profile.get("currency"), nowdate(), args="for_selling") or 1.0
return conversion_rate
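# Illustrative sketch (currencies hypothetical): with a selling price list in
# USD and a POS profile in EUR, update_plc_conversion_rate returns today's
# USD -> EUR selling rate, and get_price_list_data multiplies every item
# price by that factor.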
def get_meta():
doctype_meta = {
'customer': frappe.get_meta('Customer'),
'invoice': frappe.get_meta('Sales Invoice')
}
for row in frappe.get_all('DocField', fields=['fieldname', 'options'],
filters={'parent': 'Sales Invoice', 'fieldtype': 'Table'}):
doctype_meta[row.fieldname] = frappe.get_meta(row.options)
return doctype_meta
def get_company_data(company):
return frappe.get_all('Company', fields=["*"], filters={'name': company})[0]
def update_pos_profile_data(doc, pos_profile, company_data):
doc.campaign = pos_profile.get('campaign')
if pos_profile and not pos_profile.get('country'):
pos_profile.country = company_data.country
doc.write_off_account = pos_profile.get('write_off_account') or \
company_data.write_off_account
doc.change_amount_account = pos_profile.get('change_amount_account') or \
company_data.default_cash_account
doc.taxes_and_charges = pos_profile.get('taxes_and_charges')
if doc.taxes_and_charges:
update_tax_table(doc)
doc.currency = pos_profile.get('currency') or company_data.default_currency
doc.conversion_rate = 1.0
if doc.currency != company_data.default_currency:
doc.conversion_rate = get_exchange_rate(doc.currency, company_data.default_currency, doc.posting_date, args="for_selling")
doc.selling_price_list = pos_profile.get('selling_price_list') or \
frappe.db.get_value('Selling Settings', None, 'selling_price_list')
doc.naming_series = pos_profile.get('naming_series') or 'SINV-'
doc.letter_head = pos_profile.get('letter_head') or company_data.default_letter_head
doc.ignore_pricing_rule = pos_profile.get('ignore_pricing_rule') or 0
doc.apply_discount_on = pos_profile.get('apply_discount_on') or 'Grand Total'
doc.customer_group = pos_profile.get('customer_group') or get_root('Customer Group')
doc.territory = pos_profile.get('territory') or get_root('Territory')
doc.terms = frappe.db.get_value('Terms and Conditions', pos_profile.get('tc_name'), 'terms') or doc.terms or ''
doc.offline_pos_name = ''
def get_root(table):
root = frappe.db.sql(""" select name from `tab%(table)s` having
min(lft)""" % {'table': table}, as_dict=1)
return root[0].name
def update_multi_mode_option(doc, pos_profile):
from frappe.model import default_fields
if not pos_profile or not pos_profile.get('payments'):
for payment in get_mode_of_payment(doc):
payments = doc.append('payments', {})
payments.mode_of_payment = payment.parent
payments.account = payment.default_account
payments.type = payment.type
return
for payment_mode in pos_profile.payments:
payment_mode = payment_mode.as_dict()
for fieldname in default_fields:
if fieldname in payment_mode:
del payment_mode[fieldname]
doc.append('payments', payment_mode)
def get_mode_of_payment(doc):
return frappe.db.sql("""
select mpa.default_account, mpa.parent, mp.type as type
from `tabMode of Payment Account` mpa,`tabMode of Payment` mp
where mpa.parent = mp.name and mpa.company = %(company)s and mp.enabled = 1""",
{'company': doc.company}, as_dict=1)
def update_tax_table(doc):
taxes = get_taxes_and_charges('Sales Taxes and Charges Template', doc.taxes_and_charges)
for tax in taxes:
doc.append('taxes', tax)
def get_items_list(pos_profile, company):
cond = ""
args_list = []
if pos_profile.get('item_groups'):
# Get items based on the item groups defined in the POS profile
for d in pos_profile.get('item_groups'):
args_list.extend([d.name for d in get_child_nodes('Item Group', d.item_group)])
if args_list:
cond = "and i.item_group in (%s)" % (', '.join(['%s'] * len(args_list)))
return frappe.db.sql("""
select
i.name, i.item_code, i.item_name, i.description, i.item_group, i.has_batch_no,
i.has_serial_no, i.is_stock_item, i.brand, i.stock_uom, i.image,
id.expense_account, id.selling_cost_center, id.default_warehouse,
i.sales_uom, c.conversion_factor, it.item_tax_template, it.valid_from
from
`tabItem` i
left join `tabItem Default` id on id.parent = i.name and id.company = %s
left join `tabItem Tax` it on it.parent = i.name
left join `tabUOM Conversion Detail` c on i.name = c.parent and i.sales_uom = c.uom
where
i.disabled = 0 and i.has_variants = 0 and i.is_sales_item = 1
{cond}
group by i.item_code
""".format(cond=cond), tuple([company] + args_list), as_dict=1)
def get_item_groups(pos_profile):
item_group_dict = {}
item_groups = frappe.db.sql("""Select name,
lft, rgt from `tabItem Group` order by lft""", as_dict=1)
for data in item_groups:
item_group_dict[data.name] = [data.lft, data.rgt]
return item_group_dict
def get_customers_list(pos_profile={}):
cond = "1=1"
customer_groups = []
if pos_profile.get('customer_groups'):
# Get customers based on the customer groups defined in the POS profile
for d in pos_profile.get('customer_groups'):
customer_groups.extend([d.get('name') for d in get_child_nodes('Customer Group', d.get('customer_group'))])
cond = "customer_group in (%s)" % (', '.join(['%s'] * len(customer_groups)))
return frappe.db.sql(""" select name, customer_name, customer_group,
territory, customer_pos_id from tabCustomer where disabled = 0
and {cond}""".format(cond=cond), tuple(customer_groups), as_dict=1) or {}
def get_customers_address(customers):
customer_address = {}
if isinstance(customers, string_types):
customers = [frappe._dict({'name': customers})]
for data in customers:
address = frappe.db.sql(""" select name, address_line1, address_line2, city, state,
email_id, phone, fax, pincode from `tabAddress` where is_primary_address =1 and name in
(select parent from `tabDynamic Link` where link_doctype = 'Customer' and link_name = %s
and parenttype = 'Address')""", data.name, as_dict=1)
address_data = {}
if address:
address_data = address[0]
address_data.update({'full_name': data.customer_name, 'customer_pos_id': data.customer_pos_id})
customer_address[data.name] = address_data
return customer_address
def get_contacts(customers):
customer_contact = {}
if isinstance(customers, string_types):
customers = [frappe._dict({'name': customers})]
for data in customers:
contact = frappe.db.sql(""" select email_id, phone, mobile_no from `tabContact`
where is_primary_contact=1 and name in
(select parent from `tabDynamic Link` where link_doctype = 'Customer' and link_name = %s
and parenttype = 'Contact')""", data.name, as_dict=1)
if contact:
customer_contact[data.name] = contact[0]
return customer_contact
def get_child_nodes(group_type, root):
lft, rgt = frappe.db.get_value(group_type, root, ["lft", "rgt"])
return frappe.db.sql(""" Select name, lft, rgt from `tab{tab}` where
lft >= {lft} and rgt <= {rgt} order by lft""".format(tab=group_type, lft=lft, rgt=rgt), as_dict=1)
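# Note: lft/rgt are the nested set bounds frappe keeps for tree doctypes;
# every descendant of `root` has its lft/rgt inside the root's [lft, rgt]
# interval, which is what the range query above relies on.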
def get_serial_no_data(pos_profile, company):
# get itemwise serial no data
# example {'Nokia Lumia 1020': {'SN0001': 'Pune'}}
# where Nokia Lumia 1020 is item code, SN0001 is serial no and Pune is warehouse
cond = "1=1"
if pos_profile.get('update_stock') and pos_profile.get('warehouse'):
cond = "warehouse = %(warehouse)s"
serial_nos = frappe.db.sql("""select name, warehouse, item_code
from `tabSerial No` where {0} and company = %(company)s """.format(cond),{
'company': company, 'warehouse': frappe.db.escape(pos_profile.get('warehouse'))
}, as_dict=1)
itemwise_serial_no = {}
for sn in serial_nos:
if sn.item_code not in itemwise_serial_no:
itemwise_serial_no.setdefault(sn.item_code, {})
itemwise_serial_no[sn.item_code][sn.name] = sn.warehouse
return itemwise_serial_no
def get_batch_no_data():
	# get itemwise batch no data
	# example: {'LED-GRE': ['Batch001', 'Batch002']}
	# where LED-GRE is the item code and Batch001/Batch002 are its batch numbers
itemwise_batch = {}
batches = frappe.db.sql("""select name, item from `tabBatch`
where ifnull(expiry_date, '4000-10-10') >= curdate()""", as_dict=1)
for batch in batches:
if batch.item not in itemwise_batch:
itemwise_batch.setdefault(batch.item, [])
itemwise_batch[batch.item].append(batch.name)
return itemwise_batch
def get_barcode_data(items_list):
	# get itemwise barcode data
	# example: {'LED-GRE': ['1234567890123']}
	# where LED-GRE is the item code and the list holds its barcodes
itemwise_barcode = {}
for item in items_list:
barcodes = frappe.db.sql("""
select barcode from `tabItem Barcode` where parent = %s
""", item.item_code, as_dict=1)
for barcode in barcodes:
if item.item_code not in itemwise_barcode:
itemwise_barcode.setdefault(item.item_code, [])
itemwise_barcode[item.item_code].append(barcode.get("barcode"))
return itemwise_barcode
def get_item_tax_data():
# get default tax of an item
# example: {'Consulting Services': {'Excise 12 - TS': '12.000'}}
itemwise_tax = {}
taxes = frappe.db.sql(""" select parent, tax_type, tax_rate from `tabItem Tax Template Detail`""", as_dict=1)
for tax in taxes:
if tax.parent not in itemwise_tax:
itemwise_tax.setdefault(tax.parent, {})
itemwise_tax[tax.parent][tax.tax_type] = tax.tax_rate
return itemwise_tax
def get_price_list_data(selling_price_list, conversion_rate):
itemwise_price_list = {}
price_lists = frappe.db.sql("""Select ifnull(price_list_rate, 0) as price_list_rate,
item_code from `tabItem Price` ip where price_list = %(price_list)s""",
{'price_list': selling_price_list}, as_dict=1)
for item in price_lists:
itemwise_price_list[item.item_code] = item.price_list_rate * conversion_rate
return itemwise_price_list
def get_customer_wise_price_list():
customer_wise_price = {}
customer_price_list_mapping = frappe._dict(frappe.get_all('Customer',fields = ['default_price_list', 'name'], as_list=1))
price_lists = frappe.db.sql(""" Select ifnull(price_list_rate, 0) as price_list_rate,
item_code, price_list from `tabItem Price` """, as_dict=1)
for item in price_lists:
if item.price_list and customer_price_list_mapping.get(item.price_list):
customer_wise_price.setdefault(customer_price_list_mapping.get(item.price_list),{}).setdefault(
item.item_code, item.price_list_rate
)
return customer_wise_price
def get_bin_data(pos_profile):
itemwise_bin_data = {}
filters = { 'actual_qty': ['>', 0] }
if pos_profile.get('warehouse'):
filters.update({ 'warehouse': pos_profile.get('warehouse') })
bin_data = frappe.db.get_all('Bin', fields = ['item_code', 'warehouse', 'actual_qty'], filters=filters)
for bins in bin_data:
if bins.item_code not in itemwise_bin_data:
itemwise_bin_data.setdefault(bins.item_code, {})
itemwise_bin_data[bins.item_code][bins.warehouse] = bins.actual_qty
return itemwise_bin_data
def get_pricing_rule_data(doc):
pricing_rules = ""
if doc.ignore_pricing_rule == 0:
pricing_rules = frappe.db.sql(""" Select * from `tabPricing Rule` where docstatus < 2
and ifnull(for_price_list, '') in (%(price_list)s, '') and selling = 1
and ifnull(company, '') in (%(company)s, '') and disable = 0 and %(date)s
between ifnull(valid_from, '2000-01-01') and ifnull(valid_upto, '2500-12-31')
order by priority desc, name desc""",
{'company': doc.company, 'price_list': doc.selling_price_list, 'date': nowdate()}, as_dict=1)
return pricing_rules
@frappe.whitelist()
def make_invoice(pos_profile, doc_list={}, email_queue_list={}, customers_list={}):
import json
if isinstance(doc_list, string_types):
doc_list = json.loads(doc_list)
if isinstance(email_queue_list, string_types):
email_queue_list = json.loads(email_queue_list)
if isinstance(customers_list, string_types):
customers_list = json.loads(customers_list)
customers_list = make_customer_and_address(customers_list)
name_list = []
for docs in doc_list:
for name, doc in iteritems(docs):
if not frappe.db.exists('Sales Invoice', {'offline_pos_name': name}):
if isinstance(doc, dict):
validate_records(doc)
si_doc = frappe.new_doc('Sales Invoice')
si_doc.offline_pos_name = name
si_doc.update(doc)
si_doc.set_posting_time = 1
si_doc.customer = get_customer_id(doc)
si_doc.due_date = doc.get('posting_date')
name_list = submit_invoice(si_doc, name, doc, name_list)
else:
doc.due_date = doc.get('posting_date')
doc.customer = get_customer_id(doc)
doc.set_posting_time = 1
doc.offline_pos_name = name
name_list = submit_invoice(doc, name, doc, name_list)
else:
name_list.append(name)
email_queue = make_email_queue(email_queue_list)
if isinstance(pos_profile, string_types):
pos_profile = json.loads(pos_profile)
customers = get_customers_list(pos_profile)
return {
'invoice': name_list,
'email_queue': email_queue,
'customers': customers_list,
'synced_customers_list': customers,
'synced_address': get_customers_address(customers),
'synced_contacts': get_contacts(customers)
}
def validate_records(doc):
validate_item(doc)
def get_customer_id(doc, customer=None):
cust_id = None
if doc.get('customer_pos_id'):
cust_id = frappe.db.get_value('Customer',{'customer_pos_id': doc.get('customer_pos_id')}, 'name')
if not cust_id:
customer = customer or doc.get('customer')
if frappe.db.exists('Customer', customer):
cust_id = customer
else:
cust_id = add_customer(doc)
return cust_id
def make_customer_and_address(customers):
customers_list = []
for customer, data in iteritems(customers):
data = json.loads(data)
cust_id = get_customer_id(data, customer)
if not cust_id:
cust_id = add_customer(data)
else:
frappe.db.set_value("Customer", cust_id, "customer_name", data.get('full_name'))
make_contact(data, cust_id)
make_address(data, cust_id)
customers_list.append(customer)
frappe.db.commit()
return customers_list
def add_customer(data):
customer = data.get('full_name') or data.get('customer')
if frappe.db.exists("Customer", customer.strip()):
return customer.strip()
customer_doc = frappe.new_doc('Customer')
customer_doc.customer_name = data.get('full_name') or data.get('customer')
customer_doc.customer_pos_id = data.get('customer_pos_id')
customer_doc.customer_type = 'Company'
customer_doc.customer_group = get_customer_group(data)
customer_doc.territory = get_territory(data)
customer_doc.flags.ignore_mandatory = True
customer_doc.save(ignore_permissions=True)
frappe.db.commit()
return customer_doc.name
def get_territory(data):
if data.get('territory'):
return data.get('territory')
return frappe.db.get_single_value('Selling Settings','territory') or _('All Territories')
def get_customer_group(data):
if data.get('customer_group'):
return data.get('customer_group')
return frappe.db.get_single_value('Selling Settings', 'customer_group') or frappe.db.get_value('Customer Group', {'is_group': 0}, 'name')
def make_contact(args, customer):
if args.get('email_id') or args.get('phone'):
name = frappe.db.get_value('Dynamic Link',
{'link_doctype': 'Customer', 'link_name': customer, 'parenttype': 'Contact'}, 'parent')
args = {
'first_name': args.get('full_name'),
'email_id': args.get('email_id'),
'phone': args.get('phone')
}
doc = frappe.new_doc('Contact')
if name:
doc = frappe.get_doc('Contact', name)
doc.update(args)
doc.is_primary_contact = 1
if not name:
doc.append('links', {
'link_doctype': 'Customer',
'link_name': customer
})
doc.flags.ignore_mandatory = True
doc.save(ignore_permissions=True)
def make_address(args, customer):
if not args.get('address_line1'):
return
name = args.get('name')
if not name:
data = get_customers_address(customer)
name = data[customer].get('name') if data else None
if name:
address = frappe.get_doc('Address', name)
else:
address = frappe.new_doc('Address')
if args.get('company'):
address.country = frappe.get_cached_value('Company',
args.get('company'), 'country')
address.append('links', {
'link_doctype': 'Customer',
'link_name': customer
})
address.is_primary_address = 1
address.is_shipping_address = 1
address.update(args)
address.flags.ignore_mandatory = True
address.save(ignore_permissions=True)
def make_email_queue(email_queue):
name_list = []
for key, data in iteritems(email_queue):
name = frappe.db.get_value('Sales Invoice', {'offline_pos_name': key}, 'name')
if not name: continue
data = json.loads(data)
sender = frappe.session.user
print_format = "POS Invoice" if not cint(frappe.db.get_value('Print Format', 'POS Invoice', 'disabled')) else None
attachments = [frappe.attach_print('Sales Invoice', name, print_format=print_format)]
make(subject=data.get('subject'), content=data.get('content'), recipients=data.get('recipients'),
sender=sender, attachments=attachments, send_email=True,
doctype='Sales Invoice', name=name)
name_list.append(key)
return name_list
def validate_item(doc):
for item in doc.get('items'):
if not frappe.db.exists('Item', item.get('item_code')):
item_doc = frappe.new_doc('Item')
item_doc.name = item.get('item_code')
item_doc.item_code = item.get('item_code')
item_doc.item_name = item.get('item_name')
item_doc.description = item.get('description')
item_doc.stock_uom = item.get('stock_uom')
item_doc.uom = item.get('uom')
item_doc.item_group = item.get('item_group')
item_doc.append('item_defaults', {
"company": doc.get("company"),
"default_warehouse": item.get('warehouse')
})
item_doc.save(ignore_permissions=True)
frappe.db.commit()
def submit_invoice(si_doc, name, doc, name_list):
try:
si_doc.insert()
si_doc.submit()
frappe.db.commit()
name_list.append(name)
except Exception as e:
if frappe.message_log:
frappe.message_log.pop()
frappe.db.rollback()
frappe.log_error(frappe.get_traceback())
name_list = save_invoice(doc, name, name_list)
return name_list
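# If submission fails (e.g. a validation error in an offline invoice), the
# invoice is kept as a draft via save_invoice below, so the POS data is not
# lost.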
def save_invoice(doc, name, name_list):
try:
if not frappe.db.exists('Sales Invoice', {'offline_pos_name': name}):
si = frappe.new_doc('Sales Invoice')
si.update(doc)
si.set_posting_time = 1
si.customer = get_customer_id(doc)
si.due_date = doc.get('posting_date')
si.flags.ignore_mandatory = True
si.insert(ignore_permissions=True)
frappe.db.commit()
name_list.append(name)
except Exception:
frappe.db.rollback()
frappe.log_error(frappe.get_traceback())
return name_list
| neilLasrado/erpnext | erpnext/accounts/doctype/sales_invoice/pos.py | Python | gpl-3.0 | 21,154 |
import userHelper
import serverPackets
import exceptions
import glob
import consoleHelper
import bcolors
import locationHelper
import countryHelper
import time
import generalFunctions
import channelJoinEvent
def handle(flaskRequest):
# Data to return
responseTokenString = "ayy"
responseData = bytes()
	# The IP of your private network; to get the right location you should use
	# your public IP (e.g. check it at http://ping.eu)
	localIP = "172.20.7.107" # The IP you log in with
publicIP = "8.8.8.8" # google lul
# Get IP from flask request
requestIP = flaskRequest.headers.get("X-Forwarded-For")
if requestIP == localIP:
requestIP = publicIP
# Console output
print("> Accepting connection from {}...".format(requestIP))
# Split POST body so we can get username/password/hardware data
# 2:-3 thing is because requestData has some escape stuff that we don't need
loginData = str(flaskRequest.data)[2:-3].split("\\n")
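	# Note (format inferred from the parsing below): the client sends a
	# plain-text body of newline-separated fields, roughly
	#   username\npassword_hash\nclient_extra_data
	# so loginData[0] is the username and loginData[1] the password hash.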
# Process login
print("> Processing login request for {}...".format(loginData[0]))
try:
# If true, print error to console
err = False
# Try to get the ID from username
userID = userHelper.getID(str(loginData[0]))
if userID == False:
# Invalid username
raise exceptions.loginFailedException()
if userHelper.checkLogin(userID, loginData[1]) == False:
# Invalid password
raise exceptions.loginFailedException()
# Make sure we are not banned
userAllowed = userHelper.getAllowed(userID)
if userAllowed == 0:
# Banned
raise exceptions.loginBannedException()
		# Activate the user (obviously not banned ones),
		# i.e. those who created an account without logging in through bancho yet
if userAllowed == 2:
# Not activated yet
userHelper.Activate(userID)
# No login errors!
# Delete old tokens for that user and generate a new one
glob.tokens.deleteOldTokens(userID)
responseToken = glob.tokens.addToken(userID)
responseTokenString = responseToken.token
# Get silence end
userSilenceEnd = max(0, userHelper.getSilenceEnd(userID)-int(time.time()))
# Get supporter/GMT
userRank = userHelper.getRankPrivileges(userID)
userGMT = False
userSupporter = True
if userRank >= 3:
userGMT = True
# Server restarting check
		if glob.restarting:
raise exceptions.banchoRestartingException()
# Maintenance check
		if glob.banchoConf.config["banchoMaintenance"]:
			if not userGMT:
# We are not mod/admin, delete token, send notification and logout
glob.tokens.deleteToken(responseTokenString)
raise exceptions.banchoMaintenanceException()
else:
# We are mod/admin, send warning notification and continue
responseToken.enqueue(serverPackets.notification("Bancho is in maintenance mode. Only mods/admins have full access to the server.\nType !system maintenance off in chat to turn off maintenance mode."))
# Send all needed login packets
responseToken.enqueue(serverPackets.silenceEndTime(userSilenceEnd))
responseToken.enqueue(serverPackets.userID(userID))
responseToken.enqueue(serverPackets.protocolVersion())
responseToken.enqueue(serverPackets.userSupporterGMT(userSupporter, userGMT))
responseToken.enqueue(serverPackets.userPanel(userID))
responseToken.enqueue(serverPackets.userStats(userID))
# Channel info end (before starting!?! wtf bancho?)
responseToken.enqueue(serverPackets.channelInfoEnd())
# Default opened channels
# TODO: Configurable default channels
channelJoinEvent.joinChannel(responseToken, "#osu")
channelJoinEvent.joinChannel(responseToken, "#announce")
if userRank >= 3:
			# Join admin channel if we are mod/admin
# TODO: Separate channels for mods and admins
channelJoinEvent.joinChannel(responseToken, "#admin")
# Output channels info
for key, value in glob.channels.channels.items():
			if value.publicRead:
responseToken.enqueue(serverPackets.channelInfo(key))
responseToken.enqueue(serverPackets.friendList(userID))
# Send main menu icon and login notification if needed
if glob.banchoConf.config["menuIcon"] != "":
responseToken.enqueue(serverPackets.mainMenuIcon(glob.banchoConf.config["menuIcon"]))
if glob.banchoConf.config["loginNotification"] != "":
responseToken.enqueue(serverPackets.notification(glob.banchoConf.config["loginNotification"]))
# Get everyone else userpanel
# TODO: Better online users handling
for key, value in glob.tokens.tokens.items():
responseToken.enqueue(serverPackets.userPanel(value.userID))
responseToken.enqueue(serverPackets.userStats(value.userID))
# Send online users IDs array
responseToken.enqueue(serverPackets.onlineUsers())
		if requestIP is None:
# Get Last 'usual' IP from user (default 8.8.8.8 / USA / Google)
requestIP = userHelper.logInIP(userID)
# Get location and country from ip.zxq.co or database
if generalFunctions.stringToBool(glob.conf.config["server"]["localizeusers"]):
# Get location and country from IP
location = locationHelper.getLocation(requestIP)
country = countryHelper.getCountryID(locationHelper.getCountry(requestIP))
else:
# Set location to 0,0 and get country from db
print("[!] Location skipped")
location = [0,0]
country = countryHelper.getCountryID(userHelper.getCountry(userID))
# Set location and country
responseToken.setLocation(location)
responseToken.setCountry(country)
		# Send to everyone our userpanel and userStats (so they know we have logged in)
glob.tokens.enqueueAll(serverPackets.userPanel(userID))
glob.tokens.enqueueAll(serverPackets.userStats(userID))
		# Set response data to the right value and reset our queue
responseData = responseToken.queue
responseToken.resetQueue()
# Some things about IP
logInIP = userHelper.logInIP(userID)
logInIP = logInIP['ip']
print("[!] First IP: "+format(logInIP))
if logInIP != requestIP:
# We'll inform...
message = "This is not your usual IP! Remember we don't like multiaccounting! (ignore if you did not)"
responseToken.enqueue(serverPackets.notification(message))
# Print logged in message
consoleHelper.printColored("> {} logged in ({})".format(loginData[0], responseToken.token), bcolors.GREEN)
except exceptions.loginFailedException:
# Login failed error packet
# (we don't use enqueue because we don't have a token since login has failed)
err = True
responseData += serverPackets.loginFailed()
except exceptions.loginBannedException:
# Login banned error packet
err = True
responseData += serverPackets.loginBanned()
except exceptions.banchoMaintenanceException:
# Bancho is in maintenance mode
responseData += serverPackets.notification("Our bancho server is in maintenance mode. Please try to login again later.")
responseData += serverPackets.loginError()
except exceptions.banchoRestartingException:
# Bancho is restarting
responseData += serverPackets.notification("Bancho is restarting. Try again in a few minutes.")
responseData += serverPackets.loginError()
finally:
# Print login failed message to console if needed
		if err:
consoleHelper.printColored("> {}'s login failed".format(loginData[0]), bcolors.YELLOW)
return (responseTokenString, responseData)
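# Hypothetical wiring sketch (not part of the original file): the Flask app is
# assumed to call handle() and map the returned tuple onto the HTTP response,
# roughly like this:
#
#	@app.route("/", methods=["POST"])
#	def banchoServer():
#		token, body = handle(flask.request)
#		response = flask.Response(body)
#		response.headers["cho-token"] = token
#		return response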
| RlSEN/bannedcho | c.ppy.sh/loginEvent.py | Python | gpl-3.0 | 7,180 |
from yanntricks import *
def KScolorD():
pspict,fig = SinglePicture("KScolorD")
pspict.dilatation(1)
x=var('x')
C=Circle(Point(0,0),1)
N1=C.graph(90,180)
N2=C.graph(270,360)
C.parameters.color="blue"
N1.parameters.color="black"
N2.parameters.color=N1.parameters.color
N1.wave(0.1,0.2)
#N2.wave(0.1,0.2)
N=Point(0,1)
S=Point(0,-1)
pspict.axes.no_graduation()
pspict.DrawGraphs(C,N1,N2,N,S)
pspict.DrawDefaultAxes()
fig.conclude()
fig.write_the_file()
| LaurentClaessens/mazhe | src_yanntricks/yanntricksKScolorD.py | Python | gpl-3.0 | 526 |
# coding: utf-8
# Copyright: (c) 2012-2014, Michael DeHaan <[email protected]>
# Copyright: (c) 2018, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import base64
import json
import os
import random
import re
import stat
import tempfile
import time
from abc import ABCMeta, abstractmethod
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleActionSkip, AnsibleActionFail
from ansible.executor.module_common import modify_module
from ansible.executor.interpreter_discovery import discover_interpreter, InterpreterDiscoveryRequiredError
from ansible.module_utils.common._collections_compat import Sequence
from ansible.module_utils.json_utils import _filter_non_json_lines
from ansible.module_utils.six import binary_type, string_types, text_type, iteritems, with_metaclass
from ansible.module_utils.six.moves import shlex_quote
from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.parsing.utils.jsonify import jsonify
from ansible.release import __version__
from ansible.utils.display import Display
from ansible.utils.unsafe_proxy import wrap_var, AnsibleUnsafeText
from ansible.vars.clean import remove_internal_keys
display = Display()
class ActionBase(with_metaclass(ABCMeta, object)):
'''
This class is the base class for all action plugins, and defines
code common to all actions. The base class handles the connection
by putting/getting files and executing commands based on the current
action in use.
'''
# A set of valid arguments
_VALID_ARGS = frozenset([])
def __init__(self, task, connection, play_context, loader, templar, shared_loader_obj):
self._task = task
self._connection = connection
self._play_context = play_context
self._loader = loader
self._templar = templar
self._shared_loader_obj = shared_loader_obj
self._cleanup_remote_tmp = False
self._supports_check_mode = True
self._supports_async = False
# interpreter discovery state
self._discovered_interpreter_key = None
self._discovered_interpreter = False
self._discovery_deprecation_warnings = []
self._discovery_warnings = []
# Backwards compat: self._display isn't really needed, just import the global display and use that.
self._display = display
self._used_interpreter = None
@abstractmethod
def run(self, tmp=None, task_vars=None):
""" Action Plugins should implement this method to perform their
tasks. Everything else in this base class is a helper method for the
action plugin to do that.
:kwarg tmp: Deprecated parameter. This is no longer used. An action plugin that calls
another one and wants to use the same remote tmp for both should set
self._connection._shell.tmpdir rather than this parameter.
:kwarg task_vars: The variables (host vars, group vars, config vars,
etc) associated with this task.
:returns: dictionary of results from the module
Implementors of action modules may find the following variables especially useful:
* Module parameters. These are stored in self._task.args
"""
result = {}
if tmp is not None:
result['warning'] = ['ActionModule.run() no longer honors the tmp parameter. Action'
' plugins should set self._connection._shell.tmpdir to share'
' the tmpdir']
del tmp
if self._task.async_val and not self._supports_async:
raise AnsibleActionFail('async is not supported for this task.')
elif self._play_context.check_mode and not self._supports_check_mode:
raise AnsibleActionSkip('check mode is not supported for this task.')
elif self._task.async_val and self._play_context.check_mode:
raise AnsibleActionFail('check mode and async cannot be used on same task.')
# Error if invalid argument is passed
if self._VALID_ARGS:
task_opts = frozenset(self._task.args.keys())
bad_opts = task_opts.difference(self._VALID_ARGS)
if bad_opts:
raise AnsibleActionFail('Invalid options for %s: %s' % (self._task.action, ','.join(list(bad_opts))))
if self._connection._shell.tmpdir is None and self._early_needs_tmp_path():
self._make_tmp_path()
return result
def cleanup(self, force=False):
"""Method to perform a clean up at the end of an action plugin execution
By default this is designed to clean up the shell tmpdir, and is toggled based on whether
async is in use
Action plugins may override this if they deem necessary, but should still call this method
via super
"""
if force or not self._task.async_val:
self._remove_tmp_path(self._connection._shell.tmpdir)
def get_plugin_option(self, plugin, option, default=None):
"""Helper to get an option from a plugin without having to use
the try/except dance everywhere to set a default
"""
try:
return plugin.get_option(option)
except (AttributeError, KeyError):
return default
def get_become_option(self, option, default=None):
return self.get_plugin_option(self._connection.become, option, default=default)
def get_connection_option(self, option, default=None):
return self.get_plugin_option(self._connection, option, default=default)
def get_shell_option(self, option, default=None):
return self.get_plugin_option(self._connection._shell, option, default=default)
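    # Example usage of the helpers above (option names are illustrative; they
    # only resolve for plugins that actually define those options):
    #   timeout = self.get_connection_option('timeout', default=10)
    #   admins = self.get_shell_option('admin_users', default=['root'])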
def _remote_file_exists(self, path):
cmd = self._connection._shell.exists(path)
result = self._low_level_execute_command(cmd=cmd, sudoable=True)
if result['rc'] == 0:
return True
return False
def _configure_module(self, module_name, module_args, task_vars=None):
'''
Handles the loading and templating of the module code through the
modify_module() function.
'''
if task_vars is None:
task_vars = dict()
# Search module path(s) for named module.
for mod_type in self._connection.module_implementation_preferences:
# Check to determine if PowerShell modules are supported, and apply
# some fixes (hacks) to module name + args.
if mod_type == '.ps1':
# FIXME: This should be temporary and moved to an exec subsystem plugin where we can define the mapping
# for each subsystem.
win_collection = 'ansible.windows'
# async_status, win_stat, win_file, win_copy, and win_ping are not just like their
# python counterparts but they are compatible enough for our
# internal usage
if module_name in ('stat', 'file', 'copy', 'ping') and self._task.action != module_name:
module_name = '%s.win_%s' % (win_collection, module_name)
elif module_name in ['async_status']:
module_name = '%s.%s' % (win_collection, module_name)
# Remove extra quotes surrounding path parameters before sending to module.
if module_name.split('.')[-1] in ['win_stat', 'win_file', 'win_copy', 'slurp'] and module_args and \
hasattr(self._connection._shell, '_unquote'):
for key in ('src', 'dest', 'path'):
if key in module_args:
module_args[key] = self._connection._shell._unquote(module_args[key])
module_path = self._shared_loader_obj.module_loader.find_plugin(module_name, mod_type, collection_list=self._task.collections)
if module_path:
break
else: # This is a for-else: http://bit.ly/1ElPkyg
raise AnsibleError("The module %s was not found in configured module paths" % (module_name))
# insert shared code and arguments into the module
final_environment = dict()
self._compute_environment_string(final_environment)
become_kwargs = {}
if self._connection.become:
become_kwargs['become'] = True
become_kwargs['become_method'] = self._connection.become.name
become_kwargs['become_user'] = self._connection.become.get_option('become_user',
playcontext=self._play_context)
become_kwargs['become_password'] = self._connection.become.get_option('become_pass',
playcontext=self._play_context)
become_kwargs['become_flags'] = self._connection.become.get_option('become_flags',
playcontext=self._play_context)
# modify_module will exit early if interpreter discovery is required; re-run after if necessary
for dummy in (1, 2):
try:
(module_data, module_style, module_shebang) = modify_module(module_name, module_path, module_args, self._templar,
task_vars=task_vars,
module_compression=self._play_context.module_compression,
async_timeout=self._task.async_val,
environment=final_environment,
**become_kwargs)
break
except InterpreterDiscoveryRequiredError as idre:
self._discovered_interpreter = AnsibleUnsafeText(discover_interpreter(
action=self,
interpreter_name=idre.interpreter_name,
discovery_mode=idre.discovery_mode,
task_vars=task_vars))
# update the local task_vars with the discovered interpreter (which might be None);
# we'll propagate back to the controller in the task result
discovered_key = 'discovered_interpreter_%s' % idre.interpreter_name
# store in local task_vars facts collection for the retry and any other usages in this worker
if task_vars.get('ansible_facts') is None:
task_vars['ansible_facts'] = {}
task_vars['ansible_facts'][discovered_key] = self._discovered_interpreter
# preserve this so _execute_module can propagate back to controller as a fact
self._discovered_interpreter_key = discovered_key
return (module_style, module_shebang, module_data, module_path)
def _compute_environment_string(self, raw_environment_out=None):
'''
Builds the environment string to be used when executing the remote task.
'''
final_environment = dict()
if self._task.environment is not None:
environments = self._task.environment
if not isinstance(environments, list):
environments = [environments]
# The order of environments matters to make sure we merge
# in the parent's values first so those in the block then
# task 'win' in precedence
for environment in environments:
if environment is None or len(environment) == 0:
continue
temp_environment = self._templar.template(environment)
if not isinstance(temp_environment, dict):
raise AnsibleError("environment must be a dictionary, received %s (%s)" % (temp_environment, type(temp_environment)))
# very deliberately using update here instead of combine_vars, as
# these environment settings should not need to merge sub-dicts
final_environment.update(temp_environment)
if len(final_environment) > 0:
final_environment = self._templar.template(final_environment)
if isinstance(raw_environment_out, dict):
raw_environment_out.clear()
raw_environment_out.update(final_environment)
return self._connection._shell.env_prefix(**final_environment)
def _early_needs_tmp_path(self):
'''
Determines if a tmp path should be created before the action is executed.
'''
return getattr(self, 'TRANSFERS_FILES', False)
def _is_pipelining_enabled(self, module_style, wrap_async=False):
'''
Determines if we are required and can do pipelining
'''
# any of these require a true
for condition in [
self._connection.has_pipelining,
self._play_context.pipelining or self._connection.always_pipeline_modules, # pipelining enabled for play or connection requires it (eg winrm)
module_style == "new", # old style modules do not support pipelining
not C.DEFAULT_KEEP_REMOTE_FILES, # user wants remote files
not wrap_async or self._connection.always_pipeline_modules, # async does not normally support pipelining unless it does (eg winrm)
(self._connection.become.name if self._connection.become else '') != 'su', # su does not work with pipelining,
# FIXME: we might need to make become_method exclusion a configurable list
]:
if not condition:
return False
return True
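    # Reading the loop above as one predicate (summary comment, not part of the
    # original source): pipelining happens only for 'new'-style modules, on a
    # connection that supports it, with pipelining enabled (or forced by the
    # connection), keep_remote_files off, no async wrapping (unless the
    # connection always pipelines), and a become method other than 'su'.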
def _get_admin_users(self):
'''
Returns a list of admin users that are configured for the current shell
plugin
'''
return self.get_shell_option('admin_users', ['root'])
def _get_remote_user(self):
''' consistently get the 'remote_user' for the action plugin '''
# TODO: use 'current user running ansible' as fallback when moving away from play_context
# pwd.getpwuid(os.getuid()).pw_name
remote_user = None
try:
remote_user = self._connection.get_option('remote_user')
except KeyError:
            # plugin does not have a remote_user option; fall back to default and/or play_context
remote_user = getattr(self._connection, 'default_user', None) or self._play_context.remote_user
except AttributeError:
# plugin does not use config system, fallback to old play_context
remote_user = self._play_context.remote_user
return remote_user
def _is_become_unprivileged(self):
'''
The user is not the same as the connection user and is not part of the
shell configured admin users
'''
# if we don't use become then we know we aren't switching to a
# different unprivileged user
if not self._connection.become:
return False
# if we use become and the user is not an admin (or same user) then
# we need to return become_unprivileged as True
admin_users = self._get_admin_users()
remote_user = self._get_remote_user()
become_user = self.get_become_option('become_user')
return bool(become_user and become_user not in admin_users + [remote_user])
def _make_tmp_path(self, remote_user=None):
'''
Create and return a temporary path on a remote box.
'''
# Network connection plugins (network_cli, netconf, etc.) execute on the controller, rather than the remote host.
# As such, we want to avoid using remote_user for paths as remote_user may not line up with the local user
# This is a hack and should be solved by more intelligent handling of remote_tmp in 2.7
if getattr(self._connection, '_remote_is_local', False):
tmpdir = C.DEFAULT_LOCAL_TMP
else:
            # NOTE: shell plugins should populate this setting anyway, but they don't do remote
            # expansion, which we need for 'non posix' systems like cloud-init and solaris
tmpdir = self._remote_expand_user(self.get_shell_option('remote_tmp', default='~/.ansible/tmp'), sudoable=False)
become_unprivileged = self._is_become_unprivileged()
basefile = self._connection._shell._generate_temp_dir_name()
cmd = self._connection._shell.mkdtemp(basefile=basefile, system=become_unprivileged, tmpdir=tmpdir)
result = self._low_level_execute_command(cmd, sudoable=False)
# error handling on this seems a little aggressive?
if result['rc'] != 0:
if result['rc'] == 5:
output = 'Authentication failure.'
elif result['rc'] == 255 and self._connection.transport in ('ssh',):
if self._play_context.verbosity > 3:
output = u'SSH encountered an unknown error. The output was:\n%s%s' % (result['stdout'], result['stderr'])
else:
output = (u'SSH encountered an unknown error during the connection. '
'We recommend you re-run the command using -vvvv, which will enable SSH debugging output to help diagnose the issue')
elif u'No space left on device' in result['stderr']:
output = result['stderr']
else:
                output = ('Failed to create temporary directory. '
'In some cases, you may have been able to authenticate and did not have permissions on the target directory. '
'Consider changing the remote tmp path in ansible.cfg to a path rooted in "/tmp", for more error information use -vvv. '
'Failed command was: %s, exited with result %d' % (cmd, result['rc']))
if 'stdout' in result and result['stdout'] != u'':
output = output + u", stdout output: %s" % result['stdout']
if self._play_context.verbosity > 3 and 'stderr' in result and result['stderr'] != u'':
output += u", stderr output: %s" % result['stderr']
raise AnsibleConnectionFailure(output)
else:
self._cleanup_remote_tmp = True
try:
stdout_parts = result['stdout'].strip().split('%s=' % basefile, 1)
rc = self._connection._shell.join_path(stdout_parts[-1], u'').splitlines()[-1]
except IndexError:
# stdout was empty or just space, set to / to trigger error in next if
rc = '/'
# Catch failure conditions, files should never be
# written to locations in /.
if rc == '/':
raise AnsibleError('failed to resolve remote temporary directory from %s: `%s` returned empty string' % (basefile, cmd))
self._connection._shell.tmpdir = rc
return rc
def _should_remove_tmp_path(self, tmp_path):
'''Determine if temporary path should be deleted or kept by user request/config'''
return tmp_path and self._cleanup_remote_tmp and not C.DEFAULT_KEEP_REMOTE_FILES and "-tmp-" in tmp_path
def _remove_tmp_path(self, tmp_path):
'''Remove a temporary path we created. '''
if tmp_path is None and self._connection._shell.tmpdir:
tmp_path = self._connection._shell.tmpdir
if self._should_remove_tmp_path(tmp_path):
cmd = self._connection._shell.remove(tmp_path, recurse=True)
# If we have gotten here we have a working ssh configuration.
# If ssh breaks we could leave tmp directories out on the remote system.
tmp_rm_res = self._low_level_execute_command(cmd, sudoable=False)
if tmp_rm_res.get('rc', 0) != 0:
                display.warning('Error deleting remote temporary files (rc: %s, stderr: %s)'
                                % (tmp_rm_res.get('rc'), tmp_rm_res.get('stderr', 'No error string available.')))
else:
self._connection._shell.tmpdir = None
def _transfer_file(self, local_path, remote_path):
"""
Copy a file from the controller to a remote path
:arg local_path: Path on controller to transfer
:arg remote_path: Path on the remote system to transfer into
.. warning::
            * When you use this function you likely want to use fixup_perms2() on the
remote_path to make sure that the remote file is readable when the user becomes
a non-privileged user.
* If you use fixup_perms2() on the file and copy or move the file into place, you will
need to then remove filesystem acls on the file once it has been copied into place by
the module. See how the copy module implements this for help.
"""
self._connection.put_file(local_path, remote_path)
return remote_path
def _transfer_data(self, remote_path, data):
'''
Copies the module data out to the temporary module path.
'''
if isinstance(data, dict):
data = jsonify(data)
afd, afile = tempfile.mkstemp(dir=C.DEFAULT_LOCAL_TMP)
afo = os.fdopen(afd, 'wb')
try:
data = to_bytes(data, errors='surrogate_or_strict')
afo.write(data)
except Exception as e:
raise AnsibleError("failure writing module data to temporary file for transfer: %s" % to_native(e))
afo.flush()
afo.close()
try:
self._transfer_file(afile, remote_path)
finally:
os.unlink(afile)
return remote_path
def _fixup_perms2(self, remote_paths, remote_user=None, execute=True):
"""
We need the files we upload to be readable (and sometimes executable)
by the user being sudo'd to but we want to limit other people's access
(because the files could contain passwords or other private
        information). We achieve this in one of these ways:
* If no sudo is performed or the remote_user is sudo'ing to
themselves, we don't have to change permissions.
* If the remote_user sudo's to a privileged user (for instance, root),
we don't have to change permissions
* If the remote_user sudo's to an unprivileged user then we attempt to
grant the unprivileged user access via file system acls.
* If granting file system acls fails we try to change the owner of the
file with chown which only works in case the remote_user is
privileged or the remote systems allows chown calls by unprivileged
users (e.g. HP-UX)
* If the chown fails we can set the file to be world readable so that
the second unprivileged user can read the file.
Since this could allow other users to get access to private
information we only do this if ansible is configured with
"allow_world_readable_tmpfiles" in the ansible.cfg
"""
if remote_user is None:
remote_user = self._get_remote_user()
if getattr(self._connection._shell, "_IS_WINDOWS", False):
# This won't work on Powershell as-is, so we'll just completely skip until
# we have a need for it, at which point we'll have to do something different.
return remote_paths
if self._is_become_unprivileged():
# Unprivileged user that's different than the ssh user. Let's get
# to work!
# Try to use file system acls to make the files readable for sudo'd
# user
if execute:
chmod_mode = 'rx'
setfacl_mode = 'r-x'
else:
chmod_mode = 'rX'
# NOTE: this form fails silently on freebsd. We currently
# never call _fixup_perms2() with execute=False but if we
# start to we'll have to fix this.
setfacl_mode = 'r-X'
res = self._remote_set_user_facl(remote_paths, self.get_become_option('become_user'), setfacl_mode)
if res['rc'] != 0:
# File system acls failed; let's try to use chown next
# Set executable bit first as on some systems an
# unprivileged user can use chown
if execute:
res = self._remote_chmod(remote_paths, 'u+x')
if res['rc'] != 0:
raise AnsibleError('Failed to set file mode on remote temporary files (rc: {0}, err: {1})'.format(res['rc'], to_native(res['stderr'])))
res = self._remote_chown(remote_paths, self.get_become_option('become_user'))
if res['rc'] != 0 and remote_user in self._get_admin_users():
# chown failed even if remote_user is administrator/root
raise AnsibleError('Failed to change ownership of the temporary files Ansible needs to create despite connecting as a privileged user. '
'Unprivileged become user would be unable to read the file.')
elif res['rc'] != 0:
if C.ALLOW_WORLD_READABLE_TMPFILES:
# chown and fs acls failed -- do things this insecure
# way only if the user opted in in the config file
display.warning('Using world-readable permissions for temporary files Ansible needs to create when becoming an unprivileged user. '
'This may be insecure. For information on securing this, see '
'https://docs.ansible.com/ansible/user_guide/become.html#risks-of-becoming-an-unprivileged-user')
res = self._remote_chmod(remote_paths, 'a+%s' % chmod_mode)
if res['rc'] != 0:
raise AnsibleError('Failed to set file mode on remote files (rc: {0}, err: {1})'.format(res['rc'], to_native(res['stderr'])))
else:
raise AnsibleError('Failed to set permissions on the temporary files Ansible needs to create when becoming an unprivileged user '
                                           '(rc: %s, err: %s). For information on working around this, see '
'https://docs.ansible.com/ansible/become.html#becoming-an-unprivileged-user'
% (res['rc'], to_native(res['stderr'])))
elif execute:
# Can't depend on the file being transferred with execute permissions.
# Only need user perms because no become was used here
res = self._remote_chmod(remote_paths, 'u+x')
if res['rc'] != 0:
raise AnsibleError('Failed to set execute bit on remote files (rc: {0}, err: {1})'.format(res['rc'], to_native(res['stderr'])))
return remote_paths
def _remote_chmod(self, paths, mode, sudoable=False):
'''
Issue a remote chmod command
'''
cmd = self._connection._shell.chmod(paths, mode)
res = self._low_level_execute_command(cmd, sudoable=sudoable)
return res
def _remote_chown(self, paths, user, sudoable=False):
'''
Issue a remote chown command
'''
cmd = self._connection._shell.chown(paths, user)
res = self._low_level_execute_command(cmd, sudoable=sudoable)
return res
def _remote_set_user_facl(self, paths, user, mode, sudoable=False):
'''
Issue a remote call to setfacl
'''
cmd = self._connection._shell.set_user_facl(paths, user, mode)
res = self._low_level_execute_command(cmd, sudoable=sudoable)
return res
def _execute_remote_stat(self, path, all_vars, follow, tmp=None, checksum=True):
'''
Get information from remote file.
'''
if tmp is not None:
display.warning('_execute_remote_stat no longer honors the tmp parameter. Action'
' plugins should set self._connection._shell.tmpdir to share'
' the tmpdir')
del tmp # No longer used
module_args = dict(
path=path,
follow=follow,
get_checksum=checksum,
checksum_algorithm='sha1',
)
mystat = self._execute_module(module_name='stat', module_args=module_args, task_vars=all_vars,
wrap_async=False)
if mystat.get('failed'):
msg = mystat.get('module_stderr')
if not msg:
msg = mystat.get('module_stdout')
if not msg:
msg = mystat.get('msg')
raise AnsibleError('Failed to get information on remote file (%s): %s' % (path, msg))
if not mystat['stat']['exists']:
# empty might be matched, 1 should never match, also backwards compatible
mystat['stat']['checksum'] = '1'
# happens sometimes when it is a dir and not on bsd
if 'checksum' not in mystat['stat']:
mystat['stat']['checksum'] = ''
elif not isinstance(mystat['stat']['checksum'], string_types):
raise AnsibleError("Invalid checksum returned by stat: expected a string type but got %s" % type(mystat['stat']['checksum']))
return mystat['stat']
def _remote_checksum(self, path, all_vars, follow=False):
'''
Produces a remote checksum given a path,
Returns a number 0-4 for specific errors instead of checksum, also ensures it is different
0 = unknown error
1 = file does not exist, this might not be an error
2 = permissions issue
        3 = it's a directory, not a file
4 = stat module failed, likely due to not finding python
5 = appropriate json module not found
'''
x = "0" # unknown error has occurred
try:
remote_stat = self._execute_remote_stat(path, all_vars, follow=follow)
if remote_stat['exists'] and remote_stat['isdir']:
x = "3" # its a directory not a file
else:
x = remote_stat['checksum'] # if 1, file is missing
except AnsibleError as e:
errormsg = to_text(e)
if errormsg.endswith(u'Permission denied'):
x = "2" # cannot read file
elif errormsg.endswith(u'MODULE FAILURE'):
x = "4" # python not found or module uncaught exception
elif 'json' in errormsg:
x = "5" # json module needed
finally:
return x # pylint: disable=lost-exception
def _remote_expand_user(self, path, sudoable=True, pathsep=None):
''' takes a remote path and performs tilde/$HOME expansion on the remote host '''
# We only expand ~/path and ~username/path
if not path.startswith('~'):
return path
# Per Jborean, we don't have to worry about Windows as we don't have a notion of user's home
# dir there.
split_path = path.split(os.path.sep, 1)
expand_path = split_path[0]
if expand_path == '~':
# Network connection plugins (network_cli, netconf, etc.) execute on the controller, rather than the remote host.
# As such, we want to avoid using remote_user for paths as remote_user may not line up with the local user
# This is a hack and should be solved by more intelligent handling of remote_tmp in 2.7
become_user = self.get_become_option('become_user')
if getattr(self._connection, '_remote_is_local', False):
pass
elif sudoable and self._connection.become and become_user:
expand_path = '~%s' % become_user
else:
# use remote user instead, if none set default to current user
expand_path = '~%s' % (self._get_remote_user() or '')
# use shell to construct appropriate command and execute
cmd = self._connection._shell.expand_user(expand_path)
data = self._low_level_execute_command(cmd, sudoable=False)
try:
initial_fragment = data['stdout'].strip().splitlines()[-1]
except IndexError:
initial_fragment = None
if not initial_fragment:
# Something went wrong trying to expand the path remotely. Try using pwd, if not, return
# the original string
cmd = self._connection._shell.pwd()
pwd = self._low_level_execute_command(cmd, sudoable=False).get('stdout', '').strip()
if pwd:
expanded = pwd
else:
expanded = path
elif len(split_path) > 1:
expanded = self._connection._shell.join_path(initial_fragment, *split_path[1:])
else:
expanded = initial_fragment
if '..' in os.path.dirname(expanded).split('/'):
raise AnsibleError("'%s' returned an invalid relative home directory path containing '..'" % self._play_context.remote_addr)
return expanded
def _strip_success_message(self, data):
'''
Removes the BECOME-SUCCESS message from the data.
'''
if data.strip().startswith('BECOME-SUCCESS-'):
data = re.sub(r'^((\r)?\n)?BECOME-SUCCESS.*(\r)?\n', '', data)
return data
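    # Example (hypothetical data): '\nBECOME-SUCCESS-abc123\nreal output\n'
    # becomes 'real output\n' after the substitution above.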
def _update_module_args(self, module_name, module_args, task_vars):
# set check mode in the module arguments, if required
if self._play_context.check_mode:
if not self._supports_check_mode:
raise AnsibleError("check mode is not supported for this operation")
module_args['_ansible_check_mode'] = True
else:
module_args['_ansible_check_mode'] = False
# set no log in the module arguments, if required
no_target_syslog = C.config.get_config_value('DEFAULT_NO_TARGET_SYSLOG', variables=task_vars)
module_args['_ansible_no_log'] = self._play_context.no_log or no_target_syslog
# set debug in the module arguments, if required
module_args['_ansible_debug'] = C.DEFAULT_DEBUG
# let module know we are in diff mode
module_args['_ansible_diff'] = self._play_context.diff
# let module know our verbosity
module_args['_ansible_verbosity'] = display.verbosity
# give the module information about the ansible version
module_args['_ansible_version'] = __version__
# give the module information about its name
module_args['_ansible_module_name'] = module_name
# set the syslog facility to be used in the module
module_args['_ansible_syslog_facility'] = task_vars.get('ansible_syslog_facility', C.DEFAULT_SYSLOG_FACILITY)
# let module know about filesystems that selinux treats specially
module_args['_ansible_selinux_special_fs'] = C.DEFAULT_SELINUX_SPECIAL_FS
# what to do when parameter values are converted to strings
module_args['_ansible_string_conversion_action'] = C.STRING_CONVERSION_ACTION
# give the module the socket for persistent connections
module_args['_ansible_socket'] = getattr(self._connection, 'socket_path')
if not module_args['_ansible_socket']:
module_args['_ansible_socket'] = task_vars.get('ansible_socket')
# make sure all commands use the designated shell executable
module_args['_ansible_shell_executable'] = self._play_context.executable
# make sure modules are aware if they need to keep the remote files
module_args['_ansible_keep_remote_files'] = C.DEFAULT_KEEP_REMOTE_FILES
# make sure all commands use the designated temporary directory if created
if self._is_become_unprivileged(): # force fallback on remote_tmp as user cannot normally write to dir
module_args['_ansible_tmpdir'] = None
else:
module_args['_ansible_tmpdir'] = self._connection._shell.tmpdir
# make sure the remote_tmp value is sent through in case modules needs to create their own
module_args['_ansible_remote_tmp'] = self.get_shell_option('remote_tmp', default='~/.ansible/tmp')
def _execute_module(self, module_name=None, module_args=None, tmp=None, task_vars=None, persist_files=False, delete_remote_tmp=None, wrap_async=False):
'''
Transfer and run a module along with its arguments.
'''
if tmp is not None:
display.warning('_execute_module no longer honors the tmp parameter. Action plugins'
' should set self._connection._shell.tmpdir to share the tmpdir')
del tmp # No longer used
if delete_remote_tmp is not None:
display.warning('_execute_module no longer honors the delete_remote_tmp parameter.'
' Action plugins should check self._connection._shell.tmpdir to'
' see if a tmpdir existed before they were called to determine'
' if they are responsible for removing it.')
del delete_remote_tmp # No longer used
tmpdir = self._connection._shell.tmpdir
# We set the module_style to new here so the remote_tmp is created
# before the module args are built if remote_tmp is needed (async).
# If the module_style turns out to not be new and we didn't create the
# remote tmp here, it will still be created. This must be done before
# calling self._update_module_args() so the module wrapper has the
# correct remote_tmp value set
if not self._is_pipelining_enabled("new", wrap_async) and tmpdir is None:
self._make_tmp_path()
tmpdir = self._connection._shell.tmpdir
if task_vars is None:
task_vars = dict()
# if a module name was not specified for this execution, use the action from the task
if module_name is None:
module_name = self._task.action
if module_args is None:
module_args = self._task.args
self._update_module_args(module_name, module_args, task_vars)
# FIXME: convert async_wrapper.py to not rely on environment variables
# make sure we get the right async_dir variable, backwards compatibility
# means we need to lookup the env value ANSIBLE_ASYNC_DIR first
remove_async_dir = None
if wrap_async or self._task.async_val:
env_async_dir = [e for e in self._task.environment if
"ANSIBLE_ASYNC_DIR" in e]
if len(env_async_dir) > 0:
msg = "Setting the async dir from the environment keyword " \
"ANSIBLE_ASYNC_DIR is deprecated. Set the async_dir " \
"shell option instead"
self._display.deprecated(msg, "2.12")
else:
# ANSIBLE_ASYNC_DIR is not set on the task, we get the value
# from the shell option and temporarily add to the environment
# list for async_wrapper to pick up
async_dir = self.get_shell_option('async_dir', default="~/.ansible_async")
remove_async_dir = len(self._task.environment)
self._task.environment.append({"ANSIBLE_ASYNC_DIR": async_dir})
# FUTURE: refactor this along with module build process to better encapsulate "smart wrapper" functionality
(module_style, shebang, module_data, module_path) = self._configure_module(module_name=module_name, module_args=module_args, task_vars=task_vars)
display.vvv("Using module file %s" % module_path)
if not shebang and module_style != 'binary':
raise AnsibleError("module (%s) is missing interpreter line" % module_name)
self._used_interpreter = shebang
remote_module_path = None
if not self._is_pipelining_enabled(module_style, wrap_async):
# we might need remote tmp dir
if tmpdir is None:
self._make_tmp_path()
tmpdir = self._connection._shell.tmpdir
remote_module_filename = self._connection._shell.get_remote_filename(module_path)
remote_module_path = self._connection._shell.join_path(tmpdir, 'AnsiballZ_%s' % remote_module_filename)
args_file_path = None
if module_style in ('old', 'non_native_want_json', 'binary'):
# we'll also need a tmp file to hold our module arguments
args_file_path = self._connection._shell.join_path(tmpdir, 'args')
if remote_module_path or module_style != 'new':
display.debug("transferring module to remote %s" % remote_module_path)
if module_style == 'binary':
self._transfer_file(module_path, remote_module_path)
else:
self._transfer_data(remote_module_path, module_data)
if module_style == 'old':
# we need to dump the module args to a k=v string in a file on
# the remote system, which can be read and parsed by the module
args_data = ""
for k, v in iteritems(module_args):
args_data += '%s=%s ' % (k, shlex_quote(text_type(v)))
self._transfer_data(args_file_path, args_data)
elif module_style in ('non_native_want_json', 'binary'):
self._transfer_data(args_file_path, json.dumps(module_args))
display.debug("done transferring module to remote")
environment_string = self._compute_environment_string()
# remove the ANSIBLE_ASYNC_DIR env entry if we added a temporary one for
# the async_wrapper task - this is so the async_status plugin doesn't
# fire a deprecation warning when it runs after this task
if remove_async_dir is not None:
del self._task.environment[remove_async_dir]
remote_files = []
if tmpdir and remote_module_path:
remote_files = [tmpdir, remote_module_path]
if args_file_path:
remote_files.append(args_file_path)
sudoable = True
in_data = None
cmd = ""
if wrap_async and not self._connection.always_pipeline_modules:
# configure, upload, and chmod the async_wrapper module
(async_module_style, shebang, async_module_data, async_module_path) = self._configure_module(module_name='async_wrapper', module_args=dict(),
task_vars=task_vars)
async_module_remote_filename = self._connection._shell.get_remote_filename(async_module_path)
remote_async_module_path = self._connection._shell.join_path(tmpdir, async_module_remote_filename)
self._transfer_data(remote_async_module_path, async_module_data)
remote_files.append(remote_async_module_path)
async_limit = self._task.async_val
async_jid = str(random.randint(0, 999999999999))
# call the interpreter for async_wrapper directly
# this permits use of a script for an interpreter on non-Linux platforms
# TODO: re-implement async_wrapper as a regular module to avoid this special case
interpreter = shebang.replace('#!', '').strip()
async_cmd = [interpreter, remote_async_module_path, async_jid, async_limit, remote_module_path]
if environment_string:
async_cmd.insert(0, environment_string)
if args_file_path:
async_cmd.append(args_file_path)
else:
# maintain a fixed number of positional parameters for async_wrapper
async_cmd.append('_')
if not self._should_remove_tmp_path(tmpdir):
async_cmd.append("-preserve_tmp")
cmd = " ".join(to_text(x) for x in async_cmd)
else:
if self._is_pipelining_enabled(module_style):
in_data = module_data
display.vvv("Pipelining is enabled.")
else:
cmd = remote_module_path
cmd = self._connection._shell.build_module_command(environment_string, shebang, cmd, arg_path=args_file_path).strip()
# Fix permissions of the tmpdir path and tmpdir files. This should be called after all
# files have been transferred.
if remote_files:
# remove none/empty
remote_files = [x for x in remote_files if x]
self._fixup_perms2(remote_files, self._get_remote_user())
# actually execute
res = self._low_level_execute_command(cmd, sudoable=sudoable, in_data=in_data)
# parse the main result
data = self._parse_returned_data(res)
# NOTE: INTERNAL KEYS ONLY ACCESSIBLE HERE
# get internal info before cleaning
if data.pop("_ansible_suppress_tmpdir_delete", False):
self._cleanup_remote_tmp = False
# NOTE: yum returns results .. but that made it 'compatible' with squashing, so we allow mappings, for now
if 'results' in data and (not isinstance(data['results'], Sequence) or isinstance(data['results'], string_types)):
data['ansible_module_results'] = data['results']
del data['results']
display.warning("Found internal 'results' key in module return, renamed to 'ansible_module_results'.")
# remove internal keys
remove_internal_keys(data)
if wrap_async:
# async_wrapper will clean up its tmpdir on its own so we want the controller side to
# forget about it now
self._connection._shell.tmpdir = None
# FIXME: for backwards compat, figure out if still makes sense
data['changed'] = True
# pre-split stdout/stderr into lines if needed
if 'stdout' in data and 'stdout_lines' not in data:
# if the value is 'False', a default won't catch it.
txt = data.get('stdout', None) or u''
data['stdout_lines'] = txt.splitlines()
if 'stderr' in data and 'stderr_lines' not in data:
# if the value is 'False', a default won't catch it.
txt = data.get('stderr', None) or u''
data['stderr_lines'] = txt.splitlines()
# propagate interpreter discovery results back to the controller
if self._discovered_interpreter_key:
if data.get('ansible_facts') is None:
data['ansible_facts'] = {}
data['ansible_facts'][self._discovered_interpreter_key] = self._discovered_interpreter
if self._discovery_warnings:
if data.get('warnings') is None:
data['warnings'] = []
data['warnings'].extend(self._discovery_warnings)
if self._discovery_deprecation_warnings:
if data.get('deprecations') is None:
data['deprecations'] = []
data['deprecations'].extend(self._discovery_deprecation_warnings)
# mark the entire module results untrusted as a template right here, since the current action could
# possibly template one of these values.
data = wrap_var(data)
display.debug("done with _execute_module (%s, %s)" % (module_name, module_args))
return data
def _parse_returned_data(self, res):
try:
filtered_output, warnings = _filter_non_json_lines(res.get('stdout', u''))
for w in warnings:
display.warning(w)
data = json.loads(filtered_output)
data['_ansible_parsed'] = True
except ValueError:
# not valid json, lets try to capture error
data = dict(failed=True, _ansible_parsed=False)
data['module_stdout'] = res.get('stdout', u'')
if 'stderr' in res:
data['module_stderr'] = res['stderr']
if res['stderr'].startswith(u'Traceback'):
data['exception'] = res['stderr']
# in some cases a traceback will arrive on stdout instead of stderr, such as when using ssh with -tt
if 'exception' not in data and data['module_stdout'].startswith(u'Traceback'):
data['exception'] = data['module_stdout']
# The default
data['msg'] = "MODULE FAILURE"
# try to figure out if we are missing interpreter
if self._used_interpreter is not None:
match = re.compile('%s: (?:No such file or directory|not found)' % self._used_interpreter.lstrip('!#'))
if match.search(data['module_stderr']) or match.search(data['module_stdout']):
data['msg'] = "The module failed to execute correctly, you probably need to set the interpreter."
# always append hint
data['msg'] += '\nSee stdout/stderr for the exact error'
if 'rc' in res:
data['rc'] = res['rc']
return data
# FIXME: move to connection base
def _low_level_execute_command(self, cmd, sudoable=True, in_data=None, executable=None, encoding_errors='surrogate_then_replace', chdir=None):
'''
This is the function which executes the low level shell command, which
may be commands to create/remove directories for temporary files, or to
run the module code or python directly when pipelining.
:kwarg encoding_errors: If the value returned by the command isn't
utf-8 then we have to figure out how to transform it to unicode.
If the value is just going to be displayed to the user (or
discarded) then the default of 'replace' is fine. If the data is
used as a key or is going to be written back out to a file
verbatim, then this won't work. May have to use some sort of
replacement strategy (python3 could use surrogateescape)
:kwarg chdir: cd into this directory before executing the command.
'''
display.debug("_low_level_execute_command(): starting")
# if not cmd:
# # this can happen with powershell modules when there is no analog to a Windows command (like chmod)
# display.debug("_low_level_execute_command(): no command, exiting")
# return dict(stdout='', stderr='', rc=254)
if chdir:
display.debug("_low_level_execute_command(): changing cwd to %s for this command" % chdir)
cmd = self._connection._shell.append_command('cd %s' % chdir, cmd)
# https://github.com/ansible/ansible/issues/68054
if executable:
self._connection._shell.executable = executable
ruser = self._get_remote_user()
buser = self.get_become_option('become_user')
if (sudoable and self._connection.become and # if sudoable and have become
self._connection.transport.split('.')[-1] != 'network_cli' and # if not using network_cli
(C.BECOME_ALLOW_SAME_USER or (buser != ruser or not any((ruser, buser))))): # if we allow same user PE or users are different and either is set
display.debug("_low_level_execute_command(): using become for this command")
cmd = self._connection.become.build_become_command(cmd, self._connection._shell)
if self._connection.allow_executable:
if executable is None:
executable = self._play_context.executable
# mitigation for SSH race which can drop stdout (https://github.com/ansible/ansible/issues/13876)
# only applied for the default executable to avoid interfering with the raw action
cmd = self._connection._shell.append_command(cmd, 'sleep 0')
if executable:
cmd = executable + ' -c ' + shlex_quote(cmd)
display.debug("_low_level_execute_command(): executing: %s" % (cmd,))
# Change directory to basedir of task for command execution when connection is local
if self._connection.transport == 'local':
self._connection.cwd = to_bytes(self._loader.get_basedir(), errors='surrogate_or_strict')
rc, stdout, stderr = self._connection.exec_command(cmd, in_data=in_data, sudoable=sudoable)
# stdout and stderr may be either a file-like or a bytes object.
# Convert either one to a text type
if isinstance(stdout, binary_type):
out = to_text(stdout, errors=encoding_errors)
elif not isinstance(stdout, text_type):
out = to_text(b''.join(stdout.readlines()), errors=encoding_errors)
else:
out = stdout
if isinstance(stderr, binary_type):
err = to_text(stderr, errors=encoding_errors)
elif not isinstance(stderr, text_type):
err = to_text(b''.join(stderr.readlines()), errors=encoding_errors)
else:
err = stderr
if rc is None:
rc = 0
# be sure to remove the BECOME-SUCCESS message now
out = self._strip_success_message(out)
display.debug(u"_low_level_execute_command() done: rc=%d, stdout=%s, stderr=%s" % (rc, out, err))
return dict(rc=rc, stdout=out, stdout_lines=out.splitlines(), stderr=err, stderr_lines=err.splitlines())
def _get_diff_data(self, destination, source, task_vars, source_file=True):
# Note: Since we do not diff the source and destination before we transform from bytes into
# text the diff between source and destination may not be accurate. To fix this, we'd need
# to move the diffing from the callback plugins into here.
#
# Example of data which would cause trouble is src_content == b'\xff' and dest_content ==
# b'\xfe'. Neither of those are valid utf-8 so both get turned into the replacement
# character: diff['before'] = u'�' ; diff['after'] = u'�' When the callback plugin later
# diffs before and after it shows an empty diff.
diff = {}
display.debug("Going to peek to see if file has changed permissions")
peek_result = self._execute_module(module_name='file', module_args=dict(path=destination, _diff_peek=True), task_vars=task_vars, persist_files=True)
if peek_result.get('failed', False):
display.warning(u"Failed to get diff between '%s' and '%s': %s" % (os.path.basename(source), destination, to_text(peek_result.get(u'msg', u''))))
return diff
if peek_result.get('rc', 0) == 0:
if peek_result.get('state') in (None, 'absent'):
diff['before'] = u''
elif peek_result.get('appears_binary'):
diff['dst_binary'] = 1
elif peek_result.get('size') and C.MAX_FILE_SIZE_FOR_DIFF > 0 and peek_result['size'] > C.MAX_FILE_SIZE_FOR_DIFF:
diff['dst_larger'] = C.MAX_FILE_SIZE_FOR_DIFF
else:
display.debug(u"Slurping the file %s" % source)
dest_result = self._execute_module(module_name='slurp', module_args=dict(path=destination), task_vars=task_vars, persist_files=True)
if 'content' in dest_result:
dest_contents = dest_result['content']
if dest_result['encoding'] == u'base64':
dest_contents = base64.b64decode(dest_contents)
else:
raise AnsibleError("unknown encoding in content option, failed: %s" % to_native(dest_result))
diff['before_header'] = destination
diff['before'] = to_text(dest_contents)
if source_file:
st = os.stat(source)
if C.MAX_FILE_SIZE_FOR_DIFF > 0 and st[stat.ST_SIZE] > C.MAX_FILE_SIZE_FOR_DIFF:
diff['src_larger'] = C.MAX_FILE_SIZE_FOR_DIFF
else:
display.debug("Reading local copy of the file %s" % source)
try:
with open(source, 'rb') as src:
src_contents = src.read()
except Exception as e:
raise AnsibleError("Unexpected error while reading source (%s) for diff: %s " % (source, to_native(e)))
if b"\x00" in src_contents:
diff['src_binary'] = 1
else:
diff['after_header'] = source
diff['after'] = to_text(src_contents)
else:
display.debug(u"source of file passed in")
diff['after_header'] = u'dynamically generated'
diff['after'] = source
if self._play_context.no_log:
if 'before' in diff:
diff["before"] = u""
if 'after' in diff:
diff["after"] = u" [[ Diff output has been hidden because 'no_log: true' was specified for this result ]]\n"
return diff
def _find_needle(self, dirname, needle):
'''
find a needle in haystack of paths, optionally using 'dirname' as a subdir.
This will build the ordered list of paths to search and pass them to dwim
to get back the first existing file found.
'''
# dwim already deals with playbook basedirs
path_stack = self._task.get_search_path()
# if missing it will return a file not found exception
return self._loader.path_dwim_relative_stack(path_stack, dirname, needle)
| tonk/ansible | lib/ansible/plugins/action/__init__.py | Python | gpl-3.0 | 58,185 |
#!/usr/bin/python
from __future__ import division
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
#from matplotlib.backends.backend_pdf import PdfPages
import sys
def stats_file_as_matrix(file_name):
with open(file_name, 'r') as f:
        return [list(map(float, line.strip().split(' '))) for line in f]
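# Each line of a stats file is assumed (hypothetically) to carry at least five
# space-separated numbers: a leading time column followed by the four series
# plotted below (bitrate, delay, jitter, packet loss); r[i+1] selects series i.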
#pdfTitle = 'results.pdf'
#pp = PdfPages(pdfTitle)
titles = ["Bitrate", "Delay", "Jitter", "Packet loss"]
for f in sys.argv[1:]:
print("Starting work on "+f+", converting stats to matrix!")
mat = stats_file_as_matrix(f)
x = range(len(mat))
#define the figure size and grid layout properties
figsize = (10, 8)
cols = 2
rows = 2
gs = gridspec.GridSpec( rows, cols)
fig = plt.figure(num=1, figsize=figsize)
fig.suptitle(f)
ax = []
for i in range(4):
        y = [r[i+1] for r in mat]
row = (i // cols)
col = i % cols
ax.append(fig.add_subplot(gs[row, col]))
ax[-1].set_title(titles[i])
ax[-1].set_xlabel('Time [ms]')
ax[-1].plot(x, y, 'o', ls='-', ms=4)
print("Finished with "+f+", creating JPG!")
#pp.savefig(fig)
plt.savefig(f+'.jpg')
plt.clf()
#pp.close()
| yossisolomon/ITGController | plot_from_stats.py | Python | gpl-3.0 | 1,136 |
from util import A, B, B_reversed, C, D, E, F, G, H, instr, spec, spec_reversed
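# _mark snapshots every name defined so far (the imports above plus _mark
# itself) so that the `ops` dict built near the bottom of this module can
# collect only the instruction encoders defined below.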
_mark = set(dir()) ; _mark.add('_mark')
@A
def jmp(address):
'''
1001 010k kkkk 110k
kkkk kkkk kkkk kkkk
'''
def cli():
return 16, 0b1001010011111000
@B
def ldi(register, immediate):
'''
1110 KKKK dddd KKKK
'''
@C
def out(io_port, register):
'''
1011 1AAr rrrr AAAA
'''
@A
def rcall(address):
'''
1101 kkkk kkkk kkkk
'''
@B_reversed
def sts(address, register):
'''
1001 001d dddd 0000
kkkk kkkk kkkk kkkk
'''
@D
def mov(Rd, Rr):
'''
0010 11rd dddd rrrr
'''
def sei():
return 16, 0b1001010001111000
def ret():
return 16, 0b1001010100001000
@A
def rjmp(address):
'''
1100 kkkk kkkk kkkk
'''
@B
def lds(register, address):
'''
1001 000d dddd 0000
kkkk kkkk kkkk kkkk
'''
@E
def sbrs(register, bit):
'''
1111 111r rrrr 0bbb
'''
def dw(values, data):
return -1, data
def db(values, data):
return -1, data
@F
def st_post_incr_Y(Rr):
'''
1001 001r rrrr 1001
'''
@F
def st_post_incr_Z(Rr):
'''
1001 001r rrrr 0001
'''
@G
def ld_pre_decr_Y(Rd):
'''
1001 000d dddd 1010
'''
@G
def ld_post_incr_X(Rd):
'''
1001 000d dddd 1101
'''
@G
def ld_post_incr_Z(Rd):
'''
1001 000d dddd 0001
'''
@G
def ld_pre_decr_Z(Rd):
'''
1001 000d dddd 0010
'''
@G
def lpm(Rd):
'''
1001 000d dddd 0100
'''
@G
def lpm_post_incr_Z(Rd):
'''
1001 000d dddd 0101
'''
@B
def cpi(register, immediate):
'''
0011 KKKK dddd KKKK
'''
@A
def brne(address):
'''
1111 01kk kkkk k001
'''
@A
def breq(address):
'''
1111 00kk kkkk k001
'''
@A
def brlo(address):
'''
1111 00kk kkkk k000
'''
@G
def lsr(Rd):
'''
1001 010d dddd 0110
'''
@G
def lsl(Rd):
'''
0000 11dd dddd dddd
'''
@D
def add(Rd, Rr):
'''
0000 11rd dddd rrrr
'''
@A
def brcc(address):
'''
1111 01kk kkkk k000
'''
@G
def inc(Rd):
'''
1001 010d dddd 0011
'''
@G
def dec(Rd):
'''
1001 010d dddd 1010
'''
def ijmp():
return 16, 0b1001010000001001
@D
def cp(Rd, Rr):
'''
0001 01rd dddd rrrr
'''
@D
def cpse(Rd, Rr):
'''
0001 00rd dddd rrrr
'''
@D
def cpc(Rd, Rr):
'''
0000 01rd dddd rrrr
'''
@A
def brsh(address):
'''
1111 01kk kkkk k000
'''
@D
def movw(Rd, Rr):
'''
0000 0001 dddd rrrr
'''
@B
def andi(register, immediate):
'''
0111 KKKK dddd KKKK
'''
@H
def sbis(register, bit):
'''
1001 1011 AAAA Abbb
'''
@G
def clr(Rd):
'''
0010 01dd dddd dddd
'''
@F
def push(Rr):
'''
1001 001r rrrr 1111
'''
@G
def pop(Rd):
'''
1001 000d dddd 1111
'''
@D
def or_(Rd, Rr):
'''
0010 10rd dddd rrrr
'''
@G
def swap(Rd):
'''
1001 010d dddd 0010
'''
@B
def adiw(register, immediate):
'''
1001 0110 KKdd KKKK
'''
@B
def sbiw(register, immediate):
'''
1001 0111 KKdd KKKK
'''
@B
def subi(register, immediate):
'''
0101 KKKK dddd KKKK
'''
@D
def mul(Rd, Rr):
'''
1001 11rd dddd rrrr
'''
ops = dict(
(name, func)
for name, func in locals().iteritems()
if name not in _mark
)
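# `ops` maps instruction names to their encoders; each encoder returns a
# (bit_width, encoded_word) pair, e.g. ops['ret']() == (16, 0b1001010100001000)
# as defined above.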
class InstructionsMixin(object):
@instr
def jmp(self, address):
self.here += 2
return address
@instr
def rjmp(self, address):
return address
@instr
def rcall(self, address):
return address
@instr
def cli(self):
pass
@instr
def sei(self):
pass
@instr
def ret(self):
pass
@instr
def ldi(self, target, address):
if isinstance(address, str):
assert len(address) == 1, repr(address)
address = ord(address)
return target, address << 1
@instr
def out(self, target, address):
return target, address
@instr
def sts(self, address, register):
self.here += 2
return address << 1, register
@instr
def mov(self, target, source):
return target, source
@instr
def lds(self, target, address):
self.here += 2
return target, address << 1
@instr
def sbrs(self, target, address):
return target, address
@spec
def st_post_incr(self, ptr, register):
pass
@spec_reversed
def ld_post_incr(self, register, ptr):
pass
@spec_reversed
def ld_pre_decr(self, register, ptr):
pass
@spec_reversed
def lpm_post_incr(self, register, ptr):
pass
@instr
def cpi(self, register, immediate):
if isinstance(immediate, str):
assert len(immediate) == 1, repr(immediate)
immediate = ord(immediate)
return register, immediate << 1
@instr
def brne(self, address):
return address
@instr
def breq(self, address):
return address
@instr
def inc(self, address):
return address
@instr
def mul(self, target, source):
return target, source
@instr
def brlo(self, address):
return address
@instr
def subi(self, target, source):
return target, source
@instr
def add(self, target, source):
return target, source
@instr
def dec(self, address):
return address
@instr
def clr(self, address):
return address
@instr
def lsl(self, address):
return address
@instr
def brcc(self, address):
return address
@instr
def or_(self, target, source):
return target, source
@instr
def push(self, address):
return address
@instr
def swap(self, address):
return address
@instr
def pop(self, address):
return address
@instr
def movw(self, target, source):
return target, source
@instr
def andi(self, target, source):
return target, source
@instr
def adiw(self, target, source):
return target, source
def lpm(self, target, source):
assert source == 30, repr(source) # Must be Z
self._one('lpm', target)
@instr
def cp(self, target, source):
return target, source
@instr
def cpc(self, target, source):
return target, source
@instr
def brsh(self, address):
return address
@instr
def cpse(self, target, source):
return target, source
@instr
def sbiw(self, target, source):
return target, source
@instr
def lsr(self, address):
return address
@instr
def ijmp(self):
pass
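    # Record one single-operand instruction at the current output position;
    # `address` may be a label name, which _name_or_addr translates.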
def _one(self, op, address):
name, address = self._name_or_addr(address)
addr = self._get_here()
print 'assembling %s instruction at %s to %s' % (op, addr, name)
self.data[addr] = (op, address)
self.here += 2
def _instruction_namespace(self):
for n in dir(InstructionsMixin):
if n.startswith('_'):
continue
yield n, getattr(self, n)
if __name__ == '__main__':
import pprint
pprint.pprint(ops)
pprint.pprint(dict(InstructionsMixin()._instruction_namespace()))
| calroc/AVRV | instructions.py | Python | gpl-3.0 | 6,667 |
#!/usr/bin/env python
import sugar_stats_consolidation
from sugar_stats_consolidation.db import *
from sugar_stats_consolidation.rrd_files import *
from sugar_stats_consolidation.consolidation import *
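# Create the statistics database (credentials below are the test fixtures)
# and consolidate every RRD file under /var/lib/sugar-stats/rrd into it.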
db = DB_Stats('statistics', 'root', 'gustavo')
db.create()
con = Consolidation('/var/lib/sugar-stats/rrd', db)
con.process_rrds()
| activitycentral/statistics-consolidation | test/test_cons.py | Python | gpl-3.0 | 337 |
'''
testing speedup of code
Created on Sep 17, 2016
@author: jonaswallin
'''
from Mixture.density import mNIG
from Mixture.density.purepython import mNIG as pmNIG
from Mixture import mixOneDims
import numpy as np
import numpy.random as npr
import timeit
# most speed here is used startup (iteration = 500, n = 1000)
# Cython:
# 2000 0.152 0.000 0.268 0.000 NIG.py:82(EV)
# 2000 0.098 0.000 0.145 0.000 NIG.py:39(dens)
# 2000 0.051 0.000 0.051 0.000 {Mixture.util.cython_Bessel.Bessel0approx}
# 2000 0.037 0.000 0.037 0.000 {Mixture.util.cython_Bessel.Bessel1approx}
# Pure Python:
# 2000 1.201 0.001 1.264 0.001 NIG.py:208(EV)
# 2000 1.195 0.001 1.201 0.001 NIG.py:248(dens)
# Pure Python, no precompute:
# 2000 2.322 0.001 2.387 0.001 NIG.py:208(EV)
# 2000 1.205 0.001 1.211 0.001 NIG.py:248(dens)
npr.seed(10)
def speed_python(pure_python=False, precompute = True):
K = 2
d = 2
iteration = 500
mixObj = mixOneDims(K=K, d=d)
if pure_python:
mixObj.set_densites([pmNIG(d=d) for k in range(K)]) # @UnusedVariable
else:
mixObj.set_densites([mNIG(d=d) for k in range(K)]) # @UnusedVariable
    paramMat_true = [np.array([[1.1, 1.12, 0.1, 0],
                               [-1, 0, 2, -4]]),
                     np.array([[-2, 0, 0.3, 0],
                               [1, 0, 2, -4]])]
alpha_true = [0]
mixObj.set_paramMat(alpha = alpha_true,paramMat = paramMat_true)
Y = mixObj.sample(n = 1000)
mixObj.set_data(Y)
paramMat = [npr.randn(2,4),npr.randn(2,4)]
paramMat[0][0,0] = 1.1
paramMat[1][0,0] = -2
alpha = np.array(alpha_true)
for i in range(iteration): # @UnusedVariable
p, alpha, paramMat = mixObj.EMstep(alpha = alpha, paramMat = paramMat , precompute = precompute) # @UnusedVariable
if __name__ == "__main__":
| JonasWallin/Mixture | test/speed_checks/speed_mixture.py | Python | gpl-3.0 | 1,989 |
# -*- coding: utf-8 -*-
#------------------------------------------------------------
import sys
PY3 = False
if sys.version_info[0] >= 3: PY3 = True; unicode = str; unichr = chr; long = int
if PY3:
    import urllib.parse as urlparse  # Very slow in PY2; in PY3 it is the native module
else:
    import urlparse  # Use the PY2 native module, which is faster
import re
from platformcode import config, logger
from core import scrapertools
from core.item import Item
from core import servertools
from core import httptools
host = 'https://frprn.com'
def mainlist(item):
logger.info()
itemlist = []
itemlist.append(item.clone(title="Nuevas" , action="lista", url=host))
itemlist.append(item.clone(title="Mejor valorada" , action="lista", url=host + "/top-rated/"))
itemlist.append(item.clone(title="Mas largo" , action="lista", url=host + "/longest/"))
itemlist.append(item.clone(title="Modelos" , action="categorias", url=host + "/models/most-popular/"))
itemlist.append(item.clone(title="Categorias" , action="categorias", url=host + "/categories/"))
itemlist.append(item.clone(title="Buscar", action="search"))
return itemlist
def search(item, texto):
logger.info()
texto = texto.replace(" ", "%20")
item.url = "%s/search/%s/?mode=async&action=get_block&block_id=list_videos_videos&from2=%s&fromStart=1&fromEnd=%s" % (host, texto,1,1)
try:
return lista(item)
except:
import sys
for line in sys.exc_info():
logger.error("%s" % line)
return []
def categorias(item):
logger.info()
itemlist = []
data = httptools.downloadpage(item.url).data
data = re.sub(r"\n|\r|\t| |<br>", "", data)
patron = '<li class="thumb thumb-\w+">.*?'
patron += '<a href="([^"]+)">.*?'
patron += '<img class="lazy" data-original="([^"]+)".*?'
patron += '<div class="title">(.*?)</a>'
matches = re.compile(patron,re.DOTALL).findall(data)
for scrapedurl,scrapedthumbnail,scrapedtitle in matches:
scrapedplot = ""
title = scrapertools.find_single_match(scrapedtitle,'<div class="text">([^<]+)<')
if "/categories/" in item.url:
cantidad = scrapertools.find_single_match(scrapedtitle,'<div class="count">(\d+)</div>')
scrapedtitle = scrapertools.find_single_match(scrapedtitle,'<div class="name">([^<]+)</div>')
title = "%s (%s)" %(scrapedtitle, cantidad)
scrapedurl = urlparse.urljoin(item.url,scrapedurl)
itemlist.append(item.clone(action="lista", title=title, url=scrapedurl,
fanart=scrapedthumbnail, thumbnail=scrapedthumbnail, plot=scrapedplot) )
next_page = scrapertools.find_single_match(data,'<li class="pagination-next"><a href="([^"]+)">')
if next_page!="":
next_page = urlparse.urljoin(item.url,next_page)
itemlist.append(item.clone(action="categorias", title="[COLOR blue]Página Siguiente >>[/COLOR]", url=next_page) )
return itemlist
def lista(item):
logger.info()
itemlist = []
data = httptools.downloadpage(item.url).data
data = re.sub(r"\n|\r|\t| |<br>", "", data)
patron = '<div class="thumb">.*?'
patron += '<a href="([^"]+)".*?'
patron += '<img class="lazy" data-original="([^"]+)" alt="([^"]+)".*?'
matches = re.compile(patron,re.DOTALL).findall(data)
for scrapedurl,scrapedthumbnail,scrapedtitle in matches:
url = urlparse.urljoin(item.url,scrapedurl)
duracion = ""
title = "[COLOR yellow]%s[/COLOR] %s" % (duracion, scrapedtitle)
contentTitle = title
thumbnail = scrapedthumbnail
plot = ""
year = ""
action = "play"
if logger.info() == False:
action = "findvideos"
itemlist.append(item.clone(action=action, title=title, url=url, thumbnail=thumbnail,
fanart=thumbnail, plot=plot, contentTitle = contentTitle))
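    # Build the "next page" AJAX URL from the data-* attributes embedded in the listing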
patron = 'data-from="([^"]+)" data-id="([^"]+)" data-total="([^"]+)" data-page="([^"]+)" data-url="([^"]+)"'
matches = re.compile(patron,re.DOTALL).findall(data)
for dfrom,id,total,page,purl in matches:
page = int(page)
page += page
next_page = "%s?action=get_block&block_id=%s&%s=%s" %(purl, id, dfrom, page)
itemlist.append(item.clone(action="lista", title="[COLOR blue]Página Siguiente >>[/COLOR]", url=next_page) )
return itemlist
def findvideos(item):
logger.info()
itemlist = []
itemlist.append(item.clone(action="play", title= "%s", contentTitle = item.title, url=item.url))
itemlist = servertools.get_servers_itemlist(itemlist, lambda i: i.title % i.server.capitalize())
return itemlist
def play(item):
logger.info()
itemlist = []
itemlist.append(item.clone(action="play", title= "%s", contentTitle = item.title, url=item.url))
itemlist = servertools.get_servers_itemlist(itemlist, lambda i: i.title % i.server.capitalize())
return itemlist
| alfa-addon/addon | plugin.video.alfa/channels/freeporn.py | Python | gpl-3.0 | 5,059 |
import json
import re
packageJson = '../../../package.json'
with open(packageJson) as data_file:
data = json.load(data_file)
config = '../../pkjs/config.js'
with open(config) as conf_file:
s = conf_file.readline()
keys = []
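# Scan config.js line by line, collecting every name that appears in a
# messageKey": ... entry.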
while (s):
suggestKey = re.search(r"messageKey\"\:(.[^,]*)", s)
if suggestKey:
keys.append(suggestKey.group(1).strip('" ,'))
s = conf_file.readline()
def func(item):
return item.split('[',1)[0]
knownKeys = list(map(func, data["pebble"]["messageKeys"]))
for key in keys:
#print('processing ', key);
if key not in knownKeys:
        print('unknown key', key)
| UnnamedHero/pebble-watchface-time-and-calendar | src/c/test/testMessageKeys.py | Python | gpl-3.0 | 661 |
import plt, ipp
import os, string
print "Starting try-trace.py: dir() = %s" % dir()
# try-trace.py: test program for pypy plt
print "- - - - -"
print "NO_COMRESSION = %d" % plt.NO_COMPRESSION
#base = "/Users/jbro111" # OSX
base = "/home/nevil" # Ubuntu
#fn = "pypy/small-sample.erf"
#fn = "tcp-analyse/fdt-p5.pcap"
#fn = "pypy/small-sample.pcap"
fn = "pypy/1000packets.pcap.gz"
full_fn = base + '/' + fn
print "%s: isfile %s" % (full_fn, os.path.isfile(full_fn))
#try:
# with open(full_fn) as file:
# print "File opened OK"
# file.close()
#except IOError as e:
# print "Unable to open file" #Does not exist OR no read permissions
trace_format = "pcapfile"
#trace_format = "erf"
uri = trace_format + ':' + full_fn
print ">> uri = %s" % uri
t = plt.trace(uri)
t.start()
test_dict = {}
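# test_dict counts packets per "src|dst" address-pair key built below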
def print_first(s, n):
for x in range(n):
if x%8 == 0:
print "",
print "%02x" % ord(s[x]),
for n,pkt in enumerate(t):
ip = pkt.ip
print "--- n=%d ---" % n
print "pkt linktype %d, ethertype %04x, vlan_id %d" % (
pkt.linktype, pkt.ethertype, pkt.vlan_id)
print "ip.seconds = %.6f, ip.ts_sec = %d, ip.time = %s" % (
ip.seconds, ip.ts_sec, ip.time)
print "ip.erf_time = %s" % ip.erf_time
print "ip.wire_len = %s, ip.capture_len = %s, direction = %s" % (
ip.wire_len, ip.capture_len, ip.direction)
ba = ip.data
print "@@ 1 @@ ba = %s" % ba
print "IP.data:",
for b in ba:
print "%02x" % b, # ba[x],
print
sa = ip.src_prefix; da = ip.dst_prefix
print "*** %s -> %s" % (sa, da)
print "sa.addr = %s" % sa.addr[0:4]
for v in sa.addr:
print "%02x" % v,
print
print "- - - - -"
bsa = bytearray(sa.addr)
for v in bsa:
print "%02x" % v,
print
print "ba = %s" % plt.ipp.IPprefix(4, bsa)
# If we import plt, ipp above, we couild say ipp.IPprefix here
print "= = = = ="
#exit()
s = str(sa.addr) + '|' + str(da.addr) # Have to convert to str explicitly
print "*** %s -> %s %s" % (sa, da, s)
print_first(s, 9)
print " ident = %04x" % ip.ident
v = test_dict.get(s)
if not v:
test_dict[s] = 1
else:
test_dict[s] = v+1
    if n == 0:  # n is zero-origin; stop after the first packet
        break
print "EOF - - -"
#exit()
def ca2str(cdata_array):
s = string.join(cdata_array, '')
n = string.find(s, '\x00')
return s[0:n]
keys = sorted(test_dict)
for k in keys:
print "%8d %s" % (test_dict[k], k)
    ka = k.split('|')  # keys above were joined with '|', not '*'
for j in range(0,4):
print "%02x" % ord(ka[0][j]),
print
psa = ipp.IPprefix(4, bytearray(ka[0]))
print "psa = %s" % psa
print "%8d %s -> %s" % (test_dict[k],
ka[0], ka[1])
| nevil-brownlee/pypy-libtrace | lib/natkit/try-dict.py | Python | gpl-3.0 | 2,776 |
#coding=UTF-8
from pyspark import SparkContext, SparkConf, SQLContext, Row, HiveContext
from pyspark.sql.types import *
from datetime import date, datetime, timedelta
import sys, re, os
st = datetime.now()
conf = SparkConf().setAppName('PROC_O_IBK_WSYH_ECACCT').setMaster(sys.argv[2])
sc = SparkContext(conf = conf)
sc.setLogLevel('WARN')
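# argv: [1] ETL date YYYYMMDD, [2] Spark master, [3] HDFS prefix,
# [4] database name, [5] optional "hive" to use HiveContext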
if len(sys.argv) > 5:
if sys.argv[5] == "hive":
sqlContext = HiveContext(sc)
else:
sqlContext = SQLContext(sc)
hdfs = sys.argv[3]
dbname = sys.argv[4]
# Dates used by this job
etl_date = sys.argv[1]
# ETL date
V_DT = etl_date
# previous day's date
V_DT_LD = (date(int(etl_date[0:4]), int(etl_date[4:6]), int(etl_date[6:8])) + timedelta(-1)).strftime("%Y%m%d")
# first day of the current month
V_DT_FMD = date(int(etl_date[0:4]), int(etl_date[4:6]), 1).strftime("%Y%m%d")
# last day of the previous month
V_DT_LMD = (date(int(etl_date[0:4]), int(etl_date[4:6]), 1) + timedelta(-1)).strftime("%Y%m%d")
# 10-character date (YYYY-MM-DD)
V_DT10 = (date(int(etl_date[0:4]), int(etl_date[4:6]), int(etl_date[6:8]))).strftime("%Y-%m-%d")
V_STEP = 0
O_CI_WSYH_ECACCT = sqlContext.read.parquet(hdfs+'/O_CI_WSYH_ECACCT/*')
O_CI_WSYH_ECACCT.registerTempTable("O_CI_WSYH_ECACCT")
# Task [12] 001-01::
V_STEP = V_STEP + 1
# First delete all existing data from the target table
ret = os.system("hdfs dfs -rm -r /"+dbname+"/F_CI_WSYH_ECACCT/*.parquet")
# Copy yesterday's full snapshot over from the backup table
ret = os.system("hdfs dfs -cp -f /"+dbname+"/F_CI_WSYH_ECACCT_BK/"+V_DT_LD+".parquet /"+dbname+"/F_CI_WSYH_ECACCT/"+V_DT+".parquet")
F_CI_WSYH_ECACCT = sqlContext.read.parquet(hdfs+'/F_CI_WSYH_ECACCT/*')
F_CI_WSYH_ECACCT.registerTempTable("F_CI_WSYH_ECACCT")
sql = """
SELECT A.CIFSEQ AS CIFSEQ
,A.ACSEQ AS ACSEQ
,A.DEPTSEQ AS DEPTSEQ
,A.BANKACTYPE AS BANKACTYPE
,A.BANKACSUBTYPE AS BANKACSUBTYPE
,A.ACNO AS ACNO
,A.ACNAME AS ACNAME
,A.ACORDER AS ACORDER
,A.CURRENCY AS CURRENCY
,A.CRFLAG AS CRFLAG
,A.ASSOCIFSEQ AS ASSOCIFSEQ
,A.ASSOCIFACFLAG AS ASSOCIFACFLAG
,A.ASSOCIFLEVEL AS ASSOCIFLEVEL
,A.CORECIFNO AS CORECIFNO
,A.ACALIAS AS ACALIAS
,A.ACSTATE AS ACSTATE
,A.CREATEUSERSEQ AS CREATEUSERSEQ
,A.CREATEDEPTSEQ AS CREATEDEPTSEQ
,A.CREATETIME AS CREATETIME
,A.UPDATEUSERSEQ AS UPDATEUSERSEQ
,A.UPDATEDEPTSEQ AS UPDATEDEPTSEQ
,A.UPDATETIME AS UPDATETIME
,A.FR_ID AS FR_ID
,V_DT AS ODS_ST_DATE
,'IBK' AS ODS_SYS_ID
FROM O_CI_WSYH_ECACCT A --customer account table
"""
sql = re.sub(r"\bV_DT\b", "'"+V_DT10+"'", sql)
F_CI_WSYH_ECACCT_INNTMP1 = sqlContext.sql(sql)
F_CI_WSYH_ECACCT_INNTMP1.registerTempTable("F_CI_WSYH_ECACCT_INNTMP1")
#F_CI_WSYH_ECACCT = sqlContext.read.parquet(hdfs+'/F_CI_WSYH_ECACCT/*')
#F_CI_WSYH_ECACCT.registerTempTable("F_CI_WSYH_ECACCT")
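# Keep only the rows of yesterday's full snapshot whose ACSEQ was not
# re-extracted today (anti-join), then union with today's increment below
# to rebuild the full table.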
sql = """
SELECT DST.CIFSEQ          --customer sequence number: src.CIFSEQ
      ,DST.ACSEQ           --account sequence number: src.ACSEQ
      ,DST.DEPTSEQ         --account-opening branch: src.DEPTSEQ
      ,DST.BANKACTYPE      --bank account type: src.BANKACTYPE
      ,DST.BANKACSUBTYPE   --bank account nature: src.BANKACSUBTYPE
      ,DST.ACNO            --account number: src.ACNO
      ,DST.ACNAME          --account name: src.ACNAME
      ,DST.ACORDER         --display order: src.ACORDER
      ,DST.CURRENCY        --currency: src.CURRENCY
      ,DST.CRFLAG          --cash/remittance flag: src.CRFLAG
      ,DST.ASSOCIFSEQ      --: src.ASSOCIFSEQ
      ,DST.ASSOCIFACFLAG   --associated-enterprise account flag: src.ASSOCIFACFLAG
      ,DST.ASSOCIFLEVEL    --associated-enterprise level: src.ASSOCIFLEVEL
      ,DST.CORECIFNO       --core customer number of the account: src.CORECIFNO
      ,DST.ACALIAS         --account alias: src.ACALIAS
      ,DST.ACSTATE         --status: src.ACSTATE
      ,DST.CREATEUSERSEQ   --creating user sequence number: src.CREATEUSERSEQ
      ,DST.CREATEDEPTSEQ   --creating branch sequence number: src.CREATEDEPTSEQ
      ,DST.CREATETIME      --creation time: src.CREATETIME
      ,DST.UPDATEUSERSEQ   --updating user sequence number: src.UPDATEUSERSEQ
      ,DST.UPDATEDEPTSEQ   --updating branch sequence number: src.UPDATEDEPTSEQ
      ,DST.UPDATETIME      --update time: src.UPDATETIME
      ,DST.FR_ID           --legal-person (corporation) ID: src.FR_ID
      ,DST.ODS_ST_DATE     --system date: src.ODS_ST_DATE
      ,DST.ODS_SYS_ID      --system flag: src.ODS_SYS_ID
FROM F_CI_WSYH_ECACCT DST
LEFT JOIN F_CI_WSYH_ECACCT_INNTMP1 SRC
ON SRC.ACSEQ = DST.ACSEQ
WHERE SRC.ACSEQ IS NULL """
sql = re.sub(r"\bV_DT\b", "'"+V_DT10+"'", sql)
F_CI_WSYH_ECACCT_INNTMP2 = sqlContext.sql(sql)
dfn="F_CI_WSYH_ECACCT/"+V_DT+".parquet"
F_CI_WSYH_ECACCT_INNTMP2=F_CI_WSYH_ECACCT_INNTMP2.unionAll(F_CI_WSYH_ECACCT_INNTMP1)
F_CI_WSYH_ECACCT_INNTMP1.cache()
F_CI_WSYH_ECACCT_INNTMP2.cache()
nrowsi = F_CI_WSYH_ECACCT_INNTMP1.count()
nrowsa = F_CI_WSYH_ECACCT_INNTMP2.count()
F_CI_WSYH_ECACCT_INNTMP2.write.save(path = hdfs + '/' + dfn, mode='overwrite')
F_CI_WSYH_ECACCT_INNTMP1.unpersist()
F_CI_WSYH_ECACCT_INNTMP2.unpersist()
et = datetime.now()
print("Step %d start[%s] end[%s] use %d seconds, insert F_CI_WSYH_ECACCT lines %d, all lines %d") % (V_STEP, st.strftime("%H:%M:%S"), et.strftime("%H:%M:%S"), (et-st).seconds, nrowsi, nrowsa)
ret = os.system("hdfs dfs -mv /"+dbname+"/F_CI_WSYH_ECACCT/"+V_DT_LD+".parquet /"+dbname+"/F_CI_WSYH_ECACCT_BK/")
# First delete today's data from the backup table
ret = os.system("hdfs dfs -rm -r /"+dbname+"/F_CI_WSYH_ECACCT_BK/"+V_DT+".parquet")
# Copy today's full snapshot from the main table into the backup table
ret = os.system("hdfs dfs -cp -f /"+dbname+"/F_CI_WSYH_ECACCT/"+V_DT+".parquet /"+dbname+"/F_CI_WSYH_ECACCT_BK/"+V_DT+".parquet")
| cysuncn/python | spark/crm/PROC_O_IBK_WSYH_ECACCT.py | Python | gpl-3.0 | 6,823 |
# Copyright (C) 2013 Michael Biggs. See the COPYING file at the top-level
# directory of this distribution and at http://shok.io/code/copyright.html
# Tokens that come from the Lexer are either pairs or tuples:
# colno:type
# colno:type:value
class LexToken:
colno = 0
ttype = ''
tvalue = ''
def __init__(self, tokenstr):
t = tokenstr.split(':')
if len(t) < 2 or len(t) > 3:
raise Exception("invalid token: %s" % t)
self.colno = t[0]
self.ttype = t[1]
if len(t) == 3:
self.tvalue = t[2]
def __repr__(self):
if '' == self.tvalue:
return "<%s:%s>" % (self.colno, self.ttype)
else:
return "<%s:%s:%s>" % (self.colno, self.ttype, self.tvalue)
def NewlineToken():
return LexToken('0:NEWL')
| nfomon/shok | parser/shokparser/LexToken.py | Python | gpl-3.0 | 757 |
from umlfri2.application.commands.base import Command
from umlfri2.application.events.diagram import ConnectionMovedEvent
class MoveConnectionLabelCommand(Command):
def __init__(self, connection_label, delta):
self.__diagram_name = connection_label.connection.diagram.get_display_name()
self.__connection_label = connection_label
self.__delta = delta
self.__label_position = None
@property
def description(self):
return "Moved label on connection in diagram {0}".format(self.__diagram_name)
def _do(self, ruler):
self.__label_position = self.__connection_label.get_position(ruler)
self._redo(ruler)
def _redo(self, ruler):
self.__connection_label.move(ruler, self.__label_position + self.__delta)
def _undo(self, ruler):
self.__connection_label.move(ruler, self.__label_position)
def get_updates(self):
yield ConnectionMovedEvent(self.__connection_label.connection)
| umlfri/umlfri2 | umlfri2/application/commands/diagram/moveconnectionlabel.py | Python | gpl-3.0 | 1,001 |
# Author: Nic Wolfe <[email protected]>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
import urllib
import re
try:
import xml.etree.cElementTree as etree
except ImportError:
import elementtree.ElementTree as etree
import sickbeard
import generic
from sickbeard.common import Quality
from sickbeard import logger
from sickbeard import tvcache
from sickbeard import helpers
class EZRSSProvider(generic.TorrentProvider):
def __init__(self):
self.urls = {'base_url': 'https://www.ezrss.it/'}
self.url = self.urls['base_url']
generic.TorrentProvider.__init__(self, "EZRSS")
self.supportsBacklog = True
self.supportsFrench = False
self.enabled = False
self.ratio = None
self.cache = EZRSSCache(self)
def isEnabled(self):
return self.enabled
def imageName(self):
return 'ezrss.png'
def getQuality(self, item, anime=False):
try:
quality = Quality.sceneQuality(item.filename, anime)
except:
quality = Quality.UNKNOWN
return quality
def findSearchResults(self, show, episodes, search_mode, manualSearch=False, downCurQuality=False):
self.show = show
results = {}
if show.air_by_date or show.sports:
            logger.log(self.name + u" doesn't support air-by-date or sports backlogging because of limitations on their RSS search.",
logger.WARNING)
return results
results = generic.TorrentProvider.findSearchResults(self, show, episodes, search_mode, manualSearch, downCurQuality)
return results
def _get_season_search_strings(self, ep_obj):
params = {}
params['show_name'] = helpers.sanitizeSceneName(self.show.name, ezrss=True).replace('.', ' ').encode('utf-8')
if ep_obj.show.air_by_date or ep_obj.show.sports:
params['season'] = str(ep_obj.airdate).split('-')[0]
elif ep_obj.show.anime:
params['season'] = "%d" % ep_obj.scene_absolute_number
else:
params['season'] = ep_obj.scene_season
return [params]
def _get_episode_search_strings(self, ep_obj, add_string=''):
params = {}
if not ep_obj:
return params
params['show_name'] = helpers.sanitizeSceneName(self.show.name, ezrss=True).replace('.', ' ').encode('utf-8')
if self.show.air_by_date or self.show.sports:
params['date'] = str(ep_obj.airdate)
elif self.show.anime:
params['episode'] = "%i" % int(ep_obj.scene_absolute_number)
else:
params['season'] = ep_obj.scene_season
params['episode'] = ep_obj.scene_episode
return [params]
def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0):
params = {"mode": "rss"}
if search_params:
params.update(search_params)
search_url = self.url + 'search/index.php?' + urllib.urlencode(params)
logger.log(u"Search string: " + search_url, logger.DEBUG)
results = []
for curItem in self.cache.getRSSFeed(search_url, items=['entries'])['entries'] or []:
(title, url) = self._get_title_and_url(curItem)
if title and url:
logger.log(u"RSS Feed provider: [" + self.name + "] Attempting to add item to cache: " + title, logger.DEBUG)
results.append(curItem)
return results
def _get_title_and_url(self, item):
(title, url) = generic.TorrentProvider._get_title_and_url(self, item)
try:
new_title = self._extract_name_from_filename(item.filename)
except:
new_title = None
if new_title:
title = new_title
logger.log(u"Extracted the name " + title + " from the torrent link", logger.DEBUG)
return (title, url)
def _extract_name_from_filename(self, filename):
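        # Strip a trailing "[...]" tag or "<digits>.TPB" suffix plus the
        # ".torrent" extension to recover the bare release name.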
name_regex = '(.*?)\.?(\[.*]|\d+\.TPB)\.torrent$'
logger.log(u"Comparing " + name_regex + " against " + filename, logger.DEBUG)
match = re.match(name_regex, filename, re.I)
if match:
return match.group(1)
return None
def seedRatio(self):
return self.ratio
class EZRSSCache(tvcache.TVCache):
def __init__(self, provider):
tvcache.TVCache.__init__(self, provider)
# only poll EZRSS every 15 minutes max
self.minTime = 15
def _getRSSData(self):
rss_url = self.provider.url + 'feed/'
logger.log(self.provider.name + " cache update URL: " + rss_url, logger.DEBUG)
return self.getRSSFeed(rss_url)
provider = EZRSSProvider()
| gylian/sickrage | sickbeard/providers/ezrss.py | Python | gpl-3.0 | 5,370 |
"""
Unit tests for the HashTask object
By Simon Jones
26/8/2017
"""
import unittest
from test.TestFunctions import *
from source.HashTask import *
from source.Channel import Channel
class HashTaskUnitTests(unittest.TestCase):
"""
Unit tests for the HashTask object
"""
def setUp(self):
self.task1_t_channel = Channel(True)
self.task1_c_channel = Channel(True)
self.task2_t_channel = Channel(True)
self.task2_c_channel = Channel(True)
self.task1 = spawn_thread(self.task1_c_channel, self.task1_t_channel)
self.task2 = spawn_thread(self.task2_c_channel, self.task2_t_channel)
self.test_filename = "hash_task_unittest_file.log"
file = open(self.test_filename, "w")
file.write("Test Data 123\nHello, world!")
file.close()
print("\nNew Test case:")
def test_spawning_and_joining_tasks(self):
"""
Tests that tasks can be created and joined
:return:
"""
self.task1.start()
self.task2.start()
self.task1_t_channel.put(str(TaskMessage(TaskMessage.FLAG_ECHO, 1, TaskMessage.REQUEST, "Hello World Task1")))
self.task2_t_channel.put(str(TaskMessage(TaskMessage.FLAG_ECHO, 2, TaskMessage.REQUEST, "Hello World Task2")))
# Should be ignored
self.task2_t_channel.put(str(TaskMessage(TaskMessage.FLAG_ECHO, 2, TaskMessage.RESPONSE, "Hello World Task2")))
delay_do_nothing(1)
self.task1_c_channel.put(str(TaskMessage(TaskMessage.FLAG_JOIN, 1, TaskMessage.REQUEST)))
self.task2_c_channel.put(str(TaskMessage(TaskMessage.FLAG_JOIN, 2, TaskMessage.REQUEST)))
self.task1.join()
self.task2.join()
empty_channel(self, self.task1_t_channel.get_in_queue(), 3)
empty_channel(self, self.task2_t_channel.get_in_queue(), 3)
empty_channel(self, self.task1_c_channel.get_in_queue(), 2)
empty_channel(self, self.task2_c_channel.get_in_queue(), 2)
def test_task_errors(self):
"""
Tests that HashTasks can handle errors properly
:return:
"""
self.task1.start()
self.task2.start()
self.task1_t_channel.put(str(TaskMessage("NOT_A_FLAG", 1, TaskMessage.REQUEST, "ERROR_TASK")))
delay_do_nothing()
self.task1_c_channel.put(str(TaskMessage(TaskMessage.FLAG_JOIN, 1, TaskMessage.REQUEST)))
self.task2_c_channel.put(str(TaskMessage(TaskMessage.FLAG_JOIN, 2, TaskMessage.REQUEST)))
self.task1.join()
self.task2.join()
empty_channel(self, self.task1_t_channel.get_in_queue(), 3)
empty_channel(self, self.task2_t_channel.get_in_queue(), 1)
empty_channel(self, self.task1_c_channel.get_in_queue(), 2)
empty_channel(self, self.task2_c_channel.get_in_queue(), 2)
def test_another_hash_task_case(self):
"""
Another test for robustness
:return:
"""
self.task1.start()
self.task2.start()
self.task1_t_channel.put(str(TaskMessage(TaskMessage.FLAG_HASH, 1, TaskMessage.REQUEST, self.test_filename, 0, 10)))
self.task1_t_channel.put(str(TaskMessage(TaskMessage.FLAG_HASH, 1, TaskMessage.REQUEST, self.test_filename, 10, 10)))
self.task1_t_channel.put(str(TaskMessage(TaskMessage.FLAG_HASH, 1, TaskMessage.REQUEST, self.test_filename, 20, 10)))
delay_do_nothing(1)
self.task1_c_channel.put(str(TaskMessage(TaskMessage.FLAG_JOIN, 1, TaskMessage.REQUEST)))
self.task2_c_channel.put(str(TaskMessage(TaskMessage.FLAG_JOIN, 2, TaskMessage.REQUEST)))
self.task1.join()
self.task2.join()
empty_channel(self, self.task1_t_channel.get_in_queue(), 5)
empty_channel(self, self.task2_t_channel.get_in_queue(), 1)
empty_channel(self, self.task1_c_channel.get_in_queue(), 2)
empty_channel(self, self.task2_c_channel.get_in_queue(), 2)
    def test_actions_after_join_are_executed(self):
"""
Tests that the messages received after a join task are executed.
:return:
"""
self.task1_t_channel.put(str(TaskMessage(TaskMessage.FLAG_ECHO, 1, TaskMessage.REQUEST, "Echo 1")))
self.task2_t_channel.put(str(TaskMessage(TaskMessage.FLAG_ECHO, 2, TaskMessage.REQUEST, "Echo 1")))
self.task1_c_channel.put(str(TaskMessage(TaskMessage.FLAG_JOIN, 1, TaskMessage.REQUEST)))
self.task2_c_channel.put(str(TaskMessage(TaskMessage.FLAG_JOIN, 2, TaskMessage.REQUEST)))
self.task1_t_channel.put(str(TaskMessage(TaskMessage.FLAG_ECHO, 1, TaskMessage.REQUEST, "Echo 2")))
self.task2_t_channel.put(str(TaskMessage(TaskMessage.FLAG_ECHO, 2, TaskMessage.REQUEST, "Echo 2")))
self.task1.start()
self.task2.start()
delay_do_nothing(1)
self.task1.join()
self.task2.join()
empty_channel(self, self.task1_t_channel.get_in_queue(), 2)
empty_channel(self, self.task2_t_channel.get_in_queue(), 2)
empty_channel(self, self.task1_c_channel.get_in_queue(), 2)
empty_channel(self, self.task2_c_channel.get_in_queue(), 2)
if __name__ == "__main__":
unittest.main()
| jonesyboynz/DuplicateFileFinder | unittests/HashTaskUnitTests.py | Python | gpl-3.0 | 4,641 |
# Copyright (C) 2013 Andreas Damgaard Pedersen
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
This file demonstrates writing tests using the unittest module.
These will pass when you run "manage.py test".
Replace this with more appropriate tests for your application.
"""
from django.test import TestCase
class SimpleTest(TestCase):
def test_basic_addition(self):
"""Test that 1 + 1 always equals 2."""
self.assertEqual(1 + 1, 2)
| Damgaard/account-deleter | deleter/tests.py | Python | gpl-3.0 | 1,050 |