code | repo_name | path | language | license | size
---|---|---|---|---|---|
from pippi import dsp, tune
from hcj import snds, keys

key = 'g'

rhodes = snds.load('hcj/rhodes1.wav')
rhodes = dsp.transpose(rhodes, 16.0/15.0)

def chord(length, freqs, amp):
    layers = [ keys.rhodes(length, freq, amp * dsp.rand(0.25, 0.5)) for freq in freqs ]
    layers = [ dsp.pan(layer, dsp.rand()) for layer in layers ]
    return dsp.mix(layers)

def makeStab(length, i):
    freqs = tune.fromdegrees([ dsp.randchoose([1,2,3,4,5,6,8]) for _ in range(dsp.randint(2,4)) ], octave=3, root=key)
    stab = chord(length, freqs, dsp.rand(0.25, 0.75))
    stab = dsp.taper(stab, 40)
    stab = dsp.fill(stab, length, silence=True)
    return stab

def makePulse(length, i):
    freqs = tune.fromdegrees([ dsp.randchoose([1,2,3,4,5,6,8]) for _ in range(dsp.randint(2,4)) ], octave=2, root=key)
    pulse = chord(length, freqs, dsp.rand(0.5, 0.75))
    pulse = dsp.taper(pulse, 40)
    pulse = dsp.amp(pulse, dsp.rand(0.5, 1))
    pulse = dsp.fill(pulse, length, silence=True)
    return pulse

def makeLongChord(seg):
    degrees = [ dsp.randint(1, 9) for _ in range(dsp.randint(2,4)) ]
    long_chord = chord(sum(seg), [ freq * 2**dsp.randint(0, 5) for freq in tune.fromdegrees(degrees, octave=1, root=key) ], dsp.rand(0.15, 0.35))
    long_chord = dsp.fill(long_chord, sum(seg))
    return long_chord

def makeGlitch(length, i):
    # long_chord is expected to exist at the module level (e.g. the result of makeLongChord)
    g = dsp.cut(long_chord, dsp.randint(0, dsp.flen(long_chord) - length), length)
    g = dsp.alias(g)
    g = dsp.fill(g, length)
    return g
| hecanjog/pattern.studies | orc/rhodes.py | Python | cc0-1.0 | 1,488 |
# coding=utf-8
from rolepermissions.roles import AbstractUserRole


class Student(AbstractUserRole):
    pass


class Tutor(AbstractUserRole):
    available_permissions = {
        'create_course': False
    }


class SystemAdmin(AbstractUserRole):
    pass
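
# Illustrative usage sketch (assumption, not part of the original module):
# django-role-permissions resolves roles by their lower-cased class names,
# and provides helpers such as assign_role, e.g.
#
#   from rolepermissions.roles import assign_role
#   assign_role(some_user, 'tutor')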
| gileno/curso-amadeus | simplemooc/roles.py | Python | cc0-1.0 | 262 |
# formula

# MA
def MA(xArr, yArr, N):
    lenY = len(yArr)
    lenX = len(xArr)
    for idx in range(lenY, lenX):
        ss = 0
        av = 0
        for k in range(0, N):
            nidx = idx - k
            if nidx < 0:
                av = ss / float(k)
                break
            else:
                ss += xArr[nidx]
        if av == 0:
            yArr.append(ss / float(N))
        else:
            yArr.append(av)

# EMA
def EMA(xArr, yArr, N):
    k = (2.0 / (N + 1.0))
    lenY = len(yArr)
    lenX = len(xArr)
    for idx in range(lenY, lenX):
        value = xArr[idx]
        if idx != 0:
            yArr.append(value * k + (1.0 - k) * yArr[idx - 1])
        else:
            yArr.append(value)

# SMA(X,N,M) = SMA(N-1) * (N-M)/N + X(N)*M/N
def SMA(xArr, yArr, N, M):
    lenX = len(xArr)
    lenY = len(yArr)
    for idx in range(lenY, lenX):
        value = xArr[idx]
        if idx != 0:
            yArr.append(value * float(M) / float(N) + yArr[idx - 1] * (float(N) - float(M)) / float(N))
        else:
            yArr.append(value)

# HIGH
def HIGH(xArr, yArr, N):
    lenX = len(xArr)
    lenY = len(yArr)
    for idx in range(lenY, lenX):
        high = 0
        for n in range(0, N):
            nidx = idx - n
            if nidx < 0:
                break
            value = xArr[nidx]
            if high == 0:
                high = value
            if high < value:
                high = value
        yArr.append(high)

# LOW
def LOW(xArr, yArr, N):
    lenX = len(xArr)
    lenY = len(yArr)
    for idx in range(lenY, lenX):
        low = 0
        for n in range(0, N):
            nidx = idx - n
            if nidx < 0:
                break
            value = xArr[nidx]
            if low == 0:
                low = value
            if low > value:
                low = value
        yArr.append(low)

# CROSS
def CROSS(AArr, BArr, N=None):
    lena = len(AArr)
    lenb = len(BArr)
    if N == None:
        N = lena - 1
    if N < 0:
        N = lena + N
    if lena < N or lenb < N or N < 2:
        return False
    if AArr[N-1] <= BArr[N-1] and AArr[N] >= BArr[N]:
        return True
    return False

# MAX
def MAX(Arr, idx1=None, idx2=None):
    l = len(Arr)
    if l <= 0:
        return None
    idx1 = 0 if idx1 == None else idx1
    idx2 = l if idx2 == None else idx2
    value = Arr[idx1]
    for idx in range(idx1, idx2):
        if Arr[idx] > value:
            value = Arr[idx]
    return value

# MIN
def MIN(Arr, idx1=None, idx2=None):
    l = len(Arr)
    if l <= 0:
        return None
    idx1 = 0 if idx1 == None else idx1
    idx2 = l if idx2 == None else idx2
    value = Arr[idx1]
    for idx in range(idx1, idx2):
        if Arr[idx] < value:
            value = Arr[idx]
    return value

# SUM
def SUM(Arr, idx1=None, idx2=None):
    l = len(Arr)
    if l <= 0:
        return 1
    idx1 = 0 if idx1 == None else idx1
    idx2 = l if idx2 == None else idx2
    value = 0
    for idx in range(idx1, idx2):
        value += Arr[idx]
    if value < 1:
        return 1
    return value

# RATE
def RATE(xArr, yArr, idx=None):
    lenX = len(xArr)
    lenY = len(yArr)
    if idx == None:
        idx = 0
    for k in range(lenY, lenX):
        if k > idx:
            yArr.append(xArr[k] - xArr[k-1])
        else:
            yArr.append(1990214)
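
# Minimal usage sketch (assumption: not part of the original module).
# These helpers append to an output list in place, so they can be called
# incrementally as new data points arrive.
if __name__ == '__main__':
    closes = [1.0, 2.0, 3.0, 4.0, 5.0, 6.0]
    ma = []
    MA(closes, ma, 3)    # simple moving average over a 3-point window
    ema = []
    EMA(closes, ema, 3)  # exponential moving average, k = 2/(N+1)
    print(ma)
    print(ema)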
| WaitGodot/peatio-client-python | formula/Formula.py | Python | cc0-1.0 | 3,523 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
from flask import Flask
from flask_restplus import Api, Resource, fields
from subprocess import check_output
import sys
import os
import shutil

app = Flask(__name__)
api = Api(app, version='0.1', title='Speak API',
          description='Get phonemes of a text from various TTS backends')
ns = api.namespace('speaks', description='Speak operations')

Backends = {
    'espeak': {
        'binary': '',
        'info': 'http://espeak.sourceforge.net/',
        'availability': ['darwin', 'linux', 'win32', 'cygwin'],
        'parameters': [
            {'name': 'text', 'arg': ' ', 'type': 0, 'required': True},
            {'name': 'phoneme', 'arg': '-x'},
            {'name': 'quiet', 'arg': '-q'},
        ],
    },
    'flite': {
        'binary': '',
        'info': 'http://www.festvox.org/flite/',
        'availability': ['darwin', 'linux', 'cygwin'],
        'parameters': [
            {'name': 'text', 'arg': '-t', 'required': True},
            {'name': 'phoneme', 'arg': '-ps'},
            {'name': 'quiet', 'arg': '-o /dev/null'},
        ],
    },
    'saypy': {
        'binary': '',
        'info': 'https://github.com/boltomli/RESTfulSpeak/blob/master/vendor/saypy',
        'availability': ['darwin'],
        'parameters': [
            {'name': 'text', 'arg': ' ', 'required': True},
            {'name': 'phoneme', 'arg': ' '},
            {'name': 'quiet', 'arg': ' '},
        ],
    },
    'CSSpeak.exe': {
        'binary': '',
        'info': 'https://github.com/boltomli/RESTfulSpeak/blob/master/vendor/CSSpeak/',
        'availability': ['win32', 'cygwin'],
        'parameters': [
            {'name': 'text', 'arg': ' ', 'required': True},
            {'name': 'phoneme', 'arg': ' '},
            {'name': 'quiet', 'arg': ' '},
        ],
    },
}

backend_runtime = {}
for be in Backends:
    if sys.platform in Backends[be]['availability']:
        vendor_bin = os.path.join('vendor', be)
        if os.path.exists(vendor_bin):
            binary = vendor_bin
        else:
            binary = shutil.which(be)
        if binary:
            Backends[be]['binary'] = binary
            backend_runtime.update({be: Backends[be]})

parameter = api.model('Parameter', {
    'name': fields.String(required=True, description='The option name'),
    'arg': fields.String(required=True, description='The argument/switch'),
    'type': fields.Integer(default=-1, description='A custom field tba'),
    'required': fields.Boolean(default=False),
})

backend = api.model('Backend', {
    'binary': fields.String(required=True, description='The backend binary'),
    'info': fields.String(required=True, description='The info site'),
    'availability': fields.List(fields.String(required=True, description='Availability on OS')),
    'parameters': fields.List(fields.Nested(parameter, required=True,
                                            description='The parameter list')),
})

backend_list = api.model('Backend list', {
    'name': fields.String(required=True, description='Name of the backend'),
    'backend': fields.Nested(backend, description='The backend'),
})

result = api.model('Result', {
    'value': fields.String,
    'cmd': fields.String
})

def abort_if_backend_isnt_available(name):
    if name not in backend_runtime:
        api.abort(404, message="Backend {} isn't available.".format(name))

def build_cmd(backend_name, text):
    default_parameters = backend_runtime[backend_name]['parameters']
    text_arg = [p['arg'] for p in default_parameters if p['name'] == 'text'][0]
    phoneme_arg = [p['arg'] for p in default_parameters if p['name'] == 'phoneme'][0]
    quiet_arg = [p['arg'] for p in default_parameters if p['name'] == 'quiet'][0]
    if text_arg.strip() == '':  # Such as espeak
        text_cmd = [text.strip()]
    else:
        text_cmd = [text_arg, text.strip()]
    if phoneme_arg.strip() == '' and quiet_arg.strip() == '':  # Such as enclosed saypy vendor sample on Mac
        return [
            backend_runtime[backend_name]['binary'],
        ] + text_cmd
    else:
        return [
            backend_runtime[backend_name]['binary'],
            phoneme_arg,
            quiet_arg,
        ] + text_cmd
# Text must appear at the end (to make espeak happy)

parser = api.parser()
parser.add_argument('text', type=str, help='Text to speak', required=True, location='form')

@ns.route('/<string:backend_name>')
@api.doc(responses={404: 'Backend not found'},
         params={'backend_name': 'The backend name'})
class Backend(Resource):
    """Show a single backend and speak with it"""
    @api.doc(description='Name should be one of {0}'.format(','.join(backend_runtime.keys())))
    @api.marshal_with(backend)
    def get(self, backend_name):
        """Show a backend"""
        abort_if_backend_isnt_available(backend_name)
        return backend_runtime[backend_name]

    @api.doc(parser=parser)
    @api.marshal_with(result, code=201)
    def post(self, backend_name):
        """Speak with a backend"""
        abort_if_backend_isnt_available(backend_name)
        args = parser.parse_args()
        cmd = build_cmd(backend_name, args['text'])
        outputs = check_output(cmd).decode('utf-8')
        return {'value': outputs, 'cmd': cmd}, 201

@ns.route('/')
class BackendList(Resource):
    """Show a list of all available backends"""
    @api.marshal_list_with(backend_list)
    def get(self):
        """List all backends"""
        return [{'name': name, 'backend': backend} for name, backend in backend_runtime.items()]

if __name__ == '__main__':
    app.run(host='0.0.0.0')
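
# Example requests against a local run (hypothetical host/port, the Flask defaults):
#
#   curl http://127.0.0.1:5000/speaks/                        -> list available backends
#   curl http://127.0.0.1:5000/speaks/espeak                  -> describe the espeak backend
#   curl -X POST -F 'text=hello world' http://127.0.0.1:5000/speaks/espeak
#
# The POST returns the phoneme output plus the exact command line that was run.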
| boltomli/RESTfulSpeak | api.py | Python | cc0-1.0 | 5,635 |
'''
Created on 03/mar/2014

@author: Sonya
'''
from numpy.random.mtrand import np
import random
from FCM.Class_File_Manager import FileManager
import numpy

################################################################################
# Peach - Computational Intelligence for Python
# Jose Alexandre Nalon
#
# This file: fuzzy/cmeans.py
# Fuzzy C-Means algorithm
################################################################################

# Doc string, reStructuredText formatted:
__doc__ = """
Fuzzy C-Means

Fuzzy C-Means is a clustering algorithm based on fuzzy logic.

This package implements the fuzzy c-means algorithm for clustering and
classification. This algorithm is very simple, yet very efficient. From a
training set and an initial condition which gives the membership values of each
example in the training set to the clusters, it converges very quickly to crisper
sets.

The initial conditions, ie, the starting membership, must follow some rules.
Please, refer to any bibliography about the subject to see why. Those rules are:
no example might have membership 1 in every class, and the sum of the membership
of every component must be equal to 1. This means that the initial condition is
a fuzzy partition of the universe.
"""

################################################################################
from numpy import dot, array, sum, zeros, outer, any, identity
from numpy import linalg as LA

################################################################################
# Fuzzy C-Means class
################################################################################
class FuzzyCMeans(object):
    '''
    Fuzzy C-Means convergence.

    Use this class to instantiate a fuzzy c-means object. The object must be
    given a training set and initial conditions. The training set is a list or
    an array of N-dimensional vectors; the initial conditions are a list of the
    initial membership values for every vector in the training set -- thus, the
    length of both lists must be the same. The number of columns in the initial
    conditions must be the same number of classes. That is, if you are, for
    example, classifying in ``C`` classes, then the initial conditions must have
    ``C`` columns.

    There are restrictions in the initial conditions: first, no column can be
    all zeros or all ones -- if that happened, then the class described by this
    column is unnecessary; second, the sum of the memberships of every example
    must be one -- that is, the sum of the membership in every column in each
    line must be one. This means that the initial condition is a perfect
    partition of ``C`` subsets.

    Notice, however, that *no checking* is done. If your algorithm seems to be
    behaving strangely, try to check these conditions.
    '''
    EUCLIDEAN_NORM = 1
    DIAGONAL_NORM = 2
    MAHALONOBIS_NORM = 3
    FUZZY_COVARIANCE_NORM = 4
    INIT_MU_RANDOM = 1
    INIT_MU_CONSTANT = 2
    INIT_MU_FCM = 3

    def __init__(self, training_set, c, m=2., distType=EUCLIDEAN_NORM, initMu=INIT_MU_RANDOM):
        '''
        Initializes the algorithm.

        :Parameters:
          training_set
            A list or array of vectors containing the data to be classified.
            Each of the vectors in this list *must* have the same dimension, or
            the algorithm won't behave correctly. Notice that each vector can be
            given as a tuple -- internally, everything is converted to arrays.
          initial_conditions
            A list or array of vectors containing the initial membership values
            associated to each example in the training set. Each column of this
            array contains the membership assigned to the corresponding class
            for that vector. Notice that each vector can be given as a tuple --
            internally, everything is converted to arrays.
          m
            This is the aggregation value. The bigger it is, the smoother will
            be the classification. Please, consult the bibliography about the
            subject. ``m`` must be bigger than 1. Its default value is 2
        '''
        # The shape attribute gives the number of rows and columns;
        # array() converts a tuple or a list into an array.
        self.__x = array(training_set)
        self.m = m
        self.setDistanceType(distType)
        '''The fuzzyness coefficient. Must be bigger than 1, the closest it is
        to 1, the smoother the membership curves will be.'''
        self.initMu = initMu
        self.__mu = array(self.initMembership(self.__x.shape[0], c))
        numpy.set_printoptions(threshold=numpy.nan)
        FileManager.writeTxt('mu_iniziale.txt', str(self.__mu))
        self.__centr = self.centers()

    def getcentr(self):
        return self.__centr

    def setcentr(self, centr):
        self.__centr = array(centr).reshape(self.__centr.shape)

    centr = property(getcentr, setcentr)
    '''A ``numpy`` array containing the centers of the classes in the algorithm.
    Each line represents a center, and the number of lines is the number of
    classes. This property is read and write, but care must be taken when
    setting new centers: if the dimensions are not exactly the same as given in
    the instantiation of the class (*ie*, *C* centers of dimension *N*), an
    exception will be raised.'''

    def getmu(self):
        return self.__mu

    def setmu(self, mu):
        self.__mu = array(mu).reshape(self.__mu.shape)

    mu = property(getmu, None)
    '''The membership values for every vector in the training set. This property
    is modified at each step of the execution of the algorithm. This property is
    not writable.'''

    def __getx(self):
        return self.__x

    x = property(__getx, None)
    '''The vectors in which the algorithm bases its convergence. This property
    is not writable.'''

    def centers(self):
        '''
        Given the present state of the algorithm, recalculates the centers, that
        is, the position of the vectors representing each of the classes. Notice
        that this method modifies the state of the algorithm if any change was
        made to any parameter. This method receives no arguments and will seldom
        be used externally. It can be useful if you want to step over the
        algorithm. *This method has a collateral effect!* If you use it, the
        ``centr`` property (see above) will be modified.

        :Returns:
          A vector containing, in each line, the position of the centers of the
          algorithm.
        '''
        numpy.set_printoptions(threshold=numpy.nan)
        FileManager.writeTxt('mu_finale.txt', str(self.__mu))
        mm = self.__mu ** self.m
        # FileManager.writeTxt('mm.txt', str(mm))
        # FileManager.writeTxt('sum.txt', str(sum(mm, axis=0)))
        # FileManager.writeTxt('dot.txt', str(dot(self.__x.T, mm)))
        c = dot(self.__x.T, mm) / sum(mm, axis=0)
        self.__centr = c.T
        return self.__centr

    def membership(self):
        '''
        Given the present state of the algorithm, recalculates the membership of
        each example on each class. That is, it modifies the initial conditions
        to represent an evolved state of the algorithm. Notice that this method
        modifies the state of the algorithm if any change was made to any
        parameter.

        :Returns:
          A vector containing, in each line, the membership of the corresponding
          example in each class.
        '''
        x = self.__x
        centr = self.__centr
        M, _ = x.shape
        C, _ = centr.shape
        r = zeros((M, C))
        m1 = 1. / (self.m - 1.)
        for k in range(M):
            # den = sum((x[k] - centr) ** 2., axis=1)
            den = self.computeDistance(x[k], self.centr)
            if any(den == 0):
                return self.__mu
            frac = outer(den, 1. / den) ** m1
            r[k, :] = 1. / sum(frac, axis=1)
        self.__mu = r
        return self.__mu

    def computeDistance(self, datapoint, prototypes):
        c = prototypes
        A = self.__A
        distance = zeros(c.shape[0])
        # distance = sum((datapoint - centr) ** 2., axis=1)
        for i in range(c.shape[0]):
            d = datapoint - c[i]
            if (self.distanceType == FuzzyCMeans.FUZZY_COVARIANCE_NORM):
                A = self.computeCovarianceMatrix(i)
            distance[i] = dot(dot(d, A), d)
        return distance

    def setDistanceType(self, type):
        '''Sets the norm type used to compute the membership matrix:
        type = 1 Euclidean norm
        type = 2 diagonal norm
        type = 3 Mahalanobis norm
        '''
        self.distanceType = type
        if (type == self.EUCLIDEAN_NORM):
            self.__A = identity(self.__x.shape[1])
        elif (type == FuzzyCMeans.DIAGONAL_NORM):
            self.__A = LA.inv(self.computeCyMatrix())
        elif (type == self.MAHALONOBIS_NORM):
            self.__A = LA.inv(np.diag(LA.eigvals(self.computeCyMatrix())))
        elif (type == self.FUZZY_COVARIANCE_NORM):
            self.__A = zeros((self.__x.shape[1], self.__x.shape[1]))

    def step(self):
        '''
        This method runs one step of the algorithm. It might be useful to track
        the changes in the parameters.

        :Returns:
          The norm of the change in the membership values of the examples. It
          can be used to track convergence and as an estimate of the error.
        '''
        old = self.__mu
        self.membership()
        self.centers()
        return sum((self.__mu - old) ** 2.) ** 0.5  # Euclidean norm of the membership change

    def __call__(self, emax=0.001, imax=50):
        '''
        The ``__call__`` interface is used to run the algorithm until
        convergence is found.

        :Parameters:
          emax
            Specifies the maximum error admitted in the execution of the
            algorithm. It defaults to 0.001. The error is tracked according to
            the norm returned by the ``step()`` method.
          imax
            Specifies the maximum number of iterations admitted in the execution
            of the algorithm. It defaults to 50.

        :Returns:
          An array containing, at each line, the vectors representing the
          centers of the clustered regions.
        '''
        error = 1.
        i = 0
        while error > emax and i < imax:
            error = self.step()
            i = i + 1
            print('Iteration ' + str(i))
        print("Number of iterations performed: " + str(i))
        return self.centr

    def initMembership(self, n, c):
        u = zeros((n, c))
        if (c != 0):
            if (self.initMu == self.INIT_MU_RANDOM):
                for i in range(n):
                    z = np.random.rand(c)
                    u[i, :] = np.random.dirichlet(z, size=1)
            elif (self.initMu == self.INIT_MU_CONSTANT):
                for i in range(n):
                    for j in range(c):
                        u[i, j] = 1.0 / c
            elif (self.initMu == self.INIT_MU_FCM):
                p1 = FuzzyCMeans(self.__x, c, self.m, self.distanceType)
                p1(imax=5)
                u = p1.getmu()
        return u

    def getClusters(self):
        return np.argmax(self.__mu, axis=1)

    def computeCyMatrix(self):
        x = self.__x
        M = x.shape[0]
        n = x.shape[1]
        cy = sum(self.__x, axis=0) / M
        Cy = zeros((n, n))
        for k in range(M):
            z = x[k] - cy
            den = outer(z, z.T)
            Cy = Cy + den
        return Cy

    def computeCovarianceMatrix(self, y):
        c = self.__centr
        x = self.__x
        n = x.shape[1]
        mu = self.__mu
        n = x.shape[1]
        num = zeros((n, n))
        for k in range(len(x)):
            z = x[k] - c[y]
            num += ((mu[k, y]) ** 2) * (outer(z, z.T))
        den = sum(mu[:, y])
        Py = num / den
        (sign, logdet) = LA.slogdet(Py)
        coeff = sign * numpy.exp(-logdet / n)
        My = LA.inv(coeff * Py)
        return My

    def getMedoids(self, trueLabelPoints):
        cluster_point = array(self.getClusters())
        medoids = []
        for k in range(len(self.__centr)):
            l = []
            for j in range(len(cluster_point)):
                if (k == cluster_point[j]):
                    l.append(j)
            medoids.append(self.x[l[np.argmax(self.mu[l, k])]])
        return array(medoids)

    def getMedoidsTrueLabel(self, trueLabelCluster, trueLabelPoints):
        cluster_point = array(self.getClusters())
        medoids = []
        for k in range(len(self.__centr)):
            l = []
            for j in range(len(cluster_point)):
                if (k == cluster_point[j] and trueLabelCluster[k] == trueLabelPoints[j]):
                    l.append(j)
            medoids.append(self.x[l[np.argmax(self.mu[l, k])]])
        return array(medoids)

    def getClusterLabel(self, trueLabels):
        cluster_point = array(self.getClusters())
        labels = []
        for k in range(len(self.__centr)):
            l = []
            for j in range(len(cluster_point)):
                if (k == cluster_point[j]):
                    l.append(j)
            labels.append(int(trueLabels[l[np.argmax(self.mu[l, k])]]))
        return array(labels)
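
# Minimal usage sketch (assumption: 'data' is an (n_samples, n_features) array;
# FCM.Class_File_Manager must be importable since __init__ writes debug dumps):
#
#   fcm = FuzzyCMeans(data, c=3, m=2.0,
#                     distType=FuzzyCMeans.EUCLIDEAN_NORM,
#                     initMu=FuzzyCMeans.INIT_MU_RANDOM)
#   centers = fcm(emax=0.001, imax=50)   # run until convergence
#   labels = fcm.getClusters()           # hard assignment per sample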
| fabiolapozyk/IncrementalFCM | Python/FCM/Class_FCM.py | Python | cc0-1.0 | 14,194 |
#< ab || cd > = [[ a,b ] , [ c,d ]]
#A script to find the optimal alignment of diagrams used in the CCDT t3 amplitude equation
def perm(a, i,e):
ai= a[1][e]
ae = a[1][i]
api = a[3][e]
ape = a[3][i]
a[1][i] = ai
a[1][e] = ae
a[3][i] = api
a[3][e] = ape
def perm2(a, i,e):
ai= a[0][e]
ae = a[0][i]
api = a[2][e]
ape = a[2][i]
a[0][i] = ai
a[0][e] = ae
a[2][i] = api
a[2][e] = ape
def align(a, b, c, left_indices):
#1. assign all left_indices in a[0], a[2]
a_ = [[],[],[],[]]
b_ = [[],[],[],[]]
c_ = [[],[],[],[]]
for i in range(len(a[0])):
#bra
if a[0][i] in ["d", "e", "f"]:
#move to ket
a_[1].append(a[0][i])
a_[3].append(a[2][i])
if a[0][i] in ["a", "b", "c"]:
#keep in bra
a_[0].append(a[0][i])
a_[2].append(a[2][i])
#ket
if a[1][i] in ["i", "j", "k"]:
#move to bra
a_[0].append(a[1][i])
a_[2].append(a[3][i])
if a[1][i] in ["l", "m", "n"]:
#keep in ket
a_[1].append(a[1][i])
a_[3].append(a[3][i])
#2. assign all left indices in b to a_[1]
for i in range(len(b[0])):
if b[0][i] in a_[1]:
b_[0].append(b[0][i])
b_[2].append(b[2][i])
if b[0][i] not in a_[1]:
b_[1].append(b[0][i])
b_[3].append(b[2][i])
for i in range(len(b[0])):
if b[1][i] in a_[1]:
b_[0].append(b[1][i])
b_[2].append(b[3][i])
if b[1][i] not in a_[1]:
b_[1].append(b[1][i])
b_[3].append(b[3][i])
#ensure correct order in a[1]
#a_temp = a_
print b_
print a_
for i in range(len(a_[1])):
if a_[1][i] != b_[0][i]:
for e in range(len(a_[1])):
if a_[1][e] == b_[0][i]:
perm(a_, e,i)
#3. align c to b_[1]
for i in range(len(c[0])):
if c[0][i] in b_[1]:
c_[0].append(c[0][i])
c_[2].append(c[2][i])
if c[0][i] not in b_[1]:
c_[1].append(c[0][i])
c_[3].append(c[2][i])
for i in range(len(c[0])):
if c[1][i] in b_[1]:
c_[0].append(c[1][i])
c_[2].append(c[3][i])
if c[1][i] not in b_[1]:
c_[1].append(c[1][i])
c_[3].append(c[3][i])
for i in range(len(c_[0])):
if b_[1][i] != c_[0][i]:
for e in range(len(c_[0])):
if c_[0][e] == b_[1][i]:
perm2(c_, i,e)
#print "A:", a_
#print "B:", b_
#print "C:", c_
return a_,b_,c_
def diagsort(a,c):
#align diagram to the T3 amplitude
nr = {"a": "p", "b": "q","c": "r", "i": "s","j": "t", "k": "u" }
retrs = "update_as_"
for i in range(len(a[0])):
retrs += nr[a[0][i]]
retrs += "_"
for i in range(len(c[1])):
retrs += nr[c[1][i]]
return retrs
#align to t3 amp
def setup(a,b,c):
#assign general indices pqrs
a = [a[0], a[1], [],[]]
b = [b[0], b[1], [],[]]
c = [c[0], c[1], [],[]]
indx = "pqrstu"
n = 0
for i in range(len(a[0])):
a[2].append(indx[n])
n+= 1
for i in range(len(a[1])):
a[3].append(indx[n])
n+= 1
n = 0
for i in range(len(b[0])):
b[2].append(indx[n])
n+= 1
for i in range(len(b[1])):
b[3].append(indx[n])
n+= 1
n = 0
for i in range(len(c[0])):
c[2].append(indx[n])
n+= 1
for i in range(len(c[1])):
c[3].append(indx[n])
n+= 1
#identify left indices
left_indices = []
for i in range(len(a[0])):
if a[0][i] in ["a", "b", "c"]:
left_indices.append(a[0][i])
if a[1][i] in ["i", "j", "k"]:
left_indices.append(a[1][i])
a,b,c = align(a,b,c, left_indices)
"""
#align indices in a,b
diag = [[],[]]
ap = [[],[]]
bp = [[],[]]
cp = [[],[]]
#1. identify open lines in a
for i in range(len(a)):
if a[0][i] in ["d", "e", "f"]:
diag[0].append(a[0][i])
ap[0].append(a[0][i])
#a_s.append(A[0][i])
if a[1][i] in ["i", "j", "k"]:
diag[0].append(a[1][i])
ap[0].append(a[1][i])
#a_s.append(A[1][i])
if a[0][i] not in ["d", "e", "f"]:
ap[1].append(a[0][i])
if a[1][i] not in ["l", "m", "n"]:
ap[1].append(a[1][i])
#align closed lines in a-b
for i in range(len(ap[1])):
pass
a_s = "."
b_s = "."
c_s = "."
"""
#2. use internal lines from a to form first part of b
return a,b,c
def generate_t2t2(v,t2,t3):
#measure "level of alignment" of existing tensors
#we ideally want it to begin with abc, and end with ijk
#contractions occur over lmn and def
t3ind = 0
contractions = ["l","m","d","e"]
#begin by evaluate where to place the t3 amplitudes
for i in range(len(t3[0])):
if t3[0][i] in ["a", "b"]:
t3ind += 1
if t3[1][i] in ["i", "j"]:
t3ind -= 1
#inspect if t2 has a preferred placement
for i in range(len(t2[0])):
if t2[0][i] in ["a", "b"]:
t3ind += 1
if t2[1][i] in ["i", "j"]:
t3ind -= 1
#print t3ind
if t3ind >= 0:
#place t3 first
a,b,c = setup(t3, v, t2)
#a = t3
t3str = "t3."
for i in range(len(a[2])):
t3str += a[2][i]
t3str += "_"
for i in range(len(a[3])):
t3str += a[3][i]
t3str += "()"
t2str = "t2."
for i in range(len(c[2])):
t2str += c[2][i]
t2str += "_"
for i in range(len(c[3])):
t2str += c[3][i]
t2str += "()"
vint = "vhhpp."
for i in range(len(b[2])):
vint += b[2][i]
vint += "_"
for i in range(len(b[3])):
vint += b[3][i]
vint += "()"
matmult = t3str + "*" + vint + "*" + t2str
else:
#place t3 last
a,b,c = setup(t2, v, t3)
t2str = "t3."
for i in range(len(a[2])):
t2str += a[2][i]
t2str += "_"
for i in range(len(a[3])):
t2str += a[3][i]
t2str += "()"
t3str = "t2."
for i in range(len(c[2])):
t3str += c[2][i]
t3str += "_"
for i in range(len(c[3])):
t3str += c[3][i]
t3str += "()"
vint = "vhhpp."
for i in range(len(b[2])):
vint += b[2][i]
vint += "_"
for i in range(len(b[3])):
vint += b[3][i]
vint += "()"
matmult = t2str + "*" + vint + "*" + t3str
#print matmult
retstr = diagsort(a,c)
strng = retstr + "(" + matmult + ")"
#print a
#print b
#print c
return a, b, c, strng
def generate(v,t2,t3):
#measure "level of alignment" of existing tensors
#we ideally want it to begin with abc, and end with ijk
#contractions occur over lmn and def
t3ind = 0
contractions = ["l","m","d","e"]
#begin by evaluate where to place the t3 amplitudes
for i in range(len(t3[0])):
if t3[0][i] in ["a", "b", "c"]:
t3ind += 1
if t3[1][i] in ["i", "j", "k"]:
t3ind -= 1
#inspect if t2 has a preferred placement
for i in range(len(t2[0])):
if t2[0][i] in ["a", "b", "c"]:
t3ind += 1
if t2[1][i] in ["i", "j", "k"]:
t3ind -= 1
#print t3ind
if t3ind >= 0:
#place t3 first
a,b,c = setup(t3, v, t2)
#a = t3
t3str = "t3."
for i in range(len(a[2])):
t3str += a[2][i]
t3str += "_"
for i in range(len(a[3])):
t3str += a[3][i]
t3str += "()"
t2str = "t2."
for i in range(len(c[2])):
t2str += c[2][i]
t2str += "_"
for i in range(len(c[3])):
t2str += c[3][i]
t2str += "()"
vint = "vhhpp."
for i in range(len(b[2])):
vint += b[2][i]
vint += "_"
for i in range(len(b[3])):
vint += b[3][i]
vint += "()"
matmult = t3str + "*" + vint + "*" + t2str
else:
#place t3 last
a,b,c = setup(t2, v, t3)
t2str = "t3."
for i in range(len(a[2])):
t2str += a[2][i]
t2str += "_"
for i in range(len(a[3])):
t2str += a[3][i]
t2str += "()"
t3str = "t2."
for i in range(len(c[2])):
t3str += c[2][i]
t3str += "_"
for i in range(len(c[3])):
t23tr += c[3][i]
t3str += "()"
vint = "vhhpp."
for i in range(len(b[2])):
vint += b[2][i]
vint += "_"
for i in range(len(b[3])):
vint += b[3][i]
vint += "()"
matmult = t2str + "*" + vint + "*" + t3str
#print matmult
retstr = diagsort(a,c)
strng = retstr + "(" + matmult + ")"
#print a
#print b
#print c
return a, b, c, strng
def tex_pre(v,t2,t3):
tx = " \\sum_{"
for i in range(len(v[0])):
tx += v[0][i] + v[1][i]
tx += "} "
tx += "\\langle %s %s \\vert \\vert %s %s \\rangle " % (v[0][0], v[0][1], v[1][0], v[1][1])
tx += "t^{%s %s}_{%s %s}" % (t2[0][0], t2[0][1], t2[1][0], t2[1][1])
#tx += "t^{%s %s %s}_{%s %s %s} " % (t3[0][0], t3[0][1], t3[0][2], t3[1][0], t3[1][1],t3[1][2])
tx += "t^{%s %s}_{%s %s} " % (t3[0][0], t3[0][1], t3[1][0], t3[1][1])
return tx
def tex_aligned(a,b,c):
tx = " \\sum_{"
for i in b[0]:
tx+=i
tx += "}"
tx += " \\sum_{"
for i in b[1]:
tx+=i
tx += "}"
tx += " t^{"
for i in a[0]:
tx += i
tx += "}_{"
for i in a[1]:
tx += i
tx += "} \\langle "
for i in b[0]:
tx += i
tx += "\\vert \\vert "
for i in b[1]:
tx += i
tx +="\\rangle t^{"
for i in c[0]:
tx += i
tx += "}_{"
for i in c[1]:
tx += i
tx += "} "
return tx
def gen_entry(v,t2,t3):
#Generate table entry for diagram given by t2,t3,v
tx1 = tex_pre(v,t2,t3)
a,b,c, strng = generate_t2t2(v,t2,t3)
tx2 = tex_aligned(a,b,c)
return "$$ " + tx1 + " \\rightarrow " + tx2 + "$$" , strng
v = [["l"],["d"]]
t2 = [["a","d"],["i","j"]]
t3 = [["b","c"],["l","k"]]
ltx, strng = gen_entry(v,t2,t3)
print ltx
print strng
def realign_diagram(d1,d2):
n = {a:p, b:q, c:r, i:s, j:t, k:u}
| CompPhysics/ThesisProjects | doc/MSc/msc_students/former/AudunHansen/Audun/Pythonscripts/t3_align.py | Python | cc0-1.0 | 11,625 |
def yoda(text):
    return " ".join(text.split(" ")[::-1])

print(yoda("it. invent is do can we All all. at future the predict really can't We"))
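# Expected output: We can't really predict the future at all. All we can do is invent it.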
| alaudo/coderdojo-python | code/25 - decode3.py | Python | cc0-1.0 | 147 |
level_1 = [
    list("........."),
    list("........."),
    list("........."),
    list("........."),
    list("........."),
    list("........."),
    list("........."),
    list("........G"),
    list(".SSSSSSSS"),
]

level_2 = [
    list("........."),
    list("........."),
    list("........."),
    list("........."),
    list("........."),
    list("........G"),
    list("........S"),
    list("..C....SS"),
    list(".SSSSSSSS"),
]

level_3 = [
    list("....SSSSS"),
    list("....S...S"),
    list("....SG..S"),
    list("........S"),
    list("........S"),
    list("........S"),
    list("........S"),
    list("........S"),
    list(".SSSSSSSS"),
]

level_4 = [
    list("......GS"),
    list(".......S"),
    list("D.....CS"),
    list("S....CSS"),
    list(".....SSS"),
    list("D...SSSS"),
    list("S.SSSSSS"),
    list(".CSSSSSS"),
]

LEVELS = [level_1, level_2, level_3, level_4]
| 2065983Y/spinner | levels.py | Python | cc0-1.0 | 921 |
volumes = {
    'oil': [],
    'gas': [],
    'ngl': [],
}
| WaltXon/curvey | templates.py | Python | cc0-1.0 | 58 |
import getopt, sys
import requests
import json

api_key = ""
secret_key = ""
api_url = "http://ws.audioscrobbler.com/2.0/"
artists = []
tags = {}
tagcloud = {}
# default to an awesome user
user_name = "iLikeSpoons"

# Read the api-key and secret-key for the last.fm api from a
# file given as a parameter.
def read_config(arg):
    global api_key, secret_key
    try:
        config = open(arg)
        api_key, secret_key = config.read().splitlines()
        api_key = api_key.rpartition(":")[2]
        secret_key = secret_key.rpartition(":")[2]
    except IOError:
        print("error reading file")
    else:
        print("Config file successfully read\n")
# An example of how to use the last.fm api to get userinfo for a user
def get_user_info():
    payload = {"method": "user.getinfo",
               "user": user_name,
               "api_key": api_key,
               "format": "json"}
    r = requests.get(api_url, params=payload)
    print(r.status_code)
    if r.status_code == 200:
        json_response = r.json()
        return json_response

# Use the last.fm api to get all the top tags for every one of the top
# 50 bands for a given user.
def get_tag_clouds():
    global artists, tags
    payload = {"method": "user.getTopArtists",
               "user": user_name,
               "period": "overall",
               "api_key": api_key,
               "format": "json"}
    r = requests.get(api_url, params=payload)
    print(r.status_code)
    print(r.headers["content-type"])
    if r.status_code == 200:
        json_response = r.json()
        count = 0
        print("getting tags for given user:")
        for group in json_response["topartists"]["artist"]:
            artists.append({"name": group["name"], "playcount": int(group["playcount"])})
            print("".join((str(2*count), "%", "".join(["." for i in range(count)]))), end="\r")
            count += 1
            payload = {"method": "artist.gettoptags",
                       "artist": group["name"],
                       "api_key": api_key,
                       "format": "json"}
            r = requests.get(api_url, params=payload)
            artist_tags = r.json()["toptags"]["tag"]
            taglist = []
            for tag in artist_tags:
                if int(tag["count"]) > 1:
                    taglist.append({"name": tag["name"], "count": int(tag["count"])})
            tags.update({group["name"]: taglist})
        generate_tagcloud()
        # now we have a set of tag clouds!

# Generate a tagcloud from the joint tags and playcounts.
# The counts are generated kinda randomly, this still needs tweaking
# and an option to export the object into json for visualisation.
def generate_tagcloud():
    global tagcloud
    for artist_obj in artists:
        score_mul = artist_obj["playcount"]
        artist = artist_obj["name"]
        for tag_obj in tags[artist]:
            if tag_obj["name"] in tagcloud:
                curr_value = tagcloud[tag_obj["name"]]
                tagcloud[tag_obj["name"]] = curr_value + (tag_obj["count"]/100)*(score_mul/100)
            else:
                tagcloud.update({tag_obj["name"]: (tag_obj["count"]/100)*(score_mul/100)})
    for tag in tagcloud:
        if tagcloud[tag] > 1:
            print(" - ".join((tag, str(tagcloud[tag]))))

# Read command line parameters, do errythin'
def main(argv):
    global api_key, secret_key, user_name
    opts, args = getopt.getopt(argv, "c:hu:", ["config=", "help", "user="])
    config_file_specified = False
    for opt, arg in opts:
        if opt in ("-c", "--config"):
            config_file_specified = True
            read_config(arg)
        elif opt in ("-h", "--help"):
            print("specify a config file or an api key for last.fm")
        elif opt in ("-u", "--user"):
            user_name = arg
    if not config_file_specified and len(args) < 1:
        print("please specify a valid config file or an api key for last.fm")
    elif not config_file_specified:
        api_key = args[0]
        if len(args) > 1:
            secret_key = args[1]
    print("".join(("getting info for user ", user_name)))
    # get_user_info()
    get_tag_clouds()

if __name__ == "__main__":
    main(sys.argv[1:])
| ivandervisevic/lastfm-vis | get_data.py | Python | gpl-2.0 | 4,216 |
#Written by Reid McIlroy-Young for Dr. John McLevey, University of Waterloo 2015
import os.path
import re

from setuptools import setup, find_packages

with open('metaknowledge/constants.py') as f:
    versionString = re.search(r"__version__ = '(.+)'", f.read()).group(1)

long_descriptionLOC = "README.rst"

if os.path.isfile(long_descriptionLOC):
    long_description = open(long_descriptionLOC).read()
else:
    long_description = ''

if __name__ == '__main__':
    setup(name='metaknowledge',
          version=versionString,
          description="A library for handling Web of science files",
          long_description=long_description,
          author="Reid McIlroy-Young, John McLevey",
          author_email="[email protected], [email protected]",
          license='GPL',
          url="https://github.com/networks-lab/metaknowledge",
          download_url="https://github.com/networks-lab/metaknowledge/archive/{}.tar.gz".format(versionString),
          keywords='WOS',
          classifiers=[
              'Development Status :: 5 - Production/Stable',
              'Environment :: Console',
              'Environment :: MacOS X',
              'Intended Audience :: Science/Research',
              'License :: OSI Approved :: GNU General Public License v2 (GPLv2)',
              'Operating System :: MacOS :: MacOS X',
              'Operating System :: POSIX',
              'Operating System :: Microsoft :: Windows',
              'Programming Language :: Python :: 3 :: Only',
              'Topic :: Education',
              'Topic :: Scientific/Engineering :: Information Analysis',
              'Topic :: Sociology',
              'Topic :: Text Processing',
          ],
          install_requires=['networkx'],
          extras_require={'contour': ['matplotlib', 'scipy', 'numpy']},
          packages=find_packages(),
          entry_points={'console_scripts': [
              'metaknowledge = metaknowledge.bin:mkCLI',
              'metaknowledge-mdToNb = metaknowledge.bin:mkMdToNb',
              'metaknowledge-DocsGen = metaknowledge.bin:mkDocs',
          ]},
          test_suite='metaknowledge.tests',
          )
    print("metaknowledge installed\nIf you intend to use the gender name data or journal abbreviations facilities it is\nadvisable to download and setup the required files now.\nRunning following line in your interpreter will do it:\nimport metaknowledge;metaknowledge.downloadExtras()")
| networks-lab/isilib | setup.py | Python | gpl-2.0 | 2,376 |
import os, sys, getopt
import glob
import json
import ConfigParser

config = ConfigParser.ConfigParser()
config.read('/var/www/lib/config.ini')
media_folder = config.get('system', 'usb_folder')

filter = False
path = media_folder + '/'
destination = ""
filters_extensions = [".gcode", ".nc", ".gc", ".stl", ".obj"]

# Assumed help text; the original references an undefined 'usage', so this is
# reconstructed from the accepted options below.
usage = "usb_browser.py [-h] [-d <dest>] [-p <path>] [-f]"

try:
    # note: -d and -p take arguments, so the short-option string needs colons
    opts, args = getopt.getopt(sys.argv[1:], "hd:p:f", ["help", "dest=", "path=", "filter="])
except getopt.GetoptError as err:
    # Error handling for unknown or incorrect number of options
    print "Correct Use"
    print usage
    print err
    sys.exit(2)

for opt, arg in opts:
    if opt == '-h':
        print usage
        sys.exit()
    elif opt in ("-d", "--dest"):
        destination = arg
    elif opt in ("-p", "--path"):
        path = path + arg
    elif opt in ("-f", "--filter"):
        filter = True  # if enabled show only
        #print "filter enabled"

files = []
if os.path.isdir(path):
    for fn in os.listdir(path):
        include = False
        if os.path.isfile(path + "/" + fn):
            extension = os.path.splitext(path + "/" + fn)[1]
            if extension in filters_extensions:
                include = True
        elif os.path.isdir(path + "/" + fn):
            include = True
            fn = fn + "/"
        if include:
            files.append(fn)
    print json.dumps(files)
    if destination != "":
        dest_file = open(destination, 'w')
        print >> dest_file, json.dumps(files)
        dest_file.close()
else:
    print str(path) + " is not a directory or doesn't exist"
| FABtotum/FAB-UI | fabui/python/usb_browser.py | Python | gpl-2.0 | 1,657 |
# -*- coding: utf-8 -*-
"""
Created on Sun Nov 08 23:40:43 2015

@author: Ben
"""
import scipy.signal as signal
import numpy as np
import threading
from scipy.signal.signaltools import _next_regular
from numpy.fft import rfftn, irfftn

_rfft_lock = threading.Lock()

def get_g2(P1, P2, lags=20):
    ''' Returns the Top part of the G2 equation (<P1P2> - <P1><P2>)'''
    lags = int(lags)
    P1 = np.asarray(P1)
    P2 = np.asarray(P2)
    # G2 = np.zeros([lags*2-1])
    start = len(P1*2-1) - lags
    stop = len(P1*2-1) - 1 + lags
    # assume I1 Q1 have the same shape
    sP1 = np.array(P1.shape)
    complex_result = np.issubdtype(P1.dtype, np.complex)
    shape = sP1 - 1
    # Speed up FFT by padding to optimal size for FFTPACK
    fshape = [_next_regular(int(d)) for d in shape]
    fslice = tuple([slice(0, int(sz)) for sz in shape])
    if not complex_result and _rfft_lock.acquire(False):
        try:
            fftP1 = rfftn(P1, fshape)
            rfftP2 = rfftn(P2[::-1], fshape)
            G2 = irfftn((fftP1*rfftP2))[fslice].copy()[start:stop]/len(fftP1)
            P12var = np.var(P1)*np.var(P2)
            return G2 - P12var  # <P1P2> - <P1><P2>
        finally:
            _rfft_lock.release()
    else:
        # If we're here, it's either because we need a complex result, or we
        # failed to acquire _rfft_lock (meaning rfftn isn't threadsafe and
        # is already in use by another thread). In either case, use the
        # (threadsafe but slower) SciPy complex-FFT routines instead.
        # ret = ifftn(fftn(in1, fshape) * fftn(in2, fshape))[fslice].copy()
        print 'Abort, reason:complex input or Multithreaded FFT not available'
        if not complex_result:
            pass  # ret = ret.real
        return None

def getCovMatrix(I1, Q1, I2, Q2, lags=20):
    '''
    This function was adapted from scipy.signal.fftconvolve.
    By defining the number of lags one defines a region of interest,
    meaning any effect should happen on that order of time scale; thus
    lower frequency effects cannot be displayed on that scale and can be
    discarded from the convolution.
    All input shapes need to be the same.
    Requires an updated numpy version (1.9.0 +).
    # 0: <I1I1>
    # 1: <Q1Q1>
    # 2: <I2I2>
    # 3: <Q2Q2>
    # 4: <I1Q1>
    # 5: <I2Q2>
    # 6: <I1I2>
    # 7: <Q1Q2>
    # 8: <I1Q2>
    # 9: <Q1I2>
    # 10: <Squeezing> Magnitude
    # 11: <Squeezing> Phase
    '''
    lags = int(lags)
    I1 = np.asarray(I1)
    Q1 = np.asarray(Q1)
    I2 = np.asarray(I2)
    Q2 = np.asarray(Q2)
    CovMat = np.zeros([14, lags*2+1])
    start = len(I1) - lags - 1  # len(I1*2-1)-lags
    stop = len(I1) + lags  # len(I1*2-1)-1+lags
    # assume I1 Q1 have the same shape
    sI1 = np.array(I1.shape)
    sQ2 = np.array(Q2.shape)
    complex_result = (np.issubdtype(I1.dtype, np.complex) or
                      np.issubdtype(Q2.dtype, np.complex))
    shape = sI1 + sQ2 - 1
    # HPfilt = (int(sI1/(lags*4)))  # smallest features visible is lamda/4
    # Speed up FFT by padding to optimal size for FFTPACK
    fshape = [_next_regular(int(d)) for d in shape]
    fslice = tuple([slice(0, int(sz)) for sz in shape])
    # Pre-1.9 NumPy FFT routines are not threadsafe. For older NumPys, make
    # sure we only call rfftn/irfftn from one thread at a time.
    if not complex_result and _rfft_lock.acquire(False):
        try:
            fftI1 = rfftn(I1, fshape)
            fftQ1 = rfftn(Q1, fshape)
            fftI2 = rfftn(I2, fshape)
            fftQ2 = rfftn(Q2, fshape)
            rfftI1 = rfftn(I1[::-1], fshape)
            rfftQ1 = rfftn(Q1[::-1], fshape)
            rfftI2 = rfftn(I2[::-1], fshape)
            rfftQ2 = rfftn(Q2[::-1], fshape)
            # filter frequencies outside the lags range (This is buggy atm)
            # fftI1 = np.concatenate((np.zeros(HPfilt), fftI1[HPfilt:]))
            # fftQ1 = np.concatenate((np.zeros(HPfilt), fftQ1[HPfilt:]))
            # fftI2 = np.concatenate((np.zeros(HPfilt), fftI2[HPfilt:]))
            # fftQ2 = np.concatenate((np.zeros(HPfilt), fftQ2[HPfilt:]))
            # filter frequencies outside the lags range
            # rfftI1 = np.concatenate((np.zeros(HPfilt), rfftI1[HPfilt:]))
            # rfftQ1 = np.concatenate((np.zeros(HPfilt), rfftQ1[HPfilt:]))
            # rfftI2 = np.concatenate((np.zeros(HPfilt), rfftI2[HPfilt:]))
            # rfftQ2 = np.concatenate((np.zeros(HPfilt), rfftQ2[HPfilt:]))
            # 0: <I1I1>
            CovMat[0, :] = (irfftn((fftI1*rfftI1))[fslice].copy()[start:stop] / len(fftI1))
            # 1: <Q1Q1>
            CovMat[1, :] = (irfftn((fftQ1*rfftQ1))[fslice].copy()[start:stop] / len(fftI1))
            # 2: <I2I2>
            CovMat[2, :] = (irfftn((fftI2*rfftI2))[fslice].copy()[start:stop] / len(fftI1))
            # 3: <Q2Q2>
            CovMat[3, :] = (irfftn((fftQ2*rfftQ2))[fslice].copy()[start:stop] / len(fftI1))
            # 4: <I1Q1>
            CovMat[4, :] = (irfftn((fftI1*rfftQ1))[fslice].copy()[start:stop] / len(fftI1))
            # 5: <I2Q2>
            CovMat[5, :] = (irfftn((fftI2*rfftQ2))[fslice].copy()[start:stop] / len(fftI1))
            # 6: <I1I2>
            CovMat[6, :] = (irfftn((fftI1*rfftI2))[fslice].copy()[start:stop] / len(fftI1))
            # 7: <Q1Q2>
            CovMat[7, :] = (irfftn((fftQ1*rfftQ2))[fslice].copy()[start:stop] / len(fftI1))
            # 8: <I1Q2>
            CovMat[8, :] = (irfftn((fftI1*rfftQ2))[fslice].copy()[start:stop] / len(fftI1))
            # 9: <Q1I2>
            CovMat[9, :] = (irfftn((fftQ1*rfftI2))[fslice].copy()[start:stop] / len(fftI1))
            # 10: <Squeezing> Magnitude
            CovMat[10, :] = (abs(1j*(CovMat[8, :]+CovMat[9, :]) + (CovMat[6, :] - CovMat[7, :])))
            # 11: <Squeezing> Angle
            CovMat[11, :] = np.angle(1j*(CovMat[8, :]+CovMat[9, :]) + (CovMat[6, :] - CovMat[7, :]))
            # 12: <Squeezing> Magnitude For Hyb Coupler
            CovMat[12, :] = (abs(1j*(CovMat[6, :]+CovMat[7, :]) + (CovMat[8, :] - CovMat[9, :])))
            # 13: Generic Absolute cross_correlation Power
            CovMat[13, :] = abs(CovMat[6, :]) + abs(CovMat[7, :]) + abs(CovMat[8, :]) + abs(CovMat[9, :])
            CovMat = f1pN(CovMat, lags, d=1)  # correct Trigger jitter
            return CovMat
        finally:
            _rfft_lock.release()
    else:
        # If we're here, it's either because we need a complex result, or we
        # failed to acquire _rfft_lock (meaning rfftn isn't threadsafe and
        # is already in use by another thread). In either case, use the
        # (threadsafe but slower) SciPy complex-FFT routines instead.
        # ret = ifftn(fftn(in1, fshape) * fftn(in2, fshape))[fslice].copy()
        print 'Abort, reason:complex input or Multithreaded FFT not available'
        if not complex_result:
            print 'Not a complex result'
            pass  # ret = ret.real
    return CovMat

def f1pN(CovMat, lags0, d=1):
    '''Simple Trigger correction function'''
    tArray = abs(CovMat[6, :]) + abs(CovMat[7, :]) + abs(CovMat[8, :]) + abs(CovMat[9, :])
    squeezing_noise = np.sqrt(np.var(np.abs(tArray)))  # including the peak matters little
    if np.max(np.abs(tArray[lags0 - d:lags0 + d + 1])) < 4.0 * squeezing_noise:
        # logging.debug('SN ratio too low: Can not find trigger position')
        distance = 0
    else:
        distance = (np.argmax(tArray[lags0 - d:lags0 + d + 1]) - d) * -1
    # correct for the found trigger jitter distance
    for i in range(6, 14):
        CovMat[i, :] = np.roll(CovMat[i, :], distance)  # correct Trigger jitter
    return CovMat

def covConv(a, b, lags=20):
    ''' returns fft convolution result
    assumes a, b to be same length 1-d numpy arrays
    '''
    result = signal.fftconvolve(a, b[::-1], mode='full') / (len(a) - 1)
    start = len(a) - lags
    stop = len(a) - 1 + lags
    return result[start:stop]
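
# Minimal usage sketch (assumption: I1/Q1/I2/Q2 are equal-length 1-d float
# arrays from a two-channel IQ measurement):
#
#   CovMat = getCovMatrix(I1, Q1, I2, Q2, lags=20)
#   squeezing_mag = CovMat[10, :]        # see the row map in the docstring
#   g2_top = get_g2(P1, P2, lags=20)     # <P1P2> - <P1><P2>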
| benschneider/Generic-Sweepscript | covfunc.py | Python | gpl-2.0 | 7,868 |
# -*- coding: utf-8 -*-
#
# This file is part of EventGhost.
# Copyright © 2005-2016 EventGhost Project <http://www.eventghost.net/>
#
# EventGhost is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 2 of the License, or (at your option)
# any later version.
#
# EventGhost is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along
# with EventGhost. If not, see <http://www.gnu.org/licenses/>.
# Local imports
import eg
from ContainerItem import ContainerItem
from TreeItem import HINT_MOVE_EVERYWHERE
class FolderItem(ContainerItem):
    xmlTag = "Folder"
    icon = eg.Icons.FOLDER_ICON
    dropBehaviour = {
        "Macro": HINT_MOVE_EVERYWHERE,
        "Folder": HINT_MOVE_EVERYWHERE,
    }
| WoLpH/EventGhost | eg/Classes/FolderItem.py | Python | gpl-2.0 | 1,066 |
"""
WSGI config for untitled project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
| sql-viewer/SQLGlimpse | sqlviewer/wsgi.py | Python | gpl-2.0 | 313 |
verbose_names = (
    "Cash & Short Term Investments",
    "Cash Only",
    "Short-Term Investments",
    "Cash & Short Term Investments Growth",
    "Cash & ST Investments / Total Assets",
    "Total Accounts Receivable",
    "Accounts Receivables, Net",
    "Accounts Receivables, Gross",
    "Bad Debt/Doubtful Accounts",
    "Other Receivables",
    "Accounts Receivable Growth",
    "Accounts Receivable Turnover",
    "Inventories",
    "Finished Goods",
    "Work in Progress",
    "Raw Materials",
    "Progress Payments & Other",
    "Other Current Assets",
    "Prepaid Expenses",
    "Miscellaneous Current Assets",
    "Total Current Assets",
    "Net Property, Plant & Equipment",
    "Property, Plant & Equipment - Gross",
    "Buildings",
    "Land & Improvements",
    "Machinery & Equipment",
    "Construction in Progress",
    "Leases",
    "Computer Software and Equipment",
    "Transportation Equipment",
    "Other Property, Plant & Equipment",
    "Accumulated Depreciation",
    "Buildings_2",
    "Land & Improvements_2",
    "Machinery & Equipment_2",
    "Construction in Progress_2",
    "Computer Software and Equipment_2",
    "Transportation Equipment_2",
    "Other Property, Plant & Equipment_2",
    "Total Investments and Advances",
    "LT Investment - Affiliate Companies",
    "Other Long-Term Investments",
    "Long-Term Note Receivable",
    "Intangible Assets",
    "Net Goodwill",
    "Net Other Intangibles",
    "Other Assets",
    "Deferred Charges",
    "Tangible Other Assets",
    "Total Assets",
    "Assets - Total - Growth",
    "Asset Turnover",
    "Return On Average Assets",
    "ST Debt & Current Portion LT Debt",
    "Short Term Debt",
    "Current Portion of Long Term Debt",
    "Accounts Payable",
    "Accounts Payable Growth",
    "Income Tax Payable",
    "Other Current Liabilities",
    "Miscellaneous Current Liabilities",
    "Total Current Liabilities",
    "Current Ratio",
    "Quick Ratio",
    "Cash Ratio",
    "Long-Term Debt",
    "Long-Term Debt excl. Capitalized Leases",
    "Non-Convertible Debt",
    "Convertible Debt",
    "Capitalized Lease Obligations",
    "Provision for Risks & Charges",
    "Deferred Taxes",
    "Deferred Taxes - Credit",
    "Deferred Taxes - Debit",
    "Other Liabilities",
    "Deferred Tax Liability-Untaxed Reserves",
    "Other Liabilities (excl. Deferred Income)",
    "Deferred Income",
    "Total Liabilities",
    "Non-Equity Reserves",
    "Total Liabilities / Total Assets",
    "Preferred Stock (Carrying Value)",
    "Redeemable Preferred Stock",
    "Non-Redeemable Preferred Stock",
    "Common Equity (Total)",
    "Common Stock Par/Carry Value",
    "Additional Paid-In Capital/Capital Surplus",
    "Retained Earnings",
    "ESOP Debt Guarantee",
    "Cumulative Translation Adjustment/Unrealized For. Exch. Gain",
    "Unrealized Gain/Loss Marketable Securities",
    "Revaluation Reserves",
    "Other Appropriated Reserves",
    "Unappropriated Reserves",
    "Treasury Stock",
    "Common Equity / Total Assets",
    "Total Shareholders' Equity",
    "Total Shareholders' Equity / Total Assets",
    "Accumulated Minority Interest",
    "Total Equity",
    "Liabilities & Shareholders' Equity",
)
| kakarukeys/algo-fa | fa/database/balancesheet_numerical_columns.py | Python | gpl-2.0 | 3,230 |
"""
/***************************************************************************
Name : QTableWidget Combo Box
Description : Custom Combo Box that stores the row number of its container
Date : 14/October/11
copyright : (C) 2011 by John Gitau
email : [email protected]
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from PyQt4.QtGui import QComboBox
__all__ = ["TableComboBox"]
class TableComboBox(QComboBox):
    # Class constructor
    def __init__(self, row):
        QComboBox.__init__(self)
        self.row = row
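
# Illustrative usage sketch (assumption, not part of the original module):
# embed the combo box in a QTableWidget cell so each editor remembers which
# row owns it.
#
#   combo = TableComboBox(row)
#   table.setCellWidget(row, column, combo)   # 'table' is a QTableWidget
#   # later, combo.row identifies the container's row number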
| olivierdalang/stdm | ui/customcontrols/table_combobox.py | Python | gpl-2.0 | 1,313 |
#!/usr/bin/env python
class tree:
    def __init__(self, l):
        N = len(l) + 1
        m = []
        for i in range(N):
            m.append({"nearest": []})
        for i, j in l:
            m[i]["nearest"].append(j)
            m[j]["nearest"].append(i)
        self.t = []
        for i in range(N):
            self.t.append({"children": []})
        self.t[0]["parent"] = -1
        stack = [(0, -1)]
        while stack:
            n, p = stack.pop()
            if m[n]["nearest"] == [p]:
                self.t[n]["children"] = []
            else:
                for near_node in m[n]["nearest"]:
                    if near_node != p:
                        self.t[n]["children"].append(near_node)
                        self.t[near_node]["parent"] = n
                        stack.append((near_node, n))

    def add_height(self):
        stack = [0]
        while stack:
            n = stack[-1]
            if not self.t[n]["children"]:
                self.t[n]["height"] = 0
                stack.pop()
            else:
                height = 0
                f = True
                for child in self.t[n]["children"]:
                    if not self.t[child].has_key("height"):
                        stack.append(child)
                        f = False
                    else:
                        h = self.t[child]["height"]
                        if h > height: height = h
                if f:
                    self.t[n]["height"] = height + 1
                    stack.pop()

    def add_high(self):
        stack = [0]
        while stack:
            n = stack.pop()
            if n == 0:
                self.t[n]["high"] = 0
            if not self.t[n]["children"]:
                pass
            elif len(self.t[n]["children"]) == 1:
                child = self.t[n]["children"][0]
                self.t[child]["high"] = self.t[n]["high"] + 1
                stack.append(child)
            else:
                heights = [(child, self.t[child]["height"]) \
                           for child in self.t[n]["children"]]
                heights.sort(key=lambda (fst, snd): snd)
                highest_child, highest_height = heights.pop()
                snd_highest_child, snd_highest_height = heights.pop()
                for child in self.t[n]["children"]:
                    l = []
                    if child == highest_child:
                        l.append(snd_highest_height + 2)
                    else:
                        l.append(highest_height + 2)
                    l.append(self.t[n]["high"] + 1)
                    self.t[child]["high"] = max(l)
                    stack.append(child)

    def max_distance(self):
        l = []
        def farthest(n):
            if len(self.t[n]["children"]) > 1:
                m = []
                for child in self.t[n]["children"]:
                    m.append(self.t[child]["height"])
                m.sort()
                snd, fst = m[-2:]
                l.append(fst + snd + 2)
        for i in range(len(self.t)):
            farthest(i)
        return max(l)

    def farthest_distance(self, n):
        l = []
        for child in self.t[n]["children"]:
            l.append(self.t[child]["height"] + 1)
        l.append(self.t[n]["high"])
        return max(l)

import sys
N, M = map(int, sys.stdin.next().split())
l = []
for i in range(N-1):
    l.append(map(lambda s: int(s)-1, sys.stdin.next().split()))
t = tree(l)
t.add_height()
t.add_high()
max_d = t.max_distance()
for s in sys.stdin:
    V, K = map(int, s.split())
    first_travel_distance = t.farthest_distance(V-1)
    print first_travel_distance + max_d * (K-1)
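
# Expected stdin format (as implied by the parsing above): a first line "N M",
# then N-1 edge lines "u v" (1-indexed node labels), then M query lines "V K".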
| ryota-sugimoto/hackerrank | search/journey_scheduling.py | Python | gpl-2.0 | 3,055 |
#!/usr/bin/python
import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk, Gdk
from monitor import Monitor
import threading

def refresh_host(server_list_file=None):
    if server_list_file is None:
        server_list_file = "servers.list"
    list_store = builder.get_object("host_ListStore")
    list_store.clear()
    #list_store.append([True,"a","1.1.1.1",3.14159])
    list_store.append([False, "refresh ...", "", 0])
    #window.queue_draw()
    file_chooser = builder.get_object("server_list_file_chooser")
    server_list_file = file_chooser.get_filename()
    if server_list_file is None:
        server_list_file = "server.list"
        file_chooser.set_filename(server_list_file)
    print "chooser: " + server_list_file
    m = Monitor(server_list_file)
    print "%d servers in list." % len(m.server_list)
    results = m.check_all()
    list_store.clear()
    for i in results:
        list_store.append([i.state == i.SERVER_STATE_UP, i.host, i.ip, i.time])
    print(len(list_store))

class Handler:
    def onButtonClick(self, button):
        print "refresh clicked."
        #Gtk.main_quit()
        list_store = builder.get_object("host_ListStore")
        list_store.clear()
        list_store.append([False, "refresh ...", "", 0])
        refresh_thread = threading.Thread(target=refresh_host)
        refresh_thread.setDaemon(True)
        refresh_thread.start()

    def onCopy1stIP(self, button):
        print "copy to clipboard."
        clipboard = Gtk.Clipboard.get(Gdk.SELECTION_CLIPBOARD)
        list_store = builder.get_object("host_ListStore")
        clipboard.set_text(list_store[0][2], -1)

    def onDeleteWindow(self, *args):
        Gtk.main_quit(*args)

    def onIpEdited(self, arg1, arg2, arg3):
        print "ip clicked"

    def onServerFileChoosed(self, *args):
        print "file choosed"

builder = Gtk.Builder()
builder.add_from_file("monitor_ui.glade")
builder.connect_signals(Handler())
window = builder.get_object("main_window")
#window = builder.get_object("window2")
window.show_all()
Gtk.main()
| SimonTheCoder/server_monitor | wmonitor.py | Python | gpl-2.0 | 2,078 |
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
from Bio._py3k import range
from Bio._py3k import basestring
from Bio.Seq import UnknownSeq
from Bio.SeqUtils.CheckSum import seguid
from Bio.SeqFeature import ExactPosition, UnknownPosition
from Bio.SeqFeature import FeatureLocation, CompoundLocation, SeqFeature
from Bio.SeqRecord import SeqRecord
def checksum_summary(record):
if isinstance(record.seq, UnknownSeq):
return repr(record.seq)
if len(record.seq) < 25:
short = str(record.seq)
else:
short = str(record.seq)[:19] \
+ "..." + str(record.seq)[-3:]
return "%s [%s] len %i" \
% (short, seguid(record.seq), len(record.seq))
def compare_reference(old_r, new_r):
"""Compare two Reference objects
Note new_r is assumed to be a BioSQL DBSeqRecord, due to limitations
of the BioSQL table structure.
"""
assert old_r.title == new_r.title, \
"%s vs %s" % (old_r.title, new_r.title)
assert old_r.authors == new_r.authors, \
"%s vs %s" % (old_r.authors, new_r.authors)
assert old_r.journal == new_r.journal, \
"%s vs %s" % (old_r.journal, new_r.journal)
assert old_r.medline_id == new_r.medline_id, \
"%s vs %s" % (old_r.medline_id, new_r.medline_id)
if old_r.pubmed_id and new_r.pubmed_id:
assert old_r.pubmed_id == new_r.pubmed_id
# Looking at BioSQL/BioSeq.py function _retrieve_reference
# it seems that it will get either the MEDLINE or PUBMED,
# but not both. I *think* the current schema does not allow
# us to store both... must confirm this.
# TODO - assert old_r.comment == new_r.comment
# Looking at the tables, I *think* the current schema does not
# allow us to store a reference comment. Must confirm this.
assert old_r.comment == new_r.comment or new_r.comment == "", \
"%r vs %r" % (old_r.comment, new_r.comment)
# TODO - assert old_r.consrtm == new_r.consrtm
# Looking at the tables, I *think* the current schema does not
# allow us to store a consortium.
assert old_r.consrtm == new_r.consrtm or new_r.consrtm == ""
if len(old_r.location) == 0:
assert len(new_r.location) == 0
else:
# BioSQL can only store ONE location!
# TODO - Check BioPerl with a GenBank file with multiple ref locations
assert isinstance(old_r.location[0], FeatureLocation)
assert isinstance(new_r.location[0], FeatureLocation)
assert old_r.location[0].start == new_r.location[0].start and \
old_r.location[0].end == new_r.location[0].end
return True
def compare_feature(old_f, new_f):
"""Compare two SeqFeature objects"""
assert isinstance(old_f, SeqFeature)
assert isinstance(new_f, SeqFeature)
assert old_f.type == new_f.type, \
"%s -> %s" % (old_f.type, new_f.type)
assert old_f.strand == new_f.strand, \
"%s -> %s" % (old_f.strand, new_f.strand)
assert old_f.ref == new_f.ref, \
"%s -> %s" % (old_f.ref, new_f.ref)
assert old_f.ref_db == new_f.ref_db, \
"%s -> %s" % (old_f.ref_db, new_f.ref_db)
# TODO - BioSQL does not store/retrieve feature's id (Bug 2526)
assert old_f.id == new_f.id or new_f.id == "<unknown id>"
# TODO - Work out how the location_qualifier_value table should
# be used, given BioPerl seems to ignore it (Bug 2766)
# assert old_f.location_operator == new_f.location_operator, \
# "%s -> %s" % (old_f.location_operator, new_f.location_operator)
# We dont store fuzzy locations:
assert old_f.location.start == new_f.location.start \
or (isinstance(old_f.location.start, UnknownPosition) and
isinstance(new_f.location.start, UnknownPosition)), \
"%s -> %s" % (old_f.location.start,
new_f.location.start)
assert old_f.location.end == new_f.location.end \
or (isinstance(old_f.location.end, UnknownPosition) and
isinstance(new_f.location.end, UnknownPosition)), \
"%s -> %s" % (old_f.location.end,
new_f.location.end)
assert isinstance(old_f.location, CompoundLocation) == \
isinstance(new_f.location, CompoundLocation)
if isinstance(old_f.location, CompoundLocation):
assert len(old_f.location.parts) == len(new_f.location.parts)
for old_l, new_l in zip(old_f.location.parts, new_f.location.parts):
assert old_l.start == new_l.start
assert old_l.end == new_l.end
assert old_l.strand == new_l.strand
assert old_l.ref == new_l.ref
assert old_l.ref_db == new_l.ref_db
assert len(old_f.location.parts) == len(new_f.location.parts)
for old_sub, new_sub in zip(old_f.location.parts, new_f.location.parts):
# These are FeatureLocation objects
assert old_sub.nofuzzy_start == new_sub.nofuzzy_start
assert old_sub.nofuzzy_end == new_sub.nofuzzy_end
assert old_sub.strand == new_sub.strand
# Using private variable to avoid deprecation warnings
assert len(old_f._sub_features) == len(new_f._sub_features), \
"number of sub_features: %s -> %s" % \
(len(old_f._sub_features), len(new_f._sub_features))
for old_sub, new_sub in zip(old_f._sub_features, new_f._sub_features):
# These are SeqFeature objects
assert old_sub.type == new_sub.type, \
"%s -> %s" % (old_sub.type, new_sub.type)
assert old_sub.strand == new_sub.strand, \
"%s -> %s" % (old_sub.strand, new_sub.strand)
assert old_sub.ref == new_sub.ref, \
"%s -> %s" % (old_sub.ref, new_sub.ref)
assert old_sub.ref_db == new_sub.ref_db, \
"%s -> %s" % (old_sub.ref_db, new_sub.ref_db)
# TODO - Work out how the location_qualifier_value table should
# be used, given BioPerl seems to ignore it (Bug 2766)
# assert old_sub.location_operator == new_sub.location_operator, \
# "%s -> %s" % (old_sub.location_operator, new_sub.location_operator)
# Compare sub-feature Locations:
#
# BioSQL currently does not store fuzzy locations, but instead stores
    # them as FeatureLocation.nofuzzy_start and FeatureLocation.nofuzzy_end.
# The vast majority of cases will be comparisons of ExactPosition
# class locations, so we'll try that first and catch the exceptions.
try:
assert str(old_sub.location) == str(new_sub.location), \
"%s -> %s" % (str(old_sub.location), str(new_sub.location))
except AssertionError as e:
if isinstance(old_sub.location.start, ExactPosition) and \
isinstance(old_sub.location.end, ExactPosition):
                # It's not a problem with fuzzy locations, re-raise
raise e
else:
# At least one of the locations is fuzzy
assert old_sub.location.nofuzzy_start == \
new_sub.location.nofuzzy_start, \
"%s -> %s" % (old_sub.location.nofuzzy_start,
new_sub.location.nofuzzy_start)
assert old_sub.location.nofuzzy_end == \
new_sub.location.nofuzzy_end, \
"%s -> %s" % (old_sub.location.nofuzzy_end,
new_sub.location.nofuzzy_end)
assert len(old_f.qualifiers) == len(new_f.qualifiers)
assert set(old_f.qualifiers) == set(new_f.qualifiers)
for key in old_f.qualifiers:
if isinstance(old_f.qualifiers[key], str):
if isinstance(new_f.qualifiers[key], str):
assert old_f.qualifiers[key] == new_f.qualifiers[key]
elif isinstance(new_f.qualifiers[key], list):
# Maybe a string turning into a list of strings?
assert [old_f.qualifiers[key]] == new_f.qualifiers[key], \
"%s -> %s" \
% (repr(old_f.qualifiers[key]),
repr(new_f.qualifiers[key]))
else:
                assert False, "Problem with feature's '%s' qualifier" % key
else:
# Should both be lists of strings...
assert old_f.qualifiers[key] == new_f.qualifiers[key], \
"%s -> %s" % (old_f.qualifiers[key], new_f.qualifiers[key])
return True
def compare_sequence(old, new):
"""Compare two Seq or DBSeq objects"""
assert len(old) == len(new), "%i vs %i" % (len(old), len(new))
assert str(old) == str(new)
if isinstance(old, UnknownSeq):
assert isinstance(new, UnknownSeq)
else:
assert not isinstance(new, UnknownSeq)
ln = len(old)
s = str(old)
assert isinstance(s, str)
# Don't check every single element; for long sequences
# this takes far far far too long to run!
# Test both positive and negative indices
if ln < 50:
indices = list(range(-ln, ln))
else:
# A selection of end cases, and the mid point
indices = [-ln, -ln+1, -(ln//2), -1, 0, 1, ln//2, ln-2, ln-1]
# Test element access,
for i in indices:
expected = s[i]
assert expected == old[i]
assert expected == new[i]
# Test slices
indices.append(ln) # check copes with overflows
indices.append(ln + 1000) # check copes with overflows
for i in indices:
for j in indices:
expected = s[i:j]
assert expected == str(old[i:j]), \
"Slice %s vs %s" % (repr(expected), repr(old[i:j]))
assert expected == str(new[i:j]), \
"Slice %s vs %s" % (repr(expected), repr(new[i:j]))
# Slicing with step of 1 should make no difference.
# Slicing with step 3 might be useful for codons.
for step in [1, 3]:
expected = s[i:j:step]
assert expected == str(old[i:j:step])
assert expected == str(new[i:j:step])
# Check automatic end points
expected = s[i:]
assert expected == str(old[i:])
assert expected == str(new[i:])
expected = s[:i]
assert expected == str(old[:i])
assert expected == str(new[:i])
    # Check "copy" slice
assert s == str(old[:])
assert s == str(new[:])
return True
def compare_features(old_list, new_list):
assert isinstance(old_list, list)
assert isinstance(new_list, list)
assert len(old_list) == len(new_list)
for old_f, new_f in zip(old_list, new_list):
if not compare_feature(old_f, new_f):
return False
return True
def compare_record(old, new):
"""Compare two SeqRecord or DBSeqRecord objects"""
assert isinstance(old, SeqRecord)
assert isinstance(new, SeqRecord)
# Sequence:
compare_sequence(old.seq, new.seq)
# Basics:
assert old.id == new.id
assert old.name == new.name
assert old.description == new.description
assert old.dbxrefs == new.dbxrefs, \
"dbxrefs mismatch\nOld: %s\nNew: %s" \
% (old.dbxrefs, new.dbxrefs)
# Features:
if not compare_features(old.features, new.features):
return False
# Annotation:
# We are expecting to see some "extra" annotations appearing,
    # such as 'cross_references', 'date', 'data_file_division',
    # 'ncbi_taxid' and 'gi'.
# TODO - address these, see Bug 2681?
new_keys = set(new.annotations).difference(old.annotations)
new_keys = new_keys.difference(['cross_references', 'date',
'data_file_division', 'ncbi_taxid', 'gi'])
assert not new_keys, "Unexpected new annotation keys: %s" \
% ", ".join(new_keys)
missing_keys = set(old.annotations).difference(new.annotations)
missing_keys = missing_keys.difference(['ncbi_taxid', # Can't store chimeras
])
assert not missing_keys, "Unexpectedly missing annotation keys: %s" \
% ", ".join(missing_keys)
# In the short term, just compare any shared keys:
for key in set(old.annotations).intersection(new.annotations):
if key == "references":
assert len(old.annotations[key]) == len(new.annotations[key])
for old_r, new_r in zip(old.annotations[key], new.annotations[key]):
compare_reference(old_r, new_r)
elif key == "comment":
# Turn them both into containing strings for comparison - due to
# line wrapping in GenBank etc we don't really expect the white
# space to be 100% the same.
if isinstance(old.annotations[key], list):
old_comment = " ".join(old.annotations[key])
else:
old_comment = old.annotations[key]
if isinstance(new.annotations[key], list):
new_comment = " ".join(new.annotations[key])
else:
new_comment = new.annotations[key]
old_comment = old_comment.replace("\n", " ").replace(" ", " ")
new_comment = new_comment.replace("\n", " ").replace(" ", " ")
assert old_comment == new_comment, \
"Comment annotation changed by load/retrieve\n" \
"Was:%s\nNow:%s" \
% (repr(old_comment), repr(new_comment))
elif key in ["taxonomy", "organism", "source"]:
# If there is a taxon id recorded, these fields get overwritten
# by data from the taxon/taxon_name tables. There is no
# guarantee that they will be identical after a load/retrieve.
assert isinstance(new.annotations[key], basestring) \
or isinstance(new.annotations[key], list)
elif type(old.annotations[key]) == type(new.annotations[key]):
assert old.annotations[key] == new.annotations[key], \
"Annotation '%s' changed by load/retrieve\nWas:%s\nNow:%s" \
% (key, old.annotations[key], new.annotations[key])
elif isinstance(old.annotations[key], str) \
and isinstance(new.annotations[key], list):
# Any annotation which is a single string gets turned into
# a list containing one string by BioSQL at the moment.
assert [old.annotations[key]] == new.annotations[key], \
"Annotation '%s' changed by load/retrieve\nWas:%s\nNow:%s" \
% (key, old.annotations[key], new.annotations[key])
elif isinstance(old.annotations[key], list) \
and isinstance(new.annotations[key], str):
assert old.annotations[key] == [new.annotations[key]], \
"Annotation '%s' changed by load/retrieve\nWas:%s\nNow:%s" \
% (key, old.annotations[key], new.annotations[key])
return True
def compare_records(old_list, new_list):
assert isinstance(old_list, list)
assert isinstance(new_list, list)
assert len(old_list) == len(new_list)
for old_r, new_r in zip(old_list, new_list):
if not compare_record(old_r, new_r):
return False
return True
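# A minimal usage sketch (hedged): these helpers are normally driven by the
# BioSQL test suite. The file name "example.gbk" and the handle ``db`` below
# are illustrative assumptions -- ``db`` stands for a BioSQL database handle
# obtained elsewhere.
#
#   from Bio import SeqIO
#   old = SeqIO.read("example.gbk", "genbank")
#   new = db.lookup(name=old.name)  # a BioSQL DBSeqRecord
#   assert compare_record(old, new)
#   print(checksum_summary(old))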
| updownlife/multipleK | dependencies/biopython-1.65/Tests/seq_tests_common.py | Python | gpl-2.0 | 15,350 |
# -*- coding: utf-8 -*-
import simplejson
import urllib
import httplib2
from lxml import etree
from datetime import datetime
from linkedtv.model import Enrichment
from linkedtv.api.dimension.DimensionService import DimensionService
"""
SPARQL END-POINT
https://nwr_hack:[email protected]/nwr/cars-hackathon/sparql
ACCESSIBLE YASGUI
http://yasgui.laurensrietveld.nl/
https://newsreader.scraperwiki.com/
http://www.newsreader-project.eu/domain-ontology/
http://www.newsreader-project.eu/
http://www.comsode.eu/
"""
class NewsReaderAPI(DimensionService):
def __init__(self):
        DimensionService.__init__(self, 'NewsReaderAPI')
self.BASE_URL = 'http://newsreader.scraperwiki.com'
self.DESIRED_AMOUNT_OF_RESULTS = 50
def fetch(self, query, entities, dimension):
if self.__isValidDimension(dimension):
#first do a field query to get the most relevant results
results = self.__search(query, entities, dimension, True, self.DESIRED_AMOUNT_OF_RESULTS)
queries, enrichments = self.__formatResponse(
results,
dimension
)
#queries.append(queryUrl)
return { 'enrichments' : enrichments, 'queries' : queries}
return None
def __isValidDimension(self, dimension):
return True
def __search(self, query, entities, dimension, fieldQuery, numResults):
print entities
http = httplib2.Http()
#create the query
if query == '':
#query = ''.join(e['label'] for e in entities)
for e in entities:
if e['uri'].find('dbpedia') != -1:
query += e['uri'][e['uri'].rfind('/') + 1:]
query = urllib.quote(query.encode('utf8'))
#construct the url
#http://newsreader.scraperwiki.com/cars/summary_of_events_with_event_label?filter=bribe&datefilter=2010
responses = []
url = '%s/cars/summary_of_events_with_event_label?filter=%s&datefilter=2010&output=json' % (self.BASE_URL, query)
if url:
headers = {'Accept':'application/json'}
resp, content = http.request(url, 'GET', headers=headers)
if content:
responses.append({'query' : url, 'data' : content})
        # also try to look up events by the actor, e.g.:
#https://newsreader.scraperwiki.com/summary_of_events_with_actor?uris.0=dbpedia:Hartmut_Mehdorn
url = '%s/cars/summary_of_events_with_actor?uris.0=dbpedia:%s&output=json' % (self.BASE_URL, query.replace('"', '').replace('%20', '_'))
if url:
headers = {'Accept':'application/json'}
resp, content = http.request(url, 'GET', headers=headers)
if content:
responses.append({'query' : url, 'data' : content})
url = '%s/cars/event_details_filtered_by_actor?uris.0=dbpedia:%s&output=json' % (self.BASE_URL, query.replace('"', '').replace('%20', '_'))
if url:
headers = {'Accept':'application/json'}
resp, content = http.request(url, 'GET', headers=headers)
if content:
responses.append({'query' : url, 'data' : content})
return responses
def __constructQuery(self, entities):
queryParts = []
for e in entities:
queryParts.append('"%s"' % urllib.quote(e['label'].encode('utf8')))
return ' '.join(queryParts)
"""
{"count": 2, "next page": "https://newsreader.scraperwiki.com/cars/summary_of_events_with_event_label/page/2?filter=berlin&datefilter=2010&output=json",
"page number": 1, "payload": [{"datetime": "2010-05-05",
"event": "http://www.newsreader-project.eu/data/cars/2010/05/05/7YCX-WJS1-2SHG-X1M8.xml#ev17",
"event_label": "BERLIN", "event_size": "5"}, {"datetime": "2010-06-02",
"event": "http://www.newsreader-project.eu/data/cars/2010/06/02/7YM4-G8G1-2PP8-S20K.xml#ev45",
"event_label": "berlin", "event_size": "5"}]}
"""
    # format the JSON payloads returned by the NewsReader endpoints (see example above)
def __formatResponse(self, data, dimension):
enrichments = []
queries = []
urls = {}
for i in data:
d = i['data']
q = i['query']
results = simplejson.loads(d)
if results and results.has_key('payload'):
for r in results['payload']:
if not urls.has_key(r['event']):
label = None
source = 'NewsReader'
date = None
metadata = self.getEventMetadata(r['event'])
print metadata
if metadata:
if metadata.has_key('title'):
label = metadata['title']
if metadata.has_key('source'):
source = metadata['source']
if metadata.has_key('date'):
date = metadata['date']
e = Enrichment(
label,
url=r['event'],#'%s%s' % ('https://newsreader.scraperwiki.com/get_document_metadata?uris.0=', r['event']) ,
source=source,
date=date
)
urls[r['event']] = True
enrichments.append(e)
queries.append(q)
return queries, enrichments
def getEventMetadata(self, eventUrl):
http = httplib2.Http()
url = 'https://newsreader.scraperwiki.com/get_document_metadata?uris.0=%s' % eventUrl
print url
headers = {'Accept':'application/json'}
resp, content = http.request(url, 'GET', headers=headers)
md = {}
if content:
results = simplejson.loads(content)
if results.has_key('payload'):
payload = results['payload']
if payload.has_key('@graph'):
g = payload['@graph'][0]
if g.has_key('dct:created'):
md['date'] = g['dct:created']['@value']
if g.has_key('dct:title'):
md['title'] = g['dct:title']['@value']
if g.has_key('dct:source'):
md['source'] = g['dct:source']['@id']
return md
return None
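# Hedged usage sketch: this service is normally instantiated by the LinkedTV
# dimension framework, but it can be exercised directly. The query string and
# the empty entity list below are illustrative assumptions.
#
#   api = NewsReaderAPI()
#   result = api.fetch('Berlin', [], None)
#   if result:
#       for e in result['enrichments']:
#           print e.label, e.url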
| beeldengeluid/linkedtv-editortool | src/linkedtv/api/dimension/public/NewsReaderAPI.py | Python | gpl-2.0 | 5,284 |
import re
import time
import requests
import pbl.pollers
from pbl.pollers.baseentry import BaseEntry
from pbl.pollers.recentitemspoller import RecentItemsDownloader
from pbl.pollers.recentitemspoller import RecentItemsPoller
class KdeEntry(BaseEntry):
def __init__(self, data, metadata):
BaseEntry.__init__(self, 'kde', data, metadata)
return
class KdeDownloadQueue(RecentItemsDownloader):
""" Downloader thread for pastebin. """
def __init__(self, name, interval):
RecentItemsDownloader.__init__(self, name, interval)
self.download_url = 'http://paste.kde.org/{id}/raw/'
self.details_url = 'http://paste.kde.org/{id}/'
self.details_regexp = dict(
user=r'<i>Posted by (?!Anonymous)(.*?) at',
syntax=r'<i>Language: ([^<]+)</i>',
)
self.session = requests.session()
return
def process_id(self, id):
""" From the ID of a paste, fetch the content and the metadata.
Determine if the paste is worth keeping. """
self.echo('processing %s [%u left]' % (id, len(self.queue)))
# First, download the raw content here
url = self.download_url.format(id=id)
r = self.session.get(url, **pbl.pollers.requestoptions)
if not r.ok:
self.echo('Problem downloading page %s, status=%s, error=%s' % (url, repr(r.status_code), repr(r.error)))
return False
data = r.content
time.sleep(0.2)
# Second, download the details of the entry (metadata)
url = self.details_url.format(id=id)
r = self.session.get(url, **pbl.pollers.requestoptions)
details = r.content
metadata = {}
metadata['uid'] = id
metadata['url'] = url
for rname in self.details_regexp:
m = re.search(self.details_regexp[rname], details, re.DOTALL)
if m:
metadata[rname] = m.group(1)
entry = KdeEntry(data, metadata)
if entry.match():
entry.keep()
return True
class Kde(RecentItemsPoller):
""" Basically just set a custom downloader here. """
def __init__(self, name, interval, poll_url, regexp):
RecentItemsPoller.__init__(self, name, interval, poll_url, regexp)
self.downloader = KdeDownloadQueue(name='%s/downloader' % (self.name, ), interval=0.2)
return
| EiNSTeiN-/pbl | pollers/kde.py | Python | gpl-2.0 | 2,406 |
# -*- coding: utf-8 -*-
import wx
import widgetUtils
from .home import homeTab
class audioTab(homeTab):
def create_list(self):
self.lbl = wx.StaticText(self, wx.NewId(), _("Mu&sic"))
self.list = widgetUtils.multiselectionList(self, *[_("Title"), _("Artist"), _("Duration")], style=wx.LC_REPORT, name=_("Music"))
self.list.set_windows_size(0, 160)
self.list.set_windows_size(1, 380)
self.list.set_windows_size(2, 80)
self.list.set_size()
self.list.list.Bind(wx.EVT_LIST_ITEM_ACTIVATED, self.OnKeyDown)
def create_post_buttons(self):
self.postBox = wx.StaticBoxSizer(parent=self, orient=wx.HORIZONTAL, label=_("Actions"))
self.post = wx.Button(self.postBox.GetStaticBox(), -1, _("&Upload audio"))
self.post.Enable(False)
self.play = wx.Button(self.postBox.GetStaticBox(), -1, _("P&lay"))
self.play_all = wx.Button(self.postBox.GetStaticBox(), -1, _("Play &All"))
self.postBox.Add(self.post, 0, wx.ALL, 5)
self.postBox.Add(self.play, 0, wx.ALL, 5)
self.postBox.Add(self.play_all, 0, wx.ALL, 5)
def get_file_to_upload(self):
openFileDialog = wx.FileDialog(self, _("Select the audio file to be uploaded"), "", "", _("Audio files (*.mp3)|*.mp3"), wx.FD_OPEN | wx.FD_FILE_MUST_EXIST)
if openFileDialog.ShowModal() == wx.ID_CANCEL:
return None
return openFileDialog.GetPath()
    def get_download_path(self, filename="", multiple=False):
        if multiple == False:
            d = wx.FileDialog(self, _("Save this file"), "", filename, _("Audio Files(*.mp3)|*.mp3"), wx.FD_SAVE | wx.FD_OVERWRITE_PROMPT)
        else:
            d = wx.DirDialog(None, _("Select a folder to save all files"))
        # Always destroy the dialog, and return None if the user cancelled
        path = None
        if d.ShowModal() == wx.ID_OK:
            path = d.GetPath()
        d.Destroy()
        return path
| manuelcortez/socializer | src/wxUI/tabs/audio.py | Python | gpl-2.0 | 1,849 |
changelog = """
# 0.1-alpha
* added code to check whether scanned filenames contain a BSE, AsB or BEI part, setting that one as the base image. If they don't, a dialog appears so the user can choose from the scanned types (so if it was aliased differently in the Esprit settings, you have to say so)
* tidied up the project dir structure (dropped UI files into the ui folder, icons into icons, the changelog into etc...)
# 0.1-pre-alpha
* filters implemented, making the program partially complete
--known bugs:
    -in the filter tab and view, when selecting 8-bit or 32-bit images the filtered image is not generated properly (values are cut at 255 instead of getting the full 16-bit range)
# 0.0.3-dev
* got rid of some widgets, simplified the UI. From now on one boxWidget (instead of two separate widgets) is used to choose the sample whose tile (vector) overview and 3x3 stitching preview are changed.
# 0.0.2pre-alpha
* changed the way broken Bruker file names are recognized; from now on the function checks whether (y,x) is used once or twice in the file name (Bruker uses it once for mappings and twice for imaging detector file names when 'get an image' is ticked in the Bruker job mapping settings, or when doing any other tiling --THE INCONSISTENCY of BRUKER is infuriating)
* modified the functions for opening a directory of images (the ndimage package from scipy is used)
* input can also be plain images, which now allows stitching bare BSE images as well
--known bugs:
    - stitching sometimes doesn't work with some angle corrections; the tile array size mismatches the selection of the stitched array by one. The bug is being investigated.
    - sometimes Bruker produces broken tags containing just numbers:
    i.e.:
        <1> blah blah </1>
        <2>
    as a workaround, for now it is easier to open the files and remove the broken tags.
    - some of the data are not cleared after the reset button.
    - a small memory leak was observed.
    - pyqtgraph slows down all further processing if 'view all' has not been activated at least once on the graphs
# 0.0.1alpha
* initial version.
* works just with txt input files.
* saves just tif and png""" | sem-geologist/Qstitch | etc/changelog.py | Python | gpl-2.0 | 2,076 |
# -*- coding: utf-8 -*-
# Micha Wildermuth, [email protected] 2020
from qkit.core.instrument_base import Instrument
class mfc_dummy(Instrument):
'''
This is a driver for a dummy mass flow controller as used for sputter deposition monitoring qkit.services.qdepokit.sdi.
Usage:
Initialize with
<name> = qkit.instruments.create('<name>', 'mfc_dummy')
'''
def __init__(self, name):
self.__name__ = __name__
Instrument.__init__(self, name, tags=['virtual'])
self.predef_channels = {'Ar': 0,
'ArO': 0,
'N': 0,
'O': 0}
self.add_function('get_pressure')
self.add_function('get_flow')
def get_pressure(self):
return 1
def get_flow(self, channel=1):
return channel | qkitgroup/qkit | qkit/drivers/mfc_dummy.py | Python | gpl-2.0 | 863 |
from dataclasses import dataclass
from typing import Tuple
Point = Tuple[int, int]
Position = Tuple[int, int]
Size = Tuple[int, int]
@dataclass
class WindowState:
x: int
y: int
width: int
height: int
maximized: bool
# Import Widget last to resolve any import cycles
# from fsui import Widget
| FrodeSolheim/fs-uae-launcher | fswidgets/types.py | Python | gpl-2.0 | 317 |
from .appointment_view_mixin import AppointmentViewMixin
| botswana-harvard/edc-appointment | edc_appointment/view_mixins/__init__.py | Python | gpl-2.0 | 57 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# generated by wxGlade 0.6.3 on Wed Oct 7 22:13:19 2009
#
# Project: VYPe - Conversion from regular expressions to finite automata
# Authors:
# Libor Polčák, [email protected]
# Petr Zemek, [email protected]
# Date: 4.10.2009
#
import wx
from MainFrame import MainFrame
class VYPe2009(wx.App):
def OnInit(self):
wx.InitAllImageHandlers()
app = MainFrame(None, -1, "")
self.SetTopWindow(app)
app.Show()
return 1
# end of class VYPe2009
if __name__ == "__main__":
VYPe2009 = VYPe2009(0)
VYPe2009.MainLoop()
| s3rvac/fit-projects | VYPe/src/gui/VYPe2009.py | Python | gpl-2.0 | 636 |
# -*- coding: utf-8 -*-
# This file is part of the Horus Project
__author__ = 'Jesús Arroyo Torrens <[email protected]>'
__copyright__ = 'Copyright (C) 2014-2015 Mundo Reader S.L.\
Copyright (C) 2013 David Braam from Cura Project'
__license__ = 'GNU General Public License v2 http://www.gnu.org/licenses/gpl2.html'
import os
import sys
import glob
import gettext
from horus.util import system
resourceBasePath = ''
def setBasePath(path):
global resourceBasePath
resourceBasePath = path
def getPathForResource(dir, subdir, resource_name):
assert os.path.isdir(dir), "{p} is not a directory".format(p=dir)
path = os.path.normpath(os.path.join(dir, subdir, resource_name))
return path
def getPathForVersion(name='version'):
return getPathForResource(resourceBasePath, '.', name)
def getPathForImage(name):
return getPathForResource(resourceBasePath, 'images', name)
def getPathForFirmware(name):
return getPathForResource(resourceBasePath, 'firmware', name)
def getPathForTools(name):
if system.isWindows():
path = getPathForResource(resourceBasePath, 'tools/windows', name)
elif system.isDarwin():
path = getPathForResource(resourceBasePath, 'tools/darwin', name)
else:
path = getPathForResource(resourceBasePath, 'tools/linux', name)
return path
def getPathForMesh(name):
return getPathForResource(resourceBasePath, 'meshes', name)
"""def getDefaultMachineProfiles():
path = os.path.normpath(os.path.join(resourceBasePath, 'machine_profiles', '*.ini'))
return glob.glob(path)"""
def setupLocalization(selectedLanguage=None):
# Default to english
languages = ['en']
if selectedLanguage is not None:
for item in getLanguageOptions():
if item[1] == selectedLanguage and item[0] is not None:
languages = [item[0]]
locale_path = os.path.normpath(os.path.join(resourceBasePath, 'locale'))
translation = gettext.translation('horus', locale_path, languages, fallback=True)
translation.install(unicode=True)
def getLanguageOptions():
return [
['en', u'English'],
['es', u'Español'],
['fr', u'Français'],
['de', u'Deutsch'],
['it', u'Italiano'],
['pt', u'Português'],
]
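# Minimal usage sketch (hedged): the base path below is an illustrative
# assumption -- the real value is set by the application at start-up.
#
#   import horus.util.resources as resources
#   resources.setBasePath('/usr/share/horus')
#   resources.setupLocalization(u'English')
#   icon = resources.getPathForImage('horus.ico')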
| 3cky/horus | src/horus/util/resources.py | Python | gpl-2.0 | 2,307 |
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.site.site_header="Scanner Admin"
urlpatterns = patterns('',
# Examples:
url(r'^admin/', include(admin.site.urls), name='admin'),
url(r'^', include('scans.urls')),
)
| sixdub/Minions | minions/urls.py | Python | gpl-2.0 | 270 |
"""Pylons environment configuration"""
import os
from mako.lookup import TemplateLookup
from pylons.configuration import PylonsConfig
from pylons.error import handle_mako_error
from sqlalchemy import engine_from_config
import quickcms.lib.app_globals as app_globals
import quickcms.lib.helpers
from quickcms.config.routing import make_map
from quickcms.model import init_model
def load_environment(global_conf, app_conf):
"""Configure the Pylons environment via the ``pylons.config``
object
"""
config = PylonsConfig()
# Pylons paths
root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
paths = dict(root=root,
controllers=os.path.join(root, 'controllers'),
static_files=os.path.join(root, 'public'),
templates=[os.path.join(root, 'templates')])
# Initialize config with the basic options
config.init_app(global_conf, app_conf, package='quickcms', paths=paths)
config['routes.map'] = make_map(config)
config['pylons.app_globals'] = app_globals.Globals(config)
config['pylons.h'] = quickcms.lib.helpers
# Setup cache object as early as possible
import pylons
pylons.cache._push_object(config['pylons.app_globals'].cache)
# Create the Mako TemplateLookup, with the default auto-escaping
config['pylons.app_globals'].mako_lookup = TemplateLookup(
directories=paths['templates'],
error_handler=handle_mako_error,
module_directory=os.path.join(app_conf['cache_dir'], 'templates'),
input_encoding='utf-8', default_filters=['escape'],
imports=['from markupsafe import escape'])
# Setup the SQLAlchemy database engine
engine = engine_from_config(config, 'sqlalchemy.')
init_model(engine)
# CONFIGURATION OPTIONS HERE (note: all config options will override
# any Pylons config options)
return config
| CentroGeo/QuickCMS | quickcms/config/environment.py | Python | gpl-2.0 | 1,917 |
# -*- coding: utf-8 -*-
#-------------------------------------------------
#-- miki - my kivy like creation tools
#--
#-- microelly 2016
#--
#-- GNU Lesser General Public License (LGPL)
#-------------------------------------------------
def creatorFunction(name):
if name.startswith('Part::'):
return "App.activeDocument().addObject(name,label)"
if name.startswith('So'):
return "coin."+name+'()'
if name.startswith('QtGui'):
return name+"()"
if name in ['Plugger','Manager']:
return 'Animation.create'+name+'()'
return name+'()'
import FreeCAD,Animation,FreeCADGui
import re
import pivy
from pivy import coin
App=FreeCAD
import PySide
from PySide import QtCore, QtGui, QtSvg
import traceback,sys
def sayexc(mess=''):
exc_type, exc_value, exc_traceback = sys.exc_info()
ttt=repr(traceback.format_exception(exc_type, exc_value,exc_traceback))
lls=eval(ttt)
l=len(lls)
l2=[lls[(l-3)],lls[(l-1)]]
FreeCAD.Console.PrintError(mess + "\n" +"--> ".join(l2))
print(mess + "\n" +"--> ".join(l2))
#***************
YourSpecialCreator=Animation.createManager
def fv(name="vertical"):
w=QtGui.QWidget()
w.setStyleSheet("QWidget { font: bold 18px;color:brown;border-style: outset;border-width: 3px;border-radius: 10px;border-color: blue;}")
layout = QtGui.QVBoxLayout()
layout.setAlignment(QtCore.Qt.AlignTop)
w.setLayout(layout)
pB= QtGui.QLabel(name)
layout.addWidget(pB)
    w.setWindowTitle("Test window")
w.show()
w.layout=layout
return w
def fh(name="horizontal"):
w=QtGui.QWidget()
w.setStyleSheet("QWidget { font: bold 18px;color:blue;border-style: outset;border-width: 3px;border-radius: 10px;border-color: blue;}")
layout = QtGui.QHBoxLayout()
layout.setAlignment(QtCore.Qt.AlignLeft)
w.setLayout(layout)
pB= QtGui.QLabel(name)
pB.setStyleSheet("QWidget { font: bold 18px;color:red;border-style: outset;border-width: 3px;border-radius: 10px;border-color: blue;}")
layout.addWidget(pB)
w.show()
w.layout=layout
return w
VerticalLayout=fv
HorzontalLayout=fh
#***************
class Miki():
def __init__(self):
self.objects=[]
self.anchors={}
self.indents=[]
self.olistref=[]
self.indpos=-1
self.roots=[]
self.app=None
self.ids={}
def parse2(self,s):
app=self.app
ls=s.splitlines()
line=0
depth=0
d=[0,0,0,0,0,0,0,0,0,0]
ln=[0,0,0,0,0,0,0,0,0,0]
refs={}
rs=[]
r=None
r=[-1,0,0,'']
for l in ls:
if r:
rs.append(r)
r=[-1,0,0,'']
line += 1
if l.startswith('#:'):
res=re.search("#:\s*(\S.*)",l)
r=[l,line,-1,'cmd',res.group(1)]
continue
if l.startswith('#'):
continue
res=re.search("(\s*)(\S.*)",l)
if res:
l=len(res.group(1))
if l==0:
depth=0
if d[depth]<l:
depth += 1
elif d[depth]>l:
depth -= 1
d[depth]=l
ln[depth]=line
parent=ln[depth-1]
r=[l,line,parent,res.group(2)]
st=res.group(2)
res=re.search("(\S+):\s*\*(\S+)",st)
if res:
r=[l,line,parent,'link',res.group(1),res.group(2),refs[res.group(2)]]
continue
res=re.search("(\S+):\s*&(\S+)\s+(\S.*)",st)
if res:
r=[l,line,parent,"anchor attr",res.group(1),res.group(2),res.group(3)]
refs[res.group(2)]=line
continue
res=re.search("(\S+):\s*&(\S+)",st)
if res:
r=[l,line,parent,"anchor",res.group(1),res.group(2)]
refs[res.group(2)]=line
continue
res=re.search("(\S+[^:]):\s*([^:]\S.*)",st)
if res:
r=[l,line,parent,"att val",res.group(1),eval(res.group(2))]
if res.group(1) =='Name':
rs[parent].append(res.group(2))
else:
res=re.search("(\S+):",st)
if res:
r=[l,line,parent,"obj", res.group(1),'no anchor']
self.lines=rs
def build(self):
for l in self.lines:
if l[3]=='cmd':
try:
exec(l[4])
except:
sayexc(str(["Error exec:",l[4]]))
continue
if l[3]=='obj' or l[3]=='anchor':
name=l[4]
f=creatorFunction(l[4])
if len(l)<7: # no name for object
l.append('')
label=l[6]
h=eval(f)
if len(l)<7:
l.append(None)
l.append(h)
if l[2] != 0:
if l[4]=='Name': continue
if l[3]=='obj' or l[3]=='anchor':
parent=self.lines[l[2]][7]
self.addChild(parent,l[7])
if l[3]=='link':
parent=self.lines[l[2]][7]
try:
child=self.lines[l[6]][7]
self.addChild(parent,child)
except:
                    # link of an attribute
method=l[4]
v=self.lines[l[6]][6]
kk=eval("parent."+l[4])
cnkk=kk.__class__.__name__
if cnkk.startswith('So'):
ex="parent."+method+".setValue(" +str(v) + ")"
exec(ex)
continue
if cnkk =='builtin_function_or_method':
# qt 2...
kk(v)
continue
cn=v.__class__.__name__
if cn=='int' or cn=='float':
ex="parent."+l[4]+"="+str(v)
elif cn=='str':
ex="parent."+l[4]+"='"+v+"'"
else:
                        print( "type not implemented: " + cn)
ex=''
print( "*** "+ex)
exec(ex)
#-----------------------------------
if l[3]=='att val' or l[3]=='anchor attr':
parent=self.lines[l[2]][7]
method=l[4]
if l[3]=='att val':
v=l[5]
else:
v=l[6]
if method=='id':
self.ids[v]=parent
continue
kk=eval("parent."+l[4])
cnkk=kk.__class__.__name__
if cnkk.startswith('So'):
ex="parent."+method+".setValue(" +str(v) + ")"
exec(ex)
continue
if cnkk =='builtin_function_or_method':
# qt 3...
kk(v)
continue
cn=v.__class__.__name__
if cn=='int' or cn=='float':
ex="parent."+l[4]+"="+str(v)
elif cn=='str':
ex="parent."+l[4]+"='"+v+"'"
else:
                    print("type not implemented: " + cn)
ex=''
print("*** "+ex)
exec(ex)
def showSo(self):
for l in self.lines:
if l[2] == 0 and l[0] !=-1:
r=l[7]
if r.__class__.__name__.startswith('So'):
sg = FreeCADGui.ActiveDocument.ActiveView.getSceneGraph()
sg.addChild(r)
def showSo2(self,dokname):
for l in self.lines:
if l[2] == 0 and l[0] !=-1:
r=l[7]
if r.__class__.__name__.startswith('So'):
dok = FreeCADGui.getDocument(dokname)
sg=dok.ActiveView.getSceneGraph()
sg.addChild(r)
def addChild(self,p,c):
cc=c.__class__.__name__
if str(c.__class__).startswith("<type 'PySide.QtGui."):
p.layout.addWidget(c)
return
if cc.startswith('So'):
p.addChild(c)
return
if str(p.TypeId)=='Part::MultiFuse':
z=p.Shapes
z.append(c)
p.Shapes=z
elif str(p.TypeId)=='Part::Compound':
z=p.Links
z.append(c)
p.Links=z
else:
try:
p.addObject(c)
except:
                FreeCAD.Console.PrintError("\naddObject does not work")
                FreeCAD.Console.PrintError(str([p, c]))
def run(self,string):
self.parse2(string)
self.build()
self.showSo()
def report(results=[]):
for r in results:
if r.__class__.__name__.startswith('So'):
sg = FreeCADGui.ActiveDocument.ActiveView.getSceneGraph()
sg.addChild(r)
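# Hedged usage sketch: Miki consumes an indented, YAML-like description (see
# parse2 above) and builds the matching Qt/coin object tree. The snippet is
# illustrative only; the exact grammar is defined by parse2().
#
#   m = Miki()
#   m.run('''
#   VerticalLayout:
#       id: 'main'
#       QtGui.QLabel:
#           setText: "Hello Miki"
#   ''')
#   widget = m.ids['main']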
| microelly2/Animation | Miki.py | Python | gpl-2.0 | 7,029 |
# -*- coding: utf-8 -*-
#Copyright (C) Fiz Vazquez [email protected]
# Modified by dgranda
#This program is free software; you can redistribute it and/or
#modify it under the terms of the GNU General Public License
#as published by the Free Software Foundation; either version 2
#of the License, or (at your option) any later version.
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU General Public License for more details.
#You should have received a copy of the GNU General Public License
#along with this program; if not, write to the Free Software
#Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import logging
import os
import traceback
from lxml import etree
from pytrainer.lib.date import getDateTime
from pytrainer.core.activity import Activity
from sqlalchemy.orm import exc
from pytrainer.lib.xmlUtils import XMLParser
class garmintcxv2():
def __init__(self, parent = None, data_path = None):
if parent is not None:
self.parent = parent
self.pytrainer_main = parent.parent
self.tmpdir = self.pytrainer_main.profile.tmpdir
if data_path is not None:
self.main_data_path = data_path
self.data_path = os.path.dirname(__file__)
self.xmldoc = None
self.activitiesSummary = []
self.activities = []
def getXmldoc(self):
''' Function to return parsed xmlfile '''
return self.xmldoc
def getFileType(self):
return _("Garmin training center database file version 2")
def getActivitiesSummary(self):
return self.activitiesSummary
def getDetails(self, activity, startTime):
logging.debug(">>")
distance = 0
duration = 0
laps = activity.findall(".//{http://www.garmin.com/xmlschemas/TrainingCenterDatabase/v2}Lap")
if laps:
for lap in laps:
lap_duration = float(lap.findtext(".//{http://www.garmin.com/xmlschemas/TrainingCenterDatabase/v2}TotalTimeSeconds"))
lap_distance = float(lap.findtext(".//{http://www.garmin.com/xmlschemas/TrainingCenterDatabase/v2}DistanceMeters"))
logging.debug("Lap distance (m): %f | duration (s): %f" % (lap_distance, lap_duration))
distance += lap_distance
duration += lap_duration
hours = int(duration)//3600
            minutes = (int(duration)//60)%60
seconds = int(duration)%60
duration_hhmmss = "%02d:%02d:%02d" % (hours, minutes, seconds)
logging.debug("Activity distance (m): %f | duration (hh:mm:ss - s): %s - %f" % (distance, duration_hhmmss, duration))
else:
points = activity.findall(".//{http://www.garmin.com/xmlschemas/TrainingCenterDatabase/v2}Trackpoint")
while True:
lastPoint = points[-1]
try:
distance = lastPoint.find(".//{http://www.garmin.com/xmlschemas/TrainingCenterDatabase/v2}DistanceMeters")
if distance is None:
points = points[:-1]
continue
time = lastPoint.find(".//{http://www.garmin.com/xmlschemas/TrainingCenterDatabase/v2}Time")
distance = distance.text
time = time.text
break
except:
#Try again without the last point (i.e work from end until find time and distance)
points = points[:-1]
continue
duration_hhmmss = getDateTime(time)[0]-startTime[0]
logging.debug("Activity distance (m): %f | duration (hh:mm:ss): %s" % (distance, duration_hhmmss))
logging.debug("<<")
return float(distance), duration_hhmmss
def validate(self, xmldoc, schema):
logging.debug(">>")
xmlschema_doc = etree.parse(self.main_data_path + schema)
xmlschema = etree.XMLSchema(xmlschema_doc)
logging.debug("<<")
return xmlschema.validate(xmldoc)
def buildActivitiesSummary(self):
logging.debug(">>")
self.activities = self.getActivities()
for activity in self.activities:
startTime = getDateTime(self.getStartTimeFromActivity(activity))
inDatabase = self.inDatabase(startTime)
sport = self.getSport(activity)
distance, duration = self.getDetails(activity, startTime)
distance = distance / 1000.0
self.activitiesSummary.append((self.activities.index(activity),
inDatabase,
startTime[1].strftime("%Y-%m-%dT%H:%M:%S"),
"%0.2f" % distance ,
str(duration),
sport,
))
logging.debug("<<")
def testFile(self, filename):
'''Check if file is valid TCXv2 one and if yes, retrieve activities from it'''
logging.debug('>>')
logging.debug("Testing %s" %filename)
result = False
try:
xmldoc = etree.parse(filename)
valid_xml = self.validate(xmldoc, "schemas/GarminTrainingCenterDatabase_v2.xsd")
if (valid_xml):
logging.debug("Valid TCXv2 file (%s)" %filename)
self.xmldoc = xmldoc
self.buildActivitiesSummary()
result = True
except:
logging.debug("Traceback: %s" % traceback.format_exc())
logging.debug('<<')
return result
def getActivities(self):
'''Function to return all activities in Garmin training center version 2 file'''
logging.debug('>>')
activities = self.xmldoc.findall(".//{http://www.garmin.com/xmlschemas/TrainingCenterDatabase/v2}Activity")
logging.debug('<<')
return activities
def inDatabase(self, startTime):
#comparing date and start time (sport may have been changed in DB after import)
if startTime is None:
return False
logging.info("Checking if activity from %s exists in db" % startTime[0]) # 2012-10-14 10:02:42+00:00
time = startTime[0].strftime("%Y-%m-%dT%H:%M:%SZ")
try:
self.parent.parent.ddbb.session.query(Activity).filter(Activity.date_time_utc == time).one()
return True
except exc.NoResultFound:
return False
def getSport(self, activity):
try:
sport = activity.get("Sport")
except:
sport = "import"
return sport
def getStartTimeFromActivity(self, activity):
timeElement = activity.find(".//{http://www.garmin.com/xmlschemas/TrainingCenterDatabase/v2}Id")
if timeElement is None:
return None
else:
return timeElement.text
def getGPXFile(self, ID, file_id):
""" Generate GPX file based on activity ID
Returns (sport, GPX filename)
"""
sport = None
gpxFile = None
activityID = int(ID)
activitiesCount = len(self.activities)
if activitiesCount > 0 and activityID < activitiesCount:
gpxFile = "%s/garmin-tcxv2-%s-%d.gpx" % (self.tmpdir, file_id, activityID)
activity = self.activities[int(activityID)]
sport = self.getSport(activity)
self.createGPXfile(gpxFile, activity)
return sport, gpxFile
def createGPXfile(self, gpxfile, activity):
""" Function to transform a Garmin Training Center v2 Track to a valid GPX+ file"""
xslt_doc = etree.parse(self.data_path+"/translate_garmintcxv2.xsl")
transform = etree.XSLT(xslt_doc)
xml_doc = activity
result_tree = transform(xml_doc)
result_tree.write(gpxfile, xml_declaration=True, encoding='UTF-8')
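# Hedged usage sketch: pytrainer's import wizard instantiates this class with
# itself as ``parent`` (needed by inDatabase for the database lookup) and the
# installed data path; both names below are assumptions for illustration.
#
#   importer = garmintcxv2(parent=wizard, data_path=site_path)
#   if importer.testFile('/tmp/activity.tcx'):
#       sport, gpx = importer.getGPXFile("0", "myfile")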
| pytrainer/pytrainer | imports/file_garmintcxv2.py | Python | gpl-2.0 | 8,236 |
# -*- coding: utf-8 -*-
# Gedit External Tools plugin
# Copyright (C) 2006 Steve Frécinaux <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import os
import re
import locale
import platform
class Singleton(object):
_instance = None
def __new__(cls, *args, **kwargs):
if not cls._instance:
cls._instance = super(Singleton, cls).__new__(
cls, *args, **kwargs)
cls._instance.__init_once__()
return cls._instance
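# Illustrative example of the singleton machinery above: every instantiation
# returns the same object, and __init_once__ runs only for the first one.
#
#   class Config(Singleton):
#       def __init_once__(self):
#           self.values = {}
#
#   assert Config() is Config()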
class ToolLibrary(Singleton):
def __init_once__(self):
self.locations = []
def set_locations(self, datadir):
self.locations = []
        if platform.system() != 'Windows':
for d in self.get_xdg_data_dirs():
self.locations.append(os.path.join(d, 'gedit-2', 'plugins', 'externaltools', 'tools'))
self.locations.append(datadir)
# self.locations[0] is where we save the custom scripts
        if platform.system() == 'Windows':
self.locations.insert(0, os.path.expanduser('~/gedit/tools'))
else:
self.locations.insert(0, os.path.expanduser('~/.gnome2/gedit/tools'))
if not os.path.isdir(self.locations[0]):
os.makedirs(self.locations[0])
self.tree = ToolDirectory(self, '')
self.import_old_xml_store()
else:
self.tree = ToolDirectory(self, '')
# cf. http://standards.freedesktop.org/basedir-spec/latest/
def get_xdg_data_dirs(self):
dirs = os.getenv('XDG_DATA_DIRS')
if dirs:
dirs = dirs.split(os.pathsep)
else:
dirs = ('/usr/local/share', '/usr/share')
return dirs
    # This function is meant to be run only once, when the tools directory is
    # created. It imports any tools that were saved in the old XML
    # storage file.
def import_old_xml_store(self):
import xml.etree.ElementTree as et
filename = os.path.expanduser('~/.gnome2/gedit/gedit-tools.xml')
if not os.path.isfile(filename):
return
print "External tools: importing old tools into the new store..."
xtree = et.parse(filename)
xroot = xtree.getroot()
for xtool in xroot:
for i in self.tree.tools:
if i.name == xtool.get('label'):
tool = i
break
else:
tool = Tool(self.tree)
tool.name = xtool.get('label')
tool.autoset_filename()
self.tree.tools.append(tool)
tool.comment = xtool.get('description')
tool.shortcut = xtool.get('accelerator')
tool.applicability = xtool.get('applicability')
tool.output = xtool.get('output')
tool.input = xtool.get('input')
tool.save_with_script(xtool.text)
def get_full_path(self, path, mode='r', system = True, local = True):
assert (system or local)
if path is None:
return None
if mode == 'r':
if system and local:
locations = self.locations
elif local and not system:
locations = [self.locations[0]]
elif system and not local:
locations = self.locations[1:]
else:
raise ValueError("system and local can't be both set to False")
for i in locations:
p = os.path.join(i, path)
if os.path.lexists(p):
return p
return None
else:
path = os.path.join(self.locations[0], path)
dirname = os.path.dirname(path)
if not os.path.isdir(dirname):
os.mkdir(dirname)
return path
class ToolDirectory(object):
def __init__(self, parent, dirname):
super(ToolDirectory, self).__init__()
self.subdirs = list()
self.tools = list()
if isinstance(parent, ToolDirectory):
self.parent = parent
self.library = parent.library
else:
self.parent = None
self.library = parent
self.dirname = dirname
self._load()
def listdir(self):
elements = dict()
for l in self.library.locations:
d = os.path.join(l, self.dirname)
if not os.path.isdir(d):
continue
for i in os.listdir(d):
elements[i] = None
keys = elements.keys()
keys.sort()
return keys
def _load(self):
for p in self.listdir():
path = os.path.join(self.dirname, p)
full_path = self.library.get_full_path(path)
if os.path.isdir(full_path):
self.subdirs.append(ToolDirectory(self, p))
elif os.path.isfile(full_path) and os.access(full_path, os.X_OK):
self.tools.append(Tool(self, p))
def get_path(self):
if self.parent is None:
return self.dirname
else:
return os.path.join(self.parent.get_path(), self.dirname)
path = property(get_path)
def get_name(self):
return os.path.basename(self.dirname)
name = property(get_name)
def delete_tool(self, tool):
# Only remove it if it lays in $HOME
if tool in self.tools:
path = tool.get_path()
if path is not None:
filename = os.path.join(self.library.locations[0], path)
if os.path.isfile(filename):
os.unlink(filename)
self.tools.remove(tool)
return True
else:
return False
def revert_tool(self, tool):
# Only remove it if it lays in $HOME
filename = os.path.join(self.library.locations[0], tool.get_path())
if tool in self.tools and os.path.isfile(filename):
os.unlink(filename)
tool._load()
return True
else:
return False
class Tool(object):
RE_KEY = re.compile('^([a-zA-Z_][a-zA-Z0-9_.\-]*)(\[([a-zA-Z_@]+)\])?$')
def __init__(self, parent, filename = None):
super(Tool, self).__init__()
self.parent = parent
self.library = parent.library
self.filename = filename
self.changed = False
self._properties = dict()
self._transform = {
'Languages': [self._to_list, self._from_list]
}
self._load()
def _to_list(self, value):
if value.strip() == '':
return []
else:
return map(lambda x: x.strip(), value.split(','))
def _from_list(self, value):
return ','.join(value)
def _parse_value(self, key, value):
if key in self._transform:
return self._transform[key][0](value)
else:
return value
def _load(self):
if self.filename is None:
return
filename = self.library.get_full_path(self.get_path())
if filename is None:
return
        fp = open(filename, 'r', 1)
in_block = False
lang = locale.getlocale(locale.LC_MESSAGES)[0]
for line in fp:
if not in_block:
in_block = line.startswith('# [Gedit Tool]')
continue
if line.startswith('##') or line.startswith('# #'): continue
if not line.startswith('# '): break
try:
(key, value) = [i.strip() for i in line[2:].split('=', 1)]
m = self.RE_KEY.match(key)
if m.group(3) is None:
self._properties[m.group(1)] = self._parse_value(m.group(1), value)
elif lang is not None and lang.startswith(m.group(3)):
self._properties[m.group(1)] = self._parse_value(m.group(1), value)
except ValueError:
break
fp.close()
self.changed = False
def _set_property_if_changed(self, key, value):
if value != self._properties.get(key):
self._properties[key] = value
self.changed = True
def is_global(self):
return self.library.get_full_path(self.get_path(), local=False) is not None
def is_local(self):
return self.library.get_full_path(self.get_path(), system=False) is not None
def get_path(self):
if self.filename is not None:
return os.path.join(self.parent.get_path(), self.filename)
else:
return None
path = property(get_path)
    # This command is the one that is meant to be run
    # (later, could have an Exec key or something)
def get_command(self):
return self.library.get_full_path(self.get_path())
command = property(get_command)
def get_applicability(self):
applicability = self._properties.get('Applicability')
if applicability: return applicability
return 'all'
def set_applicability(self, value):
self._set_property_if_changed('Applicability', value)
applicability = property(get_applicability, set_applicability)
def get_name(self):
name = self._properties.get('Name')
if name: return name
return os.path.basename(self.filename)
def set_name(self, value):
self._set_property_if_changed('Name', value)
name = property(get_name, set_name)
def get_shortcut(self):
shortcut = self._properties.get('Shortcut')
if shortcut: return shortcut
return None
def set_shortcut(self, value):
self._set_property_if_changed('Shortcut', value)
shortcut = property(get_shortcut, set_shortcut)
def get_comment(self):
comment = self._properties.get('Comment')
if comment: return comment
return self.filename
def set_comment(self, value):
self._set_property_if_changed('Comment', value)
comment = property(get_comment, set_comment)
def get_input(self):
input = self._properties.get('Input')
if input: return input
return 'nothing'
def set_input(self, value):
self._set_property_if_changed('Input', value)
input = property(get_input, set_input)
def get_output(self):
output = self._properties.get('Output')
if output: return output
return 'output-panel'
def set_output(self, value):
self._set_property_if_changed('Output', value)
output = property(get_output, set_output)
def get_save_files(self):
save_files = self._properties.get('Save-files')
if save_files: return save_files
return 'nothing'
def set_save_files(self, value):
self._set_property_if_changed('Save-files', value)
save_files = property(get_save_files, set_save_files)
def get_languages(self):
languages = self._properties.get('Languages')
if languages: return languages
return []
def set_languages(self, value):
self._set_property_if_changed('Languages', value)
languages = property(get_languages, set_languages)
def has_hash_bang(self):
if self.filename is None:
return True
filename = self.library.get_full_path(self.get_path())
if filename is None:
return True
fp = open(filename, 'r', 1)
for line in fp:
if line.strip() == '':
continue
return line.startswith('#!')
# There is no property for this one because this function is quite
# expensive to perform
def get_script(self):
if self.filename is None:
return ["#!/bin/sh\n"]
filename = self.library.get_full_path(self.get_path())
if filename is None:
return ["#!/bin/sh\n"]
fp = open(filename, 'r', 1)
lines = list()
# before entering the data block
for line in fp:
if line.startswith('# [Gedit Tool]'):
break
lines.append(line)
# in the block:
for line in fp:
if line.startswith('##'): continue
if not (line.startswith('# ') and '=' in line):
                # after the block: strip one empty line (if present)
if line.strip() != '':
lines.append(line)
break
# after the block
for line in fp:
lines.append(line)
fp.close()
return lines
def _dump_properties(self):
lines = ['# [Gedit Tool]']
for item in self._properties.iteritems():
if item[0] in self._transform:
lines.append('# %s=%s' % (item[0], self._transform[item[0]][1](item[1])))
elif item[1] is not None:
lines.append('# %s=%s' % item)
return '\n'.join(lines) + '\n'
def save_with_script(self, script):
filename = self.library.get_full_path(self.filename, 'w')
fp = open(filename, 'w', 1)
# Make sure to first print header (shebang, modeline), then
# properties, and then actual content
header = []
content = []
inheader = True
# Parse
for line in script:
line = line.rstrip("\n")
if not inheader:
content.append(line)
elif line.startswith('#!'):
# Shebang (should be always present)
header.append(line)
elif line.strip().startswith('#') and ('-*-' in line or 'ex:' in line or 'vi:' in line or 'vim:' in line):
header.append(line)
else:
content.append(line)
inheader = False
# Write out header
for line in header:
fp.write(line + "\n")
fp.write(self._dump_properties())
fp.write("\n")
for line in content:
fp.write(line + "\n")
fp.close()
os.chmod(filename, 0750)
self.changed = False
def save(self):
if self.changed:
self.save_with_script(self.get_script())
def autoset_filename(self):
if self.filename is not None:
return
dirname = self.parent.path
if dirname != '':
dirname += os.path.sep
basename = self.name.lower().replace(' ', '-').replace('/', '-')
if self.library.get_full_path(dirname + basename):
i = 2
while self.library.get_full_path(dirname + "%s-%d" % (basename, i)):
i += 1
basename = "%s-%d" % (basename, i)
self.filename = basename
if __name__ == '__main__':
library = ToolLibrary()
def print_tool(t, indent):
print indent * " " + "%s: %s" % (t.filename, t.name)
def print_dir(d, indent):
print indent * " " + d.dirname + '/'
for i in d.subdirs:
print_dir(i, indent+1)
for i in d.tools:
print_tool(i, indent+1)
print_dir(library.tree, 0)
# ex:ts=4:et:
| nacho/gedit | plugins/externaltools/tools/library.py | Python | gpl-2.0 | 15,886 |
## Copyright (C) 2012 Red Hat Inc., Bryn M. Reeves <[email protected]>
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
from sos.plugins import Plugin, RedHatPlugin
import os
class Cloudforms(Plugin, RedHatPlugin):
"""CloudForms related information
"""
plugin_name = 'cloudforms'
packages = ["katello", "katello-common",
"katello-headpin", "aeoleus-conductor"]
files = ["/usr/share/katello/script/katello-debug",
"aeolus-debug"]
def setup(self):
katello_debug = "/usr/share/katello/script/katello-debug"
aeolus_debug = "aeolus-debug"
if os.path.isfile(katello_debug):
katello_debug_path = os.path.join(self.get_cmd_path(), "katello-debug")
self.add_cmd_output("%s --notar -d %s" % (katello_debug, katello_debug_path))
if os.path.isfile(aeolus_debug):
aeolus_debug_path = os.path.join(self.get_cmd_path(), "aeolus-debug")
self.add_cmd_output("%s --notar -d %s" % (aeolus_debug, aeolus_debug_path))
| beagles/sosreport-neutron | sos/plugins/cloudforms.py | Python | gpl-2.0 | 1,683 |
# This file is part of creddump.
#
# creddump is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# creddump is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with creddump. If not, see <http://www.gnu.org/licenses/>.
"""
@author: Csaba Barta
@license: GNU General Public License 2.0 or later
@contact: [email protected]
"""
from framework.win32.rawreg import *
from framework.addrspace import HiveFileAddressSpace
from framework.win32.hashdump import get_bootkey,str_to_key
from struct import unpack
from Crypto.Hash import SHA256
from Crypto.Cipher import AES
def get_lsa_key(secaddr, bootkey):
root = get_root(secaddr)
if not root:
return None
enc_reg_key = open_key(root, ["Policy", "PolEKList"])
if not enc_reg_key:
return None
enc_reg_value = enc_reg_key.ValueList.List[0]
if not enc_reg_value:
return None
obf_lsa_key = secaddr.read(enc_reg_value.Data.value,
enc_reg_value.DataLength.value)
lsa_key = decrypt_secret(obf_lsa_key, bootkey)
return lsa_key[68:100]
def decrypt_secret(data, key):
if not data:
return None
aeskey = ""
sha256 = SHA256.new()
sha256.update(key)
for i in range(1000):
sha256.update(data[28:60])
aeskey = sha256.digest()
secret = ""
aes = AES.new(aeskey)
for key_offset in range(0, len(data) - 60, 16):
if (key_offset + 16) <= len(data) - 60:
secret = secret + aes.decrypt(data[60 + key_offset:60 + key_offset + 16])
return secret
def get_secret_by_name(secaddr, name, lsakey):
root = get_root(secaddr)
if not root:
return None
enc_secret_key = open_key(root, ["Policy", "Secrets", name, "CurrVal"])
if not enc_secret_key:
return None
enc_secret_value = enc_secret_key.ValueList.List[0]
if not enc_secret_value:
return None
enc_secret = secaddr.read(enc_secret_value.Data.value,
enc_secret_value.DataLength.value)
if not enc_secret:
return None
secret = decrypt_secret(enc_secret, lsakey)
(secret_len,) = unpack("<L", secret[:4])
# return secret
return secret[16:16 + secret_len]
def get_secrets(sysaddr, secaddr):
root = get_root(secaddr)
if not root:
return None
bootkey = get_bootkey(sysaddr)
lsakey = get_lsa_key(secaddr, bootkey)
secrets_key = open_key(root, ["Policy", "Secrets"])
if not secrets_key:
return None
secrets = {}
for key in subkeys(secrets_key):
sec_val_key = open_key(key, ["CurrVal"])
if not sec_val_key:
continue
enc_secret_value = sec_val_key.ValueList.List[0]
if not enc_secret_value:
continue
enc_secret = secaddr.read(enc_secret_value.Data.value,
enc_secret_value.DataLength.value)
if not enc_secret:
continue
secret = decrypt_secret(enc_secret, lsakey)
(secret_len,) = unpack("<L", secret[:4])
# secrets[key.Name] = secret
secrets[key.Name] = secret[16:16+secret_len]
return secrets
def get_file_secrets(sysfile, secfile):
sysaddr = HiveFileAddressSpace(sysfile)
secaddr = HiveFileAddressSpace(secfile)
return get_secrets(sysaddr, secaddr)
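# Hedged usage sketch: the hive file names below are illustrative -- they
# stand for the SYSTEM and SECURITY hives copied from a Windows Vista (or
# later) system.
#
#   secrets = get_file_secrets('SYSTEM', 'SECURITY')
#   if secrets:
#       for name, secret in secrets.items():
#           print name, secret.encode('hex')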
| HarmJ0y/ImpDump | framework/win32/lsasecretsw2k8.py | Python | gpl-2.0 | 3,709 |
# -*- coding: utf-8 -*-
"""
(c) 2015-2016 - Copyright Vivek Anand
Authors:
Vivek Anand <[email protected]>
"""
from anitya.lib.backends import BaseBackend, get_versions_by_regex
from anitya.lib.exceptions import AnityaPluginException
REGEX = 'class="name">([^<]*[^tip])</td'
class BitBucketBackend(BaseBackend):
''' The custom class for projects hosted on bitbucket.org
This backend allows to specify a version_url and a regex that will
be used to retrieve the version information.
'''
name = 'BitBucket'
examples = [
'https://bitbucket.org/zzzeek/sqlalchemy',
'https://bitbucket.org/cherrypy/cherrypy',
]
@classmethod
def get_version(cls, project):
''' Method called to retrieve the latest version of the projects
provided, project that relies on the backend of this plugin.
:arg Project project: a :class:`model.Project` object whose backend
corresponds to the current plugin.
:return: the latest version found upstream
:return type: str
:raise AnityaPluginException: a
:class:`anitya.lib.exceptions.AnityaPluginException` exception
when the version cannot be retrieved correctly
'''
return cls.get_ordered_versions(project)[-1]
@classmethod
def get_versions(cls, project):
''' Method called to retrieve all the versions (that can be found)
        of the project provided, a project that relies on the backend of
this plugin.
:arg Project project: a :class:`model.Project` object whose backend
corresponds to the current plugin.
:return: a list of all the possible releases found
:return type: list
:raise AnityaPluginException: a
:class:`anitya.lib.exceptions.AnityaPluginException` exception
when the versions cannot be retrieved correctly
'''
if project.version_url:
url_template = 'https://bitbucket.org/%(version_url)s/'\
'downloads?tab=tags'
version_url = project.version_url.replace(
'https://bitbucket.org/', '')
url = url_template % {'version_url': version_url}
elif project.homepage.startswith('https://bitbucket.org'):
url = project.homepage
if url.endswith('/'):
                url = url[:-1]  # strip the trailing slash
url += '/downloads?tab=tags'
else:
raise AnityaPluginException(
'Project %s was incorrectly set-up' % project.name)
return get_versions_by_regex(url, REGEX, project)
| pombredanne/anitya | anitya/lib/backends/bitbucket.py | Python | gpl-2.0 | 2,631 |
#!/usr/bin/python
import webapp
class AcortadorApp(webapp.webApp):
Dic = {}
Index = 0
Form = '<form name="input" method="post">\nURL: <input type="text" name="URL">\n<input type="submit" value="Enviar">\n</form>'
def parse(self, request):
lines = request.splitlines()
method = lines[0].split(' ')[0]
resource = lines[0].split(' ')[1]
if method == 'GET':
url = ""
elif method == "POST":
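            # The form data arrives URL-encoded in the last line of the
            # request body, e.g. "URL=http%3A%2F%2Fexample.com".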
if lines[-1][0:4] == "URL=":
url = lines[-1].split('=')[1]
else:
url = ""
parsedRequest = [method,resource,url]
print parsedRequest
return parsedRequest
def process(self,parsedRequest):
htmlBody = ''
repetido = 0
if parsedRequest[0] == "GET":
htmlBody = "<h1>Bienvenido!! Introduce una URL para acortar</h1>" + self.Form
elif parsedRequest[0] == "POST":
if parsedRequest[2][0:13] == "http%3A%2F%2F":
parsedRequest[2] = "http://" + parsedRequest[2][13:]
elif parsedRequest[2][0:14] == "https%3A%2F%2F":
parsedRequest[2] = "https://" + parsedRequest[2][14:]
else:
parsedRequest[2] = "http://" + parsedRequest[2]
for key in self.Dic.keys():
if key == parsedRequest[2]:
repetido = 1
htmlBody = htmlBody + '<p><a href="' + key + '">' + key + '</a> -- <a href="' + key + '">' + self.Dic[key] + '</a></p>'
if repetido == 0:
self.Dic[parsedRequest[2]] = "localhost:1234/" + str(self.Index)
self.Index = self.Index + 1
htmlBody = htmlBody + '<p><a href="' + parsedRequest[2] + '">' + parsedRequest[2] + '</a> -- <a href="' + parsedRequest[2] + '">' + self.Dic[parsedRequest[2]] + '</a></p>'
htmlBody = "<h1>Nueva URL acortada y agregada. Introduce otra si quieres</h1>" + self.Form + htmlBody
else:
htmlBody = "<h1>URL almacenada previamente. Introduce otra si quieres</h1>" + self.Form + htmlBody
return("200 OK", "<html><body>" + htmlBody + "</html></body>")
if __name__ == "__main__":
    testAcortadorApp = AcortadorApp("localhost", 1234)
| rodrigobersan/X-Serv-18.1-Practica1 | webapp-acortador.py | Python | gpl-2.0 | 1,938 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('here', '0022_auto_20151222_2207'),
]
operations = [
migrations.AddField(
model_name='user',
name='avatarThumb',
field=models.FileField(default=b'null', upload_to=b'./avatar/thumb', blank=True),
),
]
| chenjunqian/here | HereServer-deprecated/here/migrations/0023_user_avatarthumb.py | Python | gpl-2.0 | 443 |
from django.conf.urls import url
import userprofile.admin
urlpatterns = [
url(r'^admin/user/download/$', userprofile.admin.download_view,
name='user_list_download'),
]
| oriel-hub/api | django/idsapi/userprofile/urls.py | Python | gpl-2.0 | 182 |
from nose.tools import assert_not_equal
from gcodeutils.tests import open_gcode_file, gcode_eq
__author__ = 'olivier'
def test_identity_equality():
gcode_eq(open_gcode_file('simple1.gcode'), open_gcode_file('simple1.gcode'))
def test_trivial_difference():
assert_not_equal(open_gcode_file('simple1.gcode'), open_gcode_file('simple2.gcode'))
def test_ignore_non_command():
gcode_eq(open_gcode_file('empty_for_good.gcode'), open_gcode_file('empty_skeinforge_format.gcode'))
def test_precision():
"""makes sure that gcode are equals if the numeric values are similar enough
and different is the numeric values are far enough"""
gcode_eq(open_gcode_file('simple1.gcode'), open_gcode_file('simple1_equivalent.gcode'))
assert_not_equal(open_gcode_file('simple1.gcode'), open_gcode_file('simple1_slightly_different.gcode'))
| zeograd/gcodeutils | gcodeutils/tests/test_equality.py | Python | gpl-2.0 | 855 |
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
from django import forms
from models import Player
def get_transfer_choices(player_id):
players = Player.objects.all()
player_list = [('bank', 'Bank')]
for player in players:
if player.id != player_id:
player_list.append((player.id, player.name))
return player_list
class TransferForm(forms.Form):
amount = forms.FloatField()
to = forms.ChoiceField(label='Transfer To', choices=[])
def __init__(self, *args, **kwargs):
super(TransferForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.add_input(Submit('submit', 'Transfer'))
def clean_amount(self):
amount = self.cleaned_data['amount']
if amount <= 0:
raise forms.ValidationError("Amount must be > 0.")
return amount
class IncomeForm(forms.Form):
amount = forms.FloatField()
def __init__(self, *args, **kwargs):
super(IncomeForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.add_input(Submit('submit', 'Income'))
def clean_amount(self):
amount = self.cleaned_data['amount']
if amount <= 0:
raise forms.ValidationError("Amount must be > 0.")
return amount
class PlayerForm(forms.ModelForm):
class Meta:
model = Player
fields = ('name',)
def __init__(self, *args, **kwargs):
super(PlayerForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.add_input(Submit('submit', 'Submit'))
| beneboy/pyopoly-banker | bank/forms.py | Python | gpl-2.0 | 1,635 |
#!/usr/bin/python
"""
English to Piglatin translator
It takes a word or a whole sentence from stdin,
lower cases it and transforms it then
based upon the first character of
the given string.
"""
"""
detectWords()
Takes a sentence and splits it by searching
for whitespaces.
input arguments:
string - Sentence or word to examine
return values:
words - array of single word strings
"""
def detectWords(string):
offset = 0
words = []
for idx in range (0, len(string)):
if string[idx] == " ":
words.append(string[offset:idx])
offset = idx + 1
if idx == len(string) - 1:
words.append(string[offset:idx + 1])
#print "Found %d word(s)!" % len(words)
#print words
return words
"""
stripSpecialChars()
Examines a given string for
special characters, extracts them
and stores them into an extra array.
input arguments:
string - array of word strings
return values:
specialChars - Tuple of essential values [word index,
character index, word length, special character]
strippedWords - Array of words which are stripped from
special characters
"""
def stripSpecialChars(words):
specialChars = []
strippedWords = []
for idx in range (0, len(words)):
word = words[idx]
appendAsIs = True
for idx2 in range (0, len(word)):
if word[idx2] == "!" or word[idx2] == "?" or word[idx2] == "." or word[idx2] == ",":
t = idx, idx2, len(word), word[idx2] # [pos in sentence, pos in word, len of word, special char]
specialChars.append(t)
temp = word[ 0 : idx2 ] + word[ idx2 + 1 : len(word) ]
strippedWords.append(temp)
appendAsIs = False
if appendAsIs:
strippedWords.append(word)
return specialChars, strippedWords
"""
en2pl()
This function takes a word string
and tranforms it into it's piglatin
version. First the word is checked
for correctness, means that it must not
be empty or contain any special characters.
Then, depending on the starting character,
the word gets transformed.
a) If the first char is a vowel, then only
the pyg extension is appended to the word
example: "ape" --> "ape-ay"
b) If the first char is a consonant, the first
char is extracted from the string, reappended
at the end in addition to the pyg extension
example: "tiger" --> "iger-t-ay"
input arguments:
string - Word to examine
return values:
       new_word - transformed piglatin word
"""
def en2pl(string):
#pig extension
pyg = 'ay'
#print "Current string is: '" + string + "'"
#check if it's a valid string. Means not empty and only ASCII chars
if len(string) > 0 and string.isalpha():
        #lower case the input
word = string.lower()
#print "lower: " + word
#get first char
first = word[0]
#print "First: " + first
#check for vowels
if first == 'a' or first == 'o' or first == 'e' or first == 'i' or first == 'u':
#just append pig extension in this case
new_word = string + pyg
#return new word
return new_word
else:
#Take the rest of the word append first char and the
#the pig extension
new_word = string[1:len(string)] + first + pyg
            #print new word
return new_word
else:
#Give warning in this case
return '>>Empty or invalid string<<'
"""
doConversion()
This function takes all cleaned words and all
extracted special chars and applies the piglatin
conversion, rematches the special chars to the
converted words, and puts the whole sentence
back together.
input arguments:
specialChars - List of tuple which contains essential information
about each special char extracted
strippedWords - List of all bare stripped strings which resemble
to the sentence in the end
"""
def doConversion(specialChars, strippedWords):
pl_string = ""
#loop through all words
for idx in range (0, len(strippedWords)):
next = en2pl(strippedWords[idx])
next_sp = ""
#loop through all special chars
for idx2 in range (0, len(specialChars)):
#if current word has a special char
if idx == specialChars[idx2][0]:
#print specialChars[idx2]
#if special char is at end of word
if specialChars[idx2][1] == specialChars[idx2][2] - 1:
next_sp = next + specialChars[idx2][3]
#if special char is in the middle of the word
else:
next_sp = next[0:specialChars[idx2][1]] + specialChars[idx2][3] + next[specialChars[idx2][1]:specialChars[idx2][2] + 1]
if next_sp == "":
next_sp = next
pl_string += next_sp + " "
return pl_string
"""
translate()
Wrapper function for easy usage
of the translator.
input arguments:
org_string - Word/sentence to examine
return values:
translated piglatin sentence or word
"""
def translate(org_string):
words = detectWords(org_string)
raw = stripSpecialChars(words)
return doConversion(raw[0], raw[1])
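# Example, worked by hand from the rules above:
#   translate("hello world!") returns "ellohay orldway! " (note the trailing space).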
"""
main block
"""
original = raw_input('Enter a word or sentence: ')
print "In pig latin this would be: " + translate(original)
| shitsumon/random_py_stuff | english2piglatin.py | Python | gpl-2.0 | 4,939 |
# coding=utf-8
from misc.brain import Brain
from core.baseclass import PluginBase
from random import random, choice
from pkg_resources import resource_filename
class MioMarkov(PluginBase):
def __init__(self, database, handler):
super().__init__(database, handler, 'MioMarkov')
self.brain = Brain(resource_filename('misc', 'dota2_cobe.sql'))
def execute_titlepost(self, title_only):
pass
def on_new_message(self, message):
pass
def update_procedure(self, thing, created, lifetime, last_updated, interval):
pass
def execute_link(self, link_submission):
pass
def execute_submission(self, submission):
if submission.subreddit.display_name.lower() == 'dota2' and random() <= 0.001:
reply = self.produce_dank_maymays(submission.selftext)
if reply:
self.add_comment(submission.name, reply)
return True
def execute_comment(self, comment):
if comment.subreddit.display_name.lower() == 'dota2' or random() <= 0.001:
reply = self.produce_dank_maymays(comment.body)
if reply:
self.add_comment(comment.name, reply)
return True
def produce_dank_maymays(self, message):
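        # Ask the brain for a reply longer than 10 and shorter than 210
        # characters, retrying up to five times; give up and return an empty
        # string otherwise.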
msg = ''
i = 0
        while not (10 < len(msg) < 210):
if i > 4:
msg = ''
break
msg = self.brain.reply(message)
i += 1
return msg
def init(database, handler):
return MioMarkov(database, handler)
if __name__ == '__main__':
from core.logprovider import setup_logging
from core.handlers import RoverHandler
from core.database import Database
logger = setup_logging('DEBUG')
mm = MioMarkov(Database(), RoverHandler())
mm.test_single_comment('cvfmf9e')
| DarkMio/RedditRoverPlugins | MioMarkov.py | Python | gpl-2.0 | 1,846 |
class Solution(object):
# @staticmethod
# def makeRect(leftLowerCornerX, leftLowerCornerY,
# RightUpperCornerX, RightUpperCornerY):
# """
# :type leftLowerCornerX: int
# :type leftLowerCornerY: int
# :type RightUpperCornerX: int
# :type RightUpperCornerY: int
# :rtype E: list
# """
# return [{'x': leftLowerCornerX, 'y': RightUpperCornerY},
# {'x': leftLowerCornerX, 'y': leftLowerCornerY},
# {'x': RightUpperCornerX, 'y': RightUpperCornerY},
# {'x': RightUpperCornerX, 'y': leftLowerCornerY}]
def computeArea(self, A, B, C, D, E, F, G, H):
"""
:type A: int
:type B: int
:type C: int
:type D: int
:type E: int
:type F: int
:type G: int
:type H: int
:rtype: int
"""
rect1 = {'leftLower': {'x': A, 'y': B}, 'rightUpper': {'x': C, 'y': D}}
rect2 = {'leftLower': {'x': E, 'y': F}, 'rightUpper': {'x': G, 'y': H}}
area1 = (abs(rect1['rightUpper']['x'] - rect1['leftLower']['x'])
* abs(rect1['rightUpper']['y'] - rect1['leftLower']['y']))
area2 = (abs(rect2['rightUpper']['x'] - rect2['leftLower']['x'])
* abs(rect2['rightUpper']['y'] - rect2['leftLower']['y']))
if (rect1['rightUpper']['y'] < rect2['leftLower']['y'] or
rect1['rightUpper']['x'] < rect2['leftLower']['x'] or
rect1['leftLower']['y'] > rect2['rightUpper']['y'] or
rect1['leftLower']['x'] > rect2['rightUpper']['x']):
# Two rectangles do not overlap
return area1 + area2
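        # The rectangles overlap: the intersection corners are the min of the
        # two upper-right corners and the max of the two lower-left corners.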
p1x = min(rect1['rightUpper']['x'], rect2['rightUpper']['x'])
p1y = min(rect1['rightUpper']['y'], rect2['rightUpper']['y'])
p2x = max(rect1['leftLower']['x'], rect2['leftLower']['x'])
p2y = max(rect1['leftLower']['y'], rect2['leftLower']['y'])
area3 = abs(p1x - p2x) * abs(p1y - p2y)
return area1 + area2 - area3
| TanakritBenz/leetcode-adventure | Rectangle_Area.py | Python | gpl-2.0 | 2,070 |
# this module is populated at initialization from the c++ part of PythonUI
"""Runtime variables, populated at yade startup."""
# default value
hasDisplay=False
# find out about which ipython version we use -- 0.10* and 0.11 are supported, but they have different internals
import IPython
try: # attempt to get numerical version
ipython_version=int(IPython.__version__.split('.',2)[1]) ## convert '0.10' to 10, '0.11.alpha1.bzr.r1223' to 11
except ValueError:
print 'WARN: unable to extract IPython version from %s, defaulting to 10'%(IPython.__version__)
ipython_version=10
if (ipython_version < 10): #set version 10 for very old systems
newipver=10
ipython_version=newipver
| bchareyre/trial | py/runtime.py | Python | gpl-2.0 | 684 |
import sys
import os
import argparse
import re
from time import time
import boto
from boto import ec2
from boto import rds
from boto import route53
import ConfigParser
from collections import defaultdict
try:
import json
except ImportError:
import simplejson as json
class Ec2Inventory(object):
def _empty_inventory(self):
return {"_meta" : {"hostvars" : {}}}
def __init__(self):
''' Main execution path '''
# Inventory grouped by instance IDs, tags, security groups, regions,
# and availability zones
self.inventory = self._empty_inventory()
# Index of hostname (address) to instance ID
self.index = {}
# Read settings and parse CLI arguments
self.read_settings()
self.parse_cli_args()
# Cache
if self.args.refresh_cache:
self.do_api_calls_update_cache()
elif not self.is_cache_valid():
self.do_api_calls_update_cache()
# Data to print
if self.args.host:
data_to_print = self.get_host_info()
elif self.args.list:
# Display list of instances for inventory
if self.inventory == self._empty_inventory():
data_to_print = self.get_inventory_from_cache()
else:
data_to_print = self.json_format_dict(self.inventory, True)
print data_to_print
def is_cache_valid(self):
''' Determines if the cache files have expired, or if it is still valid '''
if os.path.isfile(self.cache_path_cache):
mod_time = os.path.getmtime(self.cache_path_cache)
current_time = time()
if (mod_time + self.cache_max_age) > current_time:
if os.path.isfile(self.cache_path_index):
return True
return False
def read_settings(self):
''' Reads the settings from the ec2.ini file '''
config = ConfigParser.SafeConfigParser()
ec2_default_ini_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'ec2.ini')
ec2_ini_path = os.environ.get('EC2_INI_PATH', ec2_default_ini_path)
config.read(ec2_ini_path)
# is eucalyptus?
self.eucalyptus_host = None
self.eucalyptus = False
if config.has_option('ec2', 'eucalyptus'):
self.eucalyptus = config.getboolean('ec2', 'eucalyptus')
if self.eucalyptus and config.has_option('ec2', 'eucalyptus_host'):
self.eucalyptus_host = config.get('ec2', 'eucalyptus_host')
# Regions
self.regions = []
configRegions = config.get('ec2', 'regions')
configRegions_exclude = config.get('ec2', 'regions_exclude')
if (configRegions == 'all'):
if self.eucalyptus_host:
self.regions.append(boto.connect_euca(host=self.eucalyptus_host).region.name)
else:
for regionInfo in ec2.regions():
if regionInfo.name not in configRegions_exclude:
self.regions.append(regionInfo.name)
else:
self.regions = configRegions.split(",")
# Destination addresses
self.destination_variable = config.get('ec2', 'destination_variable')
self.vpc_destination_variable = config.get('ec2', 'vpc_destination_variable')
# Route53
self.route53_enabled = config.getboolean('ec2', 'route53')
self.route53_excluded_zones = []
if config.has_option('ec2', 'route53_excluded_zones'):
self.route53_excluded_zones.extend(
config.get('ec2', 'route53_excluded_zones', '').split(','))
# Include RDS instances?
self.rds_enabled = True
if config.has_option('ec2', 'rds'):
self.rds_enabled = config.getboolean('ec2', 'rds')
# Return all EC2 and RDS instances (if RDS is enabled)
if config.has_option('ec2', 'all_instances'):
self.all_instances = config.getboolean('ec2', 'all_instances')
else:
self.all_instances = False
if config.has_option('ec2', 'all_rds_instances') and self.rds_enabled:
self.all_rds_instances = config.getboolean('ec2', 'all_rds_instances')
else:
self.all_rds_instances = False
# Cache related
cache_dir = os.path.expanduser(config.get('ec2', 'cache_path'))
if not os.path.exists(cache_dir):
os.makedirs(cache_dir)
self.cache_path_cache = cache_dir + "/ansible-ec2.cache"
self.cache_path_index = cache_dir + "/ansible-ec2.index"
self.cache_max_age = config.getint('ec2', 'cache_max_age')
# Configure nested groups instead of flat namespace.
if config.has_option('ec2', 'nested_groups'):
self.nested_groups = config.getboolean('ec2', 'nested_groups')
else:
self.nested_groups = False
# Configure which groups should be created.
group_by_options = [
'group_by_instance_id',
'group_by_region',
'group_by_availability_zone',
'group_by_ami_id',
'group_by_instance_type',
'group_by_key_pair',
'group_by_vpc_id',
'group_by_security_group',
'group_by_tag_keys',
'group_by_tag_none',
'group_by_route53_names',
'group_by_rds_engine',
'group_by_rds_parameter_group',
]
for option in group_by_options:
if config.has_option('ec2', option):
setattr(self, option, config.getboolean('ec2', option))
else:
setattr(self, option, True)
# Do we need to just include hosts that match a pattern?
try:
pattern_include = config.get('ec2', 'pattern_include')
if pattern_include and len(pattern_include) > 0:
self.pattern_include = re.compile(pattern_include)
else:
self.pattern_include = None
except ConfigParser.NoOptionError, e:
self.pattern_include = None
# Do we need to exclude hosts that match a pattern?
try:
            pattern_exclude = config.get('ec2', 'pattern_exclude')
if pattern_exclude and len(pattern_exclude) > 0:
self.pattern_exclude = re.compile(pattern_exclude)
else:
self.pattern_exclude = None
except ConfigParser.NoOptionError, e:
self.pattern_exclude = None
# Instance filters (see boto and EC2 API docs). Ignore invalid filters.
self.ec2_instance_filters = defaultdict(list)
if config.has_option('ec2', 'instance_filters'):
for instance_filter in config.get('ec2', 'instance_filters', '').split(','):
instance_filter = instance_filter.strip()
if not instance_filter or '=' not in instance_filter:
continue
filter_key, filter_value = [x.strip() for x in instance_filter.split('=', 1)]
if not filter_key:
continue
self.ec2_instance_filters[filter_key].append(filter_value)
def parse_cli_args(self):
''' Command line argument processing '''
parser = argparse.ArgumentParser(description='Produce an Ansible Inventory file based on EC2')
parser.add_argument('--list', action='store_true', default=True,
help='List instances (default: True)')
parser.add_argument('--host', action='store',
help='Get all the variables about a specific instance')
parser.add_argument('--refresh-cache', action='store_true', default=False,
help='Force refresh of cache by making API requests to EC2 (default: False - use cache files)')
self.args = parser.parse_args()
def do_api_calls_update_cache(self):
''' Do API calls to each region, and save data in cache files '''
if self.route53_enabled:
self.get_route53_records()
for region in self.regions:
self.get_instances_by_region(region)
if self.rds_enabled:
self.get_rds_instances_by_region(region)
self.write_to_cache(self.inventory, self.cache_path_cache)
self.write_to_cache(self.index, self.cache_path_index)
def connect(self, region):
''' create connection to api server'''
if self.eucalyptus:
conn = boto.connect_euca(host=self.eucalyptus_host)
conn.APIVersion = '2010-08-31'
else:
conn = ec2.connect_to_region(region)
# connect_to_region will fail "silently" by returning None if the region name is wrong or not supported
if conn is None:
self.fail_with_error("region name: %s likely not supported, or AWS is down. connection to region failed." % region)
return conn
def get_instances_by_region(self, region):
''' Makes an AWS EC2 API call to the list of instances in a particular
region '''
try:
conn = self.connect(region)
reservations = []
if self.ec2_instance_filters:
for filter_key, filter_values in self.ec2_instance_filters.iteritems():
reservations.extend(conn.get_all_instances(filters = { filter_key : filter_values }))
else:
reservations = conn.get_all_instances()
for reservation in reservations:
for instance in reservation.instances:
self.add_instance(instance, region)
except boto.exception.BotoServerError, e:
if e.error_code == 'AuthFailure':
error = self.get_auth_error_message()
else:
backend = 'Eucalyptus' if self.eucalyptus else 'AWS'
error = "Error connecting to %s backend.\n%s" % (backend, e.message)
self.fail_with_error(error)
def get_rds_instances_by_region(self, region):
''' Makes an AWS API call to the list of RDS instances in a particular
region '''
try:
conn = rds.connect_to_region(region)
if conn:
instances = conn.get_all_dbinstances()
for instance in instances:
self.add_rds_instance(instance, region)
except boto.exception.BotoServerError, e:
error = e.reason
if e.error_code == 'AuthFailure':
error = self.get_auth_error_message()
            if e.reason != "Forbidden":
error = "Looks like AWS RDS is down:\n%s" % e.message
self.fail_with_error(error)
def get_auth_error_message(self):
''' create an informative error message if there is an issue authenticating'''
errors = ["Authentication error retrieving ec2 inventory."]
if None in [os.environ.get('AWS_ACCESS_KEY_ID'), os.environ.get('AWS_SECRET_ACCESS_KEY')]:
errors.append(' - No AWS_ACCESS_KEY_ID or AWS_SECRET_ACCESS_KEY environment vars found')
else:
errors.append(' - AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY environment vars found but may not be correct')
boto_paths = ['/etc/boto.cfg', '~/.boto', '~/.aws/credentials']
boto_config_found = list(p for p in boto_paths if os.path.isfile(os.path.expanduser(p)))
if len(boto_config_found) > 0:
errors.append(" - Boto configs found at '%s', but the credentials contained may not be correct" % ', '.join(boto_config_found))
else:
errors.append(" - No Boto config found at any expected location '%s'" % ', '.join(boto_paths))
return '\n'.join(errors)
def fail_with_error(self, err_msg):
'''log an error to std err for ansible-playbook to consume and exit'''
sys.stderr.write(err_msg)
sys.exit(1)
def get_instance(self, region, instance_id):
conn = self.connect(region)
reservations = conn.get_all_instances([instance_id])
for reservation in reservations:
for instance in reservation.instances:
return instance
def add_instance(self, instance, region):
''' Adds an instance to the inventory and index, as long as it is
addressable '''
# Only want running instances unless all_instances is True
if not self.all_instances and instance.state != 'running':
return
# Select the best destination address
if instance.subnet_id:
dest = getattr(instance, self.vpc_destination_variable, None)
if dest is None:
dest = getattr(instance, 'tags').get(self.vpc_destination_variable, None)
else:
dest = getattr(instance, self.destination_variable, None)
if dest is None:
dest = getattr(instance, 'tags').get(self.destination_variable, None)
if not dest:
# Skip instances we cannot address (e.g. private VPC subnet)
return
# if we only want to include hosts that match a pattern, skip those that don't
if self.pattern_include and not self.pattern_include.match(dest):
return
# if we need to exclude hosts that match a pattern, skip those
if self.pattern_exclude and self.pattern_exclude.match(dest):
return
# Add to index
self.index[dest] = [region, instance.id]
# Inventory: Group by instance ID (always a group of 1)
if self.group_by_instance_id:
self.inventory[instance.id] = [dest]
if self.nested_groups:
self.push_group(self.inventory, 'instances', instance.id)
# Inventory: Group by region
if self.group_by_region:
self.push(self.inventory, region, dest)
if self.nested_groups:
self.push_group(self.inventory, 'regions', region)
# Inventory: Group by availability zone
if self.group_by_availability_zone:
self.push(self.inventory, instance.placement, dest)
if self.nested_groups:
if self.group_by_region:
self.push_group(self.inventory, region, instance.placement)
self.push_group(self.inventory, 'zones', instance.placement)
# Inventory: Group by Amazon Machine Image (AMI) ID
if self.group_by_ami_id:
ami_id = self.to_safe(instance.image_id)
self.push(self.inventory, ami_id, dest)
if self.nested_groups:
self.push_group(self.inventory, 'images', ami_id)
# Inventory: Group by instance type
if self.group_by_instance_type:
type_name = self.to_safe('type_' + instance.instance_type)
self.push(self.inventory, type_name, dest)
if self.nested_groups:
self.push_group(self.inventory, 'types', type_name)
# Inventory: Group by key pair
if self.group_by_key_pair and instance.key_name:
key_name = self.to_safe('key_' + instance.key_name)
self.push(self.inventory, key_name, dest)
if self.nested_groups:
self.push_group(self.inventory, 'keys', key_name)
# Inventory: Group by VPC
if self.group_by_vpc_id and instance.vpc_id:
vpc_id_name = self.to_safe('vpc_id_' + instance.vpc_id)
self.push(self.inventory, vpc_id_name, dest)
if self.nested_groups:
self.push_group(self.inventory, 'vpcs', vpc_id_name)
# Inventory: Group by security group
if self.group_by_security_group:
try:
for group in instance.groups:
key = self.to_safe("security_group_" + group.name)
self.push(self.inventory, key, dest)
if self.nested_groups:
self.push_group(self.inventory, 'security_groups', key)
except AttributeError:
self.fail_with_error('\n'.join(['Package boto seems a bit older.',
'Please upgrade boto >= 2.3.0.']))
# Inventory: Group by tag keys
if self.group_by_tag_keys:
for k, v in instance.tags.iteritems():
key = self.to_safe("tag_" + k + "=" + v)
self.push(self.inventory, key, dest)
if self.nested_groups:
self.push_group(self.inventory, 'tags', self.to_safe("tag_" + k))
self.push_group(self.inventory, self.to_safe("tag_" + k), key)
# Inventory: Group by Route53 domain names if enabled
if self.route53_enabled and self.group_by_route53_names:
route53_names = self.get_instance_route53_names(instance)
for name in route53_names:
self.push(self.inventory, name, dest)
if self.nested_groups:
self.push_group(self.inventory, 'route53', name)
# Global Tag: instances without tags
if self.group_by_tag_none and len(instance.tags) == 0:
self.push(self.inventory, 'tag_none', dest)
if self.nested_groups:
self.push_group(self.inventory, 'tags', 'tag_none')
# Global Tag: tag all EC2 instances
self.push(self.inventory, 'ec2', dest)
self.inventory["_meta"]["hostvars"][dest] = self.get_host_info_dict_from_instance(instance)
def add_rds_instance(self, instance, region):
''' Adds an RDS instance to the inventory and index, as long as it is
addressable '''
# Only want available instances unless all_rds_instances is True
if not self.all_rds_instances and instance.status != 'available':
return
# Select the best destination address
dest = instance.endpoint[0]
if not dest:
# Skip instances we cannot address (e.g. private VPC subnet)
return
# Add to index
self.index[dest] = [region, instance.id]
# Inventory: Group by instance ID (always a group of 1)
if self.group_by_instance_id:
self.inventory[instance.id] = [dest]
if self.nested_groups:
self.push_group(self.inventory, 'instances', instance.id)
# Inventory: Group by region
if self.group_by_region:
self.push(self.inventory, region, dest)
if self.nested_groups:
self.push_group(self.inventory, 'regions', region)
# Inventory: Group by availability zone
if self.group_by_availability_zone:
self.push(self.inventory, instance.availability_zone, dest)
if self.nested_groups:
if self.group_by_region:
self.push_group(self.inventory, region, instance.availability_zone)
self.push_group(self.inventory, 'zones', instance.availability_zone)
# Inventory: Group by instance type
if self.group_by_instance_type:
type_name = self.to_safe('type_' + instance.instance_class)
self.push(self.inventory, type_name, dest)
if self.nested_groups:
self.push_group(self.inventory, 'types', type_name)
# Inventory: Group by VPC
if self.group_by_vpc_id and instance.subnet_group and instance.subnet_group.vpc_id:
vpc_id_name = self.to_safe('vpc_id_' + instance.subnet_group.vpc_id)
self.push(self.inventory, vpc_id_name, dest)
if self.nested_groups:
self.push_group(self.inventory, 'vpcs', vpc_id_name)
# Inventory: Group by security group
if self.group_by_security_group:
try:
if instance.security_group:
key = self.to_safe("security_group_" + instance.security_group.name)
self.push(self.inventory, key, dest)
if self.nested_groups:
self.push_group(self.inventory, 'security_groups', key)
except AttributeError:
self.fail_with_error('\n'.join(['Package boto seems a bit older.',
'Please upgrade boto >= 2.3.0.']))
# Inventory: Group by engine
if self.group_by_rds_engine:
self.push(self.inventory, self.to_safe("rds_" + instance.engine), dest)
if self.nested_groups:
self.push_group(self.inventory, 'rds_engines', self.to_safe("rds_" + instance.engine))
# Inventory: Group by parameter group
if self.group_by_rds_parameter_group:
self.push(self.inventory, self.to_safe("rds_parameter_group_" + instance.parameter_group.name), dest)
if self.nested_groups:
self.push_group(self.inventory, 'rds_parameter_groups', self.to_safe("rds_parameter_group_" + instance.parameter_group.name))
# Global Tag: all RDS instances
self.push(self.inventory, 'rds', dest)
self.inventory["_meta"]["hostvars"][dest] = self.get_host_info_dict_from_instance(instance)
def get_route53_records(self):
''' Get and store the map of resource records to domain names that
point to them. '''
r53_conn = route53.Route53Connection()
all_zones = r53_conn.get_zones()
route53_zones = [ zone for zone in all_zones if zone.name[:-1]
not in self.route53_excluded_zones ]
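        # Map each record value (an IP address or a DNS name) to the set of
        # record names pointing at it.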
self.route53_records = {}
for zone in route53_zones:
rrsets = r53_conn.get_all_rrsets(zone.id)
for record_set in rrsets:
record_name = record_set.name
if record_name.endswith('.'):
record_name = record_name[:-1]
for resource in record_set.resource_records:
self.route53_records.setdefault(resource, set())
self.route53_records[resource].add(record_name)
def get_instance_route53_names(self, instance):
''' Check if an instance is referenced in the records we have from
Route53. If it is, return the list of domain names pointing to said
instance. If nothing points to it, return an empty list. '''
instance_attributes = [ 'public_dns_name', 'private_dns_name',
'ip_address', 'private_ip_address' ]
name_list = set()
for attrib in instance_attributes:
try:
value = getattr(instance, attrib)
except AttributeError:
continue
if value in self.route53_records:
name_list.update(self.route53_records[value])
return list(name_list)
def get_host_info_dict_from_instance(self, instance):
instance_vars = {}
for key in vars(instance):
value = getattr(instance, key)
key = self.to_safe('ec2_' + key)
# Handle complex types
# state/previous_state changed to properties in boto in https://github.com/boto/boto/commit/a23c379837f698212252720d2af8dec0325c9518
if key == 'ec2__state':
instance_vars['ec2_state'] = instance.state or ''
instance_vars['ec2_state_code'] = instance.state_code
elif key == 'ec2__previous_state':
instance_vars['ec2_previous_state'] = instance.previous_state or ''
instance_vars['ec2_previous_state_code'] = instance.previous_state_code
elif type(value) in [int, bool]:
instance_vars[key] = value
elif type(value) in [str, unicode]:
instance_vars[key] = value.strip()
elif type(value) == type(None):
instance_vars[key] = ''
elif key == 'ec2_region':
instance_vars[key] = value.name
elif key == 'ec2__placement':
instance_vars['ec2_placement'] = value.zone
elif key == 'ec2_tags':
for k, v in value.iteritems():
key = self.to_safe('ec2_tag_' + k)
instance_vars[key] = v
elif key == 'ec2_groups':
group_ids = []
group_names = []
for group in value:
group_ids.append(group.id)
group_names.append(group.name)
instance_vars["ec2_security_group_ids"] = ','.join([str(i) for i in group_ids])
instance_vars["ec2_security_group_names"] = ','.join([str(i) for i in group_names])
else:
pass
# TODO Product codes if someone finds them useful
#print key
#print type(value)
#print value
return instance_vars
def get_host_info(self):
''' Get variables about a specific host '''
if len(self.index) == 0:
# Need to load index from cache
self.load_index_from_cache()
        if self.args.host not in self.index:
# try updating the cache
self.do_api_calls_update_cache()
            if self.args.host not in self.index:
# host might not exist anymore
return self.json_format_dict({}, True)
(region, instance_id) = self.index[self.args.host]
instance = self.get_instance(region, instance_id)
return self.json_format_dict(self.get_host_info_dict_from_instance(instance), True)
def push(self, my_dict, key, element):
''' Push an element onto an array that may not have been defined in
the dict '''
group_info = my_dict.setdefault(key, [])
if isinstance(group_info, dict):
host_list = group_info.setdefault('hosts', [])
host_list.append(element)
else:
group_info.append(element)
def push_group(self, my_dict, key, element):
''' Push a group as a child of another group. '''
parent_group = my_dict.setdefault(key, {})
if not isinstance(parent_group, dict):
parent_group = my_dict[key] = {'hosts': parent_group}
child_groups = parent_group.setdefault('children', [])
if element not in child_groups:
child_groups.append(element)
def get_inventory_from_cache(self):
''' Reads the inventory from the cache file and returns it as a JSON
object '''
cache = open(self.cache_path_cache, 'r')
json_inventory = cache.read()
return json_inventory
def load_index_from_cache(self):
''' Reads the index from the cache file sets self.index '''
cache = open(self.cache_path_index, 'r')
json_index = cache.read()
self.index = json.loads(json_index)
def write_to_cache(self, data, filename):
''' Writes data in JSON format to a file '''
json_data = self.json_format_dict(data, True)
cache = open(filename, 'w')
cache.write(json_data)
cache.close()
def to_safe(self, word):
''' Converts 'bad' characters in a string to underscores so they can be
used as Ansible groups '''
return re.sub("[^A-Za-z0-9\-]", "_", word)
def json_format_dict(self, data, pretty=False):
''' Converts a dict to a JSON object and dumps it as a formatted
string '''
if pretty:
return json.dumps(data, sort_keys=True, indent=2)
else:
return json.dumps(data)
# Run the script
Ec2Inventory()
| grizax/AWS-Lambda | describe-EC2instances.py | Python | gpl-2.0 | 27,681 |
"""
Django settings for moo project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'u@g=^v7)hrcffe-1p82f!q38v#1!w6(b!6p1=61m-$osx2w%!h'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ALLOWED_HOSTS = []
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
TEMPLATE_DIRS = (
os.path.join(BASE_DIR, 'moo/templates',),
)
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'building',
'game',
'planet',
'race',
'ship',
'system',
'tech',
'player'
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'moo.urls'
WSGI_APPLICATION = 'moo.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
| dwagon/pymoo | moo/moo/settings.py | Python | gpl-2.0 | 2,358 |
import numpy as np
import sympy as sy
# The functions below use many bare sympy names; import them explicitly.
from sympy import (symbols, Symbol, Function, Matrix, sin, cos, diff, collect,
                   trigsimp, solve, zeros, eye)
from sympy.core import numbers
import sympy.physics.mechanics as me
from sympy.utilities.lambdify import lambdify, implemented_function
import itertools
from functools import partial
import math
import pickle
picklefile_ode = 'ode.pkl'
picklefile_lin = 'lin.pkl'
def get_pickled(fname):
try:
        with open(fname) as f:
res = pickle.load(f)
except (IOError, EOFError):
res = dict()
return res
def set_pickled(ob, fname):
    with open(fname, 'wb') as f:
pickle.dump(ob, f)
def get_ode_fcn_floating_dp(g_, a1_, L1_, m1_, I1_, a2_, L2_, m2_, I2_):
"""
Equations of motion of floating double pendulum. Computed using sympy.physics.mechanics,
and based on the n-pendulum example from PyDy.
The equations of motion are
H(q) * qdd + Lambda(q) * lam = f(q, qd)
with constraint equations
mc(q)qd + fc(q) = 0
and are rewritten as
[H(q) Lambda(q)] * [qdd; lam] = f(q, qd)
Definitions of the generalized coordinates:
:q1: Angle between link one and vertical. Positive rotation is about the y-axis,
which points into the plane.
:q2: Angle between link one and two.
:q3: Position in x-direction (horizontal) of the base joint (ankle joint).
:q4: Position in z-direction (vertical) of the base joint (ankle joint).
Here, we force the floating double pendulum to have velocity zero at the base joint. Hence, the constraint
equations become
qd3 = 0
qd4 = 0.
    The Lagrange multipliers lam associated with these constraints enter the
    equations of motion through the term Lambda(q)*lam:
        H(q) * qdd + Lambda(q) * lam = f(q, qd)
    The 3rd row of this d.e. contains the term Lambda_3(q)*lam_1, which gives the constraint
    force in the direction of q3, i.e. in the horizontal direction. The 4th row's term,
    Lambda_4(q)*lam_2, gives the vertical force.
Returns:
Tuple of callable functions: (M, ff), where
M = [I 0 0
0 H Lambda
0 m_cd 0]
ff = [qd
f
f_cd]
The symbols and constants are
:qi: generalized coordinate i
:qdi: generalized velocity i
:g: the local magnitude of the gravitational field
:m1: the mass of link 1
:m2: the mass of link 2
:L1: the length of link 1
:L2: the length of link 2
:I1: the moment of inertia of link 1 wrt to its CoM
:I2: the moment of inertia of link 2 wrt to its CoM
:a1: the position of CoM of link 1 along link 1 from base joint to next joint.
0 <= a1 <= 1. a1=1 means the CoM is at the next joint.
:a2: the position of CoM along link 1
:Date: 2016-04-18
"""
q = me.dynamicsymbols('q:{}'.format(4)) # Generalized coordinates
    qd = me.dynamicsymbols('q:{}'.format(4), 1) # Time derivatives of the generalized coordinates
uu = me.dynamicsymbols('u:{}'.format(4)) # Generalized speeds
tau = me.dynamicsymbols('tau:{}'.format(4)) # Generalized forces
m = sy.symbols('m:{}'.format(2)) # Mass of each link
momIn = sy.symbols('II:{}'.format(2)) # Moment of inertia of each link
L = sy.symbols('l:{}'.format(2)) # Length of each link
a = sy.symbols('a:{}'.format(2)) # Position of CoM. 0 <= a <= 1
g, t = sy.symbols('g t') # Gravity and time
N = me.ReferenceFrame('N') # Inertial reference frame
O = me.Point('O') # Origin point
O.set_vel(N, 0) # Origin's velocity is zero
P0 = me.Point('P0') # Hinge point of base link can move in the x-z plane
P0.set_pos(O, q[2] * N.x + q[3] * N.z) # Set the position of P0
P0.set_vel(N, qd[2] * N.x + qd[3] * N.z) # Set the velocity of P0
frames = [N] # List to hold the 3 frames
joints_and_endp = [P0] # List to hold the 2 joint positions and the end point
links = [] # List to hold the 2 links (rigid bodies)
torques = [] # List to hold the torques at the two joints
kinDiffs = [] # List to hold kinematic ODE's
CoMs = []
S = sy.Matrix([[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]])
for i in range(2):
proxLink = frames[-1]
Bi = proxLink.orientnew('B' + str(i), 'Axis', [q[i], proxLink.y]) # Create a new frame. Rotation about y
Bi.set_ang_vel(proxLink, qd[i] * proxLink.y) # Set angular velocity
frames.append(Bi) # Add it to the frames list
pivot = joints_and_endp[-1] # The pivot point of this link
Pi = pivot.locatenew('P' + str(i + 1), L[i] * Bi.z) # Create a new point at end of link
Pi.v2pt_theory(pivot, N, Bi) # Set the velocity
joints_and_endp.append(Pi) # Add it to the points list
CoMi = pivot.locatenew('CoM' + str(i + 1), a[i]*L[i] * Bi.z) # Create a new point at CoM
CoMi.v2pt_theory(pivot, N, Bi) # Set the velocity
CoMs.append(CoMi) # Add it to the points list
Ii = me.functions.inertia(Bi, 0, momIn[i], momIn[i])
RBi = me.RigidBody('Link' + str(i + 1), CoMi, Bi, m[i], (Ii, CoMi)) # Create a new link
RBi.set_potential_energy(m[i] * g * CoMi.pos_from(O).dot(N.z))
links.append(RBi) # Add it to the links list
torques.append((Bi, tau[i]*Bi.y)) # Add the torque acting on the link
Lag = me.Lagrangian(N, links[0]) + me.Lagrangian(N, links[1]) # The lagrangian of the system
constraints = [q[2], q[3]] # Force the base joint to be stationary at the origin
LM = me.LagrangesMethod(Lag, q, hol_coneqs=constraints, forcelist=torques, frame=N)
LM.form_lagranges_equations()
# Substitute parameter values
subsDict = {g:g_, a[0]:a1_, L[0]:L1_, m[0]:m1_, momIn[0]:I1_, a[1]:a2_, L[1]:L2_, m[1]:m2_, momIn[1]:I2_}
Hs = LM.mass_matrix_full.subs(subsDict)
fs = LM.forcing_full.subs(subsDict)
#Hs = LM.mass_matrix
#fs = LM.forcing
for i in range(4):
Hs = Hs.subs(qd[i], uu[i])
fs = fs.subs(qd[i], uu[i])
callingargs = q + uu + tau
#callingargs = q + uu + tau + [g, a[0], L[0], m[0], momIn[0], a[1], L[1], m[1], momIn[1]]
f_func = sy.lambdify(callingargs, fs)
H_func = sy.lambdify(callingargs, Hs)
return (H_func, f_func)
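# A minimal usage sketch (not from the original source; the parameter values
# below are made up for illustration):
#   H, f = get_ode_fcn_floating_dp(9.81, 0.5, 1.0, 1.0, 0.1, 0.5, 1.0, 1.0, 0.1)
#   args = [0.1, 0.0, 0.0, 0.0] + [0.0]*4 + [0.0]*4   # q1..q4, u1..u4, tau1..tau4
#   sol = np.linalg.solve(np.array(H(*args), dtype=float),
#                         np.array(f(*args), dtype=float))  # stacked [qd; qdd; lam]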
def get_ode_fcn_floating_dp_legacy(g_, a1_, L1_, m1_, I1_, a2_, L2_, m2_, I2_):
""" Returns a function object that can be called with fnc(t,x, u),
where x = [q1, q2, q3, q4, qd1, qd2, qd3, qd4], and tau = [tau1, tau2, tau3, tau4], is the torques at each
joint respectively. The function implements the ode for the floating inverted
double pendulum.
For faster return of already constructed models, the set of parameters
are checked against a pickled dict.
"""
params = (g_, a1_, L1_, m1_, I1_, a2_, L2_, m2_, I2_)
tau1, tau2, tau3, tau4 = sy.symbols('tau1, tau2, tau3, tau4')
q1, q2, q3, q4, qd1, qd2, qd3, qd4 = symbols('q1, q2, q3, q4, qd1, qd2, qd3, qd4', real=True)
s1 = sy.sin(q1); s1_ = Symbol('s1')
c1 = sy.cos(q1); c1_ = Symbol('c1')
s2 = sy.sin(q2); s2_ = Symbol('s2')
c2 = sy.cos(q2); c2_ = Symbol('c2')
s12 = sy.sin(q1+q2); s12_ = Symbol('s12')
c12 = sy.cos(q1+q2); c12_ = Symbol('c12')
odes = get_symbolic_ode_floating_dp(params)
# Substitute functions for faster evaluation
odes = odes.subs(s1, s1_).subs(c1,c1_).subs(s2,s2_).subs(c2,c2_).subs(s12,s12_).subs(c12,c12_)
    lmb = lambdify( (q1, q2, q3, q4, qd1, qd2, qd3, qd4, s1_, c1_, s2_, c2_, s12_, c12_, tau1, tau2, tau3, tau4), odes)
return partial(lambda_ode, lambdafunc=lmb)
def get_symbolic_ode_floating_dp(params):
"""
Will generate ode on symbolic form:
qdd = H^{-1} (-C(q,qd)*qd - G(q) - S*tau)
"""
existing_odes = get_pickled(picklefile_ode)
if params in existing_odes:
odes = existing_odes[params]
else:
(g_, a1_, L1_, m1_, I1_, a2_, L2_, m2_, I2_) = params
tau1, tau2, tau3, tau4 = sy.symbols('tau1, tau2, tau3, tau4')
q1, q2, q3, q4, qd1, qd2, qd3, qd4 = symbols('q1, q2, q3, q4, qd1, qd2, qd3, qd4', real=True)
        g, a1, L1, m1, I1, a2, L2, m2, I2 = symbols('g, a1, l1, m1, I1, a2, l2, m2, I2', real=True, positive=True)  # names and assumptions must match pendulum_ode_manipulator_form_floating_dp for subs() to take effect
H, C, G, S = pendulum_ode_manipulator_form_floating_dp()
# Substitute the given parameters
H = H.subs(a1, a1_).subs(L1, L1_).subs(m1, m1_).subs(I1, I1_).subs(a2, a2_).subs(L2, L2_).subs(m2, m2_).subs(I2, I2_).subs(g, g_)
C = C.subs(a1, a1_).subs(L1, L1_).subs(m1, m1_).subs(I1, I1_).subs(a2, a2_).subs(L2, L2_).subs(m2, m2_).subs(I2, I2_).subs(g, g_)
G = G.subs(a1, a1_).subs(L1, L1_).subs(m1, m1_).subs(I1, I1_).subs(a2, a2_).subs(L2, L2_).subs(m2, m2_).subs(I2, I2_).subs(g, g_)
# Invert symbolically
tau = Matrix([tau1, tau2, tau3, tau4])
qdot = Matrix([qd1, qd2, qd3, qd4])
odes = H.LUsolve(S*tau-C*qdot-G)
# pickle
existing_odes[params] = odes
set_pickled(existing_odes, picklefile_ode)
return odes
def get_ode_fcn(a1_, L1_, m1_, I1_, a2_, L2_, m2_, I2_, useRT=True):
""" Returns a function object that can be called with fnc(t,x, u),
where x = [th1,th2], and u = [tau1, tau2], is the torques at each
joint respectively. The function implements the ode for the inverted
double pendulum.
For faster return of already constructed models, the set of parameters
are checked against a pickled dict.
"""
params = (a1_, L1_, m1_, I1_, a2_, L2_, m2_, I2_, useRT)
u1 = Symbol('u1')
u2 = Symbol('u2')
th1 = Symbol('th1')
th2 = Symbol('th2')
th1dot = Symbol('th1dot')
th2dot = Symbol('th2dot')
s1 = sin(th1); s1_ = Symbol('s1')
c1 = cos(th1); c1_ = Symbol('c1')
s2 = sin(th2); s2_ = Symbol('s2')
c2 = cos(th2); c2_ = Symbol('c2')
s12 = sin(th1+th2); s12_ = Symbol('s12')
c12 = cos(th1+th2); c12_ = Symbol('c12')
odes = get_symbolic_ode(params)
# Substitute functions for faster evaluation
odes = odes.subs(s1, s1_).subs(c1,c1_).subs(s2,s2_).subs(c2,c2_).subs(s12,s12_).subs(c12,c12_)
lmb = lambdify((th1,th2,th1dot, th2dot, s1_, c1_, s2_, c2_, s12_, c12_, u1, u2), odes)
return partial(lambda_ode, lambdafunc=lmb)
def get_symbolic_ode(params):
existing_odes = get_pickled(picklefile_ode)
if params in existing_odes:
odes = existing_odes[params]
else:
(a1_, L1_, m1_, I1_, a2_, L2_, m2_, I2_, useRT) = params
u1 = Symbol('u1')
u2 = Symbol('u2')
th1dot = Symbol('th1dot')
th2dot = Symbol('th2dot')
a1, L1, m1, I1, a2, L2, m2, I2, g = symbols('a1, L1, m1, I1, a2, L2, m2, I2, g')
if useRT: # Expressions taken from Tedrake 2009, explicit form
H,C,G = pendulum_ode_manipulator_form_RT()
else:
H,C,G = pendulum_ode_manipulator_form()
# Substitute the given parameters
H = H.subs(a1, a1_).subs(L1, L1_).subs(m1, m1_).subs(I1, I1_).subs(a2, a2_).subs(L2, L2_).subs(m2, m2_).subs(I2, I2_).subs(g, 9.825)
C = C.subs(a1, a1_).subs(L1, L1_).subs(m1, m1_).subs(I1, I1_).subs(a2, a2_).subs(L2, L2_).subs(m2, m2_).subs(I2, I2_).subs(g, 9.825)
G = G.subs(a1, a1_).subs(L1, L1_).subs(m1, m1_).subs(I1, I1_).subs(a2, a2_).subs(L2, L2_).subs(m2, m2_).subs(I2, I2_).subs(g, 9.825)
#return partial(manipulator_ode,H=H, C=C, G=G)
# Invert symbolically
u = Matrix([u1, u2])
qdot = Matrix([th1dot, th2dot])
odes = H.LUsolve(u-C*qdot-G)
# pickle
existing_odes[params] = odes
set_pickled(existing_odes, picklefile_ode)
return odes
def lambda_ode(x, u, lambdafunc):
th1 = x[0]
th2 = x[1]
th1dot = x[2]
th2dot = x[3]
s1 = math.sin(th1)
c1 = math.cos(th1)
s2 = math.sin(th2)
c2 = math.cos(th2)
s12 = math.sin(th1+th2)
c12 = math.cos(th1+th2)
return np.ravel(lambdafunc(th1, th2, th1dot, th2dot, s1, c1, s2, c2, s12, c12, u[0], u[1]))
def manipulator_ode(x, u, H, C, G):
""" Manipulator ode. Will evaluate assuming x = [th1,th2, th1dot, th2dot] """
th1 = Symbol('th1'); th1_ = x[0]
th2 = Symbol('th2'); th2_ = x[1]
th1dot = Symbol('th1dot'); th1dot_ = x[2]
th2dot = Symbol('th2dot'); th2dot_ = x[3]
s1 = Symbol('s1'); s1_ = math.sin(th1_)
c1 = Symbol('c1'); c1_ = math.cos(th1_)
s2 = Symbol('s2'); s2_ = math.sin(th2_)
c2 = Symbol('c2'); c2_ = math.cos(th2_)
s12 = Symbol('s12'); s12_ = math.sin(th1_+th2_)
c12 = Symbol('c12'); c12_ = math.cos(th1_+th2_)
H = H.subs(th1,th1_).subs(th2,th2_).subs(th1dot,th1dot_).subs(th2dot,th2dot_)
H = H.subs(s1,s1_).subs(c1,c1_).subs(s2,s2_).subs(c2,c2_).subs(s12,s12_).subs(c12,c12_)
C = C.subs(th1,th1_).subs(th2,th2_).subs(th1dot,th1dot_).subs(th2dot,th2dot_)
C = C.subs(s1,s1_).subs(c1,c1_).subs(s2,s2_).subs(c2,c2_).subs(s12,s12_).subs(c12,c12_)
G = G.subs(th1,th1_).subs(th2,th2_).subs(th1dot,th1dot_).subs(th2dot,th2dot_)
G = G.subs(s1,s1_).subs(c1,c1_).subs(s2,s2_).subs(c2,c2_).subs(s12,s12_).subs(c12,c12_)
#2/0
# Convert to numpy and invert
Hnp = to_np(H)
b = -np.dot(to_np(C), np.array([th1dot_, th2dot_])).flatten() - to_np(G).flatten() + u
return np.linalg.solve(Hnp, b)
def to_np(A):
""" Converts sympy matrix A to numpy matrix """
shapeA = A.shape
Anp = np.zeros(shapeA)
for i in range(0,shapeA[0]):
for j in range(0,shapeA[1]):
            Anp[i,j]=sy.N(A[i,j])
return Anp
def get_linearized_ode(a1_, L1_, m1_, I1_, a2_, L2_, m2_, I2_):
""" Returns the system matrix A for a linear state space model with the
provided values substituted.
"""
params = (a1_, L1_, m1_, I1_, a2_, L2_, m2_, I2_)
existing_odes = get_pickled(picklefile_lin)
if params in existing_odes:
A,B = existing_odes[params]
else:
Asymb, Bsymb = linearize_ode(th1_=np.pi)
a1, L1, m1, I1, a2, L2, m2, I2, g = symbols('a1, L1, m1, I1, a2, L2, m2, I2, g')
A = Asymb.subs(a1, a1_).subs(L1, L1_).subs(m1, m1_).subs(I1, I1_).subs(a2, a2_).subs(L2, L2_).subs(m2, m2_).subs(I2, I2_).subs(g, 9.825)
B = Bsymb.subs(a1, a1_).subs(L1, L1_).subs(m1, m1_).subs(I1, I1_).subs(a2, a2_).subs(L2, L2_).subs(m2, m2_).subs(I2, I2_).subs(g, 9.825)
A = to_np(A)
B = to_np(B)
existing_odes[params] = (A,B)
set_pickled(existing_odes, picklefile_lin)
return A,B
def is_controllable(B):
""" Checks if the linearized system about the vertical position is controllable with
the provided B matrix
"""
A, BB = get_linearized_ode(1,2,1,1,1,2,1,1)
B = np.dot(BB, B)
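    # Kalman rank test: the linearization is controllable iff
    # [B, AB, A^2*B, A^3*B] has full rank. The determinant shortcut below
    # assumes B has a single column, so the controllability matrix is 4x4.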
C = B.copy()
AA = A.copy()
for i in range(1,4):
C = np.hstack((C, np.dot(AA,B)))
AA = np.dot(AA,A)
cdet = np.linalg.det(C)
print cdet
return (cdet != 0)
def is_controllable_symbolic(B):
""" Checks if the linearized system about the vertical position is controllable with
the provided B matrix
"""
A, BB = linearize_ode()
# 2/0
B = BB*B
C = B[:,:]
AA = A[:,:]
for i in range(1,4):
C = C.row_join(AA*B)
AA = AA*A
cdet = C.det()
print cdet
return (cdet != 0)
def linearize_ode(th1_=0, th2_=0, th1dot_=0, th2dot_=0):
""" Returns a state space model of the dynamics linearized about the
given angles
"""
th1, th2, th1dot, th2dot = symbols('th1, th2, th1dot, th2dot')
H, C, G = pendulum_ode_manipulator_form_RT()
dG = G.jacobian((th1,th2))
Hinv = H.inv()
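    # Block structure of the linearization, with state x = [q; qdot]:
    #   A = [[0, I], [-Hinv*dG, -Hinv*C]],   B = [[0], [Hinv]]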
A00 = zeros(2,2)
A01 = eye(2)
A10 = -Hinv*dG
A11 = -Hinv*C
A = A00.row_join(A01).col_join(A10.row_join(A11))
#2/0
B = zeros(2,2).col_join(Hinv)
return A.subs(th1,th1_).subs(th2,th2_).subs(th1dot,th1dot_).subs(th2dot,th2dot_), \
B.subs(th1,th1_).subs(th2,th2_).subs(th1dot,th1dot_).subs(th2dot,th2dot_)
def pendulum_ode():
""" Returns the equations of motion for the double pendulum
"""
a1, L1, m1, I1, a2, L2, m2, I2 = symbols('a1, L1, m1, I1, a2, L2, m2, I2')
g = Symbol('g')
    th1, th2, th1dot, th2dot = symbols('th1, th2, th1dot, th2dot')
    tau1, tau2 = symbols('tau1, tau2')  # joint torques used in the forcing vector b below
s1 = sin(th1)
s2 = sin(th2)
c1 = cos(th1)
c2 = cos(th2)
c21 = cos(th2-th1)
I1p0 = I1 + m1*a1**2
I2p1 = I2 + m2*a2**2
f1 = a2*L1*c21
H2th2 = I2 + m2*(a2**2 + f1)
H2th1 = m2*(L1**2 + f1)
H2rest = m2 * ( (L1*s1 + a2*s2) * (L1*th1dot**2*c1 + a2*th2dot**2*c2) \
+ (L1*c1 + a2*c2) * (L1*th1dot**2*s1 - a2*th2dot**2*s2) )
A = Matrix([[I1p0+H2th1, H2th2], [-I2p1+m2*f1, I2p1]])
b = Matrix([m1*a1*g*s1 + m2*g*(L1*s1+a2*s2) - H2rest + tau1,
m2*a2*g*s2 + tau2])
dx1 = A.LUsolve(b)
    dx = Matrix([th1dot, th2dot]).col_join(dx1)
#print dx
#2/0
return dx
def small_angles_approximation():
""" Returns the equations of motion for the double pendulum using
the small angle approximations for both joints.
"""
a1, L1, m1, I1, a2, L2, m2, I2 = symbols('a1, L1, m1, I1, a2, L2, m2, I2')
g = Symbol('g')
th1func = Function('th1')
th2func = Function('th2')
t = Symbol('t')
th1 = th1func(t)
th2 = th2func(t)
th1dot = diff(th1,t)
th2dot = diff(th2,t)
I1p0 = I1 + m1*a1**2
I2p1 = I2 + m2*a2**2
f1 = a2*L1
H2th2 = I2 + m2*(a2**2 + f1)
H2th1 = m2*(L1**2 + f1)
H2rest = m2 * ( (L1*th1 + a2*th2) * (L1*th1dot**2 + a2*th2dot**2) \
+ (L1 + a2) * (-L1*th1dot**2*th1 - a2*th2dot**2*th2) )
A = Matrix([[I1p0+H2th1, H2th2], [-I2p1+m2*f1, I2p1]])
b = Matrix([m1*a1*g*th1 + m2*g*(L1*th1+a2*th2) - H2rest,
m2*a2*g*th2])
dx = A.LUsolve(b)
print dx
return dx
def test_pendulum_ode_manipulator_form():
""" Compares computed odes with those of Ross Tedrake 2009 """
a1, L1, m1, I1, a2, L2, m2, I2 = symbols('a1, L1, m1, I1, a2, L2, m2, I2')
g = Symbol('g')
th1, th2, th1dot, th2dot = symbols('th1, th2, th1dot, th2dot')
I11 = I1 + m1*a1**2 # Moment of inertia wrt pivot point
I22 = I2 + m2*a2**2 # Moment of inertia wrt joint
s1 = sin(th1)
c1 = cos(th1)
s2 = sin(th2)
    c2 = cos(th2)
s12 = sin(th1+th2)
c12 = cos(th1+th2)
H, C, G = pendulum_ode_manipulator_form(False)
Ht, Ct, Gt = pendulum_ode_manipulator_form_RT()
for (expct, got, txt) in ((Ht, H, 'H'), (Ct, C, 'C'), (Gt, G, 'G')):
for (e, g) in itertools.izip(expct, got):
eq = trigsimp((e-g).expand())
if not eq.equals(numbers.Zero()):
# Could still be the same. Try to solve
sol = solve(eq)
if sol[0].values()[0] != numbers.Zero():
print "Error in " + txt
print "Element "
print g
2/0
def pendulum_ode_manipulator_form_floating_dp():
"""
Equations of motion on manipulator form for floating double pendulum. The manipulator form
is
H * qdd + C(q,qd)*qd + G(q) = S*tau
where the matrix S picks the actuated degrees of freedom.
Definitions of the generalized coordinates:
:q1: Angle between link one and vertical. Positive rotation is about the y-axis, which points into the plane.
:q2: Angle between link one and two.
:q3: Position in x-direction (horizontal) of the base joint (ankle joint).
:q4: Position in z-direction (vertical) of the base joint (ankle joint).
Returns:
Tuple: (H, C, G, S)
There are a number of symbols present in the return values. These are:
:qi: generalized coordinate i
:qdi: generalized velocity i
:g: the local magnitude of the gravitational field
:m1: the mass of link 1
:m2: the mass of link 2
:L1: the length of link 1
:L2: the length of link 2
:I1: the moment of inertia of link 1 wrt to its CoM
:I2: the moment of inertia of link 2 wrt to its CoM
:a1: the position of CoM of link 1 along link 1 from base joint to next joint.
0 <= a1 <= 1. a1=1 means the CoM is at the next joint.
    :a2: the position of the CoM of link 2 along link 2, defined analogously
:Date: 2016-04-18
"""
q1, q2, q3, q4, qd1, qd2, qd3, qd4 = symbols('q1, q2, q3, q4, qd1, qd2, qd3, qd4', real=True)
qdd1, qdd2, qdd3, qdd4 = symbols('qdd1, qdd2, qdd3, qdd4', real=True)
g, m1, m2, l1, l2, I1, I2, a1, a2 = symbols('g, m1, m2, l1, l2, I1, I2, a1, a2', real=True, positive=True)
tau1, tau2, Fx, Fy = symbols('tau1, tau2, Fx, Fy', real=True)
q = Matrix([[q1],[q2],[q3], [q4]])
qd = Matrix([[qd1],[qd2],[qd3], [qd4]])
qdd = Matrix([[qdd1],[qdd2],[qdd3], [qdd4]])
tau = Matrix([])
c1 = sy.cos(q1)
s1 = sy.sin(q1)
c12 = sy.cos(q1+q2)
s12 = sy.sin(q1+q2)
p1 = sy.Matrix([[q3 +a1*l1*s1],[q4+a1*l1*c1]])
p2 = sy.Matrix([[q3 +l1*s1 + a2*l2*s12],[q4+l1*c1 + a2*l2*c12]])
    V1 = sy.Matrix([[a1*l1*c1, 0, 1, 0],[-a1*l1*s1, 0, 0, 1]]) # Jacobian of p1 with respect to q
pd1 = V1*qd
V2 = sy.Matrix([[l1*c1+a2*l2*c12, a2*l2*c12, 1, 0 ],[-l1*s1-a2*l2*s12, -a2*l2*s12, 0, 1]])
pd2 = V2*qd
Omega1 = sy.Matrix([[1, 0, 0 , 0]])
w1 = Omega1*qd
Omega2 = sy.Matrix([[1, 1, 0 , 0]])
w2 = Omega2*qd
H = m1*V1.T*V1 + m2*V2.T*V2 + I1*Omega1.T*Omega1 + I2*Omega2.T*Omega2
T = 0.5*qd.T*H*qd
U = sy.Matrix([m1*g*p1[1] + m2*g*p2[1]])
# The equations of motion on manipulator form
C = sy.zeros(4,4)
G = sy.zeros(4,1)
for i in range(4):
qi = q[i]
Hi = H[:,i]
Gammai = Hi.jacobian(q)*qd
#ddtdLdqidot = Hi.T*qdd + Gammai.T*qd
dHdqi = H.diff(qi)
Di = 0.5*dHdqi*qd
Gi = U.diff(qi)
#dLdqi = Di.T*qd - Gi
#lhs1 = ddtdLdq1dot - dLdq1
# Form the terms for the manipulator form
Ci = Gammai - Di
C[i,:] = Ci.T
G[i] = Gi
S = sy.Matrix([[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]])
return (H,C,G,S)
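def example_floating_dp_forward_dynamics():
    """Illustrative sketch (not part of the original code): solve the
    manipulator form returned by pendulum_ode_manipulator_form_floating_dp
    for the accelerations,
        qdd = H^{-1} * (S*tau - C*qd - G)
    The torque symbols follow the conventions used in this module.
    """
    H, C, G, S = pendulum_ode_manipulator_form_floating_dp()
    tau1, tau2 = sy.symbols('tau1, tau2', real=True)
    tau = sy.Matrix([[tau1], [tau2], [0], [0]])
    qd = sy.Matrix(sy.symbols('qd1, qd2, qd3, qd4', real=True))
    # S*tau keeps only the actuated degrees of freedom (the two joint torques)
    qdd = H.LUsolve(S*tau - C*qd - G)
    return qdd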
def pendulum_ode_manipulator_form_RT():
a1, L1, m1, I1, a2, L2, m2, I2 = symbols('a1, L1, m1, I1, a2, L2, m2, I2')
g = Symbol('g')
th1 = Symbol('th1')
th2 = Symbol('th2')
th1dot = Symbol('th1dot')
th2dot = Symbol('th2dot')
I11 = I1 + m1*a1**2 # Moment of inertia wrt pivot point
I22 = I2 + m2*a2**2 # Moment of inertia wrt joint
s1 = sin(th1)
c1 = cos(th1)
s2 = sin(th2)
c2 = cos(th2)
s12 = sin(th1+th2)
c12 = cos(th1+th2)
Ht = Matrix([[I11 + I22 + m2*L1**2 + 2*m2*L1*a2*c2,
I22 + m2*L1*a2*c2],
[I22 + m2*L1*a2*c2, I22]])
Ct = Matrix([[-2*m2*L1*a2*s2*th2dot, -m2*L1*a2*s2*th2dot],
[m2*L1*a2*s2*th1dot, 0]])
Gt = Matrix([(m1*a1 + m2*L1)*g*s1 + m2*g*a2*s12, m2*g*a2*s12])
return Ht, Ct, Gt
def pendulum_ode_manipulator_form(downIsPI=True):
""" Computes the pendulum ode on manipulator form
        H(q) qddot + C(q,qdot) qdot + G(q) = B(q) u
using the Lagrangian formulation.
    Note the definition of the angles: th1 is the angle of the first link from
    the vertical, and is zero when the link is upright; th2 is the angle at the
    joint, and is zero when the joint is straight.
"""
a1, L1, m1, I1, a2, L2, m2, I2 = symbols('a1, L1, m1, I1, a2, L2, m2, I2')
g = Symbol('g')
th1func = Function('th1')
th2func = Function('th2')
t = Symbol('t')
th1 = th1func(t)
th2 = th2func(t)
th1dot = diff(th1,t)
th2dot = diff(th2,t)
th1ddot = diff(th1dot,t)
th2ddot = diff(th2dot,t)
th1_ = Symbol('th1')
th2_ = Symbol('th2')
th1dot_ = Symbol('th1dot')
th2dot_ = Symbol('th2dot')
th1ddot_ = Symbol('th1ddot')
th2ddot_ = Symbol('th2ddot')
I11 = I1 + m1*a1**2 # Moment of inertia wrt pivot point
I22 = I2 + m2*a2**2 # Moment of inertia wrt joint
s1 = sin(th1)
c1 = cos(th1)
s2 = sin(th2)
c2 = cos(th2)
s12 = sin(th1+th2)
c12 = cos(th1+th2)
x1 = Matrix([-a1*s1, a1*c1]) # Center of mass of first segment
p1 = Matrix([-L1*s1, L1*c1]) # Position of the joint
x2 = p1 + Matrix([-a2*s12, a2*c12])
x1dot = Matrix([[-a1*c1*th1dot],[-a1*s1*th1dot]])
x2dot = Matrix([[-L1*c1*th1dot],[-L1*s1*th1dot]]) \
+ Matrix([[-a2*c12*(th1dot + th2dot)],[-a2*s12*(th1dot + th2dot)]])
#x1dot = Matrix([diff(x1[0], t), diff(x1[1], t)])
#x2dot = Matrix([diff(x2[0], t), diff(x2[1], t)])
# Kinetic energy
T = 0.5*m1*x1dot.dot(x1dot) + 0.5*m2*x2dot.dot(x2dot) \
+ 0.5*I1*th1dot**2 + 0.5*I2*(th1dot + th2dot)**2
#T = 0.5*I11*th1dot**2 + 0.5*I2*(th1dot + th2dot)**2 + 0.5*m2*x2dot.dot(x2dot)
TT = T.subs(th1dot, th1dot_).subs(th2dot,th2dot_).subs(th1,th1_).subs(th2, th2_)
# Factorize T as T = 0.5 qdot * H * qdot
Td1 = diff(TT, th1dot_).expand()
Td2 = diff(TT, th2dot_).expand()
dd1 = collect(Td1, (th1dot_, th2dot_), evaluate=False)
dd2 = collect(Td2,(th1dot_, th2dot_), evaluate=False)
qdot = Matrix([th1dot_, th2dot_])
zer = numbers.Zero()
for k in (th1dot_, th2dot_):
if k not in dd1.keys():
dd1[k] = zer
if k not in dd2.keys():
dd2[k] = zer
H = Matrix([[trigsimp(dd1[th1dot_].expand()), trigsimp(dd1[th2dot_].expand())],
[trigsimp(dd2[th1dot_].expand()), trigsimp(dd2[th2dot_].expand())]])
# Check if ok
TTT = 0.5*qdot.dot(H.dot(qdot))
null = trigsimp((TTT-TT).expand()).simplify()
if not null.equals(numbers.Zero()):
print "### Error in factorized kinetic energy!"
        raise ValueError("Error in factorized kinetic energy")
# Potential energy
if downIsPI:
V = m1*g*x1[1] + m2*g*x2[1]
else:
V = -m1*g*x1[1] - m2*g*x2[1]
# Lagrangian
L = T-V
dLdth1dot = diff(L, th1dot)
ddtdLdth1dot = diff(dLdth1dot,t)
dLdth1 = diff(L, th1)
dLdth2dot = diff(L, th2dot)
ddtdLdth2dot = diff(dLdth2dot,t)
dLdth2 = diff(L, th2)
# The euler-lagrange equations
EL1 = trigsimp((ddtdLdth1dot -dLdth1).expand())
EL2 = trigsimp((ddtdLdth2dot -dLdth2).expand())
    # Substitute symbols for th1, th1dot, etc.
EL1 = EL1.subs(th1ddot, th1ddot_).subs(th2ddot, th2ddot_).subs(th1dot, th1dot_).subs(th2dot, th2dot_).subs(th1, th1_).subs(th2,th2_)
EL2 = EL2.subs(th1ddot, th1ddot_).subs(th2ddot, th2ddot_).subs(th1dot, th1dot_).subs(th2dot, th2dot_).subs(th1, th1_).subs(th2,th2_)
one = numbers.One()
# Factorize as H*qddot + C*qdot + G
#H11 = trigsimp(diff(EL1, th1ddot))
#H12 = trigsimp(diff(EL1, th2ddot))
C11 = trigsimp(diff(EL1, th1dot_).expand())
C12 = trigsimp(diff(EL1, th2dot_).expand())
G1 = trigsimp((EL1 - H[0,0]*th1ddot_ - H[0,1]*th2ddot_ - C11*th1dot_ - C12*th2dot_).expand()).simplify()
#H21 = trigsimp(diff(EL2, th1ddot))
#H22 = trigsimp(diff(EL2, th2ddot))
C21 = trigsimp(diff(EL2, th1dot_))
C22 = trigsimp(diff(EL2, th2dot_))
G2 = trigsimp((EL2 - H[1,0]*th1ddot_ - H[1,1]*th2ddot_ - C21*th1dot_ - C22*th2dot_).expand()).simplify()
#if not H11.equals(H[0,0]):
# print "### Error in calculated inertia matrix"
#if not H12.equals(H[0,1]):
# print "### Error in calculated inertia matrix"
#if not H21.equals(H[1,0]):
# print "### Error in calculated inertia matrix"
#if not H22.equals(H[1,1]):
# print "### Error in calculated inertia matrix"
#H = Matrix([[H11,H12], [H21,H22]])
C = Matrix([[C11, C12], [C21, C22]])
G = Matrix([G1,G2])
#Test that calculations are correct
ELtest = G1 + H[0,0]*th1ddot_ + H[0,1]*th2ddot_ + C[0,0]*th1dot_ + C[0,1]*th2dot_
null = trigsimp((ELtest-EL1).expand()).simplify()
if not null.equals(numbers.Zero()):
print "#### Error in equations of motion"
        raise ValueError("Error in equations of motion")
return H, C, G
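def example_manipulator_form_to_callable():
    """Illustrative sketch (not part of the original code): turn the symbolic
    manipulator form from pendulum_ode_manipulator_form into a plain numeric
    callable for the unforced accelerations,
        thdd = H(q)^{-1} * (-C(q,qd)*qd - G(q))
    The numeric parameter values below are placeholders only.
    """
    from sympy import lambdify
    H, C, G = pendulum_ode_manipulator_form()
    th1_, th2_, th1dot_, th2dot_ = symbols('th1, th2, th1dot, th2dot')
    qd = Matrix([th1dot_, th2dot_])
    thdd = H.LUsolve(-C*qd - G)
    thdd = thdd.subs({Symbol('a1'): 0.5, Symbol('L1'): 1.0, Symbol('m1'): 1.0,
                      Symbol('I1'): 0.1, Symbol('a2'): 0.5, Symbol('L2'): 1.0,
                      Symbol('m2'): 1.0, Symbol('I2'): 0.1, Symbol('g'): 9.81})
    return lambdify((th1_, th2_, th1dot_, th2dot_), thdd)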
def pendulum_model_diff(L1_v, m1_v, I1_v, L2_v, m2_v, I2_v):
    """Build the Lagrangian of the double pendulum with explicit time
    dependence, following the angle conventions used elsewhere in this
    module. The numeric arguments are currently unused placeholders for a
    later substitution step.
    """
    a1, L1, m1, I1, a2, L2, m2, I2 = symbols('a1, L1, m1, I1, a2, L2, m2, I2')
    g = Symbol('g')
    t = Symbol('t')
    th1 = Function('th1')
    th2 = Function('th2')
    x1 = Matrix([[-a1*sin(th1(t))],[a1*cos(th1(t))]]) # Center of mass of first segment
    p1 = Matrix([[-L1*sin(th1(t))],[L1*cos(th1(t))]]) # Position of the joint
    x2 = p1 + Matrix([[-a2*sin(th1(t)+th2(t))],[a2*cos(th1(t)+th2(t))]]) # CoM of second segment
    x1dot = Matrix([[diff(x1[0,0], t)], [diff(x1[1,0],t)]])
    x2dot = Matrix([[diff(x2[0,0], t)], [diff(x2[1,0],t)]])
    # Kinetic energy
    T = 0.5*m1*x1dot.dot(x1dot) + 0.5*m2*x2dot.dot(x2dot) \
        + 0.5*I1*diff(th1(t),t)**2 + 0.5*I2*(diff(th1(t),t) + diff(th2(t),t))**2
    # Potential energy
    V = m1*g*a1*cos(th1(t)) + m2*g*(L1*cos(th1(t)) + a2*cos(th1(t)+th2(t)))
    # Lagrangian
    L = T - V
    return L
def get_cc_ode_fcn(a1_, L1_, m1_, I1_, a2_, L2_, m2_, I2_, useRT=True):
""" Returns a list of four strings that defines the c-functions of the ode for the
double pendulum.
For faster return of already constructed models, the set of parameters
are checked against a pickled dict.
"""
params = (a1_, L1_, m1_, I1_, a2_, L2_, m2_, I2_, useRT)
u1 = Symbol('u1')
u1_ = Symbol('u1( th1(t-tau), th2(t-tau), th1dot(t-tau), th2dot(t-tau) )')
u2 = Symbol('u2')
u2_ = Symbol('u2( th1(t-tau), th2(t-tau), th1dot(t-tau), th2dot(t-tau) )')
th1 = Symbol('th1')
th2 = Symbol('th2')
th1dot = Symbol('th1dot')
th2dot = Symbol('th2dot')
odes = get_symbolic_ode(params)
odes = odes.subs(u1,u1_).subs(u2,u2_)
ccodes = ['th1dot', 'th2dot']
ccodes.append(ccode(odes[0]))
ccodes.append(ccode(odes[1]))
return ccodes
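# Sketch of intended use (illustrative; get_symbolic_ode is defined elsewhere
# in this module, and the parameter values below are placeholders):
#   ccodes = get_cc_ode_fcn(0.5, 1.0, 1.0, 0.1, 0.5, 1.0, 1.0, 0.1)
#   # ccodes[0:2] name the trivial velocity equations; ccodes[2:4] hold the
#   # generated C expressions for the two angular accelerations.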
| alfkjartan/pendulum | double_pendulum_symbolic.py | Python | gpl-2.0 | 30,118 |
###############################################################################
# This file is part of openWNS (open Wireless Network Simulator)
# _____________________________________________________________________________
#
# Copyright (C) 2004-2007
# Chair of Communication Networks (ComNets)
# Kopernikusstr. 5, D-52074 Aachen, Germany
# phone: ++49-241-80-27910,
# fax: ++49-241-80-22242
# email: [email protected]
# www: http://www.openwns.org
# _____________________________________________________________________________
#
# openWNS is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License version 2 as published by the
# Free Software Foundation;
#
# openWNS is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
from Logger import Logger
from openwns.node import Node
from openwns.node import Component
from openwns.node import NoRadio
class VirtualDHCP(Component):
nameInComponentFactory = "ip.VDHCP"
subnetIdentifier = None
startAddress = None
endAddress = None
subnetMask = None
unbindDelay = None
logger = None
def __init__(self, node, subnetIdentifier, startAddress, endAddress, subnetMask, unbindDelay = 60):
super(VirtualDHCP, self).__init__(node, subnetIdentifier)
self.subnetIdentifier = subnetIdentifier
self.subnetMask = subnetMask
self.startAddress = startAddress
self.endAddress = endAddress
self.unbindDelay = unbindDelay
self.logger = Logger(self.subnetIdentifier, True, node.logger)
class VirtualDHCPServer(Node, NoRadio):
def __init__(self, name, subnetIdentifier, startAddress, endAddress, subnetMask):
Node.__init__(self, name)
vdhcp = VirtualDHCP(self, subnetIdentifier, startAddress, endAddress, subnetMask)
self.setProperty("Type", "Virtual")
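# Example (illustrative values only):
#   vdhcp_node = VirtualDHCPServer("vdhcp-node", "mesh0",
#                                  "192.168.10.2", "192.168.10.254",
#                                  "255.255.255.0")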
| creasyw/IMTAphy | modules/nl/ip/PyConfig/ip/VirtualDHCP.py | Python | gpl-2.0 | 2,275 |
from copy import deepcopy as cp
class Concordancer:
def __init__(self, args):
self.args = args
self.negations = set([])
self.intervals = []
self.pattern = []
self.indexes = set([])
deduction = 0
for i, (boolean, target) in enumerate(self.args['targets']):
if not boolean:
at_least, up_to = self.__get_interval(target)
deduction += 1
self.intervals.append((i - deduction, at_least, up_to))
else:
self.indexes.add(len(self.pattern))
self.pattern.append(target)
self.__fill_missing_intervals()
self.matches = []
def __fill_missing_intervals(self):
covered = set([i for i, _, _ in self.intervals])
for j in sorted(list(self.indexes))[:-1]:
if j not in covered:
self.intervals.append((j, 0, 1))
self.intervals.sort()
def __get_interval(self, interval):
at_least = interval.group(1)
up_to = interval.group(2)
if at_least:
at_least = int(at_least)
else:
at_least = 2
if up_to:
up_to = int(up_to)
else:
up_to = 3
return at_least, up_to
def __call__(self, tokens):
focus = 0
target = self.pattern[focus]
_, regex = target
matches = []
for i, token in enumerate(tokens):
if regex.match(token):
onset = i + 1
match = [(i, token)]
_matches = self.concordance(
cp(match), focus + 1, onset, tokens
)
for _match in _matches:
start = _match[0][0]
end = _match[-1][0]
window = self.frame(tokens, start, end)
matches.append(' '.join(window))
return matches
def frame(self, tokens, start, end):
_start = start - self.args['left']
_end = end + self.args['right'] + 1
if _start < 0:
_start = 0
if _end > len(tokens):
_end = len(tokens)
return tokens[_start:_end]
def concordance(self, matches, focus, prev_onset, space):
if focus == len(self.pattern):
return [matches]
_, regex = self.pattern[focus]
_, start, end = self.intervals[focus - 1]
area = space[prev_onset + start:prev_onset + end]
new_matches = []
for i, token in enumerate(area):
if regex.match(token):
new_match = [(i + prev_onset + start, token)]
if self.is_last_position(focus):
new_matches.append(cp(matches) + new_match)
else:
forward_matches = cp(matches) + new_match
onset = prev_onset + i + 1
newer_matches = self.concordance(
forward_matches, focus + 1, onset, space
)
new_matches += newer_matches
return new_matches
def is_last_position(self, focus):
if focus == len(self.pattern) - 1:
return True
else:
return False
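# Minimal usage sketch (assumptions inferred from the code above: each enabled
# target is a (label, compiled-regex) pair, and 'left'/'right' give the context
# window in tokens):
#   import re
#   args = {'targets': [(True, ('w1', re.compile(r'black'))),
#                       (True, ('w2', re.compile(r'cats?')))],
#           'left': 2, 'right': 2}
#   conc = Concordancer(args)
#   print(conc("the big black cat sat on the mat".split()))
#   # -> ['the big black cat sat on']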
| JordiCarreraVentura/concordancer | Concordancer.py | Python | gpl-2.0 | 3,301 |
# -*- coding: utf-8 -*-
# Copyright (c) Pilot Systems and Libération, 2010-2011
# This file is part of SeSQL.
# SeSQL is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
# SeSQL is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with SeSQL. If not, see <http://www.gnu.org/licenses/>.
"""
Handle the type map, that is, the map between Django models and SQL tables
"""
from sesql import config
class TypeMap(object):
"""
Handle the classes <=> table mapping
"""
def __init__(self):
"""
Constructor
"""
self.tables = {}
self.classes = {}
self.class_names = {}
self.valid_classes = []
self.valid_class_names = []
        type_map = [ t if len(t) == 3 else t + (True,) for t in config.TYPE_MAP ]
for klass, table, recurse in type_map:
# Ensure table exists in the map
self.tables[table] = []
# Now, for each subclasses...
if recurse:
subclasses = self.all_subclasses(klass)
else:
subclasses = [ klass ]
for sc in subclasses:
if not sc in self.classes:
self.classes[sc] = table
self.class_names[sc.__name__] = sc
# And now fill the reverse lookup, we can only do it now, because the
# same class can be reachable twice
for klass, table in self.classes.items():
if table:
self.valid_classes.append(klass)
self.valid_class_names.append(klass.__name__)
self.tables[table].append(klass)
@staticmethod
def all_subclasses(klass, done = None):
"""
Get all subclasses of a given class
"""
if done is None:
done = set()
if klass in done:
return []
res = [ klass ]
done.add(klass)
for sc in klass.__subclasses__():
res += TypeMap.all_subclasses(sc, done)
return res
def all_tables(self):
"""
List all tables
"""
return self.tables.keys()
def all_classes(self):
"""
List all classes
"""
return self.valid_classes
def all_class_names(self):
"""
List all class names
"""
return self.valid_class_names
def get_class_names_for(self, table):
"""
Get the name of classes for this table
"""
return [ k.__name__ for k in self.get_classes_for(table) ]
def get_classes_for(self, table):
"""
Get the list of classes for this table
"""
return self.tables.get(table, [])
def get_table_for(self, klass):
"""
Get the table for this klass
"""
return self.classes.get(self.get_class_by_name(klass), None)
def get_class_by_name(self, klass):
"""
Get the real Django class from its name
"""
if isinstance(klass, (str, unicode)):
return self.class_names.get(klass, None)
return klass
typemap = TypeMap()
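# Typical queries against the singleton (model and table names illustrative):
#   typemap.get_table_for(Article)             # -> "article_table"
#   typemap.get_classes_for("article_table")   # -> [Article, ...]
#   typemap.get_class_by_name("Article")       # -> the Article model class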
| liberation/sesql | sesql/typemap.py | Python | gpl-2.0 | 3,541 |
# -*- coding: utf-8 -*-
from hls_server.fields import Field, BasicField
user_fields = {
'name': BasicField(title='Pseudonyme', weight=0.8, ftype='char'),
'sexe': BasicField(title='Sexe', weight=0.7, ftype='qcm'),
'firstname': BasicField(title='Prénom', weight=0.2, ftype='char'),
'lastname': BasicField(title='Nom de famille', weight=0.2, ftype='char'),
'birthyear': BasicField(title='Année de naissance', weight=0.3, ftype='number'),
'nationality': BasicField(title='Nationalité', weight=0.2, ftype='qcm'),
'legalstatus': BasicField(title='Statut légal', weight=0.2, ftype='qcm'),
'secu': BasicField(title='Couverture médicale', weight=0.3, ftype='qcm'),
'blood_group': Field(title='Groupe sanguin', weight=0.7),
'user_id': BasicField(title='ID Utilisateur', weight=0),
}
def get_field(name):
return user_fields[name]
def get_fields():
return user_fields.items()
class User:
def __init__(self, **kwargs):
# Automatically add init parameters as instance fields
for k, v in kwargs.items():
self.set_value(k, v)
def get_basic_namespace(self):
        # TODO: make this less dynamic / more explicit
result = {}
for field_name, field in get_fields():
if field.is_basic_field():
value = getattr(self, field_name, None)
result[field_name] = value
return result
def get_namespace(self):
result = {}
for field_name, field in get_fields():
value = getattr(self, field_name, None)
result[field_name] = value
return result
@classmethod
def get_fields(cls):
for field_name, field in get_fields():
yield field_name, field
def set_value(self, name, value):
field = get_field(name)
if not field:
return
setattr(self, name, value)
from hls_server.database import get_database
database = get_database()
if database:
database.commit()
    def get_value(self, name):
field = get_field(name)
if not field:
return None
return getattr(self, name, None)
def __repr__(self):
return "<User({0}, {1})>".format(self.user_id, self.name)
| Nabellaleen/hacklasante_052015 | hls_server/user.py | Python | gpl-2.0 | 2,289 |
#!/usr/bin/env python3
# - * -coding: utf - 8 - * -
import argparse
import logging
import os
import time
from tqdm import tqdm
import sys
def main():
"""
It takes in input a variant statistics file ( in output from get.var_stats.py ) and returns how many SNPs
heterozygous and homozygous are present using a threshold value
:return: Info in stdout
"""
parser = argparse.ArgumentParser(description='SNPs classification using a threshold value. Print stats to stdout')
parser.add_argument('-i', action='store', dest='input_file', help='Variant statistics file', required=True)
parser.add_argument('-e', action='store', dest='threshold', help='Threshold for classification. Default: 80',
type=int)
parser.add_argument('-d', action='store', dest='file_del', help='Input file delimiter. Default: \\t')
parser.add_argument('-o', action='store', dest='outputDir',
help='Output (root) directory. Default: current directory')
parser.add_argument('-v', help='increase output verbosity', dest='verbosity', action='store_true')
args = parser.parse_args()
input_file = args.input_file
if args.verbosity:
log_level = logging.DEBUG
else:
log_level = logging.INFO
prepare_loggers(log_level)
logging.info('Program Started')
if args.outputDir and not os.path.exists(args.outputDir):
logging.error('Output dir not found.')
sys.exit(1)
if args.outputDir:
out_dir = args.outputDir + '/'
else:
out_dir = os.getcwd() + '/'
if not os.path.exists(out_dir):
os.mkdir(out_dir)
if args.file_del is None:
file_delimiter = '\t'
else:
        file_delimiter = args.file_del
threshold = 80
if args.threshold is not None:
threshold = int(args.threshold)
logging.info('#### Generating Statistics ####')
classify_snp(input_file, file_delimiter, threshold, out_dir)
logging.info('Program Completed')
# utility
def prepare_loggers(log_level):
logging.basicConfig(level=log_level,
format='%(levelname)-8s [%(asctime)s] %(message)s',
datefmt='%y%m%d %H:%M:%S')
def get_file_length(file):
"""
Return the number of row ( header excluded )
:param file:
:return:
"""
length = 0
if file is not None:
try:
f = open(file, 'r')
for line in f:
if line.startswith('#'):
continue
else:
length += 1
f.close()
except Exception as e:
logging.error("Error during opening file. {0}".format(e))
return length
def check_percentage(base_count, snp_number, threshold):
return (float(base_count) / snp_number) * 100 >= threshold
def classify_snp(input_file, file_delimiter, threshold, out_dir):
"""
:param input_file: individual.stats - from get.var_stats.py script
:param file_delimiter: default \t
:param threshold: default 80% - depends on error rate for technology
:param out_dir:
:return:
"""
logging.debug('Started method classifySNP')
start_time = time.time()
snp_total_number = get_file_length(input_file)
stats_file = open(input_file, 'r')
count_homo = 0
count_het = 0
out_file = open(out_dir + 'homozygous.pos', 'w')
for line in tqdm(stats_file, total=snp_total_number):
if line.strip().startswith("#") or not line.strip():
continue
row = line.split(file_delimiter)
homozygous_flag = False
snp_position = row[0]
snp_ref = row[1]
snp_alt = row[2]
snp__a = float(row[3])
snp__c = float(row[5])
snp__g = float(row[7])
snp__t = float(row[9])
snp_total_reads = float(row[12])
if snp_total_reads > 0:
logging.debug('Reading SNP: {} \n'.format(snp_position))
if ((snp_ref == 'A' or snp_alt == 'A') and check_percentage(snp__a, snp_total_reads, threshold)) \
or check_percentage(snp__a, snp_total_reads, threshold):
homozygous_flag = True
elif ((snp_ref == 'C' or snp_alt == 'C') and check_percentage(snp__c, snp_total_reads, threshold)) \
or check_percentage(snp__c, snp_total_reads, threshold):
homozygous_flag = True
elif ((snp_ref == 'G' or snp_alt == 'G') and check_percentage(snp__g, snp_total_reads, threshold)) \
or check_percentage(snp__g, snp_total_reads, threshold):
homozygous_flag = True
elif ((snp_ref == 'T' or snp_alt == 'T') and check_percentage(snp__t, snp_total_reads, threshold)) \
or check_percentage(snp__t, snp_total_reads, threshold):
homozygous_flag = True
if homozygous_flag:
count_homo += 1
out_file.write("{}\n".format(snp_position))
logging.debug("SNP {} is homozygous".format(snp_position))
else:
count_het += 1
logging.debug("SNP {} is heterozygous".format(snp_position))
stats_file.close()
logging.info("Number or total SNPs: {} \n".format(snp_total_number))
logging.info(
"Number of Homozygous SNPs: {}, {}% \n".format(count_homo, (float(count_homo) / snp_total_number) * 100))
logging.info(
"Number of Heterozygous SNPs: {0}, {1}% \n".format(count_het, (float(count_het) / snp_total_number) * 100))
logging.debug('Finished method classifySNP in: {} seconds'.format(
time.time() - start_time))
if __name__ == '__main__':
main()
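# Example invocation (illustrative paths; individual.stats comes from
# get.var_stats.py as described in the docstring above):
#   python statistics.py -i individual.stats -e 80 -o results/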
| AlgoLab/HapCol | utils/statistics.py | Python | gpl-2.0 | 5,723 |
import logging
from django import forms
from django.contrib.auth import get_user_model
from ..bus.models import Route
from ..users.models import Email, Grade, Phone, Website
logger = logging.getLogger(__name__)
class BusRouteForm(forms.Form):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.BUS_ROUTE_CHOICES = [(None, "Set bus route...")]
routes = Route.objects.all().order_by("route_name")
for route in routes:
self.BUS_ROUTE_CHOICES += [(route.route_name, route.route_name)]
self.fields["bus_route"] = forms.ChoiceField(choices=self.BUS_ROUTE_CHOICES, widget=forms.Select, required=False)
class PreferredPictureForm(forms.Form):
def __init__(self, user, *args, **kwargs):
super().__init__(*args, **kwargs)
self.PREFERRED_PICTURE_CHOICES = [("AUTO", "Auto-select the most recent photo")]
for i in range(4):
try:
grade = Grade.names[i]
user.photos.get(grade_number=(i + 9)) # Only display option if the photo exists
self.PREFERRED_PICTURE_CHOICES += [(i + 9, grade.title() + " Photo")]
except Exception:
pass
self.fields["preferred_photo"] = forms.ChoiceField(choices=self.PREFERRED_PICTURE_CHOICES, widget=forms.RadioSelect(), required=True)
class PrivacyOptionsForm(forms.Form):
def __init__(self, user, *args, **kwargs):
super().__init__(*args, **kwargs)
def flag(label, default):
return forms.BooleanField(initial=default, label=label, required=False)
self.fields["show_address"] = flag(None, False)
self.fields["show_address-self"] = flag("Show Address", False)
self.fields["show_telephone"] = flag(None, False)
self.fields["show_telephone-self"] = flag("Show Phone", False)
self.fields["show_birthday"] = flag(None, False)
self.fields["show_birthday-self"] = flag("Show Birthday", False)
pictures_label = "Show Pictures"
if user.is_student:
pictures_label += " on Import"
self.fields["show_pictures"] = flag(None, False)
self.fields["show_pictures-self"] = flag(pictures_label, False)
# photos = user.photo_permissions["self"]
# for i in range(4):
# grade = Grade.names[i]
# if photos[grade] is not None:
# self.fields["photoperm-{}".format(grade)] = flag(None, False)
# self.fields["photoperm-{}-self".format(grade)] = flag("Show {} Photo".format(grade.capitalize()), False)
self.fields["show_eighth"] = flag(None, False)
self.fields["show_eighth-self"] = flag("Show Eighth Period Schedule", False)
self.fields["show_schedule"] = flag(None, False)
self.fields["show_schedule-self"] = flag("Show Class Schedule", False)
if not user.has_admin_permission("preferences"):
for name in self.fields:
if not name.endswith("-self"):
self.fields[name].widget.attrs["class"] = "disabled"
class NotificationOptionsForm(forms.Form):
def __init__(self, user, *args, **kwargs):
super().__init__(*args, **kwargs)
def flag(label, default):
return forms.BooleanField(initial=default, label=label, required=False)
self.fields["receive_news_emails"] = flag("Receive News Emails", False)
self.fields["receive_eighth_emails"] = flag("Receive Eighth Period Emails", False)
label = "Primary Email"
if user.emails.all().count() == 0:
label = "You can set a primary email after adding emails below."
self.fields["primary_email"] = forms.ModelChoiceField(
queryset=Email.objects.filter(user=user), required=False, label=label, disabled=(user.emails.all().count() == 0)
)
class DarkModeForm(forms.Form):
def __init__(self, user, *args, **kwargs):
super().__init__(*args, **kwargs)
self.fields["dark_mode_enabled"] = forms.BooleanField(
initial=user.dark_mode_properties.dark_mode_enabled, label="Enable dark mode?", required=False
)
class PhoneForm(forms.ModelForm):
"""Represents a phone number (number + purpose)"""
_number = forms.CharField(max_length=14)
class Meta:
model = Phone
fields = ["purpose", "_number"]
class EmailForm(forms.ModelForm):
def clean_address(self):
data = self.cleaned_data["address"]
if data.lower().strip().endswith("@fcpsschools.net"):
raise forms.ValidationError("You cannot provide a fcpsschools.net address.", code="invalid")
return data
class Meta:
model = Email
fields = ["address"]
class WebsiteForm(forms.ModelForm):
class Meta:
model = Website
fields = ["url"]
PhoneFormset = forms.inlineformset_factory(get_user_model(), Phone, form=PhoneForm, extra=1)
EmailFormset = forms.inlineformset_factory(get_user_model(), Email, form=EmailForm, extra=1)
WebsiteFormset = forms.inlineformset_factory(get_user_model(), Website, form=WebsiteForm, extra=1)
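# Typical view-side use of the inline formsets (sketch; the request handling
# shown is assumed Django boilerplate, not code from this app):
#   formset = PhoneFormset(request.POST, instance=request.user)
#   if formset.is_valid():
#       formset.save()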
| tjcsl/ion | intranet/apps/preferences/forms.py | Python | gpl-2.0 | 5,148 |
"""
Django settings for office project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
SETTINGS_DIR = os.path.dirname(__file__)
PROJECT_PATH = os.path.join(SETTINGS_DIR, os.pardir)
PROJECT_PATH = os.path.abspath(PROJECT_PATH)
TEMPLATE_PATH = os.path.join(PROJECT_PATH, 'templates')
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'i)bb+yfzk@wqx_2f9z8&rw$5or*al#=h@5q1n7^1mnb5y0-v$v'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'actions',
'users',
'status',
'task',
'attendance',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'office.urls'
WSGI_APPLICATION = 'office.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Asia/Dhaka'
USE_I18N = True
USE_L10N = True
USE_TZ = False
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_PATH = os.path.join(PROJECT_PATH, 'static')
STATIC_URL = '/static/'
STATICFILES_DIRS = (
STATIC_PATH,
)
TEMPLATE_DIRS = (
os.path.join(BASE_DIR, 'templates'),
)
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(os.path.dirname(BASE_DIR), "static", "media")
STATIC_ROOT = os.path.join(os.path.dirname(BASE_DIR), "static", "static")
| ShovanSarker/mango_office | office/settings.py | Python | gpl-2.0 | 2,569 |
import filecmp
import os
import unittest
from mascpcr import pipeline, genbankfeatures
from ._common import RECODED_GENOME_FP, REFERENCE_GENOME_FP, TEST_OUTPUT_DIR, \
TEST_CACHE_DIR, REFERENCE_GB_STR, RECODED_GB_STR, \
REFERENCE_GB, RECODED_GB
class TestPipeline(unittest.TestCase):
def test_findMascPrimers(self):
start_idx, end_idx = genbankfeatures.findAggregateBoundaries(
# `sr_obj` expects a SeqRecord object
sr_obj=RECODED_GB,
# These are the features types that we want to look for
feature_types=['synth_fragment'],
# We will look up the qualifier 'label' in each Feature object and check
# to make sure that the regular expression "seg23.*" matches its contents
# (regex refresher: seg23.* will match "seg23" followed by any characters,
# e.g., seg23_001)
qualifier_regexs={'label':'seg23.*'}
)
genome_str, ref_genome_str, idx_lut, edge_lut, mismatch_lut, \
border_lut = pipeline.generateLUTs(
genome_fp=RECODED_GENOME_FP,
ref_genome_fp=REFERENCE_GENOME_FP,
start_idx=start_idx,
end_idx=end_idx,
border_feature_types=['synth_fragment'],
cache_luts=True,
cache_dir=TEST_CACHE_DIR
)
# We have to prevent the output file from containing the parameters
# as it will dump the absolute filepaths, which makes file comparisons
# more difficult
params = {
'dump_params': False,
'output_fp': TEST_CACHE_DIR,
'output_basename': 'seg23'
}
pipeline.findMascPrimers(
idx_lut=idx_lut,
genome_str=RECODED_GB_STR,
ref_genome_str=REFERENCE_GB_STR,
start_idx=start_idx,
end_idx=end_idx,
edge_lut=edge_lut,
mismatch_lut=mismatch_lut,
border_lut=border_lut,
params=params
)
# Now compare the output files to the expected output files
output_report_fp = os.path.join(TEST_CACHE_DIR,
'seg23_masc_report.csv')
check_output_report_fp = os.path.join(TEST_OUTPUT_DIR,
'seg23_masc_report.csv')
# NOTE: Uncomment the following code to re-create expected output.
# If code breaks, you should figure out whether there is really a bug
# before uncommenting the following and changing the expected output.
# TODO: Come up with more robust test strategy.
# import shutil
# shutil.copyfile(output_report_fp, check_output_report_fp)
self.assertTrue(filecmp.cmp(output_report_fp, check_output_report_fp))
| Wyss/mascpcr | tests/test_pipeline.py | Python | gpl-2.0 | 2,863 |
# -*- coding: utf-8 -*-
from django.db.models import get_model
from django.conf import settings
from transifex.resources.signals import post_save_translation
from transifex.addons.lotte.signals import lotte_save_translation
def save_copyrights(sender, **kwargs):
"""
Save copyright info for po files.
"""
resource = kwargs['resource']
language = kwargs['language']
if resource.i18n_type != 'PO':
return
copyrights = kwargs['copyrights']
CModel = get_model('copyright', 'Copyright')
for c in copyrights:
owner = c[0]
years = c[1]
for year in years:
CModel.objects.assign(
resource=resource, language=language,
owner=owner, year=year
)
def connect():
post_save_translation.connect(save_copyrights)
lotte_save_translation.connect(save_copyrights)
| hfeeki/transifex | transifex/addons/copyright/handlers.py | Python | gpl-2.0 | 881 |
# -*- Mode: python; tab-width: 4; indent-tabs-mode:nil; coding:utf-8 -*-
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
#
# MDAnalysis --- https://www.mdanalysis.org
# Copyright (c) 2006-2017 The MDAnalysis Development Team and contributors
# (see the file AUTHORS for the full list of names)
#
# Released under the GNU Public Licence, v2 or any higher version
#
# Please cite your use of MDAnalysis in published work:
#
# R. J. Gowers, M. Linke, J. Barnoud, T. J. E. Reddy, M. N. Melo, S. L. Seyler,
# D. L. Dotson, J. Domanski, S. Buchoux, I. M. Kenney, and O. Beckstein.
# MDAnalysis: A Python package for the rapid analysis of molecular dynamics
# simulations. In S. Benthall and S. Rostrup editors, Proceedings of the 15th
# Python in Science Conference, pages 102-109, Austin, TX, 2016. SciPy.
# doi: 10.25080/majora-629e541a-00e
#
# N. Michaud-Agrawal, E. J. Denning, T. B. Woolf, and O. Beckstein.
# MDAnalysis: A Toolkit for the Analysis of Molecular Dynamics Simulations.
# J. Comput. Chem. 32 (2011), 2319--2327, doi:10.1002/jcc.21787
#
"""
:mod:`MDAnalysis.lib` --- Access to lower level routines
================================================================
"""
__all__ = ['log', 'transformations', 'util', 'mdamath', 'distances',
'NeighborSearch', 'formats', 'pkdtree', 'nsgrid']
from . import log
from . import transformations
from . import util
from . import mdamath
from . import distances # distances relies on mdamath
from . import NeighborSearch
from . import formats
from . import pkdtree
from . import nsgrid
from .picklable_file_io import (FileIOPicklable,
BufferIOPicklable,
TextIOPicklable)
| MDAnalysis/mdanalysis | package/MDAnalysis/lib/__init__.py | Python | gpl-2.0 | 1,706 |
import numpy as np
def my_funct(t):
p = np.sin(t) + 2*np.sin(2*t) + 7*np.sin(22*t)+30*np.sin(50*t)
return p
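# Illustrative use (assumed; not part of the original file): sample the signal
# and inspect which of the four component frequencies dominate its spectrum.
#   t = np.linspace(0, 2*np.pi, 4096, endpoint=False)
#   spectrum = np.abs(np.fft.rfft(my_funct(t)))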
| algebrato/Exercise | FFT/funct.py | Python | gpl-2.0 | 120 |
#
#
# Copyright (C) 2008, 2009, 2010, 2011 Google Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
"""KVM hypervisor
"""
import errno
import os
import os.path
import re
import tempfile
import time
import logging
import pwd
import struct
import fcntl
import shutil
from ganeti import utils
from ganeti import constants
from ganeti import errors
from ganeti import serializer
from ganeti import objects
from ganeti import uidpool
from ganeti import ssconf
from ganeti.hypervisor import hv_base
from ganeti import netutils
from ganeti.utils import wrapper as utils_wrapper
_KVM_NETWORK_SCRIPT = constants.SYSCONFDIR + "/ganeti/kvm-vif-bridge"
# TUN/TAP driver constants, taken from <linux/if_tun.h>
# They are architecture-independent and already hardcoded in qemu-kvm source,
# so we can safely include them here.
TUNSETIFF = 0x400454ca
TUNGETIFF = 0x800454d2
TUNGETFEATURES = 0x800454cf
IFF_TAP = 0x0002
IFF_NO_PI = 0x1000
IFF_VNET_HDR = 0x4000
def _ProbeTapVnetHdr(fd):
"""Check whether to enable the IFF_VNET_HDR flag.
To do this, _all_ of the following conditions must be met:
1. TUNGETFEATURES ioctl() *must* be implemented
2. TUNGETFEATURES ioctl() result *must* contain the IFF_VNET_HDR flag
3. TUNGETIFF ioctl() *must* be implemented; reading the kernel code in
drivers/net/tun.c there is no way to test this until after the tap device
has been created using TUNSETIFF, and there is no way to change the
IFF_VNET_HDR flag after creating the interface, catch-22! However both
TUNGETIFF and TUNGETFEATURES were introduced in kernel version 2.6.27,
thus we can expect TUNGETIFF to be present if TUNGETFEATURES is.
@type fd: int
@param fd: the file descriptor of /dev/net/tun
"""
req = struct.pack("I", 0)
try:
res = fcntl.ioctl(fd, TUNGETFEATURES, req)
except EnvironmentError:
logging.warning("TUNGETFEATURES ioctl() not implemented")
return False
tunflags = struct.unpack("I", res)[0]
if tunflags & IFF_VNET_HDR:
return True
else:
logging.warning("Host does not support IFF_VNET_HDR, not enabling")
return False
def _OpenTap(vnet_hdr=True):
"""Open a new tap device and return its file descriptor.
This is intended to be used by a qemu-type hypervisor together with the -net
tap,fd=<fd> command line parameter.
@type vnet_hdr: boolean
@param vnet_hdr: Enable the VNET Header
@return: (ifname, tapfd)
@rtype: tuple
"""
try:
tapfd = os.open("/dev/net/tun", os.O_RDWR)
except EnvironmentError:
raise errors.HypervisorError("Failed to open /dev/net/tun")
flags = IFF_TAP | IFF_NO_PI
if vnet_hdr and _ProbeTapVnetHdr(tapfd):
flags |= IFF_VNET_HDR
# The struct ifreq ioctl request (see netdevice(7))
ifr = struct.pack("16sh", "", flags)
try:
res = fcntl.ioctl(tapfd, TUNSETIFF, ifr)
except EnvironmentError:
raise errors.HypervisorError("Failed to allocate a new TAP device")
# Get the interface name from the ioctl
ifname = struct.unpack("16sh", res)[0].strip("\x00")
return (ifname, tapfd)
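# Usage sketch (mirrors how _ExecuteKVMRuntime consumes this helper):
#   ifname, tapfd = _OpenTap()
#   kvm_cmd.extend(["-net", "tap,fd=%d" % tapfd])
# The fd must stay open across the fork/exec of the qemu-kvm process
# (see the noclose_fds argument in _RunKVMCmd below).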
class KVMHypervisor(hv_base.BaseHypervisor):
"""KVM hypervisor interface"""
CAN_MIGRATE = True
_ROOT_DIR = constants.RUN_GANETI_DIR + "/kvm-hypervisor"
_PIDS_DIR = _ROOT_DIR + "/pid" # contains live instances pids
_UIDS_DIR = _ROOT_DIR + "/uid" # contains instances reserved uids
_CTRL_DIR = _ROOT_DIR + "/ctrl" # contains instances control sockets
_CONF_DIR = _ROOT_DIR + "/conf" # contains instances startup data
_NICS_DIR = _ROOT_DIR + "/nic" # contains instances nic <-> tap associations
# KVM instances with chroot enabled are started in empty chroot directories.
_CHROOT_DIR = _ROOT_DIR + "/chroot" # for empty chroot directories
# After an instance is stopped, its chroot directory is removed.
# If the chroot directory is not empty, it can't be removed.
# A non-empty chroot directory indicates a possible security incident.
# To support forensics, the non-empty chroot directory is quarantined in
# a separate directory, called 'chroot-quarantine'.
_CHROOT_QUARANTINE_DIR = _ROOT_DIR + "/chroot-quarantine"
_DIRS = [_ROOT_DIR, _PIDS_DIR, _UIDS_DIR, _CTRL_DIR, _CONF_DIR, _NICS_DIR,
_CHROOT_DIR, _CHROOT_QUARANTINE_DIR]
PARAMETERS = {
constants.HV_KERNEL_PATH: hv_base.OPT_FILE_CHECK,
constants.HV_INITRD_PATH: hv_base.OPT_FILE_CHECK,
constants.HV_ROOT_PATH: hv_base.NO_CHECK,
constants.HV_KERNEL_ARGS: hv_base.NO_CHECK,
constants.HV_ACPI: hv_base.NO_CHECK,
constants.HV_SERIAL_CONSOLE: hv_base.NO_CHECK,
constants.HV_VNC_BIND_ADDRESS:
(False, lambda x: (netutils.IP4Address.IsValid(x) or
utils.IsNormAbsPath(x)),
"the VNC bind address must be either a valid IP address or an absolute"
" pathname", None, None),
constants.HV_VNC_TLS: hv_base.NO_CHECK,
constants.HV_VNC_X509: hv_base.OPT_DIR_CHECK,
constants.HV_VNC_X509_VERIFY: hv_base.NO_CHECK,
constants.HV_VNC_PASSWORD_FILE: hv_base.OPT_FILE_CHECK,
constants.HV_KVM_FLOPPY_IMAGE_PATH: hv_base.OPT_FILE_CHECK,
constants.HV_CDROM_IMAGE_PATH: hv_base.OPT_FILE_CHECK,
constants.HV_KVM_CDROM2_IMAGE_PATH: hv_base.OPT_FILE_CHECK,
constants.HV_BOOT_ORDER:
hv_base.ParamInSet(True, constants.HT_KVM_VALID_BO_TYPES),
constants.HV_NIC_TYPE:
hv_base.ParamInSet(True, constants.HT_KVM_VALID_NIC_TYPES),
constants.HV_DISK_TYPE:
hv_base.ParamInSet(True, constants.HT_KVM_VALID_DISK_TYPES),
constants.HV_KVM_CDROM_DISK_TYPE:
hv_base.ParamInSet(False, constants.HT_KVM_VALID_DISK_TYPES),
constants.HV_USB_MOUSE:
hv_base.ParamInSet(False, constants.HT_KVM_VALID_MOUSE_TYPES),
constants.HV_MIGRATION_PORT: hv_base.NET_PORT_CHECK,
constants.HV_MIGRATION_BANDWIDTH: hv_base.NO_CHECK,
constants.HV_MIGRATION_DOWNTIME: hv_base.NO_CHECK,
constants.HV_MIGRATION_MODE: hv_base.MIGRATION_MODE_CHECK,
constants.HV_USE_LOCALTIME: hv_base.NO_CHECK,
constants.HV_DISK_CACHE:
hv_base.ParamInSet(True, constants.HT_VALID_CACHE_TYPES),
constants.HV_SECURITY_MODEL:
hv_base.ParamInSet(True, constants.HT_KVM_VALID_SM_TYPES),
constants.HV_SECURITY_DOMAIN: hv_base.NO_CHECK,
constants.HV_KVM_FLAG:
hv_base.ParamInSet(False, constants.HT_KVM_FLAG_VALUES),
constants.HV_VHOST_NET: hv_base.NO_CHECK,
constants.HV_KVM_USE_CHROOT: hv_base.NO_CHECK,
constants.HV_MEM_PATH: hv_base.OPT_DIR_CHECK,
}
  _MIGRATION_STATUS_RE = re.compile(r'Migration\s+status:\s+(\w+)',
re.M | re.I)
_MIGRATION_INFO_MAX_BAD_ANSWERS = 5
_MIGRATION_INFO_RETRY_DELAY = 2
_VERSION_RE = re.compile(r"\b(\d+)\.(\d+)\.(\d+)\b")
ANCILLARY_FILES = [
_KVM_NETWORK_SCRIPT,
]
def __init__(self):
hv_base.BaseHypervisor.__init__(self)
# Let's make sure the directories we need exist, even if the RUN_DIR lives
# in a tmpfs filesystem or has been otherwise wiped out.
dirs = [(dname, constants.RUN_DIRS_MODE) for dname in self._DIRS]
utils.EnsureDirs(dirs)
@classmethod
def _InstancePidFile(cls, instance_name):
"""Returns the instance pidfile.
"""
return utils.PathJoin(cls._PIDS_DIR, instance_name)
@classmethod
def _InstanceUidFile(cls, instance_name):
"""Returns the instance uidfile.
"""
return utils.PathJoin(cls._UIDS_DIR, instance_name)
@classmethod
def _InstancePidInfo(cls, pid):
"""Check pid file for instance information.
Check that a pid file is associated with an instance, and retrieve
information from its command line.
@type pid: string or int
@param pid: process id of the instance to check
@rtype: tuple
@return: (instance_name, memory, vcpus)
@raise errors.HypervisorError: when an instance cannot be found
"""
alive = utils.IsProcessAlive(pid)
if not alive:
raise errors.HypervisorError("Cannot get info for pid %s" % pid)
cmdline_file = utils.PathJoin("/proc", str(pid), "cmdline")
try:
cmdline = utils.ReadFile(cmdline_file)
except EnvironmentError, err:
raise errors.HypervisorError("Can't open cmdline file for pid %s: %s" %
(pid, err))
instance = None
memory = 0
vcpus = 0
arg_list = cmdline.split('\x00')
while arg_list:
arg = arg_list.pop(0)
if arg == "-name":
instance = arg_list.pop(0)
elif arg == "-m":
memory = int(arg_list.pop(0))
elif arg == "-smp":
vcpus = int(arg_list.pop(0))
if instance is None:
raise errors.HypervisorError("Pid %s doesn't contain a ganeti kvm"
" instance" % pid)
return (instance, memory, vcpus)
def _InstancePidAlive(self, instance_name):
"""Returns the instance pidfile, pid, and liveness.
@type instance_name: string
@param instance_name: instance name
@rtype: tuple
@return: (pid file name, pid, liveness)
"""
pidfile = self._InstancePidFile(instance_name)
pid = utils.ReadPidFile(pidfile)
alive = False
try:
cmd_instance = self._InstancePidInfo(pid)[0]
alive = (cmd_instance == instance_name)
except errors.HypervisorError:
pass
return (pidfile, pid, alive)
def _CheckDown(self, instance_name):
"""Raises an error unless the given instance is down.
"""
alive = self._InstancePidAlive(instance_name)[2]
if alive:
raise errors.HypervisorError("Failed to start instance %s: %s" %
(instance_name, "already running"))
@classmethod
def _InstanceMonitor(cls, instance_name):
"""Returns the instance monitor socket name
"""
return utils.PathJoin(cls._CTRL_DIR, "%s.monitor" % instance_name)
@classmethod
def _InstanceSerial(cls, instance_name):
"""Returns the instance serial socket name
"""
return utils.PathJoin(cls._CTRL_DIR, "%s.serial" % instance_name)
@staticmethod
def _SocatUnixConsoleParams():
"""Returns the correct parameters for socat
If we have a new-enough socat we can use raw mode with an escape character.
"""
if constants.SOCAT_USE_ESCAPE:
return "raw,echo=0,escape=%s" % constants.SOCAT_ESCAPE_CODE
else:
return "echo=0,icanon=0"
@classmethod
def _InstanceKVMRuntime(cls, instance_name):
"""Returns the instance KVM runtime filename
"""
return utils.PathJoin(cls._CONF_DIR, "%s.runtime" % instance_name)
@classmethod
def _InstanceChrootDir(cls, instance_name):
"""Returns the name of the KVM chroot dir of the instance
"""
return utils.PathJoin(cls._CHROOT_DIR, instance_name)
@classmethod
def _InstanceNICDir(cls, instance_name):
"""Returns the name of the directory holding the tap device files for a
given instance.
"""
return utils.PathJoin(cls._NICS_DIR, instance_name)
@classmethod
def _InstanceNICFile(cls, instance_name, seq):
"""Returns the name of the file containing the tap device for a given NIC
"""
return utils.PathJoin(cls._InstanceNICDir(instance_name), str(seq))
@classmethod
def _TryReadUidFile(cls, uid_file):
"""Try to read a uid file
"""
if os.path.exists(uid_file):
try:
uid = int(utils.ReadOneLineFile(uid_file))
return uid
except EnvironmentError:
logging.warning("Can't read uid file", exc_info=True)
except (TypeError, ValueError):
logging.warning("Can't parse uid file contents", exc_info=True)
return None
@classmethod
def _RemoveInstanceRuntimeFiles(cls, pidfile, instance_name):
"""Removes an instance's rutime sockets/files/dirs.
"""
utils.RemoveFile(pidfile)
utils.RemoveFile(cls._InstanceMonitor(instance_name))
utils.RemoveFile(cls._InstanceSerial(instance_name))
utils.RemoveFile(cls._InstanceKVMRuntime(instance_name))
uid_file = cls._InstanceUidFile(instance_name)
uid = cls._TryReadUidFile(uid_file)
utils.RemoveFile(uid_file)
if uid is not None:
uidpool.ReleaseUid(uid)
try:
shutil.rmtree(cls._InstanceNICDir(instance_name))
except OSError, err:
if err.errno != errno.ENOENT:
raise
try:
chroot_dir = cls._InstanceChrootDir(instance_name)
utils.RemoveDir(chroot_dir)
except OSError, err:
if err.errno == errno.ENOTEMPTY:
# The chroot directory is expected to be empty, but it isn't.
new_chroot_dir = tempfile.mkdtemp(dir=cls._CHROOT_QUARANTINE_DIR,
prefix="%s-%s-" %
(instance_name,
utils.TimestampForFilename()))
logging.warning("The chroot directory of instance %s can not be"
" removed as it is not empty. Moving it to the"
" quarantine instead. Please investigate the"
" contents (%s) and clean up manually",
instance_name, new_chroot_dir)
utils.RenameFile(chroot_dir, new_chroot_dir)
else:
raise
@staticmethod
def _ConfigureNIC(instance, seq, nic, tap):
"""Run the network configuration script for a specified NIC
@param instance: instance we're acting on
@type instance: instance object
@param seq: nic sequence number
@type seq: int
@param nic: nic we're acting on
@type nic: nic object
@param tap: the host's tap interface this NIC corresponds to
@type tap: str
"""
if instance.tags:
tags = " ".join(instance.tags)
else:
tags = ""
env = {
"PATH": "%s:/sbin:/usr/sbin" % os.environ["PATH"],
"INSTANCE": instance.name,
"MAC": nic.mac,
"MODE": nic.nicparams[constants.NIC_MODE],
"INTERFACE": tap,
"INTERFACE_INDEX": str(seq),
"TAGS": tags,
}
if nic.ip:
env["IP"] = nic.ip
if nic.nicparams[constants.NIC_LINK]:
env["LINK"] = nic.nicparams[constants.NIC_LINK]
if nic.nicparams[constants.NIC_MODE] == constants.NIC_MODE_BRIDGED:
env["BRIDGE"] = nic.nicparams[constants.NIC_LINK]
result = utils.RunCmd([constants.KVM_IFUP, tap], env=env)
if result.failed:
raise errors.HypervisorError("Failed to configure interface %s: %s."
" Network configuration script output: %s" %
(tap, result.fail_reason, result.output))
def ListInstances(self):
"""Get the list of running instances.
We can do this by listing our live instances directory and
checking whether the associated kvm process is still alive.
"""
result = []
for name in os.listdir(self._PIDS_DIR):
if self._InstancePidAlive(name)[2]:
result.append(name)
return result
def GetInstanceInfo(self, instance_name):
"""Get instance properties.
@type instance_name: string
@param instance_name: the instance name
@rtype: tuple of strings
@return: (name, id, memory, vcpus, stat, times)
"""
_, pid, alive = self._InstancePidAlive(instance_name)
if not alive:
return None
_, memory, vcpus = self._InstancePidInfo(pid)
stat = "---b-"
times = "0"
return (instance_name, pid, memory, vcpus, stat, times)
def GetAllInstancesInfo(self):
"""Get properties of all instances.
@return: list of tuples (name, id, memory, vcpus, stat, times)
"""
data = []
for name in os.listdir(self._PIDS_DIR):
try:
info = self.GetInstanceInfo(name)
except errors.HypervisorError:
continue
if info:
data.append(info)
return data
def _GenerateKVMRuntime(self, instance, block_devices):
"""Generate KVM information to start an instance.
"""
kvm_version = self._GetKVMVersion()
if kvm_version:
_, v_major, v_min, _ = kvm_version
else:
raise errors.HypervisorError("Unable to get KVM version")
pidfile = self._InstancePidFile(instance.name)
kvm = constants.KVM_PATH
kvm_cmd = [kvm]
# used just by the vnc server, if enabled
kvm_cmd.extend(['-name', instance.name])
kvm_cmd.extend(['-m', instance.beparams[constants.BE_MEMORY]])
kvm_cmd.extend(['-smp', instance.beparams[constants.BE_VCPUS]])
kvm_cmd.extend(['-pidfile', pidfile])
kvm_cmd.extend(['-daemonize'])
if not instance.hvparams[constants.HV_ACPI]:
kvm_cmd.extend(['-no-acpi'])
hvp = instance.hvparams
boot_disk = hvp[constants.HV_BOOT_ORDER] == constants.HT_BO_DISK
boot_cdrom = hvp[constants.HV_BOOT_ORDER] == constants.HT_BO_CDROM
boot_floppy = hvp[constants.HV_BOOT_ORDER] == constants.HT_BO_FLOPPY
boot_network = hvp[constants.HV_BOOT_ORDER] == constants.HT_BO_NETWORK
if hvp[constants.HV_KVM_FLAG] == constants.HT_KVM_ENABLED:
kvm_cmd.extend(["-enable-kvm"])
elif hvp[constants.HV_KVM_FLAG] == constants.HT_KVM_DISABLED:
kvm_cmd.extend(["-disable-kvm"])
if boot_network:
kvm_cmd.extend(['-boot', 'n'])
disk_type = hvp[constants.HV_DISK_TYPE]
if disk_type == constants.HT_DISK_PARAVIRTUAL:
if_val = ',if=virtio'
else:
if_val = ',if=%s' % disk_type
# Cache mode
disk_cache = hvp[constants.HV_DISK_CACHE]
if instance.disk_template in constants.DTS_EXT_MIRROR:
if disk_cache != "none":
# TODO: make this a hard error, instead of a silent overwrite
logging.warning("KVM: overriding disk_cache setting '%s' with 'none'"
" to prevent shared storage corruption on migration",
disk_cache)
cache_val = ",cache=none"
elif disk_cache != constants.HT_CACHE_DEFAULT:
cache_val = ",cache=%s" % disk_cache
else:
cache_val = ""
for cfdev, dev_path in block_devices:
if cfdev.mode != constants.DISK_RDWR:
raise errors.HypervisorError("Instance has read-only disks which"
" are not supported by KVM")
# TODO: handle FD_LOOP and FD_BLKTAP (?)
boot_val = ""
if boot_disk:
kvm_cmd.extend(['-boot', 'c'])
boot_disk = False
if (v_major, v_min) < (0, 14) and disk_type != constants.HT_DISK_IDE:
boot_val = ",boot=on"
drive_val = 'file=%s,format=raw%s%s%s' % (dev_path, if_val, boot_val,
cache_val)
kvm_cmd.extend(['-drive', drive_val])
#Now we can specify a different device type for CDROM devices.
cdrom_disk_type = hvp[constants.HV_KVM_CDROM_DISK_TYPE]
if not cdrom_disk_type:
cdrom_disk_type = disk_type
iso_image = hvp[constants.HV_CDROM_IMAGE_PATH]
if iso_image:
options = ',format=raw,media=cdrom'
if boot_cdrom:
kvm_cmd.extend(['-boot', 'd'])
if cdrom_disk_type != constants.HT_DISK_IDE:
options = '%s,boot=on,if=%s' % (options, constants.HT_DISK_IDE)
else:
options = '%s,boot=on' % options
else:
if cdrom_disk_type == constants.HT_DISK_PARAVIRTUAL:
if_val = ',if=virtio'
else:
if_val = ',if=%s' % cdrom_disk_type
options = '%s%s' % (options, if_val)
drive_val = 'file=%s%s' % (iso_image, options)
kvm_cmd.extend(['-drive', drive_val])
iso_image2 = hvp[constants.HV_KVM_CDROM2_IMAGE_PATH]
if iso_image2:
options = ',format=raw,media=cdrom'
if cdrom_disk_type == constants.HT_DISK_PARAVIRTUAL:
if_val = ',if=virtio'
else:
if_val = ',if=%s' % cdrom_disk_type
options = '%s%s' % (options, if_val)
drive_val = 'file=%s%s' % (iso_image2, options)
kvm_cmd.extend(['-drive', drive_val])
floppy_image = hvp[constants.HV_KVM_FLOPPY_IMAGE_PATH]
if floppy_image:
options = ',format=raw,media=disk'
if boot_floppy:
kvm_cmd.extend(['-boot', 'a'])
options = '%s,boot=on' % options
if_val = ',if=floppy'
options = '%s%s' % (options, if_val)
drive_val = 'file=%s%s' % (floppy_image, options)
kvm_cmd.extend(['-drive', drive_val])
kernel_path = hvp[constants.HV_KERNEL_PATH]
if kernel_path:
kvm_cmd.extend(['-kernel', kernel_path])
initrd_path = hvp[constants.HV_INITRD_PATH]
if initrd_path:
kvm_cmd.extend(['-initrd', initrd_path])
root_append = ['root=%s' % hvp[constants.HV_ROOT_PATH],
hvp[constants.HV_KERNEL_ARGS]]
if hvp[constants.HV_SERIAL_CONSOLE]:
root_append.append('console=ttyS0,38400')
kvm_cmd.extend(['-append', ' '.join(root_append)])
mem_path = hvp[constants.HV_MEM_PATH]
if mem_path:
kvm_cmd.extend(["-mem-path", mem_path, "-mem-prealloc"])
mouse_type = hvp[constants.HV_USB_MOUSE]
vnc_bind_address = hvp[constants.HV_VNC_BIND_ADDRESS]
if mouse_type:
kvm_cmd.extend(['-usb'])
kvm_cmd.extend(['-usbdevice', mouse_type])
elif vnc_bind_address:
kvm_cmd.extend(['-usbdevice', constants.HT_MOUSE_TABLET])
if vnc_bind_address:
if netutils.IP4Address.IsValid(vnc_bind_address):
if instance.network_port > constants.VNC_BASE_PORT:
display = instance.network_port - constants.VNC_BASE_PORT
if vnc_bind_address == constants.IP4_ADDRESS_ANY:
vnc_arg = ':%d' % (display)
else:
vnc_arg = '%s:%d' % (vnc_bind_address, display)
else:
logging.error("Network port is not a valid VNC display (%d < %d)."
" Not starting VNC", instance.network_port,
constants.VNC_BASE_PORT)
vnc_arg = 'none'
# Only allow tls and other option when not binding to a file, for now.
# kvm/qemu gets confused otherwise about the filename to use.
vnc_append = ''
if hvp[constants.HV_VNC_TLS]:
vnc_append = '%s,tls' % vnc_append
if hvp[constants.HV_VNC_X509_VERIFY]:
vnc_append = '%s,x509verify=%s' % (vnc_append,
hvp[constants.HV_VNC_X509])
elif hvp[constants.HV_VNC_X509]:
vnc_append = '%s,x509=%s' % (vnc_append,
hvp[constants.HV_VNC_X509])
if hvp[constants.HV_VNC_PASSWORD_FILE]:
vnc_append = '%s,password' % vnc_append
vnc_arg = '%s%s' % (vnc_arg, vnc_append)
else:
vnc_arg = 'unix:%s/%s.vnc' % (vnc_bind_address, instance.name)
kvm_cmd.extend(['-vnc', vnc_arg])
else:
kvm_cmd.extend(['-nographic'])
monitor_dev = ("unix:%s,server,nowait" %
self._InstanceMonitor(instance.name))
kvm_cmd.extend(['-monitor', monitor_dev])
if hvp[constants.HV_SERIAL_CONSOLE]:
serial_dev = ('unix:%s,server,nowait' %
self._InstanceSerial(instance.name))
kvm_cmd.extend(['-serial', serial_dev])
else:
kvm_cmd.extend(['-serial', 'none'])
if hvp[constants.HV_USE_LOCALTIME]:
kvm_cmd.extend(['-localtime'])
if hvp[constants.HV_KVM_USE_CHROOT]:
kvm_cmd.extend(['-chroot', self._InstanceChrootDir(instance.name)])
# Save the current instance nics, but defer their expansion as parameters,
# as we'll need to generate executable temp files for them.
kvm_nics = instance.nics
hvparams = hvp
return (kvm_cmd, kvm_nics, hvparams)
def _WriteKVMRuntime(self, instance_name, data):
"""Write an instance's KVM runtime
"""
try:
utils.WriteFile(self._InstanceKVMRuntime(instance_name),
data=data)
except EnvironmentError, err:
raise errors.HypervisorError("Failed to save KVM runtime file: %s" % err)
def _ReadKVMRuntime(self, instance_name):
"""Read an instance's KVM runtime
"""
try:
file_content = utils.ReadFile(self._InstanceKVMRuntime(instance_name))
except EnvironmentError, err:
raise errors.HypervisorError("Failed to load KVM runtime file: %s" % err)
return file_content
def _SaveKVMRuntime(self, instance, kvm_runtime):
"""Save an instance's KVM runtime
"""
kvm_cmd, kvm_nics, hvparams = kvm_runtime
serialized_nics = [nic.ToDict() for nic in kvm_nics]
serialized_form = serializer.Dump((kvm_cmd, serialized_nics, hvparams))
self._WriteKVMRuntime(instance.name, serialized_form)
def _LoadKVMRuntime(self, instance, serialized_runtime=None):
"""Load an instance's KVM runtime
"""
if not serialized_runtime:
serialized_runtime = self._ReadKVMRuntime(instance.name)
loaded_runtime = serializer.Load(serialized_runtime)
kvm_cmd, serialized_nics, hvparams = loaded_runtime
kvm_nics = [objects.NIC.FromDict(snic) for snic in serialized_nics]
return (kvm_cmd, kvm_nics, hvparams)
def _RunKVMCmd(self, name, kvm_cmd, tap_fds=None):
"""Run the KVM cmd and check for errors
@type name: string
@param name: instance name
@type kvm_cmd: list of strings
@param kvm_cmd: runcmd input for kvm
@type tap_fds: list of int
@param tap_fds: fds of tap devices opened by Ganeti
"""
try:
result = utils.RunCmd(kvm_cmd, noclose_fds=tap_fds)
finally:
for fd in tap_fds:
utils_wrapper.CloseFdNoError(fd)
if result.failed:
raise errors.HypervisorError("Failed to start instance %s: %s (%s)" %
(name, result.fail_reason, result.output))
if not self._InstancePidAlive(name)[2]:
raise errors.HypervisorError("Failed to start instance %s" % name)
def _ExecuteKVMRuntime(self, instance, kvm_runtime, incoming=None):
"""Execute a KVM cmd, after completing it with some last minute data
@type incoming: tuple of strings
@param incoming: (target_host_ip, port)
"""
# Small _ExecuteKVMRuntime hv parameters programming howto:
# - conf_hvp contains the parameters as configured on ganeti. they might
# have changed since the instance started; only use them if the change
# won't affect the inside of the instance (which hasn't been rebooted).
# - up_hvp contains the parameters as they were when the instance was
# started, plus any new parameter which has been added between ganeti
# versions: it is paramount that those default to a value which won't
# affect the inside of the instance as well.
conf_hvp = instance.hvparams
name = instance.name
self._CheckDown(name)
temp_files = []
kvm_cmd, kvm_nics, up_hvp = kvm_runtime
up_hvp = objects.FillDict(conf_hvp, up_hvp)
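    # Illustrative sketch only (assuming FillDict(defaults, overrides)
    # returns a copy of the first dict updated with the second), e.g.:
    #   FillDict({'acpi': True, 'boot_order': 'disk'}, {'boot_order': 'cdrom'})
    #   -> {'acpi': True, 'boot_order': 'cdrom'}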
kvm_version = self._GetKVMVersion()
if kvm_version:
_, v_major, v_min, _ = kvm_version
else:
raise errors.HypervisorError("Unable to get KVM version")
# We know it's safe to run as a different user upon migration, so we'll use
# the latest conf, from conf_hvp.
security_model = conf_hvp[constants.HV_SECURITY_MODEL]
if security_model == constants.HT_SM_USER:
kvm_cmd.extend(["-runas", conf_hvp[constants.HV_SECURITY_DOMAIN]])
# We have reasons to believe changing something like the nic driver/type
# upon migration won't exactly fly with the instance kernel, so for nic
# related parameters we'll use up_hvp
tapfds = []
taps = []
if not kvm_nics:
kvm_cmd.extend(["-net", "none"])
else:
vnet_hdr = False
tap_extra = ""
nic_type = up_hvp[constants.HV_NIC_TYPE]
if nic_type == constants.HT_NIC_PARAVIRTUAL:
        # From version 0.12.0, kvm uses a new syntax for network configuration.
if (v_major, v_min) >= (0, 12):
nic_model = "virtio-net-pci"
vnet_hdr = True
else:
nic_model = "virtio"
if up_hvp[constants.HV_VHOST_NET]:
# vhost_net is only available from version 0.13.0 or newer
if (v_major, v_min) >= (0, 13):
tap_extra = ",vhost=on"
else:
raise errors.HypervisorError("vhost_net is configured"
" but it is not available")
else:
nic_model = nic_type
for nic_seq, nic in enumerate(kvm_nics):
tapname, tapfd = _OpenTap(vnet_hdr)
tapfds.append(tapfd)
taps.append(tapname)
if (v_major, v_min) >= (0, 12):
nic_val = "%s,mac=%s,netdev=netdev%s" % (nic_model, nic.mac, nic_seq)
tap_val = "type=tap,id=netdev%s,fd=%d%s" % (nic_seq, tapfd, tap_extra)
kvm_cmd.extend(["-netdev", tap_val, "-device", nic_val])
else:
nic_val = "nic,vlan=%s,macaddr=%s,model=%s" % (nic_seq,
nic.mac, nic_model)
tap_val = "tap,vlan=%s,fd=%d" % (nic_seq, tapfd)
kvm_cmd.extend(["-net", tap_val, "-net", nic_val])
if incoming:
target, port = incoming
kvm_cmd.extend(['-incoming', 'tcp:%s:%s' % (target, port)])
# Changing the vnc password doesn't bother the guest that much. At most it
# will surprise people who connect to it. Whether positively or negatively
# it's debatable.
vnc_pwd_file = conf_hvp[constants.HV_VNC_PASSWORD_FILE]
vnc_pwd = None
if vnc_pwd_file:
try:
vnc_pwd = utils.ReadOneLineFile(vnc_pwd_file, strict=True)
except EnvironmentError, err:
raise errors.HypervisorError("Failed to open VNC password file %s: %s"
% (vnc_pwd_file, err))
if conf_hvp[constants.HV_KVM_USE_CHROOT]:
utils.EnsureDirs([(self._InstanceChrootDir(name),
constants.SECURE_DIR_MODE)])
if not incoming:
# Configure the network now for starting instances, during
# FinalizeMigration for incoming instances
for nic_seq, nic in enumerate(kvm_nics):
self._ConfigureNIC(instance, nic_seq, nic, taps[nic_seq])
if security_model == constants.HT_SM_POOL:
ss = ssconf.SimpleStore()
uid_pool = uidpool.ParseUidPool(ss.GetUidPool(), separator="\n")
all_uids = set(uidpool.ExpandUidPool(uid_pool))
uid = uidpool.RequestUnusedUid(all_uids)
try:
username = pwd.getpwuid(uid.GetUid()).pw_name
kvm_cmd.extend(["-runas", username])
self._RunKVMCmd(name, kvm_cmd, tapfds)
except:
uidpool.ReleaseUid(uid)
raise
else:
uid.Unlock()
utils.WriteFile(self._InstanceUidFile(name), data=uid.AsStr())
else:
self._RunKVMCmd(name, kvm_cmd, tapfds)
utils.EnsureDirs([(self._InstanceNICDir(instance.name),
constants.RUN_DIRS_MODE)])
for nic_seq, tap in enumerate(taps):
utils.WriteFile(self._InstanceNICFile(instance.name, nic_seq),
data=tap)
if vnc_pwd:
change_cmd = 'change vnc password %s' % vnc_pwd
self._CallMonitorCommand(instance.name, change_cmd)
for filename in temp_files:
utils.RemoveFile(filename)
def StartInstance(self, instance, block_devices):
"""Start an instance.
"""
self._CheckDown(instance.name)
kvm_runtime = self._GenerateKVMRuntime(instance, block_devices)
self._SaveKVMRuntime(instance, kvm_runtime)
self._ExecuteKVMRuntime(instance, kvm_runtime)
def _CallMonitorCommand(self, instance_name, command):
"""Invoke a command on the instance monitor.
"""
socat = ("echo %s | %s STDIO UNIX-CONNECT:%s" %
(utils.ShellQuote(command),
constants.SOCAT_PATH,
utils.ShellQuote(self._InstanceMonitor(instance_name))))
result = utils.RunCmd(socat)
if result.failed:
msg = ("Failed to send command '%s' to instance %s."
" output: %s, error: %s, fail_reason: %s" %
(command, instance_name,
result.stdout, result.stderr, result.fail_reason))
raise errors.HypervisorError(msg)
return result
@classmethod
def _GetKVMVersion(cls):
"""Return the installed KVM version
@return: (version, v_maj, v_min, v_rev), or None
"""
result = utils.RunCmd([constants.KVM_PATH, "--help"])
if result.failed:
return None
match = cls._VERSION_RE.search(result.output.splitlines()[0])
if not match:
return None
return (match.group(0), int(match.group(1)), int(match.group(2)),
int(match.group(3)))
def StopInstance(self, instance, force=False, retry=False, name=None):
"""Stop an instance.
"""
if name is not None and not force:
raise errors.HypervisorError("Cannot shutdown cleanly by name only")
if name is None:
name = instance.name
acpi = instance.hvparams[constants.HV_ACPI]
else:
acpi = False
_, pid, alive = self._InstancePidAlive(name)
if pid > 0 and alive:
if force or not acpi:
utils.KillProcess(pid)
else:
self._CallMonitorCommand(name, 'system_powerdown')
def CleanupInstance(self, instance_name):
"""Cleanup after a stopped instance
"""
pidfile, pid, alive = self._InstancePidAlive(instance_name)
if pid > 0 and alive:
raise errors.HypervisorError("Cannot cleanup a live instance")
self._RemoveInstanceRuntimeFiles(pidfile, instance_name)
def RebootInstance(self, instance):
"""Reboot an instance.
"""
# For some reason if we do a 'send-key ctrl-alt-delete' to the control
    # socket the instance will stop, but not power up again. So we'll resort
# to shutdown and restart.
_, _, alive = self._InstancePidAlive(instance.name)
if not alive:
raise errors.HypervisorError("Failed to reboot instance %s:"
" not running" % instance.name)
# StopInstance will delete the saved KVM runtime so:
# ...first load it...
kvm_runtime = self._LoadKVMRuntime(instance)
# ...now we can safely call StopInstance...
if not self.StopInstance(instance):
self.StopInstance(instance, force=True)
# ...and finally we can save it again, and execute it...
self._SaveKVMRuntime(instance, kvm_runtime)
self._ExecuteKVMRuntime(instance, kvm_runtime)
def MigrationInfo(self, instance):
"""Get instance information to perform a migration.
@type instance: L{objects.Instance}
@param instance: instance to be migrated
@rtype: string
@return: content of the KVM runtime file
"""
return self._ReadKVMRuntime(instance.name)
def AcceptInstance(self, instance, info, target):
"""Prepare to accept an instance.
@type instance: L{objects.Instance}
@param instance: instance to be accepted
@type info: string
@param info: content of the KVM runtime file on the source node
@type target: string
@param target: target host (usually ip), on this node
"""
kvm_runtime = self._LoadKVMRuntime(instance, serialized_runtime=info)
incoming_address = (target, instance.hvparams[constants.HV_MIGRATION_PORT])
self._ExecuteKVMRuntime(instance, kvm_runtime, incoming=incoming_address)
def FinalizeMigration(self, instance, info, success):
"""Finalize an instance migration.
Stop the incoming mode KVM.
@type instance: L{objects.Instance}
@param instance: instance whose migration is being finalized
"""
if success:
kvm_runtime = self._LoadKVMRuntime(instance, serialized_runtime=info)
kvm_nics = kvm_runtime[1]
for nic_seq, nic in enumerate(kvm_nics):
try:
tap = utils.ReadFile(self._InstanceNICFile(instance.name, nic_seq))
except EnvironmentError, err:
logging.warning("Failed to find host interface for %s NIC #%d: %s",
instance.name, nic_seq, str(err))
continue
try:
self._ConfigureNIC(instance, nic_seq, nic, tap)
except errors.HypervisorError, err:
logging.warning(str(err))
self._WriteKVMRuntime(instance.name, info)
else:
self.StopInstance(instance, force=True)
def MigrateInstance(self, instance, target, live):
"""Migrate an instance to a target node.
The migration will not be attempted if the instance is not
currently running.
@type instance: L{objects.Instance}
@param instance: the instance to be migrated
@type target: string
@param target: ip address of the target node
@type live: boolean
@param live: perform a live migration
"""
instance_name = instance.name
port = instance.hvparams[constants.HV_MIGRATION_PORT]
pidfile, pid, alive = self._InstancePidAlive(instance_name)
if not alive:
raise errors.HypervisorError("Instance not running, cannot migrate")
if not live:
self._CallMonitorCommand(instance_name, 'stop')
migrate_command = ('migrate_set_speed %dm' %
instance.hvparams[constants.HV_MIGRATION_BANDWIDTH])
self._CallMonitorCommand(instance_name, migrate_command)
migrate_command = ('migrate_set_downtime %dms' %
instance.hvparams[constants.HV_MIGRATION_DOWNTIME])
self._CallMonitorCommand(instance_name, migrate_command)
migrate_command = 'migrate -d tcp:%s:%s' % (target, port)
self._CallMonitorCommand(instance_name, migrate_command)
info_command = 'info migrate'
done = False
broken_answers = 0
while not done:
result = self._CallMonitorCommand(instance_name, info_command)
match = self._MIGRATION_STATUS_RE.search(result.stdout)
if not match:
broken_answers += 1
if not result.stdout:
logging.info("KVM: empty 'info migrate' result")
else:
logging.warning("KVM: unknown 'info migrate' result: %s",
result.stdout)
time.sleep(self._MIGRATION_INFO_RETRY_DELAY)
else:
status = match.group(1)
if status == 'completed':
done = True
elif status == 'active':
# reset the broken answers count
broken_answers = 0
time.sleep(self._MIGRATION_INFO_RETRY_DELAY)
elif status == 'failed' or status == 'cancelled':
if not live:
self._CallMonitorCommand(instance_name, 'cont')
raise errors.HypervisorError("Migration %s at the kvm level" %
status)
else:
logging.warning("KVM: unknown migration status '%s'", status)
broken_answers += 1
time.sleep(self._MIGRATION_INFO_RETRY_DELAY)
if broken_answers >= self._MIGRATION_INFO_MAX_BAD_ANSWERS:
raise errors.HypervisorError("Too many 'info migrate' broken answers")
utils.KillProcess(pid)
self._RemoveInstanceRuntimeFiles(pidfile, instance_name)
def GetNodeInfo(self):
"""Return information about the node.
This is just a wrapper over the base GetLinuxNodeInfo method.
@return: a dict with the following keys (values in MiB):
- memory_total: the total memory size on the node
- memory_free: the available memory on the node for instances
- memory_dom0: the memory used by the node itself, if available
"""
return self.GetLinuxNodeInfo()
@classmethod
def GetInstanceConsole(cls, instance, hvparams, beparams):
"""Return a command for connecting to the console of an instance.
"""
if hvparams[constants.HV_SERIAL_CONSOLE]:
cmd = [constants.SOCAT_PATH,
"STDIO,%s" % cls._SocatUnixConsoleParams(),
"UNIX-CONNECT:%s" % cls._InstanceSerial(instance.name)]
return objects.InstanceConsole(instance=instance.name,
kind=constants.CONS_SSH,
host=instance.primary_node,
user=constants.GANETI_RUNAS,
command=cmd)
vnc_bind_address = hvparams[constants.HV_VNC_BIND_ADDRESS]
if vnc_bind_address and instance.network_port > constants.VNC_BASE_PORT:
display = instance.network_port - constants.VNC_BASE_PORT
return objects.InstanceConsole(instance=instance.name,
kind=constants.CONS_VNC,
host=vnc_bind_address,
port=instance.network_port,
display=display)
return objects.InstanceConsole(instance=instance.name,
kind=constants.CONS_MESSAGE,
message=("No serial shell for instance %s" %
instance.name))
def Verify(self):
"""Verify the hypervisor.
Check that the binary exists.
"""
if not os.path.exists(constants.KVM_PATH):
return "The kvm binary ('%s') does not exist." % constants.KVM_PATH
if not os.path.exists(constants.SOCAT_PATH):
return "The socat binary ('%s') does not exist." % constants.SOCAT_PATH
@classmethod
def CheckParameterSyntax(cls, hvparams):
"""Check the given parameters for validity.
@type hvparams: dict
@param hvparams: dictionary with parameter names/value
@raise errors.HypervisorError: when a parameter is not valid
"""
super(KVMHypervisor, cls).CheckParameterSyntax(hvparams)
kernel_path = hvparams[constants.HV_KERNEL_PATH]
if kernel_path:
if not hvparams[constants.HV_ROOT_PATH]:
raise errors.HypervisorError("Need a root partition for the instance,"
" if a kernel is defined")
if (hvparams[constants.HV_VNC_X509_VERIFY] and
not hvparams[constants.HV_VNC_X509]):
raise errors.HypervisorError("%s must be defined, if %s is" %
(constants.HV_VNC_X509,
constants.HV_VNC_X509_VERIFY))
boot_order = hvparams[constants.HV_BOOT_ORDER]
if (boot_order == constants.HT_BO_CDROM and
not hvparams[constants.HV_CDROM_IMAGE_PATH]):
raise errors.HypervisorError("Cannot boot from cdrom without an"
" ISO path")
security_model = hvparams[constants.HV_SECURITY_MODEL]
if security_model == constants.HT_SM_USER:
if not hvparams[constants.HV_SECURITY_DOMAIN]:
raise errors.HypervisorError("A security domain (user to run kvm as)"
" must be specified")
elif (security_model == constants.HT_SM_NONE or
security_model == constants.HT_SM_POOL):
if hvparams[constants.HV_SECURITY_DOMAIN]:
raise errors.HypervisorError("Cannot have a security domain when the"
" security model is 'none' or 'pool'")
@classmethod
def ValidateParameters(cls, hvparams):
"""Check the given parameters for validity.
@type hvparams: dict
@param hvparams: dictionary with parameter names/value
@raise errors.HypervisorError: when a parameter is not valid
"""
super(KVMHypervisor, cls).ValidateParameters(hvparams)
security_model = hvparams[constants.HV_SECURITY_MODEL]
if security_model == constants.HT_SM_USER:
username = hvparams[constants.HV_SECURITY_DOMAIN]
try:
pwd.getpwnam(username)
except KeyError:
raise errors.HypervisorError("Unknown security domain user %s"
% username)
@classmethod
def PowercycleNode(cls):
"""KVM powercycle, just a wrapper over Linux powercycle.
"""
cls.LinuxPowercycle()
| ekohl/ganeti | lib/hypervisor/hv_kvm.py | Python | gpl-2.0 | 44,067 |
import os
N_FILTERS = [1, 2, 5, 10, 20, 32]
KERNEL_SIZE = [2, 4, 6, 8]
SUBSAMPLE = [2, 3, 4, 5]
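# Grid search over filter count, kernel size and pooling size: the loops
# below generate and train 6 * 4 * 4 = 96 configurations in sequence.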
for filter_size in N_FILTERS:
for kernel in KERNEL_SIZE:
for sampling in SUBSAMPLE:
            yaml_file = open('yaml/convolutional_net2.yaml', 'r')
            yaml = yaml_file.read()
            yaml_file.close()
# change number of filters
yaml = yaml.replace('output_channels: 32', 'output_channels: '+str(filter_size))
# change kernel size
yaml = yaml.replace('kernel_shape: [4, 4]', 'kernel_shape: ['+str(kernel)+', '+str(kernel)+']')
# change subsampling size
yaml = yaml.replace('pool_shape: [2, 2]', 'pool_shape: ['+str(sampling)+', '+str(sampling)+']')
yaml = yaml.replace('pool_stride: [2, 2]', 'pool_stride: ['+str(sampling)+', '+str(sampling)+']')
# save as a different model
yaml = yaml.replace('save_path: "model/bestmodels/CNN-2-55x55x10_clinic.pkl"', 'save_path: "model/bestmodels/CNN-2-55x55x10_'+str(filter_size)+str(kernel)+str(sampling)+'.pkl"')
yaml_file = open('yaml/convolutional_net2_'+str(filter_size)+str(kernel)+str(sampling)+'.yaml', 'w+')
yaml_file.write(yaml)
yaml_file.close()
os.system('python train.py yaml/convolutional_net2_'+str(filter_size)+str(kernel)+str(sampling)+'.yaml')
| saahil/MSSegmentation | conv_train.py | Python | gpl-2.0 | 1,381 |
# -*- coding: iso-8859-1 -*-
#------------------------------------------------------------
# tvalacarta - XBMC Plugin
# Channel for Plus TV
# http://blog.tvalacarta.info/plugin-xbmc/tvalacarta/
#------------------------------------------------------------
import urlparse,urllib2,urllib,re
import os
import sys
import xbmc
import xbmcgui
import xbmcplugin
from core import scrapertools
from core import xbmctools
try:
pluginhandle = int( sys.argv[ 1 ] )
except:
pluginhandle = ""
xbmc.output("[plus.py] init")
DEBUG = True
CHANNELNAME = "Plus TV"
CHANNELCODE = "plus"
def mainlist(params,url,category):
xbmc.output("[plus.py] mainlist")
url = "http://www.plus.es/tv/canales.html"
# --------------------------------------------------------
    # Download the page
# --------------------------------------------------------
data = scrapertools.cachePage(url)
#xbmc.output(data)
# --------------------------------------------------------
    # Extract the programs
# --------------------------------------------------------
'''
<li class="canales estirar">
<h2><a href="index.html?idlist=PLTVCN">Cine </a></h2>
<a href="index.html?idlist=PLTVCN"><img alt="imagen Cine " src="/images/plustv/categorias/PLTVCN.jpg"/></a>
<ul>
<li><span><a title="Taller Canal+: Jaume Balagueró y Paco Plaza" href="index.html?idlist=PLTVCN&idvid=834262&pos=0">Taller Canal+: Jaume Balagueró y Paco Plaza</a></span></li><li><span><a title="Canal+ en Hollywood: globos de oro 2009" href="index.html?idlist=PLTVCN&idvid=817622&pos=1">Canal+ en Hollywood: globos de oro 2009</a></span></li>
<li class="sinPlay"><a title="ver mas" href="emisiones.html?id=PLTVCN">Más ...</a></li>
</ul>
'''
patron = '<li class="canales estirar[^"]*">[^<]+'
patron += '<h2><a href="([^"]+)">([^<]+)</a></h2>[^<]+'
patron += '<a href="[^"]+"><img alt="[^"]+" src="([^"]+)"/></a>'
matches = re.compile(patron,re.DOTALL).findall(data)
if DEBUG: scrapertools.printMatches(matches)
for match in matches:
scrapedtitle = match[1]
try:
scrapedtitle = unicode( scrapedtitle, "utf-8" ).encode("iso-8859-1")
except:
pass
scrapedurl = urlparse.urljoin( url, match[0]).replace("index.html?idlist","emisiones.html?id")
scrapedthumbnail = urlparse.urljoin(url,match[2])
scrapedplot = ""
if (DEBUG): xbmc.output("title=["+scrapedtitle+"], url=["+scrapedurl+"], thumbnail=["+scrapedthumbnail+"]")
        # Add to the XBMC listing
#addfolder( scrapedtitle , scrapedurl , "videolist" )
xbmctools.addnewfolder( CHANNELCODE , "videolist" , CHANNELNAME , scrapedtitle , scrapedurl , scrapedthumbnail , scrapedplot )
# Label (top-right)...
xbmcplugin.setPluginCategory( handle=pluginhandle, category=category )
# Disable sorting...
xbmcplugin.addSortMethod( handle=pluginhandle, sortMethod=xbmcplugin.SORT_METHOD_NONE )
# End of directory...
xbmcplugin.endOfDirectory( handle=pluginhandle, succeeded=True )
def videolist(params,url,category):
xbmc.output("[plus.py] videolist")
# --------------------------------------------------------
    # Download the page
# --------------------------------------------------------
data = scrapertools.cachePage(url)
#xbmc.output(data)
# --------------------------------------------------------
    # Extract the videos from the page
# --------------------------------------------------------
'''
<li class="video estirar">
<div class="imagen">
<a title="Estrellas de Canal+: Heath Ledger" href="index.html?idlist=PLTVCN&idvid=537147&pos=3">
<img alt="" src="http://www.plus.es/plustv/images/fotogramas/plustv/PO805296.jpg">
<span>Play</span>
</a>
</div>
<div class="tooltip" title="Programa que repasa la trayectoria de las caras más conocidas del cine.">
<div class="textos">
<p class="titulo"><a href="index.html?idlist=PLTVCN&idvid=537147&pos=3">Estrellas de Canal+: Heath Ledger</a></p>
</div>
<a class="addmiplustv show" href="miplustv.html?id=537147&action=add" rel="nofollow">Añadir a Mi PLUSTV</a>
<span>Añadido a Mi PlusTV</span>
</div>
</li>
'''
patron = '<li class="video estirar">[^<]+'
patron += '<div class="imagen">[^<]+'
patron += '<a title="([^"]+)" href="([^"]+)">[^<]+'
patron += '<img alt="[^"]*" src="([^"]+)">.*?'
patron += '<div class="tooltip" title="([^"]+)"'
matches = re.compile(patron,re.DOTALL).findall(data)
if DEBUG: scrapertools.printMatches(matches)
for match in matches:
        # Data
scrapedtitle = match[0]
scrapedurl = urlparse.urljoin( url , match[1] )
scrapedthumbnail = urlparse.urljoin( url , match[2] )
scrapedplot = match[3]
if (DEBUG): xbmc.output("title=["+scrapedtitle+"], url=["+scrapedurl+"], thumbnail=["+scrapedthumbnail+"]")
        # Add to the XBMC listing
#addvideo( scrapedtitle , scrapedurl , category )
xbmctools.addnewvideo( CHANNELCODE , "play" , CHANNELNAME , "" , scrapedtitle , scrapedurl , scrapedthumbnail, scrapedplot )
# --------------------------------------------------------
    # Extract the link to the next page
# --------------------------------------------------------
patron = '<li class="siguiente"><a href="([^"]+)">siguiente \>\;</a></li>'
matches = re.compile(patron,re.DOTALL).findall(data)
if DEBUG: scrapertools.printMatches(matches)
for match in matches:
        # Data
scrapedtitle = "Página siguiente"
scrapedurl = "http://www.plus.es/plustv/emisiones.html"+match
scrapedthumbnail = ""
scrapedplot = ""
if (DEBUG): xbmc.output("title=["+scrapedtitle+"], url=["+scrapedurl+"], thumbnail=["+scrapedthumbnail+"]")
        # Add to the XBMC listing
#addvideo( scrapedtitle , scrapedurl , category )
xbmctools.addnewfolder( CHANNELCODE , "videolist" , CHANNELNAME , scrapedtitle , scrapedurl , scrapedthumbnail , scrapedplot )
# Label (top-right)...
xbmcplugin.setPluginCategory( handle=pluginhandle, category=category )
# Disable sorting...
xbmcplugin.addSortMethod( handle=pluginhandle, sortMethod=xbmcplugin.SORT_METHOD_NONE )
# End of directory...
xbmcplugin.endOfDirectory( handle=pluginhandle, succeeded=True )
def play(params,url,category):
xbmc.output("[plus.py] play")
title = urllib.unquote_plus( params.get("title") )
thumbnail = urllib.unquote_plus( params.get("thumbnail") )
plot = urllib.unquote_plus( params.get("plot") )
xbmc.output("[plus.py] thumbnail="+thumbnail)
    # Open progress dialog
dialogWait = xbmcgui.DialogProgress()
dialogWait.create( 'Descargando datos del vídeo...', title )
# --------------------------------------------------------
    # Download the detail page
# --------------------------------------------------------
    # Work out the URL
    # Detail URL: http://www.plus.es/tv/index.html?idList=PLTVDO&idVid=725903&pos=0
    # Video XML URL: http://www.plus.es/tv/bloques.html?id=0&idList=PLTVDO&idVid=725903
#<?xml version="1.0" encoding="iso-8859-1"?>
#<bloque modo="U">
#<video tipo="P" url="http://canalplus.ondemand.flumotion.com/canalplus/ondemand/plustv/GF755806.flv" title=""></video>
#<video tipo="T" url="http://canalplus.ondemand.flumotion.com/canalplus/ondemand/plustv/NF754356.flv" title="Encuentros en el fin del mundo"></video>
#</bloque>
idCategoria = re.compile("idlist=([^&]+)&",re.DOTALL).findall(url)
xbmc.output('idCategoria='+idCategoria[0])
idVideo = re.compile("idvid=(\d+)",re.DOTALL).findall(url)
xbmc.output('idVideo='+idVideo[0])
urldetalle = "http://www.plus.es/tv/bloques.html?id=0&idList=" + idCategoria[0] + "&idVid=" + idVideo[0]
bodydetalle = scrapertools.cachePage(urldetalle)
xbmc.output(bodydetalle)
enlacevideo = re.compile('<video tipo="T" url="([^"]+)"',re.DOTALL).findall(bodydetalle)
xbmc.output("enlacevideo="+enlacevideo[0])
#enlacevideo =
url = enlacevideo[0]
    # Empty playlist
playlist = xbmc.PlayList( xbmc.PLAYLIST_VIDEO )
playlist.clear()
    # Create the entry and add it to the playlist
if url.endswith(".flv"):
#rtmp://od.flash.plus.es/ondemand/14314/plus/plustv/PO778395.flv
cabecera = url[:32]
xbmc.output("cabecera="+cabecera)
finplaypath = url.rfind(".")
playpath = url[33:finplaypath]
xbmc.output("playpath="+playpath)
#url = "rtmp://od.flash.plus.es/ondemand"
url = cabecera
listitem = xbmcgui.ListItem( title, iconImage="DefaultVideo.png", thumbnailImage=thumbnail )
listitem.setProperty("SWFPlayer", "http://www.plus.es/plustv/carcasa.swf")
#listitem.setProperty("Playpath","14314/plus/plustv/PO778395")
listitem.setProperty("Playpath",playpath)
listitem.setProperty("Hostname","od.flash.plus.es")
listitem.setProperty("Port","1935")
#listitem.setProperty("tcUrl","rtmp://od.flash.plus.es/ondemand")
listitem.setProperty("tcUrl",cabecera)
listitem.setProperty("app","ondemand")
listitem.setProperty("flashVer","LNX 9,0,124,0")
listitem.setProperty("pageUrl","LNX 9,0,124,0")
else:
#rtmp://od.flash.plus.es/ondemand/mp4:14314/plus/plustv/NF805546.f4v
'''
DEBUG: Parsing...
DEBUG: Parsed protocol: 0
DEBUG: Parsed host : od.flash.plus.es
DEBUG: Parsed app : ondemand
DEBUG: Parsed playpath: mp4:14314/plus/plustv/NF805546.f4v
DEBUG: Setting buffer time to: 36000000ms
Connecting ...
DEBUG: Protocol : RTMP
DEBUG: Hostname : od.flash.plus.es
DEBUG: Port : 1935
DEBUG: Playpath : mp4:14314/plus/plustv/NF805546.f4v
DEBUG: tcUrl : rtmp://od.flash.plus.es:1935/ondemand
DEBUG: app : ondemand
DEBUG: flashVer : LNX 9,0,124,0
DEBUG: live : no
DEBUG: timeout : 300 sec
DEBUG: Connect, ... connected, handshaking
DEBUG: HandShake: Type Answer : 03
DEBUG: HandShake: Server Uptime : 1699356683
DEBUG: HandShake: FMS Version : 3.5.1.1
DEBUG: Connect, handshaked
Connected...
'''
# cabecera = "rtmp://od.flash.plus.es/ondemand"
cabecera = url[:32]
xbmc.output("cabecera="+cabecera)
# playpath = mp4:14314/plus/plustv/NF805546.f4v
finplaypath = url.rfind(".")
playpath = url[33:]
xbmc.output("playpath="+playpath)
#url = "rtmp://od.flash.plus.es/ondemand"
url = cabecera
listitem = xbmcgui.ListItem( title, iconImage="DefaultVideo.png", thumbnailImage=thumbnail )
listitem.setProperty("SWFPlayer", "http://www.plus.es/plustv/carcasa.swf")
listitem.setProperty("Playpath",playpath)
listitem.setProperty("Hostname","od.flash.plus.es")
listitem.setProperty("Port","1935")
#listitem.setProperty("tcUrl","rtmp://od.flash.plus.es/ondemand")
listitem.setProperty("tcUrl",cabecera)
listitem.setProperty("app","ondemand")
listitem.setProperty("flashVer","LNX 9,0,124,0")
listitem.setProperty("pageUrl","LNX 9,0,124,0")
listitem.setInfo( "video", { "Title": title, "Plot" : plot , "Studio" : CHANNELNAME , "Genre" : category } )
playlist.add( url, listitem )
#url= rtmp://od.flash.plus.es/ondemand/14314/plus/plustv/PO778395.flv
#DEBUG: Protocol : RTMP
#DEBUG: Playpath : 14314/plus/plustv/PO778395
#DEBUG: Hostname : od.flash.plus.es
#DEBUG: Port : 1935
#DEBUG: tcUrl : rtmp://od.flash.plus.es:1935/ondemand
#DEBUG: app : ondemand
#DEBUG: flashVer : LNX 9,0,124,0
#DEBUG: live : no
#DEBUG: timeout : 300 sec
    # Close dialog
dialogWait.close()
del dialogWait
    # Play the video
xbmcPlayer = xbmc.Player( xbmc.PLAYER_CORE_AUTO )
xbmcPlayer.play(playlist)
| titienmiami/mmc.repository | plugin.video.tvalacarta/tvalacarta/channels/plus.py | Python | gpl-2.0 | 11,175 |
import mmap, struct, cPickle, threading, platform
import numpy as np
LOCAL = threading.local()
LOCAL.storage = None
pypy = platform.python_implementation() == 'PyPy'
if pypy:
import cffi
ffi = cffi.FFI()
ffi.cdef('typedef size_t off_t;')
ffi.cdef('void *mmap(void *addr, size_t length, int prot, int flags, int fd, off_t offset);')
ffi.cdef('int munmap(void *addr, size_t length);')
C = ffi.dlopen(None)
def get_mmap(fileno, size):
addr = C.mmap(ffi.NULL, size, mmap.PROT_READ|mmap.PROT_WRITE, mmap.MAP_SHARED, fileno, 0)
        assert addr != ffi.cast('void *', -1), 'mmap failed'
def close():
C.munmap(addr, size)
return ffi.buffer(addr, size), close
else:
def get_mmap(fileno, size):
result = mmap.mmap(fileno, size, access=mmap.ACCESS_WRITE)
return result, result.close
class Disk_array(np.ndarray):
def __new__(cls, offset, shape, dtype):
result = np.ndarray.__new__(
Disk_array,
shape=shape,
dtype=dtype,
offset=offset,
buffer=LOCAL.storage._mmap)
result._disk_array_storage = LOCAL.storage
result._disk_array_new = (offset, shape, dtype)
return result
def __reduce__(self):
return (Disk_array, self._disk_array_new)
class _Store(object):
""" Object to pickle """
pass
class _Value(object):
""" Default value type """
pass
class Storage(object):
"""
By means of pickle-fu,
store data on disk,
including arbitrary python objects
and memory mapped numpy arrays.
The idea is to work with a fairly small amount of python objects
which are fully unpickled on load,
and some very large memory mapped arrays,
the memory for which is only loaded on demand.
Access data via .data
Create memory-mapped arrays with .empty, .zeros, .ones,
store them in .data. Call .save()
Garbage collection is presently not implemented.
It would be relatively easy to implement as part of ._load()
(ie a GC pass would require saving and loading).
As the file grows it is mmapped multiple times.
Old mmaps are retained as old arrays point into them.
The file grows by (default) 1.5x each time,
so not too many mappings are required.
This seems to work.
Call .close() to unmap mmaps.
Things will explode
if you attempt to access memory mapped arrays after this.
"""
def __init__(self, filename, clear=False, preallocate=0, expand=1.5):
self._closers = [ ] # Functions to close all mmaps
self._close_size = None # Truncate any unused space on close
self._filename = filename
self._file = open(filename, 'ab+')
self._file.seek(0, 2)
if clear or self._file.tell() < 24:
self._file.truncate(0)
self._file.write('nesoni00' + chr(0)*16)
self._file.seek(0, 2)
self._size = self._file.tell()
self._expand = expand
self._mmap = None
self._require(24 + preallocate)
self._load()
def close(self):
del self._obj
while self._closers:
self._closers.pop()()
if self._close_size is not None:
self._file.truncate(self._close_size)
self._file.close()
@property
def data(self):
return self._obj.value
@data.setter
def data(self, value):
self._obj.value = value
def save(self):
dump = self._in_context(cPickle.dumps, self._obj, 2)
offset = self._obj.alloc
size = len(dump)
self._require(offset+size)
self._mmap[8:24] = struct.pack('<QQ', offset,size)
self._mmap[offset:offset+size] = dump
self._close_size = offset+size
def _require(self, size):
if self._size < size:
size = max(int(self._size*self._expand), size)
self._file.truncate(size)
self._size = size
self._mmap = None
if self._mmap is None:
self._mmap, closer = get_mmap(self._file.fileno(), self._size)
self._closers.append(closer)
def _load(self):
self._require(24)
offset, size = struct.unpack('<QQ', self._mmap[8:24])
if size == 0:
self._obj = _Store()
self._obj.alloc = 24
self._obj.value = _Value()
else:
self._file.seek(offset)
self._obj = self._in_context(cPickle.loads,self._mmap[offset:offset+size])
def _in_context(self, func, *args, **kwargs):
old = LOCAL.storage
try:
LOCAL.storage = self
return func(*args, **kwargs)
finally:
LOCAL.storage = old
def _allocate(self, size):
result = self._obj.alloc
self._obj.alloc += size
self._require(self._obj.alloc)
return result
def empty(self, shape, dtype):
if isinstance(shape, int):
shape = (shape,)
size = np.dtype(dtype).itemsize
for item in shape:
size *= item
offset = self._allocate(size)
return self._in_context(Disk_array, offset, shape, dtype)
def zeros(self, shape, dtype):
result = self.empty(shape,dtype)
result[()] = 0
return result
def ones(self, shape, dtype):
result = self.empty(shape,dtype)
result[()] = 1
return result
def array(self, data):
# Slight inefficiency
data = np.asarray(data)
result = self.empty(data.shape, data.dtype)
result[()] = data
return result
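# Minimal usage sketch (illustrative only; the file name, shapes and dtype
# below are arbitrary): create a store, allocate a memory-mapped array,
# reference it from .data, save, then reopen and read it back.
if __name__ == '__main__':
    store = Storage('demo.store', clear=True)
    counts = store.zeros((4, 3), 'int32')  # Disk_array backed by the file
    counts[0, 0] = 42
    store.data.counts = counts             # reachable from the pickled root
    store.save()
    store.close()
    store = Storage('demo.store')
    assert store.data.counts[0, 0] == 42
    store.close()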
| Victorian-Bioinformatics-Consortium/nesoni | nesoni/storage.py | Python | gpl-2.0 | 5,763 |
from json import loads
class DescribeCommonActionTestCase(object):
def test_description_is_updated(self):
self.client.add_fake_response({})
self.client.parse_args([self.subcommand, 'res', 'describe',
'This is my description'])
assert len(self.client.submitted_fake_data()) == 1
assert self.subcommand in self.client.requested_fake_path()[0]
assert 'res' in self.client.requested_fake_path()[0]
assert loads(self.client.submitted_fake_data()[0]) == {
'description': 'This is my description'}
| jexhson/rbx | tests/base_action_describe.py | Python | gpl-2.0 | 590 |
#qpy:kivy
#qpy:2
#-*-coding:utf8;-*-
import random
from kivy.app import App
from kivy.uix.widget import Widget
from kivy.graphics import Rectangle, Color
from kivy.clock import Clock
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.label import Label
from kivy.uix.popup import Popup
from kivy.uix.button import Button
from kivy.uix.textinput import TextInput
class TetrisShape(object):
def __init__(self, canvas, width, height, chunk_size, hoffset):
self.canvas = canvas
self.width = width
self.height = height
self.chunk_size = chunk_size
self.hoffset = hoffset
self.rects = []
def is_external(self, child):
b = True
if not isinstance(child, Rectangle):
b = False
else:
i = 0
while i < len(self.rects) and b:
if child == self.rects[i]:
b = False
i += 1
return b
def can_move_left(self):
b = True
i = 0
while i < len(self.rects) and b:
if self.rects[i].pos[0] - self.chunk_size < 0:
b = False
i += 1
i = 0
while i < len(self.rects) and b:
for child in self.canvas.children:
if self.is_external(child):
if self.rects[i].pos[0] - self.chunk_size == child.pos[0] and \
self.rects[i].pos[1] == child.pos[1]:
b = False
i += 1
return b
def can_move_right(self):
b = True
i = 0
while i < len(self.rects) and b:
if self.rects[i].pos[0] + self.chunk_size >= self.width:
b = False
i += 1
i = 0
while i < len(self.rects) and b:
for child in self.canvas.children:
if self.is_external(child):
if self.rects[i].pos[0] + self.chunk_size == child.pos[0] and \
self.rects[i].pos[1] == child.pos[1]:
b = False
i += 1
return b
def can_move_down(self):
b = True
i = 0
while i < len(self.rects) and b:
if self.rects[i].pos[1] < (self.height / 10) + self.chunk_size:
b = False
i += 1
i = 0
while i < len(self.rects) and b:
for child in self.canvas.children:
if self.is_external(child):
if self.rects[i].pos[1] - self.chunk_size == child.pos[1] and \
self.rects[i].pos[0] == child.pos[0]:
b = False
i += 1
return b
def move_down(self):
for rect in self.rects:
x = rect.pos[0]
y = rect.pos[1]
y -= self.chunk_size
rect.pos=(x, y)
def move_left(self):
for rect in self.rects:
x = rect.pos[0]
y = rect.pos[1]
x -= self.chunk_size
rect.pos=(x, y)
def move_right(self):
for rect in self.rects:
x = rect.pos[0]
y = rect.pos[1]
x += self.chunk_size
rect.pos=(x, y)
def game_over(self):
b = False
i = 0
while i < len(self.rects) and not b:
if self.rects[i].pos[1] > self.height:
b = True
i += 1
return b
def overlaps(self, x, y):
        ''' Check whether a position overlaps another square or exits the board '''
b = False
        if 0 <= x < self.width:
            # Does not exit horizontally
if y >= (self.height / 10) + self.chunk_size:
                # Does not exit vertically
for child in self.canvas.children:
if self.is_external(child):
if child.pos[0] == x and child.pos[1] == y:
# Overlaps with another square
b = True
else:
b = True
else:
b = True
return b
def remove_lines(self, count):
b = False
yc = []
i = 0
while i < len(self.rects):
yc.append(self.rects[i].pos[1])
i += 1
yc = list(set(yc))
yc = sorted(yc, reverse=True)
for y in yc:
line_complete = True
x = 0
rl = []
while x < self.chunk_size * 10 and line_complete:
found_in_pos = False
for child in self.canvas.children:
if isinstance(child, Rectangle):
if child.pos[0] == x and child.pos[1] == y:
found_in_pos = True
rl.append(child)
if not found_in_pos:
line_complete = False
x += self.chunk_size
if line_complete:
b = True
for child in rl:
self.canvas.remove(child)
for child in self.canvas.children:
if isinstance(child, Rectangle):
if child.pos[1] > y:
t = (child.pos[0], child.pos[1] - self.chunk_size)
child.pos = t
return b
def do_rotate(self, x0, y0, x1, y1, x2, y2, x3, y3, color):
for rect in self.rects:
self.canvas.remove(rect)
self.rects = []
self.canvas.add(color)
self.rects.append(Rectangle(pos=(x0, y0), size=(self.chunk_size, self.chunk_size)))
self.rects.append(Rectangle(pos=(x1, y1), size=(self.chunk_size, self.chunk_size)))
self.rects.append(Rectangle(pos=(x2, y2), size=(self.chunk_size, self.chunk_size)))
self.rects.append(Rectangle(pos=(x3, y3), size=(self.chunk_size, self.chunk_size)))
for rect in self.rects:
self.canvas.add(rect)
def shift_left(self, x0, x1, x2, x3):
return x0 - self.chunk_size, x1 - self.chunk_size, x2 - self.chunk_size, x3 - self.chunk_size
def shift_right(self, x0, x1, x2, x3):
return x0 + self.chunk_size, x1 + self.chunk_size, x2 + self.chunk_size, x3 + self.chunk_size
def shift_up(self, y0, y1, y2, y3):
return y0 + self.chunk_size, y1 + self.chunk_size, y2 + self.chunk_size, y3 + self.chunk_size
class TetrisShape1(TetrisShape):
def __init__(self, canvas, width, height, chunk_size, hoffset):
super(TetrisShape1, self).__init__(canvas, width, height, chunk_size, hoffset)
self.rot = 0
canvas.add(Color(0, 0, 0.5))
self.rects.append(Rectangle(pos=(self.chunk_size * self.hoffset, self.height + (self.chunk_size * 2)), size=(self.chunk_size, self.chunk_size)))
self.rects.append(Rectangle(pos=(self.chunk_size * self.hoffset, self.height + self.chunk_size), size=(self.chunk_size, self.chunk_size)))
self.rects.append(Rectangle(pos=(self.chunk_size * self.hoffset, self.height), size=(self.chunk_size, self.chunk_size)))
self.rects.append(Rectangle(pos=(self.chunk_size * (self.hoffset + 1) , self.height), size=(self.chunk_size, self.chunk_size)))
for rect in self.rects:
canvas.add(rect)
def rotate(self):
if self.rot == 0:
x1 = self.rects[1].pos[0]
y1 = self.rects[1].pos[1]
x0 = x1 - self.chunk_size
y0 = y1
x2 = x1 + self.chunk_size
y2 = y1
x3 = x1 + self.chunk_size
y3 = y1 + self.chunk_size
do_rot = False
if not self.overlaps(x0, y0) and not self.overlaps(x1, y1) and \
not self.overlaps(x2, y2) and not self.overlaps(x3, y3):
do_rot = True
else:
x0, x1, x2, x3 = self.shift_left(x0, x1, x2, x3)
if not self.overlaps(x0, y0) and not self.overlaps(x1, y1) and \
not self.overlaps(x2, y2) and not self.overlaps(x3, y3):
do_rot = True
else:
x0, x1, x2, x3 = self.shift_right(x0, x1, x2, x3)
if not self.overlaps(x0, y0) and not self.overlaps(x1, y1) and \
not self.overlaps(x2, y2) and not self.overlaps(x3, y3):
do_rot = True
if do_rot:
self.rot = 1
self.do_rotate(x0, y0, x1, y1, x2, y2, x3, y3, Color(0, 0, 0.5))
elif self.rot == 1:
x1 = self.rects[1].pos[0]
y1 = self.rects[1].pos[1]
x0 = x1
y0 = y1 - self.chunk_size
x2 = x1
y2 = y1 + self.chunk_size
x3 = x1 - self.chunk_size
y3 = y1 + self.chunk_size
do_rot = False
if not self.overlaps(x0, y0) and not self.overlaps(x1, y1) and \
not self.overlaps(x2, y2) and not self.overlaps(x3, y3):
do_rot = True
else:
x0, x1, x2, x3 = self.shift_left(x0, x1, x2, x3)
if not self.overlaps(x0, y0) and not self.overlaps(x1, y1) and \
not self.overlaps(x2, y2) and not self.overlaps(x3, y3):
do_rot = True
else:
x0, x1, x2, x3 = self.shift_right(x0, x1, x2, x3)
if not self.overlaps(x0, y0) and not self.overlaps(x1, y1) and \
not self.overlaps(x2, y2) and not self.overlaps(x3, y3):
do_rot = True
if do_rot:
self.rot = 2
self.do_rotate(x0, y0, x1, y1, x2, y2, x3, y3, Color(0, 0, 0.5))
elif self.rot == 2:
x1 = self.rects[1].pos[0]
y1 = self.rects[1].pos[1]
x0 = x1 - self.chunk_size
y0 = y1
x2 = x1 - self.chunk_size
y2 = y1 - self.chunk_size
x3 = x1 + self.chunk_size
y3 = y1
do_rot = False
if not self.overlaps(x0, y0) and not self.overlaps(x1, y1) and \
not self.overlaps(x2, y2) and not self.overlaps(x3, y3):
do_rot = True
else:
x0, x1, x2, x3 = self.shift_left(x0, x1, x2, x3)
if not self.overlaps(x0, y0) and not self.overlaps(x1, y1) and \
not self.overlaps(x2, y2) and not self.overlaps(x3, y3):
do_rot = True
else:
x0, x1, x2, x3 = self.shift_right(x0, x1, x2, x3)
if not self.overlaps(x0, y0) and not self.overlaps(x1, y1) and \
not self.overlaps(x2, y2) and not self.overlaps(x3, y3):
do_rot = True
if do_rot:
self.rot = 3
self.do_rotate(x0, y0, x1, y1, x2, y2, x3, y3, Color(0, 0, 0.5))
elif self.rot == 3:
x1 = self.rects[1].pos[0]
y1 = self.rects[1].pos[1]
x0 = x1
y0 = y1 + self.chunk_size
x2 = x1
y2 = y1 - self.chunk_size
x3 = x1 + self.chunk_size
y3 = y1 - self.chunk_size
do_rot = False
if not self.overlaps(x0, y0) and not self.overlaps(x1, y1) and \
not self.overlaps(x2, y2) and not self.overlaps(x3, y3):
do_rot = True
else:
x0, x1, x2, x3 = self.shift_left(x0, x1, x2, x3)
if not self.overlaps(x0, y0) and not self.overlaps(x1, y1) and \
not self.overlaps(x2, y2) and not self.overlaps(x3, y3):
do_rot = True
else:
x0, x1, x2, x3 = self.shift_right(x0, x1, x2, x3)
if not self.overlaps(x0, y0) and not self.overlaps(x1, y1) and \
not self.overlaps(x2, y2) and not self.overlaps(x3, y3):
do_rot = True
if do_rot:
self.rot = 0
self.do_rotate(x0, y0, x1, y1, x2, y2, x3, y3, Color(0, 0, 0.5))
class TetrisShape2(TetrisShape):
def __init__(self, canvas, width, height, chunk_size, hoffset):
super(TetrisShape2, self).__init__(canvas, width, height, chunk_size, hoffset)
canvas.add(Color(0, 0.5, 0))
self.rects.append(Rectangle(pos=(self.chunk_size * self.hoffset, self.height), size=(self.chunk_size, self.chunk_size)))
self.rects.append(Rectangle(pos=(self.chunk_size * self.hoffset, self.height + self.chunk_size), size=(self.chunk_size, self.chunk_size)))
self.rects.append(Rectangle(pos=(self.chunk_size * (self.hoffset + 1), self.height), size=(self.chunk_size, self.chunk_size)))
self.rects.append(Rectangle(pos=(self.chunk_size * (self.hoffset + 1) , self.height + self.chunk_size), size=(self.chunk_size, self.chunk_size)))
for rect in self.rects:
canvas.add(rect)
def rotate(self):
pass
class TetrisShape3(TetrisShape):
def __init__(self, canvas, width, height, chunk_size, hoffset):
super(TetrisShape3, self).__init__(canvas, width, height, chunk_size, hoffset)
canvas.add(Color(0.5, 0, 0))
self.rot = 0
self.rects.append(Rectangle(pos=(self.chunk_size * self.hoffset, self.height), size=(self.chunk_size, self.chunk_size)))
self.rects.append(Rectangle(pos=(self.chunk_size * self.hoffset, self.height + self.chunk_size), size=(self.chunk_size, self.chunk_size)))
self.rects.append(Rectangle(pos=(self.chunk_size * self.hoffset, self.height + self.chunk_size * 2), size=(self.chunk_size, self.chunk_size)))
self.rects.append(Rectangle(pos=(self.chunk_size * self.hoffset, self.height + self.chunk_size * 3), size=(self.chunk_size, self.chunk_size)))
for rect in self.rects:
canvas.add(rect)
def rotate(self):
if self.rot == 0:
x1 = self.rects[1].pos[0]
y1 = self.rects[1].pos[1]
x0 = x1 - self.chunk_size
y0 = y1
x2 = x1 + self.chunk_size
y2 = y1
x3 = x1 + self.chunk_size * 2
y3 = y1
do_rot = False
if not self.overlaps(x0, y0) and not self.overlaps(x1, y1) and \
not self.overlaps(x2, y2) and not self.overlaps(x3, y3):
do_rot = True
else:
x0, x1, x2, x3 = self.shift_left(x0, x1, x2, x3)
if not self.overlaps(x0, y0) and not self.overlaps(x1, y1) and \
not self.overlaps(x2, y2) and not self.overlaps(x3, y3):
do_rot = True
else:
x0, x1, x2, x3 = self.shift_left(x0, x1, x2, x3)
if not self.overlaps(x0, y0) and not self.overlaps(x1, y1) and \
not self.overlaps(x2, y2) and not self.overlaps(x3, y3):
do_rot = True
else:
x0, x1, x2, x3 = self.shift_right(x0, x1, x2, x3)
x0, x1, x2, x3 = self.shift_right(x0, x1, x2, x3)
x0, x1, x2, x3 = self.shift_right(x0, x1, x2, x3)
if not self.overlaps(x0, y0) and not self.overlaps(x1, y1) and \
not self.overlaps(x2, y2) and not self.overlaps(x3, y3):
do_rot = True
else:
x0, x1, x2, x3 = self.shift_right(x0, x1, x2, x3)
if not self.overlaps(x0, y0) and not self.overlaps(x1, y1) and \
not self.overlaps(x2, y2) and not self.overlaps(x3, y3):
do_rot = True
if do_rot:
self.rot = 1
self.do_rotate(x0, y0, x1, y1, x2, y2, x3, y3, Color(0.5, 0, 0))
elif self.rot == 1:
x1 = self.rects[1].pos[0]
y1 = self.rects[1].pos[1]
x0 = x1
y0 = y1 + self.chunk_size
x2 = x1
y2 = y1 - self.chunk_size
x3 = x1
y3 = y1 - self.chunk_size * 2
do_rot = False
if not self.overlaps(x0, y0) and not self.overlaps(x1, y1) and \
not self.overlaps(x2, y2) and not self.overlaps(x3, y3):
do_rot = True
else:
y0, y1, y2, y3 = self.shift_up(y0, y1, y2, y3)
if not self.overlaps(x0, y0) and not self.overlaps(x1, y1) and \
not self.overlaps(x2, y2) and not self.overlaps(x3, y3):
do_rot = True
else:
y0, y1, y2, y3 = self.shift_up(y0, y1, y2, y3)
if not self.overlaps(x0, y0) and not self.overlaps(x1, y1) and \
not self.overlaps(x2, y2) and not self.overlaps(x3, y3):
do_rot = True
if do_rot:
self.rot = 0
self.do_rotate(x0, y0, x1, y1, x2, y2, x3, y3, Color(0.5, 0, 0))
class TetrisShape4(TetrisShape):
def __init__(self, canvas, width, height, chunk_size, hoffset):
super(TetrisShape4, self).__init__(canvas, width, height, chunk_size, hoffset)
canvas.add(Color(1.0, 0.5, 0))
self.rot = 0
self.rects.append(Rectangle(pos=(self.chunk_size * self.hoffset, self.height + self.chunk_size), size=(self.chunk_size, self.chunk_size)))
self.rects.append(Rectangle(pos=(self.chunk_size * self.hoffset, self.height), size=(self.chunk_size, self.chunk_size)))
self.rects.append(Rectangle(pos=(self.chunk_size * self.hoffset, self.height - self.chunk_size), size=(self.chunk_size, self.chunk_size)))
self.rects.append(Rectangle(pos=(self.chunk_size * (self.hoffset - 1), self.height - self.chunk_size), size=(self.chunk_size, self.chunk_size)))
for rect in self.rects:
canvas.add(rect)
def rotate(self):
if self.rot == 0:
x1 = self.rects[1].pos[0]
y1 = self.rects[1].pos[1]
x0 = x1 - self.chunk_size
y0 = y1
x2 = x1 + self.chunk_size
y2 = y1
x3 = x1 + self.chunk_size
y3 = y1 - self.chunk_size
do_rot = False
if not self.overlaps(x0, y0) and not self.overlaps(x1, y1) and \
not self.overlaps(x2, y2) and not self.overlaps(x3, y3):
do_rot = True
else:
x0, x1, x2, x3 = self.shift_left(x0, x1, x2, x3)
if not self.overlaps(x0, y0) and not self.overlaps(x1, y1) and \
not self.overlaps(x2, y2) and not self.overlaps(x3, y3):
do_rot = True
else:
x0, x1, x2, x3 = self.shift_right(x0, x1, x2, x3)
if not self.overlaps(x0, y0) and not self.overlaps(x1, y1) and \
not self.overlaps(x2, y2) and not self.overlaps(x3, y3):
do_rot = True
if do_rot:
self.rot = 1
self.do_rotate(x0, y0, x1, y1, x2, y2, x3, y3, Color(1.0, 0.5, 0))
elif self.rot == 1:
x1 = self.rects[1].pos[0]
y1 = self.rects[1].pos[1]
x0 = x1
y0 = y1 - self.chunk_size
x2 = x1
y2 = y1 + self.chunk_size
x3 = x1 + self.chunk_size
y3 = y1 + self.chunk_size
do_rot = False
if not self.overlaps(x0, y0) and not self.overlaps(x1, y1) and \
not self.overlaps(x2, y2) and not self.overlaps(x3, y3):
do_rot = True
else:
x0, x1, x2, x3 = self.shift_left(x0, x1, x2, x3)
if not self.overlaps(x0, y0) and not self.overlaps(x1, y1) and \
not self.overlaps(x2, y2) and not self.overlaps(x3, y3):
do_rot = True
else:
x0, x1, x2, x3 = self.shift_right(x0, x1, x2, x3)
if not self.overlaps(x0, y0) and not self.overlaps(x1, y1) and \
not self.overlaps(x2, y2) and not self.overlaps(x3, y3):
do_rot = True
if do_rot:
self.rot = 2
self.do_rotate(x0, y0, x1, y1, x2, y2, x3, y3, Color(1.0, 0.5, 0))
elif self.rot == 2:
x1 = self.rects[1].pos[0]
y1 = self.rects[1].pos[1]
x0 = x1 - self.chunk_size
y0 = y1
x2 = x1 + self.chunk_size
y2 = y1
x3 = x1 - self.chunk_size
y3 = y1 + self.chunk_size
do_rot = False
if not self.overlaps(x0, y0) and not self.overlaps(x1, y1) and \
not self.overlaps(x2, y2) and not self.overlaps(x3, y3):
do_rot = True
else:
x0, x1, x2, x3 = self.shift_left(x0, x1, x2, x3)
if not self.overlaps(x0, y0) and not self.overlaps(x1, y1) and \
not self.overlaps(x2, y2) and not self.overlaps(x3, y3):
do_rot = True
else:
x0, x1, x2, x3 = self.shift_right(x0, x1, x2, x3)
if not self.overlaps(x0, y0) and not self.overlaps(x1, y1) and \
not self.overlaps(x2, y2) and not self.overlaps(x3, y3):
do_rot = True
if do_rot:
self.rot = 3
self.do_rotate(x0, y0, x1, y1, x2, y2, x3, y3, Color(1.0, 0.5, 0))
elif self.rot == 3:
x1 = self.rects[1].pos[0]
y1 = self.rects[1].pos[1]
x0 = x1
y0 = y1 - self.chunk_size
x2 = x1
y2 = y1 + self.chunk_size
x3 = x1 - self.chunk_size
y3 = y1 - self.chunk_size
do_rot = False
if not self.overlaps(x0, y0) and not self.overlaps(x1, y1) and \
not self.overlaps(x2, y2) and not self.overlaps(x3, y3):
do_rot = True
else:
x0, x1, x2, x3 = self.shift_left(x0, x1, x2, x3)
if not self.overlaps(x0, y0) and not self.overlaps(x1, y1) and \
not self.overlaps(x2, y2) and not self.overlaps(x3, y3):
do_rot = True
else:
x0, x1, x2, x3 = self.shift_right(x0, x1, x2, x3)
if not self.overlaps(x0, y0) and not self.overlaps(x1, y1) and \
not self.overlaps(x2, y2) and not self.overlaps(x3, y3):
do_rot = True
if do_rot:
self.rot = 0
self.do_rotate(x0, y0, x1, y1, x2, y2, x3, y3, Color(1.0, 0.5, 0))
class TetrisGame(Widget):
def __init__(self, **kwargs):
self.score = 0
self.shape = None
self.move_right = 0
self.move_left = 0
self.update_count = 0
self.game_over = False
self.chunk_size = 0
self.slide = False
self.shape_count = 0
self.rot = 0
        self.event = None
super(TetrisGame, self).__init__(**kwargs)
def start_game(self, instance):
self.event = Clock.schedule_interval(self.update, 0.05)
def restart_game(self, instance):
self.start()
def start(self):
self.game_over = False
self.score = 0
box = BoxLayout(orientation='vertical', padding=10, spacing=10)
box.add_widget(Label(text="..."))
self.popup = Popup(title="Tetris", content=box, size_hint=(0.9, 0.9))
box.add_widget(Button(text='inizia', on_press=self.popup.dismiss))
self.popup.bind(on_dismiss=self.start_game)
self.popup.open()
def clean(self):
b = False
for child in self.canvas.children:
if isinstance(child, Rectangle):
self.canvas.remove(child)
b = True
return b
def update(self, dt):
self.chunk_size = self.width / 10
if not self.game_over:
if self.shape is None:
t = random.randint(1, 4)
if t == 1:
self.shape = TetrisShape1(self.canvas, self.width, self.height, self.chunk_size, 4)
elif t == 2:
self.shape = TetrisShape2(self.canvas, self.width, self.height, self.chunk_size, 4)
elif t == 3:
self.shape = TetrisShape3(self.canvas, self.width, self.height, self.chunk_size, 4)
elif t == 4:
self.shape = TetrisShape4(self.canvas, self.width, self.height, self.chunk_size, 4)
self.shape_count += 1
self.slide = False
else:
if self.rot > 0:
self.shape.rotate()
self.rot -= 1
if self.move_right > 0:
if self.shape.can_move_right():
self.shape.move_right()
self.move_right -= 1
if self.move_left > 0:
if self.shape.can_move_left():
self.shape.move_left()
self.move_left -= 1
step = 10
                if self.score > 1024:
                    step = 4
                elif self.score > 512:
                    step = 6
                elif self.score > 128:
                    step = 8
if self.update_count >= step or self.slide:
if self.shape.can_move_down():
self.shape.move_down()
else:
self.score += 4
if self.shape.game_over():
self.game_over = True
                            # Hack: keep removing rectangles while any remain
while self.clean():
pass
Clock.unschedule(self.event)
self.event = None
box = BoxLayout(orientation='vertical', padding=10, spacing=10)
box.add_widget(Label(text="Punteggio: " + str(self.score)))
self.popup = Popup(title="Partita terminata", content=box, size_hint=(0.9, 0.9))
box.add_widget(Button(text='Chiudi', on_press=self.popup.dismiss))
self.popup.bind(on_dismiss=self.restart_game)
self.popup.open()
else:
if self.shape.remove_lines(self.shape_count):
self.score += 10
if self.score > 0:
self.ids["score"].text = "punteggio: " + str(self.score)
self.shape = None
self.update_count = 0
self.update_count += 1
else:
self.update_count = 0
def move_to_right(self):
self.move_right += 1
def move_to_left(self):
self.move_left += 1
def slide_down(self):
self.slide = True
def rotate(self):
self.rot += 1
class TetrisApp(App):
def build(self):
game = TetrisGame()
game.start()
return game
if __name__ == '__main__':
TetrisApp().run()
| zopo803/python-tetris | main.py | Python | gpl-2.0 | 28,053 |
#
# Copyright (C) 2003-2005 Norwegian University of Science and Technology
# Copyright (C) 2008-2013 UNINETT AS
#
# This file is part of Network Administration Visualized (NAV).
#
# NAV is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License version 2 as published by
# the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details. You should have received a copy of the GNU General Public
# License along with NAV. If not, see <http://www.gnu.org/licenses/>.
#
"""Handling web requests for the Report subsystem."""
from IPy import IP
from operator import itemgetter
from collections import defaultdict, namedtuple
from time import localtime, strftime
import csv
import os
import re
# this is just here to make sure Django finds NAV's settings file
# pylint: disable=W0611
from django.core.cache import cache
from django.core.urlresolvers import reverse
from django.core.paginator import Paginator, InvalidPage
from django.shortcuts import render_to_response, render
from django.template import RequestContext
from django.http import HttpResponse, Http404, HttpResponseRedirect
from django.db import connection
from nav.models.manage import Prefix
from nav.report.IPtree import get_max_leaf, build_tree
from nav.report.generator import Generator, ReportList
from nav.report.matrixIPv4 import MatrixIPv4
from nav.report.matrixIPv6 import MatrixIPv6
from nav.report.metaIP import MetaIP
import nav.path
from nav.web.navlets import add_navlet
IpGroup = namedtuple('IpGroup', 'private ipv4 ipv6')
CONFIG_FILE_PACKAGE = os.path.join(nav.path.sysconfdir, "report/report.conf")
CONFIG_FILE_LOCAL = os.path.join(nav.path.sysconfdir,
"report/report.local.conf")
FRONT_FILE = os.path.join(nav.path.sysconfdir, "report/front.html")
DEFAULT_PAGE_SIZE = 25
PAGE_SIZES = [25, 50, 100, 500, 1000]
def index(request):
"""Report front page"""
context = {
'title': 'Report - Index',
'navpath': [('Home', '/'), ('Report', False)],
'heading': 'Report Index'
}
with open(FRONT_FILE, 'r') as f:
context['index'] = f.read()
return render_to_response("report/index.html", context,
RequestContext(request))
def get_report_for_widget(request, report_name):
"""Fetches a report for display in a widget"""
query = _strip_empty_arguments(request)
export_delimiter = _get_export_delimiter(query)
context = make_report(request, report_name, export_delimiter, query)
return render(request, 'report/frag_report_table.html', context)
def get_report(request, report_name):
"""Loads and displays a specific reports with optional search arguments"""
query = _strip_empty_arguments(request)
export_delimiter = _get_export_delimiter(query)
if query != request.GET:
# some arguments were stripped, let's clean up the URL
return HttpResponseRedirect(
"{0}?{1}".format(request.META['PATH_INFO'], query.urlencode()))
context = make_report(request, report_name, export_delimiter, query)
if 'exportcsv' in request.GET:
return context
# Magic flag for adding sorting links to table
context['add_sort_links'] = True
context['page_sizes'] = PAGE_SIZES
return render_to_response('report/report.html', context,
RequestContext(request))
def _strip_empty_arguments(request):
"""Strips empty arguments and their related operator arguments from the
QueryDict in request.GET and returns a new, possibly modified QueryDict.
"""
query = request.GET.copy()
deletable = [key for key, value in query.iteritems() if not value.strip()]
for key in deletable:
del query[key]
if "op_{0}".format(key) in query:
del query["op_{0}".format(key)]
if "not_{0}".format(key) in query:
del query["not_{0}".format(key)]
return query
def _get_export_delimiter(query):
"""Retrieves the CSV export delimiter from a QueryDict, but only if the
query indicates the CSV export submit button was pressed.
If the delimiter is invalid, the export-related arguments are stripped
from the query instance.
"""
if 'exportcsv' in query and 'export' in query:
delimiter = query.get('export')
match = re.search(r"(,|;|:|\|)", delimiter)
if match:
return match.group(0)
else:
del query['export']
del query['exportcsv']
def matrix_report(request):
"""Subnet matrix view
:type request: django.http.request.HttpRequest
"""
show_unused = request.GET.get('show_unused_addresses', False)
context = {
'navpath': [
('Home', '/'),
('Report', reverse('report-index')),
('Subnet matrix', reverse('report-matrix'))
],
'show_unused': show_unused
}
if 'scope' not in request.GET:
scopes = Prefix.objects.filter(vlan__net_type='scope')
if scopes.count() == 1:
# If there is only one scope in the database display that scope
scope = IP(scopes[0].net_address)
else:
# Else let the user select one
context['scopes'] = group_scopes(scopes)
return render(request, 'report/matrix.html', context)
else:
scope = IP(request.GET.get('scope'))
matrix = create_matrix(scope, show_unused)
hide_content_for_colspan = []
if scope.version() == 4:
hide_content_for_colspan = [1, 2, 4]
context.update({
'matrix': matrix,
'sub': matrix.end_net.prefixlen() - matrix.bits_in_matrix,
'ipv4': scope.version() == 4,
'family': scope.version(),
'scope': scope,
'hide_for': hide_content_for_colspan
})
return render_to_response(
'report/matrix.html',
context,
context_instance=RequestContext(request))
def group_scopes(scopes):
"""Group scopes by version and type
:type scopes: list[Prefix]
"""
def _prefix_as_int(prefix):
return IP(prefix.net_address).int()
groups = defaultdict(list)
for scope in scopes:
prefix = IP(scope.net_address)
if prefix.iptype() == 'PRIVATE':
groups['private'].append(scope)
elif prefix.version() == 4:
groups['ipv4'].append(scope)
elif prefix.version() == 6:
groups['ipv6'].append(scope)
if any([groups['private'], groups['ipv4'], groups['ipv6']]):
return IpGroup(*[sorted(groups[x], key=_prefix_as_int)
for x in ('private', 'ipv4', 'ipv6')])
else:
return []
def create_matrix(scope, show_unused):
"""Creates a matrix for the given scope"""
tree = build_tree(scope)
if scope.version() == 6:
if scope.prefixlen() < 60:
end_net = IP(scope.net().strNormal() + '/64')
matrix = MatrixIPv6(scope, end_net=end_net)
else:
end_net = get_max_leaf(tree)
matrix = MatrixIPv6(scope, end_net=end_net)
elif scope.version() == 4:
if scope.prefixlen() < 24:
end_net = IP(scope.net().strNormal() + '/30')
matrix = MatrixIPv4(scope, show_unused, end_net=end_net,
bits_in_matrix=6)
else:
max_leaf = get_max_leaf(tree)
bits_in_matrix = max_leaf.prefixlen() - scope.prefixlen()
matrix = MatrixIPv4(scope, show_unused, end_net=max_leaf,
bits_in_matrix=bits_in_matrix)
else:
raise UnknownNetworkTypeException(
            'version: ' + str(scope.version()))
# Invalidating the MetaIP cache to get rid of processed data.
MetaIP.invalidateCache()
matrix.build()
return matrix
def report_list(request):
"""Automated report list view"""
key = itemgetter(1)
reports = ReportList(CONFIG_FILE_PACKAGE).get_report_list()
reports.sort(key=key)
reports_local = ReportList(CONFIG_FILE_LOCAL).get_report_list()
reports_local.sort(key=key)
context = {
'title': 'Report - Report List',
'navpath': [
('Home', '/'),
('Report', '/report/'),
('Report List', '/report/reportlist'),
],
'heading': 'Report list',
'report_list': reports,
'report_list_local': reports_local
}
return render_to_response('report/report_list.html', context,
RequestContext(request))
def make_report(request, report_name, export_delimiter, query_dict,
paginate=True):
"""Makes a report
    :param paginate: Introduced to be able to toggle display of the
    pagination elements. Used in the widget rendering.
"""
# Initiating variables used when caching
report = contents = neg = operator = adv = result_time = None
if not report_name:
return None
# Pagination related variables
page_number = query_dict.get('page_number', 1)
page_size = get_page_size(request)
query_string = "&".join(["%s=%s" % (x, y)
for x, y in query_dict.iteritems()
if x != 'page_number'])
    # Delete meta variables and empty values from the URI to help verify
    # that the requested report is in the cache
meta_to_delete = ['offset', 'limit', 'export', 'exportcsv', 'page_number',
'page_size']
uri_strip = {key: value
for key, value in query_dict.items()
if key not in meta_to_delete and value != ""}
mtime_config = (os.stat(CONFIG_FILE_PACKAGE).st_mtime +
os.stat(CONFIG_FILE_LOCAL).st_mtime)
cache_name = 'report_%s__%s%s' % (request.account.login,
report_name, mtime_config)
def _fetch_data_from_db():
(report, contents, neg, operator, adv, config, dbresult) = (
gen.make_report(report_name, CONFIG_FILE_PACKAGE,
CONFIG_FILE_LOCAL, query_dict, None, None))
if not report:
raise Http404
result_time = strftime("%H:%M:%S", localtime())
cache.set(cache_name,
(uri_strip, report, contents, neg, operator, adv, config,
dbresult, result_time))
return report, contents, neg, operator, adv, result_time
gen = Generator()
# Caching. Checks if cache exists for this user, that the cached report is
# the one requested and that config files are unchanged.
report_cache = cache.get(cache_name)
if report_cache and report_cache[0] == uri_strip:
dbresult_cache = report_cache[7]
config_cache = report_cache[6]
(report, contents, neg, operator, adv) = (
gen.make_report(report_name, None, None, query_dict,
config_cache, dbresult_cache))
        result_time = report_cache[8]  # reuse the cached tuple fetched above
else: # Report not in cache, fetch data from DB
(report, contents, neg, operator, adv,
result_time) = _fetch_data_from_db()
if export_delimiter:
return generate_export(report, report_name, export_delimiter)
else:
paginator = Paginator(report.table.rows, page_size)
try:
page = paginator.page(page_number)
except InvalidPage:
page_number = 1
page = paginator.page(page_number)
context = {
'heading': 'Report',
'result_time': result_time,
'report': report,
'paginate': paginate,
'page': page,
'current_page_range': find_page_range(page_number,
paginator.page_range),
'query_string': query_string,
'contents': contents,
'operator': operator,
'neg': neg,
}
if report:
# A maintainable list of variables sent to the template
context['operators'] = {
'eq': '=',
'like': '~',
'gt': '>',
'lt': '<',
'geq': '>=',
'leq': '<=',
'between': '[:]',
'in': '(,,)',
}
context['operatorlist'] = [
'eq', 'like', 'gt', 'lt',
'geq', 'leq', 'between', 'in'
]
context['descriptions'] = {
'eq': 'equals',
'like': 'contains substring (case-insensitive)',
'gt': 'greater than',
'lt': 'less than',
'geq': 'greater than or equals',
'leq': 'less than or equals',
'between': 'between (colon-separated)',
'in': 'is one of (comma separated)',
}
context['delimiters'] = (',', ';', ':', '|')
page_name = report.title or report_name
page_link = '/report/{0}'.format(report_name)
else:
page_name = "Error"
page_link = False
navpath = [('Home', '/'),
('Report', '/report/'),
(page_name, page_link)]
adv_block = bool(adv)
context.update({
'title': 'Report - {0}'.format(page_name),
'navpath': navpath,
'adv_block': adv_block,
})
return context
def get_page_size(request):
"""Gets the page size based on preferences"""
account = request.account
key = account.PREFERENCE_KEY_REPORT_PAGE_SIZE
if 'page_size' in request.GET:
page_size = request.GET.get('page_size')
if account.preferences.get(key) != page_size:
account.preferences[key] = page_size
account.save()
elif key in account.preferences:
page_size = account.preferences[key]
else:
page_size = DEFAULT_PAGE_SIZE
return page_size
def find_page_range(page_number, page_range, visible_pages=5):
"""Finds a suitable page range given current page.
Tries to make an even count of pages before and after page_number
"""
length = len(page_range)
page_number = int(page_number)
if length <= visible_pages:
return page_range
padding = visible_pages / 2
start = page_number - 1 - padding
if start < 0:
start = 0
end = start + visible_pages
if end >= length:
end = length
start = length - visible_pages
return page_range[start:end]
def generate_export(report, report_name, export_delimiter):
"""Generates a CSV export version of a report"""
def _cellformatter(cell):
if isinstance(cell.text, unicode):
return cell.text.encode('utf-8')
else:
return cell.text
response = HttpResponse(content_type="text/x-csv; charset=utf-8")
response["Content-Type"] = "application/force-download"
response["Content-Disposition"] = (
"attachment; filename=report-%s-%s.csv" %
(report_name, strftime("%Y%m%d", localtime()))
)
writer = csv.writer(response, delimiter=str(export_delimiter))
# Make a list of headers
header_row = [_cellformatter(cell) for cell in report.table.header.cells]
writer.writerow(header_row)
# Make a list of lists containing each cell. Considers the 'hidden' option
# from the config.
rows = []
for row in report.table.rows:
rows.append([_cellformatter(cell) for cell in row.cells])
writer.writerows(rows)
return response
def add_report_widget(request):
"""
:type request: HttpRequest
"""
report_id = request.POST.get('report_id')
if not report_id:
return HttpResponse('No report name supplied', status=400)
navlet = 'nav.web.navlets.report.ReportWidget'
preferences = {
'report_id': report_id,
'query_string': request.POST.get('query_string')
}
add_navlet(request.account, navlet, preferences)
return HttpResponse()
class UnknownNetworkTypeException(Exception):
"""Unknown network type"""
pass
| sigmunau/nav | python/nav/web/report/views.py | Python | gpl-2.0 | 16,385 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
'''
file generate_contributions.py
This file is part of LyX, the document processor.
Licence details can be found in the file COPYING.
author Angus Leeming
Full author contact details are available in file CREDITS
This script both stores and manipulates the raw data needed to
create CREDITS, credits.inc and blanket-permission.inc
Usage:
$ python generate_contributions.py \
CREDITS \
credits.inc \
blanket-permission.inc
where the arguments are the pathnames of the generated files.
'''
import codecs, sys, textwrap
def xml_escape(s):
s = s.replace("&", "&")
s = s.replace("<", "<")
s = s.replace(">", ">")
s = s.replace('"', '"')
return s
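# e.g. xml_escape('"R&D" <lab>') -> '&quot;R&amp;D&quot; &lt;lab&gt;'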
class contributor:
def __init__(self,
name,
contact,
licence,
permission_title,
archive_id,
permission_date,
credit):
self.name = name
self.contact = contact
self.licence = licence
self.permission_title = permission_title
self.archive_id = archive_id
self.permission_date = permission_date
self.credit = credit
def as_txt_credits(self):
result = [ '@b%s\n' % self.name ]
if len(self.contact) != 0:
if self.contact.find("http") != -1:
result.append('@i%s\n' % self.contact)
else:
result.append('@iE-mail: %s\n' % self.contact)
result.append(' %s\n' % self.credit.replace('\n', '\n '))
return "".join(result)
def as_php_credits(self, wrapper):
return '''
$output=$output.credits_contrib("%s",
"%s",
"%s");
''' % ( xml_escape(self.name),
xml_escape(self.contact),
"\n".join(wrapper.wrap(xml_escape(self.credit))) )
def as_php_blanket(self):
return '''
$output=$output.blanket_contrib("%s",
"%s",
"%s",
"%s",
"%s");
''' % ( xml_escape(self.name),
xml_escape(self.contact),
xml_escape(self.permission_title),
xml_escape(self.archive_id),
xml_escape(self.permission_date) )
def error(message):
if message:
sys.stderr.write(message + '\n')
sys.exit(1)
def usage(prog_name):
return '''
Usage:
$ python generate_contributions.py \\
CREDITS \\
credits.inc \\
blanket-permission.inc
where the arguments are the pathnames of the generated files.
'''
def collate_incomplete(contributors):
missing_credit = []
missing_licence = []
for contributor in contributors:
if len(contributor.credit) == 0:
missing_credit.append(contributor.name)
if len(contributor.licence) == 0:
missing_licence.append(contributor.name)
return '''WARNING!
The following contributors do not have a CREDITS entry:
%s
These ones have no explicit licence statement:
%s
''' % ( ",\n ".join(missing_credit), ",\n ".join(missing_licence))
def as_txt_credits(contributors):
results = []
for contributor in contributors:
if len(contributor.credit) != 0:
results.append(contributor.as_txt_credits())
results.append('''
If your name doesn't appear here although you've done something for LyX, or your entry is wrong or incomplete, just drop some e-mail to [email protected]. Thanks.
''')
return "".join(results)
def header():
return '''<?php
// WARNING! This file is autogenerated.
// Any changes to it will be lost.
// Please modify generate_contributions.py directly.
'''
def footer():
return '''
'''
def as_php_credits(contributors, file):
results = []
results.append(header())
results.append('''
function credits_contrib($name, $email, $msg) {
$email = str_replace(' () ', '@', $email);
$email = str_replace(' ! ', '.', $email);
if (isset($email) && $email != "") {
if (strncasecmp($email,"http",4) == 0)
$output =$output. "<dt><b>[[${email} | ${name}]]</b>";
else
$output=$output. "<dt><b>[[mailto:${email} | ${name}]]</b>";
} else
$output=$output. "<dt><b>${name}</b>";
$msg = preg_replace("/\\n */", "\\n ", ltrim($msg));
$output=$output. "
</dt>
<dd>
${msg}
</dd>";
return $output;
}
function credits_output() {
$output=$output."<p>
If your name doesn't appear here although you've done
something for LyX, or your entry is wrong or incomplete,
just drop an e-mail to the
[[mailto:[email protected] | lyx-devel]]
mailing list. Thanks.
</p>
<dl>";
''')
wrapper = textwrap.TextWrapper(width=60, subsequent_indent=" ")
for contributor in contributors:
if len(contributor.credit) != 0:
results.append(contributor.as_php_credits(wrapper))
results.append('''
$output=$output."</dl>";
return $output;
}
''')
results.append(footer())
return "".join(results)
def as_php_blanket(contributors, file):
results = []
results.append(header())
results.append('''
function blanket_contrib($name, $email, $msg_title, $msg_ref, $date) {
$email = str_replace(' () ', '@', $email);
$email = str_replace(' ! ', '.', $email);
$output=$output. "
<dt>
<b>[[mailto:${email} | ${name}]]</b>
</dt>
<dd>
See the lyx-devel mailing list message
"";
if (isset($msg_ref) && $msg_ref != "") {
$msg_ref = htmlspecialchars("$msg_ref");
$output=$output. "[[http://marc.info/?l=lyx-devel&" . ${msg_ref} . "|" . ${msg_title} . "]]";
} else {
$output=$output. "${msg_title}";
}
$output=$output. ""
of $date.
</dd>";
return $output;
}
function blanket_output() {
$output=$output."<p>
The following people hereby grant permission to license their
contributions to LyX under the
[[http://www.opensource.org/licenses/gpl-license.php |
Gnu General Public License]], version 2 or later.
</p>
<dl>";
''')
for contributor in contributors:
if contributor.licence == "GPL":
results.append(contributor.as_php_blanket())
results.append('''
$output=$output."</dl>";
$output=$output."
<p>
The following people hereby grant permission to license their
contributions to LyX under the
[[http://www.opensource.org/licenses/artistic-license-2.0.php |
Artistic License 2]].
</p>
<dl>";
''')
for contributor in contributors:
if contributor.licence == "Artistic":
results.append(contributor.as_php_blanket())
results.append('''
$output=$output."</dl>";
return $output;
}
''')
results.append(footer())
return "".join(results)
def main(argv, contributors):
    if len(argv) != 4:
        error(usage(argv[0]))
    txt_credits_data = unicode(as_txt_credits(contributors)).encode("utf-8")
    txt_credits = open(argv[1], "w")
    txt_credits.write(txt_credits_data)
    txt_credits.close()
    php_credits_data = unicode(as_php_credits(contributors, argv[2])).encode("utf-8")
    php_credits = open(argv[2], "w")
    php_credits.write(php_credits_data)
    php_credits.close()
    php_blanket_data = unicode(as_php_blanket(contributors, argv[3])).encode("utf-8")
    php_blanket = open(argv[3], "w")
    php_blanket.write(php_blanket_data)
    php_blanket.close()
    warning_data = unicode(collate_incomplete(contributors) + '\n').encode("utf-8")
    sys.stderr.write(warning_data)
# Store the raw data.
contributors = [
contributor(u"Ronen Abravanel",
"ronena () gmail ! com",
"GPL",
"Re: Patch: Diagram inset",
"m=128486837824718",
"19 September 2010",
u"Support for feyn diagrams"),
contributor(u"Maarten Afman",
"info () afman ! net",
"GPL",
"Fwd: Re: The LyX licence",
"m=110958096916679",
"27 February 2005",
u"Dutch translation team member"),
contributor(u"Hatim Alahmadi",
"dr.hatim () hotmail ! com",
"GPL",
"license issue",
"m=121727417724431",
"28 July 2008",
u"Arabic translation"),
contributor(u"Asger Alstrup",
"aalstrup () laerdal ! dk",
"GPL",
"Re: Licensing of tex2lyx (and perhaps LyX itself?)",
"m=110899716913300",
"21 February 2005",
u"General hacking of user interface stuff and those other bits and pieces"),
contributor(u"Jesper Stemann Andersen",
"jesper () sait ! dk",
"GPL",
"Contributions GPLed",
"m=130336947315984",
"21 April 2011",
u"Danish translation"),
contributor(u"Pascal André",
"andre () via ! ecp ! fr",
"GPL",
"Re: The LyX licence --- a gentle nudge",
"m=111263406200012",
"1 April 2005",
u"External style definition files, linuxdoc sgml support and more ftp-site ftp.lyx.org"),
contributor(u"Liviu Andronic",
"landronimirc () gmail ! com",
"GPL",
"contributions GPLed",
"m=121869084720708",
"14 August 2008",
u"Romanian localization and support for the frletter document class"),
contributor(u"Georger Araujo",
"georger_br () yahoo ! com ! br",
"GPL",
"pt_BR.po translation for LyX 2.1.3",
"m=143058265303480",
"2 May 2015",
u"Brazilian Portuguese translation"),
contributor(u"João Luis Meloni Assirati",
"assirati () nonada ! if ! usp ! br",
"GPL",
"Re: The LyX licence",
"m=110918749022256",
"23 February 2005",
u"Added support for unix sockets and thence the 'inverse DVI' feature"),
contributor(u"Patrick Atamaniuk",
"atamaniuk () frobs ! net",
"GPL",
"License for my contributions",
"m=129594232112957",
"28 January 2011",
u"fix-cm module"),
contributor(u"Gioele Barabucci",
"gioele () svario ! it",
"GPL",
"Contribution license",
"m=136933235620262",
"23 May 2013",
u"ACM-SIGS layouts"),
contributor(u"Özgür Uğraş Baran",
"ugras.baran () gmail ! com",
"GPL",
"Re: [patch] new InsetCommandParams",
"m=116124030512963",
"19 October 2006",
u"New commandparams structure, Nomenclature inset"),
contributor(u"Susana Barbosa",
"susana.barbosa () fc ! up ! pt",
"GPL",
"License",
"m=118707828425316",
"14 August 2007",
u"Portuguese translation"),
contributor(u"Yves Bastide",
"yves.bastide () irisa ! fr",
"GPL",
"Re: The LyX licence",
"m=110959913631678",
"28 February 2005",
u"Bug fixes"),
contributor(u"Heinrich Bauer",
"heinrich.bauer () t-mobile ! de",
"GPL",
"Fwd: Re: The LyX licence",
"m=110910430117798",
"22 February 2005",
u"Fixes for dvi output original version of page selection for printing"),
contributor(u"Georg Baum",
"georg.baum () post ! rwth-aachen ! de",
"GPL",
"Re: Licensing of tex2lyx (and perhaps LyX itself?)",
"m=110899912526043",
"21 February 2005",
u"tex2lyx improvements, bug fixes, unicode work"),
contributor(u"Hans Bausewein",
"hans () comerwell ! xs4all ! nl",
"GPL",
"Re: The LyX licence --- a gentle nudge",
"m=111262999400394",
"2 April 2005",
'"case insensitive" and "complete word" search'),
contributor(u"Kornel Benko",
"Kornel.Benko () berlin ! de",
"GPL",
"The LyX licence",
"m=123100818303101",
"3 January 2009",
u"small bugfixes, CMake build system, Slovak translation"),
contributor(u"Jacob Bishop",
"bishop.jacob () gmail ! com",
"GPL",
"Contributions...APA 6 Layout",
"m=135654106502977",
"26 December 2012",
u"APA 6 Layout"),
contributor(u"Punyashloka Biswal",
"punya.biswal () gmail ! com",
"GPL",
"Re: Patch for ticket #6848",
"m=128298296923913",
"28 August 2010",
u"Bug fixes"),
contributor(u"Graham Biswell",
"graham () gbiswell ! com",
"GPL",
"Re: The LyX licence",
"m=111269177728853",
"5 April 2005",
u"Small bugfixes that were very hard to find"),
contributor(u"Lars Gullik Bjønnes",
"larsbj () gullik ! net",
"GPL",
"Re: Licensing of tex2lyx (and perhaps LyX itself?)",
"m=110907078027047",
"22 February 2005",
u"Improvements to user interface (menus and keyhandling) including a configurable toolbar and a few other (not so) minor things, like rewriting most of the LyX kernel. Also previous source maintainer."),
contributor(u"Alfredo Braunstein",
"abraunst () lyx ! org",
"GPL",
"Re: The LyX licence",
"m=110927069513172",
"24 February 2005",
u"A (pseudo) threaded graphics loader queue, lots of fixes, etc."),
contributor(u"Christian Buescher",
"christian.buescher () uni-bielefeld ! de",
"",
"",
"",
"",
u"User-definable keys, lyxserver and more"),
contributor(u"Johnathan Burchill",
"jkerrb () users ! sourceforge ! net",
"GPL",
"Re: The LyX licence",
"m=110908472818670",
"22 February 2005",
u"Ported John Levon's original 'change tracking' code to later versions of LyX. Numerous bug fixes thereof."),
contributor(u"Francesc Burrull i Mestres",
"fburrull () mat ! upc ! es",
"",
"",
"",
"",
u"Catalan translation"),
contributor(u"Sergiu Carpov",
"ssmiler () gmail ! com",
"GPL",
"Re: Bug #5522",
"m=124721248310586",
"10 July 2009",
u"Bug fixes"),
contributor(u"Humberto Nicolás Castejón",
"beconico () gmail ! com",
"GPL",
"Re: The LyX licence",
"m=111833854105023",
"9 June 2005",
u"Spanish translation of the Windows installer"),
contributor(u"Matěj Cepl",
"matej () ceplovi ! cz",
"GPL",
"Re: The LyX licence",
"m=110913090232039",
"22 February 2005",
u"Improvements to the czech keymaps"),
contributor(u"Albert Chin",
"lyx-devel () mlists ! thewrittenword ! com",
"GPL",
"Re: The LyX licence --- a gentle nudge",
"m=111220294831831",
"30 March 2005",
u"Bug fixes"),
contributor(u"Jean-Pierre Chrétien",
"jeanpierre.chretien () free ! fr",
"GPL",
"Re: The LyX licence",
"m=111842518713710",
"10 June 2005",
u"French translations"),
contributor(u"Claudio Coco",
"lacocio () libero ! it",
"GPL",
"Agreement to GNU General Public licence",
"m=113749629514591",
"17 January 2006",
u"Italian translation"),
contributor(u"Yuri Chornoivan",
"yurchor () ukr ! net",
"GPL",
"Permission grant",
"m=121681339315810",
"23 July 2008",
u"Ukrainian translation"),
contributor(u"Tommaso Cucinotta",
"cucinotta () sssup !it",
"GPL",
"Re: View Menu proposal",
"m=119030065212621",
"20 Sep 2007",
u"Advanced search feature"),
contributor(u"Matthias Kalle Dalheimer",
"kalle () kdab ! net",
"GPL",
"Re: The LyX licence",
"m=110908857130107",
"22 February 2005",
u"Qt2 port"),
contributor(u"Ulysse Danglis",
"o2d () freemail ! gr",
"GPL",
"License of el.po",
"m=126738357204586",
"28 February 2010",
u"Greek translations"),
contributor(u"Ewan Davies",
"ewan.davies () googlemail ! com",
"GPL",
"Re: Starting Development",
"m=124248720628359",
"17 May 2009",
u"doxygen to LFUNs.lyx conversion"),
contributor(u"Jack Dessert",
"jackdesert556 () gmail ! com",
"GPL",
"License",
"m=126994985831115",
"30 March 2010",
u"Patches for configure.py"),
contributor(u"Min Ding",
"u5032331 () uds ! anu ! edu ! au",
"GPL",
"Accept GUN GPL",
"m=139864105011133",
"27 April 2014",
u"Chinese (simplified) translations"),
contributor(u"Anders Ekberg",
"anek () chalmers ! se",
"GPL",
"License agreement",
"m=113725822602516",
"14 January 2006",
u"Improvements to the Swedish translation of the Windows Installer"),
contributor(u"Martin Engbers",
"martin.engbers () gmx ! de",
"GPL",
"Re: [patch] Icon replacement",
"m=123877725311464",
"Apr 3 2009",
u"icon loading tweaks"),
contributor(u"Matthias Ettrich",
"ettrich () trolltech ! com",
"GPL",
"Fwd: Re: The LyX licence",
"m=110959638810040",
"28 February 2005",
u"Started the project, implemented the early versions, various improvements including undo/redo, tables, and much, much more"),
contributor(u"Baruch Even",
"baruch () ev-en ! org",
"GPL",
"Re: The LyX licence",
"m=110936007609786",
"25 February 2005",
u"New graphics handling scheme and more"),
contributor(u"Dov Feldstern",
"dfeldstern () fastimap ! com",
"GPL",
"Re: Farsi support re-submission plus a little more",
"m=118064913824836",
"31 May 2007",
u"RTL/BiDi-related fixes"),
contributor(u"Michał Fita",
"michal ! fita () gmail ! com",
"GPL",
"Statement for Polish translation",
"m=121615623122376",
"15 July 2008",
u"Polish translation"),
contributor(u"Ronald Florence",
"ron () 18james ! com",
"GPL",
"Re: The LyX licence --- a gentle nudge",
"m=111262821108510",
"31 March 2005",
u"Maintainer of the OS X port(s)"),
contributor(u"José Ramom Flores d'as Seixas",
"fa2ramon () usc ! es",
"GPL",
"Re: Galician translation",
"m=116136920230072",
"20 October 2006",
u"Galician documentation and localization"),
contributor(u"John Michael Floyd",
"jmf () pwd ! nsw ! gov ! au",
"",
"",
"",
"",
u"Bug fix to the spellchecker"),
contributor(u"Nicola Focci",
"nicola.focci () gmail ! com",
"GPL",
"Permission",
"m=120946605432341",
"29 April 2008",
u"Italian translation of documentations"),
contributor(u"Enrico Forestieri",
"forenr () tlc ! unipr ! it",
"GPL",
"Re: lyxpreview2ppm.py",
"m=111894292115287",
"16 June 2005",
u"Italian translations, many bug fixes and features"),
contributor(u"Eitan Frachtenberg",
"sky8an () gmail ! com",
"GPL",
"Re: [PATCH] BibTeX annotation support",
"m=111130799028250",
"20 March 2005",
u"BibTeX annotation support"),
contributor(u"Darren Freeman",
"dfreeman () ieee ! org",
"GPL",
"Licence",
"m=118612951707590",
"3 August 2007",
u"Improvements to mouse wheel scrolling; many bug reports"),
contributor(u"Max Funk",
"maxkhfunk () gmx ! net",
"GPL",
"GPL",
"m=130659936521230",
"28 May 2011",
u"Bug fixes"),
contributor(u"Edscott Wilson Garcia",
"edscott () xfce ! org",
"GPL",
"Re: The LyX licence --- a gentle nudge",
"m=111219295119021",
"30 March 2005",
u"Bug fixes"),
contributor(u"Ignacio García",
"ignacio.gmorales () gmail ! com",
"GPL",
"Re: es_EmbeddedObjects",
"m=117079592919653",
"06 February 2007",
u"Spanish translation of documentations"),
contributor(u"Michael Gerz",
"michael.gerz () teststep ! org",
"GPL",
"Re: The LyX licence",
"m=110909251110103",
"22 February 2005",
u"Change tracking, German localization, bug fixes"),
contributor(u"Stefano Ghirlanda",
"stefano.ghirlanda () unibo ! it",
"GPL",
"Re: The LyX licence",
"m=110959835300777",
"28 February 2005",
u"Improvements to lyxserver"),
contributor(u"Hartmut Goebel",
"h.goebel () crazy-compilers ! com",
"GPL",
"Re: The LyX licence --- a gentle nudge",
"m=111225910223564",
"30 March 2005",
u"Improvements to Koma-Script classes"),
contributor(u"Riccardo Gori",
"goriccardo () gmail ! com",
"GPL",
"Re: r35561 - lyx-devel/trunk/src/insets",
"m=128626762015975",
"5 Oct 2010",
u"Fixing tabular code"),
contributor(u"Peter Gumm",
"gumm () mathematik ! uni-marburg ! de",
"GPL",
"Re: xy-pic manual",
"m=122469079629276",
"22 October 2008",
u"XY-pic manual"),
contributor(u"İbrahim Güngör",
"h.ibrahim.gungor () gmail ! com",
"GPL",
"Update Turkish Translation",
"m=122583550732670",
"4 Nov 2008",
u"Turkish translation"),
contributor(u"Hartmut Haase",
"hha4491 () web ! de",
"GPL",
"Re: The LyX licence",
"m=110915427710167",
"23 February 2005",
u"German translation of the documentation"),
contributor(u"Helge Hafting",
"helgehaf () aitel ! hist ! no",
"GPL",
"Re: The LyX licence",
"m=110916171925288",
"23 February 2005",
u"Norwegian documentation and localization"),
contributor(u"Richard Heck",
"rgheck () comcast ! net",
"GPL",
"GPL Statement",
"m=117501689204059",
"27 March 2007",
u"Bug fixes, layout modules, BibTeX code, XHTML export. Current stable branch maintainer."),
contributor(u"Bennett Helm",
"bennett.helm () fandm ! edu",
"GPL",
"Re: The LyX licence",
"m=110907988312372",
"22 February 2005",
u"Maintainer of the OSX ports, taking over from Ronald Florence"),
contributor(u"Kevin B. Hendricks",
"kevin.hendricks () sympatico ! ca",
"GPL",
"Fwd: Re: Integration of libmythes and hunspell",
"m=124190107613441",
"9 May 2009",
u"Author of the MyThes thesaurus library"),
contributor(u"Claus Hentschel",
"claus.hentschel () mbau ! fh-hannover ! de",
"",
"",
"",
"",
u"Win32 port of LyX 1.1.x"),
contributor(u"Josh Hieronymous",
"josh.p.hieronymus () gmail ! com",
"GPL",
"licensing my contributions to LyX",
"m=137426932127289",
"19 July 2013",
u"XHTML and ePub Improvements (GSOC Student)"),
contributor(u"Claus Hindsgaul",
"claus_h () image ! dk",
"GPL",
"Re: The LyX licence",
"m=110908607416324",
"22 February 2005",
u"Danish translation"),
contributor(u"Martin Hoffmann",
"hoffimar () gmail ! com",
"GPL",
"Re: #8703: 'new shortcut' box closes if no shortcut",
"m=138105799411067",
"6 October 2013",
u"Dialog usability fix"),
contributor(u"Bernard Hurley",
"bernard () fong-hurley ! org ! uk",
"GPL",
"Re: The LyX licence --- a gentle nudge",
"m=111218682804142",
"30 March 2005",
u"Fixes to literate programming support"),
contributor(u"Marius Ionescu",
"felijohn () gmail ! com",
"GPL",
"permission to licence",
"m=115935958330941",
"27 September 2006",
u"Romanian localization"),
contributor(u"Bernhard Iselborn",
"bernhard.iselborn () sap ! com",
"GPL",
"RE: The LyX licence",
"m=111268306522212",
"5 April 2005",
u"Some minor bug-fixes, FAQ, linuxdoc sgml support"),
contributor(u"Masanori Iwami",
"masa.iwm () gmail ! com",
"GPL",
"Re: [patch] Addition of input method support",
"m=117541512517453",
"1 April 2007",
u"Development of CJK language support"),
contributor(u"Michal Jaegermann",
"michal () ellpspace ! math ! ualberta ! ca",
"GPL",
"Re: The LyX licence",
"m=110909853626643",
"22 February 2005",
u"Fix to a very hard-to-find egcs bug that crashed LyX on alpha architecture"),
contributor(u"Harshula Jayasuriya",
"harshula () gmail ! com",
"GPL",
"Re: Bug in export to DocBook",
"m=116884249725701",
"15 January 2007",
u"Fix docbook generation of nested lists"),
contributor(u"David L. Johnson",
"david.johnson () lehigh ! edu",
"GPL",
"GPL",
"m=110908492016593",
"22 February 2005",
u"Public relations, feedback, documentation and support"),
contributor(u"Robert van der Kamp",
"robnet () wxs ! nl",
"GPL",
"Re: The LyX licence",
"m=111268623330209",
"5 April 2005",
u"Various small things and code simplifying"),
contributor(u"Amir Karger",
"amirkarger () gmail ! com",
"GPL",
"Re: The LyX licence",
"m=110912688520245",
"23 February 2005",
u"Tutorial, reLyX: the LaTeX to LyX translator"),
contributor(u"Zahari Dmitrov Kassabov",
"zaharid () gmail ! com",
"GPL",
"GPL Statement",
"m=135540059615508",
"13 December 2012",
u"Bug fixes"),
contributor(u"Carmen Kauffmann",
"",
"",
"",
"",
"",
u"Original name that is now two characters shorter"),
contributor(u"KDE Artists",
"http://artist.kde.org/",
"",
"",
"",
"",
u"Authors of several of the icons LyX uses"),
contributor(u"Andreas Klostermann",
"andreas_klostermann () web ! de",
"GPL",
"blanket-permission",
"m=111054675600338",
"11 March 2005",
u"Gtk reference insertion dialog"),
contributor(u"Timo Kluck",
"tkluck () gmail ! com",
"GPL",
"license statement",
"m=132334049317495",
"8 December 2011",
u"Dutch translation, icon fixes"),
contributor(u"Kostantino",
"ciclope10 () alice ! it",
"GPL",
"Permission granted",
"m=115513400621782",
"9 August 2006",
u"Italian localization of the interface"),
contributor(u"Scott Kostyshak",
"[email protected]",
"GPL",
"GPL Statement",
"m=133076234031944",
"3 March 2012",
u"Small UI fixes"),
contributor(u"Michael Koziarski",
"koziarski () gmail ! com",
"GPL",
"Re: The LyX licence",
"m=110909592017966",
"22 February 2005",
u"Gnome port"),
contributor(u"Peter Kremer",
"kremer () bme-tel ! ttt ! bme ! hu",
"",
"",
"",
"",
u"Hungarian translation and bind file for menu shortcuts"),
contributor(u'Marcus Kriele',
"mkriele () me ! com",
"GPL",
"License permission",
"m=130384781027177",
"26 April 2011",
u"Fixing various sv* layouts"),
contributor(u'Valeriy Kruchko',
"lerkru () gmail ! com",
"GPL",
"Re: translation in to russian about 68%",
"m=125904983806681",
"24 November 2009",
u"Russian translation of the user interface"),
contributor(u"Peter Kümmel",
"syntheticpp () gmx ! net",
"GPL",
"License",
"m=114968828021007",
"7 June 2006",
u"Qt4 coding, CMake build system, bug fixing, testing, clean ups, and profiling"),
contributor(u"Bernd Kümmerlen",
"bkuemmer () gmx ! net",
"GPL",
"Re: The LyX licence",
"m=110934318821667",
"25 February 2005",
u"Initial version of the koma-script textclasses"),
contributor(u"Felix Kurth",
"felix () fkurth ! de",
"GPL",
"Re: The LyX licence",
"m=110908918916109",
"22 February 2005",
u"Support for textclass g-brief2"),
contributor(u"Rob Lahaye",
"lahaye () snu ! ac ! kr",
"GPL",
"Re: The LyX licence",
"m=110908714131711",
"22 February 2005",
u"Xforms dialogs and GUI related code"),
contributor(u"Jean-Marc Lasgouttes",
"lasgouttes () lyx ! org",
"GPL",
"Re: Licensing of tex2lyx (and perhaps LyX itself?)",
"m=110899928510452",
"21 February 2005",
u"configure and Makefile-stuff, many bugfixes and more. Previous stable branch maintainer."),
contributor(u"Victor Lavrenko",
"lyx () lavrenko ! pp ! ru",
"",
"",
"",
"",
u"Russian translation"),
contributor(u"Angus Leeming",
"leeming () lyx ! org",
"GPL",
"Re: Licensing of tex2lyx (and perhaps LyX itself?)",
"m=110899671520339",
"21 February 2005",
u"GUI-I-fication of insets and more"),
contributor(u"Edwin Leuven",
"e.leuven () gmail ! com",
"GPL",
"Re: Licensing of tex2lyx (and perhaps LyX itself?)",
"m=110899657530749",
"21 February 2005",
u"Tabular and misc UI stuff"),
contributor(u"John Levon",
"levon () movementarian ! org",
"GPL",
"Re: Licensing of tex2lyx (and perhaps LyX itself?)",
"m=110899535600562",
"21 February 2005",
u"Qt2 frontend, GUII work, bugfixes"),
contributor(u"Ling Li",
"ling () caltech ! edu",
"GPL",
"Re: LyX 1.4cvs crash on Fedora Core 3",
"m=111204368700246",
"28 March 2005",
u"Added native support for \makebox to mathed. Several bug fixes, both to the source code and to the llncs layout file"),
contributor(u"LibreOffice Team",
"http://www.libreoffice.org/",
"LGPL",
"",
"",
"",
u"Libreoffice Icon Theme"),
contributor(u"Tomasz Łuczak",
"tlu () technodat ! com ! pl",
"GPL",
"Re: [Cvslog] lyx-devel po/: ChangeLog pl.po lib/: CREDITS",
"m=113580483406067",
"28 December 2005",
u"Polish translation and mw* layouts files"),
contributor(u"Hangzai Luo",
"memcache () gmail ! com",
"GPL",
"Re: [patch] tex2lyx crash when full path is given from commandline on Win32",
"m=118326161706627",
"1 July 2007",
u"Bugfixes"),
contributor(u"Mohamed Magdy",
"physicist2010 () gmail ! com>",
"GPL",
"A permission to use my Arabic-Translation for LyX?",
"m=126877445318267",
"16 March 2010",
u"Arabic translation"),
contributor(u"Jari-Matti Mäkelä",
"jmjmak () utu ! fi",
"GPL",
"Re: lyx fi translation update",
"m=142987910907596",
"24 April 2015",
u"Contribution to the Finnish Localization."),
contributor(u"Tetsuya Makimura",
"makimura () ims ! tsukuba.ac ! jp",
"GPL",
"Re: Support request for Japanese without CJK, again (Re: [Fwd: About Japanese edition ...)",
"m=121905769227884",
"18 August 2008",
u"Improvements to the Japanese language support."),
contributor(u"José Matos",
"jamatos () fc ! up ! pt",
"GPL",
"Re: The LyX licence",
"m=110907762926766",
"22 February 2005",
u"linuxdoc sgml support. Previous release manager."),
contributor(u"Roman Maurer",
"roman.maurer () amis ! net",
"GPL",
"Re: The LyX licence",
"m=110952616722307",
"27 February 2005",
u"Slovenian translation coordinator"),
contributor(u"John McCabe-Dansted",
"gmatht () gmail ! com",
"GPL",
"Re: Randomly Generated Crash Reports Useful?",
"m=124515770509946",
"15 June 2009",
u"Keys-test module, bug fixing"),
contributor(u"Caolán McNamara",
"caolanm () redhat ! com",
"GPL",
"Statement for enchant integration",
"m=126389593805123",
"19 January 2010",
u"Support for the enchant spell checking library"),
contributor(u"Tino Meinen",
"a.t.meinen () chello ! nl",
"GPL",
"Re: Licensing your contributions to LyX",
"m=113078277722316",
"31 October 2005",
u"Dutch translation coordinator"),
contributor(u"Siegfried Meunier-Guttin-Cluzel",
"meunier () coria ! fr",
"GPL",
"French translations",
"m=119485816312776",
"12 November 2007",
u"French translations of the documentation"),
contributor(u"Günter Milde",
"milde () users ! berlios ! de",
"GPL",
"copyleft",
"m=122398147620761",
"14 October 2008",
u"Unicode and layout file fixes"),
contributor(u"Dustin J. Mitchell",
"dustin () v ! igoro ! us",
"GPL",
"Fwd: Your patch for LyX",
"m=139255709609015",
"16 February 2014",
u"Fix for csv2lyx"),
contributor(u"Joan Montané",
"jmontane () gmail ! com",
"GPL",
"Re: LyX translation updates needed",
"m=118765575314017",
"21 August 2007",
u"Catalan translations of menus"),
contributor(u"Stéphane Mourey",
"stephane.mourey () impossible-exil ! info",
"GPL",
"Re: gpl",
"m=141381522413781",
"20 October 2014",
u"New lfun server-get-statistics"),
contributor(u"Iñaki Larrañaga Murgoitio",
"dooteo () euskalgnu ! org",
"GPL",
"Re: The LyX licence",
"m=110908606525783",
"22 February 2005",
u"Basque documentation and localization"),
contributor(u"Daniel Naber",
"daniel.naber () t-online ! de",
"GPL",
"Re: The LyX licence",
"m=110911176213928",
"22 February 2005",
u"Improvements to the find&replace dialog"),
contributor(u"Pablo De Napoli",
"pdenapo () mate ! dm ! uba ! ar",
"GPL",
"Re: The LyX licence",
"m=110908904400120",
"22 February 2005",
u"Math panel dialogs"),
contributor(u"Dirk Niggemann",
"dabn100 () cam ! ac ! uk",
"",
"",
"",
"",
u"config. handling enhancements, bugfixes, printer enhancements path mingling"),
contributor(u"Jens Nöckel",
"noeckel () uoregon !edu",
"GPL",
"GPL statement",
"m=128485749516885",
"19 September 2010",
u"Mac OS X enhancements"),
contributor(u"Rob Oakes",
"lyx-devel () oak-tree ! us>",
"GPL",
"Outline Contributions",
"m=124615188102843",
"27 June 2009",
u"Improvements to the outliner."),
contributor(u"Oxygen Team",
"http://www.oxygen-icons.org/",
"LGPL",
"",
"",
"",
u"Oxygen Icon Theme"),
contributor(u"Carl Ollivier-Gooch",
"cfog () mech ! ubc ! ca",
"GPL",
"Re: The LyX licence --- a gentle nudge",
"m=111220662413921",
"30 March 2005",
u"Support for two-column figure (figure*) and table (table*) environments. Fixed minibuffer entry of floats."),
contributor(u"Gilad Orr",
"giladorr () gmail ! com",
"GPL",
"Internationalization-Hebrew",
"m=138314500901798",
"28 October 2013",
u"Hebrew translation."),
contributor(u'Panayotis "PAP" Papasotiriou',
"papasot () upatras ! gr",
"GPL",
"Re: The LyX licence",
"m=110933552929119",
"25 February 2005",
u"Support for kluwer and ijmpd document classes"),
contributor(u'Andrey V. Panov',
"panov () canopus ! iacp ! dvo ! ru",
"GPL",
"Re: Russian translation for LyX",
"m=119853644302866",
"24 December 2007",
u"Russian translation of the user interface"),
contributor(u'Dal Ho Park',
"airdalho () gmail ! com",
"GPL",
"splash.lyx translation (Korean)",
"m=139436383128181",
"9 March 2014",
u"Korean translation"),
contributor(u'Bo Peng',
"ben.bob () gmail ! com",
"GPL",
"Re: Python version of configure script (preview version)",
"m=112681895510418",
"15 September 2005",
u"Conversion of all shell scripts to Python, shortcuts dialog, session, view-source, auto-view, embedding features and scons build system."),
contributor(u'John Perry',
"john.perry () usm ! edu",
"GPL",
"Contributions",
"m=128874016511551",
"2 November 2010",
u"Named theorems module."),
contributor(u"Joacim Persson",
"sp2joap1 () ida ! his ! se",
"",
"",
"",
"",
u"po-file for Swedish, a tool for picking shortcuts, bug reports and hacking atrandom"),
contributor(u"Zvezdan Petkovic",
"zpetkovic () acm ! org",
"GPL",
"Re: The LyX licence",
"m=111276877900892",
"6 April 2005",
u"Better support for serbian and serbocroatian"),
contributor(u"Prannoy Pilligundla",
"prannoy.bits () gmail ! com",
"GPL",
"Contribution license",
"m=139332446711707",
"25 February 2014",
u"Full screen statusbar toggling"),
contributor(u"Geoffroy Piroux",
"piroux () fyma ! ucl ! ac ! be",
"",
"",
"",
"",
u"Mathematica backend for mathed"),
contributor(u"Benjamin Piwowarski",
"benjamin ! piwowarski () lip6 ! fr",
"GPL",
"GPL statement",
"m=133958334631163",
"13 June 2012",
u"AppleScript, integration with bibliography managers"),
contributor(u"Neoklis Polyzotis",
"alkis () soe ! ucsc ! edu",
"GPL",
"Fwd: Re: The LyX licence",
"m=111039215519777",
"9 March 2005",
u"Keymap work"),
contributor(u"André Pönitz",
"andre.poenitz () mathematik ! tu-chemnitz ! de",
"GPL",
"Re: The LyX licence",
"m=111143534724146",
"21 March 2005",
u"mathed rewrite to use STL file io with streams --export and --import command line options"),
contributor(u"Kornelia Pönitz",
"kornelia.poenitz () mathematik ! tu-chemnitz ! de",
"GPL",
"Re: The LyX licence",
"m=111121553103800",
"19 March 2005",
u"heavy mathed testing; provided siamltex document class"),
contributor(u"Bernhard Psaier",
"",
"",
"",
"",
"",
u"Designer of the LyX-Banner"),
contributor(u"Thomas Pundt",
"thomas () pundt ! de",
"GPL",
"Re: The LyX licence",
"m=111277917703326",
"6 April 2005",
u"initial configure script"),
contributor(u"Allan Rae",
"rae () itee ! uq ! edu ! au",
"GPL",
"lyx-1.3.6cvs configure.in patch",
"m=110905169512662",
"21 February 2005",
u"GUI-I architect, LyX PR head, LDN, bug reports/fixes, Itemize Bullet Selection, xforms-0.81 + gcc-2.6.3 compatibility"),
contributor(u"Manoj Rajagopalan",
"rmanoj () umich ! edu",
"GPL",
"Re: patch for case-insensitive reference sorting",
"m=123506398801004",
"Feb 19 2009",
u"reference dialog tweaks"),
contributor(u"Vincent van Ravesteijn",
"V.F.vanRavesteijn () tudelft ! nl",
"GPL",
"RE: crash lyx-1.6rc1",
"m=121786603726114",
"4 August 2008",
u"lots of fixes"),
contributor(u"Adrien Rebollo",
"adrien.rebollo () gmx ! fr",
"GPL",
"Re: The LyX licence",
"m=110918633227093",
"23 February 2005",
u"French translation of the docs; latin 3, 4 and 9 support"),
contributor(u"Garst R. Reese",
"garstr () isn ! net",
"GPL",
"blanket-permission.txt:",
"m=110911480107491",
"22 February 2005",
u"provided hollywood and broadway classes for writing screen scripts and plays"),
contributor(u"Bernhard Reiter",
"ockham () gmx ! net",
"GPL",
"Re: RFC: GThesaurus.C et al.",
"m=112912017013984",
"12 October 2005",
u"Gtk frontend"),
contributor(u"Ruurd Reitsma",
"rareitsma () yahoo ! com",
"GPL",
"Fwd: Re: The LyX licence",
"m=110959179412819",
"28 February 2005",
u"Creator of the native port of LyX to Windows"),
contributor(u"Bernd Rellermeyer",
"bernd.rellermeyer () arcor ! de",
"GPL",
"Re: The LyX licence",
"m=111317142419908",
"10 April 2005",
u"Support for Koma-Script family of classes"),
contributor(u"Michael Ressler",
"mike.ressler () alum ! mit ! edu",
"GPL",
"Re: The LyX licence",
"m=110926603925431",
"24 February 2005",
u"documentation maintainer, AASTeX support"),
contributor(u"Richman Reuven",
"richman.reuven () gmail ! com",
"GPL",
"gpl 2+ ok :)",
"m=130368087529359",
"24 April 2011",
u"Hebrew localisation"),
contributor(u"Christian Ridderström",
"christian.ridderstrom () gmail ! com",
"GPL",
"Re: The LyX licence",
"m=110910933124056",
"22 February 2005",
u"The driving force behind, and maintainer of, the LyX wiki wiki.\nSwedish translation of the Windows installer"),
contributor(u"Julien Rioux",
"jrioux () lyx ! org",
"GPL",
"Re: #6361: configure.py ignores packages required by user-defined modules",
"m=125986505101722",
"3 December 2009",
u"Bug fixes, lilypond and revtex support, citation modules."),
contributor(u"Bernhard Roider",
"bernhard.roider () sonnenkinder ! org",
"GPL",
"Re: [PATCH] immediatly display saved filename in tab",
"m=117009852211669",
"29 January 2007",
u"Various bug fixes"),
contributor(u"Jim Rotmalm",
"jim.rotmalm () gmail ! com",
"GPL",
"License for my contributions.",
"m=129582352017079",
"24 January 2011",
u"Swedish translation"),
contributor(u"Paul A. Rubin",
"rubin () msu ! edu",
"GPL",
"Re: [patch] reworked AMS classes (bugs 4087, 4223)",
"m=119072721929143",
"25 September 2007",
u"Major rework of the AMS classes"),
contributor(u"Guy Rutenberg",
"guyrutenberg () gmail ! com",
"GPL",
"Re: [PATCH] Strange Behaivor: xdg-open left as zombie",
"m=137365070116624",
"12 July 2013",
u"System call fixes"),
contributor(u"Ran Rutenberg",
"ran.rutenberg () gmail ! com",
"GPL",
"The New Hebrew Translation of the Introduction",
"m=116172457024967",
"24 October 2006",
u"Hebrew translation"),
contributor(u'Pavel Sanda',
"ps () ucw ! cz",
"GPL",
"Re: czech translation",
"m=115522417204086",
"10 August 2006",
u"Czech translation, added various features, lfuns docs/review. Current release manager."),
contributor(u"Szõke Sándor",
"alex () lyx ! hu",
"GPL",
"Contribution to LyX",
"m=113449408830523",
"13 December 2005",
u"Hungarian translation"),
contributor(u"Janus Sandsgaard",
"janus () janus ! dk",
"GPL",
"Re: The LyX licence",
"m=111839355328045",
"10 June 2005",
u"Danish translation of the Windows installer"),
contributor(u"Stefan Schimanski",
"sts () 1stein ! org",
"GPL",
"GPL statement",
"m=117541472517274",
"1 April 2007",
u"font improvements, bug fixes"),
contributor(u"Horst Schirmeier",
"horst () schirmeier ! com",
"GPL",
"Re: [patch] reordering capabilities for GuiBibtex",
"m=120009631506298",
"12 January 2008",
u"small fixes"),
contributor(u"Hubert Schreier",
"schreier () sc ! edu",
"",
"",
"",
"",
u"spellchecker (ispell frontend); beautiful document-manager based on the simple table of contents (removed)"),
contributor(u"Ivan Schreter",
"schreter () kdk ! sk",
"",
"",
"",
"",
u"international support and kbmaps for slovak, czech, german, ... wysiwyg figure"),
contributor(u"Eulogio Serradilla Rodríguez",
"eulogio.sr () terra ! es",
"GPL",
"Re: The LyX licence",
"m=110915313018478",
"23 February 2005",
u"contribution to the spanish internationalization"),
contributor(u"Nickolay Shashkin",
"mecareful () gmail ! com",
"GPL",
"GPL statement",
"m=134026564400578",
"21 June 2012",
u"bugfixes"),
contributor(u"Miyata Shigeru",
"miyata () kusm ! kyoto-u ! ac ! jp",
"",
"",
"",
"",
u"OS/2 port"),
contributor(u"Alejandro Aguilar Sierra",
"asierra () servidor ! unam ! mx",
"GPL",
"Fwd: Re: The LyX licence",
"m=110918647812358",
"23 February 2005",
u"Fast parsing with lyxlex, pseudoactions, mathpanel, Math Editor, combox and more"),
contributor(u"Lior Silberman",
"lior () princeton ! edu",
"GPL",
"Fwd: Re: The LyX licence",
"m=110910432427450",
"22 February 2005",
u"Tweaks to various XForms dialogs. Implemented the --userdir command line option, enabling LyX to run with multiple configurations for different users. Implemented the original code to make colours for different inset properties configurable."),
contributor(u"Waluyo Adi Siswanto",
"was.uthm () gmail ! com",
"GPL",
"Licence contributions",
"m=123595530114385",
"Mar 2 2009",
u"Indonesian translation"),
contributor(u"Giovanni Sora",
"g.sora () tiscali ! it",
"GPL",
"License ia.po",
"m=129968786830788",
"9 March 2011",
u"Interlingua translation"),
contributor(u"Andre Spiegel",
"spiegel () gnu ! org",
"GPL",
"Re: The LyX licence",
"m=110908534728505",
"22 February 2005",
u"vertical spaces"),
contributor(u"Jürgen Spitzmüller",
"juergen.sp () t-online ! de",
"GPL",
"Re: The LyX licence",
"m=110907530127164",
"22 February 2005",
u"Qt frontend, bugfixes. Former stable branch maintainer."),
contributor(u"John Spray",
"jcs116 () york ! ac ! uk",
"GPL",
"Re: The LyX licence",
"m=110909415400170",
"22 February 2005",
u"Gtk frontend"),
contributor(u"Ben Stanley",
"ben.stanley () exemail ! com ! au",
"GPL",
"Re: The LyX licence",
"m=110923981012056",
"24 February 2005",
u"fix bugs with error insets placement"),
contributor(u"Uwe Stöhr",
"uwestoehr () web ! de",
"GPL",
"Re: The LyX licence",
"m=111833345825278",
"9 June 2005",
u"Current documentation maintainer, Windows installer, bug fixes"),
contributor(u"David Suárez de Lis",
"excalibor () iname ! com",
"",
"",
"",
"",
u"maintaining es.po since v1.0.0 and other small i18n issues small fixes"),
contributor(u"Peter Sütterlin",
"p.suetterlin () astro ! uu ! nl",
"GPL",
"Re: The LyX licence",
"m=110915086404972",
"23 February 2005",
u"aapaper support, german documentation translation, bug reports"),
contributor(u"Stefan Swerk",
"stefan_lyx () swerk ! priv ! at",
"GPL",
"Contribution license",
"m=142644092217864",
"15 March 2015",
u"europasscv support"),
contributor(u"Kayvan Aghaiepour Sylvan",
"kayvan () sylvan ! com",
"GPL",
"Re: The LyX licence",
"m=110908748407087",
"22 February 2005",
u"noweb2lyx and reLyX integration of noweb files. added Import->Noweb and key bindings to menus"),
contributor(u"TaoWang (mgc)",
"mgcgogo () gmail ! com",
"GPL",
"Re: Chinese Version of Tutorial.lyx",
"m=125785021631705",
"10 November 2009",
u"translation of documentation and user interface to Simplified Chinese"),
contributor(u'Sergey Tereschenko',
"serg.partizan () gmail ! com",
"GPL",
"my contributions",
"m=126065880524135",
"12 December 2009",
u"Russian translation of the user interface"),
contributor(u"Reuben Thomas",
"rrt () sc3d ! org",
"GPL",
"Re: The LyX licence",
"m=110911018202083",
"22 February 2005",
u"ENTCS document class and lots of useful bug reports"),
contributor(u"Dekel Tsur",
"dtsur () cs ! ucsd ! edu",
"GPL",
"Fwd: Re: The LyX licence",
"m=110910437519054",
"22 February 2005",
u"Hebrew support, general file converter, many many bug fixes"),
contributor(u"Matthias Urlichs",
"smurf () smurf ! noris ! de",
"GPL",
"Re: The LyX licence",
"m=110912859312991",
"22 February 2005",
u"bug reports and small fixes"),
contributor(u"H. Turgut Uyar",
"uyar () ce ! itu ! edu ! tr",
"GPL",
"Re: The LyX licence",
"m=110917146423892",
"23 February 2005",
u"turkish kbmaps"),
contributor(u"Mostafa Vahedi",
"vahedi58 () yahoo ! com",
"GPL",
"Re: improving Arabic-like language support",
"m=117769964731842",
"27 April 2007",
u"Farsi support and translations"),
contributor(u"Marko Vendelin",
"markov () ioc ! ee",
"GPL",
"Re: The LyX licence",
"m=110909439912594",
"22 February 2005",
u"Gnome frontend"),
contributor(u"Joost Verburg",
"joostverburg () users ! sourceforge ! net",
"GPL",
"Re: New Windows Installer",
"m=114957884100403",
"6 June 2006",
u"A new and improved Windows installer"),
contributor(u"Martin Vermeer",
"martin.vermeer () hut ! fi",
"GPL",
"Re: The LyX licence",
"m=110907543900367",
"22 February 2005",
u"support for optional argument in sections/captions svjour/svjog, egs and llncs document classes. Lot of bug hunting (and fixing!)"),
contributor(u"Jürgen Vigna",
"jug () lyx ! org",
"GPL",
"Re: Licensing of tex2lyx (and perhaps LyX itself?)",
"m=110899839906262",
"21 February 2005",
u"complete rewrite of the tabular, text inset; fax and plain text export support; iletter and dinbrief support"),
contributor(u"Pauli Virtanen",
"pauli.virtanen () hut ! fi",
"GPL",
"Re: The LyX licence",
"m=110918662408397",
"23 February 2005",
u"Finnish localization of the interface"),
contributor(u"Ramanathan Vishnampet",
"rvishnampet () gmail ! com",
"GPL",
"Re: [Patch] -fobjc-exceptions for compiling linkback sources with g++ on Mac",
"",
"17 February 2014",
u"Support for g++ on 4.8 Mac"),
contributor(u"Herbert Voß",
"herbert.voss () alumni ! tu-berlin ! de",
"GPL",
"Fwd: Re: The LyX licence",
"m=110910439013234",
"22 February 2005",
u"The one who answers all questions on lyx-users mailing list and maintains www.lyx.org/help/ Big insetgraphics and bibliography cleanups"),
contributor(u"Andreas Vox",
"avox () arcor ! de",
"GPL",
"Re: The LyX licence",
"m=110907443424620",
"22 February 2005",
u"Bug fixes, feedback on LyX behaviour on the Mac, and improvements to DocBook export"),
contributor(u"venom00 (c/o J-M Lasgouttes)",
"venom00 () arcadiaclub ! com",
"GPL",
"I love GPL, what about you?",
"m=129098897014967",
"29 November 2010",
u"Bug fixing"),
contributor(u"Jason Waskiewicz",
"jason.waskiewicz () sendit ! nodak ! edu",
"GPL",
"[Fwd: Re: tufte-book layout for LyX]",
"m=125659179116032",
"26 October 2009",
u"Layouts for the Tufte document classes"),
contributor(u"John P. Weiss",
"jpweiss () frontiernet ! net",
"Artistic",
"Re: Small problem with BlanketPermission on the new site.",
"m=123238170812776",
"18 January 2009",
u"Bugreports and suggestions, slides class support, editor of the documentationproject, 6/96-9/97. Tutorial chapter 1"),
contributor(u"Edmar Wienskoski",
"edmar () freescale ! com",
"GPL",
"Re: The LyX licence",
"m=111280236425781",
"6 April 2005",
u"literate programming support; various bug fixes"),
contributor(u"Mate Wierdl",
"mw () wierdlmpc ! msci ! memphis ! edu",
"",
"",
"",
"",
u"Maintainer of the @lists.lyx.org mailing-lists"),
contributor(u"Serge Winitzki",
"winitzki () erebus ! phys ! cwru ! edu",
"",
"",
"",
"",
u"updates to the Scientific Word bindings"),
contributor(u"Stephan Witt",
"stephan.witt () beusen ! de",
"GPL",
"Re: The LyX licence",
"m=110909031824764",
"22 February 2005",
u"support for CVS revision control, native spell checker interface for Mac OS"),
contributor(u"Russ Woodroofe",
"paranoia () math ! cornell ! edu",
"GPL",
"Re: AMS math question environment",
"m=123091448326090",
"1 January 2009",
u"question layout environment"),
contributor(u"Mingyi Wu",
"mingi.eo97g () g2 ! nctu ! edu ! tw",
"GPL",
"newcomer",
"m=139389779502232",
"3 March 2014",
u"Chinese (traditional) translations"),
contributor(u"Roy Xia",
"royxia062 () gmail ! com",
"GPL",
"GPL Statement",
"m=139434481324689",
"9 March 2014",
u"Bugfixing"),
contributor(u"Yihui Xie",
"xie () yihui ! name",
"GPL",
"GPL Statement",
"m=130523685427995",
"3 June 2011",
u"Bugfixing, Chinese translation, Sweave support"),
contributor(u"Huang Ying",
"huangy () sh ! necas ! nec ! com ! cn",
"GPL",
"Re: The LyX licence",
"m=110956742604611",
"28 February 2005",
u"Gtk frontend"),
contributor(u"Koji Yokota",
"yokota () res ! otaru-uc ! ac ! jp",
"GPL",
"Re: [PATCH] po/ja.po: Japanese message file for 1.5.0 (merged from",
"m=118033214223720",
"28 May 2007",
u"Japanese translation"),
contributor(u"Abdelrazak Younes",
"younes.a () free ! fr",
"GPL",
"Re: [Patch] RFQ: ParagraphList Rewrite",
"m=113993670602439",
"14 February 2006",
u"Qt4 frontend, editing optimisations"),
contributor(u"Henner Zeller",
"henner.zeller () freiheit ! com",
"GPL",
"Re: The LyX licence",
"m=110911591218107",
"22 February 2005",
u"rotation of wysiwyg figures"),
contributor(u"Xiaokun Zhu",
"xiaokun () aero ! gla ! ac ! uk",
"",
"",
"",
"",
u"bug reports and small fixes") ]
if __name__ == "__main__":
main(sys.argv, contributors)
| apex-hughin/LyX | lib/generate_contributions.py | Python | gpl-2.0 | 68,489 |
import os
import jsonrpc
import socket
from library import configure
from svnup import update
from invalidate import invalidate
from catalog import index
from sensible.loginit import logger
log = logger(os.path.basename(__file__))
class Dispatcher(object):
def __init__(self, broker_url):
self.broker = jsonrpc.Client(broker_url)
def run(self):
msg = cmd = ''
try:
msg = self.broker.send()
except socket.error, e:
log.error(str(e))
if msg:
            log.debug('message: %s', msg)
            # split only on the first ':' so arguments may themselves contain colons
            cmd, arg = msg.split(':', 1)
if cmd:
cmd_name = 'do_%s' % cmd
m = getattr(self, cmd_name, None)
if m:
try:
msg = m(arg)
except Exception, e:
log.exception(str(e))
else:
log.info(msg)
else:
log.error('Unknown command: %s:%s' % (cmd, arg))
else:
log.error('Unknown message: %s' % msg)
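    # Broker messages are expected to be "<command>:<argument>" strings,
    # e.g. "update:trunk" or "index:main" (hypothetical payloads -- the
    # actual argument format depends on whatever enqueues work for us).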
def do_update(self, arg):
return update(arg)
def do_invalidate(self, arg):
return invalidate(arg)
def do_index(self, arg):
return index(arg)
def main():
config = configure()
d = Dispatcher(config.get('broker_url'))
d.run()
if __name__ == '__main__':
log.info('Dispatcher: starting...')
    main()
| viswimmer1/PythonGenerator | data/python_files/29592144/dispatch.py | Python | gpl-2.0 | 1504 |
import tarfile
import os.path
def make_tarfile(output_filename, source_dir):
with tarfile.open(output_filename, "w:gz") as tar:
tar.add(source_dir, arcname=os.path.basename(source_dir))
def extract(file, location):
    print(location)
    # use a context manager so the archive is closed even if extraction fails
    with tarfile.open(file) as tar:
        tar.extractall(location)
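# Example usage (hypothetical paths):
#   make_tarfile("backup.tar.gz", "/home/user/project")
#   extract("backup.tar.gz", "/tmp/restore")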
| semitable/g2b | tar_interface.py | Python | gpl-2.0 | 329 |
#CHIPSEC: Platform Security Assessment Framework
#Copyright (c) 2010-2015, Intel Corporation
#
#This program is free software; you can redistribute it and/or
#modify it under the terms of the GNU General Public License
#as published by the Free Software Foundation; Version 2.
#
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU General Public License for more details.
#
#You should have received a copy of the GNU General Public License
#along with this program; if not, write to the Free Software
#Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
#Contact information:
#[email protected]
#
import sys
import os
import shutil
import fnmatch
from distutils import dir_util
import subprocess
import platform
TARGET_X64 = 'X64'
TARGET_IA32 = 'IA32'
TARGET_I586 = 'i586'
TARGETS= [TARGET_X64 ,TARGET_IA32, TARGET_I586 ]
asmFiles = {TARGET_X64: 'cpu.asm',
TARGET_IA32: 'cpu_ia32.asm',
TARGET_I586: 'cpu_ia32_gcc.s'}
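# Example: building for TARGET_I586 copies Efi/cpu_ia32_gcc.s into the
# PythonCore.inf [Sources] section; i586 is a GCC/Linux-only target
# (see parse_args below).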
class PythonEDk2:
def __init__(self):
self.edk2_path = ''
self.target = TARGET_X64
self.is_linux = ("linux" == platform.system().lower())
def usage(self):
print "\nUSAGE: %.65s --edk2 <edk2-path> [--target <target>]" % sys.argv[0]
print "OPTIONS:"
print " -e, --edk2 specify path where edk2 exist in the local filesystem"
print " -t, --target one of X64, IA32, i586. if none is specified X64 is used"
print " "
print "NOTES:"
print " By default the tool chanins used are GCC46 for Linux and VS2012x86 for windows"
print " if you need to use a different toolchain please modify manually the EDKII file Conf/target.txt "
print " This script will not change the value unless the current value is MYTOOLS, in that case it will "
print " modify it to be the default values mentioned above."
def parse_args(self, argv):
import getopt
try:
opts, args = getopt.getopt(argv, "e:ht:",
["edk2=","help","target="])
except getopt.GetoptError, err:
print str(err)
self.usage()
return 1
for o, a in opts:
if o in ("-e", "--edk2"):
self.edk2_path = a
elif o in ("-h", "--help"):
self.usage()
return 0
elif o in ("-t", "--target"):
self.target = a
if len(self.edk2_path) <=0:
self.usage()
return 1
if self.target not in TARGETS:
print "\n\nERROR: Invalid target \"%s\". Valid targets are: %s"%(self.target, TARGETS)
return 1
if self.target == TARGET_I586 and not self.is_linux:
print"\n\nERROR: Target \"%s\" can only be compiled in linux"%(self.target)
return 1
return 0
def setupTarget(self):
efi_path = os.path.join(self.edk2_path,"AppPkg","Applications","Python","Efi")
for file in os.listdir('..'):
if fnmatch.fnmatch(file, '*.asm') or fnmatch.fnmatch(file, '*.s') or fnmatch.fnmatch(file, '*.c'):
print "copying %-60s to %s"%(file , efi_path)
shutil.copy(os.path.join(os.pardir,file) , efi_path)
print
py_mod = os.path.join(self.edk2_path,"AppPkg","Applications","Python","PyMod-2.7.2")
py_dir = os.path.join(self.edk2_path,"AppPkg","Applications","Python","Python-2.7.2")
print py_dir
#for dir in os.listdir(py_mod):
print "copying %-80s to %s"%(py_mod , py_dir)
        preserve_mode = 1
        preserve_times = 1
        preserve_symlinks = 0
        update = 0
        verbose = 5
        dry_run = 0
        dir_util.copy_tree(py_mod, py_dir, preserve_mode, preserve_times,
                           preserve_symlinks, update, verbose, dry_run)
print
import fileinput
# un-comment the line in AppPkg.dsc to compile python
AppPkg_dsc = os.path.join(self.edk2_path,"AppPkg","AppPkg.dsc")
for line in fileinput.input(AppPkg_dsc, inplace=True):
if line.strip().startswith('#') and 'PythonCore.inf' in line:
sys.stdout.write( line.replace('#','',1) )
else:
sys.stdout.write( line )
# add the assembly file to the sources for compilation
PythonCore_inf = os.path.join(self.edk2_path,"AppPkg","Applications","Python","PythonCore.inf")
in_sources= False
wrote_asm = False
for line in fileinput.input(PythonCore_inf, inplace=True):
if not in_sources:
if "[Sources]" in line:
in_sources = True
sys.stdout.write( line )
else:
if "cpu" in line:
sys.stdout.write ( " Efi/%s\n"%asmFiles[self.target])
wrote_asm = True
elif len(line.strip()) <= 1:
in_sources = False
if not wrote_asm:
sys.stdout.write ( " Efi/%s\n"%asmFiles[self.target])
sys.stdout.write( line )
else:
sys.stdout.write( line )
target_txt = os.path.join(self.edk2_path,"Conf","target.txt")
if self.is_linux: tool_chain_tag = "GCC46"
else: tool_chain_tag = "VS2012x86"
for line in fileinput.input(target_txt, inplace=True):
if "MYTOOLS" in line:
sys.stdout.write(line.replace("MYTOOLS",tool_chain_tag))
elif "MAX_CONCURRENT_THREAD_NUMBER" in line and "#" not in line:
sys.stdout.write("MAX_CONCURRENT_THREAD_NUMBER = 12\n")
elif line.startswith("TARGET"):
sys.stdout.write(line.replace("RELEASE", "DEBUG"))
else:
sys.stdout.write(line)
# un-comment pyexpath from config.c
config_c = os.path.join(self.edk2_path,"AppPkg","Applications","Python","Efi","config.c" )
for line in fileinput.input(config_c, inplace=True):
if line.strip().startswith('/') and ('pyexpat' in line or
'_md5' in line or
'_sha' in line or
'_sha256' in line or
'_sha512' in line):
sys.stdout.write( line.replace('/','',2) )
else:
sys.stdout.write( line )
march ='-march=i586'
tools_def = os.path.join(self.edk2_path,"Conf","tools_def.txt")
for line in fileinput.input(tools_def, inplace=True):
if line.strip().startswith("DEFINE GCC46_IA32_CC_FLAGS"):
if self.target == TARGET_I586:
if not march in line:
sys.stdout.write( "%s %s\n" %(line.strip(), march) )
else:
sys.stdout.write( line )
else:
if march in line:
sys.stdout.write( line.replace(march, '') )
else:
sys.stdout.write( line )
else:
sys.stdout.write( line )
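    # setupTarget() leans on fileinput's in-place mode: while iterating,
    # anything written to stdout replaces the corresponding line in the file.
    # A minimal sketch of that pattern (the file name is an assumption, shown
    # for illustration only):
    #
    #   import fileinput
    #   for line in fileinput.input("some.conf", inplace=True):
    #       sys.stdout.write(line.replace("OLD", "NEW"))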
def compile(self):
env = os.environ.copy()
print self.edk2_path
if self.is_linux:
ec = subprocess.call(['pwd' ], shell=True, stderr=subprocess.STDOUT, env = env, cwd = self.edk2_path)
ec = subprocess.call(['bash','-c','source edksetup.sh' ], stderr=subprocess.STDOUT, env = env, cwd = self.edk2_path)
else:
ec = subprocess.call(["edk2setup" ], shell=True, stderr=subprocess.STDOUT, env = env, cwd = self.edk2_path)
if ec == 0:
self.setupTarget()
env = os.environ.copy()
if self.is_linux:
ec = subprocess.call(["pwd"], shell=True, stderr=subprocess.STDOUT, env = env)
ec = subprocess.call(['bash', '-c',"chmod 775 build_edk2_python.sh",self.edk2_path, self.target ],stderr=subprocess.STDOUT, env = env)
ec = subprocess.call(["./build_edk2_python.sh",self.edk2_path, self.target ],stderr=subprocess.STDOUT, env = env)
else:
ec = subprocess.call(["build_edk2_python",self.edk2_path, self.target ], shell=True, stderr=subprocess.STDOUT, env = env)
print ec
return ec
if __name__ == "__main__":
pythonEDk2 = PythonEDk2()
ec = pythonEDk2.parse_args(sys.argv[1:])
if ec == 0:
ec = pythonEDk2.compile()
sys.exit(ec)
| naterh/chipsec | tools/edk2/PythonEFI/build/compilePythonEDK2.py | Python | gpl-2.0 | 9,081 |
from game import Game
from console_user_interface import take_user_choice, user_input, output_prompt
from sqlalchemy import create_engine
from sqlalchemy.orm import Session
from create_db import User
from getpass import getpass
import re
def start_menu(session):
options = {"sign_in": sign_in,
"register": register,
"guest": main_menu,
"exit": close_database
}
prompt = ("\nWelcome, please choose:" +
"\n sign_in -> to use your account" +
"\n register -> to make an account" +
"\n guest -> to play as guest" +
"\n exit -> to quit\n")
wait_prompt = "Your choice: "
error_prompt = "There is no such option. Try again!"
def condition(x): return x in options
choice = take_user_choice(condition, wait_prompt, error_prompt, prompt)
options[choice](session)
def sign_in(session):
username = user_input("Username: ")
user = session.query(User).filter(User.name == username).all()
if user:
password = getpass("Password: ")
if user[0].password == password:
if user[0].unfinished_game_mode in ["easy", "hard"]:
load_game(session, user[0])
else:
main_menu(session, user[0])
else:
output_prompt("Invalid password. Try again.")
start_menu(session)
else:
output_prompt("There isn't registered such user. Register?")
start_menu(session)
def register(session):
def condition1(x):
users = session.query(User).filter(User.name == x).all()
return not users
wait_prompt1 = "Username: "
error_prompt1 = "There is already a user with such name. Try another:)"
username = take_user_choice(condition1, wait_prompt1, error_prompt1)
def condition2(x):
return getpass("Confirm password: ") == x
wait_prompt2 = "Please, type your password: "
error_prompt2 = "There is some mistake. Try again!"
password = take_user_choice(condition2, wait_prompt2, error_prompt2,
function=getpass)
add_new_user(session, username, password)
output_prompt("Successful registration! Please, sign in:\n")
sign_in(session)
def add_new_user(session, username, password):
new_user = User(name=username, password=password,
wins_easy_level=0, losses_easy_level=0,
wins_hard_level=0, losses_hard_level=0,
unfinished_game_board="none",
unfinished_game_mode="none")
session.add(new_user)
session.commit()
def main_menu(session, user=None):
options = {"new_game": take_user_game_setup,
"score_board": score_board,
"exit": close_database
}
prompt = ("\nMain menu:" +
"\n new_game -> to start a game" +
"\n score_board -> to see the top 3 players" +
"\n exit -> to quit\n")
def condition(x):
return x in options
wait_prompt = "Your choice: "
error_prompt = "There is no such option. Try again!"
choice = take_user_choice(condition, wait_prompt, error_prompt, prompt)
options[choice](session, user)
def take_user_game_setup(session, user, *args):
    def condition1(x):
        # one or two digits only, covering the whole input
        pattern = re.compile(r"\d{1,2}$")
        return pattern.match(x) and (4 <= int(x)) and (int(x) <= 16)
wait_prompt1 = "Board size: "
error_prompt1 = "The size should be between 4 and 16."
size = int(take_user_choice(condition1, wait_prompt1, error_prompt1))
    def condition2(x):
        pattern = re.compile(r"\d{1,2}$")
        return pattern.match(x) and ((int(x) == 1) or (int(x) % size == 0))
    wait_prompt2 = "Start number of stones: "
    error_prompt2 = "The number of stones must be 1 or divisible by the board size."
start_stones = int(take_user_choice(condition2, wait_prompt2,
error_prompt2))
def condition3(x):
return x in ["easy", "hard"]
wait_prompt3 = "Mode 'easy' or 'hard': "
error_prompt3 = "There is no such option."
mode = take_user_choice(condition3, wait_prompt3, error_prompt3)
def condition4(x):
return x in ["X", "O"]
wait_prompt4 = "Sign 'X' or 'O': "
error_prompt4 = "There is no such option."
user_sign = take_user_choice(condition4, wait_prompt4, error_prompt4)
computer_sign = find_opponent_sign(user_sign)
board = None
game = Game(user_sign, computer_sign, mode, size, board, start_stones)
start_game(session, user, game)
def find_opponent_sign(player_sign):
players_signs = ["X", "O"]
players_signs.remove(player_sign)
return players_signs[0]
def load_game(session, user):
mode = user.unfinished_game_mode
board_information = user.unfinished_game_board.split('|')
user_sign = board_information[0]
computer_sign = find_opponent_sign(user_sign)
size = len(board_information) - 1
board = board_information[1:]
start_stones = None
game = Game(user_sign, computer_sign, mode, size, board, start_stones)
start_game(session, user, game)
def score_board(session, user):
output_prompt("\nTop 3 players:")
all_users = session.query(User).all()
    all_users = sorted(all_users, key=lambda user: (user.wins_hard_level,
                                                    user.wins_easy_level),
                       reverse=True)
count = 0
while count < 3 and count < len(all_users):
output_prompt(str(count + 1) + " " +
score_board_format(all_users[count]))
count += 1
user_input("Press Enter to return to Main menu")
main_menu(session, user)
def score_board_format(user):
hard_mode = "\nhard mode wins/losses: {}/{}".format(user.wins_hard_level,
user.losses_hard_level)
easy_mode = "\neasy mode wins/losses: {}/{}".format(user.wins_easy_level,
user.losses_easy_level)
return user.name + hard_mode + easy_mode
def start_game(session, user, game):
options = {"new_game": take_user_game_setup,
"save_and_exit": save_and_exit,
"exit": close_database
}
user_prompt = ("\nDuring the game, you could start 'new_game'," +
" 'save_and_exit' or 'exit' at any time")
guest_prompt = ("\nDuring the game, you could start 'new_game'" +
" or 'exit' at any time")
if user:
output_prompt(user_prompt)
else:
output_prompt(guest_prompt)
output_prompt("\nSTART BOARD\n" + str(game.board))
command = None
continue_game = True
while continue_game:
output_prompt("\nYour turn!")
choice = take_user_move()
if choice in options:
command = choice
break
else:
from_row, from_col, to_row, to_col = choice
successful = game.play_user_turn(from_row, from_col, to_row, to_col)
while (not successful) or (choice in options):
choice = take_user_move()
if choice in options:
command = choice
break
else:
from_row, from_col, to_row, to_col = choice
successful = game.play_user_turn(from_row, from_col,
to_row, to_col)
if command:
break
else:
output_prompt("YOUR MOVE:\n" + str(game.board))
game.play_computer_turn()
output_prompt("\nCOMPUTER MOVE:\n" + str(game.board))
continue_game = game.running
if command:
options[command](session, user, game)
else:
game_finished(session, user, game)
def take_user_move():
    def condition(x):
        # accept "(row, col)" with 1-2 digit coordinates, or a command word
        pattern = re.compile(r"\(\d{1,2}, \d{1,2}\)$")
        return pattern.match(x) or (x in ["exit", "save_and_exit", "new_game"])
error_prompt = "Not a valid input."
pick = take_user_choice(condition, "Pick stone (row, col): ", error_prompt)
if pick in ["exit", "save_and_exit", "new_game"]:
return pick
move = take_user_choice(condition, "Move to (row, col): ", error_prompt)
if move in ["exit", "save_and_exit", "new_game"]:
        return move
pick = pick[1:-1].split(", ")
move = move[1:-1].split(", ")
from_row, from_col = int(pick[0]), int(pick[1])
to_row, to_col = int(move[0]), int(move[1])
return (from_row, from_col, to_row, to_col)
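# Example session for take_user_move() (values are illustrative only):
#   Pick stone (row, col): (3, 4)
#   Move to (row, col): (3, 6)
# returns the tuple (3, 4, 3, 6); typing "exit", "save_and_exit" or
# "new_game" at either prompt returns that command string instead.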
def game_finished(session, user, game):
options = {"exit": close_database,
"main_menu": main_menu
}
if user:
if user.unfinished_game_mode in ["easy", "hard"]:
user.unfinished_game_mode = "none"
user.unfinished_game_board = "none"
if game.does_user_win:
output_prompt("Congratulations you win!")
if game.mode == "easy":
user.wins_easy_level += 1
else:
user.wins_hard_level += 1
elif game.does_user_win is None:
output_prompt("Equal game! Won't be signed in the database!")
else:
output_prompt("You lose!")
if game.mode == "hard":
user.losses_hard_level += 1
else:
user.losses_easy_level += 1
session.commit()
else:
if game.does_user_win:
output_prompt("Congratulations you win!")
elif game.does_user_win is None:
output_prompt("Equal game!")
else:
output_prompt("You lose!")
def condition(x): return x in ["main_menu", "exit"]
wait_prompt = "Go to 'main_menu' or 'exit': "
error_prompt = "There is no such option."
go_to = take_user_choice(condition, wait_prompt, error_prompt)
options[go_to](session, user)
def save_and_exit(session, user, game):
if user:
user.unfinished_game_mode = game.mode
user.unfinished_game_board = game.board.database_format()
session.commit()
else:
output_prompt("Guests can't save games!")
close_database(session)
def close_database(session, *args):
session.close()
def main():
engine = create_engine("sqlite:///users.db")
session = Session(bind=engine)
start_menu(session)
if __name__ == '__main__':
main()
| valentina-zhekova/Yin-VS-Yang-Game-For-FMI-Python-Course | gameplay.py | Python | gpl-2.0 | 10,362 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Pardus Desktop Services
# Copyright (C) 2010, TUBITAK/UEKAE
# 2010 - Gökmen Göksel <gokmen:pardus.org.tr>
# 2010 - H. İbrahim Güngör <ibrahim:pardus.org.tr>
# 2011 - Comak Developers <comak:pardus.org.tr>
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 2 of the License, or (at your option)
# any later version.
# Pardus Desktop Services
from os import path
from os import getenv
from os import popen
import piksemel
import gettext
# PyQt4 Core Libraries
from PyQt4.QtCore import QSettings
# Logging
import logging
# Pds Objects
from pds.environments import *
class Pds:
SupportedDesktops = (DefaultDe, Kde4, Kde3, Xfce, Enlightenment, LXDE,
Fluxbox, Gnome, Gnome3)
def __init__(self, catalogName='', debug=False):
self._session = None
self._version = None
self.home = getenv('HOME').strip()
self._config_content = None
_log_file = path.join(self.home, '.pdsLogfor%s.log' % catalogName)
if debug:
logging.basicConfig(filename = _log_file, level = logging.DEBUG, \
filemode = 'w')
else:
logging.basicConfig(level = logging.INFO)
if catalogName:
self.__trans = gettext.translation(catalogName, fallback=True)
def __i18n(*text):
if len(text) == 1:
return self.__trans.ugettext(text[0])
ttt = unicode(self.__trans.ugettext(text[0]))
for i in range(1,len(text)):
ttt = ttt.replace('%%%d' % i, unicode(text[i]))
return ttt
self.i18n = __i18n
DefaultDe.i18n = staticmethod(__i18n)
        self._acceptedMethods = filter(lambda x: not x.startswith('__') and \
                                       not x == 'i18n',
                                       dir(self.session))
self.notifierInitialized = False
self.catalogName = catalogName
def __getattr__(self, name):
if str(name) in self._acceptedMethods:
return getattr(self.session, str(name))
if not self.__dict__.has_key(name):
raise AttributeError, name
def updatei18n(self, lang):
if self.catalogName:
self.__trans = gettext.translation(self.catalogName, \
languages=[lang], fallback=True)
def notify(self, title, message, icon = None):
try:
import pynotify
if not self.notifierInitialized:
pynotify.init(self.catalogName)
self.notifierInitialized = True
notifier = pynotify.Notification(unicode(title), unicode(message),\
icon or self.catalogName)
notifier.show()
except:
logging.info(message)
def settings(self, key, default):
value = None
if self.session.ConfigType == 'ini':
            # FIXME: we don't need to force every time.
if path.exists(str(self.config_file)):
settings = self.parse(self.config_file, force = True)
else:
return default
_value = settings.value(key)
if not _value.toString():
# Sometimes kdeglobals stores values without quotes
_value = _value.toStringList()
if _value:
value = _value.join(',')
else:
value = unicode(_value.toString())
if not value or value == '':
logging.debug('Switching to default conf')
alternateConfig = self.session.DefaultConfigPath or \
path.join(self.install_prefix, self.session.ConfigFile)
settings = self.parse(alternateConfig, force = True)
value = unicode(settings.value(key, default).toString())
elif self.session.ConfigType == 'xml':
settings = self.parse(self.config_file, 'xml').getTag('property')
def getval(settings, key):
for tag in settings.tags():
if tag.getAttribute('name') == key:
return tag.getAttribute('value')
value = getval(settings, key)
if not value or value == '':
alternateConfig = self.session.DefaultConfigPath or \
path.join(self.install_prefix, self.session.ConfigFile)
settings = self.parse(alternateConfig, 'xml',
force = True).getTag('property')
value = getval(settings, key)
elif self.session.ConfigType == 'env':
value = getenv(key)
elif self.session.ConfigType == 'cmd':
if key == self.session.IconKey:
try:
value = popen(self.session.GetIconThemeCommand).read().strip()
except:
value = None
return value or default
def parse(self, fpath, ftype = 'ini', force = False):
if self._config_content and not force:
return self._config_content
if ftype == 'ini':
self._config_content = QSettings(fpath, QSettings.IniFormat)
elif ftype == 'xml':
self._config_content = piksemel.parse(fpath)
return self._config_content
@property
def session(self):
if not self._session:
env = getenv('DESKTOP_SESSION')
if env == 'default' or not env or env == 'gnome':
session = readfile('/etc/default/desktop', DefaultDe.Name)
env = session.split('=')[1].strip()
for de in Pds.SupportedDesktops:
if env:
if env in de.SessionTypes or env == de.Name:
self._session = de
else:
if de.VersionKey:
if getenv(de.VersionKey) == de.Version:
self._session = de
if not self._session:
self._session = DefaultDe
else:
for de in Pds.SupportedDesktops:
if de.Version == self.version and (env in de.SessionTypes or env == de.Name):
self._session = de
return self._session
@property
def version(self):
for key in ('KDE_SESSION_VERSION', 'KDEDIR'):
env = getenv(key)
if env:
self._version = env
break
if self._version:
self._version = self._version.split('/')[-1]
return self._version
@property
def config_file(self):
cf = path.join(self.config_path, self.session.ConfigFile)
if path.exists(cf):
return cf
return None
@property
def config_path(self):
cpaths = self.session.ConfigPath
if not type(cpaths) is tuple:
cpaths = [cpaths]
for cpath in cpaths:
rpath = cpath.replace('$HOME', self.home)
if path.exists(rpath):
return rpath
@property
def install_prefix(self):
return popen('%s --prefix' % self.session.ConfigBin).read().strip()
def readfile(file_path, fallback=None):
if path.exists(file_path):
return open(file_path).read()
return fallback
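# Minimal usage sketch (the catalog name and settings key are assumptions,
# shown for illustration only):
#
#   pds = Pds('example-app')
#   print pds.session.Name
#   print pds.settings('IconTheme', 'oxygen')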
| Pardus-Linux/pds | pds/__init__.py | Python | gpl-2.0 | 7,554 |
# encoding: utf-8
# module apt_pkg
# from /usr/lib/python2.7/dist-packages/apt_pkg.so
# by generator 1.135
"""
Classes and functions wrapping the apt-pkg library.
The apt_pkg module provides several classes and functions for accessing
the functionality provided by the apt-pkg library. Typical uses might
include reading APT index files and configuration files and installing
or removing packages.
"""
# no imports
from object import object
class Hashes(object):
"""
Hashes([object: (bytes, file)])
Calculate hashes for the given object. It can be used to create all
supported hashes for a file.
The parameter 'object' can be a bytestring, an object providing the
fileno() method, or an integer describing a file descriptor.
"""
def __init__(self, *args, **kwargs): # real signature unknown; NOTE: unreliably restored from __doc__
pass
@staticmethod # known case of __new__
def __new__(S, *more): # real signature unknown; restored from __doc__
""" T.__new__(S, ...) -> a new object with type S, a subtype of T """
pass
md5 = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""The MD5Sum of the file as a string."""
sha1 = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""The SHA1Sum of the file as a string."""
sha256 = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""The SHA256Sum of the file as a string."""
| ProfessorX/Config | .PyCharm30/system/python_stubs/-1247972723/apt_pkg/Hashes.py | Python | gpl-2.0 | 1,533 |
# Copyright (C) 2016 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Author: Michael Simacek <[email protected]>
from sqlalchemy import insert
from koschei.db import get_or_create
from koschei.models import (
Package, BasePackage, PackageGroupRelation, GroupACL, User, Collection,
CollectionGroupRelation, Build, AppliedChange, KojiTask, ResolutionChange,
ResolutionProblem,
)
class PackagesDontExist(Exception):
    def __init__(self, packages):
        super(PackagesDontExist, self).__init__(packages)
        self.packages = packages
def __str__(self):
return "Packages don't exist: " + ','.join(self.packages)
def track_packages(session, collection, package_names):
"""
Sets given packages as tracked within collection.
:param: session koschei session
:param: collection Collection ORM object where packages should be added
:param: package_names list of package names to track
:raises: PackagesDontExist if some of the packages cannot be found
:return: a list of Package objects that were newly tracked
"""
package_names = set(package_names)
existing = session.db.query(Package)\
.filter(Package.name.in_(package_names))\
.filter(Package.collection_id == collection.id)
nonexistent = package_names - {p.name for p in existing}
if nonexistent:
raise PackagesDontExist(nonexistent)
to_add = [p for p in existing if not p.tracked]
for package in to_add:
set_package_attribute(session, package, 'tracked', True)
return to_add
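# Hedged usage sketch for track_packages (session, collection and package
# names are assumptions for illustration):
#
#   try:
#       added = track_packages(session, collection, ['bash', 'coreutils'])
#   except PackagesDontExist as e:
#       print(e.packages)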
def set_package_attribute(session, package, attr, new_value):
"""
Sets given package attribute, such as manual_priority, and logs a new user
action if needed.
"""
prev = getattr(package, attr)
if prev != new_value:
session.log_user_action(
"Package {} (collection {}): {} set from {} to {}"
.format(package.name, package.collection.name, attr, prev, new_value),
base_id=package.base_id,
)
setattr(package, attr, new_value)
def set_group_content(session, group, packages, append=False, delete=False):
"""
Makes given group contain given packages (by name).
In append mode (append=True) doesn't remove any packages from the group.
With append=False, makes the group contain only specified packages.
With delete=True, only deletes given packages from the group.
:param: session koschei session
:param: group PackageGroup object
:param: packages list of package names to be in given group
:param: append whether to clear the group first or append to existing content
:param: delete whether to delete instead of adding
:raises: PackagesDontExist when packages weren't found
"""
assert not append or not delete
contents = set(packages)
new_content = set(
session.db.query(BasePackage)
.filter(BasePackage.name.in_(contents))
.all()
)
if len(new_content) != len(contents):
raise PackagesDontExist(contents - {base.name for base in new_content})
current_content = set(
session.db.query(BasePackage)
.join(PackageGroupRelation)
.filter(PackageGroupRelation.group_id == group.id)
.all()
)
if delete:
to_add = set()
else:
to_add = new_content - current_content
if to_add:
rels = [dict(group_id=group.id, base_id=base.id) for base in to_add]
session.db.execute(insert(PackageGroupRelation, rels))
for base in to_add:
session.log_user_action(
"Group {} modified: package {} added".format(group.name, base.name),
base_id=base.id,
)
if append:
to_delete = set()
elif delete:
to_delete = new_content
else:
to_delete = current_content - new_content
if to_delete:
(
session.db.query(PackageGroupRelation)
.filter(PackageGroupRelation.group_id == group.id)
.filter(PackageGroupRelation.base_id.in_(base.id for base in to_delete))
.delete()
)
for base in to_delete:
session.log_user_action(
"Group {} modified: package {} removed".format(group.name, base.name),
base_id=base.id,
)
def set_group_maintainers(session, group, maintainers):
"""
    Sets given group maintainers to the given list of names.
:param: session koschei session
:param: group PackageGroup object
:param: maintainers list of maintainer names
"""
current_users = set(
session.db.query(User)
.join(GroupACL)
.filter(GroupACL.group_id == group.id)
.all()
)
new_users = {get_or_create(session.db, User, name=name) for name in set(maintainers)}
session.db.flush()
to_add = new_users - current_users
if to_add:
session.db.execute(
insert(GroupACL),
[dict(group_id=group.id, user_id=user.id) for user in to_add],
)
for user in to_add:
session.log_user_action(
"Group {} modified: maintainer {} added".format(group.name, user.name)
)
to_delete = current_users - new_users
if to_delete:
(
session.db.query(GroupACL)
.filter(GroupACL.group_id == group.id)
.filter(GroupACL.user_id.in_(user.id for user in to_delete))
.delete()
)
for user in to_delete:
session.log_user_action(
"Group {} modified: maintainer {} removed".format(group.name, user.name)
)
def delete_group(session, group):
"""
Deletes given package group
:param: session koschei session
:param: group PackageGroup object
"""
session.log_user_action("Group {} deleted".format(group.name))
session.db.delete(group)
def set_collection_group_content(session, group, collection_names):
"""
Makes given collection group contain given collections
:param: session koschei session
:param: group collection group
:param: collection_names list of collection names
"""
collection_names = set(collection_names)
collection_ids = session.db.query(Collection.id)\
.filter(Collection.name.in_(collection_names))\
.all_flat()
if len(collection_ids) != len(collection_names):
raise RuntimeError("Some collections weren't found")
rels = [dict(group_id=group.id, collection_id=coll_id)
for coll_id in collection_ids]
session.db.query(CollectionGroupRelation)\
.filter(CollectionGroupRelation.group_id == group.id)\
.delete()
if rels:
session.db.execute(insert(CollectionGroupRelation, rels))
def copy_collection(session, source, copy, minimal=False):
def get_cols(entity, exclude=(), qualify=False):
return ', '.join(
(entity.__tablename__ + '.' + c.name if qualify else c.name)
for c in entity.__table__.columns
if c.name != 'id' and c.name not in exclude
)
def deepcopy_table(entity, whereclause='', foreign_keys=None):
if not foreign_keys:
foreign_keys = entity.__table__.foreign_keys
assert len(foreign_keys) == 1
foreign_key = next(iter(foreign_keys))
parent = foreign_key.column.table
fk_cols = [fk.parent.name for fk in foreign_keys if fk.column.table is parent]
assert len(fk_cols) == 1
fk_col = fk_cols[0]
session.log.info("Copying {} table".format(entity.__tablename__))
session.db.execute("""
CREATE TEMPORARY TABLE {table}_copy AS
SELECT {table}.id AS original_id,
nextval('{table}_id_seq') AS id,
{parent}.id AS {fk_col}
FROM {table} JOIN {parent}_copy AS {parent}
ON {fk_col} = {parent}.original_id
{whereclause}
ORDER BY {table}.id;
INSERT INTO {table}(id, {fk_col}, {cols})
SELECT {table}_copy.id AS id,
{table}_copy.{fk_col} AS {fk_col},
{cols_q}
FROM {table} JOIN {table}_copy
ON {table}.id = {table}_copy.original_id
ORDER BY {table}_copy.original_id;
""".format(
table=entity.__tablename__,
parent=parent.name,
fk_col=fk_col,
cols=get_cols(entity, exclude=[fk_col]),
cols_q=get_cols(entity, exclude=[fk_col], qualify=True),
whereclause=whereclause,
))
session.log.info("Copying package table")
session.db.execute("""
CREATE TEMPORARY TABLE package_copy AS
SELECT id AS original_id,
nextval('package_id_seq') AS id
FROM package
WHERE collection_id = {source.id};
INSERT INTO package(id, collection_id, {package_cols})
SELECT package_copy.id AS id,
{copy.id} AS collection_id,
{package_cols_q}
FROM package JOIN package_copy
ON package.id = package_copy.original_id;
-- NOTE: package.last_[complete_]build is updated by trigger
""".format(
copy=copy, source=source,
package_cols=get_cols(Package, exclude=['collection_id']),
package_cols_q=get_cols(Package, exclude=['collection_id'], qualify=True),
))
deepcopy_table(
Build,
whereclause="""
WHERE (NOT {minimal} AND started > now() - '1 month'::interval)
OR build.id IN (
SELECT last_complete_build_id
FROM package
WHERE collection_id = {copy.id}
)
""".format(copy=copy, minimal=minimal),
)
deepcopy_table(KojiTask)
deepcopy_table(ResolutionChange)
deepcopy_table(ResolutionProblem)
deepcopy_table(
AppliedChange,
foreign_keys=AppliedChange.__table__.c.build_id.foreign_keys,
)
| msimacek/koschei | koschei/data.py | Python | gpl-2.0 | 10,829 |
#
# Copyright (c) 2010, 2015, Oracle and/or its affiliates. All rights reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
"""
This file contains the check index utility. It is used to check for
duplicate or redundant indexes for a list of database (operates on
all tables in each database), a list of tables in the for db.table,
or all tables in all databases except internal databases.
"""
import re
from mysql.utilities.exception import UtilError
from mysql.utilities.common.server import connect_servers
from mysql.utilities.common.database import Database
from mysql.utilities.common.options import PARSE_ERR_OBJ_NAME_FORMAT
from mysql.utilities.common.pattern_matching import REGEXP_QUALIFIED_OBJ_NAME
from mysql.utilities.common.table import Table
from mysql.utilities.common.sql_transform import (is_quoted_with_backticks,
quote_with_backticks,
remove_backtick_quoting)
def check_index(src_val, table_args, options):
"""Check for duplicate or redundant indexes for one or more tables
This method will examine the indexes for one or more tables and identify
any indexes that are potential duplicates or redundant. It prints the
equivalent DROP statements if selected.
src_val[in] a dictionary containing connection information for the
source including:
(user, password, host, port, socket)
table_args[in] list of tables in the form 'db.table' or 'db'
options[in] dictionary of options to include:
show-drops : show drop statements for dupe indexes
skip : skip non-existent tables
verbosity : print extra information
show-indexes : show all indexes for each table
index-format : index format = sql, table, tab, csv
worst : show worst performing indexes
best : show best performing indexes
report-indexes : reports tables without PK or UK
Returns bool True = success, raises UtilError if error
"""
# Get options
show_drops = options.get("show-drops", False)
skip = options.get("skip", False)
verbosity = options.get("verbosity", False)
show_indexes = options.get("show-indexes", False)
index_format = options.get("index-format", False)
stats = options.get("stats", False)
first_indexes = options.get("best", None)
last_indexes = options.get("worst", None)
report_indexes = options.get("report-indexes", False)
# Try to connect to the MySQL database server.
conn_options = {
'quiet': verbosity == 1,
'version': "5.0.0",
}
servers = connect_servers(src_val, None, conn_options)
source = servers[0]
db_list = [] # list of databases
table_list = [] # list of all tables to process
# Build a list of objects to process
# 1. start with db_list if no objects present on command line
# 2. process command line options.
# 3. loop through database list and add all tables
# 4. check indexes
obj_name_regexp = re.compile(REGEXP_QUALIFIED_OBJ_NAME)
# Perform the options check here. Loop through objects presented.
for obj in table_args:
m_obj = obj_name_regexp.match(obj)
# Check if a valid database/table name is specified.
if not m_obj:
raise UtilError(PARSE_ERR_OBJ_NAME_FORMAT.format(
obj_name=obj, option="the database/table arguments"))
else:
db_name, obj_name = m_obj.groups()
if obj_name:
# Table specified
table_list.append(obj)
# Else we are operating on a specific database.
else:
# Remove backtick quotes.
db_name = remove_backtick_quoting(db_name) \
if is_quoted_with_backticks(db_name) else db_name
db_list.append(db_name)
# Loop through database list adding tables
for db in db_list:
db_source = Database(source, db)
db_source.init()
tables = db_source.get_db_objects("TABLE")
if not tables and verbosity >= 1:
print "# Warning: database %s does not exist. Skipping." % (db)
for table in tables:
table_list.append("{0}.{1}".format(quote_with_backticks(db),
quote_with_backticks(table[0])))
# Fail if no tables to check
if not table_list:
raise UtilError("No tables to check.")
if verbosity > 1:
print "# Checking indexes..."
# Check indexes for each table in the list
for table_name in table_list:
tbl_options = {
'verbose': verbosity >= 1,
'get_cols': False,
'quiet': verbosity is None or verbosity < 1
}
tbl = Table(source, table_name, tbl_options)
exists = tbl.exists()
if not exists and not skip:
raise UtilError("Table %s does not exist. Use --skip "
"to skip missing tables." % table_name)
if exists:
if not tbl.get_indexes():
if verbosity > 1 or report_indexes:
print "# Table %s is not indexed." % (table_name)
else:
if show_indexes:
tbl.print_indexes(index_format, verbosity)
# Show if table has primary key
if verbosity > 1 or report_indexes:
if not tbl.has_primary_key():
if not tbl.has_unique_key():
print("# Table {0} does not contain neither a "
"PRIMARY nor UNIQUE key.".format(table_name))
else:
print("# Table {0} does not contain a PRIMARY key."
"".format(table_name))
tbl.check_indexes(show_drops)
# Show best and/or worst indexes
if stats:
if first_indexes is not None:
tbl.show_special_indexes(index_format, first_indexes, True)
if last_indexes is not None:
tbl.show_special_indexes(index_format, last_indexes)
if verbosity > 1:
print "#"
if verbosity > 1:
print "# ...done."
| ioggstream/mysql-utilities | mysql/utilities/command/indexcheck.py | Python | gpl-2.0 | 7,144 |
#!/usr/bin/env python
#coding: utf-8
#Filename: try_except.py
import sys
try:
s = raw_input('Enter something -->')
except EOFError:
print '\nWhy did you do an EOF on me?'
sys.exit()
except:
    print '\nSome error/exception occurred.'
print 'Done' | daya-prac/Python-prac | python/try_except.py | Python | gpl-2.0 | 261 |
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.conf import settings
from kirt.settings import LOGIN_URL
"""
All the URLs that can be possibly called by user are working here.
"""
urlpatterns = patterns('',
url(r'^accounts/', include('django.contrib.auth.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^$','src.views.index'),
url(r'^addworker/','src.views.addworker'),
url(r'^logout/$','src.views.my_logout'),
url(r'^src/addadvance/','src.views.addadvance'),
url(r'^src/promotions/','src.views.promotions'),
url(r'^ajaxpromotions/','src.views.ajaxpromotions'),
url(r'^previous_promotions/','src.views.previous_promotions'),
url(r'^daily_attendance/','src.views.daily_attendance'),
url(r'^ajax_daily_attendance/','src.views.ajax_daily_attendance'),
url(r'^ajaxdetails/','src.views.ajaxdetails'),
url(r'^ajaxrequest/','src.views.ajaxrequest'),
url(r'^ajaxrequestpaid/','src.views.ajaxrequestpaid'),
url(r'^popupadvance/','src.views.popupadvance'),
url(r'^ajaxpopupadvance/','src.views.ajaxpopupadvance'),
url(r'^particulars/','src.views.particulars'),
url(r'^payslip/', 'src.views.payslip'),
url(r'^return_advance/','src.views.return_advance'),
url(r'^deleteworker/','src.views.deleteworker'),
url(r'^jsreverse/', 'src.views.jsreverse'),
)
| KamalKaur/kirt | kirt/urls.py | Python | gpl-2.0 | 1,433 |
#!/usr/bin/python3
import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gio, Gtk, GObject, Gdk
from GSettingsWidgets import *
class Module:
name = "windows"
category = "prefs"
comment = _("Manage window preferences")
def __init__(self, content_box):
keywords = _("windows, titlebar, edge, switcher, window list, attention, focus")
sidePage = SidePage(_("Windows"), "cs-windows", keywords, content_box, module=self)
self.sidePage = sidePage
def on_module_selected(self):
if not self.loaded:
print("Loading Windows module")
self.sidePage.stack = SettingsStack()
self.sidePage.add_widget(self.sidePage.stack)
# Titlebar
page = SettingsPage()
self.sidePage.stack.add_titled(page, "titlebar", _("Titlebar"))
widget = TitleBarButtonsOrderSelector()
page.add(widget)
settings = page.add_section(_("Actions"))
action_options = [["toggle-shade", _("Toggle Shade")], ["toggle-maximize", _("Toggle Maximize")],
["toggle-maximize-horizontally", _("Toggle Maximize Horizontally")], ["toggle-maximize-vertically", _("Toggle Maximize Vertically")],
["toggle-stuck", _("Toggle on all workspaces")], ["toggle-above", _("Toggle always on top")],
["minimize", _("Minimize")], ["menu", _("Menu")], ["lower", _("Lower")], ["none", _("None")]]
size_group = Gtk.SizeGroup.new(Gtk.SizeGroupMode.HORIZONTAL)
widget = GSettingsComboBox(_("Action on title bar double-click"), "org.cinnamon.desktop.wm.preferences", "action-double-click-titlebar", action_options, size_group=size_group)
settings.add_row(widget)
widget = GSettingsComboBox(_("Action on title bar middle-click"), "org.cinnamon.desktop.wm.preferences", "action-middle-click-titlebar", action_options, size_group=size_group)
settings.add_row(widget)
widget = GSettingsComboBox(_("Action on title bar right-click"), "org.cinnamon.desktop.wm.preferences", "action-right-click-titlebar", action_options, size_group=size_group)
settings.add_row(widget)
scroll_options = [["none", _("Nothing")],["shade", _("Shade and unshade")],["opacity", _("Adjust opacity")]]
widget = GSettingsComboBox(_("Action on title bar with mouse scroll"), "org.cinnamon.desktop.wm.preferences", "action-scroll-titlebar", scroll_options, size_group=size_group)
settings.add_row(widget)
spin = GSettingsSpinButton(_("Minimum opacity"), "org.cinnamon.desktop.wm.preferences", "min-window-opacity", _("%"))
settings.add_reveal_row(spin)
spin.revealer.settings = Gio.Settings("org.cinnamon.desktop.wm.preferences")
spin.revealer.settings.bind_with_mapping("action-scroll-titlebar", spin.revealer, "reveal-child", Gio.SettingsBindFlags.GET, lambda x: x == "opacity", None)
# Behavior
page = SettingsPage()
self.sidePage.stack.add_titled(page, "behavior", _("Behavior"))
settings = page.add_section(_("Window Focus"))
focus_options = [["click", _("Click")], ["sloppy", _("Sloppy")], ["mouse", _("Mouse")]]
widget = GSettingsComboBox(_("Window focus mode"), "org.cinnamon.desktop.wm.preferences", "focus-mode", focus_options)
settings.add_row(widget)
widget = GSettingsSwitch(_("Automatically raise focused windows"), "org.cinnamon.desktop.wm.preferences", "auto-raise")
settings.add_reveal_row(widget)
widget.revealer.settings = Gio.Settings("org.cinnamon.desktop.wm.preferences")
widget.revealer.settings.bind_with_mapping("focus-mode", widget.revealer, "reveal-child", Gio.SettingsBindFlags.GET, lambda x: x in ("sloppy", "mouse"), None)
widget = GSettingsSwitch(_("Bring windows which require attention to the current workspace"), "org.cinnamon", "bring-windows-to-current-workspace")
settings.add_row(widget)
widget = GSettingsSwitch(_("Prevent focus stealing"), "org.cinnamon", "prevent-focus-stealing")
settings.add_row(widget)
widget = GSettingsSwitch(_("Attach dialog windows to the parent window"), "org.cinnamon.muffin", "attach-modal-dialogs")
settings.add_row(widget)
settings = page.add_section(_("Moving and Resizing Windows"))
size_group = Gtk.SizeGroup.new(Gtk.SizeGroupMode.HORIZONTAL)
placement_options = [["automatic", _("Automatic")], ["pointer", _("Cursor")], ["manual", _("Manual")], ["center", _("Center")]]
widget = GSettingsComboBox(_("Location of newly opened windows"), "org.cinnamon.muffin", "placement-mode", placement_options, size_group=size_group)
settings.add_row(widget)
special_key_options = [["", _("Disabled")], ["<Alt>", "<Alt>"],["<Super>", "<Super>"],["<Control>", "<Control>"]]
widget = GSettingsComboBox(_("Special key to move and resize windows"), "org.cinnamon.desktop.wm.preferences", "mouse-button-modifier", special_key_options, size_group=size_group)
widget.set_tooltip_text(_("While the special key is pressed, windows can be dragged with the left mouse button and resized with the right mouse button."))
settings.add_row(widget)
widget = GSettingsSpinButton(_("Window drag/resize threshold"), "org.cinnamon.muffin", "resize-threshold", _("Pixels"), 1, 100, size_group=size_group)
settings.add_row(widget)
widget = GSettingsSwitch(_("Edge resistance with other windows and monitor boundaries"), "org.cinnamon.muffin", "edge-resistance-window")
widget.set_tooltip_text(_("Make window borders stick when moved or resized near other windows or monitor edges."))
settings.add_row(widget)
# Alt Tab
page = SettingsPage()
self.sidePage.stack.add_titled(page, "alttab", _("Alt-Tab"))
settings = page.add_section(_("Alt-Tab"))
alttab_styles = [
["icons", _("Icons only")],
["thumbnails", _("Thumbnails only")],
["icons+thumbnails", _("Icons and thumbnails")],
["icons+preview", _("Icons and window preview")],
["preview", _("Window preview (no icons)")],
["coverflow", _("Coverflow (3D)")],
["timeline", _("Timeline (3D)")]
]
widget = GSettingsComboBox(_("Alt-Tab switcher style"), "org.cinnamon", "alttab-switcher-style", alttab_styles)
settings.add_row(widget)
widget = GSettingsSwitch(_("Display the alt-tab switcher on the primary monitor instead of the active one"), "org.cinnamon", "alttab-switcher-enforce-primary-monitor")
settings.add_row(widget)
widget = GSettingsSwitch(_("Move minimized windows to the end of the alt-tab switcher"), "org.cinnamon", "alttab-minimized-aware")
settings.add_row(widget)
widget = GSettingsSpinButton(_("Delay before displaying the alt-tab switcher"), "org.cinnamon", "alttab-switcher-delay", units=_("milliseconds"), mini=0, maxi=1000, step=50, page=150)
settings.add_row(widget)
widget = GSettingsSwitch(_("Show windows from all workspaces"), "org.cinnamon", "alttab-switcher-show-all-workspaces")
settings.add_row(widget)
class TitleBarButtonsOrderSelector(SettingsBox):
def __init__(self):
self.schema = "org.cinnamon.desktop.wm.preferences"
self.key = "button-layout"
super(TitleBarButtonsOrderSelector, self).__init__(_("Buttons"))
self.settings = Gio.Settings.new(self.schema)
self.value = self.settings.get_string(self.key)
left_box = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL)
left_box.set_border_width(5)
left_box.set_margin_left(20)
left_box.set_margin_right(20)
left_box.set_spacing(5)
right_box = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL)
right_box.set_border_width(5)
right_box.set_margin_left(20)
right_box.set_margin_right(20)
right_box.set_spacing(5)
        try:
            left_items, right_items = self.value.split(":")
        except ValueError:
            left_items = right_items = ""
if len(left_items) > 0:
left_items = left_items.split(",")
else:
left_items = []
if len(right_items) > 0:
right_items = right_items.split(",")
else:
right_items = []
left_label = Gtk.Label.new(_("Left side title bar buttons"))
left_label.set_alignment(0.0, 0.5)
left_label.set_line_wrap(True)
left_box.pack_start(left_label, False, False, 0)
left_grid = Gtk.Grid()
left_grid.set_column_spacing(4)
left_box.pack_end(left_grid, False, False, 0)
left_grid.set_valign(Gtk.Align.CENTER)
right_label = Gtk.Label.new(_("Right side title bar buttons"))
right_label.set_alignment(0.0, 0.5)
right_label.set_line_wrap(True)
right_box.pack_start(right_label, False, False, 0)
right_grid = Gtk.Grid()
right_grid.set_column_spacing(4)
right_box.pack_end(right_grid, False, False, 0)
right_grid.set_valign(Gtk.Align.CENTER)
self.left_side_widgets = []
self.right_side_widgets = []
for i in range(4):
self.left_side_widgets.append(Gtk.ComboBox())
self.right_side_widgets.append(Gtk.ComboBox())
buttons = [
("", ""),
("menu", _("Menu")),
("close", _("Close")),
("minimize", _("Minimize")),
("maximize", _("Maximize")),
("stick", _("Sticky")),
("shade", _("Shade"))
]
for i in self.left_side_widgets + self.right_side_widgets:
if i in self.left_side_widgets:
ref_list = left_items
index = self.left_side_widgets.index(i)
else:
ref_list = right_items
index = self.right_side_widgets.index(i)
model = Gtk.ListStore(str, str)
selected_iter = None
for button in buttons:
iter = model.insert_before(None, None)
model.set_value(iter, 0, button[0])
model.set_value(iter, 1, button[1])
if index < len(ref_list) and ref_list[index] == button[0]:
selected_iter = iter
i.set_model(model)
renderer_text = Gtk.CellRendererText()
i.pack_start(renderer_text, True)
i.add_attribute(renderer_text, "text", 1)
if selected_iter is not None:
i.set_active_iter(selected_iter)
i.connect("changed", self.on_my_value_changed)
for i in self.left_side_widgets:
index = self.left_side_widgets.index(i)
left_grid.attach(i, index, 0, 1, 1)
i.set_valign(Gtk.Align.CENTER)
for i in self.right_side_widgets:
index = self.right_side_widgets.index(i)
right_grid.attach(i, index, 0, 1, 1)
i.set_valign(Gtk.Align.CENTER)
self.add_row(left_box)
self.add_row(right_box)
def on_my_value_changed(self, widget):
active_iter = widget.get_active_iter()
if active_iter:
new_value = widget.get_model()[active_iter][0]
else:
new_value = None
left_items = []
right_items = []
for i in self.left_side_widgets + self.right_side_widgets:
active_iter = i.get_active_iter()
if active_iter:
value = i.get_model()[i.get_active_iter()][0]
if i != widget and value == new_value:
i.set_active_iter(None)
elif value != "":
if i in self.left_side_widgets:
left_items.append(value)
else:
right_items.append(value)
self.settings.set_string(self.key, ','.join(str(item) for item in left_items) + ':' + ','.join(str(item) for item in right_items))
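# The gsettings value written above is the standard Muffin/Metacity
# "button-layout" string: left-side buttons, a colon, then right-side
# buttons, each side comma-separated. An illustrative value:
#
#   menu:minimize,maximize,close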
| collinss/Cinnamon | files/usr/share/cinnamon/cinnamon-settings/modules/cs_windows.py | Python | gpl-2.0 | 12,390 |
# Author: Hannah Brock
from agent import WumpusAgent
from knowledge_base import KnowledgeBase
class LogicalAgent(WumpusAgent):
"""Implementation of a logcial wumpus agent"""
def __init__(self):
self.kb = KnowledgeBase()
def get_action(self, percept):
"""See WumpusAgent.get_action for further
documentation.
"""
for p in percept:
self.kb.tell(p)
if self.kb.ask('GRAB'):
return WumpusAgent.GRAB
if self.kb.ask('FORWARD'):
return WumpusAgent.FORWARD
return WumpusAgent.LEFT
def reset(self):
self.kb = KnowledgeBase()
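# Hedged usage sketch (the percept strings depend on KnowledgeBase, which is
# defined elsewhere; the values below are assumptions for illustration):
#
#   agent = LogicalAgent()
#   action = agent.get_action(['STENCH', 'GLITTER'])
#   agent.reset()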
| hjbrock/AI-trials | wumpus-world-first-step/agents/logical_agent.py | Python | gpl-2.0 | 644 |
import pythonlab
import agros2d
import scipy.io as sio
import numpy as np
from test_suite.scenario import Agros2DTestCase
from test_suite.scenario import Agros2DTestResult
class TestInternalMatrixSolvers(Agros2DTestCase):
@classmethod
def setUpClass(self):
# store state
self.save_matrix_and_rhs = agros2d.options.save_matrix_and_rhs
self.dump_format = agros2d.options.dump_format
# dump format
agros2d.options.save_matrix_and_rhs = True
agros2d.options.dump_format = "matlab_mat"
# read reference matrix and rhs from file
self.reference_mat, self.reference_rhs = self.read_matrix_and_rhs(pythonlab.datadir("/resources/test/test_suite/core/matrix_solvers_matrix.mat"),
pythonlab.datadir("/resources/test/test_suite/core/matrix_solvers_rhs.mat"))
@classmethod
def tearDownClass(self):
# restore state
agros2d.options.save_matrix_and_rhs = self.save_matrix_and_rhs
agros2d.options.dump_format = self.dump_format
@classmethod
def model(self, solver):
# problem
problem = agros2d.problem(clear = True)
problem.coordinate_type = "axisymmetric"
problem.mesh_type = "triangle"
# fields
# electrostatic
electrostatic = agros2d.field("electrostatic")
electrostatic.analysis_type = "steadystate"
electrostatic.matrix_solver = solver
electrostatic.number_of_refinements = 1
electrostatic.polynomial_order = 2
electrostatic.adaptivity_type = "disabled"
electrostatic.solver = "linear"
# boundaries
electrostatic.add_boundary("Source", "electrostatic_potential", {"electrostatic_potential" : 1e9})
electrostatic.add_boundary("Ground", "electrostatic_potential", {"electrostatic_potential" : 0})
electrostatic.add_boundary("Neumann", "electrostatic_surface_charge_density", {"electrostatic_surface_charge_density" : 0})
# materials
electrostatic.add_material("Air", {"electrostatic_permittivity" : 1, "electrostatic_charge_density" : 1})
electrostatic.add_material("Dielectric 1", {"electrostatic_permittivity" : 3, "electrostatic_charge_density" : 20})
electrostatic.add_material("Dielectric 2", {"electrostatic_permittivity" : 4, "electrostatic_charge_density" : 30})
# geometry
geometry = agros2d.geometry
geometry.add_edge(0, 0.2, 0, 0.08, boundaries = {"electrostatic" : "Neumann"})
geometry.add_edge(0.01, 0.08, 0.01, 0, refinements = {"electrostatic" : 1}, boundaries = {"electrostatic" : "Source"})
geometry.add_edge(0.01, 0, 0.03, 0, boundaries = {"electrostatic" : "Neumann"})
geometry.add_edge(0.03, 0, 0.03, 0.08)
geometry.add_edge(0.03, 0.08, 0.05, 0.08)
geometry.add_edge(0.05, 0, 0.03, 0, boundaries = {"electrostatic" : "Neumann"})
geometry.add_edge(0.05, 0.08, 0.05, 0, refinements = {"electrostatic" : 1}, boundaries = {"electrostatic" : "Ground"})
geometry.add_edge(0.06, 0, 0.06, 0.08, boundaries = {"electrostatic" : "Ground"})
geometry.add_edge(0.05, 0.08, 0.06, 0.08, refinements = {"electrostatic" : 1}, boundaries = {"electrostatic" : "Ground"})
geometry.add_edge(0.06, 0, 0.2, 0, boundaries = {"electrostatic" : "Neumann"})
geometry.add_edge(0.2, 0, 0, 0.2, angle = 90, boundaries = {"electrostatic" : "Neumann"})
geometry.add_edge(0.01, 0.08, 0.03, 0.08)
geometry.add_edge(0.01, 0.08, 0, 0.08, refinements = {"electrostatic" : 1}, boundaries = {"electrostatic" : "Source"})
geometry.add_label(0.019, 0.021, materials = {"electrostatic" : "Dielectric 1"})
geometry.add_label(0.0379, 0.051, materials = {"electrostatic" : "Dielectric 2"})
geometry.add_label(0.0284191, 0.123601, materials = {"electrostatic" : "Air"})
agros2d.view.zoom_best_fit()
problem.solve()
return electrostatic.filename_matrix(), electrostatic.filename_rhs()
@classmethod
def analyse_matrix_and_rhs(self, filename_matrix, filename_rhs):
import pylab as pl
# read matrix and rhs from file
mat_object = sio.loadmat(filename_matrix)
matrix = mat_object["matrix"]
rhs_object = sio.loadmat(filename_rhs)
rhs = rhs_object["rhs"]
# size of the matrix
print("Matrix size: " + str(len(rhs)))
print("Number of nonzeros: " + str(matrix.getnnz()) + " (" + str(round(float(matrix.getnnz()) / (len(rhs)**2) * 100.0, 3)) + " %)")
# visualize matrix sparsity pattern
fig = pl.figure()
pl.spy(matrix, markersize=1)
fn_pattern = pythonlab.tempname("png")
pl.savefig(fn_pattern, dpi=60)
pl.close(fig)
# show in console
pythonlab.image(fn_pattern)
@classmethod
def read_matrix_and_rhs(self, matrix_filename, rhs_filename):
mat_object = sio.loadmat(matrix_filename)
matrix = mat_object["matrix"]
rhs_object = sio.loadmat(rhs_filename)
rhs = rhs_object["rhs"]
return matrix, rhs
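    # Expected .mat layout, inferred from the loadmat() calls above (sketch):
    #   sio.loadmat(matrix_filename)["matrix"] -> scipy.sparse system matrix
    #   sio.loadmat(rhs_filename)["rhs"]       -> right-hand-side vector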
def test_mumps(self):
# MUMPS
filename_mumps_matrix, filename_mumps_rhs = self.model("mumps")
mumps_mat, mumps_rhs = self.read_matrix_and_rhs(filename_mumps_matrix, filename_mumps_rhs)
self.assertTrue(np.allclose(self.reference_mat.todense(), mumps_mat.todense(), rtol=1e-15, atol=1e-15),
"MUMPS matrix failed.")
self.assertTrue(np.allclose(self.reference_rhs, mumps_rhs, rtol=1e-15, atol=1e-10),
"MUMPS rhs failed.")
def test_umfpack(self):
# UMFPACK
filename_umfpack_matrix, filename_umfpack_rhs = self.model("umfpack")
umfpack_mat, umfpack_rhs = self.read_matrix_and_rhs(filename_umfpack_matrix, filename_umfpack_rhs)
self.assertTrue(np.allclose(self.reference_mat.todense(), umfpack_mat.todense(), rtol=1e-15, atol=1e-15),
"UMFPACK matrix failed.")
self.assertTrue(np.allclose(self.reference_rhs, umfpack_rhs, rtol=1e-15, atol=1e-10),
"UMFPACK rhs failed.")
def test_paralution_iter(self):
# PARALUTION - iterative
filename_paralution_iterative_matrix, filename_paralution_iterative_rhs = self.model("paralution_iterative")
paralution_iterative_mat, paralution_iterative_rhs = self.read_matrix_and_rhs(filename_paralution_iterative_matrix, filename_paralution_iterative_rhs)
self.assertTrue(np.allclose(self.reference_mat.todense(), paralution_iterative_mat.todense(), rtol=1e-15, atol=1e-15),
"PARALUTION iterative matrix failed.")
self.assertTrue(np.allclose(self.reference_rhs, paralution_iterative_rhs, rtol=1e-15, atol=1e-10),
"PARALUTION iterative rhs failed.")
def test_paralution_amg(self):
# PARALUTION - amg
filename_paralution_amg_matrix, filename_paralution_amg_rhs = self.model("paralution_amg")
paralution_amg_mat, paralution_amg_rhs = self.read_matrix_and_rhs(filename_paralution_amg_matrix, filename_paralution_amg_rhs)
self.assertTrue(np.allclose(self.reference_mat.todense(), paralution_amg_mat.todense(), rtol=1e-15, atol=1e-15),
"PARALUTION AMG matrix failed.")
self.assertTrue(np.allclose(self.reference_rhs, paralution_amg_rhs, rtol=1e-15, atol=1e-10),
"PARALUTION AMG rhs failed.")
def test_external(self):
# external
filename_external_matrix, filename_external_rhs = self.model("external")
external_mat, external_rhs = self.read_matrix_and_rhs(filename_external_matrix, filename_external_rhs)
self.assertTrue(np.allclose(self.reference_mat.todense(), external_mat.todense(), rtol=1e-15, atol=1e-15),
"EXTERNAL matrix failed.")
self.assertTrue(np.allclose(self.reference_rhs, external_rhs, rtol=1e-15, atol=1e-10),
"EXTERNAL rhs failed.")
if __name__ == '__main__':
import unittest as ut
suite = ut.TestSuite()
result = Agros2DTestResult()
suite.addTest(ut.TestLoader().loadTestsFromTestCase(TestInternalMatrixSolvers))
suite.run(result) | hpfem/agros2d | resources/test/test_suite/core/matrix_solvers.py | Python | gpl-2.0 | 8,536 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
from yapsy.IPlugin import IPlugin
from PyQt4 import QtCore, QtGui
from threading import Thread, RLock
import websocket
import csv
import time
#l = RLock(False)
class ListenWebsocket(QtCore.QThread):
def __init__(self, parent=None, adress = None):
super(ListenWebsocket, self).__init__(parent)
self.parent = parent
self.WS = websocket.WebSocketApp(adress,
on_message = self.on_message,
on_error = self.on_error,
on_close = self.on_close)
def run(self):
self.WS.on_open = self.on_open;
self.WS.run_forever()
def on_message(self, ws, message):
self.str = message
print "ON_MESSAGE:", ws, message
def on_error(self, ws, error):
print "ON_ERROR:", error
def on_close(self, ws):
print "### closed ###"
def on_open(self, client):
print "##ON_OPEN##", client
self.WS.send("client data")
def send(self, data):
print "##posilam data", data
self.WS.send(data)
def recv(self):
self.str = ""
while self.str == "":
pass
return self.str
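# Wire-protocol sketch inferred from the handlers below (an assumption, not a
# formal spec): requests and replies are ';'-delimited strings, e.g.
#   "$sht25;"                -> "&sht25;<temp>;<humidity>;"
#   "$raspd;<speed>;<dir>;"  -> "<s;<lts_temp>;"
#   "$gettime;"              -> "&gettime;<speed>;<dir>;"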
class Communicator(QtCore.QThread):
def __init__(self, parent = None, win = None):
super(Communicator, self).__init__()
self.parent= parent
self.adress = "ws://"+str(self.parent.TbxIP.text())+":"+str(self.parent.NbxPORT.value())+"/ws"
print "Adresa:", self.adress
self.exiting = False
self.vTimeSpd = 0
self.vActSpd = 0
self.vTrans = 1
self.vFollow = False
self.vDir = True
self.vActDir = True
self.vSHTtemp = 88.88
self.vSHThumi = 88.88
self.vSHTdew = 88.88
self.vLTStemp = 88.88
self.vChange = False
self.vAutoUpdate = True
self.vLastOutPut = False
self.vRecLoc = False
self.vRecRem = False
self.vEneCam = False # Eneable remote camera
print "##created"
self.ws = ListenWebsocket(None, self.adress)
self.ws.start()
# self.ws.send("$raspd;%d;%d;" %(self.vDir, self.vTimeSpd))
result = self.ws.recv()
print "Received:", result
def __del__(self):
print "Communicator ukoncovani ..."
self.exiting = True
self.wait()
def sendSpd(self):
self.ws.send("$raspd;%d;%d;" %(self.vActSpd, self.vActDir))
result = self.ws.recv()
#if result.split(';')[0] == "<s":
# self.vLTStemp = float(result.split(';')[1])
def sync(self):
print "sync procedure"
self.getWeather()
self.updateUI()
self.ws.send("$raspd;%d;%d;" %(self.vTimeSpd, self.vActDir))
result = self.ws.recv()
if result.split(';')[0] == "<s":
self.vLTStemp = float(result.split(';')[1])
def getWeather(self):
self.ws.send("$sht25;")
print "getWeather"
result = self.ws.recv()
if result.split(';')[0] == "&sht25":
self.vSHTtemp = float(result.split(';')[1])
self.vSHThumi = float(result.split(';')[2])
self.vSHTdew = ((self.vSHThumi / 100) ** 0.125) * (112 + 0.9 * self.vSHTtemp) + (0.1 * self.vSHTtemp) - 112
self.ws.send("$lts;")
result = self.ws.recv()
if result.split(';')[0] == "<s":
self.vLTStemp = float(result.split(';')[1])
def updateUI(self):
self.parent.LcnSHTtemp.display(self.vSHTtemp)
self.parent.LcnSHTHumi.display(self.vSHThumi)
self.parent.LcnSHTDew.display(self.vSHTdew)
self.parent.LcnLTStemp.display(self.vLTStemp)
#self.parent.LcnRAspeed1.display((self.vTimeSpd >> 16)&0xff)
#self.parent.LcnRAspeed2.display((self.vTimeSpd >> 8)&0xff)
#self.parent.LcnRAspeed3.display((self.vTimeSpd >> 0)&0xff)
self.parent.NbxSpdTime.setValue(self.vTimeSpd)
self.parent.LcnRAspeed1.display(self.vTimeSpd)
self.parent.LcnRAspeed2.display(self.vTimeSpd*(self.vTrans**2))
self.parent.LcnRAspeed3.display(self.vActSpd)
def change(self, type=None, widget=None):
print "change,", self, type, widget
if type == "NbxSpdTime":
self.vTimeSpd = widget.value()
#if self.vFollow:
# self.vActSpd = self.vTimeSpd
elif type == "ChbTimeDir":
self.vDir = bool(widget.checkState())
self.vActDir = self.vDir
#print type, ": ", widget.checkState()
self.sendSpd()
elif type == "ChbAutoUpdate":
self.vAutoUpdate = bool(widget.checkState())
#print type, ": ", widget.checkState()
elif type == "SldTrans":
self.vTrans = widget.value()
elif type == "ChbLastOutPut":
self.vLastOutPut = widget.checkState()
elif type == "ChbRecordLocaly":
self.vRecLoc = widget.checkState()
elif type == "ChbRecordRemotly":
self.vRecRem = widget.checkState()
elif type == "ChbEneableCamera":
self.vEneCam = bool(widget.checkState())
if bool(widget.checkState()):
self.parent.LoadCamera(self.parent.BoxCameraProperties)
self.updateUI()
def RCcapture(self, type=None, value=None):
if type == "capture":
self.ws.send("$capture;%d;%d;%d;%d;%d;%d;%d;%d;%d;%s;" %(1, 0, 10, 10, 10, 10, 10, 10, 10, "CaptureCap"))# type(full res, preview); quality; ISO; shutter; clona; Res; Res; Res; Res; id;
elif type == "preview":
self.ws.send("$capture;%d;%d;%d;%d;%d;%d;%d;%d;%d;%s;" %(0, 0, 10, 10, 10, 10, 10, 10, 10, "PreviewCap"))# type(full res, preview); quality; ISO; shutter; clona; Res; Res; Res; Res; id;
def save(self, type = None):
print "SAVE:", type
if type == 0:
self.ws.send("$gettime;")
result = self.ws.recv()
if result.split(';')[0] == "&gettime":
self.vTimeSpd = int(result.split(';')[1])
self.vDir = int(result.split(';')[2])
self.updateUI()
if type == 2:
self.ws.send("$savetime;%d;%d" %(self.vTimeSpd, int(self.vDir)))
def trans(self, type = 0):
print "TRANS:", type
if type == -2:
self.vActSpd = self.vTimeSpd - self.vTimeSpd*(self.vTrans**2)
print self.vActSpd, self.vTimeSpd, self.vTimeSpd*(self.vTrans**2), self.vTimeSpd - self.vTimeSpd*(self.vTrans**2)
if self.vActSpd < 0:
self.vActSpd = self.vActSpd*-1
self.vActDir = not self.vDir
#if self.vActSpd < self.vTimeSpd*(self.vTrans**2):
# self.vActDir != self.vDir
else:
self.vActDir = self.vDir
print self.vActSpd, self.vTimeSpd, self.vTimeSpd*(self.vTrans**2), self.vTimeSpd - self.vTimeSpd*(self.vTrans**2)
if type == 1:
self.vActSpd = self.vTimeSpd
self.vFollow = True
self.vActDir = self.vDir
if type == 0:
self.vActSpd = 0
self.vFollow = False
self.vActDir = self.vDir
if type == +2:
self.vActSpd = self.vTimeSpd + self.vTimeSpd*(self.vTrans**2)
#if self.vActSpd < self.vTimeSpd*(self.vTrans**2):
# self.vActDir != self.vDir
# self.vActSpd = self.vActSpd*-1
#else:
self.vActDir = self.vDir
if type == -1:
self.vActSpd = self.vTimeSpd
self.vActDir = self.vDir
self.updateUI()
self.sendSpd()
def upgrade(self):
pass
def run(self):
# Note: This is never called directly. It is called by Qt once the
# thread environment has been set up.
timer1 = time.time()
timer2 = time.time()
fLog =open("Log_%s.txt" %time.strftime("%Y%m%d"),'a')
while not self.exiting:
if timer1+5 < time.time():
if self.vAutoUpdate:
self.getWeather()
if not self.vChange:
self.upgrade()
if self.vLastOutPut:
try:
fLast=open("LastData.txt",'w')
fLast.write("%.2f\n%.2f" %(round(self.vSHTtemp,2), round(self.vSHThumi,2)))
fLast.close()
except Exception, e:
print e
if self.vRecLoc:
try:
fLog =open("Log_%s.txt" %time.strftime("%Y%m%d"),'a')
fLog.write( str(time.time()) + ";" + str(self.vSHTtemp) + ";" + str(self.vSHThumi) + ";" + str(self.vLTStemp) + ";\n")
fLog.close()
except Exception, e:
print e
timer1 = time.time()
self.updateUI()
#if timer2+2.5 < time.time():
#print self.vActDir, self.vActSpd, self.vDir, self.vTimeSpd
time.sleep(1)
print "Communicator ukoncen"
class RA_HBSTEP_driver(IPlugin):
name = "MLAB telescope driver"
def __init__(self):
self.type = 1 #loader
self.UserName = "MLAB RA driver"
self.Local = False
def getType(self):
return self.type
def getUserName(self):
return self.UserName
def getName(self):
return self.UserName
def activate(self):
print "activated"
def deactivate(self):
print "Ive been deactivated!"
def load(self):
print "loader"
def OnSync(self, data = None):
print "local sync"
self.thread.sync()
def show(self, parent):
self.parent = parent
## Win
## ScroolArea
## ContentWidget
## HorizontalLayout
### GroupBox
self.win = QtGui.QWidget()
self.win.setMinimumHeight(900)
self.win.setMinimumWidth(900)
self.horizontalLayout = QtGui.QVBoxLayout()
self.scrollArea = QtGui.QScrollArea(self.win)
self.scrollArea.resize(900, 900)
self.scrollArea.setWidgetResizable(True)
self.contentWidget = QtGui.QWidget(self.win)
#self.contentWidget.setGeometry(0,0,900, 900)
self.content = QtGui.QHBoxLayout(self.contentWidget)
self.scrollArea.setWidget(self.contentWidget)
self.horizontalLayout.addWidget(self.scrollArea)
AbouteGroup = QtGui.QGroupBox("RA HBSTEP")
Aboute = QtGui.QVBoxLayout(self.win)
LocalRA = QtGui.QCheckBox("Remote driver", self.win)
LocalRA.stateChanged.connect(self.ToggleLocal)
BtnLoad = QtGui.QPushButton("Load")
BtnLoad.clicked.connect(self.onConnect)
Aboute.addWidget(LocalRA)
Aboute.addWidget(BtnLoad)
Aboute.addWidget(QtGui.QLabel("system",self.win))
AbouteGroup.setLayout(Aboute)
self.content.addWidget(AbouteGroup)
return self.win
def ToggleLocal(self, state):
if state == QtCore.Qt.Checked:
print "ANO"
self.Local = True
else:
print "NE"
self.Local = False
def onConnect(self, state):
self.PropertiesGroup = QtGui.QGroupBox("RA driver properties")
PropertiesMainHFrame = QtGui.QVBoxLayout()
if self.Local:
VbxIP = QtGui.QHBoxLayout()
VbxIP.addWidget(QtGui.QLabel("Remote adress:",self.win))
self.TbxIP = QtGui.QLineEdit("telescope.local")
VbxIP.addWidget(self.TbxIP)
VbxPORT = QtGui.QHBoxLayout()
VbxPORT.addWidget(QtGui.QLabel("Remote port:",self.win))
self.NbxPORT = QtGui.QSpinBox()
self.NbxPORT.setRange(0, 99999)
self.NbxPORT.setValue(10123)
VbxPORT.addWidget(self.NbxPORT)
BtnRemConnect = QtGui.QPushButton("Connect to MLAB telescope driver")
BtnRemConnect.clicked.connect(self.RemConnect)
PropertiesMainHFrame.addLayout(VbxIP)
PropertiesMainHFrame.addLayout(VbxPORT)
PropertiesMainHFrame.addWidget(BtnRemConnect)
PropertiesMainHFrame.addStretch()
else:
PropertiesMainHFrame.addWidget(QtGui.QLabel("This operation isn't supported yet.",self.win))
PropertiesMainHFrame.addWidget(QtGui.QLabel("Use Remote driving",self.win))
PropertiesMainHFrame.addWidget(QtGui.QLabel("Local driving will be supported as soos as possible :)",self.win))
PropertiesMainHFrame.addStretch()
self.PropertiesGroup.setLayout(PropertiesMainHFrame)
self.content.addWidget(self.PropertiesGroup)
def RemConnect(self, state):
while self.content.count():
child = self.content.takeAt(0)
if child.widget() is not None:
child.widget().deleteLater()
            elif child.layout() is not None:
                clearLayout(child.layout())
self.thread = Communicator(self, self.win)
self.thread.start()
self.DriverGroup = QtGui.QGroupBox("Remote driver manager")
PropertiesMainHFrame = QtGui.QVBoxLayout()
self.LcnSHTtemp = QtGui.QLCDNumber(0)
self.LcnSHTtemp.setDigitCount(5)
self.LcnSHTtemp.display(99.99)
self.LcnSHTHumi = QtGui.QLCDNumber(0)
self.LcnSHTHumi.setDigitCount(5)
self.LcnSHTHumi.display(999.99)
self.LcnSHTDew = QtGui.QLCDNumber(0)
self.LcnSHTDew.setDigitCount(5)
self.LcnSHTDew.display(99.99)
self.LcnLTStemp = QtGui.QLCDNumber(0)
self.LcnLTStemp.setDigitCount(5)
self.LcnLTStemp.display(99.99)
HbxWeather = QtGui.QHBoxLayout()
HbxWeather.addStretch(1)
HbxWeather.addWidget(self.LcnSHTtemp)
HbxWeather.addWidget(self.LcnSHTHumi)
HbxWeather.addWidget(self.LcnSHTDew)
HbxWeather.addWidget(self.LcnLTStemp)
HbxWeather.addStretch(1)
VbxMovementSpd = QtGui.QVBoxLayout()
VbxMovementSpd.addStretch()
VbxMovementSpd.addWidget(QtGui.QLabel("Speed of translation:"))
self.SldTrans = QtGui.QSlider(1)
self.SldTrans.setMaximum(10)
self.SldTrans.setMinimum(1)
VbxMovementSpd.addWidget(self.SldTrans)
VbxMovementSpd.addWidget(QtGui.QLabel("Speed of time:"))
#VbxMovementSpd.addStretch()
self.ChbTimeDir = QtGui.QCheckBox()
self.ChbTimeDir.setObjectName("ChbLR")
self.NbxSpdTime = QtGui.QSpinBox()
self.NbxSpdTime.setMaximum(0xFFFFFF)
self.LcnRAspeed1 = QtGui.QLCDNumber(0)
self.LcnRAspeed1.display(0x00)
self.LcnRAspeed1.setHexMode()
self.LcnRAspeed1.setDigitCount(6)
self.LcnRAspeed2 = QtGui.QLCDNumber(0)
self.LcnRAspeed2.setHexMode()
self.LcnRAspeed2.setDigitCount(6)
self.LcnRAspeed2.display(0x00)
self.LcnRAspeed3 = QtGui.QLCDNumber(0)
self.LcnRAspeed3.setHexMode()
self.LcnRAspeed3.display(0x00)
self.LcnRAspeed3.setDigitCount(6)
self.LcnRAspeedDec = QtGui.QLCDNumber(0)
self.LcnRAspeedDec.display(0)
self.LcnRAspeedDec.setDigitCount(9)
HbxActualSpeed = QtGui.QHBoxLayout()
HbxActualSpeed.addWidget(self.ChbTimeDir)
HbxActualSpeed.addWidget(self.NbxSpdTime)
HbxActualSpeed.addStretch(1)
HbxActualSpeed.addWidget(self.LcnRAspeed1)
HbxActualSpeed.addWidget(self.LcnRAspeed2)
HbxActualSpeed.addWidget(self.LcnRAspeed3)
'''
#websocket.enableTrace(True)
adress = "ws://"+str(self.TbxIP.text())+":"+str(self.NbxPORT.value())+"/ws"
print "Adresa:", adress
ws = websocket.create_connection(adress)
ws.send("Hello, World")
result = ws.recv()
print("Received {}".format(result))
#ws.close()
'''
self.BtnSync = QtGui.QPushButton("Sync")
self.ChbAutoUpdate = QtGui.QCheckBox("AutoUpdate")
HbxTime = QtGui.QHBoxLayout()
HbxTime2 = QtGui.QHBoxLayout()
self.BtnTrans0 = QtGui.QPushButton("<<")
self.BtnTrans0.setObjectName("BtTransFBW")
self.BtnTime = QtGui.QPushButton("==")
self.BtnTime.setObjectName("BtTransPlay")
self.BtnTrans1 = QtGui.QPushButton(">>")
self.BtnTrans1.setObjectName("BtTransFFW")
HbxTime.addStretch(3)
HbxTime.addWidget(self.BtnTrans0,2)
HbxTime.addWidget(self.BtnTime,1)
HbxTime.addWidget(self.BtnTrans1,2)
HbxTime.addStretch(3)
self.BtnTimeStop = QtGui.QPushButton("||")
self.BtnTimeStop.setObjectName("BtTransPause")
HbxTime2.addStretch(2)
HbxTime2.addWidget(self.BtnTimeStop,7)
HbxTime2.addStretch(2)
VbxSet = QtGui.QVBoxLayout()
self.BtnSetGetAllData = QtGui.QPushButton("Get all data")
        self.BtnSetSaveLocaly = QtGui.QPushButton("Save locally")
        self.BtnSetSaveRemotly = QtGui.QPushButton("Save remotely")
        self.ChbRecordRemotly = QtGui.QCheckBox("Record remotely")
        self.ChbRecordLocaly = QtGui.QCheckBox("Record locally")
self.ChbLastOutPut = QtGui.QCheckBox("Record last file")
VbxSet.addWidget(self.BtnSetGetAllData)
VbxSet.addWidget(self.BtnSetSaveLocaly)
VbxSet.addWidget(self.BtnSetSaveRemotly)
VbxSet.addWidget(self.ChbRecordRemotly)
VbxSet.addWidget(self.ChbRecordLocaly)
VbxSet.addWidget(self.ChbLastOutPut)
PropertiesMainHFrame.addLayout(HbxWeather)
PropertiesMainHFrame.addLayout(VbxMovementSpd)
PropertiesMainHFrame.addLayout(HbxActualSpeed)
PropertiesMainHFrame.addWidget(self.BtnSync)
PropertiesMainHFrame.addWidget(self.ChbAutoUpdate)
PropertiesMainHFrame.addLayout(HbxTime)
PropertiesMainHFrame.addLayout(HbxTime2)
PropertiesMainHFrame.addLayout(VbxSet)
PropertiesMainHFrame.addStretch(1)
self.DriverGroup.setLayout(PropertiesMainHFrame)
self.content.addWidget(self.DriverGroup)
self.CameraGroup = QtGui.QGroupBox("Remote camera manager")
self.CameraGroupLayout = QtGui.QVBoxLayout()
self.InitCameraBox(self.CameraGroupLayout)
self.CameraGroup.setLayout(self.CameraGroupLayout)
self.content.addWidget(self.CameraGroup)
self.thread.updateUI()
self.NbxSpdTime.valueChanged.connect(lambda: self.thread.change("NbxSpdTime", self.NbxSpdTime))
#self.BtnSync.clicked.connect(self.OnSync)
self.SldTrans.valueChanged.connect(lambda: self.thread.change("SldTrans", self.SldTrans))
self.BtnSync.clicked.connect(lambda: self.thread.sync())
self.BtnTrans0.pressed.connect(lambda: self.thread.trans(-2))
self.BtnTrans0.released.connect(lambda: self.thread.trans(-1))
self.BtnTime.clicked.connect(lambda: self.thread.trans(1))
self.BtnTimeStop.clicked.connect(lambda: self.thread.trans(0))
self.BtnTrans1.pressed.connect(lambda: self.thread.trans(+2))
self.BtnTrans1.released.connect(lambda: self.thread.trans(-1))
self.BtnSetGetAllData.clicked.connect(lambda: self.thread.save(0))
self.BtnSetSaveLocaly.clicked.connect(lambda: self.thread.save(1))
self.BtnSetSaveRemotly.clicked.connect(lambda: self.thread.save(2))
self.ChbAutoUpdate.stateChanged.connect(lambda: self.thread.change("ChbAutoUpdate", self.ChbAutoUpdate))
self.ChbRecordLocaly.stateChanged.connect(lambda: self.thread.change("ChbRecordLocaly", self.ChbRecordLocaly))
self.ChbRecordRemotly.stateChanged.connect(lambda: self.thread.change("ChbRecordRemotly", self.ChbRecordRemotly))
self.ChbLastOutPut.stateChanged.connect(lambda: self.thread.change("ChbLastOutPut", self.ChbLastOutPut))
self.ChbTimeDir.stateChanged.connect(lambda: self.thread.change("ChbTimeDir", self.ChbTimeDir))
def InitCameraBox(self, group):
while group.count():
child = group.takeAt(0)
if child.widget() is not None:
child.widget().deleteLater()
            elif child.layout() is not None:
                clearLayout(child.layout())
self.ComDriverSelect = QtGui.QComboBox()
self.ComDriverSelect.addItem("Canon EOS (python-gphoto2)")
        self.ChbEneableCamera = QtGui.QCheckBox("Enable camera")
self.BoxCameraProperties = QtGui.QVBoxLayout()
group.addWidget(self.ComDriverSelect)
group.addWidget(self.ChbEneableCamera)
group.addLayout(self.BoxCameraProperties)
group.addStretch()
self.ChbEneableCamera.stateChanged.connect(lambda: self.thread.change("ChbEneableCamera", self.ChbEneableCamera))
#LoadCamera(self.BoxCameraProperties)
def LoadCamera(self, layout):
self.getRCpreview = QtGui.QPushButton("Get preview")
self.getRCCapture = QtGui.QPushButton("Get photo")
layout.addWidget(self.getRCpreview)
layout.addWidget(self.getRCCapture)
self.getRCpreview.clicked.connect(lambda: self.thread.RCcapture("preview"))
self.getRCCapture.clicked.connect(lambda: self.thread.RCcapture("capture"))
#layout.addWidget(QtGui.QLabel(""))
| roman-dvorak/TelescopeTools-extensions | RA_HBSTEP_driver/RA_HBSTEP_driver.py | Python | gpl-2.0 | 21,395 |
from scipy.io import wavfile
import bark, os
def dat_from_wav(wav, barkname, **attrs):
rate, data = wavfile.read(wav)
return bark.write_sampled(barkname, data, rate,**attrs)
def _main():
''' Function for getting commandline args.'''
import argparse
p = argparse.ArgumentParser(description='''
converts wav file to bark format
''')
p.add_argument('wav', help='path to wav file')
p.add_argument('out', help="path to bark file")
p.add_argument("-a",
"--attributes",
action='append',
type=lambda kv: kv.split("="),
dest='keyvalues',
help="extra metadata in the form of KEY=VALUE")
args = p.parse_args()
if args.keyvalues:
dat_from_wav(args.wav,
args.out,
**dict(args.keyvalues))
else:
dat_from_wav(args.wav, args.out)
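# Example invocation (hypothetical file names):
#   python datfromwav.py song.wav song.dat -a subject=bird1 -a trial=3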
if __name__ == '__main__':
_main()
| gfetterman/bark | bark/io/datfromwav.py | Python | gpl-2.0 | 910 |
# -*- coding: utf-8 -*-
#
#
# TheVirtualBrain-Framework Package. This package holds all Data Management, and
# Web-UI helpful to run brain-simulations. To use it, you also need do download
# TheVirtualBrain-Scientific Package (for simulators). See content of the
# documentation-folder for more details. See also http://www.thevirtualbrain.org
#
# (c) 2012-2013, Baycrest Centre for Geriatric Care ("Baycrest")
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License version 2 as published by the Free
# Software Foundation. This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
# License for more details. You should have received a copy of the GNU General
# Public License along with this program; if not, you can download it here
# http://www.gnu.org/licenses/old-licenses/gpl-2.0
#
#
# CITATION:
# When using The Virtual Brain for scientific publications, please cite it as follows:
#
# Paula Sanz Leon, Stuart A. Knock, M. Marmaduke Woodman, Lia Domide,
# Jochen Mersmann, Anthony R. McIntosh, Viktor Jirsa (2013)
# The Virtual Brain: a simulator of primate brain network dynamics.
# Frontiers in Neuroinformatics (7:10. doi: 10.3389/fninf.2013.00010)
#
#
"""
.. moduleauthor:: Bogdan Neacsa <[email protected]>
"""
import copy
import json
import numpy
import unittest
import cherrypy
from time import sleep
from tvb.tests.framework.interfaces.web.controllers.base_controller_test import BaseControllersTest
from tvb.config import SIMULATOR_MODULE, SIMULATOR_CLASS
from tvb.interfaces.web.controllers import common
from tvb.interfaces.web.controllers.burst.burst_controller import BurstController
from tvb.datatypes.connectivity import Connectivity
from tvb.core.entities import model
from tvb.core.entities.file.files_helper import FilesHelper
from tvb.core.entities.storage import dao
from tvb.core.entities.model.model_burst import BurstConfiguration, NUMBER_OF_PORTLETS_PER_TAB
from tvb.core.entities.transient.burst_configuration_entities import AdapterConfiguration
from tvb.core.entities.transient.structure_entities import DataTypeMetaData
from tvb.core.adapters.abcadapter import ABCAdapter
from tvb.core.services.burst_service import BurstService
from tvb.core.services.operation_service import OperationService
from tvb.core.services.flow_service import FlowService
from tvb.tests.framework.adapters.storeadapter import StoreAdapter
from tvb.tests.framework.adapters.simulator.simulator_adapter_test import SIMULATOR_PARAMETERS
class BurstControllerTest(BaseControllersTest):
""" Unit tests for burst_controller """
def setUp(self):
"""
Sets up the environment for testing;
creates a `BurstController`
"""
BaseControllersTest.init(self)
self.burst_c = BurstController()
def tearDown(self):
"""
Cleans up the environment after testing is done
"""
self.cleanup()
self.clean_database()
def test_index(self):
"""
Test that index returns a dict with all required keys. Also check
that the default portlets are populated, with only the first being
the TimeSeries portlet and the rest are empty.
"""
result_dict = self.burst_c.index()
self.assertTrue('burst_list' in result_dict and result_dict['burst_list'] == [])
self.assertTrue('available_metrics' in result_dict and isinstance(result_dict['available_metrics'], list))
self.assertTrue('portletList' in result_dict and isinstance(result_dict['portletList'], list))
self.assertEqual(result_dict[common.KEY_SECTION], "burst")
self.assertTrue('burstConfig' in result_dict and isinstance(result_dict['burstConfig'], BurstConfiguration))
portlets = json.loads(result_dict['selectedPortlets'])
portlet_id = dao.get_portlet_by_identifier("TimeSeries").id
for tab_idx, tab in enumerate(portlets):
for index_in_tab, value in enumerate(tab):
if tab_idx == 0 and index_in_tab == 0:
self.assertEqual(value, [portlet_id, "TimeSeries"])
else:
self.assertEqual(value, [-1, "None"])
self.assertTrue(result_dict['draw_hidden_ranges'])
def test_load_burst_history(self):
"""
        Create two bursts, load the burst history and check that we get back
the same stored bursts.
"""
self._store_burst(self.test_project.id, 'started', {'test': 'test'}, 'burst1')
burst = self._store_burst(self.test_project.id, 'started', {'test': 'test'}, 'burst2')
cherrypy.session[common.KEY_BURST_CONFIG] = burst
result_dict = self.burst_c.load_burst_history()
burst_history = result_dict['burst_list']
self.assertEqual(len(burst_history), 2)
for burst in burst_history:
self.assertTrue(burst.name in ('burst1', 'burst2'))
def test_get_selected_burst(self):
"""
Create burst, add it to session, then check that get_selected_burst
        returns the same burst. Also check that for an unstored entity we get
back 'None'
"""
burst_entity = BurstConfiguration(self.test_project.id, 'started', {}, 'burst1')
cherrypy.session[common.KEY_BURST_CONFIG] = burst_entity
stored_id = self.burst_c.get_selected_burst()
self.assertEqual(stored_id, 'None')
burst_entity = dao.store_entity(burst_entity)
cherrypy.session[common.KEY_BURST_CONFIG] = burst_entity
stored_id = self.burst_c.get_selected_burst()
self.assertEqual(str(stored_id), str(burst_entity.id))
def test_get_portlet_configurable_interface(self):
"""
Look up that an AdapterConfiguration is returned for the default
portlet configuration, if we look at index (0, 0) where TimeSeries portlet
should be default.
"""
self.burst_c.index()
result = self.burst_c.get_portlet_configurable_interface(0)
self.assertTrue(common.KEY_PARAMETERS_CONFIG in result)
self.assertFalse(result[common.KEY_PARAMETERS_CONFIG])
adapter_config = result['adapters_list']
# Default TimeSeries portlet should be available, so we expect
# adapter_config to be a list of AdapterConfiguration with one element
self.assertEqual(len(adapter_config), 1)
self.assertTrue(isinstance(adapter_config[0], AdapterConfiguration))
def test_portlet_tab_display(self):
"""
Update the default portlet configuration, by storing a TimeSeries
        portlet for all positions. Then check that we get the same configuration.
"""
self.burst_c.index()
portlet_id = dao.get_portlet_by_identifier("TimeSeries").id
one_tab = [[portlet_id, "TimeSeries"] for _ in range(NUMBER_OF_PORTLETS_PER_TAB)]
full_tabs = [one_tab for _ in range(BurstConfiguration.nr_of_tabs)]
data = {'tab_portlets_list': json.dumps(full_tabs)}
result = self.burst_c.portlet_tab_display(**data)
selected_portlets = result['portlet_tab_list']
for entry in selected_portlets:
self.assertEqual(entry.id, portlet_id)
def test_get_configured_portlets_no_session(self):
"""
Test that if we have no burst stored in session, an empty
        portlet list is returned.
"""
result = self.burst_c.get_configured_portlets()
self.assertTrue('portlet_tab_list' in result)
self.assertTrue(result['portlet_tab_list'] == [])
def test_get_configured_portlets_default(self):
"""
Check that the default configuration holds one portlet
and it's identifier is 'TimeSeries'.
"""
self.burst_c.index()
result = self.burst_c.get_configured_portlets()
self.assertTrue('portlet_tab_list' in result)
portlets_list = result['portlet_tab_list']
self.assertEqual(len(portlets_list), 1)
self.assertTrue(portlets_list[0].algorithm_identifier == 'TimeSeries')
def test_get_portlet_session_configuration(self):
"""
        Test that the default portlet session configuration is generated
as expected, with a default TimeSeries portlet and rest empty.
"""
self.burst_c.index()
result = json.loads(self.burst_c.get_portlet_session_configuration())
portlet_id = dao.get_portlet_by_identifier("TimeSeries").id
for tab_idx, tab in enumerate(result):
for index_in_tab, value in enumerate(tab):
if tab_idx == 0 and index_in_tab == 0:
self.assertEqual(value, [portlet_id, "TimeSeries"])
else:
self.assertEqual(value, [-1, "None"])
def test_save_parameters_no_relaunch(self):
"""
Test the save parameters for the default TimeSeries portlet and
pass an empty dictionary as the 'new' data. In this case a relaunch
should not be required.
"""
self.burst_c.index()
self.assertEqual('noRelaunch', self.burst_c.save_parameters(0, portlet_parameters="{}"))
def test_rename_burst(self):
"""
Create and store a burst, then rename it and check that it
works as expected.
"""
burst = self._store_burst(self.test_project.id, 'started', {'test': 'test'}, 'burst1')
self.burst_c.rename_burst(burst.id, "test_new_burst_name")
renamed_burst = dao.get_burst_by_id(burst.id)
self.assertEqual(renamed_burst.name, "test_new_burst_name")
def test_launch_burst(self):
"""
Launch a burst and check that it finishes correctly and before timeout (100)
"""
self.burst_c.index()
connectivity = self._burst_create_connectivity()
launch_params = copy.deepcopy(SIMULATOR_PARAMETERS)
launch_params['connectivity'] = connectivity.gid
launch_params['simulation_length'] = '10'
launch_params = {"simulator_parameters": json.dumps(launch_params)}
burst_id = json.loads(self.burst_c.launch_burst("new", "test_burst", **launch_params))['id']
waited = 1
timeout = 100
burst_config = dao.get_burst_by_id(burst_id)
while burst_config.status == BurstConfiguration.BURST_RUNNING and waited <= timeout:
sleep(0.5)
waited += 0.5
burst_config = dao.get_burst_by_id(burst_config.id)
if waited > timeout:
self.fail("Timed out waiting for simulations to finish.")
if burst_config.status != BurstConfiguration.BURST_FINISHED:
BurstService().stop_burst(burst_config)
self.fail("Burst should have finished successfully.")
def test_load_burst(self):
"""
Test loading and burst and checking you get expected dictionary.
"""
self.burst_c.index()
burst = self._store_burst(self.test_project.id, 'started', {'test': 'test'}, 'burst1')
result = json.loads(self.burst_c.load_burst(burst.id))
self.assertEqual(result["status"], "started")
self.assertEqual(result['group_gid'], None)
self.assertEqual(result['selected_tab'], 0)
def test_load_burst_removed(self):
"""
Add burst to session, then remove burst from database. Try to load
burst and check that it will raise exception and remove it from session.
"""
burst = self._store_burst(self.test_project.id, 'started', {'test': 'test'}, 'burst1')
cherrypy.session[common.KEY_BURST_CONFIG] = burst
burst_id = burst.id
BurstService().cancel_or_remove_burst(burst_id)
self.assertRaises(Exception, self.burst_c.load_burst, burst_id)
self.assertTrue(common.KEY_BURST_CONFIG not in cherrypy.session)
def test_remove_burst_not_session(self):
"""
Test removing a burst that is not the one currently stored in
        session. Should just remove and return a 'done' string.
"""
burst = self._store_burst(self.test_project.id, 'finished', {'test': 'test'}, 'burst1')
cherrypy.session[common.KEY_BURST_CONFIG] = burst
another_burst = self._store_burst(self.test_project.id, 'finished', {'test': 'test'}, 'burst1')
result = self.burst_c.cancel_or_remove_burst(another_burst.id)
self.assertEqual(result, 'done')
def test_remove_burst_in_session(self):
"""
Test that if we remove the burst that is the current one from the
session, we get a 'reset-new' string as result.
"""
burst = self._store_burst(self.test_project.id, 'finished', {'test': 'test'}, 'burst1')
cherrypy.session[common.KEY_BURST_CONFIG] = burst
result = self.burst_c.cancel_or_remove_burst(burst.id)
self.assertEqual(result, 'reset-new')
def _store_burst(self, proj_id, status, sim_config, name):
"""
Create and store a burst entity, for the project given project_id, having the
        given status and simulator params config, under the given name.
"""
burst = BurstConfiguration(proj_id, status, sim_config, name)
burst.prepare_before_save()
return dao.store_entity(burst)
def _burst_create_connectivity(self):
"""
Create a connectivity that will be used in "non-dummy" burst launches (with the actual simulator).
TODO: This is duplicate code from burstservice_test. Should go into the 'generic' DataType factory
once that is done.
"""
meta = {DataTypeMetaData.KEY_SUBJECT: "John Doe", DataTypeMetaData.KEY_STATE: "RAW_DATA"}
algorithm, algo_group = FlowService().get_algorithm_by_module_and_class(SIMULATOR_MODULE, SIMULATOR_CLASS)
self.operation = model.Operation(self.test_user.id, self.test_project.id, algo_group.id,
json.dumps(''),
meta=json.dumps(meta), status=model.STATUS_STARTED,
method_name=ABCAdapter.LAUNCH_METHOD)
self.operation = dao.store_entity(self.operation)
storage_path = FilesHelper().get_project_folder(self.test_project, str(self.operation.id))
connectivity = Connectivity(storage_path=storage_path)
connectivity.weights = numpy.ones((74, 74))
connectivity.centres = numpy.ones((74, 3))
adapter_instance = StoreAdapter([connectivity])
OperationService().initiate_prelaunch(self.operation, adapter_instance, {})
return connectivity
def suite():
"""
Gather all the tests in a test suite.
"""
test_suite = unittest.TestSuite()
    test_suite.addTest(unittest.makeSuite(BurstControllerTest))
return test_suite
if __name__ == "__main__":
#So you can run tests individually.
TEST_RUNNER = unittest.TextTestRunner()
TEST_SUITE = suite()
TEST_RUNNER.run(TEST_SUITE)
| rajul/tvb-framework | tvb/tests/framework/interfaces/web/controllers/burst_controller_test.py | Python | gpl-2.0 | 15,161 |
###############
# Brian Burns
# MATH 238
# x' = ax + by, y' = cx + dy
# system.py
###############
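# Example (hypothetical coefficients): "python system.py 0 1 -1 0"
# solves x' = y, y' = -x, whose general solution is a sine/cosine pair.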
import sys
import math
import sympy as sy
def main():
# make sure we have enough arguments
if len(sys.argv) != 5:
print("Usage: \"python system.py [a] [b] [c] [d]\"")
sys.exit(0)
# grab the arguments
a = float(sys.argv[1])
b = float(sys.argv[2])
c = float(sys.argv[3])
d = float(sys.argv[4])
# initialize sympy functions
t = sy.symbols("t")
x = sy.Function("x")(t)
y = sy.Function("y")(t)
# create x' = ax + by, y' = cx + dy
e1 = sy.Eq(x.diff(t), a*x + b*y)
e2 = sy.Eq(y.diff(t), c*x + d*y)
eqs = (e1, e2)
# solve the system
sol = sy.dsolve(eqs)
print("The solution to the system is:")
    sy.pprint(sy.simplify(sol[0]))
    sy.pprint(sy.simplify(sol[1]))
if __name__ == "__main__":
main()
| brnbrns/DifferentialEquations | system.py | Python | gpl-2.0 | 891 |
from jira.client import JIRA
from jira.client import GreenHopper
from optparse import OptionParser
import time
__author__ = 'mattdyer'
#this class manages creating the individual issues that we are parsing from a file
class IssueManager:
## The constructor
# @param self The object pointer
# @param options The option hash
def __init__(self, options):
self.__jiraUser = options.user
self.__jiraPassword = options.password
self.__jiraURL = options.site
self.startSession()
## Create the connection
    # @param self The object pointer
def startSession(self):
#now make the connection
options = {
'server':self.__jiraURL,
'verify':False
}
self.__jira = JIRA(options, basic_auth=(self.__jiraUser, self.__jiraPassword))
self.__greenhopper = GreenHopper(options, basic_auth=(self.__jiraUser, self.__jiraPassword))
## Kill the jira connection
# @param self The object pointer
def killSession(self):
self.__jira.kill_session()
self.__greenhopper.kill_session()
## Add the epic link to an issue
# @param self The object pointer
# @param issue The issue ID
# @param epic The epic ID
def attachEpic(self, issue, epic):
#attach the epic
self.__greenhopper.add_issues_to_epic(epic, [issue])
## Create an issue set by calling the createIssue and createSubtask methods
# @param self The object pointer
# @param options The option dictionary
# @returns A dictionary of issues that were created
def createIssueSet(self, options):
#dictionary to store jira issues
issues = {}
#set up the description
description = '<h3>User Experience</h3>%s<h3>Acceptance Criteria</h3><ul><li></li></ul>' % (options.description)
#create the parent issue
parentID = self.createIssue(options.story, 'Story', description, options)
issues[parentID] = '%s\t%s\t%s' % (parentID, 'Story', options.story)
#create the tasks for development and testing depending on the product
for specificProduct in options.product.split(';'):
issue1 = self.createIssue('Implementation (%s): %s' % (specificProduct, options.story), 'Implement', '', options)
issues[issue1] = '%s\t%s\t%s' % (issue1, 'Implement', options.story)
issue2 = self.createIssue('Create Unit Tests (%s): %s' % (specificProduct, options.story), 'Unit Test', '', options)
issues[issue2] = '%s\t%s\t%s' % (issue2, 'Unit Test', options.story)
issue3 = self.createIssue('Verification (%s): %s' % (specificProduct, options.story), 'Verification Test', '', options)
issues[issue3] = '%s\t%s\t%s' % (issue3, 'Verification Test', options.story)
#create the links
self.linkIssues(parentID, issue1, 'Develop')
self.linkIssues(parentID, issue2, 'Verify')
self.linkIssues(parentID, issue3, 'Verify')
#print the ids
return(parentID, issues)
## Create a new issue
# @param self The object pointer
# @param summary The summary of the issue
# @param description The description of the issue
# @param issueType The type of the issue
# @param options The option dictionary
# @returns The JIRA issue identifier
def createIssue(self, summary, issueType, description, options):
#create an issue by setting up the dictionary
issueDict = {
#'assignee': {'name':'Unassigned'},
'project': {'key':options.project},
'priority' : {'name':options.priority},
'summary': summary,
'description': description,
'issuetype' : {'name':issueType},
'labels':[
'AddedViaAPI',
'APISetFixVersion'
],
}
#set up software / hardware product type
#if we list more than one product that set the product flag to multiple
productLabel = options.product
if(';' in options.product):
productLabel = 'Multiple'
#see if we are hardware of software
if options.type == 'Hardware':
#hardware product
issueDict['customfield_11200'] = {'value':productLabel}
else:
#software product
issueDict['customfield_11100'] = {'value':productLabel}
        #if it is a story type then we want to add a label for acceptance criteria too
if issueType == 'Story':
issueDict['labels'].append('NeedAcceptanceCriteria')
#add the components if there are any
if(not options.components == ''):
issueDict['components'] = self.addComponents(options.components)
#now create the issue
print issueDict
newIssue = self.__jira.create_issue(fields=issueDict)
#return the id
return(newIssue.key)
## Link two issues
# @param self The object pointer
# @param jiraID1 The JIRA id of the first issue
# @param jiraID2 The JIRA id of the second issue
# @param linkType The type of connect
def linkIssues(self, jiraID1, jiraID2, linkType):
#now link the two issues
print "Linking %s and %s" % (jiraID1, jiraID2)
self.__jira.create_issue_link(type=linkType, inwardIssue=jiraID2, outwardIssue=jiraID1)
## Create an array from a ";"-separated list, used for populating components
# @param self The object pointer
# @param componentString The string to be parsed
# @returns The array of components
def addComponents(self, componentString):
tokens = componentString.split(';')
components = []
#populate the array
for token in tokens:
components.append( {'name':token} )
return(components)
#start here when the script is launched
if (__name__ == '__main__'):
#set up the option parser
parser = OptionParser()
#add the options to parse
#script options
parser.add_option('-a', '--type', dest='type', help='The story type, Software or Hardware')
#task options
parser.add_option('-c', '--components', dest='components', help='The ;-delimited list of components')
parser.add_option('-d', '--product', dest='product', help='The software product to attach the story too')
parser.add_option('-e', '--epic', dest='epic', help='The epic ID')
parser.add_option('-n', '--description', dest='description', help='The story description', default='')
parser.add_option('-r', '--req', dest='requirement', help='The requirement ID')
parser.add_option('-t', '--story', dest='story', help='The story you want to create')
parser.add_option('-v', '--version', dest='version', help='The fix version')
parser.add_option('-x', '--project', dest='project', help='The JIRA project')
parser.add_option('-y', '--priority', dest='priority', help='The priority of the story')
#jira options
parser.add_option('-p', '--password', dest='password', help='The JIRA user password')
parser.add_option('-s', '--site', dest='site', help='The JIRA site URL including the https:// part')
parser.add_option('-u', '--user', dest='user', help='The JIRA user')
(options, args) = parser.parse_args()
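    # Example invocation (all values hypothetical):
    #   python add_story_to_jira.py -u jdoe -p secret -s https://jira.example.com \
    #       -x PROJ -a Software -d "Product A" -y Major -t "New story" \
    #       -n "As a user ..." -r PROJ-10 -e PROJ-20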
#set up the issue manager
manager = IssueManager(options)
#create the issue and implement / test tasks
issueID, issues = manager.createIssueSet(options)
#link to the requirement / epic
manager.linkIssues(issueID, options.requirement, 'Requirement')
manager.attachEpic(issueID, options.epic)
#kill the connection
manager.killSession()
| dyermd/legos | scripts/add_story_to_jira.py | Python | gpl-2.0 | 7,685 |
##########################################################################
# #
# cp.py #
# by Shane #
# #
# Copies a file #
# #
##########################################################################
def main():
s = input("Enter source file: ")
source = open(s, "r")
d = input("Enter destination: ")
destination = open(d, "w")
for line in source:
destination.write(line)
source.close()
destination.close()
main()
| shaneatdit/OOP | cp.py | Python | gpl-2.0 | 855 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('dept', '0003_auto_20150518_0845'),
]
operations = [
migrations.AddField(
model_name='department',
name='DeptIntake',
field=models.IntegerField(default=60),
),
]
| bpain2010/kgecweb | dept/migrations/0004_department_deptintake.py | Python | gpl-2.0 | 405 |
# -*- coding: utf-8 -*-
"""
/***************************************************************************
A QGIS plugin
CaigosConnector: Connect CAIGOS-GIS with QGIS
copyright : (C) 2019 by EZUSoft
email : qgis (at) makobo.de
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
try:
from fnc4all import *
from fnc4CaigosConnector import *
except:
from .fnc4all import *
from .fnc4CaigosConnector import *
def EZU366C2CC3BAD145709B8EEEB611D1D6AA():
return "CAIGOS-Konnektor" + str(myqtVersion)
def EZU5067C1BD7E924D33B7D7B27226119B84():
return "V " + EZUF9FB4AE0A2B44C8B8313441BFB307407()
def EZUDDCC484E3DC3474889FE69ED76A61E8F():
if (os.path.exists(os.path.dirname(__file__) + '/00-debug.txt')):
return True
else:
return False
def EZU11DE7CED39F2439E803B738E6E678716():
s = QSettings( "EZUSoft", EZU366C2CC3BAD145709B8EEEB611D1D6AA() )
s.setValue( "-id-", EZU6F6315D895BC410ABCE5C02C6E0C5F14((EZU366C2CC3BAD145709B8EEEB611D1D6AA() + "ID=%02i%02i%02i%02i%02i%02i") % (time.localtime()[0:6])) )
    return s.value( "-id-", "" )
def tr( message):
return message
def EZUAC62A428AD734562A807B0FF8D792A61(intCG = None,sStatus = None):
s = QSettings( "EZUSoft", EZU366C2CC3BAD145709B8EEEB611D1D6AA() )
sVersion = "-"
if not sStatus:
sStatus = " - Onlineversion"
if (s.value( "status","")==''):
sStatus = " - Offlineversion"
else:
if (s.value( "status","")!=b'ok'):
sStatus = " - Demoversion"
if not intCG :
intCG = int(s.value( "cgversion",-1))
if intCG == 0:
sVersion = "11.2"
if intCG == 1:
sVersion = "2016-2019"
return u"CAIGOS Importer für Version " + sVersion + " (PlugIn Version " + EZU5067C1BD7E924D33B7D7B27226119B84() + ")" + sStatus
if __name__ == "__main__":
pass
| EZUSoft/CaigosConnector | fnc4CaigosConnector.py | Python | gpl-2.0 | 2,684 |
# -*- coding: utf-8 -*-
# Copyright 2018-2021 Mike Fährmann
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
"""Execute processes"""
from .common import PostProcessor
from .. import util, formatter
import subprocess
if util.WINDOWS:
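    # shlex.quote is POSIX-only, so approximate double-quote escaping on Windows.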
def quote(s):
return '"' + s.replace('"', '\\"') + '"'
else:
from shlex import quote
class ExecPP(PostProcessor):
def __init__(self, job, options):
PostProcessor.__init__(self, job)
if options.get("async", False):
self._exec = self._exec_async
args = options["command"]
if isinstance(args, str):
self.args = args
execute = self.exec_string
else:
self.args = [formatter.parse(arg) for arg in args]
execute = self.exec_list
events = options.get("event")
if events is None:
events = ("after",)
elif isinstance(events, str):
events = events.split(",")
job.register_hooks({event: execute for event in events}, options)
def exec_list(self, pathfmt, status=None):
if status:
return
kwdict = pathfmt.kwdict
kwdict["_directory"] = pathfmt.realdirectory
kwdict["_filename"] = pathfmt.filename
kwdict["_path"] = pathfmt.realpath
args = [arg.format_map(kwdict) for arg in self.args]
self._exec(args, False)
def exec_string(self, pathfmt, status=None):
if status:
return
if status is None and pathfmt.realpath:
args = self.args.replace("{}", quote(pathfmt.realpath))
else:
args = self.args.replace("{}", quote(pathfmt.realdirectory))
self._exec(args, True)
def _exec(self, args, shell):
self.log.debug("Running '%s'", args)
retcode = subprocess.Popen(args, shell=shell).wait()
if retcode:
self.log.warning("'%s' returned with non-zero exit status (%d)",
args, retcode)
def _exec_async(self, args, shell):
self.log.debug("Running '%s'", args)
subprocess.Popen(args, shell=shell)
__postprocessor__ = ExecPP
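# Example configuration entry (a sketch inferred from the options read above):
#   {"name": "exec", "command": "echo {}", "event": "after", "async": false}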
| mikf/gallery-dl | gallery_dl/postprocessor/exec.py | Python | gpl-2.0 | 2,293 |
"""
Constants used in ABED
"""
#: Default filename for the abed config file.
CONFIG_FILENAME = "abed_conf.py"
#: Default dataset directory name
DATASET_DIRNAME = "datasets"
#: Default executables directory name
EXECS_DIRNAME = "execs"
#: Default tasksfile filename
TASKS_FILENAME = "abed_tasks.txt"
#: Default auto filename
AUTO_FILENAME = "abed_auto.txt"
| GjjvdBurg/ABED | abed/constants.py | Python | gpl-2.0 | 362 |
# Copyright (C) 2013 Stanislav Golovanov <[email protected]>
# Google Inc.
#
# This file is part of ycmd.
#
# ycmd is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ycmd is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ycmd. If not, see <http://www.gnu.org/licenses/>.
from ycmd.completers.completer import Completer
from ycmd.completers.all.identifier_completer import IdentifierCompleter
from ycmd.completers.general.filename_completer import FilenameCompleter
from ycmd.completers.general.ultisnips_completer import UltiSnipsCompleter
class GeneralCompleterStore( Completer ):
"""
Holds a list of completers that can be used in all filetypes.
  It overrides all Completer API methods so that specific calls to
GeneralCompleterStore are passed to all general completers.
"""
def __init__( self, user_options ):
super( GeneralCompleterStore, self ).__init__( user_options )
self._identifier_completer = IdentifierCompleter( user_options )
self._filename_completer = FilenameCompleter( user_options )
self._ultisnips_completer = UltiSnipsCompleter( user_options )
self._non_filename_completers = [ self._identifier_completer ]
if user_options.get( 'use_ultisnips_completer', True ):
self._non_filename_completers.append( self._ultisnips_completer )
self._all_completers = [ self._identifier_completer,
self._filename_completer,
self._ultisnips_completer ]
self._current_query_completers = []
def SupportedFiletypes( self ):
return set()
def GetIdentifierCompleter( self ):
return self._identifier_completer
def ShouldUseNow( self, request_data ):
self._current_query_completers = []
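    # The filename completer takes precedence: when it triggers, no other
    # general completer is queried for this request.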
if self._filename_completer.ShouldUseNow( request_data ):
self._current_query_completers = [ self._filename_completer ]
return True
should_use_now = False
for completer in self._non_filename_completers:
should_use_this_completer = completer.ShouldUseNow( request_data )
should_use_now = should_use_now or should_use_this_completer
if should_use_this_completer:
self._current_query_completers.append( completer )
return should_use_now
def ComputeCandidates( self, request_data ):
if not self.ShouldUseNow( request_data ):
return []
candidates = []
for completer in self._current_query_completers:
candidates += completer.ComputeCandidates( request_data )
return candidates
def OnFileReadyToParse( self, request_data ):
for completer in self._all_completers:
completer.OnFileReadyToParse( request_data )
def OnBufferVisit( self, request_data ):
for completer in self._all_completers:
completer.OnBufferVisit( request_data )
def OnBufferUnload( self, request_data ):
for completer in self._all_completers:
completer.OnBufferUnload( request_data )
def OnInsertLeave( self, request_data ):
for completer in self._all_completers:
completer.OnInsertLeave( request_data )
def OnCurrentIdentifierFinished( self, request_data ):
for completer in self._all_completers:
completer.OnCurrentIdentifierFinished( request_data )
def GettingCompletions( self ):
for completer in self._all_completers:
completer.GettingCompletions()
def Shutdown( self ):
for completer in self._all_completers:
completer.Shutdown()
| NorfairKing/sus-depot | shared/shared/vim/dotvim/bundle/YouCompleteMe/third_party/ycmd/ycmd/completers/general/general_completer_store.py | Python | gpl-2.0 | 3,879 |
"""Multiple-producer-multiple-consumer signal-dispatching
dispatcher is the core of the PyDispatcher system,
providing the primary API and the core logic for the
system.
Module attributes of note:
Any -- Singleton used to signal either "Any Sender" or
"Any Signal". See documentation of the _Any class.
Anonymous -- Singleton used to signal "Anonymous Sender"
See documentation of the _Anonymous class.
Internal attributes:
WEAKREF_TYPES -- tuple of types/classes which represent
weak references to receivers, and thus must be de-
referenced on retrieval to retrieve the callable
object
connections -- { senderkey (id) : { signal : [receivers...]}}
senders -- { senderkey (id) : weakref(sender) }
used for cleaning up sender references on sender
deletion
sendersBack -- { receiverkey (id) : [senderkey (id)...] }
used for cleaning up receiver references on receiver
deletion, (considerably speeds up the cleanup process
vs. the original code.)
"""
from __future__ import generators
import types, weakref
import saferef, robustapply, errors
__author__ = "Patrick K. O'Brien <[email protected]>"
__cvsid__ = "$Id: dispatcher.py,v 1.9 2005/09/17 04:55:57 mcfletch Exp $"
__version__ = "$Revision: 1.9 $"[11:-2]
try:
True
except NameError:
True = 1==1
False = 1==0
class _Parameter:
"""Used to represent default parameter values."""
def __repr__(self):
return self.__class__.__name__
class _Any(_Parameter):
"""Singleton used to signal either "Any Sender" or "Any Signal"
The Any object can be used with connect, disconnect,
send, or sendExact to signal that the parameter given
Any should react to all senders/signals, not just
a particular sender/signal.
"""
Any = _Any()
class _Anonymous(_Parameter):
"""Singleton used to signal "Anonymous Sender"
The Anonymous object is used to signal that the sender
of a message is not specified (as distinct from being
"any sender"). Registering callbacks for Anonymous
will only receive messages sent without senders. Sending
with anonymous will only send messages to those receivers
registered for Any or Anonymous.
Note:
The default sender for connect is Any, while the
default sender for send is Anonymous. This has
the effect that if you do not specify any senders
in either function then all messages are routed
as though there was a single sender (Anonymous)
being used everywhere.
"""
Anonymous = _Anonymous()
WEAKREF_TYPES = (weakref.ReferenceType, saferef.BoundMethodWeakref)
connections = {}
senders = {}
sendersBack = {}
def connect(receiver, signal=Any, sender=Any, weak=True):
"""Connect receiver to sender for signal
receiver -- a callable Python object which is to receive
messages/signals/events. Receivers must be hashable
objects.
if weak is True, then receiver must be weak-referencable
(more precisely saferef.safeRef() must be able to create
a reference to the receiver).
Receivers are fairly flexible in their specification,
as the machinery in the robustApply module takes care
of most of the details regarding figuring out appropriate
subsets of the sent arguments to apply to a given
receiver.
Note:
if receiver is itself a weak reference (a callable),
it will be de-referenced by the system's machinery,
so *generally* weak references are not suitable as
receivers, though some use might be found for the
facility whereby a higher-level library passes in
pre-weakrefed receiver references.
signal -- the signal to which the receiver should respond
if Any, receiver will receive any signal from the
indicated sender (which might also be Any, but is not
necessarily Any).
Otherwise must be a hashable Python object other than
None (DispatcherError raised on None).
sender -- the sender to which the receiver should respond
if Any, receiver will receive the indicated signals
from any sender.
if Anonymous, receiver will only receive indicated
signals from send/sendExact which do not specify a
sender, or specify Anonymous explicitly as the sender.
Otherwise can be any python object.
weak -- whether to use weak references to the receiver
By default, the module will attempt to use weak
references to the receiver objects. If this parameter
is false, then strong references will be used.
returns None, may raise DispatcherTypeError
"""
if signal is None:
raise errors.DispatcherTypeError(
'Signal cannot be None (receiver=%r sender=%r)'%( receiver,sender)
)
if weak:
receiver = saferef.safeRef(receiver, onDelete=_removeReceiver)
senderkey = id(sender)
if connections.has_key(senderkey):
signals = connections[senderkey]
else:
connections[senderkey] = signals = {}
# Keep track of senders for cleanup.
# Is Anonymous something we want to clean up?
if sender not in (None, Anonymous, Any):
def remove(object, senderkey=senderkey):
_removeSender(senderkey=senderkey)
# Skip objects that can not be weakly referenced, which means
# they won't be automatically cleaned up, but that's too bad.
try:
weakSender = weakref.ref(sender, remove)
senders[senderkey] = weakSender
except:
pass
receiverID = id(receiver)
# get current set, remove any current references to
# this receiver in the set, including back-references
if signals.has_key(signal):
receivers = signals[signal]
_removeOldBackRefs(senderkey, signal, receiver, receivers)
else:
receivers = signals[signal] = []
try:
current = sendersBack.get( receiverID )
if current is None:
sendersBack[ receiverID ] = current = []
if senderkey not in current:
current.append(senderkey)
except:
pass
receivers.append(receiver)
def disconnect(receiver, signal=Any, sender=Any, weak=True):
"""Disconnect receiver from sender for signal
receiver -- the registered receiver to disconnect
signal -- the registered signal to disconnect
sender -- the registered sender to disconnect
weak -- the weakref state to disconnect
disconnect reverses the process of connect,
the semantics for the individual elements are
logically equivalent to a tuple of
(receiver, signal, sender, weak) used as a key
to be deleted from the internal routing tables.
(The actual process is slightly more complex
but the semantics are basically the same).
Note:
Using disconnect is not required to cleanup
routing when an object is deleted, the framework
will remove routes for deleted objects
automatically. It's only necessary to disconnect
if you want to stop routing to a live object.
returns None, may raise DispatcherTypeError or
DispatcherKeyError
"""
if signal is None:
raise errors.DispatcherTypeError(
'Signal cannot be None (receiver=%r sender=%r)'%( receiver,sender)
)
if weak: receiver = saferef.safeRef(receiver)
senderkey = id(sender)
try:
signals = connections[senderkey]
receivers = signals[signal]
except KeyError:
raise errors.DispatcherKeyError(
"""No receivers found for signal %r from sender %r""" %(
signal,
sender
)
)
try:
# also removes from receivers
_removeOldBackRefs(senderkey, signal, receiver, receivers)
except ValueError:
raise errors.DispatcherKeyError(
"""No connection to receiver %s for signal %s from sender %s""" %(
receiver,
signal,
sender
)
)
_cleanupConnections(senderkey, signal)
def getReceivers( sender = Any, signal = Any ):
"""Get list of receivers from global tables
This utility function allows you to retrieve the
raw list of receivers from the connections table
for the given sender and signal pair.
Note:
there is no guarantee that this is the actual list
stored in the connections table, so the value
should be treated as a simple iterable/truth value
rather than, for instance a list to which you
might append new records.
Normally you would use liveReceivers( getReceivers( ...))
to retrieve the actual receiver objects as an iterable
object.
"""
try:
return connections[id(sender)][signal]
except KeyError:
return []
def liveReceivers(receivers):
"""Filter sequence of receivers to get resolved, live receivers
This is a generator which will iterate over
the passed sequence, checking for weak references
and resolving them, then returning all live
receivers.
"""
for receiver in receivers:
if isinstance( receiver, WEAKREF_TYPES):
# Dereference the weak reference.
receiver = receiver()
if receiver is not None:
yield receiver
else:
yield receiver
def getAllReceivers( sender = Any, signal = Any ):
"""Get list of all receivers from global tables
This gets all receivers which should receive
the given signal from sender, each receiver should
be produced only once by the resulting generator
"""
receivers = {}
for set in (
# Get receivers that receive *this* signal from *this* sender.
getReceivers( sender, signal ),
# Add receivers that receive *any* signal from *this* sender.
getReceivers( sender, Any ),
# Add receivers that receive *this* signal from *any* sender.
getReceivers( Any, signal ),
# Add receivers that receive *any* signal from *any* sender.
getReceivers( Any, Any ),
):
for receiver in set:
if receiver: # filter out dead instance-method weakrefs
try:
if not receivers.has_key( receiver ):
receivers[receiver] = 1
yield receiver
except TypeError:
# dead weakrefs raise TypeError on hash...
pass
def send(signal=Any, sender=Anonymous, *arguments, **named):
"""Send signal from sender to all connected receivers.
signal -- (hashable) signal value, see connect for details
sender -- the sender of the signal
if Any, only receivers registered for Any will receive
the message.
if Anonymous, only receivers registered to receive
messages from Anonymous or Any will receive the message
Otherwise can be any python object (normally one
registered with a connect if you actually want
something to occur).
arguments -- positional arguments which will be passed to
*all* receivers. Note that this may raise TypeErrors
if the receivers do not allow the particular arguments.
Note also that arguments are applied before named
arguments, so they should be used with care.
named -- named arguments which will be filtered according
to the parameters of the receivers to only provide those
acceptable to the receiver.
Return a list of tuple pairs [(receiver, response), ... ]
if any receiver raises an error, the error propagates back
through send, terminating the dispatch loop, so it is quite
    possible to not have all receivers called if one raises an
error.
"""
# Call each receiver with whatever arguments it can accept.
# Return a list of tuple pairs [(receiver, response), ... ].
responses = []
for receiver in liveReceivers(getAllReceivers(sender, signal)):
response = robustapply.robustApply(
receiver,
signal=signal,
sender=sender,
*arguments,
**named
)
responses.append((receiver, response))
return responses
def sendExact( signal=Any, sender=Anonymous, *arguments, **named ):
"""Send signal only to those receivers registered for exact message
sendExact allows for avoiding Any/Anonymous registered
handlers, sending only to those receivers explicitly
registered for a particular signal on a particular
sender.
"""
responses = []
for receiver in liveReceivers(getReceivers(sender, signal)):
response = robustapply.robustApply(
receiver,
signal=signal,
sender=sender,
*arguments,
**named
)
responses.append((receiver, response))
return responses
def _removeReceiver(receiver):
"""Remove receiver from connections."""
if not sendersBack:
# During module cleanup the mapping will be replaced with None
return False
backKey = id(receiver)
for senderkey in sendersBack.get(backKey,()):
try:
signals = connections[senderkey].keys()
except KeyError,err:
pass
else:
for signal in signals:
try:
receivers = connections[senderkey][signal]
except KeyError:
pass
else:
try:
receivers.remove( receiver )
except Exception, err:
pass
_cleanupConnections(senderkey, signal)
try:
del sendersBack[ backKey ]
except KeyError:
pass
def _cleanupConnections(senderkey, signal):
"""Delete any empty signals for senderkey. Delete senderkey if empty."""
try:
receivers = connections[senderkey][signal]
except:
pass
else:
if not receivers:
# No more connected receivers. Therefore, remove the signal.
try:
signals = connections[senderkey]
except KeyError:
pass
else:
del signals[signal]
if not signals:
# No more signal connections. Therefore, remove the sender.
_removeSender(senderkey)
def _removeSender(senderkey):
"""Remove senderkey from connections."""
_removeBackrefs(senderkey)
try:
del connections[senderkey]
except KeyError:
pass
# Senderkey will only be in senders dictionary if sender
# could be weakly referenced.
try:
del senders[senderkey]
except:
pass
def _removeBackrefs( senderkey):
"""Remove all back-references to this senderkey"""
try:
signals = connections[senderkey]
except KeyError:
signals = None
else:
items = signals.items()
def allReceivers( ):
for signal,set in items:
for item in set:
yield item
for receiver in allReceivers():
_killBackref( receiver, senderkey )
def _removeOldBackRefs(senderkey, signal, receiver, receivers):
    """Kill old sendersBack references from receiver
    This guards against multiple registration of the same
    receiver for a given signal and sender leaking memory
    as old back reference records build up.
    Also removes old receiver instance from receivers
    """
    try:
        index = receivers.index(receiver)
        # need to scan back references here and remove senderkey
    except ValueError:
        return False
    else:
        oldReceiver = receivers[index]
        del receivers[index]
        found = 0
        # connections maps senderkey -> {signal: receivers}, so scan this
        # sender's other signals for remaining registrations of oldReceiver.
        signals = connections.get(senderkey)
        if signals is not None:
            for sig, recs in signals.iteritems():
                if sig != signal:
                    for rec in recs:
                        if rec is oldReceiver:
                            found = 1
                            break
        if not found:
            _killBackref( oldReceiver, senderkey )
            return True
        return False
def _killBackref( receiver, senderkey ):
    """Do the actual removal of back reference from receiver to senderkey"""
    receiverkey = id(receiver)
    senderkeys = sendersBack.get( receiverkey, () )
    while senderkey in senderkeys:
        try:
            senderkeys.remove( senderkey )
        except (KeyError, ValueError):
            break
    if not senderkeys:
        try:
            del sendersBack[ receiverkey ]
        except KeyError:
            pass
    return True
| fregaham/DISP | sqlobject/include/pydispatch/dispatcher.py | Python | gpl-2.0 | 14,593 |
from django.contrib.auth import get_user_model
from django.utils.translation import ugettext_lazy as _
from misago.conf import settings
from misago.core import forms
from ..models import AUTO_SUBSCRIBE_CHOICES, PRIVATE_THREAD_INVITES_LIMITS_CHOICES
from ..validators import validate_email, validate_password
class ForumOptionsForm(forms.ModelForm):
is_hiding_presence = forms.YesNoSwitch()
limits_private_thread_invites_to = forms.TypedChoiceField(
coerce=int, choices=PRIVATE_THREAD_INVITES_LIMITS_CHOICES)
subscribe_to_started_threads = forms.TypedChoiceField(
coerce=int, choices=AUTO_SUBSCRIBE_CHOICES)
subscribe_to_replied_threads = forms.TypedChoiceField(
coerce=int, choices=AUTO_SUBSCRIBE_CHOICES)
class Meta:
model = get_user_model()
fields = [
'is_hiding_presence',
'limits_private_thread_invites_to',
'subscribe_to_started_threads',
'subscribe_to_replied_threads'
]
class EditSignatureForm(forms.ModelForm):
signature = forms.CharField(required=False)
class Meta:
model = get_user_model()
fields = ['signature']
def clean(self):
data = super(EditSignatureForm, self).clean()
if len(data.get('signature', '')) > settings.signature_length_max:
raise forms.ValidationError(_("Signature is too long."))
return data
class ChangePasswordForm(forms.Form):
password = forms.CharField(max_length=200)
new_password = forms.CharField(max_length=200)
def __init__(self, *args, **kwargs):
self.user = kwargs.pop('user', None)
super(ChangePasswordForm, self).__init__(*args, **kwargs)
    def clean_password(self):
        if not self.user.check_password(self.cleaned_data['password']):
            raise forms.ValidationError(_("Entered password is invalid."))
        return self.cleaned_data['password']
def clean_new_password(self):
data = self.cleaned_data['new_password']
validate_password(data)
return data
class ChangeEmailForm(forms.Form):
password = forms.CharField(max_length=200)
new_email = forms.CharField(max_length=200)
def __init__(self, *args, **kwargs):
self.user = kwargs.pop('user', None)
super(ChangeEmailForm, self).__init__(*args, **kwargs)
    def clean_password(self):
        if not self.user.check_password(self.cleaned_data['password']):
            raise forms.ValidationError(_("Entered password is invalid."))
        return self.cleaned_data['password']
def clean_new_email(self):
data = self.cleaned_data['new_email']
if not data:
message = _("You have to enter new e-mail address.")
raise forms.ValidationError(message)
if data.lower() == self.user.email.lower():
message = _("New e-mail is same as current one.")
raise forms.ValidationError(message)
validate_email(data)
return data
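# Usage sketch (illustrative only; ``request`` stands for a Django request --
# these forms expect the acting user via the ``user`` keyword argument):
#     form = ChangePasswordForm(request.POST, user=request.user)
#     if form.is_valid():
#         request.user.set_password(form.cleaned_data['new_password'])
#         request.user.save()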
| 1905410/Misago | misago/users/forms/options.py | Python | gpl-2.0 | 2,957 |
#!/usr/bin/env python
#coding=utf8
'''
Given integers n and k, find the lexicographically k-th smallest integer in the range from 1 to n.
Note: 1 ≤ k ≤ n ≤ 10^9.
Example:
Input:
n: 13 k: 2
Output:
10
Explanation:
The lexicographical order is [1, 10, 11, 12, 13, 2, 3, 4, 5, 6, 7, 8, 9], so the second smallest number is 10.
@author: Chauncey
beat 5.56%
'''
import datetime
class Solution(object):
def findKthNumber(self, n, k):
"""
:type n: int
:type k: int
:rtype: int
"""
if k<=0 or n<k:
return 0
memo = [0]
power = [1]
test = n
cnt = 0
pws = 1
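        # memo[i] = 1 + 10 + ... + 10**(i-1), the node count of a full 10-ary
        # prefix tree with i digit positions; power[i] = 10**i.  Both lists
        # grow to the number of digits in n.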
while test > 0:
test /= 10
cnt *= 10
cnt += 1
memo.append(cnt)
pws *= 10
power.append(pws)
bitcnt = len(memo) - 1
res = []
self.findKthNumberHelper(n+memo[bitcnt], k+memo[bitcnt], memo, power, bitcnt, res)
num = 0
for i in res:
num *= 10
num += i
return num
def findKthNumberHelper(self, n, k, memo, power, bitcnt, res):
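        # Rough idea: fix one digit of the answer per recursion level.  mm[i]
        # counts how many numbers in the remaining range start with digit i at
        # the current position; skip whole digit subtrees while k exceeds
        # their size, then descend into the subtree holding the k-th number.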
#print n, k, memo, power, bitcnt
if n < 10:
res.append(k-1)
return
firstbit = (n-memo[bitcnt]+1) / power[bitcnt-1]
#print n-memo[bitcnt]+1, power[bitcnt-1], firstbit
if firstbit > 9:
firstbit = 9
mm = [0] * 10
for i in xrange(firstbit+1, 10):
mm[i] = memo[bitcnt-1]
for i in xrange(firstbit):
mm[i] = memo[bitcnt]
mm[firstbit] = n - sum(mm)
lsum = sum(mm[:firstbit])
rsum = sum(mm[firstbit+1:])
idx = 0
while idx<10:
if k > mm[idx]:
k -= mm[idx]
else:
break
idx+=1
#print mm, k, idx, idx*power[bitcnt-1]
#print
new_n = n-sum(mm[:idx])-sum(mm[idx+1:])-1
res.append(idx)
if k != 1:
self.findKthNumberHelper(new_n, k-1, memo, power, bitcnt-1, res)
if __name__ == '__main__':
solution = Solution()
start_time = datetime.datetime.now()
print solution.findKthNumber(13, 2)
#10, [1, 10, 11, 12, 13, 2, 3, 4, 5, 6, 7, 8, 9]
print solution.findKthNumber(13, 8)
#4, [1, 10, 11, 12, 13, 2, 3, 4, 5, 6, 7, 8, 9]
print solution.findKthNumber(13, 1)
#1, [1, 10, 11, 12, 13, 2, 3, 4, 5, 6, 7, 8, 9]
print solution.findKthNumber(23, 13)
#20, [1, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 2, 20, 21, 22, 23, 3, 4, 5, 6, 7, 8, 9]
print solution.findKthNumber(23, 15)
#22, [1, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 2, 20, 21, 22, 23, 3, 4, 5, 6, 7, 8, 9]
print solution.findKthNumber(4289384, 1922239)
print solution.findKthNumber(1, 1)
#1
print solution.findKthNumber(10000, 10000)
#9999
elapsed = datetime.datetime.now() - start_time
print 'elapsed:', elapsed.total_seconds() | mornsun/javascratch | src/topcoder.py/LC_440_K-th_Smallest_in_Lexicographical_Order.py | Python | gpl-2.0 | 3,037 |
# -*- coding: utf-8 -*-
# Several spheres fall down into a box.
# Their total weight is measured and compared with the precalculated mass of the particles.
from yade import utils,pack,geom
tc=0.001
en=.003
es=.003
frictionAngle=radians(35)
density=2300
params=utils.getViscoelasticFromSpheresInteraction(tc,en,es)
defMat=O.materials.append(ViscElMat(density=density,frictionAngle=frictionAngle,**params)) # **params sets kn, cn, ks, cs
O.dt=.1*tc # time step
rad=0.2 # particle radius
tolerance = 0.0001
SpheresID=[]
SpheresID+=O.bodies.append(pack.regularHexa(pack.inSphere((Vector3(0.0,0.0,0.0)),0.5),radius=rad,gap=rad*0.5,material=defMat))
floorId=[]
floorId+=O.bodies.append(geom.facetBox((0,0,0),(0.6,0.6,0.6),material=defMat)) #Floor
#Calculate the weight of spheres
sphMass = utils.getSpheresVolume()*density*9.81
# Create engines
O.engines=[
ForceResetter(),
InsertionSortCollider([Bo1_Sphere_Aabb(),Bo1_Facet_Aabb()]),
InteractionLoop(
[Ig2_Sphere_Sphere_ScGeom(), Ig2_Facet_Sphere_ScGeom()],
[Ip2_ViscElMat_ViscElMat_ViscElPhys()],
[Law2_ScGeom_ViscElPhys_Basic()],
),
NewtonIntegrator(damping=0.0,gravity=[0.0,0.0,-9.81])
]
O.run(30000)
O.wait()
curForce = utils.sumForces(ids=floorId,direction=Vector3(0,0,1))*(-1)
print ("Precalculated weight %f" % sphMass)
print ("Obtained weight %f" % curForce)
if abs((sphMass-curForce)/curForce) > tolerance:
	resultStatus += 1
| ThomasSweijen/yadesolute2 | scripts/checks-and-tests/checks/checkWeight.py | Python | gpl-2.0 | 1,385 |
# -*- coding: utf-8 -*-
# Dropbox appModule for NVDA
# Improves accessibility of the Dropbox Metro app for Windows 8
# Copyright (C) 2015 Filaos, Patrick ZAJDA <[email protected]> and other contributors
# Windows 8 compatibility contributed by David Parduhn <[email protected]>
# This file is covered by the GNU General Public License.
# You can read the licence by clicking Help->Licence in the NVDA menu
# or by visiting http://www.gnu.org/licenses/old-licenses/gpl-2.0.html
import addonHandler
import appModuleHandler
from NVDAObjects import UIA
import controlTypes
# We keep it in the case it would finally be necessary to change add-on summary by language
# _addonDir = os.path.join(os.path.dirname(__file__), "..").decode("mbcs")
# _curAddon = addonHandler.Addon(_addonDir)
# _addonSummary = _curAddon.manifest['summary']
# We initialize translations
addonHandler.initTranslation()
# Class for Dropbox items in the metro app
class dropboxitem(UIA.ListItem):
    def _get_name(self):
        # Dropbox list items expose their details as a series of static-text
        # children; join their names so NVDA announces the whole row.
        dbxList = list()
        obj = self.firstChild
        if obj.name != u'':
            dbxList.append(obj.name)
        while obj != self.lastChild:
            obj = obj.next
            if obj.role == controlTypes.ROLE_STATICTEXT and obj.name != u'':
                dbxList.append(obj.name)
        return '; '.join(dbxList)
class AppModule(appModuleHandler.AppModule):
# We set the scripts category shown on input gestures dialog
# scriptCategory = unicode(_addonSummary)
scriptCategory = u"Dropbox"
def chooseNVDAObjectOverlayClasses(self, obj, CLSList):
if obj.role == controlTypes.ROLE_LISTITEM:
CLSList.insert(0, dropboxitem)
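        # NVDA calls chooseNVDAObjectOverlayClasses for each object created in
        # this application; inserting dropboxitem at index 0 gives it
        # precedence for list items.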
| pzajda/dropbox | addon/appModules/dropbox.py | Python | gpl-2.0 | 1,581 |
# -*- coding: utf-8 -*-
# Copyright (C) 2006 Marek Schmidt
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from event import Event
from pobject import *
class ApplicationBase (PRoot):
"""
    Application entry point.  This class is meant to be subclassed by
    front-ends, which add the concrete functionality.
"""
def __init__(self):
PRoot.__init__ (self)
        # The currently displayed form
self.displayed_form = None
        # Stack of displayed forms
self.forms_stack = []
self.event_pre_serialize = Event (self)
self.event_deserialized = Event (self)
        # Base path for relatively referenced files.  The front-end should set
        # it to the location where the application data most likely reside.
self.pre_path = "./"
self._dirty = True
def getDeserializedEvent (self):
"""
        Event fired immediately after all objects have been successfully deserialized.
"""
return self.event_deserialized
def getSerializingEvent (self):
"""
        Event fired just before the objects are serialized.
"""
return self.event_pre_serialize
def setTitle (self, title):
"""
        Set the application title.  It is usually shown in the application
        window header, if that makes sense for the given front-end.
        @type title: string
"""
self._title = title
def getTitle (self):
"""
        Return the application title set via setTitle.
        @rtype: string
"""
return self._title
title = property(fget=lambda self: self.getTitle(), fset=lambda self, v: self.setTitle(v))
def getPrePath (self):
return self.pre_path
def setPrePath (self, pre_path):
self.pre_path = pre_path
def absolutePath (self, filename):
"""
        Convert a relative path into an absolute one.  The typical use is
        locating the application's data files, whose absolute path depends
        on the front-end.
"""
return self.pre_path + filename
def getDisplayedForm (self):
"""
        Return the currently displayed form.
"""
return self.displayed_form
def display (self, form):
"""
        Display a form without pushing it onto the stack.  (This should probably be protected.)
"""
self.displayed_form = form
def pushForm (self, form):
self.forms_stack.append (form)
self.display (form)
def popForm (self, form):
oldform = self.forms_stack.pop ()
assert oldform == form
if len (self.forms_stack) > 0:
self.display (self.forms_stack[-1])
else:
self.display (None)
def writeFile (self, file):
"""
        Request that the given file be saved.  Typically this triggers a user
        action in which the user picks the destination file, after which the
        file.writeTo method is called.
        @type file: file.FileInput
"""
pass
def readFile (self, file):
"""
        Read a file.  Typically this triggers a user action in which the user
        picks the file to read from, after which the file.readFrom method is
        called.
        @type file: file.FileOutput
"""
pass
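# Usage sketch (illustrative only; ConsoleApplication and main_form are
# hypothetical -- a concrete front-end subclasses ApplicationBase and
# overrides methods such as display, readFile and writeFile):
#     class ConsoleApplication (ApplicationBase):
#         def display (self, form):
#             ApplicationBase.display (self, form)
#             # ... render the form for this front-end ...
#     app = ConsoleApplication ()
#     app.setTitle ("Demo")
#     app.pushForm (main_form)   # show a form
#     app.popForm (main_form)    # return to the previous form (or None)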
| fregaham/DISP | disp/application.py | Python | gpl-2.0 | 3,611 |
"""
User management tests package.
"""
from seleniumTests.tests.userManagement.addUser import AddUser
TESTS_TO_RUN = [ AddUser ]
| allembedded/t-case-mx | unitTest/seleniumTests/tests/userManagement/__init__.py | Python | gpl-2.0 | 133 |
import coverage
from unittest import TextTestRunner, TestLoader
from os.path import split, join, abspath
from os import chdir
if __name__ == "__main__":
project_dir = split(split(abspath(__file__))[0])[0]
chdir(project_dir)
cov = coverage.coverage(branch=True)
cov.start()
suite = TestLoader().discover(".", pattern="test_*.py")
TextTestRunner(verbosity=2).run(suite)
cov.stop()
cov.save()
cov.html_report()
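    # The HTML report lands in coverage's default "htmlcov" directory inside
    # the project root (the script chdir()s there before measuring).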
| mkli90/Taz | bin/runcoverage.py | Python | gpl-2.0 | 447 |