repo_name | path | copies | size | content | license | hash | line_mean | line_max | alpha_frac | autogenerated | ratio | config_test | has_no_keywords | few_assignments
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
thorkd1t/lurkbot | query.py | 1 | 1330 | import sqlite3
import time
import joinlist
# the tables look like this:
# db name 'imaqtpie.db' (each channel in joinlist has its own .db)
# Table name chat (each db has one table named 'chat')
#___________________________________________________________________
# usr | mesg | id | flags | channel | date/time |
#===================================================================
# bob | hi | 1 | @badges= | imaqtpie |2017-05-01 12:00:00 |
#-------------------------------------------------------------------
# jim | Kappa | 2 | @badges= | imaqtpie |2017-05-01 12:00:01 |
#-------------------------------------------------------------------
target = "imaqtpie" #channel name
conn = sqlite3.connect(target + '.db')
c = conn.cursor()
#====================================================================
#example queries:
#------------------
#c.execute('select usr from chat')
#c.execute('select * from chat')
#c.execute('select * from chat where usr == ""')
#c.execute('select mesg from chat where usr == "moobot" order by id desc limit 5')
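#a parameterized variant (illustrative, not in the original script; sqlite3 binds
#the value at execution time instead of quoting it into the SQL string):
#c.execute('select mesg from chat where usr = ? order by id desc limit 5', ('moobot',))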
c.execute('select * from chat order by id desc limit 50')
bla = c.fetchall()
if bla:  # fetchall() returns an empty list (never None) when there are no rows
for i in bla:
print i[0] + ": " + i[1]
else:
print "nothing to print"
conn.close()
time.sleep(100)
| mit | -8,376,591,812,289,806,000 | 32.102564 | 82 | 0.437594 | false | 3.518519 | false | false | false |
umeboshi2/vignewton | vignewton/resources.py | 1 | 1855 | from pyramid.security import Allow, Everyone, Authenticated
from fanstatic import Library, Resource
from js.lightbox import lightbox
from haberdashery.resources import jqueryui, fc_css, deform_css
#from trumpet.resources import jqueryui
from trumpet.resources import StaticResources as TrumpetResources
library = Library('vignewton_lib', 'static')
css = Library('vignewton_css', 'static/css')
js = Library('vignewton_js', 'static/js')
favicon = Resource(library, 'favicon.ico')
main_screen = Resource(css, 'mainscreen.css', depends=[deform_css])
admin_screen = Resource(css, 'adminscreen.css', depends=[deform_css])
post_to_url = Resource(js, 'post2url.js', depends=[jqueryui])
main_calendar_view = Resource(js, 'main-calendar-view.js', depends=[fc_css])
main_betgames_view = Resource(js, 'main-betgames-view.js', depends=[jqueryui])
main_betgames_confirm_bet = Resource(js, 'main-betgames-confirm-bet.js',
depends=[jqueryui])
from vignewton.security import authn_policy
class StaticResources(TrumpetResources):
main_screen = main_screen
admin_screen = admin_screen
# override trumpet favicon
favicon = favicon
main_calendar_view = main_calendar_view
main_betgames_view = main_betgames_view
main_betgames_confirm_bet = main_betgames_confirm_bet
post_to_url = post_to_url
lightbox = lightbox
# the acl entries are allow/deny, group, permission
class RootGroupFactory(object):
__name__ = ""
__acl__ = [
(Allow, Everyone, 'public'),
(Allow, Authenticated, 'user'),
(Allow, 'manager', 'manage'),
(Allow, 'editor', ('wiki_add', 'wiki_edit')),
(Allow, 'admin', ('admin', 'manage')),
]
authn_policy = authn_policy
def __init__(self, request):
# comment
pass
| unlicense | 3,318,480,387,530,409,500 | 27.538462 | 78 | 0.667925 | false | 3.46729 | false | false | false |
mikeboers/PyAV | tests/test_codec_context.py | 1 | 10799 | from fractions import Fraction
from unittest import SkipTest
import os
from av import AudioResampler, Codec, Packet
from av.codec.codec import UnknownCodecError
import av
from .common import TestCase, fate_suite
def iter_frames(container, stream):
for packet in container.demux(stream):
for frame in packet.decode():
yield frame
def iter_raw_frames(path, packet_sizes, ctx):
with open(path, 'rb') as f:
for i, size in enumerate(packet_sizes):
packet = Packet(size)
read_size = f.readinto(packet)
assert size
assert read_size == size
if not read_size:
break
for frame in ctx.decode(packet):
yield frame
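        # flush the decoder: decoding None drains any frames still buffered internally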
while True:
try:
frames = ctx.decode(None)
except EOFError:
break
for frame in frames:
yield frame
if not frames:
break
class TestCodecContext(TestCase):
def test_skip_frame_default(self):
ctx = Codec('png', 'w').create()
self.assertEqual(ctx.skip_frame.name, 'DEFAULT')
def test_parse(self):
# This one parses into a single packet.
self._assert_parse('mpeg4', fate_suite('h264/interlaced_crop.mp4'))
# This one parses into many small packets.
self._assert_parse('mpeg2video', fate_suite('mpeg2/mpeg2_field_encoding.ts'))
def _assert_parse(self, codec_name, path):
fh = av.open(path)
packets = []
for packet in fh.demux(video=0):
packets.append(packet)
full_source = b''.join(p.to_bytes() for p in packets)
for size in 1024, 8192, 65535:
ctx = Codec(codec_name).create()
packets = []
for i in range(0, len(full_source), size):
block = full_source[i:i + size]
packets.extend(ctx.parse(block))
packets.extend(ctx.parse())
parsed_source = b''.join(p.to_bytes() for p in packets)
self.assertEqual(len(parsed_source), len(full_source))
self.assertEqual(full_source, parsed_source)
class TestEncoding(TestCase):
def test_encoding_png(self):
self.image_sequence_encode('png')
def test_encoding_mjpeg(self):
self.image_sequence_encode('mjpeg')
def test_encoding_tiff(self):
self.image_sequence_encode('tiff')
def image_sequence_encode(self, codec_name):
try:
codec = Codec(codec_name, 'w')
except UnknownCodecError:
raise SkipTest()
container = av.open(fate_suite('h264/interlaced_crop.mp4'))
video_stream = container.streams.video[0]
width = 640
height = 480
ctx = codec.create()
pix_fmt = ctx.codec.video_formats[0].name
ctx.width = width
ctx.height = height
ctx.time_base = video_stream.codec_context.time_base
ctx.pix_fmt = pix_fmt
ctx.open()
frame_count = 1
path_list = []
for frame in iter_frames(container, video_stream):
new_frame = frame.reformat(width, height, pix_fmt)
new_packets = ctx.encode(new_frame)
self.assertEqual(len(new_packets), 1)
new_packet = new_packets[0]
path = self.sandboxed('%s/encoder.%04d.%s' % (
codec_name,
frame_count,
codec_name if codec_name != 'mjpeg' else 'jpg',
))
path_list.append(path)
with open(path, 'wb') as f:
f.write(new_packet)
frame_count += 1
if frame_count > 5:
break
ctx = av.Codec(codec_name, 'r').create()
for path in path_list:
with open(path, 'rb') as f:
size = os.fstat(f.fileno()).st_size
packet = Packet(size)
size = f.readinto(packet)
frame = ctx.decode(packet)[0]
self.assertEqual(frame.width, width)
self.assertEqual(frame.height, height)
self.assertEqual(frame.format.name, pix_fmt)
def test_encoding_h264(self):
self.video_encoding('libx264', {'crf': '19'})
def test_encoding_mpeg4(self):
self.video_encoding('mpeg4')
def test_encoding_mpeg1video(self):
self.video_encoding('mpeg1video')
def test_encoding_dvvideo(self):
options = {'pix_fmt': 'yuv411p',
'width': 720,
'height': 480}
self.video_encoding('dvvideo', options)
def test_encoding_dnxhd(self):
options = {'b': '90M', # bitrate
'pix_fmt': 'yuv422p',
'width': 1920,
'height': 1080,
'time_base': '1001/30000',
'max_frames': 5}
self.video_encoding('dnxhd', options)
def video_encoding(self, codec_name, options={}):
try:
codec = Codec(codec_name, 'w')
except UnknownCodecError:
raise SkipTest()
container = av.open(fate_suite('h264/interlaced_crop.mp4'))
video_stream = container.streams.video[0]
pix_fmt = options.pop('pix_fmt', 'yuv420p')
width = options.pop('width', 640)
height = options.pop('height', 480)
max_frames = options.pop('max_frames', 50)
time_base = options.pop('time_base', video_stream.codec_context.time_base)
ctx = codec.create()
ctx.width = width
ctx.height = height
ctx.time_base = time_base
ctx.framerate = 1 / ctx.time_base
ctx.pix_fmt = pix_fmt
ctx.options = options # TODO
ctx.open()
path = self.sandboxed('encoder.%s' % codec_name)
packet_sizes = []
frame_count = 0
with open(path, 'wb') as f:
for frame in iter_frames(container, video_stream):
"""
bad_frame = frame.reformat(width, 100, pix_fmt)
with self.assertRaises(ValueError):
ctx.encode(bad_frame)
bad_frame = frame.reformat(100, height, pix_fmt)
with self.assertRaises(ValueError):
ctx.encode(bad_frame)
bad_frame = frame.reformat(width, height, "rgb24")
with self.assertRaises(ValueError):
ctx.encode(bad_frame)
"""
if frame:
frame_count += 1
new_frame = frame.reformat(width, height, pix_fmt) if frame else None
for packet in ctx.encode(new_frame):
packet_sizes.append(packet.size)
f.write(packet)
if frame_count >= max_frames:
break
for packet in ctx.encode(None):
packet_sizes.append(packet.size)
f.write(packet)
dec_codec_name = codec_name
if codec_name == 'libx264':
dec_codec_name = 'h264'
ctx = av.Codec(dec_codec_name, 'r').create()
ctx.open()
decoded_frame_count = 0
for frame in iter_raw_frames(path, packet_sizes, ctx):
decoded_frame_count += 1
self.assertEqual(frame.width, width)
self.assertEqual(frame.height, height)
self.assertEqual(frame.format.name, pix_fmt)
self.assertEqual(frame_count, decoded_frame_count)
def test_encoding_pcm_s24le(self):
self.audio_encoding('pcm_s24le')
def test_encoding_aac(self):
self.audio_encoding('aac')
def test_encoding_mp2(self):
self.audio_encoding('mp2')
def audio_encoding(self, codec_name):
try:
codec = Codec(codec_name, 'w')
except UnknownCodecError:
raise SkipTest()
ctx = codec.create()
if ctx.codec.experimental:
raise SkipTest()
sample_fmt = ctx.codec.audio_formats[-1].name
sample_rate = 48000
channel_layout = "stereo"
channels = 2
ctx.time_base = Fraction(1) / sample_rate
ctx.sample_rate = sample_rate
ctx.format = sample_fmt
ctx.layout = channel_layout
ctx.channels = channels
ctx.open()
resampler = AudioResampler(sample_fmt, channel_layout, sample_rate)
container = av.open(fate_suite('audio-reference/chorusnoise_2ch_44kHz_s16.wav'))
audio_stream = container.streams.audio[0]
path = self.sandboxed('encoder.%s' % codec_name)
samples = 0
packet_sizes = []
with open(path, 'wb') as f:
for frame in iter_frames(container, audio_stream):
# We need to let the encoder retime.
frame.pts = None
"""
bad_resampler = AudioResampler(sample_fmt, "mono", sample_rate)
bad_frame = bad_resampler.resample(frame)
with self.assertRaises(ValueError):
next(encoder.encode(bad_frame))
bad_resampler = AudioResampler(sample_fmt, channel_layout, 3000)
bad_frame = bad_resampler.resample(frame)
with self.assertRaises(ValueError):
next(encoder.encode(bad_frame))
bad_resampler = AudioResampler('u8', channel_layout, 3000)
bad_frame = bad_resampler.resample(frame)
with self.assertRaises(ValueError):
next(encoder.encode(bad_frame))
"""
resampled_frame = resampler.resample(frame)
samples += resampled_frame.samples
for packet in ctx.encode(resampled_frame):
                    # bytearray because python can freak out
                    # if the first byte is NULL
f.write(bytearray(packet))
packet_sizes.append(packet.size)
for packet in ctx.encode(None):
packet_sizes.append(packet.size)
f.write(bytearray(packet))
ctx = Codec(codec_name, 'r').create()
ctx.time_base = Fraction(1) / sample_rate
ctx.sample_rate = sample_rate
ctx.format = sample_fmt
ctx.layout = channel_layout
ctx.channels = channels
ctx.open()
result_samples = 0
        # should have more asserts but not sure what to check;
        # libav and ffmpeg give different results,
        # so we can't really use checksums
for frame in iter_raw_frames(path, packet_sizes, ctx):
result_samples += frame.samples
self.assertEqual(frame.rate, sample_rate)
self.assertEqual(len(frame.layout.channels), channels)
| bsd-3-clause | 7,327,797,646,894,519,000 | 30.301449 | 88 | 0.548292 | false | 4.046085 | true | false | false |
cjmathy/ann_bmi203 | ann/rap1.py | 1 | 6499 | import numpy as np
import re
import random
def prepare_data():
"""This method prepares input positive and negative datasets as bitvectors for the Rap1 binding problem. Output: three lists of bitvectors, one containing positive samples, negative samples that are similar to positive samples, and negative examples that are randomly chosen from the fasta sequences. All bitvectors are 17 bp (34 bits) long"""
# read in all positive data, convert to bitvectors
pos_str = read_positives()
pos_vec = str_to_vec(pos_str)
# read in all negative data. then, remove false negatives from the negative fa sequences and their reverse complements. Call this new set of sequences and their reverse complements "neg_str".
neg_str = read_negatives()
neg_str = remove_falseneg(neg_str, pos_str)
rc_neg_str = reverse_complement(neg_str)
rc_neg_str = remove_falseneg(rc_neg_str, pos_str)
neg_str = reverse_complement(rc_neg_str)
neg_str = neg_str + rc_neg_str
    # cache interesting cases as "neg_sim". interesting cases are those that look similar to the positive sequences (in that they contain cytosines at positions 5, 6, and 10) but are considered negative. also cache randomly chosen sequences, so that the neural net can be trained on sequences that are not similar to positive examples.
neg_sim, neg_rand = cache_cases(neg_str)
neg_sim_vec = str_to_vec(neg_sim)
neg_rand_vec = str_to_vec(neg_rand)
return pos_vec, neg_sim_vec, neg_rand_vec
def read_positives():
"reads in positive samples as strings"
seqs = []
file = '/Users/cjmathy/Documents/courses/bmi203/Final-Project/ann_bmi203/rap1-lieb-positives.txt'
with open(file, 'rb') as f:
for seq in f:
seqs.append(seq.strip())
return seqs
def read_negatives():
"reads in negative samples as strings"
seqs = []
file = '/Users/cjmathy/Documents/courses/bmi203/Final-Project/ann_bmi203/yeast-upstream-1k-negative.fa'
with open(file, 'rb') as f:
sequence = ''
for line in f:
if line[0] is not '>':
sequence += line.strip()
else:
if sequence:
seqs.append(sequence)
sequence = ""
return seqs
def str_to_vec(sequences):
"""converts nucleotide strings into vectors using a 2-bit encoding scheme."""
vecs = []
nuc2bit = {"A": (0, 0),
"C": (0, 1),
"T": (1, 0),
"G": (1, 1)}
for seq in sequences:
vec = []
for nuc in seq:
vec.append(nuc2bit[nuc][0])
vec.append(nuc2bit[nuc][1])
vecs.append(vec)
return vecs
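# quick illustration (added, not in the original module): with the 2-bit scheme
# above, str_to_vec(["ACT"]) returns [[0, 0, 0, 1, 1, 0]] since A->00, C->01, T->10.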
def remove_falseneg(negatives, positives):
"""this method removes any negative fasta sequences that contain one of the positive sample sequences (essentially making them false negatives."""
seqs = []
for n in negatives:
if not any(p in n for p in positives):
seqs.append(n)
return seqs
def reverse_complement(sequences):
"""returns a list of reverse complemented sequences"""
rc = []
complement = {'A': 'T',
'C': 'G',
'G': 'C',
'T': 'A'}
for seq in sequences:
seq = list(seq)
seq = reversed([complement.get(nuc) for nuc in seq])
seq = ''.join(seq)
rc.append(seq)
return rc
def cache_cases(sequences):
"""this method separates the negative data into two sets: those that contain the Rap1 binding signature sequence, and a set that is randomly chosen from the negative data."""
# 1) cache negative cases that are similar to positives
sim_cache = []
for seq in sequences:
matches = re.findall(r'....CC...C.......', seq)
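        # the pattern keeps any 17 bp window with C at positions 5, 6 and 10,
        # i.e. windows sharing the signature of the positive Rap1 sequences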
for match in matches:
sim_cache.append(match)
sim_cache = list(set(sim_cache))
# 2) cache randomly chosen 17 bp negatives. 5 from each fa sequence (including reverse complements). there are about 30000 neg_sim samples, so this will create about 30000 neg_rand samples from the 3000 sequences and their 3000 reverse complements.
bp = 17
rand_cache = []
for seq in sequences:
for _ in xrange(5):
i = random.randint(0, len(seq)-bp)
substr = seq[i:i+bp]
rand_cache.append(substr)
return sim_cache, rand_cache
def build_training_set(pos, neg_sim, neg_rand):
"""Builds a training set using 50% positive data, and 50% negative data. Negative data consists equally of similar-to-positve and random negative sequences"""
# we have 137 positive examples, 30000 special negative examples, and 30000 random negative examples, all 34 bits long. take 69 special negative examples and 68 random negative examples. add them to the positive examples to make our training set.
neg = []
for _ in xrange(69):
i = np.random.randint(0, len(neg_sim))
neg.append(neg_sim[i])
for _ in xrange(68):
i = np.random.randint(0, len(neg_rand))
neg.append(neg_rand[i])
Xp = np.array(pos)
Xn = np.array(neg)
X = np.concatenate((Xp, Xn), axis=0) # nd array, 274 x 34
yp = np.ones((Xp.shape[0],))
yn = np.zeros((Xn.shape[0],))
y = np.concatenate((yp, yn), axis=0) # nd array, 34 x 1
return X, y
def build_training_set_100(pos, neg_sim, neg_rand):
"""same as above, but allowing for some positive and negative samples to be held out as a test set"""
neg = []
for _ in xrange(50):
i = np.random.randint(0, len(neg_sim))
neg.append(neg_sim[i])
for _ in xrange(50):
i = np.random.randint(0, len(neg_rand))
neg.append(neg_rand[i])
Xp = np.array(pos)
Xn = np.array(neg)
X = np.concatenate((Xp, Xn), axis=0)
yp = np.ones((Xp.shape[0],))
yn = np.zeros((Xn.shape[0],))
y = np.concatenate((yp, yn), axis=0)
return X, y
def build_test_set(pos, neg_sim, neg_rand):
"""same as above, but allowing for some positive and negative samples to be held out as a test set"""
neg = []
for _ in xrange(19):
i = np.random.randint(0, len(neg_sim))
neg.append(neg_sim[i])
for _ in xrange(18):
i = np.random.randint(0, len(neg_rand))
neg.append(neg_rand[i])
Xp = np.array(pos)
Xn = np.array(neg)
X = np.concatenate((Xp, Xn), axis=0)
yp = np.ones((Xp.shape[0],))
yn = np.zeros((Xn.shape[0],))
y = np.concatenate((yp, yn), axis=0)
return X, y
| apache-2.0 | 5,019,768,587,822,818,000 | 35.717514 | 347 | 0.624558 | false | 3.479122 | false | false | false |
m87/pyEM | stepwise.py | 1 | 3019 | from thirdparty import log_mvnpdf, log_mvnpdf_diag
import numpy as np
from online import *
from scipy.misc import logsumexp
from gaussEM import GaussEM
class Stepwise(OnlineEM):
def __init__(self, param):
super().__init__(param)
self.param = float(param['alpha'])
self.skip = int(param['skip'])
self.mbsize= int(param['mb'])
def prepare(self, dataset):
super().prepare(dataset)
class StepwiseGauss(Stepwise, GaussEM):
def __init__(self, param):
super().__init__(param)
self.cov = param['cov']
self.C = float(param['smoothing'])
self.mvnpdf = {'full': log_mvnpdf, 'diag': log_mvnpdf_diag}
def e(self, X):
lg = self.mvnpdf[self.cov](np.array([X]), self.means, self.COV[self.cov])
#s = np.inner((X - self.means),(X-self.means))
#print(s)
#print(self.means[0])
logResps = lg[0] + np.log(self.weights)
self.histAcc += logsumexp(logResps)
self.hist.append(-self.histAcc/self.N)
#self.hist.append(logsumexp(logResps))
maxLg = np.max(logResps)
logResps -= maxLg
self.resps = np.exp(logResps)
np.clip(self.resps, 10*EPS, np.inf, out=self.resps)
self.resps /= np.sum(self.resps)
self.N += 1
lam = np.power(self.N+2, -float(self.param))
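        # lam is the stepwise-EM step size eta_t = (t+2)^(-alpha); alpha comes from
        # param['alpha'] and is typically chosen in (0.5, 1] so the accumulators
        # below form a slowly decaying running average of the sufficient statistics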
for c in np.arange(self.n):
self.accResps[c]= (1-lam) * self.accResps[c] + lam * self.resps[c]
self.accMeans[c]= (1-lam)* self.accMeans[c] + lam * X * self.resps[c]
tmp = self.accMeans[c] / self.accResps[c]
diff = X - tmp
self.accCovars[c] = (1-lam) * self.accCovars[c] + lam * np.outer(self.resps[c] * diff, diff)
self.accResps /= np.sum(self.accResps)
def m(self, X):
if self.N < self.skip: return
if self.N % self.mbsize != 0:
return
for c in np.arange(self.n):
self.weights[c] = self.accResps[c] / (self.N+ 10*EPS ) + EPS
self.means[c] = (self.accMeans[c] + 10* EPS)/ (self.accResps[c] + 10 * EPS )
self.covars[c] = (self.accCovars[c] + 10* EPS * np.identity(self.dim))/ (self.accResps[c] + 10 * EPS ) * self.I[self.cov]
self.diagCovars[c] = np.diag(self.covars[c])
self.weights /= sum(self.weights)
def prepare(self,dataset):
super().prepare(dataset)
self.accResps = np.zeros((self.n,))
self.accMeans = np.zeros((self.n,self.dim))
self.accCovars = np.zeros((self.n,self.dim,self.dim))
self.weights = np.ones((self.n,))
self.weights /= self.n
self.means = np.zeros((self.n,self.dim))
for it,x in enumerate(dataset.I):
self.means[it] = x
self.covars = np.array([np.identity(self.dim) for x in range(self.n)])
self.diagCovars = np.ones((self.n,self.dim))
self.COV = {'full' : self.covars, 'diag' : self.diagCovars}
self.I ={'full': 1.0, 'diag': np.identity(self.dim)}
| mit | 1,769,778,788,891,410,400 | 34.940476 | 133 | 0.56575 | false | 2.925388 | false | false | false |
gunan/tensorflow | tensorflow/python/kernel_tests/rnn_test.py | 1 | 32669 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for rnn module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import time
import timeit
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.client import session
from tensorflow.python.eager import context
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops as ops_lib
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import gradients_impl
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import rnn
from tensorflow.python.ops import rnn_cell_impl
from tensorflow.python.ops import tensor_array_ops
from tensorflow.python.ops import variables as variables_lib
import tensorflow.python.ops.data_flow_grad # pylint: disable=unused-import
import tensorflow.python.ops.nn_grad # pylint: disable=unused-import
import tensorflow.python.ops.sparse_grad # pylint: disable=unused-import
import tensorflow.python.ops.tensor_array_grad # pylint: disable=unused-import
from tensorflow.python.platform import test
from tensorflow.python.training import saver
class Plus1RNNCell(rnn_cell_impl.RNNCell):
"""RNN Cell generating (output, new_state) = (input + 1, state + 1)."""
@property
def output_size(self):
return 5
@property
def state_size(self):
return 5
def call(self, input_, state, scope=None):
return (input_ + 1, state + 1)
class ScalarStateRNNCell(rnn_cell_impl.RNNCell):
"""RNN Cell generating (output, new_state) = (input + 1, state + 1)."""
@property
def output_size(self):
return 1
@property
def state_size(self):
return tensor_shape.TensorShape([])
def zero_state(self, batch_size, dtype):
return array_ops.zeros([], dtype=dtypes.int32)
def call(self, input_, state, scope=None):
return (input_, state + 1)
class UnbalancedOutputRNNCell(rnn_cell_impl.RNNCell):
"""RNN Cell generating (output, new_state) = (input + 1, state + 1)."""
@property
def output_size(self):
return tensor_shape.TensorShape(1), tensor_shape.TensorShape((2))
@property
def state_size(self):
return tensor_shape.TensorShape([])
def zero_state(self, batch_size, dtype):
return array_ops.zeros([], dtype=dtypes.int32)
def call(self, input_, state, scope=None):
concatenated = array_ops.concat((input_, input_), axis=-1)
return (input_, concatenated), state + 1
class TensorArrayStateRNNCell(rnn_cell_impl.RNNCell):
"""RNN Cell its state as a TensorArray."""
@property
def output_size(self):
return 1
@property
def state_size(self):
return (tensor_shape.TensorShape([]), ())
def zero_state(self, batch_size, dtype):
return (array_ops.zeros([], dtype=dtypes.int32),
tensor_array_ops.TensorArray(
dtype=dtype, size=0, dynamic_size=True))
def call(self, input_, state, scope=None):
new_array = state[1].write(state[0], input_)
return (input_, (state[0] + 1, new_array))
class RNNTest(test.TestCase):
def setUp(self):
self._seed = 23489
np.random.seed(self._seed)
@test_util.run_in_graph_and_eager_modes
def testInvalidSequenceLengthShape(self):
cell = Plus1RNNCell()
if context.executing_eagerly():
inputs = [constant_op.constant(np.ones((3, 4)))]
else:
inputs = [array_ops.placeholder(dtypes.float32, shape=(3, 4))]
with self.assertRaisesRegexp(ValueError, "must be a vector"):
rnn.dynamic_rnn(
cell,
array_ops.stack(inputs),
dtype=dtypes.float32,
sequence_length=[[4]])
@test_util.run_in_graph_and_eager_modes
def testInvalidDtype(self):
if context.executing_eagerly():
inputs = np.zeros((3, 4, 5), dtype=np.int32)
else:
inputs = array_ops.placeholder(dtypes.int32, shape=(3, 4, 5))
cells = [
rnn_cell_impl.BasicRNNCell,
rnn_cell_impl.GRUCell,
rnn_cell_impl.BasicLSTMCell,
rnn_cell_impl.LSTMCell,
]
for cell_cls in cells:
with self.cached_session():
with self.assertRaisesRegexp(
ValueError, "RNN cell only supports floating"):
cell = cell_cls(2, dtype=dtypes.int32)
rnn.dynamic_rnn(cell, inputs, dtype=dtypes.int32)
@test_util.run_in_graph_and_eager_modes
def testBatchSizeFromInput(self):
cell = Plus1RNNCell()
in_eager_mode = context.executing_eagerly()
# With static batch size
if in_eager_mode:
inputs = np.zeros((3, 4, 5), dtype=np.float32)
initial_state = np.zeros((3, 5), dtype=np.float32)
else:
inputs = array_ops.placeholder(dtypes.float32, shape=(3, 4, 5))
initial_state = array_ops.placeholder(dtypes.float32, shape=(3, 5))
# - Without initial_state
outputs, state = rnn.dynamic_rnn(cell, inputs, dtype=dtypes.float32)
self.assertEqual(3, outputs.shape[0])
self.assertEqual(3, state.shape[0])
# - With initial_state
outputs, state = rnn.dynamic_rnn(
cell, inputs, initial_state=initial_state)
self.assertEqual(3, outputs.shape[0])
self.assertEqual(3, state.shape[0])
# Without static batch size
# Tensor shapes are fully determined with eager execution enabled,
# so only run this test for graph construction.
if not in_eager_mode:
inputs = array_ops.placeholder(dtypes.float32, shape=(None, 4, 5))
# - Without initial_state
outputs, state = rnn.dynamic_rnn(cell, inputs, dtype=dtypes.float32)
self.assertEqual(None, outputs.shape.dims[0].value)
self.assertEqual(None, state.shape.dims[0].value)
# - With initial_state
outputs, state = rnn.dynamic_rnn(
cell,
inputs,
initial_state=array_ops.placeholder(dtypes.float32, shape=(None, 5)))
self.assertEqual(None, outputs.shape.dims[0].value)
self.assertEqual(None, state.shape.dims[0].value)
@test_util.run_in_graph_and_eager_modes
def testScalarStateIsAccepted(self):
cell = ScalarStateRNNCell()
in_eager_mode = context.executing_eagerly()
if in_eager_mode:
inputs = np.array([[[1], [2], [3], [4]]], dtype=np.float32)
else:
inputs = array_ops.placeholder(dtypes.float32, shape=(1, 4, 1))
with self.cached_session(use_gpu=True) as sess:
outputs, state = rnn.dynamic_rnn(
cell, inputs, dtype=dtypes.float32, sequence_length=[4])
if not in_eager_mode:
outputs, state = sess.run(
[outputs, state], feed_dict={inputs: [[[1], [2], [3], [4]]]})
self.assertAllEqual([[[1], [2], [3], [4]]], outputs)
self.assertAllEqual(4, state)
@test_util.run_in_graph_and_eager_modes
def testUnbalancedOutputIsAccepted(self):
cell = UnbalancedOutputRNNCell()
in_eager_mode = context.executing_eagerly()
if in_eager_mode:
inputs = np.array([[[1], [2], [3], [4]]], dtype=np.float32)
else:
inputs = array_ops.placeholder(dtypes.float32, shape=(1, 4, 1))
with self.cached_session(use_gpu=True) as sess:
outputs, state = rnn.dynamic_rnn(
cell, inputs, dtype=dtypes.float32, sequence_length=[4])
if not in_eager_mode:
outputs, state = sess.run(
[outputs, state], feed_dict={inputs: [[[1], [2], [3], [4]]]})
self.assertIsInstance(outputs, tuple)
self.assertAllEqual([[[1], [2], [3], [4]]], outputs[0])
self.assertAllEqual([[[1, 1], [2, 2], [3, 3], [4, 4]]], outputs[1])
self.assertAllEqual(4, state)
@test_util.assert_no_new_pyobjects_executing_eagerly
def testEagerMemory(self):
with context.eager_mode():
cell = TensorArrayStateRNNCell()
inputs = np.array([[[1], [2], [3], [4]]], dtype=np.float32)
rnn.dynamic_rnn(cell, inputs, dtype=dtypes.float32, sequence_length=[4])
@test_util.run_in_graph_and_eager_modes
@test_util.run_v1_only("b/120545219")
def testTensorArrayStateIsAccepted(self):
cell = TensorArrayStateRNNCell()
in_eager_mode = context.executing_eagerly()
if in_eager_mode:
inputs = np.array([[[1], [2], [3], [4]]], dtype=np.float32)
else:
inputs = array_ops.placeholder(dtypes.float32, shape=(1, 4, 1))
with self.cached_session(use_gpu=True) as sess:
outputs, state = rnn.dynamic_rnn(
cell, inputs, dtype=dtypes.float32, sequence_length=[4])
state = (state[0], state[1].stack())
if not in_eager_mode:
outputs, state = sess.run(
[outputs, state], feed_dict={
inputs: [[[1], [2], [3], [4]]]
})
self.assertAllEqual([[[1], [2], [3], [4]]], outputs)
self.assertAllEqual(4, state[0])
self.assertAllEqual([[[1]], [[2]], [[3]], [[4]]], state[1])
@test_util.run_deprecated_v1
def testCellGetInitialState(self):
cell = rnn_cell_impl.BasicRNNCell(5)
with self.assertRaisesRegexp(
ValueError, "batch_size and dtype cannot be None"):
cell.get_initial_state(None, None, None)
inputs = array_ops.placeholder(dtypes.float32, shape=(None, 4, 1))
with self.assertRaisesRegexp(
ValueError, "batch size from input tensor is different from"):
cell.get_initial_state(inputs=inputs, batch_size=50, dtype=None)
with self.assertRaisesRegexp(
ValueError, "batch size from input tensor is different from"):
cell.get_initial_state(
inputs=inputs, batch_size=constant_op.constant(50), dtype=None)
with self.assertRaisesRegexp(
ValueError, "dtype from input tensor is different from"):
cell.get_initial_state(inputs=inputs, batch_size=None, dtype=dtypes.int16)
initial_state = cell.get_initial_state(
inputs=inputs, batch_size=None, dtype=None)
self.assertEqual(initial_state.shape.as_list(), [None, 5])
self.assertEqual(initial_state.dtype, inputs.dtype)
batch = array_ops.shape(inputs)[0]
dtype = inputs.dtype
initial_state = cell.get_initial_state(None, batch, dtype)
self.assertEqual(initial_state.shape.as_list(), [None, 5])
self.assertEqual(initial_state.dtype, inputs.dtype)
def _assert_cell_builds(self, cell_class, dtype, batch_size, in_size,
out_size):
cell = cell_class(out_size, dtype=dtype)
in_shape = tensor_shape.TensorShape((batch_size, in_size))
cell.build(in_shape)
state_output = cell.get_initial_state(
inputs=None, batch_size=batch_size, dtype=dtype)
cell_output, _ = cell(array_ops.zeros(in_shape, dtype), state_output)
self.assertAllEqual([batch_size, out_size], cell_output.shape.as_list())
@test_util.run_in_graph_and_eager_modes
def testCellsBuild(self):
f32 = dtypes.float32
f64 = dtypes.float64
self._assert_cell_builds(rnn_cell_impl.BasicRNNCell, f32, 5, 7, 3)
self._assert_cell_builds(rnn_cell_impl.BasicRNNCell, f64, 5, 7, 3)
self._assert_cell_builds(rnn_cell_impl.BasicLSTMCell, f32, 5, 7, 3)
self._assert_cell_builds(rnn_cell_impl.BasicLSTMCell, f64, 5, 7, 3)
self._assert_cell_builds(rnn_cell_impl.GRUCell, f32, 5, 7, 3)
self._assert_cell_builds(rnn_cell_impl.GRUCell, f64, 5, 7, 3)
self._assert_cell_builds(rnn_cell_impl.LSTMCell, f32, 5, 7, 3)
self._assert_cell_builds(rnn_cell_impl.LSTMCell, f64, 5, 7, 3)
@test_util.run_deprecated_v1
def testBasicLSTMCellInterchangeWithLSTMCell(self):
with self.session(graph=ops_lib.Graph()) as sess:
basic_cell = rnn_cell_impl.BasicLSTMCell(1)
basic_cell(array_ops.ones([1, 1]),
state=basic_cell.get_initial_state(inputs=None,
batch_size=1,
dtype=dtypes.float32))
self.evaluate([v.initializer for v in basic_cell.variables])
self.evaluate(basic_cell._bias.assign([10.] * 4))
save = saver.Saver()
prefix = os.path.join(self.get_temp_dir(), "ckpt")
save_path = save.save(sess, prefix)
with self.session(graph=ops_lib.Graph()) as sess:
lstm_cell = rnn_cell_impl.LSTMCell(1, name="basic_lstm_cell")
lstm_cell(array_ops.ones([1, 1]),
state=lstm_cell.get_initial_state(inputs=None,
batch_size=1,
dtype=dtypes.float32))
self.evaluate([v.initializer for v in lstm_cell.variables])
save = saver.Saver()
save.restore(sess, save_path)
self.assertAllEqual([10.] * 4, self.evaluate(lstm_cell._bias))
######### Benchmarking RNN code
def _static_vs_dynamic_rnn_benchmark_static(inputs_list_t, sequence_length):
(_, input_size) = inputs_list_t[0].get_shape().as_list()
initializer = init_ops.random_uniform_initializer(-0.01, 0.01, seed=127)
cell = rnn_cell_impl.LSTMCell(
num_units=input_size,
use_peepholes=True,
initializer=initializer,
state_is_tuple=False)
outputs, final_state = rnn.static_rnn(
cell,
inputs_list_t,
sequence_length=sequence_length,
dtype=dtypes.float32)
trainable_variables = ops_lib.get_collection(
ops_lib.GraphKeys.TRAINABLE_VARIABLES)
gradients = gradients_impl.gradients(outputs + [final_state],
trainable_variables)
return control_flow_ops.group(final_state, *(gradients + outputs))
def _static_vs_dynamic_rnn_benchmark_dynamic(inputs_t, sequence_length):
(unused_0, unused_1, input_size) = inputs_t.get_shape().as_list()
initializer = init_ops.random_uniform_initializer(-0.01, 0.01, seed=127)
cell = rnn_cell_impl.LSTMCell(
num_units=input_size,
use_peepholes=True,
initializer=initializer,
state_is_tuple=False)
outputs, final_state = rnn.dynamic_rnn(
cell, inputs_t, sequence_length=sequence_length, dtype=dtypes.float32)
trainable_variables = ops_lib.get_collection(
ops_lib.GraphKeys.TRAINABLE_VARIABLES)
gradients = gradients_impl.gradients([outputs, final_state],
trainable_variables)
return control_flow_ops.group(final_state, outputs, *gradients)
def graph_creation_static_vs_dynamic_rnn_benchmark(max_time):
config = config_pb2.ConfigProto()
config.allow_soft_placement = True
# These parameters don't matter
batch_size = 512
num_units = 512
# Set up sequence lengths
np.random.seed([127])
sequence_length = np.random.randint(0, max_time, size=batch_size)
inputs_list = [
np.random.randn(batch_size, num_units).astype(np.float32)
for _ in range(max_time)
]
inputs = np.dstack(inputs_list).transpose([0, 2, 1]) # batch x time x depth
def _create_static_rnn():
with session.Session(config=config, graph=ops_lib.Graph()):
inputs_list_t = [
variables_lib.Variable(
x, trainable=False).value() for x in inputs_list
]
_static_vs_dynamic_rnn_benchmark_static(inputs_list_t, sequence_length)
def _create_dynamic_rnn():
with session.Session(config=config, graph=ops_lib.Graph()):
inputs_t = variables_lib.Variable(inputs, trainable=False).value()
_static_vs_dynamic_rnn_benchmark_dynamic(inputs_t, sequence_length)
delta_static = timeit.timeit(_create_static_rnn, number=5)
delta_dynamic = timeit.timeit(_create_dynamic_rnn, number=5)
print("%d \t %f \t %f \t %f" %
(max_time, delta_static, delta_dynamic, delta_dynamic / delta_static))
return delta_static, delta_dynamic
def _timer(sess, ops):
  # Warm up
for _ in range(2):
sess.run(ops)
# Timing run
runs = 20
start = time.time()
for _ in range(runs):
sess.run(ops)
end = time.time()
return (end - start) / float(runs)
def static_vs_dynamic_rnn_benchmark(batch_size, max_time, num_units, use_gpu):
config = config_pb2.ConfigProto()
config.allow_soft_placement = True
# Set up sequence lengths
np.random.seed([127])
sequence_length = np.random.randint(0, max_time, size=batch_size)
inputs_list = [
np.random.randn(batch_size, num_units).astype(np.float32)
for _ in range(max_time)
]
inputs = np.dstack(inputs_list).transpose([0, 2, 1]) # batch x time x depth
# Using rnn()
with session.Session(config=config, graph=ops_lib.Graph()) as sess:
with ops_lib.device("/cpu:0" if not use_gpu else None):
inputs_list_t = [
variables_lib.Variable(
x, trainable=False).value() for x in inputs_list
]
ops = _static_vs_dynamic_rnn_benchmark_static(inputs_list_t,
sequence_length)
variables_lib.global_variables_initializer().run()
delta_static = _timer(sess, ops)
# Using dynamic_rnn()
with session.Session(config=config, graph=ops_lib.Graph()) as sess:
with ops_lib.device("/cpu:0" if not use_gpu else None):
inputs_t = variables_lib.Variable(inputs, trainable=False).value()
ops = _static_vs_dynamic_rnn_benchmark_dynamic(inputs_t, sequence_length)
variables_lib.global_variables_initializer().run()
delta_dynamic = _timer(sess, ops)
print("%d \t %d \t %d \t %s \t %f \t %f \t %f" %
(batch_size, max_time, num_units, use_gpu, delta_static, delta_dynamic,
delta_dynamic / delta_static))
return delta_static, delta_dynamic
def _half_seq_len_vs_unroll_half_rnn_benchmark(inputs_list_t, sequence_length):
(_, input_size) = inputs_list_t[0].get_shape().as_list()
initializer = init_ops.random_uniform_initializer(-0.01, 0.01, seed=127)
cell = rnn_cell_impl.LSTMCell(
num_units=input_size,
use_peepholes=True,
initializer=initializer,
state_is_tuple=False)
outputs, final_state = rnn.static_rnn(
cell,
inputs_list_t,
sequence_length=sequence_length,
dtype=dtypes.float32)
trainable_variables = ops_lib.get_collection(
ops_lib.GraphKeys.TRAINABLE_VARIABLES)
gradients = gradients_impl.gradients(outputs + [final_state],
trainable_variables)
return control_flow_ops.group(final_state, *(gradients + outputs))
def half_seq_len_vs_unroll_half_rnn_benchmark(batch_size, max_time, num_units,
use_gpu):
config = config_pb2.ConfigProto()
config.allow_soft_placement = True
# Set up sequence lengths
np.random.seed([127])
sequence_length = max_time * np.ones((batch_size,))
inputs_list = [
np.random.randn(batch_size, num_units).astype(np.float32)
for _ in range(max_time)
]
# Halve the sequence length, full static unroll
with session.Session(config=config, graph=ops_lib.Graph()) as sess:
with ops_lib.device("/cpu:0" if not use_gpu else None):
inputs_list_t = [
variables_lib.Variable(
x, trainable=False).value() for x in inputs_list
]
ops = _half_seq_len_vs_unroll_half_rnn_benchmark(inputs_list_t,
sequence_length / 2)
variables_lib.global_variables_initializer().run()
delta_half_seq_len = _timer(sess, ops)
# Halve the unroll size, don't use sequence length
with session.Session(config=config, graph=ops_lib.Graph()) as sess:
with ops_lib.device("/cpu:0" if not use_gpu else None):
inputs_list_t = [
variables_lib.Variable(
x, trainable=False).value() for x in inputs_list
]
ops = _half_seq_len_vs_unroll_half_rnn_benchmark(
inputs_list_t[:(max_time // 2)], sequence_length / 2)
variables_lib.global_variables_initializer().run()
delta_unroll_half = _timer(sess, ops)
print("%d \t %d \t\t %d \t %s \t %f \t\t %f \t\t %f" %
(batch_size, max_time, num_units, use_gpu, delta_half_seq_len,
delta_unroll_half, delta_half_seq_len / delta_unroll_half))
return delta_half_seq_len, delta_unroll_half
def _concat_state_vs_tuple_state_rnn_benchmark(inputs_list_t, sequence_length,
state_is_tuple):
(_, input_size) = inputs_list_t[0].get_shape().as_list()
initializer = init_ops.random_uniform_initializer(-0.01, 0.01, seed=127)
cell = rnn_cell_impl.LSTMCell(
num_units=input_size,
use_peepholes=True,
initializer=initializer,
state_is_tuple=state_is_tuple)
outputs, final_state = rnn.static_rnn(
cell,
inputs_list_t,
sequence_length=sequence_length,
dtype=dtypes.float32)
final_state = list(final_state) if state_is_tuple else [final_state]
trainable_variables = ops_lib.get_collection(
ops_lib.GraphKeys.TRAINABLE_VARIABLES)
gradients = gradients_impl.gradients(outputs + final_state,
trainable_variables)
return control_flow_ops.group(*(final_state + gradients + outputs))
def concat_state_vs_tuple_state_rnn_benchmark(batch_size, max_time, num_units,
use_gpu):
config = config_pb2.ConfigProto()
config.allow_soft_placement = True
# Set up sequence lengths
np.random.seed([127])
sequence_length = max_time * np.ones((batch_size,))
inputs_list = [
np.random.randn(batch_size, num_units).astype(np.float32)
for _ in range(max_time)
]
# Run with concatenated states (default)
with session.Session(config=config, graph=ops_lib.Graph()) as sess:
with ops_lib.device("/cpu:0" if not use_gpu else None):
inputs_list_t = [
variables_lib.Variable(
x, trainable=False).value() for x in inputs_list
]
ops = _concat_state_vs_tuple_state_rnn_benchmark(
inputs_list_t, sequence_length, state_is_tuple=False)
variables_lib.global_variables_initializer().run()
delta_concat_state = _timer(sess, ops)
# Run with tuple states (new)
with session.Session(config=config, graph=ops_lib.Graph()) as sess:
with ops_lib.device("/cpu:0" if not use_gpu else None):
inputs_list_t = [
variables_lib.Variable(
x, trainable=False).value() for x in inputs_list
]
ops = _concat_state_vs_tuple_state_rnn_benchmark(
inputs_list_t, sequence_length, state_is_tuple=True)
variables_lib.global_variables_initializer().run()
delta_tuple_state = _timer(sess, ops)
print("%d \t %d \t %d \t %s \t %f \t\t %f \t\t %f" %
(batch_size, max_time, num_units, use_gpu, delta_concat_state,
delta_tuple_state, delta_concat_state / delta_tuple_state))
return delta_concat_state, delta_tuple_state
def _dynamic_rnn_swap_memory_benchmark(inputs_t, sequence_length, swap_memory):
(unused_0, unused_1, input_size) = inputs_t.get_shape().as_list()
initializer = init_ops.random_uniform_initializer(-0.01, 0.01, seed=127)
cell = rnn_cell_impl.LSTMCell(
num_units=input_size,
use_peepholes=True,
initializer=initializer,
state_is_tuple=False)
outputs, final_state = rnn.dynamic_rnn(
cell,
inputs_t,
sequence_length=sequence_length,
swap_memory=swap_memory,
dtype=dtypes.float32)
trainable_variables = ops_lib.get_collection(
ops_lib.GraphKeys.TRAINABLE_VARIABLES)
gradients = gradients_impl.gradients([outputs, final_state],
trainable_variables)
return control_flow_ops.group(final_state, outputs, *gradients)
def dynamic_rnn_swap_memory_benchmark(batch_size, max_time, num_units):
config = config_pb2.ConfigProto()
config.allow_soft_placement = True
# Set up sequence lengths
np.random.seed([127])
sequence_length = np.random.randint(0, max_time, size=batch_size)
inputs_list = [
np.random.randn(batch_size, num_units).astype(np.float32)
for _ in range(max_time)
]
inputs = np.dstack(inputs_list).transpose([0, 2, 1]) # batch x time x depth
# No memory swap
with session.Session(config=config, graph=ops_lib.Graph()) as sess:
inputs_t = variables_lib.Variable(inputs, trainable=False).value()
ops = _dynamic_rnn_swap_memory_benchmark(
inputs_t, sequence_length, swap_memory=False)
variables_lib.global_variables_initializer().run()
no_swap = _timer(sess, ops)
# Memory swap
with session.Session(config=config, graph=ops_lib.Graph()) as sess:
inputs_t = variables_lib.Variable(inputs, trainable=False).value()
ops = _dynamic_rnn_swap_memory_benchmark(
inputs_t, sequence_length, swap_memory=True)
variables_lib.global_variables_initializer().run()
swap = _timer(sess, ops)
print("%d \t %d \t %d \t %f \t %f \t %f" %
(batch_size, max_time, num_units, no_swap, swap, swap / no_swap))
return no_swap, swap
def rnn_long_sequence_benchmark(batch_size, seqlen, num_units, dynamic,
swap_memory, nn):
config = config_pb2.ConfigProto()
config.allow_soft_placement = True
# Set up sequence lengths
np.random.seed([127])
sequence_length = [seqlen for _ in range(batch_size)]
inputs_list = [
np.random.randn(batch_size, num_units).astype(np.float32)
for _ in range(seqlen)
]
inputs = np.dstack(inputs_list).transpose([0, 2, 1]) # batch x time x depth
for _ in range(nn):
if dynamic:
with session.Session(config=config, graph=ops_lib.Graph()) as sess:
inputs_t = variables_lib.Variable(inputs, trainable=False).value()
ops = _dynamic_rnn_swap_memory_benchmark(
inputs_t, sequence_length, swap_memory=swap_memory)
variables_lib.global_variables_initializer().run()
elapsed = _timer(sess, ops)
else:
with session.Session(config=config, graph=ops_lib.Graph()) as sess:
inputs_list_t = [
variables_lib.Variable(
x, trainable=False).value() for x in inputs_list
]
ops = _static_vs_dynamic_rnn_benchmark_static(inputs_list_t,
sequence_length)
variables_lib.global_variables_initializer().run()
elapsed = _timer(sess, ops)
print("%d \t %d \t %d \t %s \t %f \t %f" % (batch_size, seqlen, num_units,
dynamic, elapsed,
elapsed / seqlen))
class BenchmarkRNN(test.Benchmark):
def benchmarkGraphCreationStaticVsDynamicLSTM(self):
print("Graph Creation: Static Unroll vs. Dynamic Unroll LSTM")
print("max_t \t dt(static) \t dt(dynamic) \t dt(dynamic)/dt(static)")
for max_time in (1, 25, 50):
s_dt, d_dt = graph_creation_static_vs_dynamic_rnn_benchmark(max_time)
self.report_benchmark(
name="graph_creation_time_static_T%02d" % max_time,
iters=5,
wall_time=s_dt)
self.report_benchmark(
name="graph_creation_time_dynamic_T%02d" % max_time,
iters=5,
wall_time=d_dt)
def benchmarkStaticUnrollVsDynamicFlowLSTM(self):
print("Calculation: Static Unroll with Dynamic Flow LSTM "
"vs. Dynamic Unroll LSTM")
print("batch \t max_t \t units \t gpu \t dt(static) \t dt(dynamic) "
"\t dt(dynamic)/dt(static)")
for batch_size in (256,):
for max_time in (50,):
for num_units in (512, 256, 128):
for use_gpu in (False, True):
s_dt, d_dt = static_vs_dynamic_rnn_benchmark(batch_size, max_time,
num_units, use_gpu)
self.report_benchmark(
name="static_unroll_time_T%02d_B%03d_N%03d_gpu_%s" %
(max_time, batch_size, num_units, use_gpu),
iters=20,
wall_time=s_dt)
self.report_benchmark(
name="dynamic_unroll_time_T%02d_B%03d_N%03d_gpu_%s" %
(max_time, batch_size, num_units, use_gpu),
iters=20,
wall_time=d_dt)
def benchmarkDynamicLSTMNoMemorySwapVsMemorySwap(self):
print("Calculation: Dynamic LSTM No Memory Swap vs. Memory Swap")
print("batch \t max_t \t units \t no_swap \t swap \t swap/no_swap")
for batch_size in (256, 512):
for max_time in (100,):
for num_units in (512, 256, 128):
no_swap, swap = dynamic_rnn_swap_memory_benchmark(batch_size,
max_time, num_units)
self.report_benchmark(
name="dynamic_lstm_no_memory_swap_T%02d_B%03d_N%03d" %
(max_time, batch_size, num_units),
iters=20,
wall_time=no_swap)
self.report_benchmark(
name="dynamic_lstm_with_memory_swap_T%02d_B%03d_N%03d" %
(max_time, batch_size, num_units),
iters=20,
wall_time=swap)
def benchmarkStaticUnrollHalfSequenceLengthVsHalfUnroll(self):
print("Calculation: Static Unroll with Halved Sequence Length "
"vs. Half Static Unroll")
print("batch \t full_t \t units \t gpu \t dt(half_seq_len) "
"\t dt(unroll_half) \t dt(half_seq_len)/dt(unroll_half)")
for batch_size in (128,):
for max_time in (50,):
for num_units in (256,):
for use_gpu in (False, True):
s_dt, d_dt = half_seq_len_vs_unroll_half_rnn_benchmark(batch_size,
max_time,
num_units,
use_gpu)
self.report_benchmark(
name="half_seq_len_time_T%02d_B%03d_N%03d_gpu_%s" %
(max_time, batch_size, num_units, use_gpu),
iters=20,
wall_time=s_dt)
self.report_benchmark(
name="unroll_half_time_T%02d_B%03d_N%03d_gpu_%s" %
(max_time, batch_size, num_units, use_gpu),
iters=20,
wall_time=d_dt)
def benchmarkStaticUnrollStateConcatVsStateTuple(self):
print("Calculation: Static Unroll with Concatenated State "
"vs. Tuple State")
print("batch \t time \t units \t gpu \t dt(concat_state) "
"\t dt(tuple_state) \t dt(concat_state)/dt(tuple_state)")
for batch_size in (
16,
128,):
for max_time in (50,):
for num_units in (
16,
128,):
for use_gpu in (False, True):
c_dt, t_dt = concat_state_vs_tuple_state_rnn_benchmark(batch_size,
max_time,
num_units,
use_gpu)
self.report_benchmark(
name="concat_state_time_T%02d_B%03d_N%03d_gpu_%s" %
(max_time, batch_size, num_units, use_gpu),
iters=20,
wall_time=c_dt)
self.report_benchmark(
name="tuple_state_time_T%02d_B%03d_N%03d_gpu_%s" %
(max_time, batch_size, num_units, use_gpu),
iters=20,
wall_time=t_dt)
def _benchmarkDynamicLSTMMemorySwapLongSeq(self):
"""The memory swapping test for the SOSP submission."""
print("Calculation: Long LSTM Sequence")
print("batch \t len \t units \t dynamic \t elapsed_t \t elapsed_t/len")
batch_size = 512
seqlen = 800
num_units = 512
dynamic = True
swap_memory = True
# Some warming up.
if swap_memory:
rnn_long_sequence_benchmark(batch_size, seqlen, num_units,
dynamic, swap_memory, 2)
# Measure the performance.
for slen in xrange(100, 1100, 100):
rnn_long_sequence_benchmark(batch_size, slen, num_units, dynamic,
swap_memory, 3)
if __name__ == "__main__":
test.main()
| apache-2.0 | -9,210,532,734,080,610,000 | 37.388954 | 80 | 0.625578 | false | 3.480239 | true | false | false |
frankrousseau/weboob | weboob/capabilities/shop.py | 1 | 3939 | # -*- coding: utf-8 -*-
# Copyright(C) 2014 Oleg Plakhotniuk
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from .base import BaseObject, StringField, DecimalField, UserError
from .date import DateField
from .collection import CapCollection
__all__ = ['OrderNotFound', 'Order', 'Payment', 'Item', 'CapShop']
class OrderNotFound(UserError):
"""
Raised when an order is not found.
"""
def __init__(self, msg='Order not found'):
UserError.__init__(self, msg)
class Order(BaseObject):
"""
Purchase order.
"""
date = DateField('Date when the order was placed')
shipping = DecimalField('Shipping price')
discount = DecimalField('Discounts')
tax = DecimalField('Tax')
def __repr__(self):
return u"<Order id=%r date=%r>" % (self.id, self.date)
class Payment(BaseObject):
"""
Payment for an order.
"""
date = DateField('The date when payment was applied')
method = StringField('Payment method; e.g. "VISA 1234"')
amount = DecimalField('Payment amount')
def __repr__(self):
return u"<Payment date=%r method=%r amount=%r>" % \
(self.date, self.method, self.amount)
class Item(BaseObject):
"""
Purchased item within an order.
"""
label = StringField('Item label')
url = StringField('URL with item description')
price = DecimalField('Item price')
def __repr__(self):
return u"<Item label=%r price=%r>" % (self.label, self.price)
class CapShop(CapCollection):
"""
Capability of online shops to see orders history.
"""
def iter_resources(self, objs, split_path):
"""
Iter resources.
        Default implementation of this method is to return all orders
        at the top level (by calling :func:`iter_orders`).
:param objs: type of objects to get
:type objs: tuple[:class:`BaseObject`]
:param split_path: path to discover
:type split_path: :class:`list`
:rtype: iter[:class:`BaseObject`]
"""
if Order in objs:
self._restrict_level(split_path)
return self.iter_orders()
def get_currency(self):
"""
Get the currency this shop uses.
:rtype: :class:`str`
"""
raise NotImplementedError()
def iter_orders(self):
"""
Iter history of orders.
:rtype: iter[:class:`Order`]
"""
raise NotImplementedError()
def get_order(self, id):
"""
Get an order from its ID.
:param id: ID of the order
:type id: :class:`str`
:rtype: :class:`Order`
:raises: :class:`OrderNotFound`
"""
raise NotImplementedError()
def iter_payments(self, order):
"""
Iter payments of a specific order.
:param order: order to get payments
:type order: :class:`Order`
:rtype: iter[:class:`Payment`]
:raises: :class:`OrderNotFound`
"""
raise NotImplementedError()
def iter_items(self, order):
"""
Iter items of a specific order.
:param order: order to get items
:type order: :class:`Order`
:rtype: iter[:class:`Item`]
:raises: :class:`OrderNotFound`
"""
raise NotImplementedError()
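# Illustrative sketch (added; not part of weboob): a concrete backend module would
# mix this capability into its backend class and fill in the abstract methods, e.g.:
#
#     class MyShopBackend(CapShop):
#         def get_currency(self):
#             return 'USD'
#
#         def iter_orders(self):
#             return iter(self._orders)  # orders fetched elsewhere by the backend
#
#         def get_order(self, id):
#             for order in self.iter_orders():
#                 if order.id == id:
#                     return order
#             raise OrderNotFound()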
| agpl-3.0 | -8,459,858,471,684,363,000 | 26.165517 | 77 | 0.611323 | false | 4.098855 | false | false | false |
gigglearrows/anniesbot | alembic/versions/3841cd597e_added_a_table_for_duel_stats.py | 1 | 1033 | """Added a table for duel stats
Revision ID: 3841cd597e
Revises: d5f1b8bd68
Create Date: 2015-12-02 00:12:07.548855
"""
# revision identifiers, used by Alembic.
revision = '3841cd597e'
down_revision = 'd5f1b8bd68'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('tb_user_duel_stats',
sa.Column('user_id', sa.Integer(), autoincrement=False, nullable=False),
sa.Column('duels_won', sa.Integer(), nullable=False),
sa.Column('duels_total', sa.Integer(), nullable=False),
sa.Column('points_won', sa.Integer(), nullable=False),
sa.Column('points_lost', sa.Integer(), nullable=False),
sa.Column('last_duel', sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint('user_id')
)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_table('tb_user_duel_stats')
### end Alembic commands ###
| mit | -8,916,108,574,692,642,000 | 27.694444 | 76 | 0.680542 | false | 3.248428 | false | false | false |
marscher/PyEMMA | pyemma/_ext/variational/estimators/tests/benchmark_moments.py | 1 | 5907 | from __future__ import absolute_import
from __future__ import print_function
__author__ = 'noe'
import time
import numpy as np
from .. import moments
def genS(N):
""" Generates sparsities given N (number of cols) """
S = [10, 90, 100, 500, 900, 1000, 2000, 5000, 7500, 9000, 10000, 20000, 50000, 75000, 90000] # non-zero
return [s for s in S if s <= N]
def genX(L, N, n_var=None, const=False):
X = np.random.rand(L, N) # random data
if n_var is not None:
if const:
Xsparse = np.ones((L, N))
else:
Xsparse = np.zeros((L, N))
Xsparse[:, :n_var] = X[:, :n_var]
X = Xsparse
return X
def genY(L, N, n_var=None, const=False):
X = np.random.rand(L, N) # random data
if n_var is not None:
if const:
Xsparse = -np.ones((L, N))
else:
Xsparse = np.zeros((L, N))
Xsparse[:, :n_var] = X[:, :n_var]
X = Xsparse
return X
def reftime_momentsXX(X, remove_mean=False, nrep=3):
# time for reference calculation
t1 = time.time()
for r in range(nrep):
s_ref = X.sum(axis=0) # computation of mean
if remove_mean:
X = X - s_ref/float(X.shape[0])
C_XX_ref = np.dot(X.T, X) # covariance matrix
t2 = time.time()
# return mean time
return (t2-t1)/float(nrep)
def mytime_momentsXX(X, remove_mean=False, nrep=3):
# time for reference calculation
t1 = time.time()
for r in range(nrep):
w, s, C_XX = moments.moments_XX(X, remove_mean=remove_mean)
t2 = time.time()
# return mean time
return (t2-t1)/float(nrep)
def reftime_momentsXXXY(X, Y, remove_mean=False, symmetrize=False, nrep=3):
# time for reference calculation
t1 = time.time()
for r in range(nrep):
sx = X.sum(axis=0) # computation of mean
sy = Y.sum(axis=0) # computation of mean
if symmetrize:
sx = 0.5*(sx + sy)
sy = sx
if remove_mean:
X = X - sx/float(X.shape[0])
Y = Y - sy/float(Y.shape[0])
if symmetrize:
C_XX_ref = np.dot(X.T, X) + np.dot(Y.T, Y)
C_XY = np.dot(X.T, Y)
C_XY_ref = C_XY + C_XY.T
else:
C_XX_ref = np.dot(X.T, X)
C_XY_ref = np.dot(X.T, Y)
t2 = time.time()
# return mean time
return (t2-t1)/float(nrep)
def mytime_momentsXXXY(X, Y, remove_mean=False, symmetrize=False, nrep=3):
# time for reference calculation
t1 = time.time()
for r in range(nrep):
w, sx, sy, C_XX, C_XY = moments.moments_XXXY(X, Y, remove_mean=remove_mean, symmetrize=symmetrize)
t2 = time.time()
# return mean time
return (t2-t1)/float(nrep)
def benchmark_moments(L=10000, N=10000, nrep=5, xy=False, remove_mean=False, symmetrize=False, const=False):
#S = [10, 100, 1000]
S = genS(N)
# time for reference calculation
X = genX(L, N)
if xy:
Y = genY(L, N)
reftime = reftime_momentsXXXY(X, Y, remove_mean=remove_mean, symmetrize=symmetrize, nrep=nrep)
else:
reftime = reftime_momentsXX(X, remove_mean=remove_mean, nrep=nrep)
# my time
times = np.zeros(len(S))
for k, s in enumerate(S):
X = genX(L, N, n_var=s, const=const)
if xy:
Y = genY(L, N, n_var=s, const=const)
times[k] = mytime_momentsXXXY(X, Y, remove_mean=remove_mean, symmetrize=symmetrize, nrep=nrep)
else:
times[k] = mytime_momentsXX(X, remove_mean=remove_mean, nrep=nrep)
# assemble report
rows = ['L, data points', 'N, dimensions', 'S, nonzeros', 'time trivial', 'time moments_XX', 'speed-up']
table = np.zeros((6, len(S)))
table[0, :] = L
table[1, :] = N
table[2, :] = S
table[3, :] = reftime
table[4, :] = times
table[5, :] = reftime / times
# print table
if xy:
fname = 'moments_XXXY'
else:
fname = 'moments_XX'
print(fname + '\tremove_mean = ' + str(remove_mean) + '\tsym = ' + str(symmetrize) + '\tconst = ' + str(const))
print(rows[0] + ('\t%i' * table.shape[1])%tuple(table[0]))
print(rows[1] + ('\t%i' * table.shape[1])%tuple(table[1]))
print(rows[2] + ('\t%i' * table.shape[1])%tuple(table[2]))
print(rows[3] + ('\t%.3f' * table.shape[1])%tuple(table[3]))
print(rows[4] + ('\t%.3f' * table.shape[1])%tuple(table[4]))
print(rows[5] + ('\t%.3f' * table.shape[1])%tuple(table[5]))
print()
def main():
LNs = [(100000, 100, 10), (10000, 1000, 7), (1000, 2000, 5), (250, 5000, 5), (100, 10000, 5)]
for L, N, nrep in LNs:
benchmark_moments(L=L, N=N, nrep=nrep, xy=False, remove_mean=False, symmetrize=False, const=False)
benchmark_moments(L=L, N=N, nrep=nrep, xy=False, remove_mean=False, symmetrize=False, const=True)
benchmark_moments(L=L, N=N, nrep=nrep, xy=False, remove_mean=True, symmetrize=False, const=False)
benchmark_moments(L=L, N=N, nrep=nrep, xy=False, remove_mean=True, symmetrize=False, const=True)
benchmark_moments(L=L, N=N, nrep=nrep, xy=True, remove_mean=False, symmetrize=False, const=False)
benchmark_moments(L=L, N=N, nrep=nrep, xy=True, remove_mean=False, symmetrize=False, const=True)
benchmark_moments(L=L, N=N, nrep=nrep, xy=True, remove_mean=False, symmetrize=True, const=False)
benchmark_moments(L=L, N=N, nrep=nrep, xy=True, remove_mean=False, symmetrize=True, const=True)
benchmark_moments(L=L, N=N, nrep=nrep, xy=True, remove_mean=True, symmetrize=False, const=False)
benchmark_moments(L=L, N=N, nrep=nrep, xy=True, remove_mean=True, symmetrize=False, const=True)
benchmark_moments(L=L, N=N, nrep=nrep, xy=True, remove_mean=True, symmetrize=True, const=False)
benchmark_moments(L=L, N=N, nrep=nrep, xy=True, remove_mean=True, symmetrize=True, const=True)
if __name__ == "__main__":
main() | lgpl-3.0 | -6,502,714,212,863,599,000 | 35.695652 | 115 | 0.583206 | false | 2.747442 | false | false | false |
09zwcbupt/undergrad_thesis | ext/poxdesk/qx/tool/pylib/ecmascript/frontend/Comment_2.py | 1 | 27369 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
################################################################################
#
# qooxdoo - the new era of web development
#
# http://qooxdoo.org
#
# Copyright:
# 2006-2012 1&1 Internet AG, Germany, http://www.1und1.de
#
# License:
# LGPL: http://www.gnu.org/licenses/lgpl.html
# EPL: http://www.eclipse.org/org/documents/epl-v10.php
# See the LICENSE file in the project's top-level directory for details.
#
# Authors:
# * Sebastian Werner (wpbasti)
# * Fabian Jakobs (fjakobs)
# * Thomas Herchenroeder (thron7)
#
################################################################################
import sys, string, re
from ecmascript.frontend import tree
from generator import Context as context
from textile import textile
##
# Many Regexp's
S_INLINE_COMMENT = "//.*"
R_INLINE_COMMENT = re.compile("^" + S_INLINE_COMMENT + "$")
R_INLINE_COMMENT_TIGHT = re.compile("^//\S+")
R_INLINE_COMMENT_PURE = re.compile("^//")
S_BLOCK_COMMENT = "/\*(?:[^*]|[\n]|(?:\*+(?:[^*/]|[\n])))*\*+/"
R_BLOCK_COMMENT = re.compile("^" + S_BLOCK_COMMENT + "$")
R_BLOCK_COMMENT_JAVADOC = re.compile("^/\*\*")
R_BLOCK_COMMENT_QTDOC = re.compile("^/\*!")
R_BLOCK_COMMENT_AREA = re.compile("^/\*\n\s*\*\*\*\*\*")
R_BLOCK_COMMENT_DIVIDER = re.compile("^/\*\n\s*----")
R_BLOCK_COMMENT_HEADER = re.compile("^/\* \*\*\*\*")
R_BLOCK_COMMENT_TIGHT_START = re.compile("^/\*\S+")
R_BLOCK_COMMENT_TIGHT_END = re.compile("\S+\*/$")
R_BLOCK_COMMENT_PURE_START = re.compile("^/\*")
R_BLOCK_COMMENT_PURE_END = re.compile("\*/$")
R_ATTRIBUTE = re.compile('[^{]@(\w+)\s*')
R_JAVADOC_STARS = re.compile(r'^\s*\*')
R_NAMED_TYPE = re.compile(r'^\s*([a-zA-Z0-9_\.#-]+)\s*({([^}]+)})?')
R_SIMPLE_TYPE = re.compile(r'^\s*({([^}]+)})?')
VARPREFIXES = {
"a" : "Array",
"b" : "Boolean",
"d" : "Date",
"f" : "Function",
"i" : "Integer",
"h" : "Map",
"m" : "Map",
"n" : "Number",
"o" : "Object",
"r" : "RegExp",
"s" : "String",
"v" : "var",
"w" : "Widget"
}
VARNAMES = {
"a" : "Array",
"arr" : "Array",
"doc" : "Document",
"e" : "Event",
"ev" : "Event",
"evt" : "Event",
"el" : "Element",
"elem" : "Element",
"elm" : "Element",
"ex" : "Exception",
"exc" : "Exception",
"flag" : "Boolean",
"force" : "Boolean",
"f" : "Function",
"func" : "Function",
"h" : "Map",
"hash" : "Map",
"map" : "Map",
"node" : "Node",
"n" : "Number",
"num" : "Number",
"o" : "Object",
"obj" : "Object",
"reg" : "RegExp",
"s" : "String",
"str" : "String",
"win" : "Window"
}
VARDESC = {
"propValue" : "Current value",
"propOldValue" : "Previous value",
"propData" : "Property configuration map"
}
def nameToType(name):
typ = "var"
# Evaluate type from name
if name in VARNAMES:
typ = VARNAMES[name]
elif len(name) > 1:
if name[1].isupper():
if name[0] in VARPREFIXES:
typ = VARPREFIXES[name[0]]
return typ
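# Illustrative lookups (hypothetical names, derived from the tables above):
#   nameToType("elem")   -> "Element"  (VARNAMES match)
#   nameToType("bValid") -> "Boolean"  (prefix rule: 'b' + upper-case second letter)
#   nameToType("foo")    -> "var"      (no match)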
def nameToDescription(name):
desc = "TODOC"
if name in VARDESC:
desc = VARDESC[name]
return desc
##
# Parsed comments are represented as lists of "attributes". This is a schematic:
# [{
# 'category' : 'description'|'param'|'throws'|'return'| ... (prob. all '@' tags),
# 'text' : <descriptive string>,
# 'name' : <name e.g. param name>,
# 'defaultValue' : <param default value>,
# 'type' : [{ (array for alternatives, e.g. "{Map|null}")
# 'type': 'Map'|'String'|..., (from e.g. "{String[]}")
# 'dimensions': <int> (0 = scalar, 1 = array, ...)
# }]
# }]
#
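# For illustration (hypothetical comment, not taken from the qooxdoo sources),
#   Comment("/** Moves the widget. @param x {Integer} new position */").parse(False)
# yields roughly:
#   [{'category': 'description', 'text': 'Moves the widget.'},
#    {'category': 'param', 'name': 'x', 'text': 'new position',
#     'type': [{'type': 'Integer', 'dimensions': 0}]}]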
def getAttrib(attribList, category):
for attrib in attribList:
if attrib["category"] == category:
return attrib
def getParam(attribList, name):
for attrib in attribList:
if attrib["category"] == "param":
if "name" in attrib and attrib["name"] == name:
return attrib
def attribHas(attrib, key):
if attrib != None and key in attrib and not attrib[key] in ["", None]:
return True
return False
##
# Holds a string representing a JS comment
#
class Comment(object):
def __init__(self, s):
self.string = s
def correctInline(self):
if R_INLINE_COMMENT_TIGHT.match(self.string):
return R_INLINE_COMMENT_PURE.sub("// ", self.string)
return self.string
def correctBlock(self):
source = self.string
if not self.getFormat() in ["javadoc", "qtdoc"]:
if R_BLOCK_COMMENT_TIGHT_START.search(self.string):
source = R_BLOCK_COMMENT_PURE_START.sub("/* ", self.string)
if R_BLOCK_COMMENT_TIGHT_END.search(source):
source = R_BLOCK_COMMENT_PURE_END.sub(" */", self.string)
return source
def correct(self):
if self.string[:2] == "//":
return self.correctInline()
else:
return self.correctBlock()
def isMultiLine(self):
return self.string.find("\n") != -1
def getFormat(self):
if R_BLOCK_COMMENT_JAVADOC.search(self.string):
return "javadoc"
elif R_BLOCK_COMMENT_QTDOC.search(self.string):
return "qtdoc"
elif R_BLOCK_COMMENT_AREA.search(self.string):
return "area"
elif R_BLOCK_COMMENT_DIVIDER.search(self.string):
return "divider"
elif R_BLOCK_COMMENT_HEADER.search(self.string):
return "header"
return "block"
def qt2javadoc(self):
attribList = self.parse(False)
res = "/**"
desc = self.getAttrib(attribList, "description")
if "text" in desc:
desc = desc["text"]
else:
desc = ""
if "\n" in desc:
res += "\n"
for line in desc.split("\n"):
res += " * %s\n" % line
res += " "
else:
res += " %s " % desc
res += "*/"
return res
def parse(self, format=True):
# print "Parse: " + intext
# Strip "/**", "/*!" and "*/"
intext = self.string[3:-2]
# Strip leading stars in every line
text = ""
for line in intext.split("\n"):
text += R_JAVADOC_STARS.sub("", line) + "\n"
# Autodent
text = Text(text).autoOutdent()
# Search for attributes
desc = { "category" : "description", "text" : "" }
attribs = [desc]
pos = 0
while True:
# this is necessary to match ^ at the beginning of a line
if pos > 0 and text[pos-1] == "\n": pos -= 1
match = R_ATTRIBUTE.search(text, pos)
if match == None:
prevText = text[pos:].rstrip()
if len(attribs) == 0:
desc["text"] = prevText
else:
attribs[-1]["text"] = prevText
break
prevText = text[pos:match.start(0)].rstrip()
pos = match.end(0)
if len(attribs) == 0:
desc["text"] = prevText
else:
attribs[-1]["text"] = prevText
attribs.append({ "category" : match.group(1), "text" : "" })
# parse details
for attrib in attribs:
self.parseDetail(attrib, format)
return attribs
def parseDetail(self, attrib, format=True):
text = attrib["text"]
if attrib["category"] in ["param", "event", "see", "state", "appearance", "childControl"]:
match = R_NAMED_TYPE.search(text)
else:
match = R_SIMPLE_TYPE.search(text)
if match:
text = text[match.end(0):]
if attrib["category"] in ["param", "event", "see", "state", "appearance", "childControl"]:
attrib["name"] = match.group(1)
#print ">>> NAME: %s" % match.group(1)
remain = match.group(3)
else:
remain = match.group(2)
if remain != None:
defIndex = remain.rfind("?")
if defIndex != -1:
attrib["defaultValue"] = remain[defIndex+1:].strip()
remain = remain[0:defIndex].strip()
#print ">>> DEFAULT: %s" % attrib["defaultValue"]
typValues = []
for typ in remain.split("|"):
typValue = typ.strip()
arrayIndex = typValue.find("[")
if arrayIndex != -1:
arrayValue = (len(typValue) - arrayIndex) / 2
typValue = typValue[0:arrayIndex]
else:
arrayValue = 0
typValues.append({ "type" : typValue, "dimensions" : arrayValue })
if len(typValues) > 0:
attrib["type"] = typValues
#print ">>> TYPE: %s" % attrib["type"]
if format:
attrib["text"] = self.formatText(text)
else:
attrib["text"] = self.cleanupText(text)
if attrib["text"] == "":
del attrib["text"]
def cleanupText(self, text):
#print "============= INTEXT ========================="
#print text
text = text.replace("<p>", "\n")
text = text.replace("<br/>", "\n")
text = text.replace("<br>", "\n")
text = text.replace("</p>", " ")
# on single lines strip the content
if not "\n" in text:
text = text.strip()
else:
newline = False
lines = text.split("\n")
text = u""
for line in lines:
if line == "":
if not newline:
newline = True
else:
if text != "":
text += "\n"
if newline:
text += "\n"
newline = False
text += line
#print "============= OUTTEXT ========================="
#print text
# Process TODOC the same as no text
if text == "TODOC":
return ""
return text
##
# JSDoc can contain macros, which are expanded here.
#
def expandMacros(self, text):
_mmap = {
"qxversion" : (context.jobconf.get("let/QOOXDOO_VERSION", "!!TODO!!") if
hasattr(context,'jobconf') else "[undef]" ) # ecmalint.py doesn't know jobs
}
templ = string.Template(text)
text = templ.safe_substitute(_mmap)
return text
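    # e.g. (illustrative) "Available since $qxversion" expands to "Available since 1.6"
    # when the generator job config defines let/QOOXDOO_VERSION; without a job context
    # the placeholder becomes "[undef]".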
def formatText(self, text):
text = self.cleanupText(text)
#if "\n" in text:
# print
# print "------------- ORIGINAL ----------------"
# print text
text = text.replace("<pre", "\n\n<pre").replace("</pre>", "</pre>\n\n")
text = self.expandMacros(text)
# encode to ascii leads into a translation of umlauts to their XML code.
text = unicode(textile.textile(text.encode("utf-8"), output="ascii"))
#if "\n" in text:
# print "------------- TEXTILED ----------------"
# print text
return text
def splitText(self, attrib=True):
res = ""
first = True
for line in self.string.split("\n"):
if attrib:
if first:
res += " %s\n" % line
else:
res += " * %s\n" % line
else:
res += " * %s\n" % line
first = False
if not res.endswith("\n"):
res += "\n"
return res
@staticmethod
def parseType(vtype):
typeText = ""
firstType = True
for entry in vtype:
if not firstType:
typeText += " | "
typeText += entry["type"]
if "dimensions" in entry and entry["dimensions"] > 0:
typeText += "[]" * entry["dimensions"]
firstType = False
return typeText
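    # e.g. (illustrative) Comment.parseType([{'type': 'String', 'dimensions': 1},
    #                                        {'type': 'null', 'dimensions': 0}])
    # returns "String[] | null".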
##
# Helper class for text-level operations
#
class Text(object):
def __init__(self, s):
self.string = s
##
# Remove a fixed number of spaces from the beginning of each line
# in text.
#
# @param indent {Int} number of spaces to remove
#
def outdent(self, indent):
return re.compile("\n\s{%s}" % indent).sub("\n", self.string)
#def indent(self, source, indent):
# return re.compile("\n").sub("\n" + (" " * indent), source)
##
# Insert <indent> at the beginning of each line in text
#
# @param indent {String} string to insert
#
def indent(self, indent):
return re.compile("\n").sub("\n" + indent, self.string)
def autoOutdent(self):
text = self.string
lines = text.split("\n")
if len(lines) <= 1:
return text.strip()
for line in lines:
if len(line) > 0 and line[0] != " ":
return text
result = ""
for line in lines:
if len(line) >= 0:
result += line[1:]
result += "\n"
return result
# -- Helper functions working on tree nodes ------------------------------------
def hasThrows(node):
if node.type == "throw":
return True
if node.hasChildren():
for child in node.children:
if hasThrows(child):
return True
return False
def getReturns(node, found):
if node.type == "function":
pass
elif node.type == "return":
if node.getChildrenLength(True) > 0:
val = "var"
else:
val = "void"
if node.hasChild("expression"):
expr = node.getChild("expression")
if expr.hasChild("variable"):
var = expr.getChild("variable")
if var.getChildrenLength(True) == 1 and var.hasChild("identifier"):
val = nameToType(var.getChild("identifier").get("name"))
else:
val = "var"
elif expr.hasChild("constant"):
val = expr.getChild("constant").get("constantType")
if val == "number":
val = expr.getChild("constant").get("detail")
elif expr.hasChild("array"):
val = "Array"
elif expr.hasChild("map"):
val = "Map"
elif expr.hasChild("function"):
val = "Function"
elif expr.hasChild("call"):
val = "var"
if not val in found:
found.append(val)
elif node.hasChildren():
for child in node.children:
getReturns(child, found)
return found
def findComment(node):
def findCommentBefore(node):
while node:
if node.hasChild("commentsBefore"):
for comment in node.getChild("commentsBefore").children:
if comment.get("detail") in ["javadoc", "qtdoc"]:
comments = parseNode(node)
return comments
if node.hasParent():
node = node.parent
else:
return None
def findCommentAfter(node):
while node:
if node.hasChild("commentsBefore"):
for comment in node.getChild("commentsBefore").children:
if comment.get("detail") in ["javadoc", "qtdoc"]:
comments = parseNode(node)
return comments
if node.hasChildren():
node = node.children[0]
else:
return None
if node.type == "file":
return findCommentAfter(node)
else:
return findCommentBefore(node)
def parseNode(node):
"""Takes the last doc comment from the commentsBefore child, parses it and
returns a Node representing the doc comment"""
# Find the last doc comment
commentsBefore = node.getChild("commentsBefore", False)
if commentsBefore and commentsBefore.hasChildren():
for child in commentsBefore.children:
if child.type == "comment" and child.get("detail") in ["javadoc", "qtdoc"]:
return Comment(child.get("text")).parse()
return []
##
# fill(node) -- look for function definitions in the tree represented by <node>,
# look for their corresponding comment and amend it, or create it in the first
# place
#
def fill(node):
if node.type in ["comment", "commentsBefore", "commentsAfter"]:
return
if node.hasParent():
target = node
if node.type == "function":
name = node.get("name", False)
else:
name = ""
alternative = False
assignType = None
if name != None:
assignType = "function"
# move to hook operation
while target.parent.type in ["first", "second", "third"] and target.parent.parent.type == "operation" and target.parent.parent.get("operator") == "HOOK":
alternative = True
target = target.parent.parent
# move comment to assignment
while target.parent.type == "right" and target.parent.parent.type == "assignment":
target = target.parent.parent
if target.hasChild("left"):
left = target.getChild("left")
if left and left.hasChild("variable"):
var = left.getChild("variable")
last = var.getLastChild(False, True)
if last and last.type == "identifier":
name = last.get("name")
assignType = "object"
for child in var.children:
if child.type == "identifier":
if child.get("name") in ["prototype", "Proto"]:
assignType = "member"
elif child.get("name") in ["class", "base", "Class"]:
assignType = "static"
elif target.parent.type == "definition":
name = target.parent.get("identifier")
assignType = "definition"
# move to definition
if target.parent.type == "assignment" and target.parent.parent.type == "definition" and target.parent.parent.parent.getChildrenLength(True) == 1:
target = target.parent.parent.parent
assignType = "function"
# move comment to keyvalue
if target.parent.type == "value" and target.parent.parent.type == "keyvalue":
target = target.parent.parent
name = target.get("key")
assignType = "map"
if name == "construct":
assignType = "constructor"
if target.parent.type == "map" and target.parent.parent.type == "value" and target.parent.parent.parent.type == "keyvalue":
paname = target.parent.parent.parent.get("key")
if paname == "members":
assignType = "member"
elif paname == "statics":
assignType = "static"
# filter stuff, only add comments to member and static values and to all functions
if assignType in ["member", "static"] and node.type == "function":
if not hasattr(target, "documentationAdded") and target.parent.type != "params":
old = []
commentNode = None
# create commentsBefore
if target.hasChild("commentsBefore"):
commentsBefore = target.getChild("commentsBefore")
if commentsBefore.hasChild("comment"):
for child in commentsBefore.children:
if child.get("detail") in ["javadoc", "qtdoc"]:
old = Comment(child.get("text")).parse(False)
commentNode = child
commentNodeIndex = commentsBefore.children.index(child)
break
else:
commentsBefore = tree.Node("commentsBefore")
target.addChild(commentsBefore)
# create comment node
if commentNode == None:
commentNodeIndex = None
commentNode = tree.Node("comment")
commentNode.set("detail", "javadoc")
#if node.type == "function":
# commentNode.set("text", fromFunction(node, assignType, name, alternative, old))
#else:
# commentNode.set("text", fromNode(node, assignType, name, alternative, old))
commentNode.set("text", fromFunction(node, assignType, name, alternative, old))
commentNode.set("multiline", True)
commentsBefore.addChild(commentNode,commentNodeIndex)
# in case of alternative methods, use the first one, ignore the others
target.documentationAdded = True
if node.hasChildren():
for child in node.children:
fill(child)
def fromNode(node, assignType, name, alternative, old=[]):
#
# description
##############################################################
oldDesc = getAttrib(old, "description")
if attribHas(oldDesc, "text"):
newText = oldDesc["text"]
else:
newText = "{var} TODOC"
if "\n" in newText:
s = "/**\n%s\n-*/" % Comment(newText).splitText(False)
else:
s = "/** %s */" % newText
s = s.replace("/** ", "/** ").replace(" */", " */")
#
# other @attributes
##############################################################
for attrib in old:
cat = attrib["category"]
if cat != "description":
print " * Found unallowed attribute %s in comment for %s (node)" % (cat, name)
return s
def fromFunction(func, assignType, name, alternative, old=[]):
#
# open comment
##############################################################
s = "/**\n"
#
# description
##############################################################
oldDesc = getAttrib(old, "description")
if attribHas(oldDesc, "text"):
newText = oldDesc["text"]
else:
newText = "TODOC"
s += Comment(newText).splitText(False)
s += " *\n"
#
# add @type
##############################################################
# TODO: Remove the @type annotation as it conflicts with JSdoc
# if assignType != None:
# s += " * @type %s\n" % assignType
# else:
# s += " * @type unknown TODOC\n"
#
# add @abstract
##############################################################
oldAbstract = getAttrib(old, "abstract")
first = func.getChild("body").getChild("block").getFirstChild(False, True)
abstract = first and first.type == "throw"
if abstract:
if attribHas(oldAbstract, "text"):
newText = oldDesc["text"]
else:
newText = ""
s += " * @abstract%s" % Comment(newText).splitText()
if not s.endswith("\n"):
s += "\n"
elif oldAbstract:
print " * Removing old @abstract for %s" % name
#
# add @param
##############################################################
params = func.getChild("params")
if params.hasChildren():
for child in params.children:
if child.type == "variable":
newName = child.getChild("identifier").get("name")
newType = newTypeText = nameToType(newName)
newDefault = ""
newText = nameToDescription(newName)
oldParam = getParam(old, newName)
# Get type and text from old content
if oldParam:
if attribHas(oldParam, "type"):
newTypeText = Comment.parseType(oldParam["type"])
if attribHas(oldParam, "defaultValue"):
newDefault = " ? %s" % oldParam["defaultValue"]
if attribHas(oldParam, "text"):
newText = oldParam["text"].strip()
s += " * @param %s {%s%s}%s" % (newName, newTypeText, newDefault, Comment(newText).splitText())
if not s.endswith("\n"):
s += "\n"
#
# add @return
##############################################################
if name != "construct":
oldReturn = getAttrib(old, "return")
newType = "void"
newText = ""
# Get type and text from old content
if oldReturn:
if attribHas(oldReturn, "type"):
newType = Comment.parseType(oldReturn["type"])
if attribHas(oldReturn, "text"):
newText = oldReturn["text"].strip()
# Try to autodetect the type
if newType == "void":
returns = getReturns(func.getChild("body"), [])
if len(returns) > 0:
newType = " | ".join(returns)
elif name != None and name[:2] == "is" and name[3].isupper():
newType = "Boolean"
# Add documentation hint in non void cases
if newType != "void":
if newText == "":
newText = "TODOC"
s += " * @return {%s}%s" % (newType, Comment(newText).splitText())
if not s.endswith("\n"):
s += "\n"
#
# add @throws
##############################################################
oldThrows = getAttrib(old, "throws")
if hasThrows(func):
if oldThrows and attribHas(oldThrows, "text"):
newText = oldThrows["text"]
elif abstract:
newText = "the abstract function warning."
else:
newText = "TODOC"
s += " * @throws%s" % Comment(newText).splitText()
if not s.endswith("\n"):
s += "\n"
elif oldThrows:
print " * Removing old @throw attribute in comment for %s" % name
#
# other @attributes
##############################################################
for attrib in old:
cat = attrib["category"]
if cat in ["see", "author", "deprecated", "exception", "since", "version", "abstract", "overridden", "lint"]:
s += " * @%s" % cat
if cat == "see":
if attribHas(attrib, "name"):
s += Comment(attrib["name"]).splitText()
elif attribHas(attrib, "text"):
s += Comment(attrib["text"]).splitText()
if not s.endswith("\n"):
s += "\n"
elif not cat in ["description", "type", "abstract", "param", "return", "throws", "link", "internal", "signature"]:
print " * Found unallowed attribute %s in comment for %s (function)" % (cat, name)
#
# close comment
##############################################################
s += " */"
return s
| gpl-3.0 | 1,810,750,978,431,360,500 | 27.27376 | 161 | 0.483796 | false | 4.120596 | false | false | false |
arauzoliver/uip-iiig2016-prog3 | FinalPC3/noteapp.py | 1 | 1122 | import bottle
import pymongo
import book
"""
Default route for the index
"""
@bottle.route('/')
def book_index():
mynames_list = book.find_names()
return bottle.template('index', dict(mynames = mynames_list))
"""
Handles posting new entries to be inserted into MongoDB
"""
@bottle.route('/newguest', method='POST')
def insert_newguest():
name = bottle.request.forms.get("name")
email = bottle.request.forms.get("email")
book.insert_name(name,email)
bottle.redirect('/')
"""
Configure the data connection
"""
"""
Set up a connection string to the local server
"""
connection_string = "mongodb://localhost"
"""
Manages the connection between MongoDB and PyMongo; PyMongo handles our connection pool
"""
connection = pymongo.MongoClient(connection_string)
#Now we want to set a context to the names database we created using the mongo interactive shell
"""
Point to the names database we created using the mongo interactive shell
"""
database = connection.names
"""
Attach the data object
"""
book = book.book(database)
bottle.debug(True)
bottle.run(host='localhost', port=8082) | mit | 6,061,514,939,707,263,000 | 20.039216 | 96 | 0.697861 | false | 3.008043 | false | false | false |
lhuriguen/tophandball | utils/models.py | 1 | 1767 | import urllib
import json
from decimal import Decimal
from django.db import models
class Marker(models.Model):
"""
Abstract model that provides geocoding for models with address.
"""
address = models.CharField(max_length=200, blank=True,
help_text="Separate address items with commas.")
latitude = models.DecimalField(max_digits=8, decimal_places=6,
null=True, blank=True)
longitude = models.DecimalField(max_digits=9, decimal_places=6,
null=True, blank=True)
class Meta:
abstract = True
def __init__(self, *args, **kwargs):
super(Marker, self).__init__(*args, **kwargs)
self._original_address = self.address
def save(self, *args, **kwargs):
if self._original_address != self.address:
self.latitude, self.longitude = 0, 0
if self.address and (not self.latitude or not self.longitude):
self.latitude, self.longitude = self.geocode(self.address)
# print self.latitude, self.longitude
super(Marker, self).save(*args, **kwargs)
def geocode(self, address):
address = urllib.quote_plus(address.encode('utf-8'))
base_url = "http://maps.googleapis.com/maps/api/geocode/json?"
request = base_url + "address=%s" % address
if self.country:
request += "®ion=%s" % self.country.code
data = json.loads(urllib.urlopen(request).read())
if data['status'] == 'OK':
latitude = data['results'][0]['geometry']['location']['lat']
longitude = data['results'][0]['geometry']['location']['lng']
return Decimal(latitude), Decimal(longitude)
return 0, 0
| mit | 1,176,390,992,221,440,800 | 38.266667 | 79 | 0.594228 | false | 4.187204 | false | false | false |
chop-dbhi/django-concerns | concerns/migrations/0004_auto__chg_field_concern_reporter.py | 1 | 4938 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'Concern.reporter'
db.alter_column('concerns_concern', 'reporter_id', self.gf('django.db.models.fields.related.ForeignKey')(null=True, to=orm['auth.User']))
def backwards(self, orm):
# Changing field 'Concern.reporter'
db.alter_column('concerns_concern', 'reporter_id', self.gf('django.db.models.fields.related.ForeignKey')(default=None, to=orm['auth.User']))
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'concerns.concern': {
'Meta': {'ordering': "('created',)", 'object_name': 'Concern'},
'comment': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {}),
'document': ('django.db.models.fields.TextField', [], {}),
'headers': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {}),
'reporter': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'reported_concerns'", 'null': 'True', 'to': "orm['auth.User']"}),
'resolution': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'resolver': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'resolved_conerns'", 'null': 'True', 'to': "orm['auth.User']"}),
'status': ('django.db.models.fields.CharField', [], {'default': "'New'", 'max_length': '100'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['concerns']
| bsd-2-clause | -4,626,761,653,977,192,000 | 66.643836 | 182 | 0.560753 | false | 3.712782 | false | false | false |
JP-Ellis/django-select2-forms | select2/views.py | 1 | 6241 | import copy
import json
from django.apps import apps
from django.db import models
from django.forms.models import ModelChoiceIterator
from django.http import HttpResponse
from django.utils.encoding import force_text
import logging
logger = logging.getLogger(__name__)
class ViewException(Exception):
pass
class InvalidParameter(ViewException):
pass
class JsonResponse(HttpResponse):
callback = None
def __init__(self, content='', callback=None, content_type="application/json", *args, **kwargs):
if not isinstance(content, str):
content = json.dumps(content)
if callback is not None:
self.callback = callback
if self.callback is not None:
content = u"%s(\n%s\n)" % (self.callback, content)
content_type = "text/javascript"
return super(JsonResponse, self).__init__(
content=content,
content_type=content_type,
*args,
**kwargs)
class Select2View(object):
def __init__(self, app_label, model_name, field_name):
self.app_label = app_label
self.model_name = model_name
self.field_name = field_name
_field = None
def get_field_and_model(self):
model_cls = apps.get_model(self.app_label, self.model_name)
if model_cls is None:
raise ViewException('Model %s.%s does not exist' % (self.app_label, self.model_name))
if self._field is None:
self._field = model_cls._meta.get_field(self.field_name)
return self._field, model_cls
def get_response(self, data, request, **kwargs):
callback = request.GET.get('callback', None)
if callback is None:
response_cls = JsonResponse
else:
response_cls = type('JsonpResponse', (JsonResponse,), {
'callback': callback,
})
return response_cls(data, **kwargs)
def get_data(self, queryset, page=None, page_limit=None):
field, model_cls = self.get_field_and_model()
        # Check for the existence of a callable %s_queryset method on the
        # model class and use it to filter the Select2 queryset.
        #
        # This is useful for model inheritance where limit_choices_to cannot
        # easily be overridden in child classes.
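        #
        # For illustration (hypothetical names): for a field called "authors" the
        # model could define
        #
        #     @classmethod
        #     def authors_queryset(cls, queryset):
        #         return queryset.filter(is_active=True)
        #
        # and that narrowed queryset would be used below.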
model_queryset_method = '%s_queryset' % field.name
if callable(getattr(model_cls, model_queryset_method, None)):
queryset = getattr(model_cls, model_queryset_method)(queryset)
formfield = field.formfield()
total_count = None
if page is not None and page_limit is not None:
total_count = queryset.count()
offset = (page - 1) * page_limit
end = offset + page_limit
queryset = queryset[offset:end]
else:
offset = None
formfield.queryset = queryset
iterator = ModelChoiceIterator(formfield)
if offset is None:
total_count = len(iterator)
more = False
else:
paged_count = offset + len(iterator)
more = bool(paged_count < total_count)
data = {
'total': total_count,
'more': more,
'results': [],
}
for value, label in iterator:
            if value == u'':
continue
data['results'].append({
'id': value,
'text': label,
})
return data
def init_selection(self, pks, is_multiple=False):
field, model_cls = self.get_field_and_model()
pks = [int(pk) for pk in pks]
queryset = field.queryset.filter(**{
('{}__in'.format(field.rel.get_related_field().name)): pks,
}).distinct()
pk_ordering = dict([(force_text(pk), i) for i, pk in enumerate(pks)])
data = self.get_data(queryset)
# Make sure we return in the same order we were passed
def results_sort_callback(item):
pk = force_text(item['id'])
return pk_ordering[pk]
data['results'] = sorted(data['results'], key=results_sort_callback)
return data['results']
def fetch_items(self, request):
try:
field, model_cls = self.get_field_and_model()
except ViewException as e:
return self.get_response({'error': str(e)}, request, status=500)
queryset = copy.deepcopy(field.queryset)
q = request.GET.get('q', None)
page_limit = request.GET.get('page_limit', 10)
page = request.GET.get('page', 1)
try:
if q is None:
return self.get_response({"results": [], "total": 0, "more": False}, request)
try:
page_limit = int(page_limit)
except TypeError:
raise InvalidParameter("Invalid page_limit '%s' passed" % page_limit)
else:
if page_limit < 1:
raise InvalidParameter("Invalid page_limit '%s' passed" % page_limit)
try:
page = int(page)
except TypeError:
raise InvalidParameter("Invalid page '%s' passed")
else:
if page < 1:
raise InvalidParameter("Invalid page '%s' passed")
except InvalidParameter as e:
return self.get_response({'error': str(e)}, request, status=500)
search_field = field.search_field
if callable(search_field):
search_field = search_field(q)
if isinstance(search_field, models.Q):
q_obj = search_field
else:
qset_contains_filter_key = '%(search_field)s__%(insensitive)scontains' % {
'search_field': search_field,
'insensitive': 'i' if not field.case_sensitive else '',
}
q_obj = models.Q(**{qset_contains_filter_key: q})
queryset = queryset.filter(q_obj)
data = self.get_data(queryset, page, page_limit)
return self.get_response(data, request)
def fetch_items(request, app_label, model_name, field_name):
view_cls = Select2View(app_label, model_name, field_name)
return view_cls.fetch_items(request)
| bsd-2-clause | 2,370,086,864,127,999,500 | 32.553763 | 100 | 0.573145 | false | 4.111331 | false | false | false |
Designist/pybuilder | setup.py | 1 | 1823 | #!/usr/bin/env python
#
# -*- coding: utf-8 -*-
#
# This file is part of PyBuilder
#
# Copyright 2011-2015 PyBuilder Team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This script allows to support installation via:
# pip install git+git://github.com/pybuilder/pybuilder.git@<branch>
#
# THIS IS A HACK, DO NOT RUN LOCALLY
#
import os
import subprocess
import sys
import glob
import shutil
script_dir = os.path.dirname(os.path.realpath(__file__))
build_script = os.path.join(script_dir, "build.py")
exit_code = 0
try:
subprocess.check_call([build_script, "clean", "install_dependencies", "publish"])
dist_dir = glob.glob(os.path.join(script_dir, "target", "dist", "*"))[0]
for src_file in glob.glob(os.path.join(dist_dir, "*")):
file_name = os.path.basename(src_file)
target_file_name = os.path.join(script_dir, file_name)
if os.path.exists(target_file_name):
if os.path.isdir(target_file_name):
os.removedirs(target_file_name)
else:
os.remove(target_file_name)
shutil.move(src_file, script_dir)
setup_args = sys.argv[1:]
subprocess.check_call(["./setup.py"] + setup_args, cwd=script_dir)
except subprocess.CalledProcessError as e:
exit_code = e.returncode
sys.exit(exit_code)
| apache-2.0 | -1,571,231,106,159,978,200 | 32.759259 | 85 | 0.679649 | false | 3.375926 | false | false | false |
nens/sufriblib | setup.py | 1 | 1083 | from setuptools import setup
version = '0.5.dev0'
long_description = '\n\n'.join([
open('README.rst').read(),
open('CREDITS.rst').read(),
open('CHANGES.rst').read(),
])
install_requires = [
'setuptools',
'pyproj',
    ]
tests_require = [
'nose',
'coverage',
]
setup(name='sufriblib',
version=version,
description="A library for working with SUFRIB 2.1 files (.RIB and .RMB files, sewer system measurement data)",
long_description=long_description,
# Get strings from http://www.python.org/pypi?%3Aaction=list_classifiers
classifiers=[],
keywords=[],
author='Remco Gerlich',
author_email='[email protected]',
url='',
license='GPL',
packages=['sufriblib'],
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
tests_require=tests_require,
extras_require={'test': tests_require},
entry_points={
'console_scripts': [
'sufribcat=sufriblib.scripts:sufribcat',
]},
)
| gpl-3.0 | 5,829,553,528,773,567,000 | 24.785714 | 117 | 0.604801 | false | 3.504854 | false | false | false |
pdevetto/super-duper-disco | movies/migrations/0001_initial.py | 1 | 1056 | # -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-11-28 14:30
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Director',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
],
),
migrations.CreateModel(
name='Movie',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=200)),
('year', models.DateTimeField(verbose_name='Movie year')),
('director', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='movies.Director')),
],
),
]
| gpl-3.0 | 874,054,082,737,347,300 | 31 | 115 | 0.574811 | false | 4.327869 | false | false | false |
jun66j5/trac-ja | sample-plugins/workflow/VoteOperation.py | 1 | 2994 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2007-2013 Edgewall Software
# Copyright (C) 2007 Eli Carter <[email protected]>
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.com/license.html.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/.
from genshi.builder import tag
from trac.core import implements,Component
from trac.ticket.api import ITicketActionController
from trac.ticket.default_workflow import ConfigurableTicketWorkflow
from trac.ticket.model import Priority, Ticket
#from trac.perm import IPermissionRequestor # (TODO)
revision = "$Rev: 6326 $"
url = "$URL: https://svn.edgewall.org/repos/trac/trunk/sample-plugins/workflow/VoteOperation.py $"
class VoteOperation(Component):
"""Provides a simplistic vote feature.
This is a sample action controller illustrating how to create additional
''operations''.
Don't forget to add `VoteOperation` to the workflow
option in the `[ticket]` section in TracIni.
If there is no other workflow option, the line will look like this:
{{{
workflow = ConfigurableTicketWorkflow,VoteOperation
}}}
"""
implements(ITicketActionController)
def get_ticket_actions(self, req, ticket):
controller = ConfigurableTicketWorkflow(self.env)
return controller.get_actions_by_operation_for_req(req, ticket, 'vote')
def get_all_status(self):
return []
def render_ticket_action_control(self, req, ticket, action):
id = 'vote_%s_result' % (action, )
selected_value = req.args.get(id, 'for')
options = ['for', 'against']
return ("vote",
tag.select([tag.option(x, selected=(x == selected_value or
None))
for x in options], name=id, id=id),
"Vote on the issue, raising or lowering its priority")
def get_ticket_changes(self, req, ticket, action):
id = 'vote_%s_result' % (action, )
selected = req.args.get(id, 'for')
priorities = list(Priority.select(self.env))
orig_ticket = Ticket(self.env, ticket.id)
current_priority = int(Priority(self.env, name=
orig_ticket['priority']).value)
if selected == 'for':
# priorities are 1-based, not 0-based
new_value = max(1, current_priority - 1)
else:
maxval = max([int(p.value) for p in priorities])
new_value = min(maxval, current_priority + 1)
return {'priority': [p.name for p in priorities
if int(p.value) == new_value][0]}
def apply_action_side_effects(self, req, ticket, action):
pass
| bsd-3-clause | 6,424,570,206,271,528,000 | 38.394737 | 98 | 0.642619 | false | 3.888312 | false | false | false |
CloudVLab/professional-services | tools/gsuite-exporter/setup.py | 1 | 2139 | # Copyright 2018 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
from setuptools import setup, find_packages
from os import path
from io import open
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='gsuite-exporter',
version='0.0.3',
description='GSuite Admin API Exporter',
long_description=long_description,
long_description_content_type='text/markdown',
author='Google Inc.',
author_email='[email protected]',
packages=find_packages(exclude=['contrib', 'docs', 'tests']),
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Build Tools',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
keywords='gsuite exporter stackdriver',
install_requires=[
'google-api-python-client',
'python-dateutil',
'requests'
],
entry_points={
'console_scripts': [
'gsuite-exporter=gsuite_exporter.cli:main',
],
},
python_requires='>=2.7'
)
| apache-2.0 | -8,173,679,682,487,673,000 | 31.907692 | 74 | 0.661992 | false | 3.998131 | false | false | false |
UTNkar/moore | src/home/migrations/0027_manual_paragraph_data.py | 1 | 1268 | # Generated by Django 2.2.10 on 2020-04-02 21:08
from django.db import migrations
from itertools import chain
from utils.data_migrations import stream_field_filter_map
def richtext_to_paragraph(block):
return {
'type': 'paragraph',
'value': {
'text': block['value'],
'alignment': "Left"
}
}
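# For illustration (hypothetical block, not from the migration data):
#   {'type': 'paragraph', 'value': '<p>Hello</p>'}
# becomes
#   {'type': 'paragraph', 'value': {'text': '<p>Hello</p>', 'alignment': 'Left'}}
# and paragraph_to_richtext() below reverses the mapping.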
def paragraph_to_richtext(block):
return {
'type': 'paragraph',
'value': block['value']['text'],
}
def apply_to_all_pages(apps, mapper):
HomePage = apps.get_model('home', 'HomePage')
WebPage = apps.get_model('home', 'WebPage')
hps = HomePage.objects.all()
    wps = WebPage.objects.all()
for obj in chain(hps, wps):
obj.body_en = stream_field_filter_map(obj.body_en, "paragraph", mapper)
obj.body_sv = stream_field_filter_map(obj.body_sv, "paragraph", mapper)
        obj.save()
def forwards(apps, schema_editor):
apply_to_all_pages(apps, richtext_to_paragraph)
def backwards(apps, schema_editor):
apply_to_all_pages(apps, paragraph_to_richtext)
class Migration(migrations.Migration):
dependencies = [
('home', '0026_auto_20200402_2308'),
]
operations = [
migrations.RunPython(forwards, backwards)
]
| agpl-3.0 | -3,568,786,183,921,958,400 | 25.978723 | 79 | 0.621451 | false | 3.436314 | false | false | false |
JonnyH/pyra-kernel | GTA04/root/vibra.py | 2 | 3531 | #!/usr/bin/env python
import fcntl, struct, time, array
#
# There are two steps to creating a rumble effect
# 1/ describe the effect and give it to the driver using an
# ioctl.
# There a 3 paramaters:
# strength: from 0 to 0xffff - this code takes a value from 0 to
# 1 and scales it
# duration: milliseconds
# delay until start: milliseconds.
#
# 2/ write a request to play a specific effect.
#
# It is possible to have multiple effects active. If they have
# different delays they will start at different times.
# This demo shows combining 3 non-overlapping effects to make
# a simple vibration pattern
#
# An effect is created with f.new_vibe(strength, duration, delay)
# That effect can then be started with 'play' and stopped with 'stop'.
# EVIOCRMFF = _IOW('E', 0x81, int)
# dir: 2 WRITE = 1 == 0x40000
# size 14 4
# type 8 'E' == 0x45
# nr: 8 0x81
#
EVIOCRMFF = 0x40044581
# EVIOCSFF _IOC(_IOC_WRITE, 'E', 0x80, sizeof(struct ff_effect))
EVIOCSFF = 0x402c4580
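# Worked out from the standard Linux _IOC bit layout (dir<<30 | size<<16 | type<<8 | nr),
# assuming sizeof(int) == 4 and sizeof(struct ff_effect) == 0x2c:
#   EVIOCRMFF: (1 << 30) | (0x04 << 16) | (0x45 << 8) | 0x81  ->  0x40044581
#   EVIOCSFF:  (1 << 30) | (0x2c << 16) | (0x45 << 8) | 0x80  ->  0x402c4580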
class Vibra:
def __init__(self, file = "/dev/input/rumble"):
self.f = open(file, "r+")
def close(self):
self.f.close()
def new_vibe(self, strength, length, delay):
# strength is from 0 to 1
# length and delay are in millisecs
# this is 'struct ff_effect' from "linux/input.h"
effect = struct.pack('HhHHHHHxxHH',
0x50, -1, 0, # FF_RUMBLE, id, direction
0, 0, # trigger (button interval)
length, delay,
int(strength * 0xFFFF), 0)
a = array.array('h', effect)
fcntl.ioctl(self.f, EVIOCSFF, a, True)
return a[1]
def multi_vibe(self, length, repeats = 1, delay = None, strength = 1):
start = 0
if delay == None:
delay = length
v = []
for i in range(0, repeats):
v.append(self.new_vibe(strength, length, start))
start += length + delay
return v
def play(self, id):
# this is 'struct input_event': sec, nsec, type, code, value
if type(id) == tuple or type(id) == list:
ev_play = ''
for i in id:
ev_play = ev_play + struct.pack('LLHHi', 0, 0, 0x15, i, 1)
else:
ev_play = struct.pack('LLHHi', 0, 0, 0x15, id, 1)
self.f.write(ev_play)
self.f.flush()
def stop(self, id):
# this is 'struct input_event': sec, nsec, type, code, value
if type(id) == tuple or type(id) == list:
ev_stop = ''
for i in id:
ev_stop = ev_stop + struct.pack('LLHHi', 0, 0, 0x15, i, 0)
else:
ev_stop = struct.pack('LLHHi', 0, 0, 0x15, id, 0)
self.f.write(ev_stop)
self.f.flush()
def forget(self, id):
if type(id) == tuple or type(id) == list:
for i in id:
fcntl.ioctl(self.f, EVIOCRMFF, i)
else:
fcntl.ioctl(self.f, EVIOCRMFF, id)
if __name__ == '__main__':
f = Vibra("/dev/input/rumble")
# rumble for 300ms, pause for 100ms, rumble for 300ms, pause for 200ms
# then half-speed rumble for 600ms
p1 = f.new_vibe(1, 300, 0)
p2 = f.new_vibe(1, 300,400)
p3 = f.new_vibe(0.5, 600, 900)
f.play((p1, p2, p3))
time.sleep(2)
f.forget((p1, p2, p3))
f.play(f.multi_vibe(200, 14, delay=100))
time.sleep(5)
| gpl-2.0 | -7,004,655,211,986,307,000 | 30.810811 | 74 | 0.54857 | false | 3.124779 | false | false | false |
serge-sans-paille/pythran | pythran/transformations/remove_nested_functions.py | 1 | 4044 | """ RemoveNestedFunctions turns nested function into top-level functions. """
from pythran.analyses import GlobalDeclarations, ImportedIds
from pythran.passmanager import Transformation
from pythran.tables import MODULES
from pythran.conversion import mangle
import pythran.metadata as metadata
import gast as ast
class _NestedFunctionRemover(ast.NodeTransformer):
def __init__(self, parent):
ast.NodeTransformer.__init__(self)
self.parent = parent
self.identifiers = set(self.global_declarations.keys())
def __getattr__(self, attr):
return getattr(self.parent, attr)
def visit_FunctionDef(self, node):
self.update = True
if MODULES['functools'] not in self.global_declarations.values():
import_ = ast.Import([ast.alias('functools', mangle('functools'))])
self.ctx.module.body.insert(0, import_)
functools_module = MODULES['functools']
self.global_declarations[mangle('functools')] = functools_module
self.ctx.module.body.append(node)
former_name = node.name
seed = 0
new_name = "pythran_{}{}"
while new_name.format(former_name, seed) in self.identifiers:
seed += 1
new_name = new_name.format(former_name, seed)
self.identifiers.add(new_name)
ii = self.gather(ImportedIds, node)
binded_args = [ast.Name(iin, ast.Load(), None, None)
for iin in sorted(ii)]
node.args.args = ([ast.Name(iin, ast.Param(), None, None)
for iin in sorted(ii)] +
node.args.args)
metadata.add(node, metadata.Local())
class Renamer(ast.NodeTransformer):
def visit_Call(self, node):
self.generic_visit(node)
if (isinstance(node.func, ast.Name) and
node.func.id == former_name):
node.func.id = new_name
node.args = (
[ast.Name(iin, ast.Load(), None, None)
for iin in sorted(ii)] +
node.args
)
return node
Renamer().visit(node)
node.name = new_name
self.global_declarations[node.name] = node
proxy_call = ast.Name(new_name, ast.Load(), None, None)
new_node = ast.Assign(
[ast.Name(former_name, ast.Store(), None, None)],
ast.Call(
ast.Attribute(
ast.Name(mangle('functools'), ast.Load(), None, None),
"partial",
ast.Load()
),
[proxy_call] + binded_args,
[],
),
None)
self.generic_visit(node)
return new_node
class RemoveNestedFunctions(Transformation):
"""
Replace nested function by top-level functions.
Also add a call to a bind intrinsic that
generates a local function with some arguments binded.
>>> import gast as ast
>>> from pythran import passmanager, backend
>>> node = ast.parse("def foo(x):\\n def bar(y): return x+y\\n bar(12)")
>>> pm = passmanager.PassManager("test")
>>> _, node = pm.apply(RemoveNestedFunctions, node)
>>> print(pm.dump(backend.Python, node))
import functools as __pythran_import_functools
def foo(x):
bar = __pythran_import_functools.partial(pythran_bar0, x)
bar(12)
def pythran_bar0(x, y):
return (x + y)
"""
def __init__(self):
super(RemoveNestedFunctions, self).__init__(GlobalDeclarations)
def visit_Module(self, node):
# keep original node as it's updated by _NestedFunctionRemover
for stmt in node.body:
self.visit(stmt)
return node
def visit_FunctionDef(self, node):
nfr = _NestedFunctionRemover(self)
node.body = [nfr.visit(stmt) for stmt in node.body]
self.update |= nfr.update
return node
| bsd-3-clause | -783,504,513,930,404,400 | 32.7 | 79 | 0.568249 | false | 4.011905 | false | false | false |
Hikasgai/HikasgaiApp | placeForMe/settings.py | 1 | 4053 | """
Django settings for gettingstarted project, on Heroku. For more info, see:
https://github.com/heroku/heroku-django-template
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
import os
import dj_database_url
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: change this before deploying to production!
SECRET_KEY = 'i+acxn5(akgsn!sr4^qgf(^m&*@+g1@u^t@=8s@axc41ml*f=s'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'usuarios',
'asignaturas'
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'placeForMe.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
os.path.join(BASE_DIR, 'templates')
],
'APP_DIRS': True,
'OPTIONS': {
'debug': True,
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'placeForMe.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Update database configuration with $DATABASE_URL.
db_from_env = dj_database_url.config(conn_max_age=500)
DATABASES['default'].update(db_from_env)
# Honor the 'X-Forwarded-Proto' header for request.is_secure()
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# Allow all host headers
ALLOWED_HOSTS = ['*']
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
MEDIA_ROOT = 'media'
MEDIA_URL = '/media/'
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'staticfiles')
STATIC_URL = '/static/'
# Extra places for collectstatic to find static files.
STATICFILES_DIRS = [
os.path.join(BASE_DIR, 'static'),
]
# Simplified static file serving.
# https://warehouse.python.org/project/whitenoise/
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
| mit | 7,661,293,483,091,938,000 | 27.342657 | 91 | 0.694547 | false | 3.426036 | false | false | false |
pombredanne/invenio | modules/bibindex/lib/bibindex_engine_stemmer.py | 1 | 18378 | ## This file is part of Invenio.
## Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
BibIndex stemmer facility based on the Porter Stemming Algorithm.
<http://tartarus.org/~martin/PorterStemmer/>
"""
__revision__ = "$Id$"
from thread import get_ident
from invenio.bibindex_engine_stemmer_greek import greek_stemmer
_stemmers = {}
try:
### Let's try to use SnowBall PyStemmer
import Stemmer
_lang_map = {
'danish' : 'da',
'dutch' : 'nl',
'english' : 'en',
'finnish' : 'fi',
'french' : 'fr',
'german' : 'de',
'hungarian' : 'hu',
'italian' : 'it',
'norwegian' : 'no',
'portuguese' : 'pt',
'romanian' : 'ro',
'russian' : 'ru',
'spanish' : 'es',
'swedish' : 'sv',
'turkish' : 'tr'
}
def is_stemmer_available_for_language(lang):
"""Return true if stemmer for language LANG is available.
Return false otherwise.
"""
thread_ident = get_ident()
if not _stemmers.has_key(thread_ident):
_stemmers[thread_ident] = _create_stemmers()
return _stemmers[thread_ident].has_key(lang)
def stem(word, lang):
"""Return WORD stemmed according to language LANG (e.g. 'en')."""
if lang and is_stemmer_available_for_language(lang):
return _stemmers[get_ident()][lang].stemWord(word)
elif lang == 'el':
#TODO: first we have to capitalize the word
# and remove accents from the vowels
return greek_stemmer().stem_word(word)
else:
return word
def stemWords(words, lang):
"""Return WORDS stemmed according to language LANG (e.g. 'en')."""
if lang and is_stemmer_available_for_language(lang):
return _stemmers[get_ident()][lang].stemWords(words)
else:
return words
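    # Illustrative behaviour (assuming the English Snowball stemmer is available):
    #   stem("information", "en")            -> "inform"
    #   stemWords(["running", "cats"], "en") -> ["run", "cat"]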
    def get_stemming_language_map():
        """Return a dictionary of language name to language code for all
        the available languages."""
ret = {}
for language_name, language_code in _lang_map.iteritems():
if is_stemmer_available_for_language(language_code):
ret[language_name] = language_code
return ret
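    # Illustrative usage (a sketch, assuming the Snowball stemmers above are
    # installed; exact outputs depend on the PyStemmer version):
    #   stem('connections', 'en')            -> 'connect'
    #   stemWords(['running', 'runs'], 'en') -> ['run', 'run']
    #   get_stemming_language_map()          -> {'english': 'en', 'french': 'fr', ...}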
def _create_stemmers():
"""Create stemmers dictionary for all possible languages."""
stemmers_initialized = {}
for src_lang in Stemmer.algorithms():
try:
dst_lang = _lang_map.get(src_lang)
if dst_lang:
stemmers_initialized[dst_lang] = Stemmer.Stemmer(src_lang, 40000)
except (TypeError, KeyError):
pass
return stemmers_initialized
except ImportError:
### Here is the original PorterStemmer class provided as a fallback,
### the "free of charge for any purpose" implementation of the Porter stemmer
### algorithm in Python. The Invenio API interface follows below.
class PorterStemmer:
"""
This is the Porter stemming algorithm, ported to Python from the
version coded up in ANSI C by the author. It may be be regarded
as canonical, in that it follows the algorithm presented in
Porter, 1980, An algorithm for suffix stripping, Program, Vol. 14,
no. 3, pp 130-137,
        only differing from it at the points marked --DEPARTURE-- below.
See also http://www.tartarus.org/~martin/PorterStemmer
The algorithm as described in the paper could be exactly replicated
by adjusting the points of DEPARTURE, but this is barely necessary,
because (a) the points of DEPARTURE are definitely improvements, and
(b) no encoding of the Porter stemmer I have seen is anything like
as exact as this version, even with the points of DEPARTURE!
Vivake Gupta ([email protected])
Release 1: January 2001
"""
def __init__(self):
"""The main part of the stemming algorithm starts here.
b is a buffer holding a word to be stemmed. The letters are in b[k0],
b[k0+1] ... ending at b[k]. In fact k0 = 0 in this demo program. k is
readjusted downwards as the stemming progresses. Zero termination is
not in fact used in the algorithm.
Note that only lower case sequences are stemmed. Forcing to lower case
should be done before stem(...) is called.
"""
self.b = "" # buffer for word to be stemmed
self.k = 0
self.k0 = 0
self.j = 0 # j is a general offset into the string
def cons(self, i):
"""cons(i) is TRUE <=> b[i] is a consonant."""
if self.b[i] == 'a' or self.b[i] == 'e' or self.b[i] == 'i' or self.b[i] == 'o' or self.b[i] == 'u':
return 0
if self.b[i] == 'y':
if i == self.k0:
return 1
else:
return (not self.cons(i - 1))
return 1
def m(self):
"""m() measures the number of consonant sequences between k0 and j.
if c is a consonant sequence and v a vowel sequence, and <..>
indicates arbitrary presence,
<c><v> gives 0
<c>vc<v> gives 1
<c>vcvc<v> gives 2
<c>vcvcvc<v> gives 3
....
"""
n = 0
i = self.k0
while 1:
if i > self.j:
return n
if not self.cons(i):
break
i = i + 1
i = i + 1
while 1:
while 1:
if i > self.j:
return n
if self.cons(i):
break
i = i + 1
i = i + 1
n = n + 1
while 1:
if i > self.j:
return n
if not self.cons(i):
break
i = i + 1
i = i + 1
def vowelinstem(self):
"""vowelinstem() is TRUE <=> k0,...j contains a vowel"""
for i in range(self.k0, self.j + 1):
if not self.cons(i):
return 1
return 0
def doublec(self, j):
"""doublec(j) is TRUE <=> j,(j-1) contain a double consonant."""
if j < (self.k0 + 1):
return 0
if (self.b[j] != self.b[j-1]):
return 0
return self.cons(j)
def cvc(self, i):
"""cvc(i) is TRUE <=> i-2,i-1,i has the form consonant - vowel - consonant
and also if the second c is not w,x or y. this is used when trying to
            restore an e at the end of a short word, e.g.
cav(e), lov(e), hop(e), crim(e), but
snow, box, tray.
"""
if i < (self.k0 + 2) or not self.cons(i) or self.cons(i-1) or not self.cons(i-2):
return 0
ch = self.b[i]
if ch == 'w' or ch == 'x' or ch == 'y':
return 0
return 1
def ends(self, s):
"""ends(s) is TRUE <=> k0,...k ends with the string s."""
length = len(s)
if s[length - 1] != self.b[self.k]: # tiny speed-up
return 0
if length > (self.k - self.k0 + 1):
return 0
if self.b[self.k-length+1:self.k+1] != s:
return 0
self.j = self.k - length
return 1
def setto(self, s):
"""setto(s) sets (j+1),...k to the characters in the string s, readjusting k."""
length = len(s)
self.b = self.b[:self.j+1] + s + self.b[self.j+length+1:]
self.k = self.j + length
def r(self, s):
"""r(s) is used further down."""
if self.m() > 0:
self.setto(s)
def step1ab(self):
"""step1ab() gets rid of plurals and -ed or -ing. e.g.
caresses -> caress
ponies -> poni
ties -> ti
caress -> caress
cats -> cat
feed -> feed
agreed -> agree
disabled -> disable
matting -> mat
mating -> mate
meeting -> meet
milling -> mill
messing -> mess
meetings -> meet
"""
if self.b[self.k] == 's':
if self.ends("sses"):
self.k = self.k - 2
elif self.ends("ies"):
self.setto("i")
elif self.b[self.k - 1] != 's':
self.k = self.k - 1
if self.ends("eed"):
if self.m() > 0:
self.k = self.k - 1
elif (self.ends("ed") or self.ends("ing")) and self.vowelinstem():
self.k = self.j
if self.ends("at"): self.setto("ate")
elif self.ends("bl"): self.setto("ble")
elif self.ends("iz"): self.setto("ize")
elif self.doublec(self.k):
self.k = self.k - 1
ch = self.b[self.k]
if ch == 'l' or ch == 's' or ch == 'z':
self.k = self.k + 1
elif (self.m() == 1 and self.cvc(self.k)):
self.setto("e")
def step1c(self):
"""step1c() turns terminal y to i when there is another vowel in the stem."""
if (self.ends("y") and self.vowelinstem()):
self.b = self.b[:self.k] + 'i' + self.b[self.k+1:]
def step2(self):
"""step2() maps double suffices to single ones.
so -ization ( = -ize plus -ation) maps to -ize etc. note that the
string before the suffix must give m() > 0.
"""
if self.b[self.k - 1] == 'a':
if self.ends("ational"): self.r("ate")
elif self.ends("tional"): self.r("tion")
elif self.b[self.k - 1] == 'c':
if self.ends("enci"): self.r("ence")
elif self.ends("anci"): self.r("ance")
elif self.b[self.k - 1] == 'e':
if self.ends("izer"): self.r("ize")
elif self.b[self.k - 1] == 'l':
if self.ends("bli"): self.r("ble") # --DEPARTURE--
# To match the published algorithm, replace this phrase with
# if self.ends("abli"): self.r("able")
elif self.ends("alli"): self.r("al")
elif self.ends("entli"): self.r("ent")
elif self.ends("eli"): self.r("e")
elif self.ends("ousli"): self.r("ous")
elif self.b[self.k - 1] == 'o':
if self.ends("ization"): self.r("ize")
elif self.ends("ation"): self.r("ate")
elif self.ends("ator"): self.r("ate")
elif self.b[self.k - 1] == 's':
if self.ends("alism"): self.r("al")
elif self.ends("iveness"): self.r("ive")
elif self.ends("fulness"): self.r("ful")
elif self.ends("ousness"): self.r("ous")
elif self.b[self.k - 1] == 't':
if self.ends("aliti"): self.r("al")
elif self.ends("iviti"): self.r("ive")
elif self.ends("biliti"): self.r("ble")
elif self.b[self.k - 1] == 'g': # --DEPARTURE--
if self.ends("logi"): self.r("log")
# To match the published algorithm, delete this phrase
        def step3(self):
            """step3() deals with -ic-, -full, -ness etc. similar strategy to step2."""
if self.b[self.k] == 'e':
if self.ends("icate"): self.r("ic")
elif self.ends("ative"): self.r("")
elif self.ends("alize"): self.r("al")
elif self.b[self.k] == 'i':
if self.ends("iciti"): self.r("ic")
elif self.b[self.k] == 'l':
if self.ends("ical"): self.r("ic")
elif self.ends("ful"): self.r("")
elif self.b[self.k] == 's':
if self.ends("ness"): self.r("")
def step4(self):
"""step4() takes off -ant, -ence etc., in context <c>vcvc<v>."""
if self.b[self.k - 1] == 'a':
if self.ends("al"): pass
else: return
elif self.b[self.k - 1] == 'c':
if self.ends("ance"): pass
elif self.ends("ence"): pass
else: return
elif self.b[self.k - 1] == 'e':
if self.ends("er"): pass
else: return
elif self.b[self.k - 1] == 'i':
if self.ends("ic"): pass
else: return
elif self.b[self.k - 1] == 'l':
if self.ends("able"): pass
elif self.ends("ible"): pass
else: return
elif self.b[self.k - 1] == 'n':
if self.ends("ant"): pass
elif self.ends("ement"): pass
elif self.ends("ment"): pass
elif self.ends("ent"): pass
else: return
elif self.b[self.k - 1] == 'o':
if self.ends("ion") and (self.b[self.j] == 's' or self.b[self.j] == 't'): pass
elif self.ends("ou"): pass
# takes care of -ous
else: return
elif self.b[self.k - 1] == 's':
if self.ends("ism"): pass
else: return
elif self.b[self.k - 1] == 't':
if self.ends("ate"): pass
elif self.ends("iti"): pass
else: return
elif self.b[self.k - 1] == 'u':
if self.ends("ous"): pass
else: return
elif self.b[self.k - 1] == 'v':
if self.ends("ive"): pass
else: return
elif self.b[self.k - 1] == 'z':
if self.ends("ize"): pass
else: return
else:
return
if self.m() > 1:
self.k = self.j
def step5(self):
"""step5() removes a final -e if m() > 1, and changes -ll to -l if
m() > 1.
"""
self.j = self.k
if self.b[self.k] == 'e':
a = self.m()
if a > 1 or (a == 1 and not self.cvc(self.k-1)):
self.k = self.k - 1
if self.b[self.k] == 'l' and self.doublec(self.k) and self.m() > 1:
                self.k = self.k - 1
def stem(self, p, i, j):
"""In stem(p,i,j), p is a char pointer, and the string to be stemmed
is from p[i] to p[j] inclusive. Typically i is zero and j is the
offset to the last character of a string, (p[j+1] == '\0'). The
stemmer adjusts the characters p[i] ... p[j] and returns the new
end-point of the string, k. Stemming never increases word length, so
i <= k <= j. To turn the stemmer into a module, declare 'stem' as
extern, and delete the remainder of this file.
"""
# copy the parameters into statics
self.b = p
self.k = j
self.k0 = i
if self.k <= self.k0 + 1:
return self.b # --DEPARTURE--
# With this line, strings of length 1 or 2 don't go through the
# stemming process, although no mention is made of this in the
# published algorithm. Remove the line to match the published
# algorithm.
self.step1ab()
self.step1c()
self.step2()
self.step3()
self.step4()
self.step5()
return self.b[self.k0:self.k+1]
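    # Illustrative usage of the fallback stemmer (a sketch; the i and j indices
    # are inclusive, so the whole word is passed as p[0..len(p)-1]):
    #   p = PorterStemmer()
    #   p.stem('caresses', 0, len('caresses') - 1)   -> 'caress'
    #   p.stem('ponies', 0, len('ponies') - 1)       -> 'poni'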
_stemmers[get_ident()] = PorterStemmer()
def is_stemmer_available_for_language(lang):
"""Return true if stemmer for language LANG is available.
Return false otherwise.
"""
return lang == 'en'
def stem(word, lang):
"""Return WORD stemmed according to language LANG (e.g. 'en')."""
if lang == 'en' and _stemmers and _stemmers.has_key(get_ident()):
#make sure _stemmers[get_ident()] is avail..
return _stemmers[get_ident()].stem(word, 0, len(word)-1)
elif lang == 'el':
#TODO: first we have to capitalize the word
# and remove accents from the vowels
return greek_stemmer().stem_word(word)
else:
return word
def stemWords(words, lang):
"""Return WORDS stemmed according to language LANG (e.g. 'en')."""
if lang == 'en' and _stemmers and _stemmers.has_key(get_ident()):
#make sure _stemmers[get_ident()] is avail..
return [_stemmers[get_ident()].stem(word, 0, len(word)-1) for word in words]
else:
return words
    def get_stemming_language_map():
        """Return a dictionary of language name to language code for all
        the available languages."""
return {'english' : 'en'}
if __name__ == '__main__':
# when invoked via CLI, simply stem the arguments:
import sys
if len(sys.argv) > 1:
for word in sys.argv[1:]:
print stem(word)
| gpl-2.0 | -8,048,510,486,832,647,000 | 37.367432 | 112 | 0.482098 | false | 3.776043 | false | false | false |
gurneyalex/odoo | addons/account/models/digest.py | 6 | 1557 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import fields, models, _
from odoo.exceptions import AccessError
class Digest(models.Model):
_inherit = 'digest.digest'
kpi_account_total_revenue = fields.Boolean('Revenue')
kpi_account_total_revenue_value = fields.Monetary(compute='_compute_kpi_account_total_revenue_value')
def _compute_kpi_account_total_revenue_value(self):
if not self.env.user.has_group('account.group_account_invoice'):
raise AccessError(_("Do not have access, skip this data for user's digest email"))
for record in self:
start, end, company = record._get_kpi_compute_parameters()
self._cr.execute('''
SELECT SUM(line.debit)
FROM account_move_line line
JOIN account_move move ON move.id = line.move_id
JOIN account_journal journal ON journal.id = move.journal_id
WHERE line.company_id = %s AND line.date >= %s AND line.date < %s
AND journal.type = 'sale'
''', [company.id, start, end])
query_res = self._cr.fetchone()
record.kpi_account_total_revenue_value = query_res and query_res[0] or 0.0
def compute_kpis_actions(self, company, user):
res = super(Digest, self).compute_kpis_actions(company, user)
res['kpi_account_total_revenue'] = 'account.action_move_out_invoice_type&menu_id=%s' % self.env.ref('account.menu_finance').id
return res
| agpl-3.0 | -1,840,514,460,584,391,000 | 46.181818 | 134 | 0.635196 | false | 3.680851 | false | false | false |
phil0522/anote | anote-web/anoteweb/data/anote_pb2.py | 1 | 9335 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: anote.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
DESCRIPTOR = _descriptor.FileDescriptor(
name='anote.proto',
package='anote.proto',
serialized_pb='\n\x0b\x61note.proto\x12\x0b\x61note.proto\"\xfc\x01\n\x04Task\x12\x0f\n\x07task_id\x18\x01 \x01(\x05\x12\r\n\x05title\x18\x02 \x01(\t\x12\x0e\n\x06status\x18\x03 \x01(\t\x12\x0b\n\x03tag\x18\x04 \x03(\t\x12\x0f\n\x07project\x18\x05 \x01(\t\x12\x11\n\tparent_id\x18\x06 \x01(\x05\x12\x10\n\x08\x61ncestor\x18\x07 \x03(\x05\x12\x13\n\x0b\x64\x65scription\x18\x08 \x01(\t\x12\x0f\n\x07note_id\x18\t \x03(\x05\x12\x1f\n\x04note\x18\n \x03(\x0b\x32\x11.anote.proto.Note\x12\x13\n\x0b\x63reate_time\x18\x0b \x01(\x05\x12\x13\n\x0bupdate_time\x18\x0c \x01(\x05\x12\x10\n\x08position\x18\r \x01(\x05\"6\n\x04Note\x12\x0f\n\x07task_id\x18\x01 \x01(\x05\x12\x0f\n\x07note_id\x18\x02 \x01(\x05\x12\x0c\n\x04text\x18\x03 \x01(\t\"6\n\x03Tag\x12\r\n\x05title\x18\x01 \x01(\t\x12\x12\n\noccurrence\x18\x02 \x01(\x05\x12\x0c\n\x04hide\x18\x03 \x01(\x08')
_TASK = _descriptor.Descriptor(
name='Task',
full_name='anote.proto.Task',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='task_id', full_name='anote.proto.Task.task_id', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='title', full_name='anote.proto.Task.title', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='status', full_name='anote.proto.Task.status', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='tag', full_name='anote.proto.Task.tag', index=3,
number=4, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='project', full_name='anote.proto.Task.project', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='parent_id', full_name='anote.proto.Task.parent_id', index=5,
number=6, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='ancestor', full_name='anote.proto.Task.ancestor', index=6,
number=7, type=5, cpp_type=1, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='description', full_name='anote.proto.Task.description', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='note_id', full_name='anote.proto.Task.note_id', index=8,
number=9, type=5, cpp_type=1, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='note', full_name='anote.proto.Task.note', index=9,
number=10, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='create_time', full_name='anote.proto.Task.create_time', index=10,
number=11, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='update_time', full_name='anote.proto.Task.update_time', index=11,
number=12, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='position', full_name='anote.proto.Task.position', index=12,
number=13, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=29,
serialized_end=281,
)
_NOTE = _descriptor.Descriptor(
name='Note',
full_name='anote.proto.Note',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='task_id', full_name='anote.proto.Note.task_id', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='note_id', full_name='anote.proto.Note.note_id', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='text', full_name='anote.proto.Note.text', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=283,
serialized_end=337,
)
_TAG = _descriptor.Descriptor(
name='Tag',
full_name='anote.proto.Tag',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='title', full_name='anote.proto.Tag.title', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='occurrence', full_name='anote.proto.Tag.occurrence', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='hide', full_name='anote.proto.Tag.hide', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=339,
serialized_end=393,
)
_TASK.fields_by_name['note'].message_type = _NOTE
DESCRIPTOR.message_types_by_name['Task'] = _TASK
DESCRIPTOR.message_types_by_name['Note'] = _NOTE
DESCRIPTOR.message_types_by_name['Tag'] = _TAG
class Task(_message.Message):
__metaclass__ = _reflection.GeneratedProtocolMessageType
DESCRIPTOR = _TASK
# @@protoc_insertion_point(class_scope:anote.proto.Task)
class Note(_message.Message):
__metaclass__ = _reflection.GeneratedProtocolMessageType
DESCRIPTOR = _NOTE
# @@protoc_insertion_point(class_scope:anote.proto.Note)
class Tag(_message.Message):
__metaclass__ = _reflection.GeneratedProtocolMessageType
DESCRIPTOR = _TAG
# @@protoc_insertion_point(class_scope:anote.proto.Tag)
# @@protoc_insertion_point(module_scope)
| mit | -9,181,972,045,732,631,000 | 37.895833 | 857 | 0.681307 | false | 3.016155 | false | false | false |
mfitzp/padua | setup.py | 1 | 1035 | from setuptools import setup, find_packages
version = '0.1.16'
setup(
name='padua',
version=version,
url='http://github.com/mfitzp/padua',
author='Martin Fitzpatrick',
author_email='[email protected]',
description='A Python interface for Proteomic Data Analysis, working with MaxQuant & Perseus outputs',
license='MIT',
packages=find_packages(),
include_package_data=True,
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'Topic :: Desktop Environment',
'Topic :: Software Development :: Build Tools',
'Topic :: Software Development :: Widget Sets',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4'
],
install_requires=[
'numpy',
'scipy',
'matplotlib',
'pandas',
'statsmodels',
'matplotlib-venn',
'scikit-learn',
'requests',
'requests_toolbelt',
'adjustText'
]
)
| bsd-2-clause | 5,922,774,192,656,956,000 | 27.75 | 106 | 0.596135 | false | 3.980769 | false | false | false |
jianghuaw/nova | nova/conductor/manager.py | 1 | 56574 | # Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Handles database requests from other nova services."""
import contextlib
import copy
import functools
from oslo_config import cfg
from oslo_log import log as logging
import oslo_messaging as messaging
from oslo_utils import excutils
from oslo_utils import versionutils
import six
from nova import availability_zones
from nova.compute import instance_actions
from nova.compute import rpcapi as compute_rpcapi
from nova.compute import task_states
from nova.compute import utils as compute_utils
from nova.compute.utils import wrap_instance_event
from nova.compute import vm_states
from nova.conductor.tasks import live_migrate
from nova.conductor.tasks import migrate
from nova import context as nova_context
from nova.db import base
from nova import exception
from nova.i18n import _
from nova import image
from nova import manager
from nova import network
from nova import notifications
from nova import objects
from nova.objects import base as nova_object
from nova import profiler
from nova import rpc
from nova.scheduler import client as scheduler_client
from nova.scheduler import utils as scheduler_utils
from nova import servicegroup
from nova import utils
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
def targets_cell(fn):
"""Wrap a method and automatically target the instance's cell.
This decorates a method with signature func(self, context, instance, ...)
and automatically targets the context with the instance's cell
mapping. It does this by looking up the InstanceMapping.
"""
@functools.wraps(fn)
def wrapper(self, context, *args, **kwargs):
instance = kwargs.get('instance') or args[0]
try:
im = objects.InstanceMapping.get_by_instance_uuid(
context, instance.uuid)
except exception.InstanceMappingNotFound:
LOG.error('InstanceMapping not found, unable to target cell',
instance=instance)
im = None
else:
LOG.debug('Targeting cell %(cell)s for conductor method %(meth)s',
{'cell': im.cell_mapping.identity,
'meth': fn.__name__})
# NOTE(danms): Target our context to the cell for the rest of
# this request, so that none of the subsequent code needs to
# care about it.
nova_context.set_target_cell(context, im.cell_mapping)
return fn(self, context, *args, **kwargs)
return wrapper
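# Illustrative application of the decorator (a sketch mirroring how the
# cell-aware conductor methods below use it):
#
#   @targets_cell
#   def some_instance_method(self, context, instance, ...):
#       # here ``context`` is already targeted at the instance's cell
#       ...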
class ConductorManager(manager.Manager):
"""Mission: Conduct things.
The methods in the base API for nova-conductor are various proxy operations
performed on behalf of the nova-compute service running on compute nodes.
Compute nodes are not allowed to directly access the database, so this set
of methods allows them to get specific work done without locally accessing
the database.
The nova-conductor service also exposes an API in the 'compute_task'
namespace. See the ComputeTaskManager class for details.
"""
target = messaging.Target(version='3.0')
def __init__(self, *args, **kwargs):
super(ConductorManager, self).__init__(service_name='conductor',
*args, **kwargs)
self.compute_task_mgr = ComputeTaskManager()
self.additional_endpoints.append(self.compute_task_mgr)
# NOTE(hanlind): This can be removed in version 4.0 of the RPC API
def provider_fw_rule_get_all(self, context):
# NOTE(hanlind): Simulate an empty db result for compat reasons.
return []
def _object_dispatch(self, target, method, args, kwargs):
"""Dispatch a call to an object method.
This ensures that object methods get called and any exception
that is raised gets wrapped in an ExpectedException for forwarding
back to the caller (without spamming the conductor logs).
"""
try:
# NOTE(danms): Keep the getattr inside the try block since
# a missing method is really a client problem
return getattr(target, method)(*args, **kwargs)
except Exception:
raise messaging.ExpectedException()
def object_class_action_versions(self, context, objname, objmethod,
object_versions, args, kwargs):
objclass = nova_object.NovaObject.obj_class_from_name(
objname, object_versions[objname])
args = tuple([context] + list(args))
result = self._object_dispatch(objclass, objmethod, args, kwargs)
# NOTE(danms): The RPC layer will convert to primitives for us,
# but in this case, we need to honor the version the client is
# asking for, so we do it before returning here.
# NOTE(hanlind): Do not convert older than requested objects,
# see bug #1596119.
if isinstance(result, nova_object.NovaObject):
target_version = object_versions[objname]
requested_version = versionutils.convert_version_to_tuple(
target_version)
actual_version = versionutils.convert_version_to_tuple(
result.VERSION)
do_backport = requested_version < actual_version
other_major_version = requested_version[0] != actual_version[0]
if do_backport or other_major_version:
result = result.obj_to_primitive(
target_version=target_version,
version_manifest=object_versions)
return result
def object_action(self, context, objinst, objmethod, args, kwargs):
"""Perform an action on an object."""
oldobj = objinst.obj_clone()
result = self._object_dispatch(objinst, objmethod, args, kwargs)
updates = dict()
# NOTE(danms): Diff the object with the one passed to us and
# generate a list of changes to forward back
for name, field in objinst.fields.items():
if not objinst.obj_attr_is_set(name):
# Avoid demand-loading anything
continue
if (not oldobj.obj_attr_is_set(name) or
getattr(oldobj, name) != getattr(objinst, name)):
updates[name] = field.to_primitive(objinst, name,
getattr(objinst, name))
# This is safe since a field named this would conflict with the
# method anyway
updates['obj_what_changed'] = objinst.obj_what_changed()
return updates, result
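    # Illustrative return shape of object_action() (a sketch): for a call such
    # as object_action(ctxt, instance, 'save', (), {}) the result would look
    # roughly like
    #   ({'task_state': None, 'obj_what_changed': set(['task_state'])}, None)
    # i.e. the changed fields in primitive form plus the remote method's result.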
def object_backport_versions(self, context, objinst, object_versions):
target = object_versions[objinst.obj_name()]
LOG.debug('Backporting %(obj)s to %(ver)s with versions %(manifest)s',
{'obj': objinst.obj_name(),
'ver': target,
'manifest': ','.join(
['%s=%s' % (name, ver)
for name, ver in object_versions.items()])})
return objinst.obj_to_primitive(target_version=target,
version_manifest=object_versions)
def reset(self):
objects.Service.clear_min_version_cache()
@contextlib.contextmanager
def try_target_cell(context, cell):
"""If cell is not None call func with context.target_cell.
This is a method to help during the transition period. Currently
various mappings may not exist if a deployment has not migrated to
cellsv2. If there is no mapping call the func as normal, otherwise
call it in a target_cell context.
"""
if cell:
with nova_context.target_cell(context, cell) as cell_context:
yield cell_context
else:
yield context
@contextlib.contextmanager
def obj_target_cell(obj, cell):
"""Run with object's context set to a specific cell"""
with try_target_cell(obj._context, cell) as target:
with obj.obj_alternate_context(target):
yield target
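# Illustrative usage (a sketch): run a DB operation in the cell that owns the
# object, e.g.
#
#   with obj_target_cell(instance, cell_mapping) as cctxt:
#       instance.create()
#
# which mirrors how instances and block device mappings are created in their
# target cell further below.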
@profiler.trace_cls("rpc")
class ComputeTaskManager(base.Base):
"""Namespace for compute methods.
This class presents an rpc API for nova-conductor under the 'compute_task'
namespace. The methods here are compute operations that are invoked
by the API service. These methods see the operation to completion, which
may involve coordinating activities on multiple compute nodes.
"""
target = messaging.Target(namespace='compute_task', version='1.17')
def __init__(self):
super(ComputeTaskManager, self).__init__()
self.compute_rpcapi = compute_rpcapi.ComputeAPI()
self.image_api = image.API()
self.network_api = network.API()
self.servicegroup_api = servicegroup.API()
self.scheduler_client = scheduler_client.SchedulerClient()
self.notifier = rpc.get_notifier('compute', CONF.host)
def reset(self):
LOG.info('Reloading compute RPC API')
compute_rpcapi.LAST_VERSION = None
self.compute_rpcapi = compute_rpcapi.ComputeAPI()
# TODO(tdurakov): remove `live` parameter here on compute task api RPC
# version bump to 2.x
@messaging.expected_exceptions(
exception.NoValidHost,
exception.ComputeServiceUnavailable,
exception.ComputeHostNotFound,
exception.InvalidHypervisorType,
exception.InvalidCPUInfo,
exception.UnableToMigrateToSelf,
exception.DestinationHypervisorTooOld,
exception.InvalidLocalStorage,
exception.InvalidSharedStorage,
exception.HypervisorUnavailable,
exception.InstanceInvalidState,
exception.MigrationPreCheckError,
exception.MigrationPreCheckClientException,
exception.LiveMigrationWithOldNovaNotSupported,
exception.UnsupportedPolicyException)
@targets_cell
@wrap_instance_event(prefix='conductor')
def migrate_server(self, context, instance, scheduler_hint, live, rebuild,
flavor, block_migration, disk_over_commit, reservations=None,
clean_shutdown=True, request_spec=None):
if instance and not isinstance(instance, nova_object.NovaObject):
# NOTE(danms): Until v2 of the RPC API, we need to tolerate
# old-world instance objects here
attrs = ['metadata', 'system_metadata', 'info_cache',
'security_groups']
instance = objects.Instance._from_db_object(
context, objects.Instance(), instance,
expected_attrs=attrs)
# NOTE: Remove this when we drop support for v1 of the RPC API
if flavor and not isinstance(flavor, objects.Flavor):
# Code downstream may expect extra_specs to be populated since it
# is receiving an object, so lookup the flavor to ensure this.
flavor = objects.Flavor.get_by_id(context, flavor['id'])
if live and not rebuild and not flavor:
self._live_migrate(context, instance, scheduler_hint,
block_migration, disk_over_commit, request_spec)
elif not live and not rebuild and flavor:
instance_uuid = instance.uuid
with compute_utils.EventReporter(context, 'cold_migrate',
instance_uuid):
self._cold_migrate(context, instance, flavor,
scheduler_hint['filter_properties'],
reservations, clean_shutdown, request_spec)
else:
raise NotImplementedError()
def _cold_migrate(self, context, instance, flavor, filter_properties,
reservations, clean_shutdown, request_spec):
image = utils.get_image_from_system_metadata(
instance.system_metadata)
# NOTE(sbauza): If a reschedule occurs when prep_resize(), then
# it only provides filter_properties legacy dict back to the
# conductor with no RequestSpec part of the payload.
if not request_spec:
# Make sure we hydrate a new RequestSpec object with the new flavor
# and not the nested one from the instance
request_spec = objects.RequestSpec.from_components(
context, instance.uuid, image,
flavor, instance.numa_topology, instance.pci_requests,
filter_properties, None, instance.availability_zone)
else:
# NOTE(sbauza): Resizes means new flavor, so we need to update the
# original RequestSpec object for make sure the scheduler verifies
# the right one and not the original flavor
request_spec.flavor = flavor
task = self._build_cold_migrate_task(context, instance, flavor,
request_spec,
reservations, clean_shutdown)
# TODO(sbauza): Provide directly the RequestSpec object once
# _set_vm_state_and_notify() accepts it
legacy_spec = request_spec.to_legacy_request_spec_dict()
try:
task.execute()
except exception.NoValidHost as ex:
vm_state = instance.vm_state
if not vm_state:
vm_state = vm_states.ACTIVE
updates = {'vm_state': vm_state, 'task_state': None}
self._set_vm_state_and_notify(context, instance.uuid,
'migrate_server',
updates, ex, legacy_spec)
# if the flavor IDs match, it's migrate; otherwise resize
if flavor.id == instance.instance_type_id:
msg = _("No valid host found for cold migrate")
else:
msg = _("No valid host found for resize")
raise exception.NoValidHost(reason=msg)
except exception.UnsupportedPolicyException as ex:
with excutils.save_and_reraise_exception():
vm_state = instance.vm_state
if not vm_state:
vm_state = vm_states.ACTIVE
updates = {'vm_state': vm_state, 'task_state': None}
self._set_vm_state_and_notify(context, instance.uuid,
'migrate_server',
updates, ex, legacy_spec)
except Exception as ex:
with excutils.save_and_reraise_exception():
updates = {'vm_state': instance.vm_state,
'task_state': None}
self._set_vm_state_and_notify(context, instance.uuid,
'migrate_server',
updates, ex, legacy_spec)
# NOTE(sbauza): Make sure we persist the new flavor in case we had
# a successful scheduler call if and only if nothing bad happened
if request_spec.obj_what_changed():
request_spec.save()
def _set_vm_state_and_notify(self, context, instance_uuid, method, updates,
ex, request_spec):
scheduler_utils.set_vm_state_and_notify(
context, instance_uuid, 'compute_task', method, updates,
ex, request_spec)
def _cleanup_allocated_networks(
self, context, instance, requested_networks):
try:
# If we were told not to allocate networks let's save ourselves
# the trouble of calling the network API.
if not (requested_networks and requested_networks.no_allocate):
self.network_api.deallocate_for_instance(
context, instance, requested_networks=requested_networks)
except Exception:
LOG.exception('Failed to deallocate networks', instance=instance)
return
instance.system_metadata['network_allocated'] = 'False'
try:
instance.save()
except exception.InstanceNotFound:
# NOTE: It's possible that we're cleaning up the networks
# because the instance was deleted. If that's the case then this
# exception will be raised by instance.save()
pass
@wrap_instance_event(prefix='conductor')
def live_migrate_instance(self, context, instance, scheduler_hint,
block_migration, disk_over_commit, request_spec):
self._live_migrate(context, instance, scheduler_hint,
block_migration, disk_over_commit, request_spec)
def _live_migrate(self, context, instance, scheduler_hint,
block_migration, disk_over_commit, request_spec):
destination = scheduler_hint.get("host")
def _set_vm_state(context, instance, ex, vm_state=None,
task_state=None):
request_spec = {'instance_properties': {
'uuid': instance.uuid, },
}
scheduler_utils.set_vm_state_and_notify(context,
instance.uuid,
'compute_task', 'migrate_server',
dict(vm_state=vm_state,
task_state=task_state,
expected_task_state=task_states.MIGRATING,),
ex, request_spec)
migration = objects.Migration(context=context.elevated())
migration.dest_compute = destination
migration.status = 'accepted'
migration.instance_uuid = instance.uuid
migration.source_compute = instance.host
migration.migration_type = 'live-migration'
if instance.obj_attr_is_set('flavor'):
migration.old_instance_type_id = instance.flavor.id
migration.new_instance_type_id = instance.flavor.id
else:
migration.old_instance_type_id = instance.instance_type_id
migration.new_instance_type_id = instance.instance_type_id
migration.create()
task = self._build_live_migrate_task(context, instance, destination,
block_migration, disk_over_commit,
migration, request_spec)
try:
task.execute()
except (exception.NoValidHost,
exception.ComputeHostNotFound,
exception.ComputeServiceUnavailable,
exception.InvalidHypervisorType,
exception.InvalidCPUInfo,
exception.UnableToMigrateToSelf,
exception.DestinationHypervisorTooOld,
exception.InvalidLocalStorage,
exception.InvalidSharedStorage,
exception.HypervisorUnavailable,
exception.InstanceInvalidState,
exception.MigrationPreCheckError,
exception.MigrationPreCheckClientException,
exception.LiveMigrationWithOldNovaNotSupported,
exception.MigrationSchedulerRPCError) as ex:
with excutils.save_and_reraise_exception():
# TODO(johngarbutt) - eventually need instance actions here
_set_vm_state(context, instance, ex, instance.vm_state)
migration.status = 'error'
migration.save()
except Exception as ex:
LOG.error('Migration of instance %(instance_id)s to host'
' %(dest)s unexpectedly failed.',
{'instance_id': instance.uuid, 'dest': destination},
exc_info=True)
# Reset the task state to None to indicate completion of
# the operation as it is done in case of known exceptions.
_set_vm_state(context, instance, ex, vm_states.ERROR,
task_state=None)
migration.status = 'error'
migration.save()
raise exception.MigrationError(reason=six.text_type(ex))
def _build_live_migrate_task(self, context, instance, destination,
block_migration, disk_over_commit, migration,
request_spec=None):
return live_migrate.LiveMigrationTask(context, instance,
destination, block_migration,
disk_over_commit, migration,
self.compute_rpcapi,
self.servicegroup_api,
self.scheduler_client,
request_spec)
def _build_cold_migrate_task(self, context, instance, flavor,
request_spec, reservations,
clean_shutdown):
return migrate.MigrationTask(context, instance, flavor,
request_spec,
reservations, clean_shutdown,
self.compute_rpcapi,
self.scheduler_client)
def _destroy_build_request(self, context, instance):
# The BuildRequest needs to be stored until the instance is mapped to
# an instance table. At that point it will never be used again and
# should be deleted.
build_request = objects.BuildRequest.get_by_instance_uuid(
context, instance.uuid)
# TODO(alaski): Sync API updates of the build_request to the
# instance before it is destroyed. Right now only locked_by can
# be updated before this is destroyed.
build_request.destroy()
def _populate_instance_mapping(self, context, instance, host):
try:
inst_mapping = objects.InstanceMapping.get_by_instance_uuid(
context, instance.uuid)
except exception.InstanceMappingNotFound:
# NOTE(alaski): If nova-api is up to date this exception should
# never be hit. But during an upgrade it's possible that an old
# nova-api didn't create an instance_mapping during this boot
# request.
LOG.debug('Instance was not mapped to a cell, likely due '
'to an older nova-api service running.',
instance=instance)
return None
else:
try:
host_mapping = objects.HostMapping.get_by_host(context,
host['host'])
except exception.HostMappingNotFound:
# NOTE(alaski): For now this exception means that a
# deployment has not migrated to cellsv2 and we should
# remove the instance_mapping that has been created.
# Eventually this will indicate a failure to properly map a
# host to a cell and we may want to reschedule.
inst_mapping.destroy()
return None
else:
inst_mapping.cell_mapping = host_mapping.cell_mapping
inst_mapping.save()
return inst_mapping
# NOTE(danms): This is never cell-targeted because it is only used for
# cellsv1 (which does not target cells directly) and n-cpu reschedules
# (which go to the cell conductor and thus are always cell-specific).
def build_instances(self, context, instances, image, filter_properties,
admin_password, injected_files, requested_networks,
security_groups, block_device_mapping=None, legacy_bdm=True):
# TODO(ndipanov): Remove block_device_mapping and legacy_bdm in version
# 2.0 of the RPC API.
# TODO(danms): Remove this in version 2.0 of the RPC API
if (requested_networks and
not isinstance(requested_networks,
objects.NetworkRequestList)):
requested_networks = objects.NetworkRequestList.from_tuples(
requested_networks)
# TODO(melwitt): Remove this in version 2.0 of the RPC API
flavor = filter_properties.get('instance_type')
if flavor and not isinstance(flavor, objects.Flavor):
# Code downstream may expect extra_specs to be populated since it
# is receiving an object, so lookup the flavor to ensure this.
flavor = objects.Flavor.get_by_id(context, flavor['id'])
filter_properties = dict(filter_properties, instance_type=flavor)
request_spec = {}
try:
# check retry policy. Rather ugly use of instances[0]...
# but if we've exceeded max retries... then we really only
# have a single instance.
# TODO(sbauza): Provide directly the RequestSpec object
# when _set_vm_state_and_notify() and populate_retry()
# accept it
request_spec = scheduler_utils.build_request_spec(
context, image, instances)
scheduler_utils.populate_retry(
filter_properties, instances[0].uuid)
instance_uuids = [instance.uuid for instance in instances]
spec_obj = objects.RequestSpec.from_primitives(
context, request_spec, filter_properties)
hosts = self._schedule_instances(
context, spec_obj, instance_uuids)
except Exception as exc:
updates = {'vm_state': vm_states.ERROR, 'task_state': None}
for instance in instances:
self._set_vm_state_and_notify(
context, instance.uuid, 'build_instances', updates,
exc, request_spec)
try:
# If the BuildRequest stays around then instance show/lists
# will pull from it rather than the errored instance.
self._destroy_build_request(context, instance)
except exception.BuildRequestNotFound:
pass
self._cleanup_allocated_networks(
context, instance, requested_networks)
return
for (instance, host) in six.moves.zip(instances, hosts):
instance.availability_zone = (
availability_zones.get_host_availability_zone(context,
host['host']))
try:
# NOTE(danms): This saves the az change above, refreshes our
# instance, and tells us if it has been deleted underneath us
instance.save()
except (exception.InstanceNotFound,
exception.InstanceInfoCacheNotFound):
LOG.debug('Instance deleted during build', instance=instance)
continue
local_filter_props = copy.deepcopy(filter_properties)
scheduler_utils.populate_filter_properties(local_filter_props,
host)
# The block_device_mapping passed from the api doesn't contain
# instance specific information
bdms = objects.BlockDeviceMappingList.get_by_instance_uuid(
context, instance.uuid)
# This is populated in scheduler_utils.populate_retry
num_attempts = local_filter_props.get('retry',
{}).get('num_attempts', 1)
if num_attempts <= 1:
# If this is a reschedule the instance is already mapped to
# this cell and the BuildRequest is already deleted so ignore
# the logic below.
inst_mapping = self._populate_instance_mapping(context,
instance,
host)
try:
self._destroy_build_request(context, instance)
except exception.BuildRequestNotFound:
# This indicates an instance delete has been requested in
# the API. Stop the build, cleanup the instance_mapping and
# potentially the block_device_mappings
# TODO(alaski): Handle block_device_mapping cleanup
if inst_mapping:
inst_mapping.destroy()
return
self.compute_rpcapi.build_and_run_instance(context,
instance=instance, host=host['host'], image=image,
request_spec=request_spec,
filter_properties=local_filter_props,
admin_password=admin_password,
injected_files=injected_files,
requested_networks=requested_networks,
security_groups=security_groups,
block_device_mapping=bdms, node=host['nodename'],
limits=host['limits'])
def _schedule_instances(self, context, request_spec,
instance_uuids=None):
scheduler_utils.setup_instance_group(context, request_spec)
hosts = self.scheduler_client.select_destinations(context,
request_spec, instance_uuids)
return hosts
@targets_cell
def unshelve_instance(self, context, instance, request_spec=None):
sys_meta = instance.system_metadata
def safe_image_show(ctx, image_id):
if image_id:
return self.image_api.get(ctx, image_id, show_deleted=False)
else:
raise exception.ImageNotFound(image_id='')
if instance.vm_state == vm_states.SHELVED:
instance.task_state = task_states.POWERING_ON
instance.save(expected_task_state=task_states.UNSHELVING)
self.compute_rpcapi.start_instance(context, instance)
elif instance.vm_state == vm_states.SHELVED_OFFLOADED:
image = None
image_id = sys_meta.get('shelved_image_id')
# No need to check for image if image_id is None as
# "shelved_image_id" key is not set for volume backed
# instance during the shelve process
if image_id:
with compute_utils.EventReporter(
context, 'get_image_info', instance.uuid):
try:
image = safe_image_show(context, image_id)
except exception.ImageNotFound:
instance.vm_state = vm_states.ERROR
instance.save()
reason = _('Unshelve attempted but the image %s '
'cannot be found.') % image_id
LOG.error(reason, instance=instance)
raise exception.UnshelveException(
instance_id=instance.uuid, reason=reason)
try:
with compute_utils.EventReporter(context, 'schedule_instances',
instance.uuid):
if not request_spec:
# NOTE(sbauza): We were unable to find an original
# RequestSpec object - probably because the instance is
# old. We need to mock that the old way
filter_properties = {}
request_spec = scheduler_utils.build_request_spec(
context, image, [instance])
else:
# NOTE(sbauza): Force_hosts/nodes needs to be reset
# if we want to make sure that the next destination
# is not forced to be the original host
request_spec.reset_forced_destinations()
# TODO(sbauza): Provide directly the RequestSpec object
# when populate_filter_properties and populate_retry()
# accept it
filter_properties = request_spec.\
to_legacy_filter_properties_dict()
request_spec = request_spec.\
to_legacy_request_spec_dict()
scheduler_utils.populate_retry(filter_properties,
instance.uuid)
request_spec = objects.RequestSpec.from_primitives(
context, request_spec, filter_properties)
# NOTE(cfriesen): Ensure that we restrict the scheduler to
# the cell specified by the instance mapping.
instance_mapping = \
objects.InstanceMapping.get_by_instance_uuid(
context, instance.uuid)
LOG.debug('Requesting cell %(cell)s while unshelving',
{'cell': instance_mapping.cell_mapping.identity},
instance=instance)
if ('requested_destination' in request_spec and
request_spec.requested_destination):
request_spec.requested_destination.cell = (
instance_mapping.cell_mapping)
else:
request_spec.requested_destination = (
objects.Destination(
cell=instance_mapping.cell_mapping))
hosts = self._schedule_instances(context, request_spec,
[instance.uuid])
host_state = hosts[0]
scheduler_utils.populate_filter_properties(
filter_properties, host_state)
(host, node) = (host_state['host'], host_state['nodename'])
instance.availability_zone = (
availability_zones.get_host_availability_zone(
context, host))
self.compute_rpcapi.unshelve_instance(
context, instance, host, image=image,
filter_properties=filter_properties, node=node)
except (exception.NoValidHost,
exception.UnsupportedPolicyException):
instance.task_state = None
instance.save()
LOG.warning("No valid host found for unshelve instance",
instance=instance)
return
except Exception:
with excutils.save_and_reraise_exception():
instance.task_state = None
instance.save()
LOG.error("Unshelve attempted but an error "
"has occurred", instance=instance)
else:
LOG.error('Unshelve attempted but vm_state not SHELVED or '
'SHELVED_OFFLOADED', instance=instance)
instance.vm_state = vm_states.ERROR
instance.save()
return
@targets_cell
def rebuild_instance(self, context, instance, orig_image_ref, image_ref,
injected_files, new_pass, orig_sys_metadata,
bdms, recreate, on_shared_storage,
preserve_ephemeral=False, host=None,
request_spec=None):
with compute_utils.EventReporter(context, 'rebuild_server',
instance.uuid):
node = limits = None
if not host:
if not request_spec:
# NOTE(sbauza): We were unable to find an original
# RequestSpec object - probably because the instance is old
# We need to mock that the old way
# TODO(sbauza): Provide directly the RequestSpec object
# when _set_vm_state_and_notify() accepts it
filter_properties = {'ignore_hosts': [instance.host]}
request_spec = scheduler_utils.build_request_spec(
context, image_ref, [instance])
request_spec = objects.RequestSpec.from_primitives(
context, request_spec, filter_properties)
else:
# NOTE(sbauza): Augment the RequestSpec object by excluding
# the source host for avoiding the scheduler to pick it
request_spec.ignore_hosts = request_spec.ignore_hosts or []
request_spec.ignore_hosts.append(instance.host)
# NOTE(sbauza): Force_hosts/nodes needs to be reset
# if we want to make sure that the next destination
# is not forced to be the original host
request_spec.reset_forced_destinations()
try:
hosts = self._schedule_instances(context, request_spec,
[instance.uuid])
host_dict = hosts.pop(0)
host, node, limits = (host_dict['host'],
host_dict['nodename'],
host_dict['limits'])
except exception.NoValidHost as ex:
request_spec = request_spec.to_legacy_request_spec_dict()
with excutils.save_and_reraise_exception():
self._set_vm_state_and_notify(context, instance.uuid,
'rebuild_server',
{'vm_state': instance.vm_state,
'task_state': None}, ex, request_spec)
LOG.warning("No valid host found for rebuild",
instance=instance)
except exception.UnsupportedPolicyException as ex:
request_spec = request_spec.to_legacy_request_spec_dict()
with excutils.save_and_reraise_exception():
self._set_vm_state_and_notify(context, instance.uuid,
'rebuild_server',
{'vm_state': instance.vm_state,
'task_state': None}, ex, request_spec)
LOG.warning("Server with unsupported policy "
"cannot be rebuilt", instance=instance)
try:
migration = objects.Migration.get_by_instance_and_status(
context, instance.uuid, 'accepted')
except exception.MigrationNotFoundByStatus:
LOG.debug("No migration record for the rebuild/evacuate "
"request.", instance=instance)
migration = None
compute_utils.notify_about_instance_usage(
self.notifier, context, instance, "rebuild.scheduled")
instance.availability_zone = (
availability_zones.get_host_availability_zone(
context, host))
self.compute_rpcapi.rebuild_instance(context,
instance=instance,
new_pass=new_pass,
injected_files=injected_files,
image_ref=image_ref,
orig_image_ref=orig_image_ref,
orig_sys_metadata=orig_sys_metadata,
bdms=bdms,
recreate=recreate,
on_shared_storage=on_shared_storage,
preserve_ephemeral=preserve_ephemeral,
migration=migration,
host=host, node=node, limits=limits)
# TODO(avolkov): move method to bdm
@staticmethod
def _volume_size(instance_type, bdm):
size = bdm.get('volume_size')
# NOTE (ndipanov): inherit flavor size only for swap and ephemeral
if (size is None and bdm.get('source_type') == 'blank' and
bdm.get('destination_type') == 'local'):
if bdm.get('guest_format') == 'swap':
size = instance_type.get('swap', 0)
else:
size = instance_type.get('ephemeral_gb', 0)
return size
def _create_block_device_mapping(self, cell, instance_type, instance_uuid,
block_device_mapping):
"""Create the BlockDeviceMapping objects in the db.
This method makes a copy of the list in order to avoid using the same
id field in case this is called for multiple instances.
"""
LOG.debug("block_device_mapping %s", list(block_device_mapping),
instance_uuid=instance_uuid)
instance_block_device_mapping = copy.deepcopy(block_device_mapping)
for bdm in instance_block_device_mapping:
bdm.volume_size = self._volume_size(instance_type, bdm)
bdm.instance_uuid = instance_uuid
with obj_target_cell(bdm, cell):
bdm.update_or_create()
return instance_block_device_mapping
def _create_tags(self, context, instance_uuid, tags):
"""Create the Tags objects in the db."""
if tags:
tag_list = [tag.tag for tag in tags]
instance_tags = objects.TagList.create(
context, instance_uuid, tag_list)
return instance_tags
else:
return tags
def _bury_in_cell0(self, context, request_spec, exc,
build_requests=None, instances=None):
"""Ensure all provided build_requests and instances end up in cell0.
Cell0 is the fake cell we schedule dead instances to when we can't
schedule them somewhere real. Requests that don't yet have instances
will get a new instance, created in cell0. Instances that have not yet
been created will be created in cell0. All build requests are destroyed
after we're done. Failure to delete a build request will trigger the
instance deletion, just like the happy path in
schedule_and_build_instances() below.
"""
try:
cell0 = objects.CellMapping.get_by_uuid(
context, objects.CellMapping.CELL0_UUID)
except exception.CellMappingNotFound:
# Not yet setup for cellsv2. Instances will need to be written
# to the configured database. This will become a deployment
# error in Ocata.
LOG.error('No cell mapping found for cell0 while '
'trying to record scheduling failure. '
'Setup is incomplete.')
return
build_requests = build_requests or []
instances = instances or []
instances_by_uuid = {inst.uuid: inst for inst in instances}
for build_request in build_requests:
if build_request.instance_uuid not in instances_by_uuid:
# This is an instance object with no matching db entry.
instance = build_request.get_new_instance(context)
instances_by_uuid[instance.uuid] = instance
updates = {'vm_state': vm_states.ERROR, 'task_state': None}
legacy_spec = request_spec.to_legacy_request_spec_dict()
for instance in instances_by_uuid.values():
with obj_target_cell(instance, cell0) as cctxt:
instance.create()
# Use the context targeted to cell0 here since the instance is
# now in cell0.
self._set_vm_state_and_notify(
cctxt, instance.uuid, 'build_instances', updates,
exc, legacy_spec)
try:
# We don't need the cell0-targeted context here because the
# instance mapping is in the API DB.
inst_mapping = \
objects.InstanceMapping.get_by_instance_uuid(
context, instance.uuid)
inst_mapping.cell_mapping = cell0
inst_mapping.save()
except exception.InstanceMappingNotFound:
pass
for build_request in build_requests:
try:
build_request.destroy()
except exception.BuildRequestNotFound:
# Instance was deleted before we finished scheduling
inst = instances_by_uuid[build_request.instance_uuid]
with obj_target_cell(inst, cell0):
inst.destroy()
def schedule_and_build_instances(self, context, build_requests,
request_specs, image,
admin_password, injected_files,
requested_networks, block_device_mapping,
tags=None):
# Add all the UUIDs for the instances
instance_uuids = [spec.instance_uuid for spec in request_specs]
try:
hosts = self._schedule_instances(context, request_specs[0],
instance_uuids)
except Exception as exc:
LOG.exception('Failed to schedule instances')
self._bury_in_cell0(context, request_specs[0], exc,
build_requests=build_requests)
return
host_mapping_cache = {}
instances = []
for (build_request, request_spec, host) in six.moves.zip(
build_requests, request_specs, hosts):
instance = build_request.get_new_instance(context)
# Convert host from the scheduler into a cell record
if host['host'] not in host_mapping_cache:
try:
host_mapping = objects.HostMapping.get_by_host(
context, host['host'])
host_mapping_cache[host['host']] = host_mapping
except exception.HostMappingNotFound as exc:
LOG.error('No host-to-cell mapping found for selected '
'host %(host)s. Setup is incomplete.',
{'host': host['host']})
self._bury_in_cell0(context, request_spec, exc,
build_requests=[build_request],
instances=[instance])
# This is a placeholder in case the quota recheck fails.
instances.append(None)
continue
else:
host_mapping = host_mapping_cache[host['host']]
cell = host_mapping.cell_mapping
# Before we create the instance, let's make one final check that
# the build request is still around and wasn't deleted by the user
# already.
try:
objects.BuildRequest.get_by_instance_uuid(
context, instance.uuid)
except exception.BuildRequestNotFound:
# the build request is gone so we're done for this instance
LOG.debug('While scheduling instance, the build request '
'was already deleted.', instance=instance)
# This is a placeholder in case the quota recheck fails.
instances.append(None)
continue
else:
instance.availability_zone = (
availability_zones.get_host_availability_zone(
context, host['host']))
with obj_target_cell(instance, cell):
instance.create()
instances.append(instance)
# NOTE(melwitt): We recheck the quota after creating the
# objects to prevent users from allocating more resources
# than their allowed quota in the event of a race. This is
# configurable because it can be expensive if strict quota
# limits are not required in a deployment.
if CONF.quota.recheck_quota:
try:
compute_utils.check_num_instances_quota(
context, instance.flavor, 0, 0,
orig_num_req=len(build_requests))
except exception.TooManyInstances as exc:
with excutils.save_and_reraise_exception():
self._cleanup_build_artifacts(context, exc, instances,
build_requests,
request_specs)
for (build_request, request_spec, host, instance) in six.moves.zip(
build_requests, request_specs, hosts, instances):
if instance is None:
# Skip placeholders that were buried in cell0 or had their
# build requests deleted by the user before instance create.
continue
filter_props = request_spec.to_legacy_filter_properties_dict()
scheduler_utils.populate_retry(filter_props, instance.uuid)
scheduler_utils.populate_filter_properties(filter_props,
host)
# send a state update notification for the initial create to
# show it going from non-existent to BUILDING
notifications.send_update_with_states(context, instance, None,
vm_states.BUILDING, None, None, service="conductor")
with obj_target_cell(instance, cell) as cctxt:
objects.InstanceAction.action_start(
cctxt, instance.uuid, instance_actions.CREATE,
want_result=False)
instance_bdms = self._create_block_device_mapping(
cell, instance.flavor, instance.uuid, block_device_mapping)
instance_tags = self._create_tags(cctxt, instance.uuid, tags)
# TODO(Kevin Zheng): clean this up once instance.create() handles
# tags; we do this so the instance.create notification in
# build_and_run_instance in nova-compute doesn't lazy-load tags
instance.tags = instance_tags if instance_tags \
else objects.TagList()
# Update mapping for instance. Normally this check is guarded by
# a try/except but if we're here we know that a newer nova-api
# handled the build process and would have created the mapping
inst_mapping = objects.InstanceMapping.get_by_instance_uuid(
context, instance.uuid)
inst_mapping.cell_mapping = cell
inst_mapping.save()
if not self._delete_build_request(
context, build_request, instance, cell, instance_bdms,
instance_tags):
# The build request was deleted before/during scheduling so
# the instance is gone and we don't have anything to build for
# this one.
continue
# NOTE(danms): Compute RPC expects security group names or ids
# not objects, so convert this to a list of names until we can
# pass the objects.
legacy_secgroups = [s.identifier
for s in request_spec.security_groups]
with obj_target_cell(instance, cell) as cctxt:
self.compute_rpcapi.build_and_run_instance(
cctxt, instance=instance, image=image,
request_spec=request_spec,
filter_properties=filter_props,
admin_password=admin_password,
injected_files=injected_files,
requested_networks=requested_networks,
security_groups=legacy_secgroups,
block_device_mapping=instance_bdms,
host=host['host'], node=host['nodename'],
limits=host['limits'])
def _cleanup_build_artifacts(self, context, exc, instances, build_requests,
request_specs):
for (instance, build_request, request_spec) in six.moves.zip(
instances, build_requests, request_specs):
# Skip placeholders that were buried in cell0 or had their
# build requests deleted by the user before instance create.
if instance is None:
continue
updates = {'vm_state': vm_states.ERROR, 'task_state': None}
legacy_spec = request_spec.to_legacy_request_spec_dict()
self._set_vm_state_and_notify(context, instance.uuid,
'build_instances', updates, exc,
legacy_spec)
# Be paranoid about artifacts being deleted underneath us.
try:
build_request.destroy()
except exception.BuildRequestNotFound:
pass
try:
request_spec.destroy()
except exception.RequestSpecNotFound:
pass
def _delete_build_request(self, context, build_request, instance, cell,
instance_bdms, instance_tags):
"""Delete a build request after creating the instance in the cell.
This method handles cleaning up the instance in case the build request
is already deleted by the time we try to delete it.
:param context: the context of the request being handled
:type context: nova.context.RequestContext
:param build_request: the build request to delete
:type build_request: nova.objects.BuildRequest
:param instance: the instance created from the build_request
:type instance: nova.objects.Instance
:param cell: the cell in which the instance was created
:type cell: nova.objects.CellMapping
:param instance_bdms: list of block device mappings for the instance
:type instance_bdms: nova.objects.BlockDeviceMappingList
:param instance_tags: list of tags for the instance
:type instance_tags: nova.objects.TagList
:returns: True if the build request was successfully deleted, False if
the build request was already deleted and the instance is now gone.
"""
try:
build_request.destroy()
except exception.BuildRequestNotFound:
# This indicates an instance deletion request has been
# processed, and the build should halt here. Clean up the
# bdm, tags and instance record.
with obj_target_cell(instance, cell) as cctxt:
with compute_utils.notify_about_instance_delete(
self.notifier, cctxt, instance):
try:
instance.destroy()
except exception.InstanceNotFound:
pass
except exception.ObjectActionError:
# NOTE(melwitt): Instance became scheduled during
# the destroy, "host changed". Refresh and re-destroy.
try:
instance.refresh()
instance.destroy()
except exception.InstanceNotFound:
pass
for bdm in instance_bdms:
with obj_target_cell(bdm, cell):
try:
bdm.destroy()
except exception.ObjectActionError:
pass
if instance_tags:
with try_target_cell(context, cell) as target_ctxt:
try:
objects.TagList.destroy(target_ctxt, instance.uuid)
except exception.InstanceNotFound:
pass
return False
return True
| apache-2.0 | -8,708,059,897,833,092,000 | 48.152042 | 79 | 0.565101 | false | 4.944415 | false | false | false |
E7ernal/quizwhiz | quizard/views/Results.py | 1 | 5042 | # vim: ts=4:sw=4:expandtabs
__author__ = '[email protected]'
from django.conf import settings
from django.views import generic
from django.contrib import messages
from django.shortcuts import redirect
from django.template.loader import get_template
from django.utils.translation import ugettext_lazy as _
from email_utils.tasks import send_mail
from quizard.models.Assignment import Assignment
class Results(generic.DetailView):
model = Assignment
slug_field = 'code'
slug_url_kwarg = 'code'
context_object_name = 'assignment'
template_name = 'quizard/results.html'
def get(self, request, *pos, **kw):
# If the user isn't currently working on an assignment,
# they shouldn't be allowed to access the results page.
if 'assignment_code' not in self.request.session:
messages.info(request, _('You must complete an assignment before visiting the results page.'))
return redirect('index')
# If the assignment is still in progress (i.e., we have a current position),
# send the user back to that position rather than allowing them to view their
# (incomplete) results.
if isinstance(request.session.get('assignment_in_progress', None), basestring):
messages.info(request, _('You must complete this assignment before viewing your results.'))
return redirect(request.session['assignment_in_progress'])
return super(Results, self).get(request, *pos, **kw)
def get_context_data(self, **kw):
context = super(Results, self).get_context_data(**kw)
context.update({
'points_earned': self.object.calculate_score(self.request.session['answers']),
'questions': self.build_question_dicts(
context['assignment'],
self.request.session['answers']
)
})
# Record the user's score on this assignment.
completed_assignments = self.request.session.get('completed_assignments', {})
completed_assignments[self.object.code] = context['points_earned']
self.request.session['completed_assignments'] = completed_assignments
# Clear the user's current assignment.
# del self.request.session['assignment_code']
self.request.session.modified = True
self.send_emails()
return context
def build_question_dicts(self, assignment, answers):
question_list = []
for question in assignment.questions.all():
question_list.append({
'question': question,
'answer': answers[str(question.pk)],
'correct': question.validate_answer(answers[str(question.pk)]),
})
return question_list
def send_emails(self):
self.send_teacher_email(self.object)
self.send_summary_email(self.object)
def send_teacher_email(self, assignment):
"""
Email the assignment creator the results of this particular
quiz-taking session.
"""
self._send_email(
assignment,
assignment.created_by.email,
_("{assignment.code} results -- {assignee_name}").format(
assignment=assignment,
assignee_name=self.request.session['assignee_name'],
),
'quizard/emails/assignment_results.txt'
)
def send_summary_email(self, assignment):
"""
Sent a results receipt to the given third-party, if there is one.
"""
if self.request.session.get('assignee_email', None):
self._send_email(
assignment,
self.request.session['assignee_email'],
_("{assignment.code} summary -- {assignee_name}").format(
assignment=assignment,
assignee_name=self.request.session['assignee_name']
),
'quizard/emails/assignment_results_summary.txt'
)
def _send_email(self, assignment, to_address, subject, email_template):
template_instance = get_template(email_template)
context = {
'assignment': assignment,
'points_earned': assignment.calculate_score(self.request.session['answers']),
'questions': self.build_question_dicts(
assignment,
self.request.session['answers'],
),
'assignee_name': self.request.session['assignee_name'],
'DEFAULT_FROM_EMAIL': settings.DEFAULT_FROM_EMAIL,
'BRAND_NAME': settings.BRAND_NAME
}
args = (
subject,
template_instance.render(context),
settings.DEFAULT_FROM_EMAIL,
to_address
)
# Don't try to invoke the task asynchronously in DEBUG mode,
# because it's a dev environment and celery probably isn't configured.
if settings.DEBUG:
return send_mail(*args)
else:
return send_mail.apply_async(args)
| mit | -2,658,239,532,650,944,500 | 35.80292 | 106 | 0.61067 | false | 4.43058 | false | false | false |
macosforge/ccs-calendarserver | calendarserver/tools/agent.py | 1 | 10761 | #!/usr/bin/env python
# -*- test-case-name: calendarserver.tools.test.test_agent -*-
##
# Copyright (c) 2013-2017 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
"""
A service spawned on-demand by launchd, meant to handle configuration requests
from Server.app. When a request comes in on the socket specified in the
launchd agent.plist, launchd will run "caldavd -t Agent" which ends up creating
this service. Requests are made using HTTP POSTS to /gateway, and are
authenticated by OpenDirectory.
"""
from __future__ import print_function
__all__ = [
"makeAgentService",
]
import cStringIO
from plistlib import readPlistFromString, writePlistToString
import socket
from twext.python.launchd import launchActivateSocket
from twext.python.log import Logger
from twext.who.checker import HTTPDigestCredentialChecker
from twext.who.opendirectory import (
DirectoryService as OpenDirectoryDirectoryService,
NoQOPDigestCredentialFactory
)
from twisted.application.internet import StreamServerEndpointService
from twisted.cred.portal import IRealm, Portal
from twisted.internet.defer import inlineCallbacks, returnValue
from twisted.internet.endpoints import AdoptedStreamServerEndpoint
from twisted.internet.protocol import Factory
from twisted.protocols import amp
from twisted.web.guard import HTTPAuthSessionWrapper
from twisted.web.resource import IResource, Resource, ForbiddenResource
from twisted.web.server import Site, NOT_DONE_YET
from zope.interface import implements
log = Logger()
class AgentRealm(object):
"""
Only allow a specified list of avatar IDs to access the site
"""
implements(IRealm)
def __init__(self, root, allowedAvatarIds):
"""
@param root: The root resource of the site
@param allowedAvatarIds: The list of IDs to allow access to
"""
self.root = root
self.allowedAvatarIds = allowedAvatarIds
def requestAvatar(self, avatarId, mind, *interfaces):
if IResource in interfaces:
if avatarId.shortNames[0] in self.allowedAvatarIds:
return (IResource, self.root, lambda: None)
else:
return (IResource, ForbiddenResource(), lambda: None)
raise NotImplementedError()
class AgentGatewayResource(Resource):
"""
The gateway resource which forwards incoming requests through
gateway.Runner.
"""
isLeaf = True
def __init__(self, store, directory, inactivityDetector):
"""
@param store: an already opened store
@param directory: a directory service
@param inactivityDetector: the InactivityDetector to tell when requests
come in
"""
Resource.__init__(self)
self.store = store
self.directory = directory
self.inactivityDetector = inactivityDetector
def render_POST(self, request):
"""
Take the body of the POST request and feed it to gateway.Runner();
return the result as the response body.
"""
self.inactivityDetector.activity()
def onSuccess(result, output):
txt = output.getvalue()
output.close()
request.write(txt)
request.finish()
def onError(failure):
message = failure.getErrorMessage()
tbStringIO = cStringIO.StringIO()
failure.printTraceback(file=tbStringIO)
tbString = tbStringIO.getvalue()
tbStringIO.close()
error = {
"Error": message,
"Traceback": tbString,
}
log.error("command failed {error}", error=failure)
request.write(writePlistToString(error))
request.finish()
from calendarserver.tools.gateway import Runner
body = request.content.read()
command = readPlistFromString(body)
output = cStringIO.StringIO()
runner = Runner(self.store, [command], output=output)
d = runner.run()
d.addCallback(onSuccess, output)
d.addErrback(onError)
return NOT_DONE_YET
def makeAgentService(store):
"""
Returns a service which will process GatewayAMPCommands, using a socket
file descripter acquired by launchd
@param store: an already opened store
@returns: service
"""
from twisted.internet import reactor
sockets = launchActivateSocket("AgentSocket")
fd = sockets[0]
family = socket.AF_INET
endpoint = AdoptedStreamServerEndpoint(reactor, fd, family)
directory = store.directoryService()
def becameInactive():
log.warn("Agent inactive; shutting down")
reactor.stop()
from twistedcaldav.config import config
inactivityDetector = InactivityDetector(
reactor, config.AgentInactivityTimeoutSeconds, becameInactive
)
root = Resource()
root.putChild(
"gateway",
AgentGatewayResource(
store, directory, inactivityDetector
)
)
# We need this service to be able to return com.apple.calendarserver,
# so tell it not to suppress system accounts.
directory = OpenDirectoryDirectoryService(
"/Local/Default", suppressSystemRecords=False
)
portal = Portal(
AgentRealm(root, [u"com.apple.calendarserver"]),
[HTTPDigestCredentialChecker(directory)]
)
credentialFactory = NoQOPDigestCredentialFactory(
"md5", "/Local/Default"
)
wrapper = HTTPAuthSessionWrapper(portal, [credentialFactory])
site = Site(wrapper)
return StreamServerEndpointService(endpoint, site)
class InactivityDetector(object):
"""
If no 'activity' takes place for a specified amount of time, a method
will get called. Activity causes the inactivity time threshold to be
reset.
"""
def __init__(self, reactor, timeoutSeconds, becameInactive):
"""
@param reactor: the reactor
@timeoutSeconds: the number of seconds considered to mean inactive
@becameInactive: the method to call (with no arguments) when
inactivity is reached
"""
self._reactor = reactor
self._timeoutSeconds = timeoutSeconds
self._becameInactive = becameInactive
if self._timeoutSeconds > 0:
self._delayedCall = self._reactor.callLater(
self._timeoutSeconds,
self._inactivityThresholdReached
)
def _inactivityThresholdReached(self):
"""
The delayed call has fired. We're inactive. Call the becameInactive
method.
"""
self._becameInactive()
def activity(self):
"""
Call this to let the InactivityMonitor that there has been activity.
It will reset the timeout.
"""
if self._timeoutSeconds > 0:
if self._delayedCall.active():
self._delayedCall.reset(self._timeoutSeconds)
else:
self._delayedCall = self._reactor.callLater(
self._timeoutSeconds,
self._inactivityThresholdReached
)
def stop(self):
"""
Cancels the delayed call
"""
if self._timeoutSeconds > 0:
if self._delayedCall.active():
self._delayedCall.cancel()
#
# Alternate implementation using AMP instead of HTTP
#
class GatewayAMPCommand(amp.Command):
"""
A command to be executed by gateway.Runner
"""
arguments = [('command', amp.String())]
response = [('result', amp.String())]
class GatewayAMPProtocol(amp.AMP):
"""
Passes commands to gateway.Runner and returns the results
"""
def __init__(self, store, directory):
"""
@param store: an already opened store
operations
@param directory: a directory service
"""
amp.AMP.__init__(self)
self.store = store
self.directory = directory
@GatewayAMPCommand.responder
@inlineCallbacks
def gatewayCommandReceived(self, command):
"""
Process a command via gateway.Runner
@param command: GatewayAMPCommand
@returns: a deferred returning a dict
"""
command = readPlistFromString(command)
output = cStringIO.StringIO()
from calendarserver.tools.gateway import Runner
runner = Runner(
self.store,
[command], output=output
)
try:
yield runner.run()
result = output.getvalue()
output.close()
except Exception as e:
error = {"Error": str(e)}
result = writePlistToString(error)
output.close()
returnValue(dict(result=result))
class GatewayAMPFactory(Factory):
"""
Builds GatewayAMPProtocols
"""
protocol = GatewayAMPProtocol
def __init__(self, store):
"""
@param store: an already opened store
"""
self.store = store
self.directory = self.store.directoryService()
def buildProtocol(self, addr):
return GatewayAMPProtocol(
self.store, self.davRootResource, self.directory
)
#
# A test AMP client
#
command = """<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN"
"http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>command</key>
<string>getLocationAndResourceList</string>
</dict>
</plist>"""
def getList():
# For the sample client, below:
from twisted.internet import reactor
from twisted.internet.protocol import ClientCreator
creator = ClientCreator(reactor, amp.AMP)
host = '127.0.0.1'
import sys
if len(sys.argv) > 1:
host = sys.argv[1]
d = creator.connectTCP(host, 62308)
def connected(ampProto):
return ampProto.callRemote(GatewayAMPCommand, command=command)
d.addCallback(connected)
def resulted(result):
return result['result']
d.addCallback(resulted)
def done(result):
print('Done: %s' % (result,))
reactor.stop()
d.addCallback(done)
reactor.run()
if __name__ == '__main__':
getList()
| apache-2.0 | -6,349,575,303,463,743,000 | 28.401639 | 79 | 0.646222 | false | 4.426573 | false | false | false |
rocky/python2-trepan | trepan/bwprocessor/main.py | 1 | 18126 | # -*- coding: utf-8 -*-
# Copyright (C) 2008-2010, 2013-2015 Rocky Bernstein <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import inspect, linecache, sys, traceback, types
import pyficache
from repr import Repr
from trepan import vprocessor as Mprocessor
from trepan import exception as Mexcept, misc as Mmisc
from trepan.lib import bytecode as Mbytecode, display as Mdisplay
from trepan.lib import thred as Mthread
from trepan.bwprocessor import location as Mlocation, msg as Mmsg
def get_stack(f, t, botframe, proc_obj=None):
"""Return a stack of frames which the debugger will use for in
showing backtraces and in frame switching. As such various frame
that are really around may be excluded unless we are debugging the
sebugger. Also we will add traceback frame on top if that
exists."""
exclude_frame = lambda f: False
if proc_obj:
settings = proc_obj.debugger.settings
if not settings['dbg_trepan']:
exclude_frame = lambda f: \
proc_obj.core.ignore_filter.is_included(f)
pass
pass
stack = []
if t and t.tb_frame is f:
t = t.tb_next
while f is not None:
if exclude_frame(f): break # See commented alternative below
stack.append((f, f.f_lineno))
# bdb has:
# if f is botframe: break
f = f.f_back
pass
stack.reverse()
i = max(0, len(stack) - 1)
while t is not None:
stack.append((t.tb_frame, t.tb_lineno))
t = t.tb_next
pass
return stack, i
def run_hooks(obj, hooks, *args):
"""Run each function in `hooks' with args"""
for hook in hooks:
if hook(obj, *args): return True
pass
return False
def resolve_name(obj, command_name):
if command_name not in obj.commands:
return None
return command_name
# Default settings for command processor method call
DEFAULT_PROC_OPTS = {
# A list of debugger initialization files to read on first command
# loop entry. Often this something like [~/.trepanrc] which the
# front-end sets.
'initfile_list' : []
}
class BWProcessor(Mprocessor.Processor):
def __init__(self, core_obj, opts=None):
Mprocessor.Processor.__init__(self, core_obj)
self.response = {'errs': [], 'msg': []}
self.continue_running = False # True if we should leave command loop
self.cmd_instances = self._populate_commands()
self.cmd_name = '' # command name before alias or
# macro resolution
self.current_command = '' # Current command getting run
self.debug_nest = 1
self.display_mgr = Mdisplay.DisplayMgr()
self.intf = core_obj.debugger.intf
self.last_command = None # Initially a no-op
self.precmd_hooks = []
# If not:
# self.location = lambda : print_location(self)
self.preloop_hooks = []
self.postcmd_hooks = []
self._populate_cmd_lists()
# Stop only if line/file is different from last time
self.different_line = None
# These values updated on entry. Set initial values.
self.curframe = None
self.event = None
self.event_arg = None
self.frame = None
self.list_lineno = 0
# Create a custom safe Repr instance and increase its maxstring.
# The default of 30 truncates error messages too easily.
self._repr = Repr()
self._repr.maxstring = 100
self._repr.maxother = 60
self._repr.maxset = 10
self._repr.maxfrozen = 10
self._repr.array = 10
self._saferepr = self._repr.repr
self.stack = []
self.thread_name = None
self.frame_thread_name = None
return
def add_preloop_hook(self, hook, position=-1, nodups = True):
if hook in self.preloop_hooks: return False
self.preloop_hooks.insert(position, hook)
return True
def adjust_frame(self, pos, absolute_pos):
"""Adjust stack frame by pos positions. If absolute_pos then
pos is an absolute number. Otherwise it is a relative number.
A negative number indexes from the other end."""
if not self.curframe:
Mmsg.errmsg(self, "No stack.")
return
# Below we remove any negativity. At the end, pos will be
# the new value of self.curindex.
if absolute_pos:
if pos >= 0:
pos = len(self.stack)-pos-1
else:
pos = -pos-1
else:
pos += self.curindex
if pos < 0:
Mmsg.errmsg(self,
"Adjusting would put us beyond the oldest frame.")
return
elif pos >= len(self.stack):
Mmsg.errmsg(self,
"Adjusting would put us beyond the newest frame.")
return
self.curindex = pos
self.curframe = self.stack[self.curindex][0]
self.print_location()
self.list_lineno = None
return
# To be overridden in derived debuggers
def defaultFile(self):
"""Produce a reasonable default."""
filename = self.curframe.f_code.co_filename
# Consider using is_exec_stmt(). I just don't understand
# the conditions under which the below test is true.
if filename == '<string>' and self.debugger.mainpyfile:
filename = self.debugger.mainpyfile
pass
return filename
def event_processor(self, frame, event, event_arg, prompt='Trepan'):
'command event processor: reading a commands do something with them.'
self.frame = frame
self.event = event
self.event_arg = event_arg
filename = frame.f_code.co_filename
lineno = frame.f_lineno
line = linecache.getline(filename, lineno, frame.f_globals)
if not line:
opts = {'output': 'plain',
'reload_on_change': self.settings('reload'),
'strip_nl': False}
line = pyficache.getline(filename, lineno, opts)
self.current_source_text = line
if self.settings('skip') is not None:
if Mbytecode.is_def_stmt(line, frame):
return True
if Mbytecode.is_class_def(line, frame):
return True
pass
self.thread_name = Mthread.current_thread_name()
self.frame_thread_name = self.thread_name
self.process_commands()
return True
def forget(self):
""" Remove memory of state variables set in the command processor """
self.stack = []
self.curindex = 0
self.curframe = None
self.thread_name = None
self.frame_thread_name = None
return
def eval(self, arg):
"""Eval string arg in the current frame context."""
try:
return eval(arg, self.curframe.f_globals,
self.curframe.f_locals)
except:
t, v = sys.exc_info()[:2]
if isinstance(t, str):
exc_type_name = t
pass
else: exc_type_name = t.__name__
Mmsg.errmsg(self, str("%s: %s" % (exc_type_name, arg)))
raise
return None # Not reached
def exec_line(self, line):
if self.curframe:
local_vars = self.curframe.f_locals
global_vars = self.curframe.f_globals
else:
local_vars = None
# FIXME: should probably have place where the
# user can store variables inside the debug session.
# The setup for this should be elsewhere. Possibly
# in interaction.
global_vars = None
try:
code = compile(line + '\n', '"%s"' % line, 'single')
exec code in global_vars, local_vars
except:
t, v = sys.exc_info()[:2]
if isinstance(t, types.StringType):
exc_type_name = t
else: exc_type_name = t.__name__
Mmsg.errmsg(self, '%s: %s' % (str(exc_type_name), str(v)))
pass
return
def ok_for_running(self, cmd_obj, name, cmd_hash):
'''We separate some of the common debugger command checks here:
whether it makes sense to run the command in this execution state,
if the command has the right number of arguments and so on.
'''
if hasattr(cmd_obj, 'execution_set'):
if not (self.core.execution_status in cmd_obj.execution_set):
part1 = ("Command '%s' is not available for execution "
"status:" % name)
Mmsg.errmsg(self,
Mmisc.
wrapped_lines(part1,
self.core.execution_status,
self.debugger.settings['width']))
return False
pass
if self.frame is None and cmd_obj.need_stack:
self.intf[-1].errmsg("Command '%s' needs an execution stack."
% name)
return False
return True
def process_commands(self):
"""Handle debugger commands."""
if self.core.execution_status != 'No program':
self.setup()
Mlocation.print_location(self, self.event)
pass
leave_loop = run_hooks(self, self.preloop_hooks)
self.continue_running = False
while not leave_loop:
try:
run_hooks(self, self.precmd_hooks)
# bdb had a True return to leave loop.
# A more straight-forward way is to set
# instance variable self.continue_running.
leave_loop = self.process_command()
if leave_loop or self.continue_running: break
except EOFError:
# If we have stacked interfaces, pop to the next
# one. If this is the last one however, we'll
# just stick with that. FIXME: Possibly we should
# check to see if we are interactive. and not
# leave if that's the case. Is this the right
# thing? investigate and fix.
if len(self.debugger.intf) > 1:
del self.debugger.intf[-1]
self.last_command = ''
else:
if self.debugger.intf[-1].output:
self.debugger.intf[-1].output.writeline('Leaving')
raise Mexcept.DebuggerQuit
pass
break
pass
pass
return run_hooks(self, self.postcmd_hooks)
def process_command(self):
# process command
self.response = {'errs': [], 'msg': []}
cmd_hash = self.intf[-1].read_command()
# FIXME: put this into a routine
if isinstance(cmd_hash, types.DictType):
Mmsg.errmsg(self, "invalid input, expecting a hash: %s" % cmd_hash,
{'set_name': True})
self.intf[-1].msg(self.response)
return False
if 'command' not in cmd_hash:
Mmsg.errmsg(self,
"invalid input, expecting a 'command' key: %s" %
cmd_hash,
{'set_name': True})
self.intf[-1].msg(self.response)
return False
self.cmd_name = cmd_hash['command']
cmd_name = resolve_name(self, self.cmd_name)
if cmd_name:
cmd_obj = self.commands[cmd_name]
if self.ok_for_running(cmd_obj, cmd_name, cmd_hash):
try:
self.response['name'] = cmd_name
result = cmd_obj.run(cmd_hash)
self.intf[-1].msg(self.response)
if result: return result
except (Mexcept.DebuggerQuit,
Mexcept.DebuggerRestart, SystemExit):
# Let these exceptions propagate through
raise
except:
Mmsg.errmsg(self, "INTERNAL ERROR: " +
traceback.format_exc())
pass
pass
else:
self.undefined_cmd(cmd_name)
pass
pass
return False
def remove_preloop_hook(self, hook):
try:
position = self.preloop_hooks.index(hook)
except ValueError:
return False
del self.preloop_hooks[position]
return True
def setup(self):
"""Initialization done before entering the debugger-command
loop. In particular we set up the call stack used for local
variable lookup and frame/up/down commands.
We return True if we should NOT enter the debugger-command
loop."""
self.forget()
if self.settings('dbg_trepan'):
self.frame = inspect.currentframe()
pass
if self.event in ['exception', 'c_exception']:
exc_type, exc_value, exc_traceback = self.event_arg
else:
_, _, exc_traceback = (None, None, None,) # NOQA
pass
if self.frame or exc_traceback:
self.stack, self.curindex = \
get_stack(self.frame, exc_traceback, None, self)
self.curframe = self.stack[self.curindex][0]
self.thread_name = Mthread.current_thread_name()
else:
self.stack = self.curframe = \
self.botframe = None
pass
if self.curframe:
self.list_lineno = \
max(1, inspect.getlineno(self.curframe))
else:
self.list_lineno = None
pass
# if self.execRcLines()==1: return True
return False
def undefined_cmd(self, cmd):
"""Error message when a command doesn't exist"""
Mmsg.errmsg(self, 'Undefined command: "%s". Try "help".' % cmd)
return
def _populate_commands(self):
""" Create an instance of each of the debugger
commands. Commands are found by importing files in the
directory 'command'. Some files are excluded via an array set
in __init__. For each of the remaining files, we import them
and scan for class names inside those files and for each class
name, we will create an instance of that class. The set of
DebuggerCommand class instances form set of possible debugger
commands."""
cmd_instances = []
from trepan.bwprocessor import command as Mcommand
eval_cmd_template = 'command_mod.%s(self)'
for mod_name in Mcommand.__modules__:
import_name = "command." + mod_name
try:
command_mod = getattr(__import__(import_name), mod_name)
except:
print('Error importing %s: %s' %
(mod_name, sys.exc_info()[0]))
continue
classnames = [ tup[0] for tup in
inspect.getmembers(command_mod, inspect.isclass)
if ('DebuggerCommand' != tup[0] and
tup[0].endswith('Command')) ]
for classname in classnames:
eval_cmd = eval_cmd_template % classname
try:
instance = eval(eval_cmd)
cmd_instances.append(instance)
except:
print ('Error loading %s from %s: %s' %
(classname, mod_name, sys.exc_info()[0]))
pass
pass
pass
return cmd_instances
def _populate_cmd_lists(self):
""" Populate self.commands"""
self.commands = {}
for cmd_instance in self.cmd_instances:
cmd_name = cmd_instance.name
self.commands[cmd_name] = cmd_instance
pass
return
pass
# Demo it
if __name__=='__main__':
from trepan.interfaces import bullwinkle as Mbullwinkle
class Debugger:
def __init__(self):
self.intf = [Mbullwinkle.BWInterface()]
self.settings = {'dbg_trepan': True, 'reload': False}
pass
class MockCore:
def filename(self, fn): return fn
def canonic_filename(self, frame): return frame.f_code.co_filename
def __init__(self):
self.debugger = Debugger()
return
pass
core = MockCore()
bwproc = BWProcessor(core)
print 'commands:'
commands = bwproc.commands.keys()
commands.sort()
print commands
print resolve_name(bwproc, 'quit')
# print '-' * 10
# print_source_line(sys.stdout.write, 100, 'source_line_test.py')
# print '-' * 10
bwproc.frame = sys._getframe()
bwproc.setup()
# print
# print '-' * 10
Mlocation.print_location(bwproc)
# print 'Removing non-existing quit hook: ', bwproc.remove_preloop_hook(fn)
# bwproc.add_preloop_hook(fn)
# print bwproc.preloop_hooks
# print 'Removed existing quit hook: ', bwproc.remove_preloop_hook(fn)
pass
| gpl-3.0 | 3,372,832,090,076,952,000 | 35.692308 | 79 | 0.549542 | false | 4.270971 | false | false | false |
drewp/commentserve | commentServe.py | 1 | 12072 | #!/usr/bin/python
"""comment storage for blogs, photo site, etc
see also:
sioc:Post sioc:has_reply sioc:Post / types:Comment
sioc:content
content:encoded
dcterms:created
types:BlogPost
types:Comment
"""
import web, time, logging, pystache, traceback
from datetime import datetime
from uuid import uuid4
from html5lib import html5parser, sanitizer
from web.contrib.template import render_genshi
from rdflib import RDF, URIRef, Literal, Namespace
from dateutil.parser import parse
from honeypot import HoneypotChecker
import restkit
from dateutil.tz import tzlocal
import cyclone.web
from twisted.internet import reactor
from db import DbMongo
SIOC = Namespace("http://rdfs.org/sioc/ns#")
CONTENT = Namespace("http://purl.org/rss/1.0/modules/content/")
DCTERMS = Namespace("http://purl.org/dc/terms/")
XS = Namespace("http://www.w3.org/2001/XMLSchema#")
FOAF = Namespace("http://xmlns.com/foaf/0.1/")
HTTP = Namespace("http://www.w3.org/2006/http#")
OV = Namespace("http://open.vocab.org/terms/")
log = logging.getLogger()
logging.basicConfig(format='%(asctime)s:%(levelname)s:%(name)s:%(message)s')
log.setLevel(logging.INFO)
render = render_genshi(['.'], auto_reload=False)
def literalFromUnix(t):
i = datetime.fromtimestamp(int(t)).replace(tzinfo=tzlocal()).isoformat()
return Literal(i, datatype=XS['dateTime'])
def agoString(literalTime):
d = parse(str(literalTime))
# (assuming 'now' is in the same timezone as d)
return web.utils.datestr(d, datetime.now().replace(tzinfo=tzlocal()))
def newPublicUser(forwardedFor, name, email):
"""
a non-logged-in user is posting a comment on a resource that's
open for public comments. We make a new URI for this user (every
time) and store some extra statements.
pass your web post params, which might include 'name' and 'email'.
returns user URI and a list of triples to be stored
"""
stmts = []
user = URIRef('http://bigasterisk.com/guest/%s' % uuid4())
header = URIRef(user + "/header1")
stmts.extend([
(user, RDF.type, FOAF.Person),
(user, DCTERMS.created, literalFromUnix(time.time())),
(user, OV.usedHttpHeader, header),
(header, HTTP.fieldName, Literal('X-Forwarded-For')),
(header, HTTP.fieldValue, Literal(forwardedFor)),
])
if name:
stmts.append((user, FOAF.name, Literal(name)))
if email:
stmts.append((user, FOAF.mbox, URIRef("mailto:%s" % email)))
return user, stmts
def newCommentUri(secs=None):
"""this is essentially a bnode, but a real URI is easier to work with"""
if secs is None:
secs = time.time()
return URIRef("http://bigasterisk.com/comment/%r" % secs)
class AnyCase(sanitizer.HTMLSanitizer):
def __init__(self, stream, encoding=None, parseMeta=True, useChardet=True,
lowercaseElementName=True, lowercaseAttrName=True):
sanitizer.HTMLSanitizer.__init__(self, stream, encoding, parseMeta,
useChardet,
lowercaseElementName,
lowercaseAttrName)
class AnyCaseNoSrc(AnyCase):
allowed_attributes = AnyCase.allowed_attributes[:]
allowed_attributes.remove('src')
def sanitize_html(stream, srcAttr=False):
ret = ''.join([token.toxml() for token in
html5parser.HTMLParser(tokenizer=AnyCase if srcAttr else AnyCaseNoSrc).
parseFragment(stream).childNodes])
return ret
def spamCheck(article, content):
if content.lower().count("<a href") > 0:
log.error("too many links in %r" % content)
raise ValueError("too many links")
if '[url=' in content:
raise ValueError("url markup is too suspicious")
if content.split()[-1].startswith(('http://', 'https://')):
raise ValueError("please don't end with a link")
if article in [URIRef("http://drewp.quickwitretort.com/2008/02/22/0"),
URIRef("http://drewp.quickwitretort.com/2010/07/03/0"),
]:
raise ValueError("spam flood")
for pat in ['viagra', 'cialis', 'probleme de sante', 'pfizer', 'pilules']:
if pat in content.lower():
raise ValueError("spam pattern")
class Comments(cyclone.web.RequestHandler):
def get(self, public=False):
"""
post=<uri to post> (or use 'uri' for the arg)
returns html formatted comments (until i get some more content types)
"""
t1 = time.time()
post = (self.get_argument("post", default=None) or
self.get_argument("uri", default=None))
if not post:
raise ValueError("need 'uri' param")
post = URIRef(post)
foafAgent = None
try:
foafAgent = URIRef(self.request.headers['X-Foaf-Agent'])
except KeyError:
if not public:
self.write("Must login to see comments")
return
queryTime = time.time()
rows = self.findComments(post)
queryTime = time.time() - queryTime
self.set_header("Content-Type", "text/html")
ret = render.comments(
includeJs=self.get_argument("js", default="0") != "0",
public=public,
parent=post,
toHttps=lambda uri: uri.replace('http://', 'https://'),
agoString=agoString,
you=self.settings.db.value(foafAgent, FOAF.name) if foafAgent else None,
rows=rows,
)
self.write(ret + "<!-- %.2f ms (%.2f ms in query) -->" % (
1000 * (time.time() - t1),
1000 * queryTime))
def findComments(self, post):
rows = []
for who, when, content in self.settings.db.query("""
SELECT DISTINCT ?who ?when ?content WHERE {
?parent sioc:has_reply [
sioc:has_creator ?cr;
content:encoded ?content;
dcterms:created ?when
]
OPTIONAL { ?cr foaf:name ?who }
} ORDER BY ?when""", initBindings={"parent" : post}):
row = dict(who=who, when=when, content=sanitize_html(content))
rows.append(row)
log.debug("found %s rows with parent %r" % (len(rows), post))
return rows
def post(self, public=False):
"""
post=<parent post>
content=<html content>
we get the user from the x-foaf-agent header
"""
parent = self.get_argument('post', default=None) or self.get_argument("uri")
assert parent is not None
# maybe a legacy problem here with http/https, but blaster is still sending http
parent = URIRef(parent)
# this might be failing on ariblog, but that one is already safe
ip = self.request.headers.get("X-Forwarded-For")
if ip is not None:
HoneypotChecker(open("priv-honeypotkey").read().strip()).check(ip)
contentArg = self.get_argument("content", default="")
if not contentArg.strip():
raise ValueError("no text")
if contentArg.strip() == 'test':
return "not adding test comment"
spamCheck(parent, contentArg)
content = Literal(contentArg, datatype=RDF.XMLLiteral)
stmts = [] # gathered in one list for an atomic add
foafHeader = self.request.headers.get('X-Foaf-Agent')
if not public:
assert foafHeader
user = URIRef(foafHeader)
# make bnode-ish users for anonymous ones. need to get that username passed in here
else:
if foafHeader:
user = URIRef(foafHeader)
else:
user, moreStmts = newPublicUser(
self.request.headers.get("X-Forwarded-For"),
self.get_argument("name", ""),
self.get_argument("email", ""))
stmts.extend(moreStmts)
secs = time.time()
comment = newCommentUri(secs)
now = literalFromUnix(secs)
ctx = URIRef(parent + "/comments")
stmts.extend([(parent, SIOC.has_reply, comment),
(comment, DCTERMS.created, now),
(comment, SIOC.has_creator, user),
])
stmts.extend(commentStatements(user, comment, content))
db.writeFile(stmts, ctx, fileWords=[parent.split('/')[-1], now])
try:
self.sendAlerts(parent, user)
except Exception, e:
import traceback
log.error(e)
traceback.print_exc()
self.write("added")
def sendAlerts(self, parent, user):
c3po = restkit.Resource('http://bang:9040/')
for listener, mode in [
('http://bigasterisk.com/foaf.rdf#drewp', 'xmpp'),
('http://bigasterisk.com/kelsi/foaf.rdf#kelsi', 'xmpp')]:
c3po.post(
path='', payload={
'user': listener,
'msg': '%s comment from %s (http://10.1.0.1:9031/)' % (parent, user),
'mode': mode,
},
# shouldn't this be automatic?
headers={'content-type' : 'application/x-www-form-urlencoded'},
)
class CommentCount(cyclone.web.RequestHandler):
def get(self, public=False):
if not public:
try:
self.request.headers['X-Foaf-Agent']
except KeyError:
self.set_header("Content-Type", "text/plain")
self.write("Must login to see comments")
return
post = URIRef(self.get_argument("post"))
rows = self.settings.db.query("""
SELECT DISTINCT ?r WHERE {
?parent sioc:has_reply ?r
}""", initBindings={"parent" : post})
count = len(list(rows))
self.set_header("Content-Type", "text/plain")
self.write("%s comments" % count if count != 1 else "1 comment")
class Root(cyclone.web.RequestHandler):
def get(self):
recent = self.settings.db.getRecentComments(10, notOlderThan=60,
withSpam=False)
self.write(pystache.render(open("index.mustache").read(),
dict(recent=recent)))
class Spam(cyclone.web.RequestHandler):
def post(self):
try:
self.settings.db.setType(docId=self.get_argument('docId'), type="spam")
except Exception:
traceback.print_exc()
raise
self.redirect("/")
def commentStatements(user, commentUri, realComment):
# here you can put more processing on the comment text
realComment = Literal(realComment.replace("\r", ""), datatype=realComment.datatype) # rdflib n3 can't read these back
return [(commentUri, CONTENT.encoded, realComment)]
class Index(cyclone.web.RequestHandler):
def get(self):
self.set_header("Content-Type", "text/plain")
self.write("commentServe")
class Fav(cyclone.web.RequestHandler):
def get(self):
self.write(open("favicon.ico").read())
class Application(cyclone.web.Application):
def __init__(self, db):
handlers = [
(r'/comments', Comments),
(r'/(public)/comments', Comments),
(r'/commentCount', CommentCount),
(r'/(public)/commentCount', CommentCount),
(r'/', Root),
(r'/favicon.ico', Fav),
(r'/spam', Spam),
]
cyclone.web.Application.__init__(self, handlers,
db=db,
template_path=".")
if __name__ == '__main__':
db = DbMongo()
from twisted.python.log import startLogging
import sys
startLogging(sys.stdout)
reactor.listenTCP(9031, Application(db))
reactor.run()
| bsd-2-clause | -3,934,038,125,280,826,400 | 35.252252 | 121 | 0.574387 | false | 3.91186 | false | false | false |
kivymd/KivyMD | demos/shrine/libs/baseclass/box_bottom_sheet.py | 1 | 4932 | from kivy.animation import Animation
from kivy.clock import Clock
from kivy.core.window import Window
from kivy.metrics import dp
from kivy.properties import BooleanProperty, ObjectProperty, StringProperty
from kivy.uix.behaviors import ButtonBehavior
from kivy.uix.image import Image
from kivy.uix.recycleview import RecycleView
from kivymd.theming import ThemableBehavior
from kivymd.uix.behaviors import CircularRippleBehavior
from kivymd.uix.boxlayout import MDBoxLayout
from kivymd.uix.button import MDIconButton
from kivymd.uix.list import TwoLineAvatarIconListItem
class BoxBottomSheetProductList(RecycleView):
pass
class TotalPriceForBoxBottomSheetProductList(MDBoxLayout):
pass
class ToolbarForBoxBottomSheetProductList(MDBoxLayout):
pass
class ItemForBoxBottomSheetProductList(TwoLineAvatarIconListItem):
pass
class PreviousImage(CircularRippleBehavior, ButtonBehavior, Image):
description = StringProperty()
_root = ObjectProperty()
class BoxBottomSheet(ThemableBehavior, MDBoxLayout):
open_sheet_box = BooleanProperty(False)
def clear_box(self):
while len(self.ids.previous_box.children) != 1:
for widget in self.ids.previous_box.children:
if widget.__class__ is not MDIconButton:
self.ids.previous_box.remove_widget(widget)
def restore_opacity_bottom_sheet(self):
Animation(opacity=1, d=0.2).start(self.ids.previous_box)
Animation(opacity=1, d=0.2).start(self)
def restore_width_bottom_sheet(self):
if len(self.ids.previous_box.children) != 1:
for widget in self.ids.previous_box.children:
self.ids.previous_box.width += widget.width
self.width += widget.width
self.ids.previous_box.height = dp(48)
if self.parent.ids.box_bottom_sheet_product_list.width == 0:
Animation(width=self.width + dp(48), d=0.2).start(self)
def remove_box_list(self, *args):
self.parent.ids.box_bottom_sheet_product_list.data = []
self.restore_width_bottom_sheet()
self.restore_opacity_bottom_sheet()
def hide_box_bottom_sheet(self):
Animation(width=0, d=0.2).start(self)
Animation(opacity=0, d=0.2).start(self)
def do_open_bottom_sheet(self, *args):
total_price = 0
count_item = 0
for widget in self.ids.previous_box.children:
if widget.__class__ is PreviousImage:
count_item += 1
total_price += int(
float(widget.description.split("\n")[1].split("$ ")[1])
)
self.parent.ids.box_bottom_sheet_product_list.data.append(
{
"viewclass": "ItemForBoxBottomSheetProductList",
"height": dp(72),
"path_to_image": widget.source,
"description": widget.description,
}
)
self.parent.ids.box_bottom_sheet_product_list.data.insert(
0,
{
"viewclass": "ToolbarForBoxBottomSheetProductList",
"count_item": count_item,
"callback": self.hide_bottom_sheet,
},
)
self.parent.ids.box_bottom_sheet_product_list.data.append(
{
"viewclass": "TotalPriceForBoxBottomSheetProductList",
"total_price": str(total_price),
}
)
Animation(opacity=1, d=0.2).start(
self.parent.ids.box_bottom_sheet_product_list
)
self.show_clear_button()
def show_clear_button(self):
self.parent.ids.clear_button.opacity = 1
self.parent.ids.clear_button.disabled = False
self.parent.ids.clear_button.grow()
def hide_clear_button(self, *args):
def hide_clear_button(interval):
self.parent.ids.clear_button.opacity = 0
self.parent.ids.clear_button.disabled = True
self.parent.ids.clear_button.grow()
Clock.schedule_once(hide_clear_button, 0.2)
def hide_bottom_sheet(self, *args):
Animation.stop_all(self)
self.hide_clear_button()
Animation(opacity=0, d=0.2).start(
self.parent.ids.box_bottom_sheet_product_list
)
animation = Animation(
height=Window.height // 3, width=Window.width // 2, d=0.1
) + Animation(height=dp(68), width=dp(68), d=0.2)
animation.bind(on_complete=self.remove_box_list)
animation.start(self)
self.open_sheet_box = False
def open_bottom_sheet(self):
Animation.stop_all(self)
anim = Animation(
height=Window.height // 2, width=Window.width, d=0.1
) + Animation(height=Window.height, d=0.1)
anim.bind(on_complete=self.do_open_bottom_sheet)
anim.start(self)
self.open_sheet_box = True
| mit | 7,916,162,452,998,090,000 | 34.228571 | 75 | 0.623682 | false | 3.711061 | false | false | false |
sernst/Trackway-Gait-Analysis | tracksim/cli/commands/list_.py | 1 | 1859 | from argparse import ArgumentParser
from tracksim import system
from tracksim import reader
from tracksim import paths
from tracksim import cli
DESCRIPTION = """
Removes all existing group and trial results from cached results folders
"""
def list_groups():
system.log('===== GROUPS =====', whitespace_bottom=1)
results_path = paths.results('group.html')
for uid, data_path in reader.listings('group').items():
url = 'file://{}?id={}'.format(results_path, uid)
system.log(
"""
--- {uid} ---
{url}
""".format(uid=uid, url=url),
whitespace_bottom=1
)
def list_trials():
system.log('===== TRIALS =====', whitespace_bottom=1)
results_path = paths.results('trials.html')
for uid, data_path in reader.listings('trial').items():
url = 'file://{}?id={}'.format(results_path, uid)
system.log(
"""
--- {uid} ---
{url}
""".format(uid=uid, url=url),
whitespace_bottom=1
)
def execute_command():
"""
:return:
"""
parser = ArgumentParser()
parser.description = cli.reformat(DESCRIPTION)
parser.add_argument(
'list_command',
type=str,
help='The list command itself'
)
parser.add_argument(
'report_type',
type=str,
nargs='?',
default=None,
help='The type of report to list.'
)
args = vars(parser.parse_args())
report_type = args['report_type']
if not report_type:
report_type = 'all'
else:
report_type = report_type.lower()
print('')
if report_type[0] == 'g':
list_groups()
elif report_type[0] == 't':
list_trials()
else:
list_groups()
print('')
list_trials()
| mit | 1,463,846,575,669,535,500 | 20.125 | 76 | 0.534696 | false | 3.972222 | false | false | false |
num1r0/bb_tools | wordlist_generator.py | 1 | 4922 | """
Wordlist generator tool.
Generates extended wordlist based on an initial list of possible words
Used mainly with hash cracking tools: hashcat, john, etc.
TO DO:
- Add logging function
"""
import datetime
import itertools
import sys
import os
def usage():
""" Usage function """
usage_message = """Usage wordlist_generator.py [ OPTIONS ]
OPTIONS:
-i Path to initial wordlist file (default: wordlist.txt)
-o Name of the file to save generated wordlist (default: gen_ext_wl.txt)
-t Datetime got from 'date' command, used as origin timestamp (ex.: Sat 28 Oct 22:06:28 BST 2017)
-w Time window size (in seconds). Subtracted/added to origin timestamp
-h Display this menu
EXAMPLES:
wordlist_generator.py -i wl.txt -o res.txt -t "Sat 28 Oct 22:06:28 BST 2017" -w 10
"""
print usage_message
def create_permutations(wordlist):
"""
Creates all possible permutations for given wordlist
"""
extended_wordlist = []
for length in range(0, len(wordlist)+1):
for subset in itertools.permutations(wordlist, length):
extended_wordlist.append("".join(subset))
return extended_wordlist
def convert_to_epoch_time(origin):
"""
Converts datetime into unix timestamp. Gets as an argument, result of linux 'date' command.
Input example: Sat 28 Oct 22:06:28 BST 2017
"""
pattern = "%a %d %b %H:%M:%S %Z %Y"
timestamp = datetime.datetime.strptime(origin, pattern).strftime("%s")
return timestamp
def generate_timestamps(epoch_origin, seconds_interval):
"""
Gets origin timestamp and generates a list of them, based on specified interval of seconds
"""
timestamps = []
past_timestamp = int(epoch_origin) - int(seconds_interval)
future_timestamp = int(epoch_origin) + int(seconds_interval)
for timestamp in range(past_timestamp, future_timestamp+1):
timestamps.append(timestamp)
return timestamps
def generate_extended_wordlist(timestamps, wordlist):
"""
For each timestamp, we generate the wordlist using permutations
"""
extended_wordlist = []
iter_wordlist = []
for timestamp in timestamps:
iter_wordlist = list(wordlist)
iter_wordlist.append(str(timestamp))
iter_extended_wordlist = create_permutations(iter_wordlist)
del iter_wordlist[:]
diff_wordlist = list(set(iter_extended_wordlist) - set(extended_wordlist))
extended_wordlist += diff_wordlist
return sorted(extended_wordlist)
def get_wordlist_from_file(file_path):
"""
Simple read file function; omits newline '\n' character on each line
"""
f = open(str(file_path), "r")
wordlist = f.read().splitlines()
return wordlist
def save_to_file(file_path, wordlist):
"""
Simple write file function
"""
if not str(file_path):
file_path = "gen_ext_wl.txt"
with open(file_path, 'w') as f:
for word in wordlist:
f.write(word)
f.write("\n")
f.close()
def main():
"""
Entry point
"""
arguments = sys.argv[1:]
if len(arguments) <= 1:
usage()
exit(0)
try:
# Need help?
arguments.index("-h")
usage()
except:
# Get initial wordlist file name
try:
initial_wordlist_path = str(arguments[int(arguments.index("-i") + 1)])
except:
# Logging function
initial_wordlist_path = "wordlist.txt"
print initial_wordlist_path
# Get file name to store generated wordlist
try:
new_wordlist_path = str(arguments[int(arguments.index("-o") + 1)])
except:
# Logging function
new_wordlist_path = "gen_ext_wl.txt"
print new_wordlist_path
# Get origin timestamp
try:
origin_timestamp = str(arguments[int(arguments.index("-t") + 1)])
except:
# Logging function
process = os.popen("date")
origin_timestamp = str(process.read()).strip()
process.close()
print origin_timestamp
# Get time window in seconds
try:
time_window_seconds = str(arguments[int(arguments.index("-w") + 1)])
except:
# Logging function
time_window_seconds = 45
print time_window_seconds
initial_wordlist = get_wordlist_from_file(initial_wordlist_path)
if not origin_timestamp.isdigit():
origin_timestamp = convert_to_epoch_time(origin_timestamp)
generated_timestamps = generate_timestamps(origin_timestamp, time_window_seconds)
generated_extended_wordlist = generate_extended_wordlist(generated_timestamps, initial_wordlist)
save_to_file(new_wordlist_path, generated_extended_wordlist)
if __name__ == "__main__":
main()
| gpl-3.0 | -3,724,301,158,258,055,000 | 30.350318 | 110 | 0.621902 | false | 3.956592 | false | false | false |
beakman/droidlab | droidlab/experiments/serializers.py | 1 | 1991 | from rest_framework import serializers
from rest_framework.reverse import reverse
from .models import Experiment, Result
class ResultSerializer(serializers.ModelSerializer):
class Meta:
model = Result
exclude = ('experiment',)
# class ExperimentSerializer(serializers.HyperlinkedModelSerializer):
# results = serializers.HyperlinkedIdentityField(view_name="results-list")
# class Meta:
# model = Experiment
# fields = ('name', 'date', 'results')
class ExperimentSerializer(serializers.ModelSerializer):
results = ResultSerializer(many=True)
class Meta:
model = Experiment
fields = ('id', 'name', 'date', 'user', 'results')
def create(self, validated_data):
results_data = validated_data.pop('results')
ex = Experiment.objects.create(**validated_data)
for result_data in results_data:
Result.objects.create(experiment=ex, **result_data)
return ex
def update(self, instance, validated_data):
results_data = validated_data.pop('results')
# Unless the application properly enforces that this field is
# always set, the follow could raise a `DoesNotExist`, which
# would need to be handled.
results = instance.results
instance.save()
results.save()
return instance
# class ResultHyperlink(serializers.HyperlinkedRelatedField):
# # We define these as class attributes, so we don't need to pass them as arguments.
# view_name = 'result-detail'
# queryset = Result.objects.all()
# def get_url(self, obj, view_name, request, format):
# url_kwargs = {
# 'experiment_name': obj.experiment.name,
# 'experiment_pk': obj.pk
# }
# return reverse(view_name, kwargs=url_kwargs, request=request, format=format)
# def get_object(self, view_name, view_args, view_kwargs):
# lookup_kwargs = {
# 'experiment__name': view_kwargs['experiment_name'],
# 'pk': view_kwargs['experiment_pk']
# }
# return self.get_queryset().get(**lookup_kwargs) | bsd-3-clause | 8,969,882,641,364,371,000 | 31.655738 | 88 | 0.690105 | false | 3.626594 | false | false | false |
OSSOS/MOP | src/ossos/core/scripts/process.py | 1 | 1397 |
from ossos.pipeline import mk_mopheader, mkpsf, step1, slow
from ossos import util, storage
import logging
import sys
import os
import shutil
util.config_logging(logging.INFO)
version='p'
force=False
dry_run=False
prefix=''
lines = open(sys.argv[1]).readlines()
basedir=os.getcwd()
for line in lines:
expnum = int(line.strip())
for ccd in storage.get_ccdlist(expnum):
try:
os.chdir(basedir)
if not os.access(str(expnum),os.F_OK):
os.mkdir(str(expnum))
os.chdir(str(expnum))
if not os.access(str(ccd), os.F_OK):
os.mkdir(str(ccd))
os.chdir(str(ccd))
try:
print(os.getcwd())
mk_mopheader.run(expnum, ccd=ccd, version=version, dry_run=dry_run, prefix='', force=force, ignore_dependency=False)
mkpsf.run(expnum, ccd=ccd, version=version, dry_run=dry_run, prefix=prefix, force=force)
step1.run(expnum, ccd=ccd, version=version, dry_run=dry_run, prefix=prefix, force=force)
slow.run(expnum, ccd, version=version, dry_run=dry_run, prefix=prefix, force=force)
except Exception as ex:
print(ex)
except Exception as ex:
print(ex)
finally:
os.chdir(basedir)
shutil.rmtree("{}/{}".format(expnum, ccd), ignore_errors=True)
| gpl-3.0 | -1,160,384,444,843,936,800 | 30.75 | 132 | 0.59413 | false | 3.358173 | false | false | false |
rrahmati/roboinstruct-2 | demonstrate_ros_package/scripts/record_demonstration.py | 1 | 15168 | #! /usr/bin/python
import rospy
from sensor_msgs.msg import Image
from cv_bridge import CvBridge, CvBridgeError
import numpy as np
import cv2
import sys
import os
from os.path import expanduser
import signal
import threading
from multiprocessing import Pool
import time
from random import randint
from std_msgs.msg import Float32MultiArray
from leap_client.msg import HandInfoList
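# Ctrl+C handler: tell the recorder instance to stop its worker thread, then exit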
def signal_handler(signal, frame):
    global record_demonstration
record_demonstration.end_thread = True
sys.exit(0)
signal.signal(signal.SIGINT, signal_handler)
class RecordDemonstration(object):
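    """Records demonstration data (camera images and robot arm states) for the configured task."""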
def __init__(self):
# parameters
self.task = 3006
# person controlling the robot: 1-Rouhollah, 2-Pooya
self.user_id = 1
self.image_shape = (540, 540)
self.recordDelay = .03
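        # cameras and robot arms to record from (set to True to enable)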
self.camera1 = True
self.camera2 = False
self.camera3 = False
self.al5d = True
self.mico = False
self.task_description = {
5000: "Human demonstrations",
3001: "Grab a bubble wrap and put it into plate",
3002: "Push the plate to the left",
3003: "Push the box towards the robot's base",
3004: "Push and roll the bottle towards the robot's base",
3005: "Pick up the towel and clean the screwdriver box",
3006: "rotate the pliers wrench to a perpendicular orientation",
# first camera calibration:
1001: "Put three small objects into the container",
1002: "Grab a pen and put it into user's hand",
1003: "Take the stirring bar from the user, stir a coffee cup, give it back to the user",
1004: "Grab capsules from the table and put them into their bottle",
1005: "Grab a paper cup and pour its content into a plate",
1006: "Push all small cubes and gather them in the middle of table",
1007: "The small towel is already folded. fold it one more time",
1008: "Grab a paper cup and put it into a tea cup",
1009: "Grab the spoon and fork and put them into the plate, spoon on right, fork on left",
1010: "Pick up a thick marker and put it into upright position",
1011: "Push and rotate the markers and gather them close to the robot base",
1012: "Stay in the middle position. Don't move!",
1013: "Pick up a mug and place it on the table where the user is pointing",
1014: "scoop ...",
# second camera calibration:
1501: "Grab 6 small cubes in a cluttered situation and put them into a plate",
1502: "Grab a marker and put it into the cup. Then, put it back on the table.",
# second camera calibration, each task 5 minutes, 10,000 waypoints
2001: "Grab 3 small markers and arrange them vertically on the right side",
2002: "Grab 3 small markers and arrange them horizontally on the right side",
2003: "Grab 3 small markers and arrange them vertically on the left side",
2004: "Grab 3 small markers and arrange them horizontally on the left side",
2005: "Grab 3 small markers and make a triangle with them",
2006: "Grab 3 small markers, put one on the left, one on the right, and one in the middle",
2007: "Grab 3 small markers and make a horizontal line with them",
2008: "Grab 3 small markers and write the character Y with them",
2009: "Grab 3 small markers and write the character U with them",
2010: "Grab 3 small markers and write the character H with them",
2011: "Grab 3 small markers and write the character N with them",
2012: "Grab 3 small markers and write the character T with them",
2013: "Grab 3 small markers and write the reversed character N with them",
2014: "Grab 3 small markers and write the reversed character Y with them",
2015: "Grab 3 small markers and write the reversed character U with them",
2016: "Grab 3 small markers and write the 90 degree rotated character H with them",
2017: "Grab 3 small markers and write the reversed character T with them",
2018: "Grab 3 small markers and write the character K with them",
2019: "Grab 3 small markers, put one vertically on the right, and two vertically on the left",
2020: "Grab 3 small markers, put one vertically on the left, and two vertically on the right",
2021: "Grab 3 small markers, put one horizontally on the right, and two horizontally on the left",
2022: "Grab 3 small markers, put one horizontally on the left, and two horizontally on the right",
2023: "Grab 3 small markers, put one vertically on the right, and two horizontally on the left",
2024: "Grab 3 small markers, put one horizontally on the left, and two vertically on the right",
2025: "Grab 3 small markers, put one vertically on the right, and make a vertical line with the other two",
2026: "Grab 3 small markers, put one vertically on the left, and make a vertical line with the other two",
2027: "Grab 3 small markers, put one vertically on the right, and make a horizontal line with the other two",
2028: "Grab 3 small markers, put one vertically on the left, and make a horizontal line with the other two",
2029: "Grab 3 small markers and put them into the coffee cup on the right",
2030: "Grab 3 small markers that are inside a coffee cup on the right and put them on the desk",
2031: "Grab 3 small markers and put them into the coffee cup on the left",
2032: "Grab 3 small markers that are inside a coffee cup on the left and put them on the desk",
2033: "Grab 3 small markers, put one into the coffee cup on the left, and the others into the coffee cup on the right",
2034: "Grab 3 small markers, put one into the coffee cup on the right, and the others into the coffee cup on the left",
2035: "Grab 2 small markers, put one into the coffee cup on the right, and the other into the coffee cup on the left",
2036: "Grab 2 small markers, put one into the coffee cup on the left, and the other into the coffee cup on the right",
2037: "Grab one small marker from each coffee cup and put them on the desk",
2038: "Grab one small marker from the coffee cup on the right and put it into the coffee cup on the left",
2039: "Grab one small marker from the coffee cup on the left and put it into the coffee cup on the right",
2040: "Grab 4 small markers and make a square with them",
2041: "Grab 4 small markers and make a cross with them",
2042: "Grab 4 small markers and make a 45 degree rotated square with them",
2043: "Grab 4 small markers and make a plus with them",
2044: "Grab 4 small markers, put one vertically on the right and three vertically on the left",
2045: "Grab 4 small markers, put one horizontally on the right and three vertically on the left",
2046: "Grab 4 small markers, put one vertically on the right and three horizontally on the left",
2047: "Grab 4 small markers, put one horizontally on the right and three horizontally on the left",
2048: "Grab 4 small markers, put two vertically on the right and two vertically on the left",
2049: "Grab 4 small markers, put two horizontally on the right and two vertically on the left",
2050: "Grab 4 small markers, put two vertically on the right and two horizontally on the left",
2051: "Grab 4 small markers, put two horizontally on the right and two horizontally on the left",
2052: "Grab 4 small markers and draw the bottom half of a star with them",
2053: "Grab 4 small markers and draw the upper half of a star with them",
2054: "Grab 4 small markers and draw the character '=' with them",
2055: "Grab 4 small markers and draw the 90 degree rotated character '=' with them",
2056: "Grab 4 small markers and draw the character 'W' with them",
2057: "Grab 4 small markers and draw the character 'M' with them",
2058: "Grab 4 small markers and draw the character 'E' with them",
2059: "Grab 4 small markers and draw the reversed character 'E' with them",
2060: "Grab 4 small markers and draw the character 'm' with them",
2061: "Grab 4 small markers and draw the reversed character 'm' with them",
}
# initialization
self.filepath = expanduser("~") + '/t/task-' + str(self.task) + '/' + str(randint(0,1000000))
rospy.init_node('record_demonstration')
if self.camera1:
self.create_folders(self.filepath + '/camera-' + str(1) + '/')
# self.create_folders(self.filepath + '/camera-' + str(1) + '-depth/')
rospy.Subscriber("/kinect2/qhd/image_color_rect", Image, self.camera1_callback)
# rospy.Subscriber("/kinect2/hd/image_depth_rect", Image, self.camera1_depth_callback)
if self.camera2:
self.create_folders(self.filepath + '/camera-' + str(2) + '/')
rospy.Subscriber("/usb_cam/image_raw", Image, self.camera2_callback)
if self.camera3:
self.create_folders(self.filepath + '/camera-' + str(3) + '/')
rospy.Subscriber("/kinect2/qhd/image_color_rect", Image, self.camera3_callback)
if self.al5d:
self.write_file_header()
rospy.Subscriber("/leap_al5d_info", Float32MultiArray, self.leap_al5d_callback)
if self.mico:
self.write_file_header()
rospy.Subscriber("/leap_mico_info", Float32MultiArray, self.leap_mico_callback)
self.bridge = CvBridge()
self.timestep = 0
self.task_complete_count = 0
self.rate = rospy.Rate(self.recordDelay*1000)
self.last_reward_time = 0
self.last_robot_msg = 0
self.start_time = rospy.get_time()
self.end_thread = False
self.pause = False
# self.pool = Pool(2)
self.thread = threading.Thread(target= self._update_thread)
self.thread.start()
def save_image(self, img_msg, camera):
try:
img = self.bridge.imgmsg_to_cv2(img_msg, "bgr8")
img = np.array(img, dtype=np.float)
except CvBridgeError, e:
print(e)
else:
img = img[0:540, 250:840]
img = cv2.resize(img, self.image_shape)
cv2.imwrite(self.filepath + '/camera-' + str(camera) + '/' + str(self.timestep) +
'.jpg', img, [int(cv2.IMWRITE_JPEG_QUALITY), 80])
def save_image_depth(self, img_msg, camera):
try:
img = self.bridge.imgmsg_to_cv2(img_msg, "16UC1")
img = np.array(img, dtype=np.float32)
cv2.normalize(img, img, 0, 1, cv2.NORM_MINMAX)
except CvBridgeError, e:
print(e)
else:
img = cv2.resize(img, self.image_shape)
cv2.imwrite(self.filepath + '/camera-' + str(camera) + '-depth/' + str(self.timestep) +
'.jpg', img*255.0, [int(cv2.IMWRITE_JPEG_QUALITY), 80])
def camera1_callback(self, msg):
self.camera1_msg = msg
def camera1_depth_callback(self, msg):
self.camera1_depth_msg = msg
def camera2_callback(self, msg):
self.camera2_msg = msg
def camera3_callback(self, msg):
self.camera3_msg = msg
def leap_al5d_callback(self, msg):
self.leap_al5d_msg = msg
self.last_robot_msg = rospy.get_time()
def leap_mico_callback(self, msg):
self.leap_mico_msg = msg
def create_folders(self, foldername):
if not os.path.exists(foldername):
try:
os.makedirs(foldername)
except OSError as exc: # Guard against race condition
if exc.errno != errno.EEXIST:
raise
def write_file_header(self):
with open(self.filepath + '.txt', 'w') as f:
f.write(str(time.strftime('%l:%M%p %z on %b %d, %Y')) + '\n' + str(self.task_description[self.task]) + '\n')
f.write('time,task,user,robot,reward,human,gripper,joint1,joint2,joint3,joint4,joint5,joint6')
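    # For illustration only (values made up): with the header above, one
    # recorded line in the text file looks roughly like
    #   12.345,3006,1,1,0,0,0.5,0.1,-0.2,0.3,0.0,0.1,0.2
    # i.e. elapsed time, task id, user id, robot id (1=al5d, 2=mico), reward,
    # human flag, gripper, then the joint values streamed from the controller.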
def append_to_file(self, robot):
with open(self.filepath + '.txt', 'a') as f:
str_to_append = '\n' + str(rospy.get_time() - self.start_time) + ',' + str(self.task) + ',' + str(self.user_id) + ','
if robot == 'al5d':
str_to_append = str_to_append + str(1) + ','
data = [x for x in self.leap_al5d_msg.data]
elif robot == 'mico':
str_to_append = str_to_append + str(2) + ','
data = [x for x in self.leap_mico_msg.data]
if abs(data[0] - 1) < .01: # got reward
if rospy.get_time() - self.last_reward_time > 1:
self.task_complete_count += 1
self.last_reward_time = rospy.get_time()
else:
data[0] = 0
sys.stdout.write('\rTimestep: ' + str(self.timestep) + ' Task done: ' + str(self.task_complete_count))
sys.stdout.flush()
str_to_append = str_to_append + ','.join(str(e) for e in data)
f.write(str_to_append)
def _update_thread(self):
while not rospy.is_shutdown() and not self.end_thread:
if self.pause or rospy.get_time() - self.start_time < 1 or rospy.get_time() - self.last_robot_msg > .1:
continue
save_files = (self.camera1 == hasattr(self, 'camera1_msg') and self.camera2 == hasattr(self, 'camera2_msg')
and self.camera3 == hasattr(self, 'camera3_msg') and self.al5d == hasattr(self, 'leap_al5d_msg')
and self.mico == hasattr(self, 'leap_mico_msg'))
if save_files:
if self.camera1:
# # self.pool.map(self.save_image, [(self.camera1_msg, 1)])
self.save_image(self.camera1_msg, 1)
# self.save_image_depth(self.camera1_depth_msg, 1)
if self.camera2:
# self.pool.map(self.save_image, [(self.camera2_msg, 2)])
self.save_image(self.camera2_msg, 2)
if self.camera3:
                    self.save_image(self.camera3_msg, 3)
if self.al5d:
self.append_to_file('al5d')
if self.mico:
self.append_to_file('mico')
self.timestep += 1
self.rate.sleep()
def main():
global record_demonstration
record_demonstration = RecordDemonstration()
rospy.spin()
# while not rospy.is_shutdown() and not record_demonstration.end_thread:
# input = raw_input(">>>")
# record_demonstration.pause = not record_demonstration.pause
if __name__ == '__main__':
main()
| mit | -7,426,183,828,414,585,000 | 53.171429 | 131 | 0.610562 | false | 3.709464 | false | false | false |
ChromiumWebApps/chromium | mojo/public/bindings/generators/mojom_js_generator.py | 1 | 7742 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Generates JavaScript source files from a mojom.Module."""
from generate import mojom
from generate import mojom_pack
from generate import mojom_generator
from generate.template_expander import UseJinja
_kind_to_javascript_default_value = {
mojom.BOOL: "false",
mojom.INT8: "0",
mojom.UINT8: "0",
mojom.INT16: "0",
mojom.UINT16: "0",
mojom.INT32: "0",
mojom.UINT32: "0",
mojom.FLOAT: "0",
mojom.HANDLE: "core.kInvalidHandle",
mojom.DCPIPE: "core.kInvalidHandle",
mojom.DPPIPE: "core.kInvalidHandle",
mojom.MSGPIPE: "core.kInvalidHandle",
mojom.INT64: "0",
mojom.UINT64: "0",
mojom.DOUBLE: "0",
mojom.STRING: '""',
}
def JavaScriptDefaultValue(field):
if field.default:
raise Exception("Default values should've been handled in jinja.")
if field.kind in mojom.PRIMITIVES:
return _kind_to_javascript_default_value[field.kind]
if isinstance(field.kind, mojom.Struct):
return "null";
if isinstance(field.kind, mojom.Array):
return "[]";
if isinstance(field.kind, mojom.Interface):
return _kind_to_javascript_default_value[mojom.MSGPIPE]
def JavaScriptPayloadSize(packed):
packed_fields = packed.packed_fields
if not packed_fields:
return 0;
last_field = packed_fields[-1]
offset = last_field.offset + last_field.size
pad = mojom_pack.GetPad(offset, 8)
return offset + pad;
_kind_to_javascript_type = {
mojom.BOOL: "codec.Uint8",
mojom.INT8: "codec.Int8",
mojom.UINT8: "codec.Uint8",
mojom.INT16: "codec.Int16",
mojom.UINT16: "codec.Uint16",
mojom.INT32: "codec.Int32",
mojom.UINT32: "codec.Uint32",
mojom.FLOAT: "codec.Float",
mojom.HANDLE: "codec.Handle",
mojom.DCPIPE: "codec.Handle",
mojom.DPPIPE: "codec.Handle",
mojom.MSGPIPE: "codec.Handle",
mojom.INT64: "codec.Int64",
mojom.UINT64: "codec.Uint64",
mojom.DOUBLE: "codec.Double",
mojom.STRING: "codec.String",
}
def GetJavaScriptType(kind):
if kind in mojom.PRIMITIVES:
return _kind_to_javascript_type[kind]
if isinstance(kind, mojom.Struct):
return "new codec.PointerTo(%s)" % GetJavaScriptType(kind.name)
if isinstance(kind, mojom.Array):
return "new codec.ArrayOf(%s)" % GetJavaScriptType(kind.kind)
if isinstance(kind, mojom.Interface):
return GetJavaScriptType(mojom.MSGPIPE)
return kind
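# For example (illustrative): an int32 kind maps to "codec.Int32", an array of
# int32 to "new codec.ArrayOf(codec.Int32)", and a struct named Foo to
# "new codec.PointerTo(Foo)".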
_kind_to_javascript_decode_snippet = {
mojom.BOOL: "read8() & 1",
mojom.INT8: "read8()",
mojom.UINT8: "read8()",
mojom.INT16: "read16()",
mojom.UINT16: "read16()",
mojom.INT32: "read32()",
mojom.UINT32: "read32()",
mojom.FLOAT: "decodeFloat()",
mojom.HANDLE: "decodeHandle()",
mojom.DCPIPE: "decodeHandle()",
mojom.DPPIPE: "decodeHandle()",
mojom.MSGPIPE: "decodeHandle()",
mojom.INT64: "read64()",
mojom.UINT64: "read64()",
mojom.DOUBLE: "decodeDouble()",
mojom.STRING: "decodeStringPointer()",
}
def JavaScriptDecodeSnippet(kind):
if kind in mojom.PRIMITIVES:
return _kind_to_javascript_decode_snippet[kind]
if isinstance(kind, mojom.Struct):
return "decodeStructPointer(%s)" % GetJavaScriptType(kind.name);
if isinstance(kind, mojom.Array):
return "decodeArrayPointer(%s)" % GetJavaScriptType(kind.kind);
if isinstance(kind, mojom.Interface):
return JavaScriptDecodeSnippet(mojom.MSGPIPE)
_kind_to_javascript_encode_snippet = {
mojom.BOOL: "write8(1 & ",
mojom.INT8: "write8(",
mojom.UINT8: "write8(",
mojom.INT16: "write16(",
mojom.UINT16: "write16(",
mojom.INT32: "write32(",
mojom.UINT32: "write32(",
mojom.FLOAT: "encodeFloat(",
mojom.HANDLE: "encodeHandle(",
mojom.DCPIPE: "encodeHandle(",
mojom.DPPIPE: "encodeHandle(",
mojom.MSGPIPE: "encodeHandle(",
mojom.INT64: "write64(",
mojom.UINT64: "write64(",
mojom.DOUBLE: "encodeDouble(",
mojom.STRING: "encodeStringPointer(",
}
def JavaScriptEncodeSnippet(kind):
if kind in mojom.PRIMITIVES:
return _kind_to_javascript_encode_snippet[kind]
if isinstance(kind, mojom.Struct):
return "encodeStructPointer(%s, " % GetJavaScriptType(kind.name);
if isinstance(kind, mojom.Array):
return "encodeArrayPointer(%s, " % GetJavaScriptType(kind.kind);
if isinstance(kind, mojom.Interface):
return JavaScriptEncodeSnippet(mojom.MSGPIPE)
def GetConstants(module):
"""Returns a generator that enumerates all constants that can be referenced
from this module."""
class Constant:
pass
for enum in module.enums:
for field in enum.fields:
constant = Constant()
constant.namespace = module.namespace
constant.is_current_namespace = True
constant.import_item = None
constant.name = (enum.name, field.name)
yield constant
for each in module.imports:
for enum in each["module"].enums:
for field in enum.fields:
constant = Constant()
constant.namespace = each["namespace"]
constant.is_current_namespace = constant.namespace == module.namespace
constant.import_item = each
constant.name = (enum.name, field.name)
yield constant
def TranslateConstants(value, module):
# We're assuming we're dealing with an identifier, but that may not be
# the case. If we're not, we just won't find any matches.
if value.find(".") != -1:
namespace, identifier = value.split(".")
else:
namespace, identifier = "", value
for constant in GetConstants(module):
if namespace == constant.namespace or (
namespace == "" and constant.is_current_namespace):
if constant.name[1] == identifier:
if constant.import_item:
return "%s.%s.%s" % (constant.import_item["unique_name"],
constant.name[0], constant.name[1])
else:
return "%s.%s" % (constant.name[0], constant.name[1])
return value
def ExpressionToText(value, module):
if value[0] != "EXPRESSION":
raise Exception("Expected EXPRESSION, got" + value)
return "".join(mojom_generator.ExpressionMapper(value,
lambda token: TranslateConstants(token, module)))
def JavascriptType(kind):
if kind.imported_from:
return kind.imported_from["unique_name"] + "." + kind.name
return kind.name
class Generator(mojom_generator.Generator):
js_filters = {
"camel_to_underscores": mojom_generator.CamelToUnderscores,
"default_value": JavaScriptDefaultValue,
"payload_size": JavaScriptPayloadSize,
"decode_snippet": JavaScriptDecodeSnippet,
"encode_snippet": JavaScriptEncodeSnippet,
"expression_to_text": ExpressionToText,
"is_object_kind": mojom_generator.IsObjectKind,
"is_string_kind": mojom_generator.IsStringKind,
"is_array_kind": lambda kind: isinstance(kind, mojom.Array),
"js_type": JavascriptType,
"stylize_method": mojom_generator.StudlyCapsToCamel,
"verify_token_type": mojom_generator.VerifyTokenType,
}
@UseJinja("js_templates/module.js.tmpl", filters=js_filters)
def GenerateJsModule(self):
return {
"imports": self.GetImports(),
"kinds": self.module.kinds,
"enums": self.module.enums,
"module": self.module,
"structs": self.GetStructs() + self.GetStructsFromMethods(),
"interfaces": self.module.interfaces,
}
def GenerateFiles(self):
self.Write(self.GenerateJsModule(), "%s.js" % self.module.name)
def GetImports(self):
# Since each import is assigned a variable in JS, they need to have unique
# names.
counter = 1
for each in self.module.imports:
each["unique_name"] = "import" + str(counter)
counter += 1
return self.module.imports
| bsd-3-clause | 4,849,057,587,439,504,000 | 30.34413 | 78 | 0.681478 | false | 3.290268 | false | false | false |
kmolab/kmolab.github.io | data/Brython-3.3.4/Lib/logging/brython_handlers.py | 1 | 1179 | import logging
from browser.ajax import ajax
class XMLHTTPHandler(logging.Handler):
"""
A class which sends records to a Web server, using either GET or
POST semantics.
"""
def __init__(self, url, method="GET"):
"""
        Initialize the instance with the request URL and the method
("GET" or "POST")
"""
logging.Handler.__init__(self)
method = method.upper()
if method not in ["GET", "POST"]:
raise ValueError("method must be GET or POST")
self.url = url
self.method = method
def mapLogRecord(self, record):
"""
Default implementation of mapping the log record into a dict
that is sent as the CGI data. Overwrite in your class.
Contributed by Franz Glasner.
"""
return record.__dict__
def emit(self, record):
"""
Emit a record.
Send the record to the Web server as a percent-encoded dictionary
"""
try:
req = ajax.open(self.method, self.url, async=False)
req.send(self.mapLogRecord(record))
except:
self.handleError(record)
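# Minimal usage sketch (assumed, not part of the library): attach the handler
# to a logger so records are sent to a server-side endpoint, e.g.
#
#     import logging
#     from logging.brython_handlers import XMLHTTPHandler
#     logging.getLogger().addHandler(XMLHTTPHandler("/log", method="POST"))
#
# The "/log" URL is a placeholder for whatever endpoint the application exposes.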
| agpl-3.0 | 5,833,815,378,473,545,000 | 27.756098 | 78 | 0.579304 | false | 4.366667 | false | false | false |
Stratos42/EveBot | plugins.disabled/bf.py | 1 | 2472 | '''brainfuck interpreter adapted from (public domain) code at
http://brainfuck.sourceforge.net/brain.py'''
import re
import random
from util import hook
BUFFER_SIZE = 5000
MAX_STEPS = 1000000
@hook.command
def bf(inp):
".bf <prog> -- executes brainfuck program <prog>"""
program = re.sub('[^][<>+-.,]', '', inp)
# create a dict of brackets pairs, for speed later on
brackets = {}
open_brackets = []
for pos in range(len(program)):
if program[pos] == '[':
open_brackets.append(pos)
elif program[pos] == ']':
if len(open_brackets) > 0:
brackets[pos] = open_brackets[-1]
brackets[open_brackets[-1]] = pos
open_brackets.pop()
else:
return 'unbalanced brackets'
if len(open_brackets) != 0:
return 'unbalanced brackets'
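    # Worked example (illustrative): for the program "+[>+<-]." the loop above
    # produces brackets == {1: 6, 6: 1} -- the '[' at index 1 and the ']' at
    # index 6 point at each other, so the interpreter can jump in O(1).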
# now we can start interpreting
ip = 0 # instruction pointer
mp = 0 # memory pointer
steps = 0
memory = [0] * BUFFER_SIZE # initial memory area
rightmost = 0
output = "" # we'll save the output here
# the main program loop:
while ip < len(program):
c = program[ip]
if c == '+':
            memory[mp] = (memory[mp] + 1) % 256
elif c == '-':
            memory[mp] = (memory[mp] - 1) % 256
elif c == '>':
mp += 1
if mp > rightmost:
rightmost = mp
if mp >= len(memory):
# no restriction on memory growth!
memory.extend([0] * BUFFER_SIZE)
elif c == '<':
            mp = (mp - 1) % len(memory)
elif c == '.':
output += chr(memory[mp])
if len(output) > 500:
break
elif c == ',':
memory[mp] = random.randint(1, 255)
elif c == '[':
if memory[mp] == 0:
ip = brackets[ip]
elif c == ']':
if memory[mp] != 0:
ip = brackets[ip]
ip += 1
steps += 1
if steps > MAX_STEPS:
if output == '':
output = '(no output)'
output += '[exceeded %d iterations]' % MAX_STEPS
break
stripped_output = re.sub(r'[\x00-\x1F]', '', output)
if stripped_output == '':
if output != '':
return 'no printable output'
return 'no output'
return stripped_output[:430].decode('utf8', 'ignore')
| gpl-3.0 | 4,300,077,651,816,528,000 | 27.090909 | 61 | 0.480987 | false | 3.948882 | false | false | false |
koala-ai/tensorflow_nlp | nlp/chatbot/model.py | 1 | 10775 | import copy
import numpy as np
import tensorflow as tf
from nlp.chatbot.dataset import data_utils
class S2SModel(object):
def __init__(self,
source_vocab_size,
target_vocab_size,
buckets,
size,
dropout,
num_layers,
max_gradient_norm,
batch_size,
learning_rate,
num_samples,
forward_only=False,
dtype=tf.float32):
# init member variales
self.source_vocab_size = source_vocab_size
self.target_vocab_size = target_vocab_size
self.buckets = buckets
self.batch_size = batch_size
self.learning_rate = learning_rate
# LSTM cells
cell = tf.contrib.rnn.BasicLSTMCell(size)
cell = tf.contrib.rnn.DropoutWrapper(cell, output_keep_prob=dropout)
cell = tf.contrib.rnn.MultiRNNCell([cell] * num_layers)
output_projection = None
softmax_loss_function = None
if num_samples > 0 and num_samples < self.target_vocab_size:
            print('Using sampled softmax with output projection: {}'.format(num_samples))
w_t = tf.get_variable(
"proj_w",
[self.target_vocab_size, size],
dtype=dtype
)
w = tf.transpose(w_t)
b = tf.get_variable(
"proj_b",
[self.target_vocab_size],
dtype=dtype
)
output_projection = (w, b)
def sampled_loss(labels, logits):
labels = tf.reshape(labels, [-1, 1])
                # fp16 training may be selected via the options, so cast everything to fp32 here.
local_w_t = tf.cast(w_t, tf.float32)
local_b = tf.cast(b, tf.float32)
local_inputs = tf.cast(logits, tf.float32)
return tf.cast(
tf.nn.sampled_softmax_loss(
weights=local_w_t,
biases=local_b,
labels=labels,
inputs=local_inputs,
num_sampled=num_samples,
num_classes=self.target_vocab_size
),
dtype
)
softmax_loss_function = sampled_loss
# seq2seq_f
def seq2seq_f(encoder_inputs, decoder_inputs, do_decode):
tmp_cell = copy.deepcopy(cell)
return tf.contrib.legacy_seq2seq.embedding_attention_seq2seq(
encoder_inputs,
decoder_inputs,
tmp_cell,
num_encoder_symbols=source_vocab_size,
num_decoder_symbols=target_vocab_size,
embedding_size=size,
output_projection=output_projection,
feed_previous=do_decode,
dtype=dtype
)
# inputs
self.encoder_inputs = []
self.decoder_inputs = []
self.decoder_weights = []
        # The last bucket (index -1) is the largest one.
for i in range(buckets[-1][0]):
self.encoder_inputs.append(tf.placeholder(
tf.int32,
shape=[None],
name='encoder_input_{}'.format(i)
))
        # Decoder inputs are one step longer than the outputs, so that the targets below can be shifted left by one position.
for i in range(buckets[-1][1] + 1):
self.decoder_inputs.append(tf.placeholder(
tf.int32,
shape=[None],
name='decoder_input_{}'.format(i)
))
self.decoder_weights.append(tf.placeholder(
dtype,
shape=[None],
name='decoder_weight_{}'.format(i)
))
targets = [
self.decoder_inputs[i + 1] for i in range(buckets[-1][1])
]
if forward_only:
self.outputs, self.losses = tf.contrib.legacy_seq2seq.model_with_buckets(
self.encoder_inputs,
self.decoder_inputs,
targets,
self.decoder_weights,
buckets,
lambda x, y: seq2seq_f(x, y, True),
softmax_loss_function=softmax_loss_function
)
if output_projection is not None:
for b in range(len(buckets)):
self.outputs[b] = [
tf.matmul(
output,
output_projection[0]
) + output_projection[1]
for output in self.outputs[b]
]
else:
self.outputs, self.losses = tf.contrib.legacy_seq2seq.model_with_buckets(
self.encoder_inputs,
self.decoder_inputs,
targets,
self.decoder_weights,
buckets,
lambda x, y: seq2seq_f(x, y, False),
softmax_loss_function=softmax_loss_function
)
params = tf.trainable_variables()
opt = tf.train.AdamOptimizer(
learning_rate=learning_rate
)
if not forward_only:
self.gradient_norms = []
self.updates = []
for output, loss in zip(self.outputs, self.losses):
gradients = tf.gradients(loss, params)
clipped_gradients, norm = tf.clip_by_global_norm(
gradients,
max_gradient_norm
)
self.gradient_norms.append(norm)
self.updates.append(opt.apply_gradients(
zip(clipped_gradients, params)
))
# self.saver = tf.train.Saver(tf.all_variables())
self.saver = tf.train.Saver(
tf.all_variables(),
write_version=tf.train.SaverDef.V2
)
def step(
self,
session,
encoder_inputs,
decoder_inputs,
decoder_weights,
bucket_id,
forward_only
):
encoder_size, decoder_size = self.buckets[bucket_id]
if len(encoder_inputs) != encoder_size:
raise ValueError(
"Encoder length must be equal to the one in bucket,"
" %d != %d." % (len(encoder_inputs), encoder_size)
)
if len(decoder_inputs) != decoder_size:
raise ValueError(
"Decoder length must be equal to the one in bucket,"
" %d != %d." % (len(decoder_inputs), decoder_size)
)
if len(decoder_weights) != decoder_size:
raise ValueError(
"Weights length must be equal to the one in bucket,"
" %d != %d." % (len(decoder_weights), decoder_size)
)
input_feed = {}
for i in range(encoder_size):
input_feed[self.encoder_inputs[i].name] = encoder_inputs[i]
for i in range(decoder_size):
input_feed[self.decoder_inputs[i].name] = decoder_inputs[i]
input_feed[self.decoder_weights[i].name] = decoder_weights[i]
last_target = self.decoder_inputs[decoder_size].name
input_feed[last_target] = np.zeros([self.batch_size], dtype=np.int32)
if not forward_only:
output_feed = [
self.updates[bucket_id],
self.gradient_norms[bucket_id],
self.losses[bucket_id]
]
output_feed.append(self.outputs[bucket_id][i])
else:
output_feed = [self.losses[bucket_id]]
for i in range(decoder_size):
output_feed.append(self.outputs[bucket_id][i])
outputs = session.run(output_feed, input_feed)
if not forward_only:
return outputs[1], outputs[2], outputs[3:]
else:
return None, outputs[0], outputs[1:]
def get_batch_data(self, bucket_dbs, bucket_id):
data = []
data_in = []
bucket_db = bucket_dbs[bucket_id]
for _ in range(self.batch_size):
ask, answer = bucket_db.random()
data.append((ask, answer))
data_in.append((answer, ask))
return data, data_in
def get_batch(self, bucket_dbs, bucket_id, data):
encoder_size, decoder_size = self.buckets[bucket_id]
# bucket_db = bucket_dbs[bucket_id]
encoder_inputs, decoder_inputs = [], []
for encoder_input, decoder_input in data:
# encoder_input, decoder_input = random.choice(data[bucket_id])
# encoder_input, decoder_input = bucket_db.random()
encoder_input = data_utils.sentence_indice(encoder_input)
decoder_input = data_utils.sentence_indice(decoder_input)
# Encoder
encoder_pad = [data_utils.PAD_ID] * (
encoder_size - len(encoder_input)
)
encoder_inputs.append(list(reversed(encoder_input + encoder_pad)))
# Decoder
decoder_pad_size = decoder_size - len(decoder_input) - 2
decoder_inputs.append(
[data_utils.GO_ID] + decoder_input +
[data_utils.EOS_ID] +
[data_utils.PAD_ID] * decoder_pad_size
)
batch_encoder_inputs, batch_decoder_inputs, batch_weights = [], [], []
# batch encoder
for i in range(encoder_size):
batch_encoder_inputs.append(np.array(
[encoder_inputs[j][i] for j in range(self.batch_size)],
dtype=np.int32
))
# batch decoder
for i in range(decoder_size):
batch_decoder_inputs.append(np.array(
[decoder_inputs[j][i] for j in range(self.batch_size)],
dtype=np.int32
))
batch_weight = np.ones(self.batch_size, dtype=np.float32)
for j in range(self.batch_size):
if i < decoder_size - 1:
target = decoder_inputs[j][i + 1]
if i == decoder_size - 1 or target == data_utils.PAD_ID:
batch_weight[j] = 0.0
batch_weights.append(batch_weight)
return batch_encoder_inputs, batch_decoder_inputs, batch_weights
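    # Illustrative example (made-up values, assuming sentence_indice yields
    # character-level indices): for bucket (5, 10) and the pair ("hi", "hello"),
    # the ask side is padded with PAD_ID up to length 5 and then reversed, while
    # the answer becomes [GO_ID, h, e, l, l, o, EOS_ID, PAD_ID, PAD_ID, PAD_ID];
    # batch_weights zero out every position whose target is PAD_ID.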
def create_model(forward_only, args):
"""建立模型"""
buckets = data_utils.buckets
dtype = tf.float16 if args.use_fp16 else tf.float32
model = S2SModel(
data_utils.dim,
data_utils.dim,
buckets,
args.size,
args.dropout,
args.num_layers,
args.max_gradient_norm,
args.batch_size,
args.learning_rate,
args.num_samples,
forward_only,
dtype
)
return model | apache-2.0 | 883,289,279,071,003,100 | 34.851852 | 85 | 0.506622 | false | 4.05137 | false | false | false |
rbuffat/pyepw | tests/test_typical_or_extreme_periods.py | 1 | 2262 | import os
import tempfile
import unittest
from pyepw.epw import TypicalOrExtremePeriods, TypicalOrExtremePeriod, EPW
class TestTypicalOrExtremePeriods(unittest.TestCase):
def setUp(self):
self.fd, self.path = tempfile.mkstemp()
def tearDown(self):
os.remove(self.path)
def test_create_typical_or_extreme_periods(self):
obj = TypicalOrExtremePeriods()
typical_or_extreme_period_obj = TypicalOrExtremePeriod()
var_typical_or_extreme_period_typical_or_extreme_period_name = "typical_or_extreme_period_name"
typical_or_extreme_period_obj.typical_or_extreme_period_name = var_typical_or_extreme_period_typical_or_extreme_period_name
var_typical_or_extreme_period_typical_or_extreme_period_type = "typical_or_extreme_period_type"
typical_or_extreme_period_obj.typical_or_extreme_period_type = var_typical_or_extreme_period_typical_or_extreme_period_type
var_typical_or_extreme_period_period_start_day = "period_start_day"
typical_or_extreme_period_obj.period_start_day = var_typical_or_extreme_period_period_start_day
var_typical_or_extreme_period_period_end_day = "period_end_day"
typical_or_extreme_period_obj.period_end_day = var_typical_or_extreme_period_period_end_day
obj.add_typical_or_extreme_period(typical_or_extreme_period_obj)
epw = EPW(typical_or_extreme_periods=obj)
epw.save(self.path, check=False)
epw2 = EPW()
epw2.read(self.path)
self.assertEqual(
epw2.typical_or_extreme_periods.typical_or_extreme_periods[0].typical_or_extreme_period_name,
var_typical_or_extreme_period_typical_or_extreme_period_name)
self.assertEqual(
epw2.typical_or_extreme_periods.typical_or_extreme_periods[0].typical_or_extreme_period_type,
var_typical_or_extreme_period_typical_or_extreme_period_type)
self.assertEqual(
epw2.typical_or_extreme_periods.typical_or_extreme_periods[0].period_start_day,
var_typical_or_extreme_period_period_start_day)
self.assertEqual(
epw2.typical_or_extreme_periods.typical_or_extreme_periods[0].period_end_day,
var_typical_or_extreme_period_period_end_day)
| apache-2.0 | 546,571,778,201,440,830 | 49.266667 | 131 | 0.707339 | false | 3.011984 | false | false | false |
prov-suite/interop-test-harness | prov_interop/provman/converter.py | 1 | 5717 | """Manages invocation of ProvScala `provmanagement` script.
"""
# Copyright (c) 2015 University of Southampton
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import os.path
import subprocess
from prov_interop.component import CommandLineComponent
from prov_interop.component import ConfigError
from prov_interop.converter import ConversionError
from prov_interop.converter import Converter
class ProvManConverter(Converter, CommandLineComponent):
"""Manages invocation of ProvScala `provmanagement` script."""
INPUT = "INPUT"
"""str or unicode: token for input file in command-line specification"""
OUTPUT = "OUTPUT"
"""str or unicode: token for output file in command-line specification"""
INFORMAT = "INFORMAT"
"""str or unicode: token for output file in command-line specification"""
OUTFORMAT = "OUTFORMAT"
"""str or unicode: token for output file in command-line specification"""
def __init__(self):
"""Create converter.
"""
super(ProvManConverter, self).__init__()
def configure(self, config):
"""Configure converter. The configuration must hold:
- :class:`prov_interop.converter.Converter` configuration
- :class:`prov_interop.component.CommandLineComponent` configuration
    ``arguments`` must have the tokens ``INPUT``, ``OUTPUT``, ``INFORMAT`` and
    ``OUTFORMAT``, which are place-holders for the input file, output file,
    input format and output format.
A valid configuration is::
{
"executable": "/home/user/provman/bin/provmanagement"
"arguments": "translate --infile INPUT --outfile OUTPUT --inputFormat INFORMAT --outformat OUTFORMAT"
"input-formats": ["provn", "ttl", "trig", "provx", "json"]
"output-formats": ["provn", "ttl", "trig", "provx", "json"]
}
:param config: Configuration
:type config: dict
:raises ConfigError: if `config` does not hold the above entries
"""
super(ProvManConverter, self).configure(config)
for token in [ProvManConverter.INPUT, ProvManConverter.OUTPUT,
ProvManConverter.INFORMAT, ProvManConverter.OUTFORMAT]:
if token not in self._arguments:
raise ConfigError("Missing token " + token)
def convert(self, in_file, out_file):
"""Convert input file into output file.
- Input and output formats are derived from `in_file` and
`out_file` file extensions.
- A check is done to see that `in_file` exists and that the input
and output format are in ``input-formats`` and
``output-formats`` respectively.
    - ``executable`` and ``arguments`` are used to create a
      command-line invocation, with ``INPUT``, ``OUTPUT``, ``INFORMAT`` and
      ``OUTFORMAT`` being replaced with `in_file`, `out_file` and the input
      and output formats.
An example command-line invocation is::
      /home/user/provman/bin/provmanagement translate --infile testcase1.json --outfile testcase1.provx --inputFormat json --outformat provx
:param in_file: Input file
:type in_file: str or unicode
:param out_file: Output file
:type out_file: str or unicode
:raises ConversionError: if the input file cannot be found, or
the exit code of ``provmanagement`` is non-zero
:raises OSError: if there are problems invoking the converter
e.g. the script is not found
"""
super(ProvManConverter, self).convert(in_file, out_file)
in_format = os.path.splitext(in_file)[1][1:]
out_format = os.path.splitext(out_file)[1][1:]
super(ProvManConverter, self).check_formats(in_format, out_format)
command_line = list(self._executable)
command_line.extend(self._arguments)
command_line = [in_file if x == ProvManConverter.INPUT else x
for x in command_line]
command_line = [out_file if x == ProvManConverter.OUTPUT else x
for x in command_line]
command_line = [in_format if x == ProvManConverter.INFORMAT else x
for x in command_line]
command_line = [out_format if x == ProvManConverter.OUTFORMAT else x
for x in command_line]
print((" ".join(command_line)))
return_code = subprocess.call(command_line)
if return_code != 0:
raise ConversionError(" ".join(command_line) +
" returned " + str(return_code))
if not os.path.isfile(out_file):
raise ConversionError("Output file not found: " + out_file)
| mit | 7,667,281,623,494,982,000 | 44.015748 | 148 | 0.659612 | false | 4.275991 | true | false | false |
tavallaie/RoboDynamixel | dxl/dxlregisters.py | 1 | 1221 | #!/usr/bin/env python
# Dynamixel library for MX28 and MX64
# WINDOWS WARNING: For best performance, parameters of the COM Port should be set to maximum baud rate, and 1ms delay (Device Manager, COM Ports, properties, advanced)
class DxlRegister():
def __init__(self,address,size,mode='r',eeprom=False,fromdxl= lambda x: x,todxl= lambda x: x,fromsi=lambda x:x,tosi=lambda x:x,range=None):
self.address=address
self.size=size
self.mode=mode
self.eeprom=eeprom
self.fromdxl=fromdxl
self.todxl=todxl
self.fromsi=fromsi
self.tosi=tosi
self.range=range
class DxlRegisterByte(DxlRegister):
def __init__(self,address,mode='r',eeprom=False,fromsi=lambda x:x,tosi=lambda x:x,range=None):
DxlRegister.__init__(self,address,1,mode,eeprom,fromdxl=lambda x:x[0],todxl=lambda x:[x],range=range,fromsi=fromsi,tosi=tosi)
class DxlRegisterWord(DxlRegister):
def __init__(self,address,mode='r',eeprom=False,fromsi=lambda x:x,tosi=lambda x:x,range=None):
DxlRegister.__init__(self,address,2,mode,eeprom,fromdxl=lambda x:x[0]+(x[1]<<8),todxl=lambda x:[int(x)&0xFF,(int(x)>>8)&0xFF] ,range=range,fromsi=fromsi,tosi=tosi)
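# Worked example (illustrative): DxlRegisterWord packs 16-bit values
# little-endian, so todxl(0x0203) -> [0x03, 0x02] and
# fromdxl([0x03, 0x02]) -> 0x0203; DxlRegisterByte simply wraps a single byte.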
| mit | 3,903,318,515,910,308,000 | 41.103448 | 171 | 0.685504 | false | 2.852804 | false | false | false |
trmznt/genaf | genaf/views/utils/plot.py | 1 | 3274 |
# general plot / graphics utility using matplotlib
from genaf.views.tools import *
from matplotlib import pyplot as plt
from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas
from matplotlib.figure import Figure
import pandas
import io, base64
@roles( PUBLIC )
def index(request):
# check
    if request.GET.get('_method', None) not in [ '_exec', '_dfexec' ]:
pform, jscode = create_form( request )
return render_to_response('genaf:templates/utils/index.mako',
{ 'title': 'Plotting Utility',
'html': pform,
'code': jscode,
}, request = request )
    if request.GET.get('_method') == '_dfexec':
df = parse_df(request.GET.get('dfdata'))
else:
df = parse_textdata(request.GET.get('textdata'))
plot_type = request.GET.get('plot_type')
if plot_type == 'B':
html, jscode = column_chart(df)
elif plot_type == 'S':
return error_page(request, 'Scatter plot not implemented yet')
elif plot_type == 'P':
html, jscode = pie_chart(df)
return render_to_response('genaf:templates/utils/index.mako',
{ 'title': 'Plot',
'html': html,
'code': jscode,
}, request = request )
def create_form(request):
""" return html, jscode """
pform = form(name='plotform', action='#')
pform.add(
fieldset(name='data')[
input_textarea('textdata', label='Data'),
],
fieldset(name='options')[
input_select(name='plot_type', label='Plot type', value='B',
options = [ ('B', 'Bar (vertical) / column chart'),
('S', 'Scatter x,y plot'),
('P', 'Pie chart'),
] ),
],
fieldset()[ submit_bar('Create plot', '_exec')]
)
return (pform, '')
def parse_textdata(textdata):
""" parse data, with the first line as header, and consecutive lines as data """
header, content = textdata.split('\n', 1)
columns = [ x.strip() for x in header.split('|') ]
buff = io.StringIO(content)
dataframe = pandas.read_table(buff, header=None, names = columns)
return dataframe
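# Example input (illustrative) for parse_textdata: a '|'-separated header line
# followed by tab-separated rows, e.g.
#
#     marker | height
#     A<TAB>1.2
#     B<TAB>3.4
#
# which yields a two-column DataFrame with columns ['marker', 'height'].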
def save_figure(canvas):
figfile = io.BytesIO()
canvas.print_figure(figfile)
figfile.seek(0)
figdata_png = figfile.getvalue()
figdata_png = base64.b64encode(figdata_png).decode('ASCII')
fig_html = literal('<img src="data:image/png;base64,%s" >' % figdata_png)
return fig_html,''
def column_chart(df):
""" creates column (vertical bar) chart """
fig = Figure()
canvas = FigureCanvas(fig)
ax = fig.add_subplot(111)
ax.bar(df.index, df.iloc[:,1], align='center')
ax.set_xlabel(df.columns[0])
ax.set_xticks(df.index)
ax.set_xticklabels(df.iloc[:,0], rotation='vertical')
ax.set_ylabel(df.columns[1])
fig.tight_layout()
return save_figure(canvas)
def pie_chart(df):
fig = Figure()
canvas = FigureCanvas(fig)
ax = fig.add_subplot(111, aspect=1)
ax.pie( df.iloc[:,1], labels = df.iloc[:,0], counterclock=False, startangle=90 )
ax.set_xlabel(df.columns[0])
fig.tight_layout()
return save_figure(canvas)
| lgpl-3.0 | -8,268,746,101,189,477,000 | 25.617886 | 84 | 0.583079 | false | 3.566449 | false | false | false |
Osndok/zim-desktop-wiki | zim/plugins/tasklist/__init__.py | 1 | 8109 | # -*- coding: utf-8 -*-
# Copyright 2009-2017 Jaap Karssenberg <[email protected]>
# TODO: allow more complex queries for filter, in particular (NOT tag AND tag)
# allow multiple tabs in dialog / side pane with configurable query
#
# TODO: add an interface for this plugin in the WWW frontend
#
# TODO: commandline option
# - open dialog
# - output to stdout with configurable format
# - force update, intialization
#
# TODO: store parser settings in notebook, not in preferences
# in dialog make it clear what is per notebook and what is user prefs
# tab in properties, link to open that from plugin prefs ?
# TODO: test coverage for the start date label (and due with "<")
# TODO: test coverage for start / due date from calendar page
# TODO: test coverage for sorting in list_open_tasks
# TODO: test coverage include / exclude sections
# TODO: update manual
from __future__ import with_statement
from zim.plugins import PluginClass, extends, ObjectExtension, WindowExtension
from zim.actions import action
from zim.config import StringAllowEmpty
from zim.signals import DelayedCallback
from zim.gui.widgets import RIGHT_PANE, PANE_POSITIONS
from .indexer import TasksIndexer, TasksView
from .gui import TaskListDialog, TaskListWidget
class TaskListPlugin(PluginClass):
plugin_info = {
'name': _('Task List'), # T: plugin name
'description': _('''\
This plugin adds a dialog showing all open tasks in
this notebook. Open tasks can be either open checkboxes
or items marked with tags like "TODO" or "FIXME".
This is a core plugin shipping with zim.
'''), # T: plugin description
'author': 'Jaap Karssenberg',
'help': 'Plugins:Task List'
}
parser_preferences = (
# key, type, label, default
('all_checkboxes', 'bool', _('Consider all checkboxes as tasks'), True),
# T: label for plugin preferences dialog
('labels', 'string', _('Labels marking tasks'), 'FIXME, TODO', StringAllowEmpty),
# T: label for plugin preferences dialog - labels are e.g. "FIXME", "TODO"
('integrate_with_journal', 'choice', _('Use date from journal pages'), 'start', (
('none', _('do not use')), # T: choice for "Use date from journal pages"
('start', _('as start date for tasks')), # T: choice for "Use date from journal pages"
('due', _('as due date for tasks')) # T: choice for "Use date from journal pages"
)),
('included_subtrees', 'string', _('Section(s) to index'), '', StringAllowEmpty),
# T: Notebook sections to search for tasks - default is the whole tree (empty string means everything)
('excluded_subtrees', 'string', _('Section(s) to ignore'), '', StringAllowEmpty),
# T: Notebook sections to exclude when searching for tasks - default is none
)
plugin_preferences = (
# key, type, label, default
('embedded', 'bool', _('Show tasklist in sidepane'), False),
# T: preferences option
('pane', 'choice', _('Position in the window'), RIGHT_PANE, PANE_POSITIONS),
# T: preferences option
) + parser_preferences + (
('nonactionable_tags', 'string', _('Tags for non-actionable tasks'), '', StringAllowEmpty),
# T: label for plugin preferences dialog
('tag_by_page', 'bool', _('Turn page name into tags for task items'), False),
# T: label for plugin preferences dialog
('use_workweek', 'bool', _('Flag tasks due on Monday or Tuesday before the weekend'), False),
# T: label for plugin preferences dialog
)
hide_preferences = ('nonactionable_tags', 'tag_by_page', 'use_workweek')
# These are deprecated, but I don't dare to remove them yet
# so hide them in the configuration dialog instead
@extends('Notebook')
class NotebookExtension(ObjectExtension):
__signals__ = {
'tasklist-changed': (None, None, ()),
}
def __init__(self, plugin, notebook):
ObjectExtension.__init__(self, plugin, notebook)
self.notebook = notebook
self._parser_key = self._get_parser_key()
self.index = notebook.index
if self.index.get_property(TasksIndexer.PLUGIN_NAME) != TasksIndexer.PLUGIN_DB_FORMAT:
self.index._db.executescript(TasksIndexer.TEARDOWN_SCRIPT) # XXX
self.index.flag_reindex()
self.indexer = None
self._setup_indexer(self.index, self.index.update_iter)
self.connectto(self.index, 'new-update-iter', self._setup_indexer)
self.connectto(plugin.preferences, 'changed', self.on_preferences_changed)
def _setup_indexer(self, index, update_iter):
if self.indexer is not None:
self.disconnect_from(self.indexer)
self.indexer.disconnect_all()
self.indexer = TasksIndexer.new_from_index(index, self.plugin.preferences)
update_iter.add_indexer(self.indexer)
self.connectto(self.indexer, 'tasklist-changed')
def on_preferences_changed(self, preferences):
# Need to construct new parser, re-index pages
if self._parser_key != self._get_parser_key():
self._parser_key = self._get_parser_key()
self.disconnect_from(self.indexer)
self.indexer.disconnect_all()
self.indexer = TasksIndexer.new_from_index(self.index, preferences)
self.index.flag_reindex()
self.connectto(self.indexer, 'tasklist-changed')
def on_tasklist_changed(self, indexer):
self.emit('tasklist-changed')
def _get_parser_key(self):
return tuple(
self.plugin.preferences[t[0]]
for t in self.plugin.parser_preferences
)
def teardown(self):
self.indexer.disconnect_all()
self.notebook.index.update_iter.remove_indexer(self.indexer)
self.index._db.executescript(TasksIndexer.TEARDOWN_SCRIPT) # XXX
self.index.set_property(TasksIndexer.PLUGIN_NAME, None)
@extends('MainWindow')
class MainWindowExtension(WindowExtension):
uimanager_xml = '''
<ui>
<menubar name='menubar'>
<menu action='view_menu'>
<placeholder name="plugin_items">
<menuitem action="show_task_list" />
</placeholder>
</menu>
</menubar>
<toolbar name='toolbar'>
<placeholder name='tools'>
<toolitem action='show_task_list'/>
</placeholder>
</toolbar>
</ui>
'''
def __init__(self, plugin, window):
WindowExtension.__init__(self, plugin, window)
self._widget = None
self.on_preferences_changed(plugin.preferences)
self.connectto(plugin.preferences, 'changed', self.on_preferences_changed)
@action(_('Task List'), stock='zim-task-list', readonly=True) # T: menu item
def show_task_list(self):
# TODO: add check + dialog for index probably_up_to_date
index = self.window.ui.notebook.index # XXX
tasksview = TasksView.new_from_index(index)
dialog = TaskListDialog.unique(self, self.window, tasksview, self.plugin.preferences)
dialog.present()
def on_preferences_changed(self, preferences):
if preferences['embedded']:
if self._widget is None:
self._init_widget()
else:
self._widget.task_list.refresh()
try:
self.window.remove(self._widget)
except ValueError:
pass
self.window.add_tab(_('Tasks'), self._widget, preferences['pane'])
# T: tab label for side pane
self._widget.show_all()
else:
if self._widget:
self.window.remove(self._widget)
self._widget = None
def _init_widget(self):
index = self.window.ui.notebook.index # XXX
tasksview = TasksView.new_from_index(index)
opener = self.window.get_resource_opener()
uistate = self.window.ui.uistate['TaskListSidePane']
self._widget = TaskListWidget(tasksview, opener, self.plugin.preferences, uistate)
def on_tasklist_changed(o):
self._widget.task_list.refresh()
callback = DelayedCallback(10, on_tasklist_changed)
# Don't really care about the delay, but want to
# make it less blocking - now it is at least on idle
### XXX HACK to get dependency to connect to
        ### -- no access to plugin, so can't use get_extension()
        ## -- duplicate of this snippet in TaskListDialog
for e in self.window.ui.notebook.__zim_extension_objects__:
if hasattr(e, 'indexer') and e.indexer.__class__.__name__ == 'TasksIndexer':
self.connectto(e, 'tasklist-changed', callback)
break
else:
raise AssertionError('Could not find tasklist notebook extension')
def teardown(self):
if self._widget:
self.window.remove(self._widget)
self._widget = None
| gpl-2.0 | 1,262,954,511,106,585,300 | 34.41048 | 105 | 0.702429 | false | 3.370324 | false | false | false |
turdusmerula/kipartman | kipartbase/swagger_server/controllers/controller_upload_file.py | 1 | 1796 | import connexion
from swagger_server.models.upload_file import UploadFile
from swagger_server.models.upload_file_data import UploadFileData
from swagger_server.models.error import Error
from datetime import date, datetime
from typing import List, Dict
from six import iteritems
from ..util import deserialize_date, deserialize_datetime
import api.models
import api.file_storage
from os.path import expanduser
home = expanduser("~")
def serialize_UploadFileData(fupload_file, upload_file=None):
if upload_file is None:
upload_file = UploadFileData()
upload_file.source_name = fupload_file.source_name
upload_file.storage_path = fupload_file.storage_path
return upload_file
def serialize_UploadFile(fupload_file, upload_file=None):
if upload_file is None:
upload_file = UploadFile()
upload_file.id = fupload_file.id
serialize_UploadFileData(fupload_file, upload_file)
return upload_file
def add_upload_file(upfile=None, description=None):
"""
add_upload_file
Upload a file.
:param upfile: The file to upload.
:type upfile: werkzeug.datastructures.FileStorage
    :param description: A description of the uploaded file.
:type description: str
:rtype: UploadFile
"""
storage = api.file_storage.FileStorage()
fupload_file = storage.add_file(upfile)
return serialize_UploadFile(fupload_file)
def find_upload_file(upload_file_id):
"""
find_upload_file
Return a file
:param upload_file_id: File id
:type upload_file_id: int
:rtype: UploadFile
"""
try:
fupload_file = api.models.File.objects.get(id=upload_file_id)
except:
return Error(code=1000, message='File %d does not exists'%upload_file_id), 403
return serialize_UploadFile(fupload_file)
| gpl-3.0 | 5,873,611,844,778,625,000 | 27.507937 | 86 | 0.713808 | false | 3.628283 | false | false | false |
mikelolasagasti/revelation | src/lib/ui.py | 1 | 48073 | #
# Revelation - a password manager for GNOME 2
# http://oss.codepoet.no/revelation/
# $Id$
#
# Module for UI functionality
#
#
# Copyright (c) 2003-2006 Erik Grinaker
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
from . import config, data, dialog, entry, io, util
import gettext
import time
import gi
gi.require_version('Gtk', '3.0')
from gi.repository import GObject, Gtk, Gdk, Gio, Pango # noqa: E402
_ = gettext.gettext
STOCK_CONTINUE = _("_Continue") # "revelation-continue"
STOCK_DISCARD = "revelation-discard"
STOCK_EDIT = "revelation-edit"
STOCK_EXPORT = _("_Export") # "revelation-export"
STOCK_FOLDER = "revelation-folder"
STOCK_GENERATE = _("_Generate") # "revelation-generate"
STOCK_IMPORT = _("_Import") # "revelation-import"
STOCK_GOTO = "revelation-goto"
STOCK_LOCK = "revelation-lock"
STOCK_NEW_ENTRY = _("_Add Entry") # "revelation-new-entry"
STOCK_NEW_FOLDER = _("_Add Folder") # "revelation-new-folder"
STOCK_NEXT = "go-down" # "revelation-next"
STOCK_PASSWORD_CHANGE = _("_Change") # "revelation-password-change"
STOCK_PASSWORD_CHECK = "revelation-password-check" # nosec
STOCK_PASSWORD_STRONG = "security-high" # nosec "revelation-password-strong"
STOCK_PASSWORD_WEAK = "security-low" # nosec "revelation-password-weak"
STOCK_PREVIOUS = "go-up" # "revelation-previous"
STOCK_RELOAD = _("_Reload") # "revelation-reload"
STOCK_REMOVE = "revelation-remove"
STOCK_REPLACE = _("_Replace") # "revelation-replace"
STOCK_UNKNOWN = "dialog-question" # "revelation-unknown"
STOCK_UNLOCK = _("_Unlock") # "revelation-unlock"
STOCK_UPDATE = _("_Update") # "revelation-update"
STOCK_ENTRY_FOLDER = "folder" # "revelation-account-folder"
STOCK_ENTRY_FOLDER_OPEN = "folder-open" # "revelation-account-folder-open"
STOCK_ENTRY_CREDITCARD = "x-office-contact" # "revelation-account-creditcard"
STOCK_ENTRY_CRYPTOKEY = "dialog-password" # "revelation-account-cryptokey"
STOCK_ENTRY_DATABASE = "server-database" # "revelation-account-database"
STOCK_ENTRY_DOOR = "changes-allow" # "revelation-account-door"
STOCK_ENTRY_EMAIL = "emblem-mail" # "revelation-account-email"
STOCK_ENTRY_FTP = "system-file-manager" # "revelation-account-ftp"
STOCK_ENTRY_GENERIC = "document-new" # "revelation-account-generic"
STOCK_ENTRY_PHONE = "phone" # "revelation-account-phone"
STOCK_ENTRY_SHELL = "utilities-terminal" # "revelation-account-shell"
STOCK_ENTRY_REMOTEDESKTOP = "preferences-desktop-remote-desktop" # "revelation-account-remotedesktop"
STOCK_ENTRY_WEBSITE = "web-browser" # "revelation-account-website"
ICON_SIZE_APPLET = Gtk.IconSize.LARGE_TOOLBAR
ICON_SIZE_DATAVIEW = Gtk.IconSize.LARGE_TOOLBAR
ICON_SIZE_DROPDOWN = Gtk.IconSize.SMALL_TOOLBAR
ICON_SIZE_ENTRY = Gtk.IconSize.MENU
ICON_SIZE_FALLBACK = Gtk.IconSize.LARGE_TOOLBAR
ICON_SIZE_HEADLINE = Gtk.IconSize.LARGE_TOOLBAR
ICON_SIZE_LABEL = Gtk.IconSize.MENU
ICON_SIZE_LOGO = Gtk.IconSize.DND
ICON_SIZE_TREEVIEW = Gtk.IconSize.MENU
STOCK_ICONS = (
(STOCK_ENTRY_CREDITCARD, "contact-new", (ICON_SIZE_DATAVIEW, ICON_SIZE_DROPDOWN, ICON_SIZE_ENTRY, ICON_SIZE_TREEVIEW)),
(STOCK_ENTRY_CRYPTOKEY, "dialog-password", (ICON_SIZE_DATAVIEW, ICON_SIZE_DROPDOWN, ICON_SIZE_ENTRY, ICON_SIZE_TREEVIEW)),
(STOCK_ENTRY_DATABASE, "package_system", (ICON_SIZE_DATAVIEW, ICON_SIZE_DROPDOWN, ICON_SIZE_ENTRY, ICON_SIZE_TREEVIEW)),
(STOCK_ENTRY_DOOR, "changes-allow", (ICON_SIZE_DATAVIEW, ICON_SIZE_DROPDOWN, ICON_SIZE_ENTRY, ICON_SIZE_TREEVIEW)),
(STOCK_ENTRY_EMAIL, "emblem-mail", (ICON_SIZE_DATAVIEW, ICON_SIZE_DROPDOWN, ICON_SIZE_ENTRY, ICON_SIZE_TREEVIEW)),
(STOCK_ENTRY_FTP, "system-file-manager", (ICON_SIZE_DATAVIEW, ICON_SIZE_DROPDOWN, ICON_SIZE_ENTRY, ICON_SIZE_TREEVIEW)),
(STOCK_ENTRY_GENERIC, "document-new", (ICON_SIZE_DATAVIEW, ICON_SIZE_DROPDOWN, ICON_SIZE_ENTRY, ICON_SIZE_TREEVIEW)),
(STOCK_ENTRY_PHONE, "phone", (ICON_SIZE_DATAVIEW, ICON_SIZE_DROPDOWN, ICON_SIZE_ENTRY, ICON_SIZE_TREEVIEW)),
(STOCK_ENTRY_SHELL, "utilities-terminal", (ICON_SIZE_DATAVIEW, ICON_SIZE_DROPDOWN, ICON_SIZE_ENTRY, ICON_SIZE_TREEVIEW)),
(STOCK_ENTRY_REMOTEDESKTOP, "preferences-desktop-remote-desktop", (ICON_SIZE_DATAVIEW, ICON_SIZE_DROPDOWN, ICON_SIZE_ENTRY, ICON_SIZE_TREEVIEW)),
(STOCK_ENTRY_WEBSITE, "web-browser", (ICON_SIZE_DATAVIEW, ICON_SIZE_DROPDOWN, ICON_SIZE_ENTRY, ICON_SIZE_TREEVIEW)),
(STOCK_ENTRY_FOLDER, "folder", (ICON_SIZE_DATAVIEW, ICON_SIZE_DROPDOWN, ICON_SIZE_ENTRY, ICON_SIZE_TREEVIEW)),
(STOCK_ENTRY_FOLDER_OPEN, "folder-open", (ICON_SIZE_DATAVIEW, ICON_SIZE_DROPDOWN, ICON_SIZE_ENTRY, ICON_SIZE_TREEVIEW)),
)
STOCK_ITEMS = (
(STOCK_CONTINUE, _('_Continue'), "stock_test-mode"),
(STOCK_DISCARD, _('_Discard'), Gtk.STOCK_DELETE),
(STOCK_EDIT, _('_Edit'), Gtk.STOCK_EDIT),
(STOCK_EXPORT, _('_Export'), Gtk.STOCK_EXECUTE),
(STOCK_FOLDER, '', "stock_folder"),
(STOCK_GENERATE, _('_Generate'), Gtk.STOCK_EXECUTE),
(STOCK_GOTO, _('_Go to'), Gtk.STOCK_JUMP_TO),
(STOCK_IMPORT, _('_Import'), Gtk.STOCK_CONVERT),
(STOCK_LOCK, _('_Lock'), "stock_lock"),
(STOCK_NEW_ENTRY, _('_Add Entry'), Gtk.STOCK_ADD),
(STOCK_NEW_FOLDER, _('_Add Folder'), "stock_folder"),
(STOCK_NEXT, _('Next'), Gtk.STOCK_GO_DOWN),
(STOCK_PASSWORD_CHANGE, _('_Change'), "stock_lock-ok"),
(STOCK_PASSWORD_CHECK, _('_Check'), "stock_lock-ok"),
(STOCK_PASSWORD_STRONG, '', "stock_lock-ok"),
(STOCK_PASSWORD_WEAK, '', "stock_lock-broken"),
(STOCK_PREVIOUS, _('Previous'), Gtk.STOCK_GO_UP),
(STOCK_RELOAD, _('_Reload'), Gtk.STOCK_REFRESH),
(STOCK_REMOVE, _('Re_move'), Gtk.STOCK_DELETE),
(STOCK_REPLACE, _('_Replace'), Gtk.STOCK_SAVE_AS),
(STOCK_UNKNOWN, _('Unknown'), "dialog-question"),
(STOCK_UNLOCK, _('_Unlock'), "stock_lock-open"),
(STOCK_UPDATE, _('_Update'), "stock_edit"),
)
# EXCEPTIONS #
class DataError(Exception):
"Exception for invalid data"
pass
# FUNCTIONS #
def generate_field_display_widget(field, cfg = None, userdata = None):
"Generates a widget for displaying a field value"
if field.datatype == entry.DATATYPE_EMAIL:
widget = LinkButton("mailto:%s" % field.value, util.escape_markup(field.value))
elif field.datatype == entry.DATATYPE_PASSWORD:
widget = PasswordLabel(util.escape_markup(field.value), cfg, userdata)
elif field.datatype == entry.DATATYPE_URL:
widget = LinkButton(field.value, util.escape_markup(field.value))
else:
widget = Label(util.escape_markup(field.value))
widget.set_selectable(True)
return widget
def generate_field_edit_widget(field, cfg = None, userdata = None):
"Generates a widget for editing a field"
if type(field) == entry.PasswordField:
widget = PasswordEntryGenerate(None, cfg, userdata)
elif type(field) == entry.UsernameField:
widget = Gtk.ComboBox.new_with_entry()
setup_comboboxentry(widget, userdata)
elif field.datatype == entry.DATATYPE_FILE:
widget = FileEntry()
elif field.datatype == entry.DATATYPE_PASSWORD:
widget = PasswordEntry(None, cfg, userdata)
else:
widget = Entry()
widget.set_text(field.value or "")
return widget
def setup_comboboxentry(widget, userdata=None):
widget.entry = widget.get_child()
widget.entry.set_activates_default(True)
widget.set_text = widget.entry.set_text
widget.get_text = widget.entry.get_text
widget.model = Gtk.ListStore(GObject.TYPE_STRING)
widget.set_model(widget.model)
widget.set_entry_text_column(0)
widget.completion = Gtk.EntryCompletion()
widget.completion.set_model(widget.model)
widget.completion.set_text_column(0)
widget.completion.set_minimum_key_length(1)
widget.entry.set_completion(widget.completion)
def set_values(vlist):
"Sets the values for the dropdown"
widget.model.clear()
for item in vlist:
widget.model.append((item,))
widget.set_values = set_values
if userdata is not None:
widget.set_values(userdata)
# CONTAINERS #
class HBox(Gtk.HBox):
"A horizontal container"
def __init__(self, *args):
Gtk.HBox.__init__(self)
self.set_spacing(6)
self.set_border_width(0)
for widget in args:
self.pack_start(widget, True, True, 0)
class HButtonBox(Gtk.HButtonBox):
"A horizontal button box"
def __init__(self, *args):
Gtk.HButtonBox.__init__(self)
self.set_layout(Gtk.ButtonBoxStyle.SPREAD)
self.set_spacing(12)
for button in args:
self.pack_start(button, True, True, 0)
class VBox(Gtk.VBox):
"A vertical container"
def __init__(self, *args):
Gtk.VBox.__init__(self)
self.set_spacing(6)
self.set_border_width(0)
for widget in args:
self.pack_start(widget, True, True, 0)
class Notebook(Gtk.Notebook):
"A notebook (tabbed view)"
def __init__(self):
Gtk.Notebook.__init__(self)
def create_page(self, title):
"Creates a notebook page"
page = NotebookPage()
self.append_page(page, Label(title))
return page
class NotebookPage(VBox):
"A notebook page"
def __init__(self):
VBox.__init__(self)
self.sizegroup = Gtk.SizeGroup(mode=Gtk.SizeGroupMode.HORIZONTAL)
self.set_border_width(12)
self.set_spacing(18)
def add_section(self, title, description = None):
"Adds an input section to the notebook"
section = InputSection(title, description, self.sizegroup)
self.pack_start(section, False, False, 0)
return section
class ScrolledWindow(Gtk.ScrolledWindow):
"A scrolled window for partially displaying a child widget"
def __init__(self, contents = None):
Gtk.ScrolledWindow.__init__(self)
self.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)
if contents is not None:
self.add(contents)
class Toolbar(Gtk.Toolbar):
"A Toolbar subclass"
def append_space(self):
"Appends a space to the toolbar"
space = Gtk.SeparatorToolItem()
space.set_draw(False)
self.insert(space, -1)
def append_widget(self, widget, tooltip = None):
"Appends a widget to the toolbar"
toolitem = Gtk.ToolItem()
toolitem.add(widget)
if tooltip != None:
toolitem.set_tooltip_text(tooltip)
self.insert(toolitem, -1)
class InputSection(VBox):
"A section of input fields"
def __init__(self, title = None, description = None, sizegroup = None):
VBox.__init__(self)
self.title = None
self.desc = None
self.sizegroup = sizegroup
if title is not None:
self.title = Label("<span weight=\"bold\">%s</span>" % util.escape_markup(title))
self.pack_start(self.title, False, True, 0)
if description is not None:
self.desc = Label(util.escape_markup(description))
self.pack_start(self.desc, False, True, 0)
if sizegroup is None:
self.sizegroup = Gtk.SizeGroup(mode=Gtk.SizeGroupMode.HORIZONTAL)
def append_widget(self, title, widget, indent = True):
"Adds a widget to the section"
row = HBox()
row.set_spacing(12)
self.pack_start(row, False, False, 0)
if self.title is not None and indent == True:
row.pack_start(Label(""), False, False, 0)
if title is not None:
label = Label("%s:" % util.escape_markup(title))
self.sizegroup.add_widget(label)
row.pack_start(label, False, False, 0)
row.pack_start(widget, True, True, 0)
def clear(self):
"Removes all widgets"
for child in self.get_children():
if child not in (self.title, self.desc):
child.destroy()
# DISPLAY WIDGETS #
class EventBox(Gtk.EventBox):
"A container which handles events for a widget (for tooltips etc)"
def __init__(self, widget = None):
Gtk.EventBox.__init__(self)
if widget is not None:
self.add(widget)
class Image(Gtk.Image):
"A widget for displaying an image"
def __init__(self, stock = None, size = None):
Gtk.Image.__init__(self)
if stock is not None:
self.set_from_icon_name(stock, size)
class ImageLabel(HBox):
"A label with an image"
def __init__(self, text = None, stock = None, size = ICON_SIZE_LABEL):
HBox.__init__(self)
self.image = Image()
self.pack_start(self.image, False, True, 0)
self.label = Label(text)
self.pack_start(self.label, True, True, 0)
if text != None:
self.set_text(text)
if stock != None:
self.set_stock(stock, size)
def set_ellipsize(self, ellipsize):
"Sets label ellisization"
self.label.set_ellipsize(ellipsize)
def set_stock(self, stock, size):
"Sets the image"
self.image.set_from_icon_name(stock, size)
def set_text(self, text):
"Sets the label text"
self.label.set_text(text)
class Label(Gtk.Label):
"A text label"
def __init__(self, text = None, justify = Gtk.Justification.LEFT):
Gtk.Label.__init__(self)
self.set_text(text)
self.set_justify(justify)
self.set_use_markup(True)
self.set_line_wrap(True)
self.set_valign(Gtk.Align.CENTER)
if justify == Gtk.Justification.LEFT:
self.set_halign(Gtk.Align.START)
elif justify == Gtk.Justification.CENTER:
self.set_halign(Gtk.Align.CENTER)
elif justify == Gtk.Justification.RIGHT:
self.set_halign(Gtk.Align.END)
def set_text(self, text):
"Sets the text of the label"
if text is None:
Gtk.Label.set_text(self, "")
else:
Gtk.Label.set_markup(self, text)
class PasswordLabel(EventBox):
"A label for displaying passwords"
def __init__(self, password = "", cfg = None, clipboard = None, justify = Gtk.Justification.LEFT): # nosec
EventBox.__init__(self)
self.password = util.unescape_markup(password)
self.config = cfg
self.clipboard = clipboard
self.label = Label(util.escape_markup(self.password), justify)
self.label.set_selectable(True)
self.add(self.label)
self.show_password(cfg.get_boolean("view-passwords"))
self.config.connect('changed::view-passwords', lambda w, k: self.show_password(w.get_boolean(k)))
self.connect("button-press-event", self.__cb_button_press)
self.connect("drag-data-get", self.__cb_drag_data_get)
def __cb_drag_data_get(self, widget, context, selection, info, timestamp, data = None):
"Provides data for a drag operation"
selection.set_text(self.password, -1)
def __cb_button_press(self, widget, data = None):
"Populates the popup menu"
if self.label.get_selectable() == True:
return False
elif data.button == 3:
menu = Menu()
menuitem = ImageMenuItem(Gtk.STOCK_COPY, _('Copy password'))
menuitem.connect("activate", lambda w: self.clipboard.set([self.password], True))
menu.append(menuitem)
menu.show_all()
menu.popup_at_pointer(data)
return True
def set_ellipsize(self, ellipsize):
"Sets ellipsize for the label"
self.label.set_ellipsize(ellipsize)
def show_password(self, show = True):
"Sets whether to display the password"
if show == True:
self.label.set_text(util.escape_markup(self.password))
self.label.set_selectable(True)
self.drag_source_unset()
else:
self.label.set_text(Gtk.Entry().get_invisible_char()*6)
self.label.set_selectable(False)
self.drag_source_set(
Gdk.ModifierType.BUTTON1_MASK,
[
Gtk.TargetEntry.new("text/plain", 0, 0),
Gtk.TargetEntry.new("TEXT", 0, 1),
Gtk.TargetEntry.new("STRING", 0, 2),
Gtk.TargetEntry.new("COMPOUND TEXT", 0, 3),
Gtk.TargetEntry.new("UTF8_STRING", 0, 4)
],
Gdk.DragAction.COPY
)
class EditableTextView(Gtk.ScrolledWindow):
"An editable text view"
def __init__(self, buffer = None, text = None):
Gtk.ScrolledWindow.__init__(self)
self.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)
self.set_shadow_type(Gtk.ShadowType.ETCHED_OUT)
self.textview = Gtk.TextView(buffer=buffer)
self.textbuffer = self.textview.get_buffer()
self.add(self.textview)
if text is not None:
self.textview.get_buffer().set_text(text)
def set_text(self, text):
"Sets the entry contents"
if text is None:
self.textbuffer.set_text("")
self.textbuffer.set_text(text)
def get_text(self):
"Returns the text of the entry"
return self.textbuffer.get_text(self.textbuffer.get_start_iter(), self.textbuffer.get_end_iter(), False)
class TextView(Gtk.TextView):
"A text view"
def __init__(self, buffer = None, text = None):
Gtk.TextView.__init__(self)
self.set_buffer(buffer)
self.set_editable(False)
self.set_wrap_mode(Gtk.WrapMode.NONE)
self.set_cursor_visible(False)
self.modify_font(Pango.FontDescription("Monospace"))
if text is not None:
self.get_buffer().set_text(text)
# TEXT ENTRIES #
class Entry(Gtk.Entry):
"A normal text entry"
def __init__(self, text = None):
Gtk.Entry.__init__(self)
self.set_activates_default(True)
self.set_text(text)
def set_text(self, text):
"Sets the entry contents"
if text is None:
text = ""
Gtk.Entry.set_text(self, text)
class FileEntry(HBox):
"A file entry"
def __init__(self, title = None, file = None, type = Gtk.FileChooserAction.OPEN):
HBox.__init__(self)
self.title = title is not None and title or _('Select File')
self.type = type
self.entry = Entry()
self.entry.connect("changed", lambda w: self.emit("changed"))
self.pack_start(self.entry, True, True, 0)
self.button = Button(_('Browse...'), self.__cb_filesel)
self.pack_start(self.button, False, False, 0)
if file is not None:
self.set_filename(file)
def __cb_filesel(self, widget, data = None):
"Displays a file selector when Browse is pressed"
try:
fsel = dialog.FileSelector(None, self.title, self.type)
file = self.get_filename()
if file != None:
fsel.set_filename(file)
self.set_filename(fsel.run())
except dialog.CancelError:
pass
def get_filename(self):
"Gets the current filename"
return io.file_normpath(self.entry.get_text())
def get_text(self):
"Wrapper to emulate Entry"
return self.entry.get_text()
def set_filename(self, filename):
"Sets the current filename"
self.entry.set_text(io.file_normpath(filename))
self.entry.set_position(-1)
def set_text(self, text):
"Wrapper to emulate Entry"
self.entry.set_text(text)
GObject.type_register(FileEntry)
GObject.signal_new("changed", FileEntry, GObject.SignalFlags.ACTION,
GObject.TYPE_BOOLEAN, ())
class PasswordEntry(Gtk.Entry):
"An entry for editing a password (follows the 'show passwords' preference)"
def __init__(self, password = None, cfg = None, clipboard = None):
Gtk.Entry.__init__(self)
self.set_visibility(False)
if password:
self.set_text(password)
self.autocheck = True
self.config = cfg
self.clipboard = clipboard
self.connect("changed", self.__cb_check_password)
self.connect("populate-popup", self.__cb_popup)
if cfg != None:
self.config.bind('view-passwords', self, "visibility", Gio.SettingsBindFlags.DEFAULT)
def __cb_check_password(self, widget, data = None):
"Callback for changed, checks the password"
if self.autocheck == False:
return
password = self.get_text()
if len(password) == 0:
self.set_icon_from_icon_name(Gtk.EntryIconPosition.SECONDARY, None)
else:
try:
util.check_password(password)
except ValueError as reason:
self.set_password_strong(False, _('The password %s') % str(reason))
else:
self.set_password_strong(True, _('The password seems good'))
def __cb_popup(self, widget, menu):
"Populates the popup menu"
if self.clipboard != None:
menuitem = ImageMenuItem(Gtk.STOCK_COPY, _('Copy password'))
menuitem.connect("activate", lambda w: self.clipboard.set([self.get_text()], True))
menu.insert(menuitem, 2)
menu.show_all()
def set_password_strong(self, strong, reason = ""):
"Sets whether the password is strong or not"
self.set_icon_from_icon_name(Gtk.EntryIconPosition.SECONDARY, strong and STOCK_PASSWORD_STRONG or STOCK_PASSWORD_WEAK)
self.set_icon_tooltip_text(Gtk.EntryIconPosition.SECONDARY, reason)
class PasswordEntryGenerate(HBox):
"A password entry with a generator button"
def __init__(self, password = None, cfg = None, clipboard = None):
HBox.__init__(self)
self.config = cfg
self.pwentry = PasswordEntry(password, cfg, clipboard)
self.pack_start(self.pwentry, True, True, 0)
self.button = Button(_('Generate'), lambda w: self.generate())
self.pack_start(self.button, False, False, 0)
self.entry = self.pwentry
def generate(self):
"Generates a password for the entry"
password = util.generate_password(self.config.get_int("passwordgen-length"), self.config.get_boolean("passwordgen-punctuation"))
self.pwentry.set_text(password)
def get_text(self):
"Wrapper for the entry"
return self.pwentry.get_text()
def set_text(self, text):
"Wrapper for the entry"
self.pwentry.set_text(text)
class SpinEntry(Gtk.SpinButton):
"An entry for numbers"
def __init__(self, adjustment = None, climb_rate = 0.0, digits = 0):
Gtk.SpinButton.__init__(self)
self.configure(adjustment, climb_rate, digits)
self.set_increments(1, 5)
self.set_range(0, 100000)
self.set_numeric(True)
# BUTTONS #
class Button(Gtk.Button):
"A normal button"
def __init__(self, label, callback = None):
Gtk.Button.__init__(self, label=label)
if callback is not None:
self.connect("clicked", callback)
class CheckButton(Gtk.CheckButton):
"A checkbutton"
def __init__(self, label = None):
Gtk.CheckButton.__init__(self, label=label)
class DropDown(Gtk.ComboBox):
"A dropdown button"
def __init__(self, icons = False):
Gtk.ComboBox.__init__(self)
self.model = Gtk.ListStore(GObject.TYPE_STRING, GObject.TYPE_STRING, GObject.TYPE_PYOBJECT)
self.set_model(self.model)
if icons == True:
cr = Gtk.CellRendererPixbuf()
cr.set_fixed_size(Gtk.icon_size_lookup(ICON_SIZE_DROPDOWN)[1] + 5, -1)
self.pack_start(cr, False)
self.add_attribute(cr, "icon-name", 1)
cr = Gtk.CellRendererText()
self.pack_start(cr, True)
self.add_attribute(cr, "text", 0)
self.connect("realize", self.__cb_show)
def __cb_show(self, widget, data = None):
"Callback for when widget is shown"
if self.get_active() == -1:
self.set_active(0)
def append_item(self, text, stock = None, data = None):
"Appends an item to the dropdown"
self.model.append((text, stock, data))
def delete_item(self, index):
"Removes an item from the dropdown"
if self.model.iter_n_children(None) > index:
iter = self.model.iter_nth_child(None, index)
self.model.remove(iter)
def get_active_item(self):
"Returns a tuple with data for the current item"
iter = self.model.iter_nth_child(None, self.get_active())
return self.model.get(iter, 0, 1, 2)
def get_item(self, index):
"Returns data for an item"
return self.model.get(self.model.iter_nth_child(None, index), 0, 1, 2)
def get_num_items(self):
"Returns the number of items in the dropdown"
return self.model.iter_n_children(None)
def insert_item(self, index, text, stock = None, data = None):
"Inserts an item in the dropdown"
self.model.insert(index, (text, stock, data))
class EntryDropDown(DropDown):
"An entry type dropdown"
def __init__(self):
DropDown.__init__(self, True)
for e in entry.ENTRYLIST:
if e != entry.FolderEntry:
self.append_item(e().typename, e().icon, e)
def get_active_type(self):
"Get the currently active type"
item = self.get_active_item()
if item is not None:
return item[2]
def set_active_type(self, entrytype):
"Set the active type"
for i in range(self.model.iter_n_children(None)):
iter = self.model.iter_nth_child(None, i)
if self.model.get_value(iter, 2) == entrytype:
self.set_active(i)
class FileButton(Gtk.FileChooserButton):
"A file chooser button"
def __init__(self, title = None, file = None, type = Gtk.FileChooserAction.OPEN):
Gtk.FileChooserButton.__init__(self, title)
self.set_action(type)
self.set_local_only(False)
if file != None:
self.set_filename(file)
def get_filename(self):
"Gets the filename"
return io.file_normpath(self.get_uri())
def set_filename(self, filename):
"Sets the filename"
filename = io.file_normpath(filename)
if filename != io.file_normpath(self.get_filename()):
Gtk.FileChooserButton.set_filename(self, filename)
class LinkButton(Gtk.LinkButton):
"A link button"
def __init__(self, url, label):
Gtk.LinkButton.__init__(self, uri=url, label=label)
self.set_halign(Gtk.Align.START)
self.label = self.get_children()[0]
"If URI is too long reduce it for the label"
if len(label) > 60:
self.label.set_text(label[0:59] + " (...)")
def set_ellipsize(self, ellipsize):
"Sets ellipsize for label"
self.label.set_ellipsize(ellipsize)
def set_justify(self, justify):
"Sets justify for label"
self.label.set_justify(justify)
class RadioButton(Gtk.RadioButton):
"A radio button"
def __init__(self, group, label):
Gtk.RadioButton.__init__(self, group, label)
# MENUS AND MENU ITEMS #
class ImageMenuItem(Gtk.ImageMenuItem):
"A menuitem with a stock icon"
def __init__(self, stock, text = None):
Gtk.ImageMenuItem.__init__(self, stock)
self.label = self.get_children()[0]
self.image = self.get_image()
if text is not None:
self.set_text(text)
def set_stock(self, stock):
"Set the stock item to use as icon"
self.image.set_from_icon_name(stock, Gtk.IconSize.MENU)
def set_text(self, text):
"Set the item text"
self.label.set_text(text)
class Menu(Gtk.Menu):
"A menu"
def __init__(self):
Gtk.Menu.__init__(self)
# MISCELLANEOUS WIDGETS #
class TreeView(Gtk.TreeView):
"A tree display"
def __init__(self, model):
Gtk.TreeView.__init__(self, model=model)
self.set_headers_visible(False)
self.model = model
self.__cbid_drag_motion = None
self.__cbid_drag_end = None
self.selection = self.get_selection()
self.selection.set_mode(Gtk.SelectionMode.MULTIPLE)
self.connect("button-press-event", self.__cb_buttonpress)
self.connect("key-press-event", self.__cb_keypress)
def __cb_buttonpress(self, widget, data):
"Callback for handling mouse clicks"
path = self.get_path_at_pos(int(data.x), int(data.y))
# handle click outside entry
if path is None:
self.unselect_all()
# handle doubleclick
if data.button == 1 and data.type == Gdk.EventType._2BUTTON_PRESS and path != None:
iter = self.model.get_iter(path[0])
self.toggle_expanded(iter)
if iter != None:
self.emit("doubleclick", iter)
# display popup on right-click
elif data.button == 3:
if path != None and self.selection.iter_is_selected(self.model.get_iter(path[0])) == False:
self.set_cursor(path[0], path[1], False)
self.emit("popup", data)
return True
# handle drag-and-drop of multiple rows
elif self.__cbid_drag_motion is None and data.button in (1, 2) and data.type == Gdk.EventType.BUTTON_PRESS and path != None and self.selection.iter_is_selected(self.model.get_iter(path[0])) == True and len(self.get_selected()) > 1:
self.__cbid_drag_motion = self.connect("motion-notify-event", self.__cb_drag_motion, data.copy())
self.__cbid_drag_end = self.connect("button-release-event", self.__cb_button_release, data.copy())
return True
def __cb_button_release(self, widget, data, userdata = None):
"Ends a drag"
self.emit("button-press-event", userdata)
self.__drag_check_end()
def __cb_drag_motion(self, widget, data, userdata = None):
"Monitors drag motion"
if self.drag_check_threshold(int(userdata.x), int(userdata.y), int(data.x), int(data.y)) == True:
self.__drag_check_end()
uritarget = Gtk.TargetEntry.new("revelation/treerow", Gtk.TargetFlags.SAME_APP | Gtk.TargetFlags.SAME_WIDGET, 0)
self.drag_begin_with_coordinates(Gtk.TargetList([uritarget]), Gdk.DragAction.MOVE, userdata.button.button, userdata, userdata.x, userdata.y)
def __cb_keypress(self, widget, data = None):
"Callback for handling key presses"
# expand/collapse node on space
if data.keyval == Gdk.KEY_space:
self.toggle_expanded(self.get_active())
def __drag_check_end(self):
"Ends a drag check"
self.disconnect(self.__cbid_drag_motion)
self.disconnect(self.__cbid_drag_end)
self.__cbid_drag_motion = None
self.__cbid_drag_end = None
def collapse_row(self, iter):
"Collapse a tree row"
Gtk.TreeView.collapse_row(self, self.model.get_path(iter))
def expand_row(self, iter):
"Expand a tree row"
if iter is not None and self.model.iter_n_children(iter) > 0:
Gtk.TreeView.expand_row(self, self.model.get_path(iter), False)
def expand_to_iter(self, iter):
"Expand all items up to and including a given iter"
path = self.model.get_path(iter)
for i in range(len(path)):
iter = self.model.get_iter(path[0:i])
self.expand_row(iter)
def get_active(self):
"Get the currently active row"
if self.model is None:
return None
iter = self.model.get_iter(self.get_cursor()[0])
if iter is None or self.selection.iter_is_selected(iter) == False:
return None
return iter
def get_selected(self):
"Get a list of currently selected rows"
list = []
self.selection.selected_foreach(lambda model, path, iter: list.append(iter))
return list
def select(self, iter):
"Select a particular row"
if iter is None:
self.unselect_all()
else:
self.expand_to_iter(iter)
self.set_cursor(self.model.get_path(iter))
def select_all(self):
"Select all rows in the tree"
self.selection.select_all()
self.selection.emit("changed")
self.emit("cursor_changed")
def set_model(self, model):
"Change the tree model which is being displayed"
Gtk.TreeView.set_model(self, model)
self.model = model
def toggle_expanded(self, iter):
"Toggle the expanded state of a row"
if iter is None:
return
elif self.row_expanded(self.model.get_path(iter)):
self.collapse_row(iter)
else:
self.expand_row(iter)
def unselect_all(self):
"Unselect all rows in the tree"
self.selection.unselect_all()
self.selection.emit("changed")
self.emit("cursor_changed")
self.emit("unselect_all")
GObject.signal_new("doubleclick", TreeView, GObject.SignalFlags.ACTION,
GObject.TYPE_BOOLEAN, (GObject.TYPE_PYOBJECT, ))
GObject.signal_new("popup", TreeView, GObject.SignalFlags.ACTION,
GObject.TYPE_BOOLEAN, (GObject.TYPE_PYOBJECT, ))
class EntryTree(TreeView):
"An entry tree"
def __init__(self, entrystore):
TreeView.__init__(self, entrystore)
column = Gtk.TreeViewColumn()
self.append_column(column)
cr = Gtk.CellRendererPixbuf()
column.pack_start(cr, False)
column.add_attribute(cr, "icon-name", data.COLUMN_ICON)
cr.set_property("stock-size", ICON_SIZE_TREEVIEW)
cr = Gtk.CellRendererText()
column.pack_start(cr, True)
column.add_attribute(cr, "text", data.COLUMN_NAME)
self.connect("doubleclick", self.__cb_doubleclick)
self.connect("row-expanded", self.__cb_row_expanded)
self.connect("row-collapsed", self.__cb_row_collapsed)
def __cb_doubleclick(self, widget, iter):
"Stop doubleclick emission on folder"
if type(self.model.get_entry(iter)) == entry.FolderEntry:
self.stop_emission("doubleclick")
def __cb_row_collapsed(self, object, iter, extra):
"Updates folder icons when collapsed"
self.model.folder_expanded(iter, False)
def __cb_row_expanded(self, object, iter, extra):
"Updates folder icons when expanded"
# make sure all children are collapsed (some may have lingering expand icons)
for i in range(self.model.iter_n_children(iter)):
child = self.model.iter_nth_child(iter, i)
if self.row_expanded(self.model.get_path(child)) == False:
self.model.folder_expanded(child, False)
self.model.folder_expanded(iter, True)
def set_model(self, model):
"Sets the model displayed by the tree view"
TreeView.set_model(self, model)
if model is None:
return
for i in range(model.iter_n_children(None)):
model.folder_expanded(model.iter_nth_child(None, i), False)
class Statusbar(Gtk.Statusbar):
"An application statusbar"
def __init__(self):
Gtk.Statusbar.__init__(self)
self.contextid = self.get_context_id("statusbar")
def clear(self):
"Clears the statusbar"
self.pop(self.contextid)
def set_status(self, text):
"Displays a text in the statusbar"
self.clear()
self.push(self.contextid, text or "")
# ACTION HANDLING #
class Action(Gtk.Action):
"UI Manager Action"
def __init__(self, name, label = None, tooltip = None, stock = "", important = False):
Gtk.Action.__init__(self, name, label, tooltip, stock)
if important == True:
self.set_property("is-important", True)
class ActionGroup(Gtk.ActionGroup):
"UI Manager Actiongroup"
def add_action(self, action, accel = None):
"Adds an action to the actiongroup"
if accel is None:
Gtk.ActionGroup.add_action(self, action)
else:
self.add_action_with_accel(action, accel)
class ToggleAction(Gtk.ToggleAction):
"A toggle action item"
def __init__(self, name, label, tooltip = None, stock = None):
Gtk.ToggleAction.__init__(self, name, label, tooltip, stock)
class UIManager(Gtk.UIManager):
"UI item manager"
def __init__(self):
Gtk.UIManager.__init__(self)
self.connect("connect-proxy", self.__cb_connect_proxy)
def __cb_connect_proxy(self, uimanager, action, widget):
"Callback for connecting proxies to an action"
if type(widget) in (Gtk.MenuItem, Gtk.ImageMenuItem, Gtk.CheckMenuItem):
widget.tooltip = action.get_property("tooltip")
else:
widget.set_property("label", widget.get_property("label").replace("...", ""))
def add_ui_from_file(self, file):
"Loads ui from a file"
try:
Gtk.UIManager.add_ui_from_file(self, file)
except GObject.GError:
raise IOError
def append_action_group(self, actiongroup):
"Appends an action group"
Gtk.UIManager.insert_action_group(self, actiongroup, len(self.get_action_groups()))
def get_action(self, name):
"Looks up an action in the managers actiongroups"
for actiongroup in self.get_action_groups():
action = actiongroup.get_action(name)
if action is not None:
return action
def get_action_group(self, name):
"Returns the named action group"
for actiongroup in self.get_action_groups():
if actiongroup.get_name() == name:
return actiongroup
# APPLICATION COMPONENTS #
class AppWindow(Gtk.ApplicationWindow):
"An application window"
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
class App(Gtk.Application):
"An application"
def __init__(self, appname):
Gtk.Application.__init__(self,
application_id='info.olasagasti.revelation')
self.toolbars = {}
def __connect_menu_statusbar(self, menu):
"Connects a menus items to the statusbar"
for item in menu.get_children():
if isinstance(item, Gtk.MenuItem) == True:
item.connect("select", self.cb_menudesc, True)
item.connect("deselect", self.cb_menudesc, False)
def cb_menudesc(self, item, show):
"Displays menu descriptions in the statusbar"
if show == True:
self.statusbar.set_status(item.get_label())
else:
self.statusbar.clear()
def __cb_toolbar_hide(self, widget, name):
"Hides the toolbar dock when the toolbar is hidden"
if name in self.toolbars:
self.toolbars[name].hide()
def __cb_toolbar_show(self, widget, name):
"Shows the toolbar dock when the toolbar is shown"
if name in self.toolbars:
self.toolbars[name].show()
def add_toolbar(self, toolbar, name, band):
"Adds a toolbar"
self.toolbars[name] = toolbar
self.main_vbox.pack_start(toolbar, False, True, 0)
toolbar.connect("show", self.__cb_toolbar_show, name)
toolbar.connect("hide", self.__cb_toolbar_hide, name)
toolbar.show_all()
def get_title(self):
"Returns the app title"
title = Gtk.Window.get_title(self.window)
return title.replace(" - " + config.APPNAME, "")
def popup(self, menu, button, time):
"Displays a popup menu"
# get Gtk.Menu
gmenu = Gtk.Menu.new_from_model(menu)
gmenu.attach_to_widget(self.window, None)
# transfer the tooltips from Gio.Menu to Gtk.Menu
menu_item_index = 0
menu_items = gmenu.get_children()
for sect in range(menu.get_n_items()):
for item in range(menu.get_item_link(sect, 'section').get_n_items()):
tooltip_text = menu.get_item_link(sect, 'section').get_item_attribute_value(item, 'tooltip', None)
if tooltip_text:
tooltip_text = tooltip_text.unpack()
menu_items[menu_item_index].set_tooltip_text(tooltip_text)
menu_item_index += 1
# skip section separator
menu_item_index += 1
self.__connect_menu_statusbar(gmenu)
gmenu.popup_at_pointer()
def set_menus(self, menubar):
"Sets the menubar for the application"
for item in menubar.get_children():
self.__connect_menu_statusbar(item.get_submenu())
self.main_vbox.pack_start(menubar, False, True, 0)
def set_title(self, title):
"Sets the window title"
Gtk.Window.set_title(self.window, title + " - " + config.APPNAME)
def set_toolbar(self, toolbar):
"Sets the application toolbar"
self.main_vbox.pack_start(toolbar, False, True, 0)
toolbar.connect("show", self.__cb_toolbar_show, "Toolbar")
toolbar.connect("hide", self.__cb_toolbar_hide, "Toolbar")
def set_contents(self, widget):
self.main_vbox.pack_start(widget, True, True, 0)
class EntryView(VBox):
"A component for displaying an entry"
def __init__(self, cfg = None, clipboard = None):
VBox.__init__(self)
self.set_spacing(12)
self.set_border_width(12)
self.config = cfg
self.clipboard = clipboard
self.entry = None
def clear(self, force = False):
"Clears the data view"
self.entry = None
for child in self.get_children():
child.destroy()
def display_entry(self, e):
"Displays info about an entry"
self.clear()
self.entry = e
if self.entry is None:
return
# set up metadata display
metabox = VBox()
self.pack_start(metabox)
label = ImageLabel(
"<span size=\"large\" weight=\"bold\">%s</span>" % util.escape_markup(e.name),
e.icon, ICON_SIZE_DATAVIEW
)
label.set_halign(Gtk.Align.CENTER)
label.set_valign(Gtk.Align.CENTER)
metabox.pack_start(label, True, True, 0)
label = Label("<span weight=\"bold\">%s</span>%s" % (e.typename + (e.description != "" and ": " or ""), util.escape_markup(e.description)), Gtk.Justification.CENTER)
metabox.pack_start(label, True, True, 0)
# set up field list
fields = [field for field in e.fields if field.value != ""]
if len(fields) > 0:
table = Gtk.Grid()
self.pack_start(table)
table.set_column_spacing(10)
table.set_row_spacing(5)
for rowindex, field in zip(range(len(fields)), fields):
label = Label("<span weight=\"bold\">%s: </span>" % util.escape_markup(field.name))
label.set_hexpand(True)
table.attach(label, 0, rowindex, 1, 1)
widget = generate_field_display_widget(field, self.config, self.clipboard)
widget.set_hexpand(True)
table.attach(widget, 1, rowindex, 1, 1)
# notes
label = Label("<span weight=\"bold\">%s</span>%s" % ((e.notes != "" and _("Notes: ") or ""),
util.escape_markup(e.notes)), Gtk.Justification.LEFT)
self.pack_start(label)
# display updatetime
if type(e) != entry.FolderEntry:
label = Label((_('Updated %s ago') + "\n%s") % (util.time_period_rough(e.updated, time.time()), time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(e.updated))), Gtk.Justification.CENTER)
self.pack_start(label)
self.show_all()
def pack_start(self, widget):
"Adds a widget to the data view"
widget.set_halign(Gtk.Align.CENTER)
widget.set_valign(Gtk.Align.CENTER)
VBox.pack_start(self, widget, False, False, 0)
class Searchbar(Toolbar):
"A toolbar for easy searching"
def __init__(self):
Toolbar.__init__(self)
self.entry = Gtk.SearchEntry()
self.entry.set_tooltip_text(_('Text to search for'))
self.dropdown = EntryDropDown()
self.dropdown.insert_item(0, _('Any type'), "help-about")
box = Gtk.Box.new(Gtk.Orientation.HORIZONTAL, 0)
Gtk.StyleContext.add_class(box.get_style_context(), "linked")
self.button_prev = Gtk.Button.new_from_icon_name(STOCK_PREVIOUS,
Gtk.IconSize.BUTTON)
self.button_prev.set_tooltip_text(_('Find the previous match'))
self.button_next = Gtk.Button.new_from_icon_name(STOCK_NEXT,
Gtk.IconSize.BUTTON)
self.button_next.set_tooltip_text(_('Find the next match'))
box.add(self.entry)
box.add(self.button_prev)
box.add(self.button_next)
box.add(self.dropdown)
self.append_widget(box)
self.connect("show", self.__cb_show)
self.entry.connect("changed", self.__cb_entry_changed)
self.entry.connect("key-press-event", self.__cb_key_press)
self.button_next.set_sensitive(False)
self.button_prev.set_sensitive(False)
def __cb_entry_changed(self, widget, data = None):
"Callback for entry changes"
s = self.entry.get_text() != ""
self.button_next.set_sensitive(s)
self.button_prev.set_sensitive(s)
def __cb_key_press(self, widget, data = None):
"Callback for key presses"
# return
if data.keyval == Gdk.KEY_Return and widget.get_text() != "":
if (data.state & Gdk.ModifierType.SHIFT_MASK) == Gdk.ModifierType.SHIFT_MASK:
self.button_prev.activate()
else:
self.button_next.activate()
return True
def __cb_show(self, widget, data = None):
"Callback for widget display"
self.entry.select_region(0, -1)
self.entry.grab_focus()
| gpl-2.0 | 1,417,248,840,331,954,200 | 30.297526 | 239 | 0.599339 | false | 3.599356 | false | false | false |
conklinbd/MovementAnalysis | TemplateInstall/PortalDeploy/arcrest/ags/featureservice.py | 1 | 15911 | """
Contains information regarding an ArcGIS Server Feature Server
"""
from re import search
from .._abstract.abstract import BaseAGSServer, BaseSecurityHandler
from ..security import security
import layer
import json
from ..common.geometry import SpatialReference
from ..common.general import FeatureSet
from ..common.filters import LayerDefinitionFilter, GeometryFilter, TimeFilter
########################################################################
class FeatureService(BaseAGSServer):
""" contains information about a feature service """
_url = None
_currentVersion = None
_serviceDescription = None
_hasVersionedData = None
_supportsDisconnectedEditing = None
_hasStaticData = None
_maxRecordCount = None
_supportedQueryFormats = None
_capabilities = None
_description = None
_copyrightText = None
_spatialReference = None
_initialExtent = None
_fullExtent = None
_allowGeometryUpdates = None
_units = None
_syncEnabled = None
_syncCapabilities = None
_editorTrackingInfo = None
_documentInfo = None
_layers = None
_tables = None
_enableZDefaults = None
_zDefault = None
_proxy_url = None
_proxy_port = None
_securityHandler = None
_json = None
_json_dict = None
#----------------------------------------------------------------------
def __init__(self, url, securityHandler=None,
initialize=False, proxy_url=None, proxy_port=None):
"""Constructor"""
self._proxy_url = proxy_url
self._proxy_port = proxy_port
self._url = url
if securityHandler is not None:
self._securityHandler = securityHandler
elif securityHandler is None:
pass
else:
raise AttributeError("Invalid Security Handler")
if not securityHandler is None and \
hasattr(securityHandler, 'referer_url'):
self._referer_url = securityHandler.referer_url
if initialize:
self.__init()
#----------------------------------------------------------------------
def __init(self):
""" loads the data into the class """
params = {"f": "json"}
json_dict = self._do_get(self._url, params,
securityHandler=self._securityHandler,
proxy_port=self._proxy_port,
proxy_url=self._proxy_url)
self._json_dict = json_dict
self._json = json.dumps(self._json_dict)
attributes = [attr for attr in dir(self)
if not attr.startswith('__') and \
not attr.startswith('_')]
for k,v in json_dict.iteritems():
if k in attributes:
setattr(self, "_"+ k, v)
else:
print k, " - attribute not implemented for Feature Service."
#----------------------------------------------------------------------
@property
def administration(self):
"""returns the service admin object (if accessible)"""
from ..manageags._services import AGSService
url = self._url
res = search("/rest/", url).span()
addText = "/admin/"
part1 = url[:res[1]].lower().replace('/rest/', '')
part2 = url[res[1]:].lower().replace('/featureserver', ".mapserver")
adminURL = "%s%s%s" % (part1, addText, part2)
return AGSService(url=adminURL,
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port,
initialize=False)
#----------------------------------------------------------------------
@property
def itemInfo(self):
"""gets the item's info"""
params = {"f" : "json"}
url = self._url + "/info/iteminfo"
return self._do_get(url=url, param_dict=params,
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
#----------------------------------------------------------------------
def downloadThumbnail(self, outPath):
"""downloads the items's thumbnail"""
url = self._url + "/info/thumbnail"
params = {}
return self._download_file(url=url,
save_path=outPath,
securityHandler=self._securityHandler,
file_name=None,
param_dict=params,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
#----------------------------------------------------------------------
def downloadMetadataFile(self, outPath):
"""downloads the metadata file to a given path"""
fileName = "metadata.xml"
url = self._url + "/info/metadata"
params = {}
return self._download_file(url=url,
save_path=outPath,
file_name=fileName,
param_dict=params,
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
#----------------------------------------------------------------------
def __str__(self):
"""returns object as a string"""
if self._json is None:
self.__init()
return self._json
#----------------------------------------------------------------------
def __iter__(self):
"""returns the JSON response in key/value pairs"""
if self._json_dict is None:
self.__init()
for k,v in self._json_dict.iteritems():
yield [k,v]
#----------------------------------------------------------------------
@property
def securityHandler(self):
""" gets the security handler """
return self._securityHandler
#----------------------------------------------------------------------
@securityHandler.setter
def securityHandler(self, value):
""" sets the security handler """
if isinstance(value, BaseSecurityHandler):
if isinstance(value, security.AGSTokenSecurityHandler):
self._securityHandler = value
else:
pass
elif value is None:
self._securityHandler = None
self._token = None
#----------------------------------------------------------------------
@property
def maxRecordCount(self):
"""returns the max record count"""
if self._maxRecordCount is None:
self.__init()
return self._maxRecordCount
#----------------------------------------------------------------------
@property
def supportedQueryFormats(self):
""""""
if self._supportedQueryFormats is None:
self.__init()
return self._supportedQueryFormats
#----------------------------------------------------------------------
@property
def capabilities(self):
""" returns a list of capabilities """
if self._capabilities is None:
self.__init()
return self._capabilities
#----------------------------------------------------------------------
@property
def description(self):
""" returns the service description """
if self._description is None:
self.__init()
return self._description
#----------------------------------------------------------------------
@property
def copyrightText(self):
""" returns the copyright text """
if self._copyrightText is None:
self.__init()
return self._copyrightText
#----------------------------------------------------------------------
@property
def spatialReference(self):
""" returns the spatial reference """
if self._spatialReference is None:
self.__init()
return self._spatialReference
#----------------------------------------------------------------------
@property
def initialExtent(self):
""" returns the initial extent of the feature service """
if self._initialExtent is None:
self.__init()
return self._initialExtent
#----------------------------------------------------------------------
@property
def fullExtent(self):
""" returns the full extent of the feature service """
if self._fullExtent is None:
self.__init()
return self._fullExtent
#----------------------------------------------------------------------
@property
def allowGeometryUpdates(self):
""" informs the user if the data allows geometry updates """
if self._allowGeometryUpdates is None:
self.__init()
return self._allowGeometryUpdates
#----------------------------------------------------------------------
@property
def units(self):
""" returns the measurement unit """
if self._units is None:
self.__init()
return self._units
#----------------------------------------------------------------------
@property
def syncEnabled(self):
""" informs the user if sync of data can be performed """
if self._syncEnabled is None:
self.__init()
return self._syncEnabled
#----------------------------------------------------------------------
@property
def syncCapabilities(self):
""" type of sync that can be performed """
if self._syncCapabilities is None:
self.__init()
return self._syncCapabilities
#----------------------------------------------------------------------
@property
def editorTrackingInfo(self):
""""""
if self._editorTrackingInfo is None:
self.__init()
return self._editorTrackingInfo
#----------------------------------------------------------------------
@property
def documentInfo(self):
""""""
if self._documentInfo is None:
self.__init()
return self._documentInfo
#----------------------------------------------------------------------
@property
def layers(self):
""" gets the layers for the feature service """
if self._layers is None:
self.__init()
self._getLayers()
return self._layers
#----------------------------------------------------------------------
def _getLayers(self):
""" gets layers for the featuer service """
params = {"f": "json"}
json_dict = self._do_get(self._url, params,
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
self._layers = []
if json_dict.has_key("layers"):
for l in json_dict["layers"]:
self._layers.append(
layer.FeatureLayer(url=self._url + "/%s" % l['id'],
securityHandler=self._securityHandler,
proxy_port=self._proxy_port,
proxy_url=self._proxy_url)
)
#----------------------------------------------------------------------
@property
def tables(self):
"""lists the tables on the feature service"""
if self._tables is None:
self.__init()
return self._tables
#----------------------------------------------------------------------
@property
def enableZDefaults(self):
""""""
if self._enableZDefaults is None:
self.__init()
return self._enableZDefaults
#----------------------------------------------------------------------
@property
def zDefault(self):
""""""
if self._zDefault is None:
self.__init()
return self._zDefault
#----------------------------------------------------------------------
@property
def hasStaticData(self):
""""""
if self._hasStaticData is None:
self.__init()
return self._hasStaticData
#----------------------------------------------------------------------
@property
def currentVersion(self):
""" returns the map service current version """
if self._currentVersion is None:
self.__init()
return self._currentVersion
#----------------------------------------------------------------------
@property
def serviceDescription(self):
""" returns the serviceDescription of the map service """
if self._serviceDescription is None:
self.__init()
return self._serviceDescription
#----------------------------------------------------------------------
@property
def hasVersionedData(self):
""" returns boolean for versioned data """
if self._hasVersionedData is None:
self.__init()
return self._hasVersionedData
#----------------------------------------------------------------------
@property
def supportsDisconnectedEditing(self):
""" returns boolean is disconnecting editted supported """
if self._supportsDisconnectedEditing is None:
self.__init()
return self._supportsDisconnectedEditing
#----------------------------------------------------------------------
def query(self,
layerDefsFilter=None,
geometryFilter=None,
timeFilter=None,
returnGeometry=True,
returnIdsOnly=False,
returnCountOnly=False,
returnZ=False,
returnM=False,
outSR=None
):
"""
The Query operation is performed on a feature service resource
"""
qurl = self._url + "/query"
params = {"f": "json",
"returnGeometry": returnGeometry,
"returnIdsOnly": returnIdsOnly,
"returnCountOnly": returnCountOnly,
"returnZ": returnZ,
"returnM" : returnM}
if not layerDefsFilter is None and \
isinstance(layerDefsFilter, LayerDefinitionFilter):
params['layerDefs'] = layerDefsFilter.filter
if not geometryFilter is None and \
isinstance(geometryFilter, GeometryFilter):
gf = geometryFilter.filter
params['geometryType'] = gf['geometryType']
params['spatialRel'] = gf['spatialRel']
params['geometry'] = gf['geometry']
params['inSR'] = gf['inSR']
if not outSR is None and \
isinstance(outSR, SpatialReference):
params['outSR'] = outSR.asDictionary
if not timeFilter is None and \
isinstance(timeFilter, TimeFilter):
params['time'] = timeFilter.filter
res = self._do_get(url=qurl,
param_dict=params,
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port)
if returnIdsOnly == False and returnCountOnly == False:
if isinstance(res, str):
jd = json.loads(res)
return [FeatureSet.fromJSON(json.dumps(lyr)) for lyr in jd['layers']]
elif isinstance(res, dict):
return [FeatureSet.fromJSON(json.dumps(lyr)) for lyr in res['layers']]
else:
return res
return res
| apache-2.0 | -8,352,142,267,933,846,000 | 39.281013 | 86 | 0.452831 | false | 5.612346 | false | false | false |
wbonnet/lffs | toolkit/dft/build_firmware_update.py | 1 | 8151 | #
# The contents of this file are subject to the Apache 2.0 license you may not
# use this file except in compliance with the License.
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
#
#
# Copyright 2016 DFT project (http://www.debianfirmwaretoolkit.org).
# All rights reserved. Use is subject to license terms.
#
# Debian Firmware Toolkit is the new name of Linux Firmware From Scratch
# Copyright 2014 LFFS project (http://www.linuxfirmwarefromscratch.org).
#
#
# Contributors list :
#
# William Bonnet [email protected], [email protected]
#
#
""" This modules implements the functionnalities used to create the initramfs in charge of
setting up the firmware in memory at system boot.
"""
import logging
import os
import tarfile
from dft.cli_command import CliCommand
from dft.enumkey import Key
#
# Class BuildFirmwareUpdate
#
class BuildFirmwareUpdate(CliCommand):
"""This class implements method needed to create the archives containing
firmware update, and all the scripts needed at deployment.
"""
# -------------------------------------------------------------------------
#
# __init__
#
# -------------------------------------------------------------------------
def __init__(self, dft, project):
"""Default constructor
"""
# Initialize ancestor
CliCommand.__init__(self, dft, project)
# -------------------------------------------------------------------------
#
# build_update_archive
#
# -------------------------------------------------------------------------
def build_update_archive(self):
"""This method generates the final archive containing the elements of the
firmware. The main steps :
. Creating a manisfest describing the content items (hash value)
. Creat a tar file, containing all the data from the content subirectory
. Create a detached signature using either gnupg or openssl
The two generated files are stored under firmware (same levelas content)
"""
# Check that there is a firmware configuration file first
if self.project.firmware is None:
self.project.logging.critical("The firmware configuration file is not defined in \
project file")
exit(1)
# Check that the target files and directories exists
if not os.path.isdir(self.project.get_firmware_content_directory()):
self.project.logging.critical("The firmware directory does not exist. Did you forget to run \
assemble_firmwarec command before ? Expected directory is " + \
self.project.get_firmware_content_directory())
exit(1)
# Create the tar archive
self.create_main_archive()
# Sign the main archive
self.sign_main_archive()
# And we are done
return
# -------------------------------------------------------------------------
#
# create_main_archive
#
# -------------------------------------------------------------------------
def create_main_archive(self):
"""This method create the manifest of the archive (a file listing all the
files with their checksums). Then it creates the archive to be signed.
All the files are stored under firmware directory. In the en only two
files should be produced. The archive, created by this method, and the
detached signature. Coded in next method.
"""
# Output current task to logs
logging.info("Creating the main archive")
# Creating the manifest
# Creating the archive
dest_archive = self.project.get_firmware_output_directory()
dest_archive += "/" + self.project.firmware[Key.CONFIGURATION.value][Key.FILENAME.value]
# Create the tar itself
tar = tarfile.open(name=dest_archive, mode='w')
# Iterate firmware content directory
for name in os.listdir(self.project.get_firmware_content_directory()):
# And add each and every file
filename = self.project.get_firmware_content_directory() + "/" + name
tar.add(filename, name, recursive=True)
# Let's close the tar to flushit
tar.close()
logging.debug("Archive " + dest_archive + " has been created")
# -------------------------------------------------------------------------
#
# sign_main_archive
#
# -------------------------------------------------------------------------
def sign_main_archive(self):
"""This method does a digital signature of the archive, or a hash (should
not be used). Depending on configuration, it ca use either a hash function
such as sha1sum, or a signature software such as gnupg or openssl.
"""
# Output current task to logs
logging.info("Signing the main archive")
# Check if signature is activated
if Key.SECURITY.value in self.project.firmware:
if Key.SIGNATURE.value in self.project.firmware[Key.SECURITY.value]:
# Retrieve the signature tool to use
signing_tool = self.project.firmware[Key.SECURITY.value][Key.SIGNATURE.value]
# Generate the path to the archive and detached signature file
dest_archive = self.project.get_firmware_output_directory()
dest_archive += "/" + self.project.firmware[Key.CONFIGURATION.value][Key.FILENAME.value]
dest_sign = dest_archive + ".sig"
# Remove any exsting signature
if os.path.isfile(dest_sign):
os.remove(dest_sign)
self.project.logging.info("Existing " + dest_archive + " has been removed")
# Expected values are empty (means deactivated), gpg2 (or gnupg2), or openssl
if len(signing_tool) == 0:
self.project.logging.info("Signature is not activated in the security section of the \
firmware definition file")
# Are we using a known tool
elif signing_tool not in [Key.GPG.value, Key.GPG2.value, Key.OPENSSL.value]:
self.project.logging.critical("Unknown signing tool : " + signing_tool)
self.project.logging.critical("Valid values are gpg, gpg2, openssl or empty string to \
deactivate signature")
exit(1)
# Signing tool is valid, now let's generate the command to do it
# First case, are we using GnuPG 1 or 2
if signing_tool == Key.GPG.value or signing_tool == Key.GPG2.value:
# Now let's prepare the signing command
command = signing_tool
# Are we using armor format export ?
if Key.GPG_ARMOR_SIGNATURE.value in self.project.firmware[Key.SECURITY.value] and \
self.project.firmware[Key.SECURITY.value][Key.GPG_ARMOR_SIGNATURE.value]:
# Yes, let's append --armor to the command
command += " --armor"
command += " --output " + dest_sign + " --detach-sig " + dest_archive
self.execute_command(command)
self.project.logging.info(dest_archive + " has been created and signed successfully")
# Update archive has been signed, let's verify signature before finishing
command = signing_tool + " --verify " + dest_sign + " " + dest_archive
self.execute_command(command)
#TODO : add test case
self.project.logging.info(dest_sign + " has been verfied successfully")
# Or is it OpenSSL ?
elif signing_tool == Key.OPENSSL.value:
# TODO OpenSSL support
self.project.logging.critical("OpenSSL is not yet supported for firmware signature")
self.project.logging.critical("Please use GnuPG until support is available")
exit(1)
else:
self.project.logging.info("Signature is not activated in the security section of the \
firmware definition file")
else:
self.project.logging.error("The firmware definition file does not include a security section")
self.project.logging.error("Unable to create signature file. You should add security.")
| apache-2.0 | 8,113,699,216,175,973,000 | 38 | 100 | 0.617961 | false | 4.518293 | false | false | false |
ben-e-whitney/the-points-chart | utilities/views.py | 1 | 2100 | from django.shortcuts import render
import decimal
class TableElement:
"""
Represent an individual cell of an HTML table.
"""
def __init__(self, title=None, CSS_classes=None, content=None):
self.title = title
self.CSS_classes = CSS_classes
self.content = content
class TableParent(TableElement):
"""
Represent a collection of table elements.
The table elements could be TableElements or TableParents. Display is left
up to the template.
"""
def __init__(self, **kwargs):
self.children = kwargs.pop('children')
super().__init__(**kwargs)
def format_balance(balance=None, load=None,
endpoints=(-float('inf'), -0.35, -0.15, 0.15, 0.35, float('inf')),
possible_CSS_classes=('very_low_balance', 'low_balance', 'OK_balance',
'high_balance', 'very_high_balance')):
"""
Format the balance with styling according to the balance:load ratio.
"""
if (len(endpoints) != 1+len(possible_CSS_classes)):
raise ValueError
def sign_int(balance):
"""
Return input with a sign character prepended.
"""
balance = int(balance.to_integral_value())
if balance >= 0:
return '+{bal}'.format(bal=balance)
else:
#Note that '−' is Unicode character U+2212, not a hyphen.
return '−{bal}'.format(bal=abs(balance))
try:
ratio = balance/load
except decimal.DivisionByZero:
ratio = endpoints[-1]+1 if balance >= 0 else endpoints[0]-1
except decimal.InvalidOperation:
ratio = 0
for i, CSS_class in enumerate(possible_CSS_classes):
if endpoints[i] <= ratio < endpoints[i+1]:
# We will use the value of `CSS_class`. If we never make it to this
# block, `CSS_class` will end up `CSS_classes[-1]`.
break
return {
'value': float(balance),
'formatted_value': sign_int(balance),
'html_title': 'Exact value: {val}'.format(val=balance),
'CSS_class': ' '.join(('balance', CSS_class)),
}
| gpl-3.0 | 7,983,015,587,877,793,000 | 30.757576 | 79 | 0.594943 | false | 3.910448 | false | false | false |
OpenBeta/beta | apiserver/model.py | 1 | 9792 | from flask_sqlalchemy import SQLAlchemy
from sqlalchemy.dialects import postgresql
from geoalchemy2 import Geometry
from sqlalchemy import func, ForeignKey, PrimaryKeyConstraint, event, Sequence
from sqlalchemy.schema import DropTable
from sqlalchemy.ext.compiler import compiles
import flask_login
from datetime import datetime
import json
import collections
from key_helper import *
db = SQLAlchemy()
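# Simple container pairing route and boundary query results for the search helpers below.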
FeatureSet = collections.namedtuple('FeatureSet', 'route, boundary')
class Route(db.Model):
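    """A climbing route stored as a PostGIS point plus its raw GeoJSON properties."""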
__tablename__ = 'routes'
id = db.Column(db.Integer, primary_key=True)
geo = db.Column(Geometry(geometry_type='POINT', srid=4326), unique=True)
name = db.Column(db.Text, index=True)
grade = db.Column(db.Text)
grade_type = db.Column(db.Text, ForeignKey('grade_types.id'))
properties_json = db.Column(postgresql.JSONB)
def __init__(self, geojson):
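        """Build a Route from a GeoJSON Feature.

        Illustrative input shape (an assumption for documentation, not an enforced schema):
            {"type": "Feature",
             "geometry": {"type": "Point", "coordinates": [-119.6, 37.7]},
             "properties": {"name": "Example Route",
                            "grade": {"value": "V8", "type": "v"}}}
        """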
self.geo = func.ST_SetSRID(func.ST_GeomFromGeoJSON(json.dumps(geojson['geometry'])), 4326)
self.name = geojson['properties']['name']
if 'grade' in geojson['properties']:
grade = geojson['properties']['grade']
self.grade = grade['value']
self.grade_type = grade['type']
else:
self.grade = ''
            self.grade_type = 'unknown'
self.properties_json = geojson['properties'] # store raw data
def __repr__(self):
return '<Route %r>' % self.name
def to_json(self):
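        """Serialize back to a GeoJSON Feature; the geometry is read from PostGIS via ST_AsGeoJSON."""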
return {
"type": "Feature",
"id": "route/{}".format(self.id),
"geometry": json.loads(db.session.scalar(func.ST_AsGeoJSON(self.geo))),
"properties": self.properties_json
}
def __eq__(self, other):
"""Override the default Equals behavior"""
if isinstance(other, self.__class__):
lhs = json.loads(db.session.scalar(func.ST_AsGeoJSON(self.geo)))
rhs = json.loads(db.session.scalar(func.ST_AsGeoJSON(other.geo)))
return lhs == rhs
return NotImplemented
def __ne__(self, other):
"""Define a non-equality test"""
return not self.__eq__(other)
def __hash__(self):
"""Override the default hash behavior (that returns the id or the object)"""
return hash(self.geo)
class GradeType(db.Model):
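    """Lookup table of supported grading systems (e.g. Yosemite Decimal System, Hueco V-scale)."""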
__tablename__ = 'grade_types'
id = db.Column(db.Text, primary_key=True, unique=True)
full_name = db.Column(db.Text)
def __init__(self, id, full_name):
self.id = id
self.full_name = full_name
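# Seed the grading-system lookup table as soon as it is created.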
@event.listens_for(GradeType.__table__, 'after_create')
def insert_initial_values(*args, **kwargs):
db.session.add(GradeType(id='unknown', full_name='Type Unknown'))
db.session.add(GradeType(id='yds', full_name='Yosemite Decimal System'))
db.session.add(GradeType(id='v', full_name='Hueco V-scale'))
db.session.commit()
event.listen(GradeType.__table__, 'after_create', insert_initial_values)
class GradeDetail(db.Model):
__tablename__ = 'grade_details'
id = db.Column(db.Text, ForeignKey('grade_types.id'))
value = db.Column(db.Text)
weight = db.Column(db.Integer)
__table_args__ = (PrimaryKeyConstraint(id, weight),)
class Boundary(db.Model):
__tablename__ = 'boundaries'
BOUNDARY_ID_SEQ = Sequence('boundary_id_seq', metadata=db.Model.metadata) # define sequence explicitly
boundary_id = db.Column(db.Integer, primary_key=True, server_default=BOUNDARY_ID_SEQ.next_value())
name = db.Column(db.Text, index=True)
is_top_level = db.Column(db.Boolean)
geo = db.Column(Geometry(geometry_type='POLYGON', srid=4326), unique=True)
properties_json = db.Column(postgresql.JSONB)
sys_period = db.Column(postgresql.TSTZRANGE, nullable=False)
def __init__(self, geojson):
props = geojson['properties']
self.name = props.get('name')
self.is_top_level = props.get('topLevel', False)
self.geo = func.ST_SetSRID(func.ST_GeomFromGeoJSON(json.dumps(geojson['geometry'])), 4326)
self.properties_json = props
def to_json(self):
return {
"type": "Feature",
"id": "area/{}".format(self.boundary_id),
"geometry": json.loads(db.session.scalar(func.ST_AsGeoJSON(self.geo))),
"properties": self.properties_json
}
class BoundaryHistory(db.Model):
__tablename__ = 'boundaries_history'
history_id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    boundary_id = db.Column(db.Integer)
name = db.Column(db.Text)
is_top_level = db.Column(db.Boolean)
geo = db.Column(Geometry(geometry_type='POLYGON', srid=4326))
properties_json = db.Column(postgresql.JSONB)
sys_period = db.Column(postgresql.TSTZRANGE, nullable=False)
class APIUser(db.Model, flask_login.UserMixin):
__tablename__ = 'api_users'
uid = db.Column(db.Integer, primary_key=True, autoincrement=True)
email = db.Column(db.Text, primary_key=True, unique=True)
api_key = db.Column(db.Text, primary_key=True, unique=True)
active = db.Column(db.Boolean)
created_ts = db.Column(db.DateTime(timezone=True))
mod_ts = db.Column(db.DateTime(timezone=True))
def __init__(self, **kwargs):
self.active = kwargs['active']
self.email = kwargs['email']
now = datetime.utcnow()
self.created_ts = now
        self.mod_ts = now
self.api_key = genkey(userKeySigner)
@property
def is_active(self):
        # return the `active` column; `return self.is_active` here would recurse forever
        return self.active
@property
def is_authenticated(self):
return True
@property
def apikey(self):
return self.api_key
class AuditLog(db.Model):
__tablename__ = 'audit_log'
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
op = db.Column(db.CHAR)
row_id = db.Column(db.Integer)
table_name = db.Column(db.VARCHAR(50))
user_id = db.Column(db.VARCHAR(30), nullable=False)
ip = db.Column(postgresql.INET)
ts = db.Column(db.DateTime(timezone=True))
def get_boundary_by_id(boundary_id):
row = db.session.query(Boundary).filter(Boundary.boundary_id == boundary_id).first()
if row is None:
return None
return row.to_json()
def search_within_boundary_by_id(boundary_id):
rows = db.session.query(Route, Boundary)\
.filter("ST_WITHIN(routes.geo, boundaries.geo)")\
.filter("boundaries.boundary_id=:id")\
.params(id=boundary_id).all()
return {
"type": "FeatureCollection",
"features": map(lambda item: item.to_json(), rows)
}
def search_within_radius_in_meters(location, radius, route=True, boundary=False):
coordinates = location.split(",")
route_rows = list()
boundary_rows = list()
if route:
route_rows = db.session.query(Route).\
filter('ST_DistanceSphere(geo, ST_MakePoint(:lng,:lat))<=:r').\
params(lng=coordinates[0], lat=coordinates[1], r=radius).all()
if boundary:
boundary_rows = db.session.query(Boundary).\
filter('ST_DistanceSphere(geo, ST_MakePoint(:lng,:lat))<=:r').\
params(lng=coordinates[0], lat=coordinates[1], r=radius).all()
route_json = {
"type": "FeatureCollection",
"features": map(lambda item: item.to_json(), route_rows)
}
boundary_json = {
"type": "FeatureCollection",
"features": map(lambda item: item.to_json(), boundary_rows)
}
return FeatureSet(route=route_json, boundary=boundary_json)
def recent_activities(count, route=True, boundary=False):
hard_limit = 10;
route_rows = list()
boundary_rows = list()
if count > hard_limit:
count = hard_limit
if route:
route_rows = db.session.query(Route).\
order_by(Route.id.desc()).\
limit(count);
if boundary:
boundary_rows = db.session.query(Boundary).\
order_by(Boundary.boundary_id.desc()).\
limit(count);
route_json = {
"type": "FeatureCollection",
"features": map(lambda item: item.to_json(), route_rows)
}
boundary_json = {
"type": "FeatureCollection",
"features": map(lambda item: item.to_json(), boundary_rows)
}
return FeatureSet(route=route_json, boundary=boundary_json)
def setup_temporal_tables():
sql = ("CREATE TRIGGER boundary_history BEFORE INSERT OR UPDATE OR DELETE ON Boundaries "
"FOR EACH ROW EXECUTE PROCEDURE versioning('sys_period', 'boundaries_history', true)")
db.session.execute(sql)
sql = ("create or replace function trxn_history() returns trigger as $$ "
"BEGIN"
" IF (TG_OP = 'DELETE') THEN"
" INSERT INTO audit_log (op, row_id, table_name, user_id, ts) "
" VALUES('D', OLD.boundary_id, TG_TABLE_NAME, current_setting('vars.edited_by'),now());"
" ELSEIF (TG_OP='UPDATE') THEN"
" INSERT INTO audit_log (op, row_id, table_name, user_id, ts) "
" VALUES('U', OLD.boundary_id, TG_TABLE_NAME, NEW.properties_json->>'editedBy', now());"
" ELSEIF (TG_OP='INSERT') THEN"
" INSERT INTO audit_log (op, row_id, table_name, user_id, ts) "
" VALUES('I', NEW.boundary_id, TG_TABLE_NAME, NEW.properties_json->>'editedBy', now());"
" END IF;"
" RETURN null;"
"END;"
"$$ language plpgsql;")
db.session.execute(sql)
sql = ("CREATE TRIGGER audit AFTER INSERT OR UPDATE OR DELETE ON boundaries "
"FOR EACH ROW EXECUTE procedure trxn_history();")
db.session.execute(sql)
db.session.commit()
@compiles(DropTable, "postgresql")
def _compile_drop_table(element, compiler, **kwargs):
return compiler.visit_drop_table(element) + " CASCADE"
| gpl-3.0 | -8,655,785,475,700,712,000 | 33.478873 | 107 | 0.625613 | false | 3.495894 | false | false | false |
xia2/xia2 | src/xia2/Wrappers/Dials/EstimateGain.py | 1 | 1399 | from xia2.Driver.DriverFactory import DriverFactory
from xia2.Schema.Interfaces.FrameProcessor import FrameProcessor
def EstimateGain(DriverType=None):
"""A factory for EstimateGainWrapper classes."""
DriverInstance = DriverFactory.Driver(DriverType)
class EstimateGainWrapper(DriverInstance.__class__, FrameProcessor):
def __init__(self):
super().__init__()
self.set_executable("dials.estimate_gain")
self._sweep_filename = None
self._kernel_size = None
self._gain = None
def set_sweep_filename(self, sweep_filename):
self._sweep_filename = sweep_filename
def set_kernel_size(self, kernel_size):
self._kernel_size = kernel_size
def get_gain(self):
return self._gain
def run(self):
self.clear_command_line()
assert self._sweep_filename is not None
self.add_command_line(self._sweep_filename)
if self._kernel_size is not None:
self.add_command_line("kernel_size=%i,%i" % self._kernel_size)
self.start()
self.close_wait()
self.check_for_errors()
for line in self.get_all_output():
if "Estimated gain:" in line:
self._gain = float(line.split(":")[-1].strip())
return EstimateGainWrapper()
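# Hypothetical usage of the factory (not part of the original wrapper); the
# sweep filename is an assumed example input for dials.estimate_gain:
#   gain_estimator = EstimateGain()
#   gain_estimator.set_sweep_filename('imported.expt')
#   gain_estimator.run()
#   gain = gain_estimator.get_gain()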
| bsd-3-clause | 5,109,846,030,004,419,000 | 30.795455 | 78 | 0.591851 | false | 4.239394 | false | false | false |
yueyongyue/saltshaker | shaker/highstate.py | 1 | 1467 | import os
class HighState(object):
def __init__(self):
        if os.path.isfile('/etc/salt/master.d/file_roots.conf'):
os.system("mkdir -p /srv/salt")
else:
file_roots = file("/etc/salt/master.d/file_roots.conf", "w+")
add = ["file_roots:\n", " base:\n", " - /srv/salt\n"]
file_roots.writelines(add)
file_roots.close()
def list_sls(self, dir):
all_sls = {}
list_filename = os.listdir(dir)
for filename in list_filename:
print filename.split('.')
if os.path.isfile("/srv/salt/"+filename):
content = open(dir+filename).readlines()
name = filename.split('.')[0]
dic_sls = {name: content}
all_sls.update(dic_sls)
return all_sls
def add_sls(self, filename, content):
files = file("/srv/salt/"+filename+".sls", "w")
files.writelines(content)
files.close()
def del_sls(self, filename):
path = r"/srv/salt/" + filename + ".sls"
if os.path.exists(path):
os.remove(path)
else:
return "file not exit"
def main():
highstate = HighState()
a = highstate.list_sls("/srv/salt/")
#b = ['dfgdfgfgfdg\n',' fgfgfdgfgfgfg\n']
#a = highstate.add_sls("tomcat", b)
#print a
#filename = "test"
#a = highstate.del_sls(filename)
if __name__ == '__main__':
main()
| apache-2.0 | 7,973,569,440,805,450,000 | 28.34 | 73 | 0.521472 | false | 3.356979 | false | false | false |
mkollaro/destroystack | destroystack/tools/server_manager.py | 1 | 7151 | # Copyright (c) 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import destroystack.tools.state_restoration.metaopenstack as metaopenstack
import destroystack.tools.state_restoration.vagrant as vagrant
import destroystack.tools.state_restoration.manual as manual_restoration
import destroystack.tools.common as common
import destroystack.tools.servers as server_tools
# Possible roles that a server can have, depending what services are installed
# on it. It can have more than one role.
ROLES = set(['keystone', 'swift_proxy', 'swift_data', 'controller', 'compute',
'glance', 'cinder', 'neutron'])
MANAGEMENT_TYPES = ['none', 'manual', 'metaopenstack']
LOG = logging.getLogger(__name__)
class Singleton(object):
_instance = None
def __new__(cls, *args, **kwargs):
if not cls._instance:
cls._instance = super(Singleton, cls).__new__(cls, *args, **kwargs)
return cls._instance
class ServerManager(Singleton):
def __init__(self):
self._servers = server_tools.create_servers(common.CONFIG['servers'])
self._workaround_single_swift_disk()
def servers(self, role=None, roles=None):
"""Generator that gets a server by its parameters.
If no parameters are given, it will just return any of them.
:param role: get a server that has this role, choose from `ROLES`
:param roles: get a server that has all of these roles, see param
`role`
"""
if role:
assert role in ROLES
assert not roles # cannot use both
if roles:
roles = set(roles)
assert roles.issubset(ROLES)
for server in self._servers:
if not role and not roles:
# no conditions, return any
yield server
elif role in server.roles \
or (roles and roles.issubset(server.roles)):
yield server
def get(self, role=None, roles=None):
"""Get the first server that matches the parameters.
For more info, look at the `ServerManager.servers() generator - it uses
the same parameters.
:returns: the server in question or None
"""
try:
return self.servers(role, roles).next()
except StopIteration:
return None
def get_all(self, role=None, roles=None):
"""Same as `get`, but returns a list of all the matching servers."""
return list(self.servers(role, roles))
def save_state(self, tag=''):
"""Create a snapshot of all the servers
Depending on what is in the configuration in "management.type":
* manual - Just create some backup of the files and maybe
databases. Unsupported and not recommended.
* none - Do nothing
* metaopenstack - Create a snapshot of all the servers
If it's being created, the name of the snapshots (if created) will be
"config.management.snapshot_prefix" + name of the VM + tag, where the
prefix is "destroystack-snapshot" by default. The VMs have to have
unique names (at least among each other) and snapshots/images with that
name cannot already exist.
:param tag: will be appended to the name of the snapshots
"""
self._choose_state_restoration_action('save', tag)
def load_state(self, tag=''):
"""Restore all the servers from their snapshots.
For more information, see the function ``save``.
Depending on what is in the configuration in "management.type":
* manual - Restore backups, mount disks that got umounted, start up
services again. Unsupported, might not work - it's just a best
effort.
* none - Do nothing
* metaopenstack - Rebuild the VMs with the snapshot images, which
are going to be found by the name as described in the `save`
function.
"""
self._choose_state_restoration_action('load', tag)
self.connect()
# workaround for the fact that the extra disk might not get snapshotted
self._restore_swift_disks()
def connect(self):
"""Create ssh connections to all the servers.
Will re-create them if called a second time.
"""
for server in self._servers:
server.connect()
def disconnect(self):
for server in self._servers:
server.disconnect()
def _choose_state_restoration_action(self, action, tag):
"""Choose which function to use, based on "management.type" in config.
:param action: save or load
"""
assert action in ['save', 'load']
man_type = common.CONFIG['management']['type']
if man_type == 'metaopenstack':
if action == 'save':
metaopenstack.create_snapshots(tag)
else:
metaopenstack.restore_snapshots(tag)
elif man_type == 'vagrant':
if action == 'save':
vagrant.create_snapshots(tag)
else:
vagrant.restore_snapshots(tag)
elif man_type == 'manual':
if action == 'save':
manual_restoration.create_backup(self)
else:
manual_restoration.restore_backup(self)
elif man_type == 'none':
LOG.info("State save and restoration has been turned off")
else:
raise Exception("This type of server management, '%s', is not"
"supported, choose among: %s"
% (man_type, MANAGEMENT_TYPES))
def _restore_swift_disks(self):
"""These disks might not have been snapshotted.
Since the extra disk is currently maybe not being snapshotted (it is
just some ephemeral storage or cinder volume), format them and restore
their flags.
Additionally, if the user provided only one disk, we create 3
partitions on it and use them as "disks" to simplify things for the
user.
"""
data_servers = list(self.servers(role='swift_data'))
server_tools.prepare_swift_disks(data_servers)
for server in data_servers:
for disk in server.disks:
server.restore_disk(disk)
def _workaround_single_swift_disk(self):
for server in list(self.servers(role='swift_data')):
if len(server.disks) == 1:
disk = server.disks[0]
server.disks = [disk + "1", disk + "2", disk + "3"]
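# A hypothetical usage sketch (not part of the original module), assuming the
# usual destroystack configuration with a "servers" list and "management.type"
# set to one of MANAGEMENT_TYPES (e.g. "metaopenstack"):
#
#     manager = ServerManager()
#     proxy = manager.get(role='swift_proxy')
#     data_servers = manager.get_all(role='swift_data')
#     manager.save_state(tag='before-test')
#     ...  # break things on the servers under test
#     manager.load_state(tag='before-test')
#
# ServerManager is a Singleton, so repeated instantiations return the same
# object and reuse the same SSH connections.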
| apache-2.0 | 3,963,596,485,430,058,500 | 37.240642 | 79 | 0.61418 | false | 4.441615 | true | false | false |
Som-Energia/invoice-janitor | invoicing/f1fixing/import_error/models.py | 1 | 24323 | # -*- coding: utf-8 -*-
from lxml import etree, objectify
import base64
import re
import os
from datetime import datetime
import dateutil.parser
import xmlformatter
## Codis OCSUM - F1
# Codi periode
codigoPeriodo_to_P = {
1:'P1', 03:'P2',10:'P1',21:'P1',22:'P2',31:'P1',32:'P2',33:'P3',41:'P1',
42:'P2',43:'P3',51:'P1',52:'P2',53:'P3',61:'P1',62:'P2',63:'P3',64:'P4',
65:'P5',66:'P6',71:'P1',72:'P2',73:'P3',74:'P4',75:'P5',76:'P6',77:'P7'
}
# Codi origen de lectura
codigoOrigen_to_O = {
'10': 'Telemedida',
'11': 'Telemedida corregida',
'20': 'TPL',
'21': 'TPL corregida',
'30': 'Visual',
'31': 'Visual corregida',
'40': 'Estimada',
'50': 'Autolectura',
'99': 'Sin Lectura'
}
O_to_codigoOrigen =\
{
'Telemedida':1,
'Telemedida corregida':2,
'TPL':3,
'TPL corregida':4,
'Visual':5,
'Visual corregida':6,
'Estimada':7,
'Autolectura':8,
'Sin Lectura':9,
'Sense Lectura':9
}
class OpenObject(object):
O = None
def __init__(self, O):
self.O = O
class F1(object):
root = None
raw = None
def __init__(self, xml=None, filename=None):
if not xml and not filename:
raise
if filename:
with open(filename) as f:
xml = f.read()
self.root = objectify.fromstring(xml)
@property
def raw(self):
objectify.deannotate(self.root, xsi_nil=True)
etree.cleanup_namespaces(self.root)
return etree.tostring(self.root,
encoding="ISO-8859-1",
xml_declaration=True)
def dump(self, filename):
formatter = xmlformatter.Formatter(indent="1",
indent_char="\t",
encoding_output="ISO-8859-1",
preserve=["literal"])
raw = formatter.format_string(self.raw)
with open(filename, "w") as f:
f.write(raw)
def update_xml_value(self, comptador, data, periode, tipus, attribute, value):
if attribute not in ['FechaHora', 'Procedencia', 'Lectura']:
raise Exception('Attribute not supported')
root = self.root
if not hasattr(root, 'Facturas'):
raise Exception('F1 format failed')
Facturas = root.Facturas
if not hasattr(Facturas, 'FacturaATR'):
raise Exception('F1 format failed')
FacturaATR = Facturas.FacturaATR
if not hasattr(FacturaATR, '__iter__'):
FacturaATR = [FacturaATR]
for FacturaATR_ in FacturaATR:
if not hasattr(FacturaATR_, 'Medidas'):
raise Exception('F1 format failed')
Medidas = FacturaATR_.Medidas
if not hasattr(Medidas, '__iter__'):
Medidas = [Medidas]
for Medidas_ in Medidas:
if not hasattr(Medidas_, 'Aparato'):
raise Exception('F1 format failed')
Aparato = Medidas_.Aparato
if not hasattr(Aparato, '__iter__'):
Aparato = [Aparato]
for Aparato_ in Aparato:
if not hasattr(Aparato_, 'NumeroSerie'):
raise Exception('F1 format failed')
try:
                        if not (int(Aparato_.NumeroSerie) == int(comptador)):
continue
except Exception, e:
continue
if not hasattr(Aparato_,'Integrador'):
raise Exception('F1 format failed')
Integrador = Aparato_.Integrador
if not hasattr(Integrador, '__iter__'):
Integrador = [Integrador]
for Integrador_ in Integrador:
if not hasattr(Integrador_,'Magnitud'):
raise Exception('F1 format failed')
if (tipus == 'A') and not (str(Integrador_.Magnitud) == 'AE'):
continue
if (tipus == 'R') and not (str(Integrador_.Magnitud).startswith('R')):
continue
if not Integrador_.CodigoPeriodo:
continue
if codigoPeriodo_to_P[Integrador_.CodigoPeriodo] == periode:
if not hasattr(Integrador_, 'LecturaDesde'):
raise Exception('F1 format failed')
if not hasattr(Integrador_, 'LecturaHasta'):
raise Exception('F1 format failed')
if dateutil.parser.parse(str(Integrador_.LecturaDesde.FechaHora)) == dateutil.parser.parse(data):
setattr(Integrador_.LecturaDesde, attribute, value)
elif dateutil.parser.parse(str(Integrador_.LecturaHasta.FechaHora)) == dateutil.parser.parse(data):
setattr(Integrador_.LecturaHasta, attribute, value)
def get_xml_value(self, comptador, data, periode, tipus, attribute):
if attribute not in ['FechaHora', 'Procedencia', 'Lectura']:
raise Exception('Attribute not supported')
root = self.root
if not hasattr(root, 'Facturas'):
raise Exception('F1 format failed')
Facturas = root.Facturas
if not hasattr(Facturas, 'FacturaATR'):
raise Exception('F1 format failed')
FacturaATR = Facturas.FacturaATR
if not hasattr(FacturaATR, '__iter__'):
FacturaATR = [FacturaATR]
for FacturaATR_ in FacturaATR:
if not hasattr(FacturaATR_, 'Medidas'):
raise Exception('F1 format failed')
Medidas = FacturaATR_.Medidas
if not hasattr(Medidas, '__iter__'):
Medidas = [Medidas]
for Medidas_ in Medidas:
if not hasattr(Medidas_, 'Aparato'):
raise Exception('F1 format failed')
Aparato = Medidas_.Aparato
if not hasattr(Aparato, '__iter__'):
Aparato = [Aparato]
for Aparato_ in Aparato:
if not hasattr(Aparato_, 'NumeroSerie'):
raise Exception('F1 format failed')
try:
if comptador.isdigit():
if not int(Aparato_.NumeroSerie) == int(comptador):
continue
else:
if not Aparato_.NumeroSerie == comptador:
continue
except Exception, e:
continue
if not hasattr(Aparato_,'Integrador'):
raise Exception('F1 format failed')
Integrador = Aparato_.Integrador
if not hasattr(Integrador, '__iter__'):
Integrador = [Integrador]
for Integrador_ in Integrador:
if not hasattr(Integrador_,'Magnitud'):
raise Exception('F1 format failed')
if (tipus == 'A') and not (str(Integrador_.Magnitud) == 'AE'):
continue
if (tipus == 'R') and not (str(Integrador_.Magnitud).startswith('R')):
continue
if not Integrador_.CodigoPeriodo:
continue
if codigoPeriodo_to_P[Integrador_.CodigoPeriodo] == periode:
if not hasattr(Integrador_, 'LecturaDesde'):
raise Exception('F1 format failed')
if not hasattr(Integrador_, 'LecturaHasta'):
raise Exception('F1 format failed')
if dateutil.parser.parse(str(Integrador_.LecturaDesde.FechaHora)) == dateutil.parser.parse(data):
return getattr(Integrador_.LecturaDesde, attribute)
elif dateutil.parser.parse(str(Integrador_.LecturaHasta.FechaHora)) == dateutil.parser.parse(data):
                                return getattr(Integrador_.LecturaHasta, attribute)
raise Exception('F1 error')
def is_abonadora(self):
Facturas = self.root.Facturas
if not hasattr(Facturas, 'FacturaATR'):
raise Exception('F1 format failed')
FacturaATR = Facturas.FacturaATR
if not hasattr(FacturaATR, '__iter__'):
FacturaATR = [FacturaATR]
return FacturaATR.DatosGeneralesFacturaATR.DatosGeneralesFactura.IndicativoFacturaRectificadora in ['A', 'B']
def is_rectificadora(self):
Facturas = self.root.Facturas
if not hasattr(Facturas, 'FacturaATR'):
raise Exception('F1 format failed')
FacturaATR = Facturas.FacturaATR
if not hasattr(FacturaATR, '__iter__'):
FacturaATR = [FacturaATR]
return FacturaATR.DatosGeneralesFacturaATR.DatosGeneralesFactura.IndicativoFacturaRectificadora == 'R'
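# Sketch of the F1 XML hierarchy that update_xml_value()/get_xml_value() walk,
# reconstructed from the traversal above (element names as used in the code):
#
#   Facturas
#     FacturaATR (one or many)
#       Medidas (one or many)
#         Aparato (one or many)            -- NumeroSerie identifies the meter
#           Integrador (one or many)       -- Magnitud ('AE' or 'R...'), CodigoPeriodo
#             LecturaDesde / LecturaHasta  -- FechaHora, Procedencia, Lectura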
class LectPool(OpenObject):
def __init__(self,O):
super(LectPool, self).__init__(O)
class Comptador(OpenObject):
id = None
def __init__(self, O, id):
super(Comptador,self).__init__(O)
self.id = id
class Polissa(OpenObject):
id = None
def __init__(self, O, id):
super(Polissa,self).__init__(O)
self.id = id
fields_to_read = ['name', 'cups', 'tarifa', 'state', 'comptador', 'distribuidora', 'data_alta', 'data_baixa']
data = self.O.GiscedataPolissa.read(self.id, fields_to_read)[0]
self.name = data['name']
self.tarifa = data['tarifa'][1]
self.state = data['state']
self.comptador = Comptador(self.O, data['comptador'])
self.distribuidora = data['distribuidora']
self.data_alta = data['data_alta']
self.data_baixa = data['data_baixa']
def daily_consumption(self):
return self.O.GiscedataPolissa.consum_diari(self.id)
def monthly_consumption(self, period):
return self.daily_consumption()[period]*30
class LectBase(object):
id = None
data = None
tarifa = None
periode_id = None
periode = None
lectura = None
origen_comer = None
origen = None
tipus = None
observacions = None
obj = None
def __init__(self, obj, id):
self.obj = obj
self.id = id
fields_to_read = ['name', 'lectura', 'origen_comer_id', 'origen_id', 'periode', 'tipus', 'observacions']
lect_read = self.obj.read(self.id, fields_to_read)
lect_perm_read = self.obj.perm_read([self.id])[0]
(tarifa,periode) = lect_read['periode'][1].split(' ')
periode_id = lect_read['periode'][0]
periode = periode[1:3]
self.write_date = lect_perm_read['write_date']
self.date = lect_read['name']
self.tarifa = tarifa
self.periode_id = periode_id
self.periode = periode
self.lectura = lect_read['lectura']
self.origen_comer = lect_read['origen_comer_id'][1]
self.origen = lect_read['origen_id'][1]
self.tipus = lect_read['tipus']
self.observacions = lect_read['observacions']
def update_lectura(self, old, new, origen, update_observacions, observacions='', observacions_date='-'):
write_values = {'lectura': int(new), 'origen_id': int(origen)}
if update_observacions:
obs = self.observacions
txt = 'R. {observacions} {old} [{observacions_date}] (ABr)\n'.format(**locals())
if not obs:
obs = ''
obs = txt + obs
write_values.update({'observacions':obs})
self.obj.write([self.id], write_values)
def update_observacions(self, value=None):
if value:
obs = self.observacions
today = datetime.strftime(datetime.today(),'%Y-%m-%d')
txt = 'R. {value} [{today}] (ABr)\n'.format(**locals())
if not obs:
obs = ''
obs = txt + ' ' + obs
self.obj.write([self.id], {'observacions': obs})
class LectPool(LectBase):
def __init__(self, O, id):
super(LectPool, self).__init__(O.GiscedataLecturesLecturaPool, id)
class Lect(LectBase):
def __init__(self, O, id):
super(Lect, self).__init__(O.GiscedataLecturesLectura, id)
class Error(OpenObject):
raw = None
factura = None
comptador = None
data = None
periode = None
tipus = None
valor_xml = None
valor_db = None
lects_pool = {}
last_lects_pool = {}
last_lects_invoice = {}
def __init__(self, O, polissa_id, raw):
super(Error, self).__init__(O)
self.parse(raw)
# LectPool
fields_to_search = [('polissa', '=', polissa_id), ('name', '=', self.comptador)]
comptador_ids = O.GiscedataLecturesComptador.search(fields_to_search, 0, 0, False, {'active_test': False})
if len(comptador_ids) == 0:
raise Exception('Comptador missing')
comptador_id = comptador_ids[0]
fields_to_search = [('comptador', '=', comptador_id), ('name', '=', self.data)]
lect_pool_ids = O.GiscedataLecturesLecturaPool.search(fields_to_search)
if not len(lect_pool_ids) > 0:
raise Exception('Lectpool missing')
for lect_pool_id in lect_pool_ids:
lect_pool = LectPool(self.O, lect_pool_id)
self.lects_pool[lect_pool.periode] = lect_pool
fields_to_search = [('comptador', '=', comptador_id),
('origen_id', 'in',
[O_to_codigoOrigen['Telemedida'],
O_to_codigoOrigen['Telemedida corregida'],
O_to_codigoOrigen['TPL'],
O_to_codigoOrigen['TPL corregida'],
O_to_codigoOrigen['Visual'],
O_to_codigoOrigen['Visual corregida']])]
last_lects_pool_ids = O.GiscedataLecturesLecturaPool.search(fields_to_search)
if not len(last_lects_pool_ids) > 0:
raise Exception('Lectpool missing')
last_lects_pool_id = last_lects_pool_ids[0]
fields_to_read = ['name']
last_lects_pool_date = O.GiscedataLecturesLecturaPool.read(last_lects_pool_id, fields_to_read)['name']
fields_to_search = [('comptador', '=', comptador_id),
('name', '=', last_lects_pool_date)]
last_lects_pool_ids = O.GiscedataLecturesLecturaPool.search(fields_to_search)
if not len(last_lects_pool_ids) > 0:
raise Exception('Lectpool missing')
for last_lects_pool_id in last_lects_pool_ids:
last_lects_pool = LectPool(self.O, last_lects_pool_id)
self.last_lects_pool[last_lects_pool.periode] = last_lects_pool
fields_to_search = [('comptador', '=', comptador_id)]
last_lects_invoice_id = O.GiscedataLecturesLectura.search(fields_to_search)[0]
fields_to_read = ['name']
last_lects_invoice_date = O.GiscedataLecturesLectura.read(last_lects_invoice_id, fields_to_read)['name']
fields_to_search = [('comptador', '=', comptador_id),
('name', '=', last_lects_invoice_date)]
last_lects_invoice_ids = O.GiscedataLecturesLectura.search(fields_to_search)
if not len(last_lects_invoice_ids) > 0:
raise Exception('Lect invoice missing')
last_lects_invoice_id = last_lects_invoice_ids[0]
if not len(last_lects_invoice_ids) > 0:
raise Exception('Lect missing')
for last_lects_invoice_id in last_lects_invoice_ids:
last_lects_invoice = Lect(self.O, last_lects_invoice_id)
self.last_lects_invoice[last_lects_invoice.periode] = last_lects_invoice
@property
def FechaHora(self):
return self.data
@property
def Lectura(self):
return self.valor_db
def parse(self,raw):
self.raw = raw
try:
# Format descripció divergència (GISCEMaster/giscedata_lectures_switching/giscedata_lectures.py
# _msg = _(u"Divergència en el valor de lectura existent."
# u" Comptador: %s Data: %s. Període: %s. Tipus: %s"
# u" valor: XML: %s BBDD:%s") \
# % (c_obj.name,
# valor, lect_bw.lectura)
m = re.match(u'Factura (.+): Divergència en el valor de lectura existent. Comptador: (\w+).*Data: ([0-9\-]+).+Període: (\w+)\. Tipus: (\w+) valor: XML: (\d*[.]?\d*).+BBDD:(\d*[.]?\d*)',raw)
if not m:
                raise Exception('Error not matching')
if not len(m.groups()) == 7:
                raise Exception('Error not matching')
self.factura = m.groups()[0]
self.comptador = m.groups()[1]
self.data = m.groups()[2]
self.periode = m.groups()[3]
self.tipus = m.groups()[4]
self.valor_xml = float(m.groups()[5])
self.valor_db = float(m.groups()[6])
except Exception, e:
raise e
class F1ImportError(OpenObject):
id = None
def __init__(self, O, id):
super(F1ImportError, self).__init__(O)
self.id = id
fields_to_read = ['name', 'cups_id', 'info']
data = O.GiscedataFacturacioImportacioLinia.read(self.id, fields_to_read)
self.name = data['name']
self.cups_id = data['cups_id'][0]
perm_data = O.GiscedataFacturacioImportacioLinia.perm_read([self.id])[0]
self.write_date = perm_data['write_date']
self.create_date = perm_data['create_date']
polissa_id = self.O.GiscedataPolissa.search([('cups', '=', self.cups_id)], 0, 0, False, {'active_test': False})
if not polissa_id:
            raise Exception('No contract information available')
self.polissa = Polissa(self.O, polissa_id)
# error
self.error = Error(self.O, polissa_id, data['info'])
# F1
attach_id = self.O.IrAttachment.search([
('res_model', '=', 'giscedata.facturacio.importacio.linia'), ('res_id', '=', self.id)])[0]
if not attach_id:
raise ValueError('Resource id not found')
xml_ = O.IrAttachment.read(attach_id, ['name', 'datas'])
xml = base64.b64decode(xml_["datas"])
self.F1 = F1(xml)
self.request_date = dateutil.parser.parse(str(self.F1.root.Cabecera.FechaSolicitud))
def reload(self, update=False):
if update:
(filename_,extension_) = os.path.splitext(self.name)
self.name = filename_ + '_A' + extension_
filename = os.path.join('/tmp', self.name)
self.F1.dump(filename)
with open(filename, 'rb') as file_:
encoded_string = base64.b64encode(file_.read())
ctx = {'active_id': self.id,
'fitxer_xml': True}
wizard_id = self.O.GiscedataFacturacioSwitchingWizard.create({}, ctx)
wizard = self.O.GiscedataFacturacioSwitchingWizard.get(wizard_id)
vals = {
'origen':'nou',
'filename': self.name,
'file':encoded_string
}
wizard.write(vals)
wizard.action_importar_f1(ctx)
else:
ctx = {'active_id': self.id, 'fitxer_xml': True}
wizard_id = self.O.GiscedataFacturacioSwitchingWizard.create({}, ctx)
wizard = self.O.GiscedataFacturacioSwitchingWizard.get(wizard_id)
wizard.action_importar_f1(ctx)
def update_xml_attribute(self, attribute):
if not hasattr(self.error, attribute):
raise Exception('Attribute %s not supported' % attribute)
self.F1.update_xml_value(self.error.comptador,
self.error.data,
self.error.periode,
self.error.tipus,
attribute,
getattr(self.error, attribute))
def get_xml_attribute(self, attribute):
return self.F1.get_xml_value(self.error.comptador,
self.error.data,
self.error.periode,
self.error.tipus,
attribute)
def dump(self, fmt='txt'):
vars = []
vars.append(('Error_id', self.id))
vars.append(('Polissa', self.polissa.name))
vars.append(('Tarifa', self.polissa.tarifa))
vars.append(('Distribuidora', self.polissa.distribuidora))
vars.append(('Data', self.error.data))
vars.append(('Periode', self.error.periode))
vars.append(('Tipus', self.error.tipus))
if self.F1.is_abonadora():
vars.append(('IndicativoFactura', 'Abonadora'))
elif self.F1.is_rectificadora():
vars.append(('IndicativoFactura', 'Rectificadora'))
else:
vars.append(('IndicativoFactura', 'Normal'))
procedencia = str(self.get_xml_attribute('Procedencia'))
vars.append(('Valor_XML', '%0.2f (%s)' % (self.error.valor_xml, codigoOrigen_to_O[procedencia])))
vars.append(('Valor_DB', '%0.2f' % self.error.valor_db))
vars.append(('Data DB', self.error.lects_pool[self.error.periode].write_date))
fields_to_search = [('comptador.polissa', '=', self.polissa.id[0])]
lect_pool_ids = self.O.GiscedataLecturesLecturaPool.search(fields_to_search)
lect_ids = self.O.GiscedataLecturesLectura.search(fields_to_search)
fields_to_read = ['name', 'periode', 'lectura', 'origen_id', 'observacions']
lect_pools = self.O.GiscedataLecturesLecturaPool.read(lect_pool_ids, fields_to_read)
lects = self.O.GiscedataLecturesLectura.read(lect_ids, fields_to_read)
lect_n = max(len(lects), len(lect_pools))
from tabulate import tabulate
table = []
for lect_idx in range(lect_n):
row = []
if lect_idx < len(lects):
observacions_ = ''
if lects[lect_idx]['observacions']:
observacions = lects[lect_idx]['observacions'].split('\n')
for o in observacions:
if o.startswith(u'From') or \
o.startswith(u'Lectura') or \
o.startswith(u'Tenim') or \
o.startswith(u'Data') or \
o.startswith(u'Limitació') or \
o.startswith(u'Consum'):
continue
observacions_ += o
row += [lects[lect_idx]['name'],
lects[lect_idx]['periode'][1],
lects[lect_idx]['lectura'],
lects[lect_idx]['origen_id'][1],
observacions_]
else:
row += [None, None, None, None, None]
if lect_idx < len(lect_pools):
row += [lect_pools[lect_idx]['name'],
lect_pools[lect_idx]['periode'][1],
lect_pools[lect_idx]['lectura'],
lect_pools[lect_idx]['origen_id'][1],
lect_pools[lect_idx]['observacions']]
else:
row += [None, None, None, None, None]
table.append(row)
for var in vars:
(var_name, var_value) = var
txt = '{var_name}:{var_value}'.format(**locals())
txt = txt.rstrip()
print txt
print tabulate(table, tablefmt=fmt) | agpl-3.0 | -3,406,983,610,301,352,000 | 36.642415 | 201 | 0.525827 | false | 3.543573 | false | false | false |
Kayoku/iotari | work-area/wireless-sensor/sensor.py | 1 | 2978 | import datetime
import json
from pprint import pprint
import requests
class Sensor():
"""Abstract sensor class."""
def __init__(self, uuid):
"""Initialisation."""
# find a way to get a stable name
self.uuid = uuid
def save_measure(self):
"""How to save a new measure."""
raise NotImplementedError
class APISensor(Sensor):
"""Save a sensor value using a remote API."""
HTTP_STATUS_CREATED = 201
HTTP_STATUS_SUCCESS = 200
def __init__(self, uuid, baseurl):
"""Initialize."""
super().__init__(uuid)
self.baseurl = baseurl
self.uuid = uuid
self.get_id()
def get_id(self):
"""Get the database id for the sensor.
If the sensor doesn't exist, it creates it.
"""
filters = [dict(name='location', op='equals', val=self.uuid)]
params = dict(q=json.dumps(dict(filters=filters)))
r = requests.get(self.baseurl + '/api/sensor',
params=params,
headers={'content-type': 'application/json'})
if r.status_code == self.HTTP_STATUS_SUCCESS:
json_content = json.loads(r.text)
if json_content["num_results"] == 1:
self.id_ = json_content["objects"][0]["id"]
elif json_content["num_results"] == 0:
# add a new sensor in db with the UUID
                r = requests.post(self.baseurl + '/api/sensor',
                                  data=json.dumps({"location": self.uuid}),
                                  headers={'content-type': 'application/json'})
if r.status_code == self.HTTP_STATUS_CREATED:
self.id_ = json.loads(r.text)["id"]
else:
raise Exception("impossible to add new sensor")
else:
raise Exception("mulltiple sensors with same id")
def save_measure(self, measure, time_stamp):
new_mesure = {'value': measure,
'sensor_id': self.id_,
'time_stamp': time_stamp}
try:
r = requests.post(self.baseurl + '/api/measure',
data=json.dumps(new_mesure),
headers={'content-type': 'application/json'})
except requests.exceptions.ConnectionError:
return False
return r.status_code == self.HTTP_STATUS_CREATED
if __name__ == "__main__":
baseurl = 'http://localhost:5000'
sensor = APISensor("salon", baseurl)
for _ in range(50):
sensor.save_measure(_, datetime.datetime.now().isoformat())
r = requests.get(baseurl + '/api/sensor',
headers={'content-type': 'application/json'})
print("Sensors: ")
pprint({"status": r.status_code, "headers": r.headers['content-type'], "content": json.loads(str(r.text))})
| mit | 6,726,347,953,359,607,000 | 33.879518 | 111 | 0.520484 | false | 4.309696 | false | false | false |
ContinuumIO/dask | dask/dataframe/accessor.py | 2 | 5362 | import numpy as np
import pandas as pd
from functools import partial
from ..utils import derived_from
def maybe_wrap_pandas(obj, x):
if isinstance(x, np.ndarray):
if isinstance(obj, pd.Series):
return pd.Series(x, index=obj.index, dtype=x.dtype)
return pd.Index(x)
return x
class Accessor(object):
"""
Base class for pandas Accessor objects cat, dt, and str.
Notes
-----
Subclasses should define ``_accessor_name``
"""
_not_implemented = set()
def __init__(self, series):
from .core import Series
if not isinstance(series, Series):
raise ValueError("Accessor cannot be initialized")
series_meta = series._meta
if hasattr(series_meta, "to_series"): # is index-like
series_meta = series_meta.to_series()
meta = getattr(series_meta, self._accessor_name)
self._meta = meta
self._series = series
@staticmethod
def _delegate_property(obj, accessor, attr):
out = getattr(getattr(obj, accessor, obj), attr)
return maybe_wrap_pandas(obj, out)
@staticmethod
def _delegate_method(obj, accessor, attr, args, kwargs):
out = getattr(getattr(obj, accessor, obj), attr)(*args, **kwargs)
return maybe_wrap_pandas(obj, out)
def _property_map(self, attr):
meta = self._delegate_property(self._series._meta, self._accessor_name, attr)
token = "%s-%s" % (self._accessor_name, attr)
return self._series.map_partitions(
self._delegate_property, self._accessor_name, attr, token=token, meta=meta
)
def _function_map(self, attr, *args, **kwargs):
if "meta" in kwargs:
meta = kwargs.pop("meta")
else:
meta = self._delegate_method(
self._series._meta_nonempty, self._accessor_name, attr, args, kwargs
)
token = "%s-%s" % (self._accessor_name, attr)
return self._series.map_partitions(
self._delegate_method,
self._accessor_name,
attr,
args,
kwargs,
meta=meta,
token=token,
)
@property
def _delegates(self):
return set(dir(self._meta)).difference(self._not_implemented)
def __dir__(self):
o = self._delegates
o.update(self.__dict__)
o.update(dir(type(self)))
return list(o)
def __getattr__(self, key):
if key in self._delegates:
if callable(getattr(self._meta, key)):
return partial(self._function_map, key)
else:
return self._property_map(key)
else:
raise AttributeError(key)
class DatetimeAccessor(Accessor):
""" Accessor object for datetimelike properties of the Series values.
Examples
--------
>>> s.dt.microsecond # doctest: +SKIP
"""
_accessor_name = "dt"
class StringAccessor(Accessor):
""" Accessor object for string properties of the Series values.
Examples
--------
>>> s.str.lower() # doctest: +SKIP
"""
_accessor_name = "str"
_not_implemented = {"get_dummies"}
@derived_from(pd.core.strings.StringMethods)
def split(self, pat=None, n=-1, expand=False):
if expand:
if n == -1:
raise NotImplementedError(
"To use the expand parameter you must specify the number of "
"expected splits with the n= parameter. Usually n splits result in n+1 output columns."
)
else:
delimiter = " " if pat is None else pat
meta = type(self._series._meta)([delimiter.join(["a"] * (n + 1))])
meta = meta.str.split(n=n, expand=expand, pat=pat)
else:
meta = (self._series.name, object)
return self._function_map("split", pat=pat, n=n, expand=expand, meta=meta)
@derived_from(pd.core.strings.StringMethods)
def cat(self, others=None, sep=None, na_rep=None):
from .core import Series, Index
if others is None:
raise NotImplementedError("x.str.cat() with `others == None`")
valid_types = (Series, Index, pd.Series, pd.Index)
if isinstance(others, valid_types):
others = [others]
elif not all(isinstance(a, valid_types) for a in others):
raise TypeError("others must be Series/Index")
return self._series.map_partitions(
str_cat, *others, sep=sep, na_rep=na_rep, meta=self._series._meta
)
@derived_from(pd.core.strings.StringMethods)
def extractall(self, pat, flags=0):
# TODO: metadata inference here won't be necessary for pandas >= 0.23.0
meta = self._series._meta.str.extractall(pat, flags=flags)
return self._series.map_partitions(
str_extractall, pat, flags, meta=meta, token="str-extractall"
)
def __getitem__(self, index):
return self._series.map_partitions(str_get, index, meta=self._series._meta)
def str_extractall(series, pat, flags):
return series.str.extractall(pat, flags=flags)
def str_get(series, index):
""" Implements series.str[index] """
return series.str[index]
def str_cat(self, *others, **kwargs):
return self.str.cat(others=others, **kwargs)
| bsd-3-clause | -6,140,759,249,159,344,000 | 29.64 | 107 | 0.586162 | false | 3.925329 | false | false | false |
fakdora/flaksy-upto-login | app/main/views.py | 1 | 3865 | from flask import render_template, redirect, url_for, abort, flash, request,\
current_app
from flask.ext.login import login_required, current_user
from . import main
from .forms import EditProfileForm, EditProfileAdminForm, PostForm
from .. import db
from ..models import Permission, Role, User, Post
from ..decorators import admin_required
@main.route('/', methods=['GET', 'POST'])
def index():
form = PostForm()
if current_user.can(Permission.WRITE_ARTICLES) and \
form.validate_on_submit():
post = Post(body=form.body.data,
author=current_user._get_current_object())
db.session.add(post)
return redirect(url_for('.index'))
page = request.args.get('page', 1, type=int)
pagination = Post.query.order_by(Post.timestamp.desc()).paginate(
page, per_page=current_app.config['FLASKY_POSTS_PER_PAGE'],
error_out=False)
posts = pagination.items
return render_template('index.html', form=form, posts=posts,
pagination=pagination)
@main.route('/user/<username>')
def user(username):
user = User.query.filter_by(username=username).first_or_404()
page = request.args.get('page', 1, type=int)
pagination = user.posts.order_by(Post.timestamp.desc()).paginate(
page, per_page=current_app.config['FLASKY_POSTS_PER_PAGE'],
error_out=False)
posts = pagination.items
return render_template('user.html', user=user, posts=posts,
pagination=pagination)
@main.route('/edit-profile', methods=['GET', 'POST'])
@login_required
def edit_profile():
form = EditProfileForm()
if form.validate_on_submit():
current_user.name = form.name.data
current_user.location = form.location.data
current_user.about_me = form.about_me.data
db.session.add(current_user)
flash('Your profile has been updated.')
return redirect(url_for('.user', username=current_user.username))
form.name.data = current_user.name
form.location.data = current_user.location
form.about_me.data = current_user.about_me
return render_template('edit_profile.html', form=form)
@main.route('/edit-profile/<int:id>', methods=['GET', 'POST'])
@login_required
@admin_required
def edit_profile_admin(id):
user = User.query.get_or_404(id)
form = EditProfileAdminForm(user=user)
if form.validate_on_submit():
user.email = form.email.data
user.username = form.username.data
user.confirmed = form.confirmed.data
user.role = Role.query.get(form.role.data)
user.name = form.name.data
user.location = form.location.data
user.about_me = form.about_me.data
db.session.add(user)
flash('The profile has been updated.')
return redirect(url_for('.user', username=user.username))
form.email.data = user.email
form.username.data = user.username
form.confirmed.data = user.confirmed
form.role.data = user.role_id
form.name.data = user.name
form.location.data = user.location
form.about_me.data = user.about_me
return render_template('edit_profile.html', form=form, user=user)
@main.route('/post/<int:id>')
def post(id):
post = Post.query.get_or_404(id)
return render_template('post.html', posts=[post])
@main.route('/edit/<int:id>', methods=['GET', 'POST'])
@login_required
def edit(id):
post = Post.query.get_or_404(id)
if current_user != post.author and \
not current_user.can(Permission.ADMINISTER):
abort(403)
form = PostForm()
if form.validate_on_submit():
post.body = form.body.data
db.session.add(post)
flash('The post has been updated.')
return redirect(url_for('.post', id=post.id))
form.body.data = post.body
return render_template('edit_post.html', form=form)
| mit | -6,375,092,692,908,061,000 | 35.809524 | 77 | 0.656145 | false | 3.545872 | false | false | false |
quentinhardy/odat | ExternalTable.py | 1 | 7140 | #!/usr/bin/python
# -*- coding: utf-8 -*-
from DirectoryManagement import DirectoryManagement
import logging, random, string
from Utils import checkOptionsGivenByTheUser
from Constants import *
class ExternalTable (DirectoryManagement):
'''
Allow the user to read file thanks to external tables
'''
def __init__(self,args):
'''
Constructor
'''
logging.debug("ExternalTable object created")
DirectoryManagement.__init__(self,args)
self.tableName = self.__generateRandomString__()
self.__setDirectoryName__()
self.ERROR_EXTERNAL_TABLE_WITH_WRITE = "ORA-30653: "
self.ERROR_EXTERNAL_TABLE_READ ="ORA-29400: "
self.ERROR_ODCIEXTTABLEOPEN="ORA-29913: "
def __createTableForReadFile__(self,remoteNameFile):
'''
		Create an external table used to read a remote file, for example:
		CREATE TABLE <name> (line varchar2(256)) ORGANIZATION EXTERNAL (TYPE oracle_loader DEFAULT DIRECTORY <dir> ... LOCATION ('<file>'))
'''
logging.info('Create the table: {0}'.format(self.tableName))
query = "CREATE TABLE {0} (line varchar2(256)) ORGANIZATION EXTERNAL (TYPE oracle_loader DEFAULT DIRECTORY {1} ACCESS PARAMETERS ( RECORDS DELIMITED BY NEWLINE BADFILE 'bad_data.bad' NOLOGFILE FIELDS TERMINATED BY ',' MISSING FIELD VALUES ARE NULL REJECT ROWS WITH ALL NULL FIELDS (line)) LOCATION ('{2}')) PARALLEL REJECT LIMIT 0 NOMONITORING".format(self.tableName, self.directoryName, remoteNameFile)
response = self.__execThisQuery__(query=query,isquery=False)
if isinstance(response,Exception) :
logging.info('Error with the SQL request {0}: {1}'.format(query,str(response)))
return response
else : return True
def __createTableForExec__(self,remoteNameFile):
'''
Create a table in order to execute a command
'''
logging.info('Create the table: {0}'.format(self.tableName))
query = """CREATE TABLE {0} ( line NUMBER , text VARCHAR2(4000)) ORGANIZATION EXTERNAL ( TYPE ORACLE_LOADER DEFAULT DIRECTORY {1} ACCESS PARAMETERS ( RECORDS DELIMITED BY NEWLINE NOLOGFILE PREPROCESSOR {1}: '{2}' FIELDS TERMINATED BY WHITESPACE ( line RECNUM , text POSITION(1:4000)) ) LOCATION ('{2}') ) REJECT LIMIT UNLIMITED""".format(self.tableName, self.directoryName, remoteNameFile)
response = self.__execThisQuery__(query=query,isquery=False)
if isinstance(response,Exception) :
logging.info('Error with the SQL request {0}: {1}'.format(query,str(response)))
return response
else : return True
def __dropTable__(self):
'''
		Drop the table with, for example:
DROP TABLE my_table PURGE;
'''
logging.info('Drop the table: {0}'.format(self.tableName))
query = "DROP TABLE {0} PURGE".format(self.tableName)
response = self.__execThisQuery__(query=query,isquery=False)
if isinstance(response,Exception) :
logging.info('Error with the SQL request {0}: {1}'.format(query,str(response)))
return response
else : return True
def getFile (self,remotePath, remoteNameFile, localFile):
'''
		Create the local file localFile containing the data stored in the remote file remoteNameFile (located in remotePath)
'''
data = ""
logging.info("Copy the {0} remote file (stored in {1}) to {2}".format(remoteNameFile,remotePath,localFile))
status = self.__createOrRemplaceDirectory__(remotePath)
if isinstance(status,Exception): return status
status = self.__createTableForReadFile__(remoteNameFile)
if isinstance(status,Exception): return status
request = "select line from {0}".format(self.tableName)
response = self.__execThisQuery__(query=request,ld=['line'])
if isinstance(response,Exception):
logging.info('Error with the SQL request {0}: {1}'.format(request,response))
status = self.__dropDirectory__()
status = self.__dropTable__()
return response
else :
for l in response:
data += l['line']+'\n'
status = self.__dropDirectory__()
status = self.__dropTable__()
return data
def execute (self, remotePath, remoteNameFile):
'''
Execute a command
'''
logging.info("Execute the {0} command stored stored in {1}".format(remoteNameFile,remotePath))
status = self.__createOrRemplaceDirectory__(remotePath)
if isinstance(status,Exception): return status
status = self.__createTableForExec__(remoteNameFile)
if isinstance(status,Exception): return status
request = "select line from {0}".format(self.tableName)
response = self.__execThisQuery__(query=request, ld=['line'])
if isinstance(response,Exception):
logging.info('Error with the SQL request {0}: {1}'.format(request,response))
status = self.__dropDirectory__()
status = self.__dropTable__()
return response
else :
logging.info("{0} command executed without errors".format(remoteNameFile))
status = self.__dropDirectory__()
status = self.__dropTable__()
return response
def testAll(self):
'''
Test all functions
'''
folder = self.__generateRandomString__()
self.args['print'].subtitle("External table to read files ?")
logging.info("Simulate the file reading in the {0} folder thanks to an external table".format(folder))
status = self.getFile(remotePath=folder, remoteNameFile='data.txt', localFile="test.txt")
if (status == True or self.ERROR_EXTERNAL_TABLE_WITH_WRITE in str(status) or self.ERROR_EXTERNAL_TABLE_READ in str(status)):
self.args['print'].goodNews("OK")
else :
self.args['print'].badNews("KO")
self.args['print'].subtitle("External table to execute system commands ?")
logging.info("Simulate the file execution thanks to an external table")
status = self.execute (remotePath=folder, remoteNameFile='test')
if (status == True or self.ERROR_EXTERNAL_TABLE_WITH_WRITE in str(status) or self.ERROR_EXTERNAL_TABLE_READ in str(status)):
self.args['print'].goodNews("OK")
else :
self.args['print'].badNews("KO")
def runExternalTableModule (args):
'''
Run the External Table module
'''
status = True
if checkOptionsGivenByTheUser(args,["test-module","getFile","exec"]) == False : return EXIT_MISS_ARGUMENT
externalTable = ExternalTable(args)
status = externalTable.connection(stopIfError=True)
if args['test-module'] == True :
args['print'].title("Test if the External Table module can be used")
status = externalTable.testAll()
#Option 1: getFile
if args['getFile'] != None:
args['print'].title("Read the {0} file stored in the {1} path".format(args['getFile'][1],args['getFile'][0]))
data = externalTable.getFile (remotePath=args['getFile'][0], remoteNameFile=args['getFile'][1], localFile=args['getFile'][2])
if isinstance(data,Exception):
args['print'].badNews("There is an error: {0}".format(data))
else:
args['print'].goodNews("Data stored in the remote file {0} stored in {1}".format(args['getFile'][1],args['getFile'][0]))
print(data)
#Option 2: exec a script or command
if args['exec'] != None:
args['print'].title("Execute the {0} command stored in the {1} path".format(args['exec'][1],args['exec'][0]))
data = externalTable.execute (remotePath=args['exec'][0], remoteNameFile=args['exec'][1])
if isinstance(data,Exception):
args['print'].badNews("There is an error: {0}".format(data))
else:
args['print'].goodNews("The {0} command stored in {1} has been executed (normally)".format(args['exec'][1],args['exec'][0]))
| lgpl-3.0 | 8,598,786,521,398,740,000 | 43.886792 | 405 | 0.711223 | false | 3.361752 | true | false | false |
werehuman/cocaine-tools | cocaine/tools/actions/crashlog.py | 1 | 7060 | #
# Copyright (c) 2013+ Anton Tyurin <[email protected]>
# Copyright (c) 2013+ Evgeny Safronov <[email protected]>
# Copyright (c) 2011-2014 Other contributors as noted in the AUTHORS file.
#
# This file is part of Cocaine-tools.
#
# Cocaine is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# Cocaine is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import datetime
import itertools
import time
from tornado import gen
from cocaine.tools import actions, log
from cocaine.decorators import coroutine
from cocaine.tools.actions import app
__author__ = 'Evgeny Safronov <[email protected]>'
def parse_crashlog_day_format(day_string):
index_format = 'cocaine-%Y-%m-%d'
if not day_string:
return day_string
if 'today'.startswith(day_string):
return datetime.date.today().strftime(index_format)
elif 'yesterday'.startswith(day_string):
yesterday = datetime.date.today() - datetime.timedelta(days=1)
return yesterday.strftime(index_format)
else:
values_count = day_string.count("-")
if values_count == 0: # only day specified
today = datetime.date.today()
day = datetime.datetime.strptime(day_string, "%d").replace(year=today.year,
month=today.month)
return day.strftime(index_format)
elif values_count == 1: # day and month
day = datetime.datetime.strptime(day_string,
"%d-%m").replace(year=datetime.date.today().year)
return day.strftime(index_format)
elif values_count == 2: # the whole date
return datetime.datetime.strptime(day_string, "%d-%m-%Y").strftime(index_format)
raise ValueError("Invalid day format %s. Must be day-month-year|today|yesterday" % day_string)
class List(actions.Storage):
def __init__(self, storage, name, day_string=''):
super(List, self).__init__(storage)
self.name = name
if not self.name:
raise ValueError('Please specify a crashlog name')
self.day = parse_crashlog_day_format(day_string)
@coroutine
def execute(self):
indexes = [self.name]
if self.day:
indexes.append(self.day)
channel = yield self.storage.find('crashlogs', indexes)
listing = yield channel.rx.get()
raise gen.Return(listing)
def _parseCrashlogs(crashlogs, timestamp=None):
def is_filter(arg):
return arg == timestamp if timestamp else True
_list = (log.split(':', 1) for log in crashlogs)
return [(ts, time.ctime(float(ts) / 1000000), name) for ts, name in _list if is_filter(ts)]
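# Illustrative example (not from the original source): each crashlog key looks
# like "<timestamp in microseconds>:<uuid>", so
#   _parseCrashlogs(['1423000000000000:9d37...'])
# yields entries of the form (raw timestamp, human readable time, uuid), e.g.
#   [('1423000000000000', 'Tue Feb  3 ...', '9d37...')]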
class Specific(actions.Storage):
def __init__(self, storage, name, timestamp=None):
super(Specific, self).__init__(storage)
self.name = name
self.timestamp = timestamp
if not self.name:
raise ValueError('Please specify application name')
class View(Specific):
@coroutine
def execute(self):
channel = yield self.storage.find('crashlogs', [self.name])
crashlogs = yield channel.rx.get()
parsed_crashlogs = _parseCrashlogs(crashlogs, timestamp=self.timestamp)
contents = []
for crashlog in parsed_crashlogs:
key = '%s:%s' % (crashlog[0], crashlog[2])
channel = yield self.storage.read('crashlogs', key)
content = yield channel.rx.get()
contents.append(content)
raise gen.Return(''.join(contents))
class Remove(Specific):
@coroutine
def execute(self):
channel = yield self.storage.find('crashlogs', [self.name])
crashlogs = yield channel.rx.get()
parsed_crashlogs = _parseCrashlogs(crashlogs, timestamp=self.timestamp)
for crashlog in parsed_crashlogs:
try:
key = '%s:%s' % (crashlog[0], crashlog[2])
channel = yield self.storage.remove('crashlogs', key)
yield channel.rx.get()
except Exception as err:
log.error("unable to delete crashlog %s: %s", str(crashlog), err)
raise gen.Return('Done')
class RemoveAll(Remove):
def __init__(self, storage, name):
super(RemoveAll, self).__init__(storage, name, timestamp=None)
class Status(actions.Storage):
@coroutine
def execute(self):
applications = yield app.List(self.storage).execute()
crashed = []
for application in applications:
crashlogs = yield List(self.storage, application).execute()
if crashlogs:
last = max(_parseCrashlogs(crashlogs), key=lambda (timestamp, time, uuid): timestamp)
crashed.append((application, last, len(crashlogs)))
raise gen.Return(crashed)
def splitted(collection, sep=None, maxsplit=None):
for item in collection:
yield item.split(sep, maxsplit)
def filtered(crashlogs):
for (ts, uuid) in splitted(crashlogs, ':', 1):
yield int(ts), uuid
class Clean(Specific):
def __init__(self, storage, name, size, timestamp=None):
super(Clean, self).__init__(storage, name, timestamp)
self.size = int(size)
@coroutine
def execute(self):
if not self.name:
apps = yield app.List(self.storage).execute()
else:
apps = [self.name]
result = []
if self.timestamp:
try:
dt = datetime.datetime.strptime(self.timestamp, '%Y-%m-%dT%H:%M:%S')
timestamp = int(time.mktime(dt.timetuple())) * 1000000 + dt.microsecond
except ValueError:
timestamp = int(self.timestamp)
for app_name in apps:
channel = yield self.storage.find('crashlogs', [app_name])
crashlogs = yield channel.rx.get()
result = filter(lambda (ts, uuid): ts < timestamp, filtered(crashlogs))
elif self.size > 0:
for app_name in apps:
channel = yield self.storage.find('crashlogs', [app_name])
crashlogs = yield channel.rx.get()
result = itertools.islice(
sorted(filtered(crashlogs[0]), key=lambda (ts, uuid): ts, reverse=True), self.size, None)
for crashlog in result:
print('removing', '%d:%s' % crashlog)
channel = yield self.storage.remove('crashlogs', '%d:%s' % crashlog)
yield channel.rx.get()
raise gen.Return('Done')
| lgpl-3.0 | -3,078,409,603,253,153,300 | 36.157895 | 109 | 0.616997 | false | 3.950755 | false | false | false |
yeti-platform/yeti | core/web/api/export.py | 1 | 3616 | from __future__ import unicode_literals
import os
from flask import send_from_directory, make_response
from flask_classy import route
from mongoengine.errors import DoesNotExist
from core.web.api.crud import CrudApi
from core import exports
from core.web.api.api import render
from core.helpers import string_to_timedelta
from core.observables import Tag
from core.web.helpers import requires_permissions
class ExportTemplate(CrudApi):
template = "export_template_api.html"
objectmanager = exports.ExportTemplate
class Export(CrudApi):
template = "export_api.html"
template_single = "export_api_single.html"
objectmanager = exports.Export
@route("/<string:id>/content")
@requires_permissions("read")
def content(self, id):
"""Return export content
Returns a given export's content.
:query ObjectID id: Export ID
:resheader X-Yeti-Export-MD5: The MD5 hash of the exported content. Use it to check the export's integrity
"""
try:
e = self.objectmanager.objects.get(id=id)
except DoesNotExist:
return render({"error": "No Export found for id {}".format(id)}), 404
if e.output_dir.startswith("/"):
d = e.output_dir
else:
d = os.path.join(
os.path.dirname(
os.path.dirname(
os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
)
),
e.output_dir,
)
response = make_response(
send_from_directory(
d, e.name, as_attachment=True, attachment_filename=e.name
)
)
response.headers["X-Yeti-Export-MD5"] = e.hash_md5
return response
@route("/<string:id>/refresh", methods=["POST"])
@requires_permissions("refresh")
def refresh(self, id):
"""Refresh an export
Manually executes an export if it is not already exporting.
:query ObjectID id: Export ID
:>json ObjectID id: The export's ObjectID
"""
exports.execute_export.delay(id)
return render({"id": id})
@route("/<string:id>/toggle", methods=["POST"])
@requires_permissions("toggle")
def toggle(self, id):
"""Toggle an export
Toggles an export. A deactivated export will not execute when called (manually or scheduled)
:query ObjectID id: Export ID
:>json ObjectID id: The export's ObjectID
:>json boolean status: The result of the toggle operation (``true`` means the export has been enabled, ``false`` means it has been disabled)
"""
e = self.objectmanager.objects.get(id=id)
e.enabled = not e.enabled
e.save()
return render({"id": id, "status": e.enabled})
def _parse_request(self, json):
params = json
params["frequency"] = string_to_timedelta(params.get("frequency", "1:00:00"))
params["ignore_tags"] = [
Tag.objects.get(name=name.strip())
for name in params["ignore_tags"].split(",")
if name.strip()
]
params["include_tags"] = [
Tag.objects.get(name=name.strip())
for name in params["include_tags"].split(",")
if name.strip()
]
params["exclude_tags"] = [
Tag.objects.get(name=name.strip())
for name in params["exclude_tags"].split(",")
if name.strip()
]
params["template"] = exports.ExportTemplate.objects.get(name=params["template"])
return params
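# Illustrative sketch (not part of the original module): verifying a download
# against the X-Yeti-Export-MD5 header documented in Export.content(). The API
# prefix and the use of the `requests` package are assumptions of this example.
def _example_verify_export_download(base_url, export_id):
    import hashlib
    import requests
    response = requests.get("{}/export/{}/content".format(base_url, export_id))
    digest = hashlib.md5(response.content).hexdigest()
    # The file is intact if the computed digest matches the advertised one.
    return digest == response.headers.get("X-Yeti-Export-MD5")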
| apache-2.0 | 2,652,748,986,496,160,000 | 32.174312 | 148 | 0.595409 | false | 4.067492 | false | false | false |
Alexoner/mooc | coursera/nlpintro-001/Assignment2/solutionsA.py | 1 | 9462 | import math
import nltk
import time
import sys
# Constants to be used by you when you fill the functions
START_SYMBOL = '*'
STOP_SYMBOL = 'STOP'
MINUS_INFINITY_SENTENCE_LOG_PROB = -1000
log2 = lambda x: math.log(x, 2)
# TODO: IMPLEMENT THIS FUNCTION
# Calculates unigram, bigram, and trigram probabilities given a training corpus
# training_corpus: is a list of the sentences. Each sentence is a string with tokens separated by spaces, ending in a newline character.
# This function outputs three python dictionaries, where the keys are
# tuples expressing the ngram and the value is the log probability of that
# ngram
def calc_probabilities(training_corpus):
"""
this is docstring
"""
# unigram_tuples = []
# bigram_tuples = []
# trigram_tuples = []
unigram_count = {}
bigram_count = {}
trigram_count = {}
unigram_count_pnodes = {}
bigram_count_pnodes = {}
trigram_count_pnodes = {}
unigram_total = 0
bigram_total = 0
trigram_total = 0
print 'total {} sentences'.format(len(training_corpus))
for i in xrange(0, len(training_corpus)):
if i % 3000 == 0:
print 'processing ', i, 'th sentence...'
training_corpus[i] = START_SYMBOL + ' ' + training_corpus[i]
training_corpus[i] = training_corpus[i] + ' ' + STOP_SYMBOL
# training_corpus[i].replace('.',' ' + STOP_SYMBOL)
tokens = training_corpus[i].split()
unigram_tuples_i = list((token,) for token in tokens)
bigram_tuples_i = list(nltk.bigrams(tokens))
trigram_tuples_i = list(nltk.trigrams(tokens))
unigram_total += len(unigram_tuples_i)
bigram_total += len(bigram_tuples_i)
trigram_total += len(trigram_tuples_i)
for item in unigram_tuples_i:
if item in [(START_SYMBOL,)]:
continue
unigram_count.setdefault(item, 0)
unigram_count_pnodes.setdefault(item[0:-1], 0)
unigram_count[item] = unigram_count[item] + 1
unigram_count_pnodes[
item[0:-1]] = unigram_count_pnodes[item[0:-1]] + 1
for item in bigram_tuples_i:
bigram_count.setdefault(item, 0)
bigram_count_pnodes.setdefault(item[0:-1], 0)
bigram_count[item] = bigram_count[item] + 1
bigram_count_pnodes[
item[0:-1]] = bigram_count_pnodes[item[0:-1]] + 1
for item in trigram_tuples_i:
trigram_count.setdefault(item, 0)
trigram_count_pnodes.setdefault(item[0:-1], 0)
trigram_count[item] = trigram_count[item] + 1
trigram_count_pnodes[
item[0:-1]] = trigram_count_pnodes[item[0:-1]] + 1
    unigram_p = {item: math.log(unigram_count[item], 2)
                 - math.log(unigram_count_pnodes[item[0:-1]], 2)
                 for item in set(unigram_count)}
    bigram_p = {item: math.log(bigram_count[item], 2)
                - math.log(bigram_count_pnodes[item[0:-1]], 2)
                for item in set(bigram_count)}
    trigram_p = {item: math.log(trigram_count[item], 2)
                 - math.log(trigram_count_pnodes[item[0:-1]], 2)
                 for item in set(trigram_count)}
print "calc_probabilities finished!"
return unigram_p, bigram_p, trigram_p
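# Illustrative example (not part of the assignment solution): with maximum
# likelihood estimation the bigram log-probability stored by calc_probabilities
# is log2(count(w1, w2) / count(w1)). The tiny corpus below is an assumption
# used only to show the arithmetic.
def _example_bigram_probabilities():
    corpus = ['the cat sat\n', 'the cat ran\n']
    unigrams, bigrams, trigrams = calc_probabilities(corpus)
    # ('the', 'cat') occurs twice and every bigram starting with 'the' is
    # ('the', 'cat'), so its log-probability is log2(2/2) = 0.0.
    # ('cat', 'sat') is one of the two bigrams starting with 'cat',
    # so its log-probability is log2(1/2) = -1.0.
    return bigrams[('the', 'cat')], bigrams[('cat', 'sat')]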
# Prints the output for q1
# Each input is a python dictionary where keys are a tuple expressing the
# ngram, and the value is the log probability of that ngram
def q1_output(unigrams, bigrams, trigrams, filename):
# output probabilities
outfile = open(filename, 'w')
unigrams_keys = sorted(unigrams.keys())
for unigram in unigrams_keys:
outfile.write('UNIGRAM ' +
unigram[0] +
' ' +
str(unigrams[unigram]) +
'\n')
outfile.flush()
bigrams_keys = sorted(bigrams.keys())
for bigram in bigrams_keys:
outfile.write('BIGRAM ' +
bigram[0] +
' ' +
bigram[1] +
' ' +
str(bigrams[bigram]) +
'\n')
outfile.flush()
trigrams_keys = sorted(trigrams.keys())
for trigram in trigrams_keys:
outfile.write('TRIGRAM ' +
trigram[0] +
' ' +
trigram[1] +
' ' +
trigram[2] +
' ' +
str(trigrams[trigram]) +
'\n')
outfile.flush()
outfile.close()
# TODO: IMPLEMENT THIS FUNCTION
# Calculates scores (log probabilities) for every sentence
# ngram_p: python dictionary of probabilities of uni-, bi- and trigrams.
# n: size of the ngram you want to use to compute probabilities
# corpus: list of sentences to score. Each sentence is a string with tokens separated by spaces, ending in a newline character.
# This function must return a python list of scores, where the first
# element is the score of the first sentence, etc.
def score(ngram_p, n, corpus):
print "scoring corpus for ", n, "-grams"
scores = []
for i, sentence in enumerate(corpus):
ngram_tuples = None
score_i = 0
if i % 10000 == 0:
print 'scoring ', i, 'th sentence...'
tokens = sentence.split()
if n == 1:
ngram_tuples = list([(token,) for token in tokens])
elif n == 2:
ngram_tuples = list(nltk.bigrams(tokens))
elif n == 3:
ngram_tuples = list(nltk.trigrams(tokens))
try:
score_i = sum([ngram_p[gram] for gram in ngram_tuples
if gram not in [(START_SYMBOL,)]])
except KeyError as error:
score_i = MINUS_INFINITY_SENTENCE_LOG_PROB
print 'ngram_tuple ', gram, ' not in dict ', error.message
scores.append(score_i)
return scores
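# Illustrative example (not part of the assignment solution): a sentence score
# is just the sum of the log-probabilities of its ngrams, so the first sentence
# of the tiny corpus above scores -1.0 under its own bigram model
# (0.0 for ('*', 'the'), 0.0 for ('the', 'cat'), -1.0 for ('cat', 'sat'),
# 0.0 for ('sat', 'STOP')).
def _example_score_first_sentence():
    corpus = ['the cat sat\n', 'the cat ran\n']
    unigrams, bigrams, trigrams = calc_probabilities(corpus)
    # calc_probabilities has already wrapped the sentences with '*' and 'STOP'.
    return score(bigrams, 2, corpus[:1])[0]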
# Outputs a score to a file
# scores: list of scores
# filename: is the output file name
def score_output(scores, filename):
outfile = open(filename, 'w')
for score in scores:
outfile.write(str(score) + '\n')
outfile.close()
# TODO: IMPLEMENT THIS FUNCTION
# Calculates scores (log probabilities) for every sentence with a linearly interpolated model
# Each ngram argument is a python dictionary where the keys are tuples that express an ngram and the value is the log probability of that ngram
# Like score(), this function returns a python list of scores
# TODO: `EM` algorithm to find the optimal weights.
def linearscore(unigrams, bigrams, trigrams, corpus):
scores = []
weights = (1. / 3, 1. / 3, 1. / 3,)
for i, sentence in enumerate(corpus):
if i % 3000 == 0:
print 'linearscore ', i, 'th sentence...'
score_i = 0
tokens = sentence.split()
trigram_tuples = list(nltk.trigrams(tokens))
try:
for trigram in trigram_tuples:
score_i += log2(sum([weights[0] * 2 ** trigrams[trigram[0:]],
weights[1] * 2 ** bigrams[trigram[1:]],
weights[2] * 2 ** unigrams[trigram[2:]],
]))
except KeyError as e:
score_i = MINUS_INFINITY_SENTENCE_LOG_PROB
print i, 'th sentence', 'ngram ', trigram, ' not in dict', e.message
scores.append(score_i)
return scores
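# Illustrative helper (not part of the assignment solution): the interpolated
# log-probability computed inside linearscore() for a single trigram
# (w1, w2, w3) with equal weights, i.e.
# log2( (1/3)*2**P3(w3|w1,w2) + (1/3)*2**P2(w3|w2) + (1/3)*2**P1(w3) ).
def _example_interpolated_trigram(unigrams, bigrams, trigrams, trigram):
    weights = (1. / 3, 1. / 3, 1. / 3)
    return log2(weights[0] * 2 ** trigrams[trigram[0:]]
                + weights[1] * 2 ** bigrams[trigram[1:]]
                + weights[2] * 2 ** unigrams[trigram[2:]])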
DATA_PATH = 'data/'
OUTPUT_PATH = 'output/'
# DO NOT MODIFY THE MAIN FUNCTION
def main():
# start timer
time.clock()
# get data
infile = open(DATA_PATH + 'Brown_train.txt', 'r')
corpus = infile.readlines()
infile.close()
# calculate ngram probabilities (question 1)
unigrams, bigrams, trigrams = calc_probabilities(corpus)
# question 1 output
q1_output(unigrams, bigrams, trigrams, OUTPUT_PATH + 'A1.txt')
# score sentences (question 2)
uniscores = score(unigrams, 1, corpus)
biscores = score(bigrams, 2, corpus)
triscores = score(trigrams, 3, corpus)
# question 2 output
score_output(uniscores, OUTPUT_PATH + 'A2.uni.txt')
score_output(biscores, OUTPUT_PATH + 'A2.bi.txt')
score_output(triscores, OUTPUT_PATH + 'A2.tri.txt')
# linear interpolation (question 3)
linearscores = linearscore(unigrams, bigrams, trigrams, corpus)
# question 3 output
score_output(linearscores, OUTPUT_PATH + 'A3.txt')
# open Sample1 and Sample2 (question 5)
infile = open(DATA_PATH + 'Sample1.txt', 'r')
sample1 = infile.readlines()
infile.close()
infile = open(DATA_PATH + 'Sample2.txt', 'r')
sample2 = infile.readlines()
infile.close()
# score the samples
sample1scores = linearscore(unigrams, bigrams, trigrams, sample1)
sample2scores = linearscore(unigrams, bigrams, trigrams, sample2)
# question 5 output
score_output(sample1scores, OUTPUT_PATH + 'Sample1_scored.txt')
score_output(sample2scores, OUTPUT_PATH + 'Sample2_scored.txt')
# print total time to run Part A
print("Part A time: " + str(time.clock()) + ' sec')
if __name__ == "__main__":
main()
| apache-2.0 | 5,338,700,109,794,599,000 | 32.792857 | 143 | 0.570915 | false | 3.636434 | false | false | false |
soumyanishan/azure-linux-extensions | VMAccess/vmaccess.py | 1 | 18922 | #!/usr/bin/env python
#
# VMAccess extension
#
# Copyright 2014 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import platform
import re
import shutil
import sys
import tempfile
import time
import traceback
import Utils.HandlerUtil as Util
from waagentloader import load_waagent
waagent = load_waagent()
# Define global variables
ExtensionShortName = 'VMAccess'
BeginCertificateTag = '-----BEGIN CERTIFICATE-----'
EndCertificateTag = '-----END CERTIFICATE-----'
OutputSplitter = ';'
SshdConfigPath = '/etc/ssh/sshd_config'
def main():
waagent.LoggerInit('/var/log/waagent.log', '/dev/stdout')
waagent.Log("%s started to handle." % (ExtensionShortName))
waagent.MyDistro = waagent.GetMyDistro()
try:
for a in sys.argv[1:]:
if re.match("^([-/]*)(disable)", a):
disable()
elif re.match("^([-/]*)(uninstall)", a):
uninstall()
elif re.match("^([-/]*)(install)", a):
install()
elif re.match("^([-/]*)(enable)", a):
enable()
elif re.match("^([-/]*)(update)", a):
update()
except Exception as e:
err_msg = "Failed with error: {0}, {1}".format(e, traceback.format_exc())
waagent.Error(err_msg)
def install():
hutil = Util.HandlerUtility(waagent.Log, waagent.Error)
hutil.do_parse_context('Uninstall')
hutil.do_exit(0, 'Install', 'success', '0', 'Install Succeeded')
def enable():
hutil = Util.HandlerUtility(waagent.Log, waagent.Error)
hutil.do_parse_context('Enable')
try:
_forcibly_reset_chap(hutil)
reset_ssh = None
remove_user = None
protect_settings = hutil.get_protected_settings()
if protect_settings:
reset_ssh = protect_settings.get('reset_ssh')
remove_user = protect_settings.get('remove_user')
if remove_user and _is_sshd_config_modified(protect_settings):
hutil.error("Cannot reset sshd_config and remove a user in one operation.")
waagent.AddExtensionEvent(name=hutil.get_name(),
op=waagent.WALAEventOperation.Enable,
isSuccess=False,
message="(03002)Argument error, conflicting operations")
hutil.do_exit(1, 'Enable', 'error', '0', 'Enable failed.')
# check port each time the VM boots up
if reset_ssh:
_open_ssh_port()
hutil.log("Succeeded in check and open ssh port.")
hutil.exit_if_enabled()
if _is_sshd_config_modified(protect_settings):
_backup_sshd_config(SshdConfigPath)
if reset_ssh:
waagent.AddExtensionEvent(name=hutil.get_name(), op="scenario", isSuccess=True, message="reset-ssh")
_reset_sshd_config(SshdConfigPath)
hutil.log("Succeeded in reset sshd_config.")
if remove_user:
waagent.AddExtensionEvent(name=hutil.get_name(), op="scenario", isSuccess=True, message="remove-user")
_remove_user_account(remove_user, hutil)
_set_user_account_pub_key(protect_settings, hutil)
if _is_sshd_config_modified(protect_settings):
waagent.MyDistro.restartSshService()
check_and_repair_disk(hutil)
hutil.do_exit(0, 'Enable', 'success', '0', 'Enable succeeded.')
except Exception as e:
hutil.error(("Failed to enable the extension with error: {0}, "
"stack trace: {1}").format(str(e), traceback.format_exc()))
hutil.do_exit(1, 'Enable', 'error', '0', 'Enable failed.')
def _forcibly_reset_chap(hutil):
name = "ChallengeResponseAuthentication"
config = waagent.GetFileContents(SshdConfigPath).split("\n")
for i in range(0, len(config)):
if config[i].startswith(name) and "no" in config[i].lower():
waagent.AddExtensionEvent(name=hutil.get_name(), op="sshd", isSuccess=True, message="ChallengeResponseAuthentication no")
return
waagent.AddExtensionEvent(name=hutil.get_name(), op="sshd", isSuccess=True, message="ChallengeResponseAuthentication yes")
_backup_sshd_config(SshdConfigPath)
_set_sshd_config(config, name, "no")
waagent.ReplaceFileContentsAtomic(SshdConfigPath, "\n".join(config))
waagent.MyDistro.restartSshService()
def _is_sshd_config_modified(protected_settings):
result = protected_settings.get('reset_ssh') or protected_settings.get('password')
return result is not None
def uninstall():
hutil = Util.HandlerUtility(waagent.Log, waagent.Error)
hutil.do_parse_context('Uninstall')
hutil.do_exit(0, 'Uninstall', 'success', '0', 'Uninstall succeeded')
def disable():
hutil = Util.HandlerUtility(waagent.Log, waagent.Error)
hutil.do_parse_context('Disable')
hutil.do_exit(0, 'Disable', 'success', '0', 'Disable Succeeded')
def update():
hutil = Util.HandlerUtility(waagent.Log, waagent.Error)
hutil.do_parse_context('Update')
hutil.do_exit(0, 'Update', 'success', '0', 'Update Succeeded')
def _remove_user_account(user_name, hutil):
hutil.log("Removing user account")
try:
sudoers = _get_other_sudoers(user_name)
waagent.MyDistro.DeleteAccount(user_name)
_save_other_sudoers(sudoers)
except Exception as e:
waagent.AddExtensionEvent(name=hutil.get_name(),
op=waagent.WALAEventOperation.Enable,
isSuccess=False,
message="(02102)Failed to remove user.")
raise Exception("Failed to remove user {0}".format(e))
waagent.AddExtensionEvent(name=hutil.get_name(),
op=waagent.WALAEventOperation.Enable,
isSuccess=True,
message="Successfully removed user")
def _set_user_account_pub_key(protect_settings, hutil):
ovf_xml = waagent.GetFileContents('/var/lib/waagent/ovf-env.xml')
ovf_env = waagent.OvfEnv().Parse(ovf_xml)
# user name must be provided if set ssh key or password
if not protect_settings or not protect_settings.has_key('username'):
return
user_name = protect_settings['username']
user_pass = protect_settings.get('password')
cert_txt = protect_settings.get('ssh_key')
expiration = protect_settings.get('expiration')
no_convert = False
if not user_pass and not cert_txt and not ovf_env.SshPublicKeys:
raise Exception("No password or ssh_key is specified.")
if user_pass is not None and len(user_pass) == 0:
user_pass = None
hutil.log("empty passwords are not allowed, ignoring password reset")
# Reset user account and password, password could be empty
sudoers = _get_other_sudoers(user_name)
error_string = waagent.MyDistro.CreateAccount(
user_name, user_pass, expiration, None)
_save_other_sudoers(sudoers)
if error_string is not None:
err_msg = "Failed to create the account or set the password"
waagent.AddExtensionEvent(name=hutil.get_name(),
op=waagent.WALAEventOperation.Enable,
isSuccess=False,
message="(02101)" + err_msg)
raise Exception(err_msg + " with " + error_string)
hutil.log("Succeeded in create the account or set the password.")
# Allow password authentication if user_pass is provided
if user_pass is not None:
waagent.AddExtensionEvent(name=hutil.get_name(), op="scenario", isSuccess=True, message="create-user-with-password")
_allow_password_auth()
# Reset ssh key with the new public key passed in or reuse old public key.
if cert_txt or len(ovf_env.SshPublicKeys) > 0:
if cert_txt and cert_txt.strip().lower().startswith("ssh-rsa"):
no_convert = True
try:
pub_path = os.path.join('/home/', user_name, '.ssh',
'authorized_keys')
ovf_env.UserName = user_name
if no_convert:
if cert_txt:
pub_path = ovf_env.PrepareDir(pub_path)
final_cert_txt = cert_txt
if(not cert_txt.endswith("\n")):
final_cert_txt = final_cert_txt+"\n"
waagent.AppendFileContents(pub_path, final_cert_txt)
waagent.MyDistro.setSelinuxContext(pub_path,
'unconfined_u:object_r:ssh_home_t:s0')
waagent.ChangeOwner(pub_path, user_name)
waagent.AddExtensionEvent(name=hutil.get_name(), op="scenario", isSuccess=True, message="create-user")
hutil.log("Succeeded in resetting ssh_key.")
else:
err_msg = "Failed to reset ssh key because the cert content is empty."
waagent.AddExtensionEvent(name=hutil.get_name(),
op=waagent.WALAEventOperation.Enable,
isSuccess=False,
message="(02100)"+err_msg)
else:
if cert_txt:
_save_cert_str_as_file(cert_txt, 'temp.crt')
else:
for pkey in ovf_env.SshPublicKeys:
if pkey[1]:
shutil.copy(
os.path.join(waagent.LibDir, pkey[0] + '.crt'),
os.path.join(os.getcwd(), 'temp.crt'))
break
pub_path = ovf_env.PrepareDir(pub_path)
retcode = waagent.Run(waagent.Openssl + " x509 -in temp.crt -noout -pubkey > temp.pub")
if retcode > 0:
raise Exception("Failed to generate public key file.")
waagent.MyDistro.sshDeployPublicKey('temp.pub', pub_path)
waagent.MyDistro.setSelinuxContext(pub_path,
'unconfined_u:object_r:ssh_home_t:s0')
waagent.ChangeOwner(pub_path, user_name)
os.remove('temp.pub')
os.remove('temp.crt')
waagent.AddExtensionEvent(name=hutil.get_name(), op="scenario", isSuccess=True, message="create-user")
hutil.log("Succeeded in resetting ssh_key.")
except Exception as e:
hutil.log(str(e))
waagent.AddExtensionEvent(name=hutil.get_name(),
op=waagent.WALAEventOperation.Enable,
isSuccess=False,
message="(02100)Failed to reset ssh key.")
def _get_other_sudoers(userName):
sudoersFile = '/etc/sudoers.d/waagent'
if not os.path.isfile(sudoersFile):
return None
sudoers = waagent.GetFileContents(sudoersFile).split("\n")
pattern = '^{0}\s'.format(userName)
sudoers = filter(lambda x : re.match(pattern, x) is None, sudoers)
return sudoers
def _save_other_sudoers(sudoers):
sudoersFile = '/etc/sudoers.d/waagent'
if sudoers is None:
return
waagent.AppendFileContents(sudoersFile, "\n".join(sudoers))
os.chmod("/etc/sudoers.d/waagent", 0o440)
def _allow_password_auth():
config = waagent.GetFileContents(SshdConfigPath).split("\n")
_set_sshd_config(config, "PasswordAuthentication", "yes")
waagent.ReplaceFileContentsAtomic(SshdConfigPath, "\n".join(config))
def _set_sshd_config(config, name, val):
notfound = True
for i in range(0, len(config)):
if config[i].startswith(name):
config[i] = "{0} {1}".format(name, val)
notfound = False
elif config[i].startswith("Match"):
# Match block must be put in the end of sshd config
break
if notfound:
config.insert(i, "{0} {1}".format(name, val))
return config
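# Illustrative example (not part of the original handler): _set_sshd_config
# replaces an existing directive in the parsed sshd_config lines, or inserts it
# just before the first "Match" block when the directive is missing.
def _example_disable_password_auth():
    config = ["Port 22", "Match User backup", "    PasswordAuthentication yes"]
    _set_sshd_config(config, "PasswordAuthentication", "no")
    # config is now ["Port 22", "PasswordAuthentication no",
    #                "Match User backup", "    PasswordAuthentication yes"]
    return config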
def _reset_sshd_config(sshd_file_path):
distro = platform.dist()
distro_name = distro[0]
version = distro[1]
config_file_path = os.path.join(os.getcwd(), 'resources', '%s_%s' % (distro_name, version))
if not(os.path.exists(config_file_path)):
config_file_path = os.path.join(os.getcwd(), 'resources', '%s_%s' % (distro_name, 'default'))
if not(os.path.exists(config_file_path)):
config_file_path = os.path.join(os.getcwd(), 'resources', 'default')
if distro_name == "CoreOS":
# Parse sshd port from config_file_path
sshd_port = 22
regex = re.compile(r"^Port\s+(\d+)", re.VERBOSE)
with open(config_file_path) as f:
for line in f:
match = regex.match(line)
if match:
sshd_port = match.group(1)
break
# Prepare cloud init config for coreos-cloudinit
f = tempfile.NamedTemporaryFile(delete=False)
f.close()
cfg_tempfile = f.name
cfg_content = "#cloud-config\n\n"
# Overwrite /etc/ssh/sshd_config
cfg_content += "write_files:\n"
cfg_content += " - path: {0}\n".format(sshd_file_path)
cfg_content += " permissions: 0600\n"
cfg_content += " owner: root:root\n"
cfg_content += " content: |\n"
for line in waagent.GetFileContents(config_file_path).split('\n'):
cfg_content += " {0}\n".format(line)
# Change the sshd port in /etc/systemd/system/sshd.socket
cfg_content += "\ncoreos:\n"
cfg_content += " units:\n"
cfg_content += " - name: sshd.socket\n"
cfg_content += " command: restart\n"
cfg_content += " content: |\n"
cfg_content += " [Socket]\n"
cfg_content += " ListenStream={0}\n".format(sshd_port)
cfg_content += " Accept=yes\n"
waagent.SetFileContents(cfg_tempfile, cfg_content)
waagent.Run("coreos-cloudinit -from-file " + cfg_tempfile, chk_err=False)
os.remove(cfg_tempfile)
else:
shutil.copyfile(config_file_path, sshd_file_path)
waagent.MyDistro.restartSshService()
def _backup_sshd_config(sshd_file_path):
if os.path.exists(sshd_file_path):
backup_file_name = '%s_%s' % (
sshd_file_path, time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime()))
shutil.copyfile(sshd_file_path, backup_file_name)
def _save_cert_str_as_file(cert_txt, file_name):
cert_start = cert_txt.find(BeginCertificateTag)
if cert_start >= 0:
cert_txt = cert_txt[cert_start + len(BeginCertificateTag):]
cert_end = cert_txt.find(EndCertificateTag)
if cert_end >= 0:
cert_txt = cert_txt[:cert_end]
cert_txt = cert_txt.strip()
cert_txt = "{0}\n{1}\n{2}\n".format(BeginCertificateTag, cert_txt, EndCertificateTag)
waagent.SetFileContents(file_name, cert_txt)
def _open_ssh_port():
_del_rule_if_exists('INPUT -p tcp -m tcp --dport 22 -j DROP')
_del_rule_if_exists('INPUT -p tcp -m tcp --dport 22 -j REJECT')
_del_rule_if_exists('INPUT -p -j DROP')
_del_rule_if_exists('INPUT -p -j REJECT')
_insert_rule_if_not_exists('INPUT -p tcp -m tcp --dport 22 -j ACCEPT')
_del_rule_if_exists('OUTPUT -p tcp -m tcp --sport 22 -j DROP')
_del_rule_if_exists('OUTPUT -p tcp -m tcp --sport 22 -j REJECT')
_del_rule_if_exists('OUTPUT -p -j DROP')
_del_rule_if_exists('OUTPUT -p -j REJECT')
_insert_rule_if_not_exists('OUTPUT -p tcp -m tcp --dport 22 -j ACCEPT')
def _del_rule_if_exists(rule_string):
cmd_result = waagent.RunGetOutput("iptables-save")
while cmd_result[0] == 0 and (rule_string in cmd_result[1]):
waagent.Run("iptables -D %s" % rule_string)
cmd_result = waagent.RunGetOutput("iptables-save")
def _insert_rule_if_not_exists(rule_string):
cmd_result = waagent.RunGetOutput("iptables-save")
if cmd_result[0] == 0 and (rule_string not in cmd_result[1]):
waagent.Run("iptables -I %s" % rule_string)
def check_and_repair_disk(hutil):
public_settings = hutil.get_public_settings()
if public_settings:
check_disk = public_settings.get('check_disk')
repair_disk = public_settings.get('repair_disk')
disk_name = public_settings.get('disk_name')
if check_disk and repair_disk:
err_msg = ("check_disk and repair_disk was both specified."
"Only one of them can be specified")
hutil.error(err_msg)
hutil.do_exit(1, 'Enable', 'error', '0', 'Enable failed.')
if check_disk:
waagent.AddExtensionEvent(name=hutil.get_name(), op="scenario", isSuccess=True, message="check_disk")
outretcode = _fsck_check(hutil)
hutil.log("Successfully checked disk")
return outretcode
if repair_disk:
waagent.AddExtensionEvent(name=hutil.get_name(), op="scenario", isSuccess=True, message="repair_disk")
outdata = _fsck_repair(hutil, disk_name)
hutil.log("Repaired and remounted disk")
return outdata
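# Illustrative example (not part of the original handler): public settings that
# drive check_and_repair_disk(). The exact values are an assumption; only one
# of check_disk / repair_disk may be enabled at a time.
#
#   {
#       "check_disk": true,
#       "repair_disk": false,
#       "disk_name": "sdc"
#   }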
def _fsck_check(hutil):
try:
retcode = waagent.Run("fsck -As -y")
if retcode > 0:
hutil.log(retcode)
raise Exception("Disk check was not successful")
else:
return retcode
except Exception as e:
hutil.error("Failed to run disk check with error: {0}, {1}".format(
str(e), traceback.format_exc()))
hutil.do_exit(1, 'Check', 'error', '0', 'Check failed.')
def _fsck_repair(hutil, disk_name):
# first unmount disks and loop devices lazy + forced
try:
cmd_result = waagent.Run("umount -f /%s" % disk_name)
if cmd_result != 0:
# Fail fast
hutil.log("Failed to unmount disk: %s" % disk_name)
# run repair
retcode = waagent.Run("fsck -AR -y")
hutil.log("Ran fsck with return code: %d" % retcode)
if retcode == 0:
retcode, output = waagent.RunGetOutput("mount")
hutil.log(output)
return output
else:
raise Exception("Failed to mount disks")
except Exception as e:
hutil.error("{0}, {1}".format(str(e), traceback.format_exc()))
hutil.do_exit(1, 'Repair','error','0', 'Repair failed.')
if __name__ == '__main__' :
main()
| apache-2.0 | 2,167,910,506,784,171,300 | 38.919831 | 133 | 0.595973 | false | 3.615899 | true | false | false |
firebase/firebase-admin-python | setup.py | 1 | 2584 | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Setup file for distribution artifacts."""
from __future__ import print_function
from os import path
import sys
from setuptools import setup
(major, minor) = (sys.version_info.major, sys.version_info.minor)
if major != 3 or minor < 6:
print('firebase_admin requires python >= 3.6', file=sys.stderr)
sys.exit(1)
# Read in the package metadata per recommendations from:
# https://packaging.python.org/guides/single-sourcing-package-version/
about_path = path.join(path.dirname(path.abspath(__file__)), 'firebase_admin', '__about__.py')
about = {}
with open(about_path) as fp:
exec(fp.read(), about) # pylint: disable=exec-used
long_description = ('The Firebase Admin Python SDK enables server-side (backend) Python developers '
'to integrate Firebase into their services and applications.')
install_requires = [
'cachecontrol>=0.12.6',
'google-api-core[grpc] >= 1.22.1, < 2.0.0dev; platform.python_implementation != "PyPy"',
'google-api-python-client >= 1.7.8',
'google-cloud-firestore>=2.1.0; platform.python_implementation != "PyPy"',
'google-cloud-storage>=1.37.1',
]
setup(
name=about['__title__'],
version=about['__version__'],
description='Firebase Admin Python SDK',
long_description=long_description,
url=about['__url__'],
author=about['__author__'],
license=about['__license__'],
keywords='firebase cloud development',
install_requires=install_requires,
packages=['firebase_admin'],
python_requires='>=3.6',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Topic :: Software Development :: Build Tools',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'License :: OSI Approved :: Apache Software License',
],
)
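# Illustrative note (not part of the original file): a source checkout can be
# installed with "pip install ." or packaged with "python setup.py sdist
# bdist_wheel" for release.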
| apache-2.0 | -2,863,027,261,551,218,000 | 35.914286 | 100 | 0.674923 | false | 3.839525 | false | false | false |
iot-factory/synapse | synapse/storage/transactions.py | 1 | 10722 | # -*- coding: utf-8 -*-
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ._base import SQLBaseStore
from synapse.util.caches.descriptors import cached
from collections import namedtuple
from canonicaljson import encode_canonical_json
import logging
logger = logging.getLogger(__name__)
class TransactionStore(SQLBaseStore):
"""A collection of queries for handling PDUs.
"""
def get_received_txn_response(self, transaction_id, origin):
"""For an incoming transaction from a given origin, check if we have
already responded to it. If so, return the response code and response
body (as a dict).
Args:
transaction_id (str)
origin(str)
Returns:
tuple: None if we have not previously responded to
this transaction or a 2-tuple of (int, dict)
"""
return self.runInteraction(
"get_received_txn_response",
self._get_received_txn_response, transaction_id, origin
)
def _get_received_txn_response(self, txn, transaction_id, origin):
result = self._simple_select_one_txn(
txn,
table=ReceivedTransactionsTable.table_name,
keyvalues={
"transaction_id": transaction_id,
"origin": origin,
},
retcols=ReceivedTransactionsTable.fields,
allow_none=True,
)
if result and result["response_code"]:
return result["response_code"], result["response_json"]
else:
return None
def set_received_txn_response(self, transaction_id, origin, code,
response_dict):
"""Persist the response we returened for an incoming transaction, and
should return for subsequent transactions with the same transaction_id
and origin.
Args:
txn
transaction_id (str)
origin (str)
code (int)
response_json (str)
"""
return self._simple_insert(
table=ReceivedTransactionsTable.table_name,
values={
"transaction_id": transaction_id,
"origin": origin,
"response_code": code,
"response_json": buffer(encode_canonical_json(response_dict)),
},
or_ignore=True,
desc="set_received_txn_response",
)
def prep_send_transaction(self, transaction_id, destination,
origin_server_ts):
"""Persists an outgoing transaction and calculates the values for the
previous transaction id list.
This should be called before sending the transaction so that it has the
correct value for the `prev_ids` key.
Args:
transaction_id (str)
destination (str)
origin_server_ts (int)
Returns:
list: A list of previous transaction ids.
"""
return self.runInteraction(
"prep_send_transaction",
self._prep_send_transaction,
transaction_id, destination, origin_server_ts
)
def _prep_send_transaction(self, txn, transaction_id, destination,
origin_server_ts):
next_id = self._transaction_id_gen.get_next_txn(txn)
# First we find out what the prev_txns should be.
# Since we know that we are only sending one transaction at a time,
# we can simply take the last one.
query = (
"SELECT * FROM sent_transactions"
" WHERE destination = ?"
" ORDER BY id DESC LIMIT 1"
)
txn.execute(query, (destination,))
results = self.cursor_to_dict(txn)
prev_txns = [r["transaction_id"] for r in results]
# Actually add the new transaction to the sent_transactions table.
self._simple_insert_txn(
txn,
table=SentTransactions.table_name,
values={
"id": next_id,
"transaction_id": transaction_id,
"destination": destination,
"ts": origin_server_ts,
"response_code": 0,
"response_json": None,
}
)
# TODO Update the tx id -> pdu id mapping
return prev_txns
def delivered_txn(self, transaction_id, destination, code, response_dict):
"""Persists the response for an outgoing transaction.
Args:
transaction_id (str)
destination (str)
code (int)
response_json (str)
"""
return self.runInteraction(
"delivered_txn",
self._delivered_txn,
transaction_id, destination, code,
buffer(encode_canonical_json(response_dict)),
)
def _delivered_txn(self, txn, transaction_id, destination,
code, response_json):
self._simple_update_one_txn(
txn,
table=SentTransactions.table_name,
keyvalues={
"transaction_id": transaction_id,
"destination": destination,
},
updatevalues={
"response_code": code,
"response_json": None, # For now, don't persist response_json
}
)
def get_transactions_after(self, transaction_id, destination):
"""Get all transactions after a given local transaction_id.
Args:
transaction_id (str)
destination (str)
Returns:
list: A list of dicts
"""
return self.runInteraction(
"get_transactions_after",
self._get_transactions_after, transaction_id, destination
)
def _get_transactions_after(self, txn, transaction_id, destination):
query = (
"SELECT * FROM sent_transactions"
" WHERE destination = ? AND id >"
" ("
" SELECT id FROM sent_transactions"
" WHERE transaction_id = ? AND destination = ?"
" )"
)
txn.execute(query, (destination, transaction_id, destination))
return self.cursor_to_dict(txn)
@cached()
def get_destination_retry_timings(self, destination):
"""Gets the current retry timings (if any) for a given destination.
Args:
destination (str)
Returns:
None if not retrying
Otherwise a dict for the retry scheme
"""
return self.runInteraction(
"get_destination_retry_timings",
self._get_destination_retry_timings, destination)
def _get_destination_retry_timings(self, txn, destination):
result = self._simple_select_one_txn(
txn,
table=DestinationsTable.table_name,
keyvalues={
"destination": destination,
},
retcols=DestinationsTable.fields,
allow_none=True,
)
if result and result["retry_last_ts"] > 0:
return result
else:
return None
def set_destination_retry_timings(self, destination,
retry_last_ts, retry_interval):
"""Sets the current retry timings for a given destination.
        Both timings should be zero if retrying is no longer occurring.
Args:
destination (str)
retry_last_ts (int) - time of last retry attempt in unix epoch ms
retry_interval (int) - how long until next retry in ms
"""
        # XXX: we could choose to not bother persisting this if our cache thinks
# this is a NOOP
return self.runInteraction(
"set_destination_retry_timings",
self._set_destination_retry_timings,
destination,
retry_last_ts,
retry_interval,
)
def _set_destination_retry_timings(self, txn, destination,
retry_last_ts, retry_interval):
txn.call_after(self.get_destination_retry_timings.invalidate, (destination,))
self._simple_upsert_txn(
txn,
"destinations",
keyvalues={
"destination": destination,
},
values={
"retry_last_ts": retry_last_ts,
"retry_interval": retry_interval,
},
insertion_values={
"destination": destination,
"retry_last_ts": retry_last_ts,
"retry_interval": retry_interval,
}
)
def get_destinations_needing_retry(self):
"""Get all destinations which are due a retry for sending a transaction.
Returns:
list: A list of dicts
"""
return self.runInteraction(
"get_destinations_needing_retry",
self._get_destinations_needing_retry
)
def _get_destinations_needing_retry(self, txn):
query = (
"SELECT * FROM destinations"
" WHERE retry_last_ts > 0 and retry_next_ts < ?"
)
txn.execute(query, (self._clock.time_msec(),))
return self.cursor_to_dict(txn)
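# Illustrative sketch (not part of the original store): the calling pattern the
# docstrings above describe for one outgoing transaction -- fetch the previous
# transaction ids before sending, then record the destination's response.
# `store` and `send_fn` are assumptions supplied by the caller.
from twisted.internet import defer  # example-only import
@defer.inlineCallbacks
def _example_send_transaction(store, transaction_id, destination, ts, send_fn):
    prev_ids = yield store.prep_send_transaction(transaction_id, destination, ts)
    code, response = yield send_fn(prev_ids)
    yield store.delivered_txn(transaction_id, destination, code, response)
    defer.returnValue(code)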
class ReceivedTransactionsTable(object):
table_name = "received_transactions"
fields = [
"transaction_id",
"origin",
"ts",
"response_code",
"response_json",
"has_been_referenced",
]
class SentTransactions(object):
table_name = "sent_transactions"
fields = [
"id",
"transaction_id",
"destination",
"ts",
"response_code",
"response_json",
]
EntryType = namedtuple("SentTransactionsEntry", fields)
class TransactionsToPduTable(object):
table_name = "transaction_id_to_pdu"
fields = [
"transaction_id",
"destination",
"pdu_id",
"pdu_origin",
]
class DestinationsTable(object):
table_name = "destinations"
fields = [
"destination",
"retry_last_ts",
"retry_interval",
]
| apache-2.0 | -488,149,266,400,128,260 | 29.202817 | 85 | 0.561929 | false | 4.533615 | false | false | false |
esa/SpaceAMPL | interplanetary/impulsive/single_phase/include/writeequations.py | 1 | 13720 | import sys;
file = open("equations.inc","w")
file2 = open("writeinitialguess.inc","w")
file3 = open("writesolution.inc","w")
file4 = open("guesstangential.inc","w")
n=int(sys.argv[1]);
file.write("#------------------------------------------------------------------------\n")
file.write("#Optimisation Variables\n\n")
file.write("#Impulsive DVs\n")
file.write("var ux{i in 2..n-1};\n")
file.write("var uy{i in 2..n-1};\n")
file.write("var uz{i in 2..n-1};\n")
file.write("var uT{i in 2..n-1} = sqrt(ux[i]**2+uy[i]**2+uz[i]**2);\n\n")
file.write("#Starting VINF\n")
file.write("var VINFx:=0.0001;\n")
file.write("var VINFy:=0.0001;\n")
file.write("var VINFz:=0.0001;\n")
file.write("var VINF = sqrt(VINFx^2+VINFy^2+VINFz^2);\n\n")
file.write("#Ending VINF\n")
file.write("var VINFxf:=0.0001;\n")
file.write("var VINFyf:=0.0001;\n")
file.write("var VINFzf:=0.0001;\n")
file.write("var VINFf = sqrt(VINFxf^2+VINFyf^2+VINFzf^2);\n\n")
file.write("#Eccentric Anomaly Differences between nodes\n")
file.write("var DE{i in J};\n\n")
file.write("#Initial time\n")
file.write("var timod := tI * d2u * f, <= (tI+tbnd)*d2u*f, >= (tI-tbnd)*d2u*f; \n")
file.write("#Time of flight \n")
file.write("var tfmod := tT * d2u * f, <= (tT+tbnd)*d2u*f, >= (tT-tbnd)*d2u*f; \n")
file.write("#--------------------------------------------------------------------------\n\n")
file.write("#--------------------------------------------------------------------------\n")
file.write("#We here introduce some time variables that simplifies the formulas \n")
file.write("var ti = timod /f; #Initial time non dimensional\n")
file.write("var tf = tfmod /f; #Time of flight non dimensional\n")
file.write("var tF = ti/d2u + tf/d2u; #Arrival time (MJD2000)\n")
file.write("var dt = tf/(n-1); #Inter-node temporal separation\n")
file.write("#--------------------------------------------------------------------------\n\n")
file.write("#--------------------------------------------------------------------------\n")
file.write("#Planet ephemerides are set and evaluated in tI, tI+tT\n")
file.write("include include/ephcalc.inc;\n")
file.write("fix timod;\n")
file.write("fix tfmod;\n")
file.write("solve;\n")
file.write("unfix timod;\n")
file.write("unfix tfmod;\n")
file.write("#--------------------------------------------------------------------------\n\n\n\n")
file.write("#--------------------------------------------------------------------------\n")
file.write("# Node 1: Departure Node\n")
file.write("var x1 = x0;\n")
file.write("var y1 = y0;\n")
file.write("var z1 = z0;\n")
file.write("var dx1 = dx0 + VINFx;\n")
file.write("var dy1 = dy0 + VINFy;\n")
file.write("var dz1 = dz0 + VINFz;\n\n")
file.write("#Basic definitions\n")
file.write("var r1 = sqrt(x1^2+y1^2+z1^2);\n")
file.write("var v1 = sqrt(dx1^2+dy1^2+dz1^2);\n")
file.write("var a1 = 1 / (2/r1 - v1^2);\n")
file.write("var sigma1 = x1*dx1+y1*dy1+z1*dz1;\n")
file.write("var meanmotion1 = sqrt(1/a1^3);\n")
file.write("var DM1 = meanmotion1 * dt/2;\n\n")
file.write("#Lagrange Coefficients\n")
file.write("var rvar1 = a1 + (r1-a1)*cos(DE[1]) + sigma1*sqrt(a1)*sin(DE[1]);\n")
file.write("var F1 = 1 - a1/r1 * (1-cos(DE[1]));\n")
file.write("var G1 = a1*sigma1*(1-cos(DE[1])) + r1*sqrt(a1)*sin(DE[1]);\n")
file.write("var Ft1 = -sqrt(a1)/(r1*rvar1) * sin(DE[1]);\n")
file.write("var Gt1 = 1 - a1/rvar1*(1-cos(DE[1]));\n\n")
file.write("subject to KeplerEquations1: \n")
file.write(" DM1 - DE[1] - sigma1/sqrt(a1) * (1 - cos(DE[1])) + (1 - r1/a1)*sin(DE[1]) = 0;\n")
file.write("#--------------------------------------------------------------------------\n\n")
for i in range(2,n-1):
file.write("#--------------------------------------------------------------------------\n")
file.write("# Node " +str(i)+"\n")
file.write("var x" +str(i)+" = F" +str(i-1)+"*x" +str(i-1)+" + G" +str(i-1)+"*dx" +str(i-1)+";\n")
file.write("var y" +str(i)+" = F" +str(i-1)+"*y" +str(i-1)+" + G" +str(i-1)+"*dy" +str(i-1)+";\n")
file.write("var z" +str(i)+" = F" +str(i-1)+"*z" +str(i-1)+" + G" +str(i-1)+"*dz" +str(i-1)+";\n")
file.write("var dx" +str(i)+" = Ft" +str(i-1)+"*x" +str(i-1)+" + Gt" +str(i-1)+"*dx" +str(i-1)+" + ux[" +str(i)+"];\n")
file.write("var dy" +str(i)+" = Ft" +str(i-1)+"*y" +str(i-1)+" + Gt" +str(i-1)+"*dy" +str(i-1)+" + uy[" +str(i)+"];\n")
file.write("var dz" +str(i)+" = Ft" +str(i-1)+"*z" +str(i-1)+" + Gt" +str(i-1)+"*dz" +str(i-1)+" + uz[" +str(i)+"];\n\n")
file.write("#Basic definitions\n")
file.write("var r" +str(i)+" = sqrt(x" +str(i)+"^2+y" +str(i)+"^2+z" +str(i)+"^2);\n")
file.write("var v" +str(i)+" = sqrt(dx" +str(i)+"^2+dy" +str(i)+"^2+dz" +str(i)+"^2);\n")
file.write("var a" +str(i)+" = 1 / (2/r" +str(i)+" - v" +str(i)+"^2);\n")
file.write("var sigma" +str(i)+" = x" +str(i)+"*dx" +str(i)+"+y" +str(i)+"*dy" +str(i)+"+z" +str(i)+"*dz" +str(i)+";\n")
file.write("var meanmotion" +str(i)+" = sqrt(1/a" +str(i)+"^3);\n")
file.write("var DM" +str(i)+" = meanmotion" +str(i)+" * dt;\n\n")
file.write("#Lagrange Coefficients\n")
file.write("var rvar" +str(i)+" = a" +str(i)+" + (r" +str(i)+"-a" +str(i)+")*cos(DE[" +str(i)+"]) + sigma" +str(i)+"*sqrt(a" +str(i)+")*sin(DE[" +str(i)+"]);\n")
file.write("var F" +str(i)+" = 1 - a" +str(i)+"/r" +str(i)+" * (1-cos(DE[" +str(i)+"]));\n")
file.write("var G" +str(i)+" = a" +str(i)+"*sigma" +str(i)+"*(1-cos(DE[" +str(i)+"])) + r" +str(i)+"*sqrt(a" +str(i)+")*sin(DE[" +str(i)+"]);\n")
file.write("var Ft" +str(i)+" = -sqrt(a" +str(i)+")/(r" +str(i)+"*rvar" +str(i)+") * sin(DE[" +str(i)+"]);\n")
file.write("var Gt" +str(i)+" = 1 - a" +str(i)+"/rvar" +str(i)+"*(1-cos(DE[" +str(i)+"]));\n\n")
file.write("subject to KeplerEquations" +str(i)+": \n")
file.write(" DM" +str(i)+" - DE[" +str(i)+"] - sigma" +str(i)+"/sqrt(a" +str(i)+") * (1 - cos(DE[" +str(i)+"])) + (1 - r" +str(i)+"/a" +str(i)+")*sin(DE[" +str(i)+"]) = 0;\n")
file.write("#--------------------------------------------------------------------------\n\n")
i=n-1
file.write("#--------------------------------------------------------------------------\n")
file.write("# Node " +str(i)+"\n")
file.write("var x" +str(i)+" = F" +str(i-1)+"*x" +str(i-1)+" + G" +str(i-1)+"*dx" +str(i-1)+";\n")
file.write("var y" +str(i)+" = F" +str(i-1)+"*y" +str(i-1)+" + G" +str(i-1)+"*dy" +str(i-1)+";\n")
file.write("var z" +str(i)+" = F" +str(i-1)+"*z" +str(i-1)+" + G" +str(i-1)+"*dz" +str(i-1)+";\n")
file.write("var dx" +str(i)+" = Ft" +str(i-1)+"*x" +str(i-1)+" + Gt" +str(i-1)+"*dx" +str(i-1)+" + ux[" +str(i)+"];\n")
file.write("var dy" +str(i)+" = Ft" +str(i-1)+"*y" +str(i-1)+" + Gt" +str(i-1)+"*dy" +str(i-1)+" + uy[" +str(i)+"];\n")
file.write("var dz" +str(i)+" = Ft" +str(i-1)+"*z" +str(i-1)+" + Gt" +str(i-1)+"*dz" +str(i-1)+" + uz[" +str(i)+"];\n\n")
file.write("#Basic definitions\n")
file.write("var r" +str(i)+" = sqrt(x" +str(i)+"^2+y" +str(i)+"^2+z" +str(i)+"^2);\n")
file.write("var v" +str(i)+" = sqrt(dx" +str(i)+"^2+dy" +str(i)+"^2+dz" +str(i)+"^2);\n")
file.write("var a" +str(i)+" = 1 / (2/r" +str(i)+" - v" +str(i)+"^2);\n")
file.write("var sigma" +str(i)+" = x" +str(i)+"*dx" +str(i)+"+y" +str(i)+"*dy" +str(i)+"+z" +str(i)+"*dz" +str(i)+";\n")
file.write("var meanmotion" +str(i)+" = sqrt(1/a" +str(i)+"^3);\n")
file.write("var DM" +str(i)+" = meanmotion" +str(i)+" * dt/2;\n\n")
file.write("#Lagrange Coefficients\n")
file.write("var rvar" +str(i)+" = a" +str(i)+" + (r" +str(i)+"-a" +str(i)+")*cos(DE[" +str(i)+"]) + sigma" +str(i)+"*sqrt(a" +str(i)+")*sin(DE[" +str(i)+"]);\n")
file.write("var F" +str(i)+" = 1 - a" +str(i)+"/r" +str(i)+" * (1-cos(DE[" +str(i)+"]));\n")
file.write("var G" +str(i)+" = a" +str(i)+"*sigma" +str(i)+"*(1-cos(DE[" +str(i)+"])) + r" +str(i)+"*sqrt(a" +str(i)+")*sin(DE[" +str(i)+"]);\n")
file.write("var Ft" +str(i)+" = -sqrt(a" +str(i)+")/(r" +str(i)+"*rvar" +str(i)+") * sin(DE[" +str(i)+"]);\n")
file.write("var Gt" +str(i)+" = 1 - a" +str(i)+"/rvar" +str(i)+"*(1-cos(DE[" +str(i)+"]));\n\n")
file.write("subject to KeplerEquations" +str(i)+": \n")
file.write(" DM" +str(i)+" - DE[" +str(i)+"] - sigma" +str(i)+"/sqrt(a" +str(i)+") * (1 - cos(DE[" +str(i)+"])) + (1 - r" +str(i)+"/a" +str(i)+")*sin(DE[" +str(i)+"]) = 0;\n")
file.write("#--------------------------------------------------------------------------\n\n")
file.write("#--------------------------------------------------------------------------\n")
file.write("# Node n: Arrival node\n")
file.write("var xn = F" +str(n-1)+"*x" +str(n-1)+" + G" +str(n-1)+"*dx" +str(n-1)+";\n")
file.write("var yn = F" +str(n-1)+"*y" +str(n-1)+" + G" +str(n-1)+"*dy" +str(n-1)+";\n")
file.write("var zn = F" +str(n-1)+"*z" +str(n-1)+" + G" +str(n-1)+"*dz" +str(n-1)+";\n")
file.write("var dxn = Ft" +str(n-1)+"*x" +str(n-1)+" + Gt" +str(n-1)+"*dx" +str(n-1)+"+ VINFxf;\n")
file.write("var dyn = Ft" +str(n-1)+"*y" +str(n-1)+" + Gt" +str(n-1)+"*dy" +str(n-1)+"+ VINFyf;\n")
file.write("var dzn = Ft" +str(n-1)+"*z" +str(n-1)+" + Gt" +str(n-1)+"*dz" +str(n-1)+"+ VINFzf;\n\n")
file.write("#Basic definitions\n")
file.write("var rn = sqrt(xn^2+yn^2+zn^2);\n")
file.write("var vn = sqrt(dxn^2+dyn^2+dzn^2);\n")
file.write("var an = 1 / (2/rn - vn^2);\n")
file.write("#--------------------------------------------------------------------------\n\n")
file.write("#--------------------------------------------------------------------------\n")
file.write("#Match Constraint\n")
file.write("subject to \n")
file.write(" FinalPositionx : xn = xf;\n")
file.write(" FinalPositiony : yn = yf;\n")
file.write(" FinalPositionz : zn = zf;\n")
file.write(" FinalVelocityx : dxn = dxf;\n")
file.write(" FinalVelocityy : dyn = dyf;\n")
file.write(" FinalVelocityz : dzn = dzf;\n")
file.write("#--------------------------------------------------------------------------\n")
#file2.write("printf \"%17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e\\n\",x1,y1,z1,dx1,dy1,dz1,1,VINFx,VINFy,VINFz>out/InitialGuess.out;\n")
for i in range(2,n):
file2.write("printf \"%17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e\\n\\n\",x"+str(i)+",y"+str(i)+",z"+str(i)+",dx"+str(i)+",dy"+str(i)+",dz"+str(i)+",m["+str(i)+"],ux["+str(i)+"],uy["+str(i)+"],uz["+str(i)+"]>out/InitialGuess.out;\n")
#file2.write("printf \"%17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e\\n\",xn,yn,zn,dxn,dyn,dzn,m[n-1],VINFxf,VINFyf,VINFzf>out/InitialGuess.out;\n")
file2.write("close out/InitialGuess.out;")
#file3.write("printf \"%17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e\\n\",x1,y1,z1,dx1,dy1,dz1,1,VINFx,VINFy,VINFz>out/solution.out;\n")
for i in range(2,n):
file3.write("printf \"%17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e\\n\\n\",x"+str(i)+",y"+str(i)+",z"+str(i)+",dx"+str(i)+",dy"+str(i)+",dz"+str(i)+",m["+str(i)+"],ux["+str(i)+"],uy["+str(i)+"],uz["+str(i)+"]>out/solution.out;\n")
#file3.write("printf \"%17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e, %17.16e\\n\",xn,yn,zn,dxn,dyn,dzn,m[n-1],VINFxf,VINFyf,VINFzf>out/solution.out;\n")
file3.write("close out/solution.out;")
file4.write("let {i in 2..n-1} ux[i]:=Tmax*0.0000001;\n")
file4.write("let {i in 2..n-1} uy[i]:=Tmax*0.0000001;\n")
file4.write("let {i in 2..n-1} uz[i]:=Tmax*0.0000001;\n\n")
#Tangentialguess
file4.write("#--------------------------------------------------------------------------\n")
file4.write("#Initial Guess for the DE variables\n")
file4.write("let {i in J} DE[i] := DM1;\n")
file4.write("#-----------------------------------------------------------------------\n\n")
for i in range(2,n-1):
file4.write("let ux["+str(i)+"]:=dx"+str(i)+"/v"+str(i)+"*Tmax/2* tf/(n-1);\n")
file4.write("let uy["+str(i)+"]:=dy"+str(i)+"/v"+str(i)+"*Tmax/2* tf/(n-1);\n")
file4.write("let uz["+str(i)+"]:=dz"+str(i)+"/v"+str(i)+"*Tmax/2* tf/(n-1);\n")
file4.write("subject to\n")
file4.write(" thrustON{i in 2..n-1}: uT[i] <= Tmax*tf/(n-1);\n\n")
file4.write("minimize\n")
file4.write(" position: (xf-xn)^2+(yf-yn)^2+(zf-zn)^2+(dxf-dxn)^2+(dyf-dyn)^2+(dzf-dzn)^2;\n\n")
file4.write("drop FinalPositionx;\n")
file4.write("drop FinalPositiony;\n")
file4.write("drop FinalPositionz;\n")
file4.write("drop FinalVelocityx;\n")
file4.write("drop FinalVelocityy;\n")
file4.write("drop FinalVelocityz;\n")
file4.write("#--------------------------------------------------------------------------\n")
file4.write("solve;\n")
file4.write("#-----------------------------------------------------------------------\n")
file4.write("#--------------------------------------------------------------------------\n")
file4.write("#Print The Initial Guess x,y,z,dx,dy,dz,m,ux,uy,uz variables\n\n")
file4.write("param m{i in I} := 1;\n")
file4.write("include include/writeinitialguess.inc;\n")
file4.write("purge m;\n\n")
file4.write("#Print the initial and final times\n")
file4.write("printf \"%17.16e, %17.16e \\n\", ti/d2u , tF-ti/d2u > out/TimesGuess.out;\n")
file4.write("close out/TimesGuess.out;\n")
file4.write("#------------------------------------------------------------------------\n\n")
file4.write("#--------------------------------------------------------------------------\n")
file4.write("#Clean up\n")
file4.write("unfix timod;\n")
file4.write("unfix tfmod;\n")
file4.write("restore FinalPositionx;\n")
file4.write("restore FinalPositiony;\n")
file4.write("restore FinalPositionz;\n")
file4.write("restore FinalVelocityx;\n")
file4.write("restore FinalVelocityy;\n")
file4.write("restore FinalVelocityz;\n")
file4.write("drop thrustON;\n")
file4.write("drop position;\n")
| gpl-2.0 | -1,191,002,370,958,845,400 | 54.772358 | 279 | 0.490233 | false | 2.27529 | false | false | false |
sky15179/Debug | TestTabelController/my-python/relaceGuideImages.py | 1 | 3738 | #!/usr/bin/python
# coding:utf-8
import os
import re
import fnmatch
import zipfile
import shutil
import getpass
import glob
PATH = "/Users/"+ getpass.getuser() + "/Downloads"
ProtectImagePath = "/Users/wzg/Downloads/testimage/help_images"
prefix = "FS_HelpGuide_"
# Get the path of the image files that are to be replaced
def realProtectImagePath():
    global ProtectImagePath
    if os.path.isdir(ProtectImagePath):
        pass
    else:
        inputContent = raw_input("请输入待替换图片文件的路径:")
        # Only accept the input if it points at an existing directory
        if os.path.isdir(inputContent):
            ProtectImagePath = inputContent
    pass
# Delete any previously extracted image folders
def deleteExistDirs():
# '''delete files and folders'''
for path,dirs,files in os.walk(PATH):
for secDir in dirs:
if fnmatch.fnmatch(secDir,'*引导*'):
turePath = os.path.join(PATH,secDir)
shutil.rmtree(turePath)
pass
# Unzip operation
def unzip(file_name):
# """unzip zip file"""
zip_file = zipfile.ZipFile(file_name)
zipDirName = file_name.replace('.zip','',1)
if os.path.isdir(zipDirName):
pass
else:
os.mkdir(zipDirName)
for names in zip_file.namelist():
if names.startswith('__MACOSX/'):
continue
zip_file.extract(names,zipDirName)
zip_file.close()
# zip_file.printdir()
pass
# Find the path of the archive to unzip
def unzipImages():
for filename in os.listdir(PATH):
if fnmatch.fnmatch(filename,'*引导*'):
return os.path.join(PATH,filename)
pass
# Get the path of the extracted zip folder
def realPath():
for path,dirs,files in os.walk(PATH):
for secDir in dirs:
if fnmatch.fnmatch(secDir,'*引导*'):
                # print 'archive: ' + secDir
                turePath = os.path.join(PATH,secDir)
                # print 'real path: ' + turePath
return turePath
pass
# Rename the files
def rename_fils(turePath):
for path,secdirs,files in os.walk(turePath):
for subDir in secdirs:
subPath = os.path.join(turePath,subDir)
for subfile in os.listdir(subPath):
                # print 'file: ' + subfile
subfilePath = os.path.join(subPath,subfile)
if os.path.isfile(subfilePath):
if '.DS_Store' not in subfile:
newName = os.path.join(subPath,prefix+subDir+'_'+subfile.replace('0','',1))
os.rename(subfilePath,newName)
pass
# Full paths of the files under the root directory
def fileInDirPath(turePath):
fileList = []
for path,secdirs,files in os.walk(turePath):
for subDir in secdirs:
subPath = os.path.join(turePath,subDir)
for subfile in os.listdir(subPath):
subfilePath = os.path.join(subPath,subfile)
if os.path.isfile(subfilePath):
if '.DS_Store' not in subfile:
newName = os.path.join(subPath,subfile)
fileList.append(newName)
return fileList
pass
# Replace the images
def repalceImages(newImageDirPath,oldImageDirPath):
if (os.path.isdir(newImageDirPath)) and (os.path.isdir(oldImageDirPath)):
for newImageFilePath in fileInDirPath(newImageDirPath):
if os.path.isfile(newImageFilePath):
shutil.copy(newImageFilePath,oldImageDirPath)
print "替换成功" + os.path.basename(newImageFilePath)
pass
if __name__ == '__main__':
deleteExistDirs()
unzipPath = unzipImages()
if os.path.isfile(unzipPath):
unzip(unzipPath)
rename_fils(realPath())
realProtectImagePath()
repalceImages(realPath(),ProtectImagePath)
else:
print '无效解压地址'
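# Illustrative note (not part of the original script): the script looks for a
# "*引导*" zip archive in ~/Downloads, unpacks it, renames every image to
# FS_HelpGuide_<folder>_<name with the first '0' removed>, and copies the
# renamed files over the ones found under ProtectImagePath.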
| apache-2.0 | 614,262,965,934,811,400 | 28.38843 | 99 | 0.600675 | false | 3.212285 | false | false | false |
mxcube/mxcube | mxcubeqt/utils/widget_utils.py | 1 | 9801 | #
# Project: MXCuBE
# https://github.com/mxcube
#
# This file is part of MXCuBE software.
#
# MXCuBE is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# MXCuBE is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with MXCuBE. If not, see <http://www.gnu.org/licenses/>.
from mxcubeqt.utils import colors, qt_import
from mxcubecore.dispatcher import dispatcher
from mxcubecore.ConvertUtils import string_types
__credits__ = ["MXCuBE collaboration"]
__license__ = "LGPLv3+"
class DataModelInputBinder(object):
def __init__(self, obj):
object.__init__(self)
self.__model = obj
# Key - field name/attribute name of the persistant object.
# Value - The tuple (widget, validator, type_fn)
self.bindings = {}
dispatcher.connect(self._update_widget, "model_update", dispatcher.Any)
def __checkbox_update_value(self, field_name, new_value):
setattr(self.__model, field_name, new_value)
dispatcher.send("model_update", self.__model, field_name, self)
def __combobox_update_value(self, field_name, new_value):
setattr(self.__model, field_name, new_value)
dispatcher.send("model_update", self.__model, field_name, self)
def __ledit_update_value(self, field_name, widget, new_value, type_fn, validator):
if not self.bindings[field_name][3]:
origin_value = new_value
if type_fn == float and validator:
pattern = "%." + str(validator.decimals()) + "f"
new_value = pattern % float(new_value)
# fix validation if PyQt4 and sipapi 1 is used
if isinstance(new_value, string_types):
if "QString" in globals():
new_value = qt_import.QString(new_value)
self.__validated(
field_name, validator, self.bindings[field_name][0], new_value
)
if isinstance(widget, qt_import.QLineEdit):
if type_fn is float and validator:
widget.setText(
"{:g}".format(
round(float(origin_value), validator.decimals())
)
)
try:
setattr(self.__model, field_name, type_fn(origin_value))
except ValueError:
if origin_value != "":
raise
else:
dispatcher.send("model_update", self.__model, field_name, self)
def __ledit_text_edited(self, field_name, widget, new_value, type_fn, validator):
self.bindings[field_name][3] = True
if self.__validated(
field_name, validator, self.bindings[field_name][0], new_value
):
try:
setattr(self.__model, field_name, type_fn(new_value))
except ValueError:
if new_value != "":
raise
else:
dispatcher.send("model_update", self.__model, field_name, self)
def __validated(self, field_name, validator, widget, new_value):
if validator:
try:
flt_value = float(new_value)
except BaseException:
colors.set_widget_color(
widget, colors.LIGHT_RED, qt_import.QPalette.Base
)
return False
if flt_value >= min(
validator.bottom(), validator.top()
) and flt_value <= max(validator.bottom(), validator.top()):
# if validator.validate(new_value, widget.cursorPosition())[0] \
# == QValidator.Acceptable:
if self.bindings[field_name][3]:
colors.set_widget_color(
widget, colors.LIGHT_YELLOW, qt_import.QPalette.Base
)
else:
colors.set_widget_color(
widget, colors.WHITE, qt_import.QPalette.Base
)
return True
else:
colors.set_widget_color(
widget, colors.LIGHT_RED, qt_import.QPalette.Base
)
return False
else:
if self.bindings[field_name][3]:
colors.set_widget_color(
widget, colors.LIGHT_YELLOW, qt_import.QPalette.Base
)
else:
colors.set_widget_color(widget, colors.WHITE, qt_import.QPalette.Base)
return True
def get_model(self):
return self.__model
def set_model(self, obj):
self.__model = obj
self.init_bindings()
self.clear_edit()
self.validate_all()
def init_bindings(self):
for field_name in self.bindings.keys():
self._update_widget(field_name, None)
def _update_widget(self, field_name, data_binder):
if data_binder == self:
return
try:
widget, validator, type_fn, edited = self.bindings[field_name]
except KeyError:
return
try:
widget.blockSignals(True)
if isinstance(widget, qt_import.QLineEdit):
if type_fn is float and validator:
if getattr(self.__model, field_name):
value = float(getattr(self.__model, field_name))
widget.setText(
"{:g}".format(round(float(value), validator.decimals()))
)
elif type_fn is int and validator:
value = int(getattr(self.__model, field_name))
widget.setText("%d" % value)
else:
widget.setText(str(getattr(self.__model, field_name)))
elif isinstance(widget, qt_import.QLabel):
widget.setText(str(getattr(self.__model, field_name)))
elif isinstance(widget, qt_import.QComboBox):
widget.setCurrentIndex(int(getattr(self.__model, field_name)))
elif isinstance(widget, qt_import.QCheckBox) or isinstance(
widget, qt_import.QRadioButton
):
widget.setChecked(bool(getattr(self.__model, field_name)))
finally:
widget.blockSignals(False)
def bind_value_update(self, field_name, widget, type_fn, validator=None):
self.bindings[field_name] = [widget, validator, type_fn, False]
if isinstance(widget, qt_import.QLineEdit):
widget.textChanged.connect(
lambda new_value: self.__ledit_update_value(
field_name, widget, new_value, type_fn, validator
)
)
widget.textEdited.connect(
lambda new_value: self.__ledit_text_edited(
field_name, widget, new_value, type_fn, validator
)
)
if type_fn is float and validator:
pattern = "%." + str(validator.decimals()) + "f"
if getattr(self.__model, field_name):
widget.setText(pattern % float(getattr(self.__model, field_name)))
else:
widget.setText(str(getattr(self.__model, field_name)))
elif isinstance(widget, qt_import.QLabel):
widget.setText(str(getattr(self.__model, field_name)))
elif isinstance(widget, qt_import.QComboBox):
widget.activated.connect(
lambda new_value: self.__combobox_update_value(field_name, new_value)
)
widget.setCurrentIndex(int(getattr(self.__model, field_name)))
elif isinstance(widget, qt_import.QCheckBox) or isinstance(
widget, qt_import.QRadioButton
):
widget.toggled.connect(
lambda new_value: self.__checkbox_update_value(field_name, new_value)
)
widget.setChecked(bool(getattr(self.__model, field_name)))
if validator and not widget.toolTip():
if isinstance(validator, qt_import.QDoubleValidator):
tooltip = "%s limits %.2f : %.2f" % (
field_name.replace("_", " ").capitalize(),
validator.bottom(),
validator.top(),
)
else:
tooltip = "%s limits %d : %d" % (
field_name.replace("_", " ").capitalize(),
validator.bottom(),
validator.top(),
)
widget.setToolTip(tooltip)
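    # Hypothetical usage sketch (the model and widget names below are made up,
    # not taken from this file):
    #   binder = DataModelInputBinder(acq_parameters)
    #   binder.bind_value_update("exp_time", exp_time_ledit, float,
    #                            qt_import.QDoubleValidator(0.001, 6000, 4, parent))
    # Each edit typed into the line edit is validated and colored, written back
    # to acq_parameters.exp_time, and broadcast via the "model_update" signal so
    # that other binders bound to the same field stay in sync.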
def validate_all(self):
result = []
for item in self.bindings.items():
key = item[0]
widget = item[1][0]
validator = item[1][1]
# if validator:
if isinstance(widget, qt_import.QLineEdit):
if not self.__validated(key, validator, widget, widget.text()):
result.append(key)
elif isinstance(widget, qt_import.QComboBox):
pass
elif isinstance(widget, qt_import.QCheckBox) or isinstance(
widget, qt_import.QRadioButton
):
pass
return result
def clear_edit(self):
for key in self.bindings.keys():
self.bindings[key][3] = False
| lgpl-3.0 | -875,978,514,272,446,100 | 37.435294 | 86 | 0.540149 | false | 4.3502 | false | false | false |
google/tink | python/tink/jwt/_jwt_key_templates.py | 1 | 4813 | # Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Pre-generated JWT KeyTemplate."""
from tink.proto import jwt_ecdsa_pb2
from tink.proto import jwt_hmac_pb2
from tink.proto import jwt_rsa_ssa_pkcs1_pb2
from tink.proto import jwt_rsa_ssa_pss_pb2
from tink.proto import tink_pb2
_F4 = 65537
# TODO(juerg): Add TINK key templates.
def _create_jwt_hmac_template(algorithm: jwt_hmac_pb2.JwtHmacAlgorithm,
key_size: int) -> tink_pb2.KeyTemplate:
key_format = jwt_hmac_pb2.JwtHmacKeyFormat(
algorithm=algorithm, key_size=key_size)
return tink_pb2.KeyTemplate(
type_url='type.googleapis.com/google.crypto.tink.JwtHmacKey',
value=key_format.SerializeToString(),
output_prefix_type=tink_pb2.RAW)
def _create_jwt_ecdsa_template(
algorithm: jwt_ecdsa_pb2.JwtEcdsaAlgorithm) -> tink_pb2.KeyTemplate:
key_format = jwt_ecdsa_pb2.JwtEcdsaKeyFormat(
algorithm=algorithm)
return tink_pb2.KeyTemplate(
type_url='type.googleapis.com/google.crypto.tink.JwtEcdsaPrivateKey',
value=key_format.SerializeToString(),
output_prefix_type=tink_pb2.RAW)
# TODO(juerg): Move this function into a util lib.
def _num_to_bytes(n: int) -> bytes:
"""Converts a number to bytes."""
if n < 0:
raise OverflowError("number can't be negative")
if n == 0:
return b'\x00'
octets = bytearray()
while n:
octets.append(n % 256)
n //= 256
return bytes(octets[::-1])
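# A couple of hand-checked examples of the big-endian conversion above
# (illustrative comments, not part of the original module):
#   _num_to_bytes(0)     == b'\x00'
#   _num_to_bytes(65537) == b'\x01\x00\x01'   # _F4, the RSA public exponent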
def _create_jwt_rsa_ssa_pkcs1_template(
algorithm: jwt_rsa_ssa_pkcs1_pb2.JwtRsaSsaPkcs1Algorithm,
modulus_size: int
) -> tink_pb2.KeyTemplate:
key_format = jwt_rsa_ssa_pkcs1_pb2.JwtRsaSsaPkcs1KeyFormat(
algorithm=algorithm,
modulus_size_in_bits=modulus_size,
public_exponent=_num_to_bytes(_F4))
return tink_pb2.KeyTemplate(
type_url='type.googleapis.com/google.crypto.tink.JwtRsaSsaPkcs1PrivateKey',
value=key_format.SerializeToString(),
output_prefix_type=tink_pb2.RAW)
def _create_jwt_rsa_ssa_pss_template(
algorithm: jwt_rsa_ssa_pss_pb2.JwtRsaSsaPssAlgorithm,
modulus_size: int
) -> tink_pb2.KeyTemplate:
key_format = jwt_rsa_ssa_pss_pb2.JwtRsaSsaPssKeyFormat(
algorithm=algorithm,
modulus_size_in_bits=modulus_size,
public_exponent=_num_to_bytes(_F4))
return tink_pb2.KeyTemplate(
type_url='type.googleapis.com/google.crypto.tink.JwtRsaSsaPssPrivateKey',
value=key_format.SerializeToString(),
output_prefix_type=tink_pb2.RAW)
# Hmac Templates
def jwt_hs256_template() -> tink_pb2.KeyTemplate:
return _create_jwt_hmac_template(jwt_hmac_pb2.HS256, 32)
def jwt_hs384_template() -> tink_pb2.KeyTemplate:
return _create_jwt_hmac_template(jwt_hmac_pb2.HS384, 48)
def jwt_hs512_template() -> tink_pb2.KeyTemplate:
return _create_jwt_hmac_template(jwt_hmac_pb2.HS512, 64)
# ECDSA Templates
def jwt_es256_template() -> tink_pb2.KeyTemplate:
return _create_jwt_ecdsa_template(jwt_ecdsa_pb2.ES256)
def jwt_es384_template() -> tink_pb2.KeyTemplate:
return _create_jwt_ecdsa_template(jwt_ecdsa_pb2.ES384)
def jwt_es512_template() -> tink_pb2.KeyTemplate:
return _create_jwt_ecdsa_template(jwt_ecdsa_pb2.ES512)
# RSA SSA PKCS1 Templates
def jwt_rs256_2048_f4_template() -> tink_pb2.KeyTemplate:
return _create_jwt_rsa_ssa_pkcs1_template(jwt_rsa_ssa_pkcs1_pb2.RS256, 2048)
def jwt_rs256_3072_f4_template() -> tink_pb2.KeyTemplate:
return _create_jwt_rsa_ssa_pkcs1_template(jwt_rsa_ssa_pkcs1_pb2.RS256, 3072)
def jwt_rs384_3072_f4_template() -> tink_pb2.KeyTemplate:
return _create_jwt_rsa_ssa_pkcs1_template(jwt_rsa_ssa_pkcs1_pb2.RS384, 3072)
def jwt_rs512_4096_f4_template() -> tink_pb2.KeyTemplate:
return _create_jwt_rsa_ssa_pkcs1_template(jwt_rsa_ssa_pkcs1_pb2.RS512, 4096)
# RSA SSA PSS Templates
def jwt_ps256_2048_f4_template() -> tink_pb2.KeyTemplate:
return _create_jwt_rsa_ssa_pss_template(jwt_rsa_ssa_pss_pb2.PS256, 2048)
def jwt_ps256_3072_f4_template() -> tink_pb2.KeyTemplate:
return _create_jwt_rsa_ssa_pss_template(jwt_rsa_ssa_pss_pb2.PS256, 3072)
def jwt_ps384_3072_f4_template() -> tink_pb2.KeyTemplate:
return _create_jwt_rsa_ssa_pss_template(jwt_rsa_ssa_pss_pb2.PS384, 3072)
def jwt_ps512_4096_f4_template() -> tink_pb2.KeyTemplate:
return _create_jwt_rsa_ssa_pss_template(jwt_rsa_ssa_pss_pb2.PS512, 4096)
| apache-2.0 | -4,097,483,200,969,170,000 | 31.965753 | 81 | 0.724288 | false | 2.73777 | false | false | false |
ramrom/haus | gmail.py | 1 | 2126 | #!/usr/local/bin/python
import httplib2
import os, pdb
from apiclient import discovery
from oauth2client import client
from oauth2client import tools
from oauth2client.file import Storage
try:
import argparse
flags = argparse.ArgumentParser(parents=[tools.argparser]).parse_args()
except ImportError:
flags = None
# If modifying these scopes, delete your previously saved credentials
# at ~/.creds/gmail-python-quickstart.json
SCOPES = 'https://www.googleapis.com/auth/gmail.readonly'
CLIENT_SECRET_FILE = '/Users/smittapalli/.creds/gcloud_oauth2_webapp_haus.json'
APPLICATION_NAME = 'Gmail API Python Quickstart'
def get_credentials():
"""Gets valid user credentials from storage.
If nothing has been stored, or if the stored credentials are invalid,
the OAuth2 flow is completed to obtain the new credentials.
Returns:
Credentials, the obtained credential.
"""
home_dir = os.path.expanduser('~')
credential_dir = os.path.join(home_dir, '.creds')
credential_path = os.path.join(credential_dir, 'gmail-python-quickstart.json')
store = Storage(credential_path)
credentials = store.get()
if not credentials or credentials.invalid:
flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)
flow.user_agent = APPLICATION_NAME
if flags:
credentials = tools.run_flow(flow, store, flags)
else: # Needed only for compatibility with Python 2.6
credentials = tools.run(flow, store)
print('Storing credentials to ' + credential_path)
return credentials
def main():
"""Shows basic usage of the Gmail API.
Creates a Gmail API service object and outputs a list of label names
of the user's Gmail account.
"""
credentials = get_credentials()
http = credentials.authorize(httplib2.Http())
service = discovery.build('gmail', 'v1', http=http)
results = service.users().labels().list(userId='me').execute()
labels = results.get('labels', [])
if not labels:
print('No labels found.')
else:
print('Labels:')
for label in labels:
print(label['name'])
if __name__ == '__main__':
pdb.set_trace()
#main()
| mit | -8,412,286,651,052,823,000 | 28.943662 | 80 | 0.716369 | false | 3.796429 | false | false | false |
mouton5000/DiscreteEventApplicationEditor | game/Registeries/SpriteRegistery.py | 1 | 2662 | from pygame.rect import Rect
__author__ = 'mouton'
from pygame.sprite import Sprite
import pygame
from collections import defaultdict
from copy import copy
_rootDir = None
_spritesList = defaultdict(pygame.sprite.OrderedUpdates)
_rectsToUpdate = []
def init(rootDir):
global _rootDir
_rootDir = rootDir
reinit()
def reinit():
_spritesList.clear()
del _rectsToUpdate[:]
def getLayers():
return iter(_spritesList.keys())
def draw(z, scene):
_spritesList[z].draw(scene)
def addRectToUpdate(rectToUpdate):
_rectsToUpdate.append(rectToUpdate)
def getRectsToUpdate():
return _rectsToUpdate
def clearRectsToUpdate():
del _rectsToUpdate[:]
class SpriteReg(Sprite):
def __init__(self, fileName, x, y, z, rotate, scale):
Sprite.__init__(self)
self.fileName = None
self.z = None
self.rect = None
self.reload(fileName, x, y, z, rotate, scale)
def reload(self, fileName, x, y, z, rotate, scale):
filePath = _rootDir + '/' + fileName
import game.gameWindow as gameWindow
scene = gameWindow.getScene()
prevRect = copy(self.rect)
if self.fileName is None or self.fileName != fileName or rotate != 0 or scale != 1:
self.fileName = fileName
self.image = pygame.image.load(filePath).convert_alpha(scene)
self.rect = self.image.get_rect()
self.rect.x = x
self.rect.y = y
if rotate != 0 or scale != 1:
self.image = pygame.transform.rotozoom(self.image, rotate, scale)
transformedRect = self.image.get_rect()
transformedRect.center = self.rect.center
self.rect = transformedRect
if prevRect is not None:
rectToUpdate = Rect(prevRect.x - 1, prevRect.y - 1, prevRect.width + 2, prevRect.height + 2)
r2 = Rect(self.rect.x - 1, self.rect.y - 1, self.rect.width + 2, self.rect.height + 2)
rectToUpdate.union_ip(r2)
addRectToUpdate(rectToUpdate)
else:
rectToUpdate = Rect(self.rect.x - 1, self.rect.y - 1, self.rect.width + 2, self.rect.height + 2)
addRectToUpdate(rectToUpdate)
if self.z is not None:
self.remove()
_spritesList[z].add(self)
self.z = z
def __str__(self):
return str((self.fileName, self.rect))
def __repr__(self):
return str((self.fileName, self.rect))
def remove(self):
_spritesList[self.z].remove(self)
rectToUpdate = Rect(self.rect.x - 1, self.rect.y - 1, self.rect.width + 2, self.rect.height + 2)
addRectToUpdate(rectToUpdate) | mit | 7,548,703,405,394,868,000 | 25.89899 | 108 | 0.614576 | false | 3.507246 | false | false | false |
CVSoft/UTQuery | Demo_GSQuery.py | 1 | 3672 | from time import sleep
import GSQuery
# Let's pick a server. We'll use TeamRectifier as they're usually populated.
gs = GSQuery.GSServer('31.186.250.42')
# Let's get the basic server details with the GameSpy query protocol.
# The query methods return dictionary types, so we can store them for later use
# instead of having to ask the server every time we want to know something.
try: gs_bsd = gs.parse_query()
# Sometimes, our packets get lost, or the server is restarting. In that case,
# we can just wait a few seconds, try again, and hope our query is returned.
except:
sleep(5)
gs_bsd = gs.parse_query()
# and find out the server's name
print "Server Name :", gs_bsd["hostname"]
# Now let's see what map they're on
print "Map Name :", gs_bsd["mapname"]
# But what are they playing? (Assume the server name didn't make this obvious.)
# Let's see what game type is active.
print "Gametype :", gs_bsd["gametype"]
# What game version do they use?
print "Game Version:", gs_bsd["gamever"]
#a little cleanup for what follows...
print "\n====\n"
# Why do all of these methods start with parse? This is because they take a
# `query` argument, which is a raw query returned by UTServer.query().
# Specifying the `query` argument is optional, and the method will send the
# necessary type of query needed if one is not provided.
################################################################################
# Unlike the query method used above, the player query method does not return a
# dictionary of key-value pairs, but rather a list of UTPlayer objects.
#
# UTPlayer objects have six attributes:
# - Name, which is the colored name shown in-game, if colored names are used.
# - Score
# - Ping, in milliseconds. This ping value is the one shown in-game.
# - Team, which for team games is (red=0, blue=1). For DeathMatch, all players
# have a team value of 0. Unlike UTQuery, spectators are not shown at all.
# - Player ID, which is simply the player's index in the GameSpy query response.
# - Stats ID, which the GameSpy protocol doesn't implement and is set to None.
#
# We can access these values through their values:
# name, score, ping, team, pid, sid
# respectively.
#
# Let's start with getting the online player list.
gs_players = gs.parse_players()
# If we get an empty list, one of two things happened: either no players are
# online, or our query was not returned. The server will return data if our
# query was lost, but I haven't bothered to implement that check in my code
# yet.
# Now let's display their information. We really only care about name, score,
# team, and ping. Since we are requesting information from a TeamArenaMaster
# server, we are going to assume teams are present. For a DeathMatch server,
# all players have a team value of 0, since there are no teams.
# First, we should check if players are online.
if len(gs_players) > 0:
#If there are, let's display some information about them.
print "Online Players:"
for p in gs_players:
# Skip anything with a ping of 0, as they're probably not real players.
# Team scores appear as players with a ping of 0.
if p.ping == 0: continue
# Translate the team number to English. The rest are just numbers.
team = ["red", "blue"][p.team]
# Show their name, score, and ping.
print p.name + " is on " + team + " with a score of " + str(p.score) + \
" and a ping of " + str(p.ping) + "ms."
# If we didn't find anyone online, we go here.
else:
print "No online players!"
| gpl-3.0 | -4,055,352,095,003,567,600 | 40.697674 | 80 | 0.668573 | false | 3.607073 | false | false | false |
otsaloma/gaupol | aeidon/pattern.py | 1 | 1548 | # -*- coding: utf-8 -*-
# Copyright (C) 2007 Osmo Salomaa
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Regular expression substitution for subtitle text."""
import aeidon
import re
__all__ = ("Pattern",)
class Pattern(aeidon.MetadataItem):
"""
Regular expression substitution for subtitle text.
:ivar enabled: ``True`` if pattern should be used, ``False`` if not
:ivar fields: Dictionary of all data field names and values
:ivar local: ``True`` if pattern is defined by user, ``False`` if system
"""
def __init__(self, fields=None):
"""Initialize a :class:`Pattern` instance."""
aeidon.MetadataItem.__init__(self, fields)
self.enabled = True
self.local = False
def get_flags(self):
"""Return the evaluated value of the ``Flags`` field."""
flags = 0
for name in self.get_field_list("Flags"):
flags = flags | getattr(re, name)
return flags
| gpl-3.0 | 660,636,134,076,191,900 | 31.93617 | 76 | 0.677649 | false | 4.095238 | false | false | false |
fernandog/Medusa | ext/sqlalchemy/engine/__init__.py | 1 | 20438 | # engine/__init__.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""SQL connections, SQL execution and high-level DB-API interface.
The engine package defines the basic components used to interface
DB-API modules with higher-level statement construction,
connection-management, execution and result contexts. The primary
"entry point" class into this package is the Engine and its public
constructor ``create_engine()``.
This package includes:
base.py
Defines interface classes and some implementation classes which
comprise the basic components used to interface between a DB-API,
constructed and plain-text statements, connections, transactions,
and results.
default.py
Contains default implementations of some of the components defined
in base.py. All current database dialects use the classes in
default.py as base classes for their own database-specific
implementations.
strategies.py
The mechanics of constructing ``Engine`` objects are represented
here. Defines the ``EngineStrategy`` class which represents how
to go from arguments specified to the ``create_engine()``
function, to a fully constructed ``Engine``, including
initialization of connection pooling, dialects, and specific
subclasses of ``Engine``.
threadlocal.py
The ``TLEngine`` class is defined here, which is a subclass of
the generic ``Engine`` and tracks ``Connection`` and
``Transaction`` objects against the identity of the current
thread. This allows certain programming patterns based around
the concept of a "thread-local connection" to be possible.
The ``TLEngine`` is created by using the "threadlocal" engine
strategy in conjunction with the ``create_engine()`` function.
url.py
Defines the ``URL`` class which represents the individual
components of a string URL passed to ``create_engine()``. Also
defines a basic module-loading strategy for the dialect specifier
within a URL.
"""
from .interfaces import (
Connectable,
CreateEnginePlugin,
Dialect,
ExecutionContext,
ExceptionContext,
# backwards compat
Compiled,
TypeCompiler
)
from .base import (
Connection,
Engine,
NestedTransaction,
RootTransaction,
Transaction,
TwoPhaseTransaction,
)
from .result import (
BaseRowProxy,
BufferedColumnResultProxy,
BufferedColumnRow,
BufferedRowResultProxy,
FullyBufferedResultProxy,
ResultProxy,
RowProxy,
)
from .util import (
connection_memoize
)
from . import util, strategies
# backwards compat
from ..sql import ddl
default_strategy = 'plain'
def create_engine(*args, **kwargs):
"""Create a new :class:`.Engine` instance.
The standard calling form is to send the URL as the
first positional argument, usually a string
that indicates database dialect and connection arguments::
engine = create_engine("postgresql://scott:tiger@localhost/test")
Additional keyword arguments may then follow it which
establish various options on the resulting :class:`.Engine`
and its underlying :class:`.Dialect` and :class:`.Pool`
constructs::
engine = create_engine("mysql://scott:tiger@hostname/dbname",
encoding='latin1', echo=True)
The string form of the URL is
``dialect[+driver]://user:password@host/dbname[?key=value..]``, where
``dialect`` is a database name such as ``mysql``, ``oracle``,
``postgresql``, etc., and ``driver`` the name of a DBAPI, such as
``psycopg2``, ``pyodbc``, ``cx_oracle``, etc. Alternatively,
the URL can be an instance of :class:`~sqlalchemy.engine.url.URL`.
``**kwargs`` takes a wide variety of options which are routed
towards their appropriate components. Arguments may be specific to
the :class:`.Engine`, the underlying :class:`.Dialect`, as well as the
:class:`.Pool`. Specific dialects also accept keyword arguments that
are unique to that dialect. Here, we describe the parameters
that are common to most :func:`.create_engine()` usage.
Once established, the newly resulting :class:`.Engine` will
request a connection from the underlying :class:`.Pool` once
:meth:`.Engine.connect` is called, or a method which depends on it
such as :meth:`.Engine.execute` is invoked. The :class:`.Pool` in turn
will establish the first actual DBAPI connection when this request
is received. The :func:`.create_engine` call itself does **not**
establish any actual DBAPI connections directly.
.. seealso::
:doc:`/core/engines`
:doc:`/dialects/index`
:ref:`connections_toplevel`
:param case_sensitive=True: if False, result column names
will match in a case-insensitive fashion, that is,
``row['SomeColumn']``.
.. versionchanged:: 0.8
By default, result row names match case-sensitively.
In version 0.7 and prior, all matches were case-insensitive.
:param connect_args: a dictionary of options which will be
passed directly to the DBAPI's ``connect()`` method as
additional keyword arguments. See the example
at :ref:`custom_dbapi_args`.
:param convert_unicode=False: if set to True, sets
the default behavior of ``convert_unicode`` on the
:class:`.String` type to ``True``, regardless
of a setting of ``False`` on an individual
:class:`.String` type, thus causing all :class:`.String`
-based columns
to accommodate Python ``unicode`` objects. This flag
is useful as an engine-wide setting when using a
DBAPI that does not natively support Python
``unicode`` objects and raises an error when
one is received (such as pyodbc with FreeTDS).
See :class:`.String` for further details on
what this flag indicates.
:param creator: a callable which returns a DBAPI connection.
This creation function will be passed to the underlying
connection pool and will be used to create all new database
connections. Usage of this function causes connection
parameters specified in the URL argument to be bypassed.
:param echo=False: if True, the Engine will log all statements
as well as a repr() of their parameter lists to the engines
logger, which defaults to sys.stdout. The ``echo`` attribute of
``Engine`` can be modified at any time to turn logging on and
off. If set to the string ``"debug"``, result rows will be
printed to the standard output as well. This flag ultimately
controls a Python logger; see :ref:`dbengine_logging` for
information on how to configure logging directly.
:param echo_pool=False: if True, the connection pool will log
all checkouts/checkins to the logging stream, which defaults to
sys.stdout. This flag ultimately controls a Python logger; see
:ref:`dbengine_logging` for information on how to configure logging
directly.
:param empty_in_strategy: The SQL compilation strategy to use when
rendering an IN or NOT IN expression for :meth:`.ColumnOperators.in_`
where the right-hand side
is an empty set. This is a string value that may be one of
``static``, ``dynamic``, or ``dynamic_warn``. The ``static``
strategy is the default, and an IN comparison to an empty set
will generate a simple false expression "1 != 1". The ``dynamic``
strategy behaves like that of SQLAlchemy 1.1 and earlier, emitting
a false expression of the form "expr != expr", which has the effect
      of evaluating to NULL in the case of a null expression.
``dynamic_warn`` is the same as ``dynamic``, however also emits a
warning when an empty set is encountered; this because the "dynamic"
comparison is typically poorly performing on most databases.
.. versionadded:: 1.2 Added the ``empty_in_strategy`` setting and
additionally defaulted the behavior for empty-set IN comparisons
to a static boolean expression.
:param encoding: Defaults to ``utf-8``. This is the string
encoding used by SQLAlchemy for string encode/decode
operations which occur within SQLAlchemy, **outside of
the DBAPI.** Most modern DBAPIs feature some degree of
direct support for Python ``unicode`` objects,
what you see in Python 2 as a string of the form
``u'some string'``. For those scenarios where the
DBAPI is detected as not supporting a Python ``unicode``
object, this encoding is used to determine the
source/destination encoding. It is **not used**
for those cases where the DBAPI handles unicode
directly.
To properly configure a system to accommodate Python
``unicode`` objects, the DBAPI should be
configured to handle unicode to the greatest
degree as is appropriate - see
the notes on unicode pertaining to the specific
target database in use at :ref:`dialect_toplevel`.
Areas where string encoding may need to be accommodated
outside of the DBAPI include zero or more of:
* the values passed to bound parameters, corresponding to
the :class:`.Unicode` type or the :class:`.String` type
when ``convert_unicode`` is ``True``;
* the values returned in result set columns corresponding
to the :class:`.Unicode` type or the :class:`.String`
type when ``convert_unicode`` is ``True``;
* the string SQL statement passed to the DBAPI's
``cursor.execute()`` method;
* the string names of the keys in the bound parameter
dictionary passed to the DBAPI's ``cursor.execute()``
as well as ``cursor.setinputsizes()`` methods;
* the string column names retrieved from the DBAPI's
``cursor.description`` attribute.
When using Python 3, the DBAPI is required to support
*all* of the above values as Python ``unicode`` objects,
which in Python 3 are just known as ``str``. In Python 2,
the DBAPI does not specify unicode behavior at all,
so SQLAlchemy must make decisions for each of the above
values on a per-DBAPI basis - implementations are
completely inconsistent in their behavior.
:param execution_options: Dictionary execution options which will
be applied to all connections. See
:meth:`~sqlalchemy.engine.Connection.execution_options`
:param implicit_returning=True: When ``True``, a RETURNING-
compatible construct, if available, will be used to
fetch newly generated primary key values when a single row
INSERT statement is emitted with no existing returning()
clause. This applies to those backends which support RETURNING
or a compatible construct, including PostgreSQL, Firebird, Oracle,
Microsoft SQL Server. Set this to ``False`` to disable
the automatic usage of RETURNING.
:param isolation_level: this string parameter is interpreted by various
dialects in order to affect the transaction isolation level of the
database connection. The parameter essentially accepts some subset of
these string arguments: ``"SERIALIZABLE"``, ``"REPEATABLE_READ"``,
``"READ_COMMITTED"``, ``"READ_UNCOMMITTED"`` and ``"AUTOCOMMIT"``.
Behavior here varies per backend, and
individual dialects should be consulted directly.
Note that the isolation level can also be set on a per-:class:`.Connection`
basis as well, using the
:paramref:`.Connection.execution_options.isolation_level`
feature.
.. seealso::
:attr:`.Connection.default_isolation_level` - view default level
:paramref:`.Connection.execution_options.isolation_level`
- set per :class:`.Connection` isolation level
:ref:`SQLite Transaction Isolation <sqlite_isolation_level>`
:ref:`PostgreSQL Transaction Isolation <postgresql_isolation_level>`
:ref:`MySQL Transaction Isolation <mysql_isolation_level>`
:ref:`session_transaction_isolation` - for the ORM
:param label_length=None: optional integer value which limits
the size of dynamically generated column labels to that many
characters. If less than 6, labels are generated as
"_(counter)". If ``None``, the value of
``dialect.max_identifier_length`` is used instead.
:param listeners: A list of one or more
:class:`~sqlalchemy.interfaces.PoolListener` objects which will
receive connection pool events.
:param logging_name: String identifier which will be used within
the "name" field of logging records generated within the
"sqlalchemy.engine" logger. Defaults to a hexstring of the
object's id.
:param max_overflow=10: the number of connections to allow in
connection pool "overflow", that is connections that can be
opened above and beyond the pool_size setting, which defaults
to five. this is only used with :class:`~sqlalchemy.pool.QueuePool`.
:param module=None: reference to a Python module object (the module
itself, not its string name). Specifies an alternate DBAPI module to
be used by the engine's dialect. Each sub-dialect references a
specific DBAPI which will be imported before first connect. This
parameter causes the import to be bypassed, and the given module to
be used instead. Can be used for testing of DBAPIs as well as to
inject "mock" DBAPI implementations into the :class:`.Engine`.
:param paramstyle=None: The `paramstyle <http://legacy.python.org/dev/peps/pep-0249/#paramstyle>`_
to use when rendering bound parameters. This style defaults to the
one recommended by the DBAPI itself, which is retrieved from the
``.paramstyle`` attribute of the DBAPI. However, most DBAPIs accept
more than one paramstyle, and in particular it may be desirable
to change a "named" paramstyle into a "positional" one, or vice versa.
When this attribute is passed, it should be one of the values
``"qmark"``, ``"numeric"``, ``"named"``, ``"format"`` or
``"pyformat"``, and should correspond to a parameter style known
to be supported by the DBAPI in use.
:param pool=None: an already-constructed instance of
:class:`~sqlalchemy.pool.Pool`, such as a
:class:`~sqlalchemy.pool.QueuePool` instance. If non-None, this
pool will be used directly as the underlying connection pool
for the engine, bypassing whatever connection parameters are
present in the URL argument. For information on constructing
connection pools manually, see :ref:`pooling_toplevel`.
:param poolclass=None: a :class:`~sqlalchemy.pool.Pool`
subclass, which will be used to create a connection pool
instance using the connection parameters given in the URL. Note
this differs from ``pool`` in that you don't actually
instantiate the pool in this case, you just indicate what type
of pool to be used.
:param pool_logging_name: String identifier which will be used within
the "name" field of logging records generated within the
"sqlalchemy.pool" logger. Defaults to a hexstring of the object's
id.
:param pool_pre_ping: boolean, if True will enable the connection pool
"pre-ping" feature that tests connections for liveness upon
each checkout.
.. versionadded:: 1.2
.. seealso::
:ref:`pool_disconnects_pessimistic`
:param pool_size=5: the number of connections to keep open
inside the connection pool. This used with
:class:`~sqlalchemy.pool.QueuePool` as
well as :class:`~sqlalchemy.pool.SingletonThreadPool`. With
:class:`~sqlalchemy.pool.QueuePool`, a ``pool_size`` setting
of 0 indicates no limit; to disable pooling, set ``poolclass`` to
:class:`~sqlalchemy.pool.NullPool` instead.
:param pool_recycle=-1: this setting causes the pool to recycle
connections after the given number of seconds has passed. It
defaults to -1, or no timeout. For example, setting to 3600
means connections will be recycled after one hour. Note that
MySQL in particular will disconnect automatically if no
activity is detected on a connection for eight hours (although
this is configurable with the MySQLDB connection itself and the
server configuration as well).
.. seealso::
:ref:`pool_setting_recycle`
:param pool_reset_on_return='rollback': set the "reset on return"
behavior of the pool, which is whether ``rollback()``,
``commit()``, or nothing is called upon connections
being returned to the pool. See the docstring for
``reset_on_return`` at :class:`.Pool`.
.. versionadded:: 0.7.6
:param pool_timeout=30: number of seconds to wait before giving
up on getting a connection from the pool. This is only used
with :class:`~sqlalchemy.pool.QueuePool`.
:param plugins: string list of plugin names to load. See
:class:`.CreateEnginePlugin` for background.
.. versionadded:: 1.2.3
:param strategy='plain': selects alternate engine implementations.
Currently available are:
* the ``threadlocal`` strategy, which is described in
:ref:`threadlocal_strategy`;
* the ``mock`` strategy, which dispatches all statement
execution to a function passed as the argument ``executor``.
See `example in the FAQ
<http://docs.sqlalchemy.org/en/latest/faq/metadata_schema.html#how-can-i-get-the-create-table-drop-table-output-as-a-string>`_.
:param executor=None: a function taking arguments
``(sql, *multiparams, **params)``, to which the ``mock`` strategy will
dispatch all statement execution. Used only by ``strategy='mock'``.
"""
strategy = kwargs.pop('strategy', default_strategy)
strategy = strategies.strategies[strategy]
return strategy.create(*args, **kwargs)
def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
"""Create a new Engine instance using a configuration dictionary.
The dictionary is typically produced from a config file.
The keys of interest to ``engine_from_config()`` should be prefixed, e.g.
``sqlalchemy.url``, ``sqlalchemy.echo``, etc. The 'prefix' argument
indicates the prefix to be searched for. Each matching key (after the
prefix is stripped) is treated as though it were the corresponding keyword
argument to a :func:`.create_engine` call.
The only required key is (assuming the default prefix) ``sqlalchemy.url``,
which provides the :ref:`database URL <database_urls>`.
A select set of keyword arguments will be "coerced" to their
expected type based on string values. The set of arguments
is extensible per-dialect using the ``engine_config_types`` accessor.
:param configuration: A dictionary (typically produced from a config file,
but this is not a requirement). Items whose keys start with the value
of 'prefix' will have that prefix stripped, and will then be passed to
:ref:`create_engine`.
:param prefix: Prefix to match and then strip from keys
in 'configuration'.
:param kwargs: Each keyword argument to ``engine_from_config()`` itself
overrides the corresponding item taken from the 'configuration'
dictionary. Keyword arguments should *not* be prefixed.
"""
options = dict((key[len(prefix):], configuration[key])
for key in configuration
if key.startswith(prefix))
options['_coerce_config'] = True
options.update(kwargs)
url = options.pop('url')
return create_engine(url, **options)
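# Illustrative sketch (not part of the original module): given
#     configuration = {'sqlalchemy.url': 'sqlite://', 'sqlalchemy.echo': 'true'}
# engine_from_config(configuration) strips the 'sqlalchemy.' prefix and is
# roughly equivalent to create_engine('sqlite://', echo=True), with string
# values coerced to the types the dialect declares for those options.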
__all__ = (
'create_engine',
'engine_from_config',
)
| gpl-3.0 | 5,816,460,592,037,326,000 | 42.392781 | 137 | 0.683139 | false | 4.615628 | true | false | false |
Workday/OpenFrame | tools/telemetry/catapult_base/dependency_manager/cloud_storage_info.py | 1 | 3811 | # Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import os
import stat
from catapult_base import cloud_storage
from catapult_base.dependency_manager import exceptions
class CloudStorageInfo(object):
def __init__(self, cs_bucket, cs_hash, download_path, cs_remote_path,
version_in_cs=None, archive_info=None):
""" Container for the information needed to download a dependency from
cloud storage.
Args:
cs_bucket: The cloud storage bucket the dependency is located in.
cs_hash: The hash of the file stored in cloud storage.
download_path: Where the file should be downloaded to.
cs_remote_path: Where the file is stored in the cloud storage bucket.
version_in_cs: The version of the file stored in cloud storage.
archive_info: An instance of ArchiveInfo if this dependency is an
archive. Else None.
"""
self._download_path = download_path
self._cs_remote_path = cs_remote_path
self._cs_bucket = cs_bucket
self._cs_hash = cs_hash
self._version_in_cs = version_in_cs
self._archive_info = archive_info
if not self._has_minimum_data:
raise ValueError(
'Not enough information specified to initialize a cloud storage info.'
' %s' % self)
def GetRemotePath(self):
"""Gets the path to a downloaded version of the dependency.
May not download the file if it has already been downloaded.
Will unzip the downloaded file if a non-empty archive_info was passed in at
init.
Returns: A path to an executable that was stored in cloud_storage, or None
if not found.
Raises:
CredentialsError: If cloud_storage credentials aren't configured.
PermissionError: If cloud_storage credentials are configured, but not
with an account that has permission to download the needed file.
NotFoundError: If the needed file does not exist where expected in
cloud_storage or the downloaded zip file.
ServerError: If an internal server error is hit while downloading the
needed file.
      CloudStorageError: If another error occurred while downloading the remote
path.
FileNotFoundError: If the download was otherwise unsuccessful.
"""
if not self._has_minimum_data:
return None
download_dir = os.path.dirname(self._download_path)
if not os.path.exists(download_dir):
os.makedirs(download_dir)
dependency_path = self._download_path
cloud_storage.GetIfHashChanged(
self._cs_remote_path, self._download_path, self._cs_bucket,
self._cs_hash)
if not os.path.exists(dependency_path):
raise exceptions.FileNotFoundError(dependency_path)
logging.error('has archive_info %s', self._archive_info)
if self.has_archive_info:
dependency_path = self._archive_info.GetUnzippedPath()
else:
mode = os.stat(dependency_path).st_mode
os.chmod(dependency_path, mode | stat.S_IXUSR)
return os.path.abspath(dependency_path)
@property
def version_in_cs(self):
return self._version_in_cs
@property
def _has_minimum_data(self):
return all([self._cs_bucket, self._cs_remote_path, self._download_path,
self._cs_hash])
@property
def has_archive_info(self):
return bool(self._archive_info)
def __repr__(self):
return (
'CloudStorageInfo(download_path=%s, cs_remote_path=%s, cs_bucket=%s, '
'cs_hash=%s, version_in_cs=%s, archive_info=%s)' % (
self._download_path, self._cs_remote_path, self._cs_bucket,
self._cs_hash, self._version_in_cs, self._archive_info))
| bsd-3-clause | 6,174,460,862,816,630,000 | 36.362745 | 80 | 0.674888 | false | 3.998951 | false | false | false |
smallyear/linuxLearn | salt/salt/states/reg.py | 1 | 13518 | # -*- coding: utf-8 -*-
r'''
===========================
Manage the Windows registry
===========================
Many python developers think of registry keys as if they were python keys in a
dictionary, which is not the case. The Windows registry is broken down into the
following components:
-----
Hives
-----
This is the top level of the registry. They all begin with HKEY.
- HKEY_CLASSES_ROOT (HKCR)
- HKEY_CURRENT_USER(HKCU)
- HKEY_LOCAL MACHINE (HKLM)
- HKEY_USER (HKU)
- HKEY_CURRENT_CONFIG
----
Keys
----
Hives contain keys. These are basically the folders beneath the hives. They can
contain any number of subkeys.
-----------------
Values or Entries
-----------------
Values or Entries are the name/data pairs beneath the keys and subkeys. All keys
have a default name/data pair. It is usually "(Default)"="(value not set)". The
actual value for the name and the data is Null. The registry editor will display
"(Default)" and "(value not set)".
-------
Example
-------
The following example is taken from the windows startup portion of the registry:
```
[HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\Windows\CurrentVersion\Run]
"RTHDVCPL"="\"C:\\Program Files\\Realtek\\Audio\\HDA\\RtkNGUI64.exe\" -s"
"NvBackend"="\"C:\\Program Files (x86)\\NVIDIA Corporation\\Update Core\\NvBackend.exe\""
"BTMTrayAgent"="rundll32.exe \"C:\\Program Files (x86)\\Intel\\Bluetooth\\btmshellex.dll\",TrayApp"
```
In this example these are the values for each:
Hive: `HKEY_LOCAL_MACHINE`
Key and subkeys: `SOFTWARE\Microsoft\Windows\CurrentVersion\Run`
Value:
- There are 3 value names: `RTHDVCPL`, `NvBackend`, and `BTMTrayAgent`
- Each value name has a corresponding value
'''
from __future__ import absolute_import
# Import python libs
import logging
# Import salt libs
import salt.utils
log = logging.getLogger(__name__)
def __virtual__():
'''
Load this state if the reg module exists
'''
return 'reg' if 'reg.read_key' in __salt__ else False
def _parse_key_value(key):
'''
split the full path in the registry to the key and the rest
'''
splt = key.split("\\")
hive = splt.pop(0)
vname = splt.pop(-1)
key = '\\'.join(splt)
return hive, key, vname
def _parse_key(key):
'''
split the hive from the key
'''
splt = key.split("\\")
hive = splt.pop(0)
key = '\\'.join(splt)
return hive, key
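# Illustrative examples (not part of the original module):
#   _parse_key_value(r'HKEY_CURRENT_USER\SOFTWARE\Salt\version')
#   -> ('HKEY_CURRENT_USER', 'SOFTWARE\\Salt', 'version')
#   _parse_key(r'HKEY_CURRENT_USER\SOFTWARE\Salt')
#   -> ('HKEY_CURRENT_USER', 'SOFTWARE\\Salt')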
def present(name,
value=None,
vname=None,
vdata=None,
vtype='REG_SZ',
reflection=True,
use_32bit_registry=False):
'''
Ensure a registry key or value is present.
:param str name: A string value representing the full path of the key to
include the HIVE, Key, and all Subkeys. For example:
``HKEY_LOCAL_MACHINE\\SOFTWARE\\Salt``
Valid hive values include:
- HKEY_CURRENT_USER or HKCU
- HKEY_LOCAL_MACHINE or HKLM
- HKEY_USERS or HKU
:param str value: Deprecated. Use vname and vdata instead. Included here for
backwards compatibility.
:param str vname: The name of the value you'd like to create beneath the
Key. If this parameter is not passed it will assume you want to set the
(Default) value
:param str vdata: The value you'd like to set for the Key. If a value name
(vname) is passed, this will be the data for that value name. If not, this
will be the (Default) value for the key.
The type for the (Default) value is always REG_SZ and cannot be changed.
This parameter is optional. If not passed, the Key will be created with no
associated item/value pairs.
:param str vtype: The value type for the data you wish to store in the
registry. Valid values are:
- REG_BINARY
- REG_DWORD
- REG_EXPAND_SZ
- REG_MULTI_SZ
- REG_SZ (Default)
:param bool reflection: On 64 bit machines a duplicate value will be created
in the ``Wow6432Node`` for 32bit programs. This only applies to the SOFTWARE
key. This option is ignored on 32bit operating systems. This value defaults
to True. Set it to False to disable reflection.
.. deprecated:: 2015.8.2
Use `use_32bit_registry` instead.
The parameter seems to have no effect since Windows 7 / Windows 2008R2
removed support for reflection. The parameter will be removed in Boron.
:param bool use_32bit_registry: Use the 32bit portion of the registry.
Applies only to 64bit windows. 32bit Windows will ignore this parameter.
Default if False.
:return: Returns a dictionary showing the results of the registry operation.
:rtype: dict
The following example will set the ``(Default)`` value for the
``SOFTWARE\\Salt`` key in the ``HKEY_CURRENT_USER`` hive to ``0.15.3``. The
value will not be reflected in ``Wow6432Node``:
Example:
.. code-block:: yaml
HKEY_CURRENT_USER\\SOFTWARE\\Salt:
reg.present:
- vdata: 0.15.3
- reflection: False
The following example will set the value for the ``version`` entry under the
``SOFTWARE\\Salt`` key in the ``HKEY_CURRENT_USER`` hive to ``0.15.3``. The
value will be reflected in ``Wow6432Node``:
Example:
.. code-block:: yaml
HKEY_CURRENT_USER\\SOFTWARE\\Salt:
reg.present:
- vname: version
- vdata: 0.15.3
In the above example the path is interpreted as follows:
- ``HKEY_CURRENT_USER`` is the hive
- ``SOFTWARE\\Salt`` is the key
- ``vname`` is the value name ('version') that will be created under the key
- ``vdata`` is the data that will be assigned to 'version'
'''
ret = {'name': name,
'result': True,
'changes': {},
'comment': ''}
# This is for backwards compatibility
# If 'value' is passed a value, vdata becomes value and the vname is
# obtained from the key path
if value or value in [0, '']:
hive, key, vname = _parse_key_value(name)
vdata = value
ret['comment'] = 'State file is using deprecated syntax. Please update.'
salt.utils.warn_until(
'Boron',
'The \'value\' argument has been deprecated. '
'Please use vdata instead.'
)
else:
hive, key = _parse_key(name)
# Determine what to do
reg_current = __salt__['reg.read_value'](hive=hive,
key=key,
vname=vname,
use_32bit_registry=use_32bit_registry)
if vdata == reg_current['vdata'] and reg_current['success']:
ret['comment'] = '{0} in {1} is already configured'.\
format(vname if vname else '(Default)', name)
return ret
add_change = {'Key': r'{0}\{1}'.format(hive, key),
'Entry': '{0}'.format(vname if vname else '(Default)'),
'Value': '{0}'.format(vdata)}
# Check for test option
if __opts__['test']:
ret['result'] = None
ret['changes'] = {'reg': {'Will add': add_change}}
return ret
# Configure the value
ret['result'] = __salt__['reg.set_value'](hive=hive,
key=key,
vname=vname,
vdata=vdata,
vtype=vtype,
use_32bit_registry=use_32bit_registry)
if not ret['result']:
ret['changes'] = {}
ret['comment'] = r'Failed to add {0} to {1}\{2}'.format(name, hive, key)
else:
ret['changes'] = {'reg': {'Added': add_change}}
ret['comment'] = r'Added {0} to {1}\{2}'.format(name, hive, key)
return ret
def absent(name, vname=None, use_32bit_registry=False):
'''
Ensure a registry value is removed. To remove a key use key_absent.
:param str name: A string value representing the full path of the key to
include the HIVE, Key, and all Subkeys. For example:
``HKEY_LOCAL_MACHINE\\SOFTWARE\\Salt``
Valid hive values include:
- HKEY_CURRENT_USER or HKCU
- HKEY_LOCAL_MACHINE or HKLM
- HKEY_USERS or HKU
:param str vname: The name of the value you'd like to create beneath the
Key. If this parameter is not passed it will assume you want to set the
(Default) value
:param bool use_32bit_registry: Use the 32bit portion of the registry.
Applies only to 64bit windows. 32bit Windows will ignore this parameter.
Default if False.
:return: Returns a dictionary showing the results of the registry operation.
:rtype: dict
CLI Example:
.. code-block:: yaml
'HKEY_CURRENT_USER\\SOFTWARE\\Salt\\version':
reg.absent
In the above example the path is interpreted as follows:
- ``HKEY_CURRENT_USER`` is the hive
- ``SOFTWARE\\Salt`` is the key
- ``version`` is the value name
So the value ``version`` will be deleted from the ``SOFTWARE\\Salt`` key in
the ``HKEY_CURRENT_USER`` hive.
'''
ret = {'name': name,
'result': True,
'changes': {},
'comment': ''}
hive, key = _parse_key(name)
# Determine what to do
reg_check = __salt__['reg.read_value'](hive=hive,
key=key,
vname=vname,
use_32bit_registry=use_32bit_registry)
if not reg_check['success'] or reg_check['vdata'] == '(value not set)':
if not vname:
hive, key, vname = _parse_key_value(name)
reg_check = __salt__['reg.read_value'](hive=hive,
key=key,
vname=vname,
use_32bit_registry=use_32bit_registry)
if not reg_check['success'] or reg_check['vdata'] == '(value not set)':
ret['comment'] = '{0} is already absent'.format(name)
return ret
else:
ret['comment'] = '{0} is already absent'.format(name)
return ret
remove_change = {'Key': r'{0}\{1}'.format(hive, key),
'Entry': '{0}'.format(vname if vname else '(Default)')}
# Check for test option
if __opts__['test']:
ret['result'] = None
ret['changes'] = {'reg': {'Will remove': remove_change}}
return ret
# Delete the value
ret['result'] = __salt__['reg.delete_value'](hive=hive,
key=key,
vname=vname,
use_32bit_registry=use_32bit_registry)
if not ret['result']:
ret['changes'] = {}
ret['comment'] = r'Failed to remove {0} from {1}'.format(key, hive)
else:
ret['changes'] = {'reg': {'Removed': remove_change}}
ret['comment'] = r'Removed {0} from {1}'.format(key, hive)
return ret
def key_absent(name, force=False, use_32bit_registry=False):
r'''
.. versionadded:: 2015.5.4
Ensure a registry key is removed. This will remove a key and all value
entries it contains. It will fail if the key contains subkeys.
:param str name: A string representing the full path to the key to be
    removed, including the hive and the keypath. The hive can be any of the following:
- HKEY_LOCAL_MACHINE or HKLM
- HKEY_CURRENT_USER or HKCU
- HKEY_USER or HKU
:param bool force: A boolean value indicating that all subkeys should be
deleted with the key. If force=False and subkeys exists beneath the key you
want to delete, key_absent will fail. Use with caution. The default is False.
:return: Returns a dictionary showing the results of the registry operation.
:rtype: dict
The following example will delete the ``SOFTWARE\Salt`` key and all subkeys
under the ``HKEY_CURRENT_USER`` hive.
Example:
.. code-block:: yaml
'HKEY_CURRENT_USER\SOFTWARE\Salt':
reg.key_absent:
- force: True
In the above example the path is interpreted as follows:
- ``HKEY_CURRENT_USER`` is the hive
- ``SOFTWARE\Salt`` is the key
'''
ret = {'name': name,
'result': True,
'changes': {},
'comment': ''}
hive, key = _parse_key(name)
# Determine what to do
if not __salt__['reg.read_value'](hive=hive,
key=key,
use_32bit_registry=use_32bit_registry)['success']:
ret['comment'] = '{0} is already absent'.format(name)
return ret
ret['changes'] = {'reg': {
'Removed': {
'Key': r'{0}\{1}'.format(hive, key)
}}}
# Check for test option
if __opts__['test']:
ret['result'] = None
return ret
# Delete the value
__salt__['reg.delete_key_recursive'](hive=hive,
key=key,
use_32bit_registry=use_32bit_registry)
if __salt__['reg.read_value'](hive=hive,
key=key,
use_32bit_registry=use_32bit_registry)['success']:
ret['result'] = False
ret['changes'] = {}
ret['comment'] = 'Failed to remove registry key {0}'.format(name)
return ret
| apache-2.0 | 7,227,832,016,917,860,000 | 31.731235 | 99 | 0.575159 | false | 3.961899 | true | false | false |
rfaulkner/databayes | http/databayes_api/views.py | 1 | 13088 | """
Defines the routing endpoints of the RESTful API for databayes.
Each method corresponds to an API action and returns the status of the action and the output. This
layer handles communication to the databayes daemon.
IMPORTANT NOTE! - Only one of these server instances should be running to avoid race conditions
"""
from databayes_api import app, log, redisio, config, \
gen_queue_id, exists_queue_item
import json, time
from flask import render_template, redirect, url_for, \
request, escape, flash, g, session, Response
ERR_MSG_BADLY_FORMED_REQUEST = 'Malformed request, try again'
# UTILITY METHODS
def handle_queue_validation():
"""
Method for handling queue validation in the view logic
:return:
"""
qid = str(gen_queue_id())
iterations = 0
while exists_queue_item(qid):
if iterations == config.REDIS_QUEUE_COUNTER_MAX:
return -1 # Indicates failure
qid = str(gen_queue_id())
iterations += 1
return str(qid)
def unpack_query_params(request):
"""
Helper method to fetch query paramaters for command requests
:param request:
:return:
"""
ret = dict()
ret['ok'] = True
ret['types'] = []
ret['fields'] = []
ret['fields1'] = []
ret['fields2'] = []
ret['values1'] = []
ret['values2'] = []
ret['message'] = ''
ret['fields'] = request.args.get('fields').split(',') \
if request.args.get('fields') else []
ret['types'] = request.args.get('types').split(',') \
if request.args.get('fields') else []
ret['fields1'] = request.args.get('fields1').split(',') \
if request.args.get('fields1') else []
ret['fields2'] = request.args.get('fields2').split(',') \
if request.args.get('fields2') else []
ret['values1'] = request.args.get('values1').split(',') \
if request.args.get('values1') else []
ret['values2'] = request.args.get('values2').split(',') \
if request.args.get('values2') else []
if len(ret['fields']) != len(ret['types']) or \
len(ret['fields1']) != len(ret['values1']) or \
len(ret['fields2']) != len(ret['values2']):
ret['ok'] = False
ret['message'] = 'Count of fields and types or values do not match'
return ret
def wait_for_response(qid, poll_frequency=10.0, max_tries=5):
"""
Handles polling a response from the redis queue determined by id. Returns
an empty response if it never arrives.
:param qid: int redis queue id
:param poll_frequency: int millisecond frequency of a poll
:param max_tries: int poll no more times than this
:return: string response written to redis from the daemon
"""
rsp = ""
for i in xrange(max_tries):
rsp = redisio.DataIORedis().read(config.DBY_RSP_QUEUE_PREFIX + qid)
if rsp: # got response, stop polling
break
time.sleep(float(poll_frequency) / 1000.0)
return rsp
# --- VIEW METHODS ---
# ====================
def get_arg_str(fields, values, delimiter):
"""
Synthesizes argument strings for entity attributes for databayes. Length
of fields and values must be equal.
:param fields: list of field names
:param values: list of field values
    :param delimiter: str, relevant delimiter
:return: argument string
"""
items = []
for i in xrange(len(fields)):
items.append(str(fields[i]) + str(delimiter) + str(values[i]))
return ",".join(items)
def view_switch(view, args):
"""
General method which implements view logic
:param view: str, view to construct a response for
:param args: view arguments passed along
:return: text response from databayes or error
"""
log.debug('Processing view: "{0}"'.format(view))
log.debug('Processing args: "{0}"'.format(str(args)))
query_param_obj = unpack_query_params(request)
if (not query_param_obj['ok']):
return Response(json.dumps([query_param_obj['message']]),
mimetype='application/json')
# Retrieve a valid queue item
qid = handle_queue_validation()
if qid == -1:
return Response(json.dumps(['Queue is full, try again later.']),
mimetype='application/json')
# Construct command
cmd = ""
    if view == 'define_entity':
        # Pair each declared field with its type: def <entity>(<f1>_<t1>, ...)
        if query_param_obj['fields'] and query_param_obj['types']:
            arg_str = get_arg_str(query_param_obj['fields'],
                                  query_param_obj['types'], '_')
        else:
            arg_str = ""
            log.info('Warning: entity has no attributes')
        cmd = 'def {0}({1})'.format(args['entity'], arg_str) \
            if arg_str else 'def ' + str(args['entity'])
elif view == 'add_relation':
arg_str_1 = get_arg_str(query_param_obj['fields1'], query_param_obj['values1'], '=')
arg_str_2 = get_arg_str(query_param_obj['fields2'], query_param_obj['values2'], '=')
cmd = 'add rel {0}({1}) {2}({3})'.format(args['entity_1'], arg_str_1,
args['entity_2'], arg_str_2)
elif view == 'generate':
pass
elif view == 'list_entity':
cmd = 'lst ent {0}'.format(args['pattern'])
elif view == 'list_relation':
arg_str_1 = get_arg_str(query_param_obj['fields1'], query_param_obj['values1'], '=')
arg_str_2 = get_arg_str(query_param_obj['fields2'], query_param_obj['values2'], '=')
cmd = 'lst rel {0}({1}) {2}({3})'.format(args['entity_1'], arg_str_1,
args['entity_2'], arg_str_2)
elif view == 'remove_entity':
cmd = 'rm ent {0}'.format(args['entity'])
elif view == 'remove_relation':
arg_str_1 = get_arg_str(query_param_obj['fields1'], query_param_obj['values1'], '=')
arg_str_2 = get_arg_str(query_param_obj['fields2'], query_param_obj['values2'], '=')
cmd = 'rm rel {0}({1}) {2}({3})'.format(args['entity_1'], arg_str_1,
args['entity_2'], arg_str_2)
log.info('sending command: "{0}"'.format(cmd))
# Send cmd to databayes daemon
redisio.DataIORedis().connect()
redisio.DataIORedis().write(config.DBY_CMD_QUEUE_PREFIX + qid, cmd)
# check response
rsp = wait_for_response(qid)
if not rsp:
rsp = "Could not find response before max retires expired."
return rsp
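# Illustrative trace of a hypothetical request (names are made up):
#   GET /add/rel/player/game?fields1=name&values1=bob&fields2=id&values2=7
#   builds the daemon command  add rel player(name=bob) game(id=7)
#   and writes it to redis under DBY_CMD_QUEUE_PREFIX + qid, then polls
#   DBY_RSP_QUEUE_PREFIX + qid for the daemon's response.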
def home(entity):
"""
Defines web interface to the tool and help.
"""
# TODO - add content here, primarily an interface to an instance
# run on rackspace host
return Response("Welcome to databayes!",
mimetype='application/json')
def version(entity):
"""
Basic version info for databayes
"""
return Response("databayes v1. 2015. Ryan Faulkner",
mimetype='application/json')
def define_entity(entity):
"""
Handles remote requests to databayes for entity definition
Translation: def e(<f1>_<t1>, <f2>_<t2>, ...) ->
/def/e?fields=f1,f2,...&types=t1,t2,...
:return: JSON response indicating status of action & output
"""
try:
return Response(
json.dumps([view_switch('define_entity', {'entity': entity})]),
mimetype='application/json')
except Exception as e:
log.error(e.message)
return Response(
json.dumps([ERR_MSG_BADLY_FORMED_REQUEST]),
mimetype='application/json')
def add_relation(entity_1, entity_2):
"""
Handles remote requests to databayes for adding relations
Translation: add rel e1(<f1_1>_<v1_1>,...) e2(<f2_1>_<v2_1>,...) ->
                 /add/rel/e1/e2?fields1=f1_1,...&values1=v1_1,...
                     &fields2=f2_1,...&values2=v2_1,...
:return: JSON response indicating status of action & output
"""
try:
return Response(
json.dumps([view_switch(
'add_relation', {'entity_1': entity_1, 'entity_2': entity_2})]),
mimetype='application/json')
except Exception as e:
log.error(e.message)
return Response(
json.dumps([ERR_MSG_BADLY_FORMED_REQUEST]),
mimetype='application/json')
def generate(entity_1, entity_2):
"""
Handles remote requests to databayes for generating samples
Translation: gen e1(<f1_1>_<v1_1>,...) constrain e2(<f2_1>_<v2_1>,...) ->
/gen/e1/e2?fields1=f1_1,...&types1=t1_1,...&fields2=f2_1,...&types2=t2_1,...
:return: JSON response indicating status of action & output
"""
try:
return Response(
json.dumps(
[view_switch('generate',
{'entity_1': entity_1, 'entity_2': entity_2})]),
mimetype='application/json')
except Exception as e:
log.error(e.message)
return Response(
json.dumps([ERR_MSG_BADLY_FORMED_REQUEST]),
mimetype='application/json')
def list_entity(pattern):
"""
Handles remote requests to databayes for listing entities
Translation: lst ent regex -> /lst/ent/regex
:return: JSON response indicating status of action & output
"""
try:
return Response(
json.dumps([view_switch('list_entity', {'pattern': pattern})]),
mimetype='application/json')
except Exception as e:
log.error(e.message)
return Response(
json.dumps([ERR_MSG_BADLY_FORMED_REQUEST]),
mimetype='application/json')
def list_relation(entity_1, entity_2):
"""
Handles remote requests to databayes for listing relations
    Translation: lst rel regex1 regex2 -> /lst/rel/regex1/regex2
:return: JSON response indicating status of action & output
"""
try:
return Response(
json.dumps(
[view_switch('list_relation',
{'entity_1': entity_1, 'entity_2': entity_2})]),
mimetype='application/json')
except Exception as e:
log.error(e.message)
return Response(
json.dumps([ERR_MSG_BADLY_FORMED_REQUEST]),
mimetype='application/json')
def remove_entity(entity):
"""
Handles remote requests to databayes for removing entities
Translation: rm ent e -> /rm/ent/e
:return: JSON response indicating status of action & output
"""
try:
return Response(
json.dumps([view_switch('remove_entity', {'entity': entity})]),
mimetype='application/json')
except Exception as e:
log.error(e.message)
return Response(
json.dumps([ERR_MSG_BADLY_FORMED_REQUEST]),
mimetype='application/json')
def remove_relation(entity_1, entity_2):
"""
Handles remote requests to databayes for removing relations
Translation: rm rel e1(<f1_1>_<v1_1>,...) e2(<f2_1>_<v2_1>,...)
        -> /rm/rel/e1/e2?fields1=f1_1,...&values1=v1_1,...&fields2=f2_1,
        ...&values2=v2_1,...
:return: JSON response indicating status of action & output
"""
try:
return Response(
json.dumps(
[view_switch('remove_relation',
{'entity_1': entity_1, 'entity_2': entity_2})]),
mimetype='application/json')
except Exception as e:
log.error(e.message)
return Response(
json.dumps([ERR_MSG_BADLY_FORMED_REQUEST]),
mimetype='application/json')
# Stores view references in structure
view_list = {
home.__name__: home,
version.__name__: version,
define_entity.__name__: define_entity,
add_relation.__name__: add_relation,
generate.__name__: generate,
list_entity.__name__: list_entity,
list_relation.__name__: list_relation,
remove_entity.__name__: remove_entity,
remove_relation.__name__: remove_relation,
}
route_deco = {
home.__name__: app.route('/', methods=['GET']),
version.__name__: app.route('/v', methods=['GET']),
define_entity.__name__: app.route('/def/<entity>', methods=['GET', 'POST']),
add_relation.__name__: app.route('/add/<entity_1>/<entity_2>', methods=['GET', 'POST']),
    generate.__name__: app.route('/gen/<entity_1>/<entity_2>', methods=['GET', 'POST']),
list_entity.__name__: app.route('/lst/ent/<pattern>', methods=['GET', 'POST']),
    list_relation.__name__: app.route('/lst/rel/<entity_1>/<entity_2>', methods=['GET', 'POST']),
remove_entity.__name__: app.route('/rm/ent/<entity>', methods=['GET', 'POST']),
remove_relation.__name__: app.route('/rm/rel/<entity_1>/<entity_2>', methods=['GET', 'POST']),
}
# Apply decorators to views
def init_views():
for key in route_deco:
log.info('Registering view - {0}'.format(key))
route = route_deco[key]
view_method = view_list[key]
view_list[key] = route(view_method)
| apache-2.0 | 8,312,816,424,362,879,000 | 34.468835 | 103 | 0.575718 | false | 3.684685 | false | false | false |
purisc-group/purisc | compiler/class_def/conversions/arithmetic.py | 1 | 29184 | from helpers import next_subleq
from helpers import subleq
from helpers import clear
import re
def add(instr, assem):
a = instr.args[0];
b = instr.args[1];
c = instr.result;
t0 = assem.getNextTemp();
#check for literals
if re.match("\d+",a):
if a not in assem.dataMem:
assem.dataMem[a] = a;
if re.match("\d+",b):
if b not in assem.dataMem:
assem.dataMem[b] = b;
assem.progMem.append("\n// " + instr.raw);
assem.subleq(t0,t0,"NEXT");
assem.subleq(a,t0,"NEXT");
assem.subleq(b,t0,"NEXT");
assem.subleq(c,c,"NEXT");
assem.subleq(t0,c,"NEXT");
def sub(instr, assem):
a = instr.args[0];
b = instr.args[1];
    c = instr.result;
    t0 = assem.getNextTemp();
    #check for literals
if re.match("\d+",a):
if a not in assem.dataMem:
assem.dataMem[a] = a;
if re.match("\d+",b):
if b not in assem.dataMem:
assem.dataMem[b] = b;
assem.progMem.append("\n // " + instr.raw);
assem.subleq(c,c,"NEXT");
assem.subleq(t0,t0,"NEXT");
assem.subleq(a,t0,"NEXT");
assem.subleq(t0,c,"NEXT");
assem.subleq(b,c,"NEXT");
def mul(instr, assem):
arg1 = instr.args[0];
arg2 = instr.args[1];
result = instr.result;
c = assem.getNextReserved("workingResult"); # will hold the value of the negative answer until it is flipped at the end if necessary
a = assem.getNextReserved("mul");
b = assem.getNextReserved("mul");
flip = assem.getNextReserved("flip");
i0 = assem.getNextReserved("i");
operand = assem.getNextReserved("operand");
power = assem.getNextReserved("power");
decomp = assem.getNextReserved("decomp");
decomp_ = assem.getNextReserved("mul_decomp_");
powers = assem.getNextReserved("powers");
p_ = "powersOf2_";
#labels
flipA = assem.getNextReserved("flipA");
checkB = assem.getNextReserved("checkB");
flipB = assem.getNextReserved("flipB");
continue0 = assem.getNextReserved("continue0_");
continue1 = assem.getNextReserved("continue1_");
aLess = assem.getNextReserved("aLess");
continue2 = assem.getNextReserved("continue2_");
begin = assem.getNextReserved("begin");
p_0 = assem.getNextReserved("p_0_");
d_0 = assem.getNextReserved("d_0_");
p_1 = assem.getNextReserved("p_1_");
less = assem.getNextReserved("less");
test = assem.getNextReserved("test");
restore = assem.getNextReserved("restore");
continue3 = assem.getNextReserved("continue3_");
begin2 = assem.getNextReserved("begin2_");
d_2 = assem.getNextReserved("d_2_");
d_3 = assem.getNextReserved("d_3_");
d_4 = assem.getNextReserved("d_4_");
add = assem.getNextReserved("add");
regardless = assem.getNextReserved("regardless");
flipSign = assem.getNextReserved("flipSign");
finish = assem.getNextReserved("finish");
noflipA = assem.getNextReserved("noFlipA");
noflipB = assem.getNextReserved("noFlipB");
t0 = assem.getNextTemp();
t1 = assem.getNextTemp();
t3 = assem.getNextTemp();
t4 = assem.getNextTemp();
assem.progMem.append("\n// " + instr.raw);
#determine the sign of the result
assem.subleq(a,a,"NEXT"); #check the sign of A
assem.subleq(b,b,"NEXT");
assem.subleq(flip,flip,"NEXT");
assem.subleq(t0,t0,"NEXT");
assem.subleq(arg1,t0,noflipA);
assem.subleq(t0,a,"NEXT");
assem.subleq(1,flip,checkB);
assem.subleq(noflipA + ":" + arg1,a,"NEXT");
assem.subleq(checkB + ":" + t0,t0,"NEXT"); #check the sign of B
assem.subleq(arg2,t0,noflipB);
assem.subleq(t0,b,"NEXT");
assem.subleq(-1,flip,"NEXT");
assem.subleq(t0,t0,continue0);
assem.subleq(noflipB + ":" + arg2,b,"NEXT");
#determine the operand
assem.subleq(continue0 + ":" + operand,operand,"NEXT");
assem.subleq(power,power,"NEXT");
assem.subleq(a,b,aLess);
assem.subleq(a,power,"NEXT");
assem.subleq(b,operand,"NEXT");
assem.subleq(t0,t0,"NEXT");
assem.subleq(power,t0,"NEXT");
assem.subleq(t0,operand,"NEXT");
assem.subleq(t0,t0,continue1);
assem.subleq(aLess + ":" + a,operand,"NEXT");
assem.subleq(b,power,"NEXT");
assem.subleq(t0,t0,'NEXT');
assem.subleq(operand,t0,"NEXT");
assem.subleq(t0,power,"NEXT");
#decompose the operand into powers of 2
#maxPower = -1;
# for i = 30 -> 0
#if operand - 2^i >= 0
#powers[i] = 1
#operand = operand - 2^i
    #if maxPower == -1
#maxPower = i
#if operand - 2^i == 0:
#break;
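    # Plain-Python sketch of the decomposition described above (illustrative
    # only -- the subleq sequence below is what actually runs):
    #   powers, maxPower = [0] * 31, -1
    #   for i in range(30, -1, -1):
    #       if operand - 2 ** i >= 0:
    #           powers[i] = 1
    #           operand -= 2 ** i
    #           if maxPower == -1:
    #               maxPower = i
    #       if operand == 0:
    #           break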
two_i = assem.getNextReserved("two_i");
decomp_i = assem.getNextReserved("decomp_i");
restore = assem.getNextReserved("restore");
maxPower = assem.getNextReserved("maxPower");
maxFlag = assem.getNextReserved("maxFlag");
notMax = assem.getNextReserved("notMax");
continue2 = assem.getNextReserved("continue2");
incr0 = assem.getNextReserved("inc");
loop0 = assem.getNextReserved("loop");
t4 = assem.getNextTemp();
assem.dataMem[-2] = -2;
assem.dataMem[0] = 0;
#setup loop
assem.subleq(continue1 + ":" + i0,i0,"NEXT");
assem.subleq(-30,i0,"NEXT");
assem.subleq(two_i,two_i,"NEXT");
assem.subleq("powersOf2_",two_i,"NEXT");
assem.subleq(30,two_i,"NEXT");
assem.subleq(decomp_i,decomp_i,"NEXT");
assem.subleq("mul_decomp_",decomp_i,"NEXT");
assem.subleq(30,decomp_i,"NEXT");
assem.subleq(maxPower,maxPower,"NEXT");
assem.subleq(maxFlag,maxFlag,"NEXT");
assem.subleq(-2,maxFlag, "NEXT");
assem.subleq(loop0 + ":" + p_0,p_0,"NEXT");
assem.subleq(two_i,p_0,"NEXT");
assem.subleq(d_0,d_0,"NEXT");
assem.subleq(decomp_i,d_0,"NEXT");
assem.subleq(p_1,p_1,"NEXT");
assem.subleq(two_i,p_1,"NEXT");
assem.subleq(p_0 + ":#1",operand,"NEXT"); #operand = operand - 2^i
assem.subleq(-1,operand,restore); #add one to handle zero case
assem.subleq(1,operand,"NEXT");
assem.subleq(-1,d_0 + ":#1","NEXT"); #subtract the one
assem.subleq(1,maxFlag,notMax);
assem.subleq(i0,maxPower,"NEXT");
assem.subleq(notMax + ":0",operand,continue2);
assem.subleq(t0,t0,incr0);
assem.subleq(restore + ":" + t0,t0,"NEXT");
assem.subleq(p_1 + ":#1",t0,"NEXT");
assem.subleq(t0,operand,"NEXT");
assem.subleq(1,operand,"NEXT");
#decrement and repeat if necessary
assem.subleq(incr0 + ":-1",decomp_i,"NEXT");
assem.subleq(-1,two_i,"NEXT");
assem.subleq(1,i0,"NEXT");
assem.subleq(t0,t0,"NEXT");
assem.subleq(i0,t0,loop0);
#do successive additions of powers of 2
i1 = assem.getNextReserved("i");
adder = assem.getNextReserved("adder");
op = assem.getNextReserved("op");
loop2 = assem.getNextReserved("loop");
continue3 = assem.getNextReserved("continue3");
continueLoop = assem.getNextReserved("contLoop");
d_3 = assem.getNextReserved("d_3");
noADD = assem.getNextReserved("noAdd");
assem.subleq(continue2 + ":" + i1,i1,"NEXT");
assem.subleq("2938483",t0,"NEXT");
assem.subleq(t0,t0,"NEXT");
assem.subleq(maxPower,t0,"NEXT")
assem.subleq(t1,t1,"NEXT");
assem.subleq(t0,t1,"NEXT");
assem.subleq(maxPower,maxPower,"NEXT");
assem.subleq(t1,maxPower,"NEXT");
assem.subleq(adder,adder,"NEXT");
assem.subleq(op,op,"NEXT");
assem.subleq(power,op,"NEXT");
assem.subleq(op,adder,'NEXT');
assem.subleq(decomp_i,decomp_i,"NEXT");
assem.subleq("mul_decomp_",decomp_i,"NEXT");
assem.subleq(c,c,"NEXT");
assem.subleq(loop2 + ":" + maxPower,i1,continueLoop); #for i = 0 -> maxPower
assem.subleq(t0,t0,continue3);
assem.subleq(continueLoop + ":" + t0,t0,"NEXT");
assem.subleq(d_3,d_3,"NEXT");
assem.subleq(decomp_i,d_3,"NEXT");
assem.subleq(maxPower,t0,"NEXT"); #restore i to what it was before comparison
assem.subleq(t0,i1,"NEXT");
assem.subleq(0,d_3 + ":#1",noADD);
assem.subleq(adder,c,"NEXT");
assem.subleq(noADD + ":" + t0,t0,"NEXT");
assem.subleq(adder,t0,"NEXT");
assem.subleq(t0,adder,"NEXT");
#increment stuff
assem.subleq(-1,i1,"NEXT");
assem.subleq(1,decomp_i,"NEXT");
assem.subleq(t0,t0,loop2);
assem.subleq(continue3 + ":" + t0,t0,"NEXT");
#determine sign. c is the negative right now so flip if flip flag == 0
done = assem.getNextReserved("done");
ansPos = assem.getNextReserved("ansPos");
ansNeg = assem.getNextReserved("ansNeg");
'''assem.subleq(result,result,"NEXT");
assem.subleq(flip,result,"NEXT");
assem.subleq(t0,t0,"#-1");'''
assem.subleq(-1,flip,ansNeg);
assem.subleq(1,flip,ansPos);
assem.subleq(t0,t0,ansNeg);
assem.subleq(ansPos + ":" + result,result,"NEXT");
assem.subleq(c,result,"NEXT");
assem.subleq(t0,t0,done);
assem.subleq(ansNeg + ":" + t0,t0,"NEXT");
assem.subleq(c,t0,"NEXT");
assem.subleq(t0,result,"NEXT");
assem.subleq(done + ":" + t0,t0,"NEXT");
assem.dataMem["1"] = "#1";
assem.dataMem["-30"] = "#-30";
assem.dataMem["0"] = "#0";
assem.dataMem["30"] = "#30";
assem.dataMem["-1"] = "#-1";
assem.dataMem["2"] = "#2";
assem.dataMem["2938483"] = "#2938483";
#space for the powers of 2
assem.dataMem["powersOf2_1"] = "#1"
assem.dataMem["powersOf2_2"] = "#2"
assem.dataMem["powersOf2_4"] = "#4"
assem.dataMem["powersOf2_8"] = "#8"
assem.dataMem["powersOf2_16"] = "#16"
assem.dataMem["powersOf2_32"] = "#32"
assem.dataMem["powersOf2_64"] = "#64"
assem.dataMem["powersOf2_128"] = "#128"
assem.dataMem["powersOf2_256"] = "#256"
assem.dataMem["powersOf2_512"] = "#512"
assem.dataMem["powersOf2_1024"] = "#1024"
assem.dataMem["powersOf2_2048"] = "#2048"
assem.dataMem["powersOf2_4096"] = "#4096"
assem.dataMem["powersOf2_8192"] = "#8192"
assem.dataMem["powersOf2_16384"] = "#16384"
assem.dataMem["powersOf2_32768"] = "#32768"
assem.dataMem["powersOf2_65536"] = "#65536"
assem.dataMem["powersOf2_131072"] = "#131072"
assem.dataMem["powersOf2_262144"] = "#262144"
assem.dataMem["powersOf2_524288"] = "#524288"
assem.dataMem["powersOf2_1048576"] = "#1048576"
assem.dataMem["powersOf2_2097152"] = "#2097152"
assem.dataMem["powersOf2_4194304"] = "#4194304"
assem.dataMem["powersOf2_8388608"] = "#8388608"
assem.dataMem["powersOf2_16777216"] = "#16777216"
assem.dataMem["powersOf2_33554432"] = "#33554432"
assem.dataMem["powersOf2_67108864"] = "#67108864"
assem.dataMem["powersOf2_134217728"] = "#134217728"
assem.dataMem["powersOf2_268435456"] = "#268435456"
assem.dataMem["powersOf2_536870912"] = "#536870912"
assem.dataMem["powersOf2_1073741824"] = "#1073741824"
assem.dataMem["powersOf2_"] = "&powersOf2_1"
#space for the decomposition, will be reused every multiplication
assem.dataMem["mul_decomp_0"] = "#0"
assem.dataMem["mul_decomp_1"] = "#0"
assem.dataMem["mul_decomp_2"] = "#0"
assem.dataMem["mul_decomp_3"] = "#0"
assem.dataMem["mul_decomp_4"] = "#0"
assem.dataMem["mul_decomp_5"] = "#0"
assem.dataMem["mul_decomp_6"] = "#0"
assem.dataMem["mul_decomp_7"] = "#0"
assem.dataMem["mul_decomp_8"] = "#0"
assem.dataMem["mul_decomp_9"] = "#0"
assem.dataMem["mul_decomp_10"] = "#0"
assem.dataMem["mul_decomp_11"] = "#0"
assem.dataMem["mul_decomp_12"] = "#0"
assem.dataMem["mul_decomp_13"] = "#0"
assem.dataMem["mul_decomp_14"] = "#0"
assem.dataMem["mul_decomp_15"] = "#0"
assem.dataMem["mul_decomp_16"] = "#0"
assem.dataMem["mul_decomp_17"] = "#0"
assem.dataMem["mul_decomp_18"] = "#0"
assem.dataMem["mul_decomp_19"] = "#0"
assem.dataMem["mul_decomp_20"] = "#0"
assem.dataMem["mul_decomp_21"] = "#0"
assem.dataMem["mul_decomp_22"] = "#0"
assem.dataMem["mul_decomp_23"] = "#0"
assem.dataMem["mul_decomp_24"] = "#0"
assem.dataMem["mul_decomp_25"] = "#0"
assem.dataMem["mul_decomp_26"] = "#0"
assem.dataMem["mul_decomp_27"] = "#0"
assem.dataMem["mul_decomp_28"] = "#0"
assem.dataMem["mul_decomp_29"] = "#0"
assem.dataMem["mul_decomp_30"] = "#0"
assem.dataMem["mul_decomp_"] = "&mul_decomp_0"
def div(instr, assem):
arg1 = instr.args[0];
arg2 = instr.args[1];
c = instr.result;
a = assem.getNextReserved("A");
b = assem.getNextReserved("B");
num = assem.getNextReserved("num");
denom = assem.getNextReserved("denom");
t0 = assem.getNextTemp();
t1 = assem.getNextTemp();
flip = assem.getNextReserved("flip");
noflipA = assem.getNextReserved("noflipA");
noflipB = assem.getNextReserved("noflipB");
checkB = assem.getNextReserved("checkB");
continue0 = assem.getNextReserved("continue");
continue1 = assem.getNextReserved("continue");
zero = assem.getNextReserved("zero");
done = assem.getNextReserved("done");
i0 = assem.getNextReserved("i");
loop0 = assem.getNextReserved("loop");
d_0 = assem.getNextReserved("d_0");
d_1 = assem.getNextReserved("d_1");
d_2 = assem.getNextReserved("d_2");
d_3 = assem.getNextReserved("d_3");
d_prev_0 = assem.getNextReserved("d_prev_0");
d_prev_1 = assem.getNextReserved("d_prev_1");
d_prev_2 = assem.getNextReserved("d_prev_2");
assem.progMem.append("\n// " + instr.raw);
#check for signs
assem.subleq(a,a,"NEXT"); #check the sign of A
assem.subleq(b,b,"NEXT");
assem.subleq(flip,flip,"NEXT");
assem.subleq(t0,t0,"NEXT");
assem.subleq(arg1,t0,noflipA);
assem.subleq(arg1,a,"NEXT");
assem.subleq(1,flip,checkB);
assem.subleq(noflipA + ":" + t0,a,"NEXT");
assem.subleq(checkB + ":" + t0,t0,"NEXT"); #check the sign of B
assem.subleq(arg2,t0,noflipB);
assem.subleq(t0,b,"NEXT");
assem.subleq(-1,flip,"NEXT");
assem.subleq(t0,t0,continue1);
assem.subleq(noflipB + ":" + arg2,b,"NEXT");
#compute d*2^i
assem.subleq(continue1 + ":" + b,"div_d_pwrs_0","NEXT");
assem.subleq(i0,i0,"NEXT");
assem.subleq(-1,i0,"NEXT");
#for i = 1 -> 30
assem.subleq(loop0 + ":" + t0,t0,"NEXT");
assem.subleq(t1,t1,"NEXT");
assem.subleq("div_d_pwrs_",t1,"NEXT"); #dereference d[i]
assem.subleq(i0,t1,"NEXT");
assem.subleq(d_0,d_0,"NEXT"); #change the appropriate instructions pointing to d[i]
assem.subleq(t1,d_0,"NEXT");
assem.subleq(d_1,d_1,"NEXT");
assem.subleq(t1,d_1,"NEXT");
assem.subleq(d_2,d_2,"NEXT");
assem.subleq(t1,d_2,"NEXT");
assem.subleq(d_3,d_3,"NEXT");
assem.subleq(t1,d_3,"NEXT");
assem.subleq(-1,t1,"NEXT"); #dereference d[i-1]
assem.subleq(d_prev_0,d_prev_0,"NEXT"); #rewrite the appropriate instructions pointing to d[i-1]
assem.subleq(t1,d_prev_0,"NEXT");
assem.subleq(d_prev_0 + ":#1",t0,"NEXT");
assem.subleq(d_0 + ":#1",d_1 + ":#1", "NEXT");
assem.subleq(t0,d_2 + ":#1","NEXT");
assem.subleq(t0,d_3 + ":#1","NEXT");
assem.subleq(-1,i0,"NEXT");
assem.subleq(t0,t0,"NEXT");
assem.subleq(i0,t0,"NEXT");
assem.subleq(t1,t1,"NEXT");
assem.subleq(t0,t1,"NEXT");
assem.subleq(30,t1,loop0);
#for i = 30 -> 0
#if n - d*2^i >= 0
    #n = n - d*2^i
#result += 2^i
# if n-d*2^i == 0
#break
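    # Plain-Python sketch of the long division described above (illustrative
    # only; d_pwrs[i] is meant to hold d * 2**i):
    #   quotient = 0
    #   for i in range(30, -1, -1):
    #       if n - d_pwrs[i] >= 0:
    #           n -= d_pwrs[i]
    #           quotient += 2 ** i
    #           if n == 0:
    #               break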
loop1 = assem.getNextReserved("loop");
n = assem.getNextReserved("n");
i1 = assem.getNextReserved("i");
inc = assem.getNextReserved("inc");
restore = assem.getNextReserved("restore");
break0 = assem.getNextReserved("break0");
continue2 = assem.getNextReserved("continue2");
d_i = "d_i"; #pointer to d*2^i
two_i = "two_i"; #pointer to 2^i
d_0 = assem.getNextReserved("d_0");
d_1 = assem.getNextReserved("d_1");
p_0 = assem.getNextReserved("p_0");
assem.subleq(c,c,"NEXT");
assem.subleq(n,n,"NEXT"); #setupt loop
assem.subleq(t0,t0,"NEXT");
assem.subleq(a,t0,"NEXT");
assem.subleq(t0,n,"NEXT")
assem.subleq(i1,i1,"NEXT");
assem.subleq(-30,i1,"NEXT");
assem.subleq(loop1 + ":" + d_0,d_0,"NEXT");
assem.subleq(t0,t0,"NEXT");
assem.subleq(d_i,t0,"NEXT");
assem.subleq(t0,d_0,"NEXT");
assem.subleq(d_1,d_1,"NEXT");
assem.subleq(t0,d_1,"NEXT");
assem.subleq(p_0,p_0,"NEXT");
assem.subleq(t0,t0,"NEXT");
assem.subleq(two_i,t0,"NEXT");
assem.subleq(t0,p_0,"NEXT");
assem.subleq(d_0 + ":#1",n,"NEXT");
assem.subleq(-1,n,restore);
assem.subleq(t1,t1,"NEXT");
assem.subleq(p_0 + ":#1",t1,"NEXT");
assem.subleq(t1,c,"NEXT");
assem.subleq(1,n,break0); #restore n to n = n -d*2^i and also break if necessary
assem.subleq(t0,t0,inc);
assem.subleq(break0 + ":" + t0,t0,continue2);
assem.subleq(restore + ":" + t0,t0,"NEXT");
assem.subleq(d_1 + ":#1",t0,"NEXT");
assem.subleq(t0,n,"NEXT");
assem.subleq(1,n,"NEXT");
assem.subleq(inc + ":1",i1,"NEXT"); #decrement and check
assem.subleq(1,d_i,"NEXT");
assem.subleq(1,two_i,"NEXT");
assem.subleq(t0,t0,"NEXT");
assem.subleq(i1,t0,loop1);
#assem.subleq(continue2 + ":" + t0,t0,"NEXT");
    #flip if necessary
flipResult = assem.getNextReserved("flipResult");
assem.subleq(continue2 +":-1" ,flip,flipResult);
assem.subleq(1,flip,done);
assem.subleq(flipResult + ":" + t0,t0,"NEXT");
assem.subleq(c,t0,"NEXT");
assem.subleq(c,c,"NEXT");
assem.subleq(t1,t1,"NEXT");
assem.subleq(t0,t1,"NEXT");
assem.subleq(t1,c,"NEXT");
#done
assem.subleq(done + ":" + t0,t0,"NEXT");
assem.dataMem[-1] = -1;
assem.dataMem[1] = 1;
assem.dataMem[30] = 30;
assem.dataMem[-30] = -30;
assem.dataMem["div_d_pwrs_0"] = "#0"
assem.dataMem["div_d_pwrs_1"] = "#0"
assem.dataMem["div_d_pwrs_2"] = "#0"
assem.dataMem["div_d_pwrs_3"] = "#0"
assem.dataMem["div_d_pwrs_4"] = "#0"
assem.dataMem["div_d_pwrs_5"] = "#0"
assem.dataMem["div_d_pwrs_6"] = "#0"
assem.dataMem["div_d_pwrs_7"] = "#0"
assem.dataMem["div_d_pwrs_8"] = "#0"
assem.dataMem["div_d_pwrs_9"] = "#0"
assem.dataMem["div_d_pwrs_10"] = "#0"
assem.dataMem["div_d_pwrs_11"] = "#0"
assem.dataMem["div_d_pwrs_12"] = "#0"
assem.dataMem["div_d_pwrs_13"] = "#0"
assem.dataMem["div_d_pwrs_14"] = "#0"
assem.dataMem["div_d_pwrs_15"] = "#0"
assem.dataMem["div_d_pwrs_16"] = "#0"
assem.dataMem["div_d_pwrs_17"] = "#0"
assem.dataMem["div_d_pwrs_18"] = "#0"
assem.dataMem["div_d_pwrs_19"] = "#0"
assem.dataMem["div_d_pwrs_20"] = "#0"
assem.dataMem["div_d_pwrs_21"] = "#0"
assem.dataMem["div_d_pwrs_22"] = "#0"
assem.dataMem["div_d_pwrs_23"] = "#0"
assem.dataMem["div_d_pwrs_24"] = "#0"
assem.dataMem["div_d_pwrs_25"] = "#0"
assem.dataMem["div_d_pwrs_26"] = "#0"
assem.dataMem["div_d_pwrs_27"] = "#0"
assem.dataMem["div_d_pwrs_28"] = "#0"
assem.dataMem["div_d_pwrs_29"] = "#0"
assem.dataMem["div_d_pwrs_30"] = "#0"
assem.dataMem["div_d_pwrs_"] = "&div_d_pwrs_0"
assem.dataMem["powersOf2_1"] = "#1"
assem.dataMem["powersOf2_2"] = "#2"
assem.dataMem["powersOf2_4"] = "#4"
assem.dataMem["powersOf2_8"] = "#8"
assem.dataMem["powersOf2_16"] = "#16"
assem.dataMem["powersOf2_32"] = "#32"
assem.dataMem["powersOf2_64"] = "#64"
assem.dataMem["powersOf2_128"] = "#128"
assem.dataMem["powersOf2_256"] = "#256"
assem.dataMem["powersOf2_512"] = "#512"
assem.dataMem["powersOf2_1024"] = "#1024"
assem.dataMem["powersOf2_2048"] = "#2048"
assem.dataMem["powersOf2_4096"] = "#4096"
assem.dataMem["powersOf2_8192"] = "#8192"
assem.dataMem["powersOf2_16384"] = "#16384"
assem.dataMem["powersOf2_32768"] = "#32768"
assem.dataMem["powersOf2_65536"] = "#65536"
assem.dataMem["powersOf2_131072"] = "#131072"
assem.dataMem["powersOf2_262144"] = "#262144"
assem.dataMem["powersOf2_524288"] = "#524288"
assem.dataMem["powersOf2_1048576"] = "#1048576"
assem.dataMem["powersOf2_2097152"] = "#2097152"
assem.dataMem["powersOf2_4194304"] = "#4194304"
assem.dataMem["powersOf2_8388608"] = "#8388608"
assem.dataMem["powersOf2_16777216"] = "#16777216"
assem.dataMem["powersOf2_33554432"] = "#33554432"
assem.dataMem["powersOf2_67108864"] = "#67108864"
assem.dataMem["powersOf2_134217728"] = "#134217728"
assem.dataMem["powersOf2_268435456"] = "#268435456"
assem.dataMem["powersOf2_536870912"] = "#536870912"
assem.dataMem["powersOf2_1073741824"] = "#1073741824"
assem.dataMem["powersOf2_"] = "&powersOf2_1"
assem.dataMem["d_i"] = "&div_d_pwrs_30";
assem.dataMem["two_i"] = "&powersOf2_1073741824";
def mod(instr, assem):
arg1 = instr.args[0];
arg2 = instr.args[1];
c = instr.result;
a = assem.getNextReserved("A");
b = assem.getNextReserved("B");
num = assem.getNextReserved("num");
denom = assem.getNextReserved("denom");
t0 = assem.getNextTemp();
t1 = assem.getNextTemp();
flip = assem.getNextReserved("flip");
noflipA = assem.getNextReserved("noflipA");
noflipB = assem.getNextReserved("noflipB");
checkB = assem.getNextReserved("checkB");
continue0 = assem.getNextReserved("continue");
continue1 = assem.getNextReserved("continue");
zero = assem.getNextReserved("zero");
done = assem.getNextReserved("done");
i0 = assem.getNextReserved("i");
loop0 = assem.getNextReserved("loop");
d_0 = assem.getNextReserved("d_0");
d_1 = assem.getNextReserved("d_1");
d_2 = assem.getNextReserved("d_2");
d_3 = assem.getNextReserved("d_3");
d_prev_0 = assem.getNextReserved("d_prev_0");
d_prev_1 = assem.getNextReserved("d_prev_1");
d_prev_2 = assem.getNextReserved("d_prev_2");
assem.progMem.append("\n// " + instr.raw);
#check for signs
assem.subleq(a,a,"NEXT"); #check the sign of A
assem.subleq(b,b,"NEXT");
assem.subleq(flip,flip,"NEXT");
assem.subleq(t0,t0,"NEXT");
assem.subleq(arg1,t0,noflipA);
assem.subleq(arg1,a,"NEXT");
assem.subleq(1,flip,checkB);
assem.subleq(noflipA + ":" + t0,a,"NEXT");
assem.subleq(checkB + ":" + t0,t0,"NEXT"); #check the sign of B
assem.subleq(arg2,t0,noflipB);
assem.subleq(t0,b,"NEXT");
assem.subleq(-1,flip,"NEXT");
assem.subleq(t0,t0,continue1);
assem.subleq(noflipB + ":" + arg2,b,"NEXT");
#compute d*2^i
assem.subleq(continue1 + ":" + b,"div_d_pwrs_0","NEXT");
assem.subleq(i0,i0,"NEXT");
assem.subleq(-1,i0,"NEXT");
#for i = 1 -> 30
assem.subleq(loop0 + ":" + t0,t0,"NEXT");
assem.subleq(t1,t1,"NEXT");
assem.subleq("div_d_pwrs_",t1,"NEXT"); #dereference d[i]
assem.subleq(i0,t1,"NEXT");
assem.subleq(d_0,d_0,"NEXT"); #change the appropriate instructions pointing to d[i]
assem.subleq(t1,d_0,"NEXT");
assem.subleq(d_1,d_1,"NEXT");
assem.subleq(t1,d_1,"NEXT");
assem.subleq(d_2,d_2,"NEXT");
assem.subleq(t1,d_2,"NEXT");
assem.subleq(d_3,d_3,"NEXT");
assem.subleq(t1,d_3,"NEXT");
assem.subleq(-1,t1,"NEXT"); #dereference d[i-1]
assem.subleq(d_prev_0,d_prev_0,"NEXT"); #rewrite the appropriate instructions pointing to d[i-1]
assem.subleq(t1,d_prev_0,"NEXT");
assem.subleq(d_prev_0 + ":#1",t0,"NEXT");
assem.subleq(d_0 + ":#1",d_1 + ":#1", "NEXT");
assem.subleq(t0,d_2 + ":#1","NEXT");
assem.subleq(t0,d_3 + ":#1","NEXT");
assem.subleq(-1,i0,"NEXT");
assem.subleq(t0,t0,"NEXT");
assem.subleq(i0,t0,"NEXT");
assem.subleq(t1,t1,"NEXT");
assem.subleq(t0,t1,"NEXT");
assem.subleq(30,t1,loop0);
#for i = 30 -> 0
#if n - d*2^i >= 0
    #n = n - d*2^i
#result += 2^i
# if n-d*2^i == 0
#break
loop1 = assem.getNextReserved("loop");
n = assem.getNextReserved("n");
i1 = assem.getNextReserved("i");
inc = assem.getNextReserved("inc");
restore = assem.getNextReserved("restore");
break0 = assem.getNextReserved("break0");
continue2 = assem.getNextReserved("continue2");
d_i = "d_i"; #pointer to d*2^i
two_i = "two_i"; #pointer to 2^i
d_0 = assem.getNextReserved("d_0");
d_1 = assem.getNextReserved("d_1");
p_0 = assem.getNextReserved("p_0");
assem.subleq(c,c,"NEXT");
assem.subleq(n,n,"NEXT"); #setupt loop
assem.subleq(t0,t0,"NEXT");
assem.subleq(a,t0,"NEXT");
assem.subleq(t0,n,"NEXT")
assem.subleq(i1,i1,"NEXT");
assem.subleq(-30,i1,"NEXT");
assem.subleq(loop1 + ":" + d_0,d_0,"NEXT");
assem.subleq(t0,t0,"NEXT");
assem.subleq(d_i,t0,"NEXT");
assem.subleq(t0,d_0,"NEXT");
assem.subleq(d_1,d_1,"NEXT");
assem.subleq(t0,d_1,"NEXT");
assem.subleq(p_0,p_0,"NEXT");
assem.subleq(t0,t0,"NEXT");
assem.subleq(two_i,t0,"NEXT");
assem.subleq(t0,p_0,"NEXT");
assem.subleq(d_0 + ":#1",n,"NEXT");
assem.subleq(-1,n,restore);
assem.subleq(t1,t1,"NEXT");
assem.subleq(p_0 + ":#1",t1,"NEXT");
assem.subleq(t1,c,"NEXT");
assem.subleq(1,n,break0); #restore n to n = n -d*2^i and also break if necessary
assem.subleq(t0,t0,inc);
assem.subleq(break0 + ":" + t0,t0,continue2);
assem.subleq(restore + ":" + t0,t0,"NEXT");
assem.subleq(d_1 + ":#1",t0,"NEXT");
assem.subleq(t0,n,"NEXT");
assem.subleq(1,n,"NEXT");
assem.subleq(inc + ":1",i1,"NEXT"); #decrement and check
assem.subleq(1,d_i,"NEXT");
assem.subleq(1,two_i,"NEXT");
assem.subleq(t0,t0,"NEXT");
assem.subleq(i1,t0,loop1);
#assem.subleq(continue2 + ":" + t0,t0,"NEXT");
    #flip if necessary
flipResult = assem.getNextReserved("flipResult");
assem.subleq(continue2 +":-1" ,flip,flipResult);
assem.subleq(1,flip,done);
assem.subleq(flipResult + ":" + t0,t0,"NEXT");
assem.subleq(c,t0,"NEXT");
assem.subleq(c,c,"NEXT");
assem.subleq(t1,t1,"NEXT");
assem.subleq(t0,t1,"NEXT");
assem.subleq(t1,c,"NEXT");
#done
assem.subleq(done + ":" + t0,t0,"NEXT");
assem.dataMem[-1] = -1;
assem.dataMem[1] = 1;
assem.dataMem[30] = 30;
assem.dataMem[-30] = -30;
assem.dataMem["div_d_pwrs_0"] = "#0"
assem.dataMem["div_d_pwrs_1"] = "#0"
assem.dataMem["div_d_pwrs_2"] = "#0"
assem.dataMem["div_d_pwrs_3"] = "#0"
assem.dataMem["div_d_pwrs_4"] = "#0"
assem.dataMem["div_d_pwrs_5"] = "#0"
assem.dataMem["div_d_pwrs_6"] = "#0"
assem.dataMem["div_d_pwrs_7"] = "#0"
assem.dataMem["div_d_pwrs_8"] = "#0"
assem.dataMem["div_d_pwrs_9"] = "#0"
assem.dataMem["div_d_pwrs_10"] = "#0"
assem.dataMem["div_d_pwrs_11"] = "#0"
assem.dataMem["div_d_pwrs_12"] = "#0"
assem.dataMem["div_d_pwrs_13"] = "#0"
assem.dataMem["div_d_pwrs_14"] = "#0"
assem.dataMem["div_d_pwrs_15"] = "#0"
assem.dataMem["div_d_pwrs_16"] = "#0"
assem.dataMem["div_d_pwrs_17"] = "#0"
assem.dataMem["div_d_pwrs_18"] = "#0"
assem.dataMem["div_d_pwrs_19"] = "#0"
assem.dataMem["div_d_pwrs_20"] = "#0"
assem.dataMem["div_d_pwrs_21"] = "#0"
assem.dataMem["div_d_pwrs_22"] = "#0"
assem.dataMem["div_d_pwrs_23"] = "#0"
assem.dataMem["div_d_pwrs_24"] = "#0"
assem.dataMem["div_d_pwrs_25"] = "#0"
assem.dataMem["div_d_pwrs_26"] = "#0"
assem.dataMem["div_d_pwrs_27"] = "#0"
assem.dataMem["div_d_pwrs_28"] = "#0"
assem.dataMem["div_d_pwrs_29"] = "#0"
assem.dataMem["div_d_pwrs_30"] = "#0"
assem.dataMem["div_d_pwrs_"] = "&div_d_pwrs_0"
assem.dataMem["powersOf2_1"] = "#1"
assem.dataMem["powersOf2_2"] = "#2"
assem.dataMem["powersOf2_4"] = "#4"
assem.dataMem["powersOf2_8"] = "#8"
assem.dataMem["powersOf2_16"] = "#16"
assem.dataMem["powersOf2_32"] = "#32"
assem.dataMem["powersOf2_64"] = "#64"
assem.dataMem["powersOf2_128"] = "#128"
assem.dataMem["powersOf2_256"] = "#256"
assem.dataMem["powersOf2_512"] = "#512"
assem.dataMem["powersOf2_1024"] = "#1024"
assem.dataMem["powersOf2_2048"] = "#2048"
assem.dataMem["powersOf2_4096"] = "#4096"
assem.dataMem["powersOf2_8192"] = "#8192"
assem.dataMem["powersOf2_16384"] = "#16384"
assem.dataMem["powersOf2_32768"] = "#32768"
assem.dataMem["powersOf2_65536"] = "#65536"
assem.dataMem["powersOf2_131072"] = "#131072"
assem.dataMem["powersOf2_262144"] = "#262144"
assem.dataMem["powersOf2_524288"] = "#524288"
assem.dataMem["powersOf2_1048576"] = "#1048576"
assem.dataMem["powersOf2_2097152"] = "#2097152"
assem.dataMem["powersOf2_4194304"] = "#4194304"
assem.dataMem["powersOf2_8388608"] = "#8388608"
assem.dataMem["powersOf2_16777216"] = "#16777216"
assem.dataMem["powersOf2_33554432"] = "#33554432"
assem.dataMem["powersOf2_67108864"] = "#67108864"
assem.dataMem["powersOf2_134217728"] = "#134217728"
assem.dataMem["powersOf2_268435456"] = "#268435456"
assem.dataMem["powersOf2_536870912"] = "#536870912"
assem.dataMem["powersOf2_1073741824"] = "#1073741824"
assem.dataMem["powersOf2_"] = "&powersOf2_1"
assem.dataMem["d_i"] = "&div_d_pwrs_30";
assem.dataMem["two_i"] = "&powersOf2_1073741824";
def parseArgs(argStr):
arg1 = re.findall("(?<=\s)[^\s,]+(?=,)",argStr)[0];
arg2 = re.findall("(?<=,\s)\s*\S+",argStr)[0];
return [arg1.strip(),arg2.strip()]
| gpl-2.0 | 1,400,089,787,447,496,400 | 34.20386 | 136 | 0.601186 | false | 2.42856 | false | false | false |
markmuetz/stormtracks | stormtracks/results.py | 1 | 3180 | import os
from glob import glob
import pandas as pd
from load_settings import settings
from utils.utils import compress_file, decompress_file
RESULTS_TPL = '{0}.hdf'
class ResultNotFound(Exception):
'''Simple exception thrown if result cannot be found in results manager or on disk'''
pass
class StormtracksResultsManager(object):
'''Manager class that is responsible for loading and saving all python results
Simple key/value store.
Load/saves to settings.OUTPUT_DIR.
'''
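    # Illustrative usage (names and data are hypothetical, not from the
    # original source):
    #   mgr = StormtracksResultsManager('tracking')
    #   mgr.save_result(2005, 'vort_tracks', results_dataframe)
    #   tracks = mgr.get_result(2005, 'vort_tracks')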
def __init__(self, name, output_dir=None):
self.name = name
if output_dir:
self.output_dir = output_dir
else:
self.output_dir = settings.OUTPUT_DIR
def save_result(self, year, result_key, result):
'''Saves a given result based on year, user chosen result_key'''
dirname = os.path.join(self.output_dir, self.name)
if not os.path.exists(dirname):
os.makedirs(dirname)
filename = RESULTS_TPL.format(year)
print('saving {0}'.format(filename))
path = os.path.join(dirname, filename)
result.to_hdf(path, result_key)
def get_result(self, year, result_key):
'''Returns a result from an HDF file.'''
dirname = os.path.join(self.output_dir, self.name)
filename = RESULTS_TPL.format(year)
path = os.path.join(dirname, filename)
try:
result = pd.read_hdf(path, result_key)
except Exception, e:
raise ResultNotFound
return result
def delete(self, year, result_key):
'''Deletes a specific result from disk'''
raise NotImplementedError('Not sure how to delete one result')
def compress_year(self, year, delete=False):
'''Compresses a given year's dir and then optionally deletes that year'''
year_filename = os.path.join(self.output_dir, self.name, RESULTS_TPL.format(year))
compressed_filename = compress_file(year_filename)
if delete:
self.delete_year(year)
return compressed_filename
def delete_year(self, year):
'''Deletes a year (use with caution!)'''
year_filename = os.path.join(self.output_dir, self.name, RESULTS_TPL.format(year))
os.remove(year_filename)
def decompress_year(self, year):
'''Decompresses a given year's tarball'''
filename = os.path.join(self.output_dir, self.name, '{0}.bz2'.format(RESULTS_TPL.format(year)))
decompress_file(filename)
def list_years(self):
'''List all saved years'''
years = []
dirname = os.path.join(self.output_dir, self.name)
for year_dirname in glob(os.path.join(dirname, '*')):
try:
year = int(os.path.splitext(os.path.basename(year_dirname))[0])
years.append(year)
except:
pass
return sorted(years)
def list_results(self, year):
'''List all results saved for a particular year'''
dirname = os.path.join(self.output_dir, self.name)
print(os.path.join(dirname, RESULTS_TPL.format(year)))
store = pd.HDFStore(os.path.join(dirname, RESULTS_TPL.format(year)))
results = [field[0][1:] for field in store.items()]
store.close()
return sorted(results)
| mit | -2,510,691,410,342,341,000 | 31.783505 | 103 | 0.650629 | false | 3.638444 | false | false | false |
guolivar/totus-niwa | service/thirdparty/featureserver/FeatureServer/DataSource/Flickr.py | 1 | 4910 | from FeatureServer.DataSource import DataSource
from vectorformats.Feature import Feature
from FeatureServer.Exceptions.NoGeometryException import NoGeometryException
import md5
import urllib
from lxml import etree
from StringIO import StringIO
class Flickr (DataSource):
def __init__(self, name, api_key, api_secret, attributes = "*", srid_out = 4326, **args):
DataSource.__init__(self, name, **args)
self.api_key = api_key
self.api_secret = api_secret
self.srid_out = srid_out
self.attributes = attributes
self.api = FlickrAPI(self.api_key, self.api_secret)
def select (self, action):
features = []
if action.id is not None:
data = self.api.request({'method':'flickr.photos.getInfo','photo_id':action.id})
doc = etree.parse(StringIO(data)).getroot()
photo = doc.xpath('/rsp/photo')[0]
try:
features.append(self.convert_photo(photo))
except Exception as e:
                pass  # skip photos that cannot be converted (e.g. missing geometry)
else:
params = {'method' : 'flickr.photos.search','extras':'description,owner_name,geo,tags,license'}
if action.bbox:
params['bbox'] = "%f,%f,%f,%f" % tuple(action.bbox)
if hasattr(self, 'user_id'):
params['user_id'] = self.user_id
if hasattr(self, 'tags'):
params['tags'] = self.tags
if hasattr(self, 'tag_mode'):
params['tag_mode'] = self.tag_mode
else:
params['tag_mode'] = "any"
data = self.api.request(params)
doc = etree.parse(StringIO(data)).getroot()
photos = [ photo for photo in doc.xpath('/rsp/photos')[0] ]
for photo in photos:
try:
features.append(self.convert_photo(photo))
except Exception as e:
continue
return features
def convert_photo (self, xml):
node_names = self.get_node_names(xml)
props = {'img_url' : self.get_url(xml)}
owners = xml.xpath('./owner')
if len(owners) > 0:
props['owner'] = owners[0].attrib['nsid']
props['username'] = owners[0].attrib['username']
for i in node_names:
if i == "tags":
tags = [ tag.text for tag in xml.xpath('./%s' % str(i))[0] ]
props[i] = ",".join(tags)
else:
nodes = xml.xpath('./%s' % str(i))
if len(nodes) > 0:
if len(list(nodes[0])) == 0:
if nodes[0].text is None:
props[i] = ""
else:
props[i] = nodes[0].text
try:
coordinates = self.get_coordinates(xml)
except:
raise
return Feature(id=xml.attrib["id"], geometry={'type':"Point", 'coordinates':coordinates}, geometry_attr="geometry", srs=self.srid_out, props=props)
def get_node_names(self, xml):
if self.attributes == "*":
props = [ child.tag for child in xml ]
props.remove("location")
props.remove("owner")
else:
props = self.attributes.split(',')
return props
def get_coordinates(self, xml):
location = xml.xpath('./location')
if len(location) > 0:
loc = location[0]
return [float(loc.attrib['longitude']), float(loc.attrib['latitude'])]
if "longitude" in xml.attrib and "latitude" in xml.attrib:
return [float(xml.attrib['longitude']), float(xml.attrib['latitude'])]
raise NoGeometryException("Twitter", self.name)
def get_url(self, xml):
return "http://farm%s.static.flickr.com/%s/%s_%s_b.jpg" % (xml.attrib['farm'], xml.attrib['server'], xml.attrib['id'], xml.attrib['secret'])
class FlickrAPI:
urls = {
'xml' : 'http://api.flickr.com/services/rest/'
}
def __init__(self, api_key, api_secret):
self.api_key = api_key
self.api_secret = api_secret
def request(self, params = {}, format = "rest"):
params['api_key'] = self.api_key
params['format'] = format
params['api_sig'] = self.signature(params)
return urllib.urlopen(self.urls["xml"], urllib.urlencode(params)).read()
def signature(self, params):
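        # Builds the 'api_sig' request parameter: the md5 hex digest of the
        # shared API secret followed by every parameter concatenated as
        # key + value in sorted key order.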
items = []
keys = params.keys()
keys.sort()
for key in keys:
items.append("%s%s" % (key,params[key]))
sign_string = "%s%s" % (self.api_secret, "".join(items))
return md5.md5(sign_string).hexdigest()
| gpl-3.0 | -8,789,628,092,533,478,000 | 31.959732 | 155 | 0.509776 | false | 4.074689 | false | false | false |
rmsk2/Das-grosse-Quiz | client/playingfield.py | 1 | 19375 | ################################################################################
# Copyright 2016 Martin Grap
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
## @package playingfield Contains a class that implements the playing field of "Das grosse Quiz"
#
# \file playingfield.py
# \brief Contains a class that implements the playing field of "Das grosse Quiz".
import pickle
import questions
import displayclient
ERR_OK = 0
ERR_ERROR = 42
## \brief An exception class that is used for constructing exception objects in this module.
#
class PlayingFieldException(Exception):
    ## \brief Constructor.
    #
    # \param [error_message] Is a string. It has to contain an error message that is to be conveyed to
    # the receiver of the corresponding exception.
#
def __init__(self, error_message):
Exception.__init__(self, 'PlayingField error:' + error_message)
## \brief This class implements the playing field of "Das grosse Quiz".
#
# The central data structure is a multi level dictionary which is referenced through the self._field member.
# The outermost dictionary has the category names as its keys. The values of each of these categories is another
# dictionary that has the keys 20, 40, 60, 80, 100. The value of these keys is a third dictionary that has the
# keys 'answeredby' and 'wronganswersby'. The value for the 'answeredby' key is either None (when the question has
# not been answered yet) or a string which specifies the name of the team that answered the question. The key
# 'wronganswersby' has a set() as its value which contains the names of the team(s) that have given a wrong answer
# to the question.
#
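# As an illustration (example values, not from the original source), a single
# cell of the playing field looks like this after some play:
#
#   self._field['History'][40] = {'answeredby': 'Team A', 'wronganswersby': set(['Team B'])}
#
# i.e. 'Team A' answered the 40 point question of the hypothetical category
# 'History' correctly and 'Team B' gave a wrong answer to it.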
class PlayingField:
## \brief Constructor.
#
# \param [question_repo] An object of type questions.QuestionRepository which holds information about questions, teams
# and network configuration.
#
def __init__(self, question_repo):
## \brief An object of type questions.QuestionRepository.
self._repo = question_repo
## \brief A list of strings. Each list element denotes a category.
self._categories = self._repo.categories
## \brief A multi level dictionary that holds the playing field information.
self._field = {}
## \brief A multi level dictionary that holds the question for each cell in the playing field.
self._questions = {}
## \brief A list of strings. Each list element denotes a name of a team.
self._teams = self._repo.teams
## \brief An object of type displayclient.SignClient which is used to talk to the displayserver.
self._sign_client = displayclient.SignClient(self._repo.config['host'], self._repo.config['port'])
## \brief An object of type questions.Question. It holds the question which is currently displayed by the displayserver.
self._current_question = None
field_column = {20:None, 40:None, 60:None, 80:None, 100:None}
# Initialize the self._field and self._questions dictionaries
for i in self._categories:
self._field[i] = field_column.copy()
self._questions[i] = field_column.copy()
        # Retrieve all questions from the repository
for i in self._categories:
for j in [20, 40, 60, 80, 100]:
self._questions[i][j] = self._repo.get_question(i, j)
self._field[i][j] = {'answeredby':None, 'wronganswersby':set()}
## \brief Returns a reference to the playing field dictionary.
#
# \returns A dictionary as described in the class documentation.
#
@property
def playing_field(self):
return self._field
## \brief Returns a reference to displayserver client object which is in use in this PlayingField instance.
#
# \returns An object of type displayclient.SignClient.
#
@property
def raspi(self):
return self._sign_client
## \brief Returns a string describing the hostname and port which have been specified in the question repository that is used
# by this PlayingField instance.
#
# \returns A string.
#
@property
def server_info(self):
return '{}:{}'.format(self._repo.config['host'], self._repo.config['port'])
## \brief Returns a reference to the questions.Question object which represents the question currently displayed by the displaserver.
#
# \returns An object of type questions.Question or None.
#
@property
def current_question(self):
return self._current_question
    ## \brief This method deserializes the current state of the playing field from a file.
#
# \param [file_name] A string. Has to contain the name of the file which contains a serialized state.
#
    # \returns An int. A return value of ERR_OK (0) means that reconstructing the state was successful.
#
def load_state(self, file_name):
result = ERR_OK
dumped_playing_field = None
try:
with open(file_name, 'rb') as f:
dumped_playing_field = f.read()
restored_playing_field = pickle.loads(dumped_playing_field)
for i in self._categories:
for j in [20, 40, 60, 80, 100]:
for t in restored_playing_field[i][j]['wronganswersby']:
if not (t in self.current_teams):
raise PlayingFieldException('Loaded state contains unknown team names')
# NB: If restored_playing_field[i][j]['answeredby'] contains an unknown team name the question is regarded as
# answered by noone.
self._field = restored_playing_field
self._current_question = None
except:
result = ERR_ERROR
return result
    ## \brief This method serializes the current state of the playing field into a file.
#
# \param [file_name] A string. Has to contain the name of the file into which the serialized state should be stored.
#
    # \returns An int. A return value of ERR_OK (0) means that saving the state was successful.
#
def save_state(self, file_name):
result = ERR_OK
try:
dumped_playing_field = pickle.dumps(self._field)
with open(file_name, 'wb') as f:
f.write(dumped_playing_field)
except:
result = ERR_ERROR
return result
    ## \brief This method clears the state of the playing field, i.e. sets all cells to the default value, which
# means that no question has been answered either right or wrong yet.
#
# \returns Nothing.
#
def clear(self):
for i in self._categories:
for j in [20, 40, 60, 80, 100]:
self._field[i][j] = {'answeredby':None, 'wronganswersby':set()}
## \brief This method evaluates the current state of the playing field. It iterates over all cells and sums up the
# points earned by each team. A correct answer adds the value of the question to the team result. In case of
    # a wrong answer the question value is subtracted from the team result.
#
# \returns A dictionary. It maps a string key to an int value. Each key is the name of a team and its value
# is the number of points earned by the corresponding team.
#
def calc_result(self):
result = {}
for i in self._teams:
result[i] = 0
for i in self._categories:
for j in [20, 40, 60, 80, 100]:
if self._field[i][j]['answeredby'] in self._teams:
result[self._field[i][j]['answeredby']] += j
for k in self._field[i][j]['wronganswersby']:
if k in self._teams:
result[k] -= j
return result
## \brief This method evaluates the current state of the playing field. It iterates over all cells and counts the
# questions which have already been answered.
#
# \returns An int.
#
def num_questions_answered(self):
result = 0
for i in self._categories:
for j in [20, 40, 60, 80, 100]:
if self._field[i][j]['answeredby'] != None:
result += 1
return result
## \brief Instructs the displayserver to display the intro message and resets the value of self._current_question to None.
#
    # \returns An int. A return value of 0 indicates a successful execution.
#
def show_intro(self):
self._current_question = None
return self._sign_client.show_intro()
## \brief Instructs the displayserver to display the "Thank you" message and resets the value of self._current_question to None.
#
    # \returns An int. A return value of 0 indicates a successful execution.
#
def show_thanks(self):
self._current_question = None
return self._sign_client.show_thanks()
## \brief Instructs the displayserver to display final result message and resets the value of self._current_question to None.
#
    # \returns An int. A return value of 0 indicates a successful execution.
#
def show_result(self):
self._current_question = None
res = self.calc_result()
return self._sign_client.show_result(res)
## \brief Instructs the displayserver to display the playing field and resets the value of self._current_question to None.
#
    # \returns An int. A return value of 0 indicates a successful execution.
#
def show(self):
self._current_question = None
return self._sign_client.show_playing_field(self._field)
## \brief Records that a team has answered a question correctly. If the question has already been answered this method
# does nothing.
#
# \param [category] A string. Denotes the category of the question that has been answered correctly.
#
# \param [value] An int. Denotes the value of the question that has been answered correctly.
#
# \param [who_answered] A string. Specifies the name of the team which has answered the question correctly.
#
# \returns Nothing.
#
def answer_question(self, category, value, who_answered):
if (self._field[category][value]['answeredby'] == None) and (who_answered not in self._field[category][value]['wronganswersby']):
self._field[category][value]['answeredby'] = who_answered
self._current_question = None
## \brief Resets the state of a question to its default value (no correct and no wrong answers).
#
# \param [category] A string. Denotes the category of the question that has been answered correctly.
#
# \param [value] An int. Denotes the value of the question that has been answered correctly.
#
# \returns Nothing.
#
def clear_question(self, category, value):
self._field[category][value]['answeredby'] = None
self._field[category][value]['wronganswersby'] = set()
self._current_question = None
## \brief Records that a team has given a wrong answer to a question. If the question has already been answered this method
# does nothing.
#
# \param [category] A string. Denotes the category of the question that has been answered wrongly.
#
# \param [value] An int. Denotes the value of the question that has been answered wrongly.
#
# \param [who_answered] A string. Specifies the name of the team which has answered the question wrongly.
#
# \returns Nothing.
#
def wrong_answer_question(self, category, value, who_answered):
if self._field[category][value]['answeredby'] == None:
self._field[category][value]['wronganswersby'].add(who_answered)
## \brief Records that a team has answered the current question correctly. If the question has already been answered this method
    # does nothing. Additionally this method instructs the displayserver to show the playing field again. The current question
# is also reset to None.
#
# \param [who_answered] A string. Specifies the name of the team which has answered the question correctly.
#
    # \returns An int. A value of 0 indicates that displaying the playing field was successful.
#
def answer_current_question(self, who_answered):
result = ERR_OK
if self._current_question != None:
c = self._current_question.category
v = self._current_question.value
if (self._field[c][v]['answeredby'] == None) and (who_answered not in self._field[c][v]['wronganswersby']):
self.answer_question(c, v, who_answered)
result = self.show()
return result
    ## \brief Resets the state of the current question to its default value (no correct and no wrong answers). Additionally this method instructs
# the displayserver to show the playing field again. The current question is also reset to None.
#
    # \returns An int. A value of 0 indicates that displaying the playing field was successful.
#
def clear_current_question(self):
result = ERR_OK
if self._current_question != None:
self.clear_question(self._current_question.category, self._current_question.value)
result = self.show()
return result
## \brief Records that a team has answered the current question wrongly. If the question has already been answered this method
# does nothing.
#
# \param [who_answered] A string. Specifies the name of the team which has given a wrong answer.
#
# \returns Nothing.
#
def wrong_answer_current_question(self, who_answered):
if self._current_question != None:
self.wrong_answer_question(self._current_question.category, self._current_question.value, who_answered)
## \brief Returns the category names in use in this PlayingField instance.
#
# \returns A list of strings. The strings denote the category names and the list is sorted.
#
@property
def current_categories(self):
result = self._categories[:]
result.sort()
return result
## \brief Returns the names of the three teams names in use in this PlayingField instance.
#
# \returns A list of strings. The strings denote the team names and the list is sorted.
#
@property
def current_teams(self):
result = self._teams[:]
result.sort()
return result
## \brief Returns the name of the team that has answered the specified question correctly.
#
# \param [category] A string. Denotes the category of the question for which the answer information is to be retrieved.
#
# \param [value] An int. Denotes the value of the question for which the answer information is to be retrieved.
#
# \returns A string. The name of the team which has given a correct answer or None in case the question
# has not been answered yet.
#
def question_answered_by(self, category, value):
return self._field[category][value]['answeredby']
## \brief Returns the names of the teams that have given a wrong answer to the specified question.
#
# \param [category] A string. Denotes the category of the question for which the answer information is to be retrieved.
#
# \param [value] An int. Denotes the value of the question for which the answer information is to be retrieved.
#
# \returns A set of strings. The set contains the names of the teams which have given a wrong answer.
#
def question_answered_wrong_by(self, category, value):
return self._field[category][value]['wronganswersby']
## \brief This method instructs the display server to show a certain question. This question then becomes the current question
# and the time value which specifies how many seconds remain to answer the question is set to its start value.
#
# \param [category] A string. Denotes the category of the question which is to become the current question.
#
# \param [value] An int. Denotes the value of the question which is to become the current question.
#
    # \returns An int. A value of 0 indicates that displaying the question was successful.
#
def ask_question(self, category, value):
question = self._questions[category][value]
time = question.time_allowance
if not question.show_time:
time = -1
self._current_question = question
self._current_question.reset()
return self._sign_client.show_question(question.text, time)
## \brief This method decrements the number of seconds that remain to answer the current question and updates the display to
# reflect the changed timer value.
#
    # \returns An int. A value of 0 indicates that displaying the question was successful.
#
def decrement_question_time(self):
result = ERR_OK
# Check if there is a valid current question, that its timer value is positive and that a time value should be displayed
if (self._current_question != None) and (self._current_question.current_time > 0) and (self._current_question.show_time):
self._current_question.current_time -= 1
result = self._sign_client.show_question(self._current_question.text, self._current_question.current_time)
return result
| apache-2.0 | 3,814,428,693,370,683,400 | 45.130952 | 145 | 0.612284 | false | 4.459148 | false | false | false |
tcstewar/embodied_benchmarks | control.py | 1 | 3938 | import numpy as np
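# Signal builds a random D-dimensional test signal with period L as a sum of
# sinusoids at frequencies k*2*pi/L (k up to max_freq*L) with random,
# power-normalized complex amplitudes; value(t) evaluates the signal and
# dvalue(t) its time derivative.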
class Signal(object):
def __init__(self, D, L, dt, max_freq, seed=None):
rng = np.random.RandomState(seed=seed)
steps = int(max_freq * L)
self.w = 2 * np.pi * np.arange(steps) / L
self.A = rng.randn(D, steps) + 1.0j * rng.randn(D, steps)
power = np.sqrt(np.sum(self.A * self.A.conj()))
self.A /= power
def value(self, t):
s = np.sin(self.w * t) * self.A
return np.sum(s, axis=1).real
def dvalue(self, t):
s = np.cos(self.w * t) * self.w * self.A
return np.sum(s, axis=1).real
class Environment(object):
def __init__(self, seed=None):
self.rng = np.random.RandomState(seed=seed)
class LinearSystem(Environment):
def __init__(self, d_controlled, d_motor, dt=0.001, seed=None,
scale_mult=10, scale_add=10, diagonal=False,
max_sense_noise=0.1, max_motor_noise=0.1,
period=5.0, max_freq=1.0):
super(LinearSystem, self).__init__(seed=seed)
self.d_motor = d_motor
self.d_controlled = d_controlled
self.dt = dt
self.state = self.rng.randn(d_controlled)
if diagonal:
assert d_controlled == d_motor
self.J = np.abs(np.diag(self.rng.randn(d_motor))) * scale_mult
else:
self.J = self.rng.randn(d_motor, d_controlled) * scale_mult
self.sense_noise = self.rng.uniform(0, max_sense_noise)
self.motor_noise = self.rng.uniform(0, max_motor_noise)
self.additive = self.rng.rand(d_controlled) * scale_add
def step(self, motor):
motor = motor + self.rng.randn(self.d_motor) * self.motor_noise
dstate = (np.dot(motor, self.J) + self.additive) * self.dt
self.state = self.state + dstate
return self.state + self.rng.randn(self.d_controlled) * self.sense_noise
class Controller(object):
pass
class PID(Controller):
def __init__(self, Kp, Kd=0, Ki=0, J=None, tau_d=0.1, dt=0.001):
self.Kp = Kp
self.Kd = Kd
self.Ki = Ki
if J is not None:
x = np.dot(J.T, J)
scale = np.linalg.det(x) ** (1.0 / x.shape[0])
self.JT = J.T / scale
else:
self.JT = None
self.prev_state = None
self.dstate = None
self.istate = None
self.scale = np.exp(-dt / tau_d)
self.dt = dt
def step(self, state, desired_state):
        if self.prev_state is None:
            self.dstate = np.zeros_like(state)
            self.istate = np.zeros_like(state)
        else:
            d = state - self.prev_state
            self.dstate = self.dstate * self.scale + d * (1.0 - self.scale)
            self.istate += self.dt * (desired_state - state)
        # remember the current state so the derivative term can be updated on the next call
        self.prev_state = state
v = (self.Kp * (desired_state - state) +
self.Kd * (-self.dstate) +
self.Ki * self.istate)
if self.JT is not None:
v = np.dot(v, self.JT)
return v
if __name__ == '__main__':
D_state = 3
D_motor = 5
dt = 0.001
env = LinearSystem(d_controlled=D_state, d_motor=D_motor, diagonal=False, scale_add=5)
ctrl = PID(100, 10, 1000, J=env.J)
desired_state = Signal(D_state, L=3.0, dt=dt, max_freq=2.0)
T = 6.0
steps = int(T / dt)
t = np.arange(steps) * dt
state = np.zeros((D_state, steps), dtype=float)
desired = np.zeros((D_state, steps), dtype=float)
sense = np.zeros((D_state, steps), dtype=float)
m = np.zeros(D_motor, dtype=float)
for i in range(steps):
desired[:,i] = desired_state.value(t[i])
s = env.step(m)
m = ctrl.step(s, desired[:,i])
state[:,i] = env.state
sense[:,i] = s
import pylab
pylab.plot(t, state.T, label='state')
pylab.plot(t, desired.T, label='desired')
#pylab.plot(sense.T, label='sense')
#pylab.legend(loc='best')
pylab.show()
| gpl-2.0 | 7,529,953,417,347,471,000 | 28.38806 | 90 | 0.550279 | false | 3.026902 | false | false | false |
mariodebian/jclic-browser | python-examples/demo.py | 1 | 3681 | # This is an example for demonstrating use of the GtkTreeView widget.
# The code in this example is not particularly good: it is written to
# concentrate on widget usage demonstration, not for maintainability.
import pygtk
pygtk.require("2.0")
import gtk
import gobject
view = None
choose_parent_view = None
dialog = None
def move(old_iter, new_parent, model):
if old_iter:
folder = model.get_value(old_iter, 0)
model.remove(old_iter)
new_iter = model.insert_before(new_parent, None)
model.set_value(new_iter, 0, folder)
model.set_value(new_iter, 1, folder["name"])
def dialog_ok(*args):
dialog.hide()
model, parent_iter = choose_parent_view.get_selection().get_selected()
model, old_iter = view.get_selection().get_selected()
if parent_iter and old_iter:
move(old_iter, parent_iter, model)
def dialog_cancel(*args):
dialog.hide()
def choose_parent(*args):
dialog.show()
def move_to_top(*args):
model, old_iter = view.get_selection().get_selected()
if old_iter:
move(old_iter, None, model)
def quit(*args):
gtk.main_quit()
def make_view(model):
# Create the view itself.
view = gtk.TreeView(model)
renderer = gtk.CellRendererText()
column = gtk.TreeViewColumn("Folder", renderer, text=1)
view.append_column(column)
view.show()
# Create scrollbars around the view.
scrolled = gtk.ScrolledWindow()
scrolled.add(view)
scrolled.show()
return view, scrolled
def make_buttons(list):
buttonbox = gtk.HBox()
for label, func in list:
button = gtk.Button()
button.set_label(label)
button.connect("clicked", func)
button.show()
buttonbox.pack_start(button, expand=gtk.FALSE, fill=gtk.FALSE)
buttonbox.show()
return buttonbox
def main():
# Create the model.
model = gtk.TreeStore(gobject.TYPE_PYOBJECT, gobject.TYPE_STRING)
# Populate the model with data. We represent folders with Python
# dicts (hash tables or hashmaps in other languages), for simplicity.
# In a real program, they would be programmer defined classes.
for i in range(100):
folder = { "name": "folder %d" % i, "files": ["foo", "bar"] }
iter = model.insert_before(None, None)
model.set_value(iter, 0, folder)
model.set_value(iter, 1, folder["name"])
# Create the main view.
global view
view, scrolled = make_view(model)
view.set_reorderable(gtk.TRUE)
# Create some command buttons.
buttonbox = make_buttons([("Quit", quit), ("Choose parent", choose_parent),
("Move to top", move_to_top)])
# Create a vertical box to hold the above stuff.
vbox = gtk.VBox()
vbox.pack_start(buttonbox, expand=gtk.FALSE, fill=gtk.FALSE)
vbox.pack_start(scrolled, expand=gtk.TRUE, fill=gtk.TRUE)
vbox.show()
# Create toplevel window to show it all.
win = gtk.Window(gtk.WINDOW_TOPLEVEL)
win.connect("delete_event", quit)
win.add(vbox)
win.show()
win.resize(300, 500)
# Create the GtkTreeView for choosing a parent.
global choose_parent_view
choose_parent_view, scrolled = make_view(model)
buttonbox = make_buttons([("OK", dialog_ok), ("Cancel", dialog_cancel)])
vbox = gtk.VBox()
vbox.pack_start(scrolled, expand=gtk.TRUE, fill=gtk.TRUE)
vbox.pack_start(buttonbox, expand=gtk.FALSE, fill=gtk.FALSE)
vbox.show()
global dialog
dialog = gtk.Window(gtk.WINDOW_TOPLEVEL)
dialog.set_default_size(200, 400)
dialog.add(vbox)
# Run the Gtk+ main loop.
gtk.main()
if __name__ == "__main__":
main()
| gpl-2.0 | 3,050,538,211,119,226,000 | 28.685484 | 79 | 0.646292 | false | 3.459586 | false | false | false |
blckshrk/Weboob | contrib/windows-install/ez_setup.py | 1 | 11838 | #!python
"""Bootstrap setuptools installation
If you want to use setuptools in your package's setup.py, just include this
file in the same directory with it, and add this to the top of your setup.py::
from ez_setup import use_setuptools
use_setuptools()
If you want to require a specific version of setuptools, set a download
mirror, or use an alternate download directory, you can do so by supplying
the appropriate options to ``use_setuptools()``.
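For example, a call pinning the version might look like this (values shown
are illustrative and simply mirror the defaults defined below)::
    from ez_setup import use_setuptools
    use_setuptools(version="1.1.6",
                   download_base="https://pypi.python.org/packages/source/s/setuptools/",
                   to_dir="downloads")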
This file can also be run as a script to install or upgrade setuptools.
"""
import os
import shutil
import sys
import tempfile
import tarfile
import optparse
import subprocess
import platform
from distutils import log
try:
from site import USER_SITE
except ImportError:
USER_SITE = None
DEFAULT_VERSION = "1.1.6"
DEFAULT_URL = "https://pypi.python.org/packages/source/s/setuptools/"
def _python_cmd(*args):
args = (sys.executable,) + args
return subprocess.call(args) == 0
def _check_call_py24(cmd, *args, **kwargs):
res = subprocess.call(cmd, *args, **kwargs)
class CalledProcessError(Exception):
pass
if not res == 0:
msg = "Command '%s' return non-zero exit status %d" % (cmd, res)
raise CalledProcessError(msg)
vars(subprocess).setdefault('check_call', _check_call_py24)
def _install(tarball, install_args=()):
# extracting the tarball
tmpdir = tempfile.mkdtemp()
log.warn('Extracting in %s', tmpdir)
old_wd = os.getcwd()
try:
os.chdir(tmpdir)
tar = tarfile.open(tarball)
_extractall(tar)
tar.close()
# going in the directory
subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
os.chdir(subdir)
log.warn('Now working in %s', subdir)
# installing
log.warn('Installing Setuptools')
if not _python_cmd('setup.py', 'install', *install_args):
log.warn('Something went wrong during the installation.')
log.warn('See the error message above.')
# exitcode will be 2
return 2
finally:
os.chdir(old_wd)
shutil.rmtree(tmpdir)
def _build_egg(egg, tarball, to_dir):
# extracting the tarball
tmpdir = tempfile.mkdtemp()
log.warn('Extracting in %s', tmpdir)
old_wd = os.getcwd()
try:
os.chdir(tmpdir)
tar = tarfile.open(tarball)
_extractall(tar)
tar.close()
# going in the directory
subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
os.chdir(subdir)
log.warn('Now working in %s', subdir)
# building an egg
log.warn('Building a Setuptools egg in %s', to_dir)
_python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir)
finally:
os.chdir(old_wd)
shutil.rmtree(tmpdir)
# returning the result
log.warn(egg)
if not os.path.exists(egg):
raise IOError('Could not build the egg.')
def _do_download(version, download_base, to_dir, download_delay):
egg = os.path.join(to_dir, 'setuptools-%s-py%d.%d.egg'
% (version, sys.version_info[0], sys.version_info[1]))
if not os.path.exists(egg):
tarball = download_setuptools(version, download_base,
to_dir, download_delay)
_build_egg(egg, tarball, to_dir)
sys.path.insert(0, egg)
# Remove previously-imported pkg_resources if present (see
# https://bitbucket.org/pypa/setuptools/pull-request/7/ for details).
if 'pkg_resources' in sys.modules:
del sys.modules['pkg_resources']
import setuptools
setuptools.bootstrap_install_from = egg
def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
to_dir=os.curdir, download_delay=15):
# making sure we use the absolute path
to_dir = os.path.abspath(to_dir)
was_imported = 'pkg_resources' in sys.modules or \
'setuptools' in sys.modules
try:
import pkg_resources
except ImportError:
return _do_download(version, download_base, to_dir, download_delay)
try:
pkg_resources.require("setuptools>=" + version)
return
except pkg_resources.VersionConflict:
e = sys.exc_info()[1]
if was_imported:
sys.stderr.write(
"The required version of setuptools (>=%s) is not available,\n"
"and can't be installed while this script is running. Please\n"
"install a more recent version first, using\n"
"'easy_install -U setuptools'."
"\n\n(Currently using %r)\n" % (version, e.args[0]))
sys.exit(2)
else:
del pkg_resources, sys.modules['pkg_resources'] # reload ok
return _do_download(version, download_base, to_dir,
download_delay)
except pkg_resources.DistributionNotFound:
return _do_download(version, download_base, to_dir,
download_delay)
def download_file_powershell(url, target):
"""
Download the file at url to target using Powershell (which will validate
trust). Raise an exception if the command cannot complete.
"""
target = os.path.abspath(target)
cmd = [
'powershell',
'-Command',
"(new-object System.Net.WebClient).DownloadFile(%(url)r, %(target)r)" % vars(),
]
subprocess.check_call(cmd)
def has_powershell():
if platform.system() != 'Windows':
return False
cmd = ['powershell', '-Command', 'echo test']
devnull = open(os.path.devnull, 'wb')
try:
try:
subprocess.check_call(cmd, stdout=devnull, stderr=devnull)
except:
return False
finally:
devnull.close()
return True
download_file_powershell.viable = has_powershell
def download_file_curl(url, target):
cmd = ['curl', url, '--silent', '--output', target]
subprocess.check_call(cmd)
def has_curl():
cmd = ['curl', '--version']
devnull = open(os.path.devnull, 'wb')
try:
try:
subprocess.check_call(cmd, stdout=devnull, stderr=devnull)
except:
return False
finally:
devnull.close()
return True
download_file_curl.viable = has_curl
def download_file_wget(url, target):
cmd = ['wget', url, '--quiet', '--output-document', target]
subprocess.check_call(cmd)
def has_wget():
cmd = ['wget', '--version']
devnull = open(os.path.devnull, 'wb')
try:
try:
subprocess.check_call(cmd, stdout=devnull, stderr=devnull)
except:
return False
finally:
devnull.close()
    return True
download_file_wget.viable = has_wget
def download_file_insecure(url, target):
"""
Use Python to download the file, even though it cannot authenticate the
connection.
"""
try:
from urllib.request import urlopen
except ImportError:
from urllib2 import urlopen
src = dst = None
try:
src = urlopen(url)
# Read/write all in one block, so we don't create a corrupt file
# if the download is interrupted.
data = src.read()
dst = open(target, "wb")
dst.write(data)
finally:
if src:
src.close()
if dst:
dst.close()
download_file_insecure.viable = lambda: True
def get_best_downloader():
downloaders = [
download_file_powershell,
download_file_curl,
download_file_wget,
download_file_insecure,
]
for dl in downloaders:
if dl.viable():
return dl
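# Illustrative use of the downloader selection above (download_setuptools()
# below does exactly this internally; the file name is just an example built
# from DEFAULT_URL and DEFAULT_VERSION):
#   downloader = get_best_downloader()
#   downloader(DEFAULT_URL + "setuptools-" + DEFAULT_VERSION + ".tar.gz",
#              "setuptools-" + DEFAULT_VERSION + ".tar.gz")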
def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
to_dir=os.curdir, delay=15,
downloader_factory=get_best_downloader):
"""Download setuptools from a specified location and return its filename
`version` should be a valid setuptools version number that is available
as an egg for download under the `download_base` URL (which should end
with a '/'). `to_dir` is the directory where the egg will be downloaded.
`delay` is the number of seconds to pause before an actual download
attempt.
``downloader_factory`` should be a function taking no arguments and
returning a function for downloading a URL to a target.
"""
# making sure we use the absolute path
to_dir = os.path.abspath(to_dir)
tgz_name = "setuptools-%s.tar.gz" % version
url = download_base + tgz_name
saveto = os.path.join(to_dir, tgz_name)
if not os.path.exists(saveto): # Avoid repeated downloads
log.warn("Downloading %s", url)
downloader = downloader_factory()
downloader(url, saveto)
return os.path.realpath(saveto)
def _extractall(self, path=".", members=None):
"""Extract all members from the archive to the current working
directory and set owner, modification time and permissions on
directories afterwards. `path' specifies a different directory
to extract to. `members' is optional and must be a subset of the
list returned by getmembers().
"""
import copy
import operator
from tarfile import ExtractError
directories = []
if members is None:
members = self
for tarinfo in members:
if tarinfo.isdir():
# Extract directories with a safe mode.
directories.append(tarinfo)
tarinfo = copy.copy(tarinfo)
tarinfo.mode = 448 # decimal for oct 0700
self.extract(tarinfo, path)
# Reverse sort directories.
if sys.version_info < (2, 4):
def sorter(dir1, dir2):
return cmp(dir1.name, dir2.name)
directories.sort(sorter)
directories.reverse()
else:
directories.sort(key=operator.attrgetter('name'), reverse=True)
# Set correct owner, mtime and filemode on directories.
for tarinfo in directories:
dirpath = os.path.join(path, tarinfo.name)
try:
self.chown(tarinfo, dirpath)
self.utime(tarinfo, dirpath)
self.chmod(tarinfo, dirpath)
except ExtractError:
e = sys.exc_info()[1]
if self.errorlevel > 1:
raise
else:
self._dbg(1, "tarfile: %s" % e)
def _build_install_args(options):
"""
Build the arguments to 'python setup.py install' on the setuptools package
"""
install_args = []
if options.user_install:
if sys.version_info < (2, 6):
log.warn("--user requires Python 2.6 or later")
raise SystemExit(1)
install_args.append('--user')
return install_args
def _parse_args():
"""
Parse the command line for options
"""
parser = optparse.OptionParser()
parser.add_option(
'--user', dest='user_install', action='store_true', default=False,
help='install in user site package (requires Python 2.6 or later)')
parser.add_option(
'--download-base', dest='download_base', metavar="URL",
default=DEFAULT_URL,
help='alternative URL from where to download the setuptools package')
parser.add_option(
'--insecure', dest='downloader_factory', action='store_const',
const=lambda: download_file_insecure, default=get_best_downloader,
help='Use internal, non-validating downloader'
)
options, args = parser.parse_args()
# positional arguments are ignored
return options
def main(version=DEFAULT_VERSION):
"""Install or upgrade setuptools and EasyInstall"""
options = _parse_args()
tarball = download_setuptools(download_base=options.download_base,
downloader_factory=options.downloader_factory)
return _install(tarball, _build_install_args(options))
if __name__ == '__main__':
sys.exit(main())
| agpl-3.0 | -6,922,584,917,193,558,000 | 30.994595 | 87 | 0.622149 | false | 3.94995 | false | false | false |
KingxBanana/zulip | zproject/settings.py | 1 | 41223 | from __future__ import absolute_import
# Django settings for zulip project.
########################################################################
# Here's how settings for the Zulip project work:
#
# * settings.py contains non-site-specific and settings configuration
# for the Zulip Django app.
# * settings.py imports prod_settings.py, and any site-specific configuration
# belongs there. The template for prod_settings.py is prod_settings_template.py
#
# See http://zulip.readthedocs.io/en/latest/settings.html for more information
#
########################################################################
import os
import platform
import time
import sys
import six.moves.configparser
from zerver.lib.db import TimeTrackingConnection
import six
########################################################################
# INITIAL SETTINGS
########################################################################
DEPLOY_ROOT = os.path.join(os.path.realpath(os.path.dirname(__file__)), '..')
config_file = six.moves.configparser.RawConfigParser()
config_file.read("/etc/zulip/zulip.conf")
# Whether this instance of Zulip is running in a production environment.
PRODUCTION = config_file.has_option('machine', 'deploy_type')
DEVELOPMENT = not PRODUCTION
secrets_file = six.moves.configparser.RawConfigParser()
if PRODUCTION:
secrets_file.read("/etc/zulip/zulip-secrets.conf")
else:
secrets_file.read(os.path.join(DEPLOY_ROOT, "zproject/dev-secrets.conf"))
def get_secret(key):
if secrets_file.has_option('secrets', key):
return secrets_file.get('secrets', key)
return None
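# Illustrative layout of the secrets file read above (keys match the lookups
# below; the values shown are placeholders): it is a plain INI file with a
# single [secrets] section, e.g.
#   [secrets]
#   secret_key = <long random string>
#   shared_secret = <long random string>
#   avatar_salt = <long random string>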
# Make this unique, and don't share it with anybody.
SECRET_KEY = get_secret("secret_key")
# A shared secret, used to authenticate different parts of the app to each other.
SHARED_SECRET = get_secret("shared_secret")
# We use this salt to hash a user's email into a filename for their user-uploaded
# avatar. If this salt is discovered, attackers will only be able to determine
# that the owner of an email account has uploaded an avatar to Zulip, which isn't
# the end of the world. Don't use the salt where there is more security exposure.
AVATAR_SALT = get_secret("avatar_salt")
# SERVER_GENERATION is used to track whether the server has been
# restarted for triggering browser clients to reload.
SERVER_GENERATION = int(time.time())
if 'DEBUG' not in globals():
# Uncomment end of next line to test JS/CSS minification.
DEBUG = DEVELOPMENT # and platform.node() != 'your-machine'
if DEBUG:
INTERNAL_IPS = ('127.0.0.1',)
# Detect whether we're running as a queue worker; this impacts the logging configuration.
if len(sys.argv) > 2 and sys.argv[0].endswith('manage.py') and sys.argv[1] == 'process_queue':
IS_WORKER = True
else:
IS_WORKER = False
# This is overridden in test_settings.py for the test suites
TEST_SUITE = False
# The new user tutorial is enabled by default, but disabled for client tests.
TUTORIAL_ENABLED = True
# This is overridden in test_settings.py for the test suites
CASPER_TESTS = False
# Import variables like secrets from the prod_settings file
# Import prod_settings after determining the deployment/machine type
if PRODUCTION:
from .prod_settings import *
else:
from .dev_settings import *
########################################################################
# DEFAULT VALUES FOR SETTINGS
########################################################################
# For any settings that are not defined in prod_settings.py,
# we want to initialize them to sane default
DEFAULT_SETTINGS = {'TWITTER_CONSUMER_KEY': '',
'TWITTER_CONSUMER_SECRET': '',
'TWITTER_ACCESS_TOKEN_KEY': '',
'TWITTER_ACCESS_TOKEN_SECRET': '',
'EMAIL_GATEWAY_PATTERN': '',
'EMAIL_GATEWAY_EXAMPLE': '',
'EMAIL_GATEWAY_BOT': None,
'EMAIL_GATEWAY_LOGIN': None,
'EMAIL_GATEWAY_PASSWORD': None,
'EMAIL_GATEWAY_IMAP_SERVER': None,
'EMAIL_GATEWAY_IMAP_PORT': None,
'EMAIL_GATEWAY_IMAP_FOLDER': None,
'EMAIL_GATEWAY_EXTRA_PATTERN_HACK': None,
'S3_KEY': '',
'S3_SECRET_KEY': '',
'S3_AVATAR_BUCKET': '',
'LOCAL_UPLOADS_DIR': None,
'MAX_FILE_UPLOAD_SIZE': 25,
'ERROR_REPORTING': True,
'STAGING_ERROR_NOTIFICATIONS': False,
'EVENT_LOGS_ENABLED': False,
'SAVE_FRONTEND_STACKTRACES': False,
'JWT_AUTH_KEYS': {},
'NAME_CHANGES_DISABLED': False,
'DEPLOYMENT_ROLE_NAME': "",
'RABBITMQ_HOST': 'localhost',
'RABBITMQ_USERNAME': 'zulip',
'MEMCACHED_LOCATION': '127.0.0.1:11211',
'RATE_LIMITING': True,
'REDIS_HOST': '127.0.0.1',
'REDIS_PORT': 6379,
# The following bots only exist in non-VOYAGER installs
'ERROR_BOT': None,
'NEW_USER_BOT': None,
'NAGIOS_STAGING_SEND_BOT': None,
'NAGIOS_STAGING_RECEIVE_BOT': None,
'APNS_CERT_FILE': None,
'APNS_KEY_FILE': None,
'APNS_SANDBOX': True,
'ANDROID_GCM_API_KEY': None,
'INITIAL_PASSWORD_SALT': None,
'FEEDBACK_BOT': '[email protected]',
'FEEDBACK_BOT_NAME': 'Zulip Feedback Bot',
'ADMINS': '',
'SHARE_THE_LOVE': False,
'INLINE_IMAGE_PREVIEW': True,
'INLINE_URL_EMBED_PREVIEW': False,
'CAMO_URI': '',
'ENABLE_FEEDBACK': PRODUCTION,
'SEND_MISSED_MESSAGE_EMAILS_AS_USER': False,
'SERVER_EMAIL': None,
'FEEDBACK_EMAIL': None,
'WELCOME_EMAIL_SENDER': None,
'EMAIL_DELIVERER_DISABLED': False,
'ENABLE_GRAVATAR': True,
'DEFAULT_AVATAR_URI': '/static/images/default-avatar.png',
'AUTH_LDAP_SERVER_URI': "",
'EXTERNAL_URI_SCHEME': "https://",
'ZULIP_COM': False,
'SHOW_OSS_ANNOUNCEMENT': False,
'REGISTER_LINK_DISABLED': False,
'LOGIN_LINK_DISABLED': False,
'ABOUT_LINK_DISABLED': False,
'CUSTOM_LOGO_URL': None,
'VERBOSE_SUPPORT_OFFERS': False,
'STATSD_HOST': '',
'OPEN_REALM_CREATION': False,
'REALMS_HAVE_SUBDOMAINS': False,
'SUBDOMAINS_HOMEPAGE': False,
'ROOT_SUBDOMAIN_ALIASES': ["www"],
'REMOTE_POSTGRES_HOST': '',
'REMOTE_POSTGRES_SSLMODE': '',
# Default GOOGLE_CLIENT_ID to the value needed for Android auth to work
'GOOGLE_CLIENT_ID': '835904834568-77mtr5mtmpgspj9b051del9i9r5t4g4n.apps.googleusercontent.com',
'SOCIAL_AUTH_GITHUB_KEY': None,
'SOCIAL_AUTH_GITHUB_ORG_NAME': None,
'SOCIAL_AUTH_GITHUB_TEAM_ID': None,
'SOCIAL_AUTH_FIELDS_STORED_IN_SESSION': ['subdomain'],
'DBX_APNS_CERT_FILE': None,
'DBX_APNS_KEY_FILE': None,
'PERSONAL_ZMIRROR_SERVER': None,
'EXTRA_INSTALLED_APPS': [],
'DEFAULT_NEW_REALM_STREAMS': {
"social": {"description": "For socializing", "invite_only": False},
"general": {"description": "For general stuff", "invite_only": False},
"zulip": {"description": "For zulip stuff", "invite_only": False}
},
'REALM_CREATION_LINK_VALIDITY_DAYS': 7,
'TERMS_OF_SERVICE': None,
'TOS_VERSION': None,
'SYSTEM_ONLY_REALMS': {"zulip.com"},
'FIRST_TIME_TOS_TEMPLATE': None,
'USING_PGROONGA': False,
'POST_MIGRATION_CACHE_FLUSHING': False,
'ENABLE_FILE_LINKS': False,
'USE_WEBSOCKETS': True,
}
for setting_name, setting_val in six.iteritems(DEFAULT_SETTINGS):
if setting_name not in vars():
vars()[setting_name] = setting_val
# Extend ALLOWED_HOSTS with localhost (needed to RPC to Tornado).
ALLOWED_HOSTS += ['127.0.0.1', 'localhost']
# These are the settings that we will check that the user has filled in for
# production deployments before starting the app. It consists of a series
# of pairs of (setting name, default value that it must be changed from)
REQUIRED_SETTINGS = [("EXTERNAL_HOST", "zulip.example.com"),
("ZULIP_ADMINISTRATOR", "[email protected]"),
# SECRET_KEY doesn't really need to be here, in
# that we set it automatically, but just in
# case, it seems worth having in this list
("SECRET_KEY", ""),
("AUTHENTICATION_BACKENDS", ()),
("NOREPLY_EMAIL_ADDRESS", "[email protected]"),
("DEFAULT_FROM_EMAIL", "Zulip <[email protected]>"),
("ALLOWED_HOSTS", ["*", '127.0.0.1', 'localhost']),
]
if ADMINS == "":
ADMINS = (("Zulip Administrator", ZULIP_ADMINISTRATOR),)
MANAGERS = ADMINS
# Voyager is a production Zulip server that is not zulip.com or
# staging.zulip.com.  VOYAGER is the standalone all-on-one-server
# production deployment model for Zulip, based on the original Zulip
# ENTERPRISE implementation.  We expect most users of the open source
# project will be using VOYAGER=True in production.
VOYAGER = PRODUCTION and not ZULIP_COM
########################################################################
# STANDARD DJANGO SETTINGS
########################################################################
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/New_York'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
# The ID, as an integer, of the current site in the django_site database table.
# This is used so that application data can hook into specific site(s) and a
# single database can manage content for multiple sites.
#
# We set this site's domain to 'zulip.com' in populate_db.
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
DEPLOY_ROOT = os.path.join(os.path.realpath(os.path.dirname(__file__)), '..')
# this directory will be used to store logs for development environment
DEVELOPMENT_LOG_DIRECTORY = os.path.join(DEPLOY_ROOT, 'var', 'log')
# Make redirects work properly behind a reverse proxy
USE_X_FORWARDED_HOST = True
# List of callables that know how to import templates from various sources.
LOADERS = [
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
]
if PRODUCTION:
# Template caching is a significant performance win in production.
LOADERS = [('django.template.loaders.cached.Loader', LOADERS)]
TEMPLATES = [
{
'BACKEND': 'zproject.jinja2.backends.Jinja2',
'DIRS': [
os.path.join(DEPLOY_ROOT, 'templates'),
],
'APP_DIRS': True,
'OPTIONS': {
'debug': DEBUG,
'environment': 'zproject.jinja2.environment',
'extensions': [
'jinja2.ext.i18n',
'jinja2.ext.autoescape',
'pipeline.jinja2.PipelineExtension',
],
'context_processors': [
'zerver.context_processors.add_settings',
'zerver.context_processors.add_metrics',
'django.template.context_processors.i18n',
],
},
},
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
os.path.join(DEPLOY_ROOT, 'django_templates'),
],
'APP_DIRS': False,
'OPTIONS': {
'debug': DEBUG,
'loaders': LOADERS,
'context_processors': [
'zerver.context_processors.add_settings',
'zerver.context_processors.add_metrics',
],
},
},
]
MIDDLEWARE_CLASSES = (
# Our logging middleware should be the first middleware item.
'zerver.middleware.TagRequests',
'zerver.middleware.LogRequests',
'zerver.middleware.JsonErrorHandler',
'zerver.middleware.RateLimitMiddleware',
'zerver.middleware.FlushDisplayRecipientCache',
'django.middleware.common.CommonMiddleware',
'zerver.middleware.SessionHostDomainMiddleware',
'django.middleware.locale.LocaleMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
)
ANONYMOUS_USER_ID = None
AUTH_USER_MODEL = "zerver.UserProfile"
TEST_RUNNER = 'zerver.lib.test_runner.Runner'
ROOT_URLCONF = 'zproject.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'zproject.wsgi.application'
# A site can include additional installed apps via the
# EXTRA_INSTALLED_APPS setting
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.staticfiles',
'confirmation',
'guardian',
'pipeline',
'zerver',
'social.apps.django_app.default',
]
if USING_PGROONGA:
INSTALLED_APPS += ['pgroonga']
INSTALLED_APPS += EXTRA_INSTALLED_APPS
ZILENCER_ENABLED = 'zilencer' in INSTALLED_APPS
# Base URL of the Tornado server
# We set it to None when running backend tests or populate_db.
# We override the port number when running frontend tests.
TORNADO_SERVER = 'http://127.0.0.1:9993'
RUNNING_INSIDE_TORNADO = False
########################################################################
# DATABASE CONFIGURATION
########################################################################
DATABASES = {"default": {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'zulip',
'USER': 'zulip',
'PASSWORD': '', # Authentication done via certificates
'HOST': '', # Host = '' => connect through a local socket
'SCHEMA': 'zulip',
'CONN_MAX_AGE': 600,
'OPTIONS': {
'connection_factory': TimeTrackingConnection
},
},
}
if DEVELOPMENT:
LOCAL_DATABASE_PASSWORD = get_secret("local_database_password")
DATABASES["default"].update({
'PASSWORD': LOCAL_DATABASE_PASSWORD,
'HOST': 'localhost'
})
elif REMOTE_POSTGRES_HOST != '':
DATABASES['default'].update({
'HOST': REMOTE_POSTGRES_HOST,
})
if get_secret("postgres_password") is not None:
DATABASES['default'].update({
'PASSWORD': get_secret("postgres_password"),
})
if REMOTE_POSTGRES_SSLMODE != '':
DATABASES['default']['OPTIONS']['sslmode'] = REMOTE_POSTGRES_SSLMODE
else:
DATABASES['default']['OPTIONS']['sslmode'] = 'verify-full'
if USING_PGROONGA:
# We need to have "pgroonga" schema before "pg_catalog" schema in
# the PostgreSQL search path, because "pgroonga" schema overrides
# the "@@" operator from "pg_catalog" schema, and "pg_catalog"
# schema is searched first if not specified in the search path.
# See also: http://www.postgresql.org/docs/current/static/runtime-config-client.html
pg_options = '-c search_path=%(SCHEMA)s,zulip,public,pgroonga,pg_catalog' % \
DATABASES['default']
DATABASES['default']['OPTIONS']['options'] = pg_options
########################################################################
# RABBITMQ CONFIGURATION
########################################################################
USING_RABBITMQ = True
RABBITMQ_PASSWORD = get_secret("rabbitmq_password")
########################################################################
# CACHING CONFIGURATION
########################################################################
SESSION_ENGINE = "django.contrib.sessions.backends.cached_db"
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.PyLibMCCache',
'LOCATION': MEMCACHED_LOCATION,
'TIMEOUT': 3600,
'OPTIONS': {
'verify_keys': True,
'tcp_nodelay': True,
'retry_timeout': 1,
}
},
'database': {
'BACKEND': 'django.core.cache.backends.db.DatabaseCache',
'LOCATION': 'third_party_api_results',
# Basically never timeout. Setting to 0 isn't guaranteed
# to work, see https://code.djangoproject.com/ticket/9595
'TIMEOUT': 2000000000,
'OPTIONS': {
'MAX_ENTRIES': 100000000,
'CULL_FREQUENCY': 10,
}
},
}
########################################################################
# REDIS-BASED RATE LIMITING CONFIGURATION
########################################################################
RATE_LIMITING_RULES = [
(60, 100), # 100 requests max every minute
]
DEBUG_RATE_LIMITING = DEBUG
REDIS_PASSWORD = get_secret('redis_password')
########################################################################
# SECURITY SETTINGS
########################################################################
# Tell the browser to never send our cookies without encryption, e.g.
# when executing the initial http -> https redirect.
#
# Turn it off for local testing because we don't have SSL.
if PRODUCTION:
SESSION_COOKIE_SECURE = True
CSRF_COOKIE_SECURE = True
try:
# For get_updates hostname sharding.
domain = config_file.get('django', 'cookie_domain')
SESSION_COOKIE_DOMAIN = '.' + domain
CSRF_COOKIE_DOMAIN = '.' + domain
except six.moves.configparser.Error:
# Failing here is OK
pass
# Prevent Javascript from reading the CSRF token from cookies. Our code gets
# the token from the DOM, which means malicious code could too. But hiding the
# cookie will slow down some attackers.
CSRF_COOKIE_PATH = '/;HttpOnly'
CSRF_FAILURE_VIEW = 'zerver.middleware.csrf_failure'
if DEVELOPMENT:
# Use fast password hashing for creating testing users when not
# PRODUCTION. Saves a bunch of time.
PASSWORD_HASHERS = (
'django.contrib.auth.hashers.SHA1PasswordHasher',
'django.contrib.auth.hashers.PBKDF2PasswordHasher'
)
# Also we auto-generate passwords for the default users which you
# can query using ./manage.py print_initial_password
INITIAL_PASSWORD_SALT = get_secret("initial_password_salt")
########################################################################
# API/BOT SETTINGS
########################################################################
if "EXTERNAL_API_PATH" not in vars():
EXTERNAL_API_PATH = EXTERNAL_HOST + "/api"
EXTERNAL_API_URI = EXTERNAL_URI_SCHEME + EXTERNAL_API_PATH
SERVER_URI = EXTERNAL_URI_SCHEME + EXTERNAL_HOST
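# For example, with EXTERNAL_URI_SCHEME = "https://" and
# EXTERNAL_HOST = "zulip.example.com" (placeholder values), this yields
# EXTERNAL_API_URI = "https://zulip.example.com/api" and
# SERVER_URI = "https://zulip.example.com".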
if "NAGIOS_BOT_HOST" not in vars():
NAGIOS_BOT_HOST = EXTERNAL_HOST
S3_KEY = get_secret("s3_key")
S3_SECRET_KEY = get_secret("s3_secret_key")
# GCM tokens are IP-whitelisted; if we deploy to additional
# servers you will need to explicitly add their IPs here:
# https://cloud.google.com/console/project/apps~zulip-android/apiui/credential
ANDROID_GCM_API_KEY = get_secret("android_gcm_api_key")
GOOGLE_OAUTH2_CLIENT_SECRET = get_secret('google_oauth2_client_secret')
DROPBOX_APP_KEY = get_secret("dropbox_app_key")
MAILCHIMP_API_KEY = get_secret("mailchimp_api_key")
# This comes from our mandrill accounts page
MANDRILL_API_KEY = get_secret("mandrill_api_key")
# Twitter API credentials
# Secrecy not required because its only used for R/O requests.
# Please don't make us go over our rate limit.
TWITTER_CONSUMER_KEY = get_secret("twitter_consumer_key")
TWITTER_CONSUMER_SECRET = get_secret("twitter_consumer_secret")
TWITTER_ACCESS_TOKEN_KEY = get_secret("twitter_access_token_key")
TWITTER_ACCESS_TOKEN_SECRET = get_secret("twitter_access_token_secret")
# These are the bots that Zulip sends automated messages as.
INTERNAL_BOTS = [{'var_name': 'NOTIFICATION_BOT',
'email_template': 'notification-bot@%s',
'name': 'Notification Bot'},
{'var_name': 'EMAIL_GATEWAY_BOT',
'email_template': 'emailgateway@%s',
'name': 'Email Gateway'},
{'var_name': 'NAGIOS_SEND_BOT',
'email_template': 'nagios-send-bot@%s',
'name': 'Nagios Send Bot'},
{'var_name': 'NAGIOS_RECEIVE_BOT',
'email_template': 'nagios-receive-bot@%s',
'name': 'Nagios Receive Bot'},
{'var_name': 'WELCOME_BOT',
'email_template': 'welcome-bot@%s',
'name': 'Welcome Bot'}]
if PRODUCTION:
INTERNAL_BOTS += [
{'var_name': 'NAGIOS_STAGING_SEND_BOT',
'email_template': 'nagios-staging-send-bot@%s',
'name': 'Nagios Staging Send Bot'},
{'var_name': 'NAGIOS_STAGING_RECEIVE_BOT',
'email_template': 'nagios-staging-receive-bot@%s',
'name': 'Nagios Staging Receive Bot'},
]
INTERNAL_BOT_DOMAIN = "zulip.com"
# Set the realm-specific bot names
for bot in INTERNAL_BOTS:
if vars().get(bot['var_name']) is None:
bot_email = bot['email_template'] % (INTERNAL_BOT_DOMAIN,)
vars()[bot['var_name']] = bot_email
if EMAIL_GATEWAY_PATTERN != "":
EMAIL_GATEWAY_EXAMPLE = EMAIL_GATEWAY_PATTERN % ("support+abcdefg",)
DEPLOYMENT_ROLE_KEY = get_secret("deployment_role_key")
if PRODUCTION:
FEEDBACK_TARGET = "https://zulip.com/api"
else:
FEEDBACK_TARGET = "http://localhost:9991/api"
########################################################################
# STATSD CONFIGURATION
########################################################################
# Statsd is not super well supported; if you want to use it you'll need
# to set STATSD_HOST and STATSD_PREFIX.
if STATSD_HOST != '':
INSTALLED_APPS += ['django_statsd']
STATSD_PORT = 8125
STATSD_CLIENT = 'django_statsd.clients.normal'
########################################################################
# CAMO HTTPS CACHE CONFIGURATION
########################################################################
if CAMO_URI != '':
# This needs to be synced with the Camo installation
CAMO_KEY = get_secret("camo_key")
########################################################################
# STATIC CONTENT AND MINIFICATION SETTINGS
########################################################################
STATIC_URL = '/static/'
# ZulipStorage is a modified version of PipelineCachedStorage,
# and, like that class, it inserts a file hash into filenames
# to prevent the browser from using stale files from cache.
#
# Unlike PipelineStorage, it requires the files to exist in
# STATIC_ROOT even for dev servers. So we only use
# ZulipStorage when not DEBUG.
# This is the default behavior from Pipeline, but we set it
# here so that urls.py can read it.
PIPELINE_ENABLED = not DEBUG
if DEBUG:
STATICFILES_STORAGE = 'pipeline.storage.PipelineStorage'
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
'pipeline.finders.PipelineFinder',
)
if PIPELINE_ENABLED:
STATIC_ROOT = os.path.abspath('prod-static/serve')
else:
STATIC_ROOT = os.path.abspath('static/')
else:
STATICFILES_STORAGE = 'zerver.storage.ZulipStorage'
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'pipeline.finders.PipelineFinder',
)
if PRODUCTION:
STATIC_ROOT = '/home/zulip/prod-static'
else:
STATIC_ROOT = os.path.abspath('prod-static/serve')
LOCALE_PATHS = (os.path.join(STATIC_ROOT, 'locale'),)
# We want all temporary uploaded files to be stored on disk.
FILE_UPLOAD_MAX_MEMORY_SIZE = 0
STATICFILES_DIRS = ['static/']
STATIC_HEADER_FILE = 'zerver/static_header.txt'
# To use minified files in dev, set PIPELINE_ENABLED = True. For the full
# cache-busting behavior, you must also set DEBUG = False.
#
# You will need to run update-prod-static after changing
# static files.
PIPELINE = {
'PIPELINE_ENABLED': PIPELINE_ENABLED,
'CSS_COMPRESSOR': 'pipeline.compressors.yui.YUICompressor',
'YUI_BINARY': '/usr/bin/env yui-compressor',
'STYLESHEETS': {
# If you add a style here, please update stylesheets()
# in frontend_tests/zjsunit/output.js as needed.
'activity': {
'source_filenames': ('styles/activity.css',),
'output_filename': 'min/activity.css'
},
'portico': {
'source_filenames': (
'third/zocial/zocial.css',
'styles/portico.css',
'styles/pygments.css',
'styles/thirdparty-fonts.css',
'styles/fonts.css',
),
'output_filename': 'min/portico.css'
},
# Two versions of the app CSS exist because of QTBUG-3467
'app-fontcompat': {
'source_filenames': (
'third/bootstrap-notify/css/bootstrap-notify.css',
'third/spectrum/spectrum.css',
'styles/components.css',
'styles/zulip.css',
'styles/settings.css',
'styles/subscriptions.css',
'styles/compose.css',
'styles/left-sidebar.css',
'styles/overlay.css',
'styles/pygments.css',
'styles/thirdparty-fonts.css',
'styles/media.css',
# We don't want fonts.css on QtWebKit, so its omitted here
),
'output_filename': 'min/app-fontcompat.css'
},
'app': {
'source_filenames': (
'third/bootstrap-notify/css/bootstrap-notify.css',
'third/spectrum/spectrum.css',
'third/jquery-perfect-scrollbar/css/perfect-scrollbar.css',
'styles/components.css',
'styles/zulip.css',
'styles/settings.css',
'styles/subscriptions.css',
'styles/compose.css',
'styles/left-sidebar.css',
'styles/overlay.css',
'styles/pygments.css',
'styles/thirdparty-fonts.css',
'styles/fonts.css',
'styles/media.css',
),
'output_filename': 'min/app.css'
},
'common': {
'source_filenames': (
'third/bootstrap/css/bootstrap.css',
'third/bootstrap/css/bootstrap-btn.css',
'third/bootstrap/css/bootstrap-responsive.css',
),
'output_filename': 'min/common.css'
},
},
'JAVASCRIPT': {},
}
JS_SPECS = {
'common': {
'source_filenames': (
'node_modules/jquery/dist/jquery.js',
'third/underscore/underscore.js',
'js/blueslip.js',
'third/bootstrap/js/bootstrap.js',
'js/common.js',
),
'output_filename': 'min/common.js'
},
'signup': {
'source_filenames': (
'js/signup.js',
'node_modules/jquery-validation/dist/jquery.validate.js',
),
'output_filename': 'min/signup.js'
},
'api': {
'source_filenames': ('js/api.js',),
'output_filename': 'min/api.js'
},
'app_debug': {
'source_filenames': ('js/debug.js',),
'output_filename': 'min/app_debug.js'
},
'app': {
'source_filenames': [
'third/bootstrap-notify/js/bootstrap-notify.js',
'third/html5-formdata/formdata.js',
'node_modules/jquery-validation/dist/jquery.validate.js',
'node_modules/sockjs-client/sockjs.js',
'third/jquery-form/jquery.form.js',
'third/jquery-filedrop/jquery.filedrop.js',
'third/jquery-caret/jquery.caret.1.5.2.js',
'third/xdate/xdate.dev.js',
'third/spin/spin.js',
'third/jquery-mousewheel/jquery.mousewheel.js',
'third/jquery-throttle-debounce/jquery.ba-throttle-debounce.js',
'third/jquery-idle/jquery.idle.js',
'third/jquery-autosize/jquery.autosize.js',
'third/jquery-perfect-scrollbar/js/perfect-scrollbar.js',
'third/lazyload/lazyload.js',
'third/spectrum/spectrum.js',
'third/string-prototype-codepointat/codepointat.js',
'third/winchan/winchan.js',
'third/handlebars/handlebars.runtime.js',
'third/marked/lib/marked.js',
'templates/compiled.js',
'js/feature_flags.js',
'js/loading.js',
'js/util.js',
'js/dict.js',
'js/components.js',
'js/localstorage.js',
'js/channel.js',
'js/setup.js',
'js/unread_ui.js',
'js/muting.js',
'js/muting_ui.js',
'js/viewport.js',
'js/rows.js',
'js/people.js',
'js/unread.js',
'js/topic_list.js',
'js/pm_list.js',
'js/stream_list.js',
'js/filter.js',
'js/message_list_view.js',
'js/message_list.js',
'js/narrow.js',
'js/reload.js',
'js/compose_fade.js',
'js/fenced_code.js',
'js/echo.js',
'js/socket.js',
'js/compose.js',
'js/stream_color.js',
'js/admin.js',
'js/stream_data.js',
'js/subs.js',
'js/message_edit.js',
'js/condense.js',
'js/resize.js',
'js/floating_recipient_bar.js',
'js/ui.js',
'js/pointer.js',
'js/click_handlers.js',
'js/scroll_bar.js',
'js/gear_menu.js',
'js/copy_and_paste.js',
'js/popovers.js',
'js/typeahead_helper.js',
'js/search_suggestion.js',
'js/search.js',
'js/composebox_typeahead.js',
'js/navigate.js',
'js/hotkey.js',
'js/favicon.js',
'js/notifications.js',
'js/hashchange.js',
'js/invite.js',
'js/message_flags.js',
'js/alert_words.js',
'js/alert_words_ui.js',
'js/message_store.js',
'js/server_events.js',
'js/zulip.js',
'js/activity.js',
'js/colorspace.js',
'js/timerender.js',
'js/tutorial.js',
'js/templates.js',
'js/avatar.js',
'js/settings.js',
'js/tab_bar.js',
'js/emoji.js',
'js/referral.js',
'js/custom_markdown.js',
'js/bot_data.js',
# JS bundled by webpack is also included here if PIPELINE_ENABLED setting is true
],
'output_filename': 'min/app.js'
},
'activity': {
'source_filenames': (
'third/sorttable/sorttable.js',
),
'output_filename': 'min/activity.js'
},
# We also want to minify sockjs separately for the sockjs iframe transport
'sockjs': {
'source_filenames': ('node_modules/sockjs-client/sockjs.js',),
'output_filename': 'min/sockjs.min.js'
},
}
if PIPELINE_ENABLED:
# This is also done in test_settings.py, see comment there..
JS_SPECS['app']['source_filenames'].append('js/bundle.js')
app_srcs = JS_SPECS['app']['source_filenames']
########################################################################
# LOGGING SETTINGS
########################################################################
ZULIP_PATHS = [
("SERVER_LOG_PATH", "/var/log/zulip/server.log"),
("ERROR_FILE_LOG_PATH", "/var/log/zulip/errors.log"),
("MANAGEMENT_LOG_PATH", "/var/log/zulip/manage.log"),
("WORKER_LOG_PATH", "/var/log/zulip/workers.log"),
("PERSISTENT_QUEUE_FILENAME", "/home/zulip/tornado/event_queues.pickle"),
("JSON_PERSISTENT_QUEUE_FILENAME", "/home/zulip/tornado/event_queues.json"),
("EMAIL_MIRROR_LOG_PATH", "/var/log/zulip/email_mirror.log"),
("EMAIL_DELIVERER_LOG_PATH", "/var/log/zulip/email-deliverer.log"),
("LDAP_SYNC_LOG_PATH", "/var/log/zulip/sync_ldap_user_data.log"),
("QUEUE_ERROR_DIR", "/var/log/zulip/queue_error"),
("STATS_DIR", "/home/zulip/stats"),
("DIGEST_LOG_PATH", "/var/log/zulip/digest.log"),
("ANALYTICS_LOG_PATH", "/var/log/zulip/analytics.log"),
]
# The Event log basically logs most significant database changes,
# which can be useful for debugging.
if EVENT_LOGS_ENABLED:
ZULIP_PATHS.append(("EVENT_LOG_DIR", "/home/zulip/logs/event_log"))
else:
EVENT_LOG_DIR = None
for (var, path) in ZULIP_PATHS:
if DEVELOPMENT:
# if DEVELOPMENT, store these files in the Zulip checkout
path = os.path.join(DEVELOPMENT_LOG_DIRECTORY, os.path.basename(path))
# only `JSON_PERSISTENT_QUEUE_FILENAME` will be stored in `var`
if var == 'JSON_PERSISTENT_QUEUE_FILENAME':
path = os.path.join(os.path.join(DEPLOY_ROOT, 'var'), os.path.basename(path))
vars()[var] = path
ZULIP_WORKER_TEST_FILE = '/tmp/zulip-worker-test-file'
if IS_WORKER:
FILE_LOG_PATH = WORKER_LOG_PATH
else:
FILE_LOG_PATH = SERVER_LOG_PATH
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'formatters': {
'default': {
'format': '%(asctime)s %(levelname)-8s %(message)s'
}
},
'filters': {
'ZulipLimiter': {
'()': 'zerver.lib.logging_util.ZulipLimiter',
},
'EmailLimiter': {
'()': 'zerver.lib.logging_util.EmailLimiter',
},
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse',
},
'nop': {
'()': 'zerver.lib.logging_util.ReturnTrue',
},
'require_really_deployed': {
'()': 'zerver.lib.logging_util.RequireReallyDeployed',
},
},
'handlers': {
'zulip_admins': {
'level': 'ERROR',
'class': 'zerver.logging_handlers.AdminZulipHandler',
# For testing the handler delete the next line
'filters': ['ZulipLimiter', 'require_debug_false', 'require_really_deployed'],
'formatter': 'default'
},
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'default'
},
'file': {
'level': 'DEBUG',
'class': 'logging.handlers.WatchedFileHandler',
'formatter': 'default',
'filename': FILE_LOG_PATH,
},
'errors_file': {
'level': 'WARNING',
'class': 'logging.handlers.WatchedFileHandler',
'formatter': 'default',
'filename': ERROR_FILE_LOG_PATH,
},
},
'loggers': {
'': {
'handlers': ['console', 'file', 'errors_file'],
'level': 'INFO',
'propagate': False,
},
'django': {
            'handlers': ((['zulip_admins'] if ERROR_REPORTING else []) +
                         ['console', 'file', 'errors_file']),
'level': 'INFO',
'propagate': False,
},
'zulip.requests': {
'handlers': ['console', 'file', 'errors_file'],
'level': 'INFO',
'propagate': False,
},
'zulip.queue': {
'handlers': ['console', 'file', 'errors_file'],
'level': 'WARNING',
'propagate': False,
},
'zulip.management': {
'handlers': ['file', 'errors_file'],
'level': 'INFO',
'propagate': False,
},
'requests': {
'handlers': ['console', 'file', 'errors_file'],
'level': 'WARNING',
'propagate': False,
},
'django.security.DisallowedHost': {
'handlers': ['file'],
'propagate': False,
},
## Uncomment the following to get all database queries logged to the console
# 'django.db': {
# 'handlers': ['console'],
# 'level': 'DEBUG',
# 'propagate': False,
# },
}
}
ACCOUNT_ACTIVATION_DAYS = 7
LOGIN_REDIRECT_URL = '/'
# Client-side polling timeout for get_events, in milliseconds.
# We configure this here so that the client test suite can override it.
# We already kill the connection server-side with heartbeat events,
# but it's good to have a safety. This value should be greater than
# (HEARTBEAT_MIN_FREQ_SECS + 10)
POLL_TIMEOUT = 90 * 1000
# iOS App IDs
ZULIP_IOS_APP_ID = 'com.zulip.Zulip'
DBX_IOS_APP_ID = 'com.dropbox.Zulip'
########################################################################
# SSO AND LDAP SETTINGS
########################################################################
USING_APACHE_SSO = ('zproject.backends.ZulipRemoteUserBackend' in AUTHENTICATION_BACKENDS)
if len(AUTHENTICATION_BACKENDS) == 1 and (AUTHENTICATION_BACKENDS[0] ==
"zproject.backends.ZulipRemoteUserBackend"):
HOME_NOT_LOGGED_IN = "/accounts/login/sso"
ONLY_SSO = True
else:
HOME_NOT_LOGGED_IN = '/login'
ONLY_SSO = False
AUTHENTICATION_BACKENDS += ('zproject.backends.ZulipDummyBackend',)
POPULATE_PROFILE_VIA_LDAP = bool(AUTH_LDAP_SERVER_URI)
if POPULATE_PROFILE_VIA_LDAP and \
'zproject.backends.ZulipLDAPAuthBackend' not in AUTHENTICATION_BACKENDS:
AUTHENTICATION_BACKENDS += ('zproject.backends.ZulipLDAPUserPopulator',)
else:
POPULATE_PROFILE_VIA_LDAP = 'zproject.backends.ZulipLDAPAuthBackend' in AUTHENTICATION_BACKENDS or POPULATE_PROFILE_VIA_LDAP
########################################################################
# GITHUB AUTHENTICATION SETTINGS
########################################################################
# SOCIAL_AUTH_GITHUB_KEY is set in /etc/zulip/settings.py
SOCIAL_AUTH_GITHUB_SECRET = get_secret('social_auth_github_secret')
SOCIAL_AUTH_LOGIN_ERROR_URL = '/login/'
SOCIAL_AUTH_GITHUB_SCOPE = ['email']
SOCIAL_AUTH_GITHUB_ORG_KEY = SOCIAL_AUTH_GITHUB_KEY
SOCIAL_AUTH_GITHUB_ORG_SECRET = SOCIAL_AUTH_GITHUB_SECRET
SOCIAL_AUTH_GITHUB_TEAM_KEY = SOCIAL_AUTH_GITHUB_KEY
SOCIAL_AUTH_GITHUB_TEAM_SECRET = SOCIAL_AUTH_GITHUB_SECRET
########################################################################
# EMAIL SETTINGS
########################################################################
# If an email host is not specified, fail silently and gracefully
if not EMAIL_HOST and PRODUCTION:
EMAIL_BACKEND = 'django.core.mail.backends.dummy.EmailBackend'
elif DEVELOPMENT:
# In the dev environment, emails are printed to the run-dev.py console.
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
else:
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST_PASSWORD = get_secret('email_password')
if EMAIL_GATEWAY_PASSWORD is None:
EMAIL_GATEWAY_PASSWORD = get_secret('email_gateway_password')
if vars().get("AUTH_LDAP_BIND_PASSWORD") is None:
AUTH_LDAP_BIND_PASSWORD = get_secret('auth_ldap_bind_password')
# Set the sender email address for Django traceback error reporting
if SERVER_EMAIL is None:
SERVER_EMAIL = DEFAULT_FROM_EMAIL
########################################################################
# MISC SETTINGS
########################################################################
if PRODUCTION:
# Filter out user data
DEFAULT_EXCEPTION_REPORTER_FILTER = 'zerver.filters.ZulipExceptionReporterFilter'
# This is a debugging option only
PROFILE_ALL_REQUESTS = False
CROSS_REALM_BOT_EMAILS = set(('[email protected]', '[email protected]'))
| apache-2.0 | -763,140,899,692,655,200 | 36.853994 | 128 | 0.555418 | false | 3.860916 | true | false | false |
ecell/libmoleculizer | python-src/language_parser/moleculizer/moleculizerrules.py | 1 | 21379 | ###############################################################################
# Copyright (C) 2007, 2008, 2009 The Molecular Sciences Institute
# Original Author:
# Nathan Addy, Scientific Programmer Email: [email protected]
# The Molecular Sciences Institute
#
###############################################################################
import pdb
import re
import util
from xmlobject import XmlObject
import StringIO
from sectionparameter import SymbolicExpressionEvaluator
from sectionmodifications import ModificationsSection
from sectionmols import MolsSection
from sectionallostery import AllostericPlexesSection, AllostericOmnisSection
from sectionreactionrules import ReactionRulesSection
from sectionspeciesstreams import SpeciesStreamsSection
from sectionexplicitspeciesblock import ExplicitSpeciesSection
from moleculizer_xcpt import *
class MoleculizerRulesFile:
"""
    This object parses moleculizer rules input and writes out moleculizer xml,
    suitable for processing internally by a mzr::moleculizer instance."""
def BlockPassesSanityCheck( linearray ):
linearray = [x for x in linearray if x.strip() != ""]
if len(linearray) == 0: return True
        everyLineEndsWithSemiColon = [ x[-1] == ";" and x.count(";") == 1 for x in linearray]
noWhiteSpace = [ (x.count("\n") + x.count(" ") + x.count("\t") == 0) for x in linearray]
return reduce(util.And, everyLineEndsWithSemiColon) and reduce(util.And, noWhiteSpace)
BlockPassesSanityCheck = staticmethod( BlockPassesSanityCheck )
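    # Illustrative behaviour of the sanity check above (examples added for
    # clarity; the rule strings are placeholders): every non-empty line must
    # end with exactly one ';' and contain no spaces, tabs, or newlines.
    #   BlockPassesSanityCheck(["A->B;", "kon=1.0;"])  -> True
    #   BlockPassesSanityCheck(["A -> B;"])            -> False (whitespace)
    #   BlockPassesSanityCheck(["A->B"])               -> False (missing ';')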
def addWholeRulesString( self, rulesString):
print "Reading file '%s' " % rulesString
lines = rulesString.split("\n")
parameterBlock, modificationsBlock, molsBlock, allostericPlexes, allostericOmnis,\
reactionRulesBlock, dimerizationGenBlock, omniGenBlock, \
explicitSpeciesBlock, speciesStreamBlock = parseBlockTypesFromRulesFile( lines )
self.addParameterBlock( parameterBlock )
self.addModicationsBlock( modificationsBlock )
self.addMolsBlock( molsBlock )
self.addAllostericPlexesBlock( allostericPlexes )
self.addAllostericOmnisBlock( allostericOmnis )
self.addReactionRulesBlock( reactionRulesBlock, dimerizationGenBlock, \
omniGenBlock, [] )
self.addExplicitSpeciesBlock( explicitSpeciesBlock )
self.addSpeciesStreamsBlock( speciesStreamBlock )
return
def addWholeRulesFile(self, rulesFile):
parameterBlock, modificationsBlock, molsBlock, allostericPlexes, allostericOmnis, \
reactionRulesBlock, dimerizationGenBlock, omniGenBlock, \
explicitSpeciesBlock, speciesStreamBlock = parseBlockTypesFromRulesFile( open(rulesFile).readlines() )
self.addParameterBlock( parameterBlock )
self.addModicationsBlock( modificationsBlock )
self.addMolsBlock( molsBlock )
self.addAllostericPlexesBlock( allostericPlexes )
self.addAllostericOmnisBlock( allostericOmnis )
self.addReactionRulesBlock( reactionRulesBlock, dimerizationGenBlock, \
omniGenBlock, [] )
self.addExplicitSpeciesBlock( explicitSpeciesBlock )
self.addSpeciesStreamsBlock( speciesStreamBlock )
return
def addParameterStatement(self, paramStatement):
paramStatement = self.PreProcessStatement( paramStatement )
print "Adding param line: '%s'" % paramStatement
self.parameterBlock.append( paramStatement)
self.parameterEE = SymbolicExpressionEvaluator( self.parameterBlock )
return
def addModificationStatement(self, modLine):
modLine = self.PreProcessStatement( modLine )
print "Adding mod line: '%s'" % modLine
self.modificationsBlock.append( modLine)
self.modificationsSection = ModificationsSection( self.modificationsBlock )
return
def addMolsStatement(self, molsLine):
molsLine = self.PreProcessStatement( molsLine )
self.molsBlock.append( molsLine )
        self.molsSection = MolsSection( self.molsBlock )
return
def addAllostericPlexStatement(self, alloPlexLine):
alloPlexLine = self.PreProcessStatement( alloPlexLine )
self.allostericPlexes.append( alloPlexLine )
self.allostericPlexesSection = AllostericPlexesSection( self.allostericPlexes )
return
def addAllostericOmniStatement(self, alloOmniLine):
alloOmniLine = self.PreProcessStatement( alloOmniLine )
self.allostericOmnis.append( alloOmniLine )
self.allostericOmnisSection = AllostericOmnisSection( self.allostericOmnis )
return
def addDimerizationGenStatement(self, dimerGenLine):
dimerGenLine = self.PreProcessStatement( dimerGenLine )
self.dimerizationGenBlock.append(dimerGenLine)
self.reactionGensSection = ReactionRulesSection( self.reactionRulesBlock,
self.dimerizationGenBlock,
self.omniGenBlock,
self.uniMolGenBlock)
return
def addOmniGenStatement(self, omniGenLine):
omniGenLine = self.PreProcessStatement( omniGenLine )
        self.omniGenBlock.append( omniGenLine )
self.reactionGensSection = ReactionRulesSection( self.reactionRulesBlock,
self.dimerizationGenBlock,
self.omniGenBlock,
self.uniMolGenBlock)
return
def addUniMolGenStatement(self, uniMolGenLine):
uniMolGenLine = self.PreProcessStatement( uniMolGenLine )
self.uniMolGenBlock.append( uniMolGenLine )
self.reactionGensSection = ReactionRulesSection( self.reactionRulesBlock,
self.dimerizationGenBlock,
self.omniGenBlock,
self.uniMolGenBlock)
return
def addExplicitSpeciesStatement(self, explicitSpeciesStatement):
explicitSpeciesStatement = self.PreProcessStatement( explicitSpeciesStatement )
self.explicitSpeciesBlock.append( explicitSpeciesStatement )
self.explicitSpeciesSection = ExplicitSpeciesSection( self.explicitSpeciesBlock )
return
def addSpeciesStreamStatement(self, speciesStreamLine):
speciesStreamLine = self.PreProcessStatement( speciesStreamLine )
self.speciesStreamBlock.append( speciesStreamLine )
self.speciesStreamSection = SpeciesStreamsSection( self.speciesStreamBlock )
return
def __init__(self):
# These are the lines of input, in one statement per line form, with no whitespace
self.parameterBlock = []
self.modificationsBlock = []
self.molsBlock = []
self.allostericPlexes = []
self.allostericOmnis = []
self.reactionRulesBlock = []
self.dimerizationGenBlock = []
self.omniGenBlock = []
self.uniMolGenBlock = []
self.explicitSpeciesBlock = []
self.speciesStreamBlock = []
# These are the objects that will be used to process the parsed
# data.
        # A "section" is an intermediate between a rules file (it has lines and can
        # answer questions about what has been parsed) and an xml section (it can
        # write itself out as an xml section).
        # The parameters section doesn't write anything out currently, but easily could.
self.parameterSection = 0
self.modificationsSection = 0
self.molsSection = 0
self.allostericPlexesSection = 0
self.allostericOmnisSection = 0
self.reactionGensSection = 0
self.explicitSpeciesSection = 0
self.speciesStreamSection = 0
def getOutputFileName(self):
return self.outputFileName
def write(self):
self.openXmlFile = open(self.outputFileName, 'w')
self.__writeOutput(self.openXmlFile)
return
def writeToString(self):
myString = StringIO.StringIO()
self.__writeOutput( myString )
return myString.getvalue()
def close(self):
self.openXmlFile.close()
def addParameterBlock(self, parameterBlock, overwrite = False):
if self.parameterBlock and not overwrite:
raise MzrExceptions.MoleculizerException("Error: Cannot add a parameter block twice.")
if not self.BlockPassesSanityCheck( parameterBlock ):
raise InsaneBlockOnTheLooseException(parameterBlock, "parameter block")
self.parameterBlock = parameterBlock[:]
self.parameterEE = SymbolicExpressionEvaluator( self.parameterBlock )
def addModicationsBlock(self, modificationsBlock, overwrite = False):
if self.modificationsBlock and not overwrite:
raise MzrExceptions.MoleculizerException("Error: Cannot add a modifications block twice.")
if not self.BlockPassesSanityCheck( modificationsBlock ):
raise InsaneBlockOnTheLooseException(modificationsBlock, "modifications block")
self.modificationsBlock = modificationsBlock[:]
self.modificationsSection = ModificationsSection( self.modificationsBlock )
return
    def addMolsBlock(self, molsBlock, overwrite = False):
if self.molsBlock and not overwrite:
raise MzrExceptions.MoleculizerException("Error: Cannot add a mols block twice.")
if not self.BlockPassesSanityCheck( molsBlock ):
raise InsaneBlockOnTheLooseException(molsBlock, "mols block")
self.molsBlock = molsBlock[:]
self.molsSection = MolsSection( molsBlock )
def addAllostericPlexesBlock(self, apBlock, overwrite = False):
if self.allostericPlexes and not overwrite:
raise MzrExceptions.MoleculizerException("Error: Cannot add an allosteric plexes block twice.")
if not self.BlockPassesSanityCheck( apBlock ):
raise InsaneBlockOnTheLooseException(apBlock, "allosteric plexes block")
self.allostericPlexes = apBlock[:]
self.allostericPlexesSection = AllostericPlexesSection( self.allostericPlexes )
def addAllostericOmnisBlock(self, aoBlock, overwrite = False):
        if self.allostericOmnis and not overwrite:
            raise MzrExceptions.MoleculizerException("Error: Cannot add an allosteric omnis block twice.")
if not self.BlockPassesSanityCheck( aoBlock ):
raise InsaneBlockOnTheLooseException( aoBlock, "allosteric omnis block")
self.allostericOmnis = aoBlock[:]
self.allostericOmnisSection = AllostericOmnisSection( self.allostericOmnis )
def addReactionRulesBlock( self, rrBlock, dimerGenBlock, omniGenBlock, uniMolGenBlock, overwrite = False):
if self.reactionRulesBlock and not overwrite:
raise MzrExceptions.MoleculizerException("Error: Cannot add a reaction rules block twice.")
if not self.BlockPassesSanityCheck( rrBlock ):
raise InsaneBlockOnTheLooseException(rrBlock, "reaction rules")
if not self.BlockPassesSanityCheck( dimerGenBlock ):
raise InsaneBlockOnTheLooseException(dimerGenBlock, "dimerization gen block")
if not self.BlockPassesSanityCheck( omniGenBlock ):
raise InsaneBlockOnTheLooseException(omniGenBlock, "omni-gen block")
if not self.BlockPassesSanityCheck( uniMolGenBlock ):
raise InsaneBlockOnTheLooseException(uniMolGenBlock, "uni-mol-gen block")
self.reactionRulesBlock.extend( rrBlock )
self.dimerizationGenBlock.extend( dimerGenBlock )
self.omniGenBlock.extend( omniGenBlock )
self.uniMolGenBlock.extend( uniMolGenBlock )
self.reactionGensSection = ReactionRulesSection( self.reactionRulesBlock,
self.dimerizationGenBlock,
self.omniGenBlock,
self.uniMolGenBlock)
def addExplicitSpeciesBlock( self, esBlock, overwrite = False):
if self.explicitSpeciesBlock and not overwrite:
raise MzrExceptions.MoleculizerException("Error: Cannot add an explicit species block twice.")
if not self.BlockPassesSanityCheck( esBlock ):
raise InsaneBlockOnTheLooseException(esBlock, "explicit-species")
self.explicitSpeciesBlock = esBlock[:]
self.explicitSpeciesSection = ExplicitSpeciesSection( esBlock )
def addSpeciesStreamsBlock(self, ssBlock, overwrite = False):
if self.speciesStreamBlock and not overwrite:
raise MzrExceptions.MoleculizerException("Error: Cannot add a species stream block twice.")
if not self.BlockPassesSanityCheck( ssBlock ):
raise InsaneBlockOnTheLooseException(ssBlock, "")
self.speciesStreamBlock = ssBlock[:]
self.speciesStreamSection = SpeciesStreamsSection( self.speciesStreamBlock )
def __processAllostericRulesBlocks( self, allostericPlexBlock, allostericOmniBlock):
return 0
def __processReactionRulesBlocks( self, rxnRulesBlock, dimerBlock, omniGenBlock, uniGenBlock):
return 0
def __processExplicitSpeciesBlock( self, explicitSpeciesBlock):
return 0
def __processSpeciesStreamBlock( self, ssBlock):
return 0
def __writeOutput(self, openXMLFile):
xmlobject = self.__constructXMLRepresentation()
xmlobject.writeall(openXMLFile)
def __constructXMLRepresentation(self):
rootNode = XmlObject("moleculizer-input")
modelElmt = XmlObject("model")
modelElmt.attachToParent(rootNode)
streamsElmt = XmlObject("streams", rootNode)
self.__addModifications( modelElmt )
self.__addMols( modelElmt )
self.__addAllostericPlexes( modelElmt )
self.__addAllostericOmnis( modelElmt )
self.__addReactionGens( modelElmt )
self.__addExplicitSpecies( modelElmt )
self.__addExplicitReactions( modelElmt )
self.__addSpeciesStreams( streamsElmt )
return rootNode
def __addModifications(self, parentObject):
# Write me!!!
modificationsSection = XmlObject("modifications", parentObject)
if self.modificationsSection:
self.modificationsSection.writeModificationsSections( modificationsSection )
return
def __addMols(self, parentObject):
molsSection = XmlObject("mols", parentObject)
if self.molsSection:
self.molsSection.writeMolsSection( molsSection)
return
def __addAllostericPlexes(self, parentObject):
allostericPlexes = XmlObject("allosteric-plexes", parentObject)
if self.allostericPlexesSection:
self.allostericPlexesSection.writeAllostericPlexesSection(allostericPlexes)
return
def __addAllostericOmnis(self, parentObject):
allostericOmnis = XmlObject("allosteric-omnis", parentObject)
if self.allostericOmnisSection:
self.allostericOmnisSection.writeAllostericOmnisSection( allostericOmnis )
return
def __addReactionGens(self, parentObject):
reactionGenElmt = XmlObject("reaction-gens", parentObject)
if self.reactionGensSection:
self.reactionGensSection.writeReactionGensSection( reactionGenElmt )
return
def __addSpeciesStreams( self, parentObject):
speciesStreamsElement = XmlObject("species-streams", parentObject)
if self.speciesStreamSection:
self.speciesStreamSection.writeSpeciesStreamSection( speciesStreamsElement )
def __addExplicitSpecies(self, parentObject):
explicitSpeciesElmt = XmlObject("explicit-species", parentObject)
if self.explicitSpeciesSection:
self.explicitSpeciesSection.writeExplicitSpeciesSection( explicitSpeciesElmt )
return
def __addExplicitReactions( self, modelElmt ):
explicitReactionsElmt = XmlObject("explicit-reactions", modelElmt)
return
def parseBlockTypesFromRulesFile(textRulesFile):
textRulesFile = [re.sub("#.*$", "", x) for x in textRulesFile] # Delete all comments
# textRulesFile = [re.sub("//.*$", "", x) for x in textRulesFile] # Delete all comments
textRulesFile = [re.sub(r"\s*", "", x) for x in textRulesFile] # Delete all whitespace
textRulesFile = [x.strip() for x in textRulesFile] # Strip it for good measure
textRulesFile = [x for x in textRulesFile if x != ""] # This must be last, because line.strip() results in some empty lines.
parameterBlock = []
modificationsBlock = []
molsBlock = []
allostericPlexes = []
allostericOmnis = []
reactionRulesBlock = []
dimerizationGenBlock = []
omniGenBlock = []
uniMolGenBlock = []
explicitSpeciesBlock = []
speciesStreamBlock = []
# textRulesFile = '\n'.join(textRulesFile)
# textRulesFile = re.sub(r"\\\s*\n\s*", " ", textRulesFile)
# textRulesFile = textRulesFile.split("\n")
blockCodes = ["Parameters", "Modifications", "Molecules", "Explicit-Allostery", "Allosteric-Classes",
"Reaction-Rules", "Association-Reactions", "Transformation-Reactions",
"Explicit-Species", "Species-Classes" ]
blockObjNdx = -1
blockDataObj = [ (blockCodes[0], parameterBlock), \
(blockCodes[1], modificationsBlock), \
(blockCodes[2], molsBlock), \
(blockCodes[3], allostericPlexes),
(blockCodes[4], allostericOmnis),
(blockCodes[5], reactionRulesBlock), \
(blockCodes[6], dimerizationGenBlock), \
(blockCodes[7], omniGenBlock), \
(blockCodes[8], explicitSpeciesBlock),\
(blockCodes[9], speciesStreamBlock) ]
currentDmp = []
try:
assert( textRulesFile[0].startswith("="))
except:
raise Exception("Line '%s' should start with a '=', but does not." % textRulesFile[0])
blockObjNdx = -1
for line in textRulesFile:
if line.startswith("="):
blockObjNdx = returnNewIndex(line, blockDataObj)
currentDmp = blockDataObj[blockObjNdx][1]
else:
currentDmp.append(line)
return getFormattedArray(parameterBlock), getFormattedArray(modificationsBlock), getFormattedArray(molsBlock), getFormattedArray(allostericPlexes), getFormattedArray(allostericOmnis), \
getFormattedArray(reactionRulesBlock), getFormattedArray(dimerizationGenBlock), getFormattedArray(omniGenBlock), \
getFormattedArray(explicitSpeciesBlock), getFormattedArray(speciesStreamBlock)
def returnNewIndex(lineOfText, blockObjData):
key = lineOfText.strip().strip("=").strip()
for ndx in range(len(blockObjData)):
if key == blockObjData[ndx][0]:
return ndx
raise Exception("Section title '%s' cannot be found" % key)
return -1
def barf(msg):
sys.stderr.write(msg + '\n')
sys.stderr.write("Crashing....\n")
sys.exit(1)
def printerror(msg):
sys.stderr.write(msg + '\n')
return
def getFormattedArray( arrayToFormat ):
tmpArray = getBalancedArray( arrayToFormat )
tmpString = "".join( tmpArray )
if tmpString == "":
return []
try:
assert( tmpString[-1] == ";" )
except:
raise Exception("Error parsing block '%s'. Line does not end in ';'." % repr(arrayToFormat))
tmpArray = tmpString.split(";")
tmpArray.pop() # Last entry is blank
tmpArray = [tok + ";" for tok in tmpArray]
return tmpArray
def getBalancedArray( arrayToBalance ):
if not EachEntryIsParenBalanced( arrayToBalance ):
# Combine the ..., ndx_i, ndx_(i+1) where ndx_i is the smallest i not balanced
return getBalancedArray( GetIncrementallyBetterArray( arrayToBalance ) )
else:
return arrayToBalance
def GetIncrementallyBetterArray( anArray ):
values = [ StringIsParenBalenced(x) for x in anArray]
# This is correct: this function should only be used if the array does not pass
# EachEntryIsParenBalanced.
assert( False in values)
badNdx = values.index( False )
combinedTokens = anArray[badNdx] + anArray[badNdx + 1]
returnArray = anArray[ : badNdx]
returnArray.append( combinedTokens )
returnArray.extend( anArray[badNdx + 2 : ] )
return returnArray
def EachEntryIsParenBalanced( array ):
entries = [ StringIsParenBalenced(x) for x in array ]
returnVal = True
for val in entries:
returnVal = returnVal and val
return returnVal
def StringIsParenBalenced(line):
return ( line.count("(") == line.count(")") and
line.count("[") == line.count("]") and
line.count("{") == line.count("}") )
| gpl-2.0 | -8,921,513,987,681,306,000 | 36.245645 | 189 | 0.658263 | false | 4.037583 | false | false | false |
nevermoreluo/privateoverseas | overseas/migrations/0001_initial.py | 1 | 3314 | # -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-09-05 02:47
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='AccessGroup',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('agid', models.PositiveIntegerField(unique=True)),
('name', models.CharField(max_length=100)),
('desc', models.CharField(max_length=200)),
('api_correlation_id', models.CharField(max_length=100)),
('active', models.BooleanField(default=True)),
],
),
migrations.CreateModel(
name='Geo',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('metro', models.CharField(blank=True, max_length=100, null=True)),
('region', models.CharField(max_length=100)),
('requests', models.DecimalField(decimal_places=2, max_digits=20)),
('throughput', models.DecimalField(decimal_places=2, max_digits=20)),
('peak_throughput', models.DecimalField(decimal_places=2, max_digits=20)),
('bandwidth', models.DecimalField(decimal_places=2, max_digits=20)),
('peak_bandwidth', models.DecimalField(decimal_places=2, max_digits=20)),
('hit_rate', models.DecimalField(decimal_places=2, max_digits=20)),
('status_4XX', models.DecimalField(decimal_places=2, max_digits=20)),
('status_5XX', models.DecimalField(decimal_places=2, max_digits=20)),
('time', models.DateTimeField(auto_now_add=True)),
('active', models.BooleanField(default=True)),
],
),
migrations.CreateModel(
name='NetworkIdentifiers',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('ni', models.CharField(max_length=100, unique=True)),
('active', models.BooleanField(default=True)),
],
),
migrations.CreateModel(
name='Service',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('scid', models.CharField(max_length=50, unique=True)),
('active', models.BooleanField(default=True)),
('access_group', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='overseas.AccessGroup')),
],
),
migrations.AddField(
model_name='networkidentifiers',
name='service',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='overseas.Service'),
),
migrations.AddField(
model_name='geo',
name='ni',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='overseas.NetworkIdentifiers'),
),
]
| gpl-3.0 | -9,114,533,386,148,599,000 | 44.39726 | 124 | 0.573929 | false | 4.287193 | false | false | false |
zstackio/zstack-woodpecker | integrationtest/vm/mini/paths/path54.py | 1 | 2373 | import zstackwoodpecker.test_state as ts_header
import os
TestAction = ts_header.TestAction
def path():
return dict(initial_formation="template5", path_list=[
[TestAction.add_image, 'image1', 'root', os.environ.get('isoForVmUrl')],
[TestAction.create_vm_by_image, 'image1', 'iso', 'vm1'],
[TestAction.create_volume, 'volume1', 'flag=scsi'],
[TestAction.attach_volume, 'vm1', 'volume1'],
[TestAction.create_volume_backup, 'volume1', 'volume1-backup1'],
[TestAction.detach_volume, 'volume1'],
[TestAction.create_mini_vm, 'vm2', 'memory=random'],
[TestAction.resize_data_volume, 'volume1', 5*1024*1024],
[TestAction.attach_volume, 'vm2', 'volume1'],
[TestAction.create_mini_vm, 'vm3', 'network=random'],
[TestAction.create_volume, 'volume2', 'flag=thin,scsi'],
[TestAction.add_image, 'image2', 'root', 'http://172.20.1.28/mirror/diskimages/centos_vdbench.qcow2'],
[TestAction.stop_vm, 'vm2'],
[TestAction.use_volume_backup, 'volume1-backup1'],
[TestAction.start_vm, 'vm2'],
[TestAction.delete_image, 'image2'],
[TestAction.recover_image, 'image2'],
[TestAction.delete_image, 'image2'],
[TestAction.expunge_image, 'image2'],
[TestAction.create_vm_backup, 'vm1', 'vm1-backup2'],
[TestAction.reboot_vm, 'vm2'],
[TestAction.resize_data_volume, 'volume2', 5*1024*1024],
[TestAction.create_volume, 'volume3', 'size=random', 'flag=scsi'],
[TestAction.delete_volume, 'volume3'],
[TestAction.stop_vm, 'vm3'],
[TestAction.add_image, 'image3', 'root', 'http://172.20.1.28/mirror/diskimages/centos_vdbench.qcow2'],
[TestAction.delete_volume, 'volume1'],
[TestAction.expunge_volume, 'volume1'],
[TestAction.reboot_vm, 'vm2'],
[TestAction.create_vm_backup, 'vm2', 'vm2-backup3'],
[TestAction.resize_volume, 'vm3', 5*1024*1024],
[TestAction.delete_image, 'image1'],
[TestAction.delete_vm_backup, 'vm2-backup3'],
[TestAction.add_image, 'image4', 'root', 'http://172.20.1.28/mirror/diskimages/centos_vdbench.qcow2'],
[TestAction.create_volume, 'volume4', 'flag=scsi'],
[TestAction.attach_volume, 'vm2', 'volume2'],
])
'''
The final status:
Running:['vm1', 'vm2']
Stopped:['vm3']
Enadbled:['volume1-backup1', 'vm1-backup2', 'image3', 'image4']
attached:['volume2']
Detached:['volume4']
Deleted:['volume3', 'vm2-backup3', 'image1']
Expunged:['volume1', 'image2']
Ha:[]
Group:
vm_backup1:['vm1-backup2']---vm1_
'''
| apache-2.0 | -4,760,517,556,792,538,000 | 39.220339 | 104 | 0.686473 | false | 2.616318 | true | false | false |
v6ak/qubes-core-admin | core/storage/__init__.py | 2 | 15124 | #!/usr/bin/python2
#
# The Qubes OS Project, http://www.qubes-os.org
#
# Copyright (C) 2013 Marek Marczykowski <[email protected]>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
# USA.
#
from __future__ import absolute_import
import ConfigParser
import os
import os.path
import shutil
import subprocess
import sys
import qubes.qubesutils
from qubes.qubes import QubesException, defaults, system_path
CONFIG_FILE = '/etc/qubes/storage.conf'
class QubesVmStorage(object):
"""
Class for handling VM virtual disks. This is base class for all other
implementations, mostly with Xen on Linux in mind.
"""
def __init__(self, vm,
private_img_size = None,
root_img_size = None,
modules_img = None,
modules_img_rw = False):
self.vm = vm
self.vmdir = vm.dir_path
if private_img_size:
self.private_img_size = private_img_size
else:
self.private_img_size = defaults['private_img_size']
if root_img_size:
self.root_img_size = root_img_size
else:
self.root_img_size = defaults['root_img_size']
self.root_dev = "xvda"
self.private_dev = "xvdb"
self.volatile_dev = "xvdc"
self.modules_dev = "xvdd"
# For now compute this path still in QubesVm
self.modules_img = modules_img
self.modules_img_rw = modules_img_rw
# Additional drive (currently used only by HVM)
self.drive = None
def format_disk_dev(self, path, script, vdev, rw=True, type="disk",
domain=None):
if path is None:
return ''
template = " <disk type='block' device='{type}'>\n" \
" <driver name='phy'/>\n" \
" <source dev='{path}'/>\n" \
" <target dev='{vdev}' bus='xen'/>\n" \
"{params}" \
" </disk>\n"
params = ""
if not rw:
params += " <readonly/>\n"
if domain:
params += " <backenddomain name='%s'/>\n" % domain
if script:
params += " <script path='%s'/>\n" % script
return template.format(path=path, vdev=vdev, type=type, params=params)
def get_config_params(self):
args = {}
args['rootdev'] = self.root_dev_config()
args['privatedev'] = self.private_dev_config()
args['volatiledev'] = self.volatile_dev_config()
args['otherdevs'] = self.other_dev_config()
return args
def root_dev_config(self):
raise NotImplementedError
def private_dev_config(self):
raise NotImplementedError
def volatile_dev_config(self):
raise NotImplementedError
def other_dev_config(self):
if self.modules_img is not None:
return self.format_disk_dev(self.modules_img,
None,
self.modules_dev,
self.modules_img_rw)
elif self.drive is not None:
(drive_type, drive_domain, drive_path) = self.drive.split(":")
if drive_type == "hd":
drive_type = "disk"
writable = False
if drive_type == "disk":
writable = True
if drive_domain.lower() == "dom0":
drive_domain = None
return self.format_disk_dev(drive_path, None,
self.modules_dev,
rw=writable,
type=drive_type,
domain=drive_domain)
else:
return ''
def _copy_file(self, source, destination):
"""
Effective file copy, preserving sparse files etc.
"""
# TODO: Windows support
# We prefer to use Linux's cp, because it nicely handles sparse files
retcode = subprocess.call (["cp", "--reflink=auto", source, destination])
if retcode != 0:
raise IOError ("Error while copying {0} to {1}".\
format(source, destination))
def get_disk_utilization(self):
return qubes.qubesutils.get_disk_usage(self.vmdir)
def get_disk_utilization_private_img(self):
return qubes.qubesutils.get_disk_usage(self.private_img)
def get_private_img_sz(self):
if not os.path.exists(self.private_img):
return 0
return os.path.getsize(self.private_img)
def resize_private_img(self, size):
raise NotImplementedError
def create_on_disk_private_img(self, verbose, source_template = None):
raise NotImplementedError
def create_on_disk_root_img(self, verbose, source_template = None):
raise NotImplementedError
def create_on_disk(self, verbose, source_template = None):
if source_template is None:
source_template = self.vm.template
old_umask = os.umask(002)
if verbose:
print >> sys.stderr, "--> Creating directory: {0}".format(self.vmdir)
os.mkdir (self.vmdir)
self.create_on_disk_private_img(verbose, source_template)
self.create_on_disk_root_img(verbose, source_template)
self.reset_volatile_storage(verbose, source_template)
os.umask(old_umask)
def clone_disk_files(self, src_vm, verbose):
if verbose:
print >> sys.stderr, "--> Creating directory: {0}".format(self.vmdir)
os.mkdir (self.vmdir)
if src_vm.private_img is not None and self.private_img is not None:
if verbose:
print >> sys.stderr, "--> Copying the private image:\n{0} ==>\n{1}".\
format(src_vm.private_img, self.private_img)
self._copy_file(src_vm.private_img, self.private_img)
if src_vm.updateable and src_vm.root_img is not None and self.root_img is not None:
if verbose:
print >> sys.stderr, "--> Copying the root image:\n{0} ==>\n{1}".\
format(src_vm.root_img, self.root_img)
self._copy_file(src_vm.root_img, self.root_img)
# TODO: modules?
def rename(self, old_name, new_name):
old_vmdir = self.vmdir
new_vmdir = os.path.join(os.path.dirname(self.vmdir), new_name)
os.rename(self.vmdir, new_vmdir)
self.vmdir = new_vmdir
if self.private_img:
self.private_img = self.private_img.replace(old_vmdir, new_vmdir)
if self.root_img:
self.root_img = self.root_img.replace(old_vmdir, new_vmdir)
if self.volatile_img:
self.volatile_img = self.volatile_img.replace(old_vmdir, new_vmdir)
def verify_files(self):
if not os.path.exists (self.vmdir):
raise QubesException (
"VM directory doesn't exist: {0}".\
format(self.vmdir))
if self.root_img and not os.path.exists (self.root_img):
raise QubesException (
"VM root image file doesn't exist: {0}".\
format(self.root_img))
if self.private_img and not os.path.exists (self.private_img):
raise QubesException (
"VM private image file doesn't exist: {0}".\
format(self.private_img))
if self.modules_img is not None:
if not os.path.exists(self.modules_img):
raise QubesException (
"VM kernel modules image does not exists: {0}".\
format(self.modules_img))
def remove_from_disk(self):
shutil.rmtree (self.vmdir)
def reset_volatile_storage(self, verbose = False, source_template = None):
if source_template is None:
source_template = self.vm.template
# Re-create only for template based VMs
if source_template is not None and self.volatile_img:
if os.path.exists(self.volatile_img):
os.remove(self.volatile_img)
# For StandaloneVM create it only if not already exists (eg after backup-restore)
if self.volatile_img and not os.path.exists(self.volatile_img):
if verbose:
print >> sys.stderr, "--> Creating volatile image: {0}...".\
format(self.volatile_img)
subprocess.check_call([system_path["prepare_volatile_img_cmd"],
self.volatile_img, str(self.root_img_size / 1024 / 1024)])
def prepare_for_vm_startup(self, verbose):
self.reset_volatile_storage(verbose=verbose)
if self.private_img and not os.path.exists (self.private_img):
print >>sys.stderr, "WARNING: Creating empty VM private image file: {0}".\
format(self.private_img)
self.create_on_disk_private_img(verbose=False)
def dump(o):
""" Returns a string represention of the given object
Args:
o (object): anything that response to `__module__` and `__class__`
Given the class :class:`qubes.storage.QubesVmStorage` it returns
'qubes.storage.QubesVmStorage' as string
"""
return o.__module__ + '.' + o.__class__.__name__
def load(string):
""" Given a dotted full module string representation of a class it loads it
Args:
string (str) i.e. 'qubes.storage.xen.QubesXenVmStorage'
Returns:
type
See also:
:func:`qubes.storage.dump`
"""
if not type(string) is str:
# This is a hack which allows giving a real class to a vm instead of a
# string as string_class parameter.
return string
components = string.split(".")
module_path = ".".join(components[:-1])
klass = components[-1:][0]
module = __import__(module_path, fromlist=[klass])
return getattr(module, klass)
def get_pool(name, vm):
""" Instantiates the storage for the specified vm """
config = _get_storage_config_parser()
klass = _get_pool_klass(name, config)
keys = [k for k in config.options(name) if k != 'driver' and k != 'class']
values = [config.get(name, o) for o in keys]
config_kwargs = dict(zip(keys, values))
if name == 'default':
kwargs = defaults['pool_config'].copy()
kwargs.update(keys)
else:
kwargs = config_kwargs
return klass(vm, **kwargs)
def pool_exists(name):
""" Check if the specified pool exists """
try:
_get_pool_klass(name)
return True
except StoragePoolException:
return False
def add_pool(name, **kwargs):
""" Add a storage pool to config."""
config = _get_storage_config_parser()
config.add_section(name)
for key, value in kwargs.iteritems():
config.set(name, key, value)
_write_config(config)
def remove_pool(name):
""" Remove a storage pool from config file. """
config = _get_storage_config_parser()
config.remove_section(name)
_write_config(config)
def _write_config(config):
with open(CONFIG_FILE, 'w') as configfile:
config.write(configfile)
def _get_storage_config_parser():
""" Instantiates a `ConfigParaser` for specified storage config file.
Returns:
RawConfigParser
"""
config = ConfigParser.RawConfigParser()
config.read(CONFIG_FILE)
return config
def _get_pool_klass(name, config=None):
""" Returns the storage klass for the specified pool.
Args:
name: The pool name.
config: If ``config`` is not specified
`_get_storage_config_parser()` is called.
Returns:
type: A class inheriting from `QubesVmStorage`
"""
if config is None:
config = _get_storage_config_parser()
if not config.has_section(name):
raise StoragePoolException('Uknown storage pool ' + name)
if config.has_option(name, 'class'):
klass = load(config.get(name, 'class'))
elif config.has_option(name, 'driver'):
pool_driver = config.get(name, 'driver')
klass = defaults['pool_drivers'][pool_driver]
else:
raise StoragePoolException('Uknown storage pool driver ' + name)
return klass
class StoragePoolException(QubesException):
pass
class Pool(object):
def __init__(self, vm, dir_path):
assert vm is not None
assert dir_path is not None
self.vm = vm
self.dir_path = dir_path
self.create_dir_if_not_exists(self.dir_path)
self.vmdir = self.vmdir_path(vm, self.dir_path)
appvms_path = os.path.join(self.dir_path, 'appvms')
self.create_dir_if_not_exists(appvms_path)
servicevms_path = os.path.join(self.dir_path, 'servicevms')
self.create_dir_if_not_exists(servicevms_path)
vm_templates_path = os.path.join(self.dir_path, 'vm-templates')
self.create_dir_if_not_exists(vm_templates_path)
def vmdir_path(self, vm, pool_dir):
""" Returns the path to vmdir depending on the type of the VM.
The default QubesOS file storage saves the vm images in three
different directories depending on the ``QubesVM`` type:
* ``appvms`` for ``QubesAppVm`` or ``QubesHvm``
* ``vm-templates`` for ``QubesTemplateVm`` or ``QubesTemplateHvm``
* ``servicevms`` for any subclass of ``QubesNetVm``
Args:
vm: a QubesVM
pool_dir: the root directory of the pool
Returns:
string (str) absolute path to the directory where the vm files
are stored
"""
if vm.is_appvm():
subdir = 'appvms'
elif vm.is_template():
subdir = 'vm-templates'
elif vm.is_netvm():
subdir = 'servicevms'
elif vm.is_disposablevm():
subdir = 'appvms'
return os.path.join(pool_dir, subdir, vm.template.name + '-dvm')
else:
raise QubesException(vm.type() + ' unknown vm type')
return os.path.join(pool_dir, subdir, vm.name)
def create_dir_if_not_exists(self, path):
""" Check if a directory exists in if not create it.
This method does not create any parent directories.
"""
if not os.path.exists(path):
os.mkdir(path)
| gpl-2.0 | 4,979,779,882,463,710,000 | 32.910314 | 91 | 0.583047 | false | 3.872983 | true | false | false |
eloquence/unisubs | apps/teams/models.py | 1 | 128528 | # Amara, universalsubtitles.org
#
# Copyright (C) 2013 Participatory Culture Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see
# http://www.gnu.org/licenses/agpl-3.0.html.
from collections import defaultdict
from itertools import groupby
from math import ceil
import csv
import datetime
import logging
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.contrib.sites.models import Site
from django.core.exceptions import ValidationError
from django.core.urlresolvers import reverse
from django.core.files import File
from django.db import models
from django.db.models import query, Q
from django.db.models.signals import post_save, post_delete, pre_delete
from django.http import Http404
from django.template.loader import render_to_string
from django.utils.translation import ugettext_lazy as _, ugettext
from haystack import site
from haystack.query import SQ
import teams.moderation_const as MODERATION
from caching import ModelCacheManager
from comments.models import Comment
from auth.models import UserLanguage, CustomUser as User
from auth.providers import get_authentication_provider
from messages import tasks as notifier
from subtitles import shims
from subtitles.signals import language_deleted
from teams.moderation_const import WAITING_MODERATION, UNMODERATED, APPROVED
from teams.permissions_const import (
TEAM_PERMISSIONS, PROJECT_PERMISSIONS, ROLE_OWNER, ROLE_ADMIN, ROLE_MANAGER,
ROLE_CONTRIBUTOR
)
from teams import tasks
from teams import workflows
from teams.signals import api_subtitles_approved, api_subtitles_rejected
from utils import DEFAULT_PROTOCOL
from utils import translation
from utils.amazon import S3EnabledImageField, S3EnabledFileField
from utils.panslugify import pan_slugify
from utils.searching import get_terms
from utils.text import fmt
from videos.models import (Video, VideoUrl, SubtitleVersion, SubtitleLanguage,
Action)
from videos.tasks import video_changed_tasks
from subtitles.models import (
SubtitleVersion as NewSubtitleVersion,
SubtitleLanguage as NewSubtitleLanguage,
SubtitleNoteBase,
ORIGIN_IMPORTED
)
from subtitles import pipeline
from functools import partial
logger = logging.getLogger(__name__)
celery_logger = logging.getLogger('celery.task')
BILLING_CUTOFF = getattr(settings, 'BILLING_CUTOFF', None)
# Teams
class TeamQuerySet(query.QuerySet):
def add_members_count(self):
"""Add _members_count field to this query
        This can be used to order/filter the query and also avoids an extra
        query when Team.members_count() is called.
"""
select = {
'_members_count': (
'SELECT COUNT(1) '
'FROM teams_teammember tm '
'WHERE tm.team_id=teams_team.id'
)
}
return self.extra(select=select)
def add_videos_count(self):
"""Add _videos_count field to this query
        This can be used to order/filter the query and also avoids an extra
        query when Team.videos_count() is called.
"""
select = {
'_videos_count': (
'SELECT COUNT(1) '
'FROM teams_teamvideo tv '
'WHERE tv.team_id=teams_team.id'
)
}
return self.extra(select=select)
def add_user_is_member(self, user):
"""Add user_is_member field to this query """
if not user.is_authenticated():
return self.extra(select={'user_is_member': 0})
select = {
'user_is_member': (
'EXISTS (SELECT 1 '
'FROM teams_teammember tm '
'WHERE tm.team_id=teams_team.id '
'AND tm.user_id=%s)'
)
}
return self.extra(select=select, select_params=[user.id])
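    # Illustrative usage of the helpers above (a sketch, not part of the
    # original module; `user` is assumed to be a request's user):
    #
    #   qs = Team.objects.for_user(user).add_members_count().add_videos_count()
    #   qs = qs.order_by('-_members_count')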
class TeamManager(models.Manager):
def get_query_set(self):
"""Return a QS of all non-deleted teams."""
return TeamQuerySet(Team).filter(deleted=False)
def for_user(self, user, exclude_private=False):
"""Return the teams visible for the given user.
If exclude_private is True, then we will exclude private teams, even
if the user can apply to them.
"""
# policies where we should show the team, even if they're not visible
visible_policies = [Team.OPEN, Team.APPLICATION]
q = models.Q(is_visible=True)
if not exclude_private:
q |= models.Q(membership_policy__in=visible_policies)
if user.is_authenticated():
user_teams = TeamMember.objects.filter(user=user)
q |= models.Q(id__in=user_teams.values('team_id'))
return self.get_query_set().filter(q)
def with_recent_billing_record(self, day_range):
"""Find teams that have had a new video recently"""
start_date = (datetime.datetime.now() -
datetime.timedelta(days=day_range))
team_ids = list(BillingRecord.objects
.order_by()
.filter(created__gt=start_date)
.values_list('team_id', flat=True)
.distinct())
return Team.objects.filter(id__in=team_ids)
def needs_new_video_notification(self, notify_interval):
return (self.filter(
notify_interval=notify_interval,
teamvideo__created__gt=models.F('last_notification_time'))
.distinct())
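    # Illustrative usage (a sketch, not part of the original module): find
    # teams that want hourly digests and have added a video since their last
    # notification, or teams with recent billing activity.
    #
    #   Team.objects.needs_new_video_notification(Team.NOTIFY_HOURLY)
    #   Team.objects.with_recent_billing_record(day_range=30)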
class Team(models.Model):
APPLICATION = 1
INVITATION_BY_MANAGER = 2
INVITATION_BY_ALL = 3
OPEN = 4
INVITATION_BY_ADMIN = 5
MEMBERSHIP_POLICY_CHOICES = (
(OPEN, _(u'Open')),
(APPLICATION, _(u'Application')),
(INVITATION_BY_ALL, _(u'Invitation by any team member')),
(INVITATION_BY_MANAGER, _(u'Invitation by manager')),
(INVITATION_BY_ADMIN, _(u'Invitation by admin')),
)
VP_MEMBER = 1
VP_MANAGER = 2
VP_ADMIN = 3
VIDEO_POLICY_CHOICES = (
(VP_MEMBER, _(u'Any team member')),
(VP_MANAGER, _(u'Managers and admins')),
(VP_ADMIN, _(u'Admins only'))
)
TASK_ASSIGN_CHOICES = (
(10, 'Any team member'),
(20, 'Managers and admins'),
(30, 'Admins only'),
)
TASK_ASSIGN_NAMES = dict(TASK_ASSIGN_CHOICES)
TASK_ASSIGN_IDS = dict([choice[::-1] for choice in TASK_ASSIGN_CHOICES])
SUBTITLE_CHOICES = (
(10, 'Anyone'),
(20, 'Any team member'),
(30, 'Only managers and admins'),
(40, 'Only admins'),
)
SUBTITLE_NAMES = dict(SUBTITLE_CHOICES)
SUBTITLE_IDS = dict([choice[::-1] for choice in SUBTITLE_CHOICES])
NOTIFY_DAILY = 'D'
NOTIFY_HOURLY = 'H'
NOTIFY_INTERVAL_CHOICES = (
(NOTIFY_DAILY, _('Daily')),
(NOTIFY_HOURLY, _('Hourly')),
)
name = models.CharField(_(u'name'), max_length=250, unique=True)
slug = models.SlugField(_(u'slug'), unique=True)
description = models.TextField(_(u'description'), blank=True, help_text=_('All urls will be converted to links. Line breaks and HTML not supported.'))
logo = S3EnabledImageField(verbose_name=_(u'logo'), blank=True,
upload_to='teams/logo/',
default='',
thumb_sizes=[(280, 100), (100, 100)])
square_logo = S3EnabledImageField(verbose_name=_(u'square logo'),
blank=True,
default='',
upload_to='teams/square-logo/',
thumb_sizes=[(100, 100), (48, 48)])
is_visible = models.BooleanField(_(u'videos public?'), default=True)
videos = models.ManyToManyField(Video, through='TeamVideo', verbose_name=_('videos'))
users = models.ManyToManyField(User, through='TeamMember', related_name='teams', verbose_name=_('users'))
points = models.IntegerField(default=0, editable=False)
applicants = models.ManyToManyField(User, through='Application', related_name='applicated_teams', verbose_name=_('applicants'))
created = models.DateTimeField(auto_now_add=True)
highlight = models.BooleanField(default=False)
video = models.ForeignKey(Video, null=True, blank=True, related_name='intro_for_teams', verbose_name=_(u'Intro Video'))
application_text = models.TextField(blank=True)
page_content = models.TextField(_(u'Page content'), blank=True, help_text=_(u'You can use markdown. This will replace Description.'))
is_moderated = models.BooleanField(default=False)
header_html_text = models.TextField(blank=True, default='', help_text=_(u"HTML that appears at the top of the teams page."))
last_notification_time = models.DateTimeField(editable=False, default=datetime.datetime.now)
notify_interval = models.CharField(max_length=1,
choices=NOTIFY_INTERVAL_CHOICES,
default=NOTIFY_DAILY)
auth_provider_code = models.CharField(_(u'authentication provider code'),
max_length=24, blank=True, default="")
# code value from one the TeamWorkflow subclasses
# Since other apps can add workflow types, let's use this system to avoid
# conflicts:
# - Core types are defined in the teams app and 1 char long
    # - Extension types are defined in other apps. They are 2 chars long,
# with the first one being unique to the app.
workflow_type = models.CharField(max_length=2, default='O')
# Enabling Features
projects_enabled = models.BooleanField(default=False)
# Deprecated field that enables the tasks workflow
workflow_enabled = models.BooleanField(default=False)
# Policies and Permissions
membership_policy = models.IntegerField(_(u'membership policy'),
choices=MEMBERSHIP_POLICY_CHOICES,
default=OPEN)
video_policy = models.IntegerField(_(u'video policy'),
choices=VIDEO_POLICY_CHOICES,
default=VP_MEMBER)
# The values below here are mostly specific to the tasks workflow and will
# probably be deleted.
task_assign_policy = models.IntegerField(_(u'task assignment policy'),
choices=TASK_ASSIGN_CHOICES,
default=TASK_ASSIGN_IDS['Any team member'])
subtitle_policy = models.IntegerField(_(u'subtitling policy'),
choices=SUBTITLE_CHOICES,
default=SUBTITLE_IDS['Anyone'])
translate_policy = models.IntegerField(_(u'translation policy'),
choices=SUBTITLE_CHOICES,
default=SUBTITLE_IDS['Anyone'])
max_tasks_per_member = models.PositiveIntegerField(_(u'maximum tasks per member'),
default=None, null=True, blank=True)
task_expiration = models.PositiveIntegerField(_(u'task expiration (days)'),
default=None, null=True, blank=True)
deleted = models.BooleanField(default=False)
partner = models.ForeignKey('Partner', null=True, blank=True,
related_name='teams')
objects = TeamManager()
all_objects = models.Manager() # For accessing deleted teams, if necessary.
cache = ModelCacheManager()
class Meta:
ordering = ['name']
verbose_name = _(u'Team')
verbose_name_plural = _(u'Teams')
def __init__(self, *args, **kwargs):
models.Model.__init__(self, *args, **kwargs)
self._member_cache = {}
def save(self, *args, **kwargs):
creating = self.pk is None
super(Team, self).save(*args, **kwargs)
self.cache.invalidate()
if creating:
# create a default project
self.default_project
# setup our workflow
self.new_workflow.setup_team()
def __unicode__(self):
return self.name or self.slug
def is_tasks_team(self):
return self.workflow_enabled
@property
def new_workflow(self):
if not hasattr(self, '_new_workflow'):
self._new_workflow = workflows.TeamWorkflow.get_workflow(self)
return self._new_workflow
def is_old_style(self):
return self.workflow_type == "O"
def get_tasks_page_url(self):
return reverse('teams:team_tasks', kwargs={
'slug': self.slug,
})
def languages(self, members_joined_since=None):
"""Returns the languages spoken by the member of the team
"""
if members_joined_since:
users = self.members_since(members_joined_since)
else:
users = self.users.all()
return UserLanguage.objects.filter(user__in=users).values_list('language', flat=True)
def active_users(self, since=None, published=True):
sv = NewSubtitleVersion.objects.filter(video__in=self.videos.all())
if published:
sv = sv.filter(Q(visibility_override='public') | Q(visibility='public'))
if since:
sv = sv.filter(created__gt=datetime.datetime.now() - datetime.timedelta(days=since))
return sv.exclude(author__username="anonymous").values_list('author', 'subtitle_language')
def get_default_message(self, name):
return fmt(Setting.MESSAGE_DEFAULTS.get(name, ''), team=self)
def get_messages(self, names):
"""Fetch messages from the settings objects
        This method fetches the messages associated with names and interpolates
them to replace %(team)s with the team name.
Returns:
dict mapping names to message text
"""
messages = {
name: self.get_default_message(name)
for name in names
}
for setting in self.settings.with_names(names):
if setting.data:
messages[setting.key_name] = setting.data
return messages
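    # Example (illustrative sketch): 'messages_application' is a key also used
    # by application_message() below; other key names live on the Setting
    # model, which is not shown here.
    #
    #   messages = team.get_messages(['messages_application'])
    #   application_text = messages['messages_application']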
def render_message(self, msg):
"""Return a string of HTML represention a team header for a notification.
TODO: Get this out of the model and into a templatetag or something.
"""
author_page = msg.author.get_absolute_url() if msg.author else ''
context = {
'team': self,
'msg': msg,
'author': msg.author,
'author_page': author_page,
'team_page': self.get_absolute_url(),
"STATIC_URL": settings.STATIC_URL,
}
return render_to_string('teams/_team_message.html', context)
def is_open(self):
"""Return whether this team's membership is open to the public."""
return self.membership_policy == self.OPEN
def is_by_application(self):
"""Return whether this team's membership is by application only."""
return self.membership_policy == self.APPLICATION
def get_workflow(self):
"""Return the workflow for the given team.
A workflow will always be returned. If one isn't specified for the team
a default (unsaved) one will be populated with default values and
returned.
TODO: Refactor this behaviour into something less confusing.
"""
return Workflow.get_for_target(self.id, 'team')
@property
def auth_provider(self):
"""Return the authentication provider class for this Team, or None.
No DB queries are used, so this is safe to call many times.
"""
if not self.auth_provider_code:
return None
else:
return get_authentication_provider(self.auth_provider_code)
# Thumbnails
def logo_thumbnail(self):
"""URL for a kind-of small version of this team's logo, or None."""
if self.logo:
return self.logo.thumb_url(100, 100)
def logo_thumbnail_medium(self):
"""URL for a medium version of this team's logo, or None."""
if self.logo:
return self.logo.thumb_url(280, 100)
def square_logo_thumbnail(self):
"""URL for this team's square logo, or None."""
if self.square_logo:
return self.square_logo.thumb_url(100, 100)
def square_logo_thumbnail_small(self):
"""URL for a small version of this team's square logo, or None."""
if self.square_logo:
return self.square_logo.thumb_url(48, 48)
# URLs
@models.permalink
def get_absolute_url(self):
return ('teams:dashboard', [self.slug])
def get_site_url(self):
"""Return the full, absolute URL for this team, including http:// and the domain."""
return '%s://%s%s' % (DEFAULT_PROTOCOL,
Site.objects.get_current().domain,
self.get_absolute_url())
# Membership and roles
def get_member(self, user):
"""Get a TeamMember object for a user or None."""
if not user.is_authenticated():
return None
if user.id in self._member_cache:
return self._member_cache[user.id]
try:
member = self.members.get(user=user)
except TeamMember.DoesNotExist:
member = None
self._member_cache[user.id] = member
return member
def user_is_member(self, user):
members = self.cache.get('members')
if members is None:
members = list(self.members.values_list('user_id', flat=True))
self.cache.set('members', members)
return user.id in members
def uncache_member(self, user):
try:
del self._member_cache[user.id]
except KeyError:
pass
def user_can_view_videos(self, user):
return self.is_visible or self.user_is_member(user)
def _is_role(self, user, role=None):
"""Return whether the given user has the given role in this team.
Safe to use with null or unauthenticated users.
If no role is given, simply return whether the user is a member of this team at all.
TODO: Change this to use the stuff in teams.permissions.
"""
if not user or not user.is_authenticated():
return False
qs = self.members.filter(user=user)
if role:
qs = qs.filter(role=role)
return qs.exists()
def can_bulk_approve(self, user):
return self.is_owner(user) or self.is_admin(user)
def is_owner(self, user):
"""
Return whether the given user is an owner of this team.
"""
return self._is_role(user, TeamMember.ROLE_OWNER)
def is_admin(self, user):
"""Return whether the given user is an admin of this team."""
return self._is_role(user, TeamMember.ROLE_ADMIN)
def is_manager(self, user):
"""Return whether the given user is a manager of this team."""
return self._is_role(user, TeamMember.ROLE_MANAGER)
def is_member(self, user):
"""Return whether the given user is a member of this team."""
return self._is_role(user)
def is_contributor(self, user, authenticated=True):
"""Return whether the given user is a contributor of this team, False otherwise."""
return self._is_role(user, TeamMember.ROLE_CONTRIBUTOR)
def can_see_video(self, user, team_video=None):
"""I have no idea.
TODO: Figure out what this thing is, and if it's still necessary.
"""
if not user.is_authenticated():
return False
return self.is_member(user)
def fetch_video_actions(self, video_language=None):
"""Fetch the Action objects for this team's videos
Args:
video_language: only actions for videos with this
primary_audio_language_code
"""
video_q = TeamVideo.objects.filter(team=self).values_list('video_id')
if video_language is not None:
video_q = video_q.filter(
video__primary_audio_language_code=video_language)
return Action.objects.filter(video_id__in=video_q)
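    # Example (illustrative): all activity on this team's videos whose primary
    # audio language is French.
    #
    #   actions = team.fetch_video_actions(video_language='fr')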
    # Moderation
def moderates_videos(self):
"""Return whether this team moderates videos in some way, False otherwise.
Moderation means the team restricts who can create subtitles and/or
translations.
"""
if self.subtitle_policy != Team.SUBTITLE_IDS['Anyone']:
return True
if self.translate_policy != Team.SUBTITLE_IDS['Anyone']:
return True
return False
def video_is_moderated_by_team(self, video):
"""Return whether this team moderates the given video."""
return video.moderated_by == self
# Item counts
@property
def members_count(self):
"""Return the number of members of this team.
Caches the result in-object for performance.
"""
if not hasattr(self, '_members_count'):
setattr(self, '_members_count', self.users.count())
return self._members_count
def members_count_since(self, joined_since):
"""Return the number of members of this team who joined the last n days.
"""
return self.users.filter(date_joined__gt=datetime.datetime.now() - datetime.timedelta(days=joined_since)).count()
def members_since(self, joined_since):
""" Returns the members who joined the team the last n days
"""
return self.users.filter(date_joined__gt=datetime.datetime.now() - datetime.timedelta(days=joined_since))
@property
def videos_count(self):
"""Return the number of videos of this team.
Caches the result in-object for performance.
"""
if not hasattr(self, '_videos_count'):
setattr(self, '_videos_count', self.teamvideo_set.count())
return self._videos_count
    def videos_count_since(self, added_since=None):
        """Return the number of videos of this team added in the last n days.
"""
return self.teamvideo_set.filter(created__gt=datetime.datetime.now() - datetime.timedelta(days=added_since)).count()
def videos_since(self, added_since):
"""Returns the videos of this team added the last n days.
"""
return self.videos.filter(created__gt=datetime.datetime.now() - datetime.timedelta(days=added_since))
def unassigned_tasks(self, sort=None):
qs = Task.objects.filter(team=self, deleted=False, completed=None, assignee=None, type=Task.TYPE_IDS['Approve'])
if sort is not None:
qs = qs.order_by(sort)
return qs
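    # Example (illustrative): counting the approval tasks nobody has picked up
    # yet, e.g. for a dashboard widget.
    #
    #   pending_approvals = team.unassigned_tasks().count()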
def get_task(self, task_pk):
return Task.objects.get(pk=task_pk)
def get_tasks(self, task_pks):
return Task.objects.filter(pk__in=task_pks).select_related('new_subtitle_version', 'new_subtitle_version__subtitle_language', 'team_video', 'team_video__video', 'team_video__video__teamvideo', 'workflow')
def _count_tasks(self):
qs = Task.objects.filter(team=self, deleted=False, completed=None)
        # Quick check: if there are more than 1000 tasks, return 1001 and let
        # the UI display "> 1000".
if qs[1000:1001].exists():
return 1001
else:
return qs.count()
@property
def tasks_count(self):
"""Return the number of incomplete, undeleted tasks of this team.
Caches the result in-object for performance.
Note: the count is capped at 1001 tasks. If a team has more than
that, we generally just want to display "> 1000". Use
get_tasks_count_display() to do that.
"""
if not hasattr(self, '_tasks_count'):
setattr(self, '_tasks_count', self._count_tasks())
return self._tasks_count
def get_tasks_count_display(self):
"""Get a string to display for our tasks count."""
if self.tasks_count <= 1000:
return unicode(self.tasks_count)
else:
return ugettext('> 1000')
# Applications (people applying to join)
def application_message(self):
"""Return the membership application message for this team, or '' if none exists."""
try:
return self.settings.get(key=Setting.KEY_IDS['messages_application']).data
except Setting.DoesNotExist:
return ''
@property
def applications_count(self):
"""Return the number of open membership applications to this team.
Caches the result in-object for performance.
"""
if not hasattr(self, '_applications_count'):
setattr(self, '_applications_count', self.applications.count())
return self._applications_count
# Language pairs
def _lang_pair(self, lp, suffix):
return SQ(content="{0}_{1}_{2}".format(lp[0], lp[1], suffix))
def get_videos_for_languages_haystack(self, language=None,
num_completed_langs=None,
project=None, user=None, query=None,
sort=None, exclude_language=None):
qs = self.get_videos_for_user(user)
if project:
qs = qs.filter(project_pk=project.pk)
if query:
for term in get_terms(query):
qs = qs.auto_query(qs.query.clean(term).decode('utf-8'))
if language:
qs = qs.filter(video_completed_langs=language)
if exclude_language:
qs = qs.exclude(video_completed_langs=exclude_language)
if num_completed_langs is not None:
qs = qs.filter(num_completed_langs=num_completed_langs)
qs = qs.order_by({
'name': 'video_title_exact',
'-name': '-video_title_exact',
'subs': 'num_completed_langs',
'-subs': '-num_completed_langs',
'time': 'team_video_create_date',
'-time': '-team_video_create_date',
}.get(sort or '-time'))
return qs
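    # Example (illustrative sketch; `user` and `query` are assumed to come from
    # the view layer): search a team's videos that have completed English
    # subtitles, newest first.
    #
    #   results = team.get_videos_for_languages_haystack(
    #       language='en', user=user, query='interview', sort='-time')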
def get_videos_for_user(self, user):
from teams.search_indexes import TeamVideoLanguagesIndex
is_member = (user and user.is_authenticated()
and self.members.filter(user=user).exists())
if is_member:
return TeamVideoLanguagesIndex.results_for_members(self).filter(team_id=self.id)
else:
return TeamVideoLanguagesIndex.results().filter(team_id=self.id)
# Projects
@property
def default_project(self):
"""Return the default project for this team.
If it doesn't already exist it will be created.
TODO: Move the creation into a signal on the team to avoid creating
multiple default projects here?
"""
try:
return Project.objects.get(team=self, slug=Project.DEFAULT_NAME)
except Project.DoesNotExist:
p = Project(team=self,name=Project.DEFAULT_NAME)
p.save()
return p
@property
def has_projects(self):
"""Return whether this team has projects other than the default one."""
return self.project_set.count() > 1
# Readable/writeable language codes
def get_writable_langs(self):
"""Return a list of language code strings that are writable for this team.
This value may come from memcache.
"""
return TeamLanguagePreference.objects.get_writable(self)
def get_readable_langs(self):
"""Return a list of language code strings that are readable for this team.
This value may come from memcache.
"""
return TeamLanguagePreference.objects.get_readable(self)
def get_team_languages(self, since=None):
query_sl = NewSubtitleLanguage.objects.filter(video__in=self.videos.all())
new_languages = []
if since:
query_sl = query_sl.filter(id__in=NewSubtitleVersion.objects.filter(video__in=self.videos.all(),
created__gt=datetime.datetime.now() - datetime.timedelta(days=since)).order_by('subtitle_language').values_list('subtitle_language', flat=True).distinct())
new_languages = list(NewSubtitleLanguage.objects.filter(video__in=self.videos_since(since)).values_list('language_code', 'subtitles_complete'))
query_sl = query_sl.values_list('language_code', 'subtitles_complete')
languages = list(query_sl)
def first_member(x):
return x[0]
complete_languages = map(first_member, filter(lambda x: x[1], languages))
incomplete_languages = map(first_member, filter(lambda x: not x[1], languages))
new_languages = map(first_member, new_languages)
if since:
return (complete_languages, incomplete_languages, new_languages)
else:
return (complete_languages, incomplete_languages)
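    # Example (illustrative): the return value is made of language-code lists,
    # either a (complete, incomplete) pair or a (complete, incomplete, new)
    # triple when `since` (in days) is given.
    #
    #   complete, incomplete = team.get_team_languages()
    #   complete, incomplete, new = team.get_team_languages(since=30)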
# This needs to be constructed after the model definition since we need a
# reference to the class itself.
Team._meta.permissions = TEAM_PERMISSIONS
# Project
class ProjectManager(models.Manager):
def for_team(self, team_identifier):
"""Return all non-default projects for the given team with the given identifier.
The team_identifier passed may be an actual Team object, or a string
containing a team slug, or the primary key of a team as an integer.
"""
if hasattr(team_identifier, "pk"):
team = team_identifier
elif isinstance(team_identifier, int):
team = Team.objects.get(pk=team_identifier)
elif isinstance(team_identifier, str):
team = Team.objects.get(slug=team_identifier)
return Project.objects.filter(team=team).exclude(name=Project.DEFAULT_NAME)
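    # Example (illustrative): the identifier may be a Team instance, a primary
    # key, or a slug string; all three calls below resolve to the same team.
    #
    #   Project.objects.for_team(team)
    #   Project.objects.for_team(team.pk)
    #   Project.objects.for_team(str(team.slug))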
class Project(models.Model):
    # All team videos belong to a project, whether the team has enabled them or
    # not; the default project is just a convenience UI that pretends to be
    # part of the team. If this ever gets changed, you need to change
    # migrations/0044.
DEFAULT_NAME = "_root"
team = models.ForeignKey(Team)
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(blank=True)
name = models.CharField(max_length=255, null=False)
description = models.TextField(blank=True, null=True, max_length=2048)
guidelines = models.TextField(blank=True, null=True, max_length=2048)
slug = models.SlugField(blank=True)
order = models.PositiveIntegerField(default=0)
workflow_enabled = models.BooleanField(default=False)
objects = ProjectManager()
def __unicode__(self):
if self.is_default_project:
return u"---------"
return u"%s" % (self.name)
def display(self, default_project_label=None):
if self.is_default_project and default_project_label is not None:
return default_project_label
return self.__unicode__()
    def save(self, slug=None, *args, **kwargs):
self.modified = datetime.datetime.now()
if slug is not None:
self.slug = pan_slugify(slug)
elif not self.slug:
self.slug = pan_slugify(self.name)
super(Project, self).save(*args, **kwargs)
@property
def is_default_project(self):
"""Return whether this project is a default project for a team."""
return self.name == Project.DEFAULT_NAME
def get_site_url(self):
"""Return the full, absolute URL for this project, including http:// and the domain."""
return '%s://%s%s' % (DEFAULT_PROTOCOL, Site.objects.get_current().domain, self.get_absolute_url())
@models.permalink
def get_absolute_url(self):
return ('teams:project_video_list', [self.team.slug, self.slug])
@property
def videos_count(self):
"""Return the number of videos in this project.
Caches the result in-object for performance.
"""
if not hasattr(self, '_videos_count'):
setattr(self, '_videos_count', TeamVideo.objects.filter(project=self).count())
return self._videos_count
    def _count_tasks(self):
        # Mirrors Team._count_tasks(), but scoped to this project's videos.
        qs = Task.objects.filter(team_video__project=self, deleted=False,
                                 completed=None)
        # Quick check: if there are more than 1000 tasks, return 1001 and let
        # the UI display "> 1000".
if qs[1000:1001].exists():
return 1001
else:
return qs.count()
@property
def tasks_count(self):
"""Return the number of incomplete, undeleted tasks in this project.
Caches the result in-object for performance.
"""
if not hasattr(self, '_tasks_count'):
setattr(self, '_tasks_count', self._count_tasks())
return self._tasks_count
class Meta:
unique_together = (
("team", "name",),
("team", "slug",),
)
permissions = PROJECT_PERMISSIONS
# TeamVideo
class TeamVideo(models.Model):
THUMBNAIL_SIZE = (288, 162)
team = models.ForeignKey(Team)
video = models.OneToOneField(Video)
description = models.TextField(blank=True,
help_text=_(u'Use this space to explain why you or your team need to '
u'caption or subtitle this video. Adding a note makes '
u'volunteers more likely to help out!'))
thumbnail = S3EnabledImageField(upload_to='teams/video_thumbnails/', null=True, blank=True,
help_text=_(u'We automatically grab thumbnails for certain sites, e.g. Youtube'),
thumb_sizes=(THUMBNAIL_SIZE, (120,90),))
all_languages = models.BooleanField(_('Need help with all languages'), default=False,
help_text=_(u'If you check this, other languages will not be displayed.'))
added_by = models.ForeignKey(User, null=True)
# this is an auto_add like field, but done on the model save so the
# admin doesn't throw a fit
created = models.DateTimeField(blank=True)
partner_id = models.CharField(max_length=100, blank=True, default="")
project = models.ForeignKey(Project)
class Meta:
unique_together = (('team', 'video'),)
def __unicode__(self):
return unicode(self.video)
@models.permalink
def get_absolute_url(self):
return ('teams:team_video', [self.pk])
def get_tasks_page_url(self):
return "%s?team_video=%s" % (self.team.get_tasks_page_url(), self.pk)
def get_thumbnail(self):
if self.thumbnail:
return self.thumbnail.thumb_url(*TeamVideo.THUMBNAIL_SIZE)
video_thumb = self.video.get_thumbnail(fallback=False)
if video_thumb:
return video_thumb
return "%simages/video-no-thumbnail-medium.png" % settings.STATIC_URL
def _original_language(self):
if not hasattr(self, 'original_language_code'):
sub_lang = self.video.subtitle_language()
setattr(self, 'original_language_code', None if not sub_lang else sub_lang.language)
return getattr(self, 'original_language_code')
def save(self, *args, **kwargs):
if not hasattr(self, "project"):
self.project = self.team.default_project
assert self.project.team == self.team, \
"%s: Team (%s) is not equal to project's (%s) team (%s)"\
% (self, self.team, self.project, self.project.team)
if not self.pk:
self.created = datetime.datetime.now()
self.video.cache.invalidate()
self.video.clear_team_video_cache()
super(TeamVideo, self).save(*args, **kwargs)
def is_checked_out(self, ignore_user=None):
'''Return whether this video is checked out in a task.
If a user is given, checkouts by that user will be ignored. This
provides a way to ask "can user X check out or work on this task?".
This is similar to the writelocking done on Videos and
SubtitleLanguages.
'''
tasks = self.task_set.filter(
# Find all tasks for this video which:
deleted=False, # - Aren't deleted
assignee__isnull=False, # - Are assigned to someone
language="", # - Aren't specific to a language
completed__isnull=True, # - Are unfinished
)
if ignore_user:
tasks = tasks.exclude(assignee=ignore_user)
return tasks.exists()
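    # Example (illustrative): asking "could `user` work on this video?" while
    # ignoring any checkout that user already holds.
    #
    #   if not team_video.is_checked_out(ignore_user=user):
    #       pass  # safe to offer the video to `user`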
# Convenience functions
def subtitles_started(self):
"""Return whether subtitles have been started for this video."""
from subtitles.models import SubtitleLanguage
return (SubtitleLanguage.objects.having_nonempty_versions()
.filter(video=self.video)
.exists())
def subtitles_finished(self):
"""Return whether at least one set of subtitles has been finished for this video."""
qs = (self.video.newsubtitlelanguage_set.having_public_versions()
.filter(subtitles_complete=True))
for lang in qs:
if lang.is_synced():
return True
return False
def get_workflow(self):
"""Return the appropriate Workflow for this TeamVideo."""
return Workflow.get_for_team_video(self)
def move_to(self, new_team, project=None):
"""
Moves this TeamVideo to a new team.
This method expects you to have run the correct permissions checks.
"""
old_team = self.team
if old_team == new_team and project == self.project:
return
within_team = (old_team == new_team)
# these imports are here to avoid circular imports, hacky
from teams.signals import api_teamvideo_new
from teams.signals import video_moved_from_team_to_team
from videos import metadata_manager
# For now, we'll just delete any tasks associated with the moved video.
if not within_team:
self.task_set.update(deleted=True)
# We move the video by just switching the team, instead of deleting and
# recreating it.
self.team = new_team
# projects are always team dependent:
if project:
self.project = project
else:
self.project = new_team.default_project
self.save()
if not within_team:
# We need to make any as-yet-unmoderated versions public.
# TODO: Dedupe this and the team video delete signal.
video = self.video
video.newsubtitleversion_set.extant().update(visibility='public')
video.is_public = new_team.is_visible
video.moderated_by = new_team if new_team.moderates_videos() else None
video.save()
TeamVideoMigration.objects.create(from_team=old_team,
to_team=new_team,
to_project=self.project)
# Update search data and other things
video_changed_tasks.delay(video.pk)
# Create any necessary tasks.
autocreate_tasks(self)
# fire a http notification that a new video has hit this team:
api_teamvideo_new.send(self)
video_moved_from_team_to_team.send(sender=self,
destination_team=new_team, video=self.video)
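    # Hedged usage sketch (caller and permission check are hypothetical):
    #
    #     if can_move_video(team_video, user):   # some permissions check
    #         team_video.move_to(new_team)       # lands in the default project
    #
    # Moving across teams soft-deletes tasks and makes unmoderated versions
    # public; moving within the same team only changes the project.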
def get_task_for_editor(self, language_code):
if not hasattr(self, '_editor_task'):
self._editor_task = self._get_task_for_editor(language_code)
return self._editor_task
def _get_task_for_editor(self, language_code):
task_set = self.task_set.incomplete().filter(language=language_code)
# 2533: We can get 2 review tasks if we include translate/transcribe
# tasks in the results. This is because when we have a task id and
# the user clicks endorse, we do the following:
# - save the subtitles
# - save the task, setting subtitle_version to the version that we
# just saved
#
# However, the task code creates a task on both of those steps. I'm not
# sure exactly what the old editor does to make this not happen, but
# it's safest to just not send task_id in that case
task_set = task_set.filter(type__in=(Task.TYPE_IDS['Review'],
Task.TYPE_IDS['Approve']))
        # This assumes there is only 1 incomplete task at once, hopefully
# that's a good enough assumption to hold until we dump tasks for the
# collab model.
tasks = list(task_set[:1])
if tasks:
return tasks[0]
else:
return None
@staticmethod
def get_videos_non_language_ids(team, language_code, non_empty_language_code=False):
if non_empty_language_code:
return TeamVideo.objects.filter(
team=team).exclude(
video__primary_audio_language_code__gt=language_code).values_list('id', flat=True)
return TeamVideo.objects.filter(
team=team).exclude(
video__primary_audio_language_code=language_code).values_list('id', flat=True)
class TeamVideoMigration(models.Model):
from_team = models.ForeignKey(Team, related_name='+')
to_team = models.ForeignKey(Team, related_name='+')
to_project = models.ForeignKey(Project, related_name='+')
datetime = models.DateTimeField()
def __init__(self, *args, **kwargs):
if 'datetime' not in kwargs:
kwargs['datetime'] = self.now()
models.Model.__init__(self, *args, **kwargs)
@staticmethod
def now():
# Make now a function so we can patch it in the unittests
return datetime.datetime.now()
def _create_translation_tasks(team_video, subtitle_version=None):
"""Create any translation tasks that should be autocreated for this video.
subtitle_version should be the original SubtitleVersion that these tasks
will probably be translating from.
"""
preferred_langs = TeamLanguagePreference.objects.get_preferred(team_video.team)
for lang in preferred_langs:
# Don't create tasks for languages that are already complete.
sl = team_video.video.subtitle_language(lang)
if sl and sl.is_complete_and_synced():
continue
# Don't create tasks for languages that already have one. This includes
# review/approve tasks and such.
# Doesn't matter if it's complete or not.
task_exists = Task.objects.not_deleted().filter(
team=team_video.team, team_video=team_video, language=lang
).exists()
if task_exists:
continue
# Otherwise, go ahead and create it.
task = Task(team=team_video.team, team_video=team_video,
language=lang, type=Task.TYPE_IDS['Translate'])
# we should only update the team video after all tasks for
# this video are saved, else we end up with a lot of
# wasted tasks
task.save(update_team_video_index=False)
tasks.update_one_team_video.delay(team_video.pk)
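# Illustrative walk-through (not from the original code): for a team whose
# preferred languages are 'fr' and 'de', publishing the first English
# subtitles on a video leads _create_translation_tasks() to create one
# incomplete Translate task per preferred language, skipping any language
# that is already complete or already has a task.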
def autocreate_tasks(team_video):
workflow = Workflow.get_for_team_video(team_video)
existing_subtitles = team_video.video.completed_subtitle_languages(public_only=True)
# We may need to create a transcribe task, if there are no existing subs.
if workflow.autocreate_subtitle and not existing_subtitles:
if not team_video.task_set.not_deleted().exists():
original_language = team_video.video.primary_audio_language_code
Task(team=team_video.team,
team_video=team_video,
subtitle_version=None,
language= original_language or '',
type=Task.TYPE_IDS['Subtitle']
).save()
# If there are existing subtitles, we may need to create translate tasks.
#
# TODO: This sets the "source version" for the translations to an arbitrary
# language's version. In practice this probably won't be a problem
# because most teams will transcribe one language and then send to a
# new team for translation, but we can probably be smarter about this
# if we spend some time.
if workflow.autocreate_translate and existing_subtitles:
_create_translation_tasks(team_video)
def team_video_save(sender, instance, created, **kwargs):
"""Update the Solr index for this team video.
TODO: Rename this to something more specific.
"""
tasks.update_one_team_video.delay(instance.id)
def team_video_delete(sender, instance, **kwargs):
"""Perform necessary actions for when a TeamVideo is deleted.
TODO: Split this up into separate signals.
"""
from videos import metadata_manager
# not using an async task for this since the async task
# could easily execute way after the instance is gone,
# and backend.remove requires the instance.
tv_search_index = site.get_index(TeamVideo)
tv_search_index.backend.remove(instance)
try:
video = instance.video
# we need to publish all unpublished subs for this video:
NewSubtitleVersion.objects.filter(video=video,
visibility='private').update(visibility='public')
video.is_public = True
video.moderated_by = None
video.save()
metadata_manager.update_metadata(video.pk)
video.update_search_index()
except Video.DoesNotExist:
pass
if instance.video_id is not None:
Video.cache.invalidate_by_pk(instance.video_id)
def on_language_deleted(sender, **kwargs):
"""When a language is deleted, delete all tasks associated with it."""
team_video = sender.video.get_team_video()
if not team_video:
return
Task.objects.filter(team_video=team_video,
language=sender.language_code).delete()
# check if there are no more source languages for the video, and in that
# case delete all transcribe tasks. Don't delete:
# - transcribe tasks that have already been started
# - review tasks
# - approve tasks
if not sender.video.has_public_version():
# filtering on new_subtitle_version=None excludes all 3 cases where we
# don't want to delete tasks
Task.objects.filter(team_video=team_video,
new_subtitle_version=None).delete()
def team_video_autocreate_task(sender, instance, created, raw, **kwargs):
"""Create subtitle/translation tasks for a newly added TeamVideo, if necessary."""
if created and not raw:
autocreate_tasks(instance)
def team_video_add_video_moderation(sender, instance, created, raw, **kwargs):
"""Set the .moderated_by attribute on a newly created TeamVideo's Video, if necessary."""
if created and not raw and instance.team.moderates_videos():
instance.video.moderated_by = instance.team
instance.video.save()
def team_video_rm_video_moderation(sender, instance, **kwargs):
"""Clear the .moderated_by attribute on a newly deleted TeamVideo's Video, if necessary."""
try:
        # when removing a video, this will be triggered by the fk constraint
        # and the video will already be removed
instance.video.moderated_by = None
instance.video.save()
except Video.DoesNotExist:
pass
post_save.connect(team_video_save, TeamVideo, dispatch_uid="teams.teamvideo.team_video_save")
post_save.connect(team_video_autocreate_task, TeamVideo, dispatch_uid='teams.teamvideo.team_video_autocreate_task')
post_save.connect(team_video_add_video_moderation, TeamVideo, dispatch_uid='teams.teamvideo.team_video_add_video_moderation')
post_delete.connect(team_video_delete, TeamVideo, dispatch_uid="teams.teamvideo.team_video_delete")
post_delete.connect(team_video_rm_video_moderation, TeamVideo, dispatch_uid="teams.teamvideo.team_video_rm_video_moderation")
language_deleted.connect(on_language_deleted, dispatch_uid="teams.subtitlelanguage.language_deleted")
# TeamMember
class TeamMemberManager(models.Manager):
use_for_related_fields = True
def create_first_member(self, team, user):
"""Make sure that new teams always have an 'owner' member."""
tm = TeamMember(team=team, user=user, role=ROLE_OWNER)
tm.save()
return tm
def admins(self):
return self.filter(role__in=(ROLE_OWNER, ROLE_ADMIN))
class TeamMember(models.Model):
ROLE_OWNER = ROLE_OWNER
ROLE_ADMIN = ROLE_ADMIN
ROLE_MANAGER = ROLE_MANAGER
ROLE_CONTRIBUTOR = ROLE_CONTRIBUTOR
ROLES = (
(ROLE_OWNER, _("Owner")),
(ROLE_MANAGER, _("Manager")),
(ROLE_ADMIN, _("Admin")),
(ROLE_CONTRIBUTOR, _("Contributor")),
)
team = models.ForeignKey(Team, related_name='members')
user = models.ForeignKey(User, related_name='team_members')
role = models.CharField(max_length=16, default=ROLE_CONTRIBUTOR, choices=ROLES, db_index=True)
created = models.DateTimeField(default=datetime.datetime.now, null=True,
blank=True)
objects = TeamMemberManager()
def __unicode__(self):
return u'%s' % self.user
def save(self, *args, **kwargs):
super(TeamMember, self).save(*args, **kwargs)
Team.cache.invalidate_by_pk(self.team_id)
def delete(self):
super(TeamMember, self).delete()
Team.cache.invalidate_by_pk(self.team_id)
def project_narrowings(self):
"""Return any project narrowings applied to this member."""
return self.narrowings.filter(project__isnull=False)
def language_narrowings(self):
"""Return any language narrowings applied to this member."""
return self.narrowings.filter(project__isnull=True)
def project_narrowings_fast(self):
"""Return any project narrowings applied to this member.
Caches the result in-object for speed.
"""
return [n for n in self.narrowings_fast() if n.project]
def language_narrowings_fast(self):
"""Return any language narrowings applied to this member.
Caches the result in-object for speed.
"""
return [n for n in self.narrowings_fast() if n.language]
def narrowings_fast(self):
"""Return any narrowings (both project and language) applied to this member.
Caches the result in-object for speed.
"""
if hasattr(self, '_cached_narrowings'):
if self._cached_narrowings is not None:
return self._cached_narrowings
self._cached_narrowings = self.narrowings.all()
return self._cached_narrowings
def has_max_tasks(self):
"""Return whether this member has the maximum number of tasks."""
max_tasks = self.team.max_tasks_per_member
if max_tasks:
if self.user.task_set.incomplete().filter(team=self.team).count() >= max_tasks:
return True
return False
def is_manager(self):
"""Test if the user is a manager or above."""
return self.role in (ROLE_OWNER, ROLE_ADMIN, ROLE_MANAGER)
def is_admin(self):
"""Test if the user is an admin or owner."""
return self.role in (ROLE_OWNER, ROLE_ADMIN)
class Meta:
unique_together = (('team', 'user'),)
def clear_tasks(sender, instance, *args, **kwargs):
"""Unassign all tasks assigned to a user.
Used when deleting a user from a team.
"""
tasks = instance.team.task_set.incomplete().filter(assignee=instance.user)
tasks.update(assignee=None)
pre_delete.connect(clear_tasks, TeamMember, dispatch_uid='teams.members.clear-tasks-on-delete')
# MembershipNarrowing
class MembershipNarrowing(models.Model):
"""Represent narrowings that can be made on memberships.
A single MembershipNarrowing can apply to a project or a language, but not both.
"""
member = models.ForeignKey(TeamMember, related_name="narrowings")
project = models.ForeignKey(Project, null=True, blank=True)
language = models.CharField(max_length=24, blank=True,
choices=translation.ALL_LANGUAGE_CHOICES)
added_by = models.ForeignKey(TeamMember, related_name="narrowing_includer", null=True, blank=True)
created = models.DateTimeField(auto_now_add=True, blank=None)
modified = models.DateTimeField(auto_now=True, blank=None)
def __unicode__(self):
if self.project:
return u"Permission restriction for %s to project %s " % (self.member, self.project)
else:
return u"Permission restriction for %s to language %s " % (self.member, self.language)
def save(self, *args, **kwargs):
# Cannot have duplicate narrowings for a language.
if self.language:
duplicate_exists = MembershipNarrowing.objects.filter(
member=self.member, language=self.language
).exclude(id=self.id).exists()
assert not duplicate_exists, "Duplicate language narrowing detected!"
# Cannot have duplicate narrowings for a project.
if self.project:
duplicate_exists = MembershipNarrowing.objects.filter(
member=self.member, project=self.project
).exclude(id=self.id).exists()
assert not duplicate_exists, "Duplicate project narrowing detected!"
super(MembershipNarrowing, self).save(*args, **kwargs)
Team.cache.invalidate_by_pk(self.member.team_id)
def delete(self):
super(MembershipNarrowing, self).delete()
Team.cache.invalidate_by_pk(self.member.team_id)
class TeamSubtitleNote(SubtitleNoteBase):
team = models.ForeignKey(Team, related_name='+')
class ApplicationInvalidException(Exception):
pass
class ApplicationManager(models.Manager):
def can_apply(self, team, user):
"""
        A user can apply only if they are not yet a member of the team, the
        team hasn't previously said no to them (by denying an application or
        removing them from the team), and no application is currently pending.
"""
sour_application_exists = self.filter(team=team, user=user, status__in=[
Application.STATUS_MEMBER_REMOVED, Application.STATUS_DENIED,
Application.STATUS_PENDING]).exists()
if sour_application_exists:
return False
return not team.is_member(user)
def open(self, team=None, user=None):
qs = self.filter(status=Application.STATUS_PENDING)
if team:
qs = qs.filter(team=team)
if user:
qs = qs.filter(user=user)
return qs
# Application
class Application(models.Model):
team = models.ForeignKey(Team, related_name='applications')
user = models.ForeignKey(User, related_name='team_applications')
note = models.TextField(blank=True)
    # Status of this application; see STATUSES below for the possible values.
    STATUS_PENDING, STATUS_APPROVED, STATUS_DENIED, STATUS_MEMBER_REMOVED,\
STATUS_MEMBER_LEFT = xrange(0, 5)
STATUSES = (
(STATUS_PENDING, u"Pending"),
(STATUS_APPROVED, u"Approved"),
(STATUS_DENIED, u"Denied"),
(STATUS_MEMBER_REMOVED, u"Member Removed"),
(STATUS_MEMBER_LEFT, u"Member Left"),
)
STATUSES_IDS = dict([choice[::-1] for choice in STATUSES])
status = models.PositiveIntegerField(default=STATUS_PENDING, choices=STATUSES)
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(blank=True, null=True)
# free text keeping a log of changes to this application
history = models.TextField(blank=True, null=True)
objects = ApplicationManager()
class Meta:
unique_together = (('team', 'user', 'status'),)
def approve(self, author, interface):
"""Approve the application.
This will create an appropriate TeamMember if this application has
        not already been acted upon.
"""
if self.status not in (Application.STATUS_PENDING, Application.STATUS_MEMBER_LEFT):
raise ApplicationInvalidException("")
member, created = TeamMember.objects.get_or_create(team=self.team, user=self.user)
if created:
notifier.team_member_new.delay(member.pk)
self.modified = datetime.datetime.now()
self.status = Application.STATUS_APPROVED
self.save(author=author, interface=interface)
return self
def deny(self, author, interface):
"""
        Marks the application as not approved, then queues a Celery task
        that will handle properly denying this application.
"""
if self.status != Application.STATUS_PENDING:
raise ApplicationInvalidException("")
self.modified = datetime.datetime.now()
self.status = Application.STATUS_DENIED
self.save(author=author, interface=interface)
notifier.team_application_denied.delay(self.pk)
return self
def on_member_leave(self, author, interface):
"""
Marks the appropriate status, but users can still
reapply to a team if they so desire later.
"""
self.status = Application.STATUS_MEMBER_LEFT
self.save(author=author, interface=interface)
def on_member_removed(self, author, interface):
"""
        Marks the appropriate status so that users cannot reapply
to a team after being removed.
"""
self.status = Application.STATUS_MEMBER_REMOVED
self.save(author=author, interface=interface)
def _generate_history_line(self, new_status, author=None, interface=None):
author = author or "?"
interface = interface or "web UI"
        new_status = new_status if new_status is not None else Application.STATUS_PENDING
        status = None
        for value, name in Application.STATUSES:
            if value == new_status:
                status = name
        assert status
return u"%s by %s from %s (%s)\n" % (status, author, interface, datetime.datetime.now())
def save(self, dispatches_http_callback=True, author=None, interface=None, *args, **kwargs):
"""
        Saves the model, but also appends a line to the history for this
        application, e.g.:
- CoolGuy Approved through the web UI.
- Arthur Left team through the web UI.
        This way, we can keep one application per user per team, never
        delete them (so the messages stay current), and still keep track
        of the history.
"""
self.history = (self.history or "") + self._generate_history_line(self.status, author, interface)
super(Application, self).save(*args, **kwargs)
if dispatches_http_callback:
from teams.signals import api_application_new
api_application_new.send(self)
def __unicode__(self):
return "Application: %s - %s - %s" % (self.team.slug, self.user.username, self.get_status_display())
# Invites
class InviteExpiredException(Exception):
pass
class InviteManager(models.Manager):
def pending_for(self, team, user):
return self.filter(team=team, user=user, approved=None)
def acted_on(self, team, user):
        return self.filter(team=team, user=user, approved__isnull=False)
class Invite(models.Model):
team = models.ForeignKey(Team, related_name='invitations')
user = models.ForeignKey(User, related_name='team_invitations')
note = models.TextField(blank=True, max_length=200)
author = models.ForeignKey(User)
role = models.CharField(max_length=16, choices=TeamMember.ROLES,
default=TeamMember.ROLE_CONTRIBUTOR)
# None -> not acted upon
# True -> Approved
# False -> Rejected
approved = models.NullBooleanField(default=None)
objects = InviteManager()
def accept(self):
"""Accept this invitation.
        Creates an appropriate TeamMember record, sends a notification and
        marks the invite as approved.
"""
if self.approved is not None:
raise InviteExpiredException("")
self.approved = True
member, created = TeamMember.objects.get_or_create(
team=self.team, user=self.user, role=self.role)
if created:
notifier.team_member_new.delay(member.pk)
self.save()
return True
def deny(self):
"""Deny this invitation.
Could be useful to send a notification here in the future.
"""
if self.approved is not None:
raise InviteExpiredException("")
self.approved = False
self.save()
def message_json_data(self, data, msg):
data['can-reply'] = False
return data
# Workflows
class Workflow(models.Model):
REVIEW_CHOICES = (
(00, "Don't require review"),
(10, 'Peer must review'),
(20, 'Manager must review'),
(30, 'Admin must review'),
)
REVIEW_NAMES = dict(REVIEW_CHOICES)
REVIEW_IDS = dict([choice[::-1] for choice in REVIEW_CHOICES])
APPROVE_CHOICES = (
(00, "Don't require approval"),
(10, 'Manager must approve'),
(20, 'Admin must approve'),
)
APPROVE_NAMES = dict(APPROVE_CHOICES)
APPROVE_IDS = dict([choice[::-1] for choice in APPROVE_CHOICES])
team = models.ForeignKey(Team)
project = models.ForeignKey(Project, blank=True, null=True)
team_video = models.ForeignKey(TeamVideo, blank=True, null=True)
autocreate_subtitle = models.BooleanField(default=False)
autocreate_translate = models.BooleanField(default=False)
review_allowed = models.PositiveIntegerField(
choices=REVIEW_CHOICES, verbose_name='reviewers', default=0)
approve_allowed = models.PositiveIntegerField(
choices=APPROVE_CHOICES, verbose_name='approvers', default=0)
created = models.DateTimeField(auto_now_add=True, editable=False)
modified = models.DateTimeField(auto_now=True, editable=False)
class Meta:
unique_together = ('team', 'project', 'team_video')
@classmethod
def _get_target_team(cls, id, type):
"""Return the team for the given target.
The target is identified by id (its PK as an integer) and type (a string
of 'team_video', 'project', or 'team').
"""
if type == 'team_video':
return TeamVideo.objects.select_related('team').get(pk=id).team
elif type == 'project':
return Project.objects.select_related('team').get(pk=id).team
else:
return Team.objects.get(pk=id)
@classmethod
def get_for_target(cls, id, type, workflows=None):
'''Return the most specific Workflow for the given target.
If target object does not exist, None is returned.
If workflows is given, it should be a QS or List of all Workflows for
the TeamVideo's team. This will let you look it up yourself once and
use it in many of these calls to avoid hitting the DB each time.
If workflows is not given it will be looked up with one DB query.
'''
if not workflows:
team = Workflow._get_target_team(id, type)
workflows = list(Workflow.objects.filter(team=team.id)
.select_related('project', 'team',
'team_video'))
else:
team = workflows[0].team
default_workflow = Workflow(team=team)
if not workflows:
return default_workflow
if type == 'team_video':
try:
return [w for w in workflows
if w.team_video and w.team_video.id == id][0]
except IndexError:
# If there's no video-specific workflow for this video, there
# might be a workflow for its project, so we'll start looking
# for that instead.
team_video = TeamVideo.objects.get(pk=id)
id, type = team_video.project_id, 'project'
if type == 'project':
try:
return [w for w in workflows
if w.project and w.project.workflow_enabled
and w.project.id == id and not w.team_video][0]
except IndexError:
# If there's no project-specific workflow for this project,
# there might be one for its team, so we'll fall through.
pass
if not team.workflow_enabled:
return default_workflow
return [w for w in workflows
if (not w.project) and (not w.team_video)][0]
@classmethod
def get_for_team_video(cls, team_video, workflows=None):
'''Return the most specific Workflow for the given team_video.
If workflows is given, it should be a QuerySet or List of all Workflows
for the TeamVideo's team. This will let you look it up yourself once
and use it in many of these calls to avoid hitting the DB each time.
If workflows is not given it will be looked up with one DB query.
NOTE: This function caches the workflow for performance reasons. If the
workflow changes within the space of a single request that
_cached_workflow should be cleared.
'''
if not hasattr(team_video, '_cached_workflow'):
team_video._cached_workflow = Workflow.get_for_target(
team_video.id, 'team_video', workflows)
return team_video._cached_workflow
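    # Illustrative note: because of the per-object cache above, code that
    # changes a workflow mid-request and needs a fresh lookup can do
    # (sketch only):
    #
    #     if hasattr(team_video, '_cached_workflow'):
    #         del team_video._cached_workflow
    #     workflow = Workflow.get_for_team_video(team_video)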
@classmethod
def get_for_project(cls, project, workflows=None):
'''Return the most specific Workflow for the given project.
If workflows is given, it should be a QuerySet or List of all Workflows
for the Project's team. This will let you look it up yourself once
and use it in many of these calls to avoid hitting the DB each time.
If workflows is not given it will be looked up with one DB query.
'''
return Workflow.get_for_target(project.id, 'project', workflows)
@classmethod
def add_to_team_videos(cls, team_videos):
'''Add the appropriate Workflow objects to each TeamVideo as .workflow.
This will only perform one DB query, and it will add the most specific
workflow possible to each TeamVideo.
This only exists for performance reasons.
'''
if not team_videos:
return []
workflows = list(Workflow.objects.filter(team=team_videos[0].team))
for tv in team_videos:
tv.workflow = Workflow.get_for_team_video(tv, workflows)
def get_specific_target(self):
"""Return the most specific target that this workflow applies to."""
return self.team_video or self.project or self.team
def __unicode__(self):
target = self.get_specific_target()
return u'Workflow %s for %s (%s %d)' % (
self.pk, target, target.__class__.__name__, target.pk)
# Convenience functions for checking if a step of the workflow is enabled.
@property
def review_enabled(self):
"""Return whether any form of review is enabled for this workflow."""
return True if self.review_allowed else False
@property
def approve_enabled(self):
"""Return whether any form of approval is enabled for this workflow."""
return True if self.approve_allowed else False
@property
def requires_review_or_approval(self):
"""Return whether a given workflow requires review or approval."""
return self.approve_enabled or self.review_enabled
@property
def requires_tasks(self):
"""Return whether a given workflow requires the use of tasks."""
return (self.requires_review_or_approval or self.autocreate_subtitle
or self.autocreate_translate)
# Tasks
class TaskManager(models.Manager):
def not_deleted(self):
"""Return a QS of tasks that are not deleted."""
return self.get_query_set().filter(deleted=False)
def incomplete(self):
"""Return a QS of tasks that are not deleted or completed."""
return self.not_deleted().filter(completed=None)
def complete(self):
"""Return a QS of tasks that are not deleted, but are completed."""
return self.not_deleted().filter(completed__isnull=False)
def _type(self, types, completed=None, approved=None):
"""Return a QS of tasks that are not deleted and are one of the given types.
types should be a list of strings matching a label in Task.TYPE_CHOICES.
completed should be one of:
* True (only show completed tasks)
* False (only show incomplete tasks)
* None (don't filter on completion status)
approved should be either None or a string matching a label in
Task.APPROVED_CHOICES.
"""
type_ids = [Task.TYPE_IDS[type] for type in types]
qs = self.not_deleted().filter(type__in=type_ids)
if completed == False:
qs = qs.filter(completed=None)
elif completed == True:
qs = qs.filter(completed__isnull=False)
if approved:
qs = qs.filter(approved=Task.APPROVED_IDS[approved])
return qs
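    # Illustrative usage of _type() via the helpers below (not original code):
    #
    #     Task.objects.incomplete_review_or_approve().filter(team=team)
    #     Task.objects.complete_review(approved='Rejected')
    #
    # Both return ordinary QuerySets that can be filtered further.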
def incomplete_subtitle(self):
"""Return a QS of subtitle tasks that are not deleted or completed."""
return self._type(['Subtitle'], False)
def incomplete_translate(self):
"""Return a QS of translate tasks that are not deleted or completed."""
return self._type(['Translate'], False)
def incomplete_review(self):
"""Return a QS of review tasks that are not deleted or completed."""
return self._type(['Review'], False)
def incomplete_approve(self):
"""Return a QS of approve tasks that are not deleted or completed."""
return self._type(['Approve'], False)
def incomplete_subtitle_or_translate(self):
"""Return a QS of subtitle or translate tasks that are not deleted or completed."""
return self._type(['Subtitle', 'Translate'], False)
def incomplete_review_or_approve(self):
"""Return a QS of review or approve tasks that are not deleted or completed."""
return self._type(['Review', 'Approve'], False)
def complete_subtitle(self):
"""Return a QS of subtitle tasks that are not deleted, but are completed."""
return self._type(['Subtitle'], True)
def complete_translate(self):
"""Return a QS of translate tasks that are not deleted, but are completed."""
return self._type(['Translate'], True)
def complete_review(self, approved=None):
"""Return a QS of review tasks that are not deleted, but are completed.
If approved is given the tasks are further filtered on their .approved
attribute. It must be a string matching one of the labels in
Task.APPROVED_CHOICES, like 'Rejected'.
"""
return self._type(['Review'], True, approved)
def complete_approve(self, approved=None):
"""Return a QS of approve tasks that are not deleted, but are completed.
If approved is given the tasks are further filtered on their .approved
attribute. It must be a string matching one of the labels in
Task.APPROVED_CHOICES, like 'Rejected'.
"""
return self._type(['Approve'], True, approved)
def complete_subtitle_or_translate(self):
"""Return a QS of subtitle or translate tasks that are not deleted, but are completed."""
return self._type(['Subtitle', 'Translate'], True)
def complete_review_or_approve(self, approved=None):
"""Return a QS of review or approve tasks that are not deleted, but are completed.
If approved is given the tasks are further filtered on their .approved
attribute. It must be a string matching one of the labels in
Task.APPROVED_CHOICES, like 'Rejected'.
"""
return self._type(['Review', 'Approve'], True, approved)
def all_subtitle(self):
"""Return a QS of subtitle tasks that are not deleted."""
return self._type(['Subtitle'])
def all_translate(self):
"""Return a QS of translate tasks that are not deleted."""
return self._type(['Translate'])
def all_review(self):
"""Return a QS of review tasks that are not deleted."""
return self._type(['Review'])
def all_approve(self):
"""Return a QS of tasks that are not deleted."""
return self._type(['Approve'])
def all_subtitle_or_translate(self):
"""Return a QS of subtitle or translate tasks that are not deleted."""
return self._type(['Subtitle', 'Translate'])
def all_review_or_approve(self):
"""Return a QS of review or approve tasks that are not deleted."""
return self._type(['Review', 'Approve'])
class Task(models.Model):
TYPE_CHOICES = (
(10, 'Subtitle'),
(20, 'Translate'),
(30, 'Review'),
(40, 'Approve'),
)
TYPE_NAMES = dict(TYPE_CHOICES)
TYPE_IDS = dict([choice[::-1] for choice in TYPE_CHOICES])
APPROVED_CHOICES = (
(10, 'In Progress'),
(20, 'Approved'),
(30, 'Rejected'),
)
APPROVED_NAMES = dict(APPROVED_CHOICES)
APPROVED_IDS = dict([choice[::-1] for choice in APPROVED_CHOICES])
APPROVED_FINISHED_IDS = (20, 30)
type = models.PositiveIntegerField(choices=TYPE_CHOICES)
team = models.ForeignKey(Team)
team_video = models.ForeignKey(TeamVideo)
language = models.CharField(max_length=16,
choices=translation.ALL_LANGUAGE_CHOICES,
blank=True, db_index=True)
assignee = models.ForeignKey(User, blank=True, null=True)
subtitle_version = models.ForeignKey(SubtitleVersion, blank=True, null=True)
new_subtitle_version = models.ForeignKey(NewSubtitleVersion,
blank=True, null=True)
# The original source version being reviewed or approved.
#
# For example, if person A creates two versions while working on a subtitle
# task:
#
# v1 v2
# --o---o
# s s
#
# and then the reviewer and approver make some edits
#
# v1 v2 v3 v4 v5
# --o---o---o---o---o
# s s r r a
# *
#
# the review_base_version will be v2. Once approved, if an edit is made it
# needs to be approved as well, and the same thing happens:
#
# v1 v2 v3 v4 v5 v6 v7
# --o---o---o---o---o---o---o
# s s r r a e a
# *
#
# This is used when rejecting versions, and may be used elsewhere in the
# future as well.
review_base_version = models.ForeignKey(SubtitleVersion, blank=True,
null=True,
related_name='tasks_based_on')
new_review_base_version = models.ForeignKey(NewSubtitleVersion, blank=True,
null=True,
related_name='tasks_based_on_new')
deleted = models.BooleanField(default=False)
# TODO: Remove this field.
public = models.BooleanField(default=False)
created = models.DateTimeField(auto_now_add=True, editable=False)
modified = models.DateTimeField(auto_now=True, editable=False)
completed = models.DateTimeField(blank=True, null=True)
expiration_date = models.DateTimeField(blank=True, null=True)
# Arbitrary priority for tasks. Some teams might calculate this
# on complex criteria and expect us to be able to sort tasks on it.
# Higher numbers mean higher priority
priority = models.PositiveIntegerField(blank=True, default=0, db_index=True)
# Review and Approval -specific fields
approved = models.PositiveIntegerField(choices=APPROVED_CHOICES,
null=True, blank=True)
body = models.TextField(blank=True, default="")
objects = TaskManager()
def __unicode__(self):
return u'Task %s (%s) for %s' % (self.id or "unsaved",
self.get_type_display(),
self.team_video)
@property
def summary(self):
"""
Return a brief summary of the task
"""
output = unicode(self.team_video)
if self.body:
output += unicode(self.body.split('\n',1)[0].strip()[:20])
return output
@staticmethod
def now():
"""datetime.datetime.now as a method
This lets us patch it in the unittests.
"""
return datetime.datetime.now()
def is_subtitle_task(self):
return self.type == Task.TYPE_IDS['Subtitle']
def is_translate_task(self):
return self.type == Task.TYPE_IDS['Translate']
def is_review_task(self):
return self.type == Task.TYPE_IDS['Review']
def is_approve_task(self):
return self.type == Task.TYPE_IDS['Approve']
@property
def workflow(self):
'''Return the most specific workflow for this task's TeamVideo.'''
return Workflow.get_for_team_video(self.team_video)
@staticmethod
def add_cached_video_urls(tasks):
"""Add the cached_video_url attribute to a list of atkss
cached_video_url is the URL as a string for the video.
"""
team_video_pks = [t.team_video_id for t in tasks]
video_urls = (VideoUrl.objects
.filter(video__teamvideo__id__in=team_video_pks)
.filter(primary=True))
video_url_map = dict((vu.video_id, vu.effective_url)
for vu in video_urls)
for t in tasks:
t.cached_video_url = video_url_map.get(t.team_video.video_id)
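    # Illustrative usage (the surrounding view code is hypothetical):
    #
    #     tasks = list(Task.objects.incomplete().filter(team=team)[:20])
    #     Task.add_cached_video_urls(tasks)
    #     # each task now has .cached_video_url ready for the template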
def _add_comment(self, lang_ct=None):
"""Add a comment on the SubtitleLanguage for this task with the body as content."""
if self.body.strip():
if lang_ct is None:
lang_ct = ContentType.objects.get_for_model(NewSubtitleLanguage)
comment = Comment(
content=self.body,
object_pk=self.new_subtitle_version.subtitle_language.pk,
content_type=lang_ct,
submit_date=self.completed,
user=self.assignee,
)
comment.save()
notifier.send_video_comment_notification.delay(
comment.pk, version_pk=self.new_subtitle_version.pk)
def future(self):
"""Return whether this task expires in the future."""
return self.expiration_date > self.now()
# Functions related to task completion.
def _send_back(self, sends_notification=True):
"""Handle "rejection" of this task.
This will:
* Create a new task with the appropriate type (translate or subtitle).
* Try to reassign it to the previous assignee, leaving it unassigned
if that's not possible.
* Send a notification unless sends_notification is given as False.
NOTE: This function does not modify the *current* task in any way.
"""
# when sending back, instead of always sending back
# to the first step (translate/subtitle) go to the
# step before this one:
# Translate/Subtitle -> Review -> Approve
        # also, only approve and review tasks can be sent back.
if self.type == Task.TYPE_IDS['Approve'] and self.workflow.review_enabled:
type = Task.TYPE_IDS['Review']
else:
is_primary = (self.new_subtitle_version
.subtitle_language
.is_primary_audio_language())
if is_primary:
type = Task.TYPE_IDS['Subtitle']
else:
type = Task.TYPE_IDS['Translate']
        # let's guess which assignee we should use
# by finding the last user that did this task type
previous_task = Task.objects.complete().filter(
team_video=self.team_video, language=self.language, team=self.team, type=type
).order_by('-completed')[:1]
if previous_task:
assignee = previous_task[0].assignee
else:
assignee = None
        # The target assignee may have left the team in the meantime.
if not self.team.members.filter(user=assignee).exists():
assignee = None
task = Task(team=self.team, team_video=self.team_video,
language=self.language, type=type,
assignee=assignee)
task.new_subtitle_version = self.new_subtitle_version
task.set_expiration()
task.save()
if sends_notification:
            # notify original submitter (assignee of self)
notifier.reviewed_and_sent_back.delay(self.pk)
return task
def complete_approved(self, user):
"""Mark a review/approve task as Approved and complete it.
        :param user: user who is approving the task
:returns: next task in the workflow.
"""
self.assignee = user
self.approved = Task.APPROVED_IDS['Approved']
return self.complete()
def complete_rejected(self, user):
"""Mark a review/approve task as Rejected and complete it.
        :param user: user who is rejecting the task
:returns: next task in the workflow.
"""
self.assignee = user
self.approved = Task.APPROVED_IDS['Rejected']
return self.complete()
def complete(self):
'''Mark as complete and return the next task in the process if applicable.'''
self.completed = self.now()
self.save()
return { 'Subtitle': self._complete_subtitle,
'Translate': self._complete_translate,
'Review': self._complete_review,
'Approve': self._complete_approve,
}[Task.TYPE_NAMES[self.type]]()
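    # Rough sketch of the completion flow (illustrative, not original code):
    # complete() dispatches to one of the _complete_* handlers below, which
    # either publish the version or chain the next task, e.g.
    #
    #     next_task = task.complete_approved(user)
    #     if next_task is not None:
    #         ...  # a follow-up Review/Approve task was created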
def _can_publish_directly(self, subtitle_version):
from teams.permissions import can_publish_edits_immediately
type = {10: 'Review',
20: 'Review',
30: 'Approve'}.get(self.type)
tasks = (Task.objects._type([type], True, 'Approved')
.filter(language=self.language))
return (can_publish_edits_immediately(self.team_video,
self.assignee,
self.language) and
subtitle_version and
subtitle_version.previous_version() and
subtitle_version.previous_version().is_public() and
subtitle_version.subtitle_language.is_complete_and_synced() and
tasks.exists())
def _find_previous_assignee(self, type):
"""Find the previous assignee for a new review/approve task for this video.
NOTE: This is different than finding out the person to send a task back
to! This is for saying "who reviewed this task last time?".
For now, we'll assign the review/approval task to whomever did it last
time (if it was indeed done), but only if they're still eligible to
perform it now.
"""
from teams.permissions import can_review, can_approve
if type == 'Approve':
# Check if this is a post-publish edit.
# According to #1039 we don't wanna auto-assign the assignee
version = self.get_subtitle_version()
if (version and
version.is_public() and
version.subtitle_language.is_complete_and_synced()):
return None
type = Task.TYPE_IDS['Approve']
can_do = can_approve
elif type == 'Review':
type = Task.TYPE_IDS['Review']
can_do = partial(can_review, allow_own=True)
else:
return None
last_task = self.team_video.task_set.complete().filter(
language=self.language, type=type
).order_by('-completed')[:1]
if last_task:
candidate = last_task[0].assignee
if candidate and can_do(self.team_video, candidate, self.language):
return candidate
def _complete_subtitle(self):
"""Handle the messy details of completing a subtitle task."""
sv = self.get_subtitle_version()
# TL;DR take a look at #1206 to know why i did this
if self.workflow.requires_review_or_approval and not self._can_publish_directly(sv):
if self.workflow.review_enabled:
task = Task(team=self.team, team_video=self.team_video,
new_subtitle_version=sv,
new_review_base_version=sv,
language=self.language, type=Task.TYPE_IDS['Review'],
assignee=self._find_previous_assignee('Review'))
task.set_expiration()
task.save()
elif self.workflow.approve_enabled:
task = Task(team=self.team, team_video=self.team_video,
new_subtitle_version=sv,
new_review_base_version=sv,
language=self.language, type=Task.TYPE_IDS['Approve'],
assignee=self._find_previous_assignee('Approve'))
task.set_expiration()
task.save()
else:
# Subtitle task is done, and there is no approval or review
# required, so we mark the version as approved.
sv.publish()
# We need to make sure this is updated correctly here.
from videos import metadata_manager
metadata_manager.update_metadata(self.team_video.video.pk)
if self.workflow.autocreate_translate:
# TODO: Switch to autocreate_task?
_create_translation_tasks(self.team_video, sv)
task = None
return task
def _complete_translate(self):
"""Handle the messy details of completing a translate task."""
sv = self.get_subtitle_version()
# TL;DR take a look at #1206 to know why i did this
if self.workflow.requires_review_or_approval and not self._can_publish_directly(sv):
if self.workflow.review_enabled:
task = Task(team=self.team, team_video=self.team_video,
new_subtitle_version=sv,
new_review_base_version=sv,
language=self.language, type=Task.TYPE_IDS['Review'],
assignee=self._find_previous_assignee('Review'))
task.set_expiration()
task.save()
elif self.workflow.approve_enabled:
# The review step may be disabled. If so, we check the approve step.
task = Task(team=self.team, team_video=self.team_video,
new_subtitle_version=sv,
new_review_base_version=sv,
language=self.language, type=Task.TYPE_IDS['Approve'],
assignee=self._find_previous_assignee('Approve'))
task.set_expiration()
task.save()
else:
sv.publish()
# We need to make sure this is updated correctly here.
from videos import metadata_manager
metadata_manager.update_metadata(self.team_video.video.pk)
task = None
return task
def _complete_review(self):
"""Handle the messy details of completing a review task."""
approval = self.approved == Task.APPROVED_IDS['Approved']
sv = self.get_subtitle_version()
if approval:
self._ensure_language_complete(sv.subtitle_language)
self._add_comment()
task = None
if self.workflow.approve_enabled:
# Approval is enabled, so...
if approval:
# If the reviewer thought these subtitles were good we create
# the next task.
task = Task(team=self.team, team_video=self.team_video,
new_subtitle_version=sv,
new_review_base_version=sv,
language=self.language, type=Task.TYPE_IDS['Approve'],
assignee=self._find_previous_assignee('Approve'))
task.set_expiration()
task.save()
# Notify the appropriate users.
notifier.reviewed_and_pending_approval.delay(self.pk)
else:
# Otherwise we send the subtitles back for improvement.
task = self._send_back()
else:
# Approval isn't enabled, so the ruling of this Review task
# determines whether the subtitles go public.
if approval:
# Make these subtitles public!
self.new_subtitle_version.publish()
# If the subtitles are okay, go ahead and autocreate translation
# tasks if necessary.
if self.workflow.autocreate_translate:
_create_translation_tasks(self.team_video, sv)
# Notify the appropriate users and external services.
notifier.reviewed_and_published.delay(self.pk)
else:
# Send the subtitles back for improvement.
task = self._send_back()
# Before we go, we need to record who reviewed these subtitles, so if
# necessary we can "send back" to them later.
if self.assignee:
sv.set_reviewed_by(self.assignee)
return task
def do_complete_approve(self, lang_ct=None):
return self._complete_approve(lang_ct=lang_ct)
def _complete_approve(self, lang_ct=None):
"""Handle the messy details of completing an approve task."""
approval = self.approved == Task.APPROVED_IDS['Approved']
sv = self.get_subtitle_version()
if approval:
self._ensure_language_complete(sv.subtitle_language)
self._add_comment(lang_ct=lang_ct)
if approval:
# The subtitles are acceptable, so make them public!
self.new_subtitle_version.publish()
# Create translation tasks if necessary.
if self.workflow.autocreate_translate:
_create_translation_tasks(self.team_video, sv)
task = None
# Notify the appropriate users.
notifier.approved_notification.delay(self.pk, approval)
else:
# Send the subtitles back for improvement.
task = self._send_back()
# Before we go, we need to record who approved these subtitles, so if
# necessary we can "send back" to them later.
if self.assignee:
sv.set_approved_by(self.assignee)
if approval:
api_subtitles_approved.send(sv)
else:
api_subtitles_rejected.send(sv)
return task
def _ensure_language_complete(self, subtitle_language):
if not subtitle_language.subtitles_complete:
subtitle_language.subtitles_complete = True
subtitle_language.save()
def get_perform_url(self):
"""Return a URL for whatever dialog is used to perform this task."""
return reverse('teams:perform_task', args=(self.team.slug, self.id))
def tasks_page_perform_link_text(self):
"""Get the link text for perform link on the tasks page."""
if self.assignee:
return _('Resume')
else:
return _('Start now')
def get_widget_url(self):
"""Get the URL to edit the video for this task. """
return reverse("subtitles:subtitle-editor", kwargs={
"video_id": self.team_video.video.video_id,
"language_code": self.language
})
def needs_start_dialog(self):
"""Check if this task needs the start dialog.
The only time we need it is when a user is starting a
transcribe/translate task. We don't need it for review/approval, or
if the task is being resumed.
"""
        # We use the start dialog to select two things:
# - primary audio language
# - language of the subtitles
return (self.language == '' or
self.team_video.video.primary_audio_language_code == '')
def get_reviewer(self):
"""For Approve tasks, return the last user to Review these subtitles.
May be None if this task is not an Approve task, or if we can't figure
out the last reviewer for any reason.
"""
if self.get_type_display() == 'Approve':
previous = Task.objects.complete().filter(
team_video=self.team_video,
language=self.language,
team=self.team,
type=Task.TYPE_IDS['Review']).order_by('-completed')[:1]
if previous:
return previous[0].assignee
def set_expiration(self):
"""Set the expiration_date of this task. Does not save().
Requires that self.team and self.assignee be set correctly.
"""
if not self.assignee or not self.team.task_expiration:
self.expiration_date = None
else:
limit = datetime.timedelta(days=self.team.task_expiration)
self.expiration_date = self.now() + limit
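    # Example (illustrative): with team.task_expiration == 7, assigning a task
    # and calling task.set_expiration() sets expiration_date to now() + 7 days;
    # the caller is still responsible for calling save().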
def get_subtitle_version(self):
""" Gets the subtitle version related to this task.
        If the task has a subtitle_version attached, return it; if not,
        try to find it through the subtitle language of the video.
Note: we need this since we don't attach incomplete subtitle_version
to the task (and if we do we need to set the status to unmoderated and
that causes the version to get published).
"""
# autocreate sets the subtitle_version to another
# language's subtitle_version and that was breaking
# not only the interface but the new upload method.
if (self.new_subtitle_version and
self.new_subtitle_version.language_code == self.language):
return self.new_subtitle_version
if not hasattr(self, "_subtitle_version"):
language = self.team_video.video.subtitle_language(self.language)
self._subtitle_version = (language.get_tip(public=False)
if language else None)
return self._subtitle_version
def is_blocked(self):
"""Return whether this task is "blocked".
"Blocked" means that it's a translation task but the source language
isn't ready to be translated yet.
"""
subtitle_version = self.get_subtitle_version()
if not subtitle_version:
return False
source_language = subtitle_version.subtitle_language.get_translation_source_language()
if not source_language:
return False
can_perform = (source_language and
source_language.is_complete_and_synced())
if self.get_type_display() != 'Translate':
if self.get_type_display() in ('Review', 'Approve'):
# review and approve tasks will be blocked if they're
# a translation and they have a draft and the source
                # language no longer has a published version
if not can_perform or source_language.language_code == self.language:
return True
return not can_perform
def save(self, update_team_video_index=True, *args, **kwargs):
is_review_or_approve = self.get_type_display() in ('Review', 'Approve')
if self.language:
            if self.language not in translation.ALL_LANGUAGE_CODES:
raise ValidationError(
"Subtitle Language should be a valid code.")
result = super(Task, self).save(*args, **kwargs)
if update_team_video_index:
tasks.update_one_team_video.delay(self.team_video.pk)
Video.cache.invalidate_by_pk(self.team_video.video_id)
return result
# Settings
class SettingManager(models.Manager):
use_for_related_fields = True
def guidelines(self):
"""Return a QS of settings related to team guidelines."""
keys = [key for key, name in Setting.KEY_CHOICES
if name.startswith('guidelines_')]
return self.get_query_set().filter(key__in=keys)
def messages(self):
"""Return a QS of settings related to team messages."""
keys = [key for key, name in Setting.KEY_CHOICES
if name.startswith('messages_')]
return self.get_query_set().filter(key__in=keys)
def messages_guidelines(self):
"""Return a QS of settings related to team messages or guidelines."""
return self.get_query_set().filter(key__in=Setting.MESSAGE_KEYS)
def with_names(self, names):
return self.filter(key__in=[Setting.KEY_IDS[name] for name in names])
def all_messages(self):
messages = {}
for key in Setting.MESSAGE_KEYS:
name = Setting.KEY_NAMES[key]
messages[name] = self.instance.get_default_message(name)
messages.update({
s.key_name: s.data
for s in self.messages_guidelines()
if s.data
})
return messages
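    # Illustrative usage through the related manager (team is a Team instance):
    #
    #     team.settings.guidelines()                      # guideline settings only
    #     team.settings.all_messages()['messages_invite'] # text or team default
    #
    # all_messages() falls back to the team's get_default_message() for unset keys.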
class Setting(models.Model):
KEY_CHOICES = (
(100, 'messages_invite'),
(101, 'messages_manager'),
(102, 'messages_admin'),
(103, 'messages_application'),
(104, 'messages_joins'),
(200, 'guidelines_subtitle'),
(201, 'guidelines_translate'),
(202, 'guidelines_review'),
# 300s means if this team will block those notifications
(300, 'block_invitation_sent_message'),
(301, 'block_application_sent_message'),
(302, 'block_application_denided_message'),
(303, 'block_team_member_new_message'),
(304, 'block_team_member_leave_message'),
(305, 'block_task_assigned_message'),
(306, 'block_reviewed_and_published_message'),
(307, 'block_reviewed_and_pending_approval_message'),
(308, 'block_reviewed_and_sent_back_message'),
(309, 'block_approved_message'),
(310, 'block_new_video_message'),
# 400 is for text displayed on web pages
(401, 'pagetext_welcome_heading'),
)
KEY_NAMES = dict(KEY_CHOICES)
KEY_IDS = dict([choice[::-1] for choice in KEY_CHOICES])
MESSAGE_KEYS = [
key for key, name in KEY_CHOICES
if name.startswith('messages_') or name.startswith('guidelines_')
or name.startswith('pagetext_')
]
MESSAGE_DEFAULTS = {
'pagetext_welcome_heading': _("Help %(team)s reach a world audience"),
}
key = models.PositiveIntegerField(choices=KEY_CHOICES)
data = models.TextField(blank=True)
team = models.ForeignKey(Team, related_name='settings')
created = models.DateTimeField(auto_now_add=True, editable=False)
modified = models.DateTimeField(auto_now=True, editable=False)
objects = SettingManager()
class Meta:
unique_together = (('key', 'team'),)
def __unicode__(self):
return u'%s - %s' % (self.team, self.key_name)
@property
def key_name(self):
"""Return the key name for this setting.
TODO: Remove this and replace with get_key_display()?
"""
return Setting.KEY_NAMES[self.key]
# TeamLanguagePreferences
class TeamLanguagePreferenceManager(models.Manager):
def _generate_writable(self, team):
"""Return the set of language codes that are writeable for this team."""
unwritable = self.for_team(team).filter(allow_writes=False, preferred=False).values("language_code")
unwritable = set([x['language_code'] for x in unwritable])
return translation.ALL_LANGUAGE_CODES - unwritable
def _generate_readable(self, team):
"""Return the set of language codes that are readable for this team."""
unreadable = self.for_team(team).filter(allow_reads=False, preferred=False).values("language_code")
unreadable = set([x['language_code'] for x in unreadable])
return translation.ALL_LANGUAGE_CODES - unreadable
def _generate_preferred(self, team):
"""Return the set of language codes that are preferred for this team."""
preferred = self.for_team(team).filter(preferred=True).values("language_code")
return set([x['language_code'] for x in preferred])
def for_team(self, team):
"""Return a QS of all language preferences for the given team."""
return self.get_query_set().filter(team=team)
def on_changed(cls, sender, instance, *args, **kwargs):
"""Perform any necessary actions when a language preference changes.
TODO: Refactor this out of the manager...
"""
from teams.cache import invalidate_lang_preferences
invalidate_lang_preferences(instance.team)
def get_readable(self, team):
"""Return the set of language codes that are readable for this team.
This value may come from memcache if possible.
"""
from teams.cache import get_readable_langs
return get_readable_langs(team)
def get_writable(self, team):
"""Return the set of language codes that are writeable for this team.
This value may come from memcache if possible.
"""
from teams.cache import get_writable_langs
return get_writable_langs(team)
def get_preferred(self, team):
"""Return the set of language codes that are preferred for this team.
This value may come from memcache if possible.
"""
from teams.cache import get_preferred_langs
return get_preferred_langs(team)
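    # Illustrative usage (sketch): callers typically gate work on the cached
    # sets, e.g.
    #
    #     writable = TeamLanguagePreference.objects.get_writable(team)
    #     if language_code not in writable:
    #         ...  # refuse to create tasks/subtitles in this language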
class TeamLanguagePreference(models.Model):
"""Represent language preferences for a given team.
First, TLPs may mark a language as "preferred". If that's the case then the
other attributes of this model are irrelevant and can be ignored.
"Preferred" languages will have translation tasks automatically created for
them when subtitles are added.
If preferred is False, the TLP describes a *restriction* on the language
instead. Writing in that language may be prevented, or both reading and
writing may be prevented.
(Note: "writing" means not only writing new subtitles but also creating
tasks, etc)
This is how the restriction settings should interact. TLP means that we
have created a TeamLanguagePreference for that team and language.
| Action | NO | allow_read=True, | allow_read=False, |
| | TLP | allow_write=False | allow_write=False |
========================================================================================
| assignable as tasks | X | | |
| assignable as narrowing | X | | |
| listed on the widget for viewing | X | X | |
| listed on the widget for improving | X | | |
| returned from the api read operations | X | X | |
| upload / write operations from the api | X | | |
| show up on the start dialog | X | | |
+----------------------------------------+-----+-------------------+-------------------+
Remember, this table only applies if preferred=False. If the language is
preferred the "restriction" attributes are effectively garbage. Maybe we
should make the column nullable to make this more clear?
allow_read=True, allow_write=True, preferred=False is invalid. Just remove
    the row altogether.
"""
team = models.ForeignKey(Team, related_name="lang_preferences")
language_code = models.CharField(max_length=16)
allow_reads = models.BooleanField(default=False)
allow_writes = models.BooleanField(default=False)
preferred = models.BooleanField(default=False)
objects = TeamLanguagePreferenceManager()
class Meta:
unique_together = ('team', 'language_code')
def clean(self, *args, **kwargs):
if self.allow_reads and self.allow_writes:
raise ValidationError("No sense in having all allowed, just remove the preference for this language.")
if self.preferred and (self.allow_reads or self.allow_writes):
raise ValidationError("Cannot restrict a preferred language.")
super(TeamLanguagePreference, self).clean(*args, **kwargs)
def __unicode__(self):
return u"%s preference for team %s" % (self.language_code, self.team)
post_save.connect(TeamLanguagePreference.objects.on_changed, TeamLanguagePreference)
# TeamNotificationSettings
class TeamNotificationSettingManager(models.Manager):
def notify_team(self, team_pk, event_name, **kwargs):
"""Notify the given team of a given event.
Finds the matching notification settings for this team, instantiates
the notifier class, and sends the appropriate notification.
If the notification settings has an email target, sends an email.
If the http settings are filled, then sends the request.
        This can be run as a Celery task, as it requires no objects to be passed.
"""
try:
team = Team.objects.get(pk=team_pk)
except Team.DoesNotExist:
logger.error("A pk for a non-existent team was passed in.",
extra={"team_pk": team_pk, "event_name": event_name})
return
try:
if team.partner:
notification_settings = self.get(partner=team.partner)
else:
notification_settings = self.get(team=team)
except TeamNotificationSetting.DoesNotExist:
return
notification_settings.notify(event_name, **kwargs)
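    # Illustrative call site (keyword arguments are hypothetical and depend on
    # the notification class in use):
    #
    #     TeamNotificationSetting.objects.notify_team(
    #         team.pk, TeamNotificationSetting.EVENT_VIDEO_NEW, video_id=video_id)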
class TeamNotificationSetting(models.Model):
"""Info on how a team should be notified of changes to its videos.
    For now, a team can be notified by having an HTTP request sent with the
    payload as the notification information. This cannot be hardcoded since
    teams might have different URLs for each environment.
Some teams have strict requirements on mapping video ids to their internal
values, and also their own language codes. Therefore we need to configure
a class that can do the correct mapping.
TODO: allow email notifications
"""
EVENT_VIDEO_NEW = "video-new"
EVENT_VIDEO_EDITED = "video-edited"
EVENT_LANGUAGE_NEW = "language-new"
EVENT_LANGUAGE_EDITED = "language-edit"
EVENT_LANGUAGE_DELETED = "language-deleted"
EVENT_SUBTITLE_NEW = "subs-new"
EVENT_SUBTITLE_APPROVED = "subs-approved"
EVENT_SUBTITLE_REJECTED = "subs-rejected"
EVENT_APPLICATION_NEW = 'application-new'
team = models.OneToOneField(Team, related_name="notification_settings",
null=True, blank=True)
partner = models.OneToOneField('Partner',
related_name="notification_settings", null=True, blank=True)
# the url to post the callback to, notifying partners of new video activity
request_url = models.URLField(blank=True, null=True)
basic_auth_username = models.CharField(max_length=255, blank=True, null=True)
basic_auth_password = models.CharField(max_length=255, blank=True, null=True)
# not being used, here to avoid extra migrations in the future
email = models.EmailField(blank=True, null=True)
# integers mapping to classes, see unisubs-integration/notificationsclasses.py
notification_class = models.IntegerField(default=1,)
objects = TeamNotificationSettingManager()
def get_notification_class(self):
try:
from ted.notificationclasses import NOTIFICATION_CLASS_MAP
return NOTIFICATION_CLASS_MAP[self.notification_class]
except ImportError:
logger.exception("Apparently unisubs-integration is not installed")
def notify(self, event_name, **kwargs):
"""Resolve the notification class for this setting and fires notfications."""
notification_class = self.get_notification_class()
if not notification_class:
logger.error("Could not find notification class %s" % self.notification_class)
return
notification = notification_class(self.team, self.partner,
event_name, **kwargs)
if self.request_url:
success, content = notification.send_http_request(
self.request_url,
self.basic_auth_username,
self.basic_auth_password
)
return success, content
# FIXME: spec and test this, for now just return
return
def __unicode__(self):
if self.partner:
return u'NotificationSettings for partner %s' % self.partner
return u'NotificationSettings for team %s' % self.team
class BillingReport(models.Model):
# use BillingRecords to signify completed work
TYPE_BILLING_RECORD = 2
# use approval tasks to signify completed work
TYPE_APPROVAL = 3
# Like TYPE_APPROVAL, but centered on the users who subtitle/review the
# work
TYPE_APPROVAL_FOR_USERS = 4
TYPE_CHOICES = (
(TYPE_BILLING_RECORD, 'Crowd sourced'),
(TYPE_APPROVAL, 'Professional services'),
(TYPE_APPROVAL_FOR_USERS, 'On-demand translators'),
)
teams = models.ManyToManyField(Team, related_name='billing_reports')
start_date = models.DateField()
end_date = models.DateField()
csv_file = S3EnabledFileField(blank=True, null=True,
upload_to='teams/billing/')
processed = models.DateTimeField(blank=True, null=True)
type = models.IntegerField(choices=TYPE_CHOICES,
default=TYPE_BILLING_RECORD)
def __unicode__(self):
if hasattr(self, 'id') and self.id is not None:
team_count = self.teams.all().count()
else:
team_count = 0
return "%s teams (%s - %s)" % (team_count,
self.start_date.strftime('%Y-%m-%d'),
self.end_date.strftime('%Y-%m-%d'))
def _get_approved_tasks(self):
return Task.objects.complete_approve().filter(
approved=Task.APPROVED_IDS['Approved'],
team__in=self.teams.all(),
completed__range=(self.start_date, self.end_date))
def _report_date(self, datetime):
return datetime.strftime('%Y-%m-%d %H:%M:%S')
def generate_rows_type_approval(self):
header = (
'Team',
'Video Title',
'Video ID',
'Project',
'Language',
'Minutes',
'Original',
'Translation?',
'Approver',
'Date',
)
rows = [header]
for approve_task in self._get_approved_tasks():
video = approve_task.team_video.video
project = approve_task.team_video.project.name if approve_task.team_video.project else 'none'
version = approve_task.new_subtitle_version
language = version.subtitle_language
subtitle_task = (Task.objects.complete_subtitle_or_translate()
.filter(team_video=approve_task.team_video,
language=approve_task.language)
.order_by('-completed'))[0]
rows.append((
approve_task.team.name,
video.title_display(),
video.video_id,
project,
approve_task.language,
get_minutes_for_version(version, False),
language.is_primary_audio_language(),
subtitle_task.type==Task.TYPE_IDS['Translate'],
unicode(approve_task.assignee),
self._report_date(approve_task.completed),
))
return rows
def generate_rows_type_approval_for_users(self):
header = (
'User',
'Task Type',
'Team',
'Video Title',
'Video ID',
'Project',
'Language',
'Minutes',
'Original',
'Approver',
'Note',
'Date',
'Pay Rate',
)
data_rows = []
for approve_task in self._get_approved_tasks():
video = approve_task.team_video.video
project = approve_task.team_video.project.name if approve_task.team_video.project else 'none'
version = approve_task.get_subtitle_version()
language = version.subtitle_language
all_tasks = [approve_task]
try:
all_tasks.append((Task.objects.complete_subtitle_or_translate()
.filter(team_video=approve_task.team_video,
language=approve_task.language)
.order_by('-completed'))[0])
except IndexError:
# no subtitling task, probably the review task was manually
# created.
pass
try:
all_tasks.append((Task.objects.complete_review()
.filter(team_video=approve_task.team_video,
language=approve_task.language)
.order_by('-completed'))[0])
except IndexError:
# review not enabled
pass
for task in all_tasks:
data_rows.append((
unicode(task.assignee),
task.get_type_display(),
approve_task.team.name,
video.title_display(),
video.video_id,
project,
language.language_code,
get_minutes_for_version(version, False),
language.is_primary_audio_language(),
unicode(approve_task.assignee),
unicode(task.body),
self._report_date(task.completed),
task.assignee.pay_rate_code,
))
data_rows.sort(key=lambda row: row[0])
return [header] + data_rows
def generate_rows_type_billing_record(self):
rows = []
for i,team in enumerate(self.teams.all()):
rows = rows + BillingRecord.objects.csv_report_for_team(team,
self.start_date, self.end_date, add_header=i == 0)
return rows
def generate_rows(self):
if self.type == BillingReport.TYPE_BILLING_RECORD:
rows = self.generate_rows_type_billing_record()
elif self.type == BillingReport.TYPE_APPROVAL:
rows = self.generate_rows_type_approval()
elif self.type == BillingReport.TYPE_APPROVAL_FOR_USERS:
rows = self.generate_rows_type_approval_for_users()
else:
raise ValueError("Unknown type: %s" % self.type)
return rows
def convert_unicode_to_utf8(self, rows):
def _convert(value):
if isinstance(value, unicode):
return value.encode("utf-8")
else:
return value
return [tuple(_convert(v) for v in row) for row in rows]
def process(self):
"""
Generate the correct rows (including headers), save them to a temporary
file, then set that file on the csv_file property, which, when the S3
storage backend is in use, takes care of exporting it to S3.
"""
try:
rows = self.generate_rows()
except StandardError:
logger.error("Error generating billing report: (id: %s)", self.id)
self.csv_file = None
else:
self.csv_file = self.make_csv_file(rows)
self.processed = datetime.datetime.utcnow()
self.save()
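# Typical lifecycle (illustrative; the field values are assumptions):
#   report = BillingReport.objects.create(start_date=start, end_date=end,
#                                         type=BillingReport.TYPE_BILLING_RECORD)
#   report.teams.add(team)
#   report.process()   # generates rows, writes the CSV and stamps `processed`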
def make_csv_file(self, rows):
rows = self.convert_unicode_to_utf8(rows)
fn = '/tmp/bill-%s-teams-%s-%s-%s-%s.csv' % (
self.teams.all().count(),
self.start_str, self.end_str,
self.get_type_display(), self.pk)
with open(fn, 'w') as f:
writer = csv.writer(f)
writer.writerows(rows)
return File(open(fn, 'r'))
@property
def start_str(self):
return self.start_date.strftime("%Y%m%d")
@property
def end_str(self):
return self.end_date.strftime("%Y%m%d")
class BillingReportGenerator(object):
def __init__(self, all_records, add_header=True):
if add_header:
self.rows = [self.header()]
else:
self.rows = []
all_records = list(all_records)
self.make_language_number_map(all_records)
self.make_languages_without_records(all_records)
for video, records in groupby(all_records, lambda r: r.video):
records = list(records)
if video:
for lang in self.languages_without_records.get(video.id, []):
self.rows.append(
self.make_row_for_lang_without_record(video, lang))
for r in records:
self.rows.append(self.make_row(video, r))
def header(self):
return [
'Video Title',
'Video ID',
'Project',
'Language',
'Minutes',
'Original',
'Language number',
'Team',
'Created',
'Source',
'User',
]
def make_row(self, video, record):
return [
(video and video.title_display()) or "----",
(video and video.video_id) or "deleted",
(record.project.name if record.project else 'none'),
(record.new_subtitle_language and record.new_subtitle_language.language_code) or "----",
record.minutes,
record.is_original,
(self.language_number_map and (record.id in self.language_number_map) and self.language_number_map[record.id]) or "----",
record.team.slug,
record.created.strftime('%Y-%m-%d %H:%M:%S'),
record.source,
record.user.username,
]
def make_language_number_map(self, records):
self.language_number_map = {}
videos = set(r.video for r in records)
video_counts = dict((v and v.id, 0) for v in videos)
qs = (BillingRecord.objects
.filter(video__in=videos)
.order_by('created'))
for record in qs:
vid = record.video and record.video.id
video_counts[vid] += 1
self.language_number_map[record.id] = video_counts[vid]
def make_languages_without_records(self, records):
self.languages_without_records = {}
videos = [r.video for r in records]
language_ids = [r.new_subtitle_language_id for r in records]
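# Raw EXISTS subquery: keep only subtitle languages that have no
# BillingRecord at all, so completed languages that were never billed
# still show up in the report as zero-minute rows.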
no_billing_record_where = """\
NOT EXISTS (
SELECT 1
FROM teams_billingrecord br
WHERE br.new_subtitle_language_id = subtitles_subtitlelanguage.id
)"""
qs = (NewSubtitleLanguage.objects
.filter(video__in=videos, subtitles_complete=True)
.exclude(id__in=language_ids).
extra(where=[no_billing_record_where]))
for lang in qs:
vid = lang.video_id
if vid not in self.languages_without_records:
self.languages_without_records[vid] = [lang]
else:
self.languages_without_records[vid].append(lang)
def make_row_for_lang_without_record(self, video, language):
return [
video.title_display(),
video.video_id,
'none',
language.language_code,
0,
language.is_primary_audio_language(),
0,
'unknown',
language.created.strftime('%Y-%m-%d %H:%M:%S'),
'unknown',
'unknown',
]
class BillingRecordManager(models.Manager):
def data_for_team(self, team, start, end):
return self.filter(team=team, created__gte=start, created__lte=end)
def csv_report_for_team(self, team, start, end, add_header=True):
all_records = self.data_for_team(team, start, end)
generator = BillingReportGenerator(all_records, add_header)
return generator.rows
def insert_records_for_translations(self, billing_record):
"""
If a translation was made from an incomplete language and that language
is completed later on, we must check whether any translations are now
complete and therefore should have billing records created for them.
"""
translations = billing_record.new_subtitle_language.get_dependent_subtitle_languages()
inserted = []
for translation in translations:
version = translation.get_tip(public=False)
if version:
inserted.append(self.insert_record(version))
return filter(bool, inserted)
def insert_record(self, version):
"""
Figures out if this version qualifies for a billing record, and
if so creates one. This should be self-contained, i.e. safe to call
for any version. No records should be created if not needed, and it
won't create multiples.
If this language has translations it will check if any of those are now
eligible for BillingRecords and create one accordingly.
"""
from teams.models import BillingRecord
celery_logger.debug('insert billing record')
language = version.subtitle_language
video = language.video
tv = video.get_team_video()
if not tv:
celery_logger.debug('not a team video')
return
if not language.is_complete_and_synced(public=False):
celery_logger.debug('language not complete')
return
try:
# we already have a record
previous_record = BillingRecord.objects.get(video=video,
new_subtitle_language=language)
# make sure we update it
celery_logger.debug('a billing record for this language exists')
previous_record.is_original = \
video.primary_audio_language_code == language.language_code
previous_record.save()
return
except BillingRecord.DoesNotExist:
pass
if NewSubtitleVersion.objects.filter(
subtitle_language=language,
created__lt=BILLING_CUTOFF).exclude(
pk=version.pk).exists():
celery_logger.debug('an older version exists')
return
is_original = language.is_primary_audio_language()
source = version.origin
team = tv.team
project = tv.project
new_record = BillingRecord.objects.create(
video=video,
project = project,
new_subtitle_version=version,
new_subtitle_language=language,
is_original=is_original, team=team,
created=version.created,
source=source,
user=version.author)
from_translations = self.insert_records_for_translations(new_record)
return new_record, from_translations
def get_minutes_for_version(version, round_up_to_integer):
"""
Return the number of minutes spanned by the subtitles in the given version.
"""
subs = version.get_subtitles()
if len(subs) == 0:
return 0
for sub in subs:
if sub.start_time is not None:
start_time = sub.start_time
break
# we shouldn't have an end time set without a start time, but handle
# it just in case
if sub.end_time is not None:
start_time = sub.end_time
break
else:
return 0
for sub in reversed(subs):
if sub.end_time is not None:
end_time = sub.end_time
break
# we shouldn't have a start time set without an end time, but handle
# it just in case
if sub.start_time is not None:
end_time = sub.start_time
break
else:
return 0
duration_seconds = (end_time - start_time) / 1000.0
minutes = duration_seconds/60.0
if round_up_to_integer:
minutes = int(ceil(minutes))
return minutes
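# Worked example: if the first timed subtitle starts at 12,000 ms and the last
# one ends at 102,000 ms, the duration is 90 seconds -> 1.5 minutes, which
# becomes 2 when round_up_to_integer is True (BillingRecord.get_minutes passes True).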
class BillingRecord(models.Model):
# The billing record should still exist if the video is deleted
video = models.ForeignKey(Video, blank=True, null=True, on_delete=models.SET_NULL)
project = models.ForeignKey(Project, blank=True, null=True, on_delete=models.SET_NULL)
subtitle_version = models.ForeignKey(SubtitleVersion, null=True,
blank=True, on_delete=models.SET_NULL)
new_subtitle_version = models.ForeignKey(NewSubtitleVersion, null=True,
blank=True, on_delete=models.SET_NULL)
subtitle_language = models.ForeignKey(SubtitleLanguage, null=True,
blank=True, on_delete=models.SET_NULL)
new_subtitle_language = models.ForeignKey(NewSubtitleLanguage, null=True,
blank=True, on_delete=models.SET_NULL)
minutes = models.FloatField(blank=True, null=True)
is_original = models.BooleanField(default=False)
team = models.ForeignKey(Team)
created = models.DateTimeField()
source = models.CharField(max_length=255)
user = models.ForeignKey(User)
objects = BillingRecordManager()
class Meta:
unique_together = ('video', 'new_subtitle_language')
def __unicode__(self):
return "%s - %s" % (self.video and self.video.video_id,
self.new_subtitle_language and self.new_subtitle_language.language_code)
def save(self, *args, **kwargs):
if not self.minutes and self.minutes != 0.0:
self.minutes = self.get_minutes()
assert self.minutes is not None
return super(BillingRecord, self).save(*args, **kwargs)
def get_minutes(self):
return get_minutes_for_version(self.new_subtitle_version, True)
class Partner(models.Model):
name = models.CharField(_(u'name'), max_length=250, unique=True)
slug = models.SlugField(_(u'slug'), unique=True)
can_request_paid_captions = models.BooleanField(default=False)
# The `admins` field specifies users who can do just about anything within
# the partner realm.
admins = models.ManyToManyField('auth.CustomUser',
related_name='managed_partners', blank=True, null=True)
def __unicode__(self):
return self.name
def is_admin(self, user):
return user in self.admins.all()
| agpl-3.0 | -5,704,561,719,830,586,000 | 36.958653 | 232 | 0.608513 | false | 4.212928 | false | false | false |
codedsk/hubcheck | hubcheck/pageobjects/po_time_overview_page.py | 1 | 1349 | from hubcheck.pageobjects.po_time_base_page import TimeBasePage
from hubcheck.pageobjects.basepageelement import Link
class TimeOverviewPage(TimeBasePage):
"""time overview page"""
def __init__(self,browser,catalog,groupid=None):
super(TimeOverviewPage,self).__init__(browser,catalog)
self.path = "/time/overview"
# load hub's classes
TimeOverviewPage_Locators = self.load_class('TimeOverviewPage_Locators')
TimeOverview = self.load_class('TimeOverview')
# update this object's locator
self.locators.update(TimeOverviewPage_Locators.locators)
# setup page object's components
self.overview = TimeOverview(self,{'base':'overview'})
def get_active_hubs_count(self):
return self.overview.get_active_hubs_count()
def get_active_tasks_count(self):
return self.overview.get_active_tasks_count()
def get_total_hours_count(self):
return self.overview.get_total_hours_count()
def goto_hubs(self):
self.overview.goto_hubs()
def goto_tasks(self):
self.overview.goto_tasks()
def goto_records(self):
self.overview.goto_records()
class TimeOverviewPage_Locators_Base(object):
"""locators for TimeOverviewPage object"""
locators = {
'overview' : "css=#plg_time_overview",
}
| mit | 2,818,290,263,119,417,000 | 28.977778 | 80 | 0.673091 | false | 3.768156 | false | false | false |
madeso/prettygood | dotnet/Tagger/TagValidator.py | 1 | 2567 | using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using MusicBrainz;
using PrettyGood.Util;
namespace Tagger
{
class TagValidator
{
public bool validate(IdTag tag)
{
Artist artist = null;
if (string.IsNullOrEmpty(tag.Artist) == false) artist = getArtist(tag.Artist);
if (artist == null) return false;
Release album = null;
if (string.IsNullOrEmpty(tag.Album) == false) album = getRelease(artist, tag.Album);
Track track = null;
if (string.IsNullOrEmpty(tag.Title))
{
int num = int.Parse(tag.TrackNumber.RemoveLeadingZeros());
num %= 100;
track = album.GetTracks()[num];
}
else
{
foreach (var t in Track.Query(tag.Title, artist.GetName()))
{
track = t;
break;
}
}
if (track == null) return false;
if (album == null)
{
foreach (var r in track.GetReleases())
{
album = r;
break;
}
}
tag.Artist = artist.GetName();
tag.Album = album.GetTitle();
tag.TrackNumber = track.GetTrackNumber(album).ToString();
tag.TotalTracks = album.GetTracks().Count.ToString();
//tag.Year = album.GetReleaseRelations()[0].BeginDate;
return true;
}
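// Illustrative use (assumed caller code): fill in whatever fields are present
// on the IdTag, then let the validator rewrite them from MusicBrainz data:
//   var validator = new TagValidator();
//   if (validator.validate(tag)) { /* tag.Artist/Album/TrackNumber are now canonical */ }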
private Release getRelease(Artist artist, string a)
{
string album = a.ToLower();
foreach (Release r in artist.GetReleases())
{
if (album == r.GetTitle().ToLower()) return r;
}
return null;
}
private Artist getArtist(string art)
{
string artist = art.ToLower();
if (artists.ContainsKey(artist)) return artists[artist];
Artist info = null;
System.Threading.Thread.Sleep(500);
foreach (Artist a in Artist.Query(artist))
{
string name = a.GetName();
if (artist.Contains(name.ToLower()))
{
info = a;
break;
}
}
artists.Add(artist, info);
return info;
}
Dictionary<string, Artist> artists = new Dictionary<string, Artist>();
}
}
| mit | -570,241,561,569,758,850 | 27.186813 | 96 | 0.475244 | false | 4.47644 | false | false | false |
clasnake/recommender | similarity.py | 1 | 6369 | from __future__ import division
from math import sqrt
def sim_distance(prefs, item1, item2):
#get the list of shared items
si = {};
for item in prefs[item1]:
if item in prefs[item2]:
si[item] = 1;
#if they have no shared items,return 0;
if len(si) == 0: return 0;
#Add the squares of all the differences
sum_of_squares = sum(
[pow(prefs[item1][item] - prefs[item2][item], 2) for item in prefs[item1] if item in prefs[item2]])
return 1 / (1 + sqrt(sum_of_squares))
# Returns the Pearson correlation coefficient for p1 and p2
def sim_pearson(prefs, p1, p2):
# Get the list of mutually rated items
si = {}
for item in prefs[p1]:
if item in prefs[p2]: si[item] = 1
# if they are no ratings in common, return 0
if len(si) == 0: return 0
# Sum calculations
n = len(si)
# Sums of all the preferences
sum1 = float(sum([prefs[p1][it] for it in si]))
sum2 = float(sum([prefs[p2][it] for it in si]))
# Sums of the squares
sum1Sq = float(sum([pow(prefs[p1][it], 2) for it in si]))
sum2Sq = float(sum([pow(prefs[p2][it], 2) for it in si]))
# Sum of the products
pSum = float(sum([prefs[p1][it] * prefs[p2][it] for it in si]))
# Calculate r (Pearson score)
num = float(pSum - (sum1 * sum2 / n))
den = float(sqrt((sum1Sq - pow(sum1, 2) / n) * (sum2Sq - pow(sum2, 2) / n)))
if den == 0: return 0
r = float(num / den)
return round(r, 7)
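# The `prefs` argument for all of these similarity functions is a nested dict
# of ratings, e.g. (illustrative data):
#   prefs = {'alice': {'movie1': 4.0, 'movie2': 1.5},
#            'bob':   {'movie1': 3.5, 'movie3': 5.0}}
#   sim_pearson(prefs, 'alice', 'bob')  # correlation over the shared items only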
def sim_pearson1(prefs, person1, person2):
#get the list of shared items
si = {}
for item in prefs[person1]:
if item in prefs[person2]:
si[item] = 1
#if they have no shared items, return 0
if len(si) == 0: return 0
#find the number of elements
n = len(si)
#add up all the prefs
sum1 = sum([prefs[person1][item] for item in si])
sum2 = sum([prefs[person2][item] for item in si])
#calculate the mean of the critics of p1 and p2
mean1 = sum1 / n;
mean2 = sum2 / n;
#calculate the covariance
covariance = sum([(prefs[person1][item] - mean1) * (prefs[person2][item] - mean2) for item in si]) / n
#calculate the standard_deviation
sd1 = sqrt(sum([pow(prefs[person1][item] - mean1, 2) for item in si]) / n)
sd2 = sqrt(sum([pow(prefs[person2][item] - mean2, 2) for item in si]) / n)
if sd1 * sd2 == 0: return 0
#calculate the pearson correlation improved
pearson = (covariance / (sd1 * sd2))
return pearson
def sim_pearson_improved(prefs, person1, person2):
#get the list of shared items
si = {}
for item in prefs[person1]:
if item in prefs[person2]:
si[item] = 1
#if they have no shared items, return 0
if len(si) == 0: return 0
#find the number of elements
n = len(si)
#get the count of items rated by person
count1 = 0
count2 = 0
for person in prefs[person1]:
count1 += 1
for item in prefs[person2]:
count2 += 1
totalCount = count1 + count2 - n
#add up all the prefs
sum1 = sum([prefs[person1][item] for item in si])
sum2 = sum([prefs[person2][item] for item in si])
#calculate the mean of the critics of p1 and p2
mean1 = sum1 / n;
mean2 = sum2 / n;
#calculate the covariance
covariance = sum([(prefs[person1][item] - mean1) * (prefs[person2][item] - mean2) for item in si]) / n
#calculate the standard_deviation
sd1 = sqrt(sum([pow(prefs[person1][item] - mean1, 2) for item in si]) / n)
sd2 = sqrt(sum([pow(prefs[person2][item] - mean2, 2) for item in si]) / n)
if sd1 * sd2 == 0: return 0
#calculate the pearson correlation improved
pearson = (covariance / (sd1 * sd2)) * (float(n) / float(totalCount))
#print n,count,float(n)/float(count),pearson
return pearson
def sim_cosine(prefs, item1, item2):
si = {}
for i in prefs[item1]:
if i in prefs[item2]:
si[i] = 1
#print si
if len(si) == 0: return 0
x = sqrt(sum([prefs[item1][it] ** 2 for it in si]))
y = sqrt(sum([prefs[item2][it] ** 2 for it in si]))
xy = sum([prefs[item1][it] * prefs[item2][it] for it in si])
cos = xy / (x * y)
return cos
def sim_cosine_improved(prefs, item1, item2):
si = {}
for i in prefs[item1]:
if i in prefs[item2]:
si[i] = 1
#print si
n = len(si)
if n == 0: return 0
count1 = 0
count2 = 0
for item in prefs[item1]:
count1 += 1
for item in prefs[item2]:
count2 += 1
totalCount = count1 + count2 - n
x = sqrt(sum([prefs[item1][it] ** 2 for it in si]))
y = sqrt(sum([prefs[item2][it] ** 2 for it in si]))
xy = sum([prefs[item1][it] * prefs[item2][it] for it in si])
cos = xy / (x * y)
return cos * (float(n) / float(totalCount))
def sim_Jaccard(s1, s2, length):
count = 0
for i in range(0, length):
if s1[i] == '1' and s2[i] == '1':
count += 1
if s1[i] == '1\n' and s2[i] == '1\n':
count += 1
return count / (length - count)
def sim_itemType(s1, s2, length):
count = 0
for i in range(0, length):
if s1[i] == '1' and s2[i] == '1':
count += 1
if s1[i] == '1\n' and s2[i] == '1\n':
count += 1
return count / 5
def sim_cosine_improved_tag(prefs, item1, item2, movieTags):
common = 0
for i in movieTags[item1]:
if i in movieTags[item2]:
common += 1
if common >= 5:
return 0.8
else:
si = {}
for i in prefs[item1]:
if i in prefs[item2]:
si[i] = 1
#print si
n = len(si)
if n == 0: return 0
count1 = 0
count2 = 0
for item in prefs[item1]:
count1 += 1
for item in prefs[item2]:
count2 += 1
totalCount = count1 + count2 - n
x = sqrt(sum([prefs[item1][it] ** 2 for it in si]))
y = sqrt(sum([prefs[item2][it] ** 2 for it in si]))
xy = sum([prefs[item1][it] * prefs[item2][it] for it in si])
cos = xy / (x * y)
return cos * (float(n) / float(totalCount))
#def sim_pearson_improved_typeAdded(prefs,item1,item2):
# pearson_improved=sim_pearson_improved(prefs,item1,item2)
# item_type=itemSimSet[item1][item2]
# return 0.9*(pearson_improved+1)/2.0+0.1*item_type
| mit | 2,447,981,508,787,601,000 | 27.560538 | 107 | 0.566808 | false | 3.040095 | false | false | false |
fossdevil/Assignments | Machine Learning/Assignment3Final/ML4.py | 1 | 3746 | import numpy as np
import scipy
import matplotlib.pyplot as plt
import random
# Generate n points drawn uniformly from the surface of the unit sphere in d dimensions
def generatePoints(n,d):
points = []
for i in range(0,n):
point = np.random.normal(0,1,d);
p = point**2;
den = np.sqrt(sum(p));
point = list(point/den);
points.append(point);
return points;
def interPointDistance(points,n,d):
distMat = []
distance = 0;
for i in range(0,n):
disti = []
for j in range(0,n):
distance = np.linalg.norm(list(np.asarray(points[i])-np.asarray(points[j])));
disti.append(distance);
distMat.append(disti);
return distMat;
def projection(points,subspace,n):
projPoint = []
subspacet = np.asmatrix(subspace);
subspace = subspacet.T;
for i in range(0,n):
inv = np.linalg.inv(np.dot(subspacet,subspace));
proj = np.dot(np.dot(np.dot(subspace,inv),subspacet),points[i]);
projPoint.append(proj);
return projPoint;
def subspaceGen(n,d):
subspace = [];
subv = np.zeros(d);
r = np.arange(0,d);
k = list(random.sample(r,n));
j = 0;
for i in range(0,n):
subv = np.zeros(d);
subv[k[j]] = 1;
j = j+1;
subspace.append(subv);
return subspace;
n = 50;
d = 200;
points50 = generatePoints(n,d);
distMat = interPointDistance(points50,n,d);
print("Please open file \"Solution4.txt\":");
filename = "Solution4.txt"
target = open(filename,'w');
target.write("The interpoint distance Matrix is as follows:\n");
for i in range(0,n):
target.write(str(distMat[i]));
target.write("\n");
target.write("\n");
target.write("\n");
target.write("\n");
subspaces1 = np.asmatrix(subspaceGen(1,d));
subspaces2 = np.asmatrix(subspaceGen(2,d));
subspaces3 = np.asmatrix(subspaceGen(3,d));
subspaces10 = np.asmatrix(subspaceGen(10,d));
subspaces50 = np.asmatrix(subspaceGen(50,d));
projPoint1 = projection(points50,subspaces1,n);
projPoint2 = projection(points50,subspaces2,n);
projPoint3 = projection(points50,subspaces3,n);
projPoint10 = projection(points50,subspaces10,n);
projPoint50 = projection(points50,subspaces50,n);
distMat1 = interPointDistance(projPoint1,n,d);
distMat2 = interPointDistance(projPoint2,n,d);
distMat3 = interPointDistance(projPoint3,n,d);
distMat10 = interPointDistance(projPoint10,n,d);
distMat50 = interPointDistance(projPoint50,n,d);
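# Random-projection check: projecting onto a k-dimensional coordinate subspace of
# R^200 shrinks inter-point distances by roughly sqrt(k/200) on average, so each
# difference matrix below compares the projected distances against the original
# ones rescaled by that factor (k = 1, 2, 3, 10, 50).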
num = np.sqrt(1.0/200);
diff1 = list((num*np.asmatrix(distMat))-np.asmatrix(distMat1));
num = np.sqrt(2.0/200);
diff2 = list((num*np.asmatrix(distMat))-np.asmatrix(distMat2));
num = np.sqrt(3.0/200);
diff3 = list((num*np.asmatrix(distMat))-np.asmatrix(distMat3));
num = np.sqrt(10.0/200);
diff10 = list((num*np.asmatrix(distMat))-np.asmatrix(distMat10));
num = np.sqrt(50.0/200);
diff50 = list((num*np.asmatrix(distMat))-np.asmatrix(distMat50));
target.write("Difference matrix is as follows:\n");
target.write("For k = 1");
target.write("\n");
for i in range(0,n):
target.write(str(diff1[i]));
target.write("\n");
target.write("\n");
target.write("\n");
target.write("\n");
target.write("For k = 2");
target.write("\n");
for i in range(0,n):
target.write(str(diff2[i]));
target.write("\n");
target.write("\n");
target.write("\n");
target.write("\n");
target.write("For k = 3");
target.write("\n");
for i in range(0,n):
target.write(str(diff3[i]));
target.write("\n");
target.write("\n");
target.write("\n");
target.write("\n");
target.write("For k = 10");
target.write("\n");
for i in range(0,n):
target.write(str(diff10[i]));
target.write("\n");
target.write("\n");
target.write("\n");
target.write("\n");
target.write("For k = 50");
target.write("\n");
for i in range(0,n):
target.write(str(diff50[i]));
target.write("\n");
target.close();
| mit | 5,465,868,872,613,046,000 | 26.544118 | 82 | 0.652429 | false | 2.77071 | false | false | false |
4383/street-workout-database | sport/web/commons/templatetags/common_tags.py | 1 | 3392 | __author__ = 'herve.beraud'
from datetime import datetime, timedelta
from django import template
from django.core.exceptions import ObjectDoesNotExist
from django.conf import settings
from django.utils.timesince import timesince
from community.models import InformationMessage
from exercises.models import Category
from exercises.models import MuscleGroup
from exercises.models import Muscle
register = template.Library()
@register.inclusion_tag('common_tags/show_exercises_menu.html')
def show_exercises_menu():
categories = Category.objects.filter(active=True).count()
muscles_groups = MuscleGroup.objects.filter(active=True).count()
muscles = Muscle.objects.filter(active=True).count()
return {'categories': categories, 'muscles_group': muscles_groups, 'muscles': muscles}
@register.inclusion_tag('common_tags/image_gallery.html')
def images_gallery(images):
return {"images": images}
@register.inclusion_tag('common_tags/grid-list-gallery.html')
def grid_list_gallery(items,
display_level=True,
display_menu=True,
shortcut_menu=True,
semantic_type="exercise",
margin_bottom=False
):
return {"items": items,
"display_level": display_level,
"display_menu": display_menu,
"shortcut_menu": shortcut_menu,
"semantic_type": semantic_type,
"margin_bottom": margin_bottom
}
@register.inclusion_tag('common_tags/video_gallery.html')
def videos_gallery(videos):
return {"videos": videos}
@register.inclusion_tag('common_tags/grid-list-gallery-menu.html')
def grid_list_gallery_menu():
return {}
@register.inclusion_tag('common_tags/display_information_message.html', takes_context=True)
def display_information_message(context):
expiration_date = datetime.today() + timedelta(days=365)
cookie_date_format = "%a, %d %b %Y %I:%M:%S GMT"
try:
information_message = InformationMessage.objects.filter(
active=True,
display_date__lte=datetime.now(), expiration_date__gt=datetime.now()).latest('publish_date')
request = context['request']
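# display_once messages are suppressed when the `information_message_id`
# cookie (presumably set by the template when the message is first shown,
# using the expiration_date computed above) already holds this message's id.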
if information_message.display_once:
try:
already_read_information_message_id = int(request.COOKIES.get('information_message_id'))
if already_read_information_message_id == information_message.id:
information_message = None
# Cookie not found
except TypeError:
pass
except ObjectDoesNotExist:
information_message = None
return {"information_message": information_message, "expiration_date": expiration_date.strftime(cookie_date_format)}
@register.simple_tag
def current_version():
return settings.CURRENT_VERSION
@register.simple_tag
def current_revision():
return settings.CURRENT_REVISION
@register.simple_tag
def last_update_date_since():
now = datetime.now()
update = datetime.fromtimestamp(settings.LAST_UPDATE_DATE)
return timesince(update, now)
@register.simple_tag
def last_update_date():
return datetime.fromtimestamp(settings.LAST_UPDATE_DATE)
@register.simple_tag
def last_update_status():
return settings.LAST_UPDATE_STATUS
@register.simple_tag
def debugging():
return settings.DEBUG
| gpl-2.0 | 5,402,247,503,451,819,000 | 29.558559 | 120 | 0.680425 | false | 3.916859 | false | false | false |
openprocurement/openprocurement.auctions.dgf | openprocurement/auctions/dgf/views/other/question.py | 1 | 3280 | # -*- coding: utf-8 -*-
from openprocurement.auctions.core.utils import (
apply_patch,
context_unpack,
get_now,
json_view,
opresource,
save_auction,
)
from openprocurement.auctions.core.validation import (
validate_question_data,
validate_patch_question_data,
)
from openprocurement.auctions.core.views.mixins import AuctionQuestionResource
@opresource(name='dgfOtherAssets:Auction Questions',
collection_path='/auctions/{auction_id}/questions',
path='/auctions/{auction_id}/questions/{question_id}',
auctionsprocurementMethodType="dgfOtherAssets",
description="Auction questions")
class AuctionQuestionResource(AuctionQuestionResource):
@json_view(content_type="application/json", validators=(validate_question_data,), permission='create_question')
def collection_post(self):
"""Post a question
"""
auction = self.request.validated['auction']
if auction.status != 'active.tendering' or get_now() < auction.enquiryPeriod.startDate or get_now() > auction.enquiryPeriod.endDate:
self.request.errors.add('body', 'data', 'Can add question only in enquiryPeriod')
self.request.errors.status = 403
return
question = self.request.validated['question']
if any([i.status != 'active' for i in auction.lots if i.id == question.relatedItem]):
self.request.errors.add('body', 'data', 'Can add question only in active lot status')
self.request.errors.status = 403
return
auction.questions.append(question)
if save_auction(self.request):
self.LOGGER.info('Created auction question {}'.format(question.id),
extra=context_unpack(self.request, {'MESSAGE_ID': 'auction_question_create'}, {'question_id': question.id}))
self.request.response.status = 201
route = self.request.matched_route.name.replace("collection_", "")
self.request.response.headers['Location'] = self.request.current_route_url(_route_name=route, question_id=question.id, _query={})
return {'data': question.serialize("view")}
@json_view(content_type="application/json", permission='edit_auction', validators=(validate_patch_question_data,))
def patch(self):
"""Post an Answer
"""
auction = self.request.validated['auction']
if auction.status != 'active.tendering':
self.request.errors.add('body', 'data', 'Can\'t update question in current ({}) auction status'.format(auction.status))
self.request.errors.status = 403
return
if any([i.status != 'active' for i in auction.lots if i.id == self.request.context.relatedItem]):
self.request.errors.add('body', 'data', 'Can update question only in active lot status')
self.request.errors.status = 403
return
if apply_patch(self.request, src=self.request.context.serialize()):
self.LOGGER.info('Updated auction question {}'.format(self.request.context.id),
extra=context_unpack(self.request, {'MESSAGE_ID': 'auction_question_patch'}))
return {'data': self.request.context.serialize(auction.status)}
| apache-2.0 | -685,575,204,288,687,500 | 51.063492 | 141 | 0.651829 | false | 3.995128 | false | false | false |
tommy-u/chaco | chaco/multi_line_plot.py | 1 | 16640 | """ Defines the MultiLinePlot class.
"""
from __future__ import with_statement
# Standard library imports
import warnings
from math import ceil, floor
# Major library imports
import numpy as np
from numpy import argsort, array, invert, isnan, take, transpose
# Enthought library imports
from enable.api import black_color_trait, ColorTrait, LineStyle
from traits.api import Float, List, Str, Trait, \
Bool, Callable, Property, cached_property, Instance, Array
from traitsui.api import Item, View, ScrubberEditor, HGroup
from array_data_source import ArrayDataSource
from base import arg_find_runs, bin_search
from base_xy_plot import BaseXYPlot
class MultiLinePlot(BaseXYPlot):
""" A plot consisting of multiple lines.
The data to be plotted must come from a two-dimensional array with shape M by N
stored in a MultiArrayDataSource object. M is the number of lines to be plotted,
and N is the number of points in each line.
Constructor Parameters
----------------------
index : instance of an ArrayDataSource
These are the 'x' or abscissa coordinates.
yindex : instance of ArrayDataSource
These are the 'y' coordinates.
value : instance of a MultiArrayDataSource
Note that the `scale`, `offset` and `normalized_amplitude` attributes of the
MultiLinePlot control the projection of the traces into the (x,y)
plot. In simplest case, `scale=1` and `offset=0`, and `normalized_amplitude`
controls the scaling of the traces relative to their base y value.
global_min, global_max : float
The minimum and maximum values of the data in `value`. For large
arrays, computing these could take excessive time, so they must be
provided when an instance is created.
normalized_amplitude : Float
color : ColorTrait
color_func : Callable or None
If not None, this Callable overrides `color`. The argument to `color_func`
will be the integer index of the trace to be rendered. `color_func` must
return an RGBA 4-tuple.
Default: None
orientation : str
Must be 'v' or 'h' (for 'vertical' or 'horizontal', respectively). This is
the orientation of the index axis (i.e. the 'x' axis).
Default: 'h'
fast_clip : bool
If True, traces whose *base* 'y' coordinate is outside the value axis range
are not plotted, even if some of the data in the curve extends into the plot
region.
Default: False
line_width : float
Width of the plotted lines.
line_style :
The style of the trace lines in the plot.
The following are from the original LinePlot code, and are untested:
selected_color
selected_line_style
"""
# M and N appearing in the comments are as defined in the docstring.
yindex = Instance(ArrayDataSource)
# amplitude = Float(0.0)
# `scale` and `offset` provide a more general transformation, but are currently
# untested.
scale = Float(1.0)
offset = Float(0.0)
fast_clip = Bool(False)
# The color of the lines.
color = black_color_trait
# A function that returns the color of lines. Overrides `color` if not None.
color_func = Trait(None, None, Callable)
# The color to use to highlight the line when selected.
selected_color = ColorTrait("lightyellow")
# The style of the selected line.
selected_line_style = LineStyle("solid")
# The name of the key in self.metadata that holds the selection mask
metadata_name = Str("selections")
# The thickness of the line.
line_width = Float(1.0)
# The line dash style.
line_style = LineStyle
use_global_bounds = Bool(True)
# Minimum value in the `value` data source. This must be provided
# in the call to the constructor.
global_min = Float
# Maximum value in the `value` data source. This must be provided
# in the call to the constructor.
global_max = Float
# Normalized amplitude is the value exposed to the user.
normalized_amplitude = Float(-0.5)
amplitude_scale = Property(Float, depends_on=['global_min', 'global_max', 'data',
'use_global_bounds', 'yindex'])
amplitude = Property(Float, depends_on=['normalized_amplitude',
'amplitude_scale'])
#------------------------------------------------------------------------
# Private traits
#------------------------------------------------------------------------
# The projected 2D numpy array.
_trace_data = Property(Array, depends_on=['index', 'index.data_changed',
'value', 'value.data_changed', 'yindex', 'yindex.data_changed',
'amplitude', 'scale', 'offset'])
# Cached list of non-NaN arrays of (x,y) data-space points; regardless of
# self.orientation, this is always stored as (index_pt, value_pt). This is
# different from the default BaseXYPlot definition.
_cached_data_pts = List
# Cached list of non-NaN arrays of (x,y) screen-space points.
_cached_screen_pts = List
#------------------------------------------------------------------------
#
#------------------------------------------------------------------------
def trait_view(self, obj):
"""Create a minimalist View, with just the amplitude and color attributes."""
# Minimalist Traits UI View for customizing the plot: only the trace amplitude
# and line color are exposed.
view = View(
HGroup(
Item('use_global_bounds'),
# Item('normalized_amplitude'),
# Item('normalized_amplitude', editor=RangeEditor()),
Item('normalized_amplitude',
editor=ScrubberEditor(increment=0.2, hover_color=0xFFFFFF, active_color=0xA0CD9E,
border_color=0x0000FF)),
),
Item("color", label="Trace color", style="simple"),
width=480,
title="Trace Plot Line Attributes",
buttons=["OK", "Cancel"])
return view
#------------------------------------------------------------------------
#
#------------------------------------------------------------------------
# See base_xy_plot.py for these:
## def hittest(self, screen_pt, threshold=7.0):
## def interpolate(self, index_value):
def get_screen_points(self):
self._gather_points()
scrn_pts_list = [[self.map_screen(ary) for ary in line]
for line in self._cached_data_pts]
return scrn_pts_list
#------------------------------------------------------------------------
# Private methods
#------------------------------------------------------------------------
@cached_property
def _get_amplitude_scale(self):
"""
If the amplitude is set to this value, the largest trace deviation from
its base y coordinate will be equal to the y coordinate spacing.
"""
# Note: Like the rest of the current code, this ignores the `scale` attribute.
if self.yindex is not None:
coordinates = self.yindex.get_data()
else:
coordinates = []
if len(coordinates) > 1:
dy = coordinates[1] - coordinates[0]
if dy == 0:
dy = 1.0
else:
# default coordinate spacing if there is only 1 coordinate
dy = 1.0
if self.use_global_bounds:
max_abs = max(abs(self.global_min), abs(self.global_max))
else:
data = self.value._data
max_abs = np.max(np.abs(data))
if max_abs == 0:
amp_scale = 0.5 * dy
else:
amp_scale = 0.5 * dy / max_abs
return amp_scale
@cached_property
def _get_amplitude(self):
amplitude = self.normalized_amplitude * self.amplitude_scale
return amplitude
@cached_property
def _get__trace_data(self):
"""Compute the transformed data."""
# Get the array from `value`
data = self.value._data
coordinates = self.yindex.get_data()
channel_data = self.scale*(self.amplitude*data + coordinates[:,np.newaxis]) \
+ self.offset
return channel_data
def _gather_points(self):
"""
Collects the data points that are within the bounds of the plot and
caches them.
"""
if self._cache_valid:
return
if not self.index or not self.value:
return
index = self.index.get_data()
varray = self._trace_data
if varray.size == 0:
self._cached_data_pts = []
self._cache_valid = True
return
coordinates = self.yindex.get_data()
if self.fast_clip:
coord_min = float(coordinates[0])
coord_max = coordinates[-1]
slice_min = max(0,ceil((varray.shape[0]-1)*(self.value_range.low - coord_min)/(coord_max - coord_min)))
slice_max = min(varray.shape[0], 1+floor((varray.shape[0]-1)*(self.value_range.high - coord_min)/(coord_max - coord_min)))
varray = varray[slice_min:slice_max]
# FIXME: The y coordinates must also be sliced to match varray.
# Check to see if the data is completely outside the view region.
outside = False
# Check x coordinates.
low, high = self.index.get_bounds()
if low > self.index_range.high or high < self.index_range.low:
outside = True
# Check y coordinates. Use varray because it is based on the yindex,
# but has been shifted up or down depending on the values.
ylow, yhigh = varray.min(), varray.max()
if ylow > self.value_range.high or yhigh < self.value_range.low:
outside = True
if outside:
self._cached_data_pts = []
self._cache_valid = True
return
if len(index) == 0 or varray.shape[0] == 0 or varray.shape[1] == 0 \
or len(index) != varray.shape[1]:
self._cached_data_pts = []
self._cache_valid = True
return
size_diff = varray.shape[1] - len(index)
if size_diff > 0:
warnings.warn('Chaco.LinePlot: value.shape[1] %d - len(index) %d = %d\n' \
% (varray.shape[1], len(index), size_diff))
index_max = len(index)
varray = varray[:,:index_max]
else:
index_max = varray.shape[1]
index = index[:index_max]
# Split the index and value raw data into non-NaN chunks.
# nan_mask is a boolean M by N array.
nan_mask = invert(isnan(varray)) & invert(isnan(index))
blocks_list = []
for nm in nan_mask:
blocks = [b for b in arg_find_runs(nm, "flat") if nm[b[0]] != 0]
blocks_list.append(blocks)
line_points = []
for k, blocks in enumerate(blocks_list):
points = []
for block in blocks:
start, end = block
block_index = index[start:end]
block_value = varray[k, start:end]
index_mask = self.index_mapper.range.mask_data(block_index)
runs = [r for r in arg_find_runs(index_mask, "flat") \
if index_mask[r[0]] != 0]
# Check to see if our data view region is between two points in the
# index data. If so, then we have to reverse map our current view
# into the appropriate index and draw the bracketing points.
if runs == []:
data_pt = self.map_data((self.x_mapper.low_pos, self.y_mapper.low_pos))
if self.index.sort_order == "none":
indices = argsort(index)
sorted_index = take(index, indices)
sorted_value = take(varray[k], indices)
sort = 1
else:
sorted_index = index
sorted_value = varray[k]
if self.index.sort_order == "ascending":
sort = 1
else:
sort = -1
ndx = bin_search(sorted_index, data_pt, sort)
if ndx == -1:
# bin_search can return -1 if data_pt is outside the bounds
# of the source data
continue
z = transpose(array((sorted_index[ndx:ndx+2],
sorted_value[ndx:ndx+2])))
points.append(z)
else:
# Expand the width of every group of points so we draw the lines
# up to their next point, outside the plot area
data_end = len(index_mask)
for run in runs:
start, end = run
if start != 0:
start -= 1
if end != data_end:
end += 1
run_data = transpose(array((block_index[start:end],
block_value[start:end])))
points.append(run_data)
line_points.append(points)
self._cached_data_pts = line_points
self._cache_valid = True
return
# See base_xy_plot.py for:
## def _downsample(self):
## def _downsample_vectorized(self):
def _render(self, gc, line_points, selected_points=None):
if len(line_points) == 0:
return
with gc:
gc.set_antialias(True)
gc.clip_to_rect(self.x, self.y, self.width, self.height)
render = self._render_normal
if selected_points is not None:
gc.set_stroke_color(self.selected_color_)
gc.set_line_width(self.line_width+10.0)
gc.set_line_dash(self.selected_line_style_)
render(gc, selected_points)
if self.color_func is not None:
# Existence of self.color_func overrides self.color.
color_func = self.color_func
else:
color_func = lambda k: self.color_
tmp = list(enumerate(line_points))
# Note: the list is reversed for testing with _render_filled.
for k, points in reversed(tmp):
color = color_func(k)
# Apply the alpha
alpha = color[-1] if len(color) == 4 else 1
color = color[:3] + (alpha * self.alpha,)
gc.set_stroke_color(color)
gc.set_line_width(self.line_width)
gc.set_line_dash(self.line_style_)
render(gc, points)
# Draw the default axes, if necessary
self._draw_default_axes(gc)
def _render_normal(self, gc, points):
for ary in points:
if len(ary) > 0:
gc.begin_path()
gc.lines(ary)
gc.stroke_path()
return
def _render_icon(self, gc, x, y, width, height):
with gc:
gc.set_stroke_color(self.color_)
gc.set_line_width(self.line_width)
gc.set_line_dash(self.line_style_)
gc.set_antialias(0)
gc.move_to(x, y+height/2)
gc.line_to(x+width, y+height/2)
gc.stroke_path()
def _alpha_changed(self):
self.invalidate_draw()
self.request_redraw()
return
def _color_changed(self):
self.invalidate_draw()
self.request_redraw()
return
def _line_style_changed(self):
self.invalidate_draw()
self.request_redraw()
return
def _line_width_changed(self):
self.invalidate_draw()
self.request_redraw()
return
def _amplitude_changed(self):
self.value.data_changed = True
self.invalidate_draw()
self.request_redraw()
return
def __getstate__(self):
state = super(MultiLinePlot,self).__getstate__()
for key in ['traits_view']:
if state.has_key(key):
del state[key]
return state
| bsd-3-clause | -2,342,800,567,750,205,400 | 34.031579 | 134 | 0.534796 | false | 4.275437 | false | false | false |
pylover/network-interfaces | network_interfaces/stanza.py | 1 | 4473 | # -*- coding: utf-8 -*-
import re
from .helpers import clean_list, list_hash
__author__ = 'vahid'
class Stanza(object):
_type = None
_filename = None
_headers = None
def __init__(self, filename, *headers):
self._filename = filename
self._headers = list(headers)
def __repr__(self):
return ' '.join(self._headers)
def _headers_hash(self):
result = 0
for h in self._headers:
result ^= h.__hash__()
return result
def __hash__(self):
return \
self._type.__hash__() ^ \
self._headers_hash()
@classmethod
def is_stanza(cls, s):
return re.match(r'^(iface|mapping|auto|allow-|source).*', s)
@classmethod
def subclasses(cls):
return cls.__subclasses__() + [g for s in cls.__subclasses__()
for g in s.subclasses()]
@classmethod
def create(cls, header, filename):
cells = re.split('\s+', header)
cells = clean_list(cells)
stanza_type = cells[0]
subclasses = cls.subclasses()
# Checking for exact match
for subclass in subclasses:
if subclass._type and stanza_type == subclass._type:
return subclass(filename, *cells)
# Partial start match
for subclass in subclasses:
if subclass._type and stanza_type.startswith(subclass._type):
return subclass(filename, *cells)
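# e.g. (illustrative) Stanza.create('iface eth0 inet static', filename) picks
# the subclass whose _type exactly matches 'iface', falling back to a prefix
# match (such as 'allow-hotplug' matching an 'allow-' subclass), and
# instantiates it with the split header cells.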
def validate(self, allow_correction=False):
pass
class MultilineStanza(Stanza):
_items = None
def __init__(self, *args, **kwargs):
super(MultilineStanza, self).__init__(*args, **kwargs)
self._items = []
def __getattr__(self, item):
try:
return self[item]
except (KeyError, IndexError):
return super(MultilineStanza, self).__getattribute__(item)
#raise AttributeError('%s %s' % (object.__repr__(self), item))
def __setattr__(self, key, value):
if hasattr(self.__class__, key):
super(Stanza, self).__setattr__(key, value)
else:
self[key] = value
def __delattr__(self, item):
if hasattr(self.__class__, item):
super(Stanza, self).__delattr__(item)
else:
del self[item]
def __contains__(self, item):
return self.__getitem_internal(item) is not None
def __getitem__(self, item):
if not isinstance(item, str):
raise TypeError(type(item))
result = self.__getitem_internal(item)
if not result:
raise KeyError(item)
return ' '.join(result[1:])
def __setitem__(self, key, value):
if not isinstance(key, str):
raise TypeError(type(key))
values = re.split('\s', value)
cells = self.__getitem_internal(key)
if not cells:
self.add_entry(' '.join([key] + values))
else:
del cells[1:]
cells += values
def __delitem__(self, item):
if not isinstance(item, str):
raise TypeError(type(item))
self.__delitem_internal(item)
def __repr__(self):
items = [(i[0], ' '.join(i[1:]).strip()) for i in self._items]
return '%s\n%s\n' % (
super(MultilineStanza, self).__repr__(),
'\n'.join([' %s %s' % (i[0], i[1]) for i in items if i[1]]))
def __hash__(self):
return super(MultilineStanza, self).__hash__() ^ self._items_hash()
def update(self, other):
if isinstance(other, dict):
for k, v in other.items():
self[k.replace('_', '-')] = v
else:
raise ValueError('A dict is required, but %s was passed.' % type(other))
def _items_hash(self):
result = 0
for i in self._items:
result ^= list_hash(i)
return result
def add_entry(self, l):
cells = re.split('\s+', l)
cells = clean_list(cells)
if cells and cells not in self._items:
self._items.append(cells)
def __getitem_internal(self, item):
key = item.replace('_', '-')
for i in self._items:
if i[0] == key:
return i
return None
def __delitem_internal(self, item):
key = item.replace('_', '-')
for i in self._items:
if i[0] == key:
self._items.remove(i)
return
| gpl-3.0 | -2,740,219,951,952,730,600 | 27.673077 | 84 | 0.521127 | false | 4.066364 | false | false | false |