"""Utility helpers to handle progress bars in `huggingface_hub`.
Example:
1. Use `huggingface_hub.utils.tqdm` as you would use `tqdm.tqdm` or `tqdm.auto.tqdm`.
2. To disable progress bars, either use `disable_progress_bars()` helper or set the
environment variable `HF_HUB_DISABLE_PROGRESS_BARS` to 1.
3. To re-enable progress bars, use `enable_progress_bars()`.
4. To check whether progress bars are disabled, use `are_progress_bars_disabled()`.
NOTE: Environment variable `HF_HUB_DISABLE_PROGRESS_BARS` has the priority.
Example:
```py
from huggingface_hub.utils import (
are_progress_bars_disabled,
disable_progress_bars,
enable_progress_bars,
tqdm,
)
# Disable progress bars globally
disable_progress_bars()
# Use as normal `tqdm`
for _ in tqdm(range(5)):
do_something()
# Still not showing progress bars, as `disable=False` is overwritten to `True`.
for _ in tqdm(range(5), disable=False):
do_something()
are_progress_bars_disabled() # True
# Re-enable progress bars globally
enable_progress_bars()
# Progress bar will be shown !
for _ in tqdm(range(5)):
do_something()
```
"""
import warnings
from tqdm.auto import tqdm as old_tqdm
from ..constants import HF_HUB_DISABLE_PROGRESS_BARS
# `HF_HUB_DISABLE_PROGRESS_BARS` is `Optional[bool]` while `_hf_hub_progress_bars_disabled`
# is a `bool`. If `HF_HUB_DISABLE_PROGRESS_BARS` is set to True or False, it has priority.
# If `HF_HUB_DISABLE_PROGRESS_BARS` is None, it means the user has not set the
# environment variable and is free to enable/disable progress bars programmatically.
# TL;DR: the env variable has priority over code.
#
# By default, progress bars are enabled.
_hf_hub_progress_bars_disabled: bool = HF_HUB_DISABLE_PROGRESS_BARS or False

def disable_progress_bars() -> None:
    """
    Globally disable progress bars used in `huggingface_hub`, except if the
    `HF_HUB_DISABLE_PROGRESS_BARS` environment variable has been set.
    """
    if HF_HUB_DISABLE_PROGRESS_BARS is False:
        warnings.warn(
            "Cannot disable progress bars: environment variable"
            " `HF_HUB_DISABLE_PROGRESS_BARS=0` is set and has priority."
        )
        return
    global _hf_hub_progress_bars_disabled
    _hf_hub_progress_bars_disabled = True

def enable_progress_bars() -> None:
    """
    Globally enable progress bars used in `huggingface_hub`, except if the
    `HF_HUB_DISABLE_PROGRESS_BARS` environment variable has been set.
    """
    if HF_HUB_DISABLE_PROGRESS_BARS is True:
        warnings.warn(
            "Cannot enable progress bars: environment variable"
            " `HF_HUB_DISABLE_PROGRESS_BARS=1` is set and has priority."
        )
        return
    global _hf_hub_progress_bars_disabled
    _hf_hub_progress_bars_disabled = False

def are_progress_bars_disabled() -> bool:
    """Return whether progress bars are globally disabled or not."""
    global _hf_hub_progress_bars_disabled
    return _hf_hub_progress_bars_disabled

class tqdm(old_tqdm):
    """
    Class to override the `disable` argument in case progress bars are globally disabled.

    Taken from https://github.com/tqdm/tqdm/issues/619#issuecomment-619639324.
    """

    def __init__(self, *args, **kwargs):
        if are_progress_bars_disabled():
            kwargs["disable"] = True
        super().__init__(*args, **kwargs)
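

# Illustrative sketch, not part of the original `huggingface_hub` module: a small,
# self-contained demo of how the helpers above interact. The function name
# `_progress_bar_toggle_example` is invented for illustration only.
def _progress_bar_toggle_example():
    # With `HF_HUB_DISABLE_PROGRESS_BARS` unset (i.e. None), the helpers simply
    # flip the module-level flag.
    disable_progress_bars()
    assert are_progress_bars_disabled()
    for _ in tqdm(range(5), disable=False):
        pass  # still hidden: `disable=False` is overwritten to `True` by the subclass
    enable_progress_bars()
    assert not are_progress_bars_disabled()
    # If the environment variable were set to 1 (or 0) before import, the calls
    # above would instead emit a warning and leave the global setting unchanged.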
{
"content_hash": "3b636e3c7e5bb7fa103f8e478458abab",
"timestamp": "",
"source": "github",
"line_count": 105,
"max_line_length": 91,
"avg_line_length": 32.76190476190476,
"alnum_prop": 0.6718023255813953,
"repo_name": "huggingface/huggingface_hub",
"id": "c16beb58e79ba90694c08e5287493e1ca9e0c3c9",
"size": "4087",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "src/huggingface_hub/utils/tqdm.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "338"
},
{
"name": "Python",
"bytes": "1086946"
}
],
"symlink_target": ""
}
import copy
import sys
import numpy as np
import json
import chainer
import chainer.links as L
import chainer.functions as F
from my_iterator import SerialIterator
from chainer import cuda
from chainer import training, Variable, ChainList
from chainer.training import extensions
from chainer.optimizer import WeightDecay, GradientClipping
from chainer.dataset.convert import _concat_arrays
from ccgbank import walk_autodir
from japanese_ccg import JaCCGReader
from collections import defaultdict, OrderedDict
from py_utils import read_pretrained_embeddings, read_model_defs
from tree import Leaf, Tree, get_leaves
from biaffine import Biaffine, Bilinear
from dyer_lstm import DyerLSTM
from param import Param
from qrnn import QRNNLayer
from lstm_tagger import UNK, OOR2, OOR3, OOR4, START, END, IGNORE, MISS
from lstm_tagger import log, get_suffix, get_prefix, normalize
from lstm_parser import TrainingDataCreator, FeatureExtractor
from lstm_parser import LSTMParser, LSTMParserTriTrainDataset

def scanl(f, base, l):
    res = [base]
    acc = base
    for x in l:
        acc = f(acc, x)
        res += [acc]
    return res
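

# Illustrative check, not part of the original file: `scanl` is an inclusive
# running fold. `forward()` below uses `scanl(...)[1:-1]` to turn per-sentence
# lengths into the split offsets passed to `F.split_axis`. The helper name
# `_scanl_example` is invented for illustration only.
def _scanl_example():
    assert scanl(lambda x, y: x + y, 0, [3, 5, 2]) == [0, 3, 8, 10]
    # dropping the first and last element gives the boundaries [3, 8]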
class LSTMParserDataset(chainer.dataset.DatasetMixin):
def __init__(self, model_path, samples_path):
self.model_path = model_path
self.targets = read_model_defs(model_path + "/target.txt")
self.extractor = FeatureExtractor(model_path)
with open(samples_path) as f:
self.samples = sorted(
json.load(f), key=lambda x: len(x[1][0]))
def __len__(self):
return len(self.samples)
def get_example(self, i):
words, [cats, deps] = self.samples[i]
splitted = words.split(" ")
w, s, p = self.extractor.process(splitted)
cats = np.array([-1] + [self.targets.get(x, IGNORE) for x in cats] + [-1], 'i')
deps = np.array([-1] + deps + [-1], 'i')
l = len(splitted) + 2
weight = np.array(1, 'f')
return w, s, p, l, cats, deps, weight
class QRNNTriTrainDataset(LSTMParserTriTrainDataset):
def __init__(self, model_path, ccgbank_path, tritrain_path, weight):
self.model_path = model_path
self.targets = read_model_defs(model_path + "/target.txt")
self.extractor = FeatureExtractor(model_path)
self.weight = weight
self.ncopies = 15
with open(ccgbank_path) as f:
self.ccgbank_samples = sorted(
json.load(f), key=lambda x: len(x[1][0]))
self.ccgbank_size = len(self.ccgbank_samples)
with open(tritrain_path) as f:
self.tritrain_samples = sorted(
json.load(f), key=lambda x: len(x[1][0]))
self.tritrain_size = len(self.tritrain_samples)
print >> sys.stderr, "len(ccgbank):", self.ccgbank_size
print >> sys.stderr, "len(ccgbank) * # copies:", self.ccgbank_size * self.ncopies
print >> sys.stderr, "len(tritrain):", self.tritrain_size
def get_example(self, i):
w, s, p, cats, deps, weight = super(QRNNTriTrainDataset, self).get_example(i)
l = w.shape[0]
return w, s, p, l, cats, deps, weight
class QRNNParser(chainer.Chain):
def __init__(self, model_path, word_dim=None, afix_dim=None, nlayers=2,
hidden_dim=128, elu_dim=64, dep_dim=100, dropout_ratio=0.5, use_cudnn=False):
self.model_path = model_path
defs_file = model_path + "/tagger_defs.txt"
if word_dim is None:
self.train = False
Param.load(self, defs_file)
self.extractor = FeatureExtractor(model_path)
else:
self.train = True
p = Param(self)
p.dep_dim = dep_dim
p.word_dim = word_dim
p.afix_dim = afix_dim
p.hidden_dim = hidden_dim
p.elu_dim = elu_dim
p.nlayers = nlayers
p.n_words = len(read_model_defs(model_path + "/words.txt"))
p.n_suffixes = len(read_model_defs(model_path + "/suffixes.txt"))
p.n_prefixes = len(read_model_defs(model_path + "/prefixes.txt"))
p.targets = read_model_defs(model_path + "/target.txt")
p.dump(defs_file)
self.in_dim = self.word_dim + 8 * self.afix_dim
self.dropout_ratio = dropout_ratio
super(QRNNParser, self).__init__(
emb_word=L.EmbedID(self.n_words, self.word_dim, ignore_label=IGNORE),
emb_suf=L.EmbedID(self.n_suffixes, self.afix_dim, ignore_label=IGNORE),
emb_prf=L.EmbedID(self.n_prefixes, self.afix_dim, ignore_label=IGNORE),
qrnn_fs=ChainList(),
qrnn_bs=ChainList(),
arc_dep=L.Linear(2 * self.hidden_dim, self.dep_dim),
arc_head=L.Linear(2 * self.hidden_dim, self.dep_dim),
rel_dep=L.Linear(2 * self.hidden_dim, self.dep_dim),
rel_head=L.Linear(2 * self.hidden_dim, self.dep_dim),
biaffine_arc=Biaffine(self.dep_dim),
biaffine_tag=Bilinear(self.dep_dim, self.dep_dim, len(self.targets))
)
in_dim = self.in_dim
for _ in range(self.nlayers):
self.qrnn_fs.add_link(QRNNLayer(in_dim, self.hidden_dim))
self.qrnn_bs.add_link(QRNNLayer(in_dim, self.hidden_dim))
in_dim = self.hidden_dim
# in_dim += self.hidden_dim
def load_pretrained_embeddings(self, path):
self.emb_word.W.data = read_pretrained_embeddings(path)
def __call__(self, ws, ss, ps, ls, cat_ts, dep_ts, weights):
"""
xs [(w,s,p,y), ..., ]
w: word, s: suffix, p: prefix, y: label
"""
try:
batchsize, length = ws.shape
cat_ys, dep_ys = self.forward(ws, ss, ps, ls, dep_ts if self.train else None)
cat_loss = reduce(lambda x, y: x + y,
[we * F.softmax_cross_entropy(y, t) \
for y, t, we in zip(cat_ys, cat_ts, weights)])
cat_acc = reduce(lambda x, y: x + y,
[F.accuracy(y, t, ignore_label=IGNORE) \
for y, t in zip(cat_ys, cat_ts)]) / batchsize
dep_loss = reduce(lambda x, y: x + y,
[we * F.softmax_cross_entropy(y, t) \
for y, t, we in zip(dep_ys, dep_ts, weights)])
dep_acc = reduce(lambda x, y: x + y,
[F.accuracy(y, t, ignore_label=IGNORE) \
for y, t in zip(dep_ys, dep_ts)]) / batchsize
except:
print "caught erroneous example ignoring..."
print [w.shape for w in ws]
print [w.shape for w in ss]
print [w.shape for w in ps]
print ls
print [w.shape for w in cat_ts]
print [w.shape for w in dep_ts]
xp = chainer.cuda.get_array_module(ws[0])
return Variable(xp.array(0, 'f'))
chainer.report({
"tagging_loss": cat_loss,
"tagging_accuracy": cat_acc,
"parsing_loss": dep_loss,
"parsing_accuracy": dep_acc
}, self)
return cat_loss + dep_loss
def forward(self, ws, ss, ps, ls, dep_ts=None):
batchsize, length = ws.shape
split = scanl(lambda x,y: x+y, 0, ls)[1:-1]
xp = chainer.cuda.get_array_module(ws[0])
ws = self.emb_word(ws) # (batch, length, word_dim)
ss = F.reshape(self.emb_suf(ss), (batchsize, length, -1))
ps = F.reshape(self.emb_prf(ps), (batchsize, length, -1))
hs = F.concat([ws, ss, ps], 2)
hs = F.dropout(hs, self.dropout_ratio, train=self.train)
fs = hs
for qrnn_f in self.qrnn_fs:
inp = fs
fs = qrnn_f(inp)
bs = hs[:, ::-1, :]
for qrnn_b in self.qrnn_bs:
inp = bs
bs = qrnn_b(inp)
# fs = [hs]
# for qrnn_f in self.qrnn_fs:
# inp = F.concat(fs, 2)
# fs.append(F.dropout(qrnn_f(inp), 0.32, train=self.train))
# fs = fs[-1]
#
# bs = [hs[:, ::-1, :]]
# for qrnn_b in self.qrnn_bs:
# inp = F.concat(bs, 2)
# bs.append(F.dropout(qrnn_b(inp), 0.32, train=self.train))
# bs = bs[-1]
#
hs = F.concat([fs, bs[:, ::-1, :]], 2)
_, hs_len, hidden = hs.shape
hs = [F.reshape(var, (hs_len, hidden))[:l] for l, var in \
zip(ls, F.split_axis(hs, batchsize, 0))]
dep_ys = [self.biaffine_arc(
F.elu(F.dropout(self.arc_dep(h), 0.32, train=self.train)),
F.elu(F.dropout(self.arc_head(h), 0.32, train=self.train))) for h in hs]
if dep_ts is not None:
heads = dep_ts
else:
heads = [F.argmax(y, axis=1) for y in dep_ys]
heads = F.elu(F.dropout(
self.rel_head(
F.vstack([F.embed_id(t, h, ignore_label=IGNORE) \
for h, t in zip(hs, heads)])),
0.32, train=self.train))
childs = F.elu(F.dropout(self.rel_dep(F.vstack(hs)), 0.32, train=self.train))
cat_ys = self.biaffine_tag(childs, heads)
cat_ys = list(F.split_axis(cat_ys, split, 0))
return cat_ys, dep_ys
def predict(self, xs):
"""
batch: list of splitted sentences
"""
fs = [self.extractor.process(x) for x in xs]
ws, ss, ps = concat_examples(fs)
ls = [len(x)+2 for x in xs]
cat_ys, dep_ys = self.forward(ws, ss, ps, ls)
return zip([F.log_softmax(y[1:-1]).data for y in cat_ys],
[F.log_softmax(y[1:-1, :-1]).data for y in dep_ys])
def predict_doc(self, doc, batchsize=16):
"""
doc list of splitted sentences
"""
res = []
for i in range(0, len(doc), batchsize):
res.extend([(i + j, 0, y)
for j, y in enumerate(self.predict(doc[i:i + batchsize]))])
return res
@property
def cats(self):
return zip(*sorted(self.targets.items(), key=lambda x: x[1]))[0]
def concat_examples(batch, device=None):
if len(batch) == 0:
raise ValueError('batch is empty')
if device is None:
def to_device(x):
return x
elif device < 0:
to_device = cuda.to_cpu
else:
def to_device(x):
return cuda.to_gpu(x, device, cuda.Stream.null)
result = [to_device(_concat_arrays([s[0] for s in batch], -1)), # ws
to_device(_concat_arrays([s[1] for s in batch], -1)), # ps
to_device(_concat_arrays([s[2] for s in batch], -1))] # ss
if len(batch[0]) == 7:
result.append([s[3] for s in batch]) # ls
result.append([to_device(s[4]) for s in batch]) # cat_ts
result.append([to_device(s[5]) for s in batch]) # dep_ts
result.append(to_device(_concat_arrays([s[6] for s in batch], None))) # weights
return tuple(result)
class MyUpdater(training.StandardUpdater):
def update_core(self):
batch = self._iterators['main'].next()
optimizer = self._optimizers['main']
loss_func = self.loss_func or optimizer.target
optimizer.update(loss_func, *self.converter(batch, self.device))
from chainer import reporter as reporter_module
class MyEvaluator(extensions.Evaluator):
def evaluate(self):
iterator = self._iterators['main']
target = self._targets['main']
eval_func = self.eval_func or target
if self.eval_hook:
self.eval_hook(self)
it = copy.copy(iterator)
summary = reporter_module.DictSummary()
for batch in it:
observation = {}
with reporter_module.report_scope(observation):
eval_func(*self.converter(batch, self.device))
summary.add(observation)
return summary.compute_mean()
def train(args):
model = QRNNParser(args.model, args.word_emb_size, args.afix_emb_size, args.nlayers,
args.hidden_dim, args.elu_dim, args.dep_dim, args.dropout_ratio, args.gpu >= 0)
with open(args.model + "/params", "w") as f: log(args, f)
if args.initmodel:
print 'Load model from', args.initmodel
chainer.serializers.load_npz(args.initmodel, model)
if args.pretrained:
print 'Load pretrained word embeddings from', args.pretrained
model.load_pretrained_embeddings(args.pretrained)
if args.gpu >= 0:
chainer.cuda.get_device(args.gpu).use()
model.to_gpu()
converter = lambda x, device: concat_examples(x, device=device)
if args.tritrain is not None:
train = QRNNTriTrainDataset(
args.model, args.train, args.tritrain, args.tri_weight)
else:
train = LSTMParserDataset(args.model, args.train)
train_iter = SerialIterator(train, args.batchsize)
val = LSTMParserDataset(args.model, args.val)
val_iter = chainer.iterators.SerialIterator(
val, 32, repeat=False, shuffle=False)
# val, args.batchsize, repeat=False, shuffle=False)
# optimizer = chainer.optimizers.Adam(beta2=0.9)
# optimizer = chainer.optimizers.MomentumSGD(momentum=0.7)
optimizer = chainer.optimizers.RMSprop(0.001, 0.9, 1e-8)
optimizer.setup(model)
optimizer.add_hook(WeightDecay(4e-6))
# optimizer.add_hook(GradientClipping(5.))
updater = MyUpdater(train_iter, optimizer,
device=args.gpu, converter=converter)
trainer = training.Trainer(updater, (args.epoch, 'epoch'), args.model)
val_interval = 1000, 'iteration'
log_interval = 200, 'iteration'
eval_model = model.copy()
eval_model.train = False
# trainer.extend(extensions.ExponentialShift(
# "eps", .75, init=2e-3, optimizer=optimizer), trigger=(2500, 'iteration'))
trainer.extend(MyEvaluator(val_iter, eval_model,
converter, device=args.gpu), trigger=val_interval)
trainer.extend(extensions.snapshot_object(
model, 'model_iter_{.updater.iteration}'), trigger=val_interval)
trainer.extend(extensions.LogReport(trigger=log_interval))
trainer.extend(extensions.PrintReport([
'epoch', 'iteration',
'main/tagging_accuracy', 'main/tagging_loss',
'main/parsing_accuracy', 'main/parsing_loss',
'validation/main/tagging_accuracy', 'validation/main/parsing_accuracy'
]), trigger=log_interval)
trainer.extend(extensions.ProgressBar(update_interval=10))
trainer.run()
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(
"CCG parser's LSTM supertag tagger")
subparsers = parser.add_subparsers()
# Creating training data
parser_c = subparsers.add_parser(
"create", help="create tagger input data")
parser_c.add_argument("path",
help="path to ccgbank data file")
parser_c.add_argument("out",
help="output directory path")
parser_c.add_argument("--cat-freq-cut",
type=int, default=10,
help="only allow categories which appear >= freq-cut")
parser_c.add_argument("--word-freq-cut",
type=int, default=5,
help="only allow words which appear >= freq-cut")
parser_c.add_argument("--afix-freq-cut",
type=int, default=5,
help="only allow afixes which appear >= freq-cut")
parser_c.add_argument("--subset",
choices=["train", "test", "dev", "all"],
default="train")
parser_c.add_argument("--mode",
choices=["train", "test"],
default="train")
parser_c.set_defaults(func=
(lambda args:
TrainingDataCreator.create_traindata(args)
if args.mode == "train"
else TrainingDataCreator.create_testdata(args)))
#TODO updater
# Do training using training data created through `create`
parser_t = subparsers.add_parser(
"train", help="train supertagger model")
parser_t.add_argument("model",
help="path to model directory")
parser_t.add_argument("--gpu", type=int, default=-1,
help="path to model directory")
parser_t.add_argument("train",
help="training data file path")
parser_t.add_argument("val",
help="validation data file path")
parser_t.add_argument("--tritrain",
help="tri-training data file path")
parser_t.add_argument("--tri-weight",
type=float, default=0.4,
help="multiply tri-training sample losses")
parser_t.add_argument("--batchsize",
type=int, default=16, help="batch size")
parser_t.add_argument("--epoch",
type=int, default=20, help="epoch")
parser_t.add_argument("--word-emb-size",
type=int, default=50,
help="word embedding size")
parser_t.add_argument("--afix-emb-size",
type=int, default=32,
help="character embedding size")
parser_t.add_argument("--nlayers",
type=int, default=1,
help="number of layers for each LSTM")
parser_t.add_argument("--hidden-dim",
type=int, default=128,
help="dimensionality of hidden layer")
parser_t.add_argument("--elu-dim",
type=int, default=64,
help="dimensionality of elu layer")
parser_t.add_argument("--dep-dim",
type=int, default=100,
help="dim")
parser_t.add_argument("--dropout-ratio",
type=float, default=0.5,
help="dropout ratio")
parser_t.add_argument("--initmodel",
help="initialize model with `initmodel`")
parser_t.add_argument("--pretrained",
help="pretrained word embeddings")
parser_t.set_defaults(func=train)
args = parser.parse_args()
args.func(args)
{
"content_hash": "4821227d5b09378a7c6fd97cc5b77867",
"timestamp": "",
"source": "github",
"line_count": 466,
"max_line_length": 95,
"avg_line_length": 38.547210300429185,
"alnum_prop": 0.5760173690363525,
"repo_name": "masashi-y/myccg",
"id": "90344659cf053329f5da14fbc3bc22f5bef8c978",
"size": "17964",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/py/qrnn_parser.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "5099"
},
{
"name": "C++",
"bytes": "114374"
},
{
"name": "Makefile",
"bytes": "979"
},
{
"name": "Python",
"bytes": "179658"
},
{
"name": "Shell",
"bytes": "718"
}
],
"symlink_target": ""
}
__author__ = 'Andreas Bader'
__version__ = "0.01"
import threading
import time
import vagrant
import os
import Util
import re
from fabric.api import *
import subprocess
class Vm():
vagrantFolders=None
pathVagrantfile=None
credFiles=None
basicFilesFolder=None
pathFolder=None
tmpFolder=None
name=None
logging=False
logger=None
created=False
vm=None
thread=None
provider=None
ip=None
# vagrantFolders: folders inside which the Vagrantfiles and their folders are located
# tmpFolder: Path to tempfolder
# name: name of the VM
# logger: Logger instance
# logging: if true, vagrant debug logging will be done
def __init__(self, vagrantFolders, credFiles, basicFilesFolder, tmpFolder, name, logger, provider, logging):
self.vagrantFolders = vagrantFolders
self.credFiles=credFiles
self.basicFilesFolder=basicFilesFolder
self.tmpFolder = tmpFolder
self.name = name
self.logger = logger
self.logging = logging
self.provider = provider
def start(self):
self.thread = threading.Thread(target=self.run)
self.thread.start()
def join(self):
self.thread.join()
def run(self):
if not self.created:
self.create_vm()
else:
self.destroy()
def destroy(self):
if self.vm != None:
try:
if self.vm.status()[0].state == vagrant.Vagrant.RUNNING:
self.vm.halt()
startTime = int(time.time())
while self.vm.status()[0].state == "powering-off" and (int(time.time()) - startTime) < 60 :
time.sleep(1)
if self.vm.status()[0].state != vagrant.Vagrant.POWEROFF and self.vm.status()[0].state != vagrant.Vagrant.SHUTOFF:
self.vm.halt(force=True)
return self.vm.destroy()
except subprocess.CalledProcessError:
# ignore warnings that destroy did not work (occurs sometimes)
self.logger.warning("There might be some leftovers from vm '%s'." %(self.name))
return True
return True
def create_vm(self):
# Check if you want to log
log_cm=None
self.logger.info("BEGIN Creating %s." %(self.name))
if self.logging:
try:
filename="%s_%s.log" %(self.name,time.strftime("%Y%m%d%H%M%S", time.localtime()))
log_cm = vagrant.make_file_cm(filename)
self.logger.info("Logging creation of %s to %s." % (self.name,filename))
except Exception, e:
self.logger.error('Failed to open file', exc_info=True)
return False
pathName=self.name.rsplit("_",1)[0] # Name is something like VMNAME_1, split _1 away!
# copy to tmpfolder (already checked that it exists; still need to check if the vm folder exists)
paths=Util.create_folder_path_list(self.vagrantFolders, [pathName])
pathTmp=os.path.join(self.tmpFolder,self.name)
if not Util.check_folders(paths, self.logger, False, True, True):
return False
if not Util.check_folder(pathTmp, self.logger, True):
return False
# Copy Vagrantfile in place
pathVagrantFileOld=os.path.join(pathTmp,"%s.vagrant"%(self.name))
pathVagrantFileNew=os.path.join(pathTmp,"Vagrantfile")
if not Util.copy_folders(paths,pathTmp,self.logger, True):
return False
self.pathFolder=pathTmp
for credFile in self.credFiles:
credFilesOld = Util.create_folder_path_list(self.vagrantFolders, [credFile])
pathCredFileNew = os.path.join(pathTmp,credFile)
for credFileOld in credFilesOld:
if Util.check_file_exists(credFileOld):
if not Util.copy_file(credFileOld,pathCredFileNew,self.logger):
return False
basicFileFoldersOld=Util.create_folder_path_list(self.vagrantFolders, [self.basicFilesFolder])
pathBasicFileFolderNew=os.path.join(pathTmp,self.basicFilesFolder)
if not Util.copy_folders(basicFileFoldersOld,pathBasicFileFolderNew,self.logger, True):
return False
# digital ocean needs a random name, otherwise we always get problems with multiple measurements
if Util.check_file_exists(pathVagrantFileNew):
self.logger.error("'%s' does already exist." %(pathVagrantFileNew))
return False
try:
file_old = open(pathVagrantFileOld, "r")
file_new = open(pathVagrantFileNew, "w")
for line in file_old:
if re.match(r'^\s*HOSTNAME\s+=\s+("[^"]+"|\'[^\']+\')\s*$',line) != None:
split_res = re.search(r'("[^"]+"|\'[^\']+\')',line)
if split_res != None:
if self.provider == "digital_ocean":
# allowed are numbers, letters, hyphens and dots
random_string = Util.get_random_string(10)
file_new.write("HOSTNAME = \"%s-%s\"\n" %(split_res.group()[1:-1],
random_string))
self.name = "%s-%s" % (split_res.group()[1:-1], random_string)
else:
self.name = "%s" % (split_res.group()[1:-1])
file_new.write(line)
else:
self.logger.warning("Could not parse hostname out of '%s'. "
"Using the default. Errors can occur." % (line))
file_new.write(line)
else:
file_new.write(line)
file_new.flush()
file_new.close()
file_old.close()
except Exception,e:
self.logger.error("An error occured while copying '%s' to '%s'." % (pathVagrantFileOld,
pathVagrantFileNew), exc_info=True)
# else:
# if not Util.copy_file(pathVagrantFileOld,pathVagrantFileNew,self.logger):
# return False
self.pathVagrantfile=pathVagrantFileNew
Util.clear_vagrant_files(pathTmp,self.logger)
# Create VM
try:
vm = vagrant.Vagrant(root=pathTmp, out_cm=log_cm, err_cm=log_cm)
self.vm=vm
vm.up(provider=self.provider)
# with settings(host_string= vm.user_hostname_port(),
# key_filename = vm.keyfile(),
# disable_known_hosts = True):
# run("uname -a")
except Exception, e:
self.logger.error('Failed while creating vm %s.' %(self.name), exc_info=True)
self.logger.info('Since creation failed, trying to destroy vm %s.' %(self.name), exc_info=True)
#if not self.vm.destroy():
# self.logger.error('Can not destroy %s.' %(self.name), exc_info=True)
## vm.destroy() seems to always return false at this stage (OpenStack), but destroying works fine -> ignore it.
self.destroy()
return False
self.logger.info("END Creating %s." %(self.name))
self.logger.info("GET IP of %s." %(self.name))
self.ip = self.get_ip()
# the IP Part is needed, because on OpenStack the 'internal' IP differs from the 'external' (SSH) IP
if self.ip == None:
self.logger.error('Failed getting IP while creating vm %s.' %(self.name), exc_info=True)
self.logger.info('Since creation failed, trying to destroy vm %s.' %(self.name), exc_info=True)
self.destroy()
return False
self.created=True
return True
# Return the output
def run_with_output (self,disableKnownHosts,command,warn_only=False,quiet=False):
with hide('output','running', 'warnings', 'stdout', 'stderr'),\
settings(host_string = self.vm.user_hostname_port(),
key_filename = self.vm.keyfile(),
disable_known_hosts = disableKnownHosts,
warn_only = warn_only):
result = run(command,warn_only=warn_only, quiet=quiet)
return result
# Only return true/false
def run_without_output (self,disableKnownHosts,command,warn_only=False,quiet=False,test=False):
result = self.run_with_output (disableKnownHosts,command,warn_only,quiet)
if result.return_code == 0 or (warn_only and result.return_code == 255) or (test and result.return_code == -1):
return True
else:
self.logger.error("Command '%s' on %s returned %s." %(command, self.vm.user_hostname_port(), result.return_code))
return False
def get_ip(self):
head_str = " | head -n1"
if self.provider in ["virtualbox", "digital_ocean"]:
# virtualbox uses second network interface for vm-interconnections
head_str = " | head -n4 | tail -n1"
result = self.run_with_output (True,'sudo ifconfig | grep -E -o "(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)"%s' % (head_str),True, True)
if result.return_code == 0:
return result.stdout
else:
self.logger.error("Can't get IP on %s, command returned %s." %(self.vm.user_hostname_port(), result.return_code))
return None
def get_hostname(self):
result = self.run_with_output (True,'hostname',True, True)
if result.return_code == 0:
return result.stdout
else:
self.logger.error("Can't get hostname on %s, command returned %s." %(self.vm.user_hostname_port(), result.return_code))
return None
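

# Illustrative sketch, not part of the original Vm.py: the HOSTNAME rewrite in
# create_vm() only matches simple assignment lines such as the first two below.
# The helper name `_hostname_regex_example` is invented for illustration only.
def _hostname_regex_example():
    pattern = r'^\s*HOSTNAME\s+=\s+("[^"]+"|\'[^\']+\')\s*$'
    assert re.match(pattern, 'HOSTNAME = "db-vm"') is not None
    assert re.match(pattern, "  HOSTNAME = 'db-vm'  ") is not None
    assert re.match(pattern, 'HOSTNAME="db-vm"') is None  # no whitespace around '='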
{
"content_hash": "6b12a13f4d565604e1545589262fe6c7",
"timestamp": "",
"source": "github",
"line_count": 215,
"max_line_length": 260,
"avg_line_length": 46.58139534883721,
"alnum_prop": 0.5649525711432851,
"repo_name": "TSDBBench/Overlord",
"id": "88c9b672abce7df290eb568a96c20f224dbcc162",
"size": "10063",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Vm.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "2418"
},
{
"name": "HTML",
"bytes": "4489"
},
{
"name": "Python",
"bytes": "578202"
},
{
"name": "Ruby",
"bytes": "107217"
},
{
"name": "Shell",
"bytes": "2134"
}
],
"symlink_target": ""
}
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("site", "0029_auto_20210120_0934"),
    ]

    operations = [
        migrations.AddField(
            model_name="sitesettings",
            name="fulfillment_allow_unpaid",
            field=models.BooleanField(default=True),
        ),
        migrations.AddField(
            model_name="sitesettings",
            name="fulfillment_auto_approve",
            field=models.BooleanField(default=True),
        ),
    ]
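

# Illustrative sketch, not part of the original migration: the model change this
# migration corresponds to would look roughly like the following two fields on the
# SiteSettings model (names taken from the AddField operations above).
#
#     fulfillment_allow_unpaid = models.BooleanField(default=True)
#     fulfillment_auto_approve = models.BooleanField(default=True)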
{
"content_hash": "8d252047a4a85a212e527f6c1ddb708b",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 52,
"avg_line_length": 25.476190476190474,
"alnum_prop": 0.5813084112149532,
"repo_name": "mociepka/saleor",
"id": "a7216c810f18f32c4475c406bd6316b6b3f2af28",
"size": "584",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "saleor/site/migrations/0030_auto_20210722_1141.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Dockerfile",
"bytes": "2228"
},
{
"name": "HTML",
"bytes": "249248"
},
{
"name": "Procfile",
"bytes": "290"
},
{
"name": "Python",
"bytes": "12686831"
},
{
"name": "Shell",
"bytes": "439"
}
],
"symlink_target": ""
}
"""
Event pub/sub routines.
"""
import inspect
from .enum import EnumType, EnumBase
from .errors import prepare_for_reraise, reraise
class EventHook:
"""This type allows to implement event pattern.
Allowed operations on EventHook objects:
* hook.subscribe(handler) # subscribe
* hook.unsubscribe(handler) # unsubscribe (requires O(handlerCount)!)
* hook(...) # invokes all event handlers
* hook.trigger(...) # another way to raise the event
* hook.safe_trigger(...) # definitely invokes all event handlers and raises
# the first thrown exception (if any)
"""
def __init__(self, handlers=None):
"""Constructor."""
self.handlers = handlers if handlers is not None else []
def subscribe(self, handler):
"""Adds a new event handler."""
assert callable(handler), "Invalid handler %s" % handler
self.handlers.append(handler)
def unsubscribe(self, handler):
"""Removes an event handler."""
self.handlers.remove(handler)
def safe_trigger(self, *args):
"""*Safely* triggers the event by invoking all its
handlers, even if some of them raise an exception.
If a set of exceptions is raised during handler
invocation sequence, this method rethrows the first one.
:param args: the arguments to invoke event handlers with.
"""
error = None
# iterate over a copy of the original list because some event handlers
# may mutate the list
for handler in list(self.handlers):
try:
handler(*args)
except BaseException as e:
if error is None:
prepare_for_reraise(e)
error = e
if error is not None:
reraise(error)
def trigger(self, *args):
"""Triggers the event by invoking all its handlers
with provided arguments.
.. note::
If one of event handlers raises an exception,
other handlers won't be invoked by this method.
:param args: the arguments to invoke event handlers with.
"""
for handler in list(self.handlers):
handler(*args)
def __call__(self, *args):
"""A shortcut to trigger method.
.. note::
If one of event handlers raises an exception,
other handlers won't be invoked by this method.
:param args: the arguments to invoke event handlers with.
"""
self.trigger(*args)
def __contains__(self, item):
"""Checks whether this set contains the specified event handler."""
return item in self.handlers
def __iter__(self):
"""Iterates through all registered event handlers."""
for handler in self.handlers:
yield handler
def __str__(self):
"""Gets the string representation of this object."""
return "EventHook" + repr(tuple(self.handlers))
def __repr__(self):
"""Gets the ``repr`` representation of this object."""
return self.__str__()
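

# Illustrative sketch, not part of the original qcore source: minimal usage of the
# EventHook operations listed in the class docstring above. The helper name
# `_event_hook_example` is invented for illustration only.
def _event_hook_example():
    hook = EventHook()
    seen = []
    hook.subscribe(seen.append)   # subscribe a handler
    hook.trigger("ping")          # hook("ping") would do the same
    hook.safe_trigger("pong")     # runs every handler even if one raises
    hook.unsubscribe(seen.append)
    hook.trigger("ignored")       # no handlers left
    assert seen == ["ping", "pong"]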
class SinkingEventHook(EventHook):
"""An implementation of EventHook that actually does nothing.
This type allows implementing a simple performance
optimization for ConstFuture, ErrorFuture and similar
classes, since they never raise their events.
"""
def subscribe(self, handler):
"""Does nothing."""
return self
def unsubscribe(self, handler):
"""Does nothing."""
return self
def safe_trigger(self, *args):
"""Does nothing."""
return
def trigger(self, *args):
"""Does nothing."""
return
def __call__(self, *args):
"""Does nothing."""
return
def __contains__(self, item):
"""Always returns False."""
return False
def __iter__(self):
"""Returns empty generator."""
return iter([])
def __str__(self):
"""Gets the string representation of this object."""
return "SinkingEventHook()"
sinking_event_hook = SinkingEventHook()
globals()["sinking_event_hook"] = sinking_event_hook
class EventInterceptor:
"""A context object helping to temporarily intercept
a set of events on an object exposing a set of event hooks.
"""
def __init__(self, source, **events):
"""
Constructor.
:param source: the object exposing a set of event hook properties
:param events: a set of event_hook_name=event_handler pairs specifying
which events to intercept.
"""
self.source = source
self.events = events
def __enter__(self):
"""Starts event interception."""
source = self.source
for name, handler in self.events.items():
hook = getattr(source, name)
hook.subscribe(handler)
def __exit__(self, typ, value, traceback):
"""Stops event interception."""
source = self.source
for name, handler in self.events.items():
hook = getattr(source, name)
hook.unsubscribe(handler)
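

# Illustrative sketch, not part of the original qcore source: temporarily
# intercepting an event hook exposed as an attribute, as described above.
# The names `_event_interceptor_example` and `Download` are invented for illustration.
def _event_interceptor_example():
    class Download:
        def __init__(self):
            self.on_progress = EventHook()

    seen = []
    source = Download()
    with EventInterceptor(source, on_progress=seen.append):
        source.on_progress.trigger(42)   # handler is attached only inside the block
    source.on_progress.trigger(43)       # no longer intercepted
    assert seen == [42]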
class EventHub:
"""Provides named event hooks on demand.
Use properties (or keys) of this object to access
named event hooks created on demand (i.e. on the first
access attempt).
"""
def __init__(self, source=None):
"""Constructor.
:param source: ``dict`` with initial set of named event hooks.
"""
if source is not None:
self.__dict__ = source
def on(self, event, handler):
"""Attaches the handler to the specified event.
@param event: event to attach the handler to. Any object can be passed
as event, but string is preferable. If qcore.EnumBase
instance is passed, its name is used as event key.
@param handler: event handler.
@return: self, so calls like this can be chained together.
"""
event_hook = self.get_or_create(event)
event_hook.subscribe(handler)
return self
def off(self, event, handler):
"""Detaches the handler from the specified event.
@param event: event to detach the handler from. Any object can be passed
as event, but string is preferable. If qcore.EnumBase
instance is passed, its name is used as event key.
@param handler: event handler.
@return: self, so calls like this can be chained together.
"""
event_hook = self.get_or_create(event)
event_hook.unsubscribe(handler)
return self
def trigger(self, event, *args):
"""Triggers the specified event by invoking EventHook.trigger under the hood.
@param event: event to trigger. Any object can be passed
as event, but string is preferable. If qcore.EnumBase
instance is passed, its name is used as event key.
@param args: event arguments.
@return: self, so calls like this can be chained together.
"""
event_hook = self.get_or_create(event)
event_hook.trigger(*args)
return self
def safe_trigger(self, event, *args):
"""Safely triggers the specified event by invoking
EventHook.safe_trigger under the hood.
@param event: event to trigger. Any object can be passed
as event, but string is preferable. If qcore.EnumBase
instance is passed, its name is used as event key.
@param args: event arguments.
@return: self, so calls like this can be chained together.
"""
event_hook = self.get_or_create(event)
event_hook.safe_trigger(*args)
return self
def get_or_create(self, event):
"""Gets or creates a new event hook for the specified event (key).
This method treats qcore.EnumBase-typed event keys specially:
enum_member.name is used as key instead of enum instance
in case such a key is passed.
Note that on/off/trigger/safe_trigger methods rely on this method,
so you can pass enum members there as well.
"""
if isinstance(event, EnumBase):
event = event.short_name
return self.__dict__.setdefault(event, EventHook())
def __getattr__(self, key):
"""Gets or creates a new event hook with the specified name.
Calls get_or_create under the hood.
Specified key must start with ``on_`` prefix; this prefix is
trimmed when key is passed to self.get_or_create.
"""
if key.startswith("on_"):
return self.get_or_create(key[3:])
else:
raise AttributeError(key)
def __contains__(self, item):
"""Checks if there is an event hook with the specified name."""
return item in self.__dict__
def __len__(self):
"""Gets the count of created event hooks."""
return len(self.__dict__)
def __getitem__(self, item):
"""Gets the event hook with the specified name."""
return self.__dict__[item]
def __setitem__(self, key, value):
"""Sets the event hook by its name."""
self.__dict__[key] = value
def __delitem__(self, key):
"""Removes the event hook with the specified name."""
del self.__dict__[key]
def __iter__(self):
"""Iterates over all (name, event_hook) pairs."""
return iter(self.__dict__.items())
def __repr__(self):
"""Gets the ``repr`` representation of this object."""
return "%s(%r)" % (self.__class__.__name__, self.__dict__)
# Needed because of a six bug: https://github.com/benjaminp/six/issues/252
@classmethod
def __prepare__(cls, name, bases, **kwargs):
return {}
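

# Illustrative sketch, not part of the original qcore source: named hooks are
# created on demand, and the "on_" attribute prefix maps to the same hook as the
# plain event key. The helper name `_event_hub_example` is invented for illustration.
def _event_hub_example():
    hub = EventHub()
    seen = []
    hub.on("saved", seen.append)       # same hook as hub.on_saved
    hub.on_saved.trigger("doc-1")      # attribute access strips the "on_" prefix
    hub.trigger("saved", "doc-2")
    assert seen == ["doc-1", "doc-2"]
    assert "saved" in hub and len(hub) == 1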
class EnumBasedEventHubType(type):
"""Metaclass for enum-based event hubs.
Asserts that all enum members are defined in class and vice versa.
"""
def __init__(cls, what, bases=None, dict=None):
super().__init__(what, bases, dict)
if cls.__name__ == "NewBase" and cls.__module__ == "six" and not dict:
# some versions of six generate an intermediate class that is created without a
# __based_on__
return
assert dict is not None and "__based_on__" in dict, (
"__based_on__ = [EnumA, EnumB] class member "
"must be used to subclass EnumBasedEventHub"
)
based_on = cls.__based_on__
if isinstance(based_on, EnumType):
based_on = [based_on]
cls_member_names = set()
for k, v in inspect.getmembers(cls):
if not k.startswith("on_"):
continue
if not isinstance(v, EventHook):
continue
cls_member_names.add(k[3:])
enum_members = {}
for enum_type in based_on:
for member in enum_type.get_members():
name = member.short_name
assert (
name not in enum_members
), "Two enum members share the same name: %r and %r " % (
member,
enum_members[name],
)
enum_members[name] = member
enum_member_names = set(enum_members.keys())
for name in enum_member_names:
assert name in cls_member_names, (
"Member %r is declared in one of enums, "
+ "but %r is not declared in class."
) % (name, "on_" + name)
for name in cls_member_names:
assert name in enum_member_names, (
"Member %r is declared in class, "
+ "but %r is not declared in any of enum(s)."
) % ("on_" + name, name)
# Members are removed from class, since EventHub anyway creates
# similar instance members
delattr(cls, "on_" + name)
# Needed because of a six bug: https://github.com/benjaminp/six/issues/252
@classmethod
def __prepare__(cls, name, bases, **kwargs):
return {}
class EnumBasedEventHub(EventHub, metaclass=EnumBasedEventHubType):
__based_on__ = []
hub = EventHub() # Default global event hub
{
"content_hash": "2bd20d05f5628f87ccf6be401a82e26b",
"timestamp": "",
"source": "github",
"line_count": 387,
"max_line_length": 91,
"avg_line_length": 31.82170542635659,
"alnum_prop": 0.5821356069833536,
"repo_name": "quora/qcore",
"id": "8bd33ff39dda0892e5d98a45f85676ff92b0c05d",
"size": "12889",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "qcore/events.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Cython",
"bytes": "7403"
},
{
"name": "Python",
"bytes": "211611"
}
],
"symlink_target": ""
} |
"""Spanish dictionary"""
es = {
"LANGUAGE": "Español",
# Client notifications
"config-cleared-notification": "Ajustes limpiados. Los cambios serán guardados cuando almacenes una configuración válida.",
"relative-config-notification": "Cargados los archivo(s) de configuración relativa: {}",
"connection-attempt-notification": "Intentando conectarse a {}:{}", # Port, IP
"reconnection-attempt-notification": "Se perdió la conexión con el servidor, intentando reconectar",
"disconnection-notification": "Desconectado del servidor",
"connection-failed-notification": "La conexión con el servidor falló",
"connected-successful-notification": "Conectado al servidor exitosamente",
"retrying-notification": "%s, Reintentando en %d segundos...", # Seconds
"reachout-successful-notification": "Se alcanzó {} ({}) satisfactoriamente",
"rewind-notification": "Rebobinado debido a diferencia de tiempo con {}", # User
"fastforward-notification": "Adelantado debido a diferencia de tiempo con {}", # User
"slowdown-notification": "Ralentizando debido a diferencia de tiempo con {}", # User
"revert-notification": "Revirtiendo a la velocidad normal",
"pause-notification": "{} pausado", # User
"unpause-notification": "{} resumido", # User
"seek-notification": "{} saltó desde {} hasta {}", # User, from time, to time
"current-offset-notification": "Compensación actual: {} segundos", # Offset
"media-directory-list-updated-notification": "Se han actualizado los directorios multimedia de Syncplay.",
"room-join-notification": "{} se unió al canal: '{}'", # User
"left-notification": "{} se fue", # User
"left-paused-notification": "{} se fue, {} pausó", # User who left, User who paused
"playing-notification": "{} está reproduciendo '{}' ({})", # User, file, duration
"playing-notification/room-addendum": " en la sala: '{}'", # Room
"not-all-ready": "No están listos: {}", # Usernames
"all-users-ready": "Todos están listos ({} users)", # Number of ready users
"ready-to-unpause-notification": "Se te ha establecido como listo - despausa nuevamente para resumir",
"set-as-ready-notification": "Se te ha establecido como listo",
"set-as-not-ready-notification": "Se te ha establecido como no-listo",
"autoplaying-notification": "Reproduciendo automáticamente en {}...", # Number of seconds until playback will start
"identifying-as-controller-notification": "Autentificando como el operador de la sala, con contraseña '{}'...",
"failed-to-identify-as-controller-notification": "{} falló la autentificación como operador de la sala.",
"authenticated-as-controller-notification": "{} autentificado como operador de la sala",
"created-controlled-room-notification": "Sala administrada '{}' creada con contraseña '{}'. Por favor guarda esta información para referencias futuras!\n\nIn managed rooms everyone is kept in sync with the room operator(s) who are the only ones who can pause, unpause, seek, and change the playlist.\n\nYou should ask regular viewers to join the room '{}' but the room operators can join the room '{}' to automatically authenticate themselves.", # RoomName, operatorPassword, roomName, roomName:operatorPassword # TODO: Translate
"file-different-notification": "El archivo que reproduces parece ser diferente al archivo de {}", # User
"file-differences-notification": "Tu archivo difiere de la(s) siguiente(s) forma(s): {}", # Differences
"room-file-differences": "Diferencias de archivo: {}", # File differences (filename, size, and/or duration)
"file-difference-filename": "nombre",
"file-difference-filesize": "tamaño",
"file-difference-duration": "duración",
"alone-in-the-room": "Estás solo en la sala",
"different-filesize-notification": " (el tamaño de su archivo difiere con el tuyo!)",
"userlist-playing-notification": "{} está reproduciendo:", # Username
"file-played-by-notification": "Archivo: {} está siendo reproducido por:", # File
"no-file-played-notification": "{} está ahora reproduciendo un archivo", # Username
"notplaying-notification": "Personas que no reproducen algún archivo:",
"userlist-room-notification": "En sala '{}':", # Room
"userlist-file-notification": "Archivo",
"controller-userlist-userflag": "Operador",
"ready-userlist-userflag": "Listo",
"update-check-failed-notification": "No se pudo determinar automáticamente que Syncplay {} esté actualizado. ¿Te gustaría visitar https://syncplay.pl/ para buscar actualizaciones manualmente?", # Syncplay version
"syncplay-uptodate-notification": "Syncplay está actualizado",
"syncplay-updateavailable-notification": "Una nueva versión de Syncplay está disponible. ¿Te gustaría visitar la página del lanzamiento?",
"mplayer-file-required-notification": "Al utilizar Syncplay con mplayer se debe proveer un archivo al inicio.",
"mplayer-file-required-notification/example": "Ejemplo de uso: syncplay [opciones] [url|ubicación/]nombreDelArchivo",
"mplayer2-required": "Syncplay no es compatible con MPlayer 1.x, por favor utiliza mplayer2 o mpv",
"unrecognized-command-notification": "Comando no reconocido",
"commandlist-notification": "Comandos disponibles:",
"commandlist-notification/room": "\tr [nombre] - cambiar de sala",
"commandlist-notification/list": "\tl - mostrar lista de usuarios",
"commandlist-notification/undo": "\tu - deshacer última búsqueda",
"commandlist-notification/pause": "\tp - activar pausa",
"commandlist-notification/seek": "\t[s][+-]tiempo - ir al tiempo definido, si no se especifica + o -, será el tiempo absoluto en segundos o min:sec",
"commandlist-notification/help": "\th - esta ayuda",
"commandlist-notification/toggle": "\tt - activa/inactiva señal que estás listo para ver",
"commandlist-notification/create": "\tc [nombre] - crear sala administrada usando el nombre de la sala actual",
"commandlist-notification/auth": "\ta [contraseña] - autentificar como operador de la sala con la contraseña de operador",
"commandlist-notification/chat": "\tch [mensaje] - enviar un mensaje en la sala",
"commandList-notification/queue": "\tqa [file/url] - add file or url to bottom of playlist", # TO DO: Translate
"commandList-notification/playlist": "\tql - show the current playlist", # TO DO: Translate
"commandList-notification/select": "\tqs [index] - select given entry in the playlist", # TO DO: Translate
"commandList-notification/delete": "\tqd [index] - delete the given entry from the playlist", # TO DO: Translate
"syncplay-version-notification": "Versión de Syncplay: {}", # syncplay.version
"more-info-notification": "Más información disponible en: {}", # projectURL
"gui-data-cleared-notification": "Syncplay limpió la ruta y el estado de la ventana utilizado por la GUI.",
"language-changed-msgbox-label": "El lenguaje se modificará cuando ejecutes Syncplay.",
"promptforupdate-label": "¿Está bien si Syncplay comprueba por actualizaciones automáticamente y de vez en cuando?",
"media-player-latency-warning": "Advertencia: El reproductor multimedia tardó {} segundos en responder. Si experimentas problemas de sincronización, cierra otros programas para liberar recursos del sistema; si esto no funciona, intenta con otro reproductor multimedia.", # Seconds to respond
"mpv-unresponsive-error": "mpv no ha respondido por {} segundos. Al aparecer no está funcionando correctamente. Por favor reinicia Syncplay.", # Seconds to respond
# Client prompts
"enter-to-exit-prompt": "Presiona intro para salir\n",
# Client errors
"missing-arguments-error": "Están faltando algunos argumentos necesarios. Por favor revisa --help",
"server-timeout-error": "La conexión con el servidor ha caducado",
"mpc-slave-error": "No se logró iniciar MPC en modo esclavo!",
"mpc-version-insufficient-error": "La versión de MPC no es suficiente, por favor utiliza `mpc-hc` >= `{}`",
"mpc-be-version-insufficient-error": "La versión de MPC no es suficiente, por favor utiliza `mpc-be` >= `{}`",
"mpv-version-error": "Syncplay no es compatible con esta versión de mpv. Por favor utiliza una versión diferente de mpv (p.ej. Git HEAD).",
"mpv-failed-advice": "The reason mpv cannot start may be due to the use of unsupported command line arguments or an unsupported version of mpv.", # TODO: Translate
"player-file-open-error": "El reproductor falló al abrir el archivo",
"player-path-error": "La ruta del reproductor no está definida correctamente. Los reproductores soportados son: mpv, mpv.net, VLC, MPC-HC, MPC-BE, mplayer2, y IINA",
"hostname-empty-error": "El nombre del host no puede ser vacío",
"empty-error": "{} no puede ser vacío", # Configuration
"media-player-error": "Error del reproductor multimedia: \"{}\"", # Error line
"unable-import-gui-error": "No se lograron importar las librerías GUI. Si no tienes instalado PySide, entonces tendrás que instalarlo para que funcione el GUI.",
"unable-import-twisted-error": "No se logró importar Twisted. Por favor instala Twisted v16.4.0 o posterior.",
"arguments-missing-error": "Están faltando algunos argumentos necesarios. Por favor revisa --help",
"unable-to-start-client-error": "No se logró iniciar el cliente",
"player-path-config-error": "La ruta del reproductor no está definida correctamente. Los reproductores soportados son: mpv, mpv.net, VLC, MPC-HC, MPC-BE, mplayer2 y IINA.",
"no-file-path-config-error": "El archivo debe ser seleccionado antes de iniciar el reproductor",
"no-hostname-config-error": "El nombre del host no puede ser vacío",
"invalid-port-config-error": "El puerto debe ser válido",
"empty-value-config-error": "{} no puede ser vacío", # Config option
"not-json-error": "No es una cadena de caracteres JSON válida\n",
"hello-arguments-error": "Not enough Hello arguments\n", # DO NOT TRANSLATE
"version-mismatch-error": "No coinciden las versiones del cliente y servidor\n",
"vlc-failed-connection": "Falló la conexión con VLC. Si no has instalado syncplay.lua y estás usando la última versión de VLC, por favor revisa https://syncplay.pl/LUA/ para obtener instrucciones. Syncplay and VLC 4 are not currently compatible, so either use VLC 3 or an alternative such as mpv.", # TO DO: TRANSLATE
"vlc-failed-noscript": "VLC ha reportado que la interfaz syncplay.lua no se ha instalado. Por favor revisa https://syncplay.pl/LUA/ para obtener instrucciones.",
"vlc-failed-versioncheck": "Esta versión de VLC no está soportada por Syncplay.",
"vlc-initial-warning": 'VLC does not always provide accurate position information to Syncplay, especially for .mp4 and .avi files. If you experience problems with erroneous seeking then please try an alternative media player such as <a href="https://mpv.io/">mpv</a> (or <a href="https://github.com/stax76/mpv.net/">mpv.net</a> for Windows users).', # TODO: Translatef
"feature-sharedPlaylists": "listas de reproducción compartidas", # used for not-supported-by-server-error
"feature-chat": "chat", # used for not-supported-by-server-error
"feature-readiness": "preparación", # used for not-supported-by-server-error
"feature-managedRooms": "salas administradas", # used for not-supported-by-server-error
"not-supported-by-server-error": "La característica {} no está soportada por este servidor..", # feature
"shared-playlists-not-supported-by-server-error": "El servidor no admite la función de listas de reproducción compartidas. Para asegurarse de que funciona correctamente, se requiere un servidor que ejecute Syncplay {}+, pero el servidor está ejecutando Syncplay {}.", # minVersion, serverVersion
"shared-playlists-disabled-by-server-error": "La función de lista de reproducción compartida no está habilitada en la configuración del servidor. Para utilizar esta función, debes conectarte a un servidor distinto.",
"invalid-seek-value": "Valor de búsqueda inválido",
"invalid-offset-value": "Valor de desplazamiento inválido",
"switch-file-not-found-error": "No se pudo cambiar el archivo '{0}'. Syncplay busca en los directorios de medios especificados.", # File not found
"folder-search-timeout-error": "Se anuló la búsqueda de medios en el directorio de medios, ya que tardó demasiado buscando en '{}'. Esto ocurrirá si seleccionas una carpeta con demasiadas subcarpetas en tu lista de carpetas de medios para buscar. Para que el cambio automático de archivos vuelva a funcionar, selecciona Archivo->Establecer directorios de medios en la barra de menú y elimina este directorio o reemplázalo con una subcarpeta apropiada. Si la carpeta está bien, puedes volver a reactivarlo seleccionando Archivo->Establecer directorios de medios y presionando 'OK'.", # Folder
"folder-search-first-file-timeout-error": "Se anuló la búsqueda de medios en '{}', ya que tardó demasiado buscando en acceder al directorio. Esto podría ocurrir si se trata de una unidad de red, o si tienes configurada la unidad para centrifugar luego de cierto período de inactividad. Para que el cambio automático de archivos vuelva a funcionar, por favor dirígete a Archivo->Establecer directorios de medios y elimina el directorio o resuelve el problema (p.ej. cambiando la configuración de ahorro de energía).", # Folder
"added-file-not-in-media-directory-error": "Has cargado un archivo en '{}' el cual no es un directorio de medios conocido. Puedes agregarlo como un directorio de medios seleccionado Archivo->Establecer directorios de medios en la barra de menú.", # Folder
"no-media-directories-error": "No se han establecido directorios de medios. Para que las funciones de lista de reproducción compartida y cambio de archivos funcionen correctamente, selecciona Archivo->Establecer directorios de medios y especifica dónde debe buscar Syncplay para encontrar archivos multimedia.",
"cannot-find-directory-error": "No se encontró el directorio de medios '{}'.Para actualizar tu lista de directorios de medios, seleccciona Archivo->Establecer directorios de medios desde la barra de menú y especifica dónde debe buscar Syncplay para encontrar archivos multimedia.",
"failed-to-load-server-list-error": "Error al cargar la lista de servidor públicos. Por favor visita https://www.syncplay.pl/ en tu navegador.",
# Client arguments
"argument-description": 'Solución para sincronizar la reproducción de múltiples instancias de reproductores de medios, a través de la red.',
"argument-epilog": 'Si no se especifican opciones, se utilizarán los valores de _config',
"nogui-argument": 'no mostrar GUI',
"host-argument": 'dirección del servidor',
"name-argument": 'nombre de usuario deseado',
"debug-argument": 'modo debug',
"force-gui-prompt-argument": 'hacer que aparezca el aviso de configuración',
"no-store-argument": 'no guardar valores en .syncplay',
"room-argument": 'sala por defecto',
"password-argument": 'contraseña del servidor',
"player-path-argument": 'ruta al ejecutable de tu reproductor',
"file-argument": 'archivo a reproducir',
"args-argument": 'opciones del reproductor, si necesitas pasar opciones que empiezan con -, pásalas utilizando \'--\'',
"clear-gui-data-argument": 'restablece ruta y los datos del estado de la ventana GUI almacenados como QSettings',
"language-argument": 'lenguaje para los mensajes de Syncplay (de/en/ru/it/es/pt_BR/pt_PT/tr)',
"version-argument": 'imprime tu versión',
"version-message": "Estás usando la versión de Syncplay {} ({})",
"load-playlist-from-file-argument": "loads playlist from text file (one entry per line)", # TODO: Translate
# Client labels
"config-window-title": "Configuración de Syncplay",
"connection-group-title": "Configuración de conexión",
"host-label": "Dirección del servidor: ",
"name-label": "Nombre de usuario (opcional):",
"password-label": "Contraseña del servidor (si corresponde):",
"room-label": "Sala por defecto: ",
"roomlist-msgbox-label": "Edit room list (one per line)", # TODO: Translate
"media-setting-title": "Configuración del reproductor multimedia",
"executable-path-label": "Ruta al reproductor multimedia:",
"media-path-label": "Ruta al video (opcional):",
"player-arguments-label": "Argumentos del reproductor (si corresponde):",
"browse-label": "Visualizar",
"update-server-list-label": "Actualizar lista",
"more-title": "Mostrar más configuraciones",
"never-rewind-value": "Nunca",
"seconds-suffix": " segs",
"privacy-sendraw-option": "Enviar crudo",
"privacy-sendhashed-option": "Enviar \"hasheado\"",
"privacy-dontsend-option": "No enviar",
"filename-privacy-label": "Información del nombre de archivo:",
"filesize-privacy-label": "Información del tamaño de archivo:",
"checkforupdatesautomatically-label": "Buscar actualizaciones de Syncplay automáticamente",
"autosavejoinstolist-label": "Add rooms you join to the room list", # TO DO: Translate
"slowondesync-label": "Ralentizar si hay una desincronización menor (no soportado en MPC-HC/BE)",
"rewindondesync-label": "Rebobinar si hay una desincronización mayor (recomendado)",
"fastforwardondesync-label": "Avanzar rápidamente si hay un retraso (recomendado)",
"dontslowdownwithme-label": "Nunca ralentizar ni rebobinar a otros (experimental)",
"pausing-title": "Pausando",
"pauseonleave-label": "Pausar cuando un usuario se va (p.ej. si se desconectan)",
"readiness-title": "Estado de preparación inicial",
"readyatstart-label": "Establecerme como \"listo-para-ver\" por defecto",
"forceguiprompt-label": "No mostrar siempre la ventana de configuración de Syncplay", # (Inverted)
"showosd-label": "Activar mensajes OSD",
"showosdwarnings-label": "Incluir advertencias (p.ej. cuando los archivos son distintos, los usuarios no están listos)",
"showsameroomosd-label": "Incluir eventos en tu sala",
"shownoncontrollerosd-label": "Incluir eventos de no-operadores en salas administradas",
"showdifferentroomosd-label": "Incluir eventos en otras salas",
"showslowdownosd-label": "Incluir notificaciones de ralentización/reversión",
"language-label": "Lenguaje:",
"automatic-language": "Predeterminado ({})", # Default language
"showdurationnotification-label": "Advertir sobre discrepancias en la duración de los medios",
"basics-label": "Básicos",
"readiness-label": "Reproducir/Pausar",
"misc-label": "Misc.",
"core-behaviour-title": "Comportamiento de la sala central",
"syncplay-internals-title": "Internos de Syncplay",
"syncplay-mediasearchdirectories-title": "Directorios para buscar medios",
"syncplay-mediasearchdirectories-label": "Directorios para buscar medios (una ruta por línea)",
"sync-label": "Sincronizar",
"sync-otherslagging-title": "Si otros se están quedando atrás...",
"sync-youlaggging-title": "Si tú te estás quedando atrás...",
"messages-label": "Mensajes",
"messages-osd-title": "Configuraciones de visualización en pantalla",
"messages-other-title": "Otras configuraciones de visualización",
"chat-label": "Chat",
"privacy-label": "Privacidad", # Currently unused, but will be brought back if more space is needed in Misc tab
"privacy-title": "Configuración de privacidad",
"unpause-title": "Si presionas reproducir, definir como listo y:",
"unpause-ifalreadyready-option": "Despausar si ya está definido como listo",
"unpause-ifothersready-option": "Despausar si ya está listo u otros en la sala están listos (predeterminado)",
"unpause-ifminusersready-option": "Despausar si ya está listo, o si todos los demás están listos y el mín. de usuarios están listos",
"unpause-always": "Siempre despausar",
"syncplay-trusteddomains-title": "Dominios de confianza (para servicios de transmisión y contenido alojado)",
"chat-title": "Entrada de mensaje de chat",
"chatinputenabled-label": "Habilitar entrada de chat a través de mpv",
"chatdirectinput-label": "Permitir entrada de chat instantánea (omitir tener que presionar Intro para chatear)",
"chatinputfont-label": "Fuente de entrada de chat",
"chatfont-label": "Establecer fuente",
"chatcolour-label": "Establecer color",
"chatinputposition-label": "Posición del área de entrada del mensaje en mpv",
"chat-top-option": "Arriba",
"chat-middle-option": "Medio",
"chat-bottom-option": "Fondo",
"chatoutputheader-label": "Salida de mensaje de chat",
"chatoutputfont-label": "Fuente de salida de chat",
"chatoutputenabled-label": "Habilitar salida de chat en el reproductor (solo mpv por ahora)",
"chatoutputposition-label": "Modo de salida",
"chat-chatroom-option": "Estilo de sala de chat",
"chat-scrolling-option": "Estilo de desplazamiento",
"mpv-key-tab-hint": "[TAB] para alternar acceso a los accesos directos de las teclas de la fila del alfabeto",
"mpv-key-hint": "[INTRO] para enviar mensaje. [ESC] para salir del modo de chat.",
"alphakey-mode-warning-first-line": "Puedes usar temporalmente los enlaces de mpv con las teclas a-z.",
"alphakey-mode-warning-second-line": "Presiona [TAB] para retornar al modo de chat de Syncplay.",
"help-label": "Ayuda",
"reset-label": "Restaurar valores predeterminados",
"run-label": "Ejecutar Syncplay",
"storeandrun-label": "Almacenar la configuración y ejecutar Syncplay",
"contact-label": "No dudes en enviar un correo electrónico a <a href=\"mailto:[email protected]\"><nobr>[email protected]</nobr></a>, chatea a través del canal de IRC <a href=\"https://webchat.freenode.net/?channels=#syncplay\"><nobr>#Syncplay</nobr></a> en irc.freenode.net, <a href=\"https://github.com/Uriziel/syncplay/issues\"><nobr>reportar un problema</nobr></a> vía GitHub, <a href=\"https://www.facebook.com/SyncplaySoftware\"><nobr>danos \"me gusta\" en Facebook</nobr></a>, <a href=\"https://twitter.com/Syncplay/\"><nobr>síguenos en Twitter</nobr></a>, o visita <a href=\"https://syncplay.pl/\"><nobr>https://syncplay.pl/</nobr></a>. No utilices Syncplay para enviar información sensible.",
"joinroom-label": "Unirse a la sala",
"joinroom-menu-label": "Unirse a la sala {}",
"seektime-menu-label": "Buscar tiempo",
"undoseek-menu-label": "Deshacer búsqueda",
"play-menu-label": "Reproducir",
"pause-menu-label": "Pausar",
"playbackbuttons-menu-label": "Mostrar botones de reproducción",
"autoplay-menu-label": "Mostrar botón de auto-reproducción",
"autoplay-guipushbuttonlabel": "Reproducir cuando todos estén listos",
"autoplay-minimum-label": "Mín. de usuarios:",
"sendmessage-label": "Enviar",
"ready-guipushbuttonlabel": "¡Estoy listo para ver!",
"roomuser-heading-label": "Sala / Usuario",
"size-heading-label": "Tamaño",
"duration-heading-label": "Duración",
"filename-heading-label": "Nombre de archivo",
"notifications-heading-label": "Notificaciones",
"userlist-heading-label": "Lista de quién reproduce qué",
"browseformedia-label": "Buscar archivos multimedia",
"file-menu-label": "&Archivo", # & precedes shortcut key
"openmedia-menu-label": "A&brir archivo multimedia",
"openstreamurl-menu-label": "Abrir URL de &flujo de medios",
"setmediadirectories-menu-label": "&Establecer directorios de medios",
"loadplaylistfromfile-menu-label": "&Load playlist from file", # TODO: Translate
"saveplaylisttofile-menu-label": "&Save playlist to file", # TODO: Translate
"exit-menu-label": "&Salir",
"advanced-menu-label": "A&vanzado",
"window-menu-label": "&Ventana",
"setoffset-menu-label": "Establecer &compensación",
"createcontrolledroom-menu-label": "C&rear sala administrada",
"identifyascontroller-menu-label": "&Identificar como operador de sala",
"settrusteddomains-menu-label": "Es&tablecer dominios de confianza",
"addtrusteddomain-menu-label": "Agregar {} como dominio de confianza", # Domain
"edit-menu-label": "&Edición",
"cut-menu-label": "Cor&tar",
"copy-menu-label": "&Copiar",
"paste-menu-label": "&Pegar",
"selectall-menu-label": "&Seleccionar todo",
"playback-menu-label": "Re&producción",
"help-menu-label": "A&yuda",
"userguide-menu-label": "Abrir &guía de usuario",
"update-menu-label": "Buscar actuali&zaciones",
"startTLS-initiated": "Intentando conexión segura",
"startTLS-secure-connection-ok": "Conexión segura establecida ({})",
"startTLS-server-certificate-invalid": 'Falló la conexión segura. El servidor utiliza un certificado inválido. Esta comunicación podría ser interceptada por un tercero. Para más detalles y solución de problemas, consulta <a href="https://syncplay.pl/trouble">aquí</a>.',
"startTLS-server-certificate-invalid-DNS-ID": "Syncplay does not trust this server because it uses a certificate that is not valid for its hostname.", # TODO: Translate
"startTLS-not-supported-client": "Este cliente no soporta TLS",
"startTLS-not-supported-server": "Este servidor no soporta TLS",
# TLS certificate dialog
"tls-information-title": "Detalles del certificado",
"tls-dialog-status-label": "<strong>Syncplay está utilizando una conexión cifrada con {}.</strong>",
"tls-dialog-desc-label": "El cifrado con un certificado digital, mantiene la información privada cuando se envía hacia o desde<br/>el servidor {}.",
"tls-dialog-connection-label": "Información cifrada utilizando \"Transport Layer Security\" (TLS), versión {} con la<br/>suite de cifrado: {}.",
"tls-dialog-certificate-label": "Certificado emitido por {} válido hasta {}.",
# About dialog
"about-menu-label": "Acerca de Sy&ncplay",
"about-dialog-title": "Acerca de Syncplay",
"about-dialog-release": "Versión {} lanzamiento {}",
"about-dialog-license-text": "Licenciado bajo la Licencia Apache Versión 2.0",
"about-dialog-license-button": "Licencia",
"about-dialog-dependencies": "Dependencias",
"setoffset-msgbox-label": "Establecer compensación",
"offsetinfo-msgbox-label": "Compensación (consulta https://syncplay.pl/guide/ para obtener instrucciones de uso):",
"promptforstreamurl-msgbox-label": "Abrir URL de flujo de medios",
"promptforstreamurlinfo-msgbox-label": "Publicar URL",
"addfolder-label": "Agregar carpeta",
"adduris-msgbox-label": "Agregar URLs a la lista de reproducción (una por línea)",
"editplaylist-msgbox-label": "Establecer lista de reproducción (una por línea)",
"trusteddomains-msgbox-label": "Dominios con los cuales está bien intercambiar automáticamente (uno por línea)",
"createcontrolledroom-msgbox-label": "Crear sala administrada",
"controlledroominfo-msgbox-label": "Ingresa el nombre de la sala administrada\r\n(consulta https://syncplay.pl/guide/ para obtener instrucciones de uso):",
"identifyascontroller-msgbox-label": "Identificar como operador de la sala",
"identifyinfo-msgbox-label": "Ingresa la contraseña de operador para esta sala\r\n(consulta https://syncplay.pl/guide/ para obtener instrucciones de uso):",
"public-server-msgbox-label": "Selecciona el servidor público para esta sesión de visualización",
"megabyte-suffix": " MB",
# Tooltips
"host-tooltip": "Nombre de host o IP para conectarse, opcionalmente incluyendo puerto (p.ej. syncplay.pl:8999). Sólo sincronizado con personas en el mismo servidor/puerto.",
"name-tooltip": "Apodo por el que se te conocerá. No hay registro, por lo que puedes cambiarlo fácilmente más tarde. Si no se especifica, se genera un nombre aleatorio.",
"password-tooltip": "Las contraseñas son sólo necesarias para conectarse a servidores privados.",
"room-tooltip": "La sala para unirse en la conexión puede ser casi cualquier cosa, pero sólo se sincronizará con las personas en la misma sala.",
"edit-rooms-tooltip": "Edit room list.", # TO DO: Translate
"executable-path-tooltip": "Ubicación de tu reproductor multimedia compatible elegido (mpv, mpv.net, VLC, MPC-HC/BE, mplayer2 o IINA).",
"media-path-tooltip": "Ubicación del video o flujo que se abrirá. Necesario para mplayer2.",
"player-arguments-tooltip": "Arguementos de línea de comandos adicionales / parámetros para pasar a este reproductor multimedia.",
"mediasearcdirectories-arguments-tooltip": "Directorios donde Syncplay buscará archivos de medios, p.ej. cuando estás usando la función \"clic para cambiar\". Syncplay buscará recursivamente a través de las subcarpetas.",
"more-tooltip": "Mostrar configuraciones usadas con menos frecuencia.",
"filename-privacy-tooltip": "Modo de privacidad para enviar el nombre del archivo que se está reproduciendo actualmente al servidor.",
"filesize-privacy-tooltip": "Modo de privacidad para enviar el tamaño del archivo que se está reproduciendo actualmente al servidor.",
"privacy-sendraw-tooltip": "Enviar esta información sin ofuscación. Ésta es la opción predeterminada en la mayoría de las funciones.",
"privacy-sendhashed-tooltip": "Enviar una versión \"hasheada\" de la información, para que sea menos visible para otros clientes.",
"privacy-dontsend-tooltip": "No enviar esta información al servidor. Esto proporciona la máxima privacidad.",
"checkforupdatesautomatically-tooltip": "Regularmente verificar con el sitio Web de Syncplay para ver si hay una nueva versión de Syncplay disponible.",
"autosavejoinstolist-tooltip": "When you join a room in a server, automatically remember the room name in the list of rooms to join.", # TO DO: Translate
"slowondesync-tooltip": "Reducir la velocidad de reproducción temporalmente cuando sea necesario, para volver a sincronizar con otros espectadores. No soportado en MPC-HC/BE.",
"dontslowdownwithme-tooltip": "Significa que otros no se ralentizan ni rebobinan si la reproducción se retrasa. Útil para operadores de la sala.",
"pauseonleave-tooltip": "Pausa la reproducción si te desconectas o alguien sale de tu sala.",
"readyatstart-tooltip": "Establecerte como 'listo' al inicio (de lo contrario, se te establecerá como 'no-listo' hasta que cambies tu estado de preparación)",
"forceguiprompt-tooltip": "El diálogo de configuración no es mostrado cuando se abre un archivo con Syncplay.", # (Inverted)
"nostore-tooltip": "Ejecutar Syncplay con la configuración dada, pero no guardar los cambios permanentemente.", # (Inverted)
"rewindondesync-tooltip": "Retroceder cuando sea necesario para volver a sincronizar. ¡Deshabilitar esta opción puede resultar en desincronizaciones importantes!",
"fastforwardondesync-tooltip": "Saltar hacia adelante cuando no está sincronizado con el operador de la sala (o tu posición ficticia 'Nunca ralentizar o rebobinar a otros' está activada).",
"showosd-tooltip": "Envía mensajes de Syncplay al reproductor multimedia OSD.",
"showosdwarnings-tooltip": "Mostrar advertencias si se está reproduciendo un archivo diferente, solo en la sala, usuarios no están listos, etc.",
"showsameroomosd-tooltip": "Mostrar notificaciones de OSD para eventos relacionados con la sala en la que está el usuario.",
"shownoncontrollerosd-tooltip": "Mostrar notificaciones de OSD para eventos relacionados con no-operadores que están en salas administradas.",
"showdifferentroomosd-tooltip": "Mostrar notificaciones de OSD para eventos relacionados la sala en la que no está el usuario.",
"showslowdownosd-tooltip": "Mostrar notificaciones de desaceleración / diferencia de la reversión.",
"showdurationnotification-tooltip": "Útil cuando falta un segmento de un archivo de varias partes, pero puede dar lugar a falsos positivos.",
"language-tooltip": "Idioma a ser utilizado por Syncplay.",
"unpause-always-tooltip": "Si presionas despausar siempre te pone como listo y despausa, en lugar de simplemente ponerte como listo.",
"unpause-ifalreadyready-tooltip": "Si presionas despausar cuando no estás listo, te pondrá como listo - presiona despausar nuevamente para despausar.",
"unpause-ifothersready-tooltip": "Si presionas despausar cuando no estás listo, sólo se despausará si los otros están listos.",
"unpause-ifminusersready-tooltip": "Si presionas despausar cuando no estás listo, sólo se despausará si los otros están listos y se cumple con el mínimo requerido de usuarios.",
"trusteddomains-arguments-tooltip": "Dominios con los cuales está bien intercambiar automáticamente, cuando las listas de reproducción compartidas están activas.",
"chatinputenabled-tooltip": "Activa la entrada de chat en mpv (presiona intro para chatear, intro para enviar, escape para cancelar)",
"chatdirectinput-tooltip": "Omitir tener que presionar 'intro' para ir al modo de entrada de chat en mpv. Presiona TAB en mpv para desactivar temporalmente esta función.",
"font-label-tooltip": "Fuente utilizada cuando se ingresan mensajes de chat en mpv. Sólo del lado del cliente, por lo que no afecta lo que otros ven.",
"set-input-font-tooltip": "Familia de fuentes utilizada cuando se ingresan mensajes de chat en mpv. Sólo del lado del cliente, por lo que no afecta lo que otros ven.",
"set-input-colour-tooltip": "Color de fuente utilizado cuando se ingresan mensajes de chat en mpv. Sólo del lado del cliente, por lo que no afecta lo que otros ven.",
"chatinputposition-tooltip": "Ubicación en mpv donde aparecerán los mensajes de chat cuando se presione intro y se escriba.",
"chatinputposition-top-tooltip": "Colocar la entrada del chat en la parte superior de la ventana de mpv.",
"chatinputposition-middle-tooltip": "Colocar la entrada del chat en el centro muerto de la ventana de mpv.",
"chatinputposition-bottom-tooltip": "Colocar la entrada del chat en la parte inferior de la ventana de mpv.",
"chatoutputenabled-tooltip": "Mostrar mensajes de chat en OSD (si está soportado por el reproductor multimedia).",
"font-output-label-tooltip": "Fuente de salida del chat.",
"set-output-font-tooltip": "Fuente utilizada para mostrar mensajes de chat.",
"chatoutputmode-tooltip": "Cómo se muestran los mensajes de chat.",
"chatoutputmode-chatroom-tooltip": "Mostrar nuevas líneas de chat directamente debajo de la línea anterior.",
"chatoutputmode-scrolling-tooltip": "Desplazar el texto del chat de derecha a izquierda.",
"help-tooltip": "Abrir la guía de usuario de Syncplay.pl.",
"reset-tooltip": "Restablecer todas las configuraciones a la configuración predeterminada.",
"update-server-list-tooltip": "Conectar a syncplay.pl para actualizar la lista de servidores públicos.",
"sslconnection-tooltip": "Conectado de forma segura al servidor. Haga clic para obtener los detalles del certificado.",
"joinroom-tooltip": "Abandonar la sala actual y unirse a la sala especificada.",
"seektime-msgbox-label": "Saltar al tiempo especificado (en segundos / min:seg). Usar +/- para una búsqueda relativa.",
"ready-tooltip": "Indica si estás listo para ver.",
"autoplay-tooltip": "Reproducir automáticamente cuando todos los usuarios que tienen indicador de preparación están listos, y se ha alcanzado el mínimo requerido de usuarios.",
"switch-to-file-tooltip": "Hacer doble clic para cambiar a {}", # Filename
"sendmessage-tooltip": "Enviar mensaje a la sala",
# In-userlist notes (GUI)
"differentsize-note": "¡Tamaño diferente!",
"differentsizeandduration-note": "¡Tamaño y duración diferentes!",
"differentduration-note": "¡Duración diferente!",
"nofile-note": "(No se está reproduciendo ningún archivo)",
# Server messages to client
"new-syncplay-available-motd-message": "Estás usando Syncplay {} pero hay una versión más nueva disponible en https://syncplay.pl", # ClientVersion
# Server notifications
"welcome-server-notification": "Bienvenido al servidor de Syncplay, ver. {0}", # version
"client-connected-room-server-notification": "{0}({2}) conectado a la sala '{1}'", # username, host, room
"client-left-server-notification": "{0} abandonó el servidor", # name
"no-salt-notification": "IMPORTANTE: Para permitir que las contraseñas del operador de la sala, generadas por esta instancia del servidor, sigan funcionando cuando se reinicie el servidor, por favor en el futuro agregar el siguiente argumento de línea de comandos al ejecutar el servidor de Syncplay: --salt {}", # Salt
# Server arguments
"server-argument-description": 'Solución para sincronizar la reproducción de múltiples instancias de MPlayer y MPC-HC/BE a través de la red. Instancia del servidor',
"server-argument-epilog": 'Si no se especifican opciones, serán utilizados los valores de _config',
"server-port-argument": 'puerto TCP del servidor',
"server-password-argument": 'contraseña del servidor',
"server-isolate-room-argument": '¿las salas deberían estar aisladas?',
"server-salt-argument": "cadena aleatoria utilizada para generar contraseñas de salas administradas",
"server-disable-ready-argument": "deshabilitar la función de preparación",
"server-motd-argument": "ruta al archivo del cual se obtendrá el texto motd",
"server-chat-argument": "¿Debería deshabilitarse el chat?",
"server-chat-maxchars-argument": "Número máximo de caracteres en un mensaje de chat (el valor predeterminado es {})", # Default number of characters
"server-maxusernamelength-argument": "Número máximo de caracteres para el nombre de usuario (el valor predeterminado es {})",
"server-stats-db-file-argument": "Habilitar estadísticas del servidor utilizando el archivo db SQLite proporcionado",
"server-startTLS-argument": "Habilitar conexiones TLS usando los archivos de certificado en la ruta provista",
"server-messed-up-motd-unescaped-placeholders": "El mensaje del dia contiene marcadores de posición sin escapar. Todos los signos $ deberían ser dobles ($$).",
"server-messed-up-motd-too-long": "El mensaje del día es muy largo - máximo de {} caracteres, se recibieron {}.",
# Server errors
"unknown-command-server-error": "Comando desconocido {}", # message
"not-json-server-error": "No es una cadena JSON válida {}", # message
"line-decode-server-error": "No es una cadena utf-8",
"not-known-server-error": "Debes ser reconocido por el servidor antes de enviar este comando",
"client-drop-server-error": "Caída del cliente: {} -- {}", # host, error
"password-required-server-error": "Contraseña requerida",
"wrong-password-server-error": "Contraseña ingresada incorrecta",
"hello-server-error": "Not enough Hello arguments", # DO NOT TRANSLATE
# Playlists
"playlist-selection-changed-notification": "{} cambió la selección de la lista de reproducción", # Username
"playlist-contents-changed-notification": "{} actualizó la lista de reproducción", # Username
"cannot-find-file-for-playlist-switch-error": "¡No se encontró el archivo {} en el directorio de medios para intercambiar en la lista de reproducción!", # Filename
"cannot-add-duplicate-error": "No se pudo agregar una segunda entrada para '{}' a la lista de reproducción ya que no se admiten duplicados.", # Filename
"cannot-add-unsafe-path-error": "No se pudo cargar automáticamente {} porque no es un dominio de confianza. Puedes intercambiar la URL manualmente dándole doble clic en la lista de reproducción, y agregar dominios de confianza vía Archivo->Avanzado->Establecer dominios de confianza. Si haces doble clic en una URL entonces puedes agregar su dominio como un dominio de confianza, desde el menú de contexto.", # Filename
"sharedplaylistenabled-label": "Activar listas de reproducción compartidas",
"removefromplaylist-menu-label": "Remover de la lista de reproducción",
"shuffleremainingplaylist-menu-label": "Mezclar el resto de la lista de reproducción",
"shuffleentireplaylist-menu-label": "Mezclar toda la lista de reproducción",
"undoplaylist-menu-label": "Deshacer el último cambio a la lista de reproducción",
"addfilestoplaylist-menu-label": "Agregar archivo(s) al final de la lista de reproducción",
"addurlstoplaylist-menu-label": "Agregar URL(s) al final de la lista de reproducción",
"editplaylist-menu-label": "Editar lista de reproducción",
"open-containing-folder": "Abrir directorio que contiene este archivo",
"addyourfiletoplaylist-menu-label": "Agregar tu archivo a la lista de reproducción",
"addotherusersfiletoplaylist-menu-label": "Agregar el archivo de {} a la lista de reproducción", # [Username]
"addyourstreamstoplaylist-menu-label": "Agregar tu flujo a la lista de reproducción",
"addotherusersstreamstoplaylist-menu-label": "Agregar el flujo de {} a la lista de reproducción", # [Username]
"openusersstream-menu-label": "Abrir el flujo de {}", # [username]'s
"openusersfile-menu-label": "Abrir el archivo de {}", # [username]'s
"playlist-instruction-item-message": "Desplazar aquí el archivo para agregarlo a la lista de reproducción compartida.",
"sharedplaylistenabled-tooltip": "Los operadores de la sala pueden agregar archivos a una lista de reproducción sincronizada, para que visualizar la misma cosa sea más sencillo para todos. Configurar directorios multimedia en 'Misc'.",
"playlist-empty-error": "Playlist is currently empty.", # TO DO: Translate
"playlist-invalid-index-error": "Invalid playlist index", # TO DO: Translate
}
| {
"content_hash": "127f31a217e9640f25c52f51e6d36f06",
"timestamp": "",
"source": "github",
"line_count": 519,
"max_line_length": 701,
"avg_line_length": 80.4393063583815,
"alnum_prop": 0.7325620389000671,
"repo_name": "alby128/syncplay",
"id": "60ca7419ee64d6a4b60329abf0385df20a209293",
"size": "42160",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "syncplay/messages_es.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Lua",
"bytes": "51824"
},
{
"name": "Makefile",
"bytes": "3693"
},
{
"name": "Python",
"bytes": "602477"
},
{
"name": "Shell",
"bytes": "192"
}
],
"symlink_target": ""
} |
import lib.requests as requests
import conf.config as conf
base = conf.read_config(conf.path, 'API', 'baseUrl')
class AddImg(object):
def __addImg(self,data):
api = conf.read_config(conf.path, 'API', 'addImgApi')
url = base + api
s = requests.session()
result = s.post(url,data)
s.keep_alive = False
return result.text
def callService(self,data):
return self.__addImg(data)
class GetImg(object):
def __getImg(self,imgId):
api = conf.read_config(conf.path, 'API', 'getImgApi')
url = base + api + '?image_id=' + imgId
s = requests.session()
result = s.post(url)
s.keep_alive = False
if result.text != u"null":
return result.json()['THUMBNAIL']
else:
return ""
def callService(self,imgId):
return self.__getImg(imgId)
| {
"content_hash": "c4569105e198506da0d703c29cafea02",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 61,
"avg_line_length": 30.258064516129032,
"alnum_prop": 0.5415778251599147,
"repo_name": "HH890612/MiliCloud",
"id": "0edc916f2c9bfd851ccc12b7c0e14b3b92ab6721",
"size": "963",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "service/thumbnailservice.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "938973"
}
],
"symlink_target": ""
} |
"""
Contains view context classes.
Context is our own concept.
It's part of the View abstract layer in MTV paradigm.
Data flow looks like this:
Urls -> Context -> View.
So, by concept, View receives context directly
without accessing url args.
It stands between urls and view functions.
Every context class is used via objects composition.
Code example to create tagged category:
>>> from django.http import HttpRequest
>>> url_kwargs = {'slug': 'my-page'}
>>> Category(url_kwargs, request=HttpRequest()) | TaggedCategory()
"""
import typing
from abc import ABC, abstractmethod
from collections import defaultdict
from functools import lru_cache, partial
from django import http
from django.conf import settings
from django.core.paginator import Paginator, InvalidPage
from django.db.models import QuerySet
from django_user_agents.utils import get_user_agent
from catalog.models import ProductQuerySet, Tag, TagQuerySet
from images.models import Image
from pages.models import ModelPage
class SortingOption:
def __init__(self, index=0):
options = settings.CATEGORY_SORTING_OPTIONS[index]
self.label = options['label']
self.field = options['field']
self.direction = options['direction']
@property
def directed_field(self):
return self.direction + self.field
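# Illustrative only (the option dict below is a hypothetical
# CATEGORY_SORTING_OPTIONS entry, not taken from any real settings module):
# given {'label': 'Price, low to high', 'field': 'price', 'direction': ''},
# SortingOption(0).directed_field == 'price'; with direction '-' it becomes
# '-price', i.e. a value ready to pass to QuerySet.order_by().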
class PaginatorLinks:
def __init__(self, number, path, paginated: Paginator):
self.paginated = paginated
self.number = number
self.path = path
self.index = number - 1
self.neighbor_bounds = settings.PAGINATION_NEIGHBORS // 2
self.neighbor_range = list(self.paginated.page_range)
def page(self):
try:
return self.paginated.page(self.number)
except InvalidPage:
raise http.Http404('Page does not exist')
def showed_number(self):
return self.index * self.paginated.per_page + self.page().object_list.count()
def _url(self, number):
self.paginated.validate_number(number)
return self.path if number == 1 else f'{self.path}?page={number}'
def prev_numbers(self):
return self.neighbor_range[:self.index][-self.neighbor_bounds:]
def next_numbers(self):
return self.neighbor_range[self.index + 1:][:self.neighbor_bounds]
def number_url_map(self):
numbers = self.prev_numbers() + self.next_numbers()
return {number: self._url(number) for number in numbers}
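# A hedged sketch of the intent (the path, page counts and the
# PAGINATION_NEIGHBORS value below are illustrative, not project settings):
# with PAGINATION_NEIGHBORS = 2, current page 3 of 10 and path '/catalog/',
# number_url_map() would yield {2: '/catalog/?page=2', 4: '/catalog/?page=4'},
# while _url(1) collapses to the bare '/catalog/' path.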
@lru_cache(maxsize=64)
def prepare_tile_products(
products: ProductQuerySet, product_pages: QuerySet, tags: TagQuerySet=None
):
assert isinstance(products, ProductQuerySet)
brands = (
tags
.filter_by_products(products)
.get_brands(products)
) if tags else defaultdict(lambda: None)
return [
(product, brands.get(product))
for product in products
]
class ObjectsComposition:
super: 'ObjectsComposition' = None
def __or__(self, other: 'ObjectsComposition'):
other.super = self
return other
# @todo #550:120m Move context realization to pure objects composition.
# Discussed some thoughts with Artemiy via call.
# Artemiy will do it.
# For example SortedCategory should
# consist of separated SortedList and Category classes/objects.
class AbstractContext(ObjectsComposition, ABC):
super: 'AbstractContext' = None
def __init__( # Ignore PyDocStyleBear
self,
url_kwargs: typing.Dict[str, str]=None,
request: http.HttpRequest=None
):
"""
:param url_kwargs: Came from `urls` module.
:param request: Came from `urls` module
"""
self.url_kwargs_ = url_kwargs or {}
self.request_ = request
@property
def url_kwargs(self) -> typing.Dict[str, str]:
return self.url_kwargs_ or getattr(self.super, 'url_kwargs', {})
@property
def request(self) -> http.HttpRequest:
return self.request_ or self.super.request
@abstractmethod
def get_context_data(self) -> typing.Dict[str, typing.Any]:
...
class AbstractPageContext(AbstractContext, ABC):
super: 'AbstractPageContext' = None
def __init__( # Ignore PyDocStyleBear
self,
url_kwargs: typing.Dict[str, str]=None,
request: http.HttpRequest=None,
page: ModelPage=None
):
"""
:param url_kwargs: Came from `urls` module.
:param request: Came from `urls` module
"""
self.page_ = page
super().__init__(url_kwargs, request)
@property
@lru_cache(maxsize=1)
def page(self):
return (
self.page_
if self.page_ is not None
else getattr(self.super, 'page', None)
)
class AbstractProductsListContext(AbstractPageContext, ABC):
super: 'AbstractProductsListContext' = None
def __init__( # Ignore PyDocStyleBear
self,
url_kwargs: typing.Dict[str, str]=None,
request: http.HttpRequest=None,
# parent class requires it
page: ModelPage=None,
products: ProductQuerySet=None,
product_pages: QuerySet=None,
):
"""
:param url_kwargs: Came from `urls` module.
:param request: Came from `urls` module.
:param products: Every project provides products from DB.
"""
super().__init__(url_kwargs, request, page)
assert(isinstance(products, ProductQuerySet) or products is None)
self.products_ = products
self.product_pages_ = product_pages
@property
def product_pages(self) -> QuerySet:
return (
self.product_pages_
if self.product_pages_ is not None
else self.super.product_pages
)
@property
def products(self) -> ProductQuerySet:
if self.super:
return self.super.products
elif isinstance(self.products_, ProductQuerySet):
return self.products_
else:
raise NotImplementedError('Set products queryset')
class ProductImages(AbstractProductsListContext):
@property
def images(self) -> typing.Dict[int, Image]:
raise NotImplementedError()
def get_context_data(self):
return {
'product_images': self.images,
**(
self.super.get_context_data()
if self.super else {}
),
}
class Category(AbstractProductsListContext):
# this list is synced with templates.
# See templates/catalog/category_navigation.html at SE project for example.
PRODUCT_LIST_VIEW_TYPES = ['tile', 'list']
@property
def products(self) -> ProductQuerySet:
# code like this breaks isolation,
# it'll be fixed at #183
products = self.products_ or super().products
return products.active().filter_descendants(self.page.model)
def get_context_data(self):
"""Add sorting options and view_types in context."""
view_type = self.request.session.get('view_type', 'tile')
assert view_type in self.PRODUCT_LIST_VIEW_TYPES
return {
'products_data': [], # empty for optimization
# can be `tile` or `list`. Defines products list layout.
'view_type': view_type,
}
class TaggedCategory(AbstractProductsListContext):
def __init__( # Ignore PyDocStyleBear
self,
url_kwargs: typing.Dict[str, str]=None,
request: http.HttpRequest=None,
page: ModelPage=None,
products: ProductQuerySet=None,
product_pages: QuerySet=None,
tags: TagQuerySet=None
):
"""
:param url_kwargs: Came from `urls` module.
:param request: Came from `urls` module.
:param products: Every project provides products from DB.
:param tags: Every project provides tags from DB.
"""
super().__init__(url_kwargs, request, page, products, product_pages)
# it's not good. Arg should not be default.
# That's how we'll prevent assertion.
# But we'll throw away inheritance in se#567.
assert isinstance(tags, QuerySet), 'tags is required arg'
self.all_tags = tags
def get_sorting_index(self):
return int(self.url_kwargs.get('sorting', 0))
def get_undirected_sorting_options(self) -> typing.List[str]:
sorting_option = SortingOption(index=self.get_sorting_index())
return [sorting_option.field]
@property
def tags(self) -> typing.Optional[TagQuerySet]:
request_tags = self.url_kwargs.get('tags')
if not request_tags:
return None
slugs = Tag.parse_url_tags(request_tags)
tags = self.all_tags.filter(slug__in=slugs)
if not tags:
raise http.Http404('No such tag.')
return tags
@property
def products(self):
products = self.super.products
tags = self.tags
if tags:
products = (
products
.filter(tags__in=tags)
# See the ProductQuerySet.tagged
# for detail about `distinct` and `order_by` above
.distinct(*self.get_undirected_sorting_options())
.order_by(*self.get_undirected_sorting_options())
)
return products
def get_context_data(self):
context = self.super.get_context_data()
tags = self.tags
group_tags_pairs = (
self.all_tags
.filter_by_products(self.products)
.group_tags()
)
return {
**context,
'tags': tags,
'group_tags_pairs': group_tags_pairs.items(),
'products_data': [], # empty for optimization
}
class DBTemplate(AbstractPageContext):
"""Processes some page data fields as templates with their own context."""
@property
@lru_cache(maxsize=1)
def page(self):
page = self.super.page
context = self.get_super_context_data_cached()
def template_context(page, tag_titles, tags):
return {
'page': page,
'tag_titles': tag_titles,
'tags': tags,
}
tags = context['tags']
if tags:
tag_titles = tags.as_title()
page.get_template_render_context = partial(
template_context, page, tag_titles, tags
)
return page
@lru_cache(maxsize=1)
def get_super_context_data_cached(self):
return self.super.get_context_data()
@lru_cache(maxsize=1)
def get_context_data(self):
return {
**self.get_super_context_data_cached(),
'page': self.page,
}
class SortingCategory(AbstractProductsListContext):
def get_sorting_index(self):
return int(self.url_kwargs.get('sorting', 0))
def get_sorting_options(self) -> typing.List[str]:
sorting_index = int(self.url_kwargs.get('sorting', 0))
sorting_option = SortingOption(index=sorting_index)
return [sorting_option.directed_field]
@property
def products(self) -> ProductQuerySet:
return self.super.products.order_by(*self.get_sorting_options())
def get_context_data(self):
context = self.super.get_context_data()
return {
**context,
'products_data': [], # empty for optimization
'sort': self.get_sorting_index(),
}
class PaginationCategory(AbstractProductsListContext):
def get_products_count(self):
"""Calculate max products list size from request. List size depends on device type."""
mobile_view = get_user_agent(self.request).is_mobile
return settings.PRODUCTS_ON_PAGE_MOB if mobile_view else settings.PRODUCTS_ON_PAGE_PC
def get_paginated_page_or_404(self, per_page, page_number) -> Paginator:
try:
return Paginator(self.all_products, per_page).page(page_number)
except InvalidPage:
raise http.Http404('Page does not exist')
@property
def products_on_page(self):
return int(self.request.GET.get(
'step', self.get_products_count(),
))
@property
def page_number(self):
return int(self.request.GET.get('page', 1))
@property
def all_products(self) -> ProductQuerySet:
return self.super.products
@property
def products(self) -> ProductQuerySet:
"""Only products for current page."""
paginated_page = self.get_paginated_page_or_404(
self.products_on_page, self.page_number
)
# it's queryset, but it's sliced
products: ProductQuerySet = paginated_page.object_list
return products
@property
def product_pages(self):
return self.super.product_pages.filter(shopelectro_product__in=self.products)
@property
def products_count(self):
return (self.page_number - 1) * self.products_on_page + self.products.count()
def check_pagination_args(self):
if (
self.page_number < 1 or
self.products_on_page not in settings.CATEGORY_STEP_MULTIPLIERS
):
raise http.Http404('Page does not exist.') # Ignore CPDBear
def get_context_data(self):
context = self.super.get_context_data()
self.check_pagination_args()
# @todo #187:30m Uncomment the if_404 check for empty products list.
# To do it fix stb tests to cover this case.
# if not self.products:
# raise http.Http404('Page without products does not exist.')
paginated = PaginatorLinks(
self.page_number,
self.request.path,
Paginator(self.all_products, self.products_on_page)
)
paginated_page = paginated.page()
total_products = self.all_products.count()
return {
**context,
'products': self.products,
'total_products': total_products,
'products_count': self.products_count,
'paginated': paginated,
'paginated_page': paginated_page,
'sorting_options': settings.CATEGORY_SORTING_OPTIONS.values(),
'limits': settings.CATEGORY_STEP_MULTIPLIERS,
}
class ProductBrands(AbstractProductsListContext):
@property
def tags(self):
return self.super.super.super.tags
@property
def brands(self):
assert isinstance(self.products, ProductQuerySet)
brands = (
self.tags
.filter_by_products(self.products)
.get_brands(self.products)
) if self.tags else defaultdict(lambda: None)
return {
product.id: brands.get(product)
for product in self.products
}
def get_context_data(self):
return {
'product_brands': self.brands,
**(
self.super.get_context_data()
if self.super else {}
),
}
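# A hedged end-to-end sketch of one possible composition chain (the page,
# request and queryset objects below are placeholders, and the exact chain a
# project uses may differ; see the module docstring for the `|` idiom):
#
# context = (
#     Category(url_kwargs, request=request, page=page,
#              products=products, product_pages=product_pages)
#     | TaggedCategory(tags=Tag.objects.all())
#     | SortingCategory()
#     | PaginationCategory()
#     | DBTemplate()
# )
# template_context = context.get_context_data()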
| {
"content_hash": "ecb6469d0cf46b4db680a943414bb010",
"timestamp": "",
"source": "github",
"line_count": 501,
"max_line_length": 94,
"avg_line_length": 29.99201596806387,
"alnum_prop": 0.6130041261812857,
"repo_name": "fidals/refarm-site",
"id": "a2ab2545b5c0914f268e7c839e96396e44b6d5a5",
"size": "15026",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "catalog/context.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "64305"
},
{
"name": "HTML",
"bytes": "28723"
},
{
"name": "JavaScript",
"bytes": "31422"
},
{
"name": "Python",
"bytes": "296885"
}
],
"symlink_target": ""
} |
try:
import smbus
except ImportError:
raise ImportError("Error : python-smbus not found.\nInstall with 'sudo apt-get install python-smbus'")
import re
"""
================================================
ABElectronics Python Helper Functions
Version 1.1 Created 20/01/2015
Python 2 only
Requires python 2 smbus to be installed with: sudo apt-get install python-smbus
This file contains functions to load puthon smbus into an instance variable.
The bus object can then be used by multiple devices without conflicts.
================================================
"""
class i2c_core:
@staticmethod
def get_smbus():
# detect i2C port number and assign to i2c_bus
i2c_bus = 0
for line in open('/proc/cpuinfo').readlines():
m = re.match('(.*?)\s*:\s*(.*)', line)
if m:
(name, value) = (m.group(1), m.group(2))
# Banana Pi / Pro / R1
if name == "Hardware":
if value[-4:] in ('sun7i'):
i2c_bus = 2
return 2
break
# Raspberry Pi
elif name == "Revision":
if value[-4:] in ('0002', '0003'):
i2c_bus = 0
return 0
else:
i2c_bus = 1
return 1
break
try:
return smbus.SMBus(i2c_bus)
except IOError:
print ("Could not open the i2c bus.")
print ("Please check that i2c is enabled and python-smbus and i2c-tools are installed.")
def __init__(self, address, busnum=-1, debug=False):
self.address = address
self.bus = smbus.SMBus(busnum if busnum >= 0 else i2c_core.get_smbus())
self.debug = debug
if self.debug==True:
print self.bus
# Read
def read_byte(self, adr):
return self.bus.read_byte_data(self.address, adr)
# Read a single byte
def read(self):
return self.bus.read_byte(self.address)
# Read a block of data
def read_block_data(self, cmd):
return self.bus.read_block_data(self.address, cmd)
# Read a word of data
def read_word_data(self, cmd):
return self.bus.read_word_data(self.address, cmd)
# Read a block of ranged data
def read_block(self, start, length):
return self.bus.read_i2c_block_data(self.address, start, length)
# Reads a unsigned 16-bit value from the I2C device #
def read_word_U16(self, adr, little_endian=True):
result = self.bus.read_byte_data(self.address, adr)
if not little_endian:
result = ((result << 8) & 0xFF00) + (result >> 8)
return result
# Reads a signed 16-bit value from the I2C device
def read_word_S16(self, adr, little_endian=True):
val = self.read_word_U16(adr,little_endian)
if (val >= 32768):
return -((65535 - val) + 1)
else:
return val
def read_word(self, reg):
high = self.read_byte(reg)
low = self.read_byte(reg+1)
val = (high << 8) + low
return val
def read_word_2c(self, reg):
val = self.read_word(reg)
if (val >= 0x8000):
return -((65535 - val) + 1)
else:
return val
# Write a single command
def write_cmd(self, cmd):
self.bus.write_byte(self.address, cmd)
# Writes an 8-bit value to the specified register/address
def write_8(self, reg, value):
self.bus.write_byte_data(self.address, reg, value)
# Writes a 16-bit value to the specified register/address pair
def write_16(self, reg, value):
self.bus.write_word_data(self.address, reg, value)
# Write a block of data
def write_block_data(self, cmd, data):
self.bus.write_block_data(self.address, cmd, data)
# Writes an array of bytes using I2C format
def writeList(self, reg, list):
self.bus.write_i2c_block_data(self.address, reg, list)
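# A minimal usage sketch (Python 2, as noted above). The device address and
# register numbers are hypothetical examples, not tied to any real sensor:
#
# dev = i2c_core(0x48)          # bus number auto-detected via get_smbus()
# value = dev.read_byte(0x00)   # read one byte from register 0x00
# dev.write_8(0x01, 0xFF)       # write 0xFF to register 0x01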
| {
"content_hash": "2351ec80a01973285574000b944a118c",
"timestamp": "",
"source": "github",
"line_count": 129,
"max_line_length": 106,
"avg_line_length": 28.294573643410853,
"alnum_prop": 0.6389041095890411,
"repo_name": "CaptainStouf/python_libs",
"id": "d88fa5440889a5c6a255f9442e73b44c61c9b7eb",
"size": "3695",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "i2c/i2c_core.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "42017"
}
],
"symlink_target": ""
} |
import telnetlib
import time
from .adapter import Adapter
class TelnetAdapter(Adapter):
""" Adapter class for using the Python telnetlib package to allow
communication to instruments
:param host: host address of the instrument
:param port: TCPIP port
:param query_delay: delay in seconds between write and read in the ask
method
:param preprocess_reply: optional callable used to preprocess strings
received from the instrument. The callable returns the processed string.
:param kwargs: Valid keyword arguments for telnetlib.Telnet, currently
this is only 'timeout'
"""
def __init__(self, host, port=0, query_delay=0, preprocess_reply=None,
**kwargs):
super().__init__(preprocess_reply=preprocess_reply)
self.query_delay = query_delay
safe_keywords = ['timeout']
for kw in kwargs:
if kw not in safe_keywords:
raise TypeError(
f"TelnetAdapter: unexpected keyword argument '{kw}', "
f"allowed are: {str(safe_keywords)}")
self.connection = telnetlib.Telnet(host, port, **kwargs)
def write(self, command):
""" Writes a command to the instrument
:param command: command string to be sent to the instrument
"""
self.connection.write(command.encode())
def read(self):
""" Read something even with blocking the I/O. After something is
received check again to obtain a full reply.
:returns: String ASCII response of the instrument.
"""
return self.connection.read_some().decode() + \
self.connection.read_very_eager().decode()
def ask(self, command):
""" Writes a command to the instrument and returns the resulting ASCII
response
:param command: command string to be sent to the instrument
:returns: String ASCII response of the instrument
"""
self.write(command)
time.sleep(self.query_delay)
return self.read()
def __repr__(self):
return "<TelnetAdapter(host=%s, port=%d)>" % (self.connection.host, self.connection.port)
| {
"content_hash": "54d072789794dde7e09e1179d8fa91ba",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 97,
"avg_line_length": 35.85245901639344,
"alnum_prop": 0.6360310928212163,
"repo_name": "ralph-group/pymeasure",
"id": "9efaa5e27921e385bf8dd10d84d401fba1354bc1",
"size": "3344",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pymeasure/adapters/telnet.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "689774"
}
],
"symlink_target": ""
} |
from __future__ import print_function # This *must* be the first line
import pytest
import os
import subprocess
from pyethereum import tester, utils, transactions, blocks, processblock, rlp
import serpent
from solc import solc
class TestMul7(object):
OWNER = {'address': tester.a0, 'key': tester.k0}
MANAGER1 = {'address': tester.a1, 'key': tester.k1}
USER1 = {'address': tester.a2, 'key': tester.k2}
# Contracts
mul7 = 'mul7.sol'
mul7se = 'mul7.se'
# Setup
def setup_class(self):
self.state = tester.state() # Create test blockchain
# Solidity version
_, self.mul7_evm = solc(self.mul7) # Compile
print('>>> Solidity evm: {}'.format(self.mul7_evm))
self.addr = self.state.contract(self.mul7_evm, self.OWNER['key'], 0)
print('>>> Solidity contract address: {}'.format(self.addr))
#self.mul7_evm = '0x{}'.format(self.mul7_evm)
self.mul7_decoded = self.mul7_evm.decode('hex')
print('>>> Solidity decode-hex: {}'.format(self.mul7_evm.decode('hex')))
#self.state.evm(self.mul7_evm, sender=self.OWNER, endowment=0)
self.state.evm(self.mul7_decoded, sender=self.OWNER['key'], endowment=0)
# Serpent version
#self.mul7se_evm = self.state.abi_contract(self.mul7se)
self.mul7se_evm = serpent.compile(open(self.mul7se).read())
self.mul7se_rlp_decode = rlp.decode(self.mul7se_evm)
print('>>> Serpent compile: {}'.format(self.mul7se_evm))
#print('>>> Serpent rlp-decode: {}'.format(self.mul7se_rlp_decode))
#self.state.evm(self.mul7_evm, sender=self.OWNER, endowment=0)
#self.state.mine(n=1, coinbase=self.OWNER)
#self.snapshot = self.state.snapshot()
def setup_method(self, method):
#self.state.revert(self.snapshot)
pass
# Tests
def test_mul7(self):
#ans = self.mul7.register(self.OWNER['address'])
assert False
| {
"content_hash": "c17e5e08d73748407d8c481ddd55e54f",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 80,
"avg_line_length": 34.67857142857143,
"alnum_prop": 0.631307929969104,
"repo_name": "joelcan/tools-eth-contract-dev",
"id": "cedc1b276580b2396ef44f37b7f72502403cacf2",
"size": "1942",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test_mul7.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1657"
},
{
"name": "Python",
"bytes": "513392"
},
{
"name": "Shell",
"bytes": "1202"
}
],
"symlink_target": ""
} |
from django.contrib import admin
from .models import (
InvoiceSettings,
TimeCode,
)
class InvoiceSettingsAdmin(admin.ModelAdmin):
pass
admin.site.register(InvoiceSettings, InvoiceSettingsAdmin)
class TimeCodeAdmin(admin.ModelAdmin):
pass
admin.site.register(TimeCode, TimeCodeAdmin)
| {
"content_hash": "6bd1ffd808b4861f248950552b9794e4",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 58,
"avg_line_length": 17,
"alnum_prop": 0.7777777777777778,
"repo_name": "pkimber/invoice",
"id": "6a85f05e74d5d8a2824f7ded1e6669fc758f4de4",
"size": "332",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "invoice/admin.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "44415"
},
{
"name": "Python",
"bytes": "202112"
},
{
"name": "Shell",
"bytes": "808"
}
],
"symlink_target": ""
} |
from typing import Any, List
import logging
from pajbot.managers.adminlog import AdminLogManager
from pajbot.managers.db import DBManager
from pajbot.managers.handler import HandlerManager
from pajbot.models.command import Command, CommandExample
from pajbot.modules.base import BaseModule
log = logging.getLogger(__name__)
class BanphraseModule(BaseModule):
ID = __name__.split(".")[-1]
NAME = "Banphrases"
DESCRIPTION = "Looks at each message for banned phrases, and takes actions accordingly"
ENABLED_DEFAULT = True
CATEGORY = "Moderation"
SETTINGS: List[Any] = []
def is_message_bad(self, source, msg_raw, _event):
res = self.bot.banphrase_manager.check_message(msg_raw, source)
if res is not False:
self.bot.banphrase_manager.punish(source, res)
return True
return False # message was ok
def enable(self, bot):
HandlerManager.add_handler("on_message", self.on_message, priority=150, run_if_propagation_stopped=True)
def disable(self, bot):
HandlerManager.remove_handler("on_message", self.on_message)
def on_message(self, source, message, whisper, event, **rest):
if whisper:
return
if source.level >= 500 or source.moderator:
return
if self.is_message_bad(source, message, event):
# we matched a filter.
# return False so no more code is run for this message
return False
@staticmethod
def add_banphrase(bot, source, message, **rest):
"""Method for creating and editing banphrases.
Usage: !add banphrase BANPHRASE [options]
Multiple options available:
--length LENGTH
--perma/--no-perma
"""
if message:
options, phrase = bot.banphrase_manager.parse_banphrase_arguments(message)
if options is False:
bot.whisper(source, "Invalid banphrase")
return False
options["added_by"] = source.id
options["edited_by"] = source.id
banphrase, new_banphrase = bot.banphrase_manager.create_banphrase(phrase, **options)
if new_banphrase is True:
bot.whisper(source, f"Added your banphrase (ID: {banphrase.id})")
AdminLogManager.post("Banphrase added", source, banphrase.id, banphrase.phrase)
return True
banphrase.set(**options)
banphrase.data.set(edited_by=options["edited_by"])
DBManager.session_add_expunge(banphrase)
bot.banphrase_manager.commit()
bot.whisper(
source,
f"Updated your banphrase (ID: {banphrase.id}) with ({', '.join([key for key in options if key != 'added_by'])})",
)
AdminLogManager.post("Banphrase edited", source, banphrase.id, banphrase.phrase)
@staticmethod
def remove_banphrase(bot, source, message, **rest):
if not message:
bot.whisper(source, "Usage: !remove banphrase (BANPHRASE_ID)")
return False
banphrase_id = None
try:
banphrase_id = int(message)
except ValueError:
pass
banphrase = bot.banphrase_manager.find_match(message=message, banphrase_id=banphrase_id)
if banphrase is None:
bot.whisper(source, "No banphrase with the given parameters found")
return False
AdminLogManager.post("Banphrase removed", source, banphrase.id, banphrase.phrase)
bot.whisper(source, f"Successfully removed banphrase with id {banphrase.id}")
bot.banphrase_manager.remove_banphrase(banphrase)
def load_commands(self, **options):
self.commands["add"] = Command.multiaction_command(
level=100,
delay_all=0,
delay_user=0,
default=None,
command="add",
commands={
"banphrase": Command.raw_command(
self.add_banphrase,
level=500,
description="Add a banphrase!",
delay_all=0,
delay_user=0,
examples=[
CommandExample(
None,
"Create a banphrase",
chat="user:!add banphrase testman123\n" "bot>user:Inserted your banphrase (ID: 83)",
description="This creates a banphrase with the default settings. Whenever a non-moderator types testman123 in chat they will be timed out for 300 seconds and notified through a whisper that they said something they shouldn't have said",
).parse(),
CommandExample(
None,
"Create a banphrase that permabans people",
chat="user:!add banphrase testman123 --perma\n" "bot>user:Inserted your banphrase (ID: 83)",
description="This creates a banphrase that permabans the user who types testman123 in chat. The user will be notified through a whisper that they said something they shouldn't have said",
).parse(),
CommandExample(
None,
"Change the default timeout length for a banphrase",
chat="user:!add banphrase testman123 --time 123\n"
"bot>user:Updated the given banphrase (ID: 83) with (time, extra_args)",
description="Changes the default timeout length to a custom time of 123 seconds",
).parse(),
CommandExample(
None,
"Make it so a banphrase cannot be triggered by subs",
chat="user:!add banphrase testman123 --subimmunity\n"
"bot>user:Updated the given banphrase (ID: 83) with (sub_immunity)",
description="Changes a command so that the banphrase can only be triggered by people who are not subscribed to the channel.",
).parse(),
],
)
},
)
self.commands["remove"] = Command.multiaction_command(
level=100,
delay_all=0,
delay_user=0,
default=None,
command="remove",
commands={
"banphrase": Command.raw_command(
self.remove_banphrase,
level=500,
delay_all=0,
delay_user=0,
description="Remove a banphrase!",
examples=[
CommandExample(
None,
"Remove a banphrase",
chat="user:!remove banphrase KeepoKeepo\n"
"bot>user:Successfully removed banphrase with id 33",
description="Removes a banphrase with the trigger KeepoKeepo.",
).parse(),
CommandExample(
None,
"Remove a banphrase with the given ID.",
chat="user:!remove banphrase 25\n" "bot>user:Successfully removed banphrase with id 25",
description="Removes a banphrase with id 25",
).parse(),
],
)
},
)
| {
"content_hash": "30cbb28437f7a29ebdb82044e9f5b52b",
"timestamp": "",
"source": "github",
"line_count": 182,
"max_line_length": 264,
"avg_line_length": 42.15934065934066,
"alnum_prop": 0.53864199139841,
"repo_name": "pajlada/tyggbot",
"id": "9d86fe4e4213f7d1f73e9aa2ff1861e624383629",
"size": "7673",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pajbot/modules/banphrase.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "8495"
},
{
"name": "HTML",
"bytes": "114111"
},
{
"name": "JavaScript",
"bytes": "163746"
},
{
"name": "Mako",
"bytes": "494"
},
{
"name": "Python",
"bytes": "727982"
},
{
"name": "Shell",
"bytes": "544"
}
],
"symlink_target": ""
} |
import time
from urllib import urlencode
from threading import current_thread, RLock
from django.conf import settings
from graphite.http_pool import http
from graphite.readers import FetchInProgress
from graphite.logger import log
from graphite.util import unpickle
from graphite.worker_pool.pool import pool_apply
from graphite.readers.utils import BaseReader
class InFlight(object):
"""This object is used to cache sub-requests within a single request."""
def __init__(self, store, requestContext):
# Make sure we add our data to requestContext.
if requestContext is None:
requestContext = {}
for k in ['inflight_requests', 'inflight_locks']:
if k not in requestContext:
requestContext[k] = {}
self.lock = store.lock
self.requests = requestContext['inflight_requests']
self.locks = requestContext['inflight_locks']
def start_request(self, url, request):
with self.lock:
self.requests[url] = request
def get_request_lock(self, url):
with self.lock:
lock = self.locks.get(url, None)
if not lock:
self.locks[url] = lock = RLock()
return lock
def get_request(self, url):
with self.lock:
return self.requests.get(url, None)
class RemoteReader(BaseReader):
__slots__ = (
'store',
'metric_path',
'intervals',
'bulk_query',
'connection')
def __init__(self, store, node_info, bulk_query=None):
self.store = store
self.metric_path = node_info.get(
'path') or node_info.get('metric_path')
self.intervals = node_info['intervals']
self.bulk_query = set(bulk_query) or (
[self.metric_path] if self.metric_path else []
)
self.connection = None
def __repr__(self):
return '<RemoteReader[%x]: %s>' % (id(self), self.store.host)
@staticmethod
def _log(msg, logger):
logger(('thread %s at %fs ' %
(current_thread().name, time.time())) + msg)
@classmethod
def log_debug(cls, msg):
if settings.DEBUG:
cls._log(msg, log.info)
@classmethod
def log_error(cls, msg):
cls._log(msg, log.exception)
def get_intervals(self):
return self.intervals
def fetch(self, startTime, endTime, now=None, requestContext=None):
seriesList = self.fetch_list(startTime, endTime, now, requestContext)
def _fetch(seriesList):
if seriesList is None:
return None
for series in seriesList:
if series['name'] == self.metric_path:
time_info = (
series['start'],
series['end'],
series['step'])
return (time_info, series['values'])
return None
if isinstance(seriesList, FetchInProgress):
return FetchInProgress(lambda: _fetch(seriesList.waitForResults()))
return _fetch(seriesList)
def fetch_list(self, startTime, endTime, now=None, requestContext=None):
t = time.time()
in_flight = InFlight(self.store, requestContext)
query_params = [
('format', 'pickle'),
('local', '1'),
('noCache', '1'),
('from', str(int(startTime))),
('until', str(int(endTime)))
]
if not self.bulk_query:
return []
for target in self.bulk_query:
query_params.append(('target', target))
if now is not None:
query_params.append(('now', str(int(now))))
query_string = urlencode(query_params)
urlpath = '/render/'
url = "%s://%s%s" % ('https' if settings.INTRACLUSTER_HTTPS else 'http',
self.store.host, urlpath)
url_full = "%s?%s" % (url, query_string)
headers = requestContext.get('forwardHeaders') if requestContext else None
lock = in_flight.get_request_lock(url_full)
with lock:
request = in_flight.get_request(url_full)
if request:
log.debug("RemoteReader:: Returning cached FetchInProgress %s" % url_full)
return request
data = self._fetch_list_locked(url, query_string, query_params, headers)
in_flight.start_request(url_full, data)
log.debug(
"RemoteReader:: Returning %s in %fs" % (url_full, time.time() - t))
return data
def _fetch_list_locked(self, url, query_string, query_params, headers):
url_full = "%s?%s" % (url, query_string)
jobs = [(self._fetch, url, query_string, query_params, headers)]
q = pool_apply(self.store.finder.worker_pool(), jobs)
log.debug('RemoteReader:: Storing FetchInProgress for %s' % url_full)
return FetchInProgress(_Results(q))
def _fetch(self, url, query_string, query_params, headers):
url_full = "%s?%s" % (url, query_string)
log.debug(
"RemoteReader:: Starting to execute _fetch %s" % url_full)
try:
log.debug("ReadResult:: Requesting %s" % url_full)
result = http.request(
'POST' if settings.REMOTE_STORE_USE_POST else 'GET',
url,
fields=query_params,
headers=headers,
timeout=settings.REMOTE_FETCH_TIMEOUT,
)
if result.status != 200:
self.store.fail()
self.log_error("ReadResult:: Error response %d from %s" % (result.status, url_full))
data = []
else:
data = unpickle.loads(result.data)
except Exception as err:
self.store.fail()
self.log_error("ReadResult:: Error requesting %s: %s" % (url_full, err))
data = []
log.debug("RemoteReader:: Completed _fetch %s" % url_full)
return data
class _Results(object):
def __init__(self, queue):
self.lock = RLock()
self.results = None
self.queue = queue
def __call__(self):
with self.lock:
return self.read_locked()
def read_locked(self):
if self.results is not None:
log.debug(
'RemoteReader:: retrieve completed (cached) %s' %
(', '.join([result['path'] for result in self.results])),
)
return self.results
# otherwise we get it from the queue and keep it for later
results = self.queue.get(block=True)
for i in range(len(results)):
results[i]['path'] = results[i]['name']
if not results:
log.debug('RemoteReader:: retrieve has received no results')
self.results = results or []
log.debug(
'RemoteReader:: retrieve completed %s' %
(', '.join([result['path'] for result in results])),
)
return self.results
| {
"content_hash": "0821f7fe78c9b411ca15b4620212adf2",
"timestamp": "",
"source": "github",
"line_count": 222,
"max_line_length": 100,
"avg_line_length": 31.815315315315317,
"alnum_prop": 0.5571286988531785,
"repo_name": "brutasse/graphite-web",
"id": "9509d717eacd5ad2c985b32b67d76524ef6621c3",
"size": "7063",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "webapp/graphite/readers/remote.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "150191"
},
{
"name": "HTML",
"bytes": "21170"
},
{
"name": "JavaScript",
"bytes": "1681705"
},
{
"name": "Perl 6",
"bytes": "857"
},
{
"name": "Python",
"bytes": "1068379"
},
{
"name": "Ruby",
"bytes": "1950"
},
{
"name": "Shell",
"bytes": "1113"
}
],
"symlink_target": ""
} |
"""Built-in volume type properties."""
from oslo_config import cfg
from oslo_db import exception as db_exc
import six
from cinder import context
from cinder import db
from cinder import exception
from cinder.i18n import _, _LE
from cinder.openstack.common import log as logging
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
def create(context,
name,
extra_specs=None,
is_public=True,
projects=None,
description=None):
"""Creates volume types."""
extra_specs = extra_specs or {}
projects = projects or []
try:
type_ref = db.volume_type_create(context,
dict(name=name,
extra_specs=extra_specs,
is_public=is_public,
description=description),
projects=projects)
except db_exc.DBError as e:
LOG.exception(_LE('DB error: %s') % six.text_type(e))
raise exception.VolumeTypeCreateFailed(name=name,
extra_specs=extra_specs)
return type_ref
def update(context, id, description):
"""Update volume type by id."""
if id is None:
msg = _("id cannot be None")
raise exception.InvalidVolumeType(reason=msg)
try:
type_updated = db.volume_type_update(context,
id,
dict(description=description))
except db_exc.DBError as e:
LOG.exception(_LE('DB error: %s') % six.text_type(e))
raise exception.VolumeTypeUpdateFailed(id=id)
return type_updated
def destroy(context, id):
"""Marks volume types as deleted."""
if id is None:
msg = _("id cannot be None")
raise exception.InvalidVolumeType(reason=msg)
else:
db.volume_type_destroy(context, id)
def get_all_types(context, inactive=0, search_opts=None):
"""Get all non-deleted volume_types.
    Pass ``inactive=True`` as argument if you want deleted volume types returned also.
"""
search_opts = search_opts or {}
filters = {}
if 'is_public' in search_opts:
filters['is_public'] = search_opts['is_public']
del search_opts['is_public']
vol_types = db.volume_type_get_all(context, inactive, filters=filters)
if search_opts:
LOG.debug("Searching by: %s" % search_opts)
def _check_extra_specs_match(vol_type, searchdict):
for k, v in searchdict.iteritems():
if (k not in vol_type['extra_specs'].keys()
or vol_type['extra_specs'][k] != v):
return False
return True
# search_option to filter_name mapping.
filter_mapping = {'extra_specs': _check_extra_specs_match}
result = {}
for type_name, type_args in vol_types.iteritems():
# go over all filters in the list
for opt, values in search_opts.iteritems():
try:
filter_func = filter_mapping[opt]
except KeyError:
# no such filter - ignore it, go to next filter
continue
else:
if filter_func(type_args, values):
result[type_name] = type_args
break
vol_types = result
return vol_types
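# A hedged usage sketch (the backend name below is hypothetical): filter the
# public volume types whose extra specs pin a particular backend.
def _example_list_public_backend_types(ctxt):
    search = {'is_public': True,
              'extra_specs': {'volume_backend_name': 'lvm'}}
    return get_all_types(ctxt, search_opts=search)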
def get_volume_type(ctxt, id, expected_fields=None):
"""Retrieves single volume type by id."""
if id is None:
msg = _("id cannot be None")
raise exception.InvalidVolumeType(reason=msg)
if ctxt is None:
ctxt = context.get_admin_context()
return db.volume_type_get(ctxt, id, expected_fields=expected_fields)
def get_volume_type_by_name(context, name):
"""Retrieves single volume type by name."""
if name is None:
msg = _("name cannot be None")
raise exception.InvalidVolumeType(reason=msg)
return db.volume_type_get_by_name(context, name)
def get_default_volume_type():
"""Get the default volume type."""
name = CONF.default_volume_type
vol_type = {}
if name is not None:
ctxt = context.get_admin_context()
try:
vol_type = get_volume_type_by_name(ctxt, name)
except exception.VolumeTypeNotFoundByName as e:
# Couldn't find volume type with the name in default_volume_type
# flag, record this issue and move on
# TODO(zhiteng) consider add notification to warn admin
            LOG.exception(_LE('Default volume type is not found, '
'please check default_volume_type config: %s') %
six.text_type(e))
return vol_type
def get_volume_type_extra_specs(volume_type_id, key=False):
volume_type = get_volume_type(context.get_admin_context(),
volume_type_id)
extra_specs = volume_type['extra_specs']
if key:
if extra_specs.get(key):
return extra_specs.get(key)
else:
return False
else:
return extra_specs
def add_volume_type_access(context, volume_type_id, project_id):
"""Add access to volume type for project_id."""
if volume_type_id is None:
msg = _("volume_type_id cannot be None")
raise exception.InvalidVolumeType(reason=msg)
return db.volume_type_access_add(context, volume_type_id, project_id)
def remove_volume_type_access(context, volume_type_id, project_id):
"""Remove access to volume type for project_id."""
if volume_type_id is None:
msg = _("volume_type_id cannot be None")
raise exception.InvalidVolumeType(reason=msg)
return db.volume_type_access_remove(context, volume_type_id, project_id)
def is_encrypted(context, volume_type_id):
if volume_type_id is None:
return False
encryption = db.volume_type_encryption_get(context, volume_type_id)
return encryption is not None
def get_volume_type_encryption(context, volume_type_id):
if volume_type_id is None:
return None
encryption = db.volume_type_encryption_get(context, volume_type_id)
return encryption
def get_volume_type_qos_specs(volume_type_id):
ctxt = context.get_admin_context()
res = db.volume_type_qos_specs_get(ctxt,
volume_type_id)
return res
def volume_types_diff(context, vol_type_id1, vol_type_id2):
"""Returns a 'diff' of two volume types and whether they are equal.
Returns a tuple of (diff, equal), where 'equal' is a boolean indicating
whether there is any difference, and 'diff' is a dictionary with the
following format:
{'extra_specs': {'key1': (value_in_1st_vol_type, value_in_2nd_vol_type),
'key2': (value_in_1st_vol_type, value_in_2nd_vol_type),
...}
'qos_specs': {'key1': (value_in_1st_vol_type, value_in_2nd_vol_type),
'key2': (value_in_1st_vol_type, value_in_2nd_vol_type),
...}
'encryption': {'cipher': (value_in_1st_vol_type, value_in_2nd_vol_type),
{'key_size': (value_in_1st_vol_type, value_in_2nd_vol_type),
...}
"""
def _fix_qos_specs(qos_specs):
if qos_specs:
qos_specs.pop('id', None)
qos_specs.pop('name', None)
qos_specs.update(qos_specs.pop('specs', {}))
def _fix_encryption_specs(encryption):
if encryption:
encryption = dict(encryption)
for param in ['volume_type_id', 'created_at', 'updated_at',
'deleted_at']:
encryption.pop(param, None)
return encryption
def _dict_diff(dict1, dict2):
res = {}
equal = True
if dict1 is None:
dict1 = {}
if dict2 is None:
dict2 = {}
for k, v in dict1.iteritems():
res[k] = (v, dict2.get(k))
if k not in dict2 or res[k][0] != res[k][1]:
equal = False
for k, v in dict2.iteritems():
res[k] = (dict1.get(k), v)
if k not in dict1 or res[k][0] != res[k][1]:
equal = False
return (res, equal)
all_equal = True
diff = {}
vol_type_data = []
for vol_type_id in (vol_type_id1, vol_type_id2):
if vol_type_id is None:
specs = {'extra_specs': None,
'qos_specs': None,
'encryption': None}
else:
specs = {}
vol_type = get_volume_type(context, vol_type_id)
specs['extra_specs'] = vol_type.get('extra_specs')
qos_specs = get_volume_type_qos_specs(vol_type_id)
specs['qos_specs'] = qos_specs.get('qos_specs')
_fix_qos_specs(specs['qos_specs'])
specs['encryption'] = get_volume_type_encryption(context,
vol_type_id)
specs['encryption'] = _fix_encryption_specs(specs['encryption'])
vol_type_data.append(specs)
diff['extra_specs'], equal = _dict_diff(vol_type_data[0]['extra_specs'],
vol_type_data[1]['extra_specs'])
if not equal:
all_equal = False
diff['qos_specs'], equal = _dict_diff(vol_type_data[0]['qos_specs'],
vol_type_data[1]['qos_specs'])
if not equal:
all_equal = False
diff['encryption'], equal = _dict_diff(vol_type_data[0]['encryption'],
vol_type_data[1]['encryption'])
if not equal:
all_equal = False
return (diff, all_equal)
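# A hedged usage sketch (the type ids come from the caller): list the extra
# spec keys whose values differ between two volume types.
def _example_changed_extra_specs(ctxt, type_id_a, type_id_b):
    diff, equal = volume_types_diff(ctxt, type_id_a, type_id_b)
    if equal:
        return []
    return [key for key, (old, new) in diff['extra_specs'].items()
            if old != new]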
| {
"content_hash": "f8b6ad35755699212e2376cd235d8157",
"timestamp": "",
"source": "github",
"line_count": 284,
"max_line_length": 79,
"avg_line_length": 34.61267605633803,
"alnum_prop": 0.5552390640895218,
"repo_name": "Akrog/cinder",
"id": "8624a10f4d367568de18c44959fe0d1f23f017cc",
"size": "10692",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "cinder/volume/volume_types.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "3555"
},
{
"name": "Gettext Catalog",
"bytes": "483634"
},
{
"name": "Python",
"bytes": "11055908"
},
{
"name": "Shell",
"bytes": "8111"
}
],
"symlink_target": ""
} |
from dirbalak import repomirrorcache
from upseto import gitwrapper
import collections
import logging
Dependency = collections.namedtuple(
"Dependency", "gitURL hash requiringURL requiringURLHash type masterHash broken")
class Traverse:
def __init__(self, visitMasterBranchOfEachDependency=True):
self._visitMasterBranchOfEachDependency = visitMasterBranchOfEachDependency
self._visitedTuples = set()
self._dependencies = []
def dependencies(self):
return self._dependencies
def traverse(self, gitURL, hash):
mirror = repomirrorcache.get(gitURL)
hash = mirror.branchName(hash)
self._traverse(gitURL, hash, None, None, 'root')
def _traverse(self, gitURL, hash, requiringURL, requiringURLHash, type):
try:
tuple = gitURL, hash, requiringURL, requiringURLHash
if tuple in self._visitedTuples:
return
self._visitedTuples.add(tuple)
mirror = repomirrorcache.get(gitURL)
masterHash = mirror.hash('origin/master')
hash = mirror.branchName(hash)
broken = not mirror.hashExists(hash)
dep = Dependency(
gitURL=gitURL, hash=hash, requiringURL=requiringURL,
requiringURLHash=requiringURLHash, type=type, masterHash=masterHash,
broken=broken)
self._dependencies.append(dep)
if not broken:
upsetoManifest = mirror.upsetoManifest(hash)
solventManifest = mirror.solventManifest(hash)
basenameForBuild = self._basenameForBuild(mirror, hash)
for requirement in upsetoManifest.requirements():
self._traverse(requirement['originURL'], requirement['hash'], gitURL, hash, 'upseto')
for requirement in solventManifest.requirements():
basename = gitwrapper.originURLBasename(requirement['originURL'])
type = 'dirbalak_build_rootfs' if basename == basenameForBuild else 'solvent'
self._traverse(requirement['originURL'], requirement['hash'], gitURL, hash, type)
if self._visitMasterBranchOfEachDependency:
self._traverse(gitURL, 'origin/master', None, None, 'master')
except:
logging.error(
"Exception while handling '%(gitURL)s'/%(hash)s "
"('%(type)s' dependency of '%(requiringURL)s'/%(requiringURLHash)s)", dict(
gitURL=gitURL, hash=hash, requiringURL=requiringURL, requiringURLHash=requiringURLHash,
type=type))
raise
def _basenameForBuild(self, mirror, hash):
try:
return mirror.dirbalakManifest(hash).buildRootFSRepositoryBasename()
except KeyError:
return None
| {
"content_hash": "beaa02a68cb7982743fe25fb0aa98b65",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 107,
"avg_line_length": 42.656716417910445,
"alnum_prop": 0.6273617914625612,
"repo_name": "Stratoscale/dirbalak",
"id": "d9f0dd5eee34f3be81e469520b3eec00bcc550c1",
"size": "2858",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "py/dirbalak/traverse.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "340"
},
{
"name": "HTML",
"bytes": "21593"
},
{
"name": "JavaScript",
"bytes": "4477"
},
{
"name": "Makefile",
"bytes": "2024"
},
{
"name": "Python",
"bytes": "120641"
},
{
"name": "Shell",
"bytes": "42"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('tracker', '0012_auto_20160221_1400'),
]
operations = [
migrations.AlterField(
model_name='system',
name='color_code',
field=models.CharField(max_length=10),
),
]
| {
"content_hash": "7b0db8236b3795149d79d0c97be79cfa",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 50,
"avg_line_length": 21.22222222222222,
"alnum_prop": 0.5916230366492147,
"repo_name": "paftree/WHturk",
"id": "08ccf9cc561fdd42c87501b60fbdcb0c0e42d602",
"size": "454",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tracker/migrations/0013_auto_20160221_1403.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "412079"
},
{
"name": "HTML",
"bytes": "36344"
},
{
"name": "JavaScript",
"bytes": "1526712"
},
{
"name": "Python",
"bytes": "48695"
},
{
"name": "Shell",
"bytes": "42"
}
],
"symlink_target": ""
} |
"""Tests for `imawurk` package."""
import pytest
from click.testing import CliRunner
from imawurk import imawurk
from imawurk import cli
@pytest.fixture
def response():
"""Sample pytest fixture.
See more at: http://doc.pytest.org/en/latest/fixture.html
"""
# import requests
# return requests.get('https://github.com/audreyr/cookiecutter-pypackage')
def test_content(response):
"""Sample pytest test function with the pytest fixture as an argument."""
# from bs4 import BeautifulSoup
# assert 'GitHub' in BeautifulSoup(response.content).title.string
def test_command_line_interface():
"""Test the CLI."""
runner = CliRunner()
result = runner.invoke(cli.main)
assert result.exit_code == 0
assert 'imawurk.cli.main' in result.output
help_result = runner.invoke(cli.main, ['--help'])
assert help_result.exit_code == 0
assert '--help Show this message and exit.' in help_result.output
| {
"content_hash": "e31fe3a6cb45ebfcde18ed954efca9a6",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 78,
"avg_line_length": 27.314285714285713,
"alnum_prop": 0.696652719665272,
"repo_name": "gityom/imawurk",
"id": "35109761866d31af5b7fe6867aaa3181ce530260",
"size": "1003",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_imawurk.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "2269"
},
{
"name": "Python",
"bytes": "3336"
}
],
"symlink_target": ""
} |
from django.apps import AppConfig
class ImraisingConfig(AppConfig):
name = 'imraising'
verbose_name = 'Imraising Application'
def ready(self):
import imraising.signals
| {
"content_hash": "cf5cc34959e80b7c9efabb3a3d163437",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 42,
"avg_line_length": 21.555555555555557,
"alnum_prop": 0.7010309278350515,
"repo_name": "google/mirandum",
"id": "994056232ab52842a4c5e554e01eb5c2650c3546",
"size": "808",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "alerts/imraising/apps.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "9472"
},
{
"name": "Elixir",
"bytes": "574"
},
{
"name": "HTML",
"bytes": "122101"
},
{
"name": "JavaScript",
"bytes": "19438"
},
{
"name": "Jinja",
"bytes": "4124"
},
{
"name": "Python",
"bytes": "398732"
},
{
"name": "Shell",
"bytes": "3296"
}
],
"symlink_target": ""
} |
"""ML-ENSEMBLE
:author: Sebastian Flennerhag
:copyright: 2017-2018
:licence: MIT
Blend indexing.
"""
from __future__ import division
from numbers import Integral
import numpy as np
from ._checks import check_partial_index
from .base import BaseIndex
class BlendIndex(BaseIndex):
"""Indexer that generates two non-overlapping subsets of ``X``.
Iterator that generates one training fold and one test fold that are
non-overlapping and that may or may not partition all of X depending on the
user's specification.
    BlendIndex creates a singleton generator (has one iteration) that
yields two tuples of ``(start, stop)`` integers that can be used for
    numpy array slicing (i.e. ``X[start:stop]``). If a full array index
is desired this can easily be achieved with::
for train_tup, test_tup in self.generate():
train_slice = numpy.hstack([numpy.arange(t0, t1) for t0, t1 in
train_tup])
test_slice = numpy.hstack([numpy.arange(t0, t1) for t0, t1 in
test_tup])
Parameters
----------
test_size : int or float (default = 0.5)
Size of the test set. If ``float``, assumed to be proportion of full
data set.
train_size : int or float, optional
        Size of the train set. If not specified (i.e. ``train_size = None``),
        train_size is equal to ``n_samples - test_size``. If ``float``, assumed
to be a proportion of full data set. If ``train_size`` + ``test_size``
amount to less than the observations in the full data set, a subset
of specified size will be used.
    X : array-like of shape [n_samples,], optional
        the training set to partition. The training label array is also
        accepted, as only the first dimension is used. If ``X`` is not
passed
at instantiation, the ``fit`` method must be called before
``generate``, or ``X`` must be passed as an argument of
``generate``.
raise_on_exception : bool (default = True)
whether to warn on suspicious slices or raise an error.
See Also
--------
:class:`FoldIndex`, :class:`SubsetIndex`
Examples
--------
Selecting an absolute test size, with train size as the remainder
>>> import numpy as np
>>> from mlens.index import BlendIndex
>>> X = np.arange(8)
    >>> idx = BlendIndex(3)
>>> print('Test size: 3')
>>> for tri, tei in idx.generate(X):
... print('TEST (idx | array): (%i, %i) | %r ' % (tei[0], tei[1],
... X[tei[0]:tei[1]]))
... print('TRAIN (idx | array): (%i, %i) | %r ' % (tri[0], tri[1],
... X[tri[0]:tri[1]]))
Test size: 3
TEST (idx | array): (5, 8) | array([5, 6, 7])
TRAIN (idx | array): (0, 5) | array([0, 1, 2, 3, 4])
Selecting a test and train size less than the total
>>> import numpy as np
>>> from mlens.index import BlendIndex
>>> X = np.arange(8)
>>> idx = BlendIndex(3, 4, X)
>>> print('Test size: 3')
>>> print('Train size: 4')
>>> for tri, tei in idx.generate(X):
... print('TEST (idx | array): (%i, %i) | %r ' % (tei[0], tei[1],
... X[tei[0]:tei[1]]))
... print('TRAIN (idx | array): (%i, %i) | %r ' % (tri[0], tri[1],
... X[tri[0]:tri[1]]))
Test size: 3
Train size: 4
TEST (idx | array): (4, 7) | array([4, 5, 6])
TRAIN (idx | array): (0, 4) | array([0, 1, 2, 3])
Selecting a percentage of observations as test and train set
>>> import numpy as np
>>> from mlens.index import BlendIndex
>>> X = np.arange(8)
>>> idx = BlendIndex(0.25, 0.45, X)
>>> print('Test size: 25% * 8 = 2')
>>> print('Train size: 45% * 8 < 4 -> 3')
>>> for tri, tei in idx.generate(X):
... print('TEST (idx | array): (%i, %i) | %r ' % (tei[0], tei[1],
... X[tei[0]:tei[1]]))
... print('TRAIN (idx | array): (%i, %i) | %r ' % (tri[0], tri[1],
... X[tri[0]:tri[1]]))
Test size: 25% * 8 = 2
    Train size: 45% * 8 < 4 -> 3
    TEST (idx | array): (3, 5) | array([3, 4])
    TRAIN (idx | array): (0, 3) | array([0, 1, 2])
Rebasing the test set to be 0-indexed
>>> import numpy as np
>>> from mlens.index import BlendIndex
>>> X = np.arange(8)
>>> idx = BlendIndex(3, rebase=True)
>>> print('Test size: 3')
>>> for tri, tei in idx.generate(X):
... print('TEST tuple: (%i, %i) | array: %r' % (tei[0], tei[1],
... np.arange(tei[0],
... tei[1])))
Test size: 3
TEST tuple: (0, 3) | array: array([0, 1, 2])
"""
def __init__(self,
test_size=0.5,
train_size=None,
X=None,
raise_on_exception=True):
super(BlendIndex, self).__init__()
self.n_train = None
self.n_test = None
self.test_size = test_size
self.train_size = train_size
self.raise_on_exception = raise_on_exception
if X is not None:
self.fit(X)
def fit(self, X, y=None, job=None):
"""Method for storing array data.
Parameters
----------
        X : array-like of shape [n_samples,], optional
            array to collect dimension data from.
y : None
for compatibility
job : None
for compatibility
Returns
-------
instance :
            indexer with stored sample size data.
"""
self.n_samples = X.shape[0]
# Get number of test samples
if isinstance(self.test_size, Integral):
self.n_test = self.test_size
else:
self.n_test = int(np.floor(self.test_size * self.n_samples))
# Get number of train samples
if self.train_size is None:
# Partition X - we coerce a positive value here:
# if n_test is oversampled will get at final check
self.n_train = int(np.floor(np.abs(self.n_samples - self.n_test)))
elif isinstance(self.train_size, Integral):
self.n_train = self.train_size
else:
self.n_train = int(np.floor(self.train_size * self.n_samples))
check_partial_index(self.n_samples, self.test_size, self.train_size,
self.n_test, self.n_train)
self.n_test_samples = self.n_test
self.__fitted__ = True
return self
def _gen_indices(self):
"""Return train and test set index generator."""
# Blended train set is from 0 to n, with test set from n to N
# There is no iteration.
yield (0, self.n_train), (self.n_train, self.n_train + self.n_test)
| {
"content_hash": "727f3c564457aef5d37ea59d5082a562",
"timestamp": "",
"source": "github",
"line_count": 200,
"max_line_length": 79,
"avg_line_length": 35.46,
"alnum_prop": 0.5190355329949239,
"repo_name": "flennerhag/mlens",
"id": "4ff7901c688487ce618e10b96ec527c0d6e90402",
"size": "7092",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mlens/index/blend.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "933041"
},
{
"name": "Shell",
"bytes": "238"
}
],
"symlink_target": ""
} |
from django.db import models
class Portal(models.Model):
TYPE_CHOICES = (
("CKAN", "CKAN"),
)
name = models.CharField(max_length=100)
type = models.CharField(max_length=8, choices=TYPE_CHOICES)
url = models.URLField()
def __str__(self):
return self.name
| {
"content_hash": "03473fe6ed4d643bd0423058d1163372",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 63,
"avg_line_length": 21.285714285714285,
"alnum_prop": 0.6174496644295302,
"repo_name": "bellisk/opendata-multisearch",
"id": "c1404f3ceac86c3c0b44181489b38641eb158c65",
"size": "298",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ord_hackday/search/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1521"
},
{
"name": "HTML",
"bytes": "36674"
},
{
"name": "Python",
"bytes": "10919"
}
],
"symlink_target": ""
} |
from django.conf.urls import patterns, url
urlpatterns = patterns('pythonMDA.apps.home.views',
url(r'^$', 'index_view', name='vista_principal',),
)
| {
"content_hash": "caa2729ea10befe08bdc5f7f11aa572a",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 62,
"avg_line_length": 30.666666666666668,
"alnum_prop": 0.5815217391304348,
"repo_name": "caballerojavier13/python-mda",
"id": "64b238e8e1065a6340efa33f825c5978c1e67c4b",
"size": "184",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pythonMDA/apps/home/urls.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "8294"
},
{
"name": "JavaScript",
"bytes": "10059"
},
{
"name": "Python",
"bytes": "20862"
}
],
"symlink_target": ""
} |
import json
import os
from datetime import datetime
from dotenv import load_dotenv
from flask import Flask, jsonify, make_response, render_template, request, url_for
load_dotenv()
# set token value in order to validate posts
SECRET = os.getenv("TOKEN")
QUEUE_PATH = os.getenv("QUEUE_PATH", "cache/queue.json")
# Load JSON message store if available
# (saved to a disk volume, preserves status across restarts)
try:
QUEUE = json.load(open(QUEUE_PATH, "r"))
except (ValueError, FileNotFoundError):
QUEUE = {}
# Instantiate app
app = Flask(__name__)
@app.route("/", methods=["GET", "POST"], defaults={"handle": None})
@app.route("/<handle>", methods=["GET", "POST", "DELETE"])
def api_endpoint(handle):
if request.method == "POST":
args = request.get_json()
if not handle:
handle = args["handle"]
status = args["status"]
token = args["token"]
color = args["color"]
stamp = str(datetime.now())
if (token == SECRET) or (SECRET is None): # check that token is valid
content = {
"link": url_for("api_request", handle=handle),
"handle": handle,
"status": status,
"color": color,
"at": stamp,
"client_ip": request.remote_addr,
}
QUEUE[handle] = content
with open(QUEUE_PATH, "w") as fh:
json.dump(QUEUE, fh)
return jsonify(QUEUE[handle]), 201
else:
return "invalid token", 403
if request.method == "GET":
if not handle:
lst = [item for item in QUEUE.values()]
return jsonify(lst)
try:
return jsonify(QUEUE[handle]), 201
except KeyError:
no_status = {"handle": handle, "status": "No such handle"}
return jsonify(no_status), 404
if request.method == "DELETE":
QUEUE.pop(handle, None)
with open(QUEUE_PATH, "w") as fh:
json.dump(QUEUE, fh)
return jsonify({handle: "Deleted"}), 204
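# A hedged usage sketch (handle, status and colour values are made up):
# exercise the POST endpoint through Flask's test client instead of a
# running server.
def _example_post_status():
    client = app.test_client()
    payload = {"handle": "alice", "status": "deploying",
               "token": SECRET, "color": "green"}
    response = client.post("/", json=payload)
    return response.get_json()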
# Little web UI
# Log monitor page
@app.route("/view")
def index():
stamp = str(datetime.now())
freight = render_template("statuslist.html", queue=QUEUE, dt=stamp)
v = make_response(freight)
return v
# view only one handle status
@app.route("/view/n/<handle>")
def nick_view(handle):
stamp = str(datetime.now())
handle_queue = {}
handle_queue[handle] = QUEUE[handle]
freight = render_template(
"filtered_status.html", queue=handle_queue, dt=stamp, handle=handle
)
v = make_response(freight)
return v
# view only one color status
@app.route("/view/c/<color>")
def color_view(color):
stamp = str(datetime.now())
color_queue = {}
for alias, content in QUEUE.items():
if color in content["color"]:
color_queue[alias] = content
freight = render_template(
"filtered_status.html", queue=color_queue, dt=stamp, handle=color
)
v = make_response(freight)
return v
if __name__ == "__main__":
app.run(debug=True, host="0.0.0.0", port=5001)
| {
"content_hash": "ab66f4a2f65f0dcde3ba315a8eda44d7",
"timestamp": "",
"source": "github",
"line_count": 107,
"max_line_length": 82,
"avg_line_length": 29.214953271028037,
"alnum_prop": 0.5863723608445297,
"repo_name": "tym-oao/algoth",
"id": "eeaba00abbe355b9a790c6722483e172c2a3b29a",
"size": "3165",
"binary": false,
"copies": "1",
"ref": "refs/heads/trunk",
"path": "algoth.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "337"
},
{
"name": "HTML",
"bytes": "3716"
},
{
"name": "Python",
"bytes": "3165"
}
],
"symlink_target": ""
} |
import uuid
from db.common import Base
from db.specific_event import SpecificEvent
from db.event import Event
from db.player import Player
from db.team import Team
class Faceoff(Base, SpecificEvent):
__tablename__ = 'faceoffs'
__autoload__ = True
HUMAN_READABLE = 'faceoff'
STANDARD_ATTRS = [
"team_id", "player_id", "zone", "faceoff_lost_team_id",
"faceoff_lost_player_id", "faceoff_lost_zone"
]
def __init__(self, event_id, data_dict):
self.faceoff_id = uuid.uuid4().urn
self.event_id = event_id
for attr in self.STANDARD_ATTRS:
if attr in data_dict:
setattr(self, attr, data_dict[attr])
else:
setattr(self, attr, None)
def __str__(self):
won_plr = Player.find_by_id(self.player_id)
lost_plr = Player.find_by_id(self.faceoff_lost_player_id)
won_team = Team.find_by_id(self.team_id)
lost_team = Team.find_by_id(self.faceoff_lost_team_id)
event = Event.find_by_id(self.event_id)
return "Faceoff: %s (%s) won vs. %s (%s) in %s. zone - %s" % (
won_plr.name, won_team.abbr,
lost_plr.name, lost_team.abbr,
self.zone.lower(), event)
| {
"content_hash": "77f5f311250b7b6331040639dacc1664",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 70,
"avg_line_length": 31.871794871794872,
"alnum_prop": 0.5905068382944489,
"repo_name": "leaffan/pynhldb",
"id": "a20e470aa0b4a82a9a375f5b348ece698d483e47",
"size": "1290",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "db/faceoff.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PLpgSQL",
"bytes": "80868"
},
{
"name": "Python",
"bytes": "479953"
}
],
"symlink_target": ""
} |
import os
import setuptools
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
with open('test-requirements.txt') as f:
test_required = f.read().splitlines()
setuptools.setup(
# Do not use underscores in the plugin name.
name='cloudify-deployment-proxy-plugin',
version='0.1',
author='Gigaspaces.com',
author_email='Gigaspaces.com',
description='plugin that defines dependencies between deployments',
# This must correspond to the actual packages in the plugin.
packages=[
'deployments',
],
license='LICENSE',
install_requires=[
'cloudify-plugins-common==3.2',
],
    tests_require=test_required,
)
| {
"content_hash": "37a6c94773b94645837971ec78823689",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 71,
"avg_line_length": 22.967741935483872,
"alnum_prop": 0.6657303370786517,
"repo_name": "denismakogon/cloudify-deployment-proxy-plugin",
"id": "6e137acee411f5987585d74c5936e3bd71f76b81",
"size": "1347",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "6898"
}
],
"symlink_target": ""
} |
"""
Tests are comprised of Python scripts. Files whose names begin with an
underscore will *not* be executed as test scripts.
Test scripts drive the browser with Selenium WebDriver by importing and
using SST actions.
The standard set of actions is imported by starting the test scripts with::
from sst.actions import *
Actions that work on page elements take either an element id or an
element object as their first argument. If the element you are working with
doesn't have a specific id you can get the element object with the
`get_element` action. `get_element` allows you to find an element by its
id, tag, text, class or other attributes. See the `get_element` documentation.
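A minimal test script could look like the following (the url and the element
ids used here are illustrative only)::
    go_to('http://localhost:8000/')
    assert_title_contains('Welcome')
    write_textfield('id_search', 'selenium')
    click_button('id_submit')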
"""
import os
import re
import time
from datetime import datetime
from pdb import set_trace as debug
from textwrap import TextWrapper
from urlparse import urljoin, urlparse
from unittest2 import SkipTest
from selenium import webdriver
from selenium.webdriver.common import keys
from selenium.webdriver.remote.webelement import WebElement
from selenium.common.exceptions import (
NoSuchElementException, NoSuchAttributeException,
InvalidElementStateException, WebDriverException,
NoSuchWindowException, NoSuchFrameException)
from sst import config
from sst import bmobproxy
__all__ = [
'accept_alert', 'add_cleanup', 'assert_attribute', 'assert_button',
'assert_checkbox', 'assert_checkbox_value', 'assert_css_property',
'assert_displayed', 'assert_dropdown', 'assert_dropdown_value',
'assert_element', 'assert_equal', 'assert_link', 'assert_not_equal',
'assert_radio', 'assert_radio_value', 'assert_table_has_rows',
'assert_table_headers', 'assert_table_row_contains_text', 'assert_text',
'assert_text_contains', 'assert_textfield', 'assert_title',
'assert_title_contains', 'assert_url', 'assert_url_contains', 'check_flags',
'click_link', 'clear_cookies', 'close_window', 'debug', 'dismiss_alert',
'end_test', 'click_button', 'click_element', 'execute_script',
'exists_element', 'fails', 'get_argument', 'get_base_url', 'get_cookies',
'get_current_url', 'get_element', 'get_element_source', 'get_element_by_css',
'get_element_by_xpath', 'get_elements', 'get_elements_by_css',
'get_elements_by_xpath', 'get_link_url', 'get_page_source', 'go_back',
'go_to', 'refresh', 'reset_base_url', 'run_test', 'set_base_url',
'set_checkbox_value', 'set_dropdown_value', 'set_radio_value',
'set_wait_timeout', 'simulate_keys', 'skip', 'sleep', 'start', 'stop',
'switch_to_frame', 'switch_to_window', 'take_screenshot', 'toggle_checkbox',
'wait_for', 'wait_for_and_refresh', 'write_textfield',
]
browser = None
browsermob_proxy = None
_check_flags = True
_test = None
BASE_URL = 'http://localhost:8000/'
__DEFAULT_BASE_URL__ = BASE_URL
VERBOSE = True
class EndTest(StandardError):
pass
debug.__doc__ = """Start the debugger, a shortcut for `pdb.set_trace()`."""
class _Sentinel(object):
def __repr__(self):
return 'default'
_sentinel = _Sentinel()
def _raise(msg):
_print(msg)
raise AssertionError(msg)
def set_base_url(url):
"""Set the url used for relative arguments to the `go_to` action."""
global BASE_URL
if not url.startswith('http') and not url.startswith('file'):
url = 'http://' + url
_print('Setting base url to: %r' % url)
BASE_URL = url
def get_base_url():
"""Return the base url used by `go_to`."""
return BASE_URL
def reset_base_url():
"""
Restore the base url to the default. This is called automatically for
you when a test script completes."""
global BASE_URL
BASE_URL = __DEFAULT_BASE_URL__
def end_test():
"""
If called it ends the test. Can be used conditionally to exit a
test under certain conditions."""
raise EndTest
def skip(reason=''):
"""
Skip the test. Unlike `end_test` a skipped test will be reported
as a skip rather than a pass."""
raise SkipTest(reason)
def _print(text):
if VERBOSE:
'''
text_wrapper = TextWrapper(
width=160,
break_long_words=False,
initial_indent=(' ' * 4),
subsequent_indent=(' ' * 8),
)
print text_wrapper.fill(text)
'''
ct = time.time()
msecs = (ct - long(ct)) * 1000
print "%s -- %s" % ("%s,%03d" % (time.strftime("%Y-%m-%d %H:%M:%S", ), msecs), text)
def start(browser_type=None, browser_version='',
browser_platform='ANY', session_name='',
javascript_disabled=False, assume_trusted_cert_issuer=False,
webdriver_remote=None):
"""
    Starts the browser with a new session. Called for you at
the start of each test script."""
global browser
global browsermob_proxy
if browser_type is None:
browser_type = config.browser_type
_print('')
_print('Starting browser: %s' % browser_type)
if webdriver_remote is None:
if browser_type == 'Firefox':
# profile features are FF only
profile = getattr(webdriver, '%sProfile' % browser_type)()
profile.set_preference('intl.accept_languages', 'en')
if config.browsermob_enabled:
# proxy integration is currently FF only
browsermob_proxy = bmobproxy.BrowserMobProxy(
'localhost', 8080)
selenium_proxy = webdriver.Proxy(
{'httpProxy': browsermob_proxy.url})
profile.set_proxy(selenium_proxy)
if assume_trusted_cert_issuer:
profile.set_preference(
'webdriver_assume_untrusted_issuer', False)
                profile.set_preference(
                    'capability.policy.default.Window.QueryInterface',
                    'allAccess')
                profile.set_preference(
                    'capability.policy.default.Window.frameElement.get',
                    'allAccess')
if javascript_disabled:
profile.set_preference('javascript.enabled', False)
browser = getattr(webdriver, browser_type)(profile)
else:
browser = getattr(webdriver, browser_type)()
else:
desired_capabilities = {"browserName": browser_type.lower(),
"platform": browser_platform.upper(),
"version": browser_version,
"javascriptEnabled": not javascript_disabled,
"name": session_name}
browser = webdriver.Remote(desired_capabilities=desired_capabilities,
command_executor=webdriver_remote)
def stop():
"""
    Stops the browser and ends the browser session. Called automatically for you at
the end of each test script."""
global browser
global browsermob_proxy
_print('Stopping browser')
# quit calls close() and does cleanup
browser.quit()
browser = None
if browsermob_proxy is not None:
_print('Closing http proxy')
browsermob_proxy.close()
browsermob_proxy = None
def refresh(wait=True):
"""
Refresh the current page.
By default this action will wait until a page with a body element is
available after the click. You can switch off this behaviour by passing
`wait=False`."""
if browsermob_proxy is not None:
_print('Capturing http traffic...')
browsermob_proxy.new_har()
_print('Refreshing current page')
browser.refresh()
if wait:
_waitforbody()
if browsermob_proxy is not None:
_print('Saving HAR output')
_make_results_dir()
browsermob_proxy.save_har(_make_useable_har_name())
def take_screenshot(filename='screenshot.png'):
"""
Takes a screenshot of the browser window. Called automatically on failures
when running in `-s` mode."""
_print('Capturing Screenshot')
_make_results_dir()
screenshot_file = os.path.join(config.results_directory, filename)
browser.get_screenshot_as_file(screenshot_file)
def _make_results_dir():
"""
Make results directory if it does not exist."""
try:
os.makedirs(config.results_directory)
except OSError:
pass # already exists
def sleep(secs):
"""
Delay execution for a given number of seconds. The argument may be a
floating point number for subsecond precision."""
_print('Sleeping %s secs' % secs)
time.sleep(secs)
def _fix_url(url):
parsed = urlparse(url)
if not parsed.scheme:
url = urljoin(BASE_URL, url)
return url
def _add_trailing_slash(url):
if not url.endswith('/'):
url += '/'
return url
def get_argument(name, default=_sentinel):
"""
Get an argument from the one the test was called with.
A test is called with arguments when it is executed by
the `run_test`. You can optionally provide a default value
that will be used if the argument is not set. If you don't
provide a default value and the argument is missing an
exception will be raised."""
args = config.__args__
value = args.get(name, default)
if value is _sentinel:
raise LookupError(name)
return value
def run_test(name, **kwargs):
"""
Execute a named test, with the specified arguments.
Arguments can be retrieved by the test with `get_argument`.
The `name` is the test file name without the '.py'.
You can specify tests in an alternative directory with
relative path syntax. e.g.::
run_test('subdir/foo', spam='eggs')
Tests can return a result by setting the name `RESULT`
in the test.
Tests are executed with the same browser (and browser
session) as the test calling `test_run`. This includes
whether or not Javascript is enabled.
Before the test is called the timeout and base url are
    reset, but will be restored to their original value
when `run_test` returns."""
# delayed import to workaround circular imports
from sst import context
_print('Executing test: %s' % name)
return context.run_test(name, kwargs)
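# A hedged usage sketch (the test name 'subdir/login' and its 'username'
# argument are hypothetical): the calling script passes keyword arguments to
# run_test(), and the called test reads them back with get_argument(), e.g.
# username = get_argument('username', default='guest').
def _example_run_test_with_arguments():
    return run_test('subdir/login', username='someuser')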
def _make_useable_har_name(stem=''):
now = datetime.now()
timestamped_base = 'har-%s' % now.strftime('%Y-%m-%d_%H-%M-%S-%f')
if stem:
slug_name = ''.join(x for x in stem if x.isalnum())
out_name = '%s-%s.har' % (timestamped_base, slug_name)
else:
out_name = '%s.har' % timestamped_base
file_name = os.path.join(config.results_directory, out_name)
return file_name
def go_to(url='', wait=True):
"""
Go to a specific URL. If the url provided is a relative url it will be added
to the base url. You can change the base url for the test with
`set_base_url`.
By default this action will wait until a page with a body element is
available after the click. You can switch off this behaviour by passing
`wait=False`."""
if browser is None:
start()
url = _fix_url(url)
if browsermob_proxy is not None:
_print('Capturing http traffic...')
browsermob_proxy.new_har()
_print('Going to... %s' % url)
browser.get(url)
if wait:
_waitforbody()
if browsermob_proxy is not None:
_print('Saving HAR output')
_make_results_dir()
browsermob_proxy.save_har(_make_useable_har_name(url))
def go_back(wait=True):
"""
Go one step backward in the browser history.
By default this action will wait until a page with a body element is
available after the click. You can switch off this behaviour by passing
`wait=False`."""
if browsermob_proxy is not None:
_print('Capturing http traffic...')
browsermob_proxy.new_har()
_print('Going back one step in browser history')
browser.back()
if wait:
_waitforbody()
if browsermob_proxy is not None:
_print('Saving HAR output')
_make_results_dir()
browsermob_proxy.save_har(_make_useable_har_name())
def assert_checkbox(id_or_elem):
"""
Assert that the element is a checkbox.
Takes an id or an element object.
Raises a failure exception if the element specified doesn't exist or isn't
a checkbox."""
elem = _get_elem(id_or_elem)
_elem_is_type(elem, id_or_elem, 'checkbox')
return elem
def assert_checkbox_value(id_or_elem, value):
"""
Assert checkbox value. Takes an element id or object plus either True or
False. Raises a failure exception if the element specified doesn't exist
or isn't a checkbox."""
checkbox = assert_checkbox(id_or_elem)
real = checkbox.is_selected()
msg = 'Checkbox: %r - Has Value: %r' % (_get_text(checkbox), real)
if real != value:
_raise(msg)
def toggle_checkbox(id_or_elem):
"""
Toggle the checkbox value. Takes an element id or object. Raises a failure
exception if the element specified doesn't exist or isn't a checkbox."""
checkbox = assert_checkbox(id_or_elem)
_print('Toggling checkbox: %r' % _get_text(checkbox))
before = checkbox.is_selected()
checkbox.click()
after = checkbox.is_selected()
msg = 'Checkbox: %r - was not toggled, value remains: %r' \
% (_get_text(checkbox), before)
if before == after:
_raise(msg)
def set_checkbox_value(id_or_elem, new_value):
"""
Set a checkbox to a specific value, either True or False. Raises a failure
exception if the element specified doesn't exist or isn't a checkbox."""
checkbox = assert_checkbox(id_or_elem)
_print('Setting checkbox %r to %r' % (_get_text(checkbox), new_value))
# There is no method to 'unset' a checkbox in the browser object
current_value = checkbox.is_selected()
if new_value != current_value:
toggle_checkbox(id_or_elem)
def _make_keycode(key_to_make):
"""
Take a key and return a keycode"""
k = keys.Keys()
keycode = k.__getattribute__(key_to_make.upper())
return keycode
def simulate_keys(id_or_elem, key_to_press):
"""
Simulate key sent to specified element.
(available keys located in `selenium/webdriver/common/keys.py`)
e.g.::
simulate_keys('text_1', 'BACK_SPACE')
"""
key_element = _get_elem(id_or_elem)
_print('Simulating keypress on %r with %r key' \
% (_get_text(key_element), key_to_press))
key_code = _make_keycode(key_to_press)
key_element.send_keys(key_code)
_textfields = (
'text', 'password', 'textarea', 'email',
'url', 'search', 'number', 'file')
def assert_textfield(id_or_elem):
"""
Assert that the element is a textfield, textarea or password box.
Takes an id or an element object.
Raises a failure exception if the element specified doesn't exist
or isn't a textfield."""
elem = _get_elem(id_or_elem)
_elem_is_type(elem, id_or_elem, *_textfields) # see _textfields tuple
return elem
def write_textfield(id_or_elem, new_text, check=True, clear=True):
"""
Set the specified text into the textfield. If the text fails to write (the
textfield contents after writing are different to the specified text) this
function will fail. You can switch off the checking by passing
`check=False`. The field is cleared before written to. You can switch this
off by passing `clear=False`."""
textfield = assert_textfield(id_or_elem)
_print('Writing to textfield %r with text %r' % (_get_text(textfield), new_text))
# clear field like this, don't use clear()
if clear:
textfield.send_keys(keys.Keys().CONTROL, 'a')
textfield.send_keys(keys.Keys().DELETE)
if isinstance(new_text, unicode):
textfield.send_keys(new_text)
else:
textfield.send_keys(str(new_text))
if not check:
return
_print('Check text wrote correctly')
current_text = textfield.get_attribute('value')
if current_text != new_text:
msg = 'Textfield: %r - did not write. Text was: %r' \
% (_get_text(textfield), current_text)
_raise(msg)
def assert_link(id_or_elem):
"""
Assert that the element is a link.
Raises a failure exception if the element specified doesn't exist or
isn't a link"""
link = _get_elem(id_or_elem)
href = link.get_attribute('href')
if href is None:
msg = 'The text %r is not part of a Link or a Link ID' \
% _get_text(link)
_raise(msg)
return link
def get_link_url(id_or_elem):
"""Return the URL from a link."""
_print('Getting url from link %r' % id_or_elem)
link = assert_link(id_or_elem)
link_url = link.get_attribute('href')
return link_url
def get_current_url():
"""Gets the URL of the current page."""
return browser.current_url
def click_link(id_or_elem, check=False, wait=True):
"""
Click the specified link. As some links do redirects the location you end
up at is not checked by default. If you pass in `check=True` then this
action asserts that the resulting url is the link url.
By default this action will wait until a page with a body element is
available after the click. You can switch off this behaviour by passing
`wait=False`."""
link = assert_link(id_or_elem)
link_url = link.get_attribute('href')
if browsermob_proxy is not None:
_print('Capturing http traffic...')
browsermob_proxy.new_har()
_print('Clicking link %r' % _get_text(link))
link.click()
if wait:
_waitforbody()
if browsermob_proxy is not None:
_print('Saving HAR output')
_make_results_dir()
browsermob_proxy.save_har(_make_useable_har_name())
# some links do redirects - so we
# don't check by default
if check:
assert_url(link_url)
def assert_displayed(id_or_elem):
"""
Assert that the element is displayed.
Takes an id or an element object.
Raises a failure exception if the element specified doesn't exist or isn't
displayed. Returns the element if it is displayed."""
element = _get_elem(id_or_elem)
if not element.is_displayed():
message = 'Element is not displayed'
_raise(message)
return element
def click_element(id_or_elem, wait=True):
"""
Click on an element of any kind not specific to links or buttons.
By default this action will wait until a page with a body element is
available after the click. You can switch off this behaviour by passing
`wait=False`."""
elem = _get_elem(id_or_elem)
if browsermob_proxy is not None:
_print('Capturing http traffic...')
browsermob_proxy.new_har()
_print('Clicking element %r' % _get_text(elem))
elem.click()
if wait:
_waitforbody()
if browsermob_proxy is not None:
_print('Saving HAR output')
_make_results_dir()
browsermob_proxy.save_har(_make_useable_har_name())
def assert_title(title):
"""Assert the page title is as specified."""
real_title = browser.title
msg = 'Title is: %r. Should be: %r' % (real_title, title)
if real_title != title:
_raise(msg)
def assert_title_contains(text, regex=False):
"""
Assert the page title contains the specified text.
set `regex=True` to use a regex pattern."""
real_title = browser.title
msg = 'Title is: %r. Does not contain %r' % (real_title, text)
if regex:
if not re.search(text, real_title):
_raise(msg)
else:
if not text in real_title:
_raise(msg)
def assert_url(url):
"""
Assert the current url is as specified. Can be an absolute url or
relative to the base url."""
url = _fix_url(url)
url = _add_trailing_slash(url)
real_url = browser.current_url
real_url = _add_trailing_slash(real_url)
msg = 'Url is: %r. Should be: %r' % (real_url, url)
if url != real_url:
_raise(msg)
def assert_url_contains(text, regex=False):
"""
Assert the current url contains the specified text.
set `regex=True` to use a regex pattern."""
real_url = browser.current_url
msg = 'Url is %r. Does not contain %r' % (real_url, text)
if regex:
if not re.search(text, real_url):
_raise(msg)
else:
if text not in real_url:
_raise(msg)
_TIMEOUT = 10
_POLL = 0.1
def set_wait_timeout(timeout, poll=None):
"""
Set the timeout, in seconds, used by `wait_for`. The default at the start of
a test is always 10 seconds.
    The optional second argument is how long (in seconds) `wait_for` should
wait in between checking its condition (the poll frequency). The default
at the start of a test is always 0.1 seconds."""
global _TIMEOUT
global _POLL
_TIMEOUT = timeout
msg = 'Setting wait timeout to %rs' % timeout
if poll is not None:
msg += ('. Setting poll time to %rs' % poll)
_POLL = poll
_print(msg)
def _get_name(obj):
try:
return obj.__name__
except:
return repr(obj)
def _wait_for(condition, do_refresh, timeout, poll, *args, **kwargs):
global VERBOSE
_print('Waiting for %r' % _get_name(condition))
original = VERBOSE
VERBOSE = False
try:
max_time = time.time() + timeout
msg = _get_name(condition)
while True:
            # refresh the page if requested (do_refresh must not shadow the
            # module-level refresh() action)
            if do_refresh:
                refresh()
e = None
try:
result = condition(*args, **kwargs)
except AssertionError as e:
pass
else:
if result != False:
break
if time.time() > max_time:
error = 'Timed out waiting for: %s' % msg
if e:
error += '\nError during wait: %s' % e
_raise(error)
time.sleep(poll)
finally:
VERBOSE = original
def wait_for(condition, *args, **kwargs):
"""
Wait for an action to pass. Useful for checking the results of actions
that may take some time to complete.
This action takes a condition function and any arguments it should be
called with. The condition function can either be an action or a function
that returns True for success and False for failure. For example::
wait_for(assert_title, 'Some page title')
If the specified condition does not become true within 10 seconds then
`wait_for` fails.
You can set the timeout for `wait_for` by calling `set_wait_timeout`."""
_wait_for(condition, False, _TIMEOUT, _POLL, *args, **kwargs)
def wait_for_and_refresh(condition, *args, **kwargs):
"""
Wait for an action to pass. Useful for checking the results of actions
    that may take some time to complete. The difference from wait_for() is that
    wait_for_and_refresh() refreshes the current page with refresh() after every
condition check.
This action takes a condition function and any arguments it should be
called with. The condition function can either be an action or a function
that returns True for success and False for failure. For example::
wait_for_and_refresh(assert_title, 'Some page title')
If the specified condition does not become true within 10 seconds then
`wait_for_and_refresh` fails.
You can set the timeout for `wait_for_and_refresh` by calling `set_wait_timeout`.
"""
_wait_for(condition, True, _TIMEOUT, _POLL, *args, **kwargs)
def fails(action, *args, **kwargs):
"""
This action is particularly useful for *testing* other actions, by
checking that they fail when they should do. `fails` takes a function
(usually an action) and any arguments and keyword arguments to call the
function with. If calling the function raises an AssertionError then
`fails` succeeds. If the function does *not* raise an AssertionError then
    this action raises the appropriate failure exception. All other
exceptions will be propagated normally."""
_print('Trying action failure: %s' % _get_name(action))
try:
action(*args, **kwargs)
except AssertionError:
return
msg = 'Action %r did not fail' % _get_name(action)
_raise(msg)
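# A hedged usage sketch (the title text is made up): fails() passes only when
# the wrapped action raises an AssertionError.
def _example_fails_usage():
    fails(assert_title, 'a title this page should never have')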
def _get_elem(id_or_elem):
if isinstance(id_or_elem, WebElement):
return id_or_elem
try:
return browser.find_element_by_id(id_or_elem)
except (NoSuchElementException, WebDriverException):
msg = 'Element with id: %r does not exist' % id_or_elem
_raise(msg)
# Takes an optional 2nd input type for cases like textfield & password
# where types are similar
def _elem_is_type(elem, name, *elem_types):
try:
result = elem.get_attribute('type')
except NoSuchAttributeException:
msg = 'Element has no type attribute'
_raise(msg)
if not result in elem_types:
msg = 'Element %r is not one of %r' % (name, elem_types)
_raise(msg)
def assert_dropdown(id_or_elem):
"""Assert the specified element is a select drop-list."""
elem = _get_elem(id_or_elem)
_elem_is_type(elem, id_or_elem, 'select-one')
return elem
def set_dropdown_value(id_or_elem, text=None, value=None):
"""Set the select drop-list to a text or value specified."""
elem = assert_dropdown(id_or_elem)
_print('Setting %r option list to %r' % (_get_text(elem), text or value))
if text and not value:
for element in elem.find_elements_by_tag_name('option'):
if element.text == text:
element.click()
return
msg = 'The following option could not be found in the list: %r' % text
elif value and not text:
for element in elem.find_elements_by_tag_name('option'):
if element.get_attribute("value") == value:
element.click()
return
msg = 'The following option could not be found in the list: %r' % value
else:
msg = 'Use set_dropdown_value() with either text or value!'
_raise(msg)
def assert_dropdown_value(id_or_elem, text_in):
"""Assert the specified select drop-list is set to the specified value."""
elem = assert_dropdown(id_or_elem)
# Because there is no way to connect the current
# text of a select element we have to use 'value'
current = elem.get_attribute('value')
for element in elem.find_elements_by_tag_name('option'):
if text_in == element.text and \
current == element.get_attribute('value'):
return
msg = 'The option is not currently set to: %r' % text_in
_raise(msg)
def assert_radio(id_or_elem):
"""
Assert the specified element is a radio button.
Takes an id or an element object.
Raises a failure exception if the element specified doesn't exist or isn't
a radio button"""
elem = _get_elem(id_or_elem)
_elem_is_type(elem, id_or_elem, 'radio')
return elem
def assert_radio_value(id_or_elem, value):
"""
Assert the specified element is a radio button with the specified value;
True for selected and False for unselected.
Takes an id or an element object.
Raises a failure exception if the element specified doesn't exist or isn't
a radio button"""
elem = assert_radio(id_or_elem)
selected = elem.is_selected()
msg = 'Radio %r should be set to: %s.' % (_get_text(elem), value)
if value != selected:
_raise(msg)
def set_radio_value(id_or_elem):
"""Select the specified radio button."""
elem = assert_radio(id_or_elem)
_print('Selecting radio button item %r' % _get_text(elem))
elem.click()
def _get_text(elem):
text = None
try:
text = elem.text
except InvalidElementStateException:
pass
if text:
# Note that some elements (like textfields) return empty string
# for text and we still need to call value
return text
try:
text = elem.get_attribute('value')
except InvalidElementStateException:
pass
return text
def assert_text(id_or_elem, text):
"""
Assert the specified element text is as specified.
Raises a failure exception if the element specified doesn't exist or isn't
as specified"""
elem = _get_elem(id_or_elem)
real = _get_text(elem)
if real is None:
msg = 'Element %r has no text attribute' % _get_text(elem)
_raise(msg)
if real != text:
msg = 'Element text should be %r. It is %r.' % (text, real)
_raise(msg)
def assert_text_contains(id_or_elem, text, regex=False):
"""
Assert the specified element contains the specified text.
set `regex=True` to use a regex pattern."""
elem = _get_elem(id_or_elem)
real = _get_text(elem)
if real is None:
msg = 'Element %r has no text attribute' % _get_text(elem)
_raise(msg)
msg = 'Element text is %r. Does not contain %r' % (real, text)
if regex:
if not re.search(text, real):
_raise(msg)
else:
if text not in real:
_raise(msg)
def _check_text(elem, text):
return _get_text(elem) == text
def _match_text(elem, regex):
text = _get_text(elem) or ''
return bool(re.search(regex, text))
def get_elements(tag=None, css_class=None, id=None, text=None,
text_regex=None, **kwargs):
"""
This function will find and return all matching elements by any of several
attributes. If the elements cannot be found from the attributes you
provide, the call will fail with an exception.
You can specify as many or as few attributes as you like.
`text_regex` finds elements by doing a regular expression search against
the text of elements. It cannot be used in conjunction with the `text`
argument and cannot be the *only* argument to find elements."""
if text and text_regex:
raise TypeError("You can't use text and text_regex arguments")
selector_string = ''
if tag:
selector_string = tag
if css_class:
css_class_selector = css_class.strip().replace(' ', '.')
selector_string += ('.%s' % css_class_selector)
if id:
selector_string += ('#%s' % id)
selector_string += ''.join(['[%s=%r]' % (key, value) for
key, value in kwargs.items()])
try:
if text and not selector_string:
elems = browser.find_elements_by_xpath('//*[text() = %r]' % text)
else:
if not selector_string:
msg = 'Could not identify element: no arguments provided'
_raise(msg)
elems = browser.find_elements_by_css_selector(selector_string)
except (WebDriverException, NoSuchElementException) as e:
msg = 'Element not found: %s' % e
_raise(msg)
if text:
# if text was specified, filter elements
elems = [element for element in elems if _check_text(element, text)]
elif text_regex:
elems = [elem for elem in elems if _match_text(elem, text_regex)]
if not elems:
msg = 'Could not identify elements: 0 elements found'
_raise(msg)
return elems
def get_element(tag=None, css_class=None, id=None, text=None,
text_regex=None, **kwargs):
"""
This function will find and return an element by any of several
attributes. If the element cannot be found from the attributes you
provide, or the attributes match more than one element, the call will fail
with an exception.
Finding elements is useful for checking that the element exists, and also
for passing to other actions that work with element objects.
You can specify as many or as few attributes as you like, so long as they
uniquely identify one element.
`text_regex` finds elements by doing a regular expression search against
the text of elements. It cannot be used in conjunction with the `text`
argument and cannot be the *only* argument to find elements."""
elems = get_elements(tag=tag, css_class=css_class,
id=id, text=text, text_regex=text_regex, **kwargs)
if len(elems) != 1:
msg = 'Could not identify element: %s elements found' % len(elems)
_raise(msg)
return elems[0]
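# A hedged usage sketch (tag, class and attribute values are illustrative):
# get_element() narrows the match with any combination of attributes and
# fails unless exactly one element matches.
def _example_get_element_usage():
    button = get_element(tag='input', css_class='form-submit', type='submit')
    click_element(button)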
def exists_element(tag=None, css_class=None, id=None, text=None,
text_regex=None, **kwargs):
"""
This function will find if an element exists by any of several
attributes. It returns True if the element is found or False
if it can't be found.
You can specify as many or as few attributes as you like."""
try:
get_elements(tag=tag, css_class=css_class, id=id, text=text,
text_regex=text_regex, **kwargs)
return True
except AssertionError:
return False
def assert_element(tag=None, css_class=None, id=None, text=None,
text_regex=None, **kwargs):
"""
Assert an element exists by any of several attributes.
You can specify as many or as few attributes as you like."""
try:
elems = get_elements(tag=tag, css_class=css_class, id=id, text=text,
text_regex=text_regex, **kwargs)
return elems
except AssertionError:
msg = 'Could not assert element exists'
_raise(msg)
def assert_button(id_or_elem):
"""
Assert that the specified element is a button.
Takes an id or an element object.
Raises a failure exception if the element specified doesn't exist or isn't
a button"""
elem = _get_elem(id_or_elem)
if elem.tag_name == 'button':
return elem
if elem.get_attribute('type') == 'button':
return elem
_elem_is_type(elem, id_or_elem, 'submit')
return elem
def click_button(id_or_elem, wait=True):
"""
Click the specified button.
By default this action will wait until a page with a body element is
available after the click. You can switch off this behaviour by passing
`wait=False`."""
button = assert_button(id_or_elem)
if browsermob_proxy is not None:
_print('Capturing http traffic...')
browsermob_proxy.new_har()
_print('Clicking button %r' % _get_text(button))
button.click()
if wait:
_waitforbody()
if browsermob_proxy is not None:
_print('Saving HAR output')
_make_results_dir()
browsermob_proxy.save_har(_make_useable_har_name())
def get_elements_by_css(selector):
"""Find all elements that match a css selector."""
try:
return browser.find_elements_by_css_selector(selector)
except (WebDriverException, NoSuchElementException) as e:
msg = 'Element not found: %s' % e
_raise(msg)
def get_element_by_css(selector):
"""Find an element by css selector."""
elements = get_elements_by_css(selector)
if len(elements) != 1:
msg = 'Could not identify element: %s elements found' % len(elements)
_raise(msg)
return elements[0]
def get_elements_by_xpath(selector):
"""Find all elements that match an xpath."""
try:
return browser.find_elements_by_xpath(selector)
except (WebDriverException, NoSuchElementException) as e:
msg = 'Element not found: %s' % e
_raise(msg)
def get_element_by_xpath(selector):
"""Find an element by xpath."""
elements = get_elements_by_xpath(selector)
if len(elements) != 1:
msg = 'Could not identify element: %s elements found' % len(elements)
_raise(msg)
return elements[0]
def _waitforbody():
wait_for(get_element, tag='body')
def get_page_source():
"""Gets the source of the current page."""
return browser.page_source
def close_window():
""" Closes the current window """
_print('Closing the current window')
browser.close()
def switch_to_window(index_or_name=None):
"""
Switch focus to the specified window (by index or name).
    If no window is given, switch focus to the default window."""
if index_or_name is None:
_print('Switching to default window')
browser.switch_to_window('')
elif isinstance(index_or_name, int):
index = index_or_name
window_handles = browser.window_handles
if index >= len(window_handles):
msg = 'Index %r is greater than available windows.' % index
_raise(msg)
window = window_handles[index]
try:
_print('Switching to window: %r' % window)
browser.switch_to_window(window)
except NoSuchWindowException:
msg = 'Could not find window: %r' % window
_raise(msg)
else:
name = index_or_name
try:
_print('Switching to window: %r' % name)
browser.switch_to_window(name)
except NoSuchWindowException:
msg = 'Could not find window: %r' % name
_raise(msg)
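# Example usage (illustrative sketch; assumes a second window has already been
# opened, e.g. via a link with target="_blank" -- the window name is made up):
#
#     switch_to_window(1)          # switch by index
#     switch_to_window('popup')    # or by window name
#     switch_to_window()           # back to the default window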
def switch_to_frame(index_or_name=None):
"""
Switch focus to the specified frame (by index or name).
    If no frame is given, switch focus to the default content frame."""
if index_or_name is None:
_print('Switching to default content frame')
browser.switch_to_default_content()
else:
_print('Switching to frame: %r' % index_or_name)
try:
browser.switch_to_frame(index_or_name)
except NoSuchFrameException:
msg = 'Could not find frame: %r' % index_or_name
_raise(msg)
def _alert_action(action, expected_text=None, text_to_write=None):
"""
Accept or dismiss a JavaScript alert, confirmation or prompt.
Optionally, it takes the expected text of the Popup box to check it,
and the text to write in the prompt."""
wait_for(browser.switch_to_alert)
alert = browser.switch_to_alert()
alert_text = alert.text
# XXX workaround because Selenium sometimes returns the value in a
# dictionary. See http://code.google.com/p/selenium/issues/detail?id=2955
if isinstance(alert_text, dict):
alert_text = alert_text['text']
if expected_text and expected_text != alert_text:
error_message = 'Element text should be %r. It is %r.' \
% (expected_text, alert_text)
_raise(error_message)
if text_to_write:
alert.send_keys(text_to_write)
if action == 'accept':
alert.accept()
elif action == 'dismiss':
alert.dismiss()
else:
_raise('%r is an unknown action for an alert' % action)
def accept_alert(expected_text=None, text_to_write=None):
"""
Accept a JavaScript alert, confirmation or prompt.
Optionally, it takes the expected text of the Popup box to check it,
and the text to write in the prompt.
Note that the action that opens the alert should not wait for a page with
a body element. This means that you should call functions like
    click_element with the argument wait=False."""
_print('Accepting Alert')
_alert_action('accept', expected_text, text_to_write)
def dismiss_alert(expected_text=None, text_to_write=None):
"""
Dismiss a JavaScript alert.
    Optionally, it takes the expected text of the Popup box to check it,
and the text to write in the prompt.
Note that the action that opens the alert should not wait for a page with
a body element. This means that you should call functions like
    click_element with the argument wait=False."""
_print('Dismissing Alert')
_alert_action('dismiss', expected_text, text_to_write)
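# Example usage (illustrative sketch; assumes a button with the made-up id
# 'delete' opens a confirm() dialog when clicked):
#
#     click_button('delete', wait=False)   # don't wait: the alert blocks the page
#     accept_alert(expected_text='Are you sure?')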
def assert_table_headers(id_or_elem, headers):
"""
Assert table `id_or_elem` has headers (<th> tags) where the text matches
the sequence `headers`.
"""
_print('Checking headers for %r' % (id_or_elem,))
elem = _get_elem(id_or_elem)
if not elem.tag_name == 'table':
_raise('Element %r is not a table.' % (id_or_elem,))
header_elems = elem.find_elements_by_tag_name('th')
header_text = [_get_text(elem) for elem in header_elems]
if not header_text == headers:
        msg = ('Expected headers: %r. Actual headers: %r\n' %
(headers, header_text))
_raise(msg)
def assert_table_has_rows(id_or_elem, num_rows):
"""
Assert the specified table has the specified number of rows (<tr> tags
inside the <tbody>).
"""
_print('Checking table %r has %s rows' % (id_or_elem, num_rows))
elem = _get_elem(id_or_elem)
if not elem.tag_name == 'table':
_raise('Element %r is not a table.' % (id_or_elem,))
body = elem.find_elements_by_tag_name('tbody')
if not body:
_raise('Table %r has no tbody.' % (id_or_elem,))
rows = body[0].find_elements_by_tag_name('tr')
if not len(rows) == num_rows:
msg = 'Expected %s rows. Found %s.' % (num_rows, len(rows))
_raise(msg)
def assert_table_row_contains_text(id_or_elem, row, contents, regex=False):
"""
Assert the specified row (starting from 0) in the specified table
contains the specified contents.
contents should be a sequence of strings, where each string is the same
as the text of the corresponding column.
If `regex` is True (the default is False) then each cell is checked
with a regular expression search.
The row will be looked for inside the <tbody>, to check headers use
`assert_table_headers`.
"""
_print('Checking the contents of table %r, row %s.' % (id_or_elem, row))
elem = _get_elem(id_or_elem)
if not elem.tag_name == 'table':
_raise('Element %r is not a table.' % (id_or_elem,))
body = elem.find_elements_by_tag_name('tbody')
if not body:
_raise('Table %r has no tbody.' % (id_or_elem,))
rows = body[0].find_elements_by_tag_name('tr')
if len(rows) <= row:
msg = 'Asked to fetch row %s. Highest row is %s' % (row, len(rows) - 1)
_raise(msg)
columns = rows[row].find_elements_by_tag_name('td')
cells = [_get_text(elem) for elem in columns]
if not regex:
success = cells == contents
elif len(contents) != len(cells):
success = False
else:
success = all(re.search(expected, actual) for expected, actual in
zip(contents, cells))
if not success:
msg = ('Expected row contents: %r. Actual contents: %r' %
(contents, cells))
_raise(msg)
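# Example usage (illustrative sketch; assumes a table with the made-up id
# 'results' whose tbody rows contain name and score cells):
#
#     assert_table_headers('results', ['Name', 'Score'])
#     assert_table_has_rows('results', 3)
#     assert_table_row_contains_text('results', 0, ['Alice', '42'])
#     assert_table_row_contains_text('results', 1, ['B.*', r'\d+'], regex=True)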
def assert_attribute(id_or_elem, attribute, value, regex=False):
"""
    Assert that the specified `attribute` on the element is equal to the
`value`.
If `regex` is True (default is False) then the value will be compared to
the attribute using a regular expression search.
"""
elem = _get_elem(id_or_elem)
_print('Checking attribute %r of %r' % (attribute, _get_text(elem)))
actual = elem.get_attribute(attribute)
if not regex:
success = value == actual
else:
success = actual is not None and re.search(value, actual)
if not success:
msg = 'Expected attribute: %r. Actual attribute: %r' % (value, actual)
_raise(msg)
def assert_css_property(id_or_elem, property, value, regex=False):
"""
assert that the specified `css property` on the element is equal to the
`value`.
If `regex` is True (default is False) then the value will be compared to
the property using a regular expression search.
"""
elem = _get_elem(id_or_elem)
_print('Checking css property %r: %s of %r' %
(property, value, _get_text(elem)))
actual = elem.value_of_css_property(property)
if not regex:
success = value == actual
else:
success = actual is not None and re.search(value, actual)
if not success:
msg = 'Expected property: %r. Actual property: %r' % (value, actual)
_raise(msg)
def check_flags(*args):
"""
A test will only run if all the flags passed to this action were supplied
at the command line. If a required flag is missing the test is skipped.
Flags are case-insensitive.
"""
if not _check_flags:
# Flag checking disabled
return
missing = set(arg.lower() for arg in args) - set(config.flags)
if missing:
_msg = 'Flags required but not used: %s' % ', '.join(missing)
skip(_msg)
def assert_equal(first, second):
"""Assert two objects are equal."""
if _test is None:
assert first == second
else:
_test.assertEqual(first, second)
def assert_not_equal(first, second):
"""Assert two objects are not equal."""
if _test is None:
assert first != second
else:
_test.assertNotEqual(first, second)
def add_cleanup(func, *args, **kwargs):
"""
    Add a function, with arguments, to be called when the test is
    completed. Functions added are called on a LIFO basis, on test
    failure or success. They allow a test to clean up after itself.
"""
_test.addCleanup(func, *args, **kwargs)
def get_cookies():
"""Gets the cookies of current session (set of dicts)."""
return browser.get_cookies()
def clear_cookies():
"""Clear the cookies of current session."""
_print('Clearing browser session cookies')
browser.delete_all_cookies()
def execute_script(script, *args):
"""
Executes JavaScript in the context of the currently selected
frame or window.
Within the script, use `document` to refer to the current document.
For example::
execute_script('document.title = "New Title"')
args will be made available to the script if given.
"""
_print('Executing script')
browser.execute_script(script, *args)
def get_element_source(id_or_elem):
"""Gets the innerHTML source of an element."""
elem = _get_elem(id_or_elem)
return elem.get_attribute('innerHTML')
| {
"content_hash": "34e89e4051c6ff4ba21fe1f0850f2b72",
"timestamp": "",
"source": "github",
"line_count": 1434,
"max_line_length": 94,
"avg_line_length": 31.96792189679219,
"alnum_prop": 0.6330657475677326,
"repo_name": "ktan2020/legacy-automation",
"id": "aae07a3a93cc49d9e2629c34a31a6fe2e3ac4e30",
"size": "46578",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "win/Lib/site-packages/sst/actions.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "ActionScript",
"bytes": "913"
},
{
"name": "Ada",
"bytes": "289"
},
{
"name": "Assembly",
"bytes": "687"
},
{
"name": "Boo",
"bytes": "540"
},
{
"name": "C",
"bytes": "40116"
},
{
"name": "C#",
"bytes": "474"
},
{
"name": "C++",
"bytes": "393"
},
{
"name": "CSS",
"bytes": "70883"
},
{
"name": "ColdFusion",
"bytes": "1012"
},
{
"name": "Common Lisp",
"bytes": "1034"
},
{
"name": "D",
"bytes": "1858"
},
{
"name": "Eiffel",
"bytes": "426"
},
{
"name": "Erlang",
"bytes": "9243"
},
{
"name": "FORTRAN",
"bytes": "1810"
},
{
"name": "Forth",
"bytes": "182"
},
{
"name": "Groovy",
"bytes": "2366"
},
{
"name": "Haskell",
"bytes": "816"
},
{
"name": "Haxe",
"bytes": "455"
},
{
"name": "Java",
"bytes": "1155"
},
{
"name": "JavaScript",
"bytes": "69444"
},
{
"name": "Lua",
"bytes": "795"
},
{
"name": "Matlab",
"bytes": "1278"
},
{
"name": "OCaml",
"bytes": "350"
},
{
"name": "Objective-C++",
"bytes": "885"
},
{
"name": "PHP",
"bytes": "1411"
},
{
"name": "Pascal",
"bytes": "388"
},
{
"name": "Perl",
"bytes": "252651"
},
{
"name": "Pike",
"bytes": "589"
},
{
"name": "Python",
"bytes": "42085780"
},
{
"name": "R",
"bytes": "1156"
},
{
"name": "Ruby",
"bytes": "480"
},
{
"name": "Scheme",
"bytes": "282"
},
{
"name": "Shell",
"bytes": "30518"
},
{
"name": "Smalltalk",
"bytes": "926"
},
{
"name": "Squirrel",
"bytes": "697"
},
{
"name": "Stata",
"bytes": "302"
},
{
"name": "SystemVerilog",
"bytes": "3145"
},
{
"name": "Tcl",
"bytes": "1039"
},
{
"name": "TeX",
"bytes": "1746"
},
{
"name": "VHDL",
"bytes": "985"
},
{
"name": "Vala",
"bytes": "664"
},
{
"name": "Verilog",
"bytes": "439"
},
{
"name": "Visual Basic",
"bytes": "2142"
},
{
"name": "XSLT",
"bytes": "152770"
},
{
"name": "ooc",
"bytes": "890"
},
{
"name": "xBase",
"bytes": "769"
}
],
"symlink_target": ""
} |
class Node( object ):
def __init__( self, attr, attrs, dataSet ):
self.falsePath = None # Destination Node for False
self.truePath = None # Destination Node for True
self.attribute = attr # Name of Attribute
self.attributes = attrs # Remaining possible attributes
self.dataSet = dataSet # Subset of data from previous decision
def nextNode( self, testPassed ):
return self.truePath if testPassed else self.falsePath
def newFalsePath( self, attr, dataSubset ):
falsePathAttributes = self.attributes[:]
falsePathAttributes.remove( attr )
#print( "New False Path:", falsePathAttributes )
self.falsePath = Node( attr, falsePathAttributes, dataSubset )
def newTruePath( self, attr, dataSubset ):
truePathAttributes = self.attributes[:]
truePathAttributes.remove( attr )
#print( "New True Path:", truePathAttributes )
self.truePath = Node( attr, truePathAttributes, dataSubset )
def subsetWithLabel( self, attr_index, label ):
return list( filter( lambda row: row[ attr_index ] == label, self.dataSet ) )
def resultsOfSet( self, S ):
no = len( list( filter( lambda row: row[-1] is False, S ) ) )
return ( len( S ) - no, no )
def makePrediction( self, indexOfAttr, pathBool ):
subset = self.subsetWithLabel( indexOfAttr, pathBool )
ratio = self.resultsOfSet( subset )
return ratio[0] > ratio[1] # Trues > Falses
class DecisionTree( object ):
def __init__( self, attrs ):
self.root = None
self.attributes = attrs
return
def newRoot( self, attr, attrs, data ):
self.root = Node( attr, attrs, data )
return
def getRoot( self ):
return self.root
def printNode( self, node, depth, pathBool, output_file ):
output = "| " * depth + node.attribute + " = " + ("1" if pathBool else "0") + " : "
if pathBool:
output += ( "1\n" if node.makePrediction( self.attributes.index( node.attribute ), True ) else "0\n" if node.truePath is None else "\n" )
else:
output += ( "1\n" if node.makePrediction( self.attributes.index( node.attribute ), False ) else "0\n" if node.falsePath is None else "\n" )
output_file.write( output )
def preorderTraversal( self, node, depth, output_file ):
if node is None: return
self.printNode( node, depth, False, output_file )
self.preorderTraversal( node.falsePath, depth + 1, output_file )
self.printNode( node, depth, True, output_file )
self.preorderTraversal( node.truePath , depth + 1, output_file )
def printTree( self, filename ):
with open( filename, 'w+' ) as output_file:
self.preorderTraversal( self.root, 0, output_file )
def getLastNodeFromDecisions( self, decisionDict ):
node = self.root
nextNode = node.nextNode( decisionDict[ node.attribute ] )
while nextNode is not None:
node = nextNode
nextNode = node.nextNode( decisionDict[ node.attribute ] )
return node
def dataSetFromDecisions( self, decisionDict ):
return self.getLastNodeFromDecisions( decisionDict ).dataSet
def indexOfAttribute( self, attr ):
return self.attributes.index( attr )
    def makePrediction( self, decisionDict ):
        node = self.getLastNodeFromDecisions( decisionDict )
        return node.makePrediction( self.indexOfAttribute( node.attribute ), decisionDict[ node.attribute ] )
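# Example usage (illustrative sketch; the attribute names and boolean rows
# below are made up, with the last column of each row being the outcome):
#
#     attrs = ['windy', 'humid']
#     data = [[True, False, True], [False, False, False], [True, True, False]]
#     tree = DecisionTree(attrs)
#     tree.newRoot('windy', attrs, data)
#     root = tree.getRoot()
#     root.newTruePath('humid', root.subsetWithLabel(0, True))
#     print(tree.makePrediction({'windy': True, 'humid': False}))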
| {
"content_hash": "9ebbab1959600ae9e44fb7a91e8d468f",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 142,
"avg_line_length": 35.43956043956044,
"alnum_prop": 0.7007751937984497,
"repo_name": "CKPalk/MachineLearning",
"id": "e103d872a5cac8663113a0738ed1d4d68f32b520",
"size": "3228",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Assignment1/Assignment1_EC/DecisionTree.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "52712"
}
],
"symlink_target": ""
} |
from f5.bigip import ManagementRoot
from f5.bigip.resource import OrganizingCollection
from f5.bigip.tm.analytics.dos_vis_common import Generate_Report
from f5.bigip.tm.analytics.dos_vis_common import Report_Results
from f5.sdk_exception import MissingRequiredCreationParameter
from f5.sdk_exception import UnsupportedOperation
import mock
import pytest
from six import iterkeys
@pytest.fixture
def FakeGenerateReport():
fake_analytics = mock.MagicMock()
fake_genrep = Generate_Report(fake_analytics)
fake_genrep._meta_data['bigip'].tmos_version = '13.1.0'
return fake_genrep
@pytest.fixture
def FakeReportResults():
fake_analytics = mock.MagicMock()
fake_repres = Report_Results(fake_analytics)
return fake_repres
class TestDosVisCommonOC(object):
def test_collection(self, fakeicontrolsession):
b = ManagementRoot('192.168.1.1', 'admin', 'admin')
t1 = b.tm.analytics.dos_vis_common
assert isinstance(t1, OrganizingCollection)
assert hasattr(t1, 'generate_reports')
assert hasattr(t1, 'report_results_s')
class TestGenerateReport(object):
def test_modify_raises(self, FakeGenerateReport):
with pytest.raises(UnsupportedOperation):
FakeGenerateReport.modify()
def test_create_no_args(self, FakeGenerateReport):
with pytest.raises(MissingRequiredCreationParameter):
FakeGenerateReport.create()
def test_create_two(self, fakeicontrolsession):
b = ManagementRoot('192.168.1.1', 'admin', 'admin')
t1 = b.tm.analytics.dos_vis_common.generate_reports.generate_report
t2 = b.tm.analytics.dos_vis_common.generate_reports.generate_report
assert t1 is t2
def test_collection(self, fakeicontrolsession):
b = ManagementRoot('192.168.1.1', 'admin', 'admin')
t = b.tm.analytics.dos_vis_common.generate_reports
test_meta = t._meta_data['attribute_registry']
test_meta2 = t._meta_data['allowed_lazy_attributes']
kind = 'tm:analytics:dos-vis-common:generate-report:avrgeneratereporttaskitemstate'
assert kind in list(iterkeys(test_meta))
assert Generate_Report in test_meta2
assert t._meta_data['object_has_stats'] is False
class TestReportResults(object):
def test_create_raises(self, FakeReportResults):
with pytest.raises(UnsupportedOperation):
FakeReportResults.create()
def test_modify_raises(self, FakeReportResults):
with pytest.raises(UnsupportedOperation):
FakeReportResults.modify()
def test_collection(self, fakeicontrolsession):
b = ManagementRoot('192.168.1.1', 'admin', 'admin')
t = b.tm.analytics.dos_vis_common.report_results_s
test_meta = t._meta_data['attribute_registry']
test_meta2 = t._meta_data['allowed_lazy_attributes']
kind = 'tm:analytics:dos-vis-common:report-results:avrreportresultitemstate'
assert kind in list(iterkeys(test_meta))
assert Report_Results in test_meta2
assert t._meta_data['object_has_stats'] is False
| {
"content_hash": "2da6dba131eb6a85dc8cc45c39cda702",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 91,
"avg_line_length": 38.111111111111114,
"alnum_prop": 0.7058632977000324,
"repo_name": "F5Networks/f5-common-python",
"id": "d39bc4b7aec9047b6724d025f5c54216daae68b1",
"size": "3669",
"binary": false,
"copies": "1",
"ref": "refs/heads/development",
"path": "f5/bigip/tm/analytics/test/unit/test_dos_vis_common.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "713"
},
{
"name": "Groovy",
"bytes": "4321"
},
{
"name": "Python",
"bytes": "2705690"
},
{
"name": "Shell",
"bytes": "6398"
}
],
"symlink_target": ""
} |
import os
from ._compat import itervalues
from ._globals import GLOBAL_LOCKER, THREAD_LOCAL
from ._load import OrderedDict
from .helpers._internals import Cursor
class ConnectionPool(object):
POOLS = {}
check_active_connection = True
def __init__(self):
_iid_ = str(id(self))
self._connection_thname_ = '_pydal_connection_' + _iid_ + '_'
self._cursors_thname_ = '_pydal_cursors_' + _iid_ + '_'
@property
def _pid_(self):
return str(os.getpid())
@property
def _connection_uname_(self):
return self._connection_thname_ + self._pid_
@property
def _cursors_uname_(self):
return self._cursors_thname_ + self._pid_
@staticmethod
def set_folder(folder):
THREAD_LOCAL._pydal_folder_ = folder
@property
def connection(self):
return getattr(THREAD_LOCAL, self._connection_uname_)
@connection.setter
def connection(self, val):
setattr(THREAD_LOCAL, self._connection_uname_, val)
self._clean_cursors()
if val is not None:
self._build_cursor()
def _clean_cursors(self):
setattr(THREAD_LOCAL, self._cursors_uname_, OrderedDict())
@property
def cursors(self):
return getattr(THREAD_LOCAL, self._cursors_uname_)
def _build_cursor(self):
rv = Cursor(self.connection)
self.cursors[id(rv.cursor)] = rv
return rv
def _get_or_build_free_cursor(self):
for handler in itervalues(self.cursors):
if handler.available:
return handler
return self._build_cursor()
@property
def cursor(self):
return self._get_or_build_free_cursor().cursor
def lock_cursor(self, cursor):
self.cursors[id(cursor)].lock()
def release_cursor(self, cursor):
self.cursors[id(cursor)].release()
def close_cursor(self, cursor):
cursor.close()
del self.cursors[id(cursor)]
def close(self, action='commit', really=True):
#: if we have an action (commit, rollback), try to execute it
succeeded = True
if action:
try:
if callable(action):
action(self)
else:
getattr(self, action)()
except:
#: connection had some problems, we want to drop it
succeeded = False
#: if we have pools, we should recycle the connection (but only when
        # we succeeded in `action`, if any, and the pool is not full)
if self.pool_size and succeeded:
GLOBAL_LOCKER.acquire()
pool = ConnectionPool.POOLS[self.uri]
if len(pool) < self.pool_size:
pool.append(self.connection)
really = False
GLOBAL_LOCKER.release()
#: closing the connection when we `really` want to, in particular:
# - when we had an exception running `action`
# - when we don't have pools
# - when we have pools but they're full
if really:
try:
self.close_connection()
except:
pass
#: always unset `connection` attribute
self.connection = None
@staticmethod
def close_all_instances(action):
""" to close cleanly databases in a multithreaded environment """
dbs = getattr(THREAD_LOCAL, '_pydal_db_instances_', {}).items()
for db_uid, db_group in dbs:
for db in db_group:
if hasattr(db, '_adapter'):
db._adapter.close(action)
getattr(THREAD_LOCAL, '_pydal_db_instances_', {}).clear()
getattr(THREAD_LOCAL, '_pydal_db_instances_zombie_', {}).clear()
if callable(action):
action(None)
return
def _find_work_folder(self):
self.folder = getattr(THREAD_LOCAL, '_pydal_folder_', '')
def after_connection_hook(self):
"""Hook for the after_connection parameter"""
if callable(self._after_connection):
self._after_connection(self)
self.after_connection()
def after_connection(self):
        # this is supposed to be overloaded by adapters
pass
def reconnect(self):
"""
Defines: `self.connection` and `self.cursor`
if `self.pool_size>0` it will try pull the connection from the pool
if the connection is not active (closed by db server) it will loop
if not `self.pool_size` or no active connections in pool makes a new one
"""
if getattr(THREAD_LOCAL, self._connection_uname_, None) is not None:
return
if not self.pool_size:
self.connection = self.connector()
self.after_connection_hook()
else:
uri = self.uri
POOLS = ConnectionPool.POOLS
while True:
GLOBAL_LOCKER.acquire()
if uri not in POOLS:
POOLS[uri] = []
if POOLS[uri]:
self.connection = POOLS[uri].pop()
GLOBAL_LOCKER.release()
try:
if self.check_active_connection:
self.test_connection()
break
except:
pass
else:
GLOBAL_LOCKER.release()
self.connection = self.connector()
self.after_connection_hook()
break
| {
"content_hash": "93afa913dbc1e0ab792542a161869497",
"timestamp": "",
"source": "github",
"line_count": 169,
"max_line_length": 80,
"avg_line_length": 32.857988165680474,
"alnum_prop": 0.5515937331172339,
"repo_name": "niphlod/pydal",
"id": "b36a5969df72b74d0273ce502c69f4779a393819",
"size": "5577",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pydal/connection.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "820628"
}
],
"symlink_target": ""
} |
from setuptools import setup
import os
def get_version(version_tuple):
if not isinstance(version_tuple[-1], int):
return '.'.join(map(str, version_tuple[:-1])) + version_tuple[-1]
return '.'.join(map(str, version_tuple))
init = os.path.join(os.path.dirname(__file__), 'hupwatch', '__init__.py')
version_line = list(filter(lambda l: l.startswith('VERSION'), open(init)))[0]
VERSION = get_version(eval(version_line.split('=')[-1]))
INSTALL_REQUIRES = []
try:
from pypandoc import convert
def read_md(f):
return convert(f, 'rst')
except ImportError:
print(
"warning: pypandoc module not found, could not convert Markdown to RST"
)
def read_md(f):
return open(f, 'r').read() # noqa
README = os.path.join(os.path.dirname(__file__), 'README.md')
setup(
name='hupwatch',
version=VERSION,
author='Michał Jaworski',
author_email='[email protected]',
description='Simple process supervision agnostic utility for graceful reloading of services', # noqa
long_description=read_md(README),
packages=['hupwatch'],
url='https://github.com/swistakm/hupwatch',
include_package_data=True,
install_requires=[],
zip_safe=False,
license="BSD",
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: POSIX',
'Topic :: System :: Systems Administration',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'License :: OSI Approved :: BSD License',
],
entry_points={
'console_scripts': [
'hupwatch = hupwatch.command:main'
]
}
)
| {
"content_hash": "2172212b071baf681b37ee1dd1b88169",
"timestamp": "",
"source": "github",
"line_count": 71,
"max_line_length": 105,
"avg_line_length": 26.112676056338028,
"alnum_prop": 0.6175836030204962,
"repo_name": "swistakm/hupwatch",
"id": "d591f160b4ba347d2ab9a256308dc262a69e19e1",
"size": "1879",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "10465"
}
],
"symlink_target": ""
} |
from osgeo import ogr
class Point(object):
""" Wrapper for ogr point """
def __init__(self, lat, lng):
""" Coordinates are in degrees """
self.point = ogr.Geometry(ogr.wkbPoint)
self.point.AddPoint(lng, lat)
def getOgr(self):
return self.point
ogr = property(getOgr)
class Country(object):
""" Wrapper for ogr country shape. Not meant to be instantiated directly. """
def __init__(self, shape):
self.shape = shape
def getIso(self):
return self.shape.GetField('ISO2')
iso = property(getIso)
def __str__(self):
return self.shape.GetField('NAME')
def contains(self, point):
return self.shape.geometry().Contains(point.ogr)
class CountryChecker(object):
""" Loads a country shape file, checks coordinates for country location. """
def __init__(self, country_file):
driver = ogr.GetDriverByName('ESRI Shapefile')
self.countryFile = driver.Open(country_file)
self.layer = self.countryFile.GetLayer()
def getCountry(self, point):
"""
        Checks the given GPS coordinates for a country.
        Output is either a Country object or None.
"""
for i in range(self.layer.GetFeatureCount()):
country = self.layer.GetFeature(i)
if country.geometry().Contains(point.ogr):
return Country(country)
# nothing found
return None
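# Example usage (illustrative sketch; assumes a world-borders shapefile with an
# ISO2 field is available at the made-up path below):
#
#     checker = CountryChecker('TM_WORLD_BORDERS-0.3.shp')
#     country = checker.getCountry(Point(52.52, 13.40))
#     if country is not None:
#         print(country.iso)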
| {
"content_hash": "e1305577af736ecde8da232faa5c837d",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 81,
"avg_line_length": 29.591836734693878,
"alnum_prop": 0.6186206896551724,
"repo_name": "rmichnovicz/Sick-Slopes",
"id": "06548990de94ac88942dc35135440f91926140e7",
"size": "1538",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "countries.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1021"
},
{
"name": "HTML",
"bytes": "7022"
},
{
"name": "JavaScript",
"bytes": "22890"
},
{
"name": "Jupyter Notebook",
"bytes": "686034"
},
{
"name": "Python",
"bytes": "48162"
}
],
"symlink_target": ""
} |
import cStringIO
import urllib
import numpy as np
import pylru
import utilities
from PIL import Image
def fetch_image_from_url(url):
"""
    Returns a PIL image from the given URL.
    :rtype: Image.Image
"""
if utilities.url_exists(url):
img_stream = cStringIO.StringIO(urllib.urlopen(url).read())
pil_image = Image.open(img_stream)
return pil_image
return Image.new("RGB", size=(256, 266), color='black')
class ImageValue:
"""
    An image class that can provide a raster image at any resolution, in either numpy array or PIL Image format.
"""
def get_pil_image(self, resolution):
"""
Returns a pil Image at requested resolution.
:rtype: Image.Image
"""
raise NotImplementedError('This is an interface')
def get_np_array(self, resolution):
raise NotImplementedError('This is an interface')
def is_set(self):
raise NotImplementedError('This is an interface')
def get_pil_image_at_full_resolution(self):
raise NotImplementedError('This is an interface')
def get_pil_image_at_full_resolution_proper_shape(self):
pass
image_cache_size = 1000
pil_image_cache = pylru.lrucache(image_cache_size)
proper_shape_image_cache = pylru.lrucache(image_cache_size)
class JpgWebImage(ImageValue):
"""
A lazily fetched web image.
"""
def __init__(self, url):
"""
        The given URL is stored; the image is fetched lazily when rendering is requested.
:type _pil_image: Image.Image
"""
self.url = url
if url in pil_image_cache:
self._pil_image = pil_image_cache[url]
else:
self._pil_image = None
if url in proper_shape_image_cache:
self._proper_shape_image = proper_shape_image_cache[url]
else:
self._proper_shape_image = None
self.cache = pylru.lrucache(10)
@property
def pil_image(self):
if self._pil_image is None:
self._pil_image = fetch_image_from_url(self.url)
pil_image_cache[self.url] = self._pil_image
return self._pil_image
def get_pil_image_at_full_resolution(self):
return self.pil_image
def get_pil_image_at_full_resolution_proper_shape(self):
if self._proper_shape_image is None:
self._proper_shape_image = utilities.reshape_proper_pil_image(self.get_pil_image_at_full_resolution())
proper_shape_image_cache[self.url] = self._proper_shape_image
return self._proper_shape_image
def get_pil_image(self, resolution):
"""
Returns a pil Image at requested resolution.
:rtype: Image.Image
"""
return self.pil_image.resize((resolution, resolution))
def get_np_array(self, resolution):
"""Convert to numpy array.
Drop alpha"""
cache_key = str(resolution)
if cache_key in self.cache:
return self.cache[cache_key]
pil_image = self.get_pil_image_at_full_resolution()
im_array = np.array(pil_image, dtype=np.uint8)
square_image = utilities.reshape_proper(im_array)
return_value = np.array(Image.fromarray(square_image).resize((resolution, resolution)), dtype=np.uint8)
self.cache[cache_key] = return_value
return return_value
def is_set(self):
return True
def __eq__(self, other):
return self.url == other.url
def __ne__(self, other):
return not self == other
def __repr__(self):
        return 'JpgWebImage({})'.format(self.url)
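# Example usage (illustrative sketch; the URL below is made up):
#
#     img = JpgWebImage('https://example.com/photo.jpg')
#     thumb = img.get_pil_image(64)      # 64x64 PIL image
#     pixels = img.get_np_array(256)     # 256x256x3 uint8 numpy array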
| {
"content_hash": "a233c9eaaff88f8b2f946a68127dd231",
"timestamp": "",
"source": "github",
"line_count": 123,
"max_line_length": 119,
"avg_line_length": 29.203252032520325,
"alnum_prop": 0.6227728285077951,
"repo_name": "abhishekraok/GraphMap",
"id": "516064dd8435d5b5002cb9410d3db081e99ef6c1",
"size": "3592",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "graphmap/imagevalue.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "61"
},
{
"name": "Jupyter Notebook",
"bytes": "863859"
},
{
"name": "Protocol Buffer",
"bytes": "358"
},
{
"name": "Python",
"bytes": "166698"
},
{
"name": "Shell",
"bytes": "61"
}
],
"symlink_target": ""
} |
from setuptools import setup
from querycount import __version__
url = "https://github.com/bradmontgomery/django-querycount/tarball/{0}".format(__version__)
setup(
name="django-querycount",
version=__version__,
author="Brad Montgomery",
author_email="[email protected]",
description=("Middleware that Prints the number of DB queries to the runserver console."),
install_requires=[],
license="MIT",
keywords="django querycount database performance",
url=url,
packages=[
"querycount",
],
long_description="this project gives you a middleware that"
"prints DB query counts in Django's runserver console output.",
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
"Topic :: Utilities",
],
)
| {
"content_hash": "9287912a0ec209f06b12e049bd3e6264",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 94,
"avg_line_length": 33.225806451612904,
"alnum_prop": 0.6485436893203883,
"repo_name": "bradmontgomery/django-querycount",
"id": "68112dabfd5ca3b2a5b143f836de176cee979826",
"size": "1030",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "13839"
}
],
"symlink_target": ""
} |
import logging
import pytest
import salt.cache
from tests.pytests.functional.cache.helpers import run_common_cache_tests
log = logging.getLogger(__name__)
@pytest.fixture
def cache(minion_opts):
opts = minion_opts.copy()
opts["memcache_expire_seconds"] = 42
cache = salt.cache.factory(opts)
return cache
def test_caching(subtests, cache):
run_common_cache_tests(subtests, cache)
| {
"content_hash": "6f832c0fac08e30bfd691e3128ddedaa",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 73,
"avg_line_length": 20.3,
"alnum_prop": 0.7315270935960592,
"repo_name": "saltstack/salt",
"id": "30bc5fec26b50f29c016b78c821997fdd401684a",
"size": "406",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/pytests/functional/cache/test_memcache.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "14911"
},
{
"name": "C",
"bytes": "1571"
},
{
"name": "Cython",
"bytes": "1458"
},
{
"name": "Dockerfile",
"bytes": "184"
},
{
"name": "Groovy",
"bytes": "12318"
},
{
"name": "HCL",
"bytes": "257"
},
{
"name": "HTML",
"bytes": "8031"
},
{
"name": "Jinja",
"bytes": "45598"
},
{
"name": "Makefile",
"bytes": "713"
},
{
"name": "NSIS",
"bytes": "76572"
},
{
"name": "PowerShell",
"bytes": "75891"
},
{
"name": "Python",
"bytes": "41444811"
},
{
"name": "Rich Text Format",
"bytes": "6242"
},
{
"name": "Roff",
"bytes": "191"
},
{
"name": "Ruby",
"bytes": "961"
},
{
"name": "SaltStack",
"bytes": "35856"
},
{
"name": "Scheme",
"bytes": "895"
},
{
"name": "Scilab",
"bytes": "1147"
},
{
"name": "Shell",
"bytes": "524917"
}
],
"symlink_target": ""
} |
from django.conf import settings
from django.conf.urls import url, include
from django.conf.urls.static import static
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from ratelimitbackend import admin
from ratelimitbackend.views import login
admin.autodiscover()
from . import views
from .profiles.forms import AuthForm
urlpatterns = [
url(r'^admin/rq/', include('django_rq_dashboard.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^subscriber/', include('django_push.subscriber.urls')),
url(r'^health/$', views.health, name='health'),
url(r'^robots.txt$', views.robots),
url(r'^humans.txt$', views.humans),
url(r'^favicon.ico$', views.favicon),
url(r'^apple-touch-icon-precomposed.png$', views.touch_icon),
url(r'^apple-touch-icon.png$', views.touch_icon),
url(r'^', include('feedhq.reader.urls', namespace='reader')),
url(r'^accounts/', include('feedhq.profiles.urls')),
url(r'^', include('feedhq.feeds.urls', namespace='feeds')),
url(r'^login/$', login, {'authentication_form': AuthForm}, name='login'),
url(r'^logout/$', views.logout, name='logout'),
]
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| {
"content_hash": "41bd8738664c8df0364bba0c3c45d472",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 77,
"avg_line_length": 39.5625,
"alnum_prop": 0.7045813586097947,
"repo_name": "rmoorman/feedhq",
"id": "5d3e14cf1dd421799bdc87afbc22a863d6bfdd1d",
"size": "1290",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "feedhq/urls.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "48727"
},
{
"name": "HTML",
"bytes": "49008"
},
{
"name": "JavaScript",
"bytes": "8882"
},
{
"name": "Makefile",
"bytes": "1501"
},
{
"name": "Python",
"bytes": "403391"
},
{
"name": "Ruby",
"bytes": "656"
},
{
"name": "Shell",
"bytes": "278"
}
],
"symlink_target": ""
} |
import requests
from bs4 import BeautifulSoup
import json
import urllib
headers = {'user-agent': 'Mozilla/5.0 (X11; Linux x86_64; rv:50.0) Gecko/20100101 Firefox/50.0'}
queries = ["Happy human face", "Sad human face", "Angry human face", "Disgusted human face", "Surprised human face", "Fearful human face",
"Счастливое лицо", "Печальное лицо", "Злое лицо", "Лицо отвращение", "Удивленное лицо", "Испуганное лицо"
]
class ImagesUrlFetcher (object):
def generateRequest(self, query, from_, num):
pass
def getLinks(self, query, count):
pass
class GoogleImagesUrlFetcher(ImagesUrlFetcher):
request = "https://www.google.ru/search?async=_id:rg_s,_pms:s&ei=35miWKGKF8KKsAGipoCABQ&yv=2&q={query}&start={start}&asearch=ichunk&newwindow=1&tbm=isch&ijn=3"
num_per_request = 100
def generateRequest(self, query, from_, num=0):
return self.request.format(query=query, start=from_, yv=from_//self.num_per_request)
def getLinks(self, query, count):
links = set()
q = urllib.parse.quote_plus(query)
for i in range(count // self.num_per_request):
r = self.request.format(query=q, start=i * self.num_per_request, yv=i)
response = requests.get(r, headers=headers)
if response.status_code == 200:
soup = BeautifulSoup(response.json()[1][1], 'html.parser')
divs = soup.find_all('div', class_ = 'rg_meta')
links.update(map(lambda div: json.loads(div.text)['ou'], divs))
return links
class YandexImagesUrlFetcher(ImagesUrlFetcher):
request = "https://yandex.ru/images/search?text={query}&p={p}"
num_per_request = 30
def generateRequest(self, query, from_, num=0):
return self.request.format(query=query, p=from_)
def getLinks(self, query, count):
links = set()
q = urllib.parse.quote_plus(query)
for i in range(count // self.num_per_request):
print(i)
r = self.generateRequest(q, i)
response = requests.get(r, headers=headers)
if response.status_code == 200:
soup = BeautifulSoup(response.text, 'html.parser')
divs = soup.find_all('div', class_='serp-item')
links.update(map(lambda div: json.loads(div.attrs['data-bem'])['serp-item']['img_href'], divs))
print (len(links))
return links
class BingImagesUrlFetcher(ImagesUrlFetcher):
request = "http://www.bing.com/images/async?q={query}&first={start}&count=100"
num_per_request = 100
def generateRequest(self, query, from_, num=0 ):
return self.request.format(query=query, start = from_)
def getLinks(self, query, count):
links = set()
q = urllib.parse.quote_plus(query)
for i in range(count // self.num_per_request):
print (i)
r = self.generateRequest(q, i*self.num_per_request)
response = requests.get(r, headers=headers)
if response.status_code == 200:
soup = BeautifulSoup(response.text, 'html.parser')
a = soup.find_all('a', class_='iusc')
links.update(map(lambda div: json.loads(div.attrs['m'])['murl'], a))
return links
if __name__ == '__main__':
count = 1000
giuf = GoogleImagesUrlFetcher()
yiuf = YandexImagesUrlFetcher()
biuf = BingImagesUrlFetcher()
for query in queries:
links = set()
with open (query+".urls", "w") as fout:
print (query)
links.update(yiuf.getLinks (query, count))
print (len(links))
links.update (giuf.getLinks(query, count))
print (len(links))
links.update (biuf.getLinks(query, count))
print (len(links))
for link in links:
fout.write(link)
fout.write("\n")
| {
"content_hash": "95ed7d0858de310e2245f96a001bc629",
"timestamp": "",
"source": "github",
"line_count": 100,
"max_line_length": 163,
"avg_line_length": 39.04,
"alnum_prop": 0.6004098360655737,
"repo_name": "mkeyran/EmotionRecognizer",
"id": "3cb3f8260248bf7e82afe37f87ebd41a871759e7",
"size": "4029",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "image_link_extractor.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "2663"
},
{
"name": "Python",
"bytes": "51983"
}
],
"symlink_target": ""
} |
import factory
from .base import BaseUserFactory
from libs.core.models.users.admin import AdminUser
class AdminProfileFactory(factory.Factory):
class Meta:
model = dict
first_name = factory.Faker('first_name')
last_name = factory.Faker('last_name')
class AdminUserFactory(BaseUserFactory):
class Meta:
model = AdminUser
profile = factory.SubFactory(AdminProfileFactory)
| {
"content_hash": "7781ec20017a67819ab9bfec61fe1197",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 53,
"avg_line_length": 20.7,
"alnum_prop": 0.7270531400966184,
"repo_name": "thnee/django-template",
"id": "2fb1afa6fd4a0ab336d16f92eb5090562016e5e8",
"size": "415",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "project_name/libs/core/factories/users/admin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "17169"
}
],
"symlink_target": ""
} |
"""Adds accounts to the specified multi-client account, in a single batch."""
from __future__ import absolute_import
from __future__ import print_function
import json
import sys
from shopping.content import common
from six.moves import range
# Number of accounts to insert.
BATCH_SIZE = 5
def main(argv):
# Authenticate and construct service.
service, config, _ = common.init(argv, __doc__)
merchant_id = config['merchantId']
common.check_mca(config, True)
account_names = [
'account%s' % common.get_unique_id() for i in range(BATCH_SIZE)
]
batch = {
'entries': [{
'batchId': i,
'merchantId': merchant_id,
'method': 'insert',
'account': {
'name': v,
'websiteUrl': 'https://%s.example.com/' % v,
},
} for i, v in enumerate(account_names)],
}
request = service.accounts().custombatch(body=batch)
result = request.execute()
if result['kind'] == 'content#accountsCustomBatchResponse':
for entry in result['entries']:
account = entry.get('account')
errors = entry.get('errors')
if account:
print('Account %s with name "%s" was created.' %
(account['id'], account['name']))
elif errors:
print('Errors for batch entry %d:' % entry['batchId'])
print(json.dumps(errors, sort_keys=True, indent=2,
separators=(',', ': ')))
else:
print('There was an error. Response: %s' % result)
if __name__ == '__main__':
main(sys.argv)
| {
"content_hash": "328fa38ef07e74926387bea1b661c1ea",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 77,
"avg_line_length": 27.963636363636365,
"alnum_prop": 0.5929778933680104,
"repo_name": "googleads/googleads-shopping-samples",
"id": "a7ee50fc38b79e2c5c6873ef5de59e3509e47405",
"size": "2155",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "python/shopping/content/accounts/insert_batch.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "109971"
},
{
"name": "Go",
"bytes": "62736"
},
{
"name": "Java",
"bytes": "147661"
},
{
"name": "PHP",
"bytes": "83914"
},
{
"name": "Python",
"bytes": "132074"
},
{
"name": "Ruby",
"bytes": "123581"
}
],
"symlink_target": ""
} |
import optparse
from collections import defaultdict
import numpy as np
def silk_output_parser(input_file_name,output_file_name,id1,id2):
input_read = open(input_file_name,'rU') #read input file
train = open(output_file_name,'w') #write on output file
#write the headers
train.write(id1)
train.write(',')
train.write(id2)
train.write('\n')
k = 0 #counter
#file to parse
#<http://dbpedia.org/resource/Where_Are_My_Children%3F> <http://www.w3.org/2002/07/owl#sameAs> <http://data.linkedmdb.org/resource/film/236>
for line in input_read.readlines(): #iterate through the lines of the file
line = line.split(' ') #split when there's a space
if line[0] == 'End': #we need to pass to the next configuration
k += 1
continue
ID1_whole = line[0] #<http://dbpedia.org/resource/Where_Are_My_Children%3F>, first element of the list
ID2_whole = line[-2] #<http://data.linkedmdb.org/resource/film/236> , second last element of the list
#ID1_whole = ID1_whole.split('/')
#ID2_whole = ID2_whole.split('/')
ID1 = ID1_whole.strip('<').strip('>')
ID2 = ID2_whole.strip('<').strip('>')
train.write(ID1)
train.write(',')
train.write(ID2)
train.write('\n')
train.close()
#execute as a script
if __name__ == '__main__':
parser = optparse.OptionParser()
parser.add_option('-i','--input', dest = 'input_file_name', help = 'input_file_name')
parser.add_option('-o','--output', dest = 'output_file_name', help = 'output_file_name')
parser.add_option('-u','--id1', dest = 'id1', help = 'id1')
parser.add_option('-k','--id2', dest = 'id2', help = 'id2')
(options, args) = parser.parse_args()
if options.input_file_name is None:
options.input_file_name = raw_input('Enter input file name:')
if options.output_file_name is None:
options.output_file_name = raw_input('Enter output file name:')
if options.id1 is None:
options.id1 = raw_input('Enter identifier of the first column (e.g. FFIEC_ID):')
if options.id2 is None:
options.id2 = raw_input('Enter identifier of the second column (e.g. SEC_ID):')
input_file_name = options.input_file_name
output_file_name = options.output_file_name
id1 = options.id1
id2 = options.id2
silk_output_parser(input_file_name,output_file_name,id1,id2) | {
"content_hash": "210f69e72263a51fbcb80df0501b0514",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 149,
"avg_line_length": 31.81578947368421,
"alnum_prop": 0.6286186931348222,
"repo_name": "enricopal/STEM",
"id": "82e36d23da46b455f3c889fd6c60dd7927278ed4",
"size": "2418",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/parser/silk_output_parser.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "847"
},
{
"name": "Java",
"bytes": "636761"
},
{
"name": "Python",
"bytes": "149144"
},
{
"name": "R",
"bytes": "11363"
},
{
"name": "Shell",
"bytes": "7880"
}
],
"symlink_target": ""
} |
import importlib
import pretend
import pytest
from fenrir.utils import resolve_app
def test_resolve_app_resolves(monkeypatch):
app = pretend.stub()
module = pretend.stub(myapp=app)
monkeypatch.setattr(importlib, "import_module", lambda x: module)
assert resolve_app("example:myapp") is app
def test_resolve_app_resolves_default(monkeypatch):
app = pretend.stub()
module = pretend.stub(app=app)
monkeypatch.setattr(importlib, "import_module", lambda x: module)
assert resolve_app("example") is app
def test_resolve_app_valid():
with pytest.raises(ValueError):
resolve_app("foo:bar:wat")
| {
"content_hash": "9f91679dc14dcae805efecbf5e02ac41",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 69,
"avg_line_length": 23.703703703703702,
"alnum_prop": 0.715625,
"repo_name": "dstufft/fenrir",
"id": "f194843a56b282675f45453481335490759a0537",
"size": "1181",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "72668"
},
{
"name": "Python",
"bytes": "96151"
},
{
"name": "Ragel in Ruby Host",
"bytes": "9587"
}
],
"symlink_target": ""
} |
""" Testing report generation
"""
from __future__ import absolute_import
from bigmler.tests.world import (world, common_setup_module,
common_teardown_module, teardown_class)
import bigmler.tests.basic_tst_prediction_steps as test_pred
def setup_module():
"""Setup for the module
"""
common_setup_module()
def teardown_module():
"""Teardown for the module
"""
common_teardown_module()
class TestReport(object):
def teardown(self):
"""Calling generic teardown for every method
"""
print "\nEnd of tests in: %s\n-------------------\n" % __name__
teardown_class()
def setup(self):
"""
Debug information
"""
print "\n-------------------\nTests in: %s\n" % __name__
def test_scenario1(self):
"""
Scenario: Successfully generating reports in Gazibit:
Given I create BigML resources and share them uploading train "<data>" file to evaluate and log evaluation and reports in "<output>"
And I check that the source has been created
And I check that the dataset has been created and shared
And I check that the model has been created and shared
Then I check that the evaluation has been created and shared
And I check that the Gazibit report has been created
And I check that the Gazibit shared report has been created
Examples:
| data | output |
| ../data/iris.csv | ./scenario_rpt_1/evaluation |
"""
print self.test_scenario1.__doc__
examples = [
['data/iris.csv', 'scenario_rpt_1/evaluation']]
for example in examples:
print "\nTesting with:\n", example
test_pred.i_create_all_resources_to_evaluate_and_report(self, data=example[0], output=example[1])
test_pred.i_check_create_source(self)
test_pred.i_check_create_dataset_shared(self)
test_pred.i_check_create_model_shared(self)
test_pred.i_check_create_evaluation_shared(self)
test_pred.i_check_gazibit_reports(self, shared=None)
test_pred.i_check_gazibit_reports(self, shared='shared ')
| {
"content_hash": "951a9de027631b90dd86923707b9cb38",
"timestamp": "",
"source": "github",
"line_count": 66,
"max_line_length": 148,
"avg_line_length": 35.46969696969697,
"alnum_prop": 0.5779581375480564,
"repo_name": "brokendata/bigmler",
"id": "11d3943c1b25f92b2633b9ea775eef53b2cf2926",
"size": "2960",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bigmler/tests/test_16_reports.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "25598"
},
{
"name": "Python",
"bytes": "965199"
}
],
"symlink_target": ""
} |
"""
DON'T MODIFY ANYTHING IN THIS CELL THAT IS BELOW THIS LINE
"""
from urllib.request import urlretrieve
from os.path import isfile, isdir
from tqdm import tqdm
import problem_unittests as tests
import tarfile
import ipdb
import matplotlib
ipdb.set_trace()
matplotlib.use('Agg')
import matplotlib.pyplot as plt
cifar10_dataset_folder_path = 'cifar-10-batches-py'
class DLProgress(tqdm):
last_block = 0
def hook(self, block_num=1, block_size=1, total_size=None):
self.total = total_size
self.update((block_num - self.last_block) * block_size)
self.last_block = block_num
if not isfile('cifar-10-python.tar.gz'):
with DLProgress(unit='B', unit_scale=True, miniters=1, desc='CIFAR-10 Dataset') as pbar:
urlretrieve(
'https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz',
'cifar-10-python.tar.gz',
pbar.hook)
if not isdir(cifar10_dataset_folder_path):
with tarfile.open('cifar-10-python.tar.gz') as tar:
tar.extractall()
tar.close()
tests.test_folder_path(cifar10_dataset_folder_path)
# ## Explore the Data
# The dataset is broken into batches to prevent your machine from running out of memory. The CIFAR-10 dataset consists of 5 batches, named `data_batch_1`, `data_batch_2`, etc.. Each batch contains the labels and images that are one of the following:
# * airplane
# * automobile
# * bird
# * cat
# * deer
# * dog
# * frog
# * horse
# * ship
# * truck
#
# Understanding a dataset is part of making predictions on the data. Play around with the code cell below by changing the `batch_id` and `sample_id`. The `batch_id` is the id for a batch (1-5). The `sample_id` is the id for an image and label pair in the batch.
#
# Ask yourself "What are all possible labels?", "What is the range of values for the image data?", "Are the labels in order or random?". Answers to questions like these will help you preprocess the data and end up with better predictions.
# In[3]:
#get_ipython().magic(u'matplotlib inline')
#get_ipython().magic(u"config InlineBackend.figure_format = 'retina'")
import helper
import numpy as np
# Explore the dataset
batch_id = 1
sample_id = 5
helper.display_stats(cifar10_dataset_folder_path, batch_id, sample_id)
# ## Implement Preprocess Functions
# ### Normalize
# In the cell below, implement the `normalize` function to take in image data, `x`, and return it as a normalized Numpy array. The values should be in the range of 0 to 1, inclusive. The return object should be the same shape as `x`.
# In[6]:
def normalize(x):
"""
Normalize a list of sample image data in the range of 0 to 1
: x: List of image data. The image shape is (32, 32, 3)
: return: Numpy array of normalize data
"""
if 1:
xmin = np.min(x.reshape(-1,1))
xmax = np.max(x.reshape(-1,1))
else:
xmin = 0
xmax = 255
new_x = (x - xmin)/(xmax-xmin)
return new_x
"""
DON'T MODIFY ANYTHING IN THIS CELL THAT IS BELOW THIS LINE
"""
tests.test_normalize(normalize)
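# A quick sanity check of `normalize` (illustrative; the sample values are made
# up): pixel values 0, 128 and 255 should map to 0.0, ~0.5 and 1.0.
sample = np.array([[[[0, 128, 255]]]], dtype=np.float32)
print(normalize(sample))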
# ### One-hot encode
# Just like the previous code cell, you'll be implementing a function for preprocessing. This time, you'll implement the `one_hot_encode` function. The input, `x`, is a list of labels. Implement the function to return the list of labels as a One-Hot encoded Numpy array. The possible values for labels are 0 to 9. The one-hot encoding function should return the same encoding for each value between each call to `one_hot_encode`. Make sure to save the map of encodings outside the function.
#
# Hint: Don't reinvent the wheel.
# In[8]:
def one_hot_encode(x):
"""
One hot encode a list of sample labels. Return a one-hot encoded vector for each label.
: x: List of sample Labels
: return: Numpy array of one-hot encoded labels
"""
nClasses = 10
one_hot_labels = np.zeros((len(x),nClasses), dtype=np.int32)
for i,item in enumerate(x):
one_hot_labels[i][item] = 1
return one_hot_labels
"""
DON'T MODIFY ANYTHING IN THIS CELL THAT IS BELOW THIS LINE
"""
tests.test_one_hot_encode(one_hot_encode)
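# A quick look at `one_hot_encode` (illustrative): label 0 maps to a vector with
# a 1 in position 0, label 3 to a vector with a 1 in position 3.
print(one_hot_encode([0, 3]))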
# ### Randomize Data
# As you saw from exploring the data above, the order of the samples are randomized. It doesn't hurt to randomize it again, but you don't need to for this dataset.
# ## Preprocess all the data and save it
# Running the code cell below will preprocess all the CIFAR-10 data and save it to file. The code below also uses 10% of the training data for validation.
# In[9]:
"""
DON'T MODIFY ANYTHING IN THIS CELL
"""
# Preprocess Training, Validation, and Testing Data
helper.preprocess_and_save_data(cifar10_dataset_folder_path, normalize, one_hot_encode)
# # Check Point
# This is your first checkpoint. If you ever decide to come back to this notebook or have to restart the notebook, you can start from here. The preprocessed data has been saved to disk.
# In[10]:
"""
DON'T MODIFY ANYTHING IN THIS CELL
"""
import pickle
import problem_unittests as tests
import helper
# Load the Preprocessed Validation data
valid_features, valid_labels = pickle.load(open('preprocess_validation.p', mode='rb'))
# ## Build the network
# For the neural network, you'll build each layer into a function. Most of the code you've seen has been outside of functions. To test your code more thoroughly, we require that you put each layer in a function. This allows us to give you better feedback and test for simple mistakes using our unittests before you submit your project.
#
# >**Note:** If you're finding it hard to dedicate enough time for this course each week, we've provided a small shortcut to this part of the project. In the next couple of problems, you'll have the option to use classes from the [TensorFlow Layers](https://www.tensorflow.org/api_docs/python/tf/layers) or [TensorFlow Layers (contrib)](https://www.tensorflow.org/api_guides/python/contrib.layers) packages to build each layer, except the layers you build in the "Convolutional and Max Pooling Layer" section. TF Layers is similar to Keras's and TFLearn's abstraction to layers, so it's easy to pickup.
#
# >However, if you would like to get the most out of this course, try to solve all the problems _without_ using anything from the TF Layers packages. You **can** still use classes from other packages that happen to have the same name as ones you find in TF Layers! For example, instead of using the TF Layers version of the `conv2d` class, [tf.layers.conv2d](https://www.tensorflow.org/api_docs/python/tf/layers/conv2d), you would want to use the TF Neural Network version of `conv2d`, [tf.nn.conv2d](https://www.tensorflow.org/api_docs/python/tf/nn/conv2d).
#
# Let's begin!
#
# ### Input
# The neural network needs to read the image data, one-hot encoded labels, and dropout keep probability. Implement the following functions
# * Implement `neural_net_image_input`
# * Return a [TF Placeholder](https://www.tensorflow.org/api_docs/python/tf/placeholder)
# * Set the shape using `image_shape` with batch size set to `None`.
# * Name the TensorFlow placeholder "x" using the TensorFlow `name` parameter in the [TF Placeholder](https://www.tensorflow.org/api_docs/python/tf/placeholder).
# * Implement `neural_net_label_input`
# * Return a [TF Placeholder](https://www.tensorflow.org/api_docs/python/tf/placeholder)
# * Set the shape using `n_classes` with batch size set to `None`.
# * Name the TensorFlow placeholder "y" using the TensorFlow `name` parameter in the [TF Placeholder](https://www.tensorflow.org/api_docs/python/tf/placeholder).
# * Implement `neural_net_keep_prob_input`
# * Return a [TF Placeholder](https://www.tensorflow.org/api_docs/python/tf/placeholder) for dropout keep probability.
# * Name the TensorFlow placeholder "keep_prob" using the TensorFlow `name` parameter in the [TF Placeholder](https://www.tensorflow.org/api_docs/python/tf/placeholder).
#
# These names will be used at the end of the project to load your saved model.
#
# Note: `None` for shapes in TensorFlow allows for a dynamic size.
# In[11]:
import tensorflow as tf
def neural_net_image_input(image_shape):
"""
    Return a Tensor for a batch of image input
: image_shape: Shape of the images
: return: Tensor for image input.
"""
# TODO: Implement Function
input_holder = tf.placeholder(tf.float32,shape=(None,image_shape[0],image_shape[1],image_shape[2]), name = 'x')
return input_holder
def neural_net_label_input(n_classes):
"""
Return a Tensor for a batch of label input
: n_classes: Number of classes
: return: Tensor for label input.
"""
label_holder = tf.placeholder(tf.float32, shape=(None,n_classes), name ='y')# TODO: Implement Function
return label_holder
def neural_net_keep_prob_input():
"""
Return a Tensor for keep probability
: return: Tensor for keep probability.
"""
# TODO: Implement Function
keep_prob = tf.placeholder(tf.float32, name='keep_prob')
return keep_prob
"""
DON'T MODIFY ANYTHING IN THIS CELL THAT IS BELOW THIS LINE
"""
tf.reset_default_graph()
tests.test_nn_image_inputs(neural_net_image_input)
tests.test_nn_label_inputs(neural_net_label_input)
tests.test_nn_keep_prob_inputs(neural_net_keep_prob_input)
# In[28]:
dd = tf.placeholder(tf.float32,shape=(1))
dd.get_shape()
# ### Convolution and Max Pooling Layer
# Convolution layers have a lot of success with images. For this code cell, you should implement the function `conv2d_maxpool` to apply convolution then max pooling:
# * Create the weight and bias using `conv_ksize`, `conv_num_outputs` and the shape of `x_tensor`.
# * Apply a convolution to `x_tensor` using weight and `conv_strides`.
# * We recommend you use same padding, but you're welcome to use any padding.
# * Add bias
# * Add a nonlinear activation to the convolution.
# * Apply Max Pooling using `pool_ksize` and `pool_strides`.
# * We recommend you use same padding, but you're welcome to use any padding.
#
# **Note:** You **can't** use [TensorFlow Layers](https://www.tensorflow.org/api_docs/python/tf/layers) or [TensorFlow Layers (contrib)](https://www.tensorflow.org/api_guides/python/contrib.layers) for **this** layer, but you can still use TensorFlow's [Neural Network](https://www.tensorflow.org/api_docs/python/tf/nn) package. You may still use the shortcut option for all the **other** layers.
# In[32]:
get_ipython().magic(u'pdb')
# In[12]:
def conv2d_maxpool(x_tensor, conv_num_outputs, conv_ksize, conv_strides, pool_ksize, pool_strides):
"""
Apply convolution then max pooling to x_tensor
:param x_tensor: TensorFlow Tensor
:param conv_num_outputs: Number of outputs for the convolutional layer
    :param conv_ksize: kernel size 2-D Tuple for the convolutional layer
    :param conv_strides: Stride 2-D Tuple for convolution
    :param pool_ksize: kernel size 2-D Tuple for pool
:param pool_strides: Stride 2-D Tuple for pool
: return: A tensor that represents convolution and max pooling of x_tensor
"""
# TODO: Implement Function
num_inputs = x_tensor.get_shape().as_list()[-1]
weight_vals = tf.truncated_normal([conv_ksize[0], conv_ksize[1], num_inputs, conv_num_outputs])
bias_vals = tf.zeros(shape=(conv_num_outputs))
W = tf.Variable(weight_vals, dtype=tf.float32)
bias = tf.Variable(bias_vals,dtype=tf.float32)
conv_1 = tf.nn.conv2d(x_tensor, W, [1, conv_strides[0], conv_strides[1], 1], padding='SAME')
x = tf.nn.relu(conv_1 + bias)
x = tf.nn.max_pool(x,[1,pool_ksize[0],pool_ksize[1],1], [1,pool_strides[0],pool_strides[1],1], padding='VALID')
return x
"""
DON'T MODIFY ANYTHING IN THIS CELL THAT IS BELOW THIS LINE
"""
tests.test_con_pool(conv2d_maxpool)
# ### Flatten Layer
# Implement the `flatten` function to change the dimension of `x_tensor` from a 4-D tensor to a 2-D tensor. The output should be the shape (*Batch Size*, *Flattened Image Size*). Shortcut option: you can use classes from the [TensorFlow Layers](https://www.tensorflow.org/api_docs/python/tf/layers) or [TensorFlow Layers (contrib)](https://www.tensorflow.org/api_guides/python/contrib.layers) packages for this layer. For more of a challenge, only use other TensorFlow packages.
# In[13]:
get_ipython().magic(u'pdb')
# In[ ]:
from functools import reduce
import pdb
def flatten(x_tensor):
"""
Flatten x_tensor to (Batch Size, Flattened Image Size)
: x_tensor: A tensor of size (Batch Size, ...), where ... are the image dimensions.
: return: A tensor of size (Batch Size, Flattened Image Size).
"""
# TODO: Implement Function
input_shape = x_tensor.get_shape().as_list()[1:]
n_input_elems = reduce((lambda x, y: x*y),input_shape)
#pdb.set_trace()
new_tensor = tf.reshape(x_tensor,(-1,n_input_elems))
return new_tensor
"""
DON'T MODIFY ANYTHING IN THIS CELL THAT IS BELOW THIS LINE
"""
tests.test_flatten(flatten)
# ### Fully-Connected Layer
# Implement the `fully_conn` function to apply a fully connected layer to `x_tensor` with the shape (*Batch Size*, *num_outputs*). Shortcut option: you can use classes from the [TensorFlow Layers](https://www.tensorflow.org/api_docs/python/tf/layers) or [TensorFlow Layers (contrib)](https://www.tensorflow.org/api_guides/python/contrib.layers) packages for this layer. For more of a challenge, only use other TensorFlow packages.
# In[8]:
from functools import reduce
def fully_conn(x_tensor, num_outputs):
"""
Apply a fully connected layer to x_tensor using weight and bias
: x_tensor: A 2-D tensor where the first dimension is batch size.
    : num_outputs: The number of outputs that the new tensor should have.
: return: A 2-D tensor where the second dimension is num_outputs.
"""
# TODO: Implement Function
input_shape = x_tensor.get_shape().as_list()[1:]
n_input_elems = reduce((lambda x, y: x*y),input_shape)
W_vals = tf.truncated_normal([n_input_elems,num_outputs])
b_vals = tf.zeros((num_outputs),dtype=tf.float32)
W = tf.Variable(W_vals)
b = tf.Variable(b_vals)
# reshape the x_tensor
x_tensor = tf.reshape(x_tensor,shape=(-1,n_input_elems))
x = tf.matmul(x_tensor,W) + b
x = tf.nn.relu(x)
return x
"""
DON'T MODIFY ANYTHING IN THIS CELL THAT IS BELOW THIS LINE
"""
tests.test_fully_conn(fully_conn)
# ### Output Layer
# Implement the `output` function to apply a fully connected layer to `x_tensor` with the shape (*Batch Size*, *num_outputs*). Shortcut option: you can use classes from the [TensorFlow Layers](https://www.tensorflow.org/api_docs/python/tf/layers) or [TensorFlow Layers (contrib)](https://www.tensorflow.org/api_guides/python/contrib.layers) packages for this layer. For more of a challenge, only use other TensorFlow packages.
#
# **Note:** Activation, softmax, or cross entropy should **not** be applied to this; the loss defined later uses `tf.nn.softmax_cross_entropy_with_logits`, which applies the softmax internally and expects raw logits.
# In[1]:
import tensorflow as tf
# In[6]:
def output(x_tensor, num_outputs):
"""
Apply a output layer to x_tensor using weight and bias
: x_tensor: A 2-D tensor where the first dimension is batch size.
    : num_outputs: The number of outputs that the new tensor should have.
: return: A 2-D tensor where the second dimension is num_outputs.
"""
input_shape = x_tensor.get_shape().as_list()[1:]
n_input_elems = reduce((lambda x, y: x*y),input_shape)
W_vals = tf.truncated_normal([n_input_elems,num_outputs])
b_vals = tf.zeros((num_outputs),dtype=tf.float32)
W = tf.Variable(W_vals)
b = tf.Variable(b_vals)
# reshape the x_tensor
x_tensor = tf.reshape(x_tensor,shape=(-1,n_input_elems))
x = tf.matmul(x_tensor,W) + b
return x
"""
DON'T MODIFY ANYTHING IN THIS CELL THAT IS BELOW THIS LINE
"""
tests.test_output(output)
# ### Create Convolutional Model
# Implement the function `conv_net` to create a convolutional neural network model. The function takes in a batch of images, `x`, and outputs logits. Use the layers you created above to create this model:
#
# * Apply 1, 2, or 3 Convolution and Max Pool layers
# * Apply a Flatten Layer
# * Apply 1, 2, or 3 Fully Connected Layers
# * Apply an Output Layer
# * Return the output
# * Apply [TensorFlow's Dropout](https://www.tensorflow.org/api_docs/python/tf/nn/dropout) to one or more layers in the model using `keep_prob`.
# In[ ]:
conv_ksize = (3,3)
conv_strides = (1,1)
pool_ksize = (2,2)
pool_strides = (1,1)
num_outputs = 1024
num_classes = 10
def conv_net(x, keep_prob):
"""
Create a convolutional neural network model
: x: Placeholder tensor that holds image data.
    : keep_prob: Placeholder tensor that holds dropout keep probability.
: return: Tensor that represents logits
"""
# TODO: Apply 1, 2, or 3 Convolution and Max Pool layers
# Play around with different number of outputs, kernel size and stride
# Function Definition from Above:
# conv2d_maxpool(x_tensor, conv_num_outputs, conv_ksize, conv_strides, pool_ksize, pool_strides)
conv_num_outputs = 64
x = conv2d_maxpool(x, conv_num_outputs, conv_ksize, conv_strides, pool_ksize, pool_strides)
conv_num_outputs = 128
x = conv2d_maxpool(x, conv_num_outputs, conv_ksize, conv_strides, pool_ksize, pool_strides)
conv_num_outputs = 256
x = conv2d_maxpool(x, conv_num_outputs, conv_ksize, conv_strides, pool_ksize, pool_strides)
# TODO: Apply a Flatten Layer
# Function Definition from Above:
# flatten(x_tensor)
x = flatten(x)
# TODO: Apply 1, 2, or 3 Fully Connected Layers
# Play around with different number of outputs
# Function Definition from Above:
# fully_conn(x_tensor, num_outputs)
    x = fully_conn(x, num_outputs)
    # Apply dropout to the fully connected layer using the keep_prob placeholder,
    # as required above.
    x = tf.nn.dropout(x, keep_prob)
# TODO: Apply an Output Layer
# Set this to the number of classes
# Function Definition from Above:
# output(x_tensor, num_outputs)
x = output(x,num_classes)
# TODO: return output
return x
"""
DON'T MODIFY ANYTHING IN THIS CELL THAT IS BELOW THIS LINE
"""
##############################
## Build the Neural Network ##
##############################
# Remove previous weights, bias, inputs, etc..
tf.reset_default_graph()
# Inputs
x = neural_net_image_input((32, 32, 3))
y = neural_net_label_input(10)
keep_prob = neural_net_keep_prob_input()
# Model
logits = conv_net(x, keep_prob)
# Name logits Tensor, so that it can be loaded from disk after training
logits = tf.identity(logits, name='logits')
# Loss and Optimizer
cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=logits, labels=y))
optimizer = tf.train.AdamOptimizer().minimize(cost)
# Accuracy
correct_pred = tf.equal(tf.argmax(logits, 1), tf.argmax(y, 1))
accuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32), name='accuracy')
tests.test_conv_net(conv_net)
# ## Train the Neural Network
# ### Single Optimization
# Implement the function `train_neural_network` to do a single optimization. The optimization should use `optimizer` to optimize in `session` with a `feed_dict` of the following:
# * `x` for image input
# * `y` for labels
# * `keep_prob` for keep probability for dropout
#
# This function will be called for each batch, so `tf.global_variables_initializer()` has already been called.
#
# Note: Nothing needs to be returned. This function is only optimizing the neural network.
# In[ ]:
def train_neural_network(session, optimizer, keep_probability, feature_batch, label_batch):
"""
Optimize the session on a batch of images and labels
: session: Current TensorFlow session
: optimizer: TensorFlow optimizer function
: keep_probability: keep probability
: feature_batch: Batch of Numpy image data
: label_batch: Batch of Numpy label data
"""
    # Run a single optimization step, feeding the batch through the global
    # placeholders `x`, `y` and `keep_prob` defined above.
    session.run(optimizer, feed_dict={
        x: feature_batch,
        y: label_batch,
        keep_prob: keep_probability})
"""
DON'T MODIFY ANYTHING IN THIS CELL THAT IS BELOW THIS LINE
"""
tests.test_train_nn(train_neural_network)
# ### Show Stats
# Implement the function `print_stats` to print loss and validation accuracy. Use the global variables `valid_features` and `valid_labels` to calculate validation accuracy. Use a keep probability of `1.0` to calculate the loss and validation accuracy.
# In[ ]:
def print_stats(session, feature_batch, label_batch, cost, accuracy):
"""
Print information about loss and validation accuracy
: session: Current TensorFlow session
: feature_batch: Batch of Numpy image data
: label_batch: Batch of Numpy label data
: cost: TensorFlow cost function
: accuracy: TensorFlow accuracy function
"""
    # Loss is computed on the training batch and accuracy on the global
    # validation set; both use a keep probability of 1.0 as required above.
    loss = session.run(cost, feed_dict={
        x: feature_batch,
        y: label_batch,
        keep_prob: 1.0})
    valid_acc = session.run(accuracy, feed_dict={
        x: valid_features,
        y: valid_labels,
        keep_prob: 1.0})
    print('Loss: {:>10.4f} Validation Accuracy: {:.6f}'.format(loss, valid_acc))
# ### Hyperparameters
# Tune the following parameters:
# * Set `epochs` to the number of iterations until the network stops learning or starts overfitting
# * Set `batch_size` to the highest number that your machine has memory for. Most people set them to common sizes of memory:
# * 64
# * 128
# * 256
# * ...
# * Set `keep_probability` to the probability of keeping a node using dropout
# In[ ]:
# TODO: Tune Parameters
epochs = 10              # assumed starting point; raise it until validation accuracy stops improving
batch_size = 128         # assumed; use the largest batch your machine has memory for
keep_probability = 0.5   # assumed dropout keep probability
# ### Train on a Single CIFAR-10 Batch
# Instead of training the neural network on all the CIFAR-10 batches of data, let's use a single batch. This should save time while you iterate on the model to get a better accuracy. Once the final validation accuracy is 50% or greater, run the model on all the data in the next section.
# In[ ]:
"""
DON'T MODIFY ANYTHING IN THIS CELL
"""
print('Checking the Training on a Single Batch...')
with tf.Session() as sess:
# Initializing the variables
sess.run(tf.global_variables_initializer())
# Training cycle
for epoch in range(epochs):
batch_i = 1
for batch_features, batch_labels in helper.load_preprocess_training_batch(batch_i, batch_size):
train_neural_network(sess, optimizer, keep_probability, batch_features, batch_labels)
print('Epoch {:>2}, CIFAR-10 Batch {}: '.format(epoch + 1, batch_i), end='')
print_stats(sess, batch_features, batch_labels, cost, accuracy)
# ### Fully Train the Model
# Now that you got a good accuracy with a single CIFAR-10 batch, try it with all five batches.
# In[ ]:
"""
DON'T MODIFY ANYTHING IN THIS CELL
"""
save_model_path = './image_classification'
print('Training...')
with tf.Session() as sess:
# Initializing the variables
sess.run(tf.global_variables_initializer())
# Training cycle
for epoch in range(epochs):
# Loop over all batches
n_batches = 5
for batch_i in range(1, n_batches + 1):
for batch_features, batch_labels in helper.load_preprocess_training_batch(batch_i, batch_size):
train_neural_network(sess, optimizer, keep_probability, batch_features, batch_labels)
print('Epoch {:>2}, CIFAR-10 Batch {}: '.format(epoch + 1, batch_i), end='')
print_stats(sess, batch_features, batch_labels, cost, accuracy)
# Save Model
saver = tf.train.Saver()
save_path = saver.save(sess, save_model_path)
# # Checkpoint
# The model has been saved to disk.
# ## Test Model
# Test your model against the test dataset. This will be your final accuracy. You should have an accuracy greater than 50%. If you don't, keep tweaking the model architecture and parameters.
# In[ ]:
"""
DON'T MODIFY ANYTHING IN THIS CELL
"""
get_ipython().magic(u'matplotlib inline')
get_ipython().magic(u"config InlineBackend.figure_format = 'retina'")
import tensorflow as tf
import pickle
import helper
import random
# Set batch size if not already set
try:
if batch_size:
pass
except NameError:
batch_size = 64
save_model_path = './image_classification'
n_samples = 4
top_n_predictions = 3
def test_model():
"""
Test the saved model against the test dataset
"""
test_features, test_labels = pickle.load(open('preprocess_training.p', mode='rb'))
loaded_graph = tf.Graph()
with tf.Session(graph=loaded_graph) as sess:
# Load model
loader = tf.train.import_meta_graph(save_model_path + '.meta')
loader.restore(sess, save_model_path)
# Get Tensors from loaded model
loaded_x = loaded_graph.get_tensor_by_name('x:0')
loaded_y = loaded_graph.get_tensor_by_name('y:0')
loaded_keep_prob = loaded_graph.get_tensor_by_name('keep_prob:0')
loaded_logits = loaded_graph.get_tensor_by_name('logits:0')
loaded_acc = loaded_graph.get_tensor_by_name('accuracy:0')
# Get accuracy in batches for memory limitations
test_batch_acc_total = 0
test_batch_count = 0
for train_feature_batch, train_label_batch in helper.batch_features_labels(test_features, test_labels, batch_size):
test_batch_acc_total += sess.run(
loaded_acc,
feed_dict={loaded_x: train_feature_batch, loaded_y: train_label_batch, loaded_keep_prob: 1.0})
test_batch_count += 1
print('Testing Accuracy: {}\n'.format(test_batch_acc_total/test_batch_count))
# Print Random Samples
random_test_features, random_test_labels = tuple(zip(*random.sample(list(zip(test_features, test_labels)), n_samples)))
random_test_predictions = sess.run(
tf.nn.top_k(tf.nn.softmax(loaded_logits), top_n_predictions),
feed_dict={loaded_x: random_test_features, loaded_y: random_test_labels, loaded_keep_prob: 1.0})
helper.display_image_predictions(random_test_features, random_test_labels, random_test_predictions)
test_model()
# ## Why 50-70% Accuracy?
# You might be wondering why you can't get an accuracy any higher. First things first, 50% isn't bad for a simple CNN. Pure guessing would get you 10% accuracy. However, you might notice people are getting scores [well above 70%](http://rodrigob.github.io/are_we_there_yet/build/classification_datasets_results.html#43494641522d3130). That's because we haven't taught you all there is to know about neural networks. We still need to cover a few more techniques.
# ## Submitting This Project
# When submitting this project, make sure to run all the cells before saving the notebook. Save the notebook file as "dlnd_image_classification.ipynb" and save it as a HTML file under "File" -> "Download as". Include the "helper.py" and "problem_unittests.py" files in your submission.
| {
"content_hash": "84009d4d486f2271e6f2fe0660852c3b",
"timestamp": "",
"source": "github",
"line_count": 675,
"max_line_length": 603,
"avg_line_length": 39.06666666666667,
"alnum_prop": 0.6992036405005688,
"repo_name": "kitu2007/dl_class",
"id": "0df4a2a8be589dcd0ca6356c937a8b739e7cdf5e",
"size": "27124",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "image-classification/dlnd_image_classification.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "25401453"
},
{
"name": "Python",
"bytes": "178729"
}
],
"symlink_target": ""
} |
from unittest import TestCase
import pandas as pd
import numpy as np
import pandas_validator as pv
class DataFrameValidatorFixture(pv.DataFrameValidator):
"""Fixture for testing the validation of column type."""
integer_field = pv.IntegerColumnValidator('i')
float_field = pv.FloatColumnValidator('f')
class DataFrameValidatorTest(TestCase):
"""Testing the validation of column type."""
def setUp(self):
self.validator = DataFrameValidatorFixture()
def test_valid(self):
df = pd.DataFrame({'i': [0, 1], 'f': [0., 1.]})
self.assertTrue(self.validator.is_valid(df))
def test_invalid_when_given_integer_series_to_float_column_validator(self):
df = pd.DataFrame({'i': [0, 1], 'f': [0, 1]})
self.assertFalse(self.validator.is_valid(df))
class DataFrameValidatorFixtureWithSize(pv.DataFrameValidator):
"""Fixture for testing the validation of column and row number."""
row_num = 3
column_num = 2
class DataFrameValidatorSizeTest(TestCase):
"""Testing the validation of column and row number."""
def setUp(self):
self.validator = DataFrameValidatorFixtureWithSize()
def test_valid_when_matches_row_numbers(self):
df = pd.DataFrame({'x': [0, 1, 2], 'y': [1., 2., 3.]})
self.assertTrue(self.validator.is_valid(df))
def test_invalid_when_not_matches_row_numbers(self):
df = pd.DataFrame({'x': [0, 1], 'y': [1., 2.]})
self.assertFalse(self.validator.is_valid(df))
def test_invalid_when_not_matches_column_numbers(self):
df = pd.DataFrame({'x': [0, 1, 2], 'y': [1., 2., 3.], 'z': [1, 2, 3]})
self.assertFalse(self.validator.is_valid(df))
class DataFrameValidatorFixtureWithIndex(pv.DataFrameValidator):
"""Fixture for testing the validation of index validator."""
index = pv.IndexValidator(size=3, type=np.int64)
class DataFrameValidatorIndexTest(TestCase):
"""Testing the validation of index size and type."""
def setUp(self):
self.validator = DataFrameValidatorFixtureWithIndex()
def test_valid_when_matches_index_size_and_type(self):
df = pd.DataFrame([0, 1, 2])
self.assertTrue(self.validator.is_valid(df))
def test_invalid_when_not_matches_index_size(self):
df = pd.DataFrame([0, 1, 2, 3])
self.assertFalse(self.validator.is_valid(df))
def test_invalid_when_not_matches_index_type(self):
df = pd.DataFrame([0, 1, 2], index=['a', 'b', 'c'])
self.assertFalse(self.validator.is_valid(df))
class DataFrameValidatorFixtureWithColumns(pv.DataFrameValidator):
"""Fixture for testing the validation of columns validator."""
columns = pv.ColumnsValidator(size=2, type=np.object_)
class DataFrameValidatorColumnsIndexTest(TestCase):
"""Testing the validation of columns size and type"""
def setUp(self):
self.validator = DataFrameValidatorFixtureWithColumns()
def test_valid_when_matches_columns_size_and_type(self):
df = pd.DataFrame({'x': [0, 1, 2], 'y': [1., 2., 3.]})
self.assertTrue(self.validator.is_valid(df))
def test_invalid_when_not_matches_columns_size(self):
df = pd.DataFrame({'x': [0, 1, 2], 'y': [1., 2., 3.], 'z': [1, 2, 3]})
self.assertFalse(self.validator.is_valid(df))
def test_invalid_when_not_matches_columns_type(self):
df = pd.DataFrame([[0, 1, 2], [1., 2., 3.]])
self.assertFalse(self.validator.is_valid(df))
| {
"content_hash": "1d9a90a77b83e5f0cc2fedd840609ccc",
"timestamp": "",
"source": "github",
"line_count": 95,
"max_line_length": 79,
"avg_line_length": 36.50526315789474,
"alnum_prop": 0.6594579008073818,
"repo_name": "c-bata/pandas-validator",
"id": "52111521c38284b2af8d09790799edc0205e4afa",
"size": "3468",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pandas_validator/validators/test/test_dataframe.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "8355"
},
{
"name": "Python",
"bytes": "19230"
}
],
"symlink_target": ""
} |
from enum import auto
from typing import Union
from fastapi import FastAPI
from fastapi_utils.enums import StrEnum
from fastapi_utils.tasks import repeat_every
from panoptes.utils.config.client import get_config
from pydantic import BaseModel
from panoptes.pocs.sensor.power import PowerBoard
class RelayAction(StrEnum):
turn_on = auto()
turn_off = auto()
class RelayCommand(BaseModel):
relay: Union[str, int]
command: RelayAction
app = FastAPI()
power_board: PowerBoard
read_interval = get_config('environment.power.read_interval', default=60)
@app.on_event('startup')
async def startup():
global power_board
power_board = PowerBoard(**get_config('environment.power', {}))
# Use the configured read interval for the periodic task (assumed intent; `read_interval` was otherwise unused).
@app.on_event('startup')
@repeat_every(seconds=read_interval, wait_first=True)
def record_readings():
"""Record the current readings in the db."""
global power_board
return power_board.record(collection_name='power')
@app.get('/')
async def root():
"""Returns the power board status."""
global power_board
return power_board.status
@app.get('/readings')
async def readings():
"""Return the current readings as a dict."""
global power_board
return power_board.to_dataframe().to_dict()
@app.post('/control')
def control_relay(relay_command: RelayCommand):
"""Control a relay via a POST request."""
return do_command(relay_command)
@app.get('/relay/{relay}/control/{command}')
def control_relay_url(relay: Union[int, str], command: str = 'turn_on'):
"""Control a relay via a GET request"""
return do_command(RelayCommand(relay=relay, command=command))
def do_command(relay_command: RelayCommand):
"""Control a relay.
This function performs the actual relay control and is used by both request
types.
"""
global power_board
relay_id = relay_command.relay
try:
relay = power_board.relay_labels[relay_id]
except KeyError:
relay = power_board.relays[relay_id]
command_func = getattr(relay, relay_command.command)
# Perform function.
command_func()
return relay_command
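# A minimal usage sketch (the host, port and relay name below are assumptions, not
# part of this module): with the service running, a relay can be toggled through
# either endpoint, e.g.
#
#     import requests
#     requests.post('http://localhost:8000/control',
#                   json={'relay': 'mount', 'command': 'turn_on'})
#     requests.get('http://localhost:8000/relay/mount/control/turn_off')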
| {
"content_hash": "eef23a35e6f33cdcfb0605dcb17b3cc5",
"timestamp": "",
"source": "github",
"line_count": 84,
"max_line_length": 79,
"avg_line_length": 24.86904761904762,
"alnum_prop": 0.7027285782671134,
"repo_name": "panoptes/POCS",
"id": "38b22acb374bf59810dc019221d662e2190ab448",
"size": "2089",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "src/panoptes/pocs/utils/service/power.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "5689"
},
{
"name": "JavaScript",
"bytes": "18198"
},
{
"name": "Python",
"bytes": "837393"
},
{
"name": "Shell",
"bytes": "9960"
}
],
"symlink_target": ""
} |
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
def get_unspent(listunspent, amount):
for utx in listunspent:
if utx['amount'] == amount:
return utx
raise AssertionError('Could not find unspent with amount={}'.format(amount))
class RawTransactionsTest(BitcoinTestFramework):
def __init__(self):
super().__init__()
self.setup_clean_chain = True
self.num_nodes = 4
def setup_network(self, split=False):
self.nodes = start_nodes(self.num_nodes, self.options.tmpdir)
connect_nodes_bi(self.nodes,0,1)
connect_nodes_bi(self.nodes,1,2)
connect_nodes_bi(self.nodes,0,2)
connect_nodes_bi(self.nodes,0,3)
self.is_network_split=False
self.sync_all()
def run_test(self):
print("Mining blocks...")
min_relay_tx_fee = self.nodes[0].getnetworkinfo()['relayfee']
# This test is not meant to test fee estimation and we'd like
# to be sure all txs are sent at a consistent desired feerate
for node in self.nodes:
node.settxfee(min_relay_tx_fee)
# if the fee's positive delta is higher than this value tests will fail,
# neg. delta always fail the tests.
# The size of the signature of every input may be at most 2 bytes larger
# than a minimum sized signature.
# = 2 bytes * minRelayTxFeePerByte
feeTolerance = 2 * min_relay_tx_fee/1000
self.nodes[2].generate(1)
self.sync_all()
self.nodes[0].generate(121)
self.sync_all()
watchonly_address = self.nodes[0].getnewaddress()
watchonly_pubkey = self.nodes[0].validateaddress(watchonly_address)["pubkey"]
watchonly_amount = Decimal(200)
self.nodes[3].importpubkey(watchonly_pubkey, "", True)
watchonly_txid = self.nodes[0].sendtoaddress(watchonly_address, watchonly_amount)
self.nodes[0].sendtoaddress(self.nodes[3].getnewaddress(), watchonly_amount / 10)
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.5)
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.0)
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 5.0)
self.nodes[0].generate(1)
self.sync_all()
###############
# simple test #
###############
inputs = [ ]
outputs = { self.nodes[0].getnewaddress() : 1.0 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
assert(len(dec_tx['vin']) > 0) #test that we have enough inputs
##############################
# simple test with two coins #
##############################
inputs = [ ]
outputs = { self.nodes[0].getnewaddress() : 2.2 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
assert(len(dec_tx['vin']) > 0) #test if we have enough inputs
##############################
# simple test with two coins #
##############################
inputs = [ ]
outputs = { self.nodes[0].getnewaddress() : 2.6 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
assert(len(dec_tx['vin']) > 0)
assert_equal(dec_tx['vin'][0]['scriptSig']['hex'], '')
################################
# simple test with two outputs #
################################
inputs = [ ]
outputs = { self.nodes[0].getnewaddress() : 2.6, self.nodes[1].getnewaddress() : 2.5 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
for out in dec_tx['vout']:
totalOut += out['value']
assert(len(dec_tx['vin']) > 0)
assert_equal(dec_tx['vin'][0]['scriptSig']['hex'], '')
#########################################################################
# test a fundrawtransaction with a VIN greater than the required amount #
#########################################################################
utx = get_unspent(self.nodes[2].listunspent(), 5)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']}]
outputs = { self.nodes[0].getnewaddress() : 1.0 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
for out in dec_tx['vout']:
totalOut += out['value']
assert_equal(fee + totalOut, utx['amount']) #compare vin total and totalout+fee
#####################################################################
# test a fundrawtransaction with which will not get a change output #
#####################################################################
utx = get_unspent(self.nodes[2].listunspent(), 5)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']}]
outputs = { self.nodes[0].getnewaddress() : Decimal(5.0) - fee - feeTolerance }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
for out in dec_tx['vout']:
totalOut += out['value']
assert_equal(rawtxfund['changepos'], -1)
assert_equal(fee + totalOut, utx['amount']) #compare vin total and totalout+fee
####################################################
# test a fundrawtransaction with an invalid option #
####################################################
utx = get_unspent(self.nodes[2].listunspent(), 5)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']} ]
outputs = { self.nodes[0].getnewaddress() : Decimal(4.0) }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
try:
self.nodes[2].fundrawtransaction(rawtx, {'foo': 'bar'})
raise AssertionError("Accepted invalid option foo")
except JSONRPCException as e:
assert("Unexpected key foo" in e.error['message'])
############################################################
# test a fundrawtransaction with an invalid change address #
############################################################
utx = get_unspent(self.nodes[2].listunspent(), 5)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']} ]
outputs = { self.nodes[0].getnewaddress() : Decimal(4.0) }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
try:
self.nodes[2].fundrawtransaction(rawtx, {'changeAddress': 'foobar'})
raise AssertionError("Accepted invalid precious address")
except JSONRPCException as e:
assert("changeAddress must be a valid precious address" in e.error['message'])
############################################################
# test a fundrawtransaction with a provided change address #
############################################################
utx = get_unspent(self.nodes[2].listunspent(), 5)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']} ]
outputs = { self.nodes[0].getnewaddress() : Decimal(4.0) }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
change = self.nodes[2].getnewaddress()
try:
rawtxfund = self.nodes[2].fundrawtransaction(rawtx, {'changeAddress': change, 'changePosition': 2})
except JSONRPCException as e:
assert('changePosition out of bounds' == e.error['message'])
else:
assert(False)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx, {'changeAddress': change, 'changePosition': 0})
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
out = dec_tx['vout'][0];
assert_equal(change, out['scriptPubKey']['addresses'][0])
#########################################################################
# test a fundrawtransaction with a VIN smaller than the required amount #
#########################################################################
utx = get_unspent(self.nodes[2].listunspent(), 1)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']}]
outputs = { self.nodes[0].getnewaddress() : 1.0 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
# 4-byte version + 1-byte vin count + 36-byte prevout then script_len
rawtx = rawtx[:82] + "0100" + rawtx[84:]
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
assert_equal("00", dec_tx['vin'][0]['scriptSig']['hex'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
matchingOuts = 0
for i, out in enumerate(dec_tx['vout']):
totalOut += out['value']
if out['scriptPubKey']['addresses'][0] in outputs:
matchingOuts+=1
else:
assert_equal(i, rawtxfund['changepos'])
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
assert_equal("00", dec_tx['vin'][0]['scriptSig']['hex'])
assert_equal(matchingOuts, 1)
assert_equal(len(dec_tx['vout']), 2)
###########################################
# test a fundrawtransaction with two VINs #
###########################################
utx = get_unspent(self.nodes[2].listunspent(), 1)
utx2 = get_unspent(self.nodes[2].listunspent(), 5)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']},{'txid' : utx2['txid'], 'vout' : utx2['vout']} ]
outputs = { self.nodes[0].getnewaddress() : 6.0 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
matchingOuts = 0
for out in dec_tx['vout']:
totalOut += out['value']
if out['scriptPubKey']['addresses'][0] in outputs:
matchingOuts+=1
assert_equal(matchingOuts, 1)
assert_equal(len(dec_tx['vout']), 2)
matchingIns = 0
for vinOut in dec_tx['vin']:
for vinIn in inputs:
if vinIn['txid'] == vinOut['txid']:
matchingIns+=1
assert_equal(matchingIns, 2) #we now must see two vins identical to vins given as params
#########################################################
# test a fundrawtransaction with two VINs and two vOUTs #
#########################################################
utx = get_unspent(self.nodes[2].listunspent(), 1)
utx2 = get_unspent(self.nodes[2].listunspent(), 5)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']},{'txid' : utx2['txid'], 'vout' : utx2['vout']} ]
outputs = { self.nodes[0].getnewaddress() : 6.0, self.nodes[0].getnewaddress() : 1.0 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
matchingOuts = 0
for out in dec_tx['vout']:
totalOut += out['value']
if out['scriptPubKey']['addresses'][0] in outputs:
matchingOuts+=1
assert_equal(matchingOuts, 2)
assert_equal(len(dec_tx['vout']), 3)
##############################################
# test a fundrawtransaction with invalid vin #
##############################################
listunspent = self.nodes[2].listunspent()
inputs = [ {'txid' : "1c7f966dab21119bac53213a2bc7532bff1fa844c124fd750a7d0b1332440bd1", 'vout' : 0} ] #invalid vin!
outputs = { self.nodes[0].getnewaddress() : 1.0}
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
try:
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
raise AssertionError("Spent more than available")
except JSONRPCException as e:
assert("Insufficient" in e.error['message'])
############################################################
#compare fee of a standard pubkeyhash transaction
inputs = []
outputs = {self.nodes[1].getnewaddress():1.1}
rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[0].fundrawtransaction(rawTx)
#create same transaction over sendtoaddress
txId = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 1.1)
signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
#compare fee
feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
assert(feeDelta >= 0 and feeDelta <= feeTolerance)
############################################################
############################################################
#compare fee of a standard pubkeyhash transaction with multiple outputs
inputs = []
outputs = {self.nodes[1].getnewaddress():1.1,self.nodes[1].getnewaddress():1.2,self.nodes[1].getnewaddress():0.1,self.nodes[1].getnewaddress():1.3,self.nodes[1].getnewaddress():0.2,self.nodes[1].getnewaddress():0.3}
rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[0].fundrawtransaction(rawTx)
#create same transaction over sendtoaddress
txId = self.nodes[0].sendmany("", outputs)
signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
#compare fee
feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
assert(feeDelta >= 0 and feeDelta <= feeTolerance)
############################################################
############################################################
#compare fee of a 2of2 multisig p2sh transaction
# create 2of2 addr
addr1 = self.nodes[1].getnewaddress()
addr2 = self.nodes[1].getnewaddress()
addr1Obj = self.nodes[1].validateaddress(addr1)
addr2Obj = self.nodes[1].validateaddress(addr2)
mSigObj = self.nodes[1].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])
inputs = []
outputs = {mSigObj:1.1}
rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[0].fundrawtransaction(rawTx)
#create same transaction over sendtoaddress
txId = self.nodes[0].sendtoaddress(mSigObj, 1.1)
signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
#compare fee
feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
assert(feeDelta >= 0 and feeDelta <= feeTolerance)
############################################################
############################################################
#compare fee of a standard pubkeyhash transaction
# create 4of5 addr
addr1 = self.nodes[1].getnewaddress()
addr2 = self.nodes[1].getnewaddress()
addr3 = self.nodes[1].getnewaddress()
addr4 = self.nodes[1].getnewaddress()
addr5 = self.nodes[1].getnewaddress()
addr1Obj = self.nodes[1].validateaddress(addr1)
addr2Obj = self.nodes[1].validateaddress(addr2)
addr3Obj = self.nodes[1].validateaddress(addr3)
addr4Obj = self.nodes[1].validateaddress(addr4)
addr5Obj = self.nodes[1].validateaddress(addr5)
mSigObj = self.nodes[1].addmultisigaddress(4, [addr1Obj['pubkey'], addr2Obj['pubkey'], addr3Obj['pubkey'], addr4Obj['pubkey'], addr5Obj['pubkey']])
inputs = []
outputs = {mSigObj:1.1}
rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[0].fundrawtransaction(rawTx)
#create same transaction over sendtoaddress
txId = self.nodes[0].sendtoaddress(mSigObj, 1.1)
signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
#compare fee
feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
assert(feeDelta >= 0 and feeDelta <= feeTolerance)
############################################################
############################################################
# spend a 2of2 multisig transaction over fundraw
# create 2of2 addr
addr1 = self.nodes[2].getnewaddress()
addr2 = self.nodes[2].getnewaddress()
addr1Obj = self.nodes[2].validateaddress(addr1)
addr2Obj = self.nodes[2].validateaddress(addr2)
mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])
# send 1.2 BTC to msig addr
txId = self.nodes[0].sendtoaddress(mSigObj, 1.2)
self.sync_all()
self.nodes[1].generate(1)
self.sync_all()
oldBalance = self.nodes[1].getbalance()
inputs = []
outputs = {self.nodes[1].getnewaddress():1.1}
rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[2].fundrawtransaction(rawTx)
signedTx = self.nodes[2].signrawtransaction(fundedTx['hex'])
txId = self.nodes[2].sendrawtransaction(signedTx['hex'])
self.sync_all()
self.nodes[1].generate(1)
self.sync_all()
# make sure funds are received at node1
assert_equal(oldBalance+Decimal('1.10000000'), self.nodes[1].getbalance())
############################################################
# locked wallet test
self.nodes[1].encryptwallet("test")
self.nodes.pop(1)
stop_nodes(self.nodes)
self.nodes = start_nodes(self.num_nodes, self.options.tmpdir)
# This test is not meant to test fee estimation and we'd like
# to be sure all txs are sent at a consistent desired feerate
for node in self.nodes:
node.settxfee(min_relay_tx_fee)
connect_nodes_bi(self.nodes,0,1)
connect_nodes_bi(self.nodes,1,2)
connect_nodes_bi(self.nodes,0,2)
connect_nodes_bi(self.nodes,0,3)
self.is_network_split=False
self.sync_all()
# drain the keypool
self.nodes[1].getnewaddress()
inputs = []
outputs = {self.nodes[0].getnewaddress():1.1}
rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
# fund a transaction that requires a new key for the change output
# creating the key must be impossible because the wallet is locked
try:
fundedTx = self.nodes[1].fundrawtransaction(rawTx)
raise AssertionError("Wallet unlocked without passphrase")
except JSONRPCException as e:
assert('Keypool ran out' in e.error['message'])
#refill the keypool
self.nodes[1].walletpassphrase("test", 100)
self.nodes[1].walletlock()
try:
self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1.2)
raise AssertionError("Wallet unlocked without passphrase")
except JSONRPCException as e:
assert('walletpassphrase' in e.error['message'])
oldBalance = self.nodes[0].getbalance()
inputs = []
outputs = {self.nodes[0].getnewaddress():1.1}
rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[1].fundrawtransaction(rawTx)
#now we need to unlock
self.nodes[1].walletpassphrase("test", 100)
signedTx = self.nodes[1].signrawtransaction(fundedTx['hex'])
txId = self.nodes[1].sendrawtransaction(signedTx['hex'])
self.nodes[1].generate(1)
self.sync_all()
        # make sure funds are received at node0
assert_equal(oldBalance+Decimal('51.10000000'), self.nodes[0].getbalance())
###############################################
# multiple (~19) inputs tx test | Compare fee #
###############################################
#empty node1, send some small coins from node0 to node1
self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), self.nodes[1].getbalance(), "", "", True)
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
for i in range(0,20):
self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.01)
self.nodes[0].generate(1)
self.sync_all()
#fund a tx with ~20 small inputs
inputs = []
outputs = {self.nodes[0].getnewaddress():0.15,self.nodes[0].getnewaddress():0.04}
rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[1].fundrawtransaction(rawTx)
#create same transaction over sendtoaddress
txId = self.nodes[1].sendmany("", outputs)
signedFee = self.nodes[1].getrawmempool(True)[txId]['fee']
#compare fee
feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
assert(feeDelta >= 0 and feeDelta <= feeTolerance*19) #~19 inputs
#############################################
# multiple (~19) inputs tx test | sign/send #
#############################################
#again, empty node1, send some small coins from node0 to node1
self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), self.nodes[1].getbalance(), "", "", True)
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
for i in range(0,20):
self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.01)
self.nodes[0].generate(1)
self.sync_all()
#fund a tx with ~20 small inputs
oldBalance = self.nodes[0].getbalance()
inputs = []
outputs = {self.nodes[0].getnewaddress():0.15,self.nodes[0].getnewaddress():0.04}
rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[1].fundrawtransaction(rawTx)
fundedAndSignedTx = self.nodes[1].signrawtransaction(fundedTx['hex'])
txId = self.nodes[1].sendrawtransaction(fundedAndSignedTx['hex'])
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
assert_equal(oldBalance+Decimal('50.19000000'), self.nodes[0].getbalance()) #0.19+block reward
#####################################################
# test fundrawtransaction with OP_RETURN and no vin #
#####################################################
rawtx = "0100000000010000000000000000066a047465737400000000"
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(len(dec_tx['vin']), 0)
assert_equal(len(dec_tx['vout']), 1)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
assert_greater_than(len(dec_tx['vin']), 0) # at least one vin
assert_equal(len(dec_tx['vout']), 2) # one change output added
##################################################
# test a fundrawtransaction using only watchonly #
##################################################
inputs = []
outputs = {self.nodes[2].getnewaddress() : watchonly_amount / 2}
rawtx = self.nodes[3].createrawtransaction(inputs, outputs)
result = self.nodes[3].fundrawtransaction(rawtx, {'includeWatching': True })
res_dec = self.nodes[0].decoderawtransaction(result["hex"])
assert_equal(len(res_dec["vin"]), 1)
assert_equal(res_dec["vin"][0]["txid"], watchonly_txid)
assert("fee" in result.keys())
assert_greater_than(result["changepos"], -1)
###############################################################
# test fundrawtransaction using the entirety of watched funds #
###############################################################
inputs = []
outputs = {self.nodes[2].getnewaddress() : watchonly_amount}
rawtx = self.nodes[3].createrawtransaction(inputs, outputs)
# Backward compatibility test (2nd param is includeWatching)
result = self.nodes[3].fundrawtransaction(rawtx, True)
res_dec = self.nodes[0].decoderawtransaction(result["hex"])
assert_equal(len(res_dec["vin"]), 2)
assert(res_dec["vin"][0]["txid"] == watchonly_txid or res_dec["vin"][1]["txid"] == watchonly_txid)
assert_greater_than(result["fee"], 0)
assert_greater_than(result["changepos"], -1)
assert_equal(result["fee"] + res_dec["vout"][result["changepos"]]["value"], watchonly_amount / 10)
signedtx = self.nodes[3].signrawtransaction(result["hex"])
assert(not signedtx["complete"])
signedtx = self.nodes[0].signrawtransaction(signedtx["hex"])
assert(signedtx["complete"])
self.nodes[0].sendrawtransaction(signedtx["hex"])
self.nodes[0].generate(1)
self.sync_all()
#######################
# Test feeRate option #
#######################
# Make sure there is exactly one input so coin selection can't skew the result
assert_equal(len(self.nodes[3].listunspent(1)), 1)
inputs = []
outputs = {self.nodes[2].getnewaddress() : 1}
rawtx = self.nodes[3].createrawtransaction(inputs, outputs)
result = self.nodes[3].fundrawtransaction(rawtx) # uses min_relay_tx_fee (set by settxfee)
result2 = self.nodes[3].fundrawtransaction(rawtx, {"feeRate": 2*min_relay_tx_fee})
result3 = self.nodes[3].fundrawtransaction(rawtx, {"feeRate": 10*min_relay_tx_fee})
result_fee_rate = result['fee'] * 1000 / count_bytes(result['hex'])
assert_fee_amount(result2['fee'], count_bytes(result2['hex']), 2 * result_fee_rate)
assert_fee_amount(result3['fee'], count_bytes(result3['hex']), 10 * result_fee_rate)
if __name__ == '__main__':
RawTransactionsTest().main()
| {
"content_hash": "b72639ad0e619a1ef4c7cb697fba1a82",
"timestamp": "",
"source": "github",
"line_count": 659,
"max_line_length": 223,
"avg_line_length": 42.78907435508346,
"alnum_prop": 0.5523441378821193,
"repo_name": "precious-project/precious",
"id": "395a880fe3c8400d5a34c4756f40d02404281251",
"size": "28413",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "qa/rpc-tests/fundrawtransaction.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "28456"
},
{
"name": "C",
"bytes": "702574"
},
{
"name": "C++",
"bytes": "4591049"
},
{
"name": "CSS",
"bytes": "1127"
},
{
"name": "HTML",
"bytes": "50621"
},
{
"name": "Java",
"bytes": "30290"
},
{
"name": "M4",
"bytes": "185597"
},
{
"name": "Makefile",
"bytes": "105228"
},
{
"name": "Objective-C",
"bytes": "3892"
},
{
"name": "Objective-C++",
"bytes": "7232"
},
{
"name": "Protocol Buffer",
"bytes": "2329"
},
{
"name": "Python",
"bytes": "1030158"
},
{
"name": "QMake",
"bytes": "2020"
},
{
"name": "Roff",
"bytes": "30558"
},
{
"name": "Shell",
"bytes": "47200"
}
],
"symlink_target": ""
} |
import serial
import time
import Reactive
import plot
from math import sqrt
serial = serial.Serial('/dev/ttyS0')
IPLOT=True
TURNING_TOWARDS_HOME = False
ANGLE_THRESH = 3
ANGLE_PRECISE_THRESH = 20
ANGLE_CAREFUL_THRESH = 10
DIST_THRESH = 8
DIST_NEAR = 60
DEFAULT_SPEED_ACTIVE = 0.05
DEFAULT_SPEED_HOME = 0.05
EXPLORE_FOR = 30
class Robot:
def __init__(self):
global serial
self.serial = serial
self.reactive = Reactive.Reactive(self.serial);
if IPLOT:
self.ip = plot.InteractivePlot()
def start(self):
global TURNING_TOWARDS_HOME
global ANGLE_THRESH
global DIST_THRESH
global DIST_NEAR
global ANGLE_PRECISE_THRESH
global DEFAULT_SPEED_ACTIVE
global DEFAULT_SPEED_HOME
global EXPLORE_FOR
self.starttime = time.time()
prevDistToHome = 100000
suggestAction = 0
perceive_speed = DEFAULT_SPEED_ACTIVE
while(True):
print
print
extra_sleep = 0
self.reactive.sensors.updateModel()
if IPLOT:
                self.ip.update(self.reactive.sensors.historyPosX, self.reactive.sensors.historyPosY)
if TURNING_TOWARDS_HOME:
#self.serial.write('D,0,0\n')
#self.serial.readline()
#time.sleep(0.1)
self.reactive.sensors.updateModel()
self.reactive.sensors.updatePos()
                angle = (180-self.reactive.sensors.getAngleToHome())%360
other_angle = 360-angle
print "Turning towards home angle is " + str(angle) + " other one is " + str(other_angle)
distToHome = self.reactive.sensors.getDistanceFromHome()
if distToHome < DIST_NEAR and distToHome > prevDistToHome:
print "Home!!!"
self.serial.write('D,0,0\n')
self.serial.readline()
return
if angle > ANGLE_THRESH and other_angle > ANGLE_THRESH:
if angle < ANGLE_PRECISE_THRESH or other_angle < ANGLE_PRECISE_THRESH:
turnSpeed = 1
elif angle < ANGLE_CAREFUL_THRESH or other_angle < ANGLE_CAREFUL_THRESH:
turnSpeed = 2
else:
turnSpeed = -1
if angle > 180:
print "Suggesting left"
suggestAction = ("turnRight", turnSpeed)
else:
print "Suggesting right"
suggestAction = ("turnLeft", turnSpeed)
else:
print "Going home!"
if distToHome > DIST_THRESH:
if distToHome < DIST_NEAR:
speed = 2
else:
speed = 5
suggestAction = ("goStraight", speed)
extra_sleep += 0.1
else:
print "Home!!!"
self.serial.write('D,0,0\n')
self.serial.readline()
return
prevDistToHome = distToHome
self.reactive.act(suggestAction)
time.sleep(perceive_speed + extra_sleep)
print "Distance from home " + str(self.reactive.sensors.getDistanceFromHome())
print "Angle from home " + str(self.reactive.sensors.getAngleFromHome())
if not TURNING_TOWARDS_HOME and (time.time() - self.starttime) > EXPLORE_FOR:
self.serial.write('D,0,0\n')
self.serial.readline()
# plot.plotPath(robot.reactive.sensors.historyPosX, robot.reactive.sensors.historyPosY)
TURNING_TOWARDS_HOME = True
perceive_speed = DEFAULT_SPEED_HOME
#self.distToHome = self.reactive.sensors.getDistanceFromHome()
def stop(self):
self.serial.write('D,0,0\n')
self.serial.readall()
def stop():
serial.write('D,0,0\n')
serial.readall()
robot = Robot()
| {
"content_hash": "961cf6d6ced7607dbe10d676f569ad7c",
"timestamp": "",
"source": "github",
"line_count": 118,
"max_line_length": 105,
"avg_line_length": 35.559322033898304,
"alnum_prop": 0.5238322211630124,
"repo_name": "XapaJIaMnu/iar",
"id": "1b2ce7a1d5b5ddb26cba9f487db68e0b33d02290",
"size": "4272",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "task2.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "107221"
},
{
"name": "Shell",
"bytes": "36"
}
],
"symlink_target": ""
} |
"""Paster"""
from os.path import dirname, exists
import promus.util as util
def paste(prs, name):
"Paste a template to current location. "
tmp = dirname(__file__)
if exists('%s/%s' % (tmp, name)):
util.exec_cmd('cp -r %s/%s/* ./' % (tmp, name), True)
else:
prs.dismiss("PASTE>> no template found...", 1)
prs.dismiss("PASTE>> done...", 0)
| {
"content_hash": "c91c028a4e3e2666f42a5ff984b0d8ce",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 61,
"avg_line_length": 26.928571428571427,
"alnum_prop": 0.5729442970822282,
"repo_name": "jmlopez-rod/promus",
"id": "04f76ebb9b803ab626c171a09af5bb5fc11fad7d",
"size": "377",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "promus/paster/__init__.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "99855"
},
{
"name": "Shell",
"bytes": "1600"
},
{
"name": "TeX",
"bytes": "1429"
}
],
"symlink_target": ""
} |
"""Script to automatically roll dependencies in the libyuv DEPS file."""
import argparse
import base64
import collections
import logging
import os
import re
import subprocess
import sys
import urllib2
# Skip these dependencies (list without solution name prefix).
DONT_AUTOROLL_THESE = [
'src/third_party/gflags/src',
]
LIBYUV_URL = 'https://chromium.googlesource.com/libyuv/libyuv'
CHROMIUM_SRC_URL = 'https://chromium.googlesource.com/chromium/src'
CHROMIUM_COMMIT_TEMPLATE = CHROMIUM_SRC_URL + '/+/%s'
CHROMIUM_LOG_TEMPLATE = CHROMIUM_SRC_URL + '/+log/%s'
CHROMIUM_FILE_TEMPLATE = CHROMIUM_SRC_URL + '/+/%s/%s'
COMMIT_POSITION_RE = re.compile('^Cr-Commit-Position: .*#([0-9]+).*$')
CLANG_REVISION_RE = re.compile(r'^CLANG_REVISION = \'([0-9a-z]+)\'$')
ROLL_BRANCH_NAME = 'roll_chromium_revision'
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
CHECKOUT_SRC_DIR = os.path.realpath(os.path.join(SCRIPT_DIR, os.pardir,
os.pardir))
CHECKOUT_ROOT_DIR = os.path.realpath(os.path.join(CHECKOUT_SRC_DIR, os.pardir))
sys.path.append(os.path.join(CHECKOUT_SRC_DIR, 'build'))
import find_depot_tools
find_depot_tools.add_depot_tools_to_path()
CLANG_UPDATE_SCRIPT_URL_PATH = 'tools/clang/scripts/update.py'
CLANG_UPDATE_SCRIPT_LOCAL_PATH = os.path.join(CHECKOUT_SRC_DIR, 'tools',
'clang', 'scripts', 'update.py')
DepsEntry = collections.namedtuple('DepsEntry', 'path url revision')
ChangedDep = collections.namedtuple('ChangedDep',
'path url current_rev new_rev')
class RollError(Exception):
pass
def VarLookup(local_scope):
return lambda var_name: local_scope['vars'][var_name]
def ParseDepsDict(deps_content):
local_scope = {}
global_scope = {
'Var': VarLookup(local_scope),
'deps_os': {},
}
exec(deps_content, global_scope, local_scope)
return local_scope
def ParseLocalDepsFile(filename):
with open(filename, 'rb') as f:
deps_content = f.read()
return ParseDepsDict(deps_content)
def ParseRemoteCrDepsFile(revision):
deps_content = ReadRemoteCrFile('DEPS', revision)
return ParseDepsDict(deps_content)
def ParseCommitPosition(commit_message):
for line in reversed(commit_message.splitlines()):
m = COMMIT_POSITION_RE.match(line.strip())
if m:
return int(m.group(1))
logging.error('Failed to parse commit position id from:\n%s\n',
commit_message)
sys.exit(-1)
def _RunCommand(command, working_dir=None, ignore_exit_code=False,
extra_env=None):
"""Runs a command and returns the output from that command.
If the command fails (exit code != 0), the function will exit the process.
Returns:
A tuple containing the stdout and stderr outputs as strings.
"""
working_dir = working_dir or CHECKOUT_SRC_DIR
logging.debug('CMD: %s CWD: %s', ' '.join(command), working_dir)
env = os.environ.copy()
if extra_env:
assert all(isinstance(value, str) for value in extra_env.values())
logging.debug('extra env: %s', extra_env)
env.update(extra_env)
p = subprocess.Popen(command, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, env=env,
cwd=working_dir, universal_newlines=True)
std_output = p.stdout.read()
err_output = p.stderr.read()
p.wait()
p.stdout.close()
p.stderr.close()
if not ignore_exit_code and p.returncode != 0:
logging.error('Command failed: %s\n'
'stdout:\n%s\n'
'stderr:\n%s\n', ' '.join(command), std_output, err_output)
sys.exit(p.returncode)
return std_output, err_output
def _GetBranches():
"""Returns a tuple of active,branches.
The 'active' is the name of the currently active branch and 'branches' is a
list of all branches.
"""
lines = _RunCommand(['git', 'branch'])[0].split('\n')
branches = []
active = ''
for line in lines:
if '*' in line:
# The assumption is that the first char will always be the '*'.
active = line[1:].strip()
branches.append(active)
else:
branch = line.strip()
if branch:
branches.append(branch)
return active, branches
def _ReadGitilesContent(url):
# Download and decode BASE64 content until
# https://code.google.com/p/gitiles/issues/detail?id=7 is fixed.
base64_content = ReadUrlContent(url + '?format=TEXT')
return base64.b64decode(base64_content[0])
def ReadRemoteCrFile(path_below_src, revision):
"""Reads a remote Chromium file of a specific revision. Returns a string."""
return _ReadGitilesContent(CHROMIUM_FILE_TEMPLATE % (revision,
path_below_src))
def ReadRemoteCrCommit(revision):
"""Reads a remote Chromium commit message. Returns a string."""
return _ReadGitilesContent(CHROMIUM_COMMIT_TEMPLATE % revision)
def ReadUrlContent(url):
"""Connect to a remote host and read the contents. Returns a list of lines."""
conn = urllib2.urlopen(url)
try:
return conn.readlines()
except IOError as e:
logging.exception('Error connecting to %s. Error: %s', url, e)
raise
finally:
conn.close()
def GetMatchingDepsEntries(depsentry_dict, dir_path):
"""Gets all deps entries matching the provided path.
This list may contain more than one DepsEntry object.
Example: dir_path='src/testing' would give results containing both
'src/testing/gtest' and 'src/testing/gmock' deps entries for Chromium's DEPS.
Example 2: dir_path='src/build' should return 'src/build' but not
'src/buildtools'.
Returns:
A list of DepsEntry objects.
"""
result = []
for path, depsentry in depsentry_dict.iteritems():
if path == dir_path:
result.append(depsentry)
else:
parts = path.split('/')
if all(part == parts[i]
for i, part in enumerate(dir_path.split('/'))):
result.append(depsentry)
return result
def BuildDepsentryDict(deps_dict):
"""Builds a dict of paths to DepsEntry objects from a raw parsed deps dict."""
result = {}
def AddDepsEntries(deps_subdict):
for path, deps_url_spec in deps_subdict.iteritems():
# The deps url is either an URL and a condition, or just the URL.
if isinstance(deps_url_spec, dict):
if deps_url_spec.get('dep_type') == 'cipd':
continue
deps_url = deps_url_spec['url']
else:
deps_url = deps_url_spec
      if path not in result:
url, revision = deps_url.split('@') if deps_url else (None, None)
result[path] = DepsEntry(path, url, revision)
AddDepsEntries(deps_dict['deps'])
  for deps_os in ['win', 'mac', 'unix', 'android', 'ios']:
AddDepsEntries(deps_dict.get('deps_os', {}).get(deps_os, {}))
return result
def CalculateChangedDeps(libyuv_deps, new_cr_deps):
"""
Calculate changed deps entries based on entries defined in the libyuv DEPS
file:
- If a shared dependency with the Chromium DEPS file: roll it to the same
revision as Chromium (i.e. entry in the new_cr_deps dict)
- If it's a Chromium sub-directory, roll it to the HEAD revision (notice
this means it may be ahead of the chromium_revision, but generally these
should be close).
- If it's another DEPS entry (not shared with Chromium), roll it to HEAD
unless it's configured to be skipped.
Returns:
A list of ChangedDep objects representing the changed deps.
"""
result = []
libyuv_entries = BuildDepsentryDict(libyuv_deps)
new_cr_entries = BuildDepsentryDict(new_cr_deps)
for path, libyuv_deps_entry in libyuv_entries.iteritems():
if path in DONT_AUTOROLL_THESE:
continue
cr_deps_entry = new_cr_entries.get(path)
if cr_deps_entry:
# Use the revision from Chromium's DEPS file.
new_rev = cr_deps_entry.revision
assert libyuv_deps_entry.url == cr_deps_entry.url, (
'Libyuv DEPS entry %s has a different URL (%s) than Chromium (%s).' %
(path, libyuv_deps_entry.url, cr_deps_entry.url))
else:
# Use the HEAD of the deps repo.
stdout, _ = _RunCommand(['git', 'ls-remote', libyuv_deps_entry.url,
'HEAD'])
new_rev = stdout.strip().split('\t')[0]
# Check if an update is necessary.
if libyuv_deps_entry.revision != new_rev:
logging.debug('Roll dependency %s to %s', path, new_rev)
result.append(ChangedDep(path, libyuv_deps_entry.url,
libyuv_deps_entry.revision, new_rev))
return sorted(result)
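# Illustrative outcome (hypothetical paths/revisions): if libyuv's DEPS pins
# 'src/third_party/catapult' at 'aaa' while the new Chromium DEPS pins it at 'bbb',
# CalculateChangedDeps() returns [ChangedDep('src/third_party/catapult', <url>, 'aaa', 'bbb')];
# paths listed in DONT_AUTOROLL_THESE are never rolled.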
def CalculateChangedClang(new_cr_rev):
def GetClangRev(lines):
for line in lines:
match = CLANG_REVISION_RE.match(line)
if match:
return match.group(1)
raise RollError('Could not parse Clang revision!')
with open(CLANG_UPDATE_SCRIPT_LOCAL_PATH, 'rb') as f:
current_lines = f.readlines()
current_rev = GetClangRev(current_lines)
new_clang_update_py = ReadRemoteCrFile(CLANG_UPDATE_SCRIPT_URL_PATH,
new_cr_rev).splitlines()
new_rev = GetClangRev(new_clang_update_py)
return ChangedDep(CLANG_UPDATE_SCRIPT_LOCAL_PATH, None, current_rev, new_rev)
def GenerateCommitMessage(current_cr_rev, new_cr_rev, current_commit_pos,
new_commit_pos, changed_deps_list, clang_change):
current_cr_rev = current_cr_rev[0:10]
new_cr_rev = new_cr_rev[0:10]
rev_interval = '%s..%s' % (current_cr_rev, new_cr_rev)
git_number_interval = '%s:%s' % (current_commit_pos, new_commit_pos)
commit_msg = ['Roll chromium_revision %s (%s)\n' % (rev_interval,
git_number_interval)]
commit_msg.append('Change log: %s' % (CHROMIUM_LOG_TEMPLATE % rev_interval))
commit_msg.append('Full diff: %s\n' % (CHROMIUM_COMMIT_TEMPLATE %
rev_interval))
if changed_deps_list:
commit_msg.append('Changed dependencies:')
for c in changed_deps_list:
commit_msg.append('* %s: %s/+log/%s..%s' % (c.path, c.url,
c.current_rev[0:10],
c.new_rev[0:10]))
change_url = CHROMIUM_FILE_TEMPLATE % (rev_interval, 'DEPS')
commit_msg.append('DEPS diff: %s\n' % change_url)
else:
commit_msg.append('No dependencies changed.')
if clang_change.current_rev != clang_change.new_rev:
commit_msg.append('Clang version changed %s:%s' %
(clang_change.current_rev, clang_change.new_rev))
change_url = CHROMIUM_FILE_TEMPLATE % (rev_interval,
CLANG_UPDATE_SCRIPT_URL_PATH)
commit_msg.append('Details: %s\n' % change_url)
else:
commit_msg.append('No update to Clang.\n')
# TBR needs to be non-empty for Gerrit to process it.
git_author = _RunCommand(['git', 'config', 'user.email'],
working_dir=CHECKOUT_SRC_DIR)[0].strip()
commit_msg.append('TBR=%s' % git_author)
commit_msg.append('BUG=None')
return '\n'.join(commit_msg)
def UpdateDepsFile(deps_filename, old_cr_revision, new_cr_revision,
changed_deps):
"""Update the DEPS file with the new revision."""
# Update the chromium_revision variable.
with open(deps_filename, 'rb') as deps_file:
deps_content = deps_file.read()
deps_content = deps_content.replace(old_cr_revision, new_cr_revision)
with open(deps_filename, 'wb') as deps_file:
deps_file.write(deps_content)
# Update each individual DEPS entry.
for dep in changed_deps:
local_dep_dir = os.path.join(CHECKOUT_ROOT_DIR, dep.path)
if not os.path.isdir(local_dep_dir):
raise RollError(
'Cannot find local directory %s. Make sure the .gclient file\n'
'contains all platforms in the target_os list, i.e.\n'
'target_os = ["android", "unix", "mac", "ios", "win"];\n'
'Then run "gclient sync" again.' % local_dep_dir)
_RunCommand(
['gclient', 'setdep', '--revision', '%s@%s' % (dep.path, dep.new_rev)],
working_dir=CHECKOUT_SRC_DIR)
def _IsTreeClean():
stdout, _ = _RunCommand(['git', 'status', '--porcelain'])
if len(stdout) == 0:
return True
logging.error('Dirty/unversioned files:\n%s', stdout)
return False
def _EnsureUpdatedMasterBranch(dry_run):
current_branch = _RunCommand(
['git', 'rev-parse', '--abbrev-ref', 'HEAD'])[0].splitlines()[0]
if current_branch != 'master':
logging.error('Please checkout the master branch and re-run this script.')
if not dry_run:
sys.exit(-1)
logging.info('Updating master branch...')
_RunCommand(['git', 'pull'])
def _CreateRollBranch(dry_run):
logging.info('Creating roll branch: %s', ROLL_BRANCH_NAME)
if not dry_run:
_RunCommand(['git', 'checkout', '-b', ROLL_BRANCH_NAME])
def _RemovePreviousRollBranch(dry_run):
active_branch, branches = _GetBranches()
if active_branch == ROLL_BRANCH_NAME:
active_branch = 'master'
if ROLL_BRANCH_NAME in branches:
logging.info('Removing previous roll branch (%s)', ROLL_BRANCH_NAME)
if not dry_run:
_RunCommand(['git', 'checkout', active_branch])
_RunCommand(['git', 'branch', '-D', ROLL_BRANCH_NAME])
def _LocalCommit(commit_msg, dry_run):
logging.info('Committing changes locally.')
if not dry_run:
_RunCommand(['git', 'add', '--update', '.'])
_RunCommand(['git', 'commit', '-m', commit_msg])
def ChooseCQMode(skip_cq, cq_over, current_commit_pos, new_commit_pos):
if skip_cq:
return 0
if (new_commit_pos - current_commit_pos) < cq_over:
return 1
return 2
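# For reference, with the default --cq-over=1: ChooseCQMode(True, 1, 100, 105) -> 0
# (skip the CQ), ChooseCQMode(False, 1, 100, 100) -> 1 (dry run, difference 0 < 1),
# and ChooseCQMode(False, 1, 100, 105) -> 2 (submit to the commit queue).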
def _UploadCL(commit_queue_mode):
"""Upload the committed changes as a changelist to Gerrit.
commit_queue_mode:
- 2: Submit to commit queue.
- 1: Run trybots but do not submit to CQ.
- 0: Skip CQ, upload only.
"""
cmd = ['git', 'cl', 'upload', '--force', '--bypass-hooks', '--send-mail']
if commit_queue_mode >= 2:
logging.info('Sending the CL to the CQ...')
cmd.extend(['--use-commit-queue'])
elif commit_queue_mode >= 1:
logging.info('Starting CQ dry run...')
cmd.extend(['--cq-dry-run'])
extra_env = {
'EDITOR': 'true',
'SKIP_GCE_AUTH_FOR_GIT': '1',
}
stdout, stderr = _RunCommand(cmd, extra_env=extra_env)
logging.debug('Output from "git cl upload":\nstdout:\n%s\n\nstderr:\n%s',
stdout, stderr)
def main():
p = argparse.ArgumentParser()
p.add_argument('--clean', action='store_true', default=False,
help='Removes any previous local roll branch.')
p.add_argument('-r', '--revision',
help=('Chromium Git revision to roll to. Defaults to the '
'Chromium HEAD revision if omitted.'))
p.add_argument('--dry-run', action='store_true', default=False,
help=('Calculate changes and modify DEPS, but don\'t create '
'any local branch, commit, upload CL or send any '
'tryjobs.'))
p.add_argument('-i', '--ignore-unclean-workdir', action='store_true',
default=False,
help=('Ignore if the current branch is not master or if there '
'are uncommitted changes (default: %(default)s).'))
grp = p.add_mutually_exclusive_group()
grp.add_argument('--skip-cq', action='store_true', default=False,
help='Skip sending the CL to the CQ (default: %(default)s)')
grp.add_argument('--cq-over', type=int, default=1,
help=('Commit queue dry run if the revision difference '
'is below this number (default: %(default)s)'))
p.add_argument('-v', '--verbose', action='store_true', default=False,
help='Be extra verbose in printing of log messages.')
opts = p.parse_args()
if opts.verbose:
logging.basicConfig(level=logging.DEBUG)
else:
logging.basicConfig(level=logging.INFO)
if not opts.ignore_unclean_workdir and not _IsTreeClean():
logging.error('Please clean your local checkout first.')
return 1
if opts.clean:
_RemovePreviousRollBranch(opts.dry_run)
if not opts.ignore_unclean_workdir:
_EnsureUpdatedMasterBranch(opts.dry_run)
new_cr_rev = opts.revision
if not new_cr_rev:
stdout, _ = _RunCommand(['git', 'ls-remote', CHROMIUM_SRC_URL, 'HEAD'])
head_rev = stdout.strip().split('\t')[0]
logging.info('No revision specified. Using HEAD: %s', head_rev)
new_cr_rev = head_rev
deps_filename = os.path.join(CHECKOUT_SRC_DIR, 'DEPS')
libyuv_deps = ParseLocalDepsFile(deps_filename)
current_cr_rev = libyuv_deps['vars']['chromium_revision']
current_commit_pos = ParseCommitPosition(ReadRemoteCrCommit(current_cr_rev))
new_commit_pos = ParseCommitPosition(ReadRemoteCrCommit(new_cr_rev))
new_cr_deps = ParseRemoteCrDepsFile(new_cr_rev)
changed_deps = CalculateChangedDeps(libyuv_deps, new_cr_deps)
clang_change = CalculateChangedClang(new_cr_rev)
commit_msg = GenerateCommitMessage(current_cr_rev, new_cr_rev,
current_commit_pos, new_commit_pos,
changed_deps, clang_change)
logging.debug('Commit message:\n%s', commit_msg)
_CreateRollBranch(opts.dry_run)
UpdateDepsFile(deps_filename, current_cr_rev, new_cr_rev, changed_deps)
_LocalCommit(commit_msg, opts.dry_run)
commit_queue_mode = ChooseCQMode(opts.skip_cq, opts.cq_over,
current_commit_pos, new_commit_pos)
logging.info('Uploading CL...')
if not opts.dry_run:
_UploadCL(commit_queue_mode)
return 0
if __name__ == '__main__':
sys.exit(main())
| {
"content_hash": "8389930e6a526507745718faea83f82a",
"timestamp": "",
"source": "github",
"line_count": 493,
"max_line_length": 80,
"avg_line_length": 35.8316430020284,
"alnum_prop": 0.6393433342768186,
"repo_name": "endlessm/chromium-browser",
"id": "2b95e302cdbf9f552717ee1f5bb5cdd6dcd7bbc4",
"size": "18230",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "third_party/libyuv/tools_libyuv/autoroller/roll_deps.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
"""
Django app
"""
from django.apps import AppConfig
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
class RootConfig(AppConfig):
"""AppConfig for this project"""
name = 'main'
def ready(self):
missing_settings = [
setting_name for setting_name in settings.MANDATORY_SETTINGS
if getattr(settings, setting_name, None) in (None, '')
]
if missing_settings:
raise ImproperlyConfigured(
'The following settings are missing: {}'.format(', '.join(missing_settings))
)
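# Illustrative settings (hypothetical names) for the startup check above:
#
#     # settings.py
#     MANDATORY_SETTINGS = ['SECRET_KEY', 'DATABASE_URL']
#     SECRET_KEY = 'dev-key'
#     DATABASE_URL = ''   # empty, so ready() raises ImproperlyConfigured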
| {
"content_hash": "6b66392a4106399563339c9d548e54c5",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 92,
"avg_line_length": 27.545454545454547,
"alnum_prop": 0.6353135313531353,
"repo_name": "mitodl/cookiecutter-djangoapp",
"id": "5ab6467da749dd11f65dfaffdedbc1a7acf553b1",
"size": "606",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "{{ cookiecutter.project_name }}/main/apps.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "148"
},
{
"name": "Dockerfile",
"bytes": "1045"
},
{
"name": "HTML",
"bytes": "4070"
},
{
"name": "JavaScript",
"bytes": "35669"
},
{
"name": "Python",
"bytes": "46923"
},
{
"name": "Shell",
"bytes": "9859"
}
],
"symlink_target": ""
} |
"""
Client side of the compute RPC API.
"""
from oslo.config import cfg
from oslo import messaging
from nova import block_device
from nova import exception
from nova.objects import base as objects_base
from nova.openstack.common.gettextutils import _
from nova.openstack.common import jsonutils
from nova import rpc
rpcapi_opts = [
cfg.StrOpt('compute_topic',
default='compute',
help='The topic compute nodes listen on'),
]
CONF = cfg.CONF
CONF.register_opts(rpcapi_opts)
rpcapi_cap_opt = cfg.StrOpt('compute',
help='Set a version cap for messages sent to compute services. If you '
'plan to do a live upgrade from havana to icehouse, you should '
'set this option to "icehouse-compat" before beginning the live '
'upgrade procedure.')
CONF.register_opt(rpcapi_cap_opt, 'upgrade_levels')
def _compute_host(host, instance):
'''Get the destination host for a message.
:param host: explicit host to send the message to.
:param instance: If an explicit host was not specified, use
instance['host']
:returns: A host
'''
if host:
return host
if not instance:
raise exception.NovaException(_('No compute host specified'))
if not instance['host']:
raise exception.NovaException(_('Unable to find host for '
'Instance %s') % instance['uuid'])
return instance['host']
class ComputeAPI(object):
'''Client side of the compute rpc API.
API version history:
1.0 - Initial version.
1.1 - Adds get_host_uptime()
1.2 - Adds check_can_live_migrate_[destination|source]
1.3 - Adds change_instance_metadata()
1.4 - Remove instance_uuid, add instance argument to reboot_instance()
1.5 - Remove instance_uuid, add instance argument to pause_instance(),
unpause_instance()
1.6 - Remove instance_uuid, add instance argument to suspend_instance()
1.7 - Remove instance_uuid, add instance argument to
get_console_output()
1.8 - Remove instance_uuid, add instance argument to
add_fixed_ip_to_instance()
1.9 - Remove instance_uuid, add instance argument to attach_volume()
1.10 - Remove instance_id, add instance argument to
check_can_live_migrate_destination()
1.11 - Remove instance_id, add instance argument to
check_can_live_migrate_source()
1.12 - Remove instance_uuid, add instance argument to confirm_resize()
1.13 - Remove instance_uuid, add instance argument to detach_volume()
1.14 - Remove instance_uuid, add instance argument to finish_resize()
1.15 - Remove instance_uuid, add instance argument to
finish_revert_resize()
1.16 - Remove instance_uuid, add instance argument to get_diagnostics()
1.17 - Remove instance_uuid, add instance argument to get_vnc_console()
1.18 - Remove instance_uuid, add instance argument to inject_file()
1.19 - Remove instance_uuid, add instance argument to
inject_network_info()
1.20 - Remove instance_id, add instance argument to
post_live_migration_at_destination()
1.21 - Remove instance_uuid, add instance argument to
power_off_instance() and stop_instance()
1.22 - Remove instance_uuid, add instance argument to
power_on_instance() and start_instance()
1.23 - Remove instance_id, add instance argument to
pre_live_migration()
1.24 - Remove instance_uuid, add instance argument to
rebuild_instance()
1.25 - Remove instance_uuid, add instance argument to
remove_fixed_ip_from_instance()
1.26 - Remove instance_id, add instance argument to
remove_volume_connection()
1.27 - Remove instance_uuid, add instance argument to
rescue_instance()
1.28 - Remove instance_uuid, add instance argument to reset_network()
1.29 - Remove instance_uuid, add instance argument to resize_instance()
1.30 - Remove instance_uuid, add instance argument to resume_instance()
1.31 - Remove instance_uuid, add instance argument to revert_resize()
1.32 - Remove instance_id, add instance argument to
rollback_live_migration_at_destination()
1.33 - Remove instance_uuid, add instance argument to
set_admin_password()
1.34 - Remove instance_uuid, add instance argument to
snapshot_instance()
1.35 - Remove instance_uuid, add instance argument to
unrescue_instance()
1.36 - Remove instance_uuid, add instance argument to
change_instance_metadata()
1.37 - Remove instance_uuid, add instance argument to
terminate_instance()
1.38 - Changes to prep_resize():
- remove instance_uuid, add instance
- remove instance_type_id, add instance_type
- remove topic, it was unused
1.39 - Remove instance_uuid, add instance argument to run_instance()
1.40 - Remove instance_id, add instance argument to live_migration()
1.41 - Adds refresh_instance_security_rules()
1.42 - Add reservations arg to prep_resize(), resize_instance(),
finish_resize(), confirm_resize(), revert_resize() and
finish_revert_resize()
1.43 - Add migrate_data to live_migration()
1.44 - Adds reserve_block_device_name()
2.0 - Remove 1.x backwards compat
2.1 - Adds orig_sys_metadata to rebuild_instance()
2.2 - Adds slave_info parameter to add_aggregate_host() and
remove_aggregate_host()
2.3 - Adds volume_id to reserve_block_device_name()
2.4 - Add bdms to terminate_instance
2.5 - Add block device and network info to reboot_instance
2.6 - Remove migration_id, add migration to resize_instance
2.7 - Remove migration_id, add migration to confirm_resize
2.8 - Remove migration_id, add migration to finish_resize
2.9 - Add publish_service_capabilities()
2.10 - Adds filter_properties and request_spec to prep_resize()
2.11 - Adds soft_delete_instance() and restore_instance()
2.12 - Remove migration_id, add migration to revert_resize
2.13 - Remove migration_id, add migration to finish_revert_resize
2.14 - Remove aggregate_id, add aggregate to add_aggregate_host
2.15 - Remove aggregate_id, add aggregate to remove_aggregate_host
2.16 - Add instance_type to resize_instance
2.17 - Add get_backdoor_port()
2.18 - Add bdms to rebuild_instance
2.19 - Add node to run_instance
2.20 - Add node to prep_resize
2.21 - Add migrate_data dict param to pre_live_migration()
2.22 - Add recreate, on_shared_storage and host arguments to
rebuild_instance()
2.23 - Remove network_info from reboot_instance
2.24 - Added get_spice_console method
2.25 - Add attach_interface() and detach_interface()
2.26 - Add validate_console_port to ensure the service connects to
vnc on the correct port
2.27 - Adds 'reservations' to terminate_instance() and
soft_delete_instance()
... Grizzly supports message version 2.27. So, any changes to existing
methods in 2.x after that point should be done such that they can
handle the version_cap being set to 2.27.
2.28 - Adds check_instance_shared_storage()
2.29 - Made start_instance() and stop_instance() take new-world
instance objects
2.30 - Adds live_snapshot_instance()
2.31 - Adds shelve_instance(), shelve_offload_instance, and
unshelve_instance()
2.32 - Make reboot_instance take a new world instance object
2.33 - Made suspend_instance() and resume_instance() take new-world
instance objects
2.34 - Added swap_volume()
2.35 - Made terminate_instance() and soft_delete_instance() take
new-world instance objects
2.36 - Made pause_instance() and unpause_instance() take new-world
instance objects
2.37 - Added the legacy_bdm_in_spec parameter to run_instance
2.38 - Made check_can_live_migrate_[destination|source] take
new-world instance objects
2.39 - Made revert_resize() and confirm_resize() take new-world
instance objects
2.40 - Made reset_network() take new-world instance object
2.41 - Make inject_network_info take new-world instance object
2.42 - Splits snapshot_instance() into snapshot_instance() and
backup_instance() and makes them take new-world instance
objects.
2.43 - Made prep_resize() take new-world instance object
2.44 - Add volume_snapshot_create(), volume_snapshot_delete()
2.45 - Made resize_instance() take new-world objects
2.46 - Made finish_resize() take new-world objects
2.47 - Made finish_revert_resize() take new-world objects
... Havana supports message version 2.47. So, any changes to existing
methods in 2.x after that point should be done such that they can
handle the version_cap being set to 2.47.
2.48 - Make add_aggregate_host() and remove_aggregate_host() take
new-world objects
... - Remove live_snapshot() that was never actually used
3.0 - Remove 2.x compatibility
3.1 - Update get_spice_console() to take an instance object
3.2 - Update get_vnc_console() to take an instance object
3.3 - Update validate_console_port() to take an instance object
3.4 - Update rebuild_instance() to take an instance object
3.5 - Pass preserve_ephemeral flag to rebuild_instance()
3.6 - Make volume_snapshot_{create,delete} use new-world objects
3.7 - Update change_instance_metadata() to take an instance object
3.8 - Update set_admin_password() to take an instance object
3.9 - Update rescue_instance() to take an instance object
3.10 - Added get_rdp_console method
3.11 - Update unrescue_instance() to take an object
3.12 - Update add_fixed_ip_to_instance() to take an object
3.13 - Update remove_fixed_ip_from_instance() to take an object
3.14 - Update post_live_migration_at_destination() to take an object
3.15 - Adds filter_properties and node to unshelve_instance()
3.16 - Make reserve_block_device_name and attach_volume use new-world
objects, and add disk_bus and device_type params to
reserve_block_device_name, and bdm param to attach_volume
3.17 - Update attach_interface and detach_interface to take an object
3.18 - Update get_diagnostics() to take an instance object
... - Removed inject_file(), as it was unused.
3.19 - Update pre_live_migration to take instance object
3.20 - Make restore_instance take an instance object
3.21 - Made rebuild take new-world BDM objects
3.22 - Made terminate_instance take new-world BDM objects
3.23 - Added external_instance_event()
3.24 - Update rescue_instance() to take optional rescue_image_ref
3.25 - Make detach_volume take an object
'''
VERSION_ALIASES = {
'grizzly': '2.27',
'havana': '2.47',
# NOTE(russellb) 'icehouse-compat' is the version that is supported by
# both havana and icehouse. Later, 'icehouse' will be added that lists
# the maximum version supported by icehouse.
'icehouse-compat': '3.0',
}
def __init__(self):
super(ComputeAPI, self).__init__()
target = messaging.Target(topic=CONF.compute_topic, version='3.0')
version_cap = self.VERSION_ALIASES.get(CONF.upgrade_levels.compute,
CONF.upgrade_levels.compute)
serializer = objects_base.NovaObjectSerializer()
self.client = self.get_client(target, version_cap, serializer)
# Cells overrides this
def get_client(self, target, version_cap, serializer):
return rpc.get_client(target,
version_cap=version_cap,
serializer=serializer)
def _get_compat_version(self, current, havana_compat):
if not self.client.can_send_version(current):
return havana_compat
return current
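    # For example, with '[upgrade_levels] compute = havana' (version cap '2.47'),
    # _get_compat_version('3.0', '2.0') returns '2.0'; with no cap, or an
    # icehouse-era cap, it returns '3.0' and the new-style message is sent.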
def add_aggregate_host(self, ctxt, aggregate, host_param, host,
slave_info=None):
'''Add aggregate host.
:param ctxt: request context
        :param aggregate: aggregate to add the host to.
:param host_param: This value is placed in the message to be the 'host'
parameter for the remote method.
:param host: This is the host to send the message to.
'''
if self.client.can_send_version('3.0'):
version = '3.0'
elif self.client.can_send_version('2.48'):
version = '2.48'
else:
# NOTE(russellb) Havana compat
version = '2.14'
aggregate = jsonutils.to_primitive(aggregate)
cctxt = self.client.prepare(server=host, version=version)
cctxt.cast(ctxt, 'add_aggregate_host',
aggregate=aggregate, host=host_param,
slave_info=slave_info)
def add_fixed_ip_to_instance(self, ctxt, instance, network_id):
if self.client.can_send_version('3.12'):
version = '3.12'
else:
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.0')
instance = jsonutils.to_primitive(instance)
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'add_fixed_ip_to_instance',
instance=instance, network_id=network_id)
def attach_interface(self, ctxt, instance, network_id, port_id,
requested_ip):
# NOTE(russellb) Havana compat
if self.client.can_send_version('3.17'):
version = '3.17'
else:
version = self._get_compat_version('3.0', '2.25')
instance = jsonutils.to_primitive(instance)
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
return cctxt.call(ctxt, 'attach_interface',
instance=instance, network_id=network_id,
port_id=port_id, requested_ip=requested_ip)
def attach_volume(self, ctxt, instance, volume_id, mountpoint, bdm=None):
# NOTE(ndipanov): Remove volume_id and mountpoint on the next major
# version bump - they are not needed when using bdm objects.
version = '3.16'
kw = {'instance': instance, 'volume_id': volume_id,
'mountpoint': mountpoint, 'bdm': bdm}
if not self.client.can_send_version(version):
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.0')
kw['instance'] = jsonutils.to_primitive(
objects_base.obj_to_primitive(instance))
del kw['bdm']
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'attach_volume', **kw)
def change_instance_metadata(self, ctxt, instance, diff):
if self.client.can_send_version('3.7'):
version = '3.7'
else:
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.0')
instance = jsonutils.to_primitive(instance)
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'change_instance_metadata',
instance=instance, diff=diff)
def check_can_live_migrate_destination(self, ctxt, instance, destination,
block_migration, disk_over_commit):
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.38')
cctxt = self.client.prepare(server=destination, version=version)
return cctxt.call(ctxt, 'check_can_live_migrate_destination',
instance=instance,
block_migration=block_migration,
disk_over_commit=disk_over_commit)
def check_can_live_migrate_source(self, ctxt, instance, dest_check_data):
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.38')
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
return cctxt.call(ctxt, 'check_can_live_migrate_source',
instance=instance,
dest_check_data=dest_check_data)
def check_instance_shared_storage(self, ctxt, instance, data):
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.28')
instance_p = jsonutils.to_primitive(instance)
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
return cctxt.call(ctxt, 'check_instance_shared_storage',
instance=instance_p,
data=data)
def confirm_resize(self, ctxt, instance, migration, host,
reservations=None, cast=True):
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.39')
cctxt = self.client.prepare(server=_compute_host(host, instance),
version=version)
rpc_method = cctxt.cast if cast else cctxt.call
return rpc_method(ctxt, 'confirm_resize',
instance=instance, migration=migration,
reservations=reservations)
def detach_interface(self, ctxt, instance, port_id):
# NOTE(russellb) Havana compat
if self.client.can_send_version('3.17'):
version = '3.17'
else:
version = self._get_compat_version('3.0', '2.25')
instance = jsonutils.to_primitive(instance)
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'detach_interface',
instance=instance, port_id=port_id)
def detach_volume(self, ctxt, instance, volume_id):
if self.client.can_send_version('3.25'):
version = '3.25'
else:
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.0')
instance = jsonutils.to_primitive(instance)
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'detach_volume',
instance=instance, volume_id=volume_id)
def finish_resize(self, ctxt, instance, migration, image, disk_info,
host, reservations=None):
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.46')
cctxt = self.client.prepare(server=host, version=version)
cctxt.cast(ctxt, 'finish_resize',
instance=instance, migration=migration,
image=image, disk_info=disk_info, reservations=reservations)
def finish_revert_resize(self, ctxt, instance, migration, host,
reservations=None):
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.47')
cctxt = self.client.prepare(server=host, version=version)
cctxt.cast(ctxt, 'finish_revert_resize',
instance=instance, migration=migration,
reservations=reservations)
def get_console_output(self, ctxt, instance, tail_length):
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.0')
instance_p = jsonutils.to_primitive(instance)
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
return cctxt.call(ctxt, 'get_console_output',
instance=instance_p, tail_length=tail_length)
def get_console_pool_info(self, ctxt, console_type, host):
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.0')
cctxt = self.client.prepare(server=host, version=version)
return cctxt.call(ctxt, 'get_console_pool_info',
console_type=console_type)
def get_console_topic(self, ctxt, host):
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.0')
cctxt = self.client.prepare(server=host, version=version)
return cctxt.call(ctxt, 'get_console_topic')
def get_diagnostics(self, ctxt, instance):
if self.client.can_send_version('3.18'):
version = '3.18'
else:
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.0')
instance = jsonutils.to_primitive(instance)
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
return cctxt.call(ctxt, 'get_diagnostics', instance=instance)
def get_vnc_console(self, ctxt, instance, console_type):
if self.client.can_send_version('3.2'):
version = '3.2'
else:
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.0')
instance = jsonutils.to_primitive(instance)
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
return cctxt.call(ctxt, 'get_vnc_console',
instance=instance, console_type=console_type)
def get_spice_console(self, ctxt, instance, console_type):
if self.client.can_send_version('3.1'):
version = '3.1'
else:
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.24')
instance = jsonutils.to_primitive(instance)
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
return cctxt.call(ctxt, 'get_spice_console',
instance=instance, console_type=console_type)
def get_rdp_console(self, ctxt, instance, console_type):
version = '3.10'
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
return cctxt.call(ctxt, 'get_rdp_console',
instance=instance, console_type=console_type)
def validate_console_port(self, ctxt, instance, port, console_type):
if self.client.can_send_version('3.3'):
version = '3.3'
else:
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.26')
instance = jsonutils.to_primitive(instance)
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
return cctxt.call(ctxt, 'validate_console_port',
instance=instance, port=port,
console_type=console_type)
def host_maintenance_mode(self, ctxt, host_param, mode, host):
'''Set host maintenance mode
:param ctxt: request context
:param host_param: This value is placed in the message to be the 'host'
parameter for the remote method.
:param mode:
:param host: This is the host to send the message to.
'''
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.0')
cctxt = self.client.prepare(server=host, version=version)
return cctxt.call(ctxt, 'host_maintenance_mode',
host=host_param, mode=mode)
def host_power_action(self, ctxt, action, host):
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.0')
cctxt = self.client.prepare(server=host, version=version)
return cctxt.call(ctxt, 'host_power_action', action=action)
def inject_network_info(self, ctxt, instance):
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.41')
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'inject_network_info', instance=instance)
def live_migration(self, ctxt, instance, dest, block_migration, host,
migrate_data=None):
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.0')
instance_p = jsonutils.to_primitive(instance)
cctxt = self.client.prepare(server=host, version=version)
cctxt.cast(ctxt, 'live_migration', instance=instance_p,
dest=dest, block_migration=block_migration,
migrate_data=migrate_data)
def pause_instance(self, ctxt, instance):
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.36')
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'pause_instance', instance=instance)
def post_live_migration_at_destination(self, ctxt, instance,
block_migration, host):
if self.client.can_send_version('3.14'):
version = '3.14'
else:
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.0')
instance = jsonutils.to_primitive(instance)
cctxt = self.client.prepare(server=host, version=version)
cctxt.cast(ctxt, 'post_live_migration_at_destination',
instance=instance, block_migration=block_migration)
def pre_live_migration(self, ctxt, instance, block_migration, disk,
host, migrate_data=None):
if self.client.can_send_version('3.19'):
version = '3.19'
else:
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.21')
instance = jsonutils.to_primitive(instance)
cctxt = self.client.prepare(server=host, version=version)
return cctxt.call(ctxt, 'pre_live_migration',
instance=instance,
block_migration=block_migration,
disk=disk, migrate_data=migrate_data)
def prep_resize(self, ctxt, image, instance, instance_type, host,
reservations=None, request_spec=None,
filter_properties=None, node=None):
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.43')
instance_type_p = jsonutils.to_primitive(instance_type)
image_p = jsonutils.to_primitive(image)
cctxt = self.client.prepare(server=host, version=version)
cctxt.cast(ctxt, 'prep_resize',
instance=instance,
instance_type=instance_type_p,
image=image_p, reservations=reservations,
request_spec=request_spec,
filter_properties=filter_properties,
node=node)
def reboot_instance(self, ctxt, instance, block_device_info,
reboot_type):
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.32')
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'reboot_instance',
instance=instance,
block_device_info=block_device_info,
reboot_type=reboot_type)
def rebuild_instance(self, ctxt, instance, new_pass, injected_files,
image_ref, orig_image_ref, orig_sys_metadata, bdms,
recreate=False, on_shared_storage=False, host=None,
preserve_ephemeral=False, kwargs=None):
# NOTE(danms): kwargs is only here for cells compatibility, don't
# actually send it to compute
extra = {'preserve_ephemeral': preserve_ephemeral}
if self.client.can_send_version('3.21'):
version = '3.21'
else:
bdms = block_device.legacy_mapping(bdms)
bdms = jsonutils.to_primitive(objects_base.obj_to_primitive(bdms))
if self.client.can_send_version('3.5'):
version = '3.5'
elif self.client.can_send_version('3.4'):
version = '3.4'
extra = {}
else:
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.22')
instance = jsonutils.to_primitive(instance)
extra = {}
cctxt = self.client.prepare(server=_compute_host(host, instance),
version=version)
cctxt.cast(ctxt, 'rebuild_instance',
instance=instance, new_pass=new_pass,
injected_files=injected_files, image_ref=image_ref,
orig_image_ref=orig_image_ref,
orig_sys_metadata=orig_sys_metadata, bdms=bdms,
recreate=recreate, on_shared_storage=on_shared_storage,
**extra)
def refresh_provider_fw_rules(self, ctxt, host):
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.0')
cctxt = self.client.prepare(server=host, version=version)
cctxt.cast(ctxt, 'refresh_provider_fw_rules')
def remove_aggregate_host(self, ctxt, aggregate, host_param, host,
slave_info=None):
'''Remove aggregate host.
:param ctxt: request context
        :param aggregate: aggregate to remove the host from.
:param host_param: This value is placed in the message to be the 'host'
parameter for the remote method.
:param host: This is the host to send the message to.
'''
if self.client.can_send_version('3.0'):
version = '3.0'
elif self.client.can_send_version('2.48'):
version = '2.48'
else:
# NOTE(russellb) Havana compat
version = '2.15'
aggregate = jsonutils.to_primitive(aggregate)
cctxt = self.client.prepare(server=host, version=version)
cctxt.cast(ctxt, 'remove_aggregate_host',
aggregate=aggregate, host=host_param,
slave_info=slave_info)
def remove_fixed_ip_from_instance(self, ctxt, instance, address):
if self.client.can_send_version('3.13'):
version = '3.13'
else:
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.0')
instance = jsonutils.to_primitive(instance)
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'remove_fixed_ip_from_instance',
instance=instance, address=address)
def remove_volume_connection(self, ctxt, instance, volume_id, host):
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.0')
instance_p = jsonutils.to_primitive(instance)
cctxt = self.client.prepare(server=host, version=version)
return cctxt.call(ctxt, 'remove_volume_connection',
instance=instance_p, volume_id=volume_id)
def rescue_instance(self, ctxt, instance, rescue_password,
rescue_image_ref=None):
instance = jsonutils.to_primitive(instance)
msg_args = {'rescue_password': rescue_password, 'instance': instance}
if self.client.can_send_version('3.24'):
version = '3.24'
msg_args['rescue_image_ref'] = rescue_image_ref
elif self.client.can_send_version('3.9'):
version = '3.9'
else:
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.44')
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'rescue_instance', **msg_args)
def reset_network(self, ctxt, instance):
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.40')
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'reset_network', instance=instance)
def resize_instance(self, ctxt, instance, migration, image, instance_type,
reservations=None):
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.45')
instance_type_p = jsonutils.to_primitive(instance_type)
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'resize_instance',
instance=instance, migration=migration,
image=image, reservations=reservations,
instance_type=instance_type_p)
def resume_instance(self, ctxt, instance):
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.33')
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'resume_instance', instance=instance)
def revert_resize(self, ctxt, instance, migration, host,
reservations=None):
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.39')
cctxt = self.client.prepare(server=_compute_host(host, instance),
version=version)
cctxt.cast(ctxt, 'revert_resize',
instance=instance, migration=migration,
reservations=reservations)
def rollback_live_migration_at_destination(self, ctxt, instance, host):
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.0')
instance_p = jsonutils.to_primitive(instance)
cctxt = self.client.prepare(server=host, version=version)
cctxt.cast(ctxt, 'rollback_live_migration_at_destination',
instance=instance_p)
def run_instance(self, ctxt, instance, host, request_spec,
filter_properties, requested_networks,
injected_files, admin_password,
is_first_time, node=None, legacy_bdm_in_spec=True):
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.37')
instance_p = jsonutils.to_primitive(instance)
msg_kwargs = {'instance': instance_p, 'request_spec': request_spec,
'filter_properties': filter_properties,
'requested_networks': requested_networks,
'injected_files': injected_files,
'admin_password': admin_password,
'is_first_time': is_first_time, 'node': node,
'legacy_bdm_in_spec': legacy_bdm_in_spec}
cctxt = self.client.prepare(server=host, version=version)
cctxt.cast(ctxt, 'run_instance', **msg_kwargs)
def set_admin_password(self, ctxt, instance, new_pass):
if self.client.can_send_version('3.8'):
version = '3.8'
else:
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.0')
instance = jsonutils.to_primitive(instance)
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
return cctxt.call(ctxt, 'set_admin_password',
instance=instance, new_pass=new_pass)
def set_host_enabled(self, ctxt, enabled, host):
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.0')
cctxt = self.client.prepare(server=host, version=version)
return cctxt.call(ctxt, 'set_host_enabled', enabled=enabled)
def swap_volume(self, ctxt, instance, old_volume_id, new_volume_id):
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.34')
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'swap_volume',
instance=instance, old_volume_id=old_volume_id,
new_volume_id=new_volume_id)
def get_host_uptime(self, ctxt, host):
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.0')
cctxt = self.client.prepare(server=host, version=version)
return cctxt.call(ctxt, 'get_host_uptime')
def reserve_block_device_name(self, ctxt, instance, device, volume_id,
disk_bus=None, device_type=None):
version = '3.16'
kw = {'instance': instance, 'device': device,
'volume_id': volume_id, 'disk_bus': disk_bus,
'device_type': device_type}
if not self.client.can_send_version(version):
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.3')
kw['instance'] = jsonutils.to_primitive(
objects_base.obj_to_primitive(instance))
del kw['disk_bus']
del kw['device_type']
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
return cctxt.call(ctxt, 'reserve_block_device_name', **kw)
def backup_instance(self, ctxt, instance, image_id, backup_type,
rotation):
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.42')
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'backup_instance',
instance=instance,
image_id=image_id,
backup_type=backup_type,
rotation=rotation)
def snapshot_instance(self, ctxt, instance, image_id):
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.42')
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'snapshot_instance',
instance=instance,
image_id=image_id)
def start_instance(self, ctxt, instance):
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.29')
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'start_instance', instance=instance)
def stop_instance(self, ctxt, instance, do_cast=True):
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.29')
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
rpc_method = cctxt.cast if do_cast else cctxt.call
return rpc_method(ctxt, 'stop_instance', instance=instance)
def suspend_instance(self, ctxt, instance):
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.33')
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'suspend_instance', instance=instance)
def terminate_instance(self, ctxt, instance, bdms, reservations=None):
# NOTE(russellb) Havana compat
if self.client.can_send_version('3.22'):
version = '3.22'
else:
version = self._get_compat_version('3.0', '2.35')
bdms = block_device.legacy_mapping(bdms)
bdms = jsonutils.to_primitive(objects_base.obj_to_primitive(bdms))
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'terminate_instance',
instance=instance, bdms=bdms,
reservations=reservations)
def unpause_instance(self, ctxt, instance):
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.36')
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'unpause_instance', instance=instance)
def unrescue_instance(self, ctxt, instance):
if self.client.can_send_version('3.11'):
version = '3.11'
else:
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.0')
instance = jsonutils.to_primitive(instance)
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'unrescue_instance', instance=instance)
def soft_delete_instance(self, ctxt, instance, reservations=None):
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.35')
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'soft_delete_instance',
instance=instance, reservations=reservations)
def restore_instance(self, ctxt, instance):
        if self.client.can_send_version('3.20'):
            version = '3.20'
else:
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.0')
instance = jsonutils.to_primitive(instance)
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'restore_instance', instance=instance)
def shelve_instance(self, ctxt, instance, image_id=None):
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.31')
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'shelve_instance',
instance=instance, image_id=image_id)
def shelve_offload_instance(self, ctxt, instance):
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.31')
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'shelve_offload_instance', instance=instance)
def unshelve_instance(self, ctxt, instance, host, image=None,
filter_properties=None, node=None):
msg_kwargs = {'instance': instance, 'image': image}
if self.client.can_send_version('3.15'):
version = '3.15'
msg_kwargs['filter_properties'] = filter_properties
msg_kwargs['node'] = node
else:
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.31')
cctxt = self.client.prepare(server=host, version=version)
cctxt.cast(ctxt, 'unshelve_instance', **msg_kwargs)
def volume_snapshot_create(self, ctxt, instance, volume_id,
create_info):
version = '3.6'
if not self.client.can_send_version(version):
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.44')
instance = jsonutils.to_primitive(
objects_base.obj_to_primitive(instance))
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'volume_snapshot_create', instance=instance,
volume_id=volume_id, create_info=create_info)
def volume_snapshot_delete(self, ctxt, instance, volume_id, snapshot_id,
delete_info):
version = '3.6'
if not self.client.can_send_version(version):
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.44')
instance = jsonutils.to_primitive(
objects_base.obj_to_primitive(instance))
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'volume_snapshot_delete', instance=instance,
volume_id=volume_id, snapshot_id=snapshot_id,
delete_info=delete_info)
def external_instance_event(self, ctxt, instances, events):
cctxt = self.client.prepare(
server=_compute_host(None, instances[0]),
version='3.23')
cctxt.cast(ctxt, 'external_instance_event', instances=instances,
events=events)
class SecurityGroupAPI(object):
'''Client side of the security group rpc API.
API version history:
1.0 - Initial version.
1.41 - Adds refresh_instance_security_rules()
2.0 - Remove 1.x backwards compat
3.0 - Identical to 2.x, but has to be bumped at the same time as the
compute API since it's all together on the server side.
'''
def __init__(self):
super(SecurityGroupAPI, self).__init__()
target = messaging.Target(topic=CONF.compute_topic, version='3.0')
version_cap = ComputeAPI.VERSION_ALIASES.get(
CONF.upgrade_levels.compute, CONF.upgrade_levels.compute)
self.client = rpc.get_client(target, version_cap)
def _get_compat_version(self, current, havana_compat):
if not self.client.can_send_version(current):
return havana_compat
return current
def refresh_security_group_rules(self, ctxt, security_group_id, host):
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.0')
cctxt = self.client.prepare(server=host, version=version)
cctxt.cast(ctxt, 'refresh_security_group_rules',
security_group_id=security_group_id)
def refresh_security_group_members(self, ctxt, security_group_id,
host):
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.0')
cctxt = self.client.prepare(server=host, version=version)
cctxt.cast(ctxt, 'refresh_security_group_members',
security_group_id=security_group_id)
def refresh_instance_security_rules(self, ctxt, host, instance):
# NOTE(russellb) Havana compat
version = self._get_compat_version('3.0', '2.0')
instance_p = jsonutils.to_primitive(instance)
cctxt = self.client.prepare(server=_compute_host(None, instance),
version=version)
cctxt.cast(ctxt, 'refresh_instance_security_rules',
instance=instance_p)
| {
"content_hash": "8cf529536591a2be43de509dfb840281",
"timestamp": "",
"source": "github",
"line_count": 1015,
"max_line_length": 79,
"avg_line_length": 46.63743842364532,
"alnum_prop": 0.6004816528297103,
"repo_name": "eharney/nova",
"id": "c87e0c79f45ffa7d2f36193311ea26665f57422a",
"size": "47943",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nova/compute/rpcapi.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "13921563"
},
{
"name": "Shell",
"bytes": "17451"
}
],
"symlink_target": ""
} |
from jingo import register
from tower import ugettext_lazy as _lazy
from fjord.feedback.config import CODE_TO_COUNTRY
@register.filter
def country_name(country, native=False, default=_lazy(u'Unknown')):
"""Convert a country code into a human readable country name"""
if country in CODE_TO_COUNTRY:
display_locale = 'native' if native else 'English'
return CODE_TO_COUNTRY[country][display_locale]
else:
return default
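# Example output (assuming 'de' is a key in CODE_TO_COUNTRY):
#
#     country_name('de')                # -> u'Germany'
#     country_name('de', native=True)   # -> u'Deutschland'
#     # any code missing from the mapping falls back to the lazy u'Unknown' default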
| {
"content_hash": "3e6567f34bc15bebc2463c2659350544",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 67,
"avg_line_length": 32.642857142857146,
"alnum_prop": 0.7199124726477024,
"repo_name": "DESHRAJ/fjord",
"id": "ffa9ae7b23b8671d15cd1ca98c7fc5a1506692a0",
"size": "457",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fjord/feedback/helpers.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "168457"
},
{
"name": "JavaScript",
"bytes": "299449"
},
{
"name": "Makefile",
"bytes": "4594"
},
{
"name": "Python",
"bytes": "709245"
},
{
"name": "Shell",
"bytes": "13991"
}
],
"symlink_target": ""
} |
"""empty message
Revision ID: 324666fdfa8a
Revises: f18180ede802
Create Date: 2016-08-04 13:45:37.492317
"""
# revision identifiers, used by Alembic.
revision = '324666fdfa8a'
down_revision = 'f18180ede802'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_constraint(u'prod_process_association_product_id_fkey', 'prod_process_association', type_='foreignkey')
op.create_foreign_key(None, 'prod_process_association', 'product', ['product_id'], ['id'], ondelete='CASCADE')
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_constraint(None, 'prod_process_association', type_='foreignkey')
op.create_foreign_key(u'prod_process_association_product_id_fkey', 'prod_process_association', 'product', ['product_id'], ['id'])
### end Alembic commands ###
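# Typically applied with `alembic upgrade 324666fdfa8a` (or `alembic upgrade head`);
# `alembic downgrade f18180ede802` recreates the original foreign key without
# ON DELETE CASCADE.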
| {
"content_hash": "b03b6beb8a9eca8cd6b3ae1b0e6f77fd",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 133,
"avg_line_length": 33.214285714285715,
"alnum_prop": 0.7043010752688172,
"repo_name": "schinke/solid-fortnight-ba",
"id": "70ac4b8866361736b8dccdec635e8aa522bb5941",
"size": "930",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "flask/migrations/versions/324666fdfa8a_.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "5939"
},
{
"name": "CSS",
"bytes": "10234"
},
{
"name": "HTML",
"bytes": "27364"
},
{
"name": "JavaScript",
"bytes": "364749"
},
{
"name": "Mako",
"bytes": "9299"
},
{
"name": "Python",
"bytes": "8371284"
},
{
"name": "Shell",
"bytes": "3284"
}
],
"symlink_target": ""
} |
__author__ = 'Mehmet Mert Yildiran, [email protected]'
import rethinkdb as r # Rethinkdb Python driver
# Memory class
class Memory(object):
def __init__(self, starting_time, ending_time, data): # Initialize the object
self.starting_time = starting_time # Starting time attribute
self.ending_time = ending_time # Ending time attribute
self.data = data # Data attribute
# Timestamp class
class Timestamp(object):
def __init__(self, starting_time, ending_time): # Initialize the object
self.starting_time = starting_time # Starting time attribute
self.ending_time = ending_time # Ending time attribute
# Convert object to dictionary
def makeit_dict(obj):
if isinstance(obj, set):
return list(obj)
return obj.__dict__
class HearingMemoryUtil():
# Add a memory function
@staticmethod
def add_memory(data, starting_time, ending_time):
conn = r.connect("localhost", 28015)
r.db('test').table("hearing_memory").insert([
{ "starting_time": starting_time.strftime("%Y-%m-%d %H:%M:%S.%f"),
"ending_time": ending_time.strftime("%Y-%m-%d %H:%M:%S.%f"),
"data": r.binary(data)
}
]).run(conn)
r.db('test').table("hearing_timestamps").insert([
{ "starting_time": starting_time.strftime("%Y-%m-%d %H:%M:%S.%f"),
"ending_time": ending_time.strftime("%Y-%m-%d %H:%M:%S.%f")
}
]).run(conn)
conn.close()
# Get a memory function
@staticmethod
def get_memory(starting_time):
conn = r.connect("localhost", 28015)
cursor = r.db('test').table("hearing_memory").filter({'starting_time': starting_time}).run(conn)
#r.db('test').table("hearing_memory").filter({'starting_time': starting_time}).delete().run(conn)
conn.close()
return cursor
# Get timestamps function
@staticmethod
def get_timestamps():
conn = r.connect("localhost", 28015)
cursor = r.db('test').table("hearing_timestamps").run(conn)
r.db('test').table("hearing_timestamps").delete().run(conn)
conn.close()
return cursor
| {
"content_hash": "2371485820a6080227481d7f4ed80efe",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 99,
"avg_line_length": 33.220338983050844,
"alnum_prop": 0.6811224489795918,
"repo_name": "mertyildiran/Cerebrum",
"id": "d64ba925f27bfdbf4b94051778f43f65ff19acf9",
"size": "1960",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cerebrum/hearing/utilities.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Julia",
"bytes": "4184"
},
{
"name": "Python",
"bytes": "109364"
}
],
"symlink_target": ""
} |
from django.urls import path
from frequencia.calendario import views
app_name = 'calendario'
urlpatterns = [
#Feriados
path('', views.feriados, name='feriados'),
path('<int:ano>', views.feriados, name='feriados'),
path('feriado/novo/', views.feriado_create, name='feriado_create'),
path('feriado/remover/<int:pk>', views.feriado_remove, name='feriado_remove'),
] | {
"content_hash": "e9401241287662e5b9f6f5f1eead00c8",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 79,
"avg_line_length": 30.75,
"alnum_prop": 0.7235772357723578,
"repo_name": "bczmufrn/frequencia",
"id": "a2b5198fca2afb0aaa31f8f0ba27f7db6f7179a2",
"size": "369",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "frequencia/calendario/urls.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "28546"
},
{
"name": "HTML",
"bytes": "182716"
},
{
"name": "JavaScript",
"bytes": "67928"
},
{
"name": "Python",
"bytes": "94322"
}
],
"symlink_target": ""
} |
from __future__ import print_function, division, absolute_import
from struct import pack
from ..message import BulkFrontendMessage
class LoadBalanceRequest(BulkFrontendMessage):
message_id = None
LOADBALANCE_REQUEST = 80936960
def read_bytes(self):
bytes_ = pack('!I', self.LOADBALANCE_REQUEST)
return bytes_
| {
"content_hash": "8f54517f4872720b734bd33b533aeb79",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 64,
"avg_line_length": 24.428571428571427,
"alnum_prop": 0.7222222222222222,
"repo_name": "uber/vertica-python",
"id": "c0988a8ca6511b16a502e444a6baa7ac754f83e4",
"size": "2105",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vertica_python/vertica/messages/frontend_messages/load_balance_request.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "209204"
}
],
"symlink_target": ""
} |
from django.core import serializers
from django.conf import settings
import os
import itertools
import json
from information.models import PageData
from urlocators.models import Page
from workers.models import Job
class Pull:
'''
'''
@classmethod
def set_job_id(cls, job_id):
'''
'''
cls._job_id=job_id
@classmethod
def data_per_job(cls, fields_name=[]):
'''
'''
pages_data=[]
pages=Page.objects.filter(job=cls._job_id)
for page in pages:
page_data=PageData.objects.filter(page=page)
if fields_name:
page_data=page_data.filter(field_name__in=fields_name)
if not page_data or page_data.count()==0:continue
pages_data.append({'page_data':page_data,
'page_url':page.addr.url})
return pages_data
@classmethod
def data_per_urls(cls, seed_urls, chain_fields, data_fields):
'''
params
------
        seed_urls: the query root urls
        chain_fields: names of the fields that hold the urls chain
        data_fields: names of the fields holding the target values
'''
pages_data=[]
for seed in seed_urls:
pages=Page.objects.filter(addr__url=seed)
page_data=PageData.objects.filter(page__in=pages)
page_dict={
'seed':seed,
'page_data':page_data,
'index':0,
'chain_fields':chain_fields,
'data_fields':data_fields,
}
pages_data.append(page_dict)
return pages_data
class JsonPage:
'''
'''
_base=[]
_file_name=''
_pages_json=[]
_job_name=[]
_to='file' #or redis
@classmethod
def set_export_format(cls, to):
'''
'''
cls._to=to
@classmethod
def _file(cls):
'''
'''
job_path=os.path.join(settings.DATA_ROOT, cls._job_name)
if not os.path.exists(job_path):os.makedirs(job_path)
file_path=os.path.join(job_path, cls._file_name)
fd=open(file_path, 'w')
json.dump(cls._pages_json, fd, indent=3)
fd.close()
@classmethod
def _export(cls):
'''
'''
if cls._to == 'file':
cls._file()
print('[=[<o>]=] Done saving file: {} [=[<o>]=]'.format(cls._file_name))
elif cls._to == 'api':
            raise NotImplementedError('[-] EXPORT FORMAT TO BE IMPLEMENTED')
else:
raise TypeError('[-] Unkown export format')
@classmethod
def set_data(cls, base_data):
'''
'''
cls._base=base_data
@classmethod
def set_job_name(cls, job_name):
'''
'''
cls._job_name=job_name
@classmethod
def by_job(cls, file_name, fields_name=[]):
'''
'''
cls._pages_json=[]
cls._file_name=file_name
for page_dict in cls._base:
page_data=page_dict['page_data']
page_url=page_dict['page_url']
if fields_name:
                page={r.field_name:r.field_value
                      for r in page_data
                      if r.field_name in fields_name}
else:
page={r.field_name:r.field_value
for r in page_data}
page.update({'page_url':page_url})
cls._pages_json.append(page)
cls._export()
@classmethod
def by_urls_chain(cls, file_name):
'''
'''
cls._file_name=file_name
cls._pages_json=cls._urls_chain(cls._base)
cls._export()
@classmethod
def _urls_chain(cls, page_dict_list, chain_container=None):
'''
'''
pages_list=[]
for page_dict in page_dict_list:
url=page_dict['seed']
page_data=page_dict.pop('page_data')
index=page_dict['index']
chain_fields=page_dict.pop('chain_fields')
data_fields=page_dict.pop('data_fields')
chain_size=len(chain_fields)
next_index=index+1
#if it is first iteraction
if chain_container is None:
#this guy will a class soon - for now repetition
page_chain={
'seed':url,
'index':index,
'nodes':[],
'data':[],
'chain_fields':chain_fields,
'data_fields':data_fields,
}
container=page_chain['nodes']
#add page chain dict to final chain result list
pages_list.append(page_chain)
else:
container=chain_container
#if is final chain field -> target data field
if index >= chain_size:
final_data=page_data.filter(field_name__in=data_fields)
final_data=[{p.field_name:p.field_value}
for p in final_data]
final_chain={
'seed':url,
'index':next_index,
'values':final_data,
}
container.append(final_chain)
continue
#building chain
chain_field=chain_fields[index]
next_datum=page_data.filter(field_name=chain_field)
next_dicts=[]
for next_data in next_datum:
next_url=next_data.field_value
next_page=Page.objects.get(addr__url__contains=next_url)
next_page_data=PageData.objects.filter(page=next_page)
next_chain={'seed':next_url,
'page_data':next_page_data,
'index':next_index,
'chain_fields':chain_fields,
'data_fields':data_fields,
'nodes':[],
'data':[]}
if next_index >= chain_size:
next_container=next_chain['data']
else:
next_container=next_chain['nodes']
container.append(next_chain)
cls._urls_chain([next_chain], next_container)
return pages_list
class Export:
'''
'''
@classmethod
def job_to_json(cls, job_id, file_name, fields_name=[]):
'''
'''
job=Job.objects.get(id=job_id)
Pull.set_job_id(job_id)
data=Pull.data_per_job(fields_name)
JsonPage.set_job_name(job.name)
JsonPage.set_data(data)
JsonPage.by_job(file_name)
@classmethod
def urlsChain_to_json(cls, job_id, file_name, urls, chain_fields, data_fields):
'''
'''
if not file_name.endswith('.json'):
file_name='.'.join([file_name, 'json'])
job=Job.objects.get(id=job_id)
data=Pull.data_per_urls(urls, chain_fields, data_fields)
JsonPage.set_job_name(job.name)
JsonPage.set_data(data)
#testing
JsonPage.by_urls_chain(file_name)
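# Minimal usage sketch (hypothetical job id, urls and field names; not part of
# the original pipeline):
#   Export.job_to_json(1, 'job_dump.json')
#   Export.urlsChain_to_json(1, 'chains', ['http://example.com/seed'],
#                            chain_fields=['next_url'], data_fields=['title'])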
| {
"content_hash": "9e01127277a5f9fcfd38fdc691e78d89",
"timestamp": "",
"source": "github",
"line_count": 243,
"max_line_length": 84,
"avg_line_length": 29.43621399176955,
"alnum_prop": 0.49657486369355514,
"repo_name": "VulcanoAhab/delphi",
"id": "62bbc29d74cdb74767aed35e67539ab5068104dd",
"size": "7153",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "information/utils/export.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "2786"
},
{
"name": "HTML",
"bytes": "131"
},
{
"name": "Python",
"bytes": "124776"
},
{
"name": "Shell",
"bytes": "3298"
}
],
"symlink_target": ""
} |
import m5
from m5.objects import *
from Caches import *
def config_cache(options, system):
if options.l2cache:
system.l2 = L2Cache(size='2MB')
system.tol2bus = Bus()
system.l2.cpu_side = system.tol2bus.port
system.l2.mem_side = system.membus.port
system.l2.num_cpus = options.num_cpus
for i in xrange(options.num_cpus):
if options.caches:
system.cpu[i].addPrivateSplitL1Caches(L1Cache(size = '32kB'),
L1Cache(size = '64kB'))
if options.l2cache:
system.cpu[i].connectMemPorts(system.tol2bus)
else:
system.cpu[i].connectMemPorts(system.membus)
return system
| {
"content_hash": "e9d8c42af2950b4104804a187328dca0",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 73,
"avg_line_length": 32.77272727272727,
"alnum_prop": 0.5922330097087378,
"repo_name": "liangwang/m5",
"id": "075f6d235155025302490fe06ab38ad0fd449888",
"size": "2336",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "configs/common/CacheConfig.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "490228"
},
{
"name": "C++",
"bytes": "8617145"
},
{
"name": "Emacs Lisp",
"bytes": "1969"
},
{
"name": "Python",
"bytes": "2567844"
},
{
"name": "Shell",
"bytes": "6722"
},
{
"name": "Visual Basic",
"bytes": "2884"
}
],
"symlink_target": ""
} |
import pandas as pd
import numpy as np
import pyaf.HierarchicalForecastEngine as hautof
import pyaf.Bench.TS_datasets as tsds
import datetime
#get_ipython().magic('matplotlib inline')
def train_and_force_fail(b , error_message):
try:
df = b.mPastData;
lEngine = hautof.cHierarchicalForecastEngine()
lEngine.mOptions.mHierarchicalCombinationMethod = "TD";
lEngine.mOptions.set_active_autoregressions([]);
lEngine
H = b.mHorizon;
lEngine.train(df , b.mTimeVar , b.mSignalVar, H, b.mHierarchy, None);
raise Exception("NOT_OK")
except Exception as e:
# should fail
print(str(e));
assert(str(e) == error_message)
if(str(e) == "NOT_OK"):
raise
pass
b1 = tsds.load_AU_hierarchical_dataset();
b1.mPastData['NSW'] = "BlaBla"
train_and_force_fail(b1 , "PYAF_ERROR_HIERARCHY_BASE_SIGNAL_COLUMN_TYPE_NOT_ALLOWED 'NSW' 'object'")
| {
"content_hash": "4097d18e66e6df49a020bcf03e8413b1",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 100,
"avg_line_length": 27.82857142857143,
"alnum_prop": 0.63347022587269,
"repo_name": "antoinecarme/pyaf",
"id": "c23ebfc660a269f25ef848c6a20227468b6d3a2b",
"size": "974",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/basic_checks/hierarchy_checks_base_column_bad_storage.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "6773299"
},
{
"name": "Procfile",
"bytes": "24"
},
{
"name": "Python",
"bytes": "54209093"
},
{
"name": "R",
"bytes": "807"
},
{
"name": "Shell",
"bytes": "3619"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
from chaco.label import Label
from kiva import FILL
from traits.trait_types import Bool, Float, Str
from traits.traits import Property
# ============= standard library imports ========================
# ============= local library imports ==========================
class MarkerLabel(Label):
rotate_angle = 0
zero_y = 0
xoffset = 10
indicator_width = 4
indicator_height = 10
visible = Bool(True)
component_height = 100
x = Float
y = Float
data_y = Float
vertical = False
horizontal_line_visible = False
label_with_intensity = False
text = Property(depends_on='_text, label_with_intensity')
_text = Str
def _set_text(self, text):
        self._text = text
def _get_text(self):
if self.label_with_intensity:
return '{:0.4f}'.format(self.data_y)
else:
return self._text
def draw(self, gc, component_height):
if not self.text:
self.text = ''
if self.bgcolor != "transparent":
gc.set_fill_color(self.bgcolor_)
#draw tag border
with gc:
if self.vertical:
self.rotate_angle=90
width, height = self.get_bounding_box(gc)
gc.translate_ctm(self.x, self.zero_y_vert-35-height)
else:
if self.horizontal_line_visible:
self._draw_horizontal_line(gc)
gc.translate_ctm(self.x+self.xoffset, self.y)
self._draw_tag_border(gc)
super(MarkerLabel, self).draw(gc)
with gc:
gc.translate_ctm(self.x - self.indicator_width / 2.0, self.zero_y)
self._draw_index_indicator(gc, component_height)
def _draw_horizontal_line(self, gc):
with gc:
# print self.x, self.y
gc.set_stroke_color((0,0,0,1))
gc.set_line_width(2)
oy = 7 if self.text else 4
y=self.y+oy
gc.move_to(0, y)
gc.line_to(self.x, y)
gc.stroke_path()
def _draw_index_indicator(self, gc, component_height):
# gc.set_fill_color((1, 0, 0, 1))
w, h = self.indicator_width, self.indicator_height
gc.draw_rect((0, 0, w, h), FILL)
gc.draw_rect((0, component_height, w, h), FILL)
def _draw_tag_border(self, gc):
gc.set_stroke_color((0, 0, 0, 1))
gc.set_line_width(2)
# gc.set_fill_color((1, 1, 1, 1))
bb_width, bb_height = self.get_bounding_box(gc)
offset = 2
xoffset = self.xoffset
gc.lines([(-xoffset, (bb_height + offset) / 2.0),
(0, bb_height + 2 * offset),
(bb_width + offset, bb_height + 2 * offset),
(bb_width + offset, -offset),
(0, -offset),
(-xoffset, (bb_height + offset) / 2.0)])
gc.draw_path()
# ============= EOF =============================================
| {
"content_hash": "1e60d88e8638726fbe0d5e52641af21a",
"timestamp": "",
"source": "github",
"line_count": 99,
"max_line_length": 78,
"avg_line_length": 30.383838383838384,
"alnum_prop": 0.515625,
"repo_name": "UManPychron/pychron",
"id": "676697ddf3ecd00c1ba4c1e0c990eae357b5afff",
"size": "3808",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "pychron/spectrometer/graph/marker_label.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "131"
},
{
"name": "C++",
"bytes": "3706"
},
{
"name": "CSS",
"bytes": "279"
},
{
"name": "Fortran",
"bytes": "455875"
},
{
"name": "HTML",
"bytes": "40346"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Processing",
"bytes": "11421"
},
{
"name": "Python",
"bytes": "10234954"
},
{
"name": "Shell",
"bytes": "10753"
}
],
"symlink_target": ""
} |
"""
@created_at 2015-01-18
@author Exequiel Fuentes <[email protected]>
"""
import sys
import datetime
from lib.util import Options, SystemUtils
from lib.doc import BagOfWords
from lib.net import Network
from lib.stats import FMetrics
#import warnings
#warnings.filterwarnings("ignore", category=DeprecationWarning)
if __name__ == "__main__":
try:
# Parse options
options = Options().parse(sys.argv[1:])
# Configure logging
logger = SystemUtils().configure_log()
# Define the classes
classes = ["bioinformatics", "database", "network", "programming"]
# Create bag of words
bag_of_words = BagOfWords()
vocabulary = bag_of_words.create_vocabulary()
# Create inputs to the network
net_inputs = bag_of_words.create_ann_inputs(vocabulary, classes, "acm")
# Create the network
network = Network(net_inputs, classes, options, logger)
# Load the network from file
if options.load:
logger.info("Loading network from file...")
network.load(options.load)
else:
# Fit the network
start = datetime.datetime.now()
network.fit()
logger.info("Time training network: %f [sec]" % (datetime.datetime.now() - start).total_seconds())
# Show the errors
if options.verbose == False:
network.show_error()
# Save the network after fit
if options.save_as:
network.save(options.save_as)
# Show the network layer
#if options.verbose:
network.show_layer()
logger.info("Testing the network using validation dataset from IEEE...")
start = datetime.datetime.now()
validation_dataset = bag_of_words.create_ann_inputs(vocabulary, classes, "ieee")
logger.info("Time creating IEEE input: %f [sec]" %(datetime.datetime.now() - start).total_seconds())
validation_target = []
for i in xrange(len(validation_dataset)):
validation_target.append(validation_dataset[i][1])
start = datetime.datetime.now()
validation_pred = network.predict(validation_dataset)
logger.info("Time predicting IEEE target: %f [sec]" % (datetime.datetime.now() - start).total_seconds())
logger.info("Classification performance: %.2f" % network.classification_performance(validation_pred, validation_target))
logger.info("Explained sum of squares: %.2f" % network.explained_sum_squares(validation_pred, validation_target))
logger.info("Mean squared error: %.2f" % network.mean_squared_error(validation_pred, validation_target))
# Create a metrics instance for showing a report in the console
fmetrics = FMetrics(logger)
# Create the report
fmetrics.report(validation_target, validation_pred)
# For keeping the plot
network.show_plot()
except Exception, err:
error_msg = str(err)
try:
logger.error(error_msg, exc_info=True)
except Exception, err:
print(error_msg)
print str(err)
finally:
sys.exit()
| {
"content_hash": "f2e332bd72e0caaa3da8c2d6f783fead",
"timestamp": "",
"source": "github",
"line_count": 95,
"max_line_length": 128,
"avg_line_length": 35.07368421052632,
"alnum_prop": 0.5984393757503002,
"repo_name": "efulet/text_classification",
"id": "b5b97f2dd8350510cdd9e78155c49f13a71dbf51",
"size": "3332",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ftc/main.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "27688"
}
],
"symlink_target": ""
} |
""" Setuptools project configuration for rnglib. """
from os.path import exists
from setuptools import setup
long_desc = None
if exists('README.md'):
with open('README.md', 'r') as file:
long_desc = file.read()
setup(name='rnglib',
version='1.3.10',
author='Jim Dixon',
author_email='[email protected]',
long_description=long_desc,
packages=['rnglib'],
package_dir={'': 'src'},
py_modules=[],
include_package_data=False,
zip_safe=False,
scripts=[],
ext_modules=[],
description='random number generator library',
url='https://jddixon.github.io/rnglib',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python',
          'Programming Language :: Python :: 2',
          'Programming Language :: Python :: 2.7',
          'Programming Language :: Python :: 3',
          'Programming Language :: Python :: 3.5',
          'Programming Language :: Python :: 3.6',
'Topic :: Software Development :: Libraries :: Python Modules',
],)
| {
"content_hash": "19d5be492e29cefd6467b39aa4755a0b",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 73,
"avg_line_length": 32.729729729729726,
"alnum_prop": 0.5871180842279108,
"repo_name": "jddixon/rnglib",
"id": "362a98a29424ad96f85f9471926a55ff15152685",
"size": "1249",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "6648"
},
{
"name": "Python",
"bytes": "29045"
},
{
"name": "Shell",
"bytes": "1597"
}
],
"symlink_target": ""
} |
import math
from collections import Counter
# ==================================================
# Statistical tools
# ==================================================
# efficient combinatorial function to handle extremely large numbers
def log_choose(n, k):
r = 0.0
# swap for efficiency if k is more than half of n
if k * 2 > n:
k = n - k
for d in xrange(1,k+1):
r += math.log(n, 10)
r -= math.log(d, 10)
n -= 1
return r
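# Worked example (illustrative): log_choose(4, 2) ~= 0.778, i.e. log10 of
# C(4, 2) = 6, computed without forming large intermediate factorials.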
# return the genotype and log10 p-value
def bayes_gt(ref, alt, is_dup):
# probability of seeing an alt read with true genotype of of hom_ref, het, hom_alt respectively
if is_dup: # specialized logic to handle non-destructive events such as duplications
p_alt = [1e-2, 0.2, 1/3.0]
else:
p_alt = [1e-3, 0.5, 0.9]
total = ref + alt
log_combo = log_choose(total, alt)
lp_homref = log_combo + alt * math.log(p_alt[0], 10) + ref * math.log(1 - p_alt[0], 10)
lp_het = log_combo + alt * math.log(p_alt[1], 10) + ref * math.log(1 - p_alt[1], 10)
lp_homalt = log_combo + alt * math.log(p_alt[2], 10) + ref * math.log(1 - p_alt[2], 10)
return (lp_homref, lp_het, lp_homalt)
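# Illustrative call: bayes_gt(10, 10, False) yields log10 likelihoods in which
# the heterozygous term dominates, so the argmax over the tuple picks 0/1.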
# get the number of entries in the set
def countRecords(myCounter):
numRecords = sum(myCounter.values())
return numRecords
# median is approx 50th percentile, except when it is between
# two values in which case it's the mean of them.
def median(myCounter):
#length is the number of bases we're looking at
numEntries = countRecords(myCounter)
# the ordinal value of the middle element
# if 2 middle elements, then non-integer
limit = 0.5 * numEntries
# a list of the values, sorted smallest to largest
# note that this list contains unique elements only
valueList = list(myCounter)
valueList.sort()
# number of entries we've gone through
runEntries = 0
# index of the current value in valueList
i = 0
# initiate v, in case list only has one element
v = valueList[i]
# move through the value list, iterating by number of
# entries for each value
while runEntries < limit:
v = valueList[i]
runEntries += myCounter[v]
i += 1
if runEntries == limit:
return (v + valueList[i]) / 2.0
else:
return v
# calculate upper median absolute deviation
def upper_mad(myCounter, myMedian):
residCounter = Counter()
for x in myCounter:
if x > myMedian:
residCounter[abs(x - myMedian)] += myCounter[x]
return median(residCounter)
# sum of the entries
def sumRecords(myCounter):
mySum = 0.0
for c in myCounter:
mySum += c * float(myCounter[c])
return mySum
# calculate the arithmetic mean, given a counter mapping
# each value to its number of occurrences
def mean(myCounter):
# the number of total entries in the set is the
# sum of the occurrences for each value
numRecords = countRecords(myCounter)
# u holds the mean
u = float()
u = sumRecords(myCounter) / numRecords
return u
def stdev(myCounter):
# the number of total entries in the set is the
# sum of the occurrences for each value
numRecords = countRecords(myCounter)
# u holds the mean
u = mean(myCounter)
sumVar = 0.0
# stdev is sqrt(sum((x-u)^2)/#elements)
for c in myCounter:
sumVar += myCounter[c] * (c - u)**2
myVariance = float(sumVar) / numRecords
stdev = myVariance**(0.5)
return stdev
| {
"content_hash": "45ead46f66173b1c7b15f41d19e05c63",
"timestamp": "",
"source": "github",
"line_count": 122,
"max_line_length": 99,
"avg_line_length": 29.442622950819672,
"alnum_prop": 0.6227728285077951,
"repo_name": "hall-lab/svtyper",
"id": "51cc3cdf5e370c95acf6ddaab3698445dd100387",
"size": "3592",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "svtyper/statistics.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "504"
},
{
"name": "Python",
"bytes": "228014"
},
{
"name": "R",
"bytes": "1564"
},
{
"name": "Shell",
"bytes": "611"
}
],
"symlink_target": ""
} |
def get_volume_of_cuboid(length, width, height):
return length * width * height
# PEP8: kata function name should use snake_case not mixedCase
getVolumeOfCubiod = get_volume_of_cuboid
| {
"content_hash": "aafa2bf29772b8bfa96f0939fd6b002d",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 62,
"avg_line_length": 31.666666666666668,
"alnum_prop": 0.7578947368421053,
"repo_name": "the-zebulan/CodeWars",
"id": "ea8f829caba29eedeab0465afb150380af5ca33f",
"size": "190",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "katas/kyu_8/volume_of_a_cuboid.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1203000"
}
],
"symlink_target": ""
} |
"""
This script reads a text file with a list of species names and
SMILES strings and converts it to an RMG dictionary.
The text file should have one species per line, with the species
name/identifier first, followed by the SMILES string.
Comments marked with '!' are ignored.
"""
import argparse
import re
from rmgpy.species import Species
from rmgpy.chemkin import saveSpeciesDictionary
def main(inputPath, outputPath):
species = []
with open(inputPath, 'r+b') as f:
for line0 in f:
line1 = line0.strip()
if line1 and line1[0] != '!':
# Parse line using regex
match = re.match(r"([^\s!]+)\s+([^\s!]+)", line1.strip())
# Get label and SMILES string
label = match.group(1)
smiles = match.group(2)
# Save in species dictionary
spec = Species().fromSMILES(smiles)
spec.label = label
species.append(spec)
# Write to dictionary file
saveSpeciesDictionary(outputPath, species)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('inputPath', metavar='INPUT', type=str, nargs=1,
help='The path of the SMILES input file')
parser.add_argument('outputPath', metavar='OUTPUT', type=str, nargs=1,
help='The path to save the output file')
args = parser.parse_args()
inputPath = args.inputPath[0]
outputPath = args.outputPath[0]
main(inputPath, outputPath)
| {
"content_hash": "5ca45095bddaf77e1df426c02154f8ea",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 74,
"avg_line_length": 28.77358490566038,
"alnum_prop": 0.6177049180327869,
"repo_name": "mliu49/RMG-stuff",
"id": "f487c9a59471b5b3f0e7dd9ce7442915ec6dfbeb",
"size": "1566",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Scripts/smilesToDictionary.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "1495757"
},
{
"name": "Python",
"bytes": "18540"
}
],
"symlink_target": ""
} |
class jackstack:
"""stack that uses stacknodes"""
def __init__(self):
self.head = None
def push(self, value):
self.head = stacknode(value, self.head)
def pop(self):
result = self.head.value
self.head = self.head.fwdNode
return result
class stacknode:
"""simple node with a value and pointer to the next node"""
def __init__(self, value, nextNode):
self.value = value
self.fwdNode = nextNode | {
"content_hash": "378cb8826f7a63bea627741834fdaaef",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 63,
"avg_line_length": 25.36842105263158,
"alnum_prop": 0.5892116182572614,
"repo_name": "sniboboof/data-structures",
"id": "8defdcc8cbe4dd1831a1eb0a67730fb28a77181a",
"size": "482",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "actualstack.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "DOT",
"bytes": "45356"
},
{
"name": "Python",
"bytes": "40597"
}
],
"symlink_target": ""
} |
"""
tf.Variable
See https://www.tensorflow.org/api_docs/python/tf/Variable
"""
import tensorflow as tf
W = tf.Variable(tf.random_uniform([1], -1.0, 1.0), name='weight') #See https://www.tensorflow.org/api_docs/python/tf/random_uniform
b = tf.Variable(tf.zeros([1]), name='bias') #See https://www.tensorflow.org/api_docs/python/tf/zeros
init_op=tf.global_variables_initializer()
with tf.Session() as sess:
sess.run(init_op)
print(sess.run(W))
print(sess.run(b))
"""
Parameter:
1. trainable: True for default, means the variable is also added to the graph collection GraphKeys.TRAINABLE_VARIABLES.
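   Illustrative example (not from the original note): a counter kept out of training,
   e.g. step = tf.Variable(0, trainable=False, name='global_step')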
""" | {
"content_hash": "43f5b5160ec4c689477fca439b6d34a6",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 131,
"avg_line_length": 34.05555555555556,
"alnum_prop": 0.7177814029363785,
"repo_name": "KarateJB/Python.Practice",
"id": "dc99ee0493ee27e488f94b20c38aa91bf9b504ec",
"size": "613",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/TensorFlow/venv/Lab/Tutorials/Basic/Variable.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1959"
},
{
"name": "C",
"bytes": "3972"
},
{
"name": "CSS",
"bytes": "601"
},
{
"name": "HTML",
"bytes": "11362"
},
{
"name": "JavaScript",
"bytes": "1955"
},
{
"name": "PowerShell",
"bytes": "2988"
},
{
"name": "Python",
"bytes": "1682392"
}
],
"symlink_target": ""
} |
import ast
import re
import sys
import codecs
from json import JSONEncoder
from ast import *
# Is it Python 3?
python3 = hasattr(sys.version_info, 'major') and (sys.version_info.major == 3)
class AstEncoder(JSONEncoder):
def default(self, obj):
if hasattr(obj, '__dict__'):
dic = obj.__dict__
# workaround: decode strings if it's not Python3 code
if not python3:
for key in dic:
if isinstance(dic[key], str):
if key == 's':
dic[key] = lines[dic['start']:dic['end']]
else:
dic[key] = dic[key].decode(enc)
dic['type'] = obj.__class__.__name__
return dic
else:
return str(obj)
enc = 'latin1'
lines = ''
def parse_dump(filename, output, end_mark):
try:
if python3:
encoder = AstEncoder()
else:
encoder = AstEncoder(encoding=enc)
tree = parse_file(filename)
encoded = encoder.encode(tree)
f = open(output, "w")
f.write(encoded)
f.close()
finally:
# write marker file to signal write end
f = open(end_mark, "w")
f.close()
def parse_file(filename):
global enc, lines
enc, enc_len = detect_encoding(filename)
f = codecs.open(filename, 'r', enc)
lines = f.read()
if enc == None:
# This is a terrible hack
lines = lines.decode("latin1").encode("latin1").decode("utf-8")
enc = "latin1"
# remove BOM
lines = re.sub(u'\ufeff', ' ', lines)
# replace the encoding decl by spaces to fool python parser
# otherwise you get 'encoding decl in unicode string' syntax error
# print('enc:', enc, 'enc_len', enc_len)
if enc_len > 0:
lines = re.sub('#.*coding\s*[:=]\s*[\w\d\-]+', '#' + ' ' * (enc_len - 1), lines)
f.close()
return parse_string(lines, filename)
def parse_string(string, filename=None):
tree = ast.parse(string)
improve_ast(tree, string)
if filename:
tree.filename = filename
return tree
# short function for experiments
def p(filename):
parse_dump(filename, "json1", "end1")
def detect_encoding(path):
fin = open(path, 'rb')
prefix = str(fin.read(80))
encs = re.findall('#.*coding\s*[:=]\s*([\w\d\-]+)', prefix)
decl = re.findall('#.*coding\s*[:=]\s*[\w\d\-]+', prefix)
if encs:
enc1 = encs[0]
enc_len = len(decl[0])
try:
codecs.lookup(enc1)
except LookupError:
return None, enc_len
return enc1, enc_len
else:
return None, -1
#-------------------------------------------------------------
# improvements to the AST
#-------------------------------------------------------------
def improve_ast(node, s):
build_index_map(s)
improve_node(node, s)
line_starts = []
# build global table 'idxmap' for lineno <-> index oonversion
def build_index_map(s):
global line_starts
idx = 0
line_starts = [0]
while idx < len(s):
if s[idx] == '\n':
line_starts.append(idx + 1)
idx += 1
# convert (line, col) to offset index
def map_idx(line, col):
return line_starts[line - 1] + col
# convert offset index into (line, col)
def map_line_col(idx):
line = 0
for start in line_starts:
if idx < start:
break
line += 1
col = idx - line_starts[line - 1]
return line, col
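# Illustrative round trip: for source "ab\ncd", line_starts == [0, 3], so
# map_idx(2, 1) == 4 and map_line_col(4) == (2, 1).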
def improve_node(node, s):
if isinstance(node, list):
for n in node:
improve_node(n, s)
elif isinstance(node, AST):
find_start(node, s)
find_end(node, s)
if hasattr(node, 'start'):
node.lineno, node.col_offset = map_line_col(node.start)
add_missing_names(node, s)
for f in node_fields(node):
improve_node(f, s)
def find_start(node, s):
ret = None # default value
if hasattr(node, 'start'):
ret = node.start
elif isinstance(node, list):
if node != []:
ret = find_start(node[0], s)
elif isinstance(node, Module):
if node.body != []:
ret = find_start(node.body[0], s)
elif isinstance(node, BinOp):
leftstart = find_start(node.left, s)
if leftstart is not None:
ret = leftstart
else:
ret = map_idx(node.lineno, node.col_offset)
elif hasattr(node, 'lineno'):
if node.col_offset >= 0:
ret = map_idx(node.lineno, node.col_offset)
else: # special case for """ strings
i = map_idx(node.lineno, node.col_offset)
while i > 0 and i + 2 < len(s) and s[i:i + 3] != '"""' and s[i:i + 3] != "'''":
i -= 1
ret = i
else:
return None
if ret is None and hasattr(node, 'lineno'):
raise TypeError("got None for node that has lineno", node)
if isinstance(node, AST) and ret is not None:
node.start = ret
return ret
def find_end(node, s):
the_end = None
if hasattr(node, 'end'):
return node.end
elif isinstance(node, list):
if node != []:
the_end = find_end(node[-1], s)
elif isinstance(node, Module):
if node.body != []:
the_end = find_end(node.body[-1], s)
elif isinstance(node, Expr):
the_end = find_end(node.value, s)
elif isinstance(node, Str):
i = find_start(node, s)
while s[i] != '"' and s[i] != "'":
i += 1
if i + 2 < len(s) and s[i:i + 3] == '"""':
q = '"""'
i += 3
elif i + 2 < len(s) and s[i:i + 3] == "'''":
q = "'''"
i += 3
elif s[i] == '"':
q = '"'
i += 1
elif s[i] == "'":
q = "'"
i += 1
else:
print("illegal quote:", i, s[i])
q = ''
if q != '':
the_end = end_seq(s, q, i)
elif isinstance(node, Name):
the_end = find_start(node, s) + len(node.id)
elif isinstance(node, Attribute):
the_end = end_seq(s, node.attr, find_end(node.value, s))
elif isinstance(node, FunctionDef) or (python3 and isinstance(node, AsyncFunctionDef)):
the_end = find_end(node.body, s)
elif isinstance(node, Lambda):
the_end = find_end(node.body, s)
elif isinstance(node, ClassDef):
the_end = find_end(node.body, s)
# print will be a Call in Python 3
elif not python3 and isinstance(node, Print):
the_end = start_seq(s, '\n', find_start(node, s))
elif isinstance(node, Call):
start = find_end(node.func, s)
if start is not None:
the_end = match_paren(s, '(', ')', start)
elif isinstance(node, Yield):
the_end = find_end(node.value, s)
elif isinstance(node, Return):
if node.value is not None:
the_end = find_end(node.value, s)
else:
the_end = find_start(node, s) + len('return')
elif (isinstance(node, For) or
isinstance(node, While) or
isinstance(node, If) or
isinstance(node, IfExp)):
if node.orelse != []:
the_end = find_end(node.orelse, s)
else:
the_end = find_end(node.body, s)
elif isinstance(node, Assign) or isinstance(node, AugAssign):
the_end = find_end(node.value, s)
elif isinstance(node, BinOp):
the_end = find_end(node.right, s)
elif isinstance(node, BoolOp):
the_end = find_end(node.values[-1], s)
elif isinstance(node, Compare):
the_end = find_end(node.comparators[-1], s)
elif isinstance(node, UnaryOp):
the_end = find_end(node.operand, s)
elif isinstance(node, Num):
the_end = find_start(node, s) + len(str(node.n))
elif isinstance(node, List):
the_end = match_paren(s, '[', ']', find_start(node, s))
elif isinstance(node, Subscript):
the_end = match_paren(s, '[', ']', find_start(node, s))
elif isinstance(node, Tuple):
if node.elts != []:
the_end = find_end(node.elts[-1], s)
elif isinstance(node, Dict):
the_end = match_paren(s, '{', '}', find_start(node, s))
elif ((not python3 and isinstance(node, TryExcept)) or
(python3 and isinstance(node, Try))):
if node.orelse != []:
the_end = find_end(node.orelse, s)
elif node.handlers != []:
the_end = find_end(node.handlers, s)
else:
the_end = find_end(node.body, s)
elif isinstance(node, ExceptHandler):
the_end = find_end(node.body, s)
elif isinstance(node, Pass):
the_end = find_start(node, s) + len('pass')
elif isinstance(node, Break):
the_end = find_start(node, s) + len('break')
elif isinstance(node, Continue):
the_end = find_start(node, s) + len('continue')
elif isinstance(node, Global):
the_end = start_seq(s, '\n', find_start(node, s))
elif isinstance(node, Import):
the_end = find_start(node, s) + len('import')
elif isinstance(node, ImportFrom):
the_end = find_start(node, s) + len('from')
else: # can't determine node end, set to 3 chars after start
start = find_start(node, s)
if start is not None:
the_end = start + 3
if isinstance(node, AST) and the_end is not None:
node.end = the_end
return the_end
def add_missing_names(node, s):
if hasattr(node, 'extra_attr'):
return
if isinstance(node, list):
for n in node:
add_missing_names(n, s)
elif isinstance(node, ClassDef):
head = find_start(node, s)
start = s.find("class", head) + len("class")
if start is not None:
node.name_node = str_to_name(s, start)
node._fields += ('name_node',)
elif isinstance(node, FunctionDef) or (python3 and isinstance(node, AsyncFunctionDef)):
# skip to "def" because it may contain decorators like @property
head = find_start(node, s)
start = s.find("def", head) + len("def")
if start is not None:
node.name_node = str_to_name(s, start)
node._fields += ('name_node',)
# keyword_start = find_start(node, s)
# node.keyword_node = str_to_name(s, keyword_start)
# node._fields += ('keyword_node',)
if node.args.vararg is not None:
if len(node.args.args) > 0:
vstart = find_end(node.args.args[-1], s)
else:
vstart = find_end(node.name_node, s)
if vstart is not None:
vname = str_to_name(s, vstart)
node.vararg_name = vname
else:
node.vararg_name = None
node._fields += ('vararg_name',)
if node.args.kwarg is not None:
if len(node.args.args) > 0:
kstart = find_end(node.args.args[-1], s)
else:
kstart = find_end(node.vararg_name, s)
if kstart:
kname = str_to_name(s, kstart)
node.kwarg_name = kname
else:
node.kwarg_name = None
node._fields += ('kwarg_name',)
elif isinstance(node, Attribute):
start = find_end(node.value, s)
if start is not None:
name = str_to_name(s, start)
node.attr_name = name
node._fields = ('value', 'attr_name') # remove attr for node size accuracy
elif isinstance(node, Compare):
start = find_start(node, s)
if start is not None:
node.opsName = convert_ops(node.ops, s, start)
node._fields += ('opsName',)
elif (isinstance(node, BoolOp) or
isinstance(node, BinOp) or
isinstance(node, UnaryOp) or
isinstance(node, AugAssign)):
if hasattr(node, 'left'):
start = find_end(node.left, s)
else:
start = find_start(node, s)
if start is not None:
ops = convert_ops([node.op], s, start)
else:
ops = []
if ops != []:
node.op_node = ops[0]
node._fields += ('op_node',)
elif isinstance(node, Num):
if isinstance(node.n, int) or (not python3 and isinstance(node.n, long)):
num_type = 'int'
node.n = str(node.n)
elif isinstance(node.n, float):
num_type = 'float'
node.n = str(node.n)
elif isinstance(node.n, complex):
num_type = 'complex'
node.real = node.n.real
node.imag = node.n.imag
node._fields += ('real', 'imag')
else:
num_type = 'unsupported'
node.num_type = num_type
node._fields += ('num_type',)
node.extra_attr = True
#-------------------------------------------------------------
# utilities used by improve AST functions
#-------------------------------------------------------------
# find a sequence in a string s, returning the start point
def start_seq(s, pat, start):
try:
return s.index(pat, start)
except ValueError:
return len(s)
# find a sequence in a string s, returning the end point
def end_seq(s, pat, start):
try:
return s.index(pat, start) + len(pat)
except ValueError:
return len(s)
# find matching close paren from start
def match_paren(s, open, close, start):
while start < len(s) and s[start] != open:
start += 1
if start >= len(s):
return len(s)
left = 1
i = start + 1
while left > 0 and i < len(s):
if s[i] == open:
left += 1
elif s[i] == close:
left -= 1
i += 1
return i
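# Illustrative behaviour: match_paren("f(a,(b))", '(', ')', 0) == 8, the index
# just past the parenthesis matching the first '(' at or after start.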
# convert string to Name
def str_to_name(s, start):
i = start
while i < len(s) and not is_alpha(s[i]):
i += 1
name_start = i
ret = []
while i < len(s) and is_alpha(s[i]):
ret.append(s[i])
i += 1
name_end = i
id1 = ''.join(ret)
if id1 == '':
return None
else:
name = Name(id1, None)
name.start = name_start
name.end = name_end
return name
def convert_ops(ops, s, start):
syms = []
for op in ops:
if type(op) in ops_map:
syms.append(ops_map[type(op)])
else:
print("[WARNING] operator %s is missing from ops_map, "
"please report the bug on GitHub" % op)
i = start
j = 0
ret = []
while i < len(s) and j < len(syms):
oplen = len(syms[j])
if s[i:i + oplen] == syms[j]:
op_node = Name(syms[j], None)
op_node.start = i
op_node.end = i + oplen
ret.append(op_node)
j += 1
i = op_node.end
else:
i += 1
return ret
# lookup table for operators for convert_ops
ops_map = {
# compare:
Eq: '==',
NotEq: '!=',
LtE: '<=',
Lt: '<',
GtE: '>=',
Gt: '>',
NotIn: 'not in',
In: 'in',
IsNot: 'is not',
Is: 'is',
# BoolOp
Or: 'or',
And: 'and',
Not: 'not',
Invert: '~',
# bit operators
BitOr: '|',
BitAnd: '&',
BitXor: '^',
RShift: '>>',
LShift: '<<',
# BinOp
Add: '+',
Sub: '-',
Mult: '*',
Div: '/',
FloorDiv: '//',
Mod: '%',
Pow: '**',
# UnaryOp
USub: '-',
UAdd: '+',
}
if python3:
ops_map[MatMult] = '@'
# get list of fields from a node
def node_fields(node):
ret = []
for field in node._fields:
if field != 'ctx' and hasattr(node, field):
ret.append(getattr(node, field))
return ret
# get full source text where the node is from
def node_source(node):
if hasattr(node, 'node_source'):
return node.node_source
else:
return None
# utility for getting exact source code part of the node
def src(node):
return node.node_source[node.start: node.end]
def start(node):
if hasattr(node, 'start'):
return node.start
else:
return 0
def end(node):
if hasattr(node, 'end'):
return node.end
else:
return None
def is_alpha(c):
return (c == '_' or
('0' <= c <= '9') or
('a' <= c <= 'z') or
('A' <= c <= 'Z'))
# p('/System/Library/Frameworks/Python.framework/Versions/2.5/lib/python2.5/lib-tk/Tix.py')
| {
"content_hash": "2a06e6c78867c66c9642a0ab5f2097a8",
"timestamp": "",
"source": "github",
"line_count": 634,
"max_line_length": 91,
"avg_line_length": 26.1198738170347,
"alnum_prop": 0.5122584541062802,
"repo_name": "lambdalab/pysonar2",
"id": "67fb48014bc7e4330e5d24ee0ddfd86014d5f3b7",
"size": "16560",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/resources/org/yinwang/pysonar/python/dump_python.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "920"
},
{
"name": "Java",
"bytes": "413257"
},
{
"name": "JavaScript",
"bytes": "1328"
},
{
"name": "Python",
"bytes": "22786"
}
],
"symlink_target": ""
} |
from Tkinter import *
def sel():
selection = "Value = " + str(var.get())
label.config(text = selection)
root = Tk()
var = DoubleVar()
scale = Scale( root, variable = var, orient=HORIZONTAL )
scale.pack(anchor=CENTER)
button = Button(root, text="Get Scale Value", command=sel)
button.pack(anchor=CENTER)
label = Label(root)
label.pack()
root.mainloop() | {
"content_hash": "3dbef21dce6bc00f0da774810cc46ab5",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 58,
"avg_line_length": 20.11111111111111,
"alnum_prop": 0.6906077348066298,
"repo_name": "divir94/News-Analytics",
"id": "b541460b59e1af52de93201dd35d6f084abd7e2f",
"size": "362",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Divir/Tkinter/slider_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "3253"
},
{
"name": "OpenEdge ABL",
"bytes": "28"
},
{
"name": "Python",
"bytes": "303113"
},
{
"name": "Shell",
"bytes": "544"
}
],
"symlink_target": ""
} |
class AbstractTrade:
"""High-level class representing a trade from one coin to another.
"""
def __init__(
self,
sell_coin: str,
buy_coin: str,
reference_coin: str,
reference_value: float,
) -> None:
self._sell_coin = sell_coin
self._buy_coin = buy_coin
self._reference_coin = reference_coin
self._reference_value = reference_value
@property
def sell_coin(self) -> str:
return self._sell_coin
@property
def buy_coin(self) -> str:
return self._buy_coin
@property
def reference_coin(self) -> str:
return self._reference_coin
@property
def reference_value(self) -> float:
return self._reference_value
| {
"content_hash": "8ec00f1ea3352a016cff4bfbe55ee5b8",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 70,
"avg_line_length": 24.322580645161292,
"alnum_prop": 0.5822281167108754,
"repo_name": "elsehow/moneybot",
"id": "98830c10b469b8d51ef43286f475b4e1ce6a406f",
"size": "780",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "moneybot/trade.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "455"
},
{
"name": "Python",
"bytes": "72870"
},
{
"name": "Shell",
"bytes": "2251"
}
],
"symlink_target": ""
} |
import gspread
from oauth2client.service_account import ServiceAccountCredentials
# use creds to create a client to interact with the Google Drive API
scope = ['https://spreadsheets.google.com/feeds']
creds = ServiceAccountCredentials.from_json_keyfile_name('Beautifulseodang-c0a5cc61a537.json', scope)
client = gspread.authorize(creds)
sheet = client.open("test").sheet1
list_of_hashes = sheet.get_all_records()
print (list_of_hashes) | {
"content_hash": "85565be3029fd634b1728a948ccccc91",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 101,
"avg_line_length": 31.428571428571427,
"alnum_prop": 0.7931818181818182,
"repo_name": "yeongseon/django_beautifulseodang",
"id": "0ba96a49ee2a47b66e2d8e3cc64349fcbf227836",
"size": "440",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test_gspread.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "80794"
},
{
"name": "HTML",
"bytes": "133381"
},
{
"name": "JavaScript",
"bytes": "238909"
},
{
"name": "Python",
"bytes": "28763"
}
],
"symlink_target": ""
} |
import theano
import theano.tensor as T
import numpy as np
from lasagne.layers import Layer
from lasagne.layers.recurrent import Gate
import lasagne.nonlinearities
import lasagne.init
class Controller(Layer):
r"""
The base class :class:`Controller` represents a generic controller
for the Neural Turing Machine. The controller is a neural network
(feed-forward or recurrent) making the interface between the
incoming layer (eg. an instance of :class:`lasagne.layers.InputLayer`)
and the NTM.
Parameters
----------
incoming: a :class:`lasagne.layers.Layer` instance
The layer feeding into the Neural Turing Machine.
memory_shape: tuple
Shape of the NTM's memory.
num_units: int
Number of hidden units in the controller.
num_reads: int
Number of read heads in the Neural Turing Machine.
hid_init: callable, Numpy array or Theano shared variable
Initializer for the initial hidden state (:math:`h_{0}`).
learn_init: bool
If ``True``, initial hidden values are learned.
"""
def __init__(self, incoming, memory_shape, num_units, num_reads,
hid_init=lasagne.init.GlorotUniform(),
learn_init=False,
**kwargs):
super(Controller, self).__init__(incoming, **kwargs)
self.hid_init = self.add_param(hid_init, (1, num_units),
name='hid_init', regularizable=False, trainable=learn_init)
self.memory_shape = memory_shape
self.num_units = num_units
self.num_reads = num_reads
def step(self, input, reads, hidden, state, *args, **kwargs):
raise NotImplementedError
def get_output_shape_for(self, input_shape):
return (input_shape[0], self.num_units)
class DenseController(Controller):
r"""
A fully connected (feed-forward) controller for the NTM.
.. math ::
h_t = \sigma(x_{t} W_{x} + r_{t} W_{r} + b_{x} + b_{r})
Parameters
----------
incoming: a :class:`lasagne.layers.Layer` instance
The layer feeding into the Neural Turing Machine.
memory_shape: tuple
Shape of the NTM's memory.
num_units: int
Number of hidden units in the controller.
num_reads: int
Number of read heads in the Neural Turing Machine.
W_in_to_hid: callable, Numpy array or Theano shared variable
If callable, initializer for the weights between the
input and the hidden state. Otherwise a matrix with
shape ``(num_inputs, num_units)`` (:math:`W_{x}`).
b_in_to_hid: callable, Numpy array, Theano shared variable or ``None``
If callable, initializer for the biases between the
input and the hidden state. If ``None``, the controller
has no bias between the input and the hidden state. Otherwise
a 1D array with shape ``(num_units,)`` (:math:`b_{x}`).
W_reads_to_hid: callable, Numpy array or Theano shared variable
If callable, initializer for the weights between the
read vector and the hidden state. Otherwise a matrix with
shape ``(num_reads * memory_shape[1], num_units)`` (:math:`W_{r}`).
b_reads_to_hid: callable, Numpy array, Theano shared variable or ``None``
If callable, initializer for the biases between the
read vector and the hidden state. If ``None``, the controller
has no bias between the read vector and the hidden state.
Otherwise a 1D array with shape ``(num_units,)`` (:math:`b_{r}`).
nonlinearity: callable or ``None``
The nonlinearity that is applied to the controller. If ``None``,
the controller will be linear (:math:`\sigma`).
hid_init: callable, np.ndarray or theano.shared
Initializer for the initial hidden state (:math:`h_{0}`).
learn_init: bool
If ``True``, initial hidden values are learned.
"""
def __init__(self, incoming, memory_shape, num_units, num_reads,
W_in_to_hid=lasagne.init.GlorotUniform(),
b_in_to_hid=lasagne.init.Constant(0.),
W_reads_to_hid=lasagne.init.GlorotUniform(),
b_reads_to_hid=lasagne.init.Constant(0.),
nonlinearity=lasagne.nonlinearities.rectify,
hid_init=lasagne.init.GlorotUniform(),
learn_init=False,
**kwargs):
super(DenseController, self).__init__(incoming, memory_shape, num_units,
num_reads, hid_init, learn_init,
**kwargs)
self.nonlinearity = (lasagne.nonlinearities.identity if
nonlinearity is None else nonlinearity)
def add_weight_and_bias_params(input_dim, W, b, name):
return (self.add_param(W, (input_dim, self.num_units),
name='W_{}'.format(name)),
self.add_param(b, (self.num_units,),
name='b_{}'.format(name)) if b is not None else None)
num_inputs = int(np.prod(self.input_shape[2:]))
# Inputs / Hidden parameters
self.W_in_to_hid, self.b_in_to_hid = add_weight_and_bias_params(num_inputs,
W_in_to_hid, b_in_to_hid, name='in_to_hid')
# Read vectors / Hidden parameters
self.W_reads_to_hid, self.b_reads_to_hid = add_weight_and_bias_params(self.num_reads * self.memory_shape[1],
W_reads_to_hid, b_reads_to_hid, name='reads_to_hid')
def step(self, input, reads, *args):
if input.ndim > 2:
input = input.flatten(2)
if reads.ndim > 2:
reads = reads.flatten(2)
activation = T.dot(input, self.W_in_to_hid) + \
T.dot(reads, self.W_reads_to_hid)
if self.b_in_to_hid is not None:
activation += self.b_in_to_hid.dimshuffle('x', 0)
if self.b_reads_to_hid is not None:
activation += self.b_reads_to_hid.dimshuffle('x', 0)
state = self.nonlinearity(activation)
return state, state
def outputs_info(self, batch_size):
ones_vector = T.ones((batch_size, 1))
hid_init = T.dot(ones_vector, self.hid_init)
hid_init = T.unbroadcast(hid_init, 0)
return [hid_init, hid_init]
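# Minimal wiring sketch (illustrative only; the input layer, memory shape and
# sizes below are made-up values):
#   controller = DenseController(l_input, memory_shape=(128, 20),
#                                num_units=100, num_reads=1)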
class RecurrentController(Controller):
r"""
A "vanilla" recurrent controller for the NTM.
.. math ::
h_t = \sigma(x_{t} W_{x} + r_{t} W_{r} +
h_{t-1} W_{h} + b_{x} + b_{r} + b_{h})
Parameters
----------
incoming: a :class:`lasagne.layers.Layer` instance
The layer feeding into the Neural Turing Machine.
memory_shape: tuple
Shape of the NTM's memory.
num_units: int
Number of hidden units in the controller.
num_reads: int
Number of read heads in the Neural Turing Machine.
W_in_to_hid: callable, Numpy array or Theano shared variable
If callable, initializer for the weights between the
input and the hidden state. Otherwise a matrix with
shape ``(num_inputs, num_units)`` (:math:`W_{x}`).
b_in_to_hid: callable, Numpy array, Theano shared variable or ``None``
If callable, initializer for the biases between the
input and the hidden state. If ``None``, the controller
has no bias between the input and the hidden state. Otherwise
a 1D array with shape ``(num_units,)`` (:math:`b_{x}`).
W_reads_to_hid: callable, Numpy array or Theano shared variable
If callable, initializer for the weights between the
read vector and the hidden state. Otherwise a matrix with
shape ``(num_reads * memory_shape[1], num_units)`` (:math:`W_{r}`).
b_reads_to_hid: callable, Numpy array, Theano shared variable or ``None``
If callable, initializer for the biases between the
read vector and the hidden state. If ``None``, the controller
has no bias between the read vector and the hidden state.
Otherwise a 1D array with shape ``(num_units,)`` (:math:`b_{r}`).
W_hid_to_hid: callable, Numpy array or Theano shared variable
If callable, initializer for the weights in the hidden-to-hidden
update. Otherwise a matrix with shape ``(num_units, num_units)``
(:math:`W_{h}`).
b_hid_to_hid: callable, Numpy array, Theano shared variable or ``None``
If callable, initializer for the biases in the hidden-to-hidden
update. If ``None``, the controller has no bias in the
hidden-to-hidden update. Otherwise a 1D array with shape
``(num_units,)`` (:math:`b_{h}`).
nonlinearity: callable or ``None``
The nonlinearity that is applied to the controller. If ``None``,
the controller will be linear (:math:`\sigma`).
hid_init: callable, np.ndarray or theano.shared
Initializer for the initial hidden state (:math:`h_{0}`).
learn_init: bool
If ``True``, initial hidden values are learned.
"""
def __init__(self, incoming, memory_shape, num_units, num_reads,
W_in_to_hid=lasagne.init.GlorotUniform(),
b_in_to_hid=lasagne.init.Constant(0.),
W_reads_to_hid=lasagne.init.GlorotUniform(),
b_reads_to_hid=lasagne.init.Constant(0.),
W_hid_to_hid=lasagne.init.GlorotUniform(),
b_hid_to_hid=lasagne.init.Constant(0.),
nonlinearity=lasagne.nonlinearities.rectify,
hid_init=lasagne.init.GlorotUniform(),
learn_init=False,
**kwargs):
super(RecurrentController, self).__init__(incoming, memory_shape, num_units,
num_reads, hid_init, learn_init,
**kwargs)
self.nonlinearity = (lasagne.nonlinearities.identity if
nonlinearity is None else nonlinearity)
def add_weight_and_bias_params(input_dim, W, b, name):
return (self.add_param(W, (input_dim, self.num_units),
name='W_{}'.format(name)),
self.add_param(b, (self.num_units,),
name='b_{}'.format(name)) if b is not None else None)
num_inputs = int(np.prod(self.input_shape[2:]))
# Inputs / Hidden parameters
self.W_in_to_hid, self.b_in_to_hid = add_weight_and_bias_params(num_inputs,
W_in_to_hid, b_in_to_hid, name='in_to_hid')
# Read vectors / Hidden parameters
self.W_reads_to_hid, self.b_reads_to_hid = add_weight_and_bias_params(self.num_reads * self.memory_shape[1],
W_reads_to_hid, b_reads_to_hid, name='reads_to_hid')
# Hidden / Hidden parameters
self.W_hid_to_hid, self.b_hid_to_hid = add_weight_and_bias_params(self.num_units,
W_hid_to_hid, b_hid_to_hid, name='hid_to_hid')
def step(self, input, reads, hidden, *args):
if input.ndim > 2:
input = input.flatten(2)
if reads.ndim > 2:
reads = reads.flatten(2)
activation = T.dot(input, self.W_in_to_hid) + \
T.dot(reads, self.W_reads_to_hid) + \
T.dot(hidden, self.W_hid_to_hid)
if self.b_in_to_hid is not None:
activation += self.b_in_to_hid.dimshuffle('x', 0)
if self.b_reads_to_hid is not None:
activation += self.b_reads_to_hid.dimshuffle('x', 0)
if self.b_hid_to_hid is not None:
activation += self.b_hid_to_hid.dimshuffle('x', 0)
state = self.nonlinearity(activation)
return state, state
def outputs_info(self, batch_size):
ones_vector = T.ones((batch_size, 1))
hid_init = T.dot(ones_vector, self.hid_init)
hid_init = T.unbroadcast(hid_init, 0)
return [hid_init, hid_init]
class LSTMController(Controller):
r"""
A LSTM recurrent controller for the NTM.
.. math ::
input-gate = \sigma(x_{t} Wi_{x} + r_{t} Wi_{r} +
h_{t-1} Wi_{h} + bi_{x} + bi_{r} + bi_{h})
forget-gate = \sigma(x_{t} Wf_{x} + r_{t} Wf_{r} +
h_{t-1} Wf_{h} + bf_{x} + bf_{r} + bf_{h})
output-gate = \sigma(x_{t} Wo_{x} + r_{t} Wo_{r} +
h_{t-1} Wo_{h} + bo_{x} + bo_{r} + bo_{h})
candidate-cell-state = \tanh(x_{t} Wc_{x} + r_{t} Wc_{r} +
h_{t-1} Wc_{h} + bc_{x} + bc_{r} + bc_{h})
cell-state_{t} = cell-state_{t-1} \odot forget-gate +
candidate-cell-state \odot input-gate
h_{t} = \tanh(cell-state_{t}) \odot output-gate
Parameters
----------
incoming: a :class:`lasagne.layers.Layer` instance
The layer feeding into the Neural Turing Machine.
memory_shape: tuple
Shape of the NTM's memory.
num_units: int
Number of hidden units in the controller.
num_reads: int
Number of read heads in the Neural Turing Machine.
W_in_to_input: callable, Numpy array or Theano shared variable
If callable, initializer for the weights between the
input and the input gate. Otherwise a matrix with
shape ``(num_inputs, num_units)`` (:math:`Wi_{x}`).
b_in_to_input: callable, Numpy array, Theano shared variable or ``None``
If callable, initializer for the biases between the
input and the input gate. If ``None``, the controller
has no bias between the input and the input gate. Otherwise
a 1D array with shape ``(num_units,)`` (:math:`bi_{x}`).
W_reads_to_input: callable, Numpy array or Theano shared variable
If callable, initializer for the weights between the
read vector and the input gate. Otherwise a matrix with
shape ``(num_reads * memory_shape[1], num_units)`` (:math:`Wi_{r}`).
b_reads_to_input: callable, Numpy array, Theano shared variable or ``None``
If callable, initializer for the biases between the
read vector and the input gate. If ``None``, the controller
has no bias between the read vector and the input gate.
Otherwise a 1D array with shape ``(num_units,)`` (:math:`bi_{r}`).
W_hid_to_input: callable, Numpy array or Theano shared variable
If callable, initializer for the weights between the
hidden state and the input gate. Otherwise a matrix with
shape ``(num_units, num_units)`` (:math:`Wi_{h}`).
b_hid_to_input: callable, Numpy array, Theano shared variable or ``None``
If callable, initializer for the biases between the
hidden state and the input gate. If ``None``, the controller
has no bias between the hidden state and the input gate.
Otherwise a 1D array with shape ``(num_units,)`` (:math:`bi_{h}`).
W_in_to_forget: callable, Numpy array or Theano shared variable
If callable, initializer for the weights between the
input and the forget gate. Otherwise a matrix with
shape ``(num_inputs, num_units)`` (:math:`Wf_{x}`).
b_in_to_forget: callable, Numpy array, Theano shared variable or ``None``
If callable, initializer for the biases between the
input and the forget gate. If ``None``, the controller
has no bias between the input and the forget gate. Otherwise
a 1D array with shape ``(num_units,)`` (:math:`bf_{x}`).
W_reads_to_forget: callable, Numpy array or Theano shared variable
If callable, initializer for the weights between the
read vector and the forget gate. Otherwise a matrix with
shape ``(num_reads * memory_shape[1], num_units)`` (:math:`Wf_{r}`).
b_reads_to_forget: callable, Numpy array, Theano shared variable or ``None``
If callable, initializer for the biases between the
read vector and the forget gate. If ``None``, the controller
has no bias between the read vector and the forget gate.
Otherwise a 1D array with shape ``(num_units,)`` (:math:`bf_{r}`).
W_hid_to_forget: callable, Numpy array or Theano shared variable
If callable, initializer for the weights between the
hidden state and the forget gate. Otherwise a matrix with
shape ``(num_units, num_units)`` (:math:`Wf_{h}`).
b_hid_to_forget: callable, Numpy array, Theano shared variable or ``None``
If callable, initializer for the biases between the
hidden state and the forget gate. If ``None``, the controller
has no bias between the hidden state and the forget gate.
Otherwise a 1D array with shape ``(num_units,)`` (:math:`bf_{h}`).
W_in_to_output: callable, Numpy array or Theano shared variable
If callable, initializer for the weights between the
input and the output gate. Otherwise a matrix with
shape ``(num_inputs, num_units)`` (:math:`Wo_{x}`).
b_in_to_output: callable, Numpy array, Theano shared variable or ``None``
If callable, initializer for the biases between the
input and the output gate. If ``None``, the controller
has no bias between the input and the output gate. Otherwise
a 1D array with shape ``(num_units,)`` (:math:`bo_{x}`).
W_reads_to_output: callable, Numpy array or Theano shared variable
If callable, initializer for the weights between the
read vector and the output gate. Otherwise a matrix with
shape ``(num_reads * memory_shape[1], num_units)`` (:math:`Wo_{r}`).
b_reads_to_output: callable, Numpy array, Theano shared variable or ``None``
If callable, initializer for the biases between the
read vector and the output gate. If ``None``, the controller
has no bias between the read vector and the output gate.
Otherwise a 1D array with shape ``(num_units,)`` (:math:`bo_{r}`).
W_hid_to_output: callable, Numpy array or Theano shared variable
If callable, initializer for the weights between the
hidden state and the output gate. Otherwise a matrix with
shape ``(num_units, num_units)`` (:math:`Wo_{h}`).
b_hid_to_output: callable, Numpy array, Theano shared variable or ``None``
If callable, initializer for the biases between the
hidden state and the output gate. If ``None``, the controller
has no bias between the hidden state and the output gate.
Otherwise a 1D array with shape ``(num_units,)`` (:math:`bo_{h}`).
W_in_to_cell: callable, Numpy array or Theano shared variable
If callable, initializer for the weights between the
input and the cell state computation gate. Otherwise a matrix
with shape ``(num_inputs, num_units)`` (:math:`Wc_{x}`).
b_in_to_cell: callable, Numpy array, Theano shared variable or ``None``
If callable, initializer for the biases between the
input and the cell state computation gate. If ``None``,
the controller has no bias between the input and the cell
state computation gate. Otherwise a 1D array with shape
``(num_units,)`` (:math:`bc_{x}`).
W_reads_to_cell: callable, Numpy array or Theano shared variable
If callable, initializer for the weights between the
read vector and the cell state computation gate. Otherwise a matrix
with shape ``(num_reads * memory_shape[1], num_units)`` (:math:`Wc_{r}`).
b_reads_to_cell: callable, Numpy array, Theano shared variable or ``None``
If callable, initializer for the biases between the
read vector and the cell state computation gate. If ``None``,
the controller has no bias between the read vector and the cell
state computation gate. Otherwise a 1D array with shape
``(num_units,)`` (:math:`bc_{r}`).
W_hid_to_cell: callable, Numpy array or Theano shared variable
If callable, initializer for the weights between the
hidden state and the cell state computation gate. Otherwise a matrix
with shape ``(num_units, num_units)`` (:math:`Wc_{h}`).
b_hid_to_cell: callable, Numpy array, Theano shared variable or ``None``
If callable, initializer for the biases between the
hidden state and the cell state computation gate. If ``None``,
the controller has no bias between the hidden state and the cell
state computation gate. Otherwise a 1D array with shape
``(num_units,)`` (:math:`bc_{h}`).
hid_init: callable, np.ndarray or theano.shared
Initializer for the initial hidden state (:math:`h_{0}`).
cell_init: callable, np.ndarray or theano.shared
Initializer for the initial cell state (:math:`cell-state_{0}`).
learn_init: bool
If ``True``, initial hidden values are learned.
"""
def __init__(self, incoming, memory_shape, num_units, num_reads,
W_in_to_input=lasagne.init.GlorotUniform(),
b_in_to_input=lasagne.init.Constant(0.),
W_reads_to_input=lasagne.init.GlorotUniform(),
b_reads_to_input=lasagne.init.Constant(0.),
W_hid_to_input=lasagne.init.GlorotUniform(),
b_hid_to_input=lasagne.init.Constant(0.),
W_in_to_forget=lasagne.init.GlorotUniform(),
b_in_to_forget=lasagne.init.Constant(0.),
W_reads_to_forget=lasagne.init.GlorotUniform(),
b_reads_to_forget=lasagne.init.Constant(0.),
W_hid_to_forget=lasagne.init.GlorotUniform(),
b_hid_to_forget=lasagne.init.Constant(0.),
W_in_to_output=lasagne.init.GlorotUniform(),
b_in_to_output=lasagne.init.Constant(0.),
W_reads_to_output=lasagne.init.GlorotUniform(),
b_reads_to_output=lasagne.init.Constant(0.),
W_hid_to_output=lasagne.init.GlorotUniform(),
b_hid_to_output=lasagne.init.Constant(0.),
W_in_to_cell=lasagne.init.GlorotUniform(),
b_in_to_cell=lasagne.init.Constant(0.),
W_reads_to_cell=lasagne.init.GlorotUniform(),
b_reads_to_cell=lasagne.init.Constant(0.),
W_hid_to_cell=lasagne.init.GlorotUniform(),
b_hid_to_cell=lasagne.init.Constant(0.),
nonlinearity=lasagne.nonlinearities.rectify,
hid_init=lasagne.init.GlorotUniform(),
cell_init=lasagne.init.Constant(0.),
learn_init=False,
**kwargs):
super(LSTMController, self).__init__(incoming, memory_shape, num_units,
num_reads, hid_init, learn_init,
**kwargs)
self.nonlinearity = (lasagne.nonlinearities.identity if
nonlinearity is None else nonlinearity)
self.cell_init = self.add_param(cell_init, (1, num_units),
name='cell_init', regularizable=False, trainable=learn_init)
def add_weight_and_bias_params(input_dim, W, b, name):
return (self.add_param(W, (input_dim, self.num_units),
name='W_{}'.format(name)),
self.add_param(b, (self.num_units,),
name='b_{}'.format(name)) if b is not None else None)
num_inputs = int(np.prod(self.input_shape[2:]))
# Inputs / Input Gate parameters
self.W_in_to_input, self.b_in_to_input = add_weight_and_bias_params(num_inputs,
W_in_to_input, b_in_to_input, name='in_to_input')
# Read vectors / Input Gate parameters
self.W_reads_to_input, self.b_reads_to_input = add_weight_and_bias_params(self.num_reads * self.memory_shape[1],
W_reads_to_input, b_reads_to_input, name='reads_to_input')
# Hidden / Input Gate parameters
self.W_hid_to_input, self.b_hid_to_input = add_weight_and_bias_params(self.num_units,
W_hid_to_input, b_hid_to_input, name='hid_to_input')
# Inputs / Forget Gate parameters
self.W_in_to_forget, self.b_in_to_forget = add_weight_and_bias_params(num_inputs,
W_in_to_forget, b_in_to_forget, name='in_to_forget')
# Read vectors / Forget Gate parameters
self.W_reads_to_forget, self.b_reads_to_forget = add_weight_and_bias_params(self.num_reads * self.memory_shape[1],
W_reads_to_forget, b_reads_to_forget, name='reads_to_forget')
# Hidden / Forget Gate parameters
self.W_hid_to_forget, self.b_hid_to_forget = add_weight_and_bias_params(self.num_units,
W_hid_to_forget, b_hid_to_forget, name='hid_to_forget')
# Inputs / Output Gate parameters
self.W_in_to_output, self.b_in_to_output = add_weight_and_bias_params(num_inputs,
W_in_to_output, b_in_to_output, name='in_to_output')
# Read vectors / Output Gate parameters
self.W_reads_to_output, self.b_reads_to_output = add_weight_and_bias_params(self.num_reads * self.memory_shape[1],
W_reads_to_output, b_reads_to_output, name='reads_to_output')
# Hidden / Output Gate parameters
self.W_hid_to_output, self.b_hid_to_output = add_weight_and_bias_params(self.num_units,
W_hid_to_output, b_hid_to_output, name='hid_to_output')
# Inputs / Cell State parameters
self.W_in_to_cell, self.b_in_to_cell = add_weight_and_bias_params(num_inputs,
W_in_to_cell, b_in_to_cell, name='in_to_cell')
# Read vectors / Cell State parameters
self.W_reads_to_cell, self.b_reads_to_cell = add_weight_and_bias_params(self.num_reads * self.memory_shape[1],
W_reads_to_cell, b_reads_to_cell, name='reads_to_cell')
# Hidden / Cell State parameters
self.W_hid_to_cell, self.b_hid_to_cell = add_weight_and_bias_params(self.num_units,
W_hid_to_cell, b_hid_to_cell, name='hid_to_cell')
def step(self, input, reads, hidden, cell, *args):
if input.ndim > 2:
input = input.flatten(2)
if reads.ndim > 2:
reads = reads.flatten(2)
# Input Gate output computation
activation = T.dot(input, self.W_in_to_input) + \
T.dot(reads, self.W_reads_to_input) + \
T.dot(hidden, self.W_hid_to_input)
if self.b_in_to_input is not None:
activation += self.b_in_to_input.dimshuffle('x', 0)
if self.b_reads_to_input is not None:
activation += self.b_reads_to_input.dimshuffle('x', 0)
if self.b_hid_to_input is not None:
activation += self.b_hid_to_input.dimshuffle('x', 0)
input_gate = lasagne.nonlinearities.sigmoid(activation)
# Forget Gate output computation
activation = T.dot(input, self.W_in_to_forget) + \
T.dot(reads, self.W_reads_to_forget) + \
T.dot(hidden, self.W_hid_to_forget)
if self.b_in_to_forget is not None:
activation += self.b_in_to_forget.dimshuffle('x', 0)
if self.b_reads_to_forget is not None:
activation += self.b_reads_to_forget.dimshuffle('x', 0)
if self.b_hid_to_forget is not None:
activation += self.b_hid_to_forget.dimshuffle('x', 0)
forget_gate = lasagne.nonlinearities.sigmoid(activation)
# Output Gate output computation
activation = T.dot(input, self.W_in_to_output) + \
T.dot(reads, self.W_reads_to_output) + \
T.dot(hidden, self.W_hid_to_output)
if self.b_in_to_output is not None:
activation += self.b_in_to_output.dimshuffle('x', 0)
if self.b_reads_to_output is not None:
activation += self.b_reads_to_output.dimshuffle('x', 0)
if self.b_hid_to_output is not None:
activation += self.b_hid_to_output.dimshuffle('x', 0)
output_gate = lasagne.nonlinearities.sigmoid(activation)
# New candidate cell state computation
activation = T.dot(input, self.W_in_to_cell) + \
T.dot(reads, self.W_reads_to_cell) + \
T.dot(hidden, self.W_hid_to_cell)
if self.b_in_to_cell is not None:
activation += self.b_in_to_cell.dimshuffle('x', 0)
if self.b_reads_to_cell is not None:
activation += self.b_reads_to_cell.dimshuffle('x', 0)
if self.b_hid_to_cell is not None:
activation += self.b_hid_to_cell.dimshuffle('x', 0)
candidate_cell_state = lasagne.nonlinearities.tanh(activation)
# New cell state and hidden state computation
cell_state = cell * forget_gate + candidate_cell_state * input_gate
state = lasagne.nonlinearities.tanh(cell_state) * output_gate
return state, cell_state
def outputs_info(self, batch_size):
ones_vector = T.ones((batch_size, 1))
hid_init = T.dot(ones_vector, self.hid_init)
hid_init = T.unbroadcast(hid_init, 0)
cell_init = T.dot(ones_vector, self.cell_init)
cell_init = T.unbroadcast(cell_init, 0)
return [hid_init, cell_init]
class GRUController(Controller):
r"""
A GRU recurrent controller for the NTM.
.. math ::
update-gate = \sigma(x_{t} Wz_{x} + r_{t} Wz_{r} +
h_{t-1} Wz_{h} + bz_{x} + bz_{r} + bz_{h})
reset-gate = \sigma(x_{t} Wr_{x} + r_{t} Wr_{r} +
h_{t-1} Wr_{h} + br_{x} + br_{r} + br_{h})
        s = \tanh(x_{t} Ws_{x} + r_{t} Ws_{r} +
            (h_{t-1} \odot reset-gate) Ws_{h} + bs_{x} + bs_{r} + bs_{h})
h_{t} = (1 - update-gate) \odot s + update-gate \odot h_{t-1}
Parameters
----------
incoming: a :class:`lasagne.layers.Layer` instance
The layer feeding into the Neural Turing Machine.
memory_shape: tuple
Shape of the NTM's memory.
num_units: int
Number of hidden units in the controller.
num_reads: int
Number of read heads in the Neural Turing Machine.
    W_in_to_update: callable, Numpy array or Theano shared variable
If callable, initializer for the weights between the
input and the update gate. Otherwise a matrix with
shape ``(num_inputs, num_units)`` (:math:`Wz_{x}`).
b_in_to_update: callable, Numpy array, Theano shared variable or ``None``
If callable, initializer for the biases between the
input and the update gate. If ``None``, the controller
has no bias between the input and the update gate. Otherwise
a 1D array with shape ``(num_units,)`` (:math:`bz_{x}`).
W_reads_to_update: callable, Numpy array or Theano shared variable
If callable, initializer for the weights between the
read vector and the update gate. Otherwise a matrix with
shape ``(num_reads * memory_shape[1], num_units)`` (:math:`Wz_{r}`).
b_reads_to_update: callable, Numpy array, Theano shared variable or ``None``
If callable, initializer for the biases between the
read vector and the update gate. If ``None``, the controller
has no bias between the read vector and the update gate.
Otherwise a 1D array with shape ``(num_units,)`` (:math:`bz_{r}`).
W_hid_to_update: callable, Numpy array or Theano shared variable
If callable, initializer for the weights between the
hidden state and the update gate. Otherwise a matrix with
shape ``(num_units, num_units)`` (:math:`Wz_{h}`).
b_hid_to_update: callable, Numpy array, Theano shared variable or ``None``
If callable, initializer for the biases between the
hidden state and the update gate. If ``None``, the controller
has no bias between the hidden state and the update gate.
Otherwise a 1D array with shape ``(num_units,)`` (:math:`bz_{h}`).
W_in_to_reset: callable, Numpy array or Theano shared variable
If callable, initializer for the weights between the
input and the reset gate. Otherwise a matrix with
shape ``(num_inputs, num_units)`` (:math:`Wr_{x}`).
b_in_to_reset: callable, Numpy array, Theano shared variable or ``None``
If callable, initializer for the biases between the
input and the reset gate. If ``None``, the controller
has no bias between the input and the reset gate. Otherwise
a 1D array with shape ``(num_units,)`` (:math:`br_{x}`).
W_reads_to_reset: callable, Numpy array or Theano shared variable
If callable, initializer for the weights between the
read vector and the reset gate. Otherwise a matrix with
shape ``(num_reads * memory_shape[1], num_units)`` (:math:`Wr_{r}`).
b_reads_to_reset: callable, Numpy array, Theano shared variable or ``None``
If callable, initializer for the biases between the
read vector and the reset gate. If ``None``, the controller
has no bias between the read vector and the reset gate.
Otherwise a 1D array with shape ``(num_units,)`` (:math:`br_{r}`).
W_hid_to_reset: callable, Numpy array or Theano shared variable
If callable, initializer for the weights between the
hidden state and the reset gate. Otherwise a matrix with
shape ``(num_units, num_units)`` (:math:`Wr_{h}`).
b_hid_to_reset: callable, Numpy array, Theano shared variable or ``None``
If callable, initializer for the biases between the
hidden state and the reset gate. If ``None``, the controller
has no bias between the hidden state and the reset gate.
Otherwise a 1D array with shape ``(num_units,)`` (:math:`br_{h}`).
W_in_to_hid: callable, Numpy array or Theano shared variable
If callable, initializer for the weights between the
input and the hidden gate. Otherwise a matrix with
shape ``(num_inputs, num_units)`` (:math:`Ws_{x}`).
b_in_to_hid: callable, Numpy array, Theano shared variable or ``None``
If callable, initializer for the biases between the
input and the hidden gate. If ``None``, the controller
has no bias between the input and the hidden gate. Otherwise
a 1D array with shape ``(num_units,)`` (:math:`bs_{x}`).
W_reads_to_hid: callable, Numpy array or Theano shared variable
If callable, initializer for the weights between the
read vector and the hidden gate. Otherwise a matrix with
shape ``(num_reads * memory_shape[1], num_units)`` (:math:`Ws_{r}`).
b_reads_to_hid: callable, Numpy array, Theano shared variable or ``None``
If callable, initializer for the biases between the
read vector and the hidden gate. If ``None``, the controller
has no bias between the read vector and the hidden gate.
Otherwise a 1D array with shape ``(num_units,)`` (:math:`bs_{r}`).
W_hid_to_hid: callable, Numpy array or Theano shared variable
If callable, initializer for the weights between the
hidden state and the hidden gate. Otherwise a matrix with
shape ``(num_units, num_units)`` (:math:`Ws_{h}`).
b_hid_to_hid: callable, Numpy array, Theano shared variable or ``None``
If callable, initializer for the biases between the
hidden state and the hidden gate. If ``None``, the controller
has no bias between the hidden state and the hidden gate.
Otherwise a 1D array with shape ``(num_units,)`` (:math:`bs_{h}`).
hid_init: callable, np.ndarray or theano.shared
Initializer for the initial hidden state (:math:`h_{0}`).
learn_init: bool
If ``True``, initial hidden values are learned.
"""
def __init__(self, incoming, memory_shape, num_units, num_reads,
W_in_to_update=lasagne.init.GlorotUniform(),
b_in_to_update=lasagne.init.Constant(0.),
W_reads_to_update=lasagne.init.GlorotUniform(),
b_reads_to_update=lasagne.init.Constant(0.),
W_hid_to_update=lasagne.init.GlorotUniform(),
b_hid_to_update=lasagne.init.Constant(0.),
W_in_to_reset=lasagne.init.GlorotUniform(),
b_in_to_reset=lasagne.init.Constant(0.),
W_reads_to_reset=lasagne.init.GlorotUniform(),
b_reads_to_reset=lasagne.init.Constant(0.),
W_hid_to_reset=lasagne.init.GlorotUniform(),
b_hid_to_reset=lasagne.init.Constant(0.),
W_in_to_hid=lasagne.init.GlorotUniform(),
b_in_to_hid=lasagne.init.Constant(0.),
W_reads_to_hid=lasagne.init.GlorotUniform(),
b_reads_to_hid=lasagne.init.Constant(0.),
W_hid_to_hid=lasagne.init.GlorotUniform(),
b_hid_to_hid=lasagne.init.Constant(0.),
nonlinearity=lasagne.nonlinearities.rectify,
hid_init=lasagne.init.GlorotUniform(),
learn_init=False,
**kwargs):
super(GRUController, self).__init__(incoming, memory_shape, num_units,
num_reads, hid_init, learn_init,
**kwargs)
self.nonlinearity = (lasagne.nonlinearities.identity if
nonlinearity is None else nonlinearity)
def add_weight_and_bias_params(input_dim, W, b, name):
return (self.add_param(W, (input_dim, self.num_units),
name='W_{}'.format(name)),
self.add_param(b, (self.num_units,),
name='b_{}'.format(name)) if b is not None else None)
num_inputs = int(np.prod(self.input_shape[2:]))
# Inputs / Update Gate parameters
self.W_in_to_update, self.b_in_to_update = add_weight_and_bias_params(num_inputs,
W_in_to_update, b_in_to_update, name='in_to_update')
# Read vectors / Update Gate parameters
self.W_reads_to_update, self.b_reads_to_update = add_weight_and_bias_params(self.num_reads * self.memory_shape[1],
W_reads_to_update, b_reads_to_update, name='reads_to_update')
# Hidden / Update Gate parameters
self.W_hid_to_update, self.b_hid_to_update = add_weight_and_bias_params(self.num_units,
W_hid_to_update, b_hid_to_update, name='hid_to_update')
# Inputs / Reset Gate parameters
self.W_in_to_reset, self.b_in_to_reset = add_weight_and_bias_params(num_inputs,
W_in_to_reset, b_in_to_reset, name='in_to_reset')
# Read vectors / Reset Gate parameters
self.W_reads_to_reset, self.b_reads_to_reset = add_weight_and_bias_params(self.num_reads * self.memory_shape[1],
W_reads_to_reset, b_reads_to_reset, name='reads_to_reset')
# Hidden / Reset Gate parameters
self.W_hid_to_reset, self.b_hid_to_reset = add_weight_and_bias_params(self.num_units,
W_hid_to_reset, b_hid_to_reset, name='hid_to_reset')
# Inputs / Hidden Gate parameters
self.W_in_to_hid, self.b_in_to_hid = add_weight_and_bias_params(num_inputs,
W_in_to_hid, b_in_to_hid, name='in_to_hid')
# Read vectors / Hidden Gate parameters
self.W_reads_to_hid, self.b_reads_to_hid = add_weight_and_bias_params(self.num_reads * self.memory_shape[1],
W_reads_to_hid, b_reads_to_hid, name='reads_to_hid')
# Hidden / Hidden Gate parameters
self.W_hid_to_hid, self.b_hid_to_hid = add_weight_and_bias_params(self.num_units,
W_hid_to_hid, b_hid_to_hid, name='hid_to_hid')
def step(self, input, reads, hidden, *args):
if input.ndim > 2:
input = input.flatten(2)
if reads.ndim > 2:
reads = reads.flatten(2)
# Update Gate output computation
activation = T.dot(input, self.W_in_to_update) + \
T.dot(reads, self.W_reads_to_update) + \
T.dot(hidden, self.W_hid_to_update)
if self.b_in_to_update is not None:
activation += self.b_in_to_update.dimshuffle('x', 0)
if self.b_reads_to_update is not None:
activation += self.b_reads_to_update.dimshuffle('x', 0)
if self.b_hid_to_update is not None:
activation += self.b_hid_to_update.dimshuffle('x', 0)
update_gate = lasagne.nonlinearities.sigmoid(activation)
# Reset Gate output computation
activation = T.dot(input, self.W_in_to_reset) + \
T.dot(reads, self.W_reads_to_reset) + \
T.dot(hidden, self.W_hid_to_reset)
if self.b_in_to_reset is not None:
activation += self.b_in_to_reset.dimshuffle('x', 0)
if self.b_reads_to_reset is not None:
activation += self.b_reads_to_reset.dimshuffle('x', 0)
if self.b_hid_to_reset is not None:
activation += self.b_hid_to_reset.dimshuffle('x', 0)
reset_gate = lasagne.nonlinearities.sigmoid(activation)
# Hidden Gate output computation
activation = T.dot(input, self.W_in_to_hid) + \
T.dot(reads, self.W_reads_to_hid) + \
T.dot((hidden * reset_gate), self.W_hid_to_hid)
if self.b_in_to_hid is not None:
activation += self.b_in_to_hid.dimshuffle('x', 0)
if self.b_reads_to_hid is not None:
activation += self.b_reads_to_hid.dimshuffle('x', 0)
if self.b_hid_to_hid is not None:
activation += self.b_hid_to_hid.dimshuffle('x', 0)
hidden_gate = lasagne.nonlinearities.tanh(activation)
# New hidden state computation
ones = T.ones(update_gate.shape)
state = (ones - update_gate) * hidden_gate + update_gate * hidden
return state, state
def outputs_info(self, batch_size):
ones_vector = T.ones((batch_size, 1))
hid_init = T.dot(ones_vector, self.hid_init)
hid_init = T.unbroadcast(hid_init, 0)
return [hid_init, hid_init]
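# Illustrative usage sketch (not part of the original module): how one of the
# controllers above might be instantiated. The input-layer shape and the
# hyper-parameters below are assumptions; only the constructor signature comes
# from this file.
#
# >>> import lasagne
# >>> l_input = lasagne.layers.InputLayer((16, 10, 8))  # (batch, time, features)
# >>> controller = GRUController(l_input, memory_shape=(128, 20),
# ...                            num_units=100, num_reads=1)
# >>> # The enclosing NTM layer is then expected to call `controller.step(...)`
# >>> # once per time step, seeded with `controller.outputs_info(batch_size)`.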
| {
"content_hash": "389f41f53a01b8cbc4a8da9ab22cd98f",
"timestamp": "",
"source": "github",
"line_count": 758,
"max_line_length": 122,
"avg_line_length": 55.098944591029024,
"alnum_prop": 0.6092661319286484,
"repo_name": "snipsco/ntm-lasagne",
"id": "525eca495e65dd354bc619b9ab4ba13680861bc1",
"size": "41765",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ntm/controllers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "102276"
}
],
"symlink_target": ""
} |
"""Helper functions for package setup.
"""
import ast
import sys
import subprocess
import pkg_resources
class SetupHelper(object):
"""
SetupHelper streamlines the population of setup() function calls with
contents from __init__.py and README.md.
"""
def __init__(self, initfile="__init__.py", readmefile="README.md"):
self.author, self.email, self.license = self.get_init(initfile)
self.author = ", ".join(self.author)
self.long_description = self.get_long_description(readmefile)
def check_version(self, name, majorv=2, minorv=7):
"""Make sure the package runs on the supported Python version"""
if sys.version_info.major == majorv and sys.version_info.minor != minorv:
sys.stderr.write(
"ERROR: %s is only for >= Python %d.%d but you are running %d.%d\n"
% (name, majorv, minorv, sys.version_info.major, sys.version_info.minor)
)
sys.exit(1)
def get_init(self, filename="__init__.py"):
"""Get various info from the package without importing them"""
with open(filename) as init_file:
module = ast.parse(init_file.read())
itr = lambda x: (
ast.literal_eval(node.value)
for node in ast.walk(module)
if isinstance(node, ast.Assign) and node.targets[0].id == x
)
try:
return (
next(itr("__author__")),
next(itr("__email__")),
next(itr("__license__")),
)
except StopIteration as stop_exception:
raise ValueError(
"One of author, email, license cannot be found in {}".format(filename)
) from stop_exception
def missing_requirements(self, specifiers):
"""Find what's missing"""
for specifier in specifiers:
try:
pkg_resources.require(specifier)
except pkg_resources.DistributionNotFound:
yield specifier
def install_requirements(self, requires):
"""Install the listed requirements"""
# Temporarily install dependencies required by setup.py before trying to import them.
sys.path[0:0] = ["setup-requires"]
pkg_resources.working_set.add_entry("setup-requires")
to_install = list(self.missing_requirements(requires))
if to_install:
cmd = [
sys.executable,
"-m",
"pip",
"install",
"-t",
"setup-requires",
] + to_install
subprocess.call(cmd)
def get_long_description(self, filename="README.md"):
"""I really prefer Markdown to reStructuredText. PyPi does not."""
return open(filename).read()
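# Illustrative usage sketch from a package's setup.py (the package name and the
# requirement pin below are assumptions, not taken from this module):
#
# >>> helper = SetupHelper(initfile="mypkg/__init__.py", readmefile="README.md")
# >>> helper.check_version("mypkg", majorv=3, minorv=7)
# >>> helper.install_requirements(["numpy>=1.16"])
# >>> setup(name="mypkg",
# ...       author=helper.author,
# ...       author_email=helper.email,
# ...       license=helper.license,
# ...       long_description=helper.long_description)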
| {
"content_hash": "e96753c7e1bb8af7a5e16700b41b1458",
"timestamp": "",
"source": "github",
"line_count": 80,
"max_line_length": 93,
"avg_line_length": 35.2125,
"alnum_prop": 0.5686900958466453,
"repo_name": "tanghaibao/goatools",
"id": "fabadc9973325d107e06a92bbb3105eedc4f4fb8",
"size": "2864",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "setup_helper.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "316670"
},
{
"name": "Makefile",
"bytes": "25213"
},
{
"name": "Python",
"bytes": "146769147"
},
{
"name": "Shell",
"bytes": "1107"
}
],
"symlink_target": ""
} |
from collections import OrderedDict
import numpy as np
from np_model_base import NNModelBase
from utils import softmax, safe_macro_f1
__author__ = "Christopher Potts"
__version__ = "CS224u, Stanford, Spring 2022"
class RNNClassifier(NNModelBase):
"""
Simple Recurrent Neural Network for classification problems.
The structure of the network is as follows:
y
/|
b | W_hy
|
h_0 -- W_hh -- h_1 -- W_hh -- h_2 -- W_hh -- h_3
| | |
| W_xh | W_xh | W_xh
| | |
x_1 x_2 x_3
where x_i are the inputs, h_j are the hidden units, and y is a
one-hot vector indicating the true label for this sequence. The
parameters are W_xh, W_hh, W_hy, and the bias b. The inputs x_i
come from a user-supplied embedding space for the vocabulary. These
can either be random or pretrained. The network equations in brief:
h[t] = tanh(x[t].dot(W_xh) + h[t-1].dot(W_hh))
y = softmax(h[-1].dot(W_hy) + b)
The network will work for any kind of classification task.
Parameters
----------
vocab : list of str
This should be the vocabulary. It needs to be aligned with
`embedding` in the sense that the ith element of vocab
should be represented by the ith row of `embedding`. Ignored
if `use_embedding=False`.
embedding : np.array or None
Each row represents a word in `vocab`, as described above.
use_embedding : bool
If True, then incoming examples are presumed to be lists of
elements of the vocabulary. If False, then they are presumed
to be lists of vectors. In this case, the `embedding` and
`embed_dim` arguments are ignored, since no embedding is needed
and `embed_dim` is set by the nature of the incoming vectors.
embed_dim : int
Dimensionality for the initial embeddings. This is ignored
if `embedding` is not None, as a specified value there
determines this value. Also ignored if `use_embedding=False`.
All of the above are set as attributes. In addition, `self.embed_dim`
is set to the dimensionality of the input representations.
"""
def __init__(self,
vocab,
embedding=None,
use_embedding=True,
embed_dim=50,
**kwargs):
self.vocab = vocab
self.vocab_lookup = dict(zip(self.vocab, range(len(self.vocab))))
self.use_embedding = use_embedding
self._embed_dim = embed_dim
if self.use_embedding:
if embedding is None:
embedding = self._define_embedding_matrix(
len(self.vocab), embed_dim)
self.embedding = embedding
self._embed_dim = self.embedding.shape[1]
super().__init__(**kwargs)
self.params += ['embedding', 'embed_dim']
@property
def embed_dim(self):
return self._embed_dim
@embed_dim.setter
def embed_dim(self, value):
self._embed_dim = value
self.embedding = self._define_embedding_matrix(
len(self.vocab), value)
def fit(self, X, y):
if not self.use_embedding:
self._embed_dim = len(X[0][0])
return super().fit(X, y)
def initialize_parameters(self):
"""
Attributes
----------
output_dim : int
Set based on the length of the labels in `training_data`.
This happens in `self.prepare_output_data`.
W_xh : np.array
Dense connections between the word representations
and the hidden layers. Random initialization.
W_hh : np.array
Dense connections between the hidden representations.
Random initialization.
W_hy : np.array
Dense connections from the final hidden layer to
the output layer. Random initialization.
b : np.array
Output bias. Initialized to all 0.
"""
self.W_xh = self.weight_init(self.embed_dim, self.hidden_dim)
self.W_hh = self.weight_init(self.hidden_dim, self.hidden_dim)
self.W_hy = self.weight_init(self.hidden_dim, self.output_dim)
self.b = np.zeros(self.output_dim)
def forward_propagation(self, seq):
"""
Parameters
----------
seq : list
Variable length sequence of elements in the vocabulary.
Returns
----------
h : np.array
Each row is for a hidden representation. The first row
is an all-0 initial state. The others correspond to
the inputs in seq.
y : np.array
The vector of predictions.
"""
h = np.zeros((len(seq)+1, self.hidden_dim))
for t in range(1, len(seq)+1):
if self.use_embedding:
word_rep = self.get_word_rep(seq[t-1])
else:
word_rep = seq[t-1]
h[t] = self.hidden_activation(
word_rep.dot(self.W_xh) + h[t-1].dot(self.W_hh))
y = softmax(h[-1].dot(self.W_hy) + self.b)
return h, y
def backward_propagation(self, h, predictions, seq, labels):
"""
Parameters
----------
h : np.array, shape (m, self.hidden_dim)
Matrix of hidden states. `m` is the shape of the current
example (which is allowed to vary).
predictions : np.array, dimension `len(self.classes)`
Vector of predictions.
seq : list of lists
The original example.
labels : np.array, dimension `len(self.classes)`
One-hot vector giving the true label.
Returns
-------
tuple
The matrices of derivatives (d_W_hy, d_b, d_W_hh, d_W_xh).
"""
# Output errors:
y_err = predictions
y_err[np.argmax(labels)] -= 1
h_err = y_err.dot(self.W_hy.T) * self.d_hidden_activation(h[-1])
d_W_hy = np.outer(h[-1], y_err)
d_b = y_err
# For accumulating the gradients through time:
d_W_hh = np.zeros(self.W_hh.shape)
d_W_xh = np.zeros(self.W_xh.shape)
# Back-prop through time; the +1 is because the 0th
# hidden state is the all-0s initial state.
num_steps = len(seq)+1
for t in reversed(range(1, num_steps)):
d_W_hh += np.outer(h[t], h_err)
if self.use_embedding:
word_rep = self.get_word_rep(seq[t-1])
else:
word_rep = seq[t-1]
d_W_xh += np.outer(word_rep, h_err)
h_err = h_err.dot(self.W_hh.T) * self.d_hidden_activation(h[t])
return (d_W_hy, d_b, d_W_hh, d_W_xh)
def update_parameters(self, gradients):
d_W_hy, d_b, d_W_hh, d_W_xh = gradients
self.W_hy -= self.eta * d_W_hy
self.b -= self.eta * d_b
self.W_hh -= self.eta * d_W_hh
self.W_xh -= self.eta * d_W_xh
def score(self, X, y):
preds = self.predict(X)
return safe_macro_f1(y, preds)
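# Illustrative sketch (not from the original file): supplying a custom embedding
# matrix instead of the default random one. Given a `vocab` list, any array of
# shape (len(vocab), d) works; the dimensions here are arbitrary.
#
# >>> pretrained = np.random.normal(size=(len(vocab), 50))
# >>> mod = RNNClassifier(vocab, embedding=pretrained)
# >>> # With `use_embedding=False`, examples are lists of vectors instead:
# >>> # mod = RNNClassifier(vocab, use_embedding=False)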
def simple_example():
from sklearn.metrics import accuracy_score
import utils
utils.fix_random_seeds()
vocab = ['a', 'b', '$UNK']
# No b before an a
train = [
[list('ab'), 'good'],
[list('aab'), 'good'],
[list('abb'), 'good'],
[list('aabb'), 'good'],
[list('ba'), 'bad'],
[list('baa'), 'bad'],
[list('bba'), 'bad'],
[list('bbaa'), 'bad'],
[list('aba'), 'bad']]
test = [
[list('baaa'), 'bad'],
[list('abaa'), 'bad'],
[list('bbaa'), 'bad'],
[list('aaab'), 'good'],
[list('aaabb'), 'good']]
X_train, y_train = zip(*train)
X_test, y_test = zip(*test)
mod = RNNClassifier(vocab)
print(mod)
mod.fit(X_train, y_train)
preds = mod.predict(X_test)
print("\nPredictions:")
for ex, pred, gold in zip(X_test, preds, y_test):
score = "correct" if pred == gold else "incorrect"
print("{0:>6} - predicted: {1:>4}; actual: {2:>4} - {3}".format(
"".join(ex), pred, gold, score))
return accuracy_score(y_test, preds)
if __name__ == '__main__':
simple_example()
| {
"content_hash": "fe32abf8d5e69f23b630e6a209633d52",
"timestamp": "",
"source": "github",
"line_count": 263,
"max_line_length": 75,
"avg_line_length": 32.47148288973384,
"alnum_prop": 0.534192037470726,
"repo_name": "cgpotts/cs224u",
"id": "9f5c206053528e475fee0195408922ab68817537",
"size": "8540",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "np_rnn_classifier.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "4297079"
},
{
"name": "Python",
"bytes": "364552"
}
],
"symlink_target": ""
} |
import unittest
from airflow import DAG, configuration, models
from airflow.contrib.sensors.weekday_sensor import DayOfWeekSensor
from airflow.contrib.utils.weekday import WeekDay
from airflow.exceptions import AirflowSensorTimeout
from airflow.models import DagBag, TaskFail
from airflow.settings import Session
from airflow.utils.timezone import datetime
DEFAULT_DATE = datetime(2018, 12, 10)
WEEKDAY_DATE = datetime(2018, 12, 20)
WEEKEND_DATE = datetime(2018, 12, 22)
TEST_DAG_ID = 'weekday_sensor_dag'
DEV_NULL = '/dev/null'
class DayOfWeekSensorTests(unittest.TestCase):
def setUp(self):
configuration.load_test_config()
self.dagbag = DagBag(
dag_folder=DEV_NULL,
include_examples=True
)
self.args = {
'owner': 'airflow',
'start_date': DEFAULT_DATE
}
dag = DAG(TEST_DAG_ID, default_args=self.args)
self.dag = dag
def tearDown(self):
session = Session()
session.query(models.TaskInstance).filter_by(
dag_id=TEST_DAG_ID).delete()
session.query(TaskFail).filter_by(
dag_id=TEST_DAG_ID).delete()
session.commit()
session.close()
def test_weekday_sensor_true(self):
t = DayOfWeekSensor(
task_id='weekday_sensor_check_true',
week_day='Thursday',
use_task_execution_day=True,
dag=self.dag)
t.run(start_date=WEEKDAY_DATE, end_date=WEEKDAY_DATE, ignore_ti_state=True)
def test_weekday_sensor_false(self):
t = DayOfWeekSensor(
task_id='weekday_sensor_check_false',
poke_interval=1,
timeout=2,
week_day='Tuesday',
use_task_execution_day=True,
dag=self.dag)
with self.assertRaises(AirflowSensorTimeout):
t.run(start_date=WEEKDAY_DATE, end_date=WEEKDAY_DATE, ignore_ti_state=True)
def test_invalid_weekday_number(self):
invalid_week_day = 'Thsday'
with self.assertRaisesRegexp(AttributeError,
'Invalid Week Day passed: "{}"'.format(
invalid_week_day)):
DayOfWeekSensor(
task_id='weekday_sensor_invalid_weekday_num',
week_day=invalid_week_day,
use_task_execution_day=True,
dag=self.dag)
def test_weekday_sensor_with_enum(self):
week_day = WeekDay.THURSDAY
t = DayOfWeekSensor(
task_id='weekday_sensor_check_true',
week_day=WeekDay.THURSDAY,
use_task_execution_day=True,
dag=self.dag)
t.run(start_date=WEEKDAY_DATE, end_date=WEEKDAY_DATE, ignore_ti_state=True)
self.assertEqual(t.week_day, week_day)
def test_weekday_sensor_with_enum_set(self):
week_day = {WeekDay.THURSDAY}
t = DayOfWeekSensor(
task_id='weekday_sensor_check_true',
week_day=week_day,
use_task_execution_day=True,
dag=self.dag)
t.run(start_date=WEEKDAY_DATE, end_date=WEEKDAY_DATE, ignore_ti_state=True)
self.assertEqual(t.week_day, week_day)
def test_weekday_sensor_with_enum_set_2_items(self):
week_day = {WeekDay.THURSDAY, WeekDay.FRIDAY}
t = DayOfWeekSensor(
task_id='weekday_sensor_check_true',
week_day=week_day,
use_task_execution_day=True,
dag=self.dag)
t.run(start_date=WEEKDAY_DATE, end_date=WEEKDAY_DATE, ignore_ti_state=True)
self.assertEqual(t.week_day, week_day)
def test_weekday_sensor_with_string_set(self):
week_day = {'Thursday'}
t = DayOfWeekSensor(
task_id='weekday_sensor_check_true',
week_day=week_day,
use_task_execution_day=True,
dag=self.dag)
t.run(start_date=WEEKDAY_DATE, end_date=WEEKDAY_DATE, ignore_ti_state=True)
self.assertEqual(t.week_day, week_day)
def test_weekday_sensor_with_string_set_2_items(self):
week_day = {'Thursday', 'Friday'}
t = DayOfWeekSensor(
task_id='weekday_sensor_check_true',
week_day=week_day,
use_task_execution_day=True,
dag=self.dag)
t.run(start_date=WEEKDAY_DATE, end_date=WEEKDAY_DATE, ignore_ti_state=True)
self.assertEqual(t.week_day, week_day)
def test_weekday_sensor_with_invalid_type(self):
invalid_week_day = ['Thsday']
with self.assertRaisesRegexp(TypeError,
'Unsupported Type for week_day parameter:'
' {}. It should be one of str, set or '
'Weekday enum type'.format(type(invalid_week_day))
):
DayOfWeekSensor(
task_id='weekday_sensor_check_true',
week_day=invalid_week_day,
use_task_execution_day=True,
dag=self.dag)
def test_weekday_sensor_timeout_with_set(self):
t = DayOfWeekSensor(
task_id='weekday_sensor_check_false',
poke_interval=1,
timeout=2,
week_day={WeekDay.MONDAY, WeekDay.TUESDAY},
use_task_execution_day=True,
dag=self.dag)
with self.assertRaises(AirflowSensorTimeout):
t.run(start_date=WEEKDAY_DATE, end_date=WEEKDAY_DATE, ignore_ti_state=True)
| {
"content_hash": "53645a46e5dd218124fe4566f7bcc2e9",
"timestamp": "",
"source": "github",
"line_count": 143,
"max_line_length": 87,
"avg_line_length": 38.41258741258741,
"alnum_prop": 0.5876570180229382,
"repo_name": "r39132/airflow",
"id": "ac8a3354e6310445b55137d99be4174b9702bac3",
"size": "6307",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/contrib/sensors/test_weekday_sensor.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "12126"
},
{
"name": "Dockerfile",
"bytes": "4111"
},
{
"name": "HTML",
"bytes": "128531"
},
{
"name": "JavaScript",
"bytes": "22118"
},
{
"name": "Mako",
"bytes": "1284"
},
{
"name": "Python",
"bytes": "5928206"
},
{
"name": "Shell",
"bytes": "41869"
}
],
"symlink_target": ""
} |
from tastypie.resources import ModelResource
from tastypie.constants import ALL
from tastypie import fields
from kvtags.models import *
class TagResource(ModelResource):
kv_pairs = fields.ToManyField('kvtags.api.KeyValueResource', 'kv_pairs', full=True)
class Meta:
queryset = Tag.objects.all()
filtering = {
"key": ALL
}
resource_name = 'tag'
excludes = ['created', 'updated']
include_resource_uri = False
class KeyValueResource(ModelResource):
tag = fields.ForeignKey(TagResource, 'tag')
class Meta:
queryset = KeyValue.objects.all()
filtering = {
"tag": ALL,
"key": ALL,
"value": ALL
}
resource_name = 'tag-kv'
include_resource_uri = False
class TaggedItemResource(ModelResource):
tag = fields.ForeignKey(TagResource, 'tag')
class Meta:
queryset = TaggedItem.objects.all()
resource_name = 'tagged-item'
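# Illustrative wiring sketch (the urls.py side below is an assumption, not part
# of this module): these resources would typically be registered on a tastypie
# `Api` instance and exposed through `urlpatterns`.
#
# >>> from tastypie.api import Api
# >>> v1_api = Api(api_name='v1')
# >>> v1_api.register(TagResource())
# >>> v1_api.register(KeyValueResource())
# >>> v1_api.register(TaggedItemResource())
# >>> # e.g. GET <prefix>/tag/?key=color then filters on the `key` field enabled
# >>> # above; the URL prefix depends on where the Api is mounted.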
| {
"content_hash": "3013284e60319eb8129f24df23cb8820",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 87,
"avg_line_length": 25.41025641025641,
"alnum_prop": 0.6185671039354188,
"repo_name": "ozen/django-kvtags",
"id": "029223c4fcbde71cf8c3b8ded6815bb47055c97a",
"size": "991",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kvtags/api.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "860"
},
{
"name": "Python",
"bytes": "30283"
}
],
"symlink_target": ""
} |
import os
COV = None
if os.environ.get('FLASK_COVERAGE'):
import coverage
COV = coverage.coverage(branch=True, include='app/*')
COV.start()
from app import create_app, db
from app.models import Category, Item, User, Message
from flask.ext.script import Manager, Shell
from flask.ext.migrate import Migrate, MigrateCommand
app = create_app(os.getenv('FLASK_CONFIG') or 'default')
manager = Manager(app)
migrate = Migrate(app, db)
def make_shell_context():
return dict(app=app, db=db, Category=Category, Item=Item, User=User,
Message=Message)
manager.add_command("shell", Shell(make_context=make_shell_context))
manager.add_command('db', MigrateCommand)
@manager.command
def test(coverage=False):
"""Run the unit/integration tests. With coverage also functional tests"""
path = os.path.dirname(os.path.abspath(__file__))
if coverage and not os.environ.get('FLASK_COVERAGE'):
import sys
os.environ['FLASK_COVERAGE'] = '1'
os.execvp(sys.executable, [sys.executable] + sys.argv)
import unittest
tests_dir = os.path.join(path,"tests")
tests = unittest.TestLoader().discover(tests_dir)
unittest.TextTestRunner(verbosity=2).run(tests)
if COV:
COV.stop()
COV.save()
print('Coverage Summary:')
COV.report()
basedir = os.path.abspath(os.path.dirname(__file__))
covdir = os.path.join(basedir, 'tmp/coverage')
COV.html_report(directory=covdir)
print('HTML version: file://%s/index.html' % covdir)
COV.erase()
@manager.command
def acceptance_test():
"""Run the functional tests."""
import unittest
path = os.path.dirname(os.path.abspath(__file__))
tests_dir = os.path.join(path,"tests/functional")
tests = unittest.TestLoader().discover(tests_dir)
unittest.TextTestRunner(verbosity=2).run(tests)
@manager.command
def deploy():
"""Run deployment tasks."""
from flask.ext.migrate import upgrade
# migrate database to latest revision
upgrade()
# create categories
Category.insert_categories()
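# Illustrative invocations (a sketch; command names come from the functions and
# `add_command` calls above, and the `--coverage` flag assumes Flask-Script's
# default boolean-option mapping):
#
#   python manage.py test                # unit/integration tests
#   python manage.py test --coverage     # also runs functional tests with coverage
#   python manage.py acceptance_test     # functional tests only
#   python manage.py deploy              # upgrade the db and insert categories
#   python manage.py shell               # shell preloaded with app, db and models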
if __name__ == '__main__':
manager.run() | {
"content_hash": "ef19c1d9c3fd27a0ee6c8c95653868eb",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 75,
"avg_line_length": 27.62162162162162,
"alnum_prop": 0.7025440313111546,
"repo_name": "rosariomgomez/tradyfit",
"id": "f835d6b487798b46a9135a8e3be93abc283a76ff",
"size": "2090",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vagrant/tradyfit/manage.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "8440"
},
{
"name": "HTML",
"bytes": "32116"
},
{
"name": "JavaScript",
"bytes": "776"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "170903"
},
{
"name": "Ruby",
"bytes": "546"
},
{
"name": "Shell",
"bytes": "564"
}
],
"symlink_target": ""
} |
"""Run the py->rst conversion and run all examples.
Steps are:
analyze example index file for example py filenames
check for any filenames in example directory not included
do py to rst conversion, writing into build directory
run
"""
# -----------------------------------------------------------------------------
# Library imports
# -----------------------------------------------------------------------------
# Stdlib imports
import os
from os.path import join as pjoin, abspath
import sys
import shutil
from subprocess import check_call
from glob import glob
# Third-party imports
# We must configure the mpl backend before making any further mpl imports
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from matplotlib._pylab_helpers import Gcf
# -----------------------------------------------------------------------------
# Function definitions
# -----------------------------------------------------------------------------
# These global variables let show() be called by the scripts in the usual
# manner, but when generating examples, we override it to write the figures to
# files with a known name (derived from the script name) plus a counter
figure_basename = None
# We must change the show command to save instead
def show():
allfm = Gcf.get_all_fig_managers()
for fcount, fm in enumerate(allfm):
fm.canvas.figure.savefig('%s_%02i.png' %
(figure_basename, fcount + 1))
_mpl_show = plt.show
plt.show = show
# -----------------------------------------------------------------------------
# Main script
# -----------------------------------------------------------------------------
# Where things are
EG_INDEX_FNAME = abspath('examples_index.rst')
EG_SRC_DIR = abspath('examples')
# Work in examples directory
os.chdir('examples_built')
if not os.getcwd().endswith(pjoin('doc', 'examples_built')):
raise OSError('This must be run from the doc directory')
# Copy the py files; check they are in the examples list and warn if not
eg_index_contents = open(EG_INDEX_FNAME, 'rt').read()
# Here I am adding an extra step. The list of examples to be executed also
# needs to be added to the following file (valid_examples.txt). This helps
# with debugging the examples and the documentation with only a few examples
# at a time.
flist_name = pjoin(os.path.dirname(os.getcwd()), 'examples',
'valid_examples.txt')
flist = open(flist_name, "r")
validated_examples = flist.readlines()
flist.close()
# Parse "#" in lines
validated_examples = [line.split("#", 1)[0] for line in validated_examples]
# Remove leading and trailing white space from example names
validated_examples = [line.strip() for line in validated_examples]
# Remove blank lines
validated_examples = list(filter(None, validated_examples))
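# Illustrative `valid_examples.txt` contents (the file names are hypothetical);
# the parsing above strips "#" comments, surrounding whitespace and blank lines:
#
#   quickstart_example.py
#   tracking_example.py   # trailing comments are ignored
#   # a fully commented-out line is skipped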
for example in validated_examples:
fullpath = pjoin(EG_SRC_DIR, example)
if not example.endswith(".py"):
print("%s not a python file, skipping." % example)
continue
elif not os.path.isfile(fullpath):
print("Cannot find file, %s, skipping." % example)
continue
shutil.copyfile(fullpath, example)
# Check that example file is included in the docs
file_root = example[:-3]
if file_root not in eg_index_contents:
msg = "Example, %s, not in index file %s."
msg = msg % (example, EG_INDEX_FNAME)
print(msg)
# Run the conversion from .py to rst file
check_call('python ../../tools/ex2rst --project dipy --outdir . .', shell=True)
# added the path so that scripts can import other scripts on the same directory
sys.path.insert(0, os.getcwd())
# Execute each python script in the directory.
if not os.path.isdir('fig'):
os.mkdir('fig')
use_xvfb = os.environ.get('TEST_WITH_XVFB', False)
if use_xvfb:
from xvfbwrapper import Xvfb
display = Xvfb(width=1920, height=1080)
display.start()
for script in validated_examples:
namespace = {}
figure_basename = os.path.join('fig', os.path.splitext(script)[0])
print(script)
exec(open(script).read(), namespace)
plt.close('all')
del namespace
if use_xvfb:
display.stop()
# clean up stray images, pickles, npy files, etc
for globber in ('*.nii.gz', '*.dpy', '*.npy', '*.pkl', '*.mat', '*.img',
'*.hdr'):
for fname in glob(globber):
os.unlink(fname)
| {
"content_hash": "6d0d7886b454803086c9d57e5f41afca",
"timestamp": "",
"source": "github",
"line_count": 132,
"max_line_length": 79,
"avg_line_length": 33.15909090909091,
"alnum_prop": 0.6154900616860863,
"repo_name": "villalonreina/dipy",
"id": "d34ab28ff7ce359d775f568201fcca1352ee9177",
"size": "4399",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "tools/make_examples.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "2932"
},
{
"name": "Makefile",
"bytes": "3639"
},
{
"name": "Python",
"bytes": "3100258"
}
],
"symlink_target": ""
} |
"""Grainbin tasks module."""
from celery.utils.log import get_task_logger
from fm_server.celery_runner import app
from .grainbin_update import process_grainbin_update
LOGGER = get_task_logger("fm.grainbin.tasks")
@app.task(name="grainbin.update")
def grainbin_update(info):
"""Celery task for grainbin update messages."""
LOGGER.debug(f"Received grainbin update from {info['name']}")
return_code = process_grainbin_update(info)
if return_code is True:
LOGGER.debug(f"Processed grainbin update from {info['name']}")
else:
LOGGER.error(f"Failed to process grainbin update from {info['name']}")
return return_code
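# Illustrative producer-side sketch (the payload fields other than "name" are
# assumptions about what `process_grainbin_update` expects):
#
# >>> from fm_server.celery_runner import app
# >>> app.send_task("grainbin.update", args=[{"name": "grainbin_01"}])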
| {
"content_hash": "f520de93c8764b9f01d3bd6a5b0efd4b",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 78,
"avg_line_length": 27.5,
"alnum_prop": 0.7090909090909091,
"repo_name": "nstoik/farm_monitor",
"id": "11dbfc0f4e04a20a0c0037d4aafff7c6ed1b13aa",
"size": "660",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "server/fm_server/grainbin/tasks.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "18259"
},
{
"name": "HCL",
"bytes": "1534"
},
{
"name": "HTML",
"bytes": "611"
},
{
"name": "JavaScript",
"bytes": "268"
},
{
"name": "Mako",
"bytes": "494"
},
{
"name": "Python",
"bytes": "242717"
},
{
"name": "Shell",
"bytes": "2937"
},
{
"name": "TypeScript",
"bytes": "18970"
},
{
"name": "Vue",
"bytes": "14394"
}
],
"symlink_target": ""
} |
def post_save(sender, instance, created, **kwargs):
"""
Receives a signal just after the object is saved.
"""
if created:
instance.at_first_save()
def remove_attributes_on_delete(sender, instance, **kwargs):
instance.db_attributes.all().delete()
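# Illustrative wiring sketch (not part of this module): the receivers above are
# meant to be connected elsewhere; the sender model and the choice of pre_delete
# below are assumptions.
#
# >>> from django.db.models.signals import post_save as post_save_signal, pre_delete
# >>> post_save_signal.connect(post_save, sender=SomeTypeclassedModel)
# >>> pre_delete.connect(remove_attributes_on_delete, sender=SomeTypeclassedModel)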
| {
"content_hash": "71d558a29b9478697ea36400654c8950",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 60,
"avg_line_length": 27.6,
"alnum_prop": 0.6594202898550725,
"repo_name": "feend78/evennia",
"id": "49a24570664b08c91c43c7a4433ce95f41ff3ed6",
"size": "318",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "evennia/typeclasses/signals.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "42859"
},
{
"name": "Emacs Lisp",
"bytes": "2734"
},
{
"name": "HTML",
"bytes": "20118"
},
{
"name": "JavaScript",
"bytes": "32388"
},
{
"name": "Python",
"bytes": "2734770"
},
{
"name": "Shell",
"bytes": "4237"
}
],
"symlink_target": ""
} |
"""Fake HPE client for testing LeftHand without installing the client."""
import sys
import mock
from cinder.tests.unit import fake_hpe_client_exceptions as hpeexceptions
hpelefthand = mock.Mock()
hpelefthand.version = "2.1.0"
hpelefthand.exceptions = hpeexceptions
sys.modules['hpelefthandclient'] = hpelefthand
| {
"content_hash": "98f22378464cfa8a98f26b1743e839f6",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 73,
"avg_line_length": 24.46153846153846,
"alnum_prop": 0.789308176100629,
"repo_name": "bswartz/cinder",
"id": "2360de0977f75d0a0de6ebb79c71914e26ac2e44",
"size": "988",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "cinder/tests/unit/fake_hpe_lefthand_client.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "16345375"
},
{
"name": "Shell",
"bytes": "8187"
}
],
"symlink_target": ""
} |
DESCRIPTION = "PyAnalog - Weather forecasting with analogous past dates."
DISTNAME = 'pyanalog'
MAINTAINER = 'Francisco M. Alvarez'
MAINTAINER_EMAIL = '[email protected]'
URL = 'https://github.com/mogismog/PyAnalog'
LICENSE = 'Apache v2 License'
DOWNLOAD_URL = 'https://github.com/mogismog/PyAnalog'
VERSION = '0.1 dev'
def configuration(parent_package='', top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('pyanalog', parent_package, top_path)
config.add_extension('fortran_analog',
sources=['./pyanalog/analog/fortran_routines.f90', './pyanalog/analog/fortran_analog.pyf',
])
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(maintainer=MAINTAINER,
maintainer_email=MAINTAINER_EMAIL,
description=DESCRIPTION,
license=LICENSE,
url=URL,
version=VERSION,
download_url=DOWNLOAD_URL,
packages = ['pyanalog','pyanalog.analog'],
author='Francisco Alvarez',
author_email='[email protected]',
install_requires=["numpy"],
**configuration(top_path='').todict())
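# Illustrative build/install steps (a sketch; an in-place build is one common
# way to compile the f2py extension declared in `configuration` above):
#
#   python setup.py build_ext --inplace   # builds pyanalog.fortran_analog
#   python setup.py install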
| {
"content_hash": "0d057671ec3bb82d1004a99c8c227ebd",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 115,
"avg_line_length": 35.542857142857144,
"alnum_prop": 0.6463022508038585,
"repo_name": "mogismog/PyAnalog",
"id": "728d1cf1e343fdba5d301ab01cce4600a39ddbfa",
"size": "1346",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "FORTRAN",
"bytes": "37287"
},
{
"name": "Python",
"bytes": "18976"
}
],
"symlink_target": ""
} |
import datetime
import time
from gppylib.operations.segment_reconfigurer import SegmentReconfigurer, FTS_PROBE_QUERY
from gppylib.test.unit.gp_unittest import GpTestCase
import pg
import pgdb
import mock
from mock import Mock, patch, call, MagicMock
import contextlib
class MyDbUrl:
pass
class SegmentReconfiguerTestCase(GpTestCase):
db = 'database'
host = 'mdw'
port = 15432
user = 'postgres'
passwd = 'passwd'
timeout = 30
def setUp(self):
self.conn = Mock(name='conn')
self.logger = Mock()
self.worker_pool = Mock()
self.db_url = db_url = MyDbUrl()
db_url.pgdb = self.db
db_url.pghost = self.host
db_url.pgport = self.port
db_url.pguser = self.user
db_url.pgpass = self.passwd
self.connect = MagicMock()
cm = contextlib.nested(
patch('gppylib.db.dbconn.connect', new=self.connect),
patch('gppylib.db.dbconn.DbURL', return_value=self.db_url),
patch('pg.connect'),
)
cm.__enter__()
self.cm = cm
def tearDown(self):
self.cm.__exit__(None, None, None)
def test_it_triggers_fts_probe(self):
reconfigurer = SegmentReconfigurer(logger=self.logger,
worker_pool=self.worker_pool, timeout=self.timeout)
reconfigurer.reconfigure()
pg.connect.assert_has_calls([
call(self.db, self.host, self.port, None, self.user, self.passwd),
call().query(FTS_PROBE_QUERY),
call().close(),
]
)
def test_it_retries_the_connection(self):
self.connect.configure_mock(side_effect=[pgdb.DatabaseError, pgdb.DatabaseError, self.conn])
reconfigurer = SegmentReconfigurer(logger=self.logger,
worker_pool=self.worker_pool, timeout=self.timeout)
reconfigurer.reconfigure()
self.connect.assert_has_calls([call(self.db_url), call(self.db_url), call(self.db_url), ])
self.conn.close.assert_any_call()
@patch('time.time')
def test_it_gives_up_after_30_seconds(self, now_mock):
start_datetime = datetime.datetime(2018, 5, 9, 16, 0, 0)
start_time = time.mktime(start_datetime.timetuple())
now_mock.configure_mock(return_value=start_time)
def fail_for_half_a_minute():
new_time = start_time
for i in xrange(2):
# leap forward 15 seconds
new_time += self.timeout / 2
now_mock.configure_mock(return_value=new_time)
yield pgdb.DatabaseError
self.connect.configure_mock(side_effect=fail_for_half_a_minute())
reconfigurer = SegmentReconfigurer(logger=self.logger,
worker_pool=self.worker_pool, timeout=self.timeout)
with self.assertRaises(RuntimeError) as context:
reconfigurer.reconfigure()
self.assertEqual("Mirror promotion did not complete in {0} seconds.".format(self.timeout), context.exception.message)
self.connect.assert_has_calls([call(self.db_url), call(self.db_url), ])
self.conn.close.assert_has_calls([])
| {
"content_hash": "b556d0cb93d13e01a3649aea18b1b04d",
"timestamp": "",
"source": "github",
"line_count": 94,
"max_line_length": 129,
"avg_line_length": 33.797872340425535,
"alnum_prop": 0.6185080264400378,
"repo_name": "jmcatamney/gpdb",
"id": "7db1c70d0e7d40578faf43a0d5b3e736ea796cf7",
"size": "3177",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gpMgmt/bin/gppylib/operations/test/unit/test_unit_segment_reconfigurer.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "3724"
},
{
"name": "Awk",
"bytes": "836"
},
{
"name": "Batchfile",
"bytes": "12854"
},
{
"name": "C",
"bytes": "42498841"
},
{
"name": "C++",
"bytes": "14366259"
},
{
"name": "CMake",
"bytes": "38452"
},
{
"name": "Csound Score",
"bytes": "223"
},
{
"name": "DTrace",
"bytes": "3873"
},
{
"name": "Dockerfile",
"bytes": "11932"
},
{
"name": "Emacs Lisp",
"bytes": "3488"
},
{
"name": "Fortran",
"bytes": "14863"
},
{
"name": "GDB",
"bytes": "576"
},
{
"name": "Gherkin",
"bytes": "335208"
},
{
"name": "HTML",
"bytes": "53484"
},
{
"name": "JavaScript",
"bytes": "23969"
},
{
"name": "Lex",
"bytes": "229556"
},
{
"name": "M4",
"bytes": "111147"
},
{
"name": "Makefile",
"bytes": "496239"
},
{
"name": "Objective-C",
"bytes": "38376"
},
{
"name": "PLpgSQL",
"bytes": "8009512"
},
{
"name": "Perl",
"bytes": "798767"
},
{
"name": "PowerShell",
"bytes": "422"
},
{
"name": "Python",
"bytes": "3000118"
},
{
"name": "Raku",
"bytes": "698"
},
{
"name": "Roff",
"bytes": "32437"
},
{
"name": "Ruby",
"bytes": "77585"
},
{
"name": "SCSS",
"bytes": "339"
},
{
"name": "Shell",
"bytes": "451713"
},
{
"name": "XS",
"bytes": "6983"
},
{
"name": "Yacc",
"bytes": "674092"
},
{
"name": "sed",
"bytes": "1231"
}
],
"symlink_target": ""
} |
import base64
import json
import math
import time

# project-local helper assumed importable as-is (used below for APP_FILES)
import load_files


class SprinklerQueueElementControl(object):

   def __init__(self, redis_handle, io_control, alarm_queue, counter_devices):
       self.redis = redis_handle
self.alarm_queue = alarm_queue
self.io_control = io_control
self.counter_devices = counter_devices
self.app_files = load_files.APP_FILES(redis_handle)
       self.redis.hset("CONTROL_VARIBALES","MAX_FLOW_TIME",0)
def check_for_excessive_flow_rate( self,*args ):
       flow_value = float( self.check_redis_value( "global_flow_sensor_corrected" ) )
       max_flow = float( self.check_redis_value( "FLOW_CUT_OFF"))
if max_flow == 0:
return # feature is not turned on
compact_data = self.redis.lindex( "QUEUES:SPRINKLER:IRRIGATION_CELL_QUEUE",0 )
json_string = base64.b64decode(compact_data)
json_object = json.loads(json_string)
run_time = int( json_object["run_time"])
elasped_time = int(json_object["elasped_time"])
schedule_step = int(json_object["step"])
step_number = json_object["step"]
schedule_name = json_object["schedule_name"]
if elasped_time < 3 :
return # let flow stabilize
if flow_value > max_flow:
over_load_time = int(self.redis.hget("CONTROL_VARIBALES","MAX_FLOW_TIME")) +1
if over_load_time > 2:
self.redis.hset("CONTROL_VARIABLES","SKIP_STATION","ON")
self.alarm_queue.store_past_action_queue("IRRIGATION:FLOW_ABORT","RED", { "schedule_name":json_object["schedule_name"],"step_number":json_object["step"],
"flow_value":flow_value,"max_flow":max_flow } )
self.redis.hset("CONTROL_VARIBALES","MAX_FLOW_TIME",0)
else:
self.redis.hset("CONTROL_VARIBALES","MAX_FLOW_TIME",over_load_time)
else:
self.redis.hset("CONTROL_VARIBALES","MAX_FLOW_TIME",0)
def check_redis_value( self,key):
       value = self.redis.hget( "CONTROL_VARIABLES",key )
if value == None:
value = 0
return value
def check_current(self,*args):
compact_data = self.redis.lindex( "QUEUES:SPRINKLER:IRRIGATION_CELL_QUEUE",0 )
json_string = base64.b64decode(compact_data)
#print "json_string",json_string
json_object = json.loads(json_string)
temp = float( self.redis.hget( "CONTROL_VARIABLES","coil_current" ))
print "check_current temp",temp
if temp > 24:
self.redis.hset("CONTROL_VARIABLES","SKIP_STATION","ON")
self.clean_up_irrigation_cell( json_object )
self.alarm_queue.store_past_action_queue("IRRIGATION:CURRENT_ABORT","RED", { "schedule_name":json_object["schedule_name"],"step_number":json_object["step"] } )
return "RESET"
else:
return "DISABLE"
def start(self, *args ):
#print "start ------------------------------------------------->"
self.redis.hset("CONTROL_VARIBALES","MAX_FLOW_TIME",0)
compact_data = self.redis.lindex( "QUEUES:SPRINKLER:IRRIGATION_CELL_QUEUE",0 )
json_string = base64.b64decode(compact_data)
json_object = json.loads(json_string)
if self.check_redis_value("SUSPEND") == "ON":
#self.log_start_step( schedule_name, json_object["step"])
#self.io_control.turn_off_io(json_object["io_setup"])
#self.io_control.disable_all_sprinklers()
return # System is not processing commands right now
#print "start --- #1"
self.redis.hset("CONTROL_VARIABLES","SKIP_STATION","OFF")
#print "made it here"
run_time = int( json_object["run_time"])
elasped_time = int(json_object["elasped_time"])
schedule_step = int(json_object["step"])
step_number = json_object["step"]
schedule_name = json_object["schedule_name"]
#print "run_time",run_time
if json_object["eto_enable"] == True:
run_time = self.eto_update( run_time , json_object["io_setup"] )
#print "start ---#2 runtime",run_time
if run_time == 0:
self.clean_up_irrigation_cell(json_object)
json_object["run_time"] = 0
self.alarm_queue.store_past_action_queue("IRRIGATION:START:ETO_RESTRICTION","YELLOW", json_object )
return "RESET"
self.io_control.load_duration_counters( run_time )
#print "made it here"
self.io_control.turn_on_master_valves()
self.io_control.turn_on_io( json_object["io_setup"] )
station_by_pass = 0
elasped_time = 1
self.redis.hset( "CONTROL_VARIABLES","sprinkler_ctrl_mode","AUTO")
self.redis.hset( "CONTROL_VARIABLES","schedule_name", schedule_name )
self.redis.hset( "CONTROL_VARIABLES","schedule_step_number", step_number )
self.redis.hset( "CONTROL_VARIABLES","schedule_step", schedule_step )
self.redis.hset( "CONTROL_VARIABLES","schedule_time_count", elasped_time )
self.redis.hset( "CONTROL_VARIABLES","schedule_time_max", run_time )
self.log_start_step( schedule_name, json_object["step"])
#print "current_log",self.current_log_object
#print "flow_log", self.flow_log_object
json_object["elasped_time"] = elasped_time
json_object["run_time"] = run_time
json_string = json.dumps( json_object )
compact_data = base64.b64encode(json_string)
#print "start #end json string ",json_string
self.redis.lset( "QUEUES:SPRINKLER:IRRIGATION_CELL_QUEUE", 0, compact_data )
return "DISABLE"
def monitor( self, *args ):
#print "monitor --------------->"
# check to see if something is in the queue
length = self.redis.llen( "QUEUES:SPRINKLER:IRRIGATION_CELL_QUEUE" )
#print "length",length
if length == 0 :
return "CONTINUE"
compact_data = self.redis.lindex( "QUEUES:SPRINKLER:IRRIGATION_CELL_QUEUE",0 )
json_string = base64.b64decode(compact_data)
json_object = json.loads(json_string)
run_time = int( json_object["run_time"])
elasped_time = int(json_object["elasped_time"])
schedule_step = int(json_object["step"])
step_number = json_object["step"]
schedule_name = json_object["schedule_name"]
if (self.check_redis_value("SUSPEND") == "ON") :
#self.io_control.turn_off_io(json_object["io_setup"])
#self.io_control.disable_all_sprinklers()
return "HALT" # System is not processing commands right now
elasped_time = elasped_time +1
self.log_sensors( schedule_name, schedule_step)
if json_object["eto_enable"] == True:
self.update_eto_queue_a( 1, json_object["io_setup"] )
if (elasped_time <= run_time ) and ( self.check_redis_value("SKIP_STATION") != "ON" ):
self.io_control.turn_on_io( json_object["io_setup"] )
self.io_control.turn_on_master_valves()
self.redis.hset( "CONTROL_VARIABLES","schedule_time_count", elasped_time )
json_object["elasped_time"] = elasped_time
json_string = json.dumps( json_object )
compact_data = base64.b64encode(json_string)
self.redis.lset( "QUEUES:SPRINKLER:IRRIGATION_CELL_QUEUE", 0, compact_data )
return_value = "RESET"
else:
#print "normal end"
self.log_step_stop()
self.clean_up_irrigation_cell(json_object)
return_value = "DISABLE"
#print "cell returnValue is ",return_value
return return_value
def clean_up_irrigation_cell( self ,json_object ):
#print "made it to cleanup"
self.redis.delete("QUEUES:SPRINKLER:IRRIGATION_CELL_QUEUE")
self.redis.hset("CONTROL_VARIABLES", "schedule_name","offline" )
self.redis.hset("CONTROL_VARIABLES", "schedule_step_number",0 )
self.redis.hset("CONTROL_VARIABLES", "schedule_step",0 )
self.redis.hset("CONTROL_VARIABLES", "schedule_time_count",0 )
self.redis.hset( "CONTROL_VARIABLES","schedule_time_max",0 )
self.redis.hset( "CONTROL_VARIABLES","sprinkler_ctrl_mode","AUTO")
self.redis.hset( "CONTROL_VARIABLES","SKIP_STATION","OFF")
self.io_control.turn_off_io(json_object["io_setup"])
self.io_control.disable_all_sprinklers()
self.io_control.clear_duration_counters()
self.io_control.turn_off_master_valves()
def log_sensors(self, schedule_name,step):
if hasattr(self, 'current_log_object') == False:
self.current_log_object = self.initialize_object( "current_log",schedule_name,step)
if hasattr(self, 'flow_log_object') == False:
self.flow_log_object = self.initialize_object( "flow_log",schedule_name,step )
coil_current = float( self.redis.hget( "CONTROL_VARIABLES","coil_current" ))
self.log_coil_current ( coil_current )
for i in self.counter_devices.keys():
sensor_name = i
flow_value = self.redis.lindex("QUEUES:SPRINKLER:FLOW:"+str(i),0)
self.log_flow_rate( sensor_name, flow_value )
def log_flow_rate( self, sensor_name, flow_value ):
if self.flow_log_object["fields"].has_key( sensor_name ) == False:
self.initialize_field( self.flow_log_object, sensor_name )
temp = self.flow_log_object["fields"][ sensor_name ]
temp["count"] = temp["count"]+1
temp["data"].append( flow_value)
if flow_value > temp["max"] :
temp["max"] = flow_value
if flow_value < temp["min"] :
temp["min"] = flow_value
def log_coil_current ( self,coil_current ):
if self.current_log_object["fields"].has_key( "coil_current" ) == False:
self.initialize_field( self.current_log_object, "coil_current")
temp = self.current_log_object["fields"]["coil_current"]
temp["count"] = temp["count"]+1
temp["data"].append( coil_current )
if coil_current > temp["max"] :
temp["max"] = coil_current
if coil_current < temp["min"] :
temp["min"] = coil_current
def log_start_step( self, schedule_name, step):
#print "made it log start step"
self.current_log_object = self.initialize_object( "current_log",schedule_name,step)
self.flow_log_object = self.initialize_object( "flow_log",schedule_name,step )
self.alarm_queue.store_event_queue( "start_step", { "schedule":schedule_name, "step":step } )
def log_step_stop( self ):
if hasattr(self, 'flow_log_object') == False:
return # case when eto abort
obj = self.flow_log_object
self.alarm_queue.store_past_action_queue("IRRIGATION:END","GREEN", { "schedule_name":obj["schedule_name"],"step_name":obj["step"] } )
self.store_object( self.current_log_object, "coil" )
self.store_object( self.flow_log_object, "flow" )
obj = {}
obj["coil"] = self.current_log_object
obj["flow"] = self.flow_log_object
self.alarm_queue.store_event_queue( "irrigatation_store_object", obj )
self.current_log_object = None
self.flow_log_object = None
def store_object( self, obj ,queue_type ):
if obj == None:
return
#self.add_limits(obj, queue_type )
self.compute_object_statistics( obj )
queue = "log_data:"+queue_type+":"+obj["schedule_name"]+":"+str(obj["step"])
json_string = json.dumps(obj)
compact_data = base64.b64encode(json_string)
self.redis.lpush( queue, json_string )
self.redis.ltrim( queue,0,100)
def initialize_object( self, name,schedule_name,step ):
obj = {}
obj["name"] = name
obj["time"] = time.time()
obj["schedule_name"] = schedule_name
obj["step"] = step
obj["fields"] = {}
return obj
def initialize_field( self, obj ,field):
if obj["fields"].has_key(field) == False:
obj["fields"][field] = {}
obj["fields"][field]["max"] = -1000000
obj["fields"][field]["min"] = 1000000
obj["fields"][field]["count"] = 0
obj["fields"][field]["data"] = []
def compute_object_statistics( self, obj ):
#print "compute object statistics", obj
for j in obj["fields"] :
temp = obj["fields"][j]
temp["total"] = 0
count = 0
for m in temp["data"]:
m = float(m)
count = count +1
if count > 5:
temp["total"] = temp["total"] + m
#print "count ",count
if count > 5:
temp["average"] = temp["total"]/(count -5)
else:
temp["average"] = 0
temp["std"] = 0
count = 0
for m in temp["data"]:
m = float(m)
count = count +1
if count > 5 :
temp["std"] = temp["std"] + (m -temp["average"])*(m-temp["average"])
temp["std"] = math.sqrt(temp["std"]/(count-5))
else:
temp["std"] = 0
## 1 gallon is 0.133681 ft3
## assuming a 5 foot radius
## a 12 gallon/hour head 0.2450996343 inch/hour
## a 14 gallon/hour head 0.2859495733 inch/hour
## a 16 gallon/hour head 0.3267995123 inch/hour
##
##
##
##
## capacity of soil
## for silt 2 feet recharge rate 30 % recharge inches -- .13 * 24 *.3 = .936 inch
## for sand 1 feet recharge rate 30 % recharge inches -- .06 * 12 *.3 = .216 inch
##
## recharge rate for is as follows for 12 gallon/hour head:
## sand 1 feet .216/.245 which is 52 minutes
## silt 2 feet recharge rate is 3.820 hours or 229 minutes
##
## {"controller":"satellite_1", "pin": 9, "recharge_eto": 0.216, "recharge_rate":0.245 },
## eto_site_data
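   ##
   ## Worked example (a rough sketch of the arithmetic above, assuming the 5 foot
   ## radius and a 12 gallon/hour head):
   ##   12 gal/hour * 0.133681 ft3/gal                  = 1.604 ft3/hour
   ##   1.604 ft3/hour / (pi * 5 ft * 5 ft = 78.5 ft2)  = 0.0204 ft/hour
   ##   0.0204 ft/hour * 12 inch/ft                     = 0.245 inch/hour
   ## which matches the 0.2450996343 inch/hour figure quoted for a 12 gallon/hour head.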
def eto_update( self, schedule_run_time, io_list ):
self.eto_site_data = self.app_files.load_file( "eto_site_setup.json" )
manage_eto = self.redis.hget( "CONTROL_VARIABLES","ETO_MANAGE_FLAG" )
if manage_eto == None:
manage_eto = 1
self.redis.hset("CONTROL_VARIABLES", "ETO_MANAGE_FLAG",manage_eto)
manage_eto = int( manage_eto )
if manage_eto == 1:
sensor_list = self.find_queue_names( io_list )
if len(sensor_list) != 0:
run_time = self.find_largest_runtime( schedule_run_time, sensor_list )
if run_time < schedule_run_time :
schedule_run_time = run_time
return schedule_run_time
def find_queue_names( self, io_list ):
eto_values = []
for j in io_list:
controller = j["remote"]
bits = j["bits"]
bit = bits[0]
index = 0
for m in self.eto_site_data:
if (m["controller"] == controller) and (m["pin"] == bit):
queue_name = controller+"|"+str(bit)
data = self.redis.hget( "ETO_RESOURCE", queue_name )
eto_values.append( [index, data, queue_name ] )
index = index +1
#print "eto values ",eto_values
return eto_values
def find_largest_runtime( self, run_time, sensor_list ):
runtime = 0
for j in sensor_list:
index = j[0]
deficient = float(j[1])
eto_temp = self.eto_site_data[index]
recharge_eto = float( eto_temp["recharge_eto"] )
recharge_rate = float(eto_temp["recharge_rate"])
if float(deficient) > recharge_eto :
runtime_temp = (deficient /recharge_rate)*60
if runtime_temp > runtime :
runtime = runtime_temp
#print "run time",runtime
return runtime
def update_eto_queue_a( self, run_time, io_list ):
self.eto_site_data = self.app_files.load_file( "eto_site_setup.json" )
manage_eto = self.redis.hget( "CONTROL_VARIABLES","ETO_MANAGE_FLAG" )
if manage_eto == None:
manage_eto = 1
self.redis.hset( "CONTROL_VARIABLES","ETO_MANAGE_FLAG",manage_eto)
manage_eto = int( manage_eto )
if manage_eto == 1:
sensor_list = self.find_queue_names( io_list )
if len(sensor_list) != 0:
self.update_eto_queue(run_time,sensor_list)
def update_eto_queue( self, run_time, sensor_list ):
for l in sensor_list:
j_index = l[0]
queue_name = l[2]
j = self.eto_site_data[ j_index ]
deficient = self.redis.hget("ETO_RESOURCE", queue_name )
if deficient == None:
deficient = 0
else:
deficient = float(deficient)
recharge_rate = float(j["recharge_rate"])
deficient = deficient - (recharge_rate/60)*run_time
if deficient < 0 :
deficient = 0
self.redis.hset( "ETO_RESOURCE", queue_name, deficient )
if __name__ == "__main__":
pass
| {
"content_hash": "4d35b3c3cf4775a40bd3e4399c62adf6",
"timestamp": "",
"source": "github",
"line_count": 424,
"max_line_length": 170,
"avg_line_length": 42.488207547169814,
"alnum_prop": 0.5473216763807938,
"repo_name": "glenn-edgar/local_controller_3",
"id": "11dd81c6dcfb4776a20fc549b8f387b09cff46d9",
"size": "18016",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "irrigation_control_py3/ref/queue_element_control_py3.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "2510"
},
{
"name": "CSS",
"bytes": "4575415"
},
{
"name": "HTML",
"bytes": "2215958"
},
{
"name": "JavaScript",
"bytes": "9981211"
},
{
"name": "Makefile",
"bytes": "5136"
},
{
"name": "PHP",
"bytes": "124476"
},
{
"name": "Python",
"bytes": "4396570"
},
{
"name": "Shell",
"bytes": "569"
},
{
"name": "Smalltalk",
"bytes": "252"
},
{
"name": "TeX",
"bytes": "3153"
},
{
"name": "TypeScript",
"bytes": "11006"
}
],
"symlink_target": ""
} |
import os
import json
from bs4 import BeautifulSoup
from flask import (abort, current_app, redirect, render_template, request,
url_for)
from models import SCHEMA
from rst import iteritems, get_hyperlink_target, rst_to_html_fragment
import search
from settings import INDEX, INDEX_PATH, SRC, BACKLINKS
def index():
suggestions = []
corpus = current_app.corpus
for document in corpus.gen_documents():
try:
preview = rst_to_html_fragment(document.excerpt)
except StopIteration:
preview = None
suggestions.append({
"title": document.reference_name,
"preview": preview,
})
return render_template(
"_layouts/index.html",
suggestions=suggestions
)
def article(name):
"""Generate a HTML page for an article.
:param name:
The name of the article.
"""
if name.endswith(".html"):
root, _ = os.path.splitext(name)
return redirect(url_for("article", name=root))
else:
try:
document = current_app.corpus.find(name)
except ValueError:
try:
canonical_name = current_app.corpus.get_canonical_name(name)
except KeyError:
abort(404)
else:
return redirect(canonical_name)
else:
html_doc = document.html
soup = BeautifulSoup(html_doc, 'html.parser')
body = str(soup.body)
title = name.replace('_', ' ')
backlinks = BACKLINKS.get("{}.html".format(document.filename), [])
return render_template(
'article.html',
document=body,
title=title,
backlinks=sorted(backlinks),
)
def search_view():
if not os.path.isdir(INDEX_PATH):
os.mkdir(INDEX_PATH)
search_index = search.get_or_create_index(INDEX_PATH, SCHEMA, SRC)
querystring = request.args.get('q')
results = list(search.search(search_index, querystring))
if request.args.get('follow'):
# Redirect to article if a direct hit is found in the INDEX
index = {k.lower(): v for k, v in iteritems(INDEX)}
for result in results:
reference_name = str(querystring).lower()
try:
target = get_hyperlink_target(index, reference_name)
except KeyError:
pass
else:
return redirect(target)
return render_template("_layouts/search.html", querystring=querystring,
results=results)
def open_search():
return render_template("opensearch.xml"), 200, {'Content-Type': 'text/xml'}
| {
"content_hash": "2809d357a3f523818b8acc0ea310537f",
"timestamp": "",
"source": "github",
"line_count": 89,
"max_line_length": 79,
"avg_line_length": 30.640449438202246,
"alnum_prop": 0.5775577557755776,
"repo_name": "Ceasar/Encyclopedia",
"id": "a49312abd5f2d4d2938a6a36381fe968fcfadf31",
"size": "2727",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "10503"
},
{
"name": "HTML",
"bytes": "2947"
},
{
"name": "JavaScript",
"bytes": "71351"
},
{
"name": "Makefile",
"bytes": "816"
},
{
"name": "Python",
"bytes": "13822"
}
],
"symlink_target": ""
} |
from http import HTTPStatus
import requests
from eth_utils import to_bytes, to_hex
from raiden.raiden_service import RaidenService
from raiden.storage.wal import WriteAheadLog
from raiden.transfer.mediated_transfer.events import SendSecretRequest
from raiden.transfer.mediated_transfer.state_change import ReceiveSecretReveal
def reveal_secret_with_resolver(
raiden: RaidenService, secret_request_event: SendSecretRequest
) -> bool:
if "resolver_endpoint" not in raiden.config:
return False
assert isinstance(raiden.wal, WriteAheadLog), "RaidenService has not been started"
current_state = raiden.wal.state_manager.current_state
task = current_state.payment_mapping.secrethashes_to_task[secret_request_event.secrethash]
token = task.target_state.transfer.token
request = {
"token": to_hex(token),
"secrethash": to_hex(secret_request_event.secrethash),
"amount": secret_request_event.amount,
"payment_identifier": secret_request_event.payment_identifier,
"payment_sender": to_hex(secret_request_event.recipient),
"expiration": secret_request_event.expiration,
"payment_recipient": to_hex(raiden.address),
"reveal_timeout": raiden.config["reveal_timeout"],
"settle_timeout": raiden.config["settle_timeout"],
}
try:
response = requests.post(raiden.config["resolver_endpoint"], json=request)
except requests.exceptions.RequestException:
return False
if response is None or response.status_code != HTTPStatus.OK:
return False
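    # The resolver is expected to answer 200 OK with a JSON body carrying the
    # secret as a hex string, e.g. {"secret": "0x..."}; the exact shape is
    # inferred from the parsing below.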
state_change = ReceiveSecretReveal(
to_bytes(hexstr=response.json()["secret"]), secret_request_event.recipient
)
raiden.handle_and_track_state_change(state_change)
return True
| {
"content_hash": "d088d498870a35e0a122fe973aa164fb",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 94,
"avg_line_length": 37.0625,
"alnum_prop": 0.7234401349072512,
"repo_name": "hackaugusto/raiden",
"id": "649953ab78fb37c33f128f645378bc560b913484",
"size": "1779",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "raiden/network/resolver/client.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "4327"
},
{
"name": "HCL",
"bytes": "8943"
},
{
"name": "Makefile",
"bytes": "5888"
},
{
"name": "Nix",
"bytes": "1271"
},
{
"name": "Python",
"bytes": "2681630"
},
{
"name": "Shell",
"bytes": "9530"
},
{
"name": "Solidity",
"bytes": "1004"
}
],
"symlink_target": ""
} |
from ..excel_comparison_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
"""
Test file created by XlsxWriter against a file created by Excel.
"""
def setUp(self):
self.set_filename('background05.xlsx')
def test_create_file(self):
"""Test the creation of an XlsxWriter file with a background image."""
workbook = Workbook(self.got_filename)
worksheet1 = workbook.add_worksheet()
worksheet2 = workbook.add_worksheet()
worksheet1.set_background(self.image_dir + 'logo.jpg')
worksheet2.set_background(self.image_dir + 'red.jpg')
workbook.close()
self.assertExcelEqual()
| {
"content_hash": "f0e1072a5b748eaf3c492608982306ae",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 78,
"avg_line_length": 26.214285714285715,
"alnum_prop": 0.6743869209809265,
"repo_name": "jmcnamara/XlsxWriter",
"id": "0d835b4cc329bedc4e78e57e6e75c6af5c9e8a15",
"size": "947",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "xlsxwriter/test/comparison/test_background05.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Batchfile",
"bytes": "5113"
},
{
"name": "CSS",
"bytes": "16544"
},
{
"name": "HTML",
"bytes": "13100"
},
{
"name": "Makefile",
"bytes": "7748"
},
{
"name": "Perl",
"bytes": "3503"
},
{
"name": "Python",
"bytes": "2807230"
},
{
"name": "Shell",
"bytes": "7964"
}
],
"symlink_target": ""
} |
'''
Rackspace Cloud Module
======================
The Rackspace cloud module. This module uses the preferred means to set up a
libcloud based cloud module and should be used as the general template for
setting up additional libcloud based modules.
The rackspace cloud module interfaces with the Rackspace public cloud service
and requires that two configuration parameters be set for use, ``user`` and
``apikey``.
Set up the cloud configuration at ``/etc/salt/cloud.providers`` or
``/etc/salt/cloud.providers.d/rackspace.conf``:
.. code-block:: yaml
my-rackspace-config:
provider: rackspace
# The Rackspace login user
user: fred
# The Rackspace user's apikey
apikey: 901d3f579h23c8v73q9
'''
# The import section is mostly libcloud boilerplate
# Import python libs
import copy
import logging
import socket
import pprint
# Import libcloud
from libcloud.compute.base import NodeState
# Import generic libcloud functions
from salt.cloud.libcloudfuncs import * # pylint: disable=W0614,W0401
# Import salt libs
import salt.utils
# Import salt.cloud libs
import salt.utils.cloud
import salt.config as config
from salt.utils import namespaced_function
from salt.cloud.exceptions import (
SaltCloudSystemExit,
SaltCloudExecutionFailure,
SaltCloudExecutionTimeout
)
# Get logging started
log = logging.getLogger(__name__)
# Some of the libcloud functions need to be in the same namespace as the
# functions defined in the module, so we create new function objects inside
# this module namespace
get_size = namespaced_function(get_size, globals())
get_image = namespaced_function(get_image, globals())
avail_locations = namespaced_function(avail_locations, globals())
avail_images = namespaced_function(avail_images, globals())
avail_sizes = namespaced_function(avail_sizes, globals())
script = namespaced_function(script, globals())
destroy = namespaced_function(destroy, globals())
list_nodes = namespaced_function(list_nodes, globals())
list_nodes_full = namespaced_function(list_nodes_full, globals())
list_nodes_select = namespaced_function(list_nodes_select, globals())
show_instance = namespaced_function(show_instance, globals())
# Only load this module if the RACKSPACE configuration is in place
def __virtual__():
'''
Set up the libcloud functions and check for Rackspace configuration.
'''
if get_configured_provider() is False:
log.debug(
'There is no Rackspace cloud provider configuration available. '
'Not loading module.'
)
return False
log.debug('Loading Rackspace cloud module')
return True
def get_configured_provider():
'''
Return the first configured instance.
'''
return config.is_provider_configured(
__opts__,
__active_provider_name__ or 'rackspace',
('user', 'apikey')
)
def get_conn():
'''
Return a conn object for the passed VM data
'''
force_first_gen = config.get_cloud_config_value(
'force_first_gen',
get_configured_provider(),
__opts__,
search_global=False,
default=False
)
compute_region = config.get_cloud_config_value(
'compute_region',
get_configured_provider(),
__opts__,
search_global=False,
default='DFW'
).upper()
if force_first_gen:
log.info('Rackspace driver will only have access to first-gen images')
driver = get_driver(Provider.RACKSPACE)
else:
computed_provider = 'RACKSPACE_NOVA_{0}'.format(compute_region)
try:
driver = get_driver(getattr(Provider, computed_provider))
except AttributeError:
log.info(
'Rackspace driver will only have access to first-gen images '
'since it was unable to load the driver as {0}'.format(
computed_provider
)
)
driver = get_driver(Provider.RACKSPACE)
except Exception:
# http://goo.gl/qFgY42
driver = get_driver(Provider.RACKSPACE)
return driver(
config.get_cloud_config_value(
'user',
get_configured_provider(),
__opts__,
search_global=False
),
config.get_cloud_config_value(
'apikey',
get_configured_provider(),
__opts__,
search_global=False
)
)
def preferred_ip(vm_, ips):
'''
Return the preferred Internet protocol. Either 'ipv4' (default) or 'ipv6'.
'''
proto = config.get_cloud_config_value(
'protocol', vm_, __opts__, default='ipv4', search_global=False
)
family = socket.AF_INET
if proto == 'ipv6':
family = socket.AF_INET6
for ip in ips:
try:
socket.inet_pton(family, ip)
return ip
except Exception:
continue
return False
def ssh_interface(vm_):
'''
Return the ssh_interface type to connect to. Either 'public_ips' (default)
or 'private_ips'.
'''
return config.get_cloud_config_value(
'ssh_interface', vm_, __opts__, default='public_ips',
search_global=False
)
def create(vm_):
'''
Create a single VM from a data dict
'''
deploy = config.get_cloud_config_value('deploy', vm_, __opts__)
if deploy is True and salt.utils.which('sshpass') is None:
raise SaltCloudSystemExit(
'Cannot deploy salt in a VM if the \'sshpass\' binary is not '
'present on the system.'
)
salt.utils.cloud.fire_event(
'event',
'starting create',
'salt/cloud/{0}/creating'.format(vm_['name']),
{
'name': vm_['name'],
'profile': vm_['profile'],
'provider': vm_['provider'],
},
)
log.info('Creating Cloud VM {0}'.format(vm_['name']))
conn = get_conn()
kwargs = {
'name': vm_['name'],
'image': get_image(conn, vm_),
'size': get_size(conn, vm_)
}
salt.utils.cloud.fire_event(
'event',
'requesting instance',
'salt/cloud/{0}/requesting'.format(vm_['name']),
{'kwargs': {'name': kwargs['name'],
'image': kwargs['image'].name,
'size': kwargs['size'].name}},
)
try:
data = conn.create_node(**kwargs)
except Exception as exc:
log.error(
'Error creating {0} on RACKSPACE\n\n'
'The following exception was thrown by libcloud when trying to '
'run the initial deployment: \n{1}'.format(
vm_['name'], exc
),
# Show the traceback if the debug logging level is enabled
exc_info=log.isEnabledFor(logging.DEBUG)
)
return False
def __query_node_data(vm_, data):
try:
nodelist = list_nodes()
log.debug(
'Loaded node data for {0}:\n{1}'.format(
vm_['name'],
pprint.pformat(
nodelist[vm_['name']]
)
)
)
except Exception as err:
log.error(
'Failed to get nodes list: {0}'.format(
err
),
# Show the traceback if the debug logging level is enabled
exc_info=log.isEnabledFor(logging.DEBUG)
)
# Trigger a failure in the wait for IP function
return False
running = nodelist[vm_['name']]['state'] == node_state(
NodeState.RUNNING
)
if not running:
# Still not running, trigger another iteration
return
private = nodelist[vm_['name']]['private_ips']
public = nodelist[vm_['name']]['public_ips']
if private and not public:
log.warn(
'Private IPs returned, but not public... Checking for '
'misidentified IPs'
)
for private_ip in private:
private_ip = preferred_ip(vm_, [private_ip])
if salt.utils.cloud.is_public_ip(private_ip):
log.warn('{0} is a public IP'.format(private_ip))
data.public_ips.append(private_ip)
else:
log.warn('{0} is a private IP'.format(private_ip))
if private_ip not in data.private_ips:
data.private_ips.append(private_ip)
if ssh_interface(vm_) == 'private_ips' and data.private_ips:
return data
if private:
data.private_ips = private
if ssh_interface(vm_) == 'private_ips':
return data
if public:
data.public_ips = public
if ssh_interface(vm_) != 'private_ips':
return data
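    # salt.utils.cloud.wait_for_ip polls __query_node_data at the given interval:
    # a None return means "not ready yet, keep polling", False is treated as a
    # failure, and any other value is taken as the node data. Exceeding the timeout
    # raises SaltCloudExecutionTimeout, handled below by destroying the half-built VM.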
try:
data = salt.utils.cloud.wait_for_ip(
__query_node_data,
update_args=(vm_, data),
timeout=config.get_cloud_config_value(
'wait_for_ip_timeout', vm_, __opts__, default=25 * 60),
interval=config.get_cloud_config_value(
'wait_for_ip_interval', vm_, __opts__, default=10),
)
except (SaltCloudExecutionTimeout, SaltCloudExecutionFailure) as exc:
try:
# It might be already up, let's destroy it!
destroy(vm_['name'])
except SaltCloudSystemExit:
pass
finally:
raise SaltCloudSystemExit(exc.message)
log.debug('VM is now running')
if ssh_interface(vm_) == 'private_ips':
ip_address = preferred_ip(vm_, data.private_ips)
else:
ip_address = preferred_ip(vm_, data.public_ips)
log.debug('Using IP address {0}'.format(ip_address))
if not ip_address:
raise SaltCloudSystemExit(
'No IP addresses could be found.'
)
ssh_username = config.get_cloud_config_value(
'ssh_username', vm_, __opts__, default='root'
)
ret = {}
if deploy is True:
deploy_script = script(vm_)
deploy_kwargs = {
'host': ip_address,
'username': ssh_username,
'password': data.extra['password'],
'script': deploy_script.script,
'name': vm_['name'],
'tmp_dir': config.get_cloud_config_value(
'tmp_dir', vm_, __opts__, default='/tmp/.saltcloud'
),
'deploy_command': config.get_cloud_config_value(
'deploy_command', vm_, __opts__,
default='/tmp/.saltcloud/deploy.sh',
),
'start_action': __opts__['start_action'],
'parallel': __opts__['parallel'],
'sock_dir': __opts__['sock_dir'],
'conf_file': __opts__['conf_file'],
'minion_pem': vm_['priv_key'],
'minion_pub': vm_['pub_key'],
'keep_tmp': __opts__['keep_tmp'],
'preseed_minion_keys': vm_.get('preseed_minion_keys', None),
'sudo': config.get_cloud_config_value(
'sudo', vm_, __opts__, default=(ssh_username != 'root')
),
'sudo_password': config.get_cloud_config_value(
'sudo_password', vm_, __opts__, default=None
),
'tty': config.get_cloud_config_value(
'tty', vm_, __opts__, default=False
),
'display_ssh_output': config.get_cloud_config_value(
'display_ssh_output', vm_, __opts__, default=True
),
'script_args': config.get_cloud_config_value(
'script_args', vm_, __opts__
),
'script_env': config.get_cloud_config_value('script_env', vm_, __opts__),
'minion_conf': salt.utils.cloud.minion_config(__opts__, vm_)
}
# Deploy salt-master files, if necessary
if config.get_cloud_config_value('make_master', vm_, __opts__) is True:
deploy_kwargs['make_master'] = True
deploy_kwargs['master_pub'] = vm_['master_pub']
deploy_kwargs['master_pem'] = vm_['master_pem']
master_conf = salt.utils.cloud.master_config(__opts__, vm_)
deploy_kwargs['master_conf'] = master_conf
if master_conf.get('syndic_master', None):
deploy_kwargs['make_syndic'] = True
deploy_kwargs['make_minion'] = config.get_cloud_config_value(
'make_minion', vm_, __opts__, default=True
)
# Check for Windows install params
win_installer = config.get_cloud_config_value('win_installer', vm_, __opts__)
if win_installer:
deploy_kwargs['win_installer'] = win_installer
minion = salt.utils.cloud.minion_config(__opts__, vm_)
deploy_kwargs['master'] = minion['master']
deploy_kwargs['username'] = config.get_cloud_config_value(
'win_username', vm_, __opts__, default='Administrator'
)
deploy_kwargs['password'] = config.get_cloud_config_value(
'win_password', vm_, __opts__, default=''
)
# Store what was used to the deploy the VM
event_kwargs = copy.deepcopy(deploy_kwargs)
del event_kwargs['minion_pem']
del event_kwargs['minion_pub']
del event_kwargs['sudo_password']
if 'password' in event_kwargs:
del event_kwargs['password']
ret['deploy_kwargs'] = event_kwargs
salt.utils.cloud.fire_event(
'event',
'executing deploy script',
'salt/cloud/{0}/deploying'.format(vm_['name']),
{'kwargs': event_kwargs},
)
deployed = False
if win_installer:
deployed = salt.utils.cloud.deploy_windows(**deploy_kwargs)
else:
deployed = salt.utils.cloud.deploy_script(**deploy_kwargs)
if deployed:
log.info('Salt installed on {0}'.format(vm_['name']))
else:
log.error(
'Failed to deploy and start Salt on Cloud VM {0}'.format(
vm_['name']
)
)
log.info('Created Cloud VM {0[name]!r}'.format(vm_))
log.debug(
'{0[name]!r} VM creation details:\n{1}'.format(
vm_, pprint.pformat(data.__dict__)
)
)
ret.update(data.__dict__)
salt.utils.cloud.fire_event(
'event',
'created instance',
'salt/cloud/{0}/created'.format(vm_['name']),
{
'name': vm_['name'],
'profile': vm_['profile'],
'provider': vm_['provider'],
},
)
return ret
| {
"content_hash": "21e92c11d5a182f33eaa18666a4fe205",
"timestamp": "",
"source": "github",
"line_count": 452,
"max_line_length": 85,
"avg_line_length": 32.67035398230089,
"alnum_prop": 0.5552244870318954,
"repo_name": "MadeiraCloud/salt",
"id": "aa4a61d2294100d58cf9bbbcea1f8421bcbaf40b",
"size": "14791",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sources/salt/cloud/clouds/rackspace.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "10058"
},
{
"name": "Makefile",
"bytes": "1815"
},
{
"name": "Python",
"bytes": "4530204"
},
{
"name": "Shell",
"bytes": "169676"
}
],
"symlink_target": ""
} |
import sling
import sys
sys.path.insert(0, "sling/nlp/parser/trainer")
import corpora
# Error checking options.
class Options:
def __init__(self):
self.stop_on_first_bad_document = False
self.allow_mentions_without_frames = False
self.allow_duplicate_evokes = False
self.allow_nil_roles = False
self.allow_nil_values = False
self.max_error_examples = 3
# Represents a single error and the context it occurs in.
class Error:
# Error types.
BAD_SPAN_BEGIN = 0 # span has a bad begin index
BAD_SPAN_END = 1 # span has a bad end index
CROSSING_SPAN = 2 # span crosses another span
MENTION_WITHOUT_FRAME = 3 # mention without evoked frame
UNTYPED_EVOKED_FRAME = 4 # evoked frame without a type
FRAME_TYPE_NOT_GLOBAL = 5 # frame type is a local symbol
ROLE_IS_NONE = 6 # frame with a nil role
VALUE_IS_NONE = 7 # frame with a nil value
ROLE_IS_LOCAL = 8 # frame with a role that is a local symbol
VALUE_NOT_A_FRAME = 9 # frame with a slot value that is not a frame
FRAME_NOT_LOCAL = 10 # expected a local frame but didn't get one
DUPLICATE_EVOKED_FRAMES = 11 # >1 frames evoked from a mention with same type
DUPLICATE_SPANS = 12 # >1 spans covering exactly the same tokens
def __init__(self, code, doc_index, document, args):
self.code = code
self.doc_index = doc_index
# Document where the error occurs.
self.document = document
assert isinstance(self.document, sling.Document), type(self.document)
# Error-specific context (e.g. mention, frame etc).
self.args = args
# Returns [a limited-size prefix of] the document text.
def _document_text(self, max_tokens=20):
s = []
for index, token in enumerate(self.document.tokens):
if max_tokens >= 0 and index >= max_tokens: break
if token.brk == 0 and len(s) > 0:
s[-1] = s[-1] + token.word
else:
s.append(token.word)
if max_tokens >= 0 and len(self.document.tokens) > max_tokens:
s.append(" ...")
return ' '.join(s)
  # Returns a string representation of the error.
def tostr(self, indent=0):
self.document.decorate()
output = []
output.extend(["Document Index: " + str(self.doc_index)])
output.extend(["Document: " + self._document_text()])
frame_id = self.document.frame.id
if frame_id is not None:
output.extend(["Document FrameId: " + str(frame_id)])
output.extend(["DocumentLength: " + str(len(self.document.tokens))])
if type(self.args[0]) is sling.Mention:
output.extend(["Mention: " + self.args[0].frame.data(binary=False)])
if self.code == Error.BAD_SPAN_BEGIN:
output.extend(["Begin: " + str(self.args[0].begin)])
elif self.code == Error.BAD_SPAN_END:
output.extend(["End: " + str(self.args[0].end)])
elif self.code == Error.CROSSING_SPAN:
m2 = self.args[1]
output.extend(["Mention2: " + m2.frame.data(binary=False)])
elif self.code == Error.UNTYPED_EVOKED_FRAME:
f = self.args[1]
output.extend(["UntypedEvokedFrame: " + f.data(binary=False)])
elif self.code == Error.FRAME_TYPE_NOT_GLOBAL:
t = self.args[2]
output.extend(["NonGlobalType: " + t.data(binary=False)])
elif self.code == Error.ROLE_IS_NONE:
f = self.args[0]
output.extend(["FrameWithNilRole: " + f.data(binary=False)])
elif self.code == Error.VALUE_IS_NONE:
f = self.args[0]
output.extend(["FrameWithNilValue: " + f.data(binary=False)])
elif self.code == Error.ROLE_IS_LOCAL:
f = self.args[0]
role = self.args[1]
output.extend(["Frame: " + f.data(binary=False)])
output.extend(["LocalRole: " + role.data(binary=False)])
elif self.code == Error.VALUE_NOT_A_FRAME:
f = self.args[1]
output.extend(["NonFrameValue: " + str(f)])
elif self.code == Error.FRAME_NOT_LOCAL:
f = self.args[1]
output.extend(["NonLocalFrame: " + f.data(binary=False)])
elif self.code == Error.DUPLICATE_EVOKED_FRAMES:
t = self.args[1]
output.extend(["TypeEvokedAgain: " + t.id])
elif self.code == Error.DUPLICATE_SPANS:
m2 = self.args[1]
output.extend(["AnotherSpanOverSameInterval: " + m2.frame.data()])
if indent > 0:
prefix = ' ' * indent
for i in range(len(output)):
if i > 0: output[i] = prefix + output[i]
return '\n'.join(output)
# Returns a string representation of the specified error code.
def _codestr(code):
assert type(code) is int, code
for c, value in Error.__dict__.items():
if type(value) is int and value == code and c[0].isupper():
return c
return "<UNKNOWN_ERROR:" + str(code) + ">"
# Represents results of checking a corpora for errors.
class Results:
def __init__(self, options):
self.error_counts = {} # error code -> count
self.error_examples = {} # error code -> limited no. of examples
self.options = options # error checking options
# Returns whether there were no errors.
def ok(self):
return len(self.error_counts) == 0
# Creates and adds an error with the specified code and context.
def error(self, code, args):
doc_index = args[0]
document = args[1]
assert isinstance(document, sling.Document)
args = args[2:]
if code not in self.error_counts:
self.error_counts[code] = 0
self.error_counts[code] += 1
if self.options.max_error_examples >= 0 and \
self.error_counts[code] <= self.options.max_error_examples:
error = Error(code, doc_index, document, args)
if code not in self.error_examples:
self.error_examples[code] = []
self.error_examples[code].append(error)
# Aggregates the result set in 'other' to this result set.
def add(self, other):
for code, count in other.error_counts.items():
if code not in self.error_counts:
self.error_counts[code] = 0
self.error_examples[code] = []
self.error_counts[code] += count
num = len(other.error_examples[code])
current = len(self.error_examples[code])
if self.options.max_error_examples >= 0 and \
num + current > self.options.max_error_examples:
num = self.options.max_error_examples - current
if num > 0:
self.error_examples[code].extend(other.error_examples[code][0:num])
# Returns the string representation of error checking results.
def __repr__(self):
if self.ok():
return "No errors"
total = 0
for code, count in self.error_counts.items():
total += count
output = []
output.append("Total " + str(total) + " errors")
for code, count in self.error_counts.items():
output.append(" " + _codestr(code) + " : " + str(count))
output.extend(["", "EXAMPLES", "-" * 70, ""])
for code, examples in self.error_examples.items():
output.append(_codestr(code) + ":")
for index, example in enumerate(examples):
indent = len(str(index) + ") ")
s = str(index) + ") " + example.tostr(indent)
output.extend([s, ""])
output.append("")
return '\n'.join(output)
# Validates 'frame', which is expected to be a local frame, for errors.
# If 'mention' is not None, then 'frame' is one of the evoked frames from it.
# Validation results are added to 'results'
def _validate_frame(index, document, mention, frame, options, results):
if type(frame) is not sling.Frame:
results.error(Error.VALUE_NOT_A_FRAME, [index, document, mention, frame])
return
if not frame.islocal():
results.error(Error.FRAME_NOT_LOCAL, [index, document, mention, frame])
return
commons = document.store.globals()
# Check that the frame type is valid.
t = frame[document.schema.isa]
if t is None:
results.error(Error.UNTYPED_EVOKED_FRAME, [index, document, mention, frame])
elif t.islocal():
results.error(Error.FRAME_TYPE_NOT_GLOBAL, \
[index, document, mention, frame, t])
# Check that frame slots are valid.
for role, value in frame:
if not options.allow_nil_roles and role is None:
results.error(Error.ROLE_IS_NONE, [index, document, frame])
if not options.allow_nil_values and value is None:
results.error(Error.VALUE_IS_NONE, [index, document, frame])
if role is not None and type(role) is sling.Frame and role.islocal():
results.error(Error.ROLE_IS_LOCAL, [index, document, frame, role])
# TODO: Add support to see if certain slots (e.g. /pb/ARG0) should always
# have local values, while others (e.g. measure) should always have global
# values. This can be read from the schema or specified in 'options'.
# Validates 'document' against common errors.
def _validate(index, document, options):
results = Results(options)
length = len(document.tokens)
isa = document.schema.isa
spans = {}
for mention in document.mentions:
begin = mention.begin
end = mention.end
# Check for duplicate spans.
k = (begin, end)
if k in spans:
results.error(Error.DUPLICATE_SPANS, [index, document, mention, spans[k]])
else:
spans[k] = mention
# Check span offsets.
if begin < 0 or begin >= length:
results.error(Error.BAD_SPAN_BEGIN, [index, document, mention])
if end < 0 or end > length:
results.error(Error.BAD_SPAN_END, [index, document, mention])
# Check for crossing spans.
for m2 in document.mentions:
if m2.begin < begin: continue # don't double count crossing spans
if m2.begin >= end: break # mentions are sorted
if m2.begin < begin and m2.end > begin and m2.end < end:
results.error(Error.CROSSING_SPAN, [index, document, mention, m2])
if m2.begin > begin and m2.end > end:
results.error(Error.CROSSING_SPAN, [index, document, mention, m2])
# Check for duplicate frames.
if not options.allow_duplicate_evokes:
seen = {}
for frame in mention.evokes():
t = frame[isa]
if t in seen:
results.error(Error.DUPLICATE_EVOKED_FRAMES, \
[index, document, mention, t])
seen[t] = True
# Check valid evoked frames.
num_evoked = 0
for frame in mention.evokes():
num_evoked += 1
_validate_frame(index, document, mention, frame, options, results)
if not options.allow_mentions_without_frames and num_evoked == 0:
results.error(Error.MENTION_WITHOUT_FRAME, [index, document, mention])
for frame in document.themes:
_validate_frame(index, document, None, frame, options, results)
return results
# Main entry point.
# Checks the corpora in 'recordio_filename' for errors.
def validate(commons, recordio_filename, output_recordio='', options=Options()):
schema = None
if not isinstance(commons, sling.Store):
assert type(commons) is str
filename = commons
commons = sling.Store()
commons.load(filename)
schema = sling.DocumentSchema(commons)
commons.freeze()
else:
schema = sling.DocumentSchema(commons)
corpus = corpora.Corpora(recordio_filename, commons, schema)
aggregate = Results(options)
count = 0
writer = None
written = 0
if output_recordio != '':
writer = sling.RecordWriter(output_recordio)
for document in corpus:
results = _validate(count, document, options)
aggregate.add(results)
if not results.ok() and options.stop_on_first_bad_document:
print("Stopping after first bad document as requested")
break
count += 1
if writer and results.ok():
writer.write('', document.frame.data(binary=True))
written += 1
if writer:
writer.close()
return aggregate, count, written
if __name__ == "__main__":
import sling.flags as flags
flags.define('--input',
help='Input recordio file',
default="",
type=str,
metavar='FILE')
flags.define('--commons',
help='Commons file name',
default="",
type=str,
metavar='FILE')
flags.define('--max_examples',
help='Max number of examples per error type',
default=3,
type=int,
metavar='NUM')
flags.define('--output',
help='Output recordio file name for valid documents',
default="",
type=str,
metavar='FILE')
flags.parse()
options = Options()
options.max_error_examples = flags.arg.max_examples
results, total, written = validate(
flags.arg.commons, flags.arg.input, flags.arg.output, options)
print("Went over", total, "documents")
if flags.arg.output:
print("Wrote", written, "valid documents to", flags.arg.output)
print(results)
| {
"content_hash": "a37f864f08ff4889d6a21fae7941d7a5",
"timestamp": "",
"source": "github",
"line_count": 352,
"max_line_length": 80,
"avg_line_length": 36.13920454545455,
"alnum_prop": 0.6361921232607499,
"repo_name": "google/sling",
"id": "3e3571f308a4db19774d0ef3027d8525021e8b77",
"size": "13505",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sling/nlp/parser/tools/validate.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "765"
},
{
"name": "C++",
"bytes": "4793787"
},
{
"name": "CSS",
"bytes": "10049"
},
{
"name": "HTML",
"bytes": "37253"
},
{
"name": "JavaScript",
"bytes": "59134"
},
{
"name": "Python",
"bytes": "577781"
},
{
"name": "Shell",
"bytes": "10326"
},
{
"name": "Starlark",
"bytes": "50958"
}
],
"symlink_target": ""
} |
"""
WSGI config for citizenline project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "citizenline.settings")
application = get_wsgi_application()
| {
"content_hash": "2f6262158b1661c8302183c6c8ff8741",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 78,
"avg_line_length": 24.9375,
"alnum_prop": 0.7744360902255639,
"repo_name": "citizenline/citizenline",
"id": "a4b9dfaf9b3ab527d71e163a3397032c6032c433",
"size": "399",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "citizenline/wsgi.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3470"
},
{
"name": "HTML",
"bytes": "22853"
},
{
"name": "JavaScript",
"bytes": "8389"
},
{
"name": "Python",
"bytes": "86277"
},
{
"name": "Ruby",
"bytes": "198"
}
],
"symlink_target": ""
} |
import json
import DbLiason
from mpUtils import JobRunner
class Connectable:
def __init__(self, address=None, attrManifestDict=None):
self.__dbLiason = None
self.__address = address
self.__jobRunner = JobRunner.JobRunner()
self.__attrManifestDict = attrManifestDict if isinstance(attrManifestDict, dict) else {}
def __initDBLiason(self, *args):
self.__dbLiason = DbLiason.HandlerLiason(self.__address)
return self.__dbLiason is not None
def initDBLiason(self, callback=None):
return self.__jobRunner.run(self.__initDBLiason, None, callback)
def setConnectedAddress(self, addr):
self.setAddress(addr)
if hasattr(self.__dbLiason, 'shutdown'):
self.__dbLiason.shutdown()
return self.initDBLiason()
def getDBLiason(self):
return self.__dbLiason
def getAddress(self):
return self.__address
def __setAddress(self, newAddress):
self.__address = newAddress
def setAddress(self, newAddress, callback=None):
return self.__jobRunner.run(self.__setAddress, None, callback, newAddress)
def __setAttrManifestDict(self, attrManifestDict, *args):
if not isinstance(attrManifestDict, dict):
return False
else:
self.__attrManifestDict = attrManifestDict
return True
def setAttrManifestDict(self, attrManifestDict, callback=None):
return self.__jobRunner.run(self.__setAttrManifestDict, None, callback, attrManifestDict)
def getAttrManifestDict(self):
return self.__attrManifestDict
def __filterAttrsPresentInManifest(self, restMethod, stateDict=None):
if isinstance(stateDict, dict):
dictatedManifest = self.__attrManifestDict.get(restMethod, [])
screenedAttrs = [attr for attr in dictatedManifest if attr in stateDict]
return dict((okAttr, stateDict[okAttr]) for okAttr in screenedAttrs)
def filterForRestGet(self, stateDict, callback=None):
return self.__jobRunner.run(self.__filterAttrsPresentInManifest, None, callback, 'get', stateDict)
def ___opOnDBLiason(self, varsDict, restMethodName, onFinish):
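        # Dispatches dynamically on restMethodName ('Get', 'Post', 'Put' or 'Delete'):
        # the matching filterForRest<Method> helper screens the attributes against the
        # manifest, the corresponding <method>Conn call is made on the DB liaison, and
        # the raw HTTP-style result is normalized by __prepareAndParseNetResult.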
okAttrDict = getattr(self, 'filterForRest%s'%(restMethodName))(varsDict)
relatedDBHandlerMethod = '%sConn'%(restMethodName.lower())
response = self.__prepareAndParseNetResult(getattr(self.__dbLiason, relatedDBHandlerMethod)(okAttrDict))
if hasattr(onFinish, '__call__'):
onFinish(response)
else:
return response
def __getConn(self, varsDict, onFinish):
return self.___opOnDBLiason(varsDict, 'Get', onFinish)
def getConn(self, varsDict, onFinish=None, callback=None):
return self.__jobRunner.run(self.__getConn, None, callback, varsDict, onFinish)
def filterForRestDelete(self, stateDict, callback=None):
return self.__jobRunner.run(self.__filterAttrsPresentInManifest, None, callback, 'delete', stateDict)
def __deleteConn(self, varsDict, onFinish):
return self.___opOnDBLiason(varsDict, 'Delete', onFinish)
def deleteConn(self, varsDict, onFinish=None, callback=None):
return self.__jobRunner.run(self.__deleteConn, None, callback, varsDict, onFinish)
def filterForRestPost(self, stateDict, callback=None):
return self.__jobRunner.run(self.__filterAttrsPresentInManifest, None, callback, 'post', stateDict)
def __postConn(self, varsDict, onFinish=None):
return self.___opOnDBLiason(varsDict, 'Post', onFinish)
def postConn(self, varsDict, onFinish=None, callback=None):
return self.__jobRunner.run(self.__postConn, None, callback, varsDict, onFinish)
def filterForRestPut(self, stateDict, callback=None):
return self.__jobRunner.run(self.__filterAttrsPresentInManifest, None, callback, 'put', stateDict)
def __putConn(self, varsDict, onFinish):
return self.___opOnDBLiason(varsDict, 'Put', onFinish)
def putConn(self, varsDict, onFinish=None, callback=None):
return self.__jobRunner.run(self.__putConn, None, callback, varsDict, onFinish)
def __prepareAndParseNetResult(self, httpResponse):
outData = dict(status_code=httpResponse.get('status_code', 400))
try:
outData['value'] = json.loads(httpResponse['value'].decode())
except Exception as e:
print('\033[91mUnhandled exception\033[00m', e)
finally:
return outData
| {
"content_hash": "66169a1c7b9800551b5b58bac48585c9",
"timestamp": "",
"source": "github",
"line_count": 112,
"max_line_length": 112,
"avg_line_length": 40.32142857142857,
"alnum_prop": 0.6767050487156776,
"repo_name": "odeke-em/utils",
"id": "ea0ec68204ab7cd7dfe93254ba7eb70814600c10",
"size": "4585",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "connectable/src/Connectable.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "127184"
},
{
"name": "C++",
"bytes": "6311"
},
{
"name": "Go",
"bytes": "5411"
},
{
"name": "Makefile",
"bytes": "1957"
},
{
"name": "Python",
"bytes": "81302"
},
{
"name": "Shell",
"bytes": "1551"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: azure_rm_image
version_added: "2.5"
short_description: Manage Azure image.
description:
    - Create or delete an image from a virtual machine, blob URI, managed disk or snapshot.
options:
resource_group:
description:
- Name of resource group.
required: true
name:
description:
- Name of the image.
required: true
source:
description:
- OS disk source from the same region.
- It can be a virtual machine, OS disk blob URI, managed OS disk, or OS snapshot.
            - Each type of source except for blob URI can be given as a resource id, a name or a dict containing C(resource_group), C(name) and C(type).
            - If the source type is blob URI, the source should be the full URI of the blob as a string.
            - If you specify C(type) in a dict, acceptable values are C(disks), C(virtual_machines) and C(snapshots).
type: raw
required: true
data_disk_sources:
description:
- List of data disk sources, including unmanaged blob URI, managed disk id or name, or snapshot id or name.
type: list
location:
description:
- Location of the image. Derived from I(resource_group) if not specified.
os_type:
description: The OS type of image.
choices:
- Windows
- Linux
state:
description:
            - Assert the state of the image. Use C(present) to create or update an image and C(absent) to delete an image.
default: present
choices:
- absent
- present
extends_documentation_fragment:
- azure
- azure_tags
author:
- "Yuwei Zhou (@yuwzho)"
'''
EXAMPLES = '''
- name: Create an image from a virtual machine
azure_rm_image:
resource_group: myResourceGroup
name: myImage
source: myVirtualMachine
- name: Create an image from os disk
azure_rm_image:
resource_group: myResourceGroup
name: myImage
source: /subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroup/myResourceGroup/providers/Microsoft.Compute/disks/disk001
data_disk_sources:
- datadisk001
- datadisk002
os_type: Linux
- name: Create an image from os disk via dict
azure_rm_image:
resource_group: myResourceGroup
name: myImage
source:
type: disks
resource_group: myResourceGroup
name: disk001
data_disk_sources:
- datadisk001
- datadisk002
os_type: Linux
- name: Delete an image
azure_rm_image:
state: absent
resource_group: myResourceGroup
name: myImage
source: testvm001
'''
RETURN = '''
id:
description: Image resource path.
type: str
returned: success
example: "/subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroup/myResourceGroup/providers/Microsoft.Compute/images/myImage"
''' # NOQA
from ansible.module_utils.azure_rm_common import AzureRMModuleBase, format_resource_id
try:
from msrestazure.tools import parse_resource_id
from msrestazure.azure_exceptions import CloudError
except ImportError:
# This is handled in azure_rm_common
pass
class AzureRMImage(AzureRMModuleBase):
def __init__(self):
self.module_arg_spec = dict(
resource_group=dict(type='str', required=True),
name=dict(type='str', required=True),
state=dict(type='str', default='present', choices=['present', 'absent']),
location=dict(type='str'),
source=dict(type='raw'),
data_disk_sources=dict(type='list', default=[]),
os_type=dict(type='str', choices=['Windows', 'Linux'])
)
self.results = dict(
changed=False,
id=None
)
required_if = [
('state', 'present', ['source'])
]
self.resource_group = None
self.name = None
self.state = None
self.location = None
self.source = None
self.data_disk_sources = None
self.os_type = None
super(AzureRMImage, self).__init__(self.module_arg_spec, supports_check_mode=True, required_if=required_if)
def exec_module(self, **kwargs):
for key in list(self.module_arg_spec.keys()) + ['tags']:
setattr(self, key, kwargs[key])
results = None
changed = False
image = None
if not self.location:
# Set default location
resource_group = self.get_resource_group(self.resource_group)
self.location = resource_group.location
self.log('Fetching image {0}'.format(self.name))
image = self.get_image()
if image:
self.check_provisioning_state(image, self.state)
results = image.id
# update is not supported except for tags
update_tags, tags = self.update_tags(image.tags)
if update_tags:
changed = True
self.tags = tags
if self.state == 'absent':
changed = True
# the image does not exist and create a new one
elif self.state == 'present':
changed = True
self.results['changed'] = changed
self.results['id'] = results
if changed:
if self.state == 'present':
image_instance = None
# create from virtual machine
vm = self.get_source_vm()
if vm:
if self.data_disk_sources:
self.fail('data_disk_sources is not allowed when capturing image from vm')
image_instance = self.compute_models.Image(location=self.location,
source_virtual_machine=self.compute_models.SubResource(id=vm.id),
tags=self.tags)
else:
if not self.os_type:
self.fail('os_type is required to create the image')
os_disk = self.create_os_disk()
data_disks = self.create_data_disks()
storage_profile = self.compute_models.ImageStorageProfile(os_disk=os_disk, data_disks=data_disks)
image_instance = self.compute_models.Image(location=self.location, storage_profile=storage_profile, tags=self.tags)
# finally make the change if not check mode
if not self.check_mode and image_instance:
new_image = self.create_image(image_instance)
self.results['id'] = new_image.id
elif self.state == 'absent':
if not self.check_mode:
# delete image
self.delete_image()
# the delete does not actually return anything. if no exception, then we'll assume it worked.
self.results['id'] = None
return self.results
def resolve_storage_source(self, source):
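        # Resolve 'source' into exactly one of (blob_uri, disk, snapshot): a plain
        # string ending in '.vhd' is treated as an unmanaged blob URI; otherwise the
        # value is parsed as a resource id/dict and, failing an explicit type, the
        # name is looked up first as a snapshot and then as a managed disk.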
blob_uri = None
disk = None
snapshot = None
# blob URI can only be given by str
if isinstance(source, str) and source.lower().endswith('.vhd'):
blob_uri = source
return (blob_uri, disk, snapshot)
tokenize = dict()
if isinstance(source, dict):
tokenize = source
elif isinstance(source, str):
tokenize = parse_resource_id(source)
else:
self.fail("source parameter should be in type string or dictionary")
if tokenize.get('type') == 'disks':
disk = format_resource_id(tokenize['name'],
tokenize.get('subscription_id') or self.subscription_id,
'Microsoft.Compute',
'disks',
tokenize.get('resource_group') or self.resource_group)
return (blob_uri, disk, snapshot)
if tokenize.get('type') == 'snapshots':
snapshot = format_resource_id(tokenize['name'],
tokenize.get('subscription_id') or self.subscription_id,
'Microsoft.Compute',
'snapshots',
tokenize.get('resource_group') or self.resource_group)
return (blob_uri, disk, snapshot)
# not a disk or snapshots
if 'type' in tokenize:
return (blob_uri, disk, snapshot)
# source can be name of snapshot or disk
snapshot_instance = self.get_snapshot(tokenize.get('resource_group') or self.resource_group,
tokenize['name'])
if snapshot_instance:
snapshot = snapshot_instance.id
return (blob_uri, disk, snapshot)
disk_instance = self.get_disk(tokenize.get('resource_group') or self.resource_group,
tokenize['name'])
if disk_instance:
disk = disk_instance.id
return (blob_uri, disk, snapshot)
def create_os_disk(self):
blob_uri, disk, snapshot = self.resolve_storage_source(self.source)
snapshot_resource = self.compute_models.SubResource(id=snapshot) if snapshot else None
managed_disk = self.compute_models.SubResource(id=disk) if disk else None
return self.compute_models.ImageOSDisk(os_type=self.os_type,
os_state=self.compute_models.OperatingSystemStateTypes.generalized,
snapshot=snapshot_resource,
managed_disk=managed_disk,
blob_uri=blob_uri)
def create_data_disk(self, lun, source):
blob_uri, disk, snapshot = self.resolve_storage_source(source)
if blob_uri or disk or snapshot:
snapshot_resource = self.compute_models.SubResource(id=snapshot) if snapshot else None
managed_disk = self.compute_models.SubResource(id=disk) if disk else None
return self.compute_models.ImageDataDisk(lun=lun,
blob_uri=blob_uri,
snapshot=snapshot_resource,
managed_disk=managed_disk)
def create_data_disks(self):
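        # LUNs follow the position in data_disk_sources; sources that resolve to nothing yield None and are filtered out.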
return list(filter(None, [self.create_data_disk(lun, source) for lun, source in enumerate(self.data_disk_sources)]))
def get_source_vm(self):
        # self.source can be a vm (id/name/dict), or not a vm. Return the vm only if it is an existing vm.
resource = dict()
if isinstance(self.source, dict):
if self.source.get('type') != 'virtual_machines':
return None
resource = dict(type='virtualMachines',
name=self.source['name'],
resource_group=self.source.get('resource_group') or self.resource_group)
elif isinstance(self.source, str):
vm_resource_id = format_resource_id(self.source,
self.subscription_id,
'Microsoft.Compute',
'virtualMachines',
self.resource_group)
resource = parse_resource_id(vm_resource_id)
else:
self.fail("Unsupported type of source parameter, please give string or dictionary")
return self.get_vm(resource['resource_group'], resource['name']) if resource['type'] == 'virtualMachines' else None
def get_snapshot(self, resource_group, snapshot_name):
return self._get_resource(self.compute_client.snapshots.get, resource_group, snapshot_name)
def get_disk(self, resource_group, disk_name):
return self._get_resource(self.compute_client.disks.get, resource_group, disk_name)
def get_vm(self, resource_group, vm_name):
return self._get_resource(self.compute_client.virtual_machines.get, resource_group, vm_name, 'instanceview')
def get_image(self):
return self._get_resource(self.compute_client.images.get, self.resource_group, self.name)
def _get_resource(self, get_method, resource_group, name, expand=None):
try:
if expand:
return get_method(resource_group, name, expand=expand)
else:
return get_method(resource_group, name)
except CloudError as cloud_err:
# Return None iff the resource is not found
if cloud_err.status_code == 404:
self.log('{0}'.format(str(cloud_err)))
return None
self.fail('Error: failed to get resource {0} - {1}'.format(name, str(cloud_err)))
except Exception as exc:
self.fail('Error: failed to get resource {0} - {1}'.format(name, str(exc)))
def create_image(self, image):
try:
poller = self.compute_client.images.create_or_update(self.resource_group, self.name, image)
new_image = self.get_poller_result(poller)
except Exception as exc:
self.fail("Error creating image {0} - {1}".format(self.name, str(exc)))
self.check_provisioning_state(new_image)
return new_image
def delete_image(self):
self.log('Deleting image {0}'.format(self.name))
try:
poller = self.compute_client.images.delete(self.resource_group, self.name)
result = self.get_poller_result(poller)
except Exception as exc:
self.fail("Error deleting image {0} - {1}".format(self.name, str(exc)))
return result
def main():
AzureRMImage()
if __name__ == '__main__':
main()
| {
"content_hash": "0f2157d3c2b815dea8b3cca429bbe11c",
"timestamp": "",
"source": "github",
"line_count": 363,
"max_line_length": 147,
"avg_line_length": 39.30027548209367,
"alnum_prop": 0.5666619935511005,
"repo_name": "SergeyCherepanov/ansible",
"id": "b5dff335e47f160756ab8cf7b141d85ffe607500",
"size": "14438",
"binary": false,
"copies": "12",
"ref": "refs/heads/master",
"path": "ansible/ansible/modules/cloud/azure/azure_rm_image.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Shell",
"bytes": "824"
}
],
"symlink_target": ""
} |
from django.contrib import admin
from .models import *
class EventAdmin(admin.ModelAdmin):
list_display = ('id', 'title', 'open_date', 'close_date')
class AccommodationAdmin(admin.ModelAdmin):
list_display = ('id', 'event', '__str__', 'capacity')
list_filter = ('event',)
class ClassFeeAdmin(admin.ModelAdmin):
list_display = ('event', 'label', 'classes')
list_filter = ('event',)
class EntryAdmin(admin.ModelAdmin):
list_display = ('event', 'id', 'email')
list_filter = ('event',)
class ParticipantAdmin(admin.ModelAdmin):
list_display = ('entry', 'id', 'firstname', 'surname', 'cls')
list_filter = ('entry',)
class DirectoryAdmin(admin.ModelAdmin):
list_display = ('firstname', 'surname', 'club', 'cls')
list_filter = ('cls',)
admin.site.register(Event, EventAdmin)
admin.site.register(Accommodation, AccommodationAdmin)
admin.site.register(ClassFee, ClassFeeAdmin)
admin.site.register(Entry, EntryAdmin)
admin.site.register(Participant, ParticipantAdmin)
admin.site.register(Directory, DirectoryAdmin)
| {
"content_hash": "1492c18b97df6b84b4050b38867f2f19",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 62,
"avg_line_length": 30.147058823529413,
"alnum_prop": 0.72,
"repo_name": "peterkuma/tjrapid",
"id": "635390ac432d441c3a311952df9f7bdc4b159807",
"size": "1089",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "eventapp/admin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "35400"
},
{
"name": "HTML",
"bytes": "43945"
},
{
"name": "JavaScript",
"bytes": "257779"
},
{
"name": "Python",
"bytes": "82948"
},
{
"name": "XSLT",
"bytes": "2142"
}
],
"symlink_target": ""
} |
from dynet import *
import time
import random
LAYERS = 2
INPUT_DIM = 256 #50 #256
HIDDEN_DIM = 256 # 50 #1024
VOCAB_SIZE = 0
from collections import defaultdict
from itertools import count
import sys
import util
class RNNLanguageModel:
def __init__(self, model, LAYERS, INPUT_DIM, HIDDEN_DIM, VOCAB_SIZE, builder=SimpleRNNBuilder):
self.builder = builder(LAYERS, INPUT_DIM, HIDDEN_DIM, model)
self.lookup = model.add_lookup_parameters((VOCAB_SIZE, INPUT_DIM))
self.R = model.add_parameters((VOCAB_SIZE, HIDDEN_DIM))
self.bias = model.add_parameters((VOCAB_SIZE))
def BuildLMGraph(self, sent):
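        # Unroll the RNN over the sentence and accumulate the negative log-likelihood
        # of predicting each next token; returns the summed loss expression.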
renew_cg()
init_state = self.builder.initial_state()
R = parameter(self.R)
bias = parameter(self.bias)
errs = [] # will hold expressions
es=[]
state = init_state
for (cw,nw) in zip(sent,sent[1:]):
# assume word is already a word-id
x_t = lookup(self.lookup, int(cw))
state = state.add_input(x_t)
y_t = state.output()
r_t = bias + (R * y_t)
err = pickneglogsoftmax(r_t, int(nw))
errs.append(err)
nerr = esum(errs)
return nerr
def sample(self, first=1, nchars=0, stop=-1):
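        # Sample a sequence one token at a time: feed the previous sample back in and
        # draw the next token from the softmax output by inverse-CDF sampling.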
res = [first]
renew_cg()
state = self.builder.initial_state()
R = parameter(self.R)
bias = parameter(self.bias)
cw = first
while True:
x_t = lookup(self.lookup, cw)
state = state.add_input(x_t)
y_t = state.output()
r_t = bias + (R * y_t)
ydist = softmax(r_t)
dist = ydist.vec_value()
rnd = random.random()
for i,p in enumerate(dist):
rnd -= p
if rnd <= 0: break
res.append(i)
cw = i
if cw == stop: break
if nchars and len(res) > nchars: break
return res
if __name__ == '__main__':
train = util.CharsCorpusReader(sys.argv[1],begin="<s>")
vocab = util.Vocab.from_corpus(train)
VOCAB_SIZE = vocab.size()
model = Model()
sgd = SimpleSGDTrainer(model)
#lm = RNNLanguageModel(model, LAYERS, INPUT_DIM, HIDDEN_DIM, VOCAB_SIZE, builder=SimpleRNNBuilder)
lm = RNNLanguageModel(model, LAYERS, INPUT_DIM, HIDDEN_DIM, VOCAB_SIZE, builder=LSTMBuilder)
train = list(train)
chars = loss = 0.0
for ITER in range(100):
random.shuffle(train)
for i,sent in enumerate(train):
_start = time.time()
if i % 50 == 0:
sgd.status()
if chars > 0: print(loss / chars,)
for _ in range(1):
samp = lm.sample(first=vocab.w2i["<s>"],stop=vocab.w2i["\n"])
print("".join([vocab.i2w[c] for c in samp]).strip())
loss = 0.0
chars = 0.0
chars += len(sent)-1
isent = [vocab.w2i[w] for w in sent]
errs = lm.BuildLMGraph(isent)
loss += errs.scalar_value()
errs.backward()
sgd.update(1.0)
#print "TM:",(time.time() - _start)/len(sent)
print("ITER",ITER,loss)
sgd.status()
sgd.update_epoch(1.0)
| {
"content_hash": "bb3a563e41267185b2d9b81e582b5e1f",
"timestamp": "",
"source": "github",
"line_count": 105,
"max_line_length": 102,
"avg_line_length": 31.504761904761907,
"alnum_prop": 0.530229746070133,
"repo_name": "kamigaito/cnn",
"id": "eb190be5f45ea88eeec2fb5a42d7595091ea2f8c",
"size": "3308",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "examples/python/rnnlm.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "562793"
},
{
"name": "CMake",
"bytes": "18889"
},
{
"name": "Cuda",
"bytes": "4308"
},
{
"name": "Makefile",
"bytes": "697"
},
{
"name": "Python",
"bytes": "128659"
}
],
"symlink_target": ""
} |
from collections import defaultdict
from util import *
import re,sys
from optparse import OptionParser
#from DependencyGraph import *
# Error definitions
class LexerError(Exception):
pass
class ParserError(Exception):
pass
class Node():
#node_id = 0 #static counter, unique for each node
#mapping_table = {} # old new index mapping table
def __init__(self, parent, trace, node_label, firsthit, leaf, depth, seqID):
"""
        Initialize a node in the graph.
        Each node keeps a record of its trace, i.e. the edge label through which it was reached,
        so nodes with the same other attributes may still have different traces.
"""
self.parent = parent
self.trace = trace
self.node_label = node_label
self.firsthit = firsthit
self.leaf = leaf
self.depth = depth
self.children = []
self.seqID = seqID
#Node.node_id += 1
#self.node_id = node_id
def __str__(self):
return str((self.trace, self.node_label, self.depth, self.seqID))
def __repr__(self):
return str((self.trace, self.node_label, self.depth, self.seqID))
class AMR(defaultdict):
"""
An abstract meaning representation.
Basic idea is based on bolinas' hypergraph for amr.
Here one AMR is a rooted, directed, acyclic graph.
We also use the edge-label style in bolinas.
"""
def __init__(self,*args, **kwargs):
defaultdict.__init__(self,ListMap,*args,**kwargs)
self.roots = []
self.external_nodes = {}
# attributes to be added
self.node_to_concepts = {}
self.align_to_sentence = None
self.reentrance_triples = []
@classmethod
def parse_string(cls,amr_string,RENAME_NODE=False):
"""
        Parse a PENMAN-style string representation of an AMR and return an AMR object
>>>x = AMR.parse_string("(a / and :op1(恶化 :ARG0(它) :ARG1(模式 :mod(开发)) :time (已 经)) :op2(堵塞 :ARG0(它) :ARG1(交通 :mod(局部)) :location(a / around :op1(出口)))))")
>>>
.
"""
def make_compiled_regex(rules):
regexstr = '|'.join('(?P<%s>%s)' % (name, rule) for name, rule in rules)
return re.compile(regexstr)
def rename_node(parentnodelabel,parentconcept):
if not isinstance(parentnodelabel,(Quantity,Polarity,Interrogative,StrLiteral)):
# graph node rebuild
if parentconcept is not None:
amr.node_to_concepts[node_idx] = parentconcept
mapping_table[parentnodelabel] = node_idx
parentnodelabel = node_idx
node_idx += 1
else:
# not revisiting and concept is None
if parentnodelabel not in mapping_table:
amr.node_to_concepts[node_idx] = parentnodelabel
parentnodelabel = node_idx
node_idx += 1
else: #revisiting
parentnodelabel = mapping_table[parentnodelabel]
PNODE = 1
CNODE = 2
EDGE = 3
RCNODE = 4
amr = cls()
stack = []
state = 0
node_idx = 0; # sequential new node index
mapping_table = {}; # old new index mapping table
lex_rules = [
("LPAR", '\('),
("RPAR",'\)'),
("COMMA",','),
("SLASH",'/'),
("EDGELABEL",":[^\s()]+"),
("STRLITERAL",u'"[^"]+"|\u201c[^\u201d]+\u201d'),
("LITERAL","'[^\s(),]+"),
("INTERROGATIVE","\s(interrogative|imperative|expressive)(?=[\s\)])"),
("QUANTITY","[0-9][0-9Ee^+\-\.,:]*(?=[\s\)])"),
("IDENTIFIER","[^\s()]+"), #no blank within characters
("POLARITY","\s(\-|\+)(?=[\s\)])")
]
token_re = make_compiled_regex(lex_rules)
#lexer = Lexer(lex_rules)
#amr.reentrance_triples = []
for match in token_re.finditer(amr_string):
token = match.group()
type = match.lastgroup
#if type == "STRLITERAL":
# import pdb
# pdb.set_trace()
#print token.strip(),type
if state == 0:
if type == "LPAR":
state = 1
else: raise ParserError, "Unexpected token %s"%(token)
elif state == 1:
if type == "IDENTIFIER":
stack.append((PNODE,token.strip(),None))
state = 2
elif type == "QUANTITY":
stack.append((PNODE,Quantity(token.strip()),None))
state = 2
elif type == "STRLITERAL":
stack.append((PNODE,StrLiteral(token.strip()),None))
state = 2
else: raise ParserError , "Unexpected token %s"%(token.encode('utf8'))
elif state == 2:
if type == "SLASH":
state = 3
elif type == "EDGELABEL":
stack.append((EDGE,token[1:]))
state = 5
elif type == "RPAR":
forgetme, parentnodelabel, parentconcept = stack.pop()
assert forgetme == PNODE
assert parentconcept == None
if RENAME_NODE:
rename_node(parentnodelabel,parentconcept)
else:
if not parentnodelabel in amr.node_to_concepts or parentconcept is not None:
amr.node_to_concepts[parentnodelabel] = parentconcept
foo = amr[parentnodelabel]
if stack:
stack.append((CNODE,parentnodelabel,parentconcept))
state = 6
else:
amr.roots.append(parentnodelabel)
state = 0
else: raise ParserError, "Unexpected token %s"%(token)
elif state == 3:
if type == "IDENTIFIER" or "QUANTITY":
assert stack[-1][0] == PNODE
nodelabel = stack.pop()[1]
stack.append((PNODE,nodelabel,token))
state = 4
else: raise ParserError, "Unexpected token %s"%(token)
elif state == 4:
if type == "EDGELABEL":
stack.append((EDGE,token[1:]))
state = 5
elif type == "RPAR":
forgetme, parentnodelabel, parentconcept = stack.pop()
assert forgetme == PNODE
foo = amr[parentnodelabel] # add only the node
#print state,parentnodelabel,parentconcept
if parentconcept is not None:
amr.node_to_concepts[parentnodelabel] = parentconcept
if stack:
stack.append((CNODE,parentnodelabel,parentconcept))
state = 6
else:
amr.roots.append(parentnodelabel)
state = 0
else:
print amr_string
raise ParserError, "Unexpected token %s"%(token.encode('utf8'))
elif state == 5:
if type == "LPAR":
state = 1
elif type == "QUANTITY":
stack.append((CNODE,Quantity(token),None))
state = 6
elif type == "STRLITERAL":
stack.append((CNODE,StrLiteral(token[1:-1]),None))
state = 6
elif type == "INTERROGATIVE":
stack.append((CNODE,Interrogative(token[1:]),None))
state = 6
elif type == "POLARITY":
stack.append((CNODE,Polarity(token.strip()),None))
state = 6
elif type == "IDENTIFIER":
stack.append((RCNODE,token,None))
state = 6
elif type == "EDGELABEL": #Unary edge
stack.append((CNODE,None,None))
stack.append((EDGE,token[1:]))
state = 5
assert False
elif type == "RPAR":
stack.append((CNODE,None,None))
edges = []
assert False
while stack[-1][0] != PNODE:
children = []
#one edge may have multiple children/tail nodes
while stack[-1][0] == CNODE:
forgetme, childnodelabel, childconcept = stack.pop()
children.append((childnodelabel,childconcept))
assert stack[-1][0] == EDGE
forgetme, edgelabel = stack.pop()
edges.append((edgelabel,children))
forgetme,parentnodelabel,parentconcept = stack.pop()
#print state,parentnodelabel,parentconcept
#check for annotation error
if parentnodelabel in amr.node_to_concepts.keys():
#concept has been defined by the children,
#then they must have different concepts, otherwise the children's concepts should be None
#(coreference)
if amr.node_to_concepts[parentnodelabel] == parentconcept:
sys.stderr.write("Wrong annotation format: Revisited concepts %s should be ignored.\n" % parentconcept)
else:
sys.stderr.write("Wrong annotation format: Different concepts %s and %s have same node label(index)\n" % (amr.node_to_concepts[parentnodelabel],parentconcept))
parentnodelabel = parentnodelabel + "1"
if RENAME_NODE:
rename_node(parentnodelabel,parentconcept)
else:
if not parentnodelabel in amr.node_to_concepts or parentconcept is not None:
amr.node_to_concepts[parentnodelabel] = parentconcept
for edgelabel,children in reversed(edges):
hypertarget = []
for node, concept in children:
if node is not None and not isinstance(node,(Quantity,Polarity,Interrogative,StrLiteral)) and not node in amr.node_to_concepts:
if RENAME_NODE:
rename_node(node,concept)
else:
if concept:
amr.node_to_concepts[node] = concept
hypertarget.append(node)
hyperchild = tuple(hypertarget)
amr._add_triple(parentnodelabel,edgelabel,hyperchild)
if stack: #we have done with current level
state = 6
stack.append((CNODE, parentnodelabel, parentconcept))
else: #we have done with this subgraph
state = 0
                        amr.roots.append(parentnodelabel)
elif state == 6:
if type == "RPAR":
edges = []
reedges = []
while stack[-1][0] != PNODE:
children = []
reentrances = []
#one edge may have multiple children/tail nodes
while stack[-1][0] == CNODE or stack[-1][0] == RCNODE:
CTYPE, childnodelabel, childconcept = stack.pop()
if CTYPE == RCNODE:
reentrances.append((childnodelabel,childconcept))
children.append((childnodelabel,childconcept))
assert stack[-1][0] == EDGE
forgetme, edgelabel = stack.pop()
edges.append((edgelabel,children))
reedges.append((edgelabel,reentrances))
forgetme,parentnodelabel,parentconcept = stack.pop()
#print "PNODE",state,parentnodelabel,parentconcept
#check for annotation error
if parentnodelabel in amr.node_to_concepts.keys():
#concept has been defined by the children,
#then they must have different concepts, otherwise the children's concepts should be None
#(coreference)
if amr.node_to_concepts[parentnodelabel] == parentconcept:
sys.stderr.write("Wrong annotation format: Revisited concepts %s should be ignored.\n" % parentconcept)
else:
sys.stderr.write("Wrong annotation format: Different concepts %s and %s have same node label(index)\n" % (amr.node_to_concepts[parentnodelabel],parentconcept))
parentnodelabel = parentnodelabel + "1"
if RENAME_NODE:
rename_node(parentnodelabel,parentconcept)
else:
if not parentnodelabel in amr.node_to_concepts or parentconcept is not None:
amr.node_to_concepts[parentnodelabel] = parentconcept
for edgelabel,children in reversed(edges):
hypertarget = []
for node, concept in children:
if node is not None and not isinstance(node,(Quantity,Polarity,Interrogative,StrLiteral)) and not node in amr.node_to_concepts:
if RENAME_NODE:
rename_node(node,concept)
else:
if concept:
amr.node_to_concepts[node] = concept
hypertarget.append(node)
hyperchild = tuple(hypertarget)
amr._add_triple(parentnodelabel,edgelabel,hyperchild)
for edgelabel,reentrance in reedges:
hreent = []
for node,concept in reentrance:
hreent.append(node)
amr._add_reentrance(parentnodelabel,edgelabel,hreent)
if stack: #we have done with current level
state = 6
stack.append((CNODE, parentnodelabel, parentconcept))
else: #we have done with this subgraph
state = 0
amr.roots.append(parentnodelabel)
elif type == "COMMA": # to seperate multiple children/tails
state = 7
elif type == "EDGELABEL":
stack.append((EDGE,token[1:]))
state = 5
else: raise ParserError, "Unexpected token %s"%(token.encode('utf8'))
elif state == 7:
if type == "IDENTIFIER":
stack.append((CNODE, token, None)) # another children
state = 6
                elif type == "LPAR":
state = 1
else: raise ParserError, "Unexpected token %s"%(token)
if state != 0 and stack:
raise ParserError, "mismatched parenthesis"
return amr
def get_variable(self,posID):
"""return variable given postition ID"""
reent_var = None
seq = self.dfs()[0]
for node in seq:
if node.seqID == posID:
return node.node_label
return None
def get_match(self, subgraph):
"""find the subgraph"""
def is_match(dict1, dict2):
rel_concept_pairs = []
for rel, cpt in dict2.items():
rel_concept_pairs.append(rel+'@'+cpt)
if not (rel in dict1 and cpt in dict1[rel]):
return None
return rel_concept_pairs
subroot = subgraph.keys()[0] # sub root's concept
concepts_on_the_path = []
for v in self.node_to_concepts:
if v[0] == subroot[0] and self.node_to_concepts[v] == subroot:
concepts_on_the_path = [subroot]
rcp = is_match(self[v], subgraph[subroot])
if rcp is not None: return v, concepts_on_the_path+rcp
#for rel, cpt in subgraph[subroot].items():
# if rel in self[v] and cpt in self[v][rel]:
# concepts_on_the_path.append(rel+'@'+cpt)
return None, None
def get_pid(self,var):
seq = self.dfs()[0]
for node in seq:
if node.node_label == var:
return node.seqID
return None
'''
posn_queue = posID.split('.')
var_list = self.roots
past_pos_id = []
while posn_queue:
posn = int(posn_queue.pop(0))
past_pos_id.append(posn)
print var_list,past_pos_id,posn,visited_var
variable = var_list[posn]
var_list = []
vars = [v[0] for v in self[variable].values()]
i = 0
while i < len(vars):
k = vars[i]
if k not in visited_var:
var_list.append(k)
elif isinstance(k,(StrLiteral,Quantity)):
var_list.append(k)
else:
if visited_var[k] == '.'.join(str(j) for j in past_pos_id+[i]):
var_list.append(k)
else:
vars.pop(i)
i -= 1
i += 1
'''
return variable
def get_ref_graph(self,alignment):
"""return the gold dependency graph based on amr graph"""
dpg = DepGraph()
for h in self:
hstr = self.node_to_concepts[h] if h in self.node_to_concepts else h
hidx = alignment[h][0]
if not hidx in dpg.nodes.keys():
h_node = DNode(hidx,hstr)
dpg.addNode(h_node)
for ds in self[h].values():
d = ds[0]
dstr = self.node_to_concepts[d] if d in self.node_to_concepts else d
didx = alignment[d][0]
if not didx in dpg.nodes.keys():
d_node = DNode(didx,dstr)
dpg.addNode(d_node)
dpg.addEdge(hidx,didx)
#root
root = DNode(0,'ROOT')
dpg.addNode(root)
dpg.addEdge(0,alignment[self.roots[0]][0])
return dpg
'''
def get_unlabel_arcs(self):
arc_set = set()
for h in self:
for d in self[h].values():
arc_set.add((h,d[0]))
return arc_set
'''
def _add_reentrance(self,parent,relation,reentrance):
if reentrance:
self.reentrance_triples.append((parent,relation,reentrance[0]))
def _add_triple(self, parent, relation, child, warn=None):
"""
Add a (parent, relation, child) triple to the DAG.
"""
if type(child) is not tuple:
child = (child,)
if parent in child:
#raise Exception('self edge!')
#sys.stderr.write("WARNING: Self-edge (%s, %s, %s).\n" % (parent, relation, child))
if warn: warn.write("WARNING: Self-edge (%s, %s, %s).\n" % (parent, relation, child))
#raise ValueError, "Cannot add self-edge (%s, %s, %s)." % (parent, relation, child)
for c in child:
x = self[c]
for rel, test in self[c].items():
if parent in test:
if warn:
warn.write("WARNING: (%s, %s, %s) produces a cycle with (%s, %s, %s)\n" % (parent, relation, child, c, rel, test))
#ATTENTION:maybe wrong, test may not have only one element, deal with it later
concept1 = self.node_to_concepts[parent]
concept2 = self.node_to_concepts[test[0]]
#print concept1,concept2
if concept1 != concept2:
warn.write("ANNOTATION ERROR: concepts %s and %s have same node label %s!" % (concept1, concepts2, parent))
#raise ValueError,"(%s, %s, %s) would produce a cycle with (%s, %s, %s)" % (parent, relation, child, c, rel, test)
self[parent].append(relation, child)
def set_alignment(self,alignment):
self.align_to_sentence = alignment
def bfs(self):
"""
breadth first search for the graph
return the bfs-ordered triples
"""
from collections import deque
visited_nodes = set()
amr_triples = []
sequence = []
nid = 0
for i,r in enumerate(self.roots):
seqID = str(i)
            queue = deque([((r,), None, None, 0, seqID)]) # node, incoming edge, parent and depth
amr_triples.append(('root','ROOT',r))
while queue:
                next,rel,parent,depth,seqID = queue.popleft()
for n in next:
firsthit = (parent,rel,n) not in self.reentrance_triples
leaf = False if self[n] else True
                    node = Node(parent, rel, n, firsthit, leaf, depth, seqID)
#nid += 1
sequence.append(node)
if n in visited_nodes or (parent,rel,n) in self.reentrance_triples:
continue
visited_nodes.add(n)
p = len([child for rel,child in self[n].items() if (n,rel,child[0]) not in self.reentrance_triples]) - 1
for rel,child in reversed(self[n].items()):
if not (rel,n,child[0]) in amr_triples:
if (n,rel,child[0]) not in self.reentrance_triples:
                                queue.append((child,rel,n,depth+1,seqID+'.'+str(p)))
p -= 1
else:
                                queue.append((child,rel,n,depth+1,None))
amr_triples.append((rel,n,child[0]))
return (sequence,amr_triples)
def print_triples(self):
result = ''
amr_triples = self.bfs()[1]
for rel,parent,child in amr_triples:
if not isinstance(child,(Quantity,Polarity,Interrogative,StrLiteral)):
result += "%s(%s,%s)\n"%(rel,self.node_to_concepts[parent],self.node_to_concepts[child])
else:
result += "%s(%s,%s)\n"%(rel,self.node_to_concepts[parent],child)
return result
def dfs(self):
"""
depth first search for the graph
return dfs ordered nodes and edges
TO-DO: this visiting order information can be obtained
through the reading order of amr strings; modify the class
to OrderedDefaultDict;
"""
visited_nodes = set()
visited_edges = []
sequence = []
for i,r in enumerate(self.roots):
seqID = str(i)
stack = [((r,),None,None,0,seqID)] # record the node, incoming edge, parent, depth and unique identifier
#all_nodes = []
while stack:
next,rel,parent,depth,seqID = stack.pop()
for n in next:
if self.reentrance_triples:
firsthit = (parent,rel,n) not in self.reentrance_triples
else:
firsthit = n not in visited_nodes
leaf = False if self[n] else True
node = Node(parent, rel, n, firsthit, leaf, depth, seqID)
#print self.node_to_concepts
sequence.append(node)
# same StrLiteral/Quantity/Polarity should not be revisited
                    if self.reentrance_triples: # keep the same visiting order as the AMR string that was read in
if n in visited_nodes or (parent,rel,n) in self.reentrance_triples:
continue
else:
if n in visited_nodes:
continue
visited_nodes.add(n)
p = len([child for rel,child in self[n].items() if (n,rel,child[0]) not in self.reentrance_triples]) - 1
for rel, child in reversed(self[n].items()):
#print rel,child
if not (rel, n, child[0]) in visited_edges:
#if child[0] not in visited_nodes or isinstance(child[0],(StrLiteral,Quantity)):
visited_edges.append((rel,n,child[0]))
if (n,rel,child[0]) not in self.reentrance_triples:
stack.append((child,rel,n,depth+1,seqID+'.'+str(p)))
p -= 1
else:
stack.append((child,rel,n,depth+1,None))
elif isinstance(child[0],(StrLiteral,Quantity)):
stack.append((child,rel,n,depth+1,seqID+'.'+str(p)))
p -= 1
else:
pass
return (sequence, visited_edges)
def replace_node(self, h_idx, idx):
"""for coreference, replace all occurrence of node idx to h_idx"""
visited_nodes = set()
visited_edges = set()
for i,r in enumerate(self.roots[:]):
stack = [((r,),None,None)] #node,incoming edge and preceding node
while stack:
next, rel, previous = stack.pop()
for n in next:
if n == idx:
if previous == None: # replace root
self.roots[i] = h_idx
break
self[previous].replace(rel,(h_idx,))
if n in visited_nodes:
continue
visited_nodes.add(n)
for rel, child in reversed(self[n].items()):
if not (n, rel, child) in visited_edges:
if child in visited_nodes:
stack.append((child,rel,n))
else:
visited_edges.add((n,rel,child))
stack.append((child,rel,n))
def find_rel(self,h_idx,idx):
"""find the relation between head_idx and idx"""
rels = []
for rel,child in self[h_idx].items():
#print child,idx
if child == (idx,):
rels.append(rel)
return rels
def replace_head(self,old_head,new_head,KEEP_OLD=True):
"""change the focus of current sub graph"""
for rel,child in self[old_head].items():
if child != (new_head,):
self[new_head].append(rel,child)
del self[old_head]
if KEEP_OLD:
foo = self[old_head]
self[new_head].append('NA',(old_head,))
def replace_rel(self,h_idx,old_rel,new_rel):
"""replace the h_idx's old_rel to new_rel"""
for v in self[h_idx].getall(old_rel):
self[h_idx].append(new_rel,v)
del self[h_idx][old_rel]
'''
def rebuild_index(self, node, sent_index_mapping=None):
"""assign non-literal node a new unique node label; replace the
original index with the new node id or sentence offset;
if we have been provided the sentence index mapping, we use the
sentence offsets as new node label instead of the serialized node id.
"""
if sent_index_mapping is None:
if node.node_label in self.node_to_concepts and self.node_to_concepts[node.node_label] is not None:
#update the node_to_concepts table
self.node_to_concepts[Node.node_id] = self.node_to_concepts[node.node_label]
del self.node_to_concepts[node.node_label]
Node.mapping_table[node.node_label] = Node.node_id
node.node_label = Node.node_id
elif self.node_label not in node_to_concepts and self.node_label in Node.mapping_table:
new_label = Node.mapping_table[self.node_label]
self.node_label = new_label
else:
#print Node.node_id,self.node_label
node_to_concepts[Node.node_id] = self.node_label
self.node_label = Node.node_id
'''
def is_named_entity(self, var):
edge_label_set = self[var].keys()
if 'name' in edge_label_set:
try:
assert 'wiki' in edge_label_set
except:
print 'ill-formed entity found'
print self.to_amr_string()
return False
return True
return False
def is_entity(self, var):
if var in self.node_to_concepts:
var_concept = self.node_to_concepts[var]
return var_concept.endswith('-entity') or var_concept.endswith('-quantity') or var_concept.endswith('-organization') or var_concept == 'amr-unknown'
return False
def is_predicate(self, var):
if var in self.node_to_concepts:
return re.match('.*-[0-9]+',self.node_to_concepts[var]) is not None
return False
def is_const(self, var):
return var not in self.node_to_concepts
def statistics(self):
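        """Walk the graph from the first root and count named entities, plain entities,
        predicates, ordinary variables, constants and reentrancies."""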
#sequence = self.dfs()[0]
named_entity_nums = defaultdict(int)
entity_nums = defaultdict(int)
predicate_nums = defaultdict(int)
variable_nums = defaultdict(int)
const_nums = defaultdict(int)
reentrancy_nums = 0
stack = [(self.roots[0],None,None,0)]
while stack:
cur_var, rel, parent, depth = stack.pop()
exclude_rels = []
if (parent, rel, cur_var) in self.reentrance_triples: # reentrancy here
reentrancy_nums += 1
continue
if self.is_named_entity(cur_var):
entity_name = self.node_to_concepts[cur_var]
named_entity_nums[entity_name] += 1
exclude_rels = ['name','wiki']
elif self.is_entity(cur_var): # entity does not have name relation
entity_name = self.node_to_concepts[cur_var]
entity_nums[entity_name] += 1
elif self.is_predicate(cur_var):
pred_name = self.node_to_concepts[cur_var]
predicate_nums[pred_name] += 1
elif self.is_const(cur_var):
const_nums[cur_var] += 1
else:
variable_name = self.node_to_concepts[cur_var]
variable_nums[variable_name] += 1
for rel, var in self[cur_var].items():
if rel not in exclude_rels:
stack.append((var[0], rel, cur_var, depth+1))
return named_entity_nums,entity_nums,predicate_nums,variable_nums,const_nums,reentrancy_nums
def to_amr_string(self):
amr_string = ""
seq = self.dfs()[0]
#always begin with root
assert seq[0].trace == None
dep_rec = 0
for node in seq:
if node.trace == None:
if node.firsthit and node.node_label in self.node_to_concepts:
amr_string += "(%s / %s"%(node.node_label,self.node_to_concepts[node.node_label])
else:
amr_string += "(%s"%(node.node_label)
else:
if node.depth >= dep_rec:
dep_rec = node.depth
else:
amr_string += "%s"%((dep_rec-node.depth)*')')
dep_rec = node.depth
if not node.leaf:
if node.firsthit and node.node_label in self.node_to_concepts:
amr_string += "\n%s:%s (%s / %s"%(node.depth*"\t",node.trace,node.node_label,self.node_to_concepts[node.node_label])
else:
amr_string += "\n%s:%s %s"%(node.depth*"\t",node.trace,node.node_label)
else:
if node.firsthit and node.node_label in self.node_to_concepts:
amr_string += "\n%s:%s (%s / %s)"%(node.depth*"\t",node.trace,node.node_label,self.node_to_concepts[node.node_label])
else:
if isinstance(node.node_label,StrLiteral):
amr_string += '\n%s:%s "%s"'%(node.depth*"\t",node.trace,node.node_label)
else:
amr_string += "\n%s:%s %s"%(node.depth*"\t",node.trace,node.node_label)
if dep_rec != 0:
amr_string += "%s"%((dep_rec)*')')
else:
amr_string += ')'
return amr_string
def __reduce__(self):
t = defaultdict.__reduce__(self)
return (t[0], ()) + (self.__dict__,) + t[3:]
if __name__ == "__main__":
opt = OptionParser()
opt.add_option("-v", action="store_true", dest="verbose")
(options, args) = opt.parse_args()
s = '''(a / and :op1(恶化 :ARG0(它) :ARG1(模式 :mod(开发)) :time (已经)) :op2(t / 堵塞 :ARG0(它) :ARG1(交通 :mod(局部)) :location(a / around :op1(出口))))'''
s1 = '''(a / and :op1 (c / change-01 :ARG0 (i / it) :ARG1 (p / pattern :mod (d / develop-02)) :ARG2 (b / bad :degree (m / more))) :op2 (c2 / cause-01 :ARG0 i :ARG1 (c3 / congest-01 :ARG1 (a2 / around :op1 (e / exit :poss i)) :ARG2 (t / traffic) :ARG1-of (l2 / localize-01))) :time (a3 / already))'''
s = s.decode('utf8')
#amr_ch = AMR.parse_string(s)
amr_en = AMR.parse_string(s1)
#print str(amr_en)
#print amr_en.dfs()
print amr_en.to_amr_string()
#print amr_ch
#print amr_ch.dfs()
#print amr_ch.to_amr_string()
| {
"content_hash": "4f3744002d4084de1e532f420463c1c1",
"timestamp": "",
"source": "github",
"line_count": 826,
"max_line_length": 330,
"avg_line_length": 41.705811138014525,
"alnum_prop": 0.48889662980057474,
"repo_name": "masterkeywikz/seq2graph",
"id": "a9c8428c156d12e8f63cb143c7a4232da5437fa9",
"size": "34643",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "amr2seq/amr_graph.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "10200"
},
{
"name": "Makefile",
"bytes": "11456"
},
{
"name": "Python",
"bytes": "1082495"
},
{
"name": "Shell",
"bytes": "3567"
}
],
"symlink_target": ""
} |
import logging
from time import sleep
from typing import List
from django.conf import settings
from telegram import InlineKeyboardButton, InlineKeyboardMarkup, \
InputMediaPhoto, InputMediaVideo, TelegramError
from memes_reposter.telegram_bot import bot
from .models import ImgurConfig, Media, Post
logger = logging.getLogger(__name__)
def publish_posts(posts: List[Post], config: ImgurConfig):
size = len(posts)
for i, post in enumerate(posts):
published = publish_post(post, config)
sleep(0.5)
if published:
logger.info('Published %3d/%d: %s', i + 1, size, repr(post))
post.save()
def publish_blank(posts: List[Post]):
for post in posts:
logger.info('Blank publishing: %s', repr(post))
post.save()
def publish_post(post: Post, config: ImgurConfig):
delete_on_fail = settings.IMGUR_DELETE_ON_FAIL
try:
if post.is_single:
publish_single(post, config)
else:
publish_album(post, config, delete_on_fail)
return True
except TelegramError as e:
logger.error('Error %s: %s for post %s', type(e), e, repr(post))
return False
def send_media(chat_id, media: Media, caption=None, reply_markup=None):
common = dict(chat_id=chat_id, caption=caption, reply_markup=reply_markup)
if media.animated:
return bot.send_video(video=media.link, **common)
else:
return bot.send_photo(photo=media.link, **common)
def get_media_input(media: Media, caption=None):
if media.animated:
inp = InputMediaVideo(media.link, caption=caption)
else:
inp = InputMediaPhoto(media.link, caption=caption)
return inp
def format_album_title(post: Post):
title = post.title.strip()
if post.images_count > len(post.medias):
title = f"🔥 Album [ {post.images_count} ]\n" + title
title += f"\n{post.link}"
return title
def post_title(post: Post, config: ImgurConfig):
title = format_album_title(post)
# reply_markup = build_keyboard_markup(post)
title_message = bot.send_message(
chat_id=config.chat_id, text=title,
# reply_markup=reply_markup,
disable_web_page_preview=True)
return title_message
def publish_single(post: Post, config: ImgurConfig):
title_message = post_title(post, config)
try:
media = post.medias[0]
send_media(config.chat_id, media)
except TelegramError as e:
logger.error('Error %s: %s for post %s', type(e), e, repr(post))
title_message.delete()
def publish_album(post: Post, config: ImgurConfig, delete_on_fail=True):
title_message = post_title(post, config)
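    # Telegram media groups accept at most 10 items, so any extra media is dropped.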
medias = [
get_media_input(media)
for media in post.medias[:10]
]
try:
bot.send_media_group(media=medias, chat_id=config.chat_id)
except TelegramError as e:
logger.error('Error %s: %s for post %s', type(e), e, repr(post))
if delete_on_fail:
title_message.delete()
def build_keyboard_markup(post: Post):
return InlineKeyboardMarkup([[
InlineKeyboardButton('comments', url=post.link + '#comments-container'),
InlineKeyboardButton('post', url=post.link),
]])
| {
"content_hash": "2e9036a602f242b2af2d5f2b67f67466",
"timestamp": "",
"source": "github",
"line_count": 107,
"max_line_length": 80,
"avg_line_length": 30.16822429906542,
"alnum_prop": 0.6496282527881041,
"repo_name": "vaniakosmos/memes-reposter",
"id": "785a3f6f076a0976641b4681d4fa926f9209ce05",
"size": "3231",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "apps/imgur/publisher.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1067"
},
{
"name": "Dockerfile",
"bytes": "203"
},
{
"name": "HTML",
"bytes": "11813"
},
{
"name": "JavaScript",
"bytes": "6442"
},
{
"name": "Makefile",
"bytes": "247"
},
{
"name": "Python",
"bytes": "81075"
},
{
"name": "Shell",
"bytes": "614"
}
],
"symlink_target": ""
} |
import re
def filter_objlist(olist, fieldname, fieldval):
"""
Returns a list with of the objects in olist that have a fieldname valued as fieldval
Parameters
----------
olist: list of objects
fieldname: string
fieldval: anything
Returns
-------
list of objets
"""
return [x for x in olist if getattr(x, fieldname) == fieldval]
def filter_list(lst, filt):
"""
Parameters
----------
lst: list
filter: function
Unary string filter function
Returns
-------
list
List of items that passed the filter
Example
-------
>>> l = ['12123123', 'N123213']
>>> filt = re.compile('\d*').match
    >>> nu_l = filter_list(l, filt)
"""
return [m for s in lst for m in (filt(s),) if m]
def match_list(lst, pattern, group_names=[]):
"""
Parameters
----------
lst: list of str
    pattern: string
group_names: list of strings
See re.MatchObject group docstring
Returns
-------
list of strings
Filtered list, with the strings that match the pattern
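    Example
    -------
    >>> match_list(['abc', '123', 'a1'], '\d+')
    ['123']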
"""
filtfn = re.compile(pattern).match
filtlst = filter_list(lst, filtfn)
if not group_names:
return [m.string for m in filtlst]
else:
return [m.group(group_names) for m in filtlst]
def search_list(lst, pattern):
"""
Parameters
----------
pattern: string
lst: list of strings
Returns
-------
filtered_list: list of str
Filtered lists with the strings in which the pattern is found.
"""
filt = re.compile(pattern).search
return filter_list(lst, filt)
def append_to_keys(adict, preffix):
"""
Parameters
----------
adict:
preffix:
Returns
-------
"""
return {preffix + str(key): (value if isinstance(value, dict) else value)
for key, value in list(adict.items())}
def append_to_list(lst, preffix):
"""
Parameters
----------
lst:
preffix:
Returns
-------
"""
return [preffix + str(item) for item in lst]
def is_valid_regex(string):
"""
Checks whether the re module can compile the given regular expression.
Parameters
----------
string: str
Returns
-------
boolean
"""
try:
re.compile(string)
is_valid = True
except re.error:
is_valid = False
return is_valid
def is_regex(string):
"""
TODO: improve this!
Returns True if the given string is considered a regular expression,
False otherwise.
    It will be considered a regex if it contains a regex metacharacter
    and can be correctly compiled by re.compile
:param string: str
"""
is_regex = False
regex_chars = ['\\', '(', '+', '^', '$']
for c in regex_chars:
if string.find(c) > -1:
return is_valid_regex(string)
return is_regex
def is_fnmatch_regex(string):
"""
Returns True if the given string is considered a fnmatch
regular expression, False otherwise.
    It will look for the characters '!', '*' and '$'.
:param string: str
"""
is_regex = False
regex_chars = ['!', '*', '$']
for c in regex_chars:
if string.find(c) > -1:
return True
return is_regex
def remove_from_string(string, values):
"""
Parameters
----------
string:
values:
Returns
-------
"""
for v in values:
string = string.replace(v, '')
return string
def count_hits(strings, pattern):
count = 0
for s in strings:
if re.match(pattern, s):
count += 1
return count
def where_is(strings, pattern, n=1, lookup_func=re.match):
"""Return index of the nth match found of pattern in strings
Parameters
----------
strings: list of str
List of strings
pattern: str
Pattern to be matched
    n: int
Number of times the match must happen to return the item index.
lookup_func: callable
Function to match each item in strings to the pattern, e.g., re.match or re.search.
Returns
-------
index: int
Index of the nth item that matches the pattern.
        If there are fewer than n matches, -1 is returned.
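    Example
    -------
    >>> where_is(['a1', 'b2', 'a3'], 'a', n=2)
    2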
"""
count = 0
for idx, item in enumerate(strings):
if lookup_func(pattern, item):
count += 1
if count == n:
return idx
return -1
def to_str(bytes_or_str):
if isinstance(bytes_or_str, bytes):
value = bytes_or_str.decode('utf-8')
else:
value = bytes_or_str
    return value # Instance of str
def to_bytes(bytes_or_str):
if isinstance(bytes_or_str, str):
value = bytes_or_str.encode('utf-8')
else:
value = bytes_or_str
    return value # Instance of bytes
# Python 2
def to_unicode(unicode_or_str):
if isinstance(unicode_or_str, str):
value = unicode_or_str.decode('utf-8')
else:
value = unicode_or_str
return value # Instance of unicode
# Python 2
def to_str2(unicode_or_str):
if isinstance(unicode_or_str, unicode):
value = unicode_or_str.encode('utf-8')
else:
value = unicode_or_str
    return value # Instance of str
| {
"content_hash": "222d3532d2ec699c306104abaa9543ba",
"timestamp": "",
"source": "github",
"line_count": 261,
"max_line_length": 91,
"avg_line_length": 20.04597701149425,
"alnum_prop": 0.5726299694189603,
"repo_name": "Neurita/boyle",
"id": "798e060ad8d9e92faef5584ffed72e69edafd711",
"size": "5635",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "boyle/utils/strings.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "1687"
},
{
"name": "Python",
"bytes": "391188"
}
],
"symlink_target": ""
} |
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from PyQt4.uic import *
import os
class ConnectorsInfoWnd(QDialog):
def __init__(self, treeModel, genus):
super(ConnectorsInfoWnd, self).__init__()
self.genus = genus
self.treeModel = treeModel
dirname, filename = os.path.split(os.path.abspath(__file__))
loadUi(dirname+'\\connector_info.ui', self)
self.btnAdd.clicked.connect(self.onAddConnector)
self.btnOk.clicked.connect(self.accept)
self.btnCancel.clicked.connect(self.reject)
self.header = ['Label', 'Kind', 'Type']
connectors = []
if(genus.plug != None):
connectors.append(genus.plug)
for socket in self.genus.sockets:
connectors.append(socket)
model = ConnectorTableModel(self, connectors, self.header)
self.tableView.setModel(model)
verticalHeader = self.tableView.verticalHeader();
#verticalHeader.setDefaultAlignment (Qt.AlignLeft)
verticalHeader.setResizeMode(QHeaderView.Fixed);
verticalHeader.setDefaultSectionSize(18);
self.tableView.setItemDelegate(MyDelegate(self.tableView));
self.tableView.setEditTriggers(QAbstractItemView.AllEditTriggers)
def onAddConnector(self):
self.tableView.model().addConnector()
class ConnectorTableModel(QAbstractTableModel):
def __init__(self, parent, connectors, header, *args):
QAbstractTableModel.__init__(self, parent, *args)
self.connectors = connectors
self.header = header
self.InfoWnd = parent
self.blockImages = []
def rowCount(self, parent):
return len(self.connectors)
def columnCount(self, parent):
return len(self.header)
def flags (self, index ) :
if (not index.isValid()):
return Qt.ItemIsEnabled;
return Qt.ItemIsDragEnabled | Qt.ItemIsEnabled | Qt.ItemIsEditable;
def addConnector(self):
#return
from blocks.BlockImageIcon import BlockImageIcon
icon = QPixmap(os.getcwd() + "\\" + 'resource\\117-puzzle.png')
img = BlockImageIcon('', 'CENTER', icon, 32, 32, False, False)
self.blockImages.append(img)
index = QModelIndex ()
self.insertRow(len(self.blockImages), index)
self.emit(SIGNAL("dataChanged(QModelIndex,QModelIndex)"), index, index)
def setData(self, index, value, role=Qt.DisplayRole):
if (index.isValid() and role == Qt.EditRole):
connector = self.connectors[index.row()]
if(index.column() == 0):
connector.label = value
if(index.column() == 1):
connector.kind = value
if(index.column() == 2):
connector.type = value
#emit dataChanged(index, index);
self.InfoWnd.treeModel.showBlock(self.InfoWnd.genus)
return True;
return False
def data(self, index, role):
if not index.isValid():
return None
elif role != Qt.DisplayRole:
return None
connector = self.connectors[index.row()]
if(index.column() == 0):
return connector.label
if(index.column() == 1):
return connector.kind
if(index.column() == 2):
return connector.type
return None
def headerData(self, col, orientation, role):
if orientation == Qt.Horizontal and role == Qt.DisplayRole:
return self.header[col]
if (role == Qt.TextAlignmentRole):
return Qt.AlignLeft | Qt.AlignVCenter
return None
class MyDelegate(QItemDelegate):
def __init__(self, parent):
super(MyDelegate, self).__init__(parent)
def createEditor(self, parent, option, index):
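        # Column 0 keeps the default line editor; columns 1 and 2 use combo boxes
        # restricted to the allowed connector kinds and types.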
if(index.column() == 0):
editor = super(MyDelegate, self).createEditor(parent, option, index)
return editor
if(index.column() == 1):
combobox = QComboBox(parent)
combobox.addItems(['socket', 'plug'])
#combobox.currentIndexChanged[int].connect(self.currentIndexChanged)
return combobox
if(index.column() == 2):
combobox = QComboBox(parent)
combobox.addItems(['boolean','cmd','number','poly', 'poly-list', 'string'])
#combobox.currentIndexChanged[int].connect(self.currentIndexChanged)
return combobox
return None
def setModelData(self, editor, model, index) :
super(MyDelegate, self).setModelData(editor, model, index)
def setEditorData (self, editor, index):
#self.m_finishedMapper.blockSignals(True);
text = index.model().data(index, Qt.DisplayRole)
if index.column() == 0:
#print('setEditorData')
editor.setText(text);
#super(MyDelegate, self).setEditorData(editor, index)
if index.column() == 1:
_ind = editor.findText(text)
editor.setCurrentIndex(_ind)
if index.column() == 2:
_ind = editor.findText(text)
editor.setCurrentIndex(_ind)
| {
"content_hash": "fa7cac093b9fba95bbfbaa1622637761",
"timestamp": "",
"source": "github",
"line_count": 160,
"max_line_length": 87,
"avg_line_length": 33.09375,
"alnum_prop": 0.5916902738432483,
"repo_name": "go2net/PythonBlocks",
"id": "2605900e400651a7dafc92f44c5f2c94fe672508",
"size": "5295",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "components/ConnectorsInfoWnd.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "560957"
}
],
"symlink_target": ""
} |
from base_model import WsModel
class ThemePackage(WsModel):
""" ThemePackage groups theme elements together for packaging and distribution"""
_modfields = [{"name":"name","type":"text"},
{"name":"themes","type":"selectmulti","list":"Theme","list_val":"key","list_name":"name"}]
name = WsModel.db.StringProperty()
themes = WsModel.db.ListProperty(WsModel.db.Key)
@classmethod
def relations(cls):
return WsModel.Relation([], [{"name":"name","type":"text"},
{"name":"themes","type":"selectmulti","list":WsModel.Theme,"list_val":"key","list_name":"name"}])
@classmethod
def old_create(cls, dict_values):
theme_package = cls()
theme_package.name = "".join(dict_values["name"])
theme_package.themes = dict_values["themes"]
theme_package.put()
return theme_package
WsModel.ThemePackage = ThemePackage
class Theme(WsModel):
""" Theme relieves the need for static file upload
Each theme element contains the complete html, css and js
for the space the element is intended to fill."""
name = WsModel.db.StringProperty()
html = WsModel.db.TextProperty()
css = WsModel.db.TextProperty()
js = WsModel.db.TextProperty()
@classmethod
def relations(cls):
return WsModel.Relation([{"model":"Page","field":"theme","value":"key"}], [{"name":"name","type":"text"},
{"name":"html","type":"textareahtml"},
{"name":"css","type":"textarea"},
{"name":"js","type":"textarea"}])
WsModel.Theme = Theme | {
"content_hash": "093363a1e664f413203978134d341357",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 109,
"avg_line_length": 39.63157894736842,
"alnum_prop": 0.650066401062417,
"repo_name": "webspinner/webspinner-gae-cms",
"id": "cc376bb63dfec90bc776d87e6d9b0f3cdcc314bd",
"size": "1506",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "models/theme.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "5995"
},
{
"name": "JavaScript",
"bytes": "35291"
},
{
"name": "Python",
"bytes": "2486474"
},
{
"name": "Shell",
"bytes": "65"
}
],
"symlink_target": ""
} |
import numpy as np
def print_test(network, testset, cost_function):
assert testset[0].features.shape[0] == network.n_inputs, \
"ERROR: input size varies from the defined input setting"
assert testset[0].targets.shape[0] == network.layers[-1][0], \
"ERROR: output size varies from the defined output setting"
test_data = np.array([instance.features for instance in testset])
test_targets = np.array([instance.targets for instance in testset])
input_signals, derivatives = network.update(test_data, trace=True)
out = input_signals[-1]
error = cost_function(out, test_targets)
print "[testing] Network error: %.4g" % error
print "[testing] Network results:"
print "[testing] input\tresult\ttarget"
for entry, result, target in zip(test_data, out, test_targets):
print "[testing] %s\t%s\t%s" % tuple(map(str, [entry, result, target]))
# end
def dropout(X, p=0.):
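    # Inverted dropout: each unit is kept with probability 1-p and the survivors are
    # rescaled by 1/(1-p) so the expected activation is unchanged.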
if p != 0:
retain_p = 1 - p
X = X * np.random.binomial(1, retain_p, size=X.shape)
X /= retain_p
return X
# end
def add_bias(A):
# Add a bias value of 1. The value of the bias is adjusted through
# weights rather than modifying the input signal.
return np.hstack((np.ones((A.shape[0], 1)), A))
# end addBias
def confirm(promt='Do you want to continue?'):
prompt = '%s [%s|%s]: ' % (promt, 'y', 'n')
while True:
ans = raw_input(prompt).lower()
if ans in ['y', 'yes']:
return True
if ans in ['n', 'no']:
return False
print "Please enter y or n."
# end
| {
"content_hash": "a53f1c2e2bce941d45686522a3716fd9",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 81,
"avg_line_length": 28.803571428571427,
"alnum_prop": 0.6100433973961562,
"repo_name": "DailyActie/Surrogate-Model",
"id": "9f00b836635c310655d4e3c0dfe7d89ce3b48469",
"size": "1613",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "01-codes/python-neural-network-master/nimblenet/tools.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Awk",
"bytes": "345"
},
{
"name": "Batchfile",
"bytes": "18746"
},
{
"name": "C",
"bytes": "13004913"
},
{
"name": "C++",
"bytes": "14692003"
},
{
"name": "CMake",
"bytes": "72831"
},
{
"name": "CSS",
"bytes": "303488"
},
{
"name": "Fortran",
"bytes": "7339415"
},
{
"name": "HTML",
"bytes": "854774"
},
{
"name": "Java",
"bytes": "38854"
},
{
"name": "JavaScript",
"bytes": "2432846"
},
{
"name": "Jupyter Notebook",
"bytes": "829689"
},
{
"name": "M4",
"bytes": "1379"
},
{
"name": "Makefile",
"bytes": "48708"
},
{
"name": "Matlab",
"bytes": "4346"
},
{
"name": "Objective-C",
"bytes": "567"
},
{
"name": "PHP",
"bytes": "93585"
},
{
"name": "Pascal",
"bytes": "1449"
},
{
"name": "Perl",
"bytes": "1152272"
},
{
"name": "PowerShell",
"bytes": "17042"
},
{
"name": "Python",
"bytes": "34668203"
},
{
"name": "Roff",
"bytes": "5925"
},
{
"name": "Ruby",
"bytes": "92498"
},
{
"name": "Shell",
"bytes": "94698"
},
{
"name": "TeX",
"bytes": "156540"
},
{
"name": "TypeScript",
"bytes": "41691"
}
],
"symlink_target": ""
} |
""""
Most Popular Collaborative Filtering Recommender
[Rating Prediction]
Most Popular predicts ratings for unobserved items for each user based on popularity of user and items.
"""
# © 2019. Case Recommender (MIT License)
from caserec.recommenders.rating_prediction.base_rating_prediction import BaseRatingPrediction
from caserec.utils.extra_functions import timed
import numpy as np
__author__ = 'Arthur Fortes <[email protected]>'
class MostPopular(BaseRatingPrediction):
def __init__(self, train_file=None, test_file=None, output_file=None, sep='\t', output_sep='\t'):
"""
        Most Popular for Rating Prediction
        This algorithm predicts a rating for each user using the aggregated feedback of users and items
Usage::
>> MostPopular(train, test).compute()
:param train_file: File which contains the train set. This file needs to have at least 3 columns
(user item feedback_value).
:type train_file: str
:param test_file: File which contains the test set. This file needs to have at least 3 columns
(user item feedback_value).
:type test_file: str, default None
:param output_file: File with dir to write the final predictions
:type output_file: str, default None
:param sep: Delimiter for input files
:type sep: str, default '\t'
:param output_sep: Delimiter for output file
:type output_sep: str, default '\t'
"""
super(MostPopular, self).__init__(train_file=train_file, test_file=test_file, output_file=output_file,
sep=sep, output_sep=output_sep)
self.recommender_name = 'Most Popular'
def predict(self):
"""
        This method predicts the final result, computing a rating for each (user, item) pair in the test set.
"""
if self.test_file is not None:
for user in self.test_set['users']:
for item in self.test_set['feedback'][user]:
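                    # Predict the rating as the mean training feedback of everyone who rated this item;
                    # fall back to the user's own mean (or the global mean) when nobody has.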
count_value = 0
feedback_value = 0
for user_v in self.train_set['users_viewed_item'].get(item, []):
feedback_value += self.train_set['feedback'][user_v][item]
count_value += 1
if feedback_value == 0:
try:
feedback_value = np.mean(list(self.train_set['feedback'][user].values()))
except KeyError:
feedback_value = self.train_set['mean_value']
else:
feedback_value /= count_value
self.predictions.append((user, item, feedback_value))
else:
            raise NotImplementedError
def compute(self, verbose=True, metrics=None, verbose_evaluation=True, as_table=False, table_sep='\t'):
"""
        Extends compute method from BaseRatingPrediction. Method to run recommender algorithm
:param verbose: Print recommender and database information
:type verbose: bool, default True
:param metrics: List of evaluation measures
:type metrics: list, default None
:param verbose_evaluation: Print the evaluation results
:type verbose_evaluation: bool, default True
:param as_table: Print the evaluation results as table
:type as_table: bool, default False
:param table_sep: Delimiter for print results (only work with verbose=True and as_table=True)
:type table_sep: str, default '\t'
"""
super(MostPopular, self).compute(verbose=verbose)
if verbose:
print("prediction_time:: %4f sec" % timed(self.predict))
print('\n')
else:
self.predict()
self.write_predictions()
if self.test_file is not None:
self.evaluate(metrics, verbose_evaluation, as_table=as_table, table_sep=table_sep)
| {
"content_hash": "87cd3d4a8364961a51348e82e46e0516",
"timestamp": "",
"source": "github",
"line_count": 115,
"max_line_length": 110,
"avg_line_length": 34.80869565217391,
"alnum_prop": 0.6025480889333,
"repo_name": "ArthurFortes/CaseRecommender",
"id": "41aa133f75904cba8ddbd32d69c116c00097245e",
"size": "4019",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "caserec/recommenders/rating_prediction/most_popular.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "198143"
}
],
"symlink_target": ""
} |