DevHugo/zds-site | zds/mp/validators.py | gpl-3.0
# -*- coding: utf-8 -*-
from django.http import Http404
from django.shortcuts import get_object_or_404
from django.utils.translation import ugettext_lazy as _
from zds.api.validators import Validator
from zds.member.models import Profile
class ParticipantsUserValidator(Validator):
can_be_empty = False
def validate_participants(self, value):
msg = None
if value or self.can_be_empty:
for participant in value:
if participant.username == self.get_current_user().username:
msg = _(u'Vous ne pouvez pas vous écrire à vous-même !')
try:
current = get_object_or_404(Profile, user__username=participant)
if not Profile.objects.contactable_members().filter(pk=current.pk).exists():
msg = _(u'Vous avez tenté d\'ajouter un utilisateur injoignable.')
except Http404:
msg = _(u'Un des participants saisi est introuvable')
else:
msg = _(u'Vous devez spécifier des participants.')
if msg is not None:
self.throw_error('participants', msg)
return value
def get_current_user(self):
raise NotImplementedError('`get_current_user()` must be implemented.')
class ParticipantsStringValidator(Validator):
"""
Validates participants field of a MP.
"""
def validate_participants(self, value, username):
"""
Checks about participants.
:param value: participants value
:return: participants value
"""
msg = None
if value:
participants = value.strip()
if participants != '':
if len(participants) == 1 and participants[0].strip() == ',':
                    msg = _(u'Vous devez spécifier des participants valides')
for participant in participants.split(','):
participant = participant.strip()
if participant == '':
continue
if participant.strip().lower() == username.lower():
msg = _(u'Vous ne pouvez pas vous écrire à vous-même !')
try:
current = get_object_or_404(Profile, user__username=participant)
if not Profile.objects.contactable_members().filter(pk=current.pk).exists():
msg = _(u'Vous avez tenté d\'ajouter un utilisateur injoignable.')
except Http404:
msg = _(u'Un des participants saisi est introuvable')
else:
msg = _(u'Le champ participants ne peut être vide')
if msg is not None:
self.throw_error('participants', msg)
return value
class TitleValidator(Validator):
"""
Validates title field of a MP.
"""
def validate_title(self, value):
"""
Checks about title.
:param value: title value
:return: title value
"""
msg = None
if value:
if value.strip() == '':
msg = _(u'Le champ titre ne peut être vide')
if msg is not None:
self.throw_error('title', msg)
return value
class TextValidator(Validator):
"""
Validates text field of a MP.
"""
def validate_text(self, value):
"""
Checks about text.
:param value: text value
:return: text value
"""
msg = None
if value:
if value.strip() == '':
msg = _(u'Le champ text ne peut être vide')
if msg is not None:
self.throw_error('text', msg)
return value
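
# Hedged usage sketch: ParticipantsUserValidator above leaves get_current_user()
# abstract, so a consumer supplies it. The subclass name below is invented; the
# import path mirrors the file's location in the repo.
from zds.mp.validators import ParticipantsUserValidator

class ApiParticipantsValidator(ParticipantsUserValidator):
    def __init__(self, request):
        self.request = request

    def get_current_user(self):
        return self.request.user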
odrotleff/ROOTPWA | pyInterface/package/utils/_treeUtils.py | gpl-3.0
import sys
import pyRootPwa
import pyRootPwa.utils
_geantParticleCodes = {}
_geantParticleCodes[0] = "unknown"
_geantParticleCodes[1] = "gamma"
_geantParticleCodes[2] = "e"
_geantParticleCodes[3] = "e"
_geantParticleCodes[7] = "pi0"
_geantParticleCodes[8] = "pi"
_geantParticleCodes[9] = "pi"
_geantParticleCodes[11] = "K"
_geantParticleCodes[12] = "K"
_geantParticleCodes[13] = "n"
_geantParticleCodes[14] = "p"
_geantParticleCodes[15] = "pbar"
_geantParticleCodes[16] = "K0"
_geantParticleCodes[17] = "eta"
_geantParticleCodes[18] = "lambda"
_geantParticleCodes[57] = "rho(770)"
_geantParticleCodes[58] = "rho(770)"
_geantParticleCodes[59] = "rho(770)"
_geantParticleCodes[60] = "omega(782)"
_geantParticleCodes[61] = "eta'(958)"
_geantParticleCodes[62] = "phi(1020)"
_geantParticleCodes[45] = "d"
class _EventFile:
evtfile = None
lineCounter = 0
nLines = 0
def __init__(self, infile):
self.evtfile = infile
beginning = self.evtfile.tell()
nLinesPerParticle = int(self.evtfile.readline()[:-1]) + 1
i = 1
while self.evtfile.readline() != "":
i += 1
self.nLines = int(i / nLinesPerParticle)
self.evtfile.seek(beginning)
def __iter__(self):
return self
def __len__(self):
return self.nLines
def next(self):
n_lines_to_read = self.evtfile.readline()[:-1]
if n_lines_to_read == "":
raise StopIteration()
lines = [n_lines_to_read]
for i in range(0, int(n_lines_to_read)):
lines.append(self.evtfile.readline()[:-1])
if lines[-1] == "":
pyRootPwa.utils.printErr("Unexpected end of event file. Aborting...")
sys.exit(1)
return _Event(lines)
def writeEvent(self, event):
for line in event.lines:
self.evtfile.write(line + "\n")
class _Event:
lines = []
particleNames = []
physicsEvent = []
def __init__(self, lines):
self.lines = lines
def sort(self):
new_lines = self.lines[2:]
new_lines = sorted(new_lines, key=lambda entry: int(entry.split()[0]))
self.lines = self.lines[0:2] + new_lines
self.physicsEvent = []
self.particleNames = []
def __convertLineToPartProps(self, line):
part = line.split(' ')
(part[0], part[1]) = (int(part[0]), int(part[1]))
for j in range(2, 6):
part[j] = float(part[j])
partname = _geantParticleCodes[part[0]]
if part[1] > 0:
partname += "+"
elif part[1] < 0:
partname += "-"
else:
partname += "0"
part[0] = partname
part.pop(1)
part.pop(len(part)-1)
return part
def getPhysicsEvent(self):
if self.physicsEvent:
return self.physicsEvent
nmbParticles = int(self.lines[0])
part = self.__convertLineToPartProps(self.lines[1])
self.physicsEvent.append(pyRootPwa.ROOT.TVector3(part[1], part[2], part[3]))
fillPN = False
if self.particleNames == []:
fillPN = True
self.particleNames.append(part[0])
for i in range(2, nmbParticles + 1):
part = self.__convertLineToPartProps(self.lines[i])
if fillPN:
self.particleNames.append(part[0])
self.physicsEvent.append(pyRootPwa.ROOT.TVector3(part[1], part[2], part[3]))
return self.physicsEvent
def getParticleNames(self):
if not self.particleNames:
self.getPhysicsEvent()
return self.particleNames
def __str__(self):
retval = ""
for line in self.lines:
retval += line + '\n'
return retval[:-1]
def getTreeFromEvtFile(filename, treename = ""):
if pyRootPwa.config is None:
raise pyRootPwa.rootPwaException("pyRootPwa configuration not initialized")
if treename == "":
treename = str(hash(filename))
outTree = pyRootPwa.ROOT.TTree(treename, treename)
prodKinMomenta = pyRootPwa.ROOT.TClonesArray("TVector3")
decayKinMomenta = pyRootPwa.ROOT.TClonesArray("TVector3")
prodKinPartName = pyRootPwa.ROOT.TClonesArray("TObjString")
decayKinPartName = pyRootPwa.ROOT.TClonesArray("TObjString")
prodKinMomentaLeafName = pyRootPwa.config.prodKinMomentaLeafName
	decayKinMomentaLeafName = pyRootPwa.config.decayKinMomentaLeafName
outTree.Branch(prodKinMomentaLeafName, "TClonesArray", prodKinMomenta)
outTree.Branch(decayKinMomentaLeafName, "TClonesArray", decayKinMomenta)
pyRootPwa.utils.printInfo('Converting "' + filename + '" to memory residing TTree...')
with open(filename, 'r') as infile:
inEventFile = _EventFile(infile)
index = 0
events = len(inEventFile)
progressbar = pyRootPwa.utils.progressBar(0, events)
progressbar.start()
try:
first = True
for event in inEventFile:
event.sort()
physicsVectors = event.getPhysicsEvent()
# check for the correct ordering of the names
particleNames = event.getParticleNames()
if first:
prodKinPartName[0] = pyRootPwa.ROOT.TObjString(particleNames[0])
for i in range(1, len(particleNames)):
						decayKinPartName[i-1] = pyRootPwa.ROOT.TObjString(particleNames[i])
					first = False  # without this, the consistency checks below never run
else:
if len(particleNames) != prodKinPartName.GetEntriesFast() + decayKinPartName.GetEntriesFast():
progressbar.cancel()
raise pyRootPwa.rootPwaException("Mismatch between number of particle names in TClonesArray and number of particles in event")
if prodKinPartName[0].GetString() != particleNames[0]:
progressbar.cancel()
raise pyRootPwa.rootPwaException("Inconsistent production particle types")
for i in range(1, len(particleNames)):
if decayKinPartName[i-1].GetString() != particleNames[i]:
progressbar.cancel()
raise pyRootPwa.rootPwaException("Inconsistent decay particle types")
# set the physics vectors in the tree
prodKinMomenta[0] = physicsVectors[0]
for i in range(len(physicsVectors[1:])):
decayKinMomenta[i] = physicsVectors[i+1]
outTree.Fill()
index += 1
progressbar.update(index)
except:
progressbar.cancel()
raise
pyRootPwa.utils.printSucc('Successfully created TTree with ' + str(events) + ' events.')
return (prodKinPartName, decayKinPartName, outTree)
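
# Hedged sketch of the .evt layout _EventFile above appears to expect
# (numbers invented): a particle-count line, then one line per particle of
# "<geant-id> <charge> <px> <py> <pz> <E>", the first particle line being the
# production/beam particle. Geant code 9 with charge -1 maps to "pi-".
from io import StringIO

sample_evt = StringIO(u"3\n"
                      u"9 -1 1.2 0.3 45.6 45.7\n"
                      u"8 1 0.4 -0.1 20.2 20.3\n"
                      u"7 0 0.8 0.2 25.1 25.2\n")
# _EventFile(sample_evt) would report len() == 1, i.e. one three-particle event.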
jawaidss/halalar-web | halalar/api/tests/models.py | mit
from datetime import datetime
import mailchimp
from django.conf import settings
from django.contrib.auth.models import User
from django.core import mail
from django.test import TestCase
from . import TEST_DATA, BODY, create_user, create_profile, create_message
from ..models import Profile
class ProfileTestCase(TestCase):
m = mailchimp.Mailchimp()
def tearDown(self):
self.m.lists.batch_unsubscribe(settings.MAILCHIMP_LIST_ID,
[{'email': TEST_DATA[0]['email']}],
delete_member=True,
send_goodbye=False)
def test_save(self):
# tests that a token is generated on save
# if it is not given
user = User.objects.create_user('user1')
profile = Profile(user=user, age=0)
profile.save()
self.assertEqual(len(profile.token), 40)
# if it is null
user = User.objects.create_user('user2')
profile = Profile(user=user, age=0, token=None)
profile.save()
self.assertEqual(len(profile.token), 40)
# if it is blank
user = User.objects.create_user('user3')
profile = Profile(user=user, age=0, token='')
profile.save()
self.assertEqual(len(profile.token), 40)
old_token = profile.token
# tests that the token does not change on save
profile.save()
new_token = profile.token
self.assertEqual(old_token, new_token)
# tests that a given token is not overridden on save
user = User.objects.create_user('user4')
profile = Profile(user=user, age=0, token='token')
profile.save()
self.assertEqual(profile.token, 'token')
def test_serialize(self):
user = create_user()
profile = create_profile(user)
expected = {'age': TEST_DATA[0]['age'],
'career': TEST_DATA[0]['career'],
'city': TEST_DATA[0]['city'],
'community': TEST_DATA[0]['community'],
'country': TEST_DATA[0]['country'],
'email': TEST_DATA[0]['email'],
'family': TEST_DATA[0]['family'],
'gender': TEST_DATA[0]['gender'],
'photo': None,
'religion': TEST_DATA[0]['religion'],
'self': TEST_DATA[0]['self'],
'username': TEST_DATA[0]['username']}
self.assertEqual(profile.serialize(), expected)
del expected['email']
self.assertEqual(profile.serialize(False), expected)
def test_send_delayed_welcome_email(self):
user = create_user()
profile = create_profile(user)
profile.send_delayed_welcome_email()
self.assertEqual(len(mail.outbox), 1)
email = mail.outbox[0]
self.assertTrue(email.subject)
self.assertTrue(email.message)
self.assertTrue(email.from_email)
self.assertTrue(len(email.to), 1)
self.assertEqual(email.to[0], user.email)
self.assertTrue(86399 <= (email.send_at - datetime.now()).seconds <= 86400)
def test_send_signup_notification_email(self):
user = create_user()
profile = create_profile(user)
profile.send_signup_notification_email()
self.assertEqual(len(mail.outbox), 1)
email = mail.outbox[0]
self.assertTrue(email.subject)
self.assertTrue(email.message)
self.assertTrue(email.from_email)
self.assertTrue(len(email.to), 1)
self.assertEqual(email.to[0], settings.ASANA_EMAIL)
def test_subscribe_to_mailchimp_list(self):
user = create_user()
profile = create_profile(user)
profile.subscribe_to_mailchimp_list()
self.assertEqual(self.m.lists.member_info(settings.MAILCHIMP_LIST_ID,
[{'email': TEST_DATA[0]['email']}])['success_count'], 1)
class MessageTestCase(TestCase):
def test_serialize(self):
sender = create_profile(create_user())
recipient = create_profile(create_user(1), 1)
message = create_message(sender, recipient)
expected = {'sender': TEST_DATA[0]['username'],
'recipient': TEST_DATA[1]['username'],
'timestamp': 'now',
'body': BODY}
self.assertEqual(message.serialize(), expected)
def test_send_push_notification(self):
sender = create_profile(create_user())
recipient = create_profile(create_user(1), 1)
message = create_message(sender, recipient)
        message.send_push_notification()
sjshao09/KaggleRH | explore/et.py | mit
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn import model_selection, preprocessing
from sklearn.ensemble import ExtraTreesRegressor
from sklearn.model_selection import RandomizedSearchCV
# ----------------- Settings ----------------- #
EN_CROSSVALIDATION = False
EN_IMPORTANCE = True
# ----------------- Read Data ----------------- #
df = pd.read_csv('input/train.csv')
test_df = pd.read_csv('input/test.csv')
macro = pd.read_csv('input/macro.csv')
# ----------------- Data Cleaning ----------------- #
# Training Set
df.loc[df.id==13549, 'life_sq'] = 74
df.loc[df.id==10092, 'build_year'] = 2007
df.loc[df.id==10092, 'state'] = 3
df.loc[df.id==13120, 'build_year'] = 1970
df.loc[df.id==25943, 'max_floor'] = 17
# Clean - Full Sq
df = df[(df.full_sq>1)|(df.life_sq>1)]
df.loc[(df.full_sq<10) & (df.life_sq>1), 'full_sq'] = df.life_sq
df = df[df.full_sq<400]
# Clean - Life Sq
df.loc[df.life_sq > df.full_sq*4, 'life_sq'] = df.life_sq/10
df.loc[df.life_sq > df.full_sq, 'life_sq'] = np.nan
df.loc[df.life_sq < 2, 'life_sq'] = np.nan
df.loc[df.life_sq < df.full_sq * 0.3, 'life_sq'] = np.nan
# Clean - Kitch Sq
df.loc[df.kitch_sq < 2, 'kitch_sq'] = np.nan
df.loc[df.kitch_sq > df.full_sq * 0.5, 'kitch_sq'] = np.nan
# Clean - Build Year
df.loc[df.build_year<1000, 'build_year'] = np.nan
df.loc[df.build_year>2050, 'build_year'] = np.nan
# Clean - Num Room
df.loc[df.num_room<1, 'num_room'] = np.nan
df.loc[(df.num_room>9) & (df.full_sq<100), 'num_room'] = np.nan
# Clean - Floor and Max Floor
df.loc[df.floor==0, 'floor'] = np.nan
df.loc[df.max_floor==0, 'max_floor'] = np.nan
df.loc[(df.max_floor==1) & (df.floor>1), 'max_floor'] = np.nan
df.loc[df.max_floor>50, 'max_floor'] = np.nan
df.loc[df.floor>df.max_floor, 'floor'] = np.nan
# Test Set
test_df.loc[test_df.id==30938, 'full_sq'] = 37.8
test_df.loc[test_df.id==35857, 'full_sq'] = 42.07
test_df.loc[test_df.id==35108, 'full_sq'] = 40.3
test_df.loc[test_df.id==33648, 'num_room'] = 1
# Clean - Full Sq
test_df.loc[(test_df.full_sq<10) & (test_df.life_sq>1), 'full_sq'] = test_df.life_sq
# Clean - Life Sq
test_df.loc[test_df.life_sq>test_df.full_sq*2, 'life_sq'] = test_df.life_sq/10
test_df.loc[test_df.life_sq > test_df.full_sq, 'life_sq'] = np.nan
test_df.loc[test_df.life_sq < 2, 'life_sq'] = np.nan
test_df.loc[test_df.life_sq < test_df.full_sq * 0.3, 'life_sq'] = np.nan
# Clean - Kitch Sq
test_df.loc[test_df.kitch_sq < 2, 'kitch_sq'] = np.nan
test_df.loc[test_df.kitch_sq > test_df.full_sq * 0.5, 'kitch_sq'] = np.nan
# Clean - Build Year
test_df.loc[test_df.build_year<1000, 'build_year'] = np.nan
test_df.loc[test_df.build_year>2050, 'build_year'] = np.nan
# Clean - Num Room
test_df.loc[test_df.num_room<1, 'num_room'] = np.nan
test_df.loc[(test_df.num_room>9) & (test_df.full_sq<100), 'num_room'] = np.nan
# Clean - Floor and Max Floor
test_df.loc[test_df.floor==0, 'floor'] = np.nan
test_df.loc[test_df.max_floor==0, 'max_floor'] = np.nan
test_df.loc[(test_df.max_floor==1) & (test_df.floor>1), 'max_floor'] = np.nan
test_df.loc[test_df.max_floor>50, 'max_floor'] = np.nan
test_df.loc[test_df.floor>test_df.max_floor, 'floor'] = np.nan
# ----------------- New Features ----------------- #
# Auxiliary Feature - price/sq
df['price/sq'] = df['price_doc'] / df['full_sq']
# New Feature - bad_floor
df['bad_floor'] = (df['floor']==1) | (df['floor']==df['max_floor'])
test_df['bad_floor'] = (test_df['floor']==1) | (test_df['floor']==test_df['max_floor'])
# New Feature - kitch_sq/full_sq
df["kitch_sq/full_sq"] = df["kitch_sq"] / df["full_sq"]
test_df["kitch_sq/full_sq"] = test_df["kitch_sq"] / test_df["full_sq"]
# log size
df["full_sq"] = np.log1p(df["full_sq"])
df["life_sq"] = np.log1p(df["life_sq"])
df["kitch_sq"] = np.log1p(df["kitch_sq"])
test_df["full_sq"] = np.log1p(test_df["full_sq"])
test_df["life_sq"] = np.log1p(test_df["life_sq"])
test_df["kitch_sq"] = np.log1p(test_df["kitch_sq"])
# New Feature - age when sold
df["age_when_sold"] = pd.to_datetime(df["timestamp"]).dt.year - df["build_year"]
test_df["age_when_sold"] = pd.to_datetime(test_df["timestamp"]).dt.year - test_df["build_year"]
# New Feature - life_sq/full_sq
df["life_sq/full_sq"] = df["life_sq"] / df["full_sq"]
test_df["life_sq/full_sq"] = test_df["life_sq"] / test_df["full_sq"]
# ----------------- Macro Data ----------------- #
MacroFeatures = ['timestamp', 'usdrub', 'oil_urals', 'mortgage_rate', 'cpi', 'ppi', 'rent_price_2room_eco', 'micex',
'rent_price_1room_eco', 'balance_trade', 'balance_trade_growth', 'gdp_quart_growth', 'net_capital_export']
macro = macro[MacroFeatures]
df = pd.merge(df, macro, on='timestamp', how='left')
test_df = pd.merge(test_df, macro, on='timestamp', how='left')
# ----------------- Fill by median ----------------- #
df.fillna(df.median(axis=0), inplace=True)
test_df['product_type'].fillna(test_df['product_type'].mode().iloc[0], inplace=True)
test_df.fillna(df.median(axis=0), inplace=True)
# ----------------- Remove Extreme Data ----------------- #
RANDOM_SEED_SPLIT = 1
df_1m = df[ (df.price_doc<=1000000) & (df.product_type=="Investment") ]
df = df.drop(df_1m.index)
df_1m = df_1m.sample(frac=0.1, replace=False, random_state=RANDOM_SEED_SPLIT)
df_2m = df[ (df.price_doc==2000000) & (df.product_type=="Investment") ]
df = df.drop(df_2m.index)
df_2m = df_2m.sample(frac=0.1, replace=False, random_state=RANDOM_SEED_SPLIT)
df_3m = df[ (df.price_doc==3000000) & (df.product_type=="Investment") ]
df = df.drop(df_3m.index)
df_3m = df_3m.sample(frac=0.1, replace=False, random_state=RANDOM_SEED_SPLIT)
df = pd.concat([df, df_1m, df_2m, df_3m])
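# The three sampled blocks above keep only 10% of Investment-type rows priced
# at the suspiciously round 1M/2M/3M marks (seeded by RANDOM_SEED_SPLIT),
# presumably to down-weight prices that look administrative rather than market.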
# Plot Original Data Set
OrigTrainValidSetFig = plt.figure()
ax1 = plt.subplot(311)
plt.hist(np.log1p(df['price_doc'].values), bins=200, color='b')
plt.title('Original Data Set')
# ----------------- Training Data ----------------- #
y_train = df['price_doc'] * 0.95
x_train = df.drop(["id", "timestamp", "price_doc", "price/sq"], axis=1)
# Encoding
for c in x_train.columns:
if x_train[c].dtype == 'object':
lbl = preprocessing.LabelEncoder()
lbl.fit(list(x_train[c].values))
x_train[c] = lbl.transform(list(x_train[c].values))
# ----------------- Test Data ----------------- #
x_test = test_df.drop(["id", "timestamp"], axis=1)
# Encoding
for c in x_test.columns:
if x_test[c].dtype == 'object':
lbl = preprocessing.LabelEncoder()
lbl.fit(list(x_test[c].values))
x_test[c] = lbl.transform(list(x_test[c].values))
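# Note: fitting a second LabelEncoder on the test frame can assign integer
# codes that disagree with the training encoding when the category sets differ.
# A safer sketch (not what runs above) fits one encoder per column on the
# union of both frames:
#
#     lbl.fit(list(x_train[c].values) + list(x_test[c].values))
#     x_train[c] = lbl.transform(list(x_train[c].values))
#     x_test[c] = lbl.transform(list(x_test[c].values))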
# ----------------- Cross Validation ----------------- #
if EN_CROSSVALIDATION:
print "[INFO] Cross Validation..."
etr = ExtraTreesRegressor(random_state=0, bootstrap=True, oob_score=True, n_jobs=1, verbose=0)
param_dist = {"n_estimators": [256, 512, 768],
"max_depth": [6, 7, 8, 9, 10],
"max_features": [100, 200, 280],
"min_samples_split": [2, 4, 6],
"min_samples_leaf": [6, 8, 10],
"criterion": ["mse"]
}
r = RandomizedSearchCV(etr, param_distributions=param_dist, cv=4, n_iter=256,
iid=False, n_jobs=6, verbose=2)
r.fit(x_train, y_train)
print ('Best score: {}'.format(r.best_score_))
print ('Best parameters: {}'.format(r.best_params_))
# ----------------- Training ----------------- #
print "[INFO] Training..."
model = ExtraTreesRegressor(random_state=0, bootstrap=True, oob_score=True, n_jobs=6,
verbose=1, max_depth=10, n_estimators=256, max_features=280,
min_samples_split=6, min_samples_leaf=6)
model.fit(x_train, y_train)
print('OOB score: {:6f}'.format(model.oob_score_))
print "[INFO] Predicting..."
y_predict = model.predict(x_test)
submission = pd.DataFrame({'id': test_df.id, 'price_doc': y_predict})
submission.to_csv('submission.csv', index=False)
print submission.head()
print "[INFO] Average Price =", submission['price_doc'].mean()
# Plot Original, Training and Test Sets
ax4 = plt.subplot(312, sharex=ax1)
plt.hist(np.log1p(y_train), bins=200, color='b')
plt.title('Training Data Set')
plt.subplot(313, sharex=ax1)
plt.hist(np.log1p(y_predict), bins=200, color='b')
plt.title('Test Data Set Prediction')
OrigTrainValidSetFig.show()
# Plot Feature Importance
if EN_IMPORTANCE:
importance_df = pd.DataFrame({'feature':list(x_train), 'fscore':model.feature_importances_})
importance_df = importance_df.nlargest(50, 'fscore')
importance_df.sort_values(by='fscore', inplace=True)
importance_df.plot(kind='barh', x='feature', y='fscore', legend=False, figsize=(8, 13))
plt.title('Extra Trees Regressor Important Features Top 50')
plt.xlabel('fscore')
plt.tight_layout()
plt.show()
danianr/NINJa | jobqueue.py | mit
from Tkinter import *
import os
import posix
from joblist import JobList
import cups
import re
from collections import deque
import time
import sys
class Job(object):
def __init__(self, conn=None, jobId=None, maxsize=2147483647):
print >> sys.stderr, time.time(), 'Entry into Job(jobId=%s)' % (jobId,)
self.jobId = jobId
self.authenticated = False
printerUri = conn.getJobAttributes(jobId).get('printer-uri')
# This will raise an IPPError if the job has not finished transferring
# in the form of IPPError: (1030, 'client-error-not-found')
doc = conn.getDocument(printerUri, jobId, 1)
print >> sys.stderr, time.time(), 'After getDocument() for jobId:', jobId
self.size = os.stat(doc['file']).st_size
if self.size > maxsize:
print >> sys.stderr, time.time(), 'Document size is larger than accepted:', self.size
os.remove(doc['file'])
self.error = 'Document PostScript is too large to be printed;\ntry printing from a Mac'
self.pages = 0
self.sha512 = 'NA'
else:
# Note that the getDocument command must be issued prior to requesting
# detailed job attributes such as document-format, job-originating-host-name
# and job-originating-user-name, otherwise these attributes will be blank
digest_cmd = '/usr/bin/nice /usr/bin/openssl dgst -sha512 %s' % ( doc['file'] )
pagecount_cmd = './pagecount.sh %s %s' % ( doc['document-format'], doc['file'] )
sha512 = os.popen(digest_cmd).read()
print >> sys.stderr, time.time(), 'After the digest for jobId:', jobId
pagecount = os.popen(pagecount_cmd).read()
print >> sys.stderr, time.time(), 'After the pagecount for jobId:', jobId
try:
self.pages = int(pagecount)
self.error = None
except ValueError:
self.pages = 1
self.error = 'Unable to determine pagecount, you will be charged for actual usage'
self.sha512 = sha512[-129:-1]
self.docFormat = doc['document-format']
attr = conn.getJobAttributes(jobId)
self.uuid = attr['job-uuid']
self.creation = attr['time-at-creation']
self.username = attr['job-originating-user-name'].encode('ascii','ignore')
self.hostname = attr['job-originating-host-name'].encode('ascii','ignore')
self.title = attr['job-name'].encode('ascii','replace')
self.displayTitle = self.title[:47]
self.jobState = attr['job-state']
self.remote = printerUri.endswith('/remote')
# There is no need to keep the tmpfile around for remote jobs
if self.remote and doc['file'] != "":
os.remove(doc['file'])
self.tmpfile = None
elif self.size > maxsize:
self.tmpfile = None
else:
self.tmpfile = doc['file']
if ( attr.has_key('Duplex') and attr['Duplex'] != u'None' ):
self.duplex = True
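         # (pages % 2 + pages) / 2 == ceil(pages / 2): a duplex sheet holds
         # two pages, so an odd count still consumes the extra side.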
self.pages = ( self.pages % 2 + self.pages ) / 2
else:
self.duplex = False
      # Use the initially supplied jobId for the hash so the value stays
      # immutable even if attributes change later. (The original instance-level
      # ``self.__hash__ = lambda: jobId`` is ignored by hash(): Python looks
      # special methods up on the type for new-style classes.)
      self._initial_jobid = jobId

   def __hash__(self):
      return self._initial_jobid
def __cmp__(self, other):
if self.creation < other.creation:
return -1
elif self.creation > other.creation:
return 1
else:
return 0
def __repr__(self):
return '<jobId: %d, uuid: \'%s\', creation: %d, username: \'%s\', hostname: \'%s\', title:\'%s\', pages: %d, jobState: %d, duplex: %s>' \
% ( self.jobId, self.uuid, self.creation, self.username, self.hostname, self.title, self.pages, self.jobState, self.duplex )
def __str__(self):
return '%4d %-12s %-18s %-48s %6s' % ( self.jobId, self.username, self.hostname[:18], self.displayTitle[:48], self.pages )
def removeTmpFile(self):
if self.tmpfile is not None and self.tmpfile != "":
os.remove(self.tmpfile)
class JobMapping(object):
# Takes a sequence of Job objects, produces an iterator
# suitable for supplying to a a listbox (textual description)
# and allows direct access to Job objects based on their
# position. Also takes a list of positions and returns
# a tuple of Job objects associated with each
def __init__(self, iterable, username):
self.timestamp = time.time()
self.internal = list()
self.internal.extend(iterable)
self.username = username
self.dirty = False
def isDirty(self):
return self.dirty
def setDirty(self):
self.dirty = True
def map(self, iterable):
return map(lambda i: self.internal[int(i)], iterable)
# Only define getter accessors since this is technically
# a read-only snapshot
def __getitem__(self, x):
return self.internal[x]
def __getslice__(self, x, y):
return self.internal[x:y]
def __len__(self):
return len(self.internal)
def __iter__(self):
return iter(map(lambda j: j.__str__(), self.internal))
class JobQueue(object):
def __init__(self, unipattern, conn, multicastHandler=None, cloudAdapter=None, maxsize=2147483647):
self.unipattern = unipattern
self.conn = conn
self.mcast = multicastHandler
self.cloud = cloudAdapter
self.jobs = dict()
self.claimed = dict()
self.unclaimed = deque()
self.refreshReq = deque()
self.claimedMapFrame = None
self.unclaimedMapFrame = None
self.delay = 23 # seconds
self.maxsize = maxsize
self.processing = None
def getMapping(self, username=None):
self.refresh()
if username is None:
if self.unclaimedMapFrame is None or \
self.unclaimedMapFrame.isDirty():
self.unclaimedMapFrame = JobMapping(self.unclaimed, None)
return self.unclaimedMapFrame
else:
if self.claimedMapFrame is None or \
self.claimedMapFrame.isDirty() or \
self.claimedMapFrame.username != username:
if self.claimed.has_key(username):
self.claimedMapFrame = JobMapping(self.claimed[username], username)
else:
self.claimedMapFrame = JobMapping([], username)
return self.claimedMapFrame
def refresh(self, event=None, interjobHook=None, force=False):
if self.processing is not None:
return
now = time.time()
self.refreshReq.append(now)
for req in self.refreshReq:
if force or (req + self.delay) < now:
self.processing = now
break
else:
return
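      # Debounce: refresh requests accumulate in refreshReq, and the CUPS
      # poll below runs only once some pending request is older than
      # self.delay seconds (or force is set), batching bursts of callers.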
incompleteJobs = self.conn.getJobs(which_jobs='not-completed')
self.remove( filter( lambda x: not incompleteJobs.has_key(x), self.jobs.keys()) )
for jobId in filter( lambda x: not self.jobs.has_key(x), incompleteJobs.keys()):
try:
j = Job(self.conn, jobId, self.maxsize)
if not j.remote:
self.add(j)
except cups.IPPError as e:
print("caught an IPPError",e)
continue
if interjobHook is not None:
interjobHook()
self.refreshReq.clear()
rettime = time.time()
print >> sys.stderr, rettime, 'Total elapsed time for jobqueue.refresh():', rettime - now
self.processing = None
def add(self, job):
# updates the main index
self.jobs[job.jobId] = job
if self.unipattern.match(job.username):
if job.username not in self.claimed:
self.claimed[job.username] = deque()
self.claimed[job.username].appendleft(job)
if self.claimedMapFrame is not None and \
self.claimedMapFrame.username == job.username:
self.claimedMapFrame.setDirty()
if self.cloud is not None and self.mcast is not None and job.size <= self.cloud.maxsize:
self.mcast.advertise(job)
self.cloud.storeJob(job)
else:
self.unclaimed.appendleft(job)
if self.unclaimedMapFrame is not None:
self.unclaimedMapFrame.setDirty()
def remove(self, removedJobs):
for id in filter( lambda x: self.jobs.has_key(x), removedJobs):
j = self.jobs[id]
if j in self.unclaimed:
self.unclaimed.remove(j)
if self.unclaimedMapFrame is not None:
self.unclaimedMapFrame.setDirty()
else:
username=j.username
if self.claimed.has_key(username):
self.claimed[username].remove(j)
if ( len(self.claimed[username]) == 0 ):
del self.claimed[username]
if self.claimedMapFrame is not None and \
self.claimedMapFrame.username == username:
self.claimedMapFrame.setDirty()
del self.jobs[id]
def getClaimedUuids(self, username):
uuids = []
if username in self.claimed:
for j in self.claimed[username]:
urnuuid = j.uuid
uuids.append(urnuuid[9:])
return uuids
def __getitem__(self,x):
if x in self.jobs:
return self.jobs[x]
incompleteJobs = self.conn.getJobs(which_jobs='not-completed')
if incompleteJobs.has_key(x):
return Job(self.conn, x)
else:
return None
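
# Hedged alternative sketch: Job.__init__ above shells out to openssl for the
# SHA-512; the stdlib hashlib can do the same in-process (helper name invented;
# the chunked read keeps memory flat for large spool files).
import hashlib

def sha512_of(path, chunk=1 << 20):
    h = hashlib.sha512()
    with open(path, 'rb') as f:
        for block in iter(lambda: f.read(chunk), b''):
            h.update(block)
    return h.hexdigest()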
XIMDEX/ximdex | public_xmd/vendors/kupu/tools/compress.py | agpl-3.0
#!/usr/bin/env python
"""Remove comments, newlines and redundant whitespace from JavaScript code
This reads all paths that were passed in as arguments on the command-line
and removes everything that is ignored by JavaScript. This makes that
the source isn't readable anymore (which I personally consider bad),
but also that less bytes have to be served by the server, scripts are
loaded faster and also that they're executed faster.
WARNING: This script converts files in place! Original files will be
overwritten. Do *not* run this on a development version of your code,
since you won't be able to get them back into the original state. This
should be ran only by system administrators if they want to speed up
their setups.
"""
import sys, re
one_line_comment = re.compile(r'^\s*//.*$', re.M)
trailing_comment = re.compile(r'//(\w|\s)*$', re.M)
multi_line_comment = re.compile(r'^\s*/\*.*?\*/', re.M | re.S)
whitespace_after_separator = re.compile(r';\s*', re.M | re.S)
whitespace_after_opening_bracket = re.compile(r'{\s*', re.M | re.S)
starting_whitespace = re.compile(r'^\s*', re.M | re.S)
def strip(data):
"""Processes the data, removing comments and unecessary whitespace."""
data = one_line_comment.sub('', data)
data = trailing_comment.sub('', data)
data = multi_line_comment.sub('', data)
data = whitespace_after_separator.sub(';', data)
data = whitespace_after_opening_bracket.sub('{', data)
data = starting_whitespace.sub('', data)
return data.strip()
for file in sys.argv[1:]:
data = open(file).read()
data = strip(data)
open(file, 'w').write(data)
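
# Hedged before/after illustration (input invented) of strip() above:
#   before: "// init\nvar x = 1;\nfunction f() {\n    return x; // value\n}"
#   after:  "var x = 1;function f() {return x;}"
# Comments disappear, and whitespace following ';' and '{' is collapsed.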
fish2000/django-delegate | setup.py | bsd-3-clause
#!/usr/bin/env python
# encoding: utf-8
from __future__ import print_function
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import sys
import os
sys.path.append(os.getcwd())
import version
if 'sdist' in sys.argv and 'upload' in sys.argv:
import commands
finder = "/usr/bin/find %s \( -name \*.pyc -or -name .DS_Store \) -delete"
theplace = os.getcwd()
if theplace not in (".", "/"):
print("+ Deleting crapola from %s..." % theplace)
print("$ %s" % finder % theplace)
commands.getstatusoutput(finder % theplace)
print("")
setup(
name='django-delegate',
version='%s.%s.%s' % version.__version__,
description=version.__doc__,
long_description=version.__doc__,
author=version.__author__,
author_email=version.__email__,
maintainer=version.__author__,
maintainer_email=version.__email__,
license='BSD',
url='http://github.com/fish2000/django-delegate/',
download_url='https://github.com/fish2000/django-delegate/zipball/master',
keywords=[
'django',
'delegate',
'queryset',
'manager',
'method',
'dispatch',
'syntax-sugar'],
packages=[
'delegate'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Environment :: Other Environment',
'Environment :: Plugins',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Programming Language :: Python :: Implementation :: Jython',
'Topic :: Database',
'Topic :: Utilities']
)
ijmarshall/robotreviewer3 | robotreviewer/formatting.py | gpl-3.0
"""
formatting.py
functions for displaying RobotReviewer internal data in useful ways
"""
from robotreviewer.app import app
import logging
log = logging.getLogger(__name__)
def format_authors(author_list, max_authors=1):
et_al = False
if len(author_list) > max_authors:
et_al = True
author_list = author_list[:max_authors]
authors = u", ".join([u"{lastname} {initials}".format(**a) for a in author_list])
if et_al:
authors += " et al"
return authors
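
# Hedged illustration (author data invented) of the default truncation above:
#   format_authors([{'lastname': 'Smith', 'initials': 'J'},
#                   {'lastname': 'Jones', 'initials': 'A'}])
#   returns u"Smith J et al"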
@app.context_processor
def short_citation_fn():
def short_citation(article):
try:
return u"{} {}, {}".format(article['authors'][0]['lastname'], article['authors'][0]['initials'], article.get('year', '[unknown year]'))
except Exception as e:
log.debug("Fallback: {} raised".format(e))
return article['filename']
return dict(short_citation=short_citation)
@app.context_processor
def long_citation_fn():
def long_citation(article):
try:
bracket_issue = u"({})".format(article['issue']) if article.get('issue') else u""
return u"{}. {} {} {}. {}{}; {}".format(format_authors(article['authors']), article['title'], article.get('journal_abbr', article['journal']), article.get('year', '[unknown year]'), article.get('volume', '?'), bracket_issue, article.get('pages', '?'))
except Exception as e:
log.debug("Fallback: {} raised".format(e))
return u"Unable to extract citation information for file {}".format(article['filename'])
return dict(long_citation=long_citation)
@app.context_processor
def not_rcts_fn():
def not_rcts(articles):
return [r for r in articles if r.get('rct', {}).get('is_rct', True) == False]
return dict(not_rcts=not_rcts)
lambdalisue/django-permission | src/permission/logics/staff.py | mit
# coding=utf-8
"""
Permission logic module for author based permission system
"""
from permission.conf import settings
from permission.logics.base import PermissionLogic
from permission.compat import is_authenticated
class StaffPermissionLogic(PermissionLogic):
"""
Permission logic class for is_staff authority based permission system
"""
def __init__(self,
any_permission=None,
add_permission=None,
change_permission=None,
delete_permission=None):
"""
Constructor
Parameters
----------
any_permission : boolean
True for give any permission of the specified object to the staff
user. Default value will be taken from
``PERMISSION_DEFAULT_SPL_ANY_PERMISSION`` in
settings.
add_permission : boolean
True for give change permission of the specified object to the
staff user.
It will be ignored if :attr:`any_permission` is True.
Default value will be taken from
``PERMISSION_DEFAULT_SPL_ADD_PERMISSION`` in
settings.
change_permission : boolean
True for give change permission of the specified object to the
staff user.
It will be ignored if :attr:`any_permission` is True.
Default value will be taken from
``PERMISSION_DEFAULT_SPL_CHANGE_PERMISSION`` in
settings.
delete_permission : boolean
True for give delete permission of the specified object to the
staff user.
It will be ignored if :attr:`any_permission` is True.
Default value will be taken from
``PERMISSION_DEFAULT_SPL_DELETE_PERMISSION`` in
settings.
"""
self.any_permission = any_permission
self.add_permission = add_permission
self.change_permission = change_permission
self.delete_permission = delete_permission
if self.any_permission is None:
self.any_permission = \
settings.PERMISSION_DEFAULT_SPL_ANY_PERMISSION
if self.add_permission is None:
self.add_permission = \
settings.PERMISSION_DEFAULT_SPL_ADD_PERMISSION
if self.change_permission is None:
self.change_permission = \
settings.PERMISSION_DEFAULT_SPL_CHANGE_PERMISSION
if self.delete_permission is None:
self.delete_permission = \
settings.PERMISSION_DEFAULT_SPL_DELETE_PERMISSION
def has_perm(self, user_obj, perm, obj=None):
"""
Check if user have permission (of object)
If the user_obj is not authenticated, it return ``False``.
If no object is specified, it return ``True`` when the corresponding
permission was specified to ``True`` (changed from v0.7.0).
This behavior is based on the django system.
https://code.djangoproject.com/wiki/RowLevelPermissions
If an object is specified, it will return ``True`` if the user is
staff. The staff can add, change or delete the object (you can change
this behavior to set ``any_permission``, ``add_permission``,
``change_permission``, or ``delete_permission`` attributes of this
instance).
Parameters
----------
user_obj : django user model instance
A django user model instance which be checked
perm : string
`app_label.codename` formatted permission string
obj : None or django model instance
None or django model instance for object permission
Returns
-------
boolean
Weather the specified user have specified permission (of specified
object).
"""
if not is_authenticated(user_obj):
return False
# construct the permission full name
add_permission = self.get_full_permission_string('add')
change_permission = self.get_full_permission_string('change')
delete_permission = self.get_full_permission_string('delete')
if obj is None:
if user_obj.is_staff:
if self.add_permission and perm == add_permission:
return True
if self.change_permission and perm == change_permission:
return True
if self.delete_permission and perm == delete_permission:
return True
return self.any_permission
return False
elif user_obj.is_active:
if user_obj.is_staff:
if self.any_permission:
# have any kind of permissions to the obj
return True
if (self.add_permission and
perm == add_permission):
return True
if (self.change_permission and
perm == change_permission):
return True
if (self.delete_permission and
perm == delete_permission):
return True
return False
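
# Hedged usage sketch (model name invented): logics like this are normally
# registered per-model with django-permission's add_permission_logic, e.g.
#
#     from permission import add_permission_logic
#     from permission.logics import StaffPermissionLogic
#     add_permission_logic(Article, StaffPermissionLogic(any_permission=True))
#
# after which user.has_perm('app.change_article', obj) consults has_perm above.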
socialplanning/opencore | opencore/listen/featurelet.py | gpl-3.0
import logging
from opencore.featurelets.interfaces import IListenContainer
from opencore.featurelets.interfaces import IListenFeatureletInstalled
from opencore.feed.interfaces import ICanFeed
from opencore.interfaces import IProject
from opencore.interfaces.event import ListenFeatureletCreatedEvent
from opencore.listen.mailinglist import OpenMailingList
from Products.CMFCore.utils import getToolByName
from Products.listen.interfaces import IListLookup
from topp.featurelets.base import BaseFeaturelet
from topp.featurelets.interfaces import IFeaturelet
from topp.featurelets.interfaces import IFeatureletSupporter
from zope.component import getMultiAdapter
from zope.component import getUtility
from zope.interface import Interface
from zope.interface import alsoProvides
from zope.interface import implements
from zope.event import notify
log = logging.getLogger('opencore.featurelets.listen')
class ListenFeaturelet(BaseFeaturelet):
"""
A featurelet that installs a folder for managing listen based
mailing lists.
"""
implements(IFeaturelet)
id = "listen"
title = "Mailing lists"
#config_view = "listen_config"
installed_marker = IListenFeatureletInstalled
_info = {'content': ({'id': 'lists', 'title': 'Mailing lists',
'portal_type': 'Folder'},),
'menu_items': ({'title': u'Mailing lists',
'description': u'Mailing lists',
'action': u'lists',
'order': 0,
},
),
}
def deliverPackage(self, obj):
"""
See IFeaturelet.
"""
BaseFeaturelet.deliverPackage(self, obj)
container = obj._getOb(self._info['content'][0]['id'])
container.setLayout('mailing_lists')
alsoProvides(container, IListenContainer)
alsoProvides(container, ICanFeed)
notify(ListenFeatureletCreatedEvent(obj))
return self._info
hydroshare/hydroshare | hs_rest_api/resources/file_metadata.py | bsd-3-clause
import os
from django.core.exceptions import ObjectDoesNotExist
from rest_framework.response import Response
from rest_framework import generics
from rest_framework import serializers
from rest_framework.exceptions import APIException, NotFound
from hs_core.models import ResourceFile
from hs_rest_api.permissions import CanViewOrEditResourceMetadata
from hs_core import hydroshare
# TODO: Once we upgrade past Django Rest Framework 3.3, this won't be necessary
class JSONSerializerField(serializers.Field):
""" Serializer for JSONField -- required to make field writable"""
def to_internal_value(self, data):
return data
def to_representation(self, value):
return value
class FileMetaDataSerializer(serializers.Serializer):
title = serializers.CharField(required=False)
keywords = JSONSerializerField(required=False)
spatial_coverage = JSONSerializerField(required=False)
extra_metadata = JSONSerializerField(required=False)
temporal_coverage = JSONSerializerField(required=False)
logical_file = JSONSerializerField(required=False)
class FileMetaDataRetrieveUpdateDestroy(generics.RetrieveUpdateDestroyAPIView):
serializer_class = FileMetaDataSerializer
allowed_methods = ('GET', 'PUT',)
permission_classes = (CanViewOrEditResourceMetadata,)
def get(self, request, pk, pathname):
"""
Get a resource file's metadata.
## Parameters
* `id` - alphanumeric uuid of the resource, i.e. cde01b3898c94cdab78a2318330cf795
        * `pathname` - The pathname of the file to get metadata for
## Returns
```
{
"keywords": [
"keyword1",
"keyword2"
],
"spatial_coverage": {
"units": "Decimal degrees",
"east": -84.0465,
"north": 49.6791,
"name": "12232",
"projection": "WGS 84 EPSG:4326"
},
"extra_metadata": {
"extended1": "one"
},
"temporal_coverage": {
"start": "2018-02-22",
"end": "2018-02-24"
},
"title": "File Metadata Title",
"logical_file": {}
}
```
"""
try:
resource_file = hydroshare.get_resource_file(pk, pathname)
logical_file = resource_file.logical_file
metadata = resource_file.metadata
except ObjectDoesNotExist:
# Backwards compatibility for file_id
try:
resource_file = ResourceFile.objects.get(id=pathname)
logical_file = resource_file.logical_file
metadata = resource_file.metadata
except Exception:
# is it a folder?
resource = hydroshare.get_resource_by_shortkey(pk)
dir_path = pk + os.path.join("/data/contents/", pathname)
logical_file = resource.get_folder_aggregation_object(dir_path)
metadata = None
title = logical_file.dataset_name \
if logical_file else ""
keywords = metadata.keywords \
if metadata else []
spatial_coverage = metadata.spatial_coverage.value \
if metadata and metadata.spatial_coverage else {}
extra_metadata = metadata.extra_metadata \
if metadata else {}
temporal_coverage = metadata.temporal_coverage.value if \
metadata and metadata.temporal_coverage else {}
extra_data = logical_file.metadata.dict() \
if logical_file else {}
# TODO: How to leverage serializer for this?
return Response({
"title": title,
"keywords": keywords,
"spatial_coverage": spatial_coverage,
"extra_metadata": extra_metadata,
"temporal_coverage": temporal_coverage,
"logical_file": extra_data
})
def put(self, request, pk, pathname):
"""
Update a resource file's metadata
Accepts application/json encoding.
## Parameters
* `id` - alphanumeric uuid of the resource, i.e. cde01b3898c94cdab78a2318330cf795
* `pathname` - The pathname of the file
* `data` - see the "returns" section for formatting
## Returns
```
{
"keywords": [
"keyword1",
"keyword2"
],
"spatial_coverage": {
"units": "Decimal degrees",
"east": -84.0465,
"north": 49.6791,
"name": "12232",
"projection": "WGS 84 EPSG:4326"
},
"extra_metadata": {
"extended1": "one"
},
"temporal_coverage": {
"start": "2018-02-22",
"end": "2018-02-24"
},
"title": "File Metadata Title"
}
```
"""
file_serializer = FileMetaDataSerializer(request.data)
try:
title = file_serializer.data.pop("title", "")
try:
resource_file = hydroshare.get_resource_file(pk, pathname)
except ObjectDoesNotExist:
# Backwards compatibility for file_id
resource_file = ResourceFile.objects.get(id=pathname)
if resource_file is None:
raise NotFound("File {} in resource {} does not exist".format(pathname, pk))
resource_file.metadata.logical_file.dataset_name = title
resource_file.metadata.logical_file.save()
spatial_coverage = file_serializer.data.pop("spatial_coverage", None)
if spatial_coverage is not None:
# defaulting to point if not provided for backwards compatibility
type = spatial_coverage["type"] if "type" in spatial_coverage else "point"
if resource_file.metadata.spatial_coverage is not None:
cov_id = resource_file.metadata.spatial_coverage.id
resource_file.metadata.update_element('coverage',
cov_id,
type=type,
value=spatial_coverage)
elif resource_file.metadata.spatial_coverage is None:
resource_file.metadata.create_element('coverage', type=type,
value=spatial_coverage)
temporal_coverage = file_serializer.data.pop("temporal_coverage", None)
if temporal_coverage is not None:
if resource_file.metadata.temporal_coverage is not None:
cov_id = resource_file.metadata.temporal_coverage.id
resource_file.metadata.update_element('coverage',
cov_id,
type='period',
value=temporal_coverage)
elif resource_file.metadata.temporal_coverage is None:
resource_file.metadata.create_element('coverage', type="period",
value=temporal_coverage)
keywords = file_serializer.data.pop("keywords", None)
if keywords is not None:
resource_file.metadata.keywords = keywords
extra_metadata = file_serializer.data.pop("extra_metadata", None)
if extra_metadata is not None:
resource_file.metadata.extra_metadata = extra_metadata
resource_file.metadata.save()
except Exception as e:
raise APIException(e)
# TODO: How to leverage serializer for this?
title = resource_file.metadata.logical_file.dataset_name \
if resource_file.metadata.logical_file else ""
keywords = resource_file.metadata.keywords \
if resource_file.metadata else []
spatial_coverage = resource_file.metadata.spatial_coverage.value \
if resource_file.metadata.spatial_coverage else {}
extra_metadata = resource_file.metadata.extra_metadata \
if resource_file.metadata else {}
temporal_coverage = resource_file.metadata.temporal_coverage.value if \
resource_file.metadata.temporal_coverage else {}
return Response({
"title": title,
"keywords": keywords,
"spatial_coverage": spatial_coverage,
"extra_metadata": extra_metadata,
"temporal_coverage": temporal_coverage
})
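
# Hedged client sketch (host and URL pattern invented) for the PUT handler
# above; the JSON shape mirrors the docstring's "Returns" block.
#
#     import requests
#     requests.put(
#         "https://www.hydroshare.org/hsapi/resource/<pk>/files/<pathname>/metadata/",
#         json={"title": "File Metadata Title",
#               "keywords": ["keyword1", "keyword2"],
#               "extra_metadata": {"extended1": "one"}},
#         auth=("user", "password"),
#     )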
swift-lang/swift-e-lab | parsl/tests/sites/test_multinode_mpi.py | apache-2.0
import argparse
import pytest
import parsl
from parsl.app.app import App
from parsl.tests.configs.cori_ipp_multinode import config
from parsl.tests.conftest import load_dfk
parsl.clear()
parsl.load(config)
parsl.set_stream_logger()
@App("python")
def python_app_slow(duration):
import platform
import time
time.sleep(duration)
return "Hello from {0}".format(platform.uname())
@pytest.mark.skip('not asserting anything')
def test_python_remote(count=10):
"""Run with no delay"""
fus = []
for i in range(0, count):
fu = python_app_slow(0)
fus.extend([fu])
for fu in fus:
print(fu.result())
@pytest.mark.skip('not asserting anything')
def test_python_remote_slow(count=20):
fus = []
for i in range(0, count):
fu = python_app_slow(count)
fus.extend([fu])
for fu in fus:
print(fu.result())
@App("bash")
def bash_mpi_app(stdout=None, stderr=None):
return """ls -thor
mpi_hello
"""
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("-c", "--config", default='local',
help="Path to configuration file to run")
args = parser.parse_args()
load_dfk(args.config)
items = []
for i in range(0, 4):
x = bash_mpi_app(stdout="parsl.{0}.out".format(i),
stderr="parsl.{0}.err".format(i))
items.extend([x])
for i in items:
print(i.result())
1905410/Misago | misago/readtracker/tests/test_readtracker.py | gpl-2.0
from datetime import timedelta
from django.contrib.auth import get_user_model
from django.test import TestCase
from django.utils import timezone
from misago.acl import add_acl
from misago.categories.models import Category
from misago.threads import testutils
from misago.users.models import AnonymousUser
from .. import categoriestracker, threadstracker
class ReadTrackerTests(TestCase):
def setUp(self):
self.categories = list(Category.objects.all_categories()[:1])
self.category = self.categories[0]
User = get_user_model()
self.user = User.objects.create_user("Bob", "[email protected]", "Pass.123")
self.anon = AnonymousUser()
def post_thread(self, datetime):
return testutils.post_thread(
category=self.category,
started_on=datetime
)
class CategorysTrackerTests(ReadTrackerTests):
def test_anon_empty_category_read(self):
"""anon users content is always read"""
categoriestracker.make_read_aware(self.anon, self.categories)
self.assertIsNone(self.category.last_post_on)
self.assertTrue(self.category.is_read)
def test_anon_category_with_recent_reply_read(self):
"""anon users content is always read"""
categoriestracker.make_read_aware(self.anon, self.categories)
self.category.last_post_on = timezone.now()
self.assertTrue(self.category.is_read)
def test_empty_category_is_read(self):
"""empty category is read for signed in user"""
categoriestracker.make_read_aware(self.user, self.categories)
self.assertTrue(self.category.is_read)
def test_make_read_aware_sets_read_flag_for_empty_category(self):
"""make_read_aware sets read flag on empty category"""
categoriestracker.make_read_aware(self.anon, self.categories)
self.assertTrue(self.category.is_read)
categoriestracker.make_read_aware(self.user, self.categories)
self.assertTrue(self.category.is_read)
def test_make_read_aware_sets_read_flag_for_category_with_old_thread(self):
"""make_read_aware sets read flag on category with old thread"""
self.category.last_post_on = self.user.joined_on - timedelta(days=1)
categoriestracker.make_read_aware(self.user, self.categories)
self.assertTrue(self.category.is_read)
def test_make_read_aware_sets_unread_flag_for_category_with_new_thread(self):
"""make_read_aware sets unread flag on category with new thread"""
self.category.last_post_on = self.user.joined_on + timedelta(days=1)
categoriestracker.make_read_aware(self.user, self.categories)
self.assertFalse(self.category.is_read)
def test_sync_record_for_empty_category(self):
"""sync_record sets read flag on empty category"""
add_acl(self.user, self.categories)
categoriestracker.sync_record(self.user, self.category)
self.user.categoryread_set.get(category=self.category)
categoriestracker.make_read_aware(self.user, self.categories)
self.assertTrue(self.category.is_read)
def test_sync_record_for_category_with_old_thread_and_reply(self):
"""
sync_record sets read flag on category with old thread,
then changes flag to unread when new reply is posted
"""
self.post_thread(self.user.joined_on - timedelta(days=1))
add_acl(self.user, self.categories)
categoriestracker.sync_record(self.user, self.category)
self.user.categoryread_set.get(category=self.category)
categoriestracker.make_read_aware(self.user, self.categories)
self.assertTrue(self.category.is_read)
thread = self.post_thread(self.user.joined_on + timedelta(days=1))
categoriestracker.sync_record(self.user, self.category)
categoriestracker.make_read_aware(self.user, self.categories)
self.assertFalse(self.category.is_read)
def test_sync_record_for_category_with_new_thread(self):
"""
sync_record sets read flag on category with old thread,
then keeps flag to unread when new reply is posted
"""
self.post_thread(self.user.joined_on + timedelta(days=1))
add_acl(self.user, self.categories)
categoriestracker.sync_record(self.user, self.category)
self.user.categoryread_set.get(category=self.category)
categoriestracker.make_read_aware(self.user, self.categories)
self.assertFalse(self.category.is_read)
self.post_thread(self.user.joined_on + timedelta(days=1))
categoriestracker.sync_record(self.user, self.category)
categoriestracker.make_read_aware(self.user, self.categories)
self.assertFalse(self.category.is_read)
def test_sync_record_for_category_with_deleted_threads(self):
"""unread category reverts to read after its emptied"""
self.post_thread(self.user.joined_on + timedelta(days=1))
self.post_thread(self.user.joined_on + timedelta(days=1))
self.post_thread(self.user.joined_on + timedelta(days=1))
add_acl(self.user, self.categories)
categoriestracker.sync_record(self.user, self.category)
categoriestracker.make_read_aware(self.user, self.categories)
self.assertFalse(self.category.is_read)
self.category.thread_set.all().delete()
self.category.synchronize()
self.category.save()
categoriestracker.make_read_aware(self.user, self.categories)
self.assertTrue(self.category.is_read)
def test_sync_record_for_category_with_many_threads(self):
"""sync_record sets unread flag on category with many threads"""
self.post_thread(self.user.joined_on + timedelta(days=1))
self.post_thread(self.user.joined_on - timedelta(days=1))
self.post_thread(self.user.joined_on + timedelta(days=1))
self.post_thread(self.user.joined_on - timedelta(days=1))
add_acl(self.user, self.categories)
categoriestracker.sync_record(self.user, self.category)
self.user.categoryread_set.get(category=self.category)
categoriestracker.make_read_aware(self.user, self.categories)
self.assertFalse(self.category.is_read)
self.post_thread(self.user.joined_on + timedelta(days=1))
categoriestracker.sync_record(self.user, self.category)
categoriestracker.make_read_aware(self.user, self.categories)
self.assertFalse(self.category.is_read)
def test_read_leaf_category(self):
"""read_category reads leaf category for user"""
categoriestracker.read_category(self.user, self.category)
self.assertTrue(self.user.categoryread_set.get(category=self.category))
def test_read_root_category(self):
"""read_category reads its subcategories for user"""
root_category = Category.objects.root_category()
categoriestracker.read_category(self.user, root_category)
child_read = self.user.categoryread_set.get(category=self.category)
        self.assertTrue(child_read.last_read_on > timezone.now() - timedelta(seconds=3))
class ThreadsTrackerTests(ReadTrackerTests):
def setUp(self):
super(ThreadsTrackerTests, self).setUp()
self.thread = self.post_thread(timezone.now() - timedelta(days=10))
def reply_thread(self, is_hidden=False, is_unapproved=False):
self.post = testutils.reply_thread(
thread=self.thread,
is_hidden=is_hidden,
is_unapproved=is_unapproved,
posted_on=timezone.now()
)
return self.post
def test_thread_read_for_guest(self):
"""threads are always read for guests"""
threadstracker.make_read_aware(self.anon, self.thread)
self.assertTrue(self.thread.is_read)
self.reply_thread()
threadstracker.make_read_aware(self.anon, [self.thread])
self.assertTrue(self.thread.is_read)
def test_thread_read_for_user(self):
"""thread is read for user"""
threadstracker.make_read_aware(self.user, self.thread)
self.assertTrue(self.thread.is_read)
def test_thread_replied_unread_for_user(self):
"""replied thread is unread for user"""
self.reply_thread()
threadstracker.make_read_aware(self.user, self.thread)
self.assertFalse(self.thread.is_read)
def _test_thread_read(self):
"""thread read flag is set for user, then its set as unread by reply"""
self.reply_thread()
add_acl(self.user, self.categories)
threadstracker.make_read_aware(self.user, self.thread)
self.assertFalse(self.thread.is_read)
threadstracker.read_thread(self.user, self.thread, self.post)
threadstracker.make_read_aware(self.user, self.thread)
self.assertTrue(self.thread.is_read)
categoriestracker.make_read_aware(self.user, self.categories)
self.assertTrue(self.category.is_read)
self.thread.last_post_on = timezone.now()
self.thread.save()
self.category.synchronize()
self.category.save()
self.reply_thread()
threadstracker.make_read_aware(self.user, self.thread)
self.assertFalse(self.thread.is_read)
categoriestracker.make_read_aware(self.user, self.categories)
self.assertFalse(self.category.is_read)
posts = [post for post in self.thread.post_set.order_by('id')]
threadstracker.make_posts_read_aware(self.user, self.thread, posts)
for post in posts[:-1]:
self.assertTrue(post.is_read)
self.assertFalse(posts[-1].is_read)
pakpoomton/CellmodellerShadow | CellModeller/Biophysics/BacterialModels/CLBacterium.py
import sys
import math
import numpy
import pyopencl as cl
import pyopencl.array as cl_array
from pyopencl.array import vec
from pyopencl.elementwise import ElementwiseKernel
from pyopencl.reduction import ReductionKernel
import random
ct_map = {}
class CLBacterium:
"""A rigid body model of bacterial growth implemented using
OpenCL.
"""
def __init__(self, simulator,
max_substeps=8,
max_cells=2**15,
max_contacts=32,
max_planes=4,
max_sqs=64**2,
grid_spacing=5.0,
muA=1.0,
gamma=10.0,
cgs_tol=1e-3,
reg_param=0.2,
jitter_z=True,
alternate_divisions=False):
self.frame_no = 0
self.simulator = simulator
self.regulator = None
self.max_cells = max_cells
self.max_contacts = max_contacts
self.max_planes = max_planes
self.max_sqs = max_sqs
self.grid_spacing = grid_spacing
self.muA = muA
self.gamma = gamma
self.cgs_tol = cgs_tol
self.reg_param = numpy.float32(reg_param)
self.max_substeps = max_substeps
self.n_cells = 0
self.n_cts = 0
self.n_planes = 0
self.next_id = 0
self.grid_x_min = 0
self.grid_x_max = 0
self.grid_y_min = 0
self.grid_y_max = 0
self.n_sqs = 0
self.init_cl()
self.init_kernels()
self.init_data()
self.parents = {}
self.jitter_z = jitter_z
self.alternate_divisions = alternate_divisions
self.maxVel = 1.0
# Biophysical Model interface
def reset(self):
self.n_cells=0
self.n_cts=0
self.n_planes=0
def setRegulator(self, regulator):
self.regulator = regulator
def addCell(self, cellState, pos=(0,0,0), dir=(1,0,0), len=4.0, rad=0.5):
i = cellState.idx
self.n_cells += 1
cid = cellState.id
self.cell_centers[i] = tuple(pos+(0,))
self.cell_dirs[i] = tuple(dir+(0,))
self.cell_lens[i] = len
self.cell_rads[i] = rad
self.initCellState(cellState)
self.set_cells()
self.calc_cell_geom() # cell needs a volume
def addPlane(self, pt, norm, coeff):
pidx = self.n_planes
self.n_planes += 1
self.plane_pts[pidx] = tuple(pt)+(0,)
self.plane_norms[pidx] = tuple(norm) + (0,)
self.plane_coeffs[pidx] = coeff
self.set_planes()
def hasNeighbours(self):
return False
def divide(self, parentState, daughter1State, daughter2State, *args, **kwargs):
self.divide_cell(parentState.idx, daughter1State.idx, daughter2State.idx)
# Initialise cellState data
self.initCellState(daughter1State)
self.initCellState(daughter2State)
def init_cl(self):
if self.simulator:
(self.context, self.queue) = self.simulator.getOpenCL()
def init_kernels(self):
"""Set up the OpenCL kernels."""
kernel_src = open('CellModeller/Biophysics/BacterialModels/CLBacterium.cl', 'r').read()
self.program = cl.Program(self.context, kernel_src).build(cache_dir=False)
# Some kernels that seem like they should be built into pyopencl...
self.vclearf = ElementwiseKernel(self.context, "float8 *v", "v[i]=0.0", "vecclearf")
self.vcleari = ElementwiseKernel(self.context, "int *v", "v[i]=0", "veccleari")
self.vadd = ElementwiseKernel(self.context, "float8 *res, const float8 *in1, const float8 *in2",
"res[i] = in1[i] + in2[i]", "vecadd")
self.vsub = ElementwiseKernel(self.context, "float8 *res, const float8 *in1, const float8 *in2",
"res[i] = in1[i] - in2[i]", "vecsub")
self.vaddkx = ElementwiseKernel(self.context,
"float8 *res, const float k, const float8 *in1, const float8 *in2",
"res[i] = in1[i] + k*in2[i]", "vecaddkx")
self.vsubkx = ElementwiseKernel(self.context,
"float8 *res, const float k, const float8 *in1, const float8 *in2",
"res[i] = in1[i] - k*in2[i]", "vecsubkx")
self.vmax = ReductionKernel(self.context, numpy.float32, neutral="0",
reduce_expr="a>b ? a : b", map_expr="length(x[i])",
arguments="__global float4 *x")
# cell geometry kernels
self.calc_cell_area = ElementwiseKernel(self.context, "float* res, float* r, float* l",
"res[i] = 2.f*3.1415927f*r[i]*(2.f*r[i]+l[i])", "cell_area_kern")
self.calc_cell_vol = ElementwiseKernel(self.context, "float* res, float* r, float* l",
"res[i] = 3.1415927f*r[i]*r[i]*(2.f*r[i]+l[i])", "cell_vol_kern")
# A dot product as sum of float4 dot products -
# i.e. like flattening vectors of float8s into big float vectors
# then computing dot
# NB. Some openCLs seem not to implement dot(float8,float8) so split
# into float4's
self.vdot = ReductionKernel(self.context, numpy.float32, neutral="0",
reduce_expr="a+b", map_expr="dot(x[i].s0123,y[i].s0123)+dot(x[i].s4567,y[i].s4567)",
arguments="__global float8 *x, __global float8 *y")
def init_data(self):
"""Set up the data OpenCL will store on the device."""
# cell data
cell_geom = (self.max_cells,)
self.cell_centers = numpy.zeros(cell_geom, vec.float4)
self.cell_centers_dev = cl_array.zeros(self.queue, cell_geom, vec.float4)
self.cell_dirs = numpy.zeros(cell_geom, vec.float4)
self.cell_dirs_dev = cl_array.zeros(self.queue, cell_geom, vec.float4)
self.cell_lens = numpy.zeros(cell_geom, numpy.float32)
self.cell_lens_dev = cl_array.zeros(self.queue, cell_geom, numpy.float32)
self.pred_cell_centers = numpy.zeros(cell_geom, vec.float4)
self.pred_cell_centers_dev = cl_array.zeros(self.queue, cell_geom, vec.float4)
self.pred_cell_dirs = numpy.zeros(cell_geom, vec.float4)
self.pred_cell_dirs_dev = cl_array.zeros(self.queue, cell_geom, vec.float4)
self.pred_cell_lens = numpy.zeros(cell_geom, numpy.float32)
self.pred_cell_lens_dev = cl_array.zeros(self.queue, cell_geom, numpy.float32)
self.cell_rads = numpy.zeros(cell_geom, numpy.float32)
self.cell_rads_dev = cl_array.zeros(self.queue, cell_geom, numpy.float32)
self.cell_sqs = numpy.zeros(cell_geom, numpy.int32)
self.cell_sqs_dev = cl_array.zeros(self.queue, cell_geom, numpy.int32)
self.cell_n_cts = numpy.zeros(cell_geom, numpy.int32)
self.cell_n_cts_dev = cl_array.zeros(self.queue, cell_geom, numpy.int32)
self.cell_dcenters = numpy.zeros(cell_geom, vec.float4)
self.cell_dcenters_dev = cl_array.zeros(self.queue, cell_geom, vec.float4)
self.cell_dangs = numpy.zeros(cell_geom, vec.float4)
self.cell_dangs_dev = cl_array.zeros(self.queue, cell_geom, vec.float4)
self.cell_dlens = numpy.zeros(cell_geom, numpy.float32)
self.cell_dlens_dev = cl_array.zeros(self.queue, cell_geom, numpy.float32)
self.cell_target_dlens_dev = cl_array.zeros(self.queue, cell_geom, numpy.float32)
self.cell_growth_rates = numpy.zeros(cell_geom, numpy.float32)
# cell geometry calculated from l and r
self.cell_areas_dev = cl_array.zeros(self.queue, cell_geom, numpy.float32)
self.cell_vols_dev = cl_array.zeros(self.queue, cell_geom, numpy.float32)
self.cell_old_vols_dev = cl_array.zeros(self.queue, cell_geom, numpy.float32)
# gridding
self.sq_inds = numpy.zeros((self.max_sqs,), numpy.int32)
self.sq_inds_dev = cl_array.zeros(self.queue, (self.max_sqs,), numpy.int32)
self.sorted_ids = numpy.zeros(cell_geom, numpy.int32)
self.sorted_ids_dev = cl_array.zeros(self.queue, cell_geom, numpy.int32)
# constraint planes
plane_geom = (self.max_planes,)
self.plane_pts = numpy.zeros(plane_geom, vec.float4)
self.plane_pts_dev = cl_array.zeros(self.queue, plane_geom, vec.float4)
self.plane_norms = numpy.zeros(plane_geom, vec.float4)
self.plane_norms_dev = cl_array.zeros(self.queue, plane_geom, vec.float4)
self.plane_coeffs = numpy.zeros(plane_geom, numpy.float32)
self.plane_coeffs_dev = cl_array.zeros(self.queue, plane_geom, numpy.float32)
# contact data
ct_geom = (self.max_cells, self.max_contacts)
self.ct_frs = numpy.zeros(ct_geom, numpy.int32)
self.ct_frs_dev = cl_array.zeros(self.queue, ct_geom, numpy.int32)
self.ct_tos = numpy.zeros(ct_geom, numpy.int32)
self.ct_tos_dev = cl_array.zeros(self.queue, ct_geom, numpy.int32)
self.ct_dists = numpy.zeros(ct_geom, numpy.float32)
self.ct_dists_dev = cl_array.zeros(self.queue, ct_geom, numpy.float32)
self.ct_pts = numpy.zeros(ct_geom, vec.float4)
self.ct_pts_dev = cl_array.zeros(self.queue, ct_geom, vec.float4)
self.ct_norms = numpy.zeros(ct_geom, vec.float4)
self.ct_norms_dev = cl_array.zeros(self.queue, ct_geom, vec.float4)
self.ct_stiff_dev = cl_array.zeros(self.queue, ct_geom, numpy.float32)
# where the contacts pointing to this cell are collected
self.cell_tos = numpy.zeros(ct_geom, numpy.int32)
self.cell_tos_dev = cl_array.zeros(self.queue, ct_geom, numpy.int32)
self.n_cell_tos = numpy.zeros(cell_geom, numpy.int32)
self.n_cell_tos_dev = cl_array.zeros(self.queue, cell_geom, numpy.int32)
# the constructed 'matrix'
mat_geom = (self.max_cells*self.max_contacts,)
self.ct_inds = numpy.zeros(mat_geom, numpy.int32)
self.ct_inds_dev = cl_array.zeros(self.queue, mat_geom, numpy.int32)
self.ct_reldists = numpy.zeros(mat_geom, numpy.float32)
self.ct_reldists_dev = cl_array.zeros(self.queue, mat_geom, numpy.float32)
self.fr_ents = numpy.zeros(mat_geom, vec.float8)
self.fr_ents_dev = cl_array.zeros(self.queue, mat_geom, vec.float8)
self.to_ents = numpy.zeros(mat_geom, vec.float8)
self.to_ents_dev = cl_array.zeros(self.queue, mat_geom, vec.float8)
# vectors and intermediates
self.deltap = numpy.zeros(cell_geom, vec.float8)
self.deltap_dev = cl_array.zeros(self.queue, cell_geom, vec.float8)
self.Mx = numpy.zeros(mat_geom, numpy.float32)
self.Mx_dev = cl_array.zeros(self.queue, mat_geom, numpy.float32)
self.MTMx = numpy.zeros(cell_geom, vec.float8)
self.MTMx_dev = cl_array.zeros(self.queue, cell_geom, vec.float8)
self.Minvx_dev = cl_array.zeros(self.queue, cell_geom, vec.float8)
# CGS intermediates
self.p_dev = cl_array.zeros(self.queue, cell_geom, vec.float8)
self.Ap_dev = cl_array.zeros(self.queue, cell_geom, vec.float8)
self.res_dev = cl_array.zeros(self.queue, cell_geom, vec.float8)
self.rhs_dev = cl_array.zeros(self.queue, cell_geom, vec.float8)
def load_from_cellstates(self, cell_states):
for (cid,cs) in cell_states.items():
i = cs.idx
self.cell_centers[i] = tuple(cs.pos)+(0,)
self.cell_dirs[i] = tuple(cs.dir)+(0,)
self.cell_rads[i] = cs.radius
self.cell_lens[i] = cs.length
self.n_cells = len(cell_states)
self.set_cells()
def load_test_data(self):
import CellModeller.Biophysics.BacterialModels.CLData as data
self.cell_centers.put(range(len(data.pos)), data.pos)
self.cell_dirs.put(range(len(data.dirs)), data.dirs)
self.cell_lens.put(range(len(data.lens)), data.lens)
self.cell_rads.put(range(len(data.rads)), data.rads)
self.n_cells = data.n_cells
self.set_cells()
def load_1_cell(self):
self.cell_centers.put([0], [(0,0,0,0)])
self.cell_dirs.put([0], [(1,0,0,0)])
self.cell_lens.put([0], [2.0])
self.cell_rads.put([0], [0.5])
self.n_cells = 1
self.set_cells()
def load_2_cells(self):
root2 = numpy.sqrt(2.0)
self.cell_centers.put([0,1], [(-root2-0.5, 0, 0, 0), (root2+0.5, 0, 0, 0)])
self.cell_dirs.put([0,1], [(root2/2.0, root2/2.0, 0, 0), (-root2/2.0, root2/2.0, 0, 0)])
self.cell_lens.put([0,1], [4.0, 4.0])
self.cell_rads.put([0,1], [0.5, 0.5])
self.n_cells = 2
self.set_cells()
def load_3_cells(self):
root2 = numpy.sqrt(2.0)
self.cell_centers.put([0,1,2], [(-root2-0.5, 0, 0, 0), (root2+0.5, 0, 0, 0), (root2+0.5+3.3, 0, 0, 0)])
self.cell_dirs.put([0,1,2], [(root2/2.0, root2/2.0, 0, 0), (-root2/2.0, root2/2.0, 0, 0), (1, 0, 0, 0)])
self.cell_lens.put([0,1,2], [3.0, 3.0, 3.0])
self.cell_rads.put([0,1,2], [0.5, 0.5, 0.5])
self.n_cells = 3
self.set_cells()
def load_3_cells_1_plane(self):
root2 = numpy.sqrt(2.0)
self.cell_centers.put([0,1,2], [(-root2-0.5, 0, 0, 0), (root2+0.5, 0, 0, 0), (root2+0.5+3.3, 0, 0, 0)])
self.cell_dirs.put([0,1,2], [(root2/2.0, root2/2.0, 0, 0), (-root2/2.0, root2/2.0, 0, 0), (1, 0, 0, 0)])
self.cell_lens.put([0,1,2], [3.0, 3.0, 3.0])
self.cell_rads.put([0,1,2], [0.5, 0.5, 0.5])
self.n_cells = 3
self.set_cells()
self.n_planes = 1
self.plane_pts.put([0], [(0, 0, -0.5, 0)])
self.plane_norms.put([0], [(0, 0, 1, 0)])
self.plane_coeffs.put([0], [0.5])
self.set_planes()
def load_3_cells_2_planes(self):
root2 = numpy.sqrt(2.0)
self.cell_centers.put([0,1,2], [(-root2-0.5, 0, 0, 0), (root2+0.5, 0, 0, 0), (root2+0.5+3.3, 0, 0, 0)])
self.cell_dirs.put([0,1,2], [(root2/2.0, root2/2.0, 0, 0), (-root2/2.0, root2/2.0, 0, 0), (1, 0, 0, 0)])
self.cell_lens.put([0,1,2], [3.0, 3.0, 3.0])
self.cell_rads.put([0,1,2], [0.5, 0.5, 0.5])
self.n_cells = 3
self.set_cells()
self.n_planes = 2
self.plane_pts.put([0,1], [(0, 0, -0.5, 0), (0, 0, 0.5, 0)])
self.plane_norms.put([0,1], [(0, 0, 1, 0), (0, 0, -1, 0)])
self.plane_coeffs.put([0,1], [0.5, 0.1])
self.set_planes()
def load_1_cell_1_plane(self):
self.cell_centers.put([0], [(0,0,0,0)])
self.cell_dirs.put([0], [(1,0,0,0)])
self.cell_lens.put([0], [3.0])
self.cell_rads.put([0], [0.5])
self.n_cells = 1
self.set_cells()
self.plane_pts.put([0], [(4, 0, 0, 0)])
self.plane_norms.put([0], [(-1, 0, 0, 0)])
self.plane_coeffs.put([0], [0.5])
self.n_planes = 1
self.set_planes()
def load_1024_cells(self):
d = 32
for i in range(-d/2,d/2):
for j in range(-d/2,d/2):
n = (i+d/2)*d + (j+d/2)
x = i*3.5 + random.uniform(-0.05,0.05)
y = j*2.0 + random.uniform(-0.05,0.05)
th = random.uniform(-0.15, 0.15)
dir_x = math.cos(th)
dir_y = math.sin(th)
self.cell_centers.put([n], [(x, y, 0, 0)])
self.cell_dirs.put([n], [(dir_x, dir_y, 0, 0)])
self.cell_lens.put([n], [2])
self.cell_rads.put([n], 0.5)
self.n_cells = d*d
self.set_cells()
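# NB: the following method redefines load_from_cellstates from earlier in
# this class; Python keeps only this later definition.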
def load_from_cellstates(self, cell_states):
for (id, cs) in cell_states.items():
self.cell_centers.put([cs.idx], [tuple(cs.pos)+(0,)])
self.cell_dirs.put([cs.idx], [tuple(cs.dir)+(0,)])
self.cell_lens.put([cs.idx], [cs.length])
self.cell_rads.put([cs.idx], cs.radius)
self.n_cells = len(cell_states)
self.set_cells()
def get_cells(self):
"""Copy cell centers, dirs, lens, and rads from the device."""
self.cell_centers = self.cell_centers_dev.get()
self.cell_dirs = self.cell_dirs_dev.get()
self.cell_lens = self.cell_lens_dev.get()
self.cell_rads = self.cell_rads_dev.get()
self.cell_dlens = self.cell_dlens_dev.get()
self.cell_dcenters = self.cell_dcenters_dev.get()
self.cell_dangs = self.cell_dangs_dev.get()
def set_cells(self):
"""Copy cell centers, dirs, lens, and rads to the device from local."""
self.cell_centers_dev.set(self.cell_centers)
self.cell_dirs_dev.set(self.cell_dirs)
self.cell_lens_dev.set(self.cell_lens)
self.cell_rads_dev.set(self.cell_rads)
self.cell_dlens_dev.set(self.cell_dlens)
self.cell_dcenters_dev.set(self.cell_dcenters)
self.cell_dangs_dev.set(self.cell_dangs)
def set_planes(self):
"""Copy plane pts, norms, and coeffs to the device from local."""
self.plane_pts_dev.set(self.plane_pts)
self.plane_norms_dev.set(self.plane_norms)
self.plane_coeffs_dev.set(self.plane_coeffs)
def get_cts(self):
"""Copy contact froms, tos, dists, pts, and norms from the device."""
self.ct_frs = self.ct_frs_dev.get()
self.ct_tos = self.ct_tos_dev.get()
self.ct_dists = self.ct_dists_dev.get()
self.ct_pts = self.ct_pts_dev.get()
self.ct_norms = self.ct_norms_dev.get()
self.cell_n_cts = self.cell_n_cts_dev.get()
def matrixTest(self):
x_dev = cl_array.zeros(self.queue, (self.n_cells,), vec.float8)
Ax_dev = cl_array.zeros(self.queue, (self.n_cells,), vec.float8)
opstring = ''
for i in range(self.n_cells):
x = numpy.zeros((self.n_cells,), vec.float8)
for j in range(7):
if j>0:
x[i][j-1]=0.0
x[i][j]=1.0
x_dev.set(x)
self.calculate_Ax(Ax_dev, x_dev)
Ax = Ax_dev.get()
for ii in range(self.n_cells):
for jj in range(7):
opstring += str(Ax[ii][jj])
if ii!=self.n_cells-1 or jj!=6:
opstring = opstring + '\t'
opstring = opstring + '\n'
print "MTM"
print opstring
open('CellModeller/Biophysics/BacterialModels/matrix.mat', 'w').write(opstring)
def dump_cell_data(self, n):
import cPickle
filename = 'data/data-%04i.pickle'%n
outfile = open(filename, 'wb')
data = (self.n_cells,
self.cell_centers_dev.get(),
self.cell_dirs_dev.get(),
self.cell_lens_dev.get(),
self.cell_rads_dev.get(),
self.parents)
cPickle.dump(data, outfile, protocol=-1)
def step(self, dt):
"""Step forward dt units of time.
Assumes that:
cell_centers is up to date when it starts.
"""
self.set_cells()
# Take dt/10 because this was what worked with EdgeDetector, need to
# make timescales consistent at some point
dt = dt*0.1
# Choose good time-step for biophysics to work nicely, then do multiple
# ticks to integrate over dt
#delta_t = max(0.05, 0.25/max(self.maxVel,1.0)) #0.1/math.sqrt(self.n_cells)
#delta_t = 0.7/math.sqrt(self.n_cells)
#delta_t = 5*0.1/self.n_cells
delta_t = 0.005
n_ticks = int(math.ceil(dt/delta_t))
actual_dt = dt / float(n_ticks)
#print 'delta_t %f nticks %f actual_dt %f'%(delta_t,n_ticks,actual_dt)
for i in range(n_ticks):
self.tick(actual_dt)
self.frame_no += 1
if self.frame_no % 10 == 0:
#self.dump_cell_data(frame_no/100)
print '% 8i % 8i cells % 8i contacts' % (self.frame_no, self.n_cells, self.n_cts)
# pull cells from the device and update simulator
if self.simulator:
self.get_cells()
for state in self.simulator.cellStates.values():
self.updateCellState(state)
def tick(self, dt):
# set target dlens (taken from growth rates set by updateCellStates)
#self.cell_target_dlens_dev.set(dt*self.cell_growth_rates)
#self.cell_dlens_dev.set(dt*self.cell_dlens)
self.cell_dlens_dev.set(dt*self.cell_growth_rates)
# redefine gridding based on the range of cell positions
self.cell_centers = self.cell_centers_dev.get()
self.update_grid() # we assume local cell_centers is current
# get each cell into the correct sq and retrieve from the device
self.bin_cells()
# sort cells and find sq index starts in the list
self.cell_sqs = self.cell_sqs_dev.get() # get updated cell sqs
self.sort_cells()
self.sorted_ids_dev.set(self.sorted_ids) # push changes to the device
self.sq_inds_dev.set(self.sq_inds)
new_cts = 1
self.n_cts = 0
self.vcleari(self.cell_n_cts_dev) # clear the accumulated contact count
i=0
while new_cts>0 and i<self.max_substeps:
old_n_cts = self.n_cts
self.predict()
# find all contacts
self.find_contacts()
# place 'backward' contacts in cells
self.collect_tos()
new_cts = self.n_cts - old_n_cts
if new_cts>0 or i==0:
self.build_matrix() # Calculate entries of the matrix
#print "max cell contacts = %i"%cl_array.max(self.cell_n_cts_dev).get()
self.CGSSolve() # invert MTMx to find deltap
self.add_impulse()
i += 1
# Calculate estimated max cell velocity
#self.maxVel = self.vmax(self.cell_dcenters_dev).get() + cl_array.max(self.cell_dlens_dev).get()
#print "maxVel = " + str(self.maxVel)
self.integrate()
self.calc_cell_geom()
def initCellState(self, state):
cid = state.id
i = state.idx
state.pos = [self.cell_centers[i][j] for j in range(3)]
state.dir = [self.cell_dirs[i][j] for j in range(3)]
state.radius = self.cell_rads[i]
state.length = self.cell_lens[i]
state.volume = state.length # TO DO: do something better here
pa = numpy.array(state.pos)
da = numpy.array(state.dir)
state.ends = (pa-da*state.length*0.5, pa+da*state.length*0.5)
state.strainRate = state.growthRate/state.length
self.cell_dlens[i] = state.growthRate
state.startVol = state.volume
def updateCellState(self, state):
cid = state.id
i = state.idx
state.strainRate = self.cell_dlens[i]/state.length
state.pos = [self.cell_centers[i][j] for j in range(3)]
state.dir = [self.cell_dirs[i][j] for j in range(3)]
state.radius = self.cell_rads[i]
state.length = self.cell_lens[i]
state.volume = state.length # TO DO: do something better here
pa = numpy.array(state.pos)
da = numpy.array(state.dir)
state.ends = (pa-da*state.length*0.5, pa+da*state.length*0.5)
# Length vel is linearisation of exponential growth
self.cell_growth_rates[i] = state.growthRate*state.length
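# (Exponential growth dl/dt = g*l, linearised over one step:
#  l(t + dt) ~= l(t) + g*l(t)*dt, hence the per-step length velocity g*l.)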
def update_grid(self):
"""Update our grid_(x,y)_min, grid_(x,y)_max, and n_sqs.
Assumes that our copy of cell_centers is current.
"""
coords = self.cell_centers.view(numpy.float32).reshape((self.max_cells, 4))
x_coords = coords[:,0]
min_x_coord = x_coords.min()
max_x_coord = x_coords.max()
self.grid_x_min = int(math.floor(min_x_coord / self.grid_spacing))
self.grid_x_max = int(math.ceil(max_x_coord / self.grid_spacing))
if self.grid_x_min == self.grid_x_max:
self.grid_x_max += 1
y_coords = coords[:,1]
min_y_coord = y_coords.min()
max_y_coord = y_coords.max()
self.grid_y_min = int(math.floor(min_y_coord / self.grid_spacing))
self.grid_y_max = int(math.ceil(max_y_coord / self.grid_spacing))
if self.grid_y_min == self.grid_y_max:
self.grid_y_max += 1
self.n_sqs = (self.grid_x_max-self.grid_x_min)*(self.grid_y_max-self.grid_y_min)
def bin_cells(self):
"""Call the bin_cells kernel.
Assumes cell_centers is current on the device.
Calculates cell_sqs.
"""
self.program.bin_cells(self.queue,
(self.n_cells,),
None,
numpy.int32(self.grid_x_min),
numpy.int32(self.grid_x_max),
numpy.int32(self.grid_y_min),
numpy.int32(self.grid_y_max),
numpy.float32(self.grid_spacing),
self.cell_centers_dev.data,
self.cell_sqs_dev.data).wait()
def sort_cells(self):
"""Sort the cells by grid square and find the start of each
grid square's cells in that list.
Assumes that the local copy of cell_sqs is current.
Calculates local sorted_ids and sq_inds.
"""
self.sorted_ids.put(numpy.arange(self.n_cells), numpy.argsort(self.cell_sqs[:self.n_cells]))
self.sorted_ids_dev.set(self.sorted_ids)
# find the start of each sq in the list of sorted cell ids and send to the device
sorted_sqs = numpy.sort(self.cell_sqs[:self.n_cells])
self.sq_inds.put(numpy.arange(self.n_sqs), numpy.searchsorted(sorted_sqs, numpy.arange(self.n_sqs), side='left'))
self.sq_inds_dev.set(self.sq_inds)
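# Illustrative example: if cell_sqs[:4] == [2, 0, 2, 1] then
# sorted_ids[:4] == [1, 3, 0, 2] (cell ids ordered by square), and with
# n_sqs == 3, sq_inds == [0, 1, 2] marks where each square's run of
# cells begins in that sorted list.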
def find_contacts(self, predict=True):
"""Call the find_contacts kernel.
Assumes that cell_centers, cell_dirs, cell_lens, cell_rads,
cell_sqs, cell_dcenters, cell_dlens, cell_dangs,
sorted_ids, and sq_inds are current on the device.
Calculates cell_n_cts, ct_frs, ct_tos, ct_dists, ct_pts,
ct_norms, ct_reldists, and n_cts.
"""
if predict:
centers = self.pred_cell_centers_dev
dirs = self.pred_cell_dirs_dev
lens = self.pred_cell_lens_dev
else:
centers = self.cell_centers_dev
dirs = self.cell_dirs_dev
lens = self.cell_lens_dev
self.program.find_plane_contacts(self.queue,
(self.n_cells,),
None,
numpy.int32(self.max_cells),
numpy.int32(self.max_contacts),
numpy.int32(self.n_planes),
self.plane_pts_dev.data,
self.plane_norms_dev.data,
self.plane_coeffs_dev.data,
centers.data,
dirs.data,
lens.data,
self.cell_rads_dev.data,
self.cell_n_cts_dev.data,
self.ct_frs_dev.data,
self.ct_tos_dev.data,
self.ct_dists_dev.data,
self.ct_pts_dev.data,
self.ct_norms_dev.data,
self.ct_reldists_dev.data,
self.ct_stiff_dev.data).wait()
self.program.find_contacts(self.queue,
(self.n_cells,),
None,
numpy.int32(self.max_cells),
numpy.int32(self.n_cells),
numpy.int32(self.grid_x_min),
numpy.int32(self.grid_x_max),
numpy.int32(self.grid_y_min),
numpy.int32(self.grid_y_max),
numpy.int32(self.n_sqs),
numpy.int32(self.max_contacts),
centers.data,
dirs.data,
lens.data,
self.cell_rads_dev.data,
self.cell_sqs_dev.data,
self.sorted_ids_dev.data,
self.sq_inds_dev.data,
self.cell_n_cts_dev.data,
self.ct_frs_dev.data,
self.ct_tos_dev.data,
self.ct_dists_dev.data,
self.ct_pts_dev.data,
self.ct_norms_dev.data,
self.ct_reldists_dev.data,
self.ct_stiff_dev.data).wait()
# set dtype to int32 so we don't overflow the int32 when summing
#self.n_cts = self.cell_n_cts_dev.get().sum(dtype=numpy.int32)
self.n_cts = cl_array.sum(self.cell_n_cts_dev).get()
def collect_tos(self):
"""Call the collect_tos kernel.
Assumes that cell_sqs, sorted_ids, sq_inds, cell_n_cts,
ct_frs, and ct_tos are current on the device.
Calculates cell_tos and n_cell_tos.
"""
self.program.collect_tos(self.queue,
(self.n_cells,),
None,
numpy.int32(self.max_cells),
numpy.int32(self.n_cells),
numpy.int32(self.grid_x_min),
numpy.int32(self.grid_x_max),
numpy.int32(self.grid_y_min),
numpy.int32(self.grid_y_max),
numpy.int32(self.n_sqs),
numpy.int32(self.max_contacts),
self.cell_sqs_dev.data,
self.sorted_ids_dev.data,
self.sq_inds_dev.data,
self.cell_n_cts_dev.data,
self.ct_frs_dev.data,
self.ct_tos_dev.data,
self.cell_tos_dev.data,
self.n_cell_tos_dev.data).wait()
def build_matrix(self):
"""Build the matrix so we can calculate M^TMx = Ax.
Assumes cell_centers, cell_dirs, cell_lens, cell_rads,
ct_inds, ct_frs, ct_tos, ct_dists, and ct_norms are current on
the device.
Calculates fr_ents and to_ents.
"""
self.program.build_matrix(self.queue,
(self.n_cells, self.max_contacts),
None,
numpy.int32(self.max_contacts),
numpy.float32(self.muA),
numpy.float32(self.gamma),
self.pred_cell_centers_dev.data,
self.pred_cell_dirs_dev.data,
self.pred_cell_lens_dev.data,
self.cell_rads_dev.data,
self.cell_n_cts_dev.data,
self.ct_frs_dev.data,
self.ct_tos_dev.data,
self.ct_dists_dev.data,
self.ct_pts_dev.data,
self.ct_norms_dev.data,
self.fr_ents_dev.data,
self.to_ents_dev.data,
self.ct_stiff_dev.data).wait()
def calculate_Ax(self, Ax, x):
self.program.calculate_Mx(self.queue,
(self.n_cells, self.max_contacts),
None,
numpy.int32(self.max_contacts),
self.ct_frs_dev.data,
self.ct_tos_dev.data,
self.fr_ents_dev.data,
self.to_ents_dev.data,
x.data,
self.Mx_dev.data).wait()
self.program.calculate_MTMx(self.queue,
(self.n_cells,),
None,
numpy.int32(self.max_contacts),
self.cell_n_cts_dev.data,
self.n_cell_tos_dev.data,
self.cell_tos_dev.data,
self.fr_ents_dev.data,
self.to_ents_dev.data,
self.Mx_dev.data,
Ax.data).wait()
# Tikhonov test
#self.vaddkx(Ax, numpy.float32(0.01), Ax, x)
# Energy minimizing regularization
self.program.calculate_Minv_x(self.queue,
(self.n_cells,),
None,
numpy.float32(self.muA),
numpy.float32(self.gamma),
self.cell_dirs_dev.data,
self.cell_lens_dev.data,
self.cell_rads_dev.data,
x.data,
self.Minvx_dev.data).wait()
self.vaddkx(Ax, self.reg_param/math.sqrt(self.n_cells), Ax, self.Minvx_dev).wait()
def CGSSolve(self):
# Solve A^TA\deltap=A^Tb (Ax=b)
# There must be a way to do this using built in pyopencl - what
# is it?!
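# Standard conjugate-gradient recurrence implemented below (here on the
# normal equations A = M^T M plus regularisation):
#   alpha_k = (r_k . r_k) / (p_k . A p_k)
#   x_{k+1} = x_k + alpha_k * p_k
#   r_{k+1} = r_k - alpha_k * A p_k
#   p_{k+1} = r_{k+1} + ((r_{k+1} . r_{k+1}) / (r_k . r_k)) * p_k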
self.vclearf(self.deltap_dev)
self.vclearf(self.rhs_dev)
# put M^T n^Tv_rel in rhs (b)
self.program.calculate_MTMx(self.queue,
(self.n_cells,),
None,
numpy.int32(self.max_contacts),
self.cell_n_cts_dev.data,
self.n_cell_tos_dev.data,
self.cell_tos_dev.data,
self.fr_ents_dev.data,
self.to_ents_dev.data,
self.ct_reldists_dev.data,
self.rhs_dev.data).wait()
self.calculate_Ax(self.MTMx_dev, self.deltap_dev)
# res = b-Ax
self.vsub(self.res_dev, self.rhs_dev, self.MTMx_dev)
# p = res
cl.enqueue_copy(self.queue, self.p_dev.data, self.res_dev.data)
# rsold = l2norm(res)
rsold = self.vdot(self.res_dev, self.res_dev).get()
if math.sqrt(rsold/self.n_cells) < self.cgs_tol:
return (0.0, rsold)
# iterate
# max iters = matrix dimension = 7 (dofs) * num cells
#dying=False
max_iters = self.n_cells*7
for iter in range(max_iters):
# Ap
self.calculate_Ax(self.Ap_dev, self.p_dev)
# p^TAp
pAp = self.vdot(self.p_dev, self.Ap_dev).get()
# alpha = rsold/p^TAp
alpha = numpy.float32(rsold/pAp)
# x = x + alpha*p, x=self.disp
self.vaddkx(self.deltap_dev, alpha, self.deltap_dev, self.p_dev)
# res = res - alpha*Ap
self.vsubkx(self.res_dev, alpha, self.res_dev, self.Ap_dev)
# rsnew = l2norm(res)
rsnew = self.vdot(self.res_dev, self.res_dev).get()
# Test for convergence
if math.sqrt(rsnew/self.n_cts) < self.cgs_tol:
break
# Stopped converging -> terminate
#if rsnew/rsold>2.0:
# break
# p = res + rsnew/rsold *p
self.vaddkx(self.p_dev, numpy.float32(rsnew/rsold), self.res_dev, self.p_dev)
rsold = rsnew
#print ' ',iter,rsold
if self.frame_no%100==0:
print '% 5i'%self.frame_no + '% 6i cells % 6i cts % 6i iterations residual = %f' % (self.n_cells, self.n_cts, iter+1, rsnew)
return (iter+1, rsnew)
def predict(self):
"""Predict cell centers, dirs, lens for a timestep dt based
on the current velocities.
Assumes cell_centers, cell_dirs, cell_lens, cell_rads, and
cell_dcenters, cell_dangs, cell_dlens are current on the device.
Calculates new pred_cell_centers, pred_cell_dirs, pred_cell_lens.
"""
self.program.predict(self.queue,
(self.n_cells,),
None,
self.cell_centers_dev.data,
self.cell_dirs_dev.data,
self.cell_lens_dev.data,
self.cell_dcenters_dev.data,
self.cell_dangs_dev.data,
self.cell_dlens_dev.data,
self.pred_cell_centers_dev.data,
self.pred_cell_dirs_dev.data,
self.pred_cell_lens_dev.data).wait()
def integrate(self):
"""Integrates cell centers, dirs, lens for a timestep dt based
on the current deltap.
Assumes cell_centers, cell_dirs, cell_lens, cell_rads, and
deltap are current on the device.
Calculates new cell_centers, cell_dirs, cell_lens.
"""
self.program.integrate(self.queue,
(self.n_cells,),
None,
self.cell_centers_dev.data,
self.cell_dirs_dev.data,
self.cell_lens_dev.data,
self.cell_dcenters_dev.data,
self.cell_dangs_dev.data,
self.cell_dlens_dev.data).wait()
def add_impulse(self):
self.program.add_impulse(self.queue, (self.n_cells,), None,
numpy.float32(self.muA),
numpy.float32(self.gamma),
self.deltap_dev.data,
self.cell_dirs_dev.data,
self.cell_lens_dev.data,
self.cell_rads_dev.data,
self.cell_dcenters_dev.data,
self.cell_dangs_dev.data,
self.cell_target_dlens_dev.data,
self.cell_dlens_dev.data).wait()
def divide_cell(self, i, d1i, d2i):
"""Divide a cell into two equal sized daughter cells.
Fails silently if we're out of cells.
Assumes our local copy of cells is current.
Calculates new cell_centers, cell_dirs, cell_lens, and cell_rads.
"""
if self.n_cells >= self.max_cells:
return
# idxs of the two new cells
a = d1i
b = d2i
# seems to be making shallow copies without the tuple calls
parent_center = tuple(self.cell_centers[i])
parent_dir = tuple(self.cell_dirs[i])
parent_rad = self.cell_rads[i]
parent_len = self.cell_lens[i]
daughter_len = parent_len/2.0 - parent_rad #- 0.025
daughter_offset = daughter_len/2.0 + parent_rad
center_offset = tuple([parent_dir[k]*daughter_offset for k in range(4)])
self.cell_centers[a] = tuple([(parent_center[k] - center_offset[k]) for k in range(4)])
self.cell_centers[b] = tuple([(parent_center[k] + center_offset[k]) for k in range(4)])
if not self.alternate_divisions:
cdir = numpy.array(parent_dir)
jitter = numpy.random.uniform(-0.001,0.001,3)
if not self.jitter_z: jitter[2] = 0.0
cdir[0:3] += jitter
cdir /= numpy.linalg.norm(cdir)
self.cell_dirs[a] = cdir
cdir = numpy.array(parent_dir)
jitter = numpy.random.uniform(-0.001,0.001,3)
if not self.jitter_z: jitter[2] = 0.0
cdir[0:3] += jitter
cdir /= numpy.linalg.norm(cdir)
self.cell_dirs[b] = cdir
else:
cdir = numpy.array(parent_dir)
tmp = cdir[0]
cdir[0] = -cdir[1]
cdir[1] = tmp
self.cell_dirs[a] = cdir
self.cell_dirs[b] = cdir
self.cell_lens[a] = daughter_len
self.cell_lens[b] = daughter_len
self.cell_rads[a] = parent_rad
self.cell_rads[b] = parent_rad
self.n_cells += 1
self.parents[b] = a
vols = self.cell_vols_dev.get()
daughter_vol = vols[i] / 2.0
vols[a] = daughter_vol
vols[b] = daughter_vol
self.cell_vols_dev.set(vols)
# Inherit velocities from parent (conserve momentum)
parent_dlin = self.cell_dcenters[i]
self.cell_dcenters[a] = parent_dlin
self.cell_dcenters[b] = parent_dlin
parent_dang = self.cell_dangs[i]
self.cell_dangs[a] = parent_dang
self.cell_dangs[b] = parent_dang
#return indices of daughter cells
return (a,b)
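# Example (hypothetical indices, mirroring how divide() calls this):
# reuse the parent's slot for daughter 1 and take the next free slot
# for daughter 2, then push the updated host arrays to the device:
#   a, b = model.divide_cell(i, i, model.n_cells)
#   model.set_cells()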
def calc_cell_geom(self):
"""Calculate cell geometry using lens/rads on card."""
# swap cell vols and cell_vols old
tmp = self.cell_old_vols_dev
self.cell_old_vols_dev = self.cell_vols_dev
self.cell_vols_dev = tmp
# update geometry
self.calc_cell_area(self.cell_areas_dev, self.cell_rads_dev, self.cell_lens_dev)
self.calc_cell_vol(self.cell_vols_dev, self.cell_rads_dev, self.cell_lens_dev)
def profileGrid(self):
if self.n_cts==0:
return
import time
t1 = time.clock()
for i in range(1000):
# redefine gridding based on the range of cell positions
self.cell_centers = self.cell_centers_dev.get()
self.update_grid() # we assume local cell_centers is current
# get each cell into the correct sq and retrieve from the device
self.bin_cells()
# sort cells and find sq index starts in the list
self.cell_sqs = self.cell_sqs_dev.get() # get updated cell sqs
self.sort_cells()
self.sorted_ids_dev.set(self.sorted_ids) # push changes to the device
self.sq_inds_dev.set(self.sq_inds)
t2 = time.clock()
print "Grid stuff timing for 1000 calls, time per call (s) = %f"%((t2-t1)*0.001)
open("grid_prof","a").write( "%i, %i, %f\n"%(self.n_cells,self.n_cts,(t2-t1)*0.001) )
def profileFindCts(self):
if self.n_cts==0:
return
import time
t1 = time.clock()
dt = 0.005
for i in range(1000):
self.n_cts = 0
self.vcleari(self.cell_n_cts_dev) # clear the accumulated contact count
self.predict()
# find all contacts
self.find_contacts()
# place 'backward' contacts in cells
#self.collect_tos()
# compact the contacts so we can dispatch only enough threads
# to deal with each
#self.ct_frs = self.ct_frs_dev.get()
#self.ct_tos = self.ct_tos_dev.get()
#self.ct_inds_dev.set(self.ct_inds)
t2 = time.clock()
print "Find contacts timing for 1000 calls, time per call (s) = %f"%((t2-t1)*0.001)
open("findcts_prof","a").write( "%i, %i, %f\n"%(self.n_cells,self.n_cts,(t2-t1)*0.001) )
def profileFindCts2(self):
if self.n_cts==0:
return
import time
t1 = time.clock()
dt = 0.005
for i in range(1000):
self.n_cts = 0
self.vcleari(self.cell_n_cts_dev) # clear the accumulated contact count
self.predict()
# find all contacts
self.find_contacts()
# place 'backward' contacts in cells
self.collect_tos()
# compact the contacts so we can dispatch only enough threads
# to deal with each
#self.ct_frs = self.ct_frs_dev.get()
#self.ct_tos = self.ct_tos_dev.get()
#self.ct_inds_dev.set(self.ct_inds)
t2 = time.clock()
print "Find contacts timing for 1000 calls, time per call (s) = %f"%((t2-t1)*0.001)
open("findcts_prof","a").write( "%i, %i, %f\n"%(self.n_cells,self.n_cts,(t2-t1)*0.001) )
def profileCGS(self):
if self.n_cts==0:
return
import time
t1 = time.clock()
dt = 0.005
for i in range(1000):
self.build_matrix() # Calculate entries of the matrix
(iters, res) = self.CGSSolve()
print "cgs prof: iters=%i, res=%f"%(iters,res)
t2 = time.clock()
print "CGS timing for 1000 calls, time per call (s) = %f"%((t2-t1)*0.001)
open("cgs_prof","a").write( "%i, %i, %i, %f\n"%(self.n_cells,self.n_cts,iters,(t2-t1)*0.001) )
circ_pts = [(math.cos(math.radians(th)), math.sin(math.radians(th))) for th in range(-80,90,20)]
def display_grid(spacing, x_lo, x_hi, y_lo, y_hi):
glBegin(GL_LINES)
for i in range(x_lo, x_hi+1):
glVertex3f(i*spacing, y_lo*spacing, 0)
glVertex3f(i*spacing, y_hi*spacing, 0)
for i in range(y_lo, y_hi+1):
glVertex3f(x_lo*spacing, i*spacing, 0)
glVertex3f(x_hi*spacing, i*spacing, 0)
glEnd()
def display_cell(p, d, l, r):
global quad
pa = numpy.array([p[i] for i in range(3)])
da = numpy.array([d[i] for i in range(3)])
e1 = pa - da*l*0.5
e2 = pa + da*l*0.5
glEnable(GL_DEPTH_TEST)
glMatrixMode(GL_MODELVIEW)
glPushMatrix()
glTranslatef(e1[0],e1[1],e1[2])
zaxis = numpy.array([0,0,1])
rotaxis = numpy.cross(da, zaxis)
ang = numpy.arccos(numpy.dot(da, zaxis))
glRotatef(-ang*180.0/math.pi, rotaxis[0], rotaxis[1], rotaxis[2])
#glRotatef(90.0, 1, 0, 0)
gluCylinder(quad, r, r , l, 8, 1)
gluSphere(quad, r, 8, 8)
glPopMatrix()
glPushMatrix()
glTranslatef(e2[0],e2[1],e2[2])
gluSphere(quad, r, 8, 8)
glPopMatrix()
glDisable(GL_DEPTH_TEST)
'''
def display_cell(p, d, l, r):
glEnable(GL_DEPTH_TEST)
glMatrixMode(GL_MODELVIEW)
glPushMatrix()
ang = math.atan2(d[1], d[0]) * 360.0 / (2.0*3.141593)
glTranslatef(p[0], p[1], 0.0)
glRotatef(ang, 0.0, 0.0, 1.0)
glBegin(GL_POLYGON)
glVertex3f(-l/2.0, -r, 0)
glVertex3f(l/2.0, -r, 0)
for x,y in circ_pts:
glVertex3f(l/2.0 + x*r, y*r, 0.0)
glVertex3f(l/2.0, r, 0)
glVertex3f(-l/2.0, r, 0)
for x,y in circ_pts:
glVertex3f(-l/2.0 -x*r, -y*r, 0.0)
glEnd()
glPopMatrix()
glDisable(GL_DEPTH_TEST)
'''
def display_cell_name(p, name):
glMatrixMode(GL_MODELVIEW)
glPushMatrix()
glTranslatef(p[0], p[1], p[2])
glScalef(0.006, 0.006, 0.006)
display_string(name)
glPopMatrix()
def display_ct(pt, norm, fr_Lz):
glMatrixMode(GL_MODELVIEW)
glPushMatrix()
glTranslatef(pt[0], pt[1], pt[2])
glBegin(GL_POINTS)
glVertex3f(0.0, 0.0, 0.0)
glEnd()
glPushMatrix()
glTranslatef(0.1, 0.1, 0.0)
glScalef(0.004, 0.004, 0.004)
display_string(fr_Lz)
glPopMatrix()
xaxis = numpy.array([1,0,0])
norma = numpy.array([norm[i] for i in range(3)])
rotaxis = numpy.cross(norma, xaxis)
ang = numpy.arccos(numpy.dot(norma, xaxis))
glRotatef(-ang*180.0/math.pi, rotaxis[0], rotaxis[1], rotaxis[2])
# ang = math.atan2(norm[1], norm[0]) * 360.0 / (2.0*3.141593)
# glRotatef(ang, 0.0, 0.0, 1.0)
glBegin(GL_LINES)
glVertex3f(0.0, 0.0, 0.0)
glVertex3f(1.0, 0.0, 0.0)
glEnd()
glBegin(GL_TRIANGLES)
glVertex3f(1.0, 0.0, 0.0)
glVertex3f(0.8, 0.2, 0.0)
glVertex3f(0.8, -0.2, 0.0)
glEnd()
glPopMatrix()
def display_string(s):
for ch in s:
glutStrokeCharacter(GLUT_STROKE_ROMAN, ord(ch))
def cell_color(i):
global founders
while i not in founders:
i = model.parents[i]
return founders[i]
def display():
global view_x, view_y, view_z, view_ang
glEnable(GL_LINE_SMOOTH)
glEnable(GL_POLYGON_SMOOTH)
glEnable(GL_BLEND)
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
glClearColor(0.7, 0.7, 0.7, 0.7)
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
gluPerspective(60.0, 1.0, 0.1, 1000.0)
glMatrixMode(GL_MODELVIEW)
glLoadIdentity()
glTranslatef(view_x, view_y, -view_z)
glRotatef(view_ang, 1,0,0)
glColor3f(0, 0, 0)
glLineWidth(0.5)
display_grid(model.grid_spacing, model.grid_x_min, model.grid_x_max, model.grid_y_min, model.grid_y_max)
model.get_cells()
for i in range(model.n_cells):
#glColor3f(0.5, 0.5, 0.5)
rr,gg,bb = cell_color(i)
glColor3f(rr, gg, bb)
#glPolygonMode(GL_FRONT_AND_BACK, GL_FILL)
glPolygonMode(GL_FRONT, GL_FILL)
display_cell(model.cell_centers[i], model.cell_dirs[i], model.cell_lens[i], model.cell_rads[i])
glColor3f(0.0, 0.0, 0.0)
#glPolygonMode(GL_FRONT_AND_BACK, GL_LINE)
glPolygonMode(GL_FRONT, GL_LINE)
glLineWidth(2.0)
display_cell(model.cell_centers[i], model.cell_dirs[i], model.cell_lens[i], model.cell_rads[i])
# glColor3f(0.0, 0.0, 0.0)
# glLineWidth(1.0)
# display_cell_name(model.cell_centers[i], str(i))
glColor3f(0.1, 0.2, 0.4)
glPolygonMode(GL_FRONT_AND_BACK, GL_FILL)
glPointSize(1.0)
glLineWidth(1.0)
global ct_map
new_ct_map = {}
model.get_cts()
for i in range(model.n_cells):
for j in range(model.cell_n_cts[i]):
other = model.ct_tos[i][j]
new_ct_map[i,other] = (model.ct_pts[i][j], model.ct_norms[i][j], '% .4f'%model.ct_dists[i][j])
if other<0:
glColor3f(0.5,0.5,0.1)
elif (i,other) in ct_map:
glColor3f(0.1, 0.4, 0.2)
else:
glColor3f(0.6, 0.1, 0.1)
if other<0:
display_ct(model.ct_pts[i][j], model.ct_norms[i][j], '% .4f'% model.ct_dists[i][j])
dead_cts_keys = set(ct_map.keys()) - set(new_ct_map.keys())
for key in dead_cts_keys:
pt, norm, dist = ct_map[key]
glColor3f(0.1, 0.1, 0.6)
display_ct(pt, norm, dist)
ct_map = new_ct_map
glFlush()
glutSwapBuffers()
def reshape(w, h):
l = min(w, h)
glViewport(0, 0, l, l)
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
glMatrixMode(GL_MODELVIEW)
glLoadIdentity()
glutPostRedisplay()
from OpenGL.GL import *
from OpenGL.GLU import *
from OpenGL.GLUT import *
display_flag = False
quad = gluNewQuadric()
def idle():
global frame_no
global display_flag
model.tick(0.01)
model.get_cells()
if model.frame_no % 100 == 0:
#self.dump_cell_data(frame_no/100)
print '% 8i % 8i cells % 8i contacts' % (model.frame_no, model.n_cells, model.n_cts)
if model.frame_no % 100 == 0:
for i in range(model.n_cells):
if model.cell_lens[i] > 3.0 + random.uniform(0.0, 1.0):
# divide_cell expects daughter indices; reuse the parent's slot
# for daughter 1 and the next free slot for daughter 2 (assumed
# convention, matching how divide() calls it above).
model.divide_cell(i, i, model.n_cells)
model.set_cells()
if model.frame_no % 500 == 0 or display_flag:
display()
display_flag = False
if model.frame_no % 1001 == 0:
model.profileCGS()
model.profileFindCts()
model.profileGrid()
model.frame_no += 1
view_x = 0
view_y = 0
view_z = 50
view_ang = 45.0
def key_pressed(*args):
global view_x, view_y, view_z, view_ang, display_flag
if args[0] == 'j':
view_x += 2
elif args[0] == 'l':
view_x -= 2
elif args[0] == 'i':
view_y -= 2
elif args[0] == 'k':
view_y += 2
elif args[0] == 'e':
view_z -= 2
elif args[0] == 'd':
view_z += 2
elif args[0] == 'z':
view_ang += 2
elif args[0] == 'x':
view_ang -= 2
elif args[0] == '\x1b':
exit()
elif args[0] == 'f':
display_flag = True
import time
class state:
pass
if __name__ == '__main__':
numpy.set_printoptions(precision=8,
threshold=10000,
linewidth=180)
ct_map = {}
glutInit(sys.argv)
glutInitWindowSize(1400, 1400)
glutInitWindowPosition(0, 0)
glutCreateWindow('CLBacterium')
glutDisplayFunc(display)
glutReshapeFunc(reshape)
glutKeyboardFunc(key_pressed)
glutIdleFunc(idle)
from CellModeller.Simulator import Simulator
sim = Simulator(None, 0.01)
model = CLBacterium(sim, max_cells=2**15, max_contacts=32, max_sqs=64*16, jitter_z=False, reg_param=2, gamma=5.0)
model.addPlane((0,-16,0), (0,1,0), 1)
model.addPlane((0,16,0), (0,-1,0), 1)
#model = CLBacterium(None)
#model.load_test_data()
#model.load_3_cells_2_planes()
#model.load_1024_cells()
#model.load_3_cells()
cs = state()
cs.id=0
cs.idx=0
cs.growthRate = 0.5
model.addCell(cs)
founders = {0:(0.5, 0.3, 0.3),
1:(0.3, 0.5, 0.3),
2:(0.3, 0.3, 0.5)}
#model.load_3_cells_2_planes()
#model.load_1024_cells()
model.load_3_cells()
glutMainLoop()
| bsd-3-clause | 6,362,412,420,733,977,000 | 37.502837 | 139 | 0.525208 | false |
daviewales/pimotion | pimotion/test_backend.py | 1 | 4646 | #!/usr/bin/env python3
import unittest
import itertools
import numpy
try:
import unittest.mock as mock
except ImportError:
import mock
import backend
class TestBackend(unittest.TestCase):
@classmethod
def setUpClass(self):
mock_images = [
numpy.load('test_files/image{}.npy'.format(i))
for i in range(1, 3)]
mock_get_image = mock.patch('backend.get_image', autospec=True).start()
mock_get_image.return_value = itertools.cycle(mock_images)
self.motion_generator = backend.get_motion_data()
self.motion = next(self.motion_generator)
self.correctly_formatted_motion_data = [
[375, 85],
[405, 305],
[565, 105]]
self.tile_width = 10
self.tile_height = 10
self.tile_area = self.tile_width * self.tile_height
self.tile_centre = [self.tile_width/2, self.tile_height/2]
self.tile_motion = 1
self.complete_motion_tile = numpy.ones(
(self.tile_height, self.tile_width), dtype=numpy.bool)
self.zero_motion_tile = numpy.zeros(
(self.tile_height, self.tile_width), dtype=numpy.bool)
self.half_motion_tile = numpy.ones(
(self.tile_height, self.tile_width), dtype=numpy.bool)
self.half_motion_tile[0:10, 0:5] = self.zero_motion_tile[0:10, 0:5]
def test_get_motion_returns_correct_type(self):
'''get_motion should return list'''
self.assertEqual(type(self.motion),
type(self.correctly_formatted_motion_data))
def test_get_motion_returns_list_of_lists(self):
'''get_motion should return list of lists'''
list_type = type(self.correctly_formatted_motion_data[0])
for i in self.motion:
self.assertEqual(list_type, type(i))
def test_get_motion_returns_valid_data(self):
'''get_motion should return list of lists of integers'''
for value in self.motion:
self.assertEqual(type(value[0]), type(1))
self.assertEqual(type(value[1]), type(1))
def test_motion_coordinates_correct_for_complete_motion(self):
'''tile with 100% motion should return coordinates of tile centre'''
valid_coordinates = [self.tile_centre]
coordinates = backend.motion_coordinates(self.complete_motion_tile,
self.tile_width,
self.tile_height,
self.tile_motion)
self.assertEqual(valid_coordinates, coordinates)
def test_motion_coordinates_correct_for_no_motion(self):
'''tile with 0% motion should return empty list'''
valid_coordinates = []
coordinates = backend.motion_coordinates(self.zero_motion_tile,
self.tile_width,
self.tile_height,
self.tile_motion)
self.assertEqual(valid_coordinates, coordinates)
def test_motion_coordinates_correct_for_partial_motion(self):
'''tile with partial motion should be dependent on `tile_motion`'''
for i in range(11):
self.tile_motion = 0.1*i
if self.tile_motion <= 0.5:
message = (
'motion coordinates should be found when '
'tile contains 50% motion and '
'`tile_motion == {:.2f}`.'.format(self.tile_motion))
valid_coordinates = [self.tile_centre]
coordinates = backend.motion_coordinates(
self.half_motion_tile,
self.tile_width,
self.tile_height,
self.tile_motion)
self.assertEqual(
valid_coordinates,
coordinates,
msg=message)
else:
message = ('motion coordinates should not be found when '
'tile contains 50% motion and '
'`tile_motion == {:.2f}`.'.format(self.tile_motion))
valid_coordinates = []
coordinates = backend.motion_coordinates(
self.half_motion_tile,
self.tile_width,
self.tile_height,
self.tile_motion)
self.assertEqual(
valid_coordinates,
coordinates,
msg=message)
if __name__ == '__main__':
unittest.main(verbosity=2)
| bsd-2-clause | -742,510,060,171,487,100 | 36.168 | 79 | 0.538743 | false |
atmantree/post-introduccion-zodb | todo_v1/todo.py | 1 | 1684 | # -*- coding: utf-8 -*-
from ZODB import (DB, FileStorage)
import transaction
import argparse
class ToDo:
def __init__(self):
self.store = FileStorage.FileStorage("ToDo.fs")
self.database = DB(self.store)
self.connection = self.database.open()
self.root = self.connection.root()
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
transaction.abort()  # discard any uncommitted changes
self.connection.close()
self.database.close()
self.store.close()
def add(self, key, value):
if key != "":
self.root[key] = value
transaction.commit()
print("New task added..")
else:
print("A task must have a name")
def list(self):
print("Tasks To Do..")
for k in self.root.keys():
print("%s\t%s" % (k, self.root[k]))
def delete(self, key):
if key in self.root.keys():
del(self.root[key])
transaction.commit()
print("Task deleted..")
else:
print("There is no task '%s'.." % key)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('-a', '--add', nargs=2, help="add a task to the ToDo list")
parser.add_argument('-d', '--delete', nargs=1, help="delete a task from the ToDo list")
args = parser.parse_args()
tasks = ToDo()
if args.add:
tasks.add(args.add[0],args.add[1])
elif args.delete:
tasks.delete(args.delete[0])
else:
tasks.list()
| bsd-3-clause | 5,546,271,761,760,480,000 | 27.068966 | 91 | 0.513064 | false |
IL2HorusTeam/django-il2-domain | il2_domain/locations/migrations/0001_initial.py | 1 | 15133 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Location'
db.create_table(u'locations_location', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name_en', self.gf('django.db.models.fields.CharField')(unique=True, max_length=255)),
('name_ru', self.gf('django.db.models.fields.CharField')(max_length=255, unique=True, null=True, blank=True)),
('name_uk', self.gf('django.db.models.fields.CharField')(max_length=255, unique=True, null=True, blank=True)),
('slug', self.gf('django.db.models.fields.SlugField')(unique=True, max_length=50)),
('width', self.gf('django.db.models.fields.IntegerField')(default=12800)),
('height', self.gf('django.db.models.fields.IntegerField')(default=12800)),
('zoom_min', self.gf('django.db.models.fields.IntegerField')(default=0)),
('zoom_max', self.gf('django.db.models.fields.IntegerField')(default=0)),
('zoom_dflt', self.gf('django.db.models.fields.IntegerField')(default=0)),
))
db.send_create_signal(u'locations', ['Location'])
# Adding model 'Locality'
db.create_table(u'locations_locality', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('location', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['locations.Location'])),
('pos', self.gf('django.contrib.gis.db.models.fields.PointField')()),
('zoom_min', self.gf('django.db.models.fields.IntegerField')(default=0)),
('name_en', self.gf('django.db.models.fields.CharField')(max_length=255)),
('name_ru', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
('name_uk', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
('area', self.gf('django.contrib.gis.db.models.fields.PolygonField')()),
))
db.send_create_signal(u'locations', ['Locality'])
# Adding model 'GeographicalFeature'
db.create_table(u'locations_geographicalfeature', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('location', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['locations.Location'])),
('pos', self.gf('django.contrib.gis.db.models.fields.PointField')()),
('zoom_min', self.gf('django.db.models.fields.IntegerField')(default=0)),
('name_en', self.gf('django.db.models.fields.CharField')(max_length=255)),
('name_ru', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
('name_uk', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
))
db.send_create_signal(u'locations', ['GeographicalFeature'])
# Adding model 'Aerodrome'
db.create_table(u'locations_aerodrome', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('location', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['locations.Location'])),
('pos', self.gf('django.contrib.gis.db.models.fields.PointField')()),
('zoom_min', self.gf('django.db.models.fields.IntegerField')(default=0)),
('name_en', self.gf('django.db.models.fields.CharField')(max_length=255)),
('name_ru', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
('name_uk', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
('amsl', self.gf('django.db.models.fields.IntegerField')(default=0)),
))
db.send_create_signal(u'locations', ['Aerodrome'])
# Adding model 'Runway'
db.create_table(u'locations_runway', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('aerodrome', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['locations.Aerodrome'])),
('name', self.gf('django.db.models.fields.CharField')(default=u'09/27', max_length=255)),
('is_bidirectional', self.gf('django.db.models.fields.BooleanField')(default=True)),
('beginning', self.gf('django.contrib.gis.db.models.fields.PointField')()),
('end', self.gf('django.contrib.gis.db.models.fields.PointField')()),
('width', self.gf('django.db.models.fields.IntegerField')(default=30)),
('pavement_type', self.gf('django.db.models.fields.CharField')(max_length=3)),
))
db.send_create_signal(u'locations', ['Runway'])
# Adding model 'LocationVariant'
db.create_table(u'locations_locationvariant', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name_en', self.gf('django.db.models.fields.CharField')(max_length=255)),
('name_ru', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
('name_uk', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
('location', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['locations.Location'])),
('loader', self.gf('django.db.models.fields.CharField')(default=u'location/load.ini', max_length=255)),
('season', self.gf('django.db.models.fields.CharField')(max_length=3)),
('month_number', self.gf('django.db.models.fields.IntegerField')(default=1)),
('atmospheric_pressure', self.gf('django.db.models.fields.IntegerField')(default=745)),
('air_temperature', self.gf('django.db.models.fields.IntegerField')(default=25)),
))
db.send_create_signal(u'locations', ['LocationVariant'])
# Adding M2M table for field localities on 'LocationVariant'
m2m_table_name = db.shorten_name(u'locations_locationvariant_localities')
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('locationvariant', models.ForeignKey(orm[u'locations.locationvariant'], null=False)),
('locality', models.ForeignKey(orm[u'locations.locality'], null=False))
))
db.create_unique(m2m_table_name, ['locationvariant_id', 'locality_id'])
# Adding M2M table for field aerodromes on 'LocationVariant'
m2m_table_name = db.shorten_name(u'locations_locationvariant_aerodromes')
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('locationvariant', models.ForeignKey(orm[u'locations.locationvariant'], null=False)),
('aerodrome', models.ForeignKey(orm[u'locations.aerodrome'], null=False))
))
db.create_unique(m2m_table_name, ['locationvariant_id', 'aerodrome_id'])
# Adding M2M table for field geographical_features on 'LocationVariant'
m2m_table_name = db.shorten_name(u'locations_locationvariant_geographical_features')
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('locationvariant', models.ForeignKey(orm[u'locations.locationvariant'], null=False)),
('geographicalfeature', models.ForeignKey(orm[u'locations.geographicalfeature'], null=False))
))
db.create_unique(m2m_table_name, ['locationvariant_id', 'geographicalfeature_id'])
def backwards(self, orm):
# Deleting model 'Location'
db.delete_table(u'locations_location')
# Deleting model 'Locality'
db.delete_table(u'locations_locality')
# Deleting model 'GeographicalFeature'
db.delete_table(u'locations_geographicalfeature')
# Deleting model 'Aerodrome'
db.delete_table(u'locations_aerodrome')
# Deleting model 'Runway'
db.delete_table(u'locations_runway')
# Deleting model 'LocationVariant'
db.delete_table(u'locations_locationvariant')
# Removing M2M table for field localities on 'LocationVariant'
db.delete_table(db.shorten_name(u'locations_locationvariant_localities'))
# Removing M2M table for field aerodromes on 'LocationVariant'
db.delete_table(db.shorten_name(u'locations_locationvariant_aerodromes'))
# Removing M2M table for field geographical_features on 'LocationVariant'
db.delete_table(db.shorten_name(u'locations_locationvariant_geographical_features'))
models = {
u'locations.aerodrome': {
'Meta': {'object_name': 'Aerodrome'},
'amsl': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['locations.Location']"}),
'name_en': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'name_ru': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'name_uk': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'pos': ('django.contrib.gis.db.models.fields.PointField', [], {}),
'zoom_min': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
u'locations.geographicalfeature': {
'Meta': {'object_name': 'GeographicalFeature'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['locations.Location']"}),
'name_en': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'name_ru': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'name_uk': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'pos': ('django.contrib.gis.db.models.fields.PointField', [], {}),
'zoom_min': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
u'locations.locality': {
'Meta': {'object_name': 'Locality'},
'area': ('django.contrib.gis.db.models.fields.PolygonField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['locations.Location']"}),
'name_en': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'name_ru': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'name_uk': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'pos': ('django.contrib.gis.db.models.fields.PointField', [], {}),
'zoom_min': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
u'locations.location': {
'Meta': {'ordering': "(u'slug',)", 'object_name': 'Location'},
'height': ('django.db.models.fields.IntegerField', [], {'default': '12800'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name_en': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'name_ru': ('django.db.models.fields.CharField', [], {'max_length': '255', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'name_uk': ('django.db.models.fields.CharField', [], {'max_length': '255', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'width': ('django.db.models.fields.IntegerField', [], {'default': '12800'}),
'zoom_dflt': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'zoom_max': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'zoom_min': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
u'locations.locationvariant': {
'Meta': {'object_name': 'LocationVariant'},
'aerodromes': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['locations.Aerodrome']", 'null': 'True', 'blank': 'True'}),
'air_temperature': ('django.db.models.fields.IntegerField', [], {'default': '25'}),
'atmospheric_pressure': ('django.db.models.fields.IntegerField', [], {'default': '745'}),
'geographical_features': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['locations.GeographicalFeature']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'loader': ('django.db.models.fields.CharField', [], {'default': "u'location/load.ini'", 'max_length': '255'}),
'localities': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['locations.Locality']", 'null': 'True', 'blank': 'True'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['locations.Location']"}),
'month_number': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'name_en': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'name_ru': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'name_uk': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'season': ('django.db.models.fields.CharField', [], {'max_length': '3'})
},
u'locations.runway': {
'Meta': {'object_name': 'Runway'},
'aerodrome': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['locations.Aerodrome']"}),
'beginning': ('django.contrib.gis.db.models.fields.PointField', [], {}),
'end': ('django.contrib.gis.db.models.fields.PointField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_bidirectional': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'default': "u'09/27'", 'max_length': '255'}),
'pavement_type': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'width': ('django.db.models.fields.IntegerField', [], {'default': '30'})
}
}
complete_apps = ['locations'] | mit | -6,195,351,885,397,270,000 | 66.5625 | 199 | 0.600211 | false |
AndreySibiryakov/tools | smooth_curve_gaussian_class_api_2_w_smooth.py | 1 | 7752 | import scipy.ndimage as sp
import maya.cmds as cmds
import maya.OpenMaya as om
import maya.OpenMayaAnim as oma
import numpy as np
import scipy.interpolate as si
def bspline(cv, n=100, degree=3, periodic=False):
""" Calculate n samples on a bspline
        cv :      Array of control vertices
n : Number of samples to return
degree: Curve degree
periodic: True - Curve is closed
False - Curve is open
"""
# If periodic, extend the point array by count+degree+1
cv = np.asarray(cv)
count = len(cv)
if periodic:
factor, fraction = divmod(count + degree + 1, count)
cv = np.concatenate((cv,) * factor + (cv[:fraction],))
count = len(cv)
degree = np.clip(degree, 1, degree)
# If opened, prevent degree from exceeding count-1
else:
degree = np.clip(degree, 1, count - 1)
# Calculate knot vector
kv = None
if periodic:
kv = np.arange(0 - degree, count + degree + degree - 1)
else:
kv = np.clip(np.arange(count + degree + 1) - degree, 0, count - degree)
# Calculate query range
u = np.linspace(periodic, (count - degree), n)
# Calculate result
return np.array(si.splev(u, (kv, cv.T, degree))).T
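# Illustrative sketch (not part of the original tool): sampling a smoothed
# curve from a few made-up control points.
#
#   cv = [[0.0, 0.0], [1.0, 2.0], [2.0, -1.0], [3.0, 0.5]]
#   pts = bspline(cv, n=50, degree=3)  # 50 samples along the open cubic curve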
class Gaussian(object):
def __init__(self, blend=10):
self.blend = blend
self.curves = []
self.frame_data = {}
self.value_data = {}
self.id_data = {}
def normalize_value(self, v, min_v, max_v):
'''Normalizes single curve value.
Used for processing "Stress" internal facefx curve.
Args:
v (float): curve value
min_v (float): minimum value
max_v (float): maximum value
Returns:
float:
'''
# range2 = y - x;
# a = (a * range2) + x;
return (v - min_v) / (max_v - min_v)
def normalize_data(self, data):
min_v = min(data) if min(data) > 0 else 0
max_v = max(data)
return [self.normalize_value(d, min_v, max_v) for d in data]
def restore_normalized_value(self, v, min_v, max_v):
return min_v + v * (max_v - min_v)
def restore_normalized_data(self, src_data, norm_data):
min_v = min(src_data) if min(src_data) > 0 else 0
max_v = max(src_data)
return [self.restore_normalized_value(d, min_v, max_v) for d in norm_data]
def add_keys(self, plugName, times, values, changeCache=None):
# Get the plug to be animated.
sel = om.MSelectionList()
sel.add(plugName)
plug = om.MPlug()
sel.getPlug(0, plug)
# Create the animCurve.
animfn = oma.MFnAnimCurve(plug)
timeArray = om.MTimeArray()
valueArray = om.MDoubleArray()
for i in range(len(times)):
timeArray.append(om.MTime(times[i], om.MTime.uiUnit()))
valueArray.append(values[i])
# Add the keys to the animCurve.
animfn.addKeys(
timeArray,
valueArray,
oma.MFnAnimCurve.kTangentGlobal,
oma.MFnAnimCurve.kTangentGlobal,
False,
changeCache
)
def delete_node(self, node):
try:
cmds.delete(cmds.listConnections(node)[0])
except:
return
def calc_blend_val(self, orig_val, proc_val, multi):
diff_val = (orig_val - proc_val) * multi
return orig_val - diff_val
def set_blend(self, init_values, filt_values):
        # Keeps the blend length within half of the list length
if len(init_values) / 2 < self.blend:
self.blend = len(init_values) / 2
gradient_range = [p / float(self.blend)
for p in range(0, self.blend)][1:]
for i, multi in enumerate(gradient_range):
rev_i = -(i + 1)
filt_values[i] = self.calc_blend_val(
init_values[i], filt_values[i], multi)
filt_values[rev_i] = self.calc_blend_val(
init_values[rev_i], filt_values[rev_i], multi)
return filt_values
    def group_by_increasing(self, data):
res = [[data[0]]]
for i in range(1, len(data)):
if data[i - 1] + 1 == data[i]:
res[-1].append(data[i])
else:
res.append([data[i]])
return res
def get_data(self):
self.curves = cmds.keyframe(query=True, name=True)
for curve in self.curves:
frame_data = cmds.keyframe(curve, sl=True, query=True)
if not frame_data:
frame_data = cmds.keyframe(curve, query=True)
self.frame_data[curve] = frame_data
value_data = cmds.keyframe(
curve, sl=True, valueChange=True, query=True)
if not value_data:
value_data = cmds.keyframe(
curve, valueChange=True, query=True)
self.value_data[curve] = value_data
# Resets slider value to default
cmds.floatSlider(power_sl, v=0, edit=True)
def process_curve(self):
# self.get_data()
power = cmds.floatSlider(power_sl, value=True, query=True)
        # Reverses the input range, since it is not possible to do in the GUI
for curve in self.curves:
if cmds.checkBox(b_spline, v=True, q=True):
filtered_values = bspline(self.value_data[curve], n=len(
self.value_data[curve]), degree=int(power))
else:
filtered_values = self.gaussian(self.value_data[curve], power)
filtered_values = [float(v) for v in filtered_values]
filtered_values = self.set_blend(
self.value_data[curve], filtered_values)
if cmds.checkBox(cbx, v=True, q=True):
filtered_values = self.normalize_data(filtered_values)
filtered_values = self.restore_normalized_data(
self.value_data[curve], filtered_values)
attr = cmds.listConnections(curve, p=True)[0]
self.add_keys(attr, self.frame_data[curve], filtered_values, None)
cmds.keyTangent(itt='auto', ott='auto')
def normalize_only(self):
self.get_data()
        # Reverses the input range, since it is not possible to do in the GUI
for curve in self.curves:
filtered_values = self.normalize_data(self.value_data[curve])
attr = cmds.listConnections(curve, p=True)[0]
self.add_keys(attr, self.frame_data[curve], filtered_values, None)
cmds.keyTangent(itt='auto', ott='auto')
def gaussian(self, data, power):
return sp.filters.gaussian_filter1d(data, power)
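    # Illustrative sketch (not part of the original tool): smoothing a plain
    # value list outside of the Maya GUI; the numbers are made-up.
    #
    #   gg = Gaussian(blend=5)
    #   smoothed = gg.gaussian([0.0, 1.0, 0.2, 0.8, 0.1], 2.0)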
window_name = 'Gaussian'
if cmds.window(window_name, exists=True):
cmds.deleteUI(window_name)
cmds.window(window_name)
column = cmds.columnLayout(adjustableColumn=True)
# cmds.label('Power')
# text = cmds.text(label='Size', h=30)
cbx = cmds.checkBox(label='Normalize',
value=False,
ann='Normalyze curves or not')
b_spline = cmds.checkBox(label='B-spline',
value=False,
ann='Simplify curve with B-spline')
power_sl = cmds.floatSlider(
min=0, max=20, step=1, w=250, h=30)
cmds.button(label='Normalize Only',
command='gg.normalize_only()')
gg = Gaussian()
cmds.floatSlider(power_sl, dc='gg.process_curve()',
dragCallback='gg.get_data()', edit=True)
cmds.showWindow()
| mit | 8,784,526,235,841,071,000 | 32.300885 | 82 | 0.551987 | false |
tomdoel/pyxnatbrowser | browser/progresslistbox.py | 1 | 10205 | # https://github.com/tomdoel/pyxnatbrowser
# Author: Tom Doel www.tomdoel.com
# Distributed under the Simplified BSD License.
from enum import Enum
from sys import stdout
from tkinter import Frame, Scrollbar, VERTICAL, Label, Listbox, EXTENDED, RIGHT, Y, LEFT, BOTH, END, Checkbutton, Text, \
IntVar
from tkinter.ttk import Progressbar
from database.observable import Observable
from database.xnatdatabase import ProgressStatus
class LabeledProgressListBox(Frame):
def __init__(self, parent, list_model_factory, label_text):
Frame.__init__(self, parent)
scrollbar = Scrollbar(self, orient=VERTICAL)
Label(self, text=label_text).pack()
self.check_list_box = ProgressListBox(self, scrollbar, list_model_factory)
scrollbar.config(command=self.check_list_box.yview)
scrollbar.pack(side=RIGHT, fill=Y)
self.check_list_box.pack(side=LEFT, fill=BOTH, expand=1)
class ProgressListBox(Text):
def __init__(self, parent, scrollbar, list_model_factory):
Text.__init__(self, parent, yscrollcommand=scrollbar.set)
self._list_model_factory = list_model_factory
self._list_model = list_model_factory.get_list_model()
self._list_model.add_listener(self._list_items_changed)
def _list_items_changed(self):
self.delete(1.0, END) # Clears the list entries
for list_item in self._list_model.get_list_items():
list_item_model = self._list_model_factory.get_list_item_model(list_item)
new_checkbutton = ProgressListBoxItem(self, list_item_model)
self.window_create("end", window=new_checkbutton)
self.insert("end", "\n")
class ProgressListBoxModelFactory:
def __init__(self):
self._list_model = ProgressListBoxModel()
def get_list_model(self):
return self._list_model
def get_list_item_model(self, list_item):
return ProgressListBoxItemModel(list_item.get_label())
class ProgressListBoxModel(Observable):
def __init__(self):
Observable.__init__(self)
self._list_items = []
def get_list_items(self):
return self._list_items
def set_list_items(self, list_items):
self._list_items = list_items
self._notify()
class ProgressListBoxItemModel(Observable):
def __init__(self, label):
Observable.__init__(self)
self._label = label
self._progress_status = ProgressStatus.undefined
self._check_status = False
def get_progress_status(self):
return self._progress_status
def get_check_status(self):
return self._check_status
def get_label(self):
return self._label
def set_progress_status(self, progress_status):
if self._progress_status is not progress_status:
self._progress_status = progress_status
self._notify()
def set_check_status(self, check_status):
if self._check_status is not check_status:
self._check_status = check_status
self._notify()
def manual_set_checked(self, check_status):
self.set_check_status(check_status)
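# Illustrative sketch (not part of the original file): a widget subscribes to
# an item model and reacts when its state changes; the label and the callback
# name are made-up placeholders.
#
#   model = ProgressListBoxItemModel('T1 scan')
#   model.add_listener(on_change_callback)
#   model.set_progress_status(ProgressStatus.in_progress)  # notifies listeners
#   model.manual_set_checked(True)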
class ProgressListBoxItem(Frame, Observable):
def __init__(self, parent, model):
Frame.__init__(self, parent)
Observable.__init__(self)
self._model = model
# Create variables and initialise to zero
self._checked_var = IntVar()
self._progress_var = IntVar()
self._checked_var.set(0)
self._progress_var.set(0)
self._current_gui_checked_state = CheckStatus.undefined
self._current_gui_progress_state = ProgressStatus.undefined
self.check_button = Checkbutton(self, text=model.get_label(), variable=self._checked_var, command=self._user_check_changed)
self.progress_bar = Progressbar(self, orient='horizontal', mode='indeterminate', variable=self._progress_var)
self.check_button.pack(side=LEFT, fill="both", expand=True)
self._update()
self._model.add_listener(self._model_changed)
def _model_changed(self):
        self._update()
def _update(self):
# Update check status
model_check_state = self._model.get_check_status()
if model_check_state is not self._current_gui_checked_state:
self._current_gui_checked_state = model_check_state
# if self.status_model.is_checked_force_reload():
if model_check_state:
self.check_button.select()
else:
self.check_button.deselect()
# Update progress status
        model_progress_state = self._model.get_progress_status()
if not model_progress_state == self._current_gui_progress_state:
self._current_gui_progress_state = model_progress_state
if model_progress_state == ProgressStatus.in_progress:
self.progress_bar.pack(side=RIGHT, fill="both", expand=True)
else:
self.progress_bar.pack_forget()
def _user_check_changed(self):
new_checked = self._checked_var.get()
if new_checked is not self._model.get_check_status():
self._model.manual_set_checked(new_checked)
# self.model.set_checked(new_checked)
# if new_checked is not self.status_model.is_checked():
# self._notify(self.index, new_checked)
# def update_list(self, scan_records):
# self.delete(1.0, END) # Clears the list entries
#
# self.list_objects = []
# self.check_buttons = {}
# self.next_index = 0
# self.checked_indices = None
# self.unchecked_indices = None
# for scan_record in scan_records:
# self.list_objects.append(scan_record.scan)
# node_checkbox_model = ProgressCheckButtonModel(scan_record.label)
# new_checkbutton = ProgressCheckButton(self, node_checkbox_model, self.next_index, scan_record)
# self.window_create("end", window=new_checkbutton)
# self.insert("end", "\n")
# self.check_buttons[self.next_index] = new_checkbutton
# new_checkbutton.add_listener(self._checkbox_changed)
# self.next_index += 1
#
# self._populate_cache()
#
# def refresh_checks(self):
# for index, checkbutton in self.check_buttons.items():
# checkbutton.refresh_check()
# self._populate_cache()
#
# def _populate_cache(self):
# self.checked_indices = []
# self.unchecked_indices = []
# for index, checkbutton in self.check_buttons.items():
# if checkbutton.is_checked():
# self.checked_indices.append(index)
# else:
# self.unchecked_indices.append(index)
#
# def _checkbox_changed(self, index, value):
# self._populate_cache()
# selected_items = [self.list_objects[int(index)] for index in self.checked_indices]
# unselected_items = [self.list_objects[int(index)] for index in self.unchecked_indices]
#
# # Update the selection models - these will trigger notifications via their setter methods
# self.selected_items_model.selected_items = selected_items
# self.unselected_items_model.selected_items = unselected_items
class ProgressCheckButtonModel(Observable):
def __init__(self, label, status_model):
self.label = label
self.status_model = status_model
self.status_model.model.add_listener(self._progress_status_changed)
def get_label(self):
return self.label
def get_checked(self):
return self.status_model.is_checked()
def set_checked(self, checked):
return self.label
def _progress_status_changed(self, new_status):
self._notify(self.index, new_status)
class ProgressCheckButton(Frame, Observable):
def __init__(self, parent, model, index, status_model):
Frame.__init__(self, parent)
Observable.__init__(self)
self.model = model
self.index = index
self.status_model = status_model
self.var = IntVar()
self.var.set(model.get_checked())
self.progress_var = IntVar()
self.progress_status = ProgressStatus.undefined
        self.check_button = Checkbutton(self, text=model.get_label(), variable=self.var, command=self._check_changed)
self.progress_bar = Progressbar(self, orient='horizontal', mode='indeterminate', variable=self.progress_var)
self.check_button.pack(side=LEFT, fill="both", expand=True)
self.model.add_listener(self._model_changed)
def _model_changed(self, new_status):
model_state = self.model.get_checked()
gui_state = self.var.get()
if model_state is not gui_state:
self.model.set_checked(gui_state)
def refresh_check(self):
if self.status_model.is_checked_force_reload():
self.check_button.select()
else:
self.check_button.deselect()
def is_checked(self):
return self.var.get()
def _progress_status_changed(self, new_status):
self._refresh_progress()
def _refresh_progress(self):
status = self.status_model.get_status()
if not status == self.progress_status:
if status == ProgressStatus.in_progress:
self.progress_bar.pack(side=RIGHT, fill="both", expand=True)
else:
self.progress_bar.pack_forget()
def _check_changed(self):
new_checked = self.var.get()
if new_checked is not self.model.get_checked():
self.model.set_checked(new_checked)
if new_checked is not self.status_model.is_checked():
self._notify(self.index, new_checked)
class SelectedItems(Observable):
def __init__(self):
Observable.__init__(self)
self._selected_items = []
@property
def selected_items(self):
return self._selected_items
@selected_items.setter
def selected_items(self, value):
if self.selected_items != value:
self._selected_items = value
self._notify(value)
class CheckStatus(Enum):
off = 0
on = 1
undefined = 2
| bsd-2-clause | 8,188,460,140,480,932,000 | 34.933099 | 131 | 0.630475 | false |
Shapeways/coyote_framework | example/example_tests/TestFollowLink_YouFollowedItGood.py | 1 | 1568 | from coyote_framework.testing.coyote_test import CoyoteTest
from example.example_app.config.example_config import ExampleConfig
from coyote_framework.drivers.coyote_driverfactory import driver_context
from example.example_app.page_objects.example_follow_page import ExampleFollowPage
from example.example_app.page_objects.example_home_page import ExampleHomePage
__author__ = 'matt'
class TestFollowLink_YouFollowedItGood(CoyoteTest):
"""
    Test that we can load a page, click a link,
    instantiate a page object, then click another link.
You're really doin' it now, kid. Complex shit.
"""
def setUp(self):
super(TestFollowLink_YouFollowedItGood, self).setUp()
self.config = ExampleConfig()
def test_main(self):
with driver_context() as driver:
            hostname = self.config.get('web_hostname')
            driver.visit(hostname)
hp = ExampleHomePage(driver_wrapper=driver)
driver.assertion.assert_true(hp.is_page_loaded())
# Let's go to another page.
# Notice how we interact with hp, then instantiate fp after we land on it
hp.click_follow_me()
fp = ExampleFollowPage(driver_wrapper=driver)
driver.assertion.assert_true(fp.is_page_loaded())
# Now. let's go back to the home page
# Notice that we re-instantiate hp, as the original hp now has a stale DOM
fp.click_go_back_link()
hp = ExampleHomePage(driver_wrapper=driver)
driver.assertion.assert_true(hp.is_page_loaded())
| mit | -2,093,807,376,312,911,400 | 38.2 | 86 | 0.673469 | false |
itfootman/hackrepo | gitc_utils.py | 1 | 2668 | #
# Copyright (C) 2015 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import os
import sys
import git_command
import git_config
# TODO (sbasi) - Remove this constant and fetch manifest dir from /gitc/.config
GITC_MANIFEST_DIR = '/usr/local/google/gitc/'
GITC_FS_ROOT_DIR = '/gitc/manifest-rw/'
NUM_BATCH_RETRIEVE_REVISIONID = 300
def _set_project_revisions(projects):
"""Sets the revisionExpr for a list of projects.
Because of the limit of open file descriptors allowed, length of projects
should not be overly large. Recommend calling this function multiple times
with each call not exceeding NUM_BATCH_RETRIEVE_REVISIONID projects.
  @param projects: List of project objects to set the revisionExpr for.
"""
  # Retrieve the commit id for each project based on its current
  # revisionExpr, if it is not already a commit id.
project_gitcmds = [(
project, git_command.GitCommand(None,
['ls-remote',
project.remote.url,
project.revisionExpr],
capture_stdout=True, cwd='/tmp'))
for project in projects if not git_config.IsId(project.revisionExpr)]
for proj, gitcmd in project_gitcmds:
if gitcmd.Wait():
print('FATAL: Failed to retrieve revisionExpr for %s' % proj)
sys.exit(1)
proj.revisionExpr = gitcmd.stdout.split('\t')[0]
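# Illustrative sketch (not part of the original file): callers are expected to
# batch projects so too many file descriptors are not opened at once, e.g.
#
#   for i in range(0, len(projects), NUM_BATCH_RETRIEVE_REVISIONID):
#     _set_project_revisions(projects[i:i + NUM_BATCH_RETRIEVE_REVISIONID])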
def generate_gitc_manifest(client_dir, manifest):
"""Generate a manifest for shafsd to use for this GITC client.
@param client_dir: GITC client directory to install the .manifest file in.
@param manifest: XmlManifest object representing the repo manifest.
"""
print('Generating GITC Manifest by fetching revision SHAs for each '
'project.')
index = 0
while index < len(manifest.projects):
_set_project_revisions(
manifest.projects[index:(index+NUM_BATCH_RETRIEVE_REVISIONID)])
index += NUM_BATCH_RETRIEVE_REVISIONID
# Save the manifest.
with open(os.path.join(client_dir, '.manifest'), 'w') as f:
manifest.Save(f)
| apache-2.0 | 6,277,069,641,981,856,000 | 38.235294 | 79 | 0.691904 | false |
iw3hxn/LibrERP | account_vat_period_end_statement/wizard/vat_settlement.py | 1 | 17171 | # flake8: noqa
# -*- coding: utf-8 -*-
# Copyright 2017-19 Didotech srl (<http://www.didotech.com>)
# Andrei Levin <[email protected]>
# Antonio M. Vigliotti <[email protected]>
# Odoo-Italia.org Community
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from openerp.osv import fields, orm
import base64
import logging
import datetime
from openerp.tools import DEFAULT_SERVER_DATE_FORMAT
from openerp.addons.l10n_it_ade.bindings.vat_settlement_v_18 import (
CTD_ANON, # Intestazione,; Comunicazione,
Comunicazione_IVP_Type,
DatiContabili_IVP_Type,
Fornitura,
Frontespizio_IVP_Type,
Intestazione_IVP_Type
)
_logger = logging.getLogger(__name__)
_logger.setLevel(logging.DEBUG)
codice_fornitura = 'IVP18'
identificativo_software = 'Odoo.6.1.4.0.0'
class WizardVatSettlement(orm.TransientModel):
_name = "wizard.vat.settlement"
_columns = {
'data': fields.binary("File", readonly=True),
'name': fields.char('Filename', 32, readonly=True),
'state': fields.selection((
('create', 'create'), # choose
('get', 'get'), # get the file
)),
}
_defaults = {
'state': lambda *a: 'create',
}
def get_date_start_stop(self, statement, context=None):
date_start = False
date_stop = False
periods = statement.e_period_ids or statement.period_ids
        for period in periods:  # TODO: pass as a parameter
if not date_start:
date_start = period.date_start
else:
if period.date_start < date_start:
date_start = period.date_start
if not date_stop:
date_stop = period.date_stop
else:
if period.date_stop > date_stop:
date_stop = period.date_stop
date_start = datetime.datetime.strptime(date_start,
DEFAULT_SERVER_DATE_FORMAT)
date_stop = datetime.datetime.strptime(date_stop,
DEFAULT_SERVER_DATE_FORMAT)
return date_start, date_stop
def get_taxable(self, cr, uid, statement, type, context=None):
"""
:param cr:
:param uid:
:param statement:
:param type: 'credit' or 'debit'
:param context:
:return: amount_taxable
"""
base_amount = 0.0
if type == 'credit':
credit_line_pool = self.pool.get('statement.credit.account.line')
for credit_line in statement.credit_vat_account_line_ids:
# if credit_line.amount != 0.0:
base_amount += credit_line.base_amount
elif type == 'debit':
debit_line_pool = self.pool.get('statement.debit.account.line')
for debit_line in statement.debit_vat_account_line_ids:
# if debit_line.amount != 0.0:
base_amount += debit_line.base_amount
return base_amount
@staticmethod
def italian_number(number):
return '{:.2f}'.format(number).replace('.', ',')
@staticmethod
def italian_date(dt):
if len(dt) == 8:
return dt[-2:] + dt[4:6] + dt[0:4]
elif len(dt) == 10:
return dt[-2:] + dt[5:7] + dt[0:4]
else:
return ''
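    # Illustrative examples (not part of the original module):
    #
    #   italian_number(1234.5)      -> '1234,50'   (decimal comma, two digits)
    #   italian_date('2017-03-31')  -> '31032017'  (DDMMYYYY from an ISO date)
    #   italian_date('20170331')    -> '31032017'  (DDMMYYYY from a compact date)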
def export_vat_settlemet(self, cr, uid, ids, context=None):
# TODO: insert period verification
context = context or {}
model_data_obj = self.pool['ir.model.data']
statement_debit_account_line_obj = \
self.pool['statement.debit.account.line']
statement_credit_account_line_obj = \
self.pool['statement.credit.account.line']
trimestre = {
'3': '1',
'6': '2',
'9': '3',
'12': '4'
}
module_pool = self.pool.get('ir.module.module')
company_pool = self.pool.get('res.company')
ids = module_pool.search(
cr, uid, [('name', '=', 'account_vat_period_end_statement')])
if len(ids) == 0:
_logger.info('Invalid software signature.')
_logger.info('Please contact [email protected] '
'to obtain free valid software')
identificativo_software = ''
else:
ver = module_pool.browse(cr, uid,
ids[0]).installed_version
identificativo_software = 'Odoo' + ver
identificativo_software = identificativo_software.upper()
statement_pool = self.pool.get('account.vat.period.end.statement')
statement_ids = context.get('active_ids', False)
for statement in statement_pool.browse(cr,
uid,
statement_ids,
context=context):
progressivo_telematico = statement_pool.set_progressivo_telematico(
cr, uid, statement, context)
company_id = statement.company_id.id
company = company_pool.browse(cr, uid, company_id, context=context)
if company.partner_id.vat[:2].lower() == 'it':
vat = company.partner_id.vat[2:]
else:
vat = company.partner_id.vat
settlement = Fornitura()
settlement.Intestazione = (Intestazione_IVP_Type())
settlement.Intestazione.CodiceFornitura = codice_fornitura
_logger.debug(settlement.Intestazione.toDOM().toprettyxml(
encoding="UTF-8"))
if statement.type[0:3] != 'xml':
_logger.info('No electronic statement type!')
raise orm.except_orm(
'Error!',
'No electronic statement type!')
if not (statement.period_ids or statement.e_period_ids):
_logger.info('No period defined!')
raise orm.except_orm(
'Error!',
'No period defined!')
if not statement.soggetto_codice_fiscale:
_logger.info(
'Manca CF del contribuente!')
raise orm.except_orm(
'Errore!',
'Manca CF del contribuente!')
if len(statement.soggetto_codice_fiscale) != 11:
                _logger.info(
                    'Il CF del contribuente deve essere una PI di 11 cifre!')
                raise orm.except_orm(
                    'Errore!',
                    'Il CF del contribuente deve essere una PI di 11 cifre!')
if statement.soggetto_codice_fiscale != \
company.partner_id.vat[2:] and \
statement.soggetto_codice_fiscale != \
company.partner_id.fiscalcode:
_logger.info(
                    'CF contribuente diverso da CF azienda!')
raise orm.except_orm(
'Errore!',
                    'CF contribuente diverso da CF azienda!')
if not statement.dichiarante_codice_fiscale:
_logger.info(
'Manca CF del dichiarante!')
raise orm.except_orm(
'Errore!',
'Manca CF del dichiarante!')
if len(statement.dichiarante_codice_fiscale) != 16:
_logger.info(
'Il dichiarante deve essere PF con CF di 16 caratteri!')
raise orm.except_orm(
'Errore!',
'Il dichiarante deve essere PF con CF di 16 caratteri!!')
if not statement.codice_carica:
_logger.info(
'Manca codice carica del dichiarante!')
raise orm.except_orm(
'Errore!',
'Manca codice carica del dichiarante!')
if not statement.incaricato_trasmissione_codice_fiscale or \
not statement.incaricato_trasmissione_data_impegno:
_logger.info(
'Manca CF o data impegno incaricato alla trasmissione!')
raise orm.except_orm(
'Errore!',
'Manca CF o data impegno incaricato alla trasmissione!')
settlement.Comunicazione = (Comunicazione_IVP_Type())
settlement.Comunicazione.Frontespizio = (Frontespizio_IVP_Type())
settlement.Comunicazione.Frontespizio.FirmaDichiarazione = "1"
settlement.Comunicazione.Frontespizio.CodiceFiscale = \
statement.soggetto_codice_fiscale
settlement.Comunicazione.Frontespizio.CFIntermediario = \
statement.incaricato_trasmissione_codice_fiscale
if statement.incaricato_trasmissione_data_impegno:
settlement.Comunicazione.Frontespizio.DataImpegno = \
self.italian_date(
statement.incaricato_trasmissione_data_impegno)
settlement.Comunicazione.Frontespizio.FirmaIntermediario = "1"
settlement.Comunicazione.Frontespizio.ImpegnoPresentazione = "1"
if statement.dichiarante_codice_fiscale:
settlement.Comunicazione.Frontespizio.CFDichiarante = \
statement.dichiarante_codice_fiscale
if statement.codice_carica:
settlement.Comunicazione.Frontespizio.CodiceCaricaDichiarante = \
statement.codice_carica.code
date_start, date_stop = self.get_date_start_stop(statement, context=context)
settlement.Comunicazione.Frontespizio.AnnoImposta = str(
date_stop.year)
settlement.Comunicazione.Frontespizio.PartitaIVA = \
statement.soggetto_codice_fiscale
# settlement.Comunicazione.Frontespizio.PIVAControllante
# settlement.Comunicazione.Frontespizio.UltimoMese = str(date_period_end.month)
# settlement.Comunicazione.Frontespizio.LiquidazioneGruppo
# settlement.Comunicazione.Frontespizio.CodiceFiscaleSocieta
# settlement.Comunicazione.Frontespizio.FlagConferma
if identificativo_software:
settlement.Comunicazione.Frontespizio.\
IdentificativoProdSoftware = identificativo_software
_logger.debug(
settlement.Comunicazione.Frontespizio.toDOM().toprettyxml(
encoding="UTF-8"))
settlement.Comunicazione.DatiContabili = (DatiContabili_IVP_Type())
# We may have more than one modulo, but do we want it?
# modulo_period_end = datetime.datetime.strptime(statement.date,
# DEFAULT_SERVER_DATE_FORMAT)
modulo = CTD_ANON()
modulo.NumeroModulo = '1' # 1, 2, 3, 4, 5
            # <<<<< quarter_vat_period does not exist in 7.0 >>>>>
            # if statement.period_ids[0].fiscalyear_id.quarter_vat_period:
            #     # quarterly
            #     modulo.Trimestre = trimestre[str(modulo_period_end.month)]
            # else:
            #     # monthly
            #     modulo.Mese = str(modulo_period_end.month)
if date_start.month == date_stop.month:
modulo.Mese = str(date_stop.month)
else:
if date_start.month in (1, 4, 7, 10) and \
date_stop.month in (3, 6, 9, 12):
modulo.Trimestre = trimestre[str(date_stop.month)]
else:
_logger.info(
'Undetermined quarter/month!')
raise orm.except_orm(
'Error!',
"Undetermined quarter/month!")
            # TODO: for companies subject to anti-mafia checks (subcontractors); not populated for now
# modulo.Subfornitura = "0"
            # TODO: optional: earthquake victims; not populated for now
# modulo.EventiEccezionali =
modulo.TotaleOperazioniAttive = self.italian_number(
self.get_taxable(cr, uid, statement, 'debit', context)
)
modulo.TotaleOperazioniPassive = self.italian_number(
self.get_taxable(cr, uid, statement, 'credit', context)
)
iva_esigibile = 0
debit_account_line_ids = statement_debit_account_line_obj.search(
cr, uid, [('statement_id', '=', statement.id)])
for debit_account_line in statement_debit_account_line_obj.browse(
cr, uid, debit_account_line_ids, context):
iva_esigibile += debit_account_line.amount
            # NOTE: numeric format;
            # decimals are separated with the ',' (comma) character
modulo.IvaEsigibile = self.italian_number(iva_esigibile)
iva_detratta = 0
credit_account_line_ids = statement_credit_account_line_obj.search(
cr, uid, [('statement_id', '=', statement.id)])
for credit_account_line in statement_credit_account_line_obj.\
browse(cr, uid, credit_account_line_ids, context):
iva_detratta += credit_account_line.amount
            # NOTE: numeric format;
            # decimals are separated with the ',' (comma) character
modulo.IvaDetratta = self.italian_number(iva_detratta)
if iva_esigibile > iva_detratta:
iva_dovuta = iva_esigibile - iva_detratta
modulo.IvaDovuta = self.italian_number(iva_dovuta)
else:
iva_credito = iva_detratta - iva_esigibile
modulo.IvaCredito = self.italian_number(iva_credito)
            # TODO: leave for later
# modulo.IvaDetratta = self.italian_number(iva_detratta)
# modulo.IvaCredito =
previous_debit = statement.previous_debit_vat_amount
if previous_debit:
modulo.DebitoPrecedente = self.italian_number(previous_debit)
previous_credit = statement.previous_credit_vat_amount
if previous_credit:
if date_start.month == 1:
modulo.CreditoAnnoPrecedente = self.italian_number(previous_credit)
else:
modulo.CreditoPeriodoPrecedente = self.italian_number(previous_credit)
            # Ask the user
# modulo.CreditoAnnoPrecedente
            # TODO: leave for later
# modulo.VersamentiAutoUE
# modulo.CreditiImposta
# modulo.InteressiDovuti
# modulo.Acconto
if statement.authority_vat_amount > 0:
                # NOTE: numeric format; decimals are separated from the integer part with the ',' (comma) character
modulo.ImportoDaVersare = self.italian_number(statement.authority_vat_amount)
elif statement.authority_vat_amount < 0:
                # NOTE: numeric format; decimals are separated from the integer part with the ',' (comma) character
modulo.ImportoACredito = self.italian_number(-statement.authority_vat_amount)
settlement.Comunicazione.DatiContabili.Modulo.append(modulo)
_logger.debug(settlement.Comunicazione.DatiContabili.toDOM().toprettyxml(encoding="UTF-8"))
settlement.Comunicazione.identificativo = \
"%05d" % progressivo_telematico
vat_settlement_xml = settlement.toDOM().toprettyxml(encoding="UTF-8")
fn_name = 'IT%s_LI_%05d.xml' % (statement.soggetto_codice_fiscale,
progressivo_telematico)
attach_vals = {
'name': fn_name,
'datas_fname': fn_name,
'datas': base64.encodestring(vat_settlement_xml),
'res_model': 'account.vat.period.end.statement',
'res_id': statement.id
}
statement_pool.write(cr, uid, [statement.id],
{'progressivo_telematico': progressivo_telematico})
vat_settlement_attachment_out_id = self.pool[
'account.vat.settlement.attachment'].create(cr,
uid, attach_vals, context={})
view_rec = model_data_obj.get_object_reference(
cr, uid, 'account_vat_period_end_statement',
'view_vat_settlement_attachment_form')
if view_rec:
view_id = view_rec and view_rec[1] or False
return {
'view_type': 'form',
'name': "Export Liquidazione IVA",
'view_id': [view_id],
'res_id': vat_settlement_attachment_out_id,
'view_mode': 'form',
'res_model': 'account.vat.settlement.attachment',
'type': 'ir.actions.act_window',
'context': context
}
| agpl-3.0 | 7,779,389,733,822,867,000 | 42.915601 | 113 | 0.552152 | false |
sunfall/giles | giles/games/seat.py | 1 | 1691 | # Giles: seat.py
# Copyright 2012 Phil Bordelon
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from giles.utils import Struct
class Seat(object):
"""A seat at a game. Seats can be named, be active or inactive, and
have players or not.
"""
def __init__(self, name):
self.display_name = name
self.name = name.lower()
self.active = False
self.player = None
self.player_name = "Empty!"
self.data = Struct()
def __repr__(self):
return self.display_name
def sit(self, player, activate=True):
# By default, sitting a player down in a seat activates that
# seat. That can be overridden.
if not self.player:
self.player = player
self.player_name = repr(player)
if activate:
self.active = True
return True
return False
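    # Illustrative sketch (not part of the original file): "player" stands in
    # for any object with a meaningful repr().
    #
    #   seat = Seat("North")
    #   seat.sit(player)    # occupies the seat and activates it
    #   seat.stand()        # empties it; the name gains an "(absentee)" tag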
def stand(self):
if self.player:
self.player_name = repr(self.player) + " (absentee)"
self.player = None
return True
return False
| agpl-3.0 | -5,881,379,406,293,452,000 | 29.745455 | 74 | 0.640449 | false |
MaxTyutyunnikov/lino | lino/core/auth.py | 1 | 11109 | # -*- coding: UTF-8 -*-
## Copyright 2010-2013 Luc Saffre
## This file is part of the Lino project.
## Lino is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 3 of the License, or
## (at your option) any later version.
## Lino is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
## You should have received a copy of the GNU General Public License
## along with Lino; if not, see <http://www.gnu.org/licenses/>.
"""
Lino's authentication middleware
"""
from __future__ import unicode_literals
import os
import logging
logger = logging.getLogger(__name__)
from django.utils.importlib import import_module
from django.utils.translation import ugettext_lazy as _
from django.core import exceptions
from django.utils import translation
from django.conf import settings
from django import http
from lino.core import constants
from lino.core.perms import AnonymousUser
class AuthMiddleWareBase(object):
"""
Common base class for
:class:`RemoteUserMiddleware`,
:class:`SessionsUserMiddleware`
and
:class:`NoUserMiddleware`.
"""
def get_user_from_request(self, request):
raise NotImplementedError
def process_request(self, request):
#~ print 20130313, request.session.get('username')
settings.SITE.startup()
"""
first request will trigger site startup to load UserProfiles
"""
user = self.get_user_from_request(request)
self.on_login(request,user)
class NOT_NEEDED:
pass
#~ @classmethod
def authenticate(cls, username, password=NOT_NEEDED):
#~ logger.info("20130923 authenticate %s,%s" % (username,password))
if not username:
return AnonymousUser.instance()
"""
20120110 : Alicia once managed to add a space char in front of
        her username in the login dialog.
Apache let her in as " alicia".
"""
username = username.strip()
try:
user = settings.SITE.user_model.objects.get(username=username)
if user.profile is None:
logger.info("Could not authenticate %s : user has no profile",username)
return None
if password != cls.NOT_NEEDED:
if not user.check_password(password):
logger.info("Could not authenticate %s : password mismatch",username)
return None
#~ logger.info("20130923 good password for %s",username)
#~ else:
#~ logger.info("20130923 no password needed for %s",username)
return user
except settings.SITE.user_model.DoesNotExist,e:
logger.info("Could not authenticate %s : no such user",username)
return None
def on_login(self,request,user):
"""
The method which is applied when the user has been determined.
On multilingual sites,
if URL_PARAM_USER_LANGUAGE is present it overrides user.language.
"""
#~ logger.info("20130923 on_login(%s)" % user)
request.user = user
user_language = user.language or settings.SITE.get_default_language()
if request.method == 'GET':
rqdata = request.GET
elif request.method in ('PUT','DELETE'):
rqdata = http.QueryDict(request.body) # raw_post_data before Django 1.4
elif request.method == 'POST':
rqdata = request.POST
else:
# e.g. OPTIONS, HEAD
if len(settings.SITE.languages) > 1:
translation.activate(user_language)
request.LANGUAGE_CODE = translation.get_language()
#~ logger.info("20121205 on_login %r",translation.get_language())
request.requesting_panel = None
request.subst_user = None
return
#~ else: # DELETE
#~ request.subst_user = None
#~ request.requesting_panel = None
#~ return
if len(settings.SITE.languages) > 1:
user_language = rqdata.get(constants.URL_PARAM_USER_LANGUAGE,user_language)
translation.activate(user_language)
request.LANGUAGE_CODE = translation.get_language()
su = rqdata.get(constants.URL_PARAM_SUBST_USER,None)
if su is not None:
if su:
try:
su = settings.SITE.user_model.objects.get(id=int(su))
#~ logger.info("20120714 su is %s",su.username)
except settings.SITE.user_model.DoesNotExist, e:
su = None
else:
su = None # e.g. when it was an empty string "su="
request.subst_user = su
request.requesting_panel = rqdata.get(constants.URL_PARAM_REQUESTING_PANEL,None)
#~ logger.info("20121228 subst_user is %r",request.subst_user)
#~ if request.subst_user is not None and not isinstance(request.subst_user,settings.SITE.user_model):
#~ raise Exception("20121228")
class RemoteUserMiddleware(AuthMiddleWareBase):
"""
Middleware automatically installed by
:meth:`get_middleware_classes <lino.site.Site.get_middleware_classes>`
when both
:setting:`remote_user_header` and :setting:`user_model`
are not empty.
This does the same as
`django.contrib.auth.middleware.RemoteUserMiddleware`,
but in a simplified manner and without using Sessions.
It also activates the User's language, if that field is not empty.
Since it will run *after*
`django.contrib.auth.middleware.RemoteUserMiddleware`
(at least if you didn't change :meth:`lino.Lino.get_middleware_classes`),
it will override any browser setting.
"""
def get_user_from_request(self, request):
username = request.META.get(
settings.SITE.remote_user_header,settings.SITE.default_user)
if not username:
#~ msg = "Using remote authentication, but no user credentials found."
#~ raise exceptions.PermissionDenied(msg)
raise Exception("Using remote authentication, but no user credentials found.")
user = self.authenticate(username)
if user is None:
#~ logger.info("20130514 Unknown username %s from request %s",username, request)
#~ raise Exception(
#~ raise exceptions.PermissionDenied("Unknown or inactive username %r. Please contact your system administrator."
#~ logger.info("Unknown or inactive username %r.",username)
raise exceptions.PermissionDenied()
return user
class NoUserMiddleware(AuthMiddleWareBase):
"""
Middleware automatically installed by
:meth:`get_middleware_classes <lino.site.Site.get_middleware_classes>`
when :setting:`user_model` is None.
"""
def get_user_from_request(self, request):
return AnonymousUser.instance()
class SessionUserMiddleware(AuthMiddleWareBase):
"""
Middleware automatically installed by
:meth:`get_middleware_classes <lino.site.Site.get_middleware_classes>`
when
:setting:`remote_user_header` is None
and :setting:`user_model` not.
"""
def get_user_from_request(self, request):
#~ logger.info("20130923 get_user_from_request(%s)" % request.session.items())
user = self.authenticate(request.session.get('username'),
request.session.get('password'))
if user is None:
#~ logger.info("20130923 Login failed from session %s", request.session)
user = AnonymousUser.instance()
return user
class LDAPAuthMiddleware(SessionUserMiddleware):
"""
Middleware automatically installed by
:meth:`get_middleware_classes <lino.site.Site.get_middleware_classes>`
when
- :setting:`user_model` is not None
- :setting:`remote_user_header` is None
- :setting:`ldap_auth_server` is not None
Using this requires
`activedirectory <https://github.com/theatlantic/python-active-directory>`_.
Thanks to Josef Kejzlar for the initial implementation.
"""
def __init__(self):
from activedirectory import Client, Creds
from activedirectory.core.exception import Error
server_spec = settings.SITE.ldap_auth_server
if isinstance(server_spec,basestring):
server_spec = server_spec.split()
self.domain = server_spec[0]
self.server = server_spec[1]
#~ domain = 'DOMAIN_NAME'
#~ server = 'SERVER_DNS'
        self.creds = Creds(self.domain)
def check_password(self,username, password):
try:
self.creds.acquire(username, password, server=self.server)
return True
except Exception as e:
pass
return False
#~ @classmethod
def authenticate(cls, username, password=SessionUserMiddleware.NOT_NEEDED, from_session=False):
if not from_session and username and password != SessionUserMiddleware.NOT_NEEDED:
if not cls.check_password(username, password):
return None
return SessionUserMiddleware.authenticate(username, SessionUserMiddleware.NOT_NEEDED)
def get_user_from_request(self, request):
user = self.authenticate(request.session.get('username'),
request.session.get('password'), True)
if user is None:
logger.debug("Login failed from session %s", request.session)
user = AnonymousUser.instance()
return user
def get_auth_middleware():
if settings.SITE.auth_middleware is None:
return AuthMiddleWareBase
module, obj = settings.SITE.auth_middleware.rsplit('.', 1)
module = import_module(module)
return getattr(module, obj)
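# Illustrative sketch (not part of the original module): resolving the
# configured middleware and checking credentials; the username/password are
# made-up placeholders.
#
#   middleware = get_auth_middleware()
#   user = middleware().authenticate('alice', 'secret')  # None if rejected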
def authenticate(*args, **kwargs):
"""
Needed by the ``/auth`` view (:class:`lino.ui.views.Authenticate`).
Called when the Login window of the web interface is confirmed.
"""
middleware = get_auth_middleware()
return middleware().authenticate(*args, **kwargs)
| gpl-3.0 | 8,390,691,697,609,771,000 | 33.155063 | 126 | 0.596543 | false |
nicoechaniz/IPP | bdd/features/steps/completar_muestra.py | 1 | 3130 | # -*- coding: utf-8 -*-
# IPP, Plataforma web del Índice de Precios Popular
# Copyright (c) 2016 Nicolás Echániz and contributors.
#
# This file is part of IPP
#
# IPP is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from time import sleep
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
from behave import *
from utils import seleccionar_primera_opcion
from ipp.relevamiento.constants import RELEVADOR
from ipp.relevamiento.factories import JerarquizacionMarcaFactory
from ipp.relevamiento.models import Muestra, PlanillaDeRelevamiento, Perfil, ProductoConMarca
@given('los productos en la planilla tienen marcas establecidas')
def impl(context):
planilla = PlanillaDeRelevamiento.objects.last()
producto_ids = [p.id for p in planilla.planilla_modelo.productos.all()]
for p_id in producto_ids:
producto_con_marca = ProductoConMarca.objects.get(producto_generico__id=p_id,
marca=context.MARCA_POR_DEFECTO)
JerarquizacionMarcaFactory(tipo_marca="economica",
planilla_de_relevamiento=planilla,
producto_con_marca=producto_con_marca)
@when('selecciono la Muestra')
def impl(context):
muestra = Muestra.objects.last()
url = reverse("relevamiento:editar_muestra",
kwargs={"muestra_id": muestra.id})
context.browser.click_link_by_href(url)
@when('establezco el precio para un producto')
def impl(context):
context.browser.find_by_css('span.glyphicon').first.click()
    # when behave_browser is a real browser, the animation that shows the modal takes a moment
sleep(1)
context.browser.fill('precio', 112)
context.browser.find_by_name('guardar_precio').first.click()
@then('la planilla refleja el precio cargado')
def impl(context):
ocurrencia = context.browser.find_by_css('td.success')[1].html.find("112")
assert ocurrencia >= 0
@then('si edito el precio cargado')
def impl(context):
context.browser.find_by_css('span.glyphicon').first.click()
    # when behave_browser is a real browser, the animation that shows the modal takes a moment
sleep(1)
context.browser.fill('precio', 116)
context.browser.find_by_name('guardar_precio').first.click()
@then('la planilla refleja el nuevo precio')
def impl(context):
ocurrencia = context.browser.find_by_css('td.success')[1].html.find("116")
assert ocurrencia >= 0
| agpl-3.0 | 8,816,208,493,711,927,000 | 40.666667 | 93 | 0.70336 | false |
repleo/bounca | api/serializers.py | 1 | 5784 | """Serializers for Certificate API"""
import uuid
from django.contrib.auth import password_validation
from rest_framework import serializers
from x509_pki.models import Certificate, DistinguishedName
class DistinguishedNameSerializer(serializers.ModelSerializer):
class Meta:
fields = (
'commonName',
'countryName',
'stateOrProvinceName',
'localityName',
'organizationName',
'organizationalUnitName',
'emailAddress',
'subjectAltNames')
model = DistinguishedName
class CertificateSerializer(serializers.ModelSerializer):
dn = DistinguishedNameSerializer()
passphrase_in = serializers.CharField(
max_length=200,
required=False,
allow_null=True,
allow_blank=True)
passphrase_out = serializers.CharField(
max_length=200,
required=False,
allow_null=True,
allow_blank=True)
passphrase_out_confirmation = serializers.CharField(
max_length=200, required=False, allow_null=True, allow_blank=True)
class Meta:
fields = (
'id',
'owner',
'shortname',
'name',
'parent',
'cert_path',
'type',
'dn',
'created_at',
'expires_at',
'revoked_at',
'days_valid',
'expired',
'revoked',
'crl_distribution_url',
'ocsp_distribution_host',
'passphrase_in',
'passphrase_out',
'passphrase_out_confirmation')
model = Certificate
extra_kwargs = {
'passphrase_out': {
'write_only': True}, 'passphrase_out_confirmation': {
'write_only': True}, 'passphrase_in': {
'write_only': True}}
def validate_passphrase_out(self, passphrase_out):
if passphrase_out:
password_validation.validate_password(
passphrase_out, self.instance)
return passphrase_out
return None
def validate_passphrase_in(self, passphrase_in):
if passphrase_in:
if not self.initial_data.get('parent'):
raise serializers.ValidationError(
"You should provide a parent certificate if you provide a passphrase in")
parent = Certificate.objects.get(
pk=self.initial_data.get('parent'))
parent.passphrase_in = passphrase_in
if not parent.is_passphrase_valid():
raise serializers.ValidationError(
"Passphrase incorrect. Not allowed to sign your certificate")
return passphrase_in
return None
def validate_passphrase_out_confirmation(
self, passphrase_out_confirmation):
if passphrase_out_confirmation:
passphrase_out = self.initial_data.get("passphrase_out")
if passphrase_out and passphrase_out_confirmation and passphrase_out != passphrase_out_confirmation:
raise serializers.ValidationError(
"The two passphrase fields didn't match.")
password_validation.validate_password(
passphrase_out_confirmation, self.instance)
return passphrase_out_confirmation
return None
def validate(self, data):
shortname = data.get("shortname")
cert_type = data.get("type")
if Certificate.objects.filter(
shortname=shortname,
type=cert_type,
revoked_uuid=0).count() > 0:
raise serializers.ValidationError(
dict(
Certificate.TYPES)[cert_type] +
" \"" +
shortname +
"\" already exists.")
return data
def create(self, validated_data):
dn_data = validated_data.pop('dn')
dn = DistinguishedName.objects.create(**dn_data)
certificate = Certificate.objects.create(dn=dn, **validated_data)
return certificate
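# Illustrative usage sketch (not part of the original file); "request_payload"
# is an invented placeholder for the posted certificate data:
#
#   serializer = CertificateSerializer(data=request_payload)
#   if serializer.is_valid():
#       certificate = serializer.save()  # creates the DN, then the certificate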
class CertificateRevokeSerializer(serializers.ModelSerializer):
passphrase_in = serializers.CharField(max_length=200, required=True)
class Meta:
fields = ('passphrase_in',)
model = Certificate
extra_kwargs = {'passphrase_in': {'write_only': True}}
def validate_passphrase_in(self, passphrase_in):
if passphrase_in:
self.instance.parent.passphrase_in = passphrase_in
if not self.instance.parent.is_passphrase_valid():
raise serializers.ValidationError(
"Passphrase incorrect. Not allowed to revoke your certificate")
return passphrase_in
return None
def update(self, instance, validated_data):
instance.passphrase_in = validated_data['passphrase_in']
instance.delete()
return instance
class CertificateCRLSerializer(serializers.ModelSerializer):
passphrase_in = serializers.CharField(max_length=200, required=True)
class Meta:
fields = ('passphrase_in',)
model = Certificate
extra_kwargs = {'passphrase_in': {'write_only': True}}
def validate_passphrase_in(self, passphrase_in):
if passphrase_in:
self.instance.passphrase_in = passphrase_in
if not self.instance.is_passphrase_valid():
raise serializers.ValidationError(
"Passphrase incorrect. No permission to create CRL File")
return passphrase_in
return None
def update(self, instance, validated_data):
instance.passphrase_in = validated_data['passphrase_in']
instance.generate_crl()
return instance
| apache-2.0 | -8,043,028,781,522,893,000 | 33.023529 | 112 | 0.592669 | false |
exepulveda/swfc | python/clustering_pca_ds4.py | 1 | 2204 | import numpy as np
import pickle
import logging
import argparse
import csv
import matplotlib as mpl
mpl.use('agg')
from sklearn.preprocessing import StandardScaler
from sklearn.cluster import KMeans
from sklearn.decomposition import PCA
from sklearn.metrics import silhouette_score
from cluster_utils import create_clusters_dict, recode_categorical_values
from plotting import scatter_clusters
import matplotlib.pyplot as plt
import clusteringlib as cl
from case_study_2d import attributes,setup_case_study,setup_distances
if __name__ == "__main__":
filename = 'ds4'
X = np.loadtxt("../data/{dataset}.csv".format(dataset=filename),skiprows=1,delimiter=",")
locations = X[:,0:2]
    data = X[:,2:6] # 0,1,2,5 are continuous
true_clusters = X[:,6]
N,ND = data.shape
    # now all are continuous variables
var_types = np.ones(ND)
seed = 1634120
np.random.seed(seed)
standadize = StandardScaler()
data_scaled = standadize.fit_transform(data)
scale = standadize.scale_
ND_PCA = 2
pca = PCA(n_components=ND_PCA,whiten=True)
pca_X = pca.fit_transform(data_scaled)
data_F = np.asfortranarray(data,dtype=np.float32)
for NC in range(2,11):
clustering_pca = KMeans(n_clusters=NC)
clusters_pca = clustering_pca.fit_predict(pca_X)
#print("Calculating centroids")
centroids_F = np.asfortranarray(np.empty((NC,ND)),dtype=np.float32)
for k in range(NC):
indices = np.where(clusters_pca == k)[0]
centroids_F[k,:] = np.mean(data[indices,:],axis=0)
#print(k,len(indices)) #,centroids_F[k,:])
#PCA
cl.distances.sk_setup(np.asfortranarray(np.float32(scale)))
cl.distances.set_variables(np.asfortranarray(np.int32(var_types)),False)
clusters = np.asfortranarray(clusters_pca,dtype=np.int8)
weights = np.asfortranarray(np.ones((NC,ND),dtype=np.float32)/ ND)
ret_pca = cl.clustering.dbi_index(centroids_F,data_F,clusters,weights)
ret_sill= cl.clustering.silhouette_index(data_F,clusters,weights)
print('2D PCA',NC,ret_pca,ret_sill,sep=',')
cl.distances.reset()
| gpl-3.0 | -6,894,974,080,485,650,000 | 30.042254 | 93 | 0.663793 | false |
gooofy/HTMLTerminal | HTMLTerminal.py | 1 | 10940 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2015 Guenter Bartsch
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
# OpenGL ES based HTML terminal for RaspberryPi
#
import ctypes
import time
import math
import os
import sys
import platform
import datetime
from base64 import b64decode
import traceback
import threading
import cairo
import zmq
import json
import ConfigParser
from os.path import expanduser
from Platform import pi_version
from temperature import measure_temperatures
from logger import ldebug, linfo, lerror, set_loglevel, LOG_DEBUG, LOG_INFO
import robinson
LED_UPDATE = 50
TEMP_UPDATE = 100
def hal_comm (socket, cmd, arg):
reply = None
try:
rq = json.dumps ([cmd, arg])
ldebug ("hal_comm: sending %s" % rq)
socket.send (rq)
# Get the reply.
message = socket.recv()
reply = json.loads(message)
except:
traceback.print_exc()
return reply
def _load_resource (resourcefn):
global socket
return b64decode(hal_comm (socket, 'LOAD_RESOURCE', resourcefn))
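# Illustrative sketch (not part of the original file): fetching a resource
# through the HAL getty server; the filename is a made-up placeholder.
#
#   png_bytes = _load_resource('images/clock.png')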
DRAW_SPEED = 32
SCMD_SET_SOURCE_RGBA = 1
SCMD_PAINT = 2
SCMD_SELECT_FONT_FACE = 3
SCMD_SET_FONT_SIZE = 4
SCMD_MOVE_TO = 5
SCMD_SHOW_TEXT = 6
SCMD_REL_LINE_TO = 7
SCMD_CLOSE_PATH = 8
SCMD_FILL = 9
SCMD_SET_LINE_WIDTH = 10
SCMD_SAVE = 11
SCMD_RESTORE = 12
SCMD_SET_SOURCE = 13
SCMD_CLIP = 14
SCMD_SET_SOURCE_SURFACE = 15
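# NOTE (editorial, an assumption from the call site below): the next two
# helpers take an explicit "self" parameter because they are passed unbound to
# robinson.html(), which appears to invoke them with the HAL instance as the
# first argument.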
def text_extents(self, font_face, font_size, text):
self.ctx.select_font_face (font_face)
self.ctx.set_font_size (font_size)
return self.ctx.text_extents (text)
def font_extents(self, font_face, font_size):
self.ctx.select_font_face (font_face)
self.ctx.set_font_size (font_size)
return self.ctx.font_extents ()
class HAL(object):
def __init__(self, gfx):
self.gfx = gfx
self.ctx = gfx.get_cairo_ctx()
self.width = gfx.width
self.height = gfx.height
self.scene = []
self.coffset = 0
print "HAL.__init__() done."
#
# anim scene support stuff in a cairo context lookalike way
#
def scene_reset(self, counter):
self.scene = []
self.coffset = counter
def set_source_rgba (self, r, g, b, a):
self.scene.append ( (SCMD_SET_SOURCE_RGBA, r, g, b, a) )
def paint (self):
self.scene.append ( (SCMD_PAINT, ) )
def select_font_face (self, font_face):
self.ctx.select_font_face (font_face)
self.scene.append ( (SCMD_SELECT_FONT_FACE, font_face) )
def set_font_size (self, font_size):
self.ctx.set_font_size (font_size)
self.scene.append ( (SCMD_SET_FONT_SIZE, font_size) )
def set_line_width (self, w):
self.scene.append ( (SCMD_SET_LINE_WIDTH, w) )
def move_to (self, x, y):
self.scene.append ( (SCMD_MOVE_TO, x, y) )
def show_text (self, txt):
self.scene.append ( (SCMD_SHOW_TEXT, txt) )
def rel_line_to (self, x, y):
self.scene.append ( (SCMD_REL_LINE_TO, x, y) )
def close_path (self):
self.scene.append ( (SCMD_CLOSE_PATH,) )
def fill (self):
self.scene.append ( (SCMD_FILL,) )
def rectangle (self, x, y, w, h):
self.move_to (x, y)
self.rel_line_to (w, 0)
self.rel_line_to (0, h)
self.rel_line_to (-w, 0)
self.close_path()
def set_source (self, img):
self.scene.append ( (SCMD_SET_SOURCE, img) )
def set_source_surface (self, img, x, y):
self.scene.append ( (SCMD_SET_SOURCE_SURFACE, img, x, y) )
def clip (self):
self.scene.append ( (SCMD_CLIP,) )
def font_extents(self):
return self.ctx.font_extents()
def scene_html (self, html, css):
html = robinson.html(html, css, self.width, _load_resource, text_extents, font_extents, self)
html.render (self)
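    # Illustrative sketch (not part of the original file): record a scene and
    # draw it incrementally; the HTML/CSS snippets are made-up placeholders.
    #
    #   hal.scene_reset(counter)
    #   hal.scene_html('<p>Hello</p>', 'p { color: #fff; }')
    #   hal.scene_draw(counter + 1)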
def scene_draw(self, counter):
#
# cairo
#
self.ctx.set_operator (cairo.OPERATOR_OVER)
drawlimit = (counter - self.coffset) * DRAW_SPEED
# render scene by executing commands
for t in self.scene:
drawlimit -= 1
if drawlimit <= 0:
break
#print "SCMD: %s" % repr(t)
scmd = t[0]
if scmd == SCMD_SET_SOURCE_RGBA:
self.ctx.set_source_rgba (t[1], t[2], t[3], t[4])
elif scmd == SCMD_PAINT:
self.ctx.paint()
elif scmd == SCMD_SELECT_FONT_FACE:
self.ctx.select_font_face (t[1])
elif scmd == SCMD_SET_FONT_SIZE:
self.ctx.set_font_size (t[1])
elif scmd == SCMD_MOVE_TO:
self.ctx.move_to (t[1], t[2])
elif scmd == SCMD_SHOW_TEXT:
self.ctx.show_text (t[1][:drawlimit])
drawlimit -= len(t[1])
elif scmd == SCMD_REL_LINE_TO:
self.ctx.rel_line_to (t[1], t[2])
elif scmd == SCMD_CLOSE_PATH:
self.ctx.close_path()
elif scmd == SCMD_FILL:
self.ctx.fill()
elif scmd == SCMD_SET_LINE_WIDTH:
self.ctx.set_line_width (t[1])
elif scmd == SCMD_SAVE:
self.ctx.save()
elif scmd == SCMD_RESTORE:
self.ctx.restore()
elif scmd == SCMD_SET_SOURCE:
self.ctx.set_source(t[1])
elif scmd == SCMD_CLIP:
self.ctx.clip()
elif scmd == SCMD_SET_SOURCE_SURFACE:
self.ctx.set_source_surface(t[1], t[2], t[3])
self.gfx.swap_buffers()
def update_led():
if USE_X11:
return
dt = datetime.datetime.now()
led.led_write (dt.strftime("%H%M"))
class input_handler (object):
def _process_events(self):
try:
key = self.inp.process_events()
if key is not None:
hal_comm (self.socket, 'KEYPRESS', key)
return True
except:
traceback.print_exc()
lerror("Input handler: EXCEPTION CAUGHT: %s" % traceback.format_exc())
return False
def _input_loop(self):
while True:
ldebug ("Input handler: _linput_loop iter")
if not self._process_events():
time.sleep(0.1)
else:
ldebug ("Input handler: INPUT EVENT HANDLED")
def process_events(self):
"""public function to be called regularly, in effect on non-threaded X11 only"""
global USE_X11
if not USE_X11:
return False
return self._process_events()
def __init__(self, inp):
global USE_X11
self.inp = inp
linfo("Input handler: connecting to server...")
self.context = zmq.Context()
self.socket = self.context.socket(zmq.REQ)
self.socket.connect ("tcp://%s:%s" % (host_getty, port_getty))
if USE_X11:
return
# on rpi we handle input in separate thread for low latency
linfo("Input handler: running on pi -> starting input thread")
self.thread = threading.Thread (target=self._input_loop)
self.thread.setDaemon(True)
self.thread.start()
#
# main
#
USE_X11 = pi_version() == None
linfo ("Using X11: %s " % repr(USE_X11))
#
# load config, set up global variables
#
home_path = expanduser("~")
config = ConfigParser.RawConfigParser()
config.read("%s/%s" % (home_path, ".halrc"))
host_getty = config.get("zmq", "host")
port_getty = config.get("zmq", "port_getty")
port_gettyp = config.get("zmq", "port_gettyp")
sensor_inside = config.get("term", "sensor_inside")
sensor_outside = config.get("term", "sensor_outside")
term_location = config.get("term", "location")
# command line
if len(sys.argv) == 2 and sys.argv[1] == '-d':
set_loglevel(LOG_DEBUG)
else:
set_loglevel(LOG_INFO)
if not USE_X11:
import led
from PiGraphics import PiGraphics
from PiInput import PiInput
gfx = PiGraphics ()
inp = PiInput ()
else:
from X11Graphics import X11Graphics
from X11Input import X11Input
gfx = X11Graphics (name = "HAL 9000")
inp = X11Input (gfx.xDisplay)
#
# zmq connection to getty
#
context = zmq.Context()
socket = context.socket(zmq.REQ)
socket.connect ("tcp://%s:%s" % (host_getty, port_getty))
# subscribe to broadcasts
socket_sub = context.socket(zmq.SUB)
socket_sub.connect ("tcp://%s:%s" % (host_getty, port_gettyp))
# messages we're interested in
socket_sub.setsockopt(zmq.SUBSCRIBE, 'DISPLAY_HTML')
# set up poller so we can do timeouts when waiting for messages
poller = zmq.Poller()
poller.register(socket_sub, zmq.POLLIN)
#
# setup rendering engine + display
#
linfo("Setup rendering engine + display ...")
hal = HAL(gfx)
hal_comm (socket, 'TERM_BOOT', measure_temperatures(term_location, sensor_inside, sensor_outside))
update_led()
#
# input handler
#
linfo("Launching input handler...")
inp_handler = input_handler(inp)
#
# main loop
#
linfo("Starting main loop.")
quit = False
counter = 0
while not quit:
if not inp_handler.process_events():
# check for broadcast messages
socks = poller.poll(10)
if len(socks) > 0:
for s,e in socks:
cmd, data = s.recv().split(' ', 1)
data = json.loads(data)
ldebug("CMD is %s" % cmd)
if cmd == 'DISPLAY_HTML':
ldebug("display html, counter=%d" % counter)
job_html, job_css, job_effect = data
try:
hal.scene_reset (0)
counter = 0 if job_effect == 1 else 32768
hal.scene_html (job_html, job_css)
except:
traceback.print_exc()
hal.scene_draw (counter)
counter += 1
if counter % LED_UPDATE == 0:
update_led()
if counter % TEMP_UPDATE == 0:
hal_comm (socket, 'TEMPERATURE', measure_temperatures(term_location, sensor_inside, sensor_outside))
| lgpl-3.0 | -8,675,675,495,578,616,000 | 24.560748 | 112 | 0.574497 | false |
walchko/pygecko | pygecko/pycore/transport.py | 1 | 1276 | import pickle
try:
import simplejson as json
except ImportError:
import json
"""
dumps -> serialize
loads -> deserialize
Ascii may have to always convert data to a string first: str(data)
For cross language (python/C/C++) you need to keep it simple and
probably just an array is best for the messages.
Pickle ------------------------------------------------------
>>> v=pickle.dumps((1,2,3,"hi"))
>>> v
b'\x80\x03(K\x01K\x02K\x03X\x02\x00\x00\x00hiq\x00tq\x01.'
>>> pickle.loads(v)
(1, 2, 3, 'hi')
Json --------------------------------------------------------
>>> v=json.dumps((1,2,3,"hi"))
>>> v
'[1, 2, 3, "hi"]'
>>> json.loads(v)
[1, 2, 3, 'hi']
"""
class Ascii(object):
"""Simple ASCII format to send info"""
def dumps(self, data):
return "|".join(data).encode('utf-8')
def loads(self, msg):
return msg.decode('utf-8').split("|")
class Json(object):
"""Use json to transport message"""
def dumps(self, data):
return json.dumps(data).encode('utf-8')
def loads(self, msg):
return json.loads(msg.decode('utf-8'))
class Pickle(object):
"""Use pickle to transport message"""
def dumps(self, data):
return pickle.dumps(data)
def loads(self, msg):
return pickle.loads(msg)
| mit | 464,038,483,235,091,140 | 21.785714 | 66 | 0.566614 | false |
jawsper/modularirc | src/modularirc/modules/base.py | 1 | 1738 | import logging
class BaseModule(object):
def __init__(self, manager, has_commands=True, admin_only=False):
self.bot = manager.bot
self.has_commands = has_commands
self.admin_only = admin_only
self.module_name = self.__module__.split('.')[-1]
logging.info('Module {0} __init__'.format(self.module_name))
self.start()
def __del__(self):
logging.info('Module {0} __del__'.format(self.module_name))
self.stop()
def enable(self):
self.start()
def disable(self):
self.stop()
def start(self):
pass
def stop(self):
pass
def get_cmd_list(self, prefix='cmd_'):
return ['!{0}'.format(cmd[len(prefix):]) for cmd in dir(self) if cmd.startswith(prefix)]
def has_cmd(self, cmd, prefix='cmd_'):
return hasattr(self, '{}{}'.format(prefix, cmd))
def get_cmd(self, cmd, prefix='cmd_'):
return getattr(self, '{}{}'.format(prefix, cmd))
def get_admin_cmd_list(self):
return self.get_cmd_list(prefix='admin_cmd_')
def has_admin_cmd(self, cmd):
return self.has_cmd(cmd, prefix='admin_cmd_')
def get_admin_cmd(self, cmd):
return self.get_cmd(cmd, prefix='admin_cmd_')
# methods that directly call the bot
def notice(self, target, message):
self.bot.notice(target, message)
def privmsg(self, target, message):
self.bot.privmsg(target, message)
def get_config(self, key, default=None):
return self.bot.get_config(self.module_name, key, default)
def set_config(self, key, value):
self.bot.set_config(self.module_name, key, value)
def get_module(self, name):
return self.bot.get_module(name)
| mit | 6,261,652,994,644,733,000 | 27.491803 | 96 | 0.601266 | false |
vitriolik/Asteroids2 | asteroids.py | 1 | 3039 | import asteroid
import math
import pygame
from pygame.locals import *
import random
import ship
import sys
'''Pygame constants'''
SCR_WIDTH, SCR_HEIGHT = 640, 480
FPS = 30
'''Misc stff'''
starfield = []
NUM_STARS = 45
asteroids = []
NUM_ASTEROIDS = 3
'''Pygame init'''
pygame.init()
fps_timer = pygame.time.Clock()
screen = pygame.display.set_mode((SCR_WIDTH, SCR_HEIGHT))
player = ship.Ship(SCR_WIDTH, SCR_HEIGHT)
def init_starfield():
global starfield
for i in range(NUM_STARS):
x = random.random() * SCR_WIDTH
y = random.random() * SCR_HEIGHT
starfield.insert(i, (x,y))
init_starfield()
def init_asteroids():
for i in range(NUM_ASTEROIDS):
asteroids.append(asteroid.Asteroid(SCR_WIDTH, SCR_HEIGHT))
init_asteroids()
first_pass = True
while True:
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
sys.exit()
if event.type == KEYDOWN:
if event.key == K_RIGHT or event.key == K_d:
player.rotate_right = True
elif event.key == K_LEFT or event.key == K_a:
player.rotate_left = True
if event.key == K_UP or event.key == K_w:
player.thrusting = True
if event.key == K_SPACE:
player.fire = True
if event.type == KEYUP:
if event.key == K_RIGHT or event.key == K_d:
player.rotate_right = False
if event.key == K_LEFT or event.key == K_a:
player.rotate_left = False
if event.key == K_UP or event.key == K_w:
player.thrusting = False
if event.key == K_SPACE:
player.fire = False
if player.rotate_right:
player.angle += player.ROTATE_SPEED
elif player.rotate_left:
player.angle -= player.ROTATE_SPEED
if player.thrusting:
vel = player.thrust(player.angle)
player.xvel += vel[0]
player.yvel += vel[1]
if math.fabs(player.xvel) > player.MAX_VEL:
player.xvel = math.copysign(player.MAX_VEL, player.xvel)
if math.fabs(player.yvel) > player.MAX_VEL:
player.yvel = math.copysign(player.MAX_VEL, player.yvel)
else:
if math.fabs(player.xvel) > 0.0:
player.xvel += -(math.copysign(player.FRICTION, player.xvel))
else:
player.xvel = 0.0
if math.fabs(player.yvel) > 0.0:
player.yvel += -(math.copysign(player.FRICTION, player.yvel))
else:
player.yvel = 0.0
if player.fire:
player.fire_bullet(player.angle, player.points[0][0], player.points[0][1])
player.fire = False
if len(player.bullets) > 0:
player.update_bullets()
player.rotate(player.centx, player.centy)
player.trans()
player.centx += player.xvel
player.centy += player.yvel
centroid = player.wrap()
player.centx = centroid[0]
player.centy = centroid[1]
# print('xvel = ' + str(xvel) + ', yvel = ' + str(yvel) + ', angle = ' + str(angle))
screen.fill((32,32,32))
for star in starfield:
pygame.draw.rect(screen, (255,255,255), (star[0], star[1], 2, 2))
for bullet in player.bullets:
pygame.draw.rect(screen, (255, 255, 0), (bullet[1], bullet[2], 2, 2))
for each_asteroid in asteroids:
each_asteroid.move()
each_asteroid.render(screen)
player.render(screen)
pygame.display.flip()
fps_timer.tick(FPS)
| gpl-2.0 | 4,837,477,248,523,590,000 | 24.754237 | 85 | 0.670944 | false |
Starbase/StarinetPythonLogger | analogue/readadc.py | 1 | 1579 | import Adafruit_BBIO.ADC as ADC
import logging
## initialise logger
logger = logging.getLogger('analogue')
try:
ADC.setup()
except Exception as e:
print 'Adc failed - did you start as root?', e
pass
try:
ADC.read("P9_40")
except Exception as e:
print 'failed to read adc - did you start as root?', e
pass
def read():
logger.debug("Analogue.readadc called")
_reading = None
try:
x0 = ADC.read("AIN0")
x0 = ADC.read("AIN0")
x1 = ADC.read("AIN1")
x1 = ADC.read("AIN1")
x2 = ADC.read("AIN2")
x2 = ADC.read("AIN2")
x3 = ADC.read("AIN3")
x3 = ADC.read("AIN3")
# print "Reading are 0 = ", x0
# print "Reading are 1 = ", x1
# print "Reading are 2 = ", x2
# print "Reading are 3 = ", x3
b0 = int(x0 * 1800)
b1 = int(x1 * 1800)
b2 = int(x2 * 1800)
b3 = int(x3 * 1800)
# print "Reading are 0 = ", b0
# print "Reading are 1 = ", b1
# print "Reading are 2 = ", b2
# print "Reading are 3 = ", b3
r0 = "{0:04d}".format(b0)
r1 = "{0:04d}".format(b1)
r2 = "{0:04d}".format(b2)
r3 = "{0:04d}".format(b3)
except IOError:
_reading = '0000', '0000', '0000', '0000'
logger.debug("%s %s", "adc IO Error ", e)
except RuntimeError:
_reading = '0000', '0000', '0000', '0000'
logger.debug("%s %s", "adc RuntimeError ", e)
else:
_reading = r0, r1, r2, r3
return _reading
if __name__ == "__main__":
print read()
| gpl-2.0 | -8,467,573,453,376,623,000 | 21.884058 | 58 | 0.51045 | false |
benjello/liam2 | liam2/exprmisc.py | 1 | 22638 | # encoding: utf-8
from __future__ import print_function
from itertools import izip, chain
import os
import random
import numpy as np
import config
from expr import (Variable, UnaryOp, BinaryOp, ComparisonOp, DivisionOp,
LogicalOp, getdtype, coerce_types, expr_eval, as_simple_expr,
as_string, collect_variables,
get_default_array, get_default_vector, FunctionExpr,
always, firstarg_dtype, expr_cache)
from exprbases import (FilteredExpression, CompoundExpression, NumexprFunction,
TableExpression, NumpyChangeArray)
from context import context_length
from importer import load_ndarray, load_table
from utils import PrettyTable, argspec
# TODO: implement functions in expr to generate "Expr" nodes at the python level
# less painful
class Min(CompoundExpression):
def build_expr(self, context, *args):
assert len(args) >= 2
expr1, expr2 = args[:2]
expr = Where(ComparisonOp('<', expr1, expr2), expr1, expr2)
for arg in args[2:]:
expr = Where(ComparisonOp('<', expr, arg), expr, arg)
# args = [Symbol('x%d' % i) for i in range(len(self.args))]
# ctx = {'__entity__': 'x',
# 'x': {'x%d' % i: a for i, a in enumerate(self.args)}}
# where = Symbol('where')
# expr = where(a < b, a, b)
# for arg in self.args[2:]:
# expr = where(expr < arg, expr, arg)
# expr = expr.to_ast(ctx)
# expr1, expr2 = self.args[:2]
# expr = parse('if(a < b, a, b)',
# {'__entity__': 'x', 'x': {'a': expr1, 'b': expr2}})
# for arg in self.args[2:]:
# expr = parse('if(a < b, a, b)',
# {'__entity__': 'x', 'x': {'a': expr, 'b': arg}})
# expr = Where(expr1 < expr2, expr1, expr2)
# for arg in self.args[2:]:
# expr = Where(expr < arg, expr, arg)
# Where(Where(expr1 < expr2, expr1, expr2) < expr3,
# Where(expr1 < expr2, expr1, expr2),
# expr3)
# 3 where, 3 comparisons = 6 op (or 4 if optimised)
#
# Where(Where(Where(expr1 < expr2, expr1, expr2) < expr3,
# Where(expr1 < expr2, expr1, expr2),
# expr3) < expr4,
# Where(Where(expr1 < expr2, expr1, expr2) < expr3,
# Where(expr1 < expr2, expr1, expr2),
# expr3),
# expr4)
# 7 where, 7 comp = 14 op (or 6 if optimised)
# this version scales better in theory (but in practice, it will depend
# if numexpr factorize the common subexpression in the above version
# or not)
# Where(expr1 < expr2 & expr1 < expr3,
# expr1,
# Where(expr2 < expr3, expr2, expr3))
# 2 where, 3 comparisons, 1 and = 6 op
#
# Where(expr1 < expr2 & expr1 < expr3 & expr1 < expr4,
# expr1,
# Where(expr2 < expr3 & expr2 < expr4,
# expr2
# Where(expr3 < expr4,
# expr3,
# expr4)))
# 3 where, 6 comp, 3 and = 12 op
return expr
class Max(CompoundExpression):
def build_expr(self, context, *args):
assert len(args) >= 2
expr1, expr2 = args[:2]
# if(x > y, x, y)
expr = Where(ComparisonOp('>', expr1, expr2), expr1, expr2)
for arg in args[2:]:
# if(e > z, e, z)
expr = Where(ComparisonOp('>', expr, arg), expr, arg)
return expr
class Logit(CompoundExpression):
def build_expr(self, context, expr):
# log(x / (1 - x))
return Log(DivisionOp('/', expr, BinaryOp('-', 1.0, expr)))
class Logistic(CompoundExpression):
def build_expr(self, context, expr):
# 1 / (1 + exp(-x))
return DivisionOp('/', 1.0,
BinaryOp('+', 1.0, Exp(UnaryOp('-', expr))))
class ZeroClip(CompoundExpression):
def build_expr(self, context, expr, expr_min, expr_max):
# if(minv <= x <= maxv, x, 0)
return Where(LogicalOp('&', ComparisonOp('>=', expr, expr_min),
ComparisonOp('<=', expr, expr_max)), expr,
0)
# We do not have to coerce with self.expr_min & expr_max because they
# are only used in the comparisons, not in the result.
dtype = firstarg_dtype
# >>> mi = 1
# >>> ma = 10
# >>> a = np.arange(1e7)
#
# >>> timeit np.clip(a, mi, ma)
# 10 loops, best of 3: 127 ms per loop
# >>> timeit np.clip(a, mi, ma, a)
# 10 loops, best of 3: 26.2 ms per loop
# >>> timeit ne.evaluate('where(a < mi, mi, where(a > ma, ma, a))')
# 10 loops, best of 3: 94.1 ms per loop
class Clip(NumpyChangeArray):
np_func = np.clip
class Sort(NumpyChangeArray):
np_func = np.sort
# ------------------------------------
class Round(NumpyChangeArray):
np_func = np.round
dtype = firstarg_dtype
class Trunc(FunctionExpr):
# TODO: check that the dtype is correct at compilation time (__init__ is too
# early since we do not have the context yet)
# assert getdtype(self.args[0], context) == float
def compute(self, context, expr):
if isinstance(expr, np.ndarray):
return expr.astype(int)
else:
return int(expr)
dtype = always(int)
# ------------------------------------
class Abs(NumexprFunction):
argspec = argspec('expr')
dtype = always(float)
class Log(NumexprFunction):
argspec = argspec('expr')
dtype = always(float)
class Exp(NumexprFunction):
argspec = argspec('expr')
dtype = always(float)
def add_individuals(target_context, children):
target_entity = target_context.entity
id_to_rownum = target_entity.id_to_rownum
array = target_entity.array
num_rows = len(array)
num_birth = len(children)
if config.log_level == "processes":
print("%d new %s(s) (%d -> %d)" % (num_birth, target_entity.name,
num_rows, num_rows + num_birth),
end=' ')
target_entity.array.append(children)
temp_variables = target_entity.temp_variables
for name, temp_value in temp_variables.iteritems():
# FIXME: OUCH, this is getting ugly, I'll need a better way to
# differentiate nd-arrays from "entity" variables
# I guess having the context contain all entities and a separate
# globals namespace should fix this problem. Well, no it would not
# fix the problem by itself, as this would only move the problem
# to the "store" part of Assignment processes which would need to be
# able to differentiate between an "entity temp" and a global temp.
# I think this can be done by inspecting the expressions that generate
# them: no non-aggregated entity var => global temp. It would be nice
# to further distinguish between aggregated entity var and other global
# temporaries to store them in the entity somewhere, but I am unsure
# whether it is possible.
if (isinstance(temp_value, np.ndarray) and
temp_value.shape == (num_rows,)):
extra = get_default_vector(num_birth, temp_value.dtype)
temp_variables[name] = np.concatenate((temp_value, extra))
extra_variables = target_context.entity_data.extra
for name, temp_value in extra_variables.iteritems():
if name == '__globals__':
continue
if isinstance(temp_value, np.ndarray) and temp_value.shape:
extra = get_default_vector(num_birth, temp_value.dtype)
extra_variables[name] = np.concatenate((temp_value, extra))
id_to_rownum_tail = np.arange(num_rows, num_rows + num_birth)
target_entity.id_to_rownum = np.concatenate(
(id_to_rownum, id_to_rownum_tail))
class New(FilteredExpression):
no_eval = ('filter', 'kwargs')
def _initial_values(self, array, to_give_birth, num_birth, default_values):
return get_default_array(num_birth, array.dtype, default_values)
@classmethod
def _collect_kwargs_variables(cls, kwargs):
used_variables = set()
# kwargs are stored as a list of (k, v) pairs
for k, v in kwargs.iteritems():
used_variables.update(collect_variables(v))
return used_variables
def compute(self, context, entity_name=None, filter=None, number=None,
**kwargs):
if filter is not None and number is not None:
# Having neither is allowed, though, as there can be a contextual
# filter. Also, there is no reason to prevent the whole
# population giving birth, even though the usefulness of such
# usage seem dubious.
raise ValueError("new() 'filter' and 'number' arguments are "
"mutually exclusive")
source_entity = context.entity
if entity_name is None:
target_entity = source_entity
else:
target_entity = context.entities[entity_name]
# target context is the context where the new individuals will be
# created
if target_entity is source_entity:
target_context = context
else:
# we do need to copy the data (.extra) because we will insert into
# the entity.array anyway => fresh_data=True
target_context = context.clone(fresh_data=True,
entity_name=target_entity.name)
filter_expr = self._getfilter(context, filter)
if filter_expr is not None:
to_give_birth = expr_eval(filter_expr, context)
num_birth = to_give_birth.sum()
elif number is not None:
to_give_birth = None
num_birth = number
else:
to_give_birth = np.ones(len(context), dtype=bool)
num_birth = len(context)
array = target_entity.array
default_values = target_entity.fields.default_values
id_to_rownum = target_entity.id_to_rownum
num_individuals = len(id_to_rownum)
children = self._initial_values(array, to_give_birth, num_birth,
default_values)
if num_birth:
children['id'] = np.arange(num_individuals,
num_individuals + num_birth)
children['period'] = context.period
used_variables = [v.name for v in
self._collect_kwargs_variables(kwargs)]
if to_give_birth is None:
assert not used_variables
child_context = context.empty(num_birth)
else:
child_context = context.subset(to_give_birth, used_variables,
filter_expr)
for k, v in kwargs.iteritems():
if k not in array.dtype.names:
print("WARNING: {} is unknown, ignoring it!".format(k))
continue
children[k] = expr_eval(v, child_context)
add_individuals(target_context, children)
expr_cache.invalidate(context.period, context.entity_name)
# result is the ids of the new individuals corresponding to the source
# entity
if to_give_birth is not None:
result = np.full(context_length(context), -1, dtype=int)
if source_entity is target_entity:
extra_bools = np.zeros(num_birth, dtype=bool)
to_give_birth = np.concatenate((to_give_birth, extra_bools))
# Note that np.place is a bit faster, but is currently buggy when
# working with columns of structured arrays.
# See https://github.com/numpy/numpy/issues/2462
result[to_give_birth] = children['id']
return result
else:
return None
dtype = always(int)
class Clone(New):
def __init__(self, filter=None, **kwargs):
New.__init__(self, None, filter, None, **kwargs)
def _initial_values(self, array, to_give_birth, num_birth, default_values):
return array[to_give_birth]
class Dump(TableExpression):
no_eval = ('args',)
kwonlyargs = {'filter': None, 'missing': None, 'header': True,
'limit': None}
def compute(self, context, *args, **kwargs):
filter_value = kwargs.pop('filter', None)
missing = kwargs.pop('missing', None)
# periods = kwargs.pop('periods', None)
header = kwargs.pop('header', True)
limit = kwargs.pop('limit', None)
entity = context.entity
if args:
expressions = list(args)
else:
# extra=False because we don't want globals nor "system" variables
# (nan, period, __xxx__)
# FIXME: we should also somehow "traverse" expressions in this case
# too (args is ()) => all keys in the current context
expressions = [Variable(entity, name)
for name in context.keys(extra=False)]
str_expressions = [str(e) for e in expressions]
if 'id' not in str_expressions:
str_expressions.insert(0, 'id')
expressions.insert(0, Variable(entity, 'id'))
id_pos = 0
else:
id_pos = str_expressions.index('id')
# if (self.periods is not None and len(self.periods) and
# 'period' not in str_expressions):
# str_expressions.insert(0, 'period')
# expressions.insert(0, Variable('period'))
# id_pos += 1
columns = []
for expr in expressions:
if filter_value is False:
# dtype does not matter much
expr_value = np.empty(0)
else:
expr_value = expr_eval(expr, context)
if (filter_value is not None and
isinstance(expr_value, np.ndarray) and
expr_value.shape):
expr_value = expr_value[filter_value]
columns.append(expr_value)
ids = columns[id_pos]
if isinstance(ids, np.ndarray) and ids.shape:
numrows = len(ids)
else:
# FIXME: we need a test for this case (no idea how this can happen)
numrows = 1
# expand scalar columns to full columns in memory
# TODO: handle or explicitly reject columns wh ndim > 1
for idx, col in enumerate(columns):
dtype = None
if not isinstance(col, np.ndarray):
dtype = type(col)
elif not col.shape:
dtype = col.dtype.type
if dtype is not None:
# TODO: try using itertools.repeat instead as it seems to be a
# bit faster and would consume less memory (however, it might
# not play very well with Pandas.to_csv)
newcol = np.full(numrows, col, dtype=dtype)
columns[idx] = newcol
if limit is not None:
assert isinstance(limit, (int, long))
columns = [col[:limit] for col in columns]
data = izip(*columns)
table = chain([str_expressions], data) if header else data
return PrettyTable(table, missing)
dtype = always(None)
class Where(NumexprFunction):
funcname = 'if'
argspec = argspec('cond, iftrue, iffalse')
@property
def cond(self):
return self.args[0]
@property
def iftrue(self):
return self.args[1]
@property
def iffalse(self):
return self.args[2]
def as_simple_expr(self, context):
cond = as_simple_expr(self.cond, context)
# filter is stored as an unevaluated expression
context_filter = context.filter_expr
local_ctx = context.clone()
if context_filter is None:
local_ctx.filter_expr = self.cond
else:
# filter = filter and cond
local_ctx.filter_expr = LogicalOp('&', context_filter, self.cond)
iftrue = as_simple_expr(self.iftrue, local_ctx)
if context_filter is None:
local_ctx.filter_expr = UnaryOp('~', self.cond)
else:
# filter = filter and not cond
local_ctx.filter_expr = LogicalOp('&', context_filter,
UnaryOp('~', self.cond))
iffalse = as_simple_expr(self.iffalse, local_ctx)
return Where(cond, iftrue, iffalse)
def as_string(self):
args = as_string((self.cond, self.iftrue, self.iffalse))
return 'where(%s)' % self.format_args_str(args, [])
def dtype(self, context):
assert getdtype(self.cond, context) == bool
return coerce_types(context, self.iftrue, self.iffalse)
def _plus(a, b):
return BinaryOp('+', a, b)
def _mul(a, b):
return BinaryOp('*', a, b)
class ExtExpr(CompoundExpression):
def __init__(self, fname):
data = load_ndarray(os.path.join(config.input_directory, fname))
# TODO: handle more dimensions. For that we need to evaluate a
# different expr depending on the values for the other dimensions
# we will need to either
# 1) create awful expressions with lots of nested if() (X*Y*Z)
# OR
# 2) use groupby (or partition_nd)
# the problem with groupby is that once we have one vector of values
# for each group, we have to recombine them into a single vector
# result = np.empty(context_length(context), dtype=expr.dtype)
# groups = partition_nd(filtered_columns, True, possible_values)
# if not groups:
# return
# contexts = [filtered_context.subset(indices, expr_vars, not_hashable)
# for indices in groups]
# data = [expr_eval(expr, c) for c in contexts]
# for group_indices, group_values in zip(groups, data):
# result[group_indices] = group_values
# 3) use a lookup for each individual & coef (we can only envision
# this during the evaluation of the larger expression if done via numba,
# otherwise it will be too slow
# expr = age * AGECOEF[gender, xyz] + eduach * EDUCOEF[gender, xyz]
# 4) compute the coefs separately
# 4a) via nested if()
# AGECOEF = if(gender, if(workstate == 1, a, if(workstate == 2, b, c)
# if(workstate == 1, a, if(workstate == 2, b, c))
# EDUCOEF = ...
# expr = age * AGECOEF + eduach * EDUCOEF
# 4b) via lookup
# AGECOEF = AGECOEFS[gender, workstate]
# EDUCOEF = EDUCOEFS[gender, workstate]
# expr = age * AGECOEF + eduach * EDUCOEF
# Note, in general, we could make
# EDUCOEFS (sans rien) equivalent to EDUCOEFS[:, :, period] s'il y a
# une dimension period en 3eme position
# et non à EDUCOEFS[gender, workstate, period] car ca pose probleme
# pour l'alignement (on a pas besoin d'une valeur par personne)
# in general, we could let user tell explicitly which fields they want
# to index by (autoindex: period) for periodic
fields_dim = data.dim_names.index('fields')
fields_axis = data.axes[fields_dim]
self.names = list(fields_axis.labels)
self.coefs = list(data)
# needed for compatibility with CompoundExpression
self.args = []
self.kwargs = []
def build_expr(self, context):
res = None
for name, coef in zip(self.names, self.coefs):
# XXX: parse expressions instead of only simple Variable?
if name != 'constant':
# cond_dims = self.cond_dims
# cond_exprs = [Variable(context.entity, d) for d in cond_dims]
# coef = GlobalArray('__xyz')[name, *cond_exprs]
term = _mul(Variable(context.entity, name), coef)
else:
term = coef
if res is None:
res = term
else:
res = _plus(res, term)
return res
class Seed(FunctionExpr):
def compute(self, context, seed=None):
if seed is not None:
seed = long(seed)
print("using fixed random seed: %d" % seed)
else:
print("resetting random seed")
random.seed(seed)
np.random.seed(seed)
class Array(FunctionExpr):
def compute(self, context, expr):
return np.array(expr)
# XXX: is this correct?
dtype = firstarg_dtype
class Load(FunctionExpr):
def compute(self, context, fname, type=None, fields=None):
# TODO: move those checks to __init__
if type is None and fields is None:
raise ValueError("type or fields must be specified")
if type is not None and fields is not None:
raise ValueError("cannot specify both type and fields")
if type is not None:
return load_ndarray(os.path.join(config.input_directory, fname), type)
elif fields is not None:
return load_table(os.path.join(config.input_directory, fname), fields)
functions = {
# element-wise functions
# Min and Max are in aggregates.py.functions (because of the dispatcher)
'abs': Abs,
'clip': Clip,
'zeroclip': ZeroClip,
'round': Round,
'trunc': Trunc,
'exp': Exp,
'log': Log,
'logit': Logit,
'logistic': Logistic,
'where': Where,
# misc
'sort': Sort,
'new': New,
'clone': Clone,
'dump': Dump,
'extexpr': ExtExpr,
'seed': Seed,
'array': Array,
'load': Load,
}
| gpl-3.0 | -134,677,495,349,826,080 | 36.045378 | 82 | 0.546009 | false |
hfaran/slack-export-viewer | slackviewer/formatter.py | 1 | 5541 | import logging
import re
import sys
import emoji
import markdown2
from slackviewer.user import User
# Workaround for ASCII encoding error in Python 2.7
# See https://github.com/hfaran/slack-export-viewer/issues/81
if sys.version_info[0] == 2:
reload(sys)
sys.setdefaultencoding('utf8')
class SlackFormatter(object):
"This formats messages and provides access to workspace-wide data (user and channel metadata)."
# Class-level constants for precompilation of frequently-reused regular expressions
# URL detection relies on http://stackoverflow.com/a/1547940/1798683
_LINK_PAT = re.compile(r"<(https|http|mailto):[A-Za-z0-9_\.\-\/\?\,\=\#\:\@]+\|[^>]+>")
_MENTION_PAT = re.compile(r"<((?:#C|@[UB])\w+)(?:\|([A-Za-z0-9.-_]+))?>")
_HASHTAG_PAT = re.compile(r"(^| )#[A-Za-z][\w\.\-\_]+( |$)")
def __init__(self, USER_DATA, CHANNEL_DATA):
self.__USER_DATA = USER_DATA
self.__CHANNEL_DATA = CHANNEL_DATA
def find_user(self, message):
if message.get("subtype", "").startswith("bot_") and "bot_id" in message and message["bot_id"] not in self.__USER_DATA:
bot_id = message["bot_id"]
logging.debug("bot addition for %s", bot_id)
if "bot_link" in message:
(bot_url, bot_name) = message["bot_link"].strip("<>").split("|", 1)
elif "username" in message:
bot_name = message["username"]
bot_url = None
else:
bot_name = None
bot_url = None
self.__USER_DATA[bot_id] = User({
"user": bot_id,
"real_name": bot_name,
"bot_url": bot_url,
"is_bot": True,
"is_app_user": True
})
user_id = message.get("user") or message.get("bot_id")
if user_id in self.__USER_DATA:
return self.__USER_DATA.get(user_id)
logging.error("unable to find user in %s", message)
def render_text(self, message, process_markdown=True):
message = message.replace("<!channel>", "@channel")
message = message.replace("<!channel|@channel>", "@channel")
message = message.replace("<!here>", "@here")
message = message.replace("<!here|@here>", "@here")
message = message.replace("<!everyone>", "@everyone")
message = message.replace("<!everyone|@everyone>", "@everyone")
# Handle mentions of users, channels and bots (e.g "<@U0BM1CGQY|calvinchanubc> has joined the channel")
message = self._MENTION_PAT.sub(self._sub_annotated_mention, message)
# Handle links
message = self._LINK_PAT.sub(self._sub_hyperlink, message)
# Handle hashtags (that are meant to be hashtags and not headings)
message = self._HASHTAG_PAT.sub(self._sub_hashtag, message)
# Introduce unicode emoji
message = self.slack_to_accepted_emoji(message)
message = emoji.emojize(message, use_aliases=True)
if process_markdown:
# Handle bold (convert * * to ** **)
message = re.sub(r'\*', "**", message)
message = markdown2.markdown(
message,
extras=[
"cuddled-lists",
# This gives us <pre> and <code> tags for ```-fenced blocks
"fenced-code-blocks",
"pyshell"
]
).strip()
# Special handling cases for lists
message = message.replace("\n\n<ul>", "<ul>")
message = message.replace("\n<li>", "<li>")
return message
def slack_to_accepted_emoji(self, message):
"""Convert some Slack emoji shortcodes to more universal versions"""
# Convert -'s to _'s except for the 1st char (preserve things like :-1:)
# For example, Slack's ":woman-shrugging:" is converted to ":woman_shrugging:"
message = re.sub(
r":([^ <>/:])([^ <>/:]+):",
lambda x: ":{}{}:".format(x.group(1), x.group(2).replace("-", "_")),
message
)
# https://github.com/Ranks/emojione/issues/114
message = message.replace(":simple_smile:", ":slightly_smiling_face:")
return message
def _sub_annotated_mention(self, matchobj):
ref_id = matchobj.group(1)[1:] # drop #/@ from the start, we don't care
annotation = matchobj.group(2)
if ref_id.startswith('C'):
mention_format = "<b>#{}</b>"
if not annotation:
channel = self.__CHANNEL_DATA.get(ref_id)
annotation = channel["name"] if channel else ref_id
else:
mention_format = "@{}"
if not annotation:
user = self.__USER_DATA.get(ref_id)
annotation = user.display_name if user else ref_id
return mention_format.format(annotation)
def _sub_hyperlink(self, matchobj):
compound = matchobj.group(0)[1:-1]
if len(compound.split("|")) == 2:
url, title = compound.split("|")
else:
url, title = compound, compound
result = "<a href=\"{url}\">{title}</a>".format(url=url, title=title)
return result
def _sub_hashtag(self, matchobj):
text = matchobj.group(0)
starting_space = " " if text[0] == " " else ""
ending_space = " " if text[-1] == " " else ""
return "{}<b>{}</b>{}".format(
starting_space,
text.strip(),
ending_space
)
| mit | 8,813,842,781,377,737,000 | 38.297872 | 127 | 0.548096 | false |
semplice/quickstart | quickstart/common.py | 1 | 1829 | # -*- coding: utf-8 -*-
#
# quickstart - Refreshing the GUI world.
# Copyright (C) 2013 Eugenio "g7" Paolantonio
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
import signal
from quickstart import events
from gi.repository import Gtk
def quickstart(gui, with_threads=True):
""" common.quickstart() provide a quick way to start the main loop.
It requires the class containing the UI.
If the optional parameter with_threads is True (default), the GObject
threads are automatically initialized.
If the class contains the event dictionary (see events.connect() for details),
events.connect() will be automatically called.
This method also properly handles KeyboardInterrupts.
Usage example:
class GUI:
events = {
"clicked": ("button1","button2","button3"),
"destroy": ("main_window",)
}
...
common.quickstart(GUI, with_threads=True)
"""
signal.signal(signal.SIGINT, signal.SIG_DFL)
clss = gui()
if hasattr(clss, "events"):
events.connect(clss)
# Handle threads?
if with_threads:
from gi.repository import GObject, Gdk
GObject.threads_init()
Gdk.threads_init()
Gtk.main()
| lgpl-2.1 | -5,763,135,811,984,010,000 | 27.578125 | 80 | 0.728267 | false |
mruwnik/magnolia | src/magnolia/math/geometry.py | 1 | 11252 | import math
from typing import List, Tuple, Iterable
class FrontError(ValueError):
"""Raised when a valid front can't be constructed."""
class Sphere(object):
"""Represent a sphere in cylindrical coords."""
def __init__(self, angle=0, height=0, radius=1, scale=3, **kwargs):
"""
Initialise the sphere.
:param float angle: the angle by which the sphere is rotated around the cylinder
:param float height: the height of the sphere on the cylinder
:param float radius: the radius of the cylinder
:param float scale: the radius of the sphere
"""
self.angle = angle
self.height = height
self.radius = radius
self.scale = scale
super().__init__(**kwargs)
@staticmethod
def cyl_to_cart(angle, height, radius):
"""Convert the given cylinderic point to a cartesian one."""
x = math.sin(angle) * radius
z = math.cos(angle) * radius
return (x, height, z)
@property
def offset(self):
"""Calculate the buds offset from the meristems origin.
The bud is positioned on a simple circle on the XZ axis, so
simple trigonometry does the trick.
"""
return self.cyl_to_cart(self.angle, self.height, self.radius)
@staticmethod
def norm_angle(angle):
"""Normalize the given angle (wrapping around π)."""
return norm_angle(angle)
def angle2x(self, angle):
"""Return the given angle in pseudo 2D coordinates.
In these coordinates, x is the bud's angle, while y is its height. To make calculations
work, the angle has to be scaled by the radius. Otherwise 2 buds with the same angle would
have the same x value, regardless of their radius. This would mean that there would be no way
to e.g. check which is wider.
"""
return self.norm_angle(angle) * self.radius
def distance(self, bud):
"""Calculate the distance between this bud and the provided one."""
return math.sqrt(self.angle2x(self.angle - bud.angle)**2 + (self.height - bud.height)**2)
def opposite(self, b1, b2):
"""Check whether the given buds are on the opposite sides of this bud.
This checks to a precision of 1% of the radius.
"""
angles_diff = abs(self.angle2x(b1.angle - self.angle) + self.angle2x(b2.angle - self.angle))
height_diff = abs(abs(b1.height + b2.height)/2 - abs(self.height))
return angles_diff < self.radius / 100 and height_diff < self.radius / 100
def bounds_test(self, angle, h, offset):
"""Check whether the provided point lies in this bud.
This is a 2D test, for use when a meristem is rolled out.
"""
dist = self.angle2x(angle / self.radius - offset[0] - self.angle)**2 + (h - self.height)**2
if dist < self.scale**2:
return math.sqrt(dist)
return -1
def __repr__(self):
return '<Sphere (angle=%s, height=%s, radius=%s, scale=%s)' % (self.angle, self.height, self.radius, self.scale)
def by_height(circles: List[Sphere], reversed=True):
"""Return the given circles sorted by height."""
return sorted(circles, key=lambda c: c.height, reverse=reversed)
def by_angle(circles: List[Sphere], reversed=True):
"""Return the given circles sorted by angle."""
return sorted(circles, key=lambda c: c.angle, reverse=reversed)
def approx_equal(a: float, b: float, diff=0.001) -> bool:
"""Check whether the 2 values are appropriately equal."""
return abs(a - b) < diff
def norm_angle(angle):
"""Normalize the given angle (wrapping around π)."""
return ((angle + math.pi) % (2 * math.pi) - math.pi)
def dot_product(v1: Iterable, v2: Iterable) -> float:
"""Calculate the dot products of the provided vectors.
If the vectors have different lengths, the extra values will be discarded from the longer one.
"""
return sum(i*j for i, j in zip(v1, v2))
def vect_diff(v1: Iterable, v2: Iterable) -> List[float]:
"""Subtract the provided vectors from each other.
If the vectors have different lengths, the extra values will be discarded from the longer one.
"""
return [i - j for i, j in zip(v1, v2)]
def vect_mul(v: Iterable, scalar: float) -> List[float]:
"""Multiply the vector by the scalar."""
return [i * scalar for i in v]
def cross_product(v1, v2):
"""Return the cross product of the provided 3D vectors."""
ax, ay, az = v1
bx, by, bz = v2
i = ay * bz - az * by
j = az * bx - ax * bz
k = ax * by - ay * bx
return (i, j, k)
def length(vector: Iterable) -> float:
"""Return the length of the provided vector."""
return math.sqrt(sum(i**2 for i in vector))
def cylin_distance(p1: Tuple[float, float], p2: Tuple[float, float]) -> float:
"""Calculate the distance between the given points, in cylinder coords."""
return length((norm_angle(p1[0] - p2[0]), p1[1] - p2[1]))
def in_cone_checker(tip, dir_vec, r, h):
"""
Return a function that checks whether a bud is in the provided cone.
The `r` and `h` params describe a sample base - in reality the cone is assumed to be
infinite. For use in occlusion checks, `tip` should be where the inner tangents of the
checked bud meet, `dir_vec` should be the vector between them, while `r` and `h` should
be the scale and height (respectably) of the occluding bud.
:param tuple tip: the tip of the cone
:param tuple dir_vec: the direction vector of the cone
:param float r: a radius at h that describes the cone
:param float h: a height along the axis which along with `r` describes the cone
"""
tx, ty, tz = tip
def in_cone(bud):
"""Return whether the given bud totally fits in the cone."""
diff = (norm_angle(bud.angle - tx), bud.height - ty, bud.radius - tz)
cone_dist = dot_product(diff, dir_vec)
if cone_dist < 0:
return False
radius = r * cone_dist / h
orth_dist = length(vect_diff(diff, vect_mul(dir_vec, cone_dist)))
return orth_dist < radius
return in_cone
def first_gap(circles: List[Sphere], radius: float) -> Tuple[float, float]:
"""
Return the first available gap that will fit a circle of the given radius.
This simply loops around the circles, sorted by x, and whenever the distance between
2 circles is larger than 2*radius it deems that it's found a hole and returns the (x,y) that lies
between the 2 circles.
"""
circles = by_angle(circles)
for c1, c2 in zip(circles, circles[1:] + [circles[0]]):
dist = abs(norm_angle(c1.angle - c2.angle))
if c1.scale + c2.scale + 2*radius < dist:
return norm_angle(c1.angle - dist/2), 0
def flat_circle_overlap(
b1: Tuple[float, float, float], b2: Tuple[float, float, float], r: float) -> Tuple[float, float]:
"""Return the higher overlap of 2 circles that are on the same height."""
x1, y1, r1 = b1
x2, y2, r2 = b2
# there are 2 possible intersections, both with the same x, but with different ys
x3 = -((r + r1)**2 - (r + r2)**2)/(2 * (x1 + x2))
y3 = math.sqrt((r + r1)**2 - (x3 - x1))
return norm_angle(x3), max(y1 + y3, y1 - y3)
def are_intersecting(c1: Sphere, c2: Sphere) -> bool:
"""Check whether the 2 provided circles intersect,"""
return c1.distance(c2) < c1.scale + c2.scale - 0.0000001
def check_collisions(circle: Sphere, to_check: List[Sphere]) -> bool:
"""Check whether the given circle overlaps with any in the provided list."""
return any(are_intersecting(circle, c) for c in to_check)
def closest_circle(b1: Sphere, b2: Sphere, radius: float) -> Sphere:
"""
Return the angle and height of a bud with the given radius as close a possible to the given buds.
n *
/ \
/ phi \
n_b1 / \ n_b2
/ \
/ \
b1 * -------------------------* b2
b1_b2
This can be reduced to the intersection of 2 circles at b1 and b2, with radiuses of
b1,radius + radius and b2.radius + radius
"""
x1, y1, r1 = b1.angle, b1.height, b1.scale
x2, y2, r2 = b2.angle, b2.height, b2.scale
n_b1 = r1 + radius
n_b2 = r2 + radius
# the dist between the 2 buds should be r1 + r2, but do it manually just in case
b1_b2 = b1.distance(b2)
# check if the circles are in the same place
if approx_equal(b1_b2, 0):
return None
a = (n_b1**2 - n_b2**2 + b1_b2**2) / (2 * b1_b2)
if n_b1 < abs(a):
h = 0
else:
h = math.sqrt(n_b1**2 - a**2)
midx = x1 + a * norm_angle(x2 - x1)/b1_b2
midy = y1 + a * (y2 - y1)/b1_b2
x3_1 = midx + h*(y2 - y1)/b1_b2
y3_1 = midy - h*norm_angle(x2 - x1)/b1_b2
x3_2 = midx - h*(y2 - y1)/b1_b2
y3_2 = midy + h*norm_angle(x2 - x1)/b1_b2
if y3_1 > y3_2:
return Sphere(norm_angle(x3_1), y3_1, scale=radius)
return Sphere(norm_angle(x3_2), y3_2, scale=radius)
def highest_left(circles: List[Sphere], checked: Sphere) -> Sphere:
for c in circles:
if norm_angle(c.angle - checked.angle) > 0:
return c
raise FrontError
def touching(circle: Sphere, circles: Iterable[Sphere], precision: float=0.1) -> List[Sphere]:
"""Return all circles that are touching the provided one."""
return [c for c in circles if circle.distance(c) < c.scale + circle.scale + precision and c != circle]
def front(circles: List[Sphere]) -> List[Sphere]:
"""
Given a list of circles, return their current front.
From https://doi.org/10.5586/asbp.3533: "a front is a zigzagging ring of
primordia encircling the cylinder, each primordium being tangent to one on its left and
one on its right. Moreover, any primordium above the front must be higher than any
primordium of the front."
:param list circles: the collection of circles to be checked
:returns: the front
"""
if not circles:
return []
# sort the circles by height
circles = by_height(circles)
highest = circles[0]
seen = set()
def left(checked):
neighbours = touching(checked, circles)
c = highest_left(neighbours, checked)
if c and c != highest and c not in seen:
# sometimes a proper front can't be constructed coz a bud has no left neighbours
# so to stop infinite recursions, stop when a bud is found more than once
seen.add(c)
return [checked] + left(c)
return [checked]
try:
return left(highest)
except FrontError:
return None
def cycle_ring(ring: List[Sphere], n: int) -> List[Sphere]:
"""
Rotate the given ring of circles by n circles.
This function assumes that the ring is sorted by angle.
"""
if n > 1:
ring = cycle_ring(ring, n - 1)
last = ring[-1]
first = ring[0]
if abs(last.angle - first.angle) > math.pi:
first = Sphere(last.angle - 2 * math.pi, last.height, scale=last.scale)
else:
first = last
return [first] + ring[:-1]
| gpl-3.0 | -7,572,957,586,726,389,000 | 32.885542 | 120 | 0.614311 | false |
j127/caster | caster/asynch/auto_com/language.py | 1 | 1206 | from caster.lib import utilities, settings, ccr, control
AUTO_ENABLED_LANGUAGE = None
LAST_EXTENSION = None
def toggle_language():
global AUTO_ENABLED_LANGUAGE, LAST_EXTENSION
filename, folders, title = utilities.get_window_title_info()
extension = None
if filename != None:
extension = "." + filename.split(".")[-1]
if LAST_EXTENSION != extension:
message=None
if extension != None and extension in settings.SETTINGS["ccr"]["registered_extensions"]:
chosen_extension=settings.SETTINGS["ccr"]["registered_extensions"][extension]
ccr.set_active_command(1, chosen_extension)
AUTO_ENABLED_LANGUAGE = chosen_extension
LAST_EXTENSION = extension
message="Enabled '"+chosen_extension+"'"
elif AUTO_ENABLED_LANGUAGE != None:
message="Disabled '"+AUTO_ENABLED_LANGUAGE+"'"
ccr.set_active_command(0, AUTO_ENABLED_LANGUAGE)
AUTO_ENABLED_LANGUAGE = None
if message!=None:
if settings.SETTINGS["miscellaneous"]["status_window_enabled"]:
control.nexus().comm.get_com("status").text(message)
LAST_EXTENSION = extension
| lgpl-3.0 | -2,507,564,527,367,156,000 | 39.2 | 96 | 0.639303 | false |
jmvrbanac/Specter | specter/util.py | 1 | 7291 | import ast
import inspect
import re
import itertools
import sys
import six
from specter.vendor.ast_decompiler import decompile
try:
import __builtin__
except ImportError:
import builtins as __builtin__
CAPTURED_TRACEBACKS = []
class ExpectParams(object):
types_with_args = [
'equal',
'almost_equal',
'be_greater_than',
'be_less_than',
'be_almost_equal',
'be_a',
'be_an_instance_of',
'be_in',
'contain',
'raise_a'
]
def __init__(self, line, module):
def distance(node):
return abs(node.lineno - line)
tree = ast.parse(inspect.getsource(module))
# Walk the tree until we get the expression we need
expect_exp = None
closest_exp = None
for node in ast.walk(tree):
if isinstance(node, ast.Expr):
if node.lineno == line:
expect_exp = node
break
if (closest_exp is None or
distance(node) < distance(closest_exp)):
closest_exp = node
self.expect_exp = expect_exp or closest_exp
@property
def cmp_call(self):
if self.expect_exp:
return self.expect_exp.value
@property
def expect_call(self):
if self.cmp_call:
return self.cmp_call.func.value.value
@property
def cmp_type(self):
if self.cmp_call:
return self.cmp_call.func.attr
@property
def cmp_arg(self):
arg = None
if self.cmp_type in self.types_with_args:
arg = decompile(self.cmp_call.args[0])
return arg
@property
def expect_type(self):
return self.expect_call.func.id
@property
def expect_arg(self):
if self.expect_call:
return decompile(self.expect_call.args[0])
def convert_camelcase(input_str):
if input_str is None:
return ''
camelcase_tags = '((?<=[a-z0-9])[A-Z]|(?!^)[A-Z](?=[a-z]))'
return re.sub(camelcase_tags, r' \1', input_str)
def get_module_and_line(use_child_attr=None):
last_frame = inspect.currentframe()
steps = 2
for i in range(steps):
last_frame = last_frame.f_back
self = module = last_frame.f_locals['self']
# Use an attr instead of self
if use_child_attr:
module = getattr(self, use_child_attr, self)
return last_frame.f_lineno, inspect.getmodule(type(module))
def get_source_from_frame(frame):
self = frame.f_locals.get('self', None)
cls = frame.f_locals.get('cls', None)
# for old style classes, getmodule(type(self)) returns __builtin__, and
# inspect.getfile(__builtin__) throws an exception
insp_obj = inspect.getmodule(type(self) if self else cls) or frame.f_code
if insp_obj == __builtin__:
insp_obj = frame.f_code
line_num_modifier = 0
if inspect.iscode(insp_obj):
line_num_modifier -= 1
module_path = inspect.getfile(insp_obj)
source_lines = inspect.getsourcelines(insp_obj)
return source_lines[0], module_path, source_lines[1] + line_num_modifier
def get_all_tracebacks(tb, tb_list=[]):
tb_list.append(tb)
next_tb = getattr(tb, 'tb_next')
if next_tb:
tb_list = get_all_tracebacks(next_tb, tb_list)
return tb_list
def get_numbered_source(lines, line_num, starting_line=0):
try:
center = (line_num - starting_line) - 1
start = center - 2 if center - 2 > 0 else 0
end = center + 2 if center + 2 <= len(lines) else len(lines)
orig_src_lines = [line.rstrip('\n') for line in lines[start:end]]
line_range = range(start + 1 + starting_line, end + 1 + starting_line)
nums_and_source = zip(line_range, orig_src_lines)
traceback_lines = []
for num, line in nums_and_source:
prefix = '--> ' if num == line_num else ' '
traceback_lines.append('{0}{1}: {2}'.format(prefix, num, line))
return traceback_lines
except Exception as e:
return ['Error finding traceback!', e]
def get_real_last_traceback(exception):
""" An unfortunate evil... All because Python's traceback cannot
determine where my executed code is coming from...
"""
traceback_blocks = []
_n, _n, exc_traceback = sys.exc_info()
tb_list = get_all_tracebacks(exc_traceback)[1:]
# Remove already captured tracebacks
# TODO(jmv): This must be a better way of doing this. Need to revisit.
tb_list = [tb for tb in tb_list if tb not in CAPTURED_TRACEBACKS]
CAPTURED_TRACEBACKS.extend(tb_list)
for traceback in tb_list:
lines, path, line_num = get_source_from_frame(traceback.tb_frame)
traceback_lines = get_numbered_source(lines, traceback.tb_lineno,
line_num)
traceback_lines.insert(0, ' - {0}'.format(path))
traceback_lines.insert(1, ' ------------------')
traceback_lines.append(' ------------------')
traceback_blocks.append(traceback_lines)
traced_lines = ['Error Traceback:']
traced_lines.extend(itertools.chain.from_iterable(traceback_blocks))
traced_lines.append(' - Error | {0}: {1}'.format(
type(exception).__name__, exception))
return traced_lines
def find_by_names(names, cases):
selected_cases = {}
for case_id, case in six.iteritems(cases):
if case.name in names or case.pretty_name in names:
selected_cases[case_id] = case
return selected_cases
def children_with_tests_named(names, describe):
children = []
for child in describe.describes:
found = find_by_names(names, child.cases)
if len(found) > 0:
children.append(child)
children.extend(children_with_tests_named(names, child))
return children
def find_by_metadata(meta, cases):
selected_cases = {}
for case_id, case in six.iteritems(cases):
matched_keys = set(meta.keys()) & set(case.metadata.keys())
for key in matched_keys:
if meta.get(key) == case.metadata.get(key):
selected_cases[case_id] = case
return selected_cases
def children_with_tests_with_metadata(meta, describe):
children = []
for child in describe.describes:
found = find_by_metadata(meta, child.cases)
if len(found) > 0:
children.append(child)
children.extend(children_with_tests_with_metadata(meta, child))
return children
def extract_metadata(case_func):
# Handle metadata decorator
metadata = {}
if 'DECORATOR_ONCALL' in case_func.__name__:
try:
decorator_data = case_func()
case_func = decorator_data[0]
metadata = decorator_data[1]
except Exception as e:
# Doing this old school to avoid dependancy conflicts
handled = ['TestIncompleteException', 'TestSkippedException']
if type(e).__name__ in handled:
case_func = e.func
metadata = e.other_data.get('metadata')
else:
raise e
return case_func, metadata
def remove_empty_entries_from_dict(input_dict):
return {k: v for k, v in six.iteritems(input_dict) if v is not None}
| mit | -3,008,437,385,475,753,000 | 28.399194 | 78 | 0.597449 | false |
minghuadev/hulahop | python/webview.py | 1 | 11188 | # Copyright (C) 2007, Red Hat, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
import logging
import gobject
import gtk
from hulahop import _hulahop
import xpcom
from xpcom import components
from xpcom.components import interfaces
from xpcom.nsError import *
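# Minimal embedding sketch (assumes a writable Gecko profile directory and
# that hulahop.startup() runs before this module is imported):
#
#     import gtk
#     import hulahop
#     hulahop.startup('/tmp/gecko-profile')
#     from hulahop.webview import WebView
#
#     window = gtk.Window()
#     web_view = WebView()
#     window.add(web_view)
#     web_view.show()
#     window.show()
#     web_view.load_uri('http://www.example.com')
#     gtk.main()

# _Chrome implements the embedder-side XPCOM interfaces that Gecko calls
# back into: window chrome, site-window geometry and visibility, progress
# notifications, window provisioning and interface discovery.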
class _Chrome:
_com_interfaces_ = interfaces.nsIWebBrowserChrome, \
interfaces.nsIWebBrowserChrome2, \
interfaces.nsIEmbeddingSiteWindow, \
interfaces.nsIWebProgressListener, \
interfaces.nsIWindowProvider, \
interfaces.nsIInterfaceRequestor
def __init__(self, web_view):
self.web_view = web_view
self.title = ''
self._modal = False
self._chrome_flags = interfaces.nsIWebBrowserChrome.CHROME_ALL
self._visible = False
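    # nsIWindowProvider
    # Load "_blank" targets in the opener's window instead of letting Gecko
    # create a popup; returning no window for other names makes Gecko open
    # a new one.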
def provideWindow(self, parent, flags, position_specified,
size_specified, uri, name, features):
if name == "_blank":
return parent, False
else:
return None, False
# nsIWebBrowserChrome
def destroyBrowserWindow(self):
logging.debug("nsIWebBrowserChrome.destroyBrowserWindow")
if self._modal:
self.exitModalEventLoop(0)
self.web_view.get_toplevel().destroy()
def exitModalEventLoop(self, status):
        logging.debug("nsIWebBrowserChrome.exitModalEventLoop: %r" % status)
        # Disabled: re-enabling the parent window is not implemented here.
        # if self._continue_modal_loop:
        #     self.enable_parent(True)
if self._modal:
self._continue_modal_loop = False
self._modal = False
self._modal_status = status
#self.web_view.get_toplevel().grab_remove()
def isWindowModal(self):
logging.debug("nsIWebBrowserChrome.isWindowModal")
return self._modal
def setStatus(self, statusType, status):
#logging.debug("nsIWebBrowserChrome.setStatus")
self.web_view._set_status(status.encode('utf-8'))
def showAsModal(self):
logging.debug("nsIWebBrowserChrome.showAsModal")
self._modal = True
self._continue_modal_loop = True
self._modal_status = None
        # TODO: disable the parent window while the dialog is modal; this is
        # carried over from the C++ embedding code's EnableParent(PR_FALSE).
#self.web_view.get_toplevel().grab_add()
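        # Gecko expects showAsModal() to block until exitModalEventLoop() is
        # called, so spin the current XPCOM thread's event queue by hand,
        # keeping the JS context stack balanced around the nested loop.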
cls = components.classes["@mozilla.org/thread-manager;1"]
thread_manager = cls.getService(interfaces.nsIThreadManager)
current_thread = thread_manager.currentThread
self.web_view.push_js_context()
while self._continue_modal_loop:
processed = current_thread.processNextEvent(True)
if not processed:
break
self.web_view.pop_js_context()
self._modal = False
self._continue_modal_loop = False
return self._modal_status
def sizeBrowserTo(self, cx, cy):
logging.debug("nsIWebBrowserChrome.sizeBrowserTo: %r %r" % (cx, cy))
self.web_view.get_toplevel().resize(cx, cy)
self.web_view.type = WebView.TYPE_POPUP
# nsIWebBrowserChrome2
def setStatusWithContext(self, statusType, statusText, statusContext):
self.web_view._set_status(statusText.encode('utf-8'))
# nsIEmbeddingSiteWindow
def getDimensions(self, flags):
logging.debug("nsIEmbeddingSiteWindow.getDimensions: %r" % flags)
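        # DIM_FLAGS_POSITION and the DIM_FLAGS_SIZE_* flags can be combined;
        # forward each request to the matching nsIBaseWindow getter(s).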
base_window = self.web_view.browser.queryInterface(interfaces.nsIBaseWindow)
if (flags & interfaces.nsIEmbeddingSiteWindow.DIM_FLAGS_POSITION) and \
((flags & interfaces.nsIEmbeddingSiteWindow.DIM_FLAGS_SIZE_INNER) or \
(flags & interfaces.nsIEmbeddingSiteWindow.DIM_FLAGS_SIZE_OUTER)):
return base_window.getPositionAndSize()
elif flags & interfaces.nsIEmbeddingSiteWindow.DIM_FLAGS_POSITION:
x, y = base_window.getPosition()
return (x, y, 0, 0)
elif (flags & interfaces.nsIEmbeddingSiteWindow.DIM_FLAGS_SIZE_INNER) or \
(flags & interfaces.nsIEmbeddingSiteWindow.DIM_FLAGS_SIZE_OUTER):
width, height = base_window.getSize()
return (0, 0, width, height)
else:
raise xpcom.Exception('Invalid flags: %r' % flags)
    def setDimensions(self, flags, x, y, cx, cy):
        # Only logged for now; moving/resizing the embedder window on
        # Gecko's request is not implemented.
        logging.debug("nsIEmbeddingSiteWindow.setDimensions: %r" % flags)
def setFocus(self):
logging.debug("nsIEmbeddingSiteWindow.setFocus")
base_window = self.web_view.browser.queryInterface(interfaces.nsIBaseWindow)
base_window.setFocus()
def get_title(self):
logging.debug("nsIEmbeddingSiteWindow.get_title: %r" % self.title)
return self.title
def set_title(self, title):
logging.debug("nsIEmbeddingSiteWindow.set_title: %r" % title)
self.title = title
self.web_view._notify_title_changed()
def get_webBrowser(self):
return self.web_view.browser
def get_chromeFlags(self):
return self._chrome_flags
def set_chromeFlags(self, flags):
self._chrome_flags = flags
def get_visibility(self):
logging.debug("nsIEmbeddingSiteWindow.get_visibility: %r" % self._visible)
# See bug https://bugzilla.mozilla.org/show_bug.cgi?id=312998
# Work around the problem that sometimes the window is already visible
# even though mVisibility isn't true yet.
visibility = self.web_view.props.visibility
mapped = self.web_view.flags() & gtk.MAPPED
return visibility or (not self.web_view.is_chrome and mapped)
def set_visibility(self, visibility):
logging.debug("nsIEmbeddingSiteWindow.set_visibility: %r" % visibility)
if visibility == self.web_view.props.visibility:
return
self.web_view.props.visibility = visibility
# nsIWebProgressListener
def onStateChange(self, web_progress, request, state_flags, status):
if (state_flags & interfaces.nsIWebProgressListener.STATE_STOP) and \
(state_flags & interfaces.nsIWebProgressListener.STATE_IS_NETWORK):
if self.web_view.is_chrome:
self.web_view.dom_window.sizeToContent()
def onStatusChange(self, web_progress, request, status, message): pass
def onSecurityChange(self, web_progress, request, state): pass
def onProgressChange(self, web_progress, request, cur_self_progress, max_self_progress, cur_total_progress, max_total_progress): pass
def onLocationChange(self, web_progress, request, location): pass
# nsIInterfaceRequestor
def queryInterface(self, uuid):
if uuid == interfaces.nsIDOMWindow:
return self.web_view.dom_window
        if uuid not in self._com_interfaces_:
# Components.returnCode = Cr.NS_ERROR_NO_INTERFACE;
logging.warning('Interface %s not implemented by this instance: %r' % (uuid, self))
return None
return xpcom.server.WrapObject(self, uuid)
def getInterface(self, uuid):
result = self.queryInterface(uuid)
if not result:
# delegate to the nsIWebBrowser
requestor = self.web_view.browser.queryInterface(interfaces.nsIInterfaceRequestor)
try:
result = requestor.getInterface(uuid)
except xpcom.Exception:
logging.warning('Interface %s not implemented by this instance: %r' % (uuid, self.web_view.browser))
result = None
return result
class WebView(_hulahop.WebView):
TYPE_WINDOW = 0
TYPE_POPUP = 1
__gproperties__ = {
'title' : (str, None, None, None,
gobject.PARAM_READABLE),
'status' : (str, None, None, None,
gobject.PARAM_READABLE),
'visibility' : (bool, None, None, False,
gobject.PARAM_READWRITE)
}
def __init__(self):
_hulahop.WebView.__init__(self)
self.type = WebView.TYPE_WINDOW
self.is_chrome = False
chrome = _Chrome(self)
self._chrome = xpcom.server.WrapObject(chrome, interfaces.nsIEmbeddingSiteWindow)
weak_ref = xpcom.client.WeakReference(self._chrome)
self.browser.containerWindow = self._chrome
listener = xpcom.server.WrapObject(chrome, interfaces.nsIWebProgressListener)
weak_ref2 = xpcom.client.WeakReference(listener)
# FIXME: weak_ref2._comobj_ looks quite a bit ugly.
self.browser.addWebBrowserListener(weak_ref2._comobj_,
interfaces.nsIWebProgressListener)
self._status = ''
self._first_uri = None
self._visibility = False
def do_setup(self):
_hulahop.WebView.do_setup(self)
if self._first_uri:
self.load_uri(self._first_uri)
def _notify_title_changed(self):
self.notify('title')
def _set_status(self, status):
self._status = status
self.notify('status')
def do_get_property(self, pspec):
if pspec.name == 'title':
return self._chrome.title
elif pspec.name == 'status':
return self._status
elif pspec.name == 'visibility':
return self._visibility
def do_set_property(self, pspec, value):
if pspec.name == 'visibility':
self._visibility = value
def get_window_root(self):
return _hulahop.WebView.get_window_root(self)
def get_browser(self):
return _hulahop.WebView.get_browser(self)
def get_doc_shell(self):
requestor = self.browser.queryInterface(interfaces.nsIInterfaceRequestor)
return requestor.getInterface(interfaces.nsIDocShell)
def get_web_progress(self):
return self.doc_shell.queryInterface(interfaces.nsIWebProgress)
def get_web_navigation(self):
return self.browser.queryInterface(interfaces.nsIWebNavigation)
def get_dom_window(self):
return self.browser.contentDOMWindow
def load_uri(self, uri):
try:
self.web_navigation.loadURI(
uri, interfaces.nsIWebNavigation.LOAD_FLAGS_NONE,
None, None, None)
except xpcom.Exception:
self._first_uri = uri
dom_window = property(get_dom_window)
browser = property(get_browser)
window_root = property(get_window_root)
doc_shell = property(get_doc_shell)
web_progress = property(get_web_progress)
web_navigation = property(get_web_navigation)
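# Illustrative usage sketch (assumption, not part of the original module):
# requires an initialized hulahop/XULRunner environment and a running gtk
# main loop, e.g. hulahop.startup() followed by packing the view into a
# gtk.Window.
#
#     view = WebView()
#     view.show()
#     view.load_uri('http://example.com')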
| lgpl-2.1 | 6,456,294,321,182,294,000 | 36.169435 | 137 | 0.639524 | false |
josircg/raizcidadanista | raizcidadanista/BruteBuster/decorators.py | 1 | 2284 | # BruteBuster by Cyber Security Consulting(www.csc.bg)
"""Decorators used by BruteBuster"""
from BruteBuster.models import FailedAttempt
from BruteBuster.middleware import get_request
def protect_and_serve(auth_func):
"""
    This is the main code of the application. It is meant to replace the
    authenticate() function with one that records failed login attempts and
    blocks logins if a threshold is reached.
"""
if hasattr(auth_func, '__BB_PROTECTED__'):
# avoiding multiple decorations
return auth_func
def decor(*args, **kwargs):
"""
This is the wrapper that gets installed around the default
authentication function.
"""
user = kwargs.get('username', '')
if not user:
raise ValueError('BruteBuster cannot work with authenticate functions that do not include "username" as an argument')
request = get_request()
if request:
# try to get the remote address from thread locals
IP_ADDR = request.META.get('HTTP_X_FORWARDED_FOR', request.META.get('REMOTE_ADDR', '')).split(', ')[0]
else:
IP_ADDR = None
try:
fa = FailedAttempt.objects.filter(username=user, IP=IP_ADDR)[0]
if fa.recent_failure():
if fa.too_many_failures():
# we block the authentication attempt because
# of too many recent failures
fa.failures += 1
fa.save()
return None
else:
# the block interval is over, so let's start
# with a clean sheet
fa.failures = 0
fa.save()
except IndexError:
# No previous failed attempts
fa = None
result = auth_func(*args, **kwargs)
if result:
# the authentication was successful - we do nothing
# special
return result
# the authentication was kaput, we should record this
fa = fa or FailedAttempt(username=user, IP=IP_ADDR, failures=0)
fa.failures += 1
fa.save()
# return with unsuccessful auth
return None
decor.__BB_PROTECTED__ = True
return decor
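# Minimal self-contained sketch (illustrative only, not part of BruteBuster):
# the same "decorate at most once" guard used by protect_and_serve, shown
# without the Django model and middleware dependencies. All names below are
# hypothetical.
def _sketch_protect(auth_func):
    if hasattr(auth_func, '__BB_PROTECTED__'):
        return auth_func
    def decor(*args, **kwargs):
        # recording/blocking logic would live here
        return auth_func(*args, **kwargs)
    decor.__BB_PROTECTED__ = True
    return decor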
| gpl-3.0 | -5,943,375,566,235,557,000 | 33.089552 | 129 | 0.57662 | false |
tcyb/nextgen4b | nextgen4b/process/filter.py | 1 | 14126 | """
nextgen4b.process.filter
TC, 8/11/16
A collection of functions that read, filter, and output sequence data from
next-generation sequencing experiments.
"""
import gzip
import logging
import os
import re
import sys
import time
import uuid
import numpy as np
import yaml
from Bio import AlignIO, SeqIO
from Bio.Emboss.Applications import NeedleCommandline
from Bio.Seq import Seq
from Bio.SeqRecord import SeqRecord
from tqdm import tqdm
__all__ = ['filter_sample', 'run_all_experiments']
#####################
# File Management
#####################
def load_ngs_file(fpath, ftype='fastq'):
"""
Load a .fastq file to a SeqIO iterator, un-gzip if necessary.
"""
if fpath.endswith('.gz'):
seq_f = gzip.open(fpath, 'rt')
elif fpath.endswith('.fastq'):
seq_f = open(fpath, 'rt')
else:
raise ValueError('File does not end in .gz or .fastq; confirm file type.')
f_iter = SeqIO.parse(seq_f, ftype)
return f_iter
#####################
# Main Filter Code
#####################
def filter_sample(f_name, pe_name, bcs, templates, f_filt_seqs, r_filt_seqs):
"""
Output filtered sequences as dictionary, indexed by barcode.
Sequences will be aligned to the provided template.
Parts of the template not represented will be '-'
"""
# setup loggers
text_logger = logging.getLogger(__name__+'.text_logger')
csv_logger = logging.getLogger(__name__+'.csv_logger')
text_logger.info('Started filtering routine for %s', f_name)
# Compile regexes
f_res = compile_res(f_filt_seqs)
pe_res = compile_res(r_filt_seqs)
# Load as generators, then filter
text_logger.info('Loading Files')
f_seqs = load_ngs_file(f_name)
for regex in f_res:
f_seqs = filter_seqs(f_seqs, regex)
pe_seqs = load_ngs_file(pe_name)
for regex in pe_res:
pe_seqs = filter_seqs(pe_seqs, regex)
# Barcode Filtering/Demux
bc_seqs = barcodeDemux(f_seqs, bcs)
# Sequence-based filtering
for expt in bcs.keys():
text_logger.info('Starting post-demux filtering for expt ID %s', expt)
csv_data = [expt, len(bc_seqs[expt])]
# Filter based on PE matches, only return the copied sequence
# Assumes the first RE in f_res will terminate the copied sequence
# copiedFuncGenerator's output should return all sequence before the adapter
seqs = filter_pe_mismatch(bc_seqs[expt], pe_seqs,
gen_copied_seq_function(f_res))
csv_data.append(len(seqs))
seqs = [trim_lig_adapter(s, f_res) for s in seqs] # Trim CS2 before filtering on quality (bad Qs at end of seqs)
# Quality filter
if len(seqs) > 0:
seqs = quality_filter(seqs) # Quality Filtering (needs to only have copied sequence)
else:
text_logger.info("""No sequences left, skipped quality score
filtering for expt ID %s.""", expt)
bc_seqs[expt] = seqs
csv_data.append(len(seqs))
# Align filter
if len(seqs) > 0:
# Do alignment-based filtering
full_template = '{}{}'.format(bcs[expt], templates[expt])
seqs = alignment_filter(seqs, full_template) # Do alignment-based filtering
else:
text_logger.info("""No sequences left, skipped align filtering for
expt ID %s.***""", expt)
bc_seqs[expt] = seqs
csv_data.append(len(seqs))
# Length filtering
if len(seqs) > 0:
seqs = len_filter(seqs, l_barcode=len(bcs[expt])) # Length Filtering
else:
text_logger.info("""No sequences left, skipped length filtering for
expt ID %s***""", expt)
bc_seqs[expt] = seqs
csv_data.append(len(seqs))
csv_logger.info(','.join([str(n) for n in csv_data]))
bc_seqs[expt] = seqs
return bc_seqs
#####################
# F/R Regex Filtering
#####################
def filter_seqs(seqs, q_re):
"""
Filter an iterator based on whether items match a regex object.
"""
# sIO_iterator should be generated by Bio.SeqIO.parse
# q_re should be a regex object generated by re.compile()
# Outputs a list of Seq objects that have the desired sequence in them.
text_logger = logging.getLogger(__name__+'.text_logger')
text_logger.info('Started regex filter: %s', q_re.pattern)
out_l = [s for s in seqs if q_re.search(str(s.seq))]
text_logger.info('Finished regex filter. Kept %i sequences.', len(out_l))
return out_l
def compile_res(seqs):
"""
Compile regex for each string in a list, return list of regex objects.
"""
# Takes a list of sequences you want to filter for
# Outputs a list of regex objects that you can iterate over
return [re.compile(s) for s in seqs]
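def _demo_regex_filtering():
    """Illustrative only (assumption, not part of the pipeline): shows how
    compile_res and filter_seqs interact. They only touch a record's `.seq`
    attribute, so a tiny stand-in record is enough to exercise them without
    Biopython objects."""
    class _FakeRecord(object):
        def __init__(self, seq):
            self.seq = seq
    seqs = [_FakeRecord('ACGTACGT'), _FakeRecord('TTTT')]
    for regex in compile_res(['ACGT']):
        seqs = filter_seqs(seqs, regex)
    return [str(s.seq) for s in seqs]  # -> ['ACGTACGT']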
#####################
# Barcode Filtering
#####################
def barcodeDemux(seqs, bcs):
"""
Takes lists of sequence objects, dict of barcodes (indexed by expt. ID)
Demuxes based on the barcode the sequences start with
Discards sequences that don't start with a barcode exact match
Assumes forward read -> sequences start with a barcode
"""
text_logger = logging.getLogger(__name__+'.text_logger')
text_logger.info('Started barcode demuxing.')
n_seqs = 0
bc_filtered_data = {}
for expt in bcs.keys():
bc_filtered_data[expt] = [s for s in seqs if str(s.seq).startswith(bcs[expt])]
n_seqs += len(bc_filtered_data[expt])
text_logger.info('Finished barcode demuxing. Kept %i of %i sequences.', n_seqs, len(seqs))
return bc_filtered_data
#####################
# Paired End Match Filtering
#####################
def get_coords(s):
return ':'.join(s.description.split(' ')[0].split(':')[3:])
def get_sense(s):
return s.description.split(' ')[1].split(':')[0]
def get_copied_seq(s, f_res):
return s[f_res[0].search(str(s.seq)).end():list(f_res[1].finditer(str(s.seq)))[-1].start()]
def trim_lig_adapter(s, f_res):
return s[:list(f_res[1].finditer(str(s.seq)))[-1].start()]
def gen_copied_seq_function(f_res):
return lambda s: get_copied_seq(s, f_res)
def filter_pe_mismatch(f_seqs, pe_seqs, copied_func):
"""
Args:
f_seqs - sequences from forward reads. Presumably filtered for the
required adatper(s).
pe_seqs - the paired end sequences of f_seqs. Also presumably filtered
for the required adapter(s).
copied_func - takes a sequence, should ouptut the DNA that we expect
to have been copied, i.e. that should be on the paired
end read.
Outputs a list of forward sequences that pass two filters:
* Have a coordinate match in the paired end reads
* That coordinate match has the same sequence.
"""
text_logger = logging.getLogger(__name__+'.text_logger')
text_logger.info('Started Paired-End Filtering')
# Some housekeeping stuff
proc_ct = 0 # number of sequences processed
co_ct = 0 # number of sequences with coordinate matches
aln_ct = 0 # number of sequences that have paired end sequence matches
matched_seq_list = []
# Get coordinate list
pe_coord_list = [get_coords(s) for s in pe_seqs]
for s in f_seqs:
if get_coords(s) in pe_coord_list: # Filter based on paired-end presence
co_ct += 1
copied = copied_func(s) # Get the part of the sequence that was actually copied
            if str(pe_seqs[0].reverse_complement().seq).find(str(copied.seq)) != -1:  # Filter on PE match; find() returns -1 when absent
aln_ct += 1
matched_seq_list.append(s)
proc_ct += 1
if not (proc_ct % 5000):
text_logger.info("Processed %i out of %i", proc_ct, len(f_seqs))
text_logger.info("Finished Paired-End Filtering")
text_logger.info("""Kept %i of %i forward sequences after coordinate
filtering""", co_ct, len(f_seqs))
text_logger.info("""Kept %i of %i forward sequences after paired-end sequence
matching""", aln_ct, co_ct)
return matched_seq_list
#####################
# Q-score Filtering
#####################
def quality_filter(seqs, q_cutoff=20):
text_logger = logging.getLogger(__name__+'.text_logger')
text_logger.info('Started Quality Score Filtering')
out_l = [s for s in seqs
if not any(s.letter_annotations['phred_quality']
< np.ones(len(s.letter_annotations['phred_quality']))*q_cutoff)]
text_logger.info('Finished Quality Score Filtering. Kept %i of %i sequences.',
len(out_l), len(seqs))
return out_l
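def _demo_quality_filter():
    """Illustrative only (assumption, not part of the pipeline): a record
    survives quality_filter when every per-base phred score reaches the
    cutoff. The stand-in record mimics Biopython's letter_annotations."""
    class _FakeRecord(object):
        def __init__(self, quals):
            self.letter_annotations = {'phred_quality': quals}
    good = _FakeRecord([30, 31, 32])
    bad = _FakeRecord([30, 5, 32])
    return quality_filter([good, bad])  # keeps only `good`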
#####################
# Length Filtering
#####################
def len_filter(seqs, l_cutoff=70, u_cutoff=150, l_barcode=0):
"""
Return only sequence objects that have length between l_cutoff and
u_cutoff
"""
text_logger = logging.getLogger(__name__+'.text_logger')
text_logger.info('Started Length Filtering')
out_l = [s for s in seqs if (len(s.seq) >= (l_cutoff + l_barcode)) and
(len(s.seq) <= (u_cutoff + l_barcode))]
text_logger.info('Finished Length Filtering. Kept %i of %i sequences.',
len(out_l), len(seqs))
return out_l
#####################
# Alignment Filtering
#####################
def alignment_filter(seqs, template, gapopen=10, gapextend=0.5, lo_cutoff=300,
hi_cutoff=1000, cleanup=True):
text_logger = logging.getLogger(__name__+'.text_logger')
text_logger.info('Started alignment-based filtering')
start_n_seqs = len(seqs)
# Save the template and sequences as temporary fasta files
# Probably some hacking that can be done in the NeedleCommandline stuff
seqs_f_name = 'tempseq.fa'
with open(seqs_f_name, 'w') as sh:
SeqIO.write(seqs, sh, 'fastq')
# Generate alignment command, run the alignment
text_logger.info("""Began EMBOSS needle routine with settings:\ngapopen:
%i\ngapextend: %i\nlo_cutoff: %i\nhi_cutoff: %i""",
gapopen, gapextend, lo_cutoff, hi_cutoff)
ofilen = 'temp_'+str(uuid.uuid4())+'.needle'
needle_cline = NeedleCommandline(asequence='asis::{}'.format(template),
bsequence=seqs_f_name, gapopen=gapopen,
gapextend=gapextend, outfile=ofilen)
needle_cline()
text_logger.info('Finished EMBOSS needle routine')
aln_data = AlignIO.parse(open(ofilen), "emboss")
new_seqs = cull_alignments(aln_data, lo_cutoff=lo_cutoff,
hi_cutoff=hi_cutoff)
# Exit routine
if cleanup:
text_logger.info('Cleaning up temp files')
os.remove(seqs_f_name)
os.remove(ofilen)
text_logger.info("""Finished alignment-based filtering. Kept %i of %i
sequences.""", len(new_seqs), start_n_seqs)
return new_seqs
def cull_alignments(aln_data, lo_cutoff=300, hi_cutoff=650):
new_seqs = []
for alignment in aln_data:
if (alignment.annotations['score'] > lo_cutoff) \
and (alignment.annotations['score'] < hi_cutoff):
# Template should have no gaps, and should contain the whole
# non-template sequence
if not str(alignment[0].seq).count('-') > 0:
new_seqs.append(alignment[1])
new_seqs[-1].annotations['alnscore'] = alignment.annotations['score']
return new_seqs
#####################
# Main Routines
#####################
def setup_logger(name, file_name, log_format, level=logging.INFO):
logger = logging.getLogger(name)
logger.setLevel(level)
handler = logging.FileHandler(file_name)
formatter = logging.Formatter(log_format)
handler.setFormatter(formatter)
logger.addHandler(handler)
return logger
def run_all_experiments(yf_name, save_intermediates=True):
"""
Filters all sequences noted in the passed YAML file.
"""
# Setup text_logger
text_logger = setup_logger(__name__+'.text_logger',
'ngs_%s.log' % time.strftime("%Y%m%d-%H%M%S"),
'%(asctime)s %(message)s')
csv_logger = setup_logger(__name__+'.csv_logger',
'ngs_filter_%s.csv' % time.strftime("%Y%m%d-%H%M%S"),
'%(message)s')
# Load YAML file
with open(yf_name) as expt_f:
        expt_yaml = yaml.safe_load(expt_f)  # Should probably make this a class at some point...
text_logger.info('Loaded YAML experiment file '+yf_name)
runs = expt_yaml['ngsruns']
text_logger.info('Found NGS Runs: '+', '.join(runs))
for run in tqdm(runs.keys()):
text_logger.info('Performing routine for NGS Run '+run)
expts = runs[run]['experiments']
text_logger.info('Found experiments '+', '.join(expts))
# Get barcodes, templates for all experiments in the run
bcs = {}
templates = {}
for expt in expts:
bcs[expt] = expt_yaml['experiments'][expt]['barcode']
templates[expt] = expt_yaml['experiments'][expt]['template_seq']
# Do filtering
text_logger.info('Starting filtering for run %s', run)
aln_seqs = filter_sample(runs[run]['f_read_name'],
runs[run]['pe_read_name'],
bcs, templates,
runs[run]['filter_seqs']['forward'],
runs[run]['filter_seqs']['reverse'])
if save_intermediates:
for expt in aln_seqs.keys():
with open('aln_seqs_%s_%s.fa' % (run, expt), 'w') as out_f:
SeqIO.write(aln_seqs[expt], out_f, 'fasta')
text_logger.info('Finished filtering for run %s', run)
if __name__ == '__main__':
if len(sys.argv) > 1:
yaml_name = sys.argv[1]
else:
yaml_name = 'samples.yaml'
run_all_experiments(yaml_name, save_intermediates=True)
| mit | -3,774,632,956,410,515,500 | 35.313625 | 120 | 0.593162 | false |
anton-golubkov/Garland | src/ipf/ipfblock/findchessboard.py | 1 | 2238 | #-------------------------------------------------------------------------------
# Copyright (c) 2011 Anton Golubkov.
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the GNU Lesser Public License v2.1
# which accompanies this distribution, and is available at
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.html
#
# Contributors:
# Anton Golubkov - initial API and implementation
#-------------------------------------------------------------------------------
# -*- coding: utf-8 -*-
import cv
import ipfblock
import ioport
import ipf.ipfblock.processing
from property import Property
from ipf.ipftype.ipfimage1ctype import IPFImage1cType
from ipf.ipftype.ipfimage3ctype import IPFImage3cType
from ipf.ipftype.ipfarraytype import IPFArrayType
from ipf.ipftype.ipfinttype import IPFIntType
from ipf.ipftype.ipffindchessboardtype import IPFFindChessboardType
class FindChessboard(ipfblock.IPFBlock):
""" Find chess board corners block
"""
type = "FindChessboard"
category = "Feature detection"
is_abstract_block = False
def __init__(self):
super(FindChessboard, self).__init__()
self.input_ports["input_image"] = ioport.IPort(self, IPFImage1cType)
self.output_ports["output_array"] = ioport.OPort(self, IPFArrayType)
self.properties["type"] = Property(IPFFindChessboardType)
self.properties["width"] = Property(IPFIntType, 3, 20)
self.properties["height"] = Property(IPFIntType, 3, 20)
self.processing_function = ipf.ipfblock.processing.find_chessboard
def get_preview_image(self):
corners = self.output_ports["output_array"]._value
if len(corners) == 0:
return self.input_ports["input_image"]._value
output_image = IPFImage3cType.convert(self.input_ports["input_image"]._value)
width = self.properties["width"].get_value()
height = self.properties["height"].get_value()
cv.DrawChessboardCorners(output_image,
(width, height),
corners,
1)
return output_image
| lgpl-2.1 | -891,357,427,570,049,300 | 36.3 | 85 | 0.615728 | false |
demisto/content | Packs/UnifiVideoNVR/Integrations/UnifiVideo/UnifiVideo.py | 1 | 8478 | import cv2
import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
from unifi_video import UnifiVideoAPI
import dateparser
import json
demisto_format = '%Y-%m-%dT%H:%M:%SZ'
params = demisto.params()
args = demisto.args()
api_key = params.get('api_key')
address = params.get('addr')
port = params.get('port')
schema = params.get('schema')
fetch_limit = params.get('fetch_limit')
verify_cert = params.get('verify_cert')
FETCH_TIME = params.get('fetch_time')
if demisto.command() == 'test-module':
# This is the call made when pressing the integration test button.
uva = UnifiVideoAPI(api_key=api_key, addr=address, port=port, schema=schema, verify_cert=verify_cert)
demisto.results('ok')
if demisto.command() == 'unifivideo-get-camera-list':
uva = UnifiVideoAPI(api_key=api_key, addr=address, port=port, schema=schema, verify_cert=verify_cert)
context_output = []
for camera in uva.cameras:
context_output.append(camera.name)
results = [
CommandResults(
outputs_prefix='UnifiVideo.Cameras',
readable_output=tableToMarkdown("Camera list", context_output, headers=["Camera name"], removeNull=False),
outputs=context_output
)]
return_results(results)
if demisto.command() == 'unifivideo-get-snapshot':
camera_name = args.get('camera_name')
output = bytes()
uva = UnifiVideoAPI(api_key=api_key, addr=address, port=port, schema=schema, verify_cert=verify_cert)
uva.get_camera(camera_name).snapshot("/tmp/snapshot.png")
f = open("/tmp/snapshot.png", "rb")
output = f.read()
filename = "snapshot.png"
file = fileResult(filename=filename, data=output)
file['Type'] = entryTypes['image']
demisto.results(file)
if demisto.command() == 'unifivideo-set-recording-settings':
camera_name = args.get('camera_name')
rec_set = args.get('rec_set')
uva = UnifiVideoAPI(api_key=api_key, addr=address, port=port, schema=schema, verify_cert=verify_cert)
uva.get_camera(camera_name).set_recording_settings(rec_set)
demisto.results(camera_name + ": " + rec_set)
if demisto.command() == 'unifivideo-ir-leds':
camera_name = args.get('camera_name')
ir_leds = args.get('ir_leds')
uva = UnifiVideoAPI(api_key=api_key, addr=address, port=port, schema=schema, verify_cert=verify_cert)
uva.get_camera(camera_name).ir_leds(ir_leds)
demisto.results(camera_name + ": " + ir_leds)
if demisto.command() == 'unifivideo-get-recording':
recording_id = args.get('recording_id')
recording_file_name = 'recording-' + recording_id + '.mp4'
uva = UnifiVideoAPI(api_key=api_key, addr=address, port=port, schema=schema, verify_cert=verify_cert)
uva.refresh_recordings(0)
uva.recordings[recording_id].download('/tmp/recording.mp4')
f = open("/tmp/recording.mp4", "rb")
output = f.read()
filename = recording_file_name
file = fileResult(filename=filename, data=output, file_type=EntryType.ENTRY_INFO_FILE)
demisto.results(file)
if demisto.command() == 'unifivideo-get-recording-motion-snapshot':
recording_id = args.get('recording_id')
snapshot_file_name = 'snapshot-motion-' + recording_id + '.jpg'
uva = UnifiVideoAPI(api_key=api_key, addr=address, port=port, schema=schema, verify_cert=verify_cert)
uva.refresh_recordings(0)
uva.recordings[recording_id].motion('/tmp/snapshot.png')
f = open("/tmp/snapshot.png", "rb")
output = f.read()
filename = snapshot_file_name
file = fileResult(filename=filename, data=output)
file['Type'] = entryTypes['image']
demisto.results(file)
if demisto.command() == 'unifivideo-get-recording-snapshot':
recording_id = args.get('recording_id')
snapshot_file_name = 'snapshot-' + recording_id + '-' + args.get('frame') + '.jpg'
uva = UnifiVideoAPI(api_key=api_key, addr=address, port=port, schema=schema, verify_cert=verify_cert)
uva.refresh_recordings(0)
uva.recordings[recording_id].download('/tmp/recording.mp4')
if "frame" in args:
vc = cv2.VideoCapture('/tmp/recording.mp4') # pylint: disable=E1101
c = 1
if vc.isOpened():
rval, frame = vc.read()
else:
rval = False
while rval:
rval, frame = vc.read()
c = c + 1
if c == int(args.get('frame')):
cv2.imwrite("/tmp/" + snapshot_file_name, frame) # pylint: disable=E1101
break
vc.release()
f = open("/tmp/" + snapshot_file_name, "rb")
output = f.read()
filename = snapshot_file_name
file = fileResult(filename=filename, data=output)
file['Type'] = entryTypes['image']
demisto.results(file)
if demisto.command() == 'unifivideo-get-recording-list':
uva = UnifiVideoAPI(api_key=api_key, addr=address, port=port, schema=schema, verify_cert=verify_cert)
recordings = []
for rec in uva.get_recordings():
rec_tmp = {}
rec_tmp['id'] = rec._id
rec_tmp['rec_type'] = rec.rec_type
rec_tmp['start_time'] = rec.start_time.strftime('%Y-%m-%dT%H:%M:%SZ')
        rec_tmp['end_time'] = rec.end_time.strftime('%Y-%m-%dT%H:%M:%SZ')
recordings.append(rec_tmp)
results = [
CommandResults(
outputs_prefix='UnifiVideo.Recordings',
readable_output=tableToMarkdown("Recording list", recordings, headers=["id", "rec_type", "start_time", "end_time"]),
outputs_key_field=['id'],
outputs=recordings
)]
return_results(results)
if demisto.command() == 'unifivideo-get-snapshot-at-frame':
entry_id = demisto.args().get('entryid')
snapshot_file_name = 'snapshot-' + entry_id + '-' + args.get('frame') + '.jpg'
try:
file_result = demisto.getFilePath(entry_id)
except Exception as ex:
return_error("Failed to load file entry with entryid: {}. Error: {}".format(entry_id, ex))
video_path = file_result.get("path") # pylint: disable=E1101
vc = cv2.VideoCapture(video_path) # pylint: disable=E1101
c = 1
if vc.isOpened():
rval, frame = vc.read()
else:
rval = False
while rval:
rval, frame = vc.read()
c = c + 1
if c == int(args.get('frame')):
cv2.imwrite("/tmp/" + snapshot_file_name, frame) # pylint: disable=E1101
break
vc.release()
f = open("/tmp/" + snapshot_file_name, "rb")
output = f.read()
filename = snapshot_file_name
file = fileResult(filename=filename, data=output)
file['Type'] = entryTypes['image']
demisto.results(file)
if demisto.command() == 'fetch-incidents':
start_time_of_int = str(datetime.now())
uva = UnifiVideoAPI(api_key=api_key, addr=address, port=port, schema=schema, verify_cert=verify_cert)
# And retrieve it for use later:
last_run = demisto.getLastRun()
    # lastRun is a dictionary; the fetch start time is stored under key "start_time".
# JSON of the incident type created by this integration
inc = []
start_time = dateparser.parse(FETCH_TIME)
if last_run:
start_time = last_run.get('start_time')
if not isinstance(start_time, datetime):
start_time = datetime.strptime(str(start_time), '%Y-%m-%d %H:%M:%S.%f')
uva.refresh_recordings()
for rec in uva.get_recordings(limit=fetch_limit, start_time=start_time, order='desc'):
incident = {}
datetime_object = datetime.strptime(str(rec.start_time), '%Y-%m-%d %H:%M:%S')
for camera in uva.cameras:
cam_id = uva.get_camera(camera.name)
if cam_id._id in rec.cameras:
camera_name = camera.name
try:
if datetime_object > start_time:
incident = {
'name': rec.rec_type,
'occurred': datetime_object.strftime('%Y-%m-%dT%H:%M:%SZ'),
'rawJSON': json.dumps({"event": rec.rec_type, "ubnt_id": rec._id, "camera_name": camera_name,
"integration_lastrun": str(start_time), "start_time": str(rec.start_time),
"stop_time": str(rec.end_time)})
}
inc.append(incident)
except Exception as e:
raise Exception("Problem comparing: " + str(datetime_object) + ' ' + str(start_time) + " Exception: " + str(e))
demisto.incidents(inc)
demisto.setLastRun({'start_time': start_time_of_int})
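# Shape of an incident pushed by the fetch-incidents branch above
# (illustrative only; field values are examples derived from the rawJSON
# construction):
#
#     {"name": "motionRecording",
#      "occurred": "2020-01-01T12:00:00Z",
#      "rawJSON": "{\"event\": ..., \"ubnt_id\": ..., \"camera_name\": ...,
#                   \"integration_lastrun\": ..., \"start_time\": ..., \"stop_time\": ...}"}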
| mit | 6,209,920,336,024,969,000 | 40.763547 | 128 | 0.623378 | false |
helgefmi/Easy | src/easy/lexer.py | 1 | 4113 | import re
class Token(object):
def __init__(self, lineno, token_type, token_value=None):
self._type = token_type
self._value = token_value
self._lineno = lineno
@property
def type(self):
return self._type
@property
def value(self):
return self._value
@property
def lineno(self):
return self._lineno
def __str__(self):
if self.type == 'tok_string':
return '"%s"' % self.value
if self.value is None:
return self.type
else:
return str(self.value)
class Lexer(object):
KEYWORDS = (
'def', 'do', 'end',
'if', 'then', 'else',
'return',
)
SYMBOLS = (
('>=', 'tok_binary_op'),
('<=', 'tok_binary_op'),
('==', 'tok_binary_op'),
('!=', 'tok_binary_op'),
('<', 'tok_binary_op'),
('>', 'tok_binary_op'),
('*', 'tok_binary_op'),
('-', 'tok_binary_op'),
('/', 'tok_binary_op'),
('+', 'tok_binary_op'),
('(', 'tok_paren_start'),
(')', 'tok_paren_end'),
(';', 'tok_semicolon'),
)
def __init__(self, input, filename=None):
self.input = input
self._tokens = []
self._lineno = 1
def _append(self, type, value=None):
token = Token(self._lineno, type, value)
self._tokens.append(token)
def _strip_whitespace(self):
for char in self.input:
if not char.isspace():
break
if char == '\n':
self._lineno += 1
self.input = self.input.lstrip()
def _assert(self, cond, error, lineno=None):
lineno = lineno or self._lineno
if not cond:
print error
print 'At line %d' % lineno
print 'input[:10] = %s' % repr(self.input[:10])
exit(1)
def lex(self):
while True:
self._strip_whitespace()
if not self.input:
break
result = (self.lex_identifier() or self.lex_number() or
self.lex_symbol() or self.lex_string() or
self.lex_type())
self._assert(result, 'Unexpected input')
return self._tokens
def lex_string(self):
if self.input[0] != '"':
return False
self.input = self.input[1:]
start_lineno = self._lineno
last = None
for i, char in enumerate(self.input):
if char == '\n':
self._lineno += 1
if char == '"' and last != '\\':
break
last = char
else:
self._assert(False, 'Unterminated string literal; expecting "',
start_lineno)
string, self.input = self.input[:i], self.input[i + 1:]
self._append('tok_string', string)
return True
def lex_identifier(self):
match = re.match(r'[a-z][a-zA-Z0-9_]*', self.input)
if not match:
return False
id = match.group()
self.input = self.input[match.end():]
if id in self.KEYWORDS:
self._append('tok_%s' % id)
else:
self._append('tok_identifier', id)
return True
def lex_type(self):
match = re.match(r'[A-Z][a-zA-Z0-9_]*', self.input)
if not match:
return False
name = match.group()
self.input = self.input[match.end():]
self._append('tok_type', name)
return True
def lex_symbol(self):
for symbol, token in self.SYMBOLS:
if self.input.startswith(symbol):
self.input = self.input[len(symbol):]
self._append(token, symbol)
return True
return False
    def lex_number(self):
        for i, char in enumerate(self.input):
            if not char.isdigit():
                break
        else:
            # No non-digit found: the whole remaining input is one number.
            i = len(self.input)
        if i == 0:
            return False
        number, self.input = self.input[:i], self.input[i:]
        self._append('tok_number', int(number))
        return True
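def _demo_lexer():
    """Illustrative only (assumption, not part of Easy itself): tokenizes a
    tiny snippet covering keywords, identifiers, symbols and numbers."""
    tokens = Lexer('def answer() do return 42; end').lex()
    return [(t.type, t.value) for t in tokens]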
| mit | -6,500,820,321,542,905,000 | 26.42 | 75 | 0.480671 | false |
Azure/azure-sdk-for-python | sdk/appconfiguration/azure-mgmt-appconfiguration/azure/mgmt/appconfiguration/models/_app_configuration_management_client_enums.py | 1 | 3123 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from enum import Enum, EnumMeta
from six import with_metaclass
class _CaseInsensitiveEnumMeta(EnumMeta):
def __getitem__(self, name):
return super().__getitem__(name.upper())
def __getattr__(cls, name):
"""Return the enum member matching `name`
We use __getattr__ instead of descriptors or inserting into the enum
class' __dict__ in order to support `name` and `value` being both
properties for enum members (which live in the class' __dict__) and
enum members themselves.
"""
try:
return cls._member_map_[name.upper()]
except KeyError:
raise AttributeError(name)
class ActionsRequired(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
"""Any action that is required beyond basic workflow (approve/ reject/ disconnect)
"""
NONE = "None"
RECREATE = "Recreate"
class ConfigurationResourceType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
"""The resource type to check for name availability.
"""
MICROSOFT_APP_CONFIGURATION_CONFIGURATION_STORES = "Microsoft.AppConfiguration/configurationStores"
class ConnectionStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
"""The private link service connection status.
"""
PENDING = "Pending"
APPROVED = "Approved"
REJECTED = "Rejected"
DISCONNECTED = "Disconnected"
class CreatedByType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
"""The type of identity that created the resource.
"""
USER = "User"
APPLICATION = "Application"
MANAGED_IDENTITY = "ManagedIdentity"
KEY = "Key"
class IdentityType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
"""The type of managed identity used. The type 'SystemAssigned, UserAssigned' includes both an
implicitly created identity and a set of user-assigned identities. The type 'None' will remove
any identities.
"""
NONE = "None"
SYSTEM_ASSIGNED = "SystemAssigned"
USER_ASSIGNED = "UserAssigned"
SYSTEM_ASSIGNED_USER_ASSIGNED = "SystemAssigned, UserAssigned"
class ProvisioningState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
"""The provisioning state of the configuration store.
"""
CREATING = "Creating"
UPDATING = "Updating"
DELETING = "Deleting"
SUCCEEDED = "Succeeded"
FAILED = "Failed"
CANCELED = "Canceled"
class PublicNetworkAccess(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
"""Control permission for data plane traffic coming from public networks while private endpoint is
enabled.
"""
ENABLED = "Enabled"
DISABLED = "Disabled"
| mit | -6,880,166,865,497,613,000 | 34.488636 | 103 | 0.666346 | false |
m1trix/Tetris-Wars | tetris_wars/sdl2/test/hints_test.py | 1 | 3881 | import sys
import unittest
from ctypes import cast, c_char_p
from .. import SDL_Init, SDL_Quit, SDL_QuitSubSystem, SDL_INIT_EVERYTHING
from .. import hints
class SDLHintsTest(unittest.TestCase):
__tags__ = ["sdl"]
def setUp(self):
SDL_Init(SDL_INIT_EVERYTHING)
def tearDown(self):
SDL_QuitSubSystem(SDL_INIT_EVERYTHING)
SDL_Quit()
def test_SDL_ClearHints(self):
self.assertEqual(hints.SDL_SetHint(b"TEST", b"32"), 1)
self.assertEqual(hints.SDL_GetHint(b"TEST"), b"32")
hints.SDL_ClearHints()
self.assertEqual(hints.SDL_GetHint(b"TEST"), None)
def test_SDL_GetHint(self):
self.assertEqual(hints.SDL_SetHint(b"TEST", b"32"), 1)
self.assertEqual(hints.SDL_GetHint(b"TEST"), b"32")
self.assertEqual(hints.SDL_SetHint(hints.SDL_HINT_RENDER_DRIVER,
b"dummy"), 1)
self.assertEqual(hints.SDL_GetHint(hints.SDL_HINT_RENDER_DRIVER),
b"dummy")
def test_SDL_SetHint(self):
self.assertEqual(hints.SDL_SetHint(b"TEST", b"32"), 1)
self.assertEqual(hints.SDL_GetHint(b"TEST"), b"32")
self.assertEqual(hints.SDL_SetHint(b"TEST", b"abcdef"), 1)
self.assertEqual(hints.SDL_GetHint(b"TEST"), b"abcdef")
if sys.platform != "cli":
# TODO: Check on next IronPython version (>2.7.4)
self.assertEqual(hints.SDL_SetHint(b"", b""), 1)
self.assertEqual(hints.SDL_GetHint(b""), b"")
def test_SDL_SetHintWithPriority(self):
self.assertEqual(hints.SDL_SetHintWithPriority
(b"TEST", b"32", hints.SDL_HINT_DEFAULT), 1)
self.assertEqual(hints.SDL_GetHint(b"TEST"), b"32")
self.assertEqual(hints.SDL_SetHintWithPriority
(b"TEST", b"abcdef", hints.SDL_HINT_NORMAL), 1)
self.assertEqual(hints.SDL_GetHint(b"TEST"), b"abcdef")
if sys.platform != "cli":
# TODO: Check on next IronPython version (>2.7.4)
self.assertEqual(hints.SDL_SetHintWithPriority
(b"", b"", hints.SDL_HINT_OVERRIDE), 1)
self.assertEqual(hints.SDL_GetHint(b""), b"")
# self.assertRaises(ValueError, hints.SDL_SetHintWithPriority,
# "TEST", "123456789", 12)
# self.assertRaises(ValueError, hints.SDL_SetHintWithPriority,
# "TEST", "123456789", -78)
# self.assertRaises(ValueError, hints.SDL_SetHintWithPriority,
# "TEST", "123456789", None)
# self.assertRaises(ValueError, hints.SDL_SetHintWithPriority,
# "TEST", "123456789", "bananas")
def test_SDL_AddDelHintCallback(self):
calls = []
def callback(userdata, name, oldval, newval):
data = cast(userdata, c_char_p)
calls.append((data.value, name, oldval, newval))
hintcb = hints.SDL_HintCallback(callback)
udata = c_char_p(b"banana")
hints.SDL_AddHintCallback(hints.SDL_HINT_ALLOW_TOPMOST, hintcb,
udata)
# SDL_AddHintCallback invokes the callback once.
self.assertEqual(len(calls), 1)
self.assertEqual(calls[0], (b"banana", hints.SDL_HINT_ALLOW_TOPMOST,
None, None))
hints.SDL_SetHint(hints.SDL_HINT_ALLOW_TOPMOST, b"true")
self.assertEqual(len(calls), 2)
self.assertEqual(calls[1], (b"banana", hints.SDL_HINT_ALLOW_TOPMOST,
None, b"true"))
hints.SDL_DelHintCallback(hints.SDL_HINT_ALLOW_TOPMOST, hintcb,
udata)
hints.SDL_SetHint(hints.SDL_HINT_ALLOW_TOPMOST, b"false")
self.assertEqual(len(calls), 2)
if __name__ == '__main__':
sys.exit(unittest.main())
| gpl-2.0 | 3,688,641,344,245,164,000 | 42.606742 | 76 | 0.582324 | false |
endlessm/chromium-browser | third_party/catapult/telemetry/telemetry/core/platform.py | 1 | 14924 | # Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging as real_logging
import os
import sys
import time
from telemetry.core import local_server
from telemetry.core import memory_cache_http_server
from telemetry.core import network_controller
from telemetry.core import tracing_controller
from telemetry.core import util
from telemetry.internal.platform import (platform_backend as
platform_backend_module)
from py_utils import discover
_HOST_PLATFORM = None
# Remote platform is a dictionary from device ids to remote platform instances.
_REMOTE_PLATFORMS = {}
def _InitHostPlatformIfNeeded():
global _HOST_PLATFORM # pylint: disable=global-statement
if _HOST_PLATFORM:
return
backend = None
backends = _IterAllPlatformBackendClasses()
for platform_backend_class in backends:
if platform_backend_class.IsPlatformBackendForHost():
backend = platform_backend_class()
break
if not backend:
raise NotImplementedError()
_HOST_PLATFORM = Platform(backend)
def GetHostPlatform():
_InitHostPlatformIfNeeded()
return _HOST_PLATFORM
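def _ExampleHostPlatformInfo():
  """Illustrative only (assumption, not part of telemetry): a minimal query
  against the host platform object returned above."""
  host = GetHostPlatform()
  return host.GetOSName(), host.GetArchName()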
def _IterAllPlatformBackendClasses():
platform_dir = os.path.dirname(os.path.realpath(
platform_backend_module.__file__))
return discover.DiscoverClasses(
platform_dir, util.GetTelemetryDir(),
platform_backend_module.PlatformBackend).itervalues()
def GetPlatformForDevice(device, finder_options, logging=real_logging):
""" Returns a platform instance for the device.
Args:
device: a device.Device instance.
"""
if device.guid in _REMOTE_PLATFORMS:
return _REMOTE_PLATFORMS[device.guid]
try:
for platform_backend_class in _IterAllPlatformBackendClasses():
if platform_backend_class.SupportsDevice(device):
_REMOTE_PLATFORMS[device.guid] = (
platform_backend_class.CreatePlatformForDevice(device,
finder_options))
return _REMOTE_PLATFORMS[device.guid]
return None
except Exception:
current_exception = sys.exc_info()
logging.error('Fail to create platform instance for %s.', device.name)
raise current_exception[0], current_exception[1], current_exception[2]
class Platform(object):
"""The platform that the target browser is running on.
Provides a limited interface to interact with the platform itself, where
possible. It's important to note that platforms may not provide a specific
API, so check with IsFooBar() for availability.
"""
def __init__(self, platform_backend):
self._platform_backend = platform_backend
self._platform_backend.InitPlatformBackend()
self._platform_backend.SetPlatform(self)
self._network_controller = network_controller.NetworkController(
self._platform_backend.network_controller_backend)
self._tracing_controller = tracing_controller.TracingController(
self._platform_backend.tracing_controller_backend)
self._local_server_controller = local_server.LocalServerController(
self._platform_backend)
self._forwarder = None
@property
def is_host_platform(self):
return self == GetHostPlatform()
@property
def network_controller(self):
"""Control network settings and servers to simulate the Web."""
return self._network_controller
@property
def tracing_controller(self):
return self._tracing_controller
def Initialize(self):
pass
def CanMonitorThermalThrottling(self):
"""Platforms may be able to detect thermal throttling.
Some fan-less computers go into a reduced performance mode when their heat
exceeds a certain threshold. Performance tests in particular should use this
API to detect if this has happened and interpret results accordingly.
"""
return self._platform_backend.CanMonitorThermalThrottling()
def GetSystemLog(self):
return self._platform_backend.GetSystemLog()
def IsThermallyThrottled(self):
"""Returns True if the device is currently thermally throttled."""
return self._platform_backend.IsThermallyThrottled()
def HasBeenThermallyThrottled(self):
"""Returns True if the device has been thermally throttled."""
return self._platform_backend.HasBeenThermallyThrottled()
def GetDeviceTypeName(self):
"""Returns a string description of the Platform device, or None.
Examples: Nexus 7, Nexus 6, Desktop"""
return self._platform_backend.GetDeviceTypeName()
def GetArchName(self):
"""Returns a string description of the Platform architecture.
Examples: x86_64 (posix), AMD64 (win), armeabi-v7a, x86"""
return self._platform_backend.GetArchName()
def GetOSName(self):
"""Returns a string description of the Platform OS.
Examples: WIN, MAC, LINUX, CHROMEOS"""
return self._platform_backend.GetOSName()
def GetDeviceId(self):
"""Returns a string identifying the device.
Examples: 0123456789abcdef"""
return self._platform_backend.GetDeviceId()
def GetOSVersionName(self):
"""Returns a logically sortable, string-like description of the Platform OS
version.
Examples: VISTA, WIN7, LION, MOUNTAINLION"""
return self._platform_backend.GetOSVersionName()
def GetOSVersionDetailString(self):
"""Returns more detailed information about the OS version than
GetOSVersionName, if available. Otherwise returns the empty string.
Examples: '10.12.4' on macOS."""
return self._platform_backend.GetOSVersionDetailString()
def GetSystemTotalPhysicalMemory(self):
"""Returns an integer with the total physical memory in bytes."""
return self._platform_backend.GetSystemTotalPhysicalMemory()
def CanFlushIndividualFilesFromSystemCache(self):
"""Returns true if the disk cache can be flushed for individual files."""
return self._platform_backend.CanFlushIndividualFilesFromSystemCache()
def SupportFlushEntireSystemCache(self):
"""Returns true if entire system cache can be flushed.
Also checks that platform has required privilegues to flush system caches.
"""
return self._platform_backend.SupportFlushEntireSystemCache()
def _WaitForPageCacheToBeDropped(self):
# There seems to be no reliable way to wait for all pages to be dropped from
# the OS page cache (also known as 'file cache'). There is no guaranteed
# moment in time when everything is out of page cache. A number of pages
# will likely be reused before other pages are evicted. While individual
# files can be watched in limited ways, we choose not to be clever.
time.sleep(2)
def FlushEntireSystemCache(self):
"""Flushes the OS's file cache completely.
This function may require root or administrator access. Clients should
call SupportFlushEntireSystemCache to check first.
"""
self._platform_backend.FlushEntireSystemCache()
self._WaitForPageCacheToBeDropped()
def FlushSystemCacheForDirectories(self, directories):
"""Flushes the OS's file cache for the specified directory.
This function does not require root or administrator access."""
for path in directories:
self._platform_backend.FlushSystemCacheForDirectory(path)
self._WaitForPageCacheToBeDropped()
def FlushDnsCache(self):
"""Flushes the OS's DNS cache completely.
This function may require root or administrator access."""
return self._platform_backend.FlushDnsCache()
def LaunchApplication(self,
application,
parameters=None,
elevate_privilege=False):
""""Launches the given |application| with a list of |parameters| on the OS.
Set |elevate_privilege| to launch the application with root or admin rights.
Returns:
A popen style process handle for host platforms.
"""
return self._platform_backend.LaunchApplication(
application,
parameters,
elevate_privilege=elevate_privilege)
def StartActivity(self, intent, blocking=False):
"""Starts an activity for the given intent on the device."""
return self._platform_backend.StartActivity(intent, blocking)
def CanLaunchApplication(self, application):
"""Returns whether the platform can launch the given application."""
return self._platform_backend.CanLaunchApplication(application)
def InstallApplication(self, application, **kwargs):
"""Installs the given application."""
return self._platform_backend.InstallApplication(application, **kwargs)
def IsCooperativeShutdownSupported(self):
"""Indicates whether CooperativelyShutdown, below, is supported.
It is not necessary to implement it on all platforms."""
return self._platform_backend.IsCooperativeShutdownSupported()
def CooperativelyShutdown(self, proc, app_name):
"""Cooperatively shut down the given process from subprocess.Popen.
Currently this is only implemented on Windows. See
crbug.com/424024 for background on why it was added.
Args:
proc: a process object returned from subprocess.Popen.
app_name: on Windows, is the prefix of the application's window
class name that should be searched for. This helps ensure
that only the application's windows are closed.
Returns True if it is believed the attempt succeeded.
"""
return self._platform_backend.CooperativelyShutdown(proc, app_name)
def CanTakeScreenshot(self):
return self._platform_backend.CanTakeScreenshot()
# TODO(nednguyen): Implement this on Mac, Linux & Win. (crbug.com/369490)
def TakeScreenshot(self, file_path):
""" Takes a screenshot of the platform and save to |file_path|.
Note that this method may not be supported on all platform, so check with
CanTakeScreenshot before calling this.
Args:
file_path: Where to save the screenshot to. If the platform is remote,
|file_path| is the path on the host platform.
Returns True if it is believed the attempt succeeded.
"""
return self._platform_backend.TakeScreenshot(file_path)
def CanRecordVideo(self):
return self._platform_backend.CanRecordVideo()
def StartVideoRecording(self):
"""Starts recording a video on the device.
Note that this method may not be supported on all platforms, so the caller
must check with CanRecordVideo before calling this. Once the caller starts
recording a video using this call, the caller must stop recording the video
by calling StopVideoRecording() before attempting to start recording another
video.
"""
self._platform_backend.StartVideoRecording()
def StopVideoRecording(self, video_path):
"""Stops recording a video on the device and saves to |video_path|.
This method must be called only if recording a video had started using a
call to StartVideoRecording(), and it was not already stopped using a call
to StopVideoRecording().
Args:
video_path: Where to save the video to. If the platform is remote,
|video_path| is the path on the host platform.
"""
self._platform_backend.StopVideoRecording(video_path)
def SetFullPerformanceModeEnabled(self, enabled):
""" Set full performance mode on the platform.
Note: this can be no-op on certain platforms.
"""
return self._platform_backend.SetFullPerformanceModeEnabled(enabled)
def StartLocalServer(self, server):
"""Starts a LocalServer and associates it with this platform.
|server.Close()| should be called manually to close the started server.
"""
self._local_server_controller.StartServer(server)
@property
def http_server(self):
# TODO(crbug.com/799490): Ownership of the local server should be moved
# to the network_controller.
server = self._local_server_controller.GetRunningServer(
memory_cache_http_server.MemoryCacheDynamicHTTPServer, None)
if server:
return server
return self._local_server_controller.GetRunningServer(
memory_cache_http_server.MemoryCacheHTTPServer, None)
def SetHTTPServerDirectories(self, paths, handler_class=None):
"""Returns True if the HTTP server was started, False otherwise."""
# pylint: disable=redefined-variable-type
if isinstance(paths, basestring):
paths = set([paths])
paths = set(os.path.realpath(p) for p in paths)
# If any path is in a subdirectory of another, remove the subdirectory.
duplicates = set()
for parent_path in paths:
for sub_path in paths:
if parent_path == sub_path:
continue
if os.path.commonprefix((parent_path, sub_path)) == parent_path:
duplicates.add(sub_path)
paths -= duplicates
if self.http_server:
old_handler_class = getattr(self.http_server,
"dynamic_request_handler_class", None)
if not old_handler_class and not handler_class and \
self.http_server.paths == paths:
return False
if old_handler_class and handler_class \
and old_handler_class.__name__ == handler_class.__name__ \
and self.http_server.paths == paths:
return False
self.http_server.Close()
if not paths:
return False
if handler_class:
server = memory_cache_http_server.MemoryCacheDynamicHTTPServer(
paths, handler_class)
real_logging.info('MemoryCacheDynamicHTTPServer created')
else:
server = memory_cache_http_server.MemoryCacheHTTPServer(paths)
real_logging.info('MemoryCacheHTTPServer created')
self.StartLocalServer(server)
# For now, Fuchsia needs to do port forwarding due to --proxy-server
# flag not being supported in its browser.
# TODO(https://crbug.com/1014670): Remove once debug flags supported in
# Fuchsia browsers.
if self._platform_backend.GetOSName() == 'fuchsia':
self._platform_backend.forwarder_factory.Create(server.port, server.port)
return True
def StopAllLocalServers(self):
self._local_server_controller.Close()
if self._forwarder:
self._forwarder.Close()
@property
def local_servers(self):
"""Returns the currently running local servers."""
return self._local_server_controller.local_servers
def WaitForBatteryTemperature(self, temp):
"""Waits for the battery on the device under test to cool down to temp.
Args:
temp: temperature target in degrees C.
"""
return self._platform_backend.WaitForBatteryTemperature(temp)
def WaitForCpuTemperature(self, temp):
"""Waits for the CPU temperature to be less than temp.
Args:
temp: A float containing the maximum temperature to allow
in degrees c.
"""
return self._platform_backend.WaitForCpuTemperature(temp)
def GetTypExpectationsTags(self):
return self._platform_backend.GetTypExpectationsTags()
| bsd-3-clause | 561,020,929,033,317,700 | 35.311436 | 80 | 0.718306 | false |
Dirrot/python-dogechain-api | DogechainApi/DogechainApi.py | 1 | 3991 | '''
Created on 21.01.2014
@author: Dirk Rother
@contact: [email protected]
@license: GPL
@version: 0.1
'''
from urllib2 import Request, urlopen, URLError, HTTPError
class API(object):
'''
This class is a wrapper class for the dogechain.info api.
'''
API_PATH = "http://www.dogechain.info/chain/Dogecoin/"
API_QUERY = API_PATH + "q/"
def addressbalance(self, address):
'''
Amount ever received minus amount ever sent by a given address.
Usage: API_QUERY + addressbalance/ADDRESS
'''
url = self.API_QUERY + 'addressbalance/' + address
return self._getdata(url)
def addresstohash(self, address):
'''
Shows the public key hash encoded in an address.
Usage: API_QUERY + addresstohash/ADDRESS
'''
url = self.API_QUERY + 'addresstohash/' + address
return self._getdata(url)
def checkaddress(self, address):
'''
Checks an address for validity.
Usage: API_QUERY + checkaddress/ADDRESS
'''
url = self.API_QUERY + 'checkaddress/' + address
return self._getdata(url)
def decode_address(self, address):
'''
Shows the version prefix and hash encoded in an address.
Usage: API_QUERY + decode_address/ADDRESS
'''
url = self.API_QUERY + 'decode_address/' + address
return self._getdata(url)
def getblockcount(self):
'''
Shows the current block number.
Usage: API_QUERY + getblockcount
'''
url = self.API_QUERY + 'getblockcount'
return self._getdata(url)
def getdifficulty(self):
'''
Shows the last solved block's difficulty.
Usage: API_QUERY + getdifficulty
'''
url = self.API_QUERY + 'getdifficulty'
return self._getdata(url)
def getreceivedbyaddress(self, address):
'''
Shows the amount ever received from a given address.
(not balance, sends are not subtracted)
Usage: API_QUERY + getreceivedbyaddress/ADDRESS
'''
url = self.API_QUERY + 'getreceivedbyaddress/' + address
return self._getdata(url)
def getsentbyaddress(self, address):
'''
Shows the amount ever sent from a given address.
Usage: API_QUERY + getsentbyaddress/ADDRESS
'''
url = self.API_QUERY + 'getsentbyaddress/' + address
return self._getdata(url)
def hashtoaddress(self, hash):
'''
Shows the address with the given version prefix an hash.
Converts a 160-bit hash and address version to an address.
Usage: API_QUERY + hashtoaddress/HASH
'''
url = self.API_QUERY + 'hashtoaddress/' + hash
return self._getdata(url)
def nethash(self):
'''
Shows statistics about difficulty and network power.
Usage: API_QUERY + nethash
'''
url = self.API_QUERY + 'nethash'
return self._getdata(url)
def totalbc(self):
'''
Shows the amount of currency ever mined.
Usage: API_QUERY + totalbc
'''
url = self.API_QUERY + 'totalbc'
return self._getdata(url)
def transactions(self):
'''
Shows the amount transactions of the last blocks.
Usage: API_QUERY + transactions
'''
url = self.API_QUERY + 'transactions'
return self._getdata(url)
def _getdata(self, url):
'''
Wrapper method
'''
request = Request(url)
try:
response = urlopen(request)
except HTTPError as e:
            print 'The server couldn\'t fulfill the request.'
            print 'Error code: ', e.code
        except URLError as e:
            print 'We failed to reach a server.'
            print 'Reason: ', e.reason
else:
# Everything is fine.
return response.read()
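def _example_usage():
    '''
    Illustrative only, not part of the original wrapper: performs live HTTP
    calls against dogechain.info, so network access is required.
    '''
    api = API()
    return api.getblockcount(), api.getdifficulty()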
| gpl-2.0 | 10,162,344,229,742,784 | 28.783582 | 71 | 0.572288 | false |
RealTimeWeb/Blockpy-Server | controllers/services.py | 1 | 2386 | import logging
from pprint import pprint
from flask_wtf import Form
from wtforms import IntegerField, BooleanField
from flask import Blueprint, send_from_directory
from flask import Flask, redirect, url_for, session, request, jsonify, g,\
make_response, Response, render_template
from werkzeug.utils import secure_filename
from sqlalchemy import Date, cast, func, desc, or_
from main import app
from controllers.helpers import crossdomain
from interaction_logger import StructuredEvent
services = Blueprint('services', __name__, url_prefix='/services')
from controllers.service_libraries import weather as weather_service
@services.route('/weather/', methods=['GET', "POST"])
@services.route('/weather', methods=['GET', 'POST'])
def weather():
function = request.args.get("function", "get_temperature")
city = request.args.get("city", "Blacksburg, VA")
weather_function = getattr(weather_service, function)
return jsonify(data=weather_function(city))
@services.route('/sheets', methods=['GET'])
def sheets():
    # Unfinished endpoint: extracts a spreadsheet id from a Google Sheets
    # URL (see the sample transformation below). Reading the URL from the
    # query string is an assumption; the id extraction itself is still a stub.
    sheet_url = request.args.get('sheet_url', '')
    sheet_id = ''
    if sheet_url.startswith('http'):
        sheet_url.split('/')
    elif sheet_url.startswith('docs'):
        sheet_url.split('/')
# sample:
# https://docs.google.com/spreadsheets/d/1eLbX_5EFvZYc7JOGYF8ATdu5uQeu6OvILNnr4vH3vFI/pubhtml
# =>
# https://spreadsheets.google.com/feeds/list/___/od6/public/basic?alt=json
# https://spreadsheets.google.com/feeds/list/1eLbX_5EFvZYc7JOGYF8ATdu5uQeu6OvILNnr4vH3vFI/od6/public/basic?alt=json
@services.route('/log/', methods=['GET', 'POST', 'OPTIONS'])
@services.route('/log', methods=['GET', 'POST', 'OPTIONS'])
#@crossdomain(origin='*')
def log_event():
user_id = request.form.get('user_id', "")
if user_id == "":
user_id = str(request.remote_addr)
question_id = request.form.get('question_id', "")
event = request.form.get('event', "")
action = request.form.get('action', "")
body = request.form.get('body', "")
external_interactions_logger = logging.getLogger('ExternalInteractions')
external_interactions_logger.info(
StructuredEvent(user_id, question_id, event, action, body)
)
response = make_response('success')
response.headers['Access-Control-Allow-Origin'] = "*"
return response
| mit | -2,807,518,795,514,543,600 | 37.483871 | 119 | 0.687343 | false |
spring01/libPSI | lib/python/grendel/util/units/unit.py | 1 | 21791 | from __future__ import absolute_import
from collections import defaultdict
import math
from numbers import Number, Real
from grendel.util.aliasing import function_alias
from grendel.util.strings import classname
import sys
# Version 3 compatibility
if sys.version_info[0] == 3:
basestring = str
__all__ = [
'Unit',
'isunit', 'is_unit',
'convert', 'convert_units',
'compatible_units', 'iscompatible',
# Unit genres:
'DistanceUnit',
'EnergyUnit',
'AngularUnit',
'ElectricChargeUnit',
'MassUnit',
'TimeUnit'
]
#############
# Utilities #
#############
def isunit(unit):
if isinstance(unit, Unit) or isinstance(unit, CompositeUnit):
return True
else:
return False
is_unit = function_alias('is_unit', isunit)
def plural(unit): # pragma: no cover
if not isunit(unit):
raise TypeError
return unit.__plural__
def convert_units(val, from_unit, to_unit):
if not isunit(from_unit):
raise UnknownUnitError(from_unit)
if not isunit(to_unit):
raise UnknownUnitError(to_unit)
if from_unit == to_unit:
return val
return val * from_unit.to(to_unit)
convert = function_alias('convert', convert_units)
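# Example usage (hedged: the unit classes referenced here are defined later
# in this module, so these calls only work after import time):
#     convert(1.0, Angstrom, Bohr)                  # ~1.8897
#     convert(627.509, KiloCaloriePerMol, Hartree)  # ~1.0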
def compatible_units(unit1, unit2):
try:
unit1.to(unit2)
return True
except IncompatibleUnitsError:
return False
iscompatible = function_alias('iscompatible', compatible_units)
########################
# Metaclasses and such #
########################
class Prefix(object):
""" The prefix for a unit, e.g. centi-, kilo-, mega-, etc.
"""
##############
# Attributes #
##############
in_words = None
abbrev = None
multiplier = None
##################
# Initialization #
##################
def __init__(self, in_words, abbrev, multiplier):
self.in_words = in_words
self.abbrev = abbrev
self.multiplier = multiplier
class Unit(type):
""" Metaclass for a general unit of something.
"""
########################
# Metaclass Attributes #
########################
known_units = []
prefixes = [
Prefix("Yotta", "Y", 1.0e24),
Prefix("Zetta", "Z", 1.0e21),
Prefix("Exa", "E", 1.0e18),
Prefix("Peta", "P", 1.0e15),
Prefix("Tera", "T", 1.0e12),
Prefix("Giga", "G", 1.0e9),
Prefix("Mega", "M", 1.0e6),
Prefix("Kilo", "k", 1.0e3),
Prefix("Hecto", "h", 100.0),
Prefix("Deca", "da", 10.0),
Prefix("Deci", "d", 1.0e-1),
Prefix("Centi", "c", 1.0e-2),
Prefix("Milli", "m", 1.0e-3),
Prefix("Micro", "u", 1.0e-6),
Prefix("Nano", "n", 1.0e-9),
Prefix("Pico", "p", 1.0e-12),
Prefix("Femto", "f", 1.0e-15),
Prefix("Atto", "a", 1.0e-18),
Prefix("Zepto", "z", 1.0e-21),
Prefix("Yocto", "y", 1.0e-24)
]
####################
# Class Attributes #
####################
__plural__ = None
__aliases__ = None
__abbrev__ = None
__prefixed__ = True
############################
# Metaclass Initialization #
############################
def __init__(cls, name, bases, dct):
Unit.known_units.append(name)
if not all(issubclass(base, UnitGenre) for base in bases) or not len(bases) == 1:
raise TypeError("Units must inherit from a single class with the UnitGenre superclass.")
super(Unit, cls).__init__(name, bases, dct)
globals()['__all__'].append(str(cls))
# Automatically create a plural alias for the unit if one is not given
if cls.__plural__ is None:
cls.__plural__ = str(cls) + "s"
if not cls.__plural__ == name:
globals()[cls.__plural__] = cls
globals()['__all__'].append(cls.__plural__)
Unit.known_units.append(cls.__plural__)
# Automatically create prefixed versions of the unit
if cls.__prefixed__:
for prefix in Unit.prefixes:
d = {'prefix': prefix, 'base_unit': cls}
name1 = prefix.in_words + name
pre = PrefixedUnit.__new__(PrefixedUnit, name1, (cls,), d)
globals()[name1] = pre
globals()['__all__'].append(name1)
Unit.known_units.append(pre)
name2 = prefix.in_words + cls.__plural__
globals()[name2] = pre
                globals()['__all__'].append(name2)
# If the name is not CamelCase or UPPERCASE, append uncapitalized versions (e.g. Kilogram as well
# as KiloGram, but not KiloaMU, only KiloAMU)
if not any(letter.isupper() for letter in name[1:]):
name3 = prefix.in_words + name[0].lower() + name[1:]
globals()[name3] = pre
globals()['__all__'].append(name3)
name4 = prefix.in_words + cls.__plural__[0].lower() + cls.__plural__[1:]
globals()[name4] = pre
globals()['__all__'].append(name4)
####################
# Class Properties #
####################
@property
def genre(cls):
return cls.__mro__[1]
@property
def name(cls):
return cls.__name__
#########################
# Special Class Methods #
#########################
def __contains__(self, item):
if isinstance(item, ValueWithUnits):
if item.units == self:
return True
else:
return False
else:
raise TypeError()
#----------------------#
# Comparison Operators #
#----------------------#
def __eq__(cls, other):
try:
return other.to(cls) == 1.0
except IncompatibleUnitsError:
return False
except AttributeError:
# Other doesn't even have a 'to()' method...
return NotImplemented
def __ne__(self, other):
eq_val = self.__eq__(other)
if eq_val is NotImplemented:
return NotImplemented
else:
return not eq_val
#----------------------#
# Arithmetic Operators #
#----------------------#
def __mul__(cls, other):
if isinstance(other, Number):
return ValueWithUnits(other, cls)
elif isinstance(other, Unit):
return CompositeUnit({cls: 1, other: 1})
else:
return NotImplemented
def __rmul__(cls, other):
if isinstance(other, Number):
return ValueWithUnits(other, cls)
else:
return NotImplemented
def __div__(cls, other):
if isinstance(other, Unit):
            return CompositeUnit({cls: 1, other: -1})
else:
return NotImplemented
__truediv__ = __div__
def __rdiv__(cls, other):
if isinstance(other, Number):
return ValueWithUnits(other, CompositeUnit({cls: -1}))
else: # pragma: no cover
return NotImplemented
__rtruediv__ = __rdiv__
def __pow__(cls, power):
if isinstance(power, Real):
return CompositeUnit({cls: power})
else: # pragma: no cover
return NotImplemented
#------------------------#
# Output Representations #
#------------------------#
def __repr__(cls):
return classname(super(Unit, cls).__repr__())
__str__ = __repr__
#################
# Class Methods #
#################
def genre_check(cls, other):
if not issubclass(other, cls.genre):
raise IncompatibleUnitsError(cls, other)
def prefix_factor(cls, other):
other_fact = 1.0
if isinstance(other, PrefixedUnit):
other_fact = other.prefix.multiplier
my_fact = 1.0
if isinstance(cls, PrefixedUnit):
my_fact = cls.prefix.multiplier
return my_fact / other_fact
def to(cls, other):
cls.genre_check(other)
if other is cls:
return 1.0
elif issubclass(other, cls) or issubclass(cls, other):
return cls.prefix_factor(other)
else:
return (1.0 / cls.genre.reference_unit.to(cls)) * cls.genre.reference_unit.to(other)
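    # e.g. Bohr.to(Meter) routes through the genre's reference unit
    # (Angstrom): (1 / Angstrom.to(Bohr)) * Angstrom.to(Meter)
    # ~= (1 / 1.8897) * 1e-10 ~= 5.29e-11, the Bohr radius in meters.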
class PrefixedUnit(Unit):
""" Metaclass for a unit with a prefix, such as a Kilogram, Centimeter, etc.
"""
####################
# Class Attributes #
####################
base_unit = None
prefix = None
############################
# Metaclass Initialization #
############################
def __init__(cls, name, bases, dct):
cls.known_units.append(name)
if not 'to' in dct:
dct['to'] = PrefixedUnit.to
if not all(isinstance(base, Unit) for base in bases) or not len(bases) == 1: # pragma: no cover
raise TypeError("PrefixedUnits must inherit from a single class which is a Unit.")
super(Unit, cls).__init__(name, bases, dct)
@property
def genre(cls):
return cls.base_unit.genre
class UnitGenre(object):
""" Superclass for classes of things that can be measured by units.
For instance, DistanceUnit, AngularUnit, EnergyUnit, etc.
"""
default = None
reference_unit = None
class GenreDefaultDict(defaultdict):
def __missing__(self, key):
return key.genre
#--------------------------------------------------------------------------------#
####################
# Helper functions #
####################
# Can be called as either def_unit_alias(alias, unit) or def_unit_alias(unit, alias) (as long as alias is a str and
# is_unit(unit) is True)
def def_unit_alias(arg1, arg2, plural=True, prefixed=True): # pragma: no cover
alias = None
unit = None
if isinstance(arg1, basestring) and is_unit(arg2):
alias = arg1
unit = arg2
elif isinstance(arg2, basestring) and is_unit(arg1):
alias = arg2
unit = arg1
else:
raise TypeError()
globals()[alias] = unit
globals()['__all__'].append(alias)
my_plural = None
if plural is True:
# Automatically add plural with 's' unless the user specifies a specific plural or if the user specifies 'False'
globals()[alias + 's'] = unit
globals()['__all__'].append(alias + 's')
my_plural = alias + 's'
elif plural is not False and not str(plural) == alias:
my_plural = str(plural)
globals()[my_plural] = unit
globals()['__all__'].append(my_plural)
# Automatically create prefixed versions of the unit alias
if prefixed:
for prefix in Unit.prefixes:
d = {'prefix': prefix, 'base_unit': unit}
name = prefix.in_words + alias
pre = PrefixedUnit.__new__(PrefixedUnit, name, (unit,), d)
PrefixedUnit.__init__(pre, name, (unit,), d)
globals()[name] = pre
globals()['__all__'].append(name)
Unit.known_units.append(pre)
if not plural is False:
name = prefix.in_words + my_plural
globals()[name] = pre
globals()['__all__'].append(name)
if not any(letter.isupper() for letter in alias[1:]):
# If the name is not CamelCase or UPPERCASE, append uncapitalized versions
# (e.g. Kilogram as well as KiloGram, but not KiloaMU, only KiloAMU)
name = prefix.in_words + alias[0].lower() + alias[1:]
globals()[name] = pre
globals()['__all__'].append(name)
if not plural is False:
name = prefix.in_words + my_plural[0].lower() + my_plural[1:]
globals()[name] = pre
globals()['__all__'].append(name)
def def_unit_aliases(unit, *args, **kwargs): # pragma: no cover
for al in args:
alias = str(al)
plural = kwargs.pop(al + "_plural", True)
prefixed = kwargs.pop(al + "_prefixed", True)
def_unit_alias(unit, alias, plural, prefixed)
def def_unit(genre, unit, plural=True, prefixed=True):
d = {} #{'to': Unit.to}
if plural is False: # pragma: no cover
# Define a plural that is the same as the unit to prevent plural from being defined
d['__plural__'] = unit
elif plural is not True:
# When plural is True, use the default plural. Otherwise, define it
d['__plural__'] = str(plural)
new_cls = globals()[unit] = Unit.__new__(Unit, unit, (genre,), d)
new_cls.__prefixed__ = prefixed
Unit.__init__(globals()[unit], unit, (genre,), d)
globals()['__all__'].append(unit)
def def_units(genre, *args, **kwargs): # pragma: no cover
for unit in args:
prefixed = kwargs.pop(unit + "_prefixed", True)
plural = kwargs.pop(unit + "_plural", True)
def_unit(genre, unit, plural, prefixed)
if (unit + "_alias") in kwargs:
if (unit + "_alias_plural") in kwargs:
def_unit_alias(kwargs[unit + "_alias"], eval(unit, globals()), kwargs[unit + "_alias_plural"])
            elif kwargs[unit + "_alias"] + "_alias" in kwargs:
def_unit_alias(kwargs[unit + "_alias"], eval(unit, globals()), kwargs[kwargs[unit + "_alias"] + '_alias'])
else:
def_unit_alias(kwargs[unit + "_alias"], eval(unit, globals()))
elif (unit + "_aliases") in kwargs:
for alias in kwargs[unit + "_aliases"]:
aplural = kwargs.pop(alias + "_plural", True)
aprefixed = kwargs.pop(alias + "_prefixed", prefixed)
def_unit_alias(alias, eval(unit, globals()), aplural, aprefixed)
#--------------------------------------------------------------------------------#
##################
# Distance Units #
##################
class DistanceUnit(UnitGenre):
""" General superclass for all distance units
"""
class Angstrom(DistanceUnit):
__metaclass__ = Unit
@classmethod
def to(cls, other):
cls.genre_check(other)
pf = cls.prefix_factor(other)
if issubclass(other, Bohr):
return pf / BohrRadius.value
elif issubclass(other, Meter):
return 1e-10 * pf
elif issubclass(other, Angstrom):
return pf
else: # pragma: no cover
raise NotImplementedError("Conversion from units " + classname(cls) + " to units " + classname(other) + " is not implemented.")
DistanceUnit.reference_unit = Angstrom
class Bohr(DistanceUnit):
__metaclass__ = Unit
def_unit_alias('AtomicUnitOfDistance', Bohr, plural='AtomicUnitsOfDistance')
class Meter(DistanceUnit):
__metaclass__ = Unit
DistanceUnit.default = Angstrom
#DistanceUnit.default = Bohr
#################
# Angular Units #
#################
class AngularUnit(UnitGenre):
""" General superclass for all angular units
"""
class Degree(AngularUnit):
__metaclass__ = Unit
@classmethod
def to(cls, other):
cls.genre_check(other)
pf = cls.prefix_factor(other)
if issubclass(other, Radian):
return pf * math.pi / 180.0
elif issubclass(other, Degree):
return pf
else: # pragma: no cover
raise NotImplementedError("Conversion from units " + classname(cls) + " to units " + classname(other) + " is not implemented.")
AngularUnit.reference_unit = Degree
class Radian(AngularUnit):
__metaclass__ = Unit
# For now, using default units of Radians causes some unit tests to fail
#AngularUnit.default = Radian
AngularUnit.default = Degree
################
# Energy Units #
################
class EnergyUnit(UnitGenre):
""" General superclass for all energy units
"""
class Joule(EnergyUnit):
__metaclass__ = Unit
@classmethod
def to(cls, other):
cls.genre_check(other)
pf = cls.prefix_factor(other)
if issubclass(other, Joule):
return pf
elif issubclass(other, Hartree):
return pf / 4.35974434e-18
elif issubclass(other, Wavenumbers):
return pf / (PlanckConstant.value * SpeedOfLight.in_units(Centimeters/Second).value)
elif issubclass(other, KiloCaloriePerMol):
return pf * AvogadrosNumber / 1000.0 / 4.184
elif issubclass(other, KiloJoulePerMol):
return pf * AvogadrosNumber / 1000.0
elif issubclass(other, Hertz):
return pf / PlanckConstant.value
else: # pragma: no cover
raise NotImplementedError("Conversion from units " + classname(cls) + " to units " + classname(other) + " is not implemented.")
EnergyUnit.reference_unit = Joule
class Wavenumber(EnergyUnit):
__metaclass__ = Unit
EnergyUnit.default = Wavenumber
# TODO Molar energy unit?
def_units(EnergyUnit,
#'ElectronVolt',
'Hertz',
'Hartree',
'KiloCaloriePerMol',
'KiloJoulePerMol',
#------------------#
Hartree_alias = 'AtomicUnitOfEnergy',
Hartree_alias_plural = 'AtomicUnitsOfEnergy',
#------------------#
KiloCaloriePerMol_prefixed = False, # Don't create prefixes, since e.g. MicroKCalPerMol doesn't make sense
KiloCaloriePerMol_aliases = [
'KiloCaloriePerMole',
'KCalPerMol',
'KcalPerMol',
],
KiloCaloriePerMole_plural = 'KiloCaloriesPerMol',
KcalPerMol_plural = 'KcalsPerMol',
KCalPerMol_plural = 'KCalsPerMol',
#------------------#
KiloJoulePerMol_plural = 'KiloJoulesPerMol',
KiloJoulesPerMol_prefixed = False,
KiloJoulesPerMol_aliases = [
'KJPerMol',
],
KJPerMol_plural = False,
#------------------#
)
##############
# Time Units #
##############
class TimeUnit(UnitGenre):
""" General superclass for all time units
"""
class Second(TimeUnit):
__metaclass__ = Unit
@classmethod
def to(cls, other):
cls.genre_check(other)
pf = cls.prefix_factor(other)
if issubclass(other, Second):
return pf
elif issubclass(other, AtomicUnitOfTime):
return pf / 2.418884326502e-17
elif issubclass(other, Minute):
return pf / 60.0
elif issubclass(other, Hour):
return pf / 3600.0
elif issubclass(other, Day):
return pf / 86400.0
elif issubclass(other, Week):
return pf / 604800.0
elif issubclass(other, Year):
return pf / 31556925.445
elif issubclass(other, Decade):
return pf / (31556925.445 * 10)
elif issubclass(other, Century):
return pf / (31556925.445 * 100)
elif issubclass(other, Millennium):
return pf / (31556925.445 * 1000)
else: # pragma: no cover
raise NotImplementedError("Conversion from units " + classname(cls) + " to units " + classname(other) + " is not implemented.")
TimeUnit.default = Second
TimeUnit.reference_unit = Second
# Just to demonstrate how the process works...
def_units(TimeUnit,
'AtomicUnitOfTime',
'Minute',
'Hour',
'Day',
'Week',
'Year',
'Decade',
'Century',
'Millennium',
AtomicUnitOfTime_plural = "AtomicUnitsOfTime",
Century_plural = "Centuries",
Millennium_plural = 'Millennia')
#########################
# Electric Charge Units #
#########################
class ElectricChargeUnit(UnitGenre):
""" General superclass for all units of electric charge
"""
class Coulomb(ElectricChargeUnit):
__metaclass__ = Unit
@classmethod
def to(cls, other):
cls.genre_check(other)
pf = cls.prefix_factor(other)
if issubclass(other, Coulomb):
return pf
elif issubclass(other, AtomicUnitOfElectricCharge):
return pf / ElementaryCharge.in_units(Coulomb).value
else: # pragma: no cover
raise NotImplementedError("Conversion from units " + classname(cls) + " to units " + classname(other) + " is not implemented.")
ElectricChargeUnit.default = Coulomb
ElectricChargeUnit.reference_unit = Coulomb
def_units(ElectricChargeUnit,
'AtomicUnitOfElectricCharge',
AtomicUnitOfElectricCharge_plural = 'AtomicUnitsOfElectricCharge',
AtomicUnitOfElectricCharge_alias = 'AtomicUnitOfCharge',
AtomicUnitOfElectricCharge_alias_plural = 'AtomicUnitsOfCharge',
)
##############
# Mass Units #
##############
class MassUnit(UnitGenre):
""" General superclass for all units of mass
"""
class Gram(MassUnit):
__metaclass__ = Unit
@classmethod
def to(cls, other):
cls.genre_check(other)
pf = cls.prefix_factor(other)
if issubclass(other, Gram):
return pf
if issubclass(other, AtomicMassUnit):
# NIST
return pf / 1.660538921e-24
# IUPAC
#return pf / 1.6605402e-24
elif issubclass(other, AtomicUnitOfMass):
return pf / ElectronMass.in_units(Gram).value
else: # pragma: no cover
raise NotImplementedError("Conversion from units " + classname(cls) + " to units " + classname(other) + " is not implemented.")
MassUnit.reference_unit = Gram
class AtomicMassUnit(MassUnit):
__metaclass__ = Unit
def_unit_alias('AMU', AtomicMassUnit)
MassUnit.default = AtomicMassUnit
class AtomicUnitOfMass(MassUnit):
__metaclass__ = Unit
__plural__ = 'AtomicUnitsOfMass'
#####################
# Dependent Imports #
#####################
from grendel.util.units.composite import CompositeUnit
from grendel.util.units.errors import IncompatibleUnitsError, UnknownUnitError
from grendel.util.units.value_with_units import ValueWithUnits
from grendel.util.units.physical_constants import ElectronMass, ElementaryCharge, PlanckConstant, SpeedOfLight, AvogadrosNumber, BohrRadius
| gpl-2.0 | -1,983,723,185,309,552,000 | 30.765306 | 139 | 0.556835 | false |
davsebamse/random_testing | selenium_connector.py | 1 | 3011 | # -*- coding: utf-8 -*-
"""
Created on Wed Aug 14 19:31:52 2013
@author: davse
"""
#TODO: Make this class more factory like, eg. get it to accept
#a driver created from the enviroment, instead of creating one itself
from selenium import webdriver
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
import misc
class TreeNode:
parent = None
children = []
element = None
id = None
text = None
tag_name = None
location = None
size = None
    def __init__(self, element, parent):
        self.element = element
        self.parent = parent
        # per-instance list; a class-level default list would be shared
        # between all TreeNode instances
        self.children = []
def store_element_in_node(self):
self.id = self.element.id
self.text = self.element.text
self.tag_name = self.element.tag_name
self.location = self.element.location
self.size = self.element.size
def __str__(self):
return self.element.tag_name # + ' ' + self.element.text
class SeleniumConnector:
def __init__(self, drivername='CHROME', localserver=False, host='127.0.0.1', port=4444, debug=1):
if debug == 1:
misc.enable_debug()
desired_capabilities = None
if (drivername.upper() == 'CHROME'):
desired_capabilities = DesiredCapabilities.CHROME
elif (drivername.upper() == 'FIREFOX'):
desired_capabilities = DesiredCapabilities.FIREFOX
        host = 'http://' + host + ':' + str(port) + '/wd/hub'
misc.debug_writeline('Connecting to {0} with desiredcapabilities {1}'.format(host, desired_capabilities))
self.driver = webdriver.Remote(
command_executor=host,
desired_capabilities=desired_capabilities)
def goto_page(self, page):
self.driver.get(page)
def quit(self):
self.driver.quit()
class SeleniumWrapper:
def __init__(self, connector):
self.connector = connector
def get_root_element_in_page(self):
return self.connector.driver.find_element_by_xpath('*')
def build_dom_tree(self, parent):
sub_elements = list(parent.element.find_elements_by_xpath('*'))
if len(sub_elements) == 0:
return
for element in sub_elements:
            tmp = TreeNode(element, parent)
            parent.children.append(tmp)
self.build_dom_tree(tmp)
def get_dom_tree(self):
root = self.get_root_element_in_page()
root_tree_node = TreeNode(root, None)
self.build_dom_tree(root_tree_node)
return root_tree_node
def print_tree(treenode, level=0):
print ' ' * (level * 3), str(treenode)
if len(treenode.children) == 0:
return
for c in treenode.children:
print_tree(c, level + 1)
def test_if_element_is_clickable(element):
return element.is_enabled() and element.is_displayed()
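# Minimal usage sketch (an assumption, not part of the original module; the
# URL and a locally running Selenium server are illustrative):
#     connector = SeleniumConnector(drivername='CHROME')
#     connector.goto_page('http://www.example.org')
#     tree = SeleniumWrapper(connector).get_dom_tree()
#     print_tree(tree)
#     connector.quit()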
def domtree_to_list(treenode, acc=None):
    # avoid the mutable-default pitfall: a shared default list would keep
    # accumulating nodes across separate calls
    if acc is None:
        acc = []
    acc.append(treenode)
    for c in treenode.children:
        domtree_to_list(c, acc)
    return acc | mit | 3,968,769,667,942,308,400 | 28.242718 | 113 | 0.623049 | false
EggInTheShell/TodoCounting | blur_image.py | 1 | 2626 | import numpy as np
from PIL import Image, ImageFilter
import matplotlib.pyplot as plt
import pandas as pd
from os.path import join, relpath
import glob, os
from scipy.ndimage.filters import gaussian_filter
import pickle
from settings import *
from data_utils import *
import time
startTime = time.time()
data_folder = DATA_DIR + 'patches_bool/'
data_path_list = glob.glob(data_folder+'*traindata_reduced.pkl')
# Configure the blur
# TODO: take the max over each dot's Gaussian instead of summing them
#       -> peaks do not vanish where dots overlap (cf. OpenPose)
sigma = 15
sample = np.zeros([99,99], dtype=np.float32)
sample[44,44] = 1
sample = gaussian_filter(sample, sigma=sigma)
# plt.imshow(sample)
# plt.gray()
# plt.show()
peak = np.max(sample)
# print(peak)
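# A minimal sketch of the TODO above (an assumption: this helper is not part
# of the original pipeline and is never called here). Merging per-dot
# Gaussians with np.maximum keeps every peak at full height instead of
# letting overlapping dots sum up, as in OpenPose-style confidence maps.
def blur_channel_max(channel, sigma=15):
    out = np.zeros(channel.shape, dtype=np.float32)
    for y, x in zip(*np.nonzero(channel)):
        single = np.zeros(channel.shape, dtype=np.float32)
        single[y, x] = 1.0  # one Gaussian per annotated dot
        out = np.maximum(out, gaussian_filter(single, sigma=sigma))
    peak_value = out.max()
    return out / peak_value if peak_value > 0 else out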
for path in data_path_list:
id = int(os.path.basename(path)[:-len('traindata_reduced.pkl')])
print('processing: ', id)
with open(path, mode='rb') as f:
dict = pickle.load(f)
slice = 1000
images = dict['image'][:slice]
labels = dict['label'][:slice]
labels_blurred = np.zeros([slice,labels.shape[1], labels.shape[2], 5], dtype=np.float32)
# print('labels shape', labels.shape)
for i in range(labels.shape[0]):
print(i)
label = labels[i].astype(np.float32)
# print(np.max(label))
# print(label.shape)
        # every channel is filled by the per-channel loop below, so no
        # separate full-volume blur (which also hardcoded sigma=15) is needed
        blurred = np.zeros_like(label, dtype=np.float32)
for ch in range(label.shape[2]):
blurred[:,:,ch] = gaussian_filter(label[:,:,ch], sigma=sigma)
# print(np.max(blurred))
labels_blurred[i] = blurred
# labels_blurred = labels_blurred/peak/2
print('label peak ', np.max(labels_blurred))
labels_blurred = np.minimum(1, labels_blurred)
    # Visualization
# for i in range(slice):
# plt.subplot(2,3,1)
# plt.imshow(images[i])
# plt.subplot(2,3,2)
# plt.imshow(labels_blurred[i,:,:,0])
# plt.gray()
# plt.subplot(2,3,3)
# plt.imshow(labels_blurred[i,:,:,1])
# plt.gray()
# plt.subplot(2,3,4)
# plt.imshow(labels_blurred[i,:,:,2])
# plt.gray()
# plt.subplot(2,3,5)
# plt.imshow(labels_blurred[i,:,:,3])
# plt.gray()
# plt.subplot(2,3,6)
# plt.imshow(labels_blurred[i,:,:,4])
# plt.gray()
# plt.show()
    # Save
dict = {'image': images, 'label': labels_blurred}
savepath = DATA_DIR + str(id) + '_train_blurred.pkl'
with open(savepath, mode='wb') as f:
pickle.dump(dict, f)
print('saved: ', savepath, time.time()-startTime) | mit | 7,488,384,135,458,226,000 | 30.182927 | 92 | 0.605243 | false |
krausedj/TaxCruncher | BoaCCParser.py | 1 | 2009 |
import collections
import cfg
parse_files = cfg.parse_files
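# Expected layout of cfg.parse_files (an assumption inferred from the loop
# below; file names and column bounds are illustrative):
#     parse_files = {
#         'statement_jan.txt': {
#             'ParseRanges': [(5, 42)],  # 1-indexed first/last line pairs
#             'Columns': {'TransDate': (1, 9), 'PostDate': (9, 17),
#                         'Business': (17, 43), 'Location': (43, 57),
#                         'RefNum': (57, 66), 'ActNum': (66, 71),
#                         'Amount': (71, 80)},  # 1-indexed column spans
#         },
#     }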
out_file = open(cfg.boa_cc_outfile, 'w')
data_csv = 'Filename,Transaction Date,Post Date,Business,Location,Reference Number,Account Number, Amount\n'
out_file.write(data_csv)
for file in sorted(parse_files):
with open(file, encoding='cp1252') as f:
lines = f.readlines()
for parse_range in parse_files[file]['ParseRanges']:
colm_info = parse_files[file]['Columns']
for parsed_line in lines[parse_range[0]-1:parse_range[1]]:
if parsed_line not in ('','\n'):
data_TransDate = parsed_line[colm_info['TransDate'][0]-1:colm_info['TransDate'][1]-1].strip()
data_PostDate = parsed_line[colm_info['PostDate'][0]-1:colm_info['PostDate'][1]-1].strip()
data_Business = parsed_line[colm_info['Business'][0]-1:colm_info['Business'][1]-1].strip()
data_Location = parsed_line[colm_info['Location'][0]-1:colm_info['Location'][1]-1].strip()
data_RefNum = parsed_line[colm_info['RefNum'][0]-1:colm_info['RefNum'][1]-1].strip()
data_ActNum = parsed_line[colm_info['ActNum'][0]-1:colm_info['ActNum'][1]-1].strip()
data_Amount = parsed_line[colm_info['Amount'][0]-1:colm_info['Amount'][1]-1].strip()
print(parsed_line)
print('Transation Date: {0}'.format(data_TransDate))
print('Post Date: {0}'.format(data_PostDate))
print('Business: {0}'.format(data_Business))
print('Location: {0}'.format(data_Location))
print('Reference Number: {0}'.format(data_RefNum))
print('Account Number: {0}'.format(data_ActNum))
print('Amount: {0}'.format(data_Amount))
data_csv = '{0},{1},{2},{3},{4},{5},{6},{7}\n'.format(file,data_TransDate,data_PostDate,data_Business,data_Location,data_RefNum,data_ActNum,data_Amount)
out_file.write(data_csv)
out_file.close()
| mit | -6,896,914,806,560,751,000 | 53.297297 | 168 | 0.594823 | false |
nikitanovosibirsk/district42 | district42/_props.py | 1 | 1280 | from typing import Any, Mapping, TypeVar
from niltype import Nil, Nilable
__all__ = ("Props", "PropsType",)
PropsType = TypeVar("PropsType", bound="Props")
class Props:
def __init__(self, registry: Nilable[Mapping[str, Any]] = Nil) -> None:
self._registry = registry if (registry is not Nil) else {}
def get(self, name: str, default: Nilable[Any] = Nil) -> Nilable[Any]:
return self._registry.get(name, default)
def set(self: PropsType, name: str, value: Any) -> PropsType:
registry = {**self._registry, name: value}
return self.__class__(registry)
def update(self: PropsType, **keys: Any) -> PropsType:
registry = {**self._registry, **keys}
return self.__class__(registry)
def __repr__(self) -> str:
return f"<{self.__class__.__name__} {self._registry}>"
def __eq__(self, other: Any) -> bool:
if not isinstance(other, self.__class__):
return False
for key, val in self._registry.items():
other_val = other.get(key)
if val != other_val:
return False
for key, other_val in other._registry.items():
val = self.get(key)
if other_val != val:
return False
return True
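# Minimal usage sketch (an assumption, not part of the original module):
#     props = Props().set("min_length", 3).update(max_length=10)
#     props.get("min_length")       # -> 3
#     props.get("missing", None)    # -> None (explicit default; Nil otherwise)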
| mit | 8,434,861,864,708,869,000 | 29.47619 | 75 | 0.56875 | false |
Kriegspiel/ks-python-api | kriegspiel_api_server/api/views/game.py | 1 | 1249 | # -*- coding: utf-8 -*-
from django.db import transaction
from api.views.base import AuthenticatedApiView, ApiView
from api.response import ApiResponse
from api.serializers.game import GameSerializer
from api import exceptions
from kriegspiel.models import Game, Move
class GamesView(AuthenticatedApiView):
def get(self, request):
"""
List all games.
"""
return ApiResponse()
def post(self, request):
"""
Create a new game.
"""
input_serializer = GameSerializer().load_data(request.POST)
game = Game.objects.create(
created_by=request.user,
name=input_serializer['name'],
white=input_serializer.get('white'),
black=input_serializer.get('black'),
)
output_serializer, errors = GameSerializer().dump(game)
return ApiResponse(data=output_serializer)
class TurnView(AuthenticatedApiView):
def post(self, request, game_id):
game = Game.objects.filter(id=game_id).first()
if game is None or request.user.id not in [game.white_id, game.black_id]:
raise exceptions.NotFound()
with transaction.atomic():
pass # todo: validate move, save it to db | mit | -8,627,306,668,786,321,000 | 28.761905 | 81 | 0.639712 | false |
funkyfuture/inxs | tests/test_lib.py | 1 | 5519 | from itertools import product
from types import SimpleNamespace
from delb import Document, new_tag_node, register_namespace
from pytest import mark, raises
from inxs import lib, Ref, Rule, Transformation
def test_add_html_classes():
doc = Document("<html><body/></html>")
transformation = Transformation(Rule("body", lib.add_html_classes("transformed")))
result = transformation(doc).root
assert result[0].attributes["class"] == "transformed"
doc = Document('<html><body class="loaded" /></html>')
result = transformation(doc).root
assert all(x in result[0].attributes["class"] for x in ("transformed", "loaded"))
transformation = Transformation(
Rule("body", lib.add_html_classes("transformed", "and_something_else"))
)
result = transformation(doc).root
assert all(
x in result[0].attributes["class"]
for x in ("and_something_else", "loaded", "transformed")
)
transformation = Transformation(
Rule("body", lib.add_html_classes(Ref("html_classes"))),
context={"html_classes": ["transformed", "and_something_else"]},
)
result = transformation(doc).root
assert all(
x in result[0].attributes["class"]
for x in ("and_something_else", "loaded", "transformed")
)
def test_clear_attributes():
element = new_tag_node("root", attributes={"foo": "bar"})
lib.clear_attributes(element, None)
assert element.attributes == {}
def test_concatenate():
transformation = SimpleNamespace(_available_symbols={"foo": "bar"})
assert lib.concatenate("foo", Ref("foo"))(transformation) == "foobar"
def test_get_variable():
assert lib.get_variable("foo")(SimpleNamespace(foo="bar")) == "bar"
def test_has_matching_text():
node = Document("<song>Desmond Dekker - Shanty Town</song>").root
assert lib.has_matching_text(".* - .*")(node, None)
assert not lib.has_matching_text(".*007.*")(node, None)
def test_insert_fa_icon():
document = Document("<root><a/></root>")
handler = lib.insert_fontawesome_icon("arrow-left", "after", spin=True)
handler(document.root[0])
assert str(document) == '<root><a/><i class="fas fa-arrow-left fa-spin"/></root>'
def test_join_to_string():
transformation = SimpleNamespace(
_available_symbols={"previous_result": ["la", "la", "la"]}
)
assert lib.join_to_string(" ")(transformation) == "la la la"
def test_make_element():
root = new_tag_node("root")
handler = lib.make_node(local_name="foo", namespace="http://bar.org")
transformation = SimpleNamespace(states=SimpleNamespace(root=root))
assert handler(root, transformation).qualified_name == "{http://bar.org}foo"
def test_pop_attribute():
node = new_tag_node("x", attributes={"y": "z"})
handler = lib.pop_attribute("y")
result = handler(node)
assert result == "z"
assert "y" not in node.attributes
def test_pop_attributes():
node = new_tag_node("x", attributes={"x": "0", "y": "1"})
assert lib.pop_attributes("x", "y")(node) == {"x": "0", "y": "1"}
node = new_tag_node("x", attributes={"x": "0"})
assert lib.pop_attributes("x", "y", ignore_missing=True)(node) == {"x": "0"}
node = new_tag_node("x", {"x": "0"})
with raises(KeyError):
lib.pop_attributes("x", "y")(node)
@mark.parametrize(
"keep_children,preserve_text,clear_ref", tuple(product((True, False), repeat=3))
)
def test_remove_elements(keep_children, preserve_text, clear_ref):
root = Document("<root><a>foo<b/></a></root>").root
trash_bin = [root.first_child]
transformation = SimpleNamespace(
_available_symbols={"trashbin": trash_bin},
states=SimpleNamespace(previous_result=None),
)
lib.remove_nodes(
"trashbin",
keep_children=keep_children,
preserve_text=preserve_text,
clear_ref=clear_ref,
)(transformation)
assert not root.css_select("a")
assert keep_children == bool(root.css_select("b"))
assert preserve_text == (root.full_text == "foo")
assert clear_ref == (not bool(trash_bin)), (clear_ref, trash_bin)
def test_rename_attributes():
element = new_tag_node("x", attributes={"x": "0", "y": "1"})
lib.rename_attributes({"x": "a", "y": "b"})(element)
assert element.attributes == {"a": "0", "b": "1"}
def test_set_attribute():
element = new_tag_node("x")
lib.set_attribute("y", "z")(element, None)
assert element.attributes == {"y": "z"}
def test_set_text():
node = new_tag_node("pre")
transformation = Transformation(
lib.put_variable("x", "Hello world."), Rule("/", lib.set_text(Ref("x")))
)
assert str(transformation(node)) == "<pre>Hello world.</pre>"
@mark.parametrize(
"namespace,expected", ((None, "rosa"), ("spartakus", "{spartakus}rosa"))
)
def test_set_localname(namespace, expected):
node = new_tag_node("karl", namespace=namespace)
lib.set_localname("rosa")(node, None)
assert node.qualified_name == expected
def test_strip_attributes():
element = new_tag_node("root", attributes={"a": "a", "b": "b"})
lib.remove_attributes("b")(element, None)
assert element.attributes == {"a": "a"}
def test_strip_namespace():
namespace = "http://www.example.org/ns/"
register_namespace("x", namespace)
root = new_tag_node("div", namespace=namespace)
transformation = Transformation(Rule(namespace, lib.remove_namespace))
result = transformation(root)
assert result.qualified_name == "div"
| agpl-3.0 | 506,960,185,487,367,900 | 31.087209 | 86 | 0.635622 | false |
tongxindao/shiyanlou | shiyanlou_cs803/my_blog/my_blog/settings.py | 1 | 3126 | """
Django settings for my_blog project.
Generated by 'django-admin startproject' using Django 1.11.4.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '9z)b3%mhxoilgs&ga@950naj*@v!r)+!1e0%58hs^j(q^=^i61'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'article',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'my_blog.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'my_blog.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'zh-hans'
TIME_ZONE = 'Asia/Shanghai'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
| apache-2.0 | 7,739,922,912,703,766,000 | 24.834711 | 91 | 0.684901 | false |
msimet/Stile | stile/treecorr_utils.py | 1 | 5553 | """
treecorr_utils.py: Contains elements of Stile needed to interface with Mike Jarvis's TreeCorr
program.
"""
import numpy
from . import file_io
import treecorr
from treecorr.corr2 import corr2_valid_params
def Parser():
import argparse
    p = argparse.ArgumentParser()
p.add_argument('--file_type',
help="File type (ASCII or FITS)",
dest='file_type')
p.add_argument('--delimiter',
help="ASCII file column delimiter",
dest='delimiter')
p.add_argument('--comment_marker',
help="ASCII file comment-line marker",
dest='comment_marker')
p.add_argument('--first_row',
help="First row of the file(s) to be considered",
dest='first_row')
p.add_argument('--last_row',
help="Last row of the file(s) to be considered",
dest='last_row')
p.add_argument('--x_units',
help="X-column units (radians, hours, degrees, arcmin, arcsec) -- only allowed "+
"by certain DataHandlers",
dest='x_units')
p.add_argument('--y_units',
help="Y-column units (radians, hours, degrees, arcmin, arcsec) -- only allowed "+
"by certain DataHandlers",
dest='y_units')
p.add_argument('--ra_units',
help="RA-column units (radians, hours, degrees, arcmin, arcsec) -- only "+
"allowed by certain DataHandlers",
dest='ra_units')
p.add_argument('--dec_units',
help="dec-column units (radians, hours, degrees, arcmin, arcsec) -- only "+
"allowed by certain DataHandlers",
dest='dec_units')
p.add_argument('--flip_g1',
help="Flip the sign of g1 [default: False]",
dest='flip_g1', default=False)
p.add_argument('--flip_g2',
help="Flip the sign of g2 [default: False]",
dest='flip_g2', default=False)
p.add_argument('--min_sep',
help="Minimum separation for the TreeCorr correlation functions",
dest='min_sep')
p.add_argument('--max_sep',
help="Maximum separation for the TreeCorr correlation functions",
dest='max_sep')
p.add_argument('--nbins',
help="Number of bins for the TreeCorr correlation functions",
dest='nbins')
p.add_argument('--bin_size',
help="Bin width for the TreeCorr correlation functions",
dest='bin_size')
p.add_argument('--sep_units',
help="Units for the max_sep/min_sep/bin_size arguments for the TreeCorr "+
"correlation functions",
dest='sep_units')
p.add_argument('--bin_slop',
help="A parameter relating to accuracy of the TreeCorr bins--changing is not "+
"recommended",
dest='bin_slop')
p.add_argument('-v', '--verbose',
help="Level of verbosity",
dest='verbose')
p.add_argument('--num_threads',
help='Number of threads (TreeCorr) or multiprocessing.Pool processors '+
'(Stile) to use; default is to automatically determine',
dest='num_threads')
p.add_argument('--split_method',
help="One of 'mean', 'median', or 'middle', directing TreeCorr how to split the "
"tree into child nodes. [default: 'mean']",
dest='split_method')
return p
def ReadTreeCorrResultsFile(file_name):
"""
Read in the given ``file_name``. Cast it into a formatted numpy array with the appropriate
fields and return it.
:param file_name: The location of an output file from TreeCorr.
:returns: A numpy array corresponding to the data in ``file_name``.
"""
from . import stile_utils
output = file_io.ReadASCIITable(file_name, comments='#')
if not len(output):
raise RuntimeError('File %s (supposedly an output from TreeCorr) is empty.'%file_name)
# Now, the first line of the TreeCorr output file is of the form:
# "# col1 . col2 . col3 [...]"
# so we can get the proper field names by reading the first line of the file and processing it.
with open(file_name) as f:
fields = f.readline().split()
fields = fields[1:]
fields = [field for field in fields if field != '.']
return stile_utils.FormatArray(output, fields=fields)
def PickTreeCorrKeys(input_dict):
"""
Take an ``input_dict``, harvest the kwargs you'll need for TreeCorr, and return a dict
containing these values. This is useful if you have a parameters dict that contains some things
TreeCorr might want, but some other keys that shouldn't be used by it.
:param input_dict: A dict containing some (key, value) pairs that apply to TreeCorr.
:returns: A dict containing the (key, value) pairs from input_dict that apply to
TreeCorr.
"""
if not input_dict:
return {}
if 'treecorr_kwargs' in input_dict:
treecorr_dict = input_dict['treecorr_kwargs']
else:
treecorr_dict = {}
for key in corr2_valid_params:
if key in input_dict:
treecorr_dict[key] = input_dict[key]
return treecorr_dict
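# Minimal usage sketch (an assumption; 'plot_style' stands in for any key
# that corr2 does not recognize and is therefore dropped):
#     PickTreeCorrKeys({'min_sep': 1.0, 'max_sep': 100.0, 'plot_style': 'x'})
#     # -> {'min_sep': 1.0, 'max_sep': 100.0}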
| bsd-3-clause | 957,391,640,524,652,000 | 42.724409 | 100 | 0.563299 | false |
GoogleCloudPlatform/solutions-google-compute-engine-cluster-for-hadoop | sample/shortest-to-longest-mapper.py | 1 | 1113 | #!/usr/bin/env python
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Mapper sample.
The mapper takes arbitrary text as input.
With the corresponding reducer, the MapReduce task counts the
occurrences of each word in the original text.
The output is sorted by the length of the word, and then in alphabetical
order if the length of the word is the same.
"""
import re
import sys
word_pattern = re.compile('[a-z]+')
for line in sys.stdin:
for match in word_pattern.finditer(line.lower()):
word = match.group()
print '%03d:%s\t%s' % (len(word), word, 1)
| apache-2.0 | -424,906,760,193,485,000 | 31.735294 | 74 | 0.740341 | false |
bluegenes/MakeMyTranscriptome | scripts/expression.py | 1 | 11582 | import argparse
import os
from os.path import join, dirname, basename
import sys
from tasks_v2 import Supervisor, Task
import functions_general as fg
import functions_annotater as fan
import functions_expression as fex
import assembler as assemb
salmon_naming = 'salmon'
express_naming = 'express'
intersect_naming = 'intersect'
def gen_salmon_supervisor(opc, fastq1,fastq2,paired_names,unpaired,unpaired_names,assembly_path,assembly_name,gene_trans_map,sample_info,model,out_dir,cpu_cap, deps):
salmon_tasks = []
salmon_dir = fg.make_dir_task(os.path.join(out_dir,'salmon'))
out_dir = salmon_dir.targets[0]
build_salmon = fex.build_salmon_task(opc, assembly_path, assembly_name, out_dir,fg.round_div(cpu_cap, 2),[salmon_dir])
deps = deps + [build_salmon] #, salmon_gene_map]
salmon_trans_gene_map = ''
if len(gene_trans_map) > 0:
salmon_gene_map = fex.salmon_gene_map_task(opc,out_dir,assembly_name,gene_trans_map,[salmon_dir])
salmon_trans_gene_map = salmon_gene_map.targets[0]
deps = deps + [salmon_gene_map]
for i in range(len(fastq1)):
#filename = '_'.join([paired_names[i],salmon_naming,assembly_name])
        filename = paired_names[i]
salmon = fex.salmon_task(opc, build_salmon.targets[0],fastq1[i],fastq2[i],filename, salmon_trans_gene_map,out_dir,fg.round_div(cpu_cap,2),deps)
salmon_tasks.append(salmon)
for i in range(len(unpaired)):
#filename = '_'.join([unpaired_names[i],salmon_naming,assembly_name])
        filename = unpaired_names[i]
salmon = fex.salmon_unpaired_task(opc, build_salmon.targets[0],unpaired[i],filename,salmon_trans_gene_map,out_dir,fg.round_div(cpu_cap,2),deps)
salmon_tasks.append(salmon)
transcriptName = assembly_name #'_'.join([assembly_name,salmon_naming])
geneName = assembly_name + '_gene' #'_'.join([assembly_name,salmon_naming,'gene'])
counts_to_table_salmon=fex.counts_to_table_task(opc, assembly_name,gene_trans_map,out_dir,[t.targets[0] for t in salmon_tasks],transcriptName,'--salmon',salmon_tasks)
deseq2_salmon = fex.deseq2_task(opc, assembly_name,out_dir,counts_to_table_salmon.targets[0],sample_info,transcriptName,model,[counts_to_table_salmon])
deseq2_salmon_gene = fex.deseq2_task(opc, assembly_name,out_dir,counts_to_table_salmon.targets[1],sample_info,geneName,model,[counts_to_table_salmon])
    salmon_tasks = [salmon_dir, build_salmon, counts_to_table_salmon, deseq2_salmon, deseq2_salmon_gene] + salmon_tasks
    if len(gene_trans_map) > 0:
        # salmon_gene_map is only defined when a gene_trans_map was supplied
        salmon_tasks.append(salmon_gene_map)
    return Supervisor(tasks=salmon_tasks)
def gen_express_supervisor(opc,fastq1,fastq2,paired_names,unpaired,unpaired_names,assembly_path,assembly_name,bowtie2_index,gene_trans_map,sample_info,model,out_dir,cpu_cap,deps):
express_tasks,bowtie_e_tasks = [],[]
express_dir = fg.make_dir_task(os.path.join(out_dir,'express'))
out_dir = express_dir.targets[0]
for i in range(len(fastq1)):
filename = paired_names[i] #'_'.join([paired_names[i],express_naming,assembly_name])
#filename = '_'.join([paired_names[i],express_naming,assembly_name])
bowtie_e = fex.bowtie2_task(opc, bowtie2_index,out_dir,fastq1[i],fastq2[i],filename,0,fg.round_div(cpu_cap,2),deps)
express = fex.express_task(opc, bowtie2_index,assembly_path,out_dir,paired_names[i],bowtie_e.targets[0],[bowtie_e])
bowtie_e_tasks.append(bowtie_e)
express_tasks.append(express)
for i in range(len(unpaired)):
filename = unpaired_names[i] #'_'.join([unpaired_names[i],express_naming,assembly_name])
bowtie_e = fex.bowtie2_unpaired_task(opc, bowtie2_index,out_dir,unpaired[i],filename,0,fg.round_div(cpu_cap,2),deps)
bowtie_e_tasks.append(bowtie_e)
express = fex.express_task(opc, bowtie2_index,assembly_path,out_dir,unpaired_names[i],bowtie_e.targets[0],[bowtie_e])
express_tasks.append(express)
transcriptName = assembly_name #'_'.join([assembly_name,express_naming])
geneName = assembly_name + '_gene' #'_'.join([assembly_name,express_naming,'gene'])
counts_to_table_express = fex.counts_to_table_task(opc, assembly_name,gene_trans_map,out_dir,[t.targets[0] for t in express_tasks],transcriptName,'--eXpress',express_tasks)
deseq2_express = fex.deseq2_task(opc, assembly_name,out_dir,counts_to_table_express.targets[0],sample_info,transcriptName,model,[counts_to_table_express])
deseq2_express_gene = fex.deseq2_task(opc, assembly_name,out_dir,counts_to_table_express.targets[1],sample_info,geneName,model,[counts_to_table_express])
e_tasks = [express_dir,counts_to_table_express,deseq2_express,deseq2_express_gene]+bowtie_e_tasks+express_tasks
return Supervisor(tasks = e_tasks)
def gen_rapclust_supervisor(opc,fastq1,fastq2,paired_names,unpaired,unpaired_names,assembly_path,assembly_name,bowtie2_index,gene_trans_map,sample_info,model,out_dir,cpu_cap,deps):
rc_tasks,bowtie_rc_tasks = [],[]
rc_dir = fg.make_dir_task(os.path.join(out_dir,'rapclust_bt2'))
out_dir = rc_dir.targets[0]
for i in range(len(fastq1)):
filename = paired_names[i] #'_'.join([paired_names[i],express_naming,assembly_name])
#filename = '_'.join([paired_names[i],express_naming,assembly_name])
bowtie_rc = fex.bowtie2_task(opc, bowtie2_index,out_dir,fastq1[i],fastq2[i],filename,2,fg.round_div(cpu_cap,2),deps)
# express = fex.express_task(opc, bowtie2_index,assembly_path,out_dir,paired_names[i],bowtie_e.targets[0],[bowtie_e])
bowtie_rc_tasks.append(bowtie_rc)
# express_tasks.append(express)
for i in range(len(unpaired)):
filename = unpaired_names[i] #'_'.join([unpaired_names[i],express_naming,assembly_name])
bowtie_rcU = fex.bowtie2_unpaired_task(opc, bowtie2_index,out_dir,unpaired[i],filename,2,fg.round_div(cpu_cap,2),deps)
bowtie_rc_tasks.append(bowtie_rcU)
# express = fex.express_task(opc, bowtie2_index,assembly_path,out_dir,unpaired_names[i],bowtie_e.targets[0],[bowtie_e])
# express_tasks.append(express)
# transcriptName = assembly_name #'_'.join([assembly_name,express_naming])
# geneName = assembly_name + '_gene' #'_'.join([assembly_name,express_naming,'gene'])
# counts_to_table_express = fex.counts_to_table_task(opc, assembly_name,gene_trans_map,out_dir,[t.targets[0] for t in express_tasks],transcriptName,'--eXpress',express_tasks)
# deseq2_express = fex.deseq2_task(opc, assembly_name,out_dir,counts_to_table_express.targets[0],sample_info,transcriptName,model,[counts_to_table_express])
# deseq2_express_gene = fex.deseq2_task(opc, assembly_name,out_dir,counts_to_table_express.targets[1],sample_info,geneName,model,[counts_to_table_express])
rc_tasks = [rc_dir]+bowtie_rc_tasks+ rc_tasks
return Supervisor(tasks = rc_tasks)
def gen_intersect_supervisor(opc,fq1,fq2,paired_names,unpaired,unpaired_names,assembly_path,assembly_name,bowtie2_index,gene_trans_map,sample_info,model,out_dir,cpu_cap, deps):
intersect_tasks,bowtie_i_tasks,sam_sort_tasks = [],[],[]
intersect_dir = fg.make_dir_task(os.path.join(out_dir,'intersectBed'))
out_dir = intersect_dir.targets[0]
deps.append(intersect_dir)
fasta_to_bed = fan.assembly_to_bed_task(opc, assembly_path, out_dir,[intersect_dir])
for i in range(len(fq1)):
filename = paired_names[i] #'_'.join([paired_names[i],intersect_naming,assembly_name])
#filename = '_'.join([paired_names[i],intersect_naming,assembly_name])
bowtie_i = fex.bowtie2_task(opc, bowtie2_index,out_dir,fq1[i],fq2[i],filename,1,fg.round_div(cpu_cap,2),deps)
sorted_name = filename + '_sorted'
sam_sort = fex.sam_sort_task(opc, out_dir,bowtie_i.targets[0],sorted_name,[bowtie_i])
intersect_bed = fex.intersect_bed_task(opc, out_dir,sam_sort.targets[0],fasta_to_bed.targets[0],paired_names[i],[sam_sort,fasta_to_bed])
bowtie_i_tasks.append(bowtie_i)
sam_sort_tasks.append(sam_sort)
intersect_tasks.append(intersect_bed)
for i in range(len(unpaired)):
filename = unpaired_names[i] #'_'.join([unpaired_names[i],intersect_naming,assembly_name])
bowtie_i = fex.bowtie2_unpaired_task(opc, bowtie2_index,out_dir,unpaired[i],filename,1,fg.round_div(cpu_cap,2),deps)
bowtie_i_tasks.append(bowtie_i)
sorted_name = filename + '_sorted'
sam_sort = fex.sam_sort_task(opc, out_dir,bowtie_i.targets[0],sorted_name,[bowtie_i])
sam_sort_tasks.append(sam_sort)
intersect_bed = fex.intersect_bed_task(opc, out_dir,sam_sort.targets[0],fasta_to_bed.targets[0],unpaired_names[i],[sam_sort,fasta_to_bed])
intersect_tasks.append(intersect_bed)
transcriptName = assembly_name #'_'.join([assembly_name,express_naming])
geneName = assembly_name + '_gene' #'_'.join([assembly_name,express_naming,'gene'])
counts_to_table_intersect=fex.counts_to_table_task(opc, assembly_name,gene_trans_map,out_dir,[t.targets[0] for t in intersect_tasks],transcriptName,'',intersect_tasks)
deseq2_intersect = fex.deseq2_task(opc, assembly_name,out_dir,counts_to_table_intersect.targets[0],sample_info,transcriptName,model,[counts_to_table_intersect])
deseq2_intersect_gene = fex.deseq2_task(opc, assembly_name,out_dir,counts_to_table_intersect.targets[1],sample_info,geneName,model,[counts_to_table_intersect])
i_tasks = [intersect_dir,fasta_to_bed,counts_to_table_intersect,deseq2_intersect, deseq2_intersect_gene]+bowtie_i_tasks+sam_sort_tasks+intersect_tasks
return Supervisor(tasks=i_tasks)
def gen_expression_supervisor(opc, dbs, fastq1,fastq2,paired_names,unpaired,unpaired_names,cpu,sample_info,model,gene_trans_map,dependency_set,assembly_name, assembly_path, out_dir,run_salmon=True,run_express=False,run_intersectbed=False,run_rapclust=False):
all_tasks = []
deps = []
trim_reads = False
if trim_reads:
trimmomatic_flag = True
rmdup = False
truncate_opt = False
trim_tasks,fastq1,fastq2,unpaired=assemb.gen_trimming_supervisor(opc,out_dir,fastq1,fastq2,unpaired,False,trimmomatic_flag,rmdup,10**15,0,truncate_opt,[],cpu)
all_tasks.append(trim_tasks)
deps.append(trim_tasks)
if run_salmon:
salmon_tasks = gen_salmon_supervisor(opc, fastq1,fastq2,paired_names,unpaired,unpaired_names,assembly_path,assembly_name,gene_trans_map,sample_info,model,out_dir,cpu, deps)
all_tasks.append(salmon_tasks)
if run_express or run_intersectbed or run_rapclust:
build_bowtie = fex.build_bowtie_task(opc, assembly_path,assembly_name, out_dir,[])
bowtie2_index = join(dirname(build_bowtie.targets[0]),basename(build_bowtie.targets[0]).split('.')[0])
all_tasks.append(build_bowtie)
if run_express:
express_tasks = gen_express_supervisor(opc,fastq1,fastq2,paired_names,unpaired,unpaired_names,assembly_path,assembly_name,bowtie2_index,gene_trans_map,sample_info,model,out_dir,cpu, [build_bowtie])
all_tasks.append(express_tasks)
if run_rapclust:
rc_tsks = gen_rapclust_supervisor(opc,fastq1,fastq2,paired_names,unpaired,unpaired_names,assembly_path,assembly_name,bowtie2_index,gene_trans_map,sample_info,model,out_dir,cpu, [build_bowtie])
all_tasks.append(rc_tsks)
if run_intersectbed:
intersect_tasks = gen_intersect_supervisor(opc,fastq1,fastq2,paired_names,unpaired,unpaired_names,assembly_path,assembly_name,bowtie2_index,gene_trans_map,sample_info,model,out_dir,cpu,[build_bowtie])
all_tasks.append(intersect_tasks)
return Supervisor(tasks=all_tasks,dependencies=dependency_set)
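# Minimal usage sketch (an assumption: argument values and the mechanism for
# executing the returned task graph come from the calling pipeline):
#     sup = gen_expression_supervisor(opc, dbs, fq1, fq2, ['sampleA'], [], [],
#                                     cpu, sample_info, model, gtm, [],
#                                     'assembly', 'assembly.fasta', 'out_dir')
#     # the returned Supervisor is then executed by the pipeline driver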
if __name__ == '__main__':
    pass
| bsd-3-clause | -7,513,924,934,046,294,000 | 72.303797 | 258 | 0.717406 | false |
exclude/monki | monki/boards/admin.py | 1 | 3418 | from django.contrib import admin
from django.core.files import File
from django.conf import settings
from imagekit.admin import AdminThumbnail
from monki.boards.models import (
Ban,
Banner,
Board,
Category,
Image,
Post,
Video,
)
from monki.boards.forms import ImageForm, VideoForm
@admin.register(Category)
class CategoryAdmin(admin.ModelAdmin):
list_display = (
'__str__',
'order',
)
list_editable = (
'order',
)
@admin.register(Board)
class BoardAdmin(admin.ModelAdmin):
list_display = (
'__str__',
'category',
'order',
'max_replies',
'max_length',
'show_id',
'country_flags',
'enable_captcha',
'forced_anonymous',
'locked',
'nsfw',
'created_at',
)
list_editable = (
'category',
'order',
'max_replies',
'max_length',
'show_id',
'country_flags',
'enable_captcha',
'forced_anonymous',
'locked',
'nsfw',
)
class ImageInline(admin.StackedInline):
model = Image
form = ImageForm
can_delete = False
class VideoInline(admin.StackedInline):
model = Video
form = VideoForm
can_delete = False
@admin.register(Post)
class PostAdmin(admin.ModelAdmin):
list_display = (
'__str__',
'board',
'subject',
'name',
'tripcode',
'cid',
'ip_address',
'created_at',
'updated_at',
'bumped_at',
)
list_filter = (
'board',
)
search_fields = (
'subject',
'name',
'tripcode',
'ip_address',
'cid',
)
ordering = (
'-created_at',
)
inlines = (
ImageInline,
VideoInline,
)
@admin.register(Image)
class ImageAdmin(admin.ModelAdmin):
list_display = (
'__str__',
'original_filename',
'admin_thumbnail',
'size',
'width',
'heigth',
'checksum',
)
admin_thumbnail = AdminThumbnail(image_field='thumbnail')
actions = (
'turn_potato',
)
def turn_potato(self, request, queryset):
count = 0
placeholder = str(settings.BASE_DIR / 'static' / 'img' / 'anders_bateva.png')
with open(placeholder, 'rb') as file:
for image in queryset:
image.file = File(file, name=image.original_filename)
image.save()
count += 1
self.message_user(request, '{} image(s) was potato\'d'.format(count))
turn_potato.short_description = 'Turn into a potato'
@admin.register(Video)
class VideoAdmin(admin.ModelAdmin):
list_display = (
'__str__',
'admin_thumbnail',
)
admin_thumbnail = AdminThumbnail(image_field='thumbnail')
@admin.register(Banner)
class BannerAdmin(admin.ModelAdmin):
list_display = (
'__str__',
'admin_thumbnail',
)
admin_thumbnail = AdminThumbnail(image_field='image')
@admin.register(Ban)
class BanAdmin(admin.ModelAdmin):
list_display = (
'__str__',
'reason',
'created_at',
'expires_at',
'banned_by',
)
search_fields = (
'ip_address',
)
def save_model(self, request, obj, form, change):
obj.banned_by = request.user
obj.save()
| agpl-3.0 | -175,683,558,941,638,980 | 17.78022 | 85 | 0.534816 | false |
amuramatsu/dwf | setup.py | 1 | 1430 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
import sys
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='dwf',
version='0.2.0.dev0',
description="Digilent's DWF library wrapper",
long_description=long_description,
url='https://github.com/amuramatsu/dwf/',
author='MURAMATSU Atsushi',
author_email='[email protected]',
license='MIT',
install_requires=[
'enum34'
],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Build Tools',
'License :: OSI Approved :: MIT License',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6', # Not tested
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3', # Not tested
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
platforms="Linux,Mac,Windows",
packages=['dwf'],
use_2to3=False
)
| mit | -7,619,697,030,664,391,000 | 28.183673 | 64 | 0.596503 | false |
jmason86/MinXSS_Beacon_Decoder | logger.py | 1 | 1195 | import logging
import os
class Logger:
def __init__(self):
self.create_log()
def create_log(self):
"""
For debugging and informational purposes.
"""
self.ensure_log_folder_exists()
log = logging.getLogger('minxss_beacon_decoder_debug')
if not self.logger_exists(log):
handler = logging.FileHandler(self.create_log_filename())
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
handler.setFormatter(formatter)
log.addHandler(handler)
log.setLevel(logging.DEBUG)
return log
@staticmethod
def logger_exists(log):
if len(log.handlers) > 0:
return True
else:
return False
@staticmethod
def ensure_log_folder_exists():
if not os.path.exists(os.path.join(os.path.expanduser("~"), "MinXSS_Beacon_Decoder", "log")):
os.makedirs(os.path.join(os.path.expanduser("~"), "MinXSS_Beacon_Decoder", "log"))
@staticmethod
def create_log_filename():
return os.path.join(os.path.expanduser("~"), "MinXSS_Beacon_Decoder", "log", "minxss_beacon_decoder_debug.log")
| gpl-3.0 | -6,227,885,340,899,333,000 | 29.641026 | 119 | 0.603347 | false |
ArielCabib/python-tkinter-calculator | Calculator/Widgets/MainMenu.py | 1 | 3396 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# MainMenu.py
#
# Copyright 2010 Ariel Haviv <[email protected]>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of the nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
***Main menu container (a Frame)***
Calculator by Ariel Haviv ([email protected])
Instructors: Anatoly Peymer, Zehava Lavi
"""
from Tkinter import *
#auto-generated methods will use this list:
m = [['File',
['Load history (Ctrl+L)', 'Load_History'],
['Save history (Ctrl+S)', 'Save_History'],
['Quit (Alt+F4)', 'Quit']],
['Edit',
['Undo (Ctrl+Z)', 'Undo'],
['Redo (Ctrl+Y)', 'Redo']],
['View',
['Toggle previous action bar (Ctrl+P)', 'Toggle_Prev_Lbl'],
['Show history', 'Show_History'],
['Toggle Prefix & Postfix', 'Toggle_Fixes']],
['Base',
['Binary (Ctrl+B)', 'Binary'],
['Octal (Ctrl+O)', 'Octal'],
['Decimal (Ctrl+D)', 'Decimal'],
['Hexa (Ctrl+X)', 'Hexa'],
['Manual (Ctrl+A)', 'Manual']],
['Help',
['Contents (F1)', 'Contents'],
['About...', 'About']]]
class MainMenu(Frame):
def __init__(self, root, in_hndl, **args):
Frame.__init__(self, root, **args)
self.root = root
self.in_hndl = in_hndl
mb = self.menuBtns = []
mn = self.menus = []
#drawing menus
for i in range(len(m)):
mb.append(Menubutton(self, text=m[i][0]))
mb[i].grid(row=0, column=i)
mn.append(Menu(mb[i], tearoff=False))
mb[i]['menu'] = mn[i]
for j in m[i][1:]:
#pointing to auto-generated class methods
method = ("%s_%s" % (m[i][0], j[1]))
eval('mn[i].add_command(label=j[0], command=self.%s)' % method)
#auto-generating methods
for i in range(len(m)):
for j in m[i][1:]:
#generating auto class methods for menu commands
method = ("%s_%s" % (m[i][0], j[1]))
exec("""def %s(self):
self.in_hndl.mnu_clicked(["%s", "%s"])""" % (method, m[i][0], j[1]))
| bsd-3-clause | -1,414,582,796,137,303,800 | 35.913043 | 80 | 0.636926 | false |
metalshark/lesscss-python | lesscss/media.py | 1 | 2033 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Copyright 2010 Beech Horn
This file is part of lesscss-python.
lesscss-python is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
lesscss-python is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with lesscss-python. If not, see <http://www.gnu.org/licenses/>.
'''
import re
from lesscss.nested import parse_nested
from lesscss.rules import Rules
MEDIA = re.compile('''
(?P<names>
@media
\s*
(?P<media>
[a-z]+
\s*
(
,
\s*
[a-z]+
\s*
)*?
)
)
\s*
{
''', re.DOTALL | re.IGNORECASE | re.VERBOSE)
def parse_media(less, parent=None, **kwargs):
match = MEDIA.match(less)
if not match:
        raise ValueError('less fragment is not a media block')
media = [media.strip() for media in match.group('media').split(',')]
matched_length = len(match.group())
remaining_less = less[matched_length:]
contents = parse_nested(remaining_less)
code = match.group() + contents + '}'
return Media(code=code, media=media, contents=contents, parent=parent)
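# Illustrative example (input and result assumed, not from the original tests):
#   parse_media('@media screen, print { a { color: red; } }')
#   returns a Media object whose media attribute is ['screen', 'print']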
class Media(Rules):
__slots__ = ('__media',)
def __init__(self, parent, code, media, contents=None):
Rules.__init__(self, parent=parent, code=code, contents=contents)
self.__media = media
def __get_media(self):
return self.__media
media = property(fget=__get_media) | gpl-3.0 | 2,662,149,872,208,000,000 | 20.870968 | 74 | 0.578455 | false |
BCVisin/PhotoViewer | get_photos.py | 1 | 2807 |
import threading
import Queue
from PIL import ImageTk
from PIL import Image
class get_photos(object):
def __init__(self, max_w, max_h):
self.max_w, self.max_h = max_w, max_h
self.image_index = -1
self.images = ['photos/%s.JPG' % x for x in range(1, 11)]
self.image_queue = Queue.Queue()
self.image_dict = {}
def thread_load_images(self):
while True:
try:
image_location, image = self.image_queue.get_nowait()
self.image_dict[image_location] = image
except Queue.Empty:
break
def get_next_index(self):
if self.image_index >= len(self.images) - 1:
self.image_index = 0
else:
self.image_index += 1
return self.image_index
def get_previous_index(self):
if self.image_index <= 0:
self.image_index = len(self.images) - 1
else:
self.image_index -= 1
return self.image_index
def get_photo(self, image_path):
#check the queue for other images that we may have returned
self.thread_load_images()
#try to return the image if it's been pre-loaded:
try:
return self.image_dict[image_path]
except KeyError:
#load the image
self.image_dict[image_path] = load_image(self.image_queue, image_path, self.max_w, self.max_h).run(True)
return self.image_dict[image_path]
def get_next(self):
this_photo_index = self.get_next_index()
self.preload(start_index=this_photo_index)
return self.get_photo(self.images[this_photo_index])
def get_previous(self):
return self.get_photo(self.images[self.get_previous_index()])
def preload(self, start_index, forward=True):
preload_num = 4
if forward:
index_range = range(start_index + 1, min(start_index + preload_num + 1, len(self.images)))
else:
index_range = range(max(0, start_index - preload_num), start_index)
for i in index_range:
try:
self.image_dict[self.images[i]]
except KeyError:
load_image(self.image_queue, self.images[i], self.max_w, self.max_h).start()
class load_image(threading.Thread):
def __init__(self, return_queue, image_path, max_x, max_y):
self.return_queue = return_queue
self.image_path = image_path
self.max_x = max_x
self.max_y = max_y
threading.Thread.__init__(self)
def run(self, direct=False):
image = Image.open(self.image_path)
new_size = self.get_new_size(self.max_x, self.max_y, image)
resized_image = image.resize(new_size, Image.ANTIALIAS)
final_image = ImageTk.PhotoImage(resized_image)
if direct:
return final_image
else:
self.return_queue.put((self.image_path, final_image))
def get_new_size(self, max_width, max_height, image):
x, y = image.size
if x > max_width or x > y:
y = int(max(y * max_width / x, 1))
x = int(max_width)
if y > max_height or x < y:
x = int(max(x * max_height / y, 1))
y = int(max_height)
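        # e.g. (illustrative): a 4000x3000 image with max 800x600 scales to (800, 600)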
new_size = x, y
return new_size | mit | -3,579,786,799,524,637,000 | 23 | 107 | 0.670823 | false |
dhatzenbichler/Slider | tl.py | 1 | 2225 | #!/usr/bin/python
from datetime import datetime
from datetime import timedelta
import subprocess
import RPi.GPIO as GPIO
import time
from wrappers import GPhoto
from wrappers import Identify
from wrappers import NetworkInfo
from ui import TimelapseUi
from motor import MotorObject
def main():
print "Timelapse"
camera = GPhoto(subprocess)
idy = Identify(subprocess)
netinfo = NetworkInfo(subprocess)
ui = TimelapseUi()
motor = MotorObject()
motor.backwards(0.005,50)
shot = 0
network_status = netinfo.network_status()
ui.main(motor, network_status)
print "Test vor capture"
try:
## last_started = datetime.now()
## print "Shot: %d Shutter: %s ISO: %d" % (shot)
## ui.backlight_on()
## print "Jetyt set shutter speed"
## camera.set_shutter_speed(secs=config[0])
## print "Jetyt nach set shutter speed"
## print config[1]
## camera.set_iso(iso=str(config[1]))
## print "Jetyt nach set iso"
if ui.getBkt() == True:
camera.set_bracketing()
print "nach Set Bracketing"
ui.backlight_off()
while True:
try:
if ui.getBkt() == True:
camera.capture_image_and_download(shot)
shot = shot + 1
camera.capture_image_and_download(shot)
shot = shot + 1
camera.capture_image_and_download(shot)
else:
camera.capture_image_and_download(shot)
time.sleep(intervall)
                motor.forward(0.005, ui.getSteps())  # 5/1000 evaluates to 0 under Python 2 integer division
                time.sleep(ui.getSteps()/33)  # time the motor needs to travel
except Exception, e:
print "Error on capture." + str(e)
print "Retrying..."
# Occasionally, capture can fail but retries will be successful.
continue
shot = shot + 1
except Exception,e:
ui.show_error(str(e))
if __name__ == "__main__":
main() | gpl-3.0 | 992,594,585,256,588,700 | 25.5 | 84 | 0.529888 | false |
apallin/testworks-appium | testworksappium/elements.py | 1 | 1483 | #!/usr/bin/env python
import logging
from selenium.common.exceptions import NoSuchElementException
from selenium.common.exceptions import WebDriverException
log = logging.getLogger(__name__)
class Elements(object):
def __init__(self, appium_driver, **kwargs):
"""
Element object for wrapping webdriver element calls.
Must pass a locator/locator_value in kwargs to find elements.
:param: :appium_driver: webdriver object
"""
self.appium_driver = appium_driver
self.element_objects = []
if not kwargs:
raise ValueError("Please specify a locator")
if len(kwargs) > 1:
raise ValueError("Please specify only one locator")
locator_key, locator_value = next(iter(kwargs.items()))
self.locator_value = locator_value
self.locator_key = locator_key
self.locator = (locator_key, locator_value)
def find_elements(self):
"""
Function for finding element objects for appium interaction.
        :return: list of matching webdriver element objects (may be empty)
"""
log.debug("Finding {}".format(self.locator))
try:
self.element_objects = self.appium_driver.find_elements(
by=self.locator_key, value=self.locator_value)
except NoSuchElementException as e:
log.error(e)
pass
        except WebDriverException as e:
log.error(e)
pass
return self.element_objects
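# Usage sketch (locator strategy and value are illustrative):
#   buttons = Elements(appium_driver, id='com.example:id/submit')
#   found = buttons.find_elements()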
| mit | -8,590,678,342,811,658,000 | 31.955556 | 69 | 0.626433 | false |
renskiy/django-bitmask-field | django_bitmask_field.py | 1 | 4281 | import codecs
import functools
from django import forms
from django.core import checks, exceptions, validators
from django.db import models
from django.utils.encoding import force_bytes
from django.utils.six import integer_types, buffer_types, text_type
from django.utils.six.moves import reduce
from django.utils.translation import ugettext_lazy as _
long = integer_types[-1]
def int2bytes(i):
hex_value = '{0:x}'.format(i)
# make length of hex_value a multiple of two
hex_value = '0' * (len(hex_value) % 2) + hex_value
return codecs.decode(hex_value, 'hex_codec')
def bytes2int(b):
return long(codecs.encode(b, 'hex_codec'), 16)
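# Round-trip sketch (illustrative): int2bytes(1026) == b'\x04\x02'
# and bytes2int(b'\x04\x02') == 1026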
class BitmaskFormField(forms.TypedMultipleChoiceField):
def prepare_value(self, value):
if isinstance(value, list):
return value
if not value:
return value
return [
long(bit) * (2 ** place)
for place, bit in enumerate('{0:b}'.format(value)[::-1])
if bit == '1'
]
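    # e.g. (illustrative): prepare_value(5) -> [1, 4], i.e. bits 0 and 2 set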
def has_changed(self, initial, data):
return initial != self._coerce(data)
def _coerce(self, value):
values = super(BitmaskFormField, self)._coerce(value)
if values is None:
return values
return reduce(long.__or__, map(long, values), long(0))
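    # e.g. (illustrative, with coerce=long): _coerce(['1', '4']) -> 5, the values OR-ed together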
class BitmaskField(models.BinaryField):
description = _('Bitmask')
default_validators = [validators.MinValueValidator(0)]
def __init__(self, *args, **kwargs):
editable = kwargs.get('editable', True)
super(BitmaskField, self).__init__(*args, **kwargs)
self.editable = editable
self.validators = list(self.__validators)
@property
def __validators(self):
for validator in self.validators:
if isinstance(validator, validators.MaxLengthValidator):
max_value = 2 ** (validator.limit_value * 8)
yield validators.MaxValueValidator(max_value)
else:
yield validator
def _check_choices(self):
errors = super(BitmaskField, self)._check_choices()
if not errors and self.choices and not all(
isinstance(choice, integer_types) and choice >= 0
for choice, description in self.flatchoices
):
return [
checks.Error(
"all 'choices' must be of integer type.",
obj=self,
)
]
return errors
def deconstruct(self):
return models.Field.deconstruct(self)
@property
def all_values(self):
return reduce(
long.__or__,
map(long, list(zip(*self.flatchoices))[0]),
long(0),
)
def validate(self, value, model_instance):
try:
super(BitmaskField, self).validate(value, model_instance)
except exceptions.ValidationError as error:
if error.code != 'invalid_choice':
raise
if (
self.choices
and value not in self.empty_values
and value & self.all_values != value
):
raise exceptions.ValidationError(
self.error_messages['invalid_choice'],
code='invalid_choice',
params={'value': value},
)
def value_to_string(self, obj):
return models.Field.value_to_string(self, obj)
def to_python(self, value):
if isinstance(value, buffer_types):
return bytes2int(force_bytes(value))
elif isinstance(value, text_type):
return long(value)
return value
def get_prep_value(self, value):
value = super(BitmaskField, self).get_prep_value(value)
if value is None:
return value
return int2bytes(value)
def from_db_value(self, value, expression, connection, context):
return self.to_python(value)
def formfield(self, **kwargs):
defaults = {
'form_class': functools.partial(forms.IntegerField, min_value=0),
'choices_form_class': BitmaskFormField,
}
if self.choices:
defaults['coerce'] = long
defaults.update(kwargs)
return super(BitmaskField, self).formfield(**defaults)
| mit | 6,983,439,037,241,272,000 | 29.798561 | 77 | 0.589348 | false |
ddurdle/XBMC-ustvnow | resources/lib/ustvnow.py | 1 | 7979 | '''
ustvnow XBMC Plugin
Copyright (C) 2011 t0mm0
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import Addon
import cookielib
import os
import re
import urllib, urllib2
class Ustvnow:
__BASE_URL = 'http://lv2.ustvnow.com'
def __init__(self, user, password):
self.user = user
self.password = password
def get_channels(self, quality=1, stream_type='rtmp'):
self._login()
html = self._get_html('iphone_ajax', {'tab': 'iphone_playingnow',
'token': self.token})
channels = []
for channel in re.finditer('id="(content.+?)".+?class="panel".+?title="(.+?)".+?src="' +
'(.+?)".+?class="nowplaying_item">(.+?)' +
'<\/td>.+?class="nowplaying_itemdesc".+?' +
'<\/a>(.+?)<\/td>.+?href="(.+?)"',
html, re.DOTALL):
id, name, icon, title, plot, url = channel.groups()
title = title.replace("&", "&")
if name.find('fieldset') != -1:
if icon.endswith('APL.png'):
name = 'Animal Planet'
elif icon.endswith('BRAVO.png'):
name = 'Bravo'
elif icon.endswith('TOON.png'):
name = 'Cartoon Network'
elif icon.endswith('ESPN.png'):
name = 'ESPN'
elif icon.endswith('CNN.png'):
name = 'CNN'
elif icon.endswith('CNBC.png'):
name = 'CNBC'
elif icon.endswith('USA.png'):
name = 'USA'
elif icon.endswith('SYFY.png'):
name = 'Syfy'
elif icon.endswith('HISTORY.png'):
name = 'History'
elif icon.endswith('DSC.png'):
name = 'Discovery Channel'
elif icon.endswith('COMEDY.png'):
name = 'Comedy Central'
elif icon.endswith('TNT.png'):
name = 'TNT'
elif icon.endswith('WLYH.png'):
name = 'CW'
elif icon.endswith('WHTM.png'):
name = 'ABC'
elif icon.endswith('WPMT.png'):
name = 'FOX'
elif icon.endswith('FX.png'):
name = 'FX'
elif icon.endswith('WPSU.png'):
name = 'PBS'
elif icon.endswith('FOOD.png'):
name = 'Food Network'
elif icon.endswith('TBS.png'):
name = 'TBS'
elif icon.endswith('NIK.png'):
name = 'Nickelodeon'
elif icon.endswith('WHP.png'):
name = 'CBS'
elif icon.endswith('WGAL.png'):
name = 'NBC'
elif icon.endswith('AETV.png'):
name = 'AETV'
elif icon.endswith('LIFE.png'):
name = 'Lifetime'
elif icon.endswith('SPIKETV.png'):
name = 'SPIKE TV'
elif icon.endswith('FNC.png'):
name = 'Fox News Channel'
elif icon.endswith('NGC.png'):
name = 'National Geographic Channel'
elif icon.endswith('WHVLLD.png'):
name = 'My9'
elif icon.endswith('AMC.png'):
name = 'AMC'
else:
name = 'Unknown'
if not url.startswith('http'):
now = {'title': title, 'plot': plot.strip()}
url = '%s%s%d' % (stream_type, url[4:-1], quality + 1)
aChannel = {'name': name, 'url': url,
'icon': icon, 'now': now}
if aChannel in channels:
print 'Duplicate channel found: %s' % (name)
else:
channels.append(aChannel)
channels.sort()
return channels
def get_recordings(self, quality=1, stream_type='rtmp'):
self._login()
html = self._get_html('iphone_ajax', {'tab': 'iphone_viewdvrlist'})
schedule_index = html.find('Scheduled')
if schedule_index > 0:
html = html[0:schedule_index]
recordings = []
for r in re.finditer('class="panel".+?title="(.+?)".+?src="(.+?)".+?' +
'class="nowplaying_item">(.+?)<\/td>.+?(?:<\/a>' +
'(.+?)<\/td>.+?)?vertical-align:bottom.+?">.+?(Recorded.+?)' +
'<\/div>.+?"(rtsp.+?)".+?"(iphone_ajax.+?)"',
html, re.DOTALL):
chan, icon, title, plot, rec_date, url, del_url = r.groups()
rec_date = rec_date.replace('\n', ' ').replace('\r', '').replace('\t', '')
url = '%s%s%s' % (stream_type, url[4:-7],
['350', '650', '950'][quality])
if plot:
plot = plot.strip()
else:
plot = ''
recordings.append({'channel': chan,
'stream_url': url,
'title': title,
'plot': plot,
'rec_date': rec_date.strip(),
'icon': icon,
'del_url': del_url
})
return recordings
def delete_recording(self, del_url):
html = self._get_html(del_url)
print html
def _build_url(self, path, queries={}):
if queries:
query = Addon.build_query(queries)
return '%s/%s?%s' % (self.__BASE_URL, path, query)
else:
return '%s/%s' % (self.__BASE_URL, path)
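    # e.g. (illustrative, assuming Addon.build_query yields a standard query string):
    #   _build_url('iphone_login', {'username': 'u'})
    #   -> 'http://lv2.ustvnow.com/iphone_login?username=u'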
def _fetch(self, url, form_data=False):
if form_data:
Addon.log('posting: %s %s' % (url, str(form_data)))
req = urllib2.Request(url, form_data)
else:
Addon.log('getting: ' + url)
req = url
try:
            response = urllib2.urlopen(req)  # use the prepared Request, not the bare url
return response
except urllib2.URLError, e:
Addon.log(str(e), True)
return False
def _get_html(self, path, queries={}):
html = False
url = self._build_url(path, queries)
response = self._fetch(url)
if response:
html = response.read()
else:
html = False
return html
def _login(self):
Addon.log('logging in')
self.token = None
self.cj = cookielib.CookieJar()
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cj))
urllib2.install_opener(opener)
url = self._build_url('iphone_login', {'username': self.user,
'password': self.password})
response = self._fetch(url)
#response = opener.open(url)
self.token ='1fjcfojwzitbz6ufzetw'
for cookie in self.cj:
print '%s: %s' % (cookie.name, cookie.value)
if cookie.name == 'token':
self.token = cookie.value
self.token ='1fjcfojwzitbz6ufzetw'
| gpl-2.0 | 6,640,263,805,704,231,000 | 39.095477 | 97 | 0.455571 | false |
FLOSSmole/codeplex | 1getCodeplexPages.py | 1 | 3667 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it
# and/or modify it under the terms of GPL v3
#
# Copyright (C) 2004-2017 Megan Squire <[email protected]>
#
# We're working on this at http://flossmole.org - Come help us build
# an open and accessible repository for data and analyses for open
# source projects.
#
# If you use this code or data for preparing an academic paper please
# provide a citation to
#
# Howison, J., Conklin, M., & Crowston, K. (2006). FLOSSmole:
# A collaborative repository for FLOSS research data and analyses.
# Int. Journal of Information Technology & Web Engineering, 1(3), 17–26.
#
# and
#
# FLOSSmole(2004-2017) FLOSSmole: a project to provide academic access to data
# and analyses of open source projects. Available at http://flossmole.org
#
################################################################
# usage:
# 1getCodeplexPages.py <datasource_id> <db password>
# purpose:
# grab all the pages for projects stored on Codeplex before it was shut down
################################################################
import sys
import pymysql
try:
import urllib.request as urllib2
except ImportError:
import urllib2
# grab commandline args
datasourceID = str(sys.argv[1])
pw = str(sys.argv[2])
lastUpdated = None
# Open remote database connection
dbconn = pymysql.connect(host="",
user="",
passwd=pw,
db="",
use_unicode=True,
charset="utf8mb4")
cursor = dbconn.cursor()
# read in list of projects
# for each project, grab the following pages:
# --- home page
# --- history page
selectProjectsQuery = 'SELECT proj_name, proj_url FROM cp_projects \
WHERE datasource_id = %s \
ORDER BY 1'
insertHTMLQuery = 'INSERT INTO cp_projects_indexes (proj_name, \
datasource_id, \
home_html, \
history_html, \
last_updated) \
VALUES (%s, %s, %s, %s, %s)'
cursor.execute(selectProjectsQuery, (datasourceID,))
projectList = cursor.fetchall()
# insert project pages
for project in projectList:
projectName = project[0]
projectUrl = project[1]
print("grabbing", projectName)
# set up headers
hdr = {'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11',
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3',
'Accept-Encoding': 'none',
'Accept-Language': 'en-US,en;q=0.8',
'Connection': 'keep-alive'}
try:
# grab the main page
req = urllib2.Request(projectUrl, headers=hdr)
mainhtml = urllib2.urlopen(req).read()
# grab the history page
historyUrl = projectUrl + 'wikipage/history'
req2 = urllib2.Request(historyUrl, headers=hdr)
historyhtml = urllib2.urlopen(req2).read()
cursor.execute(insertHTMLQuery, (projectName,
datasourceID,
mainhtml,
historyhtml,
lastUpdated))
dbconn.commit()
except pymysql.Error as error:
print(error)
dbconn.rollback()
    except Exception as err:
        print('unexpected error while grabbing', projectName, ':', err)
dbconn.close()
| gpl-3.0 | -2,855,964,648,277,341,000 | 32.623853 | 132 | 0.554707 | false |
wpjesus/codematch | ietf/doc/templatetags/wg_menu.py | 2 | 2540 | # Copyright (C) 2009-2010 Nokia Corporation and/or its subsidiary(-ies).
# All rights reserved. Contact: Pasi Eronen <[email protected]>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# * Neither the name of the Nokia Corporation and/or its
# subsidiary(-ies) nor the names of its contributors may be used
# to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from django import template
from django.template.loader import render_to_string
from django.db import models
from ietf.group.models import Group
register = template.Library()
area_short_names = {
'ops':'Ops & Mgmt',
'rai':'RAI'
}
@register.simple_tag
def wg_menu():
parents = Group.objects.filter(models.Q(type="area") | models.Q(type="irtf", acronym="irtf"),
state="active").order_by('type_id', 'acronym')
for p in parents:
p.short_name = area_short_names.get(p.acronym) or p.name
if p.short_name.endswith(" Area"):
p.short_name = p.short_name[:-len(" Area")]
if p.type_id == "area":
p.menu_url = "/wg/#" + p.acronym
elif p.acronym == "irtf":
p.menu_url = "/rg/"
return render_to_string('base/menu_wg.html', { 'parents': parents })
| bsd-3-clause | 5,304,016,432,687,058,000 | 40.639344 | 97 | 0.710236 | false |
rascul/botwot | plugins/cookie.py | 1 | 3091 | """ Cookie Plugin (botwot plugins.cookie) """
# Copyright 2014 Ray Schulz <https://rascul.io>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import random
import requests
from bs4 import BeautifulSoup
from pyaib.plugins import keyword, plugin_class
from pyaib.db import db_driver
@plugin_class
@plugin_class.requires('db')
class Cookie(object):
def __init__(self, context, config):
self.db = context.db.get('cookies')
self.cookies = list(self.db.getAll())
print "%s cookies are in the jar." % len(self.cookies)
@keyword("cookie")
@keyword.nosub("round", "refresh")
def keyword_cookie(self, context, msg, trigger, args, kargs):
""" [<user>] - Hand out a cookie, to <user> if specified """
# Choose a cookie
cookie = random.choice(self.cookies).value
# Aquire target
target_user = " ".join(args)
# Dispense cookie
context.PRIVMSG(
msg.channel or msg.sender,
"\x01ACTION hands %s a %s from the cookie jar.\x01" % (
target_user or msg.sender,
cookie
)
)
@keyword("cookie")
@keyword.sub("round")
def keyword_cookie_round(self, context, msg, trigger, args, kargs):
""" - Pass around a box of cookies """
# Choose a cookie
cookie = random.choice(self.cookies).value
# Pass the box around
context.PRIVMSG(
msg.channel or msg.sender,
"\x01ACTION passes around a box of %s.\x01" % cookie
)
def scancookies(self):
""" Download and scan the cookie list into the database """
counter = 0
# Grab the listing from Wikipedia
page = requests.get("http://en.wikipedia.org/wiki/List_of_cookies")
soup = BeautifulSoup(page.text)
# grab each table row, drop the header
cookie_cells = [tr.td for tr in soup.table.find_all("tr")][1:]
# grab the cookie name from each row, some have links and some don't
new_cookies = [getattr(c.contents[0], "text", None) or getattr(c, "text", None) for c in cookie_cells]
# Fill the database
for c in new_cookies:
item = self.db.get(c)
item.value = "%s" % c
item.commit()
counter += 1
self.cookies = list(self.db.getAll())
print "%s cookies scanned." % counter
return counter
@keyword("cookie")
@keyword.sub("refresh")
def keyword_cookie_refresh(self, context, msg, trigger, args, kargs):
""" Download and scan the cookie list into the database """
# Only if user is an admin
if msg.sender == context.config.IRC.admin:
print "Scanning cookies..."
# First clear the database
for item in self.cookies:
self.db.delete(item.key)
msg.reply("%s cookies scanned." % self.scancookies())
| apache-2.0 | -7,046,567,008,575,976,000 | 26.353982 | 104 | 0.681009 | false |
Archman/felapps | felapps/utils/parseutils.py | 1 | 5522 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Author: Tong Zhang
Created Time: 10:22, Sep. 17, 2015
"""
from __future__ import print_function
import os
import sys
import time
try:
from configparser import SafeConfigParser, ConfigParser
except ImportError:
    from ConfigParser import SafeConfigParser, ConfigParser
from xml.etree import ElementTree
class ConfigFile(object):
"""
    Parse configuration parameters from an XML file.
"""
def __init__(self, infilename='config.xml', *args, **kwargs):
self.xmlfile = infilename
self.namelist = {}
self.parseConfigs()
    def parseConfigs(self):
        # minimal implementation (XML schema assumed) so updateConfigs() can work
        self.tree = ElementTree.parse(self.xmlfile)
        self.root = self.tree.getroot()
def getConfigs(self):
return self.namelist
def updateConfigs(self, params_dict, savetofile=None):
if not savetofile:
savetofile = self.xmlfile
for p in self.root.iter('properties'):
for k in params_dict.keys():
if p.get(k):
p.set(k, params_dict[k])
self.tree.write(savetofile)
class ParamParser(object):
"""
    Parse configuration parameters using ConfigParser.
"""
def __init__(self, inifilename='config.ini', *args, **kws):
self.inifilename = inifilename
self.parser = ConfigParser()
self.parser.optionxform = str
def readConfig(self):
if not os.path.isfile(self.inifilename):
self.createTemplate()
sys.exit(1)
else:
self.parser.read(self.inifilename)
def createTemplate(self, configfilename='config_sample.conf'):
dict_sample = dict([('00-info', {
'author':
'Tong Zhang',
'note':
'',
'created_time':
time.strftime('%Y-%m-%d %H:%M:%S %Z', time.localtime())
}), ('01-facility', {
'country': 'China',
'name': 'SDUV',
'affiliation': 'SINAP'
}), ('02-electron_beam', {
'normalized_emittance(m)': '4e-6',
'peak_current(A)': '300',
'central_energy(MeV)': '150',
'average_beta_function(m)': '4',
'bunch_charge(C)': '0.2e-9',
'energy_spread': '1e-4',
'bunch_shape': 'gaussian'
}), ('03-undulator', {
'total_length(m)': '10',
'period_length(m)': '0.04'
}), ('04-FEL_radiation', {
'wavelength(m)': '350e-9'
})])
parser_sample = SafeConfigParser()
for section_name in sorted(dict_sample.keys()):
parser_sample.add_section(section_name)
[
parser_sample.set(section_name, k, v)
for k, v in sorted(dict_sample[section_name].items())
]
parser_sample.write(open(configfilename, 'w'))
def makeFlatDict(self):
"""
return dict with key,value pairs
"""
onedict = {}
for section_name in self.parser.sections():
onedict.update({k: v for k, v in self.parser.items(section_name)})
return onedict
def makeHierDict(self):
"""
        return dict with hierarchical structure
"""
sdict = {}
        for section_name in self.parser.sections():
            sdict[section_name] = {
                k: v
                for k, v in self.parser.items(section_name)
            }
return sdict
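    # Illustrative (keys and values assumed): makeFlatDict() -> {'name': 'SDUV', ...}
    # while makeHierDict() -> {'01-facility': {'name': 'SDUV', ...}, ...}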
def setOneParam(self, section_name, option_name, newvalue):
self.parser.set(section_name, option_name, newvalue)
def setAllParams(self, newhierdict):
for section_name in self.parser.sections():
for k, v in self.parser.items(section_name):
self.parser.set(section_name, k, newhierdict[section_name][k])
def dumpDictToConfig(self, newhierdict, configfilename):
newparser = SafeConfigParser()
newparser.optionxform = str
for section_name in sorted(newhierdict.keys()):
newparser.add_section(section_name)
[
newparser.set(section_name, k, v)
for k, v in sorted(newhierdict[section_name].items())
]
newparser.write(open(configfilename, 'w'))
def saveConfig(self, filetosave=None):
        if filetosave is None:
filetosave = self.inifilename
self.parser.write(open(filetosave, 'w'))
def loadtest():
# test load config from file
testparser = ParamParser('config_sample.conf')
testparser.readConfig()
print(testparser.makeHierDict())
def savetest():
# test save config to file
# save parser into new config file
# get param dict from config_sample.conf which is parsed by readConfig method
testparser = ParamParser('config_sample.conf')
testparser.readConfig()
raw_dict = testparser.makeHierDict()
# modify parameters
raw_dict['01-facility']['name'] = 'XFEL'
raw_dict['03-undulator']['period_length(m)'] = str(0.04)
raw_dict['02-electron_beam']['peak_current(A)'] = str(300)
    if '00-info' not in raw_dict:  # dict.has_key() does not exist on Python 3
raw_dict['00-info'] = {}
raw_dict['00-info']['time'] = time.strftime('%Y-%m-%d %H:%M:%S %Z',
time.localtime())
# add options
raw_dict['04-FEL_radiation']['output_power(W)'] = '%.3e' % 1e8
# save to new config file
testparser.dumpDictToConfig(raw_dict, 'sxfel.conf')
if __name__ == '__main__':
#loadtest()
savetest()
| mit | -2,765,303,437,213,584,400 | 29.849162 | 81 | 0.565737 | false |
tuxxi/OpenBurn | tests/test_json.py | 1 | 1177 | import unittest
from openburn.core.propellant import SimplePropellant
from openburn.core.grain import CylindricalCoreGrain
from openburn.core.nozzle import ConicalNozzle
from openburn.core.motor import OpenBurnMotor
class InternalBallisticsTest(unittest.TestCase):
def setUp(self):
"""Set up the test data"""
self.propellant = SimplePropellant("68/10", 0.0341, 0.2249, 4706, 0.058, 1.226)
# using a list comprehension to create four unique grain objects
self.grains = [CylindricalCoreGrain(diameter=2, length=4, core_diameter=1, burning_faces=2,
propellant=self.propellant)
for _ in range(0, 4)]
self.nozzle = ConicalNozzle(throat=0.5, exit=2, half_angle=15, throat_len=0.25)
self.motor = OpenBurnMotor()
self.motor.set_grains(self.grains)
self.motor.set_nozzle(self.nozzle)
def test_json_in(self):
out = self.motor.to_json()
uuid_out = self.motor.uuid
in_ = self.motor.from_json(out)
uuid_in = in_.uuid
self.assertIsInstance(in_, OpenBurnMotor)
self.assertNotEqual(uuid_out, uuid_in)
| gpl-3.0 | -2,680,164,306,009,758,000 | 39.586207 | 99 | 0.654206 | false |
sourceperl/pyHMI | pyHMI/Dialog.py | 1 | 3709 | # -*- coding: utf-8 -*-
from tkinter import *
from .Colors import *
class ConfirmDialog(Toplevel):
def __init__(self, parent, title, text, valid_command):
super(ConfirmDialog, self).__init__(parent)
self.transient(parent)
self.grab_set()
self.title(title)
self.valid_command = valid_command
Label(self, text=text).grid(row=0, column=0, columnspan=2, padx=20, pady=20)
Button(self, text='Validation', command=self.ok).grid(row=1, column=0, padx=10, pady=10)
Button(self, text='Annulation', command=self.cancel).grid(row=1, column=1, padx=10, pady=10)
self.bind('<Escape>', lambda evt: self.destroy())
self.after(45000, self.destroy)
def ok(self):
self.valid_command()
self.destroy()
def cancel(self):
self.destroy()
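# Usage sketch (parent widget and callback are illustrative):
#   ConfirmDialog(root, 'Confirmation', 'Apply the new setpoint?', apply_setpoint)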
class ValveOpenCloseDialog(Toplevel):
def __init__(self, parent, title, text, open_command, close_command):
super(ValveOpenCloseDialog, self).__init__(parent)
self.transient(parent)
self.grab_set()
self.title(title)
self.open_command = open_command
self.close_command = close_command
Label(self, text=text).grid(row=0, column=0, columnspan=3, padx=20, pady=20)
Button(self, text='Ouverture', command=self.open).grid(row=1, column=0, padx=10, pady=5)
Button(self, text='Fermeture', command=self.close).grid(row=1, column=1, padx=10, pady=5)
Button(self, text='Annulation', command=self.destroy, default=ACTIVE).grid(row=1, column=2, padx=10, pady=5)
self.bind('<Escape>', lambda evt: self.destroy())
self.after(45000, self.destroy)
def open(self):
self.open_command()
self.destroy()
def close(self):
self.close_command()
self.destroy()
class ValveESDDialog(Toplevel):
def __init__(self, parent, title, text, stop_command, pst_command):
super(ValveESDDialog, self).__init__(parent)
self.transient(parent)
self.grab_set()
self.title(title)
self.stop_command = stop_command
self.pst_command = pst_command
Label(self, text=text).grid(row=0, column=0, columnspan=3, padx=20, pady=20)
Button(self, text='Fermeture', command=self.stop, background=RED).grid(row=1, column=0, padx=10, pady=5)
Button(self, text='Test partiel', command=self.pst).grid(row=1, column=1, padx=10, pady=5)
Button(self, text='Annulation', command=self.destroy, default=ACTIVE).grid(row=1, column=2, padx=10, pady=5)
self.bind('<Escape>', lambda evt: self.destroy())
self.after(45000, self.destroy)
def stop(self):
self.stop_command()
self.destroy()
def pst(self):
self.pst_command()
self.destroy()
class SetIntValueDialog(Toplevel):
def __init__(self, parent, title, text, valid_command):
super(SetIntValueDialog, self).__init__(parent)
self.transient(parent)
self.grab_set()
self.title(title)
self.valid_command = valid_command
self.value = IntVar()
Label(self, text=text).grid(row=0, column=0, columnspan=2, padx=20, pady=20)
Entry(self, textvariable=self.value).grid(row=1, column=0, columnspan=2, padx=10, pady=10)
Button(self, text='Validation', command=self.ok).grid(row=2, column=0, padx=10, pady=10)
Button(self, text='Annulation', command=self.cancel).grid(row=2, column=1, padx=10, pady=10)
self.bind('<Escape>', lambda evt: self.destroy())
self.after(45000, self.destroy)
def ok(self):
self.valid_command(self.value.get())
self.destroy()
def cancel(self):
self.destroy()
| mit | -1,398,264,354,190,854,100 | 37.635417 | 116 | 0.626314 | false |
tiborsimko/analysis-preservation.cern.ch | tests/integration/test_delete_deposit.py | 2 | 7407 | # -*- coding: utf-8 -*-
#
# This file is part of CERN Analysis Preservation Framework.
# Copyright (C) 2017 CERN.
#
# CERN Analysis Preservation Framework is free software; you can redistribute
# it and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# CERN Analysis Preservation Framework is distributed in the hope that it will
# be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with CERN Analysis Preservation Framework; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
# or submit itself to any jurisdiction.
"""Integration tests for deleting deposits."""
import json
# #######################################
# # api/deposits/{pid} [DELETE]
# #######################################
def test_delete_deposit_with_non_existing_pid_returns_404(app,
auth_headers_for_superuser):
with app.test_client() as client:
resp = client.delete('/deposits/{}'.format('non-existing-pid'),
headers=auth_headers_for_superuser)
assert resp.status_code == 404
def test_delete_deposit_when_user_has_no_permission_returns_403(app,
users,
create_deposit,
auth_headers_for_user):
deposit = create_deposit(users['lhcb_user'], 'lhcb-v0.0.1')
other_user_headers = auth_headers_for_user(users['lhcb_user2'])
with app.test_client() as client:
resp = client.delete('/deposits/{}'.format(deposit['_deposit']['id']),
headers=other_user_headers)
assert resp.status_code == 403
def test_delete_deposit_when_user_is_owner_can_delete_his_deposit(app,
users,
create_deposit,
json_headers,
auth_headers_for_user):
owner = users['lhcb_user']
deposit = create_deposit(owner, 'lhcb-v0.0.1')
headers = auth_headers_for_user(owner) + json_headers
with app.test_client() as client:
resp = client.delete('/deposits/{}'.format(deposit['_deposit']['id']),
headers=headers)
assert resp.status_code == 204
# deposit not existing anymore
resp = client.get('/deposits/{}'.format(deposit['_deposit']['id']),
headers=headers)
assert resp.status_code == 410
def test_delete_deposit_when_deposit_published_already_cant_be_deleted(app,
users,
create_deposit,
json_headers,
auth_headers_for_user):
deposit = create_deposit(users['lhcb_user'], 'lhcb-v0.0.1')
headers = auth_headers_for_user(users['lhcb_user']) + json_headers
pid = deposit['_deposit']['id']
with app.test_client() as client:
resp = client.post('/deposits/{}/actions/publish'.format(pid),
headers=headers)
resp = client.delete('/deposits/{}'.format(pid),
headers=headers)
assert resp.status_code == 403
# deposit not removed
resp = client.get('/deposits/{}'.format(pid),
headers=headers)
assert resp.status_code == 200
def test_delete_deposit_when_superuser_can_delete_others_deposit(app,
users,
create_deposit,
auth_headers_for_superuser):
deposit = create_deposit(users['lhcb_user'], 'lhcb-v0.0.1')
with app.test_client() as client:
resp = client.delete('/deposits/{}'.format(deposit['_deposit']['id']),
headers=auth_headers_for_superuser)
assert resp.status_code == 204
def test_delete_deposit_when_user_with_admin_access_can_delete(app,
users,
create_deposit,
auth_headers_for_user,
json_headers):
owner, other_user = users['lhcb_user'], users['cms_user']
deposit = create_deposit(owner, 'lhcb-v0.0.1')
permissions = [{
'email': other_user.email,
'type': 'user',
'op': 'add',
'action': 'deposit-admin'
}]
with app.test_client() as client:
# give other user read/write access
resp = client.post('/deposits/{}/actions/permissions'.format(deposit['_deposit']['id']),
headers=auth_headers_for_user(owner) + json_headers,
data=json.dumps(permissions))
resp = client.delete('/deposits/{}'.format(deposit['_deposit']['id']),
headers=auth_headers_for_user(other_user))
assert resp.status_code == 204
def test_delete_deposit_when_user_only_with_read_write_access_returns_403(app,
users,
create_deposit,
auth_headers_for_user,
json_headers):
owner, other_user = users['lhcb_user'], users['cms_user']
deposit = create_deposit(owner, 'lhcb-v0.0.1')
permissions = [{
'email': other_user.email,
'type': 'user',
'op': 'add',
'action': 'deposit-read'
},{
'email': other_user.email,
'type': 'user',
'op': 'add',
'action': 'deposit-update'
}]
with app.test_client() as client:
# give other user read/write access
resp = client.post('/deposits/{}/actions/permissions'.format(deposit['_deposit']['id']),
headers=auth_headers_for_user(owner) + json_headers,
data=json.dumps(permissions))
resp = client.delete('/deposits/{}'.format(deposit['_deposit']['id']),
headers=auth_headers_for_user(other_user))
assert resp.status_code == 403
| gpl-2.0 | 6,271,426,835,280,457,000 | 42.315789 | 96 | 0.493182 | false |
facebookresearch/Detectron | detectron/datasets/json_dataset.py | 1 | 19557 | # Copyright (c) 2017-present, Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
"""Representation of the standard COCO json dataset format.
When working with a new dataset, we strongly suggest to convert the dataset into
the COCO json format and use the existing code; it is not recommended to write
code to support new dataset formats.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import copy
import logging
import numpy as np
import os
import scipy.sparse
# Must happen before importing COCO API (which imports matplotlib)
import detectron.utils.env as envu
envu.set_up_matplotlib()
# COCO API
from pycocotools import mask as COCOmask
from pycocotools.coco import COCO
from detectron.core.config import cfg
from detectron.utils.timer import Timer
import detectron.datasets.dataset_catalog as dataset_catalog
import detectron.utils.boxes as box_utils
from detectron.utils.io import load_object
import detectron.utils.segms as segm_utils
logger = logging.getLogger(__name__)
class JsonDataset(object):
"""A class representing a COCO json dataset."""
def __init__(self, name):
assert dataset_catalog.contains(name), \
'Unknown dataset name: {}'.format(name)
assert os.path.exists(dataset_catalog.get_im_dir(name)), \
'Im dir \'{}\' not found'.format(dataset_catalog.get_im_dir(name))
assert os.path.exists(dataset_catalog.get_ann_fn(name)), \
'Ann fn \'{}\' not found'.format(dataset_catalog.get_ann_fn(name))
logger.debug('Creating: {}'.format(name))
self.name = name
self.image_directory = dataset_catalog.get_im_dir(name)
self.image_prefix = dataset_catalog.get_im_prefix(name)
self.COCO = COCO(dataset_catalog.get_ann_fn(name))
self.debug_timer = Timer()
# Set up dataset classes
category_ids = self.COCO.getCatIds()
categories = [c['name'] for c in self.COCO.loadCats(category_ids)]
self.category_to_id_map = dict(zip(categories, category_ids))
self.classes = ['__background__'] + categories
self.num_classes = len(self.classes)
self.json_category_id_to_contiguous_id = {
v: i + 1
for i, v in enumerate(self.COCO.getCatIds())
}
self.contiguous_category_id_to_json_id = {
v: k
for k, v in self.json_category_id_to_contiguous_id.items()
}
self._init_keypoints()
def get_roidb(
self,
gt=False,
proposal_file=None,
min_proposal_size=2,
proposal_limit=-1,
crowd_filter_thresh=0
):
"""Return an roidb corresponding to the json dataset. Optionally:
- include ground truth boxes in the roidb
- add proposals specified in a proposals file
- filter proposals based on a minimum side length
- filter proposals that intersect with crowd regions
"""
assert gt is True or crowd_filter_thresh == 0, \
'Crowd filter threshold must be 0 if ground-truth annotations ' \
'are not included.'
image_ids = self.COCO.getImgIds()
image_ids.sort()
roidb = copy.deepcopy(self.COCO.loadImgs(image_ids))
for entry in roidb:
self._prep_roidb_entry(entry)
if gt:
# Include ground-truth object annotations
self.debug_timer.tic()
for entry in roidb:
self._add_gt_annotations(entry)
logger.debug(
'_add_gt_annotations took {:.3f}s'.
format(self.debug_timer.toc(average=False))
)
if proposal_file is not None:
# Include proposals from a file
self.debug_timer.tic()
self._add_proposals_from_file(
roidb, proposal_file, min_proposal_size, proposal_limit,
crowd_filter_thresh
)
logger.debug(
'_add_proposals_from_file took {:.3f}s'.
format(self.debug_timer.toc(average=False))
)
_add_class_assignments(roidb)
return roidb
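    # Usage sketch (dataset name must exist in dataset_catalog; shown for illustration):
    #   ds = JsonDataset('coco_2014_train')
    #   roidb = ds.get_roidb(gt=True)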
def _prep_roidb_entry(self, entry):
"""Adds empty metadata fields to an roidb entry."""
# Reference back to the parent dataset
entry['dataset'] = self
# Make file_name an abs path
im_path = os.path.join(
self.image_directory, self.image_prefix + entry['file_name']
)
assert os.path.exists(im_path), 'Image \'{}\' not found'.format(im_path)
entry['image'] = im_path
entry['flipped'] = False
entry['has_visible_keypoints'] = False
# Empty placeholders
entry['boxes'] = np.empty((0, 4), dtype=np.float32)
entry['segms'] = []
entry['gt_classes'] = np.empty((0), dtype=np.int32)
entry['seg_areas'] = np.empty((0), dtype=np.float32)
entry['gt_overlaps'] = scipy.sparse.csr_matrix(
np.empty((0, self.num_classes), dtype=np.float32)
)
entry['is_crowd'] = np.empty((0), dtype=np.bool)
# 'box_to_gt_ind_map': Shape is (#rois). Maps from each roi to the index
# in the list of rois that satisfy np.where(entry['gt_classes'] > 0)
entry['box_to_gt_ind_map'] = np.empty((0), dtype=np.int32)
if self.keypoints is not None:
entry['gt_keypoints'] = np.empty(
(0, 3, self.num_keypoints), dtype=np.int32
)
# Remove unwanted fields that come from the json file (if they exist)
for k in ['date_captured', 'url', 'license', 'file_name']:
if k in entry:
del entry[k]
def _add_gt_annotations(self, entry):
"""Add ground truth annotation metadata to an roidb entry."""
ann_ids = self.COCO.getAnnIds(imgIds=entry['id'], iscrowd=None)
objs = self.COCO.loadAnns(ann_ids)
# Sanitize bboxes -- some are invalid
valid_objs = []
valid_segms = []
width = entry['width']
height = entry['height']
for obj in objs:
# crowd regions are RLE encoded
if segm_utils.is_poly(obj['segmentation']):
# Valid polygons have >= 3 points, so require >= 6 coordinates
obj['segmentation'] = [
p for p in obj['segmentation'] if len(p) >= 6
]
if obj['area'] < cfg.TRAIN.GT_MIN_AREA:
continue
if 'ignore' in obj and obj['ignore'] == 1:
continue
# Convert form (x1, y1, w, h) to (x1, y1, x2, y2)
x1, y1, x2, y2 = box_utils.xywh_to_xyxy(obj['bbox'])
x1, y1, x2, y2 = box_utils.clip_xyxy_to_image(
x1, y1, x2, y2, height, width
)
# Require non-zero seg area and more than 1x1 box size
if obj['area'] > 0 and x2 > x1 and y2 > y1:
obj['clean_bbox'] = [x1, y1, x2, y2]
valid_objs.append(obj)
valid_segms.append(obj['segmentation'])
num_valid_objs = len(valid_objs)
boxes = np.zeros((num_valid_objs, 4), dtype=entry['boxes'].dtype)
gt_classes = np.zeros((num_valid_objs), dtype=entry['gt_classes'].dtype)
gt_overlaps = np.zeros(
(num_valid_objs, self.num_classes),
dtype=entry['gt_overlaps'].dtype
)
seg_areas = np.zeros((num_valid_objs), dtype=entry['seg_areas'].dtype)
is_crowd = np.zeros((num_valid_objs), dtype=entry['is_crowd'].dtype)
box_to_gt_ind_map = np.zeros(
(num_valid_objs), dtype=entry['box_to_gt_ind_map'].dtype
)
if self.keypoints is not None:
gt_keypoints = np.zeros(
(num_valid_objs, 3, self.num_keypoints),
dtype=entry['gt_keypoints'].dtype
)
im_has_visible_keypoints = False
for ix, obj in enumerate(valid_objs):
cls = self.json_category_id_to_contiguous_id[obj['category_id']]
boxes[ix, :] = obj['clean_bbox']
gt_classes[ix] = cls
seg_areas[ix] = obj['area']
is_crowd[ix] = obj['iscrowd']
box_to_gt_ind_map[ix] = ix
if self.keypoints is not None:
gt_keypoints[ix, :, :] = self._get_gt_keypoints(obj)
if np.sum(gt_keypoints[ix, 2, :]) > 0:
im_has_visible_keypoints = True
if obj['iscrowd']:
# Set overlap to -1 for all classes for crowd objects
# so they will be excluded during training
gt_overlaps[ix, :] = -1.0
else:
gt_overlaps[ix, cls] = 1.0
entry['boxes'] = np.append(entry['boxes'], boxes, axis=0)
entry['segms'].extend(valid_segms)
# To match the original implementation:
# entry['boxes'] = np.append(
# entry['boxes'], boxes.astype(np.int).astype(np.float), axis=0)
entry['gt_classes'] = np.append(entry['gt_classes'], gt_classes)
entry['seg_areas'] = np.append(entry['seg_areas'], seg_areas)
entry['gt_overlaps'] = np.append(
entry['gt_overlaps'].toarray(), gt_overlaps, axis=0
)
entry['gt_overlaps'] = scipy.sparse.csr_matrix(entry['gt_overlaps'])
entry['is_crowd'] = np.append(entry['is_crowd'], is_crowd)
entry['box_to_gt_ind_map'] = np.append(
entry['box_to_gt_ind_map'], box_to_gt_ind_map
)
if self.keypoints is not None:
entry['gt_keypoints'] = np.append(
entry['gt_keypoints'], gt_keypoints, axis=0
)
entry['has_visible_keypoints'] = im_has_visible_keypoints
def _add_proposals_from_file(
self, roidb, proposal_file, min_proposal_size, top_k, crowd_thresh
):
"""Add proposals from a proposals file to an roidb."""
logger.info('Loading proposals from: {}'.format(proposal_file))
proposals = load_object(proposal_file)
id_field = 'indexes' if 'indexes' in proposals else 'ids' # compat fix
_remove_proposals_not_in_roidb(proposals, roidb, id_field)
_sort_proposals(proposals, id_field)
box_list = []
for i, entry in enumerate(roidb):
if i % 2500 == 0:
logger.info(' {:d}/{:d}'.format(i + 1, len(roidb)))
boxes = proposals['boxes'][i]
# Sanity check that these boxes are for the correct image id
assert entry['id'] == proposals[id_field][i]
# Remove duplicate boxes and very small boxes and then take top k
boxes = box_utils.clip_boxes_to_image(
boxes, entry['height'], entry['width']
)
keep = box_utils.unique_boxes(boxes)
boxes = boxes[keep, :]
keep = box_utils.filter_small_boxes(boxes, min_proposal_size)
boxes = boxes[keep, :]
if top_k > 0:
boxes = boxes[:top_k, :]
box_list.append(boxes)
_merge_proposal_boxes_into_roidb(roidb, box_list)
if crowd_thresh > 0:
_filter_crowd_proposals(roidb, crowd_thresh)
def _init_keypoints(self):
"""Initialize COCO keypoint information."""
self.keypoints = None
self.keypoint_flip_map = None
self.keypoints_to_id_map = None
self.num_keypoints = 0
# Thus far only the 'person' category has keypoints
if 'person' in self.category_to_id_map:
cat_info = self.COCO.loadCats([self.category_to_id_map['person']])
else:
return
# Check if the annotations contain keypoint data or not
if 'keypoints' in cat_info[0]:
keypoints = cat_info[0]['keypoints']
self.keypoints_to_id_map = dict(
zip(keypoints, range(len(keypoints))))
self.keypoints = keypoints
self.num_keypoints = len(keypoints)
self.keypoint_flip_map = {
'left_eye': 'right_eye',
'left_ear': 'right_ear',
'left_shoulder': 'right_shoulder',
'left_elbow': 'right_elbow',
'left_wrist': 'right_wrist',
'left_hip': 'right_hip',
'left_knee': 'right_knee',
'left_ankle': 'right_ankle'}
def _get_gt_keypoints(self, obj):
"""Return ground truth keypoints."""
if 'keypoints' not in obj:
return None
kp = np.array(obj['keypoints'])
x = kp[0::3] # 0-indexed x coordinates
y = kp[1::3] # 0-indexed y coordinates
# 0: not labeled; 1: labeled, not inside mask;
# 2: labeled and inside mask
v = kp[2::3]
        num_keypoints = len(obj['keypoints']) // 3  # integer count (true division is in effect here)
assert num_keypoints == self.num_keypoints
gt_kps = np.ones((3, self.num_keypoints), dtype=np.int32)
for i in range(self.num_keypoints):
gt_kps[0, i] = x[i]
gt_kps[1, i] = y[i]
gt_kps[2, i] = v[i]
return gt_kps
def add_proposals(roidb, rois, scales, crowd_thresh):
"""Add proposal boxes (rois) to an roidb that has ground-truth annotations
but no proposals. If the proposals are not at the original image scale,
specify the scale factor that separate them in scales.
"""
box_list = []
for i in range(len(roidb)):
inv_im_scale = 1. / scales[i]
idx = np.where(rois[:, 0] == i)[0]
box_list.append(rois[idx, 1:] * inv_im_scale)
_merge_proposal_boxes_into_roidb(roidb, box_list)
if crowd_thresh > 0:
_filter_crowd_proposals(roidb, crowd_thresh)
_add_class_assignments(roidb)
def _merge_proposal_boxes_into_roidb(roidb, box_list):
"""Add proposal boxes to each roidb entry."""
assert len(box_list) == len(roidb)
for i, entry in enumerate(roidb):
boxes = box_list[i]
num_boxes = boxes.shape[0]
gt_overlaps = np.zeros(
(num_boxes, entry['gt_overlaps'].shape[1]),
dtype=entry['gt_overlaps'].dtype
)
box_to_gt_ind_map = -np.ones(
(num_boxes), dtype=entry['box_to_gt_ind_map'].dtype
)
# Note: unlike in other places, here we intentionally include all gt
# rois, even ones marked as crowd. Boxes that overlap with crowds will
# be filtered out later (see: _filter_crowd_proposals).
gt_inds = np.where(entry['gt_classes'] > 0)[0]
if len(gt_inds) > 0:
gt_boxes = entry['boxes'][gt_inds, :]
gt_classes = entry['gt_classes'][gt_inds]
proposal_to_gt_overlaps = box_utils.bbox_overlaps(
boxes.astype(dtype=np.float32, copy=False),
gt_boxes.astype(dtype=np.float32, copy=False)
)
# Gt box that overlaps each input box the most
# (ties are broken arbitrarily by class order)
argmaxes = proposal_to_gt_overlaps.argmax(axis=1)
# Amount of that overlap
maxes = proposal_to_gt_overlaps.max(axis=1)
# Those boxes with non-zero overlap with gt boxes
I = np.where(maxes > 0)[0]
# Record max overlaps with the class of the appropriate gt box
gt_overlaps[I, gt_classes[argmaxes[I]]] = maxes[I]
box_to_gt_ind_map[I] = gt_inds[argmaxes[I]]
entry['boxes'] = np.append(
entry['boxes'],
boxes.astype(entry['boxes'].dtype, copy=False),
axis=0
)
entry['gt_classes'] = np.append(
entry['gt_classes'],
np.zeros((num_boxes), dtype=entry['gt_classes'].dtype)
)
entry['seg_areas'] = np.append(
entry['seg_areas'],
np.zeros((num_boxes), dtype=entry['seg_areas'].dtype)
)
entry['gt_overlaps'] = np.append(
entry['gt_overlaps'].toarray(), gt_overlaps, axis=0
)
entry['gt_overlaps'] = scipy.sparse.csr_matrix(entry['gt_overlaps'])
entry['is_crowd'] = np.append(
entry['is_crowd'],
np.zeros((num_boxes), dtype=entry['is_crowd'].dtype)
)
entry['box_to_gt_ind_map'] = np.append(
entry['box_to_gt_ind_map'],
box_to_gt_ind_map.astype(
entry['box_to_gt_ind_map'].dtype, copy=False
)
)
def _filter_crowd_proposals(roidb, crowd_thresh):
"""Finds proposals that are inside crowd regions and marks them as
overlap = -1 with each ground-truth rois, which means they will be excluded
from training.
"""
for entry in roidb:
gt_overlaps = entry['gt_overlaps'].toarray()
crowd_inds = np.where(entry['is_crowd'] == 1)[0]
non_gt_inds = np.where(entry['gt_classes'] == 0)[0]
if len(crowd_inds) == 0 or len(non_gt_inds) == 0:
continue
crowd_boxes = box_utils.xyxy_to_xywh(entry['boxes'][crowd_inds, :])
non_gt_boxes = box_utils.xyxy_to_xywh(entry['boxes'][non_gt_inds, :])
iscrowd_flags = [int(True)] * len(crowd_inds)
ious = COCOmask.iou(non_gt_boxes, crowd_boxes, iscrowd_flags)
bad_inds = np.where(ious.max(axis=1) > crowd_thresh)[0]
gt_overlaps[non_gt_inds[bad_inds], :] = -1
entry['gt_overlaps'] = scipy.sparse.csr_matrix(gt_overlaps)
def _add_class_assignments(roidb):
"""Compute object category assignment for each box associated with each
roidb entry.
"""
for entry in roidb:
gt_overlaps = entry['gt_overlaps'].toarray()
# max overlap with gt over classes (columns)
max_overlaps = gt_overlaps.max(axis=1)
# gt class that had the max overlap
max_classes = gt_overlaps.argmax(axis=1)
entry['max_classes'] = max_classes
entry['max_overlaps'] = max_overlaps
# sanity checks
# if max overlap is 0, the class must be background (class 0)
zero_inds = np.where(max_overlaps == 0)[0]
assert all(max_classes[zero_inds] == 0)
# if max overlap > 0, the class must be a fg class (not class 0)
nonzero_inds = np.where(max_overlaps > 0)[0]
assert all(max_classes[nonzero_inds] != 0)
def _sort_proposals(proposals, id_field):
"""Sort proposals by the specified id field."""
order = np.argsort(proposals[id_field])
fields_to_sort = ['boxes', id_field, 'scores']
for k in fields_to_sort:
proposals[k] = [proposals[k][i] for i in order]
def _remove_proposals_not_in_roidb(proposals, roidb, id_field):
# fix proposals so they don't contain entries for images not in the roidb
    roidb_ids = {entry["id"] for entry in roidb}
keep = [i for i, id in enumerate(proposals[id_field]) if id in roidb_ids]
for f in ['boxes', id_field, 'scores']:
proposals[f] = [proposals[f][i] for i in keep]
| apache-2.0 | -9,000,661,081,801,376,000 | 41.058065 | 80 | 0.575804 | false |
sravanti/UVisa | visas/migrations/0001_initial.py | 1 | 1769 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Answer',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('answer_text', models.TextField(null=True, blank=True)),
('transcription', models.TextField(null=True, blank=True)),
('audio', models.FileField(null=True, upload_to=b'photos/%Y/%m/%d', blank=True)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Form',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('user', models.OneToOneField(to=settings.AUTH_USER_MODEL)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Question',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('question_eng', models.CharField(max_length=255, null=True, blank=True)),
('question_esp', models.CharField(max_length=255, null=True, blank=True)),
('link', models.URLField(max_length=255, null=True, blank=True)),
],
options={
},
bases=(models.Model,),
),
]
| mit | 7,047,489,270,904,382,000 | 35.102041 | 114 | 0.538157 | false |
jekhokie/scriptbox | python--web-service-simulator/run.py | 1 | 1213 | # start the application after importing all relative app components
from app import app
from random import randint
import requests
import time
# if we are an injector, loop forever firing requests at the next hop instead of serving
if app.config['ROLE'] == 'Injector':
while True:
# create a random transaction ID
transaction_id = randint(1, 1000000)
# simulate being the first service in a pipeline to receive a request
app.logger.info('Received Request', extra={'role': app.config['ROLE'], 'event': 'RECEIVED', 'transaction_id': transaction_id})
# inject an artificial delay between 100-300ms into the cycle to simulate "processing"
ms_delay = (randint(100, 300) / 1000.0)
time.sleep(ms_delay)
# record completing and passing the request downstream
app.logger.info('Processed Request', extra={'role': app.config['ROLE'], 'event': 'PROCESSED', 'transaction_id': transaction_id})
# send the transaction to the next service processor
payload = {'transaction_id': transaction_id}
requests.post(app.config['NEXT_HOP'], data=payload)
else:
# else, start as a processor or terminator
app.run(host='0.0.0.0', port=app.config['PORT'], debug=app.config['DEBUG'])
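# A hypothetical config sketch (keys taken from the code above, values illustrative):
#   ROLE = 'Injector'                    # or 'Processor' / 'Terminator'
#   NEXT_HOP = 'http://localhost:5001/'  # downstream service endpoint
#   PORT = 5000
#   DEBUG = False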
| mit | -2,438,706,884,530,930,700 | 42.321429 | 132 | 0.713932 | false |
GjjvdBurg/ABED | abed/prune.py | 1 | 2281 | # -*- coding: utf-8 -*-
"""Functionality for removing results that don't match the current config
"""
import os
import shutil
from pathlib import Path
from .conf import settings
from .utils import hash_from_filename, mkdir
def prune_results(task_dict, dry_run=False):
"""Remove result files that are not in the task_dict
This can occur when the experiment configuration changes over time and old
result files are still lying around. This command moves them to the
PRUNE_DIR defined in the settings file.
"""
if not os.path.exists(settings.RESULT_DIR):
# no results, no pruning
return
# map from hash to Path of the result file
tasks_have = {}
dset_dirs = os.listdir(settings.RESULT_DIR)
for dset in dset_dirs:
dset_path = os.path.join(settings.RESULT_DIR, dset)
method_dirs = os.listdir(dset_path)
for method in method_dirs:
method_path = os.path.join(dset_path, method)
task_files = os.listdir(method_path)
for filename in task_files:
pth = os.path.join(method_path, filename)
h = hash_from_filename(pth)
tasks_have[h] = Path(pth)
# list hashes that we don't have in the task dict
unknown_hashes = []
for h in tasks_have:
        if h not in task_dict:
unknown_hashes.append(h)
# no unknown hashes, no problem
if not unknown_hashes:
return
# create the pruned dir if needed
if not dry_run:
mkdir(settings.PRUNE_DIR)
# move the stragglers
for h in unknown_hashes:
path = tasks_have[h]
filename = path.parts[-1]
method = path.parts[-2]
dset = path.parts[-3]
dest_dir = os.path.join(settings.PRUNE_DIR, dset, method)
if not dry_run:
mkdir(dest_dir)
dest_path = os.path.join(dest_dir, filename)
it = 1
        while os.path.exists(dest_path):
            # build the suffix from the bare filename, not the full path,
            # so the "_dup_N" suffix doesn't compound across iterations
            stem, ext = os.path.splitext(filename)
            dest_path = os.path.join(dest_dir, "%s_dup_%i%s" % (stem, it, ext))
            it += 1
if dry_run:
print("Moving %s to %s" % (path, dest_path))
else:
shutil.move(path, dest_path)
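# A usage sketch (hypothetical; task_dict maps task hashes to task definitions
# as built elsewhere in abed):
#
#   prune_results(task_dict, dry_run=True)   # report what would be moved
#   prune_results(task_dict)                 # actually move stale results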
| gpl-2.0 | -5,288,211,294,130,294,000 | 27.5125 | 79 | 0.597545 | false |
linaro-technologies/jobserv | simulator.py | 1 | 10305 | #!/usr/bin/env python3
# Copyright (C) 2017 Linaro Limited
# Author: Andy Doan <[email protected]>
import argparse
import hashlib
import json
import os
import re
import sys
import requests
import yaml
def _validate(args):
url = args.jobserv
if url[-1] != '/':
url += '/'
url += 'simulator-validate'
    data = yaml.safe_load(args.proj_def)
r = requests.post(url, json=data)
if r.status_code != 200:
        # don't call sys.exit() inside the try: the bare except would catch
        # the resulting SystemExit and always fall through to r.text
        try:
            msg = r.json()['message']
        except Exception:
            msg = r.text
        sys.exit(msg)
return data
def _get_trigger(projdef, trigger_name):
for trigger in projdef.get('triggers', []):
if trigger.get('name') == trigger_name:
return trigger
sys.exit('No trigger named %s was found' % trigger_name)
def _get_run(trigger, run_name):
for r in trigger.get('runs', []):
if r.get('name') == run_name:
return r
if 'loop-on' in r:
pat = r['name'].replace('{loop}', '')
if pat in run_name:
print('found looping match')
return r
sys.exit('No run named %s was found in the trigger' % run_name)
def _get_script(projdef, script_name):
s = projdef.get('scripts', {}).get(script_name)
if not s:
sys.exit('No script named %s was found' % script_name)
return s
def _get_script_repo(projdef, script):
s = projdef.get('script-repos', {}).get(script['name'])
if not s:
sys.exit('No script-repo named %s was found' % script['name'])
s['path'] = script['path']
return s
def _add_pr_params(params, secrets):
headers = {
'Content-Type': 'application/json',
'Authorization': 'token ' + secrets['githubtok'],
}
url = 'https://api.github.com/repos/%s/%s/pulls/%s' % (
params['GH_OWNER'], params['GH_REPO'], params['GH_PRNUM'])
r = requests.get(url, headers=headers)
if r.status_code != 200:
sys.exit('Unable to get PR info: %s: %d\n%s' % (
url, r.status_code, r.text))
data = r.json()
params['GH_STATUS_URL'] = data['statuses_url']
params['GH_TARGET_REPO'] = data['base']['repo']['clone_url']
params['GIT_URL'] = data['head']['repo']['clone_url']
params['GIT_SHA_BASE'] = data['base']['sha']
params['GIT_SHA'] = data['head']['sha']
def _get_params(projdef, trigger, run, keyvals, secrets):
params = {'H_RUN': 'simulator', 'H_BUILD': '42'}
params.update(projdef.get('params', {}))
params.update(trigger.get('params', {}))
params.update(run.get('params', {}))
for kv in keyvals:
k, v = kv.split('=', 1)
params[k] = v
if trigger['type'] == 'github_pr':
_add_pr_params(params, secrets)
return params
def _get_secrets(keyvals):
if keyvals is None:
keyvals = []
secrets = {}
for kv in keyvals:
k, v = kv.split('=', 1)
secrets[k] = v
return secrets
def _create(args):
if not os.path.exists(args.workspace):
sys.exit('Simulator workspace, %s, does not exist' % args.workspace)
proj_def = _validate(args)
trigger = _get_trigger(proj_def, args.trigger_name)
run = _get_run(trigger, args.run_name)
secrets = _get_secrets(args.secret)
rundef = {
'simulator': True,
'trigger_type': trigger['type'],
'run_url': '',
'api_key': '',
'container': run['container'],
'env': _get_params(proj_def, trigger, run, args.param, secrets),
'timeout': proj_def['timeout'],
'secrets': secrets,
}
script = run.get('script')
if script:
rundef['script'] = proj_def['scripts'][script]
else:
name = run['script-repo']['name']
rundef['script-repo'] = {
'clone-url': proj_def['script-repos'][name]['clone-url'],
'path': run['script-repo']['path'],
}
token = proj_def['script-repos'][name].get('token')
if token:
rundef['script-repo']['token'] = token
ref = proj_def['script-repos'][name].get('git-ref')
if ref:
rundef['script-repo']['git-ref'] = ref
rundef_file = os.path.join(args.workspace, 'rundef.json')
with open(rundef_file, 'w') as f:
json.dump(rundef, f, indent=2)
print('Downloading runner for simulator')
wheel = os.path.join(args.workspace, 'runner.whl')
with open(wheel, 'wb') as f:
url = args.jobserv + '/runner'
r = requests.get(url)
if r.status_code != 200:
sys.exit('Unable to download %s: %d\n%s' % (
url, r.status_code, r.text))
for chunk in r:
f.write(chunk)
with open(os.path.join(args.workspace, 'run_simulator'), 'w') as f:
os.fchmod(f.fileno(), 0o755)
f.write('#!/bin/sh -e\n')
f.write('export PYTHONPATH=%s\n' % wheel)
f.write('python3 -m jobserv_runner.simulator -w %s %s </dev/null' % (
args.workspace, rundef_file))
print('Simulator can now be run with %s run -w %s' % (
sys.argv[0], args.workspace))
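# A typical workflow sketch (hypothetical paths, trigger and run names):
#
#   ./simulator.py create -d projdef.yml -t pr -r build -w /tmp/sim \
#       -p GH_PRNUM=42 -s githubtok=XXXX
#   ./simulator.py run -w /tmp/sim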
def _run(args):
script = os.path.join(args.workspace, 'run_simulator')
os.execv(script, [script])
def _test_grep(args):
with open(args.proj_def) as f:
        proj_def = yaml.safe_load(f)
trigger = _get_trigger(proj_def, args.trigger_name)
run = _get_run(trigger, args.run_name)
grepping = run.get('test-grepping')
if not grepping:
        sys.exit('No \'test-grepping\' pattern defined in run')
test_pat = grepping.get('test-pattern')
if test_pat:
test_pat = re.compile(test_pat)
res_pat = re.compile(grepping['result-pattern'])
fixups = grepping.get('fixupdict', {})
cur_test = None
passes = failures = 0
for line in sys.stdin.readlines():
if test_pat:
m = test_pat.match(line)
if m:
cur_test = m.group('name')
m = res_pat.match(line)
if m:
result = m.group('result')
result = fixups.get(result, result)
if not cur_test:
cur_test = 'default'
print('Test(%s) %s = %s' % (cur_test, m.group('name'), result))
if result == 'PASSED':
passes += 1
else:
failures += 1
print('%d PASSED, %d FAILED' % (passes, failures))
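# A test-grepping section might look like this in the project definition
# (illustrative YAML; field names taken from the code above):
#
#   test-grepping:
#     test-pattern: '^Running (?P<name>\S+)'
#     result-pattern: '^(?P<name>\S+): (?P<result>PASSED|FAILED|OK)'
#     fixupdict:
#       OK: PASSED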
def _check_for_updates(args):
with open(__file__, 'rb') as f:
h = hashlib.md5()
h.update(f.read())
version = h.hexdigest()
url = args.jobserv + '/simulator?version=' + version
resp = requests.get(url)
if resp.status_code == 200:
print('Simulator version has changed, updating local script')
with open(__file__, 'w') as f:
f.write(resp.text)
elif resp.status_code == 304:
print('Simulator version has not changed')
else:
print('HTTP Error: %d\n%s' % (resp.status_code, resp.text))
def get_args(args=None):
parser = argparse.ArgumentParser(description='''
A tool to help build and run a "JobServ simulator" that can execute
a Run defined in a project definition file locally without using the
actual JobServ.''')
cmds = parser.add_subparsers(title='Commands')
p = cmds.add_parser('validate-schema',
help='''Validate a project definition YAML file against
a running JobServ''')
p.set_defaults(func=_validate)
p.add_argument('--jobserv', '-j',
default='https://api.linarotechnologies.org/',
help='The JobServ to query. Default=%(default)s')
p.add_argument('--proj-def', '-d', required=True,
type=argparse.FileType('r'),
                   help='Project definition .yml')
p = cmds.add_parser('create',
help='Create a workspace for executing simulated run.')
p.set_defaults(func=_create)
p.add_argument('--jobserv', '-j',
default='https://api.linarotechnologies.org/',
help='The JobServ to query. Default=%(default)s')
p.add_argument('--proj-def', '-d', required=True,
type=argparse.FileType('r'),
                   help='Project definition .yml')
p.add_argument('--trigger-name', '-t', required=True,
help='The name of the trigger the run is under')
p.add_argument('--run-name', '-r', required=True,
help='The name of the run to try')
p.add_argument('--workspace', '-w', required=True,
help='''A directory to serve as the simulator workspace. It
                   will hold the scripts needed to run the simulator
                   and also store its artifacts''')
p.add_argument('--param', '-p', metavar='KEY=VAL', action='append',
help='Parameter(s) needed by the run.')
p.add_argument('--secret', '-s', metavar='KEY=VAL', action='append',
                   help='Secret(s) needed by the run.')
p = cmds.add_parser('run',
help='Run the simulator defined in the workspace.')
p.set_defaults(func=_run)
p.add_argument('--workspace', '-w', required=True,
help='The simulator workspace')
p = cmds.add_parser('check-test-grepping',
help='''Parses STDIN with test-grepping rules to see
find out what tests it thinks pass/fail''')
p.set_defaults(func=_test_grep)
p.add_argument('--proj-def', '-d', required=True,
                   help='Project definition .yml')
p.add_argument('--trigger-name', '-t', required=True,
help='The name of the trigger the run is under')
p.add_argument('--run-name', '-r', required=True,
help='The name of the run to try')
p = cmds.add_parser('check-for-updates',
help='Check for updates to this simulator script')
p.set_defaults(func=_check_for_updates)
p.add_argument('--jobserv', '-j',
default='https://api.linarotechnologies.org/',
help='The JobServ to query. Default=%(default)s')
args = parser.parse_args(args)
return args
if __name__ == '__main__':
args = get_args()
if getattr(args, 'func', None):
args.func(args)
| agpl-3.0 | 2,794,882,061,083,793 | 33.122517 | 79 | 0.557011 | false |
CoffeeForThinkers/MagentoModels | mm/routines/product.py | 1 | 3304 | import logging
import mm.routines
_LOGGER = logging.getLogger(__name__)
class ProductRoutines(mm.routines.RoutinesBase):
noun = 'product'
def update_enum_product_attribute(self, sku, att_name, att_value):
record = \
self.get_one_record(
'update_enum_product_attribute',
sku, att_name, att_value)
record['affected'] = int(record['affected'])
return record
def upsert_product_int_attribute(self, sku, att_name, att_value, store_id=0):
record = \
self.get_one_record(
'upsert_product_int_attribute',
sku, att_name, att_value, store_id)
return record
def upsert_product_varchar_attribute(self, sku, att_name, att_value, store_id=0):
record = \
self.get_one_record(
'upsert_product_varchar_attribute',
sku, att_name, att_value, store_id)
return record
def get_configurable_associated_products(self, store_id=None, is_active=None, is_visible=None):
message = "Not a valid input value for '{0}'. Use: {1}"
assert type(store_id) is int or store_id is None, \
message.format('store_id', 'None or int')
assert is_active is True or is_active is False or is_active is None, \
message.format('is_active', 'None, True or False')
assert is_visible is True or is_visible is False or is_visible is None, \
message.format('is_visible', 'None, True or False')
rows = \
self.call(
'get_configurable_associated_products',
store_id,
is_active,
is_visible)
return rows
def get_configurable_associated_products_stock(self, store_id=None):
assert type(store_id) is int or store_id is None, \
"Not a valid input value for 'store_id'. Use: 'None or int'"
rows = \
self.call(
'get_configurable_associated_products_stock',
store_id)
return rows
def get_product_listing_with_attributes(self, product_type=None, store_id=None):
assert type(product_type) is str or product_type is None, \
"Not a valid input value for 'product_type'. Use: 'None or string'"
assert type(store_id) is int or store_id is None, \
"Not a valid input value for 'store_id'. Use: 'None or int'"
rows = \
self.call(
'get_product_listing_with_attributes',
product_type,
store_id)
return rows
def upsert_product_price(self, sku, currency_code, price, special_price, store_id=0):
record = \
self.get_one_record(
'upsert_product_price',
sku, store_id, currency_code, price, special_price)
record['affected'] = int(record['affected'])
return record
    def catalog_association(self, product_id, linked_product_id, link_type, is_cleanup=False):
record = \
self.get_one_record(
'catalog_association',
product_id, linked_product_id, link_type, 1 if is_cleanup else 0)
record['affected'] = int(record['affected'])
return record
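# A usage sketch (hypothetical wiring; RoutinesBase is assumed to supply the
# call() and get_one_record() helpers around the stored routines):
#
#   routines = ProductRoutines(connection)
#   routines.upsert_product_price('SKU-123', 'USD', 19.99, 14.99)
#   rows = routines.get_product_listing_with_attributes('simple', store_id=1)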
| gpl-3.0 | 2,908,460,994,270,722,600 | 31.712871 | 99 | 0.573245 | false |
mx3L/archivczsk | build/plugin/src/resources/libraries/youtube_dl/update.py | 1 | 7957 | from __future__ import unicode_literals
import io
import json
import traceback
import hashlib
import os
import subprocess
import sys
from zipimport import zipimporter
from .compat import compat_realpath
from .utils import encode_compat_str
from .version import __version__
def rsa_verify(message, signature, key):
from hashlib import sha256
assert isinstance(message, bytes)
byte_size = (len(bin(key[0])) - 2 + 8 - 1) // 8
signature = ('%x' % pow(int(signature, 16), key[1], key[0])).encode()
signature = (byte_size * 2 - len(signature)) * b'0' + signature
asn1 = b'3031300d060960864801650304020105000420'
asn1 += sha256(message).hexdigest().encode()
if byte_size < len(asn1) // 2 + 11:
return False
expected = b'0001' + (byte_size - len(asn1) // 2 - 3) * b'ff' + b'00' + asn1
return expected == signature
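# A verification sketch (hypothetical variable names; UPDATES_RSA_KEY defined
# in update_self below is the real public key):
#
#   ok = rsa_verify(versions_json_bytes, signature_hex, UPDATES_RSA_KEY)
#   if not ok:
#       raise SystemExit('versions file signature check failed')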
def update_self(to_screen, verbose, opener):
"""Update the program file with the latest version from the repository"""
UPDATE_URL = 'https://blackjack4494.github.io//update/'
VERSION_URL = UPDATE_URL + 'LATEST_VERSION'
JSON_URL = UPDATE_URL + 'versions.json'
UPDATES_RSA_KEY = (0x9d60ee4d8f805312fdb15a62f87b95bd66177b91df176765d13514a0f1754bcd2057295c5b6f1d35daa6742c3ffc9a82d3e118861c207995a8031e151d863c9927e304576bc80692bc8e094896fcf11b66f3e29e04e3a71e9a11558558acea1840aec37fc396fb6b65dc81a1c4144e03bd1c011de62e3f1357b327d08426fe93, 65537)
def sha256sum():
h = hashlib.sha256()
b = bytearray(128 * 1024)
mv = memoryview(b)
with open(os.path.realpath(sys.executable), 'rb', buffering=0) as f:
for n in iter(lambda: f.readinto(mv), 0):
h.update(mv[:n])
return h.hexdigest()
to_screen('Current Build Hash %s' % sha256sum())
if not isinstance(globals().get('__loader__'), zipimporter) and not hasattr(sys, 'frozen'):
to_screen('It looks like you installed youtube-dlc with a package manager, pip, setup.py or a tarball. Please use that to update.')
return
# compiled file.exe can find itself by
# to_screen(os.path.basename(sys.executable))
# and path to py or exe
# to_screen(os.path.realpath(sys.executable))
# Check if there is a new version
try:
newversion = opener.open(VERSION_URL).read().decode('utf-8').strip()
except Exception:
if verbose:
to_screen(encode_compat_str(traceback.format_exc()))
to_screen('ERROR: can\'t find the current version. Please try again later.')
to_screen('Visit https://github.com/blackjack4494/yt-dlc/releases/latest')
return
if newversion == __version__:
to_screen('youtube-dlc is up-to-date (' + __version__ + ')')
return
# Download and check versions info
try:
versions_info = opener.open(JSON_URL).read().decode('utf-8')
versions_info = json.loads(versions_info)
except Exception:
if verbose:
to_screen(encode_compat_str(traceback.format_exc()))
to_screen('ERROR: can\'t obtain versions info. Please try again later.')
to_screen('Visit https://github.com/blackjack4494/yt-dlc/releases/latest')
return
if 'signature' not in versions_info:
to_screen('ERROR: the versions file is not signed or corrupted. Aborting.')
return
signature = versions_info['signature']
del versions_info['signature']
if not rsa_verify(json.dumps(versions_info, sort_keys=True).encode('utf-8'), signature, UPDATES_RSA_KEY):
to_screen('ERROR: the versions file signature is invalid. Aborting.')
return
version_id = versions_info['latest']
def version_tuple(version_str):
return tuple(map(int, version_str.split('.')))
if version_tuple(__version__) >= version_tuple(version_id):
to_screen('youtube-dlc is up to date (%s)' % __version__)
return
to_screen('Updating to version ' + version_id + ' ...')
version = versions_info['versions'][version_id]
print_notes(to_screen, versions_info['versions'])
# sys.executable is set to the full pathname of the exe-file for py2exe
# though symlinks are not followed so that we need to do this manually
# with help of realpath
filename = compat_realpath(sys.executable if hasattr(sys, 'frozen') else sys.argv[0])
if not os.access(filename, os.W_OK):
to_screen('ERROR: no write permissions on %s' % filename)
return
# Py2EXE
if hasattr(sys, 'frozen'):
exe = filename
directory = os.path.dirname(exe)
if not os.access(directory, os.W_OK):
to_screen('ERROR: no write permissions on %s' % directory)
return
try:
urlh = opener.open(version['exe'][0])
newcontent = urlh.read()
urlh.close()
except (IOError, OSError):
if verbose:
to_screen(encode_compat_str(traceback.format_exc()))
to_screen('ERROR: unable to download latest version')
to_screen('Visit https://github.com/blackjack4494/yt-dlc/releases/latest')
return
newcontent_hash = hashlib.sha256(newcontent).hexdigest()
if newcontent_hash != version['exe'][1]:
to_screen('ERROR: the downloaded file hash does not match. Aborting.')
return
try:
with open(exe + '.new', 'wb') as outf:
outf.write(newcontent)
except (IOError, OSError):
if verbose:
to_screen(encode_compat_str(traceback.format_exc()))
to_screen('ERROR: unable to write the new version')
return
try:
bat = os.path.join(directory, 'youtube-dlc-updater.bat')
with io.open(bat, 'w') as batfile:
batfile.write('''
@echo off
echo Waiting for file handle to be closed ...
ping 127.0.0.1 -n 5 -w 1000 > NUL
move /Y "%s.new" "%s" > NUL
echo Updated youtube-dlc to version %s.
start /b "" cmd /c del "%%~f0"&exit /b"
\n''' % (exe, exe, version_id))
subprocess.Popen([bat]) # Continues to run in the background
return # Do not show premature success messages
except (IOError, OSError):
if verbose:
to_screen(encode_compat_str(traceback.format_exc()))
to_screen('ERROR: unable to overwrite current version')
return
# Zip unix package
elif isinstance(globals().get('__loader__'), zipimporter):
try:
urlh = opener.open(version['bin'][0])
newcontent = urlh.read()
urlh.close()
except (IOError, OSError):
if verbose:
to_screen(encode_compat_str(traceback.format_exc()))
to_screen('ERROR: unable to download latest version')
to_screen('Visit https://github.com/blackjack4494/yt-dlc/releases/latest')
return
newcontent_hash = hashlib.sha256(newcontent).hexdigest()
if newcontent_hash != version['bin'][1]:
to_screen('ERROR: the downloaded file hash does not match. Aborting.')
return
try:
with open(filename, 'wb') as outf:
outf.write(newcontent)
except (IOError, OSError):
if verbose:
to_screen(encode_compat_str(traceback.format_exc()))
to_screen('ERROR: unable to overwrite current version')
return
to_screen('Updated youtube-dlc. Restart youtube-dlc to use the new version.')
def get_notes(versions, fromVersion):
notes = []
for v, vdata in sorted(versions.items()):
if v > fromVersion:
notes.extend(vdata.get('notes', []))
return notes
def print_notes(to_screen, versions, fromVersion=__version__):
notes = get_notes(versions, fromVersion)
if notes:
to_screen('PLEASE NOTE:')
for note in notes:
to_screen(note)
| gpl-2.0 | -7,652,275,497,875,585,000 | 36.890476 | 289 | 0.622094 | false |
chippey/gaffer | python/GafferTest/CompoundPlugTest.py | 1 | 4764 | ##########################################################################
#
# Copyright (c) 2011-2012, John Haddon. All rights reserved.
# Copyright (c) 2011-2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import unittest
import gc
import IECore
import Gaffer
import GafferTest
class CompoundPlugTest( GafferTest.TestCase ) :
	def testConstructor( self ) :
p = Gaffer.CompoundPlug()
self.assertEqual( p.getName(), "CompoundPlug" )
self.assertEqual( p.direction(), Gaffer.Plug.Direction.In )
p = Gaffer.V3fPlug( name="b", direction=Gaffer.Plug.Direction.Out )
self.assertEqual( p.getName(), "b" )
self.assertEqual( p.direction(), Gaffer.Plug.Direction.Out )
def testDerivingInPython( self ) :
class TestCompoundPlug( Gaffer.CompoundPlug ) :
def __init__( self, name = "TestCompoundPlug", direction = Gaffer.Plug.Direction.In, flags = Gaffer.Plug.Flags.None ) :
Gaffer.CompoundPlug.__init__( self, name, direction, flags )
def acceptsChild( self, child ) :
if not Gaffer.CompoundPlug.acceptsChild( self, child ) :
return False
return isinstance( child, Gaffer.IntPlug )
IECore.registerRunTimeTyped( TestCompoundPlug )
# check the constructor
p = TestCompoundPlug()
self.assertEqual( p.getName(), "TestCompoundPlug" )
self.assertEqual( p.direction(), Gaffer.Plug.Direction.In )
self.assertEqual( p.getFlags(), Gaffer.Plug.Flags.None )
p = TestCompoundPlug( name = "p", direction = Gaffer.Plug.Direction.Out, flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
self.assertEqual( p.getName(), "p" )
self.assertEqual( p.direction(), Gaffer.Plug.Direction.Out )
self.assertEqual( p.getFlags(), Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
# check that acceptsChild can be overridden
p = TestCompoundPlug()
self.assertRaises( RuntimeError, p.addChild, Gaffer.FloatPlug() )
p.addChild( Gaffer.IntPlug() )
# check that the fact the plug has been wrapped solves the object identity problem
p = TestCompoundPlug()
n = Gaffer.Node()
n["p"] = p
self.failUnless( n["p"] is p )
def testRunTimeTyped( self ) :
p = Gaffer.CompoundPlug( "hello" )
self.failUnless( p.isInstanceOf( Gaffer.CompoundPlug.staticTypeId() ) )
self.assertEqual( IECore.RunTimeTyped.baseTypeId( p.typeId() ), Gaffer.ValuePlug.staticTypeId() )
def testCreateCounterpart( self ) :
c = Gaffer.CompoundPlug( "a", Gaffer.Plug.Direction.Out )
c["b"] = Gaffer.IntPlug( direction = Gaffer.Plug.Direction.Out )
c["c"] = Gaffer.IntPlug( direction = Gaffer.Plug.Direction.Out )
c2 = c.createCounterpart( "aa", Gaffer.Plug.Direction.In )
self.assertEqual( c2.getName(), "aa" )
self.assertEqual( c2.direction(), Gaffer.Plug.Direction.In )
self.assertEqual( c2["b"].direction(), Gaffer.Plug.Direction.In )
self.assertEqual( c2["c"].direction(), Gaffer.Plug.Direction.In )
def testNonValuePlugChildren( self ) :
c = Gaffer.CompoundPlug()
p = Gaffer.Plug()
self.assertTrue( c.acceptsChild( p ) )
c["p"] = p
self.assertTrue( p.parent().isSame( c ) )
if __name__ == "__main__":
unittest.main()
| bsd-3-clause | 9,192,075,603,553,010,000 | 34.819549 | 138 | 0.696054 | false |
graphql-python/graphql-core | tests/utilities/test_get_introspection_query.py | 1 | 1816 | import re
from graphql.utilities import get_introspection_query
def describe_get_introspection_query():
def skips_all_description_fields():
has_descriptions = re.compile(r"\bdescription\b").search
assert has_descriptions(get_introspection_query())
assert has_descriptions(get_introspection_query(descriptions=True))
assert not has_descriptions(get_introspection_query(descriptions=False))
def includes_is_repeatable_field_on_directives():
has_repeatability = re.compile(r"\bisRepeatable\b").search
assert not has_repeatability(get_introspection_query())
assert has_repeatability(get_introspection_query(directive_is_repeatable=True))
assert not has_repeatability(
get_introspection_query(directive_is_repeatable=False)
)
def includes_description_field_on_schema():
all_descriptions = re.compile(r"\bdescription\b").findall
assert len(all_descriptions(get_introspection_query())) == 5
assert (
len(all_descriptions(get_introspection_query(schema_description=False)))
== 5
)
assert (
len(all_descriptions(get_introspection_query(schema_description=True))) == 6
)
assert not all_descriptions(
get_introspection_query(descriptions=False, schema_description=True)
)
def includes_specified_by_url_field():
all_specified_by_urls = re.compile(r"\bspecifiedByUrl\b").findall
assert not all_specified_by_urls(get_introspection_query())
assert not all_specified_by_urls(
get_introspection_query(specified_by_url=False)
)
assert (
len(all_specified_by_urls(get_introspection_query(specified_by_url=True)))
== 1
)
| mit | 182,015,892,003,239,520 | 30.859649 | 88 | 0.660793 | false |
glormph/msstitch | src/app/drivers/prottable.py | 1 | 5076 | from app.drivers.base import PepProttableDriver
from app.drivers.options import prottable_options
from app.readers import tsv as tsvreader
from app.dataformats import prottable as prottabledata
from app.dataformats import peptable as peptabledata
from app.dataformats import mzidtsv as mzidtsvdata
from app.actions import proteins
from app.actions.psmtable import isosummarize
class ProttableDriver(PepProttableDriver):
mediannormalize = False # FIXME remove when done
def set_options(self):
super().set_options()
options = self.define_options(['decoyfn', 'scorecolpattern', 'minlogscore',
'quantcolpattern', 'minint', 'denomcols', 'denompatterns',
'precursor', 'psmfile'], prottable_options)
self.options.update(options)
def get_td_proteins_bestpep(self, theader, dheader):
self.header = [self.headeraccfield] + prottabledata.PICKED_HEADER
tscorecol = tsvreader.get_cols_in_file(self.scorecolpattern, theader, True)
dscorecol = tsvreader.get_cols_in_file(self.scorecolpattern, dheader, True)
tpeps = tsvreader.generate_tsv_psms(self.fn, theader)
dpeps = tsvreader.generate_tsv_psms(self.decoyfn, dheader)
targets = proteins.generate_bestpep_proteins(tpeps, tscorecol,
self.minlogscore, self.headeraccfield, self.featcol)
decoys = proteins.generate_bestpep_proteins(dpeps, dscorecol,
self.minlogscore, self.headeraccfield, self.featcol)
return targets, decoys
def get_quant(self, theader, features):
if self.precursor:
tpeps = tsvreader.generate_tsv_psms(self.fn, theader)
self.header.append(prottabledata.HEADER_AREA)
features = proteins.add_ms1_quant_from_top3_mzidtsv(features,
tpeps, self.headeraccfield, self.featcol)
if self.quantcolpattern:
psmheader = tsvreader.get_tsv_header(self.psmfile)
if self.denomcols is not None:
denomcols = [self.number_to_headerfield(col, psmheader)
for col in self.denomcols]
elif self.denompatterns is not None:
denomcolnrs = [tsvreader.get_columns_by_pattern(psmheader, pattern)
for pattern in self.denompatterns]
denomcols = set([col for cols in denomcolnrs for col in cols])
else:
raise RuntimeError('Must define either denominator column numbers '
                                   'or regex patterns to find them')
quantcols = tsvreader.get_columns_by_pattern(psmheader, self.quantcolpattern)
nopsms = [isosummarize.get_no_psms_field(qf) for qf in quantcols]
self.header = self.header + quantcols + nopsms
features = isosummarize.get_isobaric_ratios(self.psmfile, psmheader,
quantcols, denomcols, self.minint,
features, self.headeraccfield,
self.featcol, self.mediannormalize)
return features
class ProteinsDriver(ProttableDriver):
command = 'proteins'
commandhelp = 'Create a protein table from peptides'
outsuffix = '_proteins.tsv'
headeraccfield = prottabledata.HEADER_PROTEIN
featcol = peptabledata.HEADER_MASTERPROTEINS
def set_features(self):
theader = tsvreader.get_tsv_header(self.fn)
dheader = tsvreader.get_tsv_header(self.decoyfn)
targets, decoys = self.get_td_proteins_bestpep(theader, dheader)
features = proteins.generate_protein_fdr(targets, decoys, self.headeraccfield)
self.features = self.get_quant(theader, features)
class GenesDriver(ProttableDriver):
command = 'genes'
commandhelp = 'Create a gene table from peptides'
outsuffix = '_genes.tsv'
headeraccfield = prottabledata.HEADER_GENENAME
featcol = mzidtsvdata.HEADER_SYMBOL
def set_options(self):
super().set_options()
options = self.define_options(['fastadelim', 'genefield', 't_fasta',
'd_fasta'], prottable_options)
self.options.update(options)
def set_features(self):
theader = tsvreader.get_tsv_header(self.fn)
dheader = tsvreader.get_tsv_header(self.decoyfn)
targets, decoys = self.get_td_proteins_bestpep(theader, dheader)
fastadelim, genefield = self.get_fastadelim_genefield(self.fastadelim,
self.genefield)
features = proteins.generate_pick_fdr(
targets, decoys, self.t_fasta, self.d_fasta, 'fasta', self.headeraccfield,
fastadelim, genefield)
self.features = self.get_quant(theader, features)
class ENSGDriver(GenesDriver):
command = 'ensg'
commandhelp = 'Create an ENSG table from peptides'
outsuffix = '_ensg.tsv'
headeraccfield = prottabledata.HEADER_GENEID
featcol = mzidtsvdata.HEADER_GENE
# TODO create a result driver? For what?
| mit | -8,372,553,706,438,568,000 | 43.920354 | 89 | 0.650315 | false |
jgagneastro/FireHose_OLD | 3-XIDL/23-XIDL/idlspec2d/bin/copy_spPlan.py | 2 | 5070 | #!/usr/bin/env python
"""
Utility script to copy the spPlan* files from one production to another
while updating the RUN2D entries appropriately.
Stephen Bailey, LBL
Fall 2012
"""
import sys
import os
import os.path
import random
from glob import glob
import pyfits
#- copy spPlan file while updating the RUN2D entry
def copyplan(inplan, outplan, run2d):
finput = open(inplan)
foutput = open(outplan, 'w')
for line in finput:
if line.startswith('RUN2D'):
xx = line.split(None, 2) #- RUN2D VER [# Comment]
xx[1] = run2d #- replace RUN2D
line = " ".join(xx) + '\n' #- put back together with newline
foutput.write(line)
finput.close()
foutput.close()
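#- For example (illustrative plan content), copyplan() with run2d='v6_0_0'
#- rewrites a line such as
#-   RUN2D v5_7_0 # pipeline version
#- to
#-   RUN2D v6_0_0 # pipeline version
#- and copies every other line unchanged.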
#-------------------------------------------------------------------------
import optparse
parser = optparse.OptionParser(usage = "%prog [options]",
description="""Copy spPlan files from one redux version to another while replacing RUN2D.
""")
parser.add_option("-i", "--input", type="string", help="input directory [default $BOSS_SPECTRO_REDUX/$RUN2D/]")
parser.add_option("-o", "--output", type="string", help="output directory")
parser.add_option("--run2d", type="string", help="output RUN2D version")
parser.add_option("--minmjd", type="int", help="min MJD to include", default=0)
parser.add_option("--maxmjd", type="int", help="max MJD to include", default=100000)
parser.add_option("-n", "--numplates", type="int", help="number of plates to copy [default all good ones]")
parser.add_option("-R", "--randseed", type="int", default=0, help="random seed [default 0]")
### parser.add_option("--run1d", type="string", help="output RUN1D version")
parser.add_option("-b", "--bad", help="also copy bad quality plans, not just good ones", action="store_true")
parser.add_option("-p", "--platelist", help="override platelist location [default input/platelist.fits]")
opts, args = parser.parse_args()
#- Set random seed so that results are reproducible
random.seed(opts.randseed)
#- Default input directory $BOSS_SPECTRO_REDUX/$RUN2D/
if opts.input is None:
opts.input = os.environ['BOSS_SPECTRO_REDUX'] + "/" + os.environ['RUN2D']
#- required options
if opts.output is None:
print >> sys.stderr, 'ERROR: you must specify -o/--output directory'
print >> sys.stderr, 'To see all options, run copy_spPlan.py -h'
sys.exit(1)
#- choose run2d based upon output name if needed
if opts.run2d is None:
opts.run2d = os.path.basename(opts.output)
if opts.run2d in (None, '', '.'):
opts.run2d = os.path.basename(os.path.dirname(opts.output))
if opts.run2d in (None, '', '.'):
print "ERROR: Unable to derive RUN2D from path", opts.output
print "ERROR: use --run2d instead"
sys.exit(2)
print "Using RUN2D="+opts.run2d
#- Create output directory if needed
if not os.path.isdir(opts.output):
os.makedirs(opts.output)
#- Find platelist file
if opts.platelist is None:
opts.platelist = opts.input + '/platelist.fits'
if not os.path.exists(opts.platelist):
print >> sys.stderr, "ERROR: if no platelist.fits in input dir, you must specify a platelist"
sys.exit(1)
#- Create set of plates with at least one good plugging within the
#- desired MJD range
p = pyfits.getdata(opts.platelist, 1)
goodplates = set()
for plate, mjd, quality in zip(p['PLATE'], p['MJD'], p['PLATEQUALITY']):
if (quality.strip() == 'good' or opts.bad):
if opts.minmjd <= mjd <= opts.maxmjd:
goodplates.add( plate )
#- Randomly subsample
if opts.numplates is not None:
goodplates = set( random.sample(goodplates, opts.numplates) )
#- Make matching list of good pluggings for those plates
goodplugs = set()
for plate, mjd, quality in zip(p['PLATE'], p['MJD'], p['PLATEQUALITY']):
if plate in goodplates and (quality.strip() == 'good' or opts.bad):
goodplugs.add( (plate, mjd) )
#- Loop over plates, copying the plan files
ncopied = 0
for plate in sorted(goodplates):
platedir = os.path.join(opts.input, str(plate))
print '\rPlate', plate,
sys.stdout.flush()
#- Copy all the plan2d files since they may be needed for the coadd
#- even if they aren't in platelist
    #- Define outdir before the loops so the plancomb loop below can't hit
    #- an undefined name when a plate has no plan2d files
    outdir = opts.output + "/" + str(plate)
    if not os.path.isdir(outdir):
        os.makedirs(outdir)
    plan2dfiles = glob(platedir + '/spPlan2d*.par')
    for planfile in plan2dfiles:
        outplan = outdir + '/' + os.path.basename(planfile)
        copyplan(planfile, outplan, opts.run2d)
#- Copy only the good plate-mjd plancomb files
plancombfiles = glob(platedir + '/spPlancomb*.par')
for planfile in plancombfiles:
mjd = int(os.path.basename(planfile).split('-')[2][0:5])
if (plate, mjd) in goodplugs:
outplan = outdir + '/' + os.path.basename(planfile)
copyplan(planfile, outplan, opts.run2d)
#- final blank line print to get CR since we were being fancy with '\r...'
print
| gpl-2.0 | -461,338,971,563,287,600 | 37.120301 | 112 | 0.642998 | false |
mattduan/proof | util/generator/ResourceGenerator.py | 1 | 18070 | """
A PROOF resource generator utility script.
"""
import string
import os, os.path
import re, sys
import datetime
import getopt
import ConfigParser
# required path
#import PyStartup
from util.Trace import traceBack
import proof.driver.MySQLConnection as MySQLConnection
from proof.ProofResource import STRATEGY_STATIC, STRATEGY_DYNAMIC
# mysql is the only supported adapter at this time
DEFAULT_ADAPTER = 'mysql'
class ResourceGenerator:
def __init__( self,
name,
version,
proof_path,
schemas,
adapter = DEFAULT_ADAPTER,
python_path = '/path/to/python/modules',
default_schema = '',
default_namespace = '',
id = None,
date = None
):
""" Constructor.
"""
#######################
# Resource Attributes #
#######################
# name
self.__name = name
# version
self.__version = version
# id
if not id:
self.__id = string.replace(string.lower(name), ' ', '_')
else:
self.__id = id
# proof path
self.__proof_path = proof_path
if self.__proof_path[0] == '/':
raise Exception("proof_path has to be a relative path")
# adapter used for the resource
self.__adapter = adapter
# path to services base
self.__python_path = python_path
self.__root_path = os.path.join(self.__python_path, self.__proof_path)
# default schema [[ FILL_IN_NEEDED ]]
self.__default_schema = default_schema
# default namespace [[ FILL_IN_NEEDED ]]
self.__default_namespace = default_namespace
# date created
self.__date = date or datetime.date.today().__str__()
self.schemas = schemas
self.xml = ''
def process(self):
self.xml = """\
<?xml version="1.0" encoding="UTF-8"?>
<resource>
%(info)s
%(strategies)s
%(schemas)s
</resource>""" % { 'info' : self.generate_info(),
'strategies' : self.generate_strategies(),
'schemas' : self.generate_schemas() }
# save to _gen
res_path = self.__gen_path()
filename = "%s.xml" % (string.replace(string.lower(self.__name), ' ', '_'))
open( os.path.join(res_path, filename), "w" ).write(self.xml)
def __gen_path(self):
""" Return gen path.
"""
# make sure the basic path exists
gen_path = os.path.join( self.__root_path,
'resource',
'_gen' )
if not os.access(gen_path, os.F_OK):
os.makedirs(gen_path)
date = datetime.date.today().strftime('%Y%m%d')
i = 0
full_path = None
while not full_path or os.access(full_path, os.F_OK):
i += 1
full_path = os.path.join( gen_path,
"%s_%s" % (date, string.zfill(`i`, 3)) )
os.mkdir(full_path)
return full_path
def generate_info(self):
""" Return resource info header xml.
"""
return """\
<name>%(name)s</name>
<id>%(id)s</id>
<version>%(version)s</version>
<date>%(date)s</date>
<proof_path>%(path)s</proof_path>
<default_schema>%(default_schema)s</default_schema>
<default_namespace>%(default_namespace)s</default_namespace>""" % {
'name' : self.__name,
'id' : self.__id,
'version' : self.__version,
'date' : self.__date,
'path' : self.__python_path,
'default_schema' : self.__default_schema,
'default_namespace' : self.__default_namespace
}
def generate_strategies(self):
""" Return resource strategies xml block.
"""
return """\
<!-- strategy for different resource types -->
<strategies>
<adapter>%(adapter)s</adapter>
<databasemap>%(dbmap)s</databasemap>
<namespace>%(namespace)s</namespace>
<object>%(object)s</object>
<aggregate>%(aggregate)s</aggregate>
</strategies>""" % {
'adapter' : STRATEGY_STATIC,
'dbmap' : STRATEGY_STATIC,
'namespace' : STRATEGY_DYNAMIC,
'object' : STRATEGY_STATIC,
'aggregate' : STRATEGY_STATIC,
}
def generate_schemas(self):
""" Return all schema xml blocks.
"""
xml_list = []
for schema in self.schemas:
db_host = schema['db_host']
db_name = schema['db_name']
db_user = schema['db_user']
db_pass = schema['db_pass']
aggregates = schema['aggregates']
schema_name = string.upper(db_name)
sg = SchemaGenerator( schema_name,
self.__adapter,
self.__proof_path,
db_host,
db_name,
db_user,
db_pass,
aggregates = aggregates )
sg.process()
xml_list.append(sg.xml)
return string.join(xml_list, "\n\n")
class SchemaGenerator:
""" generate one schema using one database.
"""
def __init__( self,
schema_name,
adapter,
proof_path,
db_host,
db_name,
db_user,
db_pass,
aggregates = {}
):
self.__schema_name = schema_name
self.__adapter = adapter
self.__proof_path = proof_path
self.__aggregates = aggregates
self.__module_path = proof_path.replace('/', '.')
# parse tables first
self.table_gen = TableGenerator( db_host,
db_name,
db_user,
db_pass )
self.table_gen.process()
        # did we miss anything?
self.__check_mistakes()
self.xml = ''
def __check_mistakes(self):
# check all tables are included in aggregate defines
aggr_tables = []
for aggregate in self.__aggregates.keys():
aggr_tables.append(aggregate)
for child in self.__aggregates[aggregate].keys():
aggr_tables.append(child)
db_tables = self.table_gen.table_names
if len(aggr_tables) != len(db_tables):
aggr_tables.sort()
db_tables.sort()
missing_tables = []
for table in db_tables:
if table not in aggr_tables:
missing_tables.append(table)
raise Exception( "Not all tables are included in aggregates:\n" + \
"Aggregated tables: \n%s\n" % (aggr_tables) + \
"Database tables: \n%s\n" % (db_tables) + \
"Missing tables: \n%s\n" % (missing_tables) )
def process(self):
""" Create the xml config resource for one schema.
"""
self.xml = """\
<schema name="%(schema_name)s">
%(adapter)s
%(namespaces)s
%(tables)s
%(objects)s
%(aggregates)s
</schema>""" % { 'schema_name' : self.__schema_name,
'adapter' : self.generate_adapter(),
'namespaces' : self.generate_namespaces(),
'tables' : self.generate_tables(),
'objects' : self.generate_objects(),
'aggregates' : self.generate_aggregates() }
def generate_adapter(self):
return """\
<adapter>%s</adapter>""" % (self.__adapter)
def generate_namespaces(self):
""" Generated resource always be dynamic.
"""
return """\
<!-- start namespace -->
<namespaces>
</namespaces>
<!-- end namespace -->"""
def generate_tables(self):
return self.table_gen.xml
def generate_objects(self):
""" Generate objects section in a schema based available tables.
@return generated xml.
"""
xml = """\
<!-- start objects -->
<objects>"""
for table_name in self.table_gen.table_names:
xml += """
<object name="%(table_name)s">
<module>%(module_path)s.%(schema_name)s.%(table_name)s</module>
<class>%(table_name)s</class>
<factorymodule>%(module_path)s.%(schema_name)s.%(table_name)sFactory</factorymodule>
<factoryclass>%(table_name)sFactory</factoryclass>
</object>""" % { 'table_name' : table_name,
'module_path' : self.__module_path,
'schema_name' : string.lower(self.__schema_name) }
xml += """
</objects>
<!-- end objects -->"""
return xml
def generate_aggregates(self):
""" Generate aggregates section in a schema based available tables.
All tables are considered as aggregates, which need to be defined
further when defining specific domains.
@return generated xml.
"""
xml = """\
<!-- start aggregates -->
<aggregates>"""
for table_name in self.table_gen.table_names:
if self.__aggregates and table_name in self.__aggregates.keys():
xml += """
<aggregate name="%(table_name)s">
<module>%(module_path)s.%(schema_name)s.%(table_name)sAggregate</module>
<class>%(table_name)sAggregate</class>
<factorymodule>%(module_path)s.%(schema_name)s.%(table_name)sAggregateFactory</factorymodule>
<factoryclass>%(table_name)sAggregateFactory</factoryclass>
<repositorymodule>%(module_path)s.%(schema_name)s.%(table_name)sAggregateRepository</repositorymodule>
<repositoryclass>%(table_name)sAggregateRepository</repositoryclass>
</aggregate>""" % { 'table_name' : table_name,
'module_path' : self.__module_path,
'schema_name' : string.lower(self.__schema_name) }
xml += """
</aggregates>
<!-- end aggregates -->"""
return xml
class TableGenerator:
""" table generator for one database schema.
"""
def __init__( self,
db_host,
db_name,
db_user,
db_pass,
):
self.__db_host = db_host
self.__db_name = db_name
self.__db_user = db_user
self.__db_pass = db_pass
self.con = MySQLConnection.MySQLConnection( host = self.__db_host,
user = self.__db_user,
passwd = self.__db_pass,
db = self.__db_name )
self.table_names = []
self.tables = []
self.xml = ''
def process(self):
self.collect_tables()
self.parse_tables()
self.py2xml()
def collect_tables(self):
""" Collect all tables in the database.
"""
sql = "show tables"
cursor = self.con.cursor()
cursor.query(sql)
result = cursor.fetchall()
for row in result:
self.table_names.append(row[0])
def parse_tables(self):
""" Parse all tables.
"""
cursor = self.con.cursor()
for table in self.table_names:
sql = "desc `%s`" % (table)
cursor.query(sql)
result = cursor.fetchall()
#sys.stderr.write(`result`)
self.tables.append( [table, self.parse_columns(result)] )
def parse_columns(self, column_list):
""" Parse columns in the table.
Required fields for each column are:
<type>varchar</type>
<size>25</size>
<pk>false</pk>
<notnull>true</notnull>
<fktable>none</fktable>
<fkcolumn>none</fkcolumn>
@param column_list A list of columns with these attributes:
Field, Type, Null, Key, Default, Extra
"""
columns = []
for column in column_list:
name = string.strip(column[0])
typestr = string.strip(column[1])
isnull = string.strip(column[2])
key = string.strip(column[3])
type_name = typestr
size = 0
m = re.match("^([^(]+)\((.*?)\)$", typestr)
if m:
type_name = m.group(1)
# only varchar/char size make sense
if type_name in ( 'varchar', 'char' ):
size = m.group(2)
if isnull == 'YES':
notnull = 'false'
else:
notnull = 'true'
if key == 'PRI':
pk = 'true'
else:
pk = 'false'
# guess foreign key
# assume all primary keys are 'Id'
# all foreign keys should follow the pattern: <table_name>Id
fktable = 'none'
fkcolumn = 'none'
if re.match(".+Id$", name):
fkt = name[:-2]
if fkt in self.table_names:
fktable = fkt
fkcolumn = 'Id'
columns.append( [ name, { 'type' : type_name,
'size' : size,
'pk' : pk,
'notnull' : notnull,
'fktable' : fktable,
'fkcolumn' : fkcolumn
} ] )
return columns
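    # An illustrative entry produced for a column "UserId varchar(25) NOT NULL"
    # when a User table exists (hypothetical names):
    #
    #   ['UserId', {'type': 'varchar', 'size': '25', 'pk': 'false',
    #               'notnull': 'true', 'fktable': 'User', 'fkcolumn': 'Id'}]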
def py2xml(self):
""" Convert parsed python list to an xml string.
"""
xml = """\
<!-- start tables -->
<tables>"""
for table_name, columns in self.tables:
xml += """
<table name="%s">
<columns>""" % (table_name)
for column_name, fields in columns:
fields['name'] = column_name
xml += """
<column name="%(name)s">
<type>%(type)s</type>
<size>%(size)s</size>
<pk>%(pk)s</pk>
<notnull>%(notnull)s</notnull>
<fktable>%(fktable)s</fktable>
<fkcolumn>%(fkcolumn)s</fkcolumn>
</column>""" % fields
xml += """
</columns>
</table>"""
xml += """
</tables>
<!-- end tables -->"""
self.xml = xml
def usage(msg=''):
print """USAGE: %s options [-c filename]
Description
===========
Generate PROOF XML Resource for database schemas configured in the configuration
file.
Parameters
==========
options:
h/help -- print this message
c(cfg):
c/cfg -- specify the configuration filename
default 'resource.cfg'
"""%( sys.argv[0] )
if msg:
print >> sys.stderr, msg
sys.exit(1)
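# A hypothetical resource.cfg sketch (section/option names as parsed below;
# the child specs after each aggregate table name are illustrative only):
#
#   [INFO]
#   name = My Project
#   version = 1.0
#   proof_path = myproject/proof
#
#   [SCHEMA1]
#   db_host = localhost
#   db_name = mydb
#   db_user = dbuser
#   db_pass = secret
#   aggregates = User,Order
#   User = UserProfile,UserId,Id|UserSettings,UserId,Id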
if __name__ == '__main__':
# options:
# for simplity, we use config file for now
try:
opts, args = getopt.getopt(sys.argv[1:], 'hc:', ['help', 'cfg='])
except getopt.error, msg:
usage()
cfg_filename = 'resource.cfg'
for opt, arg in opts:
if opt in ('-h', '--help'):
usage()
if opt in ('-c', '--cfg'):
cfg_filename = arg
config = ConfigParser.ConfigParser()
try:
config.readfp(open(cfg_filename))
except:
usage(msg=traceBack())
# required parameters
name = config.get('INFO', 'name')
version = config.get('INFO', 'version')
proof_path = config.get('INFO', 'proof_path')
# optional
kwargs = {}
for opt in ['id', 'adapter', 'python_path', 'default_schema', 'default_namespace', 'date']:
if config.has_option('INFO', opt):
kwargs[opt] = config.get('INFO', opt)
# schemas
schemas = []
sections = config.sections()
for section in sections:
if section != 'INFO':
# schema
aggregate_dict = {}
if config.has_option(section, 'aggregates'):
aggregates = config.get(section, 'aggregates')
aggregate_list = string.split(aggregates, ",")
for aggregate in aggregate_list:
aggregate_dict[aggregate] = {}
if config.has_option(section, aggregate):
child_list = string.split(config.get(section, aggregate), "|")
for child in child_list:
child_spec_list = string.split(child, ",")
aggregate_dict[aggregate][child_spec_list[0]] = child_spec_list
schemas.append( { 'db_host' : config.get(section, 'db_host'),
'db_name' : config.get(section, 'db_name'),
'db_user' : config.get(section, 'db_user'),
'db_pass' : config.get(section, 'db_pass'),
'aggregates' : aggregate_dict,
} )
res_gen = ResourceGenerator(name, version, proof_path, schemas, **kwargs)
res_gen.process()
print res_gen.xml
| bsd-3-clause | 3,100,293,126,923,339,000 | 30.426087 | 144 | 0.469341 | false |
openlmi/openlmi-doc | doc/python/lmi/test/base.py | 1 | 5665 | # Copyright (C) 2012-2014 Red Hat, Inc. All rights reserved.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Authors: Jan Safranek <[email protected]>
# Authors: Michal Minar <[email protected]>
# Authors: Roman Rakus <[email protected]>
#
"""
Base classes for *OpenLMI Provider* test cases.
"""
import os
import socket
from lmi.test import CIMError
from lmi.test import unittest
from lmi.test import util
from lmi.test import wbem
def render_iname(iname, indent=2):
"""
Render object path in human readable way. Result will occupy multiple
lines. The first line is a class name, which is not indented at all. Other
lines will be indented with *indent* spaces.
:param iname: Object path to render.
:type iname: :py:class:`lmiwbem.CIMInstanceName`
    :param integer indent: Number of spaces prefixing all lines but the first.
:returns: *iname* nicely rendered.
:rtype: string
"""
if not isinstance(iname, wbem.CIMInstanceName):
return repr(iname)
lines = [ "%s" % iname.classname
, " "*indent + "namespace: %s" % iname.namespace
, " "*indent + "keys:"]
align = max([len(k) for k in iname.keybindings.iterkeys()])
for key, value in iname.keybindings.iteritems():
if isinstance(value, wbem.CIMInstanceName):
value = render_iname(value, indent + 4)
lines.append(" "*indent + (" %%-%ds : %%s" % align) % (key, value))
return "\n".join(lines)
class BaseLmiTestCase(unittest.TestCase):
"""
Base class for all LMI test cases.
"""
#: Value used in ``SystemName`` key properties in various *CIM* instances.
#: It's also used to fill ``CIM_ComputerySystem.Name`` property.
SYSTEM_NAME = socket.gethostname()
@classmethod
def setUpClass(cls):
#: Cached value of SystemCreationClassName set with
#: ``LMI_CS_CLASSNAME`` environment variable.
cls.system_cs_name = os.environ.get(
"LMI_CS_CLASSNAME", "PG_ComputerSystem")
#: *URL* of *CIMOM* we connect to. Overriden with ``LMI_CIMOM_URL``
#: environment variable.
cls.url = os.environ.get("LMI_CIMOM_URL", "https://localhost:5989")
#: User name for authentication with *CIMOM*. Overriden with
#: ``LMI_CIMOM_USERNAME`` variable.
cls.username = os.environ.get("LMI_CIMOM_USERNAME", "root")
#: User's password for authentication with *CIMOM*. Overriden with
#: ``LMI_CIMOM_PASSWORD`` environment variable.
cls.password = os.environ.get("LMI_CIMOM_PASSWORD", "")
#: Name of *CIMOM* we connect to. There are two possible values:
#: ``"tog-pegasus"`` and ``"sblim-sfcb"``. Overriden with
#: ``LMI_CIMOM_BROKER`` environment variable.
cls.cimom = os.environ.get("LMI_CIMOM_BROKER", "tog-pegasus")
#: Boolean value saying whether to run dangerous tests. These are marked
#: with :py:func:`mark_dangerous` decorator. This is set with
#: ``LMI_RUN_DANGEROUS`` environment variable.
cls.run_dangerous = util.get_environvar('LMI_RUN_DANGEROUS', '0', bool)
#: Boolean value saying whether to run tedious tests. These are marked
#: with :py:func:`mark_tedious` decorator. This is set with
#: ``LMI_RUN_TEDIOUS`` environment variable.
cls.run_tedious = util.get_environvar('LMI_RUN_TEDIOUS', '1', bool)
def assertRaisesCIM(self, cim_err_code, func, *args, **kwds):
"""
This test passes if given function called with supplied arguments
raises `CIMError` with given cim error code.
"""
with self.assertRaises(CIMError) as cm:
func(*args, **kwds)
self.assertEqual(cim_err_code, cm.exception.args[0])
def assertCIMNameEqual(self, fst, snd, msg=None):
"""
Compare two objects of :py:class:`lmiwbem.CIMInstanceName`. Their host
properties are not checked.
"""
if msg is None:
msg = ( "%s\n\nis not equal to: %s"
% (render_iname(fst), render_iname(snd)))
self.assertTrue(util.check_inames_equal(fst, snd), msg)
def assertCIMNameIn(self, name, candidates):
"""
        Checks that the given :py:class:`lmiwbem.CIMInstanceName` is present in
        the set of candidates. It compares all properties but ``host``.
"""
for candidate in candidates:
if util.check_inames_equal(name, candidate):
return
self.assertTrue(False, 'name "%s" is not in candidates' % str(name))
def assertNocaseDictEqual(self, fst, snd, msg=None):
"""
Compare two no-case dictionaries ignoring the case of their keys.
"""
fst_dict = {}
for (key, value) in fst.iteritems():
fst_dict[key.lower()] = value
snd_dict = {}
for (key, value) in snd.iteritems():
snd_dict[key.lower()] = value
self.assertEqual(fst_dict, snd_dict, msg)
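# A minimal usage sketch (hypothetical; the pywbem-style connection call and
# `LMI_NoSuchClass` are assumptions, not part of the original module):
#
#   class FooProviderTest(BaseLmiTestCase):
#       def test_no_such_class(self):
#           conn = wbem.WBEMConnection(self.url,
#                   creds=(self.username, self.password))
#           self.assertRaisesCIM(wbem.CIM_ERR_INVALID_CLASS,
#                   conn.EnumerateInstances, 'LMI_NoSuchClass')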
| gpl-2.0 | -3,318,208,735,782,902,000 | 41.276119 | 80 | 0.644484 | false |
edeposit/edeposit.amqp.storage | tests/structures/test_db_publication.py | 1 | 4163 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# Interpreter version: python 2.7
#
# Imports =====================================================================
from __future__ import unicode_literals
import uuid
import copy
import base64
import random
import os.path
import tempfile
from BTrees.OOBTree import OOTreeSet
import pytest
from faker import Factory
import storage
from storage.structures import Publication
from storage.structures import DBPublication
from zeo_connector_defaults import tmp_context_name
# Variables ===================================================================
FAKER = Factory.create('cs_CZ')
# Fixtures ====================================================================
@pytest.fixture
def pdf_file():
fn = os.path.join(os.path.dirname(__file__), "../data/ebook.pdf")
with open(fn) as f:
return f.read()
@pytest.fixture
def b64_pdf_file():
return base64.b64encode(
pdf_file()
)
@pytest.fixture
def random_publication_comm(monkeypatch):
monkeypatch.setattr(
storage.structures.db.db_publication,
"PUBLIC_DIR",
tempfile.mkdtemp(dir=tmp_context_name())
)
monkeypatch.setattr(
storage.structures.db.db_publication,
"PRIVATE_DIR",
tempfile.mkdtemp(dir=tmp_context_name())
)
return Publication(
title=FAKER.text(20),
author=FAKER.name(),
pub_year="%04d" % random.randint(1990, 2015),
isbn=FAKER.ssn(),
urnnbn="urn:nbn:cz:edep002-00%04d" % random.randint(0, 999),
uuid=str(uuid.uuid4()),
is_public=True,
b64_data=b64_pdf_file(),
filename="/home/xex.pdf",
)
@pytest.fixture
def random_publication(monkeypatch):
return DBPublication.from_comm(random_publication_comm(monkeypatch))
# Tests =======================================================================
def test_random_publication(random_publication):
assert random_publication.title
assert random_publication.author
assert random_publication.pub_year
assert random_publication.isbn
assert random_publication.urnnbn
assert random_publication.uuid
assert random_publication.is_public
assert random_publication.filename
assert random_publication.indexes
assert random_publication.project_key
assert os.path.exists(random_publication.file_pointer)
assert os.path.isfile(random_publication.file_pointer)
assert random_publication.file_pointer.startswith("/tmp")
rp = random_publication
rpc = random_publication.to_comm()
assert rp.title == rpc.title
assert rp.author == rpc.author
assert rp.pub_year == rpc.pub_year
assert rp.isbn == rpc.isbn
assert rp.urnnbn == rpc.urnnbn
assert rp.uuid == rpc.uuid
assert rp.is_public == rpc.is_public
assert rp.filename == rpc.filename
assert rp.file_pointer == rpc.file_pointer
def test_op_eq(random_publication):
rand_copy = copy.deepcopy(random_publication)
assert random_publication == rand_copy
# assert random_publication.__hash__() == rand_copy.__hash__()
assert not (random_publication != rand_copy)
rand_copy.title = "azgabash"
assert not (random_publication == rand_copy) # op eq
# assert random_publication.__hash__() != rand_copy.__hash__()
assert random_publication != rand_copy
assert random_publication != 1
def test_in_operator(monkeypatch):
rp1 = random_publication(monkeypatch)
rp2 = random_publication(monkeypatch)
rp3 = random_publication(monkeypatch)
assert rp1 != rp2
cont = set([rp1, rp2])
assert rp1 in cont
assert rp2 in cont
assert rp3 not in cont
def test_OOTreeSet(monkeypatch):
a = OOTreeSet()
rp1 = random_publication(monkeypatch)
rp2 = random_publication(monkeypatch)
rp3 = random_publication(monkeypatch)
a.insert(rp1)
assert rp1 in a
assert rp2 not in a
assert rp3 not in a
assert not (rp2 in a)
assert not (rp3 in a)
a.insert(rp2)
assert len(a) == 2
assert rp1 in a
assert rp2 in a
assert not (rp2 not in a)
assert rp3 not in a
assert not (rp3 in a)
| mit | 6,978,471,420,981,689,000 | 24.384146 | 79 | 0.632957 | false |
rstens/gwells | gwells/test_views.py | 1 | 5288 | """
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from gwells.forms import *
from django.test import TestCase
from django.core.urlresolvers import reverse
from http import HTTPStatus
from gwells.models import *
from gwells.search import Search
from gwells.views import *
import logging
from django.core import serializers
from django.conf import settings
from django.contrib.auth.models import Group
from django.contrib.auth.models import User
# TODO: split tests into one file per view
class ViewsTestCase(TestCase):
fixtures = ['well_detail_fixture', 'survey_get_fixture']
@classmethod
def setUpTestData(cls):
Group.objects.create(name='admin')
def setUp(self):
pass
def ok(self, view):
response = self.client.get(reverse(view))
self.assertEqual(response.status_code, HTTPStatus.OK)
def not_ok(self, view, status):
response = self.client.get(reverse(view))
self.assertEqual(response.status_code, status)
def test_robots_view_ok(self):
self.ok('robots')
def test_home_view_ok(self):
self.ok('home')
def test_search_view_ok(self):
self.ok('search')
def test_search_view_with_no_params(self):
url = reverse('search')
response = self.client.get(url)
self.assertEqual(response.status_code, HTTPStatus.OK)
def test_well_detail_no_well(self):
#setup
logger = logging.getLogger('django.request')
previous_level = logger.getEffectiveLevel()
logger.setLevel(logging.ERROR)
initial_url = reverse('well_detail', kwargs={'pk':'1'})
url = initial_url[:-2]
response = self.client.get(url)
self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND)
#teardown
logger.setLevel(previous_level)
def test_well_detail_ok(self):
wells = Search.well_search(123, '', '', '')
self.assertEqual(wells.count(), 1)
url = reverse('well_detail', kwargs={'pk':123})
response = self.client.get(url)
self.assertEqual(response.status_code, HTTPStatus.OK)
def test_registry_ok(self):
self.ok('registry-legacy')
def test_health_ok(self):
self.ok('health')
def test_groundwater_information_ok(self):
self.ok('groundwater_information')
def test_map_well_search(self):
self.ok('map_well_search')
def test_404_not_ok(self):
#setup
logger = logging.getLogger('django.request')
previous_level = logger.getEffectiveLevel()
logger.setLevel(logging.ERROR)
#look for clearly erroneous well_tag_number
url = reverse('well_detail', kwargs={'pk':999999999})
response = self.client.get(url)
self.assertEqual(response.status_code, HTTPStatus.NOT_FOUND)
#teardown
logger.setLevel(previous_level)
def test_site_admin_ok(self):
group_name = 'admin'
username = 'admin'
password = 'admin'
email = '[email protected]'
self.user = User.objects.create_user(username=username, password=password, email=email)
admin_group = Group.objects.get(name=group_name)
admin_group.user_set.add(self.user)
self.client.login(username=username,password=password)
self.ok('site_admin')
self.client.logout()
self.user.delete()
def test_site_admin_has_add_survey(self):
group_name = 'admin'
username = 'admin'
password = 'admin'
email = '[email protected]'
self.user = User.objects.create_user(username=username, password=password, email=email)
admin_group = Group.objects.get(name=group_name)
admin_group.user_set.add(self.user)
self.client.login(username=username,password=password)
response = self.client.get(reverse('site_admin'))
        self.assertEqual(response.status_code, HTTPStatus.OK)
        self.assertContains(response, 'id="add-survey"')
self.client.logout()
self.user.delete()
def test_survey_detail_ok(self):
group_name = 'admin'
username = 'admin'
password = 'admin'
email = '[email protected]'
self.user = User.objects.create_user(username=username, password=password, email=email)
admin_group = Group.objects.get(name=group_name)
admin_group.user_set.add(self.user)
self.client.login(username=username,password=password)
surveys = Survey.objects.all()
self.assertEqual(surveys.count(), 1)
url = reverse('survey', kwargs={'pk':"495a9927-5a13-490e-bf1d-08bf2048b098"})
response = self.client.get(url)
self.assertEqual(response.status_code, HTTPStatus.OK)
self.client.logout()
self.user.delete()
| apache-2.0 | -6,359,778,834,814,518,000 | 32.257862 | 95 | 0.656203 | false |
nnugumanov/yandex-tank | yandextank/plugins/Phantom/plugin.py | 1 | 11454 | """ Contains Phantom Plugin, Console widgets, result reader classes """
# FIXME: 3 there is no graceful way to interrupt the process of phout import
# TODO: phout import
import logging
import multiprocessing as mp
import os
import subprocess
import time
from ...common.util import execute, expand_to_seconds
from ...common.interfaces import AbstractPlugin, AbstractCriterion, GeneratorPlugin
from .reader import PhantomReader, PhantomStatsReader
from .utils import PhantomConfig
from .widget import PhantomInfoWidget, PhantomProgressBarWidget
from ..Aggregator import Plugin as AggregatorPlugin
from ..Autostop import Plugin as AutostopPlugin
from ..Console import Plugin as ConsolePlugin
logger = logging.getLogger(__name__)
class Plugin(AbstractPlugin, GeneratorPlugin):
""" Plugin for running phantom tool """
OPTION_CONFIG = "config"
SECTION = PhantomConfig.SECTION
def __init__(self, core, config_section):
AbstractPlugin.__init__(self, core, config_section)
self.config = None
self.process = None
self.predefined_phout = None
self.phout_import_mode = False
self.did_phout_import_try = False
self.phantom_path = None
self.eta_file = None
self.processed_ammo_count = 0
self.phantom_start_time = time.time()
self.buffered_seconds = "2"
self.taskset_affinity = None
self.cpu_count = mp.cpu_count()
self.phantom = None
self.cached_info = None
self.phantom_stderr = None
self.exclude_markers = []
self.enum_ammo = False
@staticmethod
def get_key():
return __file__
def get_available_options(self):
opts = [
"phantom_path", "buffered_seconds", "exclude_markers", "affinity"
]
opts += [PhantomConfig.OPTION_PHOUT, self.OPTION_CONFIG]
opts += PhantomConfig.get_available_options()
return opts
def configure(self):
# plugin part
self.config = self.get_option(self.OPTION_CONFIG, '')
self.phantom_path = self.get_option("phantom_path", 'phantom')
self.enum_ammo = self.get_option("enum_ammo", False)
self.buffered_seconds = int(
self.get_option("buffered_seconds", self.buffered_seconds))
self.exclude_markers = set(
filter((lambda marker: marker != ''),
self.get_option('exclude_markers', []).split(' ')))
self.taskset_affinity = self.get_option('affinity', '')
try:
autostop = self.core.get_plugin_of_type(AutostopPlugin)
autostop.add_criterion_class(UsedInstancesCriterion)
except KeyError:
logger.debug(
"No autostop plugin found, not adding instances criterion")
self.predefined_phout = self.get_option(PhantomConfig.OPTION_PHOUT, '')
if not self.get_option(
self.OPTION_CONFIG, '') and self.predefined_phout:
self.phout_import_mode = True
if not self.config and not self.phout_import_mode:
self.phantom = PhantomConfig(self.core)
self.phantom.read_config()
def prepare_test(self):
aggregator = self.core.job.aggregator_plugin
if not self.config and not self.phout_import_mode:
# generate config
self.config = self.phantom.compose_config()
args = [self.phantom_path, 'check', self.config]
try:
result = execute(args, catch_out=True)
except OSError:
raise RuntimeError("Phantom I/O engine is not installed!")
retcode = result[0]
if retcode:
raise RuntimeError(
"Config check failed. Subprocess returned code %s" %
retcode)
if result[2]:
raise RuntimeError(
"Subprocess returned message: %s" % result[2])
reader = PhantomReader(self.phantom.phout_file)
logger.debug(
"Linking sample reader to aggregator."
" Reading samples from %s", self.phantom.phout_file)
logger.debug(
"Linking stats reader to aggregator."
" Reading stats from %s", self.phantom.stat_log)
else:
reader = PhantomReader(self.predefined_phout)
logger.debug(
"Linking sample reader to aggregator."
" Reading samples from %s", self.predefined_phout)
if aggregator:
aggregator.reader = reader
info = self.phantom.get_info()
aggregator.stats_reader = PhantomStatsReader(
self.phantom.stat_log, info)
aggregator.add_result_listener(self)
try:
console = self.core.get_plugin_of_type(ConsolePlugin)
except Exception as ex:
logger.debug("Console not found: %s", ex)
console = None
self.core.job.phantom_info = self.phantom.get_info()
if console and aggregator:
widget = PhantomProgressBarWidget(self)
console.add_info_widget(widget)
aggregator.add_result_listener(widget)
widget = PhantomInfoWidget(self)
console.add_info_widget(widget)
aggregator = self.core.get_plugin_of_type(AggregatorPlugin)
aggregator.add_result_listener(widget)
def start_test(self):
if not self.phout_import_mode:
args = [self.phantom_path, 'run', self.config]
logger.debug(
"Starting %s with arguments: %s", self.phantom_path, args)
if self.taskset_affinity != '':
args = [
self.core.taskset_path, '-c', self.taskset_affinity
] + args
logger.debug(
"Enabling taskset for phantom with affinity: %s,"
" cores count: %d", self.taskset_affinity, self.cpu_count)
self.phantom_start_time = time.time()
phantom_stderr_file = self.core.mkstemp(
".log", "phantom_stdout_stderr_")
self.core.add_artifact_file(phantom_stderr_file)
self.phantom_stderr = open(phantom_stderr_file, 'w')
self.process = subprocess.Popen(
args,
stderr=self.phantom_stderr,
stdout=self.phantom_stderr,
close_fds=True)
else:
if not os.path.exists(self.predefined_phout):
raise RuntimeError(
"Phout file not exists for import: %s" %
self.predefined_phout)
            logger.warning(
"Will import phout file instead of running phantom: %s",
self.predefined_phout)
def is_test_finished(self):
if not self.phout_import_mode:
retcode = self.process.poll()
if retcode is not None:
logger.info("Phantom done its work with exit code: %s", retcode)
return abs(retcode)
else:
info = self.get_info()
if info:
eta = int(info.duration) - (
int(time.time()) - int(self.phantom_start_time))
self.publish('eta', eta)
return -1
else:
if not self.processed_ammo_count or self.did_phout_import_try != self.processed_ammo_count:
self.did_phout_import_try = self.processed_ammo_count
return -1
else:
return 0
def end_test(self, retcode):
if self.process and self.process.poll() is None:
            logger.warning(
"Terminating phantom process with PID %s", self.process.pid)
self.process.terminate()
if self.process:
self.process.communicate()
else:
logger.debug("Seems phantom finished OK")
if self.phantom_stderr:
self.phantom_stderr.close()
return retcode
def post_process(self, retcode):
if not retcode:
info = self.get_info()
if info and info.ammo_count != self.processed_ammo_count:
logger.warning(
"Planned ammo count %s differs from processed %s",
info.ammo_count, self.processed_ammo_count)
return retcode
def on_aggregated_data(self, data, stat):
self.processed_ammo_count += data["overall"]["interval_real"]["len"]
logger.debug("Processed ammo count: %s/", self.processed_ammo_count)
def get_info(self):
""" returns info object """
if not self.cached_info:
if not self.phantom:
return None
self.cached_info = self.phantom.get_info()
return self.cached_info
class UsedInstancesCriterion(AbstractCriterion):
"""
Autostop criterion, based on active instances count
"""
RC_INST = 24
@staticmethod
def get_type_string():
return 'instances'
def __init__(self, autostop, param_str):
AbstractCriterion.__init__(self)
self.seconds_count = 0
self.autostop = autostop
self.threads_limit = 1
level_str = param_str.split(',')[0].strip()
if level_str[-1:] == '%':
self.level = float(level_str[:-1]) / 100
self.is_relative = True
else:
self.level = int(level_str)
self.is_relative = False
self.seconds_limit = expand_to_seconds(param_str.split(',')[1])
try:
phantom = autostop.core.get_plugin_of_type(Plugin)
info = phantom.get_info()
if info:
self.threads_limit = info.instances
if not self.threads_limit:
raise ValueError(
"Cannot create 'instances' criterion"
" with zero instances limit")
except KeyError:
logger.warning("No phantom module, 'instances' autostop disabled")
def notify(self, data, stat):
threads = stat["metrics"]["instances"]
if self.is_relative:
threads = float(threads) / self.threads_limit
if threads > self.level:
if not self.seconds_count:
self.cause_second = (data, stat)
logger.debug(self.explain())
self.seconds_count += 1
self.autostop.add_counting(self)
if self.seconds_count >= self.seconds_limit:
return True
else:
self.seconds_count = 0
return False
def get_rc(self):
return self.RC_INST
def get_level_str(self):
"""
String value for instances level
"""
if self.is_relative:
level_str = str(100 * self.level) + "%"
else:
level_str = self.level
return level_str
def explain(self):
items = (
self.get_level_str(), self.seconds_count,
self.cause_second[0].get('ts'))
return (
"Testing threads (instances) utilization"
" higher than %s for %ss, since %s" % items)
def widget_explain(self):
items = (self.get_level_str(), self.seconds_count, self.seconds_limit)
return "Instances >%s for %s/%ss" % items, float(
self.seconds_count) / self.seconds_limit
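# A hypothetical configuration sketch (not from the original source): with the
# criterion above registered under the 'instances' type, an autostop entry like
#
#   autostop = instances(80%,10s)
#
# trips once more than 80% of the configured phantom instances stay busy for
# 10 consecutive seconds, while the absolute form instances(90,5s) compares
# against a plain instance count instead.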
| lgpl-2.1 | -7,101,965,361,478,808,000 | 34.682243 | 103 | 0.569408 | false |
pgroudas/pants | src/python/pants/subsystem/subsystem.py | 1 | 3698 | # coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from pants.option.options import Options
class SubsystemError(Exception):
"""An error in a subsystem."""
class Subsystem(object):
"""A separable piece of functionality that may be reused across multiple tasks or other code.
Subsystems encapsulate the configuration and initialization of things like JVMs,
Python interpreters, SCMs and so on.
Subsystem instances are tied to option scopes. For example, a singleton subsystem that all tasks
share is tied to the global scope, while a private instance used by just one task is tied to
that task's scope.
A Subsystem instance initializes itself from options in a subscope (the 'qualified scope') of
the scope it's tied to. For example, a global SubsystemFoo instance gets its options from
scope 'foo', while a SubsystemFoo instance for use just in task bar.baz gets its options from
scope 'bar.baz.foo'.
TODO(benjy): Model dependencies between subsystems? Registration of subsystems?
"""
@classmethod
def scope_qualifier(cls):
"""Qualifies the options scope of this Subsystem type.
E.g., for SubsystemFoo this should return 'foo'.
"""
raise NotImplementedError()
@classmethod
def register_options(cls, register):
"""Register options for this subsystem.
Subclasses may override and call register(*args, **kwargs) with argparse arguments.
"""
@classmethod
def register_options_on_scope(cls, options, scope):
"""Trigger registration of this subsystem's options under a given scope."""
cls.register_options(options.registration_function_for_scope(cls.qualify_scope(scope)))
@classmethod
def qualify_scope(cls, scope):
return '{0}.{1}'.format(scope, cls.scope_qualifier()) if scope else cls.scope_qualifier()
# The full Options object for this pants run. Will be set after options are parsed.
# TODO: A less clunky way to make option values available?
_options = None
# A cache of (cls, scope) -> the instance of cls tied to that scope.
_scoped_instances = {}
@classmethod
def global_instance(cls):
return cls._instance_for_scope(Options.GLOBAL_SCOPE)
@classmethod
def reset(cls):
"""Forget all option values and cached subsystem instances.
Used for test isolation.
"""
cls._options = None
cls._scoped_instances = {}
@classmethod
def instance_for_task(cls, task):
return cls._instance_for_scope(task.options_scope)
@classmethod
def _instance_for_scope(cls, scope):
if cls._options is None:
raise SubsystemError('Subsystem not initialized yet.')
key = (cls, scope)
if key not in cls._scoped_instances:
qscope = cls.qualify_scope(scope)
cls._scoped_instances[key] = cls(qscope, cls._options.for_scope(qscope))
return cls._scoped_instances[key]
def __init__(self, scope, scoped_options):
"""Note: A subsystem has no access to options in scopes other than its own.
TODO: We'd like that to be true of Tasks some day. Subsystems will help with that.
Task code should call instance_for_scope() or global_instance() to get a subsystem instance.
Tests can call this constructor directly though.
"""
self._scope = scope
self._scoped_options = scoped_options
@property
def options_scope(self):
return self._scope
def get_options(self):
"""Returns the option values for this subsystem's scope."""
return self._scoped_options
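# A minimal illustrative subclass (hypothetical, not part of this module):
#
#   class SubsystemFoo(Subsystem):
#     @classmethod
#     def scope_qualifier(cls):
#       return 'foo'
#
#     @classmethod
#     def register_options(cls, register):
#       register('--timeout', type=int, default=30, help='Example option.')
#
# A global instance then reads its options from scope 'foo', while an instance
# for task 'bar.baz' reads from scope 'bar.baz.foo', per qualify_scope().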
| apache-2.0 | 3,583,883,669,439,646,700 | 33.560748 | 98 | 0.715792 | false |
TorleifHensvold/ITGK3 | oving9/Torleif/07_Soke_i_tekst.py | 1 | 1351 | import string
# f = open(filename)
# variabel = f.read()
# f.close()
# liste = variabel.split('\n')
# print(liste)
# my_dictionary = {}
# while liste:
# if liste[0] in my_dictionary:
# midlertidig = my_dictionary[liste[0]]
# print(midlertidig)
# midlertidig += 1
# print(midlertidig)
# my_dictionary[liste[0]] = midlertidig
# print(my_dictionary)
# liste.pop(0)
# else:
# my_dictionary[liste[0]] = 1
# liste.pop(0)
# print(my_dictionary)
# return my_dictionary
def read_from_file(filename):
    f = open(filename, 'r')
    variabel = f.read()
f.close()
return variabel
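# Keep only ASCII letters and spaces, then lowercase the result.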
def remove_symbols(text):
validchars = string.ascii_letters + ' '
clean_string = ''.join(a for a in text if a in validchars)
lower_clean_string = clean_string.lower()
return lower_clean_string
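# Count how many times each word occurs in the given file, case-insensitively.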
def count_words(filename):
variabel = read_from_file(filename)
# print(variabel)
variabel = remove_symbols(variabel)
    liste = variabel.split()
# print(liste)
    my_dictionary = {}
    for word in liste:
        my_dictionary[word] = my_dictionary.get(word, 0) + 1
    return my_dictionary
bible_dict = count_words('BIBLE.txt')
for word, value in bible_dict.items():
    print(word, value)
| mit | -6,117,888,994,099,742,000 | 18.784615 | 59 | 0.647668 | false |
zifter/nonogram | tests/test_sudoku/test_solver/test_solver.py | 1 | 1266 | from copy import copy, deepcopy
import unittest
from test_sudoku.testcase_sudoku import TestCaseSudoku
from sudoku.solver.solver import Solver
from sudoku.solution import Solution
from sudoku.solution import SudokuDescr
import cProfile
# https://habrahabr.ru/post/173795/
def profile(func):
"""Decorator for run function profile"""
def wrapper(*args, **kwargs):
profile_filename = func.__name__ + '.prof'
profiler = cProfile.Profile()
result = profiler.runcall(func, *args, **kwargs)
profiler.print_stats(sort="cumtime")
return result
return wrapper
class TestSolver(TestCaseSudoku):
#@profile
def test_solve_and_compare(self):
test_data = self.getSolverData()
mySolver = Solver()
for name, layout, solution in test_data:
print "# ------ SUDOKU: %s ------- #" % name
d0 = SudokuDescr.load_from_file(layout)
print d0
s1, steps = mySolver.solve(d0)
print "steps: %s " % steps
print s1
s0 = Solution.load_from_file(solution)
print "answer"
print s0
self.assertEqual(s0, s1, "Solution failed: %s" % solution)
if __name__ == '__main__':
    unittest.main()
| mit | -8,591,695,616,087,866,000 | 26.543478 | 70 | 0.612164 | false |
masegaloeh/web-parser | tiketkai/parser.py | 1 | 1052 | from bs4 import BeautifulSoup
import urllib.request
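# Return the text of the last option in the page's 'tanggal' (date) combo box.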
def getLastDate(soupObj):
objList = None
lastdate = None
comboClass = soupObj.findAll(class_="itScheduleCombox")
for combo in comboClass:
if combo['name'] == 'tanggal':
objList = combo
try:
for obj in objList:
lastdate = obj.string
return lastdate
except TypeError as e:
print("Webpage structure not found. Quitting...")
return None
def fetchPage(url):
    # add a header to define a custom User-Agent
headers = { 'User-Agent' : 'Mozilla/4.0 (compatible; MSIE 5.5; Windows NT)' }
try:
req = urllib.request.Request(url, None, headers)
data = urllib.request.urlopen(req)
return data
    except Exception:  # e.g. network failures, HTTP errors or bad URLs
        return None
# our url
url = "https://tiket.kereta-api.co.id/"
page = fetchPage(url)
mydate = None
if page is not None:
soup = BeautifulSoup(page.read())
mydate = getLastDate(soup)
if mydate is None:
print("something error. Sorry")
else:
    print(mydate)
| mit | 7,484,101,775,677,648,000 | 22.4 | 81 | 0.629278 | false |
csxeba/brainforge | brainforge/util/persistance.py | 1 | 2344 | class Capsule:
def __init__(self, name=None, cost=None, optimizer=None, architecture=None, layers=None):
self.vname = name
self.vcost = cost
self.voptimizer = optimizer
self.varchitecture = architecture
self.vlayers = layers
def dump(self, path):
import pickle
import gzip
with gzip.open(path, "wb") as handle:
pickle.dump({k: v for k, v in self.__dict__.items() if k[0] == "v"},
handle)
@classmethod
def encapsulate(cls, network, dumppath=None):
capsule = cls(**{
"name": network.name,
"metrics": network.cost,
"optimizer": network.optimizer,
"architecture": network.layers.architecture[:],
"layers": [layer.capsule() for layer in network.layers.layers]})
if dumppath is not None:
capsule.dump(dumppath)
return capsule
@classmethod
def read(cls, path):
import pickle
import gzip
from os.path import exists
if not exists(path):
raise RuntimeError("No such capsule:", path)
new = cls()
with gzip.open(path) as handle:
new.__dict__.update(pickle.load(handle))
return new
def __getitem__(self, item):
if item not in self.__dict__:
raise AttributeError("No such item in capsule:", item)
return self.__dict__[item]
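# Rebuild a Backpropagation network from a Capsule instance or a capsule file path.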
def load(capsule):
from ..learner import Backpropagation
from ..optimizers import optimizers
from ..util.shame import translate_architecture as trsl
if not isinstance(capsule, Capsule):
capsule = Capsule.read(capsule)
c = capsule
net = Backpropagation(input_shape=c["vlayers"][0][0], name=c["vname"])
for layer_name, layer_capsule in zip(c["varchitecture"], c["vlayers"]):
if layer_name[:5] == "Input":
continue
layer_cls = trsl(layer_name)
layer = layer_cls.from_capsule(layer_capsule)
net.add(layer)
opti = c["voptimizer"]
if isinstance(opti, str):
opti = optimizers[opti]()
net.finalize(cost=c["vcost"], optimizer=opti)
for layer, lcaps in zip(net.layers, c["vlayers"]):
if layer.weights is not None:
layer.set_weights(lcaps[-1], fold=False)
return net
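# A minimal usage sketch (assumed workflow, not from the original module):
#
#   Capsule.encapsulate(net, dumppath="model.cps")  # gzip-pickled snapshot
#   restored = load("model.cps")                    # layers and weights rebuilt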
| gpl-3.0 | 5,053,646,490,615,273,000 | 28.670886 | 93 | 0.584898 | false |
pclubuiet/website | home/migrations/0005_auto_20180815_0959.py | 1 | 1621 | # Generated by Django 2.1 on 2018-08-15 09:59
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('home', '0004_resourceurl_category'),
]
operations = [
migrations.CreateModel(
name='Topic',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=128, null=True)),
],
),
migrations.RemoveField(
model_name='resourceurl',
name='resource',
),
migrations.AddField(
model_name='resource',
name='category',
field=models.CharField(choices=[('video', 'Videos'), ('blog', 'Blogs / Articles'), ('ebook', 'E-Books'), ('other', 'Others')], max_length=128, null=True),
),
migrations.AddField(
model_name='resource',
name='description',
field=models.CharField(blank=True, max_length=10240, null=True),
),
migrations.AddField(
model_name='resource',
name='url',
field=models.URLField(db_index=True, max_length=128, null=True),
),
migrations.DeleteModel(
name='ResourceURL',
),
migrations.AddField(
model_name='resource',
name='resource',
field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='home.Topic'),
preserve_default=False,
),
]
| gpl-3.0 | 135,660,602,289,841,000 | 32.081633 | 166 | 0.552745 | false |
fakedrake/WikipediaBase | tests/test_infobox.py | 1 | 4643 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_infobox
----------------------------------
Tests for `infobox` module.
"""
try:
import unittest2 as unittest
except ImportError:
import unittest
from wikipediabase.util import get_article, get_infoboxes
from wikipediabase import fetcher
from wikipediabase.infobox import Infobox
class TestInfobox(unittest.TestCase):
def setUp(self):
self.fetcher = fetcher.WIKIBASE_FETCHER
def test_class(self):
self.assertEqual(Infobox._to_class("Template:Infobox martial artist"),
"wikipedia-martial-artist")
def test_class_strip(self):
self.assertEqual(Infobox._to_class("Template:Infobox writer "),
"wikipedia-writer")
def test_class_taxobox(self):
self.assertEqual(Infobox._to_class("Template:Taxobox"),
"wikipedia-taxobox")
def test_type(self):
self.assertEqual(Infobox._to_type("Template:Infobox martial artist"),
"martial artist")
def test_type_taxobox(self):
self.assertIsNone(Infobox._to_type("Template:Taxobox"))
def test_markup(self):
ibox = get_infoboxes("Led Zeppelin", fetcher=self.fetcher)[0]
self.assertEqual(ibox.markup_source()[:9], "{{Infobox")
self.assertIn("| name = Led Zeppelin", ibox.markup_source())
def test_infobox_markup_raw(self):
ibox = get_infoboxes("Winston Churchill", fetcher=self.fetcher)[0]
self.assertIn("|death_place ", ibox.markup_source())
def test_infobox_html_raw(self):
ibox = get_infoboxes("Led Zeppelin", fetcher=self.fetcher)[0]
self.assertIn("Origin\nLondon, England", ibox.rendered())
def test_infobox_html_parsed(self):
ibox = get_infoboxes("AC/DC", fetcher=self.fetcher)[0]
self.assertIn(("Origin", "Sydney, Australia"), ibox.html_parsed())
def test_rendered_attributes(self):
clinton = get_infoboxes("Winston Churchill", fetcher=self.fetcher)[0]
self.assertEqual("Died",
clinton.rendered_attributes().get("death_place"))
bridge = get_infoboxes("Brooklyn Bridge", fetcher=self.fetcher)[0]
self.assertEqual("Maintained by",
bridge.rendered_attributes().get("maint"))
def test_get(self):
ibox = get_infoboxes("The Rolling Stones", fetcher=self.fetcher)[0]
self.assertEqual(ibox.get("origin"), "London, England")
def test_attributes(self):
ibox = get_infoboxes("Winston Churchill", fetcher=self.fetcher)[0]
self.assertIn("death-place",
[k for k, v in ibox.markup_parsed_iter()])
def test_templates(self):
infoboxes = get_infoboxes("Vladimir Putin", fetcher=self.fetcher)
templates = ["Template:Infobox officeholder",
"Template:Infobox martial artist"]
self.assertItemsEqual(map(lambda i: i.template(), infoboxes),
templates)
def test_classes(self):
infoboxes = get_infoboxes("Vladimir Putin", fetcher=self.fetcher)
classes = ["wikipedia-officeholder", "wikipedia-martial-artist"]
self.assertItemsEqual(map(lambda i: i.wikipedia_class(), infoboxes),
classes)
def test_types(self):
article = get_article("Vladimir Putin", self.fetcher)
        # TODO: fix case inconsistency in infobox_tree
types = ["officeholder", "martial artist", "Person", "Sportsperson",
"Other sportsperson"]
self.assertItemsEqual(article.types(), types)
def test_types_redirect(self):
ibox = get_infoboxes("Bill Clinton", fetcher=self.fetcher)[0]
self.assertIn("president", ibox.types())
def test_html_attributes(self):
ibox = get_infoboxes("BBC News", fetcher=self.fetcher)[0]
self.assertEqual("Owners", ibox.rendered_attributes().get("owners"))
def test_no_clashes_with_multiple_infoboxes(self):
officeholder_ibox, martial_artist_ibox = get_infoboxes('Vladimir Putin')
self.assertEqual(officeholder_ibox.wikipedia_class(),
'wikipedia-officeholder')
self.assertEqual(martial_artist_ibox.wikipedia_class(),
'wikipedia-martial-artist')
self.assertEqual(officeholder_ibox.get('image'),
'Vladimir Putin 12023 (cropped).jpg')
self.assertEqual(martial_artist_ibox.get('image'),
'Vladimir Putin in Japan 3-5 September 2000-22.jpg')
if __name__ == '__main__':
unittest.main()
| apache-2.0 | 4,356,002,232,220,515,300 | 37.371901 | 80 | 0.620719 | false |