code | repo_name | path | language | license | size
---|---|---|---|---|---
import numpy as np
import pandas as pd
from sklearn.feature_extraction.text import CountVectorizer
def load_coefficients():
df = pd.read_csv('../data/logreg_coefficients.txt', sep='\t', header=None,
quoting=3, names=['term', 'weight'])
return df
def load_reviews():
reviews = []
with open('../data/test_movies.txt', 'r') as movie_reviews:
for review in movie_reviews:
reviews.append(review)
return np.array(reviews)
def features(reviews):
cv = CountVectorizer()
X = cv.fit_transform(reviews)
X = (X > 0) * 1
return X, cv.get_feature_names()
def df_tdm_w(coefficients, X, feature_names):
df = pd.DataFrame(X.todense(), columns=feature_names).T.reset_index()
df.rename(columns={'index' : 'term'}, inplace=True)
tdm = pd.merge(coefficients, df, on='term', how='outer')
tdm.fillna(0, inplace=True)
return tdm
def logistic(weights, values):
assert isinstance(weights, np.ndarray) and isinstance(values, np.ndarray)
alpha = weights.dot(values)
return np.exp(alpha) / (1 + np.exp(alpha))
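# Example (hypothetical numbers): logistic(np.array([0.5, -0.2]),
# np.array([1., 1.])) ~= 0.574, i.e. the sigmoid of the summed evidence.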
def build_cases(df):
df = df.copy()
cases = {}
for c in range(10):
yhat = logistic(df.weight.values, df[c].values)
cases[c] = {'yhat' : yhat, 'class' : yhat >= 0.5}
return cases
def explanations(df, cases):
df = df.copy()
for c in range(10):
positive_class = cases[c]['class']
df.sort_values('weight', inplace=True, ascending=(not positive_class))
df.reset_index(drop=True, inplace=True)
E = []
arr = df[c].values
indices = np.where(arr==1)[0]
for ind in indices:
arr[ind] = 0
E.append(ind)
if positive_class and logistic(df.weight.values, arr) < 0.5:
break
elif not positive_class and logistic(df.weight.values, arr) >= 0.5:
break
terms = df.loc[E]['term'].tolist()
cases[c]['explanations'] = terms
return None
if __name__ == '__main__':
coefficients = load_coefficients()
X, feature_names = features(load_reviews())
df = df_tdm_w(coefficients, X, feature_names)
cases = build_cases(df)
explanations(df, cases)
for c in range(10):
print('Case:', c, '| y_hat:', cases[c]['yhat'])
print('Removed Terms:', ' '.join(cases[c]['explanations']), end='\n\n')
| juanshishido/info290-dds | assignments/assignment03/code/sedc.py | Python | mit | 2,387 |
"""
creates a (d2, w1, w2) "movie" scan
"""
import NISE as n
import os
import numpy as np
import sys
import time
def simulate(plot=False):
if __name__ == '__main__':
here = os.path.dirname(__file__)
t = n.experiments.trive # module for experiment class k1 - k2 + k2'
m = n.lib.measure
H0 = n.hamiltonians.H0
inhom = n.hamiltonians.params.inhom
Delta_t = 50. # pulse duration [fs]
slitwidth = 120. # mono resolution [cm-1]
# --- set up hamiltonian ----------------------------------------------
H = H0.Omega(wa_central=0,
tau_ag = Delta_t,
tau_2aa = Delta_t)
H.TOs = np.array([1,2,3,4,5,6])
#print([s for s in dir(H) if s[:2] != '__'])
# --- set experiment details ------------------------------------------
w1 = t.w1
w2 = t.w2
d2 = t.d2 # tau_12
w1.points = np.linspace(-2.5, 2.5, int(sys.argv[1]))
w1.points*= 4 * np.log(2) / Delta_t * 1/(2*np.pi*3e-5)
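# 3e-5 is presumably the speed of light in cm/fs, so this rescales the
# dimensionless axis into wavenumbers (cm-1) for a pulse of FWHM Delta_t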
w2.points = w1.points.copy()
d2.points = np.linspace(-2*Delta_t, 4*Delta_t, int(sys.argv[2]))
t.exp.set_coord(t.d1, 0.) # set tau_22' to zero delay
t.exp.set_coord(t.ss, Delta_t) # set pulse widths
t.exp.timestep = 2.0
# time integration starts
# (relative to first pulse arriving)
t.exp.early_buffer = np.abs(t.d2.points.min()) + Delta_t
# time to start recording values to array
# (relative to final pulse arriving)
t.exp.late_buffer = 5 * Delta_t
# dummy object that is necessary (for no good reason)
inhom1 = inhom.Inhom()
m.Mono.slitwidth = slitwidth
# --- define scan object and run scan ---------------------------------
def run_if_not_exists(folder, scan, mp=True, plot=False):
if not os.path.exists(folder):
os.makedirs(folder)
#plot=True
if len(os.listdir(folder)) != 0:
#print('scan has already been run; importing {0}'.format(folder))
scan = n.lib.scan.Scan._import(folder)
else:
begin = time.perf_counter()
scan.run(autosave=False, mp=mp)
print(time.perf_counter() - begin)
#scan.save(full_name=folder)
measure = m.Measure(scan, m.Mono, m.SLD)
measure.run(save=False)
if plot:
measure.plot(1, yaxis=2, zoom=2)
return scan, measure.pol
scan = t.exp.scan(t.d2, t.w1, t.w2, H=H, inhom_object=inhom1)
folder = os.path.join(here, sys.argv[3])
return run_if_not_exists(folder, scan, plot=plot, mp=False) # to run again delete folder
else: return None,None
scan, pol = simulate(plot=False)
| wright-group/WrightSim | scripts/NISE_run_for_kyle.py | Python | mit | 3,131 |
"""
roblox.py
Library for interacting with the Roblox site. Useful for bots (good ones too), custom notifications, and much more.
Copyright (c) 2017 James Patrick Dill, reshanie
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import re
_request_verification_re = re.compile(r'<input name="__RequestVerificationToken" type="hidden" value="(.+)" ?\/>')
_viewstate_re = re.compile(r'<input type="hidden" name="__VIEWSTATE" id="__VIEWSTATE" value="(.+)" \/>', re.IGNORECASE)
_viewstate_re_2 = re.compile(r'__VIEWSTATE\|(.*?)\|')
_vs_generator_re = re.compile(
r'<input type="hidden" name="__VIEWSTATEGENERATOR" id="__VIEWSTATEGENERATOR" value="(.+)" ?\/>'
)
_vs_gen_re_2 = re.compile(r'__VIEWSTATEGENERATOR\|(.*?)\|')
_event_validator_re = re.compile(
r'<input type="hidden" name="__EVENTVALIDATION" id="__EVENTVALIDATION" value="(.+)" ?\/>'
)
_event_val_2_re = re.compile(r'__EVENTVALIDATION\|(.*?)\|')
_current_roleset_re = re.compile(
r'<input name="ctl00\$cphRoblox\$rbxGroupRoleSetMembersPane\$currentRoleSetID" type="hidden" id="ctl00_cphRoblox_rb'
r'xGroupRoleSetMembersPane_currentRoleSetID" value="(.+)" ?\/>'
)
_roleset_count_re = re.compile(
r'<input name="ctl00\$cphRoblox\$rbxGroupRoleSetMembersPane\$RolesetCountHidden" type="text" value="(\d+)"'
)
class Auth(object):
__slots__ = ["page", "force_2nd"]
def __init__(self, page, force_2nd=False):
self.page = page
self.force_2nd = force_2nd
@property
def request_verification_token(self):
rvt = _request_verification_re.search(self.page)
if not rvt:
return
return rvt.group(1)
@property
def viewstate(self):
vs = _viewstate_re.search(self.page)
if self.force_2nd or not vs:
vs = _viewstate_re_2.search(self.page)
if not vs:
return
return vs.group(1)
@property
def viewstate_generator(self):
vs_gen = _vs_generator_re.search(self.page)
if self.force_2nd or not vs_gen:
vs_gen = _vs_gen_re_2.search(self.page)
if not vs_gen:
return
return vs_gen.group(1)
@property
def event_validation(self):
event_val = _event_validator_re.search(self.page)
if self.force_2nd or not event_val:
event_val = _event_val_2_re.search(self.page)
if not event_val:
return
return event_val.group(1)
@property
def roleset_filler(self):
roleset_id = _current_roleset_re.search(self.page)
if not roleset_id:
return
return roleset_id.group(1)
@property
def hidden_rolesets(self):
rolesets = _roleset_count_re.search(self.page)
if not rolesets:
return
return int(rolesets.group(1))
@property
def default_payload(self):
return {
"__EVENTTARGET": "",
"__EVENTARGUMENT": "",
"__LASTFOCUS": "",
"__VIEWSTATE": self.viewstate,
"__VIEWSTATEGENERATOR": self.viewstate_generator,
"__EVENTVALIDATION": self.event_validation,
"__RequestVerificationToken": self.request_verification_token,
}
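# Example usage (a sketch; page_html would be the raw HTML of a fetched
# Roblox page):
#
# auth = Auth(page_html)
# payload = auth.default_payload  # hidden-form fields for the next POST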
| reshanie/roblox.py | roblox/auth.py | Python | mit | 4,340 |
import os
from animation import Animation
from dmd import Frame
from procgame import config
from procgame import util
from font import Font
###################################
# Animated Fonts
class AnimFont(Font):
"""An animated Font with multiple frames per letter.
Fonts can be loaded manually, using :meth:`load`, or with the :func:`font_named` utility function
which supports searching a font path."""
frames = None
current = 0
def __init__(self, filename=None):
super(AnimFont, self).__init__()
self.frames = list()
self.current = 0
self.__anim = Animation()
if filename != None:
self.load(filename)
def load(self, filename):
"""Loads the font from a ``.dmd`` file (see :meth:`Animation.load`).
Fonts are stored in .dmd files with frame 0 containing the bitmap data
and frame 1 containing the character widths. 96 characters (32..127,
ASCII printables) are stored in a 10x10 grid, starting with space (``' '``)
in the upper left at 0, 0. The character widths are stored in the second frame
(index 1) within the 'raw' bitmap data in bytes 0-95; additional frames make
up the rest of the animation.
"""
(font_info_file, ext) = os.path.splitext(filename)
if(ext == '.dmd'):
composite_op = 'blacksrc'
else:
composite_op = None
self.__anim.load(filename, composite_op=composite_op)
if len(self.__anim.frames) < 1:
raise ValueError, "Expected a minimum 2 frames: %d" % (len(self.__anim.frames))
self.frames = list(self.__anim.frames)
font_info_file = font_info_file + ".csv"
self.char_data = self.parseCBFGinfo(font_info_file)
self.char_height = self.char_data['Cell Height']
self.char_width = self.char_data['Cell Width']
self.char_size = self.char_width
self.char_widths = list()
# if(char_widths==None):
for i in range(96):
ch = chr(int(i+ord(' ')))
self.char_widths += [self.char_data['positions'][ch]['Base Width'] + self.char_data['positions'][ch]['Width Offset']]
# self.char_widths += [14] #[self.__anim.frames[1].get_font_dot(i%self.__anim.width, i/self.__anim.width)]
# # self.char_widths += [self.__anim.frames[1].get_font_dot(i%self.__anim.width, i/self.__anim.width)]
# # print("Width: %d" % self.char_widths[-1])
# else:
# self.char_widths = char_widths
# for i in range(2,len(self.__anim.frames)):
# self.frames.append(self.__anim.frames[i])
return self
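# Example usage (a sketch; the .dmd/.csv file pair is hypothetical):
#
# font = AnimFont('fonts/flames.dmd')  # also reads fonts/flames.csv
# font.draw(frame, 'SCORE', x=0, y=0)  # advances one animation frame per call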
def parseCBFGinfo(self, filename):
font_data = {}
width_file = open(filename)
for line in width_file:
parse = line.split(',')
line_data = parse[0]
if line_data == "Font Name":
line_value = parse[1][:-1]
font_data[line_data] = line_value
else:
line_value = int(parse[1][:-1])
if line_data.startswith('Char'):
char_info = line_data.split(' ')
char = chr(int(char_info[1]))
if('positions' not in font_data):
font_data['positions'] = {}
if(char in font_data['positions']):
place = font_data['positions'][char]
else:
place = {}
font_data['positions'][char] = place
place[char_info[2] + ' ' + char_info[3]] = line_value
else:
font_data[line_data] = line_value
return font_data
def size(self, text):
"""Returns a tuple of the width and height of this text as rendered with this font."""
x = 0
for ch in text:
char_offset = ord(ch) - ord(' ')
if char_offset < 0 or char_offset >= 96:
continue
width = self.char_widths[char_offset]
x += width + self.tracking
return (x, self.char_size)
def save(self, filename):
"""Save the font to the given path."""
out = Animation()
out.width = self.__anim.width
out.height = self.__anim.height
out.frames = [self.bitmap, Frame(out.width, out.height)]
for i in range(96):
out.frames[1].set_font_dot(i%self.__anim.width, i/self.__anim.width, self.char_widths[i])
out.save_old(filename)
# def draw(self, frame, text, x, y):
# Frame.copy_rect(dst=frame, dst_x=x, dst_y=y, src=self.frames[self.current], src_x=0, src_y=0, width=224, height=112, op=self.composite_op)
def draw(self, frame, text, x, y):
"""Uses this font's characters to draw the given string at the given position."""
#print("drawing word with animation # %d" % self.current)
for ch in text:
char_offset = ord(ch) - ord(' ')
if char_offset < 0 or char_offset >= 96:
continue
char_x = self.char_width * (char_offset % 10) + self.char_data['positions'][ch]['X Offset']
char_y = self.char_height * (char_offset / 10) + self.char_data['positions'][ch]['Y Offset']
width = self.char_widths[char_offset]
#print("Blitting an %c at [%d,%d] width=%d" % (ch,char_x,char_y,width))
Frame.copy_rect(dst=frame, dst_x=x, dst_y=y, src=self.frames[self.current], src_x=char_x, src_y=char_y, width=width, height=self.char_height, op=self.composite_op)
x += width + self.tracking
self.current = (self.current + 1) % len(self.frames)
return x
#
# convert ./flames_000.png -resize 512x50\! ftmp.png ; composite -tile ./ftmp.png -size 512x512 xc:none flame_font_000.png ; rm ftmp.png
#
####################################
| mjocean/PyProcGameHD-SkeletonGame | procgame/dmd/animfont.py | Python | mit | 5,992 |
#code created by NamanNimmo Gera
#4:57pm, May 1, 2019.
def reverse(x):
return int(str(x)[::-1])
def Palindrome(x):
return x == reverse(x)
#function to check if a number is a Lychrel number or not
def checkLyr(x):
tot = 0
while True:
x = x + reverse(x)
if Palindrome(x):
return False
else:
tot = tot + 1
if tot>50:
return True
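# e.g. 47 + 74 = 121 is a palindrome, so checkLyr(47) is False, while 196
# never yields one within the 50-iteration cutoff, so checkLyr(196) is True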
count = 0
for i in range(1,10000):
if checkLyr(i):
count = count + 1
print(count)
| DestructHub/ProjectEuler | Problem055/Python/solution_1.py | Python | mit | 578 |
# -*- coding: utf-8 -*-
from __future__ import division, print_function
import numpy as np
from jittermodel import u, q2unitless
from jittermodel.simulation import (Simulation, SphereCapacitance, _alpha,
sum_sinh, _eta, _lambda, _thetaI,
_thetaII)
from jittermodel._sim import _thetaI_c
from jittermodel.base import Cantilever, Experiment, Transistor
from numpy.testing import assert_allclose
from nose.tools import eq_, assert_almost_equal, assert_raises
from bunch import Bunch
from jittermodel.tests import expected_failure
import unittest
u.d = u.dimensionless # For brevity
import mpmath as mp
def mp_sum_sinh(alpha):
"""Implements the infinite sum using mpmath, at very high precision.
Method 'r+s+e' was found to work accurately for all values of alpha,
unlike most other algorithms in Mathematica, Python, etc."""
summand = lambda n: mp.sinh(alpha) / mp.sinh(alpha * n)
return mp.nsum(summand, [1, mp.inf], method='r+s+e')
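# As alpha grows the n=1 term dominates and the sum tends to 1; for small
# alpha the series converges slowly, hence the arbitrary-precision check.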
class Test_sum_sinh(unittest.TestCase):
@staticmethod
def test_sum_sinh():
"""Test that the sum is working properly for a range of alpha values.
The mpmath module is used to verify that the sum meets error
specifications.
"""
alphas = [2 ** i for i in xrange(-12, 7)]
results = [sum_sinh(alpha) for alpha in alphas]
mp_results = [mp_sum_sinh(alpha) for alpha in alphas]
for mp_result, test_result in zip(mp_results, results):
assert_almost_equal(mp_result, test_result, 7)
class MockSimulationCapacitance(object):
"""A mock simulation object only containing the parameters necessary to
test SphereCapacitance"""
units = {"[mass]": u.pg, "[length]": u.um, "[time]": u.ms,
"[current]": u.aC / u.ms, "[temperature]": u.K, "[angle]": u.rad}
E_0 = q2unitless(u.epsilon_0, units)
q = q2unitless(u.elementary_charge, units)
k_B = q2unitless(u.boltzmann_constant, units)
Samp = Bunch(h=0.1, E_s1=3)
Cant = Bunch(R_tip=0.05)
Expt = Bunch(d=0.15)
def __init__(self):
self.sphere = SphereCapacitance(self)
# TODO: Where do these test cases come from?
class TestSphereCapacitance(unittest.TestCase):
def setUp(self):
self.sim = MockSimulationCapacitance()
def test_C(self):
assert_almost_equal(0.00623177, self.sim.sphere.C())
def test_Cd(self):
assert_almost_equal(-0.00322151, self.sim.sphere.Cd())
def test_Cd2(self):
assert_almost_equal(0.0311542, self.sim.sphere.Cd2())
class TestSimulation(unittest.TestCase):
@staticmethod
def test_init_Simulation():
cant = Cantilever(f_c=50*u.kHz, k_c=3.5*u.N/u.m, Q=20000*u.d,
R_tip=40*u.nm, L_tip=15*u.um, theta_tip=16*u.degrees,
geometry_c='perpendicular')
trans = Transistor(semiconductor='TPD', h=70 * u.nm, h_trans=1 * u.nm,
h_i=300 * u.nm, E_s1=3.5, E_s2=-0.0005, E_i1=4.65,
E_i2=0, mobility=3e-6 * u.cm ** 2 / u.V / u.s,
T=298 * u.K, V_g=10 * u.V, rho=None)
expt = Experiment(d=100 * u.nm, V_ts=5 * u.V, jitter_f_i=0.2 * u.Hz,
jitter_f_f=3 * u.Hz)
sim = Simulation(cant, trans, expt)
# Test some properties are correct
eq_(sim.Cant.f_c, 50)
eq_(sim.Expt.d, 0.1)
eq_(sim.Samp.h_i, 0.3)
assert_almost_equal(sim.Samp.diff, 0.0077038955272097955)
# These tests are all generated by implementing sympy code for the functions in
# validate-im-dielectric.ipynb. That should be a good comparison; sympy
# uses mpmath as a backend for its infinite precision arithmetic, so this
# should be robust against ordinary floating point errors.
class TestImDielectricHelperFunctions(unittest.TestCase):
@staticmethod
def test__eta():
k = np.array([1, 10, 100, 1000, 10000, 100000])
kappa = 3500
E_s = 3 - 0.001j
D = 0.005
omega = 300
# Expected values were calculated using sympy,
# to 15 digits of precision.
# See test_verification/validate-im-dielectric
expected_eta = np.array([2020.78311260126 + 15.182507854811j,
2020.80760652432 + 15.182323829782j,
2023.25550170076 + 15.163955048756j,
2254.66583909462 + 13.607584302718j,
10202.1243828582 + 3.007271263178j,
100020.414581093 + 0.30674293451j])
eta = _eta(k, kappa, E_s, D, omega)
assert_allclose(eta, expected_eta)
@staticmethod
def test__lambda():
k = np.array([1, 10, 100, 1000, 10000, 100000])
eta = np.array([2020.78311260126 + 15.182507854811j,
2020.80760652432 + 15.182323829782j,
2023.25550170076 + 15.163955048756j,
2254.66583909462 + 13.607584302718j,
10202.1243828582 + 3.007271263178j,
100020.414581093 + 0.30674293451j])
E_eff = 3 - 100j
E_s = 3 - 0.001j
expected_lambda = np.array([0.0001184255261724 + 0.0164941987549306j,
0.00118421087011718 + 0.164939988752172j,
0.0117978533636026 + 1.64740475175451j,
0.0842948437214929 + 14.7834985873234j,
-0.00125999301746353 + 32.672603689536j,
-0.0110065260871034 + 33.3261929274151j])
Lambda = _lambda(k, eta, E_eff, E_s)
assert_allclose(Lambda, expected_lambda)
@staticmethod
def test_thetaI():
k = np.array([1, 10, 100, 1000, 10000, 100000])
eta = np.array([2020.78311260126 + 15.182507854811j,
2020.80760652432 + 15.182323829782j,
2023.25550170076 + 15.163955048756j,
2254.66583909462 + 13.607584302718j,
10202.1243828582 + 3.007271263178j,
100020.414581093 + 0.30674293451j])
Lambda = np.array([0.0001184255261724 + 0.0164941987549306j,
0.00118421087011718 + 0.164939988752172j,
0.0117978533636026 + 1.64740475175451j,
0.0842948437214929 + 14.7834985873234j,
-0.00125999301746353 + 32.672603689536j,
-0.0110065260871034 + 33.3261929274151j])
expected_thetaI = np.array([0.00157126996626562 + 0.0210682675809495j,
0.00672782406000677 + 0.0281575198774334j,
0.050275664263775 + 0.0281213204722464j,
0.443934273416263 + 0.0140052914999941j,
0.980197277948465 + 0.000305155415174606j,
0.999795989512753 + 3.05416795636227e-6j])
h_s = 0.1
alpha = 0.65 - 0.0002j
E_s = 3 - 0.001j
E_eff = 3 - 100j
thetaI = [_thetaI(_k, h_s, alpha, _Lambda, _eta, E_s, E_eff) for
_k, _Lambda, _eta in zip(k, Lambda, eta)]
thetaI = np.array(thetaI)
assert_allclose(expected_thetaI, thetaI)
@staticmethod
def test_thetaI_c():
k = np.array([1, 10, 100, 1000, 10000, 100000])
eta = np.array([2020.78311260126 + 15.182507854811j,
2020.80760652432 + 15.182323829782j,
2023.25550170076 + 15.163955048756j,
2254.66583909462 + 13.607584302718j,
10202.1243828582 + 3.007271263178j,
100020.414581093 + 0.30674293451j])
Lambda = np.array([0.0001184255261724 + 0.0164941987549306j,
0.00118421087011718 + 0.164939988752172j,
0.0117978533636026 + 1.64740475175451j,
0.0842948437214929 + 14.7834985873234j,
-0.00125999301746353 + 32.672603689536j,
-0.0110065260871034 + 33.3261929274151j])
expected_thetaI = np.array([0.00157126996626562 + 0.0210682675809495j,
0.00672782406000677 + 0.0281575198774334j,
0.050275664263775 + 0.0281213204722464j,
0.443934273416263 + 0.0140052914999941j,
0.980197277948465 + 0.000305155415174606j,
0.999795989512753 + 3.05416795636227e-6j])
h_s = 0.1
alpha = 0.65 - 0.0002j
E_s = 3 - 0.001j
E_eff = 3 - 100j
thetaI = [_thetaI_c(_k, h_s, alpha, _Lambda, _eta, E_s, E_eff) for
_k, _Lambda, _eta in zip(k, Lambda, eta)]
thetaI = np.array(thetaI)
assert_allclose(expected_thetaI, thetaI)
@staticmethod
def test_thetaII():
k = np.array([1, 10, 100, 1000, 10000, 100000])
Lambda = np.array([0.0001184255261724 + 0.0164941987549306j,
0.00118421087011718 + 0.164939988752172j,
0.0117978533636026 + 1.64740475175451j,
0.0842948437214929 + 14.7834985873234j,
-0.00125999301746353 + 32.672603689536j,
-0.0110065260871034 + 33.3261929274151j])
expected_thetaII = np.array([0.101145810077246 + 0.0296480635666554j,
0.764320753451023 + 0.0123928030520502j,
0.999999996277978 + 2.1003332939236e-10j,
1.0 + 8.470329472543e-22j,
1.00000000000000,
1.00000000000000])
h = 0.1
E_s = 3 - 0.001j
E_d = 3 - 0.001j
E_eff = 3 - 100j
thetaII = _thetaII(k, h, E_s, E_d, E_eff, Lambda)
assert_allclose(expected_thetaII, thetaII)
class TestSimulationObject(unittest.TestCase):
def setUp(self):
self.cant = Cantilever(
f_c=50*u.kHz, k_c=3.5*u.N/u.m, Q=20000*u.d,
R_tip=40*u.nm, L_tip=15*u.um, theta_tip=16*u.degrees,
geometry_c='perpendicular')
self.trans = Transistor(
semiconductor='TPD', h=70 * u.nm, h_trans=1 * u.nm, h_i=300 * u.nm,
E_s1=3.5, E_s2=-0.0005, E_i1=4.65, E_i2=0,
mobility=3e-6*u.cm**2/u.V/u.s, T=298 * u.K, V_g=10 * u.V, rho=None)
self.expt = Experiment(
d=100 * u.nm, V_ts=5 * u.V, jitter_f_i=0.2 * u.Hz,
jitter_f_f=3 * u.Hz)
self.sim = Simulation(self.cant, self.trans, self.expt)
def test_check_assignment(self):
eq_(self.sim.Cant.f_c, 50)
self.sim.assign('f_c', 75 * u.kHz)
eq_(self.sim.Cant.f_c, 75)
# TODO: Where does this calculation come from?
class TestCapacitanceCalculations(unittest.TestCase):
def setUp(self):
cant = Cantilever(
f_c=46*u.kHz, Q=2500*u.dimensionless, k_c=0.85*u.N/u.m,
R_tip=40*u.nm, L_tip=15*u.um, theta_tip=16*u.degrees,
geometry_c='perpendicular')
trans = Transistor(
semiconductor='TPD', h=63*u.nm, h_trans=1*u.nm, h_i=300*u.nm,
E_s1=3.5, E_s2=-0.0005, E_i1=4.65, E_i2=0,
mobility=3e-6*u.cm**2/u.V/u.s, T=298*u.K, V_g=10*u.V, rho=None)
expt = Experiment(
d=100*u.nm, V_ts=5*u.V, jitter_f_i=0.2*u.Hz, jitter_f_f=3*u.Hz)
self.sim = Simulation(cant, trans, expt)
def test_capacitance(self):
assert_almost_equal(self.sim.Sphere.C(), 5.1e-3, places=2)
def test_capacitance_2nd_derivative(self):
assert_almost_equal(self.sim.Sphere.Cd2(), 6.9e-2, places=2)
class TestImDielectric(unittest.TestCase):
def setUp(self):
exp = Experiment(V_ts=3*u.V, d=300*u.nm)
cant = Cantilever(
f_c=65*u.kHz, Q=2500*u.dimensionless, k_c=3.5*u.N/u.m,
R_tip=40*u.nm, L_tip=15*u.um, theta_tip=16*u.degrees,
geometry_c='perpendicular')
trans = Transistor(
h=72*u.nm, V_g=40*u.V, E_s1=3.4, mobility=2.7e-6*u.cm**2/u.V/u.s,
E_s2=-0.05, E_i1=4.65)
self.sim = Simulation(cant, trans, exp)
@expected_failure
def test_im_dielectric(self):
assert_almost_equal(self.sim._im_dielectric(1), -0.003635)
| ryanpdwyer/jittermodel | jittermodel/tests/test_simulation.py | Python | mit | 12,688 |
"""
.. Copyright (c) 2014- Marshall Farrier
license http://opensource.org/licenses/MIT
Options (:mod:`pynance.opt`)
===============================================
.. currentmodule:: pynance.opt
:mod:`pynance.opt.core`
:mod:`pynance.opt.covcall`
:mod:`pynance.opt.price`
:mod:`pynance.opt.retrieve`
:mod:`pynance.opt.spread`
"""
from __future__ import absolute_import
__all__ = ["covcall", "retrieve"]
# imported directly into module
from . import retrieve
from .retrieve import *
# imported as submodule
from . import covcall
| herberthudson/pynance | pynance/opt/__init__.py | Python | mit | 540 |
#!/usr/bin/env python
import sys
import urllib2
def download(url):
r = urllib2.urlopen(url)
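# strip carriage returns plus the first 3 bytes, presumably a UTF-8 BOM
# prepended by the Google Docs txt export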
content = r.read().replace("\r", "")[3:]
r.close()
return content
for line in sys.stdin:
name, url = line.strip().split(',')
if url.endswith("/edit"):
url = url[0:-len("/edit")]
url = url + "/export?format=txt"
print >>sys.stderr, "downloading %s: %s" % (name, url)
f = open(name, "w")
print >>f, download(url)
f.close()
| kohyatoh/texwebpreview | scripts/download.py | Python | mit | 468 |
#!/usr/bin/python
"""
This script generates the complete PCFG usable for
honey encryption. The inputs to this script are,
a) vault file b) password leak file (if not provided
the vault password distribution is used.) c) test the accuracy or not
etc.
"""
#import sys, os, math, struct, bz2, resource
#BASE_DIR = os.getcwd()
#sys.path.append(BASE_DIR)
import string, os, sys, json
from helper.helper import open_
from collections import defaultdict
from lexer.lexer import parallel_buildpcfg
from lexer.pcfg import TrainedGrammar
from analysis_tools.classifier import Experiment
from vaultanalysis.train_vault_dist import cal_size_subG, cal_stat
from analysis_tools.gen_decoy_data import decoy_vault_random
PW_TMP_FILE = 'pw_set_tmp.txt'
def create_pcfg(vault_leak, password_leak=None):
# learn the grammar
vault_d = json.load(open(vault_leak))
print "# of vaults: ", len(vault_d)
print "max size of vault:", max(len(x) for x in vault_d.values())
print "max size of vault:", min(len(x) for x in vault_d.values())
if not password_leak:
D = defaultdict(int)
for k,v in vault_d.items():
if len(v)>40: continue
for x in v:
D[x] += 1
password_leak = PW_TMP_FILE
with open(password_leak, 'w') as f:
f.write('\n'.join(
'%d\t%s' % (f,p)
for p,f in sorted(D.items(), key=lambda x: x[1],
reverse=True))
)
print "Password file created"
parallel_buildpcfg(password_leak)
# learn the vault distribution
tg = TrainedGrammar()
G = cal_size_subG(tg, vault_leak)
f = os.tmpfile()
json.dump(G, f)
f.seek(0)
cal_stat(fds=[f])
f.close()
def test(vault_leak):
trn_fl = 'analysis_tools/decoy_trn.txt'
tst_fl = 'analysis_tools/decoy_tst.txt'
length_dist = defaultdict(int)
D = json.load(open(vault_leak))
for k,v in D.items():
length_dist[len(v)] += 1
if 1 in length_dist:
del length_dist[1]
s = max(length_dist.values())
json.dump(decoy_vault_random(n=1000, s_max=s, length_dist=length_dist),
open(trn_fl, 'w'), indent=2)
json.dump(decoy_vault_random(n=1000, s_max=s, length_dist=length_dist),
open(tst_fl, 'w'), indent=2)
for s,t in [(2,4), (5,8), (9,40), (4,40)]:
Experiment(
_s=s,
_t=t,
trn_fl=trn_fl,
tst_fl=tst_fl,
vault_fl=vault_leak,
plot_graph=True)
def main():
if len(sys.argv)<2 or sys.argv[1] == '-help':
print """This script will (should) generate all the static required files
for honey encryption. options are '-vault' <vault-file> '-pwfile' <pw-file> -test
'-pwfile' and '-test' are optional arguments. If '-test' option is provided it will
test the security of the newly created pcfg using the SVM classifier."""
exit(0)
vault_fl = None
pw_fl = None
istest = False
if '-test' in sys.argv:
istest = True
try:
i = sys.argv.index('-vault')
vault_fl = sys.argv[i+1]
if '-pwfile' in sys.argv:
i = sys.argv.index('-pwfile')
pw_fl = sys.argv[i+1]
except:
print "Sorry you have to specify vault file with '-vault' option!"
print "---\nVault: %s\nPwFile: %s\ntest: %s\n----" % (
vault_fl, pw_fl, istest)
create_pcfg(vault_fl, pw_fl)
if istest:
test(vault_fl)
main()
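# Example invocation (a sketch; the file names are hypothetical):
#
# python complete_setup.py -vault vault_leak.json -pwfile rockyou.txt -test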
| rchatterjee/nocrack | complete_setup.py | Python | mit | 3,534 |
# game.py
# Author: Sébastien Combéfis
# Version: April 20, 2016
from abc import *
import copy
import json
import socket
import sys
DEFAULT_BUFFER_SIZE = 1024
SECTION_WIDTH = 60
def _printsection(title):
print()
print(' {} '.format(title).center(SECTION_WIDTH, '='))
class InvalidMoveException(Exception):
'''Exception representing an invalid move.'''
def __init__(self, message):
super().__init__(message)
class GameState(metaclass=ABCMeta):
'''Abstract class representing a generic game state.'''
def __init__(self, visible, hidden=None):
self._state = {'visible': visible, 'hidden': hidden}
def __str__(self):
return json.dumps(self._state['visible'], separators=(',', ':'))
def __repr__(self):
return json.dumps(self._state, separators=(',', ':'))
@abstractmethod
def winner(self):
'''Check whether the state is a winning state.
Pre: -
Post: The returned value contains:
-1 if there is no winner yet (and the game is still going on);
None if the game ended with a draw;
or the number of the winning player, otherwise.
'''
...
@abstractmethod
def prettyprint(self):
'''Print the state.
Pre: -
Post: This state has been printed on stdout.'''
...
@classmethod
def parse(cls, state):
return cls(json.loads(state))
@classmethod
def buffersize(cls):
return DEFAULT_BUFFER_SIZE
class GameServer(metaclass=ABCMeta):
'''Abstract class representing a generic game server.'''
def __init__(self, name, nbplayers, initialstate, verbose=False):
self.__name = name
self.__nbplayers = nbplayers
self.__verbose = verbose
self._state = initialstate
# Stats about the running game
self.__currentplayer = None
self.__turns = 0
@property
def name(self):
return self.__name
@property
def nbplayers(self):
return self.__nbplayers
@property
def currentplayer(self):
return self.__currentplayer
@property
def turns(self):
return self.__turns
@abstractmethod
def applymove(self, move):
'''Apply a move.
Pre: 'move' is valid
Post: The specified 'move' has been applied to the game for the current player.
Raises InvalidMoveException: If 'move' is invalid.
'''
...
@property
def state(self):
return copy.deepcopy(self._state)
def _waitplayers(self):
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.bind(('0.0.0.0', 5000))
s.listen(self.nbplayers)
if self.__verbose:
_printsection('Starting {}'.format(self.name))
print(' Game server listening on port {}.'.format(5000))
print(' Waiting for {} players...'.format(self.nbplayers))
self.__players = []
# Wait for enough players for a play
try:
while len(self.__players) < self.__nbplayers:
client = s.accept()[0]
self.__players.append(client)
if self.__verbose:
print(' - Client connected from {}:{} ({}/{}).'
.format(*client.getpeername(), len(self.__players), self.nbplayers)
)
except KeyboardInterrupt:
for player in self.__players:
player.close()
_printsection('Game server ended')
return False
# Notify players that the game started
try:
for i in range(len(self.__players)):
if self.__verbose:
print(' Initialising player {}...'.format(i))
player = self.__players[i]
player.sendall('START {}'.format(i).encode())
data = player.recv(self._state.__class__.buffersize()).decode().split(' ')
if data[0] != 'READY':
if self.__verbose:
print(' - Player {} not ready to start.'.format(i))
_printsection('Current game ended')
return False
elif self.__verbose:
print(' - Player {} ({}) ready to start.'.format(i, data[1] if len(data) == 2 else 'Anonymous'))
except OSError:
if self.__verbose:
print('Error while notifying player {}.'.format(player))
return False
# Start the game since all the players are ready
if self.__verbose:
_printsection('Game initialised (all players ready to start)')
return True
def _gameloop(self):
self.__currentplayer = 0
winner = -1
if self.__verbose:
print(' Initial state:')
self._state.prettyprint()
# Loop until the game ends with a winner or with a draw
while winner == -1:
player = self.__players[self.__currentplayer]
if self.__verbose:
print("\n=> Turn #{} (player {})".format(self.turns, self.__currentplayer))
player.sendall('PLAY {}'.format(self.state).encode())
try:
move = player.recv(self._state.__class__.buffersize()).decode()
if self.__verbose:
print(' Move:', move)
self.applymove(move)
self.__turns += 1
self.__currentplayer = (self.__currentplayer + 1) % self.nbplayers
except InvalidMoveException as e:
if self.__verbose:
print('Invalid move:', e)
player.sendall('ERROR {}'.format(e).encode())
if self.__verbose:
print(' State:')
self._state.prettyprint()
winner = self._state.winner()
if self.__verbose:
_printsection('Game finished')
# Notify players about won/lost status
if winner is not None:
for i in range(self.nbplayers):
self.__players[i].sendall(('WON' if winner == i else 'LOST').encode())
if self.__verbose:
print(' The winner is player {}.'.format(winner))
# Notify players that the game ended
else:
for player in self.__players:
player.sendall('END'.encode())
# Close the connexions with the clients
for player in self.__players:
player.close()
if self.__verbose:
_printsection('Game ended')
def run(self):
if self._waitplayers():
self._gameloop()
class GameClient(metaclass=ABCMeta):
'''Abstract class representing a game client'''
def __init__(self, server, stateclass, verbose=False):
self.__stateclass = stateclass
self.__verbose = verbose
if self.__verbose:
_printsection('Starting game')
addrinfos = socket.getaddrinfo(*server, socket.AF_INET, socket.SOCK_STREAM)
s = socket.socket()
try:
s.connect(addrinfos[0][4])
if self.__verbose:
print(' Connected to the game server on {}:{}.'.format(*addrinfos[0][4]))
self.__server = s
self._gameloop()
except OSError:
print(' Impossible to connect to the game server on {}:{}.'.format(*addrinfos[0][4]))
def _gameloop(self):
server = self.__server
running = True
while running:
data = server.recv(self.__stateclass.buffersize()).decode()
command = data[:data.index(' ')] if ' ' in data else data
if command == 'START':
self._playernb = int(data[data.index(' '):])
server.sendall('READY'.encode())
if self.__verbose:
_printsection('Game started')
print(" Player's number: {}".format(self._playernb))
elif command == 'PLAY':
state = self.__stateclass.parse(data[data.index(' ')+1:])
if self.__verbose:
print("\n=> Player's turn to play")
print(' State:')
state.prettyprint()
move = self._nextmove(state)
if self.__verbose:
print(' Move:', move)
server.sendall(move.encode())
elif command in ('WON', 'LOST', 'END'):
running = False
if self.__verbose:
_printsection('Game finished')
if command == 'WON':
print(' You won the game.')
elif command == 'LOST':
print(' You lost the game.')
else:
print(' It is a draw.')
_printsection('Game ended')
server.close()
else:
if self.__verbose:
print('Specific data received:', data)
self._handle(data)
@abstractmethod
def _handle(self, command):
'''Handle a command.
Pre: command != ''
Post: The specified 'command' has been handled.
'''
...
@abstractmethod
def _nextmove(self, state):
'''Get the next move to play.
Pre: 'state' is a valid game state.
Post: The returned value contains a valid move to be played by this player
in the specified 'state' of the game.
'''
...
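# A minimal concrete client might look like this (a sketch; the move
# encoding depends on the game that implements this protocol):
#
# class FirstMoveClient(GameClient):
#     def _handle(self, command):
#         pass  # no game-specific commands to handle
#     def _nextmove(self, state):
#         return '0'  # hypothetical move encoding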
| Diab0lix/Pylos | lib/game.py | Python | mit | 9,652 |
from system.core.model import Model
class User(Model):
def __init__(self):
super(User, self).__init__()
def get_user(self, user_id):
query = "SELECT * FROM users WHERE user_id = :user_id LIMIT 1"
data = {
"user_id": user_id
}
user = self.db.get_one(query, data)
if user:
return {'status': True, 'user': user}
return {'status': False}
def add_user(self, user_data):
query = "SELECT * FROM users WHERE fb_user_id = :fb_user_id"
data = {
"fb_user_id": user_data['id']
}
user = self.db.query_db(query, data)
if not user:
query = "INSERT INTO users (fb_user_id, first_name, last_name) VALUES (:fb_user_id, :first_name," \
" :last_name)"
data = {
"fb_user_id": user_data['id'],
"first_name": user_data['first_name'],
"last_name": user_data['last_name']
}
self.db.query_db(query, data)
return {'status': True}
return {'status': False}
def register(self, form, user_id):
query = "UPDATE users SET email = :email, username = :username WHERE fb_user_id = :fb_user_id"
data = {
'email': form['email'],
'username': form['username'],
'fb_user_id': user_id
}
self.db.query_db(query, data)
query = "SELECT * FROM users WHERE email = :email"
data = {
'email': form['email']
}
user = self.db.query_db(query, data)
if user:
return {'status': True}
return {'status': False, 'error': "Did not save to database"} | RydrDojo/Ridr | app/models/User.py | Python | mit | 1,720 |
from .i18n import _
message = _('Hello')
| rebeccaframework/rebecca.bootstrapui | rebecca/bootstrapui/constants.py | Python | mit | 42 |
#!/usr/bin/python2
import ConfigParser
import urllib2 as ul
import json
import argparse
import sys
from unicodedata import normalize
parser = argparse.ArgumentParser(description='Retrieves live CS:GO Streams')
parser.add_argument('--verbose', '-v', help='Use verbose output', action='store_true')
parser.add_argument('--conky', '-c', help='Formats the output conky-friendly', action='store_true')
parser.add_argument('--delimiter', '-d', help='Prints the output separated by the given argument', metavar='CHAR', default='|')
parser.add_argument('--limit', '-l', help='Limits the output to $n streams', metavar='INT', default='0')
parser.add_argument('--minimal', '-m', help='Minimal viewer count', metavar='INT', default='50')
parser.add_argument('--title', '-t', help='Print the title of the streams aswell', action='store_true')
parser.add_argument('--config', '-f', help='Configuration file including twitch API client id', action='store')
args = parser.parse_args()
def sanitizeConky(title, name):
strings = [title, name]
for i in range(0,len(strings)):
strings[i] = strings[i].replace("$", "$$")
strings[i] = strings[i].replace("#", "\#")
return strings[0], strings[1]
def colorConky(name, viewer):
color = "${color "
if name in ['tarik_tv', 'summit1g', 'g5taz', 'meclipse']:
color += "ff9900"
elif viewer > 10000:
color += "cc3300"
else:
return "${color0}"
color += "}"
return color
if not args.config:
print "[!!] You need to specify a config file!"
sys.exit(0)
config = ConfigParser.RawConfigParser()
config.read(args.config)
client_id = config.get('Twitch', 'client_id')
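# The config file is expected to contain (matching the section/option
# read above):
#
# [Twitch]
# client_id = <your Twitch API client id>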
if not client_id:
print "[!!] Specify an client_id in the Twitch section of the config"
sys.exit(0)
if args.verbose:
print "[**] Open URL"
req = ul.Request('https://api.twitch.tv/kraken/streams?game=Counter-Strike%3A%20Global%20Offensive')
req.add_header('Client-ID', client_id)
response = ul.urlopen(req)
if args.verbose:
print "[**] Read HTML"
html = response.read().decode('utf-8')
html = normalize('NFKD', html).encode('ascii', 'ignore')
if args.verbose:
print "[**] Parse Streams"
j = json.loads(html)
streams = j['streams']
i = 0
if args.verbose:
print "[**] Limit: "+args.limit
print "[**] Minimal Viewercount: "+args.minimal
if args.conky:
print "${font}"
sep = args.delimiter
for stream in streams:
if int(args.limit) != 0:
i+=1
if i > int(args.limit):
break
channel = stream['channel']
viewers = stream['viewers']
if viewers < int(args.minimal):
continue
title = channel['status'].encode('utf-8')
name = channel['name'].encode('utf-8')
if args.conky:
title, name = sanitizeConky(title, name)
print colorConky(name,viewers)+"${font}"+name[:20] + "${font Liberation Sans:bold:size=10}${goto 130}${color3}" +str(viewers) +"${color0}${font}${goto 200}"+title[:30]
continue
if args.verbose:
print name+": "+title[:50]+" ["+str(viewers)+"]"
else:
print name+sep+str(viewers)
| Spotlight0xff/scripts | streams.py | Python | mit | 3,110 |
# coding=utf-8
import os
from plugin_common.baseplugin import BasePlugin
from plugin_common.baseplugin import Cmd
import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gio, Gtk
__author__ = 'peter'
appList = []
def getAppIcon(name):
global appList
iconTheme = Gtk.IconTheme.get_default()
if not appList:
appList = Gio.AppInfo.get_all()
for app in appList:
if name == Gio.AppInfo.get_display_name(app) or \
name == Gio.AppInfo.get_executable(app) or \
os.path.basename(name) == Gio.AppInfo.get_display_name(app) or \
os.path.basename(name) == os.path.basename(Gio.AppInfo.get_executable(app)):
icon = Gio.AppInfo.get_icon(app)
if icon:
iconInfo = Gtk.IconTheme.lookup_by_gicon(iconTheme, icon, 256, Gtk.IconLookupFlags.USE_BUILTIN)
if iconInfo:
return iconInfo.get_filename()
return 'app.png'
class Main(BasePlugin):
mainCmd = None
subCmdList = []
@staticmethod
def init():
Main.mainCmd = Cmd(title='top', desc='Show current processes', icon='icon.png', cmd='top')
Main.subCmdList = [
Cmd(title='top c', desc='sort by %CPU', icon=Main.mainCmd.icon, cmd='top c'),
Cmd(title='top m', desc='sort by %MEM', icon=Main.mainCmd.icon, cmd='top m'),
]
@staticmethod
def onList(param):
if not param:
return Main.subCmdList
if param[0] == 'c':
f = os.popen('ps axo comm,pid,pcpu,pmem,user,command k -pcpu |head')
elif param[0] == 'm':
f = os.popen('ps axo comm,pid,pcpu,pmem,user,command k -pmem |head')
else:
return []
topList = []
for line in f.readlines()[1:]:
info = line.split()
topList.append(
Cmd(title=info[0], desc='PID: ' + info[1] + ', CPU: ' + info[2] + '%, RAM: ' + info[3] + '%',
icon=getAppIcon(info[5]), cmd='top ' + param[0]))
return topList
| PeterHo/Linalfred | plugins/top/main.py | Python | mit | 2,091 |
# Adapted from score written by wkentaro
# https://github.com/wkentaro/pytorch-fcn/blob/master/torchfcn/utils.py
import numpy as np
class runningScore(object):
def __init__(self, n_classes):
self.n_classes = n_classes
self.confusion_matrix = np.zeros((n_classes, n_classes))
def _fast_hist(self, label_true, label_pred, n_class):
mask = (label_true >= 0) & (label_true < n_class)
hist = np.bincount(
n_class * label_true[mask].astype(int) + label_pred[mask], minlength=n_class ** 2
).reshape(n_class, n_class)
return hist
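# e.g. with n_class=3, a pixel with true label 1 and predicted label 2
# lands in bin 1*3+2 = 5, which reshapes to confusion_matrix[1, 2]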
def update(self, label_trues, label_preds):
for lt, lp in zip(label_trues, label_preds):
self.confusion_matrix += self._fast_hist(lt.flatten(), lp.flatten(), self.n_classes)
def get_scores(self):
"""Returns accuracy score evaluation result.
- overall accuracy
- mean accuracy
- mean IU
- fwavacc
"""
hist = self.confusion_matrix
acc = np.diag(hist).sum() / hist.sum()
acc_cls = np.diag(hist) / hist.sum(axis=1)
acc_cls = np.nanmean(acc_cls)
iu = np.diag(hist) / (hist.sum(axis=1) + hist.sum(axis=0) - np.diag(hist))
mean_iu = np.nanmean(iu)
freq = hist.sum(axis=1) / hist.sum()
fwavacc = (freq[freq > 0] * iu[freq > 0]).sum()
cls_iu = dict(zip(range(self.n_classes), iu))
return (
{
"Overall Acc: \t": acc,
"Mean Acc : \t": acc_cls,
"FreqW Acc : \t": fwavacc,
"Mean IoU : \t": mean_iu,
},
cls_iu,
)
def reset(self):
self.confusion_matrix = np.zeros((self.n_classes, self.n_classes))
class averageMeter(object):
"""Computes and stores the average and current value"""
def __init__(self):
self.reset()
def reset(self):
self.val = 0
self.avg = 0
self.sum = 0
self.count = 0
def update(self, val, n=1):
self.val = val
self.sum += val * n
self.count += n
self.avg = self.sum / self.count
| meetshah1995/pytorch-semseg | ptsemseg/metrics.py | Python | mit | 2,166 |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "datacademy.settings.local")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| mingot/datacademy_django | datacademy/manage.py | Python | mit | 259 |
import cPickle as pickle
import numpy as np
from stellar_parameters import Star
from channel import SpectralChannel
class spectrum(object):
pass
import sick
spec = sick.specutils.Spectrum.load("spectra/hermes-sun.fits")
spec = sick.specutils.Spectrum.load("spectra/uvessun1.txt")
blue_channel = spectrum()
blue_channel.dispersion = spec.disp
blue_channel.flux = spec.flux
blue_channel.variance = spec.variance
with open("transitions.pkl", "rb") as fp:
transitions = pickle.load(fp)
with open("sousa-transitions.pkl", "rb") as fp:
transitions = pickle.load(fp)
# Get just blue channel ones
transition_indices = (blue_channel.dispersion[-1] > transitions["rest_wavelength"]) * (transitions["rest_wavelength"] > blue_channel.dispersion[0])
use_regions = np.array([
[4731.3, 4731.65],
[4742.65, 4742.93],
[4757.95, 4748.31],
[4759.1, 4759.56],
[4764.43, 4764.47],
[4778.08, 4778.41],
[4779.78, 4780.2],
[4781.59, 4781.92],
[4788.41, 4789],
[4789.91, 4790.19],
[4795.24, 4795.66],
[4798.39, 4798.64],
[4802.69, 4803.2],
[4805.3, 4805.71],
[4807.95, 4808.35],
[4820.23, 4820.6],
[4847.89, 4848.02],
[4869.85, 4870.3],
[4873.88, 4874.19],
[4884.95, 4885.25],
[4889.9, 4892.67],
[4894.7, 4895.0]
])
#use_regions = np.array([
# [4705, 4850.],
# [4880., 5000.]
#])
mask = np.empty(len(blue_channel.dispersion))
mask[:] = np.nan
for row in use_regions:
indices = blue_channel.dispersion.searchsorted(row)
mask[indices[0]:indices[1] + 1] = 1.
print(np.sum(np.isfinite(mask)))
blue = SpectralChannel(blue_channel, transitions[transition_indices], mask=mask, redshift=False, continuum_order=-1,
wl_tolerance=0.10, wl_cont=2, outliers=True)
xopt = blue.optimise(plot_filename="blue_optimise.pdf", plot_clobber=True)
star = Star("/Users/arc/atmospheres/castelli-kurucz-2004/a???at*.dat", channels=[blue])
star.infer({"Teff": 5700., "logg": 4.0, "[M/H]": 0.1, "xi": 0.9}, walkers=200, burn=450, sample=50)
| andycasey/precise-objective-differential-spectroscopy | code/test_hd22879.py | Python | mit | 2,015 |
import cocotb
from cocotb.triggers import Timer
from cocotb.result import TestFailure
import random
import math
TICK = 1
INPUT_WIDTH = 28
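# The DUT computes the root bit-serially, so a result appears INPUT_WIDTH
# clock cycles after its input is presented (the pipeline delay used below)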
# @cocotb.test()
# def sqrt_basic_test(dut):
# """Test for sqrt(127)"""
# i = 127
# dut.integer_input = i
# dut.reset = 0
# dut.clk = 0
# yield Timer(TICK)
# dut.clk = 1
# yield Timer(TICK)
# dut.clk = 0
# yield Timer(TICK)
# dut.reset = 1
# yield Timer(TICK)
# for _ in range(INPUT_WIDTH):
# dut.clk = 1
# yield Timer(TICK)
# dut.clk = 0
# yield Timer(TICK)
# dut.integer_input = 0
# integer_sqrt = int(math.sqrt(i))
# remainder = int(i - integer_sqrt**2)
# if int(dut.result) != int(math.sqrt(i)) or int(dut.remainder) != remainder:
# raise TestFailure(
# "sqrt is incorrect for %i; r: %s, q: %s, expected r: %s, q: %s" % (i, int(dut.remainder), int(dut.result), integer_sqrt, remainder))
# # check the zeros a few times as well
# for _ in range(5):
# dut.clk = 1
# yield Timer(TICK)
# dut.clk = 0
# yield Timer(TICK)
# if int(dut.result) != 0 or int(dut.remainder) != 0:
# raise TestFailure(
# "sqrt is incorrect for %i; r: %s, q: %s, expected r: %s, q: %s" % (i, int(dut.remainder), int(dut.result), integer_sqrt, remainder))
# @cocotb.test()
# def sqrt_7bit_test(dut):
# """Test for sqrt up to 127 with resets"""
# for i in range(127):
# dut.integer_input = i
# dut.reset = 0
# dut.clk = 0
# yield Timer(TICK)
# dut.clk = 1
# yield Timer(TICK)
# dut.clk = 0
# yield Timer(TICK)
# dut.reset = 1
# yield Timer(TICK)
# for _ in range(int(INPUT_WIDTH)):
# dut.clk = 1
# yield Timer(TICK)
# dut.clk = 0
# yield Timer(TICK)
# if int(dut.result) != int(math.sqrt(i)):
# raise TestFailure(
# "sqrt is incorrect for %i; r: %s, q: %s" % (i, int(dut.remainder), int(dut.result)))
@cocotb.test()
def sqrt_sequential_7bit_test(dut):
"""sequential sqrt test up to 127"""
pipeline_delay = INPUT_WIDTH
print("pipeline delay is: ", pipeline_delay)
dut.reset = 0
dut.clk = 0
yield Timer(TICK)
dut.clk = 1
yield Timer(TICK)
dut.clk = 0
yield Timer(TICK)
dut.reset = 1
yield Timer(TICK)
for i in range(128):
#print(i)
dut.integer_input = i
if i >= pipeline_delay:
actual_input = i - pipeline_delay
if actual_input == 0: # zero case
integer_sqrt = 0
remainder = 0
else:
integer_sqrt = int(math.sqrt(actual_input))
remainder = int((i-pipeline_delay) - integer_sqrt**2)
#print("actually: %i; got r: %s, q: %s, expected r: %s, q: %s" % (actual_input, hex(int(dut.remainder)), hex(int(dut.result)), hex(remainder), hex(integer_sqrt)))
if int(dut.result) != integer_sqrt or int(dut.remainder) != remainder:
raise TestFailure(
"sqrt is incorrect for %i; r: %s, q: %s, expected: r: %s, q: %s" % (i, int(dut.remainder), int(dut.result), remainder, integer_sqrt))
dut.clk = 1
yield Timer(TICK)
dut.clk = 0
yield Timer(TICK)
# @cocotb.test()
# def sqrt_randomised_test(dut):
# """Checks the sqrt of 1000 random integers"""
# max_number = 2**(INPUT_WIDTH-1) - 1
# for _ in range(1000):
# i = random.randint(0, max_number)
# dut.integer_input = i
# dut.reset = 0
# dut.clk = 0
# yield Timer(TICK)
# dut.clk = 1
# yield Timer(TICK)
# dut.clk = 0
# yield Timer(TICK)
# dut.reset = 1
# yield Timer(TICK)
# for _ in range(int(INPUT_WIDTH/2)+1):
# dut.clk = 1
# yield Timer(TICK)
# dut.clk = 0
# yield Timer(TICK)
# if int(dut.result) != int(math.sqrt(i)):
# raise TestFailure(
# "sqrt is incorrect for %i; r: %s, q: %s" % (i, int(dut.remainder), int(dut.result)))
| jeremyherbert/real_time_stdev | sqrt/tests/test_sqrt.py | Python | mit | 4,259 |
# Give the next prime number until asked to stop
from sys import argv
is_prime = lambda y, p: ((map(lambda x: y % x, p)).count(0) == 0)
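# e.g. is_prime(9, [2, 3, 5, 7]) is False since 9 % 3 == 0, while
# is_prime(11, [2, 3, 5, 7]) is True (no divisor leaves remainder 0)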
def next_prime(n, p = None):
if p is None:
p = [2]
i = n + 1
while not is_prime(i, p):
i += 1
return i, p + [i]
def primes(y = None):
if y is None:
cont = ["y"]
else:
cont = ["y"] * y
i, n, p = 1, 2, None
text = "prime {i} is, {n}"
while len(cont) > 0 and cont[0] == "y":
print text.format(i = i, n = n)
n, p = next_prime(n, p)
i += 1
cont = cont[1:]
if len(cont) == 0:
cont = raw_input("Would you like to continue the sequence? [Y/N] ").lower().strip()
if cont.isdigit():
cont = ["y"] * int(cont)
print "--Sequence Ended at prime {i}--".format(i = i - 1)
return
def main():
if len(argv) == 1:
primes()
elif len(argv) == 2:
primes(int(argv[1]))
else:
print """Usage: python next_prime.py [<N>]
Each prime will be output, the user will then get a choice to continue or stop
Where <N> is an optional parameter to output before giving the choice to continue"""
return
if __name__ == '__main__':
main()
| mykhamill/Projects-Solutions | solutions/next_prime.py | Python | mit | 1,162 |
from django.core.management.base import BaseCommand
from 語料庫.models import 語料表
class Command(BaseCommand):
def handle(self, *args, **參數):
for 語料 in 語料表.objects.all():
語料.save()
| i3thuan5/gi2_liau7_khoo3 | 檢查工具/management/commands/重做檢查狀態.py | Python | mit | 234 |
class Solution:
#@param n: Given a decimal number that is passed in as a string
#@return: A string
def binaryRepresentation(self, n):
# write you code here
length = len(n)
if length == 0:
return n
split = n.split(".")
decimalPart = self.decBinary(split[1])
if decimalPart == "ERROR":
return decimalPart
intPart = self.intBinary(split[0])
if decimalPart == "":
return intPart
else:
return intPart+"."+decimalPart
def decBinary(self, num):
n = "0." + num
val = float(n)
sb = []
while val != 0:
if len(sb) > 32:
return "ERROR"
r = val * 2
if r >= 1:
sb.append("1")
val = r - 1
else:
sb.append("0")
val = r
return "".join(sb)
def intBinary(self, num):
i = int(num)
sb = []
base = 1
current = 0
while current != i:
if (i & base) != 0:
sb.append("1")
current += base
else:
sb.append("0")
base = base << 1
if len(sb) == 0:
sb.append("0")
return "".join(sb)[::-1]
| quake0day/oj | binaryRepresentation.py | Python | mit | 1,156 |
def sleep_us(us):
from time import sleep
sleep(us/1000000)
| Morteo/kiot | src/python3/utime/utime.py | Python | mit | 66 |
'''
Created on 30/01/2014
@author: jdsantana
'''
class InterfaceDB:
def __init__(self, host, port, user, password):
self.__host = host
self.__port = port
self.__user = user
self.__password = password
def connect(self):
raise NotImplementedError('Not implemented yet')
def execute(self, sql):
raise NotImplementedError('Not implemented yet')
def checkDB(self, db):
raise NotImplementedError('Not implemented yet')
def createDB(self, db, schema):
raise NotImplementedError('Not implemented yet')
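# A concrete backend would subclass the interface (a sketch; MySQLDB and
# its connection logic are hypothetical):
#
# class MySQLDB(InterfaceDB):
#     def connect(self):
#         ...  # open a connection using the credentials passed to __init__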
| JuanDaniel/DBOJ | Engine/src/DB/InterfaceBD.py | Python | mit | 583 |
import types
class InstanceVisibleMeta(type):
def __new__(cls, cls_name, cls_parents, cls_dict):
for attr_name in dir(cls):
attr_value = getattr(cls, attr_name, None)
if isinstance(attr_value, instancevisible) and attr_name not in cls_dict:
if not isinstance(attr_value.method, types.FunctionType):
cls_dict[attr_name] = attr_value.method
else:
cls_dict[attr_name] = classmethod(attr_value.method)
return super(InstanceVisibleMeta, cls).__new__(cls, cls_name, cls_parents, cls_dict)
class instancevisible(object):
"""
Converts a metaclass-defined function to class-defined.
Default behavior is convertion to classmethod but descriptors
such as property will be propagated to class level intact.
General use:
>>> class Metaclass(instancevisible.Meta):
... @instancevisible
... def func(cls):
... return cls.__name__
...
>>> class A(object):
... __metaclass__ = Metaclass
...
>>> obj = A()
>>> obj.func()
'A'
>>> A.func()
'A'
Property case:
>>> class Metaclass(instancevisible.Meta):
... @instancevisible
... @property
... def prop(self):
... return self.__class__.__name__
...
>>> class A(object):
... __metaclass__ = Metaclass
...
>>> obj = A()
>>> obj.prop
'A'
"""
Meta = InstanceVisibleMeta
def __init__(self, method):
self.method = method
def __get__(self, instance, owner):
return self
| Quantify-world/apification | src/apification/utils/instancevisible.py | Python | mit | 1,758 |
# -*- coding: utf-8; -*-
from decimal import Decimal
from collections import deque
from copy import copy
from io import BytesIO
import xml.sax
import xml.sax.handler
import datetime
import re
import iso8601
def format_open_tag(name, attrs):
attrs = [u'%s="%s"' % attr for attr in attrs.items()]
return u"<%s %s>" % (name, ' '.join(attrs))
class ElementHandler(object):
__slots__ = ('value', )
def __init__(self):
self.value = copy(self.default)
def on_nested_start(self, name, attrs):
raise NotImplementedError(
"Elements with type=%s are not expected to have nested elements" % self.type_name)
def on_nested_end(self, name, handler):
pass
def on_content(self, content):
if not content.strip():
return
raise NotImplementedError(
"Elements with type=%s are not expected to have a content" % self.type_name)
class NoTypeHandler(ElementHandler):
__slots__ = ('_string_parts', '_dict')
type_name = None
default = None
wspace_re = re.compile(r'\s+')
def __init__(self):
self._string_parts = []
self._dict = {}
def on_nested_start(self, name, attrs):
if self._string_parts:
self._string_parts.append(format_open_tag(name, attrs))
return MixedContentHandler()
return handler_for(attrs)
def on_nested_end(self, name, handler):
if self._string_parts:
self._string_parts.append(handler.value)
self._string_parts.append("</%s>" % name)
return
self._dict[name] = handler.value
def on_content(self, content):
if self._dict:
# Hack: drop content if we're already got nested
# elements. This could lead to a bug, but since
# InSales doesn't enclose nested HTML with CDATA
# there is no easy way to recover string data as-is
# in such case
return
if not self._string_parts and not content.strip():
# skip insignificant leading whitespace
return
self._string_parts.append(content)
@property
def value(self):
if self._dict:
return self._dict
if self._string_parts:
val = u''.join(self._string_parts)
val = self.wspace_re.sub(u' ', val)
val = val.strip()
return val
return self.default
class MixedContentHandler(ElementHandler):
default = ''
def on_nested_start(self, name, attrs):
self.value += format_open_tag(name, attrs)
return MixedContentHandler()
def on_nested_end(self, name, handler):
self.value += handler.value
self.value += "</%s>" % name
def on_content(self, content):
content = content.strip()
self.value += content
class NilHandler(ElementHandler):
default = None
def on_nested_start(self, name, attrs):
raise NotImplementedError(
"Elements with nil=true are not expected to have nested elements")
def on_nested_end(self, name, handler):
pass
def on_content(self, content):
if not content.strip():
return
raise NotImplementedError(
"Elements with nil=true are not expected to have a content")
class ArrayHandler(ElementHandler):
type_name = 'array'
default = []
def on_nested_start(self, name, attrs):
return handler_for(attrs)
def on_nested_end(self, name, handler):
self.value.append(handler.value)
class IntegerHandler(ElementHandler):
type_name = 'integer'
default = 0
def on_content(self, content):
self.value = int(content.strip())
class DecimalHandler(ElementHandler):
type_name = 'decimal'
default = Decimal(0)
def on_content(self, content):
self.value = Decimal(content.strip())
class BooleanHandler(ElementHandler):
type_name = 'boolean'
default = False
def on_content(self, content):
self.value = (content.strip() == 'true')
class DateHandler(ElementHandler):
type_name = 'date'
default = None
def on_content(self, content):
self.value = datetime.datetime.strptime(content.strip(), "%Y-%m-%d")
class TimestampHandler(ElementHandler):
type_name = 'timestamp'
default = None
date_re = re.compile(r"\s+\+(\d\d)(\d\d)$")
def on_content(self, content):
# convert 2010-08-16 18:39:58 +0400
# to 2010-08-16 18:39:58+04:00
string = self.date_re.sub(r"+\1:\2", content.strip())
self.value = iso8601.parse_date(string)
class DateTimeHandler(ElementHandler):
type_name = 'dateTime'
default = None
def on_content(self, content):
self.value = iso8601.parse_date(content.strip())
all_handlers = [
NoTypeHandler,
ArrayHandler,
IntegerHandler,
DecimalHandler,
BooleanHandler,
DateHandler,
DateTimeHandler,
TimestampHandler,
]
type2handler = dict((h.type_name, h) for h in all_handlers)
def handler_for(attrs):
nil = (attrs.get('nil') == 'true')
if nil:
return NilHandler()
type_name = attrs.get('type')
return type2handler.get(type_name, NoTypeHandler)()
class XmlProcessor(xml.sax.handler.ContentHandler):
def __init__(self):
xml.sax.handler.ContentHandler.__init__(self)
self._handler_stack = deque([NoTypeHandler()])
def startElement(self, name, attrs):
head = self._handler_stack[-1]
new_head = head.on_nested_start(name, attrs)
self._handler_stack.append(new_head)
def endElement(self, name):
h = self._handler_stack.pop()
self._handler_stack[-1].on_nested_end(name, h)
def characters(self, content):
self._handler_stack[-1].on_content(content)
def data(self):
top_dict = self._handler_stack[0].value
if top_dict:
return list(top_dict.values())[0]
def parse(xml_string):
processor = XmlProcessor()
io = BytesIO(xml_string)
parser = xml.sax.make_parser()
parser.setContentHandler(processor)
    for line in io:
        parser.feed(line)
    # finalize the incremental parse so the parser flushes its state
    # and verifies the document is well-formed
    parser.close()
    return processor.data()
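if __name__ == '__main__':
    # Usage sketch: typed elements are converted according to their
    # type="..." attribute and nil="true" maps to None; the sample XML
    # below is illustrative, not a real InSales payload.
    sample = (b'<response><order>'
              b'<id type="integer">42</id>'
              b'<total type="decimal">9.90</total>'
              b'<paid type="boolean">true</paid>'
              b'<comment nil="true"/>'
              b'</order></response>')
    print(parse(sample))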
| nailxx/pyinsales | insales/parsing.py | Python | mit | 6,197 |
# -*- coding: utf-8 -*-
from django.conf import settings
KWAY_ADMIN_LIST_EDITABLE = getattr(settings, 'KWAY_ADMIN_LIST_EDITABLE', True)
KWAY_ADMIN_LIST_PER_PAGE = getattr(settings, 'KWAY_ADMIN_LIST_PER_PAGE', 100)
KWAY_ADMIN_SHOW_ACTIONS = getattr(settings, 'KWAY_ADMIN_SHOW_ACTIONS', False)
KWAY_ADMIN_SHOW_LIST_FILTER = getattr(settings, 'KWAY_ADMIN_SHOW_LIST_FILTER', True)
KWAY_CACHE_NAME = getattr(settings, 'KWAY_CACHE_NAME', 'kway')
KWAY_CACHE_KEY_PREFIX = getattr(settings, 'KWAY_CACHE_KEY_PREFIX', 'kway')
KWAY_CACHE_TIMEOUT = getattr(settings, 'KWAY_CACHE_TIMEOUT', (60 * 60 * 24))  # 1 day
__cache_backend = settings.CACHES.get(KWAY_CACHE_NAME, None)
if not __cache_backend:
__cache_backend = {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'KEY_PREFIX': KWAY_CACHE_KEY_PREFIX,
'TIMEOUT': KWAY_CACHE_TIMEOUT
}
settings.CACHES[KWAY_CACHE_NAME] = __cache_backend
KWAY_LANGUAGES = getattr(settings, 'KWAY_LANGUAGES', settings.LANGUAGES)
if len(KWAY_LANGUAGES) > 0:
for language in KWAY_LANGUAGES:
        if language not in settings.LANGUAGES:
raise ValueError('KWAY_LANGUAGES cannot contain invalid languages: %s' % str(language))
else:
raise ValueError('KWAY_LANGUAGES must contain at least 1 language')
KWAY_DEFAULT_LANGUAGE = getattr(settings, 'KWAY_DEFAULT_LANGUAGE', settings.DEFAULT_LANGUAGE)
if KWAY_DEFAULT_LANGUAGE not in [language[0] for language in KWAY_LANGUAGES]:
    raise ValueError('KWAY_DEFAULT_LANGUAGE must be one of KWAY_LANGUAGES: %s' % str(KWAY_DEFAULT_LANGUAGE))
KWAY_USE_KEY_AS_DEBUG_VALUE = getattr(settings, 'KWAY_USE_KEY_AS_DEBUG_VALUE', True) and settings.DEBUG
KWAY_USE_KEY_AS_DEFAULT_VALUE = getattr(settings, 'KWAY_USE_KEY_AS_DEFAULT_VALUE', False)
KWAY_USE_KEY_AS_VALUE = getattr(settings, 'KWAY_USE_KEY_AS_VALUE', False)
KWAY_USE_MODELTRANSLATION = ('modeltranslation' in settings.INSTALLED_APPS)
KWAY_USE_SORL_THUMBNAIL = ('sorl.thumbnail' in settings.INSTALLED_APPS)
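# Usage sketch: the cache configured above is reachable through Django's
# cache registry, e.g.:
#
#   from django.core.cache import caches
#   kway_cache = caches[KWAY_CACHE_NAME]
#   kway_cache.set(KWAY_CACHE_KEY_PREFIX + ':example', 'value', KWAY_CACHE_TIMEOUT)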
| fabiocaccamo/django-kway | kway/settings.py | Python | mit | 1,864 |
import json
import logging
import mwparserfromhell as mwp
from wikitables.client import Client
from wikitables.readers import RowReader
from wikitables.util import TableJSONEncoder, ftag, ustr
log = logging.getLogger('wikitables')
def import_tables(article, lang='en'):
client = Client(lang)
page = client.fetch_page(article)
body = page['revisions'][0]['*']
## parse for tables
raw_tables = mwp.parse(body).filter_tags(matches=ftag('table'))
def _table_gen():
for idx, table in enumerate(raw_tables):
name = '%s[%s]' % (page['title'], idx)
yield WikiTable(name, table, lang)
return list(_table_gen())
class WikiTable():
"""
Parsed Wikipedia table
attributes:
- name(str): Table name in the format <article_name>[<table_index>]
- head(list): List of parsed column names as strings
- rows(list): List of <wikitables.Row> objects
"""
def __init__(self, name, raw_table, lang='en'):
self.name = ustr(name)
self.lang = lang
self.rows = []
self._head = []
self._node = raw_table
self._tr_nodes = raw_table.contents.filter_tags(matches=ftag('tr'))
self._read_header()
self._read_rows()
def json(self):
return json.dumps(self.rows, cls=TableJSONEncoder)
@property
def head(self):
return self._head
@head.setter
def head(self, val):
if not isinstance(val, list):
raise ValueError('table head must be provided as list')
self._head = val
self._read_rows()
def __repr__(self):
return "<WikiTable '%s'>" % self.name
def _log(self, value):
log.debug('%s: %s', self.name, value)
def _read_rows(self):
reader = RowReader(self.name, self._head, self.lang)
self.rows = list(reader.parse(*self._tr_nodes))
self._log('parsed %d rows %d cols' % (len(self.rows), len(self._head)))
def _read_header(self):
# read header
header_nodes = self._find_header_flat()
if not header_nodes:
header_nodes = self._find_header_row()
        if not header_nodes:
            # default headers are plain strings rather than parsed nodes,
            # so they must bypass the strip_code() loop below
            self._head = self._make_default_header()
            return
for header_node in header_nodes:
field_name = header_node.contents.strip_code().strip(' ')
self._head.append(ustr(field_name))
def _find_header_flat(self):
"""
Find header elements in a table, if possible. This case handles
situations where '<th>' elements are not within a row('<tr>')
"""
nodes = self._node.contents.filter_tags(matches=ftag('th'), recursive=False)
if not nodes:
return None
self._log('found header outside rows (%d <th> elements)' % len(nodes))
return nodes
def _find_header_row(self):
"""
Evaluate all rows and determine header position, based on
greatest number of 'th' tagged elements
"""
th_max = 0
header_idx = 0
for idx, tr_node in enumerate(self._tr_nodes):
th_count = len(tr_node.contents.filter_tags(matches=ftag('th')))
if th_count > th_max:
th_max = th_count
header_idx = idx
if not th_max:
return None
self._log('found header at row %d (%d <th> elements)' % \
(header_idx, th_max))
header_row = self._tr_nodes.pop(header_idx)
return header_row.contents.filter_tags(matches=ftag('th'))
def _make_default_header(self):
"""
Return a generic placeholder header based on the tables column count
"""
td_max = 0
for tr_node in self._tr_nodes:
td_count = len(tr_node.contents.filter_tags(matches=ftag('td')))
if td_count > td_max:
td_max = td_count
self._log('creating default header (%d columns)' % td_max)
return ['column%d' % n for n in range(0, td_max)]
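if __name__ == '__main__':
    # Usage sketch (requires network access; the article title is
    # illustrative): fetch a page, list its tables, dump the first as JSON.
    tables = import_tables('List of programming languages')
    for table in tables:
        print(table.name, table.head)
    print(tables[0].json())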
| bcicen/wikitables | wikitables/__init__.py | Python | mit | 4,005 |
# -*- coding: utf-8 -*-
# flake8: noqa
"""
A Python library for FilePreview's API.
"""
__title__ = 'filepreviews'
__version__ = '2.0.2'
__author__ = 'José Padilla'
__license__ = 'MIT'
__copyright__ = 'Copyright 2015 Blimp LLC'
VERSION = __version__
API_URL = 'https://api.filepreviews.io/v2'
from .api import FilePreviews
from .exceptions import (
FilePreviewsError, APIError, InvalidRequestError, AuthenticationError
)
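# Usage sketch (credentials and URL are placeholders; the keyword names
# follow this package's README):
#
#   fp = FilePreviews(api_key='...', api_secret='...')
#   result = fp.generate('https://example.com/sample.pdf')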
| GetBlimp/filepreviews-python | filepreviews/__init__.py | Python | mit | 429 |
# -*- coding: utf-8 -*-
from PyGcs import PyGcs
from SearchResult import SearchResult
| Naoto-Ida/PyGcs | PyGcs/__init__.py | Python | mit | 86 |
#!/usr/bin/env python
###
# (C) Copyright (2012-2015) Hewlett Packard Enterprise Development LP
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
###
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from builtins import range
from future import standard_library
standard_library.install_aliases()
import sys
import argparse
PYTHON_VERSION = sys.version_info[:3]
PY2 = (PYTHON_VERSION[0] == 2)
if PY2:
if PYTHON_VERSION < (2, 7, 9):
raise Exception('Must use Python 2.7.9 or later')
elif PYTHON_VERSION < (3, 4):
raise Exception('Must use Python 3.4 or later')
import hpOneView as hpov
from pprint import pprint
def acceptEULA(con):
    # See if we need to accept the EULA before we try to log in
    try:
        if con.get_eula_status() is True:
            print('EULA display needed')
            con.set_eula('no')
    except Exception as e:
        print('EXCEPTION:')
        print(e)
def login(con, credential):
    # Login with given credentials
    try:
        con.login(credential)
    except Exception:
        print('Login failed')
def get_lig(net, name):
ligs = net.get_ligs()
for lig in ligs:
if name:
print(lig['name'])
else:
pprint(lig)
def main():
parser = argparse.ArgumentParser(add_help=True,
formatter_class=argparse.RawTextHelpFormatter,
description='''
Retrieves a list of Logical Interconnect Groups
Usage: ''')
parser.add_argument('-a', dest='host', required=True,
help='''
HP OneView Appliance hostname or IP address''')
parser.add_argument('-u', dest='user', required=False,
default='Administrator',
help='''
HP OneView Username''')
parser.add_argument('-p', dest='passwd', required=True,
help='''
HP OneView Password''')
parser.add_argument('-c', dest='cert', required=False,
help='''
Trusted SSL Certificate Bundle in PEM (Base64 Encoded DER) Format''')
parser.add_argument('-y', dest='proxy', required=False,
help='''
    Proxy (host:port format)''')
parser.add_argument('-n', dest='name', required=False,
action='store_true',
help='''
Output only the names of the Logical Interconnect Groups''')
args = parser.parse_args()
credential = {'userName': args.user, 'password': args.passwd}
con = hpov.connection(args.host)
net = hpov.networking(con)
if args.proxy:
con.set_proxy(args.proxy.split(':')[0], args.proxy.split(':')[1])
if args.cert:
con.set_trusted_ssl_bundle(args.cert)
login(con, credential)
acceptEULA(con)
get_lig(net, args.name)
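# Example invocation (hostname and credentials are placeholders):
#   python get-logical-interconnect-groups.py -a oneview.example.com \
#       -u Administrator -p secret -n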
if __name__ == '__main__':
    sys.exit(main())
# vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
| miqui/python-hpOneView | examples/scripts/get-logical-interconnect-groups.py | Python | mit | 4,072 |
#!/usr/bin/env python
import xml.etree.ElementTree as ET
import xml.dom.minidom
class JunitXml(object):
""" A class which is designed to create a junit test xml file.
Note: currently this class is designed to return the junit xml file
in a string format (through the dump method).
"""
def __init__(self, testsuit_name, test_cases,
total_tests=None, total_failures=None):
self.testsuit_name = testsuit_name
self.test_cases = test_cases
self.failing_test_cases = self._get_failing_test_cases()
self.total_tests = total_tests
self.total_failures = total_failures
if total_tests is None:
self.total_tests = len(self.test_cases)
if total_failures is None:
self.total_failures = len(self.failing_test_cases)
self.root = ET.Element("testsuite",
{
"name": str(self.testsuit_name),
"failures": str(self.total_failures),
"tests": str(self.total_tests)
}
)
self.build_junit_xml()
def _get_failing_test_cases(self):
        return {case for case in self.test_cases if case.is_failure()}
def build_junit_xml(self):
""" create the xml tree from the given testsuite name and
testcase
"""
for case in self.test_cases:
test_case_element = ET.SubElement(
self.root, "testcase", {"name": str(case.name)})
if case.is_failure():
failure_element = ET.Element("failure")
failure_element.text = case.contents
test_case_element.append(failure_element)
def dump(self, pretty=True):
""" returns a string representation of the junit xml tree. """
out = ET.tostring(self.root)
if pretty:
dom = xml.dom.minidom.parseString(out)
out = dom.toprettyxml()
return out
class TestCase(object):
""" A junit test case representation class.
The JunitXml accepts a set of these and uses them to create
the junit test xml tree
"""
def __init__(self, name, contents, test_type=None):
self.name = name
self.contents = contents
if test_type is None:
test_type = ""
self.test_type = test_type
def is_failure(self):
""" returns True if this test case is a 'failure' type """
return self.test_type == "failure"
| dbaxa/python-junit-xml-output | junit_xml_output/__init__.py | Python | mit | 2,621 |
import logging
import xmlrpclib
import time
import subprocess
from socket import gethostname
from mode import Mode
from helper import get_config, get_server_url, wrap_xmlrpc
class Restore(Mode):
def _initialise(self, args):
logging.debug('Starting client mode checks on config file')
config = get_config(args.config_file)
server_url = get_server_url(config)
self._proxy = xmlrpclib.ServerProxy(server_url)
self._hostname = gethostname()
self._artifact = args.artifact
self._version = args.version
logging.debug('Client checks passed')
def _add_arguments(self):
self._parser.add_argument('artifact', metavar='ARTIFACT')
self._parser.add_argument('version', metavar='VERSION')
self._parser.add_argument('config_file', metavar='CONFIGFILE')
def run(self):
logging.debug('Retrieving artifact')
filename, restore_command, binary = wrap_xmlrpc(self._proxy.get_version, self._hostname, self._artifact, self._version)
with open(filename, 'wb') as handle:
handle.write(binary.data)
if restore_command:
subprocess.call(restore_command, shell=True)
logging.debug('Finished retrieving artifact')
def stop(self):
        pass
| mattdavis90/re-store-it | src/restore.py | Python | mit | 1295 |
from datetime import timedelta, date
from django.test import TestCase
from autoemails.actions import AskForWebsiteAction
from autoemails.models import Trigger, EmailTemplate
from workshops.models import Task, Role, Person, Event, Tag, Organization
class TestAskForWebsiteAction(TestCase):
def setUp(self):
# we're missing some tags
Tag.objects.bulk_create(
[
Tag(name="automated-email", priority=0),
Tag(name="SWC", priority=10),
Tag(name="DC", priority=20),
Tag(name="LC", priority=30),
Tag(name="TTT", priority=40),
]
)
# by default there's only self-organized organization, but it can't be
# used in AskForWebsiteAction
Organization.objects.bulk_create(
[
Organization(domain="carpentries.org", fullname="Instructor Training"),
Organization(
domain="librarycarpentry.org", fullname="Library Carpentry"
),
]
)
def testLaunchAt(self):
e1 = Event(slug="test-event1", host=Organization.objects.first())
e2 = Event(
slug="test-event2",
host=Organization.objects.first(),
start=date.today() + timedelta(days=7),
end=date.today() + timedelta(days=8),
)
e3 = Event(
slug="test-event3",
host=Organization.objects.first(),
start=date.today() + timedelta(days=-8),
end=date.today() + timedelta(days=-7),
)
e4 = Event(
slug="test-event4",
host=Organization.objects.first(),
start=date.today() + timedelta(days=70),
end=date.today() + timedelta(days=71),
)
# case 1: no context event
a1 = AskForWebsiteAction(
trigger=Trigger(action="test-action", template=EmailTemplate()),
)
self.assertEqual(a1.get_launch_at(), timedelta(days=-30))
# case 2: event with no start date
a2 = AskForWebsiteAction(
trigger=Trigger(action="test-action", template=EmailTemplate()),
objects=dict(event=e1),
)
self.assertEqual(a2.get_launch_at(), timedelta(days=-30))
# case 3: event with near start date
a3 = AskForWebsiteAction(
trigger=Trigger(action="test-action", template=EmailTemplate()),
objects=dict(event=e2),
)
self.assertEqual(a3.get_launch_at(), timedelta(hours=1))
# case 4: event with negative start date
a4 = AskForWebsiteAction(
trigger=Trigger(action="test-action", template=EmailTemplate()),
objects=dict(event=e3),
)
self.assertEqual(a4.get_launch_at(), timedelta(hours=1))
# case 5: event with start date in 10 weeks
a5 = AskForWebsiteAction(
trigger=Trigger(action="test-action", template=EmailTemplate()),
objects=dict(event=e4),
)
self.assertEqual(a5.get_launch_at(), timedelta(days=40, hours=1))
def testCheckConditions(self):
"""Make sure `check` works for various input data."""
# totally fake Task, Role and Event data
e = Event.objects.create(
slug="test-event",
host=Organization.objects.first(),
administrator=Organization.objects.get(domain="self-organized"),
start=date.today() + timedelta(days=7),
end=date.today() + timedelta(days=8),
)
e.tags.set(Tag.objects.filter(name__in=["automated-email"]))
p = Person.objects.create(
personal="Harry", family="Potter", email="[email protected]"
)
r = Role.objects.create(name="instructor")
s = Role.objects.create(name="supporting-instructor")
t = Task.objects.create(event=e, person=p, role=r)
# 1st case: everything is good
self.assertEqual(AskForWebsiteAction.check(e), True)
# 2nd case: event has no end date
# result: OK
e.end = None
e.save()
self.assertEqual(AskForWebsiteAction.check(e), True)
# 3rd case: event has no start date
# result: FAIL
e.start = None
e.save()
self.assertEqual(AskForWebsiteAction.check(e), False)
# bring back the good start date
e.start = date.today() + timedelta(days=7)
e.save()
self.assertEqual(AskForWebsiteAction.check(e), True)
# 4th case: event is tagged with one (or more) excluding tags
# result: FAIL
for tag in ["cancelled", "stalled", "unresponsive"]:
e.tags.add(Tag.objects.get(name=tag))
self.assertEqual(AskForWebsiteAction.check(e), False)
e.tags.remove(Tag.objects.get(name=tag))
# retest to make sure it's back to normal
self.assertEqual(AskForWebsiteAction.check(e), True)
# 5th case: not self-organized (centrally-organised)
# result: OK
e.administrator = Organization.objects.get(domain="carpentries.org")
e.save()
self.assertEqual(AskForWebsiteAction.check(e), True)
# retest to make sure it stays the same
e.administrator = Organization.objects.get(domain="self-organized")
self.assertEqual(AskForWebsiteAction.check(e), True)
# 6th case: website URL present
# result: FAIL
e.url = "http://example.org"
e.save()
self.assertEqual(AskForWebsiteAction.check(e), False)
# retest to make sure it's back to normal
e.url = ""
e.save()
self.assertEqual(AskForWebsiteAction.check(e), True)
# 7th case: no instructor tasks
# result: FAIL
r.name = "helper"
r.save()
self.assertEqual(AskForWebsiteAction.check(e), False)
r.name = "instructor"
r.save()
# retest to make sure it's back to normal
self.assertEqual(AskForWebsiteAction.check(e), True)
# 8th case: supporting role used
# result: OK
t.role = s
t.save()
self.assertEqual(AskForWebsiteAction.check(e), True)
def testContext(self):
"""Make sure `get_additional_context` works correctly."""
a = AskForWebsiteAction(
trigger=Trigger(action="test-action", template=EmailTemplate())
)
# method fails when obligatory objects are missing
with self.assertRaises(KeyError):
a.get_additional_context(dict()) # missing 'event'
with self.assertRaises(AttributeError):
# now both objects are present, but the method tries to execute
# `refresh_from_db` on them
a.get_additional_context(dict(event="dummy", task="dummy"))
e = Event.objects.create(
slug="test-event",
host=Organization.objects.first(),
start=date.today() + timedelta(days=7),
end=date.today() + timedelta(days=8),
country="GB",
venue="Ministry of Magic",
)
e.tags.set(Tag.objects.filter(name__in=["TTT", "SWC"]))
p1 = Person.objects.create(
personal="Harry", family="Potter", username="hpotter", email="[email protected]"
)
p2 = Person.objects.create(
personal="Hermione",
family="Granger",
username="hgranger",
email="[email protected]",
)
p3 = Person.objects.create(
personal="Ron",
family="Weasley",
username="rweasley",
email="[email protected]",
)
instructor = Role.objects.create(name="instructor")
supporting = Role.objects.create(name="supporting-instructor")
host = Role.objects.create(name="host")
Task.objects.create(event=e, person=p1, role=instructor)
Task.objects.create(event=e, person=p2, role=supporting)
Task.objects.create(event=e, person=p3, role=host)
ctx = a.get_additional_context(objects=dict(event=e))
self.assertEqual(
ctx,
dict(
workshop=e,
workshop_main_type="SWC",
dates=e.human_readable_date,
workshop_host=Organization.objects.first(),
regional_coordinator_email=["[email protected]"],
instructors=[p1, p2],
hosts=[p3],
all_emails=["[email protected]", "[email protected]"],
assignee="Regional Coordinator",
tags=["SWC", "TTT"],
),
)
def testRecipients(self):
"""Make sure AskForWebsiteAction correctly renders recipients.
They should get overwritten by AskForWebsiteAction during email
building."""
e = Event.objects.create(
slug="test-event",
host=Organization.objects.first(),
start=date.today() + timedelta(days=7),
end=date.today() + timedelta(days=8),
country="GB",
venue="Ministry of Magic",
)
e.tags.set(Tag.objects.filter(name="LC"))
p1 = Person.objects.create(
personal="Harry", family="Potter", username="hpotter", email="[email protected]"
)
p2 = Person.objects.create(
personal="Hermione",
family="Granger",
username="hgranger",
email="[email protected]",
)
instructor = Role.objects.create(name="instructor")
supporting = Role.objects.create(name="supporting-instructor")
Task.objects.bulk_create(
[
Task(event=e, person=p1, role=instructor),
Task(event=e, person=p2, role=supporting),
]
)
template = EmailTemplate.objects.create(
slug="sample-template",
subject="Welcome to {{ site.name }}",
to_header="[email protected]",
from_header="[email protected]",
cc_header="[email protected]",
bcc_header="[email protected]",
reply_to_header="{{ reply_to }}",
body_template="Sample text.",
)
trigger = Trigger.objects.create(
action="week-after-workshop-completion", template=template,
)
        a = AskForWebsiteAction(trigger=trigger, objects=dict(event=e))
email = a._email()
self.assertEqual(email.to, [p1.email, p2.email])
def test_event_slug(self):
e = Event.objects.create(
slug="test-event",
host=Organization.objects.first(),
start=date.today() + timedelta(days=7),
end=date.today() + timedelta(days=8),
country="GB",
venue="Ministry of Magic",
)
e.tags.set(Tag.objects.filter(name="LC"))
p1 = Person.objects.create(
personal="Harry", family="Potter", username="hpotter", email="[email protected]"
)
r = Role.objects.create(name="instructor")
t = Task.objects.create(event=e, person=p1, role=r)
a = AskForWebsiteAction(
trigger=Trigger(action="test-action", template=EmailTemplate()),
objects=dict(event=e, task=t),
)
self.assertEqual(a.event_slug(), "test-event")
def test_all_recipients(self):
e = Event.objects.create(
slug="test-event",
host=Organization.objects.first(),
start=date.today() + timedelta(days=7),
end=date.today() + timedelta(days=8),
country="GB",
venue="Ministry of Magic",
)
e.tags.set(Tag.objects.filter(name="LC"))
p1 = Person.objects.create(
personal="Harry", family="Potter", username="hpotter", email="[email protected]"
)
p2 = Person.objects.create(
personal="Hermione",
family="Granger",
username="hgranger",
email="[email protected]",
)
instructor = Role.objects.create(name="instructor")
supporting = Role.objects.create(name="supporting-instructor")
Task.objects.bulk_create(
[
Task(event=e, person=p1, role=instructor),
Task(event=e, person=p2, role=supporting),
]
)
a = AskForWebsiteAction(
trigger=Trigger(action="test-action", template=EmailTemplate()),
objects=dict(event=e),
)
self.assertEqual(
a.all_recipients(), "[email protected], [email protected]",
)
| swcarpentry/amy | amy/autoemails/tests/test_askforwebsiteaction.py | Python | mit | 12,577 |
import sys
import itertools
from collections import deque
from formula import Formula, Atomic, And, Or, Not, Imply
recursion = 0
DEBUG = 1
class NotWellFormedFormula(Exception):
def __init__(self, formula):
self.message = '"{}" is not a WFF'.format(formula)
super(NotWellFormedFormula, self).__init__(self.message)
class RecursiveDescent(object):
""" Parser an expression using Recursive-Descent algorithm
Grammar:
I ::= F > I | F
F ::= A v F | A
A ::= B & A | B
B ::= ~C | C
C ::= ~C | (I) | p
"""
def __init__(self, formula):
self.formula = formula
self.idx = 0
def _next(self):
if not len(self.formula) == self.idx + 1:
self.idx += 1
def debug(self):
if DEBUG == 0:
return
f_stack = []
def get_frames(frame):
try:
f_stack.append(frame.f_back.f_code.co_name)
except AttributeError:
return None
return get_frames(frame.f_back)
get_frames(sys._getframe())
print self.formula
print '{}^{}{} (called by: {})\n'.format(
' ' * self.idx,
' ' * 10,
sys._getframe().f_back.f_code.co_name,
sys._getframe().f_back.f_back.f_code.co_name
)
indent = 0
count = 0
for e in reversed(f_stack):
print '{}{}{}'.format(count, '.' * indent, e)
indent += 4
count += 1
print '\n------------------------------\n'
def parse_atomic(self):
""" Parse atomic """
if self.formula[self.idx] in Atomic.token:
form = Atomic(self.formula[self.idx])
self._next()
return form
return False
def parse_op(self):
""" Parse opening parenthesis """
if self.formula[self.idx] == Formula.group['open']:
self._next()
return True
return False
def parse_cp(self):
""" Parse closing parenthesis """
self.debug()
if self.formula[self.idx] == Formula.group['close']:
self._next()
return True
return False
def parse_not(self):
""" Parse negation """
if self.formula[self.idx] in Not.token:
self._next()
return True
return False
def parse_and(self):
""" Parse conjunction """
if self.formula[self.idx] in And.token:
self._next()
return True
return False
def parse_or(self):
""" Parse disjunction """
self.debug()
if self.formula[self.idx] in Or.token:
self._next()
return True
return False
def parse_cond(self):
""" Parse conditional """
if self.formula[self.idx] in Imply.token:
self._next()
return True
return False
    def parse_bicond(self):
        """ Parse biconditional """
        pass
def parse_cond_rule(self):
""" Conditional Rule """
self.debug()
left = self.parse_or_rule()
if not left:
return False
if not self.parse_cond():
return left if left else False
right = self.parse_cond_rule()
if right:
return Imply(left, right)
return left if left else False
def parse_or_rule(self):
""" Or Rule """
self.debug()
left = self.parse_and_rule()
if not left:
return False
if not self.parse_or():
return left if left else False
right = self.parse_or_rule()
if right:
return Or(left, right)
return left if left else False
def parse_and_rule(self):
""" And Rule """
self.debug()
left = self.parse_not_rule() or self.parse_almost_rule()
if not left:
return False
if not self.parse_and():
return left if left else False
right = self.parse_form()
if right:
return And(left, right)
return left if left else False
def parse_not_rule(self):
""" Negating formulas """
self.debug()
if not self.parse_not():
return False
form = self.parse_not_rule() or self.parse_almost_rule() or \
self.parse_atom()
if not form:
return False
return Not(form)
def parse_almost_rule(self):
""" Almost formula """
self.debug()
form = self.parse_group() or self.parse_atom()
if not form:
return False
return form
def parse_group(self):
""" Parenth """
self.debug()
if not self.parse_op():
return False
form = self.parse_cond_rule()
if not form:
return False
if not self.parse_cp():
return False
return form
def parse_atom(self):
""" Atomic """
self.debug()
return self.parse_atomic()
def parse_form(self):
""" Parse formula """
self.debug()
return self.parse_cond_rule()
class RPN(object):
""" Parse an expression through RPN """
@staticmethod
def parse(formula):
""" Parse an expression transforming it to RPN and then to SAT """
def to_rpn(formula):
""" Shunting Yard algorithm by Dijkstra in 1961
Transforms an infix expression to a posfix one
"""
output = deque()
stack = []
operators = list(itertools.chain.from_iterable(
e.token for e in [And, Or, Imply, Not]))
for token in formula:
if token in Atomic.token:
output.append(token)
if token in operators or token == Formula.group['open']:
stack.append(token)
if token == Formula.group['close']:
while stack:
op = stack.pop()
if op == Formula.group['open']:
break
output.append(op)
else:
raise NotWellFormedFormula(formula)
                    # the stack may be empty once the matching '(' is popped
                    if stack and stack[-1] in operators:
                        output.append(stack.pop())
while stack:
op = stack.pop()
if op in Formula.group.values():
raise NotWellFormedFormula(formula)
output.append(op)
return output
formula = to_rpn(formula)
stack = []
diadic_op = {k: v for v in [And, Or, Imply] for k in v.token}
for val in formula:
if val in diadic_op.keys():
op_right = stack.pop()
op_left = stack.pop()
stack.append(diadic_op[val](op_left, op_right))
elif val in Not.token:
op = stack.pop()
result = Not(op)
stack.append(result)
elif val in Atomic.token:
stack.append(Atomic(val))
else:
raise ValueError('{} is not a valid symbol'.format(val))
return stack.pop()
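if __name__ == '__main__':
    # Usage sketch: parse "(p&q)>r" with the RPN-based parser. The operator
    # symbols come from the grammar in RecursiveDescent's docstring; the
    # atomics 'p', 'q', 'r' are assumed to be in Atomic.token.
    print RPN.parse('(p&q)>r')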
| Woraufhin/logic | parsers.py | Python | mit | 7,273 |
#!/usr/bin/python3
import serial
s = serial.Serial("/dev/ttyAMA0", 57600)
while True: print(s.readline())
| Restioson/Rover3 | test/serial-test.py | Python | mit | 103 |
# encoding: utf8
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('recipebook', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='ingredientline',
name='preparation',
field=models.CharField(blank=True, max_length=40, null=True),
preserve_default=True,
),
]
| ateoto/django-recipebook | recipebook/migrations/0002_ingredientline_preparation.py | Python | mit | 415 |
from logging import getLogger
from multiprocessing import Process
class LoggableProcess(Process):
def start(self, *args, **kwargs):
super().start(*args, **kwargs)
L.debug(
'started %s%s process %s', self.name,
' daemon' if self.daemon else '', self.ident)
class StoppableProcess(LoggableProcess):
def stop(
self,
sigterm_timeout_in_seconds=3,
sigkill_timeout_in_seconds=1):
'''
Stop the process using SIGTERM and, if necessary, SIGKILL.
See watchgod/main.py for original code.
'''
L.debug('terminating %s process %s', self.name, self.ident)
self.terminate()
self.join(sigterm_timeout_in_seconds)
if self.exitcode is None:
L.debug('killing %s process %s', self.name, self.ident)
self.kill()
self.join(sigkill_timeout_in_seconds)
L = getLogger(__name__)
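# Usage sketch: run a worker as a StoppableProcess and stop it cleanly.
#
#   def work():
#       while True:
#           pass
#
#   p = StoppableProcess(name='worker', target=work)
#   p.start()
#   p.stop()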
| crosscompute/crosscompute | crosscompute/macros/process.py | Python | mit | 944 |
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from builtins import *
from future.standard_library import install_aliases
install_aliases()
import vim
import sys
def proc():
pass
if __name__ == '__main__':
print(sys.argv)
| oxnz/dot-files | .vim/pythonx/maxprod/vimext.py | Python | mit | 296 |
import cProfile
import StringIO
import pstats
pr = cProfile.Profile()
pr.enable()
import atxcf
pr.disable()
s = StringIO.StringIO()
ps = pstats.Stats(pr, stream=s).sort_stats('cumulative')
ps.print_stats()
print s.getvalue()
| transfix/atxcf | test_profile.py | Python | mit | 229 |
# -*- coding: utf-8 -*-
# Django settings for basic pinax project.
import os.path
import posixpath
import pinax
PINAX_ROOT = os.path.abspath(os.path.dirname(pinax.__file__))
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
# tells Pinax to use the default theme
PINAX_THEME = 'default'
DEBUG = True
TEMPLATE_DEBUG = DEBUG
# tells Pinax to serve media through django.views.static.serve.
SERVE_MEDIA = DEBUG
INTERNAL_IPS = (
'127.0.0.1',
)
ADMINS = (
# ('Your Name', '[email protected]'),
)
MANAGERS = ADMINS
DATABASE_ENGINE = 'sqlite3' # 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'ado_mssql'.
DATABASE_NAME = 'dev.db' # Or path to database file if using sqlite3.
DATABASE_USER = '' # Not used with sqlite3.
DATABASE_PASSWORD = '' # Not used with sqlite3.
DATABASE_HOST = '' # Set to empty string for localhost. Not used with sqlite3.
DATABASE_PORT = '' # Set to empty string for default. Not used with sqlite3.
# Local time zone for this installation. Choices can be found here:
# http://www.postgresql.org/docs/8.1/static/datetime-keywords.html#DATETIME-TIMEZONE-SET-TABLE
# although not all variations may be possible on all operating systems.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'US/Eastern'
# Language code for this installation. All choices can be found here:
# http://www.w3.org/TR/REC-html40/struct/dirlang.html#langcodes
# http://blogs.law.harvard.edu/tech/stories/storyReader$15
LANGUAGE_CODE = 'en'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = os.path.join(PROJECT_ROOT, 'site_media', 'media')
# URL that handles the media served from MEDIA_ROOT.
# Example: "http://media.lawrence.com"
MEDIA_URL = '/site_media/media/'
# Absolute path to the directory that holds static files like app media.
# Example: "/home/media/media.lawrence.com/apps/"
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'site_media', 'static')
# URL that handles the static files like app media.
# Example: "http://media.lawrence.com"
STATIC_URL = '/site_media/static/'
# Additional directories which hold static files
STATICFILES_DIRS = (
('confista', os.path.join(PROJECT_ROOT, 'media')),
('pinax', os.path.join(PINAX_ROOT, 'media', PINAX_THEME)),
)
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = posixpath.join(STATIC_URL, "admin/")
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'r#4*%-)d1d__z_uq39=n*jm$)_%d&ys$-&rezsus#fl7(vb8mu'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.load_template_source',
'django.template.loaders.app_directories.load_template_source',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django_openid.consumer.SessionConsumer',
'account.middleware.LocaleMiddleware',
'django.middleware.doc.XViewMiddleware',
'pagination.middleware.PaginationMiddleware',
'pinax.middleware.security.HideSensistiveFieldsMiddleware',
'django.contrib.flatpages.middleware.FlatpageFallbackMiddleware',
# 'debug_toolbar.middleware.DebugToolbarMiddleware',
)
ROOT_URLCONF = 'confista.urls'
TEMPLATE_DIRS = (
os.path.join(PROJECT_ROOT, "templates"),
os.path.join(PINAX_ROOT, "templates", PINAX_THEME),
)
TEMPLATE_CONTEXT_PROCESSORS = (
"django.core.context_processors.auth",
"django.core.context_processors.debug",
"django.core.context_processors.i18n",
"django.core.context_processors.media",
"django.core.context_processors.request",
"pinax.core.context_processors.pinax_settings",
"notification.context_processors.notification",
"announcements.context_processors.site_wide_announcements",
"account.context_processors.openid",
"account.context_processors.account",
"menu.context_processors.menu",
)
INSTALLED_APPS = (
# included
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.humanize',
'pinax.templatetags',
# external
'notification', # must be first
'django_openid',
'emailconfirmation',
'mailer',
'announcements',
'pagination',
'timezones',
'ajax_validation',
'uni_form',
'staticfiles',
'debug_toolbar',
'blog',
'tagging',
'wiki',
'gravatar',
'threadedcomments',
'django.contrib.markup',
# Below required for blog
'tag_app',
# internal (for now)
'basic_profiles',
'account',
'signup_codes',
'about',
'django.contrib.admin',
'django.contrib.flatpages',
# confista specific
'talk',
'deadline',
'statistics',
'delegate',
'schedule',
'menu',
)
ABSOLUTE_URL_OVERRIDES = {
"auth.user": lambda o: "/profiles/profile/%s/" % o.username,
}
MARKUP_FILTER_FALLBACK = 'none'
MARKUP_CHOICES = (
('restructuredtext', u'reStructuredText'),
('textile', u'Textile'),
('markdown', u'Markdown'),
('creole', u'Creole'),
)
WIKI_MARKUP_CHOICES = MARKUP_CHOICES
AUTH_PROFILE_MODULE = 'delegate.Delegate_Profile'
NOTIFICATION_LANGUAGE_MODULE = 'account.Account'
ACCOUNT_OPEN_SIGNUP = True
ACCOUNT_REQUIRED_EMAIL = False
ACCOUNT_EMAIL_VERIFICATION = False
EMAIL_CONFIRMATION_DAYS = 2
EMAIL_DEBUG = DEBUG
CONTACT_EMAIL = "[email protected]"
SITE_NAME = "Pinax"
LOGIN_URL = "/account/login/"
LOGIN_REDIRECT_URLNAME = "what_next"
# local_settings.py can be used to override environment-specific settings
# like database and email that differ between development and production.
try:
from local_settings import *
except ImportError:
pass
| theju/confista | settings.py | Python | mit | 6,157 |
from tornkts.base.server_response import ServerError
class ToDictMixin(object):
MODE_INCLUDE = 1
MODE_EXCLUDE = 0
def to_dict(self, name='', *args, **kwargs):
depth = kwargs.get('depth', 0)
kwargs.update({'depth': depth + 1})
if depth == 0:
fields = kwargs.get('fields', None)
else:
fields = kwargs.get(name + '_fields', None)
next_fields = {}
if isinstance(fields, dict):
            # iterate over a copy: nested "a.b" keys add entries to fields below
            for k, mode in list(fields.items()):
field = k.split('.', 1)
if len(field) > 1:
entity_title = field[0]
fields_chain = field[1]
kwargs_key = entity_title + '_fields'
if mode == ToDictMixin.MODE_INCLUDE:
fields[field[0]] = mode
if next_fields.get(kwargs_key) is None:
next_fields[kwargs_key] = {}
next_fields[kwargs_key][fields_chain] = mode
kwargs.update(next_fields)
dict_object = self.to_dict_impl(**kwargs)
if isinstance(fields, dict):
mode = None
for v in fields.values():
if v != mode and mode is not None:
raise ServerError(ServerError.MIX_FIELDS_FILTER)
mode = v % 2 # MODE_EXCLUDE or MODE_INCLUDE
            # iterate over a copy since entries may be popped while filtering
            for field in list(dict_object.keys()):
if mode == ToDictMixin.MODE_INCLUDE:
if field not in fields.keys():
dict_object.pop(field, False)
elif mode == ToDictMixin.MODE_EXCLUDE:
if field in fields.keys():
dict_object.pop(field, False)
return dict_object
def to_dict_impl(self, **kwargs):
return {}
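if __name__ == '__main__':
    # Usage sketch: a mixin subclass exposing three fields, filtered down
    # to two with MODE_INCLUDE (the field names are illustrative).
    class UserDTO(ToDictMixin):
        def to_dict_impl(self, **kwargs):
            return {'id': 1, 'name': 'demo', 'password': 'secret'}
    print(UserDTO().to_dict(fields={'id': ToDictMixin.MODE_INCLUDE,
                                    'name': ToDictMixin.MODE_INCLUDE}))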
| ktsstudio/tornkts | tornkts/mixins/to_dict_mixin.py | Python | mit | 1,822 |
# -*- coding: utf-8 -*-
"""
MultiPlotWidget.py - Convenience class--GraphicsView widget displaying a MultiPlotItem
Copyright 2010 Luke Campagnola
Distributed under MIT/X11 license. See license.txt for more information.
"""
from .GraphicsView import GraphicsView
from ..graphicsItems import MultiPlotItem as MultiPlotItem
__all__ = ['MultiPlotWidget']
class MultiPlotWidget(GraphicsView):
"""Widget implementing a graphicsView with a single PlotItem inside."""
def __init__(self, parent=None):
GraphicsView.__init__(self, parent)
self.enableMouse(False)
self.mPlotItem = MultiPlotItem.MultiPlotItem()
self.setCentralItem(self.mPlotItem)
## Explicitly wrap methods from mPlotItem
#for m in ['setData']:
#setattr(self, m, getattr(self.mPlotItem, m))
def __getattr__(self, attr): ## implicitly wrap methods from plotItem
if hasattr(self.mPlotItem, attr):
m = getattr(self.mPlotItem, attr)
if hasattr(m, '__call__'):
return m
raise NameError(attr)
def widgetGroupInterface(self):
return (None, MultiPlotWidget.saveState, MultiPlotWidget.restoreState)
def saveState(self):
return {}
#return self.plotItem.saveState()
def restoreState(self, state):
pass
#return self.plotItem.restoreState(state)
def close(self):
self.mPlotItem.close()
self.mPlotItem = None
self.setParent(None)
GraphicsView.close(self)
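# Usage sketch (requires a Qt event loop; the import path assumes
# pyqtgraph's bundled Qt bindings):
#
#   from acq4.pyqtgraph.Qt import QtGui
#   app = QtGui.QApplication([])
#   w = MultiPlotWidget()
#   w.show()
#   app.exec_()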
| hiuwo/acq4 | acq4/pyqtgraph/widgets/MultiPlotWidget.py | Python | mit | 1,546 |
import numpy
import matplotlib
import matplotlib.mlab as mlab
import matplotlib.pyplot as plt
from matplotlib.ticker import NullFormatter
from matplotlib import rcParams
from scipy.optimize import curve_fit
from data_plots.utils import labeler, titler
rcParams['text.usetex'] = True
def scatter_hist(x, y, *args,
bins=10,
linestyle='r--', scatterstyle='k+',
histtype='stepfilled', facecolor='#FFFFFF', hatch='/',
show_mean=True, show_std=True,
**kwargs):
# no labels
nullfmt = NullFormatter()
# definitions for axes
left, width = 0.1, 0.65
bottom, height = 0.1, 0.65
bottom_h = left_h = left+width+0.02
rect_scatter = [left, bottom, width, height]
rect_histx = [left, bottom_h, width, 0.2]
rect_histy = [left_h, bottom, 0.2, height]
# start with a rectangular Figure
fig = plt.figure(1, figsize=(8, 8))
axScatter = fig.add_axes(rect_scatter)
axHistx = fig.add_axes(rect_histx)
axHisty = fig.add_axes(rect_histy)
# no labels on some axes
axHistx.xaxis.set_major_formatter(nullfmt)
axHisty.yaxis.set_major_formatter(nullfmt)
# the scatter plot:
axScatter.plot(x, y, scatterstyle)
# determine limits
xmin, ymin = numpy.min(x), numpy.min(y)
xmax, ymax = numpy.max(x), numpy.max(y)
x_mean, y_mean = x.mean(), y.mean()
x_std, y_std = x.std(), y.std()
# xlims = ((numpy.array([-xmin, xmax]) // binwidth) + 1) * binwidth
# ylims = ((numpy.array([-ymin, ymax]) // binwidth) + 1) * binwidth
xbins = numpy.linspace(xmin, xmax, bins)
ybins = numpy.linspace(ymin, ymax, bins)
# xbins = numpy.arange(-xlims[0], xlims[1]+binwidth, binwidth)
# ybins = numpy.arange(-ylims[0], ylims[1]+binwidth, binwidth)
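    # note: 'normed' was deprecated in matplotlib 2.x and removed in 3.x;
    # on newer matplotlib pass density=True instead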
n, xbins, xpatches = axHistx.hist(x, bins=xbins, normed=1,
histtype=histtype, facecolor=facecolor,
hatch=hatch)
n, ybins, ypatches = axHisty.hist(y, bins=ybins, normed=1,
histtype=histtype, facecolor=facecolor,
hatch=hatch,
orientation='horizontal')
mean_formatter = r'$\mu = {0:.5f}$'.format
std_formatter = r'$\sigma = {0:.5f}$'.format
xhandles, yhandles = [], []
xlabels, ylabels = [], []
if show_mean:
p = plt.Rectangle((0, 0), 1, 1, fc="r")
xlabels.append(mean_formatter(x_mean))
ylabels.append(mean_formatter(y_mean))
xhandles.append(p)
yhandles.append(p)
if show_std:
p = plt.Rectangle((0, 0), 1, 1, fc="b")
xlabels.append(std_formatter(x_std))
ylabels.append(std_formatter(y_std))
xhandles.append(p)
yhandles.append(p)
if show_mean or show_std:
axHistx.legend(xhandles, xlabels,
fontsize='small', loc='upper right')
        axHisty.legend(yhandles, ylabels,
                       fontsize='small', loc='upper right')
xpdf = mlab.normpdf(xbins, x_mean, x_std)
ypdf = mlab.normpdf(ybins, y_mean, y_std)
axHistx.plot(xbins, xpdf, linestyle)
axHisty.plot(ypdf, ybins, linestyle)
axHistx.set_xlim(axScatter.get_xlim())
axHisty.set_ylim(axScatter.get_ylim())
axHistx.locator_params(tight=False, nbins=3)
axHisty.locator_params(tight=False, nbins=3)
axHistx = titler(axHistx, **kwargs)
axScatter = labeler(axScatter, **kwargs)
return fig
def scatter_hist_from_file(input, *args, usecols=range(2), **kwargs):
x, y = numpy.loadtxt(input, usecols=usecols, unpack=True)
return scatter_hist(x, y, *args, **kwargs)
def _gauss(x, *p):
A, mu, sigma = p
return A*numpy.exp(-(x-mu)**2/(2.*sigma**2))
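if __name__ == '__main__':
    # Usage sketch: 1000 correlated Gaussian samples; assumes titler/labeler
    # from data_plots.utils tolerate being called without extra kwargs.
    rng = numpy.random.RandomState(0)
    xs = rng.normal(0.0, 1.0, 1000)
    ys = 0.5 * xs + rng.normal(0.0, 0.5, 1000)
    scatter_hist(xs, ys, bins=20)
    plt.show()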
| astroswego/data-plots | src/data_plots/stats.py | Python | mit | 3,782 |
from python.ci.common import flatten_map, get_deploy_info, create_target_matrix, get_script_locations
from python.common import try_get_key
from collections import defaultdict
from copy import deepcopy
def github_gen_config(build_info, repo_dir):
script_loc = try_get_key(build_info, 'script_location', 'ci')
make_loc = try_get_key(build_info, 'makefile_location', 'ci')
linux_targets = create_target_matrix(repo_dir + '/' + make_loc, 'linux', build_info)
macos_targets = create_target_matrix(repo_dir + '/' + make_loc, 'osx', build_info)
windows_targets = create_target_matrix(repo_dir + '/' + make_loc, 'windows', build_info)
linux_targets = {x: {'variant': x} for x in linux_targets }
macos_targets = {x: {'variant': x} for x in macos_targets }
windows_targets = {x: {'variant': x} for x in windows_targets }
include_branches = [x['name'] for x in try_get_key(build_info, 'branches', [])]
unix_env = {
'CONFIGURATION': 'Release',
'GENERATE_PROGRAMS': 'ON',
'BUILDVARIANT': '${{ matrix.variant }}',
'ACTIONS': '1',
'MAKEFILE_DIR': 'toolchain/makers',
'SOURCE_DIR': '${{ github.workspace }}/source',
'BUILD_DIR': '${{ github.workspace }}/build',
'BUILD_REPO_URI': '${{ github.repository }}',
'BUILD_REPO_BRANCH': '${{ github.ref }}',
'BUILD_REPO_EVENT': 'push',
'BUILD_REPO_ID': '',
'BUILD_REPO_URL': 'https://github.com/${{ github.repository }}',
'TRAVIS_COMMIT': '${{ github.sha }}',
'TRAVIS_REPO_SLUG': '${{ github.repository }}',
'GITHUB_TOKEN': '${{ secrets.GITHUB_TOKEN }}'
}
linux_env = unix_env.copy()
osx_env = unix_env.copy()
linux_env['TRAVIS_OS_NAME'] = 'linux'
osx_env['TRAVIS_OS_NAME'] = 'osx'
windows_env = {
'AZURE_IMAGE': 'vs2019-win2019',
'VSVERSION': '2019',
'OPENSSL_ROOT_DIR': '$(Build.SourcesDirectory)/openssl-libs/',
'ACTIONS': '1',
'BUILD_REPO_URI': '${{ github.repository }}',
'BUILD_REPO_BRANCH': '${{ github.ref }}',
'BUILD_REPO_EVENT': 'push',
'BUILD_REPO_ID': '${{ matrix.variant }}',
'BUILD_REPO_URL': 'https://github.com/${{ github.repository }}',
'GITHUB_TOKEN': '${{ secrets.GITHUB_TOKEN }}',
'CMAKE_BIN': 'cmake.exe',
'MAKEFILE_DIR': 'toolchain/makers',
'SAME_BUILD_DIR': '1',
'NOBUILD': '1',
'SOURCE_DIR': '${{ github.workspace }}/source',
'BUILD_DIR': '${{ github.workspace }}/build',
'APPVEYOR_BUILD_FOLDER': '${{ github.workspace }}/build',
'APPVEYOR_REPO_NAME': '${{ github.repository }}',
'APPVEYOR_REPO_COMMIT': '${{ github.sha }}',
'BUILDVARIANT': '${{ matrix.variant }}',
'GENERATE_PROGRAMS': 'ON',
'CONFIGURATION': 'Debug'
}
linux_strategy = defaultdict(list)
macos_strategy = defaultdict(list)
windows_strategy = defaultdict(list)
android_strategy = defaultdict(list)
for val in linux_targets.values():
for key in val:
if val[key].startswith('android.'):
android_strategy[key].append(val[key])
else:
linux_strategy[key].append(val[key])
for val in macos_targets.values():
for key in val:
macos_strategy[key].append(val[key])
for val in windows_targets.values():
for key in val:
windows_strategy[key].append(val[key])
linux_strategy = dict(linux_strategy)
macos_strategy = dict(macos_strategy)
windows_strategy = dict(windows_strategy)
android_strategy = dict(android_strategy)
checkout_step = {
'uses': 'actions/checkout@v2',
'with': {
'submodules': True,
'path': 'source'
}
}
package_step = [
{
'name': 'Compress executables',
'run': 'source/cb compress-usr-dir bin'
},
{
'name': 'Compress libraries',
'run': 'source/cb compress-usr-dir libraries'
},
{
'name': 'Uploading artifacts',
'uses': 'actions/upload-artifact@v2',
'with': {
                'name': '${{ matrix.variant }}',
'path': '*.tar.bz2'
}
}
]
return {
'name': 'CMake Build',
'on': {
'push': {
'branches': ['master', 'testing', 'feature**']
}
},
'jobs': {
'Linux': {
'runs-on': 'ubuntu-18.04',
'strategy': {
'fail-fast': False,
'matrix': linux_strategy
},
'env': linux_env.copy(),
'steps': [
deepcopy(checkout_step),
{
'name': 'Select Docker container',
'run': 'sh ${{ github.workspace }}/source/.github/cmake/select/${{ matrix.variant }}.sh'
},
{
'name': 'Building project',
'run': 'source/cb docker-build -GNinja'
}
] + deepcopy(package_step)
},
'Coverage': {
'runs-on': 'ubuntu-18.04',
'strategy': {
'fail-fast': False,
'matrix': {'variant': ['coverage']}
},
'env': linux_env.copy(),
'steps': [
deepcopy(checkout_step),
{
'name': 'Select Docker container',
'run': 'sh ${{ github.workspace }}/source/.github/cmake/select/${{ matrix.variant }}.sh'
},
{
'name': 'Building project',
'run': 'source/cb docker-build -GNinja'
},
{
'name': 'Running tests',
'env': { 'BUILD_TARGET': 'CoverageTest' },
'run': 'source/cb docker-build -GNinja'
},
{
'name': 'Gathering coverage info',
'uses': 'codecov/codecov-action@v1'
}
]
},
'Android': {
'runs-on': 'ubuntu-18.04',
'strategy': {
'fail-fast': False,
'matrix': android_strategy
},
'env': linux_env.copy(),
'steps': [
deepcopy(checkout_step),
{
'name': 'Select Docker container',
'run': 'echo "::set-env name=CONTAINER::hbirch/android:r21"'
},
{
'name': 'Building project',
'run': 'source/cb docker-build -GNinja'
}
] + deepcopy(package_step)
},
'macOS': {
'runs-on': 'macos-latest',
'strategy': {
'fail-fast': False,
'matrix': macos_strategy
},
'env': osx_env.copy(),
'steps': [
deepcopy(checkout_step),
{
'name': 'Installing system dependencies',
'run': 'source/toolchain/ci/travis-deps.sh'
},
{
'name': 'Building project',
'run': 'source/cb ci-build -GXcode',
'env': {
'BUILD_TARGET': 'ALL_BUILD'
}
}
] + deepcopy(package_step)
},
'Windows': {
'runs-on': 'windows-2019',
'strategy': {
'fail-fast': False,
'matrix': windows_strategy
},
'env': windows_env,
'steps': [
deepcopy(checkout_step),
{
'run': 'source/toolchain/ci/appveyor-deps.ps1',
'shell': 'powershell',
'name': 'Downloading dependencies'
},
{
'run': 'echo "::add-path::C:/Program Files/Nasm"',
'name': 'Add Nasm to PATH'
},
{
'run': 'source/toolchain/ci/appveyor-build.ps1',
'shell': 'powershell',
'name': 'Configuring project'
},
{
'run': '& cmake.exe --build $env:BUILD_DIR --target install --config $env:CONFIGURATION',
'shell': 'powershell',
'name': 'Building project'
},
{
'run': 'source/toolchain/ci/appveyor-deploy.ps1',
'shell': 'powershell',
'name': 'Deploying artifacts',
'continue-on-error': True
},
{
'name': 'Uploading artifacts',
'uses': 'actions/upload-artifact@v2',
'with': {
                        'name': '${{ matrix.variant }}',
'path': 'build/*.7z'
}
}
]
}
}
}
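# Usage sketch (build_info/repo_dir come from the surrounding CI tooling;
# serializing with PyYAML is an assumption, not part of this module):
#
#   import yaml
#   workflow = github_gen_config(build_info, repo_dir)
#   with open('.github/workflows/build.yml', 'w') as handle:
#       yaml.safe_dump(workflow, handle, sort_keys=False)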
| hbirchtree/coffeecutie-imgui | toolchain/python/ci/actions.py | Python | mit | 10,299 |
"""C interface for behaviors support (a.k.a windowless controls)."""
import enum
import ctypes
from sciter.capi.scdom import HELEMENT
from sciter.capi.scvalue import SCITER_VALUE
from sciter.capi.scgraphics import HGFX
from sciter.capi.sctypes import *
import sciter.capi.sctiscript as sctiscript
class EVENT_GROUPS(enum.IntEnum):
"""event groups."""
    HANDLE_INITIALIZATION = 0x0000        # attached/detached
    HANDLE_MOUSE = 0x0001                 # mouse events
    HANDLE_KEY = 0x0002                   # key events
    HANDLE_FOCUS = 0x0004                 # focus events; if this flag is set it also means that the element it is attached to is focusable
    HANDLE_SCROLL = 0x0008                # scroll events
    HANDLE_TIMER = 0x0010                 # timer event
    HANDLE_SIZE = 0x0020                  # size changed event
    HANDLE_DRAW = 0x0040                  # drawing request (event)
    HANDLE_DATA_ARRIVED = 0x0080          # requested data has been delivered
    HANDLE_BEHAVIOR_EVENT = 0x0100        # logical, synthetic events: BUTTON_CLICK, HYPERLINK_CLICK, etc., a.k.a. notifications from intrinsic behaviors
    HANDLE_METHOD_CALL = 0x0200           # behavior specific methods
    HANDLE_SCRIPTING_METHOD_CALL = 0x0400 # behavior specific methods
    HANDLE_TISCRIPT_METHOD_CALL = 0x0800  # behavior specific methods using direct tiscript::value's
    HANDLE_EXCHANGE = 0x1000              # system drag-n-drop
    HANDLE_GESTURE = 0x2000               # touch input events
    HANDLE_ALL = 0xFFFF                   # all of them
    SUBSCRIPTIONS_REQUEST = 0xFFFFFFFF    # special value for getting subscription flags
class PHASE_MASK(enum.IntEnum):
"""."""
BUBBLING = 0
SINKING = 0x8000
HANDLED = 0x10000
SINKING_HANDLED = HANDLED|SINKING
class MOUSE_BUTTONS(enum.IntEnum):
"""."""
MAIN_MOUSE_BUTTON = 1
PROP_MOUSE_BUTTON = 2
MIDDLE_MOUSE_BUTTON = 4
class KEYBOARD_STATES(enum.IntEnum):
CONTROL_KEY_PRESSED = 0x1
SHIFT_KEY_PRESSED = 0x2
ALT_KEY_PRESSED = 0x4
class INITIALIZATION_EVENTS(enum.IntEnum):
BEHAVIOR_DETACH = 0
BEHAVIOR_ATTACH = 1
class INITIALIZATION_PARAMS(ctypes.Structure):
_fields_ = [
("cmd", UINT), # INITIALIZATION_EVENTS
]
class DRAGGING_TYPE(enum.IntEnum):
NO_DRAGGING = 0
DRAGGING_MOVE = 1
DRAGGING_COPY = 2
class MOUSE_EVENTS(enum.IntEnum):
(MOUSE_ENTER,
MOUSE_LEAVE,
MOUSE_MOVE,
MOUSE_UP,
MOUSE_DOWN,
MOUSE_DCLICK,
MOUSE_WHEEL,
MOUSE_TICK,
MOUSE_IDLE,
DROP,
DRAG_ENTER,
DRAG_LEAVE,
DRAG_REQUEST) = range(13)
MOUSE_CLICK = 0xFF
DRAGGING = 0x100
class MOUSE_PARAMS(ctypes.Structure):
_fields_ = [
("cmd", UINT), # MOUSE_EVENTS
("target", HELEMENT), # target element
("pos", POINT), # position of cursor, element relative
("pos_view", POINT), # position of cursor, view relative
("button_state", UINT), # MOUSE_BUTTONS
("alt_state", UINT), # KEYBOARD_STATES
("cursor_type", UINT), # CURSOR_TYPE to set, see CURSOR_TYPE
("is_on_icon", BOOL), # mouse is over icon (foreground-image, foreground-repeat:no-repeat)
("dragging", HELEMENT), # element that is being dragged over, this field is not NULL if (cmd & DRAGGING) != 0
("dragging_mode", UINT), # see DRAGGING_TYPE.
]
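# Usage sketch (hypothetical low-level handler): Sciter element callbacks
# receive a void* params pointer which is cast according to the event group:
#
#   def element_proc(tag, he, evtg, prms):
#       if evtg == EVENT_GROUPS.HANDLE_MOUSE:
#           params = ctypes.cast(prms, ctypes.POINTER(MOUSE_PARAMS)).contents
#           print(params.pos.x, params.pos.y)
#       return False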
class CURSOR_TYPE(enum.IntEnum):
(CURSOR_ARROW,
CURSOR_IBEAM,
CURSOR_WAIT,
CURSOR_CROSS,
CURSOR_UPARROW,
CURSOR_SIZENWSE,
CURSOR_SIZENESW,
CURSOR_SIZEWE,
CURSOR_SIZENS,
CURSOR_SIZEALL,
CURSOR_NO,
CURSOR_APPSTARTING,
CURSOR_HELP,
CURSOR_HAND,
CURSOR_DRAG_MOVE,
CURSOR_DRAG_COPY) = range(16)
class KEY_EVENTS(enum.IntEnum):
KEY_DOWN = 0
KEY_UP = 1
KEY_CHAR = 2
class KEY_PARAMS(ctypes.Structure):
_fields_ = [
("cmd", UINT), # KEY_EVENTS
("target", HELEMENT), # target element
("key_code", UINT), # key scan code, or character unicode for KEY_CHAR
("alt_state", UINT), # KEYBOARD_STATES
]
class FOCUS_EVENTS(enum.IntEnum):
FOCUS_LOST = 0 # non-bubbling event, target is new focus element
FOCUS_GOT = 1 # non-bubbling event, target is old focus element
FOCUS_IN = 2 # bubbling event/notification, target is an element that got focus
FOCUS_OUT = 3 # bubbling event/notification, target is an element that lost focus
class FOCUS_PARAMS(ctypes.Structure):
_fields_ = [
("cmd", UINT), # FOCUS_EVENTS
("target", HELEMENT), # target element, for FOCUS_LOST it is a handle of new focus element
# and for FOCUS_GOT it is a handle of old focus element, can be NULL
("by_mouse_click", BOOL), # true if focus is being set by mouse click
("cancel", BOOL), # in FOCUS_LOST phase setting this field to true will cancel transfer focus from old element to the new one.
]
class SCROLL_EVENTS(enum.IntEnum):
(SCROLL_HOME,
SCROLL_END,
SCROLL_STEP_PLUS,
SCROLL_STEP_MINUS,
SCROLL_PAGE_PLUS,
SCROLL_PAGE_MINUS,
SCROLL_POS,
SCROLL_SLIDER_RELEASED,
SCROLL_CORNER_PRESSED,
SCROLL_CORNER_RELEASED,
SCROLL_SLIDER_PRESSED) = range(11)
class SCROLL_SOURCE(enum.IntEnum):
(SCROLL_SOURCE_UNKNOWN,
SCROLL_SOURCE_KEYBOARD, # `SCROLL_PARAMS::reason` contains a key code
SCROLL_SOURCE_SCROLLBAR, # `SCROLL_PARAMS::reason` contains a `SCROLLBAR_PART` enum
SCROLL_SOURCE_ANIMATOR,
) = range(4)
class SCROLL_PARAMS(ctypes.Structure):
_fields_ = [
("cmd", UINT), # SCROLL_EVENTS
("target", HELEMENT), # target element
("pos", INT), # scroll position if SCROLL_POS
("vertical", BOOL), # true if from vertical scrollbar
("source", UINT), # SCROLL_SOURCE
("reason", UINT), # SCROLLBAR_PART or key code, see SCROLL_SOURCE
]
class GESTURE_CMD(enum.IntEnum):
(GESTURE_REQUEST, # return true and fill flags if it will handle gestures.
GESTURE_ZOOM, # The zoom gesture.
GESTURE_PAN, # The pan gesture.
GESTURE_ROTATE, # The rotation gesture.
GESTURE_TAP1, # The tap gesture.
GESTURE_TAP2) = range(6) # The two-finger tap gesture.
class GESTURE_STATE(enum.IntEnum):
GESTURE_STATE_BEGIN = 1 # starts
GESTURE_STATE_INERTIA = 2 # events generated by inertia processor
GESTURE_STATE_END = 4 # end, last event of the gesture sequence
class GESTURE_TYPE_FLAGS(enum.IntEnum):
GESTURE_FLAG_ZOOM = 0x0001
GESTURE_FLAG_ROTATE = 0x0002
GESTURE_FLAG_PAN_VERTICAL = 0x0004
GESTURE_FLAG_PAN_HORIZONTAL = 0x0008
GESTURE_FLAG_TAP1 = 0x0010 # press & tap
GESTURE_FLAG_TAP2 = 0x0020 # two fingers tap
GESTURE_FLAG_PAN_WITH_GUTTER = 0x4000 # PAN_VERTICAL and PAN_HORIZONTAL modifiers
GESTURE_FLAG_PAN_WITH_INERTIA = 0x8000 #
GESTURE_FLAGS_ALL = 0xFFFF #
class GESTURE_PARAMS(ctypes.Structure):
_fields_ = [
("cmd", UINT), # GESTURE_EVENTS
("target", HELEMENT), # target element
("pos", POINT), # position of cursor, element relative
("pos_view", POINT), # position of cursor, view relative
("flags", UINT), # for GESTURE_REQUEST combination of GESTURE_FLAGs.
# for others it is a combination of GESTURE_STATe's
("delta_time", UINT), # period of time from previous event.
("delta_xy", SIZE), # for GESTURE_PAN it is a direction vector
("delta_v", c_double), # for GESTURE_ROTATE - delta angle (radians)
# for GESTURE_ZOOM - zoom value, is less or greater than 1.0
]
class DRAW_EVENTS(enum.IntEnum):
DRAW_BACKGROUND = 0
DRAW_CONTENT = 1
DRAW_FOREGROUND = 2
DRAW_OUTLINE = 3
class DRAW_PARAMS(ctypes.Structure):
_fields_ = [
("cmd", UINT), # DRAW_EVENTS
("gfx", HGFX), # hdc to paint on
("area", RECT), # element area, to get invalid area to paint use GetClipBox,
("reserved", UINT), # for DRAW_BACKGROUND/DRAW_FOREGROUND - it is a border box
] # for DRAW_CONTENT - it is a content box
class CONTENT_CHANGE_BITS(enum.IntEnum):
CONTENT_ADDED = 0x01
CONTENT_REMOVED = 0x02
class BEHAVIOR_EVENTS(enum.IntEnum):
"""Behavior event code."""
BUTTON_CLICK = 0 # click on button
BUTTON_PRESS = 1 # mouse down or key down in button
BUTTON_STATE_CHANGED = 2 # checkbox/radio/slider changed its state/value
EDIT_VALUE_CHANGING = 3 # before text change
EDIT_VALUE_CHANGED = 4 # after text change
SELECT_SELECTION_CHANGED = 5 # selection in <select> changed
SELECT_STATE_CHANGED = 6 # node in select expanded/collapsed, heTarget is the node
POPUP_REQUEST = 7 # request to show popup just received,
                      # here DOM of popup element can be modified.
POPUP_READY = 8 # popup element has been measured and ready to be shown on screen,
# here you can use functions like ScrollToView.
POPUP_DISMISSED = 9 # popup element is closed,
                        # here DOM of popup element can be modified again - e.g. some items can be removed
# to free memory.
MENU_ITEM_ACTIVE = 0xA # menu item activated by mouse hover or by keyboard,
MENU_ITEM_CLICK = 0xB # menu item click,
# BEHAVIOR_EVENT_PARAMS structure layout
# BEHAVIOR_EVENT_PARAMS.cmd - MENU_ITEM_CLICK/MENU_ITEM_ACTIVE
# BEHAVIOR_EVENT_PARAMS.heTarget - owner(anchor) of the menu
# BEHAVIOR_EVENT_PARAMS.he - the menu item, presumably <li> element
# BEHAVIOR_EVENT_PARAMS.reason - BY_MOUSE_CLICK | BY_KEY_CLICK
CONTEXT_MENU_REQUEST = 0x10 # "right-click", BEHAVIOR_EVENT_PARAMS::he is current popup menu HELEMENT being processed or NULL.
# application can provide its own HELEMENT here (if it is NULL) or modify current menu element.
VISIUAL_STATUS_CHANGED = 0x11 # broadcast notification, sent to all elements of some container being shown or hidden
DISABLED_STATUS_CHANGED = 0x12 # broadcast notification, sent to all elements of some container that got new value of :disabled state
POPUP_DISMISSING = 0x13 # popup is about to be closed
CONTENT_CHANGED = 0x15 # content has been changed, is posted to the element that gets content changed, reason is combination of CONTENT_CHANGE_BITS.
# target == NULL means the window got new document and this event is dispatched only to the window.
CLICK = 0x16 # generic click
CHANGE = 0x17 # generic change
# "grey" event codes - notfications from behaviors from this SDK
HYPERLINK_CLICK = 0x80 # hyperlink click
ELEMENT_COLLAPSED = 0x90 # element was collapsed, so far only behavior:tabs is sending these two to the panels
ELEMENT_EXPANDED = 0x91 # element was expanded,
ACTIVATE_CHILD = 0x92 # activate (select) child,
# used for example by accesskeys behaviors to send activation request, e.g. tab on behavior:tabs.
INIT_DATA_VIEW = 0x93 # request to virtual grid to initialize its view
ROWS_DATA_REQUEST = 0x94 # request from virtual grid to data source behavior to fill data in the table
                               # parameters passed through DATA_ROWS_PARAMS structure.
UI_STATE_CHANGED = 0x95 # ui state changed, observers shall update their visual states.
# is sent for example by behavior:richtext when caret position/selection has changed.
FORM_SUBMIT = 0x96 # behavior:form detected submission event. BEHAVIOR_EVENT_PARAMS::data field contains data to be posted.
# BEHAVIOR_EVENT_PARAMS::data is of type T_MAP in this case key/value pairs of data that is about
# to be submitted. You can modify the data or discard submission by returning true from the handler.
FORM_RESET = 0x97 # behavior:form detected reset event (from button type=reset). BEHAVIOR_EVENT_PARAMS::data field contains data to be reset.
# BEHAVIOR_EVENT_PARAMS::data is of type T_MAP in this case key/value pairs of data that is about
                               # to be reset. You can modify the data or discard reset by returning true from the handler.
DOCUMENT_COMPLETE = 0x98 # document in behavior:frame or root document is complete.
HISTORY_PUSH = 0x99 # requests to behavior:history (commands)
HISTORY_DROP = 0x9A
HISTORY_PRIOR = 0x9B
HISTORY_NEXT = 0x9C
HISTORY_STATE_CHANGED = 0x9D # behavior:history notification - history stack has changed
CLOSE_POPUP = 0x9E # close popup request,
REQUEST_TOOLTIP = 0x9F # request tooltip, evt.source <- is the tooltip element.
ANIMATION = 0xA0 # animation started (reason=1) or ended(reason=0) on the element.
DOCUMENT_CREATED = 0xC0 # document created, script namespace initialized. target -> the document
DOCUMENT_CLOSE_REQUEST = 0xC1 # document is about to be closed, to cancel closing do: evt.data = sciter::value("cancel");
DOCUMENT_CLOSE = 0xC2 # last notification before document removal from the DOM
DOCUMENT_READY = 0xC3 # document has got DOM structure, styles and behaviors of DOM elements. Script loading run is complete at this moment.
DOCUMENT_PARSED = 0xC4 # document just finished parsing - has got DOM structure. This event is generated before the `DOCUMENT_READY`. Since 4.0.3.
VIDEO_INITIALIZED = 0xD1 # <video> "ready" notification
VIDEO_STARTED = 0xD2 # <video> playback started notification
    VIDEO_STOPPED = 0xD3               # <video> playback stopped/paused notification
VIDEO_BIND_RQ = 0xD4 # <video> request for frame source binding,
# If you want to provide your own video frames source for the given target <video> element do the following:
# 1. Handle and consume this VIDEO_BIND_RQ request
# 2. You will receive second VIDEO_BIND_RQ request/event for the same <video> element
# but this time with the 'reason' field set to an instance of sciter::video_destination interface.
# 3. add_ref() it and store it for example in worker thread producing video frames.
# 4. call sciter::video_destination::start_streaming(...) providing needed parameters
# call sciter::video_destination::render_frame(...) as soon as they are available
# call sciter::video_destination::stop_streaming() to stop the rendering (a.k.a. end of movie reached)
PAGINATION_STARTS = 0xE0 # behavior:pager starts pagination
PAGINATION_PAGE = 0xE1 # behavior:pager paginated page no, reason -> page no
PAGINATION_ENDS = 0xE2 # behavior:pager end pagination, reason -> total pages
FIRST_APPLICATION_EVENT_CODE = 0x100
# all custom event codes shall be greater
                                        # than this number. All codes above this will be used
                                        # solely by the application - HTMLayout will not interpret them
                                        # and will do just dispatching.
# To send event notifications with these codes use
# HTMLayoutSend/PostEvent API.
class CLICK_REASON(enum.IntEnum):
BY_MOUSE_CLICK = 0
BY_KEY_CLICK = 1
SYNTHESIZED = 2 # synthesized, programmatically generated.
BY_MOUSE_ON_ICON = 3
class EDIT_CHANGED_REASON(enum.IntEnum):
BY_INS_CHAR = 0 # single char insertion
BY_INS_CHARS = 1 # character range insertion, clipboard
BY_DEL_CHAR = 2 # single char deletion
BY_DEL_CHARS = 3 # character range deletion (selection)
BY_UNDO_REDO = 4 # undo/redo
class BEHAVIOR_EVENT_PARAMS(ctypes.Structure):
_fields_ = [
("cmd", UINT), # BEHAVIOR_EVENTS
("heTarget", HELEMENT), # target element handler, in MENU_ITEM_CLICK this is owner element that caused this menu - e.g. context menu owner
# In scripting this field named as Event.owner
("he", HELEMENT), # source element e.g. in SELECTION_CHANGED it is new selected <option>, in MENU_ITEM_CLICK it is menu item (LI) element
("reason", UINT_PTR), # CLICK_REASON or EDIT_CHANGED_REASON - UI action causing change.
# In case of custom event notifications this may be any
# application specific value.
("data", SCITER_VALUE), # auxiliary data accompanied with the event. E.g. FORM_SUBMIT event is using this field to pass collection of values.
]
class TIMER_PARAMS(ctypes.Structure):
_fields_ = [
("timerId", UINT_PTR), # timerId that was used to create timer by using HTMLayoutSetTimerEx
]
class BEHAVIOR_METHOD_IDENTIFIERS(enum.IntEnum):
""""Identifiers of methods currently supported by intrinsic behaviors."""
DO_CLICK = 0
GET_TEXT_VALUE = 1
SET_TEXT_VALUE = 2
TEXT_EDIT_GET_SELECTION = 3
TEXT_EDIT_SET_SELECTION = 4
TEXT_EDIT_REPLACE_SELECTION = 5
SCROLL_BAR_GET_VALUE = 6
SCROLL_BAR_SET_VALUE = 7
TEXT_EDIT_GET_CARET_POSITION = 8
TEXT_EDIT_GET_SELECTION_TEXT = 9 # p - TEXT_SELECTION_PARAMS
TEXT_EDIT_GET_SELECTION_HTML = 10 # p - TEXT_SELECTION_PARAMS
TEXT_EDIT_CHAR_POS_AT_XY = 11 # p - TEXT_EDIT_CHAR_POS_AT_XY_PARAMS
    IS_EMPTY = 0xFC  # p - IS_EMPTY_PARAMS # set IS_EMPTY_PARAMS::is_empty (false/true) reflects :empty state of the element.
GET_VALUE = 0xFD # p - VALUE_PARAMS
SET_VALUE = 0xFE # p - VALUE_PARAMS
FIRST_APPLICATION_METHOD_ID = 0x100
class SCRIPTING_METHOD_PARAMS(ctypes.Structure):
_fields_ = [
("name", LPCSTR), # method name
("argv", POINTER(SCITER_VALUE)), # vector of arguments
("argc", UINT), # argument count
("result", SCITER_VALUE), # return value
]
class TISCRIPT_METHOD_PARAMS(ctypes.Structure):
_fields_ = [
# parameters are accessible through tiscript::args.
("vm", sctiscript.HVM),
("tag", sctiscript.value), # method id (symbol)
("result", sctiscript.value), # return value
]
# GET_VALUE/SET_VALUE methods params
class VALUE_PARAMS(ctypes.Structure):
_fields_ = [
("methodID", UINT),
("val", SCITER_VALUE),
]
# IS_EMPTY method params
class IS_EMPTY_PARAMS(ctypes.Structure):
_fields_ = [
("methodID", UINT),
("is_empty", UINT), # !0 - is empty
]
# see SciterRequestElementData
class DATA_ARRIVED_PARAMS(ctypes.Structure):
_fields_ = [
("initiator", HELEMENT), # element intiator of HTMLayoutRequestElementData request,
("data", LPCBYTE), # data buffer
("dataSize", UINT), # size of data
("dataType", UINT), # data type passed "as is" from HTMLayoutRequestElementData
("status", UINT), # status = 0 (dataSize == 0) - unknown error.
# status = 100..505 - http response status, Note: 200 - OK!
# status > 12000 - wininet error code, see ERROR_INTERNET_*** in wininet.h
("_uri", LPCWSTR), # requested url
]
uri = UTF16LEField('_uri')
| pravic/pysciter | sciter/capi/scbehavior.py | Python | mit | 20,318 |
#!/usr/bin/env python
import os
import sys
import unittest
import multiprocessing
import tempfile
import artifactory
if sys.version_info[0] < 3:
import StringIO as io
import ConfigParser as configparser
else:
import io
import configparser
config = configparser.ConfigParser()
config.read("test.cfg")
art_uri = config.get("artifactory", "uri")
art_username = config.get("artifactory", "username")
art_password = config.get("artifactory", "password")
art_auth = (art_username, art_password)
class ArtifactoryPathTest(unittest.TestCase):
cls = artifactory.ArtifactoryPath
def test_root(self):
P = self.cls
self.assertEqual(P(art_uri + '/artifactory/libs-release-local').root,
'/libs-release-local/')
def test_isdir(self):
P = self.cls
self.assertTrue(P(art_uri + '/artifactory/libs-release-local').is_dir())
self.assertFalse(P(art_uri + '/artifactory/non-existing-repo').is_dir())
def test_owner(self):
P = self.cls
        self.assertEqual(P(art_uri + '/artifactory/libs-release-local').owner(),
                         'nobody')
def test_mkdir(self):
P = self.cls
p = P(art_uri + '/artifactory/to-delete/foo', auth=art_auth)
p.mkdir()
self.assertTrue(p.is_dir())
self.assertFalse(p.is_file())
self.assertRaises(OSError, p.mkdir)
p.rmdir()
self.assertFalse(p.exists())
self.assertFalse(p.is_dir())
self.assertFalse(p.is_file())
def test_touch(self):
P = self.cls
p = P(art_uri + '/artifactory/to-delete/foo', auth=art_auth)
p.touch(exist_ok=False)
p.touch()
self.assertFalse(p.is_dir())
self.assertTrue(p.is_file())
self.assertTrue(p.exists())
self.assertRaises(OSError, p.touch, exist_ok=False)
p.unlink()
self.assertFalse(p.exists())
def test_iterdir(self):
P = self.cls
p = P(art_uri + '/artifactory/to-delete/foo', auth=art_auth)
p.mkdir()
(p / 'a').touch()
(p / 'b').touch()
(p / 'c').mkdir()
(p / 'c' / 'd').mkdir()
(p / 'e').touch()
count = 0
for child in p.iterdir():
self.assertIn(str(child)[-1:], ['a', 'b', 'c', 'e'])
count += 1
        self.assertEqual(count, 4)
p.rmdir()
def test_glob(self):
P = self.cls
p = P(art_uri + '/artifactory/to-delete/foo', auth=art_auth)
p_root = P(art_uri + '/artifactory/to-delete', auth=art_auth)
if p.exists():
p.rmdir()
p.mkdir()
(p / 'a').touch()
(p / 'b.txt').touch()
(p / 'c').mkdir()
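        # note: 'd.txt' is created as a directory on purpose; glob should match it too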
(p / 'c' / 'd.txt').mkdir()
(p / 'e.bin').touch()
count = 0
for child in p.glob("**/*.txt"):
self.assertIn(str(child)[-5:], ['b.txt', 'd.txt'])
count += 1
        self.assertEqual(count, 2)
for child in p_root.glob("**/*.txt"):
self.assertIn(str(child)[-5:], ['b.txt', 'd.txt'])
p.rmdir()
def test_deploy(self):
P = self.cls
p = P(art_uri + '/artifactory/to-delete/foo', auth=art_auth)
p2 = P(art_uri + '/artifactory/to-delete/foo2', auth=art_auth)
if p.exists():
p.unlink()
s = io.StringIO()
s.write("Some test string")
p.deploy(s)
with p.open() as fd:
result = fd.read()
self.assertEqual(result, "Some test string")
with p.open() as fd:
p2.deploy(fd)
with p2.open() as fd:
result = fd.read()
self.assertEqual(result, "Some test string")
p.unlink()
p2.unlink()
def test_deploy_file(self):
P = self.cls
p = P(art_uri + '/artifactory/to-delete/foo', auth=art_auth)
if p.exists():
p.unlink()
        # open in text mode so writing a str works on Python 3 as well
        tf = tempfile.NamedTemporaryFile(mode='w')
tf.write("Some test string")
tf.flush()
p.deploy_file(tf.name)
tf.close()
with p.open() as fd:
result = fd.read()
self.assertEqual(result, "Some test string")
p.unlink()
def test_open(self):
P = self.cls
p = P(art_uri + '/artifactory/to-delete/foo', auth=art_auth)
if p.exists():
p.rmdir()
s = io.StringIO()
s.write("Some test string")
p.deploy(s)
with self.assertRaises(NotImplementedError):
p.open('w')
with self.assertRaises(NotImplementedError):
p.open(buffering=1)
with self.assertRaises(NotImplementedError):
p.open(encoding='foo')
with self.assertRaises(NotImplementedError):
p.open(errors='bar')
with self.assertRaises(NotImplementedError):
p.open(newline='baz')
p.unlink()
if __name__ == '__main__':
unittest.main()
| Parallels/artifactory | int_test.py | Python | mit | 4,970 |
"""
Pre-processing and normalization functions for the ST datasets.
"""
import numpy as np
import pandas as pd
import math
import os
from sklearn.preprocessing import StandardScaler
import re
def normalize(counts, normalization):
"""
Wrapper around the function normalize_data()
"""
return normalize_data(counts, normalization)
def filter_data_genes(counts, filter_genes):
"""
Filter the input matrix of counts to keep only
the genes given as input.
:param counts: matrix of counts (genes as columns)
:param filter_genes: list of genes to keep
:return: the filtered matrix of counts
"""
genes_to_keep = list()
if filter_genes:
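        # keep a gene if its name fully matches any of the given regular expressions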
for gene in counts.columns:
for regex in filter_genes:
if re.fullmatch(regex, gene):
genes_to_keep.append(gene)
break
else:
genes_to_keep = counts.columns
# Check that we hit some genes
if len(genes_to_keep) == 0:
raise RuntimeError("No genes found in the dataset from the "
"list given\n{}\n".format(' '.join([x for x in filter_genes])))
return counts.loc[:, genes_to_keep]
def filter_data(counts, num_exp_genes, num_exp_spots, min_gene_expression):
"""
Filters the input matrix of counts with the thresholds given as input.
:param counts: matrix of counts (genes as columns)
    :param num_exp_genes: a float from 0-1 representing the fraction of
    the distribution of detected genes (>= min_gene_expression) a spot must have
    :param num_exp_spots: a float from 0-1 representing the fraction of
    the total number of spots (>= min_gene_expression) a gene must have
:param min_gene_expression: expression value to define as detected
:return: the filtered matrix of counts
"""
if num_exp_spots <= 0.0 and num_exp_genes <= 0.0:
return counts
return remove_noise(counts, num_exp_genes, num_exp_spots,
min_expression=min_gene_expression)
def ztransformation(counts):
"""
Applies a simple z-score transformation to
a matrix of counts (genes as columns)
    which consists of subtracting from each count
    the mean of its column (gene) and then dividing it by
    the column's standard deviation
:param counts: matrix of counts (genes as columns)
:return: the z-scaled matrix of counts
"""
scaler = StandardScaler()
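    # StandardScaler standardizes each column (gene): subtract its mean and divide by its standard deviation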
rows = counts.index
cols = counts.columns
scaled_counts = scaler.fit_transform(counts.values)
return pd.DataFrame(data=scaled_counts,
index=rows,
columns=cols)
def aggregate_datatasets(counts_table_files, add_index=True, header=0):
"""
Takes a list of matrices of counts (genes as columns and spots as rows)
and merges them into one data frame using the genes as merging criteria.
An index will be appended to each spot to be able to identify
them (this is optional).
:param counts_table_files: a list of file names corresponding to the matrices
:param add_index: add the dataset index (position) to the spot's when True
:param header: whether to include the columns of the matrices or not
:return: a matrix counts with the merged data
"""
# Spots are rows and genes are columns
counts = pd.DataFrame()
for i, counts_file in enumerate(counts_table_files):
if not os.path.isfile(counts_file):
raise IOError("Error parsing data frame", "Invalid input file")
new_counts = pd.read_csv(counts_file, sep="\t",
header=header, index_col=0, engine='c', low_memory=True)
new_counts = new_counts[~new_counts.index.duplicated()]
# Append dataset index to the spots (indexes) so they can be traced
if add_index and len(counts_table_files) > 1:
new_spots = ["{0}_{1}".format(i + 1, spot) for spot in new_counts.index]
new_counts.index = new_spots
counts = counts.append(new_counts, sort=True)
# Replace Nan and Inf by zeroes
    counts.replace([np.inf, -np.inf], np.nan, inplace=True)
counts.fillna(0.0, inplace=True)
return counts
def remove_noise(counts, num_exp_genes=0.01, num_exp_spots=0.01, min_expression=1):
"""
This functions remove noisy (low quality) genes and spots
for a given matrix of counts (Genes as columns and spots as rows).
    - Noisy spots are removed: a spot is kept only if its number of
    detected genes (counts >= min_expression) is above the given quantile
    of that distribution. The quantile is given as a parameter (0.0 - 1.0).
    - Noisy genes are removed: a gene is kept only if it is detected
    (counts >= min_expression) in at least the given percentage of the
    total number of spots. The percentage is given as a parameter (0.0 - 1.0).
:param counts: a matrix of counts
:param num_exp_genes: a float from 0-1 representing the % of
the distribution of expressed genes a spot must have to be kept
:param num_exp_spots: a float from 0-1 representing the % of
the total number of spots that a gene must have with a count bigger
than the parameter min_expression in order to be kept
:param min_expression: the minimum expression for a gene to be
considered expressed
:return: a new matrix of counts with noisy spots/genes removed
"""
# How many spots do we keep based on the number of genes expressed?
num_spots = len(counts.index)
num_genes = len(counts.columns)
if 0.0 < num_exp_genes < 1.0:
# Remove noisy spots
gene_sums = (counts >= min_expression).sum(axis=1)
min_genes_spot_exp = round(gene_sums.quantile(num_exp_genes))
print("Number of expressed genes (count of at least {}) a spot must have to be kept "
"({}% of total expressed genes) {}".format(min_expression, num_exp_genes, min_genes_spot_exp))
counts = counts[gene_sums >= min_genes_spot_exp]
print("Dropped {} spots".format(num_spots - len(counts.index)))
if 0.0 < num_exp_spots < 1.0:
# Spots are columns and genes are rows
counts = counts.transpose()
# Remove noisy genes
min_features_gene = round(len(counts.columns) * num_exp_spots)
print("Removing genes that are expressed in less than {} "
"spots with a count of at least {}".format(min_features_gene, min_expression))
counts = counts[(counts >= min_expression).sum(axis=1) >= min_features_gene]
print("Dropped {} genes".format(num_genes - len(counts.index)))
counts = counts.transpose()
return counts
def keep_top_genes(counts, num_genes_discard, criteria="Variance"):
"""
This function takes a matrix of counts (Genes as columns and spots as rows)
    and returns a new matrix of counts where a number of genes are kept
    using the variance or the total count as filtering criteria.
:param counts: a matrix of counts
:param num_genes_discard: the % (1-100) of genes to keep
:param criteria: the criteria used to select ("Variance or "TopRanked")
:return: a new matrix of counts with only the top ranked genes.
"""
if num_genes_discard <= 0:
return counts
# Spots as columns and genes as rows
counts = counts.transpose()
# Keep only the genes with higher over-all variance
num_genes = len(counts.index)
print("Removing {}% of genes based on the {}".format(num_genes_discard * 100, criteria))
if criteria == "Variance":
var = counts.var(axis=1)
min_genes_spot_var = var.quantile(num_genes_discard)
if math.isnan(min_genes_spot_var):
print("Computed variance is NaN! Check your input data.")
else:
print("Min normalized variance a gene must have over all spots "
"to be kept ({0}% of total) {1}".format(num_genes_discard, min_genes_spot_var))
counts = counts[var >= min_genes_spot_var]
elif criteria == "TopRanked":
sum = counts.sum(axis=1)
min_genes_spot_sum = sum.quantile(num_genes_discard)
if math.isnan(min_genes_spot_sum):
print("Computed sum is NaN! Check your input data.")
else:
print("Min normalized total count a gene must have over all spots "
"to be kept ({0}% of total) {1}".format(num_genes_discard, min_genes_spot_sum))
counts = counts[sum >= min_genes_spot_sum]
else:
raise RuntimeError("Error, incorrect criteria method\n")
print("Dropped {} genes".format(num_genes - len(counts.index)))
return counts.transpose()
def compute_size_factors(counts, normalization):
"""
Helper function to compute normalization size factors
"""
counts = counts.transpose()
if normalization in "REL":
size_factors = counts.sum(axis=0)
elif normalization in "CPM":
col_sums = counts.sum(axis=0)
size_factors = col_sums * np.mean(col_sums)
elif normalization in "RAW":
size_factors = 1
else:
raise RuntimeError("Error, incorrect normalization method\n")
return size_factors
def normalize_data(counts, normalization):
"""
    This function takes a matrix of counts as input
(genes as columns and spots as rows) and
returns a new matrix of counts normalized using
the normalization method given in the input.
:param counts: a matrix of counts (genes as columns)
:param normalization: the normalization method to use (RAW, REL or CPM)
:return: a matrix of counts with normalized counts (genes as columns)
"""
# Spots as columns and genes as rows
norm_counts = counts.transpose()
if normalization in "REL":
norm_counts = norm_counts.div(norm_counts.sum(axis=1), axis=0)
elif normalization in "CPM":
col_sums = counts.sum(axis=1)
norm_counts = norm_counts.div(col_sums, axis=0) * np.mean(col_sums)
elif normalization in "RAW":
pass
# return normalize counts (genes as columns)
return norm_counts.transpose()
| jfnavarro/st_analysis | stanalysis/preprocessing.py | Python | mit | 9,931 |
#From: https://djangosnippets.org/snippets/690/
import re
from django.template.defaultfilters import slugify
def unique_slugify(instance, value, slug_field_name='slug', queryset=None,
slug_separator='-'):
"""
Calculates and stores a unique slug of ``value`` for an instance.
``slug_field_name`` should be a string matching the name of the field to
store the slug in (and the field to check against for uniqueness).
``queryset`` usually doesn't need to be explicitly provided - it'll default
to using the ``.all()`` queryset from the model's default manager.
"""
slug_field = instance._meta.get_field(slug_field_name)
slug = getattr(instance, slug_field.attname)
slug_len = slug_field.max_length
# Sort out the initial slug, limiting its length if necessary.
slug = slugify(value)
if slug_len:
slug = slug[:slug_len]
slug = _slug_strip(slug, slug_separator)
original_slug = slug
# Create the queryset if one wasn't explicitly provided and exclude the
# current instance from the queryset.
if queryset is None:
queryset = instance.__class__._default_manager.all()
if instance.pk:
queryset = queryset.exclude(pk=instance.pk)
    # Find a unique slug. If one matches, add '-2' to the end and try again
# (then '-3', etc).
    suffix = 2
    while not slug or queryset.filter(**{slug_field_name: slug}):
        slug = original_slug
        end = '%s%s' % (slug_separator, suffix)
        if slug_len and len(slug) + len(end) > slug_len:
            slug = slug[:slug_len-len(end)]
            slug = _slug_strip(slug, slug_separator)
        slug = '%s%s' % (slug, end)
        suffix += 1
setattr(instance, slug_field.attname, slug)
def _slug_strip(value, separator='-'):
"""
Cleans up a slug by removing slug separator characters that occur at the
beginning or end of a slug.
If an alternate separator is used, it will also replace any instances of
the default '-' separator with the new separator.
"""
separator = separator or ''
if separator == '-' or not separator:
re_sep = '-'
else:
re_sep = '(?:-|%s)' % re.escape(separator)
# Remove multiple instances and if an alternate separator is provided,
# replace the default '-' separator.
if separator != re_sep:
value = re.sub('%s+' % re_sep, separator, value)
# Remove separator from the beginning and end of the slug.
if separator:
if separator != '-':
re_sep = re.escape(separator)
value = re.sub(r'^%s+|%s+$' % (re_sep, re_sep), '', value)
return value | ninapavlich/scout-and-rove | scoutandrove/utils/slugify.py | Python | mit | 2,649 |
"""Sample unit test module using pytest-describe and expecter."""
# pylint: disable=redefined-outer-name,unused-variable,expression-not-assigned,singleton-comparison
from demo import utils
def describe_feet_to_meters():
def when_integer(expect):
expect(utils.feet_to_meters(42)) == 12.80165
def when_string(expect):
expect(utils.feet_to_meters("hello")) == None
| jacebrowning/template-python-demo | demo/tests/test_utils.py | Python | mit | 390 |
#!/usr/bin/python3
"""
Given an integer array, find three numbers whose product is maximum and output
the maximum product.
Example 1:
Input: [1,2,3]
Output: 6
Example 2:
Input: [1,2,3,4]
Output: 24
Note:
The length of the given array will be in range [3,104] and all elements are in
the range [-1000, 1000].
Multiplication of any three numbers in the input won't exceed the range of
32-bit signed integer.
"""
import heapq
from typing import List
class Solution:
def maximumProduct(self, nums: List[int]) -> int:
"""
heapq nlargest nsmallest
"""
mxes = heapq.nlargest(3, nums)
mns = heapq.nsmallest(3, nums)
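        # The maximum product is either the product of the three largest
        # numbers, or the product of the two smallest (most negative)
        # numbers with the single largest number.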
return max(
mxes[0] * mxes[1] * mxes[2],
mns[0] * mns[1] * mxes[0],
)
| algorhythms/LeetCode | 628 Maximum Product of Three Numbers.py | Python | mit | 773 |
import re
from thefuck.utils import get_closest
def extract_possibilities(command):
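    # 'hg: unknown command' suggests corrections as "(did you mean one of a, b, c?)"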
possib = re.findall(r'\n\(did you mean one of ([^\?]+)\?\)', command.stderr)
if possib:
return possib[0].split(', ')
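    # an ambiguous command lists the matching commands on an indented line at the end of stderr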
possib = re.findall(r'\n ([^$]+)$', command.stderr)
if possib:
return possib[0].split(' ')
return possib
def match(command, settings):
return (command.script.startswith('hg ')
and ('hg: unknown command' in command.stderr
and '(did you mean one of ' in command.stderr
or "hg: command '" in command.stderr
and "' is ambiguous:" in command.stderr
)
)
def get_new_command(command, settings):
script = command.script.split(' ')
possibilities = extract_possibilities(command)
script[1] = get_closest(script[1], possibilities)
return ' '.join(script)
| zhangzhishan/thefuck | thefuck/rules/mercurial.py | Python | mit | 902 |
"""fips verb to build the oryol samples webpage"""
import os
import yaml
import shutil
import subprocess
import glob
from string import Template
from mod import log, util, project, emscripten, android
from tools import texexport
# what to build
BuildEmscripten = True
BuildWasm = True
ExportAssets = True
ExtensionSamples = True
# webpage template arguments
GitHubSamplesURL = 'https://github.com/floooh/oryol/tree/master/code/Samples/'
DocTitle = 'Oryol Core Samples'
Title = 'Oryol'
Subtitle = 'core samples'
# Separator = 'rainbow-separator'
# GameSeparator = 'game-rainbow-separator'
# BackgroundColor = '#19A3FF' # this is the original bright blue
Separator = 'simple-separator'
GameSeparator = 'simple-separator'
BackgroundColor = '#42A5F5'
# build configuration
EmscConfig = 'webgl2-emsc-ninja-release'
WasmConfig = 'webgl2-wasm-ninja-release'
#-------------------------------------------------------------------------------
def deploy_webpage(fips_dir, proj_dir, webpage_dir) :
"""builds the final webpage under under fips-deploy/oryol-webpage"""
ws_dir = util.get_workspace_dir(fips_dir)
# load the websamples.yml file, should have been created during the last build
with open(webpage_dir + '/websamples.yml', 'r') as f :
samples = yaml.load(f.read())
# create directories
for platform in ['asmjs', 'wasm'] :
platform_dir = '{}/{}'.format(webpage_dir, platform)
if not os.path.isdir(platform_dir) :
os.makedirs(platform_dir)
# link to the Extension Samples
content = ''
if ExtensionSamples :
content = '<div class="thumb">\n'
content += ' <div class="thumb-title">To Extension Samples...</div>\n'
content += ' <div class="img-frame"><a href="http://floooh.github.com/oryol-samples/index.html"><img class="image" src="ext_samples.jpg"></img></a></div>\n'
content += '</div>\n'
# build the thumbnail gallery
for sample in samples :
if sample['name'] != '__end__' :
log.info('> adding thumbnail for {}'.format(sample['name']))
name = sample['name']
imgPath = sample['image']
types = sample['type']
desc = sample['desc']
head, tail = os.path.split(imgPath)
if tail == 'none' :
imgFileName = 'dummy.jpg'
else :
imgFileName = tail
content += '<div class="thumb">\n'
content += ' <div class="thumb-title">{}</div>\n'.format(name)
content += ' <div class="img-frame"><a href="asmjs/{}.html"><img class="image" src="{}" title="{}"></img></a></div>\n'.format(name,imgFileName,desc)
content += ' <div class="thumb-bar">\n'
content += ' <ul class="thumb-list">\n'
if BuildEmscripten and 'emscripten' in types :
content += ' <li class="thumb-item"><a class="thumb-link" href="asmjs/{}.html">asm.js</a></li>\n'.format(name)
if BuildWasm and 'emscripten' in types :
content += ' <li class="thumb-item"><a class="thumb-link" href="wasm/{}.html">wasm</a></li>\n'.format(name)
content += ' </ul>\n'
content += ' </div>\n'
content += '</div>\n'
# populate the html template, and write to the build directory
with open(proj_dir + '/web/index.html', 'r') as f :
templ = Template(f.read())
html = templ.safe_substitute(doctitle=DocTitle, title=Title, subtitle=Subtitle, samples=content, separator=Separator)
with open(webpage_dir + '/index.html', 'w') as f :
f.write(html)
# and the same with the CSS template
with open(proj_dir + '/web/style.css', 'r') as f :
templ = Template(f.read())
css = templ.safe_substitute(background=BackgroundColor)
with open(webpage_dir +'/style.css', 'w') as f :
f.write(css)
# copy other required files
for name in ['dummy.jpg', 'emsc.js', 'wasm.js', 'about.html', 'favicon.png', 'ext_samples.jpg'] :
log.info('> copy file: {}'.format(name))
shutil.copy(proj_dir + '/web/' + name, webpage_dir + '/' + name)
# generate emscripten HTML pages
if BuildEmscripten and emscripten.check_exists(fips_dir) :
emsc_deploy_dir = '{}/fips-deploy/oryol/{}'.format(ws_dir, EmscConfig)
for sample in samples :
name = sample['name']
if name != '__end__' and 'emscripten' in sample['type'] :
log.info('> generate emscripten HTML page: {}'.format(name))
for ext in ['js', 'html.mem'] :
src_path = '{}/{}.{}'.format(emsc_deploy_dir, name, ext)
if os.path.isfile(src_path) :
shutil.copy(src_path, '{}/asmjs/'.format(webpage_dir))
with open(proj_dir + '/web/emsc.html', 'r') as f :
templ = Template(f.read())
src_url = GitHubSamplesURL + sample['src'];
html = templ.safe_substitute(name=name, source=src_url, separator=GameSeparator)
                with open('{}/asmjs/{}.html'.format(webpage_dir, name), 'w') as f :
f.write(html)
# generate WebAssembly HTML pages
if BuildWasm and emscripten.check_exists(fips_dir) :
wasm_deploy_dir = '{}/fips-deploy/oryol/{}'.format(ws_dir, WasmConfig)
for sample in samples :
name = sample['name']
if name != '__end__' and 'emscripten' in sample['type'] :
log.info('> generate wasm HTML page: {}'.format(name))
for ext in ['js', 'wasm.mappedGlobals'] :
src_path = '{}/{}.{}'.format(wasm_deploy_dir, name, ext)
if os.path.isfile(src_path) :
shutil.copy(src_path, '{}/wasm/'.format(webpage_dir))
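                # the .html.mem and .wasm files are also copied with an extra .txt
                # suffix below, presumably so the web host serves them with a known MIME type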
for ext in ['html.mem', 'wasm'] :
src_path = '{}/{}.{}'.format(wasm_deploy_dir, name, ext)
if os.path.isfile(src_path) :
shutil.copy(src_path, '{}/wasm/{}.{}.txt'.format(webpage_dir, name, ext))
with open(proj_dir + '/web/wasm.html', 'r') as f :
templ = Template(f.read())
src_url = GitHubSamplesURL + sample['src'];
html = templ.safe_substitute(name=name, source=src_url, separator=GameSeparator)
with open('{}/wasm/{}.html'.format(webpage_dir, name), 'w') as f :
f.write(html)
# copy the screenshots
for sample in samples :
if sample['name'] != '__end__' :
img_path = sample['image']
head, tail = os.path.split(img_path)
if tail != 'none' :
log.info('> copy screenshot: {}'.format(tail))
shutil.copy(img_path, webpage_dir + '/' + tail)
#-------------------------------------------------------------------------------
def export_assets(fips_dir, proj_dir, webpage_dir) :
tex_srcdir = proj_dir + '/data'
tex_dstdir = webpage_dir + '/data'
texexport.configure(proj_dir, tex_srcdir, tex_dstdir)
texexport.exportSampleTextures()
for ext in ['txt'] :
for dataFile in glob.glob(proj_dir + '/data/*.{}'.format(ext)) :
shutil.copy(dataFile, '{}/data/'.format(webpage_dir))
#-------------------------------------------------------------------------------
def build_deploy_webpage(fips_dir, proj_dir, rebuild) :
# if webpage dir exists, clear it first
ws_dir = util.get_workspace_dir(fips_dir)
webpage_dir = '{}/fips-deploy/oryol-webpage'.format(ws_dir)
if rebuild :
if os.path.isdir(webpage_dir) :
shutil.rmtree(webpage_dir)
if not os.path.isdir(webpage_dir) :
os.makedirs(webpage_dir)
# compile samples
if BuildEmscripten and emscripten.check_exists(fips_dir) :
project.gen(fips_dir, proj_dir, EmscConfig)
project.build(fips_dir, proj_dir, EmscConfig)
if BuildWasm and emscripten.check_exists(fips_dir) :
project.gen(fips_dir, proj_dir, WasmConfig)
project.build(fips_dir, proj_dir, WasmConfig)
# export sample assets
if ExportAssets :
export_assets(fips_dir, proj_dir, webpage_dir)
# deploy the webpage
deploy_webpage(fips_dir, proj_dir, webpage_dir)
log.colored(log.GREEN, 'Generated Samples web page under {}.'.format(webpage_dir))
#-------------------------------------------------------------------------------
def serve_webpage(fips_dir, proj_dir) :
ws_dir = util.get_workspace_dir(fips_dir)
webpage_dir = '{}/fips-deploy/oryol-webpage'.format(ws_dir)
p = util.get_host_platform()
if p == 'osx' :
try :
subprocess.call(
'open http://localhost:8000 ; python {}/mod/httpserver.py'.format(fips_dir),
cwd = webpage_dir, shell=True)
except KeyboardInterrupt :
pass
elif p == 'win':
try:
subprocess.call(
'cmd /c start http://localhost:8000 && python {}/mod/httpserver.py'.format(fips_dir),
cwd = webpage_dir, shell=True)
except KeyboardInterrupt:
pass
elif p == 'linux':
try:
subprocess.call(
'xdg-open http://localhost:8000; python {}/mod/httpserver.py'.format(fips_dir),
cwd = webpage_dir, shell=True)
except KeyboardInterrupt:
pass
#-------------------------------------------------------------------------------
def run(fips_dir, proj_dir, args) :
if len(args) > 0 :
if args[0] == 'build' :
build_deploy_webpage(fips_dir, proj_dir, False)
elif args[0] == 'rebuild' :
build_deploy_webpage(fips_dir, proj_dir, True)
elif args[0] == 'serve' :
serve_webpage(fips_dir, proj_dir)
else :
log.error("Invalid param '{}', expected 'build' or 'serve'".format(args[0]))
else :
log.error("Param 'build' or 'serve' expected")
#-------------------------------------------------------------------------------
def help() :
log.info(log.YELLOW +
'fips webpage build\n' +
'fips webpage rebuild\n' +
'fips webpage serve\n' +
log.DEF +
' build oryol samples webpage')
| floooh/oryol | fips-files/verbs/webpage.py | Python | mit | 10,355 |
from __future__ import unicode_literals
import argparse
import json
import logging
import os
import time
import tensorflow as tf
from ..load import yield_batch
from ..convolutional import ChessConvolutionalNetwork
from ..train import train_model, test_model
BATCH_SIZE = 1e3
TRAIN_TEST_RATIO = 0.8
MAX_ITERATIONS_PER_SQUARE = 50
LEARNING_RATES = [1e0, 1e-1, 1e-2, 1e-3, 1e-4]
ADAM_EPSILONS = [1, 1e-1, 1e-2, 1e-3, 1e-4, 1e-5, 1e-6, 1e-7]
logger = logging.getLogger(__name__)
class GridSquare(object):
__slots__ = ('learning_rate', 'epsilon', 'train_losses', 'test_losses')
def __init__(self, learning_rate, epsilon):
self.learning_rate = learning_rate
self.epsilon = epsilon
self.train_losses = []
self.test_losses = []
def to_dict(self):
return dict(
learning_rate=self.learning_rate,
epsilon=self.epsilon,
train_losses=self.train_losses,
test_losses=self.test_losses,
)
def main(output_path):
grid = []
total = len(LEARNING_RATES) * len(ADAM_EPSILONS)
grid_iteration = 0
for initial_learning_rate in LEARNING_RATES:
for epsilon in ADAM_EPSILONS:
start = time.time()
grid_iteration += 1
logger.info(
'%d / %d: LEARNING RATE %f; EPSILON %f',
grid_iteration,
total,
initial_learning_rate,
epsilon
)
estimator = ChessConvolutionalNetwork(
learning_rate=initial_learning_rate,
adam_epsilon=epsilon,
)
square = GridSquare(initial_learning_rate, epsilon)
with tf.Session() as session:
session.run(tf.global_variables_initializer())
iteration = 0
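                # track the lowest test loss seen so far (and its iteration) for logging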
best_loss = float('inf')
best_iteration = 0
for X_train, X_test in yield_batch(BATCH_SIZE, TRAIN_TEST_RATIO, flat=False):
iteration += 1
loss_train = train_model(session, estimator, X_train)
loss_test = test_model(session, estimator, X_test)
square.train_losses.append(float(loss_train))
square.test_losses.append(float(loss_test))
if loss_test < best_loss:
best_loss = loss_test
best_iteration = iteration
elapsed = int(time.time() - start)
logger.info('Training batch %d; Elapsed %ds; loss: %f (train: %f); best: %f (%d)',
iteration, elapsed, loss_test, loss_train, best_loss, best_iteration)
if iteration == MAX_ITERATIONS_PER_SQUARE:
break
logger.info('Saving grid...')
grid.append(square)
save_grid(output_path, grid)
logger.info('Saved to %s', output_path)
tf.reset_default_graph()
logger.info('DONE')
def save_grid(output_path, grid):
grid = dict(grid=[g.to_dict() for g in grid])
with open(output_path, 'w') as f:
json.dump(grid, f)
if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO, format="%(asctime)s (%(levelname)s) %(message)s")
    logger.info('Parameters Grid Search')
dir_path = os.path.dirname(os.path.realpath(__file__))
parser = argparse.ArgumentParser()
parser.add_argument('--output', default=os.path.join(dir_path, 'grid_conv2.json'))
args = parser.parse_args()
output_path = args.output
logger.info('Output file path: %s', output_path)
main(output_path)
| srom/chessbot | estimator/train/grid/__main__.py | Python | mit | 3,679 |
import unittest
from PrimeNumbers import prime
class TestPrimeNumbers(unittest.TestCase):
def setUp(self):
self.result = prime.generate_prime_numbers(100)
def test_only_integers(self):
for my_number in self.result:
self.assertIsInstance(my_number, int)
def test_first_prime_number(self):
self.assertEqual(self.result[0], 2)
def test_prime_even(self):
for my_number in self.result:
self.assertFalse(my_number > 2 and my_number % 2 == 0)
    def test_prime_multiples_of_three(self):
for my_number in self.result:
self.assertFalse(my_number > 3 and my_number % 3 == 0)
def test_negative_primes(self):
for my_number in self.result:
self.assertFalse(my_number < 1)
def test_list_length(self):
self.assertEqual(len(self.result), 25)
def test_compare_output(self):
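        # the 25 primes below 100, in order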
self.assertEqual(self.result, [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, 73, 79, 83, 89, 97])
if __name__ == '__main__':
unittest.main() | SerryJohns/SLC-JOHN-PAUL | tests.py | Python | mit | 1,067 |
import tensorflow as tf
import numpy as np
import model
import utils
import graph
from scipy import misc
import os
import time
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
from sklearn.model_selection import train_test_split
current_location = os.getcwd()
learning_rate = 0.001
epoch = 1000
batch_size = 5
split_percentage = 0.2
Train = False
test_number = 10
type_pokemon, unique_type = utils.gettype(current_location)
pokemon_pictures = utils.getpictures(current_location + '/pokemon')
output_dimension = len(unique_type)
picture_dimension = 28
pokemon_pictures_train, pokemon_pictures_test, pokemon_types_train, pokemon_types_test = train_test_split(pokemon_pictures, type_pokemon, test_size = split_percentage)
sess = tf.InteractiveSession()
model = model.Model(picture_dimension, learning_rate, output_dimension)
sess.run(tf.global_variables_initializer())
saver = tf.train.Saver(tf.global_variables())
try:
saver.restore(sess, current_location + "/model.ckpt")
print "load model.."
except:
if Train:
print "start from fresh variables"
else:
print "please train first, exiting.."
exit(0)
def train():
ACCURACY = []; EPOCH = []; LOST = []
for i in xrange(epoch):
total_cost = 0
total_accuracy = 0
last_time = time.time()
EPOCH.append(i)
for k in xrange(0, len(pokemon_pictures_train) - batch_size, batch_size):
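            # assemble one mini-batch: RGBA sprite tensors plus one-hot labels for the primary and secondary type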
emb_data = np.zeros((batch_size, picture_dimension, picture_dimension, 4), dtype = np.float32)
emb_data_label_1 = np.zeros((batch_size, output_dimension), dtype = np.float32)
emb_data_label_2 = np.zeros((batch_size, output_dimension), dtype = np.float32)
for x in xrange(batch_size):
image = misc.imread(current_location + '/pokemon/' + pokemon_pictures_train[k + x])
image = misc.imresize(image, (picture_dimension, picture_dimension))
emb_data_label_1[x, unique_type.index(pokemon_types_train[k + x, 0])] = 1.0
emb_data_label_2[x, unique_type.index(pokemon_types_train[k + x, 1])] = 1.0
emb_data[x, :, :, :] = image
_, loss = sess.run([model.optimizer, model.cost], feed_dict = {model.X : emb_data, model.Y_1 : emb_data_label_1, model.Y_2 : emb_data_label_2})
accuracy_1, accuracy_2 = sess.run([model.accuracy_1, model.accuracy_2], feed_dict = {model.X : emb_data, model.Y_1 : emb_data_label_1, model.Y_2 : emb_data_label_2})
total_cost += loss
total_accuracy += ((accuracy_1 + accuracy_2) / 2.0)
accuracy = total_accuracy / ((len(pokemon_pictures_train) - batch_size) / (batch_size * 1.0))
loss = total_cost / ((len(pokemon_pictures_train) - batch_size) / (batch_size * 1.0))
ACCURACY.append(accuracy)
LOST.append(loss)
print "epoch: " + str(i + 1) + ", loss: " + str(loss) + ", accuracy: " + str(accuracy) + ", s / epoch: " + str(time.time() - last_time)
graph.generategraph(EPOCH, ACCURACY, LOST)
saver.save(sess, current_location + "/model.ckpt")
def test():
import matplotlib.pyplot as plt
num_print = int(np.sqrt(len(pokemon_pictures_test)))
fig = plt.figure(figsize = (1.5 * num_print, 1.5 * num_print))
for k in xrange(0, num_print * num_print):
plt.subplot(num_print, num_print, k + 1)
emb_data = np.zeros((1, picture_dimension, picture_dimension, 4), dtype = np.float32)
image = misc.imread(current_location + '/pokemon/' + pokemon_pictures_test[k])
image = misc.imresize(image, (picture_dimension, picture_dimension))
emb_data[0, :, :, :] = image
y_hat_1, y_hat_2 = sess.run([model.y_hat_1, model.y_hat_2], feed_dict = {model.X : emb_data})
label_1 = unique_type[np.argmax(y_hat_1[0])]
label_2 = unique_type[np.argmax(y_hat_2[0])]
plt.imshow(image)
plt.title(label_1 + " + " + label_2)
fig.tight_layout()
plt.savefig('output.png')
plt.savefig('output.pdf')
plt.cla()
print "printing diamond-pearl.."
list_folder = os.listdir(current_location + '/diamond-pearl')
num_print = int(np.sqrt(len(list_folder)))
fig = plt.figure(figsize = (1.5 * num_print, 1.5 * num_print))
for k in xrange(0, num_print * num_print):
plt.subplot(num_print, num_print, k + 1)
emb_data = np.zeros((1, picture_dimension, picture_dimension, 4), dtype = np.float32)
image = misc.imread(current_location + '/diamond-pearl/' + list_folder[k])
image = misc.imresize(image, (picture_dimension, picture_dimension))
emb_data[0, :, :, :] = image
y_hat_1, y_hat_2 = sess.run([model.y_hat_1, model.y_hat_2], feed_dict = {model.X : emb_data})
label_1 = unique_type[np.argmax(y_hat_1[0])]
label_2 = unique_type[np.argmax(y_hat_2[0])]
plt.imshow(image)
plt.title(label_1 + " + " + label_2)
fig.tight_layout()
plt.savefig('output_diamond_pearl.png')
plt.savefig('output_diamond_pearl.pdf')
plt.cla()
def main():
if Train:
train()
else:
test()
main()
| huseinzol05/Deep-Learning-Tensorflow | deprecated/Deep Convolutional Network/pokemon-type/old-model/main.py | Python | mit | 5,482 |
# -*- coding: utf-8 -*-
"""Common variables and functions.
"""
__all__ = [
'get_var_desc',
'get_label',
]
_variable_description = {
'AH002': ('Absolute Feuchte 2 m', 'g/m³'),
'ALB': ('Albedo', '1 '),
'D': ('Diffuse Himmelsstrahlung', 'W/m²'),
'DD010': ('Windrichtung 10 m', '°'),
'DR': ('Diffuse Himmelsstrahlung (unkorrigiert)', 'W/m²'),
'DT002': ('Taupunkt 2 m', '°C'),
'E': ('Langwellige Strahlung von unten', 'W/m²'),
'ETS': ('Oberflächentemperatur (mit EPS aus Standortparametern)', '°C'),
'FB010': ('Stärkste Böen 10 m', 'm/s'),
'FF010': ('Windgeschwindigkeit 10 m', 'm/s'),
'G': ('Globalstrahlung', 'W/m²'),
'GD': ('Diffuse Himmelsstrahlung (aus Globalstrahlung)', 'W/m²'),
'GI': ('Direkte Sonnenstrahlung (aus Globalstrahlung)', 'W/m²'),
'GND': ('Sonnenschein nicht möglich (Nacht)', ''),
'GP': ('Relative Globalstrahlung', '%'),
'GSD': ('Sonnenscheindetektion', ''),
'GSH': ('Sonnenscheindauer', 'h'),
'GSPT': ('Relative Sonnenscheindauer bzgl. 1 Tag', '%'),
'GSPX': ('Relative Sonnenscheindauer', '%'),
'GSW': ('Sonnenscheinschwellwert', 'W/m²'),
'GSZ': ('Summierte Sonnenscheindetektionen', '1'),
'GTD': ('Schattendetektion', ''),
'GTH': ('Schattendauer', 'h'),
'GTZ': ('Summierte Schattendetektionen', '1'),
'GXD': ('Sonnenschein möglich', ''),
'GXH': ('Mögliche Sonnenscheindauer', 'h'),
'GXT': ('Max. mögliche Tagessonnenscheindauer', 'h'),
'GXZ': ('Summierter möglicher Sonnenschein', '1'),
'HTD600': ('HMP-Mast Taupunkt, 6 m', '°C'),
'HTT050': ('HMP-Mast Lufttemperatur, 0,5 m', '°C'),
'HTT200': ('HMP-Mast Lufttemperatur, 2 m', '°C'),
'HTT600': ('HMP-Mast Lufttemperatur, 6 m', '°C'),
'I': ('Direkte Sonnenstrahlung', 'W/m²'),
'IC': ('Direkte Sonnenstrahlung bei wolkenlosen Bedingungen', 'W/m²'),
'L': ('Langwellige Strahlung von oben', 'W/m²'),
'LTS': ('Himmelstemperatur (mit EPS = 1)', '°C'),
'MG': ('Theoretische Globalstrahlung bei wolkenlosem Himmel', 'W/m²'),
'MH002': ('Massenmischungsverhältnis 2 m', 'g/kg'),
'P000': ('Luftdruck (Meereshöhe)', 'hPa'),
'P007': ('Luftdruck (Stationshöhe)', 'hPa'),
'PBA': ('Steigwinkel Pilotballon', '°'),
'PBG': ('Füllgewicht Pilotballon', 'g'),
'PBW': ('Steiggeschwindigkeit Pilotballon', 'm/min'),
'Q': ('Strahlungsbilanz', 'W/m²'),
'R': ('Kurzwellige Strahlung von unten', 'W/m²'),
'RH002': ('Relative Feuchte 2 m', '%'),
'RK': ('Niederschlagsmenge ab 0 Uhr', 'mm'),
'RR': ('Niederschlagsintensität', 'mm'),
'SH002': ('Spezifische Feuchte 2 m', 'g/kg'),
'SOLH': ('Höhenwinkel der Sonne', '°'),
'TT002': ('Lufttemperatur 2 m', '°C'),
'VP002': ('Wasserdampfdruck 2 m', 'hPa'),
}
def get_var_desc():
"""Get a copy of the default variable description.
Returns:
dict: Dict values are tuples containing full variable name and unit.
The keys are the abbrevations used in `MASTER.txt`.
Examples:
Add description for non-default variables:
>>> desc = lx.plots.get_var_desc()
>>> desc.update({'FOO': ('New variable', 'Unit')})
>>> lx.plots.get_label('FOO', var_desc=desc)
'New variable [Unit]'
"""
return _variable_description.copy()
def get_label(key, label='{name} [{unit}]', var_desc=None):
"""Return label for variable key.
Parameters:
key (str): Variable key.
label (str): Format string to create the label.
The variables `name` and `unit` can be used.
var_desc (dict): Dictionary with variable descriptions.
The value behind the key has to be a tuple of strings:
dict[key] = (name, unit)
If `None` a default set of variables is used.
Returns:
str: Variable specific label.
Examples:
Get default axis label for variable key "TT002":
>>> get_label('TT002')
        'Lufttemperatur 2 m [°C]'
Pass different format string for label:
>>> get_label('TT002', label='{name} in {unit}')
        'Lufttemperatur 2 m in °C'
Set description for non-default variables:
>>> desc = lx.plots.get_var_desc()
>>> desc.update({'FOO': ('New variable', 'Unit')})
>>> get_label('FOO', var_desc=desc)
'New variable [Unit]'
"""
if var_desc is None:
var_desc = _variable_description
name, unit = var_desc.get(key, (key, ''))
kwargs = {
'name': name,
'unit': unit,
}
return label.format(**kwargs)
| lkluft/lehrex | lehrex/plots/common.py | Python | mit | 4,602 |
from setuptools import setup
def readme():
with open('README.md') as f:
return f.read()
setup(name='monopoly',
version='0.1.1',
description='A karma bot for Hangouts, Slack, and IRC.',
long_description=readme(),
url='https://github.com/laneshetron/monopoly.git',
author='Lane Shetron',
author_email='[email protected]',
license='MIT',
packages=['monopoly'],
install_requires=['hangups==0.1.0-monopoly', 'websocket-client', 'fuzzywuzzy', 'python-Levenshtein'],
dependency_links=['https://github.com/laneshetron/hangups/archive/v0.1.0-monopoly.zip#egg=hangups-0.1.0-monopoly'],
include_package_data=True)
| laneshetron/monopoly | setup.py | Python | mit | 687 |
#!/usr/bin/python2
# -*- coding: utf-8 -*-
import time
import datetime
import inspect
import simplejson as json
import sys
sys.path.append("../../")
import sensnode.weather
#import getWeatherCurrent
api = 'f86d1e8be4de0136'
STIMAZOWIE117='http://api.wunderground.com/api/'+ api +'/conditions/q/pws:IMAZOWIE117.json'
STIWARSZAW408='http://api.wunderground.com/api/'+ api +'/conditions/q/pws:IWARSZAW408.json'
def outdoor(data):
STATION1 = sensnode.weather.getWeatherCurrent(STIMAZOWIE117)
STATION2 = sensnode.weather.getWeatherCurrent(STIWARSZAW408)
try:
#wind speed
wind = float(STATION1['wind_kph'])
        # solar radiation, W/m2
solarradiation = int(STATION1['solarradiation'])
#wind_degrees
wind_degrees = int(STATION1['wind_degrees'])
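        # precipitation and dew point are averaged over the two nearby stations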
#mm / h
precip_1hr = round((((float(STATION1['precip_1hr_metric']))+(float(STATION2['precip_1hr_metric'])))/2),2)
# mm day
precip_today = round((((float(STATION1['precip_today_metric']))+(float(STATION2['precip_today_metric'])))/2),2)
dewpoint = round((((float(STATION1['dewpoint_c']))+(float(STATION2['dewpoint_c'])))/2),2)
except TypeError:
pass
except IndentationError:
pass
except ValueError:
precip_today = 0
precip_1hr = 0
    '''Measurement of:
    - light,
    - humidity
    - temperature
    - pressure
    - battery status
    - battery voltage
>> a = "OK 2 0 0 70 1 242 0 201 38 0 15 17"
>> raw = a.split(" ")
>> weathernode(raw, "weathernode")
'{"name": "weathernode", "temp": "242", "lobat": "0", "humi": "326", "timestamp": 1364553092, "light": "0", "press": "9929", "batvol": "4367"}'
'''
    a = float(data[7]) # battery voltage (batvol)
    f = float(data[2]) # soil humidity (groundhumi)
    b = float(data[3]) # humidity (humi)
    c = float(data[4]) # temperature (temp)
    d = float(data[5]) # insolation (sun)
    h = float(data[9]) # soil temperature (tempground)
e = precip_1hr
g = precip_today
i = wind
j = solarradiation
k = wind_degrees
l = dewpoint
#nodeid = str(data[1])
    name = inspect.stack()[0][3] # taken from the function name
    timestamp = int(time.mktime(datetime.datetime.now().timetuple())) # unix timestamp
template = ({
'name':name,
'batvol': a,
'sun': d,
'thumi': b,
'temp': c,
'tgroundhumi': f,
'tempground': h,
'zpreciphr': e,
'zpreciptoday': g,
'zdewpoint': l,
'solarradiation': j,
'wind': i,
'winddegrees': k,
'timestamp':timestamp
})
return dict((k,v) for (k,v) in template.iteritems())
| roblad/sensmon | sensnode/decoders/outdoor.py | Python | mit | 2,695 |
#!/usr/bin/env python3
# Copyright (c) 2009-2019 The Bitcoin Core developers
# Copyright (c) 2014-2019 The DigiByte Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test node responses to invalid blocks.
In this test we connect to one node over p2p, and test block requests:
1) Valid blocks should be requested and become chain tip.
2) Invalid block with duplicated transaction should be re-requested.
3) Invalid block with bad coinbase value should be rejected and not
re-requested.
"""
import copy
from test_framework.blocktools import create_block, create_coinbase, create_tx_with_script
from test_framework.messages import COIN
from test_framework.mininode import P2PDataStore
from test_framework.test_framework import DigiByteTestFramework
from test_framework.util import assert_equal
class InvalidBlockRequestTest(DigiByteTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.setup_clean_chain = True
self.extra_args = [["-whitelist=127.0.0.1"]]
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
# Add p2p connection to node0
node = self.nodes[0] # convenience reference to the node
node.add_p2p_connection(P2PDataStore())
best_block = node.getblock(node.getbestblockhash())
tip = int(node.getbestblockhash(), 16)
height = best_block["height"] + 1
block_time = best_block["time"] + 1
self.log.info("Create a new block with an anyone-can-spend coinbase")
height = 1
block = create_block(tip, create_coinbase(height), block_time)
block.solve()
# Save the coinbase for later
block1 = block
tip = block.sha256
node.p2p.send_blocks_and_test([block1], node, success=True)
self.log.info("Mature the block.")
node.generate(100)
best_block = node.getblock(node.getbestblockhash())
tip = int(node.getbestblockhash(), 16)
height = best_block["height"] + 1
block_time = best_block["time"] + 1
# Use merkle-root malleability to generate an invalid block with
# same blockheader.
# Manufacture a block with 3 transactions (coinbase, spend of prior
# coinbase, spend of that spend). Duplicate the 3rd transaction to
# leave merkle root and blockheader unchanged but invalidate the block.
self.log.info("Test merkle root malleability.")
block2 = create_block(tip, create_coinbase(height), block_time)
block_time += 1
# b'0x51' is OP_TRUE
tx1 = create_tx_with_script(block1.vtx[0], 0, script_sig=b'\x51', amount=50 * COIN)
tx2 = create_tx_with_script(tx1, 0, script_sig=b'\x51', amount=50 * COIN)
block2.vtx.extend([tx1, tx2])
block2.hashMerkleRoot = block2.calc_merkle_root()
block2.rehash()
block2.solve()
orig_hash = block2.sha256
block2_orig = copy.deepcopy(block2)
# Mutate block 2
block2.vtx.append(tx2)
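        # duplicating the last transaction leaves the merkle root unchanged but invalidates the block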
assert_equal(block2.hashMerkleRoot, block2.calc_merkle_root())
assert_equal(orig_hash, block2.rehash())
assert block2_orig.vtx != block2.vtx
node.p2p.send_blocks_and_test([block2], node, success=False, reject_code=16, reject_reason=b'bad-txns-duplicate')
# Check transactions for duplicate inputs
self.log.info("Test duplicate input block.")
block2_orig.vtx[2].vin.append(block2_orig.vtx[2].vin[0])
block2_orig.vtx[2].rehash()
block2_orig.hashMerkleRoot = block2_orig.calc_merkle_root()
block2_orig.rehash()
block2_orig.solve()
node.p2p.send_blocks_and_test([block2_orig], node, success=False, reject_reason=b'bad-txns-inputs-duplicate')
self.log.info("Test very broken block.")
block3 = create_block(tip, create_coinbase(height), block_time)
block_time += 1
block3.vtx[0].vout[0].nValue = 100 * COIN # Too high!
block3.vtx[0].sha256 = None
block3.vtx[0].calc_sha256()
block3.hashMerkleRoot = block3.calc_merkle_root()
block3.rehash()
block3.solve()
node.p2p.send_blocks_and_test([block3], node, success=False, reject_code=16, reject_reason=b'bad-cb-amount')
if __name__ == '__main__':
InvalidBlockRequestTest().main()
| digibyte/digibyte | test/functional/p2p_invalid_block.py | Python | mit | 4,441 |
# -*- coding: utf-8 -*-
from rest_framework import status
from rest_framework.reverse import reverse
from rest_framework.test import APITestCase
from ....factories import (DiscountFactory, DiscountRegistrationFactory,
TicketTypeFactory, UserFactory)
class DiscountApiTests(APITestCase):
def test_list_unauthenticated(self):
url = reverse('v1:discount-list')
response = self.client.get(url)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_list_authenticated(self):
url = reverse('v1:discount-list')
user = UserFactory()
self.client.force_authenticate(user)
response = self.client.get(url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data, [])
def test_list_authenticated_unowned(self):
url = reverse('v1:discount-list')
user = UserFactory()
# Creates a Discount "owned" by someone else
unowned_discount = DiscountFactory()
self.client.force_authenticate(user)
response = self.client.get(url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data, [])
def test_list_authenticated_owned(self):
url = reverse('v1:discount-list')
user = UserFactory()
owned_discount = DiscountFactory()
owned_discount.ticket_type.event.organization.admins.add(user)
unowned_discount = DiscountFactory()
self.client.force_authenticate(user)
response = self.client.get(url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data), 1)
self.assertEqual(response.data[0]['id'], str(owned_discount.id))
def test_create_unauthenticated(self):
url = reverse('v1:discount-list')
# I'm lazy. Let's use the factory, but don't save the object.
temp_discount = DiscountFactory.build()
request_data = {
'ticket_type': reverse(
'v1:tickettype-detail',
kwargs={'pk': temp_discount.ticket_type.pk}),
'union': reverse(
'v1:union-detail', kwargs={'pk': temp_discount.union.pk}),
'amount': temp_discount.amount
}
response = self.client.post(url, data=request_data)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_create_authenticated_unowned_ticket_type(self):
url = reverse('v1:discount-list')
user = UserFactory()
# I'm lazy. Let's use the factory, but don't save the object.
temp_discount = DiscountFactory.build()
request_data = {
'ticket_type': reverse(
'v1:tickettype-detail',
kwargs={'pk': temp_discount.ticket_type.pk}),
'union': reverse(
'v1:union-detail', kwargs={'pk': temp_discount.union.pk}),
'amount': temp_discount.amount
}
self.client.force_authenticate(user)
response = self.client.post(url, data=request_data)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_create_authenticated_owned_ticket_type(self):
url = reverse('v1:discount-list')
user = UserFactory()
owned_ticket_type = TicketTypeFactory()
owned_ticket_type.event.organization.admins.add(user)
# I'm lazy. Let's use the factory, but don't save the object.
temp_discount = DiscountFactory.build(ticket_type=owned_ticket_type)
request_data = {
'ticket_type': reverse(
'v1:tickettype-detail',
kwargs={'pk': temp_discount.ticket_type.pk}),
'union': reverse(
'v1:union-detail', kwargs={'pk': temp_discount.union.pk}),
'amount': temp_discount.amount
}
self.client.force_authenticate(user)
response = self.client.post(url, data=request_data)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
def test_retrieve_unauthenticated(self):
discount = DiscountFactory()
url = reverse('v1:discount-detail', kwargs={'pk': discount.pk})
response = self.client.get(url)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_retrieve_authenticated_unowned(self):
user = UserFactory()
discount = DiscountFactory()
url = reverse('v1:discount-detail', kwargs={'pk': discount.pk})
self.client.force_authenticate(user)
response = self.client.get(url)
# Authenticated requests should be treated as 404 when retrieving an
# unowned discount
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_retrieve_authenticated_owned(self):
user = UserFactory()
discount = DiscountFactory()
discount.ticket_type.event.organization.admins.add(user)
url = reverse('v1:discount-detail', kwargs={'pk': discount.pk})
self.client.force_authenticate(user)
response = self.client.get(url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data['id'], str(discount.id))
def test_update_unauthenticated(self):
original_ticket_type = TicketTypeFactory()
discount = DiscountFactory(ticket_type=original_ticket_type)
url = reverse('v1:discount-detail', kwargs={'pk': discount.pk})
new_ticket_type = TicketTypeFactory()
new_ticket_type_url = reverse('v1:tickettype-detail',
kwargs={'pk': new_ticket_type.pk})
# Request with changed ticket type
request_data = {
'ticket_type': new_ticket_type_url,
'union': reverse('v1:union-detail',
kwargs={'pk': discount.union.pk}),
'amount': discount.amount
}
response = self.client.put(url, data=request_data)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_update_authenticated_unowned_to_unowned_ticket_type(self):
user = UserFactory()
original_ticket_type = TicketTypeFactory() # Unowned
discount = DiscountFactory(ticket_type=original_ticket_type)
url = reverse('v1:discount-detail', kwargs={'pk': discount.pk})
new_ticket_type = TicketTypeFactory() # Unowned
new_ticket_type_url = reverse('v1:tickettype-detail',
kwargs={'pk': new_ticket_type.pk})
# Request with changed ticket type
request_data = {
'ticket_type': new_ticket_type_url,
'union': reverse('v1:union-detail',
kwargs={'pk': discount.union.pk}),
'amount': discount.amount
}
self.client.force_authenticate(user)
response = self.client.put(url, data=request_data)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_update_authenticated_unowned_to_owned_ticket_type(self):
user = UserFactory()
original_ticket_type = TicketTypeFactory() # Unowned
discount = DiscountFactory(ticket_type=original_ticket_type)
url = reverse('v1:discount-detail', kwargs={'pk': discount.pk})
new_ticket_type = TicketTypeFactory() # Owned
new_ticket_type.event.organization.admins.add(user)
new_ticket_type_url = reverse('v1:tickettype-detail',
kwargs={'pk': new_ticket_type.pk})
# Request with changed ticket type
request_data = {
'ticket_type': new_ticket_type_url,
'union': reverse('v1:union-detail',
kwargs={'pk': discount.union.pk}),
'amount': discount.amount
}
self.client.force_authenticate(user)
response = self.client.put(url, data=request_data)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_update_authenticated_owned_to_unowned_ticket_type(self):
user = UserFactory()
original_ticket_type = TicketTypeFactory() # Owned
original_ticket_type.event.organization.admins.add(user)
discount = DiscountFactory(ticket_type=original_ticket_type)
url = reverse('v1:discount-detail', kwargs={'pk': discount.pk})
new_ticket_type = TicketTypeFactory() # Unowned
new_ticket_type_url = reverse('v1:tickettype-detail',
kwargs={'pk': new_ticket_type.pk})
# Request with changed ticket type
request_data = {
'ticket_type': new_ticket_type_url,
'union': reverse('v1:union-detail',
kwargs={'pk': discount.union.pk}),
'amount': discount.amount
}
self.client.force_authenticate(user)
response = self.client.put(url, data=request_data)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_update_authenticated_owned_to_owned_ticket_type(self):
user = UserFactory()
original_ticket_type = TicketTypeFactory() # Owned
original_ticket_type.event.organization.admins.add(user)
discount = DiscountFactory(ticket_type=original_ticket_type)
url = reverse('v1:discount-detail', kwargs={'pk': discount.pk})
new_ticket_type = TicketTypeFactory() # Owned
new_ticket_type.event.organization.admins.add(user)
new_ticket_type_url = reverse('v1:tickettype-detail',
kwargs={'pk': new_ticket_type.pk})
# Request with changed ticket type
request_data = {
'ticket_type': new_ticket_type_url,
'union': reverse('v1:union-detail',
kwargs={'pk': discount.union.pk}),
'amount': discount.amount
}
self.client.force_authenticate(user)
response = self.client.put(url, data=request_data)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_delete_unauthenticated(self):
discount = DiscountFactory()
url = reverse('v1:discount-detail', kwargs={'pk': discount.pk})
response = self.client.delete(url)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_delete_authenticated_unowned_without_registrations(self):
user = UserFactory()
discount = DiscountFactory()
url = reverse('v1:discount-detail', kwargs={'pk': discount.pk})
self.client.force_authenticate(user)
response = self.client.delete(url)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_delete_authenticated_unowned_with_registrations(self):
user = UserFactory()
discount = DiscountFactory()
DiscountRegistrationFactory(discount=discount)
url = reverse('v1:discount-detail', kwargs={'pk': discount.pk})
self.client.force_authenticate(user)
response = self.client.delete(url)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_delete_authenticated_owned_without_registrations(self):
user = UserFactory()
discount = DiscountFactory()
discount.ticket_type.event.organization.admins.add(user)
url = reverse('v1:discount-detail', kwargs={'pk': discount.pk})
self.client.force_authenticate(user)
response = self.client.delete(url)
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
def test_delete_authenticated_owned_with_registrations(self):
user = UserFactory()
discount = DiscountFactory()
discount.ticket_type.event.organization.admins.add(user)
DiscountRegistrationFactory(discount=discount)
url = reverse('v1:discount-detail', kwargs={'pk': discount.pk})
self.client.force_authenticate(user)
response = self.client.delete(url)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
| karservice/kobra | kobra/api/v1/tests/test_discounts.py | Python | mit | 12,169 |
from time import sleep
from sys import exc_info, exit
from config_parser import get_config_key
from csv_parser import get_watchlist, update_last_known_filenames
from process_latest_release_info import download_and_or_notify
from html_parser import get_latest_release_info
from log import log_error, log_info, log_success
log_success("CeilingNyaa is starting...")
try:
while True:
log_info("Main loop starting")
try:
watchlist = get_watchlist()
except IOError as e:
raise IOError(log_error("Error getting watchlist info: " + str(e)))
try:
update_last_known_filenames(download_and_or_notify(get_latest_release_info(watchlist)))
except KeyboardInterrupt as e:
raise e
except IOError as e:
raise IOError(log_error("Error saving watchlist info: " + str(e)))
log_info("Main loop done")
        run_once = get_config_key("RunOnce", default=False, isBoolean=True)
if run_once:
log_info("run_once is True, breaking out of main loop")
break
        # int(False) is 0, so a missing key previously meant sleep(0) and a
        # busy loop; fall back to a 1-minute minimum interval instead.
        minutes_to_sleep = max(1, int(get_config_key("CheckIntervalInMinutes", default=1)))
log_info("Next run will be in " + str(minutes_to_sleep) + " minutes")
sleep(minutes_to_sleep * 60)
except KeyboardInterrupt:
log_info("KeyboardInterrupt received")
except IOError:
log_error("CeilingNyaa is exiting...")
exit(1)
except:
log_error("Unexpected error: " + str(exc_info()[0]))
log_error("CeilingNyaa is exiting...")
exit(2)
log_success("CeilingNyaa is exiting...")
exit(0)
| SurgamIdentidem/CeilingNyaa | ceilingnyaa/__main__.py | Python | mit | 1,612 |
from django.shortcuts import render, get_object_or_404
from django.http import HttpResponse, HttpResponseRedirect
from django.urls import reverse
from django.views import generic
from django.utils import timezone
from .models import Question, Choice
# Create your views here.
class IndexView(generic.ListView):
    template_name = 'polls/index.html'
context_object_name = 'latest_question_list'
def get_queryset(self):
"""
Return last 5 published questions, not including future questions.
"""
return Question.objects.filter(
pub_date__lte=timezone.now()).order_by('-pub_date')[:5]
class DetailView(generic.DetailView):
model = Question
template_name = 'polls/detail.html'
def get_queryset(self):
"""
Excludes unpublished questions.
"""
return Question.objects.filter(pub_date__lte=timezone.now())
class ResultsView(generic.DetailView):
model = Question
template_name = 'polls/results.html'
def vote(request, question_id):
question = get_object_or_404(Question, pk=question_id)
try:
selected_choice = question.choice_set.get(pk=request.POST['choice'])
except (KeyError, Choice.DoesNotExist):
# Redisplay the question voting form
return render(request, 'polls/detail.html', {
'question': question,
'error_message': "No choice selected.",
})
else:
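        # NOTE: this read-modify-write is racy under concurrent votes; an
        # F('votes') + 1 expression (django.db.models.F) would update atomically.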
selected_choice.votes += 1
selected_choice.save()
        # Always return an HttpResponseRedirect after successfully handling
        # POST data; this prevents the data being posted twice if the user
        # hits the Back button.
return HttpResponseRedirect(reverse('polls:results', args=(question.id,)))
| samcheck/djangotest | mysite/polls/views.py | Python | mit | 1,701 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from ._models_py3 import CheckTrafficManagerRelativeDnsNameAvailabilityParameters
from ._models_py3 import CloudErrorBody
from ._models_py3 import DeleteOperationResult
from ._models_py3 import DnsConfig
from ._models_py3 import Endpoint
from ._models_py3 import EndpointPropertiesCustomHeadersItem
from ._models_py3 import EndpointPropertiesSubnetsItem
from ._models_py3 import HeatMapEndpoint
from ._models_py3 import HeatMapModel
from ._models_py3 import MonitorConfig
from ._models_py3 import MonitorConfigCustomHeadersItem
from ._models_py3 import MonitorConfigExpectedStatusCodeRangesItem
from ._models_py3 import Profile
from ._models_py3 import ProfileListResult
from ._models_py3 import ProxyResource
from ._models_py3 import QueryExperience
from ._models_py3 import Region
from ._models_py3 import Resource
from ._models_py3 import TrackedResource
from ._models_py3 import TrafficFlow
from ._models_py3 import TrafficManagerGeographicHierarchy
from ._models_py3 import TrafficManagerNameAvailability
from ._models_py3 import UserMetricsModel
from ._traffic_manager_management_client_enums import (
AllowedEndpointRecordType,
EndpointMonitorStatus,
EndpointStatus,
EndpointType,
MonitorProtocol,
ProfileMonitorStatus,
ProfileStatus,
TrafficRoutingMethod,
TrafficViewEnrollmentStatus,
)
__all__ = [
'CheckTrafficManagerRelativeDnsNameAvailabilityParameters',
'CloudErrorBody',
'DeleteOperationResult',
'DnsConfig',
'Endpoint',
'EndpointPropertiesCustomHeadersItem',
'EndpointPropertiesSubnetsItem',
'HeatMapEndpoint',
'HeatMapModel',
'MonitorConfig',
'MonitorConfigCustomHeadersItem',
'MonitorConfigExpectedStatusCodeRangesItem',
'Profile',
'ProfileListResult',
'ProxyResource',
'QueryExperience',
'Region',
'Resource',
'TrackedResource',
'TrafficFlow',
'TrafficManagerGeographicHierarchy',
'TrafficManagerNameAvailability',
'UserMetricsModel',
'AllowedEndpointRecordType',
'EndpointMonitorStatus',
'EndpointStatus',
'EndpointType',
'MonitorProtocol',
'ProfileMonitorStatus',
'ProfileStatus',
'TrafficRoutingMethod',
'TrafficViewEnrollmentStatus',
]
| Azure/azure-sdk-for-python | sdk/trafficmanager/azure-mgmt-trafficmanager/azure/mgmt/trafficmanager/models/__init__.py | Python | mit | 2,693 |
import os
import sys
import signal
import requests
from multiprocessing import Pool
def init_worker():
    # Each pool worker ignores SIGINT so that Ctrl-C is delivered only to
    # the parent process, which handles it via KeyboardInterrupt below.
    # (Ignoring SIGINT globally, as before, made the except clause dead code.)
    signal.signal(signal.SIGINT, signal.SIG_IGN)
url = os.environ.get("UNOSERVICE_URL")
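# `url` comes from the UNOSERVICE_URL environment variable; e.g.
# "http://localhost:3000/convert" (placeholder -- the real endpoint depends
# on the unoservice deployment).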
def request(i):
path = sys.argv[1]
files = {"file": open(path, "rb")}
data = {"extension": "docx"}
# print('send request')
res = requests.post(url, files=files, data=data)
# message = res.text if res.status_code != 200 else ''
print(res.status_code, res.content[:20])
# print(res.content == open(path, 'rb').read())
pool = Pool(20, initializer=init_worker)
try:
pool.map(request, range(10000))
except KeyboardInterrupt:
pool.terminate()
pool.join()
# request(5)
| alephdata/ingestors | convert/test.py | Python | mit | 655 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import csv
import numpy as np
class AnalytikJenaqTower2:
def __init__(self):
self.name = "Analytik Jena qTower 2.0/2.2"
self.providesTempRange = False
self.providesDeltaT = False
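    # Expected CSV layout (';'-separated): each row is one well, with the
    # well name in column 0 followed by one fluorescence value per read.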
    def loadData(self, filename, reads, wells):
        with open(filename, 'r') as f:
            reader = csv.reader(f, delimiter=';', quoting=csv.QUOTE_NONE)
            for i, row in enumerate(reader):
                temp = np.zeros(reads, dtype=float)
                wells[i].name = row[0]
                for read in range(reads):
                    try:
                        temp[read] = row[read + 1]
                    except (IndexError, ValueError):
                        temp[read] = 0.0
                wells[i].raw = temp
        return wells
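if __name__ == '__main__':
    # Minimal usage sketch (not part of the instrument driver). It assumes a
    # trivial stand-in for the Well objects used elsewhere in PyDSF; the real
    # application supplies its own well instances, and 'export.csv' is a
    # placeholder filename.
    class _Well:
        def __init__(self):
            self.name = ''
            self.raw = None
    instrument = AnalytikJenaqTower2()
    wells = [_Well() for _ in range(96)]  # 96-well plate, assumed
    wells = instrument.loadData('export.csv', 40, wells)
    print(wells[0].name, wells[0].raw[:5])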
| Athemis/PyDSF | instruments/analytikJenaqTower2.py | Python | mit | 956 |
#! /bin/python
# The original file ended in a bare "import" (a SyntaxError); completed as a
# minimal runnable script -- the printed text is a placeholder.
print("hello from PiFlower")
| likewwddaa/PiFlower | hello.py | Python | mit | 23 |
# -*- coding: utf-8 -*-
from .base import FunctionalTestCase
from .pages import game
DATE_REGEX = r'\[\d{1,2}-\d{1,2} \d{2}:\d{2}\] '
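# Matches the timestamp prefix the app prepends to log entries,
# e.g. "[3-14 09:22] " -- month-day plus hour:minute.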
class UndoTests(FunctionalTestCase):
def test_can_undo_player_transfering_money_to_bank(self):
self.story('Alice is a user who has a game with a player')
self.browser.get(self.server_url)
homepage = game.Homepage(self.browser)
homepage.bank_cash.clear()
homepage.bank_cash.send_keys('1000\n')
game_uuid = self.browser.current_url[-36:]
self.create_player(game_uuid, 'Alice', cash=100)
self.story('Alice transfers some money to the bank')
game_page = game.GamePage(self.browser)
game_page.reload_game.click()
transfer_form = game.TransferForm(self.browser)
alice = game_page.get_players()[0]
alice['row'].click()
transfer_form.amount.send_keys('50\n')
alice = game_page.get_players()[0]
self.assertEqual(game_page.bank_cash.text, '1050')
self.assertEqual(alice['cash'].text, '50')
self.assertEqual(len(game_page.log), 2)
self.assertRegex(game_page.log[0].text,
DATE_REGEX + 'Alice transfered 50 to the bank')
self.story('There is an undo button, once it is clicked the game is '
'reverted to the previous state')
game_page.undo.click()
alice = game_page.get_players()[0] # Get DOM updates
self.assertEqual(game_page.bank_cash.text, '1000')
self.assertEqual(alice['cash'].text, '100')
self.assertEqual(len(game_page.log), 1)
self.story('There is also a redo button, when that is clicked the '
'transfer happens again')
game_page.redo.click()
alice = game_page.get_players()[0] # Get DOM updates
self.assertEqual(game_page.bank_cash.text, '1050')
self.assertEqual(alice['cash'].text, '50')
self.assertEqual(len(game_page.log), 2)
self.assertRegex(game_page.log[0].text,
DATE_REGEX + 'Alice transfered 50 to the bank')
def test_can_undo_company_transfering_money_to_bank(self):
self.story('Alice is a user who has a game with a company')
self.browser.get(self.server_url)
homepage = game.Homepage(self.browser)
homepage.start_button.click()
game_uuid = self.browser.current_url[-36:]
self.create_company(game_uuid, 'B&O', cash=1000)
self.story('The B&O transfers some money to the bank')
game_page = game.GamePage(self.browser)
game_page.reload_game.click()
transfer_form = game.TransferForm(self.browser)
bno = game_page.get_companies()[0]
bno['elem'].click()
transfer_form.amount.send_keys('30\n')
bno = game_page.get_companies()[0]
self.assertEqual(game_page.bank_cash.text, '12030')
self.assertEqual(bno['cash'].text, '970')
self.assertEqual(len(game_page.log), 2)
self.assertRegex(game_page.log[0].text,
DATE_REGEX + 'B&O transfered 30 to the bank')
self.story('Click the undo button, the game state is reverted')
game_page.undo.click()
bno = game_page.get_companies()[0]
self.assertEqual(game_page.bank_cash.text, '12000')
        self.assertEqual(bno['cash'].text, '1000')
self.assertEqual(len(game_page.log), 1)
self.story('Click the redo button, the transfer is done again')
game_page.redo.click()
bno = game_page.get_companies()[0]
self.assertEqual(game_page.bank_cash.text, '12030')
self.assertEqual(bno['cash'].text, '970')
self.assertEqual(len(game_page.log), 2)
self.assertRegex(game_page.log[0].text,
DATE_REGEX + 'B&O transfered 30 to the bank')
def test_can_undo_player_transfering_money_to_company(self):
self.story('Alice is a user who has a game')
self.browser.get(self.server_url)
homepage = game.Homepage(self.browser)
homepage.start_button.click()
game_uuid = self.browser.current_url[-36:]
self.create_player(game_uuid, 'Alice', cash=100)
self.create_company(game_uuid, 'B&O', cash=1000)
self.story('Alice transfers some money to the B&O')
game_page = game.GamePage(self.browser)
transfer_form = game.TransferForm(self.browser)
game_page.reload_game.click()
alice = game_page.get_players()[0]
alice['row'].click()
transfer_form.select_target('B&O')
transfer_form.amount.send_keys('40\n')
self.story('Verify transfer happened')
alice = game_page.get_players()[0]
bno = game_page.get_companies()[0]
self.verify_player(alice, cash=60)
self.verify_company(bno, cash=1040)
self.assertEqual(len(game_page.log), 2)
self.story('Click the undo button, the game state is reverted')
game_page.undo.click()
alice = game_page.get_players()[0]
bno = game_page.get_companies()[0]
self.verify_player(alice, cash=100)
self.verify_company(bno, cash=1000)
self.assertEqual(len(game_page.log), 1)
self.story('Click the redo button, the transfer is done again')
game_page.redo.click()
alice = game_page.get_players()[0]
bno = game_page.get_companies()[0]
self.verify_player(alice, cash=60)
self.verify_company(bno, cash=1040)
self.assertEqual(len(game_page.log), 2)
def test_can_undo_company_transfering_money_to_player(self):
self.story('Alice is a user who has a game')
self.browser.get(self.server_url)
homepage = game.Homepage(self.browser)
homepage.start_button.click()
game_uuid = self.browser.current_url[-36:]
self.create_player(game_uuid, 'Alice', cash=100)
self.create_company(game_uuid, 'B&O', cash=1000)
self.story('Alice transfers some money to the B&O')
game_page = game.GamePage(self.browser)
transfer_form = game.TransferForm(self.browser)
game_page.reload_game.click()
bno = game_page.get_companies()[0]
bno['elem'].click()
transfer_form.select_target('Alice')
transfer_form.amount.send_keys('20\n')
self.story('Verify transfer happened')
alice = game_page.get_players()[0]
bno = game_page.get_companies()[0]
self.verify_player(alice, cash=120)
self.verify_company(bno, cash=980)
self.assertEqual(len(game_page.log), 2)
self.story('Click the undo button, the game state is reverted')
game_page.undo.click()
alice = game_page.get_players()[0]
bno = game_page.get_companies()[0]
self.verify_player(alice, cash=100)
self.verify_company(bno, cash=1000)
self.assertEqual(len(game_page.log), 1)
self.story('Click the redo button, the transfer is done again')
game_page.redo.click()
alice = game_page.get_players()[0]
bno = game_page.get_companies()[0]
self.verify_player(alice, cash=120)
self.verify_company(bno, cash=980)
def test_can_undo_player_buying_share_from_ipo(self):
self.story('Alice is a user who has a game')
self.browser.get(self.server_url)
homepage = game.Homepage(self.browser)
homepage.start_button.click()
game_uuid = self.browser.current_url[-36:]
self.create_player(game_uuid, 'Alice', cash=100)
self.create_company(game_uuid, 'B&O', cash=0, ipo_shares=3)
self.story('Alice buys a share from the B&Os IPO')
game_page = game.GamePage(self.browser)
share_form = game.ShareForm(self.browser)
game_page.reload_game.click()
bno = game_page.get_companies()[0]
bno.set_value(10)
alice = game_page.get_players()[0]
alice['row'].click()
share_form.select_company('B&O')
share_form.select_source('ipo')
share_form.shares.clear()
share_form.shares.send_keys('2\n')
self.story('Verify that Alice bought the share')
bno = game_page.get_companies()[0]
alice = game_page.get_players()[0]
self.verify_player(alice, cash=80, shares=['B&O 20%'])
self.verify_company(bno, cash=0, ipo_shares=1, bank_shares=0)
self.assertEqual(game_page.bank_cash.text, '12020')
self.assertEqual(len(game_page.log), 2)
self.story('Click the undo button, the game state is reverted')
game_page.undo.click()
bno = game_page.get_companies()[0]
alice = game_page.get_players()[0]
self.verify_player(alice, cash=100, shares=[])
self.verify_company(bno, cash=0, ipo_shares=3, bank_shares=0)
self.assertEqual(game_page.bank_cash.text, '12000')
self.assertEqual(len(game_page.log), 1)
self.story('Click the redo button, the transfer is done again')
game_page.redo.click()
bno = game_page.get_companies()[0]
alice = game_page.get_players()[0]
self.verify_player(alice, cash=80, shares=['B&O 20%'])
self.verify_company(bno, cash=0, ipo_shares=1, bank_shares=0)
self.assertEqual(game_page.bank_cash.text, '12020')
self.assertEqual(len(game_page.log), 2)
def test_can_undo_company_buying_share_from_bank(self):
self.story('Alice is a user who has a game')
self.browser.get(self.server_url)
homepage = game.Homepage(self.browser)
homepage.start_button.click()
game_uuid = self.browser.current_url[-36:]
self.create_company(game_uuid, 'CPR', cash=0, bank_shares=5,
ipo_shares=5)
self.create_company(game_uuid, 'B&M', cash=100)
self.story('B&M buys a share of CPR from the bank')
game_page = game.GamePage(self.browser)
share_form = game.ShareForm(self.browser)
game_page.reload_game.click()
bm, cpr = game_page.get_companies()
cpr.set_value(20)
bm['elem'].click()
share_form.select_company('CPR')
share_form.select_source('bank')
share_form.shares.clear()
share_form.shares.send_keys('4\n')
self.story('Verify that shares have been bought')
bm, cpr = game_page.get_companies()
self.verify_company(cpr, cash=0, ipo_shares=5, bank_shares=1)
self.verify_company(bm, cash=20, shares=['CPR 40%'])
self.assertEqual(game_page.bank_cash.text, '12080')
self.assertEqual(len(game_page.log), 2)
self.story('Click the undo button, the game state is reverted')
game_page.undo.click()
bm, cpr = game_page.get_companies()
self.verify_company(cpr, cash=0, ipo_shares=5, bank_shares=5)
self.verify_company(bm, cash=100, shares=[])
self.assertEqual(game_page.bank_cash.text, '12000')
self.story('Click the redo button, the transfer is done again')
game_page.redo.click()
bm, cpr = game_page.get_companies()
self.verify_company(cpr, cash=0, ipo_shares=5, bank_shares=1)
self.verify_company(bm, cash=20, shares=['CPR 40%'])
self.assertEqual(game_page.bank_cash.text, '12080')
self.assertEqual(len(game_page.log), 2)
def test_can_undo_player_buying_share_from_company_treasury(self):
self.story('Alice is a user who has a game')
self.browser.get(self.server_url)
homepage = game.Homepage(self.browser)
homepage.start_button.click()
game_uuid = self.browser.current_url[-36:]
self.create_player(game_uuid, 'Alice', cash=300)
co_uuid = self.create_company(game_uuid, 'C&O', cash=0, bank_shares=0,
ipo_shares=0)
self.create_company_share(co_uuid, co_uuid, shares=10)
self.story('Alice buys a share C&O from the C&O')
game_page = game.GamePage(self.browser)
share_form = game.ShareForm(self.browser)
game_page.reload_game.click()
alice = game_page.get_players()[0]
co = game_page.get_companies()[0]
co.set_value(30)
alice['row'].click()
share_form.select_company('C&O')
share_form.select_source('C&O')
share_form.shares.clear()
share_form.shares.send_keys('6\n')
self.story('Verify that shares have been bought')
alice = game_page.get_players()[0]
co = game_page.get_companies()[0]
self.verify_player(alice, cash=120, shares=['C&O 60%'])
self.verify_company(co, cash=180, shares=['C&O 40%'])
self.story('Click the undo button, the game state is reverted')
game_page.undo.click()
alice = game_page.get_players()[0]
co = game_page.get_companies()[0]
self.verify_player(alice, cash=300, shares=[])
self.verify_company(co, cash=0, shares=['C&O 100%'])
self.story('Click the redo button, the transfer is done again')
game_page.redo.click()
alice = game_page.get_players()[0]
co = game_page.get_companies()[0]
self.verify_player(alice, cash=120, shares=['C&O 60%'])
self.verify_company(co, cash=180, shares=['C&O 40%'])
def test_can_undo_company_paying_dividends(self):
self.story('Alice is a user who has a game')
self.browser.get(self.server_url)
homepage = game.Homepage(self.browser)
homepage.start_button.click()
game_uuid = self.browser.current_url[-36:]
alice_uuid = self.create_player(game_uuid, 'Alice', cash=0)
bob_uuid = self.create_player(game_uuid, 'Bob', cash=0)
bo_uuid = self.create_company(game_uuid, 'B&O', cash=0, bank_shares=0,
ipo_shares=2)
self.create_company_share(bo_uuid, bo_uuid, shares=1)
self.create_player_share(alice_uuid, bo_uuid, shares=4)
self.create_player_share(bob_uuid, bo_uuid, shares=3)
self.story('The B&O operates and pays dividends')
game_page = game.GamePage(self.browser)
operate_form = game.OperateForm(self.browser)
game_page.reload_game.click()
bo = game_page.get_companies()[0]
bo['elem'].click()
operate_form.revenue.clear()
operate_form.revenue.send_keys('80')
operate_form.full.click()
self.story('Verify that everyone has received money')
alice, bob = game_page.get_players()
bo = game_page.get_companies()[0]
self.verify_player(alice, cash=32)
self.verify_player(bob, cash=24)
self.verify_company(bo, cash=8)
self.assertEqual(game_page.bank_cash.text, '11936')
self.story('Click the undo button, the game state is reverted')
game_page.undo.click()
alice, bob = game_page.get_players()
bo = game_page.get_companies()[0]
self.verify_player(alice, cash=0)
self.verify_player(bob, cash=0)
self.verify_company(bo, cash=0)
self.assertEqual(game_page.bank_cash.text, '12000')
self.story('Click the redo button, the operation is done again')
game_page.redo.click()
alice, bob = game_page.get_players()
bo = game_page.get_companies()[0]
self.verify_player(alice, cash=32)
self.verify_player(bob, cash=24)
self.verify_company(bo, cash=8)
self.assertEqual(game_page.bank_cash.text, '11936')
def test_can_undo_company_withholding_dividends(self):
self.story('Alice is a user who has a game')
self.browser.get(self.server_url)
homepage = game.Homepage(self.browser)
homepage.start_button.click()
game_uuid = self.browser.current_url[-36:]
alice_uuid = self.create_player(game_uuid, 'Alice', cash=0)
bob_uuid = self.create_player(game_uuid, 'Bob', cash=0)
bo_uuid = self.create_company(game_uuid, 'B&O', cash=0, bank_shares=0,
ipo_shares=2)
self.create_company_share(bo_uuid, bo_uuid, shares=1)
self.create_player_share(alice_uuid, bo_uuid, shares=4)
self.create_player_share(bob_uuid, bo_uuid, shares=3)
self.story('The B&O operates and withholds dividends')
game_page = game.GamePage(self.browser)
operate_form = game.OperateForm(self.browser)
game_page.reload_game.click()
bo = game_page.get_companies()[0]
bo['elem'].click()
operate_form.revenue.clear()
operate_form.revenue.send_keys('90')
operate_form.withhold.click()
self.story('Verify that only the B&O has received money')
alice, bob = game_page.get_players()
bo = game_page.get_companies()[0]
self.verify_player(alice, cash=0)
self.verify_player(bob, cash=0)
self.verify_company(bo, cash=90)
self.assertEqual(game_page.bank_cash.text, '11910')
self.story('Click the undo button, the game state is reverted')
game_page.undo.click()
alice, bob = game_page.get_players()
bo = game_page.get_companies()[0]
self.verify_player(alice, cash=0)
self.verify_player(bob, cash=0)
self.verify_company(bo, cash=0)
self.assertEqual(game_page.bank_cash.text, '12000')
self.story('Click the redo button, the withholding is done again')
game_page.redo.click()
alice, bob = game_page.get_players()
bo = game_page.get_companies()[0]
self.verify_player(alice, cash=0)
self.verify_player(bob, cash=0)
self.verify_company(bo, cash=90)
self.assertEqual(game_page.bank_cash.text, '11910')
def test_can_undo_company_paying_half_dividends(self):
self.story('Alice is a user who has a game')
self.browser.get(self.server_url)
homepage = game.Homepage(self.browser)
homepage.start_button.click()
game_uuid = self.browser.current_url[-36:]
alice_uuid = self.create_player(game_uuid, 'Alice', cash=0)
bob_uuid = self.create_player(game_uuid, 'Bob', cash=0)
bo_uuid = self.create_company(game_uuid, 'B&O', cash=0, bank_shares=0,
ipo_shares=2)
self.create_company_share(bo_uuid, bo_uuid, shares=1)
self.create_player_share(alice_uuid, bo_uuid, shares=4)
self.create_player_share(bob_uuid, bo_uuid, shares=3)
self.story('The B&O operates and pays half dividends')
game_page = game.GamePage(self.browser)
operate_form = game.OperateForm(self.browser)
game_page.reload_game.click()
bo = game_page.get_companies()[0]
bo['elem'].click()
operate_form.revenue.clear()
operate_form.revenue.send_keys('100')
operate_form.half.click()
self.story('Verify that everyone received the correct amounts')
alice, bob = game_page.get_players()
bo = game_page.get_companies()[0]
self.verify_player(alice, cash=20)
self.verify_player(bob, cash=15)
self.verify_company(bo, cash=55)
self.assertEqual(game_page.bank_cash.text, '11910')
self.story('Click the undo button, the game state is reverted')
game_page.undo.click()
alice, bob = game_page.get_players()
bo = game_page.get_companies()[0]
self.verify_player(alice, cash=0)
self.verify_player(bob, cash=0)
self.verify_company(bo, cash=0)
self.assertEqual(game_page.bank_cash.text, '12000')
self.story('Click the redo button, the split payment is done again')
game_page.redo.click()
alice, bob = game_page.get_players()
bo = game_page.get_companies()[0]
self.verify_player(alice, cash=20)
self.verify_player(bob, cash=15)
self.verify_company(bo, cash=55)
self.assertEqual(game_page.bank_cash.text, '11910')
def test_log_does_not_show_undone_log_actions(self):
self.story('Alice is a user who has a game with a player')
self.browser.get(self.server_url)
homepage = game.Homepage(self.browser)
homepage.start_button.click()
game_uuid = self.browser.current_url[-36:]
self.create_player(game_uuid, 'Alice', cash=100)
self.story('Alice transfers some money to the bank')
game_page = game.GamePage(self.browser)
game_page.reload_game.click()
transfer_form = game.TransferForm(self.browser)
alice = game_page.get_players()[0]
alice['row'].click()
transfer_form.amount.send_keys('50\n')
self.assertEqual(len(game_page.log), 2)
self.assertRegex(game_page.log[0].text,
DATE_REGEX + 'Alice transfered 50 to the bank')
self.story('Click the undo button, an item is removed from the log')
game_page.undo.click()
self.assertEqual(len(game_page.log), 1)
self.assertRegex(game_page.log[0].text,
DATE_REGEX + 'New game started')
self.story('Soft reload the page, the undone item is still not shown')
game_page.reload_game.click()
self.assertEqual(len(game_page.log), 1)
self.assertRegex(game_page.log[0].text,
DATE_REGEX + 'New game started')
self.story('Hard refresh the page, the undone item is still not shown')
self.browser.refresh()
self.assertEqual(len(game_page.log), 1)
self.assertRegex(game_page.log[0].text,
DATE_REGEX + 'New game started')
self.story('Click the redo button, the undone item is shown again')
game_page.redo.click()
self.assertEqual(len(game_page.log), 2)
self.assertRegex(game_page.log[0].text,
DATE_REGEX + 'Alice transfered 50 to the bank')
self.story('Soft reload the page, the item is still there')
game_page.reload_game.click()
self.assertEqual(len(game_page.log), 2)
self.assertRegex(game_page.log[0].text,
DATE_REGEX + 'Alice transfered 50 to the bank')
self.story('Hard refresh the page, the item is still visible')
self.browser.refresh()
self.assertEqual(len(game_page.log), 2)
self.assertRegex(game_page.log[0].text,
DATE_REGEX + 'Alice transfered 50 to the bank')
def test_undo_button_disabled_when_action_cant_be_undone(self):
self.story('Alice is a user who has a game')
self.browser.get(self.server_url)
homepage = game.Homepage(self.browser)
homepage.start_button.click()
self.story("The game creating can't be undone")
game_page = game.GamePage(self.browser)
self.assertFalse(game_page.undo.is_enabled())
self.story("Can't undo player creation")
game_page.add_player_link.click()
add_player = game.AddPlayerPage(self.browser)
add_player.name.send_keys('Alice\n')
self.assertFalse(game_page.undo.is_enabled())
self.story("Can't undo company creation")
game_page.add_company_link.click()
add_company = game.AddCompanyPage(self.browser)
add_company.name.send_keys('B&M\n')
self.assertFalse(game_page.undo.is_enabled())
self.story("Can't undo editing a company")
bm = game_page.get_companies()[0]
bm['elem'].click()
bm['edit'].click()
edit_company = game.EditCompanyPage(self.browser)
edit_company.name.clear()
edit_company.name.send_keys('CPR\n')
self.assertFalse(game_page.undo.is_enabled())
def test_undone_actions_not_in_log_after_doing_new_action(self):
self.story('Alice is a user who has a game')
self.browser.get(self.server_url)
homepage = game.Homepage(self.browser)
homepage.start_button.click()
game_uuid = self.browser.current_url[-36:]
self.create_player(game_uuid, 'Alice', cash=100)
self.create_player(game_uuid, 'Bob', cash=100)
self.story('Alice transfers some money to the bank')
game_page = game.GamePage(self.browser)
game_page.reload_game.click()
transfer_form = game.TransferForm(self.browser)
alice, bob = game_page.get_players()
alice['row'].click()
transfer_form.amount.send_keys('60\n')
self.story('The transfer action has been done')
self.assertEqual(len(game_page.log), 2)
self.story('Undo the transfer action because Bob was meant to do it')
game_page.undo.click()
alice, bob = game_page.get_players()
bob['row'].click()
transfer_form.amount.send_keys('60\n')
self.story("Alice's action doesn't show in the log")
self.assertRegex(game_page.log[1].text,
DATE_REGEX + 'New game started')
self.assertRegex(game_page.log[0].text,
DATE_REGEX + 'Bob transfered 60 to the bank')
self.assertEqual(len(game_page.log), 2)
self.story("After soft refresh it still doesn't show")
game_page.reload_game.click()
self.assertRegex(game_page.log[1].text,
DATE_REGEX + 'New game started')
self.assertRegex(game_page.log[0].text,
DATE_REGEX + 'Bob transfered 60 to the bank')
self.assertEqual(len(game_page.log), 2)
| XeryusTC/18xx-accountant | accountant/functional_tests/test_undo.py | Python | mit | 25,079 |
import json
import base64
import urllib2
from auth_service import AuthCheckResult
class TCAuthService():
def __init__(self, tc_url):
# https://www.transparentclassroom.com/api/v1/authenticate.json
self.endpoint = tc_url + '/api/v1/authenticate.json'
def do_req(self, req):
req.add_header('Content-Type', 'application/json')
try:
response = urllib2.urlopen(req)
except urllib2.HTTPError as e:
if e.code == 401:
return AuthCheckResult(False)
else:
raise e
else:
body = response.read()
userinfo = json.loads(body)
return AuthCheckResult(True, userinfo)
def check_auth(self, username, password):
req = urllib2.Request(self.endpoint)
base64string = base64.b64encode('%s:%s' % (username, password))
req.add_header("Authorization", "Basic %s" % base64string)
return self.do_req(req)
def check_token(self, token):
req = urllib2.Request(self.endpoint)
req.add_header("X-TransparentClassroomToken", token)
return self.do_req(req)
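# Example usage (sketch; the URL, credentials, and whichever AuthCheckResult
# fields callers consume are placeholders/assumptions):
#
#     service = TCAuthService('https://www.transparentclassroom.com')
#     result = service.check_auth('teacher@example.com', 'secret')
#     result = service.check_token('api-token')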
| WildflowerSchools/sensei | app/tc_auth_service.py | Python | mit | 1,142 |
#!/usr/bin/env python3
# Copyright (c) 2016-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
import re
import fnmatch
import sys
import subprocess
import datetime
import os
################################################################################
# file filtering
################################################################################
EXCLUDE = [
# auto generated:
'src/qt/bitcoinstrings.cpp',
'src/chainparamsseeds.h',
# other external copyrights:
'src/reverse_iterator.h',
'src/test/fuzz/FuzzedDataProvider.h',
'src/tinyformat.h',
'test/functional/test_framework/bignum.py',
# python init:
'*__init__.py',
]
EXCLUDE_COMPILED = re.compile('|'.join([fnmatch.translate(m) for m in EXCLUDE]))
EXCLUDE_DIRS = [
# git subtrees
"src/crypto/ctaes/",
"src/leveldb/",
"src/secp256k1/",
"src/univalue/",
"src/crc32c/",
]
INCLUDE = ['*.h', '*.cpp', '*.cc', '*.c', '*.mm', '*.py', '*.sh', '*.bash-completion']
INCLUDE_COMPILED = re.compile('|'.join([fnmatch.translate(m) for m in INCLUDE]))
def applies_to_file(filename):
for excluded_dir in EXCLUDE_DIRS:
if filename.startswith(excluded_dir):
return False
return ((EXCLUDE_COMPILED.match(filename) is None) and
(INCLUDE_COMPILED.match(filename) is not None))
################################################################################
# obtain list of files in repo according to INCLUDE and EXCLUDE
################################################################################
GIT_LS_CMD = 'git ls-files --full-name'.split(' ')
GIT_TOPLEVEL_CMD = 'git rev-parse --show-toplevel'.split(' ')
def call_git_ls(base_directory):
out = subprocess.check_output([*GIT_LS_CMD, base_directory])
return [f for f in out.decode("utf-8").split('\n') if f != '']
def call_git_toplevel():
"Returns the absolute path to the project root"
return subprocess.check_output(GIT_TOPLEVEL_CMD).strip().decode("utf-8")
def get_filenames_to_examine(base_directory):
"Returns an array of absolute paths to any project files in the base_directory that pass the include/exclude filters"
root = call_git_toplevel()
filenames = call_git_ls(base_directory)
return sorted([os.path.join(root, filename) for filename in filenames if
applies_to_file(filename)])
################################################################################
# define and compile regexes for the patterns we are looking for
################################################################################
COPYRIGHT_WITH_C = r'Copyright \(c\)'
COPYRIGHT_WITHOUT_C = 'Copyright'
ANY_COPYRIGHT_STYLE = '(%s|%s)' % (COPYRIGHT_WITH_C, COPYRIGHT_WITHOUT_C)
YEAR = "20[0-9][0-9]"
YEAR_RANGE = '(%s)(-%s)?' % (YEAR, YEAR)
YEAR_LIST = '(%s)(, %s)+' % (YEAR, YEAR)
ANY_YEAR_STYLE = '(%s|%s)' % (YEAR_RANGE, YEAR_LIST)
ANY_COPYRIGHT_STYLE_OR_YEAR_STYLE = ("%s %s" % (ANY_COPYRIGHT_STYLE,
ANY_YEAR_STYLE))
ANY_COPYRIGHT_COMPILED = re.compile(ANY_COPYRIGHT_STYLE_OR_YEAR_STYLE)
def compile_copyright_regex(copyright_style, year_style, name):
return re.compile(r'%s %s,? %s( +\*)?\n' % (copyright_style, year_style, name))
EXPECTED_HOLDER_NAMES = [
r"Satoshi Nakamoto",
r"The Bitcoin Core developers",
r"BitPay Inc\.",
r"University of Illinois at Urbana-Champaign\.",
r"Pieter Wuille",
r"Wladimir J\. van der Laan",
r"Jeff Garzik",
r"Jan-Klaas Kollhof",
r"ArtForz -- public domain half-a-node",
r"Intel Corporation ?",
r"The Zcash developers",
r"Jeremy Rubin",
]
DOMINANT_STYLE_COMPILED = {}
YEAR_LIST_STYLE_COMPILED = {}
WITHOUT_C_STYLE_COMPILED = {}
for holder_name in EXPECTED_HOLDER_NAMES:
DOMINANT_STYLE_COMPILED[holder_name] = (
compile_copyright_regex(COPYRIGHT_WITH_C, YEAR_RANGE, holder_name))
YEAR_LIST_STYLE_COMPILED[holder_name] = (
compile_copyright_regex(COPYRIGHT_WITH_C, YEAR_LIST, holder_name))
WITHOUT_C_STYLE_COMPILED[holder_name] = (
compile_copyright_regex(COPYRIGHT_WITHOUT_C, ANY_YEAR_STYLE,
holder_name))
################################################################################
# search file contents for copyright message of particular category
################################################################################
def get_count_of_copyrights_of_any_style_any_holder(contents):
return len(ANY_COPYRIGHT_COMPILED.findall(contents))
def file_has_dominant_style_copyright_for_holder(contents, holder_name):
match = DOMINANT_STYLE_COMPILED[holder_name].search(contents)
return match is not None
def file_has_year_list_style_copyright_for_holder(contents, holder_name):
match = YEAR_LIST_STYLE_COMPILED[holder_name].search(contents)
return match is not None
def file_has_without_c_style_copyright_for_holder(contents, holder_name):
match = WITHOUT_C_STYLE_COMPILED[holder_name].search(contents)
return match is not None
################################################################################
# get file info
################################################################################
def read_file(filename):
    with open(filename, 'r', encoding="utf8") as f:
        return f.read()
def gather_file_info(filename):
info = {}
info['filename'] = filename
c = read_file(filename)
info['contents'] = c
info['all_copyrights'] = get_count_of_copyrights_of_any_style_any_holder(c)
info['classified_copyrights'] = 0
info['dominant_style'] = {}
info['year_list_style'] = {}
info['without_c_style'] = {}
for holder_name in EXPECTED_HOLDER_NAMES:
has_dominant_style = (
file_has_dominant_style_copyright_for_holder(c, holder_name))
has_year_list_style = (
file_has_year_list_style_copyright_for_holder(c, holder_name))
has_without_c_style = (
file_has_without_c_style_copyright_for_holder(c, holder_name))
info['dominant_style'][holder_name] = has_dominant_style
info['year_list_style'][holder_name] = has_year_list_style
info['without_c_style'][holder_name] = has_without_c_style
if has_dominant_style or has_year_list_style or has_without_c_style:
info['classified_copyrights'] = info['classified_copyrights'] + 1
return info
################################################################################
# report execution
################################################################################
SEPARATOR = '-' * 80
def print_filenames(filenames, verbose):
if not verbose:
return
for filename in filenames:
print("\t%s" % filename)
def print_report(file_infos, verbose):
print(SEPARATOR)
examined = [i['filename'] for i in file_infos]
print("%d files examined according to INCLUDE and EXCLUDE fnmatch rules" %
len(examined))
print_filenames(examined, verbose)
print(SEPARATOR)
print('')
zero_copyrights = [i['filename'] for i in file_infos if
i['all_copyrights'] == 0]
print("%4d with zero copyrights" % len(zero_copyrights))
print_filenames(zero_copyrights, verbose)
one_copyright = [i['filename'] for i in file_infos if
i['all_copyrights'] == 1]
print("%4d with one copyright" % len(one_copyright))
print_filenames(one_copyright, verbose)
two_copyrights = [i['filename'] for i in file_infos if
i['all_copyrights'] == 2]
print("%4d with two copyrights" % len(two_copyrights))
print_filenames(two_copyrights, verbose)
three_copyrights = [i['filename'] for i in file_infos if
i['all_copyrights'] == 3]
print("%4d with three copyrights" % len(three_copyrights))
print_filenames(three_copyrights, verbose)
four_or_more_copyrights = [i['filename'] for i in file_infos if
i['all_copyrights'] >= 4]
print("%4d with four or more copyrights" % len(four_or_more_copyrights))
print_filenames(four_or_more_copyrights, verbose)
print('')
print(SEPARATOR)
print('Copyrights with dominant style:\ne.g. "Copyright (c)" and '
'"<year>" or "<startYear>-<endYear>":\n')
for holder_name in EXPECTED_HOLDER_NAMES:
dominant_style = [i['filename'] for i in file_infos if
i['dominant_style'][holder_name]]
if len(dominant_style) > 0:
print("%4d with '%s'" % (len(dominant_style),
holder_name.replace('\n', '\\n')))
print_filenames(dominant_style, verbose)
print('')
print(SEPARATOR)
print('Copyrights with year list style:\ne.g. "Copyright (c)" and '
'"<year1>, <year2>, ...":\n')
for holder_name in EXPECTED_HOLDER_NAMES:
year_list_style = [i['filename'] for i in file_infos if
i['year_list_style'][holder_name]]
if len(year_list_style) > 0:
print("%4d with '%s'" % (len(year_list_style),
holder_name.replace('\n', '\\n')))
print_filenames(year_list_style, verbose)
print('')
print(SEPARATOR)
print('Copyrights with no "(c)" style:\ne.g. "Copyright" and "<year>" or '
'"<startYear>-<endYear>":\n')
for holder_name in EXPECTED_HOLDER_NAMES:
without_c_style = [i['filename'] for i in file_infos if
i['without_c_style'][holder_name]]
if len(without_c_style) > 0:
print("%4d with '%s'" % (len(without_c_style),
holder_name.replace('\n', '\\n')))
print_filenames(without_c_style, verbose)
print('')
print(SEPARATOR)
unclassified_copyrights = [i['filename'] for i in file_infos if
i['classified_copyrights'] < i['all_copyrights']]
print("%d with unexpected copyright holder names" %
len(unclassified_copyrights))
print_filenames(unclassified_copyrights, verbose)
print(SEPARATOR)
def exec_report(base_directory, verbose):
filenames = get_filenames_to_examine(base_directory)
file_infos = [gather_file_info(f) for f in filenames]
print_report(file_infos, verbose)
################################################################################
# report cmd
################################################################################
REPORT_USAGE = """
Produces a report of all copyright header notices found inside the source files
of a repository.
Usage:
$ ./copyright_header.py report <base_directory> [verbose]
Arguments:
<base_directory> - The base directory of a bitcoin source code repository.
[verbose] - Includes a list of every file of each subcategory in the report.
"""
def report_cmd(argv):
if len(argv) == 2:
sys.exit(REPORT_USAGE)
base_directory = argv[2]
if not os.path.exists(base_directory):
sys.exit("*** bad <base_directory>: %s" % base_directory)
if len(argv) == 3:
verbose = False
elif argv[3] == 'verbose':
verbose = True
else:
sys.exit("*** unknown argument: %s" % argv[2])
exec_report(base_directory, verbose)
################################################################################
# query git for year of last change
################################################################################
GIT_LOG_CMD = "git log --pretty=format:%%ai %s"
def call_git_log(filename):
out = subprocess.check_output((GIT_LOG_CMD % filename).split(' '))
return out.decode("utf-8").split('\n')
def get_git_change_years(filename):
git_log_lines = call_git_log(filename)
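    # Note: str.split('\n') never yields an empty list (splitting '' gives
    # ['']), so the fallback below is effectively unreachable; it also
    # returns an int year while the normal path returns year strings.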
if len(git_log_lines) == 0:
return [datetime.date.today().year]
# timestamp is in ISO 8601 format. e.g. "2016-09-05 14:25:32 -0600"
return [line.split(' ')[0].split('-')[0] for line in git_log_lines]
def get_most_recent_git_change_year(filename):
return max(get_git_change_years(filename))
################################################################################
# read and write to file
################################################################################
def read_file_lines(filename):
    with open(filename, 'r', encoding="utf8") as f:
        return f.readlines()
def write_file_lines(filename, file_lines):
    with open(filename, 'w', encoding="utf8") as f:
        f.write(''.join(file_lines))
################################################################################
# update header years execution
################################################################################
COPYRIGHT = r'Copyright \(c\)'
YEAR = "20[0-9][0-9]"
YEAR_RANGE = '(%s)(-%s)?' % (YEAR, YEAR)
HOLDER = 'The Bitcoin Core developers'
UPDATEABLE_LINE_COMPILED = re.compile(' '.join([COPYRIGHT, YEAR_RANGE, HOLDER]))
def get_updatable_copyright_line(file_lines):
    for index, line in enumerate(file_lines):
        if UPDATEABLE_LINE_COMPILED.search(line) is not None:
            return index, line
    return None, None
def parse_year_range(year_range):
year_split = year_range.split('-')
start_year = year_split[0]
if len(year_split) == 1:
return start_year, start_year
return start_year, year_split[1]
def year_range_to_str(start_year, end_year):
if start_year == end_year:
return start_year
return "%s-%s" % (start_year, end_year)
def create_updated_copyright_line(line, last_git_change_year):
copyright_splitter = 'Copyright (c) '
copyright_split = line.split(copyright_splitter)
# Preserve characters on line that are ahead of the start of the copyright
# notice - they are part of the comment block and vary from file-to-file.
before_copyright = copyright_split[0]
after_copyright = copyright_split[1]
space_split = after_copyright.split(' ')
year_range = space_split[0]
start_year, end_year = parse_year_range(year_range)
if end_year == last_git_change_year:
return line
return (before_copyright + copyright_splitter +
year_range_to_str(start_year, last_git_change_year) + ' ' +
' '.join(space_split[1:]))
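# Example: "// Copyright (c) 2009-2015 The Bitcoin Core developers" with a
# last change year of "2020" becomes
# "// Copyright (c) 2009-2020 The Bitcoin Core developers".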
def update_updatable_copyright(filename):
file_lines = read_file_lines(filename)
index, line = get_updatable_copyright_line(file_lines)
if not line:
print_file_action_message(filename, "No updatable copyright.")
return
last_git_change_year = get_most_recent_git_change_year(filename)
new_line = create_updated_copyright_line(line, last_git_change_year)
if line == new_line:
print_file_action_message(filename, "Copyright up-to-date.")
return
file_lines[index] = new_line
write_file_lines(filename, file_lines)
print_file_action_message(filename,
"Copyright updated! -> %s" % last_git_change_year)
def exec_update_header_year(base_directory):
for filename in get_filenames_to_examine(base_directory):
update_updatable_copyright(filename)
################################################################################
# update cmd
################################################################################
UPDATE_USAGE = """
Updates all the copyright headers of "The Bitcoin Core developers" which were
changed in a year more recent than is listed. For example:
// Copyright (c) <firstYear>-<lastYear> The Bitcoin Core developers
will be updated to:
// Copyright (c) <firstYear>-<lastModifiedYear> The Bitcoin Core developers
where <lastModifiedYear> is obtained from the 'git log' history.
This subcommand also handles copyright headers that have only a single year. In those cases:
// Copyright (c) <year> The Bitcoin Core developers
will be updated to:
// Copyright (c) <year>-<lastModifiedYear> The Bitcoin Core developers
where the update is appropriate.
Usage:
$ ./copyright_header.py update <base_directory>
Arguments:
<base_directory> - The base directory of a bitcoin source code repository.
"""
def print_file_action_message(filename, action):
print("%-52s %s" % (filename, action))
def update_cmd(argv):
if len(argv) != 3:
sys.exit(UPDATE_USAGE)
base_directory = argv[2]
if not os.path.exists(base_directory):
sys.exit("*** bad base_directory: %s" % base_directory)
exec_update_header_year(base_directory)
################################################################################
# inserted copyright header format
################################################################################
def get_header_lines(header, start_year, end_year):
lines = header.split('\n')[1:-1]
lines[0] = lines[0] % year_range_to_str(start_year, end_year)
return [line + '\n' for line in lines]
CPP_HEADER = '''
// Copyright (c) %s The Bitcoin Core developers
// Distributed under the MIT software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
def get_cpp_header_lines_to_insert(start_year, end_year):
return reversed(get_header_lines(CPP_HEADER, start_year, end_year))
SCRIPT_HEADER = '''
# Copyright (c) %s The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
def get_script_header_lines_to_insert(start_year, end_year):
return reversed(get_header_lines(SCRIPT_HEADER, start_year, end_year))
################################################################################
# query git for year of last change
################################################################################
def get_git_change_year_range(filename):
years = get_git_change_years(filename)
return min(years), max(years)
################################################################################
# check for existing core copyright
################################################################################
def file_already_has_core_copyright(file_lines):
index, _ = get_updatable_copyright_line(file_lines)
return index is not None
################################################################################
# insert header execution
################################################################################
def file_has_hashbang(file_lines):
if len(file_lines) < 1:
return False
if len(file_lines[0]) <= 2:
return False
return file_lines[0][:2] == '#!'
def insert_script_header(filename, file_lines, start_year, end_year):
if file_has_hashbang(file_lines):
insert_idx = 1
else:
insert_idx = 0
header_lines = get_script_header_lines_to_insert(start_year, end_year)
for line in header_lines:
file_lines.insert(insert_idx, line)
write_file_lines(filename, file_lines)
def insert_cpp_header(filename, file_lines, start_year, end_year):
file_lines.insert(0, '\n')
header_lines = get_cpp_header_lines_to_insert(start_year, end_year)
for line in header_lines:
file_lines.insert(0, line)
write_file_lines(filename, file_lines)
def exec_insert_header(filename, style):
file_lines = read_file_lines(filename)
if file_already_has_core_copyright(file_lines):
sys.exit('*** %s already has a copyright by The Bitcoin Core developers'
% (filename))
start_year, end_year = get_git_change_year_range(filename)
if style in ['python', 'shell']:
insert_script_header(filename, file_lines, start_year, end_year)
else:
insert_cpp_header(filename, file_lines, start_year, end_year)
################################################################################
# insert cmd
################################################################################
INSERT_USAGE = """
Inserts a copyright header for "The Bitcoin Core developers" at the top of the
file in either Python or C++ style as determined by the file extension. If the
file is a Python file and it has a '#!' starting the first line, the header is
inserted in the line below it.
The copyright dates will be set to be:
"<year_introduced>-<current_year>"
where <year_introduced> is according to the 'git log' history. If
<year_introduced> is equal to <current_year>, the date will be set to be:
"<current_year>"
If the file already has a copyright for "The Bitcoin Core developers", the
script will exit.
Usage:
$ ./copyright_header.py insert <file>
Arguments:
<file> - A source file in the bitcoin repository.
"""
def insert_cmd(argv):
if len(argv) != 3:
sys.exit(INSERT_USAGE)
filename = argv[2]
if not os.path.isfile(filename):
sys.exit("*** bad filename: %s" % filename)
_, extension = os.path.splitext(filename)
if extension not in ['.h', '.cpp', '.cc', '.c', '.py', '.sh']:
sys.exit("*** cannot insert for file extension %s" % extension)
if extension == '.py':
style = 'python'
elif extension == '.sh':
style = 'shell'
else:
style = 'cpp'
exec_insert_header(filename, style)
################################################################################
# UI
################################################################################
USAGE = """
copyright_header.py - utilities for managing copyright headers of 'The Bitcoin
Core developers' in repository source files.
Usage:
$ ./copyright_header <subcommand>
Subcommands:
report
update
insert
To see subcommand usage, run them without arguments.
"""
SUBCOMMANDS = ['report', 'update', 'insert']
if __name__ == "__main__":
if len(sys.argv) == 1:
sys.exit(USAGE)
subcommand = sys.argv[1]
if subcommand not in SUBCOMMANDS:
sys.exit(USAGE)
if subcommand == 'report':
report_cmd(sys.argv)
elif subcommand == 'update':
update_cmd(sys.argv)
elif subcommand == 'insert':
insert_cmd(sys.argv)
| midnightmagic/bitcoin | contrib/devtools/copyright_header.py | Python | mit | 22,184 |
# ######## KADEMLIA CONSTANTS ###########
# Small number representing the degree of
# parallelism in network calls
alpha = 3
# Maximum number of contacts stored in a bucket
# NOTE: Should be an even number
k = 80
# Timeout for network operations
# [seconds]
rpcTimeout = 0.1
# Delay between iterations of iterative node lookups
# (for loose parallelism)
# [seconds]
iterativeLookupDelay = rpcTimeout / 2
# If a k-bucket has not been used for this amount of time, refresh it.
# [seconds]
# NOTE: the module documents these in seconds, but 60 * 60 * 1000 is one
# hour only in milliseconds (as seconds it is 1000 hours). Value kept as-is.
refreshTimeout = 60 * 60 * 1000  # 1 hour in ms
# The interval at which nodes replicate (republish/refresh)
# the data they hold
# [seconds]
replicateInterval = refreshTimeout
# The time it takes for data to expire in the network;
# the original publisher of the data will also republish
# the data at this time if it is still valid
# [seconds]
dataExpireTimeout = 86400 # 24 hours
# ####### IMPLEMENTATION-SPECIFIC CONSTANTS ###########
# The interval in which the node should check whether any buckets
# need refreshing or whether any data needs to be republished
# [seconds]
checkRefreshInterval = refreshTimeout / 5
# Max size of a single UDP datagram.
# Any larger message will be spread across several UDP packets.
# [bytes]
udpDatagramMaxSize = 8192 # 8 KB
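# Illustrative example (not part of the protocol constants themselves): a
# 20 KB message would be spread across
# ceil(20 * 1024 / udpDatagramMaxSize) == 3 UDP datagrams.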
DB_PATH = "db/ob.db"
VERSION = "0.2.0"
| hoffmabc/OpenBazaar | node/constants.py | Python | mit | 1,299 |
'''
fabfile_sample.py
edit this to your satisfaction, then move it in your project root as fabfile.py
usage:
$ fab dev pack deploy
$ fab dev uptime
'''
import os
from fabric.api import *
def dev():
env.user = 'nathaniel'
env.hosts = ['tycho']
env.virtualenv_dir = '/home/nathaniel/conf/virtualenvs/ivrhub'
env.supervisord_config = '/home/nathaniel/conf/tycho/supervisord.conf'
def pack():
# create a new source distribution as a tarball
local('python setup.py sdist --formats=gztar', capture=False)
def deploy():
# determine release name and version
dist = local('python setup.py --fullname', capture=True).strip()
# upload the source tarball and unzip
put('dist/%s.tar.gz' % dist, '/tmp/ivrhub.tar.gz')
run('mkdir /tmp/ivrhub')
with cd('/tmp/ivrhub'):
run('tar xzf /tmp/ivrhub.tar.gz')
# setup the package with the virtualenv
with cd('/tmp/ivrhub/%s' % dist):
python = os.path.join(env.virtualenv_dir, 'bin/python')
run('%s setup.py install' % python)
# re-install requirements.txt
run('pip install -r requirements.txt -E %s' % env.virtualenv_dir)
# delete the temporary folder
run('rm -rf /tmp/ivrhub /tmp/ivrhub.tar.gz')
# restart the server..
run('supervisorctl restart ivrhub')
def logs():
''' view logs
supervisord redirects stderr and stdout to this path
'''
run('tail /tmp/ivrhub.log')
def nginx(command):
''' nginx controls
'''
if command == 'start':
sudo('/etc/init.d/nginx start')
elif command == 'stop':
sudo('/etc/init.d/nginx stop')
elif command == 'restart':
nginx('stop')
nginx('start')
else:
print 'hm, did not quite understand that nginx command'
''' misc
'''
def host_info():
print 'checking lsb_release of host: '
run('lsb_release -a')
def uptime():
run('uptime')
def grep_python():
run('ps aux | grep python')
| aquaya/ivrhub | conf/fabfile_sample.py | Python | mit | 1,996 |
from .formfill import FormFillExtension as formfill # NOQA
| Kroisse/FormEncode-Jinja2 | formencode_jinja2/__init__.py | Python | mit | 60 |
# THIS FILE IS AUTO-GENERATED. DO NOT EDIT
from verta._swagger.base_type import BaseType
class UacRemoveTeamUserResponse(BaseType):
def __init__(self, status=None):
required = {
"status": False,
}
self.status = status
for k, v in required.items():
if self[k] is None and v:
raise ValueError('attribute {} is required'.format(k))
@staticmethod
def from_json(d):
tmp = d.get('status', None)
if tmp is not None:
d['status'] = tmp
return UacRemoveTeamUserResponse(**d)
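# Minimal usage sketch (hypothetical payload; the actual wire format is
# defined by the backend):
#   resp = UacRemoveTeamUserResponse.from_json({'status': True})
#   assert resp.status is True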
| mitdbg/modeldb | client/verta/verta/_swagger/_public/uac/model/UacRemoveTeamUserResponse.py | Python | mit | 535 |
import loggingskeleton
loggingskeleton.test("l_testlogging3.py") | sburnett/seattle | repy/tests/ut_repytests_testlogging3.py | Python | mit | 65 |
import matplotlib.pyplot as plt
import json
import numpy as np
def visual_file(file_name, line_color, font_size, font_size2):
names = []
numbs = []
n = 0
with open(file_name, 'r') as f:
data = json.load(f)
for d in data:
cur_births = d['birth']
for cur_birth in cur_births:
n += 1
# if n > 50: break
name = cur_birth['name']
year = cur_birth['year']
deathyear = cur_birth['deathyear']
names.append(name)
numbs.append(n)
if deathyear.isdigit():
deathyear = int(deathyear)
                plt.plot([n, n], [year, deathyear], color=line_color, lw=2)
else:
plt.plot([n, n], [year, year + 5], color='grey', lw=2)
    plt.yticks([i for i in range(800, 2100, 10)], size=font_size2)
plt.ylabel('year', size=font_size2)
plt.xticks(numbs, names, size=font_size, rotation=90)
plt.barh([i for i in range(800, 2100, 50)], [len(names) for _ in range(800, 2100, 50)],
alpha=0.1, color='whitesmoke')
plt.show()
if __name__ == '__main__':
file_name = 'wikibirth-jan1.json'
line_color = 'black'
font_size = 3.5
font_size2 = 6.0
visual_file(file_name, line_color, font_size, font_size2)
| ArtezGDA/MappingTheCity-Maps | Kimberley ter Heerdt/Poster/Visual-1:2/visualisatie2.py | Python | mit | 1,458 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import datetime
from typing import Dict, List, Optional, Union
from azure.core.exceptions import HttpResponseError
import msrest.serialization
from ._azure_maps_management_client_enums import *
class Resource(msrest.serialization.Model):
"""Common fields that are returned in the response for all Azure Resource Manager resources.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Fully qualified resource ID for the resource. Ex -
/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
:ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
"Microsoft.Storage/storageAccounts".
:vartype type: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(Resource, self).__init__(**kwargs)
self.id = None
self.name = None
self.type = None
class TrackedResource(Resource):
"""The resource model definition for an Azure Resource Manager tracked top level resource which has 'tags' and a 'location'.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar id: Fully qualified resource ID for the resource. Ex -
/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
:ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
"Microsoft.Storage/storageAccounts".
:vartype type: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param location: Required. The geo-location where the resource lives.
:type location: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'location': {'required': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'location': {'key': 'location', 'type': 'str'},
}
def __init__(
self,
*,
location: str,
tags: Optional[Dict[str, str]] = None,
**kwargs
):
super(TrackedResource, self).__init__(**kwargs)
self.tags = tags
self.location = location
class Creator(TrackedResource):
"""An Azure resource which represents Maps Creator product and provides ability to manage private location data.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar id: Fully qualified resource ID for the resource. Ex -
/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
:ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
"Microsoft.Storage/storageAccounts".
:vartype type: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param location: Required. The geo-location where the resource lives.
:type location: str
:param properties: Required. The Creator resource properties.
:type properties: ~azure.mgmt.maps.models.CreatorProperties
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'location': {'required': True},
'properties': {'required': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'location': {'key': 'location', 'type': 'str'},
'properties': {'key': 'properties', 'type': 'CreatorProperties'},
}
def __init__(
self,
*,
location: str,
properties: "CreatorProperties",
tags: Optional[Dict[str, str]] = None,
**kwargs
):
super(Creator, self).__init__(tags=tags, location=location, **kwargs)
self.properties = properties
class CreatorList(msrest.serialization.Model):
"""A list of Creator resources.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: a Creator account.
:vartype value: list[~azure.mgmt.maps.models.Creator]
:param next_link: URL client should use to fetch the next page (per server side paging).
It's null for now, added for future use.
:type next_link: str
"""
_validation = {
'value': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[Creator]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
next_link: Optional[str] = None,
**kwargs
):
super(CreatorList, self).__init__(**kwargs)
self.value = None
self.next_link = next_link
class CreatorProperties(msrest.serialization.Model):
"""Creator resource properties.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar provisioning_state: The state of the resource provisioning, terminal states: Succeeded,
Failed, Canceled.
:vartype provisioning_state: str
:param storage_units: Required. The storage units to be allocated. Integer values from 1 to
100, inclusive.
:type storage_units: int
"""
_validation = {
'provisioning_state': {'readonly': True},
'storage_units': {'required': True, 'maximum': 100, 'minimum': 1},
}
_attribute_map = {
'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
'storage_units': {'key': 'storageUnits', 'type': 'int'},
}
def __init__(
self,
*,
storage_units: int,
**kwargs
):
super(CreatorProperties, self).__init__(**kwargs)
self.provisioning_state = None
self.storage_units = storage_units
class CreatorUpdateParameters(msrest.serialization.Model):
"""Parameters used to update an existing Creator resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param tags: A set of tags. Gets or sets a list of key value pairs that describe the resource.
These tags can be used in viewing and grouping this resource (across resource groups). A
maximum of 15 tags can be provided for a resource. Each tag must have a key no greater than 128
characters and value no greater than 256 characters.
:type tags: dict[str, str]
:ivar provisioning_state: The state of the resource provisioning, terminal states: Succeeded,
Failed, Canceled.
:vartype provisioning_state: str
:param storage_units: The storage units to be allocated. Integer values from 1 to 100,
inclusive.
:type storage_units: int
"""
_validation = {
'provisioning_state': {'readonly': True},
'storage_units': {'maximum': 100, 'minimum': 1},
}
_attribute_map = {
'tags': {'key': 'tags', 'type': '{str}'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'storage_units': {'key': 'properties.storageUnits', 'type': 'int'},
}
def __init__(
self,
*,
tags: Optional[Dict[str, str]] = None,
storage_units: Optional[int] = None,
**kwargs
):
super(CreatorUpdateParameters, self).__init__(**kwargs)
self.tags = tags
self.provisioning_state = None
self.storage_units = storage_units
class Dimension(msrest.serialization.Model):
"""Dimension of map account, for example API Category, Api Name, Result Type, and Response Code.
:param name: Display name of dimension.
:type name: str
:param display_name: Display name of dimension.
:type display_name: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
display_name: Optional[str] = None,
**kwargs
):
super(Dimension, self).__init__(**kwargs)
self.name = name
self.display_name = display_name
class ErrorAdditionalInfo(msrest.serialization.Model):
"""The resource management error additional info.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar type: The additional info type.
:vartype type: str
:ivar info: The additional info.
:vartype info: str
"""
_validation = {
'type': {'readonly': True},
'info': {'readonly': True},
}
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'info': {'key': 'info', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ErrorAdditionalInfo, self).__init__(**kwargs)
self.type = None
self.info = None
class ErrorDetail(msrest.serialization.Model):
"""The error detail.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar code: The error code.
:vartype code: str
:ivar message: The error message.
:vartype message: str
:ivar target: The error target.
:vartype target: str
:ivar details: The error details.
:vartype details: list[~azure.mgmt.maps.models.ErrorDetail]
:ivar additional_info: The error additional info.
:vartype additional_info: list[~azure.mgmt.maps.models.ErrorAdditionalInfo]
"""
_validation = {
'code': {'readonly': True},
'message': {'readonly': True},
'target': {'readonly': True},
'details': {'readonly': True},
'additional_info': {'readonly': True},
}
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
'details': {'key': 'details', 'type': '[ErrorDetail]'},
'additional_info': {'key': 'additionalInfo', 'type': '[ErrorAdditionalInfo]'},
}
def __init__(
self,
**kwargs
):
super(ErrorDetail, self).__init__(**kwargs)
self.code = None
self.message = None
self.target = None
self.details = None
self.additional_info = None
class ErrorResponse(msrest.serialization.Model):
"""Common error response for all Azure Resource Manager APIs to return error details for failed operations. (This also follows the OData error response format.).
:param error: The error object.
:type error: ~azure.mgmt.maps.models.ErrorDetail
"""
_attribute_map = {
'error': {'key': 'error', 'type': 'ErrorDetail'},
}
def __init__(
self,
*,
error: Optional["ErrorDetail"] = None,
**kwargs
):
super(ErrorResponse, self).__init__(**kwargs)
self.error = error
class MapsAccount(TrackedResource):
"""An Azure resource which represents access to a suite of Maps REST APIs.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar id: Fully qualified resource ID for the resource. Ex -
/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
:ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
"Microsoft.Storage/storageAccounts".
:vartype type: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param location: Required. The geo-location where the resource lives.
:type location: str
:param sku: Required. The SKU of this account.
:type sku: ~azure.mgmt.maps.models.Sku
:param kind: Get or Set Kind property. Possible values include: "Gen1", "Gen2". Default value:
"Gen1".
:type kind: str or ~azure.mgmt.maps.models.Kind
:ivar system_data: The system meta data relating to this resource.
:vartype system_data: ~azure.mgmt.maps.models.SystemData
:param properties: The map account properties.
:type properties: ~azure.mgmt.maps.models.MapsAccountProperties
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'location': {'required': True},
'sku': {'required': True},
'system_data': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'location': {'key': 'location', 'type': 'str'},
'sku': {'key': 'sku', 'type': 'Sku'},
'kind': {'key': 'kind', 'type': 'str'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'properties': {'key': 'properties', 'type': 'MapsAccountProperties'},
}
def __init__(
self,
*,
location: str,
sku: "Sku",
tags: Optional[Dict[str, str]] = None,
kind: Optional[Union[str, "Kind"]] = "Gen1",
properties: Optional["MapsAccountProperties"] = None,
**kwargs
):
super(MapsAccount, self).__init__(tags=tags, location=location, **kwargs)
self.sku = sku
self.kind = kind
self.system_data = None
self.properties = properties
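# Minimal construction sketch (illustrative values only, not a real
# deployment; `Sku` is defined further below in this module):
#   account = MapsAccount(location="westus2", sku=Sku(name="G2"),
#                         kind="Gen2", tags={"env": "dev"})
# Read-only fields such as system_data remain None until populated from a
# service response.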
class MapsAccountKeys(msrest.serialization.Model):
"""The set of keys which can be used to access the Maps REST APIs. Two keys are provided for key rotation without interruption.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar primary_key_last_updated: The last updated date and time of the primary key.
:vartype primary_key_last_updated: str
:ivar primary_key: The primary key for accessing the Maps REST APIs.
:vartype primary_key: str
:ivar secondary_key: The secondary key for accessing the Maps REST APIs.
:vartype secondary_key: str
:ivar secondary_key_last_updated: The last updated date and time of the secondary key.
:vartype secondary_key_last_updated: str
"""
_validation = {
'primary_key_last_updated': {'readonly': True},
'primary_key': {'readonly': True},
'secondary_key': {'readonly': True},
'secondary_key_last_updated': {'readonly': True},
}
_attribute_map = {
'primary_key_last_updated': {'key': 'primaryKeyLastUpdated', 'type': 'str'},
'primary_key': {'key': 'primaryKey', 'type': 'str'},
'secondary_key': {'key': 'secondaryKey', 'type': 'str'},
'secondary_key_last_updated': {'key': 'secondaryKeyLastUpdated', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(MapsAccountKeys, self).__init__(**kwargs)
self.primary_key_last_updated = None
self.primary_key = None
self.secondary_key = None
self.secondary_key_last_updated = None
class MapsAccountProperties(msrest.serialization.Model):
"""Additional Map account properties.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar unique_id: A unique identifier for the maps account.
:vartype unique_id: str
:param disable_local_auth: Allows toggle functionality on Azure Policy to disable Azure Maps
local authentication support. This will disable Shared Keys authentication from any usage.
:type disable_local_auth: bool
:ivar provisioning_state: the state of the provisioning.
:vartype provisioning_state: str
"""
_validation = {
'unique_id': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'unique_id': {'key': 'uniqueId', 'type': 'str'},
'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
}
def __init__(
self,
*,
disable_local_auth: Optional[bool] = False,
**kwargs
):
super(MapsAccountProperties, self).__init__(**kwargs)
self.unique_id = None
self.disable_local_auth = disable_local_auth
self.provisioning_state = None
class MapsAccounts(msrest.serialization.Model):
"""A list of Maps Accounts.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: a Maps Account.
:vartype value: list[~azure.mgmt.maps.models.MapsAccount]
:param next_link: URL client should use to fetch the next page (per server side paging).
It's null for now, added for future use.
:type next_link: str
"""
_validation = {
'value': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[MapsAccount]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
next_link: Optional[str] = None,
**kwargs
):
super(MapsAccounts, self).__init__(**kwargs)
self.value = None
self.next_link = next_link
class MapsAccountUpdateParameters(msrest.serialization.Model):
"""Parameters used to update an existing Maps Account.
Variables are only populated by the server, and will be ignored when sending a request.
:param tags: A set of tags. Gets or sets a list of key value pairs that describe the resource.
These tags can be used in viewing and grouping this resource (across resource groups). A
maximum of 15 tags can be provided for a resource. Each tag must have a key no greater than 128
characters and value no greater than 256 characters.
:type tags: dict[str, str]
:param kind: Get or Set Kind property. Possible values include: "Gen1", "Gen2". Default value:
"Gen1".
:type kind: str or ~azure.mgmt.maps.models.Kind
:param sku: The SKU of this account.
:type sku: ~azure.mgmt.maps.models.Sku
:ivar unique_id: A unique identifier for the maps account.
:vartype unique_id: str
:param disable_local_auth: Allows toggle functionality on Azure Policy to disable Azure Maps
local authentication support. This will disable Shared Keys authentication from any usage.
:type disable_local_auth: bool
:ivar provisioning_state: the state of the provisioning.
:vartype provisioning_state: str
"""
_validation = {
'unique_id': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'tags': {'key': 'tags', 'type': '{str}'},
'kind': {'key': 'kind', 'type': 'str'},
'sku': {'key': 'sku', 'type': 'Sku'},
'unique_id': {'key': 'properties.uniqueId', 'type': 'str'},
'disable_local_auth': {'key': 'properties.disableLocalAuth', 'type': 'bool'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
*,
tags: Optional[Dict[str, str]] = None,
kind: Optional[Union[str, "Kind"]] = "Gen1",
sku: Optional["Sku"] = None,
disable_local_auth: Optional[bool] = False,
**kwargs
):
super(MapsAccountUpdateParameters, self).__init__(**kwargs)
self.tags = tags
self.kind = kind
self.sku = sku
self.unique_id = None
self.disable_local_auth = disable_local_auth
self.provisioning_state = None
class MapsKeySpecification(msrest.serialization.Model):
"""Whether the operation refers to the primary or secondary key.
All required parameters must be populated in order to send to Azure.
:param key_type: Required. Whether the operation refers to the primary or secondary key.
Possible values include: "primary", "secondary".
:type key_type: str or ~azure.mgmt.maps.models.KeyType
"""
_validation = {
'key_type': {'required': True},
}
_attribute_map = {
'key_type': {'key': 'keyType', 'type': 'str'},
}
def __init__(
self,
*,
key_type: Union[str, "KeyType"],
**kwargs
):
super(MapsKeySpecification, self).__init__(**kwargs)
self.key_type = key_type
class MapsOperations(msrest.serialization.Model):
"""The set of operations available for Maps.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: An operation available for Maps.
:vartype value: list[~azure.mgmt.maps.models.OperationDetail]
:param next_link: URL client should use to fetch the next page (per server side paging).
It's null for now, added for future use.
:type next_link: str
"""
_validation = {
'value': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[OperationDetail]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
next_link: Optional[str] = None,
**kwargs
):
super(MapsOperations, self).__init__(**kwargs)
self.value = None
self.next_link = next_link
class MetricSpecification(msrest.serialization.Model):
"""Metric specification of operation.
:param name: Name of metric specification.
:type name: str
:param display_name: Display name of metric specification.
:type display_name: str
:param display_description: Display description of metric specification.
:type display_description: str
:param unit: Unit could be Count.
:type unit: str
:param dimensions: Dimensions of map account.
:type dimensions: list[~azure.mgmt.maps.models.Dimension]
:param aggregation_type: Aggregation type could be Average.
:type aggregation_type: str
:param fill_gap_with_zero: The property to decide fill gap with zero or not.
:type fill_gap_with_zero: bool
:param category: The category this metric specification belong to, could be Capacity.
:type category: str
:param resource_id_dimension_name_override: Account Resource Id.
:type resource_id_dimension_name_override: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'display_description': {'key': 'displayDescription', 'type': 'str'},
'unit': {'key': 'unit', 'type': 'str'},
'dimensions': {'key': 'dimensions', 'type': '[Dimension]'},
'aggregation_type': {'key': 'aggregationType', 'type': 'str'},
'fill_gap_with_zero': {'key': 'fillGapWithZero', 'type': 'bool'},
'category': {'key': 'category', 'type': 'str'},
'resource_id_dimension_name_override': {'key': 'resourceIdDimensionNameOverride', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
display_name: Optional[str] = None,
display_description: Optional[str] = None,
unit: Optional[str] = None,
dimensions: Optional[List["Dimension"]] = None,
aggregation_type: Optional[str] = None,
fill_gap_with_zero: Optional[bool] = None,
category: Optional[str] = None,
resource_id_dimension_name_override: Optional[str] = None,
**kwargs
):
super(MetricSpecification, self).__init__(**kwargs)
self.name = name
self.display_name = display_name
self.display_description = display_description
self.unit = unit
self.dimensions = dimensions
self.aggregation_type = aggregation_type
self.fill_gap_with_zero = fill_gap_with_zero
self.category = category
self.resource_id_dimension_name_override = resource_id_dimension_name_override
class OperationDetail(msrest.serialization.Model):
"""Operation detail payload.
:param name: Name of the operation.
:type name: str
:param is_data_action: Indicates whether the operation is a data action.
:type is_data_action: bool
:param display: Display of the operation.
:type display: ~azure.mgmt.maps.models.OperationDisplay
:param origin: Origin of the operation.
:type origin: str
:param service_specification: One property of operation, include metric specifications.
:type service_specification: ~azure.mgmt.maps.models.ServiceSpecification
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'is_data_action': {'key': 'isDataAction', 'type': 'bool'},
'display': {'key': 'display', 'type': 'OperationDisplay'},
'origin': {'key': 'origin', 'type': 'str'},
'service_specification': {'key': 'properties.serviceSpecification', 'type': 'ServiceSpecification'},
}
def __init__(
self,
*,
name: Optional[str] = None,
is_data_action: Optional[bool] = None,
display: Optional["OperationDisplay"] = None,
origin: Optional[str] = None,
service_specification: Optional["ServiceSpecification"] = None,
**kwargs
):
super(OperationDetail, self).__init__(**kwargs)
self.name = name
self.is_data_action = is_data_action
self.display = display
self.origin = origin
self.service_specification = service_specification
class OperationDisplay(msrest.serialization.Model):
"""Operation display payload.
:param provider: Resource provider of the operation.
:type provider: str
:param resource: Resource of the operation.
:type resource: str
:param operation: Localized friendly name for the operation.
:type operation: str
:param description: Localized friendly description for the operation.
:type description: str
"""
_attribute_map = {
'provider': {'key': 'provider', 'type': 'str'},
'resource': {'key': 'resource', 'type': 'str'},
'operation': {'key': 'operation', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
}
def __init__(
self,
*,
provider: Optional[str] = None,
resource: Optional[str] = None,
operation: Optional[str] = None,
description: Optional[str] = None,
**kwargs
):
super(OperationDisplay, self).__init__(**kwargs)
self.provider = provider
self.resource = resource
self.operation = operation
self.description = description
class ServiceSpecification(msrest.serialization.Model):
"""One property of operation, include metric specifications.
:param metric_specifications: Metric specifications of operation.
:type metric_specifications: list[~azure.mgmt.maps.models.MetricSpecification]
"""
_attribute_map = {
'metric_specifications': {'key': 'metricSpecifications', 'type': '[MetricSpecification]'},
}
def __init__(
self,
*,
metric_specifications: Optional[List["MetricSpecification"]] = None,
**kwargs
):
super(ServiceSpecification, self).__init__(**kwargs)
self.metric_specifications = metric_specifications
class Sku(msrest.serialization.Model):
"""The SKU of the Maps Account.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param name: Required. The name of the SKU, in standard format (such as S0). Possible values
include: "S0", "S1", "G2".
:type name: str or ~azure.mgmt.maps.models.Name
:ivar tier: Gets the sku tier. This is based on the SKU name.
:vartype tier: str
"""
_validation = {
'name': {'required': True},
'tier': {'readonly': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'tier': {'key': 'tier', 'type': 'str'},
}
def __init__(
self,
*,
name: Union[str, "Name"],
**kwargs
):
super(Sku, self).__init__(**kwargs)
self.name = name
self.tier = None
class SystemData(msrest.serialization.Model):
"""Metadata pertaining to creation and last modification of the resource.
:param created_by: The identity that created the resource.
:type created_by: str
:param created_by_type: The type of identity that created the resource. Possible values
include: "User", "Application", "ManagedIdentity", "Key".
:type created_by_type: str or ~azure.mgmt.maps.models.CreatedByType
:param created_at: The timestamp of resource creation (UTC).
:type created_at: ~datetime.datetime
:param last_modified_by: The identity that last modified the resource.
:type last_modified_by: str
:param last_modified_by_type: The type of identity that last modified the resource. Possible
values include: "User", "Application", "ManagedIdentity", "Key".
:type last_modified_by_type: str or ~azure.mgmt.maps.models.CreatedByType
:param last_modified_at: The timestamp of resource last modification (UTC).
:type last_modified_at: ~datetime.datetime
"""
_attribute_map = {
'created_by': {'key': 'createdBy', 'type': 'str'},
'created_by_type': {'key': 'createdByType', 'type': 'str'},
'created_at': {'key': 'createdAt', 'type': 'iso-8601'},
'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'},
'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'},
'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'},
}
def __init__(
self,
*,
created_by: Optional[str] = None,
created_by_type: Optional[Union[str, "CreatedByType"]] = None,
created_at: Optional[datetime.datetime] = None,
last_modified_by: Optional[str] = None,
last_modified_by_type: Optional[Union[str, "CreatedByType"]] = None,
last_modified_at: Optional[datetime.datetime] = None,
**kwargs
):
super(SystemData, self).__init__(**kwargs)
self.created_by = created_by
self.created_by_type = created_by_type
self.created_at = created_at
self.last_modified_by = last_modified_by
self.last_modified_by_type = last_modified_by_type
self.last_modified_at = last_modified_at
| Azure/azure-sdk-for-python | sdk/maps/azure-mgmt-maps/azure/mgmt/maps/models/_models_py3.py | Python | mit | 32,038 |
# -*- coding: utf-8 -*-
"""
This script provides dev-ops tools for python project development.
"""
from __future__ import print_function
from pathlib_mate import Path
from setup import package
def reformat(**kwargs):
"""
auto pep8 format all python file in ``source code`` and ``tests`` dir.
"""
    # repository directory
repo_dir = Path(__file__).parent.absolute()
# source code directory
source_dir = Path(repo_dir, package.__name__)
if source_dir.exists():
print("Source code locate at: '%s'." % source_dir)
print("Auto pep8 all python file ...")
source_dir.autopep8(**kwargs)
else:
print("Source code directory not found!")
# unittest code directory
unittest_dir = Path(repo_dir, "tests")
if unittest_dir.exists():
print("Unittest code locate at: '%s'." % unittest_dir)
print("Auto pep8 all python file ...")
unittest_dir.autopep8(**kwargs)
else:
print("Unittest code directory not found!")
print("Complete!")
if __name__ == "__main__":
reformat()
| MacHu-GWU/docfly-project | reformat_pep8_code_style.py | Python | mit | 1,083 |
# -*- coding: utf-8 -*-
from django.core.management.base import BaseCommand, CommandError
from atados_core.models import GoogleAddress
from optparse import make_option
import csv
import time
class Command(BaseCommand):
  help = 'Recover typed_address2 entries corrupted by the command update_broken_legacy_from_csv'
option_list = BaseCommand.option_list + (
make_option(
"-f",
"--file",
dest = "filename",
help = "specify csv file",
metavar = "FILE"
),
)
def handle(self, *args, **options):
if options['filename'] == None :
raise CommandError("Option `--file=...` must be specified.")
with open(options['filename'], 'rb') as f:
reader = csv.reader(f, delimiter=";")
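      # Expected CSV layout (illustrative; delimiter is ';'):
      #   address_id;old_typed_address2;new_typed_address2
      # Header rows (whose first column is not an integer) are skipped by
      # the int() check below.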
for row in reader:
address_id = row[0]
old_typed_address2 = row[1]
new_typed_address2 = row[2]
try:
int(address_id)
except:
continue
#print "Trying to update address {}...".format(address_id)
address = GoogleAddress.objects.filter(id=address_id)
if new_typed_address2:
address.update(typed_address2=new_typed_address2)
else:
address.update(typed_address2=old_typed_address2)
print(old_typed_address2)
| atados/api | atados_core/management/commands/recover_legacy_address_from_updated_csv.py | Python | mit | 1,259 |
#!/usr/bin/env python3
import sys
sys.path[0] = sys.path[0] + "/lib/"
import haldir as hd
#generate lists for comparisons
comp_sets = hd.build_comp_sets()
#generate sets for identifying sex
male_set, female_set = hd.build_male_set()
#### test and fix ####
#print(comp_sets["medals"])
#print(male_set)
#print(type(male_set))
#print(female_set)
#print(type(female_set))
#print all lines of all ocr files
#hd.print_ocr_lines()
hd.assemble_lines(comp_sets)
#hd.print_ocr_files(hd.get_ocr_files())
#print(hd.act_year)
#for i in hd.get_csv_rows():
# print(i)
| meetunix/haldir | haldir.py | Python | mit | 577 |
"""
Code used to save the keras model as an image for visualization
"""
from keras.layers import Convolution2D, Input
from keras.layers import Flatten, Dense
from keras.layers import Dropout
from keras.models import Model
from keras.applications.vgg16 import VGG16
try:
# pydot-ng is a fork of pydot that is better maintained
import pydot_ng as pydot
except ImportError:
# fall back on pydot if necessary
import pydot
def model_vgg():
"""
Using pre-trained VGG model without top layers.
Reference https://blog.keras.io/building-powerful-image-classification-models-using-very-little-data.html
    The model is trained on the last four layers of VGG plus three new convolutional layers and three fully connected layers, while all earlier layers are frozen.
:return: model
"""
in_layer = Input(shape=(160, 320, 3))
model = VGG16(weights='imagenet', include_top=False, input_tensor=in_layer)
for layer in model.layers[:15]:
layer.trainable = False
# Add last block to the VGG model with modified sub sampling.
layer = model.outputs[0]
# These layers are used for reducing the (5,10,512) sized layer into (1,5,512).
layer = Convolution2D(512, 3, 3, subsample=(1, 1), activation='elu', border_mode='valid', name='block6_conv1')(
layer)
layer = Convolution2D(512, 3, 3, subsample=(1, 1), activation='elu', border_mode='same', name='block6_conv2')(
layer)
layer = Convolution2D(512, 3, 3, subsample=(1, 1), activation='elu', border_mode='valid', name='block6_conv3')(
layer)
layer = Flatten()(layer)
layer = Dropout(.2)(layer)
layer = Dense(1024, activation='relu', name='fc1')(layer)
layer = Dropout(.2)(layer)
layer = Dense(256, activation='relu', name='fc2')(layer)
layer = Dropout(.2)(layer)
layer = Dense(1, activation='linear', name='predict')(layer)
return Model(input=model.input, output=layer)
def model_to_dot(model, show_shapes=False, show_layer_names=True):
dot = pydot.Dot()
dot.set('rankdir', 'TB')
dot.set('concentrate', True)
dot.set_node_defaults(shape='record')
if model.__class__.__name__ == 'Sequential':
if not model.built:
model.build()
model = model.model
layers = model.layers
# first, populate the nodes of the graph
for layer in layers:
layer_id = str(id(layer))
if show_layer_names:
label = str(layer.name) + ' (' + layer.__class__.__name__ + ')'
else:
label = layer.__class__.__name__
if show_shapes:
# Build the label that will actually contain a table with the
# input/output
try:
outputlabels = str(layer.output_shape)
except:
outputlabels = 'multiple'
if hasattr(layer, 'input_shape'):
inputlabels = str(layer.input_shape)
elif hasattr(layer, 'input_shapes'):
inputlabels = ', '.join(
[str(ishape) for ishape in layer.input_shapes])
else:
inputlabels = 'multiple'
label = '%s\n|{input:|output:}|{{%s}|{%s}}' % (label, inputlabels, outputlabels)
node = pydot.Node(layer_id, label=label)
dot.add_node(node)
# second, add the edges
for layer in layers:
layer_id = str(id(layer))
for i, node in enumerate(layer.inbound_nodes):
node_key = layer.name + '_ib-' + str(i)
if node_key in model.container_nodes:
# add edges
for inbound_layer in node.inbound_layers:
inbound_layer_id = str(id(inbound_layer))
layer_id = str(id(layer))
dot.add_edge(pydot.Edge(inbound_layer_id, layer_id))
return dot
def plot(model, to_file='model.png', show_shapes=False, show_layer_names=True):
dot = model_to_dot(model, show_shapes, show_layer_names)
dot.write_png(to_file)
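# Note: writing the PNG requires the Graphviz 'dot' binary on the PATH; the
# pydot/pydot_ng releases contemporary with this code exposed
# pydot.find_graphviz() as a quick availability check (newer pydot versions
# removed it).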
model = model_vgg()
plot(model, to_file='model.png',show_shapes=True) | sridhar912/Self-Driving-Car-NanoDegree | CarND-BehavioralCloning-P3/viz.py | Python | mit | 4,047 |
from fabric.api import run, cd, env
from fabric.decorators import with_settings
from fabric.colors import green
from fabtools import service, files
## TODO: use upload_template to prevent hardcoded
## nginx configuration files
service_name='nginx'
@with_settings(warn_only=True)
def setup():
print(green('Setting up nginx config'))
conf_dir = '{}/deploy/nginx'.format(env.app_dir)
with cd(conf_dir):
output = run('ls')
list_files = output.split()
for file in list_files:
files.symlink('{}/{}'.format(conf_dir, file), '/etc/nginx/sites-enabled/', use_sudo=True)
restart()
@with_settings(warn_only=True)
def stop():
service.stop(service_name)
def start():
service.start(service_name)
def reload_config():
service.reload(service_name)
def restart():
service.restart(service_name)
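# Illustrative invocation (exact task paths depend on how the deployer
# fabfile imports this module):
#   $ fab nginx.setup
#   $ fab nginx.restart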
| streema/deployer | deployer/tasks/nginx.py | Python | mit | 860 |
# -*- coding: utf-8 -*-
"""Main CLI program."""
import sys
import os
import logging
import time
import argparse
from functools import partial
import json_logging
from .plugins.manager import DefaultPluginManager
from .updater.manager import updater_classes
from .detector.manager import detector_classes
from .core import getDynDnsClientForConfig
from .conf import get_configuration, collect_config
from .common.dynamiccli import parse_cmdline_args
def list_presets(cfg, out):
"""Write a human readable list of available presets to out.
:param cfg: ConfigParser instance
:param out: file object to write to
"""
for section in cfg.sections():
if section.startswith("preset:"):
out.write((section.replace("preset:", "")) + os.linesep)
for k, v in cfg.items(section):
out.write("\t%s = %s" % (k, v) + os.linesep)
def create_argparser():
"""Instantiate an `argparse.ArgumentParser`.
Adds all basic cli options including default values.
"""
parser = argparse.ArgumentParser()
arg_defaults = {
"daemon": False,
"log_json": False,
"loop": False,
"listpresets": False,
"config": None,
"debug": False,
"sleeptime": 300,
"version": False,
"verbose_count": 0
}
# add generic client options to the CLI:
parser.add_argument("-c", "--config", dest="config",
help="config file", default=arg_defaults["config"])
parser.add_argument("--list-presets", dest="listpresets",
help="list all available presets",
action="store_true", default=arg_defaults["listpresets"])
parser.add_argument("-d", "--daemon", dest="daemon",
help="go into daemon mode (implies --loop)",
action="store_true", default=arg_defaults["daemon"])
parser.add_argument("--debug", dest="debug",
help="increase logging level to DEBUG (DEPRECATED, please use -vvv)",
action="store_true", default=arg_defaults["debug"])
parser.add_argument("--log-json", dest="log_json",
help="log in json format",
action="store_true", default=arg_defaults["log_json"])
parser.add_argument("--loop", dest="loop",
help="loop forever (default is to update once)",
action="store_true", default=arg_defaults["loop"])
parser.add_argument("--sleeptime", dest="sleeptime",
help="how long to sleep between checks in seconds",
default=arg_defaults["sleeptime"])
parser.add_argument("--version", dest="version",
help="show version and exit",
action="store_true", default=arg_defaults["version"])
parser.add_argument("-v", "--verbose", dest="verbose_count",
action="count", default=arg_defaults["verbose_count"],
help="increases log verbosity for each occurrence")
return parser, arg_defaults
def run_forever(dyndnsclients):
"""
    Run an endless loop across the given dynamic dns clients.
:param dyndnsclients: list of DynDnsClients
"""
while True:
try:
# Do small sleeps in the main loop, needs_check() is cheap and does
# the rest.
time.sleep(15)
for dyndnsclient in dyndnsclients:
dyndnsclient.check()
except KeyboardInterrupt:
break
except Exception as exc:
logging.critical("An exception occurred in the dyndns loop", exc_info=exc)
return 0
def init_logging(log_level, log_json=False):
"""Configure logging framework."""
if log_json:
LOG = logging.getLogger()
LOG.setLevel(log_level)
LOG.addHandler(logging.StreamHandler(sys.stdout))
json_logging.init_non_web(enable_json=True)
json_logging.config_root_logger()
else:
logging.basicConfig(level=log_level, format="%(levelname)s %(message)s")
def main():
"""
Run the main CLI program.
Initializes the stack, parses command line arguments, and fires requested
logic.
"""
plugins = DefaultPluginManager()
plugins.load_plugins()
parser, _ = create_argparser()
# add the updater protocol options to the CLI:
for kls in updater_classes():
kls.register_arguments(parser)
for kls in detector_classes():
kls.register_arguments(parser)
# add the plugin options to the CLI:
from os import environ
plugins.options(parser, environ)
args = parser.parse_args()
if args.debug:
args.verbose_count = 5 # some high number
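    # The arithmetic on the next line maps -v occurrences onto stdlib log
    # levels: no flag -> WARNING (30), -v -> INFO (20), -vv -> DEBUG (10),
    # -vvv or more -> NOTSET (0).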
log_level = max(int(logging.WARNING / 10) - args.verbose_count, 0) * 10
init_logging(log_level, log_json=args.log_json)
# logging.debug("args %r", args)
if args.version:
from . import __version__
print("dyndnsc %s" % __version__) # noqa
return 0
# silence 'requests' logging
requests_log = logging.getLogger("requests")
requests_log.setLevel(logging.WARNING)
logging.debug(parser)
cfg = get_configuration(args.config)
if args.listpresets:
list_presets(cfg, out=sys.stdout)
return 0
if args.config:
collected_configs = collect_config(cfg)
else:
parsed_args = parse_cmdline_args(args, updater_classes().union(detector_classes()))
logging.debug("parsed_args %r", parsed_args)
collected_configs = {
"cmdline": {
"interval": int(args.sleeptime)
}
}
collected_configs["cmdline"].update(parsed_args)
plugins.configure(args)
plugins.initialize()
logging.debug("collected_configs: %r", collected_configs)
dyndnsclients = []
for thisconfig in collected_configs:
logging.debug("Initializing client for '%s'", thisconfig)
# done with options, bring on the dancing girls
dyndnsclient = getDynDnsClientForConfig(
collected_configs[thisconfig], plugins=plugins)
if dyndnsclient is None:
return 1
# do an initial synchronization, before going into endless loop:
dyndnsclient.sync()
dyndnsclients.append(dyndnsclient)
run_forever_callable = partial(run_forever, dyndnsclients)
if args.daemon:
import daemonocle
daemon = daemonocle.Daemon(worker=run_forever_callable)
daemon.do_action("start")
args.loop = True
if args.loop:
run_forever_callable()
return 0
| infothrill/python-dyndnsc | dyndnsc/cli.py | Python | mit | 6,684 |
import random
class RandomAgent(object):
def __init__(self, seed=None):
self.random = random.Random()
self.random.seed(seed)
def new_game(self, game_state):
pass
def make_move(self, game_state):
move = self.random.choice(game_state.legal_moves)
return move, game_state.move(move)
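# Usage sketch under an assumed game-state protocol: `game_state` must expose
# a `legal_moves` sequence and a `move()` method returning the successor
# state (neither is defined in this module).
#   agent = RandomAgent(seed=42)
#   move, next_state = agent.make_move(game_state)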
| tarvaina/nn-planner | agents/random_agent.py | Python | mit | 309 |
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
import constants
import sys
from charsetprober import CharSetProber
SAMPLE_SIZE = 64
SB_ENOUGH_REL_THRESHOLD = 1024
POSITIVE_SHORTCUT_THRESHOLD = 0.95
NEGATIVE_SHORTCUT_THRESHOLD = 0.05
SYMBOL_CAT_ORDER = 250
NUMBER_OF_SEQ_CAT = 4
POSITIVE_CAT = NUMBER_OF_SEQ_CAT - 1
# NEGATIVE_CAT = 0
class SingleByteCharSetProber(CharSetProber):
def __init__(self, model, reversed=constants.False, nameProber=None):
CharSetProber.__init__(self)
self._mModel = model
self._mReversed = reversed # TRUE if we need to reverse every pair in the model lookup
self._mNameProber = nameProber # Optional auxiliary prober for name decision
self.reset()
def reset(self):
CharSetProber.reset(self)
self._mLastOrder = 255 # char order of last character
self._mSeqCounters = [0] * NUMBER_OF_SEQ_CAT
self._mTotalSeqs = 0
self._mTotalChar = 0
self._mFreqChar = 0 # characters that fall in our sampling range
def get_charset_name(self):
if self._mNameProber:
return self._mNameProber.get_charset_name()
else:
return self._mModel['charsetName']
def feed(self, aBuf):
if not self._mModel['keepEnglishLetter']:
aBuf = self.filter_without_english_letters(aBuf)
aLen = len(aBuf)
if not aLen:
return self.get_state()
for c in aBuf:
order = self._mModel['charToOrderMap'][ord(c)]
if order < SYMBOL_CAT_ORDER:
self._mTotalChar += 1
if order < SAMPLE_SIZE:
self._mFreqChar += 1
if self._mLastOrder < SAMPLE_SIZE:
self._mTotalSeqs += 1
if not self._mReversed:
self._mSeqCounters[
self._mModel['precedenceMatrix'][(self._mLastOrder * SAMPLE_SIZE) + order]] += 1
else: # reverse the order of the letters in the lookup
self._mSeqCounters[
self._mModel['precedenceMatrix'][(order * SAMPLE_SIZE) + self._mLastOrder]] += 1
self._mLastOrder = order
if self.get_state() == constants.eDetecting:
if self._mTotalSeqs > SB_ENOUGH_REL_THRESHOLD:
cf = self.get_confidence()
if cf > POSITIVE_SHORTCUT_THRESHOLD:
if constants._debug:
sys.stderr.write('%s confidence = %s, we have a winner\n' % (self._mModel['charsetName'], cf))
self._mState = constants.eFoundIt
elif cf < NEGATIVE_SHORTCUT_THRESHOLD:
if constants._debug:
                    sys.stderr.write('%s confidence = %s, below negative shortcut threshold %s\n' % (
self._mModel['charsetName'], cf, NEGATIVE_SHORTCUT_THRESHOLD))
self._mState = constants.eNotMe
return self.get_state()
def get_confidence(self):
r = 0.01
if self._mTotalSeqs > 0:
# print self._mSeqCounters[POSITIVE_CAT], self._mTotalSeqs, self._mModel['mTypicalPositiveRatio']
r = (1.0 * self._mSeqCounters[POSITIVE_CAT]) / self._mTotalSeqs / self._mModel['mTypicalPositiveRatio']
# print r, self._mFreqChar, self._mTotalChar
r = r * self._mFreqChar / self._mTotalChar
if r >= 1.0:
r = 0.99
return r
| madgik/exareme | Exareme-Docker/src/exareme/exareme-tools/madis/src/lib/chardet/sbcharsetprober.py | Python | mit | 4,703 |
def separate_filename_from_extension(s):
slash = s.rfind('/') + 1
dex = slash + s[slash:].find('.', s[slash:].startswith('.'))
return s[:dex], s[dex:]
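# Behaviour sketch, computed from the function above (assumes the name
# actually contains an extension; the startswith('.') offset skips the
# leading dot of hidden files):
#   separate_filename_from_extension('archive.tar.gz')        # ('archive', '.tar.gz')
#   separate_filename_from_extension('/home/u/.profile.bak')  # ('/home/u/.profile', '.bak')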
| the-zebulan/CodeWars | katas/beta/separate_filename_from_extension.py | Python | mit | 163 |
import numpy as np
problems = [
{'C': 1, 'coins': [1, 5]},
{'C': 4, 'coins': [1]},
{'C': 10, 'coins': [1, 5, 10]},
{'C': 10, 'coins': [1, 2, 4, 5, 8, 10, 12]},
{'C': 12, 'coins': [1, 2, 4, 5, 8, 10, 13]}
]
def making_change(C, coins):
coins = [0] + coins
m = np.zeros((len(coins), C+1))
for i in range(1, C+1):
m[1, i] = i
for i in range(2, len(coins)):
for cost in range(1, C + 1):
if coins[i] > cost:
# Current coin greater than cost C. Just re-use previous solution.
m[i, cost] = m[i-1, cost]
            else:
                # Min between:
                # - skipping coin i entirely (previous row), and
                # - using one coin i plus the optimal solution for the
                #   remaining cost in the same row (coin i may repeat).
                m[i, cost] = min(m[i-1, cost], 1 + m[i, cost - coins[i]])
return m
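# Worked example (assumes, like the base row m[1, i] = i, that coin 1 is
# always available): for C=6 and coins=[1, 3, 4] the bottom-right entry
# m[-1, 6] is 2, i.e. two coins (3 + 3).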
if __name__ == '__main__':
for problem in problems:
result = making_change(**problem)
print('C: %i, coins: %s' % (problem['C'], problem['coins']))
print(result)
| lucasdavid/Use-Algorithms | src/dynamic_programing/making_change.py | Python | mit | 1,084 |
"""This file contains code for use with "Think Bayes",
by Allen B. Downey, available from greenteapress.com
Copyright 2014 Allen B. Downey
License: GNU GPLv3 http://www.gnu.org/licenses/gpl.html
"""
from __future__ import print_function, division
import numpy
import thinkbayes2
import thinkplot
from scrape import scrape_team
class Football():
""" Represents hypotheses about a Football teams offense,
in terms of scores per game, and the probability of a given
score being a TD.
"""
def __init__(self, hypos):
self.score = ScoreType(hypos[0])
self.TDPercent = BooleanEstimator(hypos[1])
def Update(self, data):
"""Update the child PMFs based on the data.
data = (time since last score, boolean flag for the score being a TD)
"""
self.score.Update(data[0])
self.TDPercent.Update(data[1])
def PredRemaining(self, rem_time, points_scored):
"""Plots the predictive distribution for final number of goals.
rem_time: remaining time in the game in minutes
points_scored: points already scored
"""
scorePredict = self.score.PredRemaining(rem_time,0)
scorePmf = thinkbayes2.Pmf()
for prob_td, prob_p in self.TDPercent.Items():
tdProbPmf = thinkbayes2.Pmf()
for scores, prob_s in scorePredict.Items():
for num_tds in range(scores + 1):
num_fgs = scores - num_tds
points = 7 * num_tds + 3 * num_fgs
ncr = thinkbayes2.BinomialCoef(scores, num_tds)
tdProbPmf.Incr(points, prob_s * ncr * (prob_td**num_tds * (1 - prob_td)**num_fgs))
scorePmf.Incr(tdProbPmf, prob_p)
mix = thinkbayes2.MakeMixture(scorePmf)
mix += points_scored
return mix
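    # Concrete reading of the mixture above (arithmetic only): with
    # scores == 2, the possible (TD, FG) splits are (0, 2) -> 6 points,
    # (1, 1) -> 10 and (2, 0) -> 14, each weighted by the binomial
    # probability implied by prob_td.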
class BooleanEstimator(thinkbayes2.Suite):
"""Represents a choice between 2 options"""
def Likelihood(self, data, hypo):
"""Computes the likelihood of the data under the hypothesis.
data: boolean indicating if the event happened
hypo: the probability of the event happening
"""
if data is True:
return hypo
else:
return 1 - hypo
class ScoreType(thinkbayes2.Suite):
"""Represents hypotheses about the lambda parameter of a
Poisson Process to generate scores.
"""
def Likelihood(self, data, hypo):
"""Computes the likelihood of the data under the hypothesis.
hypo: hypothetical goal scoring rate in goals per game
data: time between goals in minutes
"""
x = data #time between goals in minutes
lam = hypo/60.0 #goals per minute
        like = thinkbayes2.EvalExponentialPdf(x, lam)  # likelihood under each hypothetical lambda
return like
def PredRemaining(self, rem_time, score):
"""Plots the predictive distribution for final number of goals.
rem_time: remaining time in the game in minutes
score: number of goals already scored
"""
        metapmf = thinkbayes2.Pmf()  # meta-PMF: a PMF whose values are PMFs
        for lam, prob in self.Items():  # loop through the probabilities of each lambda
lt = lam*rem_time/60
pmf=thinkbayes2.MakePoissonPmf(lt,20)
metapmf[pmf]=prob
mix = thinkbayes2.MakeMixture(metapmf)
mix += score
return mix
def constructPriors():
"""Constructs an even prior for both teams, and then
uses data from www.covers.com from the 2014 season to
update the priors
"""
eagles_url = "/pageLoader/pageLoader.aspx?page=/data/nfl/teams/pastresults/2014-2015/team7.html"
giants_url = "/pageLoader/pageLoader.aspx?page=/data/nfl/teams/pastresults/2014-2015/team8.html"
eagles = Football((numpy.linspace(0, 20, 201), numpy.linspace(0, 1, 201)))
giants = Football((numpy.linspace(0, 20, 201), numpy.linspace(0, 1, 201)))
eagles_data = scrape_team(eagles_url)
giants_data = scrape_team(giants_url)
last_time = 0
for game in eagles_data:
last_time += 60.0
for item in game:
if item[2] == "Eagles":
TD = (item[1] == "TD")
inter_arrival = last_time - item[0]
eagles.Update((inter_arrival, TD))
last_time = item[0]
last_time = 0
for game in giants_data:
last_time += 60
for item in game:
if item[2] == "Giants":
TD = (item[1] == "TD")
inter_arrival = last_time - item[0]
giants.Update((inter_arrival, TD))
last_time = item[0]
return eagles, giants
def main():
"""Look at the October 12th, 2014 game between the Giants and the Eagles,
and predict the probabilities of each team winning.
"""
eagles, giants = constructPriors()
GoalTotalGiants = giants.PredRemaining(60, 0)
GoalTotalEagles = eagles.PredRemaining(60, 0)
print("Giants win", GoalTotalEagles < GoalTotalGiants)
print("Eagles win", GoalTotalGiants < GoalTotalEagles)
print(GoalTotalEagles.MakeCdf().CredibleInterval(90))
print(GoalTotalGiants.MakeCdf().CredibleInterval(90))
if __name__ == '__main__':
main()
| MattWis/PoissonFootball | football2.py | Python | mit | 5,886 |
"""
File: motif.py
Purpose: Encapsulates a motif note figure plus all constraints proper to that motif, i.e. the constraint actors are
all from the note figure.
"""
from melody.structure.abstract_motif import AbstractMotif
from structure.abstract_note_collective import AbstractNoteCollective
from structure.line import Line
from structure.note import Note
class Motif(AbstractMotif):
def __init__(self, note_structure, constraints=list(), name=''):
"""
Constructor.
:param note_structure: AbstractNoteCollective, Note, or list of either.
:param constraints: list of constraints on notes from above.
:param name: name of motif
:return:
# How about beat and meter specification?
# Check that each structure is top coverage, i.e. parent is line or line.parent == None
"""
# Normalize all structure info as a list of note structures and notes.
structure = note_structure if isinstance(note_structure, list) else [note_structure]
actors = Motif._extract_actors(structure)
self.__note_structure = structure
self.__name = name
self.__constraints = list(constraints)
AbstractMotif.__init__(self, actors, constraints)
# Check that all the constraints' actors list amongst the Motif's actors
motif_actors = self.actors
for constraint in self.constraints:
for a in constraint.actors:
if a not in motif_actors:
raise Exception('Actor {0} in constraint {1} not a Motif actor.'.format(a, constraint))
@property
def name(self):
return self.__name
@property
def note_structure(self):
return self.__note_structure
def copy_with(self, note_structure):
"""
Clone the Motif, but using a different note_structure that must structurally match this motif. This is how
Motif reuse is achieved in melodic analysis.
:param note_structure: Note, List of structures, or note structure.
:return:
"""
# Do we need to match on note_structural levels???
new_structure = note_structure if isinstance(note_structure, list) else [note_structure]
if len(new_structure) != len(self.note_structure):
return None
        # Check that the structures match pairwise: collectives must be
        # structurally equal; notes must agree in duration.
        for a, b in zip(self.note_structure, note_structure):
            if isinstance(a, AbstractNoteCollective) and isinstance(b, AbstractNoteCollective):
                if not Motif.__structurally_equal(a, b):
                    return None
            elif isinstance(a, Note) and isinstance(b, Note):
                if a.duration != b.duration:
                    return None
            else:
                return None
actors = Motif._extract_actors(new_structure)
note_map = {a: b for a, b in zip(self.actors, actors)}
copy_constraints = [x.clone([note_map[a] for a in x.actors]) for x in self.constraints]
return Motif(new_structure, copy_constraints, self.name)
    def copy_to(self, first_note):
        """Attempt to re-anchor this motif so that it starts at first_note, matching
        durations and structure note by note; returns a new Motif, or None on mismatch.
        """
new_structure = list()
n = first_note
for s in self.note_structure:
if n is None:
return None
if isinstance(s, Note) and s.duration == n.duration and (n.parent is None or isinstance(n.parent, Line)):
new_structure.append(n)
n = n.next_note()
else: # Assume s is a structure
n_level = None
for s_note in s.get_all_notes():
s_level = s_note
n_level = n
while True:
if n_level is None or type(s_level) != type(n_level):
return None
if s_level == s:
break
s_level = s_level.parent
n_level = n_level.parent
n = n.next_note()
new_structure.append(n_level)
actors = Motif._extract_actors(new_structure)
note_map = {a: b for a, b in zip(self.actors, actors)}
copy_constraints = [x.clone([note_map[a] for a in x.actors]) for x in self.constraints]
return Motif(new_structure, copy_constraints, self.name)
@staticmethod
    def __structurally_equal(a, b):
        if type(a) != type(b):
            return False
        if isinstance(a, Note):
            return a.duration == b.duration
        if len(a.sub_notes) != len(b.sub_notes):
            return False
        for a1, b1 in zip(a.sub_notes, b.sub_notes):
            if not Motif.__structurally_equal(a1, b1):
                return False
        return True
def reverse(self):
pass
def __str__(self):
strs = ',\n'.join(str(s) for s in self.note_structure)
return 'Motif[{0}{1}]'.format(self.name + '.' if len(self.name) != 0 else '', strs)
@staticmethod
def _extract_actors(note_structure):
actors = list()
for n in note_structure:
if isinstance(n, AbstractNoteCollective):
actors.extend(n.get_all_notes())
elif isinstance(n, Note):
actors.append(n)
else:
raise Exception('Motif does not accept type \'{0}\''.format(type(n)))
return actors
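# Hypothetical usage sketch (Note/constraint construction details are assumed,
# not taken from this module):
#
#     notes = [Note(...), Note(...), Note(...)]             # a short figure
#     m = Motif(notes, constraints=[my_constraint], name='opening')
#     m2 = m.copy_to(some_later_note)                       # re-anchor the figure
#     if m2 is not None:
#         print(m2)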
| dpazel/music_rep | melody/structure/motif.py | Python | mit | 5,476 |
"""
Package variables.
.. module:: variables
   :platform: Unix, Windows
   :synopsis: classes for variable objects
.. moduleauthor:: Thomas Lehmann <[email protected]>
"""
| Nachtfeuer/concept-py | concept/variables/__init__.py | Python | mit | 200 |
from __future__ import unicode_literals
__author__ = 'riegel'
import socket
#import threading
import multiprocessing
from .handler import handle_client
#from .context import RequestConte
import logging
logger = logging.getLogger(__name__)
class Server(object):
def __init__(self, hostname, port):
self.hostname = hostname
self.port = port
def start(self):
logger.info("listening")
self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.socket.bind((self.hostname, self.port))
self.socket.listen(1)
while True:
conn, address = self.socket.accept()
logger.info("Got connection")
process = multiprocessing.Process(target=handle_client, args=(conn, address))
process.daemon = True
process.start()
logger.info("Started process %r", process)
def runserver():
server = Server("0.0.0.0", 7766)
try:
logger.info("Listening")
server.start()
except Exception as e:
logger.exception("Unexpected exception: %s", e)
finally:
logger.info("Shutting down")
for process in multiprocessing.active_children():
logger.info("Shutting down process %r", process)
process.terminate()
process.join()
logger.info("All done")
| mrcrgl/gge-storage | lib/socket/server.py | Python | mit | 1,426 |
# Status LED GPIO pins (pin-numbering scheme assumed)
GREEN = 24
RED = 26
BLUE = 22
# NFC tag IDs that are granted access
WHITELIST = {"eb94294a", "044c4fd2222a80"}
SPINTIME = 3
BLINKFRACTION = 3
# espeak text-to-speech settings
ACCESSDENIED = "Access denied."
SPEAKSPEED = 140
SPEAKPITCH = 50
| j0sh77/nfc | settings.py | Python | mit | 181 |
import json
from configuration_builder import DEFAULT_CONFIGURATION_BUILDER
def get_config(request):
return {
'javascript_settings': json.dumps(
DEFAULT_CONFIGURATION_BUILDER.get_configuration()
)
}
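# Typical wiring (assumed): register this processor in the project's template
# context processors, then emit the value from a base template, e.g.
#
#     <script>
#         var configuration = {{ javascript_settings|safe }};
#     </script>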
| pozytywnie/django-javascript-settings | javascript_settings/context_processors.py | Python | mit | 238 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('feed_filter', '0005_auto_20151002_0938'),
]
operations = [
migrations.AddField(
model_name='filter',
name='name',
field=models.CharField(help_text='Name to identify the filter by', max_length=200, default=0),
preserve_default=False,
),
]
| nice-shot/FacebookFilter | feed_filter/migrations/0006_filter_name.py | Python | mit | 494 |
'''
Created on Jul 10, 2013
@author: nshearer
'''
from ConsoleSimpleQuestion import ConsoleSimpleQuestion, UserAnswerValidationError
class ConsoleYesNoQuestion(ConsoleSimpleQuestion):
def __init__(self, question):
super(ConsoleYesNoQuestion, self).__init__(question)
def present_question(self, format_help=None):
super(ConsoleYesNoQuestion, self).present_question(format_help='yes/no')
def encode_answer_to_native(self, user_answer=None):
if user_answer is None:
user_answer = self.user_answer
if user_answer.lower() in ['yes', 'y', '1', 'on', 'true']:
return True
elif user_answer.lower() in ['no', 'n', '0', 'off', 'false']:
return False
elif len(user_answer.strip()) == 0:
return None
raise UserAnswerValidationError("Answer not recognized")
def decode_answer_to_text(self, answer):
'''Given a previous or default answer, convert it to a text value'''
if answer is True:
return 'yes'
elif answer is False:
return 'no'
return None
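# Example round-trip (assuming the base-class wiring supplies user input):
#
#     q = ConsoleYesNoQuestion("Proceed?")
#     q.encode_answer_to_native('Y')     # -> True
#     q.encode_answer_to_native('off')   # -> False
#     q.decode_answer_to_text(True)      # -> 'yes'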
| shearern/PyWizard | src/py_wizard/console_wiz_iface/ConsoleYesNoQuestion.py | Python | mit | 1,184 |
from flask import current_app
import requests
def is_recaptcha_valid(token):
payload = {
'secret': current_app.config.get('RECAPTCHA_PRIVATE_KEY', ''),
'response': token
}
    response = requests.post(
        'https://www.google.com/recaptcha/api/siteverify',
        data=payload,
    )
return response.json()['success'] is True
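# Example use inside a Flask view ('g-recaptcha-response' is the field name
# the reCAPTCHA widget posts):
#
#     token = request.form.get('g-recaptcha-response', '')
#     if not is_recaptcha_valid(token):
#         abort(400)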
| ev-agelos/Python-bookmarks | bookmarks/api/utils.py | Python | mit | 354 |
#!usr/bin/python
from osgeo import gdal
import os,csv
import json
import utils.ogr2ogr as ogr2ogr
import utils.ogrinfo as ogrinfo
import utils.gdal_polygonize_edited as gdal_polygonize
import get_stats
def parse(inputFile=None,outputFolder="data",\
outputFile="datatiles.json",datatype=None,top=15,layernumber=None,key='Descriptio',\
scriptFile=None, scriptArg1=None,scriptArg2=None,\
scriptArg3=None,scriptArg4=None):
"""
Acquire and parse data into GeoJSON
Parameters:
'inputFile': Filename for input data
'outputFolder': Filename for output data
'datatype': Data type (will find automatically if not given)
'top': The N most frequent classes in the GIS file to be processed.
'layernumber': The layer ID of the layer to be processed, when input file contains multiple layers
'key': Keyword in 'properties' of GIS file to classify for.
        'scriptFile': Script that provides the input data. Must contain a script(...) function that returns the filename of the output file (of course, scripts can also be run outside of this framework)
'scriptArgN': Arguments for script, if any
Return:
'outputFile': Output file name of the converted file, if input file not GeoJSON
'stats': Labels for most frequent classes
'freq': Frequency of most frequent classes
'elements': GeoJSON data
"""
    #Execute script, if given. This should allow users to load data from
    #custom scripts.
if scriptFile:
# import script file as module for input with and without .py ending
if scriptFile[0:8] == "scripts/":
scriptFile = scriptFile[8:] # delete folder if present
if scriptFile[-3:] == ".py":
scriptFile = scriptFile[0:-3] # delete .py ending if present
scriptModule = __import__(scriptFile)
        # Load according to how many arguments are given: collect the leading
        # arguments that were actually supplied and pass them to script(...).
        args = []
        for arg in (scriptArg1, scriptArg2, scriptArg3, scriptArg4):
            if not arg:
                break
            args.append(arg)
        scriptReturn = scriptModule.script(*args)
# parse returns
inputFile = scriptReturn[0]
datatype = scriptReturn[1]
print "Script to get tile data executed successfully within \'parse\'"
else:
if inputFile == None:
print "Error: provide either script or input file"
exit()
#check if provided code exists and if it is ogr-readable vector or
#ogr-readable raster format
    vector_or_raster = 0  # 2 = vector, 1 = raster, 0 = undetermined
if datatype: #if raster format has to be polygonized into vector format
with open('libs/ogr_raster_formats.csv','rb') as csvfile:
raster_formats = list(csv.reader(csvfile,delimiter=",",quotechar='"'))
for cnt in range(0,len(raster_formats)):
if datatype == raster_formats[cnt][1]:
print "Detected format:",raster_formats[cnt][0]
if raster_formats[cnt][-1]!='Yes':
print "Please be aware:"
print "Is format compiled in standard GDAL?",raster_formats[cnt][-1]
print "Ths format is a raster format"
vector_or_raster = 1
with open('libs/ogr_vector_formats.csv','rb') as csvfile:
vector_formats = list(csv.reader(csvfile,delimiter=",",quotechar='"'))
for cnt in range(0,len(vector_formats)):
if datatype == vector_formats[cnt][1]:
print "Detected format:",vector_formats[cnt][0]
if vector_formats[cnt][-1]!='Yes':
print "Please be aware:"
print "Is format compiled in standard GDAL?",vector_formats[cnt][-1]
print "This format is a vector format"
vector_or_raster = 2
if vector_or_raster == 0:
print "Error: Format",datatype,\
"not found. Check libs/*.csv for available formats."
exit()
if vector_or_raster == 0:
vector_or_raster = 2 # Assuming vector format if no datatype specified.
    # guard against an index error when the input file name is shorter than 4 characters
if len(inputFile)>=4:
inputFileCopy=inputFile
else:
inputFileCopy=" "
# Remove slash if it is at the end of input file
if inputFile[-1]=="/":
inputFile=inputFile[0:-1]
if datatype=="GeoJSON" or inputFileCopy[-4:]=="json":
outputFile=inputFile#ignore, if already in GeoJSON format
print "No parsing needed"
stats,freq,elements=get_stats.get_stats(outputFile,top,key=key)
else:
#vectorize if in raster format and user agrees
if vector_or_raster == 1:
if input("Your file is a raster format. Convert to vector format? (y/n)").lower == "y":
gdal_polygonize.polygonize(inputFile,outputFolder+"/polygonised.json",
"GeoJSON",quiet_flag=0) #not tested
inputFile=outputFolder+"/polygonised.json"
#get layers of input file
layers = ogrinfo.main(["-so",inputFile])
if len(layers)>1:
# Select layers (one or all)
if not layernumber:
                choseLayer = input("Multiple layers found. Choose layer (number) or '0' for all layers: ")
else:
choseLayer = layernumber
if choseLayer==0: # iterate over each layer
for i in range(0,len(layers)):
#create filename
print "Converting layer",layers[i],"(",i+1,"out of",len(layers),"layers)..."
_,outputFile=os.path.split(inputFile+str(i+1)+".json")
outputFile=outputFolder+"/"+outputFile
# avoid GeoJSON error (GeoJSON cannot overwrite files)
outputFile=overwrite(outputFile)
#convert layer
ogr2ogr.main(["","-f","GeoJSON",outputFile,inputFile,layers[i]])
print ''
print "Converted to",outputFile
stats,freq,_=get_stats.get_stats(outputFile,top,key=key) #get statistics
elements=None
else: #only convert one layer
print inputFile
_,outputFile=os.path.split(inputFile+str(choseLayer)+".json")
outputFile=outputFolder+"/"+outputFile
print "Converting layer",layers[choseLayer-1],"..."
outputFile=overwrite(outputFile)
ogr2ogr.main(["","-f","GeoJSON",outputFile,\
inputFile,layers[choseLayer-1],\
'--config','OSM_USE_CUSTOM_INDEXING','NO']) #convert layer
print ''
print "Converted to",outputFile
stats,freq,elements=get_stats.get_stats(outputFile,top,key=key) #get statistics
else:
_,outputFile=os.path.split(inputFile+".json")
outputFile=outputFolder+"/"+outputFile
print "Converting..."
outputFile=overwrite(outputFile)
ogr2ogr.main(["","-f","GeoJSON",outputFile,inputFile]) #convert layer
print ''
print "Converted to",outputFile
stats,freq,elements=get_stats.get_stats(outputFile,top,key=key) #get statistics
    # Ensure 'stats' is defined even when no statistics were computed above.
    try:
        stats
    except UnboundLocalError:
        stats = None
return outputFile,stats,freq,elements
def overwrite(outputFile,promptoverwrite=False):
    '''Remove the file if it already exists; necessary because the GeoJSON
    driver cannot overwrite existing files.'''
try:
if os.path.exists(outputFile):
if promptoverwrite:
overwrite_answer=raw_input(outputFile+" already exists. Overwrite? (y/n) ")
else:
overwrite_answer=''
if overwrite_answer=="y" or (not promptoverwrite):
os.remove(outputFile)
else:
outputFile=raw_input("Provide alternative filename: ")
except OSError:
pass
return outputFile
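# Example invocation (paths assumed; requires the utils/* GDAL wrappers):
#
#     outputFile, stats, freq, elements = parse(
#         inputFile='data/regions.shp', outputFolder='data',
#         top=10, layernumber=1, key='Descriptio')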
| worldbank/cv4ag | modules/parse.py | Python | mit | 7,121 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('beer', '0005_beer_status'),
]
operations = [
migrations.AlterField(
model_name='beer',
name='status',
field=models.IntegerField(choices=[(1, b'Available'), (2, b'Empty')]),
preserve_default=True,
),
]
| OckiFals/crud-django | beer/migrations/0006_auto_20150114_1451.py | Python | mit | 457 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from base import Problem
class Solution(Problem):
def solve(self, input_):
print('Solve problem {}'.format(self.number))
print(str(sum(i for i in range(1, input_ + 1))**2 -
sum(i**2 for i in range(1, input_ + 1))))
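        # Closed-form check: (n*(n+1)/2)**2 - n*(n+1)*(2*n+1)/6 gives the same
        # result without looping; for n = 10 that is 3025 - 385 = 2640.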
if __name__ == '__main__':
solution = Solution(6)
solution.solve(10)
| phapdv/project_euler | pe6.py | Python | mit | 381 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class VpnGatewaysOperations:
"""VpnGatewaysOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2019_09_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def get(
self,
resource_group_name: str,
gateway_name: str,
**kwargs: Any
) -> "_models.VpnGateway":
"""Retrieves the details of a virtual wan vpn gateway.
:param resource_group_name: The resource group name of the VpnGateway.
:type resource_group_name: str
:param gateway_name: The name of the gateway.
:type gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VpnGateway, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_09_01.models.VpnGateway
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VpnGateway"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-09-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VpnGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways/{gatewayName}'} # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
gateway_name: str,
vpn_gateway_parameters: "_models.VpnGateway",
**kwargs: Any
) -> "_models.VpnGateway":
cls = kwargs.pop('cls', None) # type: ClsType["_models.VpnGateway"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-09-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(vpn_gateway_parameters, 'VpnGateway')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('VpnGateway', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('VpnGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways/{gatewayName}'} # type: ignore
async def begin_create_or_update(
self,
resource_group_name: str,
gateway_name: str,
vpn_gateway_parameters: "_models.VpnGateway",
**kwargs: Any
) -> AsyncLROPoller["_models.VpnGateway"]:
"""Creates a virtual wan vpn gateway if it doesn't exist else updates the existing gateway.
:param resource_group_name: The resource group name of the VpnGateway.
:type resource_group_name: str
:param gateway_name: The name of the gateway.
:type gateway_name: str
:param vpn_gateway_parameters: Parameters supplied to create or Update a virtual wan vpn
gateway.
:type vpn_gateway_parameters: ~azure.mgmt.network.v2019_09_01.models.VpnGateway
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either VpnGateway or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2019_09_01.models.VpnGateway]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.VpnGateway"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
gateway_name=gateway_name,
vpn_gateway_parameters=vpn_gateway_parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VpnGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways/{gatewayName}'} # type: ignore
async def update_tags(
self,
resource_group_name: str,
gateway_name: str,
vpn_gateway_parameters: "_models.TagsObject",
**kwargs: Any
) -> "_models.VpnGateway":
"""Updates virtual wan vpn gateway tags.
:param resource_group_name: The resource group name of the VpnGateway.
:type resource_group_name: str
:param gateway_name: The name of the gateway.
:type gateway_name: str
:param vpn_gateway_parameters: Parameters supplied to update a virtual wan vpn gateway tags.
:type vpn_gateway_parameters: ~azure.mgmt.network.v2019_09_01.models.TagsObject
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VpnGateway, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_09_01.models.VpnGateway
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VpnGateway"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-09-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update_tags.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(vpn_gateway_parameters, 'TagsObject')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VpnGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways/{gatewayName}'} # type: ignore
async def _delete_initial(
self,
resource_group_name: str,
gateway_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-09-01"
accept = "application/json"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways/{gatewayName}'} # type: ignore
async def begin_delete(
self,
resource_group_name: str,
gateway_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes a virtual wan vpn gateway.
:param resource_group_name: The resource group name of the VpnGateway.
:type resource_group_name: str
:param gateway_name: The name of the gateway.
:type gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
gateway_name=gateway_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways/{gatewayName}'} # type: ignore
async def _reset_initial(
self,
resource_group_name: str,
gateway_name: str,
**kwargs: Any
) -> Optional["_models.VpnGateway"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.VpnGateway"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-09-01"
accept = "application/json"
# Construct URL
url = self._reset_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('VpnGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_reset_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways/{gatewayName}/reset'} # type: ignore
async def begin_reset(
self,
resource_group_name: str,
gateway_name: str,
**kwargs: Any
) -> AsyncLROPoller["_models.VpnGateway"]:
"""Resets the primary of the vpn gateway in the specified resource group.
:param resource_group_name: The resource group name of the VpnGateway.
:type resource_group_name: str
:param gateway_name: The name of the gateway.
:type gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either VpnGateway or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2019_09_01.models.VpnGateway]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.VpnGateway"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._reset_initial(
resource_group_name=resource_group_name,
gateway_name=gateway_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VpnGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_reset.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways/{gatewayName}/reset'} # type: ignore
def list_by_resource_group(
self,
resource_group_name: str,
**kwargs: Any
) -> AsyncIterable["_models.ListVpnGatewaysResult"]:
"""Lists all the VpnGateways in a resource group.
:param resource_group_name: The resource group name of the VpnGateway.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ListVpnGatewaysResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2019_09_01.models.ListVpnGatewaysResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ListVpnGatewaysResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-09-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_by_resource_group.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('ListVpnGatewaysResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways'} # type: ignore
def list(
self,
**kwargs: Any
) -> AsyncIterable["_models.ListVpnGatewaysResult"]:
"""Lists all the VpnGateways in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ListVpnGatewaysResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2019_09_01.models.ListVpnGatewaysResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ListVpnGatewaysResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-09-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('ListVpnGatewaysResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/vpnGateways'} # type: ignore
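# Hypothetical usage sketch (client construction details assumed; these
# operations are normally reached through a NetworkManagementClient):
#
#     client = NetworkManagementClient(credential, subscription_id)
#     poller = await client.vpn_gateways.begin_create_or_update(
#         "my-rg", "my-gateway", vpn_gateway_parameters)
#     gateway = await poller.result()
#     async for gw in client.vpn_gateways.list_by_resource_group("my-rg"):
#         print(gw.name)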
| Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_09_01/aio/operations/_vpn_gateways_operations.py | Python | mit | 32,520 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from astropy.units import Unit, def_unit, add_enabled_units
new_units = dict(
flam='erg * s ** (-1) * AA ** (-1) * cm **(-2)',
fnu='erg * s ** (-1) * Hz ** (-1) * cm **(-2)',
photflam='photon * s ** (-1) * AA ** (-1) * cm **(-2)',
photfnu='photon * s ** (-1) * Hz ** (-1) * cm **(-2)',
angstroms='angstrom'
)
# add_enabled_units returns a context manager; entering it without exiting
# keeps the new units enabled for the lifetime of the process.
add_enabled_units([def_unit([k], Unit(v)) for k, v in new_units.items()])\
    .__enter__()
# Imported after the unit registration so the custom units are already enabled.
from .vega import Vega
from .sun import Sun
from .sandbox import (UncertainFilter, UnitAscii_Library, UnitFilter,
UnitHDF_Library, UnitLibrary, UnitLickIndex,
UnitLickLibrary, get_library)
| mfouesneau/pyphot | pyphot/astropy/__init__.py | Python | mit | 703 |