code | repo_name | path | language | license | size |
---|---|---|---|---|---|
"""Utils tests"""
import pytest
from social_django.models import UserSocialAuth
from authentication.backends.micromasters import MicroMastersAuth
from authentication.exceptions import UserMissingSocialAuthException
from authentication.strategy import DjangoRestFrameworkStrategy
from authentication.utils import (
load_drf_strategy,
jwt_get_username_from_payload_handler,
)
def test_load_drf_strategy(mocker):
"""Test that load_drf_strategy returns a DjangoRestFrameworkStrategy instance"""
assert isinstance(load_drf_strategy(mocker.Mock()), DjangoRestFrameworkStrategy)
@pytest.mark.parametrize("provider", [None, MicroMastersAuth.name])
def test_jwt_get_username_from_payload_handler(user, provider):
"""Test that the username is fetched from the JWT correctly"""
social = UserSocialAuth.objects.create(
user=user, provider=MicroMastersAuth.name, uid="abcdef"
)
if provider:
payload = {"username": social.uid, "provider": provider}
else:
payload = {"username": user.username}
assert jwt_get_username_from_payload_handler(payload) == user.username
@pytest.mark.django_db
def test_jwt_get_username_from_payload_handler_missing_social_auth():
"""Test that the username is fetched from the JWT correctly"""
payload = {"username": "abcdef", "provider": "micromasters"}
with pytest.raises(UserMissingSocialAuthException):
jwt_get_username_from_payload_handler(payload)
| mitodl/open-discussions | authentication/utils_test.py | Python | bsd-3-clause | 1,459 |
"""App configuration for custom_user."""
from django.apps import AppConfig
class CustomUserConfig(AppConfig):
"""
Default configuration for custom_user.
"""
name = "custom_user"
verbose_name = "Custom User"
# https://docs.djangoproject.com/en/3.2/releases/3.2/#customizing-type-of-auto-created-primary-keys
default_auto_field = "django.db.models.AutoField"
| jcugat/django-custom-user | src/custom_user/apps.py | Python | bsd-3-clause | 389 |
import os
import pandas as pd
import numpy as np
# Folders where results are and will be saved
savefolder=r'/scratch/sayala/RadianceScenes/BasicSimulations/'
posSampled = 50 #!! Modify to the number of positions sampled
for jj in range(0, 2):
# 0 - With
# 1 - Without
if jj == 0:
testfolder=r'/scratch/sayala/RadianceScenes/BasicSimulations/Gendaylit1axis/WithResults'
savefilename = 'COMPILED_Results_WITH_19AUG.csv'
withopt = 'WITH'
if jj == 1:
testfolder=r'/scratch/sayala/RadianceScenes/BasicSimulations/Gendaylit1axis/WithoutResults'
savefilename = 'COMPILED_Results_WITHOUT_19AUG.csv'
withopt = 'WITHOUT'
filelist = sorted(os.listdir(testfolder))
#daylist = [x[4:] for x in filelist]
# timestamplist = []
# for i in range(len(daylist)):
# timestamplist[i] = sorted(os.listdir(testfolder+r'\'+f'{day for day in daylist}'))
print('{} files in the directory'.format(len(filelist)))
#print(filelist[1].partition('_Module_')[0])
#!! Make sure this matches the folder name pattern or adjust accordingly.
# This assumes the folders are named "Day_01_01_01_08" (y m d h)
x_all = []
y_all = []
z_all = []
rearZ_all = []
mattype_all = []
rearMat_all = []
Wm2Front_all = []
Wm2Back_all = []
pos_all = []
timestamp_all = []
errors_all = []
timestamplist = [x[4:15] for x in filelist]
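# e.g. 'Day_21_04_29_11_Posx_0'[4:15] -> '21_04_29_11' (drops the 'Day_' prefix)
# NOTE: the hard-coded timestamplist below overrides this for a single-day run.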
# positionlist = [x[21:] for x in filelist]
timestamplist = ['21_04_29_11']
for i in range(0, len(timestamplist)):
print("Working on entry "+str(i)+" timestamp "+timestamplist[i])
posSampled = 200
for ii in range(0, posSampled):
resfolder = os.path.join(testfolder, 'Day_'+timestamplist[i]+'_Posx_'+str(ii))
resfolder = os.path.join(resfolder, 'results/')
print(resfolder)
#!! Make sure this matches the format being used to save results or
# modify accordingly.
# example filename: 'irr_20_01_01_08_pos_0.csv'
filename = 'irr_1axis_'+timestamplist[i]+'_00_'+withopt+'_pos_'+str(ii)+'.csv'
try:
data = pd.read_csv(os.path.join(resfolder,filename))
# Save all the values
x_all.append(list(data['x']))
y_all.append(list(data['y']))
z_all.append(list(data['z']))
rearZ_all.append(list(data['rearZ']))
mattype_all.append(list(data['mattype']))
rearMat_all.append(list(data['rearMat']))
Wm2Front_all.append(list(data['Wm2Front']))
Wm2Back_all.append(list(data['Wm2Back']))
# Saving position and timestamp for indexing
pos_all.append(ii)
timestamp_all.append(timestamplist[i])
except Exception:
print('*** Missing positions ', ii)
errors_all.append(ii)
df = pd.DataFrame(list(zip(timestamp_all,pos_all,x_all,y_all,z_all,rearZ_all,
mattype_all,rearMat_all,Wm2Front_all,Wm2Back_all)),
columns=['Timestamp', 'Position', 'x','y','z','rearZ',
'mattype','rearMat','Wm2Front','Wm2Back'])
df.to_csv(os.path.join(savefolder,savefilename))
errorfile = os.path.join(savefolder, 'ERRORS'+withopt+'.txt')
with open(errorfile, 'w') as f:
for s in errors_all:
f.write(str(s) + '\n')
print("FINISHED")
| NREL/bifacial_radiance | bifacial_radiance/HPCScripts/Other Examples (unorganized)/compile_basic_module_sampling.py | Python | bsd-3-clause | 3,651 |
class MatchIndicatorStatus(object):
SINGLE_TRANSACTION_MATCH = '1'
MULTIPLE_TRANS_IDENTICAL_CARD_MATCH = '2'
MULTIPLE_TRANS_DIFFERING_CARDS_MATCH = '3'
NO_MATCH_FOUND = '4'
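# Presumably the wire codes for the MATCH API's match-indicator field, e.g.
# a response value of '4' maps to MatchIndicatorStatus.NO_MATCH_FOUND.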
| M4gn4tor/mastercard-api-python | Tests/services/fraud_scoring/matchindicatorstatus.py | Python | bsd-3-clause | 190 |
# -*- encoding:utf-8 -*-
import libmc
import unittest
import cPickle as pickle
import marshal
import time
TEST_SERVER = "localhost"
class BigObject(object):
def __init__(self, letter='1', size=2000000):
self.object = letter * size
def __eq__(self, other):
return self.object == other.object
class NoPickle(object):
def __getattr__(self, name):
pass
class TestCmemcached(unittest.TestCase):
def setUp(self):
self.mc=libmc.Client([TEST_SERVER], comp_threshold=1024)
def test_set_get(self):
self.mc.set("key", "value")
self.assertEqual(self.mc.get("key") , "value")
self.mc.set("key_int", 1)
self.assertEqual(self.mc.get("key_int") , 1)
self.mc.set("key_long", 1234567890L)
self.assertEqual(self.mc.get("key_long") , 1234567890L)
self.mc.set("key_object", BigObject())
self.assertEqual(self.mc.get("key_object"),BigObject())
big_object=BigObject('x', 1000001)
self.mc.set("key_big_object", big_object)
self.assertEqual(self.mc.get("key_big_object"),big_object)
def test_chinese_set_get(self):
key='豆瓣'
value='在炎热的夏天我们无法停止上豆瓣'
self.assertEqual(self.mc.set(key, value),1)
self.assertEqual(self.mc.get(key) , value)
def test_special_key(self):
key='keke a kid'
value=1024
self.assertEqual(self.mc.set(key,value),0)
self.assertEqual(self.mc.get(key),None)
key='u:keke a kid'
self.assertEqual(self.mc.set(key,value),0)
self.assertEqual(self.mc.get(key),None)
def test_empty_string(self):
key='ttt'
value=''
self.assertEqual(self.mc.set(key,value), True)
self.assertEqual(self.mc.get(key), '')
def test_add(self):
key = 'test_add'
self.mc.delete(key)
self.assertEqual(self.mc.add(key, 'tt'), 1)
self.assertEqual(self.mc.get(key), 'tt')
self.assertEqual(self.mc.add(key, 'tt'), 0)
self.mc.delete(key+'2')
self.assertEqual(self.mc.add(key+'2', range(10)), 1)
def test_replace(self):
key = 'test_replace'
self.mc.delete(key)
self.assertEqual(self.mc.replace(key, ''), 0)
self.assertEqual(self.mc.set(key, 'b'), 1)
self.assertEqual(self.mc.replace(key, 'a'), 1)
self.assertEqual(self.mc.get(key), 'a')
def test_append(self):
key="test_append"
value="append\n"
self.mc.delete(key)
self.assertEqual(self.mc.append(key, value), 0)
self.mc.set(key, "")
self.assertEqual(self.mc.append(key, value), 1)
self.assertEqual(self.mc.append(key, value), 1)
self.assertEqual(self.mc.prepend(key, 'before\n'), 1)
self.assertEqual(self.mc.get(key), 'before\n' + value * 2)
def test_append_multi(self):
N = 10
K = "test_append_multi_%d"
data = "after\n"
for i in range(N):
self.assertEqual(self.mc.set(K%i, "before\n"), 1)
keys = [K%i for i in range(N)]
self.assertEqual(self.mc.append_multi(keys, data), 1)
self.assertEqual(self.mc.get_multi(keys), dict(zip(keys, ["before\n"+data] * N)))
# prepend
self.assertEqual(self.mc.prepend_multi(keys, data), 1)
self.assertEqual(self.mc.get_multi(keys), dict(zip(keys, [data+"before\n"+data] * N)))
# delete
self.assertEqual(self.mc.delete_multi(keys), 1)
self.assertEqual(self.mc.get_multi(keys), {})
def test_append_multi_performance(self):
N = 50000
K = "test_append_multi_%d"
data = "after\n"
keys = [K%i for i in range(N)]
t = time.time()
self.mc.append_multi(keys, data)
t = time.time() - t
assert t < 1, 'should append 5k key in 1 secs %f' % t
def test_set_multi(self):
values = dict(('key%s'%k,('value%s'%k)*100000*k) for k in range(10))
values.update({' ':''})
self.assertEqual(self.mc.set_multi(values), 1)
del values[' ']
for k in values:
self.assertEqual(self.mc.get(k), values[k])
mc=libmc.Client(["localhost:11999"], comp_threshold=1024)
self.assertEqual(mc.set_multi(values), 0)
def test_append_large(self):
k = 'test_append_large'
self.mc.set(k, 'a' * 2048)
self.mc.append(k, 'bbbb')
assert 'bbbb' not in self.mc.get(k)
self.mc.set(k, 'a' * 2048, compress=False)
self.mc.append(k, 'bbbb')
assert 'bbbb' in self.mc.get(k)
def test_incr(self):
key="Not_Exist"
self.assertEqual(self.mc.incr(key), None)
#key="incr:key1"
#self.mc.set(key, "not_numerical")
#self.assertEqual(self.mc.incr(key), 0)
key="incr:key2"
self.mc.set(key, 2007)
self.assertEqual(self.mc.incr(key), 2008)
def test_decr(self):
key="Not_Exist"
self.assertEqual(self.mc.decr(key),None)
#key="decr:key1"
#self.mc.set(key, "not_numerical")
#self.assertEqual(self.mc.decr(key),0)
key="decr:key2"
self.mc.set(key, 2009)
self.assertEqual(self.mc.decr(key),2008)
def test_get_multi(self):
keys=["hello1", "hello2", "hello3"]
values=["vhello1", "vhello2", "vhello3"]
for x in xrange(3):
self.mc.set(keys[x], values[x])
self.assertEqual(self.mc.get(keys[x]) , values[x])
self.assertEqual(self.mc.get_multi(keys), dict(zip(keys, values)))
def test_get_multi_big(self):
keys=["hello1", "hello2", "hello3"]
values=[BigObject(str(i), 1000001) for i in xrange(3)]
for x in xrange(3):
self.mc.set(keys[x], values[x])
self.assertEqual(self.mc.get(keys[x]) , values[x])
result=self.mc.get_multi(keys)
for x in xrange(3):
self.assertEqual(result[keys[x]] , values[x])
def test_get_multi_with_empty_string(self):
keys=["hello1", "hello2", "hello3"]
for k in keys:
self.mc.set(k, '')
self.assertEqual(self.mc.get_multi(keys), dict(zip(keys,[""]*3)))
def testBool(self):
self.mc.set("bool", True)
value = self.mc.get("bool")
self.assertEqual(value, True)
self.mc.set("bool_", False)
value = self.mc.get("bool_")
self.assertEqual(value, False)
def testEmptyString(self):
self.mc.set("str", '')
value = self.mc.get("str")
self.assertEqual(value, '')
def testGetHost(self):
self.mc.set("str", '')
host = self.mc.get_host_by_key("str")
self.assertEqual(host, TEST_SERVER)
def test_get_list(self):
self.mc.set("a", 'a')
v = self.mc.get_list(['a','b'])
self.assertEqual(v, ['a',None])
def test_marshal(self):
v = [{2:{"a": 337}}]
self.mc.set("a", v)
self.assertEqual(self.mc.get("a"), v)
raw, flags = self.mc.get_raw("a")
self.assertEqual(raw, marshal.dumps(v, 2))
def test_pickle(self):
v = [{"v": BigObject('a', 10)}]
self.mc.set("a", v)
self.assertEqual(self.mc.get("a"), v)
raw, flags = self.mc.get_raw("a")
self.assertEqual(raw, pickle.dumps(v, -1))
def test_no_pickle(self):
v = NoPickle()
self.assertEqual(self.mc.set("nopickle", v), False)
self.assertEqual(self.mc.get("nopickle"), None)
def test_big_list(self):
v = range(1024*1024)
self.assertEqual(self.mc.set('big_list', v), 1)
self.assertEqual(self.mc.get('big_list'), v)
def test_last_error(self):
self.assertEqual(self.mc.set('testkey', 'hh'), True)
self.assertEqual(self.mc.get('testkey'), 'hh')
self.assertEqual(self.mc.get_last_error(), 0)
self.mc=libmc.Client(["localhost:11999"], comp_threshold=1024)
self.assertEqual(self.mc.set('testkey', 'hh'), False)
self.assertEqual(self.mc.get('testkey'), None)
self.assertNotEqual(self.mc.get_last_error(), 1)
def test_stats(self):
s = self.mc.stats()
self.assertEqual(TEST_SERVER in s, True)
st = s[TEST_SERVER]
st_keys = sorted([
"pid",
"uptime",
"time",
"version",
"pointer_size",
"rusage_user",
"rusage_system",
"curr_items",
"total_items",
"bytes",
"curr_connections",
"total_connections",
"connection_structures",
"cmd_get",
"cmd_set",
"get_hits",
"get_misses",
"evictions",
"bytes_read",
"bytes_written",
"limit_maxbytes",
"threads",
])
self.assertEqual(sorted(st.keys()), st_keys)
mc=libmc.Client(["localhost:11999", TEST_SERVER])
s = mc.stats()
self.assertEqual(len(s), 2)
#def test_gets_multi(self):
# keys=["hello1", "hello2", "hello3"]
# values=["vhello1", "vhello2", "vhello3"]
# for x in xrange(3):
# self.mc.set(keys[x], values[x])
# self.assertEqual(self.mc.get(keys[x]) , values[x])
# result=self.mc.gets_multi(keys)
# for x in xrange(3):
# #print result[keys[x]][0],result[keys[x]][1]
# self.assertEqual(result[keys[x]][0] , values[x])
#def test_cas(self):
# keys=["hello1", "hello2", "hello3"]
# values=["vhello1", "vhello2", "vhello3"]
# for x in xrange(3):
# self.mc.set(keys[x], values[x])
# self.assertEqual(self.mc.get(keys[x]) , values[x])
# result=self.mc.gets_multi(keys)
# for x in xrange(3):
# self.assertEqual(result[keys[x]][0] , values[x])
# self.assertEqual(self.mc.cas(keys[x],'cas',cas=result[keys[x]][1]) , 1)
# self.assertEqual(self.mc.cas(keys[x],'cas2',cas=result[keys[x]][1]) , 0)
# self.assertEqual(self.mc.get(keys[x]) , 'cas')
class TestBinaryCmemcached(TestCmemcached):
def setUp(self):
self.mc=libmc.Client([TEST_SERVER], comp_threshold=1024)
self.mc.set_behavior(libmc.BEHAVIOR_BINARY_PROTOCOL, 1)
def test_append_multi_performance(self):
"binary is slow, bug ?"
def test_stats(self):
"not yet support"
if __name__ == '__main__':
unittest.main()
| davies/libmc-ctypes | test.py | Python | bsd-3-clause | 10,467 |
class TerminationInquiryRequestOptions(object):
def __init__(self, page_offset, page_length):
self.page_offset = page_offset
self.page_length = page_length
if self.page_length > 25:
self.page_length = 25
| M4gn4tor/mastercard-api-python | Services/match/domain/options/terminationinquiryrequestoptions.py | Python | bsd-3-clause | 243 |
#!/usr/bin/env python
"""
Demo of "operate-and-get-next".
(Actually, this creates one prompt application, and keeps running the same app
over and over again. -- For now, this is the only way to get this working.)
"""
from prompt_toolkit.shortcuts import PromptSession
def main():
session = PromptSession("prompt> ")
while True:
session.prompt()
if __name__ == "__main__":
main()
| jonathanslenders/python-prompt-toolkit | examples/prompts/operate-and-get-next.py | Python | bsd-3-clause | 404 |
"""
Tests for offsets.CustomBusinessHour
"""
from __future__ import annotations
from datetime import datetime
import numpy as np
import pytest
from pandas._libs.tslibs import Timestamp
from pandas._libs.tslibs.offsets import (
BusinessHour,
CustomBusinessHour,
Nano,
)
import pandas._testing as tm
from pandas.tests.tseries.offsets.common import (
Base,
assert_offset_equal,
)
from pandas.tseries.holiday import USFederalHolidayCalendar
class TestCustomBusinessHour(Base):
_offset: type[CustomBusinessHour] = CustomBusinessHour
holidays = ["2014-06-27", datetime(2014, 6, 30), np.datetime64("2014-07-02")]
def setup_method(self):
# 2014 Calendar to check custom holidays
# Sun Mon Tue Wed Thu Fri Sat
# 6/22 23 24 25 26 27 28
# 29 30 7/1 2 3 4 5
# 6 7 8 9 10 11 12
self.d = datetime(2014, 7, 1, 10, 00)
self.offset1 = CustomBusinessHour(weekmask="Tue Wed Thu Fri")
self.offset2 = CustomBusinessHour(holidays=self.holidays)
def test_constructor_errors(self):
from datetime import time as dt_time
msg = "time data must be specified only with hour and minute"
with pytest.raises(ValueError, match=msg):
CustomBusinessHour(start=dt_time(11, 0, 5))
msg = "time data must match '%H:%M' format"
with pytest.raises(ValueError, match=msg):
CustomBusinessHour(start="AAA")
msg = "time data must match '%H:%M' format"
with pytest.raises(ValueError, match=msg):
CustomBusinessHour(start="14:00:05")
def test_different_normalize_equals(self):
# GH#21404 changed __eq__ to return False when `normalize` does not match
offset = self._offset()
offset2 = self._offset(normalize=True)
assert offset != offset2
def test_repr(self):
assert repr(self.offset1) == "<CustomBusinessHour: CBH=09:00-17:00>"
assert repr(self.offset2) == "<CustomBusinessHour: CBH=09:00-17:00>"
def test_with_offset(self):
expected = Timestamp("2014-07-01 13:00")
assert self.d + CustomBusinessHour() * 3 == expected
assert self.d + CustomBusinessHour(n=3) == expected
def test_eq(self):
for offset in [self.offset1, self.offset2]:
assert offset == offset
assert CustomBusinessHour() != CustomBusinessHour(-1)
assert CustomBusinessHour(start="09:00") == CustomBusinessHour()
assert CustomBusinessHour(start="09:00") != CustomBusinessHour(start="09:01")
assert CustomBusinessHour(start="09:00", end="17:00") != CustomBusinessHour(
start="17:00", end="09:01"
)
assert CustomBusinessHour(weekmask="Tue Wed Thu Fri") != CustomBusinessHour(
weekmask="Mon Tue Wed Thu Fri"
)
assert CustomBusinessHour(holidays=["2014-06-27"]) != CustomBusinessHour(
holidays=["2014-06-28"]
)
def test_sub(self):
# override the Base.test_sub implementation because self.offset2 is
# defined differently in this class than the test expects
pass
def test_hash(self):
assert hash(self.offset1) == hash(self.offset1)
assert hash(self.offset2) == hash(self.offset2)
def test_call(self):
with tm.assert_produces_warning(FutureWarning):
# GH#34171 DateOffset.__call__ is deprecated
assert self.offset1(self.d) == datetime(2014, 7, 1, 11)
assert self.offset2(self.d) == datetime(2014, 7, 1, 11)
def testRollback1(self):
assert self.offset1.rollback(self.d) == self.d
assert self.offset2.rollback(self.d) == self.d
d = datetime(2014, 7, 1, 0)
# 2014/07/01 is Tuesday, 06/30 is Monday(holiday)
assert self.offset1.rollback(d) == datetime(2014, 6, 27, 17)
# 2014/6/30 and 2014/6/27 are holidays
assert self.offset2.rollback(d) == datetime(2014, 6, 26, 17)
def testRollback2(self):
assert self._offset(-3).rollback(datetime(2014, 7, 5, 15, 0)) == datetime(
2014, 7, 4, 17, 0
)
def testRollforward1(self):
assert self.offset1.rollforward(self.d) == self.d
assert self.offset2.rollforward(self.d) == self.d
d = datetime(2014, 7, 1, 0)
assert self.offset1.rollforward(d) == datetime(2014, 7, 1, 9)
assert self.offset2.rollforward(d) == datetime(2014, 7, 1, 9)
def testRollforward2(self):
assert self._offset(-3).rollforward(datetime(2014, 7, 5, 16, 0)) == datetime(
2014, 7, 7, 9
)
def test_roll_date_object(self):
offset = BusinessHour()
dt = datetime(2014, 7, 6, 15, 0)
result = offset.rollback(dt)
assert result == datetime(2014, 7, 4, 17)
result = offset.rollforward(dt)
assert result == datetime(2014, 7, 7, 9)
normalize_cases = [
(
CustomBusinessHour(normalize=True, holidays=holidays),
{
datetime(2014, 7, 1, 8): datetime(2014, 7, 1),
datetime(2014, 7, 1, 17): datetime(2014, 7, 3),
datetime(2014, 7, 1, 16): datetime(2014, 7, 3),
datetime(2014, 7, 1, 23): datetime(2014, 7, 3),
datetime(2014, 7, 1, 0): datetime(2014, 7, 1),
datetime(2014, 7, 4, 15): datetime(2014, 7, 4),
datetime(2014, 7, 4, 15, 59): datetime(2014, 7, 4),
datetime(2014, 7, 4, 16, 30): datetime(2014, 7, 7),
datetime(2014, 7, 5, 23): datetime(2014, 7, 7),
datetime(2014, 7, 6, 10): datetime(2014, 7, 7),
},
),
(
CustomBusinessHour(-1, normalize=True, holidays=holidays),
{
datetime(2014, 7, 1, 8): datetime(2014, 6, 26),
datetime(2014, 7, 1, 17): datetime(2014, 7, 1),
datetime(2014, 7, 1, 16): datetime(2014, 7, 1),
datetime(2014, 7, 1, 10): datetime(2014, 6, 26),
datetime(2014, 7, 1, 0): datetime(2014, 6, 26),
datetime(2014, 7, 7, 10): datetime(2014, 7, 4),
datetime(2014, 7, 7, 10, 1): datetime(2014, 7, 7),
datetime(2014, 7, 5, 23): datetime(2014, 7, 4),
datetime(2014, 7, 6, 10): datetime(2014, 7, 4),
},
),
(
CustomBusinessHour(
1, normalize=True, start="17:00", end="04:00", holidays=holidays
),
{
datetime(2014, 7, 1, 8): datetime(2014, 7, 1),
datetime(2014, 7, 1, 17): datetime(2014, 7, 1),
datetime(2014, 7, 1, 23): datetime(2014, 7, 2),
datetime(2014, 7, 2, 2): datetime(2014, 7, 2),
datetime(2014, 7, 2, 3): datetime(2014, 7, 3),
datetime(2014, 7, 4, 23): datetime(2014, 7, 5),
datetime(2014, 7, 5, 2): datetime(2014, 7, 5),
datetime(2014, 7, 7, 2): datetime(2014, 7, 7),
datetime(2014, 7, 7, 17): datetime(2014, 7, 7),
},
),
]
@pytest.mark.parametrize("norm_cases", normalize_cases)
def test_normalize(self, norm_cases):
offset, cases = norm_cases
for dt, expected in cases.items():
assert offset._apply(dt) == expected
def test_is_on_offset(self):
tests = [
(
CustomBusinessHour(start="10:00", end="15:00", holidays=self.holidays),
{
datetime(2014, 7, 1, 9): False,
datetime(2014, 7, 1, 10): True,
datetime(2014, 7, 1, 15): True,
datetime(2014, 7, 1, 15, 1): False,
datetime(2014, 7, 5, 12): False,
datetime(2014, 7, 6, 12): False,
},
)
]
for offset, cases in tests:
for dt, expected in cases.items():
assert offset.is_on_offset(dt) == expected
apply_cases = [
(
CustomBusinessHour(holidays=holidays),
{
datetime(2014, 7, 1, 11): datetime(2014, 7, 1, 12),
datetime(2014, 7, 1, 13): datetime(2014, 7, 1, 14),
datetime(2014, 7, 1, 15): datetime(2014, 7, 1, 16),
datetime(2014, 7, 1, 19): datetime(2014, 7, 3, 10),
datetime(2014, 7, 1, 16): datetime(2014, 7, 3, 9),
datetime(2014, 7, 1, 16, 30, 15): datetime(2014, 7, 3, 9, 30, 15),
datetime(2014, 7, 1, 17): datetime(2014, 7, 3, 10),
datetime(2014, 7, 2, 11): datetime(2014, 7, 3, 10),
# out of business hours
datetime(2014, 7, 2, 8): datetime(2014, 7, 3, 10),
datetime(2014, 7, 2, 19): datetime(2014, 7, 3, 10),
datetime(2014, 7, 2, 23): datetime(2014, 7, 3, 10),
datetime(2014, 7, 3, 0): datetime(2014, 7, 3, 10),
# saturday
datetime(2014, 7, 5, 15): datetime(2014, 7, 7, 10),
datetime(2014, 7, 4, 17): datetime(2014, 7, 7, 10),
datetime(2014, 7, 4, 16, 30): datetime(2014, 7, 7, 9, 30),
datetime(2014, 7, 4, 16, 30, 30): datetime(2014, 7, 7, 9, 30, 30),
},
),
(
CustomBusinessHour(4, holidays=holidays),
{
datetime(2014, 7, 1, 11): datetime(2014, 7, 1, 15),
datetime(2014, 7, 1, 13): datetime(2014, 7, 3, 9),
datetime(2014, 7, 1, 15): datetime(2014, 7, 3, 11),
datetime(2014, 7, 1, 16): datetime(2014, 7, 3, 12),
datetime(2014, 7, 1, 17): datetime(2014, 7, 3, 13),
datetime(2014, 7, 2, 11): datetime(2014, 7, 3, 13),
datetime(2014, 7, 2, 8): datetime(2014, 7, 3, 13),
datetime(2014, 7, 2, 19): datetime(2014, 7, 3, 13),
datetime(2014, 7, 2, 23): datetime(2014, 7, 3, 13),
datetime(2014, 7, 3, 0): datetime(2014, 7, 3, 13),
datetime(2014, 7, 5, 15): datetime(2014, 7, 7, 13),
datetime(2014, 7, 4, 17): datetime(2014, 7, 7, 13),
datetime(2014, 7, 4, 16, 30): datetime(2014, 7, 7, 12, 30),
datetime(2014, 7, 4, 16, 30, 30): datetime(2014, 7, 7, 12, 30, 30),
},
),
]
@pytest.mark.parametrize("apply_case", apply_cases)
def test_apply(self, apply_case):
offset, cases = apply_case
for base, expected in cases.items():
assert_offset_equal(offset, base, expected)
nano_cases = [
(
CustomBusinessHour(holidays=holidays),
{
Timestamp("2014-07-01 15:00")
+ Nano(5): Timestamp("2014-07-01 16:00")
+ Nano(5),
Timestamp("2014-07-01 16:00")
+ Nano(5): Timestamp("2014-07-03 09:00")
+ Nano(5),
Timestamp("2014-07-01 16:00")
- Nano(5): Timestamp("2014-07-01 17:00")
- Nano(5),
},
),
(
CustomBusinessHour(-1, holidays=holidays),
{
Timestamp("2014-07-01 15:00")
+ Nano(5): Timestamp("2014-07-01 14:00")
+ Nano(5),
Timestamp("2014-07-01 10:00")
+ Nano(5): Timestamp("2014-07-01 09:00")
+ Nano(5),
Timestamp("2014-07-01 10:00")
- Nano(5): Timestamp("2014-06-26 17:00")
- Nano(5),
},
),
]
@pytest.mark.parametrize("nano_case", nano_cases)
def test_apply_nanoseconds(self, nano_case):
offset, cases = nano_case
for base, expected in cases.items():
assert_offset_equal(offset, base, expected)
def test_us_federal_holiday_with_datetime(self):
# GH 16867
bhour_us = CustomBusinessHour(calendar=USFederalHolidayCalendar())
t0 = datetime(2014, 1, 17, 15)
result = t0 + bhour_us * 8
expected = Timestamp("2014-01-21 15:00:00")
assert result == expected
@pytest.mark.parametrize(
"weekmask, expected_time, mult",
[
["Mon Tue Wed Thu Fri Sat", "2018-11-10 09:00:00", 10],
["Tue Wed Thu Fri Sat", "2018-11-13 08:00:00", 18],
],
)
def test_custom_businesshour_weekmask_and_holidays(weekmask, expected_time, mult):
# GH 23542
holidays = ["2018-11-09"]
bh = CustomBusinessHour(
start="08:00", end="17:00", weekmask=weekmask, holidays=holidays
)
result = Timestamp("2018-11-08 08:00") + mult * bh
expected = Timestamp(expected_time)
assert result == expected
| pandas-dev/pandas | pandas/tests/tseries/offsets/test_custom_business_hour.py | Python | bsd-3-clause | 12,823 |
#!/usr/bin/env python
#
# Author: Patrick Hung (patrickh @caltech)
# Copyright (c) 1997-2015 California Institute of Technology.
# License: 3-clause BSD. The full license text is available at:
# - http://trac.mystic.cacr.caltech.edu/project/mystic/browser/mystic/LICENSE
"""
Solve the dual form of test_circle.py.
Currently, it uses a package called "qld" that I wrote but not in
the repo. yet. It wraps IQP from bell-labs. (code not GPL and has export
restrictions.)
"""
from numpy import *
import pylab
from test_circle import sparse_circle, sv, x0, y0, R0
getpoints = sparse_circle.forward
import qld
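# A sketch of the QP being set up below, assuming qld.quadprog2 minimizes
# 0.5*x'Hx + f'x subject to A*x == b and the given lower/upper bounds:
# the dual of the minimum-enclosing-circle problem is
#     min_a  a'Qa - diag(Q)'a   s.t.  sum(a) = 1,  0 <= a <= 1,
# with Q = X X', hence H = 2*Q and f = -diag(Q); the +10 added to f below
# only shifts the objective by a constant because sum(a) = 1.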
def getobjective(H,f, x):
return 0.5 * dot(dot(x,H),x) + dot(f,x)
def chop(x):
if abs(x) > 1e-6:
return x
else:
return 0
def round(x):
return array([chop(y) for y in x])
def plot(xy, sv, x0, y0, R0, center, R):
import pylab
pylab.plot(xy[:,0],xy[:,1],'k+')
pylab.plot(xy[sv,0],xy[sv,1],'ro')
theta = arange(0, 2*pi, 0.02)
pylab.plot([center[0]],[center[1]],'bo')
pylab.plot([xy[sv0,0], center[0]],[xy[sv0,1], center[1]],'r--')
pylab.plot(R0 * cos(theta)+x0, R0*sin(theta)+y0, 'r-',linewidth=2)
pylab.plot(R * cos(theta)+center[0], R*sin(theta)+center[1], 'b-',linewidth=2)
pylab.axis('equal')
pylab.show()
if __name__ == '__main__':
npt = 20
from test_circle import xy
npt1 = xy.shape[0]
if npt != npt1:
xy = getpoints((x0,y0,R0),npt)
else:
pass
Q = dot(xy, transpose(xy))
f = -diag(Q)+10
H = Q*2
A = ones((1,npt))
b = ones(1)
x = qld.quadprog2(H, f, None, None, A, b, zeros(npt), ones(npt))
center = dot(x,xy)
print "center: " , center
# find support vectors (find numpy way please)
sv = []
for i,v in enumerate(x):
if v > 0.001: sv.append(i)
sv0 = sv[0]
print sv
R = linalg.norm(xy[sv0,:]-center)
plot(xy, sv, x0, y0, R0, center, R)
# $Id$
#
| jcfr/mystic | examples_other/qld_circle_dual.py | Python | bsd-3-clause | 1,952 |
from bokeh.models import HoverTool
from bokeh.plotting import figure, output_file, show
from bokeh.sampledata.glucose import data
x = data.loc['2010-10-06'].index.to_series()
y = data.loc['2010-10-06']['glucose']
# Basic plot setup
p = figure(width=800, height=400, x_axis_type="datetime",
tools="", toolbar_location=None, title='Hover over points')
p.ygrid.grid_line_color = None
p.background_fill_color = "#fafafa"
p.line(x, y, line_dash="4 4", line_width=1, color='gray')
cr = p.circle(x, y, size=20,
fill_color="steelblue", alpha=0.1, line_color=None,
hover_fill_color="midnightblue", hover_alpha=0.5,
hover_line_color="white")
p.add_tools(HoverTool(tooltips=None, renderers=[cr], mode='hline'))
output_file("hover_glyph.html", title="hover_glyph.py example")
show(p)
| bokeh/bokeh | examples/plotting/file/hover_glyph.py | Python | bsd-3-clause | 832 |
def extractAntlerscoloradoCom(item):
'''
Parser for 'antlerscolorado.com'
'''
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or "preview" in item['title'].lower():
return None
tagmap = [
('PRC', 'PRC', 'translated'),
('Loiterous', 'Loiterous', 'oel'),
]
for tagname, name, tl_type in tagmap:
if tagname in item['tags']:
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False
| fake-name/ReadableWebProxy | WebMirror/management/rss_parser_funcs/feed_parse_extractAntlerscoloradoCom.py | Python | bsd-3-clause | 551 |
from dateutil import parser
from flask import request
from werkzeug.exceptions import NotFound, BadRequest
from rdr_service import clock
from rdr_service.api.base_api import BaseApi, log_api_request
from rdr_service.api_util import GEM, RDR_AND_PTC, RDR
from rdr_service.app_util import auth_required, restrict_to_gae_project
from rdr_service.dao.genomics_dao import GenomicPiiDao, GenomicSetMemberDao, GenomicOutreachDao, GenomicOutreachDaoV2
PTC_ALLOWED_ENVIRONMENTS = [
'all-of-us-rdr-sandbox',
'all-of-us-rdr-stable',
'all-of-us-rdr-ptsc-1-test',
'localhost'
]
class GenomicPiiApi(BaseApi):
def __init__(self):
super(GenomicPiiApi, self).__init__(GenomicPiiDao())
@auth_required([GEM, RDR])
def get(self, mode=None, p_id=None):
if mode not in ('GEM', 'RHP'):
raise BadRequest("GenomicPII Mode required to be \"GEM\" or \"RHP\".")
if p_id is not None:
pii = self.dao.get_by_pid(p_id)
if not pii:
raise NotFound(f"Participant with ID {p_id} not found")
proto_payload = {
'mode': mode,
'data': pii
}
return self._make_response(proto_payload)
raise BadRequest
class GenomicOutreachApi(BaseApi):
def __init__(self):
super(GenomicOutreachApi, self).__init__(GenomicOutreachDao())
self.member_dao = GenomicSetMemberDao()
@auth_required(RDR_AND_PTC)
def get(self, mode=None):
self._check_mode(mode)
if mode.lower() == "gem":
return self.get_gem_outreach()
raise BadRequest
@auth_required(RDR_AND_PTC)
@restrict_to_gae_project(PTC_ALLOWED_ENVIRONMENTS)
def post(self, p_id, mode=None):
"""
Generates a genomic test participant from payload
Overwrites BaseAPI.post()
:param p_id:
:param mode:
:return:
"""
self._check_mode(mode)
if mode.lower() == "gem":
return self.post_gem_outreach(p_id)
raise BadRequest
def get_gem_outreach(self):
"""
Returns the GEM outreach resource based on the request parameters
:return:
"""
_start_date = request.args.get("start_date")
_end_date = request.args.get("end_date")
_pid = request.args.get("participant_id")
if _pid is not None and _start_date is not None:
raise BadRequest('Start date not supported with participant lookup.')
# Set the return timestamp
if _end_date is None:
_end_date = clock.CLOCK.now()
else:
_end_date = parser.parse(_end_date)
participant_report_states = None
# If this is a participant lookup
if _pid is not None:
if _pid.startswith("P"):
_pid = _pid[1:]
participant_report_states = self.dao.participant_state_lookup(_pid)
if len(participant_report_states) == 0:
raise NotFound(f'Participant P{_pid} does not exist in the Genomic system.')
# If this is a date lookup
if _start_date is not None:
_start_date = parser.parse(_start_date)
participant_report_states = self.dao.date_lookup(_start_date, end_date=_end_date)
if participant_report_states is not None:
proto_payload = {
'date': clock.CLOCK.now(),
'data': participant_report_states
}
return self._make_response(proto_payload)
raise BadRequest
def post_gem_outreach(self, p_id):
"""
Creates the genomic participant
:return: response
"""
resource = request.get_json(force=True)
# Create GenomicSetMember with report state
model = self.dao.from_client_json(resource, participant_id=p_id, mode='gem')
m = self._do_insert(model)
self.member_dao.update_member_wf_states(m)
response_data = {
'date': m.genomicWorkflowStateModifiedTime,
'data': [
(m.participantId, m.genomicWorkflowState),
]
}
# Log to requests_log
log_api_request(log=request.log_record)
return self._make_response(response_data)
@staticmethod
def _check_mode(mode):
"""
Checks that the mode in the endpoint is valid
:param mode: "GEM" or "RHP"
"""
modes = ['gem', 'rhp']
if mode.lower() not in modes:
raise BadRequest(f"GenomicOutreach Mode required to be one of {modes}.")
class GenomicOutreachApiV2(BaseApi):
def __init__(self):
super(GenomicOutreachApiV2, self).__init__(GenomicOutreachDaoV2())
self.validate_params()
@auth_required(RDR_AND_PTC)
def get(self):
if not request.args.get('participant_id'):
self._check_global_args(
request.args.get('module'),
request.args.get('type')
)
return self.get_outreach()
def get_outreach(self):
"""
Returns the outreach resource based on the request parameters
:return:
"""
start_date = request.args.get("start_date", None)
pid = request.args.get("participant_id", None)
end_date = clock.CLOCK.now() \
if not request.args.get("end_date") \
else parser.parse(request.args.get("end_date"))
payload = {
'date': clock.CLOCK.now()
}
if not pid and not start_date:
raise BadRequest('Participant ID or Start Date is required for GenomicOutreach lookup.')
if pid:
if pid.startswith("P"):
pid = pid[1:]
participant_data = self.dao.outreach_lookup(pid=pid)
if participant_data:
payload['data'] = participant_data
return self._make_response(payload)
raise NotFound(f'Participant P{pid} does not exist in the Genomic system.')
if start_date:
start_date = parser.parse(start_date)
participant_data = self.dao.outreach_lookup(start_date=start_date, end_date=end_date)
payload['data'] = participant_data
return self._make_response(payload)
raise BadRequest
def _check_global_args(self, module, _type):
"""
Checks that the mode in the endpoint is valid
:param module: "GEM" / "PGX" / "HDR"
:param _type: "result" / "informingLoop" / "appointment"
"""
current_module = None
current_type = None
if module:
if module.lower() not in self.dao.allowed_modules:
raise BadRequest(
f"GenomicOutreach accepted modules: {' | '.join(self.dao.allowed_modules)}")
else:
current_module = module.lower()
if _type:
if _type not in self.dao.allowed_types:
raise BadRequest(f"GenomicOutreach accepted types: {' | '.join(self.dao.allowed_types)}")
else:
current_type = _type
self.dao.set_globals(
module=current_module,
_type=current_type
)
@staticmethod
def validate_params():
valid_params = ['start_date', 'end_date', 'participant_id', 'module', 'type']
request_keys = list(request.args.keys())
if any(arg for arg in request_keys if arg not in valid_params):
raise BadRequest(f"GenomicOutreach accepted params: {' | '.join(valid_params)}")
| all-of-us/raw-data-repository | rdr_service/api/genomic_api.py | Python | bsd-3-clause | 7,595 |
# ----------------------------------------------------------------------------
# pyglet
# Copyright (c) 2006-2008 Alex Holkner
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of pyglet nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
"""Wrapper for openal
Generated with:
../tools/wraptypes/wrap.py /usr/include/AL/al.h -lopenal -olib_openal.py
.. Hacked to remove non-existent library functions.
TODO add alGetError check.
.. alListener3i and alListeneriv are present in my OS X 10.4 but not another
10.4 user's installation. They've also been removed for compatibility.
"""
import ctypes
from ctypes import *
import sys
import pyglet.lib
_lib = pyglet.lib.load_library('openal', win32='openal32',
framework='/System/Library/Frameworks/OpenAL.framework')
_int_types = (c_int16, c_int32)
if hasattr(ctypes, 'c_int64'):
# Some builds of ctypes apparently do not have c_int64
# defined; it's a pretty good bet that these builds do not
# have 64-bit pointers.
_int_types += (ctypes.c_int64,)
for t in _int_types:
if sizeof(t) == sizeof(c_size_t):
c_ptrdiff_t = t
class c_void(Structure):
# c_void_p is a buggy return type, converting to int, so
# POINTER(None) == c_void_p is actually written as
# POINTER(c_void), so it can be treated as a real pointer.
_fields_ = [('dummy', c_int)]
AL_API = 0 # /usr/include/AL/al.h:39
ALAPI = 0 # /usr/include/AL/al.h:59
AL_INVALID = -1 # /usr/include/AL/al.h:61
AL_ILLEGAL_ENUM = 0 # /usr/include/AL/al.h:62
AL_ILLEGAL_COMMAND = 0 # /usr/include/AL/al.h:63
ALboolean = c_int # Better return type than c_char, as generated
ALchar = c_char # /usr/include/AL/al.h:73
ALbyte = c_char # /usr/include/AL/al.h:76
ALubyte = c_ubyte # /usr/include/AL/al.h:79
ALshort = c_short # /usr/include/AL/al.h:82
ALushort = c_ushort # /usr/include/AL/al.h:85
ALint = c_int # /usr/include/AL/al.h:88
ALuint = c_uint # /usr/include/AL/al.h:91
ALsizei = c_int # /usr/include/AL/al.h:94
ALenum = c_int # /usr/include/AL/al.h:97
ALfloat = c_float # /usr/include/AL/al.h:100
ALdouble = c_double # /usr/include/AL/al.h:103
ALvoid = None # /usr/include/AL/al.h:106
AL_NONE = 0 # /usr/include/AL/al.h:112
AL_FALSE = 0 # /usr/include/AL/al.h:115
AL_TRUE = 1 # /usr/include/AL/al.h:118
AL_SOURCE_RELATIVE = 514 # /usr/include/AL/al.h:121
AL_CONE_INNER_ANGLE = 4097 # /usr/include/AL/al.h:130
AL_CONE_OUTER_ANGLE = 4098 # /usr/include/AL/al.h:137
AL_PITCH = 4099 # /usr/include/AL/al.h:145
AL_POSITION = 4100 # /usr/include/AL/al.h:157
AL_DIRECTION = 4101 # /usr/include/AL/al.h:160
AL_VELOCITY = 4102 # /usr/include/AL/al.h:163
AL_LOOPING = 4103 # /usr/include/AL/al.h:171
AL_BUFFER = 4105 # /usr/include/AL/al.h:178
AL_GAIN = 4106 # /usr/include/AL/al.h:191
AL_MIN_GAIN = 4109 # /usr/include/AL/al.h:200
AL_MAX_GAIN = 4110 # /usr/include/AL/al.h:209
AL_ORIENTATION = 4111 # /usr/include/AL/al.h:216
AL_SOURCE_STATE = 4112 # /usr/include/AL/al.h:221
AL_INITIAL = 4113 # /usr/include/AL/al.h:222
AL_PLAYING = 4114 # /usr/include/AL/al.h:223
AL_PAUSED = 4115 # /usr/include/AL/al.h:224
AL_STOPPED = 4116 # /usr/include/AL/al.h:225
AL_BUFFERS_QUEUED = 4117 # /usr/include/AL/al.h:230
AL_BUFFERS_PROCESSED = 4118 # /usr/include/AL/al.h:231
AL_SEC_OFFSET = 4132 # /usr/include/AL/al.h:236
AL_SAMPLE_OFFSET = 4133 # /usr/include/AL/al.h:237
AL_BYTE_OFFSET = 4134 # /usr/include/AL/al.h:238
AL_SOURCE_TYPE = 4135 # /usr/include/AL/al.h:246
AL_STATIC = 4136 # /usr/include/AL/al.h:247
AL_STREAMING = 4137 # /usr/include/AL/al.h:248
AL_UNDETERMINED = 4144 # /usr/include/AL/al.h:249
AL_FORMAT_MONO8 = 4352 # /usr/include/AL/al.h:252
AL_FORMAT_MONO16 = 4353 # /usr/include/AL/al.h:253
AL_FORMAT_STEREO8 = 4354 # /usr/include/AL/al.h:254
AL_FORMAT_STEREO16 = 4355 # /usr/include/AL/al.h:255
AL_REFERENCE_DISTANCE = 4128 # /usr/include/AL/al.h:265
AL_ROLLOFF_FACTOR = 4129 # /usr/include/AL/al.h:273
AL_CONE_OUTER_GAIN = 4130 # /usr/include/AL/al.h:282
AL_MAX_DISTANCE = 4131 # /usr/include/AL/al.h:292
AL_FREQUENCY = 8193 # /usr/include/AL/al.h:300
AL_BITS = 8194 # /usr/include/AL/al.h:301
AL_CHANNELS = 8195 # /usr/include/AL/al.h:302
AL_SIZE = 8196 # /usr/include/AL/al.h:303
AL_UNUSED = 8208 # /usr/include/AL/al.h:310
AL_PENDING = 8209 # /usr/include/AL/al.h:311
AL_PROCESSED = 8210 # /usr/include/AL/al.h:312
AL_NO_ERROR = 0 # /usr/include/AL/al.h:316
AL_INVALID_NAME = 40961 # /usr/include/AL/al.h:321
AL_INVALID_ENUM = 40962 # /usr/include/AL/al.h:326
AL_INVALID_VALUE = 40963 # /usr/include/AL/al.h:331
AL_INVALID_OPERATION = 40964 # /usr/include/AL/al.h:336
AL_OUT_OF_MEMORY = 40965 # /usr/include/AL/al.h:342
AL_VENDOR = 45057 # /usr/include/AL/al.h:346
AL_VERSION = 45058 # /usr/include/AL/al.h:347
AL_RENDERER = 45059 # /usr/include/AL/al.h:348
AL_EXTENSIONS = 45060 # /usr/include/AL/al.h:349
AL_DOPPLER_FACTOR = 49152 # /usr/include/AL/al.h:356
AL_DOPPLER_VELOCITY = 49153 # /usr/include/AL/al.h:361
AL_SPEED_OF_SOUND = 49155 # /usr/include/AL/al.h:366
AL_DISTANCE_MODEL = 53248 # /usr/include/AL/al.h:375
AL_INVERSE_DISTANCE = 53249 # /usr/include/AL/al.h:376
AL_INVERSE_DISTANCE_CLAMPED = 53250 # /usr/include/AL/al.h:377
AL_LINEAR_DISTANCE = 53251 # /usr/include/AL/al.h:378
AL_LINEAR_DISTANCE_CLAMPED = 53252 # /usr/include/AL/al.h:379
AL_EXPONENT_DISTANCE = 53253 # /usr/include/AL/al.h:380
AL_EXPONENT_DISTANCE_CLAMPED = 53254 # /usr/include/AL/al.h:381
# /usr/include/AL/al.h:386
alEnable = _lib.alEnable
alEnable.restype = None
alEnable.argtypes = [ALenum]
# /usr/include/AL/al.h:388
alDisable = _lib.alDisable
alDisable.restype = None
alDisable.argtypes = [ALenum]
# /usr/include/AL/al.h:390
alIsEnabled = _lib.alIsEnabled
alIsEnabled.restype = ALboolean
alIsEnabled.argtypes = [ALenum]
# /usr/include/AL/al.h:396
alGetString = _lib.alGetString
alGetString.restype = POINTER(ALchar)
alGetString.argtypes = [ALenum]
# /usr/include/AL/al.h:398
alGetBooleanv = _lib.alGetBooleanv
alGetBooleanv.restype = None
alGetBooleanv.argtypes = [ALenum, POINTER(ALboolean)]
# /usr/include/AL/al.h:400
alGetIntegerv = _lib.alGetIntegerv
alGetIntegerv.restype = None
alGetIntegerv.argtypes = [ALenum, POINTER(ALint)]
# /usr/include/AL/al.h:402
alGetFloatv = _lib.alGetFloatv
alGetFloatv.restype = None
alGetFloatv.argtypes = [ALenum, POINTER(ALfloat)]
# /usr/include/AL/al.h:404
alGetDoublev = _lib.alGetDoublev
alGetDoublev.restype = None
alGetDoublev.argtypes = [ALenum, POINTER(ALdouble)]
# /usr/include/AL/al.h:406
alGetBoolean = _lib.alGetBoolean
alGetBoolean.restype = ALboolean
alGetBoolean.argtypes = [ALenum]
# /usr/include/AL/al.h:408
alGetInteger = _lib.alGetInteger
alGetInteger.restype = ALint
alGetInteger.argtypes = [ALenum]
# /usr/include/AL/al.h:410
alGetFloat = _lib.alGetFloat
alGetFloat.restype = ALfloat
alGetFloat.argtypes = [ALenum]
# /usr/include/AL/al.h:412
alGetDouble = _lib.alGetDouble
alGetDouble.restype = ALdouble
alGetDouble.argtypes = [ALenum]
# /usr/include/AL/al.h:419
alGetError = _lib.alGetError
alGetError.restype = ALenum
alGetError.argtypes = []
# /usr/include/AL/al.h:427
alIsExtensionPresent = _lib.alIsExtensionPresent
alIsExtensionPresent.restype = ALboolean
alIsExtensionPresent.argtypes = [POINTER(ALchar)]
# /usr/include/AL/al.h:429
alGetProcAddress = _lib.alGetProcAddress
alGetProcAddress.restype = POINTER(c_void)
alGetProcAddress.argtypes = [POINTER(ALchar)]
# /usr/include/AL/al.h:431
alGetEnumValue = _lib.alGetEnumValue
alGetEnumValue.restype = ALenum
alGetEnumValue.argtypes = [POINTER(ALchar)]
# /usr/include/AL/al.h:450
alListenerf = _lib.alListenerf
alListenerf.restype = None
alListenerf.argtypes = [ALenum, ALfloat]
# /usr/include/AL/al.h:452
alListener3f = _lib.alListener3f
alListener3f.restype = None
alListener3f.argtypes = [ALenum, ALfloat, ALfloat, ALfloat]
# /usr/include/AL/al.h:454
alListenerfv = _lib.alListenerfv
alListenerfv.restype = None
alListenerfv.argtypes = [ALenum, POINTER(ALfloat)]
# /usr/include/AL/al.h:456
alListeneri = _lib.alListeneri
alListeneri.restype = None
alListeneri.argtypes = [ALenum, ALint]
# /usr/include/AL/al.h:458
#alListener3i = _lib.alListener3i
#alListener3i.restype = None
#alListener3i.argtypes = [ALenum, ALint, ALint, ALint]
# /usr/include/AL/al.h:460
#alListeneriv = _lib.alListeneriv
#alListeneriv.restype = None
#alListeneriv.argtypes = [ALenum, POINTER(ALint)]
# /usr/include/AL/al.h:465
alGetListenerf = _lib.alGetListenerf
alGetListenerf.restype = None
alGetListenerf.argtypes = [ALenum, POINTER(ALfloat)]
# /usr/include/AL/al.h:467
alGetListener3f = _lib.alGetListener3f
alGetListener3f.restype = None
alGetListener3f.argtypes = [
ALenum, POINTER(ALfloat), POINTER(ALfloat), POINTER(ALfloat)]
# /usr/include/AL/al.h:469
alGetListenerfv = _lib.alGetListenerfv
alGetListenerfv.restype = None
alGetListenerfv.argtypes = [ALenum, POINTER(ALfloat)]
# /usr/include/AL/al.h:471
alGetListeneri = _lib.alGetListeneri
alGetListeneri.restype = None
alGetListeneri.argtypes = [ALenum, POINTER(ALint)]
# /usr/include/AL/al.h:473
alGetListener3i = _lib.alGetListener3i
alGetListener3i.restype = None
alGetListener3i.argtypes = [
ALenum, POINTER(ALint), POINTER(ALint), POINTER(ALint)]
# /usr/include/AL/al.h:475
alGetListeneriv = _lib.alGetListeneriv
alGetListeneriv.restype = None
alGetListeneriv.argtypes = [ALenum, POINTER(ALint)]
# /usr/include/AL/al.h:512
alGenSources = _lib.alGenSources
alGenSources.restype = None
alGenSources.argtypes = [ALsizei, POINTER(ALuint)]
# /usr/include/AL/al.h:515
alDeleteSources = _lib.alDeleteSources
alDeleteSources.restype = None
alDeleteSources.argtypes = [ALsizei, POINTER(ALuint)]
# /usr/include/AL/al.h:518
alIsSource = _lib.alIsSource
alIsSource.restype = ALboolean
alIsSource.argtypes = [ALuint]
# /usr/include/AL/al.h:523
alSourcef = _lib.alSourcef
alSourcef.restype = None
alSourcef.argtypes = [ALuint, ALenum, ALfloat]
# /usr/include/AL/al.h:525
alSource3f = _lib.alSource3f
alSource3f.restype = None
alSource3f.argtypes = [ALuint, ALenum, ALfloat, ALfloat, ALfloat]
# /usr/include/AL/al.h:527
alSourcefv = _lib.alSourcefv
alSourcefv.restype = None
alSourcefv.argtypes = [ALuint, ALenum, POINTER(ALfloat)]
# /usr/include/AL/al.h:529
alSourcei = _lib.alSourcei
alSourcei.restype = None
alSourcei.argtypes = [ALuint, ALenum, ALint]
# /usr/include/AL/al.h:531
#alSource3i = _lib.alSource3i
#alSource3i.restype = None
#alSource3i.argtypes = [ALuint, ALenum, ALint, ALint, ALint]
# /usr/include/AL/al.h:533
#alSourceiv = _lib.alSourceiv
#alSourceiv.restype = None
#alSourceiv.argtypes = [ALuint, ALenum, POINTER(ALint)]
# /usr/include/AL/al.h:538
alGetSourcef = _lib.alGetSourcef
alGetSourcef.restype = None
alGetSourcef.argtypes = [ALuint, ALenum, POINTER(ALfloat)]
# /usr/include/AL/al.h:540
alGetSource3f = _lib.alGetSource3f
alGetSource3f.restype = None
alGetSource3f.argtypes = [
ALuint, ALenum, POINTER(ALfloat), POINTER(ALfloat), POINTER(ALfloat)]
# /usr/include/AL/al.h:542
alGetSourcefv = _lib.alGetSourcefv
alGetSourcefv.restype = None
alGetSourcefv.argtypes = [ALuint, ALenum, POINTER(ALfloat)]
# /usr/include/AL/al.h:544
alGetSourcei = _lib.alGetSourcei
alGetSourcei.restype = None
alGetSourcei.argtypes = [ALuint, ALenum, POINTER(ALint)]
# /usr/include/AL/al.h:546
#alGetSource3i = _lib.alGetSource3i
#alGetSource3i.restype = None
#alGetSource3i.argtypes = [ALuint, ALenum, POINTER(ALint), POINTER(ALint), POINTER(ALint)]
# /usr/include/AL/al.h:548
alGetSourceiv = _lib.alGetSourceiv
alGetSourceiv.restype = None
alGetSourceiv.argtypes = [ALuint, ALenum, POINTER(ALint)]
# /usr/include/AL/al.h:556
alSourcePlayv = _lib.alSourcePlayv
alSourcePlayv.restype = None
alSourcePlayv.argtypes = [ALsizei, POINTER(ALuint)]
# /usr/include/AL/al.h:559
alSourceStopv = _lib.alSourceStopv
alSourceStopv.restype = None
alSourceStopv.argtypes = [ALsizei, POINTER(ALuint)]
# /usr/include/AL/al.h:562
alSourceRewindv = _lib.alSourceRewindv
alSourceRewindv.restype = None
alSourceRewindv.argtypes = [ALsizei, POINTER(ALuint)]
# /usr/include/AL/al.h:565
alSourcePausev = _lib.alSourcePausev
alSourcePausev.restype = None
alSourcePausev.argtypes = [ALsizei, POINTER(ALuint)]
# /usr/include/AL/al.h:572
alSourcePlay = _lib.alSourcePlay
alSourcePlay.restype = None
alSourcePlay.argtypes = [ALuint]
# /usr/include/AL/al.h:575
alSourceStop = _lib.alSourceStop
alSourceStop.restype = None
alSourceStop.argtypes = [ALuint]
# /usr/include/AL/al.h:578
alSourceRewind = _lib.alSourceRewind
alSourceRewind.restype = None
alSourceRewind.argtypes = [ALuint]
# /usr/include/AL/al.h:581
alSourcePause = _lib.alSourcePause
alSourcePause.restype = None
alSourcePause.argtypes = [ALuint]
# /usr/include/AL/al.h:586
alSourceQueueBuffers = _lib.alSourceQueueBuffers
alSourceQueueBuffers.restype = None
alSourceQueueBuffers.argtypes = [ALuint, ALsizei, POINTER(ALuint)]
# /usr/include/AL/al.h:588
alSourceUnqueueBuffers = _lib.alSourceUnqueueBuffers
alSourceUnqueueBuffers.restype = None
alSourceUnqueueBuffers.argtypes = [ALuint, ALsizei, POINTER(ALuint)]
# /usr/include/AL/al.h:606
alGenBuffers = _lib.alGenBuffers
alGenBuffers.restype = None
alGenBuffers.argtypes = [ALsizei, POINTER(ALuint)]
# /usr/include/AL/al.h:609
alDeleteBuffers = _lib.alDeleteBuffers
alDeleteBuffers.restype = None
alDeleteBuffers.argtypes = [ALsizei, POINTER(ALuint)]
# /usr/include/AL/al.h:612
alIsBuffer = _lib.alIsBuffer
alIsBuffer.restype = ALboolean
alIsBuffer.argtypes = [ALuint]
# /usr/include/AL/al.h:615
alBufferData = _lib.alBufferData
alBufferData.restype = None
alBufferData.argtypes = [ALuint, ALenum, POINTER(ALvoid), ALsizei, ALsizei]
# /usr/include/AL/al.h:620
alBufferf = _lib.alBufferf
alBufferf.restype = None
alBufferf.argtypes = [ALuint, ALenum, ALfloat]
# /usr/include/AL/al.h:622
alBuffer3f = _lib.alBuffer3f
alBuffer3f.restype = None
alBuffer3f.argtypes = [ALuint, ALenum, ALfloat, ALfloat, ALfloat]
# /usr/include/AL/al.h:624
alBufferfv = _lib.alBufferfv
alBufferfv.restype = None
alBufferfv.argtypes = [ALuint, ALenum, POINTER(ALfloat)]
# /usr/include/AL/al.h:626
alBufferi = _lib.alBufferi
alBufferi.restype = None
alBufferi.argtypes = [ALuint, ALenum, ALint]
# /usr/include/AL/al.h:628
alBuffer3i = _lib.alBuffer3i
alBuffer3i.restype = None
alBuffer3i.argtypes = [ALuint, ALenum, ALint, ALint, ALint]
# /usr/include/AL/al.h:630
alBufferiv = _lib.alBufferiv
alBufferiv.restype = None
alBufferiv.argtypes = [ALuint, ALenum, POINTER(ALint)]
# /usr/include/AL/al.h:635
alGetBufferf = _lib.alGetBufferf
alGetBufferf.restype = None
alGetBufferf.argtypes = [ALuint, ALenum, POINTER(ALfloat)]
# /usr/include/AL/al.h:637
alGetBuffer3f = _lib.alGetBuffer3f
alGetBuffer3f.restype = None
alGetBuffer3f.argtypes = [
ALuint, ALenum, POINTER(ALfloat), POINTER(ALfloat), POINTER(ALfloat)]
# /usr/include/AL/al.h:639
alGetBufferfv = _lib.alGetBufferfv
alGetBufferfv.restype = None
alGetBufferfv.argtypes = [ALuint, ALenum, POINTER(ALfloat)]
# /usr/include/AL/al.h:641
alGetBufferi = _lib.alGetBufferi
alGetBufferi.restype = None
alGetBufferi.argtypes = [ALuint, ALenum, POINTER(ALint)]
# /usr/include/AL/al.h:643
alGetBuffer3i = _lib.alGetBuffer3i
alGetBuffer3i.restype = None
alGetBuffer3i.argtypes = [
ALuint, ALenum, POINTER(ALint), POINTER(ALint), POINTER(ALint)]
# /usr/include/AL/al.h:645
alGetBufferiv = _lib.alGetBufferiv
alGetBufferiv.restype = None
alGetBufferiv.argtypes = [ALuint, ALenum, POINTER(ALint)]
# /usr/include/AL/al.h:651
alDopplerFactor = _lib.alDopplerFactor
alDopplerFactor.restype = None
alDopplerFactor.argtypes = [ALfloat]
# /usr/include/AL/al.h:653
alDopplerVelocity = _lib.alDopplerVelocity
alDopplerVelocity.restype = None
alDopplerVelocity.argtypes = [ALfloat]
# /usr/include/AL/al.h:655
alSpeedOfSound = _lib.alSpeedOfSound
alSpeedOfSound.restype = None
alSpeedOfSound.argtypes = [ALfloat]
# /usr/include/AL/al.h:657
alDistanceModel = _lib.alDistanceModel
alDistanceModel.restype = None
alDistanceModel.argtypes = [ALenum]
LPALENABLE = CFUNCTYPE(None, ALenum) # /usr/include/AL/al.h:662
LPALDISABLE = CFUNCTYPE(None, ALenum) # /usr/include/AL/al.h:663
LPALISENABLED = CFUNCTYPE(ALboolean, ALenum) # /usr/include/AL/al.h:664
LPALGETSTRING = CFUNCTYPE(POINTER(ALchar), ALenum) # /usr/include/AL/al.h:665
# /usr/include/AL/al.h:666
LPALGETBOOLEANV = CFUNCTYPE(None, ALenum, POINTER(ALboolean))
# /usr/include/AL/al.h:667
LPALGETINTEGERV = CFUNCTYPE(None, ALenum, POINTER(ALint))
# /usr/include/AL/al.h:668
LPALGETFLOATV = CFUNCTYPE(None, ALenum, POINTER(ALfloat))
# /usr/include/AL/al.h:669
LPALGETDOUBLEV = CFUNCTYPE(None, ALenum, POINTER(ALdouble))
LPALGETBOOLEAN = CFUNCTYPE(ALboolean, ALenum) # /usr/include/AL/al.h:670
LPALGETINTEGER = CFUNCTYPE(ALint, ALenum) # /usr/include/AL/al.h:671
LPALGETFLOAT = CFUNCTYPE(ALfloat, ALenum) # /usr/include/AL/al.h:672
LPALGETDOUBLE = CFUNCTYPE(ALdouble, ALenum) # /usr/include/AL/al.h:673
LPALGETERROR = CFUNCTYPE(ALenum) # /usr/include/AL/al.h:674
LPALISEXTENSIONPRESENT = CFUNCTYPE(
ALboolean, POINTER(ALchar)) # /usr/include/AL/al.h:675
# /usr/include/AL/al.h:676
LPALGETPROCADDRESS = CFUNCTYPE(POINTER(c_void), POINTER(ALchar))
# /usr/include/AL/al.h:677
LPALGETENUMVALUE = CFUNCTYPE(ALenum, POINTER(ALchar))
LPALLISTENERF = CFUNCTYPE(None, ALenum, ALfloat) # /usr/include/AL/al.h:678
# /usr/include/AL/al.h:679
LPALLISTENER3F = CFUNCTYPE(None, ALenum, ALfloat, ALfloat, ALfloat)
# /usr/include/AL/al.h:680
LPALLISTENERFV = CFUNCTYPE(None, ALenum, POINTER(ALfloat))
LPALLISTENERI = CFUNCTYPE(None, ALenum, ALint) # /usr/include/AL/al.h:681
# /usr/include/AL/al.h:682
LPALLISTENER3I = CFUNCTYPE(None, ALenum, ALint, ALint, ALint)
# /usr/include/AL/al.h:683
LPALLISTENERIV = CFUNCTYPE(None, ALenum, POINTER(ALint))
# /usr/include/AL/al.h:684
LPALGETLISTENERF = CFUNCTYPE(None, ALenum, POINTER(ALfloat))
LPALGETLISTENER3F = CFUNCTYPE(None, ALenum, POINTER(ALfloat), POINTER(
ALfloat), POINTER(ALfloat)) # /usr/include/AL/al.h:685
# /usr/include/AL/al.h:686
LPALGETLISTENERFV = CFUNCTYPE(None, ALenum, POINTER(ALfloat))
# /usr/include/AL/al.h:687
LPALGETLISTENERI = CFUNCTYPE(None, ALenum, POINTER(ALint))
LPALGETLISTENER3I = CFUNCTYPE(None, ALenum, POINTER(ALint), POINTER(
ALint), POINTER(ALint)) # /usr/include/AL/al.h:688
# /usr/include/AL/al.h:689
LPALGETLISTENERIV = CFUNCTYPE(None, ALenum, POINTER(ALint))
# /usr/include/AL/al.h:690
LPALGENSOURCES = CFUNCTYPE(None, ALsizei, POINTER(ALuint))
# /usr/include/AL/al.h:691
LPALDELETESOURCES = CFUNCTYPE(None, ALsizei, POINTER(ALuint))
LPALISSOURCE = CFUNCTYPE(ALboolean, ALuint) # /usr/include/AL/al.h:692
# /usr/include/AL/al.h:693
LPALSOURCEF = CFUNCTYPE(None, ALuint, ALenum, ALfloat)
# /usr/include/AL/al.h:694
LPALSOURCE3F = CFUNCTYPE(None, ALuint, ALenum, ALfloat, ALfloat, ALfloat)
# /usr/include/AL/al.h:695
LPALSOURCEFV = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALfloat))
# /usr/include/AL/al.h:696
LPALSOURCEI = CFUNCTYPE(None, ALuint, ALenum, ALint)
# /usr/include/AL/al.h:697
LPALSOURCE3I = CFUNCTYPE(None, ALuint, ALenum, ALint, ALint, ALint)
# /usr/include/AL/al.h:698
LPALSOURCEIV = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALint))
# /usr/include/AL/al.h:699
LPALGETSOURCEF = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALfloat))
LPALGETSOURCE3F = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALfloat), POINTER(
ALfloat), POINTER(ALfloat)) # /usr/include/AL/al.h:700
# /usr/include/AL/al.h:701
LPALGETSOURCEFV = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALfloat))
# /usr/include/AL/al.h:702
LPALGETSOURCEI = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALint))
LPALGETSOURCE3I = CFUNCTYPE(None, ALuint, ALenum, POINTER(
ALint), POINTER(ALint), POINTER(ALint)) # /usr/include/AL/al.h:703
# /usr/include/AL/al.h:704
LPALGETSOURCEIV = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALint))
# /usr/include/AL/al.h:705
LPALSOURCEPLAYV = CFUNCTYPE(None, ALsizei, POINTER(ALuint))
# /usr/include/AL/al.h:706
LPALSOURCESTOPV = CFUNCTYPE(None, ALsizei, POINTER(ALuint))
# /usr/include/AL/al.h:707
LPALSOURCEREWINDV = CFUNCTYPE(None, ALsizei, POINTER(ALuint))
# /usr/include/AL/al.h:708
LPALSOURCEPAUSEV = CFUNCTYPE(None, ALsizei, POINTER(ALuint))
LPALSOURCEPLAY = CFUNCTYPE(None, ALuint) # /usr/include/AL/al.h:709
LPALSOURCESTOP = CFUNCTYPE(None, ALuint) # /usr/include/AL/al.h:710
LPALSOURCEREWIND = CFUNCTYPE(None, ALuint) # /usr/include/AL/al.h:711
LPALSOURCEPAUSE = CFUNCTYPE(None, ALuint) # /usr/include/AL/al.h:712
LPALSOURCEQUEUEBUFFERS = CFUNCTYPE(
None, ALuint, ALsizei, POINTER(ALuint)) # /usr/include/AL/al.h:713
LPALSOURCEUNQUEUEBUFFERS = CFUNCTYPE(
None, ALuint, ALsizei, POINTER(ALuint)) # /usr/include/AL/al.h:714
# /usr/include/AL/al.h:715
LPALGENBUFFERS = CFUNCTYPE(None, ALsizei, POINTER(ALuint))
# /usr/include/AL/al.h:716
LPALDELETEBUFFERS = CFUNCTYPE(None, ALsizei, POINTER(ALuint))
LPALISBUFFER = CFUNCTYPE(ALboolean, ALuint) # /usr/include/AL/al.h:717
LPALBUFFERDATA = CFUNCTYPE(None, ALuint, ALenum, POINTER(
ALvoid), ALsizei, ALsizei) # /usr/include/AL/al.h:718
# /usr/include/AL/al.h:719
LPALBUFFERF = CFUNCTYPE(None, ALuint, ALenum, ALfloat)
# /usr/include/AL/al.h:720
LPALBUFFER3F = CFUNCTYPE(None, ALuint, ALenum, ALfloat, ALfloat, ALfloat)
# /usr/include/AL/al.h:721
LPALBUFFERFV = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALfloat))
# /usr/include/AL/al.h:722
LPALBUFFERI = CFUNCTYPE(None, ALuint, ALenum, ALint)
# /usr/include/AL/al.h:723
LPALBUFFER3I = CFUNCTYPE(None, ALuint, ALenum, ALint, ALint, ALint)
# /usr/include/AL/al.h:724
LPALBUFFERIV = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALint))
# /usr/include/AL/al.h:725
LPALGETBUFFERF = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALfloat))
LPALGETBUFFER3F = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALfloat), POINTER(
ALfloat), POINTER(ALfloat)) # /usr/include/AL/al.h:726
# /usr/include/AL/al.h:727
LPALGETBUFFERFV = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALfloat))
# /usr/include/AL/al.h:728
LPALGETBUFFERI = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALint))
LPALGETBUFFER3I = CFUNCTYPE(None, ALuint, ALenum, POINTER(
ALint), POINTER(ALint), POINTER(ALint)) # /usr/include/AL/al.h:729
# /usr/include/AL/al.h:730
LPALGETBUFFERIV = CFUNCTYPE(None, ALuint, ALenum, POINTER(ALint))
LPALDOPPLERFACTOR = CFUNCTYPE(None, ALfloat) # /usr/include/AL/al.h:731
LPALDOPPLERVELOCITY = CFUNCTYPE(None, ALfloat) # /usr/include/AL/al.h:732
LPALSPEEDOFSOUND = CFUNCTYPE(None, ALfloat) # /usr/include/AL/al.h:733
LPALDISTANCEMODEL = CFUNCTYPE(None, ALenum) # /usr/include/AL/al.h:734
__all__ = ['AL_API', 'ALAPI', 'AL_INVALID', 'AL_ILLEGAL_ENUM',
'AL_ILLEGAL_COMMAND', 'ALboolean', 'ALchar', 'ALbyte', 'ALubyte', 'ALshort',
'ALushort', 'ALint', 'ALuint', 'ALsizei', 'ALenum', 'ALfloat', 'ALdouble',
'ALvoid', 'AL_NONE', 'AL_FALSE', 'AL_TRUE', 'AL_SOURCE_RELATIVE',
'AL_CONE_INNER_ANGLE', 'AL_CONE_OUTER_ANGLE', 'AL_PITCH', 'AL_POSITION',
'AL_DIRECTION', 'AL_VELOCITY', 'AL_LOOPING', 'AL_BUFFER', 'AL_GAIN',
'AL_MIN_GAIN', 'AL_MAX_GAIN', 'AL_ORIENTATION', 'AL_SOURCE_STATE',
'AL_INITIAL', 'AL_PLAYING', 'AL_PAUSED', 'AL_STOPPED', 'AL_BUFFERS_QUEUED',
'AL_BUFFERS_PROCESSED', 'AL_SEC_OFFSET', 'AL_SAMPLE_OFFSET', 'AL_BYTE_OFFSET',
'AL_SOURCE_TYPE', 'AL_STATIC', 'AL_STREAMING', 'AL_UNDETERMINED',
'AL_FORMAT_MONO8', 'AL_FORMAT_MONO16', 'AL_FORMAT_STEREO8',
'AL_FORMAT_STEREO16', 'AL_REFERENCE_DISTANCE', 'AL_ROLLOFF_FACTOR',
'AL_CONE_OUTER_GAIN', 'AL_MAX_DISTANCE', 'AL_FREQUENCY', 'AL_BITS',
'AL_CHANNELS', 'AL_SIZE', 'AL_UNUSED', 'AL_PENDING', 'AL_PROCESSED',
'AL_NO_ERROR', 'AL_INVALID_NAME', 'AL_INVALID_ENUM', 'AL_INVALID_VALUE',
'AL_INVALID_OPERATION', 'AL_OUT_OF_MEMORY', 'AL_VENDOR', 'AL_VERSION',
'AL_RENDERER', 'AL_EXTENSIONS', 'AL_DOPPLER_FACTOR', 'AL_DOPPLER_VELOCITY',
'AL_SPEED_OF_SOUND', 'AL_DISTANCE_MODEL', 'AL_INVERSE_DISTANCE',
'AL_INVERSE_DISTANCE_CLAMPED', 'AL_LINEAR_DISTANCE',
'AL_LINEAR_DISTANCE_CLAMPED', 'AL_EXPONENT_DISTANCE',
'AL_EXPONENT_DISTANCE_CLAMPED', 'alEnable', 'alDisable', 'alIsEnabled',
'alGetString', 'alGetBooleanv', 'alGetIntegerv', 'alGetFloatv',
'alGetDoublev', 'alGetBoolean', 'alGetInteger', 'alGetFloat', 'alGetDouble',
'alGetError', 'alIsExtensionPresent', 'alGetProcAddress', 'alGetEnumValue',
'alListenerf', 'alListener3f', 'alListenerfv', 'alListeneri', 'alListener3i',
'alListeneriv', 'alGetListenerf', 'alGetListener3f', 'alGetListenerfv',
'alGetListeneri', 'alGetListener3i', 'alGetListeneriv', 'alGenSources',
'alDeleteSources', 'alIsSource', 'alSourcef', 'alSource3f', 'alSourcefv',
'alSourcei', 'alSource3i', 'alSourceiv', 'alGetSourcef', 'alGetSource3f',
'alGetSourcefv', 'alGetSourcei', 'alGetSource3i', 'alGetSourceiv',
'alSourcePlayv', 'alSourceStopv', 'alSourceRewindv', 'alSourcePausev',
'alSourcePlay', 'alSourceStop', 'alSourceRewind', 'alSourcePause',
'alSourceQueueBuffers', 'alSourceUnqueueBuffers', 'alGenBuffers',
'alDeleteBuffers', 'alIsBuffer', 'alBufferData', 'alBufferf', 'alBuffer3f',
'alBufferfv', 'alBufferi', 'alBuffer3i', 'alBufferiv', 'alGetBufferf',
'alGetBuffer3f', 'alGetBufferfv', 'alGetBufferi', 'alGetBuffer3i',
'alGetBufferiv', 'alDopplerFactor', 'alDopplerVelocity', 'alSpeedOfSound',
'alDistanceModel', 'LPALENABLE', 'LPALDISABLE', 'LPALISENABLED',
'LPALGETSTRING', 'LPALGETBOOLEANV', 'LPALGETINTEGERV', 'LPALGETFLOATV',
'LPALGETDOUBLEV', 'LPALGETBOOLEAN', 'LPALGETINTEGER', 'LPALGETFLOAT',
'LPALGETDOUBLE', 'LPALGETERROR', 'LPALISEXTENSIONPRESENT',
'LPALGETPROCADDRESS', 'LPALGETENUMVALUE', 'LPALLISTENERF', 'LPALLISTENER3F',
'LPALLISTENERFV', 'LPALLISTENERI', 'LPALLISTENER3I', 'LPALLISTENERIV',
'LPALGETLISTENERF', 'LPALGETLISTENER3F', 'LPALGETLISTENERFV',
'LPALGETLISTENERI', 'LPALGETLISTENER3I', 'LPALGETLISTENERIV',
'LPALGENSOURCES', 'LPALDELETESOURCES', 'LPALISSOURCE', 'LPALSOURCEF',
'LPALSOURCE3F', 'LPALSOURCEFV', 'LPALSOURCEI', 'LPALSOURCE3I', 'LPALSOURCEIV',
'LPALGETSOURCEF', 'LPALGETSOURCE3F', 'LPALGETSOURCEFV', 'LPALGETSOURCEI',
'LPALGETSOURCE3I', 'LPALGETSOURCEIV', 'LPALSOURCEPLAYV', 'LPALSOURCESTOPV',
'LPALSOURCEREWINDV', 'LPALSOURCEPAUSEV', 'LPALSOURCEPLAY', 'LPALSOURCESTOP',
'LPALSOURCEREWIND', 'LPALSOURCEPAUSE', 'LPALSOURCEQUEUEBUFFERS',
'LPALSOURCEUNQUEUEBUFFERS', 'LPALGENBUFFERS', 'LPALDELETEBUFFERS',
'LPALISBUFFER', 'LPALBUFFERDATA', 'LPALBUFFERF', 'LPALBUFFER3F',
'LPALBUFFERFV', 'LPALBUFFERI', 'LPALBUFFER3I', 'LPALBUFFERIV',
'LPALGETBUFFERF', 'LPALGETBUFFER3F', 'LPALGETBUFFERFV', 'LPALGETBUFFERI',
'LPALGETBUFFER3I', 'LPALGETBUFFERIV', 'LPALDOPPLERFACTOR',
'LPALDOPPLERVELOCITY', 'LPALSPEEDOFSOUND', 'LPALDISTANCEMODEL']
| bitcraft/pyglet | pyglet/media/drivers/openal/lib_openal.py | Python | bsd-3-clause | 28,461 |
def extractWwwAfterhourssolaceCom(item):
'''
Parser for 'www.afterhourssolace.com'
'''
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or "preview" in item['title'].lower():
return None
tagmap = [
('PRC', 'PRC', 'translated'),
('Loiterous', 'Loiterous', 'oel'),
]
for tagname, name, tl_type in tagmap:
if tagname in item['tags']:
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False
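# Usage sketch (hypothetical item dict for illustration; the surrounding
# framework normally supplies `item` and the helper functions):
#     extractWwwAfterhourssolaceCom({'title': 'Some Series - Chapter 12',
#                                    'tags': ['PRC']})
# A matching tag yields a release message via buildReleaseMessageWithType;
# untagged chapter items fall through and return False.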
| fake-name/ReadableWebProxy | WebMirror/management/rss_parser_funcs/feed_parse_extractWwwAfterhourssolaceCom.py | Python | bsd-3-clause | 560 |
from functools import partial
from django.test import TestCase
from django.utils.safestring import SafeText
from wagtail.admin import compare
from wagtail.core.blocks import StreamValue
from wagtail.images import get_image_model
from wagtail.images.tests.utils import get_test_image_file
from wagtail.tests.testapp.models import (
EventCategory, EventPage, EventPageSpeaker, HeadCountRelatedModelUsingPK, SimplePage,
StreamPage, TaggedPage)
class TestFieldComparison(TestCase):
comparison_class = compare.FieldComparison
def test_hasnt_changed(self):
comparison = self.comparison_class(
SimplePage._meta.get_field('content'),
SimplePage(content="Content"),
SimplePage(content="Content"),
)
self.assertTrue(comparison.is_field)
self.assertFalse(comparison.is_child_relation)
self.assertEqual(comparison.field_label(), "Content")
self.assertEqual(comparison.htmldiff(), 'Content')
self.assertIsInstance(comparison.htmldiff(), SafeText)
self.assertFalse(comparison.has_changed())
def test_has_changed(self):
comparison = self.comparison_class(
SimplePage._meta.get_field('content'),
SimplePage(content="Original content"),
SimplePage(content="Modified content"),
)
self.assertEqual(comparison.htmldiff(), '<span class="deletion">Original content</span><span class="addition">Modified content</span>')
self.assertIsInstance(comparison.htmldiff(), SafeText)
self.assertTrue(comparison.has_changed())
def test_htmldiff_escapes_value(self):
comparison = self.comparison_class(
SimplePage._meta.get_field('content'),
SimplePage(content='Original content'),
SimplePage(content='<script type="text/javascript">doSomethingBad();</script>'),
)
self.assertEqual(comparison.htmldiff(), '<span class="deletion">Original content</span><span class="addition"><script type="text/javascript">doSomethingBad();</script></span>')
self.assertIsInstance(comparison.htmldiff(), SafeText)
class TestTextFieldComparison(TestFieldComparison):
comparison_class = compare.TextFieldComparison
    # The only change from FieldComparison is that the HTML diff is performed
    # on words instead of the whole field value.
def test_has_changed(self):
comparison = self.comparison_class(
SimplePage._meta.get_field('content'),
SimplePage(content="Original content"),
SimplePage(content="Modified content"),
)
self.assertEqual(comparison.htmldiff(), '<span class="deletion">Original</span><span class="addition">Modified</span> content')
self.assertIsInstance(comparison.htmldiff(), SafeText)
self.assertTrue(comparison.has_changed())
class TestRichTextFieldComparison(TestTextFieldComparison):
comparison_class = compare.RichTextFieldComparison
    # The only change from FieldComparison is that this comparison disregards HTML tags
def test_has_changed_html(self):
comparison = self.comparison_class(
SimplePage._meta.get_field('content'),
SimplePage(content="<b>Original</b> content"),
SimplePage(content="Modified <i>content</i>"),
)
self.assertEqual(comparison.htmldiff(), '<span class="deletion">Original</span><span class="addition">Modified</span> content')
self.assertIsInstance(comparison.htmldiff(), SafeText)
self.assertTrue(comparison.has_changed())
def test_htmldiff_escapes_value(self):
# Need to override this one as the HTML tags are stripped by RichTextFieldComparison
comparison = self.comparison_class(
SimplePage._meta.get_field('content'),
SimplePage(content='Original content'),
SimplePage(content='<script type="text/javascript">doSomethingBad();</script>'),
)
self.assertEqual(comparison.htmldiff(), '<span class="deletion">Original content</span><span class="addition">doSomethingBad();</span>')
self.assertIsInstance(comparison.htmldiff(), SafeText)
class TestStreamFieldComparison(TestCase):
comparison_class = compare.StreamFieldComparison
def test_hasnt_changed(self):
field = StreamPage._meta.get_field('body')
comparison = self.comparison_class(
field,
StreamPage(body=StreamValue(field.stream_block, [
('text', "Content", '1'),
])),
StreamPage(body=StreamValue(field.stream_block, [
('text', "Content", '1'),
])),
)
self.assertTrue(comparison.is_field)
self.assertFalse(comparison.is_child_relation)
self.assertEqual(comparison.field_label(), "Body")
self.assertEqual(comparison.htmldiff(), '<div class="comparison__child-object">Content</div>')
self.assertIsInstance(comparison.htmldiff(), SafeText)
self.assertFalse(comparison.has_changed())
def test_has_changed(self):
field = StreamPage._meta.get_field('body')
comparison = self.comparison_class(
field,
StreamPage(body=StreamValue(field.stream_block, [
('text', "Original content", '1'),
])),
StreamPage(body=StreamValue(field.stream_block, [
('text', "Modified content", '1'),
])),
)
self.assertEqual(comparison.htmldiff(), '<div class="comparison__child-object"><span class="deletion">Original</span><span class="addition">Modified</span> content</div>')
self.assertIsInstance(comparison.htmldiff(), SafeText)
self.assertTrue(comparison.has_changed())
def test_add_block(self):
field = StreamPage._meta.get_field('body')
comparison = self.comparison_class(
field,
StreamPage(body=StreamValue(field.stream_block, [
('text', "Content", '1'),
])),
StreamPage(body=StreamValue(field.stream_block, [
('text', "Content", '1'),
('text', "New Content", '2'),
])),
)
self.assertEqual(comparison.htmldiff(), '<div class="comparison__child-object">Content</div>\n<div class="comparison__child-object addition">New Content</div>')
self.assertIsInstance(comparison.htmldiff(), SafeText)
self.assertTrue(comparison.has_changed())
def test_delete_block(self):
field = StreamPage._meta.get_field('body')
comparison = self.comparison_class(
field,
StreamPage(body=StreamValue(field.stream_block, [
('text', "Content", '1'),
('text', "Content Foo", '2'),
('text', "Content Bar", '3'),
])),
StreamPage(body=StreamValue(field.stream_block, [
('text', "Content", '1'),
('text', "Content Bar", '3'),
])),
)
self.assertEqual(comparison.htmldiff(), '<div class="comparison__child-object">Content</div>\n<div class="comparison__child-object deletion">Content Foo</div>\n<div class="comparison__child-object">Content Bar</div>')
self.assertIsInstance(comparison.htmldiff(), SafeText)
self.assertTrue(comparison.has_changed())
def test_edit_block(self):
field = StreamPage._meta.get_field('body')
comparison = self.comparison_class(
field,
StreamPage(body=StreamValue(field.stream_block, [
('text', "Content", '1'),
('text', "Content Foo", '2'),
('text', "Content Bar", '3'),
])),
StreamPage(body=StreamValue(field.stream_block, [
('text', "Content", '1'),
('text', "Content Baz", '2'),
('text', "Content Bar", '3'),
])),
)
self.assertEqual(comparison.htmldiff(), '<div class="comparison__child-object">Content</div>\n<div class="comparison__child-object">Content <span class="deletion">Foo</span><span class="addition">Baz</span></div>\n<div class="comparison__child-object">Content Bar</div>')
self.assertIsInstance(comparison.htmldiff(), SafeText)
self.assertTrue(comparison.has_changed())
def test_has_changed_richtext(self):
field = StreamPage._meta.get_field('body')
comparison = self.comparison_class(
field,
StreamPage(body=StreamValue(field.stream_block, [
('rich_text', "<b>Original</b> content", '1'),
])),
StreamPage(body=StreamValue(field.stream_block, [
('rich_text', "Modified <i>content</i>", '1'),
])),
)
self.assertEqual(comparison.htmldiff(), '<div class="comparison__child-object"><span class="deletion">Original</span><span class="addition">Modified</span> content</div>')
self.assertIsInstance(comparison.htmldiff(), SafeText)
self.assertTrue(comparison.has_changed())
def test_htmldiff_escapes_value(self):
field = StreamPage._meta.get_field('body')
comparison = self.comparison_class(
field,
StreamPage(body=StreamValue(field.stream_block, [
('text', "Original content", '1'),
])),
StreamPage(body=StreamValue(field.stream_block, [
('text', '<script type="text/javascript">doSomethingBad();</script>', '1'),
])),
)
self.assertEqual(comparison.htmldiff(), '<div class="comparison__child-object"><span class="deletion">Original content</span><span class="addition"><script type="text/javascript">doSomethingBad();</script></span></div>')
self.assertIsInstance(comparison.htmldiff(), SafeText)
def test_htmldiff_escapes_value_richtext(self):
field = StreamPage._meta.get_field('body')
comparison = self.comparison_class(
field,
StreamPage(body=StreamValue(field.stream_block, [
('rich_text', "Original content", '1'),
])),
StreamPage(body=StreamValue(field.stream_block, [
('rich_text', '<script type="text/javascript">doSomethingBad();</script>', '1'),
])),
)
self.assertEqual(comparison.htmldiff(), '<div class="comparison__child-object"><span class="deletion">Original content</span><span class="addition">doSomethingBad();</span></div>')
self.assertIsInstance(comparison.htmldiff(), SafeText)
def test_compare_structblock(self):
field = StreamPage._meta.get_field('body')
comparison = self.comparison_class(
field,
StreamPage(body=StreamValue(field.stream_block, [
('product', {'name': 'a packet of rolos', 'price': '75p'}, '1'),
])),
StreamPage(body=StreamValue(field.stream_block, [
('product', {'name': 'a packet of rolos', 'price': '85p'}, '1'),
])),
)
expected = """
<div class="comparison__child-object"><dl>
<dt>Name</dt>
<dd>a packet of rolos</dd>
<dt>Price</dt>
<dd><span class="deletion">75p</span><span class="addition">85p</span></dd>
</dl></div>
"""
self.assertHTMLEqual(comparison.htmldiff(), expected)
self.assertIsInstance(comparison.htmldiff(), SafeText)
self.assertTrue(comparison.has_changed())
def test_compare_imagechooserblock(self):
image_model = get_image_model()
test_image_1 = image_model.objects.create(
title="Test image 1",
file=get_test_image_file(),
)
test_image_2 = image_model.objects.create(
title="Test image 2",
file=get_test_image_file(),
)
field = StreamPage._meta.get_field('body')
comparison = self.comparison_class(
field,
StreamPage(body=StreamValue(field.stream_block, [
('image', test_image_1, '1'),
])),
StreamPage(body=StreamValue(field.stream_block, [
('image', test_image_2, '1'),
])),
)
result = comparison.htmldiff()
self.assertIn('<div class="preview-image deletion">', result)
self.assertIn('alt="Test image 1"', result)
self.assertIn('<div class="preview-image addition">', result)
self.assertIn('alt="Test image 2"', result)
self.assertIsInstance(result, SafeText)
self.assertTrue(comparison.has_changed())
class TestChoiceFieldComparison(TestCase):
comparison_class = compare.ChoiceFieldComparison
def test_hasnt_changed(self):
comparison = self.comparison_class(
EventPage._meta.get_field('audience'),
EventPage(audience="public"),
EventPage(audience="public"),
)
self.assertTrue(comparison.is_field)
self.assertFalse(comparison.is_child_relation)
self.assertEqual(comparison.field_label(), "Audience")
self.assertEqual(comparison.htmldiff(), 'Public')
self.assertIsInstance(comparison.htmldiff(), SafeText)
self.assertFalse(comparison.has_changed())
def test_has_changed(self):
comparison = self.comparison_class(
EventPage._meta.get_field('audience'),
EventPage(audience="public"),
EventPage(audience="private"),
)
self.assertEqual(comparison.htmldiff(), '<span class="deletion">Public</span><span class="addition">Private</span>')
self.assertIsInstance(comparison.htmldiff(), SafeText)
self.assertTrue(comparison.has_changed())
class TestTagsFieldComparison(TestCase):
comparison_class = compare.TagsFieldComparison
def test_hasnt_changed(self):
a = TaggedPage()
a.tags.add('wagtail')
a.tags.add('bird')
b = TaggedPage()
b.tags.add('wagtail')
b.tags.add('bird')
comparison = self.comparison_class(TaggedPage._meta.get_field('tags'), a, b)
self.assertTrue(comparison.is_field)
self.assertFalse(comparison.is_child_relation)
self.assertEqual(comparison.field_label(), "Tags")
self.assertEqual(comparison.htmldiff(), 'wagtail, bird')
self.assertIsInstance(comparison.htmldiff(), SafeText)
self.assertFalse(comparison.has_changed())
def test_has_changed(self):
a = TaggedPage()
a.tags.add('wagtail')
a.tags.add('bird')
b = TaggedPage()
b.tags.add('wagtail')
b.tags.add('motacilla')
comparison = self.comparison_class(TaggedPage._meta.get_field('tags'), a, b)
self.assertEqual(comparison.htmldiff(), 'wagtail, <span class="deletion">bird</span>, <span class="addition">motacilla</span>')
self.assertIsInstance(comparison.htmldiff(), SafeText)
self.assertTrue(comparison.has_changed())
class TestM2MFieldComparison(TestCase):
fixtures = ['test.json']
comparison_class = compare.M2MFieldComparison
def setUp(self):
self.meetings_category = EventCategory.objects.create(name='Meetings')
self.parties_category = EventCategory.objects.create(name='Parties')
self.holidays_category = EventCategory.objects.create(name='Holidays')
def test_hasnt_changed(self):
christmas_event = EventPage.objects.get(url_path='/home/events/christmas/')
saint_patrick_event = EventPage.objects.get(url_path='/home/events/saint-patrick/')
christmas_event.categories = [self.meetings_category, self.parties_category]
saint_patrick_event.categories = [self.meetings_category, self.parties_category]
comparison = self.comparison_class(
EventPage._meta.get_field('categories'), christmas_event, saint_patrick_event
)
self.assertTrue(comparison.is_field)
self.assertFalse(comparison.is_child_relation)
self.assertEqual(comparison.field_label(), "Categories")
self.assertFalse(comparison.has_changed())
self.assertEqual(comparison.htmldiff(), 'Meetings, Parties')
self.assertIsInstance(comparison.htmldiff(), SafeText)
def test_has_changed(self):
christmas_event = EventPage.objects.get(url_path='/home/events/christmas/')
saint_patrick_event = EventPage.objects.get(url_path='/home/events/saint-patrick/')
christmas_event.categories = [self.meetings_category, self.parties_category]
saint_patrick_event.categories = [self.meetings_category, self.holidays_category]
comparison = self.comparison_class(
EventPage._meta.get_field('categories'), christmas_event, saint_patrick_event
)
self.assertTrue(comparison.has_changed())
self.assertEqual(comparison.htmldiff(), 'Meetings, <span class="deletion">Parties</span>, <span class="addition">Holidays</span>')
self.assertIsInstance(comparison.htmldiff(), SafeText)
class TestForeignObjectComparison(TestCase):
comparison_class = compare.ForeignObjectComparison
@classmethod
def setUpTestData(cls):
image_model = get_image_model()
cls.test_image_1 = image_model.objects.create(
title="Test image 1",
file=get_test_image_file(),
)
cls.test_image_2 = image_model.objects.create(
title="Test image 2",
file=get_test_image_file(),
)
def test_hasnt_changed(self):
comparison = self.comparison_class(
EventPage._meta.get_field('feed_image'),
EventPage(feed_image=self.test_image_1),
EventPage(feed_image=self.test_image_1),
)
self.assertTrue(comparison.is_field)
self.assertFalse(comparison.is_child_relation)
self.assertEqual(comparison.field_label(), "Feed image")
self.assertEqual(comparison.htmldiff(), 'Test image 1')
self.assertIsInstance(comparison.htmldiff(), SafeText)
self.assertFalse(comparison.has_changed())
def test_has_changed(self):
comparison = self.comparison_class(
EventPage._meta.get_field('feed_image'),
EventPage(feed_image=self.test_image_1),
EventPage(feed_image=self.test_image_2),
)
self.assertEqual(comparison.htmldiff(), '<span class="deletion">Test image 1</span><span class="addition">Test image 2</span>')
self.assertIsInstance(comparison.htmldiff(), SafeText)
self.assertTrue(comparison.has_changed())
class TestChildRelationComparison(TestCase):
field_comparison_class = compare.FieldComparison
comparison_class = compare.ChildRelationComparison
def test_hasnt_changed(self):
        # Two event pages, each with a speaker called "Father Christmas".
        # Neither speaker object has an ID, so this tests that the code can
        # match the two together by field content.
event_page = EventPage(title="Event page", slug="event")
event_page.speakers.add(EventPageSpeaker(
first_name="Father",
last_name="Christmas",
))
modified_event_page = EventPage(title="Event page", slug="event")
modified_event_page.speakers.add(EventPageSpeaker(
first_name="Father",
last_name="Christmas",
))
comparison = self.comparison_class(
EventPage._meta.get_field('speaker'),
[
partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('first_name')),
partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('last_name')),
],
event_page,
modified_event_page,
)
self.assertFalse(comparison.is_field)
self.assertTrue(comparison.is_child_relation)
self.assertEqual(comparison.field_label(), "Speaker")
self.assertFalse(comparison.has_changed())
# Check mapping
objs_a = list(comparison.val_a.all())
objs_b = list(comparison.val_b.all())
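        # get_mapping pairs child objects between the two revisions: it
        # returns (map_forwards, map_backwards, added, deleted), where the
        # maps are index->index dicts (old->new and new->old) and
        # added/deleted list unmatched indices in the new and old lists.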
map_forwards, map_backwards, added, deleted = comparison.get_mapping(objs_a, objs_b)
self.assertEqual(map_forwards, {0: 0})
self.assertEqual(map_backwards, {0: 0})
self.assertEqual(added, [])
self.assertEqual(deleted, [])
def test_has_changed(self):
        # Father Christmas is renamed to Santa Claus, and Father Ted is added.
        # Father Christmas should be mapped to Father Ted because they are
        # most alike; Santa Claus should be displayed as "new".
event_page = EventPage(title="Event page", slug="event")
event_page.speakers.add(EventPageSpeaker(
first_name="Father",
last_name="Christmas",
sort_order=0,
))
modified_event_page = EventPage(title="Event page", slug="event")
modified_event_page.speakers.add(EventPageSpeaker(
first_name="Santa",
last_name="Claus",
sort_order=0,
))
modified_event_page.speakers.add(EventPageSpeaker(
first_name="Father",
last_name="Ted",
sort_order=1,
))
comparison = self.comparison_class(
EventPage._meta.get_field('speaker'),
[
partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('first_name')),
partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('last_name')),
],
event_page,
modified_event_page,
)
self.assertFalse(comparison.is_field)
self.assertTrue(comparison.is_child_relation)
self.assertEqual(comparison.field_label(), "Speaker")
self.assertTrue(comparison.has_changed())
# Check mapping
objs_a = list(comparison.val_a.all())
objs_b = list(comparison.val_b.all())
map_forwards, map_backwards, added, deleted = comparison.get_mapping(objs_a, objs_b)
self.assertEqual(map_forwards, {0: 1}) # Map Father Christmas to Father Ted
        self.assertEqual(map_backwards, {1: 0}) # Map Father Ted to Father Christmas
self.assertEqual(added, [0]) # Add Santa Claus
self.assertEqual(deleted, [])
def test_has_changed_with_same_id(self):
# Father Christmas renamed to Santa Claus, but this time the ID of the
# child object remained the same. It should now be detected as the same
# object
event_page = EventPage(title="Event page", slug="event")
event_page.speakers.add(EventPageSpeaker(
id=1,
first_name="Father",
last_name="Christmas",
sort_order=0,
))
modified_event_page = EventPage(title="Event page", slug="event")
modified_event_page.speakers.add(EventPageSpeaker(
id=1,
first_name="Santa",
last_name="Claus",
sort_order=0,
))
modified_event_page.speakers.add(EventPageSpeaker(
first_name="Father",
last_name="Ted",
sort_order=1,
))
comparison = self.comparison_class(
EventPage._meta.get_field('speaker'),
[
partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('first_name')),
partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('last_name')),
],
event_page,
modified_event_page,
)
self.assertFalse(comparison.is_field)
self.assertTrue(comparison.is_child_relation)
self.assertEqual(comparison.field_label(), "Speaker")
self.assertTrue(comparison.has_changed())
# Check mapping
objs_a = list(comparison.val_a.all())
objs_b = list(comparison.val_b.all())
map_forwards, map_backwards, added, deleted = comparison.get_mapping(objs_a, objs_b)
self.assertEqual(map_forwards, {0: 0}) # Map Father Christmas to Santa Claus
self.assertEqual(map_backwards, {0: 0}) # Map Santa Claus to Father Christmas
self.assertEqual(added, [1]) # Add Father Ted
self.assertEqual(deleted, [])
def test_hasnt_changed_with_different_id(self):
# Both of the child objects have the same field content but have a
# different ID so they should be detected as separate objects
event_page = EventPage(title="Event page", slug="event")
event_page.speakers.add(EventPageSpeaker(
id=1,
first_name="Father",
last_name="Christmas",
))
modified_event_page = EventPage(title="Event page", slug="event")
modified_event_page.speakers.add(EventPageSpeaker(
id=2,
first_name="Father",
last_name="Christmas",
))
comparison = self.comparison_class(
EventPage._meta.get_field('speaker'),
[
partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('first_name')),
partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('last_name')),
],
event_page,
modified_event_page,
)
self.assertFalse(comparison.is_field)
self.assertTrue(comparison.is_child_relation)
self.assertEqual(comparison.field_label(), "Speaker")
self.assertTrue(comparison.has_changed())
# Check mapping
objs_a = list(comparison.val_a.all())
objs_b = list(comparison.val_b.all())
map_forwards, map_backwards, added, deleted = comparison.get_mapping(objs_a, objs_b)
self.assertEqual(map_forwards, {})
self.assertEqual(map_backwards, {})
self.assertEqual(added, [0]) # Add new Father Christmas
self.assertEqual(deleted, [0]) # Delete old Father Christmas
class TestChildObjectComparison(TestCase):
field_comparison_class = compare.FieldComparison
comparison_class = compare.ChildObjectComparison
def test_same_object(self):
obj_a = EventPageSpeaker(
first_name="Father",
last_name="Christmas",
)
obj_b = EventPageSpeaker(
first_name="Father",
last_name="Christmas",
)
comparison = self.comparison_class(
EventPageSpeaker,
[
partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('first_name')),
partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('last_name')),
],
obj_a,
obj_b,
)
self.assertFalse(comparison.is_addition())
self.assertFalse(comparison.is_deletion())
self.assertFalse(comparison.has_changed())
self.assertEqual(comparison.get_position_change(), 0)
self.assertEqual(comparison.get_num_differences(), 0)
def test_different_object(self):
obj_a = EventPageSpeaker(
first_name="Father",
last_name="Christmas",
)
obj_b = EventPageSpeaker(
first_name="Santa",
last_name="Claus",
)
comparison = self.comparison_class(
EventPageSpeaker,
[
partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('first_name')),
partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('last_name')),
],
obj_a,
obj_b,
)
self.assertFalse(comparison.is_addition())
self.assertFalse(comparison.is_deletion())
self.assertTrue(comparison.has_changed())
self.assertEqual(comparison.get_position_change(), 0)
self.assertEqual(comparison.get_num_differences(), 2)
def test_moved_object(self):
obj_a = EventPageSpeaker(
first_name="Father",
last_name="Christmas",
sort_order=1,
)
obj_b = EventPageSpeaker(
first_name="Father",
last_name="Christmas",
sort_order=5,
)
comparison = self.comparison_class(
EventPageSpeaker,
[
partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('first_name')),
partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('last_name')),
],
obj_a,
obj_b,
)
self.assertFalse(comparison.is_addition())
self.assertFalse(comparison.is_deletion())
self.assertFalse(comparison.has_changed())
self.assertEqual(comparison.get_position_change(), 4)
self.assertEqual(comparison.get_num_differences(), 0)
def test_addition(self):
obj = EventPageSpeaker(
first_name="Father",
last_name="Christmas",
)
comparison = self.comparison_class(
EventPageSpeaker,
[
partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('first_name')),
partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('last_name')),
],
None,
obj,
)
self.assertTrue(comparison.is_addition())
self.assertFalse(comparison.is_deletion())
self.assertFalse(comparison.has_changed())
        self.assertIsNone(comparison.get_position_change())
self.assertEqual(comparison.get_num_differences(), 0)
def test_deletion(self):
obj = EventPageSpeaker(
first_name="Father",
last_name="Christmas",
)
comparison = self.comparison_class(
EventPageSpeaker,
[
partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('first_name')),
partial(self.field_comparison_class, EventPageSpeaker._meta.get_field('last_name')),
],
obj,
None,
)
self.assertFalse(comparison.is_addition())
self.assertTrue(comparison.is_deletion())
self.assertFalse(comparison.has_changed())
self.assertIsNone(comparison.get_position_change())
self.assertEqual(comparison.get_num_differences(), 0)
class TestChildRelationComparisonUsingPK(TestCase):
"""Test related objects can be compred if they do not use id for primary key"""
field_comparison_class = compare.FieldComparison
comparison_class = compare.ChildRelationComparison
def test_has_changed_with_same_id(self):
# Head Count was changed but the PK of the child object remained the same.
# It should be detected as the same object
event_page = EventPage(title="Semi Finals", slug="semi-finals-2018")
event_page.head_counts.add(HeadCountRelatedModelUsingPK(
custom_id=1,
head_count=22,
))
modified_event_page = EventPage(title="Semi Finals", slug="semi-finals-2018")
modified_event_page.head_counts.add(HeadCountRelatedModelUsingPK(
custom_id=1,
head_count=23,
))
modified_event_page.head_counts.add(HeadCountRelatedModelUsingPK(
head_count=25,
))
comparison = self.comparison_class(
EventPage._meta.get_field('head_counts'),
[partial(self.field_comparison_class, HeadCountRelatedModelUsingPK._meta.get_field('head_count'))],
event_page,
modified_event_page,
)
self.assertFalse(comparison.is_field)
self.assertTrue(comparison.is_child_relation)
self.assertEqual(comparison.field_label(), 'Head counts')
self.assertTrue(comparison.has_changed())
# Check mapping
objs_a = list(comparison.val_a.all())
objs_b = list(comparison.val_b.all())
map_forwards, map_backwards, added, deleted = comparison.get_mapping(objs_a, objs_b)
self.assertEqual(map_forwards, {0: 0}) # map head count 22 to 23
self.assertEqual(map_backwards, {0: 0}) # map head count 23 to 22
self.assertEqual(added, [1]) # add second head count
self.assertEqual(deleted, [])
def test_hasnt_changed_with_different_id(self):
# Both of the child objects have the same field content but have a
# different PK (ID) so they should be detected as separate objects
event_page = EventPage(title="Finals", slug="finals-event-abc")
event_page.head_counts.add(HeadCountRelatedModelUsingPK(
custom_id=1,
head_count=220
))
modified_event_page = EventPage(title="Finals", slug="finals-event-abc")
modified_event_page.head_counts.add(HeadCountRelatedModelUsingPK(
custom_id=2,
head_count=220
))
comparison = self.comparison_class(
EventPage._meta.get_field('head_counts'),
[partial(self.field_comparison_class, HeadCountRelatedModelUsingPK._meta.get_field('head_count'))],
event_page,
modified_event_page,
)
self.assertFalse(comparison.is_field)
self.assertTrue(comparison.is_child_relation)
self.assertEqual(comparison.field_label(), "Head counts")
self.assertTrue(comparison.has_changed())
# Check mapping
objs_a = list(comparison.val_a.all())
objs_b = list(comparison.val_b.all())
map_forwards, map_backwards, added, deleted = comparison.get_mapping(objs_a, objs_b)
self.assertEqual(map_forwards, {})
self.assertEqual(map_backwards, {})
self.assertEqual(added, [0]) # Add new head count
self.assertEqual(deleted, [0]) # Delete old head count
| nealtodd/wagtail | wagtail/admin/tests/test_compare.py | Python | bsd-3-clause | 33,922 |
from wtforms import fields
from peewee import (DateTimeField, DateField, TimeField,
PrimaryKeyField, ForeignKeyField, BaseModel)
from wtfpeewee.orm import ModelConverter, model_form
from flask.ext.admin import form
from flask.ext.admin._compat import itervalues
from flask.ext.admin.model.form import InlineFormAdmin, InlineModelConverterBase
from flask.ext.admin.model.fields import InlineModelFormField, InlineFieldList
from .tools import get_primary_key
class InlineModelFormList(InlineFieldList):
def __init__(self, form, model, prop, **kwargs):
self.form = form
self.model = model
self.prop = prop
self._pk = get_primary_key(model)
super(InlineModelFormList, self).__init__(InlineModelFormField(form, self._pk), **kwargs)
def display_row_controls(self, field):
return field.get_pk() is not None
def process(self, formdata, data=None):
if not formdata:
attr = getattr(self.model, self.prop)
data = self.model.select().where(attr == data).execute()
else:
data = None
return super(InlineModelFormList, self).process(formdata, data)
def populate_obj(self, obj, name):
pass
def save_related(self, obj):
model_id = getattr(obj, self._pk)
attr = getattr(self.model, self.prop)
values = self.model.select().where(attr == model_id).execute()
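        # Index existing children by primary key (stringified, to match the
        # submitted form values) so each inline entry can be mapped back to
        # its database row below.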
pk_map = dict((str(getattr(v, self._pk)), v) for v in values)
# Handle request data
for field in self.entries:
field_id = field.get_pk()
if field_id in pk_map:
model = pk_map[field_id]
if self.should_delete(field):
model.delete_instance(recursive=True)
continue
else:
model = self.model()
field.populate_obj(model, None)
# Force relation
setattr(model, self.prop, model_id)
model.save()
class CustomModelConverter(ModelConverter):
def __init__(self, additional=None):
super(CustomModelConverter, self).__init__(additional)
self.converters[PrimaryKeyField] = self.handle_pk
self.converters[DateTimeField] = self.handle_datetime
self.converters[DateField] = self.handle_date
self.converters[TimeField] = self.handle_time
def handle_pk(self, model, field, **kwargs):
kwargs['validators'] = []
return field.name, fields.HiddenField(**kwargs)
def handle_date(self, model, field, **kwargs):
kwargs['widget'] = form.DatePickerWidget()
return field.name, fields.DateField(**kwargs)
def handle_datetime(self, model, field, **kwargs):
kwargs['widget'] = form.DateTimePickerWidget()
return field.name, fields.DateTimeField(**kwargs)
def handle_time(self, model, field, **kwargs):
return field.name, form.TimeField(**kwargs)
class InlineModelConverter(InlineModelConverterBase):
def get_info(self, p):
info = super(InlineModelConverter, self).get_info(p)
if info is None:
if isinstance(p, BaseModel):
info = InlineFormAdmin(p)
else:
model = getattr(p, 'model', None)
if model is None:
raise Exception('Unknown inline model admin: %s' % repr(p))
attrs = dict()
for attr in dir(p):
                    if not attr.startswith('_') and attr != 'model':
attrs[attr] = getattr(p, attr)
info = InlineFormAdmin(model, **attrs)
return info
def contribute(self, converter, model, form_class, inline_model):
# Find property from target model to current model
reverse_field = None
info = self.get_info(inline_model)
for field in info.model._meta.get_fields():
field_type = type(field)
if field_type == ForeignKeyField:
if field.rel_model == model:
reverse_field = field
break
else:
raise Exception('Cannot find reverse relation for model %s' % info.model)
# Remove reverse property from the list
ignore = [reverse_field.name]
if info.form_excluded_columns:
exclude = ignore + info.form_excluded_columns
else:
exclude = ignore
# Create field
child_form = model_form(info.model,
base_class=form.BaseForm,
only=info.form_columns,
exclude=exclude,
field_args=info.form_args,
allow_pk=True,
converter=converter)
prop_name = 'fa_%s' % model.__name__
label = self.get_label(info, prop_name)
setattr(form_class,
prop_name,
InlineModelFormList(child_form,
info.model,
reverse_field.name,
label=label or info.model.__name__))
        setattr(reverse_field.rel_model,
                prop_name,
                property(lambda self: self.id))
return form_class
def save_inline(form, model):
for f in itervalues(form._fields):
if f.type == 'InlineModelFormList':
f.save_related(model)
| saadbinakhlaq/flask-admin | flask_admin/contrib/peeweemodel/form.py | Python | bsd-3-clause | 5,481 |
from data_collection.management.commands import BaseXpressDemocracyClubCsvImporter
class Command(BaseXpressDemocracyClubCsvImporter):
council_id = 'E07000121'
addresses_name = 'parl.2017-06-08/Version 1/Lancaster (and Fleetwood) && (Morecambe and Lunesdale) Democracy_Club__08June2017.tsv'
stations_name = 'parl.2017-06-08/Version 1/Lancaster (and Fleetwood) && (Morecambe and Lunesdale) Democracy_Club__08June2017.tsv'
elections = ['parl.2017-06-08']
csv_delimiter = '\t'
| chris48s/UK-Polling-Stations | polling_stations/apps/data_collection/management/commands/import_lancaster.py | Python | bsd-3-clause | 494 |
# -*- coding: utf-8 -*-
import logging
logger = logging.getLogger(__name__)
from werkzeug.contrib.fixers import ProxyFix
from flask import Flask, request, abort, session, g, redirect, url_for, jsonify, render_template
from decouple import config as config_from_env
from . import PYMONGO2
from . import geoip_tools
from . import extensions
from . import admin
from . import tasks
from . import constants
THEMES = [
    #'amelia', # ok but not great
    'cerulean', # ok
    #'cosmo', # BAD - misaligned everywhere
    'cyborg', #ok
    'darkly', #ok
    'flatly', #ok
    'journal', # ok but not great
    'lumen', # ok - small, but works for columns
    'readable', #ok - check table columns
    'simplex', # ok - small, but works for columns
    'slate', # ok - New DEFAULT ?
    'spacelab', # ok
    'superhero', # ok but loses visibility in a select
    'united', # ok
    #'yeti', # BAD - misaligned everywhere
]
def _configure_themes(app):
@app.before_request
def current_theme():
if not constants.SESSION_THEME_KEY in session:
session[constants.SESSION_THEME_KEY] = app.config.get('DEFAULT_THEME', 'slate')
g.theme = session.get(constants.SESSION_THEME_KEY)
@app.context_processor
def inject_theme():
try:
return {
constants.SESSION_THEME_KEY: g.theme.lower(),
'current_theme_url': url_for('static', filename='mmw/themes/bootswatch/%s/bootstrap.min.css' % g.theme.lower()),
'themes': THEMES,
}
except AttributeError:
return {
constants.SESSION_THEME_KEY: app.config.get('DEFAULT_THEME', 'slate'),
                'current_theme_url': url_for('static', filename='mmw/themes/bootswatch/%s/bootstrap.min.css' % app.config.get('DEFAULT_THEME', 'slate')),
'themes': THEMES,
}
@app.route('/change-theme', endpoint="changetheme")
def change_theme():
"""
        /change-theme?theme=flatly  (switches the session theme)
        /fam/change-theme -> endpoint fam.changetheme when blueprint-prefixed
"""
new_theme = request.args.get('theme', None)
next = request.args.get('next') or request.referrer or '/'
#if not next:
# next = url_for('home')
try:
if new_theme:
session[constants.SESSION_THEME_KEY] = new_theme
        except Exception:
pass
return redirect(next)
def _configure_logging(debug=False,
stdout_enable=True,
syslog_enable=False,
prog_name='mongo_mail_web',
config_file=None
):
import sys
import logging
import logging.config
if config_file:
logging.config.fileConfig(config_file, disable_existing_loggers=True)
return logging.getLogger(prog_name)
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'formatters': {
'debug': {
'format': 'line:%(lineno)d - %(asctime)s %(name)s: [%(levelname)s] - [%(process)d] - [%(module)s] - %(message)s',
'datefmt': '%Y-%m-%d %H:%M:%S',
},
'simple': {
'format': '%(asctime)s %(name)s: [%(levelname)s] - %(message)s',
'datefmt': '%Y-%m-%d %H:%M:%S',
},
},
'handlers': {
'null': {
'level':'ERROR',
'class':'logging.NullHandler',
},
'console':{
'level':'INFO',
'class':'logging.StreamHandler',
'formatter': 'simple'
},
},
'loggers': {
'': {
'handlers': [],
'level': 'INFO',
'propagate': False,
},
prog_name: {
#'handlers': [],
'level': 'INFO',
'propagate': True,
},
},
}
if sys.platform.startswith("win32"):
LOGGING['loggers']['']['handlers'] = ['console']
elif syslog_enable:
LOGGING['handlers']['syslog'] = {
'level':'INFO',
'class':'logging.handlers.SysLogHandler',
'address' : '/dev/log',
'facility': 'daemon',
'formatter': 'simple'
}
LOGGING['loggers']['']['handlers'].append('syslog')
if stdout_enable:
        if 'console' not in LOGGING['loggers']['']['handlers']:
            LOGGING['loggers']['']['handlers'].append('console')
    # if the handlers list is still empty, fall back to the console handler
if not LOGGING['loggers']['']['handlers']:
LOGGING['loggers']['']['handlers'] = ['console']
if debug:
LOGGING['loggers']['']['level'] = 'DEBUG'
LOGGING['loggers'][prog_name]['level'] = 'DEBUG'
for handler in LOGGING['handlers'].keys():
LOGGING['handlers'][handler]['formatter'] = 'debug'
LOGGING['handlers'][handler]['level'] = 'DEBUG'
#from pprint import pprint as pp
#pp(LOGGING)
#werkzeug = logging.getLogger('werkzeug')
#werkzeug.handlers = []
logging.config.dictConfig(LOGGING)
logger = logging.getLogger(prog_name)
return logger
def _configure_security(app):
from . import models
from flask_security import MongoEngineUserDatastore
datastore = MongoEngineUserDatastore(models.db, models.User, models.Role)
state = extensions.security.init_app(app, datastore, register_blueprint=True)
def error_handlers(app):
from flask_security import url_for_security, current_user
@app.route('/unauthorized', endpoint="unauthorized")
def unauthorized_view():
abort(403)
@app.errorhandler(403)
def forbidden(error):
if not current_user.is_anonymous():
logger.fatal("reject user [%s]" % current_user)
logger.fatal("reject host [%s]" % request.remote_addr)
return "forbidden. you ip address is %s" % request.remote_addr, 403
@app.errorhandler(401)
def unauthorized(error):
return redirect(url_for_security('login', next=request.url))
"""
@app.route('/unauthorized', endpoint="unauthorized")
def unauthorized():
if request.args.get('json', 0, type=int) or request.is_xhr:
return jsonify(success=False)
return render_template("mmw/unauthorized.html")
"""
def _configure_sentry(app):
try:
from raven.contrib.flask import Sentry
if app.config.get('SENTRY_DSN', None):
sentry = Sentry(app, logging=True, level=app.logger.level)
except ImportError:
pass
def _configure_i18n(app):
extensions.babel.init_app(app)
babel = extensions.babel
#fr <class 'babel.core.Locale'>
#for t in babel.list_translations():
# print t, type(t)
#current = session.get(constants.SESSION_LANG_KEY, app.config.get('DEFAULT_LANG'))
@app.before_request
def set_locales():
current_lang = session.get(constants.SESSION_LANG_KEY, None)
if not current_lang:
session[constants.SESSION_LANG_KEY] = app.config.get('DEFAULT_LANG')
current_tz = session.get(constants.SESSION_TIMEZONE_KEY, None)
if not current_tz:
session[constants.SESSION_TIMEZONE_KEY] = app.config.get('TIMEZONE')
@babel.localeselector
def get_locale():
current_lang = session.get(constants.SESSION_LANG_KEY, app.config.get('DEFAULT_LANG'))
return current_lang
"""
if current_user.locale:
return current_user.locale
default_locale = current_app.config.get('BABEL_DEFAULT_LOCALE', 'en')
accept_languages = current_app.config.get('ACCEPT_LANGUAGES', [default_locale])
return request.accept_languages.best_match(accept_languages)
"""
@babel.timezoneselector
def get_timezone():
return session.get(constants.SESSION_TIMEZONE_KEY, app.config.get('TIMEZONE'))
"""
if current_user.timezone:
return current_user.timezone
return current_app.config.get('BABEL_DEFAULT_TIMEZONE', 'UTC')
"""
def _configure_mongolock(app):
from . import metrics
from . import models
from mongolock import MongoLock
metrics.lock = MongoLock(client=models.MessageStore._get_db().connection)
def _configure_processors(app):
from . import constants
from . import countries
from . import models
from .form_helpers import _is_hidden, _is_required
@app.context_processor
def form_helpers():
return dict(is_hidden=_is_hidden, is_required=_is_required)
@app.context_processor
def langs():
return dict(langs=app.config.get('ACCEPT_LANGUAGES_CHOICES'))
@app.context_processor
def current_lang():
current = session.get(constants.SESSION_LANG_KEY, app.config.get('DEFAULT_LANG'))
return dict(current_lang=current)
@app.context_processor
def current_tz():
current = session.get(constants.SESSION_TIMEZONE_KEY, app.config.get('TIMEZONE'))
return dict(current_tz=current)
@app.context_processor
def helpers():
return dict(c=constants)
@app.context_processor
def country_name():
"""
{{ country_name('fr') }}
"""
def render(cp):
if not cp:
return ''
name = countries.OFFICIAL_COUNTRIES.get(cp.upper(), '')
return name.capitalize()
return dict(country_name=render)
def create_app(config='mongo_mail_web.settings.Prod'):
"""
    TODO: before first request, redirect to the domains/mynetwork form if none are configured
"""
env_config = config_from_env('MMW_SETTINGS', config)
app = Flask(__name__)
app.config.from_object(env_config)
if PYMONGO2:
app.config['MONGODB_SETTINGS']['use_greenlets'] = True
app.config['LOGGER_NAME'] = 'mongo_mail_web'
app._logger = _configure_logging(debug=app.debug, prog_name='mongo_mail_web')
extensions.db.init_app(app)
_configure_mongolock(app)
extensions.moment.init_app(app)
_configure_i18n(app)
_configure_security(app)
admin.init_admin(app, url='/')
_configure_processors(app)
_configure_themes(app)
geoip_tools.configure_geoip()
if app.debug:
from flask_debugtoolbar import DebugToolbarExtension
DebugToolbarExtension(app)
_configure_sentry(app)
if app.config.get('SESSION_ENGINE_ENABLE', False):
from flask_mongoengine import MongoEngineSessionInterface
app.session_interface = MongoEngineSessionInterface(extensions.db)
error_handlers(app)
app.wsgi_app = ProxyFix(app.wsgi_app)
tasks.run_all_tasks(completed_pool=app.config.get('TASK_COMPLETED_POOL'),
completed_sleep=app.config.get('TASK_COMPLETED_SLEEP'),
update_metrics_sleep=app.config.get('TASK_UPDATE_METRICS_SLEEP'))
return app
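# Minimal local-run sketch (an assumption for illustration, not part of the
# original module; host and port are arbitrary).
if __name__ == '__main__':
    create_app().run(host='127.0.0.1', port=5000)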
| radical-software/mongo-mail-web | mongo_mail_web/wsgi.py | Python | bsd-3-clause | 11,323 |
import pytest
import tardis.montecarlo.montecarlo_numba.macro_atom as macro_atom
import numpy as np
@pytest.mark.parametrize(
["seed", "expected"],
[(1963, 10015), (1, 9993), (2111963, 17296), (10000, 9993)],
)
def test_macro_atom(
static_packet,
verysimple_numba_plasma,
verysimple_numba_model,
set_seed_fixture,
seed,
expected,
):
set_seed_fixture(seed)
static_packet.initialize_line_id(
verysimple_numba_plasma, verysimple_numba_model
)
activation_level_id = verysimple_numba_plasma.line2macro_level_upper[
static_packet.next_line_id
]
result, transition_type = macro_atom.macro_atom(
activation_level_id,
static_packet.current_shell_id,
verysimple_numba_plasma
)
assert result == expected
assert transition_type == -1 # line transition
| tardis-sn/tardis | tardis/montecarlo/montecarlo_numba/tests/test_macro_atom.py | Python | bsd-3-clause | 851 |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2007-2019 Edgewall Software
# Copyright (C) 2007 Eli Carter <[email protected]>
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.com/license.html.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/.
from trac.core import Component, implements
from trac.perm import IPermissionRequestor
from trac.ticket.api import ITicketActionController
from trac.util.translation import _
revision = "$Rev$"
url = "$URL$"
class DeleteTicketActionController(Component):
"""Delete ticket using a workflow action.
Illustrates how to create an `ITicketActionController` with side-effects.
Add `DeleteTicketActionController` to the workflow option in the
`[ticket]` section in TracIni. When added to the default value of
`workflow`, the line will look like this:
{{{#!ini
workflow = ConfigurableTicketWorkflow,DeleteTicketActionController
}}}
"""
implements(IPermissionRequestor, ITicketActionController)
# IPermissionRequestor methods
def get_permission_actions(self):
return ['TICKET_DELETE']
# ITicketActionController methods
def get_ticket_actions(self, req, ticket):
actions = []
if ticket.exists and 'TICKET_DELETE' in req.perm(ticket.resource):
actions.append((0, 'delete'))
return actions
def get_all_status(self):
return []
def render_ticket_action_control(self, req, ticket, action):
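        # ITicketActionController expects a (label, control, hint) triple
        # here; this action renders no extra form control, hence None.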
return 'delete', None, _("The ticket will be deleted.")
def get_ticket_changes(self, req, ticket, action):
return {}
def apply_action_side_effects(self, req, ticket, action):
if action == 'delete':
ticket.delete()
| rbaumg/trac | sample-plugins/workflow/DeleteTicket.py | Python | bsd-3-clause | 2,006 |
"""Timeseries model using FSL's gaussian least squares."""
import re
import os.path as op
import numpy as np
from scipy import stats, signal
import pandas as pd
import nibabel as nib
import matplotlib.pyplot as plt
from moss import glm
from moss.mosaic import Mosaic
import seaborn as sns
from nipype import Node, MapNode, Workflow, IdentityInterface
from nipype.interfaces import fsl
from nipype.interfaces.base import (BaseInterface,
BaseInterfaceInputSpec,
InputMultiPath, OutputMultiPath,
TraitedSpec, File, traits,
isdefined)
import lyman
from lyman.tools import ManyOutFiles, SaveParameters, nii_to_png
def create_timeseries_model_workflow(name="model", exp_info=None):
# Default experiment parameters for generating graph image, testing, etc.
if exp_info is None:
exp_info = lyman.default_experiment_parameters()
# Define constant inputs
inputs = ["realign_file", "nuisance_file", "artifact_file", "timeseries"]
# Possibly add the design and regressor files to the inputs
if exp_info["design_name"] is not None:
inputs.append("design_file")
if exp_info["regressor_file"] is not None:
inputs.append("regressor_file")
# Define the workflow inputs
inputnode = Node(IdentityInterface(inputs), "inputs")
# Set up the experimental design
modelsetup = MapNode(ModelSetup(exp_info=exp_info),
["timeseries", "realign_file",
"nuisance_file", "artifact_file"],
"modelsetup")
# For some nodes, make it possible to request extra memory
mem_request = {"qsub_args": "-l h_vmem=%dG" % exp_info["memory_request"]}
# Use film_gls to estimate the timeseries model
modelestimate = MapNode(fsl.FILMGLS(smooth_autocorr=True,
mask_size=5,
threshold=100),
["design_file", "in_file", "tcon_file"],
"modelestimate")
modelestimate.plugin_args = mem_request
# Compute summary statistics about the model fit
modelsummary = MapNode(ModelSummary(),
["design_matrix_pkl",
"timeseries",
"pe_files"],
"modelsummary")
modelsummary.plugin_args = mem_request
    # Save the experiment info for this run
saveparams = MapNode(SaveParameters(exp_info=exp_info),
"in_file", "saveparams")
# Report on the results of the model
# Note: see below for a conditional iterfield
modelreport = MapNode(ModelReport(),
["timeseries", "sigmasquareds_file",
"tsnr_file", "r2_files"],
"modelreport")
# Define the workflow outputs
outputnode = Node(IdentityInterface(["results",
"copes",
"varcopes",
"zstats",
"r2_files",
"ss_files",
"tsnr_file",
"report",
"design_mat",
"contrast_mat",
"design_pkl",
"design_report",
"json_file"]),
"outputs")
# Define the workflow and connect the nodes
model = Workflow(name=name)
model.connect([
(inputnode, modelsetup,
[("realign_file", "realign_file"),
("nuisance_file", "nuisance_file"),
("artifact_file", "artifact_file"),
("timeseries", "timeseries")]),
(inputnode, modelestimate,
[("timeseries", "in_file")]),
(inputnode, saveparams,
[("timeseries", "in_file")]),
(modelsetup, modelestimate,
[("design_matrix_file", "design_file"),
("contrast_file", "tcon_file")]),
(modelsetup, modelsummary,
[("design_matrix_pkl", "design_matrix_pkl")]),
(inputnode, modelsummary,
[("timeseries", "timeseries")]),
(modelestimate, modelsummary,
[("param_estimates", "pe_files")]),
(inputnode, modelreport,
[("timeseries", "timeseries")]),
(modelestimate, modelreport,
[("sigmasquareds", "sigmasquareds_file")]),
(modelsummary, modelreport,
[("r2_files", "r2_files"),
("tsnr_file", "tsnr_file")]),
(modelsetup, outputnode,
[("design_matrix_file", "design_mat"),
("contrast_file", "contrast_mat"),
("design_matrix_pkl", "design_pkl"),
("report", "design_report")]),
(saveparams, outputnode,
[("json_file", "json_file")]),
(modelestimate, outputnode,
[("results_dir", "results"),
("copes", "copes"),
("varcopes", "varcopes"),
("zstats", "zstats")]),
(modelsummary, outputnode,
[("r2_files", "r2_files"),
("ss_files", "ss_files"),
("tsnr_file", "tsnr_file")]),
(modelreport, outputnode,
[("out_files", "report")]),
])
if exp_info["design_name"] is not None:
model.connect(inputnode, "design_file",
modelsetup, "design_file")
if exp_info["regressor_file"] is not None:
model.connect(inputnode, "regressor_file",
modelsetup, "regressor_file")
if exp_info["contrasts"]:
model.connect(modelestimate, "zstats",
modelreport, "zstat_files")
modelreport.iterfield.append("zstat_files")
return model, inputnode, outputnode
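# Wiring sketch (illustrative names and paths, not part of lyman's API):
#     wf, innode, outnode = create_timeseries_model_workflow(exp_info=exp_info)
#     innode.inputs.timeseries = ['run_1.nii.gz']  # hypothetical input file
#     wf.base_dir = '/tmp/work'
#     wf.run()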
# =========================================================================== #
class ModelSetupInput(BaseInterfaceInputSpec):
exp_info = traits.Dict()
timeseries = File(exists=True)
design_file = File(exists=True)
realign_file = File(exists=True)
nuisance_file = File(exists=True)
artifact_file = File(exists=True)
regressor_file = File(exists=True)
class ModelSetupOutput(TraitedSpec):
design_matrix_file = File(exists=True)
contrast_file = File(exists=True)
design_matrix_pkl = File(exists=True)
report = OutputMultiPath(File(exists=True))
class ModelSetup(BaseInterface):
input_spec = ModelSetupInput
output_spec = ModelSetupOutput
def _run_interface(self, runtime):
# Get all the information for the design
design_kwargs = self.build_design_information()
# Initialize the design matrix object
X = glm.DesignMatrix(**design_kwargs)
# Report on the design
self.design_report(self.inputs.exp_info, X, design_kwargs)
# Write out the design object as a pkl to pass to the report function
X.to_pickle("design.pkl")
# Finally, write out the design files in FSL format
X.to_fsl_files("design", self.inputs.exp_info["contrasts"])
return runtime
def build_design_information(self):
# Load in the design information
exp_info = self.inputs.exp_info
tr = self.inputs.exp_info["TR"]
# Derive the length of the scan and run number from the timeseries
ntp = nib.load(self.inputs.timeseries).shape[-1]
        run = int(re.search(r"run_(\d+)", self.inputs.timeseries).group(1))
# Get the experimental design
if isdefined(self.inputs.design_file):
design = pd.read_csv(self.inputs.design_file)
design = design[design["run"] == run]
else:
design = None
# Get confound information to add to the model
confounds = []
sources = exp_info["confound_sources"]
bad_sources = set(sources) - set(["motion", "wm", "brain"])
if bad_sources:
msg = ("Invalid confound source specification: {}"
.format(list(bad_sources)))
raise ValueError(msg)
# Get the motion correction parameters
if "motion" in sources:
realign = pd.read_csv(self.inputs.realign_file)
realign = realign.filter(regex="rot|trans").apply(stats.zscore)
confounds.append(realign)
# Get the anatomical nuisance sources
nuisance = pd.read_csv(self.inputs.nuisance_file).apply(stats.zscore)
if "wm" in sources:
wm = nuisance.filter(regex="wm")
confounds.append(wm)
if "brain" in sources:
brain = nuisance["brain"]
confounds.append(brain)
# Combine the different confound sources
if confounds:
confounds = pd.concat(confounds, axis=1)
else:
confounds = None
# Get the image artifacts
if exp_info["remove_artifacts"]:
artifacts = pd.read_csv(self.inputs.artifact_file).max(axis=1)
else:
artifacts = None
# Get the additional model regressors
if isdefined(self.inputs.regressor_file):
regressors = pd.read_csv(self.inputs.regressor_file)
regressors = regressors[regressors["run"] == run]
regressors = regressors.drop("run", axis=1)
if exp_info["regressor_names"] is not None:
regressors = regressors[exp_info["regressor_names"]]
regressors.index = np.arange(ntp) * tr
else:
regressors = None
# Set up the HRF model
hrf = getattr(glm, exp_info["hrf_model"])
hrf = hrf(exp_info["temporal_deriv"], tr, **exp_info["hrf_params"])
# Build a dict of keyword arguments for the design matrix
design_kwargs = dict(design=design,
hrf_model=hrf,
ntp=ntp,
tr=tr,
confounds=confounds,
artifacts=artifacts,
regressors=regressors,
condition_names=exp_info["condition_names"],
confound_pca=exp_info["confound_pca"],
hpf_cutoff=exp_info["hpf_cutoff"])
return design_kwargs
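    # For reference, the exp_info dict consumed above is expected (judging by
    # the keys accessed in this method) to provide at least: TR,
    # confound_sources, remove_artifacts, regressor_names, hrf_model,
    # temporal_deriv, hrf_params, condition_names, confound_pca, hpf_cutoff,
    # and contrasts. A minimal illustrative sketch (all values hypothetical):
    #
    #   exp_info = dict(TR=2.0,
    #                   confound_sources=["motion"],
    #                   remove_artifacts=True,
    #                   regressor_names=None,
    #                   hrf_model="GammaDifferenceHRF",  # any class in glm
    #                   temporal_deriv=False,
    #                   hrf_params={},
    #                   condition_names=None,
    #                   confound_pca=False,
    #                   hpf_cutoff=128,
    #                   contrasts=[("stim", ["stim"], [1])])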
def design_report(self, exp_info, X, design_kwargs):
"""Generate static images summarizing the design."""
# Plot the design itself
design_png = op.abspath("design.png")
X.plot(fname=design_png, close=True)
with sns.axes_style("whitegrid"):
# Plot the eigenvalue spectrum
svd_png = op.abspath("design_singular_values.png")
X.plot_singular_values(fname=svd_png, close=True)
# Plot the correlations between design elements and confounds
corr_png = op.abspath("design_correlation.png")
if design_kwargs["design"] is None:
with open(corr_png, "wb"):
pass
else:
X.plot_confound_correlation(fname=corr_png, close=True)
        # Build a list of images summarizing the model
report = [design_png, corr_png, svd_png]
# Now plot the information loss from the high-pass filter
design_kwargs["hpf_cutoff"] = None
X_unfiltered = glm.DesignMatrix(**design_kwargs)
tr = design_kwargs["tr"]
ntp = design_kwargs["ntp"]
# Plot for each contrast
for i, (name, cols, weights) in enumerate(exp_info["contrasts"], 1):
# Compute the contrast predictors
C = X.contrast_vector(cols, weights)
y_filt = X.design_matrix.dot(C)
y_unfilt = X_unfiltered.design_matrix.dot(C)
# Compute the spectral density for filtered and unfiltered
fs, pxx_filt = signal.welch(y_filt, 1. / tr, nperseg=ntp)
fs, pxx_unfilt = signal.welch(y_unfilt, 1. / tr, nperseg=ntp)
# Draw the spectral density
with sns.axes_style("whitegrid"):
f, ax = plt.subplots(figsize=(9, 3))
ax.fill_between(fs, pxx_unfilt, color="#C41E3A")
ax.axvline(1.0 / exp_info["hpf_cutoff"], c=".3", ls=":", lw=1.5)
ax.fill_between(fs, pxx_filt, color=".5")
# Label the plot
ax.set(xlabel="Frequency",
ylabel="Spectral Density",
xlim=(0, .15))
plt.tight_layout()
# Save the plot
fname = op.abspath("cope%d_filter.png" % i)
f.savefig(fname, dpi=100)
plt.close(f)
report.append(fname)
# Store the report files for later
self.report_files = report
def _list_outputs(self):
outputs = self._outputs().get()
outputs["report"] = self.report_files
outputs["contrast_file"] = op.abspath("design.con")
outputs["design_matrix_pkl"] = op.abspath("design.pkl")
outputs["design_matrix_file"] = op.abspath("design.mat")
return outputs
class ModelSummaryInput(BaseInterfaceInputSpec):
design_matrix_pkl = File(exists=True)
timeseries = File(exists=True)
pe_files = InputMultiPath(File(exists=True))
class ModelSummaryOutput(TraitedSpec):
r2_files = OutputMultiPath(File(exists=True))
ss_files = OutputMultiPath(File(exists=True))
tsnr_file = File(exists=True)
class ModelSummary(BaseInterface):
input_spec = ModelSummaryInput
output_spec = ModelSummaryOutput
def _run_interface(self, runtime):
# Load the design matrix object
X = glm.DesignMatrix.from_pickle(self.inputs.design_matrix_pkl)
# Load and de-mean the timeseries
ts_img = nib.load(self.inputs.timeseries)
ts_aff, ts_header = ts_img.get_affine(), ts_img.get_header()
y = ts_img.get_data()
ybar = y.mean(axis=-1)[..., np.newaxis]
y -= ybar
self.y = y
# Store the image attributes
self.affine = ts_aff
self.header = ts_header
# Load the parameter estimates, make 4D, and concatenate
pes = [nib.load(f).get_data() for f in self.inputs.pe_files]
pes = [pe[..., np.newaxis] for pe in pes]
pes = np.concatenate(pes, axis=-1)
# Compute and save the total sum of squares
self.sstot = np.sum(np.square(y), axis=-1)
self.save_image(self.sstot, "sstot")
# Compute the full model r squared
yhat_full = self.dot_by_slice(X, pes)
ss_full, r2_full = self.compute_r2(yhat_full)
self.save_image(ss_full, "ssres_full")
self.save_image(r2_full, "r2_full")
del yhat_full, r2_full
# Compute the main model r squared
yhat_main = self.dot_by_slice(X, pes, "main")
ss_main, r2_main = self.compute_r2(yhat_main)
self.save_image(ss_main, "ssres_main")
self.save_image(r2_main, "r2_main")
del yhat_main, r2_main
# Compute the confound model r squared
yhat_confound = self.dot_by_slice(X, pes, "confound")
_, r2_confound = self.compute_r2(yhat_confound)
self.save_image(r2_confound, "r2_confound")
del yhat_confound, r2_confound
# Compute and save the residual tSNR
        std = np.sqrt(ss_full / y.shape[-1])
tsnr = np.squeeze(ybar) / std
tsnr = np.nan_to_num(tsnr)
self.save_image(tsnr, "tsnr")
return runtime
def save_image(self, data, fname):
"""Save data to the output structure."""
img = nib.Nifti1Image(data, self.affine, self.header)
img.to_filename(fname + ".nii.gz")
def dot_by_slice(self, X, pes, component=None):
"""Broadcast a dot product by image slices to balance speed/memory."""
if component is not None:
pes = pes * getattr(X, component + "_vector").T[np.newaxis,
np.newaxis, :, :]
# Set up the output data structure
n_x, n_y, n_z, n_pe = pes.shape
n_t = X.design_matrix.shape[0]
out = np.empty((n_x, n_y, n_z, n_t))
# Do the dot product, broadcasted for each Z slice
for k in range(n_z):
slice_pe = pes[:, :, k, :].reshape(-1, n_pe).T
slice_dot = X.design_matrix.values.dot(slice_pe)
out[:, :, k, :] = slice_dot.T.reshape(n_x, n_y, n_t)
return out
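    # Rough memory intuition for the slice loop above (shapes illustrative):
    # for pes of shape (91, 109, 91, n_pe) and n_t ~ 200 frames, a single
    # full-volume dot product would materialize large temporaries, whereas
    # iterating over the n_z slices only ever holds one (n_x * n_y, n_t)
    # block at a time on top of the preallocated output array.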
def compute_r2(self, yhat):
"""Efficiently compute the coefficient of variation."""
ssres = np.zeros_like(self.sstot)
n_frames = yhat.shape[-1]
for tr in xrange(n_frames):
ssres += np.square(yhat[..., tr] - self.y[..., tr])
r2 = 1 - ssres / self.sstot
return ssres, r2
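    # The accumulation above is the usual coefficient of determination,
    #   R^2 = 1 - SS_res / SS_tot,  with  SS_res = sum_t (yhat_t - y_t)^2,
    # computed frame-by-frame so that the full 4D residual array is never
    # materialized alongside y and yhat.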
def _list_outputs(self):
outputs = self._outputs().get()
outputs["r2_files"] = [op.abspath("r2_full.nii.gz"),
op.abspath("r2_main.nii.gz"),
op.abspath("r2_confound.nii.gz")]
outputs["ss_files"] = [op.abspath("sstot.nii.gz"),
op.abspath("ssres_full.nii.gz"),
op.abspath("ssres_main.nii.gz")]
outputs["tsnr_file"] = op.abspath("tsnr.nii.gz")
return outputs
class ModelReportInput(BaseInterfaceInputSpec):
timeseries = File(exists=True)
sigmasquareds_file = File(exists=True)
tsnr_file = File(exists=True)
zstat_files = InputMultiPath(File(exists=True))
r2_files = InputMultiPath(File(exists=True))
class ModelReport(BaseInterface):
input_spec = ModelReportInput
output_spec = ManyOutFiles
def _run_interface(self, runtime):
# Load the sigmasquareds and use it to infer the model mask
var_img = nib.load(self.inputs.sigmasquareds_file).get_data()
self.mask = (var_img > 0).astype(np.int16)
# Load the timeseries and take the mean over time for a background
ts_img = nib.load(self.inputs.timeseries)
self.mean = nib.Nifti1Image(ts_img.get_data().mean(axis=-1),
ts_img.get_affine(),
ts_img.get_header())
# Set up the output list
self.out_files = []
# Plot the data
self.plot_residuals()
self.plot_rsquareds()
self.plot_tsnr()
if isdefined(self.inputs.zstat_files):
self.plot_zstats()
return runtime
def plot_residuals(self):
"""Plot the variance of the model residuals across time."""
ss = self.inputs.sigmasquareds_file
m = Mosaic(self.mean, ss, self.mask, step=1)
m.plot_overlay("cube:.8:.2", 0, alpha=.6, fmt="%d")
png_name = nii_to_png(ss)
m.savefig(png_name)
m.close()
self.out_files.append(png_name)
def plot_tsnr(self):
tsnr = self.inputs.tsnr_file
m = Mosaic(self.mean, tsnr, self.mask, step=1)
m.plot_overlay("cube:1.9:.5", 0, alpha=1, fmt="%d")
png_name = nii_to_png(tsnr)
m.savefig(png_name)
m.close()
self.out_files.append(png_name)
def plot_rsquareds(self):
"""Plot the full, main, and confound R squared maps."""
cmaps = ["cube:2:0", "cube:2.6:0", "cube:1.5:0"]
for r2_file, cmap in zip(self.inputs.r2_files, cmaps):
m = Mosaic(self.mean, r2_file, self.mask, step=1)
m.plot_overlay(cmap, 0, alpha=.6)
png_name = nii_to_png(r2_file)
m.savefig(png_name)
m.close()
self.out_files.append(png_name)
def plot_zstats(self):
"""Plot the positive and negative z stats with a low threshold."""
for z_file in self.inputs.zstat_files:
m = Mosaic(self.mean, z_file, self.mask, step=1)
m.plot_activation(pos_cmap="Reds_r", neg_cmap="Blues",
thresh=1.7, alpha=.85)
png_name = nii_to_png(z_file)
m.savefig(png_name)
m.close()
self.out_files.append(png_name)
def _list_outputs(self):
outputs = self._outputs().get()
outputs["out_files"] = self.out_files
return outputs
| tuqc/lyman | lyman/workflows/model.py | Python | bsd-3-clause | 20,567 |
import json
import logging
import os
from django import forms
from django.conf.urls import url
from django.contrib import admin
from django.contrib import messages
from django.contrib.gis.admin import OSMGeoAdmin
from django.core.cache import cache
from django.shortcuts import render
from django.utils.html import format_html
from django_celery_beat.models import IntervalSchedule, CrontabSchedule
from eventkit_cloud.jobs.forms import RegionForm, RegionalPolicyForm
from eventkit_cloud.jobs.models import (
ExportFormat,
Projection,
Job,
Region,
RegionalPolicy,
RegionalJustification,
DataProvider,
DataProviderType,
DatamodelPreset,
License,
DataProviderStatus,
DataProviderTask,
JobPermission,
clean_config,
)
from eventkit_cloud.utils.ogcapi_process import get_process_formats
logger = logging.getLogger(__name__)
admin.site.register(ExportFormat)
admin.site.register(Projection)
admin.site.register(DataProviderType)
admin.site.register(DatamodelPreset)
admin.site.register(License)
admin.site.register(DataProviderTask)
class JobAdmin(OSMGeoAdmin):
"""
Admin model for editing Jobs in the admin interface.
"""
search_fields = ["uid", "name", "user__username", "region__name"]
list_display = ["uid", "name", "user", "region"]
readonly_fields = ["user", "name", "description", "event"]
exclude = [
"the_geom",
"the_geom_webmercator",
"original_selection",
"the_geog",
"data_provider_tasks",
"json_tags",
"preset",
]
actions = ["select_exports"]
update_template = "admin/update_regions.html"
update_complete_template = "admin/update_complete.html"
    def select_exports(self, request, queryset=None):
        """
        Select exports to update. Works both as an admin action (which
        passes the selected queryset) and as a plain admin view; the ids
        are re-read from the POST data either way.
        """
selected = ",".join(request.POST.getlist(admin.ACTION_CHECKBOX_NAME))
regions = Region.objects.all()
# noinspection PyProtectedMember
return render(
request,
self.update_template,
{"regions": regions, "selected": selected, "opts": self.model._meta},
)
select_exports.short_description = "Assign a region to the selected exports"
def update_exports(self, request):
"""
Update selected exports.
"""
selected = request.POST.get("selected", "")
num_selected = len(selected.split(","))
region_uid = request.POST.get("region", "")
region = Region.objects.get(uid=region_uid)
for selected_id in selected.split(","):
export = Job.objects.get(id=selected_id)
export.region = region
export.save()
messages.success(request, "{0} exports updated.".format(num_selected))
# noinspection PyProtectedMember
return render(
request,
self.update_complete_template,
{"num_selected": len(selected.split(",")), "region": region.name, "opts": self.model._meta},
)
def get_urls(self):
urls = super(JobAdmin, self).get_urls()
update_urls = [
url(r"^select/$", self.admin_site.admin_view(self.select_exports)),
url(
r"^update/$",
self.admin_site.admin_view(self.update_exports),
name="update_regions",
),
]
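        # The custom URLs are listed ahead of the stock admin URLs so they
        # are matched before the catch-all object-detail pattern.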
return update_urls + urls
class ExportConfigAdmin(admin.ModelAdmin):
"""
Admin model for editing export configurations in the admin interface.
"""
search_fields = ["uid", "name", "user__username"]
list_display = ["uid", "name", "user", "config_type", "published", "created_at"]
class DataProviderForm(forms.ModelForm):
"""
Admin form for editing export providers in the admin interface.
"""
class Meta:
model = DataProvider
fields = [
"name",
"slug",
"label",
"url",
"preview_url",
"service_copyright",
"service_description",
"data_type",
"layer",
"export_provider_type",
"max_selection",
"level_from",
"level_to",
"config",
"user",
"license",
"zip",
"display",
"attribute_class",
"the_geom",
]
def clean_config(self):
config = self.cleaned_data.get("config")
service_type = self.cleaned_data.get("export_provider_type").type_name
if service_type in ["wms", "wmts", "tms", "arcgis-raster"]:
from eventkit_cloud.utils.mapproxy import (
MapproxyGeopackage,
ConfigurationError,
)
service = MapproxyGeopackage(
layer=self.cleaned_data.get("layer"),
service_type=self.cleaned_data.get("export_provider_type"),
config=config,
)
try:
service.get_check_config()
except ConfigurationError as e:
raise forms.ValidationError(str(e))
elif service_type in ["osm", "osm-generic"]:
if not config:
raise forms.ValidationError("Configuration is required for OSM data providers")
from eventkit_cloud.feature_selection.feature_selection import FeatureSelection
cleaned_config = clean_config(config)
feature_selection = FeatureSelection(cleaned_config)
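            # Accessing .valid runs the validation pass and populates .errors.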
feature_selection.valid
if feature_selection.errors:
raise forms.ValidationError("Invalid configuration: {0}".format(feature_selection.errors))
elif service_type in ["ogcapi-process"]:
if not config:
raise forms.ValidationError("Configuration is required for OGC API Process")
cleaned_config = clean_config(config, return_dict=True)
ogcapi_process = cleaned_config.get("ogcapi_process")
if not ogcapi_process:
raise forms.ValidationError("OGC API Process requires an ogcapi_process key with valid configuration")
area = ogcapi_process.get("area")
if not area:
raise forms.ValidationError("OGC API Process requires an area key with a name and a type.")
if not area.get("name"):
raise forms.ValidationError("OGC API Process requires the name of the field to submit the area.")
if area.get("type") not in ["geojson", "bbox", "wkt"]:
raise forms.ValidationError("OGC API Process requires an area type of geojson, bbox, or wkt.")
if not ogcapi_process.get("id"):
raise forms.ValidationError("OGC API Process requires a process id.")
return config
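    # A minimal illustrative YAML config that would satisfy the
    # ogcapi-process checks above (all values are placeholders, not a
    # tested configuration):
    #
    #   ogcapi_process:
    #     id: some-process-id
    #     area:
    #       name: aoi
    #       type: geojson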
def make_display(modeladmin, request, queryset):
queryset.update(display=True)
def make_hidden(modeladmin, request, queryset):
queryset.update(display=False)
class DataProviderAdmin(admin.ModelAdmin):
"""
Admin model for editing export providers in the admin interface.
"""
form = DataProviderForm
list_display = ["name", "slug", "label", "export_provider_type", "attribute_class", "license", "display"]
search_fields = [
"name",
"slug",
"data_type",
"display",
"attribute_class__name",
"export_provider_type__type_name",
"license__name",
]
actions = [make_display, make_hidden]
def save_model(self, request, obj, *args):
super().save_model(request, obj, *args)
provider_caches = cache.get(DataProvider.provider_caches_key)
if provider_caches:
cache.delete_many(provider_caches.keys())
process_formats = get_process_formats(obj, request)
logger.info(f"Process_formats: {process_formats}")
for process_format in process_formats:
export_format, created = ExportFormat.get_or_create(**process_format)
if created:
export_format.options = {"value": export_format.slug, "providers": [obj.slug], "proxy": True}
export_format.supported_projections.add(Projection.objects.get(srid=4326))
else:
providers = export_format.options.get("providers")
if providers:
providers = list(set(providers + [obj.slug]))
export_format.options["providers"] = providers
else:
export_format.options = {"value": export_format.slug, "providers": [obj.slug], "proxy": True}
export_format.save()
# The reason for these empty classes is to remove IntervalSchedule and CrontabSchedule from the admin page. The easiest
# way to do this is to unregister them using admin.site.unregister, but that also means that you can't use the plus
# button to add new ones on lists displayed on admin pages of other models (in this case, PeriodicTask). Having the
# model be registered but hidden prevents that option from being removed.
class IntervalScheduleAdmin(admin.ModelAdmin):
def get_model_perms(self, request):
return {}
class CrontabScheduleAdmin(admin.ModelAdmin):
def get_model_perms(self, request):
return {}
admin.site.unregister(IntervalSchedule)
admin.site.unregister(CrontabSchedule)
class DataProviderStatusAdmin(admin.ModelAdmin):
"""
Status information for Data Providers
"""
    def color_status(self, obj):
        # Let format_html do the interpolation so the status value is escaped.
        if obj.status == "SUCCESS":
            return format_html(
                '<div style="width:100%; height:100%; background-color:rgba(0, 255, 0, 0.3);">{}</div>', obj.status
            )
        elif obj.status.startswith("WARN"):
            return format_html(
                '<div style="width:100%; height:100%; background-color:rgba(255, 255, 0, 0.3);">{}</div>', obj.status
            )
        return format_html(
            '<div style="width:100%; height:100%; background-color:rgba(255, 0, 0, 0.3);">{}</div>', obj.status
        )
color_status.short_description = "status"
model = DataProviderStatus
readonly_fields = (
"status",
"status_type",
"message",
"last_check_time",
"related_provider",
)
list_display = (
"color_status",
"status_type",
"message",
"last_check_time",
"related_provider",
)
list_filter = ("related_provider", "status", "status_type", "last_check_time")
def has_add_permission(self, request, obj=None):
return False
def get_example_from_file(file_path: str):
with open(os.path.join(os.path.dirname(__file__), file_path)) as json_file:
return json.dumps(json.load(json_file))
class RegionAdmin(admin.ModelAdmin):
model = Region
list_display = ("uid", "name")
form = RegionForm
class RegionalPolicyAdmin(admin.ModelAdmin):
model = RegionalPolicy
form = RegionalPolicyForm
list_display = ("uid", "name", "region")
fieldsets = (
(None, {"fields": ["name", "region", "providers"]}),
(
None,
{
"fields": [
"policies",
"policy_title_text",
"policy_header_text",
"policy_footer_text",
"policy_cancel_text",
"policy_cancel_button_text",
],
"description": "The policy field expects a JSON structure with a list of policy objects. "
"Each policy object must contain a title and a description. See the example below."
f"<br /> <br /> {get_example_from_file('examples/policies_example.json')}",
},
),
(
None,
{
"fields": ["justification_options"],
"description": "The justification options field expects a JSON structure with a "
"list of option objects. Each option object must have an integer id, string name "
"and boolean display. Options may also have suboptions. "
"Suboptions can be of type text or dropdown. See the example below."
f"<br /> <br /> {get_example_from_file('examples/justification_options_example.json')}",
},
),
)
class RegionalJustificationAdmin(admin.ModelAdmin):
model = RegionalJustification
list_display = ("uid", "justification_id", "justification_name", "regional_policy", "user")
def get_readonly_fields(self, request, obj=None):
if obj:
return [field.name for field in obj._meta.get_fields()]
return self.readonly_fields
def has_change_permission(self, request, obj=None):
return False
def has_delete_permission(self, request, obj=None):
return False
# register the new admin models
admin.site.register(IntervalSchedule, IntervalScheduleAdmin)
admin.site.register(CrontabSchedule, CrontabScheduleAdmin)
admin.site.register(Job, JobAdmin)
admin.site.register(DataProvider, DataProviderAdmin)
admin.site.register(DataProviderStatus, DataProviderStatusAdmin)
admin.site.register(Region, RegionAdmin)
admin.site.register(RegionalPolicy, RegionalPolicyAdmin)
admin.site.register(RegionalJustification, RegionalJustificationAdmin)
admin.site.register(JobPermission)
| terranodo/eventkit-cloud | eventkit_cloud/jobs/admin.py | Python | bsd-3-clause | 13,376 |
from django.http import Http404
from django.template.response import TemplateResponse
from django.urls import URLResolver, re_path
from django.urls.resolvers import RegexPattern
from wagtail.core.models import Page
from wagtail.core.url_routing import RouteResult
_creation_counter = 0
def route(pattern, name=None):
def decorator(view_func):
global _creation_counter
_creation_counter += 1
# Make sure page has _routablepage_routes attribute
if not hasattr(view_func, '_routablepage_routes'):
view_func._routablepage_routes = []
# Add new route to view
view_func._routablepage_routes.append((
re_path(pattern, view_func, name=(name or view_func.__name__)),
_creation_counter,
))
return view_func
return decorator
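# A sketch of typical usage (names illustrative): decorating a view method on
# a RoutablePageMixin subclass registers it as an extra route on that page.
#
#   class EventPage(RoutablePageMixin, Page):
#       @route(r'^year/(\d{4})/$')
#       def events_for_year(self, request, year):
#           ...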
class RoutablePageMixin:
"""
This class can be mixed in to a Page model, allowing extra routes to be
added to it.
"""
@route(r'^$')
def index_route(self, request, *args, **kwargs):
request.is_preview = getattr(request, 'is_preview', False)
return TemplateResponse(
request,
self.get_template(request, *args, **kwargs),
self.get_context(request, *args, **kwargs)
)
@classmethod
def get_subpage_urls(cls):
routes = []
# Loop over this class's defined routes, in method resolution order.
# Routes defined in the immediate class take precedence, followed by
# immediate superclass and so on
for klass in cls.__mro__:
routes_for_class = []
for val in klass.__dict__.values():
if hasattr(val, '_routablepage_routes'):
routes_for_class.extend(val._routablepage_routes)
# sort routes by _creation_counter so that ones earlier in the class definition
# take precedence
routes_for_class.sort(key=lambda route: route[1])
routes.extend(route[0] for route in routes_for_class)
return tuple(routes)
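    # Example of the precedence this produces: if a page class and one of its
    # bases both register a route for r'^archive/$', the subclass's view wins
    # because cls.__mro__ lists the subclass before its bases.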
@classmethod
def get_resolver(cls):
if '_routablepage_urlresolver' not in cls.__dict__:
subpage_urls = cls.get_subpage_urls()
cls._routablepage_urlresolver = URLResolver(RegexPattern(r'^/'), subpage_urls)
return cls._routablepage_urlresolver
def reverse_subpage(self, name, args=None, kwargs=None):
"""
This method takes a route name/arguments and returns a URL path.
"""
args = args or []
kwargs = kwargs or {}
return self.get_resolver().reverse(name, *args, **kwargs)
def resolve_subpage(self, path):
"""
This method takes a URL path and finds the view to call.
"""
view, args, kwargs = self.get_resolver().resolve(path)
# Bind the method
view = view.__get__(self, type(self))
return view, args, kwargs
def route(self, request, path_components):
"""
This hooks the subpage URLs into Wagtail's routing.
"""
if self.live:
try:
path = '/'
if path_components:
path += '/'.join(path_components) + '/'
view, args, kwargs = self.resolve_subpage(path)
return RouteResult(self, args=(view, args, kwargs))
except Http404:
pass
return super().route(request, path_components)
def serve(self, request, view=None, args=None, kwargs=None):
if args is None:
args = []
if kwargs is None:
kwargs = {}
if view is None:
return super().serve(request, *args, **kwargs)
return view(request, *args, **kwargs)
def render(self, request, *args, template=None, context_overrides=None, **kwargs):
"""
.. versionadded:: 2.11
This method replicates what ``Page.serve()`` usually does when ``RoutablePageMixin``
is not used. By default, ``Page.get_template()`` is called to derive the template
to use for rendering, and ``Page.get_context()`` is always called to gather the
data to be included in the context.
You can use the ``context_overrides`` keyword argument as a shortcut to override or
add new values to the context. For example:
.. code-block:: python
@route(r'^$') # override the default route
def upcoming_events(self, request):
return self.render(request, context_overrides={
'title': "Current events",
'events': EventPage.objects.live().future(),
})
You can also use the ``template`` argument to specify an alternative
template to use for rendering. For example:
.. code-block:: python
@route(r'^past/$')
def past_events(self, request):
return self.render(
request,
context_overrides={
'title': "Past events",
'events': EventPage.objects.live().past(),
},
template="events/event_index_historical.html",
)
"""
if template is None:
template = self.get_template(request, *args, **kwargs)
context = self.get_context(request, *args, **kwargs)
context.update(context_overrides or {})
return TemplateResponse(request, template, context)
def serve_preview(self, request, mode_name):
view, args, kwargs = self.resolve_subpage('/')
request.is_preview = True
request.preview_mode = mode_name
return view(request, *args, **kwargs)
class RoutablePage(RoutablePageMixin, Page):
"""
This class extends Page by adding methods which allows extra routes to be
added to it.
"""
class Meta:
abstract = True
| torchbox/wagtail | wagtail/contrib/routable_page/models.py | Python | bsd-3-clause | 5,962 |
""" Code for calculations of P(DM|z) and P(z|DM)"""
import numpy as np
import os
from pkg_resources import resource_filename
from scipy.stats import norm, lognorm
from frb.dm import igm
from frb.dm import cosmic
from frb import defs
from IPython import embed
class P_DM_z(object):
pass
def prob_DMcosmic_FRB(frb, DM_min=0., DM_max=5000., step=1.,
ISMfrac=0.10, DM_MWhalo=50.):
"""
    Generate P(DM_cosmic) for an input FRB
Args:
frb (:class:`frb.frb.FRB`):
DM_min (float, optional):
            Lowest DM for the calculation
DM_max (float, optional):
            Highest DM for the calculation
step (float, optional):
Step size of DM array in units of pc/cm**3
ISMfrac (float, optional):
Fraction of DM_ISM to adopt as the 1-sigma error
DM_MWhalo (float, optional):
Fixed value to use for the MW halo
Returns:
tuple: numpy.ndarray, numpy.ndarray
DM_cosmic values (units of pc/cm**3), P(DM_cosmic) normalized to unity
"""
# Init
DMcosmics = np.arange(DM_min, DM_max+step, step)
P_DM_cosmic = np.zeros_like(DMcosmics)
# ISM
scale = np.pi * ISMfrac * frb.DMISM.value
p_ISM = norm(loc=frb.DMISM.value, scale=scale)
# Pre calculate
DM_ISMs = DMcosmics
pdf_ISM = p_ISM.pdf(DM_ISMs)
# Host
# TODO Should use the MCMC chains to do this right!
# And should fix Omega_b true
exp_u = 68.2 # Median
sigma_host = 0.88 # Median
lognorm_floor=0.
p_host = lognorm(s=sigma_host, loc=lognorm_floor, scale=exp_u)
# Loop time
for kk, DMcosmic in enumerate(DMcosmics):
DM_host = frb.DM.value - DM_MWhalo - DM_ISMs - DMcosmic
# Prob time
Prob = pdf_ISM * p_host.pdf(DM_host*(1+frb.z))
# Sum
P_DM_cosmic[kk] = np.sum(Prob)
# Normalize
P_DM_cosmic = P_DM_cosmic / np.sum(P_DM_cosmic)
# Return
return DMcosmics, P_DM_cosmic
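# Up to normalization, the loop above marginalizes over the ISM contribution:
#   P(DM_cosmic) ~ sum_{DM_ISM} N(DM_ISM; DM_ISM_obs, scale)
#                    * P_host[(DM_FRB - DM_MWhalo - DM_ISM - DM_cosmic)(1 + z)]
# i.e. whatever DM is not Galactic or cosmic is assigned to the lognormal
# host term, shifted into the host frame by the (1 + z) factor.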
def grid_P_DMcosmic_z(beta=3., F=0.31, zvals=None,
DM_cosmics=None,
cosmo=defs.frb_cosmo):
"""
Generate a grid of P(DM_cosmic|z)
Args:
beta (float, optional):
sigma_DM_cosmic parameter
F (float, optional):
Feedback parameter (higher F means weaker feedback)
zvals (np.ndarray, optional):
Redshifts for the grid
        DM_cosmics (np.ndarray, optional):
DMs for the grid
cosmo (optional):
Cosmology
Returns:
tuple: z, DM_cosmic, P(DM_cosmic|z)
"""
# Check
if not np.isclose(beta, 3.):
raise IOError("Not prepared for this beta value (yet)")
# Load
# sigma_DM
f_C0_3 = cosmic.grab_C0_spline()
# Grid
if zvals is None:
zvals = np.linspace(0., 2., 200)
if DM_cosmics is None:
DM_cosmics = np.linspace(1., 5000., 1000)
PDF_grid = np.zeros((DM_cosmics.size, zvals.size))
# Loop
for kk, zval in enumerate(zvals):
        # At z=0, all of the probability sits in the first DM bin
        if zval == 0:
            PDF_grid[0, kk] = 1.
            continue
avgDM = igm.average_DM(zval, cosmo=cosmo).value
# Params
sigma = F / np.sqrt(zval)
C0 = f_C0_3(sigma)
# Delta
Delta = DM_cosmics / avgDM
# PDF time
PDF = cosmic.DMcosmic_PDF(Delta, C0, sigma)
# Normalize
PDF_grid[:,kk] = PDF / np.sum(PDF)
# Return
return zvals, DM_cosmics, PDF_grid
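# Note that each column of the returned grid is normalized over the DM axis,
# so PDF_grid[:, kk] can be read directly as P(DM_cosmic | z = zvals[kk]) on
# the DM_cosmics grid.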
def build_grid_for_repo(outfile:str):
"""
Build a P(DM,z) grid for the Repository
Args:
outfile (str): Path+filename for output file
"""
print("Generating a new PDM_z grid for the Repo")
print("Please be patient (will take a few minutes)....")
#
zvals = np.linspace(0., 4., 200)
z, DM, P_DM_z = grid_P_DMcosmic_z(zvals=zvals)
# Write
np.savez(outfile, z=z, DM=DM, PDM_z=P_DM_z)
print(f"File written: {outfile}")
print("This will be used going forth")
def grab_repo_grid():
"""
Grab the grid from the Repository
This may require the code to build it first!
Returns:
dict: Numpy dict from the npz save file
"""
# File
PDM_z_grid_file = os.path.join(
resource_filename('frb', 'data'), 'DM',
'PDM_z.npz')
# Build?
if not os.path.isfile(PDM_z_grid_file):
build_grid_for_repo(PDM_z_grid_file)
# Load
print(f"Loading P(DM,z) grid from {PDM_z_grid_file}")
sdict = np.load(PDM_z_grid_file)
# Return
    return sdict
| FRBs/FRB | frb/dm/prob_dmz.py | Python | bsd-3-clause | 4572 |
from itertools import chain
from django import forms, VERSION
from django.template import loader
from django.utils.encoding import force_unicode
from django.utils.translation import ugettext, ugettext_lazy
__all__ = (
'TextInput', 'PasswordInput', 'HiddenInput', 'ClearableFileInput',
'FileInput', 'DateInput', 'DateTimeInput', 'TimeInput', 'Textarea',
'CheckboxInput', 'Select', 'NullBooleanSelect', 'SelectMultiple',
'RadioSelect', 'CheckboxSelectMultiple', 'SearchInput', 'RangeInput',
'ColorInput', 'EmailInput', 'URLInput', 'PhoneNumberInput', 'NumberInput',
'IPAddressInput', 'MultiWidget', 'Widget', 'SplitDateTimeWidget',
'SplitHiddenDateTimeWidget', 'MultipleHiddenInput',
)
class Widget(forms.Widget):
pass
class Input(forms.TextInput):
input_type = None
template_name = 'floppyforms/input.html'
is_required = False
def get_context_data(self):
return {}
def get_context(self, name, value, attrs=None, extra_context={}):
context = {
'type': self.input_type,
'name': name,
'hidden': self.is_hidden,
'required': self.is_required,
}
context.update(extra_context)
if value is None:
value = ''
if hasattr(value, '__iter__'):
context['value'] = [force_unicode(v) for v in value]
elif value != '':
context['value'] = force_unicode(value)
context.update(self.get_context_data())
attrs.update(self.attrs)
# for things like "checked", set the value to False so that the
# template doesn't render checked="".
for key, value in attrs.items():
if value == True:
attrs[key] = False
context['attrs'] = attrs
return context
def render(self, name, value, attrs=None, extra_context={}):
context = self.get_context(name, value, attrs=attrs,
extra_context=extra_context)
return loader.render_to_string(self.template_name, context)
class TextInput(Input):
input_type = 'text'
class PasswordInput(Input):
input_type = 'password'
def __init__(self, attrs=None, render_value=False):
super(PasswordInput, self).__init__(attrs)
self.render_value = render_value
def render(self, name, value, attrs=None):
if not self.render_value:
value = None
return super(PasswordInput, self).render(name, value, attrs)
class HiddenInput(Input):
input_type = 'hidden'
is_hidden = True
def get_context_data(self):
ctx = super(HiddenInput, self).get_context_data()
ctx['hidden'] = True
return ctx
class SlugInput(TextInput):
def get_context_data(self):
        self.attrs['pattern'] = r"[-\w]+"
return super(SlugInput, self).get_context_data()
class IPAddressInput(TextInput):
def get_context_data(self):
        self.attrs['pattern'] = (r"(25[0-5]|2[0-4]\d|[0-1]?\d?\d)(\.(25[0-5]|"
                                 r"2[0-4]\d|[0-1]?\d?\d)){3}")
return super(IPAddressInput, self).get_context_data()
class Textarea(Input):
template_name = 'floppyforms/textarea.html'
rows = 10
cols = 40
def __init__(self, attrs=None):
default_attrs = {'cols': self.cols, 'rows': self.rows}
if attrs:
default_attrs.update(attrs)
super(Textarea, self).__init__(default_attrs)
class FileInput(forms.FileInput, Input):
input_type = 'file'
def render(self, name, value, attrs=None):
return super(FileInput, self).render(name, None, attrs=attrs)
if VERSION >= (1, 3):
class ClearableFileInput(FileInput, forms.ClearableFileInput):
template_name = 'floppyforms/clearable_input.html'
initial_text = ugettext_lazy('Currently')
input_text = ugettext_lazy('Change')
clear_checkbox_label = ugettext_lazy('Clear')
def get_context_data(self):
ctx = super(ClearableFileInput, self).get_context_data()
ctx['initial_text'] = self.initial_text
ctx['input_text'] = self.input_text
ctx['clear_checkbox_label'] = self.clear_checkbox_label
return ctx
def render(self, name, value, attrs=None, extra_context={}):
context = self.get_context(name, value, attrs=attrs,
extra_context=extra_context)
ccb_name = self.clear_checkbox_name(name)
context['checkbox_name'] = ccb_name
context['checkbox_id'] = self.clear_checkbox_id(ccb_name)
return loader.render_to_string(self.template_name, context)
else:
class ClearableFileInput(FileInput):
pass
class DateInput(forms.DateInput, Input):
input_type = 'date'
class DateTimeInput(forms.DateTimeInput, Input):
input_type = 'datetime'
class TimeInput(forms.TimeInput, Input):
input_type = 'time'
class SearchInput(Input):
input_type = 'search'
class EmailInput(Input):
input_type = 'email'
class URLInput(Input):
input_type = 'url'
class ColorInput(Input):
input_type = 'color'
class NumberInput(Input):
input_type = 'number'
min = None
max = None
step = None
def __init__(self, attrs=None):
default_attrs = {'min': self.min, 'max': self.max, 'step': self.step}
if attrs:
default_attrs.update(attrs)
super(NumberInput, self).__init__(default_attrs)
class RangeInput(NumberInput):
input_type = 'range'
class PhoneNumberInput(Input):
input_type = 'tel'
class CheckboxInput(forms.CheckboxInput, Input):
input_type = 'checkbox'
    def render(self, name, value, attrs=None):
        attrs = attrs or {}
        try:
            if self.check_test(value):
                # Set "checked" on the per-render attrs rather than mutating
                # self.attrs, which would leak state into later renders.
                attrs['checked'] = ''
        except:  # That bare except is in the Django code...
            pass
        if value not in ('', True, False, None):
            value = force_unicode(value)
        return Input.render(self, name, value, attrs=attrs)
class Select(forms.Select, Input):
template_name = 'floppyforms/select.html'
def render(self, name, value, attrs=None, choices=()):
if value is None:
value = ''
choices = chain(self.choices, choices)
final_choices = []
for option_value, option_label in choices:
final_choices.append((force_unicode(option_value), option_label))
extra = {'choices': final_choices}
return Input.render(self, name, value, attrs=attrs,
extra_context=extra)
class NullBooleanSelect(forms.NullBooleanSelect, Select):
def render(self, name, value, attrs=None, choices=()):
choices = ((u'1', ugettext('Unknown')),
(u'2', ugettext('Yes')),
(u'3', ugettext('No')))
try:
value = {True: u'2', False: u'3', u'2': u'2', u'3': u'3'}[value]
except KeyError:
value = u'1'
return Select.render(self, name, value, attrs, choices=choices)
class SelectMultiple(forms.SelectMultiple, Select):
def get_context_data(self):
ctx = super(SelectMultiple, self).get_context_data()
ctx['multiple'] = True
return ctx
def render(self, name, value, attrs=None, choices=()):
return Select.render(self, name, value, attrs=attrs, choices=choices)
class CheckboxSelectMultiple(SelectMultiple):
template_name = 'floppyforms/checkbox_select.html'
class RadioSelect(forms.RadioSelect, Select):
template_name = 'floppyforms/radio.html'
def render(self, name, value, attrs=None, choices=()):
return Select.render(self, name, value, attrs=attrs, choices=choices)
class MultiWidget(forms.MultiWidget):
pass
class SplitDateTimeWidget(MultiWidget):
def __init__(self, attrs=None, date_format=None, time_format=None):
widgets = (DateInput(attrs=attrs, format=date_format),
TimeInput(attrs=attrs, format=time_format))
super(SplitDateTimeWidget, self).__init__(widgets, attrs)
def decompress(self, value):
if value:
            return [value.date(), value.time().replace(microsecond=0)]
return [None, None]
class SplitHiddenDateTimeWidget(SplitDateTimeWidget):
is_hidden = True
def __init__(self, attrs=None, date_format=None, time_format=None):
super(SplitHiddenDateTimeWidget, self).__init__(attrs, date_format,
time_format)
for widget in self.widgets:
widget.input_type = 'hidden'
widget.is_hidden = True
class MultipleHiddenInput(HiddenInput):
"""<input type="hidden"> for fields that have a list of values"""
def __init__(self, attrs=None, choices=()):
super(MultipleHiddenInput, self).__init__(attrs)
self.choices = choices
def render(self, name, value, attrs=None, choices=()):
if value is None:
value = []
final_attrs = self.build_attrs(attrs, type=self.input_type, name=name)
id_ = final_attrs.get('id', None)
inputs = []
for i, v in enumerate(value):
input_attrs = dict(value=force_unicode(v), **final_attrs)
if id_:
input_attrs['id'] = '%s_%s' % (id_, i)
del input_attrs['type']
del input_attrs['value']
input_ = HiddenInput()
input_.is_required = self.is_required
inputs.append(input_.render(name, force_unicode(v), input_attrs))
return "\n".join(inputs)
| ojii/django-floppyforms | floppyforms/widgets.py | Python | bsd-3-clause | 9,637 |
#!/usr/bin/python
# Copyright (c) 2015, BROCADE COMMUNICATIONS SYSTEMS, INC
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from this
# software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
# THE POSSIBILITY OF SUCH DAMAGE.
"""
@authors: Sergei Garbuzov
@status: Development
@version: 1.1.0
"""
import time
import json
from pysdn.controller.controller import Controller
from pysdn.openflowdev.ofswitch import OFSwitch
from pysdn.common.status import STATUS
from pysdn.common.utils import load_dict_from_file
def of_demo_1():
f = "cfg.yml"
d = {}
if(load_dict_from_file(f, d) is False):
print("Config file '%s' read error: " % f)
exit()
try:
ctrlIpAddr = d['ctrlIpAddr']
ctrlPortNum = d['ctrlPortNum']
ctrlUname = d['ctrlUname']
ctrlPswd = d['ctrlPswd']
# nodeName = d['nodeName']
rundelay = d['rundelay']
except:
print ("Failed to get Controller device attributes")
exit(0)
print ("<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<")
print ("<<< Demo 1 Start")
print ("<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<")
print ("\n")
print ("<<< Creating Controller instance")
time.sleep(rundelay)
ctrl = Controller(ctrlIpAddr, ctrlPortNum, ctrlUname, ctrlPswd, None)
print ("'Controller':")
print ctrl.brief_json()
print "\n"
print ("<<< Get list of OpenFlow nodes connected to the Controller")
time.sleep(rundelay)
result = ctrl.get_openflow_nodes_operational_list()
status = result.get_status()
if(status.eq(STATUS.OK)):
print ("OpenFlow node names (composed as \"openflow:datapathid\"):")
nodenames = result.get_data()
print json.dumps(nodenames, indent=4)
else:
print ("\n")
print ("!!!Demo terminated, reason: %s" % status.brief().lower())
exit(0)
print "\n"
print ("<<< Get generic information about OpenFlow nodes")
time.sleep(rundelay)
for name in nodenames:
ofswitch = OFSwitch(ctrl, name)
result = ofswitch.get_switch_info()
status = result.get_status()
if(status.eq(STATUS.OK)):
print ("'%s' info:" % name)
info = result.get_data()
print json.dumps(info, indent=4)
else:
print ("\n")
print ("!!!Demo terminated, reason: %s" % status.brief().lower())
exit(0)
print ("\n")
print (">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>")
print (">>> Demo End")
print (">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>")
if __name__ == "__main__":
of_demo_1()
| brocade/pysdn | samples/sampleopenflow/demos/demo1.py | Python | bsd-3-clause | 4,024 |
import graphene
from graphene_django.rest_framework.mutation import SerializerMutation
from rest_framework import serializers
# from utils import parse_global_ids
from . import schema
from .utils import parse_global_ids
from django_workflow.models import Workflow, State, StateVariableDef, Transition, Condition, Function, \
FunctionParameter, Callback, CallbackParameter, CurrentObjectState, TransitionLog, StateVariable
from django.db.transaction import atomic
class WorkflowSerializer(serializers.ModelSerializer):
class Meta:
model = Workflow
fields = ['id', 'name', 'initial_prefetch', 'object_type']
class WorkflowMutation(SerializerMutation):
class Meta:
serializer_class = WorkflowSerializer
model_operations = ['create', 'update']
lookup_field = 'id'
@classmethod
@atomic
def mutate_and_get_payload(cls, root, info, **input):
return super(WorkflowMutation, cls).mutate_and_get_payload(root, info, **input)
class CloneWorkflow(graphene.ClientIDMutation):
class Input:
workflow_id = graphene.String()
name = graphene.String()
workflow = graphene.Field(schema.WorkflowNode)
@classmethod
@parse_global_ids()
@atomic
def mutate_and_get_payload(cls, root, info, **input):
workflow = Workflow.objects.get(pk=input.get('workflow_id'))
new_workflow, _, _, _ = workflow.clone(name=input.get('name'))
return CloneWorkflow(workflow=new_workflow)
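# A sketch of the corresponding GraphQL call (the camelCase field name follows
# from the clone_workflow registration in Mutation below; ids are global ids):
#
#   mutation {
#     cloneWorkflow(input: {workflowId: "...", name: "my copy"}) {
#       workflow { id name }
#     }
#   }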
class StateSerializer(serializers.ModelSerializer):
class Meta:
model = State
fields = ['id', 'name', 'workflow', 'active', 'initial']
class StateMutation(SerializerMutation):
class Meta:
serializer_class = StateSerializer
model_operations = ['create', 'update']
lookup_field = 'id'
@classmethod
@atomic
def mutate_and_get_payload(cls, root, info, **input):
return super(StateMutation, cls).mutate_and_get_payload(root, info, **input)
class StateVariableDefSerializer(serializers.ModelSerializer):
class Meta:
model = StateVariableDef
fields = ['id', 'name', 'workflow', 'state']
class StateVariableDefMutation(SerializerMutation):
class Meta:
serializer_class = StateVariableDefSerializer
model_operations = ['create', 'update']
lookup_field = 'id'
@classmethod
@atomic
def mutate_and_get_payload(cls, root, info, **input):
return super(StateVariableDefMutation, cls).mutate_and_get_payload(root, info, **input)
class TransitionSerializer(serializers.ModelSerializer):
class Meta:
model = Transition
fields = ['id', 'name', 'workflow', 'initial_state', 'final_state', 'priority', 'automatic', 'automatic_delay']
class TransitionMutation(SerializerMutation):
class Meta:
serializer_class = TransitionSerializer
model_operations = ['create', 'update']
lookup_field = 'id'
@classmethod
@atomic
def mutate_and_get_payload(cls, root, info, **input):
return super(TransitionMutation, cls).mutate_and_get_payload(root, info, **input)
class ConditionSerializer(serializers.ModelSerializer):
class Meta:
model = Condition
fields = ['id', 'condition_opt', 'workflow', 'parent_condition', 'transition']
class ConditionMutation(SerializerMutation):
class Meta:
serializer_class = ConditionSerializer
model_operations = ['create', 'update']
lookup_field = 'id'
@classmethod
@atomic
def mutate_and_get_payload(cls, root, info, **input):
return super(ConditionMutation, cls).mutate_and_get_payload(root, info, **input)
class FunctionSerializer(serializers.ModelSerializer):
class Meta:
model = Function
fields = ['id', 'function_name', 'workflow', 'function_module', 'condition']
class FunctionMutation(SerializerMutation):
class Meta:
serializer_class = FunctionSerializer
model_operations = ['create', 'update']
lookup_field = 'id'
@classmethod
@atomic
def mutate_and_get_payload(cls, root, info, **input):
return super(FunctionMutation, cls).mutate_and_get_payload(root, info, **input)
class FunctionParameterSerializer(serializers.ModelSerializer):
class Meta:
model = FunctionParameter
fields = ['id', 'name', 'workflow', 'function', 'value']
class FunctionParameterMutation(SerializerMutation):
class Meta:
serializer_class = FunctionParameterSerializer
model_operations = ['create', 'update']
lookup_field = 'id'
@classmethod
@atomic
def mutate_and_get_payload(cls, root, info, **input):
return super(FunctionParameterMutation, cls).mutate_and_get_payload(root, info, **input)
class CallbackSerializer(serializers.ModelSerializer):
class Meta:
model = Callback
fields = ['id', 'function_name', 'workflow', 'function_module', 'transition', 'order', 'execute_async']
class CallbackMutation(SerializerMutation):
class Meta:
serializer_class = CallbackSerializer
model_operations = ['create', 'update']
lookup_field = 'id'
@classmethod
@atomic
def mutate_and_get_payload(cls, root, info, **input):
return super(CallbackMutation, cls).mutate_and_get_payload(root, info, **input)
class CallbackParameterSerializer(serializers.ModelSerializer):
class Meta:
model = CallbackParameter
fields = ['id', 'name', 'workflow', 'callback', 'value']
class CallbackParameterMutation(SerializerMutation):
class Meta:
serializer_class = CallbackParameterSerializer
model_operations = ['create', 'update']
lookup_field = 'id'
@classmethod
@atomic
def mutate_and_get_payload(cls, root, info, **input):
return super(CallbackParameterMutation, cls).mutate_and_get_payload(root, info, **input)
class CurrentObjectStateSerializer(serializers.ModelSerializer):
class Meta:
model = CurrentObjectState
fields = ['id', 'object_id', 'workflow', 'state', 'updated_ts']
class CurrentObjectStateMutation(SerializerMutation):
class Meta:
serializer_class = CurrentObjectStateSerializer
model_operations = ['create', 'update']
lookup_field = 'id'
@classmethod
@atomic
def mutate_and_get_payload(cls, root, info, **input):
return super(CurrentObjectStateMutation, cls).mutate_and_get_payload(root, info, **input)
class TransitionLogSerializer(serializers.ModelSerializer):
class Meta:
model = TransitionLog
fields = ['id', 'object_id', 'workflow', 'user_id', 'transition', 'completed_ts', 'success', 'error_code', 'error_message']
class TransitionLogMutation(SerializerMutation):
class Meta:
serializer_class = TransitionLogSerializer
model_operations = ['create', 'update']
lookup_field = 'id'
@classmethod
@atomic
def mutate_and_get_payload(cls, root, info, **input):
return super(TransitionLogMutation, cls).mutate_and_get_payload(root, info, **input)
class StateVariableSerializer(serializers.ModelSerializer):
class Meta:
model = StateVariable
fields = ['id', 'current_object_state', 'workflow', 'state_variable_def', 'value']
class StateVariableMutation(SerializerMutation):
class Meta:
serializer_class = StateVariableSerializer
model_operations = ['create', 'update']
lookup_field = 'id'
@classmethod
@atomic
def mutate_and_get_payload(cls, root, info, **input):
return super(StateVariableMutation, cls).mutate_and_get_payload(root, info, **input)
class Mutation(graphene.AbstractType):
"""
    Low-level CRUD API (create/read/update; delete is intentionally omitted)
"""
workflow_mutation = WorkflowMutation.Field()
state_mutation = StateMutation.Field()
state_variable_def_mutation = StateVariableDefMutation.Field()
transition_mutation = TransitionMutation.Field()
condition_mutation = ConditionMutation.Field()
function_mutation = FunctionMutation.Field()
function_parameter_mutation = FunctionParameterMutation.Field()
callback_mutation = CallbackMutation.Field()
callback_parameter_mutation = CallbackParameterMutation.Field()
current_object_state_mutation = CurrentObjectStateMutation.Field()
transition_log_mutation = TransitionLogMutation.Field()
clone_workflow = CloneWorkflow.Field()
"""
High Level API: execute single transition, execute automatic transitions
"""
# TODO high level api | dani0805/django_workflow | django_workflow/mutation.py | Python | bsd-3-clause | 8,601 |
from zeit.calendar.i18n import MessageFactory as _
import zc.sourcefactory.basic
class PrioritySource(zc.sourcefactory.basic.BasicSourceFactory):
values = (
(1, _('^^ mandatory')),
(0, _('^ important')),
(-1, _('> suggestion')))
titles = dict(values)
def getValues(self):
return (v[0] for v in self.values)
def getTitle(self, value):
return self.titles[value]
| ZeitOnline/zeit.calendar | src/zeit/calendar/source.py | Python | bsd-3-clause | 423 |
from __future__ import absolute_import, division, print_function
import os
import tempfile
import unittest
import blaze
from blaze.datadescriptor import dd_as_py
# A CSV toy example
csv_buf = u"""k1,v1,1,False
k2,v2,2,True
k3,v3,3,False
"""
csv_schema = "{ f0: string; f1: string; f2: int16; f3: bool }"
csv_ldict = [
{u'f0': u'k1', u'f1': u'v1', u'f2': 1, u'f3': False},
{u'f0': u'k2', u'f1': u'v2', u'f2': 2, u'f3': True},
{u'f0': u'k3', u'f1': u'v3', u'f2': 3, u'f3': False}
]
class TestOpenCSV(unittest.TestCase):
def setUp(self):
handle, self.fname = tempfile.mkstemp(suffix='.csv')
self.url = self.fname
with os.fdopen(handle, "w") as f:
f.write(csv_buf)
def tearDown(self):
os.unlink(self.fname)
def test_open(self):
store = blaze.Storage(self.url, mode='r')
a = blaze.open(store, schema=csv_schema)
self.assert_(isinstance(a, blaze.Array))
self.assertEqual(dd_as_py(a._data), csv_ldict)
def test_open_dialect(self):
store = blaze.Storage(self.url, mode='r')
a = blaze.open(store, schema=csv_schema, dialect='excel')
self.assert_(isinstance(a, blaze.Array))
self.assertEqual(dd_as_py(a._data), csv_ldict)
def test_open_has_header(self):
store = blaze.Storage(self.url, mode='r')
a = blaze.open(store, schema=csv_schema, has_header=False)
self.assert_(isinstance(a, blaze.Array))
self.assertEqual(dd_as_py(a._data), csv_ldict)
def test_append(self):
store = blaze.Storage(self.url, mode='r+')
a = blaze.open(store, schema=csv_schema)
blaze.append(a, ["k4", "v4", 4, True])
self.assertEqual(dd_as_py(a._data), csv_ldict + \
[{u'f0': u'k4', u'f1': u'v4', u'f2': 4, u'f3': True}])
def test_deprecated_open(self):
url = "csv://" + self.fname
store = blaze.Storage(url, mode='r')
a = blaze.open(store, schema=csv_schema)
self.assert_(isinstance(a, blaze.Array))
self.assertEqual(dd_as_py(a._data), csv_ldict)
json_buf = u"[1, 2, 3, 4, 5]"
json_schema = "var, int8"
class TestOpenJSON(unittest.TestCase):
def setUp(self):
handle, self.fname = tempfile.mkstemp(suffix='.json')
self.url = self.fname
with os.fdopen(handle, "w") as f:
f.write(json_buf)
def tearDown(self):
os.unlink(self.fname)
def test_open(self):
store = blaze.Storage(self.url, mode='r')
a = blaze.open(store, schema=json_schema)
self.assert_(isinstance(a, blaze.Array))
self.assertEqual(dd_as_py(a._data), [1, 2, 3, 4, 5])
def test_deprecated_open(self):
url = "json://" + self.fname
store = blaze.Storage(url, mode='r')
a = blaze.open(store, schema=json_schema)
self.assert_(isinstance(a, blaze.Array))
self.assertEqual(dd_as_py(a._data), [1, 2, 3, 4, 5])
if __name__ == '__main__':
unittest.main(verbosity=2)
| aaronmartin0303/blaze | blaze/tests/test_array_opening.py | Python | bsd-3-clause | 3,006 |
import sys
import random
import os
print os.getcwd()
with open('t.txt', 'w') as f:
    f.write(str(random.randint(0, 10000)))
| jaredgk/IMgui-electron | IMa/make_random_number.py | Python | bsd-3-clause | 117 |
# Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# AUTO-GENERATED FROM `$REPO_ROOT/templates/src/python/grpcio/grpc_core_dependencies.py.template`!!!
CORE_SOURCE_FILES = [
'src/core/lib/profiling/basic_timers.c',
'src/core/lib/profiling/stap_timers.c',
'src/core/lib/support/alloc.c',
'src/core/lib/support/avl.c',
'src/core/lib/support/backoff.c',
'src/core/lib/support/cmdline.c',
'src/core/lib/support/cpu_iphone.c',
'src/core/lib/support/cpu_linux.c',
'src/core/lib/support/cpu_posix.c',
'src/core/lib/support/cpu_windows.c',
'src/core/lib/support/env_linux.c',
'src/core/lib/support/env_posix.c',
'src/core/lib/support/env_windows.c',
'src/core/lib/support/histogram.c',
'src/core/lib/support/host_port.c',
'src/core/lib/support/log.c',
'src/core/lib/support/log_android.c',
'src/core/lib/support/log_linux.c',
'src/core/lib/support/log_posix.c',
'src/core/lib/support/log_windows.c',
'src/core/lib/support/murmur_hash.c',
'src/core/lib/support/slice.c',
'src/core/lib/support/slice_buffer.c',
'src/core/lib/support/stack_lockfree.c',
'src/core/lib/support/string.c',
'src/core/lib/support/string_posix.c',
'src/core/lib/support/string_util_windows.c',
'src/core/lib/support/string_windows.c',
'src/core/lib/support/subprocess_posix.c',
'src/core/lib/support/subprocess_windows.c',
'src/core/lib/support/sync.c',
'src/core/lib/support/sync_posix.c',
'src/core/lib/support/sync_windows.c',
'src/core/lib/support/thd.c',
'src/core/lib/support/thd_posix.c',
'src/core/lib/support/thd_windows.c',
'src/core/lib/support/time.c',
'src/core/lib/support/time_posix.c',
'src/core/lib/support/time_precise.c',
'src/core/lib/support/time_windows.c',
'src/core/lib/support/tls_pthread.c',
'src/core/lib/support/tmpfile_msys.c',
'src/core/lib/support/tmpfile_posix.c',
'src/core/lib/support/tmpfile_windows.c',
'src/core/lib/support/wrap_memcpy.c',
'src/core/lib/surface/init.c',
'src/core/lib/channel/channel_args.c',
'src/core/lib/channel/channel_stack.c',
'src/core/lib/channel/channel_stack_builder.c',
'src/core/lib/channel/compress_filter.c',
'src/core/lib/channel/connected_channel.c',
'src/core/lib/channel/http_client_filter.c',
'src/core/lib/channel/http_server_filter.c',
'src/core/lib/compression/compression.c',
'src/core/lib/compression/message_compress.c',
'src/core/lib/debug/trace.c',
'src/core/lib/http/format_request.c',
'src/core/lib/http/httpcli.c',
'src/core/lib/http/parser.c',
'src/core/lib/iomgr/closure.c',
'src/core/lib/iomgr/endpoint.c',
'src/core/lib/iomgr/endpoint_pair_posix.c',
'src/core/lib/iomgr/endpoint_pair_windows.c',
'src/core/lib/iomgr/error.c',
'src/core/lib/iomgr/ev_epoll_linux.c',
'src/core/lib/iomgr/ev_poll_and_epoll_posix.c',
'src/core/lib/iomgr/ev_poll_posix.c',
'src/core/lib/iomgr/ev_posix.c',
'src/core/lib/iomgr/exec_ctx.c',
'src/core/lib/iomgr/executor.c',
'src/core/lib/iomgr/iocp_windows.c',
'src/core/lib/iomgr/iomgr.c',
'src/core/lib/iomgr/iomgr_posix.c',
'src/core/lib/iomgr/iomgr_windows.c',
'src/core/lib/iomgr/load_file.c',
'src/core/lib/iomgr/network_status_tracker.c',
'src/core/lib/iomgr/polling_entity.c',
'src/core/lib/iomgr/pollset_set_windows.c',
'src/core/lib/iomgr/pollset_windows.c',
'src/core/lib/iomgr/resolve_address_posix.c',
'src/core/lib/iomgr/resolve_address_windows.c',
'src/core/lib/iomgr/sockaddr_utils.c',
'src/core/lib/iomgr/socket_utils_common_posix.c',
'src/core/lib/iomgr/socket_utils_linux.c',
'src/core/lib/iomgr/socket_utils_posix.c',
'src/core/lib/iomgr/socket_windows.c',
'src/core/lib/iomgr/tcp_client_posix.c',
'src/core/lib/iomgr/tcp_client_windows.c',
'src/core/lib/iomgr/tcp_posix.c',
'src/core/lib/iomgr/tcp_server_posix.c',
'src/core/lib/iomgr/tcp_server_windows.c',
'src/core/lib/iomgr/tcp_windows.c',
'src/core/lib/iomgr/time_averaged_stats.c',
'src/core/lib/iomgr/timer.c',
'src/core/lib/iomgr/timer_heap.c',
'src/core/lib/iomgr/udp_server.c',
'src/core/lib/iomgr/unix_sockets_posix.c',
'src/core/lib/iomgr/unix_sockets_posix_noop.c',
'src/core/lib/iomgr/wakeup_fd_eventfd.c',
'src/core/lib/iomgr/wakeup_fd_nospecial.c',
'src/core/lib/iomgr/wakeup_fd_pipe.c',
'src/core/lib/iomgr/wakeup_fd_posix.c',
'src/core/lib/iomgr/workqueue_posix.c',
'src/core/lib/iomgr/workqueue_windows.c',
'src/core/lib/json/json.c',
'src/core/lib/json/json_reader.c',
'src/core/lib/json/json_string.c',
'src/core/lib/json/json_writer.c',
'src/core/lib/surface/alarm.c',
'src/core/lib/surface/api_trace.c',
'src/core/lib/surface/byte_buffer.c',
'src/core/lib/surface/byte_buffer_reader.c',
'src/core/lib/surface/call.c',
'src/core/lib/surface/call_details.c',
'src/core/lib/surface/call_log_batch.c',
'src/core/lib/surface/channel.c',
'src/core/lib/surface/channel_init.c',
'src/core/lib/surface/channel_ping.c',
'src/core/lib/surface/channel_stack_type.c',
'src/core/lib/surface/completion_queue.c',
'src/core/lib/surface/event_string.c',
'src/core/lib/surface/lame_client.c',
'src/core/lib/surface/metadata_array.c',
'src/core/lib/surface/server.c',
'src/core/lib/surface/validate_metadata.c',
'src/core/lib/surface/version.c',
'src/core/lib/transport/byte_stream.c',
'src/core/lib/transport/connectivity_state.c',
'src/core/lib/transport/metadata.c',
'src/core/lib/transport/metadata_batch.c',
'src/core/lib/transport/static_metadata.c',
'src/core/lib/transport/transport.c',
'src/core/lib/transport/transport_op_string.c',
'src/core/ext/transport/chttp2/server/secure/server_secure_chttp2.c',
'src/core/ext/transport/chttp2/transport/bin_decoder.c',
'src/core/ext/transport/chttp2/transport/bin_encoder.c',
'src/core/ext/transport/chttp2/transport/chttp2_plugin.c',
'src/core/ext/transport/chttp2/transport/chttp2_transport.c',
'src/core/ext/transport/chttp2/transport/frame_data.c',
'src/core/ext/transport/chttp2/transport/frame_goaway.c',
'src/core/ext/transport/chttp2/transport/frame_ping.c',
'src/core/ext/transport/chttp2/transport/frame_rst_stream.c',
'src/core/ext/transport/chttp2/transport/frame_settings.c',
'src/core/ext/transport/chttp2/transport/frame_window_update.c',
'src/core/ext/transport/chttp2/transport/hpack_encoder.c',
'src/core/ext/transport/chttp2/transport/hpack_parser.c',
'src/core/ext/transport/chttp2/transport/hpack_table.c',
'src/core/ext/transport/chttp2/transport/huffsyms.c',
'src/core/ext/transport/chttp2/transport/incoming_metadata.c',
'src/core/ext/transport/chttp2/transport/parsing.c',
'src/core/ext/transport/chttp2/transport/status_conversion.c',
'src/core/ext/transport/chttp2/transport/stream_lists.c',
'src/core/ext/transport/chttp2/transport/stream_map.c',
'src/core/ext/transport/chttp2/transport/timeout_encoding.c',
'src/core/ext/transport/chttp2/transport/varint.c',
'src/core/ext/transport/chttp2/transport/writing.c',
'src/core/ext/transport/chttp2/alpn/alpn.c',
'src/core/lib/http/httpcli_security_connector.c',
'src/core/lib/security/context/security_context.c',
'src/core/lib/security/credentials/composite/composite_credentials.c',
'src/core/lib/security/credentials/credentials.c',
'src/core/lib/security/credentials/credentials_metadata.c',
'src/core/lib/security/credentials/fake/fake_credentials.c',
'src/core/lib/security/credentials/google_default/credentials_posix.c',
'src/core/lib/security/credentials/google_default/credentials_windows.c',
'src/core/lib/security/credentials/google_default/google_default_credentials.c',
'src/core/lib/security/credentials/iam/iam_credentials.c',
'src/core/lib/security/credentials/jwt/json_token.c',
'src/core/lib/security/credentials/jwt/jwt_credentials.c',
'src/core/lib/security/credentials/jwt/jwt_verifier.c',
'src/core/lib/security/credentials/oauth2/oauth2_credentials.c',
'src/core/lib/security/credentials/plugin/plugin_credentials.c',
'src/core/lib/security/credentials/ssl/ssl_credentials.c',
'src/core/lib/security/transport/client_auth_filter.c',
'src/core/lib/security/transport/handshake.c',
'src/core/lib/security/transport/secure_endpoint.c',
'src/core/lib/security/transport/security_connector.c',
'src/core/lib/security/transport/server_auth_filter.c',
'src/core/lib/security/transport/tsi_error.c',
'src/core/lib/security/util/b64.c',
'src/core/lib/security/util/json_util.c',
'src/core/lib/surface/init_secure.c',
'src/core/lib/tsi/fake_transport_security.c',
'src/core/lib/tsi/ssl_transport_security.c',
'src/core/lib/tsi/transport_security.c',
'src/core/ext/transport/chttp2/client/secure/secure_channel_create.c',
'src/core/ext/client_config/channel_connectivity.c',
'src/core/ext/client_config/client_channel.c',
'src/core/ext/client_config/client_channel_factory.c',
'src/core/ext/client_config/client_config.c',
'src/core/ext/client_config/client_config_plugin.c',
'src/core/ext/client_config/connector.c',
'src/core/ext/client_config/default_initial_connect_string.c',
'src/core/ext/client_config/initial_connect_string.c',
'src/core/ext/client_config/lb_policy.c',
'src/core/ext/client_config/lb_policy_factory.c',
'src/core/ext/client_config/lb_policy_registry.c',
'src/core/ext/client_config/parse_address.c',
'src/core/ext/client_config/resolver.c',
'src/core/ext/client_config/resolver_factory.c',
'src/core/ext/client_config/resolver_registry.c',
'src/core/ext/client_config/subchannel.c',
'src/core/ext/client_config/subchannel_call_holder.c',
'src/core/ext/client_config/subchannel_index.c',
'src/core/ext/client_config/uri_parser.c',
'src/core/ext/transport/chttp2/server/insecure/server_chttp2.c',
'src/core/ext/transport/chttp2/server/insecure/server_chttp2_posix.c',
'src/core/ext/transport/chttp2/client/insecure/channel_create.c',
'src/core/ext/transport/chttp2/client/insecure/channel_create_posix.c',
'src/core/ext/lb_policy/grpclb/load_balancer_api.c',
'src/core/ext/lb_policy/grpclb/proto/grpc/lb/v1/load_balancer.pb.c',
'third_party/nanopb/pb_common.c',
'third_party/nanopb/pb_decode.c',
'third_party/nanopb/pb_encode.c',
'src/core/ext/lb_policy/pick_first/pick_first.c',
'src/core/ext/lb_policy/round_robin/round_robin.c',
'src/core/ext/resolver/dns/native/dns_resolver.c',
'src/core/ext/resolver/sockaddr/sockaddr_resolver.c',
'src/core/ext/load_reporting/load_reporting.c',
'src/core/ext/load_reporting/load_reporting_filter.c',
'src/core/ext/census/context.c',
'src/core/ext/census/gen/census.pb.c',
'src/core/ext/census/grpc_context.c',
'src/core/ext/census/grpc_filter.c',
'src/core/ext/census/grpc_plugin.c',
'src/core/ext/census/initialize.c',
'src/core/ext/census/mlog.c',
'src/core/ext/census/operation.c',
'src/core/ext/census/placeholders.c',
'src/core/ext/census/tracing.c',
'src/core/plugin_registry/grpc_plugin_registry.c',
'src/boringssl/err_data.c',
'third_party/boringssl/crypto/aes/aes.c',
'third_party/boringssl/crypto/aes/mode_wrappers.c',
'third_party/boringssl/crypto/asn1/a_bitstr.c',
'third_party/boringssl/crypto/asn1/a_bool.c',
'third_party/boringssl/crypto/asn1/a_bytes.c',
'third_party/boringssl/crypto/asn1/a_d2i_fp.c',
'third_party/boringssl/crypto/asn1/a_dup.c',
'third_party/boringssl/crypto/asn1/a_enum.c',
'third_party/boringssl/crypto/asn1/a_gentm.c',
'third_party/boringssl/crypto/asn1/a_i2d_fp.c',
'third_party/boringssl/crypto/asn1/a_int.c',
'third_party/boringssl/crypto/asn1/a_mbstr.c',
'third_party/boringssl/crypto/asn1/a_object.c',
'third_party/boringssl/crypto/asn1/a_octet.c',
'third_party/boringssl/crypto/asn1/a_print.c',
'third_party/boringssl/crypto/asn1/a_strnid.c',
'third_party/boringssl/crypto/asn1/a_time.c',
'third_party/boringssl/crypto/asn1/a_type.c',
'third_party/boringssl/crypto/asn1/a_utctm.c',
'third_party/boringssl/crypto/asn1/a_utf8.c',
'third_party/boringssl/crypto/asn1/asn1_lib.c',
'third_party/boringssl/crypto/asn1/asn1_par.c',
'third_party/boringssl/crypto/asn1/asn_pack.c',
'third_party/boringssl/crypto/asn1/bio_asn1.c',
'third_party/boringssl/crypto/asn1/bio_ndef.c',
'third_party/boringssl/crypto/asn1/f_enum.c',
'third_party/boringssl/crypto/asn1/f_int.c',
'third_party/boringssl/crypto/asn1/f_string.c',
'third_party/boringssl/crypto/asn1/t_bitst.c',
'third_party/boringssl/crypto/asn1/t_pkey.c',
'third_party/boringssl/crypto/asn1/tasn_dec.c',
'third_party/boringssl/crypto/asn1/tasn_enc.c',
'third_party/boringssl/crypto/asn1/tasn_fre.c',
'third_party/boringssl/crypto/asn1/tasn_new.c',
'third_party/boringssl/crypto/asn1/tasn_prn.c',
'third_party/boringssl/crypto/asn1/tasn_typ.c',
'third_party/boringssl/crypto/asn1/tasn_utl.c',
'third_party/boringssl/crypto/asn1/x_bignum.c',
'third_party/boringssl/crypto/asn1/x_long.c',
'third_party/boringssl/crypto/base64/base64.c',
'third_party/boringssl/crypto/bio/bio.c',
'third_party/boringssl/crypto/bio/bio_mem.c',
'third_party/boringssl/crypto/bio/buffer.c',
'third_party/boringssl/crypto/bio/connect.c',
'third_party/boringssl/crypto/bio/fd.c',
'third_party/boringssl/crypto/bio/file.c',
'third_party/boringssl/crypto/bio/hexdump.c',
'third_party/boringssl/crypto/bio/pair.c',
'third_party/boringssl/crypto/bio/printf.c',
'third_party/boringssl/crypto/bio/socket.c',
'third_party/boringssl/crypto/bio/socket_helper.c',
'third_party/boringssl/crypto/bn/add.c',
'third_party/boringssl/crypto/bn/asm/x86_64-gcc.c',
'third_party/boringssl/crypto/bn/bn.c',
'third_party/boringssl/crypto/bn/bn_asn1.c',
'third_party/boringssl/crypto/bn/cmp.c',
'third_party/boringssl/crypto/bn/convert.c',
'third_party/boringssl/crypto/bn/ctx.c',
'third_party/boringssl/crypto/bn/div.c',
'third_party/boringssl/crypto/bn/exponentiation.c',
'third_party/boringssl/crypto/bn/gcd.c',
'third_party/boringssl/crypto/bn/generic.c',
'third_party/boringssl/crypto/bn/kronecker.c',
'third_party/boringssl/crypto/bn/montgomery.c',
'third_party/boringssl/crypto/bn/mul.c',
'third_party/boringssl/crypto/bn/prime.c',
'third_party/boringssl/crypto/bn/random.c',
'third_party/boringssl/crypto/bn/rsaz_exp.c',
'third_party/boringssl/crypto/bn/shift.c',
'third_party/boringssl/crypto/bn/sqrt.c',
'third_party/boringssl/crypto/buf/buf.c',
'third_party/boringssl/crypto/bytestring/asn1_compat.c',
'third_party/boringssl/crypto/bytestring/ber.c',
'third_party/boringssl/crypto/bytestring/cbb.c',
'third_party/boringssl/crypto/bytestring/cbs.c',
'third_party/boringssl/crypto/chacha/chacha_generic.c',
'third_party/boringssl/crypto/chacha/chacha_vec.c',
'third_party/boringssl/crypto/cipher/aead.c',
'third_party/boringssl/crypto/cipher/cipher.c',
'third_party/boringssl/crypto/cipher/derive_key.c',
'third_party/boringssl/crypto/cipher/e_aes.c',
'third_party/boringssl/crypto/cipher/e_chacha20poly1305.c',
'third_party/boringssl/crypto/cipher/e_des.c',
'third_party/boringssl/crypto/cipher/e_null.c',
'third_party/boringssl/crypto/cipher/e_rc2.c',
'third_party/boringssl/crypto/cipher/e_rc4.c',
'third_party/boringssl/crypto/cipher/e_ssl3.c',
'third_party/boringssl/crypto/cipher/e_tls.c',
'third_party/boringssl/crypto/cipher/tls_cbc.c',
'third_party/boringssl/crypto/cmac/cmac.c',
'third_party/boringssl/crypto/conf/conf.c',
'third_party/boringssl/crypto/cpu-arm.c',
'third_party/boringssl/crypto/cpu-intel.c',
'third_party/boringssl/crypto/crypto.c',
'third_party/boringssl/crypto/curve25519/curve25519.c',
'third_party/boringssl/crypto/curve25519/x25519-x86_64.c',
'third_party/boringssl/crypto/des/des.c',
'third_party/boringssl/crypto/dh/check.c',
'third_party/boringssl/crypto/dh/dh.c',
'third_party/boringssl/crypto/dh/dh_asn1.c',
'third_party/boringssl/crypto/dh/params.c',
'third_party/boringssl/crypto/digest/digest.c',
'third_party/boringssl/crypto/digest/digests.c',
'third_party/boringssl/crypto/directory_posix.c',
'third_party/boringssl/crypto/directory_win.c',
'third_party/boringssl/crypto/dsa/dsa.c',
'third_party/boringssl/crypto/dsa/dsa_asn1.c',
'third_party/boringssl/crypto/ec/ec.c',
'third_party/boringssl/crypto/ec/ec_asn1.c',
'third_party/boringssl/crypto/ec/ec_key.c',
'third_party/boringssl/crypto/ec/ec_montgomery.c',
'third_party/boringssl/crypto/ec/oct.c',
'third_party/boringssl/crypto/ec/p224-64.c',
'third_party/boringssl/crypto/ec/p256-64.c',
'third_party/boringssl/crypto/ec/p256-x86_64.c',
'third_party/boringssl/crypto/ec/simple.c',
'third_party/boringssl/crypto/ec/util-64.c',
'third_party/boringssl/crypto/ec/wnaf.c',
'third_party/boringssl/crypto/ecdh/ecdh.c',
'third_party/boringssl/crypto/ecdsa/ecdsa.c',
'third_party/boringssl/crypto/ecdsa/ecdsa_asn1.c',
'third_party/boringssl/crypto/engine/engine.c',
'third_party/boringssl/crypto/err/err.c',
'third_party/boringssl/crypto/evp/algorithm.c',
'third_party/boringssl/crypto/evp/digestsign.c',
'third_party/boringssl/crypto/evp/evp.c',
'third_party/boringssl/crypto/evp/evp_asn1.c',
'third_party/boringssl/crypto/evp/evp_ctx.c',
'third_party/boringssl/crypto/evp/p_dsa_asn1.c',
'third_party/boringssl/crypto/evp/p_ec.c',
'third_party/boringssl/crypto/evp/p_ec_asn1.c',
'third_party/boringssl/crypto/evp/p_rsa.c',
'third_party/boringssl/crypto/evp/p_rsa_asn1.c',
'third_party/boringssl/crypto/evp/pbkdf.c',
'third_party/boringssl/crypto/evp/sign.c',
'third_party/boringssl/crypto/ex_data.c',
'third_party/boringssl/crypto/hkdf/hkdf.c',
'third_party/boringssl/crypto/hmac/hmac.c',
'third_party/boringssl/crypto/lhash/lhash.c',
'third_party/boringssl/crypto/md4/md4.c',
'third_party/boringssl/crypto/md5/md5.c',
'third_party/boringssl/crypto/mem.c',
'third_party/boringssl/crypto/modes/cbc.c',
'third_party/boringssl/crypto/modes/cfb.c',
'third_party/boringssl/crypto/modes/ctr.c',
'third_party/boringssl/crypto/modes/gcm.c',
'third_party/boringssl/crypto/modes/ofb.c',
'third_party/boringssl/crypto/obj/obj.c',
'third_party/boringssl/crypto/obj/obj_xref.c',
'third_party/boringssl/crypto/pem/pem_all.c',
'third_party/boringssl/crypto/pem/pem_info.c',
'third_party/boringssl/crypto/pem/pem_lib.c',
'third_party/boringssl/crypto/pem/pem_oth.c',
'third_party/boringssl/crypto/pem/pem_pk8.c',
'third_party/boringssl/crypto/pem/pem_pkey.c',
'third_party/boringssl/crypto/pem/pem_x509.c',
'third_party/boringssl/crypto/pem/pem_xaux.c',
'third_party/boringssl/crypto/pkcs8/p5_pbe.c',
'third_party/boringssl/crypto/pkcs8/p5_pbev2.c',
'third_party/boringssl/crypto/pkcs8/p8_pkey.c',
'third_party/boringssl/crypto/pkcs8/pkcs8.c',
'third_party/boringssl/crypto/poly1305/poly1305.c',
'third_party/boringssl/crypto/poly1305/poly1305_arm.c',
'third_party/boringssl/crypto/poly1305/poly1305_vec.c',
'third_party/boringssl/crypto/rand/rand.c',
'third_party/boringssl/crypto/rand/urandom.c',
'third_party/boringssl/crypto/rand/windows.c',
'third_party/boringssl/crypto/rc4/rc4.c',
'third_party/boringssl/crypto/refcount_c11.c',
'third_party/boringssl/crypto/refcount_lock.c',
'third_party/boringssl/crypto/rsa/blinding.c',
'third_party/boringssl/crypto/rsa/padding.c',
'third_party/boringssl/crypto/rsa/rsa.c',
'third_party/boringssl/crypto/rsa/rsa_asn1.c',
'third_party/boringssl/crypto/rsa/rsa_impl.c',
'third_party/boringssl/crypto/sha/sha1.c',
'third_party/boringssl/crypto/sha/sha256.c',
'third_party/boringssl/crypto/sha/sha512.c',
'third_party/boringssl/crypto/stack/stack.c',
'third_party/boringssl/crypto/thread.c',
'third_party/boringssl/crypto/thread_none.c',
'third_party/boringssl/crypto/thread_pthread.c',
'third_party/boringssl/crypto/thread_win.c',
'third_party/boringssl/crypto/time_support.c',
'third_party/boringssl/crypto/x509/a_digest.c',
'third_party/boringssl/crypto/x509/a_sign.c',
'third_party/boringssl/crypto/x509/a_strex.c',
'third_party/boringssl/crypto/x509/a_verify.c',
'third_party/boringssl/crypto/x509/asn1_gen.c',
'third_party/boringssl/crypto/x509/by_dir.c',
'third_party/boringssl/crypto/x509/by_file.c',
'third_party/boringssl/crypto/x509/i2d_pr.c',
'third_party/boringssl/crypto/x509/pkcs7.c',
'third_party/boringssl/crypto/x509/t_crl.c',
'third_party/boringssl/crypto/x509/t_req.c',
'third_party/boringssl/crypto/x509/t_x509.c',
'third_party/boringssl/crypto/x509/t_x509a.c',
'third_party/boringssl/crypto/x509/x509.c',
'third_party/boringssl/crypto/x509/x509_att.c',
'third_party/boringssl/crypto/x509/x509_cmp.c',
'third_party/boringssl/crypto/x509/x509_d2.c',
'third_party/boringssl/crypto/x509/x509_def.c',
'third_party/boringssl/crypto/x509/x509_ext.c',
'third_party/boringssl/crypto/x509/x509_lu.c',
'third_party/boringssl/crypto/x509/x509_obj.c',
'third_party/boringssl/crypto/x509/x509_r2x.c',
'third_party/boringssl/crypto/x509/x509_req.c',
'third_party/boringssl/crypto/x509/x509_set.c',
'third_party/boringssl/crypto/x509/x509_trs.c',
'third_party/boringssl/crypto/x509/x509_txt.c',
'third_party/boringssl/crypto/x509/x509_v3.c',
'third_party/boringssl/crypto/x509/x509_vfy.c',
'third_party/boringssl/crypto/x509/x509_vpm.c',
'third_party/boringssl/crypto/x509/x509cset.c',
'third_party/boringssl/crypto/x509/x509name.c',
'third_party/boringssl/crypto/x509/x509rset.c',
'third_party/boringssl/crypto/x509/x509spki.c',
'third_party/boringssl/crypto/x509/x509type.c',
'third_party/boringssl/crypto/x509/x_algor.c',
'third_party/boringssl/crypto/x509/x_all.c',
'third_party/boringssl/crypto/x509/x_attrib.c',
'third_party/boringssl/crypto/x509/x_crl.c',
'third_party/boringssl/crypto/x509/x_exten.c',
'third_party/boringssl/crypto/x509/x_info.c',
'third_party/boringssl/crypto/x509/x_name.c',
'third_party/boringssl/crypto/x509/x_pkey.c',
'third_party/boringssl/crypto/x509/x_pubkey.c',
'third_party/boringssl/crypto/x509/x_req.c',
'third_party/boringssl/crypto/x509/x_sig.c',
'third_party/boringssl/crypto/x509/x_spki.c',
'third_party/boringssl/crypto/x509/x_val.c',
'third_party/boringssl/crypto/x509/x_x509.c',
'third_party/boringssl/crypto/x509/x_x509a.c',
'third_party/boringssl/crypto/x509v3/pcy_cache.c',
'third_party/boringssl/crypto/x509v3/pcy_data.c',
'third_party/boringssl/crypto/x509v3/pcy_lib.c',
'third_party/boringssl/crypto/x509v3/pcy_map.c',
'third_party/boringssl/crypto/x509v3/pcy_node.c',
'third_party/boringssl/crypto/x509v3/pcy_tree.c',
'third_party/boringssl/crypto/x509v3/v3_akey.c',
'third_party/boringssl/crypto/x509v3/v3_akeya.c',
'third_party/boringssl/crypto/x509v3/v3_alt.c',
'third_party/boringssl/crypto/x509v3/v3_bcons.c',
'third_party/boringssl/crypto/x509v3/v3_bitst.c',
'third_party/boringssl/crypto/x509v3/v3_conf.c',
'third_party/boringssl/crypto/x509v3/v3_cpols.c',
'third_party/boringssl/crypto/x509v3/v3_crld.c',
'third_party/boringssl/crypto/x509v3/v3_enum.c',
'third_party/boringssl/crypto/x509v3/v3_extku.c',
'third_party/boringssl/crypto/x509v3/v3_genn.c',
'third_party/boringssl/crypto/x509v3/v3_ia5.c',
'third_party/boringssl/crypto/x509v3/v3_info.c',
'third_party/boringssl/crypto/x509v3/v3_int.c',
'third_party/boringssl/crypto/x509v3/v3_lib.c',
'third_party/boringssl/crypto/x509v3/v3_ncons.c',
'third_party/boringssl/crypto/x509v3/v3_pci.c',
'third_party/boringssl/crypto/x509v3/v3_pcia.c',
'third_party/boringssl/crypto/x509v3/v3_pcons.c',
'third_party/boringssl/crypto/x509v3/v3_pku.c',
'third_party/boringssl/crypto/x509v3/v3_pmaps.c',
'third_party/boringssl/crypto/x509v3/v3_prn.c',
'third_party/boringssl/crypto/x509v3/v3_purp.c',
'third_party/boringssl/crypto/x509v3/v3_skey.c',
'third_party/boringssl/crypto/x509v3/v3_sxnet.c',
'third_party/boringssl/crypto/x509v3/v3_utl.c',
'third_party/boringssl/ssl/custom_extensions.c',
'third_party/boringssl/ssl/d1_both.c',
'third_party/boringssl/ssl/d1_clnt.c',
'third_party/boringssl/ssl/d1_lib.c',
'third_party/boringssl/ssl/d1_meth.c',
'third_party/boringssl/ssl/d1_pkt.c',
'third_party/boringssl/ssl/d1_srtp.c',
'third_party/boringssl/ssl/d1_srvr.c',
'third_party/boringssl/ssl/dtls_record.c',
'third_party/boringssl/ssl/pqueue/pqueue.c',
'third_party/boringssl/ssl/s3_both.c',
'third_party/boringssl/ssl/s3_clnt.c',
'third_party/boringssl/ssl/s3_enc.c',
'third_party/boringssl/ssl/s3_lib.c',
'third_party/boringssl/ssl/s3_meth.c',
'third_party/boringssl/ssl/s3_pkt.c',
'third_party/boringssl/ssl/s3_srvr.c',
'third_party/boringssl/ssl/ssl_aead_ctx.c',
'third_party/boringssl/ssl/ssl_asn1.c',
'third_party/boringssl/ssl/ssl_buffer.c',
'third_party/boringssl/ssl/ssl_cert.c',
'third_party/boringssl/ssl/ssl_cipher.c',
'third_party/boringssl/ssl/ssl_ecdh.c',
'third_party/boringssl/ssl/ssl_file.c',
'third_party/boringssl/ssl/ssl_lib.c',
'third_party/boringssl/ssl/ssl_rsa.c',
'third_party/boringssl/ssl/ssl_session.c',
'third_party/boringssl/ssl/ssl_stat.c',
'third_party/boringssl/ssl/t1_enc.c',
'third_party/boringssl/ssl/t1_lib.c',
'third_party/boringssl/ssl/tls_record.c',
'third_party/zlib/adler32.c',
'third_party/zlib/compress.c',
'third_party/zlib/crc32.c',
'third_party/zlib/deflate.c',
'third_party/zlib/gzclose.c',
'third_party/zlib/gzlib.c',
'third_party/zlib/gzread.c',
'third_party/zlib/gzwrite.c',
'third_party/zlib/infback.c',
'third_party/zlib/inffast.c',
'third_party/zlib/inflate.c',
'third_party/zlib/inftrees.c',
'third_party/zlib/trees.c',
'third_party/zlib/uncompr.c',
'third_party/zlib/zutil.c',
]
| arkmaxim/grpc | src/python/grpcio/grpc_core_dependencies.py | Python | bsd-3-clause | 27,122 |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "bublfish.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| ideal/bublfish | manage.py | Python | bsd-3-clause | 251 |
# -*- coding: utf-8 -*-
#
# © 2011 SimpleGeo, Inc All rights reserved.
# Author: Ian Eure <[email protected]>
#
"""Make your code robust."""
| simplegeo/tillicum | tillicum/__init__.py | Python | bsd-3-clause | 143 |
from dataclasses import dataclass
from datetime import datetime
from googleapiclient import discovery
import logging
from typing import List
from rdr_service import config
from rdr_service.services.gcp_config import RdrEnvironment
from rdr_service.config import GAE_PROJECT
@dataclass
class ServiceAccount:
email: str
@dataclass
class ServiceAccountKey:
name: str
start_date: datetime
def get_key_age_in_days(self):
return (datetime.utcnow() - self.start_date).days
class ServiceAccountKeyManager:
def __init__(self):
self._app_id = GAE_PROJECT
self._google_service = discovery.build("iam", "v1", cache_discovery=False)
self._max_age_in_days = config.getSetting(config.DAYS_TO_DELETE_KEYS)
self._service_accounts_with_long_lived_keys = config.getSettingList(
config.SERVICE_ACCOUNTS_WITH_LONG_LIVED_KEYS, default=[]
)
self._managed_data_ops_accounts = config.getSettingList(
config.DATA_OPS_SERVICE_ACCOUNTS_TO_MANAGE, default=[]
)
def expire_old_keys(self):
"""Deletes service account keys older than 3 days as required by NIH"""
if self._app_id is None:
raise Exception('Unable to determine current project')
self._expire_keys_for_project(
project_name=self._app_id,
ignore_service_account_func=lambda account: account.email in self._service_accounts_with_long_lived_keys
)
if self._app_id == RdrEnvironment.PROD.value:
self._expire_keys_for_project(
project_name='all-of-us-ops-data-api-prod',
ignore_service_account_func=lambda account: account.email not in self._managed_data_ops_accounts
)
def _expire_keys_for_project(self, project_name, ignore_service_account_func):
for service_account in self._get_service_accounts_for_project(project_name):
if ignore_service_account_func(service_account):
logging.info("Skip key expiration check for Service Account {}".format(service_account.email))
else:
for key in self._get_keys_for_account(
project_name=project_name,
service_account_name=service_account.email
):
key_age_days = key.get_key_age_in_days()
if key_age_days >= self._max_age_in_days:
                        logging.warning(
                            f"Deleting service account key older than {self._max_age_in_days} days "
                            f"(age: {key_age_days} days): {key.name}"
                        )
self._delete_key(key)
else:
logging.info(f'Service Account key is {key_age_days} days old: {key.name}')
def _get_service_accounts_for_project(self, project_name) -> List[ServiceAccount]:
account_list_request = self._google_service.projects().serviceAccounts().list(name=f'projects/{project_name}')
account_list_response = account_list_request.execute()
service_accounts = [
ServiceAccount(
email=account['email']
)
for account in account_list_response.get('accounts', [])
]
if not service_accounts:
logging.info(f'No Service Accounts found in project "{project_name}"')
return service_accounts
def _get_keys_for_account(self, project_name, service_account_name) -> List[ServiceAccountKey]:
key_list_request = self._google_service.projects().serviceAccounts().keys().list(
name=f'projects/{project_name}/serviceAccounts/{service_account_name}',
keyTypes='USER_MANAGED'
)
key_list_response = key_list_request.execute()
return [
ServiceAccountKey(
name=key['name'],
start_date=datetime.strptime(key["validAfterTime"], "%Y-%m-%dT%H:%M:%SZ")
)
for key in key_list_response.get('keys', [])
]
def _delete_key(self, key: ServiceAccountKey):
delete_request = self._google_service.projects().serviceAccounts().keys().delete(name=key.name)
delete_request.execute()
| all-of-us/raw-data-repository | rdr_service/offline/service_accounts.py | Python | bsd-3-clause | 4,239 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" Little utility that help automate tests and optimization with XDS.
"""
__version__ = "0.1.0"
__date__ = "11-10-2011"
__author__ = "Pierre Legrand ([email protected])"
__copyright__ = "Copyright (c) 2011 Pierre Legrand"
__license__ = "New BSD http://www.opensource.org/licenses/bsd-license.php"
import sys
import shutil
from XDS import XDSLogParser
from xupy import saveLastVersion, xdsInp2Param, \
getProfilRefPar, run_xds, LP_names
gnuplot_template = """set dgrid3d %d,%d
set pm3d
splot 'indexer.log' u 1:2:3 with lines
"""
OUT_FILE = open("index.log","a")
def log(txt):
OUT_FILE.write(txt)
sys.stdout.write(txt)
MIN_SPOT_SIZE_LIST_1 = range(2, 36,2)
STRONG_PIXEL_LIST_1 = [2,3,4,5,6,7,8,9,10,11,12]  # list(range(2, 13))
if __name__ == '__main__':
xp = {}
#xp = xdsInp2Param()
#xp = XParam()
#xp.SPOT_RANGE = [2, 12],[44, 54]
#print xp.SPOT_RANGE
#xp.SPOT_RANGE = "2 12", "44 54"
#xp.DATA_RANGE = 8, 8
#xp.INCLUDE_RESOLUTION_RANGE= 70.0, 2.0
#xp.NBX = 3
#xp.NBY = 3
#if "-a" in sys.argv:
# sys.argv.remove('-a')
# xp.update(getProfilRefPar())
# xp["JOB"] = "DEFPIX", "INTEGRATE", "CORRECT"
# xp["REFINE_INTEGRATE"] = "ORIENTATION", "BEAM", "CELL" #"DISTANCE",
# shutil.copyfile("GXPARM.XDS","XPARM.XDS")
if "-i" in sys.argv:
optid = sys.argv.index("-i")
_xds_input = sys.argv[optid+1]
        xp.update(xdsInp2Param(inp_str=open(_xds_input).read()))
sys.argv.remove('-i')
sys.argv.remove(_xds_input)
#if "-norun" in sys.argv:
# saveLastVersion(LP_names)
# sys.exit()
ARGV = sys.argv[1:]
while ARGV:
ARG = ARGV.pop()
xp.update(xdsInp2Param(inp_str=ARG))
open("gnuplot.inp").write("gnuplot_template" % (len(STRONG_PIXEL_LIST_1), len(MIN_SPOT_SIZE_LIST_1)))
xp["JOB"] = "COLSPOT", "IDXREF"
for spot_size in MIN_SPOT_SIZE_LIST_1:
for strong_pixel in STRONG_PIXEL_LIST_1:
xp["MINIMUM_NUMBER_OF_PIXELS_IN_A_SPOT"] = spot_size
xp["STRONG_PIXEL"] = strong_pixel
run_xds(xp, inp_f="XDS.INP", out_f="xds_indexer.log")
saveLastVersion(LP_names)
res = XDSLogParser("IDXREF.LP", verbose=False).results
log( "%4d %5.1f" % (spot_size, strong_pixel))
log( "%(indexed_percentage)6.1f %(indexed_spots)9d %(total_spots)9d" % res)
log( "%(xy_spot_position_ESD)6.2f %(z_spot_position_ESD)6.2f" % res)
log( " %(refined_cell_str)s\n" % res)
| jsburg/xdsme | XDS/runxds_indexer.py | Python | bsd-3-clause | 2,582 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2014, Nicolas P. Rougier
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
# -----------------------------------------------------------------------------
import numpy as np
from glumpy import app
from glumpy.transforms import Position, Viewport
from glumpy.graphics.collections import PointCollection
window = app.Window(1024,1024, color=(1,1,1,1))
points = PointCollection("agg", color="shared")
#points.append(np.random.normal(0.0,0.5,(10000,3)), itemsize=5000)
#points["color"] = (1,0,0,1), (0,0,1,1)
#points["viewport"].transform = True
#points["viewport"].clipping = True
#points["viewport"].viewport = 256,256,512,512
@window.event
def on_draw(dt):
window.clear()
points.draw()
points.append(np.random.normal(0.0,0.5,(1,3)))
window.attach(points["transform"])
window.attach(points["viewport"])
app.run()
| duyuan11/glumpy | examples/collection-point.py | Python | bsd-3-clause | 998 |
# Copyright (c) 2017, The MITRE Corporation. All rights reserved.
# See LICENSE.txt for complete terms.
import unittest
from cybox.common import DataSegment
import cybox.test
from cybox.test import EntityTestCase
class TestByteRun(EntityTestCase, unittest.TestCase):
klass = DataSegment
_full_dict = {
'data_format': "Hexadecimal",
'data_size': {"value": '20', 'units': "Bytes"},
'data_segment': '0001020304050607080910111213141516171819',
'offset': 42,
'search_distance': 100,
'search_within': 50,
}
if __name__ == "__main__":
unittest.main()
| CybOXProject/python-cybox | cybox/test/common/datasegment_test.py | Python | bsd-3-clause | 615 |
from django.conf import settings
MAP_POI_ACTIVITIES = getattr(
settings, 'WIDGETS_MAP_POI_ACTIVITIES', (
('place-see', 'Places to see'),
('place-eat', 'Places to eat')
)
)
MAP_POI_VENUES = getattr(
settings, 'WIDGETS_MAP_POI_VENUES', (
('atm', 'ATM'),
('bar', 'Bar'),
('cafe', 'Coffee'),
('food', 'Food'),
('landmark', 'Landmark'),
('library', 'Library'),
('pin', 'Red Centre'),
('shops', 'Shops'),
('wifi', 'Wi-Fi'),
('wildlife', 'Wildlife')
)
)
| publica-io/django-publica-widgets | widgets/settings.py | Python | bsd-3-clause | 562 |
# the keys and values you want to keep
# '*' is a wildcard, will accept anything
# in order of prevalence, http://taginfo.openstreetmap.org/keys
# at my own (huh, is this interesting?) discretion
wantedTags = {
'highway': {
'bus_stop', 'rest_area'},
'name': '*',
'addr:housenumber': '*',
'addr:street': '*',
'addr:city': '*',
'addr:postcode': '*',
'addr:state': '*',
'natural': {
'water', 'wetland', 'peak',
'beach', 'spring', 'bay',
'land', 'glacier', 'cave_entrance',
'reef', 'volcano', 'stone',
'waterfall'},
'landuse': {
'forest', 'residential', 'meadow',
        'farm', 'reservoir', 'orchard',
'cemetery', 'vineyard', 'allotments',
'quarry', 'basin', 'retail',
'village_green', 'recreation_ground', 'conservation',
'military', 'landfill'},
'amenity': '*',
'place': {
'island', 'islet'},
'barrier': {
'toll_booth'},
'railway': {
'station', 'platform', 'tram_stop',
'subway', 'halt', 'subway_entrance',
'stop'},
'leisure': {
'pitch', 'park', 'swimming_pool',
'playground', 'garden', 'sports_centre',
'nature_reserve', 'track', 'common',
'stadium', 'recreation_ground', 'golf_course',
'slipway', 'marina', 'water_park',
'miniature_golf', 'horse_riding', 'fishing',
'dog_park', 'ice_rink', 'sauna',
'fitness_station', 'bird_hide', 'beach_resort'},
'shop': '*',
'man_made': {
'pier', 'mine', 'lighthouse'},
'tourism': '*',
'sport': '*',
'religion': '*',
'wheelchair': {'yes'},
'parking': {
'multi-storey', 'park_and_ride'},
'alt_name': '*',
'public_transport': '*',
'website': '*',
'wikipedia': '*',
'water': '*',
'historic': '*',
'denomination': '*',
'url': '*',
'phone': '*',
'cuisine': '*',
'aeroway': {
'aerodrome', 'gate', 'helipad', 'terminal'},
'opening_hours': '*',
'emergency': {
'yes', 'phone'},
'information': {
'guidepost', 'board', 'map', 'office'},
'site': {
'stop_area'},
'atm': {
'yes'},
'golf': {
'tee', 'hole', 'driving_range'},
'brand': '*',
'aerialway': {
'station', 'chair_lift'}
}
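# A minimal sketch (not part of the original settings) of how the wildcard
# convention documented above might be consumed downstream; `keep_tag` is a
# hypothetical helper name.
def keep_tag(key, value):
    """Return True if an OSM key/value pair passes the wantedTags filter."""
    allowed = wantedTags.get(key)
    if allowed is None:
        return False  # key not wanted at all
    # '*' accepts any value; otherwise the value must be in the allowed set
    return allowed == '*' or value in allowed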
| aaronlidman/openstreetPOIs | settings.py | Python | bsd-3-clause | 2,332 |
#------------------------------------------------------------------------------
# Name: pychrono example
# Purpose:
#
# Author: Alessandro Tasora
#
# Created: 1/01/2019
# Copyright: (c) ProjectChrono 2019
#
#
# This file shows how to
# - create a small stack of bricks,
# - create a support that shakes like an earthquake, with motion function
# - simulate the bricks that fall
#-------------------------------------------------------------------------------
import pychrono.core as chrono
import pychrono.irrlicht as chronoirr
# The path to the Chrono data directory containing various assets (meshes, textures, data files)
# is automatically set, relative to the default location of this demo.
# If running from a different directory, you must change the path to the data directory with:
#chrono.SetChronoDataPath('path/to/data')
# ---------------------------------------------------------------------
#
# Create the simulation system and add items
#
my_system = chrono.ChSystemNSC()
# Set the default outward/inward shape margins for collision detection,
# this is especially important for very large or very small objects.
chrono.ChCollisionModel.SetDefaultSuggestedEnvelope(0.001)
chrono.ChCollisionModel.SetDefaultSuggestedMargin(0.001)
# Maybe you want to change some settings for the solver. For example you
# might want to use SetSolverMaxIterations to set the number of iterations
# per timestep, etc.
#my_system.SetSolverType(chrono.ChSolver.Type_BARZILAIBORWEIN) # precise, more slow
my_system.SetSolverMaxIterations(70)
# Create a contact material (surface property)to share between all objects.
# The rolling and spinning parameters are optional - if enabled they double
# the computational time.
brick_material = chrono.ChMaterialSurfaceNSC()
brick_material.SetFriction(0.5)
brick_material.SetDampingF(0.2)
brick_material.SetCompliance (0.0000001)
brick_material.SetComplianceT(0.0000001)
# brick_material.SetRollingFriction(rollfrict_param)
# brick_material.SetSpinningFriction(0)
# brick_material.SetComplianceRolling(0.0000001)
# brick_material.SetComplianceSpinning(0.0000001)
# Create the set of bricks in a vertical stack, along Y axis
nbricks_on_x = 1
nbricks_on_y = 6
size_brick_x = 0.25
size_brick_y = 0.12
size_brick_z = 0.12
density_brick = 1000  # kg/m^3
mass_brick = density_brick * size_brick_x * size_brick_y * size_brick_z
inertia_brick = 2/5*(pow(size_brick_x,2))*mass_brick  # to do: compute separate xx,yy,zz inertias
for ix in range(0,nbricks_on_x):
for iy in range(0,nbricks_on_y):
# create it
body_brick = chrono.ChBody()
# set initial position
body_brick.SetPos(chrono.ChVectorD(ix*size_brick_x, (iy+0.5)*size_brick_y, 0 ))
# set mass properties
body_brick.SetMass(mass_brick)
body_brick.SetInertiaXX(chrono.ChVectorD(inertia_brick,inertia_brick,inertia_brick))
# Collision shape
body_brick.GetCollisionModel().ClearModel()
body_brick.GetCollisionModel().AddBox(brick_material, size_brick_x/2, size_brick_y/2, size_brick_z/2) # must set half sizes
body_brick.GetCollisionModel().BuildModel()
body_brick.SetCollide(True)
# Visualization shape, for rendering animation
body_brick_shape = chrono.ChBoxShape()
body_brick_shape.GetBoxGeometry().Size = chrono.ChVectorD(size_brick_x/2, size_brick_y/2, size_brick_z/2)
if iy%2==0 :
body_brick_shape.SetColor(chrono.ChColor(0.65, 0.65, 0.6)) # set gray color only for odd bricks
body_brick.GetAssets().push_back(body_brick_shape)
my_system.Add(body_brick)
# Create the room floor: a simple fixed rigid body with a collision shape
# and a visualization shape
body_floor = chrono.ChBody()
body_floor.SetBodyFixed(True)
body_floor.SetPos(chrono.ChVectorD(0, -2, 0 ))
# Collision shape
body_floor.GetCollisionModel().ClearModel()
body_floor.GetCollisionModel().AddBox(brick_material, 3, 1, 3) # hemi sizes
body_floor.GetCollisionModel().BuildModel()
body_floor.SetCollide(True)
# Visualization shape
body_floor_shape = chrono.ChBoxShape()
body_floor_shape.GetBoxGeometry().Size = chrono.ChVectorD(3, 1, 3)
body_floor.GetAssets().push_back(body_floor_shape)
body_floor_texture = chrono.ChTexture()
body_floor_texture.SetTextureFilename(chrono.GetChronoDataFile('concrete.jpg'))
body_floor.GetAssets().push_back(body_floor_texture)
my_system.Add(body_floor)
# Create the shaking table, as a box
size_table_x = 1
size_table_y = 0.2
size_table_z = 1
body_table = chrono.ChBody()
body_table.SetPos(chrono.ChVectorD(0, -size_table_y/2, 0 ))
# Collision shape
body_table.GetCollisionModel().ClearModel()
body_table.GetCollisionModel().AddBox(brick_material, size_table_x/2, size_table_y/2, size_table_z/2) # hemi sizes
body_table.GetCollisionModel().BuildModel()
body_table.SetCollide(True)
# Visualization shape
body_table_shape = chrono.ChBoxShape()
body_table_shape.GetBoxGeometry().Size = chrono.ChVectorD(size_table_x/2, size_table_y/2, size_table_z/2)
body_table_shape.SetColor(chrono.ChColor(0.4,0.4,0.5))
body_table.GetAssets().push_back(body_table_shape)
body_table_texture = chrono.ChTexture()
body_table_texture.SetTextureFilename(chrono.GetChronoDataFile('concrete.jpg'))
body_table.GetAssets().push_back(body_table_texture)
my_system.Add(body_table)
# Create a constraint that blocks free 3 x y z translations and 3 rx ry rz rotations
# of the table respect to the floor, and impose that the relative imposed position
# depends on a specified motion law.
link_shaker = chrono.ChLinkLockLock()
link_shaker.Initialize(body_table, body_floor, chrono.CSYSNORM)
my_system.Add(link_shaker)
# ..create the function for imposed x horizontal motion, etc.
mfunY = chrono.ChFunction_Sine(0,1.5,0.001) # phase, frequency, amplitude
link_shaker.SetMotion_Y(mfunY)
# ..create the function for imposed y vertical motion, etc.
mfunZ = chrono.ChFunction_Sine(0,1.5,0.12) # phase, frequency, amplitude
link_shaker.SetMotion_Z(mfunZ)
# Note that you could use other types of ChFunction_ objects, or create
# your custom function by class inheritance (see demo_python.py and the
# sketch below), or also set a function for table rotation, etc.
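# A minimal sketch of such a custom function. It assumes the usual PyChrono
# director pattern of subclassing chrono.ChFunction and overriding Get_y; the
# class is only defined here, not attached, so the demo behaviour is unchanged.
class RampFunction(chrono.ChFunction):
    def __init__(self, slope):
        chrono.ChFunction.__init__(self)
        self.slope = slope
    def Get_y(self, x):
        # Displacement grows linearly with time x
        return self.slope * x
# To drive the table horizontally with it, one would call:
# link_shaker.SetMotion_X(RampFunction(0.01))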
# ---------------------------------------------------------------------
#
# Create an Irrlicht application to visualize the system
#
myapplication = chronoirr.ChIrrApp(my_system, 'PyChrono example', chronoirr.dimension2du(1024,768))
myapplication.AddTypicalSky()
myapplication.AddTypicalLogo(chrono.GetChronoDataFile('logo_pychrono_alpha.png'))
myapplication.AddTypicalCamera(chronoirr.vector3df(0.5,0.5,1.0))
myapplication.AddLightWithShadow(chronoirr.vector3df(2,4,2), # point
chronoirr.vector3df(0,0,0), # aimpoint
9, # radius (power)
1,9, # near, far
30) # angle of FOV
# ==IMPORTANT!== Use this function for adding a ChIrrNodeAsset to all items
# in the system. These ChIrrNodeAsset assets are 'proxies' to the Irrlicht meshes.
# If you need a finer control on which item really needs a visualization proxy in
# Irrlicht, just use application.AssetBind(myitem); on a per-item basis.
myapplication.AssetBindAll()
# ==IMPORTANT!== Use this function for 'converting' into Irrlicht meshes the assets
# that you added to the bodies into 3D shapes, they can be visualized by Irrlicht!
myapplication.AssetUpdateAll()
# If you want to show shadows because you used "AddLightWithShadow()'
# you must remember this:
myapplication.AddShadowAll()
# ---------------------------------------------------------------------
#
# Run the simulation
#
myapplication.SetTimestep(0.001)
myapplication.SetTryRealtime(True)
while(myapplication.GetDevice().run()):
myapplication.BeginScene()
myapplication.DrawAll()
for substep in range(0,5):
myapplication.DoStep()
myapplication.EndScene()
| dariomangoni/chrono | src/demos/python/irrlicht/demo_IRR_earthquake.py | Python | bsd-3-clause | 8,081 |
from django.db.models import get_model
from django.core.urlresolvers import reverse
from oscar.test.testcases import WebTestCase
from oscar_mws.test import factories
AmazonProfile = get_model('oscar_mws', 'AmazonProfile')
class TestAmazonProfileDashboard(WebTestCase):
is_staff = True
def setUp(self):
super(TestAmazonProfileDashboard, self).setUp()
self.product = factories.ProductFactory(amazon_profile=None)
self.marketplace = factories.AmazonMarketplaceFactory()
try:
self.product.amazon_profile
except AmazonProfile.DoesNotExist:
pass
else:
self.fail("product has Amazon profile but shouldn't")
def test_allows_to_create_profile_for_product(self):
form = self.get(reverse('mws-dashboard:profile-create',
args=(self.product.pk,))).form
form['sku'] = 'SAMPLE_SKU'
form['marketplaces'] = (self.marketplace.id,)
page = form.submit()
self.assertRedirects(page, reverse('mws-dashboard:profile-list'))
try:
AmazonProfile.objects.get(product=self.product)
except AmazonProfile.DoesNotExist:
self.fail("Amazon profile not created")
def test_displays_message_for_unkown_product(self):
page = self.get(reverse('mws-dashboard:profile-create', args=(22222,)))
self.assertRedirects(page, reverse('mws-dashboard:profile-list'))
| django-oscar/django-oscar-mws | tests/functional/test_dashboard.py | Python | bsd-3-clause | 1,448 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import tempo.django.fields
class Migration(migrations.Migration):
dependencies = [
('anapp', '0002_nullablemodel'),
]
operations = [
migrations.CreateModel(
name='Movie',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=99, verbose_name=b'Name')),
('schedule', tempo.django.fields.RecurrentEventSetField(verbose_name=b'Schedule')),
],
),
]
| AndrewPashkin/python-tempo | tests/test_django/aproject/anapp/migrations/0003_movie.py | Python | bsd-3-clause | 670 |
import chainer
from chainer_wing.node import Input, Output, Link
# TODO(fukatani): implement systematically.
class Linear(Link):
Input('in_array', (chainer.Variable,))
Input('out_size', (int,))
Input('nobias', (bool,), select=[True, False])
Output('out_array', (chainer.Variable,))
def call_init(self):
self.check_member(('_out_size', '_nobias'))
return 'Linear(None, {out_size}, nobias={nobias}),' \
.format(out_size=self._out_size,
nobias=self._nobias)
@classmethod
def register_chainer_impl(cls):
return chainer.links.Linear
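# For example, a Linear node configured with out_size=100 and nobias=False
# emits the fragment "Linear(None, 100, nobias=False)," into the generated
# network definition via call_init().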
class Maxout(Link):
Input('in_array', (chainer.Variable,))
Input('out_size', (int,))
Input('pool_size', (int,))
Output('out_array', (chainer.Variable,))
def call_init(self):
self.check_member(('_out_size', '_pool_size'))
return 'Maxout(None, {out_size}, {pool_size}),' \
.format(out_size=self._out_size, pool_size=self._pool_size)
@classmethod
def register_chainer_impl(cls):
return chainer.links.Maxout
| fukatani/CW_gui | chainer_wing/CustomNodes/LinkNodes.py | Python | bsd-3-clause | 1,089 |
#!/usr/bin/env python2
import timeit
import numpy as np
import random
from control.optimizer import Optimizer
N = 100
o = Optimizer()
o.mapper = o.tm.thrusts_to_outputs()
thrusts = np.array([8.45, 1.12, -0.15, -12.2, 6.4, 4.4])
desires = np.array([123, 45, -13, -31.123, 31, 90])
def timed():
o.objective(thrusts, desires)
if __name__ == "__main__":
print("Value of objective function: %f" % o.objective(thrusts, desires))
t = timeit.timeit("timed()", setup="from __main__ import timed", number=N)
print("Time for %d calls: %f" % (N, t))
| cuauv/software | control/benchmark.py | Python | bsd-3-clause | 558 |
# -*- coding: utf-8 -*-
# Copyright (c) 2010 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Package des différents contextes utiles à la création, suppression
et connexion de joueurs.
"""
# Connection context
from .connexion.mode_connecte import ModeConnecte
# Creation contexts
from .creation.choix_race import ChoixRace
from .creation.choix_genre import ChoixGenre
from .creation.langue_cmd import LangueCMD
from .creation.nouveau_nom import NouveauNom
from .creation.entrer_pass import EntrerPassJoueur
from .creation.choisir_pass import ChoisirPassJoueur
from .creation.confirmer_pass import ConfirmerPassJoueur
from .creation.recup_vancia import RecupVancia
from .creation.entrer_v_pass import EntrerVPassJoueur
from .creation.presenter_tips import PresenterTips
# Deletion contexts
from .suppression.suppression import Suppression
| stormi/tsunami | src/primaires/joueur/contextes/__init__.py | Python | bsd-3-clause | 2,332 |
# Definition of the estimate function
import numpy as np
import numpy.linalg as lg
############ MIXING TO CONSTUCT H ############
class Polynomial_Matrix(object):
@property
def H(self):
        # Vandermonde matrix built from the sample positions: one column per
        # power of x, from x**0 up to x**self.order.
        H = np.matrix(np.vander(self.x_axis, self.order + 1, increasing=True))
        return H
@property
    def X(self):
        # Polynomial coefficients as a column vector.
        return np.array(np.matrix(self.an).T)
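# A minimal usage sketch, run only when this module is executed directly.
# `PolynomialSignal` is a hypothetical host class (not part of the library);
# the mixin only assumes the host provides `x_axis`, `order` and `an`.
if __name__ == "__main__":
    class PolynomialSignal(Polynomial_Matrix):
        def __init__(self, x_axis, an):
            self.x_axis = x_axis      # sample positions
            self.an = an              # coefficients, lowest order first
            self.order = len(an) - 1  # polynomial order
    signal = PolynomialSignal(np.arange(5), [1.0, -0.5, 0.2])
    print(signal.H * np.matrix(signal.X))  # model output H @ X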
| vincentchoqueuse/parametrix | parametrix/polynomial/tools.py | Python | bsd-3-clause | 396 |
#!/usr/bin/python2.7
"""Tool to generate a graph of commits per day.
The tool reads the repository information of one or more Mercurial repositories
and builds a chart with commit activity per day. This is similar to the commit
chart in github, but generated locally. It creates an SVG file, and launches a
file viewer to display it.
Notice that the mercurial module is not supported in Python3, so we have to use
Python 2.7
"""
__author__ = '[email protected] (Javier Tordable)'
from mercurial import hg, changelog, ui
from svgwrite import Drawing
import argparse
from datetime import date, timedelta
import math
import subprocess
import sys
import os
# Chart file configuration.
CHART_FILE_NAME = 'work.svg'
CHART_FILE_PATH = os.getcwd()
CHART_VIEWER = 'eog'
# Visualization style.
NUM_DAYS_TO_SHOW = 365
DAY_BOX_SIZE = 11
DAY_BOX_SEPARATION = 2
DISTANCE_BETWEEN_BOXES = DAY_BOX_SIZE + DAY_BOX_SEPARATION
MARGIN = 6
# Box colors, sorted from weaker to stronger.
BOX_COLORS = ['#eeeeee', '#d6e685', '#8cc665', '#44a340', '#1e6823']
def create_empty_chart(full_file_name):
"""Creates a chart of the proper dimensions with a white background."""
num_days_in_week = 7
num_weeks = math.ceil(NUM_DAYS_TO_SHOW / 7.0)
if date.today().weekday() + 1 < NUM_DAYS_TO_SHOW % 7:
# We need to draw NUM_DAYS_TO_SHOW % 7 extra days, but on the last week
# we have only space for date.today().weekday() + 1 days.
num_weeks += 1
width = 2 * MARGIN + num_weeks * DAY_BOX_SIZE + \
(num_weeks - 1) * DAY_BOX_SEPARATION
height = 2 * MARGIN + num_days_in_week * DAY_BOX_SIZE + \
(num_days_in_week - 1) * DAY_BOX_SEPARATION
chart = Drawing(full_file_name, size=(width, height))
chart.add(chart.rect(insert=(0, 0), size=(width, height), fill='white'))
return chart
def get_box_color(count):
"""Returns the box color that corresponds to the given count."""
if count < 1:
return BOX_COLORS[0]
elif count == 1:
return BOX_COLORS[1]
elif 2 <= count <= 3:
return BOX_COLORS[2]
elif 4 <= count <= 5:
return BOX_COLORS[3]
else: # 6 <= count.
return BOX_COLORS[4]
def draw_daily_boxes(chart, start_date, cl_counts):
"""Draws the boxes for CL counts for each day."""
first_day_to_show = start_date.weekday()
last_day_to_show = first_day_to_show + NUM_DAYS_TO_SHOW
for day_index in range(first_day_to_show, last_day_to_show):
# Boxes are stacked first by column and then by row.
x = MARGIN + (day_index // 7) * DISTANCE_BETWEEN_BOXES
y = MARGIN + (day_index % 7) * DISTANCE_BETWEEN_BOXES
# Compute the real date from the day index.
day = start_date + timedelta(days=(day_index - first_day_to_show))
if day in cl_counts:
color = get_box_color(cl_counts[day])
else:
color = get_box_color(0)
chart.add(chart.rect(insert=(x,y),
size=(DAY_BOX_SIZE, DAY_BOX_SIZE),
fill=color))
def extract_cl_counts(repository_path, cl_counts):
"""Reads the repository changelog and extracts CL counts per day."""
repository = hg.repository(ui.ui(), repository_path)
changelog = repository.changelog
for cl_index in changelog:
cl_id = changelog.lookup(cl_index)
cl = changelog.read(cl_id)
# The timestamp seems to be the 3rd field in the CL.
# It's given in a tuple. The UNIX timestap is the first field.
timestamp = cl[2][0]
cl_date = date.fromtimestamp(timestamp)
if cl_date in cl_counts:
cl_counts[cl_date] = cl_counts[cl_date] + 1
else:
cl_counts[cl_date] = 1
def view_chart(full_file_name):
"""Launch the image viewer to open the SVG file."""
# Don't print initialization errors.
    subprocess.call([CHART_VIEWER, full_file_name], stderr=open(os.devnull, 'w'))
def main():
parser = argparse.ArgumentParser(description=sys.modules[__name__].__doc__)
parser.add_argument('path', nargs='*', default=None,
help='Root directory for the Mercurial repository.')
args = parser.parse_args()
# Get the changelog data from each repository.
cl_counts = {}
if args.path:
for repository_path in args.path:
extract_cl_counts(repository_path, cl_counts)
else:
# Assume that the current path has a repository.
extract_cl_counts(os.getcwd(), cl_counts)
# Draw the chart and save it in a file.
full_file_name = os.path.join(CHART_FILE_PATH, CHART_FILE_NAME)
chart = create_empty_chart(full_file_name)
start_date = date.today() - timedelta(days=(NUM_DAYS_TO_SHOW - 1))
draw_daily_boxes(chart, start_date, cl_counts)
chart.save()
# Open the image file and print total count.
view_chart(full_file_name)
print('Changes as of: ' + str(date.today()) + ': ' + \
str(sum(cl_counts.itervalues())))
if __name__ == '__main__':
main()
| tordable/activity-chart | work.py | Python | bsd-3-clause | 5,034 |
from django.urls import reverse_lazy
from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
from core.constants import ACTIONS
class ActionSerializer(serializers.ModelSerializer):
action_list = serializers.SerializerMethodField()
model = serializers.StringRelatedField()
    def get_action_list(self, obj):
        action_list = {'view': dict(
            title=_("View"),
            url=obj.get_absolute_url(),
            btn_class='info',
            btn_icon='eye-open',
            visible=False,
        )}
        if not hasattr(obj, 'action_list') or not obj.action_list:
            return action_list
        for action in obj.action_list:
            action_details = ACTIONS[action]
            app_name = obj._meta.app_label
            app_name = app_name if app_name != 'core' else 'panel'
            args = []
            model = obj.__class__.__name__.lower()
            parent = None
            if hasattr(obj, 'parent') and obj.parent:
                parent = obj.parent.__class__.__name__.lower()
                args += [obj.parent.pk]
                url_name = '{app_name}:{parent}_{model}_{action}'
            else:
                url_name = '{app_name}:{model}_{action}'
            args += [obj.pk]
            url_name = url_name.format(
                app_name=app_name,
                model=model,
                parent=parent,
                action=action,
            )
            url = reverse_lazy(url_name, args=args)
            action_list[action] = dict(
                title=action_details['title'],
                url=url,
                btn_class=action_details['level'],
                btn_icon=action_details['icon'],
                visible=True,
            )
        return action_list
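# Illustrative shape of one generated entry (names and URL are hypothetical,
# depending on the ACTIONS constant and URL configuration):
#   {'edit': {'title': 'Edit', 'url': '/panel/invoice/3/edit/',
#             'btn_class': 'warning', 'btn_icon': 'pencil', 'visible': True}}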
| ikcam/django-skeleton | core/api/serializers.py | Python | bsd-3-clause | 1,841 |
#!/usr/bin/env python3
#
# Example:
#   python3 et312-info.py -p /dev/ttyUSB0
#
import sys
import fcntl
import argparse
from time import sleep
sys.path.append("../")
import buttshock.et312
def main():
modes = {0x76:"Waves", 0x77:"Stroke", 0x78:"Climb", 0x79:"Combo", 0x7a:"Intense", 0x7b:"Rhythm",
0x7c:"Audio1",0x7d:"Audio2", 0x7e:"Audio3", 0x80:"Random1", 0x81:"Random2", 0x82:"Toggle",
0x83:"Orgasm",0x84:"Torment",0x85:"Phase1",0x86:"Phase2",0x87:"Phase3",
0x88:"User1",0x89:"User2",0x8a:"User3",0x8b:"User4",0x8c:"User5",0:"None", 0x7f:"Split"}
powerlevels = {1:"Low (1)",2:"Normal (2)",3:"High (3)"}
parser = argparse.ArgumentParser()
parser.add_argument("-p","--port",dest="port",help="Port for ET312 (default /dev/ttyUSB0)")
args = parser.parse_args()
port = "/dev/ttyUSB0" # lazy default
if (args.port):
port = args.port
# Lock the serial port while we use it, wait a few seconds
connected = False
for _ in range(10):
try:
et312 = buttshock.et312.ET312SerialSync(port)
if et312.port.isOpen():
fcntl.flock(et312.port.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
connected = True
break
except Exception as e:
print(e)
sleep(.2)
if (not connected):
print ("Failed")
return
try:
print ("[+] connected")
# no need to do a handshake unless we want to poke
# print ("[+] trying handshake")
# et312.perform_handshake()
# print ("[+] handshake ok")
print("ADC0 (current sense)\t\t: {0:#x}".format(et312.read(0x4060)))
print("ADC1 (MA knob)\t\t\t: {0:#x}".format(et312.read(0x4061)))
print("\tMA scaled value\t\t: %d (mode range %d-%d)" %(et312.read(0x420d),et312.read(0x4086),et312.read(0x4087)))
print("ADC2 (PSU voltage)\t\t: {0:#x}".format(et312.read(0x4062)))
print("ADC3 (Battery voltage)\t\t: {0:#x}".format(et312.read(0x4063)))
print("\tBattery at boot\t\t: {0:.1f}%".format((et312.read(0x4203))*100/256))
print("ADC4 (Level A knob)\t\t: {0:#x}".format(et312.read(0x4064)))
print("ADC5 (Level B knob)\t\t: {0:#x}".format(et312.read(0x4065)))
currentmode =et312.read(0x407b)
print("Power Level\t\t\t: "+powerlevels[et312.read(0x41f4)])
usermodes = et312.read(0x41f3)-0x87
print("User programs loaded\t\t: {0:#d}".format(usermodes))
for i in range (0,usermodes):
startmodule = et312.read(0x8018+i)
if (startmodule < 0xa0):
programlookup = et312.read(0x8000+startmodule-0x60)
programblockstart = 0x8040+programlookup
else:
programlookup = et312.read(0x8000+startmodule-0xa0)
programblockstart = 0x8100+programlookup
print("\tUser %d is module 0x%02x\t: 0x%04x (eeprom)"%(i+1,startmodule,programblockstart))
print("Current Mode\t\t\t: "+modes[currentmode])
if (currentmode == 0x7f):
print("\tSplit Mode A\t\t: "+modes[et312.read(0x41f5)])
print("\tSplit Mode B\t\t: "+modes[et312.read(0x41f6)])
if (currentmode == 0x80):
print("\tCurrent Random Mode\t: "+modes[et312.read(0x4074)])
timeleft = et312.read(0x4075) - et312.read(0x406a)
if (timeleft<0):
timeleft+=256
print("\tTime until change mode\t: {0:#d} seconds ".format(int(timeleft/1.91)))
print("\tMode has been running\t: {0:#d} seconds".format(int((et312.read(0x4089)+et312.read(0x408a)*256)*1.048)))
except Exception as e:
print(e)
if (et312):
print("[+] resetting key")
et312.reset_key() # reset cipher key so easy resync next time
et312.close()
if __name__ == "__main__":
main()
| metafetish/buttshock-py | examples/et312-info.py | Python | bsd-3-clause | 3,960 |
import os
import traceback
from datetime import datetime
from rdflib import Namespace, Graph
# To avoid duplication of namespaces across converters
NS = {
'en': Namespace("http://www.dfki.de/lt/en.owl#"),
'dax': Namespace("http://www.dfki.de/lt/dax.owl#"),
'cfi': Namespace("http://www.dfki.de/lt/cfi.owl#"),
'if': Namespace("http://www.dfki.de/lt/if.owl#"),
'rdf': Namespace("http://www.w3.org/1999/02/22-rdf-syntax-ns#"),
'rdfs': Namespace("http://www.w3.org/2000/01/rdf-schema#"),
'xsd': Namespace("http://www.w3.org/2001/XMLSchema#"),
'cp': Namespace("http://www.dfki.de/lt/companyprofile.owl#"),
'icb': Namespace("http://www.dfki.de/lt/icb.owl#"),
'nace': Namespace("http://www.dfki.de/lt/nace.owl#"),
'time': Namespace("http://www.dfki.de/lt/time.owl#"),
'xebr2xbrl': Namespace("http://www.dfki.de/lt/xebr2xbrl.owl#"),
'dc': Namespace("http://www.dfki.de/lt/dc.owl#"),
'xebr': Namespace('http://www.dfki.de/lt/xebr.owl#'),
'xebr_data': Namespace('http://www.dfki.de/data/xebr.owl#'),
'cp_data': Namespace('http://www.dfki.de/data/companyprofile.owl#'),
'xbrl_be': Namespace('http://www.dfki.de/lt/xbrl_be.owl#'),
'xbrl_es': Namespace('http://www.dfki.de/lt/xbrl_es.owl#'),
'xbrl_it': Namespace('http://www.dfki.de/lt/xbrl_it.owl#'),
'xbrl_es_cnmv': Namespace('http://www.dfki.de/lt/xbrl_es_cnmv.owl#'),
'skos': Namespace('http://www.dfki.de/lt/skos.owl#'),
'owl': Namespace('http://www.w3.org/2002/07/owl#')
}
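# Example: NS['xebr']['balanceSheet'] resolves to the URIRef
# <http://www.dfki.de/lt/xebr.owl#balanceSheet> via standard rdflib
# Namespace item access ('balanceSheet' is an illustrative term).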
def timestamp_to_datetime(timestamp):
return datetime.fromtimestamp(int(timestamp/1000)).isoformat()
def read_space_delimited_file(f, delimiter=' ', comment='#'):
lines = (l for l in f.read().strip().split('\n') if not l.startswith(comment))
return (l.split(delimiter) for l in lines)
def write_graph(graph, outputfile=None, format='n3'):
'''
Write graph to stdout, or to a file if outputfile is specified.
'''
rdf = graph.serialize(format=format)
if not outputfile:
print(rdf.decode('utf-8'))
else:
with open(outputfile, "wb") as f:
f.write(rdf)
def merge_graphs(sources, format='n3'):
graph = Graph()
for source in sources:
graph.parse(data=source.read(), format=format)
return graph
def merge_graphs_in_directory(directory, outputfile, format='n3'):
with open(outputfile, "wb+") as f:
graph=Graph()
for root, file_ in traverse(directory):
extension = os.path.splitext(file_)[1].lstrip('.')
if extension in ['n3', 'xml', 'nt']:
inputfile = root + file_
graph.parse(inputfile, format=extension)
rdf = graph.serialize(format=format)
f.write(rdf)
def traverse(inputdir, extension=''):
"""
Generator to recursively traverse a directory, yields a 2-tuple of the directory and filename
inputdir: root directory to traverse
extension: only yield files with the given file extension
"""
for root, dirs, files in os.walk(inputdir):
if extension:
files = filter(lambda f: f.lower().endswith('.'+extension.lower().lstrip('.')), files)
for file in files:
yield (root + os.sep, file)
def traverse_mirror(inputdir, outputdir, extension='', outextension=''):
'''
Generator to recursively traverse directory 'inputdir', yields a 2-tuple with each input file
and its "mirror image" in 'outputdir'. If outextension is replaced, the extension of the output
file is changed to outextension. It also creates folders in outputdir if they do not already exist.
This function is useful for batch conversion of files.
'''
for inputroot, inputfile in traverse(inputdir, extension):
inpf = inputroot + inputfile
outdir = outputdir + os.sep + inputroot[len(inputdir):]
if not os.path.exists(outdir):
os.makedirs(outdir)
outf = outdir + inputfile
if outextension:
outf = os.path.splitext(outf)[0] + '.' + outextension.lstrip('.')
yield (inpf, outf)
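# A brief usage sketch for traverse_mirror (paths and converter are
# hypothetical), kept as a comment so importing this module stays
# side-effect free:
#
#     for inputfile, outputfile in traverse_mirror('input', 'output', 'xml', 'n3'):
#         graph = my_convert(inputfile)        # assumed converter function
#         write_graph(graph, outputfile, 'n3')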
class CommandBuilder:
'''
Builds command style arguments (like with git e.g. "git add").
Exists because many converters share common commands e.g. for batch conversion
'''
def __init__(self, parser):
self.parser = parser
self.subparsers = parser.add_subparsers(help='commands', dest='command')
self.commands = {}
self.added_format_arg = False
def execute(self, args):
if args.command in self.commands:
self.commands[args.command](args)
def __add_format_arg(self):
if not self.added_format_arg:
self.parser.add_argument('-f', '--format', choices=['turtle', 'n3', 'xml', 'nt'],
default='n3', help="Output format")
self.added_format_arg = True
def add_convert(self, convert_function, default_format='n3'):
self.__add_format_arg()
parser_convert = self.subparsers.add_parser('convert', help='Convert a single file')
parser_convert.add_argument('input', help='Input file')
parser_convert.add_argument('output', nargs='?',
help="Output file. If not specified, output will go to stdout"
)
def command(args):
graph = convert_function(args.input)
write_graph(graph, args.output, args.format or default_format)
self.commands['convert'] = command
return parser_convert
def add_batch_convert(self, convert_function, extension, default_format='n3'):
self.__add_format_arg()
parser_batchconvert = self.subparsers.add_parser('batchconvert',
help='Convert a directory of files recursively, mirroring the structure in the output directory')
parser_batchconvert.add_argument('input', help='Input directory')
parser_batchconvert.add_argument('output', help='Output directory')
parser_batchconvert.add_argument('--merge', dest='merge', help="Merge to file")
def command(args):
if not os.path.isdir(args.input):
raise IOError("Input directory does not exist or is not a directory: %s" % args.input)
if not os.path.exists(args.output):
os.makedirs(args.output)
succeeded, failed = 0, 0
failures = {}
for inputfile, outputfile in traverse_mirror(args.input, args.output, extension, args.format):
print(inputfile + " -> " + outputfile)
try:
graph = convert_function(inputfile)
write_graph(graph, outputfile, args.format or default_format)
succeeded += 1
except KeyboardInterrupt:
return
except Exception as e:
traceback.print_exc()
failures[inputfile] = str(e)
failed += 1
print ("%d Attempted; %d Successes; %d Failures" % (succeeded+failed, succeeded, failed))
if failed > 0:
print("---------\nFailures:\n---------")
for filename in sorted(failures):
print("%s: %s" % (filename, failures[filename]))
if args.merge:
print("Merging graphs to %s" % (args.merge))
merge_graphs_in_directory(args.output, args.merge, format=args.format)
self.commands['batchconvert'] = command
return parser_batchconvert
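# Illustrative sketch (assumed usage, not part of the original module): wiring
# CommandBuilder to an ArgumentParser with a hypothetical convert function.
#
#   import argparse
#
#   def my_convert(inputfile):
#       g = Graph()
#       g.parse(inputfile, format='xml')
#       return g
#
#   parser = argparse.ArgumentParser()
#   builder = CommandBuilder(parser)
#   builder.add_convert(my_convert)
#   builder.add_batch_convert(my_convert, extension='xml')
#   args = parser.parse_args()
#   builder.execute(args)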
| monnetproject/rdfconverters | rdfconverters/util.py | Python | bsd-3-clause | 7,438 |
import os
from setuptools import (
setup,
find_packages,
)
version = '1.0a1'
shortdesc = "AGX UML to Filesystem Transform"
longdesc = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read()
longdesc += open(os.path.join(os.path.dirname(__file__), 'LICENSE.rst')).read()
setup(name='agx.transform.uml2fs',
version=version,
description=shortdesc,
long_description=longdesc,
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Programming Language :: Python',
],
keywords='AGX, Code Generator',
author='BlueDynamics Alliance',
author_email='[email protected]',
url=u'http://github.com/bluedynamics/agx.transform.uml2fs',
license='GNU General Public Licence',
packages=find_packages('src'),
package_dir={'': 'src'},
namespace_packages=['agx', 'agx.transform'],
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
'agx.core',
'node.ext.directory',
],
extras_require = dict(
test=[
'interlude',
]
),
entry_points="""
# -*- Entry points: -*-
""")
| bluedynamics/agx.transform.uml2fs | setup.py | Python | bsd-3-clause | 1,283 |
# -*- coding:utf8 -*-
'''
Purpose: XML-RPC server
    1. Allows the RPC service to be shut down remotely by a client.
Dependencies: SimpleXMLRPCServer
Notes:
    1. From the command line, start the server first, then run the client.
    2. The service handles one request and then shuts down; restart it manually.
'''
__author__ = 'hhstore'
from SimpleXMLRPCServer import SimpleXMLRPCServer
running = True  # global running state
def rpc_test_service():
global running
    running = False  # update the running state
    return "rpc_test_service() is calling..."  # must return a value
def main():
addr = ("localhost", 5000) # 主机名, 端口
server = SimpleXMLRPCServer(addr) # 创建RPC服务.在指定端口,监听请求.
server.register_function(rpc_test_service) # 注册函数
while running: # 自主管理服务,是否允许客户端结束.(非死循环)
print "server on..."
server.handle_request() # 处理RPC服务请求
else:
print "server stop..."
if __name__ == '__main__':
main()
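# Illustrative client sketch (assumed counterpart script, not part of this
# file): one successful call flips 'running' to False, ending the server loop.
#
#   import xmlrpclib
#   proxy = xmlrpclib.ServerProxy("http://localhost:5000/")
#   print proxy.rpc_test_service()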
| hhstore/learning-notes | python/src/exercise/py27/03_Network/RPC/XMLRPC/03_rpc_serv_cli_exit/rpc_server.py | Python | mit | 1,044 |
import re, json, sys
if sys.version_info[0] == 2:
def _is_num(o):
return isinstance(o, int) or isinstance(o, long) or isinstance(o, float)
def _stringify(o):
if isinstance(o, str):
return unicode(o)
if isinstance(o, unicode):
return o
return None
else:
def _is_num(o):
return isinstance(o, int) or isinstance(o, float)
def _stringify(o):
if isinstance(o, bytes):
return o.decode()
if isinstance(o, str):
return o
return None
_id_re = re.compile(r'[$a-zA-Z_][$0-9a-zA-Z_]*\Z')
class CSONEncoder:
def __init__(self, skipkeys=False, ensure_ascii=True, check_circular=True, allow_nan=True, sort_keys=False,
indent=None, default=None):
self._skipkeys = skipkeys
self._ensure_ascii = ensure_ascii
self._allow_nan = allow_nan
self._sort_keys = sort_keys
self._indent = ' ' * (indent or 4)
self._default = default
if check_circular:
self._obj_stack = set()
else:
self._obj_stack = None
def _format_simple_val(self, o):
if o is None:
return 'null'
if isinstance(o, bool):
return 'true' if o else 'false'
if _is_num(o):
return str(o)
s = _stringify(o)
if s is not None:
return self._escape_string(s)
return None
def _escape_string(self, s):
r = json.dumps(s, ensure_ascii=self._ensure_ascii)
return u"'{}'".format(r[1:-1].replace("'", r"\'"))
def _escape_key(self, s):
if s is None or isinstance(s, bool) or _is_num(s):
s = str(s)
s = _stringify(s)
if s is None:
if self._skipkeys:
return None
raise TypeError('keys must be a string')
if not _id_re.match(s):
return self._escape_string(s)
return s
def _push_obj(self, o):
if self._obj_stack is not None:
if id(o) in self._obj_stack:
raise ValueError('Circular reference detected')
self._obj_stack.add(id(o))
def _pop_obj(self, o):
if self._obj_stack is not None:
self._obj_stack.remove(id(o))
def _encode(self, o, obj_val=False, indent='', force_flow=False):
if isinstance(o, list):
if not o:
if obj_val:
yield ' []\n'
else:
yield indent
yield '[]\n'
else:
if obj_val:
yield ' [\n'
else:
yield indent
yield '[\n'
indent = indent + self._indent
self._push_obj(o)
for v in o:
for chunk in self._encode(v, obj_val=False, indent=indent, force_flow=True):
yield chunk
self._pop_obj(o)
yield indent[:-len(self._indent)]
yield ']\n'
elif isinstance(o, dict):
items = [(self._escape_key(k), v) for k, v in o.items()]
if self._skipkeys:
items = [(k, v) for k, v in items if k is not None]
if self._sort_keys:
items.sort()
if force_flow or not items:
if not items:
if obj_val:
yield ' {}\n'
else:
yield indent
yield '{}\n'
else:
if obj_val:
yield ' {\n'
else:
yield indent
yield '{\n'
indent = indent + self._indent
self._push_obj(o)
for k, v in items:
yield indent
yield k
yield ':'
for chunk in self._encode(v, obj_val=True, indent=indent + self._indent, force_flow=False):
yield chunk
self._pop_obj(o)
yield indent[:-len(self._indent)]
yield '}\n'
else:
if obj_val:
yield '\n'
self._push_obj(o)
for k, v in items:
yield indent
yield k
yield ':'
for chunk in self._encode(v, obj_val=True, indent=indent + self._indent, force_flow=False):
yield chunk
self._pop_obj(o)
else:
v = self._format_simple_val(o)
if v is None:
self._push_obj(o)
v = self.default(o)
for chunk in self._encode(v, obj_val=obj_val, indent=indent, force_flow=force_flow):
yield chunk
self._pop_obj(o)
else:
if obj_val:
yield ' '
else:
yield indent
yield v
yield '\n'
def iterencode(self, o):
return self._encode(o)
def encode(self, o):
return ''.join(self.iterencode(o))
def default(self, o):
if self._default is None:
raise TypeError('Cannot serialize an object of type {}'.format(type(o).__name__))
return self._default(o)
def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True, allow_nan=True, cls=None,
indent=None, default=None, sort_keys=False, **kw):
if indent is None and cls is None:
return json.dump(obj, fp, skipkeys=skipkeys, ensure_ascii=ensure_ascii, check_circular=check_circular,
allow_nan=allow_nan, default=default, sort_keys=sort_keys, separators=(',', ':'))
if cls is None:
cls = CSONEncoder
encoder = cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii, check_circular=check_circular,
allow_nan=allow_nan, sort_keys=sort_keys, indent=indent, default=default, **kw)
for chunk in encoder.iterencode(obj):
fp.write(chunk)
def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True, allow_nan=True, cls=None, indent=None,
default=None, sort_keys=False, **kw):
if indent is None and cls is None:
return json.dumps(obj, skipkeys=skipkeys, ensure_ascii=ensure_ascii, check_circular=check_circular,
allow_nan=allow_nan, default=default, sort_keys=sort_keys, separators=(',', ':'))
if cls is None:
cls = CSONEncoder
encoder = cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii, check_circular=check_circular,
allow_nan=allow_nan, sort_keys=sort_keys, indent=indent, default=default, **kw)
return encoder.encode(obj)
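# Illustrative sketch (hypothetical data, not part of the original module):
# with an indent, dumps emits CSON via CSONEncoder; with indent=None and no
# custom cls it falls back to compact json.dumps output.
#
#   data = {'name': 'demo', 'items': [1, 2, 3]}
#   print(dumps(data, indent=4))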
| idleberg/sublime-cson | all/cson/writer.py | Python | mit | 6,879 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from runner.koan import *
class AboutAsserts(Koan):
def test_assert_truth(self):
"""
We shall contemplate truth by testing reality, via asserts.
"""
# Confused? This video should help:
#
# http://bit.ly/about_asserts
self.assertTrue(True) # This should be true
def test_assert_with_message(self):
"""
Enlightenment may be more easily achieved with appropriate messages.
"""
self.assertTrue(True, "This should be true -- Please fix this")
def test_fill_in_values(self):
"""
Sometimes we will ask you to fill in the values
"""
self.assertEqual(2, 1 + 1)
def test_assert_equality(self):
"""
To understand reality, we must compare our expectations against
reality.
"""
expected_value = 2
actual_value = 1 + 1
self.assertTrue(expected_value == actual_value)
def test_a_better_way_of_asserting_equality(self):
"""
Some ways of asserting equality are better than others.
"""
expected_value = 2
actual_value = 1 + 1
self.assertEqual(expected_value, actual_value)
def test_that_unittest_asserts_work_the_same_way_as_python_asserts(self):
"""
Understand what lies within.
"""
# This throws an AssertionError exception
self.assertFalse(False)
def test_that_sometimes_we_need_to_know_the_class_type(self):
"""
What is in a class name?
"""
# Sometimes we will ask you what the class type of an object is.
#
        # For example, contemplate the text string "naval". What is its class type?
# The koans runner will include this feedback for this koan:
#
# AssertionError: '-=> FILL ME IN! <=-' != <type 'str'>
#
# So "naval".__class__ is equal to <type 'str'>? No not quite. This
# is just what it displays. The answer is simply str.
#
# See for yourself:
self.assertEqual(str, "naval".__class__) # It's str, not <type 'str'>
# Need an illustration? More reading can be found here:
#
# http://bit.ly/__class__
| GGXH/python_koans | python_koans/python2/koans/about_asserts.py | Python | mit | 2,296 |
# Generated by Django 2.2 on 2019-06-02 09:44
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('part', '0006_auto_20190526_1215'),
]
operations = [
migrations.RemoveField(
model_name='part',
name='buildable',
),
migrations.RemoveField(
model_name='part',
name='consumable',
),
migrations.AddField(
model_name='part',
name='assembly',
field=models.BooleanField(default=False, help_text='Can this part be built from other parts?', verbose_name='Assembly'),
),
migrations.AddField(
model_name='part',
name='component',
field=models.BooleanField(default=True, help_text='Can this part be used to build other parts?', verbose_name='Component'),
),
migrations.AlterField(
model_name='bomitem',
name='part',
field=models.ForeignKey(help_text='Select parent part', limit_choices_to={'active': True, 'assembly': True}, on_delete=django.db.models.deletion.CASCADE, related_name='bom_items', to='part.Part'),
),
migrations.AlterField(
model_name='bomitem',
name='sub_part',
field=models.ForeignKey(help_text='Select part to be used in BOM', limit_choices_to={'active': True, 'component': True}, on_delete=django.db.models.deletion.CASCADE, related_name='used_in', to='part.Part'),
),
]
| inventree/InvenTree | InvenTree/part/migrations/0007_auto_20190602_1944.py | Python | mit | 1,574 |
class ArtifactMetadataUpdater(object):
def __init__(self, bucket_container, identity):
"""
Args:
bucket_container(shelf.metadata.bucket_container.BucketContainer)
identity(shelf.resource_identity.ResourceIdentity)
"""
self.bucket_container = bucket_container
self.identity = identity
self._metadata = None
@property
def metadata(self):
"""
Returns:
schemas/metadata.json|None: None if run was not executed.
"""
return self._metadata
def run(self):
"""
Populates the metadata property. It will also ensure that the
metadata is in a usable state. In other words, all required
properties are populated.
"""
portal = self.bucket_container.cloud_portal
initializer = self.bucket_container.initializer
metadata = portal.load(self.identity.cloud_metadata)
metadata = initializer.update(self.identity, metadata)
portal.update(self.identity.cloud_metadata, metadata)
self._metadata = metadata
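# Illustrative sketch (assumed collaborators, not part of the original module):
# the container must expose 'cloud_portal' and 'initializer', and the identity
# must expose 'cloud_metadata', as documented in __init__ above.
#
#   updater = ArtifactMetadataUpdater(bucket_container, identity)
#   updater.run()
#   metadata = updater.metadata  # populated schemas/metadata.json structure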
| kyle-long/pyshelf | shelf/bucket_update/artifact_metadata_updater.py | Python | mit | 1,142 |
# encoding: UTF-8
from __future__ import absolute_import
from .vnokex import *
# Apply for these two keys on the OkCoin website; they correspond to the username and password
apiKey = 'your accessKey'
secretKey = 'your secretKey'
# create the API object
api = OkexSpotApi()
api.connect(apiKey, secretKey, True)
sleep(3)
#api.login()
api.subscribeSpotTicker("bch_btc")
api.subscribeSpotDepth("bch_btc")
api.subscribeSpotDepth("bch_btc", 5)
api.subscribeSpotDeals("bch_btc")
api.subscribeSpotKlines("bch_btc","30min")
#api.spotTrade("etc_usdt","sell", "50" , "0.01")
#api.spotCancelOrder("etc_btc","44274138")
#api.spotUserInfo()
#api.spotOrderInfo("etc_btc", 44284731)
# api = OkexFuturesApi()
# api.connect(apiKey, secretKey, True)
# sleep(3)
#api.subsribeFutureTicker("btc","this_week")
#api.subscribeFutureKline("btc","this_week", "30min")
#api.subscribeFutureDepth("btc","this_week")
#api.subscribeFutureDepth("btc","this_week", 5)
#api.subscribeFutureTrades("btc","this_week")
#api.subscribeFutureIndex("btc")
#api.subscribeFutureForecast_price("btc")
#api.login()
#api.futureTrade( "etc_usd", "this_week" ,"1" , 20 , 1 , _match_price = '0' , _lever_rate = '10') # 14245727693
#api.futureCancelOrder("etc_usd","14245727693" , "this_week")
#api.futureUserInfo()
#api.futureOrderInfo("etc_usd" , "14245727693" , "this_week" , '1', '1' , '10')
# api.subscribeFutureTrades()
'''
Futures account info, position info, etc. are all pushed automatically after login.
That is what the official docs say; it has not been verified in practice yet.
'''
input() | mumuwoyou/vnpy-master | beta/api/okex/test.py | Python | mit | 1,501 |
# vim: set et ts=4 sw=4 fdm=marker
"""
MIT License
Copyright (c) 2016 Jesse Hogan
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
# TODO Add Tests
from entities import *
from pdb import set_trace; B=set_trace
import logging
from logging import handlers, Handler
class logs(entities):
_instance = None
def __init__(self):
super().__init__()
@classmethod
def getinstance(cls):
        if cls._instance is None:
cls._instance = logs()
return cls._instance
@property
def default(self):
return self.first
class log(entity):
def __init__(self, addr, fac, tag, fmt, lvl):
super().__init__()
self._logger = logging.getLogger()
fmt = tag + fmt
fmt = logging.Formatter(fmt)
self._logger.setLevel(getattr(logging, lvl))
hnd = logging.handlers.SysLogHandler(addr, fac)
hnd.setFormatter(fmt)
self._logger.addHandler(hnd)
hnd = log.callbackhandler(self.callback)
self._logger.addHandler(hnd)
self.onlog = event()
def _self_onlog(self, src, eargs):
pass
class callbackhandler(Handler):
def __init__(self, callback):
super().__init__()
self.callback = callback
def emit(self, rec):
self.callback(rec)
def callback(self, rec):
eargs = log.addlogeventargs(rec)
self.onlog(self, eargs)
class addlogeventargs(eventargs):
def __init__(self, rec):
self.record = rec
    # Use properties to expose the direct logger methods. Doing so allows the
    # %(lineno)d LogRecord attribute to display the line number where the
    # method was actually invoked.
@property
def debug(self):
return self._logger.debug
@property
def info(self):
return self._logger.info
@property
def warning(self):
return self._logger.warning
@property
def error(self):
return self._logger.error
@property
def critical(self):
return self._logger.critical
@property
def exception(self):
return self._logger.exception
@staticmethod
def create(d):
addr = d['address']
fac = getattr(logging.handlers.SysLogHandler, d['facility'])
tag = d['tag']
fmt = d['format']
lvl = d['level']
return log(addr, fac, tag, fmt, lvl)
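# Illustrative sketch (assumed configuration values, not part of the original
# module): building a syslog-backed log from a config dict via log.create.
#
#   conf = {
#       'address': '/dev/log',
#       'facility': 'LOG_USER',
#       'tag': 'myapp ',
#       'format': '%(levelname)s %(message)s',
#       'level': 'DEBUG',
#   }
#   l = log.create(conf)
#   l.info('hello from logs.py')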
| jhogan/commonpy | logs.py | Python | mit | 3,390 |
import time
import sys
import _mysql
import random
import string
import re
import os
import urllib.parse
from selenium import webdriver
from selenium.webdriver.support.ui import Select
import selenium.webdriver.chrome.service as service
from shutil import copyfile
service = service.Service('D:\ChromeDriver\chromedriver')
service.start()
capabilities = {'chrome.binary': 'C:\Program Files (x86)\Google\Chrome\Application\chrome'} # Chrome path is different for everyone
driver = webdriver.Remote(service.service_url, capabilities)
driver.set_window_size(sys.argv[1], sys.argv[2]);
try:
# Check to see if it was added
db=_mysql.connect('localhost','root','root','paws_db')
rand_fname=''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(6))
rand_lname=''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(6))
rand_mail=''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(6))
db.query("INSERT INTO fosters (first_name,last_name,address,email,created,is_deleted) VALUES(\""+rand_fname+"\",\""+rand_lname+"\",\"55 Gato Way\",\""+rand_mail+"@mail.com\",NOW(),true);");
db.store_result()
db.query("SELECT id,first_name FROM fosters where last_name=\""+rand_lname+"\" AND email=\""+rand_mail+"@mail.com\"")
r=db.store_result()
k=r.fetch_row(1,1)
a_id = k[0].get('id')
curfilePath = os.path.abspath(__file__)
    curDir = os.path.abspath(os.path.join(curfilePath,os.pardir)) # this returns the directory in which this python file resides
parentDir = os.path.abspath(os.path.join(curDir,os.pardir))
grandParentDir = os.path.abspath(os.path.join(parentDir,os.pardir))
webroot = os.path.join(grandParentDir,"webroot","files","fosters",a_id)
rand_default=''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(6))
rand_new=''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(6))
file_path_1 = urllib.parse.urljoin('files/fosters/',a_id+"/"+rand_default)
file_path_2 = urllib.parse.urljoin('files/fosters/',a_id+"/"+rand_new)
db.query('INSERT INTO files (entity_type,entity_id,is_photo,file_path,mime_type,file_size,file_ext,created,is_deleted,original_filename) VALUES(4,'+a_id+',1,"'+file_path_1+'","image/jpg",43466,"png",NOW(),0,"user.png");')
db.store_result()
db.query('INSERT INTO files (entity_type,entity_id,is_photo,file_path,mime_type,file_size,file_ext,created,is_deleted,original_filename) VALUES(4,'+a_id+',1,"'+file_path_2+'","image/jpg",5134,"jpg",NOW(),0,"user1.jpg");')
db.store_result()
db.query('SELECT id FROM files where file_path="'+file_path_1+'"')
r=db.store_result()
k=r.fetch_row(1,1)
file_1_id = k[0].get('id')
db.query('SELECT id FROM files where file_path="'+file_path_2+'"')
r=db.store_result()
k=r.fetch_row(1,1)
file_2_id = k[0].get('id')
db.query('UPDATE fosters SET profile_pic_file_id='+file_1_id+' WHERE id='+a_id+';')
db.store_result()
if not os.path.exists(webroot):
os.makedirs(webroot)
copyfile(os.getcwd()+"/img/user.png", os.path.join(webroot,rand_default+".png"))
copyfile(os.getcwd()+"/img/user1.jpg", os.path.join(webroot,rand_new+".jpg"))
copyfile(os.getcwd()+"/img/user.png", os.path.join(webroot,rand_default+"_tn.png"))
copyfile(os.getcwd()+"/img/user1.jpg", os.path.join(webroot,rand_new+"_tn.jpg"))
for root,dir,files in os.walk(webroot):
for f in files:
os.chmod(os.path.join(root, f), 777)
driver.get('http://localhost:8765');
driver.find_element_by_id('email').send_keys('[email protected]')
driver.find_element_by_id('password').send_keys('password')
driver.find_element_by_css_selector('input[type="submit"]').click()
driver.get('http://localhost:8765/fosters/view/'+a_id)
upload_elem = driver.find_element_by_css_selector('a[data-ix="attachment-notification"]')
upload_elem.click()
driver.find_element_by_css_selector('div.picture-file[data-file-id="'+file_2_id+'"]').click()
driver.find_element_by_id("mark-profile-pic-btn").click()
driver.get('http://localhost:8765/fosters/view/'+a_id)
new_img = driver.find_element_by_css_selector('div.profile-header > img')
img_src = new_img.get_attribute('src')
if rand_new in img_src:
print("pass")
else:
print("fail")
driver.quit()
except Exception as e:
print(e)
print("fail")
| TheParrotsAreComing/PAWS | TestingAssets/Fosters/delta_default_pic.py | Python | mit | 4,314 |
#!/usr/bin/python
#
# isprime.py
# Generates a list of prime numbers in a given range.
#
# Exercise 4.4:
# a) Write a function that determines whether a number is prime.
# b) Use this function in a program that determines and prints all the prime
# numbers between 2 and 1,000.
#
# Author: Billy Wilson Arante
# Created: 2016/08/07 PHT
#
def isprime(n):
    """Checks if a number is prime by trial division."""
    if n < 2:  # 0 and 1 are not prime
        return False
    m = n - 1  # candidate divisor
    while m > 1:
        if n % m == 0:  # found a divisor, so n is not prime
            return False
        m = m - 1
    return True
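# Quick illustration (not part of the exercise): isprime(7) tests the divisors
# 6, 5, 4, 3, 2, finds none that divide evenly, and returns True; isprime(9)
# hits 9 % 3 == 0 and returns False.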
def main():
"""Main"""
for x in range(2, 1001):
if isprime(x):
print x,
if __name__ == "__main__":
main()
| arantebillywilson/python-snippets | py2/htp/ex04/isprime.py | Python | mit | 718 |
#!/usr/bin/env python
#-*- coding: utf-8 -*-
import os
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
def pred_visualization(fname, arrays, picks, img_shape, tile_spacing=(0,0),
scale_rows_to_unit_interval=True,
output_pixel_vals=True):
"""Used for visualization of predictions
Args:
fname: filename for saving the image
arrays: list of arrays containing the frames, first array is assumed to be
ground truth (all of shape Nxnframesxframesize**2)
picks: list containing indices of cases that should be used
img_shape: shape of a frame
tile_spacing: spacing between the tiles
scale_rows_to_unit_interval: see tile_raster_images
output_pixel_vals: see tile_raster_images
"""
ncases = len(picks)
narrays = len(arrays)
if narrays > 1:
horizon = arrays[1].shape[1]
horizon_gt = arrays[0].shape[1]
n_presteps = horizon_gt - horizon
if n_presteps > 0:
visdata = np.ones((ncases, horizon_gt * narrays, np.prod(img_shape)))
visdata[:,:horizon_gt] = arrays[0][picks]
for i in range(1, narrays):
visdata[:, i*horizon_gt:(i+1)*horizon_gt] = \
np.hstack((
(np.ones((ncases, n_presteps, np.prod(img_shape)))),
arrays[i][picks]))
else:
visdata = np.hstack([arrays[i][picks] for i in range(narrays)])
else:
horizon = arrays[0].shape[1]
horizon_gt = horizon
visdata = np.hstack([arrays[i][picks] for i in range(narrays)])
visdata = visdata.reshape(ncases*narrays*horizon_gt,-1)
im = tile_raster_images(visdata, img_shape, (ncases*narrays, horizon_gt),
tile_spacing,
scale_rows_to_unit_interval, output_pixel_vals)
for i in range(len(picks)*len(arrays)):
#insert white patches for n_presteps
for j in range(horizon_gt-horizon):
if i % len(arrays) != 0:
im[i*img_shape[0] + i*tile_spacing[0]:(i+1)*img_shape[0] + i*tile_spacing[0],
j*img_shape[1] + j*tile_spacing[1]:(j+1)*img_shape[1] + j*tile_spacing[1]] = 255
#np.insert(im, [i * len(arrays) * img_shape[0] + i * (len(arrays)-1) * tile_spacing[0] for i in range(len(picks))], 0)
h,w = im.shape
fig = plt.figure(frameon=False)
#fig.set_size_inches(1,h/np.float(w))
fig.set_size_inches(w/24.,h/24.)
ax = plt.Axes(fig, [0.,0.,1.,1.])
ax.set_axis_off()
fig.add_axes(ax)
ax.imshow(im, aspect='normal', interpolation='nearest')
fig.savefig(fname, dpi=24)
return im
def scale_to_unit_interval(ndar, eps=1e-8):
""" Scales all values in the ndarray ndar to be between 0 and 1 """
ndar = ndar.copy()
ndar -= ndar.min()
ndar *= 1.0 / (ndar.max()+eps)
return ndar
def tile_raster_images(X, img_shape, tile_shape, tile_spacing = (0, 0),
scale_rows_to_unit_interval = True, output_pixel_vals = True):
"""
Transform an array with one flattened image per row, into an array in
    which images are reshaped and laid out like tiles on a floor.
This function is useful for visualizing datasets whose rows are images,
and also columns of matrices for transforming those rows
(such as the first layer of a neural net).
:type X: a 2-D ndarray or a tuple of 4 channels, elements of which can
be 2-D ndarrays or None;
:param X: a 2-D array in which every row is a flattened image.
:type img_shape: tuple; (height, width)
:param img_shape: the original shape of each image
:type tile_shape: tuple; (rows, cols)
:param tile_shape: the number of images to tile (rows, cols)
:param output_pixel_vals: if output should be pixel values (i.e. int8
values) or floats
:param scale_rows_to_unit_interval: if the values need to be scaled before
being plotted to [0, 1] or not
:returns: array suitable for viewing as an image.
(See:`PIL.Image.fromarray`.)
:rtype: a 2-d array with same dtype as X.
"""
assert len(img_shape) == 2
assert len(tile_shape) == 2
assert len(tile_spacing) == 2
# The expression below can be re-written in a more C style as
# follows :
#
# out_shape = [0, 0]
# out_shape[0] = (img_shape[0]+tile_spacing[0])*tile_shape[0] -
# tile_spacing[0]
# out_shape[1] = (img_shape[1]+tile_spacing[1])*tile_shape[1] -
# tile_spacing[1]
out_shape = [(ishp + tsp) * tshp - tsp for ishp, tshp, tsp
in zip(img_shape, tile_shape, tile_spacing)]
if isinstance(X, tuple):
assert len(X) == 4
# Create an output numpy ndarray to store the image
if output_pixel_vals:
out_array = np.zeros((out_shape[0], out_shape[1], 4), dtype='uint8')
else:
out_array = np.zeros((out_shape[0], out_shape[1], 4), dtype=X.dtype)
#colors default to 0, alpha defaults to 1 (opaque)
if output_pixel_vals:
channel_defaults = [0, 0, 0, 255]
else:
channel_defaults = [0., 0., 0., 1.]
for i in xrange(4):
if X[i] is None:
# if channel is None, fill it with zeros of the correct
# dtype
dt = out_array.dtype
if output_pixel_vals:
dt = 'uint8'
out_array[:, :, i] = np.zeros(out_shape,
dtype=dt) + channel_defaults[i]
else:
# use a recurrent call to compute the channel and store it
# in the output
out_array[:, :, i] = tile_raster_images(
X[i], img_shape, tile_shape, tile_spacing,
scale_rows_to_unit_interval, output_pixel_vals)
return out_array
else:
# if we are dealing with only one channel
H, W = img_shape
Hs, Ws = tile_spacing
# generate a matrix to store the output
dt = X.dtype
if output_pixel_vals:
dt = 'uint8'
out_array = np.zeros(out_shape, dtype=dt)
for tile_row in xrange(tile_shape[0]):
for tile_col in xrange(tile_shape[1]):
if tile_row * tile_shape[1] + tile_col < X.shape[0]:
if scale_rows_to_unit_interval:
# if we should scale values to be between 0 and 1
# do this by calling the `scale_to_unit_interval`
# function
this_img = scale_to_unit_interval(
X[tile_row * tile_shape[1] +
tile_col].reshape(img_shape))
else:
this_img = X[tile_row * tile_shape[1] +
tile_col].reshape(img_shape)
# add the slice to the corresponding position in the
# output array
c = 1
if output_pixel_vals:
c = 255
out_array[
tile_row * (H+Hs):tile_row*(H+Hs)+H,
tile_col * (W+Ws):tile_col*(W+Ws)+W
] \
= this_img * c
return out_array
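# Illustrative sketch (hypothetical data, not part of the original module):
# tiling 100 random 28x28 "images" into a 10x10 grid with 1-pixel spacing.
#
#   X = np.random.rand(100, 28 * 28)
#   im = tile_raster_images(X, img_shape=(28, 28), tile_shape=(10, 10),
#                           tile_spacing=(1, 1))
#   plt.imshow(im, cmap=matplotlib.cm.gray, interpolation='nearest')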
def dispims_white(invwhitening, M, height, width, border=0, bordercolor=0.0,
layout=None, **kwargs):
""" Display a whole stack (colunmwise) of vectorized matrices. Useful
eg. to display the weights of a neural network layer.
"""
numimages = M.shape[1]
M = np.dot(invwhitening, M)
if layout is None:
n0 = int(np.ceil(np.sqrt(numimages)))
n1 = int(np.ceil(np.sqrt(numimages)))
else:
n0, n1 = layout
im = bordercolor * np.ones(((height+border)*n0+border,
(width+border)*n1+border), dtype='<f8')
for i in range(n0):
for j in range(n1):
if i*n1+j < M.shape[1]:
im[i*(height+border)+border:(i+1)*(height+border)+border,
j*(width+border)+border :(j+1)*(width+border)+border] =\
np.vstack((
np.hstack((
np.reshape(M[:, i*n1+j],
(height, width)),
bordercolor*np.ones((height, border),
dtype=float))),
bordercolor*np.ones((border, width+border),
dtype=float)))
plt.imshow(im, cmap=matplotlib.cm.gray, interpolation='nearest', **kwargs)
def CreateMovie(filename, plotter, numberOfFrames, fps):
for i in range(numberOfFrames):
plotter(i)
fname = '_tmp%05d.png' % i
plt.savefig(fname)
plt.clf()
#os.system("rm %s.mp4" % filename)
#os.system("ffmpeg -r "+str(fps)+" -b 1800 -i _tmp%05d.png "+filename+".mp4")
os.system("convert -delay 20 -loop 0 _tmp*.png " +filename+".gif")
os.system("rm _tmp*.png")
def dispimsmovie_patchwise(filename, M, inv, patchsize, fps=5, *args,
**kwargs):
numframes = M.shape[0] / inv.shape[1]
n = M.shape[0]/numframes
def plotter(i):
M_ = M[i*n:n*(i+1)]
M_ = np.dot(inv,M_)
width = int(np.ceil(np.sqrt(M.shape[1])))
image = tile_raster_images(
M_.T, img_shape=(patchsize,patchsize),
tile_shape=(10,10), tile_spacing = (1,1),
scale_rows_to_unit_interval = True, output_pixel_vals = True)
plt.imshow(image,cmap=matplotlib.cm.gray,interpolation='nearest')
plt.axis('off')
CreateMovie(filename, plotter, numframes, fps)
def dispimsmovie(filename, W, filters, nframes, fps=5):
patchsize = np.uint8(np.sqrt(W.shape[0]))
def plotter(i):
dispims_white(W, filters[i*W.shape[1]:(i+1)*W.shape[1], :], patchsize,
patchsize, 1, bordercolor=filters.mean(),
vmin=filters.min(), vmax=filters.max()*0.8)
plt.axis('off')
CreateMovie(filename, plotter, nframes, fps)
def visualizefacenet(fname, imgs, patches_left, patches_right,
true_label, predicted_label):
"""Builds a plot of facenet with attention per RNN step and
classification result
"""
nsamples = imgs.shape[0]
nsteps = patches_left.shape[1]
is_correct = true_label == predicted_label
w = nsteps + 2 + (nsteps % 2)
h = nsamples * 2
plt.clf()
plt.gray()
for i in range(nsamples):
plt.subplot(nsamples, w//2, i*w//2 + 1)
plt.imshow(imgs[i])
msg = ('Prediction: ' + predicted_label[i] + ' TrueLabel: ' +
true_label[i])
if is_correct[i]:
plt.title(msg,color='green')
else:
plt.title(msg,color='red')
plt.axis('off')
for j in range(nsteps):
plt.subplot(h, w, i*2*w + 2 + 1 + j)
plt.imshow(patches_left[i, j])
plt.axis('off')
plt.subplot(h, w, i*2*w + 2 + 1 + j + w)
plt.imshow(patches_right[i, j])
plt.axis('off')
plt.show()
plt.savefig(fname)
if __name__ == '__main__':
from scipy.misc import lena
imgs = lena()[None, ...].repeat(3, axis=0)
patches_left = lena()[None, None, :256].repeat(3, axis=0).repeat(5, axis=1)
patches_right = lena()[None, None, 256:].repeat(3, axis=0).repeat(5, axis=1)
true_label = np.array(['angry', 'angry', 'sad'])
predicted_label = np.array(['sad'] * 3)
visualizefacenet('lena.pdf', imgs, patches_left, patches_right,
true_label, predicted_label)
# vim: set ts=4 sw=4 sts=4 expandtab:
| saebrahimi/Emotion-Recognition-RNN | common/disptools.py | Python | mit | 11,942 |
"""Component to integrate the Home Assistant cloud."""
import asyncio
import json
import logging
import os
import voluptuous as vol
from homeassistant.const import (
EVENT_HOMEASSISTANT_START, CONF_REGION, CONF_MODE)
from . import http_api, iot
from .const import CONFIG_DIR, DOMAIN, SERVERS
REQUIREMENTS = ['warrant==0.5.0']
_LOGGER = logging.getLogger(__name__)
CONF_COGNITO_CLIENT_ID = 'cognito_client_id'
CONF_RELAYER = 'relayer'
CONF_USER_POOL_ID = 'user_pool_id'
MODE_DEV = 'development'
DEFAULT_MODE = MODE_DEV
DEPENDENCIES = ['http']
CONFIG_SCHEMA = vol.Schema({
DOMAIN: vol.Schema({
vol.Optional(CONF_MODE, default=DEFAULT_MODE):
vol.In([MODE_DEV] + list(SERVERS)),
# Change to optional when we include real servers
vol.Required(CONF_COGNITO_CLIENT_ID): str,
vol.Required(CONF_USER_POOL_ID): str,
vol.Required(CONF_REGION): str,
vol.Required(CONF_RELAYER): str,
}),
}, extra=vol.ALLOW_EXTRA)
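# Illustrative configuration sketch (placeholder values, not part of this
# module) matching CONFIG_SCHEMA above, as it might appear in
# configuration.yaml:
#
#   cloud:
#     mode: development
#     cognito_client_id: YOUR_CLIENT_ID
#     user_pool_id: YOUR_POOL_ID
#     region: us-east-1
#     relayer: wss://example.invalid/relayer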
@asyncio.coroutine
def async_setup(hass, config):
"""Initialize the Home Assistant cloud."""
if DOMAIN in config:
kwargs = config[DOMAIN]
else:
kwargs = {CONF_MODE: DEFAULT_MODE}
cloud = hass.data[DOMAIN] = Cloud(hass, **kwargs)
@asyncio.coroutine
def init_cloud(event):
"""Initialize connection."""
yield from cloud.initialize()
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, init_cloud)
yield from http_api.async_setup(hass)
return True
class Cloud:
"""Store the configuration of the cloud connection."""
def __init__(self, hass, mode, cognito_client_id=None, user_pool_id=None,
region=None, relayer=None):
"""Create an instance of Cloud."""
self.hass = hass
self.mode = mode
self.email = None
self.id_token = None
self.access_token = None
self.refresh_token = None
self.iot = iot.CloudIoT(self)
if mode == MODE_DEV:
self.cognito_client_id = cognito_client_id
self.user_pool_id = user_pool_id
self.region = region
self.relayer = relayer
else:
info = SERVERS[mode]
self.cognito_client_id = info['cognito_client_id']
self.user_pool_id = info['user_pool_id']
self.region = info['region']
self.relayer = info['relayer']
@property
def is_logged_in(self):
"""Get if cloud is logged in."""
return self.email is not None
@property
def user_info_path(self):
"""Get path to the stored auth."""
return self.path('{}_auth.json'.format(self.mode))
@asyncio.coroutine
def initialize(self):
"""Initialize and load cloud info."""
def load_config():
"""Load the configuration."""
# Ensure config dir exists
path = self.hass.config.path(CONFIG_DIR)
if not os.path.isdir(path):
os.mkdir(path)
user_info = self.user_info_path
if os.path.isfile(user_info):
with open(user_info, 'rt') as file:
info = json.loads(file.read())
self.email = info['email']
self.id_token = info['id_token']
self.access_token = info['access_token']
self.refresh_token = info['refresh_token']
yield from self.hass.async_add_job(load_config)
if self.email is not None:
yield from self.iot.connect()
def path(self, *parts):
"""Get config path inside cloud dir."""
return self.hass.config.path(CONFIG_DIR, *parts)
@asyncio.coroutine
def logout(self):
"""Close connection and remove all credentials."""
yield from self.iot.disconnect()
self.email = None
self.id_token = None
self.access_token = None
self.refresh_token = None
yield from self.hass.async_add_job(
lambda: os.remove(self.user_info_path))
def write_user_info(self):
"""Write user info to a file."""
with open(self.user_info_path, 'wt') as file:
file.write(json.dumps({
'email': self.email,
'id_token': self.id_token,
'access_token': self.access_token,
'refresh_token': self.refresh_token,
}, indent=4))
| stefan-jonasson/home-assistant | homeassistant/components/cloud/__init__.py | Python | mit | 4,402 |
#! /usr/bin/env python
from setuptools import find_packages, setup
setup(
name='morph_seg',
version='0.1.0',
description="Morphological segmentation experiments",
author='Judit Acs',
author_email='[email protected]',
packages=find_packages(),
package_dir={'': '.'},
provides=['morph_seg'],
)
| juditacs/morph-segmentation-experiments | setup.py | Python | mit | 326 |
from argparse import ArgumentParser
from flexget import options
from flexget.event import event
from flexget.terminal import TerminalTable, colorize, console, disable_colors, table_parser
try:
from irc_bot.simple_irc_bot import IRCChannelStatus, SimpleIRCBot
except ImportError:
SimpleIRCBot = None
IRCChannelStatus = None
def do_cli(manager, options):
"""Handle irc cli"""
if SimpleIRCBot is None:
console('irc_bot is not installed. install using `pip install irc_bot`.')
return
if hasattr(options, 'table_type') and options.table_type == 'porcelain':
disable_colors()
action_map = {'status': action_status, 'restart': action_restart, 'stop': action_stop}
# NOTE: Direct importing of other plugins is discouraged
from flexget.components.irc.irc import irc_manager
if irc_manager is None:
console('IRC daemon does not appear to be running.')
return
action_map[options.irc_action](options, irc_manager)
def action_status(options, irc_manager):
connection = options.irc_connection
try:
status = irc_manager.status(connection)
except ValueError as e:
console('ERROR: %s' % e.args[0])
return
header = ['Name', 'Alive', 'Channels', 'Server']
table_data = []
for connection in status:
for name, info in connection.items():
alive = colorize('green', 'Yes') if info['alive'] else colorize('red', 'No')
channels = []
for channel in info['channels']:
for channel_name, channel_status in channel.items():
channels.append(channel_name)
if channel_status == IRCChannelStatus.CONNECTED:
channels[-1] = colorize('green', '* ' + channels[-1])
table_data.append(
[name, alive, ', '.join(channels), '%s:%s' % (info['server'], info['port'])]
)
table = TerminalTable(*header, table_type=options.table_type)
for row in table_data:
table.add_row(*row)
console(table)
console(colorize('green', ' * Connected channel'))
def action_restart(options, irc_manager):
connection = options.irc_connection
try:
console('Restarting irc connection %s. It may take a short while.' % connection)
irc_manager.restart_connections(connection)
console(
'Successfully restarted {0}. Use `flexget irc status {0}` to check its status.'.format(
connection or 'all'
)
)
except KeyError:
console('ERROR: %s is not a valid irc connection' % connection)
def action_stop(options, irc_manager):
connection = options.irc_connection
try:
console('Stopping irc connection %s. It may take a short while.' % connection)
irc_manager.stop_connections(wait=False, name=connection)
console(
'Successfully stopped {0}. Use `flexget irc status {0}` to check its status.'.format(
connection or 'all'
)
)
except KeyError:
console('ERROR: %s is not a valid irc connection' % connection)
@event('options.register')
def register_parser_arguments():
# Common option to be used in multiple subparsers
irc_parser = ArgumentParser(add_help=False)
irc_parser.add_argument('irc_connection', nargs='?', help="Title of the irc connection")
# Register subcommand
parser = options.register_command('irc', do_cli, help='View and manage irc connections')
# Set up our subparsers
subparsers = parser.add_subparsers(title='actions', metavar='<action>', dest='irc_action')
subparsers.add_parser(
'status',
parents=[irc_parser, table_parser],
help='Shows status for specific irc connection',
)
subparsers.add_parser('restart', parents=[irc_parser], help='Restart an irc connection')
subparsers.add_parser('stop', parents=[irc_parser], help='Stops an irc connection')
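# Illustrative CLI sketch (hypothetical connection name) for the subcommands
# registered above:
#
#   flexget irc status myconnection
#   flexget irc restart myconnection
#   flexget irc stop myconnection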
| Flexget/Flexget | flexget/components/irc/cli.py | Python | mit | 3,979 |
from django import forms
from . import models
class AccountForm(forms.ModelForm):
class Meta:
model = models.User
exclude = ('first_name', 'last_name', 'password', 'is_staff',
'is_active', 'is_superuser', 'last_login', 'date_joined',
'groups', 'user_permissions', 'email')
class TimezoneForm(forms.Form):
CHOICES = [('', '')]
CHOICES.extend(models.TIMEZONE_CHOICES)
timezone = forms.ChoiceField(choices=CHOICES, required=False)
def __init__(self, request, *args, **kwargs):
super(TimezoneForm, self).__init__(*args, **kwargs)
self.request = request
self.fields['timezone'].initial = request.session.get('django_timezone',
"")
def save(self):
tz = self.cleaned_data['timezone']
self.request.session['django_timezone'] = tz
if self.request.user.is_authenticated():
self.request.user.timezone = tz
self.request.user.save()
| reidwooten99/botbot-web | botbot/apps/accounts/forms.py | Python | mit | 1,038 |
#!/usr/bin/env python
#|
#| This python script takes a traits files, which looks like the following
#|
#| samplename trait1 trait2 etc
#|
#| By finding the traits and breaking them into bit strings, so they can be loaded
#| into the nexus output file
#|
import os
import numpy
import argparse
import sys
import uuid
TRAITS_TEMPLATE="""BEGIN TRAITS;
Dimensions NTRAITS={0};
Format labels=yes missing=? separator=Comma;
TraitLabels {1};
Matrix
{2}
;
END;
"""
TAXA_TEMPLATE="""#NEXUS
BEGIN TAXA;
DIMENSIONS NTAX={0};
TAXLABELS
{1}
;
END;
"""
def generate_traits_dictionary(trait_file):
"""
    Read the traits file into a numpy array of strings (one row per line, one column per field)
"""
trait_list=[]
with open(trait_file) as traits:
for trait in traits:
trait_inner_l=[]
t_line=trait.split('\t')
for i in range(0,len(t_line)):
trait_inner_l.append(t_line[i])
trait_list.append(trait_inner_l)
return(numpy.array(trait_list))
def comma_sep_binary_string(sample_names,full_sample_list,individual_trait_list):
"""
    Turns each trait into a comma-separated one-hot bit string for the NEXUS file
"""
string_dict={}
trait_dict={}
individual_trait_list=[s.strip() for s in individual_trait_list]
individual_trait_list=["UNKNOWN" if s == "" else s for i, s in enumerate(individual_trait_list)]
print full_sample_list
for sample, trait in zip(full_sample_list, individual_trait_list):
trait_dict[sample] = trait
uniq_list = set(individual_trait_list)
temp_string = "0,"*len(uniq_list)
temp_string = temp_string[:len(temp_string)-1]
n_traits = len(uniq_list)
trait_labels=[]
for i, item in enumerate(uniq_list):
trait_labels.append(item)
mod_string = list(temp_string)
mod_string[(i)*2] = "1"
string_dict[item] = ''.join(mod_string)
sample_dict = {}
for sample in sample_names:
sample_dict[sample] = string_dict[trait_dict[sample]]
return(sample_dict, n_traits,trait_labels)
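# Worked illustration (hypothetical values, not part of the original script):
# with traits ['red', 'blue', 'red'], the unique set has two entries, so every
# sample gets a two-slot bit string, e.g. 'red' -> "1,0" and 'blue' -> "0,1"
# (slot order follows Python's set iteration order).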
def traits_to_nexus(input_file ,output_prefix, traits):
"""
    Reads the traits array and writes one NEXUS output file per trait column.
"""
# Pull out the sample names
samplenames = traits[1:,0]
temp_file = str(uuid.uuid4())
with open(temp_file,'w') as out_file:
a = open(input_file)
temp_middle=a.read()
f_ind=temp_middle.find("matrix") + 7
semi_ind=temp_middle.find(';',f_ind)
actual_sample_names=[]
for samps in temp_middle[f_ind:semi_ind].split('\n'):
try:
actual_sample_names.append(samps.split()[0])
except IndexError:
break
out_file.write(TAXA_TEMPLATE.format(len(actual_sample_names),"\n".join(actual_sample_names)))
temp_middle=temp_middle.replace("#NEXUS","")
out_file.write(temp_middle)
a.close()
with open(temp_file,'r') as template_file:
sequence_and_taxa=template_file.read()
for i in range(1,traits.shape[1]):
temp_dict={}
trait_name = traits[0,i].replace(" ", "")
t_list = traits[1:,i]
out_file=open(output_prefix + '.' + trait_name.strip() + '.nex','w')
out_file.write(sequence_and_taxa)
(sample_dict, n_traits, trait_labels)=comma_sep_binary_string(actual_sample_names, samplenames,t_list)
matrix = ""
for key, value in sample_dict.items():
matrix += key + " " + value + "\n"
trait_labels=[o.replace(" ","") for o in trait_labels]
trait_labels=' '.join(trait_labels)
out_file.write(TRAITS_TEMPLATE.format(n_traits,trait_labels ,matrix))
os.remove(temp_file)
def main():
parser = argparse.ArgumentParser(description="Takes a trait file and creates traits for all the descriptions")
parser.add_argument('-i','--nex',dest='nexus_input',help='Nexus Input File')
parser.add_argument('-o','--out-nex',dest='nexus_output',help='Nexus Output File prefix')
parser.add_argument('-t','--traits',dest='traits_file',help='Traits input File')
args = parser.parse_args()
assert args.nexus_input is not None, " A nexus input file is needed for the option --nex or --i"
assert args.nexus_output is not None, " A nexus output file is needed for the option -o or --output-nex"
assert args.traits_file is not None, " A nexus traits file is needed for the option -t or --traits"
try:
a = open(args.nexus_input)
except IOError:
sys.stderr.write('Cannot open nexus_input file: {0}'.format(a))
try:
a= open(args.traits_file)
except IOError:
sys.stderr.write('Cannot open nexus_output file: {0}'.format(a))
trait_list=generate_traits_dictionary(args.traits_file)
traits_to_nexus(args.nexus_input, args.nexus_output, trait_list)
if __name__=="__main__":
main()
| smilefreak/ancient_dna_pipeline | python_scripts/make_traits.py | Python | mit | 5,026 |
import json
import io
from datetime import datetime
def util_load_json(path):
with io.open(path, mode='r', encoding='utf-8') as f:
return json.loads(f.read())
def get_headers_for_login():
headers_for_login = {
'Authorization': 'Basic some-alphanumeric',
'X-Domain': 'Domain-1',
'grant_type': 'client_credentials',
'Content-Type': 'application/json'
}
return headers_for_login
def get_headers_for_query():
headers_for_query = {
'Cookie': 'COOKIE',
'X-Domain': 'DOMAIN',
'grant_type': 'client_credentials',
'client_id': 'CLIENT_ID',
'Content-Type': 'application/json'
}
return headers_for_query
def setup():
from ConcentricAI import LoginClient, QueryClient, initialise_scrolls_and_rules
headers_login = get_headers_for_login()
loginClient = LoginClient(base_url='https://mock-url.com',
verify='False',
headers=headers_login,
proxy='False')
headers_query = get_headers_for_query()
queryClient = QueryClient(
base_url='https://mock-url.com',
headers=headers_query,
proxy='False')
initialise_scrolls_and_rules()
return loginClient, queryClient
def test_test_module(requests_mock):
from ConcentricAI import LoginClient, test_module
headers = get_headers_for_login()
loginClient = LoginClient(base_url='https://mock-url.com',
verify='False',
headers=headers,
proxy='False')
mock_response = {
'accessToken': 'token'
}
requests_mock.get('https://mock-url.com/api/v1/login', json=mock_response)
response = test_module(loginClient)
assert response == 'ok'
def test_fetch_incidents(requests_mock):
# given : Mock response and arguments needed for the given call
from ConcentricAI import fetch_incidents
loginClient, queryClient = setup()
last_run: dict = {}
max_results = '100'
fetch_time = '3 days'
mock_response = util_load_json('test_data/mock_incident.json')
requests_mock.post('https://mock-url.com/graphql-third-party', json=mock_response['response'])
# when : Actual function call
_, new_incidents = fetch_incidents(loginClient, queryClient, last_run, max_results, fetch_time)
t = datetime.fromtimestamp(int('1600114903415') / 1000)
inced_time = t.strftime('%Y-%m-%dT%H:%M:%SZ')
rawJson = '{"cid": "8f4619ebc927276a5908db0e46be2e7da14df3bd", "rule_name": "risk1,risk3", ' \
'"service": "sharepoint", "name": "file-name-1", "file-path": "file-path", ' \
'"owner": ["[email protected]"], "risk": "high", "risk_timestamp": "1600114903415"}'
# then : Assert values of the incident populated.
assert new_incidents == [
{
'name': 'file-name-1',
'occurred': inced_time,
'severity': 3,
'rawJSON': rawJson
}
]
def test_fetch_file_information(requests_mock):
# given : Mock response and arguments needed for the given call
from ConcentricAI import fetch_file_information
loginClient, queryClient = setup()
path = 'path'
name = 'file-name-1'
mock_response = util_load_json('test_data/mock_file_information.json')
requests_mock.post('https://mock-url.com/graphql-third-party', json=mock_response['response'])
# when : Actual function call
result = fetch_file_information(loginClient, queryClient, path, name)
# then : Assert values of the Output prefix
assert result.outputs_prefix == 'ConcentricAI.FileInfo'
assert result.outputs_key_field == 'ownerDetails'
assert result.outputs == mock_response['output']
def test_get_users_overview(requests_mock):
# given : Mock response and arguments needed for the given call
from ConcentricAI import get_users_overview
loginClient, queryClient = setup()
mock_response = util_load_json('test_data/mock_user_overview.json')
requests_mock.post('https://mock-url.com/graphql-third-party', json=mock_response['response'])
max_users = '10'
# when : Actual function call
result = get_users_overview(loginClient, queryClient, max_users)
# then : Assert values of the Output prefix
assert result.outputs_prefix == 'ConcentricAI.UserInfo'
assert result.outputs_key_field == 'info'
def test_get_user_details(requests_mock):
# given : Mock response and arguments needed for the given call
from ConcentricAI import get_user_details
loginClient, queryClient = setup()
mock_response = util_load_json('test_data/mock_user_details.json')
requests_mock.post('https://mock-url.com/graphql-third-party', json=mock_response['response'])
user = 'joe'
# when : Actual function call
result = get_user_details(loginClient, queryClient, user)
# then : Assert values of the Output prefix
assert result.outputs_prefix == 'ConcentricAI.UserDetails'
assert result.outputs_key_field == 'info'
def test_get_file_sharing_details(requests_mock):
# given : Mock response and arguments needed for the given call
from ConcentricAI import get_file_sharing_details
loginClient, queryClient = setup()
mock_response = util_load_json('test_data/mock_file_permissions.json')
requests_mock.post('https://mock-url.com/graphql-third-party', json=mock_response['response'])
cid = 'lsknadkl12312'
# when : Actual function call
result = get_file_sharing_details(loginClient, queryClient, cid)
# then : Assert values of the Output prefix
assert result.outputs_prefix == 'ConcentricAI.FileSharingInfo'
assert result.outputs_key_field == 'info'
| VirusTotal/content | Packs/ConcentricAI/Integrations/ConcentricAI/ConcentricAi_test.py | Python | mit | 5,758 |
from __future__ import absolute_import
import os
from celery import Celery
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'phoenix.settings')
from django.conf import settings # noqa
app = Celery('phoenix')
# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
@app.task(bind=True)
def debug_task(self):
print('Request: {0!r}'.format(self.request))
| vchrisb/emc_phoenix2 | phoenix/celery.py | Python | mit | 574 |
from distutils.core import setup
setup(
name='PhotoRename',
version='1.0.9',
author="Jordan Dunn",
author_email="[email protected]",
url="https://github.com/JorDunn/photorename",
packages=['photorename'],
license='MIT',
long_description="A utility to rename photos and give them a more unified filename.",
entry_points={
'console_scripts': ['photorename=photorename.command_line:main'],
},
python_requires="~=3.0"
)
| JorDunn/rename-photos | setup.py | Python | mit | 470 |
'''
Run GATK HaplotypeCaller on a set of input BAMs over a target BED region.
'''
import sys, subprocess
sys.path.insert(0, '/nethome/asalomatov/projects/ppln')
import logProc
options = ''' \
--standard_min_confidence_threshold_for_calling 30.0 \
--standard_min_confidence_threshold_for_emitting 30.0 \
--downsample_to_coverage 2000 \
--downsampling_type BY_SAMPLE \
--annotation BaseQualityRankSumTest \
--annotation FisherStrand \
--annotation GCContent \
--annotation HaplotypeScore \
--annotation HomopolymerRun \
--annotation MappingQualityRankSumTest \
--annotation MappingQualityZero \
--annotation QualByDepth \
--annotation ReadPosRankSumTest \
--annotation RMSMappingQuality \
--annotation DepthPerAlleleBySample \
--annotation Coverage \
--interval_set_rule INTERSECTION \
--annotation ClippingRankSumTest \
--annotation DepthPerSampleHC \
--pair_hmm_implementation VECTOR_LOGLESS_CACHING \
-U LENIENT_VCF_PROCESSING \
--read_filter BadCigar \
--read_filter NotPrimaryAlignment \
'''
#-nct 1
print '\nsys.argv:', sys.argv[1:]
refGenome, tmpdir, gatk, dbsnp, gaps, outdir, outfile, inbed = sys.argv[1:9]
I = ' -I '
inbams = ''
for f in sys.argv[9:]:
inbams += I + f
cmd = 'java -Xms750m -Xmx3500m -XX:+UseSerialGC -Djava.io.tmpdir=%(tmpdir)s -jar %(gatk)s -T HaplotypeCaller %(inbams)s -o %(outfile)s -R %(refGenome)s --dbsnp %(dbsnp)s -L %(inbed)s %(options)s'
#cmd = 'java -Xms750m -Xmx2500m -XX:+UseSerialGC -Djava.io.tmpdir=%(tmpdir)s -jar %(gatk)s -T HaplotypeCaller %(inbams)s -o %(outfile)s -R %(refGenome)s --dbsnp %(dbsnp)s -L %(inbed)s -XL %(gaps)s %(options)s'
cmd = cmd % locals()
print cmd
logProc.logProc(outfile, outdir, cmd, 'started')
p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
if p.returncode == 0:
logProc.logProc(outfile, outdir, cmd, 'finished')
else:
logProc.logProc(outfile, outdir, cmd, 'failed', stderr)
| simonsfoundation/pipeline | ppln/gatkVariantAnnotator.py | Python | mit | 1,892 |
# Github pull request builder for Jenkins
import json
import os
import re
import urllib2
import urllib
import base64
import requests
import sys
import traceback
import platform
import subprocess
import codecs
from shutil import copy
#set Jenkins build description using submitDescription to mock browser behavior
#TODO: need to set parent build description
def set_description(desc, url):
req_data = urllib.urlencode({'description': desc})
req = urllib2.Request(url + 'submitDescription', req_data)
#print(os.environ['BUILD_URL'])
req.add_header('Content-Type', 'application/x-www-form-urlencoded')
base64string = base64.encodestring(os.environ['JENKINS_ADMIN']+ ":" + os.environ['JENKINS_ADMIN_PW']).replace('\n', '')
req.add_header("Authorization", "Basic " + base64string)
try:
urllib2.urlopen(req)
except:
traceback.print_exc()
def check_current_3rd_libs(branch):
#get current_libs config
backup_files = range(2)
current_files = range(2)
config_file_paths = ['external/config.json','templates/lua-template-runtime/runtime/config.json']
if (branch == 'v2'):
config_file_paths = ['external/config.json']
backup_files = range(1)
current_files = range(1)
for i, config_file_path in enumerate(config_file_paths):
if not os.path.isfile(config_file_path):
raise Exception("Could not find 'external/config.json'")
with open(config_file_path) as data_file:
data = json.load(data_file)
current_3rd_libs_version = data["version"]
filename = current_3rd_libs_version + '.zip'
node_name = os.environ['NODE_NAME']
backup_file = '../../../cocos-2dx-external/node/' + node_name + '/' + filename
backup_files[i] = backup_file
current_file = filename
current_files[i] = current_file
if os.path.isfile(backup_file):
copy(backup_file, current_file)
#run download-deps.py
os.system('python download-deps.py -r no')
#backup file
for i, backup_file in enumerate(backup_files):
current_file = current_files[i]
copy(current_file, backup_file)
def main():
#get payload from os env
payload_str = os.environ['payload']
payload_str = payload_str.decode('utf-8','ignore')
#parse to json obj
payload = json.loads(payload_str)
#get pull number
pr_num = payload['number']
print 'pr_num:' + str(pr_num)
#build for pull request action 'open' and 'synchronize', skip 'close'
action = payload['action']
print 'action: ' + action
#pr = payload['pull_request']
url = payload['html_url']
print "url:" + url
pr_desc = '<h3><a href='+ url + '> pr#' + str(pr_num) + ' is '+ action +'</a></h3>'
#get statuses url
statuses_url = payload['statuses_url']
#get pr target branch
branch = payload['branch']
#set commit status to pending
#target_url = os.environ['BUILD_URL']
jenkins_url = os.environ['JENKINS_URL']
job_name = os.environ['JOB_NAME'].split('/')[0]
build_number = os.environ['BUILD_NUMBER']
target_url = jenkins_url + 'job/' + job_name + '/' + build_number + '/'
set_description(pr_desc, target_url)
data = {"state":"pending", "target_url":target_url}
access_token = os.environ['GITHUB_ACCESS_TOKEN']
Headers = {"Authorization":"token " + access_token}
try:
requests.post(statuses_url, data=json.dumps(data), headers=Headers)
except:
traceback.print_exc()
    #reset path to workspace root (os.system("cd ...") only affects a subshell, so use os.chdir)
    os.chdir(os.environ['WORKSPACE'])
os.system("git checkout v3")
os.system("git branch -D pull" + str(pr_num))
#clean workspace
print "Before checkout: git clean -xdf -f"
os.system("git clean -xdf -f")
#fetch pull request to local repo
git_fetch_pr = "git fetch origin pull/" + str(pr_num) + "/head"
ret = os.system(git_fetch_pr)
if(ret != 0):
return(2)
#checkout
git_checkout = "git checkout -b " + "pull" + str(pr_num) + " FETCH_HEAD"
os.system(git_checkout)
# After checkout a new branch, clean workspace again
print "After checkout: git clean -xdf -f"
os.system("git clean -xdf -f")
#update submodule
git_update_submodule = "git submodule update --init --force"
ret = os.system(git_update_submodule)
if(ret != 0):
return(2)
    #check and, if needed, download the current 3rd party libs
check_current_3rd_libs(branch)
# Generate binding glue codes
if(branch == 'v3'):
ret = os.system("python tools/jenkins-scripts/gen_jsb.py")
elif(branch == 'v2'):
os.chdir('tools/tojs')
if(platform.system() == 'Windows'):
os.environ['NDK_ROOT'] = os.environ['NDK_ROOT_R8E']
ret = os.system("genbindings-win32.bat")
os.environ['NDK_ROOT'] = os.environ['NDK_ROOT_R9B']
else:
ret = os.system("./genbindings.sh")
os.chdir('../..')
if(ret != 0):
return(1)
#make temp dir
print "current dir is: " + os.environ['WORKSPACE']
os.system("cd " + os.environ['WORKSPACE']);
os.mkdir("android_build_objs")
#add symbol link
PROJECTS=["cpp-empty-test", "cpp-tests"]
print platform.system()
if(platform.system() == 'Darwin'):
for item in PROJECTS:
cmd = "ln -s " + os.environ['WORKSPACE']+"/android_build_objs/ " + os.environ['WORKSPACE']+"/tests/"+item+"/proj.android/obj"
os.system(cmd)
elif(platform.system() == 'Windows'):
for item in PROJECTS:
p = item.replace("/", os.sep)
cmd = "mklink /J "+os.environ['WORKSPACE']+os.sep+"tests"+os.sep +p+os.sep+"proj.android"+os.sep+"obj " + os.environ['WORKSPACE']+os.sep+"android_build_objs"
print cmd
os.system(cmd)
#build
#TODO: add android-linux build
#TODO: add mac build
node_name = os.environ['NODE_NAME']
if(branch == 'v3'):
if(node_name == 'android_mac') or (node_name == 'android_win7'):
#modify tests/cpp-empty-test/Classes/AppDelegate.cpp to support Console
modify_file = 'tests/cpp-empty-test/Classes/AppDelegate.cpp'
data = codecs.open(modify_file, encoding='UTF-8').read()
data = re.sub("director->setDisplayStats\(true\);", "director->setDisplayStats(true); director->getConsole()->listenOnTCP(5678);", data)
codecs.open(modify_file, 'wb', encoding='UTF-8').write(data)
#modify tests/cpp-empty-test/proj.android/AndroidManifest.xml to support Console
modify_file = 'tests/cpp-empty-test/proj.android/AndroidManifest.xml'
data = codecs.open(modify_file, encoding='UTF-8').read()
data = re.sub('<uses-feature android:glEsVersion="0x00020000" />', '<uses-feature android:glEsVersion="0x00020000" /> <uses-permission android:name="android.permission.INTERNET"/>', data)
codecs.open(modify_file, 'wb', encoding='UTF-8').write(data)
print "Start build android..."
ret = os.system("python build/android-build.py -n -j10 all")
# create and save apk
if(ret == 0):
sample_dir = 'tests/cpp-empty-test/proj.android/'
os.system('android update project -p cocos/platform/android/java/ -t android-13')
os.system('android update project -p ' + sample_dir + ' -t android-13')
os.system('ant debug -f ' + sample_dir + 'build.xml')
local_apk = sample_dir + 'bin/CppEmptyTest-debug.apk'
remote_apk = 'apks/cpp_empty_test/cpp_empty_test_' + str(pr_num) + '.apk'
os.system('tools/jenkins-scripts/upload_apk.sh ' + local_apk + ' ' + remote_apk)
elif(node_name == 'win32_win7'):
ret = subprocess.call('"%VS110COMNTOOLS%..\IDE\devenv.com" "build\cocos2d-win32.vc2012.sln" /Build "Debug|Win32"', shell=True)
elif(node_name == 'ios_mac'):
ret = os.system("tools/jenkins-scripts/ios-build.sh")
elif(node_name == 'linux_centos'):
os.chdir("build/")
ret = os.system("cmake ../")
ret = os.system("make -j10")
os.chdir("../")
elif(branch == 'v2'):
SAMPLES_DIRS = ['Cpp/HelloCpp', 'Cpp/SimpleGame', 'Cpp/TestCpp', 'Javascript/TestJavascript', 'Lua/HelloLua', 'Lua/TestLua']
SAMPLES_NAMES = ['HelloCpp', 'SimpleGame', 'TestCpp', 'TestJavascript', 'HelloLua', 'TestLua']
if(node_name == 'android_mac'):
for item in SAMPLES_DIRS:
proj_dir = "samples/" + item + "/proj.android"
os.system('ln -s ../../../../android_build_objs obj')
os.system(proj_dir + "/build_native.sh")
if (ret != 0):
break
elif(node_name == 'win32_win7'):
ret = subprocess.call('"%VS110COMNTOOLS%..\IDE\devenv.com" "cocos2d-win32.vc2012.sln" /Build "Debug|Win32"', shell=True)
elif(node_name == 'ios_mac'):
for i, item in enumerate(SAMPLES_DIRS):
cmd = "xcodebuild -project samples/" + item + "/proj.ios/" + SAMPLES_NAMES[i] + ".xcodeproj -scheme " + SAMPLES_NAMES[i] + ' -destination "platform=iOS Simulator,name=iPhone Retina (4-inch)"'
cmd_clean = cmd + ' clean'
cmd_build = cmd + ' build'
ret = os.system(cmd_clean)
if(ret != 0):
break
ret = os.system(cmd_build)
if(ret != 0):
break
elif(node_name == 'linux_centos'):
data = codecs.open('cocos2dx/proj.linux/cocos2dx.mk', encoding='UTF-8').read()
data = re.sub('-lglfw','-L$(GLFW_279_LIB) -lglfw', data)
codecs.open('cocos2dx/proj.linux/cocos2dx.mk', 'wb', encoding='UTF-8').write(data)
ret = os.system('make -j10')
else:
ret = 0
#get build result
print "build finished and return " + str(ret)
exit_code = 1
if ret == 0:
exit_code = 0
else:
exit_code = 1
#clean workspace
os.system("cd " + os.environ['WORKSPACE'])
os.system("git reset --hard")
os.system("git clean -xdf -f")
os.system("git checkout v3")
os.system("git branch -D pull" + str(pr_num))
return(exit_code)
# -------------- main --------------
if __name__ == '__main__':
sys_ret = 0
try:
sys_ret = main()
except:
traceback.print_exc()
sys_ret = 1
finally:
sys.exit(sys_ret)
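# --- Illustrative local invocation sketch (not part of the original script;
# all values below are placeholders). The script is driven entirely by
# environment variables that Jenkins sets, so exercising it outside Jenkins
# would need something like:
#
#   export WORKSPACE=/path/to/cocos2d-x
#   export NODE_NAME=android_mac
#   export JENKINS_URL=http://jenkins.example.com/
#   export JOB_NAME=pull-request-builder/master
#   export BUILD_NUMBER=1
#   export JENKINS_ADMIN=admin JENKINS_ADMIN_PW=secret
#   export GITHUB_ACCESS_TOKEN=<token>
#   export payload='{"number": 123, "action": "open", "html_url": "...",
#                    "statuses_url": "...", "branch": "v3"}'
#   python pull-request-builder.py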
| LuckyGameCn/LHCocosGame | cocos2d/tools/jenkins-scripts/pull-request-builder.py | Python | mit | 10,288 |
# Copyright 2018 Palantir Technologies, Inc.
import logging
import uuid
import sys
from concurrent import futures
from .exceptions import JsonRpcException, JsonRpcRequestCancelled, JsonRpcInternalError, JsonRpcMethodNotFound
log = logging.getLogger(__name__)
JSONRPC_VERSION = '2.0'
CANCEL_METHOD = '$/cancelRequest'
class Endpoint(object):
def __init__(self, dispatcher, consumer, id_generator=lambda: str(uuid.uuid4()), max_workers=5):
"""A JSON RPC endpoint for managing messages sent to/from the client.
Args:
dispatcher (dict): A dictionary of method name to handler function.
The handler functions should return either the result or a callable that will be used to asynchronously
compute the result.
consumer (fn): A function that consumes JSON RPC message dicts and sends them to the client.
id_generator (fn, optional): A function used to generate request IDs.
Defaults to the string value of :func:`uuid.uuid4`.
max_workers (int, optional): The number of workers in the asynchronous executor pool.
"""
self._dispatcher = dispatcher
self._consumer = consumer
self._id_generator = id_generator
self._client_request_futures = {}
self._server_request_futures = {}
self._executor_service = futures.ThreadPoolExecutor(max_workers=max_workers)
def shutdown(self):
self._executor_service.shutdown()
def notify(self, method, params=None):
"""Send a JSON RPC notification to the client.
Args:
method (str): The method name of the notification to send
params (any): The payload of the notification
"""
log.debug('Sending notification: %s %s', method, params)
message = {
'jsonrpc': JSONRPC_VERSION,
'method': method,
}
if params is not None:
message['params'] = params
self._consumer(message)
def request(self, method, params=None):
"""Send a JSON RPC request to the client.
Args:
method (str): The method name of the message to send
params (any): The payload of the message
Returns:
Future that will resolve once a response has been received
"""
msg_id = self._id_generator()
log.debug('Sending request with id %s: %s %s', msg_id, method, params)
message = {
'jsonrpc': JSONRPC_VERSION,
'id': msg_id,
'method': method,
}
if params is not None:
message['params'] = params
request_future = futures.Future()
request_future.add_done_callback(self._cancel_callback(msg_id))
self._server_request_futures[msg_id] = request_future
self._consumer(message)
return request_future
def _cancel_callback(self, request_id):
"""Construct a cancellation callback for the given request ID."""
def callback(future):
if future.cancelled():
self.notify(CANCEL_METHOD, {'id': request_id})
future.set_exception(JsonRpcRequestCancelled())
return callback
def consume(self, message):
"""Consume a JSON RPC message from the client.
Args:
message (dict): The JSON RPC message sent by the client
"""
if 'jsonrpc' not in message or message['jsonrpc'] != JSONRPC_VERSION:
log.warning("Unknown message type %s", message)
return
if 'id' not in message:
log.debug("Handling notification from client %s", message)
self._handle_notification(message['method'], message.get('params'))
elif 'method' not in message:
log.debug("Handling response from client %s", message)
self._handle_response(message['id'], message.get('result'), message.get('error'))
else:
try:
log.debug("Handling request from client %s", message)
self._handle_request(message['id'], message['method'], message.get('params'))
except JsonRpcException as e:
log.exception("Failed to handle request %s", message['id'])
self._consumer({
'jsonrpc': JSONRPC_VERSION,
'id': message['id'],
'error': e.to_dict()
})
except Exception: # pylint: disable=broad-except
log.exception("Failed to handle request %s", message['id'])
self._consumer({
'jsonrpc': JSONRPC_VERSION,
'id': message['id'],
'error': JsonRpcInternalError.of(sys.exc_info()).to_dict()
})
def _handle_notification(self, method, params):
"""Handle a notification from the client."""
if method == CANCEL_METHOD:
self._handle_cancel_notification(params['id'])
return
try:
handler = self._dispatcher[method]
except KeyError:
log.warning("Ignoring notification for unknown method %s", method)
return
try:
handler_result = handler(params)
except Exception: # pylint: disable=broad-except
log.exception("Failed to handle notification %s: %s", method, params)
return
if callable(handler_result):
log.debug("Executing async notification handler %s", handler_result)
notification_future = self._executor_service.submit(handler_result)
notification_future.add_done_callback(self._notification_callback(method, params))
@staticmethod
def _notification_callback(method, params):
"""Construct a notification callback for the given request ID."""
def callback(future):
try:
future.result()
log.debug("Successfully handled async notification %s %s", method, params)
except Exception: # pylint: disable=broad-except
log.exception("Failed to handle async notification %s %s", method, params)
return callback
def _handle_cancel_notification(self, msg_id):
"""Handle a cancel notification from the client."""
request_future = self._client_request_futures.pop(msg_id, None)
if not request_future:
log.warning("Received cancel notification for unknown message id %s", msg_id)
return
# Will only work if the request hasn't started executing
if request_future.cancel():
log.debug("Cancelled request with id %s", msg_id)
def _handle_request(self, msg_id, method, params):
"""Handle a request from the client."""
try:
handler = self._dispatcher[method]
except KeyError:
raise JsonRpcMethodNotFound.of(method)
handler_result = handler(params)
if callable(handler_result):
log.debug("Executing async request handler %s", handler_result)
request_future = self._executor_service.submit(handler_result)
self._client_request_futures[msg_id] = request_future
request_future.add_done_callback(self._request_callback(msg_id))
elif isinstance(handler_result, futures.Future):
log.debug("Request handler is already a future %s", handler_result)
self._client_request_futures[msg_id] = handler_result
handler_result.add_done_callback(self._request_callback(msg_id))
else:
log.debug("Got result from synchronous request handler: %s", handler_result)
self._consumer({
'jsonrpc': JSONRPC_VERSION,
'id': msg_id,
'result': handler_result
})
def _request_callback(self, request_id):
"""Construct a request callback for the given request ID."""
def callback(future):
# Remove the future from the client requests map
self._client_request_futures.pop(request_id, None)
if future.cancelled():
future.set_exception(JsonRpcRequestCancelled())
message = {
'jsonrpc': JSONRPC_VERSION,
'id': request_id,
}
try:
message['result'] = future.result()
except JsonRpcException as e:
log.exception("Failed to handle request %s", request_id)
message['error'] = e.to_dict()
except Exception: # pylint: disable=broad-except
log.exception("Failed to handle request %s", request_id)
message['error'] = JsonRpcInternalError.of(sys.exc_info()).to_dict()
self._consumer(message)
return callback
def _handle_response(self, msg_id, result=None, error=None):
"""Handle a response from the client."""
request_future = self._server_request_futures.pop(msg_id, None)
if not request_future:
log.warning("Received response to unknown message id %s", msg_id)
return
if error is not None:
log.debug("Received error response to message %s: %s", msg_id, error)
request_future.set_exception(JsonRpcException.from_dict(error))
return
log.debug("Received result for message %s: %s", msg_id, result)
request_future.set_result(result)
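# --- Illustrative usage sketch (not part of the original module; the names
# below are assumptions). A consumer just ships outgoing JSON RPC dicts
# somewhere; per the Endpoint docstring, handlers may return a result
# directly or a callable that is executed asynchronously in the pool:
#
#     def print_consumer(message):
#         print(message)
#
#     dispatcher = {
#         'math/add': lambda params: params['a'] + params['b'],  # sync
#         'math/slow_add': lambda params: (lambda: params['a'] + params['b']),  # async
#     }
#     endpoint = Endpoint(dispatcher, print_consumer)
#     endpoint.consume({'jsonrpc': '2.0', 'id': 1, 'method': 'math/add',
#                       'params': {'a': 1, 'b': 2}})
#     # consumer receives: {'jsonrpc': '2.0', 'id': 1, 'result': 3}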
| glenngillen/dotfiles | .vscode/extensions/ms-toolsai.jupyter-2021.6.832593372/pythonFiles/lib/python/pyls_jsonrpc/endpoint.py | Python | mit | 9,502 |
import pyparsing as pp
import six
import re
import ast
# Grammar for Field inputs
TRUE = pp.CaselessKeyword('true')
FALSE = pp.CaselessKeyword('false')
WILDCARD = pp.Word('*')
INT_LIT = pp.Word(pp.nums)
NEG_DASH = pp.Word('-', exact=1)
FLOAT_LIT = pp.Word(pp.nums + '.')
DEC_POINT = pp.Word('.', exact=1)
FLOAT_LIT_FULL = pp.Word(pp.nums + '.' + pp.nums)
COMMON = pp.Word(",", exact=1)
REVERSE = pp.Word("<")
VALUE = WILDCARD | NEG_DASH | FLOAT_LIT_FULL | FLOAT_LIT | INT_LIT
BOOL = WILDCARD | TRUE | FALSE
def is_safe(s):
"""
Test if a string of comma-separated-values is "safe"
- is the string less than 100 characters?
- is the string boolean?
- is the string an integer or float?
- is the string a wildcard (*)?
- is the string a reverse character (<)?
If one of the tokens does not answer one of the above questions in the
affirmative, then it is deemed "not safe"
Returns:
success: whether value is "safe" or not
"""
if len(s) > 100:
return False
parsers = [VALUE, BOOL, REVERSE]
tokens = s.split(',')
success = [False] * len(tokens)
for i, token in enumerate(tokens):
token_strip = token.strip()
for parser in parsers:
try:
parser.parseString(token_strip)
if parser == VALUE:
# make sure `ast.literal_eval` can parse
# throws ValueError or SyntaxError on failure
ast.literal_eval(token_strip)
except (pp.ParseException, ValueError, SyntaxError):
pass
else:
success[i] = True
break
return all(success)
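# Illustrative behaviour of is_safe (assumed examples, derived from the
# parsers above; not exhaustive):
#   is_safe('1, 2.5, *, <')     -> True   (int, float, wildcard, reverse)
#   is_safe('__import__("os")') -> False  (not a value/bool/reverse token)
#   is_safe('x' * 101)          -> False  (longer than 100 characters)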
TRUE_REGEX = re.compile('(?i)true')
FALSE_REGEX = re.compile('(?i)false')
def is_wildcard(x):
if isinstance(x, six.string_types):
        return x in ('*', u'*') or x.strip() in ('*', u'*')
else:
return False
def is_reverse(x):
if isinstance(x, six.string_types):
        return x in ('<', u'<') or x.strip() in ('<', u'<')
else:
return False
def check_wildcards(x):
if isinstance(x, list):
return any([check_wildcards(i) for i in x])
else:
return is_wildcard(x)
def make_bool(x):
"""
Find exact match for case insensitive true or false
Returns True for True or 1
Returns False for False or 0
If x is wildcard then simply return x
"""
if is_wildcard(x):
return x
elif x in (True, '1', '1.0', 1, 1.0):
return True
elif x in (False, '0', '0.0', 0, 0.0):
return False
elif TRUE_REGEX.match(x, endpos=4):
return True
elif FALSE_REGEX.match(x, endpos=5):
return False
else:
# this should be caught much earlier either in model validation or in
# form validation
raise ValueError(
"Expected case insensitive 'true' or 'false' but got {}".format(x)
)
def bool_like(x):
b = True if x == 'True' or x else False
return b
def convert_val(x):
if is_wildcard(x):
return x
if is_reverse(x):
return x
try:
return float(x)
except ValueError:
return make_bool(x)
def is_string(x):
return isinstance(x, str)
def string_to_float(x):
return float(x.replace(',', ''))
INPUTS_META = ('has_errors', 'csrfmiddlewaretoken', 'start_year',
'full_calc', 'quick_calc', 'first_year', '_state',
'creation_date', 'id', 'job_ids', 'jobs_not_ready',
'json_text_id', 'tax_result', 'reform_style',
'_micro_sim_cache', 'micro_sim_id', 'raw_fields',
'data_source', )
def json_int_key_encode(rename_dict):
"""
Recursively rename integer value keys if they are casted to strings
via JSON encoding
returns: dict with new keys
"""
if isinstance(rename_dict, dict):
for k in list(rename_dict.keys()):
if hasattr(k, 'isdigit') and k.isdigit():
new_label = int(k)
else:
new_label = k
rename_dict[new_label] = json_int_key_encode(rename_dict.pop(k))
return rename_dict
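# --- Illustrative usage sketch (added for clarity; not part of the original
# module). Demonstrates the conversion helpers defined above:
if __name__ == '__main__':
    assert json_int_key_encode({'2017': {'1': 'a'}}) == {2017: {1: 'a'}}
    assert convert_val('*') == '*'      # wildcards pass through unchanged
    assert convert_val('true') is True  # case-insensitive booleans
    assert convert_val('3.5') == 3.5    # numeric strings become floats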
| OpenSourcePolicyCenter/webapp-public | webapp/apps/taxbrain/helpers.py | Python | mit | 4,161 |
#!/usr/bin/env python3
from test import support
import marshal
import sys
import unittest
import os
class HelperMixin:
def helper(self, sample, *extra):
new = marshal.loads(marshal.dumps(sample, *extra))
self.assertEqual(sample, new)
try:
with open(support.TESTFN, "wb") as f:
marshal.dump(sample, f, *extra)
with open(support.TESTFN, "rb") as f:
new = marshal.load(f)
self.assertEqual(sample, new)
finally:
support.unlink(support.TESTFN)
class IntTestCase(unittest.TestCase, HelperMixin):
def test_ints(self):
# Test the full range of Python ints.
n = sys.maxsize
while n:
for expected in (-n, n):
self.helper(expected)
n = n >> 1
def test_int64(self):
# Simulate int marshaling on a 64-bit box. This is most interesting if
# we're running the test on a 32-bit box, of course.
def to_little_endian_string(value, nbytes):
b = bytearray()
for i in range(nbytes):
b.append(value & 0xff)
value >>= 8
return b
maxint64 = (1 << 63) - 1
minint64 = -maxint64-1
for base in maxint64, minint64, -maxint64, -(minint64 >> 1):
while base:
s = b'I' + to_little_endian_string(base, 8)
got = marshal.loads(s)
self.assertEqual(base, got)
if base == -1: # a fixed-point for shifting right 1
base = 0
else:
base >>= 1
def test_bool(self):
for b in (True, False):
self.helper(b)
class FloatTestCase(unittest.TestCase, HelperMixin):
def test_floats(self):
# Test a few floats
small = 1e-25
n = sys.maxsize * 3.7e250
while n > small:
for expected in (-n, n):
self.helper(float(expected))
n /= 123.4567
f = 0.0
s = marshal.dumps(f, 2)
got = marshal.loads(s)
self.assertEqual(f, got)
# and with version <= 1 (floats marshalled differently then)
s = marshal.dumps(f, 1)
got = marshal.loads(s)
self.assertEqual(f, got)
n = sys.maxsize * 3.7e-250
while n < small:
for expected in (-n, n):
f = float(expected)
self.helper(f)
self.helper(f, 1)
n *= 123.4567
class StringTestCase(unittest.TestCase, HelperMixin):
def test_unicode(self):
for s in ["", "Andr\xe8 Previn", "abc", " "*10000]:
self.helper(marshal.loads(marshal.dumps(s)))
def test_string(self):
for s in ["", "Andr\xe8 Previn", "abc", " "*10000]:
self.helper(s)
def test_bytes(self):
for s in [b"", b"Andr\xe8 Previn", b"abc", b" "*10000]:
self.helper(s)
class ExceptionTestCase(unittest.TestCase):
def test_exceptions(self):
new = marshal.loads(marshal.dumps(StopIteration))
self.assertEqual(StopIteration, new)
class CodeTestCase(unittest.TestCase):
def test_code(self):
co = ExceptionTestCase.test_exceptions.__code__
new = marshal.loads(marshal.dumps(co))
self.assertEqual(co, new)
def test_many_codeobjects(self):
# Issue2957: bad recursion count on code objects
count = 5000 # more than MAX_MARSHAL_STACK_DEPTH
codes = (ExceptionTestCase.test_exceptions.__code__,) * count
marshal.loads(marshal.dumps(codes))
class ContainerTestCase(unittest.TestCase, HelperMixin):
d = {'astring': '[email protected]',
'afloat': 7283.43,
'anint': 2**20,
'ashortlong': 2,
'alist': ['.zyx.41'],
'atuple': ('.zyx.41',)*10,
'aboolean': False,
'aunicode': "Andr\xe8 Previn"
}
def test_dict(self):
self.helper(self.d)
def test_list(self):
self.helper(list(self.d.items()))
def test_tuple(self):
self.helper(tuple(self.d.keys()))
def test_sets(self):
for constructor in (set, frozenset):
self.helper(constructor(self.d.keys()))
class BugsTestCase(unittest.TestCase):
def test_bug_5888452(self):
# Simple-minded check for SF 588452: Debug build crashes
marshal.dumps([128] * 1000)
def test_patch_873224(self):
self.assertRaises(Exception, marshal.loads, '0')
self.assertRaises(Exception, marshal.loads, 'f')
self.assertRaises(Exception, marshal.loads, marshal.dumps(2**65)[:-1])
def test_version_argument(self):
# Python 2.4.0 crashes for any call to marshal.dumps(x, y)
self.assertEqual(marshal.loads(marshal.dumps(5, 0)), 5)
self.assertEqual(marshal.loads(marshal.dumps(5, 1)), 5)
def test_fuzz(self):
# simple test that it's at least not *totally* trivial to
# crash from bad marshal data
for c in [chr(i) for i in range(256)]:
try:
marshal.loads(c)
except Exception:
pass
def test_loads_recursion(self):
s = 'c' + ('X' * 4*4) + '{' * 2**20
self.assertRaises(ValueError, marshal.loads, s)
def test_recursion_limit(self):
# Create a deeply nested structure.
head = last = []
# The max stack depth should match the value in Python/marshal.c.
if os.name == 'nt' and hasattr(sys, 'gettotalrefcount'):
MAX_MARSHAL_STACK_DEPTH = 1500
else:
MAX_MARSHAL_STACK_DEPTH = 2000
for i in range(MAX_MARSHAL_STACK_DEPTH - 2):
last.append([0])
last = last[-1]
# Verify we don't blow out the stack with dumps/load.
data = marshal.dumps(head)
new_head = marshal.loads(data)
# Don't use == to compare objects, it can exceed the recursion limit.
self.assertEqual(len(new_head), len(head))
self.assertEqual(len(new_head[0]), len(head[0]))
self.assertEqual(len(new_head[-1]), len(head[-1]))
last.append([0])
self.assertRaises(ValueError, marshal.dumps, head)
def test_exact_type_match(self):
# Former bug:
# >>> class Int(int): pass
# >>> type(loads(dumps(Int())))
# <type 'int'>
for typ in (int, float, complex, tuple, list, dict, set, frozenset):
            # Note: str subclasses are not tested because they get handled
# by marshal's routines for objects supporting the buffer API.
subtyp = type('subtyp', (typ,), {})
self.assertRaises(ValueError, marshal.dumps, subtyp())
# Issue #1792 introduced a change in how marshal increases the size of its
# internal buffer; this test ensures that the new code is exercised.
def test_large_marshal(self):
size = int(1e6)
testString = 'abc' * size
marshal.dumps(testString)
def test_invalid_longs(self):
# Issue #7019: marshal.loads shouldn't produce unnormalized PyLongs
invalid_string = b'l\x02\x00\x00\x00\x00\x00\x00\x00'
self.assertRaises(ValueError, marshal.loads, invalid_string)
def test_main():
support.run_unittest(IntTestCase,
FloatTestCase,
StringTestCase,
CodeTestCase,
ContainerTestCase,
ExceptionTestCase,
BugsTestCase)
if __name__ == "__main__":
test_main()
| MalloyPower/parsing-python | front-end/testsuite-python-lib/Python-3.2/Lib/test/test_marshal.py | Python | mit | 7,626 |
# encoding: UTF-8
'''
This module implements the market data recording engine: it aggregates tick
data, builds minute bars (K-lines) and inserts them into the database.
DR_setting.json configures the contracts to record and the active (dominant)
contract symbols.
'''
import copy
import json
from Queue import Queue, Empty
from collections import OrderedDict
from datetime import datetime
from threading import Thread
from vnpy.event import Event
from vnpy.trader.app.dataRecorder.drBase import *
from vnpy.trader.app.dataRecorder.language import text
from vnpy.trader.vtEvent import *
from vnpy.trader.vtFunction import todayDate, getJsonPath
from vnpy.trader.vtObject import VtSubscribeReq, VtLogData, VtBarData, VtTickData
########################################################################
class DrEngine(object):
"""数据记录引擎"""
settingFileName = 'DR_setting.json'
settingFilePath = getJsonPath(settingFileName, __file__)
#----------------------------------------------------------------------
def __init__(self, mainEngine, eventEngine):
"""Constructor"""
self.mainEngine = mainEngine
self.eventEngine = eventEngine
        # Current date
        self.today = todayDate()
        # Active contract mapping: key is the specific contract symbol
        # (e.g. IF1604), value is the active contract symbol (e.g. IF0000)
        self.activeSymbolDict = {}
        # Tick object dict
        self.tickDict = {}
        # Bar (K-line) object dict
        self.barDict = {}
        # Settings dict
        self.settingDict = OrderedDict()
        # Objects for the dedicated database-insertion thread
        self.active = False                    # working flag
        self.queue = Queue()                   # task queue
        self.thread = Thread(target=self.run)  # worker thread
        # Load settings and subscribe to market data
        self.loadSetting()
        # Start the insertion thread
        self.start()
        # Register event listeners
        self.registerEvent()
#----------------------------------------------------------------------
def loadSetting(self):
"""加载配置"""
with open(self.settingFileName) as f:
drSetting = json.load(f)
# 如果working设为False则不启动行情记录功能
working = drSetting['working']
if not working:
return
if 'tick' in drSetting:
l = drSetting['tick']
for setting in l:
symbol = setting[0]
vtSymbol = symbol
req = VtSubscribeReq()
req.symbol = setting[0]
                # For LTS and IB gateways, subscription requires the exchange code
if len(setting)>=3:
req.exchange = setting[2]
vtSymbol = '.'.join([symbol, req.exchange])
                # For the IB gateway, subscription also requires currency and product class
if len(setting)>=5:
req.currency = setting[3]
req.productClass = setting[4]
self.mainEngine.subscribe(req, setting[1])
                tick = VtTickData()  # this tick instance can cache partial data (currently unused)
self.tickDict[vtSymbol] = tick
                # save to the settings dict
if vtSymbol not in self.settingDict:
d = {
'symbol': symbol,
'gateway': setting[1],
'tick': True
}
self.settingDict[vtSymbol] = d
else:
d = self.settingDict[vtSymbol]
d['tick'] = True
if 'bar' in drSetting:
l = drSetting['bar']
for setting in l:
symbol = setting[0]
vtSymbol = symbol
req = VtSubscribeReq()
req.symbol = symbol
if len(setting)>=3:
req.exchange = setting[2]
vtSymbol = '.'.join([symbol, req.exchange])
if len(setting)>=5:
req.currency = setting[3]
req.productClass = setting[4]
self.mainEngine.subscribe(req, setting[1])
bar = VtBarData()
self.barDict[vtSymbol] = bar
                # save to the settings dict
if vtSymbol not in self.settingDict:
d = {
'symbol': symbol,
'gateway': setting[1],
'bar': True
}
self.settingDict[vtSymbol] = d
else:
d = self.settingDict[vtSymbol]
d['bar'] = True
if 'active' in drSetting:
d = drSetting['active']
            # Note: for IB and LTS gateways, vtSymbol should carry the '.exchange' suffix
            for activeSymbol, vtSymbol in d.items():
                self.activeSymbolDict[vtSymbol] = activeSymbol
                # save to the settings dict
                # (note: `symbol` and `setting` still hold values from the
                # tick/bar loops above, so this branch assumes one of those
                # sections was configured)
                if vtSymbol not in self.settingDict:
                    d = {
                        'symbol': symbol,
                        'gateway': setting[1],
                        'active': True
                    }
                    self.settingDict[vtSymbol] = d
                else:
                    d = self.settingDict[vtSymbol]
                    d['active'] = True
##----------------------------------------------------------------------
#def loadCsvSetting(self):
#"""加载CSV配置"""
#with open(self.settingFileName) as f:
#drSetting = csv.DictReader(f)
#for d in drSetting:
            ## read settings
#gatewayName = d['gateway']
#symbol = d['symbol']
#exchange = d['exchange']
#currency = d['currency']
#productClass = d['product']
#recordTick = d['tick']
#recordBar = d['bar']
#activeSymbol = d['active']
#if exchange:
#vtSymbol = '.'.join([symbol, exchange])
#else:
#vtSymbol = symbol
            ## subscribe to market data
#req = VtSubscribeReq()
#req.symbol = symbol
#req.exchange = exchange
#req.currency = currency
#req.productClass = productClass
#self.mainEngine.subscribe(req, gatewayName)
            ## configure the data to record
#if recordTick:
#tick = VtTickData()
#self.tickDict[vtSymbol] = VtTickData()
#if recordBar:
#self.barDict[vtSymbol] = VtBarData()
#if activeSymbol:
#self.activeSymbolDict[vtSymbol] = activeSymbol
            ## cache the settings
#self.settingDict[vtSymbol] = d
#----------------------------------------------------------------------
def getSetting(self):
"""获取配置"""
return self.settingDict
#----------------------------------------------------------------------
def procecssTickEvent(self, event):
"""处理行情推送"""
tick = event.dict_['data']
vtSymbol = tick.vtSymbol
# 转化Tick格式
if not tick.datetime:
tick.datetime = datetime.strptime(' '.join([tick.date, tick.time]), '%Y%m%d %H:%M:%S.%f')
# 更新Tick数据
if vtSymbol in self.tickDict:
self.insertData(TICK_DB_NAME, vtSymbol, tick)
if vtSymbol in self.activeSymbolDict:
activeSymbol = self.activeSymbolDict[vtSymbol]
self.insertData(TICK_DB_NAME, activeSymbol, tick)
            # emit log
self.writeDrLog(text.TICK_LOGGING_MESSAGE.format(symbol=tick.vtSymbol,
time=tick.time,
last=tick.lastPrice,
bid=tick.bidPrice1,
ask=tick.askPrice1))
        # update minute bar data
        if vtSymbol in self.barDict:
            bar = self.barDict[vtSymbol]
            # first tick overall, or the first tick of a new minute
if (not bar.datetime or
bar.datetime.minute != tick.datetime.minute or
bar.datetime.hour != tick.datetime.hour):
if bar.vtSymbol:
newBar = copy.copy(bar)
self.insertData(MINUTE_DB_NAME, vtSymbol, newBar)
if vtSymbol in self.activeSymbolDict:
activeSymbol = self.activeSymbolDict[vtSymbol]
self.insertData(MINUTE_DB_NAME, activeSymbol, newBar)
self.writeDrLog(text.BAR_LOGGING_MESSAGE.format(symbol=bar.vtSymbol,
time=bar.time,
open=bar.open,
high=bar.high,
low=bar.low,
close=bar.close))
bar.vtSymbol = tick.vtSymbol
bar.symbol = tick.symbol
bar.exchange = tick.exchange
bar.open = tick.lastPrice
bar.high = tick.lastPrice
bar.low = tick.lastPrice
bar.close = tick.lastPrice
bar.date = tick.date
bar.time = tick.time
bar.datetime = tick.datetime.replace(second=0, microsecond=0)
bar.volume = tick.volume
bar.openInterest = tick.openInterest
            # otherwise keep accumulating into the current bar
else:
bar.high = max(bar.high, tick.lastPrice)
bar.low = min(bar.low, tick.lastPrice)
bar.close = tick.lastPrice
#----------------------------------------------------------------------
def registerEvent(self):
"""注册事件监听"""
self.eventEngine.register(EVENT_TICK, self.procecssTickEvent)
#----------------------------------------------------------------------
def insertData(self, dbName, collectionName, data):
"""插入数据到数据库(这里的data可以是VtTickData或者VtBarData)"""
self.queue.put((dbName, collectionName, data.__dict__))
#----------------------------------------------------------------------
def run(self):
"""运行插入线程"""
while self.active:
try:
dbName, collectionName, d = self.queue.get(block=True, timeout=1)
self.mainEngine.dbInsert(dbName, collectionName, d)
except Empty:
pass
#----------------------------------------------------------------------
def start(self):
"""启动"""
self.active = True
self.thread.start()
#----------------------------------------------------------------------
def stop(self):
"""退出"""
if self.active:
self.active = False
self.thread.join()
#----------------------------------------------------------------------
def writeDrLog(self, content):
"""快速发出日志事件"""
log = VtLogData()
log.logContent = content
event = Event(type_=EVENT_DATARECORDER_LOG)
event.dict_['data'] = log
self.eventEngine.put(event)
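# --- Illustrative DR_setting.json layout (inferred from loadSetting above;
# the symbols, gateways and exchanges are examples only):
#
# {
#     "working": true,
#     "tick": [["IF1604", "CTP"],
#              ["EUR.USD", "IB", "IDEALPRO", "USD", "CASH"]],
#     "bar": [["IF1604", "CTP"]],
#     "active": {"IF0000": "IF1604"}
# }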
| cmbclh/vnpy1.7 | vnpy/trader/app/dataRecorder/drEngine.py | Python | mit | 12,770 |
import logging
from pecan import expose, request
from pecan.ext.notario import validate
from uuid import uuid4
from ceph_installer.controllers import error
from ceph_installer.tasks import call_ansible
from ceph_installer import schemas
from ceph_installer import models
from ceph_installer import util
logger = logging.getLogger(__name__)
class AgentController(object):
@expose(generic=True, template='json')
def index(self):
error(405)
@index.when(method='POST', template='json')
@validate(schemas.agent_install_schema, handler="/errors/schema")
def install(self):
master = request.json.get('master', request.server_name)
logger.info('defining "%s" as the master host for the minion configuration', master)
hosts = request.json.get('hosts')
verbose_ansible = request.json.get('verbose', False)
extra_vars = util.get_install_extra_vars(request.json)
extra_vars['agent_master_host'] = master
identifier = str(uuid4())
task = models.Task(
identifier=identifier,
endpoint=request.path,
)
# we need an explicit commit here because the command may finish before
# we conclude this request
models.commit()
kwargs = dict(extra_vars=extra_vars, verbose=verbose_ansible)
call_ansible.apply_async(
args=([('agents', hosts)], identifier),
kwargs=kwargs,
)
return task
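# --- Illustrative client call (not part of the original module; the mount
# point of this controller depends on the application's routing, so the URL
# below is an assumption):
#
#     import requests
#     requests.post(
#         'http://localhost:8181/api/agents/install',
#         json={'hosts': ['node1', 'node2'],
#               'master': 'master.example.com',
#               'verbose': True},
#     )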
| ceph/ceph-installer | ceph_installer/controllers/agent.py | Python | mit | 1,471 |
"""
You are given two linked lists representing two non-negative numbers. The digits are stored in reverse order and each
of their nodes contain a single digit. Add the two numbers and return it as a linked list.
Input: (2 -> 4 -> 3) + (5 -> 6 -> 4)
Output: 7 -> 0 -> 8
"""
__author__ = 'Danyang'
class ListNode:
def __init__(self, x):
self.val = x
self.next = None
def __repr__(self):
# for debugging
return repr(self.val)
class Solution:
def addTwoNumbers(self, l1, l2):
"""
Algorithm: Two pointers & math
Two pointers for l1 and l2 respectively
Math - carry for addition, in the form of new node
:param l1: linked list head node
:param l2: linked list head node
:return: ListNode
"""
result_head = ListNode(0)
cur1 = l1
cur2 = l2
cur = result_head
while cur1 or cur2:
cur.val = cur.val+self.addNode(cur1, cur2)
if cur.val < 10:
if cur1 and cur1.next or cur2 and cur2.next: # next node
cur.next = ListNode(0)
else:
cur.val -= 10
cur.next = ListNode(1)
if cur1:
cur1 = cur1.next
if cur2:
cur2 = cur2.next
cur = cur.next
return result_head
def addNode(self, node1, node2):
"""
Handles None situation
:param node1: ListNode
:param node2: ListNode
:return: integer, summation
"""
if not node1 and not node2:
raise Exception("two nodes are None")
if not node1:
return node2.val
if not node2:
return node1.val
return node1.val+node2.val
if __name__ == "__main__":
l1s = [ListNode(1)]
l2s = [ListNode(9), ListNode(9)]
for i in range(len(l1s)-1):
l1s[i].next = l1s[i+1]
for i in range(len(l2s)-1):
l2s[i].next = l2s[i+1]
    result = Solution().addTwoNumbers(l1s[0], l2s[0])
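    # quick check added for illustration: 1 + 99 should produce digits
    # 0 -> 0 -> 1, i.e. 100
    digits = []
    while result:
        digits.append(result.val)
        result = result.next
    print(digits)  # expected: [0, 0, 1]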
| algorhythms/LeetCode | 004 Add Two Numbers.py | Python | mit | 2,135 |
'''
Configuration object
====================
The :class:`Config` object is an instance of a modified Python ConfigParser.
See the `ConfigParser documentation
<http://docs.python.org/library/configparser.html>`_ for more information.
Kivy has a configuration file which determines the default settings. In
order to change these settings, you can alter this file manually or use
the Config object. Please see the :ref:`Configure Kivy` section for more
information.
Applying configurations
-----------------------
Configuration options control the initialization of the :class:`~kivy.app.App`.
In order to avoid situations where the config settings do not work or are not
applied before window creation (like setting an initial window size),
:meth:`Config.set <kivy.config.ConfigParser.set>` should be used before
importing any other Kivy modules. Ideally, this means setting them right at
the start of your main.py script.
Alternatively, you can save these settings permanently using
:meth:`Config.set <ConfigParser.set>` then
:meth:`Config.write <ConfigParser.write>`. In this case, you will need to
restart the app for the changes to take effect. Note that this approach will
affect all Kivy apps system wide.
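For example, to fix the initial window size (a minimal sketch; the numbers
are illustrative)::
    from kivy.config import Config
    Config.set('graphics', 'width', '1280')
    Config.set('graphics', 'height', '720')
    # only now import the rest of Kivy
    from kivy.app import App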
Please note that no underscores (`_`) are allowed in the section name.
Usage of the Config object
--------------------------
To read a configuration token from a particular section::
>>> from kivy.config import Config
>>> Config.getint('kivy', 'show_fps')
0
Change the configuration and save it::
>>> Config.set('postproc', 'retain_time', '50')
>>> Config.write()
For information on configuring your :class:`~kivy.app.App`, please see the
:ref:`Application configuration` section.
.. versionchanged:: 1.7.1
The ConfigParser should work correctly with utf-8 now. The values are
converted from ascii to unicode only when needed. The method get() returns
utf-8 strings.
Changing configuration with environment variables
-------------------------------------------------
Since 1.11.0, it is now possible to change the configuration using
environment variables. They take precedence on the loaded config.ini.
The format is::
KCFG_<section>_<key> = <value>
For example::
KCFG_GRAPHICS_FULLSCREEN=auto ...
KCFG_KIVY_LOG_LEVEL=warning ...
Or in your file before any kivy import::
import os
os.environ["KCFG_KIVY_LOG_LEVEL"] = "warning"
If you don't want to map any environment variables, you can disable
the behavior::
os.environ["KIVY_NO_ENV_CONFIG"] = "1"
.. _configuration-tokens:
Available configuration tokens
------------------------------
.. |log_levels| replace::
'trace', 'debug', 'info', 'warning', 'error' or 'critical'
:kivy:
`default_font`: list
Default fonts used for widgets displaying any text. It defaults to
['Roboto', 'data/fonts/Roboto-Regular.ttf',
'data/fonts/Roboto-Italic.ttf', 'data/fonts/Roboto-Bold.ttf',
'data/fonts/Roboto-BoldItalic.ttf'].
`desktop`: int, 0 or 1
This option controls desktop OS specific features, such as enabling
drag-able scroll-bar in scroll views, disabling of bubbles in
TextInput etc. 0 is disabled, 1 is enabled.
`exit_on_escape`: int, 0 or 1
Enables exiting kivy when escape is pressed.
0 is disabled, 1 is enabled.
`pause_on_minimize`: int, 0 or 1
If set to `1`, the main loop is paused and the `on_pause` event
is dispatched when the window is minimized. This option is intended
for desktop use only. Defaults to `0`.
`keyboard_layout`: string
Identifier of the layout to use.
`keyboard_mode`: string
Specifies the keyboard mode to use. If can be one of the following:
* '' - Let Kivy choose the best option for your current platform.
* 'system' - real keyboard.
* 'dock' - one virtual keyboard docked to a screen side.
* 'multi' - one virtual keyboard for every widget request.
* 'systemanddock' - virtual docked keyboard plus input from real
keyboard.
* 'systemandmulti' - analogous.
`kivy_clock`: one of `default`, `interrupt`, `free_all`, `free_only`
The clock type to use with kivy. See :mod:`kivy.clock`.
`log_dir`: string
Path of log directory.
`log_enable`: int, 0 or 1
Activate file logging. 0 is disabled, 1 is enabled.
`log_level`: string, one of |log_levels|
Set the minimum log level to use.
`log_name`: string
Format string to use for the filename of log file.
`log_maxfiles`: int
Keep log_maxfiles recent logfiles while purging the log directory. Set
        'log_maxfiles' to -1 to disable logfile purging (e.g. keep all logfiles).
.. note::
You end up with 'log_maxfiles + 1' logfiles because the logger
adds a new one after purging.
`window_icon`: string
Path of the window icon. Use this if you want to replace the default
pygame icon.
:postproc:
`double_tap_distance`: float
Maximum distance allowed for a double tap, normalized inside the range
0 - 1000.
`double_tap_time`: int
Time allowed for the detection of double tap, in milliseconds.
`ignore`: list of tuples
List of regions where new touches are ignored.
This configuration token can be used to resolve hotspot problems
with DIY hardware. The format of the list must be::
ignore = [(xmin, ymin, xmax, ymax), ...]
All the values must be inside the range 0 - 1.
`jitter_distance`: int
Maximum distance for jitter detection, normalized inside the range 0
- 1000.
`jitter_ignore_devices`: string, separated with commas
List of devices to ignore from jitter detection.
`retain_distance`: int
If the touch moves more than is indicated by retain_distance, it will
not be retained. Argument should be an int between 0 and 1000.
`retain_time`: int
Time allowed for a retain touch, in milliseconds.
`triple_tap_distance`: float
Maximum distance allowed for a triple tap, normalized inside the range
0 - 1000.
`triple_tap_time`: int
Time allowed for the detection of triple tap, in milliseconds.
:graphics:
`borderless`: int, one of 0 or 1
If set to `1`, removes the window border/decoration. Window resizing
must also be disabled to hide the resizing border.
`custom_titlebar`: int, one of 0 or 1
        If set to `1`, removes the window border and allows the user to set a
        Widget as a titlebar.
        See :meth:`~kivy.core.window.WindowBase.set_custom_titlebar`
        for detailed usage.
    `custom_titlebar_border`: int, defaults to 5
        Sets how many pixels of the border should be used as the
        resizing frame.
`window_state`: string , one of 'visible', 'hidden', 'maximized'
or 'minimized'
Sets the window state, defaults to 'visible'. This option is available
only for the SDL2 window provider and it should be used on desktop
OSes.
`fbo`: string, one of 'hardware', 'software' or 'force-hardware'
Selects the FBO backend to use.
`fullscreen`: int or string, one of 0, 1, 'fake' or 'auto'
Activate fullscreen. If set to `1`, a resolution of `width`
times `height` pixels will be used.
If set to `auto`, your current display's resolution will be
used instead. This is most likely what you want.
If you want to place the window in another display,
use `fake`, or set the `borderless` option from the graphics section,
then adjust `width`, `height`, `top` and `left`.
`height`: int
Height of the :class:`~kivy.core.window.Window`, not used if
`fullscreen` is set to `auto`.
`left`: int
Left position of the :class:`~kivy.core.window.Window`.
`maxfps`: int, defaults to 60
Maximum FPS allowed.
.. warning::
Setting maxfps to 0 will lead to max CPU usage.
    `multisamples`: int, defaults to 2
Sets the `MultiSample Anti-Aliasing (MSAA)
<http://en.wikipedia.org/wiki/Multisample_anti-aliasing>`_ level.
Increasing this value results in smoother graphics but at the cost of
processing time.
.. note::
This feature is limited by device hardware support and will have no
effect on devices which do not support the level of MSAA requested.
`position`: string, one of 'auto' or 'custom'
Position of the window on your display. If `auto` is used, you have no
control of the initial position: `top` and `left` are ignored.
`show_cursor`: int, one of 0 or 1
Set whether or not the cursor is shown on the window.
`top`: int
Top position of the :class:`~kivy.core.window.Window`.
`resizable`: int, one of 0 or 1
If 0, the window will have a fixed size. If 1, the window will be
resizable.
`rotation`: int, one of 0, 90, 180 or 270
Rotation of the :class:`~kivy.core.window.Window`.
`width`: int
Width of the :class:`~kivy.core.window.Window`, not used if
`fullscreen` is set to `auto`.
`minimum_width`: int
Minimum width to restrict the window to. (sdl2 only)
`minimum_height`: int
Minimum height to restrict the window to. (sdl2 only)
`min_state_time`: float, defaults to .035
Minimum time for widgets to display a given visual state.
This attrib is currently used by widgets like
:class:`~kivy.uix.dropdown.DropDown` &
:class:`~kivy.uix.behaviors.buttonbehavior.ButtonBehavior` to
make sure they display their current visual state for the given
time.
`allow_screensaver`: int, one of 0 or 1, defaults to 1
Allow the device to show a screen saver, or to go to sleep
on mobile devices. Only works for the sdl2 window provider.
`vsync`: `none`, empty value, or integers
Whether vsync is enabled, currently only used with sdl2 window.
Possible values are `none` or empty value -- leaves it unchanged,
``0`` -- disables vsync, ``1`` or larger -- sets vsync interval,
``-1`` sets adaptive vsync. It falls back to 1 if setting to ``2+``
or ``-1`` failed. See ``SDL_GL_SetSwapInterval``.
`verify_gl_main_thread`: int, 1 or 0, defaults to 1
Whether to check if code that changes any gl instructions is
running outside the main thread and then raise an error.
:input:
You can create new input devices using this syntax::
# example of input provider instance
yourid = providerid,parameters
# example for tuio provider
default = tuio,127.0.0.1:3333
mytable = tuio,192.168.0.1:3334
.. seealso::
Check the providers in :mod:`kivy.input.providers` for the syntax to
use inside the configuration file.
:widgets:
`scroll_distance`: int
Default value of the
:attr:`~kivy.uix.scrollview.ScrollView.scroll_distance`
property used by the :class:`~kivy.uix.scrollview.ScrollView` widget.
Check the widget documentation for more information.
`scroll_friction`: float
Default value of the
:attr:`~kivy.uix.scrollview.ScrollView.scroll_friction`
property used by the :class:`~kivy.uix.scrollview.ScrollView` widget.
Check the widget documentation for more information.
.. deprecated:: 1.7.0
Please use
:class:`~kivy.uix.scrollview.ScrollView.effect_cls` instead.
`scroll_timeout`: int
Default value of the
:attr:`~kivy.uix.scrollview.ScrollView.scroll_timeout`
property used by the :class:`~kivy.uix.scrollview.ScrollView` widget.
Check the widget documentation for more information.
`scroll_stoptime`: int
Default value of the
:attr:`~kivy.uix.scrollview.ScrollView.scroll_stoptime`
property used by the :class:`~kivy.uix.scrollview.ScrollView` widget.
Check the widget documentation for more information.
.. deprecated:: 1.7.0
Please use
:class:`~kivy.uix.scrollview.ScrollView.effect_cls` instead.
`scroll_moves`: int
Default value of the
:attr:`~kivy.uix.scrollview.ScrollView.scroll_moves`
property used by the :class:`~kivy.uix.scrollview.ScrollView` widget.
Check the widget documentation for more information.
.. deprecated:: 1.7.0
Please use
:class:`~kivy.uix.scrollview.ScrollView.effect_cls` instead.
:modules:
You can activate modules with this syntax::
modulename =
Anything after the = will be passed to the module as arguments.
Check the specific module's documentation for a list of accepted
arguments.
.. versionchanged:: 2.1.0
`vsync` has been added to the graphics section.
`verify_gl_main_thread` has been added to the graphics section.
.. versionchanged:: 1.10.0
`min_state_time` and `allow_screensaver` have been added
to the `graphics` section.
`kivy_clock` has been added to the kivy section.
`default_font` has beed added to the kivy section.
.. versionchanged:: 1.9.0
`borderless` and `window_state` have been added to the graphics section.
The `fake` setting of the `fullscreen` option has been deprecated,
use the `borderless` option instead.
`pause_on_minimize` has been added to the kivy section.
.. versionchanged:: 1.8.0
`systemanddock` and `systemandmulti` has been added as possible values for
`keyboard_mode` in the kivy section. `exit_on_escape` has been added
to the kivy section.
.. versionchanged:: 1.2.0
`resizable` has been added to graphics section.
.. versionchanged:: 1.1.0
tuio no longer listens by default. Window icons are not copied to
user directory anymore. You can still set a new window icon by using the
``window_icon`` config setting.
.. versionchanged:: 1.0.8
`scroll_timeout`, `scroll_distance` and `scroll_friction` have been added.
`list_friction`, `list_trigger_distance` and `list_friction_bound`
have been removed. `keyboard_type` and `keyboard_layout` have been
removed from the widget. `keyboard_mode` and `keyboard_layout` have
been added to the kivy section.
'''
__all__ = ('Config', 'ConfigParser')
try:
from ConfigParser import ConfigParser as PythonConfigParser
except ImportError:
from configparser import RawConfigParser as PythonConfigParser
from os import environ
from os.path import exists
from kivy import kivy_config_fn
from kivy.logger import Logger, logger_config_update
from collections import OrderedDict
from kivy.utils import platform
from kivy.compat import PY2, string_types
from weakref import ref
_is_rpi = exists('/opt/vc/include/bcm_host.h')
# Version number of current configuration format
KIVY_CONFIG_VERSION = 24
Config = None
'''The default Kivy configuration object. This is a :class:`ConfigParser`
instance with the :attr:`~kivy.config.ConfigParser.name` set to 'kivy'.
.. code-block:: python
Config = ConfigParser(name='kivy')
'''
class ConfigParser(PythonConfigParser, object):
'''Enhanced ConfigParser class that supports the addition of default
sections and default values.
By default, the kivy ConfigParser instance, :attr:`~kivy.config.Config`,
is named `'kivy'` and the ConfigParser instance used by the
:meth:`App.build_settings <~kivy.app.App.build_settings>` method is named
`'app'`.
:Parameters:
`name`: string
The name of the instance. See :attr:`name`. Defaults to `''`.
.. versionchanged:: 1.9.0
Each ConfigParser can now be :attr:`named <name>`. You can get the
ConfigParser associated with a name using :meth:`get_configparser`.
In addition, you can now control the config values with
:class:`~kivy.properties.ConfigParserProperty`.
.. versionadded:: 1.0.7
'''
def __init__(self, name='', **kwargs):
PythonConfigParser.__init__(self, **kwargs)
self._sections = OrderedDict()
self.filename = None
self._callbacks = []
self.name = name
def add_callback(self, callback, section=None, key=None):
'''Add a callback to be called when a specific section or key has
changed. If you don't specify a section or key, it will call the
callback for all section/key changes.
Callbacks will receive 3 arguments: the section, key and value.
.. versionadded:: 1.4.1
'''
if section is None and key is not None:
raise Exception('You cannot specify a key without a section')
self._callbacks.append((callback, section, key))
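        # Illustrative usage (hypothetical handler; added for clarity):
        #
        #     def on_maxfps(section, key, value):
        #         print('graphics/maxfps changed to', value)
        #
        #     Config.add_callback(on_maxfps, 'graphics', 'maxfps')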
def remove_callback(self, callback, section=None, key=None):
'''Removes a callback added with :meth:`add_callback`.
:meth:`remove_callback` must be called with the same parameters as
:meth:`add_callback`.
Raises a `ValueError` if not found.
.. versionadded:: 1.9.0
'''
self._callbacks.remove((callback, section, key))
def _do_callbacks(self, section, key, value):
for callback, csection, ckey in self._callbacks:
if csection is not None and csection != section:
continue
elif ckey is not None and ckey != key:
continue
callback(section, key, value)
def read(self, filename):
'''Read only one filename. In contrast to the original ConfigParser of
Python, this one is able to read only one file at a time. The last
read file will be used for the :meth:`write` method.
.. versionchanged:: 1.9.0
:meth:`read` now calls the callbacks if read changed any values.
'''
if not isinstance(filename, string_types):
raise Exception('Only one filename is accepted ({})'.format(
string_types.__name__))
self.filename = filename
# If we try to open directly the configuration file in utf-8,
# we correctly get the unicode value by default.
# But, when we try to save it again, all the values we didn't changed
# are still unicode, and then the PythonConfigParser internal do
# a str() conversion -> fail.
# Instead we currently to the conversion to utf-8 when value are
# "get()", but we internally store them in ascii.
# with codecs.open(filename, 'r', encoding='utf-8') as f:
# self.readfp(f)
old_vals = {sect: {k: v for k, v in self.items(sect)} for sect in
self.sections()}
PythonConfigParser.read(self, filename)
# when reading new file, sections/keys are only increased, not removed
f = self._do_callbacks
for section in self.sections():
if section not in old_vals: # new section
for k, v in self.items(section):
f(section, k, v)
continue
old_keys = old_vals[section]
for k, v in self.items(section): # just update new/changed keys
if k not in old_keys or v != old_keys[k]:
f(section, k, v)
def set(self, section, option, value):
'''Functions similarly to PythonConfigParser's set method, except that
the value is implicitly converted to a string.
'''
e_value = value
if not isinstance(value, string_types):
# might be boolean, int, etc.
e_value = str(value)
ret = PythonConfigParser.set(self, section, option, e_value)
self._do_callbacks(section, option, value)
return ret
def setall(self, section, keyvalues):
'''Sets multiple key-value pairs in a section. keyvalues should be a
dictionary containing the key-value pairs to be set.
'''
for key, value in keyvalues.items():
self.set(section, key, value)
def get(self, section, option, **kwargs):
value = PythonConfigParser.get(self, section, option, **kwargs)
if PY2:
if type(value) is str:
return value.decode('utf-8')
return value
def setdefaults(self, section, keyvalues):
'''Set multiple key-value defaults in a section. keyvalues should be
a dictionary containing the new key-value defaults.
'''
self.adddefaultsection(section)
for key, value in keyvalues.items():
self.setdefault(section, key, value)
def setdefault(self, section, option, value):
'''Set the default value for an option in the specified section.
'''
if self.has_option(section, option):
return
self.set(section, option, value)
def getdefault(self, section, option, defaultvalue):
'''Get the value of an option in the specified section. If not found,
it will return the default value.
'''
if not self.has_section(section):
return defaultvalue
if not self.has_option(section, option):
return defaultvalue
return self.get(section, option)
def getdefaultint(self, section, option, defaultvalue):
'''Get the value of an option in the specified section. If not found,
it will return the default value. The value will always be
returned as an integer.
.. versionadded:: 1.6.0
'''
return int(self.getdefault(section, option, defaultvalue))
def adddefaultsection(self, section):
'''Add a section if the section is missing.
'''
assert("_" not in section)
if self.has_section(section):
return
self.add_section(section)
def write(self):
'''Write the configuration to the last file opened using the
:meth:`read` method.
Return True if the write finished successfully, False otherwise.
'''
if self.filename is None:
return False
try:
with open(self.filename, 'w') as fd:
PythonConfigParser.write(self, fd)
except IOError:
Logger.exception('Unable to write the config <%s>' % self.filename)
return False
return True
def update_config(self, filename, overwrite=False):
'''Upgrade the configuration based on a new default config file.
Overwrite any existing values if overwrite is True.
'''
pcp = PythonConfigParser()
pcp.read(filename)
confset = self.setall if overwrite else self.setdefaults
for section in pcp.sections():
confset(section, dict(pcp.items(section)))
self.write()
@staticmethod
def _register_named_property(name, widget_ref, *largs):
''' Called by the ConfigParserProperty to register a property which
was created with a config name instead of a config object.
When a ConfigParser with this name is later created, the properties
are then notified that this parser now exists so they can use it.
If the parser already exists, the property is notified here. See
:meth:`~kivy.properties.ConfigParserProperty.set_config`.
:Parameters:
`name`: a non-empty string
The name of the ConfigParser that is associated with the
property. See :attr:`name`.
`widget_ref`: 2-tuple.
The first element is a reference to the widget containing the
property, the second element is the name of the property. E.g.:
class House(Widget):
address = ConfigParserProperty('', 'info', 'street',
'directory')
Then, the first element is a ref to a House instance, and the
second is `'address'`.
'''
configs = ConfigParser._named_configs
try:
config, props = configs[name]
except KeyError:
configs[name] = (None, [widget_ref])
return
props.append(widget_ref)
if config:
config = config()
widget = widget_ref[0]()
if config and widget: # associate this config with property
widget.property(widget_ref[1]).set_config(config)
@staticmethod
def get_configparser(name):
'''Returns the :class:`ConfigParser` instance whose name is `name`, or
None if not found.
:Parameters:
`name`: string
The name of the :class:`ConfigParser` instance to return.
'''
try:
config = ConfigParser._named_configs[name][0]
if config is not None:
config = config()
if config is not None:
return config
del ConfigParser._named_configs[name]
except KeyError:
return None
# keys are configparser names, values are 2-tuple of (ref(configparser),
# widget_ref), where widget_ref is same as in _register_named_property
_named_configs = {}
_name = ''
@property
def name(self):
''' The name associated with this ConfigParser instance, if not `''`.
Defaults to `''`. It can be safely changed dynamically or set to `''`.
When a ConfigParser is given a name, that config object can be
retrieved using :meth:`get_configparser`. In addition, that config
instance can also be used with a
:class:`~kivy.properties.ConfigParserProperty` instance that set its
`config` value to this name.
Setting more than one ConfigParser with the same name will raise a
`ValueError`.
'''
return self._name
@name.setter
def name(self, value):
old_name = self._name
if value is old_name:
return
self._name = value
configs = ConfigParser._named_configs
if old_name: # disconnect this parser from previously connected props
_, props = configs.get(old_name, (None, []))
for widget, prop in props:
widget = widget()
if widget:
widget.property(prop).set_config(None)
configs[old_name] = (None, props)
if not value:
return
# if given new name, connect it with property that used this name
try:
config, props = configs[value]
except KeyError:
configs[value] = (ref(self), [])
return
if config is not None and config() is not None:
raise ValueError('A parser named {} already exists'.format(value))
for widget, prop in props:
widget = widget()
if widget:
widget.property(prop).set_config(self)
configs[value] = (ref(self), props)
if not environ.get('KIVY_DOC_INCLUDE'):
#
# Read, analyse configuration file
# Support upgrade of older config file versions
#
# Create default configuration
Config = ConfigParser(name='kivy')
Config.add_callback(logger_config_update, 'kivy', 'log_level')
# Read config file if exist
if (exists(kivy_config_fn) and
'KIVY_USE_DEFAULTCONFIG' not in environ and
'KIVY_NO_CONFIG' not in environ):
try:
Config.read(kivy_config_fn)
except Exception as e:
            Logger.exception('Core: error while reading local'
                             ' configuration')
version = Config.getdefaultint('kivy', 'config_version', 0)
# Add defaults section
Config.adddefaultsection('kivy')
Config.adddefaultsection('graphics')
Config.adddefaultsection('input')
Config.adddefaultsection('postproc')
Config.adddefaultsection('widgets')
Config.adddefaultsection('modules')
Config.adddefaultsection('network')
# Upgrade default configuration until we have the current version
need_save = False
if version != KIVY_CONFIG_VERSION and 'KIVY_NO_CONFIG' not in environ:
Logger.warning('Config: Older configuration version detected'
' ({0} instead of {1})'.format(
version, KIVY_CONFIG_VERSION))
Logger.warning('Config: Upgrading configuration in progress.')
need_save = True
while version < KIVY_CONFIG_VERSION:
Logger.debug('Config: Upgrading from %d to %d' %
(version, version + 1))
if version == 0:
# log level
Config.setdefault('kivy', 'keyboard_repeat_delay', '300')
Config.setdefault('kivy', 'keyboard_repeat_rate', '30')
Config.setdefault('kivy', 'log_dir', 'logs')
Config.setdefault('kivy', 'log_enable', '1')
Config.setdefault('kivy', 'log_level', 'info')
Config.setdefault('kivy', 'log_name', 'kivy_%y-%m-%d_%_.txt')
Config.setdefault('kivy', 'window_icon', '')
# default graphics parameters
Config.setdefault('graphics', 'display', '-1')
Config.setdefault('graphics', 'fullscreen', 'no')
Config.setdefault('graphics', 'height', '600')
Config.setdefault('graphics', 'left', '0')
Config.setdefault('graphics', 'maxfps', '0')
Config.setdefault('graphics', 'multisamples', '2')
Config.setdefault('graphics', 'position', 'auto')
Config.setdefault('graphics', 'rotation', '0')
Config.setdefault('graphics', 'show_cursor', '1')
Config.setdefault('graphics', 'top', '0')
Config.setdefault('graphics', 'width', '800')
# input configuration
Config.setdefault('input', 'mouse', 'mouse')
# activate native input provider in configuration
            # from 1.0.9, don't activate mactouch by default, or apps
            # become unusable.
if platform == 'win':
Config.setdefault('input', 'wm_touch', 'wm_touch')
Config.setdefault('input', 'wm_pen', 'wm_pen')
elif platform == 'linux':
probesysfs = 'probesysfs'
if _is_rpi:
probesysfs += ',provider=hidinput'
Config.setdefault('input', '%(name)s', probesysfs)
# input postprocessing configuration
Config.setdefault('postproc', 'double_tap_distance', '20')
Config.setdefault('postproc', 'double_tap_time', '250')
Config.setdefault('postproc', 'ignore', '[]')
Config.setdefault('postproc', 'jitter_distance', '0')
Config.setdefault('postproc', 'jitter_ignore_devices',
'mouse,mactouch,')
Config.setdefault('postproc', 'retain_distance', '50')
Config.setdefault('postproc', 'retain_time', '0')
# default configuration for keyboard repetition
Config.setdefault('widgets', 'keyboard_layout', 'qwerty')
Config.setdefault('widgets', 'keyboard_type', '')
Config.setdefault('widgets', 'list_friction', '10')
Config.setdefault('widgets', 'list_friction_bound', '20')
Config.setdefault('widgets', 'list_trigger_distance', '5')
elif version == 1:
Config.set('graphics', 'maxfps', '60')
elif version == 2:
            # this version automatically copied the window icon into the user
            # directory; that behavior is no longer used. Users can still
            # change the window icon by editing the config.
pass
elif version == 3:
# add token for scrollview
Config.setdefault('widgets', 'scroll_timeout', '55')
Config.setdefault('widgets', 'scroll_distance', '20')
Config.setdefault('widgets', 'scroll_friction', '1.')
# remove old list_* token
Config.remove_option('widgets', 'list_friction')
Config.remove_option('widgets', 'list_friction_bound')
Config.remove_option('widgets', 'list_trigger_distance')
elif version == 4:
Config.remove_option('widgets', 'keyboard_type')
Config.remove_option('widgets', 'keyboard_layout')
# add keyboard token
Config.setdefault('kivy', 'keyboard_mode', '')
Config.setdefault('kivy', 'keyboard_layout', 'qwerty')
elif version == 5:
Config.setdefault('graphics', 'resizable', '1')
elif version == 6:
# if the timeout is still the default value, change it
Config.setdefault('widgets', 'scroll_stoptime', '300')
Config.setdefault('widgets', 'scroll_moves', '5')
elif version == 7:
# desktop bool indicating whether to use desktop specific features
is_desktop = int(platform in ('win', 'macosx', 'linux'))
Config.setdefault('kivy', 'desktop', is_desktop)
Config.setdefault('postproc', 'triple_tap_distance', '20')
Config.setdefault('postproc', 'triple_tap_time', '375')
elif version == 8:
if Config.getint('widgets', 'scroll_timeout') == 55:
Config.set('widgets', 'scroll_timeout', '250')
elif version == 9:
Config.setdefault('kivy', 'exit_on_escape', '1')
elif version == 10:
Config.set('graphics', 'fullscreen', '0')
Config.setdefault('graphics', 'borderless', '0')
elif version == 11:
Config.setdefault('kivy', 'pause_on_minimize', '0')
elif version == 12:
Config.setdefault('graphics', 'window_state', 'visible')
elif version == 13:
Config.setdefault('graphics', 'minimum_width', '0')
Config.setdefault('graphics', 'minimum_height', '0')
elif version == 14:
Config.setdefault('graphics', 'min_state_time', '.035')
elif version == 15:
Config.setdefault('kivy', 'kivy_clock', 'default')
elif version == 16:
Config.setdefault('kivy', 'default_font', [
'Roboto',
'data/fonts/Roboto-Regular.ttf',
'data/fonts/Roboto-Italic.ttf',
'data/fonts/Roboto-Bold.ttf',
'data/fonts/Roboto-BoldItalic.ttf'])
elif version == 17:
Config.setdefault('graphics', 'allow_screensaver', '1')
elif version == 18:
Config.setdefault('kivy', 'log_maxfiles', '100')
elif version == 19:
Config.setdefault('graphics', 'shaped', '0')
Config.setdefault(
'kivy', 'window_shape',
'data/images/defaultshape.png'
)
elif version == 20:
Config.setdefault('network', 'useragent', 'curl')
elif version == 21:
Config.setdefault('graphics', 'vsync', '')
elif version == 22:
Config.setdefault('graphics', 'verify_gl_main_thread', '1')
elif version == 23:
Config.setdefault('graphics', 'custom_titlebar', '0')
Config.setdefault('graphics', 'custom_titlebar_border', '5')
else:
            # for future versions.
break
# Pass to the next version
version += 1
    # Indicate to the Config that we've upgraded to the latest version.
Config.set('kivy', 'config_version', KIVY_CONFIG_VERSION)
# Now, activate log file
Logger.logfile_activated = bool(Config.getint('kivy', 'log_enable'))
    # If no configuration exists, write the default one.
if ((not exists(kivy_config_fn) or need_save) and
'KIVY_NO_CONFIG' not in environ):
try:
Config.filename = kivy_config_fn
Config.write()
except Exception as e:
Logger.exception('Core: Error while saving default config file')
# Load configuration from env
if environ.get('KIVY_NO_ENV_CONFIG', '0') != '1':
for key, value in environ.items():
if not key.startswith("KCFG_"):
continue
try:
_, section, name = key.split("_", 2)
except ValueError:
Logger.warning((
"Config: Environ `{}` invalid format, "
"must be KCFG_section_name").format(key))
continue
# extract and check section
section = section.lower()
if not Config.has_section(section):
Logger.warning(
"Config: Environ `{}`: unknown section `{}`".format(
key, section))
continue
# extract and check the option name
name = name.lower()
sections_to_check = {
"kivy", "graphics", "widgets", "postproc", "network"}
if (section in sections_to_check and
not Config.has_option(section, name)):
Logger.warning((
"Config: Environ `{}` unknown `{}` "
"option in `{}` section.").format(
key, name, section))
            # we deliberately still set unknown options, because external
            # modules or widgets (in garden?) may want to save their own
            # configuration here.
Config.set(section, name, value)
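    # Illustrative usage (a sketch): any KCFG_<SECTION>_<NAME> environment
    # variable set before Kivy is imported overrides the matching config
    # value, e.g.:
    #     KCFG_GRAPHICS_FULLSCREEN=1 python main.py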
| rnixx/kivy | kivy/config.py | Python | mit | 37,560 |
# coding: utf-8
"""test_isort.py.
Tests all major functionality of the isort library
Should be run using py.test by simply running py.test in the isort project directory
Copyright (C) 2013 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import codecs
import os
import shutil
import tempfile
from isort.isort import SortImports
from isort.pie_slice import *
from isort.settings import WrapModes
SHORT_IMPORT = "from third_party import lib1, lib2, lib3, lib4"
REALLY_LONG_IMPORT = ("from third_party import lib1, lib2, lib3, lib4, lib5, lib6, lib7, lib8, lib9, lib10, lib11,"
"lib12, lib13, lib14, lib15, lib16, lib17, lib18, lib20, lib21, lib22")
REALLY_LONG_IMPORT_WITH_COMMENT = ("from third_party import lib1, lib2, lib3, lib4, lib5, lib6, lib7, lib8, lib9, "
"lib10, lib11, lib12, lib13, lib14, lib15, lib16, lib17, lib18, lib20, lib21, lib22"
" # comment")
def test_happy_path():
"""Test the most basic use case, straight imports no code, simply not organized by category."""
test_input = ("import sys\n"
"import os\n"
"import myproject.test\n"
"import django.settings")
test_output = SortImports(file_contents=test_input, known_third_party=['django']).output
assert test_output == ("import os\n"
"import sys\n"
"\n"
"import django.settings\n"
"\n"
"import myproject.test\n")
def test_code_intermixed():
"""Defines what should happen when isort encounters imports intermixed with
code.
    (It should pull them all to the top.)
"""
test_input = ("import sys\n"
"print('yo')\n"
"print('I like to put code between imports cause I want stuff to break')\n"
"import myproject.test\n")
test_output = SortImports(file_contents=test_input).output
assert test_output == ("import sys\n"
"\n"
"import myproject.test\n"
"\n"
"print('yo')\n"
"print('I like to put code between imports cause I want stuff to break')\n")
def test_correct_space_between_imports():
"""Ensure after imports a correct amount of space (in newlines) is
enforced.
(2 for method, class, or decorator definitions 1 for anything else)
"""
test_input_method = ("import sys\n"
"def my_method():\n"
" print('hello world')\n")
test_output_method = SortImports(file_contents=test_input_method).output
assert test_output_method == ("import sys\n"
"\n"
"\n"
"def my_method():\n"
" print('hello world')\n")
test_input_decorator = ("import sys\n"
"@my_decorator\n"
"def my_method():\n"
" print('hello world')\n")
test_output_decorator = SortImports(file_contents=test_input_decorator).output
assert test_output_decorator == ("import sys\n"
"\n"
"\n"
"@my_decorator\n"
"def my_method():\n"
" print('hello world')\n")
test_input_class = ("import sys\n"
"class MyClass(object):\n"
" pass\n")
test_output_class = SortImports(file_contents=test_input_class).output
assert test_output_class == ("import sys\n"
"\n"
"\n"
"class MyClass(object):\n"
" pass\n")
test_input_other = ("import sys\n"
"print('yo')\n")
test_output_other = SortImports(file_contents=test_input_other).output
assert test_output_other == ("import sys\n"
"\n"
"print('yo')\n")
def test_sort_on_number():
"""Ensure numbers get sorted logically (10 > 9 not the other way around)"""
test_input = ("import lib10\n"
"import lib9\n")
test_output = SortImports(file_contents=test_input).output
assert test_output == ("import lib9\n"
"import lib10\n")
def test_line_length():
"""Ensure isort enforces the set line_length."""
assert len(SortImports(file_contents=REALLY_LONG_IMPORT, line_length=80).output.split("\n")[0]) <= 80
assert len(SortImports(file_contents=REALLY_LONG_IMPORT, line_length=120).output.split("\n")[0]) <= 120
test_output = SortImports(file_contents=REALLY_LONG_IMPORT, line_length=42).output
assert test_output == ("from third_party import (lib1, lib2, lib3,\n"
" lib4, lib5, lib6,\n"
" lib7, lib8, lib9,\n"
" lib10, lib11,\n"
" lib12, lib13,\n"
" lib14, lib15,\n"
" lib16, lib17,\n"
" lib18, lib20,\n"
" lib21, lib22)\n")
test_output = SortImports(file_contents=REALLY_LONG_IMPORT, line_length=42, wrap_length=32).output
assert test_output == ("from third_party import (lib1,\n"
" lib2,\n"
" lib3,\n"
" lib4,\n"
" lib5,\n"
" lib6,\n"
" lib7,\n"
" lib8,\n"
" lib9,\n"
" lib10,\n"
" lib11,\n"
" lib12,\n"
" lib13,\n"
" lib14,\n"
" lib15,\n"
" lib16,\n"
" lib17,\n"
" lib18,\n"
" lib20,\n"
" lib21,\n"
" lib22)\n")
def test_output_modes():
"""Test setting isort to use various output modes works as expected"""
test_output_grid = SortImports(file_contents=REALLY_LONG_IMPORT,
multi_line_output=WrapModes.GRID, line_length=40).output
assert test_output_grid == ("from third_party import (lib1, lib2,\n"
" lib3, lib4,\n"
" lib5, lib6,\n"
" lib7, lib8,\n"
" lib9, lib10,\n"
" lib11, lib12,\n"
" lib13, lib14,\n"
" lib15, lib16,\n"
" lib17, lib18,\n"
" lib20, lib21,\n"
" lib22)\n")
test_output_vertical = SortImports(file_contents=REALLY_LONG_IMPORT,
multi_line_output=WrapModes.VERTICAL, line_length=40).output
assert test_output_vertical == ("from third_party import (lib1,\n"
" lib2,\n"
" lib3,\n"
" lib4,\n"
" lib5,\n"
" lib6,\n"
" lib7,\n"
" lib8,\n"
" lib9,\n"
" lib10,\n"
" lib11,\n"
" lib12,\n"
" lib13,\n"
" lib14,\n"
" lib15,\n"
" lib16,\n"
" lib17,\n"
" lib18,\n"
" lib20,\n"
" lib21,\n"
" lib22)\n")
comment_output_vertical = SortImports(file_contents=REALLY_LONG_IMPORT_WITH_COMMENT,
multi_line_output=WrapModes.VERTICAL, line_length=40).output
assert comment_output_vertical == ("from third_party import (lib1, # comment\n"
" lib2,\n"
" lib3,\n"
" lib4,\n"
" lib5,\n"
" lib6,\n"
" lib7,\n"
" lib8,\n"
" lib9,\n"
" lib10,\n"
" lib11,\n"
" lib12,\n"
" lib13,\n"
" lib14,\n"
" lib15,\n"
" lib16,\n"
" lib17,\n"
" lib18,\n"
" lib20,\n"
" lib21,\n"
" lib22)\n")
test_output_hanging_indent = SortImports(file_contents=REALLY_LONG_IMPORT,
multi_line_output=WrapModes.HANGING_INDENT,
line_length=40, indent=" ").output
assert test_output_hanging_indent == ("from third_party import lib1, lib2, \\\n"
" lib3, lib4, lib5, lib6, lib7, \\\n"
" lib8, lib9, lib10, lib11, lib12, \\\n"
" lib13, lib14, lib15, lib16, lib17, \\\n"
" lib18, lib20, lib21, lib22\n")
comment_output_hanging_indent = SortImports(file_contents=REALLY_LONG_IMPORT_WITH_COMMENT,
multi_line_output=WrapModes.HANGING_INDENT,
line_length=40, indent=" ").output
assert comment_output_hanging_indent == ("from third_party import lib1, \\ # comment\n"
" lib2, lib3, lib4, lib5, lib6, \\\n"
" lib7, lib8, lib9, lib10, lib11, \\\n"
" lib12, lib13, lib14, lib15, lib16, \\\n"
" lib17, lib18, lib20, lib21, lib22\n")
test_output_vertical_indent = SortImports(file_contents=REALLY_LONG_IMPORT,
multi_line_output=WrapModes.VERTICAL_HANGING_INDENT,
line_length=40, indent=" ").output
assert test_output_vertical_indent == ("from third_party import (\n"
" lib1,\n"
" lib2,\n"
" lib3,\n"
" lib4,\n"
" lib5,\n"
" lib6,\n"
" lib7,\n"
" lib8,\n"
" lib9,\n"
" lib10,\n"
" lib11,\n"
" lib12,\n"
" lib13,\n"
" lib14,\n"
" lib15,\n"
" lib16,\n"
" lib17,\n"
" lib18,\n"
" lib20,\n"
" lib21,\n"
" lib22\n"
")\n")
comment_output_vertical_indent = SortImports(file_contents=REALLY_LONG_IMPORT_WITH_COMMENT,
multi_line_output=WrapModes.VERTICAL_HANGING_INDENT,
line_length=40, indent=" ").output
assert comment_output_vertical_indent == ("from third_party import ( # comment\n"
" lib1,\n"
" lib2,\n"
" lib3,\n"
" lib4,\n"
" lib5,\n"
" lib6,\n"
" lib7,\n"
" lib8,\n"
" lib9,\n"
" lib10,\n"
" lib11,\n"
" lib12,\n"
" lib13,\n"
" lib14,\n"
" lib15,\n"
" lib16,\n"
" lib17,\n"
" lib18,\n"
" lib20,\n"
" lib21,\n"
" lib22\n"
")\n")
test_output_vertical_grid = SortImports(file_contents=REALLY_LONG_IMPORT,
multi_line_output=WrapModes.VERTICAL_GRID,
line_length=40, indent=" ").output
assert test_output_vertical_grid == ("from third_party import (\n"
" lib1, lib2, lib3, lib4, lib5, lib6,\n"
" lib7, lib8, lib9, lib10, lib11,\n"
" lib12, lib13, lib14, lib15, lib16,\n"
" lib17, lib18, lib20, lib21, lib22)\n")
comment_output_vertical_grid = SortImports(file_contents=REALLY_LONG_IMPORT_WITH_COMMENT,
multi_line_output=WrapModes.VERTICAL_GRID,
line_length=40, indent=" ").output
assert comment_output_vertical_grid == ("from third_party import ( # comment\n"
" lib1, lib2, lib3, lib4, lib5, lib6,\n"
" lib7, lib8, lib9, lib10, lib11,\n"
" lib12, lib13, lib14, lib15, lib16,\n"
" lib17, lib18, lib20, lib21, lib22)\n")
test_output_vertical_grid_grouped = SortImports(file_contents=REALLY_LONG_IMPORT,
multi_line_output=WrapModes.VERTICAL_GRID_GROUPED,
line_length=40, indent=" ").output
assert test_output_vertical_grid_grouped == ("from third_party import (\n"
" lib1, lib2, lib3, lib4, lib5, lib6,\n"
" lib7, lib8, lib9, lib10, lib11,\n"
" lib12, lib13, lib14, lib15, lib16,\n"
" lib17, lib18, lib20, lib21, lib22\n"
")\n")
comment_output_vertical_grid_grouped = SortImports(file_contents=REALLY_LONG_IMPORT_WITH_COMMENT,
multi_line_output=WrapModes.VERTICAL_GRID_GROUPED,
line_length=40, indent=" ").output
assert comment_output_vertical_grid_grouped == ("from third_party import ( # comment\n"
" lib1, lib2, lib3, lib4, lib5, lib6,\n"
" lib7, lib8, lib9, lib10, lib11,\n"
" lib12, lib13, lib14, lib15, lib16,\n"
" lib17, lib18, lib20, lib21, lib22\n"
")\n")
output_noqa = SortImports(file_contents=REALLY_LONG_IMPORT_WITH_COMMENT,
multi_line_output=WrapModes.NOQA).output
assert output_noqa == "from third_party import lib1, lib2, lib3, lib4, lib5, lib6, lib7, lib8, lib9, lib10, lib11, lib12, lib13, lib14, lib15, lib16, lib17, lib18, lib20, lib21, lib22 # NOQA comment\n" # NOQA
def test_qa_comment_case():
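    """Test that NOQA mode keeps imports carrying a NOQA comment on a single
    line, even beyond the configured line length."""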
test_input = "from veryveryveryveryveryveryveryveryveryveryvery import X # NOQA"
test_output = SortImports(file_contents=test_input, line_length=40, multi_line_output=WrapModes.NOQA).output
assert test_output == "from veryveryveryveryveryveryveryveryveryveryvery import X # NOQA\n"
test_input = "import veryveryveryveryveryveryveryveryveryveryvery # NOQA"
test_output = SortImports(file_contents=test_input, line_length=40, multi_line_output=WrapModes.NOQA).output
assert test_output == "import veryveryveryveryveryveryveryveryveryveryvery # NOQA\n"
def test_length_sort():
"""Test setting isort to sort on length instead of alphabetically."""
test_input = ("import medium_sizeeeeeeeeeeeeee\n"
"import shortie\n"
"import looooooooooooooooooooooooooooooooooooooong\n"
"import medium_sizeeeeeeeeeeeeea\n")
test_output = SortImports(file_contents=test_input, length_sort=True).output
assert test_output == ("import shortie\n"
"import medium_sizeeeeeeeeeeeeea\n"
"import medium_sizeeeeeeeeeeeeee\n"
"import looooooooooooooooooooooooooooooooooooooong\n")
def test_convert_hanging():
"""Ensure that isort will convert hanging indents to correct indent
method."""
test_input = ("from third_party import lib1, lib2, \\\n"
" lib3, lib4, lib5, lib6, lib7, \\\n"
" lib8, lib9, lib10, lib11, lib12, \\\n"
" lib13, lib14, lib15, lib16, lib17, \\\n"
" lib18, lib20, lib21, lib22\n")
test_output = SortImports(file_contents=test_input, multi_line_output=WrapModes.GRID,
line_length=40).output
assert test_output == ("from third_party import (lib1, lib2,\n"
" lib3, lib4,\n"
" lib5, lib6,\n"
" lib7, lib8,\n"
" lib9, lib10,\n"
" lib11, lib12,\n"
" lib13, lib14,\n"
" lib15, lib16,\n"
" lib17, lib18,\n"
" lib20, lib21,\n"
" lib22)\n")
def test_custom_indent():
"""Ensure setting a custom indent will work as expected."""
test_output = SortImports(file_contents=REALLY_LONG_IMPORT, multi_line_output=WrapModes.HANGING_INDENT,
line_length=40, indent=" ", balanced_wrapping=False).output
assert test_output == ("from third_party import lib1, lib2, \\\n"
" lib3, lib4, lib5, lib6, lib7, lib8, \\\n"
" lib9, lib10, lib11, lib12, lib13, \\\n"
" lib14, lib15, lib16, lib17, lib18, \\\n"
" lib20, lib21, lib22\n")
test_output = SortImports(file_contents=REALLY_LONG_IMPORT, multi_line_output=WrapModes.HANGING_INDENT,
line_length=40, indent="' '", balanced_wrapping=False).output
assert test_output == ("from third_party import lib1, lib2, \\\n"
" lib3, lib4, lib5, lib6, lib7, lib8, \\\n"
" lib9, lib10, lib11, lib12, lib13, \\\n"
" lib14, lib15, lib16, lib17, lib18, \\\n"
" lib20, lib21, lib22\n")
test_output = SortImports(file_contents=REALLY_LONG_IMPORT, multi_line_output=WrapModes.HANGING_INDENT,
line_length=40, indent="tab", balanced_wrapping=False).output
assert test_output == ("from third_party import lib1, lib2, \\\n"
"\tlib3, lib4, lib5, lib6, lib7, lib8, \\\n"
"\tlib9, lib10, lib11, lib12, lib13, \\\n"
"\tlib14, lib15, lib16, lib17, lib18, \\\n"
"\tlib20, lib21, lib22\n")
test_output = SortImports(file_contents=REALLY_LONG_IMPORT, multi_line_output=WrapModes.HANGING_INDENT,
line_length=40, indent=2, balanced_wrapping=False).output
assert test_output == ("from third_party import lib1, lib2, \\\n"
" lib3, lib4, lib5, lib6, lib7, lib8, \\\n"
" lib9, lib10, lib11, lib12, lib13, \\\n"
" lib14, lib15, lib16, lib17, lib18, \\\n"
" lib20, lib21, lib22\n")
def test_use_parentheses():
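    """Test that use_parentheses wraps a long from import with parentheses
    rather than a backslash continuation."""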
test_input = (
"from fooooooooooooooooooooooooo.baaaaaaaaaaaaaaaaaaarrrrrrr import \\"
" my_custom_function as my_special_function"
)
test_output = SortImports(
file_contents=test_input, known_third_party=['django'],
line_length=79, use_parentheses=True,
).output
assert '(' in test_output
def test_skip():
"""Ensure skipping a single import will work as expected."""
test_input = ("import myproject\n"
"import django\n"
"print('hey')\n"
"import sys # isort:skip this import needs to be placed here\n\n\n\n\n\n\n")
test_output = SortImports(file_contents=test_input, known_third_party=['django']).output
assert test_output == ("import django\n"
"\n"
"import myproject\n"
"\n"
"print('hey')\n"
"import sys # isort:skip this import needs to be placed here\n")
def test_skip_with_file_name():
"""Ensure skipping a file works even when file_contents is provided."""
test_input = ("import django\n"
"import myproject\n")
skipped = SortImports(file_path='/baz.py', file_contents=test_input, known_third_party=['django'],
skip=['baz.py']).skipped
assert skipped
def test_force_to_top():
"""Ensure forcing a single import to the top of its category works as expected."""
test_input = ("import lib6\n"
"import lib2\n"
"import lib5\n"
"import lib1\n")
test_output = SortImports(file_contents=test_input, force_to_top=['lib5']).output
assert test_output == ("import lib5\n"
"import lib1\n"
"import lib2\n"
"import lib6\n")
def test_add_imports():
"""Ensures adding imports works as expected."""
test_input = ("import lib6\n"
"import lib2\n"
"import lib5\n"
"import lib1\n\n")
test_output = SortImports(file_contents=test_input, add_imports=['import lib4', 'import lib7']).output
assert test_output == ("import lib1\n"
"import lib2\n"
"import lib4\n"
"import lib5\n"
"import lib6\n"
"import lib7\n")
# Using simplified syntax
test_input = ("import lib6\n"
"import lib2\n"
"import lib5\n"
"import lib1\n\n")
test_output = SortImports(file_contents=test_input, add_imports=['lib4', 'lib7', 'lib8.a']).output
assert test_output == ("import lib1\n"
"import lib2\n"
"import lib4\n"
"import lib5\n"
"import lib6\n"
"import lib7\n"
"from lib8 import a\n")
# On a file that has no pre-existing imports
test_input = ('"""Module docstring"""\n'
'\n'
'class MyClass(object):\n'
' pass\n')
test_output = SortImports(file_contents=test_input, add_imports=['from __future__ import print_function']).output
assert test_output == ('"""Module docstring"""\n'
'from __future__ import print_function\n'
'\n'
'\n'
'class MyClass(object):\n'
' pass\n')
# On a file that has no pre-existing imports, and no doc-string
test_input = ('class MyClass(object):\n'
' pass\n')
test_output = SortImports(file_contents=test_input, add_imports=['from __future__ import print_function']).output
assert test_output == ('from __future__ import print_function\n'
'\n'
'\n'
'class MyClass(object):\n'
' pass\n')
# On a file with no content what so ever
test_input = ("")
test_output = SortImports(file_contents=test_input, add_imports=['lib4']).output
assert test_output == ("")
# On a file with no content what so ever, after force_adds is set to True
test_input = ("")
test_output = SortImports(file_contents=test_input, add_imports=['lib4'], force_adds=True).output
assert test_output == ("import lib4\n")
def test_remove_imports():
"""Ensures removing imports works as expected."""
test_input = ("import lib6\n"
"import lib2\n"
"import lib5\n"
"import lib1")
test_output = SortImports(file_contents=test_input, remove_imports=['lib2', 'lib6']).output
assert test_output == ("import lib1\n"
"import lib5\n")
# Using natural syntax
test_input = ("import lib6\n"
"import lib2\n"
"import lib5\n"
"import lib1\n"
"from lib8 import a")
test_output = SortImports(file_contents=test_input, remove_imports=['import lib2', 'import lib6',
'from lib8 import a']).output
assert test_output == ("import lib1\n"
"import lib5\n")
def test_explicitly_local_import():
"""Ensure that explicitly local imports are separated."""
test_input = ("import lib1\n"
"import lib2\n"
"import .lib6\n"
"from . import lib7")
assert SortImports(file_contents=test_input).output == ("import lib1\n"
"import lib2\n"
"\n"
"import .lib6\n"
"from . import lib7\n")
def test_quotes_in_file():
"""Ensure imports within triple quotes don't get imported."""
test_input = ('import os\n'
'\n'
'"""\n'
'Let us\n'
'import foo\n'
'okay?\n'
'"""\n')
assert SortImports(file_contents=test_input).output == test_input
test_input = ('import os\n'
'\n'
"'\"\"\"'\n"
'import foo\n')
assert SortImports(file_contents=test_input).output == ('import os\n'
'\n'
'import foo\n'
'\n'
"'\"\"\"'\n")
test_input = ('import os\n'
'\n'
'"""Let us"""\n'
'import foo\n'
'"""okay?"""\n')
assert SortImports(file_contents=test_input).output == ('import os\n'
'\n'
'import foo\n'
'\n'
'"""Let us"""\n'
'"""okay?"""\n')
test_input = ('import os\n'
'\n'
'#"""\n'
'import foo\n'
'#"""')
assert SortImports(file_contents=test_input).output == ('import os\n'
'\n'
'import foo\n'
'\n'
'#"""\n'
'#"""\n')
test_input = ('import os\n'
'\n'
"'\\\n"
"import foo'\n")
assert SortImports(file_contents=test_input).output == test_input
test_input = ('import os\n'
'\n'
"'''\n"
"\\'''\n"
'import junk\n'
"'''\n")
assert SortImports(file_contents=test_input).output == test_input
def test_check_newline_in_imports(capsys):
"""Ensure tests works correctly when new lines are in imports."""
test_input = ('from lib1 import (\n'
' sub1,\n'
' sub2,\n'
' sub3\n)\n')
SortImports(file_contents=test_input, multi_line_output=WrapModes.VERTICAL_HANGING_INDENT, line_length=20,
check=True, verbose=True)
out, err = capsys.readouterr()
assert 'SUCCESS' in out
def test_forced_separate():
"""Ensure that forcing certain sub modules to show separately works as expected."""
test_input = ('import sys\n'
'import warnings\n'
'from collections import OrderedDict\n'
'\n'
'from django.core.exceptions import ImproperlyConfigured, SuspiciousOperation\n'
'from django.core.paginator import InvalidPage\n'
'from django.core.urlresolvers import reverse\n'
'from django.db import models\n'
'from django.db.models.fields import FieldDoesNotExist\n'
'from django.utils import six\n'
'from django.utils.deprecation import RenameMethodsBase\n'
'from django.utils.encoding import force_str, force_text\n'
'from django.utils.http import urlencode\n'
'from django.utils.translation import ugettext, ugettext_lazy\n'
'\n'
'from django.contrib.admin import FieldListFilter\n'
'from django.contrib.admin.exceptions import DisallowedModelAdminLookup\n'
'from django.contrib.admin.options import IncorrectLookupParameters, IS_POPUP_VAR, TO_FIELD_VAR\n')
assert SortImports(file_contents=test_input, forced_separate=['django.contrib'],
known_third_party=['django'], line_length=120, order_by_type=False).output == test_input
test_input = ('from .foo import bar\n'
'\n'
'from .y import ca\n')
assert SortImports(file_contents=test_input, forced_separate=['.y'],
line_length=120, order_by_type=False).output == test_input
def test_default_section():
"""Test to ensure changing the default section works as expected."""
test_input = ("import sys\n"
"import os\n"
"import myproject.test\n"
"import django.settings")
test_output = SortImports(file_contents=test_input, known_third_party=['django'],
default_section="FIRSTPARTY").output
assert test_output == ("import os\n"
"import sys\n"
"\n"
"import django.settings\n"
"\n"
"import myproject.test\n")
test_output_custom = SortImports(file_contents=test_input, known_third_party=['django'],
default_section="STDLIB").output
assert test_output_custom == ("import myproject.test\n"
"import os\n"
"import sys\n"
"\n"
"import django.settings\n")
def test_first_party_overrides_standard_section():
"""Test to ensure changing the default section works as expected."""
test_input = ("import sys\n"
"import os\n"
"import profile.test\n")
test_output = SortImports(file_contents=test_input, known_first_party=['profile']).output
assert test_output == ("import os\n"
"import sys\n"
"\n"
"import profile.test\n")
def test_thirdy_party_overrides_standard_section():
"""Test to ensure changing the default section works as expected."""
test_input = ("import sys\n"
"import os\n"
"import profile.test\n")
test_output = SortImports(file_contents=test_input, known_third_party=['profile']).output
assert test_output == ("import os\n"
"import sys\n"
"\n"
"import profile.test\n")
def test_force_single_line_imports():
"""Test to ensure forcing imports to each have their own line works as expected."""
test_input = ("from third_party import lib1, lib2, \\\n"
" lib3, lib4, lib5, lib6, lib7, \\\n"
" lib8, lib9, lib10, lib11, lib12, \\\n"
" lib13, lib14, lib15, lib16, lib17, \\\n"
" lib18, lib20, lib21, lib22\n")
test_output = SortImports(file_contents=test_input, multi_line_output=WrapModes.GRID,
line_length=40, force_single_line=True).output
assert test_output == ("from third_party import lib1\n"
"from third_party import lib2\n"
"from third_party import lib3\n"
"from third_party import lib4\n"
"from third_party import lib5\n"
"from third_party import lib6\n"
"from third_party import lib7\n"
"from third_party import lib8\n"
"from third_party import lib9\n"
"from third_party import lib10\n"
"from third_party import lib11\n"
"from third_party import lib12\n"
"from third_party import lib13\n"
"from third_party import lib14\n"
"from third_party import lib15\n"
"from third_party import lib16\n"
"from third_party import lib17\n"
"from third_party import lib18\n"
"from third_party import lib20\n"
"from third_party import lib21\n"
"from third_party import lib22\n")
def test_force_single_line_long_imports():
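    """Test force_single_line combined with NOQA mode on imports that exceed
    the line length."""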
test_input = ("from veryveryveryveryveryvery import small, big\n")
test_output = SortImports(file_contents=test_input, multi_line_output=WrapModes.NOQA,
line_length=40, force_single_line=True).output
assert test_output == ("from veryveryveryveryveryvery import big\n"
"from veryveryveryveryveryvery import small # NOQA\n")
def test_titled_imports():
"""Tests setting custom titled/commented import sections."""
test_input = ("import sys\n"
"import unicodedata\n"
"import statistics\n"
"import os\n"
"import myproject.test\n"
"import django.settings")
test_output = SortImports(file_contents=test_input, known_third_party=['django'],
import_heading_stdlib="Standard Library", import_heading_firstparty="My Stuff").output
assert test_output == ("# Standard Library\n"
"import os\n"
"import statistics\n"
"import sys\n"
"import unicodedata\n"
"\n"
"import django.settings\n"
"\n"
"# My Stuff\n"
"import myproject.test\n")
test_second_run = SortImports(file_contents=test_output, known_third_party=['django'],
import_heading_stdlib="Standard Library", import_heading_firstparty="My Stuff").output
assert test_second_run == test_output
def test_balanced_wrapping():
"""Tests balanced wrapping mode, where the length of individual lines maintain width."""
test_input = ("from __future__ import (absolute_import, division, print_function,\n"
" unicode_literals)")
test_output = SortImports(file_contents=test_input, line_length=70, balanced_wrapping=True).output
assert test_output == ("from __future__ import (absolute_import, division,\n"
" print_function, unicode_literals)\n")
def test_relative_import_with_space():
"""Tests the case where the relation and the module that is being imported from is separated with a space."""
test_input = ("from ... fields.sproqet import SproqetCollection")
assert SortImports(file_contents=test_input).output == ("from ...fields.sproqet import SproqetCollection\n")
def test_multiline_import():
"""Test the case where import spawns multiple lines with inconsistent indentation."""
test_input = ("from pkg \\\n"
" import stuff, other_suff \\\n"
" more_stuff")
assert SortImports(file_contents=test_input).output == ("from pkg import more_stuff, other_suff, stuff\n")
# test again with a custom configuration
custom_configuration = {'force_single_line': True,
'line_length': 120,
'known_first_party': ['asdf', 'qwer'],
'default_section': 'THIRDPARTY',
'forced_separate': 'asdf'}
expected_output = ("from pkg import more_stuff\n"
"from pkg import other_suff\n"
"from pkg import stuff\n")
assert SortImports(file_contents=test_input, **custom_configuration).output == expected_output
def test_single_multiline():
"""Test the case where a single import spawns multiple lines."""
test_input = ("from os import\\\n"
" getuid\n"
"\n"
"print getuid()\n")
output = SortImports(file_contents=test_input).output
assert output == (
"from os import getuid\n"
"\n"
"print getuid()\n"
)
def test_atomic_mode():
# without syntax error, everything works OK
test_input = ("from b import d, c\n"
"from a import f, e\n")
assert SortImports(file_contents=test_input, atomic=True).output == ("from a import e, f\n"
"from b import c, d\n")
# with syntax error content is not changed
test_input += "while True print 'Hello world'" # blatant syntax error
assert SortImports(file_contents=test_input, atomic=True).output == test_input
def test_order_by_type():
test_input = "from module import Class, CONSTANT, function"
assert SortImports(file_contents=test_input,
order_by_type=True).output == ("from module import CONSTANT, Class, function\n")
# More complex sample data
test_input = "from module import Class, CONSTANT, function, BASIC, Apple"
assert SortImports(file_contents=test_input,
order_by_type=True).output == ("from module import BASIC, CONSTANT, Apple, Class, function\n")
# Really complex sample data, to verify we don't mess with top level imports, only nested ones
test_input = ("import StringIO\n"
"import glob\n"
"import os\n"
"import shutil\n"
"import tempfile\n"
"import time\n"
"from subprocess import PIPE, Popen, STDOUT\n")
assert SortImports(file_contents=test_input, order_by_type=True).output == \
("import glob\n"
"import os\n"
"import shutil\n"
"import StringIO\n"
"import tempfile\n"
"import time\n"
"from subprocess import PIPE, STDOUT, Popen\n")
def test_custom_lines_after_import_section():
"""Test the case where the number of lines to output after imports has been explicitly set."""
test_input = ("from a import b\n"
"foo = 'bar'\n")
# default case is one space if not method or class after imports
assert SortImports(file_contents=test_input).output == ("from a import b\n"
"\n"
"foo = 'bar'\n")
# test again with a custom number of lines after the import section
assert SortImports(file_contents=test_input, lines_after_imports=2).output == ("from a import b\n"
"\n"
"\n"
"foo = 'bar'\n")
def test_smart_lines_after_import_section():
"""Tests the default 'smart' behavior for dealing with lines after the import section"""
# one space if not method or class after imports
test_input = ("from a import b\n"
"foo = 'bar'\n")
assert SortImports(file_contents=test_input).output == ("from a import b\n"
"\n"
"foo = 'bar'\n")
# two spaces if a method or class after imports
test_input = ("from a import b\n"
"def my_function():\n"
" pass\n")
assert SortImports(file_contents=test_input).output == ("from a import b\n"
"\n"
"\n"
"def my_function():\n"
" pass\n")
# two spaces if a method or class after imports - even if comment before function
test_input = ("from a import b\n"
"# comment should be ignored\n"
"def my_function():\n"
" pass\n")
assert SortImports(file_contents=test_input).output == ("from a import b\n"
"\n"
"\n"
"# comment should be ignored\n"
"def my_function():\n"
" pass\n")
# ensure logic works with both style comments
test_input = ("from a import b\n"
'"""\n'
" comment should be ignored\n"
'"""\n'
"def my_function():\n"
" pass\n")
assert SortImports(file_contents=test_input).output == ("from a import b\n"
"\n"
"\n"
'"""\n'
" comment should be ignored\n"
'"""\n'
"def my_function():\n"
" pass\n")
def test_settings_combine_instead_of_overwrite():
"""Test to ensure settings combine logically, instead of fully overwriting."""
assert set(SortImports(known_standard_library=['not_std_library']).config['known_standard_library']) == \
set(SortImports().config['known_standard_library'] + ['not_std_library'])
assert set(SortImports(not_known_standard_library=['thread']).config['known_standard_library']) == \
set(item for item in SortImports().config['known_standard_library'] if item != 'thread')
def test_combined_from_and_as_imports():
"""Test to ensure it's possible to combine from and as imports."""
test_input = ("from translate.misc.multistring import multistring\n"
"from translate.storage import base, factory\n"
"from translate.storage.placeables import general, parse as rich_parse\n")
assert SortImports(file_contents=test_input, combine_as_imports=True).output == test_input
def test_as_imports_with_line_length():
"""Test to ensure it's possible to combine from and as imports."""
test_input = ("from translate.storage import base as storage_base\n"
"from translate.storage.placeables import general, parse as rich_parse\n")
assert SortImports(file_contents=test_input, combine_as_imports=False, line_length=40).output == \
("from translate.storage import \\\n base as storage_base\n"
"from translate.storage.placeables import \\\n parse as rich_parse\n"
"from translate.storage.placeables import \\\n general\n")
def test_keep_comments():
"""Test to ensure isort properly keeps comments in tact after sorting."""
# Straight Import
test_input = ("import foo # bar\n")
assert SortImports(file_contents=test_input).output == test_input
# Star import
test_input_star = ("from foo import * # bar\n")
assert SortImports(file_contents=test_input_star).output == test_input_star
# Force Single Line From Import
test_input = ("from foo import bar # comment\n")
assert SortImports(file_contents=test_input, force_single_line=True).output == test_input
# From import
test_input = ("from foo import bar # My Comment\n")
assert SortImports(file_contents=test_input).output == test_input
# More complicated case
test_input = ("from a import b # My Comment1\n"
"from a import c # My Comment2\n")
assert SortImports(file_contents=test_input).output == \
("from a import b # My Comment1\n"
"from a import c # My Comment2\n")
    # Test case where import comments make imports extend past the line length
test_input = ("from a import b # My Comment1\n"
"from a import c # My Comment2\n"
"from a import d\n")
assert SortImports(file_contents=test_input, line_length=45).output == \
("from a import b # My Comment1\n"
"from a import c # My Comment2\n"
"from a import d\n")
    # Test case where imports with comments will extend beyond the line length limit
test_input = ("from a import b, c # My Comment1\n"
"from a import c, d # My Comment2 is really really really really long\n")
assert SortImports(file_contents=test_input, line_length=45).output == \
("from a import (b, # My Comment1; My Comment2 is really really really really long\n"
" c, d)\n")
# Test that comments are not stripped from 'import ... as ...' by default
test_input = ("from a import b as bb # b comment\n"
"from a import c as cc # c comment\n")
assert SortImports(file_contents=test_input).output == test_input
# Test that 'import ... as ...' comments are not collected inappropriately
test_input = ("from a import b as bb # b comment\n"
"from a import c as cc # c comment\n"
"from a import d\n")
assert SortImports(file_contents=test_input).output == test_input
assert SortImports(file_contents=test_input, combine_as_imports=True).output == (
"from a import b as bb, c as cc, d # b comment; c comment\n"
)
def test_multiline_split_on_dot():
"""Test to ensure isort correctly handles multiline imports, even when split right after a '.'"""
test_input = ("from my_lib.my_package.test.level_1.level_2.level_3.level_4.level_5.\\\n"
" my_module import my_function")
assert SortImports(file_contents=test_input, line_length=70).output == \
("from my_lib.my_package.test.level_1.level_2.level_3.level_4.level_5.my_module import \\\n"
" my_function\n")
def test_import_star():
"""Test to ensure isort handles star imports correctly"""
test_input = ("from blah import *\n"
"from blah import _potato\n")
assert SortImports(file_contents=test_input).output == ("from blah import *\n"
"from blah import _potato\n")
assert SortImports(file_contents=test_input, combine_star=True).output == ("from blah import *\n")
def test_include_trailing_comma():
"""Test for the include_trailing_comma option"""
test_output_grid = SortImports(
file_contents=SHORT_IMPORT,
multi_line_output=WrapModes.GRID,
line_length=40,
include_trailing_comma=True,
).output
assert test_output_grid == (
"from third_party import (lib1, lib2,\n"
" lib3, lib4,)\n"
)
test_output_vertical = SortImports(
file_contents=SHORT_IMPORT,
multi_line_output=WrapModes.VERTICAL,
line_length=40,
include_trailing_comma=True,
).output
assert test_output_vertical == (
"from third_party import (lib1,\n"
" lib2,\n"
" lib3,\n"
" lib4,)\n"
)
test_output_vertical_indent = SortImports(
file_contents=SHORT_IMPORT,
multi_line_output=WrapModes.VERTICAL_HANGING_INDENT,
line_length=40,
include_trailing_comma=True,
).output
assert test_output_vertical_indent == (
"from third_party import (\n"
" lib1,\n"
" lib2,\n"
" lib3,\n"
" lib4,\n"
")\n"
)
test_output_vertical_grid = SortImports(
file_contents=SHORT_IMPORT,
multi_line_output=WrapModes.VERTICAL_GRID,
line_length=40,
include_trailing_comma=True,
).output
assert test_output_vertical_grid == (
"from third_party import (\n"
" lib1, lib2, lib3, lib4,)\n"
)
test_output_vertical_grid_grouped = SortImports(
file_contents=SHORT_IMPORT,
multi_line_output=WrapModes.VERTICAL_GRID_GROUPED,
line_length=40,
include_trailing_comma=True,
).output
assert test_output_vertical_grid_grouped == (
"from third_party import (\n"
" lib1, lib2, lib3, lib4,\n"
")\n"
)
def test_similar_to_std_library():
"""Test to ensure modules that are named similarly to a standard library import don't end up clobbered"""
test_input = ("import datetime\n"
"\n"
"import requests\n"
"import times\n")
assert SortImports(file_contents=test_input, known_third_party=["requests", "times"]).output == test_input
def test_correctly_placed_imports():
"""Test to ensure comments stay on correct placement after being sorted"""
test_input = ("from a import b # comment for b\n"
"from a import c # comment for c\n")
assert SortImports(file_contents=test_input, force_single_line=True).output == \
("from a import b # comment for b\n"
"from a import c # comment for c\n")
assert SortImports(file_contents=test_input).output == ("from a import b # comment for b\n"
"from a import c # comment for c\n")
# Full example test from issue #143
test_input = ("from itertools import chain\n"
"\n"
"from django.test import TestCase\n"
"from model_mommy import mommy\n"
"\n"
"from apps.clientman.commands.download_usage_rights import associate_right_for_item_product\n"
"from apps.clientman.commands.download_usage_rights import associate_right_for_item_product_d"
"efinition\n"
"from apps.clientman.commands.download_usage_rights import associate_right_for_item_product_d"
"efinition_platform\n"
"from apps.clientman.commands.download_usage_rights import associate_right_for_item_product_p"
"latform\n"
"from apps.clientman.commands.download_usage_rights import associate_right_for_territory_reta"
"il_model\n"
"from apps.clientman.commands.download_usage_rights import associate_right_for_territory_reta"
"il_model_definition_platform_provider # noqa\n"
"from apps.clientman.commands.download_usage_rights import clear_right_for_item_product\n"
"from apps.clientman.commands.download_usage_rights import clear_right_for_item_product_defini"
"tion\n"
"from apps.clientman.commands.download_usage_rights import clear_right_for_item_product_defini"
"tion_platform\n"
"from apps.clientman.commands.download_usage_rights import clear_right_for_item_product_platfo"
"rm\n"
"from apps.clientman.commands.download_usage_rights import clear_right_for_territory_retail_mo"
"del\n"
"from apps.clientman.commands.download_usage_rights import clear_right_for_territory_retail_mo"
"del_definition_platform_provider # noqa\n"
"from apps.clientman.commands.download_usage_rights import create_download_usage_right\n"
"from apps.clientman.commands.download_usage_rights import delete_download_usage_right\n"
"from apps.clientman.commands.download_usage_rights import disable_download_for_item_product\n"
"from apps.clientman.commands.download_usage_rights import disable_download_for_item_product_d"
"efinition\n"
"from apps.clientman.commands.download_usage_rights import disable_download_for_item_product_d"
"efinition_platform\n"
"from apps.clientman.commands.download_usage_rights import disable_download_for_item_product_p"
"latform\n"
"from apps.clientman.commands.download_usage_rights import disable_download_for_territory_reta"
"il_model\n"
"from apps.clientman.commands.download_usage_rights import disable_download_for_territory_reta"
"il_model_definition_platform_provider # noqa\n"
"from apps.clientman.commands.download_usage_rights import get_download_rights_for_item\n"
"from apps.clientman.commands.download_usage_rights import get_right\n")
assert SortImports(file_contents=test_input, force_single_line=True, line_length=140,
known_third_party=["django", "model_mommy"]).output == test_input
def test_auto_detection():
"""Initial test to ensure isort auto-detection works correctly - will grow over time as new issues are raised."""
# Issue 157
test_input = ("import binascii\n"
"import os\n"
"\n"
"import cv2\n"
"import requests\n")
assert SortImports(file_contents=test_input, known_third_party=["cv2", "requests"]).output == test_input
# alternative solution
assert SortImports(file_contents=test_input, default_section="THIRDPARTY").output == test_input
def test_same_line_statements():
"""Ensure isort correctly handles the case where a single line contains multiple statements including an import"""
test_input = ("import pdb; import nose\n")
assert SortImports(file_contents=test_input).output == ("import pdb\n"
"\n"
"import nose\n")
test_input = ("import pdb; pdb.set_trace()\n"
"import nose; nose.run()\n")
assert SortImports(file_contents=test_input).output == test_input
def test_long_line_comments():
"""Ensure isort correctly handles comments at the end of extremely long lines"""
test_input = ("from foo.utils.fabric_stuff.live import check_clean_live, deploy_live, sync_live_envdir, "
"update_live_app, update_live_cron # noqa\n"
"from foo.utils.fabric_stuff.stage import check_clean_stage, deploy_stage, sync_stage_envdir, "
"update_stage_app, update_stage_cron # noqa\n")
assert SortImports(file_contents=test_input).output == \
("from foo.utils.fabric_stuff.live import (check_clean_live, deploy_live, # noqa\n"
" sync_live_envdir, update_live_app, update_live_cron)\n"
"from foo.utils.fabric_stuff.stage import (check_clean_stage, deploy_stage, # noqa\n"
" sync_stage_envdir, update_stage_app, update_stage_cron)\n")
def test_tab_character_in_import():
"""Ensure isort correctly handles import statements that contain a tab character"""
test_input = ("from __future__ import print_function\n"
"from __future__ import\tprint_function\n")
assert SortImports(file_contents=test_input).output == "from __future__ import print_function\n"
def test_split_position():
"""Ensure isort splits on import instead of . when possible"""
test_input = ("from p24.shared.exceptions.master.host_state_flag_unchanged import HostStateUnchangedException\n")
assert SortImports(file_contents=test_input, line_length=80).output == \
("from p24.shared.exceptions.master.host_state_flag_unchanged import \\\n"
" HostStateUnchangedException\n")
def test_place_comments():
"""Ensure manually placing imports works as expected"""
test_input = ("import sys\n"
"import os\n"
"import myproject.test\n"
"import django.settings\n"
"\n"
"# isort:imports-thirdparty\n"
"# isort:imports-firstparty\n"
"print('code')\n"
"\n"
"# isort:imports-stdlib\n")
expected_output = ("\n# isort:imports-thirdparty\n"
"import django.settings\n"
"\n"
"# isort:imports-firstparty\n"
"import myproject.test\n"
"\n"
"print('code')\n"
"\n"
"# isort:imports-stdlib\n"
"import os\n"
"import sys\n")
test_output = SortImports(file_contents=test_input, known_third_party=['django']).output
assert test_output == expected_output
test_output = SortImports(file_contents=test_output, known_third_party=['django']).output
assert test_output == expected_output
def test_placement_control():
"""Ensure that most specific placement control match wins"""
test_input = ("import os\n"
"import sys\n"
"from bottle import Bottle, redirect, response, run\n"
"import p24.imports._argparse as argparse\n"
"import p24.imports._subprocess as subprocess\n"
"import p24.imports._VERSION as VERSION\n"
"import p24.shared.media_wiki_syntax as syntax\n")
test_output = SortImports(file_contents=test_input,
known_first_party=['p24', 'p24.imports._VERSION'],
known_standard_library=['p24.imports'],
known_third_party=['bottle'],
default_section="THIRDPARTY").output
assert test_output == ("import os\n"
"import p24.imports._argparse as argparse\n"
"import p24.imports._subprocess as subprocess\n"
"import sys\n"
"\n"
"from bottle import Bottle, redirect, response, run\n"
"\n"
"import p24.imports._VERSION as VERSION\n"
"import p24.shared.media_wiki_syntax as syntax\n")
def test_custom_sections():
"""Ensure that most specific placement control match wins"""
test_input = ("import os\n"
"import sys\n"
"from django.conf import settings\n"
"from bottle import Bottle, redirect, response, run\n"
"import p24.imports._argparse as argparse\n"
"from django.db import models\n"
"import p24.imports._subprocess as subprocess\n"
"import pandas as pd\n"
"import p24.imports._VERSION as VERSION\n"
"import numpy as np\n"
"import p24.shared.media_wiki_syntax as syntax\n")
test_output = SortImports(file_contents=test_input,
known_first_party=['p24', 'p24.imports._VERSION'],
import_heading_stdlib='Standard Library',
import_heading_thirdparty='Third Party',
import_heading_firstparty='First Party',
import_heading_django='Django',
import_heading_pandas='Pandas',
known_standard_library=['p24.imports'],
known_third_party=['bottle'],
known_django=['django'],
known_pandas=['pandas', 'numpy'],
default_section="THIRDPARTY",
sections=["FUTURE", "STDLIB", "DJANGO", "THIRDPARTY", "PANDAS", "FIRSTPARTY", "LOCALFOLDER"]).output
assert test_output == ("# Standard Library\n"
"import os\n"
"import p24.imports._argparse as argparse\n"
"import p24.imports._subprocess as subprocess\n"
"import sys\n"
"\n"
"# Django\n"
"from django.conf import settings\n"
"from django.db import models\n"
"\n"
"# Third Party\n"
"from bottle import Bottle, redirect, response, run\n"
"\n"
"# Pandas\n"
"import numpy as np\n"
"import pandas as pd\n"
"\n"
"# First Party\n"
"import p24.imports._VERSION as VERSION\n"
"import p24.shared.media_wiki_syntax as syntax\n")
def test_sticky_comments():
"""Test to ensure it is possible to make comments 'stick' above imports"""
test_input = ("import os\n"
"\n"
"# Used for type-hinting (ref: https://github.com/davidhalter/jedi/issues/414).\n"
"from selenium.webdriver.remote.webdriver import WebDriver # noqa\n")
assert SortImports(file_contents=test_input).output == test_input
test_input = ("from django import forms\n"
"# While this couples the geographic forms to the GEOS library,\n"
"# it decouples from database (by not importing SpatialBackend).\n"
"from django.contrib.gis.geos import GEOSException, GEOSGeometry\n"
"from django.utils.translation import ugettext_lazy as _\n")
assert SortImports(file_contents=test_input).output == test_input
def test_zipimport():
"""Imports ending in "import" shouldn't be clobbered"""
test_input = "from zipimport import zipimport\n"
assert SortImports(file_contents=test_input).output == test_input
def test_from_ending():
"""Imports ending in "from" shouldn't be clobbered."""
test_input = "from foo import get_foo_from, get_foo\n"
expected_output = "from foo import get_foo, get_foo_from\n"
assert SortImports(file_contents=test_input).output == expected_output
def test_from_first():
"""Tests the setting from_first works correctly"""
test_input = "from os import path\nimport os\n"
assert SortImports(file_contents=test_input, from_first=True).output == test_input
def test_top_comments():
"""Ensure correct behavior with top comments"""
test_input = ("# -*- encoding: utf-8 -*-\n"
"# Test comment\n"
"#\n"
"from __future__ import unicode_literals\n")
assert SortImports(file_contents=test_input).output == test_input
test_input = ("# -*- coding: utf-8 -*-\n"
"from django.db import models\n"
"from django.utils.encoding import python_2_unicode_compatible\n")
assert SortImports(file_contents=test_input).output == test_input
test_input = ("# Comment\n"
"import sys\n")
assert SortImports(file_contents=test_input).output == test_input
test_input = ("# -*- coding\n"
"import sys\n")
assert SortImports(file_contents=test_input).output == test_input
def test_consistency():
"""Ensures consistency of handling even when dealing with non ordered-by-type imports"""
test_input = "from sqlalchemy.dialects.postgresql import ARRAY, array\n"
assert SortImports(file_contents=test_input, order_by_type=True).output == test_input
def test_force_grid_wrap():
"""Ensures removing imports works as expected."""
test_input = (
"from foo import lib6, lib7\n"
"from bar import lib2\n"
)
test_output = SortImports(
file_contents=test_input,
force_grid_wrap=True,
multi_line_output=WrapModes.VERTICAL_HANGING_INDENT
).output
assert test_output == """from bar import lib2
from foo import (
lib6,
lib7
)
"""
def test_force_grid_wrap_long():
"""Ensure that force grid wrap still happens with long line length"""
test_input = (
"from foo import lib6, lib7\n"
"from bar import lib2\n"
"from babar import something_that_is_kind_of_long"
)
test_output = SortImports(
file_contents=test_input,
force_grid_wrap=True,
multi_line_output=WrapModes.VERTICAL_HANGING_INDENT,
line_length=9999,
).output
assert test_output == """from babar import something_that_is_kind_of_long
from bar import lib2
from foo import (
lib6,
lib7
)
"""
def test_uses_jinja_variables():
"""Test a basic set of imports that use jinja variables"""
test_input = ("import sys\n"
"import os\n"
"import myproject.{ test }\n"
"import django.{ settings }")
test_output = SortImports(file_contents=test_input, known_third_party=['django'],
known_first_party=['myproject']).output
assert test_output == ("import os\n"
"import sys\n"
"\n"
"import django.{ settings }\n"
"\n"
"import myproject.{ test }\n")
test_input = ("import {{ cookiecutter.repo_name }}\n"
"from foo import {{ cookiecutter.bar }}\n")
assert SortImports(file_contents=test_input).output == test_input
def test_fcntl():
"""Test to ensure fcntl gets correctly recognized as stdlib import"""
test_input = ("import fcntl\n"
"import os\n"
"import sys\n")
assert SortImports(file_contents=test_input).output == test_input
def test_import_split_is_word_boundary_aware():
"""Test to ensure that isort splits words in a boundry aware mannor"""
test_input = ("from mycompany.model.size_value_array_import_func import \\\n"
" get_size_value_array_import_func_jobs")
test_output = SortImports(file_contents=test_input,
multi_line_output=WrapModes.VERTICAL_HANGING_INDENT,
line_length=79).output
assert test_output == ("from mycompany.model.size_value_array_import_func import (\n"
" get_size_value_array_import_func_jobs\n"
")\n")
def test_other_file_encodings():
"""Test to ensure file encoding is respected"""
try:
tmp_dir = tempfile.mkdtemp()
for encoding in ('latin1', 'utf8'):
tmp_fname = os.path.join(tmp_dir, 'test_{0}.py'.format(encoding))
with codecs.open(tmp_fname, mode='w', encoding=encoding) as f:
file_contents = "# coding: {0}\n\ns = u'ã'\n".format(encoding)
f.write(file_contents)
assert SortImports(file_path=tmp_fname).output == file_contents
finally:
shutil.rmtree(tmp_dir, ignore_errors=True)
def test_comment_at_top_of_file():
"""Test to ensure isort correctly handles top of file comments"""
test_input = ("# Comment one\n"
"from django import forms\n"
"# Comment two\n"
"from django.contrib.gis.geos import GEOSException\n")
assert SortImports(file_contents=test_input).output == test_input
test_input = ("# -*- coding: utf-8 -*-\n"
"from django.db import models\n")
assert SortImports(file_contents=test_input).output == test_input
def test_alphabetic_sorting():
"""Test to ensure isort correctly handles top of file comments"""
test_input = ("import unittest\n"
"\n"
"import ABC\n"
"import Zope\n"
"from django.contrib.gis.geos import GEOSException\n"
"from plone.app.testing import getRoles\n"
"from plone.app.testing import ManageRoles\n"
"from plone.app.testing import setRoles\n"
"from Products.CMFPlone import utils\n"
)
options = {'force_single_line': True,
'force_alphabetical_sort_within_sections': True, }
output = SortImports(file_contents=test_input, **options).output
assert output == test_input
test_input = ("# -*- coding: utf-8 -*-\n"
"from django.db import models\n")
assert SortImports(file_contents=test_input).output == test_input
def test_alphabetic_sorting_multi_line():
"""Test to ensure isort correctly handles multiline import see: issue 364"""
test_input = ("from a import (CONSTANT_A, cONSTANT_B, CONSTANT_C, CONSTANT_D, CONSTANT_E,\n"
" CONSTANT_F, CONSTANT_G, CONSTANT_H, CONSTANT_I, CONSTANT_J)\n")
options = {'force_alphabetical_sort_within_sections': True, }
assert SortImports(file_contents=test_input, **options).output == test_input
def test_comments_not_duplicated():
"""Test to ensure comments aren't duplicated: issue 303"""
test_input = ('from flask import url_for\n'
"# Whole line comment\n"
'from service import demo # inline comment\n'
'from service import settings\n')
output = SortImports(file_contents=test_input).output
assert output.count("# Whole line comment\n") == 1
assert output.count("# inline comment\n") == 1
def test_top_of_line_comments():
"""Test to ensure top of line comments stay where they should: issue 260"""
test_input = ('# -*- coding: utf-8 -*-\n'
'from django.db import models\n'
'#import json as simplejson\n'
'from myproject.models import Servidor\n'
'\n'
'import reversion\n'
'\n'
'import logging\n')
output = SortImports(file_contents=test_input).output
assert output.startswith('# -*- coding: utf-8 -*-\n')
def test_basic_comment():
"""Test to ensure a basic comment wont crash isort"""
test_input = ('import logging\n'
'# Foo\n'
'import os\n')
assert SortImports(file_contents=test_input).output == test_input
def test_shouldnt_add_lines():
"""Ensure that isort doesn't add a blank line when a top of import comment is present, issue #316"""
test_input = ('"""Text"""\n'
'# This is a comment\n'
'import pkg_resources\n')
assert SortImports(file_contents=test_input).output == test_input
def test_sections_parsed_correct():
"""Ensure that modules for custom sections parsed as list from config file and isort result is correct"""
tmp_conf_dir = None
conf_file_data = (
'[settings]\n'
'sections=FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,LOCALFOLDER,COMMON\n'
'known_common=nose\n'
'import_heading_common=Common Library\n'
'import_heading_stdlib=Standard Library\n'
)
test_input = (
'import os\n'
'from nose import *\n'
'import nose\n'
'from os import path'
)
correct_output = (
'# Standard Library\n'
'import os\n'
'from os import path\n'
'\n'
'# Common Library\n'
'import nose\n'
'from nose import *\n'
)
try:
tmp_conf_dir = tempfile.mkdtemp()
tmp_conf_name = os.path.join(tmp_conf_dir, '.isort.cfg')
with codecs.open(tmp_conf_name, 'w') as test_config:
test_config.writelines(conf_file_data)
assert SortImports(file_contents=test_input, settings_path=tmp_conf_dir).output == correct_output
finally:
shutil.rmtree(tmp_conf_dir, ignore_errors=True)
def test_alphabetic_sorting_no_newlines():
'''Test to ensure that alphabetical sort does not erroneously introduce new lines (issue #328)'''
test_input = "import os\n"
test_output = SortImports(file_contents=test_input,force_alphabetical_sort_within_sections=True).output
assert test_input == test_output
test_input = ('import os\n'
'import unittest\n'
'\n'
'from a import b\n'
'\n'
'\n'
'print(1)\n')
test_output = SortImports(file_contents=test_input,force_alphabetical_sort_within_sections=True, lines_after_imports=2).output
assert test_input == test_output
def test_sort_within_section():
    '''Test to ensure it's possible to force isort to sort within sections'''
test_input = ('from Foob import ar\n'
'import foo\n'
'from foo import bar\n'
'from foo.bar import Quux, baz\n')
test_output = SortImports(file_contents=test_input,force_sort_within_sections=True).output
assert test_output == test_input
test_input = ('import foo\n'
'from foo import bar\n'
'from foo.bar import baz\n'
'from foo.bar import Quux\n'
'from Foob import ar\n')
test_output = SortImports(file_contents=test_input,force_sort_within_sections=True, order_by_type=False,
force_single_line=True).output
assert test_output == test_input
def test_sorting_with_two_top_comments():
'''Test to ensure isort will sort files that contain 2 top comments'''
test_input = ('#! comment1\n'
"''' comment2\n"
"'''\n"
'import b\n'
'import a\n')
assert SortImports(file_contents=test_input).output == ('#! comment1\n'
"''' comment2\n"
"'''\n"
'import a\n'
'import b\n')
def test_lines_between_sections():
"""Test to ensure lines_between_sections works"""
test_input = ('from bar import baz\n'
'import os\n')
assert SortImports(file_contents=test_input, lines_between_sections=0).output == ('import os\n'
'from bar import baz\n')
assert SortImports(file_contents=test_input, lines_between_sections=2).output == ('import os\n\n\n'
'from bar import baz\n')
def test_forced_separate_globs():
"""Test to ensure that forced_separate glob matches lines"""
test_input = ('import os\n'
'\n'
'from myproject.foo.models import Foo\n'
'\n'
'from myproject.utils import util_method\n'
'\n'
'from myproject.bar.models import Bar\n'
'\n'
'import sys\n')
test_output = SortImports(file_contents=test_input, forced_separate=['*.models'],
line_length=120).output
assert test_output == ('import os\n'
'import sys\n'
'\n'
'from myproject.utils import util_method\n'
'\n'
'from myproject.bar.models import Bar\n'
'from myproject.foo.models import Foo\n')
def test_no_additional_lines_issue_358():
"""Test to ensure issue 358 is resovled and running isort multiple times does not add extra newlines"""
test_input = ('"""This is a docstring"""\n'
'# This is a comment\n'
'from __future__ import (\n'
' absolute_import,\n'
' division,\n'
' print_function,\n'
' unicode_literals\n'
')\n')
expected_output = ('"""This is a docstring"""\n'
'# This is a comment\n'
'from __future__ import (\n'
' absolute_import,\n'
' division,\n'
' print_function,\n'
' unicode_literals\n'
')\n')
test_output = SortImports(file_contents=test_input, multi_line_output=3, line_length=20).output
assert test_output == expected_output
test_output = SortImports(file_contents=test_output, multi_line_output=3, line_length=20).output
assert test_output == expected_output
for attempt in range(5):
test_output = SortImports(file_contents=test_output, multi_line_output=3, line_length=20).output
assert test_output == expected_output
test_input = ('"""This is a docstring"""\n'
'\n'
'# This is a comment\n'
'from __future__ import (\n'
' absolute_import,\n'
' division,\n'
' print_function,\n'
' unicode_literals\n'
')\n')
expected_output = ('"""This is a docstring"""\n'
'\n'
'# This is a comment\n'
'from __future__ import (\n'
' absolute_import,\n'
' division,\n'
' print_function,\n'
' unicode_literals\n'
')\n')
test_output = SortImports(file_contents=test_input, multi_line_output=3, line_length=20).output
assert test_output == expected_output
test_output = SortImports(file_contents=test_output, multi_line_output=3, line_length=20).output
assert test_output == expected_output
for attempt in range(5):
test_output = SortImports(file_contents=test_output, multi_line_output=3, line_length=20).output
assert test_output == expected_output
def test_import_by_paren_issue_375():
"""Test to ensure isort can correctly handle sorting imports where the paren is directly by the import body"""
test_input = ('from .models import(\n'
' Foo,\n'
' Bar,\n'
')\n')
assert SortImports(file_contents=test_input).output == 'from .models import Bar, Foo\n'
def test_function_with_docstring():
"""Test to ensure isort can correctly sort imports when the first found content is a function with a docstring"""
add_imports = ['from __future__ import unicode_literals']
test_input = ('def foo():\n'
' """ Single line triple quoted doctring """\n'
' pass\n')
expected_output = ('from __future__ import unicode_literals\n'
'\n'
'\n'
'def foo():\n'
' """ Single line triple quoted doctring """\n'
' pass\n')
assert SortImports(file_contents=test_input, add_imports=add_imports).output == expected_output
def test_plone_style():
"""Test to ensure isort correctly plone style imports"""
test_input = ("from django.contrib.gis.geos import GEOSException\n"
"from plone.app.testing import getRoles\n"
"from plone.app.testing import ManageRoles\n"
"from plone.app.testing import setRoles\n"
"from Products.CMFPlone import utils\n"
"\n"
"import ABC\n"
"import unittest\n"
"import Zope\n")
options = {'force_single_line': True,
'force_alphabetical_sort': True}
assert SortImports(file_contents=test_input, **options).output == test_input
| adamchainz/isort | test_isort.py | Python | mit | 87,978 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('robocrm', '0034_robouser_rfid_card'),
]
operations = [
migrations.AlterField(
model_name='robouser',
name='rfid_card',
field=models.CharField(default='CMU', choices=[('CMU', 'CMU ID'), ('PITT', 'PITT ID'), ('OTHER', 'OTHER ID')], max_length=5),
preserve_default=True,
),
]
| sreidy/roboticsclub.org | robocrm/migrations/0035_auto_20150130_1557.py | Python | mit | 529 |
from __future__ import unicode_literals
def _build_mimetype(resource_name, fmt='json'):
return 'application/vnd.reviewboard.org.%s+%s' % (resource_name, fmt)
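# Illustrative examples of what the helper above produces (not in the
# original module):
#   _build_mimetype('review-request')        -> 'application/vnd.reviewboard.org.review-request+json'
#   _build_mimetype('review-request', 'xml') -> 'application/vnd.reviewboard.org.review-request+xml'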
api_token_list_mimetype = _build_mimetype('api-tokens')
api_token_item_mimetype = _build_mimetype('api-token')
archived_item_mimetype = _build_mimetype('archived-review-request')
change_list_mimetype = _build_mimetype('review-request-changes')
change_item_mimetype = _build_mimetype('review-request-change')
default_reviewer_list_mimetype = _build_mimetype('default-reviewers')
default_reviewer_item_mimetype = _build_mimetype('default-reviewer')
diff_list_mimetype = _build_mimetype('diffs')
diff_item_mimetype = _build_mimetype('diff')
diff_file_attachment_list_mimetype = _build_mimetype('diff-file-attachments')
diff_file_attachment_item_mimetype = _build_mimetype('diff-file-attachment')
draft_file_attachment_list_mimetype = _build_mimetype('draft-file-attachments')
draft_file_attachment_item_mimetype = _build_mimetype('draft-file-attachment')
error_mimetype = _build_mimetype('error')
file_attachment_list_mimetype = _build_mimetype('file-attachments')
file_attachment_item_mimetype = _build_mimetype('file-attachment')
file_attachment_comment_list_mimetype = \
_build_mimetype('file-attachment-comments')
file_attachment_comment_item_mimetype = \
_build_mimetype('file-attachment-comment')
filediff_list_mimetype = _build_mimetype('files')
filediff_item_mimetype = _build_mimetype('file')
filediff_comment_list_mimetype = _build_mimetype('file-diff-comments')
filediff_comment_item_mimetype = _build_mimetype('file-diff-comment')
general_comment_list_mimetype = _build_mimetype('general-comments')
general_comment_item_mimetype = _build_mimetype('general-comment')
hosting_service_list_mimetype = _build_mimetype('hosting-services')
hosting_service_item_mimetype = _build_mimetype('hosting-service')
hosting_service_account_list_mimetype = \
_build_mimetype('hosting-service-accounts')
hosting_service_account_item_mimetype = \
_build_mimetype('hosting-service-account')
original_file_mimetype = 'text/plain'
patched_file_mimetype = 'text/plain'
remote_repository_list_mimetype = _build_mimetype('remote-repositories')
remote_repository_item_mimetype = _build_mimetype('remote-repository')
repository_list_mimetype = _build_mimetype('repositories')
repository_item_mimetype = _build_mimetype('repository')
repository_branches_item_mimetype = _build_mimetype('repository-branches')
repository_commits_item_mimetype = _build_mimetype('repository-commits')
repository_info_item_mimetype = _build_mimetype('repository-info')
review_list_mimetype = _build_mimetype('reviews')
review_item_mimetype = _build_mimetype('review')
review_diff_comment_list_mimetype = _build_mimetype('review-diff-comments')
review_diff_comment_item_mimetype = _build_mimetype('review-diff-comment')
review_group_list_mimetype = _build_mimetype('review-groups')
review_group_item_mimetype = _build_mimetype('review-group')
review_group_user_list_mimetype = _build_mimetype('review-group-users')
review_group_user_item_mimetype = _build_mimetype('review-group-user')
review_reply_list_mimetype = _build_mimetype('review-replies')
review_reply_item_mimetype = _build_mimetype('review-reply')
review_reply_diff_comment_list_mimetype = \
_build_mimetype('review-reply-diff-comments')
review_reply_diff_comment_item_mimetype = \
_build_mimetype('review-reply-diff-comment')
review_reply_file_attachment_comment_list_mimetype = \
_build_mimetype('review-reply-file-attachment-comments')
review_reply_file_attachment_comment_item_mimetype = \
_build_mimetype('review-reply-file-attachment-comment')
review_reply_general_comment_list_mimetype = \
_build_mimetype('review-reply-general-comments')
review_reply_general_comment_item_mimetype = \
_build_mimetype('review-reply-general-comment')
review_reply_screenshot_comment_list_mimetype = \
_build_mimetype('review-reply-screenshot-comments')
review_reply_screenshot_comment_item_mimetype = \
_build_mimetype('review-reply-screenshot-comment')
review_request_list_mimetype = _build_mimetype('review-requests')
review_request_item_mimetype = _build_mimetype('review-request')
review_request_draft_item_mimetype = _build_mimetype('review-request-draft')
root_item_mimetype = _build_mimetype('root')
screenshot_list_mimetype = _build_mimetype('screenshots')
screenshot_item_mimetype = _build_mimetype('screenshot')
screenshot_comment_list_mimetype = _build_mimetype('screenshot-comments')
screenshot_comment_item_mimetype = _build_mimetype('screenshot-comment')
screenshot_draft_item_mimetype = _build_mimetype('draft-screenshot')
screenshot_draft_list_mimetype = _build_mimetype('draft-screenshots')
search_mimetype = _build_mimetype('search')
server_info_mimetype = _build_mimetype('server-info')
session_mimetype = _build_mimetype('session')
status_update_list_mimetype = _build_mimetype('status-updates')
status_update_item_mimetype = _build_mimetype('status-update')
user_list_mimetype = _build_mimetype('users')
user_item_mimetype = _build_mimetype('user')
user_file_attachment_list_mimetype = _build_mimetype('user-file-attachments')
user_file_attachment_item_mimetype = _build_mimetype('user-file-attachment')
validate_diff_mimetype = _build_mimetype('diff-validation')
watched_review_group_list_mimetype = _build_mimetype('watched-review-groups')
watched_review_group_item_mimetype = _build_mimetype('watched-review-group')
watched_review_request_item_mimetype = \
_build_mimetype('watched-review-request')
watched_review_request_list_mimetype = \
_build_mimetype('watched-review-requests')
webhook_list_mimetype = _build_mimetype('webhooks')
webhook_item_mimetype = _build_mimetype('webhook')
| davidt/reviewboard | reviewboard/webapi/tests/mimetypes.py | Python | mit | 5,834 |
from __future__ import absolute_import
import Cookie
import urllib
import urlparse
import time
import copy
from email.utils import parsedate_tz, formatdate, mktime_tz
import threading
from netlib import http, tcp, http_status
import netlib.utils
from netlib.odict import ODict, ODictCaseless
from .tcp import TCPHandler
from .primitives import KILL, ProtocolHandler, Flow, Error
from ..proxy.connection import ServerConnection
from .. import encoding, utils, controller, stateobject, proxy
HDR_FORM_URLENCODED = "application/x-www-form-urlencoded"
CONTENT_MISSING = 0
class KillSignal(Exception):
pass
def get_line(fp):
"""
Get a line, possibly preceded by a blank line.
"""
line = fp.readline()
if line == "\r\n" or line == "\n":
# Possible leftover from previous message
line = fp.readline()
if line == "":
raise tcp.NetLibDisconnect()
return line
def send_connect_request(conn, host, port, update_state=True):
upstream_request = HTTPRequest(
"authority",
"CONNECT",
None,
host,
port,
None,
(1, 1),
ODictCaseless(),
""
)
conn.send(upstream_request.assemble())
resp = HTTPResponse.from_stream(conn.rfile, upstream_request.method)
if resp.code != 200:
raise proxy.ProxyError(resp.code,
"Cannot establish SSL " +
"connection with upstream proxy: \r\n" +
str(resp.assemble()))
if update_state:
conn.state.append(("http", {
"state": "connect",
"host": host,
"port": port}
))
return resp
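# Illustrative wire exchange driven by send_connect_request (a sketch; exact
# header output depends on HTTPRequest.assemble()):
#   >> CONNECT example.com:443 HTTP/1.1
#   << HTTP/1.1 200 Connection established
# Any non-200 reply raises ProxyError carrying the assembled upstream response.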
class decoded(object):
"""
A context manager that decodes a request or response, and then
re-encodes it with the same encoding after execution of the block.
Example:
with decoded(request):
request.content = request.content.replace("foo", "bar")
"""
def __init__(self, o):
self.o = o
ce = o.headers.get_first("content-encoding")
if ce in encoding.ENCODINGS:
self.ce = ce
else:
self.ce = None
def __enter__(self):
if self.ce:
self.o.decode()
def __exit__(self, type, value, tb):
if self.ce:
self.o.encode(self.ce)
class HTTPMessage(stateobject.StateObject):
"""
Base class for HTTPRequest and HTTPResponse
"""
def __init__(self, httpversion, headers, content, timestamp_start=None,
timestamp_end=None):
self.httpversion = httpversion
self.headers = headers
"""@type: ODictCaseless"""
self.content = content
self.timestamp_start = timestamp_start
self.timestamp_end = timestamp_end
_stateobject_attributes = dict(
httpversion=tuple,
headers=ODictCaseless,
content=str,
timestamp_start=float,
timestamp_end=float
)
_stateobject_long_attributes = {"content"}
def get_state(self, short=False):
ret = super(HTTPMessage, self).get_state(short)
if short:
if self.content:
ret["contentLength"] = len(self.content)
else:
ret["contentLength"] = 0
return ret
def get_decoded_content(self):
"""
Returns the decoded content based on the current Content-Encoding
header.
Doesn't change the message itself or its headers.
"""
ce = self.headers.get_first("content-encoding")
if not self.content or ce not in encoding.ENCODINGS:
return self.content
return encoding.decode(ce, self.content)
def decode(self):
"""
Decodes content based on the current Content-Encoding header, then
removes the header. If there is no Content-Encoding header, no
action is taken.
Returns True if decoding succeeded, False otherwise.
"""
ce = self.headers.get_first("content-encoding")
if not self.content or ce not in encoding.ENCODINGS:
return False
data = encoding.decode(ce, self.content)
if data is None:
return False
self.content = data
del self.headers["content-encoding"]
return True
def encode(self, e):
"""
Encodes content with the encoding e, where e is "gzip", "deflate"
or "identity".
"""
# FIXME: Error if there's an existing encoding header?
self.content = encoding.encode(e, self.content)
self.headers["content-encoding"] = [e]
def size(self, **kwargs):
"""
Size in bytes of a fully rendered message, including headers and
HTTP lead-in.
"""
hl = len(self._assemble_head(**kwargs))
if self.content:
return hl + len(self.content)
else:
return hl
def copy(self):
c = copy.copy(self)
c.headers = self.headers.copy()
return c
def replace(self, pattern, repl, *args, **kwargs):
"""
Replaces a regular expression pattern with repl in both the headers
and the body of the message. Encoded content will be decoded
before replacement, and re-encoded afterwards.
Returns the number of replacements made.
"""
with decoded(self):
self.content, c = utils.safe_subn(
pattern, repl, self.content, *args, **kwargs
)
c += self.headers.replace(pattern, repl, *args, **kwargs)
return c
def _assemble_first_line(self):
"""
Returns the assembled request/response line
"""
raise NotImplementedError() # pragma: nocover
def _assemble_headers(self):
"""
Returns the assembled headers
"""
raise NotImplementedError() # pragma: nocover
def _assemble_head(self):
"""
Returns the assembled request/response line plus headers
"""
raise NotImplementedError() # pragma: nocover
def assemble(self):
"""
Returns the assembled request/response
"""
raise NotImplementedError() # pragma: nocover
class HTTPRequest(HTTPMessage):
"""
An HTTP request.
Exposes the following attributes:
method: HTTP method
scheme: URL scheme (http/https)
host: Target hostname of the request. This is not necessarily the
direct upstream server (which could be another proxy), but it's always
the target server we want to reach at the end. This attribute is either
inferred from the request itself (absolute-form, authority-form) or from
the connection metadata (e.g. the host in reverse proxy mode).
port: Destination port
path: Path portion of the URL (not present in authority-form)
httpversion: HTTP version tuple, e.g. (1,1)
headers: ODictCaseless object
content: Content of the request, None, or CONTENT_MISSING if there
is content associated, but not present. CONTENT_MISSING evaluates
to False to make checking for the presence of content natural.
form_in: The request form which mitmproxy has received. The following
values are possible:
- relative (GET /index.html, OPTIONS *) (covers origin form and
asterisk form)
- absolute (GET http://example.com:80/index.html)
- authority-form (CONNECT example.com:443)
Details: http://tools.ietf.org/html/draft-ietf-httpbis-p1-messaging-25#section-5.3
form_out: The request form which mitmproxy will send out to the
destination
timestamp_start: Timestamp indicating when request transmission started
timestamp_end: Timestamp indicating when request transmission ended
"""
def __init__(
self,
form_in,
method,
scheme,
host,
port,
path,
httpversion,
headers,
content,
timestamp_start=None,
timestamp_end=None,
form_out=None
):
assert isinstance(headers, ODictCaseless) or not headers
HTTPMessage.__init__(
self,
httpversion,
headers,
content,
timestamp_start,
timestamp_end
)
self.form_in = form_in
self.method = method
self.scheme = scheme
self.host = host
self.port = port
self.path = path
self.httpversion = httpversion
self.form_out = form_out or form_in
# Have this request's cookies been modified by sticky cookies or auth?
self.stickycookie = False
self.stickyauth = False
# Is this request replayed?
self.is_replay = False
_stateobject_attributes = HTTPMessage._stateobject_attributes.copy()
_stateobject_attributes.update(
form_in=str,
method=str,
scheme=str,
host=str,
port=int,
path=str,
form_out=str,
is_replay=bool
)
@classmethod
def from_state(cls, state):
f = cls(None, None, None, None, None, None, None, None, None, None, None)
f.load_state(state)
return f
def __repr__(self):
return "<HTTPRequest: {0}>".format(
self._assemble_first_line(self.form_in)[:-9]
)
@classmethod
def from_stream(cls, rfile, include_body=True, body_size_limit=None):
"""
Parse an HTTP request from a file stream
"""
httpversion, host, port, scheme, method, path, headers, content, timestamp_start, timestamp_end = (
None, None, None, None, None, None, None, None, None, None)
timestamp_start = utils.timestamp()
if hasattr(rfile, "reset_timestamps"):
rfile.reset_timestamps()
request_line = get_line(rfile)
if hasattr(rfile, "first_byte_timestamp"):
# more accurate timestamp_start
timestamp_start = rfile.first_byte_timestamp
request_line_parts = http.parse_init(request_line)
if not request_line_parts:
raise http.HttpError(
400,
"Bad HTTP request line: %s" % repr(request_line)
)
method, path, httpversion = request_line_parts
if path == '*' or path.startswith("/"):
form_in = "relative"
if not netlib.utils.isascii(path):
raise http.HttpError(
400,
"Bad HTTP request line: %s" % repr(request_line)
)
elif method.upper() == 'CONNECT':
form_in = "authority"
r = http.parse_init_connect(request_line)
if not r:
raise http.HttpError(
400,
"Bad HTTP request line: %s" % repr(request_line)
)
host, port, _ = r
path = None
else:
form_in = "absolute"
r = http.parse_init_proxy(request_line)
if not r:
raise http.HttpError(
400,
"Bad HTTP request line: %s" % repr(request_line)
)
_, scheme, host, port, path, _ = r
headers = http.read_headers(rfile)
if headers is None:
raise http.HttpError(400, "Invalid headers")
if include_body:
content = http.read_http_body(rfile, headers, body_size_limit,
method, None, True)
timestamp_end = utils.timestamp()
return HTTPRequest(
form_in,
method,
scheme,
host,
port,
path,
httpversion,
headers,
content,
timestamp_start,
timestamp_end
)
def _assemble_first_line(self, form=None):
form = form or self.form_out
if form == "relative":
request_line = '%s %s HTTP/%s.%s' % (
self.method, self.path, self.httpversion[0], self.httpversion[1]
)
elif form == "authority":
request_line = '%s %s:%s HTTP/%s.%s' % (
self.method, self.host, self.port, self.httpversion[0],
self.httpversion[1]
)
elif form == "absolute":
request_line = '%s %s://%s:%s%s HTTP/%s.%s' % (
self.method, self.scheme, self.host,
self.port, self.path, self.httpversion[0],
self.httpversion[1]
)
else:
raise http.HttpError(400, "Invalid request form")
return request_line
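    # Illustrative request lines for each form (example values, mirroring the
    # class docstring above):
    #   relative:  GET /index.html HTTP/1.1
    #   authority: CONNECT example.com:443 HTTP/1.1
    #   absolute:  GET http://example.com:80/index.html HTTP/1.1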
    # This list is adapted from legacy code.
# We probably don't need to strip off keep-alive.
_headers_to_strip_off = ['Proxy-Connection',
'Keep-Alive',
'Connection',
'Transfer-Encoding',
'Upgrade']
def _assemble_headers(self):
headers = self.headers.copy()
for k in self._headers_to_strip_off:
del headers[k]
if 'host' not in headers and self.scheme and self.host and self.port:
headers["Host"] = [utils.hostport(self.scheme,
self.host,
self.port)]
# If content is defined (i.e. not None or CONTENT_MISSING), we always add a content-length header.
if self.content or self.content == "":
headers["Content-Length"] = [str(len(self.content))]
return str(headers)
def _assemble_head(self, form=None):
return "%s\r\n%s\r\n" % (
self._assemble_first_line(form), self._assemble_headers()
)
def assemble(self, form=None):
"""
Assembles the request for transmission to the server. We make some
modifications to make sure interception works properly.
Raises an Exception if the request cannot be assembled.
"""
if self.content == CONTENT_MISSING:
raise proxy.ProxyError(
502,
"Cannot assemble flow with CONTENT_MISSING"
)
head = self._assemble_head(form)
if self.content:
return head + self.content
else:
return head
def __hash__(self):
return id(self)
def anticache(self):
"""
Modifies this request to remove headers that might produce a cached
response. That is, we remove ETags and If-Modified-Since headers.
"""
delheaders = [
"if-modified-since",
"if-none-match",
]
for i in delheaders:
del self.headers[i]
def anticomp(self):
"""
Modifies this request to remove headers that will compress the
resource's data.
"""
self.headers["accept-encoding"] = ["identity"]
def constrain_encoding(self):
"""
Limits the permissible Accept-Encoding values, based on what we can
decode appropriately.
"""
if self.headers["accept-encoding"]:
self.headers["accept-encoding"] = [', '.join(
e for e in encoding.ENCODINGS if e in self.headers["accept-encoding"][0]
)]
def update_host_header(self):
"""
Update the host header to reflect the current target.
"""
self.headers["Host"] = [self.host]
def get_form_urlencoded(self):
"""
Retrieves the URL-encoded form data, returning an ODict object.
Returns an empty ODict if there is no data or the content-type
indicates non-form data.
"""
if self.content and self.headers.in_any("content-type", HDR_FORM_URLENCODED, True):
return ODict(utils.urldecode(self.content))
return ODict([])
def set_form_urlencoded(self, odict):
"""
Sets the body to the URL-encoded form data, and adds the
appropriate content-type header. Note that this will destroy the
existing body if there is one.
"""
# FIXME: If there's an existing content-type header indicating a
# url-encoded form, leave it alone.
self.headers["Content-Type"] = [HDR_FORM_URLENCODED]
self.content = utils.urlencode(odict.lst)
def get_path_components(self):
"""
Returns the path components of the URL as a list of strings.
Components are unquoted.
"""
_, _, path, _, _, _ = urlparse.urlparse(self.url)
return [urllib.unquote(i) for i in path.split("/") if i]
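    # Illustrative: for a URL whose path is "/a/b%20c" this returns ['a', 'b c'].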
def set_path_components(self, lst):
"""
Takes a list of strings, and sets the path component of the URL.
Components are quoted.
"""
lst = [urllib.quote(i, safe="") for i in lst]
path = "/" + "/".join(lst)
scheme, netloc, _, params, query, fragment = urlparse.urlparse(self.url)
self.url = urlparse.urlunparse(
[scheme, netloc, path, params, query, fragment]
)
def get_query(self):
"""
Gets the request query string. Returns an ODict object.
"""
_, _, _, _, query, _ = urlparse.urlparse(self.url)
if query:
return ODict(utils.urldecode(query))
return ODict([])
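    # Illustrative: for "http://host/path?x=1&y=2" this returns
    # ODict([('x', '1'), ('y', '2')]) (pairing done by utils.urldecode).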
def set_query(self, odict):
"""
Takes an ODict object, and sets the request query string.
"""
scheme, netloc, path, params, _, fragment = urlparse.urlparse(self.url)
query = utils.urlencode(odict.lst)
self.url = urlparse.urlunparse(
[scheme, netloc, path, params, query, fragment]
)
def pretty_host(self, hostheader):
"""
Heuristic to get the host of the request.
Note that pretty_host() does not always return the TCP destination
of the request, e.g. if an upstream proxy is in place
If hostheader is set to True, the Host: header will be used as
additional (and preferred) data source. This is handy in transparent
mode, where only the ip of the destination is known, but not the
resolved name. This is disabled by default, as an attacker may spoof
the host header to confuse an analyst.
"""
host = None
if hostheader:
host = self.headers.get_first("host")
if not host:
host = self.host
host = host.encode("idna")
return host
def pretty_url(self, hostheader):
if self.form_out == "authority": # upstream proxy mode
return "%s:%s" % (self.pretty_host(hostheader), self.port)
return utils.unparse_url(self.scheme,
self.pretty_host(hostheader),
self.port,
self.path).encode('ascii')
@property
def url(self):
"""
Returns a URL string, constructed from the Request's URL components.
"""
return utils.unparse_url(
self.scheme,
self.host,
self.port,
self.path
).encode('ascii')
@url.setter
def url(self, url):
"""
Parses a URL specification, and updates the Request's information
accordingly.
Raises ValueError if the URL is invalid.
"""
parts = http.parse_url(url)
if not parts:
raise ValueError("Invalid URL: %s" % url)
self.scheme, self.host, self.port, self.path = parts
def get_cookies(self):
cookie_headers = self.headers.get("cookie")
if not cookie_headers:
return None
cookies = []
for header in cookie_headers:
pairs = [pair.partition("=") for pair in header.split(';')]
cookies.extend((pair[0], (pair[2], {})) for pair in pairs)
return dict(cookies)
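    # Illustrative: a "Cookie: a=1; b=2" header yields a dict of
    # name -> (value, {}) pairs; note the names are not whitespace-stripped
    # here, unlike in the response-side parser below.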
def replace(self, pattern, repl, *args, **kwargs):
"""
Replaces a regular expression pattern with repl in the headers, the
request path and the body of the request. Encoded content will be
decoded before replacement, and re-encoded afterwards.
Returns the number of replacements made.
"""
c = HTTPMessage.replace(self, pattern, repl, *args, **kwargs)
self.path, pc = utils.safe_subn(
pattern, repl, self.path, *args, **kwargs
)
c += pc
return c
class HTTPResponse(HTTPMessage):
"""
An HTTP response.
Exposes the following attributes:
httpversion: HTTP version tuple, e.g. (1,1)
code: HTTP response code
msg: HTTP response message
headers: ODictCaseless object
content: Content of the response, None, or CONTENT_MISSING if there
is content associated, but not present. CONTENT_MISSING evaluates
to False to make checking for the presence of content natural.
timestamp_start: Timestamp indicating when response transmission started
timestamp_end: Timestamp indicating when response transmission ended
"""
def __init__(self, httpversion, code, msg, headers, content, timestamp_start=None,
timestamp_end=None):
assert isinstance(headers, ODictCaseless) or headers is None
HTTPMessage.__init__(
self,
httpversion,
headers,
content,
timestamp_start,
timestamp_end
)
self.code = code
self.msg = msg
# Is this request replayed?
self.is_replay = False
self.stream = False
_stateobject_attributes = HTTPMessage._stateobject_attributes.copy()
_stateobject_attributes.update(
code=int,
msg=str
)
@classmethod
def from_state(cls, state):
f = cls(None, None, None, None, None)
f.load_state(state)
return f
def __repr__(self):
size = utils.pretty_size(len(self.content)) if self.content else "content missing"
return "<HTTPResponse: {code} {msg} ({contenttype}, {size})>".format(
code=self.code,
msg=self.msg,
contenttype=self.headers.get_first(
"content-type", "unknown content type"
),
size=size
)
@classmethod
def from_stream(cls, rfile, request_method, include_body=True, body_size_limit=None):
"""
Parse an HTTP response from a file stream
"""
timestamp_start = utils.timestamp()
if hasattr(rfile, "reset_timestamps"):
rfile.reset_timestamps()
httpversion, code, msg, headers, content = http.read_response(
rfile,
request_method,
body_size_limit,
include_body=include_body)
if hasattr(rfile, "first_byte_timestamp"):
# more accurate timestamp_start
timestamp_start = rfile.first_byte_timestamp
if include_body:
timestamp_end = utils.timestamp()
else:
timestamp_end = None
return HTTPResponse(
httpversion,
code,
msg,
headers,
content,
timestamp_start,
timestamp_end
)
def _assemble_first_line(self):
return 'HTTP/%s.%s %s %s' % \
(self.httpversion[0], self.httpversion[1], self.code, self.msg)
_headers_to_strip_off = ['Proxy-Connection',
'Alternate-Protocol',
'Alt-Svc']
def _assemble_headers(self, preserve_transfer_encoding=False):
headers = self.headers.copy()
for k in self._headers_to_strip_off:
del headers[k]
if not preserve_transfer_encoding:
del headers['Transfer-Encoding']
# If content is defined (i.e. not None or CONTENT_MISSING), we always add a content-length header.
if self.content or self.content == "":
headers["Content-Length"] = [str(len(self.content))]
return str(headers)
def _assemble_head(self, preserve_transfer_encoding=False):
return '%s\r\n%s\r\n' % (
self._assemble_first_line(),
self._assemble_headers(
preserve_transfer_encoding=preserve_transfer_encoding
)
)
def assemble(self):
"""
Assembles the response for transmission to the client. We make some
modifications to make sure interception works properly.
Raises an Exception if the request cannot be assembled.
"""
if self.content == CONTENT_MISSING:
raise proxy.ProxyError(
502,
"Cannot assemble flow with CONTENT_MISSING"
)
head = self._assemble_head()
if self.content:
return head + self.content
else:
return head
def _refresh_cookie(self, c, delta):
"""
Takes a cookie string c and a time delta in seconds, and returns
a refreshed cookie string.
"""
c = Cookie.SimpleCookie(str(c))
for i in c.values():
if "expires" in i:
d = parsedate_tz(i["expires"])
if d:
d = mktime_tz(d) + delta
i["expires"] = formatdate(d)
else:
# This can happen when the expires tag is invalid.
                    # reddit.com sends an expires tag like this: "Thu, 31 Dec
# 2037 23:59:59 GMT", which is valid RFC 1123, but not
# strictly correct according to the cookie spec. Browsers
# appear to parse this tolerantly - maybe we should too.
# For now, we just ignore this.
del i["expires"]
return c.output(header="").strip()
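    # Illustrative: with delta=3600, an 'expires=Wed, 01 Jan 2014 00:00:00 GMT'
    # attribute is rewritten to one hour later (exact formatting per formatdate).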
def refresh(self, now=None):
"""
This fairly complex and heuristic function refreshes a server
response for replay.
- It adjusts date, expires and last-modified headers.
- It adjusts cookie expiration.
"""
if not now:
now = time.time()
delta = now - self.timestamp_start
refresh_headers = [
"date",
"expires",
"last-modified",
]
for i in refresh_headers:
if i in self.headers:
d = parsedate_tz(self.headers[i][0])
if d:
new = mktime_tz(d) + delta
self.headers[i] = [formatdate(new)]
c = []
for i in self.headers["set-cookie"]:
c.append(self._refresh_cookie(i, delta))
if c:
self.headers["set-cookie"] = c
def get_cookies(self):
cookie_headers = self.headers.get("set-cookie")
if not cookie_headers:
return None
cookies = []
for header in cookie_headers:
pairs = [pair.partition("=") for pair in header.split(';')]
cookie_name = pairs[0][0] # the key of the first key/value pairs
cookie_value = pairs[0][2] # the value of the first key/value pairs
cookie_parameters = {
key.strip().lower(): value.strip() for key, sep, value in pairs[1:]
}
cookies.append((cookie_name, (cookie_value, cookie_parameters)))
return dict(cookies)
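    # Illustrative: "Set-Cookie: SID=abc; Path=/; HttpOnly" yields
    # {'SID': ('abc', {'path': '/', 'httponly': ''})}.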
class HTTPFlow(Flow):
"""
A HTTPFlow is a collection of objects representing a single HTTP
transaction. The main attributes are:
request: HTTPRequest object
response: HTTPResponse object
error: Error object
server_conn: ServerConnection object
client_conn: ClientConnection object
Note that it's possible for a Flow to have both a response and an error
object. This might happen, for instance, when a response was received
from the server, but there was an error sending it back to the client.
The following additional attributes are exposed:
intercepted: Is this flow currently being intercepted?
live: Does this flow have a live client connection?
"""
def __init__(self, client_conn, server_conn, live=None):
super(HTTPFlow, self).__init__("http", client_conn, server_conn, live)
self.request = None
"""@type: HTTPRequest"""
self.response = None
"""@type: HTTPResponse"""
_stateobject_attributes = Flow._stateobject_attributes.copy()
_stateobject_attributes.update(
request=HTTPRequest,
response=HTTPResponse
)
@classmethod
def from_state(cls, state):
f = cls(None, None)
f.load_state(state)
return f
def __repr__(self):
s = "<HTTPFlow"
for a in ("request", "response", "error", "client_conn", "server_conn"):
if getattr(self, a, False):
s += "\r\n %s = {flow.%s}" % (a, a)
s += ">"
return s.format(flow=self)
def copy(self):
f = super(HTTPFlow, self).copy()
if self.request:
f.request = self.request.copy()
if self.response:
f.response = self.response.copy()
return f
def match(self, f):
"""
Match this flow against a compiled filter expression. Returns True
if matched, False if not.
If f is a string, it will be compiled as a filter expression. If
the expression is invalid, ValueError is raised.
"""
if isinstance(f, basestring):
from .. import filt
f = filt.parse(f)
if not f:
raise ValueError("Invalid filter expression.")
if f:
return f(self)
return True
def replace(self, pattern, repl, *args, **kwargs):
"""
Replaces a regular expression pattern with repl in both request and
response of the flow. Encoded content will be decoded before
replacement, and re-encoded afterwards.
Returns the number of replacements made.
"""
c = self.request.replace(pattern, repl, *args, **kwargs)
if self.response:
c += self.response.replace(pattern, repl, *args, **kwargs)
return c
class HttpAuthenticationError(Exception):
def __init__(self, auth_headers=None):
super(HttpAuthenticationError, self).__init__(
"Proxy Authentication Required"
)
self.headers = auth_headers
self.code = 407
def __repr__(self):
return "Proxy Authentication Required"
class HTTPHandler(ProtocolHandler):
"""
HTTPHandler implements mitmproxy's understanding of the HTTP protocol.
"""
def __init__(self, c):
super(HTTPHandler, self).__init__(c)
self.expected_form_in = c.config.mode.http_form_in
self.expected_form_out = c.config.mode.http_form_out
self.skip_authentication = False
def handle_messages(self):
while self.handle_flow():
pass
def get_response_from_server(self, flow):
self.c.establish_server_connection()
request_raw = flow.request.assemble()
for attempt in (0, 1):
try:
self.c.server_conn.send(request_raw)
# Only get the headers at first...
flow.response = HTTPResponse.from_stream(
self.c.server_conn.rfile, flow.request.method,
body_size_limit=self.c.config.body_size_limit,
include_body=False
)
break
except (tcp.NetLibDisconnect, http.HttpErrorConnClosed), v:
self.c.log(
"error in server communication: %s" % repr(v),
level="debug"
)
if attempt == 0:
# In any case, we try to reconnect at least once. This is
# necessary because it might be possible that we already
# initiated an upstream connection after clientconnect that
                    # has already expired, e.g. consider the following event
# log:
# > clientconnect (transparent mode destination known)
# > serverconnect
# > read n% of large request
# > server detects timeout, disconnects
# > read (100-n)% of large request
# > send large request upstream
self.c.server_reconnect()
else:
raise
# call the appropriate script hook - this is an opportunity for an
# inline script to set flow.stream = True
flow = self.c.channel.ask("responseheaders", flow)
if flow is None or flow == KILL:
raise KillSignal()
else:
# now get the rest of the request body, if body still needs to be
# read but not streaming this response
if flow.response.stream:
flow.response.content = CONTENT_MISSING
else:
flow.response.content = http.read_http_body(
self.c.server_conn.rfile, flow.response.headers,
self.c.config.body_size_limit,
flow.request.method, flow.response.code, False
)
flow.response.timestamp_end = utils.timestamp()
def handle_flow(self):
flow = HTTPFlow(self.c.client_conn, self.c.server_conn, self.live)
try:
try:
req = HTTPRequest.from_stream(
self.c.client_conn.rfile,
body_size_limit=self.c.config.body_size_limit
)
except tcp.NetLibDisconnect:
# don't throw an error for disconnects that happen
# before/between requests.
return False
self.c.log(
"request",
"debug",
[req._assemble_first_line(req.form_in)]
)
ret = self.process_request(flow, req)
if ret is not None:
return ret
# Be careful NOT to assign the request to the flow before
# process_request completes. This is because the call can raise an
# exception. If the request object is already attached, this results
# in an Error object that has an attached request that has not been
# sent through to the Master.
flow.request = req
request_reply = self.c.channel.ask("request", flow)
if request_reply is None or request_reply == KILL:
raise KillSignal()
self.process_server_address(flow) # The inline script may have changed request.host
if isinstance(request_reply, HTTPResponse):
flow.response = request_reply
else:
self.get_response_from_server(flow)
# no further manipulation of self.c.server_conn beyond this point
# we can safely set it as the final attribute value here.
flow.server_conn = self.c.server_conn
self.c.log("response", "debug", [flow.response._assemble_first_line()])
response_reply = self.c.channel.ask("response", flow)
if response_reply is None or response_reply == KILL:
raise KillSignal()
self.send_response_to_client(flow)
if self.check_close_connection(flow):
return False
# We sent a CONNECT request to an upstream proxy.
if flow.request.form_in == "authority" and flow.response.code == 200:
# TODO: Possibly add headers (memory consumption/usefulness
# tradeoff) Make sure to add state info before the actual
# processing of the CONNECT request happens. During an SSL
# upgrade, we may receive an SNI indication from the client,
# which resets the upstream connection. If this is the case, we
# must already re-issue the CONNECT request at this point.
self.c.server_conn.state.append(
(
"http", {
"state": "connect",
"host": flow.request.host,
"port": flow.request.port
}
)
)
if not self.process_connect_request((flow.request.host, flow.request.port)):
return False
# If the user has changed the target server on this connection,
# restore the original target server
flow.live.restore_server()
return True # Next flow please.
except (
HttpAuthenticationError,
http.HttpError,
proxy.ProxyError,
tcp.NetLibError,
), e:
self.handle_error(e, flow)
except KillSignal:
self.c.log("Connection killed", "info")
finally:
flow.live = None # Connection is not live anymore.
return False
def handle_server_reconnect(self, state):
if state["state"] == "connect":
send_connect_request(
self.c.server_conn,
state["host"],
state["port"],
update_state=False
)
else: # pragma: nocover
raise RuntimeError("Unknown State: %s" % state["state"])
def handle_error(self, error, flow=None):
message = repr(error)
message_debug = None
if isinstance(error, tcp.NetLibDisconnect):
message = None
message_debug = "TCP connection closed unexpectedly."
elif "tlsv1 alert unknown ca" in message:
message = "TLSv1 Alert Unknown CA: The client does not trust the proxy's certificate."
elif "handshake error" in message:
message_debug = message
message = "SSL handshake error: The client may not trust the proxy's certificate."
if message:
self.c.log(message, level="info")
if message_debug:
self.c.log(message_debug, level="debug")
if flow:
# TODO: no flows without request or with both request and response
# at the moment.
if flow.request and not flow.response:
flow.error = Error(message or message_debug)
self.c.channel.ask("error", flow)
try:
code = getattr(error, "code", 502)
headers = getattr(error, "headers", None)
html_message = message or ""
if message_debug:
html_message += "<pre>%s</pre>" % message_debug
self.send_error(code, html_message, headers)
except:
pass
def send_error(self, code, message, headers):
response = http_status.RESPONSES.get(code, "Unknown")
html_content = """
<html>
<head>
<title>%d %s</title>
</head>
<body>%s</body>
</html>
""" % (code, response, message)
self.c.client_conn.wfile.write("HTTP/1.1 %s %s\r\n" % (code, response))
self.c.client_conn.wfile.write(
"Server: %s\r\n" % self.c.config.server_version
)
self.c.client_conn.wfile.write("Content-type: text/html\r\n")
self.c.client_conn.wfile.write(
"Content-Length: %d\r\n" % len(html_content)
)
if headers:
for key, value in headers.items():
self.c.client_conn.wfile.write("%s: %s\r\n" % (key, value))
self.c.client_conn.wfile.write("Connection: close\r\n")
self.c.client_conn.wfile.write("\r\n")
self.c.client_conn.wfile.write(html_content)
self.c.client_conn.wfile.flush()
def process_request(self, flow, request):
"""
@returns:
True, if the request should not be sent upstream
False, if the connection should be aborted
None, if the request should be sent upstream
(a status code != None should be returned directly by handle_flow)
"""
if not self.skip_authentication:
self.authenticate(request)
# Determine .scheme, .host and .port attributes
# For absolute-form requests, they are directly given in the request.
# For authority-form requests, we only need to determine the request scheme.
# For relative-form requests, we need to determine host and port as well.
if not request.scheme:
request.scheme = "https" if flow.server_conn and flow.server_conn.ssl_established else "http"
if not request.host:
# Host/Port Complication: In upstream mode, use the server we CONNECTed to,
# not the upstream proxy.
if flow.server_conn:
for s in flow.server_conn.state:
if s[0] == "http" and s[1]["state"] == "connect":
request.host, request.port = s[1]["host"], s[1]["port"]
if not request.host and flow.server_conn:
request.host, request.port = flow.server_conn.address.host, flow.server_conn.address.port
# Now we can process the request.
if request.form_in == "authority":
if self.c.client_conn.ssl_established:
raise http.HttpError(
400,
"Must not CONNECT on already encrypted connection"
)
if self.c.config.mode == "regular":
self.c.set_server_address((request.host, request.port))
# Update server_conn attribute on the flow
flow.server_conn = self.c.server_conn
self.c.establish_server_connection()
self.c.client_conn.send(
'HTTP/1.1 200 Connection established\r\n' +
'Content-Length: 0\r\n' +
('Proxy-agent: %s\r\n' % self.c.config.server_version) +
'\r\n'
)
return self.process_connect_request(self.c.server_conn.address)
elif self.c.config.mode == "upstream":
return None
else:
# CONNECT should never occur if we don't expect absolute-form
# requests
pass
elif request.form_in == self.expected_form_in:
request.form_out = self.expected_form_out
if request.form_in == "absolute":
if request.scheme != "http":
raise http.HttpError(
400,
"Invalid request scheme: %s" % request.scheme
)
if self.c.config.mode == "regular":
# Update info so that an inline script sees the correct
# value at flow.server_conn
self.c.set_server_address((request.host, request.port))
flow.server_conn = self.c.server_conn
return None
raise http.HttpError(
400, "Invalid HTTP request form (expected: %s, got: %s)" % (
self.expected_form_in, request.form_in
)
)
def process_server_address(self, flow):
# Depending on the proxy mode, server handling is entirely different
# We provide a mostly unified API to the user, which needs to be
# unfiddled here
# ( See also: https://github.com/mitmproxy/mitmproxy/issues/337 )
address = netlib.tcp.Address((flow.request.host, flow.request.port))
ssl = (flow.request.scheme == "https")
if self.c.config.mode == "upstream":
# The connection to the upstream proxy may have a state we may need
# to take into account.
connected_to = None
for s in flow.server_conn.state:
if s[0] == "http" and s[1]["state"] == "connect":
connected_to = tcp.Address((s[1]["host"], s[1]["port"]))
# We need to reconnect if the current flow either requires a
# (possibly impossible) change to the connection state, e.g. the
# host has changed but we already CONNECTed somewhere else.
needs_server_change = (
ssl != self.c.server_conn.ssl_established
or
# HTTP proxying is "stateless", CONNECT isn't.
(connected_to and address != connected_to)
)
if needs_server_change:
# force create new connection to the proxy server to reset state
self.live.change_server(self.c.server_conn.address, force=True)
if ssl:
send_connect_request(
self.c.server_conn,
address.host,
address.port
)
self.c.establish_ssl(server=True)
else:
# If we're not in upstream mode, we just want to update the host and
# possibly establish TLS. This is a no op if the addresses match.
self.live.change_server(address, ssl=ssl)
flow.server_conn = self.c.server_conn
def send_response_to_client(self, flow):
if not flow.response.stream:
# no streaming:
# we already received the full response from the server and can send
# it to the client straight away.
self.c.client_conn.send(flow.response.assemble())
else:
# streaming:
# First send the headers and then transfer the response
# incrementally:
h = flow.response._assemble_head(preserve_transfer_encoding=True)
self.c.client_conn.send(h)
for chunk in http.read_http_body_chunked(self.c.server_conn.rfile,
flow.response.headers,
self.c.config.body_size_limit, flow.request.method,
flow.response.code, False, 4096):
for part in chunk:
self.c.client_conn.wfile.write(part)
self.c.client_conn.wfile.flush()
flow.response.timestamp_end = utils.timestamp()
def check_close_connection(self, flow):
"""
Checks if the connection should be closed depending on the HTTP
semantics. Returns True, if so.
"""
close_connection = (
http.connection_close(flow.request.httpversion, flow.request.headers) or
http.connection_close(flow.response.httpversion, flow.response.headers) or
http.expected_http_body_size(flow.response.headers, False, flow.request.method,
flow.response.code) == -1)
if close_connection:
if flow.request.form_in == "authority" and flow.response.code == 200:
# Workaround for https://github.com/mitmproxy/mitmproxy/issues/313:
# Some proxies (e.g. Charles) send a CONNECT response with HTTP/1.0 and no Content-Length header
pass
else:
return True
return False
def process_connect_request(self, address):
"""
Process a CONNECT request.
Returns True if the CONNECT request has been processed successfully.
Returns False, if the connection should be closed immediately.
"""
address = tcp.Address.wrap(address)
if self.c.config.check_ignore(address):
self.c.log("Ignore host: %s:%s" % address(), "info")
TCPHandler(self.c, log=False).handle_messages()
return False
else:
self.expected_form_in = "relative"
self.expected_form_out = "relative"
self.skip_authentication = True
# In practice, nobody issues a CONNECT request to send unencrypted HTTP requests afterwards.
            # If we don't delegate to TCP mode, we should always negotiate an SSL connection.
#
# FIXME:
# Turns out the previous statement isn't entirely true. Chrome on Windows CONNECTs to :80
# if an explicit proxy is configured and a websocket connection should be established.
            # We don't support WebSockets at the moment, so it fails anyway, but we should come up with
# a better solution to this if we start to support WebSockets.
should_establish_ssl = (
address.port in self.c.config.ssl_ports
or
not self.c.config.check_tcp(address)
)
if should_establish_ssl:
self.c.log("Received CONNECT request to SSL port. Upgrading to SSL...", "debug")
self.c.establish_ssl(server=True, client=True)
self.c.log("Upgrade to SSL completed.", "debug")
if self.c.config.check_tcp(address):
self.c.log("Generic TCP mode for host: %s:%s" % address(), "info")
TCPHandler(self.c).handle_messages()
return False
return True
def authenticate(self, request):
if self.c.config.authenticator:
if self.c.config.authenticator.authenticate(request.headers):
self.c.config.authenticator.clean(request.headers)
else:
raise HttpAuthenticationError(
self.c.config.authenticator.auth_challenge_headers())
return request.headers
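    # Illustrative: on failure, the HttpAuthenticationError raised above carries
    # the authenticator's challenge headers, which handle_error() relays to the
    # client as a 407 response.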
class RequestReplayThread(threading.Thread):
name = "RequestReplayThread"
def __init__(self, config, flow, masterq, should_exit):
self.config, self.flow, self.channel = config, flow, controller.Channel(masterq, should_exit)
threading.Thread.__init__(self)
def run(self):
r = self.flow.request
form_out_backup = r.form_out
try:
self.flow.response = None
request_reply = self.channel.ask("request", self.flow)
if request_reply is None or request_reply == KILL:
raise KillSignal()
elif isinstance(request_reply, HTTPResponse):
self.flow.response = request_reply
else:
# In all modes, we directly connect to the server displayed
if self.config.mode == "upstream":
server_address = self.config.mode.get_upstream_server(self.flow.client_conn)[2:]
server = ServerConnection(server_address)
server.connect()
if r.scheme == "https":
send_connect_request(server, r.host, r.port)
server.establish_ssl(self.config.clientcerts, sni=self.flow.server_conn.sni)
r.form_out = "relative"
else:
r.form_out = "absolute"
else:
server_address = (r.host, r.port)
server = ServerConnection(server_address)
server.connect()
if r.scheme == "https":
server.establish_ssl(self.config.clientcerts, sni=self.flow.server_conn.sni)
r.form_out = "relative"
server.send(r.assemble())
self.flow.server_conn = server
self.flow.response = HTTPResponse.from_stream(server.rfile, r.method,
body_size_limit=self.config.body_size_limit)
response_reply = self.channel.ask("response", self.flow)
if response_reply is None or response_reply == KILL:
raise KillSignal()
except (proxy.ProxyError, http.HttpError, tcp.NetLibError) as v:
self.flow.error = Error(repr(v))
self.channel.ask("error", self.flow)
except KillSignal:
self.channel.tell("log", proxy.Log("Connection killed", "info"))
finally:
r.form_out = form_out_backup
| xtso520ok/mitmproxy | libmproxy/protocol/http.py | Python | mit | 52,925 |
__author__ = "Nick Isaacs"
import shutil
import time
import os
import tests.TestHelper
from src.processor.MongoProcessor import MongoProcessor
from src.utils.Envirionment import Envirionment
class SaveThreadProcessorTest(object):
def setup(self):
self.test_helper = tests.TestHelper.TestHelper()
self.client = self.test_helper.client
this_path = os.path.dirname(__file__)
save_path = os.path.join(this_path, '../out')
if os.path.exists(save_path):
shutil.rmtree(save_path)
os.mkdir(save_path)
self.save_path = save_path
def test_processor(self):
self.client.run()
print "Running Mongo stream test"
processor = MongoProcessor(self.client.queue(), Envirionment())
processor.run()
print("Sleep 5 seconds to allow thread to process messages")
run_until = time.time() + 15
while time.time() < run_until:
pass
processor.stop()
self.client.stop()
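        # Busy-wait until both the processor and the client threads have stopped.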
while processor.running() or self.client.running():
pass
| gnip/sample-python-connector | tests/specs/MongoStgreamProcessorTest.py | Python | mit | 1,081 |
import os.path, sys
import distutils.util
# Append the directory in which the binaries were placed to Python's sys.path,
# then import the D DLL.
libDir = os.path.join('build', 'lib.%s-%s' % (
distutils.util.get_platform(),
'.'.join(str(v) for v in sys.version_info[:2])
))
sys.path.append(os.path.abspath(libDir))
from arraytest import Foo, get, set, test
#set([Foo(1), Foo(2), Foo(3)])
print (">>> get()")
print (str(get()))
print (">>> set([Foo(10), Foo(20)])")
#set(a=[Foo(10), Foo(20)])
set([Foo(10), Foo(20)])
print (">>> get()")
print (str(get()))
| ariovistus/pyd | examples/arraytest/test.py | Python | mit | 566 |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
# File: cifar10-resnet.py
# Author: Yuxin Wu <[email protected]>
import numpy as np
import tensorflow as tf
import argparse
import os
from tensorpack import *
from tensorpack.tfutils.symbolic_functions import *
from tensorpack.tfutils.summary import *
"""
CIFAR10 ResNet example. See:
Deep Residual Learning for Image Recognition, arxiv:1512.03385
This implementation uses the variants proposed in:
Identity Mappings in Deep Residual Networks, arxiv:1603.05027
I can reproduce the results on 2 TitanX for
n=5, about 7.1% val error after 67k step (8.6 step/s)
n=18, about 5.7% val error (2.45 step/s)
n=30: a 182-layer network, about 5.6% val error after 51k step (1.55 step/s)
This model uses the whole training set instead of a train-val split.
"""
BATCH_SIZE = 128
class Model(ModelDesc):
def __init__(self, n):
super(Model, self).__init__()
self.n = n
def _get_input_vars(self):
return [InputVar(tf.float32, [None, 32, 32, 3], 'input'),
InputVar(tf.int32, [None], 'label')
]
def _build_graph(self, input_vars, is_training):
image, label = input_vars
image = image / 128.0 - 1
def conv(name, l, channel, stride):
return Conv2D(name, l, channel, 3, stride=stride,
nl=tf.identity, use_bias=False,
W_init=tf.random_normal_initializer(stddev=np.sqrt(2.0/9/channel)))
def residual(name, l, increase_dim=False, first=False):
shape = l.get_shape().as_list()
in_channel = shape[3]
if increase_dim:
out_channel = in_channel * 2
stride1 = 2
else:
out_channel = in_channel
stride1 = 1
with tf.variable_scope(name) as scope:
if not first:
b1 = BatchNorm('bn1', l, is_training)
b1 = tf.nn.relu(b1)
else:
b1 = l
c1 = conv('conv1', b1, out_channel, stride1)
b2 = BatchNorm('bn2', c1, is_training)
b2 = tf.nn.relu(b2)
c2 = conv('conv2', b2, out_channel, 1)
if increase_dim:
l = AvgPooling('pool', l, 2)
l = tf.pad(l, [[0,0], [0,0], [0,0], [in_channel//2, in_channel//2]])
l = c2 + l
return l
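        # Note: residual() uses the pre-activation ordering (BN -> ReLU -> conv)
        # from the "Identity Mappings" paper cited in the module docstring; when
        # the channel count doubles, the shortcut is average-pooled and
        # zero-padded rather than projected.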
l = conv('conv0', image, 16, 1)
l = BatchNorm('bn0', l, is_training)
l = tf.nn.relu(l)
l = residual('res1.0', l, first=True)
for k in range(1, self.n):
l = residual('res1.{}'.format(k), l)
# 32,c=16
l = residual('res2.0', l, increase_dim=True)
for k in range(1, self.n):
l = residual('res2.{}'.format(k), l)
# 16,c=32
l = residual('res3.0', l, increase_dim=True)
for k in range(1, self.n):
            l = residual('res3.{}'.format(k), l)
l = BatchNorm('bnlast', l, is_training)
l = tf.nn.relu(l)
# 8,c=64
l = GlobalAvgPooling('gap', l)
logits = FullyConnected('linear', l, out_dim=10, nl=tf.identity)
prob = tf.nn.softmax(logits, name='output')
cost = tf.nn.sparse_softmax_cross_entropy_with_logits(logits, label)
cost = tf.reduce_mean(cost, name='cross_entropy_loss')
tf.add_to_collection(MOVING_SUMMARY_VARS_KEY, cost)
wrong = prediction_incorrect(logits, label)
nr_wrong = tf.reduce_sum(wrong, name='wrong')
# monitor training error
tf.add_to_collection(
MOVING_SUMMARY_VARS_KEY, tf.reduce_mean(wrong, name='train_error'))
# weight decay on all W of fc layers
wd_w = tf.train.exponential_decay(0.0002, get_global_step_var(),
480000, 0.2, True)
wd_cost = tf.mul(wd_w, regularize_cost('.*/W', tf.nn.l2_loss), name='wd_cost')
tf.add_to_collection(MOVING_SUMMARY_VARS_KEY, wd_cost)
add_param_summary([('.*/W', ['histogram'])]) # monitor W
self.cost = tf.add_n([cost, wd_cost], name='cost')
def get_data(train_or_test):
isTrain = train_or_test == 'train'
ds = dataset.Cifar10(train_or_test)
pp_mean = ds.get_per_pixel_mean()
if isTrain:
augmentors = [
imgaug.CenterPaste((40, 40)),
imgaug.RandomCrop((32, 32)),
imgaug.Flip(horiz=True),
#imgaug.Brightness(20),
#imgaug.Contrast((0.6,1.4)),
imgaug.MapImage(lambda x: x - pp_mean),
]
else:
augmentors = [
imgaug.MapImage(lambda x: x - pp_mean)
]
ds = AugmentImageComponent(ds, augmentors)
ds = BatchData(ds, 128, remainder=not isTrain)
if isTrain:
ds = PrefetchData(ds, 3, 2)
return ds
def get_config():
basename = os.path.basename(__file__)
logger.set_logger_dir(
os.path.join('train_log', basename[:basename.rfind('.')]))
# prepare dataset
dataset_train = get_data('train')
step_per_epoch = dataset_train.size()
dataset_test = get_data('test')
sess_config = get_default_sess_config(0.9)
lr = tf.Variable(0.01, trainable=False, name='learning_rate')
tf.scalar_summary('learning_rate', lr)
return TrainConfig(
dataset=dataset_train,
optimizer=tf.train.MomentumOptimizer(lr, 0.9),
callbacks=Callbacks([
StatPrinter(),
ModelSaver(),
InferenceRunner(dataset_test,
[ScalarStats('cost'), ClassificationError()]),
ScheduledHyperParamSetter('learning_rate',
[(1, 0.1), (82, 0.01), (123, 0.001), (300, 0.0002)])
]),
session_config=sess_config,
model=Model(n=18),
step_per_epoch=step_per_epoch,
max_epoch=400,
)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--gpu', help='comma separated list of GPU(s) to use.') # nargs='*' in multi mode
parser.add_argument('--load', help='load model')
args = parser.parse_args()
if args.gpu:
os.environ['CUDA_VISIBLE_DEVICES'] = args.gpu
config = get_config()
if args.load:
config.session_init = SaverRestore(args.load)
if args.gpu:
config.nr_tower = len(args.gpu.split(','))
SyncMultiGPUTrainer(config).train()
| hclhkbu/dlbench | synthetic/experiments/tensorflow/cnn/resnet/cifar10-resnet.py | Python | mit | 6,463 |
from .config import SQLALCHEMY_DATABASE_URI
from .config import SQLALCHEMY_MIGRATE_REPO
from .app import db
def create_or_update_db():
import os.path
db.create_all()
if __name__ == '__main__':
create_or_update_db()
| gauravyeole/iDigBio-appliance | idigbio_media_appliance/create_db.py | Python | mit | 230 |
from urlparse import urljoin
from flask import request
from werkzeug.contrib.atom import AtomFeed
import flask
import os
import dateutil.parser
from .query import QueryFinder
from .indexer import read_json_file
PARAM_TOKEN = '$$'
ALLOWED_FEED_PARAMS = ('feed_title', 'feed_url')
ALLOWED_ENTRY_PARAMS = ('entry_url', 'entry_title', 'entry_content',
'entry_author', 'entry_updated', 'entry_content_type',
'entry_summary', 'entry_published', 'entry_rights')
DATE_PARAMS = ('updated', 'published')
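# A minimal sketch of the "feed" block this module expects inside a
# _queries/<name>.json file (the field names on the right are illustrative;
# the $$ prefix, PARAM_TOKEN, marks an attribute looked up on each item):
#
#   "feed": {
#       "feed_title": "Recent posts",
#       "feed_url": "/feed/posts/",
#       "entry_title": "$$title",
#       "entry_url": "$$permalink",
#       "entry_updated": "$$date"
#   }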
def make_external(url):
return urljoin(request.url_root, url)
def get_feed_settings(name):
app = flask.current_app
queries_dir = os.path.join(app.root_dir, '_queries')
query_path = os.path.join(queries_dir, '{0}.json'.format(name))
if os.path.exists(query_path):
query_file = read_json_file(query_path)
return query_file.get('feed')
class Feed(object):
# Make sure only allowed feed params are passed into the feed
def __init__(self, settings):
self.feed_url = request.url
for setting in settings:
if setting.startswith('feed_') and setting in ALLOWED_FEED_PARAMS:
setting_trimmed = setting.replace('feed_', '')
setattr(self, setting_trimmed, settings[setting])
        if getattr(self, 'url', None):
            self.url = make_external(self.url)
class Entry(object):
# Make sure only allowed entry params are passed into the feed
def __init__(self, item, settings):
for setting in settings:
attribute = settings[setting].replace(PARAM_TOKEN, '')
if setting.startswith('entry_') and \
setting in ALLOWED_ENTRY_PARAMS and \
hasattr(item, attribute):
setting_trimmed = setting.replace('entry_', '')
# Dates must be in datetime.datetime format
if setting_trimmed in DATE_PARAMS:
setattr(self, setting_trimmed,
dateutil.parser.parse(getattr(item, attribute)))
else:
setattr(self, setting_trimmed, getattr(item, attribute))
def add_feeds_to_sheer(app):
@app.route('/feed/<name>/')
def recent_feed(name):
settings = get_feed_settings(name) or flask.abort(404)
feed = Feed(settings)
atom_feed = AtomFeed(**feed.__dict__)
query_finder = QueryFinder()
query = getattr(query_finder, name) or flask.abort(404)
items = query.search_with_url_arguments()
for item in items:
entry = Entry(item, settings)
atom_feed.add(**entry.__dict__)
return atom_feed.get_response()
| rosskarchner/sheer | sheer/feeds.py | Python | cc0-1.0 | 2,701 |
#!/usr/bin/env python
# -*- mode: python; coding: utf-8; -*-
##---------------------------------------------------------------------------##
##
## Copyright (C) 1998-2003 Markus Franz Xaver Johannes Oberhumer
## Copyright (C) 2003 Mt. Hood Playing Card Co.
## Copyright (C) 2005-2009 Skomoroh
##
## This program is free software: you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation, either version 3 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program. If not, see <http://www.gnu.org/licenses/>.
##
##---------------------------------------------------------------------------##
presets = {
'None': {
'preset': 'None',
'name': n_('My Game'),
},
'Klondike': {
'preset': 'Klondike',
'name': n_('My Klondike'),
'layout': 'Klondike',
'talon': 'Deal to waste',
'redeals': 'Unlimited redeals',
'rows_num': 7,
'rows_base_card': 'King',
'reserves_num': 0,
'deal_type': 'Triangle',
'deal_face_down': 6,
'deal_face_up': 1,
},
'FreeCell': {
'preset': 'FreeCell',
'name': n_('My FreeCell'),
'skill_level': 'Mostly skill',
'rows_max_move': 'Top card',
'rows_super_move': 1,
'deal_face_up': 6,
},
'Spider': {
'preset': 'Spider',
'name': n_('My Spider'),
'skill_level': 'Mostly skill',
'decks': 'Two',
'layout': 'Klondike',
'talon': 'Spider',
'found_type': 'Spider same suit',
'found_max_move': 'None',
'rows_num': 10,
'rows_type': 'Spider same suit',
'reserves_num': 0,
'deal_face_down': 5,
'deal_face_up': 1,
'deal_max_cards': 54,
},
'Gypsy': {
'preset': 'Gypsy',
'name': n_('My Gypsy'),
'skill_level': 'Mostly skill',
'decks': 'Two',
'layout': 'Gypsy',
'talon': 'Deal to tableau',
'found_max_move': 'None',
'reserves_num': 0,
'deal_face_down': 2,
'deal_face_up': 1,
},
'Grounds for a Divorce': {
'preset': 'Grounds for a Divorce',
'name': n_('My Grounds for a Divorce'),
'skill_level': 'Mostly skill',
'decks': 'Two',
'layout': 'Harp',
'talon': 'Grounds for a Divorce',
'found_type': 'Spider same suit',
'found_base_card': 'Any',
'found_equal': 0,
'rows_num': 10,
'rows_type': 'Spider same suit',
'rows_wrap': 1,
'reserves_num': 0,
'deal_face_up': 5,
},
'Double Klondike': {
'preset': 'Double Klondike',
'name': n_('My Double Klondike'),
'decks': 'Two',
'layout': 'Harp',
'talon': 'Deal to waste',
'redeals': 'Unlimited redeals',
'rows_num': 9,
'rows_base_card': 'King',
'reserves_num': 0,
'deal_type': 'Triangle',
'deal_face_down': 8,
'deal_face_up': 1,
},
'Simple Simon': {
'preset': 'Simple Simon',
'name': n_('My Simple Simon'),
'skill_level': 'Mostly skill',
'found_type': 'Spider same suit',
'found_max_move': 'None',
'rows_num': 10,
'rows_type': 'Spider same suit',
'reserves_num': 0,
'deal_type': 'Triangle',
},
}
| TrevorLowing/PyGames | pysollib/wizardpresets.py | Python | gpl-2.0 | 3,786 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__date__= 'Aug 19, 2015 '
__author__= 'samuel'
import yaml
import sys
import os
import docker
from docker import Client
def check_image_name(docker, service):
print 'check_image_name,',
#pwd = os.path.dirname(os.path.realpath(__file__)).split('/')[-1]
#folder = pwd
cwd = os.getcwd().split('/')[-1]
folder = cwd
iname = "%s_%s" % (folder, service)
    # Repo names (tags stripped) of every local image, skipping dangling <none> entries
    image_names = [name for name in
                   [tag.encode('utf-8').split(':')[0]
                    for image in docker.images()
                    for tag in image['RepoTags']]
                   if '<none>' not in name]
    if iname in image_names:
        print '"%s" image exists' % iname
        return (True, iname)
    else:
        print '"%s" image does not exist' % iname
        return (False, None)
def remove_images(docker, composefile_name, dangling=False):
try:
if dangling:
dangling_images_ids = \
[id['Id'].encode('utf-8') for id in docker.images(filters={'dangling': True})]
for id in dangling_images_ids:
docker.remove_image(image=id, force=True, noprune=False)
print '%s image removed' % id
with open(composefile_name, 'r') as ymlfile:
cfg = yaml.load(ymlfile)
for service in cfg:
exist, iname = check_image_name(docker, service)
if exist:
docker.remove_image(image=iname, force=True, noprune=False)
print '%s image removed' % iname
except Exception,e:
print 'type: %s' % type(e)
print 'args: %s' % e.args
print str(e)
def main():
if len(sys.argv) == 1:
composefile_name = 'docker-compose.yml'
elif len(sys.argv) == 2:
composefile_name = sys.argv[1]
else:
print "\n\t$ docker-compose-rmi <docekr-compose.yml file path>\n"
return
docker = Client(base_url='unix://var/run/docker.sock')
remove_images(docker, composefile_name, dangling=True)
if __name__ == '__main__':
main()
| bowlofstew/BuildbotDocker | test/docker-compose-rmi.py | Python | gpl-2.0 | 1,987 |
import datetime
import pytest
from manageiq_client.filters import Q
from cfme import test_requirements
from cfme.infrastructure.provider import InfraProvider
from cfme.markers.env_markers.provider import ONE
from cfme.rest.gen_data import vm as _vm
from cfme.utils.blockers import BZ
from cfme.utils.rest import assert_response
from cfme.utils.version import Version
from cfme.utils.version import VersionPicker
from cfme.utils.wait import wait_for
pytestmark = [
test_requirements.rest,
pytest.mark.provider(classes=[InfraProvider], selector=ONE),
pytest.mark.usefixtures("setup_provider"),
]
@pytest.fixture(scope="function")
def vm(request, provider, appliance):
return _vm(request, provider, appliance)
@pytest.fixture
def retire_vm(appliance, vm, provider):
retire_vm = appliance.collections.infra_vms.instantiate(vm, provider)
# retiring VM via UI, because retiring it via API will not generate request
# and we will not get the retirement requester.
retire_vm.retire()
# using rest entity to check if the VM has retired since it is a lot faster
_retire_vm = appliance.rest_api.collections.vms.get(name=vm)
wait_for(
lambda: (hasattr(_retire_vm, "retired") and _retire_vm.retired),
timeout=1000,
delay=5,
fail_func=_retire_vm.reload,
)
return vm
@pytest.fixture
def vm_retirement_report(appliance, retire_vm):
# Create a report for Virtual Machines that exactly matches with
# the name of the vm that was just retired
report_data = {
"menu_name": "vm_retirement_requester",
"title": "VM Retirement Requester",
"base_report_on": "Virtual Machines",
"report_fields": ["Name", "Retirement Requester", "Retirement State"],
"filter": {
"primary_filter": "fill_field(Virtual Machine : Name, =, {})".format(
retire_vm
)
},
}
report = appliance.collections.reports.create(**report_data)
yield retire_vm, report
report.delete()
@pytest.mark.tier(3)
@pytest.mark.parametrize(
"from_collection", [True, False], ids=["from_collection", "from_detail"]
)
@pytest.mark.meta(automates=[BZ(1805119)], blockers=[BZ(1805119, forced_streams=["5.10"])])
def test_retire_vm_now(appliance, vm, from_collection):
"""Test retirement of vm
Prerequisities:
* An appliance with ``/api`` available.
* VM
Steps:
* POST /api/vms/<id> (method ``retire``)
OR
* POST /api/vms (method ``retire``) with ``href`` of the vm or vms
Metadata:
test_flag: rest
Bugzilla:
1805119
Polarion:
assignee: pvala
casecomponent: Infra
caseimportance: high
initialEstimate: 1/3h
"""
retire_action = VersionPicker({Version.lowest(): 'retire', '5.11': 'request_retire'}).pick()
retire_vm = appliance.rest_api.collections.vms.get(name=vm)
if from_collection:
getattr(appliance.rest_api.collections.vms.action, retire_action)(retire_vm)
else:
getattr(retire_vm.action, retire_action)()
assert_response(appliance)
def _finished():
retire_vm.reload()
# The retirement_state field appears after calling retire method
try:
if retire_vm.retirement_state == "retired":
return True
except AttributeError:
pass
return False
wait_for(_finished, num_sec=1500, delay=10, message="REST vm retire now")
@pytest.mark.tier(3)
@pytest.mark.parametrize(
"from_collection", [True, False], ids=["from_collection", "from_detail"]
)
@pytest.mark.meta(
automates=[BZ(1805119), BZ(1827787)], blockers=[BZ(1827787, forced_streams=["5.10", "5.11"])]
)
def test_retire_vm_future(appliance, vm, from_collection):
"""Test retirement of vm
Prerequisities:
* An appliance with ``/api`` available.
* VM
Steps:
* POST /api/vms/<id> (method ``retire``) with the ``retire_date``
OR
* POST /api/vms (method ``retire``) with the ``retire_date`` and ``href`` of the vm or vms
Metadata:
test_flag: rest
Bugzilla:
1805119
1827787
Polarion:
assignee: pvala
casecomponent: Infra
caseimportance: high
initialEstimate: 1/3h
"""
retire_action = VersionPicker({Version.lowest(): 'retire', '5.11': 'request_retire'}).pick()
retire_vm = appliance.rest_api.collections.vms.get(name=vm)
date = (datetime.datetime.now() + datetime.timedelta(days=5)).strftime("%Y/%m/%d")
future = {"date": date, "warn": "4"}
if from_collection:
future.update(retire_vm._ref_repr())
getattr(appliance.rest_api.collections.vms.action, retire_action)(future)
else:
getattr(retire_vm.action, retire_action)(**future)
assert_response(appliance)
def _finished():
retire_vm.reload()
if not hasattr(retire_vm, "retires_on"):
return False
if not hasattr(retire_vm, "retirement_warn"):
return False
if not hasattr(retire_vm, "retirement_state"):
return False
return True
wait_for(_finished, num_sec=1500, delay=10, message="REST vm retire future")
@pytest.mark.tier(1)
@pytest.mark.meta(automates=[BZ(1805119), BZ(1638502)])
def test_check_vm_retirement_requester(
appliance, request, provider, vm_retirement_report
):
"""
Polarion:
assignee: pvala
casecomponent: Infra
caseimportance: medium
initialEstimate: 1/2h
tags: retirement
setup:
1. Add a provider.
2. Provision a VM.
3. Once the VM has been provisioned, retire the VM.
4. Create a report(See attachment in BZ).
testSteps:
1. Queue the report once the VM has retired
and check the retirement_requester column for the VM.
expectedResults:
1. Requester name must be visible.
Bugzilla:
1638502
1805119
"""
vm_name, report = vm_retirement_report
saved_report = report.queue(wait_for_finish=True)
# filtering the request by description because description sometimes changes with version
requester_id = (
appliance.rest_api.collections.requests.filter(
Q("description", "=", f"VM Retire for: {vm_name}*")
)
.resources[0]
.requester_id
)
# obtaining the retirement requester's userid from retirement request
requester_userid = appliance.rest_api.collections.users.get(id=requester_id).userid
# the report filter is such that we will only obtain one row in the report
row_data = saved_report.data.find_row("Name", vm_name)
assert (
row_data["Name"],
row_data["Retirement Requester"],
row_data["Retirement State"],
) == (vm_name, requester_userid, "retired")
| nachandr/cfme_tests | cfme/tests/infrastructure/test_vm_retirement_rest.py | Python | gpl-2.0 | 6,932 |
# MySQL Connector/Python - MySQL driver written in Python.
# Copyright (c) 2012, Oracle and/or its affiliates. All rights reserved.
# MySQL Connector/Python is licensed under the terms of the GPLv2
# <http://www.gnu.org/licenses/old-licenses/gpl-2.0.html>, like most
# MySQL Connectors. There are special exceptions to the terms and
# conditions of the GPLv2 as it is applied to this software, see the
# FOSS License Exception
# <http://www.mysql.com/about/legal/licensing/foss-exception.html>.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""Module implementing low-level socket communication with MySQL servers.
"""
import os
import socket
import struct
from collections import deque
import zlib
try:
import ssl
except ImportError:
# If import fails, we don't have SSL support.
pass
# socket.inet_pton() is not available on the Windows platform
if os.name == 'nt':
import ctypes
_WIN_DDL = ctypes.WinDLL("ws2_32.dll")
def inet_pton(address_family, ip_string):
# ctypes functions expect bytes
try:
ipaddr = ip_string.encode('ascii')
except AttributeError:
ipaddr = ip_string
res = _WIN_DDL.inet_pton(address_family, ipaddr, b'')
if res == 0:
raise socket.error("illegal IP address string passed to inet_pton")
else:
inet_pton = socket.inet_pton
from . import (constants, errors)
def _prepare_packets(buf, pktnr):
"""Prepare a packet for sending to the MySQL server"""
pkts = []
pllen = len(buf)
maxpktlen = constants.MAX_PACKET_LENGTH
while pllen > maxpktlen:
pkts.append(b'\xff\xff\xff' + struct.pack('<B',pktnr)
+ buf[:maxpktlen])
buf = buf[maxpktlen:]
pllen = len(buf)
pktnr = pktnr + 1
pkts.append(struct.pack('<I',pllen)[0:3]
+ struct.pack('<B',pktnr) + buf)
return pkts
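# Wire-format sketch: each packet produced by _prepare_packets() is a 3-byte
# little-endian payload length, a 1-byte sequence number, then the payload.
# For example (illustrative input):
#
#   _prepare_packets(b'\x03SELECT 1', 0)
#   -> [b'\x09\x00\x00' + b'\x00' + b'\x03SELECT 1']
#
# Payloads longer than MAX_PACKET_LENGTH are split into packets whose length
# field is 0xffffff, with the sequence number incremented for each piece.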
class BaseMySQLSocket(object):
"""Base class for MySQL socket communication
This class should not be used directly but overloaded, changing the
at least the open_connection()-method. Examples of subclasses are
mysql.connector.network.MySQLTCPSocket
mysql.connector.network.MySQLUnixSocket
"""
def __init__(self):
self.sock = None # holds the socket connection
self._connection_timeout = None
self._packet_number = -1
self._packet_queue = deque()
self.recvsize = 8192
@property
def next_packet_number(self):
self._packet_number = self._packet_number + 1
return self._packet_number
def open_connection(self):
"""Open the socket"""
raise NotImplementedError
def get_address(self):
"""Get the location of the socket"""
raise NotImplementedError
def close_connection(self):
"""Close the socket"""
try:
self.sock.close()
del self._packet_queue
except (socket.error, AttributeError):
pass
def send_plain(self, buf, packet_number=None):
"""Send packets to the MySQL server"""
if packet_number is None:
self.next_packet_number
else:
self._packet_number = packet_number
packets = _prepare_packets(buf, self._packet_number)
for packet in packets:
try:
self.sock.sendall(packet)
except Exception as err:
raise errors.OperationalError(str(err))
send = send_plain
def send_compressed(self, buf, packet_number=None):
"""Send compressed packets to the MySQL server"""
if packet_number is None:
self.next_packet_number
else:
self._packet_number = packet_number
pktnr = self._packet_number
pllen = len(buf)
zpkts = []
maxpktlen = constants.MAX_PACKET_LENGTH
if pllen > maxpktlen:
pkts = _prepare_packets(buf,pktnr)
tmpbuf = b''.join(pkts)
del pkts
seqid = 0
zbuf = zlib.compress(tmpbuf[:16384])
zpkts.append(struct.pack('<I', len(zbuf))[0:3]
+ struct.pack('<B', seqid)
+ b'\x00\x40\x00' + zbuf)
tmpbuf = tmpbuf[16384:]
pllen = len(tmpbuf)
seqid = seqid + 1
while pllen > maxpktlen:
zbuf = zlib.compress(tmpbuf[:maxpktlen])
zpkts.append(struct.pack('<I', len(zbuf))[0:3]
+ struct.pack('<B', seqid)
+ b'\xff\xff\xff' + zbuf)
tmpbuf = tmpbuf[maxpktlen:]
pllen = len(tmpbuf)
seqid = seqid + 1
if tmpbuf:
zbuf = zlib.compress(tmpbuf)
zpkts.append(struct.pack('<I',len(zbuf))[0:3]
+ struct.pack('<B',seqid)
+ struct.pack('<I',pllen)[0:3]
+ zbuf)
del tmpbuf
else:
pkt = (struct.pack('<I', pllen)[0:3] +
struct.pack('<B', pktnr) + buf)
pllen = len(pkt)
if pllen > 50:
zbuf = zlib.compress(pkt)
zpkts.append(struct.pack('<I', len(zbuf))[0:3]
+ struct.pack('<B', 0)
+ struct.pack('<I', pllen)[0:3]
+ zbuf)
else:
zpkts.append(struct.pack('<I', pllen)[0:3]
+ struct.pack('<B', 0)
+ struct.pack('<I', 0)[0:3]
+ pkt)
for zip_packet in zpkts:
zpktlen = len(zip_packet)
try:
self.sock.sendall(zip_packet)
except Exception as err:
raise errors.OperationalError(str(err))
def recv_plain(self):
"""Receive packets from the MySQL server"""
packet = b''
try:
# Read the header of the MySQL packet, 4 bytes
packet = self.sock.recv(1)
while len(packet) < 4:
chunk = self.sock.recv(1)
if not chunk:
raise errors.InterfaceError(errno=2013)
packet += chunk
# Save the packet number and total packet length from header
self._packet_number = packet[3]
packet_totlen = struct.unpack("<I", packet[0:3] + b'\x00')[0] + 4
# Read the rest of the packet
rest = packet_totlen - len(packet)
while rest > 0:
chunk = self.sock.recv(rest)
if not chunk:
raise errors.InterfaceError(errno=2013)
packet += chunk
rest = packet_totlen - len(packet)
return packet
except socket.timeout as err:
raise errors.InterfaceError(errno=2013)
except socket.error as err:
raise errors.InterfaceError(
errno=2055, values=(self.get_address(), err.errno))
recv = recv_plain
def _split_zipped_payload(self, packet_bunch):
"""Split compressed payload"""
while packet_bunch:
payload_length = struct.unpack("<I",
packet_bunch[0:3] + b'\x00')[0]
self._packet_queue.append(packet_bunch[0:payload_length + 4])
packet_bunch = packet_bunch[payload_length + 4:]
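    # Compressed-protocol framing used below (7-byte header): a 3-byte
    # little-endian compressed payload length, a 1-byte sequence number, and a
    # 3-byte little-endian uncompressed length; an uncompressed length of 0
    # means the payload was sent without compression.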
def recv_compressed(self):
"""Receive compressed packets from the MySQL server"""
try:
return self._packet_queue.popleft()
except IndexError:
pass
header = b''
packets = []
try:
abyte = self.sock.recv(1)
while abyte and len(header) < 7:
header += abyte
abyte = self.sock.recv(1)
while header:
if len(header) < 7:
raise errors.InterfaceError(errno=2013)
zip_payload_length = struct.unpack("<I",
header[0:3] + b'\x00')[0]
payload_length = struct.unpack("<I", header[4:7] + b'\x00')[0]
zip_payload = abyte
while len(zip_payload) < zip_payload_length:
chunk = self.sock.recv(zip_payload_length
- len(zip_payload))
if len(chunk) == 0:
raise errors.InterfaceError(errno=2013)
zip_payload = zip_payload + chunk
if payload_length == 0:
self._split_zipped_payload(zip_payload)
return self._packet_queue.popleft()
packets.append(header + zip_payload)
if payload_length != 16384:
break
header = b''
abyte = self.sock.recv(1)
while abyte and len(header) < 7:
header += abyte
abyte = self.sock.recv(1)
except socket.timeout as err:
raise errors.InterfaceError(errno=2013)
except socket.error as err:
raise errors.InterfaceError(
errno=2055, values=(self.get_address(), err.errno))
tmp = []
for packet in packets:
payload_length = struct.unpack("<I", header[4:7] + b'\x00')[0]
if payload_length == 0:
tmp.append(packet[7:])
else:
tmp.append(zlib.decompress(packet[7:]))
self._split_zipped_payload(b''.join(tmp))
del tmp
try:
return self._packet_queue.popleft()
except IndexError:
pass
def set_connection_timeout(self, timeout):
"""Set the connection timeout"""
self._connection_timeout = timeout
def switch_to_ssl(self, ca, cert, key):
"""Switch the socket to use SSL"""
if not self.sock:
raise errors.InterfaceError(errno=2048)
try:
self.sock = ssl.wrap_socket(
self.sock, keyfile=key, certfile=cert, ca_certs=ca,
cert_reqs=ssl.CERT_NONE, do_handshake_on_connect=False,
ssl_version=ssl.PROTOCOL_TLSv1)
self.sock.do_handshake()
except NameError:
raise errors.NotSupportedError(
"Python installation has no SSL support")
except ssl.SSLError as err:
raise errors.InterfaceError("SSL error: {}".format(str(err)))
except socket.error as err:
raise errors.InterfaceError("Socket error: {}".format(str(err)))
class MySQLUnixSocket(BaseMySQLSocket):
"""MySQL socket class using UNIX sockets
Opens a connection through the UNIX socket of the MySQL Server.
"""
def __init__(self, unix_socket='/tmp/mysql.sock'):
super().__init__()
self.unix_socket = unix_socket
def get_address(self):
return self.unix_socket
def open_connection(self):
try:
self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
self.sock.settimeout(self._connection_timeout)
self.sock.connect(self.unix_socket)
except socket.error as err:
try:
msg = err.errno
if msg is None:
msg = str(err)
except AttributeError:
msg = str(err)
raise errors.InterfaceError(
errno=2002, values=(self.get_address(), msg))
except Exception as err:
raise errors.InterfaceError(str(err))
class MySQLTCPSocket(BaseMySQLSocket):
"""MySQL socket class using TCP/IP
Opens a TCP/IP connection to the MySQL Server.
"""
def __init__(self, host='127.0.0.1', port=3306):
super().__init__()
self.server_host = host
self.server_port = port
def get_address(self):
return "{}:{}".format(self.server_host, self.server_port)
def open_connection(self):
"""Open the TCP/IP connection to the MySQL server
"""
# Detect address family.
try:
inet_pton(socket.AF_INET6, self.server_host.split('%')[0])
family = socket.AF_INET6
except socket.error:
family = socket.AF_INET
try:
(family, socktype, proto, canonname, sockaddr) = socket.getaddrinfo(
self.server_host,
self.server_port,
family,
socket.SOCK_STREAM)[0]
self.sock = socket.socket(family, socktype, proto)
self.sock.settimeout(self._connection_timeout)
self.sock.connect(sockaddr)
except socket.error as err:
try:
msg = err.errno
if msg is None:
msg = str(err)
except AttributeError:
msg = str(err)
raise errors.InterfaceError(
errno=2003, values=(self.get_address(), msg))
except Exception as err:
raise errors.OperationalError(str(err))
| analogue/mythbox | resources/lib/mysql-connector-python/python3/mysql/connector/network.py | Python | gpl-2.0 | 13,834 |
"""
python-calais v.1.4 -- Python interface to the OpenCalais API
Author: Jordan Dimov ([email protected])
Last-Update: 01/12/2009
"""
import httplib, urllib, urllib2, re
try:
import simplejson as json
except ImportError:
import json
from StringIO import StringIO
PARAMS_XML = """
<c:params xmlns:c="http://s.opencalais.com/1/pred/"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#">
<c:processingDirectives %s> </c:processingDirectives>
<c:userDirectives %s> </c:userDirectives>
<c:externalMetadata %s> </c:externalMetadata>
</c:params>
"""
STRIP_RE = re.compile('<script.*?</script>|<noscript.*?</noscript>|<style.*?</style>', re.IGNORECASE)
__version__ = "1.5"
class AppURLopener(urllib.FancyURLopener):
version = "Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.0.5) Gecko/2008121623 Ubuntu/8.10 (intrepid)Firefox/3.0.5" # Lie shamelessly to Wikipedia.
urllib._urlopener = AppURLopener()
class Calais():
"""
Python class that knows how to talk to the OpenCalais API. Use the analyze() and analyze_url() methods, which return CalaisResponse objects.
"""
api_key = None
processing_directives = {"contentType":"TEXT/RAW",
"outputFormat":"application/json",
"reltagBaseURL":None,
"calculateRelevanceScore":"true",
"enableMetadataType":'SocialTags',
"discardMetadata":None,
"omitOutputtingOriginalText":"true"}
user_directives = {"allowDistribution":"false",
"allowSearch":"false",
"externalID":None}
external_metadata = {}
url = 'http://api.opencalais.com/enlighten/rest/'
def __init__(self, api_key, submitter="python-calais client v.%s" % __version__):
self.api_key = api_key
self.user_directives["submitter"]=submitter
def _get_params_XML(self):
        return PARAMS_XML % (
            " ".join('c:%s="%s"' % (k, v) for (k, v) in self.processing_directives.items() if v),
            " ".join('c:%s="%s"' % (k, v) for (k, v) in self.user_directives.items() if v),
            " ".join('c:%s="%s"' % (k, v) for (k, v) in self.external_metadata.items() if v))
def rest_POST(self, content):
params = urllib.urlencode({'licenseID':self.api_key,
'content':content, 'paramsXML':self._get_params_XML()})
#headers = {"Content-type":"application/x-www-form-urlencoded"}
#conn = httplib.HTTPConnection("api.opencalais.com:80")
#conn.request("POST", "/enlighten/rest/", params, headers)
#response = conn.getresponse()
#data = response.read()
#conn.close()
response = urllib2.urlopen(self.url, data=params)
data = response.read()
return (data)
def get_random_id(self):
"""
Creates a random 10-character ID for your submission.
"""
import string
from random import choice
chars = string.letters + string.digits
np = ""
for i in range(10):
np = np + choice(chars)
return np
def get_content_id(self, text):
"""
Creates a SHA1 hash of the text of your submission.
"""
import hashlib
h = hashlib.sha1()
h.update(text)
return h.hexdigest()
def preprocess_html(self, html):
html = html.replace('\n', '')
html = STRIP_RE.sub('', html)
return html
def analyze(self, content, content_type="TEXT/RAW", external_id=None):
if not (content and len(content.strip())):
return None
self.processing_directives["contentType"]=content_type
if external_id:
self.user_directives["externalID"] = external_id
return CalaisResponse(self.rest_POST(content))
def analyze_url(self, url):
f = urllib.urlopen(url)
html = self.preprocess_html(f.read())
return self.analyze(html, content_type="TEXT/HTML", external_id=url)
def analyze_file(self, fn):
import mimetypes
try:
filetype = mimetypes.guess_type(fn)[0]
except:
raise ValueError("Can not determine file type for '%s'" % fn)
if filetype == "text/plain":
content_type="TEXT/RAW"
f = open(fn)
content = f.read()
f.close()
elif filetype == "text/html":
content_type = "TEXT/HTML"
f = open(fn)
content = self.preprocess_html(f.read())
f.close()
else:
raise ValueError("Only plaintext and HTML files are currently supported. ")
return self.analyze(content, content_type=content_type, external_id=fn)
class CalaisResponse():
"""
Encapsulates a parsed Calais response and provides easy pythonic access to the data.
"""
raw_response = None
simplified_response = None
def __init__(self, raw_result):
try:
self.raw_response = json.load(StringIO(raw_result))
except:
raise ValueError(raw_result)
self.simplified_response = self._simplify_json(self.raw_response)
self.__dict__['doc'] = self.raw_response['doc']
for k,v in self.simplified_response.items():
self.__dict__[k] = v
def _simplify_json(self, json):
result = {}
# First, resolve references
for element in json.values():
for k,v in element.items():
if isinstance(v, unicode) and v.startswith("http://") and json.has_key(v):
element[k] = json[v]
for k, v in json.items():
if v.has_key("_typeGroup"):
group = v["_typeGroup"]
if not result.has_key(group):
result[group]=[]
del v["_typeGroup"]
v["__reference"] = k
result[group].append(v)
return result
def print_summary(self):
if not hasattr(self, "doc"):
return None
info = self.doc['info']
print "Calais Request ID: %s" % info['calaisRequestID']
if info.has_key('externalID'):
print "External ID: %s" % info['externalID']
if info.has_key('docTitle'):
print "Title: %s " % info['docTitle']
print "Language: %s" % self.doc['meta']['language']
print "Extractions: "
for k,v in self.simplified_response.items():
print "\t%d %s" % (len(v), k)
def print_entities(self):
if not hasattr(self, "entities"):
return None
for item in self.entities:
print "%s: %s (%.2f)" % (item['_type'], item['name'], item['relevance'])
def print_topics(self):
if not hasattr(self, "topics"):
return None
for topic in self.topics:
print topic['categoryName']
def print_relations(self):
if not hasattr(self, "relations"):
return None
for relation in self.relations:
print relation['_type']
for k,v in relation.items():
if not k.startswith("_"):
if isinstance(v, unicode):
print "\t%s:%s" % (k,v)
elif isinstance(v, dict) and v.has_key('name'):
print "\t%s:%s" % (k, v['name'])
| collective/collective.taghelper | collective/taghelper/calais.py | Python | gpl-2.0 | 7,318 |
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2013, 2014, 2015 CERN.
#
# Invenio is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
| egabancho/invenio-communities | tests/__init__.py | Python | gpl-2.0 | 983 |
#!/usr/bin/env python
from httplib import HTTP
from string import replace
from struct import unpack
import sys
latitude=0
longitude=0
def doLookup(cellId, lac, host = "www.google.com", port = 80):
page = "/glm/mmap"
http = HTTP(host, port)
result = None
errorCode = 0
content_type, body = encode_request(cellId, lac)
http.putrequest('POST', page)
http.putheader('Content-Type', content_type)
http.putheader('Content-Length', str(len(body)))
http.endheaders()
http.send(body)
errcode, errmsg, headers = http.getreply()
result = http.file.read()
  latitude = longitude = accuracy = 0
  if (errcode == 200):
    (a, b, errorCode, latitude, longitude, accuracy, c, d) = unpack(">hBiiiiih", result)
    latitude = latitude / 1000000.0
    longitude = longitude / 1000000.0
  return latitude, longitude, accuracy
def encode_request(cellId, lac):
from struct import pack
content_type = 'application/binary'
body = pack('>hqh2sh13sh5sh3sBiiihiiiiii', 21, 0, 2, 'in', 13, "Nokia E72", 5,"1.3.1", 3, "Web", 27, 0, 0, 3, 0, cellId, lac, 0, 0, 0, 0)
return content_type, body
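# Expected command line: mcc mnc lac cellId (the values below are
# illustrative, not a real cell):
#   python my_location.py 310 410 7033 17811
# Note that mcc and mnc are parsed but not included in the request built by
# encode_request() above.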
(mcc, mnc, lac, cellId) = (int(sys.argv[1]),int(sys.argv[2]),int(sys.argv[3]),int(sys.argv[4]))
(latitude, longitude, accuracy) = doLookup(cellId, lac, "www.google.com", 80)
print latitude
print longitude
print accuracy
| udit-gupta/socialmaps | socialmaps/mobile_pages/lib/my_location.py | Python | gpl-2.0 | 1,282 |
print "--------------- tn5250j test fields script start ------------"
screen = _session.getScreen()
screenfields = screen.getScreenFields()
fields = screenfields.getFields()
for x in fields:
print x.toString()
print x.getString()
print "number of fields %s " % screenfields.getSize()
print "---------------- tn5250j test fields script end -------------"
| zenovalle/tn5250j | scripts/Test/testfields.py | Python | gpl-2.0 | 368 |
"""
Interpret is a collection of utilities to list the import plugins.
An import plugin is a script in the interpret_plugins folder which has the function getCarving.
The following examples show functions of fabmetheus_interpret. The examples are run in a terminal in the folder which contains fabmetheus_interpret.py.
> python
Python 2.5.1 (r251:54863, Sep 22 2007, 01:43:31)
[GCC 4.2.1 (SUSE Linux)] on linux2
Type "help", "copyright", "credits" or "license" for more information.
>>> import interpret
>>> fabmetheus_interpret.getGNUTranslatorGcodeFileTypeTuples()
[('GTS files', '*.gts'), ('Gcode text files', '*.gcode'), ('STL files', '*.stl'), ('SVG files', '*.svg')]
>>> fabmetheus_interpret.getImportPluginFileNames()
['gts', 'stl', 'svg']
"""
from __future__ import absolute_import
#Init has to be imported first because it has code to workaround the python bug where relative imports don't work if the module is imported as a main module.
import __init__
from fabmetheus_utilities import archive
from fabmetheus_utilities import euclidean
from fabmetheus_utilities import gcodec
from fabmetheus_utilities import settings
from skeinforge_application.skeinforge_utilities import skeinforge_polyfile
from skeinforge_application.skeinforge_utilities import skeinforge_profile
import os
import time
__author__ = 'Enrique Perez ([email protected])'
__date__ = '$Date: 2008/21/04 $'
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
def getCarving(fileName):
"Get carving."
pluginModule = getInterpretPlugin(fileName)
if pluginModule == None:
return None
return pluginModule.getCarving(fileName)
def getGNUTranslatorGcodeFileTypeTuples():
"Get the file type tuples from the translators in the import plugins folder plus gcode."
fileTypeTuples = getTranslatorFileTypeTuples()
fileTypeTuples.append( ('Gcode text files', '*.gcode') )
fileTypeTuples.sort()
return fileTypeTuples
def getGNUTranslatorFilesUnmodified():
"Get the file types from the translators in the import plugins folder."
return archive.getFilesWithFileTypesWithoutWords(getImportPluginFileNames())
def getImportPluginFileNames():
"Get interpret plugin fileNames."
return archive.getPluginFileNamesFromDirectoryPath( getPluginsDirectoryPath() )
def getInterpretPlugin(fileName):
"Get the interpret plugin for the file."
importPluginFileNames = getImportPluginFileNames()
for importPluginFileName in importPluginFileNames:
fileTypeDot = '.' + importPluginFileName
if fileName[ - len(fileTypeDot) : ].lower() == fileTypeDot:
importPluginsDirectoryPath = getPluginsDirectoryPath()
pluginModule = archive.getModuleWithDirectoryPath( importPluginsDirectoryPath, importPluginFileName )
if pluginModule != None:
return pluginModule
print('Could not find plugin to handle ' + fileName )
return None
def getNewRepository():
'Get new repository.'
return InterpretRepository()
def getPluginsDirectoryPath():
"Get the plugins directory path."
return archive.getAbsoluteFrozenFolderPath( __file__, 'interpret_plugins')
def getTranslatorFileTypeTuples():
"Get the file types from the translators in the import plugins folder."
importPluginFileNames = getImportPluginFileNames()
fileTypeTuples = []
for importPluginFileName in importPluginFileNames:
fileTypeTitle = importPluginFileName.upper() + ' files'
fileType = ( fileTypeTitle, '*.' + importPluginFileName )
fileTypeTuples.append( fileType )
fileTypeTuples.sort()
return fileTypeTuples
def getWindowAnalyzeFile(fileName):
"Get file interpretion."
startTime = time.time()
carving = getCarving(fileName)
if carving == None:
return None
interpretGcode = str( carving )
if interpretGcode == '':
return None
repository = settings.getReadRepository( InterpretRepository() )
if repository.printInterpretion.value:
print( interpretGcode )
suffixFileName = fileName[ : fileName.rfind('.') ] + '_interpret.' + carving.getInterpretationSuffix()
suffixDirectoryName = os.path.dirname(suffixFileName)
suffixReplacedBaseName = os.path.basename(suffixFileName).replace(' ', '_')
suffixFileName = os.path.join( suffixDirectoryName, suffixReplacedBaseName )
archive.writeFileText( suffixFileName, interpretGcode )
print('The interpret file is saved as ' + archive.getSummarizedFileName(suffixFileName) )
print('It took %s to interpret the file.' % euclidean.getDurationString( time.time() - startTime ) )
textProgram = repository.textProgram.value
if textProgram == '':
return None
if textProgram == 'webbrowser':
settings.openWebPage(suffixFileName)
return None
textFilePath = '"' + os.path.normpath(suffixFileName) + '"' # " to send in file name with spaces
shellCommand = textProgram + ' ' + textFilePath
print('Sending the shell command:')
print(shellCommand)
commandResult = os.system(shellCommand)
if commandResult != 0:
print('It may be that the system could not find the %s program.' % textProgram )
print('If so, try installing the %s program or look for another one, like Open Office which can be found at:' % textProgram )
print('http://www.openoffice.org/')
print('Open office writer can then be started from the command line with the command "soffice -writer".')
class InterpretRepository:
"A class to handle the interpret settings."
def __init__(self):
"Set the default settings, execute title & settings fileName."
skeinforge_profile.addListsToCraftTypeRepository('skeinforge_application.skeinforge_plugins.analyze_plugins.interpret.html', self)
self.fileNameInput = settings.FileNameInput().getFromFileName( getGNUTranslatorGcodeFileTypeTuples(), 'Open File for Interpret', self, '')
self.activateInterpret = settings.BooleanSetting().getFromValue('Activate Interpret', self, False )
self.printInterpretion = settings.BooleanSetting().getFromValue('Print Interpretion', self, False )
self.textProgram = settings.StringSetting().getFromValue('Text Program:', self, 'webbrowser')
self.executeTitle = 'Interpret'
def execute(self):
"Write button has been clicked."
fileNames = skeinforge_polyfile.getFileOrGcodeDirectory( self.fileNameInput.value, self.fileNameInput.wasCancelled )
for fileName in fileNames:
getWindowAnalyzeFile(fileName)
| natetrue/ReplicatorG | skein_engines/skeinforge-40/fabmetheus_utilities/fabmetheus_tools/fabmetheus_interpret.py | Python | gpl-2.0 | 6,236 |
# Copyright (C) 2014-2016 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import itertools
import logging
import os
import re
import shutil
import subprocess
import time
from webkitpy.common.memoized import memoized
from webkitpy.common.system.executive import ScriptError
from webkitpy.layout_tests.models.test_configuration import TestConfiguration
from webkitpy.port import config as port_config
from webkitpy.port import driver, image_diff
from webkitpy.port.darwin import DarwinPort
from webkitpy.port.simulator_process import SimulatorProcess
from webkitpy.xcode.simulator import Simulator, Runtime, DeviceType
from webkitpy.common.system.crashlogs import CrashLogs
_log = logging.getLogger(__name__)
class IOSPort(DarwinPort):
port_name = "ios"
ARCHITECTURES = ['armv7', 'armv7s', 'arm64']
DEFAULT_ARCHITECTURE = 'arm64'
VERSION_FALLBACK_ORDER = ['ios-7', 'ios-8', 'ios-9', 'ios-10']
@classmethod
def determine_full_port_name(cls, host, options, port_name):
if port_name == cls.port_name:
iphoneos_sdk_version = host.platform.xcode_sdk_version('iphoneos')
if not iphoneos_sdk_version:
raise Exception("Please install the iOS SDK.")
major_version_number = iphoneos_sdk_version.split('.')[0]
port_name = port_name + '-' + major_version_number
return port_name
# Despite their names, these flags do not actually get passed all the way down to webkit-build.
def _build_driver_flags(self):
return ['--sdk', 'iphoneos'] + (['ARCHS=%s' % self.architecture()] if self.architecture() else [])
def operating_system(self):
return 'ios'
class IOSSimulatorPort(DarwinPort):
port_name = "ios-simulator"
FUTURE_VERSION = 'future'
ARCHITECTURES = ['x86_64', 'x86']
DEFAULT_ARCHITECTURE = 'x86_64'
DEFAULT_DEVICE_CLASS = 'iphone'
CUSTOM_DEVICE_CLASSES = ['ipad', 'iphone7']
SDK = 'iphonesimulator'
SIMULATOR_BUNDLE_ID = 'com.apple.iphonesimulator'
SIMULATOR_DIRECTORY = "/tmp/WebKitTestingSimulators/"
LSREGISTER_PATH = "/System/Library/Frameworks/CoreServices.framework/Frameworks/LaunchServices.framework/Versions/Current/Support/lsregister"
PROCESS_COUNT_ESTIMATE_PER_SIMULATOR_INSTANCE = 100
DEVICE_CLASS_MAP = {
'x86_64': {
'iphone': 'iPhone 5s',
'iphone7': 'iPhone 7',
'ipad': 'iPad Air'
},
'x86': {
'iphone': 'iPhone 5',
'ipad': 'iPad Retina'
},
}
def __init__(self, host, port_name, **kwargs):
DarwinPort.__init__(self, host, port_name, **kwargs)
self._test_runner_process_constructor = SimulatorProcess
optional_device_class = self.get_option('device_class')
self._printing_cmd_line = False
self._device_class = optional_device_class if optional_device_class else self.DEFAULT_DEVICE_CLASS
_log.debug('IOSSimulatorPort _device_class is %s', self._device_class)
self._current_device = Simulator(host).current_device()
if not self._current_device:
self.set_option('dedicated_simulators', True)
if not self.get_option('dedicated_simulators'):
if self.get_option('child_processes') > 1:
_log.warn('Cannot have more than one child process when using a running simulator. Setting child_processes to 1.')
self.set_option('child_processes', 1)
def driver_name(self):
if self.get_option('driver_name'):
return self.get_option('driver_name')
if self.get_option('webkit_test_runner'):
return 'WebKitTestRunnerApp.app'
return 'DumpRenderTree.app'
def driver_cmd_line_for_logging(self):
# Avoid spinning up devices just for logging the commandline.
self._printing_cmd_line = True
result = super(IOSSimulatorPort, self).driver_cmd_line_for_logging()
self._printing_cmd_line = False
return result
@property
@memoized
def simulator_runtime(self):
runtime_identifier = self.get_option('runtime')
if runtime_identifier:
runtime = Runtime.from_identifier(runtime_identifier)
else:
runtime = Runtime.from_version_string(self.host.platform.xcode_sdk_version('iphonesimulator'))
return runtime
def simulator_device_type(self):
device_type_identifier = self.get_option('device_type')
if device_type_identifier:
_log.debug('simulator_device_type for device identifier %s', device_type_identifier)
device_type = DeviceType.from_identifier(device_type_identifier)
else:
_log.debug('simulator_device_type for device %s', self._device_class)
device_name = self.DEVICE_CLASS_MAP[self.architecture()][self._device_class]
if not device_name:
                raise Exception('Failed to find device for architecture {} and device class {}'.format(self.architecture(), self._device_class))
device_type = DeviceType.from_name(device_name)
return device_type
@memoized
def child_processes(self):
return int(self.get_option('child_processes'))
@memoized
def default_child_processes(self):
"""Return the number of Simulators instances to use for this port."""
best_child_process_count_for_cpu = self._executive.cpu_count() / 2
system_process_count_limit = int(subprocess.check_output(["ulimit", "-u"]).strip())
current_process_count = len(subprocess.check_output(["ps", "aux"]).strip().split('\n'))
_log.debug('Process limit: %d, current #processes: %d' % (system_process_count_limit, current_process_count))
maximum_simulator_count_on_this_system = (system_process_count_limit - current_process_count) // self.PROCESS_COUNT_ESTIMATE_PER_SIMULATOR_INSTANCE
# FIXME: We should also take into account the available RAM.
if (maximum_simulator_count_on_this_system < best_child_process_count_for_cpu):
_log.warn("This machine could support %s simulators, but is only configured for %s."
% (best_child_process_count_for_cpu, maximum_simulator_count_on_this_system))
_log.warn('Please see <https://trac.webkit.org/wiki/IncreasingKernelLimits>.')
if maximum_simulator_count_on_this_system == 0:
maximum_simulator_count_on_this_system = 1
return min(maximum_simulator_count_on_this_system, best_child_process_count_for_cpu)
def _get_crash_log(self, name, pid, stdout, stderr, newer_than, time_fn=time.time, sleep_fn=time.sleep, wait_for_log=True):
time_fn = time_fn or time.time
sleep_fn = sleep_fn or time.sleep
# FIXME: We should collect the actual crash log for DumpRenderTree.app because it includes more
# information (e.g. exception codes) than is available in the stack trace written to standard error.
stderr_lines = []
crashed_subprocess_name_and_pid = None # e.g. ('DumpRenderTree.app', 1234)
for line in (stderr or '').splitlines():
if not crashed_subprocess_name_and_pid:
match = self.SUBPROCESS_CRASH_REGEX.match(line)
if match:
crashed_subprocess_name_and_pid = (match.group('subprocess_name'), int(match.group('subprocess_pid')))
continue
stderr_lines.append(line)
if crashed_subprocess_name_and_pid:
return self._get_crash_log(crashed_subprocess_name_and_pid[0], crashed_subprocess_name_and_pid[1], stdout,
'\n'.join(stderr_lines), newer_than, time_fn, sleep_fn, wait_for_log)
# App crashed
_log.debug('looking for crash log for %s:%s' % (name, str(pid)))
crash_log = ''
crash_logs = CrashLogs(self.host)
now = time_fn()
deadline = now + 5 * int(self.get_option('child_processes', 1))
while not crash_log and now <= deadline:
crash_log = crash_logs.find_newest_log(name, pid, include_errors=True, newer_than=newer_than)
if not wait_for_log:
break
if not crash_log or not [line for line in crash_log.splitlines() if not line.startswith('ERROR')]:
sleep_fn(0.1)
now = time_fn()
if not crash_log:
return stderr, None
return stderr, crash_log
def _build_driver_flags(self):
archs = ['ARCHS=i386'] if self.architecture() == 'x86' else []
sdk = ['--sdk', 'iphonesimulator']
return archs + sdk
def _generate_all_test_configurations(self):
configurations = []
for build_type in self.ALL_BUILD_TYPES:
for architecture in self.ARCHITECTURES:
configurations.append(TestConfiguration(version=self._version, architecture=architecture, build_type=build_type))
return configurations
def default_baseline_search_path(self):
if self.get_option('webkit_test_runner'):
fallback_names = [self._wk2_port_name()] + [self.port_name] + ['wk2']
else:
fallback_names = [self.port_name + '-wk1'] + [self.port_name]
return map(self._webkit_baseline_path, fallback_names)
def _set_device_class(self, device_class):
self._device_class = device_class if device_class else self.DEFAULT_DEVICE_CLASS
def _create_simulators(self):
if (self.default_child_processes() < self.child_processes()):
_log.warn('You have specified very high value({0}) for --child-processes'.format(self.child_processes()))
_log.warn('maximum child-processes which can be supported on this system are: {0}'.format(self.default_child_processes()))
_log.warn('This is very likely to fail.')
if self._using_dedicated_simulators():
self._createSimulatorApps()
for i in xrange(self.child_processes()):
self._create_device(i)
for i in xrange(self.child_processes()):
device_udid = self._testing_device(i).udid
Simulator.wait_until_device_is_in_state(device_udid, Simulator.DeviceState.SHUTDOWN)
Simulator.reset_device(device_udid)
else:
assert(self._current_device)
if self._current_device.name != self.simulator_device_type().name:
_log.warn("Expected simulator of type '" + self.simulator_device_type().name + "' but found simulator of type '" + self._current_device.name + "'")
_log.warn('The next block of tests may fail due to device mis-match')
def setup_test_run(self, device_class=None):
mac_os_version = self.host.platform.os_version
self._set_device_class(device_class)
_log.debug('')
_log.debug('setup_test_run for %s', self._device_class)
self._create_simulators()
if not self._using_dedicated_simulators():
return
for i in xrange(self.child_processes()):
device_udid = self._testing_device(i).udid
_log.debug('testing device %s has udid %s', i, device_udid)
# FIXME: <rdar://problem/20916140> Switch to using CoreSimulator.framework for launching and quitting iOS Simulator
self._executive.run_command([
'open', '-g', '-b', self.SIMULATOR_BUNDLE_ID + str(i),
'--args', '-CurrentDeviceUDID', device_udid])
if mac_os_version in ['elcapitan', 'yosemite', 'mavericks']:
time.sleep(2.5)
_log.info('Waiting for all iOS Simulators to finish booting.')
for i in xrange(self.child_processes()):
Simulator.wait_until_device_is_booted(self._testing_device(i).udid)
def _quit_ios_simulator(self):
if not self._using_dedicated_simulators():
return
_log.debug("_quit_ios_simulator killing all Simulator processes")
# FIXME: We should kill only the Simulators we started.
subprocess.call(["killall", "-9", "-m", "Simulator"])
def clean_up_test_run(self):
super(IOSSimulatorPort, self).clean_up_test_run()
_log.debug("clean_up_test_run")
self._quit_ios_simulator()
fifos = [path for path in os.listdir('/tmp') if re.search('org.webkit.(DumpRenderTree|WebKitTestRunner).*_(IN|OUT|ERROR)', path)]
for fifo in fifos:
try:
os.remove(os.path.join('/tmp', fifo))
except OSError:
_log.warning('Unable to remove ' + fifo)
pass
if not self._using_dedicated_simulators():
return
for i in xrange(self.child_processes()):
simulator_path = self.get_simulator_path(i)
device_udid = self._testing_device(i).udid
self._remove_device(i)
if not os.path.exists(simulator_path):
continue
try:
self._executive.run_command([self.LSREGISTER_PATH, "-u", simulator_path])
_log.debug('rmtree %s', simulator_path)
self._filesystem.rmtree(simulator_path)
logs_path = self._filesystem.join(self._filesystem.expanduser("~"), "Library/Logs/CoreSimulator/", device_udid)
_log.debug('rmtree %s', logs_path)
self._filesystem.rmtree(logs_path)
saved_state_path = self._filesystem.join(self._filesystem.expanduser("~"), "Library/Saved Application State/", self.SIMULATOR_BUNDLE_ID + str(i) + ".savedState")
_log.debug('rmtree %s', saved_state_path)
self._filesystem.rmtree(saved_state_path)
except:
_log.warning('Unable to remove Simulator' + str(i))
def setup_environ_for_server(self, server_name=None):
_log.debug("setup_environ_for_server")
env = super(IOSSimulatorPort, self).setup_environ_for_server(server_name)
if server_name == self.driver_name():
if self.get_option('leaks'):
env['MallocStackLogging'] = '1'
env['__XPC_MallocStackLogging'] = '1'
env['MallocScribble'] = '1'
env['__XPC_MallocScribble'] = '1'
if self.get_option('guard_malloc'):
self._append_value_colon_separated(env, 'DYLD_INSERT_LIBRARIES', '/usr/lib/libgmalloc.dylib')
self._append_value_colon_separated(env, '__XPC_DYLD_INSERT_LIBRARIES', '/usr/lib/libgmalloc.dylib')
env['XML_CATALOG_FILES'] = '' # work around missing /etc/catalog <rdar://problem/4292995>
return env
def operating_system(self):
return 'ios-simulator'
def check_sys_deps(self, needs_http):
if not self.simulator_runtime.available:
_log.error('The iOS Simulator runtime with identifier "{0}" cannot be used because it is unavailable.'.format(self.simulator_runtime.identifier))
return False
return super(IOSSimulatorPort, self).check_sys_deps(needs_http)
SUBPROCESS_CRASH_REGEX = re.compile('#CRASHED - (?P<subprocess_name>\S+) \(pid (?P<subprocess_pid>\d+)\)')
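    # A stderr line this regex is meant to match looks like (illustrative):
    #   #CRASHED - DumpRenderTree.app (pid 1234)
    # as consumed by _get_crash_log() above.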
def _using_dedicated_simulators(self):
return self.get_option('dedicated_simulators')
def _create_device(self, number):
return Simulator.create_device(number, self.simulator_device_type(), self.simulator_runtime)
def _remove_device(self, number):
Simulator.remove_device(number)
def _testing_device(self, number):
return Simulator.device_number(number)
# FIXME: This is only exposed so that SimulatorProcess can use it.
def device_id_for_worker_number(self, number):
if self._printing_cmd_line:
return '<dummy id>'
if self._using_dedicated_simulators():
return self._testing_device(number).udid
return self._current_device.udid
def get_simulator_path(self, suffix=""):
return os.path.join(self.SIMULATOR_DIRECTORY, "Simulator" + str(suffix) + ".app")
def diff_image(self, expected_contents, actual_contents, tolerance=None):
if not actual_contents and not expected_contents:
return (None, 0, None)
if not actual_contents or not expected_contents:
return (True, 0, None)
if not self._image_differ:
self._image_differ = image_diff.IOSSimulatorImageDiffer(self)
self.set_option_default('tolerance', 0.1)
if tolerance is None:
tolerance = self.get_option('tolerance')
return self._image_differ.diff_image(expected_contents, actual_contents, tolerance)
def reset_preferences(self):
_log.debug("reset_preferences")
self._quit_ios_simulator()
# Maybe this should delete all devices that we've created?
def nm_command(self):
return self.xcrun_find('nm')
@property
@memoized
def developer_dir(self):
return self._executive.run_command(['xcode-select', '--print-path']).rstrip()
def logging_patterns_to_strip(self):
return []
def stderr_patterns_to_strip(self):
return []
def _createSimulatorApps(self):
for i in xrange(self.child_processes()):
self._createSimulatorApp(i)
def _createSimulatorApp(self, suffix):
destination = self.get_simulator_path(suffix)
_log.info("Creating app: " + destination)
if os.path.exists(destination):
shutil.rmtree(destination, ignore_errors=True)
simulator_app_path = self.developer_dir + "/Applications/Simulator.app"
shutil.copytree(simulator_app_path, destination)
# Update app's package-name inside plist and re-code-sign it
plist_path = destination + "/Contents/Info.plist"
command = "Set CFBundleIdentifier com.apple.iphonesimulator" + str(suffix)
subprocess.check_output(["/usr/libexec/PlistBuddy", "-c", command, plist_path])
subprocess.check_output(["install_name_tool", "-add_rpath", self.developer_dir + "/Library/PrivateFrameworks/", destination + "/Contents/MacOS/Simulator"])
subprocess.check_output(["install_name_tool", "-add_rpath", self.developer_dir + "/../Frameworks/", destination + "/Contents/MacOS/Simulator"])
subprocess.check_output(["codesign", "-fs", "-", destination])
subprocess.check_output([self.LSREGISTER_PATH, "-f", destination])
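# Hedged illustration (not part of the original file): each worker gets its
# own re-signed copy of Simulator.app with a distinct bundle identifier, so
# several simulators can run side by side. Roughly:
#
#     port.get_simulator_path(0)  # -> <SIMULATOR_DIRECTORY>/Simulator0.app
#     port.get_simulator_path(1)  # -> <SIMULATOR_DIRECTORY>/Simulator1.app
#     # each registered and signed with bundle id com.apple.iphonesimulator<i>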
| Debian/openjfx | modules/web/src/main/native/Tools/Scripts/webkitpy/port/ios.py | Python | gpl-2.0 | 19,676 |
from pyx import *
text.preamble(r"\parindent0pt")
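# This demo marks the valign.* anchor points of a scaled parbox on the left
# and the parbox.* baseline choices on the right.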
c = canvas.canvas()
t = c.text(0, 0, r"spam \& eggs", [trafo.scale(6), text.parbox(1.2, baseline=text.parbox.top)])
t2 = text.text(0, 0, "eggs", [trafo.scale(6)])
b, b2 = t.bbox(), t2.bbox()
c.stroke(t.path(), [style.linewidth.THin])
c.stroke(path.line(-0.3, b.top(), -0.1, b.top()), [deco.earrow.Small])
c.text(-0.5, b.top(), "valign.top", [text.vshift.mathaxis, text.halign.right])
c.stroke(path.line(-0.3, 0.5*(b.top()+b.bottom()), -0.1, 0.5*(b.top()+b.bottom())), [deco.earrow.Small])
c.text(-0.5, 0.5*(b.top()+b.bottom()), "valign.middle", [text.vshift.mathaxis, text.halign.right])
c.stroke(path.line(-0.3, b.bottom(), -0.1, b.bottom()), [deco.earrow.Small])
c.text(-0.5, b.bottom(), "valign.bottom", [text.vshift.mathaxis, text.halign.right])
c.stroke(path.line(0, 0, 7.2, 0))
c.stroke(path.line(7.3, 0, 7.5, 0), [deco.barrow.Small])
c.text(7.7, 0, "parbox.top", [text.vshift.mathaxis])
c.stroke(path.line(7.3, 0.5*(b.bottom()-b2.bottom()), 7.5, 0.5*(b.bottom()-b2.bottom())), [deco.barrow.Small])
c.text(7.7, 0.5*(b.bottom()-b2.bottom()), "parbox.middle", [text.vshift.mathaxis])
c.stroke(path.line(0, b.bottom()-b2.bottom(), 7.2, b.bottom()-b2.bottom()))
c.stroke(path.line(7.3, b.bottom()-b2.bottom(), 7.5, b.bottom()-b2.bottom()), [deco.barrow.Small])
c.text(7.7, b.bottom()-b2.bottom(), "parbox.bottom", [text.vshift.mathaxis])
c.writePDFfile()
| mjg/PyX-svn | manual/textvalign.py | Python | gpl-2.0 | 1,409 |
# common.py - common code for the convert extension
#
# Copyright 2005-2009 Matt Mackall <[email protected]> and others
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
import base64, errno, subprocess, os, datetime, re
import cPickle as pickle
from mercurial import util
from mercurial.i18n import _
propertycache = util.propertycache
def encodeargs(args):
def encodearg(s):
lines = base64.encodestring(s)
lines = [l.splitlines()[0] for l in lines]
return ''.join(lines)
s = pickle.dumps(args)
return encodearg(s)
def decodeargs(s):
s = base64.decodestring(s)
return pickle.loads(s)
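# Illustrative round trip (not in the original module): encodeargs pickles a
# value and strips the newlines from its base64 form so it fits on a single
# command line; decodeargs reverses both steps.
#
#     args = ['rev', 42, {'nested': True}]
#     assert decodeargs(encodeargs(args)) == args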
class MissingTool(Exception):
pass
def checktool(exe, name=None, abort=True):
name = name or exe
if not util.findexe(exe):
if abort:
exc = util.Abort
else:
exc = MissingTool
raise exc(_('cannot find required "%s" tool') % name)
class NoRepo(Exception):
pass
SKIPREV = 'SKIP'
class commit(object):
def __init__(self, author, date, desc, parents, branch=None, rev=None,
extra={}, sortkey=None):
self.author = author or 'unknown'
self.date = date or '0 0'
self.desc = desc
self.parents = parents
self.branch = branch
self.rev = rev
self.extra = extra
self.sortkey = sortkey
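# Hedged example (values are made up): commit is a plain value holder that
# converter sources fill in, e.g.
#
#     c = commit(author='Jane Doe <jane@example.com>', date='1440000000 0',
#                desc='import initial revision', parents=[], branch='default')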
class converter_source(object):
"""Conversion source interface"""
def __init__(self, ui, path=None, rev=None):
"""Initialize conversion source (or raise NoRepo("message")
exception if path is not a valid repository)"""
self.ui = ui
self.path = path
self.rev = rev
self.encoding = 'utf-8'
def checkhexformat(self, revstr, mapname='splicemap'):
""" fails if revstr is not a 40 byte hex. mercurial and git both uses
such format for their revision numbering
"""
if not re.match(r'[0-9a-fA-F]{40,40}$', revstr):
raise util.Abort(_('%s entry %s is not a valid revision'
' identifier') % (mapname, revstr))
def before(self):
pass
def after(self):
pass
def setrevmap(self, revmap):
"""set the map of already-converted revisions"""
pass
def getheads(self):
"""Return a list of this repository's heads"""
raise NotImplementedError
def getfile(self, name, rev):
"""Return a pair (data, mode) where data is the file content
as a string and mode is one of '', 'x' or 'l'. rev is the
identifier returned by a previous call to getchanges().
Data is None if file is missing/deleted in rev.
"""
raise NotImplementedError
def getchanges(self, version, full):
"""Returns a tuple of (files, copies, cleanp2).
files is a sorted list of (filename, id) tuples for all files
changed between version and its first parent returned by
getcommit(). If full, all files in that revision are returned.
id is the source revision id of the file.
copies is a dictionary of dest: source
cleanp2 is the set of filenames that are clean against p2.
(Files that are clean against p1 are already not in files (unless
full). This makes it possible to handle p2 clean files similarly.)
"""
raise NotImplementedError
def getcommit(self, version):
"""Return the commit object for version"""
raise NotImplementedError
def numcommits(self):
"""Return the number of commits in this source.
If unknown, return None.
"""
return None
def gettags(self):
"""Return the tags as a dictionary of name: revision
Tag names must be UTF-8 strings.
"""
raise NotImplementedError
def recode(self, s, encoding=None):
if not encoding:
encoding = self.encoding or 'utf-8'
if isinstance(s, unicode):
return s.encode("utf-8")
try:
return s.decode(encoding).encode("utf-8")
except UnicodeError:
try:
return s.decode("latin-1").encode("utf-8")
except UnicodeError:
return s.decode(encoding, "replace").encode("utf-8")
def getchangedfiles(self, rev, i):
"""Return the files changed by rev compared to parent[i].
i is an index selecting one of the parents of rev. The return
value should be the list of files that are different in rev and
this parent.
If rev has no parents, i is None.
This function is only needed to support --filemap
"""
raise NotImplementedError
def converted(self, rev, sinkrev):
'''Notify the source that a revision has been converted.'''
pass
def hasnativeorder(self):
"""Return true if this source has a meaningful, native revision
order. For instance, Mercurial revisions are stored sequentially
while there is no such global ordering with Darcs.
"""
return False
def hasnativeclose(self):
"""Return true if this source has ability to close branch.
"""
return False
def lookuprev(self, rev):
"""If rev is a meaningful revision reference in source, return
the referenced identifier in the same format used by getcommit().
return None otherwise.
"""
return None
def getbookmarks(self):
"""Return the bookmarks as a dictionary of name: revision
Bookmark names are to be UTF-8 strings.
"""
return {}
def checkrevformat(self, revstr, mapname='splicemap'):
"""revstr is a string that describes a revision in the given
source control system. Return true if revstr has correct
format.
"""
return True
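# A minimal sketch of a converter_source subclass (the class and its single
# fabricated revision are hypothetical, not a real backend):
#
#     class singlerev_source(converter_source):
#         def getheads(self):
#             return ['0' * 40]
#         def getcommit(self, version):
#             return commit(author=None, date=None, desc='only revision',
#                           parents=[], rev=version)
#         def getchanges(self, version, full):
#             return [], {}, set()
#         def getfile(self, name, rev):
#             return None, ''  # no file data
#         def gettags(self):
#             return {}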
class converter_sink(object):
"""Conversion sink (target) interface"""
def __init__(self, ui, path):
"""Initialize conversion sink (or raise NoRepo("message")
exception if path is not a valid repository)
created is a list of paths to remove if a fatal error occurs
later"""
self.ui = ui
self.path = path
self.created = []
def revmapfile(self):
"""Path to a file that will contain lines
source_rev_id sink_rev_id
mapping equivalent revision identifiers for each system."""
raise NotImplementedError
def authorfile(self):
"""Path to a file that will contain lines
srcauthor=dstauthor
mapping equivalent authors identifiers for each system."""
return None
def putcommit(self, files, copies, parents, commit, source, revmap, full,
cleanp2):
"""Create a revision with all changed files listed in 'files'
and having listed parents. 'commit' is a commit object
containing at a minimum the author, date, and message for this
changeset. 'files' is a list of (path, version) tuples,
'copies' is a dictionary mapping destinations to sources,
'source' is the source repository, and 'revmap' is a mapfile
of source revisions to converted revisions. Only getfile() and
lookuprev() should be called on 'source'. 'full' means that 'files'
is complete and all other files should be removed.
'cleanp2' is a set of the filenames that are unchanged from p2
(only in the common merge case where there are two parents).
Note that the sink repository is not told to update itself to
a particular revision (or even what that revision would be)
before it receives the file data.
"""
raise NotImplementedError
def puttags(self, tags):
"""Put tags into sink.
tags: {tagname: sink_rev_id, ...} where tagname is a UTF-8 string.
Return a pair (tag_revision, tag_parent_revision), or (None, None)
if nothing was changed.
"""
raise NotImplementedError
def setbranch(self, branch, pbranches):
"""Set the current branch name. Called before the first putcommit
on the branch.
branch: branch name for subsequent commits
pbranches: (converted parent revision, parent branch) tuples"""
pass
def setfilemapmode(self, active):
"""Tell the destination that we're using a filemap
Some converter_sources (svn in particular) can claim that a file
was changed in a revision, even if there was no change. This method
tells the destination that we're using a filemap and that it should
filter empty revisions.
"""
pass
def before(self):
pass
def after(self):
pass
def putbookmarks(self, bookmarks):
"""Put bookmarks into sink.
bookmarks: {bookmarkname: sink_rev_id, ...}
where bookmarkname is a UTF-8 string.
"""
pass
def hascommitfrommap(self, rev):
"""Return False if a rev mentioned in a filemap is known to not be
present."""
raise NotImplementedError
def hascommitforsplicemap(self, rev):
"""This method is for the special needs for splicemap handling and not
for general use. Returns True if the sink contains rev; aborts in some
special cases."""
raise NotImplementedError
class commandline(object):
def __init__(self, ui, command):
self.ui = ui
self.command = command
def prerun(self):
pass
def postrun(self):
pass
def _cmdline(self, cmd, *args, **kwargs):
cmdline = [self.command, cmd] + list(args)
for k, v in kwargs.iteritems():
if len(k) == 1:
cmdline.append('-' + k)
else:
cmdline.append('--' + k.replace('_', '-'))
try:
if len(k) == 1:
cmdline.append('' + v)
else:
cmdline[-1] += '=' + v
except TypeError:
pass
cmdline = [util.shellquote(arg) for arg in cmdline]
if not self.ui.debugflag:
cmdline += ['2>', os.devnull]
cmdline = ' '.join(cmdline)
return cmdline
def _run(self, cmd, *args, **kwargs):
def popen(cmdline):
p = subprocess.Popen(cmdline, shell=True, bufsize=-1,
close_fds=util.closefds,
stdout=subprocess.PIPE)
return p
return self._dorun(popen, cmd, *args, **kwargs)
def _run2(self, cmd, *args, **kwargs):
return self._dorun(util.popen2, cmd, *args, **kwargs)
def _dorun(self, openfunc, cmd, *args, **kwargs):
cmdline = self._cmdline(cmd, *args, **kwargs)
self.ui.debug('running: %s\n' % (cmdline,))
self.prerun()
try:
return openfunc(cmdline)
finally:
self.postrun()
def run(self, cmd, *args, **kwargs):
p = self._run(cmd, *args, **kwargs)
output = p.communicate()[0]
self.ui.debug(output)
return output, p.returncode
def runlines(self, cmd, *args, **kwargs):
p = self._run(cmd, *args, **kwargs)
output = p.stdout.readlines()
p.wait()
self.ui.debug(''.join(output))
return output, p.returncode
def checkexit(self, status, output=''):
if status:
if output:
self.ui.warn(_('%s error:\n') % self.command)
self.ui.warn(output)
msg = util.explainexit(status)[0]
raise util.Abort('%s %s' % (self.command, msg))
def run0(self, cmd, *args, **kwargs):
output, status = self.run(cmd, *args, **kwargs)
self.checkexit(status, output)
return output
def runlines0(self, cmd, *args, **kwargs):
output, status = self.runlines(cmd, *args, **kwargs)
self.checkexit(status, ''.join(output))
return output
@propertycache
def argmax(self):
# POSIX requires at least 4096 bytes for ARG_MAX
argmax = 4096
try:
argmax = os.sysconf("SC_ARG_MAX")
except (AttributeError, ValueError):
pass
# Windows shells impose their own limits on command line length,
# down to 2047 bytes for cmd.exe under Windows NT/2k and 2500 bytes
# for older 4nt.exe. See http://support.microsoft.com/kb/830473 for
# details about cmd.exe limitations.
# Since ARG_MAX is for command line _and_ environment, lower our limit
# (and keep Windows shells happy while doing this).
return argmax // 2 - 1
def _limit_arglist(self, arglist, cmd, *args, **kwargs):
cmdlen = len(self._cmdline(cmd, *args, **kwargs))
limit = self.argmax - cmdlen
bytes = 0
fl = []
for fn in arglist:
b = len(fn) + 3
if bytes + b < limit or len(fl) == 0:
fl.append(fn)
bytes += b
else:
yield fl
fl = [fn]
bytes = b
if fl:
yield fl
def xargs(self, arglist, cmd, *args, **kwargs):
for l in self._limit_arglist(arglist, cmd, *args, **kwargs):
self.run0(cmd, *(list(args) + l), **kwargs)
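# Rough sketch of the kwargs-to-flags mapping and the xargs chunking
# (command and arguments are illustrative):
#
#     cl = commandline(ui, 'svn')
#     cl._cmdline('log', 'trunk', v=None, limit='10')
#     # -> "svn log trunk -v --limit=10 2> /dev/null"
#     #    single-letter keys become -k, longer keys become --key=value,
#     #    and None values leave a bare flag (the TypeError is swallowed)
#     cl.xargs(many_paths, 'add')
#     # -> runs "svn add p1 p2 ..." repeatedly, chunking the list so every
#     #    invocation stays below the ARG_MAX-derived limit in argmax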
class mapfile(dict):
def __init__(self, ui, path):
super(mapfile, self).__init__()
self.ui = ui
self.path = path
self.fp = None
self.order = []
self._read()
def _read(self):
if not self.path:
return
try:
fp = open(self.path, 'r')
except IOError, err:
if err.errno != errno.ENOENT:
raise
return
for i, line in enumerate(fp):
line = line.splitlines()[0].rstrip()
if not line:
# Ignore blank lines
continue
try:
key, value = line.rsplit(' ', 1)
except ValueError:
raise util.Abort(
_('syntax error in %s(%d): key/value pair expected')
% (self.path, i + 1))
if key not in self:
self.order.append(key)
super(mapfile, self).__setitem__(key, value)
fp.close()
def __setitem__(self, key, value):
if self.fp is None:
try:
self.fp = open(self.path, 'a')
except IOError, err:
raise util.Abort(_('could not open map file %r: %s') %
(self.path, err.strerror))
self.fp.write('%s %s\n' % (key, value))
self.fp.flush()
super(mapfile, self).__setitem__(key, value)
def close(self):
if self.fp:
self.fp.close()
self.fp = None
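# Hedged usage sketch (the path is illustrative): mapfile behaves like an
# append-only persistent dict; every new key is written through to the
# backing file immediately.
#
#     m = mapfile(ui, '/tmp/revmap')
#     m['source-rev-id'] = 'sink-rev-id'   # also appended to /tmp/revmap
#     m.close()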
def makedatetimestamp(t):
"""Like util.makedate() but for time t instead of current time"""
delta = (datetime.datetime.utcfromtimestamp(t) -
datetime.datetime.fromtimestamp(t))
tz = delta.days * 86400 + delta.seconds
return t, tz
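# For example, on a host at UTC+2 (offset picked for illustration),
# makedatetimestamp(1440000000) returns (1440000000, -7200): the second
# element is the local zone offset in seconds west of UTC, as in
# util.makedate().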
| hekra01/mercurial | hgext/convert/common.py | Python | gpl-2.0 | 15,243 |
# -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2016 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Zenodo SIPStore API."""
from __future__ import absolute_import
import json
from flask import has_request_context, request
from flask_login import current_user
from invenio_db import db
from invenio_sipstore.models import SIP, RecordSIP, SIPFile
class ZenodoSIP(object):
"""API for creating Zenodo-specific SIPs."""
@staticmethod
def _build_agent_info():
"""Build the SIP agent info.
:returns: Agent information regarding the SIP.
:rtype: dict
"""
agent = dict()
if has_request_context() and request.remote_addr:
agent['ip_address'] = request.remote_addr
if current_user.is_authenticated and current_user.email:
agent['email'] = current_user.email
return agent
@classmethod
def create(cls, pid, record, create_sip_files=True, user_id=None,
agent=None):
"""Create a Zenodo SIP, from the PID and the Record.
Apart from the SIP itself, it also creates ``RecordSIP`` for the
SIP-PID-Record relationship, as well as ``SIPFile`` objects for each of
the files in the record.
Those objects are not returned by this function but can be fetched by
the corresponding SIP relationships 'record_sips' and 'sip_files'.
:param pid: PID of the published record ('recid').
:type pid: `invenio_pidstore.models.PersistentIdentifier`
:param record: Record for which the SIP should be created.
:type record: `invenio_records.api.Record`
:param create_sip_files: If True, the SIPFiles will be created.
:type create_sip_files: bool
:returns: A Zenodo-specific SIP object.
:rtype: ``invenio_sipstore.models.SIP``
"""
if not user_id:
user_id = (None if current_user.is_anonymous
else current_user.get_id())
if not agent:
agent = cls._build_agent_info()
with db.session.begin_nested():
sip = SIP.create('json', json.dumps(record.dumps()),
user_id=user_id, agent=agent)
recsip = RecordSIP(sip_id=sip.id, pid_id=pid.id)
db.session.add(recsip)
if record.files and create_sip_files:
for f in record.files:
sf = SIPFile(sip_id=sip.id, filepath=f.key,
file_id=f.file_id)
db.session.add(sf)
return sip
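# Hedged usage sketch: after publishing a record, a deposit workflow might
# archive it with
#
#     sip = ZenodoSIP.create(pid, record)
#     # sip.record_sips -> RecordSIP rows linking the SIP to the PID
#     # sip.sip_files -> one SIPFile per file attached to the record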
| lnielsen/zenodo | zenodo/modules/sipstore/api.py | Python | gpl-2.0 | 3,440 |