code stringlengths 22-1.05M | apis listlengths 1-3.31k | extract_api stringlengths 75-3.25M |
---|---|---|
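Each row below pairs a source file (code) with the fully qualified APIs it calls (apis) and per-call extraction records (extract_api). The extract_api cells read as plain Python literals, so a small sketch like the following can load them; the field meanings are inferred from the rows themselves and are an assumption, not a documented schema.
import ast

def parse_extract_api(cell: str):
    # Each record appears to be: ((call_start, call_end), qualified_api_name, call_name,
    # (args, kwargs), call_source, a second (start, end) span, a bool flag,
    # and the originating import statement -- meanings inferred from the rows below.
    return ast.literal_eval(cell)

# Abbreviated example in the same shape as the rows below:
cell = ("[((142, 148), 'enum.auto', 'auto', ([], {}), '()\\n', (146, 148), "
        "False, 'from enum import auto\\n')]")
for span, api, name, args, src, span2, flag, imp in parse_extract_api(cell):
    print(api, span)  # -> enum.auto (142, 148)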
# richard -- video index system
# Copyright (C) 2012, 2013, 2014, 2015 richard contributors. See AUTHORS.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls import patterns, url
urlpatterns = patterns(
'richard.playlists.views',
# playlists
url(r'^playlist/$',
'playlist_list', name='playlists-playlist-list'),
url(r'playlist/delete/?$',
'playlist_delete', name='playlists-playlist-delete'),
url(r'playlist/remove-video/?$',
'playlist_remove_video', name='playlists-playlist-remove-video'),
url(r'playlist/(?P<playlist_id>[0-9]+)/?$',
'playlist', name='playlists-playlist'),
)
|
[
"django.conf.urls.url"
] |
[((886, 953), 'django.conf.urls.url', 'url', (['"""^playlist/$"""', '"""playlist_list"""'], {'name': '"""playlists-playlist-list"""'}), "('^playlist/$', 'playlist_list', name='playlists-playlist-list')\n", (889, 953), False, 'from django.conf.urls import patterns, url\n'), ((968, 1046), 'django.conf.urls.url', 'url', (['"""playlist/delete/?$"""', '"""playlist_delete"""'], {'name': '"""playlists-playlist-delete"""'}), "('playlist/delete/?$', 'playlist_delete', name='playlists-playlist-delete')\n", (971, 1046), False, 'from django.conf.urls import patterns, url\n'), ((1061, 1162), 'django.conf.urls.url', 'url', (['"""playlist/remove-video/?$"""', '"""playlist_remove_video"""'], {'name': '"""playlists-playlist-remove-video"""'}), "('playlist/remove-video/?$', 'playlist_remove_video', name=\n 'playlists-playlist-remove-video')\n", (1064, 1162), False, 'from django.conf.urls import patterns, url\n'), ((1172, 1258), 'django.conf.urls.url', 'url', (['"""playlist/(?P<playlist_id>[0-9]+)/?$"""', '"""playlist"""'], {'name': '"""playlists-playlist"""'}), "('playlist/(?P<playlist_id>[0-9]+)/?$', 'playlist', name=\n 'playlists-playlist')\n", (1175, 1258), False, 'from django.conf.urls import patterns, url\n')]
|
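For reference: django.conf.urls.patterns() and string view references were removed in Django 1.10, so the URLconf above only runs on old Django releases. A sketch of the same routes in the post-2.0 style, assuming the view functions are importable callables from the module named in the original prefix, would look roughly like this:
from django.urls import re_path
from richard.playlists import views

urlpatterns = [
    re_path(r'^playlist/$', views.playlist_list,
            name='playlists-playlist-list'),
    re_path(r'playlist/delete/?$', views.playlist_delete,
            name='playlists-playlist-delete'),
    re_path(r'playlist/remove-video/?$', views.playlist_remove_video,
            name='playlists-playlist-remove-video'),
    re_path(r'playlist/(?P<playlist_id>[0-9]+)/?$', views.playlist,
            name='playlists-playlist'),
]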
import os
import sys
# train test1 test2 test3
def readtst(tstfn):
outlist = list()
with open(tstfn) as br:
for aline in br.readlines():
aline = aline.strip()
outlist.append(aline)
return outlist
def split_train_tests_xml(xmlpath, test1fn, test2fn, test3fn):
test1list = readtst(test1fn)
test2list = readtst(test2fn)
test3list = readtst(test3fn)
outtrainlist = list() # full path ".xml.simp" files
outt1list = list() # test 1, full path ".xml.simp" files
outt2list = list()
outt3list = list()
for afile in os.listdir(xmlpath):
if not afile.endswith('.xml.simp'):
continue
afile2 = xmlpath + '/' + afile
aid = afile.split('.')[0]
if aid in test1list:
outt1list.append(afile2)
elif aid in test2list:
outt2list.append(afile2)
elif aid in test3list:
outt3list.append(afile2)
else:
outtrainlist.append(afile2)
return outtrainlist, outt1list, outt2list, outt3list
def all_wavs(wavpath):
wavlist = list()
for afile in os.listdir(wavpath):
if not afile.endswith('.wav'):
continue
afile2 = wavpath + '/' + afile
wavlist.append(afile2)
return wavlist
def gen_text(xmllist, outpath):
# id \t text
# e.g., /workspace/asr/wenet/examples/csj/s0/data/xml/S11M1689.xml.simp
# ID = S11M1689_stime_etime
outtxtfn = os.path.join(outpath, 'text')
with open(outtxtfn, 'w') as bw:
for xmlfn in xmllist:
aid = xmlfn.split('/')[-1]
aid2 = aid.split('.')[0]
with open(xmlfn) as br:
for aline in br.readlines():
aline = aline.strip()
# stime \t etime \t text1 \t text2 \t text3 \t text4 \t text5
cols = aline.split('\t')
# TODO different between "< 7" and "< 4"? strange
# -> use "< 4", DO NOT use "< 7" !
if len(cols) < 4:
continue
stime = cols[0]
etime = cols[1]
atxt = cols[3].replace(' ', '')
afullid = '{}_{}_{}'.format(aid2, stime, etime)
aoutline = '{}\t{}\n'.format(afullid, atxt)
bw.write(aoutline)
def parse_xml_set(xmllist):
outset = set()
for xml in xmllist:
aid = xml.split('/')[-1]
aid2 = aid.split('.')[0]
outset.add(aid2)
return outset
def gen_wav_scp(xmllist, wavlist, outpath):
# xmlset = pure id set, alike 'S04F1228'
# can be from train, test1, test2, or test3
xmlset = parse_xml_set(xmllist)
outwavscpfn = os.path.join(outpath, 'wav.scp')
with open(outwavscpfn, 'w') as bw:
for wav in wavlist:
# wav is alike "/workspace/asr/wenet/examples/csj/s0/data
# /wav/S04F1228.wav_00458.875_00459.209.wav"
aid = wav.split('/')[-1]
cols = aid.split('_')
aid2 = cols[0].split('.')[0]
if aid2 not in xmlset:
continue
stime = cols[1]
etime = cols[2].replace('.wav', '')
afullid = '{}_{}_{}'.format(aid2, stime, etime)
wavabspath = os.path.abspath(wav)
aoutline = '{}\t{}\n'.format(afullid, wavabspath)
bw.write(aoutline)
def prep_text_wavscp(
xmlpath, wavpath, test1fn, test2fn, test3fn,
outtrainpath, out1path, out2path, out3path):
trainlist, t1list, t2list, t3list = split_train_tests_xml(
xmlpath,
test1fn,
test2fn,
test3fn)
wavlist = all_wavs(wavpath)
gen_text(trainlist, outtrainpath)
gen_text(t1list, out1path)
gen_text(t2list, out2path)
gen_text(t3list, out3path)
gen_wav_scp(trainlist, wavlist, outtrainpath)
gen_wav_scp(t1list, wavlist, out1path)
gen_wav_scp(t2list, wavlist, out2path)
gen_wav_scp(t3list, wavlist, out3path)
if __name__ == '__main__':
if len(sys.argv) < 10:
print(
"Usage: {}".format(sys.argv[0]) + "<xmlpath> " +
"<wavpath> <test1fn> <test2fn> <test3fn> " +
"<outtrainpath> <out1path> <out2path> <out3path>")
exit(1)
xmlpath = sys.argv[1]
wavpath = sys.argv[2]
test1fn = sys.argv[3]
test2fn = sys.argv[4]
test3fn = sys.argv[5]
outtrainpath = sys.argv[6]
out1path = sys.argv[7]
out2path = sys.argv[8]
out3path = sys.argv[9]
prep_text_wavscp(xmlpath, wavpath, test1fn,
test2fn, test3fn, outtrainpath,
out1path, out2path, out3path)
|
[
"os.path.abspath",
"os.path.join",
"os.listdir"
] |
[((589, 608), 'os.listdir', 'os.listdir', (['xmlpath'], {}), '(xmlpath)\n', (599, 608), False, 'import os\n'), ((1124, 1143), 'os.listdir', 'os.listdir', (['wavpath'], {}), '(wavpath)\n', (1134, 1143), False, 'import os\n'), ((1467, 1496), 'os.path.join', 'os.path.join', (['outpath', '"""text"""'], {}), "(outpath, 'text')\n", (1479, 1496), False, 'import os\n'), ((2757, 2789), 'os.path.join', 'os.path.join', (['outpath', '"""wav.scp"""'], {}), "(outpath, 'wav.scp')\n", (2769, 2789), False, 'import os\n'), ((3321, 3341), 'os.path.abspath', 'os.path.abspath', (['wav'], {}), '(wav)\n', (3336, 3341), False, 'import os\n')]
|
# Generated with GeneratorTorqueFault
#
from enum import Enum
from enum import auto
class GeneratorTorqueFault(Enum):
""""""
NONE = auto()
LOSS = auto()
BACKUP = auto()
def label(self):
if self == GeneratorTorqueFault.NONE:
return "No generator torque fault"
if self == GeneratorTorqueFault.LOSS:
return "Total loss of generator torque"
if self == GeneratorTorqueFault.BACKUP:
return "Backup power - torque follows scaled torque control"
|
[
"enum.auto"
] |
[((142, 148), 'enum.auto', 'auto', ([], {}), '()\n', (146, 148), False, 'from enum import auto\n'), ((160, 166), 'enum.auto', 'auto', ([], {}), '()\n', (164, 166), False, 'from enum import auto\n'), ((180, 186), 'enum.auto', 'auto', ([], {}), '()\n', (184, 186), False, 'from enum import auto\n')]
|
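A short usage sketch for the enum above, not part of the original sample: label() maps each member to its human-readable description.
for fault in GeneratorTorqueFault:
    print(fault.name, '->', fault.label())
# NONE -> No generator torque fault
# LOSS -> Total loss of generator torque
# BACKUP -> Backup power - torque follows scaled torque control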
import os
def test_net(dir_net='exp/dnn4_pretrain-dbn_dnn/final.nnet'):
flag = os.system('./local/nnet/test_wer.sh %s >/dev/null 2>&1 '%dir_net)#
assert flag == 0
os.system('bash show_dnn test > res.log')
content = open('res.log').read()
res = float(content.split()[1])
return res
def finetune_net(dir_net = 'exp/dnn4_pretrain-dbn_dnn/nnet.init',
exp_dir='exp/dnn4_pretrain-dbn_dnn', iters=16, lr=0.002,
momentum=0, l2_penalty=0, halve_every_k=2):
flag = os.system('./finetune_dnn.sh --dir %s --nnet-init %s --iters %d --learning-rate %f --momentum %f --l2-penalty %f --halve-every-k %d'%(exp_dir, dir_net, iters, lr, momentum, l2_penalty, halve_every_k))
return flag
|
[
"os.system"
] |
[((85, 152), 'os.system', 'os.system', (["('./local/nnet/test_wer.sh %s >/dev/null 2>&1 ' % dir_net)"], {}), "('./local/nnet/test_wer.sh %s >/dev/null 2>&1 ' % dir_net)\n", (94, 152), False, 'import os\n'), ((177, 218), 'os.system', 'os.system', (['"""bash show_dnn test > res.log"""'], {}), "('bash show_dnn test > res.log')\n", (186, 218), False, 'import os\n'), ((504, 716), 'os.system', 'os.system', (["('./finetune_dnn.sh --dir %s --nnet-init %s --iters %d --learning-rate %f --momentum %f --l2-penalty %f --halve-every-k %d'\n % (exp_dir, dir_net, iters, lr, momentum, l2_penalty, halve_every_k))"], {}), "(\n './finetune_dnn.sh --dir %s --nnet-init %s --iters %d --learning-rate %f --momentum %f --l2-penalty %f --halve-every-k %d'\n % (exp_dir, dir_net, iters, lr, momentum, l2_penalty, halve_every_k))\n", (513, 716), False, 'import os\n')]
|
from heuslertools.xrd import O2TSimulation
from heuslertools.xrd.materials import NiMnSb, InP
import xrayutilities as xu
import numpy as np
import matplotlib.pyplot as plt
##### LAYERSTACK #####
sub = xu.simpack.Layer(InP, np.inf)
lay1 = xu.simpack.Layer(NiMnSb, 400, relaxation=0.0)
layerstack = xu.simpack.PseudomorphicStack001('NiMnSb on InP', sub, lay1)
print(layerstack)
xrd = O2TSimulation(layerstack)
om, int = xrd.simulate_o2t(0, 0, 2, 0.4)
plt.figure()
plt.semilogy(om, int)
plt.show()
|
[
"matplotlib.pyplot.show",
"xrayutilities.simpack.Layer",
"heuslertools.xrd.O2TSimulation",
"xrayutilities.simpack.PseudomorphicStack001",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.semilogy"
] |
[((202, 231), 'xrayutilities.simpack.Layer', 'xu.simpack.Layer', (['InP', 'np.inf'], {}), '(InP, np.inf)\n', (218, 231), True, 'import xrayutilities as xu\n'), ((239, 284), 'xrayutilities.simpack.Layer', 'xu.simpack.Layer', (['NiMnSb', '(400)'], {'relaxation': '(0.0)'}), '(NiMnSb, 400, relaxation=0.0)\n', (255, 284), True, 'import xrayutilities as xu\n'), ((298, 358), 'xrayutilities.simpack.PseudomorphicStack001', 'xu.simpack.PseudomorphicStack001', (['"""NiMnSb on InP"""', 'sub', 'lay1'], {}), "('NiMnSb on InP', sub, lay1)\n", (330, 358), True, 'import xrayutilities as xu\n'), ((383, 408), 'heuslertools.xrd.O2TSimulation', 'O2TSimulation', (['layerstack'], {}), '(layerstack)\n', (396, 408), False, 'from heuslertools.xrd import O2TSimulation\n'), ((453, 465), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (463, 465), True, 'import matplotlib.pyplot as plt\n'), ((466, 487), 'matplotlib.pyplot.semilogy', 'plt.semilogy', (['om', 'int'], {}), '(om, int)\n', (478, 487), True, 'import matplotlib.pyplot as plt\n'), ((488, 498), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (496, 498), True, 'import matplotlib.pyplot as plt\n')]
|
from dungeonfeature import new_stairs_up
import dungeonfeature
import terrain
import tile
from dungeonlevel import DungeonLevel
def get_empty_tile_matrix(width, height):
return [[tile.Tile()
for x in range(width)]
for y in range(height)]
def unknown_level_map(width, height, depth):
tile_matrix = get_empty_tile_matrix(width, height)
dungeon_level = DungeonLevel(tile_matrix, depth)
for x in range(width):
for y in range(height):
tile_matrix[y][x] = tile.unknown_tile
return dungeon_level
def dungeon_level_from_lines(lines):
terrain_matrix = terrain_matrix_from_lines(lines)
dungeon_level = DungeonLevel(terrain_matrix, 1)
set_terrain_from_lines(dungeon_level, lines)
return dungeon_level
def dungeon_level_from_file(file_name):
lines = read_file(file_name)
return dungeon_level_from_lines(lines)
def terrain_matrix_from_lines(lines):
width = len(lines[0])
height = len(lines)
terrain_matrix = get_empty_tile_matrix(width, height)
return terrain_matrix
def set_terrain_from_lines(dungeon_level, lines):
for x in range(dungeon_level.width):
for y in range(dungeon_level.height):
features = char_to_terrain_and_features(lines[y][x])
for f in features:
f.mover.replace_move((x, y), dungeon_level)
def char_to_terrain_and_features(c):
if c == '#':
return [terrain.Wall()]
elif c == '+':
return [terrain.Door()]
elif c == '~':
return [terrain.Water()]
elif c == 'g':
return [terrain.GlassWall()]
elif c == '_':
return [terrain.Chasm()]
elif c == '>':
return [terrain.Floor(), new_stairs_up()]
elif c == 'p':
return [terrain.Floor(), dungeonfeature.new_plant()]
else:
return [terrain.Floor()]
def read_file(file_name):
f = open(file_name, "r")
data = f.readlines()
data = [line.strip() for line in data]
f.close()
return data
|
[
"terrain.Water",
"dungeonfeature.new_stairs_up",
"dungeonfeature.new_plant",
"terrain.GlassWall",
"terrain.Chasm",
"terrain.Floor",
"dungeonlevel.DungeonLevel",
"terrain.Door",
"tile.Tile",
"terrain.Wall"
] |
[((391, 423), 'dungeonlevel.DungeonLevel', 'DungeonLevel', (['tile_matrix', 'depth'], {}), '(tile_matrix, depth)\n', (403, 423), False, 'from dungeonlevel import DungeonLevel\n'), ((671, 702), 'dungeonlevel.DungeonLevel', 'DungeonLevel', (['terrain_matrix', '(1)'], {}), '(terrain_matrix, 1)\n', (683, 702), False, 'from dungeonlevel import DungeonLevel\n'), ((185, 196), 'tile.Tile', 'tile.Tile', ([], {}), '()\n', (194, 196), False, 'import tile\n'), ((1456, 1470), 'terrain.Wall', 'terrain.Wall', ([], {}), '()\n', (1468, 1470), False, 'import terrain\n'), ((1507, 1521), 'terrain.Door', 'terrain.Door', ([], {}), '()\n', (1519, 1521), False, 'import terrain\n'), ((1558, 1573), 'terrain.Water', 'terrain.Water', ([], {}), '()\n', (1571, 1573), False, 'import terrain\n'), ((1610, 1629), 'terrain.GlassWall', 'terrain.GlassWall', ([], {}), '()\n', (1627, 1629), False, 'import terrain\n'), ((1666, 1681), 'terrain.Chasm', 'terrain.Chasm', ([], {}), '()\n', (1679, 1681), False, 'import terrain\n'), ((1718, 1733), 'terrain.Floor', 'terrain.Floor', ([], {}), '()\n', (1731, 1733), False, 'import terrain\n'), ((1735, 1750), 'dungeonfeature.new_stairs_up', 'new_stairs_up', ([], {}), '()\n', (1748, 1750), False, 'from dungeonfeature import new_stairs_up\n'), ((1787, 1802), 'terrain.Floor', 'terrain.Floor', ([], {}), '()\n', (1800, 1802), False, 'import terrain\n'), ((1804, 1830), 'dungeonfeature.new_plant', 'dungeonfeature.new_plant', ([], {}), '()\n', (1828, 1830), False, 'import dungeonfeature\n'), ((1858, 1873), 'terrain.Floor', 'terrain.Floor', ([], {}), '()\n', (1871, 1873), False, 'import terrain\n')]
|
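A usage sketch for the helpers above, not part of the original sample and assuming the project's terrain, tile, dungeonfeature and dungeonlevel modules are importable: a level can be built from an ASCII map where '#' is a wall, '>' is floor with stairs up, and any other character is plain floor.
ascii_map = ["#####",
             "#..>#",
             "#####"]
level = dungeon_level_from_lines(ascii_map)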
import numpy as np
import pandas as pd
from .nlp_utils.classifier import NaiveBayesClassifier
from .nlp_utils.tokenizer import NGramTokenizer
DATASET_PATH = 'spam_filter/data/spam.csv'
def preprocess_data():
dataset = pd.read_csv(DATASET_PATH, encoding='latin-1')
dataset.rename(columns={'v1': 'labels', 'v2': 'message'}, inplace=True)
dataset['label'] = dataset['labels'].map({'ham': 0, 'spam': 1})
dataset.drop(['labels'], axis=1, inplace=True)
train_indices, test_indices = [], []
for i in range(dataset.shape[0]):
if np.random.uniform(0, 1) < 0.75:
train_indices += [i]
else:
test_indices += [i]
train_dataset = dataset.loc[train_indices]
test_dataset = dataset.loc[test_indices]
train_dataset.reset_index(inplace=True)
train_dataset.drop(['index'], axis=1, inplace=True)
test_dataset.reset_index(inplace=True)
test_dataset.drop(['index'], axis=1, inplace=True)
return train_dataset, test_dataset
def metrics(labels, predictions):
true_pos, true_neg, false_pos, false_neg = 0, 0, 0, 0
for i in range(len(labels)):
true_pos += int(labels[i] == 1 and predictions[i] == 1)
true_neg += int(labels[i] == 0 and predictions[i] == 0)
false_pos += int(labels[i] == 0 and predictions[i] == 1)
false_neg += int(labels[i] == 1 and predictions[i] == 0)
precision = true_pos / (true_pos + false_pos)
recall = true_pos / (true_pos + false_neg)
f_score = 2 * precision * recall / (precision + recall)
accuracy = (true_pos + true_neg) / (true_pos + true_neg + false_pos + false_neg)
print("Precision: ", precision)
print("Recall: ", recall)
print("F-score: ", f_score)
print("Accuracy: ", accuracy)
if __name__ == '__main__':
train_dataset, test_dataset = preprocess_data()
classifier = NaiveBayesClassifier()
classifier.train(train_dataset)
prediction_list = classifier.predict(test_dataset['message'])
metrics(test_dataset['label'], prediction_list)
|
[
"pandas.read_csv",
"numpy.random.uniform"
] |
[((226, 271), 'pandas.read_csv', 'pd.read_csv', (['DATASET_PATH'], {'encoding': '"""latin-1"""'}), "(DATASET_PATH, encoding='latin-1')\n", (237, 271), True, 'import pandas as pd\n'), ((558, 581), 'numpy.random.uniform', 'np.random.uniform', (['(0)', '(1)'], {}), '(0, 1)\n', (575, 581), True, 'import numpy as np\n')]
|
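A quick worked example for the metrics() function above: with labels [1, 1, 0, 0, 1] and predictions [1, 0, 0, 1, 1] there are 2 true positives, 1 true negative, 1 false positive and 1 false negative, so precision = recall = F-score = 2/3 and accuracy = 3/5.
metrics([1, 1, 0, 0, 1], [1, 0, 0, 1, 1])
# Precision:  0.666...
# Recall:     0.666...
# F-score:    0.666...
# Accuracy:   0.6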
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import pypandoc
import os
print("Pandoc", pypandoc.get_pandoc_version())
base = "../../Note/"
for r, ds, fs in os.walk(base):
for f in fs:
if f.endswith(".md"):
src = r+"/"+f
dst = src.replace(".md", ".pdf")
print(src, "->", dst)
print(pypandoc.convert_file(
src, "pdf", outputfile=dst, format="gfm", encoding="utf-8",
extra_args=["-V", "CJKmainfont=Microsoft YaHei", "--pdf-engine=xelatex"]))
|
[
"os.walk",
"pypandoc.convert_file",
"pypandoc.get_pandoc_version"
] |
[((158, 171), 'os.walk', 'os.walk', (['base'], {}), '(base)\n', (165, 171), False, 'import os\n'), ((87, 116), 'pypandoc.get_pandoc_version', 'pypandoc.get_pandoc_version', ([], {}), '()\n', (114, 116), False, 'import pypandoc\n'), ((343, 507), 'pypandoc.convert_file', 'pypandoc.convert_file', (['src', '"""pdf"""'], {'outputfile': 'dst', 'format': '"""gfm"""', 'encoding': '"""utf-8"""', 'extra_args': "['-V', 'CJKmainfont=Microsoft YaHei', '--pdf-engine=xelatex']"}), "(src, 'pdf', outputfile=dst, format='gfm', encoding=\n 'utf-8', extra_args=['-V', 'CJKmainfont=Microsoft YaHei',\n '--pdf-engine=xelatex'])\n", (364, 507), False, 'import pypandoc\n')]
|
from mysql.connector.errors import OperationalError
import pytest
from test.helpers import db_fixture, execute
@pytest.fixture(scope="session")
def warehouse():
return db_fixture("test_warehouse")
@pytest.fixture
def empty_warehouse(warehouse):
cursor = warehouse.cursor()
cursor.execute("SHOW TABLES")
tables = [_[0] for _ in cursor]
cursor.close()
execute(warehouse, "SET foreign_key_checks = 0")
for table in tables:
try:
execute(warehouse, "DROP TABLE {}".format(table))
except OperationalError:
execute(warehouse, "DROP VIEW {}".format(table))
execute(warehouse, "SET foreign_key_checks = 1")
warehouse.commit()
return warehouse
|
[
"test.helpers.db_fixture",
"pytest.fixture",
"test.helpers.execute"
] |
[((115, 146), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (129, 146), False, 'import pytest\n'), ((175, 203), 'test.helpers.db_fixture', 'db_fixture', (['"""test_warehouse"""'], {}), "('test_warehouse')\n", (185, 203), False, 'from test.helpers import db_fixture, execute\n'), ((380, 428), 'test.helpers.execute', 'execute', (['warehouse', '"""SET foreign_key_checks = 0"""'], {}), "(warehouse, 'SET foreign_key_checks = 0')\n", (387, 428), False, 'from test.helpers import db_fixture, execute\n'), ((628, 676), 'test.helpers.execute', 'execute', (['warehouse', '"""SET foreign_key_checks = 1"""'], {}), "(warehouse, 'SET foreign_key_checks = 1')\n", (635, 676), False, 'from test.helpers import db_fixture, execute\n')]
|
from .import views
from django.urls import path
from django.conf import settings
from django.conf.urls.static import static
from . views import *
# Application Views
urlpatterns = [
path('', views.user_login, name='login'),
path('logout/', views.user_logout, name='logout'),
path('signup/', views.user_signup, name='signup'),
path('profile', profile, name='profile'),
path('homepage', homepage, name='homepage'),
path('profile/update/', views.update_profile, name='update_profile'),
path('user/<int:id>/', views.user_profile, name='user_profile'),
path('like/<int:id>/', views.like_image, name='like_image'),
path('comment/add', views.save_comment, name='add_comment'),
path('search/', views.search_images, name='search_images'),
path('upload/add/', views.save_image, name='save.image'),
path('picture/<int:id>/', views.image_comments, name='single_image'),
path('follow/<int:pk>',views.FollowView,name="follow")
]
if settings.DEBUG:
urlpatterns+= static(settings.MEDIA_URL, document_root = settings.MEDIA_ROOT)
|
[
"django.conf.urls.static.static",
"django.urls.path"
] |
[((188, 228), 'django.urls.path', 'path', (['""""""', 'views.user_login'], {'name': '"""login"""'}), "('', views.user_login, name='login')\n", (192, 228), False, 'from django.urls import path\n'), ((234, 283), 'django.urls.path', 'path', (['"""logout/"""', 'views.user_logout'], {'name': '"""logout"""'}), "('logout/', views.user_logout, name='logout')\n", (238, 283), False, 'from django.urls import path\n'), ((289, 338), 'django.urls.path', 'path', (['"""signup/"""', 'views.user_signup'], {'name': '"""signup"""'}), "('signup/', views.user_signup, name='signup')\n", (293, 338), False, 'from django.urls import path\n'), ((344, 384), 'django.urls.path', 'path', (['"""profile"""', 'profile'], {'name': '"""profile"""'}), "('profile', profile, name='profile')\n", (348, 384), False, 'from django.urls import path\n'), ((390, 433), 'django.urls.path', 'path', (['"""homepage"""', 'homepage'], {'name': '"""homepage"""'}), "('homepage', homepage, name='homepage')\n", (394, 433), False, 'from django.urls import path\n'), ((439, 507), 'django.urls.path', 'path', (['"""profile/update/"""', 'views.update_profile'], {'name': '"""update_profile"""'}), "('profile/update/', views.update_profile, name='update_profile')\n", (443, 507), False, 'from django.urls import path\n'), ((513, 576), 'django.urls.path', 'path', (['"""user/<int:id>/"""', 'views.user_profile'], {'name': '"""user_profile"""'}), "('user/<int:id>/', views.user_profile, name='user_profile')\n", (517, 576), False, 'from django.urls import path\n'), ((582, 641), 'django.urls.path', 'path', (['"""like/<int:id>/"""', 'views.like_image'], {'name': '"""like_image"""'}), "('like/<int:id>/', views.like_image, name='like_image')\n", (586, 641), False, 'from django.urls import path\n'), ((647, 706), 'django.urls.path', 'path', (['"""comment/add"""', 'views.save_comment'], {'name': '"""add_comment"""'}), "('comment/add', views.save_comment, name='add_comment')\n", (651, 706), False, 'from django.urls import path\n'), ((712, 770), 'django.urls.path', 'path', (['"""search/"""', 'views.search_images'], {'name': '"""search_images"""'}), "('search/', views.search_images, name='search_images')\n", (716, 770), False, 'from django.urls import path\n'), ((776, 832), 'django.urls.path', 'path', (['"""upload/add/"""', 'views.save_image'], {'name': '"""save.image"""'}), "('upload/add/', views.save_image, name='save.image')\n", (780, 832), False, 'from django.urls import path\n'), ((838, 906), 'django.urls.path', 'path', (['"""picture/<int:id>/"""', 'views.image_comments'], {'name': '"""single_image"""'}), "('picture/<int:id>/', views.image_comments, name='single_image')\n", (842, 906), False, 'from django.urls import path\n'), ((912, 968), 'django.urls.path', 'path', (['"""follow/<int:pk>"""', 'views.FollowView'], {'name': '"""follow"""'}), "('follow/<int:pk>', views.FollowView, name='follow')\n", (916, 968), False, 'from django.urls import path\n'), ((1006, 1067), 'django.conf.urls.static.static', 'static', (['settings.MEDIA_URL'], {'document_root': 'settings.MEDIA_ROOT'}), '(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)\n', (1012, 1067), False, 'from django.conf.urls.static import static\n')]
|
import cv2
import json
from daisykit.utils import get_asset_file
from daisykit import BarcodeScannerFlow
config = {
"try_harder": True,
"try_rotate": True
}
barcode_scanner_flow = BarcodeScannerFlow(json.dumps(config))
# Open video stream from webcam
vid = cv2.VideoCapture(0)
while(True):
# Capture the video frame
ret, frame = vid.read()
frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
result = barcode_scanner_flow.Process(frame, draw=True)
frame = cv2.cvtColor(frame, cv2.COLOR_RGB2BGR)
# Display the resulting frame
cv2.imshow('frame', frame)
# The 'q' button is set as the
# quitting button you may use any
# desired button of your choice
if cv2.waitKey(1) & 0xFF == ord('q'):
break
# After the loop release the cap object
vid.release()
# Destroy all the windows
cv2.destroyAllWindows()
|
[
"cv2.cvtColor",
"cv2.waitKey",
"cv2.imshow",
"json.dumps",
"cv2.VideoCapture",
"cv2.destroyAllWindows"
] |
[((268, 287), 'cv2.VideoCapture', 'cv2.VideoCapture', (['(0)'], {}), '(0)\n', (284, 287), False, 'import cv2\n'), ((839, 862), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (860, 862), False, 'import cv2\n'), ((209, 227), 'json.dumps', 'json.dumps', (['config'], {}), '(config)\n', (219, 227), False, 'import json\n'), ((374, 412), 'cv2.cvtColor', 'cv2.cvtColor', (['frame', 'cv2.COLOR_BGR2RGB'], {}), '(frame, cv2.COLOR_BGR2RGB)\n', (386, 412), False, 'import cv2\n'), ((487, 525), 'cv2.cvtColor', 'cv2.cvtColor', (['frame', 'cv2.COLOR_RGB2BGR'], {}), '(frame, cv2.COLOR_RGB2BGR)\n', (499, 525), False, 'import cv2\n'), ((565, 591), 'cv2.imshow', 'cv2.imshow', (['"""frame"""', 'frame'], {}), "('frame', frame)\n", (575, 591), False, 'import cv2\n'), ((709, 723), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (720, 723), False, 'import cv2\n')]
|
from django import forms
from django.contrib.auth import get_user_model
from django.template.defaultfilters import filesizeformat
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import Group
from os import path
from .models import User
from texaslan.applications.models import Application
class UserSignupForm(forms.ModelForm):
class Meta:
model = User
fields = ['full_name', 'nick_name', 'graduation_date', 'concentration', 'gender']
widgets = {
'full_name': forms.TextInput(attrs={'placeholder': 'Full Name'}),
'nick_name': forms.TextInput(attrs={'placeholder': 'Nick Name'}),
'graduation_date': forms.TextInput(attrs={'placeholder': 'Graduation Date'}),
}
def signup(self, request, user):
user.username = self.cleaned_data['username']
user.full_name = self.cleaned_data['full_name']
user.nick_name = self.cleaned_data['nick_name']
user.graduation_date = self.cleaned_data['graduation_date']
user.save()
open_rush_group = Group.objects.get(name="Open Rushie")
open_rush_group.user_set.add(user)
open_rush_group.save()
(application, created) = Application.objects.get_or_create(applicant_user__pk=user.pk)
application.applicant_user = user
application.save()
class UserUpdateForm(forms.ModelForm):
class Meta:
model = User
fields = ['full_name', 'nick_name', 'graduation_date', 'concentration', 'gender']
|
[
"django.forms.TextInput",
"texaslan.applications.models.Application.objects.get_or_create",
"django.contrib.auth.models.Group.objects.get"
] |
[((1094, 1131), 'django.contrib.auth.models.Group.objects.get', 'Group.objects.get', ([], {'name': '"""Open Rushie"""'}), "(name='Open Rushie')\n", (1111, 1131), False, 'from django.contrib.auth.models import Group\n'), ((1240, 1301), 'texaslan.applications.models.Application.objects.get_or_create', 'Application.objects.get_or_create', ([], {'applicant_user__pk': 'user.pk'}), '(applicant_user__pk=user.pk)\n', (1273, 1301), False, 'from texaslan.applications.models import Application\n'), ((544, 595), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'placeholder': 'Full Name'}"}), "(attrs={'placeholder': 'Full Name'})\n", (559, 595), False, 'from django import forms\n'), ((622, 673), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'placeholder': 'Nick Name'}"}), "(attrs={'placeholder': 'Nick Name'})\n", (637, 673), False, 'from django import forms\n'), ((706, 763), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'placeholder': 'Graduation Date'}"}), "(attrs={'placeholder': 'Graduation Date'})\n", (721, 763), False, 'from django import forms\n')]
|
import click
from PyInquirer import prompt
from one.utils.environment.common import get_credentials_file, get_config_file, get_idp_file, write_config
from one.utils.prompt import style
from one.docker.image import Image
from one.docker.container import Container
from one.__init__ import CLI_ROOT
from one.prompt.idp import PROVIDER_QUESTIONS, GSUITE_QUESTIONS, AZURE_QUESTIONS, OKTA_QUESTIONS
from one.prompt.auth import AWS_ACCESS_KEY_QUESTIONS
image = Image()
container = Container()
def configure_idp():
provider_answer = prompt(PROVIDER_QUESTIONS, style=style)
if not provider_answer:
raise SystemExit
if provider_answer['provider'] == 'Google G Suite SSO':
configure_gsuite()
elif provider_answer['provider'] == 'Microsoft Azure SSO':
configure_azure()
elif provider_answer['provider'] == 'Okta SSO':
configure_okta()
elif provider_answer['provider'] == 'AWS SSO':
configure_aws_sso()
elif provider_answer['provider'] == 'AWS IAM user':
configure_iam_user()
else:
raise SystemExit
def configure_gsuite():
answers = prompt(GSUITE_QUESTIONS, style=style)
if not bool(answers):
raise SystemExit
idp_file = get_idp_file()
idp_file['gsuite'] = {
'google_idp_id': answers['GOOGLE_IDP_ID'],
'google_sp_id': answers['GOOGLE_SP_ID']
}
write_config(idp_file, '/idp')
click.echo('\n')
def configure_azure():
answers = prompt(AZURE_QUESTIONS, style=style)
if not bool(answers):
raise SystemExit
idp_file = get_idp_file()
idp_file['azure'] = {
'AZURE_TENANT_ID': answers['AZURE_TENANT_ID'],
'AZURE_APP_ID_URI': answers['AZURE_APP_ID_URI']
}
write_config(idp_file, '/idp')
click.echo('\n')
def configure_okta():
answers = prompt(OKTA_QUESTIONS, style=style)
if not bool(answers):
raise SystemExit
idp_file = get_idp_file()
idp_file['okta'] = {
'okta_org': answers['OKTA_ORG'],
'okta_aws_app_url': answers['OKTA_AWS_APP_URL'],
'okta_aws_default_region': answers['OKTA_AWS_DEFAULT_REGION']
}
write_config(idp_file, '/idp')
click.echo('\n')
def configure_aws_sso():
auth_image = image.get_image('aws_v2')
work_volume = CLI_ROOT + ':/work'
env_sso = {}
env_sso['AWS_CONFIG_FILE'] = '/work/config'
container.create(
image=auth_image,
command='configure sso',
volumes=[work_volume],
environment=env_sso
)
click.echo('\n')
def configure_iam_user():
aws_auth_answer = prompt(AWS_ACCESS_KEY_QUESTIONS, style=style)
if not aws_auth_answer:
raise SystemExit
credentials_file = get_credentials_file()
credentials_file[aws_auth_answer['PROFILE']] = {
'AWS_ACCESS_KEY_ID': aws_auth_answer['AWS_ACCESS_KEY_ID'],
'AWS_SECRET_ACCESS_KEY': aws_auth_answer['AWS_SECRET_ACCESS_KEY']
}
config_file = get_config_file()
config_file['profile ' + aws_auth_answer['PROFILE']] = {
'REGION': aws_auth_answer['REGION']
}
write_config(credentials_file, '/credentials')
write_config(config_file, '/config')
click.echo('\n')
|
[
"one.utils.environment.common.get_idp_file",
"one.utils.environment.common.get_config_file",
"one.docker.container.Container",
"click.echo",
"one.docker.image.Image",
"one.utils.environment.common.get_credentials_file",
"one.utils.environment.common.write_config",
"PyInquirer.prompt"
] |
[((456, 463), 'one.docker.image.Image', 'Image', ([], {}), '()\n', (461, 463), False, 'from one.docker.image import Image\n'), ((476, 487), 'one.docker.container.Container', 'Container', ([], {}), '()\n', (485, 487), False, 'from one.docker.container import Container\n'), ((533, 572), 'PyInquirer.prompt', 'prompt', (['PROVIDER_QUESTIONS'], {'style': 'style'}), '(PROVIDER_QUESTIONS, style=style)\n', (539, 572), False, 'from PyInquirer import prompt\n'), ((1119, 1156), 'PyInquirer.prompt', 'prompt', (['GSUITE_QUESTIONS'], {'style': 'style'}), '(GSUITE_QUESTIONS, style=style)\n', (1125, 1156), False, 'from PyInquirer import prompt\n'), ((1223, 1237), 'one.utils.environment.common.get_idp_file', 'get_idp_file', ([], {}), '()\n', (1235, 1237), False, 'from one.utils.environment.common import get_credentials_file, get_config_file, get_idp_file, write_config\n'), ((1375, 1405), 'one.utils.environment.common.write_config', 'write_config', (['idp_file', '"""/idp"""'], {}), "(idp_file, '/idp')\n", (1387, 1405), False, 'from one.utils.environment.common import get_credentials_file, get_config_file, get_idp_file, write_config\n'), ((1410, 1426), 'click.echo', 'click.echo', (['"""\n"""'], {}), "('\\n')\n", (1420, 1426), False, 'import click\n'), ((1466, 1502), 'PyInquirer.prompt', 'prompt', (['AZURE_QUESTIONS'], {'style': 'style'}), '(AZURE_QUESTIONS, style=style)\n', (1472, 1502), False, 'from PyInquirer import prompt\n'), ((1570, 1584), 'one.utils.environment.common.get_idp_file', 'get_idp_file', ([], {}), '()\n', (1582, 1584), False, 'from one.utils.environment.common import get_credentials_file, get_config_file, get_idp_file, write_config\n'), ((1733, 1763), 'one.utils.environment.common.write_config', 'write_config', (['idp_file', '"""/idp"""'], {}), "(idp_file, '/idp')\n", (1745, 1763), False, 'from one.utils.environment.common import get_credentials_file, get_config_file, get_idp_file, write_config\n'), ((1768, 1784), 'click.echo', 'click.echo', (['"""\n"""'], {}), "('\\n')\n", (1778, 1784), False, 'import click\n'), ((1823, 1858), 'PyInquirer.prompt', 'prompt', (['OKTA_QUESTIONS'], {'style': 'style'}), '(OKTA_QUESTIONS, style=style)\n', (1829, 1858), False, 'from PyInquirer import prompt\n'), ((1925, 1939), 'one.utils.environment.common.get_idp_file', 'get_idp_file', ([], {}), '()\n', (1937, 1939), False, 'from one.utils.environment.common import get_credentials_file, get_config_file, get_idp_file, write_config\n'), ((2144, 2174), 'one.utils.environment.common.write_config', 'write_config', (['idp_file', '"""/idp"""'], {}), "(idp_file, '/idp')\n", (2156, 2174), False, 'from one.utils.environment.common import get_credentials_file, get_config_file, get_idp_file, write_config\n'), ((2179, 2195), 'click.echo', 'click.echo', (['"""\n"""'], {}), "('\\n')\n", (2189, 2195), False, 'import click\n'), ((2520, 2536), 'click.echo', 'click.echo', (['"""\n"""'], {}), "('\\n')\n", (2530, 2536), False, 'import click\n'), ((2587, 2632), 'PyInquirer.prompt', 'prompt', (['AWS_ACCESS_KEY_QUESTIONS'], {'style': 'style'}), '(AWS_ACCESS_KEY_QUESTIONS, style=style)\n', (2593, 2632), False, 'from PyInquirer import prompt\n'), ((2709, 2731), 'one.utils.environment.common.get_credentials_file', 'get_credentials_file', ([], {}), '()\n', (2729, 2731), False, 'from one.utils.environment.common import get_credentials_file, get_config_file, get_idp_file, write_config\n'), ((2951, 2968), 'one.utils.environment.common.get_config_file', 'get_config_file', ([], {}), '()\n', (2966, 2968), False, 'from one.utils.environment.common import get_credentials_file, get_config_file, get_idp_file, write_config\n'), ((3085, 3131), 'one.utils.environment.common.write_config', 'write_config', (['credentials_file', '"""/credentials"""'], {}), "(credentials_file, '/credentials')\n", (3097, 3131), False, 'from one.utils.environment.common import get_credentials_file, get_config_file, get_idp_file, write_config\n'), ((3136, 3172), 'one.utils.environment.common.write_config', 'write_config', (['config_file', '"""/config"""'], {}), "(config_file, '/config')\n", (3148, 3172), False, 'from one.utils.environment.common import get_credentials_file, get_config_file, get_idp_file, write_config\n'), ((3177, 3193), 'click.echo', 'click.echo', (['"""\n"""'], {}), "('\\n')\n", (3187, 3193), False, 'import click\n')]
|
import matplotlib.pyplot as plt
import pandas as pd
from utils import *
# Coordinates and IDs of the main cable nodes
data1 = pd.read_csv("data/附件1.csv", encoding='ANSI')
# print('Main cable node coordinates and IDs:\n', data1)
nodes_data = {}
for d in data1.itertuples():
nodes_data[d[1]] = {
# 'position': tuple(d[2:]),
'position_raw': np.array(d[2:]),
'position': np.array(d[2:]),
        # Extension amount, i.e. all of the variables to be solved for
'expand': 0
}
# Coordinates of the actuator lower ends (ground anchors),
# coordinates of the upper ends (tops) in the reference state,
# and the main cable node ID associated with each actuator
data2 = pd.read_csv("data/附件2.csv", encoding='ANSI')
# print('data2:\n', data2)
for d in data2.itertuples():
nodes_data[d[1]]['actuactor_head'] = np.array(d[2:5])
nodes_data[d[1]]['actuactor_base'] = np.array(d[5:8])
# print(nodes_data)
triangles_data = []
# Main cable node IDs of each reflector panel
data3 = pd.read_csv("data/附件3.csv", encoding='ANSI')
# print('data3:\n', data3)
for d in data3.itertuples():
triangles_data.append(tuple(d[1:]))
# print(triangles_data)
# Plot the current configuration
def draw_points(points: np.ndarray = None, nodes_data_: dict = nodes_data):
ax = plt.axes(projection='3d')
plt.xlim(-300, 300)
plt.ylim(-300, 300)
ax.set_zlim(-400, -100)
if points is None:
points = to_points(nodes_data_=nodes_data_)
points2 = to_points(nodes_data_=nodes_data_, dict_key='actuactor_head')
points3 = to_points(nodes_data_=nodes_data_, dict_key='actuactor_base')
m = get_rotation_matrix(np.pi / 6, np.pi / 4, np.pi / 12)
# m = get_rotation_matrix(0, 0, np.pi / 12)
# m = get_rotation_matrix(a, b, c)
# points = points * m
# np.zeros((3, 3)) * np.zeros((100, 3)).T
    # Matrix multiplication needs np.dot...
points = np.dot(points, m)
ax.scatter3D(points.T[0], points.T[1], points.T[2], c="g", marker='.')
ax.scatter3D(points2.T[0], points2.T[1], points2.T[2], c="c", marker='.')
ax.scatter3D(points3.T[0], points3.T[1], points3.T[2], c='m', marker='.')
plt.show()
# Compute the main cable node positions under the current extension values
def update_expand(nodes_data_: dict = nodes_data):
for name in nodes_data_:
node = nodes_data_[name]
        # Direction vector from the actuator base through the original node position, then apply the extension
n = get_unit_vector(node['position_raw'], node['actuactor_base'])
        # Update position
node['position'] = node['position_raw'] + n * node['expand']
# Convert node data to a coordinate array
def to_points(nodes_data_: dict = nodes_data, dict_key: str = 'position') -> np.ndarray:
points = []
for name in nodes_data_:
node = nodes_data_[name]
points.append(node[dict_key])
return np.array(points)
def do_rotation(alpha: float, beta: float) -> np.ndarray:
m = get_rotation_matrix(0, alpha, beta)
for i in range(0, 8):
# c -> beta
# b -> alpha ?
draw_points(b=0, c=0, a=i * np.pi / 8)
# import random
# for name in nodes_data:
# nodes_data[name]['expand'] = 20 * random.random()
# update_expand()
# draw_points()
# print(plane_symmetry_point([1, 1, 1, 0], [2, 2, 2]))
|
[
"matplotlib.pyplot.xlim",
"matplotlib.pyplot.show",
"matplotlib.pyplot.ylim",
"pandas.read_csv",
"matplotlib.pyplot.axes"
] |
[((94, 138), 'pandas.read_csv', 'pd.read_csv', (['"""data/附件1.csv"""'], {'encoding': '"""ANSI"""'}), "('data/附件1.csv', encoding='ANSI')\n", (105, 138), True, 'import pandas as pd\n'), ((467, 511), 'pandas.read_csv', 'pd.read_csv', (['"""data/附件2.csv"""'], {'encoding': '"""ANSI"""'}), "('data/附件2.csv', encoding='ANSI')\n", (478, 511), True, 'import pandas as pd\n'), ((751, 795), 'pandas.read_csv', 'pd.read_csv', (['"""data/附件3.csv"""'], {'encoding': '"""ANSI"""'}), "('data/附件3.csv', encoding='ANSI')\n", (762, 795), True, 'import pandas as pd\n'), ((1014, 1039), 'matplotlib.pyplot.axes', 'plt.axes', ([], {'projection': '"""3d"""'}), "(projection='3d')\n", (1022, 1039), True, 'import matplotlib.pyplot as plt\n'), ((1044, 1063), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(-300)', '(300)'], {}), '(-300, 300)\n', (1052, 1063), True, 'import matplotlib.pyplot as plt\n'), ((1068, 1087), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(-300)', '(300)'], {}), '(-300, 300)\n', (1076, 1087), True, 'import matplotlib.pyplot as plt\n'), ((1856, 1866), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1864, 1866), True, 'import matplotlib.pyplot as plt\n')]
|
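As the comment in draw_points above warns, `*` on NumPy arrays is element-wise and is not the matrix product, so rotating the point cloud needs np.dot or the @ operator. A minimal sketch, not part of the original sample:
import numpy as np
pts = np.array([[1.0, 0.0, 0.0],
                [0.0, 2.0, 0.0]])       # one point per row
rot_z = np.array([[0.0, 1.0, 0.0],
                  [-1.0, 0.0, 0.0],
                  [0.0, 0.0, 1.0]])      # 90-degree rotation about z (row-vector convention)
print(pts @ rot_z)   # matrix product: [[0, 1, 0], [-2, 0, 0]]
print(pts * pts)     # element-wise square -- not a rotation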
#!/usr/bin/env python
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai
__license__ = 'GPL v3'
__copyright__ = '2011, Kov<NAME> <<EMAIL>>; 2011, <NAME> <<EMAIL>>'
__docformat__ = 'restructuredtext en'
import time
try:
from queue import Empty, Queue
except ImportError:
from Queue import Empty, Queue
from calibre.ebooks.metadata import check_isbn
from calibre.ebooks.metadata.sources.base import Option, Source
from calibre.ebooks.metadata.book.base import Metadata
from calibre import as_unicode
NAMESPACES = {
'openSearch': 'http://a9.com/-/spec/opensearchrss/1.0/',
'atom': 'http://www.w3.org/2005/Atom',
'db': 'https://www.douban.com/xmlns/',
'gd': 'http://schemas.google.com/g/2005'
}
def get_details(browser, url, timeout): # {{{
try:
if Douban.DOUBAN_API_KEY:
url = url + "?apikey=" + Douban.DOUBAN_API_KEY
raw = browser.open_novisit(url, timeout=timeout).read()
except Exception as e:
gc = getattr(e, 'getcode', lambda: -1)
if gc() != 403:
raise
# Douban is throttling us, wait a little
time.sleep(2)
raw = browser.open_novisit(url, timeout=timeout).read()
return raw
# }}}
class Douban(Source):
name = 'Douban Books'
author = '<NAME>, xcffl, jnozsc, deckvig'
version = (3, 2, 0)
minimum_calibre_version = (5, 0, 0)
description = _(
'Downloads metadata and covers from Douban.com. '
'Useful only for Chinese language books.'
)
capabilities = frozenset(['identify', 'cover'])
touched_fields = frozenset([
'title', 'authors', 'tags', 'pubdate', 'comments', 'publisher',
'identifier:isbn', 'rating', 'identifier:douban'
]) # language currently disabled
supports_gzip_transfer_encoding = True
cached_cover_url_is_reliable = True
DOUBAN_API_KEY = '054022eaeae0b00e0fc068c0c0a2102a'
DOUBAN_API_URL = 'https://api.douban.com/v2/book/search'
DOUBAN_BOOK_URL = 'https://book.douban.com/subject/%s/'
options = (
Option(
'include_subtitle_in_title', 'bool', True,
_('Include subtitle in book title:'),
_('Whether to append subtitle in the book title.')
),
Option(
'douban_api_domain', 'string', "https://api.douban.com",
_('simple boot douban api address:'),
_('simple boot douban api server address.')
),
)
def save_settings(self, *args, **kwargs):
Source.save_settings(self, *args, **kwargs)
@property
def douban_api_domain(self):
return self.prefs['douban_api_domain']
def to_metadata(self, browser, log, entry_, timeout): # {{{
from calibre.utils.date import parse_date, utcnow
douban_id = entry_.get('id')
title = entry_.get('title')
description = entry_.get('summary')
# subtitle = entry_.get('subtitle') # TODO: std metada doesn't have this field
publisher = entry_.get('publisher')
isbn = entry_.get('isbn13') # ISBN11 is obsolute, use ISBN13
pubdate = entry_.get('pubdate')
authors = entry_.get('author')
book_tags = entry_.get('tags')
rating = entry_.get('rating')
cover_url = entry_.get('image')
series = entry_.get('series')
if not authors:
authors = [_('Unknown')]
if not douban_id or not title:
# Silently discard this entry
return None
mi = Metadata(title, authors)
mi.identifiers = {'douban': douban_id}
mi.publisher = publisher
mi.comments = description
# mi.subtitle = subtitle
# ISBN
isbns = []
if isinstance(isbn, (type(''), bytes)):
if check_isbn(isbn):
isbns.append(isbn)
else:
for x in isbn:
if check_isbn(x):
isbns.append(x)
if isbns:
mi.isbn = sorted(isbns, key=len)[-1]
mi.all_isbns = isbns
# Tags
mi.tags = [tag['name'] for tag in book_tags]
# pubdate
if pubdate:
try:
default = utcnow().replace(day=15)
mi.pubdate = parse_date(pubdate, assume_utc=True, default=default)
except:
log.error('Failed to parse pubdate %r' % pubdate)
# Ratings
if rating:
try:
mi.rating = float(rating['average']) / 2.0
except:
log.exception('Failed to parse rating')
mi.rating = 0
# Cover
mi.has_douban_cover = None
u = cover_url
if u:
# If URL contains "book-default", the book doesn't have a cover
if u.find('book-default') == -1:
mi.has_douban_cover = u
# Series
if series:
mi.series = series['title']
return mi
# }}}
def get_book_url(self, identifiers): # {{{
db = identifiers.get('douban', None)
if db is not None:
return ('douban', db, self.DOUBAN_BOOK_URL % db)
# }}}
def create_query(self, log, title=None, authors=None, identifiers={}): # {{{
try:
from urllib.parse import urlencode
except ImportError:
from urllib import urlencode
SEARCH_URL = self.douban_api_domain + '/v2/book/search?count=10&'
ISBN_URL = self.douban_api_domain + '/v2/book/isbn/'
SUBJECT_URL = self.douban_api_domain + '/v2/book/'
q = ''
t = None
isbn = check_isbn(identifiers.get('isbn', None))
subject = identifiers.get('douban', None)
if isbn is not None:
q = isbn
t = 'isbn'
elif subject is not None:
q = subject
t = 'subject'
elif title or authors:
def build_term(prefix, parts):
return ' '.join(x for x in parts)
title_tokens = list(self.get_title_tokens(title))
if title_tokens:
q += build_term('title', title_tokens)
author_tokens = list(
self.get_author_tokens(authors, only_first_author=True)
)
if author_tokens:
q += ((' ' if q != '' else '') + build_term('author', author_tokens))
t = 'search'
q = q.strip()
if not q:
return None
url = None
if t == "isbn":
url = ISBN_URL + q
elif t == 'subject':
url = SUBJECT_URL + q
else:
url = SEARCH_URL + urlencode({
'q': q,
})
if self.DOUBAN_API_KEY and self.DOUBAN_API_KEY != '':
if t == "isbn" or t == "subject":
url = url + "?apikey=" + self.DOUBAN_API_KEY
else:
url = url + "&apikey=" + self.DOUBAN_API_KEY
return url
# }}}
def download_cover(
self,
log,
result_queue,
abort, # {{{
title=None,
authors=None,
identifiers={},
timeout=30,
get_best_cover=False
):
cached_url = self.get_cached_cover_url(identifiers)
if cached_url is None:
log.info('No cached cover found, running identify')
rq = Queue()
self.identify(
log,
rq,
abort,
title=title,
authors=authors,
identifiers=identifiers
)
if abort.is_set():
return
results = []
while True:
try:
results.append(rq.get_nowait())
except Empty:
break
results.sort(
key=self.identify_results_keygen(
title=title, authors=authors, identifiers=identifiers
)
)
for mi in results:
cached_url = self.get_cached_cover_url(mi.identifiers)
if cached_url is not None:
break
if cached_url is None:
log.info('No cover found')
return
if abort.is_set():
return
br = self.browser
log('Downloading cover from:', cached_url)
try:
cdata = br.open_novisit(cached_url, timeout=timeout).read()
if cdata:
result_queue.put((self, cdata))
except:
log.exception('Failed to download cover from:', cached_url)
# }}}
def get_cached_cover_url(self, identifiers): # {{{
url = None
db = identifiers.get('douban', None)
if db is None:
isbn = identifiers.get('isbn', None)
if isbn is not None:
db = self.cached_isbn_to_identifier(isbn)
if db is not None:
url = self.cached_identifier_to_cover_url(db)
return url
# }}}
def get_all_details(
self,
br,
log,
entries,
abort, # {{{
result_queue,
timeout
):
for relevance, i in enumerate(entries):
try:
ans = self.to_metadata(br, log, i, timeout)
if isinstance(ans, Metadata):
ans.source_relevance = relevance
db = ans.identifiers['douban']
for isbn in getattr(ans, 'all_isbns', []):
self.cache_isbn_to_identifier(isbn, db)
if ans.has_douban_cover:
self.cache_identifier_to_cover_url(db, ans.has_douban_cover)
self.clean_downloaded_metadata(ans)
result_queue.put(ans)
except:
log.exception('Failed to get metadata for identify entry:', i)
if abort.is_set():
break
# }}}
def identify(
self,
log,
result_queue,
abort,
title=None,
authors=None, # {{{
identifiers={},
timeout=30
):
import json
query = self.create_query(
log, title=title, authors=authors, identifiers=identifiers
)
if not query:
log.error('Insufficient metadata to construct query')
return
br = self.browser
try:
raw = br.open_novisit(query, timeout=timeout).read()
except Exception as e:
log.exception('Failed to make identify query: %r' % query)
return as_unicode(e)
try:
j = json.loads(raw)
except Exception as e:
log.exception('Failed to parse identify results')
return as_unicode(e)
if 'books' in j:
entries = j['books']
else:
entries = []
entries.append(j)
if not entries and identifiers and title and authors and \
not abort.is_set():
return self.identify(
log,
result_queue,
abort,
title=title,
authors=authors,
timeout=timeout
)
# There is no point running these queries in threads as douban
# throttles requests returning 403 Forbidden errors
self.get_all_details(br, log, entries, abort, result_queue, timeout)
return None
# }}}
if __name__ == '__main__': # tests {{{
# To run these test use: calibre-debug -e src/calibre/ebooks/metadata/sources/douban.py
from calibre.ebooks.metadata.sources.test import (
test_identify_plugin, title_test, authors_test
)
test_identify_plugin(
Douban.name, [
({
'identifiers': {
'isbn': '9787536692930'
},
'title': '三体',
'authors': ['刘慈欣']
}, [title_test('三体', exact=True),
authors_test(['刘慈欣'])]),
({
'title': 'Linux内核修炼之道',
'authors': ['任桥伟']
}, [title_test('Linux内核修炼之道', exact=False)]),
]
)
# }}}
|
[
"json.loads",
"calibre.utils.date.utcnow",
"Queue.Queue",
"calibre.ebooks.metadata.sources.test.title_test",
"calibre.ebooks.metadata.book.base.Metadata",
"time.sleep",
"calibre.ebooks.metadata.sources.test.authors_test",
"calibre.ebooks.metadata.check_isbn",
"calibre.utils.date.parse_date",
"calibre.as_unicode",
"urllib.urlencode",
"calibre.ebooks.metadata.sources.base.Source.save_settings"
] |
[((2496, 2539), 'calibre.ebooks.metadata.sources.base.Source.save_settings', 'Source.save_settings', (['self', '*args'], {}), '(self, *args, **kwargs)\n', (2516, 2539), False, 'from calibre.ebooks.metadata.sources.base import Option, Source\n'), ((3501, 3525), 'calibre.ebooks.metadata.book.base.Metadata', 'Metadata', (['title', 'authors'], {}), '(title, authors)\n', (3509, 3525), False, 'from calibre.ebooks.metadata.book.base import Metadata\n'), ((1107, 1120), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (1117, 1120), False, 'import time\n'), ((3771, 3787), 'calibre.ebooks.metadata.check_isbn', 'check_isbn', (['isbn'], {}), '(isbn)\n', (3781, 3787), False, 'from calibre.ebooks.metadata import check_isbn\n'), ((7346, 7353), 'Queue.Queue', 'Queue', ([], {}), '()\n', (7351, 7353), False, 'from Queue import Empty, Queue\n'), ((10649, 10664), 'json.loads', 'json.loads', (['raw'], {}), '(raw)\n', (10659, 10664), False, 'import json\n'), ((3884, 3897), 'calibre.ebooks.metadata.check_isbn', 'check_isbn', (['x'], {}), '(x)\n', (3894, 3897), False, 'from calibre.ebooks.metadata import check_isbn\n'), ((4236, 4289), 'calibre.utils.date.parse_date', 'parse_date', (['pubdate'], {'assume_utc': '(True)', 'default': 'default'}), '(pubdate, assume_utc=True, default=default)\n', (4246, 4289), False, 'from calibre.utils.date import parse_date, utcnow\n'), ((10606, 10619), 'calibre.as_unicode', 'as_unicode', (['e'], {}), '(e)\n', (10616, 10619), False, 'from calibre import as_unicode\n'), ((10777, 10790), 'calibre.as_unicode', 'as_unicode', (['e'], {}), '(e)\n', (10787, 10790), False, 'from calibre import as_unicode\n'), ((6627, 6646), 'urllib.urlencode', 'urlencode', (["{'q': q}"], {}), "({'q': q})\n", (6636, 6646), False, 'from urllib import urlencode\n'), ((11969, 11997), 'calibre.ebooks.metadata.sources.test.title_test', 'title_test', (['"""三体"""'], {'exact': '(True)'}), "('三体', exact=True)\n", (11979, 11997), False, 'from calibre.ebooks.metadata.sources.test import test_identify_plugin, title_test, authors_test\n'), ((12015, 12036), 'calibre.ebooks.metadata.sources.test.authors_test', 'authors_test', (["['刘慈欣']"], {}), "(['刘慈欣'])\n", (12027, 12036), False, 'from calibre.ebooks.metadata.sources.test import test_identify_plugin, title_test, authors_test\n'), ((12146, 12184), 'calibre.ebooks.metadata.sources.test.title_test', 'title_test', (['"""Linux内核修炼之道"""'], {'exact': '(False)'}), "('Linux内核修炼之道', exact=False)\n", (12156, 12184), False, 'from calibre.ebooks.metadata.sources.test import test_identify_plugin, title_test, authors_test\n'), ((4182, 4190), 'calibre.utils.date.utcnow', 'utcnow', ([], {}), '()\n', (4188, 4190), False, 'from calibre.utils.date import parse_date, utcnow\n')]
|
# Copyright (c) 2013 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_utils import timeutils
from barbicanclient.tests import test_client
from barbicanclient import cas
class CAData(object):
def __init__(self):
self.name = u'Test CA'
self.description = u'Test CA description'
self.plugin_name = u'Test CA Plugin'
self.plugin_ca_id = 'plugin_uuid'
now = timeutils.utcnow()
self.expiration = str(now)
self.created = str(now)
self.meta = [
{'name': self.name},
{'description': self.description}
]
self.ca_dict = {'meta': self.meta,
'status': u'ACTIVE',
'plugin_name': self.plugin_name,
'plugin_ca_id': self.plugin_ca_id,
'created': self.created}
def get_dict(self, ca_ref=None):
ca = self.ca_dict
if ca_ref:
ca['ca_ref'] = ca_ref
return ca
class WhenTestingCAs(test_client.BaseEntityResource):
def setUp(self):
self._setUp('cas')
self.ca = CAData()
self.manager = self.client.cas
def test_should_get_lazy(self):
data = self.ca.get_dict(self.entity_href)
m = self.responses.get(self.entity_href, json=data)
ca = self.manager.get(ca_ref=self.entity_href)
self.assertIsInstance(ca, cas.CA)
self.assertEqual(self.entity_href, ca._ca_ref)
# Verify GET wasn't called yet
self.assertFalse(m.called)
# Check an attribute to trigger lazy-load
self.assertEqual(self.ca.plugin_ca_id, ca.plugin_ca_id)
# Verify the correct URL was used to make the GET call
self.assertEqual(self.entity_href, m.last_request.url)
def test_should_get_lazy_in_meta(self):
data = self.ca.get_dict(self.entity_href)
m = self.responses.get(self.entity_href, json=data)
ca = self.manager.get(ca_ref=self.entity_href)
self.assertIsInstance(ca, cas.CA)
self.assertEqual(self.entity_href, ca._ca_ref)
# Verify GET wasn't called yet
self.assertFalse(m.called)
# Check an attribute in meta to trigger lazy-load
self.assertEqual(self.ca.name, ca.name)
# Verify the correct URL was used to make the GET call
self.assertEqual(self.entity_href, m.last_request.url)
def test_should_get_list(self):
ca_resp = self.entity_href
data = {"cas": [ca_resp for v in range(3)]}
m = self.responses.get(self.entity_base, json=data)
ca_list = self.manager.list(limit=10, offset=5)
self.assertTrue(len(ca_list) == 3)
self.assertIsInstance(ca_list[0], cas.CA)
self.assertEqual(self.entity_href, ca_list[0].ca_ref)
# Verify the correct URL was used to make the call.
self.assertEqual(self.entity_base,
m.last_request.url.split('?')[0])
# Verify that correct information was sent in the call.
self.assertEqual(['10'], m.last_request.qs['limit'])
self.assertEqual(['5'], m.last_request.qs['offset'])
def test_should_fail_get_invalid_ca(self):
self.assertRaises(ValueError, self.manager.get,
**{'ca_ref': '12345'})
|
[
"oslo_utils.timeutils.utcnow"
] |
[((925, 943), 'oslo_utils.timeutils.utcnow', 'timeutils.utcnow', ([], {}), '()\n', (941, 943), False, 'from oslo_utils import timeutils\n')]
|
import numpy as np
import cv2
# Translation is the shifting of objects location. If you know the shift in
# (x,y) direction, let it be (t_x,t_y), you can create the transformation matrix
# M as follows:
#
# M = | 1 0 t_x |
# | 0 1 t_y |
#
# You'll need to make it into a Numpy array of type np.float32 and pass it into
# cv2.warpAffine() function.
img = cv2.imread('images/saturn.png', 0)
rows, cols = img.shape
translate_x = -150
translate_y = 50
M = np.float32([[1, 0, translate_x], [0, 1, translate_y]])
img_translated = cv2.warpAffine(img, M, (cols, rows))
cv2.imshow('Translated Image', img_translated)
cv2.waitKey(0)
cv2.destroyAllWindows()
# WARNING: Third argument of the cv2.warpAffine() function is the size of the
# output image, which should be in the form of (width, height).
# Remember width = number of columns, and height = number of rows.
|
[
"cv2.waitKey",
"cv2.destroyAllWindows",
"numpy.float32",
"cv2.imread",
"cv2.warpAffine",
"cv2.imshow"
] |
[((377, 411), 'cv2.imread', 'cv2.imread', (['"""images/saturn.png"""', '(0)'], {}), "('images/saturn.png', 0)\n", (387, 411), False, 'import cv2\n'), ((475, 529), 'numpy.float32', 'np.float32', (['[[1, 0, translate_x], [0, 1, translate_y]]'], {}), '([[1, 0, translate_x], [0, 1, translate_y]])\n', (485, 529), True, 'import numpy as np\n'), ((548, 584), 'cv2.warpAffine', 'cv2.warpAffine', (['img', 'M', '(cols, rows)'], {}), '(img, M, (cols, rows))\n', (562, 584), False, 'import cv2\n'), ((585, 631), 'cv2.imshow', 'cv2.imshow', (['"""Translated Image"""', 'img_translated'], {}), "('Translated Image', img_translated)\n", (595, 631), False, 'import cv2\n'), ((632, 646), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (643, 646), False, 'import cv2\n'), ((647, 670), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (668, 670), False, 'import cv2\n')]
|
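The warpAffine matrix above acts on homogeneous pixel coordinates, so a point (x, y) maps to (x + t_x, y + t_y). A quick check with the same M, not part of the original sample:
import numpy as np
M = np.float32([[1, 0, -150], [0, 1, 50]])
point = np.array([200.0, 100.0, 1.0])   # (x, y) in homogeneous form
print(M @ point)                        # -> [ 50. 150.], i.e. shifted by (-150, +50)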
import boto3
from rebelykos.core.response import Response as res
from rebelykos.core.teamserver.module import Module
class RLModule(Module):
def __init__(self):
super().__init__()
self.name = "role_info"
self.description = ("List all RoleNames if RoleName not specified."
" If specified, Describe the role and get "
"policies attached to that role.")
self.author = "<NAME>"
self.options["RoleName"] = {
"Description": ("Describe role detail and policies"
" attached to this role."),
"Required": False,
"Value": ""
}
def run(self):
client = boto3.client("iam", **self["profile"])
if self["RoleName"]:
role = boto3.resource("iam").Role(self["RoleName"])
yield (res.INFO,
"Showing role arn and assume_role_policy_document.")
yield (
res.RESULT,
{"Arn": role.arn,
"Statement": role.assume_role_policy_document["Statement"]}
)
yield res.INFO, "Listing attached policies."
yield from self._iam_attached_policies(
client,
role.attached_policies.all()
)
yield res.INFO, "Listing inline policies."
yield from self._iam_inline_policies(role.policies.all())
elif not self["RoleName"]:
for result in self._handle_is_truncated(client.list_roles):
if result[0] == res.RESULT:
for role in result[1]["Roles"]:
yield res.RESULT, role["RoleName"]
else:
yield result
yield res.END, "End"
|
[
"boto3.resource",
"boto3.client"
] |
[((728, 766), 'boto3.client', 'boto3.client', (['"""iam"""'], {}), "('iam', **self['profile'])\n", (740, 766), False, 'import boto3\n'), ((815, 836), 'boto3.resource', 'boto3.resource', (['"""iam"""'], {}), "('iam')\n", (829, 836), False, 'import boto3\n')]
|
"""
Quality-biased ranking (Bendersky et al., 2011)
"""
import argparse
import bs4
import collections
import json
import math
# import re
import string
from smart_open import smart_open
# Module side
#
class Pipeline():
"""Feature extraction pipeline"""
def __init__(self):
self.jobs = []
def add(self, features, adaptor=None):
if not isinstance(features, (tuple, list)):
features = [features]
self.jobs.append({'adaptor': adaptor, 'extractors': features})
def extract(self, item):
vector = []
for job in self.jobs:
input_ = item if job['adaptor'] is None else job['adaptor'](item)
for extractor in job['extractors']:
vector.append(extractor(input_))
return vector
PUNCTUATION_REMOVER = string.maketrans(string.punctuation, ' ' * len(string.punctuation))
def to_terms(text):
return text.encode('utf8', errors='replace').translate(PUNCTUATION_REMOVER).split()
def UrlDepth(url):
"""The depth of the URL path"""
pos = url.find('://')
if pos >= 0:
return url[pos+3:].count('/')
else:
return url.count('/')
def NumVisTerms(doc):
"""Number of visible terms on the page"""
_, terms = doc
return len(terms)
def NumTitleTerms(doc):
"""Number of terms in the page <title> field"""
soup, _ = doc
if soup.title is None:
return 0
else:
return len(to_terms(soup.title.get_text()))
def AvgTermLen(doc):
"""Average length of visible term on the page"""
_, terms = doc
return float(sum(len(t) for t in terms)) / len(terms) if terms else 0
def FracAnchorText(doc):
"""Fraction of anchor text on the page"""
soup, terms = doc
terms_in_anchor_texts = sum(len(to_terms(tag.get_text())) for tag in soup.find_all('a'))
return float(terms_in_anchor_texts) / len(terms) if terms else 0
def FracVisText(doc):
"""Fraction of visible text on the page"""
soup, _ = doc
try:
pagesize = len(soup.decode_contents())
except Exception:
pagesize = 0
return float(len(soup.get_text())) / pagesize if pagesize > 0 else 0
def Entropy(doc):
"""Entropy of the page content"""
_, terms = doc
N = len(terms)
tf = collections.Counter(terms)
return math.log(N) - float(sum(n * math.log(n) for n in tf.values())) / N if N > 0 else 0
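# Note (added, illustrative): the expression above equals the usual Shannon
# entropy -sum(p * log p) with p = tf / N, rewritten as
# log N - (1/N) * sum(tf * log tf); e.g. for terms ['a', 'a', 'b', 'b'] it
# yields log 4 - (4 * log 2) / 4 = log 2.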
class FracStops():
"""Stopword/non-stopword ratio"""
def __init__(self, stoplist):
self.stoplist = stoplist
def __call__(self, doc):
_, terms = doc
return float(sum(term in self.stoplist for term in terms)) / len(terms) if terms else 0
class StopCover():
"""Fraction of terms in the stopword list that appear on the page"""
def __init__(self, stoplist):
self.stoplist = stoplist
def __call__(self, doc):
_, terms = doc
if self.stoplist:
return float(sum(sw in terms for sw in self.stoplist)) / len(self.stoplist)
else:
return 0
def FracTableText(doc):
"""Fraction of table text on the page"""
soup, terms = doc
terms_in_tables = 0
for tag in soup.find_all('table'):
if any(p.name == 'table' for p in tag.parents):
continue
terms_in_tables += len(to_terms(tag.get_text()))
frac = float(terms_in_tables) / len(terms) if terms else 0
assert frac <= 1
return frac
# Data side
BLOCK_TAGS = ('h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'p', 'div', 'ul', 'ol', 'tr',
'td', 'th', 'table', 'dl', 'dd', 'li', 'blockquote', 'pre',
'address', 'title', 'head')
def SOUP_TERMS(doc):
chunk = doc['text']
soup = bs4.BeautifulSoup(chunk, 'lxml')
for elem in soup(['br']):
elem.insert_after('\n')
for elem in soup(BLOCK_TAGS):
elem.insert_after('\n')
terms = to_terms(soup.get_text().lower())
return soup, terms
def URL(doc):
return doc['url']
if __name__ == '__main__':
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('corpus_json')
parser.add_argument('stoplist')
args = parser.parse_args()
stoplist = set(l.strip() for l in smart_open(args.stoplist))
pipeline = Pipeline()
pipeline.add(UrlDepth, adaptor=URL)
pipeline.add([NumVisTerms, NumTitleTerms, AvgTermLen, FracAnchorText, FracVisText,
Entropy, FracStops(stoplist), StopCover(stoplist), FracTableText], adaptor=SOUP_TERMS)
ranked_lists = json.load(smart_open(args.corpus_json))
for rl in ranked_lists:
qid = rl['topic']['qid']
for doc in rl['docs']:
docno = doc['docno']
rel = max(doc['rel'], 0)
score = doc['score']
vector = ' '.join(['{}:{}'.format(i, val) for i, val in enumerate(pipeline.extract(doc), 2)])
print('{rel} qid:{qid} 1:{score} {vector} # {docno}'.format(**locals()))
|
[
"argparse.ArgumentParser",
"smart_open.smart_open",
"bs4.BeautifulSoup",
"collections.Counter",
"math.log"
] |
[((2271, 2297), 'collections.Counter', 'collections.Counter', (['terms'], {}), '(terms)\n', (2290, 2297), False, 'import collections\n'), ((3689, 3721), 'bs4.BeautifulSoup', 'bs4.BeautifulSoup', (['chunk', '"""lxml"""'], {}), "(chunk, 'lxml')\n", (3706, 3721), False, 'import bs4\n'), ((4000, 4044), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '__doc__'}), '(description=__doc__)\n', (4023, 4044), False, 'import argparse\n'), ((4506, 4534), 'smart_open.smart_open', 'smart_open', (['args.corpus_json'], {}), '(args.corpus_json)\n', (4516, 4534), False, 'from smart_open import smart_open\n'), ((2309, 2320), 'math.log', 'math.log', (['N'], {}), '(N)\n', (2317, 2320), False, 'import math\n'), ((4190, 4215), 'smart_open.smart_open', 'smart_open', (['args.stoplist'], {}), '(args.stoplist)\n', (4200, 4215), False, 'from smart_open import smart_open\n'), ((2337, 2348), 'math.log', 'math.log', (['n'], {}), '(n)\n', (2345, 2348), False, 'import math\n')]
|
import os
import c4d
import math
# Be sure to use a unique ID obtained from www.plugincafe.com
PLUGIN_ID = 123456790
#----begin_resource_section----
from bootstrap4c4d import Description, Assignment, Group, Container
crumb_percent_slider = [
Assignment("STEP", 1.0),
Assignment("UNIT", "PERCENT"),
Assignment("CUSTOMGUI", "REALSLIDER"),
Assignment("MINSLIDER", 0.0),
Assignment("MAXSLIDER", 100.0)
]
crumb_percent_slider_limit_min = [
Assignment("MIN", 0.0)
]
crumb_percent_slider_limit_max = [
Assignment("MAX", 100.0)
]
crumb_flag_group_open = Assignment("DEFAULT", 1)
settings_effect_strength = Description({
"id": "SETTINGS_EFFECT_STRENGTH",
"key": "REAL",
"value": [
*crumb_percent_slider,
*crumb_percent_slider_limit_min,
*crumb_percent_slider_limit_max
],
"locales": {
"strings_us": "Strength"
}
})
settings_base_origin_object = Description({
"id": "SETTINGS_BASE_ORIGIN_OBJECT",
"key": "LINK",
"value": [
Assignment("ANIM", "OFF"),
Description({
"key": "ACCEPT",
"value": [
Assignment("Obase", None)
]
})
],
"locales": {
"strings_us": "Origin"
}
})
settings_base_start_time = Description({
"id": "SETTINGS_BASE_START_TIME",
"key": "REAL",
"value": [
Assignment("UNIT", "TIME")
],
"locales": {
"strings_us": "Start Time"
}
})
settings_base_target_offset = Description({
"id": "SETTINGS_BASE_TARGET_OFFSET",
"key": "VECTOR",
"value": [
Assignment("UNIT", "METER")
],
"locales": {
"strings_us": "Target Offset"
}
})
settings_base_draw_debug_lines = Description({
"id": "SETTINGS_BASE_DRAW_DEBUG_LINES",
"key": "BOOL",
"value": [
Assignment("ANIM", "OFF")
],
"locales": {
"strings_us": "Draw Debug Lines"
}
})
vector_xplus = Assignment(None, None, {
"id": "VECTOR_XPLUS",
"locales": {
"strings_us": "X+"
}
})
vector_xminus = Assignment(None, None, {
"id": "VECTOR_XMINUS",
"locales": {
"strings_us": "X-"
}
})
vector_yplus = Assignment(None, None, {
"id": "VECTOR_YPLUS",
"locales": {
"strings_us": "Y+"
}
})
vector_yminus = Assignment(None, None, {
"id": "VECTOR_YMINUS",
"locales": {
"strings_us": "Y-"
}
})
vector_zplus = Assignment(None, None, {
"id": "VECTOR_ZPLUS",
"locales": {
"strings_us": "Z+"
}
})
vector_zminus = Assignment(None, None, {
"id": "VECTOR_ZMINUS",
"locales": {
"strings_us": "Z-"
}
})
settings_base_up_vector = Description({
"id": "SETTINGS_BASE_UP_VECTOR",
"key": "LONG",
"value": [
Assignment("ANIM", "OFF"),
Assignment("CYCLE", [
vector_xplus,
vector_xminus,
vector_yplus,
vector_yminus,
vector_zplus,
vector_zminus
])
],
"locales": {
"strings_us": "Up Vector"
}
})
settings_base_aim_vector = Description({
"id": "SETTINGS_BASE_AIM_VECTOR",
"key": "LONG",
"value": [
Assignment("ANIM", "OFF"),
Assignment("CYCLE", [
vector_xplus,
vector_xminus,
vector_yplus,
vector_yminus,
vector_zplus,
vector_zminus
])
],
"locales": {
"strings_us": "Aim Vector"
}
})
group_effect = Group("GROUP_EFFECT", {
"value": [
crumb_flag_group_open,
settings_effect_strength
],
"locales": {
"strings_us": "Effect"
}
})
group_base = Group("GROUP_BASE", {
"value": [
crumb_flag_group_open,
settings_base_origin_object,
settings_base_target_offset,
settings_base_draw_debug_lines,
settings_base_start_time,
settings_base_up_vector,
settings_base_aim_vector
],
"locales": {
"strings_us": "Base"
},
})
# squash and stretch
settings_squash_stretch_enable = Description({
"id": "SETTINGS_SQUASH_STRETCH_ENABLE",
"key": "BOOL",
"value": [
Assignment("ANIM", "OFF")
],
"locales": {
"strings_us": "Enable"
}
})
settings_squash_stretch_stretch_strength = Description({
"id": "SETTINGS_SQUASH_STRETCH_STRETCH_STRENGTH",
"key": "REAL",
"value": [
*crumb_percent_slider,
*crumb_percent_slider_limit_min
],
"locales": {
"strings_us": "Stretch Strength"
}
})
settings_squash_stretch_squash_strength = Description({
"id": "SETTINGS_SQUASH_STRETCH_SQUASH_STRENGTH",
"key": "REAL",
"value": [
*crumb_percent_slider,
*crumb_percent_slider_limit_min
],
"locales": {
"strings_us": "Squash Strength"
}
})
group_squash_stretch = Group("GROUP_SQUASH_STRETCH", {
"value": [
crumb_flag_group_open,
settings_squash_stretch_enable,
settings_squash_stretch_stretch_strength,
settings_squash_stretch_squash_strength
],
"locales": {
"strings_us": "Squash and Stretch"
}
})
# physics descriptions
settings_physics_stiffness = Description({
"id": "SETTINGS_PHYSICS_STIFFNESS",
"key": "REAL",
"value": crumb_percent_slider,
"locales": {
"strings_us": "Stiffness"
}
})
settings_physics_mass = Description({
"id": "SETTINGS_PHYSICS_MASS",
"key": "REAL",
"value": crumb_percent_slider,
"locales": {
"strings_us": "Mass"
}
})
settings_physics_damping = Description({
"id": "SETTINGS_PHYSICS_DAMPING",
"key": "REAL",
"value": crumb_percent_slider,
"locales": {
"strings_us": "Damping"
}
})
settings_physics_gravity = Description({
"id": "SETTINGS_PHYSICS_GRAVITY",
"key": "VECTOR",
"value": [
Assignment("UNIT", "METER")
],
"locales": {
"strings_us": "Gravity"
}
})
group_physics = Group("GROUP_PHYSICS", {
"value": [
crumb_flag_group_open,
settings_physics_stiffness,
settings_physics_mass,
settings_physics_damping,
settings_physics_gravity
],
"locales": {
"strings_us": "Base"
},
})
root = Container("Tjiggle", {
"value": [
Assignment("NAME", "Tjiggle"),
Assignment("INCLUDE", "Tbase"),
Assignment("INCLUDE", "Texpression"),
Group("GROUP_SETTINGS", {
"value": [
crumb_flag_group_open,
group_effect,
group_base,
group_squash_stretch,
group_physics
],
"locales": {
"strings_us": "Settings"
}
})
],
"locales": {
"strings_us": "Jiggle"
}
})
#----end_resource_section----
#----begin_id_section----
VECTOR_XPLUS = vector_xplus.GetId()
VECTOR_XMINUS = vector_xminus.GetId()
VECTOR_YPLUS = vector_yplus.GetId()
VECTOR_YMINUS = vector_yminus.GetId()
VECTOR_ZPLUS = vector_zplus.GetId()
VECTOR_ZMINUS = vector_zminus.GetId()
# effect ids
SETTINGS_EFFECT_STRENGTH = settings_effect_strength.GetId()
# base ids
SETTINGS_BASE_ORIGIN_OBJECT = settings_base_origin_object.GetId()
SETTINGS_BASE_START_TIME = settings_base_start_time.GetId()
SETTINGS_BASE_TARGET_OFFSET = settings_base_target_offset.GetId()
SETTINGS_BASE_UP_VECTOR = settings_base_up_vector.GetId()
SETTINGS_BASE_AIM_VECTOR = settings_base_aim_vector.GetId()
SETTINGS_BASE_DRAW_DEBUG_LINES = settings_base_draw_debug_lines.GetId()
# squash stretch ids
SETTINGS_SQUASH_STRETCH_ENABLE = settings_squash_stretch_enable.GetId()
SETTINGS_SQUASH_STRETCH_STRETCH_STRENGTH = settings_squash_stretch_stretch_strength.GetId()
SETTINGS_SQUASH_STRETCH_SQUASH_STRENGTH = settings_squash_stretch_squash_strength.GetId()
# physics ids
SETTINGS_PHYSICS_STIFFNESS = settings_physics_stiffness.GetId()
SETTINGS_PHYSICS_MASS = settings_physics_mass.GetId()
SETTINGS_PHYSICS_DAMPING = settings_physics_damping.GetId()
SETTINGS_PHYSICS_GRAVITY = settings_physics_gravity.GetId()
#----end_id_section----
class DataContainer(object):
def __init__(self, data):
self.data = data
@property
def strength(self):
return self.data[SETTINGS_EFFECT_STRENGTH]
@strength.setter
def strength(self, value):
self.data[SETTINGS_EFFECT_STRENGTH] = value
@property
def originObject(self):
return self.data[SETTINGS_BASE_ORIGIN_OBJECT]
@property
def targetOffset(self):
return self.data[SETTINGS_BASE_TARGET_OFFSET]
@targetOffset.setter
def targetOffset(self, value):
self.data[SETTINGS_BASE_TARGET_OFFSET] = value
@property
def drawDebugLines(self):
return self.data[SETTINGS_BASE_DRAW_DEBUG_LINES]
# time
@property
def startTime(self):
return self.data[SETTINGS_BASE_START_TIME]
@startTime.setter
def startTime(self, value):
self.data[SETTINGS_BASE_START_TIME] = value
# up vector
@property
def upVector(self):
return self.data[SETTINGS_BASE_UP_VECTOR]
@upVector.setter
def upVector(self, value):
self.data[SETTINGS_BASE_UP_VECTOR] = value
# aim vector
@property
def aimVector(self):
return self.data[SETTINGS_BASE_AIM_VECTOR]
@aimVector.setter
def aimVector(self, value):
self.data[SETTINGS_BASE_AIM_VECTOR] = value
# squash stretch
@property
def squashStretchEnable(self):
return self.data[SETTINGS_SQUASH_STRETCH_ENABLE]
@property
def squashStretchStretchStrength(self):
return self.data[SETTINGS_SQUASH_STRETCH_STRETCH_STRENGTH]
@squashStretchStretchStrength.setter
def squashStretchStretchStrength(self, value):
self.data[SETTINGS_SQUASH_STRETCH_STRETCH_STRENGTH] = value
@property
def squashStretchSquashStrength(self):
return self.data[SETTINGS_SQUASH_STRETCH_SQUASH_STRENGTH]
@squashStretchSquashStrength.setter
def squashStretchSquashStrength(self, value):
self.data[SETTINGS_SQUASH_STRETCH_SQUASH_STRENGTH] = value
# physics
@property
def stiffness(self):
return self.data[SETTINGS_PHYSICS_STIFFNESS]
@stiffness.setter
def stiffness(self, value):
self.data[SETTINGS_PHYSICS_STIFFNESS] = value
@property
def mass(self):
return self.data[SETTINGS_PHYSICS_MASS]
@mass.setter
def mass(self, value):
self.data[SETTINGS_PHYSICS_MASS] = value
@property
def damping(self):
return self.data[SETTINGS_PHYSICS_DAMPING]
@damping.setter
def damping(self, value):
self.data[SETTINGS_PHYSICS_DAMPING] = value
@property
def gravity(self):
return self.data[SETTINGS_PHYSICS_GRAVITY]
@gravity.setter
def gravity(self, value):
self.data[SETTINGS_PHYSICS_GRAVITY] = value
class Jiggle(c4d.plugins.TagData):
"""Jiggle"""
def Init(self, node):
"""
Called when Cinema 4D Initialize the TagData (used to define, default values)
:param node: The instance of the TagData.
:type node: c4d.GeListNode
:return: True on success, otherwise False.
"""
# data = node.GetDataInstance()
data = DataContainer(node.GetDataInstance())
data.strength = 1.0
data.resultRotation = c4d.Vector(0, 0, 0)
# time related
self.previousFrame = 0
data.targetOffset = c4d.Vector(0, 0, 100)
data.startTime = 0.0
# up vector
data.upVector = VECTOR_YPLUS
# aim vector
data.aimVector = VECTOR_ZPLUS
# squash stretch
data.squashStretchStretchStrength = 0.0
data.squashStretchSquashStrength = 0.0
# physics related
data.stiffness = 0.1
data.mass = 0.9
data.damping = 0.75
data.gravity = c4d.Vector(0, -981.0, 0)
self.Reset(node)
c4d.EventAdd()
return True
@classmethod
def GetFrame(cls, time, fps):
return time.GetFrame(fps)
@classmethod
def CalculateTargetPosition(cls, origin, offset):
if origin:
return offset * origin.GetMg()
return offset
def GetHandleCount(self, op):
"""
:param op: The host object of the tag.
:type op: c4d.BaseObject
:return:
"""
return 1
def GetHandle(self, op, i, info):
"""
:param op: The host object of the tag.
:type op: c4d.BaseObject
:param i: Index of handle
:type i: int
:param info: Info of handle
:type info: c4d.HandleInfo
:return:
"""
data = DataContainer(op.GetDataInstance())
info.position = Jiggle.CalculateTargetPosition(data.originObject, data.targetOffset)
info.type = c4d.HANDLECONSTRAINTTYPE_FREE
def SetHandle(self, op, i, p, info):
"""
:param op: The host object of the tag.
:type op: c4d.BaseObject
:param i: Index of handle
:type i: int
:param p: Handle Position
:type p: c4d.Vector
:param info: Info of handle
:type info: c4d.HandleInfo
:return:
"""
data = DataContainer(op.GetDataInstance())
data.targetOffset = p * ~data.originObject.GetMg()
def Execute(self, tag, doc, op, bt, priority, flags):
"""
Called by Cinema 4D at each Scene Execution, this is the place where calculation should take place.
:param tag: The instance of the TagData.
:type tag: c4d.BaseTag
:param doc: The host document of the tag's object.
:type doc: c4d.documents.BaseDocument
:param op: The host object of the tag.
:type op: c4d.BaseObject
:param bt: The Thread that execute the this TagData.
:type bt: c4d.threading.BaseThread
:param priority: Information about the execution priority of this TagData.
:type priority: EXECUTIONPRIORITY
:param flags: Information about when this TagData is executed.
:type flags: EXECUTIONFLAGS
:return:
"""
data = DataContainer(tag.GetDataInstance())
fps = doc.GetFps()
currentFrame = float(Jiggle.GetFrame(doc.GetTime(), fps))
originMatrix = data.originObject.GetMg()
originPosition = originMatrix.off
projectedPosition = Jiggle.CalculateTargetPosition(data.originObject, data.targetOffset)
if currentFrame > data.startTime:
# only update if current frame is an increment by 1 of previous frame
if currentFrame == self.previousFrame + 1.0:
self.Update(tag, doc, op)
else:
self.Reset(tag)
# blend position by strength
targetPosition = c4d.utils.MixVec(projectedPosition, self.position, data.strength)
# calculate matrix
# calculate aim vector
aim = c4d.Vector(targetPosition - originPosition).GetNormalized()
# change up vector position
if data.upVector == VECTOR_XPLUS:
up = originMatrix.MulV(c4d.Vector(1.0, 0, 0))
elif data.upVector == VECTOR_XMINUS:
up = originMatrix.MulV(c4d.Vector(-1.0, 0, 0))
elif data.upVector == VECTOR_YPLUS:
up = originMatrix.MulV(c4d.Vector(0, 1.0, 0))
elif data.upVector == VECTOR_YMINUS:
up = originMatrix.MulV(c4d.Vector(0, -1.0, 0))
elif data.upVector == VECTOR_ZPLUS:
up = originMatrix.MulV(c4d.Vector(0, 0, 1.0))
elif data.upVector == VECTOR_ZMINUS:
up = originMatrix.MulV(c4d.Vector(0, 0, -1.0))
side = up.Cross(aim)
# calculate squash strech
if data.squashStretchEnable:
distance = c4d.Vector(targetPosition - originPosition).GetLength()
maxDistance = data.targetOffset.GetLength()
relativeDistance = distance - maxDistance
try:
squashStretchBias = abs(relativeDistance) / maxDistance
except ZeroDivisionError:
squashStretchBias = 0.0
if relativeDistance > 0.0:
squashStretchBias = squashStretchBias * data.squashStretchStretchStrength
# stretch
aim = aim * (1.0 + squashStretchBias)
up = up * (1.0 - squashStretchBias)
side = side * (1.0 - squashStretchBias)
else:
squashStretchBias = squashStretchBias * data.squashStretchSquashStrength
# squash
aim = aim * (1.0 - squashStretchBias)
up = up * (1.0 + squashStretchBias)
side = side * (1.0 + squashStretchBias)
# change input order based on aim axis
if data.aimVector == VECTOR_XPLUS:
jiggleMatrix = c4d.Matrix(
originPosition,
aim,
up,
side
)
elif data.aimVector == VECTOR_XMINUS:
jiggleMatrix = c4d.Matrix(
originPosition,
-aim,
up,
side
)
elif data.aimVector == VECTOR_YPLUS:
jiggleMatrix = c4d.Matrix(
originPosition,
side,
aim,
up
)
elif data.aimVector == VECTOR_YMINUS:
jiggleMatrix = c4d.Matrix(
originPosition,
side,
-aim,
up
)
elif data.aimVector == VECTOR_ZPLUS:
jiggleMatrix = c4d.Matrix(
originPosition,
side,
up,
aim
)
elif data.aimVector == VECTOR_ZMINUS:
jiggleMatrix = c4d.Matrix(
originPosition,
side,
up,
-aim
)
op.SetMg(jiggleMatrix)
# finish execute
self.previousFrame = currentFrame
return c4d.EXECUTIONRESULT_OK
def Draw(self, tag, op, bd, bh):
data = DataContainer(tag.GetDataInstance())
drawpass = bd.GetDrawPass()
if not data.drawDebugLines:
return True
# draw target line
targetPosition = Jiggle.CalculateTargetPosition(data.originObject, data.targetOffset)
bd.SetPen(c4d.GetViewColor(c4d.VIEWCOLOR_XAXIS))
bd.DrawLine(
data.originObject.GetMg().off,
targetPosition,
0
)
# draw connection
bd.SetPen(c4d.GetViewColor(c4d.VIEWCOLOR_YAXIS))
bd.DrawLine(
targetPosition,
self.position,
0
)
# draw current target
bd.SetPen(c4d.GetViewColor(c4d.VIEWCOLOR_ZAXIS))
bd.DrawLine(data.originObject.GetMg().off, self.position, 0)
# bd.SetMatrix_Screen()
# circlePosition = bd.WS(targetPosition)
# bd.DrawCircle2D(circlePosition.x, circlePosition.y, 5.0)
if drawpass == c4d.DRAWPASS_HANDLES:
bd.SetMatrix_Screen()
handleScreenSpace = bd.WS(Jiggle.CalculateTargetPosition(data.originObject, data.targetOffset))
bd.SetPen(c4d.GetViewColor(c4d.VIEWCOLOR_OBJECTHIGHLIGHT))
bd.DrawCircle2D(handleScreenSpace.x, handleScreenSpace.y, 8)
        c4d.plugins.TagData.Draw(self, tag, op, bd, bh)
return True
def Reset(self, tag):
"""
Update loop.
:param tag: The instance of the TagData.
:type tag: c4d.BaseTag
:return:
"""
# print("Reset")
data = DataContainer(tag.GetDataInstance())
self.force = c4d.Vector(0, 0, 0)
self.acceleration = c4d.Vector(0, 0, 0)
self.velocity = c4d.Vector(0, 0, 0)
self.position = Jiggle.CalculateTargetPosition(data.originObject, data.targetOffset)
def Update(self, tag, doc, op):
"""
Update loop.
:param tag: The instance of the TagData.
:type tag: c4d.BaseTag
:param doc: The host document of the tag's object.
:type doc: c4d.documents.BaseDocument
:param op: The host object of the tag.
:type op: c4d.BaseObject
:return:
"""
# print("Update")
data = DataContainer(tag.GetDataInstance())
targetPosition = Jiggle.CalculateTargetPosition(data.originObject, data.targetOffset)
direction = targetPosition - self.position
#direction = c4d.Vector(0, 0, 0)
# calculate spring
self.force = (direction * data.stiffness) + (data.gravity / 10.0 / float(doc.GetFps()))
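        # (added comments) force = spring pull toward the target scaled by
        # stiffness, plus a per-frame share of gravity; the lines below divide
        # by mass, damp the velocity and integrate the jiggle point's position.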
self.acceleration = self.force / data.mass
self.velocity = self.velocity + (self.acceleration * (1.0 - data.damping))
self.position = self.position + self.velocity + self.force
if __name__ == "__main__":
# Retrieves the icon path
directory, _ = os.path.split(__file__)
fn = os.path.join(directory, "res", "tjiggle.png")
# Creates a BaseBitmap
bmp = c4d.bitmaps.BaseBitmap()
if bmp is None:
raise MemoryError("Failed to create a BaseBitmap.")
# Init the BaseBitmap with the icon
if bmp.InitWith(fn)[0] != c4d.IMAGERESULT_OK:
raise MemoryError("Failed to initialize the BaseBitmap.")
c4d.plugins.RegisterTagPlugin(id=PLUGIN_ID,
str="Jiggle",
info=c4d.TAG_EXPRESSION | c4d.TAG_VISIBLE | c4d.TAG_IMPLEMENTS_DRAW_FUNCTION,
g=Jiggle,
description="Tjiggle",
icon=bmp
)
|
[
"bootstrap4c4d.Assignment",
"bootstrap4c4d.Description",
"c4d.EventAdd",
"c4d.GetViewColor",
"bootstrap4c4d.Group",
"c4d.Vector",
"c4d.bitmaps.BaseBitmap",
"c4d.plugins.RegisterTagPlugin",
"c4d.Matrix",
"os.path.split",
"os.path.join",
"c4d.utils.MixVec"
] |
[((580, 604), 'bootstrap4c4d.Assignment', 'Assignment', (['"""DEFAULT"""', '(1)'], {}), "('DEFAULT', 1)\n", (590, 604), False, 'from bootstrap4c4d import Description, Assignment, Group, Container\n'), ((633, 844), 'bootstrap4c4d.Description', 'Description', (["{'id': 'SETTINGS_EFFECT_STRENGTH', 'key': 'REAL', 'value': [*\n crumb_percent_slider, *crumb_percent_slider_limit_min, *\n crumb_percent_slider_limit_max], 'locales': {'strings_us': 'Strength'}}"], {}), "({'id': 'SETTINGS_EFFECT_STRENGTH', 'key': 'REAL', 'value': [*\n crumb_percent_slider, *crumb_percent_slider_limit_min, *\n crumb_percent_slider_limit_max], 'locales': {'strings_us': 'Strength'}})\n", (644, 844), False, 'from bootstrap4c4d import Description, Assignment, Group, Container\n'), ((1954, 2033), 'bootstrap4c4d.Assignment', 'Assignment', (['None', 'None', "{'id': 'VECTOR_XPLUS', 'locales': {'strings_us': 'X+'}}"], {}), "(None, None, {'id': 'VECTOR_XPLUS', 'locales': {'strings_us': 'X+'}})\n", (1964, 2033), False, 'from bootstrap4c4d import Description, Assignment, Group, Container\n'), ((2074, 2159), 'bootstrap4c4d.Assignment', 'Assignment', (['None', 'None', "{'id': 'VECTOR_XMINUS', 'locales': {'strings_us': 'X-'}}"], {}), "(None, None, {'id': 'VECTOR_XMINUS', 'locales': {'strings_us': 'X-'}}\n )\n", (2084, 2159), False, 'from bootstrap4c4d import Description, Assignment, Group, Container\n'), ((2194, 2273), 'bootstrap4c4d.Assignment', 'Assignment', (['None', 'None', "{'id': 'VECTOR_YPLUS', 'locales': {'strings_us': 'Y+'}}"], {}), "(None, None, {'id': 'VECTOR_YPLUS', 'locales': {'strings_us': 'Y+'}})\n", (2204, 2273), False, 'from bootstrap4c4d import Description, Assignment, Group, Container\n'), ((2314, 2399), 'bootstrap4c4d.Assignment', 'Assignment', (['None', 'None', "{'id': 'VECTOR_YMINUS', 'locales': {'strings_us': 'Y-'}}"], {}), "(None, None, {'id': 'VECTOR_YMINUS', 'locales': {'strings_us': 'Y-'}}\n )\n", (2324, 2399), False, 'from bootstrap4c4d import Description, Assignment, Group, Container\n'), ((2434, 2513), 'bootstrap4c4d.Assignment', 'Assignment', (['None', 'None', "{'id': 'VECTOR_ZPLUS', 'locales': {'strings_us': 'Z+'}}"], {}), "(None, None, {'id': 'VECTOR_ZPLUS', 'locales': {'strings_us': 'Z+'}})\n", (2444, 2513), False, 'from bootstrap4c4d import Description, Assignment, Group, Container\n'), ((2554, 2639), 'bootstrap4c4d.Assignment', 'Assignment', (['None', 'None', "{'id': 'VECTOR_ZMINUS', 'locales': {'strings_us': 'Z-'}}"], {}), "(None, None, {'id': 'VECTOR_ZMINUS', 'locales': {'strings_us': 'Z-'}}\n )\n", (2564, 2639), False, 'from bootstrap4c4d import Description, Assignment, Group, Container\n'), ((3506, 3630), 'bootstrap4c4d.Group', 'Group', (['"""GROUP_EFFECT"""', "{'value': [crumb_flag_group_open, settings_effect_strength], 'locales': {\n 'strings_us': 'Effect'}}"], {}), "('GROUP_EFFECT', {'value': [crumb_flag_group_open,\n settings_effect_strength], 'locales': {'strings_us': 'Effect'}})\n", (3511, 3630), False, 'from bootstrap4c4d import Description, Assignment, Group, Container\n'), ((3687, 3961), 'bootstrap4c4d.Group', 'Group', (['"""GROUP_BASE"""', "{'value': [crumb_flag_group_open, settings_base_origin_object,\n settings_base_target_offset, settings_base_draw_debug_lines,\n settings_base_start_time, settings_base_up_vector,\n settings_base_aim_vector], 'locales': {'strings_us': 'Base'}}"], {}), "('GROUP_BASE', {'value': [crumb_flag_group_open,\n settings_base_origin_object, settings_base_target_offset,\n settings_base_draw_debug_lines, settings_base_start_time,\n settings_base_up_vector, 
settings_base_aim_vector], 'locales': {\n 'strings_us': 'Base'}})\n", (3692, 3961), False, 'from bootstrap4c4d import Description, Assignment, Group, Container\n'), ((4321, 4526), 'bootstrap4c4d.Description', 'Description', (["{'id': 'SETTINGS_SQUASH_STRETCH_STRETCH_STRENGTH', 'key': 'REAL', 'value':\n [*crumb_percent_slider, *crumb_percent_slider_limit_min], 'locales': {\n 'strings_us': 'Stretch Strength'}}"], {}), "({'id': 'SETTINGS_SQUASH_STRETCH_STRETCH_STRENGTH', 'key':\n 'REAL', 'value': [*crumb_percent_slider, *\n crumb_percent_slider_limit_min], 'locales': {'strings_us':\n 'Stretch Strength'}})\n", (4332, 4526), False, 'from bootstrap4c4d import Description, Assignment, Group, Container\n'), ((4611, 4809), 'bootstrap4c4d.Description', 'Description', (["{'id': 'SETTINGS_SQUASH_STRETCH_SQUASH_STRENGTH', 'key': 'REAL', 'value': [\n *crumb_percent_slider, *crumb_percent_slider_limit_min], 'locales': {\n 'strings_us': 'Squash Strength'}}"], {}), "({'id': 'SETTINGS_SQUASH_STRETCH_SQUASH_STRENGTH', 'key': 'REAL',\n 'value': [*crumb_percent_slider, *crumb_percent_slider_limit_min],\n 'locales': {'strings_us': 'Squash Strength'}})\n", (4622, 4809), False, 'from bootstrap4c4d import Description, Assignment, Group, Container\n'), ((4880, 5125), 'bootstrap4c4d.Group', 'Group', (['"""GROUP_SQUASH_STRETCH"""', "{'value': [crumb_flag_group_open, settings_squash_stretch_enable,\n settings_squash_stretch_stretch_strength,\n settings_squash_stretch_squash_strength], 'locales': {'strings_us':\n 'Squash and Stretch'}}"], {}), "('GROUP_SQUASH_STRETCH', {'value': [crumb_flag_group_open,\n settings_squash_stretch_enable,\n settings_squash_stretch_stretch_strength,\n settings_squash_stretch_squash_strength], 'locales': {'strings_us':\n 'Squash and Stretch'}})\n", (4885, 5125), False, 'from bootstrap4c4d import Description, Assignment, Group, Container\n'), ((5225, 5364), 'bootstrap4c4d.Description', 'Description', (["{'id': 'SETTINGS_PHYSICS_STIFFNESS', 'key': 'REAL', 'value':\n crumb_percent_slider, 'locales': {'strings_us': 'Stiffness'}}"], {}), "({'id': 'SETTINGS_PHYSICS_STIFFNESS', 'key': 'REAL', 'value':\n crumb_percent_slider, 'locales': {'strings_us': 'Stiffness'}})\n", (5236, 5364), False, 'from bootstrap4c4d import Description, Assignment, Group, Container\n'), ((5418, 5547), 'bootstrap4c4d.Description', 'Description', (["{'id': 'SETTINGS_PHYSICS_MASS', 'key': 'REAL', 'value':\n crumb_percent_slider, 'locales': {'strings_us': 'Mass'}}"], {}), "({'id': 'SETTINGS_PHYSICS_MASS', 'key': 'REAL', 'value':\n crumb_percent_slider, 'locales': {'strings_us': 'Mass'}})\n", (5429, 5547), False, 'from bootstrap4c4d import Description, Assignment, Group, Container\n'), ((5604, 5739), 'bootstrap4c4d.Description', 'Description', (["{'id': 'SETTINGS_PHYSICS_DAMPING', 'key': 'REAL', 'value':\n crumb_percent_slider, 'locales': {'strings_us': 'Damping'}}"], {}), "({'id': 'SETTINGS_PHYSICS_DAMPING', 'key': 'REAL', 'value':\n crumb_percent_slider, 'locales': {'strings_us': 'Damping'}})\n", (5615, 5739), False, 'from bootstrap4c4d import Description, Assignment, Group, Container\n'), ((6002, 6211), 'bootstrap4c4d.Group', 'Group', (['"""GROUP_PHYSICS"""', "{'value': [crumb_flag_group_open, settings_physics_stiffness,\n settings_physics_mass, settings_physics_damping,\n settings_physics_gravity], 'locales': {'strings_us': 'Base'}}"], {}), "('GROUP_PHYSICS', {'value': [crumb_flag_group_open,\n settings_physics_stiffness, settings_physics_mass,\n settings_physics_damping, settings_physics_gravity], 'locales': {\n 
'strings_us': 'Base'}})\n", (6007, 6211), False, 'from bootstrap4c4d import Description, Assignment, Group, Container\n'), ((249, 272), 'bootstrap4c4d.Assignment', 'Assignment', (['"""STEP"""', '(1.0)'], {}), "('STEP', 1.0)\n", (259, 272), False, 'from bootstrap4c4d import Description, Assignment, Group, Container\n'), ((278, 307), 'bootstrap4c4d.Assignment', 'Assignment', (['"""UNIT"""', '"""PERCENT"""'], {}), "('UNIT', 'PERCENT')\n", (288, 307), False, 'from bootstrap4c4d import Description, Assignment, Group, Container\n'), ((313, 350), 'bootstrap4c4d.Assignment', 'Assignment', (['"""CUSTOMGUI"""', '"""REALSLIDER"""'], {}), "('CUSTOMGUI', 'REALSLIDER')\n", (323, 350), False, 'from bootstrap4c4d import Description, Assignment, Group, Container\n'), ((356, 384), 'bootstrap4c4d.Assignment', 'Assignment', (['"""MINSLIDER"""', '(0.0)'], {}), "('MINSLIDER', 0.0)\n", (366, 384), False, 'from bootstrap4c4d import Description, Assignment, Group, Container\n'), ((390, 420), 'bootstrap4c4d.Assignment', 'Assignment', (['"""MAXSLIDER"""', '(100.0)'], {}), "('MAXSLIDER', 100.0)\n", (400, 420), False, 'from bootstrap4c4d import Description, Assignment, Group, Container\n'), ((463, 485), 'bootstrap4c4d.Assignment', 'Assignment', (['"""MIN"""', '(0.0)'], {}), "('MIN', 0.0)\n", (473, 485), False, 'from bootstrap4c4d import Description, Assignment, Group, Container\n'), ((528, 552), 'bootstrap4c4d.Assignment', 'Assignment', (['"""MAX"""', '(100.0)'], {}), "('MAX', 100.0)\n", (538, 552), False, 'from bootstrap4c4d import Description, Assignment, Group, Container\n'), ((21058, 21081), 'os.path.split', 'os.path.split', (['__file__'], {}), '(__file__)\n', (21071, 21081), False, 'import os\n'), ((21091, 21136), 'os.path.join', 'os.path.join', (['directory', '"""res"""', '"""tjiggle.png"""'], {}), "(directory, 'res', 'tjiggle.png')\n", (21103, 21136), False, 'import os\n'), ((21175, 21199), 'c4d.bitmaps.BaseBitmap', 'c4d.bitmaps.BaseBitmap', ([], {}), '()\n', (21197, 21199), False, 'import c4d\n'), ((21442, 21630), 'c4d.plugins.RegisterTagPlugin', 'c4d.plugins.RegisterTagPlugin', ([], {'id': 'PLUGIN_ID', 'str': '"""Jiggle"""', 'info': '(c4d.TAG_EXPRESSION | c4d.TAG_VISIBLE | c4d.TAG_IMPLEMENTS_DRAW_FUNCTION)', 'g': 'Jiggle', 'description': '"""Tjiggle"""', 'icon': 'bmp'}), "(id=PLUGIN_ID, str='Jiggle', info=c4d.\n TAG_EXPRESSION | c4d.TAG_VISIBLE | c4d.TAG_IMPLEMENTS_DRAW_FUNCTION, g=\n Jiggle, description='Tjiggle', icon=bmp)\n", (21471, 21630), False, 'import c4d\n'), ((11424, 11443), 'c4d.Vector', 'c4d.Vector', (['(0)', '(0)', '(0)'], {}), '(0, 0, 0)\n', (11434, 11443), False, 'import c4d\n'), ((11527, 11548), 'c4d.Vector', 'c4d.Vector', (['(0)', '(0)', '(100)'], {}), '(0, 0, 100)\n', (11537, 11548), False, 'import c4d\n'), ((11948, 11972), 'c4d.Vector', 'c4d.Vector', (['(0)', '(-981.0)', '(0)'], {}), '(0, -981.0, 0)\n', (11958, 11972), False, 'import c4d\n'), ((12008, 12022), 'c4d.EventAdd', 'c4d.EventAdd', ([], {}), '()\n', (12020, 12022), False, 'import c4d\n'), ((14880, 14945), 'c4d.utils.MixVec', 'c4d.utils.MixVec', (['projectedPosition', 'self.position', 'data.strength'], {}), '(projectedPosition, self.position, data.strength)\n', (14896, 14945), False, 'import c4d\n'), ((19818, 19837), 'c4d.Vector', 'c4d.Vector', (['(0)', '(0)', '(0)'], {}), '(0, 0, 0)\n', (19828, 19837), False, 'import c4d\n'), ((19866, 19885), 'c4d.Vector', 'c4d.Vector', (['(0)', '(0)', '(0)'], {}), '(0, 0, 0)\n', (19876, 19885), False, 'import c4d\n'), ((19910, 19929), 'c4d.Vector', 'c4d.Vector', (['(0)', '(0)', '(0)'], {}), '(0, 
0, 0)\n', (19920, 19929), False, 'import c4d\n'), ((1025, 1050), 'bootstrap4c4d.Assignment', 'Assignment', (['"""ANIM"""', '"""OFF"""'], {}), "('ANIM', 'OFF')\n", (1035, 1050), False, 'from bootstrap4c4d import Description, Assignment, Group, Container\n'), ((1380, 1406), 'bootstrap4c4d.Assignment', 'Assignment', (['"""UNIT"""', '"""TIME"""'], {}), "('UNIT', 'TIME')\n", (1390, 1406), False, 'from bootstrap4c4d import Description, Assignment, Group, Container\n'), ((1605, 1632), 'bootstrap4c4d.Assignment', 'Assignment', (['"""UNIT"""', '"""METER"""'], {}), "('UNIT', 'METER')\n", (1615, 1632), False, 'from bootstrap4c4d import Description, Assignment, Group, Container\n'), ((1838, 1863), 'bootstrap4c4d.Assignment', 'Assignment', (['"""ANIM"""', '"""OFF"""'], {}), "('ANIM', 'OFF')\n", (1848, 1863), False, 'from bootstrap4c4d import Description, Assignment, Group, Container\n'), ((2779, 2804), 'bootstrap4c4d.Assignment', 'Assignment', (['"""ANIM"""', '"""OFF"""'], {}), "('ANIM', 'OFF')\n", (2789, 2804), False, 'from bootstrap4c4d import Description, Assignment, Group, Container\n'), ((2814, 2926), 'bootstrap4c4d.Assignment', 'Assignment', (['"""CYCLE"""', '[vector_xplus, vector_xminus, vector_yplus, vector_yminus, vector_zplus,\n vector_zminus]'], {}), "('CYCLE', [vector_xplus, vector_xminus, vector_yplus,\n vector_yminus, vector_zplus, vector_zminus])\n", (2824, 2926), False, 'from bootstrap4c4d import Description, Assignment, Group, Container\n'), ((3195, 3220), 'bootstrap4c4d.Assignment', 'Assignment', (['"""ANIM"""', '"""OFF"""'], {}), "('ANIM', 'OFF')\n", (3205, 3220), False, 'from bootstrap4c4d import Description, Assignment, Group, Container\n'), ((3230, 3342), 'bootstrap4c4d.Assignment', 'Assignment', (['"""CYCLE"""', '[vector_xplus, vector_xminus, vector_yplus, vector_yminus, vector_zplus,\n vector_zminus]'], {}), "('CYCLE', [vector_xplus, vector_xminus, vector_yplus,\n vector_yminus, vector_zplus, vector_zminus])\n", (3240, 3342), False, 'from bootstrap4c4d import Description, Assignment, Group, Container\n'), ((4187, 4212), 'bootstrap4c4d.Assignment', 'Assignment', (['"""ANIM"""', '"""OFF"""'], {}), "('ANIM', 'OFF')\n", (4197, 4212), False, 'from bootstrap4c4d import Description, Assignment, Group, Container\n'), ((5892, 5919), 'bootstrap4c4d.Assignment', 'Assignment', (['"""UNIT"""', '"""METER"""'], {}), "('UNIT', 'METER')\n", (5902, 5919), False, 'from bootstrap4c4d import Description, Assignment, Group, Container\n'), ((6324, 6353), 'bootstrap4c4d.Assignment', 'Assignment', (['"""NAME"""', '"""Tjiggle"""'], {}), "('NAME', 'Tjiggle')\n", (6334, 6353), False, 'from bootstrap4c4d import Description, Assignment, Group, Container\n'), ((6363, 6393), 'bootstrap4c4d.Assignment', 'Assignment', (['"""INCLUDE"""', '"""Tbase"""'], {}), "('INCLUDE', 'Tbase')\n", (6373, 6393), False, 'from bootstrap4c4d import Description, Assignment, Group, Container\n'), ((6403, 6439), 'bootstrap4c4d.Assignment', 'Assignment', (['"""INCLUDE"""', '"""Texpression"""'], {}), "('INCLUDE', 'Texpression')\n", (6413, 6439), False, 'from bootstrap4c4d import Description, Assignment, Group, Container\n'), ((6449, 6619), 'bootstrap4c4d.Group', 'Group', (['"""GROUP_SETTINGS"""', "{'value': [crumb_flag_group_open, group_effect, group_base,\n group_squash_stretch, group_physics], 'locales': {'strings_us': 'Settings'}\n }"], {}), "('GROUP_SETTINGS', {'value': [crumb_flag_group_open, group_effect,\n group_base, group_squash_stretch, group_physics], 'locales': {\n 'strings_us': 'Settings'}})\n", (6454, 6619), False, 'from 
bootstrap4c4d import Description, Assignment, Group, Container\n'), ((16925, 16966), 'c4d.Matrix', 'c4d.Matrix', (['originPosition', 'aim', 'up', 'side'], {}), '(originPosition, aim, up, side)\n', (16935, 16966), False, 'import c4d\n'), ((18486, 18523), 'c4d.GetViewColor', 'c4d.GetViewColor', (['c4d.VIEWCOLOR_XAXIS'], {}), '(c4d.VIEWCOLOR_XAXIS)\n', (18502, 18523), False, 'import c4d\n'), ((18686, 18723), 'c4d.GetViewColor', 'c4d.GetViewColor', (['c4d.VIEWCOLOR_YAXIS'], {}), '(c4d.VIEWCOLOR_YAXIS)\n', (18702, 18723), False, 'import c4d\n'), ((18882, 18919), 'c4d.GetViewColor', 'c4d.GetViewColor', (['c4d.VIEWCOLOR_ZAXIS'], {}), '(c4d.VIEWCOLOR_ZAXIS)\n', (18898, 18919), False, 'import c4d\n'), ((15019, 15062), 'c4d.Vector', 'c4d.Vector', (['(targetPosition - originPosition)'], {}), '(targetPosition - originPosition)\n', (15029, 15062), False, 'import c4d\n'), ((15193, 15214), 'c4d.Vector', 'c4d.Vector', (['(1.0)', '(0)', '(0)'], {}), '(1.0, 0, 0)\n', (15203, 15214), False, 'import c4d\n'), ((17118, 17160), 'c4d.Matrix', 'c4d.Matrix', (['originPosition', '(-aim)', 'up', 'side'], {}), '(originPosition, -aim, up, side)\n', (17128, 17160), False, 'import c4d\n'), ((19349, 19396), 'c4d.GetViewColor', 'c4d.GetViewColor', (['c4d.VIEWCOLOR_OBJECTHIGHLIGHT'], {}), '(c4d.VIEWCOLOR_OBJECTHIGHLIGHT)\n', (19365, 19396), False, 'import c4d\n'), ((15296, 15318), 'c4d.Vector', 'c4d.Vector', (['(-1.0)', '(0)', '(0)'], {}), '(-1.0, 0, 0)\n', (15306, 15318), False, 'import c4d\n'), ((15857, 15900), 'c4d.Vector', 'c4d.Vector', (['(targetPosition - originPosition)'], {}), '(targetPosition - originPosition)\n', (15867, 15900), False, 'import c4d\n'), ((17311, 17352), 'c4d.Matrix', 'c4d.Matrix', (['originPosition', 'side', 'aim', 'up'], {}), '(originPosition, side, aim, up)\n', (17321, 17352), False, 'import c4d\n'), ((1143, 1168), 'bootstrap4c4d.Assignment', 'Assignment', (['"""Obase"""', 'None'], {}), "('Obase', None)\n", (1153, 1168), False, 'from bootstrap4c4d import Description, Assignment, Group, Container\n'), ((15399, 15420), 'c4d.Vector', 'c4d.Vector', (['(0)', '(1.0)', '(0)'], {}), '(0, 1.0, 0)\n', (15409, 15420), False, 'import c4d\n'), ((17504, 17546), 'c4d.Matrix', 'c4d.Matrix', (['originPosition', 'side', '(-aim)', 'up'], {}), '(originPosition, side, -aim, up)\n', (17514, 17546), False, 'import c4d\n'), ((15502, 15524), 'c4d.Vector', 'c4d.Vector', (['(0)', '(-1.0)', '(0)'], {}), '(0, -1.0, 0)\n', (15512, 15524), False, 'import c4d\n'), ((17697, 17738), 'c4d.Matrix', 'c4d.Matrix', (['originPosition', 'side', 'up', 'aim'], {}), '(originPosition, side, up, aim)\n', (17707, 17738), False, 'import c4d\n'), ((15605, 15626), 'c4d.Vector', 'c4d.Vector', (['(0)', '(0)', '(1.0)'], {}), '(0, 0, 1.0)\n', (15615, 15626), False, 'import c4d\n'), ((17890, 17932), 'c4d.Matrix', 'c4d.Matrix', (['originPosition', 'side', 'up', '(-aim)'], {}), '(originPosition, side, up, -aim)\n', (17900, 17932), False, 'import c4d\n'), ((15708, 15730), 'c4d.Vector', 'c4d.Vector', (['(0)', '(0)', '(-1.0)'], {}), '(0, 0, -1.0)\n', (15718, 15730), False, 'import c4d\n')]
|
from itertools import count
from utils import pentagonal
def pentagonal_number(index: int) -> int:
return index * (3 * index - 1) // 2
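# Worked example (added for illustration): the formula above gives
# 1, 5, 12, 22, 35 for index = 1..5, the first five pentagonal numbers.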
# TODO: replace this brute-force search with something more efficient
def pentagonal_numbers(offset: int) -> int:
for j in count(1):
p_j = pentagonal_number(j)
for s in range(j + 1, j + offset):
p_s = pentagonal_number(s)
p_k = p_s - p_j
p_d = p_k - p_j
if pentagonal(p_k) and pentagonal(p_d):
break
else:
continue
return p_k - p_j
assert pentagonal_numbers(offset=10_000) == 5_482_660
|
[
"itertools.count",
"utils.pentagonal"
] |
[((257, 265), 'itertools.count', 'count', (['(1)'], {}), '(1)\n', (262, 265), False, 'from itertools import count\n'), ((456, 471), 'utils.pentagonal', 'pentagonal', (['p_k'], {}), '(p_k)\n', (466, 471), False, 'from utils import pentagonal\n'), ((476, 491), 'utils.pentagonal', 'pentagonal', (['p_d'], {}), '(p_d)\n', (486, 491), False, 'from utils import pentagonal\n')]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-11-14 02:28
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('api', '0008_merge_20161114_0228'),
]
operations = [
migrations.AlterField(
model_name='volunteer',
name='email',
field=models.CharField(max_length=50),
),
migrations.AlterField(
model_name='volunteer',
name='jacket',
field=models.CharField(max_length=30),
),
migrations.AlterField(
model_name='volunteer',
name='years_of_service',
field=models.CharField(max_length=3),
),
]
|
[
"django.db.models.CharField"
] |
[((400, 431), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (416, 431), False, 'from django.db import migrations, models\n'), ((556, 587), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(30)'}), '(max_length=30)\n', (572, 587), False, 'from django.db import migrations, models\n'), ((722, 752), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(3)'}), '(max_length=3)\n', (738, 752), False, 'from django.db import migrations, models\n')]
|
from chainer_transformer.functions import generate_positional_encodings
import pytest
def test_generate_positional_encoding():
start = 0
end = 100
dim = 256
l = end - start
output = generate_positional_encodings(start, end, dim)
assert output.shape == (l, dim)
|
[
"chainer_transformer.functions.generate_positional_encodings"
] |
[((206, 252), 'chainer_transformer.functions.generate_positional_encodings', 'generate_positional_encodings', (['start', 'end', 'dim'], {}), '(start, end, dim)\n', (235, 252), False, 'from chainer_transformer.functions import generate_positional_encodings\n')]
|
from abc import ABCMeta, abstractmethod
try:
from flan import istruthy, error, info
except:
from flan.flan import istruthy, error, info
pass
import settings
import os
import threading
import _thread as thread
def _timeout(exportname):
error('Flan->%s import timed out' % exportname)
thread.interrupt_main() # raises KeyboardInterrupt
def timeout_after(s):
"""
Use as decorator to exit process if function takes longer than s seconds
"""
def outer(fn):
def inner(*args, **kwargs):
x = fn
timer = threading.Timer(s, _timeout, args=[fn.__module__])
timer.start()
try:
result = fn(*args, **kwargs)
finally:
timer.cancel()
return result
return inner
return outer
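# Usage sketch (added, illustrative only; the decorated function is hypothetical):
# @timeout_after(30)
# def export_batch(records):
#     ...  # interrupted via KeyboardInterrupt if it runs longer than 30 seconds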
class FlanIntegration:
__metaclass__ = ABCMeta
def __init__(self, name, meta, config):
self.name = name
self.meta = meta
self.config = None
self.loglevel = None
self.haltonerror = None
self.version = settings.__VERSION__
def logerr(self, err):
if self.loglevel == "errors" or self.haltonerror:
error('Flan->%s exports failed: %s' % (self.name, err))
if self.haltonerror:
os._exit(1)
return
def loginfo(self, msg):
if self.loglevel == "info":
info(msg)
return
@staticmethod
def istruthy(val):
return istruthy(val)
def _getsetting(self, name, erroronnone=True, checkenv=False, defaultvalue=None):
val = defaultvalue
try:
if checkenv:
val = os.environ[name.upper()]
except KeyError:
pass
if not val:
ln = name.lower()
if ln in self.config:
val = self.config[ln]
if not val and erroronnone:
self.logerr('Flan->%s config failed: no %s defined in the environment or passed to Flan.' % (self.name, name))
return val
|
[
"flan.flan.istruthy",
"threading.Timer",
"flan.flan.info",
"os._exit",
"flan.flan.error",
"_thread.interrupt_main"
] |
[((253, 300), 'flan.flan.error', 'error', (["('Flan->%s import timed out' % exportname)"], {}), "('Flan->%s import timed out' % exportname)\n", (258, 300), False, 'from flan.flan import istruthy, error, info\n'), ((305, 328), '_thread.interrupt_main', 'thread.interrupt_main', ([], {}), '()\n', (326, 328), True, 'import _thread as thread\n'), ((1484, 1497), 'flan.flan.istruthy', 'istruthy', (['val'], {}), '(val)\n', (1492, 1497), False, 'from flan.flan import istruthy, error, info\n'), ((568, 618), 'threading.Timer', 'threading.Timer', (['s', '_timeout'], {'args': '[fn.__module__]'}), '(s, _timeout, args=[fn.__module__])\n', (583, 618), False, 'import threading\n'), ((1201, 1256), 'flan.flan.error', 'error', (["('Flan->%s exports failed: %s' % (self.name, err))"], {}), "('Flan->%s exports failed: %s' % (self.name, err))\n", (1206, 1256), False, 'from flan.flan import istruthy, error, info\n'), ((1298, 1309), 'os._exit', 'os._exit', (['(1)'], {}), '(1)\n', (1306, 1309), False, 'import os\n'), ((1402, 1411), 'flan.flan.info', 'info', (['msg'], {}), '(msg)\n', (1406, 1411), False, 'from flan.flan import istruthy, error, info\n')]
|
from torch import nn
import torch.nn.functional as F
class LeNet(nn.Module):
"""LeNet-like network for tests with MNIST (28x28)."""
def __init__(self, in_channels=1, num_classes=10, **kwargs):
super().__init__()
# main part of the network
self.conv1 = nn.Conv2d(in_channels, 6, 5)
self.conv2 = nn.Conv2d(6, 16, 5)
self.fc1 = nn.Linear(16 * 16, 120)
self.fc2 = nn.Linear(120, 84)
# last classifier layer (head) with as many outputs as classes
self.fc = nn.Linear(84, num_classes)
# and `head_var` with the name of the head, so it can be removed when doing incremental learning experiments
self.head_var = 'fc'
def forward(self, x):
out = F.relu(self.conv1(x))
out = F.max_pool2d(out, 2)
out = F.relu(self.conv2(out))
out = F.max_pool2d(out, 2)
out = out.view(out.size(0), -1)
out = F.relu(self.fc1(out))
out = F.relu(self.fc2(out))
out = self.fc(out)
return out
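# Shape check / usage sketch (added, illustrative): for a 28x28 MNIST input the
# spatial sizes go 28 -> 24 (conv 5x5) -> 12 (pool) -> 8 (conv 5x5) -> 4 (pool),
# so the flattened features are 16 * 4 * 4 = 256 = 16 * 16, matching fc1.
#   import torch
#   net = LeNet()
#   logits = net(torch.randn(1, 1, 28, 28))  # -> shape (1, 10)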
|
[
"torch.nn.Conv2d",
"torch.nn.functional.max_pool2d",
"torch.nn.Linear"
] |
[((287, 315), 'torch.nn.Conv2d', 'nn.Conv2d', (['in_channels', '(6)', '(5)'], {}), '(in_channels, 6, 5)\n', (296, 315), False, 'from torch import nn\n'), ((337, 356), 'torch.nn.Conv2d', 'nn.Conv2d', (['(6)', '(16)', '(5)'], {}), '(6, 16, 5)\n', (346, 356), False, 'from torch import nn\n'), ((376, 399), 'torch.nn.Linear', 'nn.Linear', (['(16 * 16)', '(120)'], {}), '(16 * 16, 120)\n', (385, 399), False, 'from torch import nn\n'), ((419, 437), 'torch.nn.Linear', 'nn.Linear', (['(120)', '(84)'], {}), '(120, 84)\n', (428, 437), False, 'from torch import nn\n'), ((528, 554), 'torch.nn.Linear', 'nn.Linear', (['(84)', 'num_classes'], {}), '(84, num_classes)\n', (537, 554), False, 'from torch import nn\n'), ((778, 798), 'torch.nn.functional.max_pool2d', 'F.max_pool2d', (['out', '(2)'], {}), '(out, 2)\n', (790, 798), True, 'import torch.nn.functional as F\n'), ((851, 871), 'torch.nn.functional.max_pool2d', 'F.max_pool2d', (['out', '(2)'], {}), '(out, 2)\n', (863, 871), True, 'import torch.nn.functional as F\n')]
|
from django.conf import settings
from django.shortcuts import get_object_or_404
from .models import Product
def favourites(request):
"""This view iterates through the favourites"""
favourite_items = []
shop_items = Product.objects.all()
for item in shop_items:
        shop_item = get_object_or_404(Product, pk=item.id)
favourite_items.append(shop_item)
context = {
'favourite_items': favourite_items,
}
return context
|
[
"django.shortcuts.get_object_or_404"
] |
[((292, 330), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Product'], {'pk': 'item_id'}), '(Product, pk=item_id)\n', (309, 330), False, 'from django.shortcuts import get_object_or_404\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# @Date : 2019-05-31 00:14:27
# @Author : Racter (<EMAIL>)
# @Profile : https://racterub.me
import yaml
from argparse import ArgumentParser
import os
import sys
import shutil
from subprocess import check_output
def parseParam():
parser = ArgumentParser()
parser.add_argument("-d", "--dir", help="Path to challenges", default="chal/", dest="path")
parser.add_argument("-p", "--port", help="Pwn challenges' starting port (Default => 6000)", type=int, default=6000, dest="port")
parser.add_argument("-i", "--img", help="Docker base image for your pwn challenges (Default => ubuntu:18.04) or do just do <img>:<tag>", default="ubuntu:18.04", dest="image")
parser.add_argument("-t", "--timeout", help="Set timeout limit", default=0, dest="time")
parser.add_argument("-g", "--gen-conf", help="Generate docker-compose.yml", action="store_true", dest="gen_conf")
parser.add_argument("-e", "--ex-libc", help="Export libc from container", action="store_true", dest="ex_libc")
args = parser.parse_args()
return args
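# Example invocation (added, illustrative; the script filename is hypothetical):
#   python3 deploy.py -d chal/ -p 6000 -i ubuntu:18.04 -t 60
# walks chal/, assigns ports starting at 6000, builds on ubuntu:18.04 and
# wraps each challenge binary in a 60 second timeout.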
def genConf(path, port, image, timeout):
config = {"services": {}}
base = os.path.dirname(os.path.abspath(__file__)) + "/%s" % path
chal = [f for f in os.listdir(base)]
for i in range(len(chal)):
baseDir = base + chal[i]
data = {"build": "chal/%s" % chal[i], "ulimits": {"nproc": 1024}, "ports": ["%d:9999" % port]}
config['services'][chal[i]] = data
port += 1
with open('docker-compose.yml', 'w') as f:
f.write(yaml.dump({"version": '3'}) + yaml.dump(config))
def exportLibc(path, port, image, timeout):
base = os.path.dirname(os.path.abspath(__file__)) + "/%s" % path
chal = [f for f in os.listdir(base)]
os.mkdir('libc/')
for i in range(len(chal)):
os.mkdir("libc/%s" % chal[i])
containerID = check_output('docker ps -aqf "name=pwndeployer_%s"' % chal[i], shell=True).strip().decode()
os.system("docker cp --follow-link %s:lib32/libc.so.6 libc/%s/lib32" % (containerID, chal[i]))
os.system("docker cp --follow-link %s:lib/x86_64-linux-gnu/libc.so.6 libc/%s/lib64" % (containerID, chal[i]))
def setup(path, port, image, timeout):
config = {"services": {}}
base = os.path.dirname(os.path.abspath(__file__)) + "/%s" % path
chal = [f for f in os.listdir(base)]
for i in range(len(chal)):
baseDir = base + chal[i]
os.mkdir(baseDir+"/bin/")
dockerfile = """FROM %s
RUN apt-get update && apt-get -y dist-upgrade
RUN apt-get install -y lib32z1 xinetd
RUN useradd -m ctf
COPY ./bin/ /home/ctf/
COPY ./ctf.xinetd /etc/xinetd.d/ctf
COPY ./start.sh /start.sh
RUN echo "Blocked by ctf_xinetd" > /etc/banner_fail
RUN chmod +x /start.sh
RUN chown -R root:ctf /home/ctf
RUN chmod -R 750 /home/ctf
RUN chmod 740 /home/ctf/flag
RUN cp -R /lib* /home/ctf
RUN cp -R /usr/lib* /home/ctf
RUN mkdir /home/ctf/dev
RUN mknod /home/ctf/dev/null c 1 3
RUN mknod /home/ctf/dev/zero c 1 5
RUN mknod /home/ctf/dev/random c 1 8
RUN mknod /home/ctf/dev/urandom c 1 9
RUN chmod 666 /home/ctf/dev/*
RUN mkdir /home/ctf/bin
RUN cp /bin/sh /home/ctf/bin
RUN cp /bin/ls /home/ctf/bin
RUN cp /bin/cat /home/ctf/bin
RUN cp /usr/bin/timeout /home/ctf/bin
WORKDIR /home/ctf
CMD ["/start.sh"]
EXPOSE 9999
""" % image
with open('xinetd_setting', 'r') as setting:
ctfXinetd = setting.read()
if timeout:
runsh = '''#!/bin/sh
exec 2>/dev/null
timeout %d ./%s''' % (timeout, chal[i])
else:
            runsh = '''#!/bin/sh
exec 2>/dev/null
./%s''' % chal[i]
shutil.move(baseDir+"/%s" % chal[i], baseDir+'/bin/')
shutil.move(baseDir+"/flag", baseDir+'/bin/')
os.chmod(baseDir+'/bin/%s' % chal[i], 0o755)
with open('start.sh') as f:
startsh = f.read()
with open(baseDir+'/start.sh', 'w') as f:
f.write(startsh)
with open(baseDir+'/Dockerfile', 'w') as f:
f.write(dockerfile)
with open(baseDir+'/bin/run.sh', 'w') as f:
f.write(runsh)
with open(baseDir+'/ctf.xinetd', 'w') as f:
f.write(ctfXinetd)
data = {"build": "chal/%s" % chal[i], "ulimits": {"nproc": 1024}, "ports": ["%d:9999" % port]}
config['services'][chal[i]] = data
port += 1
with open('docker-compose.yml', 'w') as f:
f.write(yaml.dump({"version": '3'}) + yaml.dump(config))
if __name__ == "__main__":
arg = parseParam()
if os.path.isdir(arg.path):
if arg.gen_conf:
genConf(arg.path, arg.port, arg.image, arg.time)
elif arg.ex_libc:
exportLibc(arg.path, arg.port, arg.image, arg.time)
else:
setup(arg.path, arg.port, arg.image, arg.time)
else:
print("Invalid input")
sys.exit(-1)
|
[
"os.mkdir",
"os.path.abspath",
"os.chmod",
"argparse.ArgumentParser",
"os.path.isdir",
"subprocess.check_output",
"yaml.dump",
"os.system",
"shutil.move",
"os.listdir",
"sys.exit"
] |
[((299, 315), 'argparse.ArgumentParser', 'ArgumentParser', ([], {}), '()\n', (313, 315), False, 'from argparse import ArgumentParser\n'), ((1786, 1803), 'os.mkdir', 'os.mkdir', (['"""libc/"""'], {}), "('libc/')\n", (1794, 1803), False, 'import os\n'), ((4603, 4626), 'os.path.isdir', 'os.path.isdir', (['arg.path'], {}), '(arg.path)\n', (4616, 4626), False, 'import os\n'), ((1843, 1872), 'os.mkdir', 'os.mkdir', (["('libc/%s' % chal[i])"], {}), "('libc/%s' % chal[i])\n", (1851, 1872), False, 'import os\n'), ((1995, 2094), 'os.system', 'os.system', (["('docker cp --follow-link %s:lib32/libc.so.6 libc/%s/lib32' % (containerID,\n chal[i]))"], {}), "('docker cp --follow-link %s:lib32/libc.so.6 libc/%s/lib32' % (\n containerID, chal[i]))\n", (2004, 2094), False, 'import os\n'), ((2098, 2216), 'os.system', 'os.system', (["('docker cp --follow-link %s:lib/x86_64-linux-gnu/libc.so.6 libc/%s/lib64' %\n (containerID, chal[i]))"], {}), "(\n 'docker cp --follow-link %s:lib/x86_64-linux-gnu/libc.so.6 libc/%s/lib64' %\n (containerID, chal[i]))\n", (2107, 2216), False, 'import os\n'), ((2461, 2488), 'os.mkdir', 'os.mkdir', (["(baseDir + '/bin/')"], {}), "(baseDir + '/bin/')\n", (2469, 2488), False, 'import os\n'), ((3712, 3769), 'shutil.move', 'shutil.move', (["(baseDir + '/%s' % chal[i])", "(baseDir + '/bin/')"], {}), "(baseDir + '/%s' % chal[i], baseDir + '/bin/')\n", (3723, 3769), False, 'import shutil\n'), ((3774, 3823), 'shutil.move', 'shutil.move', (["(baseDir + '/flag')", "(baseDir + '/bin/')"], {}), "(baseDir + '/flag', baseDir + '/bin/')\n", (3785, 3823), False, 'import shutil\n'), ((3828, 3872), 'os.chmod', 'os.chmod', (["(baseDir + '/bin/%s' % chal[i])", '(493)'], {}), "(baseDir + '/bin/%s' % chal[i], 493)\n", (3836, 3872), False, 'import os\n'), ((4926, 4938), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (4934, 4938), False, 'import sys\n'), ((1196, 1221), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (1211, 1221), False, 'import os\n'), ((1261, 1277), 'os.listdir', 'os.listdir', (['base'], {}), '(base)\n', (1271, 1277), False, 'import os\n'), ((1699, 1724), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (1714, 1724), False, 'import os\n'), ((1764, 1780), 'os.listdir', 'os.listdir', (['base'], {}), '(base)\n', (1774, 1780), False, 'import os\n'), ((2306, 2331), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (2321, 2331), False, 'import os\n'), ((2371, 2387), 'os.listdir', 'os.listdir', (['base'], {}), '(base)\n', (2381, 2387), False, 'import os\n'), ((4493, 4520), 'yaml.dump', 'yaml.dump', (["{'version': '3'}"], {}), "({'version': '3'})\n", (4502, 4520), False, 'import yaml\n'), ((4523, 4540), 'yaml.dump', 'yaml.dump', (['config'], {}), '(config)\n', (4532, 4540), False, 'import yaml\n'), ((1578, 1605), 'yaml.dump', 'yaml.dump', (["{'version': '3'}"], {}), "({'version': '3'})\n", (1587, 1605), False, 'import yaml\n'), ((1608, 1625), 'yaml.dump', 'yaml.dump', (['config'], {}), '(config)\n', (1617, 1625), False, 'import yaml\n'), ((1895, 1969), 'subprocess.check_output', 'check_output', (['(\'docker ps -aqf "name=pwndeployer_%s"\' % chal[i])'], {'shell': '(True)'}), '(\'docker ps -aqf "name=pwndeployer_%s"\' % chal[i], shell=True)\n', (1907, 1969), False, 'from subprocess import check_output\n')]
|
import copy as cp
import hashlib
from pprint import pprint as pp
from termcolor import colored
from modules.blockchain.block import *
from modules.blockchain.book import *
from modules.blockchain.transaction import *
class BlockChain:
"""BlockChain Object to be added to the chain
:Attributes:
:attr block_chain: All the blocks in the chain
:type block_chain: list
    :attr open_transactions: All the transactions waiting to be added to a block
:type open_transaction: list
:Methods:
:meth __init__: Constructor of the class
:meth to_json: Create a json file of the block-chain
:meth valid_proof: Verify the hash guess
:meth proof_of_work: Calculate the hash of the block and return nonce
:meth create_append_transaction: Create and append a transaction to the open transaction list
:meth mine_block: mine the new block + add the reward transaction
    :meth number_blocks: gives the number of blocks in the chain
:meth __str__: magic method, prints the chain and its blocks
"""
def __init__(self, override=False, debug=True):
"""Constructor of the class"""
# Create the genesis block (the first block in the chain)
if not override:
genesis_block = Block(None,
[Transaction(sender=None, recipient='BlockChain', book=None, transaction_type=2)])
self.block_chain = [genesis_block]
else:
genesis_block = None
self.block_chain = []
# a list containing all the forks of a chain at the same level
self.chains_same_level = [self.block_chain]
self.open_transactions = []
self.debug = debug
def valid_proof(self, last_hash, nonce):
"""Verify the hash guess
:param last_hash: the hash of the previous block in the chain
:type last_hash: str
:param nonce: nonce guess of the hash
:type nonce: int
:returns: True or False guess_hash
:rtype: bool
"""
guess = (str(list(map(str, self.open_transactions))) + str(last_hash) + str(nonce)).encode()
guess_hash = hashlib.sha256(guess).hexdigest()
# print(guess_hash)
return guess_hash[0:2] == '42' # 42 is the difficulty to find the hash
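    # With the two-hex-character target '42' checked above, a given nonce matches
    # with probability 1/256, so mining a block takes roughly 256 hash attempts on average.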
def proof_of_work(self):
"""Search for the right hash by adjusting the `nonce` value
:var nonce: field whose value is adjusted by miners so that the hash of
the block will be the current target (for now it's 42 as the first two chars) of the network
:type nonce: int
:returns: nonce of the hash
:rtype: int
"""
last_block = self.block_chain[-1]
last_hash = last_block.hash
nonce = 0
while not self.valid_proof(last_hash, nonce):
nonce += 1
return nonce
# TODO: change the name of this method
def create_append_transaction(self, new_transaction):
"""This method create a transaction and append it to the Open transaction attr
:param new_transaction: new transaction
:type new_transaction: Transaction object -> *`modules.blockchain.transaction`*
:returns: None
"""
if self.verify_transaction(new_transaction):
self.open_transactions.append(new_transaction)
    def verify_transaction(self, new_transaction): # TODO: complete this method
        # Placeholder: accept every transaction for now; returning None here would
        # make create_append_transaction silently drop every transaction.
        return True
@staticmethod
def verify_blockchain(blockchain, flag_list=False):
"""Verify if a block-chain hasn't been tampered with
loop through the block and verify the difference between the hashes
:param blockchain: the block-chain to be verified
:type blockchain: BlockChain *-blockchain.py*
:returns: the chain is valid or not
:rtype: {bool}
"""
if not flag_list:
block_chain = blockchain.block_chain
else:
block_chain = blockchain
flags = []
for i in range(1, len(block_chain)):
block = block_chain[i]
block1 = block_chain[i - 1]
if block.hash != block.hash_block():
flags.append("[!] Found difference between the hash and the calculated one")
elif block1.hash != block.previous_hash:
flags.append("[!] Found difference between the hash of a block and the one previous")
elif block1.timestamp >= block.timestamp:
flags.append("[!] Found irregularity between the time-stamps")
if not flag_list:
blockchain._debug(flags)
return True if len(flags) == 0 else False
def mine_block(self, recipient):
"""This method mine the new block with the opentransaction list
:param recipient: Miner's ID - who is being rewarded for mining the block
:type recipient: str
:returns: None
"""
last_block = self.block_chain[-1] # Get the Last block
last_hash = last_block.hash # Get the hash of the last block
nonce = self.proof_of_work() # Determine the nonce value
# Create the reward and append it to the open transactions
reward_transaction = Transaction(sender=None, recipient=recipient, book=None, transaction_type=2)
self.open_transactions.append(reward_transaction)
# Create the new Block
new_block = Block(last_hash, self.open_transactions, index=len(self.block_chain), nonce=nonce)
self.block_chain.append(new_block)
self.open_transactions = []
def fork_chain(self, index=None):
"""Create a fork *-copy* of the block-chain with index*- beginning* preferred
The copy is made by a deep-copy
:param index: the start of the *forking*, defaults to None
:type index: int, optional
"""
copy = cp.deepcopy(self)
copy.block_chain = copy.block_chain[index:]
return copy
def to_json(self):
"""
to_json converts the object into a json object
:var dict_json: contains information about the blocks
:type dict_json: dict
:returns: a dict (json) containing the chain
:rtype: dict
"""
dict_json = {}
# Loop through and convert the block to json objects
for i, block in enumerate(self.block_chain):
dict_json[i] = block.to_json()
return dict_json
    # Returns the number of blocks in the chain
number_blocks = lambda self: len(self.block_chain)
def __eq__(self, other):
return self.to_json() == other.to_json()
def __repr__(self):
return str(self.to_json())
def __str__(self):
print(f'::{self.number_blocks()} blocks in the blockchain')
for block, number in zip(self.block_chain, range(len(self.block_chain))):
print('number\n', number)
print('block\n', block)
return ''
@staticmethod
def json_to_blockchain(bc_json):
bc = BlockChain(override=True)
for block in bc_json.values():
bc.block_chain.append(Block.json_to_block(block))
return bc
def _debug(self, msg, pprint=False):
"""Prints helpful information in debug mode
_debug print with different color depending on the node_type
:param msg: the message to display
:type msg: string
:param pprint: prints a msg with a pprint *with indentation*, defaults to False
:type pprint: bool, optional
"""
if self.debug:
if not pprint:
print(colored(msg, 'magenta'))
else:
pp(msg, indent=4, width=4)
if __name__ == '__main__':
    # Example of how to use the blockchain object
blockchain = BlockChain()
print(blockchain)
blockchain.create_append_transaction(Transaction('mouha', 'recipient',
Book(title='The Selfish Gene', author='<NAME>',
date='19--', genre='Science')))
blockchain.mine_block('zeddo')
print(blockchain)
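    # Extra check (sketch): verify_blockchain walks the chain and returns True
    # as long as the hashes and timestamps are still consistent.
    print(BlockChain.verify_blockchain(blockchain))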
|
[
"termcolor.colored",
"copy.deepcopy",
"pprint.pprint"
] |
[((5174, 5191), 'copy.deepcopy', 'cp.deepcopy', (['self'], {}), '(self)\n', (5185, 5191), True, 'import copy as cp\n'), ((6685, 6711), 'pprint.pprint', 'pp', (['msg'], {'indent': '(4)', 'width': '(4)'}), '(msg, indent=4, width=4)\n', (6687, 6711), True, 'from pprint import pprint as pp\n'), ((6647, 6670), 'termcolor.colored', 'colored', (['msg', '"""magenta"""'], {}), "(msg, 'magenta')\n", (6654, 6670), False, 'from termcolor import colored\n')]
|
from pathlib import Path
from tempfile import TemporaryDirectory
from typing import Any, Dict, List, Tuple
import pytest
from scripts.generate import (
CIRCULAR_PROPERTY_REFERENCES,
CIRCULAR_REFERENCES,
INT_PROPERTIES,
ObjectDefinition,
OneOfDefinition,
PrimitiveDefinition,
Property,
Schema,
generate_files,
get_definition_class,
get_import_line_from_reference,
get_init_py,
get_resource_file_name,
get_resource_from_path,
get_resource_var_name,
get_schema,
)
def test_utils() -> None:
assert get_resource_from_path("#/definitions/Account") == "Account"
assert get_resource_var_name("Account") == "FHIR_Account"
assert get_resource_file_name("Account") == "FHIR_Account"
import_line = "from .FHIR_Account import FHIR_Account\n"
assert get_import_line_from_reference("Account") == import_line
assert Property.get_enum_literal(["a", "b"]) == 'Literal["a", "b"]'
def test_get_definition() -> None:
assert type(OneOfDefinition) == type(
get_definition_class("Property", {"oneOf": []})
)
assert type(PrimitiveDefinition) == type(
get_definition_class("boolean", {"properties": {}})
)
assert type(ObjectDefinition) == type(
get_definition_class("Property", {"properties": {}})
)
with pytest.raises(Exception, match="Not expecting this schema definition"):
get_definition_class("Property", {"oneOfQQQ": []})
def test_primitive_def() -> None:
def build(line: str) -> str:
return f"# aaa\n{line}"
assert PrimitiveDefinition(
"a", {"description": "aaa", "type": "string"}
).generate() == build("FHIR_a = str")
assert PrimitiveDefinition(
"a", {"description": "aaa", "type": "boolean"}
).generate() == build("FHIR_a = bool")
assert PrimitiveDefinition(
"a", {"description": "aaa", "type": "number"}
).generate() == build("FHIR_a = float")
# No Type Provided (XHTML works this way)
assert PrimitiveDefinition("a", {"description": "aaa"}).generate() == build(
"FHIR_a = str"
)
# Check override INT
prop_name = INT_PROPERTIES[0]
assert PrimitiveDefinition(
prop_name, {"description": "aaa", "type": "number"}
).generate() == build(f"FHIR_{prop_name} = int")
# Check bad type given
with pytest.raises(Exception, match="Not able to handle schema"):
PrimitiveDefinition("a", {"description": "aaa", "type": "number123"}).generate()
def test_oneof_def() -> None:
schema = {
"oneOf": [{"$ref": "#/definitions/Account"}, {"$ref": "#/definitions/Patient"}]
}
lines = [
"from typing import Union",
"from .FHIR_Account import FHIR_Account",
"from .FHIR_Patient import FHIR_Patient",
"FHIR_Result = Union[",
" FHIR_Account,",
" FHIR_Patient,",
"]",
]
definition = OneOfDefinition("Result", schema)
result = definition.generate().split("\n")
for i in range(len(result)):
assert result[i] == lines[i]
def test_object_def() -> None:
schema = {
"description": "aaa",
"properties": {
"a": {"description": "bbb", "type": "string"},
"b": {"description": "bbb", "$ref": "#/definitions/Account"},
},
}
lines = [
"from typing import Any, List, TypedDict, Literal",
"from .FHIR_Account import FHIR_Account",
"# aaa",
'FHIR_Result = TypedDict("FHIR_Result", {',
" # bbb",
' "a": str,',
" # bbb",
' "b": FHIR_Account,',
"}, total=False)",
]
definition = ObjectDefinition("Result", schema)
result = definition.generate().split("\n")
for i in range(len(result)):
assert result[i] == lines[i]
def test_object_def_circular() -> None:
circular_reference = list(CIRCULAR_REFERENCES)[0]
circular_parent = list(CIRCULAR_PROPERTY_REFERENCES.keys())[0]
circular_property = CIRCULAR_PROPERTY_REFERENCES[circular_parent][0]
schema = {
"description": "aaa",
"properties": {
# property can't point to self
"a": {"description": "bbb", "$ref": f"#/definitions/{circular_parent}"},
# circular references are blacklisted
"b": {"description": "bbb", "$ref": f"#/definitions/{circular_reference}"},
# reference.properties are blacklisted
circular_property: {"description": "bbb", "$ref": "#/definitions/123"},
},
}
# There should be no import references in any of these cases
lines = [
"from typing import Any, List, TypedDict, Literal",
"# aaa",
f"FHIR_{circular_parent} = TypedDict(\"FHIR_{circular_parent}\", {'{'}",
" # bbb",
' "a": Any,',
" # bbb",
' "b": Any,',
" # bbb",
f' "{circular_property}": Any,',
"}, total=False)",
]
definition = ObjectDefinition(circular_parent, schema)
result = definition.generate().split("\n")
for i in range(len(result)):
assert result[i] == lines[i]
def test_property_gen() -> None:
parent = "Parent"
circular_reference = list(CIRCULAR_REFERENCES)[0]
mappings: List[Tuple[str, Dict[str, Any]]] = [
("bool", {"description": "aaa", "type": "boolean"}),
("str", {"description": "aaa", "type": "string"}),
("float", {"description": "aaa", "type": "number"}),
('Literal["Account"]', {"description": "aaa", "const": "Account"}),
('Literal["a", "b"]', {"description": "aaa", "enum": ["a", "b"]}),
("FHIR_Account", {"description": "aaa", "$ref": "#/definitions/Account"}),
(
'List[Literal["a", "b"]]',
{
"description": "aaa",
"type": "array",
"items": {"enum": ["a", "b"]},
},
),
(
"List[FHIR_Account]",
{
"description": "aaa",
"type": "array",
"items": {"$ref": "#/definitions/Account"},
},
),
# Circular Reference
("Any", {"description": "aaa", "$ref": f"#/definitions/{circular_reference}"}),
# Self reference
("Any", {"description": "aaa", "$ref": f"#/definitions/{parent}"}),
]
for result, schema in mappings:
p = Property("prop_a", schema, parent)
assert p.generate_property() == result
# Check Circular Property as well
circular_parent = list(CIRCULAR_PROPERTY_REFERENCES.keys())[0]
circular_property = CIRCULAR_PROPERTY_REFERENCES[circular_parent][0]
p2 = Property(
circular_property,
{"description": "aaa", "$ref": "#/definitions/123"},
circular_parent,
)
assert p2.generate_property() == "Any"
def test_property_gen_fail() -> None:
with pytest.raises(Exception, match="Property schema can not be handled"):
schema: Schema = {"description": "aaa", "type": "123"}
p = Property("prop_a", schema, "parent")
p.generate()
with pytest.raises(Exception, match=r"items should have \$ref or enum"):
schema = {"description": "aaa", "type": "array", "items": {"type": "string"}}
p = Property("prop_a", schema, "parent")
p.generate()
def test_get_init_py() -> None:
expected = """from .FHIR_foo import FHIR_foo
from .FHIR_bar import FHIR_bar
__all__ = ['FHIR_foo', 'FHIR_bar']
"""
assert get_init_py(["foo", "bar"]) == expected
def test_generate_files_and_get_schema() -> None:
with TemporaryDirectory() as _tempdir:
tempdir = Path(_tempdir)
generate_files(get_schema(), tempdir)
assert tempdir.joinpath("__init__.py").exists()
assert tempdir.joinpath("FHIR_Account.py").exists()
assert tempdir.joinpath("FHIR_Patient.py").exists()
|
[
"scripts.generate.get_import_line_from_reference",
"tempfile.TemporaryDirectory",
"scripts.generate.Property",
"scripts.generate.ObjectDefinition",
"scripts.generate.get_resource_file_name",
"pytest.raises",
"pathlib.Path",
"scripts.generate.Property.get_enum_literal",
"scripts.generate.get_definition_class",
"scripts.generate.PrimitiveDefinition",
"scripts.generate.get_resource_from_path",
"scripts.generate.CIRCULAR_PROPERTY_REFERENCES.keys",
"scripts.generate.get_schema",
"scripts.generate.OneOfDefinition",
"scripts.generate.get_init_py",
"scripts.generate.get_resource_var_name"
] |
[((2914, 2947), 'scripts.generate.OneOfDefinition', 'OneOfDefinition', (['"""Result"""', 'schema'], {}), "('Result', schema)\n", (2929, 2947), False, 'from scripts.generate import CIRCULAR_PROPERTY_REFERENCES, CIRCULAR_REFERENCES, INT_PROPERTIES, ObjectDefinition, OneOfDefinition, PrimitiveDefinition, Property, Schema, generate_files, get_definition_class, get_import_line_from_reference, get_init_py, get_resource_file_name, get_resource_from_path, get_resource_var_name, get_schema\n'), ((3656, 3690), 'scripts.generate.ObjectDefinition', 'ObjectDefinition', (['"""Result"""', 'schema'], {}), "('Result', schema)\n", (3672, 3690), False, 'from scripts.generate import CIRCULAR_PROPERTY_REFERENCES, CIRCULAR_REFERENCES, INT_PROPERTIES, ObjectDefinition, OneOfDefinition, PrimitiveDefinition, Property, Schema, generate_files, get_definition_class, get_import_line_from_reference, get_init_py, get_resource_file_name, get_resource_from_path, get_resource_var_name, get_schema\n'), ((4967, 5008), 'scripts.generate.ObjectDefinition', 'ObjectDefinition', (['circular_parent', 'schema'], {}), '(circular_parent, schema)\n', (4983, 5008), False, 'from scripts.generate import CIRCULAR_PROPERTY_REFERENCES, CIRCULAR_REFERENCES, INT_PROPERTIES, ObjectDefinition, OneOfDefinition, PrimitiveDefinition, Property, Schema, generate_files, get_definition_class, get_import_line_from_reference, get_init_py, get_resource_file_name, get_resource_from_path, get_resource_var_name, get_schema\n'), ((6671, 6772), 'scripts.generate.Property', 'Property', (['circular_property', "{'description': 'aaa', '$ref': '#/definitions/123'}", 'circular_parent'], {}), "(circular_property, {'description': 'aaa', '$ref':\n '#/definitions/123'}, circular_parent)\n", (6679, 6772), False, 'from scripts.generate import CIRCULAR_PROPERTY_REFERENCES, CIRCULAR_REFERENCES, INT_PROPERTIES, ObjectDefinition, OneOfDefinition, PrimitiveDefinition, Property, Schema, generate_files, get_definition_class, get_import_line_from_reference, get_init_py, get_resource_file_name, get_resource_from_path, get_resource_var_name, get_schema\n'), ((566, 613), 'scripts.generate.get_resource_from_path', 'get_resource_from_path', (['"""#/definitions/Account"""'], {}), "('#/definitions/Account')\n", (588, 613), False, 'from scripts.generate import CIRCULAR_PROPERTY_REFERENCES, CIRCULAR_REFERENCES, INT_PROPERTIES, ObjectDefinition, OneOfDefinition, PrimitiveDefinition, Property, Schema, generate_files, get_definition_class, get_import_line_from_reference, get_init_py, get_resource_file_name, get_resource_from_path, get_resource_var_name, get_schema\n'), ((638, 670), 'scripts.generate.get_resource_var_name', 'get_resource_var_name', (['"""Account"""'], {}), "('Account')\n", (659, 670), False, 'from scripts.generate import CIRCULAR_PROPERTY_REFERENCES, CIRCULAR_REFERENCES, INT_PROPERTIES, ObjectDefinition, OneOfDefinition, PrimitiveDefinition, Property, Schema, generate_files, get_definition_class, get_import_line_from_reference, get_init_py, get_resource_file_name, get_resource_from_path, get_resource_var_name, get_schema\n'), ((700, 733), 'scripts.generate.get_resource_file_name', 'get_resource_file_name', (['"""Account"""'], {}), "('Account')\n", (722, 733), False, 'from scripts.generate import CIRCULAR_PROPERTY_REFERENCES, CIRCULAR_REFERENCES, INT_PROPERTIES, ObjectDefinition, OneOfDefinition, PrimitiveDefinition, Property, Schema, generate_files, get_definition_class, get_import_line_from_reference, get_init_py, get_resource_file_name, get_resource_from_path, 
get_resource_var_name, get_schema\n'), ((824, 865), 'scripts.generate.get_import_line_from_reference', 'get_import_line_from_reference', (['"""Account"""'], {}), "('Account')\n", (854, 865), False, 'from scripts.generate import CIRCULAR_PROPERTY_REFERENCES, CIRCULAR_REFERENCES, INT_PROPERTIES, ObjectDefinition, OneOfDefinition, PrimitiveDefinition, Property, Schema, generate_files, get_definition_class, get_import_line_from_reference, get_init_py, get_resource_file_name, get_resource_from_path, get_resource_var_name, get_schema\n'), ((892, 929), 'scripts.generate.Property.get_enum_literal', 'Property.get_enum_literal', (["['a', 'b']"], {}), "(['a', 'b'])\n", (917, 929), False, 'from scripts.generate import CIRCULAR_PROPERTY_REFERENCES, CIRCULAR_REFERENCES, INT_PROPERTIES, ObjectDefinition, OneOfDefinition, PrimitiveDefinition, Property, Schema, generate_files, get_definition_class, get_import_line_from_reference, get_init_py, get_resource_file_name, get_resource_from_path, get_resource_var_name, get_schema\n'), ((1326, 1396), 'pytest.raises', 'pytest.raises', (['Exception'], {'match': '"""Not expecting this schema definition"""'}), "(Exception, match='Not expecting this schema definition')\n", (1339, 1396), False, 'import pytest\n'), ((1406, 1456), 'scripts.generate.get_definition_class', 'get_definition_class', (['"""Property"""', "{'oneOfQQQ': []}"], {}), "('Property', {'oneOfQQQ': []})\n", (1426, 1456), False, 'from scripts.generate import CIRCULAR_PROPERTY_REFERENCES, CIRCULAR_REFERENCES, INT_PROPERTIES, ObjectDefinition, OneOfDefinition, PrimitiveDefinition, Property, Schema, generate_files, get_definition_class, get_import_line_from_reference, get_init_py, get_resource_file_name, get_resource_from_path, get_resource_var_name, get_schema\n'), ((2348, 2407), 'pytest.raises', 'pytest.raises', (['Exception'], {'match': '"""Not able to handle schema"""'}), "(Exception, match='Not able to handle schema')\n", (2361, 2407), False, 'import pytest\n'), ((6401, 6435), 'scripts.generate.Property', 'Property', (['"""prop_a"""', 'schema', 'parent'], {}), "('prop_a', schema, parent)\n", (6409, 6435), False, 'from scripts.generate import CIRCULAR_PROPERTY_REFERENCES, CIRCULAR_REFERENCES, INT_PROPERTIES, ObjectDefinition, OneOfDefinition, PrimitiveDefinition, Property, Schema, generate_files, get_definition_class, get_import_line_from_reference, get_init_py, get_resource_file_name, get_resource_from_path, get_resource_var_name, get_schema\n'), ((6893, 6961), 'pytest.raises', 'pytest.raises', (['Exception'], {'match': '"""Property schema can not be handled"""'}), "(Exception, match='Property schema can not be handled')\n", (6906, 6961), False, 'import pytest\n'), ((7038, 7074), 'scripts.generate.Property', 'Property', (['"""prop_a"""', 'schema', '"""parent"""'], {}), "('prop_a', schema, 'parent')\n", (7046, 7074), False, 'from scripts.generate import CIRCULAR_PROPERTY_REFERENCES, CIRCULAR_REFERENCES, INT_PROPERTIES, ObjectDefinition, OneOfDefinition, PrimitiveDefinition, Property, Schema, generate_files, get_definition_class, get_import_line_from_reference, get_init_py, get_resource_file_name, get_resource_from_path, get_resource_var_name, get_schema\n'), ((7106, 7172), 'pytest.raises', 'pytest.raises', (['Exception'], {'match': '"""items should have \\\\$ref or enum"""'}), "(Exception, match='items should have \\\\$ref or enum')\n", (7119, 7172), False, 'import pytest\n'), ((7272, 7308), 'scripts.generate.Property', 'Property', (['"""prop_a"""', 'schema', '"""parent"""'], {}), "('prop_a', schema, 
'parent')\n", (7280, 7308), False, 'from scripts.generate import CIRCULAR_PROPERTY_REFERENCES, CIRCULAR_REFERENCES, INT_PROPERTIES, ObjectDefinition, OneOfDefinition, PrimitiveDefinition, Property, Schema, generate_files, get_definition_class, get_import_line_from_reference, get_init_py, get_resource_file_name, get_resource_from_path, get_resource_var_name, get_schema\n'), ((7494, 7521), 'scripts.generate.get_init_py', 'get_init_py', (["['foo', 'bar']"], {}), "(['foo', 'bar'])\n", (7505, 7521), False, 'from scripts.generate import CIRCULAR_PROPERTY_REFERENCES, CIRCULAR_REFERENCES, INT_PROPERTIES, ObjectDefinition, OneOfDefinition, PrimitiveDefinition, Property, Schema, generate_files, get_definition_class, get_import_line_from_reference, get_init_py, get_resource_file_name, get_resource_from_path, get_resource_var_name, get_schema\n'), ((7595, 7615), 'tempfile.TemporaryDirectory', 'TemporaryDirectory', ([], {}), '()\n', (7613, 7615), False, 'from tempfile import TemporaryDirectory\n'), ((7647, 7661), 'pathlib.Path', 'Path', (['_tempdir'], {}), '(_tempdir)\n', (7651, 7661), False, 'from pathlib import Path\n'), ((1040, 1087), 'scripts.generate.get_definition_class', 'get_definition_class', (['"""Property"""', "{'oneOf': []}"], {}), "('Property', {'oneOf': []})\n", (1060, 1087), False, 'from scripts.generate import CIRCULAR_PROPERTY_REFERENCES, CIRCULAR_REFERENCES, INT_PROPERTIES, ObjectDefinition, OneOfDefinition, PrimitiveDefinition, Property, Schema, generate_files, get_definition_class, get_import_line_from_reference, get_init_py, get_resource_file_name, get_resource_from_path, get_resource_var_name, get_schema\n'), ((1148, 1199), 'scripts.generate.get_definition_class', 'get_definition_class', (['"""boolean"""', "{'properties': {}}"], {}), "('boolean', {'properties': {}})\n", (1168, 1199), False, 'from scripts.generate import CIRCULAR_PROPERTY_REFERENCES, CIRCULAR_REFERENCES, INT_PROPERTIES, ObjectDefinition, OneOfDefinition, PrimitiveDefinition, Property, Schema, generate_files, get_definition_class, get_import_line_from_reference, get_init_py, get_resource_file_name, get_resource_from_path, get_resource_var_name, get_schema\n'), ((1257, 1309), 'scripts.generate.get_definition_class', 'get_definition_class', (['"""Property"""', "{'properties': {}}"], {}), "('Property', {'properties': {}})\n", (1277, 1309), False, 'from scripts.generate import CIRCULAR_PROPERTY_REFERENCES, CIRCULAR_REFERENCES, INT_PROPERTIES, ObjectDefinition, OneOfDefinition, PrimitiveDefinition, Property, Schema, generate_files, get_definition_class, get_import_line_from_reference, get_init_py, get_resource_file_name, get_resource_from_path, get_resource_var_name, get_schema\n'), ((3932, 3967), 'scripts.generate.CIRCULAR_PROPERTY_REFERENCES.keys', 'CIRCULAR_PROPERTY_REFERENCES.keys', ([], {}), '()\n', (3965, 3967), False, 'from scripts.generate import CIRCULAR_PROPERTY_REFERENCES, CIRCULAR_REFERENCES, INT_PROPERTIES, ObjectDefinition, OneOfDefinition, PrimitiveDefinition, Property, Schema, generate_files, get_definition_class, get_import_line_from_reference, get_init_py, get_resource_file_name, get_resource_from_path, get_resource_var_name, get_schema\n'), ((6549, 6584), 'scripts.generate.CIRCULAR_PROPERTY_REFERENCES.keys', 'CIRCULAR_PROPERTY_REFERENCES.keys', ([], {}), '()\n', (6582, 6584), False, 'from scripts.generate import CIRCULAR_PROPERTY_REFERENCES, CIRCULAR_REFERENCES, INT_PROPERTIES, ObjectDefinition, OneOfDefinition, PrimitiveDefinition, Property, Schema, generate_files, get_definition_class, 
get_import_line_from_reference, get_init_py, get_resource_file_name, get_resource_from_path, get_resource_var_name, get_schema\n'), ((7685, 7697), 'scripts.generate.get_schema', 'get_schema', ([], {}), '()\n', (7695, 7697), False, 'from scripts.generate import CIRCULAR_PROPERTY_REFERENCES, CIRCULAR_REFERENCES, INT_PROPERTIES, ObjectDefinition, OneOfDefinition, PrimitiveDefinition, Property, Schema, generate_files, get_definition_class, get_import_line_from_reference, get_init_py, get_resource_file_name, get_resource_from_path, get_resource_var_name, get_schema\n'), ((1570, 1636), 'scripts.generate.PrimitiveDefinition', 'PrimitiveDefinition', (['"""a"""', "{'description': 'aaa', 'type': 'string'}"], {}), "('a', {'description': 'aaa', 'type': 'string'})\n", (1589, 1636), False, 'from scripts.generate import CIRCULAR_PROPERTY_REFERENCES, CIRCULAR_REFERENCES, INT_PROPERTIES, ObjectDefinition, OneOfDefinition, PrimitiveDefinition, Property, Schema, generate_files, get_definition_class, get_import_line_from_reference, get_init_py, get_resource_file_name, get_resource_from_path, get_resource_var_name, get_schema\n'), ((1699, 1766), 'scripts.generate.PrimitiveDefinition', 'PrimitiveDefinition', (['"""a"""', "{'description': 'aaa', 'type': 'boolean'}"], {}), "('a', {'description': 'aaa', 'type': 'boolean'})\n", (1718, 1766), False, 'from scripts.generate import CIRCULAR_PROPERTY_REFERENCES, CIRCULAR_REFERENCES, INT_PROPERTIES, ObjectDefinition, OneOfDefinition, PrimitiveDefinition, Property, Schema, generate_files, get_definition_class, get_import_line_from_reference, get_init_py, get_resource_file_name, get_resource_from_path, get_resource_var_name, get_schema\n'), ((1830, 1896), 'scripts.generate.PrimitiveDefinition', 'PrimitiveDefinition', (['"""a"""', "{'description': 'aaa', 'type': 'number'}"], {}), "('a', {'description': 'aaa', 'type': 'number'})\n", (1849, 1896), False, 'from scripts.generate import CIRCULAR_PROPERTY_REFERENCES, CIRCULAR_REFERENCES, INT_PROPERTIES, ObjectDefinition, OneOfDefinition, PrimitiveDefinition, Property, Schema, generate_files, get_definition_class, get_import_line_from_reference, get_init_py, get_resource_file_name, get_resource_from_path, get_resource_var_name, get_schema\n'), ((2007, 2055), 'scripts.generate.PrimitiveDefinition', 'PrimitiveDefinition', (['"""a"""', "{'description': 'aaa'}"], {}), "('a', {'description': 'aaa'})\n", (2026, 2055), False, 'from scripts.generate import CIRCULAR_PROPERTY_REFERENCES, CIRCULAR_REFERENCES, INT_PROPERTIES, ObjectDefinition, OneOfDefinition, PrimitiveDefinition, Property, Schema, generate_files, get_definition_class, get_import_line_from_reference, get_init_py, get_resource_file_name, get_resource_from_path, get_resource_var_name, get_schema\n'), ((2177, 2249), 'scripts.generate.PrimitiveDefinition', 'PrimitiveDefinition', (['prop_name', "{'description': 'aaa', 'type': 'number'}"], {}), "(prop_name, {'description': 'aaa', 'type': 'number'})\n", (2196, 2249), False, 'from scripts.generate import CIRCULAR_PROPERTY_REFERENCES, CIRCULAR_REFERENCES, INT_PROPERTIES, ObjectDefinition, OneOfDefinition, PrimitiveDefinition, Property, Schema, generate_files, get_definition_class, get_import_line_from_reference, get_init_py, get_resource_file_name, get_resource_from_path, get_resource_var_name, get_schema\n'), ((2417, 2486), 'scripts.generate.PrimitiveDefinition', 'PrimitiveDefinition', (['"""a"""', "{'description': 'aaa', 'type': 'number123'}"], {}), "('a', {'description': 'aaa', 'type': 'number123'})\n", (2436, 2486), False, 'from 
scripts.generate import CIRCULAR_PROPERTY_REFERENCES, CIRCULAR_REFERENCES, INT_PROPERTIES, ObjectDefinition, OneOfDefinition, PrimitiveDefinition, Property, Schema, generate_files, get_definition_class, get_import_line_from_reference, get_init_py, get_resource_file_name, get_resource_from_path, get_resource_var_name, get_schema\n')]
|
import json
import os
import bottle
import time
from app.api import ping_response, start_response, move_response, end_response
from app.board import update_board
from app.random_snake import random_move
from app.food_snake import food_move
from app.wall_snake import wall_move
from app.smart_snake import smart_move
from app.doga_snake import doga_move
snake_num = 0
@bottle.route('/')
def index():
return bottle.static_file('index.html', root='./static/')
@bottle.route('/static/<path:path>')
def static(path):
return bottle.static_file(path, root='./static/')
@bottle.post('/ping')
def ping():
return ping_response()
@bottle.post('/start')
def start():
game_state = bottle.request.json
snake_colour = "#ff0000"
return start_response(snake_colour)
@bottle.post('/move')
def move():
game_state = bottle.request.json
new_board = update_board(game_state)
direction = ""
direction = doga_move(game_state, new_board)
return move_response(direction)
@bottle.post('/end')
def end():
game_state = bottle.request.json
return end_response()
# Expose WSGI app (so gunicorn can find it)
application = bottle.default_app()
def is_input(temp):
if not temp.isnumeric():
return False
if not len(temp)==1:
return False
if int(temp)<1 or int(temp)>5:
return False
return True
if __name__ == '__main__':
snake_num = 5
bottle.run(
application,
host=os.getenv('IP', '0.0.0.0'),
port=os.getenv('PORT', '8080'),
debug=os.getenv('DEBUG', True)
)
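# With `application` exposed above, the app can also be served by a WSGI server,
# e.g. `gunicorn <module_name>:application` (the module name depends on where this
# file lives, which isn't shown here).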
|
[
"app.doga_snake.doga_move",
"bottle.default_app",
"bottle.static_file",
"app.api.ping_response",
"app.api.move_response",
"bottle.route",
"app.board.update_board",
"app.api.end_response",
"os.getenv",
"bottle.post",
"app.api.start_response"
] |
[((373, 390), 'bottle.route', 'bottle.route', (['"""/"""'], {}), "('/')\n", (385, 390), False, 'import bottle\n'), ((468, 503), 'bottle.route', 'bottle.route', (['"""/static/<path:path>"""'], {}), "('/static/<path:path>')\n", (480, 503), False, 'import bottle\n'), ((579, 599), 'bottle.post', 'bottle.post', (['"""/ping"""'], {}), "('/ping')\n", (590, 599), False, 'import bottle\n'), ((642, 663), 'bottle.post', 'bottle.post', (['"""/start"""'], {}), "('/start')\n", (653, 663), False, 'import bottle\n'), ((786, 806), 'bottle.post', 'bottle.post', (['"""/move"""'], {}), "('/move')\n", (797, 806), False, 'import bottle\n'), ((1005, 1024), 'bottle.post', 'bottle.post', (['"""/end"""'], {}), "('/end')\n", (1016, 1024), False, 'import bottle\n'), ((1158, 1178), 'bottle.default_app', 'bottle.default_app', ([], {}), '()\n', (1176, 1178), False, 'import bottle\n'), ((415, 465), 'bottle.static_file', 'bottle.static_file', (['"""index.html"""'], {'root': '"""./static/"""'}), "('index.html', root='./static/')\n", (433, 465), False, 'import bottle\n'), ((533, 575), 'bottle.static_file', 'bottle.static_file', (['path'], {'root': '"""./static/"""'}), "(path, root='./static/')\n", (551, 575), False, 'import bottle\n'), ((623, 638), 'app.api.ping_response', 'ping_response', ([], {}), '()\n', (636, 638), False, 'from app.api import ping_response, start_response, move_response, end_response\n'), ((754, 782), 'app.api.start_response', 'start_response', (['snake_colour'], {}), '(snake_colour)\n', (768, 782), False, 'from app.api import ping_response, start_response, move_response, end_response\n'), ((872, 896), 'app.board.update_board', 'update_board', (['game_state'], {}), '(game_state)\n', (884, 896), False, 'from app.board import update_board\n'), ((933, 965), 'app.doga_snake.doga_move', 'doga_move', (['game_state', 'new_board'], {}), '(game_state, new_board)\n', (942, 965), False, 'from app.doga_snake import doga_move\n'), ((977, 1001), 'app.api.move_response', 'move_response', (['direction'], {}), '(direction)\n', (990, 1001), False, 'from app.api import ping_response, start_response, move_response, end_response\n'), ((1084, 1098), 'app.api.end_response', 'end_response', ([], {}), '()\n', (1096, 1098), False, 'from app.api import ping_response, start_response, move_response, end_response\n'), ((1448, 1474), 'os.getenv', 'os.getenv', (['"""IP"""', '"""0.0.0.0"""'], {}), "('IP', '0.0.0.0')\n", (1457, 1474), False, 'import os\n'), ((1489, 1514), 'os.getenv', 'os.getenv', (['"""PORT"""', '"""8080"""'], {}), "('PORT', '8080')\n", (1498, 1514), False, 'import os\n'), ((1530, 1554), 'os.getenv', 'os.getenv', (['"""DEBUG"""', '(True)'], {}), "('DEBUG', True)\n", (1539, 1554), False, 'import os\n')]
|
import os
import pandas as pd
import pdb
import seaborn as sns
import matplotlib.pyplot as plt
#import pymrmr
from scipy.stats import kendalltau, pearsonr, spearmanr
from sklearn.feature_selection import SelectKBest, mutual_info_classif, chi2, f_classif, RFE
import numpy as np
# Feature Importance Sklearn
# https://machinelearningmastery.com/calculate-feature-importance-with-python/
class Feature_Selection(object):
"""
Class with preprocessments to apply in dataframe
"""
def __init__(self):
super().__init__()
self.correlation_matrix = Correlation_Matrix()
def select_features(self,df,columns):
raise NotImplementedError()
def get_missing_values_df(self,df):
percent_missing = df.isnull().sum() * 100 / len(df)
missing_value_df = pd.DataFrame({'column_name': df.columns,'percent_missing': percent_missing})
return missing_value_df
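    # Helper referenced by the selection methods below but missing from the module
    # as given; a minimal sketch assuming `target_column` names the label column.
    def _split_df_in_xy(self, df, target_column):
        X = df.drop(columns=[target_column])
        y = df[target_column]
        return X, y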
def get_correlation_matrix(self,df,method):
return self.correlation_matrix.get_correlation_matrix(df,method)
def plot_correlation_matrix(self,df_corr,plot=True):
return self.correlation_matrix.plot_correlation_matrix(df_corr,plot)
def get_correlation_with_target(self,df,target_column,method,num_feats=10):
return self.correlation_matrix.get_correlation_with_target(df,target_column,method,num_feats)
    def get_IG_feature_scores(self, df, target_column, n_features_to_select):
"""
        IG (information gain) scores each feature by the reduction in the target's entropy when the feature is known versus absent.
"""
        X, y = self._split_df_in_xy(df, target_column)
        bestfeatures = SelectKBest(score_func=mutual_info_classif, k=n_features_to_select) # n is number of features you want to select
        fit = bestfeatures.fit(X, y)
        dfscores = pd.DataFrame(fit.scores_)
        dfcolumns = pd.DataFrame(X.columns)
featureScores = pd.concat([dfcolumns,dfscores],axis=1)
featureScores.columns = ['Feature','Score']
return featureScores
# def get_mRMR_feature_scores(self,df,n_features_to_select):
# # https://medium.com/subex-ai-labs/feature-selection-techniques-for-machine-learning-in-python-455dadcd3869
# """
# (Minimal Redundancy and Maximal Relevance)
# Intuition: It selects the features, based on their relevancy with the target variable, as well as their redundancy with the other features.
# """
# selected_features = pymrmr.mRMR(df, 'MIQ',n_features_to_select)
# return selected_features
def get_chisquare_feature_scores(self,df,target_column,n_features_to_select):
"""
        It measures the dependence between each feature and the target and keeps the k features with the highest chi-square scores.
"""
X,y = self._split_df_in_xy(df,target_column)
bestfeatures = SelectKBest(score_func=chi2, k=n_features_to_select) # n is number of features you want to select
fit = bestfeatures.fit(X,y)
dfscores = pd.DataFrame(fit.scores_)
        dfcolumns = pd.DataFrame(X.columns)
featureScores = pd.concat([dfcolumns,dfscores],axis=1)
featureScores.columns = ['Feature','Score']
return featureScores
    def get_anova_feature_scores(self, df, target_column, n_features_to_select):
"""
        We perform an ANOVA F-test between each feature and the target to check whether the feature's values differ across the target classes.
"""
        X, y = self._split_df_in_xy(df, target_column)
        bestfeatures = SelectKBest(score_func=f_classif, k=n_features_to_select) # n is number of features you want to select
        fit = bestfeatures.fit(X, y)
        dfscores = pd.DataFrame(fit.scores_)
        dfcolumns = pd.DataFrame(X.columns)
featureScores = pd.concat([dfcolumns,dfscores],axis=1)
featureScores.columns = ['Feature','Score']
return featureScores
    def get_features_by_RFE(self, df, target_column, model):
"""
It is a greedy optimization algorithm which aims to find the best performing feature subset. It repeatedly creates models and keeps aside the best or the worst performing feature at each iteration. It constructs the next model with the left features until all the features are exhausted. It then ranks the features based on the order of their elimination.
"""
#model = LogisticRegression(max_iter=1000)
        X, y = self._split_df_in_xy(df, target_column)
        rfe_model = RFE(model, n_features_to_select=20)
        rfe_fit = rfe_model.fit(X, y)
        selected = df[X.columns[rfe_fit.get_support(indices=True)]]
return selected
    def get_feature_selection_summary(self, feature_name, cor_support, chi_support, rfe_support,
                                      embeded_lr_support, embeded_rf_support, embeded_lgb_support, num_feats=100):
# https://towardsdatascience.com/the-5-feature-selection-algorithms-every-data-scientist-need-to-know-3a6b566efd2
# put all selection together
feature_selection_df = pd.DataFrame({'Feature':feature_name, 'Pearson':cor_support, 'Chi-2':chi_support, 'RFE':rfe_support, 'Logistics':embeded_lr_support,
'Random Forest':embeded_rf_support, 'LightGBM':embeded_lgb_support})
# count the selected times for each feature
        feature_selection_df['Total'] = feature_selection_df.drop(columns=['Feature']).sum(axis=1)
# display the top 100
feature_selection_df = feature_selection_df.sort_values(['Total','Feature'] , ascending=False)
feature_selection_df.index = range(1, len(feature_selection_df)+1)
        return feature_selection_df.head(num_feats)
class Correlation_Matrix(object):
def __init__(self):
super().__init__()
def get_correlation_with_target(self,df,target_column,method,num_feats):
corr_dict = self.get_correlation_matrix(df,method)
df_k,df_p = corr_dict['df_k'],corr_dict['df_p']
correlations_with_target = df_k[target_column]
correlations_with_target = correlations_with_target.fillna(0)
correlations_with_target = correlations_with_target[correlations_with_target.index.difference([target_column])]
        correlations_with_target = correlations_with_target.abs().sort_values(ascending=False)
correlations_with_target = correlations_with_target[:num_feats]
return correlations_with_target
def plot_correlation_matrix(self,df_corr,plot=True):
plt.figure(figsize=(16, 6))
heatmap = sns.heatmap(df_corr, vmin=-1, vmax=1, annot=True, cmap='coolwarm')
heatmap.set_title('Correlation Heatmap', fontdict={'fontsize':18}, pad=12)
if plot:
plt.show()
else:
return heatmap
def get_correlation_matrix(self,df,method):
        accepted_correlations = ['pearson', 'spearman', 'kendall']
        if method not in accepted_correlations:
            raise ValueError(f"The method must be one of {accepted_correlations}")
if method == 'pearson':
method_k = self._pearsonr_rval
method_p = self._pearsonr_pval
elif method == 'spearman':
method_k = self._spearmanr_rval
method_p = self._spearmanr_pval
elif method == 'kendall':
method_k = self._kendall_rval
method_p = self._kendall_pval
df_k = df.corr(method=method_k)
df_p = df.corr(method=method_p)
return {'df_k':df_k,'df_p':df_p}
def _kendall_rval(self,x,y):
return np.round(kendalltau(x,y)[0],6)
def _pearsonr_rval(self,x,y):
return np.round(pearsonr(x,y)[0],6)
def _spearmanr_rval(self,x,y):
return np.round(spearmanr(x,y)[0],6)
def _kendall_pval(self,x,y):
return np.round(kendalltau(x,y)[1],6)
def _pearsonr_pval(self,x,y):
return np.round(pearsonr(x,y)[1],6)
def _spearmanr_pval(self,x,y):
return np.round(spearmanr(x,y)[1],6)
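# Minimal usage sketch: rank two toy features by their absolute Kendall
# correlation with a binary target (the dataframe below is made up for the demo).
if __name__ == '__main__':
    demo_df = pd.DataFrame({
        'feat_a': [1, 2, 3, 4, 5, 6],
        'feat_b': [6, 5, 4, 3, 2, 1],
        'target': [0, 0, 0, 1, 1, 1],
    })
    selector = Feature_Selection()
    print(selector.get_correlation_with_target(demo_df, 'target', method='kendall', num_feats=2))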
|
[
"pandas.DataFrame",
"numpy.sum",
"seaborn.heatmap",
"matplotlib.pyplot.show",
"sklearn.feature_selection.RFE",
"scipy.stats.spearmanr",
"scipy.stats.pearsonr",
"matplotlib.pyplot.figure",
"pdb.set_trace",
"scipy.stats.kendalltau",
"sklearn.feature_selection.SelectKBest",
"pandas.concat"
] |
[((804, 881), 'pandas.DataFrame', 'pd.DataFrame', (["{'column_name': df.columns, 'percent_missing': percent_missing}"], {}), "({'column_name': df.columns, 'percent_missing': percent_missing})\n", (816, 881), True, 'import pandas as pd\n'), ((1598, 1665), 'sklearn.feature_selection.SelectKBest', 'SelectKBest', ([], {'score_func': 'mutual_info_classif', 'k': 'n_features_to_select'}), '(score_func=mutual_info_classif, k=n_features_to_select)\n', (1609, 1665), False, 'from sklearn.feature_selection import SelectKBest, mutual_info_classif, chi2, f_classif, RFE\n'), ((1767, 1792), 'pandas.DataFrame', 'pd.DataFrame', (['fit.scores_'], {}), '(fit.scores_)\n', (1779, 1792), True, 'import pandas as pd\n'), ((1813, 1837), 'pandas.DataFrame', 'pd.DataFrame', (['xs.columns'], {}), '(xs.columns)\n', (1825, 1837), True, 'import pandas as pd\n'), ((1862, 1902), 'pandas.concat', 'pd.concat', (['[dfcolumns, dfscores]'], {'axis': '(1)'}), '([dfcolumns, dfscores], axis=1)\n', (1871, 1902), True, 'import pandas as pd\n'), ((2891, 2906), 'pdb.set_trace', 'pdb.set_trace', ([], {}), '()\n', (2904, 2906), False, 'import pdb\n'), ((2930, 2982), 'sklearn.feature_selection.SelectKBest', 'SelectKBest', ([], {'score_func': 'chi2', 'k': 'n_features_to_select'}), '(score_func=chi2, k=n_features_to_select)\n', (2941, 2982), False, 'from sklearn.feature_selection import SelectKBest, mutual_info_classif, chi2, f_classif, RFE\n'), ((3083, 3108), 'pandas.DataFrame', 'pd.DataFrame', (['fit.scores_'], {}), '(fit.scores_)\n', (3095, 3108), True, 'import pandas as pd\n'), ((3129, 3153), 'pandas.DataFrame', 'pd.DataFrame', (['xs.columns'], {}), '(xs.columns)\n', (3141, 3153), True, 'import pandas as pd\n'), ((3178, 3218), 'pandas.concat', 'pd.concat', (['[dfcolumns, dfscores]'], {'axis': '(1)'}), '([dfcolumns, dfscores], axis=1)\n', (3187, 3218), True, 'import pandas as pd\n'), ((3507, 3564), 'sklearn.feature_selection.SelectKBest', 'SelectKBest', ([], {'score_func': 'f_classif', 'k': 'n_features_to_select'}), '(score_func=f_classif, k=n_features_to_select)\n', (3518, 3564), False, 'from sklearn.feature_selection import SelectKBest, mutual_info_classif, chi2, f_classif, RFE\n'), ((3666, 3691), 'pandas.DataFrame', 'pd.DataFrame', (['fit.scores_'], {}), '(fit.scores_)\n', (3678, 3691), True, 'import pandas as pd\n'), ((3712, 3736), 'pandas.DataFrame', 'pd.DataFrame', (['xs.columns'], {}), '(xs.columns)\n', (3724, 3736), True, 'import pandas as pd\n'), ((3761, 3801), 'pandas.concat', 'pd.concat', (['[dfcolumns, dfscores]'], {'axis': '(1)'}), '([dfcolumns, dfscores], axis=1)\n', (3770, 3801), True, 'import pandas as pd\n'), ((4386, 4400), 'sklearn.feature_selection.RFE', 'RFE', (['model', '(20)'], {}), '(model, 20)\n', (4389, 4400), False, 'from sklearn.feature_selection import SelectKBest, mutual_info_classif, chi2, f_classif, RFE\n'), ((4771, 4987), 'pandas.DataFrame', 'pd.DataFrame', (["{'Feature': feature_name, 'Pearson': cor_support, 'Chi-2': chi_support,\n 'RFE': rfe_support, 'Logistics': embeded_lr_support, 'Random Forest':\n embeded_rf_support, 'LightGBM': embeded_lgb_support}"], {}), "({'Feature': feature_name, 'Pearson': cor_support, 'Chi-2':\n chi_support, 'RFE': rfe_support, 'Logistics': embeded_lr_support,\n 'Random Forest': embeded_rf_support, 'LightGBM': embeded_lgb_support})\n", (4783, 4987), True, 'import pandas as pd\n'), ((5109, 5145), 'numpy.sum', 'np.sum', (['feature_selection_df'], {'axis': '(1)'}), '(feature_selection_df, axis=1)\n', (5115, 5145), True, 'import numpy as np\n'), ((6226, 6253), 
'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(16, 6)'}), '(figsize=(16, 6))\n', (6236, 6253), True, 'import matplotlib.pyplot as plt\n'), ((6272, 6338), 'seaborn.heatmap', 'sns.heatmap', (['df_corr'], {'vmin': '(-1)', 'vmax': '(1)', 'annot': '(True)', 'cmap': '"""coolwarm"""'}), "(df_corr, vmin=-1, vmax=1, annot=True, cmap='coolwarm')\n", (6283, 6338), True, 'import seaborn as sns\n'), ((6451, 6461), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (6459, 6461), True, 'import matplotlib.pyplot as plt\n'), ((7307, 7323), 'scipy.stats.kendalltau', 'kendalltau', (['x', 'y'], {}), '(x, y)\n', (7317, 7323), False, 'from scipy.stats import kendalltau, pearsonr, spearmanr\n'), ((7388, 7402), 'scipy.stats.pearsonr', 'pearsonr', (['x', 'y'], {}), '(x, y)\n', (7396, 7402), False, 'from scipy.stats import kendalltau, pearsonr, spearmanr\n'), ((7468, 7483), 'scipy.stats.spearmanr', 'spearmanr', (['x', 'y'], {}), '(x, y)\n', (7477, 7483), False, 'from scipy.stats import kendalltau, pearsonr, spearmanr\n'), ((7547, 7563), 'scipy.stats.kendalltau', 'kendalltau', (['x', 'y'], {}), '(x, y)\n', (7557, 7563), False, 'from scipy.stats import kendalltau, pearsonr, spearmanr\n'), ((7628, 7642), 'scipy.stats.pearsonr', 'pearsonr', (['x', 'y'], {}), '(x, y)\n', (7636, 7642), False, 'from scipy.stats import kendalltau, pearsonr, spearmanr\n'), ((7708, 7723), 'scipy.stats.spearmanr', 'spearmanr', (['x', 'y'], {}), '(x, y)\n', (7717, 7723), False, 'from scipy.stats import kendalltau, pearsonr, spearmanr\n')]
|
#########
# 11/15/2021
#Generating a dataset for only categorical
df_categorical = df.select_dtypes(exclude=['number'])
df_categorical=df_categorical.drop(['Date.of.Birth','DisbursalDate'],axis=1)
df_categorical.head()
#Building a Dataset for numerical (continous)
df_continuous = df.select_dtypes(include=['number'])
df_continuous=df_continuous.drop(['UniqueID'],axis=1)
df_continuous.head()
#Univariate Analysis
import matplotlib.pyplot as plt # Load matplotlib
import numpy as np # used for the np.triu masks below
import seaborn as sns # Load seaborn
#To obtain the basic statistics
df_continuous.describe()
#Get the List of all Column Names
continuous_list = list(df_continuous)
# Plot for all continous
#1
sns.displot(df['disbursed_amount'][df['disbursed_amount'] < df['disbursed_amount'].quantile(.99)],kind='hist',kde=True)
plt.show()
#2
sns.displot(df['asset_cost'][df['asset_cost'] < df['asset_cost'].quantile(.99)],kind='hist',kde=True)
plt.show()
#3
sns.displot(df['ltv'][df['ltv'] < df['ltv'].quantile(.99)],kind='hist',kde=True)
plt.show()
#4
sns.displot(df['PERFORM_CNS.SCORE'][df['PERFORM_CNS.SCORE'] < df['PERFORM_CNS.SCORE'].quantile(.99)],kind='hist',kde=True)
plt.show()
#5
sns.displot(df['PRI.NO.OF.ACCTS'][df['PRI.NO.OF.ACCTS'] < df['PRI.NO.OF.ACCTS'].quantile(.99)],kind='hist',kde=True)
plt.show()
#6
sns.displot(df['PRI.ACTIVE.ACCTS'][df['PRI.ACTIVE.ACCTS'] < df['PRI.ACTIVE.ACCTS'].quantile(.99)],kind='hist',kde=True)
plt.show()
#6
sns.displot(df['PRI.OVERDUE.ACCTS'][df['PRI.OVERDUE.ACCTS'] < df['PRI.OVERDUE.ACCTS'].quantile(.99)],kind='hist',kde=True)
plt.show()
#7
sns.displot(df['PRI.CURRENT.BALANCE'][df['PRI.CURRENT.BALANCE'] < df['PRI.CURRENT.BALANCE'].quantile(.99)],kind='hist',kde=True)
plt.show()
#8
sns.displot(df['PRI.SANCTIONED.AMOUNT'][df['PRI.SANCTIONED.AMOUNT'] < df['PRI.SANCTIONED.AMOUNT'].quantile(.99)],kind='hist',kde=True)
plt.show()
#9
sns.displot(df['PRI.DISBURSED.AMOUNT'][df['PRI.DISBURSED.AMOUNT'] < df['PRI.DISBURSED.AMOUNT'].quantile(.99)],kind='hist',kde=True)
plt.show()
#10
sns.displot(df['SEC.NO.OF.ACCTS'][df['SEC.NO.OF.ACCTS'] < df['SEC.NO.OF.ACCTS'].quantile(.99)],kind='hist',kde=True)
plt.show()
#11
sns.displot(df['SEC.ACTIVE.ACCTS'][df['SEC.ACTIVE.ACCTS'] < df['SEC.ACTIVE.ACCTS'].quantile(.99)],kind='hist',kde=True)
plt.show()
#12
sns.displot(df['SEC.OVERDUE.ACCTS'][df['SEC.OVERDUE.ACCTS'] < df['SEC.OVERDUE.ACCTS'].quantile(.99)],kind='hist',kde=True)
plt.show()
#13
sns.displot(df['SEC.CURRENT.BALANCE'][df['SEC.CURRENT.BALANCE'] < df['SEC.CURRENT.BALANCE'].quantile(.99)],kind='hist',kde=True)
plt.show()
#14
sns.displot(df['SEC.SANCTIONED.AMOUNT'][df['SEC.SANCTIONED.AMOUNT'] < df['SEC.SANCTIONED.AMOUNT'].quantile(.99)],kind='hist',kde=True)
plt.show()
#15
sns.displot(df['SEC.DISBURSED.AMOUNT'][df['SEC.DISBURSED.AMOUNT'] < df['SEC.DISBURSED.AMOUNT'].quantile(.99)],kind='hist',kde=True)
plt.show()
#16
sns.displot(df['PRIMARY.INSTAL.AMT'][df['PRIMARY.INSTAL.AMT'] < df['PRIMARY.INSTAL.AMT'].quantile(.99)],kind='hist',kde=True)
plt.show()
#17
sns.displot(df['NEW.ACCTS.IN.LAST.SIX.MONTHS'][df['NEW.ACCTS.IN.LAST.SIX.MONTHS'] < df['NEW.ACCTS.IN.LAST.SIX.MONTHS'].quantile(.99)],kind='hist',kde=True)
plt.show()
#18
sns.displot(df['DELINQUENT.ACCTS.IN.LAST.SIX.MONTHS'][df['DELINQUENT.ACCTS.IN.LAST.SIX.MONTHS'] < df['DELINQUENT.ACCTS.IN.LAST.SIX.MONTHS'].quantile(.99)],kind='hist',kde=True)
plt.show()
#19
sns.displot(df['AVERAGE.ACCT.AGE'][df['AVERAGE.ACCT.AGE'] < df['AVERAGE.ACCT.AGE'].quantile(.99)],kind='hist',kde=True)
plt.show()
#20
sns.displot(df['CREDIT.HISTORY.LENGTH'][df['CREDIT.HISTORY.LENGTH'] < df['CREDIT.HISTORY.LENGTH'].quantile(.99)],kind='hist',kde=True)
plt.show()
#22
sns.displot(df['NO.OF_INQUIRIES'][df['NO.OF_INQUIRIES'] < df['NO.OF_INQUIRIES'].quantile(.99)],kind='hist',kde=True)
plt.show()
#23
sns.displot(df['Age'][df['Age'] < df['Age'].quantile(.99)],kind='hist',kde=True)
plt.show()
#23
sns.displot(df['Disbursal_months'][df['Disbursal_months'] < df['Disbursal_months'].quantile(.99)],kind='hist',kde=True)
plt.show()
########Multivariate Analysis
plt.rcParams["figure.figsize"] = (10,7)
sns.heatmap(df_continuous.corr())
plt.show()
#Heat map
sns.heatmap(df_continuous.corr(), cmap="YlGnBu", annot=False,mask=np.triu(df_continuous.corr()))
plt.show()
#Heat map that highligts if the correlation is greater than 0.6
sns.heatmap(df_continuous.corr().abs()>0.6, cmap="YlGnBu", annot=False,mask=np.triu(df_continuous.corr()))
plt.show() # cells shown in black mark the most highly correlated pairs
def get_redundant_pairs(df):
'''Get diagonal and lower triangular pairs of correlation matrix'''
pairs_to_drop = set()
cols = df.columns
for i in range(0, df.shape[1]):
for j in range(0, i+1):
pairs_to_drop.add((cols[i], cols[j]))
return pairs_to_drop
def get_top_abs_correlations(df, n=5):
au_corr = df.corr().abs().unstack()
labels_to_drop = get_redundant_pairs(df)
au_corr = au_corr.drop(labels=labels_to_drop).sort_values(ascending=False)
return au_corr[0:n]
list1=get_top_abs_correlations(df_continuous,n=9)
print(list1)
'''
11/22 Aihan added, file name unchanged
test needed
'''
# Continuous variable vs categorical variables
score_ranking = ["A-Very Low Risk", "B-Very Low Risk", "C-Very Low Risk", "D-Very Low Risk", \
"E-Low Risk", "F-Low Risk", "G-Low Risk", "H-Medium Risk", "I-Medium Risk", "J-High Risk", "K-High Risk",\
"L-Very High Risk", "M-Very High Risk", "No Bureau History Available", "Not Scored: No Activity seen on the customer (Inactive)", \
"Not Scored: Not Enough Info available on the customer", "Not Scored: Sufficient History Not Available", "Not Scored: Only a Guarantor",\
"Not Scored: No Updates available in last 36 months1", "Not Scored: More than 50 active Accounts found"]
# sns.boxplot(x="PERFORM_CNS.SCORE.DESCRIPTION", y="PERFORM_CNS.SCORE", color="b", data=df_subset)
# plt.show()
def df_boxplot(df, xstr, ystr):
sns.boxplot(x=xstr, y=ystr, palette=sns.color_palette(), data=df)
plt.show()
# continuous variable vs target
df_subset = merge_df[merge_df['PERFORM_CNS.SCORE.DESCRIPTION'] < 13]
df_boxplot(df_subset, "PERFORM_CNS.SCORE.DESCRIPTION", "PERFORM_CNS.SCORE")
#continuous variable vs target
df_boxplot(merge_df, "loan_default", y="PERFORM_CNS.SCORE")
# t-test
# stats.ttest_ind(a, b, equal_var = False)
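# Welch t-test sketch for the commented line above (assumes scipy is available and
# that the intended comparison is PERFORM_CNS.SCORE split by loan_default):
from scipy import stats
a = merge_df.loc[merge_df['loan_default'] == 0, 'PERFORM_CNS.SCORE']
b = merge_df.loc[merge_df['loan_default'] == 1, 'PERFORM_CNS.SCORE']
print(stats.ttest_ind(a, b, equal_var=False))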
|
[
"matplotlib.pyplot.show",
"seaborn.color_palette"
] |
[((788, 798), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (796, 798), True, 'import matplotlib.pyplot as plt\n'), ((905, 915), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (913, 915), True, 'import matplotlib.pyplot as plt\n'), ((1001, 1011), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1009, 1011), True, 'import matplotlib.pyplot as plt\n'), ((1139, 1149), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1147, 1149), True, 'import matplotlib.pyplot as plt\n'), ((1271, 1281), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1279, 1281), True, 'import matplotlib.pyplot as plt\n'), ((1406, 1416), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1414, 1416), True, 'import matplotlib.pyplot as plt\n'), ((1544, 1554), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1552, 1554), True, 'import matplotlib.pyplot as plt\n'), ((1688, 1698), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1696, 1698), True, 'import matplotlib.pyplot as plt\n'), ((1838, 1848), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1846, 1848), True, 'import matplotlib.pyplot as plt\n'), ((1985, 1995), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1993, 1995), True, 'import matplotlib.pyplot as plt\n'), ((2118, 2128), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2126, 2128), True, 'import matplotlib.pyplot as plt\n'), ((2254, 2264), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2262, 2264), True, 'import matplotlib.pyplot as plt\n'), ((2393, 2403), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2401, 2403), True, 'import matplotlib.pyplot as plt\n'), ((2538, 2548), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2546, 2548), True, 'import matplotlib.pyplot as plt\n'), ((2689, 2699), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2697, 2699), True, 'import matplotlib.pyplot as plt\n'), ((2837, 2847), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2845, 2847), True, 'import matplotlib.pyplot as plt\n'), ((2979, 2989), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2987, 2989), True, 'import matplotlib.pyplot as plt\n'), ((3151, 3161), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3159, 3161), True, 'import matplotlib.pyplot as plt\n'), ((3344, 3354), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3352, 3354), True, 'import matplotlib.pyplot as plt\n'), ((3480, 3490), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3488, 3490), True, 'import matplotlib.pyplot as plt\n'), ((3631, 3641), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3639, 3641), True, 'import matplotlib.pyplot as plt\n'), ((3782, 3792), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3790, 3792), True, 'import matplotlib.pyplot as plt\n'), ((3915, 3925), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3923, 3925), True, 'import matplotlib.pyplot as plt\n'), ((4012, 4022), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4020, 4022), True, 'import matplotlib.pyplot as plt\n'), ((4148, 4158), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4156, 4158), True, 'import matplotlib.pyplot as plt\n'), ((4265, 4275), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4273, 4275), True, 'import matplotlib.pyplot as plt\n'), ((4384, 4394), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4392, 4394), True, 'import matplotlib.pyplot as plt\n'), ((4567, 4577), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4575, 
4577), True, 'import matplotlib.pyplot as plt\n'), ((6179, 6189), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (6187, 6189), True, 'import matplotlib.pyplot as plt\n'), ((6145, 6164), 'seaborn.color_palette', 'sns.color_palette', ([], {}), '()\n', (6162, 6164), True, 'import seaborn as sns\n')]
|
# -*- coding:utf-8 -*-
from django.db import models
from django.contrib.auth.models import User
BRANCHE_CHOICES = (
(1, u"Lutins"),
(2, u"Louveteaux"),
(3, u"Éclés"),
(4, u"Aînés"),
)
EXPLOGRAM_CHOICES = (
(1, u"Le pont des cultures"),
(2, u"On est cap !"),
(3, u"Filles/garçons"),
(4, u"Au fil du temps"),
)
class Unite(models.Model):
branche = models.IntegerField(u"Branche", choices=BRANCHE_CHOICES)
nom = models.CharField(u"Nom de l'unité/équipage", max_length=100)
inscr_explogram = models.NullBooleanField(u"Nous souhaitons participer à l'ExploGRAM")
inscr_congram = models.NullBooleanField(u"Nous souhaitons participer au ConGRAM")
inscr_tangram = models.NullBooleanField(u"Nous souhaitons participer au rassemblement TanGRAM")
theme_explogram = models.CharField(max_length=100, blank=True)
etat_explogram = models.TextField(u"Descriptif de l'ExploGRAM tel qu'il en est aujourd'hui", blank=True)
effectif = models.PositiveIntegerField(u"Effectif approximatif", blank=True, null=True)
contact = models.CharField(max_length=100, blank=True)
tel = models.CharField(max_length=100, blank=True)
user = models.OneToOneField(User)
fg1_theme = models.IntegerField(u"Notre exploGRAM", null=True, blank=True, choices=EXPLOGRAM_CHOICES)
fg1_projet = models.TextField(u"Le projet qu'on rêve de réaliser", blank=True)
fg2_votants = models.CharField(u"Nombre de votants", max_length=100, blank=True)
fg2_resultat = models.CharField(u"Résultats des votes", max_length=100, blank=True)
fg2_elus = models.TextField(u"Sont élus", blank=True)
fg2_adresses = models.TextField(u"Adresses", blank=True)
fg2_propositions = models.TextField(u"Nombre de propositions à présenter", blank=True)
fg3_date = models.CharField(u"Date", max_length=100, blank=True)
fg3_temps = models.TextField(u"Temps consacré", blank=True)
fg3_forme = models.TextField(u"Forme adoptée (conseil, jeux,...)", blank=True)
fg4_theme = models.IntegerField(u"Notre exploGRAM", null=True, blank=True, choices=EXPLOGRAM_CHOICES)
fg4_description = models.TextField(u"Le projet qu'on rêve de réaliser", blank=True)
fg4_taches = models.BooleanField(u"Nous avons listé les tâches à faire")
fg4_roles = models.BooleanField(u"Nous nous sommes répartis les rôles")
fg4_partenaire = models.BooleanField(u"Nous allons chercher un partenaire")
fg4_materiel = models.BooleanField(u"Nous avons fait la liste du matériel")
fg4_reunions = models.IntegerField(u"Nombre de réunions de préparation", null=True, blank=True)
fg5_theme = models.IntegerField(u"Notre exploGRAM", null=True, blank=True, choices=EXPLOGRAM_CHOICES)
fg5_texte = models.TextField(u"La carte postale de l'exploGRAM", blank=True)
fg5_photo = models.ImageField(u"Photo", blank=True, null=True, upload_to='carte_postale/')
fg6_theme = models.IntegerField(u"Notre exploGRAM", null=True, blank=True, choices=EXPLOGRAM_CHOICES)
fg6_date = models.CharField(u"Date", max_length=100, blank=True)
fg6_descriptif = models.TextField(u"Descriptif de notre projet", blank=True)
fg6_positifs = models.TextField(u"Les points positifs de notre projet", blank=True)
fg7_theme = models.IntegerField(u"Notre exploGRAM", null=True, blank=True, choices=EXPLOGRAM_CHOICES)
fg7_description = models.TextField(u"Description de notre retransmission", blank=True)
fg7_install = models.CharField(u"Le temps qu'il nous faudra pour l'installer", max_length=100, blank=True)
fg7_presentation = models.CharField(u"Le temps qu'il nous faudra pour le présenter pendant le rassemblement tanGRAM", max_length=100, blank=True)
fg7_espace = models.CharField(u"L'espace qu'il nous faudra", max_length=100, blank=True)
fg7_micro = models.BooleanField(u"Nous aurons besoin d'un micro")
fg7_ecran = models.BooleanField(u"Nous aurons besoin d'un écran")
fg7_expo = models.BooleanField(u"Nous aurons besoin de supports expo")
fg7_autre = models.TextField(u"Nous aurons besoin d'autres choses", blank=True)
fg1_ok = models.BooleanField(u"Gram attribué")
fg2_ok = models.BooleanField(u"Gram attribué")
fg3_ok = models.BooleanField(u"Gram attribué")
fg4_ok = models.BooleanField(u"Gram attribué")
fg5_ok = models.BooleanField(u"Gram attribué")
fg6_ok = models.BooleanField(u"Gram attribué")
fg7_ok = models.BooleanField(u"Gram attribué")
nb_grams = models.IntegerField(u"nombre de grams", default=0)
def save(self, *args, **kwargs):
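        # Recompute how many of the seven "gram" milestones (fg1_ok .. fg7_ok) are validated before saving.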
self.nb_grams = sum([int(getattr(self, 'fg%u_ok' % i)) for i in range(1, 8)])
super(Unite, self).save(*args, **kwargs)
class FicheAction(models.Model):
titre = models.CharField(u"Titre", max_length=100)
par = models.CharField(u"Action réalisée par", max_length=100, blank=True)
public = models.TextField(u"Description public (âge, autonomie, effectif…)", blank=True)
deroule = models.TextField(u"Descriptif du déroulé", blank=True)
activite = models.CharField(u"Type d'activité (grand jeux, forum, projet de longue durée…)", max_length=100, blank=True)
objectifs = models.TextField(u"Objectifs", blank=True)
place = models.TextField(u"Place dans une démarche d'année (lancement, construction, conclusion…)", blank=True)
duree = models.CharField(u"Durée", max_length=100, blank=True)
lancement = models.TextField(u"Place dans les respos dans le lancement", blank=True)
realisation = models.TextField(u"Place dans les respos dans la réalisation", blank=True)
valorisation = models.TextField(u"Place dans les respos dans la valorisation", blank=True)
biblio = models.TextField(u"Bibliographie", blank=True)
partenaires = models.TextField(u"Les partenaires (si c'est le cas)", blank=True)
matos = models.TextField(u"Besoins matériels", blank=True)
annexe = models.FileField(u"Annexes jointes", blank=True, upload_to='fiche_action')
user = models.ForeignKey(User)
def __unicode__(self):
return self.titre
|
[
"django.db.models.FileField",
"django.db.models.NullBooleanField",
"django.db.models.TextField",
"django.db.models.OneToOneField",
"django.db.models.CharField",
"django.db.models.ForeignKey",
"django.db.models.PositiveIntegerField",
"django.db.models.BooleanField",
"django.db.models.ImageField",
"django.db.models.IntegerField"
] |
[((388, 444), 'django.db.models.IntegerField', 'models.IntegerField', (['u"""Branche"""'], {'choices': 'BRANCHE_CHOICES'}), "(u'Branche', choices=BRANCHE_CHOICES)\n", (407, 444), False, 'from django.db import models\n'), ((455, 515), 'django.db.models.CharField', 'models.CharField', (['u"""Nom de l\'unité/équipage"""'], {'max_length': '(100)'}), '(u"Nom de l\'unité/équipage", max_length=100)\n', (471, 515), False, 'from django.db import models\n'), ((538, 606), 'django.db.models.NullBooleanField', 'models.NullBooleanField', (['u"""Nous souhaitons participer à l\'ExploGRAM"""'], {}), '(u"Nous souhaitons participer à l\'ExploGRAM")\n', (561, 606), False, 'from django.db import models\n'), ((627, 692), 'django.db.models.NullBooleanField', 'models.NullBooleanField', (['u"""Nous souhaitons participer au ConGRAM"""'], {}), "(u'Nous souhaitons participer au ConGRAM')\n", (650, 692), False, 'from django.db import models\n'), ((713, 792), 'django.db.models.NullBooleanField', 'models.NullBooleanField', (['u"""Nous souhaitons participer au rassemblement TanGRAM"""'], {}), "(u'Nous souhaitons participer au rassemblement TanGRAM')\n", (736, 792), False, 'from django.db import models\n'), ((815, 859), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'blank': '(True)'}), '(max_length=100, blank=True)\n', (831, 859), False, 'from django.db import models\n'), ((881, 972), 'django.db.models.TextField', 'models.TextField', (['u"""Descriptif de l\'ExploGRAM tel qu\'il en est aujourd\'hui"""'], {'blank': '(True)'}), '(u"Descriptif de l\'ExploGRAM tel qu\'il en est aujourd\'hui",\n blank=True)\n', (897, 972), False, 'from django.db import models\n'), ((984, 1060), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', (['u"""Effectif approximatif"""'], {'blank': '(True)', 'null': '(True)'}), "(u'Effectif approximatif', blank=True, null=True)\n", (1011, 1060), False, 'from django.db import models\n'), ((1075, 1119), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'blank': '(True)'}), '(max_length=100, blank=True)\n', (1091, 1119), False, 'from django.db import models\n'), ((1130, 1174), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'blank': '(True)'}), '(max_length=100, blank=True)\n', (1146, 1174), False, 'from django.db import models\n'), ((1186, 1212), 'django.db.models.OneToOneField', 'models.OneToOneField', (['User'], {}), '(User)\n', (1206, 1212), False, 'from django.db import models\n'), ((1229, 1323), 'django.db.models.IntegerField', 'models.IntegerField', (['u"""Notre exploGRAM"""'], {'null': '(True)', 'blank': '(True)', 'choices': 'EXPLOGRAM_CHOICES'}), "(u'Notre exploGRAM', null=True, blank=True, choices=\n EXPLOGRAM_CHOICES)\n", (1248, 1323), False, 'from django.db import models\n'), ((1336, 1401), 'django.db.models.TextField', 'models.TextField', (['u"""Le projet qu\'on rêve de réaliser"""'], {'blank': '(True)'}), '(u"Le projet qu\'on rêve de réaliser", blank=True)\n', (1352, 1401), False, 'from django.db import models\n'), ((1420, 1486), 'django.db.models.CharField', 'models.CharField', (['u"""Nombre de votants"""'], {'max_length': '(100)', 'blank': '(True)'}), "(u'Nombre de votants', max_length=100, blank=True)\n", (1436, 1486), False, 'from django.db import models\n'), ((1506, 1574), 'django.db.models.CharField', 'models.CharField', (['u"""Résultats des votes"""'], {'max_length': '(100)', 'blank': '(True)'}), "(u'Résultats des votes', max_length=100, blank=True)\n", (1522, 1574), False, 
'from django.db import models\n'), ((1590, 1632), 'django.db.models.TextField', 'models.TextField', (['u"""Sont élus"""'], {'blank': '(True)'}), "(u'Sont élus', blank=True)\n", (1606, 1632), False, 'from django.db import models\n'), ((1652, 1693), 'django.db.models.TextField', 'models.TextField', (['u"""Adresses"""'], {'blank': '(True)'}), "(u'Adresses', blank=True)\n", (1668, 1693), False, 'from django.db import models\n'), ((1717, 1784), 'django.db.models.TextField', 'models.TextField', (['u"""Nombre de propositions à présenter"""'], {'blank': '(True)'}), "(u'Nombre de propositions à présenter', blank=True)\n", (1733, 1784), False, 'from django.db import models\n'), ((1800, 1853), 'django.db.models.CharField', 'models.CharField', (['u"""Date"""'], {'max_length': '(100)', 'blank': '(True)'}), "(u'Date', max_length=100, blank=True)\n", (1816, 1853), False, 'from django.db import models\n'), ((1870, 1917), 'django.db.models.TextField', 'models.TextField', (['u"""Temps consacré"""'], {'blank': '(True)'}), "(u'Temps consacré', blank=True)\n", (1886, 1917), False, 'from django.db import models\n'), ((1934, 2000), 'django.db.models.TextField', 'models.TextField', (['u"""Forme adoptée (conseil, jeux,...)"""'], {'blank': '(True)'}), "(u'Forme adoptée (conseil, jeux,...)', blank=True)\n", (1950, 2000), False, 'from django.db import models\n'), ((2017, 2111), 'django.db.models.IntegerField', 'models.IntegerField', (['u"""Notre exploGRAM"""'], {'null': '(True)', 'blank': '(True)', 'choices': 'EXPLOGRAM_CHOICES'}), "(u'Notre exploGRAM', null=True, blank=True, choices=\n EXPLOGRAM_CHOICES)\n", (2036, 2111), False, 'from django.db import models\n'), ((2129, 2194), 'django.db.models.TextField', 'models.TextField', (['u"""Le projet qu\'on rêve de réaliser"""'], {'blank': '(True)'}), '(u"Le projet qu\'on rêve de réaliser", blank=True)\n', (2145, 2194), False, 'from django.db import models\n'), ((2212, 2271), 'django.db.models.BooleanField', 'models.BooleanField', (['u"""Nous avons listé les tâches à faire"""'], {}), "(u'Nous avons listé les tâches à faire')\n", (2231, 2271), False, 'from django.db import models\n'), ((2288, 2347), 'django.db.models.BooleanField', 'models.BooleanField', (['u"""Nous nous sommes répartis les rôles"""'], {}), "(u'Nous nous sommes répartis les rôles')\n", (2307, 2347), False, 'from django.db import models\n'), ((2369, 2427), 'django.db.models.BooleanField', 'models.BooleanField', (['u"""Nous allons chercher un partenaire"""'], {}), "(u'Nous allons chercher un partenaire')\n", (2388, 2427), False, 'from django.db import models\n'), ((2447, 2507), 'django.db.models.BooleanField', 'models.BooleanField', (['u"""Nous avons fait la liste du matériel"""'], {}), "(u'Nous avons fait la liste du matériel')\n", (2466, 2507), False, 'from django.db import models\n'), ((2527, 2612), 'django.db.models.IntegerField', 'models.IntegerField', (['u"""Nombre de réunions de préparation"""'], {'null': '(True)', 'blank': '(True)'}), "(u'Nombre de réunions de préparation', null=True, blank=True\n )\n", (2546, 2612), False, 'from django.db import models\n'), ((2624, 2718), 'django.db.models.IntegerField', 'models.IntegerField', (['u"""Notre exploGRAM"""'], {'null': '(True)', 'blank': '(True)', 'choices': 'EXPLOGRAM_CHOICES'}), "(u'Notre exploGRAM', null=True, blank=True, choices=\n EXPLOGRAM_CHOICES)\n", (2643, 2718), False, 'from django.db import models\n'), ((2730, 2794), 'django.db.models.TextField', 'models.TextField', (['u"""La carte postale de l\'exploGRAM"""'], {'blank': '(True)'}), '(u"La 
carte postale de l\'exploGRAM", blank=True)\n', (2746, 2794), False, 'from django.db import models\n'), ((2811, 2889), 'django.db.models.ImageField', 'models.ImageField', (['u"""Photo"""'], {'blank': '(True)', 'null': '(True)', 'upload_to': '"""carte_postale/"""'}), "(u'Photo', blank=True, null=True, upload_to='carte_postale/')\n", (2828, 2889), False, 'from django.db import models\n'), ((2906, 3000), 'django.db.models.IntegerField', 'models.IntegerField', (['u"""Notre exploGRAM"""'], {'null': '(True)', 'blank': '(True)', 'choices': 'EXPLOGRAM_CHOICES'}), "(u'Notre exploGRAM', null=True, blank=True, choices=\n EXPLOGRAM_CHOICES)\n", (2925, 3000), False, 'from django.db import models\n'), ((3011, 3064), 'django.db.models.CharField', 'models.CharField', (['u"""Date"""'], {'max_length': '(100)', 'blank': '(True)'}), "(u'Date', max_length=100, blank=True)\n", (3027, 3064), False, 'from django.db import models\n'), ((3086, 3145), 'django.db.models.TextField', 'models.TextField', (['u"""Descriptif de notre projet"""'], {'blank': '(True)'}), "(u'Descriptif de notre projet', blank=True)\n", (3102, 3145), False, 'from django.db import models\n'), ((3165, 3233), 'django.db.models.TextField', 'models.TextField', (['u"""Les points positifs de notre projet"""'], {'blank': '(True)'}), "(u'Les points positifs de notre projet', blank=True)\n", (3181, 3233), False, 'from django.db import models\n'), ((3250, 3344), 'django.db.models.IntegerField', 'models.IntegerField', (['u"""Notre exploGRAM"""'], {'null': '(True)', 'blank': '(True)', 'choices': 'EXPLOGRAM_CHOICES'}), "(u'Notre exploGRAM', null=True, blank=True, choices=\n EXPLOGRAM_CHOICES)\n", (3269, 3344), False, 'from django.db import models\n'), ((3362, 3430), 'django.db.models.TextField', 'models.TextField', (['u"""Description de notre retransmission"""'], {'blank': '(True)'}), "(u'Description de notre retransmission', blank=True)\n", (3378, 3430), False, 'from django.db import models\n'), ((3449, 3546), 'django.db.models.CharField', 'models.CharField', (['u"""Le temps qu\'il nous faudra pour l\'installer"""'], {'max_length': '(100)', 'blank': '(True)'}), '(u"Le temps qu\'il nous faudra pour l\'installer", max_length\n =100, blank=True)\n', (3465, 3546), False, 'from django.db import models\n'), ((3565, 3701), 'django.db.models.CharField', 'models.CharField', (['u"""Le temps qu\'il nous faudra pour le présenter pendant le rassemblement tanGRAM"""'], {'max_length': '(100)', 'blank': '(True)'}), '(\n u"Le temps qu\'il nous faudra pour le présenter pendant le rassemblement tanGRAM"\n , max_length=100, blank=True)\n', (3581, 3701), False, 'from django.db import models\n'), ((3709, 3784), 'django.db.models.CharField', 'models.CharField', (['u"""L\'espace qu\'il nous faudra"""'], {'max_length': '(100)', 'blank': '(True)'}), '(u"L\'espace qu\'il nous faudra", max_length=100, blank=True)\n', (3725, 3784), False, 'from django.db import models\n'), ((3801, 3854), 'django.db.models.BooleanField', 'models.BooleanField', (['u"""Nous aurons besoin d\'un micro"""'], {}), '(u"Nous aurons besoin d\'un micro")\n', (3820, 3854), False, 'from django.db import models\n'), ((3871, 3924), 'django.db.models.BooleanField', 'models.BooleanField', (['u"""Nous aurons besoin d\'un écran"""'], {}), '(u"Nous aurons besoin d\'un écran")\n', (3890, 3924), False, 'from django.db import models\n'), ((3940, 3999), 'django.db.models.BooleanField', 'models.BooleanField', (['u"""Nous aurons besoin de supports expo"""'], {}), "(u'Nous aurons besoin de supports expo')\n", (3959, 3999), 
False, 'from django.db import models\n'), ((4016, 4083), 'django.db.models.TextField', 'models.TextField', (['u"""Nous aurons besoin d\'autres choses"""'], {'blank': '(True)'}), '(u"Nous aurons besoin d\'autres choses", blank=True)\n', (4032, 4083), False, 'from django.db import models\n'), ((4097, 4134), 'django.db.models.BooleanField', 'models.BooleanField', (['u"""Gram attribué"""'], {}), "(u'Gram attribué')\n", (4116, 4134), False, 'from django.db import models\n'), ((4148, 4185), 'django.db.models.BooleanField', 'models.BooleanField', (['u"""Gram attribué"""'], {}), "(u'Gram attribué')\n", (4167, 4185), False, 'from django.db import models\n'), ((4199, 4236), 'django.db.models.BooleanField', 'models.BooleanField', (['u"""Gram attribué"""'], {}), "(u'Gram attribué')\n", (4218, 4236), False, 'from django.db import models\n'), ((4250, 4287), 'django.db.models.BooleanField', 'models.BooleanField', (['u"""Gram attribué"""'], {}), "(u'Gram attribué')\n", (4269, 4287), False, 'from django.db import models\n'), ((4301, 4338), 'django.db.models.BooleanField', 'models.BooleanField', (['u"""Gram attribué"""'], {}), "(u'Gram attribué')\n", (4320, 4338), False, 'from django.db import models\n'), ((4352, 4389), 'django.db.models.BooleanField', 'models.BooleanField', (['u"""Gram attribué"""'], {}), "(u'Gram attribué')\n", (4371, 4389), False, 'from django.db import models\n'), ((4403, 4440), 'django.db.models.BooleanField', 'models.BooleanField', (['u"""Gram attribué"""'], {}), "(u'Gram attribué')\n", (4422, 4440), False, 'from django.db import models\n'), ((4456, 4506), 'django.db.models.IntegerField', 'models.IntegerField', (['u"""nombre de grams"""'], {'default': '(0)'}), "(u'nombre de grams', default=0)\n", (4475, 4506), False, 'from django.db import models\n'), ((4727, 4769), 'django.db.models.CharField', 'models.CharField', (['u"""Titre"""'], {'max_length': '(100)'}), "(u'Titre', max_length=100)\n", (4743, 4769), False, 'from django.db import models\n'), ((4780, 4848), 'django.db.models.CharField', 'models.CharField', (['u"""Action réalisée par"""'], {'max_length': '(100)', 'blank': '(True)'}), "(u'Action réalisée par', max_length=100, blank=True)\n", (4796, 4848), False, 'from django.db import models\n'), ((4862, 4941), 'django.db.models.TextField', 'models.TextField', (['u"""Description public (âge, autonomie, effectif…)"""'], {'blank': '(True)'}), "(u'Description public (âge, autonomie, effectif…)', blank=True)\n", (4878, 4941), False, 'from django.db import models\n'), ((4956, 5010), 'django.db.models.TextField', 'models.TextField', (['u"""Descriptif du déroulé"""'], {'blank': '(True)'}), "(u'Descriptif du déroulé', blank=True)\n", (4972, 5010), False, 'from django.db import models\n'), ((5026, 5144), 'django.db.models.CharField', 'models.CharField', (['u"""Type d\'activité (grand jeux, forum, projet de longue durée…)"""'], {'max_length': '(100)', 'blank': '(True)'}), '(\n u"Type d\'activité (grand jeux, forum, projet de longue durée…)",\n max_length=100, blank=True)\n', (5042, 5144), False, 'from django.db import models\n'), ((5152, 5194), 'django.db.models.TextField', 'models.TextField', (['u"""Objectifs"""'], {'blank': '(True)'}), "(u'Objectifs', blank=True)\n", (5168, 5194), False, 'from django.db import models\n'), ((5207, 5319), 'django.db.models.TextField', 'models.TextField', (['u"""Place dans une démarche d\'année (lancement, construction, conclusion…)"""'], {'blank': '(True)'}), '(\n u"Place dans une démarche d\'année (lancement, construction, conclusion…)",\n blank=True)\n', 
(5223, 5319), False, 'from django.db import models\n'), ((5323, 5377), 'django.db.models.CharField', 'models.CharField', (['u"""Durée"""'], {'max_length': '(100)', 'blank': '(True)'}), "(u'Durée', max_length=100, blank=True)\n", (5339, 5377), False, 'from django.db import models\n'), ((5394, 5466), 'django.db.models.TextField', 'models.TextField', (['u"""Place dans les respos dans le lancement"""'], {'blank': '(True)'}), "(u'Place dans les respos dans le lancement', blank=True)\n", (5410, 5466), False, 'from django.db import models\n'), ((5485, 5559), 'django.db.models.TextField', 'models.TextField', (['u"""Place dans les respos dans la réalisation"""'], {'blank': '(True)'}), "(u'Place dans les respos dans la réalisation', blank=True)\n", (5501, 5559), False, 'from django.db import models\n'), ((5579, 5654), 'django.db.models.TextField', 'models.TextField', (['u"""Place dans les respos dans la valorisation"""'], {'blank': '(True)'}), "(u'Place dans les respos dans la valorisation', blank=True)\n", (5595, 5654), False, 'from django.db import models\n'), ((5668, 5714), 'django.db.models.TextField', 'models.TextField', (['u"""Bibliographie"""'], {'blank': '(True)'}), "(u'Bibliographie', blank=True)\n", (5684, 5714), False, 'from django.db import models\n'), ((5733, 5799), 'django.db.models.TextField', 'models.TextField', (['u"""Les partenaires (si c\'est le cas)"""'], {'blank': '(True)'}), '(u"Les partenaires (si c\'est le cas)", blank=True)\n', (5749, 5799), False, 'from django.db import models\n'), ((5812, 5862), 'django.db.models.TextField', 'models.TextField', (['u"""Besoins matériels"""'], {'blank': '(True)'}), "(u'Besoins matériels', blank=True)\n", (5828, 5862), False, 'from django.db import models\n'), ((5876, 5950), 'django.db.models.FileField', 'models.FileField', (['u"""Annexes jointes"""'], {'blank': '(True)', 'upload_to': '"""fiche_action"""'}), "(u'Annexes jointes', blank=True, upload_to='fiche_action')\n", (5892, 5950), False, 'from django.db import models\n'), ((5962, 5985), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {}), '(User)\n', (5979, 5985), False, 'from django.db import models\n')]
|
from phrase_similarity import dedup_by_embedding, dedup_by_stemming
# Test1
result1 = dedup_by_stemming(['civilization', 'civil', 'computer'])
sol1 = ['civilization', 'computer']
if result1 == sol1:
print("Test 1 Passed")
else:
print("Test 1 Failed")
print(result1)
exit()
#Test 2
result2 = dedup_by_embedding(["database technique", "database techniques",
"cloud network", "cloud networks",
"machine learning",
"supervised learning",
"un supervised learning",
"data mining",
"data mining technique", "data mining techniques"])
sol2 = ['database technique',
'cloud network',
'machine learning',
'supervised learning',
'un supervised learning',
'data mining',
'data mining technique'
]
if result2 == sol2:
print("Test 2 Passed")
else:
print("Test 2 Failed")
print(result2)
exit()
#Test 3
result3 = dedup_by_embedding(["Linear Neural network",
"Convolutional Neural Network",
"Database system", "Database systems", "database system",
"data mining techniques", "Data mining methods",
"programming language", "program languages",
"cloud storage",
"cloud network", "cloud networks"])
sol3 = ['linear neural network',
'convolutional neural network',
'database system',
'data mining techniques',
'programming language',
'cloud storage',
'cloud network']
if result3 == sol3:
print("Test 3 Passed")
else:
print("Test 3 Failed")
print(result3)
exit()
#Test 4
result4 = dedup_by_embedding(["machine learning", "machine-learning", "machine learn",
"machine translation",
"machine translation system",
"machine translation evaluation",
"machine vision",
"machine vision system",
"machine vision application",
"machine intelligence", "machine consciousness", "machine perception",
"machine learning algorithm", "machine learning algorithms", "machine learn algorithm",
"machine learning techniques", "machine learning technique", "machine learn technique",
"machine learn method", "machine learning methods", "machine learning method",
"machine learning approach", "machine learn approach",
"machine learning classifiers", "machine learning classifier",
"machine-type communications", "machine type communications",
"machine-type communication", "machine type communication",
"machine structure", "machine structures"
])
sol4 = ['machine learning',
'machine translation',
'machine translation system',
'machine translation evaluation',
'machine vision',
'machine vision system',
'machine vision application',
'machine consciousness',
'machine learning algorithm',
'machine learning techniques',
'machine learning approach',
'machine learning classifiers',
'machine type communications',
'machine structure']
if result4 == sol4:
print("Test 4 Passed")
else:
print("Test 4 Failed")
print(result4)
exit()
#Test 5
result5 = dedup_by_embedding([
"data mining",
"data mining algorithm", "data mining technique",
"data structure", "data structures",
"database design",
"data stream", "data streams",
"database", "databases",
"data analysis", "data analytics",
"big data analytics",
"data visualization",
"database system",
"data privacy", "data security",
"image database",
"graph database",
])
sol5 = ['data mining',
'data mining algorithm',
'data mining technique',
'data structure',
'database design',
'data stream',
'database',
'data analysis',
'big data analytics',
'data visualization',
'database system',
'data privacy',
'image database',
'graph database']
if result5 == sol5:
print("Test 5 Passed")
else:
print("Test 5 Failed")
print(result5)
exit()
#Test 6
result6 = dedup_by_embedding(["helloworld", "world", "network"])
sol6 = ["helloworld", "world", "network"]
if result6 == sol6:
print("Test 6 Passed")
else:
print("Test 6 Failed")
print(result6)
exit()
|
[
"phrase_similarity.dedup_by_stemming",
"phrase_similarity.dedup_by_embedding"
] |
[((90, 146), 'phrase_similarity.dedup_by_stemming', 'dedup_by_stemming', (["['civilization', 'civil', 'computer']"], {}), "(['civilization', 'civil', 'computer'])\n", (107, 146), False, 'from phrase_similarity import dedup_by_embedding, dedup_by_stemming\n'), ((324, 571), 'phrase_similarity.dedup_by_embedding', 'dedup_by_embedding', (["['database technique', 'database techniques', 'cloud network',\n 'cloud networks', 'machine learning', 'supervised learning',\n 'un supervised learning', 'data mining', 'data mining technique',\n 'data mining techniques']"], {}), "(['database technique', 'database techniques',\n 'cloud network', 'cloud networks', 'machine learning',\n 'supervised learning', 'un supervised learning', 'data mining',\n 'data mining technique', 'data mining techniques'])\n", (342, 571), False, 'from phrase_similarity import dedup_by_embedding, dedup_by_stemming\n'), ((1103, 1396), 'phrase_similarity.dedup_by_embedding', 'dedup_by_embedding', (["['Linear Neural network', 'Convolutional Neural Network', 'Database system',\n 'Database systems', 'database system', 'data mining techniques',\n 'Data mining methods', 'programming language', 'program languages',\n 'cloud storage', 'cloud network', 'cloud networks']"], {}), "(['Linear Neural network', 'Convolutional Neural Network',\n 'Database system', 'Database systems', 'database system',\n 'data mining techniques', 'Data mining methods', 'programming language',\n 'program languages', 'cloud storage', 'cloud network', 'cloud networks'])\n", (1121, 1396), False, 'from phrase_similarity import dedup_by_embedding, dedup_by_stemming\n'), ((1910, 2808), 'phrase_similarity.dedup_by_embedding', 'dedup_by_embedding', (["['machine learning', 'machine-learning', 'machine learn',\n 'machine translation', 'machine translation system',\n 'machine translation evaluation', 'machine vision',\n 'machine vision system', 'machine vision application',\n 'machine intelligence', 'machine consciousness', 'machine perception',\n 'machine learning algorithm', 'machine learning algorithms',\n 'machine learn algorithm', 'machine learning techniques',\n 'machine learning technique', 'machine learn technique',\n 'machine learn method', 'machine learning methods',\n 'machine learning method', 'machine learning approach',\n 'machine learn approach', 'machine learning classifiers',\n 'machine learning classifier', 'machine-type communications',\n 'machine type communications', 'machine-type communication',\n 'machine type communication', 'machine structure', 'machine structures']"], {}), "(['machine learning', 'machine-learning', 'machine learn',\n 'machine translation', 'machine translation system',\n 'machine translation evaluation', 'machine vision',\n 'machine vision system', 'machine vision application',\n 'machine intelligence', 'machine consciousness', 'machine perception',\n 'machine learning algorithm', 'machine learning algorithms',\n 'machine learn algorithm', 'machine learning techniques',\n 'machine learning technique', 'machine learn technique',\n 'machine learn method', 'machine learning methods',\n 'machine learning method', 'machine learning approach',\n 'machine learn approach', 'machine learning classifiers',\n 'machine learning classifier', 'machine-type communications',\n 'machine type communications', 'machine-type communication',\n 'machine type communication', 'machine structure', 'machine structures'])\n", (1928, 2808), False, 'from phrase_similarity import dedup_by_embedding, dedup_by_stemming\n'), ((3826, 4210), 
'phrase_similarity.dedup_by_embedding', 'dedup_by_embedding', (["['data mining', 'data mining algorithm', 'data mining technique',\n 'data structure', 'data structures', 'database design', 'data stream',\n 'data streams', 'database', 'databases', 'data analysis',\n 'data analytics', 'big data analytics', 'data visualization',\n 'database system', 'data privacy', 'data security', 'image database',\n 'graph database']"], {}), "(['data mining', 'data mining algorithm',\n 'data mining technique', 'data structure', 'data structures',\n 'database design', 'data stream', 'data streams', 'database',\n 'databases', 'data analysis', 'data analytics', 'big data analytics',\n 'data visualization', 'database system', 'data privacy',\n 'data security', 'image database', 'graph database'])\n", (3844, 4210), False, 'from phrase_similarity import dedup_by_embedding, dedup_by_stemming\n'), ((4783, 4837), 'phrase_similarity.dedup_by_embedding', 'dedup_by_embedding', (["['helloworld', 'world', 'network']"], {}), "(['helloworld', 'world', 'network'])\n", (4801, 4837), False, 'from phrase_similarity import dedup_by_embedding, dedup_by_stemming\n')]
|
import xlwt
from django.shortcuts import get_object_or_404, HttpResponse
from django.urls import reverse_lazy
from django.utils.datetime_safe import datetime
from django.views.generic import DetailView, ListView, UpdateView, DeleteView, TemplateView
# Create your views here.
from employee_information_site.models import Employee
from vacation_schedule.forms import VacationPeriodForm
from vacation_schedule.models import EmployeeVacationPeriod, DaysRemainder
class VacationListPage(ListView):
template_name = 'vacation_schedule/vacation_list_page.html'
model = EmployeeVacationPeriod
context_object_name = 'vacation_periods'
def get_queryset(self):
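        # Limit the list to the logged-in employee's periods that start in the current calendar year.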
queryset = super().get_queryset()
employee = Employee.objects.filter(user=self.request.user.id).first()
current_year = datetime.now().year
return queryset.filter(employeeId=employee.id, startDateVacation__year=current_year)
def get_context_data(self, **kwargs):
context = super(VacationListPage, self).get_context_data(**kwargs)
employee = Employee.objects.filter(user=self.request.user.id).first()
context['current_user'] = employee
context['days_remainder'] = DaysRemainder.objects.filter(employee=employee).first()
return context
class UpdateOrCreateVacationPeriod(UpdateView):
model = EmployeeVacationPeriod
form_class = VacationPeriodForm
template_name = 'vacation_schedule/add_vacation_page.html'
success_url = reverse_lazy('vacation_schedule:vacationListPage')
context_object_name = 'vacation_period'
def get_object(self, **kwargs):
vacation_id = self.kwargs.get('id')
return self.model.objects.filter(id=vacation_id).first()
def form_invalid(self, form):
return self.form_validate(form)
def form_valid(self, form):
return self.form_validate(form)
def form_validate(self, form):
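        # Shared valid/invalid handler: drop the errors of the auto-filled fields, give back any
        # previously booked days, recompute vacationDays from the chosen dates, re-validate the
        # period, and only deduct the days from the remainder if the form is still valid.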
if not form.errors.get('employeeId') is None:
form.errors.pop('employeeId')
if not form.errors.get('vacationDays') is None:
form.errors.pop('vacationDays')
employee = Employee.objects.filter(user=self.request.user.id).first()
days_remainder = DaysRemainder.objects.filter(employee=employee).first()
if form.instance.vacationDays:
days_remainder.remainder += form.instance.vacationDays
form.instance.employeeId = employee
form.instance.vacationDays = (form.instance.endDateVacation - form.instance.startDateVacation).days
self.validate_date(form, days_remainder)
if form.is_valid():
days_remainder.remainder -= form.instance.vacationDays
days_remainder.save()
return super().form_valid(form)
return super().form_invalid(form)
def validate_date(self, form, days_remainder):
if form.instance.vacationDays <= 0:
form.add_error('endDateVacation', 'Неправильно выбрана дата окончания отпуска')
if form.instance.vacationDays > days_remainder.remainder:
form.add_error('vacationDays', 'Выбрано больше дней, чем осталось')
vacation_periods = self.model.objects.filter(employeeId=days_remainder.employee)
if vacation_periods:
if any(x for x in vacation_periods if self.check_date_intersection(form, x)):
                form.add_error('startDateVacation',
                               'Период отпуска пересекается с предыдущими периодами')
def get_context_data(self, **kwargs):
context = super(UpdateOrCreateVacationPeriod, self).get_context_data(**kwargs)
current_user = Employee.objects.filter(user=self.request.user.id).first()
context['current_user'] = current_user
return context
@staticmethod
def check_date_intersection(form, vacation_period):
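        # True when the submitted period overlaps another existing period (ignoring itself).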
return form.instance.id != vacation_period.id and (
vacation_period.startDateVacation <= form.instance.startDateVacation <= vacation_period.endDateVacation
or vacation_period.startDateVacation <= form.instance.endDateVacation <= vacation_period.endDateVacation
or form.instance.startDateVacation <= vacation_period.startDateVacation <= form.instance.endDateVacation
or form.instance.startDateVacation <= vacation_period.endDateVacation <= form.instance.endDateVacation)
class DeleteVacationPeriod(DeleteView):
model = EmployeeVacationPeriod
success_url = reverse_lazy('vacation_schedule:vacationListPage')
context_object_name = 'period'
def get_object(self, **kwargs):
vacation_id = self.kwargs.get('id')
return get_object_or_404(self.model, id=vacation_id)
def delete(self, request, *args, **kwargs):
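        # Return the cancelled days to the employee's remainder, capped at the yearly maximum.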
vacation_period = self.get_object(**kwargs)
days_remainder = DaysRemainder.objects.filter(employee=vacation_period.employeeId).first()
days_remainder.remainder += vacation_period.vacationDays
if days_remainder.remainder > days_remainder.maxCountDays.maxCountDays:
days_remainder.remainder = days_remainder.maxCountDays.maxCountDays
days_remainder.save()
return super(DeleteVacationPeriod, self).delete(request, *args, **kwargs)
class EmployeeVacationPage(TemplateView):
template_name = 'vacation_schedule/employee_vacation_page.html'
def get_context_data(self, **kwargs):
context = super(EmployeeVacationPage, self).get_context_data(**kwargs)
current_user = Employee.objects.filter(user=self.request.user.id).first()
context['current_user'] = current_user
return context
class ExportVacationXlsView(DetailView):
def get(self, request, *args, **kwargs):
response = HttpResponse(content_type='application/ms-excel')
response['Content-Disposition'] = 'attachment; filename="users.xls"'
wb = xlwt.Workbook(encoding='utf-8')
ws = wb.add_sheet('Users')
row_num = 0
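        # Header row: one column per model field, skipping the first field returned by _meta.get_fields().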
columns = [field for field in EmployeeVacationPeriod._meta.get_fields()][1:]
for col_num in range(len(columns)):
ws.write(row_num, col_num, columns[col_num].verbose_name)
rows = EmployeeVacationPeriod.objects.all()
for row_object in rows:
row_num += 1
for col_num, value in enumerate(columns):
ws.write(row_num, col_num, str(getattr(row_object, value.name)))
wb.save(response)
return response
|
[
"vacation_schedule.models.EmployeeVacationPeriod.objects.all",
"xlwt.Workbook",
"vacation_schedule.models.EmployeeVacationPeriod._meta.get_fields",
"django.shortcuts.HttpResponse",
"django.urls.reverse_lazy",
"employee_information_site.models.Employee.objects.filter",
"django.shortcuts.get_object_or_404",
"django.utils.datetime_safe.datetime.now",
"vacation_schedule.models.DaysRemainder.objects.filter"
] |
[((1482, 1532), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""vacation_schedule:vacationListPage"""'], {}), "('vacation_schedule:vacationListPage')\n", (1494, 1532), False, 'from django.urls import reverse_lazy\n'), ((4471, 4521), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""vacation_schedule:vacationListPage"""'], {}), "('vacation_schedule:vacationListPage')\n", (4483, 4521), False, 'from django.urls import reverse_lazy\n'), ((4654, 4699), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['self.model'], {'id': 'vacation_id'}), '(self.model, id=vacation_id)\n', (4671, 4699), False, 'from django.shortcuts import get_object_or_404, HttpResponse\n'), ((5733, 5782), 'django.shortcuts.HttpResponse', 'HttpResponse', ([], {'content_type': '"""application/ms-excel"""'}), "(content_type='application/ms-excel')\n", (5745, 5782), False, 'from django.shortcuts import get_object_or_404, HttpResponse\n'), ((5874, 5905), 'xlwt.Workbook', 'xlwt.Workbook', ([], {'encoding': '"""utf-8"""'}), "(encoding='utf-8')\n", (5887, 5905), False, 'import xlwt\n'), ((6178, 6214), 'vacation_schedule.models.EmployeeVacationPeriod.objects.all', 'EmployeeVacationPeriod.objects.all', ([], {}), '()\n', (6212, 6214), False, 'from vacation_schedule.models import EmployeeVacationPeriod, DaysRemainder\n'), ((812, 826), 'django.utils.datetime_safe.datetime.now', 'datetime.now', ([], {}), '()\n', (824, 826), False, 'from django.utils.datetime_safe import datetime\n'), ((730, 780), 'employee_information_site.models.Employee.objects.filter', 'Employee.objects.filter', ([], {'user': 'self.request.user.id'}), '(user=self.request.user.id)\n', (753, 780), False, 'from employee_information_site.models import Employee\n'), ((1063, 1113), 'employee_information_site.models.Employee.objects.filter', 'Employee.objects.filter', ([], {'user': 'self.request.user.id'}), '(user=self.request.user.id)\n', (1086, 1113), False, 'from employee_information_site.models import Employee\n'), ((1201, 1248), 'vacation_schedule.models.DaysRemainder.objects.filter', 'DaysRemainder.objects.filter', ([], {'employee': 'employee'}), '(employee=employee)\n', (1229, 1248), False, 'from vacation_schedule.models import EmployeeVacationPeriod, DaysRemainder\n'), ((2125, 2175), 'employee_information_site.models.Employee.objects.filter', 'Employee.objects.filter', ([], {'user': 'self.request.user.id'}), '(user=self.request.user.id)\n', (2148, 2175), False, 'from employee_information_site.models import Employee\n'), ((2209, 2256), 'vacation_schedule.models.DaysRemainder.objects.filter', 'DaysRemainder.objects.filter', ([], {'employee': 'employee'}), '(employee=employee)\n', (2237, 2256), False, 'from vacation_schedule.models import EmployeeVacationPeriod, DaysRemainder\n'), ((3630, 3680), 'employee_information_site.models.Employee.objects.filter', 'Employee.objects.filter', ([], {'user': 'self.request.user.id'}), '(user=self.request.user.id)\n', (3653, 3680), False, 'from employee_information_site.models import Employee\n'), ((4826, 4891), 'vacation_schedule.models.DaysRemainder.objects.filter', 'DaysRemainder.objects.filter', ([], {'employee': 'vacation_period.employeeId'}), '(employee=vacation_period.employeeId)\n', (4854, 4891), False, 'from vacation_schedule.models import EmployeeVacationPeriod, DaysRemainder\n'), ((5497, 5547), 'employee_information_site.models.Employee.objects.filter', 'Employee.objects.filter', ([], {'user': 'self.request.user.id'}), '(user=self.request.user.id)\n', (5520, 5547), False, 'from employee_information_site.models 
import Employee\n'), ((6001, 6042), 'vacation_schedule.models.EmployeeVacationPeriod._meta.get_fields', 'EmployeeVacationPeriod._meta.get_fields', ([], {}), '()\n', (6040, 6042), False, 'from vacation_schedule.models import EmployeeVacationPeriod, DaysRemainder\n')]
|
import cv2 # DO NOT REMOVE
from datasets import SceneData, ScenesDataSet
import train
from utils import general_utils, path_utils
from utils.Phases import Phases
import torch
def train_single_model(conf, device, phase):
# Create data
scene_data = SceneData.create_scene_data(conf)
# Create model
model = general_utils.get_class("models." + conf.get_string("model.type"))(conf).to(device)
if phase is Phases.FINE_TUNE:
path = path_utils.path_to_model(conf, Phases.TRAINING)
model.load_state_dict(torch.load(path))
# Sequential Optimization
if conf.get_bool("train.sequential", default=False):
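        # Warm-start strategy: optimize on growing camera subsets (2 .. n_cams-1 cameras)
        # with a short schedule before optimizing the full scene below.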
n_cams = scene_data.y.shape[0]
conf['train']['num_of_epochs'] = 1000
conf['train']['scheduler_milestone'] = []
for subset_size in range(2, n_cams):
print("########## Train model on subset of size {} ##########".format(subset_size))
subset_data = SceneData.get_subset(scene_data, subset_size)
conf["dataset"]["scan"] = subset_data.scan_name
dubscene_dataset = ScenesDataSet.ScenesDataSet([subset_data], return_all=True)
subscene_loader = ScenesDataSet.DataLoader(dubscene_dataset).to(device)
_, _, _, _ = train.train(conf, subscene_loader, model, phase)
conf['train']['num_of_epochs'] = 20000
conf['train']['scheduler_milestone'] = [10000]
conf["dataset"]["scan"] = scene_data.scan_name
# Optimize Scene
scene_dataset = ScenesDataSet.ScenesDataSet([scene_data], return_all=True)
scene_loader = ScenesDataSet.DataLoader(scene_dataset).to(device)
train_stat, train_errors, _, _ = train.train(conf, scene_loader, model, phase)
# Write results
train_errors.drop("Mean", inplace=True)
train_stat["Scene"] = train_errors.index
train_stat.set_index("Scene", inplace=True)
train_res = train_errors.join(train_stat)
general_utils.write_results(conf, train_res, file_name="Results_" + phase.name, append=True)
if __name__ == "__main__":
conf, device, phase = general_utils.init_exp(Phases.OPTIMIZATION.name)
train_single_model(conf, device, phase)
|
[
"datasets.ScenesDataSet.ScenesDataSet",
"utils.path_utils.path_to_model",
"datasets.ScenesDataSet.DataLoader",
"torch.load",
"datasets.SceneData.create_scene_data",
"utils.general_utils.write_results",
"train.train",
"datasets.SceneData.get_subset",
"utils.general_utils.init_exp"
] |
[((258, 291), 'datasets.SceneData.create_scene_data', 'SceneData.create_scene_data', (['conf'], {}), '(conf)\n', (285, 291), False, 'from datasets import SceneData, ScenesDataSet\n'), ((1498, 1556), 'datasets.ScenesDataSet.ScenesDataSet', 'ScenesDataSet.ScenesDataSet', (['[scene_data]'], {'return_all': '(True)'}), '([scene_data], return_all=True)\n', (1525, 1556), False, 'from datasets import SceneData, ScenesDataSet\n'), ((1664, 1709), 'train.train', 'train.train', (['conf', 'scene_loader', 'model', 'phase'], {}), '(conf, scene_loader, model, phase)\n', (1675, 1709), False, 'import train\n'), ((1918, 2015), 'utils.general_utils.write_results', 'general_utils.write_results', (['conf', 'train_res'], {'file_name': "('Results_' + phase.name)", 'append': '(True)'}), "(conf, train_res, file_name='Results_' + phase.\n name, append=True)\n", (1945, 2015), False, 'from utils import general_utils, path_utils\n'), ((2066, 2114), 'utils.general_utils.init_exp', 'general_utils.init_exp', (['Phases.OPTIMIZATION.name'], {}), '(Phases.OPTIMIZATION.name)\n', (2088, 2114), False, 'from utils import general_utils, path_utils\n'), ((457, 504), 'utils.path_utils.path_to_model', 'path_utils.path_to_model', (['conf', 'Phases.TRAINING'], {}), '(conf, Phases.TRAINING)\n', (481, 504), False, 'from utils import general_utils, path_utils\n'), ((535, 551), 'torch.load', 'torch.load', (['path'], {}), '(path)\n', (545, 551), False, 'import torch\n'), ((943, 988), 'datasets.SceneData.get_subset', 'SceneData.get_subset', (['scene_data', 'subset_size'], {}), '(scene_data, subset_size)\n', (963, 988), False, 'from datasets import SceneData, ScenesDataSet\n'), ((1080, 1139), 'datasets.ScenesDataSet.ScenesDataSet', 'ScenesDataSet.ScenesDataSet', (['[subset_data]'], {'return_all': '(True)'}), '([subset_data], return_all=True)\n', (1107, 1139), False, 'from datasets import SceneData, ScenesDataSet\n'), ((1249, 1297), 'train.train', 'train.train', (['conf', 'subscene_loader', 'model', 'phase'], {}), '(conf, subscene_loader, model, phase)\n', (1260, 1297), False, 'import train\n'), ((1576, 1615), 'datasets.ScenesDataSet.DataLoader', 'ScenesDataSet.DataLoader', (['scene_dataset'], {}), '(scene_dataset)\n', (1600, 1615), False, 'from datasets import SceneData, ScenesDataSet\n'), ((1170, 1212), 'datasets.ScenesDataSet.DataLoader', 'ScenesDataSet.DataLoader', (['dubscene_dataset'], {}), '(dubscene_dataset)\n', (1194, 1212), False, 'from datasets import SceneData, ScenesDataSet\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Jan 30 07:05:07 2018
@author: massimo
"""
from brightway2 import *
import pandas as pd
import numpy as np
from matplotlib import pyplot as plt
from scipy import stats
projects
projects.set_current('bw2_import_ecoinvent_3.4')
databases
db = Database("ecoinvent 3.4 conseq")
ipcc = ('IPCC 2013', 'climate change', 'GTP 100a')
# Simple Monte Carlo on an ecoinvent process as we know it.
mydemand = {db.random(): 1} # select a random process
lca = LCA(mydemand, ipcc)
lca.lci()
lca.lcia()
lca.score
mc = MonteCarloLCA(mydemand, ipcc)
mc_results = [next(mc) for x in range(500)]
plt.hist(mc_results, density=True)
plt.ylabel("Probability")
plt.xlabel(methods[ipcc]["unit"])
pd.DataFrame(mc_results).describe()
lca.score
np.exp(np.mean(np.log(mc_results))) # geometric mean
# Now comparative analysis
db.search('lorry transport euro5') # look at the names
activity_name = 'transport, freight, lorry >32 metric ton, EURO5'
for activity in Database("ecoinvent 3.4 conseq"):
if activity['name'] == activity_name:
truckE5 = Database("ecoinvent 3.4 conseq").get(activity['code'])
activity_name = 'transport, freight, lorry >32 metric ton, EURO6'
for activity in Database("ecoinvent 3.4 conseq"):
if activity['name'] == activity_name:
truckE6 = Database("ecoinvent 3.4 conseq").get(activity['code'])
truckE5.as_dict()
truckE6.as_dict()
# make a list with the alternatives
demands = [{truckE5: 1}, {truckE6: 1}]
mc = MonteCarloLCA(demands[0], ipcc)
next(mc)
# look at this first
mc.redo_lcia(demands[0])
mc.score
mc.redo_lcia(demands[1])
mc.score
mc.redo_lcia(demands[0])
mc.score
# Now for several iterations
iterations = 100
simulations = []
for _ in range(iterations):
print(_)
next(mc)
mcresults = []
for i in demands:
mc.redo_lcia(i)
mcresults.append(mc.score)
simulations.append(mcresults)
simulations
df = pd.DataFrame(simulations, columns = ['truckE5','truckE6'])
df.to_csv('ComparativeMCsimulation.csv') # to save it
#plot stuff (using the matplotlib package)
df.plot(kind = 'box')
#df.T.melt()
plt.plot(df.truckE5, df.truckE6, 'o')
plt.xlabel('truckE5 - kg CO2-eq')
plt.ylabel('truckE6 - kg CO2-eq')
# You can see how many times the difference is positive. This is what SimaPro does
df["diffe"] = df.truckE5 - df.truckE6
plt.hist(df.diffe.values)
len(df.diffe[df.diffe < 0])
len(df.diffe[df.diffe > 0])
len(df.diffe[df.diffe == 0])
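# A minimal added sketch (not part of the original analysis): the same sign-counting check
# expressed as the fraction of Monte Carlo runs in which truckE5 scores higher than truckE6.
(df.diffe > 0).mean()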
# Statistical testing (using the stats package)
# I can use a paired t-test
t_value, p_value = stats.ttest_rel(df.truckE5,df.truckE6)
t_value
p_value
# But wait! did we check for normality?
plt.hist(df.truckE5.values)
plt.xlabel('truckE5 - kg CO2-eq')
SW_value, SW_p_value = stats.shapiro(df.truckE5)
SW_p_value # Not normally distributed...
plt.hist(df.truckE6.values)
SW_value, SW_p_value = stats.shapiro(df.truckE6)
SW_p_value # Normally distributed if alpha = 0.05... Not a strong result though if we had, say, 1000 samples
# Alright need a non-parametric test. Wilcox sign rank test
s_value, p_value = stats.wilcoxon(df.truckE5, df.truckE6)
s_value
p_value # Not bad, significant difference!
# What if we had done the MC on the processes independently.
mc1 = MonteCarloLCA({truckE5: 1}, ipcc)
mc1_results = [next(mc1) for x in range(100)]
mc2 = MonteCarloLCA({truckE5: 1}, ipcc) # it's still truckE5!
mc2_results = [next(mc2) for x in range(100)]
df_ind = pd.DataFrame({'mc1': mc1_results, 'mc2' : mc2_results})
# compare to this
demands = [{truckE5: 1}, {truckE5: 1}] # I am using the same process two times.
mc = MonteCarloLCA(demands[0], ipcc)
iterations = 100
simulations = []
for _ in range(iterations):
print(_)
next(mc)
mcresults = []
for i in demands:
mc.redo_lcia(i)
mcresults.append(mc.score)
simulations.append(mcresults)
simulations
df_dep = pd.DataFrame(simulations, columns = ['mc1','mc2'])
# Plot stuff
df_dep.plot(kind = 'box')
df_ind.plot(kind = 'box')
plt.plot(df_dep.mc1, df_dep.mc2, 'o')
plt.plot(df_ind.mc1, df_ind.mc2, 'o') # see?
# and of course:
t_value, p_value = stats.ttest_rel(df_dep.mc1, df_dep.mc2)
t_value
p_value # no difference AT ALL (as expected)
t_value, p_value = stats.ttest_rel(df_ind.mc1, df_ind.mc2)
t_value
p_value # no difference (as expected! But still some variance!)
s_value, p_value = stats.wilcoxon(df_ind.mc1, df_ind.mc2)
s_value
p_value
|
[
"pandas.DataFrame",
"numpy.log",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.hist",
"scipy.stats.ttest_rel",
"scipy.stats.shapiro",
"matplotlib.pyplot.ylabel",
"scipy.stats.wilcoxon",
"matplotlib.pyplot.xlabel"
] |
[((646, 680), 'matplotlib.pyplot.hist', 'plt.hist', (['mc_results'], {'density': '(True)'}), '(mc_results, density=True)\n', (654, 680), True, 'from matplotlib import pyplot as plt\n'), ((681, 706), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Probability"""'], {}), "('Probability')\n", (691, 706), True, 'from matplotlib import pyplot as plt\n'), ((707, 740), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (["methods[ipcc]['unit']"], {}), "(methods[ipcc]['unit'])\n", (717, 740), True, 'from matplotlib import pyplot as plt\n'), ((1969, 2026), 'pandas.DataFrame', 'pd.DataFrame', (['simulations'], {'columns': "['truckE5', 'truckE6']"}), "(simulations, columns=['truckE5', 'truckE6'])\n", (1981, 2026), True, 'import pandas as pd\n'), ((2163, 2200), 'matplotlib.pyplot.plot', 'plt.plot', (['df.truckE5', 'df.truckE6', '"""o"""'], {}), "(df.truckE5, df.truckE6, 'o')\n", (2171, 2200), True, 'from matplotlib import pyplot as plt\n'), ((2201, 2234), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""truckE5 - kg CO2-eq"""'], {}), "('truckE5 - kg CO2-eq')\n", (2211, 2234), True, 'from matplotlib import pyplot as plt\n'), ((2235, 2268), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""truckE6 - kg CO2-eq"""'], {}), "('truckE6 - kg CO2-eq')\n", (2245, 2268), True, 'from matplotlib import pyplot as plt\n'), ((2389, 2414), 'matplotlib.pyplot.hist', 'plt.hist', (['df.diffe.values'], {}), '(df.diffe.values)\n', (2397, 2414), True, 'from matplotlib import pyplot as plt\n'), ((2597, 2636), 'scipy.stats.ttest_rel', 'stats.ttest_rel', (['df.truckE5', 'df.truckE6'], {}), '(df.truckE5, df.truckE6)\n', (2612, 2636), False, 'from scipy import stats\n'), ((2693, 2720), 'matplotlib.pyplot.hist', 'plt.hist', (['df.truckE5.values'], {}), '(df.truckE5.values)\n', (2701, 2720), True, 'from matplotlib import pyplot as plt\n'), ((2721, 2754), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""truckE5 - kg CO2-eq"""'], {}), "('truckE5 - kg CO2-eq')\n", (2731, 2754), True, 'from matplotlib import pyplot as plt\n'), ((2779, 2804), 'scipy.stats.shapiro', 'stats.shapiro', (['df.truckE5'], {}), '(df.truckE5)\n', (2792, 2804), False, 'from scipy import stats\n'), ((2847, 2874), 'matplotlib.pyplot.hist', 'plt.hist', (['df.truckE6.values'], {}), '(df.truckE6.values)\n', (2855, 2874), True, 'from matplotlib import pyplot as plt\n'), ((2898, 2923), 'scipy.stats.shapiro', 'stats.shapiro', (['df.truckE6'], {}), '(df.truckE6)\n', (2911, 2923), False, 'from scipy import stats\n'), ((3102, 3140), 'scipy.stats.wilcoxon', 'stats.wilcoxon', (['df.truckE5', 'df.truckE6'], {}), '(df.truckE5, df.truckE6)\n', (3116, 3140), False, 'from scipy import stats\n'), ((3462, 3516), 'pandas.DataFrame', 'pd.DataFrame', (["{'mc1': mc1_results, 'mc2': mc2_results}"], {}), "({'mc1': mc1_results, 'mc2': mc2_results})\n", (3474, 3516), True, 'import pandas as pd\n'), ((3913, 3962), 'pandas.DataFrame', 'pd.DataFrame', (['simulations'], {'columns': "['mc1', 'mc2']"}), "(simulations, columns=['mc1', 'mc2'])\n", (3925, 3962), True, 'import pandas as pd\n'), ((4031, 4068), 'matplotlib.pyplot.plot', 'plt.plot', (['df_dep.mc1', 'df_dep.mc2', '"""o"""'], {}), "(df_dep.mc1, df_dep.mc2, 'o')\n", (4039, 4068), True, 'from matplotlib import pyplot as plt\n'), ((4069, 4106), 'matplotlib.pyplot.plot', 'plt.plot', (['df_ind.mc1', 'df_ind.mc2', '"""o"""'], {}), "(df_ind.mc1, df_ind.mc2, 'o')\n", (4077, 4106), True, 'from matplotlib import pyplot as plt\n'), ((4151, 4190), 'scipy.stats.ttest_rel', 'stats.ttest_rel', (['df_dep.mc1', 'df_dep.mc2'], {}), '(df_dep.mc1, df_dep.mc2)\n', (4166, 
4190), False, 'from scipy import stats\n'), ((4265, 4304), 'scipy.stats.ttest_rel', 'stats.ttest_rel', (['df_ind.mc1', 'df_ind.mc2'], {}), '(df_ind.mc1, df_ind.mc2)\n', (4280, 4304), False, 'from scipy import stats\n'), ((4398, 4436), 'scipy.stats.wilcoxon', 'stats.wilcoxon', (['df_ind.mc1', 'df_ind.mc2'], {}), '(df_ind.mc1, df_ind.mc2)\n', (4412, 4436), False, 'from scipy import stats\n'), ((742, 766), 'pandas.DataFrame', 'pd.DataFrame', (['mc_results'], {}), '(mc_results)\n', (754, 766), True, 'import pandas as pd\n'), ((805, 823), 'numpy.log', 'np.log', (['mc_results'], {}), '(mc_results)\n', (811, 823), True, 'import numpy as np\n')]
|
from network import Regressor, Loss_gamma_0_6
import numpy as np
import skorch
from skorch import NeuralNetRegressor
from torch import optim
def load_model(load_cp, n_in=106,device='cuda'):
cp = skorch.callbacks.Checkpoint(dirname=load_cp)
net = NeuralNetRegressor(
Regressor(n_in=n_in),
criterion=Loss_gamma_0_6,
max_epochs=2000,
optimizer=optim.Adam,
optimizer__amsgrad=True,
optimizer__weight_decay=0.1,
lr=0.0003,
iterator_train__shuffle=True,
iterator_train__num_workers=32,
iterator_train__pin_memory=True,
device=device,
batch_size=50000,
iterator_train__batch_size=50000,
)
net.initialize()
net.load_params(checkpoint=cp)
return net
def save_model_para(model_cp):
'''
    convert trained model parameters (saved at checkpoint model_cp) to numpy format
:param model_cp:
:return:
'''
model = load_model(model_cp, n_in=106,device='cpu')
paras = []
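    # collect every layer's weights and biases as plain numpy arrays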
for para in model.get_params()['module'].parameters():
paras.append(para.data.cpu().numpy())
np.save("model_paras.npy", paras)
save_model_para('cp_gamma_0_6')
|
[
"skorch.callbacks.Checkpoint",
"numpy.save",
"network.Regressor"
] |
[((201, 245), 'skorch.callbacks.Checkpoint', 'skorch.callbacks.Checkpoint', ([], {'dirname': 'load_cp'}), '(dirname=load_cp)\n', (228, 245), False, 'import skorch\n'), ((1112, 1145), 'numpy.save', 'np.save', (['"""model_paras.npy"""', 'paras'], {}), "('model_paras.npy', paras)\n", (1119, 1145), True, 'import numpy as np\n'), ((284, 304), 'network.Regressor', 'Regressor', ([], {'n_in': 'n_in'}), '(n_in=n_in)\n', (293, 304), False, 'from network import Regressor, Loss_gamma_0_6\n')]
|
import numpy as np
from gensim.models import Word2Vec
from src.utils import io
def run(
random_walk_files, output_file, dimensions=128, context_size=10, epochs=1, workers=1
):
"""Generates node vector embeddings from a list of files containing random
walks performed on different layers of a multilayer network.
Parameters
----------
random_walk_files: list
List of files containing random walks. Each file should correspond to random walks perform on a different layer
of the network of interest.
output_file: str
The file in which the node embeddings will be saved.
dimensions: int (default: 128)
Number of dimensions of the generated vector embeddings.
context_size: int (default: 10)
Context size in Word2Vec.
epochs: int (default: 1)
Number of epochs in stochastic gradient descent.
workers: int (default: 1)
Number of worker threads used to train the model.
"""
walks = np.concatenate([io.read_random_walks(file) for file in random_walk_files])
#print(walks.shape)
walks_trim = np.split(walks, walks.shape[0])
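    # keep only the non-zero node ids of each padded walk and convert them to strings for Word2Vec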
walks_trim = [walk[walk!=0].astype(str).tolist() for walk in walks]
#print(walks_trim)
model = Word2Vec(
walks_trim,
size=dimensions,
window=context_size,
min_count=0,
sg=1, # use skip-gram
workers=workers,
iter=epochs,
)
model.wv.save_word2vec_format(output_file)
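
if __name__ == "__main__":
    # Hedged usage sketch: the walk files and output path below are placeholders,
    # not files that ship with this project; adjust them to your own data.
    run(
        random_walk_files=["walks_layer1.txt", "walks_layer2.txt"],
        output_file="embeddings.emb",
        dimensions=64,
        context_size=5,
        workers=4,
    )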
|
[
"src.utils.io.read_random_walks",
"gensim.models.Word2Vec",
"numpy.split"
] |
[((1109, 1140), 'numpy.split', 'np.split', (['walks', 'walks.shape[0]'], {}), '(walks, walks.shape[0])\n', (1117, 1140), True, 'import numpy as np\n'), ((1248, 1360), 'gensim.models.Word2Vec', 'Word2Vec', (['walks_trim'], {'size': 'dimensions', 'window': 'context_size', 'min_count': '(0)', 'sg': '(1)', 'workers': 'workers', 'iter': 'epochs'}), '(walks_trim, size=dimensions, window=context_size, min_count=0, sg=\n 1, workers=workers, iter=epochs)\n', (1256, 1360), False, 'from gensim.models import Word2Vec\n'), ((1009, 1035), 'src.utils.io.read_random_walks', 'io.read_random_walks', (['file'], {}), '(file)\n', (1029, 1035), False, 'from src.utils import io\n')]
|
"""Class representing the object being modeled."""
import json
import logging
import re
from pathlib import Path
from typing import Dict, List, Tuple, Union
from natsort import natsorted
from openpyxl import load_workbook
class PartObject:
""" Load and create a part from a source """
def __init__(self, pins, filename):
super().__init__()
self.log = logging.getLogger("partmap.object")
self._pins = pins
self._columns, self._rows = self.sort_and_split_pin_list()
self.filename = Path(filename)
@classmethod
def from_excel(cls, filename):
""" Import an Excel and create a PartObject """
number = "Number"
name = "Name"
workbook = load_workbook(filename)
sheet = workbook.active # Grab the first sheet
try:
column = get_col_index([number, name], sheet)
bga = dict()
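            # bga maps pin number -> {"name": net name, "color": "#RRGGBB" fill colour}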
for excel_row in range(2, sheet.max_row + 1):
pin = sheet.cell(row=excel_row, column=column[number]).value
net = sheet.cell(row=excel_row, column=column[name])
if pin is not None or net.value is not None:
if net.fill.patternType == "solid":
bga.update(
{
pin: {
"name": net.value,
"color": str("#" + net.fill.start_color.rgb[2:]),
}
}
)
else:
bga.update({pin: {"name": net.value, "color": "#ffffff"}})
except (TypeError, ValueError, KeyError, UnboundLocalError) as error:
print(error)
raise
return cls(bga, filename)
@classmethod
def from_telesis(cls, filename, refdes):
""" Import a Telesis formatted file and create a PartObject """
with open(filename, "r") as tel_file:
tel_text = tel_file.readlines()
tel_netlist = dict()
for line in tel_text:
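            # a Telesis netlist line is assumed to look like "NET_NAME; REFDES.PIN ...": grab the net name and every pin of this refdes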
reg = re.match(r"(.*);", line)
reg2 = re.findall(refdes + r"\.([a-zA-Z0-9]+)", line)
if reg and reg2:
net = reg.group(1)
for reg_match in reg2:
pin = reg_match
tel_netlist.update({pin: {"name": net, "color": "#ffffff"}})
return cls(tel_netlist, filename)
@classmethod
def from_json(cls, filename):
""" Import a json file with a format {pin: {name:, color:}} """
return cls(json.load(open(filename)), filename)
def add_pin(self, pin: str, net: str, color: str) -> None:
"""Add a new pin to the part.
Args:
pin: The Pin Number. (A12)
net: The functional name of the net. (USB_P)
color: The color to fill with.
"""
self._pins.update({pin: {"name": net, "color": color}})
@property
def columns(self) -> List:
""" Get the columns in a part. [1-n] """
return self._columns
@columns.setter
def columns(self, new_columns):
"""Update the columns."""
self._columns = new_columns
@property
def rows(self) -> List:
""" Get the rows in a part. [A - AZ] """
return self._rows
@rows.setter
def rows(self, new_rows):
"""Update the rows."""
self._rows = new_rows
def get_pin(self, prefix: str, suffix: str) -> Union[str, None]:
""" Get the name and color of a pin """
pin = None
if prefix + suffix in self._pins:
pin = self._pins[prefix + suffix]
elif suffix + prefix in self._pins:
pin = self._pins[suffix + prefix]
return pin
@property
def pins(self):
""" Return the pin names """
return self._pins.keys()
def get_number_of_pins(self):
""" Return how many pins are in the part """
return len(self._pins)
def get_net_names(self):
""" Return the net names """
        # dict.values() is not subscriptable; collect the name of every pin instead
        return [pin["name"] for pin in self._pins.values()]
def dump_json(self):
""" Dump the PartObject dictionary to a .json file """
save_file = self.filename.with_suffix(".json")
self.log.info(f"Saved as json to {save_file}")
with open(save_file, "w") as outfile:
json.dump(self._pins, outfile, sort_keys=True, indent=4, separators=(",", ": "))
def sort_and_split_pin_list(self) -> Tuple[List, List]:
""" Take a list of pins and spilt by letter and number then sort """
r_list: List = list()
c_list = list()
for pin in self.pins:
split_pin = re.split(r"(\d+)", pin)
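            # e.g. "A12" -> ["A", "12", ""]: the letter prefix becomes a row, the numeric part a column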
if split_pin[0] not in r_list:
r_list.append(split_pin[0])
c_list.append(split_pin[1])
temp = list()
temp2 = list()
for item in r_list:
if len(item) > 1:
temp.append(item)
else:
temp2.append(item)
temp2 = natsorted(temp2)
temp2.extend(natsorted(temp))
return natsorted(set(c_list)), temp2
def get_col_index(name: List, worksheet) -> Dict:
""" return a list of the column numbers if it matches """
indexes = dict()
for rows in worksheet.iter_rows(min_row=1, max_row=1, min_col=1):
for column in rows:
if column.value in name:
indexes.update({column.value: column.col_idx})
return indexes
|
[
"json.dump",
"re.split",
"re.match",
"openpyxl.load_workbook",
"logging.getLogger",
"pathlib.Path",
"re.findall",
"natsort.natsorted"
] |
[((379, 414), 'logging.getLogger', 'logging.getLogger', (['"""partmap.object"""'], {}), "('partmap.object')\n", (396, 414), False, 'import logging\n'), ((532, 546), 'pathlib.Path', 'Path', (['filename'], {}), '(filename)\n', (536, 546), False, 'from pathlib import Path\n'), ((723, 746), 'openpyxl.load_workbook', 'load_workbook', (['filename'], {}), '(filename)\n', (736, 746), False, 'from openpyxl import load_workbook\n'), ((5075, 5091), 'natsort.natsorted', 'natsorted', (['temp2'], {}), '(temp2)\n', (5084, 5091), False, 'from natsort import natsorted\n'), ((2122, 2145), 're.match', 're.match', (['"""(.*);"""', 'line'], {}), "('(.*);', line)\n", (2130, 2145), False, 'import re\n'), ((2166, 2212), 're.findall', 're.findall', (["(refdes + '\\\\.([a-zA-Z0-9]+)')", 'line'], {}), "(refdes + '\\\\.([a-zA-Z0-9]+)', line)\n", (2176, 2212), False, 'import re\n'), ((4391, 4476), 'json.dump', 'json.dump', (['self._pins', 'outfile'], {'sort_keys': '(True)', 'indent': '(4)', 'separators': "(',', ': ')"}), "(self._pins, outfile, sort_keys=True, indent=4, separators=(',', ': ')\n )\n", (4400, 4476), False, 'import json\n'), ((4718, 4741), 're.split', 're.split', (['"""(\\\\d+)"""', 'pin'], {}), "('(\\\\d+)', pin)\n", (4726, 4741), False, 'import re\n'), ((5113, 5128), 'natsort.natsorted', 'natsorted', (['temp'], {}), '(temp)\n', (5122, 5128), False, 'from natsort import natsorted\n')]
|
#!/usr/bin/env python
'Turbobil agi Asterisk'
__author__ = "<NAME>"
__version__ = "0.1.2"
__email__ = "<EMAIL>"
import os
import sys
from turbodb import *
import logging
from agi import *
from dialer import *
#Type pay
PRE_PAY = 1
POST_PAY = 2
# INFO, DEBUG, WARNING, CRITICAL, ERROR
def set_logging(cfg_level=logging.INFO):
logging.basicConfig(level=cfg_level)
logFormatter = logging.Formatter("%(asctime)s [%(threadName)s] [%(levelname)s] %(message)s")
rootLogger = logging.getLogger()
consoleHandler = logging.StreamHandler()
consoleHandler.setFormatter(logFormatter)
rootLogger.addHandler(consoleHandler)
logger = logging.getLogger(__name__)
def check_credit(customer):
""" Check customer credit """
if customer.type_pay == PRE_PAY:
if customer.credit <= 0:
            logging.info('customer id %s does not have credit' % customer.id)
sys.exit()
elif customer.type_pay == POST_PAY:
logging.info('customer id %s POST_PAY' % customer.id)
else:
        logging.error('customer id %s does not have a valid payment method' % customer.id)
sys.exit()
if __name__ == '__main__':
set_logging()
accountcode = sys.argv[1].strip()
destination = sys.argv[2].strip()
timeout = 45
database = TurboDb()
customer = database.get_customer_by_accountcode(accountcode)
if not customer:
logging.error('customer not found')
sys.exit()
check_credit(customer)
#TODO
#Not yet implement
#if customer.customer_id:
#check reseller credit
# reseller = database.get_customer_by_id(customer.customer_id)
# check_credit(reseller)
#check route
routes = database.get_routes_to_customer(customer, destination)
if not routes:
logging.error('routes not found')
sys.exit()
#dialer call
agi = AGI()
dialer = dialer(agi)
for r in routes:
price = database.get_price_customer_route(r)
agi.verbose(price)
if customer.type_pay == PRE_PAY:
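            # prepaid limit in milliseconds: credit divided by the per-second cost (price is assumed to be per minute), used by the L() dial option below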
limit = int(customer.credit /(price / 60) * 1000)
else:
limit = 10000000
str_provider = 'SIP/%s@%s_provider,%s' % (destination, r.Provider.id, timeout)
op_dial = '|L(%s:0:0),90' % (limit)
str_dial = str_provider + op_dial
d_status = dialer.dial(str_dial)
database.save_call(destination, customer, r, d_status, price)
        if d_status['dialstatus'] in ['NOANSWER', 'CANCEL']:
break
elif d_status['dialstatus'] in ['ANSWER']:
break
elif d_status['dialstatus'] in ['CHANUNAVAIL', 'CONGESTION', 'BUSY']:
continue
sys.exit()
|
[
"logging.error",
"logging.basicConfig",
"logging.StreamHandler",
"logging.getLogger",
"logging.Formatter",
"logging.info",
"sys.exit"
] |
[((342, 378), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'cfg_level'}), '(level=cfg_level)\n', (361, 378), False, 'import logging\n'), ((399, 477), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s [%(threadName)s] [%(levelname)s] %(message)s"""'], {}), "('%(asctime)s [%(threadName)s] [%(levelname)s] %(message)s')\n", (416, 477), False, 'import logging\n'), ((495, 514), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (512, 514), False, 'import logging\n'), ((537, 560), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (558, 560), False, 'import logging\n'), ((663, 690), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (680, 690), False, 'import logging\n'), ((2684, 2694), 'sys.exit', 'sys.exit', ([], {}), '()\n', (2692, 2694), False, 'import sys\n'), ((1395, 1430), 'logging.error', 'logging.error', (['"""customer not found"""'], {}), "('customer not found')\n", (1408, 1430), False, 'import logging\n'), ((1439, 1449), 'sys.exit', 'sys.exit', ([], {}), '()\n', (1447, 1449), False, 'import sys\n'), ((1784, 1817), 'logging.error', 'logging.error', (['"""routes not found"""'], {}), "('routes not found')\n", (1797, 1817), False, 'import logging\n'), ((1826, 1836), 'sys.exit', 'sys.exit', ([], {}), '()\n', (1834, 1836), False, 'import sys\n'), ((836, 897), 'logging.info', 'logging.info', (["('customer id %s dont have credit' % customer.id)"], {}), "('customer id %s dont have credit' % customer.id)\n", (848, 897), False, 'import logging\n'), ((910, 920), 'sys.exit', 'sys.exit', ([], {}), '()\n', (918, 920), False, 'import sys\n'), ((969, 1022), 'logging.info', 'logging.info', (["('customer id %s POST_PAY' % customer.id)"], {}), "('customer id %s POST_PAY' % customer.id)\n", (981, 1022), False, 'import logging\n'), ((1041, 1113), 'logging.error', 'logging.error', (["('customer id %s dont have valid method pay' % customer.id)"], {}), "('customer id %s dont have valid method pay' % customer.id)\n", (1054, 1113), False, 'import logging\n'), ((1122, 1132), 'sys.exit', 'sys.exit', ([], {}), '()\n', (1130, 1132), False, 'import sys\n')]
|
import pygame, assets
from options import OptionsScreen
class TitleScreen:
def __init__(self, screen):
self.screen = screen
big_font = pygame.font.SysFont(assets.font, 90)
small_font = pygame.font.SysFont(assets.font, 24)
self.heading = small_font.render("Super Extreme", True, (255,255,255))
self.title = big_font.render("TIC-TAC-TOE", True, (255,255,255))
self.title_shadow = big_font.render("TIC-TAC-TOE", True, (192,192,192))
self.start = small_font.render("Press ENTER to start!", True, (255,255,255))
self.title_shadow_rect = self.title_shadow.get_rect()
self.title_shadow_rect.center = (screen.get_width()/2, screen.get_height()/2)
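        # the foreground title is shifted up-left of this rect so the second copy reads as a drop shadow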
self.title_rect = self.title_shadow_rect.move(-10, -10)
self.heading_rect = self.heading.get_rect()
self.heading_rect.topleft = self.title_rect.topleft
self.heading_rect.left -= 5
self.start_rect = self.start.get_rect()
self.start_rect.center = (self.title_shadow_rect.centerx, (self.title_rect.bottom + screen.get_height())/2)
def event(self, event):
if event.type == pygame.KEYDOWN and event.key == pygame.K_RETURN:
OptionsScreen.instance = OptionsScreen(self.screen)
return OptionsScreen.instance
else: return self
def logic(self):
assets.background.logic()
def draw(self):
self.screen.fill((0,0,0))
assets.background.draw()
self.screen.blit(self.title_shadow, self.title_shadow_rect)
self.screen.blit(self.title, self.title_rect)
self.screen.blit(self.heading, self.heading_rect)
self.screen.blit(self.start, self.start_rect)
|
[
"options.OptionsScreen",
"assets.background.draw",
"assets.background.logic",
"pygame.font.SysFont"
] |
[((144, 180), 'pygame.font.SysFont', 'pygame.font.SysFont', (['assets.font', '(90)'], {}), '(assets.font, 90)\n', (163, 180), False, 'import pygame, assets\n'), ((196, 232), 'pygame.font.SysFont', 'pygame.font.SysFont', (['assets.font', '(24)'], {}), '(assets.font, 24)\n', (215, 232), False, 'import pygame, assets\n'), ((1247, 1272), 'assets.background.logic', 'assets.background.logic', ([], {}), '()\n', (1270, 1272), False, 'import pygame, assets\n'), ((1322, 1346), 'assets.background.draw', 'assets.background.draw', ([], {}), '()\n', (1344, 1346), False, 'import pygame, assets\n'), ((1145, 1171), 'options.OptionsScreen', 'OptionsScreen', (['self.screen'], {}), '(self.screen)\n', (1158, 1171), False, 'from options import OptionsScreen\n')]
|
import random
import math
from PIL import Image
def sample(x, y, num_of_sample_directions=64):
s = 0.0
for i in range(num_of_sample_directions):
# random_rad = 2 * math.pi * random.uniform(0.0, 1.0)
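        # stratified (jittered) sampling: one jittered direction per angular bin gives less noise than the purely uniform line above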
random_rad = 2 * math.pi * (i + random.uniform(0.0, 1.0)) / num_of_sample_directions
s += trace(x, y, math.cos(random_rad), math.sin(random_rad))
return s / num_of_sample_directions # * 2 * math.pi
def trace(ox, oy, dx, dy):
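    # sphere-trace (ray march) from (ox, oy) along (dx, dy): step by the SDF until the circle is hit (emissive, return 2.0) or give up (return 0.0)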
t = 0.0
i = 0
while (i < 10) and (t < 2.0):
i += 1
sd = circleSDF(ox + dx * t, oy + dy * t, 0.5, 0.5, 0.1)
if sd < 1e-6: return 2.0
t += sd
return 0.0
def circleSDF(x, y, cx, cy, cr):
"""Return:
negative if (x, y) is inside the circle;
positive if (x, y) is outside the circle;
zero if (x, y) is on the circle
"""
return math.sqrt((x - cx) * (x - cx) + (y - cy) * (y - cy)) - cr
def main():
width, height = 512, 512
img = Image.new('L', (width, height))
pixels = img.load()
for h in range(height):
for w in range(width):
pixels[h, w] = int(min(sample(h / float(height), w / float(width)) * 255.0, 255.0))
img.save("moon2.png")
if __name__ == '__main__':
main()
|
[
"PIL.Image.new",
"math.sqrt",
"random.uniform",
"math.sin",
"math.cos"
] |
[((967, 998), 'PIL.Image.new', 'Image.new', (['"""L"""', '(width, height)'], {}), "('L', (width, height))\n", (976, 998), False, 'from PIL import Image\n'), ((857, 909), 'math.sqrt', 'math.sqrt', (['((x - cx) * (x - cx) + (y - cy) * (y - cy))'], {}), '((x - cx) * (x - cx) + (y - cy) * (y - cy))\n', (866, 909), False, 'import math\n'), ((334, 354), 'math.cos', 'math.cos', (['random_rad'], {}), '(random_rad)\n', (342, 354), False, 'import math\n'), ((356, 376), 'math.sin', 'math.sin', (['random_rad'], {}), '(random_rad)\n', (364, 376), False, 'import math\n'), ((256, 280), 'random.uniform', 'random.uniform', (['(0.0)', '(1.0)'], {}), '(0.0, 1.0)\n', (270, 280), False, 'import random\n')]
|
from pymongo.errors import BulkWriteError
import logging
import time
import tqdm
import tweepy
logging.basicConfig(format='[%(asctime)s] - %(name)s - %(funcName)s - %(levelname)s : %(message)s', level=logging.INFO)
log = logging.getLogger(__name__)
def bulk_write_to_mongo(collection, data):
to_insert = len(data)
try:
if to_insert > 0:
collection.insert_many(data, ordered=False)
return to_insert, 0
except BulkWriteError as e:
log.error("BulkWriteError")
inserted = e.details["nInserted"]
return inserted, to_insert - inserted
def download_timeline(user_id: str, n: int = 3200, count: int = 200, trim_user=True, tweet_mode="extended", **kwargs):
log.info(f'Downloading timeline from user id: {user_id}')
start_time = time.time()
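    # tweepy.Cursor pages through api.user_timeline (api is assumed to be an authenticated tweepy.API defined elsewhere), `count` statuses per request, until `n` tweets are collected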
tweets = [status for status in tqdm.tqdm(tweepy.Cursor(
api.user_timeline,
user_id=user_id,
count=count,
trim_user=trim_user,
tweet_mode=tweet_mode,
**kwargs).items(n), total=n)]
total_time = time.time() - start_time
log.info(f"Downloaded finished: {len(tweets)} tweets in {total_time:.4f} seconds.")
return tweets
|
[
"logging.getLogger",
"tweepy.Cursor",
"logging.basicConfig",
"time.time"
] |
[((96, 224), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""[%(asctime)s] - %(name)s - %(funcName)s - %(levelname)s : %(message)s"""', 'level': 'logging.INFO'}), "(format=\n '[%(asctime)s] - %(name)s - %(funcName)s - %(levelname)s : %(message)s',\n level=logging.INFO)\n", (115, 224), False, 'import logging\n'), ((222, 249), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (239, 249), False, 'import logging\n'), ((795, 806), 'time.time', 'time.time', ([], {}), '()\n', (804, 806), False, 'import time\n'), ((1059, 1070), 'time.time', 'time.time', ([], {}), '()\n', (1068, 1070), False, 'import time\n'), ((852, 973), 'tweepy.Cursor', 'tweepy.Cursor', (['api.user_timeline'], {'user_id': 'user_id', 'count': 'count', 'trim_user': 'trim_user', 'tweet_mode': 'tweet_mode'}), '(api.user_timeline, user_id=user_id, count=count, trim_user=\n trim_user, tweet_mode=tweet_mode, **kwargs)\n', (865, 973), False, 'import tweepy\n')]
|
from typing import Callable
from tensorflow.python.layers import base
from tensorflow.python.eager import context
from tensorflow.python.estimator import util as estimator_util
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.layers import utils as layers_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.ops import variables as tf_variables
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import nest
from tensorflow.python.eager import context
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.layers import base
from tensorflow.python.layers import utils
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import standard_ops
from tensorflow.contrib.layers import fully_connected
import tensorflow as tf
import sys
from helpers.misc_utils import debug_tensor, debug_shape
from helpers.ops import safe_log
FLAGS = tf.app.flags.FLAGS
class CopyLayer(base.Layer):
"""Densely-connected layer class.
This layer implements the operation:
`outputs = activation(inputs * kernel + bias)`
Where `activation` is the activation function passed as the `activation`
argument (if not `None`), `kernel` is a weights matrix created by the layer,
and `bias` is a bias vector created by the layer
(only if `use_bias` is `True`).
Note: if the input to the layer has a rank greater than 2, then it is
flattened prior to the initial matrix multiply by `kernel`.
Arguments:
units: Integer or Long, dimensionality of the output space.
activation: Activation function (callable). Set it to None to maintain a
linear activation.
use_bias: Boolean, whether the layer uses a bias.
kernel_initializer: Initializer function for the weight matrix.
If `None` (default), weights are initialized using the default
initializer used by `tf.get_variable`.
bias_initializer: Initializer function for the bias.
kernel_regularizer: Regularizer function for the weight matrix.
bias_regularizer: Regularizer function for the bias.
activity_regularizer: Regularizer function for the output.
kernel_constraint: An optional projection function to be applied to the
kernel after being updated by an `Optimizer` (e.g. used to implement
norm constraints or value constraints for layer weights). The function
must take as input the unprojected variable and must return the
projected variable (which must have the same shape). Constraints are
not safe to use when doing asynchronous distributed training.
bias_constraint: An optional projection function to be applied to the
bias after being updated by an `Optimizer`.
trainable: Boolean, if `True` also add variables to the graph collection
`GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
name: String, the name of the layer. Layers with the same name will
share weights, but to avoid mistakes we require reuse=True in such cases.
reuse: Boolean, whether to reuse the weights of a previous layer
by the same name.
Properties:
units: Python integer, dimensionality of the output space.
activation: Activation function (callable).
use_bias: Boolean, whether the layer uses a bias.
kernel_initializer: Initializer instance (or name) for the kernel matrix.
bias_initializer: Initializer instance (or name) for the bias.
kernel_regularizer: Regularizer instance for the kernel matrix (callable)
bias_regularizer: Regularizer instance for the bias (callable).
activity_regularizer: Regularizer instance for the output (callable)
kernel_constraint: Constraint function for the kernel matrix.
bias_constraint: Constraint function for the bias.
kernel: Weight matrix (TensorFlow variable or tensor).
bias: Bias vector, if applicable (TensorFlow variable or tensor).
"""
def __init__(self, embedding_dim,
units,
switch_units=64,
activation=None,
use_bias=False,
kernel_initializer=None,
bias_initializer=init_ops.zeros_initializer(),
kernel_regularizer=None,
bias_regularizer=None,
activity_regularizer=None,
kernel_constraint=None,
bias_constraint=None,
trainable=True,
name=None,
source_provider: Callable[[], tf.Tensor] = None,
source_provider_sl: Callable[[], tf.Tensor] = None,
condition_encoding: Callable[[], tf.Tensor] = None,
output_mask: Callable[[], tf.Tensor] = None,
training_mode=False,
vocab_size=None,
context_as_set=False,
max_copy_size=None,
mask_oovs=False,
**kwargs):
super(CopyLayer, self).__init__(trainable=trainable, name=name,
activity_regularizer=activity_regularizer,
**kwargs)
self.vocab_size = vocab_size
self.source_provider = source_provider
self.source_provider_sl = source_provider_sl
self.embedding_dim = embedding_dim
self.units = units
self.switch_units = switch_units
self.activation = activation
self.use_bias = use_bias
self.kernel_initializer = kernel_initializer
self.bias_initializer = bias_initializer
self.kernel_regularizer = kernel_regularizer
self.bias_regularizer = bias_regularizer
self.kernel_constraint = kernel_constraint
self.bias_constraint = bias_constraint
self.input_spec = base.InputSpec(min_ndim=2)
self.training_mode=training_mode
# self.output_mask=output_mask
self.max_copy_size=max_copy_size
self.mask_oovs = mask_oovs
self.context_as_set=context_as_set
self.condition_encoding = condition_encoding
def build(self, input_shape):
input_shape = tensor_shape.TensorShape(input_shape)
# print("building copy layer")
# print(input_shape)
self.built = True
def call(self, inputs):
inputs = ops.convert_to_tensor(inputs, dtype=self.dtype) # batch x len_source+emb_dim
# inputs = debug_shape(inputs, "inputs")
# print(inputs)
# [batch_size, emb_dim + len_source] in eval,
# [len_target, batch_size,emb_dim + len_source] in train
source = self.source_provider() # [batch_size, len_source]
# source = debug_shape(source,"src")
source_sl = self.source_provider_sl()
condition_encoding = self.condition_encoding()
# condition_encoding = debug_shape(condition_encoding, "cond enc")
batch_size = tf.shape(source)[0]
len_source = tf.shape(source)[1]
shape = tf.shape(inputs)
is_eval = len(inputs.get_shape()) == 2
beam_width = tf.constant(1) if is_eval else shape[1]
# len_target = tf.Print(len_target, [len_target, batch_size, shape[-1]], "input reshape")
# inputs = tf.reshape(inputs, [-1, shape[-1]]) # [len_target * batch_size, len_source + emb_dim]
inputs_new = tf.reshape(inputs,
[batch_size*beam_width, shape[-1]]) # [len_target, batch_size, len_source + emb_dim]
# inputs_new = debug_shape(inputs_new, "inputs_new")
# -- [len_target, batch_size, embedding_dim] attention, []
# -- [len_target, batch_size, len_source] alignments
# attention, alignments = tf.split(inputs, [self.embedding_dim, -1], axis=1)
attention, alignments = tf.split(inputs_new, num_or_size_splits=[self.embedding_dim, -1], axis=-1)
# [len_target, batch_size, vocab_size]
if FLAGS.out_vocab_cpu:
with tf.device('/cpu:*'):
shortlist = tf.layers.dense(attention, self.vocab_size, activation=tf.nn.softmax, use_bias=False)
else:
shortlist = tf.layers.dense(attention, self.vocab_size, activation=tf.nn.softmax, use_bias=False)
# attention = debug_shape(attention, "attn")
# alignments = debug_shape(alignments, "align ("+str(self.units)+" desired)")
# alignments = debug_tensor(alignments, "alignments")
# print(alignments)
# shortlist = debug_shape(shortlist, "shortlist")
# TEMP: kill OOVs
s = tf.shape(shortlist)
mask = tf.concat([tf.ones((s[0],1)),tf.zeros((s[0],1)),tf.ones((s[0],s[1]-2))], axis=1)
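        # the mask zeroes out vocab column 1 (assumed to be the OOV/UNK token) and keeps every other column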
shortlist = tf.cond(self.mask_oovs, lambda: shortlist * mask, lambda: shortlist)
# pad the alignments to the longest possible source st output vocab is fixed size
# TODO: Check for non zero alignments outside the seq length
# alignments_padded = debug_shape(alignments_padded, "align padded")
# switch takes st, vt and yt−1 as inputs
# vt = concat(weighted context encoding at t; condition encoding)
# st = hidden state at t
# y_t-1 is previous generated token
condition_encoding_tiled = tf.contrib.seq2seq.tile_batch(condition_encoding, multiplier=beam_width)
vt = tf.concat([attention, condition_encoding_tiled], axis=1)
# NOTE: this is missing the previous input y_t-1 and s_t
switch_input = tf.concat([vt],axis=1)
switch_h1 = tf.layers.dropout(tf.layers.dense(switch_input, self.switch_units, activation=tf.nn.tanh, kernel_initializer=tf.glorot_uniform_initializer()), rate=0.3, training=self.training_mode)
switch_h2 = tf.layers.dropout(tf.layers.dense(switch_h1, self.switch_units, activation=tf.nn.tanh, kernel_initializer=tf.glorot_uniform_initializer()), rate=0.3, training=self.training_mode)
self.switch = tf.layers.dense(switch_h2, 1, activation=tf.sigmoid, kernel_initializer=tf.glorot_uniform_initializer())
# switch = debug_shape(switch, "switch")
if FLAGS.disable_copy:
self.switch = 0
elif FLAGS.disable_shortlist:
self.switch = 1
# if self.output_mask is not None:
# alignments = self.output_mask() * alignments
source_tiled = tf.contrib.seq2seq.tile_batch(source, multiplier=beam_width)
source_tiled_sl = tf.contrib.seq2seq.tile_batch(source_sl, multiplier=beam_width)
shortlist = (1-self.switch)*shortlist
alignments = self.switch*alignments
# Take any tokens that are the same in either vocab and combine their probabilities
# old: mult by a big sparse matrix - not v mem efficient..
# opt1: mult the copy dist by a vocab x copy matrix and add to vocab part
# opt2: do an nd_gather to copy the relevant prob mass, then mask carefully to remove it
if FLAGS.combine_vocab:
# copy everything in real shortlist except special toks
# print(len_source, self.max_copy_size)
source_tiled_sl_padded = tf.pad(source_tiled_sl, [[0, 0], [0, self.max_copy_size-tf.shape(source_tiled_sl)[-1]]], 'CONSTANT', constant_values=0)
# attempt 2!
batch_ix = tf.tile(tf.expand_dims(tf.range(batch_size*beam_width),axis=-1),[1,len_source])
# seq_ix = tf.tile(tf.expand_dims(tf.range(len_source),axis=0),[batch_size*beam_width,1])
tgt_indices = tf.reshape(tf.concat([tf.expand_dims(batch_ix,-1),tf.expand_dims(source_tiled_sl,-1)], axis=2),[-1,2])
ident_indices = tf.where(tf.greater(source_tiled_sl, -1)) # get ixs of all elements
# ident_indices = tf.where()
# tgt_indices = debug_tensor(tgt_indices)
# get the copy probs at each point in the source..
updates = tf.reshape(tf.gather_nd(alignments, ident_indices),[-1])
# and send them to the their shortlist index
sum_part = tf.scatter_nd(tgt_indices, updates, [batch_size*beam_width, self.vocab_size+self.max_copy_size])
# then zero out the ix's that got copied
align_zeroed = alignments * tf.cast(tf.greater_equal(source_tiled_sl,self.vocab_size),tf.float32)
align_moved = alignments * tf.cast(tf.less(source_tiled_sl,self.vocab_size),tf.float32) # ie only let through stuff that *isnt* in SL
# and add the correct pieces together
alignments = align_zeroed
shortlist = shortlist + sum_part[:,:self.vocab_size]
# result = tf.Print(result, [tf.reduce_sum(result[:,:self.vocab_size],-1)], "result sl sum")
# shortlist = tf.Print(shortlist, [tf.reduce_sum(align_moved,-1)], "sum align_moved")
# shortlist = tf.Print(shortlist, [tf.reduce_sum(sum_part[:,:self.vocab_size],-1)], "sum sum_part")
# convert position probs to ids
if self.context_as_set:
# print(source) # batch x seq
# print(alignments) # batch x seq
pos_to_id = tf.one_hot(source_tiled-self.vocab_size, depth=self.max_copy_size) # batch x seq x vocab
if FLAGS.maxout_pointer:
copy_dist = tf.reduce_max(pos_to_id * tf.expand_dims(alignments, 2), axis=1)
else:
copy_dist = tf.squeeze(tf.matmul(tf.expand_dims(alignments,1), pos_to_id), axis=1)
else:
copy_dist=alignments
copy_dist_padded = tf.pad(copy_dist, [[0, 0], [0, self.max_copy_size-tf.shape(copy_dist)[-1]]], 'CONSTANT', constant_values=0)
result = tf.concat([shortlist,copy_dist_padded], axis=1) # this used to be safe_log'd
# if FLAGS.combine_vocab:
# result = tf.Print(result, [tf.reduce_sum(result,-1)], "result sum")
target_shape = tf.concat([shape[:-1], [-1]], 0)
result =tf.reshape(result, target_shape)
return result
# return tf.Print(result, [tf.reduce_max(switch), tf.reduce_max(shortlist),
# tf.reduce_max(alignments)], summarize=10)
def compute_output_shape(self, input_shape):
input_shape = tensor_shape.TensorShape(input_shape)
input_shape = input_shape.with_rank_at_least(2)
# print(input_shape)
if input_shape[-1].value is None:
raise ValueError(
'The innermost dimension of input_shape must be defined, but saw: %s'
% input_shape)
return input_shape[:-1].concatenate(self.units+self.vocab_size if not self.context_as_set else self.vocab_size+self.max_copy_size)
# this for older tf versions
def _compute_output_shape(self, input_shape):
return self.compute_output_shape(input_shape)
def dense(
inputs, units,
activation=None,
use_bias=True,
kernel_initializer=None,
bias_initializer=init_ops.zeros_initializer(),
kernel_regularizer=None,
bias_regularizer=None,
activity_regularizer=None,
kernel_constraint=None,
bias_constraint=None,
trainable=True,
name=None,
reuse=None):
"""Functional interface for the densely-connected layer.
This layer implements the operation:
`outputs = activation(inputs.kernel + bias)`
Where `activation` is the activation function passed as the `activation`
argument (if not `None`), `kernel` is a weights matrix created by the layer,
and `bias` is a bias vector created by the layer
(only if `use_bias` is `True`).
Note: if the `inputs` tensor has a rank greater than 2, then it is
flattened prior to the initial matrix multiply by `kernel`.
Arguments:
inputs: Tensor input.
units: Integer or Long, dimensionality of the output space.
activation: Activation function (callable). Set it to None to maintain a
linear activation.
use_bias: Boolean, whether the layer uses a bias.
kernel_initializer: Initializer function for the weight matrix.
If `None` (default), weights are initialized using the default
initializer used by `tf.get_variable`.
bias_initializer: Initializer function for the bias.
kernel_regularizer: Regularizer function for the weight matrix.
bias_regularizer: Regularizer function for the bias.
activity_regularizer: Regularizer function for the output.
kernel_constraint: An optional projection function to be applied to the
kernel after being updated by an `Optimizer` (e.g. used to implement
norm constraints or value constraints for layer weights). The function
must take as input the unprojected variable and must return the
projected variable (which must have the same shape). Constraints are
not safe to use when doing asynchronous distributed training.
bias_constraint: An optional projection function to be applied to the
bias after being updated by an `Optimizer`.
trainable: Boolean, if `True` also add variables to the graph collection
`GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
name: String, the name of the layer.
reuse: Boolean, whether to reuse the weights of a previous layer
by the same name.
Returns:
Output tensor.
Raises:
ValueError: if eager execution is enabled.
"""
layer = CopyLayer(units,
activation=activation,
use_bias=use_bias,
kernel_initializer=kernel_initializer,
bias_initializer=bias_initializer,
kernel_regularizer=kernel_regularizer,
bias_regularizer=bias_regularizer,
activity_regularizer=activity_regularizer,
kernel_constraint=kernel_constraint,
bias_constraint=bias_constraint,
trainable=trainable,
name=name,
dtype=inputs.dtype.base_dtype,
_scope=name,
_reuse=reuse)
print("inside copy layer, yaaay!")
sys.exit(0)
return layer.apply(inputs)
|
[
"tensorflow.cond",
"tensorflow.contrib.seq2seq.tile_batch",
"tensorflow.gather_nd",
"tensorflow.reshape",
"tensorflow.python.layers.base.InputSpec",
"tensorflow.python.framework.tensor_shape.TensorShape",
"tensorflow.greater_equal",
"tensorflow.greater",
"tensorflow.split",
"tensorflow.scatter_nd",
"tensorflow.one_hot",
"tensorflow.less",
"tensorflow.concat",
"tensorflow.python.ops.init_ops.zeros_initializer",
"tensorflow.python.framework.ops.convert_to_tensor",
"tensorflow.ones",
"tensorflow.range",
"tensorflow.constant",
"sys.exit",
"tensorflow.expand_dims",
"tensorflow.glorot_uniform_initializer",
"tensorflow.layers.dense",
"tensorflow.device",
"tensorflow.shape",
"tensorflow.zeros"
] |
[((15151, 15179), 'tensorflow.python.ops.init_ops.zeros_initializer', 'init_ops.zeros_initializer', ([], {}), '()\n', (15177, 15179), False, 'from tensorflow.python.ops import init_ops\n'), ((18328, 18339), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (18336, 18339), False, 'import sys\n'), ((4482, 4510), 'tensorflow.python.ops.init_ops.zeros_initializer', 'init_ops.zeros_initializer', ([], {}), '()\n', (4508, 4510), False, 'from tensorflow.python.ops import init_ops\n'), ((6106, 6132), 'tensorflow.python.layers.base.InputSpec', 'base.InputSpec', ([], {'min_ndim': '(2)'}), '(min_ndim=2)\n', (6120, 6132), False, 'from tensorflow.python.layers import base\n'), ((6442, 6479), 'tensorflow.python.framework.tensor_shape.TensorShape', 'tensor_shape.TensorShape', (['input_shape'], {}), '(input_shape)\n', (6466, 6479), False, 'from tensorflow.python.framework import tensor_shape\n'), ((6620, 6667), 'tensorflow.python.framework.ops.convert_to_tensor', 'ops.convert_to_tensor', (['inputs'], {'dtype': 'self.dtype'}), '(inputs, dtype=self.dtype)\n', (6641, 6667), False, 'from tensorflow.python.framework import ops\n'), ((7281, 7297), 'tensorflow.shape', 'tf.shape', (['inputs'], {}), '(inputs)\n', (7289, 7297), True, 'import tensorflow as tf\n'), ((7632, 7688), 'tensorflow.reshape', 'tf.reshape', (['inputs', '[batch_size * beam_width, shape[-1]]'], {}), '(inputs, [batch_size * beam_width, shape[-1]])\n', (7642, 7688), True, 'import tensorflow as tf\n'), ((8076, 8150), 'tensorflow.split', 'tf.split', (['inputs_new'], {'num_or_size_splits': '[self.embedding_dim, -1]', 'axis': '(-1)'}), '(inputs_new, num_or_size_splits=[self.embedding_dim, -1], axis=-1)\n', (8084, 8150), True, 'import tensorflow as tf\n'), ((8833, 8852), 'tensorflow.shape', 'tf.shape', (['shortlist'], {}), '(shortlist)\n', (8841, 8852), True, 'import tensorflow as tf\n'), ((8969, 9039), 'tensorflow.cond', 'tf.cond', (['self.mask_oovs', '(lambda : shortlist * mask)', '(lambda : shortlist)'], {}), '(self.mask_oovs, lambda : shortlist * mask, lambda : shortlist)\n', (8976, 9039), True, 'import tensorflow as tf\n'), ((9512, 9584), 'tensorflow.contrib.seq2seq.tile_batch', 'tf.contrib.seq2seq.tile_batch', (['condition_encoding'], {'multiplier': 'beam_width'}), '(condition_encoding, multiplier=beam_width)\n', (9541, 9584), True, 'import tensorflow as tf\n'), ((9599, 9655), 'tensorflow.concat', 'tf.concat', (['[attention, condition_encoding_tiled]'], {'axis': '(1)'}), '([attention, condition_encoding_tiled], axis=1)\n', (9608, 9655), True, 'import tensorflow as tf\n'), ((9744, 9767), 'tensorflow.concat', 'tf.concat', (['[vt]'], {'axis': '(1)'}), '([vt], axis=1)\n', (9753, 9767), True, 'import tensorflow as tf\n'), ((10598, 10658), 'tensorflow.contrib.seq2seq.tile_batch', 'tf.contrib.seq2seq.tile_batch', (['source'], {'multiplier': 'beam_width'}), '(source, multiplier=beam_width)\n', (10627, 10658), True, 'import tensorflow as tf\n'), ((10685, 10748), 'tensorflow.contrib.seq2seq.tile_batch', 'tf.contrib.seq2seq.tile_batch', (['source_sl'], {'multiplier': 'beam_width'}), '(source_sl, multiplier=beam_width)\n', (10714, 10748), True, 'import tensorflow as tf\n'), ((13863, 13911), 'tensorflow.concat', 'tf.concat', (['[shortlist, copy_dist_padded]'], {'axis': '(1)'}), '([shortlist, copy_dist_padded], axis=1)\n', (13872, 13911), True, 'import tensorflow as tf\n'), ((14081, 14113), 'tensorflow.concat', 'tf.concat', (['[shape[:-1], [-1]]', '(0)'], {}), '([shape[:-1], [-1]], 0)\n', (14090, 14113), True, 'import tensorflow as tf\n'), ((14131, 14163), 
'tensorflow.reshape', 'tf.reshape', (['result', 'target_shape'], {}), '(result, target_shape)\n', (14141, 14163), True, 'import tensorflow as tf\n'), ((14420, 14457), 'tensorflow.python.framework.tensor_shape.TensorShape', 'tensor_shape.TensorShape', (['input_shape'], {}), '(input_shape)\n', (14444, 14457), False, 'from tensorflow.python.framework import tensor_shape\n'), ((7204, 7220), 'tensorflow.shape', 'tf.shape', (['source'], {}), '(source)\n', (7212, 7220), True, 'import tensorflow as tf\n'), ((7245, 7261), 'tensorflow.shape', 'tf.shape', (['source'], {}), '(source)\n', (7253, 7261), True, 'import tensorflow as tf\n'), ((7367, 7381), 'tensorflow.constant', 'tf.constant', (['(1)'], {}), '(1)\n', (7378, 7381), True, 'import tensorflow as tf\n'), ((8420, 8509), 'tensorflow.layers.dense', 'tf.layers.dense', (['attention', 'self.vocab_size'], {'activation': 'tf.nn.softmax', 'use_bias': '(False)'}), '(attention, self.vocab_size, activation=tf.nn.softmax,\n use_bias=False)\n', (8435, 8509), True, 'import tensorflow as tf\n'), ((12263, 12368), 'tensorflow.scatter_nd', 'tf.scatter_nd', (['tgt_indices', 'updates', '[batch_size * beam_width, self.vocab_size + self.max_copy_size]'], {}), '(tgt_indices, updates, [batch_size * beam_width, self.\n vocab_size + self.max_copy_size])\n', (12276, 12368), True, 'import tensorflow as tf\n'), ((13323, 13391), 'tensorflow.one_hot', 'tf.one_hot', (['(source_tiled - self.vocab_size)'], {'depth': 'self.max_copy_size'}), '(source_tiled - self.vocab_size, depth=self.max_copy_size)\n', (13333, 13391), True, 'import tensorflow as tf\n'), ((8247, 8266), 'tensorflow.device', 'tf.device', (['"""/cpu:*"""'], {}), "('/cpu:*')\n", (8256, 8266), True, 'import tensorflow as tf\n'), ((8296, 8385), 'tensorflow.layers.dense', 'tf.layers.dense', (['attention', 'self.vocab_size'], {'activation': 'tf.nn.softmax', 'use_bias': '(False)'}), '(attention, self.vocab_size, activation=tf.nn.softmax,\n use_bias=False)\n', (8311, 8385), True, 'import tensorflow as tf\n'), ((8879, 8897), 'tensorflow.ones', 'tf.ones', (['(s[0], 1)'], {}), '((s[0], 1))\n', (8886, 8897), True, 'import tensorflow as tf\n'), ((8897, 8916), 'tensorflow.zeros', 'tf.zeros', (['(s[0], 1)'], {}), '((s[0], 1))\n', (8905, 8916), True, 'import tensorflow as tf\n'), ((8916, 8941), 'tensorflow.ones', 'tf.ones', (['(s[0], s[1] - 2)'], {}), '((s[0], s[1] - 2))\n', (8923, 8941), True, 'import tensorflow as tf\n'), ((10262, 10293), 'tensorflow.glorot_uniform_initializer', 'tf.glorot_uniform_initializer', ([], {}), '()\n', (10291, 10293), True, 'import tensorflow as tf\n'), ((11886, 11917), 'tensorflow.greater', 'tf.greater', (['source_tiled_sl', '(-1)'], {}), '(source_tiled_sl, -1)\n', (11896, 11917), True, 'import tensorflow as tf\n'), ((12137, 12176), 'tensorflow.gather_nd', 'tf.gather_nd', (['alignments', 'ident_indices'], {}), '(alignments, ident_indices)\n', (12149, 12176), True, 'import tensorflow as tf\n'), ((9896, 9927), 'tensorflow.glorot_uniform_initializer', 'tf.glorot_uniform_initializer', ([], {}), '()\n', (9925, 9927), True, 'import tensorflow as tf\n'), ((10095, 10126), 'tensorflow.glorot_uniform_initializer', 'tf.glorot_uniform_initializer', ([], {}), '()\n', (10124, 10126), True, 'import tensorflow as tf\n'), ((11561, 11594), 'tensorflow.range', 'tf.range', (['(batch_size * beam_width)'], {}), '(batch_size * beam_width)\n', (11569, 11594), True, 'import tensorflow as tf\n'), ((12461, 12511), 'tensorflow.greater_equal', 'tf.greater_equal', (['source_tiled_sl', 'self.vocab_size'], {}), '(source_tiled_sl, 
self.vocab_size)\n', (12477, 12511), True, 'import tensorflow as tf\n'), ((12570, 12611), 'tensorflow.less', 'tf.less', (['source_tiled_sl', 'self.vocab_size'], {}), '(source_tiled_sl, self.vocab_size)\n', (12577, 12611), True, 'import tensorflow as tf\n'), ((11768, 11796), 'tensorflow.expand_dims', 'tf.expand_dims', (['batch_ix', '(-1)'], {}), '(batch_ix, -1)\n', (11782, 11796), True, 'import tensorflow as tf\n'), ((11796, 11831), 'tensorflow.expand_dims', 'tf.expand_dims', (['source_tiled_sl', '(-1)'], {}), '(source_tiled_sl, -1)\n', (11810, 11831), True, 'import tensorflow as tf\n'), ((13503, 13532), 'tensorflow.expand_dims', 'tf.expand_dims', (['alignments', '(2)'], {}), '(alignments, 2)\n', (13517, 13532), True, 'import tensorflow as tf\n'), ((13609, 13638), 'tensorflow.expand_dims', 'tf.expand_dims', (['alignments', '(1)'], {}), '(alignments, 1)\n', (13623, 13638), True, 'import tensorflow as tf\n'), ((13787, 13806), 'tensorflow.shape', 'tf.shape', (['copy_dist'], {}), '(copy_dist)\n', (13795, 13806), True, 'import tensorflow as tf\n'), ((11425, 11450), 'tensorflow.shape', 'tf.shape', (['source_tiled_sl'], {}), '(source_tiled_sl)\n', (11433, 11450), True, 'import tensorflow as tf\n')]
|
import math
import pytest
from e3nn.math import perm
@pytest.mark.parametrize('n', [0, 1, 2, 3, 4, 5])
def test_inverse(n):
for p in perm.group(n):
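        # composing a permutation with its inverse, in either order, must give the identity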
ip = perm.inverse(p)
assert perm.compose(p, ip) == perm.identity(n)
assert perm.compose(ip, p) == perm.identity(n)
@pytest.mark.parametrize('n', [0, 1, 2, 3, 4, 5])
def test_int_inverse(n):
for j in range(math.factorial(n)):
p = perm.from_int(j, n)
i = perm.to_int(p)
assert i == j
@pytest.mark.parametrize('n', [0, 1, 2, 3, 4, 5])
def test_int_injection(n):
group = {perm.from_int(j, n) for j in range(math.factorial(n))}
assert len(group) == math.factorial(n)
def test_germinate():
assert perm.is_group(perm.germinate({(1, 2, 3, 4, 0)}))
assert perm.is_group(perm.germinate({(1, 0, 2, 3), (0, 2, 1, 3), (0, 1, 3, 2)}))
@pytest.mark.parametrize('n', [0, 1, 2, 3, 4, 5])
def test_rand(n):
perm.is_perm(perm.rand(n))
def test_not_group():
assert not perm.is_group(set()) # empty
assert not perm.is_group({(1, 0, 2), (0, 2, 1), (1, 2, 0), (2, 0, 1), (2, 1, 0)}) # missing neutral
assert not perm.is_group({(0, 1, 2), (1, 2, 0)}) # missing inverse
|
[
"e3nn.math.perm.compose",
"e3nn.math.perm.germinate",
"e3nn.math.perm.from_int",
"e3nn.math.perm.group",
"e3nn.math.perm.inverse",
"e3nn.math.perm.to_int",
"math.factorial",
"e3nn.math.perm.is_group",
"pytest.mark.parametrize",
"e3nn.math.perm.rand",
"e3nn.math.perm.identity"
] |
[((58, 106), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""n"""', '[0, 1, 2, 3, 4, 5]'], {}), "('n', [0, 1, 2, 3, 4, 5])\n", (81, 106), False, 'import pytest\n'), ((299, 347), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""n"""', '[0, 1, 2, 3, 4, 5]'], {}), "('n', [0, 1, 2, 3, 4, 5])\n", (322, 347), False, 'import pytest\n'), ((496, 544), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""n"""', '[0, 1, 2, 3, 4, 5]'], {}), "('n', [0, 1, 2, 3, 4, 5])\n", (519, 544), False, 'import pytest\n'), ((855, 903), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""n"""', '[0, 1, 2, 3, 4, 5]'], {}), "('n', [0, 1, 2, 3, 4, 5])\n", (878, 903), False, 'import pytest\n'), ((141, 154), 'e3nn.math.perm.group', 'perm.group', (['n'], {}), '(n)\n', (151, 154), False, 'from e3nn.math import perm\n'), ((169, 184), 'e3nn.math.perm.inverse', 'perm.inverse', (['p'], {}), '(p)\n', (181, 184), False, 'from e3nn.math import perm\n'), ((392, 409), 'math.factorial', 'math.factorial', (['n'], {}), '(n)\n', (406, 409), False, 'import math\n'), ((424, 443), 'e3nn.math.perm.from_int', 'perm.from_int', (['j', 'n'], {}), '(j, n)\n', (437, 443), False, 'from e3nn.math import perm\n'), ((456, 470), 'e3nn.math.perm.to_int', 'perm.to_int', (['p'], {}), '(p)\n', (467, 470), False, 'from e3nn.math import perm\n'), ((585, 604), 'e3nn.math.perm.from_int', 'perm.from_int', (['j', 'n'], {}), '(j, n)\n', (598, 604), False, 'from e3nn.math import perm\n'), ((665, 682), 'math.factorial', 'math.factorial', (['n'], {}), '(n)\n', (679, 682), False, 'import math\n'), ((732, 765), 'e3nn.math.perm.germinate', 'perm.germinate', (['{(1, 2, 3, 4, 0)}'], {}), '({(1, 2, 3, 4, 0)})\n', (746, 765), False, 'from e3nn.math import perm\n'), ((792, 850), 'e3nn.math.perm.germinate', 'perm.germinate', (['{(1, 0, 2, 3), (0, 2, 1, 3), (0, 1, 3, 2)}'], {}), '({(1, 0, 2, 3), (0, 2, 1, 3), (0, 1, 3, 2)})\n', (806, 850), False, 'from e3nn.math import perm\n'), ((939, 951), 'e3nn.math.perm.rand', 'perm.rand', (['n'], {}), '(n)\n', (948, 951), False, 'from e3nn.math import perm\n'), ((1037, 1107), 'e3nn.math.perm.is_group', 'perm.is_group', (['{(1, 0, 2), (0, 2, 1), (1, 2, 0), (2, 0, 1), (2, 1, 0)}'], {}), '({(1, 0, 2), (0, 2, 1), (1, 2, 0), (2, 0, 1), (2, 1, 0)})\n', (1050, 1107), False, 'from e3nn.math import perm\n'), ((1142, 1179), 'e3nn.math.perm.is_group', 'perm.is_group', (['{(0, 1, 2), (1, 2, 0)}'], {}), '({(0, 1, 2), (1, 2, 0)})\n', (1155, 1179), False, 'from e3nn.math import perm\n'), ((201, 220), 'e3nn.math.perm.compose', 'perm.compose', (['p', 'ip'], {}), '(p, ip)\n', (213, 220), False, 'from e3nn.math import perm\n'), ((224, 240), 'e3nn.math.perm.identity', 'perm.identity', (['n'], {}), '(n)\n', (237, 240), False, 'from e3nn.math import perm\n'), ((256, 275), 'e3nn.math.perm.compose', 'perm.compose', (['ip', 'p'], {}), '(ip, p)\n', (268, 275), False, 'from e3nn.math import perm\n'), ((279, 295), 'e3nn.math.perm.identity', 'perm.identity', (['n'], {}), '(n)\n', (292, 295), False, 'from e3nn.math import perm\n'), ((620, 637), 'math.factorial', 'math.factorial', (['n'], {}), '(n)\n', (634, 637), False, 'import math\n')]
|
from typing import Generic, Optional, TypeVar
from abc import ABCMeta, abstractmethod
from datetime import datetime, timedelta
import logging
from crontab import CronTab
from dateutil.relativedelta import *
from django.db import transaction
from django.utils import timezone
from processes.common.request_helpers import context_with_request
from processes.models import (
Alert, AlertSendStatus, AlertMethod, MissingScheduledExecution,
Schedulable
)
MIN_DELAY_BETWEEN_EXPECTED_AND_ACTUAL_SECONDS = 300
MAX_EARLY_STARTUP_SECONDS = 60
MAX_STARTUP_SECONDS = 10 * 60
MAX_SCHEDULED_LATENESS_SECONDS = 30 * 60
logger = logging.getLogger(__name__)
BoundSchedulable = TypeVar('BoundSchedulable', bound=Schedulable)
class ScheduleChecker(Generic[BoundSchedulable], metaclass=ABCMeta):
def check_all(self) -> None:
model_name = self.model_name()
for schedulable in self.manager().filter(enabled=True).exclude(schedule='').all():
logger.info(f"Found {model_name} {schedulable} with schedule {schedulable.schedule}")
try:
self.check_execution_on_time(schedulable)
except Exception:
logger.exception(f"check_all() failed on {model_name} {schedulable.uuid}")
def check_execution_on_time(self, schedulable: BoundSchedulable) \
-> Optional[MissingScheduledExecution]:
model_name = self.model_name()
schedule = schedulable.schedule.strip()
if not schedule:
logger.warning(f"For schedulable entity {schedulable.uuid}, schedule '{schedule}' is blank, skipping")
return None
mse: Optional[MissingScheduledExecution] = None
m = Schedulable.CRON_REGEX.match(schedule)
if m:
cron_expr = m.group(1)
logger.info(
f"check_execution_on_time(): {model_name} {schedulable.name} with schedule {schedulable.schedule} has cron expression '{cron_expr}'")
try:
entry = CronTab(cron_expr)
except Exception as ex:
logger.exception(f"Can't parse cron expression '{cron_expr}'")
raise ex
utc_now = timezone.now()
negative_previous_execution_seconds_ago = entry.previous(
default_utc=True)
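            # crontab's CronTab.previous() returns a negative offset in seconds back to the most recent scheduled time (None is treated as "no previous run")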
if negative_previous_execution_seconds_ago is None:
logger.info('check_execution_on_time(): No expected previous execution, returning')
return None
previous_execution_seconds_ago = -(negative_previous_execution_seconds_ago or 0.0)
if previous_execution_seconds_ago < MIN_DELAY_BETWEEN_EXPECTED_AND_ACTUAL_SECONDS:
logger.info('check_execution_on_time(): Expected previous execution too recent, returning')
return None
expected_datetime = (utc_now - timedelta(seconds=previous_execution_seconds_ago) + timedelta(
microseconds=500000)).replace(microsecond=0)
if expected_datetime < schedulable.schedule_updated_at:
logger.info(
f"check_execution_on_time(): Previous execution expected to start at at {expected_datetime} but that is before the schedule was last updated at {schedulable.schedule_updated_at}")
return None
logger.info(
f"check_execution_on_time(): Previous execution was supposed to start {previous_execution_seconds_ago / 60} minutes ago at {expected_datetime}")
with transaction.atomic():
mse = self.check_executed_at(schedulable, expected_datetime)
else:
m = Schedulable.RATE_REGEX.match(schedule)
if m:
n = int(m.group(1))
time_unit = m.group(2).lower().rstrip('s')
logger.info(
f"{model_name} {schedulable.name} with schedule {schedulable.schedule} has rate {n} per {time_unit}")
relative_delta = self.make_relative_delta(n, time_unit)
utc_now = timezone.now()
expected_datetime = utc_now - relative_delta
if expected_datetime < schedulable.schedule_updated_at:
logger.info(
f"check_execution_on_time(): Previous execution expected after {expected_datetime} but that is before the schedule was last updated at {schedulable.schedule_updated_at}")
return None
logger.info(
f"check_execution_on_time(): Previous execution was supposed to start executed after {expected_datetime}")
with transaction.atomic():
mse = self.check_executed_after(schedulable,
expected_datetime, relative_delta, utc_now)
else:
raise Exception(f"Schedule '{schedule}' is not a cron or rate expression")
if mse:
self.send_alerts(mse)
return mse
def check_executed_at(self, schedulable: BoundSchedulable,
expected_datetime: datetime) -> Optional[MissingScheduledExecution]:
model_name = self.model_name()
mse = self.missing_scheduled_executions_of(schedulable).filter(
expected_execution_at=expected_datetime).first()
if mse:
logger.info(
f"check_executed_at(): Found existing matching missing scheduled execution {mse.uuid}, not alerting")
return None
logger.info('check_executed_at(): No existing matching missing scheduled execution found')
pe = self.executions_of(schedulable).filter(
started_at__gte=expected_datetime - timedelta(seconds=MAX_EARLY_STARTUP_SECONDS),
started_at__lte=expected_datetime + timedelta(seconds=MAX_STARTUP_SECONDS)).first()
if pe:
logger.info(
f"check_execution_on_time(): Found execution of {model_name} {schedulable.uuid} within the expected time window")
return None
logger.info(
f"check_executed_at(): No execution of {model_name} {schedulable.uuid} found within the expected time window")
if schedulable.max_concurrency and \
(schedulable.max_concurrency > 0):
concurrency = schedulable.concurrency_at(expected_datetime)
if concurrency >= schedulable.max_concurrency:
logger.info(
f"check_executed_at(): {concurrency} concurrent executions of execution of {model_name} {schedulable.uuid} during the expected execution time prevented execution")
return None
mse = self.make_missing_scheduled_execution(schedulable, expected_datetime)
mse.save()
return mse
def check_executed_after(self, schedulable: BoundSchedulable,
expected_datetime: datetime, relative_delta: relativedelta,
utc_now: datetime):
model_name = self.model_name()
mse = self.missing_scheduled_executions_of(schedulable).order_by('-expected_execution_at').first()
if mse:
next_expected_execution_at = mse.expected_execution_at + relative_delta
if next_expected_execution_at >= expected_datetime:
logger.info(
f"check_executed_after(): Found existing missing scheduled execution {mse.uuid} expected at {mse.expected_execution_at}, next expected at {next_expected_execution_at}, not alerting")
return None
else:
logger.info(
f"check_executed_after(): No existing missing scheduled execution instances for {model_name} {schedulable.uuid}")
pe = self.executions_of(schedulable).filter(
started_at__gte=expected_datetime - timedelta(seconds=MAX_EARLY_STARTUP_SECONDS),
started_at__lte=utc_now).first()
if pe:
logger.info(
f"check_executed_after(): Found execution of {model_name} {schedulable.uuid} after the expected time")
return None
logger.info(
f"check_executed_after(): No execution of {model_name} {schedulable.uuid} found after expected time")
if schedulable.max_concurrency and \
(schedulable.max_concurrency > 0):
concurrency = schedulable.concurrency_at(expected_datetime)
if concurrency >= schedulable.max_concurrency:
logger.info(
f"check_executed_after(): {concurrency} concurrent executions of execution of {model_name} {schedulable.uuid} during the expected execution time prevent execution")
return None
mse = self.make_missing_scheduled_execution(schedulable, expected_datetime)
mse.save()
return mse
@staticmethod
def make_relative_delta(n: int, time_unit: str) -> relativedelta:
if time_unit == 'second':
return relativedelta(seconds=n)
if time_unit == 'minute':
return relativedelta(minutes=n)
if time_unit == 'hour':
return relativedelta(hours=n)
if time_unit == 'day':
return relativedelta(days=n)
if time_unit == 'month':
return relativedelta(months=n)
if time_unit == 'year':
return relativedelta(years=n)
raise Exception(f"Unknown time unit '{time_unit}'")
def send_alerts(self, mse) -> None:
details = self.missing_scheduled_execution_to_details(mse, context_with_request())
for am in mse.schedulable_instance.alert_methods.filter(
enabled=True).exclude(error_severity_on_missing_execution='').all():
severity = am.error_severity_on_missing_execution
mspea = self.make_missing_execution_alert(mse, am)
mspea.save()
epoch_minutes = divmod(mse.expected_execution_at.timestamp(), 60)[0]
grouping_key = f"missing_scheduled_{self.model_name().replace(' ', '_')}-{mse.schedulable_instance.uuid}-{epoch_minutes}"
try:
result = am.send(details=details, severity=severity,
summary_template=self.alert_summary_template(),
grouping_key=grouping_key)
mspea.send_result = result
mspea.send_status = AlertSendStatus.SUCCEEDED
mspea.completed_at = timezone.now()
except Exception as ex:
logger.exception(f"Failed to send alert for missing execution of {mse.schedulable_instance.uuid}")
mspea.send_status = AlertSendStatus.FAILED
mspea.error_message = str(ex)[:Alert.MAX_ERROR_MESSAGE_LENGTH]
mspea.save()
@abstractmethod
def model_name(self) -> str:
pass
@abstractmethod
def manager(self):
pass
@abstractmethod
def missing_scheduled_executions_of(self, schedulable: BoundSchedulable):
pass
@abstractmethod
def executions_of(self, schedulable: BoundSchedulable):
pass
@abstractmethod
def make_missing_scheduled_execution(self, schedulable: BoundSchedulable,
expected_execution_at: datetime) -> MissingScheduledExecution:
pass
@abstractmethod
def missing_scheduled_execution_to_details(self,
mse: MissingScheduledExecution, context) -> dict:
pass
@abstractmethod
def make_missing_execution_alert(self, mse: MissingScheduledExecution,
alert_method: AlertMethod) -> Alert:
pass
@abstractmethod
def alert_summary_template(self) -> str:
pass
|
[
"processes.models.Schedulable.CRON_REGEX.match",
"django.utils.timezone.now",
"processes.models.Schedulable.RATE_REGEX.match",
"crontab.CronTab",
"datetime.timedelta",
"typing.TypeVar",
"django.db.transaction.atomic",
"processes.common.request_helpers.context_with_request",
"logging.getLogger"
] |
[((660, 687), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (677, 687), False, 'import logging\n'), ((712, 758), 'typing.TypeVar', 'TypeVar', (['"""BoundSchedulable"""'], {'bound': 'Schedulable'}), "('BoundSchedulable', bound=Schedulable)\n", (719, 758), False, 'from typing import Generic, Optional, TypeVar\n'), ((1757, 1795), 'processes.models.Schedulable.CRON_REGEX.match', 'Schedulable.CRON_REGEX.match', (['schedule'], {}), '(schedule)\n', (1785, 1795), False, 'from processes.models import Alert, AlertSendStatus, AlertMethod, MissingScheduledExecution, Schedulable\n'), ((2258, 2272), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (2270, 2272), False, 'from django.utils import timezone\n'), ((3758, 3796), 'processes.models.Schedulable.RATE_REGEX.match', 'Schedulable.RATE_REGEX.match', (['schedule'], {}), '(schedule)\n', (3786, 3796), False, 'from processes.models import Alert, AlertSendStatus, AlertMethod, MissingScheduledExecution, Schedulable\n'), ((9722, 9744), 'processes.common.request_helpers.context_with_request', 'context_with_request', ([], {}), '()\n', (9742, 9744), False, 'from processes.common.request_helpers import context_with_request\n'), ((2071, 2089), 'crontab.CronTab', 'CronTab', (['cron_expr'], {}), '(cron_expr)\n', (2078, 2089), False, 'from crontab import CronTab\n'), ((3626, 3646), 'django.db.transaction.atomic', 'transaction.atomic', ([], {}), '()\n', (3644, 3646), False, 'from django.db import transaction\n'), ((4172, 4186), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (4184, 4186), False, 'from django.utils import timezone\n'), ((10652, 10666), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (10664, 10666), False, 'from django.utils import timezone\n'), ((4771, 4791), 'django.db.transaction.atomic', 'transaction.atomic', ([], {}), '()\n', (4789, 4791), False, 'from django.db import transaction\n'), ((3012, 3042), 'datetime.timedelta', 'timedelta', ([], {'microseconds': '(500000)'}), '(microseconds=500000)\n', (3021, 3042), False, 'from datetime import datetime, timedelta\n'), ((2960, 3009), 'datetime.timedelta', 'timedelta', ([], {'seconds': 'previous_execution_seconds_ago'}), '(seconds=previous_execution_seconds_ago)\n', (2969, 3009), False, 'from datetime import datetime, timedelta\n'), ((5834, 5878), 'datetime.timedelta', 'timedelta', ([], {'seconds': 'MAX_EARLY_STARTUP_SECONDS'}), '(seconds=MAX_EARLY_STARTUP_SECONDS)\n', (5843, 5878), False, 'from datetime import datetime, timedelta\n'), ((5929, 5967), 'datetime.timedelta', 'timedelta', ([], {'seconds': 'MAX_STARTUP_SECONDS'}), '(seconds=MAX_STARTUP_SECONDS)\n', (5938, 5967), False, 'from datetime import datetime, timedelta\n'), ((7963, 8007), 'datetime.timedelta', 'timedelta', ([], {'seconds': 'MAX_EARLY_STARTUP_SECONDS'}), '(seconds=MAX_EARLY_STARTUP_SECONDS)\n', (7972, 8007), False, 'from datetime import datetime, timedelta\n')]
|
import liblo
import time
addresses = [liblo.Address("192.168.1.3","2222"),liblo.Address("192.168.1.4","2222"),liblo.Address("192.168.1.5","2222"),liblo.Address("192.168.1.6","2222"),liblo.Address("192.168.1.7","2222"),liblo.Address("192.168.1.8","2222"),liblo.Address("192.168.1.9","2222"),liblo.Address("192.168.1.10","2222"),liblo.Address("192.168.1.11","2222"),liblo.Address("192.168.1.12","2222"),liblo.Address("192.168.1.13","2222"),liblo.Address("192.168.1.14","2222"),liblo.Address("192.168.1.15","2222"),liblo.Address("192.168.1.16","2222"),liblo.Address("192.168.1.17","2222")]
r=0
g=0
b=0
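# send the three color floats (r, g, b) to every OSC address defined above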
for address in addresses:
liblo.send(address,'22',('f', r),('f', g),('f', b))
|
[
"liblo.send",
"liblo.Address"
] |
[((42, 78), 'liblo.Address', 'liblo.Address', (['"""192.168.1.3"""', '"""2222"""'], {}), "('192.168.1.3', '2222')\n", (55, 78), False, 'import liblo\n'), ((78, 114), 'liblo.Address', 'liblo.Address', (['"""192.168.1.4"""', '"""2222"""'], {}), "('192.168.1.4', '2222')\n", (91, 114), False, 'import liblo\n'), ((114, 150), 'liblo.Address', 'liblo.Address', (['"""192.168.1.5"""', '"""2222"""'], {}), "('192.168.1.5', '2222')\n", (127, 150), False, 'import liblo\n'), ((150, 186), 'liblo.Address', 'liblo.Address', (['"""192.168.1.6"""', '"""2222"""'], {}), "('192.168.1.6', '2222')\n", (163, 186), False, 'import liblo\n'), ((186, 222), 'liblo.Address', 'liblo.Address', (['"""192.168.1.7"""', '"""2222"""'], {}), "('192.168.1.7', '2222')\n", (199, 222), False, 'import liblo\n'), ((222, 258), 'liblo.Address', 'liblo.Address', (['"""192.168.1.8"""', '"""2222"""'], {}), "('192.168.1.8', '2222')\n", (235, 258), False, 'import liblo\n'), ((258, 294), 'liblo.Address', 'liblo.Address', (['"""192.168.1.9"""', '"""2222"""'], {}), "('192.168.1.9', '2222')\n", (271, 294), False, 'import liblo\n'), ((294, 331), 'liblo.Address', 'liblo.Address', (['"""192.168.1.10"""', '"""2222"""'], {}), "('192.168.1.10', '2222')\n", (307, 331), False, 'import liblo\n'), ((331, 368), 'liblo.Address', 'liblo.Address', (['"""192.168.1.11"""', '"""2222"""'], {}), "('192.168.1.11', '2222')\n", (344, 368), False, 'import liblo\n'), ((368, 405), 'liblo.Address', 'liblo.Address', (['"""192.168.1.12"""', '"""2222"""'], {}), "('192.168.1.12', '2222')\n", (381, 405), False, 'import liblo\n'), ((405, 442), 'liblo.Address', 'liblo.Address', (['"""192.168.1.13"""', '"""2222"""'], {}), "('192.168.1.13', '2222')\n", (418, 442), False, 'import liblo\n'), ((442, 479), 'liblo.Address', 'liblo.Address', (['"""192.168.1.14"""', '"""2222"""'], {}), "('192.168.1.14', '2222')\n", (455, 479), False, 'import liblo\n'), ((479, 516), 'liblo.Address', 'liblo.Address', (['"""192.168.1.15"""', '"""2222"""'], {}), "('192.168.1.15', '2222')\n", (492, 516), False, 'import liblo\n'), ((516, 553), 'liblo.Address', 'liblo.Address', (['"""192.168.1.16"""', '"""2222"""'], {}), "('192.168.1.16', '2222')\n", (529, 553), False, 'import liblo\n'), ((553, 590), 'liblo.Address', 'liblo.Address', (['"""192.168.1.17"""', '"""2222"""'], {}), "('192.168.1.17', '2222')\n", (566, 590), False, 'import liblo\n'), ((642, 697), 'liblo.send', 'liblo.send', (['address', '"""22"""', "('f', r)", "('f', g)", "('f', b)"], {}), "(address, '22', ('f', r), ('f', g), ('f', b))\n", (652, 697), False, 'import liblo\n')]
|
import numpy as np
import abc
class ProbabilityDistribution(abc.ABC):
"""
Class representing the interface for a probability distribution
"""
@abc.abstractmethod
def sample(self, size):
"""
This method must return an array with length "size", sampling the distribution
# Arguments:
size: Size of the sampling
"""
class NormalDistribution(ProbabilityDistribution):
"""
Implements Normal Distribution
# Arguments:
mean: Mean of the normal distribution.
std: Standard deviation of the normal distribution
"""
def __init__(self, mean, std):
self._mean = mean
self._std = std
def sample(self, size):
"""
        This method provides a sample of the given size of a gaussian distribution
# Arguments:
size: size of the sample
# Returns:
Sample of a gaussian distribution of a given size
"""
return np.random.normal(self._mean, self._std, size)
class GaussianMixture(ProbabilityDistribution):
"""
Implements the combination of Normal Distributions
# Arguments:
params: Array of arrays with mean and std for every gaussian distribution.
weights: Array of weights for every distribution with sum 1.
# Example:
```python
# Parameters for two Gaussian
mu_M = 178
mu_F = 162
sigma_M = 7
sigma_F = 7
# Parameters
norm_params = np.array([[mu_M, sigma_M],
[mu_F, sigma_F]])
weights = np.ones(2) / 2.0
# Creating combination of gaussian
distribution = GaussianMixture(norm_params, weights)
```
"""
def __init__(self, params, weights):
self._gaussian_distributions = []
for param in params:
self._gaussian_distributions.append(NormalDistribution(param[0], param[1]))
self._weights = weights
def sample(self, size):
"""
This method provides a sample of the given size of a mixture of gaussian distributions
# Arguments:
size: size of the sample
# Returns:
Sample of a mixture of gaussian distributions of a given size
"""
mixture_idx = np.random.choice(len(self._weights), size=size, replace=True, p=self._weights)
values = []
for i in mixture_idx:
gaussian_distributions = self._gaussian_distributions[i]
values.append(gaussian_distributions.sample(1))
return np.fromiter(values, dtype=np.float64)
|
[
"numpy.random.normal",
"numpy.fromiter"
] |
[((987, 1032), 'numpy.random.normal', 'np.random.normal', (['self._mean', 'self._std', 'size'], {}), '(self._mean, self._std, size)\n', (1003, 1032), True, 'import numpy as np\n'), ((2570, 2607), 'numpy.fromiter', 'np.fromiter', (['values'], {'dtype': 'np.float64'}), '(values, dtype=np.float64)\n', (2581, 2607), True, 'import numpy as np\n')]
|
import tensorflow as tf
import numpy as np
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
import copy
from morphodynamics.landscapes.utils import get_meshgrid
from morphodynamics.landscapes.analysis.get_fields import *
from morphodynamics.landscapes.analysis.sde_forward import *
class Save_Ims():
"""
Save losses during training and the potential as an array
"""
def __init__(self, model, save_dir):
"""
- model: this is the physics-informed neural network (PINN)
- save_dir: where to save the plot and potential to
"""
self.model = model
self.save_dir = save_dir
x_test, y_test = get_meshgrid(model.xlims, model.ylims, model.dims, flatBool = True)
self.x_test , self.y_test = tf.convert_to_tensor(x_test), tf.convert_to_tensor(y_test)
self.fig = plt.figure(figsize = (30, 20))
self.gs = gridspec.GridSpec(nrows = 15, ncols = 17)
def __call__(self):
self._plot_losses()
self._plot_pdfs_getUandD()
self._plot_and_save_U()
#plt.savefig(self.save_dir + 'View_{}_{}.png'.format(self.model.save_append, self.model.idx_save))
#plt.close()
def _setup_ax(self, ax):
ax.set_aspect('equal', adjustable = 'box')
ax.set_xlim(self.model.xlims)
ax.set_ylim(self.model.ylims)
def _plot_losses(self):
"""
Plot how each of the loss terms changes in time
"""
ax = self.fig.add_subplot(self.gs[2:5, :7])
losses = [self.model.data_losses, self.model.BC_losses, self.model.pde_losses, self.model.total_losses, self.model.norm_losses]
labels = ['pdf', 'BC', 'pde', 'total', 'norm']
zipped = zip(losses, labels)
for loss_list, label in zipped:
ax.plot(np.log10(loss_list), label = label)
ax.legend()
def _plot_pdfs_getUandD(self):
"""
Run inference to get the pdf, potential (U) and diffusivity (D)
"""
p_max = 0
D_max = 0
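        # first pass over the test times only to find global color limits (p_max, D_max)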
for idx_t, test_time in enumerate(np.linspace(self.model.tlims[0], self.model.tlims[1], 7)): # test for a range of unseen times
t_test = np.tile(np.array([test_time]), (self.x_test.shape[0], 1))
t_test = tf.convert_to_tensor(t_test)
xyt_test = tf.concat((self.x_test, self.y_test, t_test), axis = 1)
p_out, D_out, U_out = self.model.predict(xyt_test)
D_out = D_out.numpy()
p_max = max(p_max, np.max(p_out))
D_max = max(D_max, np.max(D_out))
for idx_t, test_time in enumerate(np.linspace(self.model.tlims[0], self.model.tlims[1], 7)): # test for a range of unseen times
t_test = np.tile(np.array([test_time]), (self.x_test.shape[0], 1))
t_test = tf.convert_to_tensor(t_test)
xyt_test = tf.concat((self.x_test, self.y_test, t_test), axis = 1)
p_out, D_out, U_out = self.model.predict(xyt_test)
p_out = p_out.numpy()
D_out = D_out.numpy()
U_out = U_out.numpy()
ax_p = self.fig.add_subplot(self.gs[6, idx_t])
p_out[p_out<1e-7] = np.nan
ax_p.scatter(self.x_test, self.y_test, c = np.log10(p_out), vmin = -7, vmax = max(np.log10(p_max), -7))
self._setup_ax(ax_p)
ax_D = self.fig.add_subplot(self.gs[6, 8+idx_t])
ax_D.scatter(self.x_test, self.y_test, c = D_out, vmin = 0, vmax = D_max)
self._setup_ax(ax_D)
for idx_t, arr in enumerate(self.model.pdf_list):
ax = self.fig.add_subplot(self.gs[14, idx_t])
to_log = copy.deepcopy(arr)
to_log[to_log<1e-7] = np.nan
ax.imshow(np.log10(to_log.reshape((200, 200))[::-1, :]))
self.U_out = U_out
def _plot_and_save_U(self):
"""
Plot and save the potential as an array
"""
U = np.reshape(self.U_out, (self.model.dims, self.model.dims))
path = self.save_dir + 'potential.pickle'
dump_pickle(U, path)
ax = self.fig.add_subplot(self.gs[:4, 10:14])
gx, gy = np.gradient(U)
ax.imshow(np.log10(np.sqrt(gx**2 + gy**2))[::-1, :])
ax.set_aspect('equal', adjustable = 'box')
|
[
"copy.deepcopy",
"tensorflow.convert_to_tensor",
"tensorflow.concat",
"numpy.log10",
"matplotlib.pyplot.figure",
"matplotlib.use",
"numpy.array",
"morphodynamics.landscapes.utils.get_meshgrid",
"numpy.reshape",
"numpy.linspace",
"numpy.max",
"matplotlib.gridspec.GridSpec",
"numpy.gradient",
"numpy.sqrt"
] |
[((61, 82), 'matplotlib.use', 'matplotlib.use', (['"""agg"""'], {}), "('agg')\n", (75, 82), False, 'import matplotlib\n'), ((725, 790), 'morphodynamics.landscapes.utils.get_meshgrid', 'get_meshgrid', (['model.xlims', 'model.ylims', 'model.dims'], {'flatBool': '(True)'}), '(model.xlims, model.ylims, model.dims, flatBool=True)\n', (737, 790), False, 'from morphodynamics.landscapes.utils import get_meshgrid\n'), ((908, 936), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(30, 20)'}), '(figsize=(30, 20))\n', (918, 936), True, 'import matplotlib.pyplot as plt\n'), ((957, 994), 'matplotlib.gridspec.GridSpec', 'gridspec.GridSpec', ([], {'nrows': '(15)', 'ncols': '(17)'}), '(nrows=15, ncols=17)\n', (974, 994), True, 'import matplotlib.gridspec as gridspec\n'), ((3971, 4029), 'numpy.reshape', 'np.reshape', (['self.U_out', '(self.model.dims, self.model.dims)'], {}), '(self.U_out, (self.model.dims, self.model.dims))\n', (3981, 4029), True, 'import numpy as np\n'), ((4182, 4196), 'numpy.gradient', 'np.gradient', (['U'], {}), '(U)\n', (4193, 4196), True, 'import numpy as np\n'), ((829, 857), 'tensorflow.convert_to_tensor', 'tf.convert_to_tensor', (['x_test'], {}), '(x_test)\n', (849, 857), True, 'import tensorflow as tf\n'), ((859, 887), 'tensorflow.convert_to_tensor', 'tf.convert_to_tensor', (['y_test'], {}), '(y_test)\n', (879, 887), True, 'import tensorflow as tf\n'), ((2124, 2180), 'numpy.linspace', 'np.linspace', (['self.model.tlims[0]', 'self.model.tlims[1]', '(7)'], {}), '(self.model.tlims[0], self.model.tlims[1], 7)\n', (2135, 2180), True, 'import numpy as np\n'), ((2318, 2346), 'tensorflow.convert_to_tensor', 'tf.convert_to_tensor', (['t_test'], {}), '(t_test)\n', (2338, 2346), True, 'import tensorflow as tf\n'), ((2370, 2423), 'tensorflow.concat', 'tf.concat', (['(self.x_test, self.y_test, t_test)'], {'axis': '(1)'}), '((self.x_test, self.y_test, t_test), axis=1)\n', (2379, 2423), True, 'import tensorflow as tf\n'), ((2659, 2715), 'numpy.linspace', 'np.linspace', (['self.model.tlims[0]', 'self.model.tlims[1]', '(7)'], {}), '(self.model.tlims[0], self.model.tlims[1], 7)\n', (2670, 2715), True, 'import numpy as np\n'), ((2854, 2882), 'tensorflow.convert_to_tensor', 'tf.convert_to_tensor', (['t_test'], {}), '(t_test)\n', (2874, 2882), True, 'import tensorflow as tf\n'), ((2906, 2959), 'tensorflow.concat', 'tf.concat', (['(self.x_test, self.y_test, t_test)'], {'axis': '(1)'}), '((self.x_test, self.y_test, t_test), axis=1)\n', (2915, 2959), True, 'import tensorflow as tf\n'), ((3695, 3713), 'copy.deepcopy', 'copy.deepcopy', (['arr'], {}), '(arr)\n', (3708, 3713), False, 'import copy\n'), ((1857, 1876), 'numpy.log10', 'np.log10', (['loss_list'], {}), '(loss_list)\n', (1865, 1876), True, 'import numpy as np\n'), ((2247, 2268), 'numpy.array', 'np.array', (['[test_time]'], {}), '([test_time])\n', (2255, 2268), True, 'import numpy as np\n'), ((2554, 2567), 'numpy.max', 'np.max', (['p_out'], {}), '(p_out)\n', (2560, 2567), True, 'import numpy as np\n'), ((2600, 2613), 'numpy.max', 'np.max', (['D_out'], {}), '(D_out)\n', (2606, 2613), True, 'import numpy as np\n'), ((2783, 2804), 'numpy.array', 'np.array', (['[test_time]'], {}), '([test_time])\n', (2791, 2804), True, 'import numpy as np\n'), ((3282, 3297), 'numpy.log10', 'np.log10', (['p_out'], {}), '(p_out)\n', (3290, 3297), True, 'import numpy as np\n'), ((4224, 4250), 'numpy.sqrt', 'np.sqrt', (['(gx ** 2 + gy ** 2)'], {}), '(gx ** 2 + gy ** 2)\n', (4231, 4250), True, 'import numpy as np\n'), ((3321, 3336), 'numpy.log10', 'np.log10', 
(['p_max'], {}), '(p_max)\n', (3329, 3336), True, 'import numpy as np\n')]
|
# Copyright 2019 <NAME>. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import unittest
from clispy.macro.system_macro import *
from clispy.package import PackageManager
from clispy.parser import Parser
class SystemMacroUnitTestCase(unittest.TestCase):
def testSystemMacro(self):
# Makes an instance of SystemMacro.
macro = SystemMacro()
# Checks official representation.
self.assertRegex(str(macro), r"#<SYSTEM-MACRO SYSTEM-MACRO \{[0-9A-Z]+\}>")
class BlockSystemMacroUnitTestCase(unittest.TestCase):
def testBlockSystemMacro(self):
# Makes an instance of BlockSystemMacro.
macro = BlockSystemMacro()
# Checks official representation.
self.assertRegex(str(macro), r"#<SYSTEM-MACRO BLOCK \{[0-9A-Z]+\}>")
def testBlockSystemMacro_call(self):
# Makes an instance of BlockSystemMacro.
macro = BlockSystemMacro()
# Checks official representation.
forms = Parser.parse('(NAME (+ 1 2 3))')
forms = macro(
forms,
PackageManager.current_package.env['VARIABLE'],
PackageManager.current_package.env['FUNCTION'],
PackageManager.current_package.env['MACRO']
)
# Checks expanded forms.
self.assertEqual(str(forms), '(BLOCK NAME (PROGN (+ 1 2 3)))')
class FletSystemMacroUnitTestCase(unittest.TestCase):
def testFletSystemMacro(self):
# Makes an instance of FletSystemMacro.
macro = FletSystemMacro()
# Checks official representation.
self.assertRegex(str(macro), r"#<SYSTEM-MACRO FLET \{[0-9A-Z]+\}>")
def testFletSystemMacro_call(self):
# Makes an instance of FletSystemMacro.
macro = FletSystemMacro()
# Checks official representation.
forms = Parser.parse('(((TEST (X) (* X X X))) (TEST 10))')
forms = macro(
forms,
PackageManager.current_package.env['VARIABLE'],
PackageManager.current_package.env['FUNCTION'],
PackageManager.current_package.env['MACRO']
)
# Checks expanded forms.
self.assertEqual(str(forms), '(FLET ((TEST (X) (* X X X))) (PROGN (TEST 10)))')
class IfSystemMacroUnitTestCase(unittest.TestCase):
def testIfSystemMacro(self):
# Makes an instance of IfSystemMacro.
macro = IfSystemMacro()
# Checks official representation.
self.assertRegex(str(macro), r"#<SYSTEM-MACRO IF \{[0-9A-Z]+\}>")
def testIfSystemMacro_call(self):
# Makes an instance of IfSystemMacro.
macro = IfSystemMacro()
# Checks official representation.
self.assertRegex(str(macro), r"#<SYSTEM-MACRO IF \{[0-9A-Z]+\}>")
# Checks official representation.
forms = Parser.parse('((= 1 2) 3)')
forms = macro(
forms,
PackageManager.current_package.env['VARIABLE'],
PackageManager.current_package.env['FUNCTION'],
PackageManager.current_package.env['MACRO']
)
# Checks expanded forms.
self.assertEqual(str(forms), '(IF (= 1 2) 3 NIL)')
class LabelsSystemMacroUnitTestCase(unittest.TestCase):
def testLabelsSystemMacro(self):
# Makes an instance of LabelsSystemMacro.
macro = LabelsSystemMacro()
# Checks official representation.
self.assertRegex(str(macro), r"#<SYSTEM-MACRO LABELS \{[0-9A-Z]+\}>")
def testLabelsSystemMacro_call(self):
# Makes an instance of LabelsSystemMacro.
macro = LabelsSystemMacro()
# Checks official representation.
forms = Parser.parse('(((TEST (X) (* X X X))) (TEST 10))')
forms = macro(
forms,
PackageManager.current_package.env['VARIABLE'],
PackageManager.current_package.env['FUNCTION'],
PackageManager.current_package.env['MACRO']
)
# Checks expanded forms.
self.assertEqual(str(forms), '(LABELS ((TEST (X) (* X X X))) (PROGN (TEST 10)))')
class LetSystemMacroUnitTestCase(unittest.TestCase):
def testLetSystemMacro(self):
# Makes an instance of LetSystemMacro.
macro = LetSystemMacro()
# Checks official representation.
self.assertRegex(str(macro), r"#<SYSTEM-MACRO LET \{[0-9A-Z]+\}>")
def testLetSystemMacro_call(self):
# Makes an instance of LetSystemMacro.
macro = LetSystemMacro()
# Checks official representation.
forms = Parser.parse('(((TEST 10)) (CONS TEST NIL))')
forms = macro(
forms,
PackageManager.current_package.env['VARIABLE'],
PackageManager.current_package.env['FUNCTION'],
PackageManager.current_package.env['MACRO']
)
# Checks expanded forms.
self.assertEqual(str(forms), '(LET ((TEST 10)) (PROGN (CONS TEST NIL)))')
class LetAsterSystemMacroUnitTestCase(unittest.TestCase):
def testLetAsterSystemMacro(self):
# Makes an instance of LetAsterSystemMacro.
macro = LetAsterSystemMacro()
# Checks official representation.
self.assertRegex(str(macro), r"#<SYSTEM-MACRO LET\* \{[0-9A-Z]+\}>")
def testLetAsterSystemMacro_call(self):
# Makes an instance of LetAsterSystemMacro.
macro = LetAsterSystemMacro()
# Checks official representation.
forms = Parser.parse('(((TEST 10)) (CONS TEST NIL))')
forms = macro(
forms,
PackageManager.current_package.env['VARIABLE'],
PackageManager.current_package.env['FUNCTION'],
PackageManager.current_package.env['MACRO']
)
# Checks expanded forms.
self.assertEqual(str(forms), '(LET* ((TEST 10)) (PROGN (CONS TEST NIL)))')
class QuoteSystemMacroUnitTestCase(unittest.TestCase):
def testQuoteSystemMacro(self):
# Makes an instance of QuoteSystemMacro.
macro = QuoteSystemMacro()
# Checks official representation.
self.assertRegex(str(macro), r"#<SYSTEM-MACRO QUOTE \{[0-9A-Z]+\}>")
def testQuoteSystemMacro_call(self):
# Makes an instance of QuoteSystemMacro.
macro = QuoteSystemMacro()
# Checks official representation.
forms = Parser.parse('(A)')
forms = macro(
forms,
PackageManager.current_package.env['VARIABLE'],
PackageManager.current_package.env['FUNCTION'],
PackageManager.current_package.env['MACRO']
)
# Checks expanded forms.
self.assertEqual(str(forms), '(QUOTE A)')
class LambdaSystemMacroUnitTestCase(unittest.TestCase):
def testLambdaSystemMacro(self):
# Makes an instance of LambdaSystemMacro.
macro = LambdaSystemMacro()
# Checks official representation.
self.assertRegex(str(macro), r"#<SYSTEM-MACRO LAMBDA \{[0-9A-Z]+\}>")
def testLambdaSystemMacro_call(self):
# Makes an instance of LambdaSystemMacro.
macro = LambdaSystemMacro()
# Checks official representation.
forms = Parser.parse('((X) (* X X X))')
forms = macro(
forms,
PackageManager.current_package.env['VARIABLE'],
PackageManager.current_package.env['FUNCTION'],
PackageManager.current_package.env['MACRO']
)
# Checks expanded forms.
self.assertEqual(str(forms), '(LAMBDA (X) (PROGN (* X X X)))')
class DefunSystemMacroUnitTestCase(unittest.TestCase):
def testDefunSystemMacro(self):
# Makes an instance of DefunSystemMacro.
macro = DefunSystemMacro()
# Checks official representation.
self.assertRegex(str(macro), r"#<SYSTEM-MACRO DEFUN \{[0-9A-Z]+\}>")
def testDefunSystemMacro_call(self):
# Makes an instance of DefunSystemMacro.
macro = DefunSystemMacro()
# Checks official representation.
forms = Parser.parse('(NAME (X) (* X X X))')
forms = macro(
forms,
PackageManager.current_package.env['VARIABLE'],
PackageManager.current_package.env['FUNCTION'],
PackageManager.current_package.env['MACRO']
)
# Checks expanded forms.
self.assertEqual(str(forms), '(DEFUN NAME (X) (BLOCK NAME (PROGN (* X X X))))')
class DefmacroSystemMacroUnitTestCase(unittest.TestCase):
def testDefmacroSystemMacro(self):
# Makes an instance of DefmacroSystemMacro.
macro = DefmacroSystemMacro()
# Checks official representation.
self.assertRegex(str(macro), r"#<SYSTEM-MACRO DEFMACRO \{[0-9A-Z]+\}>")
def testDefmacroSystemMacro_call(self):
# Makes an instance of DefunSystemMacro.
macro = DefmacroSystemMacro()
# Checks official representation.
forms = Parser.parse('(NAME (X) (* X X X))')
forms = macro(
forms,
PackageManager.current_package.env['VARIABLE'],
PackageManager.current_package.env['FUNCTION'],
PackageManager.current_package.env['MACRO']
)
# Checks expanded forms.
self.assertEqual(str(forms), '(DEFMACRO NAME (X) (BLOCK NAME (PROGN (* X X X))))')
class BackquoteSystemMacroUnitTestCase(unittest.TestCase):
def testBackquoteSystemMacro(self):
# Makes an instance of BackquoteSystemMacro.
macro = BackquoteSystemMacro()
# Checks official representation.
self.assertRegex(str(macro), r"#<SYSTEM-MACRO BACKQUOTE \{[0-9A-Z]+\}>")
def testBackquoteSystemMacro_call(self):
# Makes an instance of DefunSystemMacro.
macro = BackquoteSystemMacro()
# Checks official representation.
forms = Parser.parse('(((UNQUOTE X) (UNQUOTE-SPLICING Y)))')
forms = macro(
forms,
PackageManager.current_package.env['VARIABLE'],
PackageManager.current_package.env['FUNCTION'],
PackageManager.current_package.env['MACRO']
)
# Checks expanded forms.
self.assertEqual(str(forms), '(CONS X (APPEND Y (QUOTE NIL)))')
|
[
"clispy.parser.Parser.parse"
] |
[((1571, 1603), 'clispy.parser.Parser.parse', 'Parser.parse', (['"""(NAME (+ 1 2 3))"""'], {}), "('(NAME (+ 1 2 3))')\n", (1583, 1603), False, 'from clispy.parser import Parser\n'), ((2411, 2461), 'clispy.parser.Parser.parse', 'Parser.parse', (['"""(((TEST (X) (* X X X))) (TEST 10))"""'], {}), "('(((TEST (X) (* X X X))) (TEST 10))')\n", (2423, 2461), False, 'from clispy.parser import Parser\n'), ((3387, 3414), 'clispy.parser.Parser.parse', 'Parser.parse', (['"""((= 1 2) 3)"""'], {}), "('((= 1 2) 3)')\n", (3399, 3414), False, 'from clispy.parser import Parser\n'), ((4226, 4276), 'clispy.parser.Parser.parse', 'Parser.parse', (['"""(((TEST (X) (* X X X))) (TEST 10))"""'], {}), "('(((TEST (X) (* X X X))) (TEST 10))')\n", (4238, 4276), False, 'from clispy.parser import Parser\n'), ((5095, 5140), 'clispy.parser.Parser.parse', 'Parser.parse', (['"""(((TEST 10)) (CONS TEST NIL))"""'], {}), "('(((TEST 10)) (CONS TEST NIL))')\n", (5107, 5140), False, 'from clispy.parser import Parser\n'), ((5988, 6033), 'clispy.parser.Parser.parse', 'Parser.parse', (['"""(((TEST 10)) (CONS TEST NIL))"""'], {}), "('(((TEST 10)) (CONS TEST NIL))')\n", (6000, 6033), False, 'from clispy.parser import Parser\n'), ((6861, 6880), 'clispy.parser.Parser.parse', 'Parser.parse', (['"""(A)"""'], {}), "('(A)')\n", (6873, 6880), False, 'from clispy.parser import Parser\n'), ((7683, 7714), 'clispy.parser.Parser.parse', 'Parser.parse', (['"""((X) (* X X X))"""'], {}), "('((X) (* X X X))')\n", (7695, 7714), False, 'from clispy.parser import Parser\n'), ((8530, 8566), 'clispy.parser.Parser.parse', 'Parser.parse', (['"""(NAME (X) (* X X X))"""'], {}), "('(NAME (X) (* X X X))')\n", (8542, 8566), False, 'from clispy.parser import Parser\n'), ((9420, 9456), 'clispy.parser.Parser.parse', 'Parser.parse', (['"""(NAME (X) (* X X X))"""'], {}), "('(NAME (X) (* X X X))')\n", (9432, 9456), False, 'from clispy.parser import Parser\n'), ((10320, 10372), 'clispy.parser.Parser.parse', 'Parser.parse', (['"""(((UNQUOTE X) (UNQUOTE-SPLICING Y)))"""'], {}), "('(((UNQUOTE X) (UNQUOTE-SPLICING Y)))')\n", (10332, 10372), False, 'from clispy.parser import Parser\n')]
|
from flask import request, Blueprint
from controllers import timelines
from pkg.warpResponse import warpResponse
timeline = Blueprint('timeline', __name__)
@timeline.route("/", methods=['GET'])
def getAllTimelines():
resp, code = timelines.FindAll()
if resp is not None:
return warpResponse(resp)
else:
return warpResponse(None, code)
@timeline.route("/findOne", methods=['POST'])
def getOneTimelines():
r = request.get_json()
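    # a 'start' key in the JSON body selects the conditional lookup; otherwise fetch a single record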
if 'start' in r:
resp, code = timelines.FindAllWithCond(r)
else:
resp, code = timelines.FindOne(r)
if resp is not None:
return warpResponse(resp)
else:
return warpResponse(None, code)
@timeline.route("/add", methods=['POST'])
def createTimelines():
r = request.get_json()
code = timelines.Create(r)
if code is not None:
return warpResponse(None, code)
else:
return warpResponse(None, code)
@timeline.route("/<id>", methods=['POST'])
def patchTimelines(id):
r = request.get_json()
code = timelines.Patch(id, r)
if code is not None:
return warpResponse(None, code)
else:
return warpResponse(None, code)
|
[
"controllers.timelines.FindAllWithCond",
"flask.Blueprint",
"pkg.warpResponse.warpResponse",
"controllers.timelines.Create",
"controllers.timelines.FindOne",
"controllers.timelines.FindAll",
"controllers.timelines.Patch",
"flask.request.get_json"
] |
[((126, 157), 'flask.Blueprint', 'Blueprint', (['"""timeline"""', '__name__'], {}), "('timeline', __name__)\n", (135, 157), False, 'from flask import request, Blueprint\n'), ((238, 257), 'controllers.timelines.FindAll', 'timelines.FindAll', ([], {}), '()\n', (255, 257), False, 'from controllers import timelines\n'), ((446, 464), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (462, 464), False, 'from flask import request, Blueprint\n'), ((772, 790), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (788, 790), False, 'from flask import request, Blueprint\n'), ((802, 821), 'controllers.timelines.Create', 'timelines.Create', (['r'], {}), '(r)\n', (818, 821), False, 'from controllers import timelines\n'), ((1014, 1032), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (1030, 1032), False, 'from flask import request, Blueprint\n'), ((1044, 1066), 'controllers.timelines.Patch', 'timelines.Patch', (['id', 'r'], {}), '(id, r)\n', (1059, 1066), False, 'from controllers import timelines\n'), ((298, 316), 'pkg.warpResponse.warpResponse', 'warpResponse', (['resp'], {}), '(resp)\n', (310, 316), False, 'from pkg.warpResponse import warpResponse\n'), ((342, 366), 'pkg.warpResponse.warpResponse', 'warpResponse', (['None', 'code'], {}), '(None, code)\n', (354, 366), False, 'from pkg.warpResponse import warpResponse\n'), ((507, 535), 'controllers.timelines.FindAllWithCond', 'timelines.FindAllWithCond', (['r'], {}), '(r)\n', (532, 535), False, 'from controllers import timelines\n'), ((567, 587), 'controllers.timelines.FindOne', 'timelines.FindOne', (['r'], {}), '(r)\n', (584, 587), False, 'from controllers import timelines\n'), ((628, 646), 'pkg.warpResponse.warpResponse', 'warpResponse', (['resp'], {}), '(resp)\n', (640, 646), False, 'from pkg.warpResponse import warpResponse\n'), ((672, 696), 'pkg.warpResponse.warpResponse', 'warpResponse', (['None', 'code'], {}), '(None, code)\n', (684, 696), False, 'from pkg.warpResponse import warpResponse\n'), ((862, 886), 'pkg.warpResponse.warpResponse', 'warpResponse', (['None', 'code'], {}), '(None, code)\n', (874, 886), False, 'from pkg.warpResponse import warpResponse\n'), ((912, 936), 'pkg.warpResponse.warpResponse', 'warpResponse', (['None', 'code'], {}), '(None, code)\n', (924, 936), False, 'from pkg.warpResponse import warpResponse\n'), ((1107, 1131), 'pkg.warpResponse.warpResponse', 'warpResponse', (['None', 'code'], {}), '(None, code)\n', (1119, 1131), False, 'from pkg.warpResponse import warpResponse\n'), ((1157, 1181), 'pkg.warpResponse.warpResponse', 'warpResponse', (['None', 'code'], {}), '(None, code)\n', (1169, 1181), False, 'from pkg.warpResponse import warpResponse\n')]
|
from __future__ import absolute_import, division, print_function
import os
import glob
from glue.logger import logger
from glue.core.data import Data
from glue.config import data_factory
import vaex.hdf5.dataset
from .data import DataVaex
def is_vaex_file(source):
return vaex.hdf5.dataset.Hdf5MemoryMapped.can_open(source)
@data_factory(
label='vaex file or directory',
identifier=is_vaex_file,
priority=1000,
)
def vaex_reader(source):
"""
Read a vaex hdf5 file
"""
if os.path.isdir(source):
arrays = {}
for filename in glob.glob(os.path.join(source, '*')):
if is_vaex_file(filename):
logger.info("Reading vaex data from {0}".format(filename))
ds = vaex.open(filename)
else:
logger.info("Not a vaex file: {0}".format(filename))
# If there are no vaex files, we raise an error, and if there is one
# then we are done!
if len(arrays) == 0:
raise Exception("No vaex files found in directory: {0}".format(source))
elif len(arrays) == 1:
label = list(arrays.keys())[0]
return [Data(array=arrays[label], label=label)]
# We now check whether all the shapes of the vaex files are the same,
# and if so, we merge them into a single file.
labels = sorted(arrays)
ref_shape = arrays[labels[0]].shape
for label in labels[1:]:
if arrays[label].shape != ref_shape:
break
else:
# Since we are here, the shapes of all the vaex files match, so
# we can construct a higher-dimensional array.
# Make sure arrays are sorted while constructing array
array = np.array([arrays[label] for label in labels])
            # We flip the array here since in most cases we expect that the
# scan will start at the top of e.g. the body and move downwards.
array = array[::-1]
return [Data(array=array, label=dicom_label(source))]
# If we are here, the shapes of the vaex files didn't match, so we
# simply return one Data object per vaex file.
return [Data(array=arrays[label], label=label) for label in labels]
else:
ds = vaex.open(source)
data = [DataVaex(ds)]
return data
|
[
"os.path.isdir",
"glue.config.data_factory",
"os.path.join",
"glue.core.data.Data"
] |
[((335, 423), 'glue.config.data_factory', 'data_factory', ([], {'label': '"""vaex file or directory"""', 'identifier': 'is_vaex_file', 'priority': '(1000)'}), "(label='vaex file or directory', identifier=is_vaex_file,\n priority=1000)\n", (347, 423), False, 'from glue.config import data_factory\n'), ((510, 531), 'os.path.isdir', 'os.path.isdir', (['source'], {}), '(source)\n', (523, 531), False, 'import os\n'), ((588, 613), 'os.path.join', 'os.path.join', (['source', '"""*"""'], {}), "(source, '*')\n", (600, 613), False, 'import os\n'), ((2217, 2255), 'glue.core.data.Data', 'Data', ([], {'array': 'arrays[label]', 'label': 'label'}), '(array=arrays[label], label=label)\n', (2221, 2255), False, 'from glue.core.data import Data\n'), ((1172, 1210), 'glue.core.data.Data', 'Data', ([], {'array': 'arrays[label]', 'label': 'label'}), '(array=arrays[label], label=label)\n', (1176, 1210), False, 'from glue.core.data import Data\n')]
|
import numpy as np
import inspect
from scipy.linalg import qr as qr_factorization
from copy import deepcopy
from pyapprox.utilities import cartesian_product, outer_product
from pyapprox.univariate_polynomials.quadrature import gauss_jacobi_pts_wts_1D
from pyapprox.barycentric_interpolation import (
compute_barycentric_weights_1d,
multivariate_barycentric_lagrange_interpolation
)
from pyapprox.models.wrappers import (
evaluate_1darray_function_on_2d_array
)
from pyapprox.utilities import qr_solve
def kronecker_product_2d(matrix1, matrix2):
"""
    TODO: I can store the Kronecker product as a sparse matrix (see scipy.kron)
"""
assert matrix1.shape == matrix2.shape
assert matrix1.ndim == 2
block_num_rows = matrix1.shape[0]
matrix_num_rows = block_num_rows**2
matrix = np.empty((matrix_num_rows, matrix_num_rows), float)
# loop through blocks
start_col = 0
for jj in range(block_num_rows):
start_row = 0
for ii in range(block_num_rows):
matrix[start_row:start_row+block_num_rows,
start_col:start_col+block_num_rows] = \
matrix2*matrix1[ii, jj]
start_row += block_num_rows
start_col += block_num_rows
return matrix
def chebyshev_derivative_matrix(order):
if order == 0:
pts = np.array([1], float)
derivative_matrix = np.array([0], float)
else:
# this is reverse order used by matlab cheb function
pts = -np.cos(np.linspace(0., np.pi, order+1))
scalars = np.ones((order+1), float)
scalars[0] = 2.
scalars[order] = 2.
scalars[1:order+1:2] *= -1
derivative_matrix = np.empty((order+1, order+1), float)
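        # fill entries column by column; the diagonal is corrected afterwards so each row sums to zero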
for ii in range(order+1):
row_sum = 0.
for jj in range(order+1):
if (ii == jj):
denominator = 1.
else:
denominator = pts[ii]-pts[jj]
numerator = scalars[ii] / scalars[jj]
derivative_matrix[ii, jj] = numerator / denominator
row_sum += derivative_matrix[ii, jj]
derivative_matrix[ii, ii] -= row_sum
# I return points and calculate derivatives using reverse order of points
# compared to what is used by Matlab cheb function thus the
# derivative matrix I return will be the negative of the matlab version
return pts, derivative_matrix
class SteadyStateDiffusionEquation1D(object):
"""
solve (a(x)*u_x)_x = f; x in [0,1]; subject to u(0)=a; u(1)=b
"""
def __init__(self):
self.diffusivity = None
self.forcing_function = None
self.bndry_cond = [0., 0.]
self.xlim = [0, 1]
self.adjoint_derivative_matrix = None
self.adjoint_mesh_pts = None
self.num_time_steps = 0
self.time_step_size = None
self.initial_sol = None
self.num_stored_timesteps = 1
self.time_step_method = 'crank-nicholson'
# default qoi functional is integral of solution over entire domain
self.qoi_functional = self.integrate
self.qoi_functional_deriv = lambda x: x*0.+1.
def scale_canonical_pts(self, pts):
return (self.xlim[1]-self.xlim[0])*(pts+1.)/2.+self.xlim[0]
def initialize(self, order, bndry_cond=None, xlim=None):
self.order = order
if xlim is not None:
self.xlim = xlim
if bndry_cond is not None:
self.bndry_cond = bndry_cond
mesh_pts, self.derivative_matrix = chebyshev_derivative_matrix(order)
# scale mesh points to from [-1,1] to [a,b]
self.mesh_pts_1d = self.scale_canonical_pts(mesh_pts)
self.mesh_pts = self.mesh_pts_1d
# scale derivative matrix from [-1,1] to [a,b]
self.derivative_matrix *= 2./(self.xlim[1]-self.xlim[0])
def set_diffusivity(self, func):
assert callable(func)
assert len(inspect.getargspec(func)[0]) == 2
self.diffusivity = func
def set_forcing(self, func):
assert callable(func)
assert len(inspect.getargspec(func)[0]) == 2
self.forcing_function = func
def form_collocation_matrix(self, derivative_matrix, diagonal):
scaled_matrix = np.empty(derivative_matrix.shape)
for i in range(scaled_matrix.shape[0]):
scaled_matrix[i, :] = derivative_matrix[i, :] * diagonal[i]
matrix = np.dot(derivative_matrix, scaled_matrix)
return matrix
def apply_boundary_conditions_to_matrix(self, matrix):
matrix[0, :] = 0
matrix[-1, :] = 0
matrix[0, 0] = 1
matrix[-1, -1] = 1
return matrix
def apply_boundary_conditions_to_rhs(self, rhs):
rhs[0] = self.bndry_cond[0]
rhs[-1] = self.bndry_cond[1]
return rhs
def apply_boundary_conditions(self, matrix, forcing):
assert len(self.bndry_cond) == 2
matrix = self.apply_boundary_conditions_to_matrix(matrix)
forcing = self.apply_boundary_conditions_to_rhs(forcing)
return matrix, forcing
def explicit_runge_kutta(self, rhs, sol, time, time_step_size):
assert callable(rhs)
dt2 = time_step_size/2.
k1 = rhs(time, sol)
k2 = rhs(time+dt2, sol+dt2*k1)
k3 = rhs(time+dt2, sol+dt2*k2)
k4 = rhs(time+time_step_size, sol+time_step_size*k3)
new_sol = sol+time_step_size/6.*(k1+2.*k2+2.*k3+k4)
new_sol[0] = self.bndry_cond[0]
new_sol[-1] = self.bndry_cond[1]
return new_sol
def form_adams_moulton_3rd_order_system(self, matrix, current_sol,
current_forcing, future_forcing,
prev_forcing, prev_sol,
time_step_size):
""" 3rd order Adams-Moultobn method
WARNING: seems to be unstable (at least my implementation)
        y_{n+2} = y_{n+1}+h(c_0y_{n+2}+c_1y_{n+1}+c_2y_{n})
c = (5/12,2/3,-1./12)
"""
dt12 = time_step_size/12.
dt12matrix = dt12*matrix
identity = np.eye(matrix.shape[0])
matrix = identity-5.*dt12matrix
forcing = np.dot(identity+8.*dt12matrix, current_sol)
forcing += dt12*(5.*future_forcing+8.*current_forcing-prev_forcing)
forcing -= np.dot(dt12matrix, prev_sol)
# currently I do not support time varying boundary conditions
return self.apply_boundary_conditions(matrix, forcing)
def get_implicit_time_step_rhs(self, current_sol, time, sample):
future_forcing = self.forcing_function(
self.mesh_pts, time+self.time_step_size, sample)
if (self.time_step_method == "backward-euler"):
forcing = current_sol + self.time_step_size*future_forcing
elif (self.time_step_method == "crank-nicholson"):
identity = np.eye(self.collocation_matrix.shape[0])
forcing = np.dot(
identity+0.5*self.time_step_size*self.collocation_matrix, current_sol)
current_forcing = self.forcing_function(
self.mesh_pts, time, sample)
forcing += 0.5*self.time_step_size*(current_forcing+future_forcing)
else:
raise Exception('incorrect timestepping method specified')
# apply boundary conditions
forcing[0] = self.bndry_cond[0]
forcing[-1] = self.bndry_cond[1]
return forcing
def get_implicit_timestep_matrix_inverse_factors(self, matrix):
identity = np.eye(matrix.shape[0])
if (self.time_step_method == "backward-euler"):
matrix = identity-self.time_step_size*matrix
elif (self.time_step_method == "crank-nicholson"):
matrix = identity-self.time_step_size/2.*matrix
else:
raise Exception('incorrect timestepping method specified')
self.apply_boundary_conditions_to_matrix(matrix)
return qr_factorization(matrix)
def time_step(self, current_sol, time, sample):
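        # advance the solution one step: explicit RK4, otherwise an implicit scheme solved with the stored QR factors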
if self.time_step_method == 'RK4':
def rhs_func(t, u): return np.dot(
self.collocation_matrix, u) +\
self.forcing_function(self.mesh_pts, t, sample)
current_sol = self.explicit_runge_kutta(
rhs_func, current_sol, time, self.time_step_size)
else:
rhs = self.get_implicit_time_step_rhs(current_sol, time, sample)
current_sol = qr_solve(
self.implicit_matrix_factors[0], self.implicit_matrix_factors[1],
rhs[:, None])[:, 0]
#current_sol = np.linalg.solve( matrix, rhs )
return current_sol
def transient_solve(self, sample):
# in future consider supporting time varying diffusivity. This would
# require updating collocation matrix at each time-step
# for now make diffusivity time-independent
# assert self.diffusivity_function.__code__.co_argcount == 3
diffusivity = self.diffusivity_function(self.mesh_pts, sample)
self.collocation_matrix = self.form_collocation_matrix(
self.derivative_matrix, diffusivity)
# consider replacing time = 0 with time = self.initial_time
time = 0.
assert self.forcing_function.__code__.co_argcount == 3
current_forcing = self.forcing_function(self.mesh_pts, time, sample)
if self.num_time_steps > 0:
assert self.initial_sol is not None
assert self.time_step_size is not None
current_sol = self.initial_sol.copy()
assert self.num_stored_timesteps <= self.num_time_steps
# num_time_steps is number of steps taken after initial time
self.times = np.empty((self.num_stored_timesteps), float)
sols = np.empty((self.initial_sol.shape[0],
self.num_stored_timesteps), float)
sol_cntr = 0
sol_storage_stride = self.num_time_steps/self.num_stored_timesteps
if self.time_step_method != 'RK4':
self.implicit_matrix_factors = \
self.get_implicit_timestep_matrix_inverse_factors(
self.collocation_matrix)
for i in range(1, self.num_time_steps+1):
# Construct linear system
current_sol = self.time_step(current_sol, time, sample)
time += self.time_step_size
# Store history if requested
if i % sol_storage_stride == 0:
sols[:, sol_cntr] = current_sol
self.times[sol_cntr] = time
sol_cntr += 1
assert sol_cntr == self.num_stored_timesteps
return sols
else:
current_forcing = self.forcing_function(
self.mesh_pts, time, sample)
matrix, rhs = self.apply_boundary_conditions(
self.collocation_matrix.copy(), current_forcing)
return np.linalg.solve(matrix, rhs)
def solve(self, diffusivity, forcing):
assert diffusivity.ndim == 1
assert forcing.ndim == 1
        # forcing will be overwritten with boundary values so must take a
# deep copy
forcing = forcing.copy()
# we need another copy so that forcing can be used when solving adjoint
self.forcing_vals = forcing.copy()
assert not np.any(diffusivity <= 0.)
self.collocation_matrix = self.form_collocation_matrix(
self.derivative_matrix, diffusivity)
matrix, forcing = self.apply_boundary_conditions(
self.collocation_matrix.copy(), forcing)
solution = np.linalg.solve(matrix, forcing)
# store solution for use with adjoints
self.fwd_solution = solution.copy()
return solution
def run(self, sample):
assert sample.ndim == 1
diffusivity = self.diffusivity_function(self.mesh_pts, sample)
forcing = self.forcing_function(self.mesh_pts, sample)
solution = self.solve(diffusivity, forcing)
return solution
def solve_adjoint(self, sample, order):
"""
Typically with FEM we solve Ax=b and the discrete adjoint equation
is A'y=z. But with collocation this does not work. Instead of
        taking the adjoint of the discrete system as the aforementioned
        approach does, we discretize the continuous adjoint equation, which for
        the elliptic diffusion equation is just Ay=z. That is, the adjoint
of A is A.
"""
if order == self.order:
# used when computing gradient from adjoint solution
matrix = self.collocation_matrix.copy()
else:
# used when computing error estimate from adjoint solution
if self.adjoint_derivative_matrix is None:
adjoint_mesh_pts, self.adjoint_derivative_matrix = \
chebyshev_derivative_matrix(order)
self.adjoint_mesh_pts = self.scale_canonical_pts(
adjoint_mesh_pts)
# scale derivative matrix from [-1,1] to [a,b]
self.adjoint_derivative_matrix *= 2. / \
(self.xlim[1]-self.xlim[0])
diffusivity = self.diffusivity_function(
self.adjoint_mesh_pts, sample)
matrix = self.form_collocation_matrix(
self.adjoint_derivative_matrix, diffusivity)
self.adjoint_collocation_matrix = matrix.copy()
# regardless of whether computing error estimate or
# computing gradient, rhs is always derivative (with respect to the
# solution) of the qoi_functional
qoi_deriv = self.qoi_functional_deriv(self.fwd_solution)
matrix = self.apply_boundary_conditions_to_matrix(matrix)
qoi_deriv = self.apply_adjoint_boundary_conditions_to_rhs(qoi_deriv)
adj_solution = np.linalg.solve(matrix, qoi_deriv)
return adj_solution
def apply_adjoint_boundary_conditions_to_rhs(self, qoi_deriv):
# adjoint always has zero Dirichlet BC
qoi_deriv[0] = 0
qoi_deriv[-1] = 0
return qoi_deriv
def compute_residual(self, matrix, solution, forcing):
matrix, forcing = self.apply_boundary_conditions(matrix, forcing)
return forcing - np.dot(matrix, solution)
def compute_residual_derivative(self, solution, diagonal,
forcing_deriv):
matrix = self.form_collocation_matrix(self.derivative_matrix,
diagonal)
# Todo: check if boundary conditions need to be applied to both
# matrix and forcing_derivs or just matrix. If the former
        # what boundary conditions do I impose on the forcing deriv
matrix = self.apply_boundary_conditions_to_matrix(
matrix)
# the values here are the derivative of the boundary conditions
# with respect to the random parameters. I assume that
# this is always zero
forcing_deriv[0] = 0
forcing_deriv[-1] = 0
return forcing_deriv.squeeze() - np.dot(matrix, solution)
def compute_error_estimate(self, sample):
raise NotImplementedError("Not passing tests")
# must solve adjoint with a higher order grid
adj_solution = self.solve_adjoint(sample, self.order*2)
# interpolate forward solution onto higher-order grid
interp_fwd_solution = self.interpolate(
self.fwd_solution, self.adjoint_mesh_pts)
# compute residual of forward solution using higher-order grid
forcing_vals = self.forcing_function(self.adjoint_mesh_pts,
sample)
# compute residual
residual = self.compute_residual(self.adjoint_collocation_matrix,
interp_fwd_solution, forcing_vals)
# self.plot(interp_fwd_solution+adj_solution,
# plot_mesh_coords=self.adjoint_mesh_pts )
# self.plot(residual, plot_mesh_coords=self.adjoint_mesh_pts,
# color='r')
# pylab.show()
# print self.integrate((adj_solution+interp_fwd_solution )**2)
# print(np.dot(residual, adj_solution )/self.integrate(
# residual * adj_solution)
print('cond', np.linalg.cond(self.adjoint_collocation_matrix))
error_estimate = self.integrate(residual * adj_solution, self.order*2)
return error_estimate
def evaluate_gradient(self, sample):
assert sample.ndim == 1
num_stoch_dims = sample.shape[0]
# qoi_deriv = self.qoi_functional_deriv(self.mesh_pts)
adj_solution = self.solve_adjoint(sample, self.order)
gradient = np.empty((num_stoch_dims), float)
for i in range(num_stoch_dims):
diffusivity_deriv_vals_i = self.diffusivity_derivs_function(
self.mesh_pts.squeeze(), sample, i)
forcing_deriv_vals_i = self.forcing_derivs_function(
self.mesh_pts.squeeze(), sample, i)
residual_deriv = self.compute_residual_derivative(
self.fwd_solution, diffusivity_deriv_vals_i,
forcing_deriv_vals_i)
gradient[i] = self.integrate(residual_deriv * adj_solution)
return gradient
def value(self, sample):
assert sample.ndim == 1
solution = self.run(sample)
qoi = self.qoi_functional(solution)
if np.isscalar(qoi) or qoi.ndim == 0:
qoi = np.array([qoi])
return qoi
def integrate(self, mesh_values, order=None):
if order is None:
order = self.order
# Get Gauss-Legendre rule
gl_pts, gl_wts = gauss_jacobi_pts_wts_1D(order, 0, 0)
# Scale points from [-1,1] to to physical domain
x_range = self.xlim[1]-self.xlim[0]
gl_pts = x_range*(gl_pts+1.)/2.+self.xlim[0]
# Remove factor of 0.5 from weights
gl_wts *= x_range
# Interpolate mesh values onto quadrature nodes
gl_vals = self.interpolate(mesh_values, gl_pts)
# Compute and return integral
return np.dot(gl_vals[:, 0], gl_wts)
def interpolate(self, mesh_values, eval_samples):
if eval_samples.ndim == 1:
eval_samples = eval_samples[None, :]
if mesh_values.ndim == 1:
mesh_values = mesh_values[:, None]
assert mesh_values.ndim == 2
num_dims = eval_samples.shape[0]
abscissa_1d = [self.mesh_pts_1d]*num_dims
weights_1d = [compute_barycentric_weights_1d(xx) for xx in abscissa_1d]
interp_vals = multivariate_barycentric_lagrange_interpolation(
eval_samples,
abscissa_1d,
weights_1d,
mesh_values,
np.arange(num_dims))
return interp_vals
def plot(self, mesh_values, num_plot_pts_1d=None, plot_mesh_coords=None,
color='k'):
import pylab
if num_plot_pts_1d is not None:
# interpolate values onto plot points
plot_mesh = np.linspace(
self.xlim[0], self.xlim[1], num_plot_pts_1d)
interp_vals = self.interpolate(mesh_values, plot_mesh)
pylab.plot(plot_mesh, interp_vals, color+'-')
elif plot_mesh_coords is not None:
assert mesh_values.shape[0] == plot_mesh_coords.squeeze().shape[0]
pylab.plot(plot_mesh_coords, mesh_values, 'o-'+color)
else:
# just plot values on mesh points
pylab.plot(self.mesh_pts, mesh_values, color)
def get_collocation_points(self):
return np.atleast_2d(self.mesh_pts)
def get_derivative_matrix(self):
return self.derivative_matrix
def __call__(self, samples):
return evaluate_1darray_function_on_2d_array(
self.value, samples, None)
class SteadyStateDiffusionEquation2D(SteadyStateDiffusionEquation1D):
"""
solve (a(x)*u_x)_x = f; x in [0,1]x[0,1];
subject to u(0,:)=a(x); u(:,0)=b(x), u(1,:)=c(x), u(:,1)=d(x)
"""
def __init__(self):
self.diffusivity = None
self.forcing_function = None
self.bndry_cond = [0., 0., 0., 0.]
self.xlim = [0, 1]
self.ylim = [0, 1]
self.left_bc, self.right_bc = None, None
self.top_bc, self.bottom_bc = None, None
# default qoi functional is integral of solution over entire domain
self.qoi_functional = self.integrate
self.qoi_functional_deriv = lambda x: x*0.+1.
def determine_boundary_indices(self):
# boundary edges are stored with the following order,
# left, right, bottom, top
self.boundary_edges = [[], [], [], []]
self.boundary_indices = np.empty((4*self.order), int)
        # To avoid double counting, the bottom and upper boundaries
        # will not include the edge indices
cntr = 0
for i in range(self.mesh_pts.shape[1]):
if (self.mesh_pts[0, i] == self.xlim[0]):
self.boundary_indices[cntr] = i
self.boundary_edges[0].append(cntr)
cntr += 1
elif (self.mesh_pts[0, i] == self.xlim[1]):
self.boundary_indices[cntr] = i
self.boundary_edges[1].append(cntr)
cntr += 1
elif (self.mesh_pts[1, i] == self.ylim[0]):
self.boundary_indices[cntr] = i
self.boundary_edges[2].append(cntr)
cntr += 1
elif (self.mesh_pts[1, i] == self.ylim[1]):
self.boundary_indices[cntr] = i
self.boundary_edges[3].append(cntr)
cntr += 1
def initialize(self, order, bndry_cond=None, lims=None):
        # the 1d model transforms the 1d mesh pts from [-1,1] to [a,b]
        # I will assume that the second physical dimension is also [a,b]
super(SteadyStateDiffusionEquation2D, self).initialize(order,
bndry_cond[:2],
lims[:2])
self.ylim = lims[2:]
self.bndry_cond = bndry_cond
self.order = order
self.mesh_pts_1d = self.mesh_pts
self.mesh_pts = cartesian_product([self.mesh_pts_1d]*2, 1)
# note scaling of self.derivative_matrix to [a,b] happens at base class
self.determine_boundary_indices()
# form derivative (in x1-direction) matrix of a 2d polynomial
# this assumes that 2d-mesh_pts varies in x1 faster than x2,
# e.g. points are
# [[x11,x21],[x12,x21],[x13,x12],[x11,x22],[x12,x22],...]
Ident = np.eye(self.order+1)
derivative_matrix_1d = self.get_derivative_matrix()
self.derivative_matrix_1 = np.kron(Ident, derivative_matrix_1d)
# form derivative (in x2-direction) matrix of a 2d polynomial
self.derivative_matrix_2 = np.kron(derivative_matrix_1d, Ident)
def form_collocation_matrix(self, derivative_matrix, diagonal):
scaled_matrix_1 = np.empty(self.derivative_matrix_1.shape)
scaled_matrix_2 = np.empty(self.derivative_matrix_2.shape)
for i in range(scaled_matrix_1.shape[0]):
scaled_matrix_1[i, :] = self.derivative_matrix_1[i, :]*diagonal[i]
scaled_matrix_2[i, :] = self.derivative_matrix_2[i, :]*diagonal[i]
matrix_1 = np.dot(self.derivative_matrix_1, scaled_matrix_1)
matrix_2 = np.dot(self.derivative_matrix_2, scaled_matrix_2)
return matrix_1 + matrix_2
def apply_boundary_conditions_to_matrix(self, matrix):
        # apply default homogeneous zero-value Dirichlet conditions if
# necessary
if self.left_bc is None:
self.left_bc = lambda x: 0.
if self.right_bc is None:
self.right_bc = lambda x: 0.
if self.bottom_bc is None:
self.bottom_bc = lambda x: 0.
if self.top_bc is None:
self.top_bc = lambda x: 0.
# adjust collocation matrix
matrix[self.boundary_indices, :] = 0.
for i in range(self.boundary_indices.shape[0]):
index = self.boundary_indices[i]
matrix[index, index] = 1.
return matrix
def apply_boundary_conditions_to_rhs(self, forcing):
# apply left boundary condition
indices = self.boundary_indices[self.boundary_edges[0]]
forcing[indices] = self.left_bc(self.mesh_pts[0, indices])
# apply right boundary condition
indices = self.boundary_indices[self.boundary_edges[1]]
forcing[indices] = self.right_bc(self.mesh_pts[0, indices])
# apply bottom boundary condition
indices = self.boundary_indices[self.boundary_edges[2]]
forcing[indices] = self.bottom_bc(self.mesh_pts[1, indices])
# apply top boundary condition
indices = self.boundary_indices[self.boundary_edges[3]]
forcing[indices] = self.top_bc(self.mesh_pts[1, indices])
return forcing
def plot(self, mesh_values, num_plot_pts_1d=100):
if num_plot_pts_1d is not None:
# interpolate values onto plot points
def func(x): return self.interpolate(mesh_values, x)
from utilities.visualisation import plot_surface_from_function
plot_surface_from_function(func, [self.xlim[0], self.xlim[1],
self.ylim[0], self.ylim[1]],
num_plot_pts_1d, False)
def apply_adjoint_boundary_conditions_to_rhs(self, qoi_deriv):
# adjoint always has zero Dirichlet BC
# apply left boundary condition
for ii in range(4):
indices = self.boundary_indices[self.boundary_edges[ii]]
qoi_deriv[indices] = 0
return qoi_deriv
def integrate(self, mesh_values, order=None):
if order is None:
order = self.order
# Get Gauss-Legendre rule
gl_pts, gl_wts = gauss_jacobi_pts_wts_1D(order, 0, 0)
pts_1d, wts_1d = [], []
lims = self.xlim+self.ylim
for ii in range(2):
# Scale points from [-1,1] to to physical domain
x_range = lims[2*ii+1]-lims[2*ii]
# Remove factor of 0.5 from weights and shift to [a,b]
wts_1d.append(gl_wts*x_range)
pts_1d.append(x_range*(gl_pts+1.)/2.+lims[2*ii])
# Interpolate mesh values onto quadrature nodes
pts = cartesian_product(pts_1d)
wts = outer_product(wts_1d)
gl_vals = self.interpolate(mesh_values, pts)
# Compute and return integral
return np.dot(gl_vals[:, 0], wts)
|
[
"numpy.empty",
"pyapprox.utilities.qr_solve",
"numpy.ones",
"numpy.linalg.cond",
"pyapprox.barycentric_interpolation.compute_barycentric_weights_1d",
"numpy.arange",
"pyapprox.utilities.cartesian_product",
"numpy.linalg.solve",
"numpy.atleast_2d",
"pylab.plot",
"numpy.kron",
"numpy.linspace",
"utilities.visualisation.plot_surface_from_function",
"scipy.linalg.qr",
"numpy.dot",
"pyapprox.univariate_polynomials.quadrature.gauss_jacobi_pts_wts_1D",
"pyapprox.models.wrappers.evaluate_1darray_function_on_2d_array",
"numpy.isscalar",
"numpy.any",
"numpy.array",
"inspect.getargspec",
"numpy.eye",
"pyapprox.utilities.outer_product"
] |
[((807, 858), 'numpy.empty', 'np.empty', (['(matrix_num_rows, matrix_num_rows)', 'float'], {}), '((matrix_num_rows, matrix_num_rows), float)\n', (815, 858), True, 'import numpy as np\n'), ((1327, 1347), 'numpy.array', 'np.array', (['[1]', 'float'], {}), '([1], float)\n', (1335, 1347), True, 'import numpy as np\n'), ((1376, 1396), 'numpy.array', 'np.array', (['[0]', 'float'], {}), '([0], float)\n', (1384, 1396), True, 'import numpy as np\n'), ((1541, 1566), 'numpy.ones', 'np.ones', (['(order + 1)', 'float'], {}), '(order + 1, float)\n', (1548, 1566), True, 'import numpy as np\n'), ((1682, 1721), 'numpy.empty', 'np.empty', (['(order + 1, order + 1)', 'float'], {}), '((order + 1, order + 1), float)\n', (1690, 1721), True, 'import numpy as np\n'), ((4251, 4284), 'numpy.empty', 'np.empty', (['derivative_matrix.shape'], {}), '(derivative_matrix.shape)\n', (4259, 4284), True, 'import numpy as np\n'), ((4422, 4462), 'numpy.dot', 'np.dot', (['derivative_matrix', 'scaled_matrix'], {}), '(derivative_matrix, scaled_matrix)\n', (4428, 4462), True, 'import numpy as np\n'), ((6117, 6140), 'numpy.eye', 'np.eye', (['matrix.shape[0]'], {}), '(matrix.shape[0])\n', (6123, 6140), True, 'import numpy as np\n'), ((6199, 6247), 'numpy.dot', 'np.dot', (['(identity + 8.0 * dt12matrix)', 'current_sol'], {}), '(identity + 8.0 * dt12matrix, current_sol)\n', (6205, 6247), True, 'import numpy as np\n'), ((6338, 6366), 'numpy.dot', 'np.dot', (['dt12matrix', 'prev_sol'], {}), '(dt12matrix, prev_sol)\n', (6344, 6366), True, 'import numpy as np\n'), ((7538, 7561), 'numpy.eye', 'np.eye', (['matrix.shape[0]'], {}), '(matrix.shape[0])\n', (7544, 7561), True, 'import numpy as np\n'), ((7951, 7975), 'scipy.linalg.qr', 'qr_factorization', (['matrix'], {}), '(matrix)\n', (7967, 7975), True, 'from scipy.linalg import qr as qr_factorization\n'), ((11685, 11717), 'numpy.linalg.solve', 'np.linalg.solve', (['matrix', 'forcing'], {}), '(matrix, forcing)\n', (11700, 11717), True, 'import numpy as np\n'), ((13926, 13960), 'numpy.linalg.solve', 'np.linalg.solve', (['matrix', 'qoi_deriv'], {}), '(matrix, qoi_deriv)\n', (13941, 13960), True, 'import numpy as np\n'), ((16789, 16820), 'numpy.empty', 'np.empty', (['num_stoch_dims', 'float'], {}), '(num_stoch_dims, float)\n', (16797, 16820), True, 'import numpy as np\n'), ((17771, 17807), 'pyapprox.univariate_polynomials.quadrature.gauss_jacobi_pts_wts_1D', 'gauss_jacobi_pts_wts_1D', (['order', '(0)', '(0)'], {}), '(order, 0, 0)\n', (17794, 17807), False, 'from pyapprox.univariate_polynomials.quadrature import gauss_jacobi_pts_wts_1D\n'), ((18197, 18226), 'numpy.dot', 'np.dot', (['gl_vals[:, 0]', 'gl_wts'], {}), '(gl_vals[:, 0], gl_wts)\n', (18203, 18226), True, 'import numpy as np\n'), ((19684, 19712), 'numpy.atleast_2d', 'np.atleast_2d', (['self.mesh_pts'], {}), '(self.mesh_pts)\n', (19697, 19712), True, 'import numpy as np\n'), ((19838, 19902), 'pyapprox.models.wrappers.evaluate_1darray_function_on_2d_array', 'evaluate_1darray_function_on_2d_array', (['self.value', 'samples', 'None'], {}), '(self.value, samples, None)\n', (19875, 19902), False, 'from pyapprox.models.wrappers import evaluate_1darray_function_on_2d_array\n'), ((20802, 20831), 'numpy.empty', 'np.empty', (['(4 * self.order)', 'int'], {}), '(4 * self.order, int)\n', (20810, 20831), True, 'import numpy as np\n'), ((22316, 22360), 'pyapprox.utilities.cartesian_product', 'cartesian_product', (['([self.mesh_pts_1d] * 2)', '(1)'], {}), '([self.mesh_pts_1d] * 2, 1)\n', (22333, 22360), False, 'from pyapprox.utilities import 
cartesian_product, outer_product\n'), ((22729, 22751), 'numpy.eye', 'np.eye', (['(self.order + 1)'], {}), '(self.order + 1)\n', (22735, 22751), True, 'import numpy as np\n'), ((22845, 22881), 'numpy.kron', 'np.kron', (['Ident', 'derivative_matrix_1d'], {}), '(Ident, derivative_matrix_1d)\n', (22852, 22881), True, 'import numpy as np\n'), ((22987, 23023), 'numpy.kron', 'np.kron', (['derivative_matrix_1d', 'Ident'], {}), '(derivative_matrix_1d, Ident)\n', (22994, 23023), True, 'import numpy as np\n'), ((23119, 23159), 'numpy.empty', 'np.empty', (['self.derivative_matrix_1.shape'], {}), '(self.derivative_matrix_1.shape)\n', (23127, 23159), True, 'import numpy as np\n'), ((23186, 23226), 'numpy.empty', 'np.empty', (['self.derivative_matrix_2.shape'], {}), '(self.derivative_matrix_2.shape)\n', (23194, 23226), True, 'import numpy as np\n'), ((23454, 23503), 'numpy.dot', 'np.dot', (['self.derivative_matrix_1', 'scaled_matrix_1'], {}), '(self.derivative_matrix_1, scaled_matrix_1)\n', (23460, 23503), True, 'import numpy as np\n'), ((23523, 23572), 'numpy.dot', 'np.dot', (['self.derivative_matrix_2', 'scaled_matrix_2'], {}), '(self.derivative_matrix_2, scaled_matrix_2)\n', (23529, 23572), True, 'import numpy as np\n'), ((26044, 26080), 'pyapprox.univariate_polynomials.quadrature.gauss_jacobi_pts_wts_1D', 'gauss_jacobi_pts_wts_1D', (['order', '(0)', '(0)'], {}), '(order, 0, 0)\n', (26067, 26080), False, 'from pyapprox.univariate_polynomials.quadrature import gauss_jacobi_pts_wts_1D\n'), ((26523, 26548), 'pyapprox.utilities.cartesian_product', 'cartesian_product', (['pts_1d'], {}), '(pts_1d)\n', (26540, 26548), False, 'from pyapprox.utilities import cartesian_product, outer_product\n'), ((26563, 26584), 'pyapprox.utilities.outer_product', 'outer_product', (['wts_1d'], {}), '(wts_1d)\n', (26576, 26584), False, 'from pyapprox.utilities import cartesian_product, outer_product\n'), ((26691, 26717), 'numpy.dot', 'np.dot', (['gl_vals[:, 0]', 'wts'], {}), '(gl_vals[:, 0], wts)\n', (26697, 26717), True, 'import numpy as np\n'), ((9745, 9787), 'numpy.empty', 'np.empty', (['self.num_stored_timesteps', 'float'], {}), '(self.num_stored_timesteps, float)\n', (9753, 9787), True, 'import numpy as np\n'), ((9809, 9880), 'numpy.empty', 'np.empty', (['(self.initial_sol.shape[0], self.num_stored_timesteps)', 'float'], {}), '((self.initial_sol.shape[0], self.num_stored_timesteps), float)\n', (9817, 9880), True, 'import numpy as np\n'), ((11005, 11033), 'numpy.linalg.solve', 'np.linalg.solve', (['matrix', 'rhs'], {}), '(matrix, rhs)\n', (11020, 11033), True, 'import numpy as np\n'), ((11416, 11442), 'numpy.any', 'np.any', (['(diffusivity <= 0.0)'], {}), '(diffusivity <= 0.0)\n', (11422, 11442), True, 'import numpy as np\n'), ((14339, 14363), 'numpy.dot', 'np.dot', (['matrix', 'solution'], {}), '(matrix, solution)\n', (14345, 14363), True, 'import numpy as np\n'), ((15154, 15178), 'numpy.dot', 'np.dot', (['matrix', 'solution'], {}), '(matrix, solution)\n', (15160, 15178), True, 'import numpy as np\n'), ((16371, 16418), 'numpy.linalg.cond', 'np.linalg.cond', (['self.adjoint_collocation_matrix'], {}), '(self.adjoint_collocation_matrix)\n', (16385, 16418), True, 'import numpy as np\n'), ((17516, 17532), 'numpy.isscalar', 'np.isscalar', (['qoi'], {}), '(qoi)\n', (17527, 17532), True, 'import numpy as np\n'), ((17569, 17584), 'numpy.array', 'np.array', (['[qoi]'], {}), '([qoi])\n', (17577, 17584), True, 'import numpy as np\n'), ((18597, 18631), 'pyapprox.barycentric_interpolation.compute_barycentric_weights_1d', 
'compute_barycentric_weights_1d', (['xx'], {}), '(xx)\n', (18627, 18631), False, 'from pyapprox.barycentric_interpolation import compute_barycentric_weights_1d, multivariate_barycentric_lagrange_interpolation\n'), ((18838, 18857), 'numpy.arange', 'np.arange', (['num_dims'], {}), '(num_dims)\n', (18847, 18857), True, 'import numpy as np\n'), ((19124, 19180), 'numpy.linspace', 'np.linspace', (['self.xlim[0]', 'self.xlim[1]', 'num_plot_pts_1d'], {}), '(self.xlim[0], self.xlim[1], num_plot_pts_1d)\n', (19135, 19180), True, 'import numpy as np\n'), ((19277, 19324), 'pylab.plot', 'pylab.plot', (['plot_mesh', 'interp_vals', "(color + '-')"], {}), "(plot_mesh, interp_vals, color + '-')\n", (19287, 19324), False, 'import pylab\n'), ((25365, 25483), 'utilities.visualisation.plot_surface_from_function', 'plot_surface_from_function', (['func', '[self.xlim[0], self.xlim[1], self.ylim[0], self.ylim[1]]', 'num_plot_pts_1d', '(False)'], {}), '(func, [self.xlim[0], self.xlim[1], self.ylim[0],\n self.ylim[1]], num_plot_pts_1d, False)\n', (25391, 25483), False, 'from utilities.visualisation import plot_surface_from_function\n'), ((1490, 1524), 'numpy.linspace', 'np.linspace', (['(0.0)', 'np.pi', '(order + 1)'], {}), '(0.0, np.pi, order + 1)\n', (1501, 1524), True, 'import numpy as np\n'), ((6888, 6928), 'numpy.eye', 'np.eye', (['self.collocation_matrix.shape[0]'], {}), '(self.collocation_matrix.shape[0])\n', (6894, 6928), True, 'import numpy as np\n'), ((6951, 7038), 'numpy.dot', 'np.dot', (['(identity + 0.5 * self.time_step_size * self.collocation_matrix)', 'current_sol'], {}), '(identity + 0.5 * self.time_step_size * self.collocation_matrix,\n current_sol)\n', (6957, 7038), True, 'import numpy as np\n'), ((8466, 8558), 'pyapprox.utilities.qr_solve', 'qr_solve', (['self.implicit_matrix_factors[0]', 'self.implicit_matrix_factors[1]', 'rhs[:, None]'], {}), '(self.implicit_matrix_factors[0], self.implicit_matrix_factors[1],\n rhs[:, None])\n', (8474, 8558), False, 'from pyapprox.utilities import qr_solve\n'), ((19458, 19513), 'pylab.plot', 'pylab.plot', (['plot_mesh_coords', 'mesh_values', "('o-' + color)"], {}), "(plot_mesh_coords, mesh_values, 'o-' + color)\n", (19468, 19513), False, 'import pylab\n'), ((19584, 19629), 'pylab.plot', 'pylab.plot', (['self.mesh_pts', 'mesh_values', 'color'], {}), '(self.mesh_pts, mesh_values, color)\n', (19594, 19629), False, 'import pylab\n'), ((3938, 3962), 'inspect.getargspec', 'inspect.getargspec', (['func'], {}), '(func)\n', (3956, 3962), False, 'import inspect\n'), ((4087, 4111), 'inspect.getargspec', 'inspect.getargspec', (['func'], {}), '(func)\n', (4105, 4111), False, 'import inspect\n'), ((8111, 8145), 'numpy.dot', 'np.dot', (['self.collocation_matrix', 'u'], {}), '(self.collocation_matrix, u)\n', (8117, 8145), True, 'import numpy as np\n')]
|
"""
Codec for serializing and deserializing blob fields.
See `Blob Fields <https://xrpl.org/serialization.html#blob-fields>`_
"""
from __future__ import annotations
from typing import Type
from xrpl.core.binarycodec.binary_wrappers.binary_parser import BinaryParser
from xrpl.core.binarycodec.exceptions import XRPLBinaryCodecException
from xrpl.core.binarycodec.types.serialized_type import SerializedType
class Blob(SerializedType):
"""
Codec for serializing and deserializing blob fields.
See `Blob Fields <https://xrpl.org/serialization.html#blob-fields>`_
"""
def __init__(self: Blob, buffer: bytes) -> None:
"""Construct a new Blob type from a ``bytes`` value."""
super().__init__(buffer)
@classmethod
def from_parser(cls: Type[Blob], parser: BinaryParser, length_hint: int) -> Blob:
"""
Defines how to read a Blob from a BinaryParser.
Args:
parser: The parser to construct a Blob from.
length_hint: The number of bytes to consume from the parser.
Returns:
The Blob constructed from parser.
"""
return cls(parser.read(length_hint))
@classmethod
def from_value(cls: Type[Blob], value: str) -> Blob:
"""
Create a Blob object from a hex-string.
Args:
value: The hex-encoded string to construct a Blob from.
Returns:
The Blob constructed from value.
Raises:
XRPLBinaryCodecException: If the Blob can't be constructed from value.
"""
if not isinstance(value, str):
raise XRPLBinaryCodecException(
"Invalid type to construct a Blob: expected str, received "
f"{value.__class__.__name__}."
)
if isinstance(value, str):
return cls(bytes.fromhex(value))
raise XRPLBinaryCodecException("Cannot construct Blob from value given")
|
[
"xrpl.core.binarycodec.exceptions.XRPLBinaryCodecException"
] |
[((1883, 1949), 'xrpl.core.binarycodec.exceptions.XRPLBinaryCodecException', 'XRPLBinaryCodecException', (['"""Cannot construct Blob from value given"""'], {}), "('Cannot construct Blob from value given')\n", (1907, 1949), False, 'from xrpl.core.binarycodec.exceptions import XRPLBinaryCodecException\n'), ((1624, 1747), 'xrpl.core.binarycodec.exceptions.XRPLBinaryCodecException', 'XRPLBinaryCodecException', (['f"""Invalid type to construct a Blob: expected str, received {value.__class__.__name__}."""'], {}), "(\n f'Invalid type to construct a Blob: expected str, received {value.__class__.__name__}.'\n )\n", (1648, 1747), False, 'from xrpl.core.binarycodec.exceptions import XRPLBinaryCodecException\n')]
|
from __future__ import unicode_literals
from django.utils.encoding import python_2_unicode_compatible
from django.db import models
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
import time
def user_media_path(instance, filename):
"""
Returns the path to where a user uploaded file is saved.
Has the form: user_<id>/YYYY/MMM/filename
"""
return 'user_{0}/{1}/{2}/{3}'.format(instance.author.id,
time.strftime('%Y'),
time.strftime('%b'),
filename)
@python_2_unicode_compatible
class Article(models.Model):
# Editable fields:
title = models.CharField(max_length=100)
content = models.TextField()
image = models.ImageField(upload_to=user_media_path, null=True, blank=True)
# Non-editable fields:
slug = models.SlugField(max_length=50, unique=True)
published_on = models.DateTimeField(auto_now_add=True)
author = models.ForeignKey(User, on_delete=models.CASCADE)
word_count = models.PositiveIntegerField()
    # read times - slow: 100 wpm, avg: 130 wpm, fast: 160 wpm
read_time_in_mins = models.PositiveIntegerField()
# # `word_count` and `read_time_in_mins` will be (re)assigned
    # # every time the article is saved.
# def save(self, *args, **kwargs):
# self.word_count = len(self.content.split())
# self.read_time_in_mins = self.word_count / 130 # assuming avg reading speed.
# return super(Article, self).save(*args, **kwargs)
def __str__(self):
return self.title
def get_absolute_url(self):
return reverse('blog:read_post', kwargs={'slug': self.slug})
class Meta:
ordering = ['-published_on']
|
[
"django.db.models.TextField",
"django.core.urlresolvers.reverse",
"django.db.models.CharField",
"django.db.models.ForeignKey",
"django.db.models.PositiveIntegerField",
"time.strftime",
"django.db.models.SlugField",
"django.db.models.ImageField",
"django.db.models.DateTimeField"
] |
[((728, 760), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (744, 760), False, 'from django.db import models\n'), ((775, 793), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (791, 793), False, 'from django.db import models\n'), ((806, 873), 'django.db.models.ImageField', 'models.ImageField', ([], {'upload_to': 'user_media_path', 'null': '(True)', 'blank': '(True)'}), '(upload_to=user_media_path, null=True, blank=True)\n', (823, 873), False, 'from django.db import models\n'), ((913, 957), 'django.db.models.SlugField', 'models.SlugField', ([], {'max_length': '(50)', 'unique': '(True)'}), '(max_length=50, unique=True)\n', (929, 957), False, 'from django.db import models\n'), ((977, 1016), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (997, 1016), False, 'from django.db import models\n'), ((1030, 1079), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'on_delete': 'models.CASCADE'}), '(User, on_delete=models.CASCADE)\n', (1047, 1079), False, 'from django.db import models\n'), ((1097, 1126), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {}), '()\n', (1124, 1126), False, 'from django.db import models\n'), ((1210, 1239), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {}), '()\n', (1237, 1239), False, 'from django.db import models\n'), ((501, 520), 'time.strftime', 'time.strftime', (['"""%Y"""'], {}), "('%Y')\n", (514, 520), False, 'import time\n'), ((562, 581), 'time.strftime', 'time.strftime', (['"""%b"""'], {}), "('%b')\n", (575, 581), False, 'import time\n'), ((1686, 1739), 'django.core.urlresolvers.reverse', 'reverse', (['"""blog:read_post"""'], {'kwargs': "{'slug': self.slug}"}), "('blog:read_post', kwargs={'slug': self.slug})\n", (1693, 1739), False, 'from django.core.urlresolvers import reverse\n')]
|
#!/usr/bin/env python3
import gi
# import GStreamer and GLib-Helper classes
gi.require_version('Gtk', '3.0')
gi.require_version('Gst', '1.0')
gi.require_version('GstVideo', '1.0')
gi.require_version('GstNet', '1.0')
from gi.repository import Gtk, Gdk, Gst, GstVideo
import signal
import logging
import sys
import os
sys.path.insert(0, '.')
from vocto.debug import gst_log_messages
# check min-version
minGst = (1, 5)
minPy = (3, 0)
Gst.init([])
if Gst.version() < minGst:
raise Exception('GStreamer version', Gst.version(),
'is too old, at least', minGst, 'is required')
if sys.version_info < minPy:
raise Exception('Python version', sys.version_info,
'is too old, at least', minPy, 'is required')
Gdk.init([])
Gtk.init([])
# select Adwaita:Dark theme
settings = Gtk.Settings.get_default()
settings.set_property("gtk-theme-name", "Adwaita")
settings.set_property("gtk-application-prefer-dark-theme", True) # if you want use dark theme, set second arg to True
# main class
class Voctogui(object):
def __init__(self):
self.log = logging.getLogger('Voctogui')
from lib.args import Args
from lib.ui import Ui
# Load UI file
path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'ui/voctogui.ui')
self.log.info('Loading ui-file from file %s', path)
if os.path.isfile(path):
self.ui = Ui(path)
else:
raise Exception("Can't find any .ui-Files to use in {}".format(path))
#
# search for a .css style sheet file and load it
#
css_provider = Gtk.CssProvider()
context = Gtk.StyleContext()
path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'ui/voctogui.css')
self.log.info('Loading css-file from file %s', path)
if os.path.isfile(path):
css_provider.load_from_path(path)
else:
raise Exception("Can't find .css file '{}'".format(path))
context.add_provider_for_screen(
Gdk.Screen.get_default(),
css_provider,
Gtk.STYLE_PROVIDER_PRIORITY_USER
)
self.ui.setup()
def run(self):
self.log.info('Setting UI visible')
self.ui.show()
try:
self.log.info('Running.')
Gtk.main()
self.log.info('Connection lost. Exiting.')
except KeyboardInterrupt:
self.log.info('Terminated via Ctrl-C')
def quit(self):
self.log.info('Quitting.')
Gtk.main_quit()
# run mainclass
def main():
# parse command-line args
from lib import args
args.parse()
from lib.args import Args
docolor = (Args.color == 'always') \
or (Args.color == 'auto' and sys.stderr.isatty())
from lib.loghandler import LogHandler
handler = LogHandler(docolor, Args.timestamp)
logging.root.addHandler(handler)
levels = { 3 : logging.DEBUG, 2 : logging.INFO, 1 : logging.WARNING, 0 : logging.ERROR }
logging.root.setLevel(levels[Args.verbose])
gst_levels = { 3 : Gst.DebugLevel.DEBUG, 2 : Gst.DebugLevel.INFO, 1 : Gst.DebugLevel.WARNING, 0 : Gst.DebugLevel.ERROR }
gst_log_messages(gst_levels[Args.gstreamer_log])
# make killable by ctrl-c
logging.debug('setting SIGINT handler')
signal.signal(signal.SIGINT, signal.SIG_DFL)
logging.info('Python Version: %s', sys.version_info)
logging.info('GStreamer Version: %s', Gst.version())
logging.debug('loading Config')
from lib import config
config.load()
from lib.config import Config
# establish a synchronus connection to server
import lib.connection as Connection
Connection.establish(Config.getHost())
# fetch config from server
Config.fetchServerConfig()
# Warn when connecting to a non-local core without preview-encoders enabled
# The list-comparison is not complete
# (one could use a local hostname or the local system ip),
# but it's only here to warn that one might be making a mistake
localhosts = ['::1',
'127.0.0.1',
'localhost']
if not Config.getPreviewsEnabled() and Config.getHost() not in localhosts:
logging.warning(
'Connecting to `%s` (which looks like a remote host) '
            'might not work without enabling the preview encoders '
'(set `[previews] enabled=true` on the core) or it might saturate '
'your ethernet link between the two machines.',
Config.getHost()
)
import lib.connection as Connection
import lib.clock as ClockManager
# obtain network-clock
ClockManager.obtainClock(Connection.ip)
# switch connection to nonblocking, event-driven mode
Connection.enterNonblockingMode()
# init main-class and main-loop
# (this binds all event-hander on the Connection)
logging.debug('initializing Voctogui')
voctogui = Voctogui()
# start the Mainloop and show the Window
logging.debug('running Voctogui')
voctogui.run()
if __name__ == '__main__':
try:
main()
except RuntimeError as e:
logging.error(str(e))
sys.exit(1)
|
[
"gi.repository.Gtk.Settings.get_default",
"gi.repository.Gdk.Screen.get_default",
"lib.connection.enterNonblockingMode",
"os.path.isfile",
"gi.repository.Gdk.init",
"gi.repository.Gtk.main_quit",
"lib.config.Config.getHost",
"gi.repository.Gtk.init",
"lib.args.parse",
"sys.stderr.isatty",
"lib.config.Config.fetchServerConfig",
"lib.config.load",
"lib.loghandler.LogHandler",
"gi.repository.Gst.version",
"logging.root.setLevel",
"gi.repository.Gtk.main",
"gi.repository.Gtk.StyleContext",
"lib.ui.Ui",
"gi.repository.Gtk.CssProvider",
"os.path.realpath",
"vocto.debug.gst_log_messages",
"gi.repository.Gst.init",
"signal.signal",
"logging.root.addHandler",
"sys.exit",
"gi.require_version",
"lib.config.Config.getPreviewsEnabled",
"logging.debug",
"lib.clock.obtainClock",
"sys.path.insert",
"logging.info",
"logging.getLogger"
] |
[((76, 108), 'gi.require_version', 'gi.require_version', (['"""Gtk"""', '"""3.0"""'], {}), "('Gtk', '3.0')\n", (94, 108), False, 'import gi\n'), ((109, 141), 'gi.require_version', 'gi.require_version', (['"""Gst"""', '"""1.0"""'], {}), "('Gst', '1.0')\n", (127, 141), False, 'import gi\n'), ((142, 179), 'gi.require_version', 'gi.require_version', (['"""GstVideo"""', '"""1.0"""'], {}), "('GstVideo', '1.0')\n", (160, 179), False, 'import gi\n'), ((180, 215), 'gi.require_version', 'gi.require_version', (['"""GstNet"""', '"""1.0"""'], {}), "('GstNet', '1.0')\n", (198, 215), False, 'import gi\n'), ((318, 341), 'sys.path.insert', 'sys.path.insert', (['(0)', '"""."""'], {}), "(0, '.')\n", (333, 341), False, 'import sys\n'), ((436, 448), 'gi.repository.Gst.init', 'Gst.init', (['[]'], {}), '([])\n', (444, 448), False, 'from gi.repository import Gtk, Gdk, Gst, GstVideo\n'), ((752, 764), 'gi.repository.Gdk.init', 'Gdk.init', (['[]'], {}), '([])\n', (760, 764), False, 'from gi.repository import Gtk, Gdk, Gst, GstVideo\n'), ((765, 777), 'gi.repository.Gtk.init', 'Gtk.init', (['[]'], {}), '([])\n', (773, 777), False, 'from gi.repository import Gtk, Gdk, Gst, GstVideo\n'), ((817, 843), 'gi.repository.Gtk.Settings.get_default', 'Gtk.Settings.get_default', ([], {}), '()\n', (841, 843), False, 'from gi.repository import Gtk, Gdk, Gst, GstVideo\n'), ((452, 465), 'gi.repository.Gst.version', 'Gst.version', ([], {}), '()\n', (463, 465), False, 'from gi.repository import Gtk, Gdk, Gst, GstVideo\n'), ((2657, 2669), 'lib.args.parse', 'args.parse', ([], {}), '()\n', (2667, 2669), False, 'from lib import args\n'), ((2857, 2892), 'lib.loghandler.LogHandler', 'LogHandler', (['docolor', 'Args.timestamp'], {}), '(docolor, Args.timestamp)\n', (2867, 2892), False, 'from lib.loghandler import LogHandler\n'), ((2897, 2929), 'logging.root.addHandler', 'logging.root.addHandler', (['handler'], {}), '(handler)\n', (2920, 2929), False, 'import logging\n'), ((3028, 3071), 'logging.root.setLevel', 'logging.root.setLevel', (['levels[Args.verbose]'], {}), '(levels[Args.verbose])\n', (3049, 3071), False, 'import logging\n'), ((3202, 3250), 'vocto.debug.gst_log_messages', 'gst_log_messages', (['gst_levels[Args.gstreamer_log]'], {}), '(gst_levels[Args.gstreamer_log])\n', (3218, 3250), False, 'from vocto.debug import gst_log_messages\n'), ((3286, 3325), 'logging.debug', 'logging.debug', (['"""setting SIGINT handler"""'], {}), "('setting SIGINT handler')\n", (3299, 3325), False, 'import logging\n'), ((3330, 3374), 'signal.signal', 'signal.signal', (['signal.SIGINT', 'signal.SIG_DFL'], {}), '(signal.SIGINT, signal.SIG_DFL)\n', (3343, 3374), False, 'import signal\n'), ((3380, 3432), 'logging.info', 'logging.info', (['"""Python Version: %s"""', 'sys.version_info'], {}), "('Python Version: %s', sys.version_info)\n", (3392, 3432), False, 'import logging\n'), ((3495, 3526), 'logging.debug', 'logging.debug', (['"""loading Config"""'], {}), "('loading Config')\n", (3508, 3526), False, 'import logging\n'), ((3558, 3571), 'lib.config.load', 'config.load', ([], {}), '()\n', (3569, 3571), False, 'from lib import config\n'), ((3777, 3803), 'lib.config.Config.fetchServerConfig', 'Config.fetchServerConfig', ([], {}), '()\n', (3801, 3803), False, 'from lib.config import Config\n'), ((4674, 4713), 'lib.clock.obtainClock', 'ClockManager.obtainClock', (['Connection.ip'], {}), '(Connection.ip)\n', (4698, 4713), True, 'import lib.clock as ClockManager\n'), ((4777, 4810), 'lib.connection.enterNonblockingMode', 'Connection.enterNonblockingMode', ([], {}), 
'()\n', (4808, 4810), True, 'import lib.connection as Connection\n'), ((4906, 4944), 'logging.debug', 'logging.debug', (['"""initializing Voctogui"""'], {}), "('initializing Voctogui')\n", (4919, 4944), False, 'import logging\n'), ((5021, 5054), 'logging.debug', 'logging.debug', (['"""running Voctogui"""'], {}), "('running Voctogui')\n", (5034, 5054), False, 'import logging\n'), ((517, 530), 'gi.repository.Gst.version', 'Gst.version', ([], {}), '()\n', (528, 530), False, 'from gi.repository import Gtk, Gdk, Gst, GstVideo\n'), ((1097, 1126), 'logging.getLogger', 'logging.getLogger', (['"""Voctogui"""'], {}), "('Voctogui')\n", (1114, 1126), False, 'import logging\n'), ((1377, 1397), 'os.path.isfile', 'os.path.isfile', (['path'], {}), '(path)\n', (1391, 1397), False, 'import os\n'), ((1628, 1645), 'gi.repository.Gtk.CssProvider', 'Gtk.CssProvider', ([], {}), '()\n', (1643, 1645), False, 'from gi.repository import Gtk, Gdk, Gst, GstVideo\n'), ((1664, 1682), 'gi.repository.Gtk.StyleContext', 'Gtk.StyleContext', ([], {}), '()\n', (1680, 1682), False, 'from gi.repository import Gtk, Gdk, Gst, GstVideo\n'), ((1848, 1868), 'os.path.isfile', 'os.path.isfile', (['path'], {}), '(path)\n', (1862, 1868), False, 'import os\n'), ((2552, 2567), 'gi.repository.Gtk.main_quit', 'Gtk.main_quit', ([], {}), '()\n', (2565, 2567), False, 'from gi.repository import Gtk, Gdk, Gst, GstVideo\n'), ((3475, 3488), 'gi.repository.Gst.version', 'Gst.version', ([], {}), '()\n', (3486, 3488), False, 'from gi.repository import Gtk, Gdk, Gst, GstVideo\n'), ((3723, 3739), 'lib.config.Config.getHost', 'Config.getHost', ([], {}), '()\n', (3737, 3739), False, 'from lib.config import Config\n'), ((1421, 1429), 'lib.ui.Ui', 'Ui', (['path'], {}), '(path)\n', (1423, 1429), False, 'from lib.ui import Ui\n'), ((2054, 2078), 'gi.repository.Gdk.Screen.get_default', 'Gdk.Screen.get_default', ([], {}), '()\n', (2076, 2078), False, 'from gi.repository import Gtk, Gdk, Gst, GstVideo\n'), ((2337, 2347), 'gi.repository.Gtk.main', 'Gtk.main', ([], {}), '()\n', (2345, 2347), False, 'from gi.repository import Gtk, Gdk, Gst, GstVideo\n'), ((2779, 2798), 'sys.stderr.isatty', 'sys.stderr.isatty', ([], {}), '()\n', (2796, 2798), False, 'import sys\n'), ((4156, 4183), 'lib.config.Config.getPreviewsEnabled', 'Config.getPreviewsEnabled', ([], {}), '()\n', (4181, 4183), False, 'from lib.config import Config\n'), ((4188, 4204), 'lib.config.Config.getHost', 'Config.getHost', ([], {}), '()\n', (4202, 4204), False, 'from lib.config import Config\n'), ((4537, 4553), 'lib.config.Config.getHost', 'Config.getHost', ([], {}), '()\n', (4551, 4553), False, 'from lib.config import Config\n'), ((5195, 5206), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (5203, 5206), False, 'import sys\n'), ((1259, 1285), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (1275, 1285), False, 'import os\n'), ((1728, 1754), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (1744, 1754), False, 'import os\n')]
|
"""
Setup module.
"""
import re
from os.path import join as pjoin
from setuptools import setup
with open(pjoin('diapason', '__init__.py')) as f:
line = next(l for l in f if l.startswith('__version__'))
version = re.match('__version__ = [\'"]([^\'"]+)[\'"]', line).group(1)
setup(
name='diapason',
version=version,
description='Python module to deal with note sounds.',
long_description='''The diapason Python module can be used to deal with
note sounds: WAV generation, note frequency calculation...''',
url='https://github.com/Soundphy/diapason',
author='<NAME>',
author_email='<EMAIL>',
license='License :: OSI Approved :: BSD License',
classifiers=[
'Development Status :: 1 - Planning',
'Intended Audience :: Education',
'Intended Audience :: End Users/Desktop',
'Topic :: Education',
'Topic :: Artistic Software',
'Topic :: Multimedia :: Sound/Audio :: Sound Synthesis',
'Topic :: Multimedia :: Sound/Audio :: Analysis',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: Implementation :: CPython',
],
keywords='diapason',
packages=['diapason'],
install_requires=['numpy', 'scipy'],
extras_require={
'dev': [],
'test': ['tox'],
'docs': ['sphinx', 'numpydoc', 'sphinx_rtd_theme'],
},
)
|
[
"re.match",
"os.path.join",
"setuptools.setup"
] |
[((284, 1379), 'setuptools.setup', 'setup', ([], {'name': '"""diapason"""', 'version': 'version', 'description': '"""Python module to deal with note sounds."""', 'long_description': '"""The diapason Python module can be used to deal with\n note sounds: WAV generation, note frequency calculation..."""', 'url': '"""https://github.com/Soundphy/diapason"""', 'author': '"""<NAME>"""', 'author_email': '"""<EMAIL>"""', 'license': '"""License :: OSI Approved :: BSD License"""', 'classifiers': "['Development Status :: 1 - Planning', 'Intended Audience :: Education',\n 'Intended Audience :: End Users/Desktop', 'Topic :: Education',\n 'Topic :: Artistic Software',\n 'Topic :: Multimedia :: Sound/Audio :: Sound Synthesis',\n 'Topic :: Multimedia :: Sound/Audio :: Analysis',\n 'License :: OSI Approved :: BSD License',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: Implementation :: CPython']", 'keywords': '"""diapason"""', 'packages': "['diapason']", 'install_requires': "['numpy', 'scipy']", 'extras_require': "{'dev': [], 'test': ['tox'], 'docs': ['sphinx', 'numpydoc', 'sphinx_rtd_theme']\n }"}), '(name=\'diapason\', version=version, description=\n \'Python module to deal with note sounds.\', long_description=\n """The diapason Python module can be used to deal with\n note sounds: WAV generation, note frequency calculation..."""\n , url=\'https://github.com/Soundphy/diapason\', author=\'<NAME>\',\n author_email=\'<EMAIL>\', license=\n \'License :: OSI Approved :: BSD License\', classifiers=[\n \'Development Status :: 1 - Planning\', \'Intended Audience :: Education\',\n \'Intended Audience :: End Users/Desktop\', \'Topic :: Education\',\n \'Topic :: Artistic Software\',\n \'Topic :: Multimedia :: Sound/Audio :: Sound Synthesis\',\n \'Topic :: Multimedia :: Sound/Audio :: Analysis\',\n \'License :: OSI Approved :: BSD License\',\n \'Programming Language :: Python :: 3\',\n \'Programming Language :: Python :: 3.5\',\n \'Programming Language :: Python :: Implementation :: CPython\'],\n keywords=\'diapason\', packages=[\'diapason\'], install_requires=[\'numpy\',\n \'scipy\'], extras_require={\'dev\': [], \'test\': [\'tox\'], \'docs\': [\'sphinx\',\n \'numpydoc\', \'sphinx_rtd_theme\']})\n', (289, 1379), False, 'from setuptools import setup\n'), ((107, 139), 'os.path.join', 'pjoin', (['"""diapason"""', '"""__init__.py"""'], {}), "('diapason', '__init__.py')\n", (112, 139), True, 'from os.path import join as pjoin\n'), ((222, 273), 're.match', 're.match', (['"""__version__ = [\'"]([^\'"]+)[\'"]"""', 'line'], {}), '(\'__version__ = [\\\'"]([^\\\'"]+)[\\\'"]\', line)\n', (230, 273), False, 'import re\n')]
|
#!/usr/bin/env python3
import os
import sys
from Bio import SeqIO
from Bio.Seq import Seq
def insilico_trypsinized(seq) :
segments = []
seg = []
for i in range(len(seq)) :
if seq[i] in ('K','R') :
if i == len(seq)-1 :
seg.append(seq[i])
elif seq[i+1] == 'P' :
seg.append(seq[i])
else :
#found first tryptic site
if len(seg) :
segments.append(seg)
segments.append( [seq[i]] )
seg = []
else :
seg.append(seq[i])
if len(seg) :
segments.append(seg)
segs_len = sum([len(x) for x in segments])
try :
assert(segs_len == len(seq))
except Exception as e :
segged_seq = []
for s in segments :
segged_seq.extend(s)
print >> sys.stderr , "lens:" , len(seq), len(segged_seq)
print >> sys.stderr , "original_seq:"
print >> sys.stderr , "".join(seq)
print >> sys.stderr , "new_seq:"
print >> sys.stderr , "".join(segged_seq)
raise(e)
return segments
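
# --- Hedged worked example (not part of the original script) ---
# Trypsin cuts after K or R unless the next residue is P, so the peptide
# "AKRPCK" splits into 'A', the cleavage site 'K', and the uncut tail 'RPCK'.
assert insilico_trypsinized(list("AKRPCK")) == [['A'], ['K'], ['R', 'P', 'C', 'K']]
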
def tryp_rev(seq):
segments = insilico_trypsinized(seq)
final_seq = []
for s in segments :
if len(s) > 1 :
if s[-1] in ['R', 'K']:
new_s = s[:-1]
new_s.reverse()
new_s.append(s[-1])
else:
new_s = s
new_s.reverse()
else :
new_s = s
final_seq.extend(new_s)
seq.seq = Seq(''.join(final_seq))
seq.id = 'decoy_{}'.format(seq.name)
return seq
def main():
fa = sys.argv[1]
out = os.path.join(os.path.split(fa)[0], 'decoy_{}'.format(os.path.basename(fa)))
print(out)
with open(fa) as fp, open(out, 'w') as wfp:
seqs = SeqIO.parse(fp, 'fasta')
SeqIO.write((tryp_rev(x) for x in seqs), wfp, 'fasta')
if __name__ == '__main__':
main()
|
[
"os.path.split",
"Bio.SeqIO.parse",
"os.path.basename"
] |
[((1840, 1864), 'Bio.SeqIO.parse', 'SeqIO.parse', (['fp', '"""fasta"""'], {}), "(fp, 'fasta')\n", (1851, 1864), False, 'from Bio import SeqIO\n'), ((1699, 1716), 'os.path.split', 'os.path.split', (['fa'], {}), '(fa)\n', (1712, 1716), False, 'import os\n'), ((1739, 1759), 'os.path.basename', 'os.path.basename', (['fa'], {}), '(fa)\n', (1755, 1759), False, 'import os\n')]
|
import re
import unittest
from functools import partial
from pycyqle.builder import dict_build, param_build
from pycyqle.factory import Component, Factory
class FactoryTest(unittest.TestCase):
def test_param_build(self):
factory = param_build(
Factory,
name='bicycle-factory',
table='bicycle',
primary_key='id'
)
self._assert_bicycle_factory(factory)
return factory
def test_dict_build(self):
factory = dict_build(Factory, {
'name': 'bicycle-factory',
'table': 'bicycle',
'primary_key': 'id'
})
self._assert_bicycle_factory(factory)
return factory
def _assert_bicycle_factory(self, factory):
self.assertEqual(factory.name(), 'bicycle-factory')
self.assertEqual(factory.table(), 'bicycle')
self.assertEqual(factory.primary_key(), 'id')
@staticmethod
def _format_query(query):
return re.sub(r'\s?,\s?', ',', ' '.join(query.split()))
def test_query(self):
components = list(map(partial(dict_build, Component), [
{'name': 'tire', 'column': 'tire'},
{'name': 'seat', 'column': 'seat'}
]))
factory = self.test_dict_build()
factory.components(components)
self.assertEqual(len(factory.components()), len(components))
self.assertEqual(
FactoryTest._format_query(factory.query(['tire'], {})),
FactoryTest._format_query("""
SELECT bicycle.id AS "__id__"
, bicycle.tire AS tire
FROM bicycle WHERE 1=1
""")
)
new_components = list(map(partial(dict_build, Component), [
{'name': 'pedal', 'column': 'pedal'}
]))
factory.components(components + new_components)
self.assertEqual(
len(factory.components()),
len(components) + len(new_components)
)
self.assertEqual(
FactoryTest._format_query(
factory.query(['seat', 'pedal'], {
'id0': 42
})
),
FactoryTest._format_query("""
SELECT bicycle.id AS "__id__"
, bicycle.seat AS seat
, bicycle.pedal AS pedal
FROM bicycle WHERE bicycle.id IN (%(id0)s)
""")
)
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"functools.partial",
"pycyqle.builder.dict_build",
"pycyqle.builder.param_build"
] |
[((2523, 2538), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2536, 2538), False, 'import unittest\n'), ((258, 337), 'pycyqle.builder.param_build', 'param_build', (['Factory'], {'name': '"""bicycle-factory"""', 'table': '"""bicycle"""', 'primary_key': '"""id"""'}), "(Factory, name='bicycle-factory', table='bicycle', primary_key='id')\n", (269, 337), False, 'from pycyqle.builder import dict_build, param_build\n'), ((525, 618), 'pycyqle.builder.dict_build', 'dict_build', (['Factory', "{'name': 'bicycle-factory', 'table': 'bicycle', 'primary_key': 'id'}"], {}), "(Factory, {'name': 'bicycle-factory', 'table': 'bicycle',\n 'primary_key': 'id'})\n", (535, 618), False, 'from pycyqle.builder import dict_build, param_build\n'), ((1134, 1164), 'functools.partial', 'partial', (['dict_build', 'Component'], {}), '(dict_build, Component)\n', (1141, 1164), False, 'from functools import partial\n'), ((1766, 1796), 'functools.partial', 'partial', (['dict_build', 'Component'], {}), '(dict_build, Component)\n', (1773, 1796), False, 'from functools import partial\n')]
|
import firebase_admin
from firebase_admin import credentials, messaging
file_path = './pycon-monitoring-workshop-firebase-adminsdk.json'
cred = credentials.Certificate(file_path)
default_app = firebase_admin.initialize_app(cred)
def send_message(recipients, message, dry_run=False):
if not isinstance(recipients, list):
raise TypeError(
"`recipients` expected to be a `list` but `{0}` found".format(
type(recipients)
)
)
    # Build and dispatch one FCM message per registration token.
    responses = []
    for registration_token in recipients:
        msg = messaging.Message(
            data=dict(message=message),
            token=registration_token,
        )
        responses.append(messaging.send(msg, dry_run))
    return responses
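

# --- Hedged usage sketch (not part of the original module) ---
# The registration token below is a placeholder; real tokens come from the
# client SDKs. dry_run=True asks FCM to validate the request without delivering it.
if __name__ == "__main__":
    send_message(["<device-registration-token>"], "hello from the workshop", dry_run=True)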
|
[
"firebase_admin.initialize_app",
"firebase_admin.credentials.Certificate",
"firebase_admin.messaging.send"
] |
[((146, 180), 'firebase_admin.credentials.Certificate', 'credentials.Certificate', (['file_path'], {}), '(file_path)\n', (169, 180), False, 'from firebase_admin import credentials, messaging\n'), ((195, 230), 'firebase_admin.initialize_app', 'firebase_admin.initialize_app', (['cred'], {}), '(cred)\n', (224, 230), False, 'import firebase_admin\n'), ((663, 695), 'firebase_admin.messaging.send', 'messaging.send', (['message', 'dry_run'], {}), '(message, dry_run)\n', (677, 695), False, 'from firebase_admin import credentials, messaging\n')]
|
from influxdb import InfluxDBClient
class Global_Influx():
    Client_all = InfluxDBClient(host='172.16.20.190', port=8086, username='voicecomm', password='<PASSWORD>')
|
[
"influxdb.InfluxDBClient"
] |
[((76, 172), 'influxdb.InfluxDBClient', 'InfluxDBClient', ([], {'host': '"""172.16.20.190"""', 'port': '(8086)', 'username': '"""voicecomm"""', 'password': '"""<PASSWORD>"""'}), "(host='172.16.20.190', port=8086, username='voicecomm',\n password='<PASSWORD>')\n", (90, 172), False, 'from influxdb import InfluxDBClient\n')]
|
# -*- coding: utf-8 -*-
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.image as img
from sklearn.decomposition import TruncatedSVD
"""
打个比方说一张女人图片,我们如何判定这个女人是不是美女呢。我们会看比较关键的一些特征,比如说脸好不好看,胸好不好看,屁股怎么样,腿怎么样,至于衣服上是某个花纹还是手臂上有一个小痔还是,这些特征我们都是不关心的,就可以过滤掉。我们关心的是主成分,也就是对结果贡献系数较大的特征。SVD算法的作用就是来告诉你哪些特征是重要的,有多重要,哪些特征是不重要的,是可以忽略的。
接下来我们使用sklearn提供的TruncatedSVD模块来对美女图片进行压缩。
首先我们使用matplotlib显示一张美女png图片,png图片的格式非常简单,每一个像素有三个维度的颜色值RGB,整个图片就是一个「height x width x 3」维的矩阵。
"""
# 加载png数据矩阵
img_array = img.imread('test2.png')
shape = img_array.shape
print(shape)
# 高度、宽度、RGB通道数=3
height, width, channels = shape[0], shape[1], shape[2]
# 转换成numpy array
img_matrix = np.array(img_array)
# 存储RGB三个通道转换后的数据
planes = []
# RGB三个通道分别处理
for idx in range(channels):
# 提取通道
plane = img_matrix[:, :, idx]
# 转成二维矩阵
plane = np.reshape(plane, (height, width))
# 保留10个主成分
svd = TruncatedSVD(n_components=10)
# 拟合数据,进行矩阵分解,生成特征空间,剔去无关紧要的成分
svd.fit(plane)
# 将输入数据转换到特征空间
new_plane = svd.transform(plane)
# 再将特征空间的数据转换会数据空间
plane = svd.inverse_transform(new_plane)
# 存起来
planes.append(plane)
# 合并三个通道平面数据
img_matrix = np.dstack(planes)
# 显示处理后的图像
plt.imshow(img_matrix)
plt.show()
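
# --- Hedged addition (not part of the original snippet) ---
# Fraction of variance captured by the 10 components of the last channel that
# was fitted; raising n_components trades compression for reconstruction quality.
print(svd.explained_variance_ratio_.sum())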
|
[
"numpy.dstack",
"matplotlib.image.imread",
"matplotlib.pyplot.show",
"sklearn.decomposition.TruncatedSVD",
"matplotlib.pyplot.imshow",
"numpy.array",
"numpy.reshape"
] |
[((511, 534), 'matplotlib.image.imread', 'img.imread', (['"""test2.png"""'], {}), "('test2.png')\n", (521, 534), True, 'import matplotlib.image as img\n'), ((676, 695), 'numpy.array', 'np.array', (['img_array'], {}), '(img_array)\n', (684, 695), True, 'import numpy as np\n'), ((1170, 1187), 'numpy.dstack', 'np.dstack', (['planes'], {}), '(planes)\n', (1179, 1187), True, 'import numpy as np\n'), ((1199, 1221), 'matplotlib.pyplot.imshow', 'plt.imshow', (['img_matrix'], {}), '(img_matrix)\n', (1209, 1221), True, 'import matplotlib.pyplot as plt\n'), ((1223, 1233), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1231, 1233), True, 'import matplotlib.pyplot as plt\n'), ((840, 874), 'numpy.reshape', 'np.reshape', (['plane', '(height, width)'], {}), '(plane, (height, width))\n', (850, 874), True, 'import numpy as np\n'), ((900, 929), 'sklearn.decomposition.TruncatedSVD', 'TruncatedSVD', ([], {'n_components': '(10)'}), '(n_components=10)\n', (912, 929), False, 'from sklearn.decomposition import TruncatedSVD\n')]
|
from scipy.stats import uniform
import numpy as np
import matplotlib.pyplot as plt
from scipy.optimize import minimize
from scipy.stats import exponweib
def calculate_parameters(interarrivals):
sample = np.array(interarrivals)
x = np.linspace(0, 1 - 1 / sample.shape[0], sample.shape[0])
x = x[sample > 0]
sample = sample[sample > 0]
sample = sample[x > 0]
x = x[x > 0]
m, c = minimize(lambda t: np.mean((np.log(sample) - (t[0] * np.log(-np.log(1 - x)) + t[1])) ** 2), [1, 0]).x
return 1 / m, np.exp(-c - m)
def main():
step = 4
    # Draw sample interarrival times from an exponentiated-Weibull distribution;
    # the shape parameters (a=1.0, c=1.5) are placeholders, not fitted values.
    interarrivals = exponweib.rvs(1.0, 1.5, size=10000)
print(calculate_parameters(interarrivals))
hours = []
hour = []
params = []
time = 0
last_time = 0
for arrival in interarrivals:
if time + arrival > last_time + 1000 * 60 * 60 * step:
params.append(calculate_parameters(hour))
hours.append(hour)
hour = []
last_time = time = last_time + 1000 * 60 * 60 * step
time = time + arrival
hour.append(arrival)
fig, ax1 = plt.subplots()
ax2 = plt.twinx()
ax1.plot([p[0] for p in params])
ax2.plot([p[1] for p in params], color='orange')
plt.show()
if __name__ == '__main__':
main()
|
[
"matplotlib.pyplot.show",
"numpy.log",
"matplotlib.pyplot.twinx",
"numpy.array",
"numpy.exp",
"numpy.linspace",
"scipy.stats.exponweib",
"matplotlib.pyplot.subplots"
] |
[((211, 234), 'numpy.array', 'np.array', (['interarrivals'], {}), '(interarrivals)\n', (219, 234), True, 'import numpy as np\n'), ((243, 299), 'numpy.linspace', 'np.linspace', (['(0)', '(1 - 1 / sample.shape[0])', 'sample.shape[0]'], {}), '(0, 1 - 1 / sample.shape[0], sample.shape[0])\n', (254, 299), True, 'import numpy as np\n'), ((591, 612), 'scipy.stats.exponweib', 'exponweib', ([], {'size': '(10000)'}), '(size=10000)\n', (600, 612), False, 'from scipy.stats import exponweib\n'), ((1084, 1098), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (1096, 1098), True, 'import matplotlib.pyplot as plt\n'), ((1110, 1121), 'matplotlib.pyplot.twinx', 'plt.twinx', ([], {}), '()\n', (1119, 1121), True, 'import matplotlib.pyplot as plt\n'), ((1219, 1229), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1227, 1229), True, 'import matplotlib.pyplot as plt\n'), ((529, 543), 'numpy.exp', 'np.exp', (['(-c - m)'], {}), '(-c - m)\n', (535, 543), True, 'import numpy as np\n'), ((437, 451), 'numpy.log', 'np.log', (['sample'], {}), '(sample)\n', (443, 451), True, 'import numpy as np\n'), ((470, 483), 'numpy.log', 'np.log', (['(1 - x)'], {}), '(1 - x)\n', (476, 483), True, 'import numpy as np\n')]
|
# coding: utf-8
import sys
import os
import flask
from flask import redirect,request,render_template_string,render_template
from werkzeug.utils import secure_filename
import importlib
import zipfile
import threading
import random
from datetime import datetime
import pytz
import time
from sqlalchemy import create_engine
import json
from google.cloud import storage
import firebase_admin
from firebase_admin import auth
#Flask_Startup
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
os.chdir(os.path.join("./",os.path.dirname(__file__)))
app = flask.Flask(__name__)
wsgi_util=importlib.import_module("wsgi_util")
#prevent uploading too large file
app.config['MAX_CONTENT_LENGTH'] = 100000000
#unzip CDN contents for fallback
try:
    zipfile.ZipFile(os.path.join("./static/", "bootstrap-4.4.1-dist.zip")).extractall("./static/")
except Exception:
    print("can't unzip CDN contents")
wsgi_util.Resource_Reload()
@app.route("/")
def indexpage_show():
wsgi_util.access_counter+=1
return wsgi_util.render_template_2("index.html",
STATUS_TABLE=wsgi_util.status_table,
access_counter=str(wsgi_util.access_counter)
)
@app.route("/<name>.html")
def html_show(name):
try :return wsgi_util.render_template_2('./'+name+'.html')
except:return redirect('./'),404
@app.route("/<name>.py",methods=['GET', 'POST'])
def py_show(name):
try :return importlib.import_module(name).show(request)
except Exception as e:
return wsgi_util.render_template_2("error.html",
form_error_code="500",form_error_text=str(e)),500
application=app
if __name__ == "__main__":
app.run()
|
[
"os.path.abspath",
"importlib.import_module",
"flask.redirect",
"os.path.dirname",
"flask.Flask",
"os.path.join"
] |
[((580, 601), 'flask.Flask', 'flask.Flask', (['__name__'], {}), '(__name__)\n', (591, 601), False, 'import flask\n'), ((613, 649), 'importlib.import_module', 'importlib.import_module', (['"""wsgi_util"""'], {}), "('wsgi_util')\n", (636, 649), False, 'import importlib\n'), ((489, 514), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (504, 514), False, 'import os\n'), ((545, 570), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (560, 570), False, 'import os\n'), ((786, 839), 'os.path.join', 'os.path.join', (['"""./static/"""', '"""bootstrap-4.4.1-dist.zip"""'], {}), "('./static/', 'bootstrap-4.4.1-dist.zip')\n", (798, 839), False, 'import os\n'), ((1297, 1311), 'flask.redirect', 'redirect', (['"""./"""'], {}), "('./')\n", (1305, 1311), False, 'from flask import redirect, request, render_template_string, render_template\n'), ((1405, 1434), 'importlib.import_module', 'importlib.import_module', (['name'], {}), '(name)\n', (1428, 1434), False, 'import importlib\n')]
|
from pyspark.sql import SparkSession
if __name__ == "__main__":
# input = sample_warc_loc
spark: SparkSession = SparkSession.builder \
.appName('Activity 2.1') \
.getOrCreate()
spark.sparkContext.setLogLevel('ERROR') # avoids printing of info messages
from operator import add
from collections import defaultdict
from typing import Dict
from Chapter02.utilities02_py.helper_python import extract_raw_records, parse_raw_warc
input = "/Users/a/CC-MAIN-20191013195541-20191013222541-00000.warc"
warc_records = extract_raw_records(input, spark).flatMap(lambda record: parse_raw_warc(record))
# print(warc_records.count())
keyed_by_language = warc_records.filter(lambda rec: rec.language != '').map(lambda rec: (rec.language, 1))
language_map: Dict[str, int] = keyed_by_language.reduceByKey(add).collectAsMap()
## language_list = keyed_by_language.reduceByKey(add).collect()
## language_map: Dict[str, int] = defaultdict(int)
## for key, value in language_list:
## ... language_map[key] += value
## language_map
# warc_records.filter(lambda rec: rec.language != '').map(lambda rec: rec.language).countByValue()
sorted_language_list = [(key, language_map[key]) for key in sorted(language_map, key=language_map.get)]
sorted_language_list[0:10] # a subset of 10 of the rarest languages
sorted_language_list[len(sorted_language_list)-1] # most frequent language
uz_records = warc_records.filter(lambda rec: rec.language != '' and rec.language == 'uz').map(lambda rec: rec.target_uri)
print(uz_records.collect())
wikipages = warc_records.filter(lambda rec: 'wikipedia' in rec.target_uri).map(lambda rec: rec.target_uri)
print(wikipages.collect())
untagged = warc_records.filter(lambda record: record.language == '')
print(untagged.count())
|
[
"pyspark.sql.SparkSession.builder.appName",
"Chapter02.utilities02_py.helper_python.extract_raw_records",
"Chapter02.utilities02_py.helper_python.parse_raw_warc"
] |
[((124, 168), 'pyspark.sql.SparkSession.builder.appName', 'SparkSession.builder.appName', (['"""Activity 2.1"""'], {}), "('Activity 2.1')\n", (152, 168), False, 'from pyspark.sql import SparkSession\n'), ((566, 599), 'Chapter02.utilities02_py.helper_python.extract_raw_records', 'extract_raw_records', (['input', 'spark'], {}), '(input, spark)\n', (585, 599), False, 'from Chapter02.utilities02_py.helper_python import extract_raw_records, parse_raw_warc\n'), ((623, 645), 'Chapter02.utilities02_py.helper_python.parse_raw_warc', 'parse_raw_warc', (['record'], {}), '(record)\n', (637, 645), False, 'from Chapter02.utilities02_py.helper_python import extract_raw_records, parse_raw_warc\n')]
|
from abc import ABC, abstractmethod
from crawler_magazine.downloader.asynchronous import AsyncDownloader
class CrawlerInterface(ABC):
def __init__(self, url):
self.url = url
self.downloader = AsyncDownloader()
async def get_page(self, url=None):
return await self.downloader.get(url or self.url)
@abstractmethod
def parse(self, html, *args):
"""Need to be implemented"""
@abstractmethod
def crawl(self):
"""Need to be implemented"""
|
[
"crawler_magazine.downloader.asynchronous.AsyncDownloader"
] |
[((215, 232), 'crawler_magazine.downloader.asynchronous.AsyncDownloader', 'AsyncDownloader', ([], {}), '()\n', (230, 232), False, 'from crawler_magazine.downloader.asynchronous import AsyncDownloader\n')]
|
import pymysql, json
from model.sql import Connection
class DeviceModel():
def create_device(self, allowed, blocked, unknown):
sql = "INSERT INTO `device` (`allowed_devices`, `blocked_devices`, `unknown_devices`) VALUES (%s, %s, %s)"
values = (allowed, blocked, unknown)
conn = Connection()
conn.create(sql, values)
def get_device(self):
sql = "SELECT * FROM device ORDER BY device_id ASC LIMIT 1"
conn = Connection()
fail, events = conn.get(sql)
data = ''
if not fail:
for event in events:
event['created_date'] = event['created_date'].isoformat()
data = {
"length": len(events),
"device": events
}
else:
data = {
"length": 0,
"device": ''
}
return data
|
[
"model.sql.Connection"
] |
[((308, 320), 'model.sql.Connection', 'Connection', ([], {}), '()\n', (318, 320), False, 'from model.sql import Connection\n'), ((465, 477), 'model.sql.Connection', 'Connection', ([], {}), '()\n', (475, 477), False, 'from model.sql import Connection\n')]
|
import os
import subprocess
import uuid
class CfnStackValidation:
@classmethod
def validate_config(cls, config):
"""Validate section of the stack config"""
if "aws" not in config:
raise KeyError("aws is required in a stack definition")
else:
cls._validate_aws_config(config["aws"])
if "location" not in config:
raise KeyError("location is required in a stack definition")
elif not cls._file_exists(os.path.expanduser(config["location"])):
raise FileNotFoundError(f"Template {config['location']} does not exist")
if "template" not in config:
raise KeyError("template is required in a stack definition")
else:
cls._validate_template_config(config["template"])
if "functions" in config:
cls._validate_lambda_config(config["functions"])
@classmethod
def validate_stack(cls, template, tmp_location=f"/tmp/{uuid.uuid1().hex}.yaml"):
"""Validate CFN stack with CFNLINT"""
with open(tmp_location, "w+") as f:
f.write(template)
try:
subprocess.check_output(
f"cfn-lint {tmp_location}".split(" "), stderr=subprocess.STDOUT
)
except subprocess.CalledProcessError:
raise RuntimeError("Your CFN stack is not valid")
os.remove(tmp_location)
@classmethod
def _validate_aws_config(cls, config):
"""Validate section of the stack config"""
if type(config) != dict:
raise ValueError("aws must be a dict")
if "region" not in config:
raise KeyError("aws.region is required in stack definition")
if "account-id" not in config:
raise KeyError("aws.account-id is required in stack definition")
if type(config["region"]) != str:
raise ValueError("aws.region must be a string")
if (
type(config["account-id"]) != str
or len(str(config["account-id"])) != 12
or len([x for x in config["account-id"] if not x.isdigit()])
):
raise ValueError("aws.account-id must be a 12 digit string")
@classmethod
def _validate_template_config(cls, config):
"""Validate section of the stack config"""
if type(config) != dict:
raise ValueError("template must be a dict")
if "name" not in config:
raise KeyError("template.name is required in stack definition")
if type(config["name"]) != str:
raise ValueError("template.name must be a string")
if "parameters" in config and type(config["parameters"]) != dict:
raise ValueError("template.parameters must be a dict")
@classmethod
def _validate_lambda_config(cls, config):
"""Validate section of the stack config"""
for lambd in config:
if "name" not in lambd:
raise KeyError("Lambdas must have a name")
if "location" not in lambd:
raise KeyError("Lambdas must have a location")
if "template-attribute" not in lambd:
raise KeyError("Lambdas must have a template-attribute")
if "bucket" not in lambd:
raise KeyError("Lambdas must have a artifact bucket location")
if (
type(lambd["name"]) != str
or type(lambd["template-attribute"]) != str
or type(lambd["bucket"]) != str
):
raise ValueError(
"One of these parameters is not a string: name, template-attribute, bucket"
)
if not os.path.isdir(lambd["location"]):
raise ValueError("Lambda package is not found")
@classmethod
def _file_exists(cls, file_path):
"""Check if a file exists"""
return os.path.exists(os.path.expanduser(file_path))
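

# --- Hedged example (not part of the original module) ---
# The shape of a stack definition that validate_config() checks: a region and a
# 12-digit account id under "aws", an existing template file under "location",
# a template name, and optional lambda packages under "functions". All values
# below are placeholders; "location" and each "functions[].location" must point
# at real paths before validation will pass.
EXAMPLE_STACK_CONFIG = {
    "aws": {"region": "us-east-1", "account-id": "123456789012"},
    "location": "~/stacks/example-stack.yaml",
    "template": {"name": "example-stack", "parameters": {"Environment": "dev"}},
    "functions": [
        {
            "name": "example-handler",
            "location": "./lambdas/example-handler",
            "template-attribute": "HandlerCodeKey",
            "bucket": "example-artifact-bucket",
        }
    ],
}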
|
[
"os.path.isdir",
"uuid.uuid1",
"os.remove",
"os.path.expanduser"
] |
[((1377, 1400), 'os.remove', 'os.remove', (['tmp_location'], {}), '(tmp_location)\n', (1386, 1400), False, 'import os\n'), ((3903, 3932), 'os.path.expanduser', 'os.path.expanduser', (['file_path'], {}), '(file_path)\n', (3921, 3932), False, 'import os\n'), ((3682, 3714), 'os.path.isdir', 'os.path.isdir', (["lambd['location']"], {}), "(lambd['location'])\n", (3695, 3714), False, 'import os\n'), ((485, 523), 'os.path.expanduser', 'os.path.expanduser', (["config['location']"], {}), "(config['location'])\n", (503, 523), False, 'import os\n'), ((971, 983), 'uuid.uuid1', 'uuid.uuid1', ([], {}), '()\n', (981, 983), False, 'import uuid\n')]
|
from __future__ import print_function
from math import fabs
from math import sqrt
__author__ = ["<NAME>"]
__copyright__ = "Copyright 2020, Design Machine Group - University of Washington"
__license__ = "MIT License"
__email__ = "<EMAIL>"
__version__ = "0.1.0"
def midpoint_point_point(a, b):
return [0.5 * (a[0] + b[0]),
0.5 * (a[1] + b[1]),
0.5 * (a[2] + b[2])]
def geometric_key(xyz, precision=None, sanitize=True):
x, y, z = xyz
if not precision:
precision = '3f'
if precision == 'd':
return '{0},{1},{2}'.format(int(x), int(y), int(z))
if sanitize:
minzero = "-{0:.{1}}".format(0.0, precision)
if "{0:.{1}}".format(x, precision) == minzero:
x = 0.0
if "{0:.{1}}".format(y, precision) == minzero:
y = 0.0
if "{0:.{1}}".format(z, precision) == minzero:
z = 0.0
return '{0:.{3}},{1:.{3}},{2:.{3}}'.format(x, y, z, precision)
def distance_point_point(a, b):
"""Compute the distance bewteen a and b.
Parameters
----------
a : sequence of float
XYZ coordinates of point a.
b : sequence of float
XYZ coordinates of point b.
Returns
-------
float
        Distance between a and b.
Examples
--------
>>> distance_point_point([0.0, 0.0, 0.0], [2.0, 0.0, 0.0])
2.0
See Also
--------
distance_point_point_xy
"""
ab = subtract_vectors(b, a)
return length_vector(ab)
def centroid_points(points):
"""Compute the centroid of a set of points.
Warnings
--------
Duplicate points are **NOT** removed. If there are duplicates in the
sequence, they should be there intentionally.
Parameters
----------
points : sequence
A sequence of XYZ coordinates.
Returns
-------
list
XYZ coordinates of the centroid.
Examples
--------
>>>
"""
p = len(points)
x, y, z = zip(*points)
return [sum(x) / p, sum(y) / p, sum(z) / p]
def subtract_vectors(u, v):
"""Subtract one vector from another.
Parameters
----------
u : list
XYZ components of the first vector.
v : list
XYZ components of the second vector.
Returns
-------
list
The resulting vector.
Examples
--------
>>>
"""
return [a - b for (a, b) in zip(u, v)]
def cross_vectors(u, v):
r"""Compute the cross product of two vectors.
Parameters
----------
u : tuple, list, Vector
XYZ components of the first vector.
v : tuple, list, Vector
XYZ components of the second vector.
Returns
-------
cross : list
The cross product of the two vectors.
Notes
-----
The xyz components of the cross product of two vectors :math:`\mathbf{u}`
and :math:`\mathbf{v}` can be computed as the *minors* of the following matrix:
.. math::
:nowrap:
\begin{bmatrix}
x & y & z \\
u_{x} & u_{y} & u_{z} \\
v_{x} & v_{y} & v_{z}
\end{bmatrix}
Therefore, the cross product can be written as:
.. math::
:nowrap:
\mathbf{u} \times \mathbf{v}
=
\begin{bmatrix}
u_{y} * v_{z} - u_{z} * v_{y} \\
u_{z} * v_{x} - u_{x} * v_{z} \\
u_{x} * v_{y} - u_{y} * v_{x}
\end{bmatrix}
Examples
--------
>>> cross_vectors([1.0, 0.0, 0.0], [0.0, 1.0, 0.0])
[0.0, 0.0, 1.0]
"""
return [u[1] * v[2] - u[2] * v[1],
u[2] * v[0] - u[0] * v[2],
u[0] * v[1] - u[1] * v[0]]
def length_vector(vector):
"""Calculate the length of the vector.
Parameters
----------
vector : list
XYZ components of the vector.
Returns
-------
float
The length of the vector.
Examples
--------
>>> length_vector([2.0, 0.0, 0.0])
2.0
>>> length_vector([1.0, 1.0, 0.0]) == sqrt(2.0)
True
"""
return sqrt(length_vector_sqrd(vector))
def length_vector_sqrd(vector):
"""Compute the squared length of a vector.
Parameters
----------
vector : list
XYZ components of the vector.
Returns
-------
float
The squared length.
Examples
--------
>>> length_vector_sqrd([1.0, 1.0, 0.0])
2.0
"""
return vector[0] ** 2 + vector[1] ** 2 + vector[2] ** 2
def dot_vectors(u, v):
"""Compute the dot product of two vectors.
Parameters
----------
u : tuple, list, Vector
XYZ components of the first vector.
v : tuple, list, Vector
XYZ components of the second vector.
Returns
-------
dot : float
The dot product of the two vectors.
Examples
--------
>>> dot_vectors([1.0, 0, 0], [2.0, 0, 0])
2.0
"""
return sum(a * b for a, b in zip(u, v))
def area_polygon(polygon):
"""Compute the area of a polygon.
Parameters
----------
polygon : sequence
The XYZ coordinates of the vertices/corners of the polygon.
The vertices are assumed to be in order.
The polygon is assumed to be closed:
the first and last vertex in the sequence should not be the same.
Returns
-------
float
The area of the polygon.
Examples
--------
>>>
"""
o = centroid_points(polygon)
a = polygon[-1]
b = polygon[0]
oa = subtract_vectors(a, o)
ob = subtract_vectors(b, o)
n0 = cross_vectors(oa, ob)
area = 0.5 * length_vector(n0)
for i in range(0, len(polygon) - 1):
oa = ob
b = polygon[i + 1]
ob = subtract_vectors(b, o)
n = cross_vectors(oa, ob)
if dot_vectors(n, n0) > 0:
area += 0.5 * length_vector(n)
else:
area -= 0.5 * length_vector(n)
return area
def add_vectors(u, v):
"""Add two vectors.
Parameters
----------
u : sequence of float
XYZ components of the first vector.
v : sequence of float
XYZ components of the second vector.
Returns
-------
list
The resulting vector.
"""
return [a + b for (a, b) in zip(u, v)]
def scale_vector(vector, factor):
"""Scale a vector by a given factor.
Parameters
----------
vector : list, tuple
XYZ components of the vector.
factor : float
The scaling factor.
Returns
-------
list
The scaled vector.
Examples
--------
>>> scale_vector([1.0, 2.0, 3.0], 2.0)
[2.0, 4.0, 6.0]
>>> v = [2.0, 0.0, 0.0]
>>> scale_vector(v, 1 / length_vector(v))
[1.0, 0.0, 0.0]
"""
return [axis * factor for axis in vector]
def intersection_line_plane(line, plane, tol=1e-6):
"""Computes the intersection point of a line and a plane
Parameters
----------
line : tuple
Two points defining the line.
plane : tuple
The base point and normal defining the plane.
tol : float, optional
A tolerance for membership verification.
Default is ``1e-6``.
Returns
-------
point or None
"""
a, b = line
o, n = plane
ab = subtract_vectors(b, a)
cosa = dot_vectors(n, ab)
if fabs(cosa) <= tol:
# if the dot product (cosine of the angle between segment and plane)
# is close to zero the line and the normal are almost perpendicular
# hence there is no intersection
return None
# based on the ratio = -dot_vectors(n, ab) / dot_vectors(n, oa)
# there are three scenarios
# 1) 0.0 < ratio < 1.0: the intersection is between a and b
# 2) ratio < 0.0: the intersection is on the other side of a
# 3) ratio > 1.0: the intersection is on the other side of b
oa = subtract_vectors(a, o)
ratio = - dot_vectors(n, oa) / cosa
ab = scale_vector(ab, ratio)
return add_vectors(a, ab)
def intersection_segment_plane(segment, plane, tol=1e-6):
"""Computes the intersection point of a line segment and a plane
Parameters
----------
segment : tuple
Two points defining the line segment.
plane : tuple
The base point and normal defining the plane.
tol : float, optional
A tolerance for membership verification.
Default is ``1e-6``.
Returns
-------
point or None
"""
a, b = segment
o, n = plane
ab = subtract_vectors(b, a)
cosa = dot_vectors(n, ab)
if fabs(cosa) <= tol:
# if the dot product (cosine of the angle between segment and plane)
# is close to zero the line and the normal are almost perpendicular
# hence there is no intersection
return None
# based on the ratio = -dot_vectors(n, ab) / dot_vectors(n, oa)
# there are three scenarios
# 1) 0.0 < ratio < 1.0: the intersection is between a and b
# 2) ratio < 0.0: the intersection is on the other side of a
# 3) ratio > 1.0: the intersection is on the other side of b
oa = subtract_vectors(a, o)
ratio = - dot_vectors(n, oa) / cosa
    if 0.0 <= ratio <= 1.0:
ab = scale_vector(ab, ratio)
return add_vectors(a, ab)
return None
def normalize_vector(vector):
"""Normalise a given vector.
Parameters
----------
vector : list, tuple
XYZ components of the vector.
Returns
-------
list
The normalized vector.
Examples
--------
>>>
"""
length = length_vector(vector)
if not length:
return vector
return [vector[0] / length, vector[1] / length, vector[2] / length]
def normal_polygon(polygon, unitized=True):
"""Compute the normal of a polygon defined by a sequence of points.
Parameters
----------
polygon : list of list
A list of polygon point coordinates.
Returns
-------
list
The normal vector.
Raises
------
ValueError
If less than three points are provided.
Notes
-----
The points in the list should be unique. For example, the first and last
point in the list should not be the same.
"""
p = len(polygon)
assert p > 2, "At least three points required"
nx = 0
ny = 0
nz = 0
o = centroid_points(polygon)
a = polygon[-1]
oa = subtract_vectors(a, o)
for i in range(p):
b = polygon[i]
ob = subtract_vectors(b, o)
n = cross_vectors(oa, ob)
oa = ob
nx += n[0]
ny += n[1]
nz += n[2]
if not unitized:
return nx, ny, nz
return normalize_vector([nx, ny, nz])
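

if __name__ == "__main__":
    # Hedged self-check (not part of the original module), using only the
    # functions defined above.
    # A segment from (0, 0, -1) to (0, 0, 1) crosses the XY plane halfway
    # along, i.e. at the origin (ratio = 0.5 in intersection_line_plane).
    print(intersection_line_plane(([0.0, 0.0, -1.0], [0.0, 0.0, 1.0]),
                                  ([0.0, 0.0, 0.0], [0.0, 0.0, 1.0])))
    # A unit square in the XY plane has area 1.0 and unit normal +Z.
    square = [[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [1.0, 1.0, 0.0], [0.0, 1.0, 0.0]]
    print(area_polygon(square), normal_polygon(square))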
|
[
"math.fabs"
] |
[((7233, 7243), 'math.fabs', 'fabs', (['cosa'], {}), '(cosa)\n', (7237, 7243), False, 'from math import fabs\n'), ((8457, 8467), 'math.fabs', 'fabs', (['cosa'], {}), '(cosa)\n', (8461, 8467), False, 'from math import fabs\n')]
|
# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------------
# Defin_Projection_GB_for_folder.py
# Created on: 2015-04-04 12:22:48.00000
# (generated by ArcGIS/ModelBuilder)
# Description:
# ---------------------------------------------------------------------------
# Import arcpy module
import os
import arcpy
# Dynamic variables:
rasterFolder = arcpy.GetParameterAsText(0)
# Set the workspace environment to the input folder
arcpy.env.workspace = rasterFolder
shpFiles = arcpy.ListFeatureClasses()
spatialReference = arcpy.GetParameter(1)
projection = spatialReference.exportToString()
# Process: Define Projection
if shpFiles is not None:
for shpFile in shpFiles:
arcpy.DefineProjection_management(shpFile, projection)
|
[
"arcpy.GetParameter",
"arcpy.ListFeatureClasses",
"arcpy.DefineProjection_management",
"arcpy.GetParameterAsText"
] |
[((394, 421), 'arcpy.GetParameterAsText', 'arcpy.GetParameterAsText', (['(0)'], {}), '(0)\n', (418, 421), False, 'import arcpy\n'), ((528, 554), 'arcpy.ListFeatureClasses', 'arcpy.ListFeatureClasses', ([], {}), '()\n', (552, 554), False, 'import arcpy\n'), ((575, 596), 'arcpy.GetParameter', 'arcpy.GetParameter', (['(1)'], {}), '(1)\n', (593, 596), False, 'import arcpy\n'), ((732, 786), 'arcpy.DefineProjection_management', 'arcpy.DefineProjection_management', (['shpFile', 'projection'], {}), '(shpFile, projection)\n', (765, 786), False, 'import arcpy\n')]
|
# coding: utf-8
"""
Implementation of validate(instance, schema)
"""
import re
import datetime
import typing
from ...logic import actions, objects, datatypes
from ..errors import ObjectDoesNotExistError, ValidationError, ValidationMultiError
from .utils import units_are_valid
def validate(instance: typing.Union[dict, list], schema: dict, path: typing.Optional[typing.List[str]] = None) -> None:
"""
Validates the given instance using the given schema and raises a ValidationError if it is invalid.
:param instance: the sampledb object
:param schema: the valid sampledb object schema
:param path: the path to this subinstance / subschema
:raise ValidationError: if the schema is invalid.
"""
if path is None:
path = []
if not isinstance(schema, dict):
raise ValidationError('invalid schema (must be dict)', path)
if 'type' not in schema:
raise ValidationError('invalid schema (must contain type)', path)
if schema['type'] == 'array':
return _validate_array(instance, schema, path)
elif schema['type'] == 'object':
return _validate_object(instance, schema, path)
elif schema['type'] == 'text':
return _validate_text(instance, schema, path)
elif schema['type'] == 'datetime':
return _validate_datetime(instance, schema, path)
elif schema['type'] == 'bool':
return _validate_bool(instance, schema, path)
elif schema['type'] == 'quantity':
return _validate_quantity(instance, schema, path)
elif schema['type'] == 'sample':
return _validate_sample(instance, schema, path)
elif schema['type'] == 'measurement':
return _validate_measurement(instance, schema, path)
elif schema['type'] == 'tags':
return _validate_tags(instance, schema, path)
elif schema['type'] == 'hazards':
return _validate_hazards(instance, schema, path)
else:
raise ValidationError('invalid type', path)
def _validate_array(instance: list, schema: dict, path: typing.List[str]) -> None:
"""
Validates the given instance using the given array schema and raises a ValidationError if it is invalid.
:param instance: the sampledb object
:param schema: the valid sampledb object schema
:param path: the path to this subinstance / subschema
:raise ValidationError: if the schema is invalid.
"""
if not isinstance(instance, list):
raise ValidationError('instance must be list', path)
if 'minItems' in schema and len(instance) < schema['minItems']:
raise ValidationError('expected at least {} items'.format(schema['minItems']), path)
if 'maxItems' in schema and len(instance) > schema['maxItems']:
raise ValidationError('expected at most {} items'.format(schema['maxItems']), path)
errors = []
for index, item in enumerate(instance):
try:
validate(item, schema['items'], path + [str(index)])
except ValidationError as e:
errors.append(e)
if len(errors) == 1:
raise errors[0]
elif len(errors) > 1:
raise ValidationMultiError(errors)
def _validate_hazards(instance: list, schema: dict, path: typing.List[str]) -> None:
"""
Validate the given instance using the given GHS hazards schema and raise a ValidationError if it is invalid.
:param instance: the sampledb object
:param schema: the valid sampledb object schema
:param path: the path to this subinstance / subschema
:raise ValidationError: if the schema is invalid.
"""
if not isinstance(instance, dict):
raise ValidationError('instance must be dict', path)
if path != ['hazards']:
raise ValidationError('GHS hazards must be a top-level entry named "hazards"', path)
valid_keys = {'_type', 'hazards'}
required_keys = valid_keys
schema_keys = set(instance.keys())
invalid_keys = schema_keys - valid_keys
if invalid_keys:
raise ValidationError('unexpected keys in schema: {}'.format(invalid_keys), path)
missing_keys = required_keys - schema_keys
if missing_keys:
raise ValidationError('missing keys in schema: {}'.format(missing_keys), path)
if instance['_type'] != 'hazards':
raise ValidationError('expected _type "hazards"', path)
if not isinstance(instance['hazards'], list):
raise ValidationError('hazards must be list', path)
errors = []
hazards = []
for index, item in enumerate(instance['hazards']):
if not isinstance(item, int):
errors.append(ValidationError('invalid hazard index type: {}'.format(type(item)), path + ['hazards', str(index)]))
elif item in hazards:
errors.append(ValidationError('duplicate hazard index: {}'.format(item), path + ['hazards', str(index)]))
elif item < 1 or item > 9:
errors.append(ValidationError('invalid hazard index: {}'.format(item), path + ['hazards', str(index)]))
else:
hazards.append(item)
if len(errors) == 1:
raise errors[0]
elif len(errors) > 1:
raise ValidationMultiError(errors)
def _validate_tags(instance: list, schema: dict, path: typing.List[str]) -> None:
"""
Validates the given instance using the given tags schema and raises a ValidationError if it is invalid.
:param instance: the sampledb object
:param schema: the valid sampledb object schema
:param path: the path to this subinstance / subschema
:raise ValidationError: if the schema is invalid.
"""
if not isinstance(instance, dict):
raise ValidationError('instance must be dict', path)
valid_keys = {'_type', 'tags'}
required_keys = valid_keys
schema_keys = set(instance.keys())
invalid_keys = schema_keys - valid_keys
if invalid_keys:
raise ValidationError('unexpected keys in schema: {}'.format(invalid_keys), path)
missing_keys = required_keys - schema_keys
if missing_keys:
raise ValidationError('missing keys in schema: {}'.format(missing_keys), path)
if instance['_type'] != 'tags':
raise ValidationError('expected _type "tags"', path)
if not isinstance(instance['tags'], list):
raise ValidationError('tags must be list', path)
errors = []
tags = []
for index, item in enumerate(instance['tags']):
if not isinstance(item, str):
errors.append(ValidationError('invalid tag type: {}'.format(type(item)), path + ['tags', str(index)]))
elif item in tags:
errors.append(ValidationError('duplicate tag: {}'.format(item), path + ['tags', str(index)]))
elif item.lower() != item:
errors.append(ValidationError('tag not lowercase: {}'.format(item), path + ['tags', str(index)]))
elif any(c not in 'abcdefghijklmnopqrstuvwxyz0123456789_-äöüß' for c in item):
errors.append(ValidationError('tag contains invalid character: {}'.format(item), path + ['tags', str(index)]))
else:
tags.append(item)
if len(errors) == 1:
raise errors[0]
elif len(errors) > 1:
raise ValidationMultiError(errors)
def _validate_object(instance: dict, schema: dict, path: typing.List[str]) -> None:
"""
Validates the given instance using the given object schema and raises a ValidationError if it is invalid.
:param instance: the sampledb object
:param schema: the valid sampledb object schema
:param path: the path to this subinstance / subschema
:raise ValidationError: if the schema is invalid.
"""
if not isinstance(instance, dict):
raise ValidationError('instance must be dict', path)
errors = []
if 'required' in schema:
for property_name in schema['required']:
if property_name not in instance:
errors.append(ValidationError('missing required property "{}"'.format(property_name), path + [property_name]))
for property_name, property_value in instance.items():
try:
if property_name not in schema['properties']:
raise ValidationError('unknown property "{}"'.format(property_name), path + [property_name])
else:
validate(property_value, schema['properties'][property_name], path + [property_name])
except ValidationError as e:
errors.append(e)
if len(errors) == 1:
raise errors[0]
elif len(errors) > 1:
raise ValidationMultiError(errors)
def _validate_text(instance: dict, schema: dict, path: typing.List[str]) -> None:
"""
Validates the given instance using the given text object schema and raises a ValidationError if it is invalid.
:param instance: the sampledb object
:param schema: the valid sampledb object schema
:param path: the path to this subinstance / subschema
:raise ValidationError: if the schema is invalid.
"""
if not isinstance(instance, dict):
raise ValidationError('instance must be dict', path)
valid_keys = {'_type', 'text'}
required_keys = valid_keys
schema_keys = set(instance.keys())
invalid_keys = schema_keys - valid_keys
if invalid_keys:
raise ValidationError('unexpected keys in schema: {}'.format(invalid_keys), path)
missing_keys = required_keys - schema_keys
if missing_keys:
raise ValidationError('missing keys in schema: {}'.format(missing_keys), path)
if instance['_type'] != 'text':
raise ValidationError('expected _type "text"', path)
if not isinstance(instance['text'], str):
raise ValidationError('text must be str', path)
choices = schema.get('choices', None)
if choices and instance['text'] not in choices:
raise ValidationError('The text must be one of {}.'.format(choices), path)
min_length = schema.get('minLength', 0)
max_length = schema.get('maxLength', None)
if len(instance['text']) < min_length:
raise ValidationError('The text must be at least {} characters long.'.format(min_length), path)
if max_length is not None and len(instance['text']) > max_length:
raise ValidationError('The text must be at most {} characters long.'.format(max_length), path)
if 'pattern' in schema:
if re.match(schema['pattern'], instance['text']) is None:
raise ValidationError('The text must match the pattern: {}.'.format(schema['pattern']), path)
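# Editorial illustration (not part of the original module): a minimal instance/schema
# pair accepted by the text validator above, assuming validate() is imported from here:
#     validate({'_type': 'text', 'text': 'NaCl'}, {'type': 'text', 'minLength': 1})
# returns None, i.e. the instance is valid.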
def _validate_datetime(instance: dict, schema: dict, path: typing.List[str]) -> None:
"""
Validates the given instance using the given datetime object schema and raises a ValidationError if it is invalid.
:param instance: the sampledb object
:param schema: the valid sampledb object schema
:param path: the path to this subinstance / subschema
:raise ValidationError: if the schema is invalid.
"""
if not isinstance(instance, dict):
raise ValidationError('instance must be dict', path)
valid_keys = {'_type', 'utc_datetime'}
required_keys = valid_keys
schema_keys = set(instance.keys())
invalid_keys = schema_keys - valid_keys
if invalid_keys:
raise ValidationError('unexpected keys in schema: {}'.format(invalid_keys), path)
missing_keys = required_keys - schema_keys
if missing_keys:
raise ValidationError('missing keys in schema: {}'.format(missing_keys), path)
if instance['_type'] != 'datetime':
raise ValidationError('expected _type "datetime"', path)
if not isinstance(instance['utc_datetime'], str):
raise ValidationError('utc_datetime must be str', path)
try:
datetime.datetime.strptime(instance['utc_datetime'], '%Y-%m-%d %H:%M:%S')
except ValueError:
raise ValidationError('Please enter the date and time in the format: YYYY-MM-DD HH:MM:SS.', path)
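# Editorial note: the datetime validator above therefore expects instances shaped like
# {'_type': 'datetime', 'utc_datetime': '2021-03-01 13:37:00'} (an illustrative value),
# with the timestamp in the exact '%Y-%m-%d %H:%M:%S' format.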
def _validate_bool(instance: dict, schema: dict, path: typing.List[str]) -> None:
"""
Validates the given instance using the given boolean object schema and raises a ValidationError if it is invalid.
:param instance: the sampledb object
:param schema: the valid sampledb object schema
:param path: the path to this subinstance / subschema
:raise ValidationError: if the schema is invalid.
"""
if not isinstance(instance, dict):
raise ValidationError('instance must be dict', path)
valid_keys = {'_type', 'value'}
required_keys = valid_keys
schema_keys = set(instance.keys())
invalid_keys = schema_keys - valid_keys
if invalid_keys:
raise ValidationError('unexpected keys in schema: {}'.format(invalid_keys), path)
missing_keys = required_keys - schema_keys
if missing_keys:
raise ValidationError('missing keys in schema: {}'.format(missing_keys), path)
if instance['_type'] != 'bool':
raise ValidationError('expected _type "bool"', path)
if not isinstance(instance['value'], bool):
raise ValidationError('value must be bool', path)
def _validate_quantity(instance: dict, schema: dict, path: typing.List[str]) -> None:
"""
Validates the given instance using the given quantity object schema and raises a ValidationError if it is invalid.
:param instance: the sampledb object
:param schema: the valid sampledb object schema
:param path: the path to this subinstance / subschema
:raise ValidationError: if the schema is invalid.
"""
if not isinstance(instance, dict):
raise ValidationError('instance must be dict', path)
valid_keys = {'_type', 'units', 'dimensionality', 'magnitude_in_base_units'}
required_keys = valid_keys
schema_keys = set(instance.keys())
invalid_keys = schema_keys - valid_keys
if invalid_keys:
raise ValidationError('unexpected keys in schema: {}'.format(invalid_keys), path)
missing_keys = required_keys - schema_keys
if missing_keys:
raise ValidationError('missing keys in schema: {}'.format(missing_keys), path)
if instance['_type'] != 'quantity':
raise ValidationError('expected _type "quantity"', path)
if not isinstance(instance['units'], str):
raise ValidationError('units must be str', path)
if not units_are_valid(instance['units']):
raise ValidationError('Invalid/Unknown units', path)
if not isinstance(instance['magnitude_in_base_units'], float) and not isinstance(instance['magnitude_in_base_units'], int):
raise ValidationError('magnitude_in_base_units must be float or int', path)
try:
quantity = datatypes.Quantity(instance['magnitude_in_base_units'], units=instance['units'])
except Exception:
raise ValidationError('Unable to create quantity', path)
if not isinstance(instance['dimensionality'], str):
raise ValidationError('dimensionality must be str', path)
try:
schema_quantity = datatypes.Quantity(1.0, units=schema['units'])
except Exception:
raise ValidationError('Unable to create schema quantity', path)
if quantity.dimensionality != schema_quantity.dimensionality:
raise ValidationError('Invalid units, expected units for dimensionality "{}"'.format(str(schema_quantity.dimensionality)), path)
if str(quantity.dimensionality) != instance['dimensionality']:
raise ValidationError('Invalid dimensionality, expected "{}"'.format(str(schema_quantity.dimensionality)), path)
def _validate_sample(instance: dict, schema: dict, path: typing.List[str]) -> None:
"""
Validates the given instance using the given sample object schema and raises a ValidationError if it is invalid.
:param instance: the sampledb object
:param schema: the valid sampledb object schema
:param path: the path to this subinstance / subschema
:raise ValidationError: if the schema is invalid.
"""
if not isinstance(instance, dict):
raise ValidationError('instance must be dict', path)
valid_keys = {'_type', 'object_id'}
required_keys = valid_keys
schema_keys = set(instance.keys())
invalid_keys = schema_keys - valid_keys
if invalid_keys:
raise ValidationError('unexpected keys in schema: {}'.format(invalid_keys), path)
missing_keys = required_keys - schema_keys
if missing_keys:
raise ValidationError('missing keys in schema: {}'.format(missing_keys), path)
if instance['_type'] != 'sample':
raise ValidationError('expected _type "sample"', path)
if not isinstance(instance['object_id'], int):
raise ValidationError('object_id must be int', path)
try:
sample = objects.get_object(object_id=instance['object_id'])
except ObjectDoesNotExistError:
raise ValidationError('object does not exist', path)
action = actions.get_action(sample.action_id)
if action.type != actions.ActionType.SAMPLE_CREATION:
raise ValidationError('object must be sample', path)
def _validate_measurement(instance: dict, schema: dict, path: typing.List[str]) -> None:
"""
Validates the given instance using the given measurement object schema and raises a ValidationError if it is invalid.
:param instance: the sampledb object
:param schema: the valid sampledb object schema
:param path: the path to this subinstance / subschema
:raise ValidationError: if the schema is invalid.
"""
if not isinstance(instance, dict):
raise ValidationError('instance must be dict', path)
valid_keys = {'_type', 'object_id'}
required_keys = valid_keys
schema_keys = set(instance.keys())
invalid_keys = schema_keys - valid_keys
if invalid_keys:
raise ValidationError('unexpected keys in schema: {}'.format(invalid_keys), path)
missing_keys = required_keys - schema_keys
if missing_keys:
raise ValidationError('missing keys in schema: {}'.format(missing_keys), path)
if instance['_type'] != 'measurement':
raise ValidationError('expected _type "measurement"', path)
if not isinstance(instance['object_id'], int):
raise ValidationError('object_id must be int', path)
try:
measurement = objects.get_object(object_id=instance['object_id'])
except ObjectDoesNotExistError:
raise ValidationError('object does not exist', path)
action = actions.get_action(measurement.action_id)
if action.type != actions.ActionType.MEASUREMENT:
raise ValidationError('object must be measurement', path)
|
[
"datetime.datetime.strptime",
"re.match"
] |
[((11580, 11653), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (["instance['utc_datetime']", '"""%Y-%m-%d %H:%M:%S"""'], {}), "(instance['utc_datetime'], '%Y-%m-%d %H:%M:%S')\n", (11606, 11653), False, 'import datetime\n'), ((10227, 10272), 're.match', 're.match', (["schema['pattern']", "instance['text']"], {}), "(schema['pattern'], instance['text'])\n", (10235, 10272), False, 'import re\n')]
|
#!/usr/bin/env python
# coding: utf-8
# In[1]:
import networkx as nx
import numpy as np
import matplotlib.pyplot as plt
import time
# In[2]:
def createGraph(depotNodes ,requiredEdges, numNodes, show=True):
G = nx.Graph()
edges = []
pos = {}
reqPos = {}
s = [1, 1, 2, 2, 3, 3, 4, 4, 4, 5, 6, 7]
t = [2, 3, 4, 6, 4, 5, 5, 7, 6, 8, 7, 8]
weights = [2.3, 2, 3, 1.5, 3.2, 2.2, 3.8, 2.6, 2.2, 2.8, 1.8, 0.8]
    xData = [-2, -0.5, -1, 0, 1, 1.5, 2, 2.5]
    yData = [ 0, -2, 2.5, 0, 3, -2, 0.3, 1.5]
for i in range(len(s)):
edges.append((s[i], t[i], weights[i]))
for i in range(1, numNodes+1):
G.add_node(i)
pos[i] =(xData[i-1], yData[i-1])
node_color = ['y']*int(G.number_of_nodes())
    depot_node_color = list(node_color)  # copy: keep the base coloring separate from the depot coloring
for i in range(1, len(node_color)+1):
if i in depotNodes:
depot_node_color[i-1] = 'g'
G.add_weighted_edges_from(edges)
labels = nx.get_edge_attributes(G,'weight')
nx.draw_networkx(G,pos, node_color = node_color)
nx.draw_networkx(G,pos, node_color = depot_node_color)
nx.draw_networkx_edges(G, pos, edgelist=requiredEdges, width=3, alpha=0.5,
edge_color="r")
nx.draw_networkx_edge_labels(G, pos, edge_labels=labels)
if show:
plt.figure(1)
plt.show()
return G,pos, depot_node_color
# In[3]:
# Allocating task based on distance between base station and desired edge and UAV availability
def taskAllocation(G, depotNodes, requiredNodes, numrequiredEdges, uavsInDepotNodes):
depotNodesCost = np.zeros((len(depotNodes), numrequiredEdges))
depotPath = []
bestPathTillDesiredEdge = []
bestCostTillDesiredEdge = []
for j in range(numrequiredEdges):
for i in range(len(depotNodes)):
c1 = nx.dijkstra_path_length(G, source=depotNodes[i], target=requiredNodes[j][0])
c2 = nx.dijkstra_path_length(G, source=depotNodes[i], target=requiredNodes[j][1])
l = []
if c1 <= c2:
l = nx.dijkstra_path(G, source=depotNodes[i], target=requiredNodes[j][0])
l.append(requiredNodes[j][1])
else:
l = nx.dijkstra_path(G, source=depotNodes[i], target=requiredNodes[j][1])
l.append(requiredNodes[j][0])
depotNodesCost[i,j] = min(c1,c2)
depotNodesCost[i,j] += G.get_edge_data(requiredNodes[j][0], requiredNodes[j][1])['weight']
depotPath.append(l)
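        # Editorial comment: the cheapest depot for this edge is only used if it still has
        # a UAV available; otherwise its cost is set to infinity so that the later argmin
        # over depotNodesCost falls back to the next-cheapest depot.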
if uavsInDepotNodes[np.argmin(depotNodesCost[:,j])] > 0:
uavsInDepotNodes[np.argmin(depotNodesCost[:,j])] -= 1
else:
depotNodesCost[np.argmin(depotNodesCost[:,j]),j] = np.inf
depotPath = np.transpose(np.array(depotPath, dtype=object).reshape((len(depotNodes), numrequiredEdges)))
taskAllocatedtoBaseStations = []
print("Task Allocation Algorithm Output: ")
for i in range(numrequiredEdges):
taskAllocatedtoBaseStations.append(np.argmin(depotNodesCost[:,i]))
bestCostTillDesiredEdge.append(depotNodesCost[taskAllocatedtoBaseStations[i],i])
bestPathTillDesiredEdge.append(depotPath[taskAllocatedtoBaseStations[i],i])
print('Allocating arc ' + str(requiredNodes[i][0]) + ' - ' + str(requiredNodes[i][1]) + ' to base station - node ' + str(depotNodes[taskAllocatedtoBaseStations[i]]))
return bestPathTillDesiredEdge, bestCostTillDesiredEdge
# In[4]:
def pathScanningAlgorithm(G, numrequiredEdges,depotNodes, bestPathTillDesiredEdge, bestCostTillDesiredEdge, vehicleCapacity):
bestRoute = []
bestRouteCost = []
minCost = np.inf
for j in range(numrequiredEdges):
minCost = np.inf
l = []
for i in range(len(depotNodes)):
c1 = nx.dijkstra_path_length(G, source=bestPathTillDesiredEdge[j][-1], target=depotNodes[i])
if c1 <= minCost:
l = nx.dijkstra_path(G, source=bestPathTillDesiredEdge[j][-1], target=depotNodes[i])[1:]
minCost = c1
bestRoute.append(bestPathTillDesiredEdge[j] + l)
bestRouteCost.append(bestCostTillDesiredEdge[j] + minCost)
if bestRouteCost[j] > vehicleCapacity:
bestRoute[j] = None
bestRouteCost[j] = np.inf
print("Path Scanning Algorithm Output: ")
return bestRoute, bestRouteCost
# In[5]:
def visualizePath(depotNodes, requiredNodes, numNodes, path, pathType="solution"):
plt.figure(1)
for j in range(len(path)):
if path[j] != None:
# plt.figure(j+1)
G, pos, depot_node_color = createGraph(depotNodes, requiredNodes , numNodes, show=False)
G1 = nx.DiGraph()
pos1 = {}
node_color = []
edges = []
for i in range(len(path[j])-1):
edges.append((path[j][i], path[j][i+1], G.get_edge_data(path[j][i], path[j][i+1])['weight']))
pos1[path[j][i]] = pos[path[j][i]]
if i == len(path[j])-2:
pos1[path[j][i+1]] = pos[path[j][i+1]]
for key in pos1.keys():
node_color.append(depot_node_color[key-1])
G1.add_weighted_edges_from(edges)
nx.draw_networkx(G1,pos1, arrows=True, node_color = node_color, edge_color='b', arrowsize=12, width=1, arrowstyle='simple')
if pathType == "solution":
plt.legend(["Solution Path"], loc ="upper left")
else:
plt.legend(["Path"], loc ="upper left")
plt.show()
# In[6]:
def main():
# Initializing Parameters
vehicleCapacity = 14
numNodes = 8
    requiredNodes = [[2, 4], [6, 7]]
    uavsInDepotNodes = [0, 2]
    totalUavs = sum(uavsInDepotNodes)
    numrequiredEdges = 2
    depotNodes = [1, 5]
    taskAllocatedtoBaseStations = []
start = time.time()
G,pos, depot_node_color = createGraph(depotNodes, requiredNodes, numNodes, show=False)
bestPathTillDesiredEdge, bestCostTillDesiredEdge = taskAllocation(G, depotNodes, requiredNodes, numrequiredEdges, uavsInDepotNodes)
visualizePath(depotNodes, requiredNodes, numNodes, bestPathTillDesiredEdge, pathType="normal")
bestRoute, bestRouteCost = pathScanningAlgorithm(G, numrequiredEdges, depotNodes, bestPathTillDesiredEdge, bestCostTillDesiredEdge, vehicleCapacity)
visualizePath(depotNodes, requiredNodes, numNodes, bestRoute)
end = time.time()
print("Execution took "+ str(end-start) + " seconds.")
if __name__ == "__main__":
# execute only if run as a script
main()
# In[ ]:
# In[ ]:
# In[ ]:
|
[
"matplotlib.pyplot.show",
"networkx.draw_networkx_edges",
"networkx.dijkstra_path_length",
"matplotlib.pyplot.legend",
"networkx.get_edge_attributes",
"networkx.draw_networkx",
"time.time",
"numpy.argmin",
"networkx.dijkstra_path",
"matplotlib.pyplot.figure",
"networkx.Graph",
"numpy.array",
"networkx.draw_networkx_edge_labels",
"networkx.DiGraph"
] |
[((221, 231), 'networkx.Graph', 'nx.Graph', ([], {}), '()\n', (229, 231), True, 'import networkx as nx\n'), ((979, 1014), 'networkx.get_edge_attributes', 'nx.get_edge_attributes', (['G', '"""weight"""'], {}), "(G, 'weight')\n", (1001, 1014), True, 'import networkx as nx\n'), ((1018, 1065), 'networkx.draw_networkx', 'nx.draw_networkx', (['G', 'pos'], {'node_color': 'node_color'}), '(G, pos, node_color=node_color)\n', (1034, 1065), True, 'import networkx as nx\n'), ((1071, 1124), 'networkx.draw_networkx', 'nx.draw_networkx', (['G', 'pos'], {'node_color': 'depot_node_color'}), '(G, pos, node_color=depot_node_color)\n', (1087, 1124), True, 'import networkx as nx\n'), ((1130, 1224), 'networkx.draw_networkx_edges', 'nx.draw_networkx_edges', (['G', 'pos'], {'edgelist': 'requiredEdges', 'width': '(3)', 'alpha': '(0.5)', 'edge_color': '"""r"""'}), "(G, pos, edgelist=requiredEdges, width=3, alpha=0.5,\n edge_color='r')\n", (1152, 1224), True, 'import networkx as nx\n'), ((1265, 1321), 'networkx.draw_networkx_edge_labels', 'nx.draw_networkx_edge_labels', (['G', 'pos'], {'edge_labels': 'labels'}), '(G, pos, edge_labels=labels)\n', (1293, 1321), True, 'import networkx as nx\n'), ((4508, 4521), 'matplotlib.pyplot.figure', 'plt.figure', (['(1)'], {}), '(1)\n', (4518, 4521), True, 'import matplotlib.pyplot as plt\n'), ((5905, 5916), 'time.time', 'time.time', ([], {}), '()\n', (5914, 5916), False, 'import time\n'), ((6472, 6483), 'time.time', 'time.time', ([], {}), '()\n', (6481, 6483), False, 'import time\n'), ((1343, 1356), 'matplotlib.pyplot.figure', 'plt.figure', (['(1)'], {}), '(1)\n', (1353, 1356), True, 'import matplotlib.pyplot as plt\n'), ((1365, 1375), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1373, 1375), True, 'import matplotlib.pyplot as plt\n'), ((1854, 1930), 'networkx.dijkstra_path_length', 'nx.dijkstra_path_length', (['G'], {'source': 'depotNodes[i]', 'target': 'requiredNodes[j][0]'}), '(G, source=depotNodes[i], target=requiredNodes[j][0])\n', (1877, 1930), True, 'import networkx as nx\n'), ((1948, 2024), 'networkx.dijkstra_path_length', 'nx.dijkstra_path_length', (['G'], {'source': 'depotNodes[i]', 'target': 'requiredNodes[j][1]'}), '(G, source=depotNodes[i], target=requiredNodes[j][1])\n', (1971, 2024), True, 'import networkx as nx\n'), ((3051, 3082), 'numpy.argmin', 'np.argmin', (['depotNodesCost[:, i]'], {}), '(depotNodesCost[:, i])\n', (3060, 3082), True, 'import numpy as np\n'), ((3833, 3925), 'networkx.dijkstra_path_length', 'nx.dijkstra_path_length', (['G'], {'source': 'bestPathTillDesiredEdge[j][-1]', 'target': 'depotNodes[i]'}), '(G, source=bestPathTillDesiredEdge[j][-1], target=\n depotNodes[i])\n', (3856, 3925), True, 'import networkx as nx\n'), ((4729, 4741), 'networkx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (4739, 4741), True, 'import networkx as nx\n'), ((5274, 5401), 'networkx.draw_networkx', 'nx.draw_networkx', (['G1', 'pos1'], {'arrows': '(True)', 'node_color': 'node_color', 'edge_color': '"""b"""', 'arrowsize': '(12)', 'width': '(1)', 'arrowstyle': '"""simple"""'}), "(G1, pos1, arrows=True, node_color=node_color, edge_color=\n 'b', arrowsize=12, width=1, arrowstyle='simple')\n", (5290, 5401), True, 'import networkx as nx\n'), ((5588, 5598), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (5596, 5598), True, 'import matplotlib.pyplot as plt\n'), ((2089, 2158), 'networkx.dijkstra_path', 'nx.dijkstra_path', (['G'], {'source': 'depotNodes[i]', 'target': 'requiredNodes[j][0]'}), '(G, source=depotNodes[i], target=requiredNodes[j][0])\n', (2105, 2158), 
True, 'import networkx as nx\n'), ((2243, 2312), 'networkx.dijkstra_path', 'nx.dijkstra_path', (['G'], {'source': 'depotNodes[i]', 'target': 'requiredNodes[j][1]'}), '(G, source=depotNodes[i], target=requiredNodes[j][1])\n', (2259, 2312), True, 'import networkx as nx\n'), ((2580, 2611), 'numpy.argmin', 'np.argmin', (['depotNodesCost[:, j]'], {}), '(depotNodesCost[:, j])\n', (2589, 2611), True, 'import numpy as np\n'), ((2650, 2681), 'numpy.argmin', 'np.argmin', (['depotNodesCost[:, j]'], {}), '(depotNodesCost[:, j])\n', (2659, 2681), True, 'import numpy as np\n'), ((2805, 2838), 'numpy.array', 'np.array', (['depotPath'], {'dtype': 'object'}), '(depotPath, dtype=object)\n', (2813, 2838), True, 'import numpy as np\n'), ((5453, 5500), 'matplotlib.pyplot.legend', 'plt.legend', (["['Solution Path']"], {'loc': '"""upper left"""'}), "(['Solution Path'], loc='upper left')\n", (5463, 5500), True, 'import matplotlib.pyplot as plt\n'), ((5536, 5574), 'matplotlib.pyplot.legend', 'plt.legend', (["['Path']"], {'loc': '"""upper left"""'}), "(['Path'], loc='upper left')\n", (5546, 5574), True, 'import matplotlib.pyplot as plt\n'), ((2728, 2759), 'numpy.argmin', 'np.argmin', (['depotNodesCost[:, j]'], {}), '(depotNodesCost[:, j])\n', (2737, 2759), True, 'import numpy as np\n'), ((3971, 4056), 'networkx.dijkstra_path', 'nx.dijkstra_path', (['G'], {'source': 'bestPathTillDesiredEdge[j][-1]', 'target': 'depotNodes[i]'}), '(G, source=bestPathTillDesiredEdge[j][-1], target=depotNodes[i]\n )\n', (3987, 4056), True, 'import networkx as nx\n')]
|
import ffmpy, subprocess, json
import argparse
def main():
# Globals in Python are global to a module, not across all modules.
# global validDatesDict
# global stripDates
# TODO: use command line parameters to determine path to scan
# https://stackabuse.com/command-line-arguments-in-python/
parser = argparse.ArgumentParser(
description='checkmetadata')
# parser.add_argument('-i','--input', help='Input file name', required=True)
# generic directory scanning operations - into internal dictionary tree data structure
parser.add_argument('-i','--input', help='Input file name')
args = parser.parse_args()
print ("Input file: %s" % args.input )
# ffprobe = ffmpy.FFprobe(global_options="-loglevel quiet -sexagesimal -of json -show_entries stream=width,height,duration -show_entries format=duration -select_streams v:0", inputs={args.input : None})
ffprobe = ffmpy.FFprobe(global_options="-show_format -of json", inputs={args.input : None})
print("ffprobe.cmd:", ffprobe.cmd) # printout the resulting ffprobe shell command
stdout, stderr = ffprobe.run(stderr=subprocess.PIPE, stdout=subprocess.PIPE)
# std* is byte sequence, but json in Python 3.5.2 requires str
ff0string = str(stdout,'utf-8')
ffinfo = json.loads(ff0string)
print(json.dumps(ffinfo, indent=4)) # pretty print
# print("Video Dimensions: {}x{}".format(ffinfo["streams"][0]["width"], ffinfo["streams"][0]["height"]))
# print("Streams Duration:", ffinfo["streams"][0]["duration"])
# print("Format Duration: ", ffinfo["format"]["duration"])
if ("ensemble" in ffinfo["format"]["tags"]):
print("ADS ensemble: ", ffinfo["format"]["tags"]["ensemble"])
if ("boundary_condition" in ffinfo["format"]["tags"]):
print("ADS boundary_condition: ", ffinfo["format"]["tags"]["boundary_condition"])
if ("init" in ffinfo["format"]["tags"]):
print("ADS init: ", ffinfo["format"]["tags"]["init"])
if ("plot" in ffinfo["format"]["tags"]):
print("ADS plot: ", ffinfo["format"]["tags"]["plot"])
if ("plot_group" in ffinfo["format"]["tags"]):
print("ADS plot_group: ", ffinfo["format"]["tags"]["plot_group"])
if __name__ == '__main__':
main()
|
[
"json.loads",
"argparse.ArgumentParser",
"json.dumps",
"ffmpy.FFprobe"
] |
[((329, 381), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""checkmetadata"""'}), "(description='checkmetadata')\n", (352, 381), False, 'import argparse\n'), ((925, 1010), 'ffmpy.FFprobe', 'ffmpy.FFprobe', ([], {'global_options': '"""-show_format -of json"""', 'inputs': '{args.input: None}'}), "(global_options='-show_format -of json', inputs={args.input: None}\n )\n", (938, 1010), False, 'import ffmpy, subprocess, json\n'), ((1292, 1313), 'json.loads', 'json.loads', (['ff0string'], {}), '(ff0string)\n', (1302, 1313), False, 'import ffmpy, subprocess, json\n'), ((1324, 1352), 'json.dumps', 'json.dumps', (['ffinfo'], {'indent': '(4)'}), '(ffinfo, indent=4)\n', (1334, 1352), False, 'import ffmpy, subprocess, json\n')]
|
from unittest import mock
from django.test import TestCase
from django.contrib.auth.models import AnonymousUser
import globus_sdk
from globus_portal_framework.utils import load_globus_client
from globus_portal_framework.tests.mocks import (
MockGlobusClient, mock_user, globus_client_is_loaded_with_authorizer
)
class GlobusPortalFrameworkUtilsTests(TestCase):
@mock.patch('globus_sdk.SearchClient', MockGlobusClient)
def test_load_search_client_with_anonymous_user(self):
c = load_globus_client(AnonymousUser(), globus_sdk.SearchClient,
'search.api.globus.org')
self.assertFalse(globus_client_is_loaded_with_authorizer(c))
@mock.patch('globus_sdk.SearchClient', MockGlobusClient)
def test_load_globus_client_with_real_user(self):
user = mock_user('bob', ['search.api.globus.org'])
c = load_globus_client(user, globus_sdk.SearchClient,
'search.api.globus.org')
self.assertTrue(globus_client_is_loaded_with_authorizer(c))
@mock.patch('globus_sdk.SearchClient', MockGlobusClient)
def test_load_transfer_client_with_bad_token(self):
user = mock_user('bob', ['transfer.api.globus.org'])
with self.assertRaises(ValueError):
c = load_globus_client(user, globus_sdk.SearchClient,
'search.api.globus.org')
|
[
"django.contrib.auth.models.AnonymousUser",
"globus_portal_framework.utils.load_globus_client",
"globus_portal_framework.tests.mocks.globus_client_is_loaded_with_authorizer",
"globus_portal_framework.tests.mocks.mock_user",
"unittest.mock.patch"
] |
[((376, 431), 'unittest.mock.patch', 'mock.patch', (['"""globus_sdk.SearchClient"""', 'MockGlobusClient'], {}), "('globus_sdk.SearchClient', MockGlobusClient)\n", (386, 431), False, 'from unittest import mock\n'), ((695, 750), 'unittest.mock.patch', 'mock.patch', (['"""globus_sdk.SearchClient"""', 'MockGlobusClient'], {}), "('globus_sdk.SearchClient', MockGlobusClient)\n", (705, 750), False, 'from unittest import mock\n'), ((1056, 1111), 'unittest.mock.patch', 'mock.patch', (['"""globus_sdk.SearchClient"""', 'MockGlobusClient'], {}), "('globus_sdk.SearchClient', MockGlobusClient)\n", (1066, 1111), False, 'from unittest import mock\n'), ((820, 863), 'globus_portal_framework.tests.mocks.mock_user', 'mock_user', (['"""bob"""', "['search.api.globus.org']"], {}), "('bob', ['search.api.globus.org'])\n", (829, 863), False, 'from globus_portal_framework.tests.mocks import MockGlobusClient, mock_user, globus_client_is_loaded_with_authorizer\n'), ((876, 950), 'globus_portal_framework.utils.load_globus_client', 'load_globus_client', (['user', 'globus_sdk.SearchClient', '"""search.api.globus.org"""'], {}), "(user, globus_sdk.SearchClient, 'search.api.globus.org')\n", (894, 950), False, 'from globus_portal_framework.utils import load_globus_client\n'), ((1183, 1228), 'globus_portal_framework.tests.mocks.mock_user', 'mock_user', (['"""bob"""', "['transfer.api.globus.org']"], {}), "('bob', ['transfer.api.globus.org'])\n", (1192, 1228), False, 'from globus_portal_framework.tests.mocks import MockGlobusClient, mock_user, globus_client_is_loaded_with_authorizer\n'), ((522, 537), 'django.contrib.auth.models.AnonymousUser', 'AnonymousUser', ([], {}), '()\n', (535, 537), False, 'from django.contrib.auth.models import AnonymousUser\n'), ((645, 687), 'globus_portal_framework.tests.mocks.globus_client_is_loaded_with_authorizer', 'globus_client_is_loaded_with_authorizer', (['c'], {}), '(c)\n', (684, 687), False, 'from globus_portal_framework.tests.mocks import MockGlobusClient, mock_user, globus_client_is_loaded_with_authorizer\n'), ((1006, 1048), 'globus_portal_framework.tests.mocks.globus_client_is_loaded_with_authorizer', 'globus_client_is_loaded_with_authorizer', (['c'], {}), '(c)\n', (1045, 1048), False, 'from globus_portal_framework.tests.mocks import MockGlobusClient, mock_user, globus_client_is_loaded_with_authorizer\n'), ((1289, 1363), 'globus_portal_framework.utils.load_globus_client', 'load_globus_client', (['user', 'globus_sdk.SearchClient', '"""search.api.globus.org"""'], {}), "(user, globus_sdk.SearchClient, 'search.api.globus.org')\n", (1307, 1363), False, 'from globus_portal_framework.utils import load_globus_client\n')]
|
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class ArticlesConfig(AppConfig):
name = 'ripiu.cmsplugin_articles'
verbose_name = _('Articles and sections')
|
[
"django.utils.translation.ugettext_lazy"
] |
[((182, 208), 'django.utils.translation.ugettext_lazy', '_', (['"""Articles and sections"""'], {}), "('Articles and sections')\n", (183, 208), True, 'from django.utils.translation import ugettext_lazy as _\n')]
|
from crawler import Downloader, DownloaderError, Worker
from .testing import WorkerTestCase, FakeApp
import asyncio
import aiohttp
from urllib.parse import urljoin
class DownloaderTestCase(WorkerTestCase):
def setUp(self):
super().setUp()
# reset class attribute
Downloader.SEQ_ID = 0
self.url_queue = asyncio.Queue()
async def asyncSetUp(self):
super().setUp()
self.http_client = aiohttp.ClientSession(
headers={"User-Agent": "Test Client"},
)
async def asyncTearDown(self):
await super().asyncTearDown()
await self.http_client.close()
class TestDownloaderBasic(DownloaderTestCase):
def test_init(self):
downloader = Downloader(
self.stop_ev,
self.cache,
self.url_queue,
self.http_client
)
self.assertIsInstance(downloader, Worker, "Is a Worker")
def test_object_enumeration(self):
for i in range(1, 8):
downloader = Downloader(
self.stop_ev,
self.cache,
self.url_queue,
self.http_client
)
self.assertEqual(downloader.seq_id, i, "sequential id")
self.assertEqual(str(downloader), "Downloader-%d" % i, "__str__")
class TestDownloaderOperations(DownloaderTestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.webapp = FakeApp()
cls.webapp.start()
@classmethod
def tearDownClass(cls):
super().tearDownClass()
cls.webapp.stop()
def make_url(self, uri):
return "http://%s:%d/%s" % (
self.webapp.host,
self.webapp.port,
uri.lstrip("/")
)
def test_extract_urls(self):
downloader = Downloader(
self.stop_ev,
self.cache,
self.url_queue,
self.http_client,
)
page_url = "http://localhost"
paths = ["/test/%d" % i for i in range(16)]
links = ['<a href="%s"></a>' % p for p in paths]
text = "<body>%s</body>" % "".join(links)
urls = downloader.extract_urls(page_url, text)
absurls = [urljoin(page_url, p) for p in paths]
self.assertEqual(urls, absurls, "Returns absolute urls")
async def test_errors(self):
downloader = Downloader(
self.stop_ev,
self.cache,
self.url_queue,
self.http_client,
)
error_codes = [400, 401, 402, 403, 404, 422, 500, 502]
for ec in error_codes:
job = {"url": self.make_url("/error/%d" % ec)}
response = await downloader.HEAD(job)
self.assertEqual(response.status, ec, "Handler error: %d" % ec)
async def test_HEAD(self):
downloader = Downloader(
self.stop_ev,
self.cache,
self.url_queue,
self.http_client,
size_limit_kb=64,
)
requested_url = self.make_url("/html/1")
job = {"url": requested_url}
response = await downloader.HEAD(job)
self.assertEqual(response.status, 200, "HEAD")
async def test_GET(self):
downloader = Downloader(
self.stop_ev,
self.cache,
self.url_queue,
self.http_client,
size_limit_kb=64,
)
requested_url = self.make_url("/html/10")
job = {"url": requested_url}
url, headers, content = await downloader.GET(job)
self.assertEqual(url, requested_url)
self.assertTrue(headers)
self.assertTrue(content)
async def test_size_limit(self):
downloader = Downloader(
self.stop_ev,
self.cache,
self.url_queue,
self.http_client,
size_limit_kb=1,
)
requested_url = self.make_url("/html/1000")
job = {"url": requested_url}
with self.assertRaises(DownloaderError):
await downloader.GET(job)
async def test_reacts_to_stop(self):
downloader = Downloader(
self.stop_ev,
self.cache,
self.url_queue,
self.http_client,
size_limit_kb=1,
)
task = asyncio.create_task(downloader())
self.stop_ev.set()
try:
await asyncio.wait_for(asyncio.gather(task), timeout=1)
except (asyncio.TimeoutError, asyncio.CancelledError):
self.fail("Did not react to stop event")
async def test_run(self):
downloader = Downloader(
self.stop_ev,
self.cache,
self.url_queue,
self.http_client,
size_limit_kb=128,
whitelist=["localhost"],
)
n = 128
for i in range(n):
url = self.make_url("/html/%d" % i)
self.url_queue.put_nowait({"url": url})
tasks = [
asyncio.create_task(downloader())
for _ in range(8)
]
total_processed = 0
wait_interval = 0.1
max_wait_loops = 3 / wait_interval
while max_wait_loops and total_processed < n:
max_wait_loops -= 1
cur = self.dbconn.cursor()
cur.execute("SELECT COUNT(*) FROM urls WHERE mtime IS NOT NULL")
total_processed = cur.fetchone()[0]
await asyncio.sleep(wait_interval)
self.stop_ev.set()
await asyncio.wait_for(asyncio.gather(*tasks), timeout=1)
self.assertEqual(total_processed, n, "Processed all")
|
[
"asyncio.gather",
"urllib.parse.urljoin",
"asyncio.sleep",
"aiohttp.ClientSession",
"crawler.Downloader",
"asyncio.Queue"
] |
[((341, 356), 'asyncio.Queue', 'asyncio.Queue', ([], {}), '()\n', (354, 356), False, 'import asyncio\n'), ((441, 501), 'aiohttp.ClientSession', 'aiohttp.ClientSession', ([], {'headers': "{'User-Agent': 'Test Client'}"}), "(headers={'User-Agent': 'Test Client'})\n", (462, 501), False, 'import aiohttp\n'), ((733, 803), 'crawler.Downloader', 'Downloader', (['self.stop_ev', 'self.cache', 'self.url_queue', 'self.http_client'], {}), '(self.stop_ev, self.cache, self.url_queue, self.http_client)\n', (743, 803), False, 'from crawler import Downloader, DownloaderError, Worker\n'), ((1824, 1894), 'crawler.Downloader', 'Downloader', (['self.stop_ev', 'self.cache', 'self.url_queue', 'self.http_client'], {}), '(self.stop_ev, self.cache, self.url_queue, self.http_client)\n', (1834, 1894), False, 'from crawler import Downloader, DownloaderError, Worker\n'), ((2384, 2454), 'crawler.Downloader', 'Downloader', (['self.stop_ev', 'self.cache', 'self.url_queue', 'self.http_client'], {}), '(self.stop_ev, self.cache, self.url_queue, self.http_client)\n', (2394, 2454), False, 'from crawler import Downloader, DownloaderError, Worker\n'), ((2847, 2939), 'crawler.Downloader', 'Downloader', (['self.stop_ev', 'self.cache', 'self.url_queue', 'self.http_client'], {'size_limit_kb': '(64)'}), '(self.stop_ev, self.cache, self.url_queue, self.http_client,\n size_limit_kb=64)\n', (2857, 2939), False, 'from crawler import Downloader, DownloaderError, Worker\n'), ((3247, 3339), 'crawler.Downloader', 'Downloader', (['self.stop_ev', 'self.cache', 'self.url_queue', 'self.http_client'], {'size_limit_kb': '(64)'}), '(self.stop_ev, self.cache, self.url_queue, self.http_client,\n size_limit_kb=64)\n', (3257, 3339), False, 'from crawler import Downloader, DownloaderError, Worker\n'), ((3723, 3814), 'crawler.Downloader', 'Downloader', (['self.stop_ev', 'self.cache', 'self.url_queue', 'self.http_client'], {'size_limit_kb': '(1)'}), '(self.stop_ev, self.cache, self.url_queue, self.http_client,\n size_limit_kb=1)\n', (3733, 3814), False, 'from crawler import Downloader, DownloaderError, Worker\n'), ((4122, 4213), 'crawler.Downloader', 'Downloader', (['self.stop_ev', 'self.cache', 'self.url_queue', 'self.http_client'], {'size_limit_kb': '(1)'}), '(self.stop_ev, self.cache, self.url_queue, self.http_client,\n size_limit_kb=1)\n', (4132, 4213), False, 'from crawler import Downloader, DownloaderError, Worker\n'), ((4607, 4725), 'crawler.Downloader', 'Downloader', (['self.stop_ev', 'self.cache', 'self.url_queue', 'self.http_client'], {'size_limit_kb': '(128)', 'whitelist': "['localhost']"}), "(self.stop_ev, self.cache, self.url_queue, self.http_client,\n size_limit_kb=128, whitelist=['localhost'])\n", (4617, 4725), False, 'from crawler import Downloader, DownloaderError, Worker\n'), ((1022, 1092), 'crawler.Downloader', 'Downloader', (['self.stop_ev', 'self.cache', 'self.url_queue', 'self.http_client'], {}), '(self.stop_ev, self.cache, self.url_queue, self.http_client)\n', (1032, 1092), False, 'from crawler import Downloader, DownloaderError, Worker\n'), ((2227, 2247), 'urllib.parse.urljoin', 'urljoin', (['page_url', 'p'], {}), '(page_url, p)\n', (2234, 2247), False, 'from urllib.parse import urljoin\n'), ((5423, 5451), 'asyncio.sleep', 'asyncio.sleep', (['wait_interval'], {}), '(wait_interval)\n', (5436, 5451), False, 'import asyncio\n'), ((5511, 5533), 'asyncio.gather', 'asyncio.gather', (['*tasks'], {}), '(*tasks)\n', (5525, 5533), False, 'import asyncio\n'), ((4406, 4426), 'asyncio.gather', 'asyncio.gather', (['task'], {}), '(task)\n', 
(4420, 4426), False, 'import asyncio\n')]
|
from transformers import pipeline
import json
sentiment_analysis = pipeline(
"sentiment-analysis",
model="./model",
tokenizer="./model",
return_all_scores = True
)
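# Editorial note: with return_all_scores=True the pipeline returns, for each input
# text, a list of {"label": ..., "score": ...} dicts covering every class, so the
# handler below serializes a nested list structure.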
def handler(event, context):
print('Received event: ' + json.dumps(event, indent=2))
hebrew_text = event['hebrew_text']
result = sentiment_analysis(hebrew_text)
print('result: {}'.format(result))
return json.dumps(result)
|
[
"transformers.pipeline",
"json.dumps"
] |
[((69, 165), 'transformers.pipeline', 'pipeline', (['"""sentiment-analysis"""'], {'model': '"""./model"""', 'tokenizer': '"""./model"""', 'return_all_scores': '(True)'}), "('sentiment-analysis', model='./model', tokenizer='./model',\n return_all_scores=True)\n", (77, 165), False, 'from transformers import pipeline\n'), ((409, 427), 'json.dumps', 'json.dumps', (['result'], {}), '(result)\n', (419, 427), False, 'import json\n'), ((243, 270), 'json.dumps', 'json.dumps', (['event'], {'indent': '(2)'}), '(event, indent=2)\n', (253, 270), False, 'import json\n')]
|
from draw_image import make_image
from twitter_interface import get_twitter_api, fetch_statues
from config import TWITTER_HANDLERS
def run():
api = get_twitter_api()
for handler in TWITTER_HANDLERS:
statues, user = fetch_statues(api, handler, count=20)
for status in statues:
make_image(user.name, status, verbose=True)
if __name__ == '__main__':
run()
|
[
"twitter_interface.fetch_statues",
"twitter_interface.get_twitter_api",
"draw_image.make_image"
] |
[((154, 171), 'twitter_interface.get_twitter_api', 'get_twitter_api', ([], {}), '()\n', (169, 171), False, 'from twitter_interface import get_twitter_api, fetch_statues\n'), ((234, 271), 'twitter_interface.fetch_statues', 'fetch_statues', (['api', 'handler'], {'count': '(20)'}), '(api, handler, count=20)\n', (247, 271), False, 'from twitter_interface import get_twitter_api, fetch_statues\n'), ((316, 359), 'draw_image.make_image', 'make_image', (['user.name', 'status'], {'verbose': '(True)'}), '(user.name, status, verbose=True)\n', (326, 359), False, 'from draw_image import make_image\n')]
|
# -*- coding: utf-8 -*-
from pydatajson.readers import read_catalog
from pydatajson.reporting import generate_datasets_summary
from pydatajson.validators\
.distribution_download_urls_validator \
import DistributionDownloadUrlsValidator
class StatusIndicatorsGenerator(object):
def __init__(self, catalog, validator=None, verify_ssl=True,
url_check_timeout=1, threads_count=1):
self.download_url_ok = None
self.catalog = read_catalog(catalog)
self.summary = generate_datasets_summary(self.catalog,
validator=validator,
verify_ssl=verify_ssl)
self.verify_url = verify_ssl
self.url_check_timeout = url_check_timeout
self.threads_count = threads_count
def datasets_cant(self):
return len(self.summary)
def distribuciones_cant(self):
return sum(ds['cant_distribuciones'] for ds in self.summary)
def datasets_meta_ok_cant(self):
return sum(ds['estado_metadatos'] == 'OK' for ds in self.summary)
def datasets_meta_error_cant(self):
return sum(ds['estado_metadatos'] == 'ERROR' for ds in self.summary)
def datasets_meta_ok_pct(self):
return self._get_dataset_percentage(self.datasets_meta_ok_cant)
def datasets_con_datos_cant(self):
return sum(ds['tiene_datos'] == 'SI' for ds in self.summary)
def datasets_sin_datos_cant(self):
return sum(ds['tiene_datos'] == 'NO' for ds in self.summary)
def datasets_con_datos_pct(self):
return self._get_dataset_percentage(self.datasets_con_datos_cant)
def distribuciones_download_url_ok_cant(self):
if self.download_url_ok:
return self.download_url_ok
validator = DistributionDownloadUrlsValidator(
self.catalog, self.verify_url, self.url_check_timeout,
self.threads_count)
self.download_url_ok = validator.validate()
return self.download_url_ok
def distribuciones_download_url_error_cant(self):
return self.distribuciones_cant() - \
self.distribuciones_download_url_ok_cant()
def distribuciones_download_url_ok_pct(self):
total = self.distribuciones_cant()
if not total:
return None
return \
round(float(self.distribuciones_download_url_ok_cant()) / total, 4)
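    # Editorial comment: despite the *_pct naming, these helpers return a fraction in
    # [0, 1] rounded to 4 decimals (or None when the denominator is zero).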
def _get_dataset_percentage(self, indicator):
total = self.datasets_cant()
if not total:
return None
return round(float(indicator()) / total, 4)
|
[
"pydatajson.validators.distribution_download_urls_validator.DistributionDownloadUrlsValidator",
"pydatajson.readers.read_catalog",
"pydatajson.reporting.generate_datasets_summary"
] |
[((469, 490), 'pydatajson.readers.read_catalog', 'read_catalog', (['catalog'], {}), '(catalog)\n', (481, 490), False, 'from pydatajson.readers import read_catalog\n'), ((514, 602), 'pydatajson.reporting.generate_datasets_summary', 'generate_datasets_summary', (['self.catalog'], {'validator': 'validator', 'verify_ssl': 'verify_ssl'}), '(self.catalog, validator=validator, verify_ssl=\n verify_ssl)\n', (539, 602), False, 'from pydatajson.reporting import generate_datasets_summary\n'), ((1810, 1923), 'pydatajson.validators.distribution_download_urls_validator.DistributionDownloadUrlsValidator', 'DistributionDownloadUrlsValidator', (['self.catalog', 'self.verify_url', 'self.url_check_timeout', 'self.threads_count'], {}), '(self.catalog, self.verify_url, self.\n url_check_timeout, self.threads_count)\n', (1843, 1923), False, 'from pydatajson.validators.distribution_download_urls_validator import DistributionDownloadUrlsValidator\n')]
|
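# Editorial comment: the first two Solution classes below implement Boyer-Moore
# majority voting -- `vote` only drops to zero when the current candidate has been
# fully cancelled out, and an element occurring more than n/2 times can never be
# cancelled completely, so the surviving candidate is the majority element.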
from typing import List
class Solution:
def majorityElement(self, nums: List[int]) -> int:
vote = 0
result = None
n = len(nums) // 2
for num in nums:
if vote == 0:
result = num
if result == num:
vote += 1
if vote > n:
return result
else:
vote -= 1
return result
class Solution:
def majorityElement(self, nums: List[int]) -> int:
vote = 0
result = None
for num in nums:
if vote == 0:
result = num
if result == num:
vote += 1
else:
vote -= 1
return result
from collections import defaultdict
class Solution:
def majorityElement(self, nums: List[int]) -> int:
lenght = len(nums)
major = lenght // 2
hashMap = defaultdict(int)
for num in nums:
hashMap[num] += 1
if hashMap[num] > major:
return num
class Solution:
def majorityElement(self, nums: List[int]) -> int:
hashMap = {}
high = 0
output = 0
for num in nums:
if num not in hashMap:
hashMap[num] = 0
hashMap[num] += 1
if high < hashMap[num]:
high = hashMap[num]
output = num
return output
class Solution:
def majorityElement(self, nums: List[int]) -> int:
hashMap = defaultdict(int)
high = 0
output = 0
for num in nums:
hashMap[num] += 1
if high < hashMap[num]:
high = hashMap[num]
output = num
return output
|
[
"collections.defaultdict"
] |
[((919, 935), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (930, 935), False, 'from collections import defaultdict\n'), ((1527, 1543), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (1538, 1543), False, 'from collections import defaultdict\n')]
|
"""
cd G:\GitLab\pdf_extraction\ModularInsurance
python algo1.py
"""
import sys
import random
sys.setrecursionlimit(1500)
print(sys.getrecursionlimit())
def merge_sort(arr,n=None,pivot_index=None):
"""Returns sorted array"""
n = len(arr) if n is None else n
    if n <= 1:
return arr
if n==2:
a = arr[0]
b = arr[1]
srt = arr if a<b else arr[::-1]
return srt
    pivot_index = n // 2 if pivot_index is None else pivot_index  # int(n/2)-1 left an empty half for n == 3 and recursed forever
# pivot_element = arr[pivot_index]
left_arr = arr[:pivot_index]
right_arr = arr[pivot_index:]
left_sorted = merge_sort(left_arr, len(left_arr))
right_sorted = merge_sort(right_arr, len(right_arr))
len_left = len(left_sorted)
len_right = len(right_sorted)
i = 0
j = 0
big_old_arr = []
while True:
if i==len_left or j == len_right:
break
elif left_sorted[i] > right_sorted[j]:
big_old_arr.append(right_sorted[j])
j+=1
else:
big_old_arr.append(left_sorted[i])
i+=1
    # flush whatever remains of the half that was not exhausted
    big_old_arr.extend(left_sorted[i:])
    big_old_arr.extend(right_sorted[j:])
    return big_old_arr
if __name__ == "__main__":
arr = [4,8,1,5,7]
print(merge_sort(arr))
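    # expected output with the fixes above (editorial note): [1, 4, 5, 7, 8]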
if __name__ == "__main__2":
a = "FarhanHaiKhan"
sample_space = "<KEY>"
details = dict()
for each_letter in sample_space:
if each_letter in a:
details[each_letter] = a.count(each_letter)
else:
details[each_letter] = 0
for key in details:
val = details[key]
if not val== 0:
print(key, val)
|
[
"sys.getrecursionlimit",
"sys.setrecursionlimit"
] |
[((95, 122), 'sys.setrecursionlimit', 'sys.setrecursionlimit', (['(1500)'], {}), '(1500)\n', (116, 122), False, 'import sys\n'), ((129, 152), 'sys.getrecursionlimit', 'sys.getrecursionlimit', ([], {}), '()\n', (150, 152), False, 'import sys\n')]
|
import numpy as np
import pandas as pd
import xarray as xr
from shape import BBox
from .mot import Mot
class BboxMot(Mot):
def __init__(self, **kwargs):
"""
Ground truth stored in xarray.Dataset with frame and id coordinates (frames are 0-indexed).
Example:
<xarray.Dataset>
Dimensions: (frame: 5928, id: 5)
Coordinates:
* frame (frame) int64 0 1 2 3 4 5 6 ... 5922 5923 5924 5925 5926 5927
* id (id) int64 1 2 3 4 5
Data variables:
x (frame, id) float64 434.5 277.7 179.2 180.0 ... nan nan nan nan
y (frame, id) float64 279.0 293.6 407.9 430.0 ... nan nan nan nan
width (frame, id) float64 nan nan nan nan nan ... nan nan nan nan nan
height (frame, id) float64 nan nan nan nan nan ... nan nan nan nan nan
confidence (frame, id) float64 1.0 1.0 1.0 1.0 1.0 ... 1.0 1.0 1.0 1.0 1.0
"""
super(Mot, self).__init__(**kwargs)
def init_blank(self, frames, ids):
"""
Initialize blank ground truth.
:param frames: list of frames
:param ids: list of identities
"""
self.ds = xr.Dataset(
data_vars={
"x": (["frame", "id"], np.nan * np.ones((len(frames), len(ids)))),
"y": (["frame", "id"], np.nan * np.ones((len(frames), len(ids)))),
"width": (["frame", "id"], np.nan * np.ones((len(frames), len(ids)))),
"height": (["frame", "id"], np.nan * np.ones((len(frames), len(ids)))),
"confidence": (
["frame", "id"],
np.nan * np.ones((len(frames), len(ids))),
),
},
coords={"frame": frames, "id": ids},
)
def load(self, filename):
"""
Load Multiple Object Tacking Challenge trajectories file.
Format described in https://arxiv.org/abs/1603.00831, section 3.3 Data Format
Loads trajectories into a DataFrame, columns frame and id start with 1 (MATLAB indexing).
:param filename: mot filename_or_buffer or buffer
"""
df = pd.read_csv(
filename,
index_col=["frame", "id"],
names=["frame", "id", "x", "y", "width", "height", "confidence"],
converters={
"frame": lambda x: int(x) - 1,
"id": lambda x: int(x) - 1,
},
)
df[df == -1] = np.nan
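        # Editorial comment: the MOT text format uses -1 for unknown fields, so those
        # placeholders are converted to NaN before building the xarray Dataset.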
ds = df.to_xarray()
# ensure that all frames are in the Dataset
self.init_blank(list(range(ds.frame.min(), ds.frame.max())), ds.id)
self.ds = ds.merge(self.ds)
def save(self, filename, make_backup=False):
import datetime
import os
if make_backup and os.path.exists(filename):
dt = datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
os.rename(filename, filename[:-4] + "_" + dt + ".txt")
df = self.ds.to_dataframe().reset_index()
df[df.isna()] = -1
df["frame"] += 1
df["id"] += 1
df.to_csv(filename, index=False, header=False)
def get_bboxes(self, frame):
"""
Get GT bounding boxes in a frame.
The returned BBoxes include obj_id attribute.
:param frame: frame number
:return: list of bounding boxes (BBox)
"""
bboxes = []
for obj_id, obj in self.get_positions_dataframe(frame).iterrows():
if not (
np.isnan(obj.x)
or np.isnan(obj.y)
or np.isnan(obj.width)
or np.isnan(obj.height)
):
bbox = BBox.from_xywh(obj.x, obj.y, obj.width, obj.height, frame)
bbox.obj_id = obj_id
bboxes.append(bbox)
return bboxes
def get_object_distance(self, frame, obj_id, other):
"""
TODO bbox iou
:param frame:
:param obj_id:
:param other:
:return:
"""
assert False, "not implemented"
def draw_frame(self, img, frame, mapping=None):
"""
Draw objects on an image.
:param img: ndarray
:param frame: frame
:param mapping: mapping of ids, dict
:return: image
"""
if frame in self.ds.frame:
if self.colors is None:
self._init_draw()
if mapping is None:
mapping = dict(list(zip(self.ds.id.data, self.ds.id.data)))
for bbox in self.get_bboxes(frame):
bbox.draw_to_image(img, color=self.colors[mapping[bbox.obj_id]])
return img
|
[
"shape.BBox.from_xywh",
"os.rename",
"os.path.exists",
"numpy.isnan",
"datetime.datetime.now"
] |
[((2848, 2872), 'os.path.exists', 'os.path.exists', (['filename'], {}), '(filename)\n', (2862, 2872), False, 'import os\n'), ((2957, 3011), 'os.rename', 'os.rename', (['filename', "(filename[:-4] + '_' + dt + '.txt')"], {}), "(filename, filename[:-4] + '_' + dt + '.txt')\n", (2966, 3011), False, 'import os\n'), ((3730, 3788), 'shape.BBox.from_xywh', 'BBox.from_xywh', (['obj.x', 'obj.y', 'obj.width', 'obj.height', 'frame'], {}), '(obj.x, obj.y, obj.width, obj.height, frame)\n', (3744, 3788), False, 'from shape import BBox\n'), ((2891, 2914), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (2912, 2914), False, 'import datetime\n'), ((3562, 3577), 'numpy.isnan', 'np.isnan', (['obj.x'], {}), '(obj.x)\n', (3570, 3577), True, 'import numpy as np\n'), ((3597, 3612), 'numpy.isnan', 'np.isnan', (['obj.y'], {}), '(obj.y)\n', (3605, 3612), True, 'import numpy as np\n'), ((3632, 3651), 'numpy.isnan', 'np.isnan', (['obj.width'], {}), '(obj.width)\n', (3640, 3651), True, 'import numpy as np\n'), ((3671, 3691), 'numpy.isnan', 'np.isnan', (['obj.height'], {}), '(obj.height)\n', (3679, 3691), True, 'import numpy as np\n')]
|
#!/usr/bin/env python3
#
# filter out known vulnerabilities from wiz.io vulnerability report
#
# ./filter_vulnerabilities.py <report_file1> [<report_file2> ...]
#
# usage:
# ./filter_vulnerabilities.py data/vulnerability-reports/1644573599308653316.csv
# ./filter_vulnerabilities.py file1.csv file2.csv
# input file format:
# Created At,Title,Severity,Status,Resource Type,Resource external ID,Subscription ID,Project IDs,Project Names,Resolved Time,Resolution,Control ID,Resource Name,Resource Region,Resource Status,Resource Platform,Resource OS,Resource original JSON,Issue ID,Resource vertex ID,Ticket URLs
import logging
import os
import sys
sys.path.insert(0, os.path.dirname(__file__) + "/lib")
import wiz_io_tools.reports
from wiz_io_tools.reports_cli import configure_logger, parse_argv
# version string including prerelease and metadata (if applicable)
# major.minor.patch[-prerelease][+metadata]
VERSIONSTRING="0.1.0-alpha2"
LH = logging.getLogger()
if __name__ == "__main__":
configure_logger(LH, logging.INFO)
config = parse_argv(VERSIONSTRING)
ignored_issues = [
# [ issue.title, issue.resource.external_id ]
]
fixed_issues = [
# [ issue.title, issue.resource.external_id ]
]
exemptions = [
# [ issue.title, issue.resource.external_id ]
]
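    # Hedged illustration of the entry shape expected by the three lists above;
    # both values are made-up placeholders, not real findings:
    #   ["Example vulnerability title", "example-resource-external-id"]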
error_count = 0
issues = list()
for csvfile in config["reports"]:
try:
issues.extend(wiz_io_tools.reports.parse_issues_report(csvfile))
except FileNotFoundError as e:
LH.error("Skipping '%s': %s", csvfile, e.strerror)
error_count += 1
counter_ignored = 0
counter_already_fixed = 0
counter_exempted = 0
counters_severity = {
"LOW": 0,
"MEDIUM": 0,
"HIGH": 0,
"CRITICAL": 0,
}
for issue in issues:
counters_severity[issue.severity] += 1
if issue.severity in ["LOW", "MEDIUM"]:
continue
skip_issue = False
for ignored_issue in ignored_issues:
if issue.title == ignored_issue[0] and issue.resource.external_id == ignored_issue[1]:
counter_ignored += 1
skip_issue = True
break
for exemption in exemptions:
if issue.title == exemption[0] and issue.resource.external_id == exemption[1]:
counter_exempted += 1
skip_issue = True
break
for fixed_issue in fixed_issues:
if issue.title == fixed_issue[0] and issue.resource.external_id == fixed_issue[1]:
counter_already_fixed += 1
skip_issue = True
break
if skip_issue:
continue
# add additional filter conditions here
print("{:100s} {} {} {} <{}>".format(issue.title, issue.severity, issue.resource.name, issue.resource.type, issue.resource.external_id))
issue_count = len(issues)
if issue_count == 0:
LH.info("Found no issues. Awesome!")
else:
if counters_severity["CRITICAL"] == 0 and counters_severity["HIGH"] == 0:
LH.warning("Found %i issues. (no critical, no high)", issue_count, counters_severity["CRITICAL"], counters_severity["HIGH"])
else:
LH.error("Found %i issues. (critical: %i, high: %i)", issue_count, counters_severity["CRITICAL"], counters_severity["HIGH"])
LH.info("(%i already fixed, %i exempted, %i ignored)", counter_already_fixed, counter_exempted, counter_ignored)
if error_count:
LH.warning("Encountered %i error(s)! Please verify input.", error_count)
|
[
"wiz_io_tools.reports_cli.parse_argv",
"os.path.dirname",
"wiz_io_tools.reports_cli.configure_logger",
"logging.getLogger"
] |
[((955, 974), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (972, 974), False, 'import logging\n'), ((1007, 1041), 'wiz_io_tools.reports_cli.configure_logger', 'configure_logger', (['LH', 'logging.INFO'], {}), '(LH, logging.INFO)\n', (1023, 1041), False, 'from wiz_io_tools.reports_cli import configure_logger, parse_argv\n'), ((1056, 1081), 'wiz_io_tools.reports_cli.parse_argv', 'parse_argv', (['VERSIONSTRING'], {}), '(VERSIONSTRING)\n', (1066, 1081), False, 'from wiz_io_tools.reports_cli import configure_logger, parse_argv\n'), ((675, 700), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (690, 700), False, 'import os\n')]
|
from subprocess import Popen, PIPE, STDOUT
import shlex
import time
import sys
import psutil
import os, signal
mod = 'aa' # first good moving camera data generated with carla version 0.97
mod = 'test' # 100 views
#mod = 'ab' # 88 views, yaw 0 to 359 - 5 scenes test
mod = 'ac' # 51 views, yaw 0 to 359 - 10 scenes
mod = 'ad' # 51 views, yaw 0 to 359 - 100 scenes
mod = 'ae' # 51 views, yaw 0 to 359 - 10 scenes
mod = 'af' # 51 views, yaw 0 to 359 - 10 scenes
mod = 'ag' # 51 views, yaw 0 to 359 - 10 scenes
mod = 'test' # 51 views, yaw 0 to 359 - 10 scenes
mod = 'ah' # 51 views, yaw 0 to 359 - 5 scenes
mod = 'ai' # 51 views, yaw 0 to 359 - 10 scenes
mod = 'aj' # 51 views, yaw 0 to 359 - 10 scenes
mod = 'ak' # 51 views, yaw 0 to 359 - 300 scenes
mod = 'test' # 51 views, yaw 0 to 359 - 10 scenes
#mod = 'rotaa'#43 views, 40 vehicles,
mod = 'rot_ab'#43 views, 35 vehicles,
mod = 'rot_ac'#43 views, 35 vehicles,
mod = 'test' # 51 views, yaw 0 to 359 - 10 scenes
# mod = 'ep_aa' # 51 views, yaw 0 to 359 - 10 scenes
mod = 'hiaa'
mod = 'hiab'
mod = 'hiac'
mod = 'hiad'
mod = 'hiae'
mod = 'hiaf'
mod = 'hiag'
mod = 'hiah'
mod = 'hiai'
mod = 'hiaj'
mod = 'hiak'
mod = 'hial'
mod = 'hiam'
mod = 'hian'
mod = 'hiao'
mod = 'hiap' # The above are all 43 views, 30 vehicles
mod = 'test'
mod = 'vehaa' # two vehicles, rotate around the two vehicles, second veh at least 5 meters away
mod = 'vehab' # no bikes, two vehicles, rotate around the two vehicles, second veh at least 5 meters away
mod = 'test'
mod = 'mr06' # with segmentation masks
save_dir = '/hdd/gsarch/data'
carla_sim = "/hdd/carla97/CarlaUE4.sh -carla-server -windows -ResX=100 -ResY=100 -benchmark"
carla_sim_args = shlex.split(carla_sim)
cnt = 0
for i in range(0,100):
p1 = Popen(carla_sim_args, stdout=PIPE, stderr=PIPE)
time.sleep(10)
print("Number of times carla simulator started: ", cnt)
cnt+=1
p2 = Popen(["python3.5","new_static_cam_2veh_sem.py", str(i), mod, save_dir], stdout=PIPE, stderr=PIPE)
time.sleep(1)
out, err = p2.communicate()
print(out.decode('utf-8'))
print(err.decode('utf-8'))
# for line in out.decode("utf-8").split('\\n'):
# print('\t' + line)
# print('ERROR')
# for line in err.decode("utf-8").split('\\n'):
# print('\t' + line)
p1.terminate()
time.sleep(5)
# Iterate over all running process
for proc in psutil.process_iter():
try:
# Get process name & pid from process object.
processName = proc.name()
processID = proc.pid
#print(processName , ' ::: ', processID)
if 'Carla' in processName:
print("PROCESS FOUND")
print(processName)
os.kill(processID, signal.SIGSTOP)
print("PROCESS STOPPED")
time.sleep(5)
except (psutil.NoSuchProcess, psutil.AccessDenied, psutil.ZombieProcess):
pass
for proc in psutil.process_iter():
try:
# Get process name & pid from process object.
processName = proc.name()
processID = proc.pid
#print(processName , ' ::: ', processID)
if 'Carla' in processName:
print("PROCESS FOUND")
print(processName)
os.kill(processID, signal.SIGKILL)
print("PROCESS KILLED")
time.sleep(5)
except (psutil.NoSuchProcess, psutil.AccessDenied, psutil.ZombieProcess):
pass
print("Done with single iteration. Terminating everything")
print("==========================================================")
p2.terminate()
time.sleep(10)
|
[
"psutil.process_iter",
"subprocess.Popen",
"shlex.split",
"time.sleep",
"os.kill"
] |
[((1691, 1713), 'shlex.split', 'shlex.split', (['carla_sim'], {}), '(carla_sim)\n', (1702, 1713), False, 'import shlex\n'), ((1754, 1801), 'subprocess.Popen', 'Popen', (['carla_sim_args'], {'stdout': 'PIPE', 'stderr': 'PIPE'}), '(carla_sim_args, stdout=PIPE, stderr=PIPE)\n', (1759, 1801), False, 'from subprocess import Popen, PIPE, STDOUT\n'), ((1806, 1820), 'time.sleep', 'time.sleep', (['(10)'], {}), '(10)\n', (1816, 1820), False, 'import time\n'), ((2004, 2017), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (2014, 2017), False, 'import time\n'), ((2324, 2337), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (2334, 2337), False, 'import time\n'), ((2393, 2414), 'psutil.process_iter', 'psutil.process_iter', ([], {}), '()\n', (2412, 2414), False, 'import psutil\n'), ((2962, 2983), 'psutil.process_iter', 'psutil.process_iter', ([], {}), '()\n', (2981, 2983), False, 'import psutil\n'), ((3673, 3687), 'time.sleep', 'time.sleep', (['(10)'], {}), '(10)\n', (3683, 3687), False, 'import time\n'), ((2740, 2774), 'os.kill', 'os.kill', (['processID', 'signal.SIGSTOP'], {}), '(processID, signal.SIGSTOP)\n', (2747, 2774), False, 'import os, signal\n'), ((2833, 2846), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (2843, 2846), False, 'import time\n'), ((3309, 3343), 'os.kill', 'os.kill', (['processID', 'signal.SIGKILL'], {}), '(processID, signal.SIGKILL)\n', (3316, 3343), False, 'import os, signal\n'), ((3401, 3414), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (3411, 3414), False, 'import time\n')]
|
# (C) Copyright 2007-2020 Enthought, Inc., Austin, TX
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in LICENSE.txt and may be redistributed only under
# the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
#
# Thanks for using Enthought open source!
""" The interface for action manager builders. """
# Enthought library imports.
from traits.api import Interface, List
# Local imports.
from .action_set import ActionSet
class IActionManagerBuilder(Interface):
""" The interface for action manager builders.
An action manager builder populates action managers (i.e. menus, menu bars
and tool bars) from the menus, groups and actions defined in its action
sets.
"""
# The action sets used by the builder.
action_sets = List(ActionSet)
def initialize_action_manager(self, action_manager, root):
""" Initialize an action manager from the builder's action sets.
"""
|
[
"traits.api.List"
] |
[((910, 925), 'traits.api.List', 'List', (['ActionSet'], {}), '(ActionSet)\n', (914, 925), False, 'from traits.api import Interface, List\n')]
|
import dofus_protocol as dp
from fm_state import FMState
def packet_handle(pkt: dp.DofusPacket):
state.update(pkt)
state = FMState()
listener = dp.DofusListener(packet_handle)
|
[
"fm_state.FMState",
"dofus_protocol.DofusListener"
] |
[((128, 137), 'fm_state.FMState', 'FMState', ([], {}), '()\n', (135, 137), False, 'from fm_state import FMState\n'), ((149, 180), 'dofus_protocol.DofusListener', 'dp.DofusListener', (['packet_handle'], {}), '(packet_handle)\n', (165, 180), True, 'import dofus_protocol as dp\n')]
|
# This is an auto-generated Django model module.
# You'll have to do the following manually to clean this up:
# * Rearrange models' order
# * Make sure each model has one field with primary_key=True
# * Make sure each ForeignKey has `on_delete` set to the desired behavior.
# * Remove `managed = False` lines if you wish to allow Django to create, modify, and delete the table
# Feel free to rename the models, but don't rename db_table values or field names.
from __future__ import unicode_literals
from django.db import models
class Users(models.Model):
# Primary Store connection details
temporaryCredentialsRequestUrl = models.CharField(max_length=254,null=True)
adminAuthorizationUrl = models.CharField(max_length=254,null=True)
accessTokenRequestUrl = models.CharField(max_length=254,null=True)
apiUrl = models.CharField(max_length=254,null=True)
consumerKey = models.CharField(max_length=254,null=True)
consumerSecret = models.CharField(max_length=254,null=True)
# Store details
password = models.CharField(max_length=128)
last_login = models.DateTimeField(blank=True, null=True)
is_superuser = models.IntegerField(null=True)
first_name = models.CharField(max_length=30)
last_name = models.CharField(max_length=30)
email = models.CharField(max_length=254)
registered_at = models.IntegerField(null=True)
webstore_url = models.CharField(max_length=254,null=True)
webstore_platform = models.CharField(max_length=254,null=True)
class Stores(models.Model):
name = models.CharField(max_length=100)
|
[
"django.db.models.CharField",
"django.db.models.DateTimeField",
"django.db.models.IntegerField"
] |
[((643, 686), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(254)', 'null': '(True)'}), '(max_length=254, null=True)\n', (659, 686), False, 'from django.db import models\n'), ((714, 757), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(254)', 'null': '(True)'}), '(max_length=254, null=True)\n', (730, 757), False, 'from django.db import models\n'), ((785, 828), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(254)', 'null': '(True)'}), '(max_length=254, null=True)\n', (801, 828), False, 'from django.db import models\n'), ((841, 884), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(254)', 'null': '(True)'}), '(max_length=254, null=True)\n', (857, 884), False, 'from django.db import models\n'), ((902, 945), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(254)', 'null': '(True)'}), '(max_length=254, null=True)\n', (918, 945), False, 'from django.db import models\n'), ((966, 1009), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(254)', 'null': '(True)'}), '(max_length=254, null=True)\n', (982, 1009), False, 'from django.db import models\n'), ((1045, 1077), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(128)'}), '(max_length=128)\n', (1061, 1077), False, 'from django.db import models\n'), ((1095, 1138), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (1115, 1138), False, 'from django.db import models\n'), ((1158, 1188), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (1177, 1188), False, 'from django.db import models\n'), ((1206, 1237), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(30)'}), '(max_length=30)\n', (1222, 1237), False, 'from django.db import models\n'), ((1254, 1285), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(30)'}), '(max_length=30)\n', (1270, 1285), False, 'from django.db import models\n'), ((1298, 1330), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(254)'}), '(max_length=254)\n', (1314, 1330), False, 'from django.db import models\n'), ((1351, 1381), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (1370, 1381), False, 'from django.db import models\n'), ((1401, 1444), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(254)', 'null': '(True)'}), '(max_length=254, null=True)\n', (1417, 1444), False, 'from django.db import models\n'), ((1468, 1511), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(254)', 'null': '(True)'}), '(max_length=254, null=True)\n', (1484, 1511), False, 'from django.db import models\n'), ((1551, 1583), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (1567, 1583), False, 'from django.db import models\n')]
|
from typing import List
import pytest
from backend.biz.policy import InstanceBean, InstanceBeanList, PathNodeBean, PathNodeBeanList
from backend.common.error_codes import APIException
from backend.service.models import PathResourceType, ResourceTypeDict
from backend.service.models.instance_selection import InstanceSelection
@pytest.fixture()
def path_node_bean():
return PathNodeBean(
id="id", name="name", system_id="system_id", type="type", type_name="type_name", type_name_en="type_name_en"
)
@pytest.fixture()
def resource_type_dict():
return ResourceTypeDict(data={("system_id", "type"): {"name": "name_test", "name_en": "name_en_test"}})
class TestPathNodeBean:
def test_fill_empty_fields(self, path_node_bean: PathNodeBean, resource_type_dict: ResourceTypeDict):
path_node_bean.fill_empty_fields(resource_type_dict)
assert path_node_bean.type_name == "name_test" and path_node_bean.type_name_en == "name_en_test"
@pytest.mark.parametrize(
"resource_system_id, resource_type_id, expected",
[
("system_id", "type", True),
("system_id_no", "type", False),
("system_id", "type_no", False),
],
)
def test_match_resource_type(self, path_node_bean: PathNodeBean, resource_system_id, resource_type_id, expected):
assert path_node_bean.match_resource_type(resource_system_id, resource_type_id) == expected
def test_to_path_resource_type(self, path_node_bean: PathNodeBean):
assert path_node_bean.to_path_resource_type() == PathResourceType(
system_id=path_node_bean.system_id, id=path_node_bean.type
)
@pytest.fixture()
def path_node_bean_list(path_node_bean: PathNodeBean):
path_node_bean1 = path_node_bean.copy(deep=True)
path_node_bean1.id = "id1"
path_node_bean1.name = "name1"
path_node_bean1.type = "type1"
path_node_bean1.type_name = "type_name1"
path_node_bean1.type_name_en = "type_name_en1"
return PathNodeBeanList(
nodes=[
path_node_bean,
path_node_bean1,
]
)
def gen_instance_selection(chian: List, ignore_iam_path=False) -> InstanceSelection:
return InstanceSelection(
id="id",
system_id="system_id",
name="name",
name_en="name_en",
ignore_iam_path=ignore_iam_path,
resource_type_chain=chian,
)
class TestPathNodeBeanList:
def test_dict(self, path_node_bean_list: PathNodeBeanList):
assert path_node_bean_list.dict() == [
{
"id": "id",
"name": "name",
"system_id": "system_id",
"type": "type",
"type_name": "type_name",
"type_name_en": "type_name_en",
},
{
"id": "id1",
"name": "name1",
"system_id": "system_id",
"type": "type1",
"type_name": "type_name1",
"type_name_en": "type_name_en1",
},
]
def test_to_path_string(self, path_node_bean_list: PathNodeBeanList):
assert path_node_bean_list.to_path_string() == "/type,id/type1,id1/"
def test_to_path_resource_types(self, path_node_bean_list: PathNodeBeanList):
assert path_node_bean_list._to_path_resource_types() == [
PathResourceType(system_id="system_id", id="type"),
PathResourceType(system_id="system_id", id="type1"),
]
def test_display(self, path_node_bean_list: PathNodeBeanList):
assert path_node_bean_list.display() == "type:name/type1:name1"
def test_match_selection_one_node(self, path_node_bean_list: PathNodeBeanList):
path_node_bean_list.nodes.pop()
assert path_node_bean_list.match_selection("system_id", "type", None)
@pytest.mark.parametrize(
"instance_selection, expected",
[
(
gen_instance_selection(
[{"system_id": "system_id", "id": "type"}, {"system_id": "system_id", "id": "type1"}]
),
True,
),
(gen_instance_selection([{"system_id": "system_id", "id": "type"}]), False),
],
)
def test_match_selection(self, path_node_bean_list: PathNodeBeanList, instance_selection, expected):
assert path_node_bean_list.match_selection("system_id", "type", instance_selection) == expected
@pytest.mark.parametrize(
"instance_selection, start, end",
[
(
gen_instance_selection(
[{"system_id": "system_id", "id": "type"}, {"system_id": "system_id", "id": "type1"}],
ignore_iam_path=True,
),
1,
2,
),
(gen_instance_selection([{"system_id": "system_id", "id": "type"}]), 0, 2),
],
)
def test_ignore_path(self, path_node_bean_list: PathNodeBeanList, instance_selection, start, end):
assert path_node_bean_list.ignore_path(instance_selection) == path_node_bean_list.nodes[start:end]
@pytest.fixture()
def instance_bean(path_node_bean: PathNodeBean):
path_node_bean1 = path_node_bean.copy(deep=True)
path_node_bean1.id = "id1"
path_node_bean1.name = "name1"
path_node_bean1.type = "type1"
path_node_bean1.type_name = "type_name1"
path_node_bean1.type_name_en = "type_name_en1"
return InstanceBean(path=[[path_node_bean, path_node_bean1]], type="type")
def gen_paths():
return [
[
PathNodeBean(
id="id",
name="name",
system_id="system_id",
type="type",
type_name="type_name",
type_name_en="type_name_en",
),
PathNodeBean(
id="id1",
name="name1",
system_id="system_id",
type="type1",
type_name="type_name1",
type_name_en="type_name_en1",
),
]
]
class TestInstanceBean:
def test_fill_empty_fields(self, instance_bean: InstanceBean, resource_type_dict: ResourceTypeDict):
instance_bean.fill_empty_fields(resource_type_dict)
assert instance_bean.name == "name_test"
assert instance_bean.name_en == "name_en_test"
assert instance_bean.path[0][0].type_name == "name_test"
assert instance_bean.path[0][0].type_name_en == "name_en_test"
assert instance_bean.path[0][1].type_name == ""
assert instance_bean.path[0][1].type_name_en == ""
def test_iter_path_node(self, instance_bean: InstanceBean):
assert list(instance_bean.iter_path_node()) == instance_bean.path[0]
def test_get_system_id_set(self, instance_bean: InstanceBean):
assert instance_bean.get_system_id_set() == {"system_id"}
@pytest.mark.parametrize(
"paths, length",
[
(gen_paths(), 1),
([[gen_paths()[0][0]]], 2),
],
)
def test_add_paths(self, instance_bean: InstanceBean, paths, length):
instance_bean.add_paths(paths)
assert len(instance_bean.path) == length
@pytest.mark.parametrize(
"paths, length",
[
(gen_paths(), 0),
([[gen_paths()[0][0]]], 1),
],
)
def test_remove_paths(self, instance_bean: InstanceBean, paths, length):
instance_bean.remove_paths(paths)
assert len(instance_bean.path) == length
def test_is_empty(self, instance_bean: InstanceBean):
assert not instance_bean.is_empty
instance_bean.path.pop()
assert instance_bean.is_empty
def test_count(self, instance_bean: InstanceBean):
assert instance_bean.count() == 1
@pytest.mark.parametrize(
"instance_selection, length",
[
(
gen_instance_selection(
[{"system_id": "system_id", "id": "type"}, {"system_id": "system_id", "id": "type1"}]
),
1,
),
(gen_instance_selection([{"system_id": "system_id", "id": "type"}]), 0),
],
)
def test_clone_and_filter_by_instance_selections(self, instance_bean: InstanceBean, instance_selection, length):
instance_bean1 = instance_bean.clone_and_filter_by_instance_selections(
"system_id", "type", [instance_selection]
)
if instance_bean1 is not None:
assert len(instance_bean1.path) == length
else:
assert 0 == length
@pytest.mark.parametrize(
"instance_selection, raise_exception",
[
(
gen_instance_selection(
[{"system_id": "system_id", "id": "type"}, {"system_id": "system_id", "id": "type1"}]
),
False,
),
(gen_instance_selection([{"system_id": "system_id", "id": "type"}]), True),
],
)
def test_check_instance_selection(self, instance_bean: InstanceBean, instance_selection, raise_exception):
try:
instance_bean.check_instance_selection("system_id", "type", [instance_selection])
assert not raise_exception
except APIException:
assert raise_exception
def test_check_instance_selection_ignore_path(self, instance_bean: InstanceBean):
instance_selection = gen_instance_selection(
[{"system_id": "system_id", "id": "type"}, {"system_id": "system_id", "id": "type1"}], ignore_iam_path=True
)
instance_bean.check_instance_selection("system_id", "type", [instance_selection], ignore_path=True)
assert len(instance_bean.path[0]) == 1
@pytest.fixture()
def instance_bean_list(instance_bean: InstanceBean):
instance_bean1 = instance_bean.copy(deep=True)
instance_bean1.type = "type1"
return InstanceBeanList([instance_bean, instance_bean1])
class TestInstanceBeanList:
def test_get(self, instance_bean_list: InstanceBeanList):
assert instance_bean_list.get("type").type == "type"
assert instance_bean_list.get("test") is None
def test_add(self, instance_bean_list: InstanceBeanList):
instance_bean_list1 = InstanceBeanList([instance_bean_list.instances.pop()])
instance_bean_list._instance_dict.pop("type1")
assert len(instance_bean_list.instances) == 1
instance_bean_list.add(instance_bean_list1)
assert len(instance_bean_list.instances) == 2
instance_bean_list.add(instance_bean_list1)
assert instance_bean_list.instances[1].type == "type1"
assert len(instance_bean_list.instances[1].path) == 1
instance_bean_list1 = InstanceBeanList([instance_bean_list.instances.pop()])
instance_bean_list._instance_dict.pop("type1")
assert len(instance_bean_list.instances) == 1
instance_bean_list1.instances[0].type = "type"
instance_bean_list1.instances[0].path[0][-1].id = "id2"
instance_bean_list.add(instance_bean_list1)
assert len(instance_bean_list.instances) == 1
assert len(instance_bean_list.instances[0].path) == 2
def test_sub(self, instance_bean_list: InstanceBeanList):
instance_bean_list1 = InstanceBeanList([instance_bean_list.instances.pop()])
instance_bean_list._instance_dict.pop("type1")
assert len(instance_bean_list.instances) == 1
instance_bean_list1.instances[0].type = "type"
instance_bean_list1._instance_dict.pop("type1")
instance_bean_list1._instance_dict["type"] = instance_bean_list1.instances[0]
instance_bean_list.sub(instance_bean_list1)
assert len(instance_bean_list.instances) == 0
"""
PolicyBeanList sub
1. 需要sub没有关联资源类型的操作
2. 需要sub都是任意的操作
"""
|
[
"backend.biz.policy.InstanceBean",
"backend.biz.policy.PathNodeBean",
"backend.service.models.ResourceTypeDict",
"backend.service.models.instance_selection.InstanceSelection",
"backend.biz.policy.PathNodeBeanList",
"pytest.fixture",
"backend.biz.policy.InstanceBeanList",
"backend.service.models.PathResourceType",
"pytest.mark.parametrize"
] |
[((331, 347), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (345, 347), False, 'import pytest\n'), ((521, 537), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (535, 537), False, 'import pytest\n'), ((1667, 1683), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (1681, 1683), False, 'import pytest\n'), ((5135, 5151), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (5149, 5151), False, 'import pytest\n'), ((9767, 9783), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (9781, 9783), False, 'import pytest\n'), ((381, 507), 'backend.biz.policy.PathNodeBean', 'PathNodeBean', ([], {'id': '"""id"""', 'name': '"""name"""', 'system_id': '"""system_id"""', 'type': '"""type"""', 'type_name': '"""type_name"""', 'type_name_en': '"""type_name_en"""'}), "(id='id', name='name', system_id='system_id', type='type',\n type_name='type_name', type_name_en='type_name_en')\n", (393, 507), False, 'from backend.biz.policy import InstanceBean, InstanceBeanList, PathNodeBean, PathNodeBeanList\n'), ((575, 675), 'backend.service.models.ResourceTypeDict', 'ResourceTypeDict', ([], {'data': "{('system_id', 'type'): {'name': 'name_test', 'name_en': 'name_en_test'}}"}), "(data={('system_id', 'type'): {'name': 'name_test',\n 'name_en': 'name_en_test'}})\n", (591, 675), False, 'from backend.service.models import PathResourceType, ResourceTypeDict\n'), ((976, 1156), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""resource_system_id, resource_type_id, expected"""', "[('system_id', 'type', True), ('system_id_no', 'type', False), ('system_id',\n 'type_no', False)]"], {}), "('resource_system_id, resource_type_id, expected', [\n ('system_id', 'type', True), ('system_id_no', 'type', False), (\n 'system_id', 'type_no', False)])\n", (999, 1156), False, 'import pytest\n'), ((2000, 2057), 'backend.biz.policy.PathNodeBeanList', 'PathNodeBeanList', ([], {'nodes': '[path_node_bean, path_node_bean1]'}), '(nodes=[path_node_bean, path_node_bean1])\n', (2016, 2057), False, 'from backend.biz.policy import InstanceBean, InstanceBeanList, PathNodeBean, PathNodeBeanList\n'), ((2205, 2351), 'backend.service.models.instance_selection.InstanceSelection', 'InstanceSelection', ([], {'id': '"""id"""', 'system_id': '"""system_id"""', 'name': '"""name"""', 'name_en': '"""name_en"""', 'ignore_iam_path': 'ignore_iam_path', 'resource_type_chain': 'chian'}), "(id='id', system_id='system_id', name='name', name_en=\n 'name_en', ignore_iam_path=ignore_iam_path, resource_type_chain=chian)\n", (2222, 2351), False, 'from backend.service.models.instance_selection import InstanceSelection\n'), ((5462, 5529), 'backend.biz.policy.InstanceBean', 'InstanceBean', ([], {'path': '[[path_node_bean, path_node_bean1]]', 'type': '"""type"""'}), "(path=[[path_node_bean, path_node_bean1]], type='type')\n", (5474, 5529), False, 'from backend.biz.policy import InstanceBean, InstanceBeanList, PathNodeBean, PathNodeBeanList\n'), ((9933, 9982), 'backend.biz.policy.InstanceBeanList', 'InstanceBeanList', (['[instance_bean, instance_bean1]'], {}), '([instance_bean, instance_bean1])\n', (9949, 9982), False, 'from backend.biz.policy import InstanceBean, InstanceBeanList, PathNodeBean, PathNodeBeanList\n'), ((1565, 1641), 'backend.service.models.PathResourceType', 'PathResourceType', ([], {'system_id': 'path_node_bean.system_id', 'id': 'path_node_bean.type'}), '(system_id=path_node_bean.system_id, id=path_node_bean.type)\n', (1581, 1641), False, 'from backend.service.models import PathResourceType, ResourceTypeDict\n'), ((5584, 5710), 
'backend.biz.policy.PathNodeBean', 'PathNodeBean', ([], {'id': '"""id"""', 'name': '"""name"""', 'system_id': '"""system_id"""', 'type': '"""type"""', 'type_name': '"""type_name"""', 'type_name_en': '"""type_name_en"""'}), "(id='id', name='name', system_id='system_id', type='type',\n type_name='type_name', type_name_en='type_name_en')\n", (5596, 5710), False, 'from backend.biz.policy import InstanceBean, InstanceBeanList, PathNodeBean, PathNodeBeanList\n'), ((5831, 5962), 'backend.biz.policy.PathNodeBean', 'PathNodeBean', ([], {'id': '"""id1"""', 'name': '"""name1"""', 'system_id': '"""system_id"""', 'type': '"""type1"""', 'type_name': '"""type_name1"""', 'type_name_en': '"""type_name_en1"""'}), "(id='id1', name='name1', system_id='system_id', type='type1',\n type_name='type_name1', type_name_en='type_name_en1')\n", (5843, 5962), False, 'from backend.biz.policy import InstanceBean, InstanceBeanList, PathNodeBean, PathNodeBeanList\n'), ((3377, 3427), 'backend.service.models.PathResourceType', 'PathResourceType', ([], {'system_id': '"""system_id"""', 'id': '"""type"""'}), "(system_id='system_id', id='type')\n", (3393, 3427), False, 'from backend.service.models import PathResourceType, ResourceTypeDict\n'), ((3441, 3492), 'backend.service.models.PathResourceType', 'PathResourceType', ([], {'system_id': '"""system_id"""', 'id': '"""type1"""'}), "(system_id='system_id', id='type1')\n", (3457, 3492), False, 'from backend.service.models import PathResourceType, ResourceTypeDict\n')]
|
from robotpy_ext.autonomous import StatefulAutonomous, timed_state, state
class DriveForward(StatefulAutonomous):
DEFAULT = False
MODE_NAME = 'Drive Forward'
def initialize(self):
self.drive.setAutoSetpoint(696.75*10.5*12)
@timed_state(duration=0.5, next_state='drive_forward', first=True)
def drive_wait(self):
self.drive.driveMeBoi(0, 0)
self.drive.resetEncoders()
@timed_state(duration=1.5, next_state='stop')
def drive_forward(self):
self.drive.autoForward.enable()
self.drive.driveMeBoi(0, 0)
if not self.drive.autoForward.isEnabled():
self.next_state('stop')
@state()
def stop(self):
self.drive.driveMeBoi(0, 0)
|
[
"robotpy_ext.autonomous.state",
"robotpy_ext.autonomous.timed_state"
] |
[((256, 321), 'robotpy_ext.autonomous.timed_state', 'timed_state', ([], {'duration': '(0.5)', 'next_state': '"""drive_forward"""', 'first': '(True)'}), "(duration=0.5, next_state='drive_forward', first=True)\n", (267, 321), False, 'from robotpy_ext.autonomous import StatefulAutonomous, timed_state, state\n'), ((427, 471), 'robotpy_ext.autonomous.timed_state', 'timed_state', ([], {'duration': '(1.5)', 'next_state': '"""stop"""'}), "(duration=1.5, next_state='stop')\n", (438, 471), False, 'from robotpy_ext.autonomous import StatefulAutonomous, timed_state, state\n'), ((670, 677), 'robotpy_ext.autonomous.state', 'state', ([], {}), '()\n', (675, 677), False, 'from robotpy_ext.autonomous import StatefulAutonomous, timed_state, state\n')]
|
import os
import shutil
class LocalFileStorage(object):
def __init__(self, path):
self.path = path
self.filename = os.path.basename(path)
def list(self, relative=False):
matches = []
for root, dirnames, filenames in os.walk(self.path):
for filename in filenames:
file_path = os.path.join(root, filename)
if relative:
file_path = os.path.relpath(file_path, self.path)
matches.append(file_path)
return matches
def sync_to(self, output_path):
if not os.path.exists(self.path):
return
if self.is_dir(self.path):
input_paths = self.list(relative=True)
if not os.path.exists(output_path):
os.makedirs(output_path)
output_paths = LocalFileStorage(output_path).list(relative=True)
new_paths = set(input_paths) - set(output_paths)
for path in new_paths:
LocalFileStorage(os.path.join(self.path, path)).sync_to(os.path.join(output_path, path))
else:
output_dir = os.path.dirname(output_path)
if not os.path.exists(output_dir):
os.makedirs(output_dir)
shutil.copyfile(self.path, output_path)
def sync_from(self, input_path):
if not os.path.exists(input_path):
return
if self.is_dir(input_path):
input_paths = LocalFileStorage(input_path).list(relative=True)
if not os.path.exists(self.path):
os.makedirs(self.path)
output_paths = self.list(relative=True)
new_paths = set(input_paths) - set(output_paths)
for path in new_paths:
LocalFileStorage(os.path.join(self.path, path)).sync_from(os.path.join(input_path, path))
else:
output_dir = os.path.dirname(self.path)
if not os.path.exists(output_dir):
os.makedirs(output_dir)
shutil.copyfile(input_path, self.path)
def is_dir(self, path=None):
if path is None:
path = self.path
_, extension = os.path.splitext(path)
return os.path.isdir(path) or len(extension) == 0 or path.endswith('/')
|
[
"os.makedirs",
"os.path.basename",
"os.path.isdir",
"os.path.dirname",
"os.walk",
"os.path.exists",
"os.path.splitext",
"os.path.relpath",
"shutil.copyfile",
"os.path.join"
] |
[((138, 160), 'os.path.basename', 'os.path.basename', (['path'], {}), '(path)\n', (154, 160), False, 'import os\n'), ((260, 278), 'os.walk', 'os.walk', (['self.path'], {}), '(self.path)\n', (267, 278), False, 'import os\n'), ((2163, 2185), 'os.path.splitext', 'os.path.splitext', (['path'], {}), '(path)\n', (2179, 2185), False, 'import os\n'), ((592, 617), 'os.path.exists', 'os.path.exists', (['self.path'], {}), '(self.path)\n', (606, 617), False, 'import os\n'), ((1130, 1158), 'os.path.dirname', 'os.path.dirname', (['output_path'], {}), '(output_path)\n', (1145, 1158), False, 'import os\n'), ((1258, 1297), 'shutil.copyfile', 'shutil.copyfile', (['self.path', 'output_path'], {}), '(self.path, output_path)\n', (1273, 1297), False, 'import shutil\n'), ((1351, 1377), 'os.path.exists', 'os.path.exists', (['input_path'], {}), '(input_path)\n', (1365, 1377), False, 'import os\n'), ((1887, 1913), 'os.path.dirname', 'os.path.dirname', (['self.path'], {}), '(self.path)\n', (1902, 1913), False, 'import os\n'), ((2013, 2051), 'shutil.copyfile', 'shutil.copyfile', (['input_path', 'self.path'], {}), '(input_path, self.path)\n', (2028, 2051), False, 'import shutil\n'), ((2201, 2220), 'os.path.isdir', 'os.path.isdir', (['path'], {}), '(path)\n', (2214, 2220), False, 'import os\n'), ((347, 375), 'os.path.join', 'os.path.join', (['root', 'filename'], {}), '(root, filename)\n', (359, 375), False, 'import os\n'), ((743, 770), 'os.path.exists', 'os.path.exists', (['output_path'], {}), '(output_path)\n', (757, 770), False, 'import os\n'), ((788, 812), 'os.makedirs', 'os.makedirs', (['output_path'], {}), '(output_path)\n', (799, 812), False, 'import os\n'), ((1178, 1204), 'os.path.exists', 'os.path.exists', (['output_dir'], {}), '(output_dir)\n', (1192, 1204), False, 'import os\n'), ((1222, 1245), 'os.makedirs', 'os.makedirs', (['output_dir'], {}), '(output_dir)\n', (1233, 1245), False, 'import os\n'), ((1528, 1553), 'os.path.exists', 'os.path.exists', (['self.path'], {}), '(self.path)\n', (1542, 1553), False, 'import os\n'), ((1571, 1593), 'os.makedirs', 'os.makedirs', (['self.path'], {}), '(self.path)\n', (1582, 1593), False, 'import os\n'), ((1933, 1959), 'os.path.exists', 'os.path.exists', (['output_dir'], {}), '(output_dir)\n', (1947, 1959), False, 'import os\n'), ((1977, 2000), 'os.makedirs', 'os.makedirs', (['output_dir'], {}), '(output_dir)\n', (1988, 2000), False, 'import os\n'), ((437, 474), 'os.path.relpath', 'os.path.relpath', (['file_path', 'self.path'], {}), '(file_path, self.path)\n', (452, 474), False, 'import os\n'), ((1058, 1089), 'os.path.join', 'os.path.join', (['output_path', 'path'], {}), '(output_path, path)\n', (1070, 1089), False, 'import os\n'), ((1816, 1846), 'os.path.join', 'os.path.join', (['input_path', 'path'], {}), '(input_path, path)\n', (1828, 1846), False, 'import os\n'), ((1019, 1048), 'os.path.join', 'os.path.join', (['self.path', 'path'], {}), '(self.path, path)\n', (1031, 1048), False, 'import os\n'), ((1775, 1804), 'os.path.join', 'os.path.join', (['self.path', 'path'], {}), '(self.path, path)\n', (1787, 1804), False, 'import os\n')]
|
from __future__ import print_function
from __future__ import division
from builtins import range
from past.utils import old_div
import os
import numpy as num
from anuga.file.netcdf import NetCDFFile
import pylab as P
import anuga
from anuga.abstract_2d_finite_volumes.mesh_factory import rectangular
from anuga.shallow_water.shallow_water_domain import Domain
from anuga.shallow_water.boundaries import Reflective_boundary
from anuga.coordinate_transforms.geo_reference import Geo_reference
from anuga.shallow_water.forcing import *
from anuga.utilities.numerical_tools import ensure_numeric
from anuga.file.sww import Write_sww
from anuga.config import netcdf_mode_r, netcdf_mode_w, netcdf_mode_a, \
netcdf_float
def sts2sww_mesh(basename_in, basename_out=None,
spatial_thinning=1, verbose=False):
from anuga.mesh_engine.mesh_engine import NoTrianglesError
from anuga.pmesh.mesh import Mesh
if verbose:
print("Starting sts2sww_mesh")
mean_stage=0.
zscale=1.
    if (basename_in[-4:]=='.sts'):
stsname = basename_in
else:
stsname = basename_in + '.sts'
if verbose: print("Reading sts NetCDF file: %s" %stsname)
infile = NetCDFFile(stsname, netcdf_mode_r)
cellsize = infile.cellsize
ncols = infile.ncols
nrows = infile.nrows
no_data = infile.no_data
refzone = infile.zone
x_origin = infile.xllcorner
y_origin = infile.yllcorner
origin = num.array([x_origin, y_origin])
x = infile.variables['x'][:]
y = infile.variables['y'][:]
times = infile.variables['time'][:]
wind_speed_full = infile.variables['wind_speed'][:]
wind_angle_full = infile.variables['wind_angle'][:]
pressure_full = infile.variables['barometric_pressure'][:]
infile.close()
number_of_points = nrows*ncols
points_utm = num.zeros((number_of_points,2),num.float)
points_utm[:,0]=x+x_origin
points_utm[:,1]=y+y_origin
thinned_indices=[]
for i in range(number_of_points):
if (old_div(i,ncols)==0 or old_div(i,ncols)==ncols-1 or (old_div(i,ncols))%(spatial_thinning)==0):
if ( i%(spatial_thinning)==0 or i%nrows==0 or i%nrows==nrows-1 ):
thinned_indices.append(i)
#Spatial thinning
points_utm=points_utm[thinned_indices]
number_of_points = points_utm.shape[0]
number_of_timesteps = wind_speed_full.shape[0]
wind_speed = num.empty((number_of_timesteps,number_of_points),dtype=float)
wind_angle = num.empty((number_of_timesteps,number_of_points),dtype=float)
barometric_pressure = num.empty((number_of_timesteps,number_of_points),dtype=float)
if verbose:
print("Total number of points: ", nrows*ncols)
print("Number of thinned points: ", number_of_points)
for i in range(number_of_timesteps):
wind_speed[i] = wind_speed_full[i,thinned_indices]
wind_angle[i] = wind_angle_full[i,thinned_indices]
barometric_pressure[i] = pressure_full[i,thinned_indices]
#P.plot(points_utm[:,0],points_utm[:,1],'ro')
#P.show()
if verbose:
print("Generating sww triangulation of gems data")
mesh = Mesh()
mesh.add_vertices(points_utm)
mesh.auto_segment(smooth_indents=True, expand_pinch=True)
mesh.auto_segment(mesh.shape.get_alpha() * 1.1)
try:
mesh.generate_mesh(minimum_triangle_angle=0.0, verbose=False)
except NoTrianglesError:
# This is a bit of a hack, going in and changing the data structure.
mesh.holes = []
mesh.generate_mesh(minimum_triangle_angle=0.0, verbose=False)
mesh_dic = mesh.Mesh2MeshList()
points_utm=ensure_numeric(points_utm)
assert num.alltrue(ensure_numeric(mesh_dic['generatedpointlist'])
== ensure_numeric(points_utm))
volumes = mesh_dic['generatedtrianglelist']
# Write sww intro and grid stuff.
    if (basename_out is not None and basename_out[-4:]=='.sww'):
swwname = basename_out
else:
swwname = basename_in + '.sww'
    if verbose: print('Output to %s' % swwname)
if verbose:
print("Writing sww wind and pressure field file")
outfile = NetCDFFile(swwname, netcdf_mode_w)
sww = Write_sww([], ['wind_speed','wind_angle','barometric_pressure'])
sww.store_header(outfile, times, len(volumes), len(points_utm),
verbose=verbose, sww_precision='d')
outfile.mean_stage = mean_stage
outfile.zscale = zscale
sww.store_triangulation(outfile, points_utm, volumes,
refzone,
new_origin=origin, #check effect of this line
verbose=verbose)
if verbose:
print('Converting quantities')
# Read in a time slice from the sts file and write it to the SWW file
#print wind_angle[0,:10]
for i in range(len(times)):
sww.store_quantities(outfile,
slice_index=i,
verbose=verbose,
wind_speed=wind_speed[i,:],
wind_angle=wind_angle[i,:],
barometric_pressure=barometric_pressure[i,:],
sww_precision=num.float)
if verbose:
sww.verbose_quantities(outfile)
outfile.close()
|
[
"builtins.range",
"anuga.utilities.numerical_tools.ensure_numeric",
"past.utils.old_div",
"numpy.empty",
"anuga.file.sww.Write_sww",
"numpy.zeros",
"numpy.array",
"anuga.pmesh.mesh.Mesh",
"anuga.file.netcdf.NetCDFFile"
] |
[((1240, 1274), 'anuga.file.netcdf.NetCDFFile', 'NetCDFFile', (['stsname', 'netcdf_mode_r'], {}), '(stsname, netcdf_mode_r)\n', (1250, 1274), False, 'from anuga.file.netcdf import NetCDFFile\n'), ((1488, 1519), 'numpy.array', 'num.array', (['[x_origin, y_origin]'], {}), '([x_origin, y_origin])\n', (1497, 1519), True, 'import numpy as num\n'), ((1877, 1920), 'numpy.zeros', 'num.zeros', (['(number_of_points, 2)', 'num.float'], {}), '((number_of_points, 2), num.float)\n', (1886, 1920), True, 'import numpy as num\n'), ((2018, 2041), 'builtins.range', 'range', (['number_of_points'], {}), '(number_of_points)\n', (2023, 2041), False, 'from builtins import range\n'), ((2449, 2512), 'numpy.empty', 'num.empty', (['(number_of_timesteps, number_of_points)'], {'dtype': 'float'}), '((number_of_timesteps, number_of_points), dtype=float)\n', (2458, 2512), True, 'import numpy as num\n'), ((2528, 2591), 'numpy.empty', 'num.empty', (['(number_of_timesteps, number_of_points)'], {'dtype': 'float'}), '((number_of_timesteps, number_of_points), dtype=float)\n', (2537, 2591), True, 'import numpy as num\n'), ((2618, 2681), 'numpy.empty', 'num.empty', (['(number_of_timesteps, number_of_points)'], {'dtype': 'float'}), '((number_of_timesteps, number_of_points), dtype=float)\n', (2627, 2681), True, 'import numpy as num\n'), ((2826, 2852), 'builtins.range', 'range', (['number_of_timesteps'], {}), '(number_of_timesteps)\n', (2831, 2852), False, 'from builtins import range\n'), ((3193, 3199), 'anuga.pmesh.mesh.Mesh', 'Mesh', ([], {}), '()\n', (3197, 3199), False, 'from anuga.pmesh.mesh import Mesh\n'), ((3680, 3706), 'anuga.utilities.numerical_tools.ensure_numeric', 'ensure_numeric', (['points_utm'], {}), '(points_utm)\n', (3694, 3706), False, 'from anuga.utilities.numerical_tools import ensure_numeric\n'), ((4197, 4231), 'anuga.file.netcdf.NetCDFFile', 'NetCDFFile', (['swwname', 'netcdf_mode_w'], {}), '(swwname, netcdf_mode_w)\n', (4207, 4231), False, 'from anuga.file.netcdf import NetCDFFile\n'), ((4242, 4308), 'anuga.file.sww.Write_sww', 'Write_sww', (['[]', "['wind_speed', 'wind_angle', 'barometric_pressure']"], {}), "([], ['wind_speed', 'wind_angle', 'barometric_pressure'])\n", (4251, 4308), False, 'from anuga.file.sww import Write_sww\n'), ((3730, 3776), 'anuga.utilities.numerical_tools.ensure_numeric', 'ensure_numeric', (["mesh_dic['generatedpointlist']"], {}), "(mesh_dic['generatedpointlist'])\n", (3744, 3776), False, 'from anuga.utilities.numerical_tools import ensure_numeric\n'), ((3803, 3829), 'anuga.utilities.numerical_tools.ensure_numeric', 'ensure_numeric', (['points_utm'], {}), '(points_utm)\n', (3817, 3829), False, 'from anuga.utilities.numerical_tools import ensure_numeric\n'), ((2055, 2072), 'past.utils.old_div', 'old_div', (['i', 'ncols'], {}), '(i, ncols)\n', (2062, 2072), False, 'from past.utils import old_div\n'), ((2078, 2095), 'past.utils.old_div', 'old_div', (['i', 'ncols'], {}), '(i, ncols)\n', (2085, 2095), False, 'from past.utils import old_div\n'), ((2108, 2125), 'past.utils.old_div', 'old_div', (['i', 'ncols'], {}), '(i, ncols)\n', (2115, 2125), False, 'from past.utils import old_div\n')]
|
from flask import request
from flask_restplus import Resource, fields, Namespace
from app import db
from app.models import Application, Constraints, CounterExample, Model, Result, UserNets, UserProps, \
Verification, get_workdir
from app.schemas import CounterExampleSchema, ModelSchema, ResultSchema, VerificationSchema
MODEL_NOT_FOUND = "Model not found."
VERIFICATION_NOT_FOUND = "Verification not found."
RESULT_NOT_FOUND = "Result not found."
COUNTER_EXAMPLE_NOT_FOUND = "Counter-example not found."
URL_ID = "/<int:id>"
def create_schema(schema_type, bool):
if bool:
return schema_type(many=bool)
else:
return schema_type()
a = Application()
models_ns = Namespace('models', description='models related operations')
verifications_ns = Namespace(
'verifications', description='verifications related operations')
results_ns = Namespace('results', description='results related operations')
counter_examples_ns = Namespace(
'counter_examples', description='counter-examples related operations')
verification_input = verifications_ns.model(
'Verification', {
'model': fields.Raw(),
'usernets': fields.List(fields.String, description='Network01Bag'),
'userdefs': fields.List(fields.String, description='User1'),
'userprops': fields.List(fields.String, description='MessageSound'),
'constraintNode': fields.String('TRUE'),
'constraintEdge': fields.String('TRUE')
})
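# Hedged example of a POST body matching the model above, inferred from these
# field declarations and the handler in VerificationList.post below; all
# values are purely illustrative:
# {
#   "model": {"xml": "<... model XML ...>"},
#   "usernets": ["Network01Bag"],
#   "userdefs": ["User1"],
#   "userprops": ["MessageSound"],
#   "constraintNode": "TRUE",
#   "constraintEdge": "TRUE"
# }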
@models_ns.route('')
class ModelList(Resource):
def get(self):
models = a.get_all_elements(Model)
return (create_schema(ModelSchema, True)).jsonify(models)
@models_ns.route(f'{URL_ID}')
class ModelById(Resource):
def get(self, id):
m = a.get_element_by_id(Model, id)
if m:
return (create_schema(ModelSchema, False)).jsonify(m)
return {'message': MODEL_NOT_FOUND}, 404
@verifications_ns.route(f'{URL_ID}/model')
class ModelByVerification(Resource):
def get(self, id):
model_id = (a.get_element_by_id(Verification, id)).model_id
return ModelById.get(self, model_id)
@counter_examples_ns.route(f'{URL_ID}/model')
class ModelByCounterExample(Resource):
def get(self, id):
ce = a.get_element_by_id(CounterExample, id)
m_id = ce.get_result().get_verification().model_id
return ModelById.get(self, m_id)
@verifications_ns.route('')
class VerificationList(Resource):
def get(self):
v = a.get_all_elements(Verification)
return (create_schema(VerificationSchema, True)).jsonify(v)
@verifications_ns.expect(verification_input)
def post(self):
data = request.get_json()
model = (data['model']['xml'])
usernets = data['usernets']
userdefs = data['userdefs']
userprops = data['userprops']
constraints = str(f'CONSTANT ConstraintNode <- {data["constraintNode"]}\n'
f' ConstraintEdge <- {data["constraintEdge"]}\n'
" Constraint <- ConstraintNodeEdge\n")
v = a.create_verification()
try:
m = v.create_model(model)
v.create_file(UserNets, usernets, m.name)
if not userdefs is None:
v.create_properties_files(userdefs, userprops, m.name)
else:
v.create_file(UserProps, userprops, m.name)
v.create_file(Constraints, constraints, m.name)
output = v.launch_check(m.name)
workdir = get_workdir(output)
xx = v.create_results_list(workdir, m.name)
v.create_counter_examples(workdir, m.name, xx)
del m, v
return output
except (AttributeError, TypeError) as e:
print(e)
v.aborted()
return ("Incorrect model")
@verifications_ns.route(f'{URL_ID}')
class VerificationById(Resource):
def get(self, id):
v = a.get_element_by_id(Verification, id)
if v:
return (create_schema(VerificationSchema, False)).jsonify(v)
return {'message': VERIFICATION_NOT_FOUND}, 404
def delete(self, id):
v = Verification.query.get(id)
db.session.delete(v)
db.session.commit()
return "Verification was successfully deleted"
@results_ns.route(f'{URL_ID}/verification')
class VerificationByResult(Resource):
def get(self, id):
verification = (a.get_element_by_id(Result, id)).verification
return (create_schema(VerificationSchema, False)).jsonify(verification)
@verifications_ns.route(f'/latest')
class LatestVerification(Resource):
def get(self):
v = a.get_latest_verification()
return (create_schema(VerificationSchema, False)).jsonify(v)
@results_ns.route('')
class ResultList(Resource):
def get(self):
r = a.get_all_elements(Result)
return (create_schema(ResultSchema, True)).jsonify(r)
@results_ns.route(f'{URL_ID}')
class ResultById(Resource):
def get(self, id):
r = a.get_element_by_id(Result, id)
if r:
return (create_schema(ResultSchema, False)).jsonify(r)
return {'message': RESULT_NOT_FOUND}, 404
@verifications_ns.route(f'{URL_ID}/results')
class ResultByVerification(Resource):
def get(self, id):
verification = a.get_element_by_id(Verification, id)
return (create_schema(ResultSchema, True)).jsonify(verification.results)
@counter_examples_ns.route('')
class CounterExampleList(Resource):
def get(self):
ce = a.get_all_elements(CounterExample)
return (create_schema(CounterExampleSchema, True)).jsonify(ce)
@counter_examples_ns.route(f'{URL_ID}')
class CounterExampleById(Resource):
def get(self, id):
ce = a.get_element_by_id(CounterExample, id)
if ce:
return (create_schema(CounterExampleSchema, False)).jsonify(ce)
return {'message': COUNTER_EXAMPLE_NOT_FOUND}, 404
@results_ns.route(f'{URL_ID}/counter_examples')
class CounterExampleByResult(Resource):
def get(self, id):
counter_example = (a.get_element_by_id(Result, id)).counter_example
if counter_example:
return (create_schema(CounterExampleSchema, False)).jsonify(counter_example)
else:
return "Record not found", 400
|
[
"app.models.Verification.query.get",
"flask_restplus.fields.Raw",
"flask_restplus.fields.List",
"app.models.get_workdir",
"app.models.Application",
"app.db.session.delete",
"flask_restplus.fields.String",
"app.db.session.commit",
"flask_restplus.Namespace",
"flask.request.get_json"
] |
[((668, 681), 'app.models.Application', 'Application', ([], {}), '()\n', (679, 681), False, 'from app.models import Application, Constraints, CounterExample, Model, Result, UserNets, UserProps, Verification, get_workdir\n'), ((695, 755), 'flask_restplus.Namespace', 'Namespace', (['"""models"""'], {'description': '"""models related operations"""'}), "('models', description='models related operations')\n", (704, 755), False, 'from flask_restplus import Resource, fields, Namespace\n'), ((775, 849), 'flask_restplus.Namespace', 'Namespace', (['"""verifications"""'], {'description': '"""verifications related operations"""'}), "('verifications', description='verifications related operations')\n", (784, 849), False, 'from flask_restplus import Resource, fields, Namespace\n'), ((868, 930), 'flask_restplus.Namespace', 'Namespace', (['"""results"""'], {'description': '"""results related operations"""'}), "('results', description='results related operations')\n", (877, 930), False, 'from flask_restplus import Resource, fields, Namespace\n'), ((953, 1038), 'flask_restplus.Namespace', 'Namespace', (['"""counter_examples"""'], {'description': '"""counter-examples related operations"""'}), "('counter_examples', description='counter-examples related operations'\n )\n", (962, 1038), False, 'from flask_restplus import Resource, fields, Namespace\n'), ((1124, 1136), 'flask_restplus.fields.Raw', 'fields.Raw', ([], {}), '()\n', (1134, 1136), False, 'from flask_restplus import Resource, fields, Namespace\n'), ((1158, 1212), 'flask_restplus.fields.List', 'fields.List', (['fields.String'], {'description': '"""Network01Bag"""'}), "(fields.String, description='Network01Bag')\n", (1169, 1212), False, 'from flask_restplus import Resource, fields, Namespace\n'), ((1234, 1281), 'flask_restplus.fields.List', 'fields.List', (['fields.String'], {'description': '"""User1"""'}), "(fields.String, description='User1')\n", (1245, 1281), False, 'from flask_restplus import Resource, fields, Namespace\n'), ((1304, 1358), 'flask_restplus.fields.List', 'fields.List', (['fields.String'], {'description': '"""MessageSound"""'}), "(fields.String, description='MessageSound')\n", (1315, 1358), False, 'from flask_restplus import Resource, fields, Namespace\n'), ((1386, 1407), 'flask_restplus.fields.String', 'fields.String', (['"""TRUE"""'], {}), "('TRUE')\n", (1399, 1407), False, 'from flask_restplus import Resource, fields, Namespace\n'), ((1435, 1456), 'flask_restplus.fields.String', 'fields.String', (['"""TRUE"""'], {}), "('TRUE')\n", (1448, 1456), False, 'from flask_restplus import Resource, fields, Namespace\n'), ((2658, 2676), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (2674, 2676), False, 'from flask import request\n'), ((4161, 4187), 'app.models.Verification.query.get', 'Verification.query.get', (['id'], {}), '(id)\n', (4183, 4187), False, 'from app.models import Application, Constraints, CounterExample, Model, Result, UserNets, UserProps, Verification, get_workdir\n'), ((4196, 4216), 'app.db.session.delete', 'db.session.delete', (['v'], {}), '(v)\n', (4213, 4216), False, 'from app import db\n'), ((4225, 4244), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (4242, 4244), False, 'from app import db\n'), ((3518, 3537), 'app.models.get_workdir', 'get_workdir', (['output'], {}), '(output)\n', (3529, 3537), False, 'from app.models import Application, Constraints, CounterExample, Model, Result, UserNets, UserProps, Verification, get_workdir\n')]
|
import json
import pandas as pd
from us import states
from bs4 import BeautifulSoup
import urllib.parse
import json
from can_tools.scrapers import variables
from can_tools.scrapers.official.base import TableauDashboard
from can_tools.scrapers.util import requests_retry_session
class PhiladelphiaVaccine(TableauDashboard):
state_fips = int(states.lookup("Pennsylvania").fips)
has_location = True
location_type = "county"
provider = "county"
source = (
"https://www.phila.gov/programs/coronavirus-disease-2019-covid-19/data/vaccine/"
)
source_name = "Philadelphia Department of Public Health"
baseurl = "https://healthviz.phila.gov/t/PublicHealth/"
viewPath = "COVIDVaccineDashboard/COVID_Vaccine"
data_tableau_table = "Residents Percentage {dose_type}"
variables = {
"Residents Receiving At Least 1 Dose* ": variables.INITIATING_VACCINATIONS_ALL,
"Fully Vaccinated Residents*": variables.FULLY_VACCINATED_ALL,
}
def fetch(self) -> pd.DataFrame:
# create a dict of the 2 dose type tables
# which are titled "Residents Percentage New" and "... Full"
return {
dose_type: self.get_tableau_view(dose_type=dose_type)[
self.data_tableau_table.format(dose_type=dose_type)
]
for dose_type in ["New", "Full"]
}
def normalize(self, data: pd.DataFrame) -> pd.DataFrame:
dataframes = []
for dose_type in ["New", "Full"]:
dose_data = (
data[dose_type]
.rename(
columns={
"Measure Values-alias": "value",
"Measure Names-alias": "variable",
}
)
.loc[:, ["value", "variable"]]
.query(
"variable in"
"['Residents Receiving At Least 1 Dose* ', 'Fully Vaccinated Residents*']"
)
.assign(
location=42101,
value=lambda x: pd.to_numeric(x["value"].str.replace(",", "")),
vintage=self._retrieve_vintage(),
)
.pipe(
self._rename_or_add_date_and_location,
location_column="location",
timezone="US/Eastern",
)
)
dataframes.append(dose_data)
data = (
self.extract_CMU(df=pd.concat(dataframes), cmu=self.variables)
.drop(columns={"variable"})
.reset_index(drop=True)
)
# break scraper if both init and completed variables are not included in data
vars = {"total_vaccine_initiated", "total_vaccine_completed"}
assert vars <= set(data["category"])
return data
# could not find a way to select the "Demographics New" dashboard tab in the usual manner,
# so edit request body to manually select Demographic tab/sheets
# this is the default function with only form_data["sheet_id"] altered
def get_tableau_view(self, dose_type, url=None):
def onAlias(it, value, cstring):
return value[it] if (it >= 0) else cstring["dataValues"][abs(it) - 1]
req = requests_retry_session()
fullURL = self.baseurl + "/views/" + self.viewPath
reqg = req.get(
fullURL,
params={
":language": "en",
":display_count": "y",
":origin": "viz_share_link",
":embed": "y",
":showVizHome": "n",
":jsdebug": "y",
":apiID": "host4",
"#navType": "1",
"navSrc": "Parse",
},
headers={"Accept": "text/javascript"},
)
soup = BeautifulSoup(reqg.text, "html.parser")
tableauTag = soup.find("textarea", {"id": "tsConfigContainer"})
tableauData = json.loads(tableauTag.text)
parsed_url = urllib.parse.urlparse(fullURL)
dataUrl = f'{parsed_url.scheme}://{parsed_url.hostname}{tableauData["vizql_root"]}/bootstrapSession/sessions/{tableauData["sessionid"]}'
# copy over some additional headers from tableauData
form_data = {}
form_map = {
"sheetId": "sheet_id",
"showParams": "showParams",
"stickySessionKey": "stickySessionKey",
}
for k, v in form_map.items():
if k in tableauData:
form_data[v] = tableauData[k]
# set sheet manually to access the subsheets we need
form_data["sheet_id"] = f"Demographics {dose_type}"
resp = req.post(
dataUrl,
data=form_data,
headers={"Accept": "text/javascript"},
)
# Parse the response.
        # The response contains multiple chunks of the form
# `<size>;<json>` where `<size>` is the number of bytes in `<json>`
resp_text = resp.text
data = []
while len(resp_text) != 0:
size, rest = resp_text.split(";", 1)
chunck = json.loads(rest[: int(size)])
data.append(chunck)
resp_text = rest[int(size) :]
# The following section (to the end of the method) uses code from
# https://stackoverflow.com/questions/64094560/how-do-i-scrape-tableau-data-from-website-into-r
presModel = data[1]["secondaryInfo"]["presModelMap"]
metricInfo = presModel["vizData"]["presModelHolder"]
metricInfo = metricInfo["genPresModelMapPresModel"]["presModelMap"]
data = presModel["dataDictionary"]["presModelHolder"]
data = data["genDataDictionaryPresModel"]["dataSegments"]["0"]["dataColumns"]
scrapedData = {}
for metric in metricInfo:
metricsDict = metricInfo[metric]["presModelHolder"]["genVizDataPresModel"]
columnsData = metricsDict["paneColumnsData"]
result = [
{
"fieldCaption": t.get("fieldCaption", ""),
"valueIndices": columnsData["paneColumnsList"][t["paneIndices"][0]][
"vizPaneColumns"
][t["columnIndices"][0]]["valueIndices"],
"aliasIndices": columnsData["paneColumnsList"][t["paneIndices"][0]][
"vizPaneColumns"
][t["columnIndices"][0]]["aliasIndices"],
"dataType": t.get("dataType"),
"paneIndices": t["paneIndices"][0],
"columnIndices": t["columnIndices"][0],
}
for t in columnsData["vizDataColumns"]
if t.get("fieldCaption")
]
frameData = {}
cstring = [t for t in data if t["dataType"] == "cstring"][0]
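            # resolve each column's value/alias indices against the shared data dictionary into "<field>-value" / "<field>-alias" lists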
for t in data:
for index in result:
if t["dataType"] == index["dataType"]:
if len(index["valueIndices"]) > 0:
frameData[f'{index["fieldCaption"]}-value'] = [
t["dataValues"][abs(it)] for it in index["valueIndices"]
]
if len(index["aliasIndices"]) > 0:
frameData[f'{index["fieldCaption"]}-alias'] = [
onAlias(it, t["dataValues"], cstring)
for it in index["aliasIndices"]
]
df = pd.DataFrame.from_dict(frameData, orient="index").fillna(0).T
scrapedData[metric] = df
return scrapedData
|
[
"pandas.DataFrame.from_dict",
"json.loads",
"us.states.lookup",
"can_tools.scrapers.util.requests_retry_session",
"bs4.BeautifulSoup",
"pandas.concat"
] |
[((3274, 3298), 'can_tools.scrapers.util.requests_retry_session', 'requests_retry_session', ([], {}), '()\n', (3296, 3298), False, 'from can_tools.scrapers.util import requests_retry_session\n'), ((3838, 3877), 'bs4.BeautifulSoup', 'BeautifulSoup', (['reqg.text', '"""html.parser"""'], {}), "(reqg.text, 'html.parser')\n", (3851, 3877), False, 'from bs4 import BeautifulSoup\n'), ((3972, 3999), 'json.loads', 'json.loads', (['tableauTag.text'], {}), '(tableauTag.text)\n', (3982, 3999), False, 'import json\n'), ((347, 376), 'us.states.lookup', 'states.lookup', (['"""Pennsylvania"""'], {}), "('Pennsylvania')\n", (360, 376), False, 'from us import states\n'), ((7546, 7595), 'pandas.DataFrame.from_dict', 'pd.DataFrame.from_dict', (['frameData'], {'orient': '"""index"""'}), "(frameData, orient='index')\n", (7568, 7595), True, 'import pandas as pd\n'), ((2493, 2514), 'pandas.concat', 'pd.concat', (['dataframes'], {}), '(dataframes)\n', (2502, 2514), True, 'import pandas as pd\n')]
|
import sys, time
from PyQt5.QtWidgets import QMainWindow, QWidget, QHBoxLayout, QApplication, QPushButton
class mainwindow(QMainWindow):
def __init__(self):
super(mainwindow, self).__init__()
layout = QHBoxLayout()
w = QWidget()
w.setLayout(layout)
self.setCentralWidget(w)
        btn = QPushButton("点击")  # "点击" = "Click"
layout.addWidget(btn)
btn.clicked.connect(self.count)
def count(self):
pass
if __name__ == '__main__':
app = QApplication([])
m = mainwindow()
m.show()
sys.exit(app.exec())
|
[
"PyQt5.QtWidgets.QApplication",
"PyQt5.QtWidgets.QHBoxLayout",
"PyQt5.QtWidgets.QWidget",
"PyQt5.QtWidgets.QPushButton"
] |
[((497, 513), 'PyQt5.QtWidgets.QApplication', 'QApplication', (['[]'], {}), '([])\n', (509, 513), False, 'from PyQt5.QtWidgets import QMainWindow, QWidget, QHBoxLayout, QApplication, QPushButton\n'), ((224, 237), 'PyQt5.QtWidgets.QHBoxLayout', 'QHBoxLayout', ([], {}), '()\n', (235, 237), False, 'from PyQt5.QtWidgets import QMainWindow, QWidget, QHBoxLayout, QApplication, QPushButton\n'), ((250, 259), 'PyQt5.QtWidgets.QWidget', 'QWidget', ([], {}), '()\n', (257, 259), False, 'from PyQt5.QtWidgets import QMainWindow, QWidget, QHBoxLayout, QApplication, QPushButton\n'), ((336, 353), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""点击"""'], {}), "('点击')\n", (347, 353), False, 'from PyQt5.QtWidgets import QMainWindow, QWidget, QHBoxLayout, QApplication, QPushButton\n')]
|
import os
import sys
import unittest
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
from decision_tree import *
from decision_tree import DecisionTree
class DecisionTreeTests(unittest.TestCase):
def test_read_data(self):
result_data = [['FALSE', 'high', 'hot', 'sunny', 'no'],
['TRUE', 'high', 'hot', 'sunny', 'no'],
['FALSE', 'high', 'hot', 'overcast', 'yes'],
['FALSE', 'high', 'mild', 'rainy', 'yes'],
['FALSE', 'normal', 'cool', 'rainy', 'yes'],
['TRUE', 'normal', 'cool', 'rainy', 'no'],
['TRUE', 'normal', 'cool', 'overcast', 'yes'],
['FALSE', 'high', 'mild', 'sunny', 'no'],
['FALSE', 'normal', 'cool', 'sunny', 'yes'],
['FALSE', 'normal', 'mild', 'rainy', 'yes'],
['TRUE', 'normal', 'mild', 'sunny', 'yes'],
['TRUE', 'high', 'mild', 'overcast', 'yes'],
['FALSE', 'normal', 'hot', 'overcast', 'yes'],
['TRUE', 'high', 'mild', 'rainy', 'no']]
self.assertEquals(DecisionTree.read_libsvm_data("../resources/weatherNominalTr.txt"), result_data)
def test_gini_index_1(self):
data = [[[1, 1], [1, 1]], [[0, 0], [0, 0]]]
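        # both groups are pure (each holds a single class), so the Gini index is expected to be 0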
self.failUnless(DecisionTree([item for sublist in data for item in sublist] ).gini_index(data) == 0.0)
def test_gini_index_2(self):
data = [[[1, 1], [0, 0]], [[1, 1], [0, 0]]]
self.failUnless(DecisionTree([item for sublist in data for item in sublist] ).gini_index(data) == 1.0)
def test_gini_index_3(self):
data = [[[1, 0], [1, 0]], [[1, 1], [0.3, 1], [0, 0], [0.6, 1]]]
self.failUnless(DecisionTree([item for sublist in data for item in sublist]).gini_index(data) == 0.375)
def test_gini_index_4(self):
data = [[[1, 0], [1, 0], [1, 0], [0.3, 1]], [[0, 0], [0.6, 1]]]
result = DecisionTree([item for sublist in data for item in sublist]).gini_index(data)
print(result)
self.failUnless(result == 0.875)
def test_split_1(self):
to_split = [[1, 0], [1, 0], [1, 1], [0.3, 1], [0, 0], [0.6, 1]]
splitted = [[[0.3, 1], [0, 0]], [[1, 0], [1, 0], [1, 1], [0.6, 1]]]
self.assertEquals(DecisionTree(to_split).test_split(to_split, 0, 0.5), splitted)
# def test_split_2(self):
# to_split = [[1], [2], [3]]
# with self.assertRaises(IndexError) as context:
# DecisionTree(to_split).test_split(to_split, 1, 0.5)
# def test_get_split(self):
# dataset = [[2.771244718, 1.784783929, 0],
# [1.728571309, 1.169761413, 0],
# [3.678319846, 2.81281357, 0],
# [3.961043357, 2.61995032, 0],
# [2.999208922, 2.209014212, 0],
# [7.497545867, 3.162953546, 1],
# [9.00220326, 3.339047188, 1],
# [7.444542326, 0.476683375, 1],
# [10.12493903, 3.234550982, 1],
# [6.642287351, 3.319983761, 1]]
#
# split = DecisionTree(dataset).get_split(dataset)
# group_1 = [[2.771244718, 1.784783929, 0], [1.728571309, 1.169761413, 0], [3.678319846, 2.81281357, 0],
# [3.961043357, 2.61995032, 0], [2.999208922, 2.209014212, 0]]
# group_2 = [[7.497545867, 3.162953546, 1], [9.00220326, 3.339047188, 1], [7.444542326, 0.476683375, 1],
# [10.12493903, 3.234550982, 1], [6.642287351, 3.319983761, 1]]
# result = [0, 6.642, group_1, group_2]
# self.assertEquals([split['index'], round(split['value'], 3), split['groups'][0], split['groups'][1]], result)
def test_to_terminal_1(self):
dataset = [[2.771244718, 1.784783929, 0],
[1.728571309, 1.169761413, 0],
[3.678319846, 2.81281357, 0],
[3.961043357, 2.61995032, 0],
[2.999208922, 2.209014212, 1],
[7.497545867, 3.162953546, 1],
[9.00220326, 3.339047188, 1],
[7.444542326, 0.476683375, 1],
[10.12493903, 3.234550982, 1],
[6.642287351, 3.319983761, 1]]
self.assertEquals(TerminalNode(dataset).group, dataset)
# def test_build_tree(self):
# n0 = TerminalNode([0])
# n1 = TerminalNode([0, 0, 0, 0])
# n2 = TerminalNode([1])
# n3 = TerminalNode([1, 1, 1, 1])
# sn0 = SplitNode[0]
# dataset = [[2.771244718, 1.784783929, 0],
# [1.728571309, 1.169761413, 0],
# [3.678319846, 2.81281357, 0],
# [3.961043357, 2.61995032, 0],
# [2.999208922, 2.209014212, 0],
# [7.497545867, 3.162953546, 1],
# [9.00220326, 3.339047188, 1],
# [7.444542326, 0.476683375, 1],
# [10.12493903, 3.234550982, 0],
# [6.642287351, 3.319983761, 1]]
#
# tree = DecisionTree(dataset, 2, 1)
# tree.print(tree)
# print('Split: [X%d < %.3f]' % ((split['index']+1), split['value']))
def main():
unittest.main()
if __name__ == '__main__':
main()
|
[
"unittest.main",
"decision_tree.DecisionTree",
"os.path.dirname",
"decision_tree.DecisionTree.read_libsvm_data"
] |
[((5234, 5249), 'unittest.main', 'unittest.main', ([], {}), '()\n', (5247, 5249), False, 'import unittest\n'), ((86, 111), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (101, 111), False, 'import os\n'), ((1150, 1216), 'decision_tree.DecisionTree.read_libsvm_data', 'DecisionTree.read_libsvm_data', (['"""../resources/weatherNominalTr.txt"""'], {}), "('../resources/weatherNominalTr.txt')\n", (1179, 1216), False, 'from decision_tree import DecisionTree\n'), ((1967, 2027), 'decision_tree.DecisionTree', 'DecisionTree', (['[item for sublist in data for item in sublist]'], {}), '([item for sublist in data for item in sublist])\n', (1979, 2027), False, 'from decision_tree import DecisionTree\n'), ((2311, 2333), 'decision_tree.DecisionTree', 'DecisionTree', (['to_split'], {}), '(to_split)\n', (2323, 2333), False, 'from decision_tree import DecisionTree\n'), ((1342, 1402), 'decision_tree.DecisionTree', 'DecisionTree', (['[item for sublist in data for item in sublist]'], {}), '([item for sublist in data for item in sublist])\n', (1354, 1402), False, 'from decision_tree import DecisionTree\n'), ((1539, 1599), 'decision_tree.DecisionTree', 'DecisionTree', (['[item for sublist in data for item in sublist]'], {}), '([item for sublist in data for item in sublist])\n', (1551, 1599), False, 'from decision_tree import DecisionTree\n'), ((1756, 1816), 'decision_tree.DecisionTree', 'DecisionTree', (['[item for sublist in data for item in sublist]'], {}), '([item for sublist in data for item in sublist])\n', (1768, 1816), False, 'from decision_tree import DecisionTree\n')]
|
import os
import hashlib
import logging
import traceback
from typing import Union
from xml.etree.ElementTree import Element, SubElement, parse, ElementTree
from xpath_blindeye.xnode import XNode
from xpath_blindeye.util import prettify
from xpath_blindeye.config import ROOT_PATH, URL
logger = logging.getLogger("xpath-blindeye")
def retrieve():
url_md5 = hashlib.md5(URL.encode())
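    # scraped XML is cached on disk under a filename derived from the MD5 of the target URL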
try:
os.mkdir('./saved_requests')
except FileExistsError:
pass
save_location = './saved_requests/{}.xml'.format(url_md5.hexdigest())
saved_root = None
try:
saved_tree = parse(save_location)
saved_root = saved_tree.getroot()
except FileNotFoundError:
pass
root_path = ROOT_PATH
root_node_name = XNode.get_node_name(root_path)
logger.info("Root node name is " + root_node_name)
xml_root = Element(root_node_name)
try:
visit_node(root_node_name, root_path, None, xml_root, saved_root)
except KeyboardInterrupt:
pass
except Exception as e:
traceback.print_exc()
finally:
print(prettify(xml_root))
result = input("\n\nOverwrite last xml save?(Y/N)")
if result.lower() != "y":
exit(0)
et = ElementTree(xml_root)
logger.info("Saving...")
et.write(save_location)
def visit_node(node_name: str, path: str, parent: Union[Element, None], xml_root: Element, saved_root: Element):
if parent is None:
node = xml_root
else:
node = SubElement(parent, node_name) # type: Element
xnode = XNode(node_name, path, parent, xml_root, saved_root)
# Get and add attributes
node.attrib = xnode.get_attributes()
# Get and add text value
node.text = xnode.get_node_text()
# Get children
child_names = xnode.get_child_node_names()
    # Recurse into child nodes last, after attributes and text have been filled in
for child_name, child_path in child_names:
visit_node(node_name=child_name, path=child_path, parent=node, xml_root=xml_root, saved_root=saved_root)
|
[
"os.mkdir",
"xml.etree.ElementTree.parse",
"traceback.print_exc",
"xpath_blindeye.util.prettify",
"xml.etree.ElementTree.Element",
"xpath_blindeye.xnode.XNode.get_node_name",
"xpath_blindeye.xnode.XNode",
"xpath_blindeye.config.URL.encode",
"xml.etree.ElementTree.SubElement",
"logging.getLogger",
"xml.etree.ElementTree.ElementTree"
] |
[((296, 331), 'logging.getLogger', 'logging.getLogger', (['"""xpath-blindeye"""'], {}), "('xpath-blindeye')\n", (313, 331), False, 'import logging\n'), ((757, 787), 'xpath_blindeye.xnode.XNode.get_node_name', 'XNode.get_node_name', (['root_path'], {}), '(root_path)\n', (776, 787), False, 'from xpath_blindeye.xnode import XNode\n'), ((858, 881), 'xml.etree.ElementTree.Element', 'Element', (['root_node_name'], {}), '(root_node_name)\n', (865, 881), False, 'from xml.etree.ElementTree import Element, SubElement, parse, ElementTree\n'), ((1572, 1624), 'xpath_blindeye.xnode.XNode', 'XNode', (['node_name', 'path', 'parent', 'xml_root', 'saved_root'], {}), '(node_name, path, parent, xml_root, saved_root)\n', (1577, 1624), False, 'from xpath_blindeye.xnode import XNode\n'), ((376, 388), 'xpath_blindeye.config.URL.encode', 'URL.encode', ([], {}), '()\n', (386, 388), False, 'from xpath_blindeye.config import ROOT_PATH, URL\n'), ((407, 435), 'os.mkdir', 'os.mkdir', (['"""./saved_requests"""'], {}), "('./saved_requests')\n", (415, 435), False, 'import os\n'), ((603, 623), 'xml.etree.ElementTree.parse', 'parse', (['save_location'], {}), '(save_location)\n', (608, 623), False, 'from xml.etree.ElementTree import Element, SubElement, parse, ElementTree\n'), ((1239, 1260), 'xml.etree.ElementTree.ElementTree', 'ElementTree', (['xml_root'], {}), '(xml_root)\n', (1250, 1260), False, 'from xml.etree.ElementTree import Element, SubElement, parse, ElementTree\n'), ((1513, 1542), 'xml.etree.ElementTree.SubElement', 'SubElement', (['parent', 'node_name'], {}), '(parent, node_name)\n', (1523, 1542), False, 'from xml.etree.ElementTree import Element, SubElement, parse, ElementTree\n'), ((1043, 1064), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (1062, 1064), False, 'import traceback\n'), ((1092, 1110), 'xpath_blindeye.util.prettify', 'prettify', (['xml_root'], {}), '(xml_root)\n', (1100, 1110), False, 'from xpath_blindeye.util import prettify\n')]
|
from copy import deepcopy
from eth2spec.test.context import spec_state_test, with_all_phases
from eth2spec.test.helpers.state import (
next_epoch,
next_slot
)
from eth2spec.test.helpers.block import apply_empty_block
from eth2spec.test.helpers.attestations import (
add_attestation_to_state,
fill_aggregate_attestation,
get_valid_attestation,
sign_attestation,
)
from eth2spec.test.phase_0.epoch_processing.run_epoch_process_base import run_epoch_processing_with
def run_process_crosslinks(spec, state):
yield from run_epoch_processing_with(spec, state, 'process_crosslinks')
@with_all_phases
@spec_state_test
def test_no_attestations(spec, state):
yield from run_process_crosslinks(spec, state)
for shard in range(spec.SHARD_COUNT):
assert state.previous_crosslinks[shard] == state.current_crosslinks[shard]
@with_all_phases
@spec_state_test
def test_single_crosslink_update_from_current_epoch(spec, state):
next_epoch(spec, state)
attestation = get_valid_attestation(spec, state, signed=True)
fill_aggregate_attestation(spec, state, attestation)
add_attestation_to_state(spec, state, attestation, state.slot + spec.MIN_ATTESTATION_INCLUSION_DELAY)
assert len(state.current_epoch_attestations) == 1
shard = attestation.data.crosslink.shard
pre_crosslink = deepcopy(state.current_crosslinks[shard])
yield from run_process_crosslinks(spec, state)
assert state.previous_crosslinks[shard] != state.current_crosslinks[shard]
assert pre_crosslink != state.current_crosslinks[shard]
@with_all_phases
@spec_state_test
def test_single_crosslink_update_from_previous_epoch(spec, state):
next_epoch(spec, state)
attestation = get_valid_attestation(spec, state, signed=True)
fill_aggregate_attestation(spec, state, attestation)
add_attestation_to_state(spec, state, attestation, state.slot + spec.SLOTS_PER_EPOCH)
assert len(state.previous_epoch_attestations) == 1
shard = attestation.data.crosslink.shard
pre_crosslink = deepcopy(state.current_crosslinks[shard])
crosslink_deltas = spec.get_crosslink_deltas(state)
yield from run_process_crosslinks(spec, state)
assert state.previous_crosslinks[shard] != state.current_crosslinks[shard]
assert pre_crosslink != state.current_crosslinks[shard]
# ensure rewarded
for index in spec.get_crosslink_committee(
state,
attestation.data.target.epoch,
attestation.data.crosslink.shard):
assert crosslink_deltas[0][index] > 0
assert crosslink_deltas[1][index] == 0
@with_all_phases
@spec_state_test
def test_double_late_crosslink(spec, state):
if spec.get_committee_count(state, spec.get_current_epoch(state)) < spec.SHARD_COUNT:
print("warning: ignoring test, test-assumptions are incompatible with configuration")
return
next_epoch(spec, state)
state.slot += 4
attestation_1 = get_valid_attestation(spec, state, signed=True)
fill_aggregate_attestation(spec, state, attestation_1)
# add attestation_1 to next epoch
next_epoch(spec, state)
add_attestation_to_state(spec, state, attestation_1, state.slot + 1)
for _ in range(spec.SLOTS_PER_EPOCH):
attestation_2 = get_valid_attestation(spec, state)
if attestation_2.data.crosslink.shard == attestation_1.data.crosslink.shard:
sign_attestation(spec, state, attestation_2)
break
next_slot(spec, state)
apply_empty_block(spec, state)
fill_aggregate_attestation(spec, state, attestation_2)
# add attestation_2 in the next epoch after attestation_1 has
# already updated the relevant crosslink
next_epoch(spec, state)
add_attestation_to_state(spec, state, attestation_2, state.slot + 1)
assert len(state.previous_epoch_attestations) == 1
assert len(state.current_epoch_attestations) == 0
crosslink_deltas = spec.get_crosslink_deltas(state)
yield from run_process_crosslinks(spec, state)
shard = attestation_2.data.crosslink.shard
# ensure that the current crosslinks were not updated by the second attestation
assert state.previous_crosslinks[shard] == state.current_crosslinks[shard]
# ensure no reward, only penalties for the failed crosslink
for index in spec.get_crosslink_committee(
state,
attestation_2.data.target.epoch,
attestation_2.data.crosslink.shard):
assert crosslink_deltas[0][index] == 0
assert crosslink_deltas[1][index] > 0
|
[
"eth2spec.test.helpers.state.next_epoch",
"copy.deepcopy",
"eth2spec.test.helpers.attestations.fill_aggregate_attestation",
"eth2spec.test.helpers.attestations.sign_attestation",
"eth2spec.test.helpers.state.next_slot",
"eth2spec.test.phase_0.epoch_processing.run_epoch_process_base.run_epoch_processing_with",
"eth2spec.test.helpers.attestations.get_valid_attestation",
"eth2spec.test.helpers.block.apply_empty_block",
"eth2spec.test.helpers.attestations.add_attestation_to_state"
] |
[((965, 988), 'eth2spec.test.helpers.state.next_epoch', 'next_epoch', (['spec', 'state'], {}), '(spec, state)\n', (975, 988), False, 'from eth2spec.test.helpers.state import next_epoch, next_slot\n'), ((1008, 1055), 'eth2spec.test.helpers.attestations.get_valid_attestation', 'get_valid_attestation', (['spec', 'state'], {'signed': '(True)'}), '(spec, state, signed=True)\n', (1029, 1055), False, 'from eth2spec.test.helpers.attestations import add_attestation_to_state, fill_aggregate_attestation, get_valid_attestation, sign_attestation\n'), ((1061, 1113), 'eth2spec.test.helpers.attestations.fill_aggregate_attestation', 'fill_aggregate_attestation', (['spec', 'state', 'attestation'], {}), '(spec, state, attestation)\n', (1087, 1113), False, 'from eth2spec.test.helpers.attestations import add_attestation_to_state, fill_aggregate_attestation, get_valid_attestation, sign_attestation\n'), ((1118, 1224), 'eth2spec.test.helpers.attestations.add_attestation_to_state', 'add_attestation_to_state', (['spec', 'state', 'attestation', '(state.slot + spec.MIN_ATTESTATION_INCLUSION_DELAY)'], {}), '(spec, state, attestation, state.slot + spec.\n MIN_ATTESTATION_INCLUSION_DELAY)\n', (1142, 1224), False, 'from eth2spec.test.helpers.attestations import add_attestation_to_state, fill_aggregate_attestation, get_valid_attestation, sign_attestation\n'), ((1341, 1382), 'copy.deepcopy', 'deepcopy', (['state.current_crosslinks[shard]'], {}), '(state.current_crosslinks[shard])\n', (1349, 1382), False, 'from copy import deepcopy\n'), ((1682, 1705), 'eth2spec.test.helpers.state.next_epoch', 'next_epoch', (['spec', 'state'], {}), '(spec, state)\n', (1692, 1705), False, 'from eth2spec.test.helpers.state import next_epoch, next_slot\n'), ((1725, 1772), 'eth2spec.test.helpers.attestations.get_valid_attestation', 'get_valid_attestation', (['spec', 'state'], {'signed': '(True)'}), '(spec, state, signed=True)\n', (1746, 1772), False, 'from eth2spec.test.helpers.attestations import add_attestation_to_state, fill_aggregate_attestation, get_valid_attestation, sign_attestation\n'), ((1778, 1830), 'eth2spec.test.helpers.attestations.fill_aggregate_attestation', 'fill_aggregate_attestation', (['spec', 'state', 'attestation'], {}), '(spec, state, attestation)\n', (1804, 1830), False, 'from eth2spec.test.helpers.attestations import add_attestation_to_state, fill_aggregate_attestation, get_valid_attestation, sign_attestation\n'), ((1835, 1925), 'eth2spec.test.helpers.attestations.add_attestation_to_state', 'add_attestation_to_state', (['spec', 'state', 'attestation', '(state.slot + spec.SLOTS_PER_EPOCH)'], {}), '(spec, state, attestation, state.slot + spec.\n SLOTS_PER_EPOCH)\n', (1859, 1925), False, 'from eth2spec.test.helpers.attestations import add_attestation_to_state, fill_aggregate_attestation, get_valid_attestation, sign_attestation\n'), ((2043, 2084), 'copy.deepcopy', 'deepcopy', (['state.current_crosslinks[shard]'], {}), '(state.current_crosslinks[shard])\n', (2051, 2084), False, 'from copy import deepcopy\n'), ((2891, 2914), 'eth2spec.test.helpers.state.next_epoch', 'next_epoch', (['spec', 'state'], {}), '(spec, state)\n', (2901, 2914), False, 'from eth2spec.test.helpers.state import next_epoch, next_slot\n'), ((2956, 3003), 'eth2spec.test.helpers.attestations.get_valid_attestation', 'get_valid_attestation', (['spec', 'state'], {'signed': '(True)'}), '(spec, state, signed=True)\n', (2977, 3003), False, 'from eth2spec.test.helpers.attestations import add_attestation_to_state, fill_aggregate_attestation, get_valid_attestation, 
sign_attestation\n'), ((3008, 3062), 'eth2spec.test.helpers.attestations.fill_aggregate_attestation', 'fill_aggregate_attestation', (['spec', 'state', 'attestation_1'], {}), '(spec, state, attestation_1)\n', (3034, 3062), False, 'from eth2spec.test.helpers.attestations import add_attestation_to_state, fill_aggregate_attestation, get_valid_attestation, sign_attestation\n'), ((3106, 3129), 'eth2spec.test.helpers.state.next_epoch', 'next_epoch', (['spec', 'state'], {}), '(spec, state)\n', (3116, 3129), False, 'from eth2spec.test.helpers.state import next_epoch, next_slot\n'), ((3134, 3202), 'eth2spec.test.helpers.attestations.add_attestation_to_state', 'add_attestation_to_state', (['spec', 'state', 'attestation_1', '(state.slot + 1)'], {}), '(spec, state, attestation_1, state.slot + 1)\n', (3158, 3202), False, 'from eth2spec.test.helpers.attestations import add_attestation_to_state, fill_aggregate_attestation, get_valid_attestation, sign_attestation\n'), ((3500, 3530), 'eth2spec.test.helpers.block.apply_empty_block', 'apply_empty_block', (['spec', 'state'], {}), '(spec, state)\n', (3517, 3530), False, 'from eth2spec.test.helpers.block import apply_empty_block\n'), ((3536, 3590), 'eth2spec.test.helpers.attestations.fill_aggregate_attestation', 'fill_aggregate_attestation', (['spec', 'state', 'attestation_2'], {}), '(spec, state, attestation_2)\n', (3562, 3590), False, 'from eth2spec.test.helpers.attestations import add_attestation_to_state, fill_aggregate_attestation, get_valid_attestation, sign_attestation\n'), ((3707, 3730), 'eth2spec.test.helpers.state.next_epoch', 'next_epoch', (['spec', 'state'], {}), '(spec, state)\n', (3717, 3730), False, 'from eth2spec.test.helpers.state import next_epoch, next_slot\n'), ((3735, 3803), 'eth2spec.test.helpers.attestations.add_attestation_to_state', 'add_attestation_to_state', (['spec', 'state', 'attestation_2', '(state.slot + 1)'], {}), '(spec, state, attestation_2, state.slot + 1)\n', (3759, 3803), False, 'from eth2spec.test.helpers.attestations import add_attestation_to_state, fill_aggregate_attestation, get_valid_attestation, sign_attestation\n'), ((546, 606), 'eth2spec.test.phase_0.epoch_processing.run_epoch_process_base.run_epoch_processing_with', 'run_epoch_processing_with', (['spec', 'state', '"""process_crosslinks"""'], {}), "(spec, state, 'process_crosslinks')\n", (571, 606), False, 'from eth2spec.test.phase_0.epoch_processing.run_epoch_process_base import run_epoch_processing_with\n'), ((3270, 3304), 'eth2spec.test.helpers.attestations.get_valid_attestation', 'get_valid_attestation', (['spec', 'state'], {}), '(spec, state)\n', (3291, 3304), False, 'from eth2spec.test.helpers.attestations import add_attestation_to_state, fill_aggregate_attestation, get_valid_attestation, sign_attestation\n'), ((3473, 3495), 'eth2spec.test.helpers.state.next_slot', 'next_slot', (['spec', 'state'], {}), '(spec, state)\n', (3482, 3495), False, 'from eth2spec.test.helpers.state import next_epoch, next_slot\n'), ((3402, 3446), 'eth2spec.test.helpers.attestations.sign_attestation', 'sign_attestation', (['spec', 'state', 'attestation_2'], {}), '(spec, state, attestation_2)\n', (3418, 3446), False, 'from eth2spec.test.helpers.attestations import add_attestation_to_state, fill_aggregate_attestation, get_valid_attestation, sign_attestation\n')]
|
#!/usr/bin/python3
import openpyxl
from openpyxl.utils import get_column_letter, column_index_from_string
# Why use the utils module: https://is.gd/YrDuST
wb = openpyxl.load_workbook('example.xlsx')
sheet = wb.get_sheet_by_name('Sheet1')
print('Getting the column letter from an integer')
print(get_column_letter(1))
print(get_column_letter(2))
print(get_column_letter(27))
print(get_column_letter(900))
print('\nGetting the column number from the letter')
print(get_column_letter(sheet.max_column))
# Getting the letter of the last column
print(column_index_from_string(get_column_letter(sheet.max_column)))
print(column_index_from_string('A'))
print(column_index_from_string('AA'))
print('\nGetting rows and columns from the worksheets')
# Printing the values in a tuple
# It contains three tuples inside
# print(tuple(sheet['A1':'C3']))
# print(tuple(sheet.columns)[1])
# for cellObj in list(sheet.columns)[1]:
# print(cellObj.value)
# Iterating over the range A1 to C3
for rowOfCellObjects in sheet['A1':'C3']:
for cellObj in rowOfCellObjects:
print(cellObj.coordinate, cellObj.value)
print('--- END OF ROW ---')
print('\nAccessing cell values from a row or column')
for cellObj in list(sheet.columns)[1]:  # 1 = Column B
print(cellObj.value)
# Why the list() call is needed: https://is.gd/I3d9PR
|
[
"openpyxl.utils.column_index_from_string",
"openpyxl.utils.get_column_letter",
"openpyxl.load_workbook"
] |
[((161, 199), 'openpyxl.load_workbook', 'openpyxl.load_workbook', (['"""example.xlsx"""'], {}), "('example.xlsx')\n", (183, 199), False, 'import openpyxl\n'), ((308, 328), 'openpyxl.utils.get_column_letter', 'get_column_letter', (['(1)'], {}), '(1)\n', (325, 328), False, 'from openpyxl.utils import get_column_letter, column_index_from_string\n'), ((337, 357), 'openpyxl.utils.get_column_letter', 'get_column_letter', (['(2)'], {}), '(2)\n', (354, 357), False, 'from openpyxl.utils import get_column_letter, column_index_from_string\n'), ((366, 387), 'openpyxl.utils.get_column_letter', 'get_column_letter', (['(27)'], {}), '(27)\n', (383, 387), False, 'from openpyxl.utils import get_column_letter, column_index_from_string\n'), ((396, 418), 'openpyxl.utils.get_column_letter', 'get_column_letter', (['(900)'], {}), '(900)\n', (413, 418), False, 'from openpyxl.utils import get_column_letter, column_index_from_string\n'), ((488, 523), 'openpyxl.utils.get_column_letter', 'get_column_letter', (['sheet.max_column'], {}), '(sheet.max_column)\n', (505, 523), False, 'from openpyxl.utils import get_column_letter, column_index_from_string\n'), ((639, 668), 'openpyxl.utils.column_index_from_string', 'column_index_from_string', (['"""A"""'], {}), "('A')\n", (663, 668), False, 'from openpyxl.utils import get_column_letter, column_index_from_string\n'), ((677, 707), 'openpyxl.utils.column_index_from_string', 'column_index_from_string', (['"""AA"""'], {}), "('AA')\n", (701, 707), False, 'from openpyxl.utils import get_column_letter, column_index_from_string\n'), ((594, 629), 'openpyxl.utils.get_column_letter', 'get_column_letter', (['sheet.max_column'], {}), '(sheet.max_column)\n', (611, 629), False, 'from openpyxl.utils import get_column_letter, column_index_from_string\n')]
|
import pathlib
import pandas as pd
from palmnet.visualization.utils import get_palminized_model_and_df, get_df
import matplotlib.pyplot as plt
import numpy as np
import logging
import plotly.graph_objects as go
import plotly.io as pio
from pprint import pprint as pprint
mpl_logger = logging.getLogger('matplotlib')
mpl_logger.setLevel(logging.ERROR)
pio.templates.default = "plotly_white"
dataset = {
"Cifar10": "--cifar10",
"Cifar100": "--cifar100",
# "SVHN": "--svhn",
"MNIST": "--mnist"
}
models_data = {
"Cifar10": ["--cifar10-vgg19"],
# "Cifar100": ["--cifar100-resnet20", "--cifar100-resnet50"],
"Cifar100": ["--cifar100-vgg19", "--cifar100-resnet20", "--cifar100-resnet50"],
"SVHN": ["--svhn-vgg19"],
"MNIST":["--mnist-lenet"],
}
color_bars_sparsity = {
2: "g",
3: "c",
4: "b",
5: "y"
}
tasks = {
"nb-param-compressed-total",
"finetuned-score",
"param-compression-rate-total"
}
ylabel_task = {
"nb-param-compressed-total": "log(# non-zero value)",
"finetuned-score": "Accuracy",
"param-compression-rate-total": "Compression Rate"
}
scale_tasks = {
"nb-param-compressed-total": "log",
"finetuned-score": "linear",
"param-compression-rate-total": "linear"
}
def get_palm_results():
results_path = "2020/03/9_10_finetune_palminized_no_useless"
results_path_2 = "2020/04/9_10_finetune_palminized_no_useless"
src_results_path = root_source_dir / results_path / "results.csv"
src_results_path_2 = root_source_dir / results_path_2 / "results.csv"
df = pd.read_csv(src_results_path, header=0)
df_2 = pd.read_csv(src_results_path_2, header=0)
df = pd.concat([df, df_2])
df = df.fillna("None")
df = df.drop(columns=["Unnamed: 0", "idx-expe"]).drop_duplicates()
df = df[df["keep-last-layer"] == 0]
df = df[df["use-clr"] == 1]
df = df.assign(**{"only-dense": False, "keep-first-layer": False})
return df
def get_faust_results():
results_path = "2020/05/3_4_finetune_faust_no_hierarchical_only_cifar_mnist"
src_results_path = root_source_dir / results_path / "results.csv"
df = pd.read_csv(src_results_path, header=0)
df = df.fillna("None")
df = df[df["hierarchical"] == False]
df = df.drop(columns=["Unnamed: 0", "idx-expe"]).drop_duplicates()
df = df[df["keep-last-layer"] == 0]
df = df.assign(**{"only-dense": False, "keep-first-layer": False})
return df
def get_tucker_results():
results_path_tucker = "2020/04/0_1_compression_tucker_tensortrain"
src_results_path_tucker = root_source_dir / results_path_tucker / "results.csv"
df_tucker_tt = pd.read_csv(src_results_path_tucker, header=0)
df_tucker_tt = df_tucker_tt.fillna("None")
df_tucker_tt = df_tucker_tt.assign(**{"only-dense": False, "use-pretrained": False})
df_tucker_tt = df_tucker_tt[df_tucker_tt["compression"] == "tucker"]
return df_tucker_tt
def get_tensortrain_results():
results_path_tucker = "2020/05/2_3_compression_tensortrain"
src_results_path_tucker = root_source_dir / results_path_tucker / "results.csv"
df_tucker_tt = pd.read_csv(src_results_path_tucker, header=0)
df_tucker_tt = df_tucker_tt.fillna("None")
# df_tucker_tt = df_tucker_tt[df_tucker_tt["use-pretrained"] == True]
# df_tucker_tt = df_tucker_tt[df_tucker_tt["only-dense"] == False]
return df_tucker_tt
def get_tucker_tensortrain_only_denseresults():
results_path_tucker = "2020/05/2_3_compression_tucker_tensortrain_only_dense"
src_results_path_tucker = root_source_dir / results_path_tucker / "results.csv"
df_tucker_tt = pd.read_csv(src_results_path_tucker, header=0)
df_tucker_tt = df_tucker_tt.fillna("None")
# df_tucker_tt = df_tucker_tt[df_tucker_tt["use-pretrained"] == True]
# df_tucker_tt = df_tucker_tt[df_tucker_tt["only-dense"] == True]
return df_tucker_tt
def get_palm_results_only_dense_keep_first():
results_path = "2020/05/5_6_finetune_sparse_facto_no_hierarchical_keep_first_layer_only_dense"
src_results_path = root_source_dir / results_path / "results.csv"
df = pd.read_csv(src_results_path, header=0)
df = df.fillna("None")
df = df.drop(columns=["Unnamed: 0", "idx-expe"]).drop_duplicates()
# df = df[df["only-dense"] == False]
return df
def get_deepfried_results():
results_path_tucker = "2020/05/5_6_compression_baselines"
src_results_path_tucker = root_source_dir / results_path_tucker / "results.csv"
df_tucker_tt = pd.read_csv(src_results_path_tucker, header=0)
df_tucker_tt = df_tucker_tt.fillna("None")
# df_tucker_tt = df_tucker_tt.assign(**{"only-dense": True, "use-pretrained": False})
df_tucker_tt = df_tucker_tt[df_tucker_tt["compression"] == "deepfried"]
return df_tucker_tt
def get_magnitude_results():
results_path_tucker = "2020/05/5_6_compression_baselines"
src_results_path_tucker = root_source_dir / results_path_tucker / "results.csv"
df_tucker_tt = pd.read_csv(src_results_path_tucker, header=0)
df_tucker_tt = df_tucker_tt.fillna("None")
# df_tucker_tt = df_tucker_tt.assign(**{"only-dense": True, "use-pretrained": False})
df_tucker_tt = df_tucker_tt[df_tucker_tt["compression"] == "magnitude"]
return df_tucker_tt
if __name__ == "__main__":
root_source_dir = pathlib.Path("/home/luc/PycharmProjects/palmnet/results/processed")
SHOW_FAUST = False
SHOW_KEEP_FIRST_ONLY = False
SHOW_PRETRAINED_ONLY = True
results_path = "2020/05/5_6_finetune_sparse_facto_perf_vs_param"
df_tucker = get_tucker_results()
df_tt = get_tensortrain_results()
df_deepfried = get_deepfried_results()
df_tucker_tt_only_dense = get_tucker_tensortrain_only_denseresults()
df_magnitude = get_magnitude_results()
df_tucker_tt_deepfried = pd.concat([df_tucker, df_tt, df_tucker_tt_only_dense, df_deepfried, df_magnitude])
df_palm = get_palm_results()
df_palm_bis = get_palm_results_only_dense_keep_first()
df_palm = pd.concat([df_palm, df_palm_bis])
# ONLY_DENSE = False
# df_tucker_tt = df_tucker_tt[df_tucker_tt["only-dense"] == ONLY_DENSE]
# df_palm = df_palm[df_palm["only-dense"] == ONLY_DENSE]
root_output_dir = pathlib.Path("/home/luc/PycharmProjects/palmnet/reports/figures/")
output_dir = root_output_dir / results_path / "histogrammes"
output_dir.mkdir(parents=True, exist_ok=True)
# sparsity_factors = sorted(set(df_palminized["--sparsity-factor"]))
# nb_factors = set(df_palm["nb-factor"].values)
hue_by_sparsity= {
2: 10,
3: 60,
4: 110,
5: 180
}
saturation_by_perm = {
1: 50,
0: 75
}
saturation_by_hier = {
1: 50,
0: 75
}
lum_by_clr = {
1: 20,
0: 30
}
lum_by_keep = {
1: 40,
0: 50
}
dct_symbol = {
"FAUST Q=2": "square",
"FAUST Q=3": "diamond",
"FAUST Q=None": "square-x",
"FAUST Q=None H": "star-square",
"PYQALM Q=2": "square-open",
"PYQALM Q=3": "diamond-open",
"PYQALM Q=None": "hash-open",
"PYQALM Q=None H": "star-square-open",
"PYQALM Q=2 -1": "square-open-dot",
"PYQALM Q=3 -1": "diamond-open-dot",
"PYQALM Q=None -1": "hash-open-dot",
"PYQALM Q=None H -1": "star-square-open-dot",
"PYQALM Q=2 -1 M": "square",
"PYQALM Q=3 -1 M": "diamond",
"PYQALM Q=None -1 M": "hash",
"PYQALM Q=None H -1 M": "star-square",
"Base": "x",
"Tucker": "circle",
"Tucker -1": "circle-dot",
"TT": "triangle-up",
"TT -1": "triangle-up-dot",
"TT -1 pretrained": "triangle-up-open-dot",
"Deepfried": "hexagram",
"Magnitude ": "square",
"Magnitude -1": "square",
}
dct_colors = {
"PALM K=2": "dodgerblue",
"PALM K=3": "darkorchid",
"PALM K=4": "green",
"PALM K=6": "aqua",
"PALM K=8": "cadetblue",
"TT R=2": "orange",
"TT R=6": "gold",
"TT R=10": "red",
"TT R=12": "darkred",
"TT R=14": "indianred",
"Base": "grey",
"Tucker": "pink",
"Tucker + Low Rank 10%": "orange",
"Tucker + Low Rank 20%": "gold",
"Tucker + Low Rank 30%": "red",
"Deepfried": "blueviolet",
"Magnitude 50%": "red",
"Magnitude 70%": "red",
"Magnitude 90%": "red",
}
SIZE_MARKERS = 15
WIDTH_MARKER_LINES = 2
datasets = set(df_palm["dataset"].values)
dct_table = dict()
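    # dct_table: dataset -> model -> ("Dense" | "Conv+Dense") -> list of {method, perf, nb_param} rows, reused for the LaTeX table below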
for dataname in datasets:
dct_table[dataname] = dict()
df_data_palm = df_palm[df_palm["dataset"] == dataname]
df_tucker_tt_data = df_tucker_tt_deepfried[df_tucker_tt_deepfried["dataset"] == dataname]
df_model_values = set(df_data_palm["model"].values)
for modelname in df_model_values:
dct_table[dataname][modelname] = dict()
df_model_palm = df_data_palm[df_data_palm["model"] == modelname]
df_tucker_tt_model = df_tucker_tt_data[df_tucker_tt_data["model"] == modelname]
for ONLY_DENSE in [True, False]:
df_tucker_tt_model_dense = df_tucker_tt_model[df_tucker_tt_model["only-dense"] == ONLY_DENSE]
df_model_palm_dense = df_model_palm[df_model_palm["only-dense"] == ONLY_DENSE]
if ONLY_DENSE:
str_nb_param_compressed = "nb-param-compressed-dense"
str_nb_param_base = "nb-param-base-dense"
str_only_dense = " only dense"
else:
str_nb_param_compressed = "nb-param-compressed-total"
str_nb_param_base = "nb-param-base-total"
str_only_dense = ""
dct_entry_only_dense = "Dense" if ONLY_DENSE else "Conv+Dense"
dct_table[dataname][modelname][dct_entry_only_dense] = list()
fig = go.Figure()
base_score = None
base_nb_param = None
palm_algo = "PYQALM"
for idx_row, row in df_model_palm_dense.iterrows():
hierarchical_value = row["hierarchical"]
str_hierarchical = ' H' if hierarchical_value is True else ''
try:
nb_factor = int(row["nb-factor"])
except:
nb_factor = None
sparsity_factor = int(row["sparsity-factor"])
keep_first = row["keep-first-layer"]
str_keep_first = ' -1' if keep_first is True else ''
if SHOW_KEEP_FIRST_ONLY and not keep_first and not ONLY_DENSE:
continue
only_mask = row["only-mask"]
str_only_mask = " M" if only_mask is True else ""
name_trace = f"{palm_algo} Q={nb_factor} K={sparsity_factor}{str_hierarchical}{str_keep_first}{str_only_mask}"
finetuned_score = row["finetuned-score"]
nb_param = row[str_nb_param_compressed]
dct_row = dict()
dct_row["method"] = name_trace
dct_row["perf"] = finetuned_score
dct_row["nb_param"] = nb_param
dct_table[dataname][modelname][dct_entry_only_dense].append(dct_row)
base_score_tmp = row["base-model-score"]
assert base_score == base_score_tmp or base_score is None
base_nb_param_tmp = row[str_nb_param_base]
assert base_nb_param == base_nb_param_tmp or base_nb_param is None
base_score = base_score_tmp
base_nb_param = base_nb_param_tmp
fig.add_trace(
go.Scatter(
x=[nb_param],
y=[finetuned_score],
mode='markers',
name=name_trace,
hovertext=name_trace,
legendgroup=f"{palm_algo} K={sparsity_factor}{str_only_mask}",
marker=dict(
color=dct_colors[f"PALM K={sparsity_factor}"],
symbol=dct_symbol[f"{palm_algo} Q={nb_factor}{str_hierarchical}{str_keep_first}{str_only_mask}"],
size=SIZE_MARKERS,
line=dict(
color='Black',
width=WIDTH_MARKER_LINES
)
)
))
dct_row = dict()
dct_row["method"] = "Base"
dct_row["perf"] = base_score
dct_row["nb_param"] = base_nb_param
dct_table[dataname][modelname][dct_entry_only_dense].append(dct_row)
#############
# base data #
#############
fig.add_trace(
go.Scatter(
x=[base_nb_param],
y=[base_score],
mode='markers',
name="Base",
hovertext="Base",
legendgroup=f"Base",
marker=dict(
color=dct_colors[f"Base"],
symbol=dct_symbol[f"Base"],
size=SIZE_MARKERS,
line=dict(
color='Black',
width=WIDTH_MARKER_LINES,
)
)
))
###############
# tucker data #
###############
df_tucker = df_tucker_tt_model_dense[df_tucker_tt_model_dense["compression"] == "tucker"]
for idx_row, row in df_tucker.iterrows():
keep_first = row["keep-first-layer"]
str_keep_first = ' -1' if keep_first is True else ''
if SHOW_KEEP_FIRST_ONLY and not keep_first and not ONLY_DENSE:
continue
try:
rank_percentage = int(float(row["rank-percentage-dense"]) * 100)
except:
try:
rank_percentage = int(float(row["rank-percentage"]) * 100)
except:
rank_percentage = None
str_percentage = f' + Low Rank {rank_percentage}%' if rank_percentage is not None else ''
name_trace = f"Tucker{str_keep_first}{str_percentage}"
finetuned_score = row["finetuned-score"]
nb_param = row[str_nb_param_compressed]
dct_row = dict()
dct_row["method"] = name_trace
dct_row["perf"] = finetuned_score
dct_row["nb_param"] = nb_param
dct_table[dataname][modelname][dct_entry_only_dense].append(dct_row)
base_score_tmp = row["base-model-score"]
assert base_score == base_score_tmp or base_score is None
base_nb_param_tmp = row[str_nb_param_base]
assert base_nb_param == base_nb_param_tmp or base_nb_param is None or base_nb_param_tmp == 0, f"{base_nb_param}!={base_nb_param_tmp}"
fig.add_trace(
go.Scatter(
x=[nb_param],
y=[finetuned_score],
mode='markers',
name=name_trace,
hovertext=name_trace,
legendgroup=f"Tucker{str_percentage}",
marker=dict(
color=dct_colors[f"Tucker{str_percentage}"],
symbol=dct_symbol[f"Tucker{str_keep_first}"],
size=SIZE_MARKERS,
line=dict(
color='Black',
width=WIDTH_MARKER_LINES
)
)
))
###############
# magnitude data #
###############
df_magnitude = df_tucker_tt_model_dense[df_tucker_tt_model_dense["compression"] == "magnitude"]
for idx_row, row in df_magnitude.iterrows():
keep_first = row["keep-first-layer"]
str_keep_first = ' -1' if keep_first is True else ''
if SHOW_KEEP_FIRST_ONLY and not keep_first and not ONLY_DENSE:
continue
# try:
sparsity_percentage = int(float(row["final-sparsity"]) * 100)
# except:
# try:
# rank_percentage = int(float(row["rank-percentage"]) * 100)
# except:
# rank_percentage = None
str_percentage = f' {sparsity_percentage}%' #if sparsity_percentage is not None else ''
name_trace = f"Magnitude {str_keep_first}{str_percentage}"
finetuned_score = row["finetuned-score"]
nb_param = row[str_nb_param_compressed]
dct_row = dict()
dct_row["method"] = name_trace
dct_row["perf"] = finetuned_score
dct_row["nb_param"] = nb_param
dct_table[dataname][modelname][dct_entry_only_dense].append(dct_row)
print(finetuned_score)
base_score_tmp = row["base-model-score"]
assert np.isclose(base_score, base_score_tmp) or base_score is None, f"{base_score}!={base_score_tmp}"
base_nb_param_tmp = row[str_nb_param_base]
assert base_nb_param == base_nb_param_tmp or base_nb_param is None or base_nb_param_tmp == 0, f"{base_nb_param}!={base_nb_param_tmp}"
fig.add_trace(
go.Scatter(
x=[nb_param],
y=[finetuned_score],
mode='markers',
name=name_trace,
hovertext=name_trace,
legendgroup=f"Magnitude",
marker=dict(
color=dct_colors[f"Magnitude {str_percentage}"],
symbol=dct_symbol[f"Magnitude {str_keep_first}"],
size=SIZE_MARKERS,
line=dict(
color='Black',
width=WIDTH_MARKER_LINES
)
)
))
###############
# deepfried data #
###############
df_deepfried = df_tucker_tt_model_dense[df_tucker_tt_model_dense["compression"] == "deepfried"]
for idx_row, row in df_deepfried.iterrows():
keep_first = row["keep-first-layer"]
str_keep_first = ' -1' if keep_first is True else ''
if SHOW_KEEP_FIRST_ONLY and not keep_first and not ONLY_DENSE:
continue
# try:
# sparsity_percentage = int(float(row["final-sparsity"]) * 100)
# except:
# try:
# rank_percentage = int(float(row["rank-percentage"]) * 100)
# except:
# rank_percentage = None
# str_percentage = f' {sparsity_percentage}%' #if sparsity_percentage is not None else ''
name_trace = f"Deepfried {str_keep_first}"
finetuned_score = row["finetuned-score"]
nb_param = row[str_nb_param_compressed]
if nb_param == 0:
conv_nb_weights = row["nb-param-base-total"] - base_nb_param
nb_param = row["nb-param-compressed-total"] - conv_nb_weights
dct_row = dict()
dct_row["method"] = name_trace
dct_row["perf"] = finetuned_score
dct_row["nb_param"] = nb_param
dct_table[dataname][modelname][dct_entry_only_dense].append(dct_row)
print(finetuned_score)
base_score_tmp = row["base-model-score"]
assert np.isclose(base_score, base_score_tmp) or base_score is None, f"{base_score}!={base_score_tmp}"
base_nb_param_tmp = row[str_nb_param_base]
assert base_nb_param == base_nb_param_tmp or base_nb_param is None or base_nb_param_tmp == 0, f"{base_nb_param}!={base_nb_param_tmp}"
fig.add_trace(
go.Scatter(
x=[nb_param],
y=[finetuned_score],
mode='markers',
name=name_trace,
hovertext=name_trace,
legendgroup=f"Deepfried",
marker=dict(
color=dct_colors[f"Deepfried"],
symbol=dct_symbol[f"Deepfried"],
size=SIZE_MARKERS,
line=dict(
color='Black',
width=WIDTH_MARKER_LINES
)
)
))
####################
# tensortrain data #
####################
df_tt = df_tucker_tt_model_dense[df_tucker_tt_model_dense["compression"] == "tensortrain"]
for idx_row, row in df_tt.iterrows():
keep_first = row["keep-first-layer"]
str_keep_first = ' -1' if keep_first is True else ''
if SHOW_KEEP_FIRST_ONLY and not keep_first and not ONLY_DENSE:
continue
order = int(row["order"])
rank_value = int(row["rank-value"])
if not np.isnan(row["use-pretrained"]):
use_petrained = bool(row["use-pretrained"])
str_pretrained = " pretrained" if use_petrained else ""
else:
use_petrained = False
str_pretrained = ""
if SHOW_PRETRAINED_ONLY and not use_petrained and not ONLY_DENSE:
continue
name_trace = f"Tensortrain{str_keep_first} K={order} R={rank_value}{str_pretrained}"
finetuned_score = row["finetuned-score"]
nb_param = row[str_nb_param_compressed]
dct_row = dict()
dct_row["method"] = name_trace
dct_row["perf"] = finetuned_score
dct_row["nb_param"] = nb_param
dct_table[dataname][modelname][dct_entry_only_dense].append(dct_row)
base_score_tmp = row["base-model-score"]
assert base_score == base_score_tmp or base_score is None
base_nb_param_tmp = row[str_nb_param_base]
assert base_nb_param == base_nb_param_tmp or base_nb_param is None
fig.add_trace(
go.Scatter(
x=[nb_param],
y=[finetuned_score],
mode='markers',
name=name_trace,
hovertext=name_trace,
legendgroup=f"TT R={rank_value}",
marker=dict(
color=dct_colors[f"TT R={rank_value}"],
symbol=dct_symbol[f"TT{str_keep_first}{str_pretrained}"],
size=SIZE_MARKERS,
line=dict(
color='Black',
width=WIDTH_MARKER_LINES
)
)
))
title = "Performance = f(# Param); " + dataname + " " + modelname + str_only_dense
fig.update_layout(title=title,
xaxis_title="# Parameter in Dense and Conv Layers",
yaxis_title="Accuracy (%)",
xaxis_type="log",
)
fig.show()
fig.write_image(str((output_dir / title).absolute()) + ".png")
pprint(dct_table)
# string_table = """
# \begin{tabular}{lcccccccccccccccccccccc}
# \toprule
#
# {} & \multicolumn{2}{c}{ \thead{ Ensemble } } & \multicolumn{2}{c}{ \thead{ Kmeans } } & \multicolumn{2}{c}{ \thead{ NN-OMP\\w/o weights } } & \multicolumn{2}{c}{ \thead{ NN-OMP } } & \multicolumn{2}{c}{ \thead{ OMP\\w/o weights } } & \multicolumn{2}{c}{ \thead{ OMP } } & \multicolumn{2}{c}{ \thead{ Random } } & \multicolumn{2}{c}{ \thead{ Zhang\\Predictions } } & \multicolumn{2}{c}{ \thead{ Zhang\\Similarities } }\\
# \midrule
# Diam. & 3.032E+05 & 86 & \underline{3.024E+05} & \underline{143} & \textbf{3.024E+05} & \textbf{86} & 3.033E+05 & 86 & 3.025E+05 & 143 & \textit{3.087E+05} & \textit{29} & 3.025E+05 & 114 & 3.047E+05 & 143 & 3.032E+05 & 143\\
# Diab. & 3.431E+03 & 32 & \underline{3.281E+03} & \underline{36} & 3.317E+03 & 36 & 3.549E+03 & 36 & 3.324E+03 & 36 & \textit{3.607E+03} & \textit{25} & 3.303E+03 & 32 & 3.282E+03 & 36 & \textbf{3.241E+03} & \textbf{32}\\
# Kin. & 1.892E-02 & 200 & \textit{2.024E-02} & \textit{33} & 1.921E-02 & 133 & \underline{1.809E-02} & \underline{133} & 1.931E-02 & 67 & \textbf{1.776E-02} & \textbf{333} & 2.002E-02 & 333 & 2.089E-02 & 333 & 2.017E-02 & 333\\
# <NAME>. & \underline{2.187E-01} & \underline{267} & \textit{2.449E-01} & \textit{33} & 2.239E-01 & 100 & \textbf{2.180E-01} & \textbf{133} & \textit{2.267E-01} & \textit{33} & 2.197E-01 & 133 & 2.390E-01 & 333 & 2.536E-01 & 333 & 2.452E-01 & 333\\
# Bos. & 1.267E+01 & 30 & \textit{1.278E+01} & \textit{13} & \textbf{1.214E+01} & \textbf{33} & 1.253E+01 & 33 & \underline{1.247E+01} & \underline{27} & \textit{1.293E+01} & \textit{13} & 1.253E+01 & 33 & 1.430E+01 & 33 & 1.283E+01 & 33\\
# \midrule
# Sp. B. & 94.27\% & 133 & 95.52\% & 167 & \textit{95.57\%} & \textit{100} & \underline{\textit{95.59\%}} & \underline{\textit{100}} & 95.56\% & 167 & 95.39\% & 133 & \textbf{95.59\%} & \textbf{167} & 95.45\% & 333 & 95.46\% & 167\\
# St. P. & 98.69\% & 233 & 99.05\% & 267 & \underline{\textit{99.95\%}} & \underline{\textit{67}} & \textbf{99.95\%} & \textbf{100} & \textit{99.64\%} & \textit{67} & 99.90\% & 333 & \textit{99.41\%} & \textit{67} & 99.43\% & 167 & 98.92\% & 300\\
# KR-KP & \textit{98.22\%} & \textit{33} & 99.00\% & 333 & \underline{99.42\%} & \underline{100} & 99.39\% & 100 & 99.22\% & 100 & \textbf{99.48\%} & \textbf{100} & 99.14\% & 267 & 99.14\% & 133 & 98.94\% & 333\\
# B. C. & 95.09\% & 100 & \textbf{\textit{96.58\%}} & \textbf{\textit{33}} & \underline{96.49\%} & \underline{67} & \textbf{96.58\%} & \textbf{67} & 95.79\% & 133 & 95.35\% & 67 & 95.88\% & 300 & \textit{95.70\%} & \textit{33} & 95.61\% & 333\\
# LFW P. & \textit{56.00\%} & \textit{67} & 65.25\% & 333 & \textbf{66.02\%} & \textbf{333} & 65.73\% & 233 & 65.32\% & 133 & 65.55\% & 167 & \underline{65.98\%} & \underline{267} & 65.43\% & 333 & 65.27\% & 333\\
# Gam. & \textit{80.78\%} & \textit{3} & 87.68\% & 33 & \underline{87.75\%} & \underline{33} & \underline{87.75\%} & \underline{33} & \underline{87.75\%} & \underline{33} & \underline{87.75\%} & \underline{33} & \textbf{87.76\%} & \textbf{33} & 87.72\% & 33 & 87.68\% & 33\\
#
# \bottomrule
# \end{tabular}
# """
tab_headers = [
"Dataset",
"Architecture",
"Compressed layers",
"Method",
"Performance",
"# Parameters"
]
str_table = """\\begin{{tabular}}{{cccccc}}
\\toprule
{}
\\bottomrule
    \\end{{tabular}}
"""
lst_lines_tabular = ["&".join(tab_headers)]
for dataname in dct_table:
for model in dct_table[dataname]:
for layers in dct_table[dataname][model]:
if layers != "Conv+Dense":
continue
for lin in dct_table[dataname][model][layers]:
if "PYQALM Q=None" in str(lin["method"]):
continue
lst_line = [dataname, model, layers]
lst_line.append(str(lin["method"]))
lst_line.append("{:.2f}".format(lin["perf"]))
lst_line.append(str(int(lin["nb_param"])))
                    str_line = "&".join(lst_line).replace("%", "\\%").replace("#", "\\#")
lst_lines_tabular.append(str_line)
final_string = str_table.format("\\\\ \n".join(lst_lines_tabular) + "\\\\")
with open(str((output_dir / "table.tex").absolute()), 'w') as wf:
wf.write(final_string)
print(final_string)
|
[
"pandas.read_csv",
"plotly.graph_objects.Figure",
"numpy.isnan",
"pathlib.Path",
"numpy.isclose",
"pprint.pprint",
"pandas.concat",
"logging.getLogger"
] |
[((285, 316), 'logging.getLogger', 'logging.getLogger', (['"""matplotlib"""'], {}), "('matplotlib')\n", (302, 316), False, 'import logging\n'), ((1573, 1612), 'pandas.read_csv', 'pd.read_csv', (['src_results_path'], {'header': '(0)'}), '(src_results_path, header=0)\n', (1584, 1612), True, 'import pandas as pd\n'), ((1624, 1665), 'pandas.read_csv', 'pd.read_csv', (['src_results_path_2'], {'header': '(0)'}), '(src_results_path_2, header=0)\n', (1635, 1665), True, 'import pandas as pd\n'), ((1675, 1696), 'pandas.concat', 'pd.concat', (['[df, df_2]'], {}), '([df, df_2])\n', (1684, 1696), True, 'import pandas as pd\n'), ((2143, 2182), 'pandas.read_csv', 'pd.read_csv', (['src_results_path'], {'header': '(0)'}), '(src_results_path, header=0)\n', (2154, 2182), True, 'import pandas as pd\n'), ((2650, 2696), 'pandas.read_csv', 'pd.read_csv', (['src_results_path_tucker'], {'header': '(0)'}), '(src_results_path_tucker, header=0)\n', (2661, 2696), True, 'import pandas as pd\n'), ((3132, 3178), 'pandas.read_csv', 'pd.read_csv', (['src_results_path_tucker'], {'header': '(0)'}), '(src_results_path_tucker, header=0)\n', (3143, 3178), True, 'import pandas as pd\n'), ((3632, 3678), 'pandas.read_csv', 'pd.read_csv', (['src_results_path_tucker'], {'header': '(0)'}), '(src_results_path_tucker, header=0)\n', (3643, 3678), True, 'import pandas as pd\n'), ((4123, 4162), 'pandas.read_csv', 'pd.read_csv', (['src_results_path'], {'header': '(0)'}), '(src_results_path, header=0)\n', (4134, 4162), True, 'import pandas as pd\n'), ((4514, 4560), 'pandas.read_csv', 'pd.read_csv', (['src_results_path_tucker'], {'header': '(0)'}), '(src_results_path_tucker, header=0)\n', (4525, 4560), True, 'import pandas as pd\n'), ((4996, 5042), 'pandas.read_csv', 'pd.read_csv', (['src_results_path_tucker'], {'header': '(0)'}), '(src_results_path_tucker, header=0)\n', (5007, 5042), True, 'import pandas as pd\n'), ((5332, 5399), 'pathlib.Path', 'pathlib.Path', (['"""/home/luc/PycharmProjects/palmnet/results/processed"""'], {}), "('/home/luc/PycharmProjects/palmnet/results/processed')\n", (5344, 5399), False, 'import pathlib\n'), ((5824, 5910), 'pandas.concat', 'pd.concat', (['[df_tucker, df_tt, df_tucker_tt_only_dense, df_deepfried, df_magnitude]'], {}), '([df_tucker, df_tt, df_tucker_tt_only_dense, df_deepfried,\n df_magnitude])\n', (5833, 5910), True, 'import pandas as pd\n'), ((6014, 6047), 'pandas.concat', 'pd.concat', (['[df_palm, df_palm_bis]'], {}), '([df_palm, df_palm_bis])\n', (6023, 6047), True, 'import pandas as pd\n'), ((6234, 6300), 'pathlib.Path', 'pathlib.Path', (['"""/home/luc/PycharmProjects/palmnet/reports/figures/"""'], {}), "('/home/luc/PycharmProjects/palmnet/reports/figures/')\n", (6246, 6300), False, 'import pathlib\n'), ((25628, 25645), 'pprint.pprint', 'pprint', (['dct_table'], {}), '(dct_table)\n', (25634, 25645), True, 'from pprint import pprint as pprint\n'), ((9998, 10009), 'plotly.graph_objects.Figure', 'go.Figure', ([], {}), '()\n', (10007, 10009), True, 'import plotly.graph_objects as go\n'), ((18241, 18279), 'numpy.isclose', 'np.isclose', (['base_score', 'base_score_tmp'], {}), '(base_score, base_score_tmp)\n', (18251, 18279), True, 'import numpy as np\n'), ((21201, 21239), 'numpy.isclose', 'np.isclose', (['base_score', 'base_score_tmp'], {}), '(base_score, base_score_tmp)\n', (21211, 21239), True, 'import numpy as np\n'), ((22987, 23018), 'numpy.isnan', 'np.isnan', (["row['use-pretrained']"], {}), "(row['use-pretrained'])\n", (22995, 23018), True, 'import numpy as np\n')]
|