| code | apis | extract_api |
| --- | --- | --- |
| stringlengths 22 – 1.05M | listlengths 1 – 3.31k | stringlengths 75 – 3.25M |
import os
from os.path import join
import cv2
import pickle
import torch
import numpy as np
import pandas as pd
import torch.utils.data as data
class InteriorNet(data.Dataset):
def __init__(self, root_dir, label_name='_raycastingV2',
pred_dir='pred', method_name='sharpnet_pred',
gt_dir='data', depth_ext='-depth-plane.png', normal_ext='-normal.png', im_ext='-rgb.png',
label_dir='label', label_ext='-order-pix.npy'):
super(InteriorNet, self).__init__()
self.root_dir = root_dir
self.label_name = label_name
self.method_name = method_name
self.im_ext = im_ext
self.gt_dir = gt_dir
self.label_dir = label_dir
self.pred_dir = pred_dir
self.depth_ext = depth_ext
self.normal_ext = normal_ext
self.label_ext = label_ext
self.df = pd.read_csv(join(root_dir, 'InteriorNet.txt'))
def __len__(self):
return len(self.df)
def __getitem__(self, index):
depth_gt, depth_pred, label, normal, img = self._fetch_data(index)
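        # convert numpy arrays to float tensors: depth maps gain a channel dim, HWC arrays become CHW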
depth_gt = torch.from_numpy(np.ascontiguousarray(depth_gt)).float().unsqueeze(0)
depth_pred = torch.from_numpy(np.ascontiguousarray(depth_pred)).float().unsqueeze(0)
label = torch.from_numpy(np.ascontiguousarray(label)).float().permute(2, 0, 1)
normal = torch.from_numpy(np.ascontiguousarray(normal)).float().permute(2, 0, 1)
img = torch.from_numpy(np.ascontiguousarray(img)).float().permute(2, 0, 1)
return depth_gt, depth_pred, label, normal, img
def _fetch_data(self, index):
# fetch predicted depth map in meters
depth_pred_path = join(self.root_dir, self.pred_dir, self.df.iloc[index]['scene'],
self.method_name, 'data', '{}.pkl'.format(self.df.iloc[index]['image']))
with open(depth_pred_path, 'rb') as f:
depth_pred = pickle.load(f)
# fetch ground truth depth map in meters
depth_gt_path = join(self.root_dir, self.gt_dir,
'{}{}'.format(self.df.iloc[index]['scene'], self.label_name),
'{:04d}{}'.format(self.df.iloc[index]['image'], self.depth_ext))
if not os.path.exists(depth_gt_path):
print(depth_gt_path)
depth_gt = cv2.imread(depth_gt_path, -1) / 1000
        # fetch normal map as unit-norm vectors
normal_path = join(self.root_dir, self.gt_dir,
'{}{}'.format(self.df.iloc[index]['scene'], self.label_name),
'{:04d}{}'.format(self.df.iloc[index]['image'], self.normal_ext))
normal = cv2.imread(normal_path, -1) / (2 ** 16 - 1) * 2 - 1
normal = normal[:, :, ::-1]
# fetch rgb image
image_path = join(self.root_dir, self.gt_dir,
'{}{}'.format(self.df.iloc[index]['scene'], self.label_name),
'{:04d}{}'.format(self.df.iloc[index]['image'], self.im_ext))
img = cv2.imread(image_path, -1) / 255
img = img[:, :, ::-1]
# fetch occlusion orientation labels
label_path = join(self.root_dir, self.label_dir,
'{}{}'.format(self.df.iloc[index]['scene'], self.label_name),
'{:04d}{}'.format(self.df.iloc[index]['image'], self.label_ext))
label = np.load(label_path)
return depth_gt, depth_pred, label, normal, img
if __name__ == "__main__":
root_dir = '/space_sdd/InteriorNet'
dataset = InteriorNet(root_dir)
print(len(dataset))
from tqdm import tqdm
from torch.utils.data import DataLoader
import sys
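    # sanity check: load one batch and print the tensor shapes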
test_loader = DataLoader(dataset, batch_size=4, shuffle=False)
for i, data in tqdm(enumerate(test_loader)):
if i == 0:
print(data[0].shape, data[1].shape, data[2].shape, data[3].shape, data[4].shape)
sys.exit()
|
[
"numpy.load",
"torch.utils.data.DataLoader",
"numpy.ascontiguousarray",
"os.path.exists",
"cv2.imread",
"pickle.load",
"os.path.join",
"sys.exit"
] |
[((3710, 3758), 'torch.utils.data.DataLoader', 'DataLoader', (['dataset'], {'batch_size': '(4)', 'shuffle': '(False)'}), '(dataset, batch_size=4, shuffle=False)\n', (3720, 3758), False, 'from torch.utils.data import DataLoader\n'), ((3400, 3419), 'numpy.load', 'np.load', (['label_path'], {}), '(label_path)\n', (3407, 3419), True, 'import numpy as np\n'), ((892, 925), 'os.path.join', 'join', (['root_dir', '"""InteriorNet.txt"""'], {}), "(root_dir, 'InteriorNet.txt')\n", (896, 925), False, 'from os.path import join\n'), ((1936, 1950), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (1947, 1950), False, 'import pickle\n'), ((2259, 2288), 'os.path.exists', 'os.path.exists', (['depth_gt_path'], {}), '(depth_gt_path)\n', (2273, 2288), False, 'import os\n'), ((2342, 2371), 'cv2.imread', 'cv2.imread', (['depth_gt_path', '(-1)'], {}), '(depth_gt_path, -1)\n', (2352, 2371), False, 'import cv2\n'), ((3038, 3064), 'cv2.imread', 'cv2.imread', (['image_path', '(-1)'], {}), '(image_path, -1)\n', (3048, 3064), False, 'import cv2\n'), ((3933, 3943), 'sys.exit', 'sys.exit', ([], {}), '()\n', (3941, 3943), False, 'import sys\n'), ((2679, 2706), 'cv2.imread', 'cv2.imread', (['normal_path', '(-1)'], {}), '(normal_path, -1)\n', (2689, 2706), False, 'import cv2\n'), ((1126, 1156), 'numpy.ascontiguousarray', 'np.ascontiguousarray', (['depth_gt'], {}), '(depth_gt)\n', (1146, 1156), True, 'import numpy as np\n'), ((1217, 1249), 'numpy.ascontiguousarray', 'np.ascontiguousarray', (['depth_pred'], {}), '(depth_pred)\n', (1237, 1249), True, 'import numpy as np\n'), ((1305, 1332), 'numpy.ascontiguousarray', 'np.ascontiguousarray', (['label'], {}), '(label)\n', (1325, 1332), True, 'import numpy as np\n'), ((1393, 1421), 'numpy.ascontiguousarray', 'np.ascontiguousarray', (['normal'], {}), '(normal)\n', (1413, 1421), True, 'import numpy as np\n'), ((1479, 1504), 'numpy.ascontiguousarray', 'np.ascontiguousarray', (['img'], {}), '(img)\n', (1499, 1504), True, 'import numpy as np\n')]
|
#!/usr/bin/env python3
from pathlib import Path
import requests
from bs4 import BeautifulSoup
page = requests.get(
"https://animalcrossing.fandom.com/wiki/K.K._Slider_song_list_(New_Horizons)"
)
tree = BeautifulSoup(page.content, "lxml")
with open(Path("src") / "turbot" / "assets" / "songs.csv", "w", newline="") as out:
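    # pull the song title text from the second link in a table cell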
def data_from(item):
title = item.select("a")[1]
return title.text
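    # the song list is held in the second table on the page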
table_tag = tree.select("table")[1]
data = [
[data_from(item) for item in row_data.select("td")]
for row_data in table_tag.select("tr")
]
out.write("name\n")
for row in data:
for title in row:
out.write(f"{title}\n")
|
[
"bs4.BeautifulSoup",
"pathlib.Path",
"requests.get"
] |
[((104, 205), 'requests.get', 'requests.get', (['"""https://animalcrossing.fandom.com/wiki/K.K._Slider_song_list_(New_Horizons)"""'], {}), "(\n 'https://animalcrossing.fandom.com/wiki/K.K._Slider_song_list_(New_Horizons)'\n )\n", (116, 205), False, 'import requests\n'), ((209, 244), 'bs4.BeautifulSoup', 'BeautifulSoup', (['page.content', '"""lxml"""'], {}), "(page.content, 'lxml')\n", (222, 244), False, 'from bs4 import BeautifulSoup\n'), ((257, 268), 'pathlib.Path', 'Path', (['"""src"""'], {}), "('src')\n", (261, 268), False, 'from pathlib import Path\n')]
|
import filecmp
import shutil
import os
import unittest
import cluster_vcf_records
from minos import vcf_chunker
this_dir = os.path.dirname(os.path.abspath(__file__))
data_dir = os.path.join(this_dir, "data", "vcf_chunker")
class TestVcfChunker(unittest.TestCase):
def test_total_variants_and_alleles_in_vcf_dict(self):
"""test _total_variants_and_alleles_in_vcf_dict"""
class FakeVcf:
def __init__(self, alt):
self.ALT = alt
test_dict = {
"chrom1": [FakeVcf("123"), FakeVcf("1"), FakeVcf("123456789")],
"chrom2": [FakeVcf("12"), FakeVcf("1234")],
}
expect_variants = 5
expect_alleles = 24
(
got_variants,
got_alleles,
) = vcf_chunker.VcfChunker._total_variants_and_alleles_in_vcf_dict(test_dict)
self.assertEqual(expect_variants, got_variants)
self.assertEqual(expect_alleles, got_alleles)
def test_chunk_end_indexes_from_vcf_record_list(self):
"""test _chunk_end_indexes_from_vcf_record_list"""
record_list = [
cluster_vcf_records.vcf_record.VcfRecord("ref\t1\t.\tA\tG\t.\t.\t.\t."),
cluster_vcf_records.vcf_record.VcfRecord(
"ref\t2\t.\tC\tT,A,G,TA\t.\t.\t.\t."
),
cluster_vcf_records.vcf_record.VcfRecord("ref\t3\t.\tT\tA,C\t.\t.\t.\t."),
cluster_vcf_records.vcf_record.VcfRecord(
"ref\t5\t.\tAGAGTCACGTA\tG\t.\t.\t.\t."
),
cluster_vcf_records.vcf_record.VcfRecord("ref\t18\t.\tA\tG\t.\t.\t.\t."),
cluster_vcf_records.vcf_record.VcfRecord("ref\t21\t.\tG\tT\t.\t.\t.\t."),
]
self.assertEqual(
(0, 0, 1),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 0, 1, total_alleles=1
),
)
self.assertEqual(
(0, 0, 1),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 0, 1, total_alleles=2
),
)
self.assertEqual(
(0, 0, 1),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 0, 1, total_alleles=3
),
)
self.assertEqual(
(0, 0, 1),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 0, 1, total_alleles=4
),
)
self.assertEqual(
(0, 0, 1),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 0, 1, total_alleles=5
),
)
self.assertEqual(
(0, 0, 1),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 0, 1, total_alleles=6
),
)
self.assertEqual(
(0, 1, 2),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 0, 1, total_alleles=7
),
)
self.assertEqual(
(0, 1, 2),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 0, 1, total_alleles=8
),
)
self.assertEqual(
(0, 1, 2),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 0, 1, total_alleles=9
),
)
self.assertEqual(
(0, 2, 2),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 0, 1, total_alleles=10
),
)
self.assertEqual(
(0, 2, 2),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 0, 1, total_alleles=11
),
)
self.assertEqual(
(0, 3, 3),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 0, 1, total_alleles=12
),
)
self.assertEqual(
(0, 0, 1),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 0, 1, total_sites=1
),
)
self.assertEqual(
(0, 1, 2),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 0, 1, total_sites=2
),
)
self.assertEqual(
(0, 2, 2),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 0, 1, total_sites=3
),
)
self.assertEqual(
(0, 3, 3),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 0, 1, total_sites=4
),
)
self.assertEqual(
(0, 4, 4),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 0, 1, total_sites=5
),
)
self.assertEqual(
(0, 5, 5),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 0, 1, total_sites=6
),
)
self.assertEqual(
(0, 5, 5),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 0, 1, total_sites=7
),
)
self.assertEqual(
(0, 5, 5),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 0, 1, total_sites=8
),
)
self.assertEqual(
(0, 0, 2),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 0, 2, total_sites=1
),
)
self.assertEqual(
(0, 1, 2),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 0, 2, total_sites=2
),
)
self.assertEqual(
(0, 2, 3),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 0, 2, total_sites=3
),
)
self.assertEqual(
(0, 3, 3),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 0, 2, total_sites=4
),
)
self.assertEqual(
(0, 4, 4),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 0, 2, total_sites=5
),
)
self.assertEqual(
(0, 5, 5),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 0, 2, total_sites=6
),
)
self.assertEqual(
(0, 5, 5),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 0, 2, total_sites=7
),
)
self.assertEqual(
(0, 5, 5),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 0, 2, total_sites=8
),
)
self.assertEqual(
(0, 0, 2),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 0, 3, total_sites=1
),
)
self.assertEqual(
(0, 1, 3),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 0, 3, total_sites=2
),
)
self.assertEqual(
(0, 2, 3),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 0, 3, total_sites=3
),
)
self.assertEqual(
(0, 3, 4),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 0, 3, total_sites=4
),
)
self.assertEqual(
(0, 4, 5),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 0, 3, total_sites=5
),
)
self.assertEqual(
(0, 5, 5),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 0, 3, total_sites=6
),
)
self.assertEqual(
(0, 5, 5),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 0, 3, total_sites=7
),
)
self.assertEqual(
(0, 5, 5),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 0, 3, total_sites=8
),
)
self.assertEqual(
(0, 0, 3),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 0, 4, total_sites=1
),
)
self.assertEqual(
(0, 1, 3),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 0, 4, total_sites=2
),
)
self.assertEqual(
(0, 2, 3),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 0, 4, total_sites=3
),
)
self.assertEqual(
(0, 3, 4),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 0, 4, total_sites=4
),
)
self.assertEqual(
(0, 4, 5),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 0, 4, total_sites=5
),
)
self.assertEqual(
(0, 5, 5),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 0, 4, total_sites=6
),
)
self.assertEqual(
(0, 5, 5),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 0, 4, total_sites=7
),
)
self.assertEqual(
(0, 1, 2),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 1, 1, total_sites=1
),
)
self.assertEqual(
(0, 1, 2),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 1, 2, total_sites=1
),
)
self.assertEqual(
(0, 1, 3),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 1, 3, total_sites=1
),
)
self.assertEqual(
(0, 1, 3),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 1, 15, total_sites=1
),
)
self.assertEqual(
(0, 1, 4),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 1, 16, total_sites=1
),
)
self.assertEqual(
(0, 5, 5),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 1, 1, total_sites=6
),
)
self.assertEqual(
(4, 4, 4),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 4, 1, total_sites=1
),
)
self.assertEqual(
(4, 4, 4),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 4, 2, total_sites=1
),
)
self.assertEqual(
(3, 4, 5),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 4, 3, total_sites=1
),
)
self.assertEqual(
(4, 5, 5),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 4, 1, total_sites=2
),
)
self.assertEqual(
(5, 5, 5),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 5, 1, total_sites=1
),
)
self.assertEqual(
(5, 5, 5),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 5, 1, total_sites=2
),
)
self.assertEqual(
(5, 5, 5),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 5, 2, total_sites=2
),
)
self.assertEqual(
(4, 5, 5),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 5, 3, total_sites=2
),
)
self.assertEqual(
(4, 5, 5),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 5, 4, total_sites=2
),
)
self.assertEqual(
(4, 5, 5),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 5, 5, total_sites=2
),
)
self.assertEqual(
(3, 5, 5),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 5, 6, total_sites=2
),
)
self.assertEqual(
(3, 5, 5),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 5, 7, total_sites=2
),
)
self.assertEqual(
(3, 5, 5),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 5, 17, total_sites=2
),
)
self.assertEqual(
(2, 5, 5),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 5, 18, total_sites=2
),
)
self.assertEqual(
(1, 5, 5),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 5, 19, total_sites=2
),
)
self.assertEqual(
(0, 5, 5),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 5, 20, total_sites=2
),
)
self.assertEqual(
(0, 5, 5),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 5, 21, total_sites=2
),
)
        # These records caused a minos error because the variant at 800
        # was included in the last split file, but the use_end_index was at
        # the position of the variant at 610, so the one at 800 was not getting used.
record_list = [
cluster_vcf_records.vcf_record.VcfRecord("ref\t75\t.\tA\tG\t.\t.\t.\t."),
cluster_vcf_records.vcf_record.VcfRecord("ref\t150\t.\tG\tA,T\t.\t.\t.\t."),
cluster_vcf_records.vcf_record.VcfRecord("ref\t450\t.\tT\tC\t.\t.\t.\t."),
cluster_vcf_records.vcf_record.VcfRecord("ref\t610\t.\tA\tG\t.\t.\t.\t."),
cluster_vcf_records.vcf_record.VcfRecord("ref\t800\t.\tC\tCA\t.\t.\t.\t."),
]
self.assertEqual(
(0, 1, 1),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 0, 100, total_sites=2
),
)
self.assertEqual(
(2, 3, 3),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 2, 100, total_sites=2
),
)
self.assertEqual(
(4, 4, 4),
vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list(
record_list, 4, 100, total_sites=2
),
)
def test_make_split_files(self):
"""test make_split_files"""
infile = os.path.join(data_dir, "make_split_files.in.vcf")
tmp_out = "tmp.vcf_chunker.make_split_files"
ref_fa = os.path.join(data_dir, "make_split_files.in.ref.fa")
if os.path.exists(tmp_out):
shutil.rmtree(tmp_out)
vcf1 = cluster_vcf_records.vcf_record.VcfRecord(
"ref1\t1\t.\tG\tT\t.\tPASS\t.\t.\t."
)
vcf2 = cluster_vcf_records.vcf_record.VcfRecord(
"ref1\t2\t.\tC\tT\t.\tPASS\t.\t.\t."
)
vcf3 = cluster_vcf_records.vcf_record.VcfRecord(
"ref1\t3\t.\tT\tA\t.\tPASS\t.\t.\t."
)
vcf4 = cluster_vcf_records.vcf_record.VcfRecord(
"ref1\t5\t.\tAGAGTCACGTA\tG\t.\tPASS\t.\t.\t."
)
vcf5 = cluster_vcf_records.vcf_record.VcfRecord(
"ref1\t18\t.\tA\tG\t.\tPASS\t.\t.\t."
)
vcf6 = cluster_vcf_records.vcf_record.VcfRecord(
"ref1\t21\t.\tG\tT\t.\tPASS\t.\t.\t."
)
vcf7 = cluster_vcf_records.vcf_record.VcfRecord(
"ref2\t42\t.\tC\tG\t.\tPASS\t.\t.\t."
)
header_lines = [
"##header1",
"##header2",
"#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\tFORMAT\tsample_name",
]
chunker = vcf_chunker.VcfChunker(
tmp_out,
vcf_infile=infile,
ref_fasta=ref_fa,
variants_per_split=2,
flank_length=1,
gramtools_kmer_size=5,
)
chunker.make_split_files()
self.assertTrue(os.path.exists(chunker.metadata_pickle))
got_header, got_records = cluster_vcf_records.vcf_file_read.vcf_file_to_list(
os.path.join(tmp_out, "split.0.in.vcf")
)
self.assertEqual(header_lines, got_header)
self.assertEqual([vcf1, vcf2, vcf3], got_records)
got_header, got_records = cluster_vcf_records.vcf_file_read.vcf_file_to_list(
os.path.join(tmp_out, "split.1.in.vcf")
)
self.assertEqual(header_lines, got_header)
self.assertEqual([vcf2, vcf3, vcf4], got_records)
got_header, got_records = cluster_vcf_records.vcf_file_read.vcf_file_to_list(
os.path.join(tmp_out, "split.2.in.vcf")
)
self.assertEqual(header_lines, got_header)
self.assertEqual([vcf5, vcf6], got_records)
got_header, got_records = cluster_vcf_records.vcf_file_read.vcf_file_to_list(
os.path.join(tmp_out, "split.3.in.vcf")
)
self.assertEqual(header_lines, got_header)
self.assertEqual([vcf7], got_records)
self.assertFalse(os.path.exists(os.path.join(tmp_out, "split.4.in.vcf")))
shutil.rmtree(tmp_out)
chunker = vcf_chunker.VcfChunker(
tmp_out,
vcf_infile=infile,
ref_fasta=ref_fa,
variants_per_split=4,
flank_length=3,
gramtools_kmer_size=5,
)
chunker.make_split_files()
self.assertTrue(os.path.exists(chunker.metadata_pickle))
got_header, got_records = cluster_vcf_records.vcf_file_read.vcf_file_to_list(
os.path.join(tmp_out, "split.0.in.vcf")
)
self.assertEqual(header_lines, got_header)
self.assertEqual([vcf1, vcf2, vcf3, vcf4, vcf5], got_records)
got_header, got_records = cluster_vcf_records.vcf_file_read.vcf_file_to_list(
os.path.join(tmp_out, "split.1.in.vcf")
)
self.assertEqual(header_lines, got_header)
self.assertEqual([vcf4, vcf5, vcf6], got_records)
got_header, got_records = cluster_vcf_records.vcf_file_read.vcf_file_to_list(
os.path.join(tmp_out, "split.2.in.vcf")
)
self.assertEqual(header_lines, got_header)
self.assertEqual([vcf7], got_records)
self.assertFalse(os.path.exists(os.path.join(tmp_out, "split.3.in.vcf")))
chunker2 = vcf_chunker.VcfChunker(tmp_out, gramtools_kmer_size=5)
self.assertEqual(chunker.vcf_infile, chunker2.vcf_infile)
self.assertEqual(chunker.ref_fasta, chunker2.ref_fasta)
self.assertEqual(chunker.variants_per_split, chunker2.variants_per_split)
self.assertEqual(chunker.total_splits, chunker2.total_splits)
self.assertEqual(chunker.flank_length, chunker2.flank_length)
self.assertEqual(chunker.gramtools_kmer_size, chunker2.gramtools_kmer_size)
self.assertEqual(chunker.total_split_files, chunker2.total_split_files)
self.assertEqual(chunker.vcf_split_files, chunker2.vcf_split_files)
shutil.rmtree(tmp_out)
def test_make_split_files_2(self):
"""test make_split_files with different input from previous test"""
        # These records caused a minos bug: the last record was not being used
        # when merging because the index was wrong.
        # They are test data from the multi_sample_pipeline tests.
infile = os.path.join(data_dir, "make_split_files2.in.vcf")
tmp_out = "tmp.vcf_chunker.make_split_files2"
ref_fa = os.path.join(data_dir, "make_split_files2.in.ref.fa")
if os.path.exists(tmp_out):
shutil.rmtree(tmp_out)
chunker = vcf_chunker.VcfChunker(
tmp_out,
vcf_infile=infile,
ref_fasta=ref_fa,
variants_per_split=2,
flank_length=200,
gramtools_kmer_size=5,
)
chunker.make_split_files()
self.assertTrue(os.path.exists(chunker.metadata_pickle))
chunker2 = vcf_chunker.VcfChunker(tmp_out, gramtools_kmer_size=5)
self.assertEqual(1, len(chunker2.vcf_split_files))
self.assertEqual(3, len(chunker2.vcf_split_files["ref.0"]))
self.assertEqual(4, chunker2.vcf_split_files["ref.0"][-1].use_end_index)
shutil.rmtree(tmp_out)
# Test with two threads
chunker = vcf_chunker.VcfChunker(
tmp_out,
vcf_infile=infile,
ref_fasta=ref_fa,
variants_per_split=2,
flank_length=200,
threads=2,
gramtools_kmer_size=5,
)
chunker.make_split_files()
self.assertTrue(os.path.exists(chunker.metadata_pickle))
chunker2 = vcf_chunker.VcfChunker(tmp_out, gramtools_kmer_size=5)
self.assertEqual(1, len(chunker2.vcf_split_files))
self.assertEqual(3, len(chunker2.vcf_split_files["ref.0"]))
self.assertEqual(4, chunker2.vcf_split_files["ref.0"][-1].use_end_index)
shutil.rmtree(tmp_out)
def test_merge_files(self):
"""test merge_files"""
vcf_to_split = os.path.join(data_dir, "merge_files.in.vcf")
ref_fasta = os.path.join(data_dir, "merge_files.in.ref.fa")
tmp_outdir = "tmp.vcf_chunker.merge_files"
chunker = vcf_chunker.VcfChunker(
tmp_outdir,
vcf_infile=vcf_to_split,
ref_fasta=ref_fasta,
variants_per_split=4,
flank_length=3,
gramtools_kmer_size=5,
)
chunker.make_split_files()
to_merge = {}
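        # collect the split VCF filenames for each reference sequence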
for ref, split_list in chunker.vcf_split_files.items():
to_merge[ref] = [x.filename for x in split_list]
tmp_vcf_out = "tmp.vcf_chunker.merge_files.out.vcf"
chunker.merge_files(to_merge, tmp_vcf_out)
self.assertTrue(filecmp.cmp(vcf_to_split, tmp_vcf_out, shallow=False))
os.unlink(tmp_vcf_out)
shutil.rmtree(tmp_outdir)
|
[
"os.path.abspath",
"os.unlink",
"minos.vcf_chunker.VcfChunker",
"cluster_vcf_records.vcf_record.VcfRecord",
"os.path.exists",
"minos.vcf_chunker.VcfChunker._total_variants_and_alleles_in_vcf_dict",
"minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list",
"shutil.rmtree",
"filecmp.cmp",
"os.path.join"
] |
[((180, 225), 'os.path.join', 'os.path.join', (['this_dir', '"""data"""', '"""vcf_chunker"""'], {}), "(this_dir, 'data', 'vcf_chunker')\n", (192, 225), False, 'import os\n'), ((142, 167), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (157, 167), False, 'import os\n'), ((773, 846), 'minos.vcf_chunker.VcfChunker._total_variants_and_alleles_in_vcf_dict', 'vcf_chunker.VcfChunker._total_variants_and_alleles_in_vcf_dict', (['test_dict'], {}), '(test_dict)\n', (835, 846), False, 'from minos import vcf_chunker\n'), ((16268, 16317), 'os.path.join', 'os.path.join', (['data_dir', '"""make_split_files.in.vcf"""'], {}), "(data_dir, 'make_split_files.in.vcf')\n", (16280, 16317), False, 'import os\n'), ((16388, 16440), 'os.path.join', 'os.path.join', (['data_dir', '"""make_split_files.in.ref.fa"""'], {}), "(data_dir, 'make_split_files.in.ref.fa')\n", (16400, 16440), False, 'import os\n'), ((16452, 16475), 'os.path.exists', 'os.path.exists', (['tmp_out'], {}), '(tmp_out)\n', (16466, 16475), False, 'import os\n'), ((16528, 16606), 'cluster_vcf_records.vcf_record.VcfRecord', 'cluster_vcf_records.vcf_record.VcfRecord', (['"""ref1\t1\t.\tG\tT\t.\tPASS\t.\t.\t."""'], {}), "('ref1\\t1\\t.\\tG\\tT\\t.\\tPASS\\t.\\t.\\t.')\n", (16568, 16606), False, 'import cluster_vcf_records\n'), ((16644, 16722), 'cluster_vcf_records.vcf_record.VcfRecord', 'cluster_vcf_records.vcf_record.VcfRecord', (['"""ref1\t2\t.\tC\tT\t.\tPASS\t.\t.\t."""'], {}), "('ref1\\t2\\t.\\tC\\tT\\t.\\tPASS\\t.\\t.\\t.')\n", (16684, 16722), False, 'import cluster_vcf_records\n'), ((16760, 16838), 'cluster_vcf_records.vcf_record.VcfRecord', 'cluster_vcf_records.vcf_record.VcfRecord', (['"""ref1\t3\t.\tT\tA\t.\tPASS\t.\t.\t."""'], {}), "('ref1\\t3\\t.\\tT\\tA\\t.\\tPASS\\t.\\t.\\t.')\n", (16800, 16838), False, 'import cluster_vcf_records\n'), ((16876, 16969), 'cluster_vcf_records.vcf_record.VcfRecord', 'cluster_vcf_records.vcf_record.VcfRecord', (['"""ref1\t5\t.\tAGAGTCACGTA\tG\t.\tPASS\t.\t.\t."""'], {}), "(\n 'ref1\\t5\\t.\\tAGAGTCACGTA\\tG\\t.\\tPASS\\t.\\t.\\t.')\n", (16916, 16969), False, 'import cluster_vcf_records\n'), ((17002, 17081), 'cluster_vcf_records.vcf_record.VcfRecord', 'cluster_vcf_records.vcf_record.VcfRecord', (['"""ref1\t18\t.\tA\tG\t.\tPASS\t.\t.\t."""'], {}), "('ref1\\t18\\t.\\tA\\tG\\t.\\tPASS\\t.\\t.\\t.')\n", (17042, 17081), False, 'import cluster_vcf_records\n'), ((17119, 17198), 'cluster_vcf_records.vcf_record.VcfRecord', 'cluster_vcf_records.vcf_record.VcfRecord', (['"""ref1\t21\t.\tG\tT\t.\tPASS\t.\t.\t."""'], {}), "('ref1\\t21\\t.\\tG\\tT\\t.\\tPASS\\t.\\t.\\t.')\n", (17159, 17198), False, 'import cluster_vcf_records\n'), ((17236, 17315), 'cluster_vcf_records.vcf_record.VcfRecord', 'cluster_vcf_records.vcf_record.VcfRecord', (['"""ref2\t42\t.\tC\tG\t.\tPASS\t.\t.\t."""'], {}), "('ref2\\t42\\t.\\tC\\tG\\t.\\tPASS\\t.\\t.\\t.')\n", (17276, 17315), False, 'import cluster_vcf_records\n'), ((17524, 17657), 'minos.vcf_chunker.VcfChunker', 'vcf_chunker.VcfChunker', (['tmp_out'], {'vcf_infile': 'infile', 'ref_fasta': 'ref_fa', 'variants_per_split': '(2)', 'flank_length': '(1)', 'gramtools_kmer_size': '(5)'}), '(tmp_out, vcf_infile=infile, ref_fasta=ref_fa,\n variants_per_split=2, flank_length=1, gramtools_kmer_size=5)\n', (17546, 17657), False, 'from minos import vcf_chunker\n'), ((18942, 18964), 'shutil.rmtree', 'shutil.rmtree', (['tmp_out'], {}), '(tmp_out)\n', (18955, 18964), False, 'import shutil\n'), ((18984, 19117), 'minos.vcf_chunker.VcfChunker', 'vcf_chunker.VcfChunker', (['tmp_out'], 
{'vcf_infile': 'infile', 'ref_fasta': 'ref_fa', 'variants_per_split': '(4)', 'flank_length': '(3)', 'gramtools_kmer_size': '(5)'}), '(tmp_out, vcf_infile=infile, ref_fasta=ref_fa,\n variants_per_split=4, flank_length=3, gramtools_kmer_size=5)\n', (19006, 19117), False, 'from minos import vcf_chunker\n'), ((20174, 20228), 'minos.vcf_chunker.VcfChunker', 'vcf_chunker.VcfChunker', (['tmp_out'], {'gramtools_kmer_size': '(5)'}), '(tmp_out, gramtools_kmer_size=5)\n', (20196, 20228), False, 'from minos import vcf_chunker\n'), ((20829, 20851), 'shutil.rmtree', 'shutil.rmtree', (['tmp_out'], {}), '(tmp_out)\n', (20842, 20851), False, 'import shutil\n'), ((21173, 21223), 'os.path.join', 'os.path.join', (['data_dir', '"""make_split_files2.in.vcf"""'], {}), "(data_dir, 'make_split_files2.in.vcf')\n", (21185, 21223), False, 'import os\n'), ((21295, 21348), 'os.path.join', 'os.path.join', (['data_dir', '"""make_split_files2.in.ref.fa"""'], {}), "(data_dir, 'make_split_files2.in.ref.fa')\n", (21307, 21348), False, 'import os\n'), ((21360, 21383), 'os.path.exists', 'os.path.exists', (['tmp_out'], {}), '(tmp_out)\n', (21374, 21383), False, 'import os\n'), ((21439, 21574), 'minos.vcf_chunker.VcfChunker', 'vcf_chunker.VcfChunker', (['tmp_out'], {'vcf_infile': 'infile', 'ref_fasta': 'ref_fa', 'variants_per_split': '(2)', 'flank_length': '(200)', 'gramtools_kmer_size': '(5)'}), '(tmp_out, vcf_infile=infile, ref_fasta=ref_fa,\n variants_per_split=2, flank_length=200, gramtools_kmer_size=5)\n', (21461, 21574), False, 'from minos import vcf_chunker\n'), ((21773, 21827), 'minos.vcf_chunker.VcfChunker', 'vcf_chunker.VcfChunker', (['tmp_out'], {'gramtools_kmer_size': '(5)'}), '(tmp_out, gramtools_kmer_size=5)\n', (21795, 21827), False, 'from minos import vcf_chunker\n'), ((22044, 22066), 'shutil.rmtree', 'shutil.rmtree', (['tmp_out'], {}), '(tmp_out)\n', (22057, 22066), False, 'import shutil\n'), ((22118, 22264), 'minos.vcf_chunker.VcfChunker', 'vcf_chunker.VcfChunker', (['tmp_out'], {'vcf_infile': 'infile', 'ref_fasta': 'ref_fa', 'variants_per_split': '(2)', 'flank_length': '(200)', 'threads': '(2)', 'gramtools_kmer_size': '(5)'}), '(tmp_out, vcf_infile=infile, ref_fasta=ref_fa,\n variants_per_split=2, flank_length=200, threads=2, gramtools_kmer_size=5)\n', (22140, 22264), False, 'from minos import vcf_chunker\n'), ((22475, 22529), 'minos.vcf_chunker.VcfChunker', 'vcf_chunker.VcfChunker', (['tmp_out'], {'gramtools_kmer_size': '(5)'}), '(tmp_out, gramtools_kmer_size=5)\n', (22497, 22529), False, 'from minos import vcf_chunker\n'), ((22746, 22768), 'shutil.rmtree', 'shutil.rmtree', (['tmp_out'], {}), '(tmp_out)\n', (22759, 22768), False, 'import shutil\n'), ((22856, 22900), 'os.path.join', 'os.path.join', (['data_dir', '"""merge_files.in.vcf"""'], {}), "(data_dir, 'merge_files.in.vcf')\n", (22868, 22900), False, 'import os\n'), ((22921, 22968), 'os.path.join', 'os.path.join', (['data_dir', '"""merge_files.in.ref.fa"""'], {}), "(data_dir, 'merge_files.in.ref.fa')\n", (22933, 22968), False, 'import os\n'), ((23038, 23184), 'minos.vcf_chunker.VcfChunker', 'vcf_chunker.VcfChunker', (['tmp_outdir'], {'vcf_infile': 'vcf_to_split', 'ref_fasta': 'ref_fasta', 'variants_per_split': '(4)', 'flank_length': '(3)', 'gramtools_kmer_size': '(5)'}), '(tmp_outdir, vcf_infile=vcf_to_split, ref_fasta=\n ref_fasta, variants_per_split=4, flank_length=3, gramtools_kmer_size=5)\n', (23060, 23184), False, 'from minos import vcf_chunker\n'), ((23643, 23665), 'os.unlink', 'os.unlink', (['tmp_vcf_out'], {}), '(tmp_vcf_out)\n', (23652, 23665), 
False, 'import os\n'), ((23674, 23699), 'shutil.rmtree', 'shutil.rmtree', (['tmp_outdir'], {}), '(tmp_outdir)\n', (23687, 23699), False, 'import shutil\n'), ((1112, 1183), 'cluster_vcf_records.vcf_record.VcfRecord', 'cluster_vcf_records.vcf_record.VcfRecord', (['"""ref\t1\t.\tA\tG\t.\t.\t.\t."""'], {}), "('ref\\t1\\t.\\tA\\tG\\t.\\t.\\t.\\t.')\n", (1152, 1183), False, 'import cluster_vcf_records\n'), ((1197, 1275), 'cluster_vcf_records.vcf_record.VcfRecord', 'cluster_vcf_records.vcf_record.VcfRecord', (['"""ref\t2\t.\tC\tT,A,G,TA\t.\t.\t.\t."""'], {}), "('ref\\t2\\t.\\tC\\tT,A,G,TA\\t.\\t.\\t.\\t.')\n", (1237, 1275), False, 'import cluster_vcf_records\n'), ((1319, 1392), 'cluster_vcf_records.vcf_record.VcfRecord', 'cluster_vcf_records.vcf_record.VcfRecord', (['"""ref\t3\t.\tT\tA,C\t.\t.\t.\t."""'], {}), "('ref\\t3\\t.\\tT\\tA,C\\t.\\t.\\t.\\t.')\n", (1359, 1392), False, 'import cluster_vcf_records\n'), ((1406, 1492), 'cluster_vcf_records.vcf_record.VcfRecord', 'cluster_vcf_records.vcf_record.VcfRecord', (['"""ref\t5\t.\tAGAGTCACGTA\tG\t.\t.\t.\t."""'], {}), "(\n 'ref\\t5\\t.\\tAGAGTCACGTA\\tG\\t.\\t.\\t.\\t.')\n", (1446, 1492), False, 'import cluster_vcf_records\n'), ((1531, 1603), 'cluster_vcf_records.vcf_record.VcfRecord', 'cluster_vcf_records.vcf_record.VcfRecord', (['"""ref\t18\t.\tA\tG\t.\t.\t.\t."""'], {}), "('ref\\t18\\t.\\tA\\tG\\t.\\t.\\t.\\t.')\n", (1571, 1603), False, 'import cluster_vcf_records\n'), ((1617, 1689), 'cluster_vcf_records.vcf_record.VcfRecord', 'cluster_vcf_records.vcf_record.VcfRecord', (['"""ref\t21\t.\tG\tT\t.\t.\t.\t."""'], {}), "('ref\\t21\\t.\\tG\\tT\\t.\\t.\\t.\\t.')\n", (1657, 1689), False, 'import cluster_vcf_records\n'), ((1763, 1865), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(0)', '(1)'], {'total_alleles': '(1)'}), '(record_list,\n 0, 1, total_alleles=1)\n', (1825, 1865), False, 'from minos import vcf_chunker\n'), ((1964, 2066), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(0)', '(1)'], {'total_alleles': '(2)'}), '(record_list,\n 0, 1, total_alleles=2)\n', (2026, 2066), False, 'from minos import vcf_chunker\n'), ((2165, 2267), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(0)', '(1)'], {'total_alleles': '(3)'}), '(record_list,\n 0, 1, total_alleles=3)\n', (2227, 2267), False, 'from minos import vcf_chunker\n'), ((2366, 2468), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(0)', '(1)'], {'total_alleles': '(4)'}), '(record_list,\n 0, 1, total_alleles=4)\n', (2428, 2468), False, 'from minos import vcf_chunker\n'), ((2567, 2669), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(0)', '(1)'], {'total_alleles': '(5)'}), '(record_list,\n 0, 1, total_alleles=5)\n', (2629, 2669), False, 'from minos import vcf_chunker\n'), ((2768, 2870), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(0)', '(1)'], {'total_alleles': '(6)'}), '(record_list,\n 0, 1, total_alleles=6)\n', (2830, 2870), False, 'from minos import 
vcf_chunker\n'), ((2969, 3071), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(0)', '(1)'], {'total_alleles': '(7)'}), '(record_list,\n 0, 1, total_alleles=7)\n', (3031, 3071), False, 'from minos import vcf_chunker\n'), ((3170, 3272), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(0)', '(1)'], {'total_alleles': '(8)'}), '(record_list,\n 0, 1, total_alleles=8)\n', (3232, 3272), False, 'from minos import vcf_chunker\n'), ((3371, 3473), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(0)', '(1)'], {'total_alleles': '(9)'}), '(record_list,\n 0, 1, total_alleles=9)\n', (3433, 3473), False, 'from minos import vcf_chunker\n'), ((3572, 3675), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(0)', '(1)'], {'total_alleles': '(10)'}), '(record_list,\n 0, 1, total_alleles=10)\n', (3634, 3675), False, 'from minos import vcf_chunker\n'), ((3774, 3877), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(0)', '(1)'], {'total_alleles': '(11)'}), '(record_list,\n 0, 1, total_alleles=11)\n', (3836, 3877), False, 'from minos import vcf_chunker\n'), ((3976, 4079), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(0)', '(1)'], {'total_alleles': '(12)'}), '(record_list,\n 0, 1, total_alleles=12)\n', (4038, 4079), False, 'from minos import vcf_chunker\n'), ((4179, 4279), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(0)', '(1)'], {'total_sites': '(1)'}), '(record_list,\n 0, 1, total_sites=1)\n', (4241, 4279), False, 'from minos import vcf_chunker\n'), ((4378, 4478), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(0)', '(1)'], {'total_sites': '(2)'}), '(record_list,\n 0, 1, total_sites=2)\n', (4440, 4478), False, 'from minos import vcf_chunker\n'), ((4577, 4677), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(0)', '(1)'], {'total_sites': '(3)'}), '(record_list,\n 0, 1, total_sites=3)\n', (4639, 4677), False, 'from minos import vcf_chunker\n'), ((4776, 4876), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(0)', '(1)'], {'total_sites': '(4)'}), '(record_list,\n 0, 1, total_sites=4)\n', (4838, 4876), False, 'from minos import vcf_chunker\n'), ((4975, 5075), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(0)', '(1)'], {'total_sites': '(5)'}), '(record_list,\n 0, 1, total_sites=5)\n', (5037, 5075), False, 'from minos import vcf_chunker\n'), ((5174, 5274), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 
'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(0)', '(1)'], {'total_sites': '(6)'}), '(record_list,\n 0, 1, total_sites=6)\n', (5236, 5274), False, 'from minos import vcf_chunker\n'), ((5373, 5473), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(0)', '(1)'], {'total_sites': '(7)'}), '(record_list,\n 0, 1, total_sites=7)\n', (5435, 5473), False, 'from minos import vcf_chunker\n'), ((5572, 5672), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(0)', '(1)'], {'total_sites': '(8)'}), '(record_list,\n 0, 1, total_sites=8)\n', (5634, 5672), False, 'from minos import vcf_chunker\n'), ((5772, 5872), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(0)', '(2)'], {'total_sites': '(1)'}), '(record_list,\n 0, 2, total_sites=1)\n', (5834, 5872), False, 'from minos import vcf_chunker\n'), ((5971, 6071), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(0)', '(2)'], {'total_sites': '(2)'}), '(record_list,\n 0, 2, total_sites=2)\n', (6033, 6071), False, 'from minos import vcf_chunker\n'), ((6170, 6270), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(0)', '(2)'], {'total_sites': '(3)'}), '(record_list,\n 0, 2, total_sites=3)\n', (6232, 6270), False, 'from minos import vcf_chunker\n'), ((6369, 6469), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(0)', '(2)'], {'total_sites': '(4)'}), '(record_list,\n 0, 2, total_sites=4)\n', (6431, 6469), False, 'from minos import vcf_chunker\n'), ((6568, 6668), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(0)', '(2)'], {'total_sites': '(5)'}), '(record_list,\n 0, 2, total_sites=5)\n', (6630, 6668), False, 'from minos import vcf_chunker\n'), ((6767, 6867), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(0)', '(2)'], {'total_sites': '(6)'}), '(record_list,\n 0, 2, total_sites=6)\n', (6829, 6867), False, 'from minos import vcf_chunker\n'), ((6966, 7066), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(0)', '(2)'], {'total_sites': '(7)'}), '(record_list,\n 0, 2, total_sites=7)\n', (7028, 7066), False, 'from minos import vcf_chunker\n'), ((7165, 7265), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(0)', '(2)'], {'total_sites': '(8)'}), '(record_list,\n 0, 2, total_sites=8)\n', (7227, 7265), False, 'from minos import vcf_chunker\n'), ((7365, 7465), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(0)', '(3)'], {'total_sites': '(1)'}), '(record_list,\n 0, 3, 
total_sites=1)\n', (7427, 7465), False, 'from minos import vcf_chunker\n'), ((7564, 7664), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(0)', '(3)'], {'total_sites': '(2)'}), '(record_list,\n 0, 3, total_sites=2)\n', (7626, 7664), False, 'from minos import vcf_chunker\n'), ((7763, 7863), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(0)', '(3)'], {'total_sites': '(3)'}), '(record_list,\n 0, 3, total_sites=3)\n', (7825, 7863), False, 'from minos import vcf_chunker\n'), ((7962, 8062), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(0)', '(3)'], {'total_sites': '(4)'}), '(record_list,\n 0, 3, total_sites=4)\n', (8024, 8062), False, 'from minos import vcf_chunker\n'), ((8161, 8261), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(0)', '(3)'], {'total_sites': '(5)'}), '(record_list,\n 0, 3, total_sites=5)\n', (8223, 8261), False, 'from minos import vcf_chunker\n'), ((8360, 8460), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(0)', '(3)'], {'total_sites': '(6)'}), '(record_list,\n 0, 3, total_sites=6)\n', (8422, 8460), False, 'from minos import vcf_chunker\n'), ((8559, 8659), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(0)', '(3)'], {'total_sites': '(7)'}), '(record_list,\n 0, 3, total_sites=7)\n', (8621, 8659), False, 'from minos import vcf_chunker\n'), ((8758, 8858), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(0)', '(3)'], {'total_sites': '(8)'}), '(record_list,\n 0, 3, total_sites=8)\n', (8820, 8858), False, 'from minos import vcf_chunker\n'), ((8958, 9058), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(0)', '(4)'], {'total_sites': '(1)'}), '(record_list,\n 0, 4, total_sites=1)\n', (9020, 9058), False, 'from minos import vcf_chunker\n'), ((9157, 9257), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(0)', '(4)'], {'total_sites': '(2)'}), '(record_list,\n 0, 4, total_sites=2)\n', (9219, 9257), False, 'from minos import vcf_chunker\n'), ((9356, 9456), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(0)', '(4)'], {'total_sites': '(3)'}), '(record_list,\n 0, 4, total_sites=3)\n', (9418, 9456), False, 'from minos import vcf_chunker\n'), ((9555, 9655), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(0)', '(4)'], {'total_sites': '(4)'}), '(record_list,\n 0, 4, total_sites=4)\n', (9617, 9655), False, 'from minos import vcf_chunker\n'), ((9754, 9854), 
'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(0)', '(4)'], {'total_sites': '(5)'}), '(record_list,\n 0, 4, total_sites=5)\n', (9816, 9854), False, 'from minos import vcf_chunker\n'), ((9953, 10053), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(0)', '(4)'], {'total_sites': '(6)'}), '(record_list,\n 0, 4, total_sites=6)\n', (10015, 10053), False, 'from minos import vcf_chunker\n'), ((10152, 10252), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(0)', '(4)'], {'total_sites': '(7)'}), '(record_list,\n 0, 4, total_sites=7)\n', (10214, 10252), False, 'from minos import vcf_chunker\n'), ((10352, 10452), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(1)', '(1)'], {'total_sites': '(1)'}), '(record_list,\n 1, 1, total_sites=1)\n', (10414, 10452), False, 'from minos import vcf_chunker\n'), ((10551, 10651), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(1)', '(2)'], {'total_sites': '(1)'}), '(record_list,\n 1, 2, total_sites=1)\n', (10613, 10651), False, 'from minos import vcf_chunker\n'), ((10750, 10850), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(1)', '(3)'], {'total_sites': '(1)'}), '(record_list,\n 1, 3, total_sites=1)\n', (10812, 10850), False, 'from minos import vcf_chunker\n'), ((10949, 11050), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(1)', '(15)'], {'total_sites': '(1)'}), '(record_list,\n 1, 15, total_sites=1)\n', (11011, 11050), False, 'from minos import vcf_chunker\n'), ((11149, 11250), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(1)', '(16)'], {'total_sites': '(1)'}), '(record_list,\n 1, 16, total_sites=1)\n', (11211, 11250), False, 'from minos import vcf_chunker\n'), ((11349, 11449), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(1)', '(1)'], {'total_sites': '(6)'}), '(record_list,\n 1, 1, total_sites=6)\n', (11411, 11449), False, 'from minos import vcf_chunker\n'), ((11549, 11649), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(4)', '(1)'], {'total_sites': '(1)'}), '(record_list,\n 4, 1, total_sites=1)\n', (11611, 11649), False, 'from minos import vcf_chunker\n'), ((11748, 11848), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(4)', '(2)'], {'total_sites': '(1)'}), '(record_list,\n 4, 2, total_sites=1)\n', (11810, 11848), False, 'from minos import vcf_chunker\n'), ((11947, 12047), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 
'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(4)', '(3)'], {'total_sites': '(1)'}), '(record_list,\n 4, 3, total_sites=1)\n', (12009, 12047), False, 'from minos import vcf_chunker\n'), ((12146, 12246), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(4)', '(1)'], {'total_sites': '(2)'}), '(record_list,\n 4, 1, total_sites=2)\n', (12208, 12246), False, 'from minos import vcf_chunker\n'), ((12346, 12446), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(5)', '(1)'], {'total_sites': '(1)'}), '(record_list,\n 5, 1, total_sites=1)\n', (12408, 12446), False, 'from minos import vcf_chunker\n'), ((12545, 12645), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(5)', '(1)'], {'total_sites': '(2)'}), '(record_list,\n 5, 1, total_sites=2)\n', (12607, 12645), False, 'from minos import vcf_chunker\n'), ((12744, 12844), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(5)', '(2)'], {'total_sites': '(2)'}), '(record_list,\n 5, 2, total_sites=2)\n', (12806, 12844), False, 'from minos import vcf_chunker\n'), ((12943, 13043), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(5)', '(3)'], {'total_sites': '(2)'}), '(record_list,\n 5, 3, total_sites=2)\n', (13005, 13043), False, 'from minos import vcf_chunker\n'), ((13142, 13242), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(5)', '(4)'], {'total_sites': '(2)'}), '(record_list,\n 5, 4, total_sites=2)\n', (13204, 13242), False, 'from minos import vcf_chunker\n'), ((13341, 13441), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(5)', '(5)'], {'total_sites': '(2)'}), '(record_list,\n 5, 5, total_sites=2)\n', (13403, 13441), False, 'from minos import vcf_chunker\n'), ((13540, 13640), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(5)', '(6)'], {'total_sites': '(2)'}), '(record_list,\n 5, 6, total_sites=2)\n', (13602, 13640), False, 'from minos import vcf_chunker\n'), ((13739, 13839), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(5)', '(7)'], {'total_sites': '(2)'}), '(record_list,\n 5, 7, total_sites=2)\n', (13801, 13839), False, 'from minos import vcf_chunker\n'), ((13938, 14039), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(5)', '(17)'], {'total_sites': '(2)'}), '(record_list,\n 5, 17, total_sites=2)\n', (14000, 14039), False, 'from minos import vcf_chunker\n'), ((14138, 14239), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(5)', '(18)'], 
{'total_sites': '(2)'}), '(record_list,\n 5, 18, total_sites=2)\n', (14200, 14239), False, 'from minos import vcf_chunker\n'), ((14338, 14439), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(5)', '(19)'], {'total_sites': '(2)'}), '(record_list,\n 5, 19, total_sites=2)\n', (14400, 14439), False, 'from minos import vcf_chunker\n'), ((14538, 14639), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(5)', '(20)'], {'total_sites': '(2)'}), '(record_list,\n 5, 20, total_sites=2)\n', (14600, 14639), False, 'from minos import vcf_chunker\n'), ((14738, 14839), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(5)', '(21)'], {'total_sites': '(2)'}), '(record_list,\n 5, 21, total_sites=2)\n', (14800, 14839), False, 'from minos import vcf_chunker\n'), ((15138, 15210), 'cluster_vcf_records.vcf_record.VcfRecord', 'cluster_vcf_records.vcf_record.VcfRecord', (['"""ref\t75\t.\tA\tG\t.\t.\t.\t."""'], {}), "('ref\\t75\\t.\\tA\\tG\\t.\\t.\\t.\\t.')\n", (15178, 15210), False, 'import cluster_vcf_records\n'), ((15224, 15299), 'cluster_vcf_records.vcf_record.VcfRecord', 'cluster_vcf_records.vcf_record.VcfRecord', (['"""ref\t150\t.\tG\tA,T\t.\t.\t.\t."""'], {}), "('ref\\t150\\t.\\tG\\tA,T\\t.\\t.\\t.\\t.')\n", (15264, 15299), False, 'import cluster_vcf_records\n'), ((15313, 15386), 'cluster_vcf_records.vcf_record.VcfRecord', 'cluster_vcf_records.vcf_record.VcfRecord', (['"""ref\t450\t.\tT\tC\t.\t.\t.\t."""'], {}), "('ref\\t450\\t.\\tT\\tC\\t.\\t.\\t.\\t.')\n", (15353, 15386), False, 'import cluster_vcf_records\n'), ((15400, 15473), 'cluster_vcf_records.vcf_record.VcfRecord', 'cluster_vcf_records.vcf_record.VcfRecord', (['"""ref\t610\t.\tA\tG\t.\t.\t.\t."""'], {}), "('ref\\t610\\t.\\tA\\tG\\t.\\t.\\t.\\t.')\n", (15440, 15473), False, 'import cluster_vcf_records\n'), ((15487, 15561), 'cluster_vcf_records.vcf_record.VcfRecord', 'cluster_vcf_records.vcf_record.VcfRecord', (['"""ref\t800\t.\tC\tCA\t.\t.\t.\t."""'], {}), "('ref\\t800\\t.\\tC\\tCA\\t.\\t.\\t.\\t.')\n", (15527, 15561), False, 'import cluster_vcf_records\n'), ((15635, 15737), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(0)', '(100)'], {'total_sites': '(2)'}), '(record_list,\n 0, 100, total_sites=2)\n', (15697, 15737), False, 'from minos import vcf_chunker\n'), ((15836, 15938), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(2)', '(100)'], {'total_sites': '(2)'}), '(record_list,\n 2, 100, total_sites=2)\n', (15898, 15938), False, 'from minos import vcf_chunker\n'), ((16037, 16139), 'minos.vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', 'vcf_chunker.VcfChunker._chunk_end_indexes_from_vcf_record_list', (['record_list', '(4)', '(100)'], {'total_sites': '(2)'}), '(record_list,\n 4, 100, total_sites=2)\n', (16099, 16139), False, 'from minos import vcf_chunker\n'), ((16489, 16511), 'shutil.rmtree', 'shutil.rmtree', (['tmp_out'], {}), '(tmp_out)\n', (16502, 16511), False, 'import shutil\n'), ((17796, 17835), 'os.path.exists', 'os.path.exists', (['chunker.metadata_pickle'], {}), '(chunker.metadata_pickle)\n', 
(17810, 17835), False, 'import os\n'), ((17936, 17975), 'os.path.join', 'os.path.join', (['tmp_out', '"""split.0.in.vcf"""'], {}), "(tmp_out, 'split.0.in.vcf')\n", (17948, 17975), False, 'import os\n'), ((18194, 18233), 'os.path.join', 'os.path.join', (['tmp_out', '"""split.1.in.vcf"""'], {}), "(tmp_out, 'split.1.in.vcf')\n", (18206, 18233), False, 'import os\n'), ((18452, 18491), 'os.path.join', 'os.path.join', (['tmp_out', '"""split.2.in.vcf"""'], {}), "(tmp_out, 'split.2.in.vcf')\n", (18464, 18491), False, 'import os\n'), ((18704, 18743), 'os.path.join', 'os.path.join', (['tmp_out', '"""split.3.in.vcf"""'], {}), "(tmp_out, 'split.3.in.vcf')\n", (18716, 18743), False, 'import os\n'), ((19256, 19295), 'os.path.exists', 'os.path.exists', (['chunker.metadata_pickle'], {}), '(chunker.metadata_pickle)\n', (19270, 19295), False, 'import os\n'), ((19396, 19435), 'os.path.join', 'os.path.join', (['tmp_out', '"""split.0.in.vcf"""'], {}), "(tmp_out, 'split.0.in.vcf')\n", (19408, 19435), False, 'import os\n'), ((19666, 19705), 'os.path.join', 'os.path.join', (['tmp_out', '"""split.1.in.vcf"""'], {}), "(tmp_out, 'split.1.in.vcf')\n", (19678, 19705), False, 'import os\n'), ((19924, 19963), 'os.path.join', 'os.path.join', (['tmp_out', '"""split.2.in.vcf"""'], {}), "(tmp_out, 'split.2.in.vcf')\n", (19936, 19963), False, 'import os\n'), ((21397, 21419), 'shutil.rmtree', 'shutil.rmtree', (['tmp_out'], {}), '(tmp_out)\n', (21410, 21419), False, 'import shutil\n'), ((21713, 21752), 'os.path.exists', 'os.path.exists', (['chunker.metadata_pickle'], {}), '(chunker.metadata_pickle)\n', (21727, 21752), False, 'import os\n'), ((22415, 22454), 'os.path.exists', 'os.path.exists', (['chunker.metadata_pickle'], {}), '(chunker.metadata_pickle)\n', (22429, 22454), False, 'import os\n'), ((23580, 23633), 'filecmp.cmp', 'filecmp.cmp', (['vcf_to_split', 'tmp_vcf_out'], {'shallow': '(False)'}), '(vcf_to_split, tmp_vcf_out, shallow=False)\n', (23591, 23633), False, 'import filecmp\n'), ((18892, 18931), 'os.path.join', 'os.path.join', (['tmp_out', '"""split.4.in.vcf"""'], {}), "(tmp_out, 'split.4.in.vcf')\n", (18904, 18931), False, 'import os\n'), ((20112, 20151), 'os.path.join', 'os.path.join', (['tmp_out', '"""split.3.in.vcf"""'], {}), "(tmp_out, 'split.3.in.vcf')\n", (20124, 20151), False, 'import os\n')]
|
#!/usr/bin/env python
"""
A light wrapper for Cybersource SOAP Toolkit API
"""
import os
import sys
from setuptools import setup, find_packages
import pycybersource
# fix permissions for sdist
if 'sdist' in sys.argv:
os.system('chmod -R a+rX .')
os.umask(int('022', 8))
base_dir = os.path.dirname(__file__)
with open(os.path.join(base_dir, 'README.md'), 'rb') as fp:
long_description = fp.read().decode('utf-8')
setup(
name='pycybersource',
version='0.1.2a0',
description='A light wrapper for Cybersource SOAP Toolkit API',
author='<NAME>',
author_email='<EMAIL>',
url='',
platforms=['Platform Independent'],
license='BSD',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.6',
'Topic :: Software Development :: Libraries :: Python Modules',
],
packages=['pycybersource'],
keywords='cybersource payment soap zeep api wrapper',
requires=['zeep'],
install_requires=['zeep'],
test_suite='pycybersource.tests',
)
|
[
"os.path.join",
"os.path.dirname",
"os.system",
"setuptools.setup"
] |
[((292, 317), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (307, 317), False, 'import os\n'), ((429, 1147), 'setuptools.setup', 'setup', ([], {'name': '"""pycybersource"""', 'version': '"""0.1.2a0"""', 'description': '"""A light wrapper for Cybersource SOAP Toolkit API"""', 'author': '"""<NAME>"""', 'author_email': '"""<EMAIL>"""', 'url': '""""""', 'platforms': "['Platform Independent']", 'license': '"""BSD"""', 'classifiers': "['Development Status :: 3 - Alpha', 'Intended Audience :: Developers',\n 'License :: OSI Approved :: BSD License', 'Natural Language :: English',\n 'Operating System :: OS Independent',\n 'Programming Language :: Python :: 3.6',\n 'Topic :: Software Development :: Libraries :: Python Modules']", 'packages': "['pycybersource']", 'keywords': '"""cybersource payment soap zeep api wrapper"""', 'requires': "['zeep']", 'install_requires': "['zeep']", 'test_suite': '"""pycybersource.tests"""'}), "(name='pycybersource', version='0.1.2a0', description=\n 'A light wrapper for Cybersource SOAP Toolkit API', author='<NAME>',\n author_email='<EMAIL>', url='', platforms=['Platform Independent'],\n license='BSD', classifiers=['Development Status :: 3 - Alpha',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: BSD License', 'Natural Language :: English',\n 'Operating System :: OS Independent',\n 'Programming Language :: Python :: 3.6',\n 'Topic :: Software Development :: Libraries :: Python Modules'],\n packages=['pycybersource'], keywords=\n 'cybersource payment soap zeep api wrapper', requires=['zeep'],\n install_requires=['zeep'], test_suite='pycybersource.tests')\n", (434, 1147), False, 'from setuptools import setup, find_packages\n'), ((223, 251), 'os.system', 'os.system', (['"""chmod -R a+rX ."""'], {}), "('chmod -R a+rX .')\n", (232, 251), False, 'import os\n'), ((329, 364), 'os.path.join', 'os.path.join', (['base_dir', '"""README.md"""'], {}), "(base_dir, 'README.md')\n", (341, 364), False, 'import os\n')]
|
#!/usr/bin/env python
import sys, os, optparse, time
from os.path import expanduser
PY2 = sys.version_info[0] == 2
if PY2:
from urllib import quote
from urllib2 import urlopen, Request
from urllib2 import HTTPError,URLError
else:
from urllib import parse
from urllib.request import urlopen, Request
from urllib.error import HTTPError,URLError
from output import Output
import json
import pprint
import random, string
p = optparse.OptionParser(
description = '''Description
===========
Management of B2FIND communities within EUDAT-B2FIND, comprising
- Creating communities, i.e. CKAN groups
- .....
''',
formatter = optparse.TitledHelpFormatter(),
prog = 'CreateCommuities.py',
version = "%prog " + 'v0.1',
usage = "%prog [options] COMMUNITY"
)
p.add_option('-v', '--verbose', action="count", help="increase output verbosity (e.g., -vv is more than -v)", default=False)
p.add_option('--iphost', '-i', help="IP adress of B2FIND portal (CKAN instance)", metavar='IP')
p.add_option('--auth', help="Authentification for CKAN API (API key, by default taken from file $HOME/.netrc)",metavar='STRING')
p.add_option('--jobdir', help='\ndirectory where log, error and html-result files are stored. By default directory is created as startday/starthour/processid .', default=None)
p.add_option('--mode', '-m', metavar='PROCESSINGMODE', help='\nSupported modes are (c)reate, (u)pdate, (patch), (d)elete, (p)urge and (s)how . default is creation of a group', default='c')
options,arguments = p.parse_args()
pstat=dict()
now = time.strftime("%Y-%m-%d %H:%M:%S")
jid = os.getpid()
OUT = Output(pstat,now,jid,options)
logger = OUT.setup_custom_logger('root',options.verbose)
community=sys.argv[1]
conffile='mapfiles/%s.json' % community
with open(conffile, 'r') as f:
group_dict = json.load(f)
# checking given options:
if (not options.iphost):
logger.critical('The option iphost is mandatory !')
sys.exit()
if (not options.auth):
home = os.path.expanduser("~")
if (not os.path.isfile(home+'/.netrc')):
logger.critical('Can not access job host authentification file %s/.netrc ' % home )
sys.exit()
else:
f = open(home+'/.netrc','r')
lines=f.read().splitlines()
f.close()
l = 0
for host in lines:
if(options.iphost == host.split()[0]):
options.auth = host.split()[1]
break
if (not options.auth):
logger.critical('API key is neither given by option --auth nor can retrieved from %s/.netrc' % home )
sys.exit()
print('aaauth %s' % options.auth)
if options.mode == 'c' :
action='group_create'
##elif options.mode == 'u' :
## action='group_update'
## group_dict['id']=group_dict['name']
elif options.mode == 'patch' :
action='group_patch'
group_dict['id']=group_dict['name']
elif options.mode == 'd' :
action='group_delete'
elif options.mode == 'p' :
action='group_purge'
group_dict['id']=group_dict['name']
elif options.mode == 's' :
action='group_show'
group_dict['id']=group_dict['name']
else :
logger.critical('Mode %s not supported' % options.mode)
sys.exit(-1)
##HEW-T print('group_dict %s' % group_dict)
if (True):
##for group_dict in groupsdict.itervalues() :
##HEW-T print('group_dict:\t%s\n' % (group_dict))
# Use the json module to dump the dictionary to a string for posting.
### data_string = urllib.parse.quote(json.dumps(dataset_dict))
encoding='utf-8'
if PY2 :
data_string = quote(json.dumps(group_dict))##.encode("utf-8") ## HEW-D 160810 , encoding="latin-1" ))##HEW-D .decode(encoding)
else :
data_string = parse.quote(json.dumps(group_dict)).encode(encoding) ## HEW-D 160810 , encoding="latin-1" ))##HEW-D .decode(encoding)
# The action that should be excecuted.
apiaction='http://%s/api/action/%s' % (options.iphost,action)
print('API action excecuted : %s' % apiaction)
request = Request(apiaction,data_string)
# Creating a group requires an authorization header.
request.add_header('Authorization', options.auth)
# Make the HTTP request.
###Py2 response = urllib.request.urlopen(request, data_string)
try:
response = urlopen(request)
assert response.code == 200
except HTTPError as e:
logger.critical('%s : Can not excecute the HTTP request' % e)
sys.exit(-1)
# Use the json module to load CKAN's response into a dictionary.
## print('Response %s' % response.read().decode('utf-8'))
response_dict = response.read().decode('utf-8')
##HEW-T print('Response %s' % response_dict)
response_dict = json.loads(response_dict)
## assert response_dict["success"] is True
# package_create returns the created package as its result.
created_package = response_dict['result']
print('Response:')
pprint.pprint(created_package)
|
[
"json.load",
"os.getpid",
"urllib.request.Request",
"output.Output",
"json.loads",
"time.strftime",
"urllib.request.urlopen",
"json.dumps",
"optparse.TitledHelpFormatter",
"os.path.isfile",
"pprint.pprint",
"os.path.expanduser",
"sys.exit"
] |
[((1759, 1793), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d %H:%M:%S"""'], {}), "('%Y-%m-%d %H:%M:%S')\n", (1772, 1793), False, 'import sys, os, optparse, time\n'), ((1800, 1811), 'os.getpid', 'os.getpid', ([], {}), '()\n', (1809, 1811), False, 'import sys, os, optparse, time\n'), ((1818, 1850), 'output.Output', 'Output', (['pstat', 'now', 'jid', 'options'], {}), '(pstat, now, jid, options)\n', (1824, 1850), False, 'from output import Output\n'), ((2017, 2029), 'json.load', 'json.load', (['f'], {}), '(f)\n', (2026, 2029), False, 'import json\n'), ((2142, 2152), 'sys.exit', 'sys.exit', ([], {}), '()\n', (2150, 2152), False, 'import sys, os, optparse, time\n'), ((2198, 2221), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (2216, 2221), False, 'import sys, os, optparse, time\n'), ((4216, 4247), 'urllib.request.Request', 'Request', (['apiaction', 'data_string'], {}), '(apiaction, data_string)\n', (4223, 4247), False, 'from urllib.request import urlopen, Request\n'), ((4908, 4933), 'json.loads', 'json.loads', (['response_dict'], {}), '(response_dict)\n', (4918, 4933), False, 'import json\n'), ((5119, 5149), 'pprint.pprint', 'pprint.pprint', (['created_package'], {}), '(created_package)\n', (5132, 5149), False, 'import pprint\n'), ((837, 867), 'optparse.TitledHelpFormatter', 'optparse.TitledHelpFormatter', ([], {}), '()\n', (865, 867), False, 'import sys, os, optparse, time\n'), ((2234, 2266), 'os.path.isfile', 'os.path.isfile', (["(home + '/.netrc')"], {}), "(home + '/.netrc')\n", (2248, 2266), False, 'import sys, os, optparse, time\n'), ((2367, 2377), 'sys.exit', 'sys.exit', ([], {}), '()\n', (2375, 2377), False, 'import sys, os, optparse, time\n'), ((4484, 4500), 'urllib.request.urlopen', 'urlopen', (['request'], {}), '(request)\n', (4491, 4500), False, 'from urllib.request import urlopen, Request\n'), ((2798, 2808), 'sys.exit', 'sys.exit', ([], {}), '()\n', (2806, 2808), False, 'import sys, os, optparse, time\n'), ((3775, 3797), 'json.dumps', 'json.dumps', (['group_dict'], {}), '(group_dict)\n', (3785, 3797), False, 'import json\n'), ((4642, 4654), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (4650, 4654), False, 'import sys, os, optparse, time\n'), ((3398, 3410), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (3406, 3410), False, 'import sys, os, optparse, time\n'), ((3927, 3949), 'json.dumps', 'json.dumps', (['group_dict'], {}), '(group_dict)\n', (3937, 3949), False, 'import json\n')]
|
#!/usr/bin/env python3
import sys
import os
import glob
if len(sys.argv[1:]) == 0:
dirs = [os.getcwd()]
else:
dirs = sys.argv[1:]
for dir in dirs:
for notebook in glob.glob(os.path.join(dir, '*.ipynb')):
cmd = 'ipython nbconvert --to rst {0}'.format(notebook)
print(cmd)
os.system(cmd)
|
[
"os.getcwd",
"os.path.join",
"os.system"
] |
[((97, 108), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (106, 108), False, 'import os\n'), ((188, 216), 'os.path.join', 'os.path.join', (['dir', '"""*.ipynb"""'], {}), "(dir, '*.ipynb')\n", (200, 216), False, 'import os\n'), ((310, 324), 'os.system', 'os.system', (['cmd'], {}), '(cmd)\n', (319, 324), False, 'import os\n')]
|
#-*- coding: utf-8 -*-
from DBP.models import Base,session
from sqlalchemy import Column, Integer, Unicode, Enum, Date, String
from sqlalchemy import Table, ForeignKey, PrimaryKeyConstraint
from sqlalchemy.sql.expression import label
from sqlalchemy.orm import relationship
from sqlalchemy.sql import func
from datetime import datetime
import random
from werkzeug.security import generate_password_hash, check_password_hash
enrollcode = {
"Waiting" : u"승인 대기중",
"Approved" : u"승인 완료",
"Refused" : u"승인 거절",
}
class Enroll(Base):
__tablename__ = 'Enroll'
__table_args__ = (PrimaryKeyConstraint('taskprefix','userid',name='enroll_pk'),)
taskprefix = Column('taskprefix', Unicode(100), ForeignKey('Task.prefix'), nullable = False)
userid = Column('userid', Integer, ForeignKey('User.id'), nullable = False)
status = Column('status',Enum(u"Waiting",u"Approved",u"Refused"), nullable = False , server_default = "Waiting")
user = relationship("User", backref="enrolls")
class User(Base):
__tablename__ = 'User'
id = Column(Integer, primary_key=True, autoincrement = True, nullable = False)
loginid = Column(Unicode(100), unique = True, nullable = False)
password = Column(String(100), nullable = False)
name = Column(Unicode(100), nullable = False)
gender = Column(Enum(u"남자", u"여자"), nullable = False, server_default = u"남자")
address = Column(Unicode(255))
role = Column(Enum(u"관리자", u"제출자", u"평가자"), nullable = False, server_default = u"제출자")
score = Column(Integer, server_default = "0", nullable = False)
birth = Column(Date)
cellphone = Column(Unicode(15))
def __init__(self,loginid,name,password):
self.loginid = loginid
self.name = name
self.password = generate_password_hash(password)
def checkPassword(self,password):
return check_password_hash(self.password, password)
def dict(self):
tslist = []
for x in self.enrolls:
x.task.setTables()
ts = x.task.dict()
ts["parsednum"] = x.task.getParsedNumBySubmitter(self)
ts["tasknum"] = x.task.getTaskNumBySubmitter(self)
tslist.append(ts)
data = {"id" : self.id,
"loginid" : self.loginid,
"name" : self.name,
"gender" : self.gender,
"address" : self.address,
"role" : self.role,
"score" : self.score,
"birthstring" : self.birth,
"cellphone" : self.cellphone,
"tasks" : tslist
}
if data["birthstring"] :
data["birthstring"] = data["birthstring"].isoformat()
return data
def enrollStatus(self):
enrolls = list()
for enroll in self.enrolls:
task = enroll.task.dict()
if task["status"] == "Stop":
task["status"] = u"수집 종료"
else :
task["status"] = enrollcode[enroll.status]
enrolls.append(task)
return enrolls
def editInfo(self, name, password, gender, address, birth, cellphone):
if password.strip() != "":
self.password = generate_password_hash(password)
self.name = name
self.gender = gender
self.address = address
self.birth = datetime.strptime(birth, "%a %b %d %Y").date()
self.cellphone = cellphone
session.commit()
def setScore(self):
sums = list()
for en in self.enrolls:
en.task.setTables()
ps = session.query(en.task.parsed).filter(en.task.parsed.submitterid == self.id).filter(en.task.parsed.status == u"Evaluated").all()
for p in ps:
sums.append(p.score)
self.score = sum(sums)/len(sums)
def getSubmitInfo(self):
info = self.dict()
submitinfo = dict(parsed = 0, taskdata = 0)
for en in self.enrolls:
en.task.setTables()
submitinfo["parsed"] += session.query(en.task.parsed).filter(en.task.parsed.submitterid == self.id).count()
ps = session.query(en.task.parsed).filter(en.task.parsed.submitterid == self.id).filter(en.task.parsed.status == u"Evaluated").all()
for p in ps:
submitinfo["taskdata"] += len(p.tasks)
info["submitinfo"] = submitinfo
return info
def setStatus(self,status):
self.status = status
session.commit()
@staticmethod
def randomEvaluator():
maxnum = session.query(User).filter(User.role == u"평가자").count()
if maxnum == 0:
return session.query(User).filter(User.role == u"관리자").first()
return session.query(User).filter(User.role == u"평가자")[random.randrange(0,maxnum)]
@staticmethod
def getUser(id):
return session.query(User).get(id)
@staticmethod
def getUsers():
return session.query(User).order_by(User.id).all()
@staticmethod
def newUser(loginid, password, name, gender, address , role, birth,cellphone):
user = User(loginid, name, password)
user.gender = gender
user.address = address
user.role = role
user.birth = datetime.strptime(birth, "%a %b %d %Y").date()
user.cellphone = cellphone
session.add(user)
session.commit()
@staticmethod
def login(loginid, password):
user = session.query(User).filter(User.loginid == loginid).first()
if user and user.checkPassword(password) :
return user
else :
return None
@staticmethod
def deleteUser(user):
for en in user.enrolls:
en.task.setTables()
ps = session.query(en.task.parsed).filter(en.task.parsed.submitterid == user.id).all()
for p in ps:
for t in p.tasks:
session.delete(t)
session.delete(p)
for e in user.enrolls :
session.delete(e)
session.delete(user)
session.commit()
|
[
"sqlalchemy.Enum",
"sqlalchemy.Unicode",
"DBP.models.session.query",
"sqlalchemy.ForeignKey",
"sqlalchemy.PrimaryKeyConstraint",
"sqlalchemy.orm.relationship",
"werkzeug.security.check_password_hash",
"datetime.datetime.strptime",
"random.randrange",
"sqlalchemy.Column",
"sqlalchemy.String",
"DBP.models.session.delete",
"DBP.models.session.add",
"DBP.models.session.commit",
"werkzeug.security.generate_password_hash"
] |
[((941, 980), 'sqlalchemy.orm.relationship', 'relationship', (['"""User"""'], {'backref': '"""enrolls"""'}), "('User', backref='enrolls')\n", (953, 980), False, 'from sqlalchemy.orm import relationship\n'), ((1033, 1102), 'sqlalchemy.Column', 'Column', (['Integer'], {'primary_key': '(True)', 'autoincrement': '(True)', 'nullable': '(False)'}), '(Integer, primary_key=True, autoincrement=True, nullable=False)\n', (1039, 1102), False, 'from sqlalchemy import Column, Integer, Unicode, Enum, Date, String\n'), ((1478, 1529), 'sqlalchemy.Column', 'Column', (['Integer'], {'server_default': '"""0"""', 'nullable': '(False)'}), "(Integer, server_default='0', nullable=False)\n", (1484, 1529), False, 'from sqlalchemy import Column, Integer, Unicode, Enum, Date, String\n'), ((1543, 1555), 'sqlalchemy.Column', 'Column', (['Date'], {}), '(Date)\n', (1549, 1555), False, 'from sqlalchemy import Column, Integer, Unicode, Enum, Date, String\n'), ((585, 647), 'sqlalchemy.PrimaryKeyConstraint', 'PrimaryKeyConstraint', (['"""taskprefix"""', '"""userid"""'], {'name': '"""enroll_pk"""'}), "('taskprefix', 'userid', name='enroll_pk')\n", (605, 647), False, 'from sqlalchemy import Table, ForeignKey, PrimaryKeyConstraint\n'), ((683, 695), 'sqlalchemy.Unicode', 'Unicode', (['(100)'], {}), '(100)\n', (690, 695), False, 'from sqlalchemy import Column, Integer, Unicode, Enum, Date, String\n'), ((697, 722), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""Task.prefix"""'], {}), "('Task.prefix')\n", (707, 722), False, 'from sqlalchemy import Table, ForeignKey, PrimaryKeyConstraint\n'), ((778, 799), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""User.id"""'], {}), "('User.id')\n", (788, 799), False, 'from sqlalchemy import Table, ForeignKey, PrimaryKeyConstraint\n'), ((845, 886), 'sqlalchemy.Enum', 'Enum', (['u"""Waiting"""', 'u"""Approved"""', 'u"""Refused"""'], {}), "(u'Waiting', u'Approved', u'Refused')\n", (849, 886), False, 'from sqlalchemy import Column, Integer, Unicode, Enum, Date, String\n'), ((1125, 1137), 'sqlalchemy.Unicode', 'Unicode', (['(100)'], {}), '(100)\n', (1132, 1137), False, 'from sqlalchemy import Column, Integer, Unicode, Enum, Date, String\n'), ((1191, 1202), 'sqlalchemy.String', 'String', (['(100)'], {}), '(100)\n', (1197, 1202), False, 'from sqlalchemy import Column, Integer, Unicode, Enum, Date, String\n'), ((1237, 1249), 'sqlalchemy.Unicode', 'Unicode', (['(100)'], {}), '(100)\n', (1244, 1249), False, 'from sqlalchemy import Column, Integer, Unicode, Enum, Date, String\n'), ((1286, 1304), 'sqlalchemy.Enum', 'Enum', (['u"""남자"""', 'u"""여자"""'], {}), "(u'남자', u'여자')\n", (1290, 1304), False, 'from sqlalchemy import Column, Integer, Unicode, Enum, Date, String\n'), ((1366, 1378), 'sqlalchemy.Unicode', 'Unicode', (['(255)'], {}), '(255)\n', (1373, 1378), False, 'from sqlalchemy import Column, Integer, Unicode, Enum, Date, String\n'), ((1395, 1423), 'sqlalchemy.Enum', 'Enum', (['u"""관리자"""', 'u"""제출자"""', 'u"""평가자"""'], {}), "(u'관리자', u'제출자', u'평가자')\n", (1399, 1423), False, 'from sqlalchemy import Column, Integer, Unicode, Enum, Date, String\n'), ((1576, 1587), 'sqlalchemy.Unicode', 'Unicode', (['(15)'], {}), '(15)\n', (1583, 1587), False, 'from sqlalchemy import Column, Integer, Unicode, Enum, Date, String\n'), ((1697, 1729), 'werkzeug.security.generate_password_hash', 'generate_password_hash', (['password'], {}), '(password)\n', (1719, 1729), False, 'from werkzeug.security import generate_password_hash, check_password_hash\n'), ((1775, 1819), 'werkzeug.security.check_password_hash', 'check_password_hash', 
(['self.password', 'password'], {}), '(self.password, password)\n', (1794, 1819), False, 'from werkzeug.security import generate_password_hash, check_password_hash\n'), ((3007, 3023), 'DBP.models.session.commit', 'session.commit', ([], {}), '()\n', (3021, 3023), False, 'from DBP.models import Base, session\n'), ((3890, 3906), 'DBP.models.session.commit', 'session.commit', ([], {}), '()\n', (3904, 3906), False, 'from DBP.models import Base, session\n'), ((4642, 4659), 'DBP.models.session.add', 'session.add', (['user'], {}), '(user)\n', (4653, 4659), False, 'from DBP.models import Base, session\n'), ((4662, 4678), 'DBP.models.session.commit', 'session.commit', ([], {}), '()\n', (4676, 4678), False, 'from DBP.models import Base, session\n'), ((5199, 5219), 'DBP.models.session.delete', 'session.delete', (['user'], {}), '(user)\n', (5213, 5219), False, 'from DBP.models import Base, session\n'), ((5222, 5238), 'DBP.models.session.commit', 'session.commit', ([], {}), '()\n', (5236, 5238), False, 'from DBP.models import Base, session\n'), ((2814, 2846), 'werkzeug.security.generate_password_hash', 'generate_password_hash', (['password'], {}), '(password)\n', (2836, 2846), False, 'from werkzeug.security import generate_password_hash, check_password_hash\n'), ((4167, 4194), 'random.randrange', 'random.randrange', (['(0)', 'maxnum'], {}), '(0, maxnum)\n', (4183, 4194), False, 'import random\n'), ((5179, 5196), 'DBP.models.session.delete', 'session.delete', (['e'], {}), '(e)\n', (5193, 5196), False, 'from DBP.models import Base, session\n'), ((2929, 2968), 'datetime.datetime.strptime', 'datetime.strptime', (['birth', '"""%a %b %d %Y"""'], {}), "(birth, '%a %b %d %Y')\n", (2946, 2968), False, 'from datetime import datetime\n'), ((4232, 4251), 'DBP.models.session.query', 'session.query', (['User'], {}), '(User)\n', (4245, 4251), False, 'from DBP.models import Base, session\n'), ((4564, 4603), 'datetime.datetime.strptime', 'datetime.strptime', (['birth', '"""%a %b %d %Y"""'], {}), "(birth, '%a %b %d %Y')\n", (4581, 4603), False, 'from datetime import datetime\n'), ((5131, 5148), 'DBP.models.session.delete', 'session.delete', (['p'], {}), '(p)\n', (5145, 5148), False, 'from DBP.models import Base, session\n'), ((4113, 4132), 'DBP.models.session.query', 'session.query', (['User'], {}), '(User)\n', (4126, 4132), False, 'from DBP.models import Base, session\n'), ((5109, 5126), 'DBP.models.session.delete', 'session.delete', (['t'], {}), '(t)\n', (5123, 5126), False, 'from DBP.models import Base, session\n'), ((3963, 3982), 'DBP.models.session.query', 'session.query', (['User'], {}), '(User)\n', (3976, 3982), False, 'from DBP.models import Base, session\n'), ((4303, 4322), 'DBP.models.session.query', 'session.query', (['User'], {}), '(User)\n', (4316, 4322), False, 'from DBP.models import Base, session\n'), ((4739, 4758), 'DBP.models.session.query', 'session.query', (['User'], {}), '(User)\n', (4752, 4758), False, 'from DBP.models import Base, session\n'), ((3501, 3530), 'DBP.models.session.query', 'session.query', (['en.task.parsed'], {}), '(en.task.parsed)\n', (3514, 3530), False, 'from DBP.models import Base, session\n'), ((4047, 4066), 'DBP.models.session.query', 'session.query', (['User'], {}), '(User)\n', (4060, 4066), False, 'from DBP.models import Base, session\n'), ((4984, 5013), 'DBP.models.session.query', 'session.query', (['en.task.parsed'], {}), '(en.task.parsed)\n', (4997, 5013), False, 'from DBP.models import Base, session\n'), ((3122, 3151), 'DBP.models.session.query', 'session.query', 
(['en.task.parsed'], {}), '(en.task.parsed)\n', (3135, 3151), False, 'from DBP.models import Base, session\n'), ((3593, 3622), 'DBP.models.session.query', 'session.query', (['en.task.parsed'], {}), '(en.task.parsed)\n', (3606, 3622), False, 'from DBP.models import Base, session\n')]
|
from abc import abstractmethod
from typing import List, Dict
from src.bounding_box import BoundingBox
from src.utils.enumerators import BBType, BBFormat
import torch.nn.functional as F
class ModelEvaluator:
def __init__(self):
self._gt_bboxes = []
self._predicted_bboxes = []
self._img_count = 0
def get_gt_bboxes(self) -> List[BoundingBox]:
"""
Returns a list containing the ground truth bounding boxes
:return:
"""
return self._gt_bboxes
def get_predicted_bboxes(self) -> List[BoundingBox]:
"""
Returns a list containing the predicted bounding boxes
:return:
"""
return self._predicted_bboxes
def add_predictions(self, targets, predictions):
img_count_temp = self._img_count
for target in targets:
for label, [x, y, w, h] in zip(target['labels'].tolist(), target['boxes'].tolist()):
self._gt_bboxes.append(BoundingBox(
image_name=str(self._img_count),
class_id=str(label),
coordinates=(x - w / 2, y - h / 2, w, h),
bb_type=BBType.GROUND_TRUTH,
format=BBFormat.XYWH,
))
self._img_count += 1
pred_logits, pred_boxes_images = predictions['pred_logits'], predictions['pred_boxes']
prob = F.softmax(pred_logits, -1)
scores_images, labels_images = prob[..., :-1].max(-1)
for scores, labels, pred_boxes in zip(scores_images, labels_images, pred_boxes_images):
for score, label, [x, y, w, h] in zip(scores, labels, pred_boxes):
label = label.item()
score = score.item()
if label >= 0:
self._predicted_bboxes.append(
BoundingBox(
image_name=str(img_count_temp),
class_id=str(label),
coordinates=(x - w / 2, y - h / 2, w, h),
bb_type=BBType.DETECTED,
format=BBFormat.XYWH,
confidence=score
)
)
img_count_temp += 1
@abstractmethod
def get_metrics(self) -> Dict:
pass
|
[
"torch.nn.functional.softmax"
] |
[((1402, 1428), 'torch.nn.functional.softmax', 'F.softmax', (['pred_logits', '(-1)'], {}), '(pred_logits, -1)\n', (1411, 1428), True, 'import torch.nn.functional as F\n')]
|
"""
Numba-specific errors and warnings.
"""
from __future__ import print_function, division, absolute_import
import contextlib
from collections import defaultdict
import warnings
# Filled at the end
__all__ = []
class NumbaWarning(Warning):
"""
Base category for all Numba compiler warnings.
"""
class PerformanceWarning(NumbaWarning):
"""
Warning category for when an operation might not be
as fast as expected.
"""
class WarningsFixer(object):
"""
An object "fixing" warnings of a given category caught during
certain phases. The warnings can have their filename and lineno fixed,
and they are deduplicated as well.
"""
def __init__(self, category):
self._category = category
# {(filename, lineno, category) -> messages}
self._warnings = defaultdict(set)
@contextlib.contextmanager
def catch_warnings(self, filename=None, lineno=None):
"""
Store warnings and optionally fix their filename and lineno.
"""
with warnings.catch_warnings(record=True) as wlist:
warnings.simplefilter('always', self._category)
yield
for w in wlist:
msg = str(w.message)
if issubclass(w.category, self._category):
# Store warnings of this category for deduplication
filename = filename or w.filename
lineno = lineno or w.lineno
self._warnings[filename, lineno, w.category].add(msg)
else:
# Simply emit other warnings again
warnings.warn_explicit(msg, w.category,
w.filename, w.lineno)
def flush(self):
"""
Emit all stored warnings.
"""
for (filename, lineno, category), messages in sorted(self._warnings.items()):
for msg in sorted(messages):
warnings.warn_explicit(msg, category, filename, lineno)
self._warnings.clear()
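# A minimal usage sketch of WarningsFixer (illustrative only, kept as a comment):
# warnings raised inside the context manager are collected per
# (filename, lineno, category) and re-emitted, deduplicated, when flush() is called.
#
#   fixer = WarningsFixer(PerformanceWarning)
#   with fixer.catch_warnings(filename="mymodule.py", lineno=1):
#       warnings.warn("loop not vectorized", PerformanceWarning)
#   fixer.flush()  # emits the stored warnings with the fixed filename and lineno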
class NumbaError(Exception):
pass
class IRError(NumbaError):
"""
An error occurred during Numba IR generation.
"""
class RedefinedError(IRError):
pass
class NotDefinedError(IRError):
def __init__(self, name, loc=None):
self.name = name
self.loc = loc
def __str__(self):
loc = "?" if self.loc is None else self.loc
return "{name!r} is not defined in {loc}".format(name=self.name,
loc=self.loc)
class VerificationError(IRError):
pass
class MacroError(NumbaError):
"""
An error occurred during macro expansion.
"""
class DeprecationError(NumbaError):
pass
class LoweringError(NumbaError):
"""
An error occurred during lowering.
"""
def __init__(self, msg, loc):
self.msg = msg
self.loc = loc
super(LoweringError, self).__init__("%s\n%s" % (msg, loc.strformat()))
class ForbiddenConstruct(LoweringError):
"""
A forbidden Python construct was encountered (e.g. use of locals()).
"""
class TypingError(NumbaError):
"""
A type inference failure.
"""
def __init__(self, msg, loc=None):
self.msg = msg
self.loc = loc
if loc:
super(TypingError, self).__init__("%s\n%s" % (msg, loc.strformat()))
else:
super(TypingError, self).__init__("%s" % (msg,))
class UntypedAttributeError(TypingError):
def __init__(self, value, attr, loc=None):
msg = 'Unknown attribute "{attr}" of type {type}'.format(type=value,
attr=attr)
super(UntypedAttributeError, self).__init__(msg, loc=loc)
class ByteCodeSupportError(NumbaError):
"""
Failure to extract the bytecode of the user's function.
"""
class CompilerError(NumbaError):
"""
Some high-level error in the compiler.
"""
class ConstantInferenceError(NumbaError):
"""
Failure during constant inference.
"""
__all__ += [name for (name, value) in globals().items()
if not name.startswith('_') and isinstance(value, type)
and issubclass(value, (Exception, Warning))]
|
[
"collections.defaultdict",
"warnings.warn_explicit",
"warnings.catch_warnings",
"warnings.simplefilter"
] |
[((827, 843), 'collections.defaultdict', 'defaultdict', (['set'], {}), '(set)\n', (838, 843), False, 'from collections import defaultdict\n'), ((1040, 1076), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {'record': '(True)'}), '(record=True)\n', (1063, 1076), False, 'import warnings\n'), ((1099, 1146), 'warnings.simplefilter', 'warnings.simplefilter', (['"""always"""', 'self._category'], {}), "('always', self._category)\n", (1120, 1146), False, 'import warnings\n'), ((1595, 1656), 'warnings.warn_explicit', 'warnings.warn_explicit', (['msg', 'w.category', 'w.filename', 'w.lineno'], {}), '(msg, w.category, w.filename, w.lineno)\n', (1617, 1656), False, 'import warnings\n'), ((1919, 1974), 'warnings.warn_explicit', 'warnings.warn_explicit', (['msg', 'category', 'filename', 'lineno'], {}), '(msg, category, filename, lineno)\n', (1941, 1974), False, 'import warnings\n')]
|
from collections import namedtuple
from contextlib import contextmanager
from pyroscope import agent
Config = namedtuple('Config', ('app_name', 'server_address',
'auth_token', 'sample_rate', 'with_subprocesses', 'log_level'))
class PyroscopeError(Exception):
pass
def configure(app_name, server_address, auth_token="", sample_rate=100, with_subprocesses=0, log_level="debug", tags=None):
agent.start(app_name, server_address, auth_token, sample_rate, int(with_subprocesses), log_level)
if tags is not None:
tag(tags)
def stop():
agent.stop()
def change_name(name):
agent.change_name(name)
@contextmanager
def tag_wrapper(tags):
for key, value in tags.items():
agent.set_tag(key, value)
try:
yield
finally:
for key in tags.keys():
agent.set_tag(key, "")
def tag(tags):
for key, value in tags.items():
agent.set_tag(key, value)
def remove_tags(*keys):
for key in keys:
agent.set_tag(key, "")
def build_summary():
return agent.build_summary()
def test_logger():
agent.test_logger()
|
[
"pyroscope.agent.stop",
"pyroscope.agent.set_tag",
"collections.namedtuple",
"pyroscope.agent.test_logger",
"pyroscope.agent.build_summary",
"pyroscope.agent.change_name"
] |
[((112, 231), 'collections.namedtuple', 'namedtuple', (['"""Config"""', "('app_name', 'server_address', 'auth_token', 'sample_rate',\n 'with_subprocesses', 'log_level')"], {}), "('Config', ('app_name', 'server_address', 'auth_token',\n 'sample_rate', 'with_subprocesses', 'log_level'))\n", (122, 231), False, 'from collections import namedtuple\n'), ((581, 593), 'pyroscope.agent.stop', 'agent.stop', ([], {}), '()\n', (591, 593), False, 'from pyroscope import agent\n'), ((623, 646), 'pyroscope.agent.change_name', 'agent.change_name', (['name'], {}), '(name)\n', (640, 646), False, 'from pyroscope import agent\n'), ((1060, 1081), 'pyroscope.agent.build_summary', 'agent.build_summary', ([], {}), '()\n', (1079, 1081), False, 'from pyroscope import agent\n'), ((1107, 1126), 'pyroscope.agent.test_logger', 'agent.test_logger', ([], {}), '()\n', (1124, 1126), False, 'from pyroscope import agent\n'), ((732, 757), 'pyroscope.agent.set_tag', 'agent.set_tag', (['key', 'value'], {}), '(key, value)\n', (745, 757), False, 'from pyroscope import agent\n'), ((922, 947), 'pyroscope.agent.set_tag', 'agent.set_tag', (['key', 'value'], {}), '(key, value)\n', (935, 947), False, 'from pyroscope import agent\n'), ((1003, 1025), 'pyroscope.agent.set_tag', 'agent.set_tag', (['key', '""""""'], {}), "(key, '')\n", (1016, 1025), False, 'from pyroscope import agent\n'), ((838, 860), 'pyroscope.agent.set_tag', 'agent.set_tag', (['key', '""""""'], {}), "(key, '')\n", (851, 860), False, 'from pyroscope import agent\n')]
|
import discord
from discord.ext import commands
from forex_python.converter import CurrencyRates,CurrencyCodes
from datetime import date
class Exchange(commands.Cog):
def __init__(self,bot):
self.bot = bot
self.exchangeNames = {
"EUR":["eur","euro member countries"],
"IDR":["idr","indonesia rupiah"],
"BGN":["bgn","bulgaria lev"],
"ILS":["ils","israel shekel"],
"GBP":["gbp","united kingdom pound"],
"DKK":["dkk","denmark krone"],
"CAD":["cad","canada dollar"],
"JPY":["jpy","japan yen"],
"HUF":["huf","hungary forint"],
"RON":["ron","Romania New Leu"],
"MYR":["myr","malaysia ringgit"],
"SEK":["sek","sweden krona"],
"SGD":["sgd","singapore dollar"],
"HKD":["hkd","hong kong dollar"],
"AUD":["aud","australia dollar"],
"CHF":["chf","switzerland franc"],
"KRW":["krw","korea won","korea south won"],
"CNY":["cny","china yun renminbi"],
"TRY":["try","turkey lira"],
"HRK":["hrk","croatia kuna"],
"NZD":["nzd","new zealand dollar"],
"THB":["thb","thailand baht"],
"USD":["usd","united states dollar"],
"NOK":["nok","norway krone"],
"RUB":["rub","russia ruble"],
"INR":["inr","india ruppe"],
"MXN":["mxn","mexico peso"],
"CZK":["czh","czech republic koruna"],
"BRL":["brl","brazil real"],
"PLN":["pln","poland zloty"],
"PHP":["php","philippines peso"],
"ZAR":["zar","south africa rand"]
}
self.CurrencyRates = CurrencyRates()
self.CurrencyCodes = CurrencyCodes()
@commands.command()
async def ExchangeRate(self,ctx):
''' Gets exchange rate from between two currencies.
$ExchangeRate USD to JPY => The exchange rate from USD to JPY is xxx.xx
'''
letters = ctx.message.content.split(maxsplit=1)[1]
letters = letters.lower()
letters = letters.split("to")
fromAddress = letters[0].strip()
toAddress = letters[1].strip()
fromID = self.getAddressName(fromAddress)
toID = self.getAddressName(toAddress)
fromCode = self.CurrencyCodes.get_symbol(fromID)
toCode = self.CurrencyCodes.get_symbol(toID)
if fromID == -1:
await ctx.send("Was unable to find currency for {}".format(fromAddress))
elif toID == -1:
await ctx.send("Was unable to find currency for {}".format(toAddress))
else:
rate = self.CurrencyRates.get_rate(fromID,toID)
await ctx.send("The exchange rate from {}1 is {}{:.2f}".format(fromCode,toCode,rate))
def getAddressName(self,address):
'''Gets the proper address name for desired currency
address is the name of the desired currency
returns the id of the desired currency or -1 if none are valid
'''
for id,addArray in self.exchangeNames.items():
if address in addArray:
return id
return -1
def setup(bot):
bot.add_cog(Exchange(bot))
|
[
"forex_python.converter.CurrencyRates",
"forex_python.converter.CurrencyCodes",
"discord.ext.commands.command"
] |
[((1797, 1815), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (1813, 1815), False, 'from discord.ext import commands\n'), ((1730, 1745), 'forex_python.converter.CurrencyRates', 'CurrencyRates', ([], {}), '()\n', (1743, 1745), False, 'from forex_python.converter import CurrencyRates, CurrencyCodes\n'), ((1775, 1790), 'forex_python.converter.CurrencyCodes', 'CurrencyCodes', ([], {}), '()\n', (1788, 1790), False, 'from forex_python.converter import CurrencyRates, CurrencyCodes\n')]
|
# encoding: utf-8
import logging_helper
from .timeout import TimersBase
logging = logging_helper.setup_logging()
class Stopwatch(TimersBase):
def __init__(self,
high_precision=None):
super(Stopwatch, self).__init__(high_precision=high_precision)
self.reset()
def reset(self):
self.__stop_time = None
self.__laps = []
self.__start_time = self._now
def stop(self):
if self.__stop_time is None:
self.__stop_time = self._now
return self.glance
else:
return self.__stop_time
def lap(self):
lap_end_time = self._now
lap_start_time = (self.__start_time
if not self.__laps
else self.__laps[-1][u'lap_end_time'])
self.__laps.append({
u'lap_start_time': lap_start_time,
u'lap_end_time': lap_end_time,
u'lap_time': lap_end_time - lap_start_time
})
return self.__laps[-1][u'lap_time']
@property
def lap_times(self):
return self.__laps
@property
def glance(self):
if self.__stop_time:
return self.__stop_time - self.__start_time
else:
return self._now - self.__start_time
|
[
"logging_helper.setup_logging"
] |
[((84, 114), 'logging_helper.setup_logging', 'logging_helper.setup_logging', ([], {}), '()\n', (112, 114), False, 'import logging_helper\n')]
|
import select
import socket
import struct
import time
import uuid
from collections import deque
from .icmp import parse_icmp_packet
from .ip import get_ip_address, parse_ipv4_packet
_FMT_ICMP_PACKET = '>BBHHH'
def chesksum(data):
n = len(data)
m = n % 2
sum_ = 0
for i in range(0, n - m, 2):
        # combine the data two bytes at a time: the first byte is the low-order byte, the second the high-order byte
sum_ += (data[i]) + ((data[i + 1]) << 8)
if m:
sum_ += (data[-1])
    # fold any carry above 16 bits back into the low 16 bits
sum_ = (sum_ >> 16) + (sum_ & 0xffff)
    # if there is still a carry above 16 bits, fold it in again
sum_ += (sum_ >> 16)
answer = ~sum_ & 0xffff
    # convert from host byte order to network byte order (little-endian to big-endian)
answer = answer >> 8 | (answer << 8 & 0xff00)
return answer
def dealtime(dst_addr, sumtime, shorttime, longtime, accept, i, time):
sumtime += time
print(sumtime)
if i == 4:
print("{0}的Ping统计信息:".format(dst_addr))
msg = "数据包:已发送={0},接收={1},丢失={2}({3}%丢失),\n往返行程的估计时间(以毫秒为单位):\n\t最短={4}ms,最长={5}ms,平均={6}ms"
print(msg.format(i + 1, accept, i + 1 - accept, (i + 1 - accept) / (i + 1) * 100, shorttime, longtime, sumtime))
class TimedData:
def __init__(self, data, ts):
self.data = data
self.ts = ts
class MovingStatistic:
def __init__(self, duration):
self._duration = duration
self._q = deque()
def update(self, data):
now = time.time()
self._q.append(TimedData(data, now))
while len(self._q) > 0 and now - self._q[0].ts > self._duration:
self._q.popleft()
class PingStat(MovingStatistic):
def total(self):
return len(self._q)
# def success(self):
# return sum(err is None for _, err in self._q)
def failure(self):
return sum(item.data[1] is not None for item in self._q)
def failure_ratio(self):
total = self.total()
if total == 0:
return 0.0
return self.failure() / total
def time_avg(self):
cnt = 0
sum_t = 0.0
for item in self._q:
t, err = item.data
if err is None:
cnt += 1
sum_t += t
if cnt == 0:
return 0.0
return sum_t / cnt
def _get_random_payload(length):
if length == 0:
return b''
n = (length // 16) + 1
if n == 1:
return uuid.uuid4().bytes[:length]
else:
return (uuid.uuid4().bytes * n)[:length]
def ping(addr: str, interval=3.0, timeout=3.0):
stat = PingStat(60.0)
rawsocket = socket.socket(socket.AF_INET, socket.SOCK_RAW, socket.IPPROTO_ICMP)
data_sequence = 1
while True:
time_elapsed, err = _ping_once(rawsocket, addr, data_sequence, _get_random_payload(64), timeout)
data_sequence = (data_sequence + 1) % 0xffff
stat.update((time_elapsed, err))
total = stat.total()
fail = stat.failure()
print('total:', total, ', failed:', fail, ', average time:', stat.time_avg())
time.sleep(interval)
def _ping_once(rawsocket, addr, data_sequence, payload, timeout):
try:
dst_addr = str(get_ip_address(addr))
except Exception as e:
return timeout, f'failed to resolve domain, {e}'
data_type = 8
data_code = 0
data_id = 0
icmp_packet = build_icmp_packet(data_type, data_code, data_id, data_sequence, payload)
t0 = time.time()
rawsocket.sendto(icmp_packet, (dst_addr, 0))
while True:
time_elapsed = time.time() - t0
if time_elapsed >= timeout:
return timeout, 'timeout'
rlist, _, _ = select.select([rawsocket], [], [], timeout - time_elapsed)
if len(rlist) == 0:
return timeout, 'timeout'
data, _ = rawsocket.recvfrom(1500)
time_elapsed = time.time() - t0
header, ip_payload = parse_ipv4_packet(data)
if header.protocol == 'icmp':
icmp_header, res_payload = parse_icmp_packet(ip_payload)
if payload == res_payload:
return time_elapsed, None
else:
continue
else:
continue
def build_icmp_packet(data_type, data_code, data_id, data_seq, payload):
l_payload = len(payload)
if l_payload == 0:
icmp_packet = struct.pack(_FMT_ICMP_PACKET, data_type, data_code, 0, data_id, data_seq)
icmp_chesksum = chesksum(icmp_packet)
icmp_packet = struct.pack(_FMT_ICMP_PACKET, data_type, data_code, icmp_chesksum, data_id, data_seq)
else:
fmt = _FMT_ICMP_PACKET + f'{l_payload}s'
icmp_packet = struct.pack(fmt, data_type, data_code, 0, data_id, data_seq, payload)
icmp_chesksum = chesksum(icmp_packet)
icmp_packet = struct.pack(fmt, data_type, data_code, icmp_chesksum, data_id, data_seq, payload)
return icmp_packet
def play_packet():
# print(socket.getaddrinfo(socket.gethostname(), None, family=socket.AddressFamily.AF_INET))
sock = socket.socket(socket.PF_PACKET, socket.SOCK_RAW, socket.htons(0x0800))
sock.bind(('wlp3s0', socket.htons(0x0800)))
header = struct.pack('>6s6s2s', b'\xaa\xaa\xaa\xaa\xaa\xaa', b'\xbb\xbb\xbb\xbb\xbb\xbb', b'\x08\x00')
packet = header + b'hello, world!'
sock.send(packet)
print(packet)
# print(res)
sock.close()
def arp_boardcast():
sock = socket.socket(socket.PF_PACKET, socket.SOCK_RAW, socket.htons(0x0800))
sock.bind(('wlp3s0', socket.htons(0x0800)))
ether_type = b'\x08\x06'
header = struct.pack('>6s6s2s', b'\xff\xff\xff\xff\xff\xff', b'\xbb\xbb\xbb\xbb\xbb\xbb', ether_type)
|
[
"uuid.uuid4",
"socket.socket",
"struct.pack",
"time.sleep",
"time.time",
"select.select",
"socket.htons",
"collections.deque"
] |
[((2494, 2561), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_RAW', 'socket.IPPROTO_ICMP'], {}), '(socket.AF_INET, socket.SOCK_RAW, socket.IPPROTO_ICMP)\n', (2507, 2561), False, 'import socket\n'), ((3331, 3342), 'time.time', 'time.time', ([], {}), '()\n', (3340, 3342), False, 'import time\n'), ((5033, 5130), 'struct.pack', 'struct.pack', (['""">6s6s2s"""', "b'\\xaa\\xaa\\xaa\\xaa\\xaa\\xaa'", "b'\\xbb\\xbb\\xbb\\xbb\\xbb\\xbb'", "b'\\x08\\x00'"], {}), "('>6s6s2s', b'\\xaa\\xaa\\xaa\\xaa\\xaa\\xaa',\n b'\\xbb\\xbb\\xbb\\xbb\\xbb\\xbb', b'\\x08\\x00')\n", (5044, 5130), False, 'import struct\n'), ((5435, 5531), 'struct.pack', 'struct.pack', (['""">6s6s2s"""', "b'\\xff\\xff\\xff\\xff\\xff\\xff'", "b'\\xbb\\xbb\\xbb\\xbb\\xbb\\xbb'", 'ether_type'], {}), "('>6s6s2s', b'\\xff\\xff\\xff\\xff\\xff\\xff',\n b'\\xbb\\xbb\\xbb\\xbb\\xbb\\xbb', ether_type)\n", (5446, 5531), False, 'import struct\n'), ((1302, 1309), 'collections.deque', 'deque', ([], {}), '()\n', (1307, 1309), False, 'from collections import deque\n'), ((1353, 1364), 'time.time', 'time.time', ([], {}), '()\n', (1362, 1364), False, 'import time\n'), ((2952, 2972), 'time.sleep', 'time.sleep', (['interval'], {}), '(interval)\n', (2962, 2972), False, 'import time\n'), ((3544, 3602), 'select.select', 'select.select', (['[rawsocket]', '[]', '[]', '(timeout - time_elapsed)'], {}), '([rawsocket], [], [], timeout - time_elapsed)\n', (3557, 3602), False, 'import select\n'), ((4220, 4293), 'struct.pack', 'struct.pack', (['_FMT_ICMP_PACKET', 'data_type', 'data_code', '(0)', 'data_id', 'data_seq'], {}), '(_FMT_ICMP_PACKET, data_type, data_code, 0, data_id, data_seq)\n', (4231, 4293), False, 'import struct\n'), ((4362, 4451), 'struct.pack', 'struct.pack', (['_FMT_ICMP_PACKET', 'data_type', 'data_code', 'icmp_chesksum', 'data_id', 'data_seq'], {}), '(_FMT_ICMP_PACKET, data_type, data_code, icmp_chesksum, data_id,\n data_seq)\n', (4373, 4451), False, 'import struct\n'), ((4529, 4598), 'struct.pack', 'struct.pack', (['fmt', 'data_type', 'data_code', '(0)', 'data_id', 'data_seq', 'payload'], {}), '(fmt, data_type, data_code, 0, data_id, data_seq, payload)\n', (4540, 4598), False, 'import struct\n'), ((4667, 4752), 'struct.pack', 'struct.pack', (['fmt', 'data_type', 'data_code', 'icmp_chesksum', 'data_id', 'data_seq', 'payload'], {}), '(fmt, data_type, data_code, icmp_chesksum, data_id, data_seq,\n payload)\n', (4678, 4752), False, 'import struct\n'), ((4950, 4968), 'socket.htons', 'socket.htons', (['(2048)'], {}), '(2048)\n', (4962, 4968), False, 'import socket\n'), ((5323, 5341), 'socket.htons', 'socket.htons', (['(2048)'], {}), '(2048)\n', (5335, 5341), False, 'import socket\n'), ((3431, 3442), 'time.time', 'time.time', ([], {}), '()\n', (3440, 3442), False, 'import time\n'), ((3735, 3746), 'time.time', 'time.time', ([], {}), '()\n', (3744, 3746), False, 'import time\n'), ((4997, 5015), 'socket.htons', 'socket.htons', (['(2048)'], {}), '(2048)\n', (5009, 5015), False, 'import socket\n'), ((5370, 5388), 'socket.htons', 'socket.htons', (['(2048)'], {}), '(2048)\n', (5382, 5388), False, 'import socket\n'), ((2315, 2327), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (2325, 2327), False, 'import uuid\n'), ((2369, 2381), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (2379, 2381), False, 'import uuid\n')]
|
from glob import glob
import re
require_pattern = re.compile(r'\brequire\("(.*)"\)')
print("digraph require_graph {")
for path in glob("**/*.lua", recursive=True):
with open(path) as f:
caller = path.replace(".lua", "").replace("/", ".")
caller_node = caller.replace(".", "__")
print(f" {caller_node} [label=\"{caller}\"];")
for line in f.readlines():
match = require_pattern.search(line)
if match:
calee = match.group(1)
calee_node = calee.replace(".", "__")
print(f" {caller_node} -> {calee_node};")
print("}")
|
[
"glob.glob",
"re.compile"
] |
[((51, 87), 're.compile', 're.compile', (['"""\\\\brequire\\\\("(.*)"\\\\)"""'], {}), '(\'\\\\brequire\\\\("(.*)"\\\\)\')\n', (61, 87), False, 'import re\n'), ((133, 165), 'glob.glob', 'glob', (['"""**/*.lua"""'], {'recursive': '(True)'}), "('**/*.lua', recursive=True)\n", (137, 165), False, 'from glob import glob\n')]
|
# -*- coding: utf-8 -*-
"""
Created on Wed Aug 29 15:47:36 2018
@author: akurnizk
"""
import flopy
import numpy as np
import sys,os
import matplotlib.pyplot as plt
# Location of BitBucket folder containing cgw folder
cgw_code_dir = r'E:\python'
sys.path.insert(0,cgw_code_dir)
from cgw.utils import general_utils as genu
from cgw.utils import feature_utils as shpu
from cgw.utils import raster_utils as rastu
# Assign name and create modflow model object
modelname = 'CheqModel1'
work_dir = r'E:\Herring'
mf = flopy.modflow.Modflow(modelname, exe_name='mf2005',model_ws=work_dir)
swt = flopy.seawat.Seawat(modelname, exe_name='swtv4')
print(swt.namefile)
mean_sea_level = 0.843 # in meters at closest NOAA station
#%%
# Example of making a MODFLOW-like grid from a shapefile
data_dir = r'E:\ArcGIS'
shp_fname = os.path.join(data_dir,'Chequesset_Model_Area_UTM.shp')
cell_spacing = 10. # model grid cell spacing in meters
# Define inputs for shp_to_grid function
shp_to_grid_dict = {'shp':shp_fname,'cell_spacing':cell_spacing}
grid_outputs = shpu.shp_to_grid(**shp_to_grid_dict)
# Pop out all of the outputs into individual variables
[X_nodes,Y_nodes],model_polygon,[out_proj,[xshift,yshift],min_angle] = grid_outputs
grid_transform = [out_proj,[xshift,yshift],min_angle] # make transform list
# Can calculate cell centers (where heads are calculated), in different coordinates
cc,cc_proj,cc_ll = shpu.nodes_to_cc([X_nodes,Y_nodes],grid_transform)
# Use model_polygon to define active cells in the model
ir,ic,_ = shpu.gridpts_in_shp(model_polygon,cc)
active_cells = genu.define_mask(cc,[ir,ic])
"""
Plot active cells
"""
#fig,ax = genu.plt.subplots(1,2)
#genu.quick_plot(active_cells.astype(int),ax=ax[0]) # in row, column space
#ax[0].set_xlabel('column #')
#ax[0].set_ylabel('row #')
#c1=ax[1].pcolormesh(cc[0],cc[1],active_cells.astype(int)) # in model coordinates
#genu.plt.colorbar(c1,ax=ax[1],orientation='horizontal')
#ax[1].set_xlabel('X [m]')
#ax[1].set_ylabel('Y [m]')
#%% Example of loading DEM data for that area
dem_fname = os.path.join(data_dir,'Cheq10mx10m_UTM.tif')
# Experimental part \/
dem_X,dem_Y,dem_da = rastu.load_geotif(dem_fname) # da is an xarray data array
dem_vals = dem_da.values.squeeze()
#dem_X, dem_Y, dem_vals = rastu.read_griddata(dem_fname)
# Know that dem is way higher resolution...can decimate it to save time
decimate_by_ncells = 1 # by every n cells
#dem_X = dem_X[::decimate_by_ncells,::decimate_by_ncells]
#dem_Y = dem_Y[::decimate_by_ncells,::decimate_by_ncells]
#dem_vals = dem_vals[::decimate_by_ncells,::decimate_by_ncells]
# Set no-data value to nan
dem_vals[dem_vals==dem_da.nodatavals[0]] = genu.np.nan
# Transform dem to model coordinates with linear interpolation
trans_dict = {'orig_xy':[dem_X,dem_Y],'orig_val':dem_vals,'active_method':'linear',
'new_xy':cc_proj} # if dem in same projection as model boundary shp
dem_trans = rastu.subsection_griddata(**trans_dict)
dem_trans[dem_trans<-1000] = genu.np.nan
genu.quick_plot(dem_trans)
#%% DEM model inputs
Lx = np.amax(dem_X)-np.amin(dem_X)
Ly = np.amax(dem_Y)-np.amin(dem_Y)
zbot = -100 # if bottom of model is horizontal, approx. bedrock (check Masterson)
nlay = 1 # 1 layer model
nrow, ncol = cc[0].shape # to use when cheq_griddev is implemented
delr = cell_spacing
delc = cell_spacing
delv = (dem_trans - zbot) / nlay
botm = zbot
# Tutorial 1 model domain and grid definition
#Lx = 1000.
#Ly = 1000.
#ztop = 0.
#zbot = -50.
#nlay = 1
#nrow = 10
#ncol = 10
#delr = Lx/ncol
#delc = Ly/nrow
#delv = (ztop - zbot) / nlay
#botm = np.linspace(ztop, zbot, nlay + 1)
#%%
"""
Time Stepping
"""
# Time step parameters
total_length = 10 # days
dt = 6 # stress period time step, hrs
perlen_days = dt/24. # stress period time step, days
nper = int(total_length/perlen_days) # the number of stress periods in the simulation
nstp_default = dt/0.5 # stress period time step divided by step time length (to better interpolate tidal changes, set to 0.5 hrs)
perlen = [perlen_days]*nper # length of a stress period; each item in the matrix is the amount
# of elapsed time since the previous point (need to change the first)
perlen[0] = 1 # set first step as steady state
steady = [False]*nper
steady[0] = True # first step steady state
nstp = [nstp_default]*nper # number of time steps in a stress period
nstp[0] = 1
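# Sanity check on the numbers above: total_length = 10 days and dt = 6 hours give
# perlen_days = 0.25 d, nper = int(10/0.25) = 40 stress periods, and
# nstp_default = 6/0.5 = 12 time steps per stress period (the first stress period is
# a single steady-state step).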
#Tutorial 2 default time step parameters
#nper = 3
#perlen = [1, 100, 100]
#nstp = [1, 100, 100]
#steady = [True, False, False]
#%% # Create the discretization (DIS) object
dis = flopy.modflow.ModflowDis(mf, nlay, nrow, ncol, delr=delr, delc=delc,
top=dem_trans, botm=botm)
# Tutorial 1 DIS object
#dis = flopy.modflow.ModflowDis(mf, nlay, nrow, ncol, delr=delr, delc=delc,
#top=dem_vals, botm=botm[1:])
# Tutorial 2 DIS object when transient conditions are implemented
# dis = flopy.modflow.ModflowDis(mf, nlay, nrow, ncol, delr=delr, delc=delc,
# top=ztop, botm=botm[1:],
# nper=nper, perlen=perlen, nstp=nstp, steady=steady)
#%% # Variables for the BAS (basic) package
# Added 5/28/19
"""
Active cells and the like are defined with the Basic package (BAS), which is required for every MOD-FLOW model.
It contains the ibound array, which is used to specify which cells are active (value is positive), inactive (value is 0),
or fixed head (value is negative). The numpy package (aliased as np) can be used to quickly initialize the ibound array
with values of 1, and then set the ibound value for the first and last columns to -1. The numpy package (and Python, in general)
uses zero-based indexing and supports negative indexing so that row 1 and column 1, and row 1 and column 201, can be
referenced as [0,0], and [0,-1], respectively. Although this simulation is for steady flow, starting heads still need
to be specified. They are used as the head for fixed-head cells (where ibound is negative), and as a starting point to compute
the saturated thickness for cases of unconfined flow.
ibound = np.ones((1, 201))
ibound[0, 0] = ibound[0, -1] = -1
"""
ibound = np.ones((nlay, nrow, ncol), dtype=np.int32)
ibound[:,~active_cells] = 0 # far offshore cells are inactive
ibound[0,dem_trans<mean_sea_level] = -1 # fixed head for everything less than msl
ibound[:,np.isnan(dem_trans)] = 0 # nan cells are inactive
genu.quick_plot(ibound) # plots boundary condition: 1 is above mean sea level (msl), 0 is msl, -1 is under msl.
strt = np.ones((nlay, nrow, ncol), dtype=np.float32)
active_dem_heights = dem_trans[active_cells & ~np.isnan(dem_trans)]
strt[0, active_cells & ~np.isnan(dem_trans)] = active_dem_heights # start with freshwater at surface elevation
strt[0, dem_trans<mean_sea_level] = mean_sea_level # start with water at sea level
genu.quick_plot(strt) # plots starting condition
bas = flopy.modflow.ModflowBas(mf, ibound=ibound, strt=strt)
#%% # added 3/8/19 - creates matrix where hydraulic conductivities (hk = horiz, vk = vert) can be implemented
hk1 = np.ones((nlay,nrow,ncol), float)
hk1[:,:,:]=10. # everything set to 10 - use data? calculate?
vka1 = np.ones((nlay,nrow,ncol), float)
vka1[:,:,:]=10. # everything set to 10.
# Add LPF package to the MODFLOW model
lpf = flopy.modflow.ModflowLpf(mf, hk=hk1, vka=vka1, ipakcb=53)
#%%
"""
Transient General-Head Boundary Package
First, we will create the GHB object, which is of the following type:
flopy.modflow.ModflowGhb.
The key to creating Flopy transient boundary packages is recognizing that the
boundary data is stored in a dictionary with key values equal to the
zero-based stress period number and values equal to the boundary conditions
for that stress period. For a GHB the values can be a two-dimensional nested
list of [layer, row, column, stage, conductance]:
Datums for 8447435, Chatham, Lydia Cove MA
https://tidesandcurrents.noaa.gov/datums.html?units=1&epoch=0&id=8447435&name=Chatham%2C+Lydia+Cove&state=MA
"""
# Make list for stress period 1
# Using Mean Sea Level (MSL) in meters at closest NOAA station for stages
#stageleft = mean_sea_level
#stageright = mean_sea_level
#bound_sp1 = []
#for il in range(nlay):
# # Figure out looping through hk1 array to get hk values at each cell for changing conductance.
# condleft = hk1[0,0,0] * (stageleft - zbot) * delc
# condright = hk1[0,0,0] * (stageright - zbot) * delc
# for ir in range(nrow):
# bound_sp1.append([il, ir, 0, stageleft, condleft])
# bound_sp1.append([il, ir, ncol - 1, stageright, condright])
## Only 1 stress period for steady-state model
#print('Adding ', len(bound_sp1), 'GHBs for stress period 1.')
#
#stress_period_data = {0: bound_sp1}
#ghb = flopy.modflow.ModflowGhb(mf, stress_period_data=stress_period_data)
# using single conductance value (see drain for modification based on Masterson, 2004)
conductance = 1000. # (modify 1000 to actual conductance)
bound_sp1 = []
stress_period_data = {0: bound_sp1}
ghb = flopy.modflow.ModflowGhb(mf, stress_period_data=stress_period_data)
#%% # Add drain condition
#Darcy's law states that
#Q = -KA(h1 - h0)/(X1 - X0)
#Where Q is the flow (L3/T)
#K is the hydraulic conductivity (L/T)
#A is the area perpendicular to flow (L2)
#h is head (L)
#X is the position at which head is measured (L)
#Conductance combines the K, A and X terms so that Darcy's law can be expressed as
#Q = -C(h1 - h0)
#where C is the conductance (L2/T)
# https://water.usgs.gov/nrp/gwsoftware/modflow2000/MFDOC/index.html?drn.htm
# from Masterson, 2004
# C = KWL/M where
#C is hydraulic conductance of the seabed (ft2/d);
#K is vertical hydraulic conductivity of seabed deposits
#(ft/d);
#W is width of the model cell containing the seabed (ft);
#L is length of the model cell containing the seabed (ft);
#and
#M is thickness of seabed deposits (ft).
#The vertical hydraulic conductivity (K) of the seabed
#deposits in most of the study area was assumed to be 1 ft/d,
#which is consistent with model simulations of similar coastal
#discharge areas in other areas on Cape Cod (Masterson and
#others, 1998). In the area occupied by Salt Pond and Nauset
#Marsh, it was assumed that there were thick deposits of lowpermeability
#material (<NAME>, U.S. Geological Survey,
#oral commun., 2002) and the vertical hydraulic conductivity
#was set to 0.1 ft/d. The thickness of the seabed deposits was
#assumed to be half the thickness of the model cell containing the
#boundary.
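# A minimal sketch (commented out, not used below) of a spatially variable conductance
# following C = K*W*L/M; the names K_seabed, M_seabed and C_seabed are hypothetical,
# the 1 ft/d seabed K is converted to m/d, and the seabed thickness is taken as half
# the cell height as described above.
#K_seabed = 1. * 0.3048                        # vertical K of seabed deposits [m/d]
#M_seabed = 0.5 * (dem_trans - zbot) / nlay    # seabed thickness: half the model cell [m]
#C_seabed = K_seabed * delr * delc / M_seabed  # conductance per cell [m^2/d]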
# still using simple conductance
land_cells = active_cells & ~np.isnan(dem_trans) & (dem_trans>mean_sea_level)
landrows, landcols = land_cells.nonzero()
lrcec = {0:np.column_stack([np.zeros_like(landrows),landrows,landcols,dem_trans[land_cells],conductance*np.ones_like(landrows)])} # this drain will be applied to all stress periods
drn = flopy.modflow.ModflowDrn(mf, stress_period_data=lrcec)
#%% # Add recharge condition
# steady state, units in [m/day]?
rch = flopy.modflow.ModflowRch(mf, nrchop=3, rech=1.4e-3) # from https://pubs.usgs.gov/wsp/2447/report.pdf
#%% # Add OC package to the MODFLOW model
spd = {(0, 0): ['print head', 'print budget', 'save head', 'save budget']}
oc = flopy.modflow.ModflowOc(mf, stress_period_data=spd, compact=True)
#%% # Add PCG package to the MODFLOW model
pcg = flopy.modflow.ModflowPcg(mf)
#%% # Write the MODFLOW model input files
mf.write_input()
#%% # Run the MODFLOW model
success, buff = mf.run_model()
#%%
"""
Post-Processing the Results
Now that we have successfully built and run our MODFLOW model, we can look at the results.
MODFLOW writes the simulated heads to a binary data output file.
We cannot look at these heads with a text editor, but flopy has a binary utility that can be used to read the heads.
The following statements will read the binary head file and create a plot of simulated heads for layer 1:
"""
import flopy.utils.binaryfile as bf
from mpl_toolkits.axes_grid1 import make_axes_locatable
plt.subplot(1,1,1,aspect='equal')
hds = bf.HeadFile(os.path.join(work_dir,modelname+'.hds'))
head = hds.get_data(totim=hds.get_times()[-1])
head[head<-100] = np.nan
#levels = np.arange(1,10,1)
extent = (delr/2., Lx - delr/2., Ly - delc/2., delc/2.)
# headplot = plt.contour(head[0, :, :], levels=levels, extent=extent) #
headplot = plt.contour(head[0, :, :], extent=extent)
plt.xlabel('Lx')
plt.ylabel('Ly')
plt.colorbar(headplot) # add a colorbar for the head contours
#plt.colorbar.set_label('heads')
plt.savefig('CheqModel1a.png')
genu.quick_plot(head) # plots heads with color gradient
genu.quick_plot(dem_trans) # plots elevations
#%%
"""
Flopy also has some pre-canned plotting capabilities that can be accessed using the ModelMap class.
The following code shows how to use the modelmap class to plot boundary conditions (IBOUND),
plot the grid, plot head contours, and plot vectors:
"""
fig = plt.figure(figsize=(10,10))
ax = fig.add_subplot(1, 1, 1, aspect='equal')
hds = bf.HeadFile(modelname+'.hds')
times = hds.get_times()
head = hds.get_data(totim=times[-1])
levels = np.linspace(0, 10, 11)
cbb = bf.CellBudgetFile(modelname+'.cbc')
kstpkper_list = cbb.get_kstpkper()
frf = cbb.get_data(text='FLOW RIGHT FACE', totim=times[-1])[0]
fff = cbb.get_data(text='FLOW FRONT FACE', totim=times[-1])[0]
#%%
"""
The pre-canned plotting does not appear to support averaging to reduce nrow and ncol
for display, which makes plotting a large grid difficult (see the block-averaging sketch
after this docstring). The commented section below uses the modelmap class from
Tutorial 1, followed by the plotting approach from the Henry Problem.
"""
#modelmap = flopy.plot.ModelMap(model=mf, layer=0)
#qm = modelmap.plot_ibound()
#lc = modelmap.plot_grid() # Need to fix grid to have fewer rows and columns
#cs = modelmap.contour_array(head, levels=levels)
#quiver = modelmap.plot_discharge(frf, fff, head=head)
#plt.savefig('CheqModel1b.png')
"""
# Load data (when implementing SEAWAT)
ucnobj = bf.UcnFile('MT3D001.UCN', model=swt)
times = ucnobj.get_times()
concentration = ucnobj.get_data(totim=times[-1])
"""
# Average flows to cell centers
qx_avg = np.empty(frf.shape, dtype=frf.dtype)
qx_avg[:, :, 1:] = 0.5 * (frf[:, :, 0:ncol-1] + frf[:, :, 1:ncol])
qx_avg[:, :, 0] = 0.5 * frf[:, :, 0]
qy_avg = np.empty(fff.shape, dtype=fff.dtype)
# FLOW FRONT FACE is the flow between adjacent rows, so average along the row axis
qy_avg[:, 1:, :] = 0.5 * (fff[:, 0:nrow-1, :] + fff[:, 1:nrow, :])
qy_avg[:, 0, :] = 0.5 * fff[:, 0, :]
# Make the plot
fig = plt.figure(figsize=(10, 10))
ax = fig.add_subplot(1, 1, 1, aspect='equal')
#ax.imshow(concentration[:, 0, :], interpolation='nearest',
# extent=(0, Lx, 0, Ly))
y, x, z = dis.get_node_coordinates()
X, Y = np.meshgrid(x, y)
iskip = 3
# Plot layer-0 discharge vectors in plan view; the array slices must match the X, Y meshgrid shape
ax.quiver(X[::iskip, ::iskip], Y[::iskip, ::iskip],
          qx_avg[0, ::iskip, ::iskip], -qy_avg[0, ::iskip, ::iskip],
          color='k', scale=5, headwidth=3, headlength=2,
          headaxislength=2, width=0.0025)
plt.savefig('CheqModel1b.png')
plt.show()
#%%
"""
Post-Processing the Results
Once again, we can read heads from the MODFLOW binary output file, using the flopy.utils.binaryfile module.
Included with the HeadFile object are several methods that we will use here:
 * get_times() will return a list of times contained in the binary head file
 * get_data() will return a three-dimensional head array for the specified time
 * get_ts() will return a time series array [ntimes, headval] for the specified cell
Using these methods, we can create head plots and hydrographs from the model results:
"""
# headfile and budget file objects already created
# Setup contour parameters (levels already set)
extent = (delr/2., Lx - delr/2., delc/2., Ly - delc/2.)
print('Levels: ', levels)
print('Extent: ', extent)
# Make the plots
#Print statistics
print('Head statistics')
print(' min: ', head.min())
print(' max: ', head.max())
print(' std: ', head.std())
"""
Again, commented out section using modelmap
"""
## Flow right face and flow front face already extracted
##%%
##Create the plot
#f = plt.figure()
#plt.subplot(1, 1, 1, aspect='equal')
#
#
#modelmap = flopy.plot.ModelMap(model=mf, layer=0)
#qm = modelmap.plot_ibound()
##
## lc = modelmap.plot_grid()
#qm = modelmap.plot_bc('GHB', alpha=0.5)
#cs = modelmap.contour_array(head, levels=levels)
#plt.clabel(cs, inline=1, fontsize=10, fmt='%1.1f', zorder=11)
#quiver = modelmap.plot_discharge(frf, fff, head=head)
#
#mfc='black'
#plt.plot(lw=0, marker='o', markersize=8,
# markeredgewidth=0.5,
# markeredgecolor='black', markerfacecolor=mfc, zorder=9)
#plt.savefig('CheqModel2-{}.png')
"""
From <NAME>
"""
fig = plt.figure(figsize=(10, 10))
ax = fig.add_subplot(1, 1, 1, aspect='equal')
# Show layer 0 in plan view so the array orientation matches the (0, Lx, 0, Ly) extent
im = ax.imshow(head[0, :, :], interpolation='nearest',
               extent=(0, Lx, 0, Ly))
ax.set_title('Simulated Heads')
|
[
"flopy.modflow.ModflowOc",
"numpy.amin",
"cgw.utils.raster_utils.subsection_griddata",
"numpy.empty",
"numpy.ones",
"numpy.isnan",
"cgw.utils.general_utils.quick_plot",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.contour",
"cgw.utils.feature_utils.nodes_to_cc",
"os.path.join",
"numpy.meshgrid",
"numpy.zeros_like",
"flopy.modflow.ModflowPcg",
"flopy.utils.binaryfile.HeadFile",
"flopy.modflow.ModflowGhb",
"matplotlib.pyplot.colorbar",
"numpy.linspace",
"flopy.modflow.ModflowDrn",
"cgw.utils.feature_utils.gridpts_in_shp",
"flopy.modflow.Modflow",
"flopy.modflow.ModflowRch",
"matplotlib.pyplot.show",
"numpy.ones_like",
"cgw.utils.feature_utils.shp_to_grid",
"flopy.utils.binaryfile.CellBudgetFile",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.subplot",
"cgw.utils.raster_utils.load_geotif",
"flopy.modflow.ModflowDis",
"sys.path.insert",
"flopy.modflow.ModflowLpf",
"numpy.amax",
"flopy.modflow.ModflowBas",
"flopy.seawat.Seawat",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.savefig",
"cgw.utils.general_utils.define_mask"
] |
[((261, 293), 'sys.path.insert', 'sys.path.insert', (['(0)', 'cgw_code_dir'], {}), '(0, cgw_code_dir)\n', (276, 293), False, 'import sys, os\n'), ((537, 607), 'flopy.modflow.Modflow', 'flopy.modflow.Modflow', (['modelname'], {'exe_name': '"""mf2005"""', 'model_ws': 'work_dir'}), "(modelname, exe_name='mf2005', model_ws=work_dir)\n", (558, 607), False, 'import flopy\n'), ((614, 662), 'flopy.seawat.Seawat', 'flopy.seawat.Seawat', (['modelname'], {'exe_name': '"""swtv4"""'}), "(modelname, exe_name='swtv4')\n", (633, 662), False, 'import flopy\n'), ((849, 904), 'os.path.join', 'os.path.join', (['data_dir', '"""Chequesset_Model_Area_UTM.shp"""'], {}), "(data_dir, 'Chequesset_Model_Area_UTM.shp')\n", (861, 904), False, 'import sys, os\n'), ((1088, 1124), 'cgw.utils.feature_utils.shp_to_grid', 'shpu.shp_to_grid', ([], {}), '(**shp_to_grid_dict)\n', (1104, 1124), True, 'from cgw.utils import feature_utils as shpu\n'), ((1454, 1506), 'cgw.utils.feature_utils.nodes_to_cc', 'shpu.nodes_to_cc', (['[X_nodes, Y_nodes]', 'grid_transform'], {}), '([X_nodes, Y_nodes], grid_transform)\n', (1470, 1506), True, 'from cgw.utils import feature_utils as shpu\n'), ((1575, 1613), 'cgw.utils.feature_utils.gridpts_in_shp', 'shpu.gridpts_in_shp', (['model_polygon', 'cc'], {}), '(model_polygon, cc)\n', (1594, 1613), True, 'from cgw.utils import feature_utils as shpu\n'), ((1629, 1659), 'cgw.utils.general_utils.define_mask', 'genu.define_mask', (['cc', '[ir, ic]'], {}), '(cc, [ir, ic])\n', (1645, 1659), True, 'from cgw.utils import general_utils as genu\n'), ((2117, 2162), 'os.path.join', 'os.path.join', (['data_dir', '"""Cheq10mx10m_UTM.tif"""'], {}), "(data_dir, 'Cheq10mx10m_UTM.tif')\n", (2129, 2162), False, 'import sys, os\n'), ((2210, 2238), 'cgw.utils.raster_utils.load_geotif', 'rastu.load_geotif', (['dem_fname'], {}), '(dem_fname)\n', (2227, 2238), True, 'from cgw.utils import raster_utils as rastu\n'), ((2996, 3035), 'cgw.utils.raster_utils.subsection_griddata', 'rastu.subsection_griddata', ([], {}), '(**trans_dict)\n', (3021, 3035), True, 'from cgw.utils import raster_utils as rastu\n'), ((3081, 3107), 'cgw.utils.general_utils.quick_plot', 'genu.quick_plot', (['dem_trans'], {}), '(dem_trans)\n', (3096, 3107), True, 'from cgw.utils import general_utils as genu\n'), ((4694, 4793), 'flopy.modflow.ModflowDis', 'flopy.modflow.ModflowDis', (['mf', 'nlay', 'nrow', 'ncol'], {'delr': 'delr', 'delc': 'delc', 'top': 'dem_trans', 'botm': 'botm'}), '(mf, nlay, nrow, ncol, delr=delr, delc=delc, top=\n dem_trans, botm=botm)\n', (4718, 4793), False, 'import flopy\n'), ((6408, 6451), 'numpy.ones', 'np.ones', (['(nlay, nrow, ncol)'], {'dtype': 'np.int32'}), '((nlay, nrow, ncol), dtype=np.int32)\n', (6415, 6451), True, 'import numpy as np\n'), ((6661, 6684), 'cgw.utils.general_utils.quick_plot', 'genu.quick_plot', (['ibound'], {}), '(ibound)\n', (6676, 6684), True, 'from cgw.utils import general_utils as genu\n'), ((6783, 6828), 'numpy.ones', 'np.ones', (['(nlay, nrow, ncol)'], {'dtype': 'np.float32'}), '((nlay, nrow, ncol), dtype=np.float32)\n', (6790, 6828), True, 'import numpy as np\n'), ((7097, 7118), 'cgw.utils.general_utils.quick_plot', 'genu.quick_plot', (['strt'], {}), '(strt)\n', (7112, 7118), True, 'from cgw.utils import general_utils as genu\n'), ((7155, 7209), 'flopy.modflow.ModflowBas', 'flopy.modflow.ModflowBas', (['mf'], {'ibound': 'ibound', 'strt': 'strt'}), '(mf, ibound=ibound, strt=strt)\n', (7179, 7209), False, 'import flopy\n'), ((7330, 7367), 'numpy.ones', 'np.ones', (['(nlay, nrow, ncol)', 'np.float'], {}), 
'((nlay, nrow, ncol), np.float)\n', (7337, 7367), True, 'import numpy as np\n'), ((7436, 7473), 'numpy.ones', 'np.ones', (['(nlay, nrow, ncol)', 'np.float'], {}), '((nlay, nrow, ncol), np.float)\n', (7443, 7473), True, 'import numpy as np\n'), ((7562, 7619), 'flopy.modflow.ModflowLpf', 'flopy.modflow.ModflowLpf', (['mf'], {'hk': 'hk1', 'vka': 'vka1', 'ipakcb': '(53)'}), '(mf, hk=hk1, vka=vka1, ipakcb=53)\n', (7586, 7619), False, 'import flopy\n'), ((9327, 9394), 'flopy.modflow.ModflowGhb', 'flopy.modflow.ModflowGhb', (['mf'], {'stress_period_data': 'stress_period_data'}), '(mf, stress_period_data=stress_period_data)\n', (9351, 9394), False, 'import flopy\n'), ((11186, 11240), 'flopy.modflow.ModflowDrn', 'flopy.modflow.ModflowDrn', (['mf'], {'stress_period_data': 'lrcec'}), '(mf, stress_period_data=lrcec)\n', (11210, 11240), False, 'import flopy\n'), ((11450, 11501), 'flopy.modflow.ModflowRch', 'flopy.modflow.ModflowRch', (['mf'], {'nrchop': '(3)', 'rech': '(0.0014)'}), '(mf, nrchop=3, rech=0.0014)\n', (11474, 11501), False, 'import flopy\n'), ((11678, 11743), 'flopy.modflow.ModflowOc', 'flopy.modflow.ModflowOc', (['mf'], {'stress_period_data': 'spd', 'compact': '(True)'}), '(mf, stress_period_data=spd, compact=True)\n', (11701, 11743), False, 'import flopy\n'), ((11797, 11825), 'flopy.modflow.ModflowPcg', 'flopy.modflow.ModflowPcg', (['mf'], {}), '(mf)\n', (11821, 11825), False, 'import flopy\n'), ((12479, 12515), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(1)', '(1)'], {'aspect': '"""equal"""'}), "(1, 1, 1, aspect='equal')\n", (12490, 12515), True, 'import matplotlib.pyplot as plt\n'), ((12819, 12860), 'matplotlib.pyplot.contour', 'plt.contour', (['head[0, :, :]'], {'extent': 'extent'}), '(head[0, :, :], extent=extent)\n', (12830, 12860), True, 'import matplotlib.pyplot as plt\n'), ((12862, 12878), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Lx"""'], {}), "('Lx')\n", (12872, 12878), True, 'import matplotlib.pyplot as plt\n'), ((12880, 12896), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Ly"""'], {}), "('Ly')\n", (12890, 12896), True, 'import matplotlib.pyplot as plt\n'), ((12898, 12920), 'matplotlib.pyplot.colorbar', 'plt.colorbar', (['headplot'], {}), '(headplot)\n', (12910, 12920), True, 'import matplotlib.pyplot as plt\n'), ((12982, 13012), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""CheqModel1a.png"""'], {}), "('CheqModel1a.png')\n", (12993, 13012), True, 'import matplotlib.pyplot as plt\n'), ((13014, 13035), 'cgw.utils.general_utils.quick_plot', 'genu.quick_plot', (['head'], {}), '(head)\n', (13029, 13035), True, 'from cgw.utils import general_utils as genu\n'), ((13071, 13097), 'cgw.utils.general_utils.quick_plot', 'genu.quick_plot', (['dem_trans'], {}), '(dem_trans)\n', (13086, 13097), True, 'from cgw.utils import general_utils as genu\n'), ((13391, 13419), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(10, 10)'}), '(figsize=(10, 10))\n', (13401, 13419), True, 'import matplotlib.pyplot as plt\n'), ((13475, 13506), 'flopy.utils.binaryfile.HeadFile', 'bf.HeadFile', (["(modelname + '.hds')"], {}), "(modelname + '.hds')\n", (13486, 13506), True, 'import flopy.utils.binaryfile as bf\n'), ((13578, 13600), 'numpy.linspace', 'np.linspace', (['(0)', '(10)', '(11)'], {}), '(0, 10, 11)\n', (13589, 13600), True, 'import numpy as np\n'), ((13610, 13647), 'flopy.utils.binaryfile.CellBudgetFile', 'bf.CellBudgetFile', (["(modelname + '.cbc')"], {}), "(modelname + '.cbc')\n", (13627, 13647), True, 'import flopy.utils.binaryfile as bf\n'), ((14623, 14659), 
'numpy.empty', 'np.empty', (['frf.shape'], {'dtype': 'frf.dtype'}), '(frf.shape, dtype=frf.dtype)\n', (14631, 14659), True, 'import numpy as np\n'), ((14776, 14812), 'numpy.empty', 'np.empty', (['fff.shape'], {'dtype': 'fff.dtype'}), '(fff.shape, dtype=fff.dtype)\n', (14784, 14812), True, 'import numpy as np\n'), ((14945, 14973), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(10, 10)'}), '(figsize=(10, 10))\n', (14955, 14973), True, 'import matplotlib.pyplot as plt\n'), ((15166, 15183), 'numpy.meshgrid', 'np.meshgrid', (['x', 'y'], {}), '(x, y)\n', (15177, 15183), True, 'import numpy as np\n'), ((15423, 15453), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""CheqModel1b.png"""'], {}), "('CheqModel1b.png')\n", (15434, 15453), True, 'import matplotlib.pyplot as plt\n'), ((15455, 15465), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (15463, 15465), True, 'import matplotlib.pyplot as plt\n'), ((17164, 17192), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(10, 10)'}), '(figsize=(10, 10))\n', (17174, 17192), True, 'import matplotlib.pyplot as plt\n'), ((3138, 3152), 'numpy.amax', 'np.amax', (['dem_X'], {}), '(dem_X)\n', (3145, 3152), True, 'import numpy as np\n'), ((3153, 3167), 'numpy.amin', 'np.amin', (['dem_X'], {}), '(dem_X)\n', (3160, 3167), True, 'import numpy as np\n'), ((3174, 3188), 'numpy.amax', 'np.amax', (['dem_Y'], {}), '(dem_Y)\n', (3181, 3188), True, 'import numpy as np\n'), ((3189, 3203), 'numpy.amin', 'np.amin', (['dem_Y'], {}), '(dem_Y)\n', (3196, 3203), True, 'import numpy as np\n'), ((12532, 12574), 'os.path.join', 'os.path.join', (['work_dir', "(modelname + '.hds')"], {}), "(work_dir, modelname + '.hds')\n", (12544, 12574), False, 'import sys, os\n'), ((6608, 6627), 'numpy.isnan', 'np.isnan', (['dem_trans'], {}), '(dem_trans)\n', (6616, 6627), True, 'import numpy as np\n'), ((6877, 6896), 'numpy.isnan', 'np.isnan', (['dem_trans'], {}), '(dem_trans)\n', (6885, 6896), True, 'import numpy as np\n'), ((10905, 10924), 'numpy.isnan', 'np.isnan', (['dem_trans'], {}), '(dem_trans)\n', (10913, 10924), True, 'import numpy as np\n'), ((11026, 11049), 'numpy.zeros_like', 'np.zeros_like', (['landrows'], {}), '(landrows)\n', (11039, 11049), True, 'import numpy as np\n'), ((6923, 6942), 'numpy.isnan', 'np.isnan', (['dem_trans'], {}), '(dem_trans)\n', (6931, 6942), True, 'import numpy as np\n'), ((11102, 11124), 'numpy.ones_like', 'np.ones_like', (['landrows'], {}), '(landrows)\n', (11114, 11124), True, 'import numpy as np\n')]
|
# Copyright 2017-2020 Fitbit, Inc
# SPDX-License-Identifier: Apache-2.0
"""
Invoke configuration for Golden Gate
"""
# First check that we are running in a Python >= 3.5 environment
from __future__ import print_function
import sys
if not (sys.version_info.major == 3 and sys.version_info.minor >= 5):
print(
"""You are using 'invoke' in a Python 2.x environment, but Python >= 3.5 is required.
You have probably not activated the 'gg' conda environment, please check the 'Getting Started'
guide for more details on how to setup your environment""")
sys.exit(1)
# Imports
import os
import subprocess
from invoke import Collection, Config, task
from . import android
from . import apple
from . import pylon
from . import native
from . import clean
from . import wasm
from . import doc
from . import docker
# Assuming you haven't moved the default location of '.git', the .git/ folder (even for submodules)
# will be at the root of the repo. Thus, find the folder .git/ is within and assume that's the root
GIT_DIR = subprocess.check_output("git rev-parse --show-toplevel",
shell=True).strip().decode("utf-8")
ROOT_DIR = GIT_DIR
# Initialize constants that are common among all platforms/products
def initialize_constants(cfg):
cfg.C = {}
# We can't access the paths variable by using dot notation, since there is a paths() function
    # on a Config object. We must use dictionary syntax.
# http://docs.pyinvoke.org/en/0.15.0/api/config.html#module-invoke.config
cfg.C.ROOT_DIR = ROOT_DIR
cfg.C.BIN_DIR = os.path.join(cfg.C.ROOT_DIR, "bin")
cfg.C.BUILD_ROOT_DIR = os.path.join(cfg.C.ROOT_DIR, "xp/build")
cfg.C.BUILD_DIR = os.path.join(cfg.C.ROOT_DIR, "xp/build/cmake")
cfg.C.BUILD_DIR_NATIVE = os.path.join(cfg.C.BUILD_DIR, "native")
cfg.C.PLATFORM_DIR = os.path.join(cfg.C.ROOT_DIR, "platform")
cfg.C.APPS_DIR = os.path.join(cfg.C.BUILD_DIR_NATIVE, "apps")
cfg.C.APPLE_BUILD_TEMP_DIR = os.path.join(cfg.C.PLATFORM_DIR, "apple/output")
cfg.C.DOC_DIR = os.path.join(cfg.C.ROOT_DIR, "docs")
config = Config(project_location=ROOT_DIR)
initialize_constants(config)
# Add collections
ns = Collection()
ns.add_collection(android)
ns.add_collection(apple)
ns.add_collection(pylon)
ns.add_collection(native)
ns.add_collection(clean)
ns.add_collection(wasm)
ns.add_collection(doc)
ns.add_collection(docker)
# After collections are set up, set the config.
ns.configure(config)
ns.configure(android.config)
ns.configure(apple.config)
ns.configure(pylon.config)
|
[
"subprocess.check_output",
"invoke.Collection",
"invoke.Config",
"os.path.join",
"sys.exit"
] |
[((2173, 2206), 'invoke.Config', 'Config', ([], {'project_location': 'ROOT_DIR'}), '(project_location=ROOT_DIR)\n', (2179, 2206), False, 'from invoke import Collection, Config, task\n'), ((2260, 2272), 'invoke.Collection', 'Collection', ([], {}), '()\n', (2270, 2272), False, 'from invoke import Collection, Config, task\n'), ((557, 568), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (565, 568), False, 'import sys\n'), ((1596, 1631), 'os.path.join', 'os.path.join', (['cfg.C.ROOT_DIR', '"""bin"""'], {}), "(cfg.C.ROOT_DIR, 'bin')\n", (1608, 1631), False, 'import os\n'), ((1665, 1705), 'os.path.join', 'os.path.join', (['cfg.C.ROOT_DIR', '"""xp/build"""'], {}), "(cfg.C.ROOT_DIR, 'xp/build')\n", (1677, 1705), False, 'import os\n'), ((1739, 1785), 'os.path.join', 'os.path.join', (['cfg.C.ROOT_DIR', '"""xp/build/cmake"""'], {}), "(cfg.C.ROOT_DIR, 'xp/build/cmake')\n", (1751, 1785), False, 'import os\n'), ((1819, 1858), 'os.path.join', 'os.path.join', (['cfg.C.BUILD_DIR', '"""native"""'], {}), "(cfg.C.BUILD_DIR, 'native')\n", (1831, 1858), False, 'import os\n'), ((1892, 1932), 'os.path.join', 'os.path.join', (['cfg.C.ROOT_DIR', '"""platform"""'], {}), "(cfg.C.ROOT_DIR, 'platform')\n", (1904, 1932), False, 'import os\n'), ((1966, 2010), 'os.path.join', 'os.path.join', (['cfg.C.BUILD_DIR_NATIVE', '"""apps"""'], {}), "(cfg.C.BUILD_DIR_NATIVE, 'apps')\n", (1978, 2010), False, 'import os\n'), ((2044, 2092), 'os.path.join', 'os.path.join', (['cfg.C.PLATFORM_DIR', '"""apple/output"""'], {}), "(cfg.C.PLATFORM_DIR, 'apple/output')\n", (2056, 2092), False, 'import os\n'), ((2126, 2162), 'os.path.join', 'os.path.join', (['cfg.C.ROOT_DIR', '"""docs"""'], {}), "(cfg.C.ROOT_DIR, 'docs')\n", (2138, 2162), False, 'import os\n'), ((1026, 1094), 'subprocess.check_output', 'subprocess.check_output', (['"""git rev-parse --show-toplevel"""'], {'shell': '(True)'}), "('git rev-parse --show-toplevel', shell=True)\n", (1049, 1094), False, 'import subprocess\n')]
|
#!/usr/bin/env python
import xml.etree.ElementTree as ET
class brocade_arp(object):
"""Auto generated class.
"""
def __init__(self, **kwargs):
self._callback = kwargs.pop('callback')
def hide_arp_holder_system_max_arp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
hide_arp_holder = ET.SubElement(config, "hide-arp-holder", xmlns="urn:brocade.com:mgmt:brocade-arp")
system_max = ET.SubElement(hide_arp_holder, "system-max")
arp = ET.SubElement(system_max, "arp")
arp.text = kwargs.pop('arp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def hide_arp_holder_arp_entry_arp_ip_address(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
hide_arp_holder = ET.SubElement(config, "hide-arp-holder", xmlns="urn:brocade.com:mgmt:brocade-arp")
arp_entry = ET.SubElement(hide_arp_holder, "arp-entry")
arp_ip_address = ET.SubElement(arp_entry, "arp-ip-address")
arp_ip_address.text = kwargs.pop('arp_ip_address')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def hide_arp_holder_arp_entry_mac_address_value(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
hide_arp_holder = ET.SubElement(config, "hide-arp-holder", xmlns="urn:brocade.com:mgmt:brocade-arp")
arp_entry = ET.SubElement(hide_arp_holder, "arp-entry")
arp_ip_address_key = ET.SubElement(arp_entry, "arp-ip-address")
arp_ip_address_key.text = kwargs.pop('arp_ip_address')
mac_address_value = ET.SubElement(arp_entry, "mac-address-value")
mac_address_value.text = kwargs.pop('mac_address_value')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def hide_arp_holder_arp_entry_interfacename(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
hide_arp_holder = ET.SubElement(config, "hide-arp-holder", xmlns="urn:brocade.com:mgmt:brocade-arp")
arp_entry = ET.SubElement(hide_arp_holder, "arp-entry")
arp_ip_address_key = ET.SubElement(arp_entry, "arp-ip-address")
arp_ip_address_key.text = kwargs.pop('arp_ip_address')
interfacename = ET.SubElement(arp_entry, "interfacename")
interfacename.text = kwargs.pop('interfacename')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def hide_arp_holder_arp_entry_interfacetype_Port_channel_Port_channel(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
hide_arp_holder = ET.SubElement(config, "hide-arp-holder", xmlns="urn:brocade.com:mgmt:brocade-arp")
arp_entry = ET.SubElement(hide_arp_holder, "arp-entry")
arp_ip_address_key = ET.SubElement(arp_entry, "arp-ip-address")
arp_ip_address_key.text = kwargs.pop('arp_ip_address')
interfacetype = ET.SubElement(arp_entry, "interfacetype")
Port_channel = ET.SubElement(interfacetype, "Port-channel")
Port_channel = ET.SubElement(Port_channel, "Port-channel")
Port_channel.text = kwargs.pop('Port_channel')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def hide_arp_holder_arp_entry_interfacetype_GigabitEthernet_GigabitEthernet(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
hide_arp_holder = ET.SubElement(config, "hide-arp-holder", xmlns="urn:brocade.com:mgmt:brocade-arp")
arp_entry = ET.SubElement(hide_arp_holder, "arp-entry")
arp_ip_address_key = ET.SubElement(arp_entry, "arp-ip-address")
arp_ip_address_key.text = kwargs.pop('arp_ip_address')
interfacetype = ET.SubElement(arp_entry, "interfacetype")
GigabitEthernet = ET.SubElement(interfacetype, "GigabitEthernet")
GigabitEthernet = ET.SubElement(GigabitEthernet, "GigabitEthernet")
GigabitEthernet.text = kwargs.pop('GigabitEthernet')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def hide_arp_holder_arp_entry_interfacetype_TenGigabitEthernet_TenGigabitEthernet(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
hide_arp_holder = ET.SubElement(config, "hide-arp-holder", xmlns="urn:brocade.com:mgmt:brocade-arp")
arp_entry = ET.SubElement(hide_arp_holder, "arp-entry")
arp_ip_address_key = ET.SubElement(arp_entry, "arp-ip-address")
arp_ip_address_key.text = kwargs.pop('arp_ip_address')
interfacetype = ET.SubElement(arp_entry, "interfacetype")
TenGigabitEthernet = ET.SubElement(interfacetype, "TenGigabitEthernet")
TenGigabitEthernet = ET.SubElement(TenGigabitEthernet, "TenGigabitEthernet")
TenGigabitEthernet.text = kwargs.pop('TenGigabitEthernet')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def hide_arp_holder_arp_entry_interfacetype_FortyGigabitEthernet_FortyGigabitEthernet(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
hide_arp_holder = ET.SubElement(config, "hide-arp-holder", xmlns="urn:brocade.com:mgmt:brocade-arp")
arp_entry = ET.SubElement(hide_arp_holder, "arp-entry")
arp_ip_address_key = ET.SubElement(arp_entry, "arp-ip-address")
arp_ip_address_key.text = kwargs.pop('arp_ip_address')
interfacetype = ET.SubElement(arp_entry, "interfacetype")
FortyGigabitEthernet = ET.SubElement(interfacetype, "FortyGigabitEthernet")
FortyGigabitEthernet = ET.SubElement(FortyGigabitEthernet, "FortyGigabitEthernet")
FortyGigabitEthernet.text = kwargs.pop('FortyGigabitEthernet')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def hide_arp_holder_arp_entry_interfacetype_HundredGigabitEthernet_HundredGigabitEthernet(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
hide_arp_holder = ET.SubElement(config, "hide-arp-holder", xmlns="urn:brocade.com:mgmt:brocade-arp")
arp_entry = ET.SubElement(hide_arp_holder, "arp-entry")
arp_ip_address_key = ET.SubElement(arp_entry, "arp-ip-address")
arp_ip_address_key.text = kwargs.pop('arp_ip_address')
interfacetype = ET.SubElement(arp_entry, "interfacetype")
HundredGigabitEthernet = ET.SubElement(interfacetype, "HundredGigabitEthernet")
HundredGigabitEthernet = ET.SubElement(HundredGigabitEthernet, "HundredGigabitEthernet")
HundredGigabitEthernet.text = kwargs.pop('HundredGigabitEthernet')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def hide_arp_holder_arp_entry_interfacetype_Ve_Ve(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
hide_arp_holder = ET.SubElement(config, "hide-arp-holder", xmlns="urn:brocade.com:mgmt:brocade-arp")
arp_entry = ET.SubElement(hide_arp_holder, "arp-entry")
arp_ip_address_key = ET.SubElement(arp_entry, "arp-ip-address")
arp_ip_address_key.text = kwargs.pop('arp_ip_address')
interfacetype = ET.SubElement(arp_entry, "interfacetype")
Ve = ET.SubElement(interfacetype, "Ve")
Ve = ET.SubElement(Ve, "Ve")
Ve.text = kwargs.pop('Ve')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_arp_input_input_type_interface_interface_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_arp = ET.Element("get_arp")
config = get_arp
input = ET.SubElement(get_arp, "input")
input_type = ET.SubElement(input, "input-type")
interface = ET.SubElement(input_type, "interface")
interface_type = ET.SubElement(interface, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_arp_input_input_type_interface_interface_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_arp = ET.Element("get_arp")
config = get_arp
input = ET.SubElement(get_arp, "input")
input_type = ET.SubElement(input, "input-type")
interface = ET.SubElement(input_type, "interface")
interface_name = ET.SubElement(interface, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_arp_input_input_type_dynamic_dynamic(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_arp = ET.Element("get_arp")
config = get_arp
input = ET.SubElement(get_arp, "input")
input_type = ET.SubElement(input, "input-type")
dynamic = ET.SubElement(input_type, "dynamic")
dynamic = ET.SubElement(dynamic, "dynamic")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_arp_input_input_type_static_static(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_arp = ET.Element("get_arp")
config = get_arp
input = ET.SubElement(get_arp, "input")
input_type = ET.SubElement(input, "input-type")
static = ET.SubElement(input_type, "static")
static = ET.SubElement(static, "static")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_arp_input_input_type_ip_ip_address(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_arp = ET.Element("get_arp")
config = get_arp
input = ET.SubElement(get_arp, "input")
input_type = ET.SubElement(input, "input-type")
ip = ET.SubElement(input_type, "ip")
ip_address = ET.SubElement(ip, "ip-address")
ip_address.text = kwargs.pop('ip_address')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_arp_output_arp_entry_ip_address(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_arp = ET.Element("get_arp")
config = get_arp
output = ET.SubElement(get_arp, "output")
arp_entry = ET.SubElement(output, "arp-entry")
ip_address = ET.SubElement(arp_entry, "ip-address")
ip_address.text = kwargs.pop('ip_address')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_arp_output_arp_entry_mac_address(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_arp = ET.Element("get_arp")
config = get_arp
output = ET.SubElement(get_arp, "output")
arp_entry = ET.SubElement(output, "arp-entry")
ip_address_key = ET.SubElement(arp_entry, "ip-address")
ip_address_key.text = kwargs.pop('ip_address')
mac_address = ET.SubElement(arp_entry, "mac-address")
mac_address.text = kwargs.pop('mac_address')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_arp_output_arp_entry_interface_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_arp = ET.Element("get_arp")
config = get_arp
output = ET.SubElement(get_arp, "output")
arp_entry = ET.SubElement(output, "arp-entry")
ip_address_key = ET.SubElement(arp_entry, "ip-address")
ip_address_key.text = kwargs.pop('ip_address')
interface_type = ET.SubElement(arp_entry, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_arp_output_arp_entry_interface_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_arp = ET.Element("get_arp")
config = get_arp
output = ET.SubElement(get_arp, "output")
arp_entry = ET.SubElement(output, "arp-entry")
ip_address_key = ET.SubElement(arp_entry, "ip-address")
ip_address_key.text = kwargs.pop('ip_address')
interface_name = ET.SubElement(arp_entry, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_arp_output_arp_entry_is_resolved(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_arp = ET.Element("get_arp")
config = get_arp
output = ET.SubElement(get_arp, "output")
arp_entry = ET.SubElement(output, "arp-entry")
ip_address_key = ET.SubElement(arp_entry, "ip-address")
ip_address_key.text = kwargs.pop('ip_address')
is_resolved = ET.SubElement(arp_entry, "is-resolved")
is_resolved.text = kwargs.pop('is_resolved')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_arp_output_arp_entry_age(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_arp = ET.Element("get_arp")
config = get_arp
output = ET.SubElement(get_arp, "output")
arp_entry = ET.SubElement(output, "arp-entry")
ip_address_key = ET.SubElement(arp_entry, "ip-address")
ip_address_key.text = kwargs.pop('ip_address')
age = ET.SubElement(arp_entry, "age")
age.text = kwargs.pop('age')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_arp_output_arp_entry_entry_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_arp = ET.Element("get_arp")
config = get_arp
output = ET.SubElement(get_arp, "output")
arp_entry = ET.SubElement(output, "arp-entry")
ip_address_key = ET.SubElement(arp_entry, "ip-address")
ip_address_key.text = kwargs.pop('ip_address')
entry_type = ET.SubElement(arp_entry, "entry-type")
entry_type.text = kwargs.pop('entry_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def hide_arp_holder_system_max_arp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
hide_arp_holder = ET.SubElement(config, "hide-arp-holder", xmlns="urn:brocade.com:mgmt:brocade-arp")
system_max = ET.SubElement(hide_arp_holder, "system-max")
arp = ET.SubElement(system_max, "arp")
arp.text = kwargs.pop('arp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def hide_arp_holder_arp_entry_arp_ip_address(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
hide_arp_holder = ET.SubElement(config, "hide-arp-holder", xmlns="urn:brocade.com:mgmt:brocade-arp")
arp_entry = ET.SubElement(hide_arp_holder, "arp-entry")
arp_ip_address = ET.SubElement(arp_entry, "arp-ip-address")
arp_ip_address.text = kwargs.pop('arp_ip_address')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def hide_arp_holder_arp_entry_mac_address_value(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
hide_arp_holder = ET.SubElement(config, "hide-arp-holder", xmlns="urn:brocade.com:mgmt:brocade-arp")
arp_entry = ET.SubElement(hide_arp_holder, "arp-entry")
arp_ip_address_key = ET.SubElement(arp_entry, "arp-ip-address")
arp_ip_address_key.text = kwargs.pop('arp_ip_address')
mac_address_value = ET.SubElement(arp_entry, "mac-address-value")
mac_address_value.text = kwargs.pop('mac_address_value')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def hide_arp_holder_arp_entry_interfacename(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
hide_arp_holder = ET.SubElement(config, "hide-arp-holder", xmlns="urn:brocade.com:mgmt:brocade-arp")
arp_entry = ET.SubElement(hide_arp_holder, "arp-entry")
arp_ip_address_key = ET.SubElement(arp_entry, "arp-ip-address")
arp_ip_address_key.text = kwargs.pop('arp_ip_address')
interfacename = ET.SubElement(arp_entry, "interfacename")
interfacename.text = kwargs.pop('interfacename')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def hide_arp_holder_arp_entry_interfacetype_Port_channel_Port_channel(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
hide_arp_holder = ET.SubElement(config, "hide-arp-holder", xmlns="urn:brocade.com:mgmt:brocade-arp")
arp_entry = ET.SubElement(hide_arp_holder, "arp-entry")
arp_ip_address_key = ET.SubElement(arp_entry, "arp-ip-address")
arp_ip_address_key.text = kwargs.pop('arp_ip_address')
interfacetype = ET.SubElement(arp_entry, "interfacetype")
Port_channel = ET.SubElement(interfacetype, "Port-channel")
Port_channel = ET.SubElement(Port_channel, "Port-channel")
Port_channel.text = kwargs.pop('Port_channel')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def hide_arp_holder_arp_entry_interfacetype_GigabitEthernet_GigabitEthernet(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
hide_arp_holder = ET.SubElement(config, "hide-arp-holder", xmlns="urn:brocade.com:mgmt:brocade-arp")
arp_entry = ET.SubElement(hide_arp_holder, "arp-entry")
arp_ip_address_key = ET.SubElement(arp_entry, "arp-ip-address")
arp_ip_address_key.text = kwargs.pop('arp_ip_address')
interfacetype = ET.SubElement(arp_entry, "interfacetype")
GigabitEthernet = ET.SubElement(interfacetype, "GigabitEthernet")
GigabitEthernet = ET.SubElement(GigabitEthernet, "GigabitEthernet")
GigabitEthernet.text = kwargs.pop('GigabitEthernet')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def hide_arp_holder_arp_entry_interfacetype_TenGigabitEthernet_TenGigabitEthernet(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
hide_arp_holder = ET.SubElement(config, "hide-arp-holder", xmlns="urn:brocade.com:mgmt:brocade-arp")
arp_entry = ET.SubElement(hide_arp_holder, "arp-entry")
arp_ip_address_key = ET.SubElement(arp_entry, "arp-ip-address")
arp_ip_address_key.text = kwargs.pop('arp_ip_address')
interfacetype = ET.SubElement(arp_entry, "interfacetype")
TenGigabitEthernet = ET.SubElement(interfacetype, "TenGigabitEthernet")
TenGigabitEthernet = ET.SubElement(TenGigabitEthernet, "TenGigabitEthernet")
TenGigabitEthernet.text = kwargs.pop('TenGigabitEthernet')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def hide_arp_holder_arp_entry_interfacetype_FortyGigabitEthernet_FortyGigabitEthernet(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
hide_arp_holder = ET.SubElement(config, "hide-arp-holder", xmlns="urn:brocade.com:mgmt:brocade-arp")
arp_entry = ET.SubElement(hide_arp_holder, "arp-entry")
arp_ip_address_key = ET.SubElement(arp_entry, "arp-ip-address")
arp_ip_address_key.text = kwargs.pop('arp_ip_address')
interfacetype = ET.SubElement(arp_entry, "interfacetype")
FortyGigabitEthernet = ET.SubElement(interfacetype, "FortyGigabitEthernet")
FortyGigabitEthernet = ET.SubElement(FortyGigabitEthernet, "FortyGigabitEthernet")
FortyGigabitEthernet.text = kwargs.pop('FortyGigabitEthernet')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def hide_arp_holder_arp_entry_interfacetype_HundredGigabitEthernet_HundredGigabitEthernet(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
hide_arp_holder = ET.SubElement(config, "hide-arp-holder", xmlns="urn:brocade.com:mgmt:brocade-arp")
arp_entry = ET.SubElement(hide_arp_holder, "arp-entry")
arp_ip_address_key = ET.SubElement(arp_entry, "arp-ip-address")
arp_ip_address_key.text = kwargs.pop('arp_ip_address')
interfacetype = ET.SubElement(arp_entry, "interfacetype")
HundredGigabitEthernet = ET.SubElement(interfacetype, "HundredGigabitEthernet")
HundredGigabitEthernet = ET.SubElement(HundredGigabitEthernet, "HundredGigabitEthernet")
HundredGigabitEthernet.text = kwargs.pop('HundredGigabitEthernet')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def hide_arp_holder_arp_entry_interfacetype_Ve_Ve(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
hide_arp_holder = ET.SubElement(config, "hide-arp-holder", xmlns="urn:brocade.com:mgmt:brocade-arp")
arp_entry = ET.SubElement(hide_arp_holder, "arp-entry")
arp_ip_address_key = ET.SubElement(arp_entry, "arp-ip-address")
arp_ip_address_key.text = kwargs.pop('arp_ip_address')
interfacetype = ET.SubElement(arp_entry, "interfacetype")
Ve = ET.SubElement(interfacetype, "Ve")
Ve = ET.SubElement(Ve, "Ve")
Ve.text = kwargs.pop('Ve')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_arp_input_input_type_interface_interface_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_arp = ET.Element("get_arp")
config = get_arp
input = ET.SubElement(get_arp, "input")
input_type = ET.SubElement(input, "input-type")
interface = ET.SubElement(input_type, "interface")
interface_type = ET.SubElement(interface, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_arp_input_input_type_interface_interface_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_arp = ET.Element("get_arp")
config = get_arp
input = ET.SubElement(get_arp, "input")
input_type = ET.SubElement(input, "input-type")
interface = ET.SubElement(input_type, "interface")
interface_name = ET.SubElement(interface, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_arp_input_input_type_dynamic_dynamic(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_arp = ET.Element("get_arp")
config = get_arp
input = ET.SubElement(get_arp, "input")
input_type = ET.SubElement(input, "input-type")
dynamic = ET.SubElement(input_type, "dynamic")
dynamic = ET.SubElement(dynamic, "dynamic")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_arp_input_input_type_static_static(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_arp = ET.Element("get_arp")
config = get_arp
input = ET.SubElement(get_arp, "input")
input_type = ET.SubElement(input, "input-type")
static = ET.SubElement(input_type, "static")
static = ET.SubElement(static, "static")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_arp_input_input_type_ip_ip_address(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_arp = ET.Element("get_arp")
config = get_arp
input = ET.SubElement(get_arp, "input")
input_type = ET.SubElement(input, "input-type")
ip = ET.SubElement(input_type, "ip")
ip_address = ET.SubElement(ip, "ip-address")
ip_address.text = kwargs.pop('ip_address')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_arp_output_arp_entry_ip_address(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_arp = ET.Element("get_arp")
config = get_arp
output = ET.SubElement(get_arp, "output")
arp_entry = ET.SubElement(output, "arp-entry")
ip_address = ET.SubElement(arp_entry, "ip-address")
ip_address.text = kwargs.pop('ip_address')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_arp_output_arp_entry_mac_address(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_arp = ET.Element("get_arp")
config = get_arp
output = ET.SubElement(get_arp, "output")
arp_entry = ET.SubElement(output, "arp-entry")
ip_address_key = ET.SubElement(arp_entry, "ip-address")
ip_address_key.text = kwargs.pop('ip_address')
mac_address = ET.SubElement(arp_entry, "mac-address")
mac_address.text = kwargs.pop('mac_address')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_arp_output_arp_entry_interface_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_arp = ET.Element("get_arp")
config = get_arp
output = ET.SubElement(get_arp, "output")
arp_entry = ET.SubElement(output, "arp-entry")
ip_address_key = ET.SubElement(arp_entry, "ip-address")
ip_address_key.text = kwargs.pop('ip_address')
interface_type = ET.SubElement(arp_entry, "interface-type")
interface_type.text = kwargs.pop('interface_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_arp_output_arp_entry_interface_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_arp = ET.Element("get_arp")
config = get_arp
output = ET.SubElement(get_arp, "output")
arp_entry = ET.SubElement(output, "arp-entry")
ip_address_key = ET.SubElement(arp_entry, "ip-address")
ip_address_key.text = kwargs.pop('ip_address')
interface_name = ET.SubElement(arp_entry, "interface-name")
interface_name.text = kwargs.pop('interface_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_arp_output_arp_entry_is_resolved(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_arp = ET.Element("get_arp")
config = get_arp
output = ET.SubElement(get_arp, "output")
arp_entry = ET.SubElement(output, "arp-entry")
ip_address_key = ET.SubElement(arp_entry, "ip-address")
ip_address_key.text = kwargs.pop('ip_address')
is_resolved = ET.SubElement(arp_entry, "is-resolved")
is_resolved.text = kwargs.pop('is_resolved')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_arp_output_arp_entry_age(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_arp = ET.Element("get_arp")
config = get_arp
output = ET.SubElement(get_arp, "output")
arp_entry = ET.SubElement(output, "arp-entry")
ip_address_key = ET.SubElement(arp_entry, "ip-address")
ip_address_key.text = kwargs.pop('ip_address')
age = ET.SubElement(arp_entry, "age")
age.text = kwargs.pop('age')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def get_arp_output_arp_entry_entry_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_arp = ET.Element("get_arp")
config = get_arp
output = ET.SubElement(get_arp, "output")
arp_entry = ET.SubElement(output, "arp-entry")
ip_address_key = ET.SubElement(arp_entry, "ip-address")
ip_address_key.text = kwargs.pop('ip_address')
entry_type = ET.SubElement(arp_entry, "entry-type")
entry_type.text = kwargs.pop('entry_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
|
[
"xml.etree.ElementTree.Element",
"xml.etree.ElementTree.SubElement"
] |
[((335, 355), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""config"""'], {}), "('config')\n", (345, 355), True, 'import xml.etree.ElementTree as ET\n'), ((382, 469), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['config', '"""hide-arp-holder"""'], {'xmlns': '"""urn:brocade.com:mgmt:brocade-arp"""'}), "(config, 'hide-arp-holder', xmlns=\n 'urn:brocade.com:mgmt:brocade-arp')\n", (395, 469), True, 'import xml.etree.ElementTree as ET\n'), ((486, 530), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['hide_arp_holder', '"""system-max"""'], {}), "(hide_arp_holder, 'system-max')\n", (499, 530), True, 'import xml.etree.ElementTree as ET\n'), ((545, 577), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['system_max', '"""arp"""'], {}), "(system_max, 'arp')\n", (558, 577), True, 'import xml.etree.ElementTree as ET\n'), ((841, 861), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""config"""'], {}), "('config')\n", (851, 861), True, 'import xml.etree.ElementTree as ET\n'), ((888, 975), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['config', '"""hide-arp-holder"""'], {'xmlns': '"""urn:brocade.com:mgmt:brocade-arp"""'}), "(config, 'hide-arp-holder', xmlns=\n 'urn:brocade.com:mgmt:brocade-arp')\n", (901, 975), True, 'import xml.etree.ElementTree as ET\n'), ((991, 1034), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['hide_arp_holder', '"""arp-entry"""'], {}), "(hide_arp_holder, 'arp-entry')\n", (1004, 1034), True, 'import xml.etree.ElementTree as ET\n'), ((1060, 1102), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""arp-ip-address"""'], {}), "(arp_entry, 'arp-ip-address')\n", (1073, 1102), True, 'import xml.etree.ElementTree as ET\n'), ((1391, 1411), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""config"""'], {}), "('config')\n", (1401, 1411), True, 'import xml.etree.ElementTree as ET\n'), ((1438, 1525), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['config', '"""hide-arp-holder"""'], {'xmlns': '"""urn:brocade.com:mgmt:brocade-arp"""'}), "(config, 'hide-arp-holder', xmlns=\n 'urn:brocade.com:mgmt:brocade-arp')\n", (1451, 1525), True, 'import xml.etree.ElementTree as ET\n'), ((1541, 1584), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['hide_arp_holder', '"""arp-entry"""'], {}), "(hide_arp_holder, 'arp-entry')\n", (1554, 1584), True, 'import xml.etree.ElementTree as ET\n'), ((1614, 1656), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""arp-ip-address"""'], {}), "(arp_entry, 'arp-ip-address')\n", (1627, 1656), True, 'import xml.etree.ElementTree as ET\n'), ((1748, 1793), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""mac-address-value"""'], {}), "(arp_entry, 'mac-address-value')\n", (1761, 1793), True, 'import xml.etree.ElementTree as ET\n'), ((2084, 2104), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""config"""'], {}), "('config')\n", (2094, 2104), True, 'import xml.etree.ElementTree as ET\n'), ((2131, 2218), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['config', '"""hide-arp-holder"""'], {'xmlns': '"""urn:brocade.com:mgmt:brocade-arp"""'}), "(config, 'hide-arp-holder', xmlns=\n 'urn:brocade.com:mgmt:brocade-arp')\n", (2144, 2218), True, 'import xml.etree.ElementTree as ET\n'), ((2234, 2277), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['hide_arp_holder', '"""arp-entry"""'], {}), "(hide_arp_holder, 'arp-entry')\n", (2247, 2277), True, 'import xml.etree.ElementTree as ET\n'), ((2307, 2349), 'xml.etree.ElementTree.SubElement', 
'ET.SubElement', (['arp_entry', '"""arp-ip-address"""'], {}), "(arp_entry, 'arp-ip-address')\n", (2320, 2349), True, 'import xml.etree.ElementTree as ET\n'), ((2437, 2478), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""interfacename"""'], {}), "(arp_entry, 'interfacename')\n", (2450, 2478), True, 'import xml.etree.ElementTree as ET\n'), ((2787, 2807), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""config"""'], {}), "('config')\n", (2797, 2807), True, 'import xml.etree.ElementTree as ET\n'), ((2834, 2921), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['config', '"""hide-arp-holder"""'], {'xmlns': '"""urn:brocade.com:mgmt:brocade-arp"""'}), "(config, 'hide-arp-holder', xmlns=\n 'urn:brocade.com:mgmt:brocade-arp')\n", (2847, 2921), True, 'import xml.etree.ElementTree as ET\n'), ((2937, 2980), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['hide_arp_holder', '"""arp-entry"""'], {}), "(hide_arp_holder, 'arp-entry')\n", (2950, 2980), True, 'import xml.etree.ElementTree as ET\n'), ((3010, 3052), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""arp-ip-address"""'], {}), "(arp_entry, 'arp-ip-address')\n", (3023, 3052), True, 'import xml.etree.ElementTree as ET\n'), ((3140, 3181), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""interfacetype"""'], {}), "(arp_entry, 'interfacetype')\n", (3153, 3181), True, 'import xml.etree.ElementTree as ET\n'), ((3205, 3249), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['interfacetype', '"""Port-channel"""'], {}), "(interfacetype, 'Port-channel')\n", (3218, 3249), True, 'import xml.etree.ElementTree as ET\n'), ((3273, 3316), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['Port_channel', '"""Port-channel"""'], {}), "(Port_channel, 'Port-channel')\n", (3286, 3316), True, 'import xml.etree.ElementTree as ET\n'), ((3629, 3649), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""config"""'], {}), "('config')\n", (3639, 3649), True, 'import xml.etree.ElementTree as ET\n'), ((3676, 3763), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['config', '"""hide-arp-holder"""'], {'xmlns': '"""urn:brocade.com:mgmt:brocade-arp"""'}), "(config, 'hide-arp-holder', xmlns=\n 'urn:brocade.com:mgmt:brocade-arp')\n", (3689, 3763), True, 'import xml.etree.ElementTree as ET\n'), ((3779, 3822), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['hide_arp_holder', '"""arp-entry"""'], {}), "(hide_arp_holder, 'arp-entry')\n", (3792, 3822), True, 'import xml.etree.ElementTree as ET\n'), ((3852, 3894), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""arp-ip-address"""'], {}), "(arp_entry, 'arp-ip-address')\n", (3865, 3894), True, 'import xml.etree.ElementTree as ET\n'), ((3982, 4023), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""interfacetype"""'], {}), "(arp_entry, 'interfacetype')\n", (3995, 4023), True, 'import xml.etree.ElementTree as ET\n'), ((4050, 4097), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['interfacetype', '"""GigabitEthernet"""'], {}), "(interfacetype, 'GigabitEthernet')\n", (4063, 4097), True, 'import xml.etree.ElementTree as ET\n'), ((4124, 4173), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['GigabitEthernet', '"""GigabitEthernet"""'], {}), "(GigabitEthernet, 'GigabitEthernet')\n", (4137, 4173), True, 'import xml.etree.ElementTree as ET\n'), ((4498, 4518), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""config"""'], {}), "('config')\n", (4508, 4518), True, 'import 
xml.etree.ElementTree as ET\n'), ((4545, 4632), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['config', '"""hide-arp-holder"""'], {'xmlns': '"""urn:brocade.com:mgmt:brocade-arp"""'}), "(config, 'hide-arp-holder', xmlns=\n 'urn:brocade.com:mgmt:brocade-arp')\n", (4558, 4632), True, 'import xml.etree.ElementTree as ET\n'), ((4648, 4691), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['hide_arp_holder', '"""arp-entry"""'], {}), "(hide_arp_holder, 'arp-entry')\n", (4661, 4691), True, 'import xml.etree.ElementTree as ET\n'), ((4721, 4763), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""arp-ip-address"""'], {}), "(arp_entry, 'arp-ip-address')\n", (4734, 4763), True, 'import xml.etree.ElementTree as ET\n'), ((4851, 4892), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""interfacetype"""'], {}), "(arp_entry, 'interfacetype')\n", (4864, 4892), True, 'import xml.etree.ElementTree as ET\n'), ((4922, 4972), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['interfacetype', '"""TenGigabitEthernet"""'], {}), "(interfacetype, 'TenGigabitEthernet')\n", (4935, 4972), True, 'import xml.etree.ElementTree as ET\n'), ((5002, 5057), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['TenGigabitEthernet', '"""TenGigabitEthernet"""'], {}), "(TenGigabitEthernet, 'TenGigabitEthernet')\n", (5015, 5057), True, 'import xml.etree.ElementTree as ET\n'), ((5392, 5412), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""config"""'], {}), "('config')\n", (5402, 5412), True, 'import xml.etree.ElementTree as ET\n'), ((5439, 5526), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['config', '"""hide-arp-holder"""'], {'xmlns': '"""urn:brocade.com:mgmt:brocade-arp"""'}), "(config, 'hide-arp-holder', xmlns=\n 'urn:brocade.com:mgmt:brocade-arp')\n", (5452, 5526), True, 'import xml.etree.ElementTree as ET\n'), ((5542, 5585), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['hide_arp_holder', '"""arp-entry"""'], {}), "(hide_arp_holder, 'arp-entry')\n", (5555, 5585), True, 'import xml.etree.ElementTree as ET\n'), ((5615, 5657), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""arp-ip-address"""'], {}), "(arp_entry, 'arp-ip-address')\n", (5628, 5657), True, 'import xml.etree.ElementTree as ET\n'), ((5745, 5786), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""interfacetype"""'], {}), "(arp_entry, 'interfacetype')\n", (5758, 5786), True, 'import xml.etree.ElementTree as ET\n'), ((5818, 5870), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['interfacetype', '"""FortyGigabitEthernet"""'], {}), "(interfacetype, 'FortyGigabitEthernet')\n", (5831, 5870), True, 'import xml.etree.ElementTree as ET\n'), ((5902, 5961), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['FortyGigabitEthernet', '"""FortyGigabitEthernet"""'], {}), "(FortyGigabitEthernet, 'FortyGigabitEthernet')\n", (5915, 5961), True, 'import xml.etree.ElementTree as ET\n'), ((6304, 6324), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""config"""'], {}), "('config')\n", (6314, 6324), True, 'import xml.etree.ElementTree as ET\n'), ((6351, 6438), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['config', '"""hide-arp-holder"""'], {'xmlns': '"""urn:brocade.com:mgmt:brocade-arp"""'}), "(config, 'hide-arp-holder', xmlns=\n 'urn:brocade.com:mgmt:brocade-arp')\n", (6364, 6438), True, 'import xml.etree.ElementTree as ET\n'), ((6454, 6497), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['hide_arp_holder', '"""arp-entry"""'], 
{}), "(hide_arp_holder, 'arp-entry')\n", (6467, 6497), True, 'import xml.etree.ElementTree as ET\n'), ((6527, 6569), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""arp-ip-address"""'], {}), "(arp_entry, 'arp-ip-address')\n", (6540, 6569), True, 'import xml.etree.ElementTree as ET\n'), ((6657, 6698), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""interfacetype"""'], {}), "(arp_entry, 'interfacetype')\n", (6670, 6698), True, 'import xml.etree.ElementTree as ET\n'), ((6732, 6786), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['interfacetype', '"""HundredGigabitEthernet"""'], {}), "(interfacetype, 'HundredGigabitEthernet')\n", (6745, 6786), True, 'import xml.etree.ElementTree as ET\n'), ((6820, 6883), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['HundredGigabitEthernet', '"""HundredGigabitEthernet"""'], {}), "(HundredGigabitEthernet, 'HundredGigabitEthernet')\n", (6833, 6883), True, 'import xml.etree.ElementTree as ET\n'), ((7190, 7210), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""config"""'], {}), "('config')\n", (7200, 7210), True, 'import xml.etree.ElementTree as ET\n'), ((7237, 7324), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['config', '"""hide-arp-holder"""'], {'xmlns': '"""urn:brocade.com:mgmt:brocade-arp"""'}), "(config, 'hide-arp-holder', xmlns=\n 'urn:brocade.com:mgmt:brocade-arp')\n", (7250, 7324), True, 'import xml.etree.ElementTree as ET\n'), ((7340, 7383), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['hide_arp_holder', '"""arp-entry"""'], {}), "(hide_arp_holder, 'arp-entry')\n", (7353, 7383), True, 'import xml.etree.ElementTree as ET\n'), ((7413, 7455), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""arp-ip-address"""'], {}), "(arp_entry, 'arp-ip-address')\n", (7426, 7455), True, 'import xml.etree.ElementTree as ET\n'), ((7543, 7584), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""interfacetype"""'], {}), "(arp_entry, 'interfacetype')\n", (7556, 7584), True, 'import xml.etree.ElementTree as ET\n'), ((7598, 7632), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['interfacetype', '"""Ve"""'], {}), "(interfacetype, 'Ve')\n", (7611, 7632), True, 'import xml.etree.ElementTree as ET\n'), ((7646, 7669), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['Ve', '"""Ve"""'], {}), "(Ve, 'Ve')\n", (7659, 7669), True, 'import xml.etree.ElementTree as ET\n'), ((7940, 7960), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""config"""'], {}), "('config')\n", (7950, 7960), True, 'import xml.etree.ElementTree as ET\n'), ((7979, 8000), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""get_arp"""'], {}), "('get_arp')\n", (7989, 8000), True, 'import xml.etree.ElementTree as ET\n'), ((8042, 8073), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['get_arp', '"""input"""'], {}), "(get_arp, 'input')\n", (8055, 8073), True, 'import xml.etree.ElementTree as ET\n'), ((8095, 8129), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['input', '"""input-type"""'], {}), "(input, 'input-type')\n", (8108, 8129), True, 'import xml.etree.ElementTree as ET\n'), ((8150, 8188), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['input_type', '"""interface"""'], {}), "(input_type, 'interface')\n", (8163, 8188), True, 'import xml.etree.ElementTree as ET\n'), ((8214, 8256), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['interface', '"""interface-type"""'], {}), "(interface, 'interface-type')\n", (8227, 8256), True, 'import 
xml.etree.ElementTree as ET\n'), ((8551, 8571), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""config"""'], {}), "('config')\n", (8561, 8571), True, 'import xml.etree.ElementTree as ET\n'), ((8590, 8611), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""get_arp"""'], {}), "('get_arp')\n", (8600, 8611), True, 'import xml.etree.ElementTree as ET\n'), ((8653, 8684), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['get_arp', '"""input"""'], {}), "(get_arp, 'input')\n", (8666, 8684), True, 'import xml.etree.ElementTree as ET\n'), ((8706, 8740), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['input', '"""input-type"""'], {}), "(input, 'input-type')\n", (8719, 8740), True, 'import xml.etree.ElementTree as ET\n'), ((8761, 8799), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['input_type', '"""interface"""'], {}), "(input_type, 'interface')\n", (8774, 8799), True, 'import xml.etree.ElementTree as ET\n'), ((8825, 8867), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['interface', '"""interface-name"""'], {}), "(interface, 'interface-name')\n", (8838, 8867), True, 'import xml.etree.ElementTree as ET\n'), ((9153, 9173), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""config"""'], {}), "('config')\n", (9163, 9173), True, 'import xml.etree.ElementTree as ET\n'), ((9192, 9213), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""get_arp"""'], {}), "('get_arp')\n", (9202, 9213), True, 'import xml.etree.ElementTree as ET\n'), ((9255, 9286), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['get_arp', '"""input"""'], {}), "(get_arp, 'input')\n", (9268, 9286), True, 'import xml.etree.ElementTree as ET\n'), ((9308, 9342), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['input', '"""input-type"""'], {}), "(input, 'input-type')\n", (9321, 9342), True, 'import xml.etree.ElementTree as ET\n'), ((9361, 9397), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['input_type', '"""dynamic"""'], {}), "(input_type, 'dynamic')\n", (9374, 9397), True, 'import xml.etree.ElementTree as ET\n'), ((9416, 9449), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['dynamic', '"""dynamic"""'], {}), "(dynamic, 'dynamic')\n", (9429, 9449), True, 'import xml.etree.ElementTree as ET\n'), ((9674, 9694), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""config"""'], {}), "('config')\n", (9684, 9694), True, 'import xml.etree.ElementTree as ET\n'), ((9713, 9734), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""get_arp"""'], {}), "('get_arp')\n", (9723, 9734), True, 'import xml.etree.ElementTree as ET\n'), ((9776, 9807), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['get_arp', '"""input"""'], {}), "(get_arp, 'input')\n", (9789, 9807), True, 'import xml.etree.ElementTree as ET\n'), ((9829, 9863), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['input', '"""input-type"""'], {}), "(input, 'input-type')\n", (9842, 9863), True, 'import xml.etree.ElementTree as ET\n'), ((9881, 9916), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['input_type', '"""static"""'], {}), "(input_type, 'static')\n", (9894, 9916), True, 'import xml.etree.ElementTree as ET\n'), ((9934, 9965), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['static', '"""static"""'], {}), "(static, 'static')\n", (9947, 9965), True, 'import xml.etree.ElementTree as ET\n'), ((10190, 10210), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""config"""'], {}), "('config')\n", (10200, 10210), True, 'import xml.etree.ElementTree as ET\n'), ((10229, 10250), 'xml.etree.ElementTree.Element', 
'ET.Element', (['"""get_arp"""'], {}), "('get_arp')\n", (10239, 10250), True, 'import xml.etree.ElementTree as ET\n'), ((10292, 10323), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['get_arp', '"""input"""'], {}), "(get_arp, 'input')\n", (10305, 10323), True, 'import xml.etree.ElementTree as ET\n'), ((10345, 10379), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['input', '"""input-type"""'], {}), "(input, 'input-type')\n", (10358, 10379), True, 'import xml.etree.ElementTree as ET\n'), ((10393, 10424), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['input_type', '"""ip"""'], {}), "(input_type, 'ip')\n", (10406, 10424), True, 'import xml.etree.ElementTree as ET\n'), ((10446, 10477), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['ip', '"""ip-address"""'], {}), "(ip, 'ip-address')\n", (10459, 10477), True, 'import xml.etree.ElementTree as ET\n'), ((10750, 10770), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""config"""'], {}), "('config')\n", (10760, 10770), True, 'import xml.etree.ElementTree as ET\n'), ((10789, 10810), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""get_arp"""'], {}), "('get_arp')\n", (10799, 10810), True, 'import xml.etree.ElementTree as ET\n'), ((10853, 10885), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['get_arp', '"""output"""'], {}), "(get_arp, 'output')\n", (10866, 10885), True, 'import xml.etree.ElementTree as ET\n'), ((10906, 10940), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['output', '"""arp-entry"""'], {}), "(output, 'arp-entry')\n", (10919, 10940), True, 'import xml.etree.ElementTree as ET\n'), ((10962, 11000), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""ip-address"""'], {}), "(arp_entry, 'ip-address')\n", (10975, 11000), True, 'import xml.etree.ElementTree as ET\n'), ((11274, 11294), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""config"""'], {}), "('config')\n", (11284, 11294), True, 'import xml.etree.ElementTree as ET\n'), ((11313, 11334), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""get_arp"""'], {}), "('get_arp')\n", (11323, 11334), True, 'import xml.etree.ElementTree as ET\n'), ((11377, 11409), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['get_arp', '"""output"""'], {}), "(get_arp, 'output')\n", (11390, 11409), True, 'import xml.etree.ElementTree as ET\n'), ((11430, 11464), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['output', '"""arp-entry"""'], {}), "(output, 'arp-entry')\n", (11443, 11464), True, 'import xml.etree.ElementTree as ET\n'), ((11490, 11528), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""ip-address"""'], {}), "(arp_entry, 'ip-address')\n", (11503, 11528), True, 'import xml.etree.ElementTree as ET\n'), ((11606, 11645), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""mac-address"""'], {}), "(arp_entry, 'mac-address')\n", (11619, 11645), True, 'import xml.etree.ElementTree as ET\n'), ((11924, 11944), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""config"""'], {}), "('config')\n", (11934, 11944), True, 'import xml.etree.ElementTree as ET\n'), ((11963, 11984), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""get_arp"""'], {}), "('get_arp')\n", (11973, 11984), True, 'import xml.etree.ElementTree as ET\n'), ((12027, 12059), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['get_arp', '"""output"""'], {}), "(get_arp, 'output')\n", (12040, 12059), True, 'import xml.etree.ElementTree as ET\n'), ((12080, 12114), 'xml.etree.ElementTree.SubElement', 
'ET.SubElement', (['output', '"""arp-entry"""'], {}), "(output, 'arp-entry')\n", (12093, 12114), True, 'import xml.etree.ElementTree as ET\n'), ((12140, 12178), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""ip-address"""'], {}), "(arp_entry, 'ip-address')\n", (12153, 12178), True, 'import xml.etree.ElementTree as ET\n'), ((12259, 12301), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""interface-type"""'], {}), "(arp_entry, 'interface-type')\n", (12272, 12301), True, 'import xml.etree.ElementTree as ET\n'), ((12586, 12606), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""config"""'], {}), "('config')\n", (12596, 12606), True, 'import xml.etree.ElementTree as ET\n'), ((12625, 12646), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""get_arp"""'], {}), "('get_arp')\n", (12635, 12646), True, 'import xml.etree.ElementTree as ET\n'), ((12689, 12721), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['get_arp', '"""output"""'], {}), "(get_arp, 'output')\n", (12702, 12721), True, 'import xml.etree.ElementTree as ET\n'), ((12742, 12776), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['output', '"""arp-entry"""'], {}), "(output, 'arp-entry')\n", (12755, 12776), True, 'import xml.etree.ElementTree as ET\n'), ((12802, 12840), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""ip-address"""'], {}), "(arp_entry, 'ip-address')\n", (12815, 12840), True, 'import xml.etree.ElementTree as ET\n'), ((12921, 12963), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""interface-name"""'], {}), "(arp_entry, 'interface-name')\n", (12934, 12963), True, 'import xml.etree.ElementTree as ET\n'), ((13245, 13265), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""config"""'], {}), "('config')\n", (13255, 13265), True, 'import xml.etree.ElementTree as ET\n'), ((13284, 13305), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""get_arp"""'], {}), "('get_arp')\n", (13294, 13305), True, 'import xml.etree.ElementTree as ET\n'), ((13348, 13380), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['get_arp', '"""output"""'], {}), "(get_arp, 'output')\n", (13361, 13380), True, 'import xml.etree.ElementTree as ET\n'), ((13401, 13435), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['output', '"""arp-entry"""'], {}), "(output, 'arp-entry')\n", (13414, 13435), True, 'import xml.etree.ElementTree as ET\n'), ((13461, 13499), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""ip-address"""'], {}), "(arp_entry, 'ip-address')\n", (13474, 13499), True, 'import xml.etree.ElementTree as ET\n'), ((13577, 13616), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""is-resolved"""'], {}), "(arp_entry, 'is-resolved')\n", (13590, 13616), True, 'import xml.etree.ElementTree as ET\n'), ((13884, 13904), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""config"""'], {}), "('config')\n", (13894, 13904), True, 'import xml.etree.ElementTree as ET\n'), ((13923, 13944), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""get_arp"""'], {}), "('get_arp')\n", (13933, 13944), True, 'import xml.etree.ElementTree as ET\n'), ((13987, 14019), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['get_arp', '"""output"""'], {}), "(get_arp, 'output')\n", (14000, 14019), True, 'import xml.etree.ElementTree as ET\n'), ((14040, 14074), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['output', '"""arp-entry"""'], {}), "(output, 'arp-entry')\n", (14053, 14074), True, 'import 
xml.etree.ElementTree as ET\n'), ((14100, 14138), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""ip-address"""'], {}), "(arp_entry, 'ip-address')\n", (14113, 14138), True, 'import xml.etree.ElementTree as ET\n'), ((14208, 14239), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""age"""'], {}), "(arp_entry, 'age')\n", (14221, 14239), True, 'import xml.etree.ElementTree as ET\n'), ((14498, 14518), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""config"""'], {}), "('config')\n", (14508, 14518), True, 'import xml.etree.ElementTree as ET\n'), ((14537, 14558), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""get_arp"""'], {}), "('get_arp')\n", (14547, 14558), True, 'import xml.etree.ElementTree as ET\n'), ((14601, 14633), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['get_arp', '"""output"""'], {}), "(get_arp, 'output')\n", (14614, 14633), True, 'import xml.etree.ElementTree as ET\n'), ((14654, 14688), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['output', '"""arp-entry"""'], {}), "(output, 'arp-entry')\n", (14667, 14688), True, 'import xml.etree.ElementTree as ET\n'), ((14714, 14752), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""ip-address"""'], {}), "(arp_entry, 'ip-address')\n", (14727, 14752), True, 'import xml.etree.ElementTree as ET\n'), ((14829, 14867), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""entry-type"""'], {}), "(arp_entry, 'entry-type')\n", (14842, 14867), True, 'import xml.etree.ElementTree as ET\n'), ((15135, 15155), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""config"""'], {}), "('config')\n", (15145, 15155), True, 'import xml.etree.ElementTree as ET\n'), ((15182, 15269), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['config', '"""hide-arp-holder"""'], {'xmlns': '"""urn:brocade.com:mgmt:brocade-arp"""'}), "(config, 'hide-arp-holder', xmlns=\n 'urn:brocade.com:mgmt:brocade-arp')\n", (15195, 15269), True, 'import xml.etree.ElementTree as ET\n'), ((15286, 15330), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['hide_arp_holder', '"""system-max"""'], {}), "(hide_arp_holder, 'system-max')\n", (15299, 15330), True, 'import xml.etree.ElementTree as ET\n'), ((15345, 15377), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['system_max', '"""arp"""'], {}), "(system_max, 'arp')\n", (15358, 15377), True, 'import xml.etree.ElementTree as ET\n'), ((15641, 15661), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""config"""'], {}), "('config')\n", (15651, 15661), True, 'import xml.etree.ElementTree as ET\n'), ((15688, 15775), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['config', '"""hide-arp-holder"""'], {'xmlns': '"""urn:brocade.com:mgmt:brocade-arp"""'}), "(config, 'hide-arp-holder', xmlns=\n 'urn:brocade.com:mgmt:brocade-arp')\n", (15701, 15775), True, 'import xml.etree.ElementTree as ET\n'), ((15791, 15834), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['hide_arp_holder', '"""arp-entry"""'], {}), "(hide_arp_holder, 'arp-entry')\n", (15804, 15834), True, 'import xml.etree.ElementTree as ET\n'), ((15860, 15902), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""arp-ip-address"""'], {}), "(arp_entry, 'arp-ip-address')\n", (15873, 15902), True, 'import xml.etree.ElementTree as ET\n'), ((16191, 16211), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""config"""'], {}), "('config')\n", (16201, 16211), True, 'import xml.etree.ElementTree as ET\n'), ((16238, 16325), 
'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['config', '"""hide-arp-holder"""'], {'xmlns': '"""urn:brocade.com:mgmt:brocade-arp"""'}), "(config, 'hide-arp-holder', xmlns=\n 'urn:brocade.com:mgmt:brocade-arp')\n", (16251, 16325), True, 'import xml.etree.ElementTree as ET\n'), ((16341, 16384), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['hide_arp_holder', '"""arp-entry"""'], {}), "(hide_arp_holder, 'arp-entry')\n", (16354, 16384), True, 'import xml.etree.ElementTree as ET\n'), ((16414, 16456), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""arp-ip-address"""'], {}), "(arp_entry, 'arp-ip-address')\n", (16427, 16456), True, 'import xml.etree.ElementTree as ET\n'), ((16548, 16593), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""mac-address-value"""'], {}), "(arp_entry, 'mac-address-value')\n", (16561, 16593), True, 'import xml.etree.ElementTree as ET\n'), ((16884, 16904), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""config"""'], {}), "('config')\n", (16894, 16904), True, 'import xml.etree.ElementTree as ET\n'), ((16931, 17018), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['config', '"""hide-arp-holder"""'], {'xmlns': '"""urn:brocade.com:mgmt:brocade-arp"""'}), "(config, 'hide-arp-holder', xmlns=\n 'urn:brocade.com:mgmt:brocade-arp')\n", (16944, 17018), True, 'import xml.etree.ElementTree as ET\n'), ((17034, 17077), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['hide_arp_holder', '"""arp-entry"""'], {}), "(hide_arp_holder, 'arp-entry')\n", (17047, 17077), True, 'import xml.etree.ElementTree as ET\n'), ((17107, 17149), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""arp-ip-address"""'], {}), "(arp_entry, 'arp-ip-address')\n", (17120, 17149), True, 'import xml.etree.ElementTree as ET\n'), ((17237, 17278), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""interfacename"""'], {}), "(arp_entry, 'interfacename')\n", (17250, 17278), True, 'import xml.etree.ElementTree as ET\n'), ((17587, 17607), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""config"""'], {}), "('config')\n", (17597, 17607), True, 'import xml.etree.ElementTree as ET\n'), ((17634, 17721), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['config', '"""hide-arp-holder"""'], {'xmlns': '"""urn:brocade.com:mgmt:brocade-arp"""'}), "(config, 'hide-arp-holder', xmlns=\n 'urn:brocade.com:mgmt:brocade-arp')\n", (17647, 17721), True, 'import xml.etree.ElementTree as ET\n'), ((17737, 17780), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['hide_arp_holder', '"""arp-entry"""'], {}), "(hide_arp_holder, 'arp-entry')\n", (17750, 17780), True, 'import xml.etree.ElementTree as ET\n'), ((17810, 17852), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""arp-ip-address"""'], {}), "(arp_entry, 'arp-ip-address')\n", (17823, 17852), True, 'import xml.etree.ElementTree as ET\n'), ((17940, 17981), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""interfacetype"""'], {}), "(arp_entry, 'interfacetype')\n", (17953, 17981), True, 'import xml.etree.ElementTree as ET\n'), ((18005, 18049), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['interfacetype', '"""Port-channel"""'], {}), "(interfacetype, 'Port-channel')\n", (18018, 18049), True, 'import xml.etree.ElementTree as ET\n'), ((18073, 18116), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['Port_channel', '"""Port-channel"""'], {}), "(Port_channel, 'Port-channel')\n", (18086, 18116), True, 'import 
xml.etree.ElementTree as ET\n'), ((18429, 18449), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""config"""'], {}), "('config')\n", (18439, 18449), True, 'import xml.etree.ElementTree as ET\n'), ((18476, 18563), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['config', '"""hide-arp-holder"""'], {'xmlns': '"""urn:brocade.com:mgmt:brocade-arp"""'}), "(config, 'hide-arp-holder', xmlns=\n 'urn:brocade.com:mgmt:brocade-arp')\n", (18489, 18563), True, 'import xml.etree.ElementTree as ET\n'), ((18579, 18622), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['hide_arp_holder', '"""arp-entry"""'], {}), "(hide_arp_holder, 'arp-entry')\n", (18592, 18622), True, 'import xml.etree.ElementTree as ET\n'), ((18652, 18694), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""arp-ip-address"""'], {}), "(arp_entry, 'arp-ip-address')\n", (18665, 18694), True, 'import xml.etree.ElementTree as ET\n'), ((18782, 18823), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""interfacetype"""'], {}), "(arp_entry, 'interfacetype')\n", (18795, 18823), True, 'import xml.etree.ElementTree as ET\n'), ((18850, 18897), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['interfacetype', '"""GigabitEthernet"""'], {}), "(interfacetype, 'GigabitEthernet')\n", (18863, 18897), True, 'import xml.etree.ElementTree as ET\n'), ((18924, 18973), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['GigabitEthernet', '"""GigabitEthernet"""'], {}), "(GigabitEthernet, 'GigabitEthernet')\n", (18937, 18973), True, 'import xml.etree.ElementTree as ET\n'), ((19298, 19318), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""config"""'], {}), "('config')\n", (19308, 19318), True, 'import xml.etree.ElementTree as ET\n'), ((19345, 19432), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['config', '"""hide-arp-holder"""'], {'xmlns': '"""urn:brocade.com:mgmt:brocade-arp"""'}), "(config, 'hide-arp-holder', xmlns=\n 'urn:brocade.com:mgmt:brocade-arp')\n", (19358, 19432), True, 'import xml.etree.ElementTree as ET\n'), ((19448, 19491), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['hide_arp_holder', '"""arp-entry"""'], {}), "(hide_arp_holder, 'arp-entry')\n", (19461, 19491), True, 'import xml.etree.ElementTree as ET\n'), ((19521, 19563), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""arp-ip-address"""'], {}), "(arp_entry, 'arp-ip-address')\n", (19534, 19563), True, 'import xml.etree.ElementTree as ET\n'), ((19651, 19692), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""interfacetype"""'], {}), "(arp_entry, 'interfacetype')\n", (19664, 19692), True, 'import xml.etree.ElementTree as ET\n'), ((19722, 19772), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['interfacetype', '"""TenGigabitEthernet"""'], {}), "(interfacetype, 'TenGigabitEthernet')\n", (19735, 19772), True, 'import xml.etree.ElementTree as ET\n'), ((19802, 19857), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['TenGigabitEthernet', '"""TenGigabitEthernet"""'], {}), "(TenGigabitEthernet, 'TenGigabitEthernet')\n", (19815, 19857), True, 'import xml.etree.ElementTree as ET\n'), ((20192, 20212), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""config"""'], {}), "('config')\n", (20202, 20212), True, 'import xml.etree.ElementTree as ET\n'), ((20239, 20326), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['config', '"""hide-arp-holder"""'], {'xmlns': '"""urn:brocade.com:mgmt:brocade-arp"""'}), "(config, 'hide-arp-holder', xmlns=\n 
'urn:brocade.com:mgmt:brocade-arp')\n", (20252, 20326), True, 'import xml.etree.ElementTree as ET\n'), ((20342, 20385), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['hide_arp_holder', '"""arp-entry"""'], {}), "(hide_arp_holder, 'arp-entry')\n", (20355, 20385), True, 'import xml.etree.ElementTree as ET\n'), ((20415, 20457), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""arp-ip-address"""'], {}), "(arp_entry, 'arp-ip-address')\n", (20428, 20457), True, 'import xml.etree.ElementTree as ET\n'), ((20545, 20586), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""interfacetype"""'], {}), "(arp_entry, 'interfacetype')\n", (20558, 20586), True, 'import xml.etree.ElementTree as ET\n'), ((20618, 20670), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['interfacetype', '"""FortyGigabitEthernet"""'], {}), "(interfacetype, 'FortyGigabitEthernet')\n", (20631, 20670), True, 'import xml.etree.ElementTree as ET\n'), ((20702, 20761), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['FortyGigabitEthernet', '"""FortyGigabitEthernet"""'], {}), "(FortyGigabitEthernet, 'FortyGigabitEthernet')\n", (20715, 20761), True, 'import xml.etree.ElementTree as ET\n'), ((21104, 21124), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""config"""'], {}), "('config')\n", (21114, 21124), True, 'import xml.etree.ElementTree as ET\n'), ((21151, 21238), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['config', '"""hide-arp-holder"""'], {'xmlns': '"""urn:brocade.com:mgmt:brocade-arp"""'}), "(config, 'hide-arp-holder', xmlns=\n 'urn:brocade.com:mgmt:brocade-arp')\n", (21164, 21238), True, 'import xml.etree.ElementTree as ET\n'), ((21254, 21297), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['hide_arp_holder', '"""arp-entry"""'], {}), "(hide_arp_holder, 'arp-entry')\n", (21267, 21297), True, 'import xml.etree.ElementTree as ET\n'), ((21327, 21369), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""arp-ip-address"""'], {}), "(arp_entry, 'arp-ip-address')\n", (21340, 21369), True, 'import xml.etree.ElementTree as ET\n'), ((21457, 21498), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""interfacetype"""'], {}), "(arp_entry, 'interfacetype')\n", (21470, 21498), True, 'import xml.etree.ElementTree as ET\n'), ((21532, 21586), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['interfacetype', '"""HundredGigabitEthernet"""'], {}), "(interfacetype, 'HundredGigabitEthernet')\n", (21545, 21586), True, 'import xml.etree.ElementTree as ET\n'), ((21620, 21683), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['HundredGigabitEthernet', '"""HundredGigabitEthernet"""'], {}), "(HundredGigabitEthernet, 'HundredGigabitEthernet')\n", (21633, 21683), True, 'import xml.etree.ElementTree as ET\n'), ((21990, 22010), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""config"""'], {}), "('config')\n", (22000, 22010), True, 'import xml.etree.ElementTree as ET\n'), ((22037, 22124), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['config', '"""hide-arp-holder"""'], {'xmlns': '"""urn:brocade.com:mgmt:brocade-arp"""'}), "(config, 'hide-arp-holder', xmlns=\n 'urn:brocade.com:mgmt:brocade-arp')\n", (22050, 22124), True, 'import xml.etree.ElementTree as ET\n'), ((22140, 22183), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['hide_arp_holder', '"""arp-entry"""'], {}), "(hide_arp_holder, 'arp-entry')\n", (22153, 22183), True, 'import xml.etree.ElementTree as ET\n'), ((22213, 22255), 
'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""arp-ip-address"""'], {}), "(arp_entry, 'arp-ip-address')\n", (22226, 22255), True, 'import xml.etree.ElementTree as ET\n'), ((22343, 22384), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""interfacetype"""'], {}), "(arp_entry, 'interfacetype')\n", (22356, 22384), True, 'import xml.etree.ElementTree as ET\n'), ((22398, 22432), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['interfacetype', '"""Ve"""'], {}), "(interfacetype, 'Ve')\n", (22411, 22432), True, 'import xml.etree.ElementTree as ET\n'), ((22446, 22469), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['Ve', '"""Ve"""'], {}), "(Ve, 'Ve')\n", (22459, 22469), True, 'import xml.etree.ElementTree as ET\n'), ((22740, 22760), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""config"""'], {}), "('config')\n", (22750, 22760), True, 'import xml.etree.ElementTree as ET\n'), ((22779, 22800), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""get_arp"""'], {}), "('get_arp')\n", (22789, 22800), True, 'import xml.etree.ElementTree as ET\n'), ((22842, 22873), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['get_arp', '"""input"""'], {}), "(get_arp, 'input')\n", (22855, 22873), True, 'import xml.etree.ElementTree as ET\n'), ((22895, 22929), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['input', '"""input-type"""'], {}), "(input, 'input-type')\n", (22908, 22929), True, 'import xml.etree.ElementTree as ET\n'), ((22950, 22988), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['input_type', '"""interface"""'], {}), "(input_type, 'interface')\n", (22963, 22988), True, 'import xml.etree.ElementTree as ET\n'), ((23014, 23056), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['interface', '"""interface-type"""'], {}), "(interface, 'interface-type')\n", (23027, 23056), True, 'import xml.etree.ElementTree as ET\n'), ((23351, 23371), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""config"""'], {}), "('config')\n", (23361, 23371), True, 'import xml.etree.ElementTree as ET\n'), ((23390, 23411), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""get_arp"""'], {}), "('get_arp')\n", (23400, 23411), True, 'import xml.etree.ElementTree as ET\n'), ((23453, 23484), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['get_arp', '"""input"""'], {}), "(get_arp, 'input')\n", (23466, 23484), True, 'import xml.etree.ElementTree as ET\n'), ((23506, 23540), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['input', '"""input-type"""'], {}), "(input, 'input-type')\n", (23519, 23540), True, 'import xml.etree.ElementTree as ET\n'), ((23561, 23599), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['input_type', '"""interface"""'], {}), "(input_type, 'interface')\n", (23574, 23599), True, 'import xml.etree.ElementTree as ET\n'), ((23625, 23667), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['interface', '"""interface-name"""'], {}), "(interface, 'interface-name')\n", (23638, 23667), True, 'import xml.etree.ElementTree as ET\n'), ((23953, 23973), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""config"""'], {}), "('config')\n", (23963, 23973), True, 'import xml.etree.ElementTree as ET\n'), ((23992, 24013), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""get_arp"""'], {}), "('get_arp')\n", (24002, 24013), True, 'import xml.etree.ElementTree as ET\n'), ((24055, 24086), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['get_arp', '"""input"""'], {}), "(get_arp, 'input')\n", (24068, 24086), True, 
'import xml.etree.ElementTree as ET\n'), ((24108, 24142), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['input', '"""input-type"""'], {}), "(input, 'input-type')\n", (24121, 24142), True, 'import xml.etree.ElementTree as ET\n'), ((24161, 24197), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['input_type', '"""dynamic"""'], {}), "(input_type, 'dynamic')\n", (24174, 24197), True, 'import xml.etree.ElementTree as ET\n'), ((24216, 24249), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['dynamic', '"""dynamic"""'], {}), "(dynamic, 'dynamic')\n", (24229, 24249), True, 'import xml.etree.ElementTree as ET\n'), ((24474, 24494), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""config"""'], {}), "('config')\n", (24484, 24494), True, 'import xml.etree.ElementTree as ET\n'), ((24513, 24534), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""get_arp"""'], {}), "('get_arp')\n", (24523, 24534), True, 'import xml.etree.ElementTree as ET\n'), ((24576, 24607), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['get_arp', '"""input"""'], {}), "(get_arp, 'input')\n", (24589, 24607), True, 'import xml.etree.ElementTree as ET\n'), ((24629, 24663), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['input', '"""input-type"""'], {}), "(input, 'input-type')\n", (24642, 24663), True, 'import xml.etree.ElementTree as ET\n'), ((24681, 24716), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['input_type', '"""static"""'], {}), "(input_type, 'static')\n", (24694, 24716), True, 'import xml.etree.ElementTree as ET\n'), ((24734, 24765), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['static', '"""static"""'], {}), "(static, 'static')\n", (24747, 24765), True, 'import xml.etree.ElementTree as ET\n'), ((24990, 25010), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""config"""'], {}), "('config')\n", (25000, 25010), True, 'import xml.etree.ElementTree as ET\n'), ((25029, 25050), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""get_arp"""'], {}), "('get_arp')\n", (25039, 25050), True, 'import xml.etree.ElementTree as ET\n'), ((25092, 25123), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['get_arp', '"""input"""'], {}), "(get_arp, 'input')\n", (25105, 25123), True, 'import xml.etree.ElementTree as ET\n'), ((25145, 25179), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['input', '"""input-type"""'], {}), "(input, 'input-type')\n", (25158, 25179), True, 'import xml.etree.ElementTree as ET\n'), ((25193, 25224), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['input_type', '"""ip"""'], {}), "(input_type, 'ip')\n", (25206, 25224), True, 'import xml.etree.ElementTree as ET\n'), ((25246, 25277), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['ip', '"""ip-address"""'], {}), "(ip, 'ip-address')\n", (25259, 25277), True, 'import xml.etree.ElementTree as ET\n'), ((25550, 25570), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""config"""'], {}), "('config')\n", (25560, 25570), True, 'import xml.etree.ElementTree as ET\n'), ((25589, 25610), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""get_arp"""'], {}), "('get_arp')\n", (25599, 25610), True, 'import xml.etree.ElementTree as ET\n'), ((25653, 25685), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['get_arp', '"""output"""'], {}), "(get_arp, 'output')\n", (25666, 25685), True, 'import xml.etree.ElementTree as ET\n'), ((25706, 25740), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['output', '"""arp-entry"""'], {}), "(output, 'arp-entry')\n", (25719, 25740), True, 'import 
xml.etree.ElementTree as ET\n'), ((25762, 25800), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""ip-address"""'], {}), "(arp_entry, 'ip-address')\n", (25775, 25800), True, 'import xml.etree.ElementTree as ET\n'), ((26074, 26094), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""config"""'], {}), "('config')\n", (26084, 26094), True, 'import xml.etree.ElementTree as ET\n'), ((26113, 26134), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""get_arp"""'], {}), "('get_arp')\n", (26123, 26134), True, 'import xml.etree.ElementTree as ET\n'), ((26177, 26209), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['get_arp', '"""output"""'], {}), "(get_arp, 'output')\n", (26190, 26209), True, 'import xml.etree.ElementTree as ET\n'), ((26230, 26264), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['output', '"""arp-entry"""'], {}), "(output, 'arp-entry')\n", (26243, 26264), True, 'import xml.etree.ElementTree as ET\n'), ((26290, 26328), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""ip-address"""'], {}), "(arp_entry, 'ip-address')\n", (26303, 26328), True, 'import xml.etree.ElementTree as ET\n'), ((26406, 26445), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""mac-address"""'], {}), "(arp_entry, 'mac-address')\n", (26419, 26445), True, 'import xml.etree.ElementTree as ET\n'), ((26724, 26744), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""config"""'], {}), "('config')\n", (26734, 26744), True, 'import xml.etree.ElementTree as ET\n'), ((26763, 26784), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""get_arp"""'], {}), "('get_arp')\n", (26773, 26784), True, 'import xml.etree.ElementTree as ET\n'), ((26827, 26859), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['get_arp', '"""output"""'], {}), "(get_arp, 'output')\n", (26840, 26859), True, 'import xml.etree.ElementTree as ET\n'), ((26880, 26914), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['output', '"""arp-entry"""'], {}), "(output, 'arp-entry')\n", (26893, 26914), True, 'import xml.etree.ElementTree as ET\n'), ((26940, 26978), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""ip-address"""'], {}), "(arp_entry, 'ip-address')\n", (26953, 26978), True, 'import xml.etree.ElementTree as ET\n'), ((27059, 27101), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""interface-type"""'], {}), "(arp_entry, 'interface-type')\n", (27072, 27101), True, 'import xml.etree.ElementTree as ET\n'), ((27386, 27406), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""config"""'], {}), "('config')\n", (27396, 27406), True, 'import xml.etree.ElementTree as ET\n'), ((27425, 27446), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""get_arp"""'], {}), "('get_arp')\n", (27435, 27446), True, 'import xml.etree.ElementTree as ET\n'), ((27489, 27521), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['get_arp', '"""output"""'], {}), "(get_arp, 'output')\n", (27502, 27521), True, 'import xml.etree.ElementTree as ET\n'), ((27542, 27576), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['output', '"""arp-entry"""'], {}), "(output, 'arp-entry')\n", (27555, 27576), True, 'import xml.etree.ElementTree as ET\n'), ((27602, 27640), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""ip-address"""'], {}), "(arp_entry, 'ip-address')\n", (27615, 27640), True, 'import xml.etree.ElementTree as ET\n'), ((27721, 27763), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', 
'"""interface-name"""'], {}), "(arp_entry, 'interface-name')\n", (27734, 27763), True, 'import xml.etree.ElementTree as ET\n'), ((28045, 28065), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""config"""'], {}), "('config')\n", (28055, 28065), True, 'import xml.etree.ElementTree as ET\n'), ((28084, 28105), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""get_arp"""'], {}), "('get_arp')\n", (28094, 28105), True, 'import xml.etree.ElementTree as ET\n'), ((28148, 28180), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['get_arp', '"""output"""'], {}), "(get_arp, 'output')\n", (28161, 28180), True, 'import xml.etree.ElementTree as ET\n'), ((28201, 28235), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['output', '"""arp-entry"""'], {}), "(output, 'arp-entry')\n", (28214, 28235), True, 'import xml.etree.ElementTree as ET\n'), ((28261, 28299), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""ip-address"""'], {}), "(arp_entry, 'ip-address')\n", (28274, 28299), True, 'import xml.etree.ElementTree as ET\n'), ((28377, 28416), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""is-resolved"""'], {}), "(arp_entry, 'is-resolved')\n", (28390, 28416), True, 'import xml.etree.ElementTree as ET\n'), ((28684, 28704), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""config"""'], {}), "('config')\n", (28694, 28704), True, 'import xml.etree.ElementTree as ET\n'), ((28723, 28744), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""get_arp"""'], {}), "('get_arp')\n", (28733, 28744), True, 'import xml.etree.ElementTree as ET\n'), ((28787, 28819), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['get_arp', '"""output"""'], {}), "(get_arp, 'output')\n", (28800, 28819), True, 'import xml.etree.ElementTree as ET\n'), ((28840, 28874), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['output', '"""arp-entry"""'], {}), "(output, 'arp-entry')\n", (28853, 28874), True, 'import xml.etree.ElementTree as ET\n'), ((28900, 28938), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""ip-address"""'], {}), "(arp_entry, 'ip-address')\n", (28913, 28938), True, 'import xml.etree.ElementTree as ET\n'), ((29008, 29039), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""age"""'], {}), "(arp_entry, 'age')\n", (29021, 29039), True, 'import xml.etree.ElementTree as ET\n'), ((29298, 29318), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""config"""'], {}), "('config')\n", (29308, 29318), True, 'import xml.etree.ElementTree as ET\n'), ((29337, 29358), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""get_arp"""'], {}), "('get_arp')\n", (29347, 29358), True, 'import xml.etree.ElementTree as ET\n'), ((29401, 29433), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['get_arp', '"""output"""'], {}), "(get_arp, 'output')\n", (29414, 29433), True, 'import xml.etree.ElementTree as ET\n'), ((29454, 29488), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['output', '"""arp-entry"""'], {}), "(output, 'arp-entry')\n", (29467, 29488), True, 'import xml.etree.ElementTree as ET\n'), ((29514, 29552), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""ip-address"""'], {}), "(arp_entry, 'ip-address')\n", (29527, 29552), True, 'import xml.etree.ElementTree as ET\n'), ((29629, 29667), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['arp_entry', '"""entry-type"""'], {}), "(arp_entry, 'entry-type')\n", (29642, 29667), True, 'import xml.etree.ElementTree as ET\n')]
|
import discord
from discord.ext import commands
import aiohttp
import sys
import time
import googletrans
import functools
from random import choice  # used by the hello command's random greeting
class utility:
def __init__(self, bot):
self.bot = bot
@commands.command()
async def avatar(self, ctx, *, member: discord.Member = None):
if member is None:
embed=discord.Embed(title="No mention!", description="Please mention a user to view his profile!", color=0xff0000)
await ctx.send(embed=embed)
else:
embed = discord.Embed(title=f"{member}'s profile picture", color=0xeee657)
embed.set_image(url=member.avatar_url)
await ctx.send(embed=embed)
@commands.command()
async def code(self, ctx, *, msg):
"""Write text in code format."""
await ctx.message.delete()
await ctx.send("```" + msg.replace("`", "") + "```")
@commands.command()
async def echo(self, ctx, *, content:str):
await ctx.send(content)
await ctx.message.delete()
@commands.command()
async def hello(self, ctx):
"""*hello
A command that will respond with a random greeting.
"""
choices = ('Hey!', 'Hello!', 'Hi!', 'Hallo!', 'Bonjour!', 'Hola!')
await ctx.send(choice(choices))
@commands.command(aliases=['platform'])
async def plat(self,ctx):
await ctx.send('Running on ' + sys.platform)
@commands.command(name='members')
async def membs(self, ctx):
server = ctx.guild
for member in server.members:
await ctx.send(member)
@commands.command(name='roles')
async def rols(self, ctx):
server = ctx.guild
for role in server.roles:
await ctx.send(role)
@commands.command(name='member')
async def mem(self, ctx):
server = ctx.guild
        names = []
        for member in server.members:
            names.append(member.name)
        embed = discord.Embed(title='Members', description=str(names), colour=discord.Colour.green())
await ctx.send(embed=embed)
@commands.command(name='role')
async def rol(self, ctx):
server = ctx.guild
        role_names = []
        for role in server.roles:
            role_names.append(role.name)
        embed = discord.Embed(title='Roles', description=str(role_names), colour=discord.Colour.green())
await ctx.send(embed=embed)
@commands.command(name='pingme')
async def pingme(self, ctx):
await ctx.send(ctx.author.mention)
def setup(bot):
bot.add_cog(utility(bot))
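# --- Usage sketch (added for illustration; not part of the original cog) ---
# A minimal way this cog could be loaded on a discord.py "rewrite"-era bot.
# The command prefix and the token placeholder are assumptions, not values
# taken from the original file.
if __name__ == '__main__':
    bot = commands.Bot(command_prefix='*')
    bot.add_cog(utility(bot))
    bot.run('YOUR_BOT_TOKEN')  # placeholder token; supply a real one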
|
[
"discord.ext.commands.command",
"discord.Embed",
"discord.Colour.green"
] |
[((196, 214), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (212, 214), False, 'from discord.ext import commands\n'), ((662, 680), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (678, 680), False, 'from discord.ext import commands\n'), ((879, 897), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (895, 897), False, 'from discord.ext import commands\n'), ((1026, 1044), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (1042, 1044), False, 'from discord.ext import commands\n'), ((1307, 1345), 'discord.ext.commands.command', 'commands.command', ([], {'aliases': "['platform']"}), "(aliases=['platform'])\n", (1323, 1345), False, 'from discord.ext import commands\n'), ((1439, 1471), 'discord.ext.commands.command', 'commands.command', ([], {'name': '"""members"""'}), "(name='members')\n", (1455, 1471), False, 'from discord.ext import commands\n'), ((1610, 1640), 'discord.ext.commands.command', 'commands.command', ([], {'name': '"""roles"""'}), "(name='roles')\n", (1626, 1640), False, 'from discord.ext import commands\n'), ((1772, 1803), 'discord.ext.commands.command', 'commands.command', ([], {'name': '"""member"""'}), "(name='member')\n", (1788, 1803), False, 'from discord.ext import commands\n'), ((2111, 2140), 'discord.ext.commands.command', 'commands.command', ([], {'name': '"""role"""'}), "(name='role')\n", (2127, 2140), False, 'from discord.ext import commands\n'), ((2440, 2471), 'discord.ext.commands.command', 'commands.command', ([], {'name': '"""pingme"""'}), "(name='pingme')\n", (2456, 2471), False, 'from discord.ext import commands\n'), ((324, 437), 'discord.Embed', 'discord.Embed', ([], {'title': '"""No mention!"""', 'description': '"""Please mention a user to view his profile!"""', 'color': '(16711680)'}), "(title='No mention!', description=\n 'Please mention a user to view his profile!', color=16711680)\n", (337, 437), False, 'import discord\n'), ((502, 568), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""{member}\'s profile picture"""', 'color': '(15656535)'}), '(title=f"{member}\'s profile picture", color=15656535)\n', (515, 568), False, 'import discord\n'), ((2045, 2067), 'discord.Colour.green', 'discord.Colour.green', ([], {}), '()\n', (2065, 2067), False, 'import discord\n'), ((2374, 2396), 'discord.Colour.green', 'discord.Colour.green', ([], {}), '()\n', (2394, 2396), False, 'import discord\n')]
|
import pandas as pd
import numpy as np
import pytest
from ..wrangling import (
subset_plot_data_for_income_bins,
subset_plot_data_for_scatter_plot,
subset_year_age_sex_geo
)
def test_subset_plot_data_for_income_bins():
expected_result = {'Age group': {598748: '35 to 44 years',
598749: '35 to 44 years',
598750: '35 to 44 years',
598751: '35 to 44 years',
598752: '35 to 44 years',
598753: '35 to 44 years',
598754: '35 to 44 years',
598755: '35 to 44 years',
598756: '35 to 44 years',
598757: '35 to 44 years',
598758: '35 to 44 years',
598759: '35 to 44 years',
598760: '35 to 44 years'},
'GEO': {598748: 'Canada',
598749: 'Canada',
598750: 'Canada',
598751: 'Canada',
598752: 'Canada',
598753: 'Canada',
598754: 'Canada',
598755: 'Canada',
598756: 'Canada',
598757: 'Canada',
598758: 'Canada',
598759: 'Canada',
598760: 'Canada'},
'Persons with income': {598748: 'Persons with income under $5,000',
598749: 'Persons with income of $5,000 and over',
598750: 'Persons with income of $10,000 and over',
598751: 'Persons with income of $15,000 and over',
598752: 'Persons with income of $20,000 and over',
598753: 'Persons with income of $25,000 and over',
598754: 'Persons with income of $35,000 and over',
598755: 'Persons with income of $50,000 and over',
598756: 'Persons with income of $75,000 and over',
598757: 'Persons with income of $100,000 and over',
598758: 'Persons with income of $150,000 and over',
598759: 'Persons with income of $200,000 and over',
598760: 'Persons with income of $250,000 and over'},
'REF_DATE': {598748: 2017,
598749: 2017,
598750: 2017,
598751: 2017,
598752: 2017,
598753: 2017,
598754: 2017,
598755: 2017,
598756: 2017,
598757: 2017,
598758: 2017,
598759: 2017,
598760: 2017},
'SCALAR_FACTOR': {598748: 'units ',
598749: 'units ',
598750: 'units ',
598751: 'units ',
598752: 'units ',
598753: 'units ',
598754: 'units ',
598755: 'units ',
598756: 'units ',
598757: 'units ',
598758: 'units ',
598759: 'units ',
598760: 'units '},
'Sex': {598748: 'Females',
598749: 'Females',
598750: 'Females',
598751: 'Females',
598752: 'Females',
598753: 'Females',
598754: 'Females',
598755: 'Females',
598756: 'Females',
598757: 'Females',
598758: 'Females',
598759: 'Females',
598760: 'Females'},
'VALUE': {598748: 116190.0,
598749: 2214880.0,
598750: 2098920.0,
598751: 1966980.0,
598752: 1836860.0,
598753: 1699380.0,
598754: 1406370.0,
598755: 958310.0,
598756: 470300.0,
598757: 193910.0,
598758: 48780.0,
598759: 20580.0,
598760: 10390.0}}
# params
path = r"../../data/raw/11100008.csv"
df = pd.read_csv(path, low_memory=False)
age = "35 to 44 years"
year = 2017
geo = "Canada"
sex = "Females"
df = subset_year_age_sex_geo(df, year, age, sex, geo)
df = subset_plot_data_for_income_bins(df)
assert expected_result == df.to_dict()
def test_subset_plot_data_for_scatter_plot():
expected_value = {'Age group': {1550629: '25 to 34 years'},
'GEO': {1550629: 'Canada'},
'Income source': {1550629: 'Total income'},
'REF_DATE': {1550629: 2017},
'SCALAR_FACTOR': {1550629: 'units'},
'Sex': {1550629: 'Females'},
'Statistics': {1550629: 'Median income (excluding zeros)'},
'VALUE': {1550629: 34800.0}}
# load the data
path = r"../../data/raw/11100239.csv"
df = pd.read_csv(path, low_memory=False)
# parameters
year = 2017
Age = '25 to 34 years'
sex = "Females"
geo = "Canada"
cols_to_keep = ['REF_DATE',
'GEO',
'Sex',
'Age group',
'Income source',
'Statistics',
'SCALAR_FACTOR',
'VALUE',
]
df = subset_plot_data_for_scatter_plot(
df, year, Age, sex, geo,
["Total income"], ['Median income (excluding zeros)'],
cols_to_keep)
assert expected_value == df.to_dict()
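# --- Run note (added for illustration; not part of the original tests) ---
# The tests above assume the raw StatCan CSVs exist under ../../data/raw/
# relative to the working directory. They are normally collected by pytest;
# the guard below simply allows running this file directly as well.
if __name__ == "__main__":
    pytest.main([__file__, "-v"])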
|
[
"pandas.read_csv"
] |
[((5769, 5804), 'pandas.read_csv', 'pd.read_csv', (['path'], {'low_memory': '(False)'}), '(path, low_memory=False)\n', (5780, 5804), True, 'import pandas as pd\n'), ((6649, 6684), 'pandas.read_csv', 'pd.read_csv', (['path'], {'low_memory': '(False)'}), '(path, low_memory=False)\n', (6660, 6684), True, 'import pandas as pd\n')]
|
import flowsim.client as c
get_chunk = c.get_chunk(port=8080)
|
[
"flowsim.client.get_chunk"
] |
[((40, 62), 'flowsim.client.get_chunk', 'c.get_chunk', ([], {'port': '(8080)'}), '(port=8080)\n', (51, 62), True, 'import flowsim.client as c\n')]
|
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
import requests
API_KEY = '<KEY>'
API_SECRET = '<KEY>'
API_URL = 'http://apicn.faceplusplus.com'
def detect(path):
data = {
'api_key': API_KEY,
'api_secret': API_SECRET,
}
    # Open the image in a context manager so the file handle is closed after upload.
    with open(path, 'rb') as img_file:
        files = {
            'img': img_file,
        }
        r = requests.post(API_URL + '/detection/detect',
                          data=data,
                          files=files)
    try:
        face_id = r.json()["face"][0]["face_id"]
        data = {
            'api_key': API_KEY,
            'api_secret': API_SECRET,
            'face_id': face_id
        }
        result = requests.post(API_URL + '/detection/landmark',
                               data=data)
        return result.json()
    except Exception:
        # A missing "face" entry or malformed response is reported as -1.
        return -1
# detect(u'source.jpg')
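# --- Usage sketch (added for illustration; not part of the original script) ---
# How detect() might be called; 'source.jpg' mirrors the commented-out call
# above, and the -1 check follows the function's own error convention.
if __name__ == '__main__':
    landmarks = detect(u'source.jpg')
    if landmarks == -1:
        print('detection failed')
    else:
        print(landmarks)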
|
[
"requests.post"
] |
[((302, 370), 'requests.post', 'requests.post', (["(API_URL + '/detection/detect')"], {'data': 'data', 'files': 'files'}), "(API_URL + '/detection/detect', data=data, files=files)\n", (315, 370), False, 'import requests\n'), ((606, 663), 'requests.post', 'requests.post', (["(API_URL + '/detection/landmark')"], {'data': 'data'}), "(API_URL + '/detection/landmark', data=data)\n", (619, 663), False, 'import requests\n')]
|
#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2017, Anaconda, Inc. All rights reserved.
#
# Powered by the Bokeh Development Team.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
''' Provide functions for declaring Bokeh API information.
Within the Bokeh codebase, functions, classes, methods, and properties may
be defined to be "public" or "internal", as well as note what Bokeh version
the object was first introduced in.
'''
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
logger = logging.getLogger(__name__)
# This one module is exempted from this :)
# from bokeh.util.api import public, internal ; public, internal
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Bokeh imports
from ..util.string import nice_join, format_docstring
from .future import wraps
#-----------------------------------------------------------------------------
# Globals and constants
#-----------------------------------------------------------------------------
INTERNAL = 'internal'
PUBLIC = 'public'
#-----------------------------------------------------------------------------
# Public API
#-----------------------------------------------------------------------------
def internal(version):
    ''' Declare an object to be ``'internal'``, introduced in ``version``.
This decorator annotates a function or class with information about what
version it was first introduced in, as well as that it is part of the
internal API. Specifically, the decorated object will have attributes:
.. code-block:: python
__bkversion__ = version
__bklevel__ = {internal}
Args:
version (tuple) :
A version tuple ``(x,y,z)`` stating what version this object was
introduced.
Returns:
Class or Function
'''
return _access(version, 'internal')
internal.__doc__ = format_docstring(internal.__doc__, internal=repr(INTERNAL))
def is_declared(obj):
'''
Args:
obj (object) :
The function, class, method, or property to test
Returns:
bool
'''
return hasattr(obj, '__bklevel__') and hasattr(obj, '__bkversion__')
def is_level(obj, level):
'''
Args:
obj (object) :
The function, class, method, or property to declare a level for
level ({public} or {internal})
Whether to declare the object public or internal
Returns:
bool
'''
if level not in _LEVELS:
raise ValueError("Unknown API level %r, expected %s" % (level, nice_join(_LEVELS)))
return obj.__bklevel__ == level
is_level.__doc__ = format_docstring(is_level.__doc__, public=repr(PUBLIC), internal=repr(INTERNAL))
def is_version(obj, version):
'''
Args:
obj (object) :
The function, class, method, or property to declare a version for
Returns:
bool
'''
return obj.__bkversion__ == version
def public(version):
''' Declare an object to be ``'public'``, introduced in ``version``.
This decorator annotates a function or class with information about what
version it was first introduced in, as well as that it is part of the
    public API. Specifically, the decorated object will have attributes:
.. code-block:: python
__bkversion__ = version
__bklevel__ = {public}
Args:
version (tuple) :
A version tuple ``(x,y,z)`` stating what version this object was
introduced.
Returns:
Class or Function
'''
return _access(version, 'public')
public.__doc__ = format_docstring(public.__doc__, public=repr(PUBLIC))
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
_LEVELS = [PUBLIC, INTERNAL]
def _access(version, level):
''' Declare an object to be ``{{ level }}``, introduced in ``version``.
This generic decorator annotates a function or class with information about
what version it was first introduced in, as well as whether it is a public
or internal API level. Specifically, the decorated object will have
attributes:
.. code-block:: python
__bkversion__ = version
__bklevel__ = level
Args:
version (tuple) :
A version tuple ``(x,y,z)`` stating what version this object was
introduced.
level: (str)
Whether this object is ``'public'`` or ``'internal'``
Returns:
Class or Function
'''
assert level in _LEVELS
def decorator(obj):
# Keep track of how many public/internal things there are declared
# in a module so we can make sure api tests are comprehensive
mod = _get_module(obj)
_increment_api_count(mod, level)
# If we are decorating a class
if isinstance(obj, type):
obj.__bkversion__ = version
obj.__bklevel__ = level
return obj
# Otherwise we are decorating a function or method
@wraps(obj)
def wrapper(*args, **kw):
return obj(*args, **kw)
wrapper.__bkversion__ = version
wrapper.__bklevel__ = level
return wrapper
return decorator
def _get_module(obj):
''' Given an function, class, method, or property, return the module
that is was defined in.
This function is written with the usages of the Bokeh codebase in
mind, and may not work in general
'''
import sys
if isinstance(obj, property):
modname = obj.fget.__module__
else:
modname = obj.__module__
return sys.modules[modname]
def _increment_api_count(mod, level):
''' Updates the __bkapi__ dict on a module, creating a new one if necessary
'''
if not hasattr(mod, '__bkapi__'):
mod.__bkapi__ = {PUBLIC: 0, INTERNAL:0}
mod.__bkapi__[level] += 1
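#-----------------------------------------------------------------------------
# Usage sketch (added for illustration; not part of the original module)
#-----------------------------------------------------------------------------
# How the decorators and query helpers above fit together; the function name
# and the version tuple are made up for the example.
if __name__ == '__main__':
    @public((1, 0, 0))
    def _example():
        pass

    assert is_declared(_example)
    assert is_level(_example, PUBLIC)
    assert is_version(_example, (1, 0, 0))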
|
[
"logging.getLogger"
] |
[((880, 907), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (897, 907), False, 'import logging\n')]
|
# Functions of img processing.
from functools import total_ordering
import config
import numpy as np
import copy
import torch
import cv2
from skimage.color import rgb2gray
from XCSLBP import XCSLBP
def extractPixelBlock(originalImg, labels):
'''
input_param:
        originalImg: original pixel matrix of the input img, squeezed to 2 dimensions. np.ndarray
        labels: label matrix of the input img. np.ndarray
    output_param:
        pixelBlockList: a list containing one pixel block per label, each block holding the pixels that share that label.
'''
    # Work on a deep copy so the caller's labels array is not modified by the max()/reshape steps below.
newLabels = copy.deepcopy(labels)
maxLabel = max(newLabels)
pixelBlockList = []
labels = labels.reshape(-1,1)
blankBlock = np.array([255, 255, 255])
for i in range(maxLabel + 1):
# Uncomment line24 and comment line25 to visualize pixelBlock.
# pixelBlock = [pixel if label == i else blankBlock for pixel, label in zip(originalImg, labels)]
pixelBlock = [pixel if label == i else config.blankBlock for pixel, label in zip(originalImg, labels)]
pixelBlock = np.array(pixelBlock)
pixelBlock = pixelBlock.reshape(config.imgSize[0], config.imgSize[1], -1)
pixelBlockList.append(pixelBlock)
return pixelBlockList
def extractFeature(pixelBlockList):
'''
input_param:
        pixelBlockList: A list containing all pixel-block elements.
    output_param:
        featureList: A list containing each element's feature; each feature holds the 3 color channels' mean values plus the mean position.
'''
featureList = []
for i in range(len(pixelBlockList)):
pixelList = []
locationList = []
for y in range(len(pixelBlockList[0])):
for x in range(len(pixelBlockList[1])):
if (pixelBlockList[i][y][x] != config.blankBlock).any():
pixelList.append(list(pixelBlockList[i][y][x]))
locationList.append((x,y))
colorFeature = np.mean(np.array(pixelList), axis=0)
locationFeature = np.mean(np.array(locationList), axis=0)
features = np.append(colorFeature, locationFeature)
featureList.append(features)
featureList = np.array(featureList)
return featureList
# Optimized version
def regionColorFeatures(img, labels):
'''
input_param:
img: img matrix. torch.tensor
labels: Kmeans clustering labels. torch.tensor
output_param:
        colorFeatureList: A list containing each region's color feature: the mean red, green, blue and gray values.
'''
numlab = max(labels)
rlabels = labels.view(config.imgSize)
colorFeatureList = []
grayFrame = torch.tensor(rgb2gray(img))
redFrame = img[:, :, 0]
greenFrame = img[:, :, 1]
blueFrame = img[:, :, 2]
for i in range(numlab + 1):
f = torch.eq(rlabels, i)
graySpLocal = torch.mean(grayFrame[f].float())
redSpLocal = torch.mean(redFrame[f].float())
greenSpLocal = torch.mean(greenFrame[f].float())
blueSpLocal = torch.mean(blueFrame[f].float())
colorFeature = [redSpLocal, greenSpLocal, blueSpLocal, graySpLocal]
colorFeatureList.append(colorFeature)
colorFeatureList = torch.tensor(colorFeatureList)
return colorFeatureList
def regionTextureFeatures(img, labels):
'''
input_param:
        img: image array as returned by cv2.imread
        labels: clustering labels. torch.tensor
'''
numlab = max(labels)
rlabels = labels.view(config.imgSize)
# I = rgb2gray(img)
XCS = XCSLBP(img)
XCS = XCS * (255/ 16)
XCSframe = torch.tensor(XCS)
textureFeatureList = []
for i in range(numlab + 1):
f = torch.eq(rlabels, i)
XCSSpLocal = torch.mean(XCSframe[f].float())
textureFeatureList.append(XCSSpLocal)
textureFeatureList = torch.tensor(textureFeatureList)
textureFeatureList = textureFeatureList.unsqueeze(1)
return textureFeatureList
def regionEdgeFeatures(img, labels):
'''
input_param:
        img: image array as returned by cv2.imread
        labels: clustering labels. torch.tensor
'''
numlab = max(labels)
rlabels = labels.view(config.imgSize)
# frame = rgb2gray(img)
Gx = cv2.Sobel(img, cv2.CV_64F, 1, 0)
Gy = cv2.Sobel(img, cv2.CV_64F, 0, 1)
Gmag = np.sqrt(Gx**2.0 + Gy**2.0)
Gdir = np.arctan2(Gy, Gx) * (180 / np.pi)
Gx, Gy, Gmag, Gdir = torch.tensor(Gx), torch.tensor(Gy), torch.tensor(Gmag), torch.tensor(Gdir)
edgeFeatureList = []
for i in range(numlab + 1):
f = torch.eq(rlabels, i)
GxSpLocal = torch.mean(Gx[f].float())
GySpLocal = torch.mean(Gy[f].float())
GmagSpLocal = torch.mean(Gmag[f].float())
GdirSpLocal = torch.mean(Gdir[f].float())
edgeFeature = [GxSpLocal, GySpLocal, GmagSpLocal, GdirSpLocal]
edgeFeatureList.append(edgeFeature)
edgeFeatureList = torch.tensor(edgeFeatureList)
return edgeFeatureList
def regionSpatialFeatures(labels):
numlab = max(labels)
rlabels = labels.view(config.imgSize)
col, row = config.imgSize
x = range(1, col + 1)
y = range(1, row + 1)
Sx, Sy = np.meshgrid(y, x)
Sx, Sy = torch.tensor(Sx), torch.tensor(Sy)
spatialFeatureList = []
for i in range(numlab + 1):
f = torch.eq(rlabels, i)
SxSpLocal = torch.mean(Sx[f].float())
SySpLocal = torch.mean(Sy[f].float())
spatialFeature = [SxSpLocal, SySpLocal]
spatialFeatureList.append(spatialFeature)
spatialFeatureList = torch.tensor(spatialFeatureList)
return spatialFeatureList
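# Minimal usage sketch (assumed workflow, not from the original code): the
# per-region descriptors above could be concatenated into a single feature
# matrix, e.g.
#
#   rgb_img = cv2.imread('frame.png')[:, :, ::-1].copy()      # hypothetical input
#   gray_img = cv2.imread('frame.png', cv2.IMREAD_GRAYSCALE)
#   color = regionColorFeatures(torch.tensor(rgb_img), labels)
#   texture = regionTextureFeatures(gray_img, labels)
#   edge = regionEdgeFeatures(gray_img, labels)
#   spatial = regionSpatialFeatures(labels)
#   features = torch.cat([color, texture, edge, spatial], dim=1)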
|
[
"torch.eq",
"copy.deepcopy",
"numpy.meshgrid",
"skimage.color.rgb2gray",
"numpy.arctan2",
"numpy.append",
"numpy.array",
"numpy.sqrt",
"cv2.Sobel",
"torch.tensor",
"XCSLBP.XCSLBP"
] |
[((637, 658), 'copy.deepcopy', 'copy.deepcopy', (['labels'], {}), '(labels)\n', (650, 658), False, 'import copy\n'), ((766, 791), 'numpy.array', 'np.array', (['[255, 255, 255]'], {}), '([255, 255, 255])\n', (774, 791), True, 'import numpy as np\n'), ((2226, 2247), 'numpy.array', 'np.array', (['featureList'], {}), '(featureList)\n', (2234, 2247), True, 'import numpy as np\n'), ((3242, 3272), 'torch.tensor', 'torch.tensor', (['colorFeatureList'], {}), '(colorFeatureList)\n', (3254, 3272), False, 'import torch\n'), ((3521, 3532), 'XCSLBP.XCSLBP', 'XCSLBP', (['img'], {}), '(img)\n', (3527, 3532), False, 'from XCSLBP import XCSLBP\n'), ((3575, 3592), 'torch.tensor', 'torch.tensor', (['XCS'], {}), '(XCS)\n', (3587, 3592), False, 'import torch\n'), ((3816, 3848), 'torch.tensor', 'torch.tensor', (['textureFeatureList'], {}), '(textureFeatureList)\n', (3828, 3848), False, 'import torch\n'), ((4156, 4188), 'cv2.Sobel', 'cv2.Sobel', (['img', 'cv2.CV_64F', '(1)', '(0)'], {}), '(img, cv2.CV_64F, 1, 0)\n', (4165, 4188), False, 'import cv2\n'), ((4198, 4230), 'cv2.Sobel', 'cv2.Sobel', (['img', 'cv2.CV_64F', '(0)', '(1)'], {}), '(img, cv2.CV_64F, 0, 1)\n', (4207, 4230), False, 'import cv2\n'), ((4243, 4273), 'numpy.sqrt', 'np.sqrt', (['(Gx ** 2.0 + Gy ** 2.0)'], {}), '(Gx ** 2.0 + Gy ** 2.0)\n', (4250, 4273), True, 'import numpy as np\n'), ((4839, 4868), 'torch.tensor', 'torch.tensor', (['edgeFeatureList'], {}), '(edgeFeatureList)\n', (4851, 4868), False, 'import torch\n'), ((5096, 5113), 'numpy.meshgrid', 'np.meshgrid', (['y', 'x'], {}), '(y, x)\n', (5107, 5113), True, 'import numpy as np\n'), ((5473, 5505), 'torch.tensor', 'torch.tensor', (['spatialFeatureList'], {}), '(spatialFeatureList)\n', (5485, 5505), False, 'import torch\n'), ((1135, 1155), 'numpy.array', 'np.array', (['pixelBlock'], {}), '(pixelBlock)\n', (1143, 1155), True, 'import numpy as np\n'), ((2124, 2164), 'numpy.append', 'np.append', (['colorFeature', 'locationFeature'], {}), '(colorFeature, locationFeature)\n', (2133, 2164), True, 'import numpy as np\n'), ((2709, 2722), 'skimage.color.rgb2gray', 'rgb2gray', (['img'], {}), '(img)\n', (2717, 2722), False, 'from skimage.color import rgb2gray\n'), ((2855, 2875), 'torch.eq', 'torch.eq', (['rlabels', 'i'], {}), '(rlabels, i)\n', (2863, 2875), False, 'import torch\n'), ((3670, 3690), 'torch.eq', 'torch.eq', (['rlabels', 'i'], {}), '(rlabels, i)\n', (3678, 3690), False, 'import torch\n'), ((4281, 4299), 'numpy.arctan2', 'np.arctan2', (['Gy', 'Gx'], {}), '(Gy, Gx)\n', (4291, 4299), True, 'import numpy as np\n'), ((4342, 4358), 'torch.tensor', 'torch.tensor', (['Gx'], {}), '(Gx)\n', (4354, 4358), False, 'import torch\n'), ((4360, 4376), 'torch.tensor', 'torch.tensor', (['Gy'], {}), '(Gy)\n', (4372, 4376), False, 'import torch\n'), ((4378, 4396), 'torch.tensor', 'torch.tensor', (['Gmag'], {}), '(Gmag)\n', (4390, 4396), False, 'import torch\n'), ((4398, 4416), 'torch.tensor', 'torch.tensor', (['Gdir'], {}), '(Gdir)\n', (4410, 4416), False, 'import torch\n'), ((4488, 4508), 'torch.eq', 'torch.eq', (['rlabels', 'i'], {}), '(rlabels, i)\n', (4496, 4508), False, 'import torch\n'), ((5127, 5143), 'torch.tensor', 'torch.tensor', (['Sx'], {}), '(Sx)\n', (5139, 5143), False, 'import torch\n'), ((5145, 5161), 'torch.tensor', 'torch.tensor', (['Sy'], {}), '(Sy)\n', (5157, 5161), False, 'import torch\n'), ((5236, 5256), 'torch.eq', 'torch.eq', (['rlabels', 'i'], {}), '(rlabels, i)\n', (5244, 5256), False, 'import torch\n'), ((2010, 2029), 'numpy.array', 'np.array', (['pixelList'], {}), '(pixelList)\n', 
(2018, 2029), True, 'import numpy as np\n'), ((2073, 2095), 'numpy.array', 'np.array', (['locationList'], {}), '(locationList)\n', (2081, 2095), True, 'import numpy as np\n')]
|
# Copyright 2020 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import flatbuffers
import math
from .. import DirectionalLight
from .. import Light
from ..LightUnion import LightUnion
from .. import PointLight
from .. import SceneLightSet
from .. import SpotLight
from DeepSeaScene.Color3f import CreateColor3f
from DeepSeaScene.Vector3f import CreateVector3f
class Object:
pass
def convertLightSet(convertContext, data):
"""
Converts a light set for a scene. The data map is expected to contain the following elements:
- lights: array of lights to initially populate the light set with. Each member of the array
has the following members:
- name: the name of the light.
- color: the color of the light as an array of three float values, typically in the range
[0, 1].
- intensity: the intensity of the light, which multiplies the color.
- type: the type of the light. The following types are supported with the members they expect:
- "Directional"
- direction: direction of the light as an array of three float values.
- "Point"
- position: position of the light as an array of three float values.
      - linearFalloff: amount the light falls off based on distance. Defaults to 1.
- quadraticFalloff: amount the light falls off based on squared distance. Defaults to 1.
- "Spot"
- position: position of the light as an array of three float values.
- direction: direction of the light as an array of three float values.
- linearFalloff: amount the light falls off based on distance. Defaults to 1.
- quadraticFalloff: amount the light falls off based on squared distance. Defaults to 1.
- innerSpotAngle: the angle in degrees of the spot light where it starts to fade out.
      - outerSpotAngle: the angle in degrees of the spot light where it finishes fading out.
- maxLights: the maximum number of lights that can be stored. If unset, the number of elements
in lights will be used.
- ambientColor: the color of the ambient light as an array of three floats, typically in the
range [0,1]. Defaults to all 0.
- ambientIntensity: the intensity of the ambient light, which multiplies the color. Defaults
to 0.
- mainLight: the name of the main light. If omitted no light will be considered the main light.
- srgb: true to treat all color values as sRGB values to be converted to linear space. Defaults
to false.
"""
def readFloat(value, name, minVal = None, maxVal = None):
try:
floatVal = float(value)
if (minVal is not None and floatVal < minVal) or \
(maxVal is not None and floatVal > maxVal):
raise Exception() # Common error handling in except block.
return floatVal
except:
raise Exception('Invalid ' + name + ' value "' + str(value) + '".')
def readInt(value, name, minVal):
try:
intVal = int(value)
if intVal < minVal:
raise Exception() # Common error handling in except block.
return intVal
except:
raise Exception('Invalid ' + name + ' value "' + str(value) + '".')
def readColor(value, name, srgb):
if not isinstance(value, list) or len(value) != 3:
raise Exception('SceneLight ' + name + ' must be an array of three floats.')
color = [readFloat(value[0], name + ' red channel'),
readFloat(value[1], name + ' green channel'),
readFloat(value[2], name + ' blue channel')]
if srgb:
for i in range(0, 3):
if color[i] <= 0.04045:
color[i] = color[i]/12.92
else:
color[i] = pow((color[i] + 0.055)/1.055, 2.4)
return color
def readVector(value, name):
if not isinstance(value, list) or len(value) != 3:
raise Exception('SceneLight ' + name + ' must be an array of three floats.')
return [readFloat(value[0], name + ' x'),
readFloat(value[1], name + ' y'),
readFloat(value[2], name + ' z')]
try:
srgb = data.get('srgb', False)
lightsData = data.get('lights', [])
lights = []
try:
for lightData in lightsData:
try:
light = Object()
light.name = str(lightData['name'])
light.color = readColor(lightData['color'], 'light color', srgb)
light.intensity = readFloat(lightData['intensity'], 'light intensity', 0.0)
lightType = lightData['type']
if lightType == 'Directional':
light.type = LightUnion.DirectionalLight
light.direction = readVector(lightData['direction'], 'light direction')
elif lightType == 'Point':
light.type = LightUnion.PointLight
light.position = readVector(lightData['position'], 'light position')
light.linearFalloff = readFloat(lightData.get('linearFalloff', 1.0),
'light linear falloff', 0.0)
light.quadraticFalloff = readFloat(lightData.get('quadraticFalloff', 1.0),
'light quadratic falloff', 0.0)
elif lightType == 'Spot':
light.type = LightUnion.SpotLight
light.position = readVector(lightData['position'], 'light position')
light.direction = readVector(lightData['direction'], 'light direction')
light.linearFalloff = readFloat(lightData.get('linearFalloff', 1.0),
'light linear falloff', 0.0)
light.quadraticFalloff = readFloat(lightData.get('quadraticFalloff', 1.0),
'light quadratic falloff', 0.0)
light.innerSpotAngle = math.radians(readFloat(lightData['innerSpotAngle'],
'inner spot angle', 0.0, 180.0))
light.outerSpotAngle = math.radians(readFloat(lightData['outerSpotAngle'],
'outer spot angle', 0.0, 180.0))
if light.innerSpotAngle > light.outerSpotAngle:
raise Exception(
'Spot light inner spot angle must be less than outer spot angle.')
except KeyError as e:
raise Exception('LightSet light doesn\'t contain element ' + str(e) + '.')
lights.append(light)
except (TypeError, ValueError):
raise Exception('SceneLights "lights" must be an array of objects.')
maxLights = readInt(data.get('maxLights', 0), 'maxLights', 0)
if not maxLights and not lights:
raise Exception('SceneLights cannot have zero max lights.')
ambientColorData = data.get('ambientColor')
if ambientColorData:
ambientColor = readColor(ambientColorData, 'ambient color', srgb)
else:
ambientColor = None
ambientIntensity = readFloat(data.get('ambientIntensity', 0.0), 'ambient intensity', 0.0)
mainLight = str(data.get('mainLight', ''))
except KeyError as e:
raise Exception('LightSet doesn\'t contain element ' + str(e) + '.')
except (AttributeError, TypeError, ValueError):
raise Exception('LightSet must be an object.')
builder = flatbuffers.Builder(0)
lightOffsets = []
for light in lights:
nameOffset = builder.CreateString(light.name)
if light.type == LightUnion.DirectionalLight:
DirectionalLight.Start(builder)
DirectionalLight.AddDirection(builder, CreateVector3f(builder, light.direction[0],
light.direction[1], light.direction[2]))
DirectionalLight.AddColor(builder, CreateColor3f(builder, light.color[0],
light.color[1], light.color[2]))
DirectionalLight.AddIntensity(builder, light.intensity)
lightUnionOffset = DirectionalLight.End(builder)
elif light.type == LightUnion.PointLight:
PointLight.Start(builder)
PointLight.AddPosition(builder, CreateVector3f(builder, light.position[0],
light.position[1], light.position[2]))
PointLight.AddColor(builder, CreateColor3f(builder, light.color[0], light.color[1],
light.color[2]))
PointLight.AddIntensity(builder, light.intensity)
PointLight.AddLinearFalloff(builder, light.linearFalloff)
PointLight.AddQuadraticFalloff(builder, light.quadraticFalloff)
lightUnionOffset = PointLight.End(builder)
elif light.type == LightUnion.SpotLight:
SpotLight.Start(builder)
SpotLight.AddPosition(builder, CreateVector3f(builder, light.position[0],
light.position[1], light.position[2]))
SpotLight.AddDirection(builder, CreateVector3f(builder, light.direction[0],
light.direction[1], light.direction[2]))
SpotLight.AddColor(builder, CreateColor3f(builder, light.color[0], light.color[1],
light.color[2]))
SpotLight.AddIntensity(builder, light.intensity)
SpotLight.AddLinearFalloff(builder, light.linearFalloff)
SpotLight.AddQuadraticFalloff(builder, light.quadraticFalloff)
SpotLight.AddInnerSpotAngle(builder, light.innerSpotAngle)
SpotLight.AddOuterSpotAngle(builder, light.outerSpotAngle)
lightUnionOffset = SpotLight.End(builder)
Light.Start(builder)
Light.AddName(builder, nameOffset)
Light.AddLightType(builder, light.type)
Light.AddLight(builder, lightUnionOffset)
lightOffsets.append(Light.End(builder))
if lightOffsets:
SceneLightSet.StartLightsVector(builder, len(lightOffsets))
for offset in reversed(lightOffsets):
builder.PrependUOffsetTRelative(offset)
lightsOffset = builder.EndVector()
else:
        lightsOffset = 0
mainLightOffset = 0
if mainLight:
mainLightOffset = builder.CreateString(mainLight)
SceneLightSet.Start(builder)
SceneLightSet.AddLights(builder, lightsOffset)
SceneLightSet.AddMaxLights(builder, maxLights)
SceneLightSet.AddAmbientColor(builder,
CreateColor3f(builder, ambientColor[0], ambientColor[1], ambientColor[2]) if ambientColor
else 0)
SceneLightSet.AddAmbientIntensity(builder, ambientIntensity)
SceneLightSet.AddMainLight(builder, mainLightOffset)
builder.Finish(SceneLightSet.End(builder))
return builder.Output()
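# Illustrative input (assumed values, not from the original documentation): a
# data map accepted by convertLightSet could look like
#
#   data = {
#       'srgb': True,
#       'maxLights': 8,
#       'ambientColor': [0.1, 0.1, 0.1],
#       'ambientIntensity': 1.0,
#       'mainLight': 'sun',
#       'lights': [{
#           'name': 'sun',
#           'type': 'Directional',
#           'color': [1.0, 0.95, 0.9],
#           'intensity': 3.0,
#           'direction': [0.0, -1.0, 0.3]
#       }]
#   }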
|
[
"flatbuffers.Builder",
"DeepSeaScene.Color3f.CreateColor3f",
"DeepSeaScene.Vector3f.CreateVector3f"
] |
[((7001, 7023), 'flatbuffers.Builder', 'flatbuffers.Builder', (['(0)'], {}), '(0)\n', (7020, 7023), False, 'import flatbuffers\n'), ((9521, 9594), 'DeepSeaScene.Color3f.CreateColor3f', 'CreateColor3f', (['builder', 'ambientColor[0]', 'ambientColor[1]', 'ambientColor[2]'], {}), '(builder, ambientColor[0], ambientColor[1], ambientColor[2])\n', (9534, 9594), False, 'from DeepSeaScene.Color3f import CreateColor3f\n'), ((7239, 7327), 'DeepSeaScene.Vector3f.CreateVector3f', 'CreateVector3f', (['builder', 'light.direction[0]', 'light.direction[1]', 'light.direction[2]'], {}), '(builder, light.direction[0], light.direction[1], light.\n direction[2])\n', (7253, 7327), False, 'from DeepSeaScene.Vector3f import CreateVector3f\n'), ((7366, 7436), 'DeepSeaScene.Color3f.CreateColor3f', 'CreateColor3f', (['builder', 'light.color[0]', 'light.color[1]', 'light.color[2]'], {}), '(builder, light.color[0], light.color[1], light.color[2])\n', (7379, 7436), False, 'from DeepSeaScene.Color3f import CreateColor3f\n'), ((7661, 7746), 'DeepSeaScene.Vector3f.CreateVector3f', 'CreateVector3f', (['builder', 'light.position[0]', 'light.position[1]', 'light.position[2]'], {}), '(builder, light.position[0], light.position[1], light.position[2]\n )\n', (7675, 7746), False, 'from DeepSeaScene.Vector3f import CreateVector3f\n'), ((7779, 7849), 'DeepSeaScene.Color3f.CreateColor3f', 'CreateColor3f', (['builder', 'light.color[0]', 'light.color[1]', 'light.color[2]'], {}), '(builder, light.color[0], light.color[1], light.color[2])\n', (7792, 7849), False, 'from DeepSeaScene.Color3f import CreateColor3f\n'), ((8187, 8272), 'DeepSeaScene.Vector3f.CreateVector3f', 'CreateVector3f', (['builder', 'light.position[0]', 'light.position[1]', 'light.position[2]'], {}), '(builder, light.position[0], light.position[1], light.position[2]\n )\n', (8201, 8272), False, 'from DeepSeaScene.Vector3f import CreateVector3f\n'), ((8308, 8396), 'DeepSeaScene.Vector3f.CreateVector3f', 'CreateVector3f', (['builder', 'light.direction[0]', 'light.direction[1]', 'light.direction[2]'], {}), '(builder, light.direction[0], light.direction[1], light.\n direction[2])\n', (8322, 8396), False, 'from DeepSeaScene.Vector3f import CreateVector3f\n'), ((8428, 8498), 'DeepSeaScene.Color3f.CreateColor3f', 'CreateColor3f', (['builder', 'light.color[0]', 'light.color[1]', 'light.color[2]'], {}), '(builder, light.color[0], light.color[1], light.color[2])\n', (8441, 8498), False, 'from DeepSeaScene.Color3f import CreateColor3f\n')]
|
"""
Deque is double-ended-queue. Lets you append and prepend to a list. Faster at
finding stuff I guess?
It is O(1) of memory use when inserting or popping, but lists are O(N).
"""
from collections import deque
def search(lines, pattern, history=5):
previous_lines = deque(maxlen=history)
for line in lines:
if pattern in line:
yield line, previous_lines
previous_lines.append(line)
with open("potato.txt") as f:
for line, prevlines in search(f, "python", 3):
for pline in prevlines:
print(pline, end="")
print(line, end="")
print("-" * 20)
"""
You don't have to delete items from a deque. They are automatically deleted if
you add more items than the maxlength allows.
You can just use deques as lists too.
"""
p = deque(maxlen=3)
p.append(1)
p.append(2)
p.append(3)
p.append(4)
print(p)
p.appendleft(5)
print(p)
|
[
"collections.deque"
] |
[((800, 815), 'collections.deque', 'deque', ([], {'maxlen': '(3)'}), '(maxlen=3)\n', (805, 815), False, 'from collections import deque\n'), ((274, 295), 'collections.deque', 'deque', ([], {'maxlen': 'history'}), '(maxlen=history)\n', (279, 295), False, 'from collections import deque\n')]
|
"""This module contains tests for pyspark interval identifier.
isort:skip_file
"""
import pandas as pd
import pytest
from pywrangler.util.testing import PlainFrame
pytestmark = pytest.mark.pyspark # noqa: E402
pyspark = pytest.importorskip("pyspark") # noqa: E402
from tests.test_data.interval_identifier import (
CollectionGeneral,
CollectionIdenticalStartEnd,
CollectionMarkerSpecifics,
CollectionNoOrderGroupBy,
MultipleIntervalsSpanningGroupbyExtendedTriple,
ResultTypeRawIids,
ResultTypeValidIids
)
from pywrangler.pyspark.wranglers.interval_identifier import (
VectorizedCumSum,
VectorizedCumSumAdjusted
)
WRANGLER = (VectorizedCumSum, VectorizedCumSumAdjusted)
WRANGLER_IDS = [x.__name__ for x in WRANGLER]
WRANGLER_KWARGS = dict(argnames='wrangler',
argvalues=WRANGLER,
ids=WRANGLER_IDS)
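# Note (added for clarity): WRANGLER_KWARGS is unpacked below via
# @pytest.mark.parametrize(**WRANGLER_KWARGS), so every test in this module
# runs once per wrangler implementation listed in WRANGLER.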
@pytest.mark.parametrize(**WRANGLER_KWARGS)
@CollectionGeneral.pytest_parametrize_kwargs("marker_use")
@CollectionGeneral.pytest_parametrize_testcases
def test_base(testcase, wrangler, marker_use):
"""Tests against all available wranglers and test cases.
Parameters
----------
testcase: DataTestCase
Generates test data for given test case.
wrangler: pywrangler.wrangler_instance.interfaces.IntervalIdentifier
        Refers to the actual wrangler_instance being tested. See `WRANGLER`.
marker_use: dict
Defines the marker start/end use.
"""
# instantiate test case
testcase_instance = testcase("pyspark")
# instantiate wrangler
kwargs = testcase_instance.test_kwargs.copy()
kwargs.update(marker_use)
wrangler_instance = wrangler(**kwargs)
# pass wrangler to test case
testcase_instance.test(wrangler_instance.transform)
@pytest.mark.parametrize(**WRANGLER_KWARGS)
@CollectionIdenticalStartEnd.pytest_parametrize_testcases
def test_identical_start_end(testcase, wrangler):
"""Tests against all available wranglers and test cases.
Parameters
----------
testcase: DataTestCase
Generates test data for given test case.
wrangler: pywrangler.wrangler_instance.interfaces.IntervalIdentifier
        Refers to the actual wrangler_instance being tested. See `WRANGLER`.
"""
# instantiate test case
testcase_instance = testcase("pyspark")
# instantiate wrangler
wrangler_instance = wrangler(**testcase_instance.test_kwargs)
# pass wrangler to test case
testcase_instance.test(wrangler_instance.transform)
@pytest.mark.parametrize(**WRANGLER_KWARGS)
@CollectionMarkerSpecifics.pytest_parametrize_testcases
def test_marker_specifics(testcase, wrangler):
"""Tests specific `marker_start_use_first` and `marker_end_use_first`
scenarios.
Parameters
----------
testcase: DataTestCase
Generates test data for given test case.
wrangler: pywrangler.wrangler_instance.interfaces.IntervalIdentifier
        Refers to the actual wrangler_instance being tested. See `WRANGLER`.
"""
# instantiate test case
testcase_instance = testcase("pyspark")
# instantiate wrangler
wrangler_instance = wrangler(**testcase_instance.test_kwargs)
# pass wrangler to test case
testcase_instance.test(wrangler_instance.transform)
@pytest.mark.parametrize(**WRANGLER_KWARGS)
def test_repartition(wrangler):
"""Tests that repartition has no effect.
Parameters
----------
wrangler: pywrangler.wrangler_instance.interfaces.IntervalIdentifier
        Refers to the actual wrangler_instance being tested. See `WRANGLER`.
"""
# instantiate test case
testcase_instance = MultipleIntervalsSpanningGroupbyExtendedTriple()
# instantiate wrangler
wrangler_instance = wrangler(**testcase_instance.test_kwargs)
# pass wrangler to test case
testcase_instance.test.pyspark(wrangler_instance.transform, repartition=5)
@pytest.mark.parametrize(**WRANGLER_KWARGS)
def test_result_type_raw_iids(wrangler):
"""Test for correct raw iids constraints. Returned result only needs to
distinguish intervals regardless of their validity. Interval ids do not
need to be in specific order.
Parameters
----------
wrangler: pywrangler.wrangler_instance.interfaces.IntervalIdentifier
        Refers to the actual wrangler_instance being tested. See `WRANGLER`.
"""
testcase_instance = ResultTypeRawIids("pandas")
wrangler_instance = wrangler(result_type="raw",
**testcase_instance.test_kwargs)
df_input = testcase_instance.input.to_pyspark()
df_output = testcase_instance.output.to_pandas()
df_result = wrangler_instance.transform(df_input)
df_result = (PlainFrame.from_pyspark(df_result)
.to_pandas()
.sort_values(testcase_instance.orderby_columns)
.reset_index(drop=True))
col = testcase_instance.target_column_name
pd.testing.assert_series_equal(df_result[col].diff().ne(0),
df_output[col].diff().ne(0))
@CollectionGeneral.pytest_parametrize_kwargs("marker_use")
@pytest.mark.parametrize(**WRANGLER_KWARGS)
def test_result_type_valid_iids(wrangler, marker_use):
"""Test for correct valid iids constraints. Returned result needs to
distinguish valid from invalid intervals. Invalid intervals need to be 0.
Parameters
----------
wrangler: pywrangler.wrangler_instance.interfaces.IntervalIdentifier
        Refers to the actual wrangler_instance being tested. See `WRANGLER`.
marker_use: dict
Contains `marker_start_use_first` and `marker_end_use_first` parameters
as dict.
"""
testcase_instance = ResultTypeValidIids("pyspark")
kwargs = testcase_instance.test_kwargs.copy()
kwargs.update(marker_use)
wrangler_instance = wrangler(result_type="valid", **kwargs)
df_input = testcase_instance.input.to_pyspark()
df_output = testcase_instance.output.to_pandas()
df_result = wrangler_instance.transform(df_input)
df_result = (PlainFrame.from_pyspark(df_result)
.to_pandas()
.sort_values(testcase_instance.orderby_columns)
.reset_index(drop=True))
col = testcase_instance.target_column_name
pd.testing.assert_series_equal(df_result[col].diff().ne(0),
df_output[col].diff().ne(0))
pd.testing.assert_series_equal(df_result[col].eq(0),
df_output[col].eq(0))
@pytest.mark.parametrize(**WRANGLER_KWARGS)
@CollectionNoOrderGroupBy.pytest_parametrize_testcases
def test_no_order_groupby(testcase, wrangler):
"""Tests correct behaviour for missing groupby columns.
Parameters
----------
testcase: DataTestCase
Generates test data for given test case.
wrangler: pywrangler.wrangler_instance.interfaces.IntervalIdentifier
        Refers to the actual wrangler_instance being tested. See `WRANGLER`.
"""
# instantiate test case
testcase_instance = testcase("pyspark")
# instantiate wrangler
kwargs = testcase_instance.test_kwargs.copy()
kwargs.update({'groupby_columns': None})
wrangler_instance = wrangler(**kwargs)
# pass wrangler to test case
testcase_instance.test(wrangler_instance.transform)
|
[
"tests.test_data.interval_identifier.ResultTypeValidIids",
"pytest.importorskip",
"tests.test_data.interval_identifier.CollectionGeneral.pytest_parametrize_kwargs",
"pytest.mark.parametrize",
"pywrangler.util.testing.PlainFrame.from_pyspark",
"tests.test_data.interval_identifier.MultipleIntervalsSpanningGroupbyExtendedTriple",
"tests.test_data.interval_identifier.ResultTypeRawIids"
] |
[((223, 253), 'pytest.importorskip', 'pytest.importorskip', (['"""pyspark"""'], {}), "('pyspark')\n", (242, 253), False, 'import pytest\n'), ((888, 930), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ([], {}), '(**WRANGLER_KWARGS)\n', (911, 930), False, 'import pytest\n'), ((932, 989), 'tests.test_data.interval_identifier.CollectionGeneral.pytest_parametrize_kwargs', 'CollectionGeneral.pytest_parametrize_kwargs', (['"""marker_use"""'], {}), "('marker_use')\n", (975, 989), False, 'from tests.test_data.interval_identifier import CollectionGeneral, CollectionIdenticalStartEnd, CollectionMarkerSpecifics, CollectionNoOrderGroupBy, MultipleIntervalsSpanningGroupbyExtendedTriple, ResultTypeRawIids, ResultTypeValidIids\n'), ((1792, 1834), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ([], {}), '(**WRANGLER_KWARGS)\n', (1815, 1834), False, 'import pytest\n'), ((2530, 2572), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ([], {}), '(**WRANGLER_KWARGS)\n', (2553, 2572), False, 'import pytest\n'), ((3291, 3333), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ([], {}), '(**WRANGLER_KWARGS)\n', (3314, 3333), False, 'import pytest\n'), ((3913, 3955), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ([], {}), '(**WRANGLER_KWARGS)\n', (3936, 3955), False, 'import pytest\n'), ((5072, 5129), 'tests.test_data.interval_identifier.CollectionGeneral.pytest_parametrize_kwargs', 'CollectionGeneral.pytest_parametrize_kwargs', (['"""marker_use"""'], {}), "('marker_use')\n", (5115, 5129), False, 'from tests.test_data.interval_identifier import CollectionGeneral, CollectionIdenticalStartEnd, CollectionMarkerSpecifics, CollectionNoOrderGroupBy, MultipleIntervalsSpanningGroupbyExtendedTriple, ResultTypeRawIids, ResultTypeValidIids\n'), ((5131, 5173), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ([], {}), '(**WRANGLER_KWARGS)\n', (5154, 5173), False, 'import pytest\n'), ((6531, 6573), 'pytest.mark.parametrize', 'pytest.mark.parametrize', ([], {}), '(**WRANGLER_KWARGS)\n', (6554, 6573), False, 'import pytest\n'), ((3654, 3702), 'tests.test_data.interval_identifier.MultipleIntervalsSpanningGroupbyExtendedTriple', 'MultipleIntervalsSpanningGroupbyExtendedTriple', ([], {}), '()\n', (3700, 3702), False, 'from tests.test_data.interval_identifier import CollectionGeneral, CollectionIdenticalStartEnd, CollectionMarkerSpecifics, CollectionNoOrderGroupBy, MultipleIntervalsSpanningGroupbyExtendedTriple, ResultTypeRawIids, ResultTypeValidIids\n'), ((4398, 4425), 'tests.test_data.interval_identifier.ResultTypeRawIids', 'ResultTypeRawIids', (['"""pandas"""'], {}), "('pandas')\n", (4415, 4425), False, 'from tests.test_data.interval_identifier import CollectionGeneral, CollectionIdenticalStartEnd, CollectionMarkerSpecifics, CollectionNoOrderGroupBy, MultipleIntervalsSpanningGroupbyExtendedTriple, ResultTypeRawIids, ResultTypeValidIids\n'), ((5713, 5743), 'tests.test_data.interval_identifier.ResultTypeValidIids', 'ResultTypeValidIids', (['"""pyspark"""'], {}), "('pyspark')\n", (5732, 5743), False, 'from tests.test_data.interval_identifier import CollectionGeneral, CollectionIdenticalStartEnd, CollectionMarkerSpecifics, CollectionNoOrderGroupBy, MultipleIntervalsSpanningGroupbyExtendedTriple, ResultTypeRawIids, ResultTypeValidIids\n'), ((4721, 4755), 'pywrangler.util.testing.PlainFrame.from_pyspark', 'PlainFrame.from_pyspark', (['df_result'], {}), '(df_result)\n', (4744, 4755), False, 'from pywrangler.util.testing import PlainFrame\n'), ((6065, 6099), 
'pywrangler.util.testing.PlainFrame.from_pyspark', 'PlainFrame.from_pyspark', (['df_result'], {}), '(df_result)\n', (6088, 6099), False, 'from pywrangler.util.testing import PlainFrame\n')]
|
import os
from tmuxdir.dirmngr import ConfigHandler, DirMngr
import pytest
@pytest.fixture
def dir_mngr() -> DirMngr:
folder_name = "/tmp/tmuxdirtest/"
os.makedirs(folder_name, exist_ok=True)
cfg_handler = ConfigHandler(folder_name=folder_name)
yield DirMngr([], [".git"], cfg_handler=cfg_handler)
try:
os.remove(str(cfg_handler._full_path))
except FileNotFoundError:
pass
os.removedirs(folder_name)
@pytest.fixture
def cfg_handler() -> ConfigHandler:
folder_name = "/tmp/tmuxdirtest/"
os.makedirs(folder_name, exist_ok=True)
config_handler = ConfigHandler(folder_name=folder_name)
yield config_handler
try:
os.remove(str(config_handler._full_path))
except FileNotFoundError:
pass
os.removedirs(folder_name)
@pytest.fixture
def tmp_git_folder() -> str:
folder_name = "/tmp/repo/.git"
os.makedirs(folder_name, exist_ok=True)
yield "/".join(folder_name.split("/")[:-1])
os.removedirs(folder_name)
class TestDirManager:
def test_first_save(self, cfg_handler: ConfigHandler):
data = {"dirs": {"/tmp": "/tmp"}, "ignored_dirs": {}}
cfg_handler.save(data)
loaded = cfg_handler.load()
assert loaded == data
def test_add_dir(self, dir_mngr: DirMngr, tmp_git_folder: str):
assert dir_mngr.add(tmp_git_folder)[0] == tmp_git_folder
assert dir_mngr.add("/tmp/foo") == []
assert tmp_git_folder in dir_mngr.dirs
def test_add_dir_list(self, dir_mngr: DirMngr, tmp_git_folder: str):
folder = "/tmp/pit/"
assert dir_mngr.add(folder) == []
def test_add_clear(self, dir_mngr: DirMngr, tmp_git_folder: str):
assert dir_mngr.add(tmp_git_folder) == [tmp_git_folder]
assert dir_mngr.clear_added_dir(tmp_git_folder)
assert dir_mngr.list_dirs() == []
assert not dir_mngr.clear_added_dir("/tmp/random/")
assert tmp_git_folder not in dir_mngr.dirs
def test_clear_added_dirs(self, dir_mngr: DirMngr, tmp_git_folder: str):
assert dir_mngr.add(tmp_git_folder)[0] == tmp_git_folder
assert dir_mngr.clear_added_dir(tmp_git_folder)
assert dir_mngr.dirs == {}
assert dir_mngr.cfg_handler.load()[dir_mngr._DIRS_KEY] == {}
def test_add_ignore(self, dir_mngr: DirMngr, tmp_git_folder: str):
assert dir_mngr.add(tmp_git_folder) == [tmp_git_folder]
assert dir_mngr.ignore(tmp_git_folder)
assert dir_mngr.add(tmp_git_folder) == []
assert tmp_git_folder not in dir_mngr.list_dirs()
def test_clear_ignored_dirs(self, dir_mngr: DirMngr, tmp_git_folder: str):
assert dir_mngr.add(tmp_git_folder)[0] == tmp_git_folder
assert dir_mngr.ignore(tmp_git_folder)
assert dir_mngr.clear_ignored_dirs()
assert dir_mngr.ignored_dirs == {}
assert dir_mngr.cfg_handler.load()[dir_mngr._IGNORED_DIRS_KEY] == {}
@pytest.mark.skipif(
not os.path.isdir(os.path.expanduser("~/b/repos"))
or not os.environ.get("TMUXDIR_BENCH", False),
reason="~/b/repos doesn't exist",
)
def test_find_projects_v10_eager(self, benchmark, dir_mngr: DirMngr) -> None:
benchmark(dir_mngr.find_projects, "~/b/repos", [".git"], 3, True)
@pytest.mark.skipif(
not os.path.isdir(os.path.expanduser("~/b/repos"))
or not os.environ.get("TMUXDIR_BENCH", False),
reason="~/b/repos doesn't exist",
)
def test_find_projects_v11_not_eager(self, benchmark, dir_mngr: DirMngr) -> None:
benchmark(dir_mngr.find_projects, "~/b/repos", [".git"], 3, False)
|
[
"os.makedirs",
"os.removedirs",
"os.environ.get",
"tmuxdir.dirmngr.ConfigHandler",
"os.path.expanduser",
"tmuxdir.dirmngr.DirMngr"
] |
[((162, 201), 'os.makedirs', 'os.makedirs', (['folder_name'], {'exist_ok': '(True)'}), '(folder_name, exist_ok=True)\n', (173, 201), False, 'import os\n'), ((220, 258), 'tmuxdir.dirmngr.ConfigHandler', 'ConfigHandler', ([], {'folder_name': 'folder_name'}), '(folder_name=folder_name)\n', (233, 258), False, 'from tmuxdir.dirmngr import ConfigHandler, DirMngr\n'), ((419, 445), 'os.removedirs', 'os.removedirs', (['folder_name'], {}), '(folder_name)\n', (432, 445), False, 'import os\n'), ((542, 581), 'os.makedirs', 'os.makedirs', (['folder_name'], {'exist_ok': '(True)'}), '(folder_name, exist_ok=True)\n', (553, 581), False, 'import os\n'), ((603, 641), 'tmuxdir.dirmngr.ConfigHandler', 'ConfigHandler', ([], {'folder_name': 'folder_name'}), '(folder_name=folder_name)\n', (616, 641), False, 'from tmuxdir.dirmngr import ConfigHandler, DirMngr\n'), ((773, 799), 'os.removedirs', 'os.removedirs', (['folder_name'], {}), '(folder_name)\n', (786, 799), False, 'import os\n'), ((886, 925), 'os.makedirs', 'os.makedirs', (['folder_name'], {'exist_ok': '(True)'}), '(folder_name, exist_ok=True)\n', (897, 925), False, 'import os\n'), ((978, 1004), 'os.removedirs', 'os.removedirs', (['folder_name'], {}), '(folder_name)\n', (991, 1004), False, 'import os\n'), ((269, 315), 'tmuxdir.dirmngr.DirMngr', 'DirMngr', (['[]', "['.git']"], {'cfg_handler': 'cfg_handler'}), "([], ['.git'], cfg_handler=cfg_handler)\n", (276, 315), False, 'from tmuxdir.dirmngr import ConfigHandler, DirMngr\n'), ((3014, 3052), 'os.environ.get', 'os.environ.get', (['"""TMUXDIR_BENCH"""', '(False)'], {}), "('TMUXDIR_BENCH', False)\n", (3028, 3052), False, 'import os\n'), ((3358, 3396), 'os.environ.get', 'os.environ.get', (['"""TMUXDIR_BENCH"""', '(False)'], {}), "('TMUXDIR_BENCH', False)\n", (3372, 3396), False, 'import os\n'), ((2966, 2997), 'os.path.expanduser', 'os.path.expanduser', (['"""~/b/repos"""'], {}), "('~/b/repos')\n", (2984, 2997), False, 'import os\n'), ((3310, 3341), 'os.path.expanduser', 'os.path.expanduser', (['"""~/b/repos"""'], {}), "('~/b/repos')\n", (3328, 3341), False, 'import os\n')]
|
import os
import unittest
from baseLogger.ConsoleLogger import ConsoleLogger
from baseLogger.FileLogger import FileLogger
from baseLogger.LoggingConfig import LoggingConfig
from baseLogger.constants.LoggingEnabled import LoggingEnabled
from baseLogger.constants.MessageType import MessageType
from utilities.Config import Config
from utilities.StringProcessor import StringProcessor
# Logging Configuration unit test class.
# Tests running in serial.
# @Test(singleThreaded = true)
class LoggingConfigUnitTest(unittest.TestCase):
# Test getting Logging Enabled Setting.
# Override Config to 'YES'
def test_getLoggingEnabledSettingTest(self):
new_value_map = {"log": "YES"}
config = Config()
config.add_general_test_setting_values(new_value_map, True)
self.assertEquals(LoggingConfig(config).get_logging_enabled_setting(Config()), LoggingEnabled.YES.name,
"Expected Logging Enabled Setting YES.")
# Test getting Logging Enabled Setting.
# Override Config to 'ONFAIL'
def test_getLoggingEnabledOnFailSettingTest(self):
new_value_map = {"Log": "ONFAIL"}
config = Config()
config.add_general_test_setting_values(new_value_map, True)
self.assertEquals(LoggingConfig().get_logging_enabled_setting(config), LoggingEnabled.ONFAIL.name,
"Expected Logging Enabled Setting ONFAIL.")
# Test getting Logging Enabled Setting.
# Override Config to 'NO'
def test_getLoggingDisabledSettingTest(self):
new_value_map = {"Log": "NO"}
Config().add_general_test_setting_values(new_value_map, True)
self.assertEquals(LoggingConfig().get_logging_enabled_setting(), LoggingEnabled.NO.name,
"Expected Logging Enabled Setting NO.")
# Test getting Logging Enabled Setting with an Illegal Argument
# Override Config to 'INVALIDVALUE' - Expect IllegalArgumentException
def test_getLoggingSettingIllegalArgumentTest(self):
with self.assertRaises(NotImplementedError):
new_value_map = {"Log": "INVALIDVALUE"}
Config().add_general_test_setting_values(new_value_map, True)
LoggingConfig().get_logging_enabled_setting()
# Test getting Logging Level Setting.
# Override Config to 'VERBOSE'
def test_getLoggingLevelVerboseSettingTest(self):
new_value_map = {"LogLevel": "VERBOSE"}
Config().add_general_test_setting_values(new_value_map, True)
self.assertEquals(MessageType.VERBOSE.name, LoggingConfig().get_logging_level_setting(),
"Expected Logging Level Setting VERBOSE.")
# Test getting Logging Level Setting.
# Override Config to 'INFORMATION'
def test_getLoggingLevelInformationSettingTest(self):
new_value_map = {"LogLevel": "INFORMATION"}
Config().add_general_test_setting_values(new_value_map, True)
self.assertEquals(MessageType.INFORMATION.name, LoggingConfig().get_logging_level_setting(),
"Expected Logging Level Setting INFORMATION.")
# Test getting Logging Level Setting.
# Override Config to 'GENERIC'
def test_getLoggingLevelGenericSettingTest(self):
new_value_map = {"LogLevel": "GENERIC"}
Config().add_general_test_setting_values(new_value_map, True)
self.assertEquals(MessageType.GENERIC.name, LoggingConfig().get_logging_level_setting(),
"Expected Logging Level Setting GENERIC.")
# Test getting Logging Level Setting.
# Override Config to 'SUCCESS'
def test_getLoggingLevelSuccessSettingTest(self):
new_value_map = {"LogLevel": "SUCCESS"}
Config().add_general_test_setting_values(new_value_map, True)
self.assertEquals(MessageType.SUCCESS.name, LoggingConfig().get_logging_level_setting(),
"Expected Logging Level Setting SUCCESS.")
# Test getting Logging Level Setting.
# Override Config to 'WARNING'
def test_getLoggingLevelWarningSettingTest(self):
new_value_map = {"LogLevel": "WARNING"}
Config().add_general_test_setting_values(new_value_map, True)
self.assertEquals(MessageType.WARNING.name, LoggingConfig().get_logging_level_setting(),
"Expected Logging Level Setting WARNING.")
# Test getting Logging Level Setting.
# Override Config to 'ERROR'
def test_getLoggingLevelErrorSettingTest(self):
new_value_map = {"LogLevel": "ERROR"}
Config().add_general_test_setting_values(new_value_map, True)
self.assertEquals(MessageType.ERROR.name, LoggingConfig().get_logging_level_setting(),
"Expected Logging Level Setting ERROR.")
# Test getting Logging Level Setting.
# Override Config to 'SUSPENDED'
def test_getLoggingLevelSuspendedSettingTest(self):
new_value_map = {"LogLevel": "SUSPENDED"}
Config().add_general_test_setting_values(new_value_map, True)
self.assertEquals(MessageType.SUSPENDED.name, LoggingConfig().get_logging_level_setting(),
"Expected Logging Level Setting SUSPENDED.")
# Test getting Logging Level Setting with Illegal Argument.
# Override Config to 'INVALIDVALUE' - Expect IllegalArgumentException
def test_getLoggingLevelIllegalArgumentTest(self):
with self.assertRaises(AttributeError):
new_value_map = {"LogLevel": "INVALIDVALUE"}
config = Config().add_general_test_setting_values(new_value_map, True)
LoggingConfig(config).get_logging_level_setting()
# Test getting File Logger.
# Override Config LogType to 'TXT' which creates FileLogger.
def test_getFileLoggerTest(self):
new_value_map = {"LogType": "TXT", "Log": "YES"}
config = Config().add_general_test_setting_values(new_value_map, True)
file_name = "TestLog.txt"
logging_config = LoggingConfig(config).get_logger(file_name)
self.assertTrue(isinstance(logging_config, FileLogger), "Expected Logger to be of Type FileLogger.")
# Test getting File Logger.
# Override Config LogType to 'CONSOLE' which creates ConsoleLogger.
def test_getConsoleLoggerTest(self):
new_value_map = {"LogType": "CONSOLE", "Log": "YES"}
logging_config = LoggingConfig()
logging_config.add_general_test_setting_values(new_value_map, True)
file_name = "TestLog.txt"
logger = logging_config.get_logger(file_name)
instance = isinstance(logger, ConsoleLogger)
self.assertTrue(instance, "Expected Logger to be of Type ConsoleLogger.")
# Test getting File Logger.
# Override Config Log to 'NO' which creates ConsoleLogger by default.
def test_getConsoleLoggerLoggingDisabledTest(self):
new_value_map = {"Log": "NO"}
Config().add_general_test_setting_values(new_value_map, True)
file_name = "TestLog.txt"
logging_config = LoggingConfig().get_logger(file_name)
instance = isinstance(logging_config, ConsoleLogger)
self.assertTrue(instance, "Expected Logger to be of Type ConsoleLogger.")
# Test getting Log Directory.
def test_getLogDirectoryTest(self):
default_path = os.path.abspath(os.path.dirname(__file__)) + "\\Logs"
self.assertEquals(LoggingConfig().get_log_directory(), default_path,
StringProcessor.safe_formatter("Expected Default Path '{}'.", default_path))
|
[
"utilities.StringProcessor.StringProcessor.safe_formatter",
"os.path.dirname",
"utilities.Config.Config",
"baseLogger.LoggingConfig.LoggingConfig"
] |
[((712, 720), 'utilities.Config.Config', 'Config', ([], {}), '()\n', (718, 720), False, 'from utilities.Config import Config\n'), ((1161, 1169), 'utilities.Config.Config', 'Config', ([], {}), '()\n', (1167, 1169), False, 'from utilities.Config import Config\n'), ((6341, 6356), 'baseLogger.LoggingConfig.LoggingConfig', 'LoggingConfig', ([], {}), '()\n', (6354, 6356), False, 'from baseLogger.LoggingConfig import LoggingConfig\n'), ((7422, 7497), 'utilities.StringProcessor.StringProcessor.safe_formatter', 'StringProcessor.safe_formatter', (['"""Expected Default Path \'{}\'."""', 'default_path'], {}), '("Expected Default Path \'{}\'.", default_path)\n', (7452, 7497), False, 'from utilities.StringProcessor import StringProcessor\n'), ((865, 873), 'utilities.Config.Config', 'Config', ([], {}), '()\n', (871, 873), False, 'from utilities.Config import Config\n'), ((1586, 1594), 'utilities.Config.Config', 'Config', ([], {}), '()\n', (1592, 1594), False, 'from utilities.Config import Config\n'), ((2436, 2444), 'utilities.Config.Config', 'Config', ([], {}), '()\n', (2442, 2444), False, 'from utilities.Config import Config\n'), ((2865, 2873), 'utilities.Config.Config', 'Config', ([], {}), '()\n', (2871, 2873), False, 'from utilities.Config import Config\n'), ((3289, 3297), 'utilities.Config.Config', 'Config', ([], {}), '()\n', (3295, 3297), False, 'from utilities.Config import Config\n'), ((3705, 3713), 'utilities.Config.Config', 'Config', ([], {}), '()\n', (3711, 3713), False, 'from utilities.Config import Config\n'), ((4121, 4129), 'utilities.Config.Config', 'Config', ([], {}), '()\n', (4127, 4129), False, 'from utilities.Config import Config\n'), ((4531, 4539), 'utilities.Config.Config', 'Config', ([], {}), '()\n', (4537, 4539), False, 'from utilities.Config import Config\n'), ((4949, 4957), 'utilities.Config.Config', 'Config', ([], {}), '()\n', (4955, 4957), False, 'from utilities.Config import Config\n'), ((5835, 5843), 'utilities.Config.Config', 'Config', ([], {}), '()\n', (5841, 5843), False, 'from utilities.Config import Config\n'), ((5956, 5977), 'baseLogger.LoggingConfig.LoggingConfig', 'LoggingConfig', (['config'], {}), '(config)\n', (5969, 5977), False, 'from baseLogger.LoggingConfig import LoggingConfig\n'), ((6865, 6873), 'utilities.Config.Config', 'Config', ([], {}), '()\n', (6871, 6873), False, 'from utilities.Config import Config\n'), ((6986, 7001), 'baseLogger.LoggingConfig.LoggingConfig', 'LoggingConfig', ([], {}), '()\n', (6999, 7001), False, 'from baseLogger.LoggingConfig import LoggingConfig\n'), ((7281, 7306), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (7296, 7306), False, 'import os\n'), ((815, 836), 'baseLogger.LoggingConfig.LoggingConfig', 'LoggingConfig', (['config'], {}), '(config)\n', (828, 836), False, 'from baseLogger.LoggingConfig import LoggingConfig\n'), ((1264, 1279), 'baseLogger.LoggingConfig.LoggingConfig', 'LoggingConfig', ([], {}), '()\n', (1277, 1279), False, 'from baseLogger.LoggingConfig import LoggingConfig\n'), ((1674, 1689), 'baseLogger.LoggingConfig.LoggingConfig', 'LoggingConfig', ([], {}), '()\n', (1687, 1689), False, 'from baseLogger.LoggingConfig import LoggingConfig\n'), ((2128, 2136), 'utilities.Config.Config', 'Config', ([], {}), '()\n', (2134, 2136), False, 'from utilities.Config import Config\n'), ((2202, 2217), 'baseLogger.LoggingConfig.LoggingConfig', 'LoggingConfig', ([], {}), '()\n', (2215, 2217), False, 'from baseLogger.LoggingConfig import LoggingConfig\n'), ((2550, 2565), 
'baseLogger.LoggingConfig.LoggingConfig', 'LoggingConfig', ([], {}), '()\n', (2563, 2565), False, 'from baseLogger.LoggingConfig import LoggingConfig\n'), ((2983, 2998), 'baseLogger.LoggingConfig.LoggingConfig', 'LoggingConfig', ([], {}), '()\n', (2996, 2998), False, 'from baseLogger.LoggingConfig import LoggingConfig\n'), ((3403, 3418), 'baseLogger.LoggingConfig.LoggingConfig', 'LoggingConfig', ([], {}), '()\n', (3416, 3418), False, 'from baseLogger.LoggingConfig import LoggingConfig\n'), ((3819, 3834), 'baseLogger.LoggingConfig.LoggingConfig', 'LoggingConfig', ([], {}), '()\n', (3832, 3834), False, 'from baseLogger.LoggingConfig import LoggingConfig\n'), ((4235, 4250), 'baseLogger.LoggingConfig.LoggingConfig', 'LoggingConfig', ([], {}), '()\n', (4248, 4250), False, 'from baseLogger.LoggingConfig import LoggingConfig\n'), ((4643, 4658), 'baseLogger.LoggingConfig.LoggingConfig', 'LoggingConfig', ([], {}), '()\n', (4656, 4658), False, 'from baseLogger.LoggingConfig import LoggingConfig\n'), ((5065, 5080), 'baseLogger.LoggingConfig.LoggingConfig', 'LoggingConfig', ([], {}), '()\n', (5078, 5080), False, 'from baseLogger.LoggingConfig import LoggingConfig\n'), ((5501, 5509), 'utilities.Config.Config', 'Config', ([], {}), '()\n', (5507, 5509), False, 'from utilities.Config import Config\n'), ((5575, 5596), 'baseLogger.LoggingConfig.LoggingConfig', 'LoggingConfig', (['config'], {}), '(config)\n', (5588, 5596), False, 'from baseLogger.LoggingConfig import LoggingConfig\n'), ((7345, 7360), 'baseLogger.LoggingConfig.LoggingConfig', 'LoggingConfig', ([], {}), '()\n', (7358, 7360), False, 'from baseLogger.LoggingConfig import LoggingConfig\n')]
|
#fuzzytest.py
#<NAME>
#<NAME>
#fuzzy clustering for testFun.dat
from __future__ import division, print_function
import numpy as np
import matplotlib.pyplot as plt
import skfuzzy as fuzz
colors = ['b', 'orange', 'g', 'r', 'c', 'm', 'y', 'k', 'Brown', 'ForestGreen']
# Collect the test data from file
with open("testFun.dat") as textFile:
y = [line.split() for line in textFile]
y = np.array(y)
X = np.zeros(shape=(200,2))
# stores test data as number in array X (converts from strings)
for i in range(0,len(y)): # num rows
for j in range(0,len(y[0])): # num columns
X[i,j] = float(y[i,j])
xpts = np.zeros(len(y))
ypts = np.zeros(len(y))
labels = np.zeros(len(y)) # no labels
# xpts = x[all rows][0]
for i in range (0, len(y)):
xpts[i] = X[i][0]
# ypts = x[all rows][1]
for i in range (0, len(y)):
ypts[i] = X[i][1]
# Visualize the test data
fig0, ax0 = plt.subplots()
for label in range(2):  # plot up to 2 label colors; all points fall under label 0 since the data is unlabeled
ax0.plot(xpts[labels == label], ypts[labels == label], '.',
color=colors[label])
ax0.set_title('Test data: 200 points x2 clusters.')
plt.show()
# Set up the loop and plot
fig1, axes1 = plt.subplots(2, 1, figsize=(8, 8)) #number of figures
alldata = np.vstack((xpts, ypts))
fpcs = []
for ncenters, ax in enumerate(axes1.reshape(-1), 2):
cntr, u, u0, d, jm, p, fpc = fuzz.cluster.cmeans(
alldata, ncenters, 2, error=0.005, maxiter=1000, init=None)
print("Centers = ", str(ncenters), "\n") # u0 is the array of the memberiship functions
    for i in range(len(y)):  # iterate over the data points
        print("Data point: ", xpts[i], ",", ypts[i])  # data point coordinates
print("Membership: ")
for j in range(ncenters): #number of clusters
print("Cluster: ", j, "\n", u0[j][i]) #membership for cluster
print()
# Store fpc values for later
fpcs.append(fpc)
# Plot assigned clusters, for each data point in training set
cluster_membership = np.argmax(u, axis=0)
for j in range(ncenters):
ax.plot(xpts[cluster_membership == j],
ypts[cluster_membership == j], '.', color=colors[j])
# Mark the center of each fuzzy cluster
for pt in cntr:
ax.plot(pt[0], pt[1], 'rs')
ax.set_title('Centers = {0}; FPC = {1:.2f}'.format(ncenters, fpc))
ax.axis('off')
fig1.tight_layout()
plt.show()
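# Possible follow-up (not part of the original script): the collected FPC
# values could be used to pick the number of clusters with the best fuzzy
# partition coefficient, e.g.
#   best_ncenters = 2 + int(np.argmax(fpcs))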
|
[
"matplotlib.pyplot.show",
"numpy.argmax",
"numpy.zeros",
"skfuzzy.cluster.cmeans",
"numpy.array",
"matplotlib.pyplot.subplots",
"numpy.vstack"
] |
[((430, 441), 'numpy.array', 'np.array', (['y'], {}), '(y)\n', (438, 441), True, 'import numpy as np\n'), ((446, 470), 'numpy.zeros', 'np.zeros', ([], {'shape': '(200, 2)'}), '(shape=(200, 2))\n', (454, 470), True, 'import numpy as np\n'), ((925, 939), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (937, 939), True, 'import matplotlib.pyplot as plt\n'), ((1187, 1197), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1195, 1197), True, 'import matplotlib.pyplot as plt\n'), ((1240, 1274), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(2)', '(1)'], {'figsize': '(8, 8)'}), '(2, 1, figsize=(8, 8))\n', (1252, 1274), True, 'import matplotlib.pyplot as plt\n'), ((1304, 1327), 'numpy.vstack', 'np.vstack', (['(xpts, ypts)'], {}), '((xpts, ypts))\n', (1313, 1327), True, 'import numpy as np\n'), ((2400, 2410), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2408, 2410), True, 'import matplotlib.pyplot as plt\n'), ((1425, 1504), 'skfuzzy.cluster.cmeans', 'fuzz.cluster.cmeans', (['alldata', 'ncenters', '(2)'], {'error': '(0.005)', 'maxiter': '(1000)', 'init': 'None'}), '(alldata, ncenters, 2, error=0.005, maxiter=1000, init=None)\n', (1444, 1504), True, 'import skfuzzy as fuzz\n'), ((2020, 2040), 'numpy.argmax', 'np.argmax', (['u'], {'axis': '(0)'}), '(u, axis=0)\n', (2029, 2040), True, 'import numpy as np\n')]
|
# coding=utf-8
from django.contrib.contenttypes import generic
from django.contrib.contenttypes.models import ContentType
from django.contrib.sites.models import Site
from django.contrib.sites.managers import CurrentSiteManager
from django.db import models
from django.dispatch import receiver
from django.utils.translation import ugettext_lazy as _
from .fields import JSONField
from .consistency_enforcers import *
class EdgeTypeManager(models.Manager):
# Cache to avoid re-looking up EdgeType objects all over the place.
_cache = {}
def get(self, *args, **kwargs):
et = None
if 'id' in kwargs:
try:
et = self.__class__._cache[self.db][kwargs['id']]
except KeyError:
pass
elif 'pk' in kwargs:
try:
et = self.__class__._cache[self.db][kwargs['pk']]
except KeyError:
pass
elif 'name' in kwargs:
try:
et = self.__class__._cache[self.db][kwargs['name']]
except KeyError:
pass
if et is None:
et = super(EdgeTypeManager, self).get(*args, **kwargs)
self._add_to_cache(self.db, et)
return et
def _add_to_cache(self, using, et):
self.__class__._cache.setdefault(using, {})[et.id] = et
self.__class__._cache.setdefault(using, {})[et.name] = et
def rem_from_cache(self, using, et):
try:
del self.__class__._cache.setdefault(using, {})[et.id]
del self.__class__._cache.setdefault(using, {})[et.name]
except KeyError:
pass
def clear_cache(self):
"""
Clear out the edge-type cache.
"""
self.__class__._cache.clear()
class EdgeType(models.Model):
name = models.CharField(_(u'name'), max_length=100, unique=True)
read_as = models.CharField(_(u'read as'), max_length=100)
objects = EdgeTypeManager()
class Meta(object):
ordering = ['name']
verbose_name = _(u'Edge type')
verbose_name_plural = _(u'Edge types')
def __unicode__(self):
return u'%s' % self.name
def setting_name(self):
return self.name.upper()
def delete(self, using=None):
self.__class__.objects.rem_from_cache(using, self)
super(EdgeType, self).delete(using)
class EdgeTypeAssociationManager(models.Manager):
# Cache to avoid re-looking up EdgeTypeAssociation objects all over the place.
_cache = {}
_direct_cache = {}
_inverse_cache = {}
def get(self, *args, **kwargs):
eta = None
if 'id' in kwargs:
try:
eta = self.__class__._cache[self.db][kwargs['id']]
except KeyError:
pass
elif 'pk' in kwargs:
try:
eta = self.__class__._cache[self.db][kwargs['pk']]
except KeyError:
pass
if eta is None:
eta = super(EdgeTypeAssociationManager, self).get(*args, **kwargs)
self._add_to_cache(self.db, eta)
return eta
def get_for_direct_edge_type(self, et):
try:
eta = self.__class__._direct_cache[self.db][et.id]
except KeyError:
eta = self.get(direct=et)
self._add_to_cache(self.db, eta)
return eta
def get_for_inverse_edge_type(self, et):
try:
eta = self.__class__._inverse_cache[self.db][et.id]
except KeyError:
eta = self.get(inverse=et)
self._add_to_cache(self.db, eta)
return eta
def _add_to_cache(self, using, eta):
self.__class__._cache.setdefault(using, {})[eta.id] = eta
self.__class__._direct_cache.setdefault(using, {})[eta.direct.id] = eta
self.__class__._inverse_cache.setdefault(using, {})[eta.inverse.id] = eta
def rem_from_cache(self, using, eta):
try:
del self.__class__._cache.setdefault(using, {})[eta.id]
del self.__class__._direct_cache.setdefault(using, {})[eta.direct.id]
del self.__class__._inverse_cache.setdefault(using, {})[eta.inverse.id]
except KeyError:
pass
def clear_cache(self):
"""
Clear out the edge-type-association cache.
"""
self.__class__._cache.clear()
class EdgeTypeAssociation(models.Model):
direct = models.ForeignKey(EdgeType, unique=True, related_name='is_direct_in')
inverse = models.ForeignKey(EdgeType, unique=True, related_name='is_inverse_in')
objects = EdgeTypeAssociationManager()
def __unicode__(self):
return u"%(direct)s <-> %(inverse)s" % {
'direct': self.direct,
'inverse': self.inverse
}
def delete(self, using=None):
self.__class__.objects.rem_from_cache(using, self)
super(EdgeTypeAssociation, self).delete(using)
class Edge(models.Model):
# fromNode field
fromNode_type = models.ForeignKey(ContentType,
verbose_name=_(u'from node type'),
related_name="from_node_type_set_for_%(class)s")
fromNode_pk = models.TextField(_(u'fromNode ID'))
fromNode = generic.GenericForeignKey(ct_field="fromNode_type", fk_field="fromNode_pk")
# toNode field
toNode_type = models.ForeignKey(ContentType,
verbose_name=_(u'to node type'),
related_name="to_node_type_set_for_%(class)s")
toNode_pk = models.TextField(_(u'toNode ID'))
toNode = generic.GenericForeignKey(ct_field="toNode_type", fk_field="toNode_pk")
# edge attributes
type = models.ForeignKey(EdgeType)
attributes = JSONField(_(u'attributes'), default='{}')
# edge metadata
time = models.DateTimeField(_(u'time'), auto_now_add=True)
site = models.ForeignKey(Site, verbose_name=_(u'site'), related_name='edges')
auto = models.BooleanField(_(u'auto created'), default=False)
objects = models.Manager()
on_site = CurrentSiteManager()
class Meta(object):
unique_together = ['fromNode_type', 'fromNode_pk', 'toNode_type', 'toNode_pk', 'type', 'site']
ordering = ['-time']
def __unicode__(self):
return (
_(u'%(from)s %(verb)s %(to)s') % {
'from': self.fromNode if self.fromNode else '',
'verb': self.type.read_as,
'to': self.toNode if self.toNode else ''
}
)
@receiver(models.signals.pre_save, sender=Edge, dispatch_uid='pre_save_edge')
def pre_save_handler(instance, **kwargs):
if not instance.site_id:
instance.site = getattr(instance.fromNode, 'site', getattr(instance.toNode, 'site', Site.objects.get_current()))
class EdgeCount(models.Model):
# fromNode field
fromNode_type = models.ForeignKey(ContentType,
verbose_name=_(u'from node type'))
fromNode_pk = models.TextField(_(u'fromNode ID'))
fromNode = generic.GenericForeignKey(ct_field="fromNode_type", fk_field="fromNode_pk")
# edge attributes
type = models.ForeignKey(EdgeType)
# count
count = models.IntegerField(_(u'count'), default=0)
site = models.ForeignKey(Site, verbose_name=_(u'site'), related_name='edge_counters')
objects = models.Manager()
on_site = CurrentSiteManager()
def __unicode__(self):
return (
_(u'%(from)s has %(count)d %(type)s edge(s)') % {
'from': self.fromNode if self.fromNode else '',
'count': self.count,
'type': self.type
}
)
class Meta(object):
unique_together = ['fromNode_type', 'fromNode_pk', 'type', 'site']
@receiver(models.signals.pre_save, sender=EdgeCount, dispatch_uid='pre_save_edge_count')
def pre_save_count_handler(instance, **kwargs):
if not instance.site_id:
instance.site = getattr(instance.fromNode, 'site', Site.objects.get_current())
# CONNECT LISTENERS TO ENFORCE GRAPH CONSISTENCY
models.signals.post_save.connect(
SymmetricEdgeManager.create_symmetric_edge,
sender=Edge,
dispatch_uid='create_symmetric_edge'
)
models.signals.post_delete.connect(
SymmetricEdgeManager.delete_symmetric_edge,
sender=Edge,
dispatch_uid='delete_symmetric_edge'
)
models.signals.post_save.connect(
SymmetricEdgeTypeAssociationManager.create_symmetric_association,
sender=EdgeTypeAssociation,
dispatch_uid='create_symmetric_edge_type_association'
)
models.signals.post_delete.connect(
SymmetricEdgeTypeAssociationManager.delete_symmetric_association,
sender=EdgeTypeAssociation,
dispatch_uid='delete_symmetric_edge_type_association'
)
models.signals.post_save.connect(
EdgeCounter.increase_count,
sender=Edge,
dispatch_uid='increase_edge_count'
)
models.signals.post_delete.connect(
EdgeCounter.decrease_count,
sender=Edge,
dispatch_uid='decrease_edge_count'
)
models.signals.pre_delete.connect(
EdgeCleaner.clean_edges,
dispatch_uid='clean_edges'
)
# Clear the EdgeType cache
EdgeType.objects.clear_cache()
|
[
"django.db.models.signals.post_delete.connect",
"django.db.models.ForeignKey",
"django.contrib.sites.models.Site.objects.get_current",
"django.dispatch.receiver",
"django.contrib.sites.managers.CurrentSiteManager",
"django.db.models.Manager",
"django.db.models.signals.pre_delete.connect",
"django.contrib.contenttypes.generic.GenericForeignKey",
"django.db.models.signals.post_save.connect",
"django.utils.translation.ugettext_lazy"
] |
[((6541, 6617), 'django.dispatch.receiver', 'receiver', (['models.signals.pre_save'], {'sender': 'Edge', 'dispatch_uid': '"""pre_save_edge"""'}), "(models.signals.pre_save, sender=Edge, dispatch_uid='pre_save_edge')\n", (6549, 6617), False, 'from django.dispatch import receiver\n'), ((7791, 7883), 'django.dispatch.receiver', 'receiver', (['models.signals.pre_save'], {'sender': 'EdgeCount', 'dispatch_uid': '"""pre_save_edge_count"""'}), "(models.signals.pre_save, sender=EdgeCount, dispatch_uid=\n 'pre_save_edge_count')\n", (7799, 7883), False, 'from django.dispatch import receiver\n'), ((8095, 8226), 'django.db.models.signals.post_save.connect', 'models.signals.post_save.connect', (['SymmetricEdgeManager.create_symmetric_edge'], {'sender': 'Edge', 'dispatch_uid': '"""create_symmetric_edge"""'}), "(SymmetricEdgeManager.create_symmetric_edge,\n sender=Edge, dispatch_uid='create_symmetric_edge')\n", (8127, 8226), False, 'from django.db import models\n'), ((8237, 8371), 'django.db.models.signals.post_delete.connect', 'models.signals.post_delete.connect', (['SymmetricEdgeManager.delete_symmetric_edge'], {'sender': 'Edge', 'dispatch_uid': '"""delete_symmetric_edge"""'}), "(SymmetricEdgeManager.\n delete_symmetric_edge, sender=Edge, dispatch_uid='delete_symmetric_edge')\n", (8271, 8371), False, 'from django.db import models\n'), ((8382, 8573), 'django.db.models.signals.post_save.connect', 'models.signals.post_save.connect', (['SymmetricEdgeTypeAssociationManager.create_symmetric_association'], {'sender': 'EdgeTypeAssociation', 'dispatch_uid': '"""create_symmetric_edge_type_association"""'}), "(SymmetricEdgeTypeAssociationManager.\n create_symmetric_association, sender=EdgeTypeAssociation, dispatch_uid=\n 'create_symmetric_edge_type_association')\n", (8414, 8573), False, 'from django.db import models\n'), ((8578, 8771), 'django.db.models.signals.post_delete.connect', 'models.signals.post_delete.connect', (['SymmetricEdgeTypeAssociationManager.delete_symmetric_association'], {'sender': 'EdgeTypeAssociation', 'dispatch_uid': '"""delete_symmetric_edge_type_association"""'}), "(SymmetricEdgeTypeAssociationManager.\n delete_symmetric_association, sender=EdgeTypeAssociation, dispatch_uid=\n 'delete_symmetric_edge_type_association')\n", (8612, 8771), False, 'from django.db import models\n'), ((8777, 8890), 'django.db.models.signals.post_save.connect', 'models.signals.post_save.connect', (['EdgeCounter.increase_count'], {'sender': 'Edge', 'dispatch_uid': '"""increase_edge_count"""'}), "(EdgeCounter.increase_count, sender=Edge,\n dispatch_uid='increase_edge_count')\n", (8809, 8890), False, 'from django.db import models\n'), ((8901, 9016), 'django.db.models.signals.post_delete.connect', 'models.signals.post_delete.connect', (['EdgeCounter.decrease_count'], {'sender': 'Edge', 'dispatch_uid': '"""decrease_edge_count"""'}), "(EdgeCounter.decrease_count, sender=Edge,\n dispatch_uid='decrease_edge_count')\n", (8935, 9016), False, 'from django.db import models\n'), ((9028, 9119), 'django.db.models.signals.pre_delete.connect', 'models.signals.pre_delete.connect', (['EdgeCleaner.clean_edges'], {'dispatch_uid': '"""clean_edges"""'}), "(EdgeCleaner.clean_edges, dispatch_uid=\n 'clean_edges')\n", (9061, 9119), False, 'from django.db import models\n'), ((4414, 4483), 'django.db.models.ForeignKey', 'models.ForeignKey', (['EdgeType'], {'unique': '(True)', 'related_name': '"""is_direct_in"""'}), "(EdgeType, unique=True, related_name='is_direct_in')\n", (4431, 4483), False, 'from django.db import models\n'), ((4498, 4568), 
'django.db.models.ForeignKey', 'models.ForeignKey', (['EdgeType'], {'unique': '(True)', 'related_name': '"""is_inverse_in"""'}), "(EdgeType, unique=True, related_name='is_inverse_in')\n", (4515, 4568), False, 'from django.db import models\n'), ((5249, 5324), 'django.contrib.contenttypes.generic.GenericForeignKey', 'generic.GenericForeignKey', ([], {'ct_field': '"""fromNode_type"""', 'fk_field': '"""fromNode_pk"""'}), "(ct_field='fromNode_type', fk_field='fromNode_pk')\n", (5274, 5324), False, 'from django.contrib.contenttypes import generic\n'), ((5609, 5680), 'django.contrib.contenttypes.generic.GenericForeignKey', 'generic.GenericForeignKey', ([], {'ct_field': '"""toNode_type"""', 'fk_field': '"""toNode_pk"""'}), "(ct_field='toNode_type', fk_field='toNode_pk')\n", (5634, 5680), False, 'from django.contrib.contenttypes import generic\n'), ((5715, 5742), 'django.db.models.ForeignKey', 'models.ForeignKey', (['EdgeType'], {}), '(EdgeType)\n', (5732, 5742), False, 'from django.db import models\n'), ((6049, 6065), 'django.db.models.Manager', 'models.Manager', ([], {}), '()\n', (6063, 6065), False, 'from django.db import models\n'), ((6080, 6100), 'django.contrib.sites.managers.CurrentSiteManager', 'CurrentSiteManager', ([], {}), '()\n', (6098, 6100), False, 'from django.contrib.sites.managers import CurrentSiteManager\n'), ((7057, 7132), 'django.contrib.contenttypes.generic.GenericForeignKey', 'generic.GenericForeignKey', ([], {'ct_field': '"""fromNode_type"""', 'fk_field': '"""fromNode_pk"""'}), "(ct_field='fromNode_type', fk_field='fromNode_pk')\n", (7082, 7132), False, 'from django.contrib.contenttypes import generic\n'), ((7167, 7194), 'django.db.models.ForeignKey', 'models.ForeignKey', (['EdgeType'], {}), '(EdgeType)\n', (7184, 7194), False, 'from django.db import models\n'), ((7370, 7386), 'django.db.models.Manager', 'models.Manager', ([], {}), '()\n', (7384, 7386), False, 'from django.db import models\n'), ((7401, 7421), 'django.contrib.sites.managers.CurrentSiteManager', 'CurrentSiteManager', ([], {}), '()\n', (7419, 7421), False, 'from django.contrib.sites.managers import CurrentSiteManager\n'), ((1835, 1845), 'django.utils.translation.ugettext_lazy', '_', (['u"""name"""'], {}), "(u'name')\n", (1836, 1845), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1907, 1920), 'django.utils.translation.ugettext_lazy', '_', (['u"""read as"""'], {}), "(u'read as')\n", (1908, 1920), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2047, 2062), 'django.utils.translation.ugettext_lazy', '_', (['u"""Edge type"""'], {}), "(u'Edge type')\n", (2048, 2062), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2093, 2109), 'django.utils.translation.ugettext_lazy', '_', (['u"""Edge types"""'], {}), "(u'Edge types')\n", (2094, 2109), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((5215, 5232), 'django.utils.translation.ugettext_lazy', '_', (['u"""fromNode ID"""'], {}), "(u'fromNode ID')\n", (5216, 5232), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((5579, 5594), 'django.utils.translation.ugettext_lazy', '_', (['u"""toNode ID"""'], {}), "(u'toNode ID')\n", (5580, 5594), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((5770, 5786), 'django.utils.translation.ugettext_lazy', '_', (['u"""attributes"""'], {}), "(u'attributes')\n", (5771, 5786), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((5855, 5865), 'django.utils.translation.ugettext_lazy', '_', 
(['u"""time"""'], {}), "(u'time')\n", (5856, 5865), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((5999, 6017), 'django.utils.translation.ugettext_lazy', '_', (['u"""auto created"""'], {}), "(u'auto created')\n", (6000, 6017), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((7023, 7040), 'django.utils.translation.ugettext_lazy', '_', (['u"""fromNode ID"""'], {}), "(u'fromNode ID')\n", (7024, 7040), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((7240, 7251), 'django.utils.translation.ugettext_lazy', '_', (['u"""count"""'], {}), "(u'count')\n", (7241, 7251), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((5071, 5091), 'django.utils.translation.ugettext_lazy', '_', (['u"""from node type"""'], {}), "(u'from node type')\n", (5072, 5091), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((5443, 5461), 'django.utils.translation.ugettext_lazy', '_', (['u"""to node type"""'], {}), "(u'to node type')\n", (5444, 5461), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((5934, 5944), 'django.utils.translation.ugettext_lazy', '_', (['u"""site"""'], {}), "(u'site')\n", (5935, 5944), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((6315, 6345), 'django.utils.translation.ugettext_lazy', '_', (['u"""%(from)s %(verb)s %(to)s"""'], {}), "(u'%(from)s %(verb)s %(to)s')\n", (6316, 6345), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((6966, 6986), 'django.utils.translation.ugettext_lazy', '_', (['u"""from node type"""'], {}), "(u'from node type')\n", (6967, 6986), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((7313, 7323), 'django.utils.translation.ugettext_lazy', '_', (['u"""site"""'], {}), "(u'site')\n", (7314, 7323), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((7479, 7524), 'django.utils.translation.ugettext_lazy', '_', (['u"""%(from)s has %(count)d %(type)s edge(s)"""'], {}), "(u'%(from)s has %(count)d %(type)s edge(s)')\n", (7480, 7524), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((8015, 8041), 'django.contrib.sites.models.Site.objects.get_current', 'Site.objects.get_current', ([], {}), '()\n', (8039, 8041), False, 'from django.contrib.sites.models import Site\n'), ((6781, 6807), 'django.contrib.sites.models.Site.objects.get_current', 'Site.objects.get_current', ([], {}), '()\n', (6805, 6807), False, 'from django.contrib.sites.models import Site\n')]
|
from uuid import uuid4
import boto3
import pytest
from moto import mock_dynamodb2, mock_s3
from tests.local_login import MockAuthenticator
from warehouse14 import DBBackend, PackageStorage
from warehouse14.repos_dynamo import DynamoDBBackend
from warehouse14.storage import S3Storage
@pytest.fixture
def bucket():
"""Pytest fixture that creates the bucket in
the fake moto AWS account
"""
with mock_s3():
s3 = boto3.resource("s3", region_name="us-east-1")
bucket = s3.Bucket(str(uuid4()))
bucket.create()
yield bucket
@pytest.fixture
def table():
"""Pytest fixture that creates the table in
the fake moto AWS account
"""
with mock_dynamodb2():
dynamodb = boto3.resource("dynamodb", region_name="us-east-1")
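        # Create a throwaway table keyed on (pk, sk), plus an inverted sk_gsi index so items can also be queried by sort key.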
yield dynamodb.create_table(
TableName=str(uuid4()),
AttributeDefinitions=[
{"AttributeName": "pk", "AttributeType": "S"},
{"AttributeName": "sk", "AttributeType": "S"},
],
KeySchema=[
{"AttributeName": "pk", "KeyType": "HASH"},
{"AttributeName": "sk", "KeyType": "RANGE"},
],
BillingMode="PAY_PER_REQUEST",
GlobalSecondaryIndexes=[
{
"IndexName": "sk_gsi",
"KeySchema": [
{"AttributeName": "sk", "KeyType": "HASH"},
{"AttributeName": "pk", "KeyType": "RANGE"},
],
"Projection": {
"ProjectionType": "ALL",
},
}
],
)
@pytest.fixture
def authenticator():
return MockAuthenticator()
@pytest.fixture
def db(table) -> DBBackend:
return DynamoDBBackend(table)
@pytest.fixture
def storage(bucket) -> PackageStorage:
return S3Storage(bucket)
@pytest.fixture
async def page():
from pyppeteer import launch
browser = await launch({"headless": True})
yield (await browser.pages())[0]
await browser.close()
|
[
"warehouse14.storage.S3Storage",
"moto.mock_dynamodb2",
"uuid.uuid4",
"warehouse14.repos_dynamo.DynamoDBBackend",
"tests.local_login.MockAuthenticator",
"boto3.resource",
"moto.mock_s3",
"pyppeteer.launch"
] |
[((1731, 1750), 'tests.local_login.MockAuthenticator', 'MockAuthenticator', ([], {}), '()\n', (1748, 1750), False, 'from tests.local_login import MockAuthenticator\n'), ((1808, 1830), 'warehouse14.repos_dynamo.DynamoDBBackend', 'DynamoDBBackend', (['table'], {}), '(table)\n', (1823, 1830), False, 'from warehouse14.repos_dynamo import DynamoDBBackend\n'), ((1899, 1916), 'warehouse14.storage.S3Storage', 'S3Storage', (['bucket'], {}), '(bucket)\n', (1908, 1916), False, 'from warehouse14.storage import S3Storage\n'), ((414, 423), 'moto.mock_s3', 'mock_s3', ([], {}), '()\n', (421, 423), False, 'from moto import mock_dynamodb2, mock_s3\n'), ((438, 483), 'boto3.resource', 'boto3.resource', (['"""s3"""'], {'region_name': '"""us-east-1"""'}), "('s3', region_name='us-east-1')\n", (452, 483), False, 'import boto3\n'), ((696, 712), 'moto.mock_dynamodb2', 'mock_dynamodb2', ([], {}), '()\n', (710, 712), False, 'from moto import mock_dynamodb2, mock_s3\n'), ((733, 784), 'boto3.resource', 'boto3.resource', (['"""dynamodb"""'], {'region_name': '"""us-east-1"""'}), "('dynamodb', region_name='us-east-1')\n", (747, 784), False, 'import boto3\n'), ((2007, 2033), 'pyppeteer.launch', 'launch', (["{'headless': True}"], {}), "({'headless': True})\n", (2013, 2033), False, 'from pyppeteer import launch\n'), ((515, 522), 'uuid.uuid4', 'uuid4', ([], {}), '()\n', (520, 522), False, 'from uuid import uuid4\n'), ((848, 855), 'uuid.uuid4', 'uuid4', ([], {}), '()\n', (853, 855), False, 'from uuid import uuid4\n')]
|
from django.contrib import admin
from .. import admin as enhanced_admin
from .models import Author, Book, Character, Theme
class EnhancedModelAdmin(enhanced_admin.EnhancedModelAdminMixin,
admin.ModelAdmin):
pass
class CharacterInline(enhanced_admin.EnhancedAdminMixin,
admin.TabularInline):
model = Character
class BookAdmin(EnhancedModelAdmin):
inlines = (CharacterInline,)
filter_horizontal = ('themes',)
admin.site.register(Author, EnhancedModelAdmin)
admin.site.register(Book, BookAdmin)
admin.site.register(Theme, EnhancedModelAdmin)
|
[
"django.contrib.admin.site.register"
] |
[((478, 525), 'django.contrib.admin.site.register', 'admin.site.register', (['Author', 'EnhancedModelAdmin'], {}), '(Author, EnhancedModelAdmin)\n', (497, 525), False, 'from django.contrib import admin\n'), ((526, 562), 'django.contrib.admin.site.register', 'admin.site.register', (['Book', 'BookAdmin'], {}), '(Book, BookAdmin)\n', (545, 562), False, 'from django.contrib import admin\n'), ((563, 609), 'django.contrib.admin.site.register', 'admin.site.register', (['Theme', 'EnhancedModelAdmin'], {}), '(Theme, EnhancedModelAdmin)\n', (582, 609), False, 'from django.contrib import admin\n')]
|
import typing
import discord
from discord.ext import commands
from .database import fetch_guild_db
from .logging import LoggingMixin
from .rolls import (RollHandler, QuietRollHandler, SekretRollHandler,
RollList, DiceDelta)
from .utils import handle_http_exception
class RollCommands(commands.Cog, LoggingMixin):
def __init__(self, bot, db):
self.bot = bot
self.db = db
super().__init__()
@commands.command(name='z', help='Evaluate a dice roll.')
@fetch_guild_db
@handle_http_exception
async def zardoz_roll(self, ctx, *, args):
try:
roll = RollHandler(ctx, self.log, ctx.variables, args,
game_mode=ctx.game_mode)
except ValueError as e:
self.log.error(f'Roll handling failed: {e}')
await ctx.message.reply(f'You fucked up your roll, {ctx.author}. {e}')
else:
await roll.add_to_db(ctx.guild_db)
await ctx.message.reply(roll.msg())
@commands.command(name='zq', help='Evaluate a dice roll, quietly.')
@fetch_guild_db
@handle_http_exception
async def zardoz_quiet_roll(self, ctx, *, args):
try:
roll = QuietRollHandler(ctx, self.log, ctx.variables, args,
game_mode=ctx.game_mode)
except ValueError as e:
self.log.error(f'Roll handling failed: {e}')
await ctx.message.reply(f'You fucked up your roll, {ctx.author}. {e}')
else:
await roll.add_to_db(ctx.guild_db)
await ctx.message.reply(roll.msg())
@commands.command(name='zs', help='Make a secret roll and DM to member.')
@fetch_guild_db
@handle_http_exception
async def zardoz_secret_roll(self, ctx, member: typing.Optional[discord.Member], *, args):
if member is None:
member = ctx.author
try:
roll = SekretRollHandler(ctx, self.log, ctx.variables, args,
game_mode=ctx.game_mode, require_tag=True)
except ValueError as e:
self.log.error(f'Roll handling failed: {e}')
await ctx.author.send(f'You fucked up your roll, {ctx.author}. {e}')
else:
await roll.add_to_db(ctx.guild_db)
await member.send(roll.msg())
@commands.command(name='zr', help='Reroll previous roll')
@fetch_guild_db
@handle_http_exception
async def zroll_reroll(self, ctx, member: typing.Optional[discord.Member]):
if member is None:
member = ctx.author
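        # Fetch the member's most recent saved roll and re-evaluate it with the current variables and game mode.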
saved = await ctx.guild_db.get_last_user_roll(member.id)
if saved is None:
await ctx.message.reply(f'Ope, no roll history for {member}.')
else:
cmd = saved['roll']
roll = RollHandler(ctx, self.log, ctx.variables, cmd,
game_mode=ctx.game_mode)
await roll.add_to_db(ctx.guild_db)
await ctx.message.reply(f'Reroll {roll.msg()}')
|
[
"discord.ext.commands.command"
] |
[((449, 505), 'discord.ext.commands.command', 'commands.command', ([], {'name': '"""z"""', 'help': '"""Evaluate a dice roll."""'}), "(name='z', help='Evaluate a dice roll.')\n", (465, 505), False, 'from discord.ext import commands\n'), ((1024, 1090), 'discord.ext.commands.command', 'commands.command', ([], {'name': '"""zq"""', 'help': '"""Evaluate a dice roll, quietly."""'}), "(name='zq', help='Evaluate a dice roll, quietly.')\n", (1040, 1090), False, 'from discord.ext import commands\n'), ((1625, 1697), 'discord.ext.commands.command', 'commands.command', ([], {'name': '"""zs"""', 'help': '"""Make a secret roll and DM to member."""'}), "(name='zs', help='Make a secret roll and DM to member.')\n", (1641, 1697), False, 'from discord.ext import commands\n'), ((2345, 2401), 'discord.ext.commands.command', 'commands.command', ([], {'name': '"""zr"""', 'help': '"""Reroll previous roll"""'}), "(name='zr', help='Reroll previous roll')\n", (2361, 2401), False, 'from discord.ext import commands\n')]
|
#!/usr/bin/python2.7
# This is a simple script which copies old todoist auth details into etcd.
# If you've never run the scraper, you don't need to run this script.
# $1 is a job tag, which identifies the todoist user authentication details
# in etcd.
import json
import os
import sys
import etcd
etcd_path = '/todoist/%s' % sys.argv[1]
etcd_client = etcd.Client(host='192.168.50.1', port=2379)
# Copy across our auth details
with open(os.path.expanduser('~/.todoist')) as f:
etcd_client.write('%s/auth' % etcd_path, f.read())
# Dump the finished state
def dumpdir(path):
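    # Recursively walk the etcd directory tree and print each leaf key/value pair.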
dir = etcd_client.get(path)
for result in dir.children:
if result.dir:
dumpdir(result.key)
else:
print('%s: %s' %(result.key, result.value))
dumpdir(etcd_path)
|
[
"os.path.expanduser",
"etcd.Client"
] |
[((356, 399), 'etcd.Client', 'etcd.Client', ([], {'host': '"""192.168.50.1"""', 'port': '(2379)'}), "(host='192.168.50.1', port=2379)\n", (367, 399), False, 'import etcd\n'), ((442, 474), 'os.path.expanduser', 'os.path.expanduser', (['"""~/.todoist"""'], {}), "('~/.todoist')\n", (460, 474), False, 'import os\n')]
|
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras.layers import Dense, Conv2D, Flatten, Dropout, MaxPooling2D
from tensorflow.keras import Model
class CNN(Model):
def __init__(self):
super(CNN, self).__init__()
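        # Two 3x3 conv layers, 2x2 max pooling and dropout, followed by a dense head ending in a 3-way softmax.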
self.conv1 = Conv2D(32, 3, padding='same', activation='relu')
self.conv2 = Conv2D(64, 3, padding='same', activation='relu')
self.pool1 = MaxPooling2D(pool_size=(2, 2), padding='same')
self.dropout1 = Dropout(0.25)
self.flatten = Flatten()
self.d1 = Dense(128, activation='relu')
self.dropout2 = Dropout(0.5)
self.d2 = tf.keras.layers.Dense(3, activation='softmax')
def call(self, x):
x = self.conv1(x)
x = self.conv2(x)
x = self.pool1(x)
x = self.dropout1(x)
x = self.flatten(x)
x = self.d1(x)
x = self.dropout2(x)
x = self.d2(x)
return x
def model(self):
x = keras.Input(shape=(15, 15, 1))
return Model(inputs=[x], outputs=self.call(x))
|
[
"tensorflow.keras.layers.MaxPooling2D",
"tensorflow.keras.layers.Conv2D",
"tensorflow.keras.layers.Dropout",
"tensorflow.keras.layers.Dense",
"tensorflow.keras.Input",
"tensorflow.keras.layers.Flatten"
] |
[((322, 370), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(32)', '(3)'], {'padding': '"""same"""', 'activation': '"""relu"""'}), "(32, 3, padding='same', activation='relu')\n", (328, 370), False, 'from tensorflow.keras.layers import Dense, Conv2D, Flatten, Dropout, MaxPooling2D\n'), ((401, 449), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(64)', '(3)'], {'padding': '"""same"""', 'activation': '"""relu"""'}), "(64, 3, padding='same', activation='relu')\n", (407, 449), False, 'from tensorflow.keras.layers import Dense, Conv2D, Flatten, Dropout, MaxPooling2D\n'), ((498, 544), 'tensorflow.keras.layers.MaxPooling2D', 'MaxPooling2D', ([], {'pool_size': '(2, 2)', 'padding': '"""same"""'}), "(pool_size=(2, 2), padding='same')\n", (510, 544), False, 'from tensorflow.keras.layers import Dense, Conv2D, Flatten, Dropout, MaxPooling2D\n'), ((578, 591), 'tensorflow.keras.layers.Dropout', 'Dropout', (['(0.25)'], {}), '(0.25)\n', (585, 591), False, 'from tensorflow.keras.layers import Dense, Conv2D, Flatten, Dropout, MaxPooling2D\n'), ((642, 651), 'tensorflow.keras.layers.Flatten', 'Flatten', ([], {}), '()\n', (649, 651), False, 'from tensorflow.keras.layers import Dense, Conv2D, Flatten, Dropout, MaxPooling2D\n'), ((679, 708), 'tensorflow.keras.layers.Dense', 'Dense', (['(128)'], {'activation': '"""relu"""'}), "(128, activation='relu')\n", (684, 708), False, 'from tensorflow.keras.layers import Dense, Conv2D, Flatten, Dropout, MaxPooling2D\n'), ((742, 754), 'tensorflow.keras.layers.Dropout', 'Dropout', (['(0.5)'], {}), '(0.5)\n', (749, 754), False, 'from tensorflow.keras.layers import Dense, Conv2D, Flatten, Dropout, MaxPooling2D\n'), ((782, 828), 'tensorflow.keras.layers.Dense', 'tf.keras.layers.Dense', (['(3)'], {'activation': '"""softmax"""'}), "(3, activation='softmax')\n", (803, 828), True, 'import tensorflow as tf\n'), ((1246, 1276), 'tensorflow.keras.Input', 'keras.Input', ([], {'shape': '(15, 15, 1)'}), '(shape=(15, 15, 1))\n', (1257, 1276), False, 'from tensorflow import keras\n')]
|
from collections import Counter
from collections import deque
from typing import List
class Solution:
def ladderLength(self, beginWord: str, endWord: str, wordList: List[str]) -> int:
wordList = set(wordList)
if endWord not in wordList:
return 0
q = deque([beginWord])
step = 0
wordList.discard(beginWord)
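        # Breadth-first search: each level tries every single-letter substitution,
        # so `step` equals the ladder length when endWord is dequeued.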
while len(q) != 0:
sz = len(q)
step += 1
for _ in range(sz):
cur_node = q.popleft()
if cur_node == endWord:
return step
for i in range(len(cur_node)):
for c in 'abcdefghijklmnopqrstuvwxyz':
next_node = cur_node[:i] + c + cur_node[i+1:]
if next_node in wordList:
wordList.remove(next_node)
q.append(next_node)
return 0
|
[
"collections.deque"
] |
[((275, 293), 'collections.deque', 'deque', (['[beginWord]'], {}), '([beginWord])\n', (280, 293), False, 'from collections import deque\n')]
|
from telegram import update
from telegram.ext import Updater, CommandHandler
import requests
import re
import os
URL = "https://random.dog/woof.json"
def get_url():
contents = requests.get(URL).json()
url = contents["url"]
return url
def woof(update, context):
url = get_url()
chat_id = update.message.chat_id
context.bot.send_photo(chat_id=chat_id, photo=url)
def main():
    token_id = os.environ.get("TELEGRAM_WOOF_TOKEN") or "NA"  # fall back to "NA" when the env var is unset or empty
updater = Updater(token=token_id, use_context=True)
dp = updater.dispatcher
dp.add_handler(CommandHandler("woof", woof))
updater.start_polling()
updater.idle()
if __name__ == "__main__":
main()
|
[
"telegram.ext.Updater",
"telegram.ext.CommandHandler",
"requests.get"
] |
[((475, 516), 'telegram.ext.Updater', 'Updater', ([], {'token': 'token_id', 'use_context': '(True)'}), '(token=token_id, use_context=True)\n', (482, 516), False, 'from telegram.ext import Updater, CommandHandler\n'), ((564, 592), 'telegram.ext.CommandHandler', 'CommandHandler', (['"""woof"""', 'woof'], {}), "('woof', woof)\n", (578, 592), False, 'from telegram.ext import Updater, CommandHandler\n'), ((183, 200), 'requests.get', 'requests.get', (['URL'], {}), '(URL)\n', (195, 200), False, 'import requests\n')]
|
#!/usr/bin/env python
# This work was created by participants in the DataONE project, and is
# jointly copyrighted by participating institutions in DataONE. For
# more information on DataONE, see our web site at http://dataone.org.
#
# Copyright 2009-2019 DataONE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generate random formatId."""
import random
import d1_common.const
import d1_test.test_files
class Generate(object):
def __init__(self):
self._format_id_list = None
def __call__(self):
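        # Lazily build and cache the list of candidate formatIds on the first call.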
if self._format_id_list is None:
format_id_set = {
o.formatId
for o in d1_test.test_files.load_xml_to_pyxb(
"objectFormatList_v2_0.xml"
).objectFormat
}
# Remove the formatIds for object types that are parsed by GMN
format_id_set.remove(d1_common.const.ORE_FORMAT_ID)
format_id_set -= set(
d1_test.test_files.load_json("scimeta_format_id_list.json")
)
self._format_id_list = sorted(format_id_set)
return random.choice(self._format_id_list)
generate = Generate()
|
[
"random.choice"
] |
[((1609, 1644), 'random.choice', 'random.choice', (['self._format_id_list'], {}), '(self._format_id_list)\n', (1622, 1644), False, 'import random\n')]
|
import argparse
import os
import sys
import torch
import torch.utils.data
from tensorboardX import SummaryWriter
import torch.backends.cudnn as cudnn
sys.path.append(os.path.dirname(os.path.abspath(__file__)) + '/../../')
from utils.Survival_Aanlysis import SurvivalAnalysis
from utils.RiskLayer import cox_cost
from Prognostic.data.image_producer import ImageDataset
from Prognostic.model import MODELS
from lifelines.utils import concordance_index
from utils.LaycaOptimizer import MinimalLaycaSGD, LaycaSGD
parser = argparse.ArgumentParser(description='Predicting survival time')
parser.add_argument('--data_path', '-d_p', default='./data/patch_prognostic', type=str,
help='data path')
parser.add_argument('--use_cuda', '-use_cuda', default='True', type=bool, help='use cuda')
parser.add_argument('--lr', '-lr', default='1e-4', type=float, help='learning rate')
parser.add_argument('--momentum', '-mom', default='0.9', type=float, help='SGD momentum')
parser.add_argument('--batch_size', '-b', default='5', type=int, help='batch size')
parser.add_argument('--num_worker', '-nw', default='2', type=int, help='num_worker')
parser.add_argument('--start', '-s', default='0', type=int, help='start epoch')
parser.add_argument('--end', '-e', default='10000', type=int, help='end epoch')
parser.add_argument('--experiment_id', '-eid', default='0', help='experiment id')
parser.add_argument('--experiment_name', '-name', default='prognostic_res_101_mixup', help='experiment name')
parser.add_argument('--ckpt_path_save', '-ckpt_s', default='./model/', help='checkpoint path to save')
parser.add_argument('--log_path', '-lp', default='./log/', help='log path to save')
parser.add_argument('--ckpt', '-ckpt', default='./', help='checkpoint path to load')
parser.add_argument('--resume', '-r', action='store_true', help='resume from checkpoint')
parser.add_argument('--way', '-way', default='10', type=str, help='train way, 40 10 or combinate')
parser.add_argument('--load_pth_train', '-lpth_t', default='./tensor_path', help='train tensor path to load')
parser.add_argument('--load_pth_valid', '-lpth_v', default='./tensor_path', help='valid tensor path to load')
parser.add_argument('--alpha', '-a', default='1.0', type=float, help='mixup alpha')
parser.add_argument('--device_ids', default='0,1,2,3,4', type=str, help='comma separated indices of GPU to use,'
' e.g. 0,1 for using GPU_0'
' and GPU_1, default 0.')
parser.add_argument('--drop_group', '-drop_group', default='3,4', help='drop groups')
parser.add_argument('--drop_prob', '-drop_prob', default='0.1', type=float, help='drop prob')
parser.add_argument('--freeze', '-f', action='store_true', help='Freeze convolutional layer parameters')
parser.add_argument('--type-key', '-type-key', default='tumor', type=str, help='tumor or tumor_beside or fibrous_tissue')
parser.add_argument('--experimentway', '-eway', default='prognosis', type=str, help='prognosis or replase')
parser.add_argument('--use_std', '-std', default='use', type=str, help='use std as feature, u:use, o:only, n:not use ')
parser.add_argument('--optimizer', '-o', default='a', type=str, help='choose optimizer:a(adam), s(sgd), '
'Adadelta(Adadelta), m(MinimalLaycaSGD) '
'or l(LaycaSGD)')
args = parser.parse_args()
cudnn.benchmark = True
log_path = os.path.join(args.log_path, args.experiment_name + "_" + str(args.experiment_id))
if not os.path.isdir(log_path):
os.mkdir(log_path)
ckpt_path_save = os.path.join(args.ckpt_path_save, args.experiment_name + "_" + str(args.experiment_id))
if not os.path.exists(ckpt_path_save):
os.mkdir(ckpt_path_save)
os.environ["CUDA_VISIBLE_DEVICES"] = args.device_ids
device = torch.device("cuda" if args.use_cuda else "cpu")
num_GPU = len(args.device_ids.split(','))
batch_size_train = args.batch_size * num_GPU
batch_size_valid = args.batch_size * num_GPU
print("batch_size:",batch_size_train)
num_workers = args.num_worker * num_GPU
SA = SurvivalAnalysis()
def load_checkpoint(args, net):
print("Use ckpt: ", args.ckpt)
assert len(args.ckpt) != 0, "Please input a valid ckpt_path"
checkpoint = torch.load(args.ckpt)
pretrained_dict = checkpoint['state_dict']
net.load_state_dict(pretrained_dict)
return net
def adjust_learning_rate(optimizer, epoch, step, len_epoch):
"""decrease the learning rate at 200 and 300 epoch"""
lr = args.lr
if epoch >= 20:
lr /= 10
if epoch >= 40:
lr /= 10
if epoch >= 80:
lr /= 10
'''warmup'''
if epoch < 5:
lr = lr * float(1 + step + epoch * len_epoch) / (5. * len_epoch)
print('epoch = {}, step = {}, lr = {}'.format(epoch, step, lr))
elif step == 0:
print('epoch = {}, lr={}'.format(epoch, lr))
for param_group in optimizer.param_groups:
param_group['lr'] = lr
return lr
drop_prob = [0.] * 4
if args.drop_group:
drop_probs = args.drop_prob
drop_group = [int(x) for x in args.drop_group.split(',')]
for block_group in drop_group:
if block_group < 1 or block_group > 4:
raise ValueError(
'drop_group should be a comma separated list of integers'
'between 1 and 4(drop_group:{}).'.format(args.drop_group)
)
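        # Scale the drop probability down for earlier block groups (group 4 keeps the full value, group 3 gets 1/4 of it, etc.).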
drop_prob[block_group - 1] = drop_probs / 4.0 ** (4 - block_group)
if args.freeze:
net = MODELS[('resnet50')](factor=args.way, drop_prob=drop_prob, require_grad=False).to(device)
for param in net.fc.parameters():
param.requires_grad = True
optimizer = torch.optim.SGD(filter(lambda p: p.requires_grad, net.parameters()),
lr=args.lr, weight_decay=1e-2)
else:
net = MODELS[('resnet50')](factor=args.way, drop_prob=drop_prob).to(device)
if args.optimizer == 'a':
print('use adam')
optimizer = torch.optim.Adam(net.parameters(), lr=args.lr, betas=(0.9, 0.99), weight_decay=1e-4)
if args.optimizer == 's':
print('use SGD')
optimizer = torch.optim.SGD(net.parameters(), momentum=0.9, lr=args.lr, weight_decay=5e-4)
if args.optimizer == 'l':
print('use LaycaSGD')
optimizer = LaycaSGD(net.parameters(), lr=args.lr, momentum=0.9, weight_decay=1e-4, nesterov=True)
if args.optimizer == 'm':
print('use MinimalLaycaSGD')
optimizer = MinimalLaycaSGD(net.parameters(), lr=args.lr, momentum=0.9, weight_decay=1e-4, nesterov=True)
if args.optimizer == 'Adadelta':
print('use Adadelta')
optimizer = torch.optim.Adadelta(net.parameters(), lr=args.lr, rho=0.9, eps=1e-06, weight_decay=1e-4)
net = torch.nn.DataParallel(net, device_ids=None)
if args.resume:
net = load_checkpoint(args, net)
def train(epoch, dataloader, summary):
loss_sum = 0
acc_sum = 0
net.train()
pth = ""
length = len(dataloader)
Prediction = torch.Tensor().to(device)
Survival = torch.Tensor().to(device)
Observed = torch.Tensor().to(device)
for idx, (img, T, O, _, count) in enumerate(dataloader):
if O.sum() == 0:
continue
N = O.shape[0]
print('T:', T)
print('O:', O)
if args.optimizer != 'Adadelta':
lr = adjust_learning_rate(optimizer, epoch, idx, len(dataloader))
img = img.to(device)
output = net(img)
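        # calc_at_risk derives the at-risk set, failure indices and tie counts consumed by the Cox partial-likelihood loss (cox_cost) below.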
output, T, O, at_risk, failures, ties, _ = SA.calc_at_risk(output, T, O)
print('ties:', ties)
T = T.to(device)
O = O.to(device)
loss = cox_cost(output, at_risk, O.reshape((N, 1)), failures, ties)
loss.register_hook(lambda g: print(g))
optimizer.zero_grad()
loss.backward()
torch.nn.utils.clip_grad_norm_(net.parameters(), 5)
optimizer.step()
Prediction = torch.cat((Prediction, output))
Survival = torch.cat((Survival, T.float()))
Observed = torch.cat((Observed, O.float()))
Prediction, Survival, Observed, at_risk, failures, ties, _ = SA.calc_at_risk(Prediction, Survival.cpu(), Observed.cpu())
CI = concordance_index(Survival.cpu().detach().numpy(), -Prediction.cpu().detach().numpy(),
Observed.cpu().detach().numpy())
loss = cox_cost(Prediction, at_risk, Observed.reshape((Observed.shape[0],1)).to(device), failures, ties)
print("loss:", loss.item(), "CI:", CI.item())
summary['loss'] = loss.item()
summary['CI'] = CI.item()
summary['lr'] = optimizer.param_groups[0]['lr']
return summary
def valid(dataloader, summary):
net.eval()
length = len(dataloader)
Prediction = torch.Tensor().to(device)
Survival = torch.Tensor().to(device)
Observed = torch.Tensor().to(device)
with torch.no_grad():
for idx, (img, T, O, _, count) in enumerate(dataloader):
N = O.shape[0]
print('T:', T)
print('O:', O)
img = img.to(device)
output = net(img)
output, T, O, at_risk, failures, ties, _ = SA.calc_at_risk(output, T, O)
T = T.to(device)
O = O.to(device)
loss = cox_cost(output, at_risk, O.reshape((N, 1)), failures, ties)
print("loss:", loss.item())
Prediction = torch.cat((Prediction, output))
Survival = torch.cat((Survival, T.float()))
Observed = torch.cat((Observed, O.float()))
Prediction, Survival, Observed, at_risk, failures, ties, _ = SA.calc_at_risk(Prediction, Survival.cpu(), Observed.cpu())
CI = concordance_index(Survival.cpu().detach().numpy(), -Prediction.cpu().detach().numpy(),
Observed.cpu().detach().numpy())
loss = cox_cost(Prediction, at_risk, Observed.reshape((Observed.shape[0],1)).to(device), failures, ties)
print("loss:", loss.item(), "CI:", CI.item())
summary['loss'] = loss.item()
summary['CI'] = CI.item()
return summary
d_pth = args.data_path
sp = ckpt_path_save + '/' + str(args.way)
if not os.path.exists(sp):
os.mkdir(sp)
print(d_pth)
train_data = ImageDataset(d_pth, factor=args.way, val=False, type_key=args.type_key,
ExperimentWay=args.experimentway)
valid_data = ImageDataset(d_pth, way="valid", factor=args.way, val=False, type_key=args.type_key,
ExperimentWay=args.experimentway)
print(len(train_data))
print(len(valid_data))
train_dataloader = torch.utils.data.DataLoader(train_data,
batch_size=batch_size_train,
num_workers=num_workers,
drop_last=True,
shuffle=True)
valid_dataloader = torch.utils.data.DataLoader(valid_data,
batch_size=batch_size_valid,
num_workers=num_workers,
drop_last=False,
shuffle=False)
print("length:", len(train_dataloader))
summary_train = {'epoch': 0, 'fp': 0, 'tp': 0, 'Neg': 0, 'Pos': 0}
summary_valid = {'loss': float('inf'), 'acc': 0}
summary_writer = SummaryWriter(log_path)
loss_valid_best = float('inf')
for epoch in range(args.start, args.end):
summary_train = train(epoch, train_dataloader, summary_train)
summary_writer.add_scalar(
'train/loss', summary_train['loss'], epoch)
summary_writer.add_scalar(
'train/CI', summary_train['CI'], epoch)
if epoch % 1 == 0:
torch.save({'epoch': summary_train['epoch'],
'state_dict': net.state_dict()},
(sp + '/' + str(epoch) + '.ckpt'))
summary_valid = valid(valid_dataloader, summary_valid)
summary_writer.add_scalar(
'valid/loss', summary_valid['loss'], epoch)
summary_writer.add_scalar(
'valid/CI', summary_valid['CI'], epoch)
summary_writer.add_scalar(
'learning_rate', summary_train['lr'], epoch
)
print('train/loss', summary_train['loss'], epoch)
print('train/CI', summary_train['CI'], epoch)
print('valid/loss', float(summary_valid['loss']), epoch)
print('valid/CI', summary_valid['CI'], epoch)
if summary_valid['loss'] < loss_valid_best:
        loss_valid_best = summary_valid['loss']
torch.save({'epoch': summary_train['epoch'],
'optimizer': optimizer.state_dict(),
'state_dict': net.state_dict()},
os.path.join(sp, 'best.ckpt'))
summary_writer.close()
|
[
"os.mkdir",
"tensorboardX.SummaryWriter",
"os.path.abspath",
"argparse.ArgumentParser",
"torch.utils.data.DataLoader",
"Prognostic.data.image_producer.ImageDataset",
"os.path.isdir",
"torch.load",
"os.path.exists",
"torch.cat",
"utils.Survival_Aanlysis.SurvivalAnalysis",
"torch.Tensor",
"torch.device",
"torch.nn.DataParallel",
"torch.no_grad",
"os.path.join"
] |
[((522, 585), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Predicting survival time"""'}), "(description='Predicting survival time')\n", (545, 585), False, 'import argparse\n'), ((3944, 3992), 'torch.device', 'torch.device', (["('cuda' if args.use_cuda else 'cpu')"], {}), "('cuda' if args.use_cuda else 'cpu')\n", (3956, 3992), False, 'import torch\n'), ((4210, 4228), 'utils.Survival_Aanlysis.SurvivalAnalysis', 'SurvivalAnalysis', ([], {}), '()\n', (4226, 4228), False, 'from utils.Survival_Aanlysis import SurvivalAnalysis\n'), ((6863, 6906), 'torch.nn.DataParallel', 'torch.nn.DataParallel', (['net'], {'device_ids': 'None'}), '(net, device_ids=None)\n', (6884, 6906), False, 'import torch\n'), ((10271, 10380), 'Prognostic.data.image_producer.ImageDataset', 'ImageDataset', (['d_pth'], {'factor': 'args.way', 'val': '(False)', 'type_key': 'args.type_key', 'ExperimentWay': 'args.experimentway'}), '(d_pth, factor=args.way, val=False, type_key=args.type_key,\n ExperimentWay=args.experimentway)\n', (10283, 10380), False, 'from Prognostic.data.image_producer import ImageDataset\n'), ((10416, 10539), 'Prognostic.data.image_producer.ImageDataset', 'ImageDataset', (['d_pth'], {'way': '"""valid"""', 'factor': 'args.way', 'val': '(False)', 'type_key': 'args.type_key', 'ExperimentWay': 'args.experimentway'}), "(d_pth, way='valid', factor=args.way, val=False, type_key=args.\n type_key, ExperimentWay=args.experimentway)\n", (10428, 10539), False, 'from Prognostic.data.image_producer import ImageDataset\n'), ((10626, 10753), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (['train_data'], {'batch_size': 'batch_size_train', 'num_workers': 'num_workers', 'drop_last': '(True)', 'shuffle': '(True)'}), '(train_data, batch_size=batch_size_train,\n num_workers=num_workers, drop_last=True, shuffle=True)\n', (10653, 10753), False, 'import torch\n'), ((10957, 11086), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (['valid_data'], {'batch_size': 'batch_size_valid', 'num_workers': 'num_workers', 'drop_last': '(False)', 'shuffle': '(False)'}), '(valid_data, batch_size=batch_size_valid,\n num_workers=num_workers, drop_last=False, shuffle=False)\n', (10984, 11086), False, 'import torch\n'), ((11444, 11467), 'tensorboardX.SummaryWriter', 'SummaryWriter', (['log_path'], {}), '(log_path)\n', (11457, 11467), False, 'from tensorboardX import SummaryWriter\n'), ((3661, 3684), 'os.path.isdir', 'os.path.isdir', (['log_path'], {}), '(log_path)\n', (3674, 3684), False, 'import os\n'), ((3690, 3708), 'os.mkdir', 'os.mkdir', (['log_path'], {}), '(log_path)\n', (3698, 3708), False, 'import os\n'), ((3821, 3851), 'os.path.exists', 'os.path.exists', (['ckpt_path_save'], {}), '(ckpt_path_save)\n', (3835, 3851), False, 'import os\n'), ((3857, 3881), 'os.mkdir', 'os.mkdir', (['ckpt_path_save'], {}), '(ckpt_path_save)\n', (3865, 3881), False, 'import os\n'), ((4380, 4401), 'torch.load', 'torch.load', (['args.ckpt'], {}), '(args.ckpt)\n', (4390, 4401), False, 'import torch\n'), ((10208, 10226), 'os.path.exists', 'os.path.exists', (['sp'], {}), '(sp)\n', (10222, 10226), False, 'import os\n'), ((10232, 10244), 'os.mkdir', 'os.mkdir', (['sp'], {}), '(sp)\n', (10240, 10244), False, 'import os\n'), ((8022, 8053), 'torch.cat', 'torch.cat', (['(Prediction, output)'], {}), '((Prediction, output))\n', (8031, 8053), False, 'import torch\n'), ((8948, 8963), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (8961, 8963), False, 'import torch\n'), ((184, 209), 'os.path.abspath', 
'os.path.abspath', (['__file__'], {}), '(__file__)\n', (199, 209), False, 'import os\n'), ((7115, 7129), 'torch.Tensor', 'torch.Tensor', ([], {}), '()\n', (7127, 7129), False, 'import torch\n'), ((7156, 7170), 'torch.Tensor', 'torch.Tensor', ([], {}), '()\n', (7168, 7170), False, 'import torch\n'), ((7197, 7211), 'torch.Tensor', 'torch.Tensor', ([], {}), '()\n', (7209, 7211), False, 'import torch\n'), ((8830, 8844), 'torch.Tensor', 'torch.Tensor', ([], {}), '()\n', (8842, 8844), False, 'import torch\n'), ((8871, 8885), 'torch.Tensor', 'torch.Tensor', ([], {}), '()\n', (8883, 8885), False, 'import torch\n'), ((8912, 8926), 'torch.Tensor', 'torch.Tensor', ([], {}), '()\n', (8924, 8926), False, 'import torch\n'), ((9465, 9496), 'torch.cat', 'torch.cat', (['(Prediction, output)'], {}), '((Prediction, output))\n', (9474, 9496), False, 'import torch\n'), ((12757, 12786), 'os.path.join', 'os.path.join', (['sp', '"""best.ckpt"""'], {}), "(sp, 'best.ckpt')\n", (12769, 12786), False, 'import os\n')]
|
import logging
from logging.handlers import RotatingFileHandler
import numpy as np
import sys
try: # python 2.7+
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
logging.getLogger(__name__).addHandler(NullHandler())
# TODO: create sublogger for different scripts
logPath = '.'
fileName = 'lpjguesstools'
class MultiLineFormatter(logging.Formatter):
""" A custom multi-line logging formatter """
def format(self, record):
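        # Indent continuation lines of multi-line messages so they align under the log header.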
str = logging.Formatter.format(self, record)
header, footer = str.split(record.message)
str = str.replace('\n', '\n' + ' ' * len(header))
return str
# optional colored console logger (nice!)
try:
import colorlog
class MultiLineFormatterColor(colorlog.ColoredFormatter):
def format(self, record):
record.__dict__.update(colorlog.escape_codes)
record.log_color = self.color(self.log_colors, record.levelname)
str = logging.Formatter.format(self, record)
header, footer = str.split(record.message)
str = str.replace('\n', '\n' + ' ' * len(header))
return str
CONS_FORMAT = "[%(log_color)s%(levelname)-8s%(reset)s] %(log_color)s%(message)s%(reset)s"
except ImportError:
# both formatters should use the default (non-color)
MultiLineFormatterColor = MultiLineFormatter
CONS_FORMAT = "[%(levelname)-8s] %(message)s"
FILE_FORMAT = "%(asctime)s [%(levelname)-8s] %(message)s (%(filename)s:%(lineno)s)"
lfCons = MultiLineFormatterColor(CONS_FORMAT, datefmt='%Y-%m-%d %H:%M:%S')
lfFile = MultiLineFormatter(FILE_FORMAT, datefmt='%Y-%m-%d %H:%M:%S')
rootLogger = logging.getLogger(__name__)
rootLogger.setLevel(logging.DEBUG)
hCons = logging.StreamHandler()
hCons.setFormatter(lfCons)
hCons.setLevel(logging.DEBUG)
rootLogger.addHandler(hCons)
hFile = RotatingFileHandler("{0}/{1}.log".format(logPath, fileName), maxBytes=10000)
hFile.setFormatter(lfFile)
hFile.setLevel(logging.DEBUG)
rootLogger.addHandler(hFile)
EPILOG = """<NAME>, SENCKENBERG Biodiversity and Climate Research Centre (BiK-F)
email: <EMAIL>
2017/09/26"""
|
[
"logging.Formatter.format",
"logging.StreamHandler",
"logging.getLogger",
"logging.NullHandler"
] |
[((1726, 1753), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1743, 1753), False, 'import logging\n'), ((1798, 1821), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (1819, 1821), False, 'import logging\n'), ((300, 313), 'logging.NullHandler', 'NullHandler', ([], {}), '()\n', (311, 313), False, 'from logging import NullHandler\n'), ((261, 288), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (278, 288), False, 'import logging\n'), ((545, 583), 'logging.Formatter.format', 'logging.Formatter.format', (['self', 'record'], {}), '(self, record)\n', (569, 583), False, 'import logging\n'), ((1030, 1068), 'logging.Formatter.format', 'logging.Formatter.format', (['self', 'record'], {}), '(self, record)\n', (1054, 1068), False, 'import logging\n')]
|
import argparse
from transformers import BertTokenizer
from tqdm import tqdm
def main(args):
"""Tokenize a corpus and write one sentence per line in order to be able to train
fastText on it.
Args:
        args (argparse.Namespace): Parsed arguments with corpus, vocab, prefix and outfile.
"""
tok = BertTokenizer.from_pretrained(args.vocab)
with open(args.corpus, "r") as fin, open(args.outfile, "w") as feng:
for line in tqdm(fin):
tokenized = tok.tokenize(line.strip())
feng.write(" ".join([args.prefix + x for x in tokenized]) + "\n")
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("--corpus", default=None, type=str, required=True, help="")
parser.add_argument("--vocab", default=None, type=str, required=True, help="")
parser.add_argument("--prefix", default=None, type=str, required=True, help="")
parser.add_argument("--outfile", default=None, type=str, required=True, help="")
args = parser.parse_args()
main(args)
|
[
"tqdm.tqdm",
"transformers.BertTokenizer.from_pretrained",
"argparse.ArgumentParser"
] |
[((254, 295), 'transformers.BertTokenizer.from_pretrained', 'BertTokenizer.from_pretrained', (['args.vocab'], {}), '(args.vocab)\n', (283, 295), False, 'from transformers import BertTokenizer\n'), ((571, 596), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (594, 596), False, 'import argparse\n'), ((389, 398), 'tqdm.tqdm', 'tqdm', (['fin'], {}), '(fin)\n', (393, 398), False, 'from tqdm import tqdm\n')]
|
from _erwin import build
from _erwin import serve
from _erwin import clean
from _erwin import initialize
def run(argv):
if argv[0] == "clean" or argv[0] == "c":
print("Cleaning output folder")
clean.run_clean()
elif argv[0] == "build" or argv[0] == "b":
print("Build")
build.main()
elif argv[0] == "serve" or argv[0] == "s":
print("Serve")
serve.run_server()
elif argv[0] == "init" or argv[0] == "i":
print("Initialize")
print("")
        read = input("Initialize will overwrite templates, are you sure you want to proceed? [Y|n] ")
if read == "Y":
initialize.run_init()
else:
print("Aborted")
else:
print("usage: python erwin.py build|serve|clean|init b|s|c|i")
|
[
"_erwin.initialize.run_init",
"_erwin.serve.run_server",
"_erwin.clean.run_clean",
"_erwin.build.main"
] |
[((210, 227), '_erwin.clean.run_clean', 'clean.run_clean', ([], {}), '()\n', (225, 227), False, 'from _erwin import clean\n'), ((302, 314), '_erwin.build.main', 'build.main', ([], {}), '()\n', (312, 314), False, 'from _erwin import build\n'), ((389, 407), '_erwin.serve.run_server', 'serve.run_server', ([], {}), '()\n', (405, 407), False, 'from _erwin import serve\n'), ((617, 638), '_erwin.initialize.run_init', 'initialize.run_init', ([], {}), '()\n', (636, 638), False, 'from _erwin import initialize\n')]
|
import os
import unittest
import numpy as np
import pandas as pd
import cassiopeia
class TestErrorCorrectIntBCstoWhitelist(unittest.TestCase):
def setUp(self):
dir_path = os.path.dirname(os.path.realpath(__file__))
test_files_path = os.path.join(dir_path, "test_files")
self.whitelist_fp = os.path.join(test_files_path, "intbc_whitelist.txt")
self.whitelist = ["ACTT", "TAAG"]
self.multi_case = pd.DataFrame.from_dict(
{
"cellBC": [
"A",
"A",
"A",
"B",
"B",
"C",
"C",
"C",
"C",
"D",
"D",
],
"UMI": [
"AACCT",
"AACCG",
"AACCC",
"AACCT",
"AACCG",
"AACCT",
"AACCG",
"AAGGA",
"AACCT",
"AACCT",
"AAGGG",
],
"readCount": [20, 30, 30, 40, 50, 10, 10, 15, 10, 10, 10],
"Seq": [
"AACCTTGG",
"AACCTTGG",
"AACCTTCC",
"AACCTTGG",
"AACCTTGC",
"AACCTTCC",
"AACCTTCG",
"AACCTCAG",
"AACCTTGG",
"AACCTTGG",
"AACCTAAA",
],
"intBC": [
"ACTT",
"AAGG",
"ACTA",
"AAGN",
"TACT",
"TAAG",
"TNNG",
"ANNN",
"GCTT",
"NNNN",
"AAAA",
],
"r1": ["1", "1", "1", "1", "1", "1", "1", "1", "1", "1", "1"],
"r2": ["2", "2", "2", "2", "2", "2", "2", "2", "2", "2", "2"],
"r3": ["3", "3", "3", "3", "3", "3", "3", "3", "3", "3", "3"],
"AlignmentScore": [
"20",
"20",
"20",
"20",
"20",
"20",
"20",
"20",
"20",
"20",
"20",
],
"CIGAR": [
"NA",
"NA",
"NA",
"NA",
"NA",
"NA",
"NA",
"NA",
"NA",
"NA",
"NA",
],
}
)
self.multi_case["readName"] = self.multi_case.apply(
lambda x: "_".join([x.cellBC, x.UMI, str(x.readCount)]), axis=1
)
self.multi_case["allele"] = self.multi_case.apply(
lambda x: "_".join([x.r1, x.r2, x.r3]), axis=1
)
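        # Expected intBC corrections to whitelist entries; intBCs that cannot be corrected are dropped from the result.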
self.corrections = {
"ACTT": "ACTT",
"TAAG": "TAAG",
"ACTA": "ACTT",
"TNNG": "TAAG",
"ANNN": "ACTT",
}
def test_correct(self):
df = cassiopeia.pp.error_correct_intbcs_to_whitelist(
self.multi_case, self.whitelist_fp, intbc_dist_thresh=1
)
expected_df = self.multi_case.copy()
expected_df["intBC"] = expected_df["intBC"].map(self.corrections)
expected_df.dropna(subset=["intBC"], inplace=True)
pd.testing.assert_frame_equal(df, expected_df)
def test_correct_whitelist_list(self):
df = cassiopeia.pp.error_correct_intbcs_to_whitelist(
self.multi_case, self.whitelist, intbc_dist_thresh=1
)
expected_df = self.multi_case.copy()
expected_df["intBC"] = expected_df["intBC"].map(self.corrections)
expected_df.dropna(subset=["intBC"], inplace=True)
pd.testing.assert_frame_equal(df, expected_df)
if __name__ == "__main__":
unittest.main()
|
[
"unittest.main",
"pandas.testing.assert_frame_equal",
"pandas.DataFrame.from_dict",
"os.path.realpath",
"cassiopeia.pp.error_correct_intbcs_to_whitelist",
"os.path.join"
] |
[((4234, 4249), 'unittest.main', 'unittest.main', ([], {}), '()\n', (4247, 4249), False, 'import unittest\n'), ((257, 293), 'os.path.join', 'os.path.join', (['dir_path', '"""test_files"""'], {}), "(dir_path, 'test_files')\n", (269, 293), False, 'import os\n'), ((322, 374), 'os.path.join', 'os.path.join', (['test_files_path', '"""intbc_whitelist.txt"""'], {}), "(test_files_path, 'intbc_whitelist.txt')\n", (334, 374), False, 'import os\n'), ((444, 1338), 'pandas.DataFrame.from_dict', 'pd.DataFrame.from_dict', (["{'cellBC': ['A', 'A', 'A', 'B', 'B', 'C', 'C', 'C', 'C', 'D', 'D'], 'UMI':\n ['AACCT', 'AACCG', 'AACCC', 'AACCT', 'AACCG', 'AACCT', 'AACCG', 'AAGGA',\n 'AACCT', 'AACCT', 'AAGGG'], 'readCount': [20, 30, 30, 40, 50, 10, 10, \n 15, 10, 10, 10], 'Seq': ['AACCTTGG', 'AACCTTGG', 'AACCTTCC', 'AACCTTGG',\n 'AACCTTGC', 'AACCTTCC', 'AACCTTCG', 'AACCTCAG', 'AACCTTGG', 'AACCTTGG',\n 'AACCTAAA'], 'intBC': ['ACTT', 'AAGG', 'ACTA', 'AAGN', 'TACT', 'TAAG',\n 'TNNG', 'ANNN', 'GCTT', 'NNNN', 'AAAA'], 'r1': ['1', '1', '1', '1', '1',\n '1', '1', '1', '1', '1', '1'], 'r2': ['2', '2', '2', '2', '2', '2', '2',\n '2', '2', '2', '2'], 'r3': ['3', '3', '3', '3', '3', '3', '3', '3', '3',\n '3', '3'], 'AlignmentScore': ['20', '20', '20', '20', '20', '20', '20',\n '20', '20', '20', '20'], 'CIGAR': ['NA', 'NA', 'NA', 'NA', 'NA', 'NA',\n 'NA', 'NA', 'NA', 'NA', 'NA']}"], {}), "({'cellBC': ['A', 'A', 'A', 'B', 'B', 'C', 'C', 'C',\n 'C', 'D', 'D'], 'UMI': ['AACCT', 'AACCG', 'AACCC', 'AACCT', 'AACCG',\n 'AACCT', 'AACCG', 'AAGGA', 'AACCT', 'AACCT', 'AAGGG'], 'readCount': [20,\n 30, 30, 40, 50, 10, 10, 15, 10, 10, 10], 'Seq': ['AACCTTGG', 'AACCTTGG',\n 'AACCTTCC', 'AACCTTGG', 'AACCTTGC', 'AACCTTCC', 'AACCTTCG', 'AACCTCAG',\n 'AACCTTGG', 'AACCTTGG', 'AACCTAAA'], 'intBC': ['ACTT', 'AAGG', 'ACTA',\n 'AAGN', 'TACT', 'TAAG', 'TNNG', 'ANNN', 'GCTT', 'NNNN', 'AAAA'], 'r1':\n ['1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1'], 'r2': ['2',\n '2', '2', '2', '2', '2', '2', '2', '2', '2', '2'], 'r3': ['3', '3', '3',\n '3', '3', '3', '3', '3', '3', '3', '3'], 'AlignmentScore': ['20', '20',\n '20', '20', '20', '20', '20', '20', '20', '20', '20'], 'CIGAR': ['NA',\n 'NA', 'NA', 'NA', 'NA', 'NA', 'NA', 'NA', 'NA', 'NA', 'NA']})\n", (466, 1338), True, 'import pandas as pd\n'), ((3424, 3533), 'cassiopeia.pp.error_correct_intbcs_to_whitelist', 'cassiopeia.pp.error_correct_intbcs_to_whitelist', (['self.multi_case', 'self.whitelist_fp'], {'intbc_dist_thresh': '(1)'}), '(self.multi_case, self.\n whitelist_fp, intbc_dist_thresh=1)\n', (3471, 3533), False, 'import cassiopeia\n'), ((3738, 3784), 'pandas.testing.assert_frame_equal', 'pd.testing.assert_frame_equal', (['df', 'expected_df'], {}), '(df, expected_df)\n', (3767, 3784), True, 'import pandas as pd\n'), ((3843, 3949), 'cassiopeia.pp.error_correct_intbcs_to_whitelist', 'cassiopeia.pp.error_correct_intbcs_to_whitelist', (['self.multi_case', 'self.whitelist'], {'intbc_dist_thresh': '(1)'}), '(self.multi_case, self.\n whitelist, intbc_dist_thresh=1)\n', (3890, 3949), False, 'import cassiopeia\n'), ((4154, 4200), 'pandas.testing.assert_frame_equal', 'pd.testing.assert_frame_equal', (['df', 'expected_df'], {}), '(df, expected_df)\n', (4183, 4200), True, 'import pandas as pd\n'), ((203, 229), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (219, 229), False, 'import os\n')]
|
from django.urls import reverse
from rest_framework.test import APITestCase
from tests.testapp.models import Book, Course, Student, Phone
class ViewTests(APITestCase):
def setUp(self):
self.book1 = Book.objects.create(title="Advanced Data Structures", author="S.Mobit")
self.book2 = Book.objects.create(title="Basic Data Structures", author="S.Mobit")
self.course1 = Course.objects.create(
name="Data Structures", code="CS210"
)
self.course2 = Course.objects.create(
name="Programming", code="CS150"
)
self.course1.books.set([self.book1, self.book2])
self.course2.books.set([self.book1])
self.student = Student.objects.create(
name="Yezy", age=24, course=self.course1
)
self.phone1 = Phone.objects.create(number="076711110", type="Office", student=self.student)
self.phone2 = Phone.objects.create(number="073008880", type="Home", student=self.student)
def tearDown(self):
Book.objects.all().delete()
Course.objects.all().delete()
Student.objects.all().delete()
# **************** POST Tests ********************* #
def test_post_on_pk_nested_foreignkey_related_field(self):
url = reverse("rstudent-list")
data = {
"name": "yezy",
"age": 33,
"course": 2
}
response = self.client.post(url, data, format="json")
self.assertEqual(
response.data,
{
'name': 'yezy',
'age': 33,
'course': {
'name': 'Programming',
'code': 'CS150',
'books': [
{"title": "Advanced Data Structures", "author": "S.Mobit"}
]
},
'phone_numbers': []
}
)
def test_post_on_writable_nested_foreignkey_related_field(self):
url = reverse("wstudent-list")
data = {
"name": "yezy",
"age": 33,
"course": {"name": "Programming", "code": "CS50"},
}
response = self.client.post(url, data, format="json")
self.assertEqual(
response.data,
{
'name': 'yezy',
'age': 33,
'course': {
'name': 'Programming',
'code': 'CS50',
'books': []
},
'phone_numbers': []
}
)
def test_post_with_add_operation(self):
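        # The "add" operation attaches existing Book objects (by pk) to the course's many-to-many books field.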
url = reverse("rcourse-list")
data = {
"name": "Data Structures",
"code": "CS310",
"books": {"add":[1,2]}
}
response = self.client.post(url, data, format="json")
self.assertEqual(
response.data,
{
"name": "Data Structures",
"code": "CS310",
"books": [
{'title': 'Advanced Data Structures', 'author': 'S.Mobit'},
{'title': 'Basic Data Structures', 'author': 'S.Mobit'}
]
}
)
def test_post_with_create_operation(self):
data = {
"name": "Data Structures",
"code": "CS310",
"books": {"create": [
{"title": "Linear Math", "author": "Me"},
{"title": "Algebra Three", "author": "Me"}
]}
}
url = reverse("wcourse-list")
response = self.client.post(url, data, format="json")
self.assertEqual(
response.data,
{
"name": "Data Structures",
"code": "CS310",
"books": [
{"title": "Linear Math", "author": "Me"},
{"title": "Algebra Three", "author": "Me"}
]
}
)
def test_post_on_deep_nested_fields(self):
url = reverse("wstudent-list")
data = {
"name": "yezy",
"age": 33,
"course": {
"name": "Programming",
"code": "CS50",
"books": {"create": [
{"title": "Python Tricks", "author": "<NAME>"}
]}
}
}
response = self.client.post(url, data, format="json")
self.assertEqual(
response.data,
{
'name': 'yezy',
'age': 33,
'course': {
'name': 'Programming',
'code': 'CS50',
'books': [
{"title": "Python Tricks", "author": "<NAME>"}
]
},
'phone_numbers': []
}
)
def test_post_on_many_2_one_relation(self):
url = reverse("wstudent-list")
data = {
"name": "yezy",
"age": 33,
"course": {"name": "Programming", "code": "CS50"},
"phone_numbers": {
'create': [
{'number': '076750000', 'type': 'office'}
]
}
}
response = self.client.post(url, data, format="json")
self.assertEqual(
response.data,
{
'name': 'yezy',
'age': 33,
'course': {
'name': 'Programming',
'code': 'CS50',
'books': []
},
'phone_numbers': [
{'number': '076750000', 'type': 'office', 'student': 2}
]
}
)
# **************** PUT Tests ********************* #
def test_put_on_pk_nested_foreignkey_related_field(self):
url = reverse("rstudent-detail", args=[self.student.id])
data = {
"name": "yezy",
"age": 33,
"course": 2
}
response = self.client.put(url, data, format="json")
self.assertEqual(
response.data,
{
'name': 'yezy', 'age': 33,
'course': {
'name': 'Programming', 'code': 'CS150',
'books': [
{"title": "Advanced Data Structures", "author": "S.Mobit"}
]
},
'phone_numbers': [
{'number': '076711110', 'type': 'Office', 'student': 1},
{'number': '073008880', 'type': 'Home', 'student': 1}
]
}
)
def test_put_on_writable_nested_foreignkey_related_field(self):
url = reverse("wstudent-detail", args=[self.student.id])
data = {
"name": "yezy",
"age": 33,
"course": {"name": "Programming", "code": "CS50"}
}
response = self.client.put(url, data, format="json")
self.assertEqual(
response.data,
{
'name': 'yezy', 'age': 33,
'course': {
'name': 'Programming', 'code': 'CS50',
'books': [
{'title': 'Advanced Data Structures', 'author': 'S.Mobit'},
{'title': 'Basic Data Structures', 'author': 'S.Mobit'}
]
},
'phone_numbers': [
{'number': '076711110', 'type': 'Office', 'student': 1},
{'number': '073008880', 'type': 'Home', 'student': 1}
]
}
)
def test_put_with_add_operation(self):
url = reverse("rcourse-detail", args=[self.course2.id])
data = {
"name": "Data Structures",
"code": "CS410",
"books": {
"add": [2]
}
}
response = self.client.put(url, data, format="json")
self.assertEqual(
response.data,
{
"name": "Data Structures",
"code": "CS410",
"books": [
{'title': 'Advanced Data Structures', 'author': 'S.Mobit'},
{'title': 'Basic Data Structures', 'author': 'S.Mobit'}
]
}
)
def test_put_with_remove_operation(self):
url = reverse("rcourse-detail", args=[self.course2.id])
data = {
"name": "Data Structures",
"code": "CS410",
"books": {
"remove": [1]
}
}
response = self.client.put(url, data, format="json")
self.assertEqual(
response.data,
{
"name": "Data Structures",
"code": "CS410",
"books": []
}
)
def test_put_with_create_operation(self):
url = reverse("wcourse-detail", args=[self.course2.id])
data = {
"name": "Data Structures",
"code": "CS310",
"books": {
"create": [
{"title": "Primitive Data Types", "author": "S.Mobit"}
]
}
}
response = self.client.put(url, data, format="json")
self.assertEqual(
response.data,
{
"name": "Data Structures",
"code": "CS310",
"books": [
{'title': 'Advanced Data Structures', 'author': 'S.Mobit'},
{"title": "Primitive Data Types", "author": "S.Mobit"}
]
}
)
def test_put_with_update_operation(self):
url = reverse("wcourse-detail", args=[self.course2.id])
data = {
"name": "Data Structures",
"code": "CS310",
"books": {
"update": {
1: {"title": "React Programming", "author": "M.Json"}
}
}
}
response = self.client.put(url, data, format="json")
self.assertEqual(
response.data,
{
"name": "Data Structures",
"code": "CS310",
"books": [
{"title": "React Programming", "author": "M.Json"}
]
}
)
def test_put_on_deep_nested_fields(self):
url = reverse("wstudent-detail", args=[self.student.id])
data = {
"name": "yezy",
"age": 33,
"course": {
"name": "Programming",
"code": "CS50",
"books": {
"remove": [1]
}
}
}
response = self.client.put(url, data, format="json")
self.assertEqual(
response.data,
{
'name': 'yezy', 'age': 33,
'course': {
'name': 'Programming', 'code': 'CS50',
'books': [
{'title': 'Basic Data Structures', 'author': 'S.Mobit'}
]
},
'phone_numbers': [
{'number': '076711110', 'type': 'Office', 'student': 1},
{'number': '073008880', 'type': 'Home', 'student': 1}
]
}
)
def test_put_on_many_2_one_relation(self):
url = reverse("wstudent-detail", args=[self.student.id])
data = {
"name": "yezy",
"age": 33,
"course": {"name": "Programming", "code": "CS50"},
"phone_numbers": {
'update': {
1: {'number': '073008811', 'type': 'office'}
},
'create': [
{'number': '076750000', 'type': 'office'}
]
}
}
response = self.client.put(url, data, format="json")
self.assertEqual(
response.data,
{
'name': 'yezy', 'age': 33,
'course': {
'name': 'Programming', 'code': 'CS50',
'books': [
{'title': 'Advanced Data Structures', 'author': 'S.Mobit'},
{'title': 'Basic Data Structures', 'author': 'S.Mobit'}
]
},
'phone_numbers': [
{'number': '073008811', 'type': 'office', 'student': 1},
{'number': '073008880', 'type': 'Home', 'student': 1},
{'number': '076750000', 'type': 'office', 'student': 1}
]
}
)
|
[
"tests.testapp.models.Student.objects.all",
"tests.testapp.models.Book.objects.create",
"tests.testapp.models.Phone.objects.create",
"tests.testapp.models.Book.objects.all",
"tests.testapp.models.Student.objects.create",
"django.urls.reverse",
"tests.testapp.models.Course.objects.create",
"tests.testapp.models.Course.objects.all"
] |
[((212, 283), 'tests.testapp.models.Book.objects.create', 'Book.objects.create', ([], {'title': '"""Advanced Data Structures"""', 'author': '"""S.Mobit"""'}), "(title='Advanced Data Structures', author='S.Mobit')\n", (231, 283), False, 'from tests.testapp.models import Book, Course, Student, Phone\n'), ((305, 373), 'tests.testapp.models.Book.objects.create', 'Book.objects.create', ([], {'title': '"""Basic Data Structures"""', 'author': '"""S.Mobit"""'}), "(title='Basic Data Structures', author='S.Mobit')\n", (324, 373), False, 'from tests.testapp.models import Book, Course, Student, Phone\n'), ((398, 457), 'tests.testapp.models.Course.objects.create', 'Course.objects.create', ([], {'name': '"""Data Structures"""', 'code': '"""CS210"""'}), "(name='Data Structures', code='CS210')\n", (419, 457), False, 'from tests.testapp.models import Book, Course, Student, Phone\n'), ((503, 558), 'tests.testapp.models.Course.objects.create', 'Course.objects.create', ([], {'name': '"""Programming"""', 'code': '"""CS150"""'}), "(name='Programming', code='CS150')\n", (524, 558), False, 'from tests.testapp.models import Book, Course, Student, Phone\n'), ((708, 772), 'tests.testapp.models.Student.objects.create', 'Student.objects.create', ([], {'name': '"""Yezy"""', 'age': '(24)', 'course': 'self.course1'}), "(name='Yezy', age=24, course=self.course1)\n", (730, 772), False, 'from tests.testapp.models import Book, Course, Student, Phone\n'), ((818, 895), 'tests.testapp.models.Phone.objects.create', 'Phone.objects.create', ([], {'number': '"""076711110"""', 'type': '"""Office"""', 'student': 'self.student'}), "(number='076711110', type='Office', student=self.student)\n", (838, 895), False, 'from tests.testapp.models import Book, Course, Student, Phone\n'), ((918, 993), 'tests.testapp.models.Phone.objects.create', 'Phone.objects.create', ([], {'number': '"""073008880"""', 'type': '"""Home"""', 'student': 'self.student'}), "(number='073008880', type='Home', student=self.student)\n", (938, 993), False, 'from tests.testapp.models import Book, Course, Student, Phone\n'), ((1270, 1294), 'django.urls.reverse', 'reverse', (['"""rstudent-list"""'], {}), "('rstudent-list')\n", (1277, 1294), False, 'from django.urls import reverse\n'), ((2005, 2029), 'django.urls.reverse', 'reverse', (['"""wstudent-list"""'], {}), "('wstudent-list')\n", (2012, 2029), False, 'from django.urls import reverse\n'), ((2642, 2665), 'django.urls.reverse', 'reverse', (['"""rcourse-list"""'], {}), "('rcourse-list')\n", (2649, 2665), False, 'from django.urls import reverse\n'), ((3585, 3608), 'django.urls.reverse', 'reverse', (['"""wcourse-list"""'], {}), "('wcourse-list')\n", (3592, 3608), False, 'from django.urls import reverse\n'), ((4071, 4095), 'django.urls.reverse', 'reverse', (['"""wstudent-list"""'], {}), "('wstudent-list')\n", (4078, 4095), False, 'from django.urls import reverse\n'), ((4975, 4999), 'django.urls.reverse', 'reverse', (['"""wstudent-list"""'], {}), "('wstudent-list')\n", (4982, 4999), False, 'from django.urls import reverse\n'), ((5934, 5984), 'django.urls.reverse', 'reverse', (['"""rstudent-detail"""'], {'args': '[self.student.id]'}), "('rstudent-detail', args=[self.student.id])\n", (5941, 5984), False, 'from django.urls import reverse\n'), ((6817, 6867), 'django.urls.reverse', 'reverse', (['"""wstudent-detail"""'], {'args': '[self.student.id]'}), "('wstudent-detail', args=[self.student.id])\n", (6824, 6867), False, 'from django.urls import reverse\n'), ((7813, 7862), 'django.urls.reverse', 'reverse', 
(['"""rcourse-detail"""'], {'args': '[self.course2.id]'}), "('rcourse-detail', args=[self.course2.id])\n", (7820, 7862), False, 'from django.urls import reverse\n'), ((8532, 8581), 'django.urls.reverse', 'reverse', (['"""rcourse-detail"""'], {'args': '[self.course2.id]'}), "('rcourse-detail', args=[self.course2.id])\n", (8539, 8581), False, 'from django.urls import reverse\n'), ((9081, 9130), 'django.urls.reverse', 'reverse', (['"""wcourse-detail"""'], {'args': '[self.course2.id]'}), "('wcourse-detail', args=[self.course2.id])\n", (9088, 9130), False, 'from django.urls import reverse\n'), ((9901, 9950), 'django.urls.reverse', 'reverse', (['"""wcourse-detail"""'], {'args': '[self.course2.id]'}), "('wcourse-detail', args=[self.course2.id])\n", (9908, 9950), False, 'from django.urls import reverse\n'), ((10636, 10686), 'django.urls.reverse', 'reverse', (['"""wstudent-detail"""'], {'args': '[self.student.id]'}), "('wstudent-detail', args=[self.student.id])\n", (10643, 10686), False, 'from django.urls import reverse\n'), ((11659, 11709), 'django.urls.reverse', 'reverse', (['"""wstudent-detail"""'], {'args': '[self.student.id]'}), "('wstudent-detail', args=[self.student.id])\n", (11666, 11709), False, 'from django.urls import reverse\n'), ((1027, 1045), 'tests.testapp.models.Book.objects.all', 'Book.objects.all', ([], {}), '()\n', (1043, 1045), False, 'from tests.testapp.models import Book, Course, Student, Phone\n'), ((1063, 1083), 'tests.testapp.models.Course.objects.all', 'Course.objects.all', ([], {}), '()\n', (1081, 1083), False, 'from tests.testapp.models import Book, Course, Student, Phone\n'), ((1101, 1122), 'tests.testapp.models.Student.objects.all', 'Student.objects.all', ([], {}), '()\n', (1120, 1122), False, 'from tests.testapp.models import Book, Course, Student, Phone\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
@author: <NAME>
@email: <EMAIL>
@time: 2021/12/17 20:25
"""
from unittest import mock
import pytest
from mussel.core.make_requests import MakeRequest
from mussel.scheme.api import Interface
class TestMakeAPIRequests:
def test_can_be_instantiated(self):
mr = MakeRequest()
assert isinstance(mr, MakeRequest)
@pytest.mark.parametrize(
"interface",
[
Interface("delete", "url"),
Interface("get", "url"),
Interface("head", "url"),
Interface("options", "url"),
Interface("patch", "url"),
Interface("post", "url"),
Interface("put", "url"),
],
)
@mock.patch("mussel.core.make_requests.Session")
def test_http_method_calls_correct_session_method(self, mocked_session, interface):
mar = MakeRequest()
mar.send(interface)
getattr(mar.session, interface.method).assert_called_once()
|
[
"unittest.mock.patch",
"mussel.core.make_requests.MakeRequest",
"mussel.scheme.api.Interface"
] |
[((739, 786), 'unittest.mock.patch', 'mock.patch', (['"""mussel.core.make_requests.Session"""'], {}), "('mussel.core.make_requests.Session')\n", (749, 786), False, 'from unittest import mock\n'), ((327, 340), 'mussel.core.make_requests.MakeRequest', 'MakeRequest', ([], {}), '()\n', (338, 340), False, 'from mussel.core.make_requests import MakeRequest\n'), ((889, 902), 'mussel.core.make_requests.MakeRequest', 'MakeRequest', ([], {}), '()\n', (900, 902), False, 'from mussel.core.make_requests import MakeRequest\n'), ((459, 485), 'mussel.scheme.api.Interface', 'Interface', (['"""delete"""', '"""url"""'], {}), "('delete', 'url')\n", (468, 485), False, 'from mussel.scheme.api import Interface\n'), ((499, 522), 'mussel.scheme.api.Interface', 'Interface', (['"""get"""', '"""url"""'], {}), "('get', 'url')\n", (508, 522), False, 'from mussel.scheme.api import Interface\n'), ((536, 560), 'mussel.scheme.api.Interface', 'Interface', (['"""head"""', '"""url"""'], {}), "('head', 'url')\n", (545, 560), False, 'from mussel.scheme.api import Interface\n'), ((574, 601), 'mussel.scheme.api.Interface', 'Interface', (['"""options"""', '"""url"""'], {}), "('options', 'url')\n", (583, 601), False, 'from mussel.scheme.api import Interface\n'), ((615, 640), 'mussel.scheme.api.Interface', 'Interface', (['"""patch"""', '"""url"""'], {}), "('patch', 'url')\n", (624, 640), False, 'from mussel.scheme.api import Interface\n'), ((654, 678), 'mussel.scheme.api.Interface', 'Interface', (['"""post"""', '"""url"""'], {}), "('post', 'url')\n", (663, 678), False, 'from mussel.scheme.api import Interface\n'), ((692, 715), 'mussel.scheme.api.Interface', 'Interface', (['"""put"""', '"""url"""'], {}), "('put', 'url')\n", (701, 715), False, 'from mussel.scheme.api import Interface\n')]
|
import pickle
from pathlib import Path
from datetime import datetime
from tqdm import tqdm
from dfp_main import patch, PatchStats, setVerbose
TEST_SET_PATH = "testSet"
# How many test files should be examined [0,100]
LIMIT = None
def evaluateTestSet():
    testFiles = sorted(Path(TEST_SET_PATH).iterdir())  # sort so that (dockerfile, violation) pairs line up
testPairs = [(testFiles[i], testFiles[i + 1]) for i in range(0, len(testFiles), 2)]
all_stats = []
for dockerfile, violationFile in tqdm(testPairs[:LIMIT]):
stats = patch(str(dockerfile), str(violationFile), "hadolint.exe", quiet=True)
all_stats.append(stats)
for s in all_stats:
print(s)
with open(f"evalStats_{datetime.now().strftime('%d%m%Y_%H%M')}.pkl", "wb") as f:
pickle.dump(all_stats, f, protocol=pickle.HIGHEST_PROTOCOL)
times = list(map(lambda it: it.time, all_stats))
avg_time = sum(times) / len(times)
total = sum(map(lambda it: it.total, all_stats))
fixed = sum(map(lambda it: it.fixed, all_stats))
unfixed = sum(map(lambda it: it.unfixed, all_stats))
verified_patches = [p for stat in all_stats for p in stat.patches]
position_dist = {}
rule_dist = {}
for p in verified_patches:
if p.position not in position_dist:
position_dist[p.position] = 0
position_dist[p.position] += 1
if p.rule not in rule_dist:
rule_dist[p.rule] = 0
rule_dist[p.rule] += 1
setVerbose(True)
PatchStats(total, fixed, unfixed).print()
print(f"Average time: {avg_time}s")
print(f"Position distribution: {position_dist}")
print(f"Rule distribution: {rule_dist}")
if __name__ == "__main__":
evaluateTestSet()
|
[
"tqdm.tqdm",
"pickle.dump",
"dfp_main.setVerbose",
"pathlib.Path",
"datetime.datetime.now",
"dfp_main.PatchStats"
] |
[((455, 478), 'tqdm.tqdm', 'tqdm', (['testPairs[:LIMIT]'], {}), '(testPairs[:LIMIT])\n', (459, 478), False, 'from tqdm import tqdm\n'), ((1430, 1446), 'dfp_main.setVerbose', 'setVerbose', (['(True)'], {}), '(True)\n', (1440, 1446), False, 'from dfp_main import patch, PatchStats, setVerbose\n'), ((736, 795), 'pickle.dump', 'pickle.dump', (['all_stats', 'f'], {'protocol': 'pickle.HIGHEST_PROTOCOL'}), '(all_stats, f, protocol=pickle.HIGHEST_PROTOCOL)\n', (747, 795), False, 'import pickle\n'), ((1451, 1484), 'dfp_main.PatchStats', 'PatchStats', (['total', 'fixed', 'unfixed'], {}), '(total, fixed, unfixed)\n', (1461, 1484), False, 'from dfp_main import patch, PatchStats, setVerbose\n'), ((279, 298), 'pathlib.Path', 'Path', (['TEST_SET_PATH'], {}), '(TEST_SET_PATH)\n', (283, 298), False, 'from pathlib import Path\n'), ((670, 684), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (682, 684), False, 'from datetime import datetime\n')]
|
from os import path
from unittest.mock import call
from uptimer.events import SCHEMATA_PATH
from uptimer.events.cache import SchemaCache
def test_schemacache_init(mocker):
mocked_open = mocker.patch.object(SchemaCache, "__missing__")
schema_cache = SchemaCache()
assert schema_cache is not None
mocked_open.assert_not_called()
def test_loading_missing_schema(mocker):
mocked_open = mocker.patch.object(SchemaCache, "__missing__")
schema_cache = SchemaCache()
schema_cache["root.json"]
mocked_open.assert_called_once_with("root.json")
def test_loading_dependant_of_root_json(mocker):
mocked_open = mocker.patch("builtins.open", side_effect=open)
calls = [
call(path.join(SCHEMATA_PATH, "probe-event.json"), "r"),
call(path.join(SCHEMATA_PATH, "root.json"), "r"),
]
# Defaults to resolve $refs in the schema, should open two files.
schema_cache = SchemaCache()
schema_cache["probe-event.json"]
mocked_open.assert_called()
assert mocked_open.call_count == 2
mocked_open.assert_has_calls(calls)
mocked_open.reset_mock()
# Non-resolving cache should only open the asked-for file
schema_cache_non_resolving = SchemaCache(resolve_refs=False)
schema_cache_non_resolving["probe-event.json"]
mocked_open.assert_called_once()
mocked_open.assert_has_calls([calls[0]])
def test_return_cached_result(mocker):
mocked_open = mocker.patch("builtins.open", side_effect=open)
schema_cache = SchemaCache()
schema_cache["probe-event.json"]
mocked_open.assert_called()
assert mocked_open.call_count == 2
# Request the same schema again; call_count stays the same.
schema_cache["probe-event.json"]
assert mocked_open.call_count == 2
# Resolving should have cached root.json as well; call_count stays the same
schema_cache["root.json"]
assert mocked_open.call_count == 2
|
[
"os.path.join",
"uptimer.events.cache.SchemaCache"
] |
[((262, 275), 'uptimer.events.cache.SchemaCache', 'SchemaCache', ([], {}), '()\n', (273, 275), False, 'from uptimer.events.cache import SchemaCache\n'), ((477, 490), 'uptimer.events.cache.SchemaCache', 'SchemaCache', ([], {}), '()\n', (488, 490), False, 'from uptimer.events.cache import SchemaCache\n'), ((925, 938), 'uptimer.events.cache.SchemaCache', 'SchemaCache', ([], {}), '()\n', (936, 938), False, 'from uptimer.events.cache import SchemaCache\n'), ((1212, 1243), 'uptimer.events.cache.SchemaCache', 'SchemaCache', ([], {'resolve_refs': '(False)'}), '(resolve_refs=False)\n', (1223, 1243), False, 'from uptimer.events.cache import SchemaCache\n'), ((1504, 1517), 'uptimer.events.cache.SchemaCache', 'SchemaCache', ([], {}), '()\n', (1515, 1517), False, 'from uptimer.events.cache import SchemaCache\n'), ((719, 763), 'os.path.join', 'path.join', (['SCHEMATA_PATH', '"""probe-event.json"""'], {}), "(SCHEMATA_PATH, 'probe-event.json')\n", (728, 763), False, 'from os import path\n'), ((784, 821), 'os.path.join', 'path.join', (['SCHEMATA_PATH', '"""root.json"""'], {}), "(SCHEMATA_PATH, 'root.json')\n", (793, 821), False, 'from os import path\n')]
|
from sklearn.neighbors import NearestNeighbors
import Sv
import logging
import pandas as pd
import numpy as np
import functools
import os
import math
logger = logging.getLogger('marin')
logger.setLevel(logging.DEBUG)
def point_processing(tracks_data):
"""
input: tracking data matrix
    output: column of distances to nearest neighbors in meters
"""
tracks = tracks_data.loc[:, ['x_gps', 'y_gps', 'z_gps']] # get position of each tracks
    tracks['long_m'] = tracks.y_gps * (
            40075000 * np.cos(np.radians(tracks.x_gps)) / 360)  # Get the equivalent of the longitude in meters (x_gps is in degrees)
tracks['lat_m'] = tracks.x_gps * 60 * 1852 # Get the equivalent of the latitude in meters
array = np.vstack(
[tracks.lat_m, tracks.long_m, tracks.z_gps]) # preparing the array for nearest neighbors algorithm
array = np.transpose(array)
nbrs = NearestNeighbors(n_neighbors=2, algorithm='ball_tree').fit(array) # nearest neighbors algorithm
distances, indices = nbrs.kneighbors(array)
return distances[:, 1]
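# Illustrative sketch (not part of the original module): exercising
# point_processing on a tiny, made-up tracking table. The column names follow
# the docstring above; the coordinate values are hypothetical.
def _example_point_processing():
    demo = pd.DataFrame({
        'x_gps': [48.10, 48.10, 48.11],  # latitude in degrees
        'y_gps': [-4.50, -4.51, -4.50],  # longitude in degrees
        'z_gps': [-10.0, -12.0, -11.0],  # depth in meters
    })
    return point_processing(demo)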
def conjunction(*conditions):
"""Multiple conditions filter for panda"""
return functools.reduce(np.logical_and, conditions)
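# Illustrative sketch (not from the original code): conjunction is meant to be
# used as a pandas row filter, combining several boolean Series into a single
# mask. The column names below are hypothetical.
def _example_conjunction(df):
    return df[conjunction(df.nb_target >= 3, df.TSrange > 5.0)]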
def calc_distance_lat(lat1, lat2):
"""Returns a distance between 2 latitudes"""
dlat = lat2 - lat1
dist = dlat * 60 * 1852
return dist
def calc_distance_long(lat, lon1, lon2):
"""Returns a distance between 2 longitudes for a given latitude"""
dlon = lon2 - lon1
    dist = dlon * (40075000 * math.cos(math.radians(lat)) / 360)  # lat is expected in degrees
return dist
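# Worked example (sketch): one degree of latitude is 60 * 1852 m, so
# calc_distance_lat(48.0, 49.0) returns 111120.0. calc_distance_long scales a
# longitude difference by the Earth's circumference at that latitude, roughly
# 40075000 * cos(48 deg) / 360 ~= 74.5 km per degree at 48 degrees north.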
def unit_vector(vector):
""" Returns the unit vector of the vector. """
return vector / np.linalg.norm(vector)
def pickle_processing(path_pickle, path_output, transducer, freq_TS, TS_parameters, hac_info, orient):
"""
Process the pickle file from pymovies tracking and returns several key parameters for each track.
input:
- path_pickle: path to a pickle file, output of movies TS analysis
- path_output: path to store output csv
        - transducer: name of the transducer to use
        - freq_TS: reference frequency for TS extraction
        - TS_parameters: parameters for TS detection and track selection
- hac_info: complementary info on the different runs, same for all tracks of each run
- orient: orientation ('H' or 'V')
outputs: multiple csv
- tracks: matrix of tracks with:
            - track, target: relative and absolute index for each track
            - TSrange: mean distance in m to transducer
            - TSalong, TSathwart: mean angles in the transducer beam
            - x, y, z, x_gps, y_gps, z_gps: relative and absolute position
            - TScomp_mean, TScomp: mean TS over all frequencies, or for the closest frequency to the reference frequency
            - nb_target: number of targets per track
- timeInt and Time: mean time in ns since 1970 and in string formats
- k_dist: distance in m to the nearest neighbour
- State, Abrv, tailleMoyenne: variables from the hac info file
- b20: b20 value
- Nv: Nv value
- dist_x, dist_y, dist_z, dist_range, dist_tot: mean displacement in m following different axis
- tilt_angle, cap_rel, cap_abs: tilt or heading angle (absolute and relative) in degrees (according to orientation)
- vit_x, vit_y, vit_z, vit_range: speed following different axis
- sd_x, sd_y, sd_z, sd_range, sd_ta: standard deviation of previous displacement and angle
- sd_tot: sum of standard deviation
- targets: matrix of all targets
- freq: mean TScomp for each frequency
"""
if path_pickle[-7:] != ".pickle": # Check the pickle file
logger.error("Not a pickle file !")
return
name_transect = os.path.basename(path_pickle)[:-18]
logger.info("reading...")
    if os.path.getsize(path_pickle) > 0:
        result = pd.read_pickle(path_pickle)  # read the pickle file
    else:
        logger.error("File empty !")  # the pickle file is empty, nothing to process
        return
    logger.info("done !")
for i in range(len(result[10])): # get index for the sounder and transducer according to given transducer
for j in range(len(result[10][i])):
if result[10][i][j] == transducer:
indexSounder = i
indexTransducer = j
logger.info("creating tables...") # Extract the pickle data in several panda tables.
nb_target = len(result[0][indexSounder][indexTransducer]) # number of targets for the given sounder and transducer
if nb_target > 0: # check if any targets
nb_freq = int(len(result[9][indexSounder][indexTransducer]) / nb_target)
index_targets = []
for i in range(nb_target):
index_targets += [i for j in range(nb_freq)]
targets = pd.DataFrame( # individual target data
{
"track": np.array(result[8][indexSounder][indexTransducer]),
"target": range(nb_target),
"timeTarget": np.array(result[0][indexSounder][indexTransducer]),
"TSrange": np.array(result[1][indexSounder][indexTransducer]),
"TSalong": np.array(result[4][indexSounder][indexTransducer]),
"TSathwart": np.array(result[5][indexSounder][indexTransducer]),
},
index=range(nb_target)
)
freq = pd.DataFrame( # TS and frequency data
{
"target": index_targets,
"TScomp": np.array(result[2][indexSounder][indexTransducer]),
"TSucomp": np.array(result[3][indexSounder][indexTransducer]),
"TSfreq": np.array(result[9][indexSounder][indexTransducer]),
},
index=range(nb_freq * nb_target)
)
# get the position of each targets (relative and absolute)
position = pd.DataFrame(result[6][indexSounder][indexTransducer],
index=range(0, len(result[0][indexSounder][indexTransducer])), columns=['x', 'y', 'z'])
positionGPS = pd.DataFrame(result[7][indexSounder][indexTransducer],
index=range(0, len(result[0][indexSounder][indexTransducer])),
columns=['x_gps', 'y_gps', 'z_gps'])
TS_means = freq.groupby(by="target").mean() # get the TScomp_mean: mean TScomp for all frequencies
TS_means = TS_means.rename(columns={'TScomp': 'TScomp_mean'})
freq_TS = min(list(freq['TSfreq']), key=lambda x: abs(x - freq_TS)) # closest frequency from the reference
# frequency freq_TS
TS_freq = freq[freq.TSfreq == freq_TS] # get the TScomp for the given reference frequency
TS_freq.index = range(len(TS_freq))
logger.info("done !")
targets = pd.concat([targets, position, positionGPS, TS_means['TScomp_mean'], TS_freq['TScomp']],
axis=1) # merge of all the data
tracks = targets.groupby(by="track").target.agg('count') # get number of target per tracks
tracks_len = pd.DataFrame(
{'track': tracks.index,
'nb_target': tracks.values},
index=range(len(tracks.index))
)
targets = pd.merge(targets, tracks_len, how='inner', on='track') # add the track length to the target data
targets_selected = targets.loc[targets['nb_target'] >= TS_parameters['MinEchoNumber']] # Select by track length
targets_data = targets_selected.sort_values('track')
targets_data['timeInt'] = targets_data['timeTarget'].apply(lambda x: x.value) # convert time to int (ns, 1970)
logger.info("targets ready !")
##### Tracks grouping and analysis
logger.info('Gathering tracks data...')
tracks_data = targets_data.groupby('track').mean() # group targets by tracks, keep each parameters as mean
tracks_data['Time'] = pd.to_datetime(tracks_data['timeInt']) # panda's datetime
tracks_data['k_dist'] = point_processing(tracks_data) # Distance to closest neighbor
for index, row in hac_info.iterrows(): # add the hac_info columns (same for each run)
if row.Name == name_transect:
for header in hac_info.columns[1:]:
tracks_data[header] = row[header]
tracks_data['b20'] = tracks_data['TScomp'] - (
20 * np.log10(tracks_data['tailleMoyenne'])) # get the b20 from TScomp and taille moyenne
# get the Nv value for each track
path_Nv = path_output + '/' + name_transect + "_Nv.csv"
if os.path.exists(path_Nv):
Nv = pd.read_csv(path_Nv)
tracks_data['Nv'] = Sv.get_nv(tracks_data, Nv)
else:
tracks_data['Nv'] = -999 # No Nv data provided
# tracks movement analysis
tracks_id = list(targets_data.groupby('track').groups)
scores = []
for i in tracks_id: # for each track
track_i = targets_data.loc[
targets_data['track'] == i, ['timeTarget', 'x', 'y', 'z', 'TSrange', 'x_gps', 'y_gps']]
track_i = track_i.sort_values('timeTarget') # Sort by time
deltas = [[], [], [], [], [], [], [], [], []]
for j in range(1, len(track_i)):
deltas[0].append(track_i.x.iloc[j] - track_i.x.iloc[j - 1]) # delta in x axis
deltas[1].append(track_i.y.iloc[j] - track_i.y.iloc[j - 1]) # delta in y axis
deltas[2].append(track_i.z.iloc[j] - track_i.z.iloc[j - 1]) # delta in z axis
deltas[3].append(track_i.TSrange.iloc[j] - track_i.TSrange.iloc[j - 1]) # delta in range
deltas[4].append(calc_distance_lat(track_i.x_gps.iloc[j],
track_i.x_gps.iloc[j - 1])) # distance between the 2 latitudes
deltas[5].append(calc_distance_long(track_i.x_gps.iloc[j], track_i.y_gps.iloc[j],
track_i.y_gps.iloc[j - 1])) # distance between the 2 longitudes
if orient == 'H': #Horizontal echo sounder
if track_i.x.iloc[
j] > 0: # check if x is coherent (beam is oriented on starboard), corrects direction
# accordingly
cap_rel = abs(math.degrees(
math.atan2(deltas[1][j - 1], - deltas[0][j - 1]))) # heading relative to the boat
else:
cap_rel = abs(math.degrees(math.atan2(deltas[1][j - 1], deltas[0][j - 1])))
cap_abs = math.degrees(
math.atan2(deltas[5][j - 1], deltas[4][j - 1])) # absolute (geographical) heading
if cap_abs < 0:
cap_abs = 360 + cap_abs # correct to have 0-360° headings
tilt_angle = (math.degrees(
math.atan2(math.sqrt(deltas[0][j - 1] ** 2 + deltas[1][j - 1] ** 2),
deltas[2][j - 1])) - 90) # tilt angle of the track
deltas[6].append(tilt_angle)
deltas[7].append(cap_rel)
deltas[8].append(cap_abs)
else: #vertical echo sounder
tilt_angle = (math.degrees(
math.atan2(math.sqrt(deltas[0][j - 1] ** 2 + deltas[1][j - 1] ** 2),
deltas[2][j - 1])) - 90) # tilt angle of the track
deltas[6].append(tilt_angle)
deltas[7].append(999) # relative and absolute heading is irrelevant on vertical echo sounder
deltas[8].append(999)
delta_t = track_i.timeTarget.iloc[len(track_i) - 1] - track_i.timeTarget.iloc[0]
delta_t = delta_t.total_seconds() # time length of the track (s)
dist_x = np.sum(deltas[4]) # dist is the length of the track on several dimensions
dist_y = np.sum(deltas[5])
dist_z = np.sum(deltas[2])
dist_range = np.sum(deltas[3])
dist_tot = dist_x + dist_y + dist_z
tilt_angle = np.mean(deltas[6]) # mean tilt angle of the track
cap_rel = np.mean(deltas[7]) # mean relative heading of the track
cap_abs = np.mean(deltas[8]) # mean absolute heading of the track
vit_x = dist_x / delta_t # speed
vit_y = dist_y / delta_t
vit_z = dist_z / delta_t
vit_range = dist_range / delta_t
sd_x = np.std(deltas[4]) # standard deviation
sd_y = np.std(deltas[5])
sd_z = np.std(deltas[2])
sd_range = np.std(deltas[3])
sd_ta = np.std(deltas[6])
sd_cr = np.std(deltas[7])
sd_ca = np.std(deltas[8])
sd_tot = sd_x + sd_y + sd_z
scores.append(
[i, dist_x / len(track_i), dist_y / len(track_i), dist_z / len(track_i), dist_range, dist_tot,
tilt_angle, cap_rel, cap_abs, vit_x, vit_y, vit_z, vit_range, sd_x, sd_y, sd_z, sd_range, sd_tot,
sd_ta, sd_cr, sd_ca]
)
dist_scores = pd.DataFrame(scores, index=range(len(scores)), # storing values as a panda data frame
columns=['track', 'dist_x', 'dist_y', 'dist_z', 'dist_range', 'dist_tot',
'tilt_angle', 'cap_rel', 'cap_abs', 'vit_x', 'vit_y', 'vit_z', 'vit_range',
'sd_x',
'sd_y', 'sd_z', 'sd_range', 'sd_tot', 'sd_ta', 'sd_cr', 'sd_ca'])
tracks_data = pd.merge(tracks_data, dist_scores, how='inner', on='track') # merge with the main data frame
logger.info("Done !")
logger.debug('Tracks summary :')
logger.debug(str(tracks_data.describe()))
# Storing 2 different data frames as csv:
# - targets, with individual targets of each points
# - tracks, with the run track data
filename_1 = path_output + "/" + name_transect + "_tracks.csv"
filename_2 = path_output + "/" + name_transect + "_targets.csv"
tracks_data.to_csv(filename_1, index=False)
targets_data.to_csv(filename_2, index=False)
logger.info("files saved !")
freq_data = freq.groupby('TSfreq').mean()
freq_data['freq'] = freq_data.index
filename_3 = path_output + "/" + name_transect + "_freq.csv"
freq_data.to_csv(filename_3, index=False)
else:
logger.error("No targets !!!")
|
[
"numpy.sum",
"math.atan2",
"pandas.read_csv",
"numpy.mean",
"numpy.linalg.norm",
"numpy.std",
"pandas.merge",
"numpy.transpose",
"os.path.exists",
"sklearn.neighbors.NearestNeighbors",
"math.cos",
"numpy.log10",
"pandas.concat",
"math.sqrt",
"os.path.basename",
"os.path.getsize",
"functools.reduce",
"pandas.to_datetime",
"numpy.cos",
"numpy.vstack",
"Sv.get_nv",
"numpy.array",
"pandas.read_pickle",
"logging.getLogger"
] |
[((160, 186), 'logging.getLogger', 'logging.getLogger', (['"""marin"""'], {}), "('marin')\n", (177, 186), False, 'import logging\n'), ((705, 759), 'numpy.vstack', 'np.vstack', (['[tracks.lat_m, tracks.long_m, tracks.z_gps]'], {}), '([tracks.lat_m, tracks.long_m, tracks.z_gps])\n', (714, 759), True, 'import numpy as np\n'), ((836, 855), 'numpy.transpose', 'np.transpose', (['array'], {}), '(array)\n', (848, 855), True, 'import numpy as np\n'), ((1129, 1173), 'functools.reduce', 'functools.reduce', (['np.logical_and', 'conditions'], {}), '(np.logical_and, conditions)\n', (1145, 1173), False, 'import functools\n'), ((1630, 1652), 'numpy.linalg.norm', 'np.linalg.norm', (['vector'], {}), '(vector)\n', (1644, 1652), True, 'import numpy as np\n'), ((3811, 3840), 'os.path.basename', 'os.path.basename', (['path_pickle'], {}), '(path_pickle)\n', (3827, 3840), False, 'import os\n'), ((3885, 3913), 'os.path.getsize', 'os.path.getsize', (['path_pickle'], {}), '(path_pickle)\n', (3900, 3913), False, 'import os\n'), ((3936, 3963), 'pandas.read_pickle', 'pd.read_pickle', (['path_pickle'], {}), '(path_pickle)\n', (3950, 3963), True, 'import pandas as pd\n'), ((6835, 6935), 'pandas.concat', 'pd.concat', (["[targets, position, positionGPS, TS_means['TScomp_mean'], TS_freq['TScomp']]"], {'axis': '(1)'}), "([targets, position, positionGPS, TS_means['TScomp_mean'], TS_freq\n ['TScomp']], axis=1)\n", (6844, 6935), True, 'import pandas as pd\n'), ((7270, 7324), 'pandas.merge', 'pd.merge', (['targets', 'tracks_len'], {'how': '"""inner"""', 'on': '"""track"""'}), "(targets, tracks_len, how='inner', on='track')\n", (7278, 7324), True, 'import pandas as pd\n'), ((7950, 7988), 'pandas.to_datetime', 'pd.to_datetime', (["tracks_data['timeInt']"], {}), "(tracks_data['timeInt'])\n", (7964, 7988), True, 'import pandas as pd\n'), ((8628, 8651), 'os.path.exists', 'os.path.exists', (['path_Nv'], {}), '(path_Nv)\n', (8642, 8651), False, 'import os\n'), ((13745, 13804), 'pandas.merge', 'pd.merge', (['tracks_data', 'dist_scores'], {'how': '"""inner"""', 'on': '"""track"""'}), "(tracks_data, dist_scores, how='inner', on='track')\n", (13753, 13804), True, 'import pandas as pd\n'), ((867, 921), 'sklearn.neighbors.NearestNeighbors', 'NearestNeighbors', ([], {'n_neighbors': '(2)', 'algorithm': '"""ball_tree"""'}), "(n_neighbors=2, algorithm='ball_tree')\n", (883, 921), False, 'from sklearn.neighbors import NearestNeighbors\n'), ((8670, 8690), 'pandas.read_csv', 'pd.read_csv', (['path_Nv'], {}), '(path_Nv)\n', (8681, 8690), True, 'import pandas as pd\n'), ((8723, 8749), 'Sv.get_nv', 'Sv.get_nv', (['tracks_data', 'Nv'], {}), '(tracks_data, Nv)\n', (8732, 8749), False, 'import Sv\n'), ((11946, 11963), 'numpy.sum', 'np.sum', (['deltas[4]'], {}), '(deltas[4])\n', (11952, 11963), True, 'import numpy as np\n'), ((12042, 12059), 'numpy.sum', 'np.sum', (['deltas[5]'], {}), '(deltas[5])\n', (12048, 12059), True, 'import numpy as np\n'), ((12081, 12098), 'numpy.sum', 'np.sum', (['deltas[2]'], {}), '(deltas[2])\n', (12087, 12098), True, 'import numpy as np\n'), ((12124, 12141), 'numpy.sum', 'np.sum', (['deltas[3]'], {}), '(deltas[3])\n', (12130, 12141), True, 'import numpy as np\n'), ((12215, 12233), 'numpy.mean', 'np.mean', (['deltas[6]'], {}), '(deltas[6])\n', (12222, 12233), True, 'import numpy as np\n'), ((12288, 12306), 'numpy.mean', 'np.mean', (['deltas[7]'], {}), '(deltas[7])\n', (12295, 12306), True, 'import numpy as np\n'), ((12367, 12385), 'numpy.mean', 'np.mean', (['deltas[8]'], {}), '(deltas[8])\n', (12374, 12385), True, 'import numpy 
as np\n'), ((12608, 12625), 'numpy.std', 'np.std', (['deltas[4]'], {}), '(deltas[4])\n', (12614, 12625), True, 'import numpy as np\n'), ((12667, 12684), 'numpy.std', 'np.std', (['deltas[5]'], {}), '(deltas[5])\n', (12673, 12684), True, 'import numpy as np\n'), ((12704, 12721), 'numpy.std', 'np.std', (['deltas[2]'], {}), '(deltas[2])\n', (12710, 12721), True, 'import numpy as np\n'), ((12745, 12762), 'numpy.std', 'np.std', (['deltas[3]'], {}), '(deltas[3])\n', (12751, 12762), True, 'import numpy as np\n'), ((12783, 12800), 'numpy.std', 'np.std', (['deltas[6]'], {}), '(deltas[6])\n', (12789, 12800), True, 'import numpy as np\n'), ((12821, 12838), 'numpy.std', 'np.std', (['deltas[7]'], {}), '(deltas[7])\n', (12827, 12838), True, 'import numpy as np\n'), ((12859, 12876), 'numpy.std', 'np.std', (['deltas[8]'], {}), '(deltas[8])\n', (12865, 12876), True, 'import numpy as np\n'), ((520, 540), 'numpy.cos', 'np.cos', (['tracks.x_gps'], {}), '(tracks.x_gps)\n', (526, 540), True, 'import numpy as np\n'), ((1494, 1507), 'math.cos', 'math.cos', (['lat'], {}), '(lat)\n', (1502, 1507), False, 'import math\n'), ((4919, 4969), 'numpy.array', 'np.array', (['result[8][indexSounder][indexTransducer]'], {}), '(result[8][indexSounder][indexTransducer])\n', (4927, 4969), True, 'import numpy as np\n'), ((5045, 5095), 'numpy.array', 'np.array', (['result[0][indexSounder][indexTransducer]'], {}), '(result[0][indexSounder][indexTransducer])\n', (5053, 5095), True, 'import numpy as np\n'), ((5124, 5174), 'numpy.array', 'np.array', (['result[1][indexSounder][indexTransducer]'], {}), '(result[1][indexSounder][indexTransducer])\n', (5132, 5174), True, 'import numpy as np\n'), ((5203, 5253), 'numpy.array', 'np.array', (['result[4][indexSounder][indexTransducer]'], {}), '(result[4][indexSounder][indexTransducer])\n', (5211, 5253), True, 'import numpy as np\n'), ((5284, 5334), 'numpy.array', 'np.array', (['result[5][indexSounder][indexTransducer]'], {}), '(result[5][indexSounder][indexTransducer])\n', (5292, 5334), True, 'import numpy as np\n'), ((5532, 5582), 'numpy.array', 'np.array', (['result[2][indexSounder][indexTransducer]'], {}), '(result[2][indexSounder][indexTransducer])\n', (5540, 5582), True, 'import numpy as np\n'), ((5611, 5661), 'numpy.array', 'np.array', (['result[3][indexSounder][indexTransducer]'], {}), '(result[3][indexSounder][indexTransducer])\n', (5619, 5661), True, 'import numpy as np\n'), ((5689, 5739), 'numpy.array', 'np.array', (['result[9][indexSounder][indexTransducer]'], {}), '(result[9][indexSounder][indexTransducer])\n', (5697, 5739), True, 'import numpy as np\n'), ((8424, 8462), 'numpy.log10', 'np.log10', (["tracks_data['tailleMoyenne']"], {}), "(tracks_data['tailleMoyenne'])\n", (8432, 8462), True, 'import numpy as np\n'), ((10705, 10751), 'math.atan2', 'math.atan2', (['deltas[5][j - 1]', 'deltas[4][j - 1]'], {}), '(deltas[5][j - 1], deltas[4][j - 1])\n', (10715, 10751), False, 'import math\n'), ((10428, 10475), 'math.atan2', 'math.atan2', (['deltas[1][j - 1]', '(-deltas[0][j - 1])'], {}), '(deltas[1][j - 1], -deltas[0][j - 1])\n', (10438, 10475), False, 'import math\n'), ((10588, 10634), 'math.atan2', 'math.atan2', (['deltas[1][j - 1]', 'deltas[0][j - 1]'], {}), '(deltas[1][j - 1], deltas[0][j - 1])\n', (10598, 10634), False, 'import math\n'), ((10990, 11046), 'math.sqrt', 'math.sqrt', (['(deltas[0][j - 1] ** 2 + deltas[1][j - 1] ** 2)'], {}), '(deltas[0][j - 1] ** 2 + deltas[1][j - 1] ** 2)\n', (10999, 11046), False, 'import math\n'), ((11404, 11460), 'math.sqrt', 'math.sqrt', 
(['(deltas[0][j - 1] ** 2 + deltas[1][j - 1] ** 2)'], {}), '(deltas[0][j - 1] ** 2 + deltas[1][j - 1] ** 2)\n', (11413, 11460), False, 'import math\n')]
|
#!/usr/bin/env python3
import argparse
import os
import sys
import collections
import textwrap
from functools import partial
import machinery as ma
from machinery import ErrMsg, chk, bail
from machinery import LogEntry as L
from generic import lty, interleave, itemify, dupchk, listify, w_str
# ------------------------------------------------------------------------------
# Html file (including navigation and sections)
class HtmlFile:
"""Html file representing litmus test results"""
sp = ' '
# HTML prefix before tables
prefix = textwrap.dedent("""\
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>GPU Litmus Test Results</title>
<link rel="stylesheet" href="common.css" type="text/css" media="screen"/>
</head>
<body>
<div class="outer">
<div class="inner">
<h1>GPU Litmus Test Results</h1>
<br>
<center>
To view the logfile for a test and chip, click on the corresponding number.
The logfile contains the litmus test code, and the incantations used for the
test run.
</center>
<br><br>
""")
# HTML suffix after tables
suffix = textwrap.dedent("""
</div>
</div>
</body>
</html>
""")
def __init__(self):
self.items = []
self.nav = '<h4>Contents</h4>\n'
self.secn = 0
self.last_level = -1
def add_nav_item(self, link, level):
sp = self.sp
li = sp * (level + 1)
ul = sp * (self.last_level + 1)
if level == self.last_level:
self.nav += li + '<li><a href="#id' + str(self.secn) + '">' + link +\
'</a></li>\n'
elif level == self.last_level + 1:
self.nav += ul + '<ul>\n'
self.nav += li + '<li><a href="#id' + str(self.secn) + '">' + link +\
'</a></li>\n'
elif level < self.last_level:
self.close_nav(level)
self.nav += li + '<li><a href="#id' + str(self.secn) + '">' + link +\
'</a></li>\n'
else:
assert(False)
self.last_level = level
def close_nav(self, level):
sp = self.sp
while self.last_level > level:
self.nav += sp * self.last_level + '</ul>\n'
self.last_level -= 1
def new_section(self, heading, level):
assert(0 <= level <= 2)
l = str(level+2)
s = '<h' + l + '><a id="id' + str(self.secn) + '">' + heading + '</a></h'\
+ l + '>\n'
self.items.append(s)
self.add_nav_item(heading, level)
self.secn += 1
def add_html(self, html):
self.items.append(html)
def finish(self, nav=True):
self.close_nav(-1)
l = [self.prefix]
if nav:
l += [self.nav]
l += self.items + [self.suffix]
self.s = ''.join(l)
  def write(self, fn):
    assert(self.s)
    with open(fn, 'w') as f:
      f.write(self.s)
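# Illustrative sketch (not part of the original script): the intended life
# cycle of HtmlFile is "new_section / add_html as needed, then finish() and
# write()". The file name below is hypothetical.
def _example_html_file():
  h = HtmlFile()
  h.new_section('Example section', 0)
  h.add_html('<p>example table would go here</p>\n')
  h.finish()
  h.write('example.html')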
# ------------------------------------------------------------------------------
### Used by all HTML file producers
# ks: list of test names to include in the table
# logs: list of log objects (only logs which have the key are included in the
# table)
def produce_table(ks, logs, diro='entries'):
logs = [ l for l in logs if l.any_key(ks) ]
s = '<table>\n'
# Process header
s += '<tr>\n'
s += ' <th>Scope tree</th>\n'
s += ' <th>Memory map</th>\n'
s += ' <th>Name</th>\n'
for log in logs:
# Remove directory prefix and suffix
name = os.path.basename(log.fn)
idx = name.find('.')
if idx != -1:
name = name[:idx]
s += ' <th>' + name + '</th>\n'
s += '</tr>\n'
# Process rows
for k in ks:
# Start new row
s += '<tr>\n'
le = ma.get_entry(k, logs)
s += le.pp_prefix(2)
for log in logs:
e = log.get(k)
if e:
s += e.pp_cell_link_dir(2, diro)
# Produce file containing raw litmus log
e.store_log_dir(diro)
else:
s += '<td><a href="">---</a></td>\n'
s += '</tr>\n'
s += '</table>\n'
return s
# Filtering according to scopes and memory regions; no filtering according to
# names
def get_section_filters():
def c(f, g):
return lambda e: f(e) and g(e)
# List of functions that each take a log entry
d = [
# Simple scopes, global memory
c(L.is_warp, L.is_global),
c(L.is_cta, L.is_global),
c(L.is_ker, L.is_global),
# Simple scopes, shared memory
c(L.is_warp, L.is_shared),
# Simple scopes, mixed memory
c(L.is_warp, L.is_mixed_mem),
# Mixed scopes, global memory
c(L.is_mixed_scope, L.is_global),
# Mixed scopes, shared memory
c(L.is_mixed_scope, L.is_shared),
# Mixed scopes, mixed memory
c(L.is_mixed_scope, L.is_mixed_mem)
]
return d
def get_section_names():
# Parallel the above functions
names = [
'Different warps, same CTA; global memory',
'Different CTAs, same kernel; global memory',
'Different kernels, same device; global memory',
'Different warps, same CTA; shared memory',
'Different warps, same CTA; mixed memory',
'Mixed scopes, global memory',
'Mixed scopes, shared memory',
'Mixed scopes, mixed memory'
]
return names
# Get key patterns per axiom
def get_axiom_patterns():
l = [
('SC per location', ['CO', 'Co']),
('No Thin Air', ['(LB$)|(LB\+)|(LB\-)']),
('Observation', ['(MP$)|(MP\+)|(MP\-)', 'WRC', 'ISA2']),
('Propagation Light', ['2\+2W', 'W\+RW\+2W', '(S$)|(S\+)|(S\-)']),
('Propagation Heavy', [ 'SB', '(R$)|(R\+)|(R\-)', 'RWC', 'IRIW' ])
]
return l
# ------------------------------------------------------------------------------
############
# Toplevel #
############
# f: function to be called; args: arguments to the function
def mux(f, args):
inp = args.input
l = list(listify(inp))
if hasattr(args, 'out'):
l.append(args.out)
chk(not dupchk(l), 'duplicate files given')
# Read ordinary logs (if we do not want to read an incantation log)
if f != incantations and f != incantations_flat and f != incantations_html_flat:
c = type(inp) is list
if not c:
inp = [inp]
inp = ma.get_logs(inp, lh=ma.Log)
if not c:
inp = inp[0]
args.input = inp
f(args)
###############
# Subcommands #
###############
### Produce table with sections according to axioms
def classified(args):
pos = args.pos
logs = args.input
assert(lty(logs, ma.Log))
assert(hasattr(args, 'diro'))
l = get_axiom_patterns()
h = HtmlFile()
all_matching = []
for name, val in l:
ks = ma.get_matching_keys(val, logs)
if pos:
ks = ma.get_pos_keys(logs, ks)
all_matching += ks
if ks:
h.new_section(name, 0)
s = produce_table(ks, logs, diro=args.diro)
h.add_html(s)
all_matching = set(all_matching)
if pos:
ks = ma.get_pos_keys(logs)
else:
ks = ma.get_keys(logs)
ks = set(ks) - all_matching
ks = list(ks)
if ks:
h.new_section('Other', 0)
ks.sort()
    s = produce_table(ks, logs, diro=args.diro)
h.add_html(s)
h.finish()
h.write(args.out)
### Two level classification
def two_level(args):
pos = args.pos
logs = args.input
assert(lty(logs, ma.Log))
assert(hasattr(args, 'diro'))
l = get_axiom_patterns()
h = HtmlFile()
all_matching = []
for name, val in l:
ks_s = ma.get_matching_keys(val, logs)
if pos:
ks_s = ma.get_pos_keys(logs, ks_s)
all_matching += ks_s
if ks_s:
h.new_section(name, 0)
# Now divide by other sections
filters = get_section_filters()
names = get_section_names()
for f, name in zip(filters, names):
ks = ma.get_filtered_keys(f, logs, ks_s)
if pos:
ks = ma.get_pos_keys(logs, ks)
if ks:
h.new_section(name, 1)
s = produce_table(ks, logs, diro=args.diro)
h.add_html(s)
# Rest
all_matching = set(all_matching)
if pos:
ks_s = ma.get_pos_keys(logs)
else:
ks_s = ma.get_keys(logs)
ks_s = set(ks_s) - all_matching
ks_s = list(ks_s)
if ks_s:
h.new_section('Other', 0)
ks_s.sort()
filters = get_section_filters()
names = get_section_names()
for f, name in zip(filters, names):
ks = ma.get_filtered_keys(f, logs, ks_s)
if pos:
ks = ma.get_pos_keys(logs, ks)
if ks:
h.new_section(name, 1)
s = produce_table(ks, logs, diro=args.diro)
h.add_html(s)
h.finish()
h.write(args.out)
### Produce table with sections according to scopes and memory regions
def sections(args):
pos = args.pos
logs = args.input
assert(lty(logs, ma.Log))
assert(hasattr(args, 'diro'))
s = ''
h = HtmlFile()
filters = get_section_filters()
names = get_section_names()
for f, name in zip(filters, names):
ks = ma.get_filtered_keys(f, logs)
if pos:
ks = ma.get_pos_keys(logs, ks)
if ks:
h.new_section(name, 0)
s = produce_table(ks, logs, diro=args.diro)
h.add_html(s)
h.finish()
h.write(args.out)
### Produce flat table with all tests
def flat(args):
pos = args.pos
logs = args.input
assert(lty(logs, ma.Log))
assert(hasattr(args, 'diro'))
# Get all the keys
if pos:
ks = ma.get_pos_keys(logs)
else:
ks = ma.get_keys(logs)
s = produce_table(ks, logs, diro=args.diro)
h = HtmlFile()
h.add_html(s)
h.finish(nav=False)
h.write(args.out)
# ------------------------------------------------------------------------------
### Fill up table line by line
# l: list of items
# sep: separator
# end: end of line
# n: number of elements on line
def fill_up(l, sep, end, nl):
n = len(l)
s = ""
while l:
chunk = l[:nl]
line = sep.join(chunk)
s += line + ((nl - len(chunk)) * sep) + end
l = l[nl:]
return s
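# Worked example (sketch): with sep=' & ', end='\\\\\n' and nl=2,
# fill_up(['a', 'b', 'c'], ...) yields the two LaTeX rows 'a & b\\' and
# 'c & \\'; the short last row is padded with separators so every row has the
# same number of cells.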
def latex_tbl(f, logs, n):
ks = ma.get_filtered_keys(f, logs)
sep = ' & '
s = ''
def mapper(k):
e = ma.get_entry(k, logs)
return e.short_name.lower() + sep + str(e.pos)
l = list(map(mapper, ks))
header = sep.join(["Test" + sep + "Freq."] * n) + "\\\\\n"
header += '\midrule\n'
s = header + fill_up(l, sep, '\\\\\n', n)
s += '\\bottomrule\n'
return s
def latex_tbl2(f, logs, n):
ks = ma.get_filtered_keys(f, logs)
sep = ' & '
s = '\midrule\n'
def mapper(k):
e = ma.get_entry(k, logs)
return e.short_name.lower(), str(e.pos)
l = list(map(mapper, ks))
l1, l2 = zip(*l)
l = interleave(l1, l2, n)
s = fill_up(l, sep, '\\\\\n', n)
s += '\\bottomrule\n'
return s
### Produce latex tables
def latex(args):
pos = args.pos
logs = args.input
assert(type(logs) == ma.Log)
n = 4
l = ['CO', 'Co', 'LB[^+]', 'MP[^+]', 'WRC[^+]', 'ISA2[^+]', '2\+2W[^+]',
'W\+RW\+2W[^+]', 'S[^+]+$', 'SB[^+]', 'R[^+]+$', 'RWC[^+]', 'IRIW[^+]']
# Produce d-warp:s-cta table, global memory
f = lambda e: L.is_global(e) and \
((L.is_warp(e) and L.does_match(e, l)) or
(L.does_match(e, ['CoWW', 'COWW'])))
s = latex_tbl(f, logs, n)
s += '\n'
# Produce d-warp:s-cta table, shared memory
f = lambda e: L.is_shared(e) and \
((L.is_warp(e) and L.does_match(e, l)) or
(L.does_match(e, ['CoWW', 'COWW'])))
s += latex_tbl(f, logs, n)
s += '\n'
# Produce d-cta:s-ker table, global memory
f = lambda e: L.is_global(e) and \
((L.is_cta(e) and L.does_match(e, l)))
s += latex_tbl(f, logs, n)
w_str(args.out, s)
def latex2(args):
pos = args.pos
logs = args.input
assert(type(logs) == ma.Log)
sep = ' & '
l = ['CO', 'Co', 'LB[^+]', 'MP[^+]', 'WRC[^+]', 'ISA2[^+]', '2\+2W[^+]',
'W\+RW\+2W[^+]', 'S[^+]+$', 'SB[^+]', 'R[^+]+$', 'RWC[^+]', 'IRIW[^+]']
lc = ['CoWW', 'COWW']
ks = ma.get_matching_keys(l, logs)
# Names + s1 + global memory
f = lambda e: L.is_global(e) and (L.is_warp(e) or L.does_match(e, lc))
ks1 = ma.get_filtered_keys(f, logs, ks)
ks1.sort()
n = len(ks1)
l = list()
for i, k in enumerate(ks1):
e = ma.get_entry(k, logs)
l.append(e.short_name.lower() + sep + str(e.pos) + sep)
# s1 + shared memory
f = lambda e: L.is_shared(e) and (L.is_warp(e) or L.does_match(e, lc))
ks2 = ma.get_filtered_keys(f, logs, ks)
ks2.sort()
assert(len(ks2) == n)
for i, k in enumerate(ks2):
e = ma.get_entry(k, logs)
l[i] += str(e.pos) + sep
# s2 + global memory
f = lambda e: L.is_global(e) and (L.is_cta(e) or L.does_match(e, lc))
ks3 = ma.get_filtered_keys(f, logs, ks)
ks3.sort()
assert(len(ks3) == n)
for i, k in enumerate(ks3):
e = ma.get_entry(k, logs)
l[i] += str(e.pos) + '\\\\'
s = '\n'.join(l)
w_str(args.out, s)
### Produce latex tables
def latex3(args):
pos = args.pos
logs = args.input
assert(type(logs) == ma.Log)
n = 8
l = ['CO', 'Co', 'LB[^+]', 'MP[^+]', 'WRC[^+]', 'ISA2[^+]', '2\+2W[^+]',
'W\+RW\+2W[^+]', 'S[^+]+$', 'SB[^+]', 'R[^+]+$', 'RWC[^+]', 'IRIW[^+]']
# Produce d-warp:s-cta table, global memory
f = lambda e: L.is_global(e) and \
((L.is_warp(e) and L.does_match(e, l)) or
(L.does_match(e, ['CoWW', 'COWW'])))
s = latex_tbl2(f, logs, n)
s += '\n'
# Produce d-warp:s-cta table, shared memory
f = lambda e: L.is_shared(e) and \
((L.is_warp(e) and L.does_match(e, l)) or
(L.does_match(e, ['CoWW', 'COWW'])))
s += latex_tbl2(f, logs, n)
s += '\n'
# Produce d-cta:s-ker table, global memory
f = lambda e: L.is_global(e) and \
((L.is_cta(e) and L.does_match(e, l)))
s += latex_tbl2(f, logs, n)
w_str(args.out, s)
# ------------------------------------------------------------------------------
### Produce incantations tables
# All tests that are not explicitly listed under 'line filters' in this file
# are ignored; non-existing tests and non-existing entries (e.g. for a certain
# combination of incantations) are also ignored
def incantations(args):
log = args.input
assert(type(log) == str)
# Get chip name
chip = os.path.basename(log)
assert(type(chip) == str)
chip_old = chip
while True:
chip = os.path.splitext(chip)[0]
if chip == chip_old:
break
chip_old = chip
assert(type(chip) == str)
# Get incantation log
log = ma.get_logs(log, lh=ma.LogInc)
assert(lty(log, ma.LogInc))
assert(len(log) == 1)
log = log[0]
out_base = args.out
assert(out_base)
les = log.get_all()
assert(lty(les, L))
# Table header
prefix = textwrap.dedent(r"""
\definecolor{Gray}{gray}{0.85}
\newcolumntype{g}{>{\columncolor{Gray}}r}
\newcolumntype{h}{>{\columncolor{Gray}}c}
\begin{tabular}{l g g g g r r r r g g g g r r r r}
\toprule
\multicolumn{17}{l}{Chip: <chip>}\\
\multicolumn{17}{l}{GPU Configuration: <config>}\\
\hline
& \multicolumn{4}{h}{Critical Incantations:} & \multicolumn{4}{c}{Critical Incantations:} & \multicolumn{4}{h}{Critical Incantations:} & \multicolumn{4}{c}{Critical Incantations:}\\
& \multicolumn{4}{h}{none} & \multicolumn{4}{c}{GBC} & \multicolumn{4}{h}{MS} & \multicolumn{4}{c}{GBC+MS}\\
& \multicolumn{4}{h}{Extra Incantations:} & \multicolumn{4}{c}{Extra Incantations:} & \multicolumn{4}{h}{Extra Incantations:} & \multicolumn{4}{c}{Extra Incantations:}\\
& none & R & S & R+S & none & R & S & R+S & none & R & S & R+S & none & R & S & R+S\\
\hline
""")
# Scope and mem filters, including table description and filename suffix
sfs = [
(lambda e: L.is_warp(e) and L.is_global(e),
'All threads in different warps, global memory',
's1-global'),
(lambda e: L.is_warp(e) and L.is_shared(e),
'All threads in different warps, shared memory',
's1-shared'),
(lambda e: L.is_cta(e) and L.is_global(e),
'All threads in different CTAs, global memory',
's2-global')
]
# Column filters
fs1 = [lambda e: not L.is_mem_stress(e), lambda e: L.is_mem_stress(e)]
fs2 = [lambda e: not L.is_general_bc(e), lambda e: L.is_general_bc(e)]
fs3 = [lambda e: not L.is_barrier(e), lambda e: L.is_barrier(e)]
fs4 = [lambda e: not L.is_rand_threads(e), lambda e: L.is_rand_threads(e)]
nc = 16
# Line filters
lfs = [
('uniproc', ['corr', 'corw', 'cowr', 'coww']),
('observation', ['mp', 'isa2', 'wrc']),
('prop light', ['2+2w', 'w+rw+2w', 's']),
('prop heavy', ['sb', 'rwc', 'iriw', 'r']),
('thin air', ['lb'])
]
lfs = collections.OrderedDict(lfs)
for sf, cfg, suf in sfs:
s = prefix
s = s.replace('<config>', cfg, 1)
s = s.replace('<chip>', chip, 1)
l1 = list(filter(sf, les))
assert(lty(l1, L))
for sec, tests in lfs.items():
tests.sort()
# Section header
s += r'{\bf ' + sec + '}' + (' &' * nc) + r'\\' + '\n'
for t in tests:
# Get all tests that match a simple test name (like rwc)
l2 = list(filter(partial(L.simple_match, s=t), l1))
assert(lty(l2, L))
if (len(l2) == 0):
continue
s += t
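        # The 16 columns enumerate every on/off combination of the four
        # incantations: bits 3..0 of the column index select memory stress,
        # general bank conflicts, barrier synchronisation and thread
        # randomisation respectively (matching the table header above).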
for i in range(0, nc):
i1 = (i & 0b1000) >> 3
i2 = (i & 0b0100) >> 2
i3 = (i & 0b0010) >> 1
i4 = (i & 0b0001)
f1 = fs1[i1]
f2 = fs2[i2]
f3 = fs3[i3]
f4 = fs4[i4]
f = lambda e: f1(e) and f2(e) and f3(e) and f4(e)
entry = '-'
item = list(filter(f, l2))
if item:
item = itemify(item)
assert(type(item) == L)
entry = item.pos
# ppi_incantations: mem_stress, general_bc, barrier, rand_threads
s += ' & ' + str(entry)
s += '\\\\\n'
s += '\\hline\n'
s += '\\end{tabular}\n'
# Write table to file
f_out = out_base + '-' + suf + '.tex'
w_str(f_out, s)
# ------------------------------------------------------------------------------
### Produce flat incantation tables
def incantations_flat(args):
log = args.input
assert(type(log) == str)
chip = os.path.basename(log)
assert(type(chip) == str)
chip_old = chip
while True:
chip = os.path.splitext(chip)[0]
if chip == chip_old:
break
chip_old = chip
assert(type(chip) == str)
log = ma.get_logs(log, lh=ma.LogInc)
assert(lty(log, ma.LogInc))
assert(len(log) == 1)
log = log[0]
# Prefix of output filename, default is the command name
out_base = args.out
assert(out_base)
les = log.get_all()
assert(lty(les, L))
short_names = log.get_names()
assert(lty(short_names, str))
short_names.sort()
# Table header
prefix = textwrap.dedent(r"""
\definecolor{Gray}{gray}{0.85}
\newcolumntype{g}{>{\columncolor{Gray}}r}
\newcolumntype{h}{>{\columncolor{Gray}}c}
\begin{tabular}{l g g g g r r r r g g g g r r r r}
\toprule
\multicolumn{17}{l}{Chip: <chip>}\\
\multicolumn{17}{l}{GPU Configuration: <config>}\\
\hline
& \multicolumn{4}{h}{Critical Incantations:} & \multicolumn{4}{c}{Critical Incantations:} & \multicolumn{4}{h}{Critical Incantations:} & \multicolumn{4}{c}{Critical Incantations:}\\
& \multicolumn{4}{h}{none} & \multicolumn{4}{c}{GBC} & \multicolumn{4}{h}{MS} & \multicolumn{4}{c}{GBC+MS}\\
& \multicolumn{4}{h}{Extra Incantations:} & \multicolumn{4}{c}{Extra Incantations:} & \multicolumn{4}{h}{Extra Incantations:} & \multicolumn{4}{c}{Extra Incantations:}\\
& none & R & S & R+S & none & R & S & R+S & none & R & S & R+S & none & R & S & R+S\\
\hline
""")
# Scope and mem filters, including table description and filename suffix
sfs = [
(lambda e: L.is_warp(e) and L.is_global(e),
'All threads in different warps, global memory',
's1-global'),
(lambda e: L.is_warp(e) and L.is_shared(e),
'All threads in different warps, shared memory',
's1-shared'),
(lambda e: L.is_cta(e) and L.is_global(e),
'All threads in different CTAs, global memory',
's2-global')
]
# Column filter building blocks (need to be combined to yield a single column
# filter)
fs1 = [lambda e: not L.is_mem_stress(e), lambda e: L.is_mem_stress(e)]
fs2 = [lambda e: not L.is_general_bc(e), lambda e: L.is_general_bc(e)]
fs3 = [lambda e: not L.is_barrier(e), lambda e: L.is_barrier(e)]
fs4 = [lambda e: not L.is_rand_threads(e), lambda e: L.is_rand_threads(e)]
nc = 16
# Scope and mem filters, table description, filename suffix
for sf, cfg, suf in sfs:
s = prefix
s = s.replace('<config>', cfg, 1)
s = s.replace('<chip>', chip, 1)
l1 = list(filter(sf, les))
assert(lty(l1, L))
for t in short_names:
l2 = list(filter(partial(L.simple_match, s=t), l1))
assert(lty(l2, L))
if (len(l2) == 0):
continue
# Name of test
s += t
for i in range(0, nc):
i1 = (i & 0b1000) >> 3
i2 = (i & 0b0100) >> 2
i3 = (i & 0b0010) >> 1
i4 = (i & 0b0001)
f1 = fs1[i1]
f2 = fs2[i2]
f3 = fs3[i3]
f4 = fs4[i4]
f = lambda e: f1(e) and f2(e) and f3(e) and f4(e)
entry = '-'
item = list(filter(f, l2))
if item:
item = itemify(item)
assert(type(item) == L)
entry = item.pos
# ppi_incantations: mem_stress, general_bc, barrier, rand_threads
s += ' & ' + str(entry)
s += '\\\\\n'
s += '\\end{tabular}\n'
# Write table to file
f_out = out_base + '-' + suf + '.tex'
w_str(f_out, s)
# ------------------------------------------------------------------------------
### Produce flat incantation tables
def incantations_html_flat(args):
log = args.input
assert(type(log) == str)
assert(hasattr(args, 'diro'))
chip = os.path.basename(log)
assert(type(chip) == str)
chip_old = chip
while True:
chip = os.path.splitext(chip)[0]
if chip == chip_old:
break
chip_old = chip
assert(type(chip) == str)
log = ma.get_logs(log, lh=ma.LogInc)
assert(lty(log, ma.LogInc))
assert(len(log) == 1)
log = log[0]
# Prefix of output filename, default is the command name
out_base = args.out
assert(out_base)
les = log.get_all()
assert(lty(les, L))
short_names = log.get_names()
assert(lty(short_names, str))
short_names.sort()
# Table header
# ' ': non-breaking space
# '✓': checkmark
prefix = textwrap.dedent(r"""
<!DOCTYPE html>
<html style="background:white;">
<head>
<meta charset="UTF-8">
<title>Evaluating incantations</title>
<link rel="stylesheet" href="common.css" type="text/css" media="screen"/>
<style>
ul {
padding-top: 10px;
}
li {
padding-top: 5px;
}
th, td {
text-align: right;
padding: 5px;
padding-right: 15px;
padding-left: 15px;
}
td:nth-child(1) {
text-align: left;
}
tr:nth-child(1), tr:nth-child(5) {
border-bottom: 2px solid black;
}
table {
border-top: none;
}
</style>
</head>
<body>
<div class="outer" style="width: 100%;">
<div class="inner">
<h1>Evaluating incantations</h1>
<br>
<center>
To view the logfile for a test, click on the corresponding number. The logfile
also contains the litmus test code. When a dash appears instead of a result,
it is either because optcheck failed or because there were insufficient
resources on the chip to run the test.
</center>
<br>
<center>
<table style="border:none">
<tr style="border:none">
<td style="text-align:left">Chip:</td>
<td style="text-align:left"> <chip> </td>
</tr>
<tr style="border:none">
<td style="text-align:left">Config:</td>
<td style="text-align:left"> <config> </td>
</tr>
</table>
</center>
<br>
<table>
<tr>
<td> </td>
<td>1</td>
<td>2</td>
<td>3</td>
<td>4</td>
<td>5</td>
<td>6</td>
<td>7</td>
<td>8</td>
<td>9</td>
<td>10</td>
<td>11</td>
<td>12</td>
<td>13</td>
<td>14</td>
<td>15</td>
<td>16</td>
</tr>
<tr>
<td>memory stress</td>
<td> </td><td> </td><td> </td><td> </td>
<td> </td><td> </td><td> </td><td> </td>
<td>✓</td><td>✓</td><td>✓</td><td>✓</td>
<td>✓</td><td>✓</td><td>✓</td><td>✓</td>
</tr>
<tr>
<td>general bank conflicts</td>
<td> </td><td> </td><td> </td><td> </td>
<td>✓</td><td>✓</td><td>✓</td><td>✓</td>
<td> </td><td> </td><td> </td><td> </td>
<td>✓</td><td>✓</td><td>✓</td><td>✓</td>
</tr>
<tr>
<td>thread synchronisation</td>
<td> </td><td> </td><td>✓</td><td>✓</td>
<td> </td><td> </td><td>✓</td><td>✓</td>
<td> </td><td> </td><td>✓</td><td>✓</td>
<td> </td><td> </td><td>✓</td><td>✓</td>
</tr>
<tr>
<td>thread randomisation</td>
<td> </td><td>✓</td><td> </td><td>✓</td>
<td> </td><td>✓</td><td> </td><td>✓</td>
<td> </td><td>✓</td><td> </td><td>✓</td>
<td> </td><td>✓</td><td> </td><td>✓</td>
</tr>
""")
# Scope and mem filters, including table description and filename suffix
sfs = [
(lambda e: L.is_warp(e) and L.is_global(e),
'All threads in different warps, global memory',
's1-global'),
(lambda e: L.is_warp(e) and L.is_shared(e),
'All threads in different warps, shared memory',
's1-shared'),
(lambda e: L.is_cta(e) and L.is_global(e),
'All threads in different CTAs, global memory',
's2-global')
]
# Column filter building blocks (need to be combined to yield a single column
# filter)
fs1 = [lambda e: not L.is_mem_stress(e), lambda e: L.is_mem_stress(e)]
fs2 = [lambda e: not L.is_general_bc(e), lambda e: L.is_general_bc(e)]
fs3 = [lambda e: not L.is_barrier(e), lambda e: L.is_barrier(e)]
fs4 = [lambda e: not L.is_rand_threads(e), lambda e: L.is_rand_threads(e)]
nc = 16
# Scope and mem filters, table description, filename suffix
for sf, cfg, suf in sfs:
s = prefix
s = s.replace('<config>', cfg, 1)
s = s.replace('<chip>', chip, 1)
l1 = list(filter(sf, les))
assert(lty(l1, L))
for t in short_names:
l2 = list(filter(partial(L.simple_match, s=t), l1))
assert(lty(l2, L))
if (len(l2) == 0):
continue
# Name of test
s += '<tr>\n'
s += '<td>' + t + '</td>'
for i in range(0, nc):
i1 = (i & 0b1000) >> 3
i2 = (i & 0b0100) >> 2
i3 = (i & 0b0010) >> 1
i4 = (i & 0b0001)
f1 = fs1[i1]
f2 = fs2[i2]
f3 = fs3[i3]
f4 = fs4[i4]
f = lambda e: f1(e) and f2(e) and f3(e) and f4(e)
entry = '-'
item = list(filter(f, l2))
if item:
item = itemify(item)
assert(type(item) == L)
entry = item.pos
s += item.pp_cell_link_dir(2, args.diro)
# Produce file containing raw litmus log
item.store_log_dir(args.diro)
else:
# ppi_incantations: mem_stress, general_bc, barrier, rand_threads
s += '<td>' + str(entry) + '</td>'
s += '</tr>\n'
s += """
</table>
</div>
</div>
</body>
</html>
"""
# Write table to file
f_out = out_base + '-' + suf + '.html'
w_str(f_out, s)
# ------------------------------------------------------------------------------
#######################
# Command line parser #
#######################
# Open files and parse or unpickle
class InputAction(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
setattr(namespace, self.dest, values)
def get_cmdline_parser(cmds):
# Parent of all
p = argparse.ArgumentParser()
# Dummy parent for common options
parent = argparse.ArgumentParser(add_help=False)
parent.add_argument('-p', '--pos', action='store_true')
# Subparsers
sp = p.add_subparsers(help='use <subcommand> -h for further help', title=
'subcommands')
# Flat
p1 = sp.add_parser(cmds[0], parents=[parent])
p1.add_argument('input', nargs='+', action=InputAction)
f = cmds[0] + '.html'
p1.add_argument('-o', '--out', action='store', default=f)
p1.add_argument('-d', '--diro', action='store', default='entries')
p1.set_defaults(func=partial(mux, flat))
# Classified
p2 = sp.add_parser(cmds[1], parents=[parent])
p2.add_argument('input', nargs='+', action=InputAction)
f = cmds[1] + '.html'
p2.add_argument('-o', '--out', action='store', default=f)
p2.add_argument('-d', '--diro', action='store', default='entries')
p2.set_defaults(func=partial(mux, classified))
# Sections
p3 = sp.add_parser(cmds[2], parents=[parent])
p3.add_argument('input', nargs='+', action=InputAction)
f = cmds[2] + '.html'
p3.add_argument('-o', '--out', action='store', default=f)
p3.add_argument('-d', '--diro', action='store', default='entries')
p3.set_defaults(func=partial(mux, sections))
# Two-level
p4 = sp.add_parser(cmds[3], parents=[parent])
p4.add_argument('input', nargs='+', action=InputAction)
f = cmds[3] + '.html'
p4.add_argument('-o', '--out', action='store', default=f)
p4.add_argument('-d', '--diro', action='store', default='entries')
p4.set_defaults(func=partial(mux, two_level))
# Latex
p5 = sp.add_parser(cmds[4], parents=[parent])
p5.add_argument('input', action=InputAction)
f = cmds[4] + '.tex'
p5.add_argument('-o', '--out', action='store', default=f)
p5.set_defaults(func=partial(mux, latex))
# Latex 2
p6 = sp.add_parser(cmds[5], parents=[parent])
p6.add_argument('input', action=InputAction)
f = cmds[5] + '.tex'
p6.add_argument('-o', '--out', action='store', default=f)
p6.set_defaults(func=partial(mux, latex2))
# Latex 3
p7 = sp.add_parser(cmds[6], parents=[parent])
p7.add_argument('input', action=InputAction)
f = cmds[6] + '.tex'
p7.add_argument('-o', '--out', action='store', default=f)
p7.set_defaults(func=partial(mux, latex3))
# Incantations
p8 = sp.add_parser(cmds[7], description='Produce tables comparing the\
effectiveness of the incantations')
p8.add_argument('input', action=InputAction, help='log (text or pickle)')
f = cmds[7]
p8.add_argument('-o', '--out', action='store', default=f,
help='output file basename (instead of default name)')
p8.set_defaults(func=partial(mux, incantations))
# Incantations flat
p9 = sp.add_parser(cmds[8], description='Produce flat tables comparing the\
effectiveness of the incantations')
p9.add_argument('input', action=InputAction, help='log (text or pickle)')
f = cmds[8]
p9.add_argument('-o', '--out', action='store', default=f,
help='output file basename (instead of default name)')
p9.set_defaults(func=partial(mux, incantations_flat))
# Incantations html
p10 = sp.add_parser(cmds[9], description='Produce flat html tables comparing\
the effectiveness of the incantations')
p10.add_argument('input', action=InputAction, help='log (text or pickle)')
f = cmds[9]
p10.add_argument('-o', '--out', action='store', default=f,
help='output file basename (instead of default name)')
p10.add_argument('-d', '--diro', action='store', default='entries-inc')
p10.set_defaults(func=partial(mux, incantations_html_flat))
return p
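# Example invocation (sketch; 'chip.log' is a placeholder for a litmus logfile
# produced by the test runner):
#   python log2tbl.py incantations-html chip.log -o chip -d entries-inc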
if __name__ == "__main__":
if len(sys.argv) == 1:
sys.argv += ['-h']
cmd = sys.argv[1]
ma.setup_err_handling('log2tbl.py')
cmds = ['flat', 'classified', 'sections', 'two-level', 'latex', 'latex2',
'latex3', 'incantations', 'incantations-flat', 'incantations-html']
p = get_cmdline_parser(cmds)
if cmd not in cmds:
p.print_help()
sys.exit(2)
print('cmd: ' + cmd)
pr = p.parse_args()
pr.func(pr)
|
[
"machinery.LogEntry.does_match",
"machinery.setup_err_handling",
"generic.interleave",
"argparse.ArgumentParser",
"machinery.LogEntry.is_mem_stress",
"machinery.LogEntry.is_barrier",
"generic.dupchk",
"machinery.get_entry",
"machinery.get_matching_keys",
"machinery.get_pos_keys",
"machinery.LogEntry.is_global",
"generic.lty",
"machinery.LogEntry.is_warp",
"machinery.get_filtered_keys",
"functools.partial",
"os.path.basename",
"generic.itemify",
"machinery.LogEntry.is_rand_threads",
"machinery.LogEntry.is_general_bc",
"machinery.LogEntry.is_cta",
"machinery.get_logs",
"sys.exit",
"machinery.get_keys",
"textwrap.dedent",
"generic.listify",
"generic.w_str",
"machinery.LogEntry.is_shared",
"os.path.splitext",
"collections.OrderedDict"
] |
[((547, 1072), 'textwrap.dedent', 'textwrap.dedent', (['""" <!DOCTYPE html>\n <html>\n <head>\n <meta charset="UTF-8">\n <title>GPU Litmus Test Results</title>\n <link rel="stylesheet" href="common.css" type="text/css" media="screen"/>\n </head>\n\n <body>\n <div class="outer">\n <div class="inner">\n\n <h1>GPU Litmus Test Results</h1>\n <br>\n\n <center>\n To view the logfile for a test and chip, click on the corresponding number.\n The logfile contains the litmus test code, and the incantations used for the\n test run.\n </center>\n <br><br>\n\n """'], {}), '(\n """ <!DOCTYPE html>\n <html>\n <head>\n <meta charset="UTF-8">\n <title>GPU Litmus Test Results</title>\n <link rel="stylesheet" href="common.css" type="text/css" media="screen"/>\n </head>\n\n <body>\n <div class="outer">\n <div class="inner">\n\n <h1>GPU Litmus Test Results</h1>\n <br>\n\n <center>\n To view the logfile for a test and chip, click on the corresponding number.\n The logfile contains the litmus test code, and the incantations used for the\n test run.\n </center>\n <br><br>\n\n """\n )\n', (562, 1072), False, 'import textwrap\n'), ((1106, 1170), 'textwrap.dedent', 'textwrap.dedent', (['"""\n </div>\n </div>\n </body>\n </html>\n """'], {}), '("""\n </div>\n </div>\n </body>\n </html>\n """)\n', (1121, 1170), False, 'import textwrap\n'), ((6161, 6178), 'generic.lty', 'lty', (['logs', 'ma.Log'], {}), '(logs, ma.Log)\n', (6164, 6178), False, 'from generic import lty, interleave, itemify, dupchk, listify, w_str\n'), ((6918, 6935), 'generic.lty', 'lty', (['logs', 'ma.Log'], {}), '(logs, ma.Log)\n', (6921, 6935), False, 'from generic import lty, interleave, itemify, dupchk, listify, w_str\n'), ((8340, 8357), 'generic.lty', 'lty', (['logs', 'ma.Log'], {}), '(logs, ma.Log)\n', (8343, 8357), False, 'from generic import lty, interleave, itemify, dupchk, listify, w_str\n'), ((8854, 8871), 'generic.lty', 'lty', (['logs', 'ma.Log'], {}), '(logs, ma.Log)\n', (8857, 8871), False, 'from generic import lty, interleave, itemify, dupchk, listify, w_str\n'), ((9546, 9575), 'machinery.get_filtered_keys', 'ma.get_filtered_keys', (['f', 'logs'], {}), '(f, logs)\n', (9566, 9575), True, 'import machinery as ma\n'), ((9927, 9956), 'machinery.get_filtered_keys', 'ma.get_filtered_keys', (['f', 'logs'], {}), '(f, logs)\n', (9947, 9956), True, 'import machinery as ma\n'), ((10134, 10155), 'generic.interleave', 'interleave', (['l1', 'l2', 'n'], {}), '(l1, l2, n)\n', (10144, 10155), False, 'from generic import lty, interleave, itemify, dupchk, listify, w_str\n'), ((11151, 11169), 'generic.w_str', 'w_str', (['args.out', 's'], {}), '(args.out, s)\n', (11156, 11169), False, 'from generic import lty, interleave, itemify, dupchk, listify, w_str\n'), ((11458, 11487), 'machinery.get_matching_keys', 'ma.get_matching_keys', (['l', 'logs'], {}), '(l, logs)\n', (11478, 11487), True, 'import machinery as ma\n'), ((11603, 11636), 'machinery.get_filtered_keys', 'ma.get_filtered_keys', (['f', 'logs', 'ks'], {}), '(f, logs, ks)\n', (11623, 11636), True, 'import machinery as ma\n'), ((11903, 11936), 'machinery.get_filtered_keys', 'ma.get_filtered_keys', (['f', 'logs', 'ks'], {}), '(f, logs, ks)\n', (11923, 11936), True, 'import machinery as ma\n'), ((12169, 12202), 'machinery.get_filtered_keys', 'ma.get_filtered_keys', (['f', 'logs', 'ks'], {}), '(f, logs, ks)\n', (12189, 12202), True, 'import machinery as ma\n'), ((12354, 12372), 'generic.w_str', 'w_str', (['args.out', 's'], {}), '(args.out, s)\n', (12359, 12372), False, 'from generic import lty, interleave, 
itemify, dupchk, listify, w_str\n'), ((13302, 13320), 'generic.w_str', 'w_str', (['args.out', 's'], {}), '(args.out, s)\n', (13307, 13320), False, 'from generic import lty, interleave, itemify, dupchk, listify, w_str\n'), ((13739, 13760), 'os.path.basename', 'os.path.basename', (['log'], {}), '(log)\n', (13755, 13760), False, 'import os\n'), ((13978, 14008), 'machinery.get_logs', 'ma.get_logs', (['log'], {'lh': 'ma.LogInc'}), '(log, lh=ma.LogInc)\n', (13989, 14008), True, 'import machinery as ma\n'), ((14018, 14037), 'generic.lty', 'lty', (['log', 'ma.LogInc'], {}), '(log, ma.LogInc)\n', (14021, 14037), False, 'from generic import lty, interleave, itemify, dupchk, listify, w_str\n'), ((14152, 14163), 'generic.lty', 'lty', (['les', 'L'], {}), '(les, L)\n', (14155, 14163), False, 'from generic import lty, interleave, itemify, dupchk, listify, w_str\n'), ((14194, 15115), 'textwrap.dedent', 'textwrap.dedent', (['"""\n \\\\definecolor{Gray}{gray}{0.85}\n \\\\newcolumntype{g}{>{\\\\columncolor{Gray}}r}\n \\\\newcolumntype{h}{>{\\\\columncolor{Gray}}c}\n\n \\\\begin{tabular}{l g g g g r r r r g g g g r r r r}\n \\\\toprule\n \\\\multicolumn{17}{l}{Chip: <chip>}\\\\\\\\\n \\\\multicolumn{17}{l}{GPU Configuration: <config>}\\\\\\\\\n \\\\hline\n & \\\\multicolumn{4}{h}{Critical Incantations:} & \\\\multicolumn{4}{c}{Critical Incantations:} & \\\\multicolumn{4}{h}{Critical Incantations:} & \\\\multicolumn{4}{c}{Critical Incantations:}\\\\\\\\\n & \\\\multicolumn{4}{h}{none} & \\\\multicolumn{4}{c}{GBC} & \\\\multicolumn{4}{h}{MS} & \\\\multicolumn{4}{c}{GBC+MS}\\\\\\\\\n & \\\\multicolumn{4}{h}{Extra Incantations:} & \\\\multicolumn{4}{c}{Extra Incantations:} & \\\\multicolumn{4}{h}{Extra Incantations:} & \\\\multicolumn{4}{c}{Extra Incantations:}\\\\\\\\\n & none & R & S & R+S & none & R & S & R+S & none & R & S & R+S & none & R & S & R+S\\\\\\\\\n \\\\hline\n """'], {}), '(\n """\n \\\\definecolor{Gray}{gray}{0.85}\n \\\\newcolumntype{g}{>{\\\\columncolor{Gray}}r}\n \\\\newcolumntype{h}{>{\\\\columncolor{Gray}}c}\n\n \\\\begin{tabular}{l g g g g r r r r g g g g r r r r}\n \\\\toprule\n \\\\multicolumn{17}{l}{Chip: <chip>}\\\\\\\\\n \\\\multicolumn{17}{l}{GPU Configuration: <config>}\\\\\\\\\n \\\\hline\n & \\\\multicolumn{4}{h}{Critical Incantations:} & \\\\multicolumn{4}{c}{Critical Incantations:} & \\\\multicolumn{4}{h}{Critical Incantations:} & \\\\multicolumn{4}{c}{Critical Incantations:}\\\\\\\\\n & \\\\multicolumn{4}{h}{none} & \\\\multicolumn{4}{c}{GBC} & \\\\multicolumn{4}{h}{MS} & \\\\multicolumn{4}{c}{GBC+MS}\\\\\\\\\n & \\\\multicolumn{4}{h}{Extra Incantations:} & \\\\multicolumn{4}{c}{Extra Incantations:} & \\\\multicolumn{4}{h}{Extra Incantations:} & \\\\multicolumn{4}{c}{Extra Incantations:}\\\\\\\\\n & none & R & S & R+S & none & R & S & R+S & none & R & S & R+S & none & R & S & R+S\\\\\\\\\n \\\\hline\n """\n )\n', (14209, 15115), False, 'import textwrap\n'), ((16096, 16124), 'collections.OrderedDict', 'collections.OrderedDict', (['lfs'], {}), '(lfs)\n', (16119, 16124), False, 'import collections\n'), ((17634, 17655), 'os.path.basename', 'os.path.basename', (['log'], {}), '(log)\n', (17650, 17655), False, 'import os\n'), ((17849, 17879), 'machinery.get_logs', 'ma.get_logs', (['log'], {'lh': 'ma.LogInc'}), '(log, lh=ma.LogInc)\n', (17860, 17879), True, 'import machinery as ma\n'), ((17889, 17908), 'generic.lty', 'lty', (['log', 'ma.LogInc'], {}), '(log, ma.LogInc)\n', (17892, 17908), False, 'from generic import lty, interleave, itemify, dupchk, listify, w_str\n'), ((18082, 18093), 
'generic.lty', 'lty', (['les', 'L'], {}), '(les, L)\n', (18085, 18093), False, 'from generic import lty, interleave, itemify, dupchk, listify, w_str\n'), ((18137, 18158), 'generic.lty', 'lty', (['short_names', 'str'], {}), '(short_names, str)\n', (18140, 18158), False, 'from generic import lty, interleave, itemify, dupchk, listify, w_str\n'), ((18210, 19131), 'textwrap.dedent', 'textwrap.dedent', (['"""\n \\\\definecolor{Gray}{gray}{0.85}\n \\\\newcolumntype{g}{>{\\\\columncolor{Gray}}r}\n \\\\newcolumntype{h}{>{\\\\columncolor{Gray}}c}\n\n \\\\begin{tabular}{l g g g g r r r r g g g g r r r r}\n \\\\toprule\n \\\\multicolumn{17}{l}{Chip: <chip>}\\\\\\\\\n \\\\multicolumn{17}{l}{GPU Configuration: <config>}\\\\\\\\\n \\\\hline\n & \\\\multicolumn{4}{h}{Critical Incantations:} & \\\\multicolumn{4}{c}{Critical Incantations:} & \\\\multicolumn{4}{h}{Critical Incantations:} & \\\\multicolumn{4}{c}{Critical Incantations:}\\\\\\\\\n & \\\\multicolumn{4}{h}{none} & \\\\multicolumn{4}{c}{GBC} & \\\\multicolumn{4}{h}{MS} & \\\\multicolumn{4}{c}{GBC+MS}\\\\\\\\\n & \\\\multicolumn{4}{h}{Extra Incantations:} & \\\\multicolumn{4}{c}{Extra Incantations:} & \\\\multicolumn{4}{h}{Extra Incantations:} & \\\\multicolumn{4}{c}{Extra Incantations:}\\\\\\\\\n & none & R & S & R+S & none & R & S & R+S & none & R & S & R+S & none & R & S & R+S\\\\\\\\\n \\\\hline\n """'], {}), '(\n """\n \\\\definecolor{Gray}{gray}{0.85}\n \\\\newcolumntype{g}{>{\\\\columncolor{Gray}}r}\n \\\\newcolumntype{h}{>{\\\\columncolor{Gray}}c}\n\n \\\\begin{tabular}{l g g g g r r r r g g g g r r r r}\n \\\\toprule\n \\\\multicolumn{17}{l}{Chip: <chip>}\\\\\\\\\n \\\\multicolumn{17}{l}{GPU Configuration: <config>}\\\\\\\\\n \\\\hline\n & \\\\multicolumn{4}{h}{Critical Incantations:} & \\\\multicolumn{4}{c}{Critical Incantations:} & \\\\multicolumn{4}{h}{Critical Incantations:} & \\\\multicolumn{4}{c}{Critical Incantations:}\\\\\\\\\n & \\\\multicolumn{4}{h}{none} & \\\\multicolumn{4}{c}{GBC} & \\\\multicolumn{4}{h}{MS} & \\\\multicolumn{4}{c}{GBC+MS}\\\\\\\\\n & \\\\multicolumn{4}{h}{Extra Incantations:} & \\\\multicolumn{4}{c}{Extra Incantations:} & \\\\multicolumn{4}{h}{Extra Incantations:} & \\\\multicolumn{4}{c}{Extra Incantations:}\\\\\\\\\n & none & R & S & R+S & none & R & S & R+S & none & R & S & R+S & none & R & S & R+S\\\\\\\\\n \\\\hline\n """\n )\n', (18225, 19131), False, 'import textwrap\n'), ((21288, 21309), 'os.path.basename', 'os.path.basename', (['log'], {}), '(log)\n', (21304, 21309), False, 'import os\n'), ((21503, 21533), 'machinery.get_logs', 'ma.get_logs', (['log'], {'lh': 'ma.LogInc'}), '(log, lh=ma.LogInc)\n', (21514, 21533), True, 'import machinery as ma\n'), ((21543, 21562), 'generic.lty', 'lty', (['log', 'ma.LogInc'], {}), '(log, ma.LogInc)\n', (21546, 21562), False, 'from generic import lty, interleave, itemify, dupchk, listify, w_str\n'), ((21736, 21747), 'generic.lty', 'lty', (['les', 'L'], {}), '(les, L)\n', (21739, 21747), False, 'from generic import lty, interleave, itemify, dupchk, listify, w_str\n'), ((21791, 21812), 'generic.lty', 'lty', (['short_names', 'str'], {}), '(short_names, str)\n', (21794, 21812), False, 'from generic import lty, interleave, itemify, dupchk, listify, w_str\n'), ((21923, 24892), 'textwrap.dedent', 'textwrap.dedent', (['"""\n <!DOCTYPE html>\n <html style="background:white;">\n <head>\n <meta charset="UTF-8">\n <title>Evaluating incantations</title>\n <link rel="stylesheet" href="common.css" type="text/css" media="screen"/>\n <style>\n\n ul {\n padding-top: 10px;\n }\n\n li {\n 
padding-top: 5px;\n }\n\n th, td {\n text-align: right;\n padding: 5px;\n padding-right: 15px;\n padding-left: 15px;\n }\n\n td:nth-child(1) {\n text-align: left;\n }\n\n tr:nth-child(1), tr:nth-child(5) {\n border-bottom: 2px solid black;\n }\n\n table {\n border-top: none;\n }\n\n </style>\n </head>\n\n <body>\n <div class="outer" style="width: 100%;">\n <div class="inner">\n <h1>Evaluating incantations</h1>\n\n <br>\n\n <center>\n To view the logfile for a test, click on the corresponding number. The logfile\n also contains the litmus test code. When a dash appears instead of a result,\n it is either because optcheck failed or because there were insufficient\n resources on the chip to run the test.\n </center>\n\n <br>\n\n <center>\n <table style="border:none">\n <tr style="border:none">\n <td style="text-align:left">Chip:</td>\n <td style="text-align:left"> <chip> </td>\n </tr>\n <tr style="border:none">\n <td style="text-align:left">Config:</td>\n <td style="text-align:left"> <config> </td>\n </tr>\n </table>\n </center>\n\n <br>\n\n <table>\n <tr>\n <td> </td>\n <td>1</td>\n <td>2</td>\n <td>3</td>\n <td>4</td>\n <td>5</td>\n <td>6</td>\n <td>7</td>\n <td>8</td>\n <td>9</td>\n <td>10</td>\n <td>11</td>\n <td>12</td>\n <td>13</td>\n <td>14</td>\n <td>15</td>\n <td>16</td>\n </tr>\n <tr>\n <td>memory stress</td>\n <td> </td><td> </td><td> </td><td> </td>\n <td> </td><td> </td><td> </td><td> </td>\n <td>✓</td><td>✓</td><td>✓</td><td>✓</td>\n <td>✓</td><td>✓</td><td>✓</td><td>✓</td>\n </tr>\n <tr>\n <td>general bank conflicts</td>\n <td> </td><td> </td><td> </td><td> </td>\n <td>✓</td><td>✓</td><td>✓</td><td>✓</td>\n <td> </td><td> </td><td> </td><td> </td>\n <td>✓</td><td>✓</td><td>✓</td><td>✓</td>\n </tr>\n <tr>\n <td>thread synchronisation</td>\n <td> </td><td> </td><td>✓</td><td>✓</td>\n <td> </td><td> </td><td>✓</td><td>✓</td>\n <td> </td><td> </td><td>✓</td><td>✓</td>\n <td> </td><td> </td><td>✓</td><td>✓</td>\n </tr>\n <tr>\n <td>thread randomisation</td>\n <td> </td><td>✓</td><td> </td><td>✓</td>\n <td> </td><td>✓</td><td> </td><td>✓</td>\n <td> </td><td>✓</td><td> </td><td>✓</td>\n <td> </td><td>✓</td><td> </td><td>✓</td>\n </tr>\n """'], {}), '(\n """\n <!DOCTYPE html>\n <html style="background:white;">\n <head>\n <meta charset="UTF-8">\n <title>Evaluating incantations</title>\n <link rel="stylesheet" href="common.css" type="text/css" media="screen"/>\n <style>\n\n ul {\n padding-top: 10px;\n }\n\n li {\n padding-top: 5px;\n }\n\n th, td {\n text-align: right;\n padding: 5px;\n padding-right: 15px;\n padding-left: 15px;\n }\n\n td:nth-child(1) {\n text-align: left;\n }\n\n tr:nth-child(1), tr:nth-child(5) {\n border-bottom: 2px solid black;\n }\n\n table {\n border-top: none;\n }\n\n </style>\n </head>\n\n <body>\n <div class="outer" style="width: 100%;">\n <div class="inner">\n <h1>Evaluating incantations</h1>\n\n <br>\n\n <center>\n To view the logfile for a test, click on the corresponding number. The logfile\n also contains the litmus test code. 
When a dash appears instead of a result,\n it is either because optcheck failed or because there were insufficient\n resources on the chip to run the test.\n </center>\n\n <br>\n\n <center>\n <table style="border:none">\n <tr style="border:none">\n <td style="text-align:left">Chip:</td>\n <td style="text-align:left"> <chip> </td>\n </tr>\n <tr style="border:none">\n <td style="text-align:left">Config:</td>\n <td style="text-align:left"> <config> </td>\n </tr>\n </table>\n </center>\n\n <br>\n\n <table>\n <tr>\n <td> </td>\n <td>1</td>\n <td>2</td>\n <td>3</td>\n <td>4</td>\n <td>5</td>\n <td>6</td>\n <td>7</td>\n <td>8</td>\n <td>9</td>\n <td>10</td>\n <td>11</td>\n <td>12</td>\n <td>13</td>\n <td>14</td>\n <td>15</td>\n <td>16</td>\n </tr>\n <tr>\n <td>memory stress</td>\n <td> </td><td> </td><td> </td><td> </td>\n <td> </td><td> </td><td> </td><td> </td>\n <td>✓</td><td>✓</td><td>✓</td><td>✓</td>\n <td>✓</td><td>✓</td><td>✓</td><td>✓</td>\n </tr>\n <tr>\n <td>general bank conflicts</td>\n <td> </td><td> </td><td> </td><td> </td>\n <td>✓</td><td>✓</td><td>✓</td><td>✓</td>\n <td> </td><td> </td><td> </td><td> </td>\n <td>✓</td><td>✓</td><td>✓</td><td>✓</td>\n </tr>\n <tr>\n <td>thread synchronisation</td>\n <td> </td><td> </td><td>✓</td><td>✓</td>\n <td> </td><td> </td><td>✓</td><td>✓</td>\n <td> </td><td> </td><td>✓</td><td>✓</td>\n <td> </td><td> </td><td>✓</td><td>✓</td>\n </tr>\n <tr>\n <td>thread randomisation</td>\n <td> </td><td>✓</td><td> </td><td>✓</td>\n <td> </td><td>✓</td><td> </td><td>✓</td>\n <td> </td><td>✓</td><td> </td><td>✓</td>\n <td> </td><td>✓</td><td> </td><td>✓</td>\n </tr>\n """\n )\n', (21938, 24892), False, 'import textwrap\n'), ((27501, 27526), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (27524, 27526), False, 'import argparse\n'), ((27577, 27616), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'add_help': '(False)'}), '(add_help=False)\n', (27600, 27616), False, 'import argparse\n'), ((31170, 31205), 'machinery.setup_err_handling', 'ma.setup_err_handling', (['"""log2tbl.py"""'], {}), "('log2tbl.py')\n", (31191, 31205), True, 'import machinery as ma\n'), ((3260, 3284), 'os.path.basename', 'os.path.basename', (['log.fn'], {}), '(log.fn)\n', (3276, 3284), False, 'import os\n'), ((3486, 3507), 'machinery.get_entry', 'ma.get_entry', (['k', 'logs'], {}), '(k, logs)\n', (3498, 3507), True, 'import machinery as ma\n'), ((5566, 5578), 'generic.listify', 'listify', (['inp'], {}), '(inp)\n', (5573, 5578), False, 'from generic import lty, interleave, itemify, dupchk, listify, w_str\n'), ((5898, 5925), 'machinery.get_logs', 'ma.get_logs', (['inp'], {'lh': 'ma.Log'}), '(inp, lh=ma.Log)\n', (5909, 5925), True, 'import machinery as ma\n'), ((6311, 6342), 'machinery.get_matching_keys', 'ma.get_matching_keys', (['val', 'logs'], {}), '(val, logs)\n', (6331, 6342), True, 'import machinery as ma\n'), ((6580, 6601), 'machinery.get_pos_keys', 'ma.get_pos_keys', (['logs'], {}), '(logs)\n', (6595, 6601), True, 'import machinery as ma\n'), ((6619, 6636), 'machinery.get_keys', 'ma.get_keys', (['logs'], {}), '(logs)\n', (6630, 6636), True, 'import machinery as ma\n'), ((7069, 7100), 'machinery.get_matching_keys', 'ma.get_matching_keys', (['val', 'logs'], {}), '(val, logs)\n', (7089, 7100), True, 'import machinery as ma\n'), ((7670, 7691), 'machinery.get_pos_keys', 'ma.get_pos_keys', (['logs'], {}), '(logs)\n', (7685, 7691), True, 'import machinery as ma\n'), ((7711, 7728), 'machinery.get_keys', 'ma.get_keys', (['logs'], {}), '(logs)\n', (7722, 
7728), True, 'import machinery as ma\n'), ((8530, 8559), 'machinery.get_filtered_keys', 'ma.get_filtered_keys', (['f', 'logs'], {}), '(f, logs)\n', (8550, 8559), True, 'import machinery as ma\n'), ((8946, 8967), 'machinery.get_pos_keys', 'ma.get_pos_keys', (['logs'], {}), '(logs)\n', (8961, 8967), True, 'import machinery as ma\n'), ((8985, 9002), 'machinery.get_keys', 'ma.get_keys', (['logs'], {}), '(logs)\n', (8996, 9002), True, 'import machinery as ma\n'), ((9624, 9645), 'machinery.get_entry', 'ma.get_entry', (['k', 'logs'], {}), '(k, logs)\n', (9636, 9645), True, 'import machinery as ma\n'), ((10015, 10036), 'machinery.get_entry', 'ma.get_entry', (['k', 'logs'], {}), '(k, logs)\n', (10027, 10036), True, 'import machinery as ma\n'), ((11716, 11737), 'machinery.get_entry', 'ma.get_entry', (['k', 'logs'], {}), '(k, logs)\n', (11728, 11737), True, 'import machinery as ma\n'), ((12012, 12033), 'machinery.get_entry', 'ma.get_entry', (['k', 'logs'], {}), '(k, logs)\n', (12024, 12033), True, 'import machinery as ma\n'), ((12278, 12299), 'machinery.get_entry', 'ma.get_entry', (['k', 'logs'], {}), '(k, logs)\n', (12290, 12299), True, 'import machinery as ma\n'), ((16285, 16295), 'generic.lty', 'lty', (['l1', 'L'], {}), '(l1, L)\n', (16288, 16295), False, 'from generic import lty, interleave, itemify, dupchk, listify, w_str\n'), ((17411, 17426), 'generic.w_str', 'w_str', (['f_out', 's'], {}), '(f_out, s)\n', (17416, 17426), False, 'from generic import lty, interleave, itemify, dupchk, listify, w_str\n'), ((20153, 20163), 'generic.lty', 'lty', (['l1', 'L'], {}), '(l1, L)\n', (20156, 20163), False, 'from generic import lty, interleave, itemify, dupchk, listify, w_str\n'), ((21031, 21046), 'generic.w_str', 'w_str', (['f_out', 's'], {}), '(f_out, s)\n', (21036, 21046), False, 'from generic import lty, interleave, itemify, dupchk, listify, w_str\n'), ((25949, 25959), 'generic.lty', 'lty', (['l1', 'L'], {}), '(l1, L)\n', (25952, 25959), False, 'from generic import lty, interleave, itemify, dupchk, listify, w_str\n'), ((27092, 27107), 'generic.w_str', 'w_str', (['f_out', 's'], {}), '(f_out, s)\n', (27097, 27107), False, 'from generic import lty, interleave, itemify, dupchk, listify, w_str\n'), ((31430, 31441), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (31438, 31441), False, 'import sys\n'), ((5640, 5649), 'generic.dupchk', 'dupchk', (['l'], {}), '(l)\n', (5646, 5649), False, 'from generic import lty, interleave, itemify, dupchk, listify, w_str\n'), ((6366, 6391), 'machinery.get_pos_keys', 'ma.get_pos_keys', (['logs', 'ks'], {}), '(logs, ks)\n', (6381, 6391), True, 'import machinery as ma\n'), ((7126, 7153), 'machinery.get_pos_keys', 'ma.get_pos_keys', (['logs', 'ks_s'], {}), '(logs, ks_s)\n', (7141, 7153), True, 'import machinery as ma\n'), ((7961, 7996), 'machinery.get_filtered_keys', 'ma.get_filtered_keys', (['f', 'logs', 'ks_s'], {}), '(f, logs, ks_s)\n', (7981, 7996), True, 'import machinery as ma\n'), ((8583, 8608), 'machinery.get_pos_keys', 'ma.get_pos_keys', (['logs', 'ks'], {}), '(logs, ks)\n', (8598, 8608), True, 'import machinery as ma\n'), ((10566, 10580), 'machinery.LogEntry.is_global', 'L.is_global', (['e'], {}), '(e)\n', (10577, 10580), True, 'from machinery import LogEntry as L\n'), ((10805, 10819), 'machinery.LogEntry.is_shared', 'L.is_shared', (['e'], {}), '(e)\n', (10816, 10819), True, 'from machinery import LogEntry as L\n'), ((11042, 11056), 'machinery.LogEntry.is_global', 'L.is_global', (['e'], {}), '(e)\n', (11053, 11056), True, 'from machinery import LogEntry as L\n'), 
((11538, 11552), 'machinery.LogEntry.is_global', 'L.is_global', (['e'], {}), '(e)\n', (11549, 11552), True, 'from machinery import LogEntry as L\n'), ((11838, 11852), 'machinery.LogEntry.is_shared', 'L.is_shared', (['e'], {}), '(e)\n', (11849, 11852), True, 'from machinery import LogEntry as L\n'), ((12105, 12119), 'machinery.LogEntry.is_global', 'L.is_global', (['e'], {}), '(e)\n', (12116, 12119), True, 'from machinery import LogEntry as L\n'), ((12714, 12728), 'machinery.LogEntry.is_global', 'L.is_global', (['e'], {}), '(e)\n', (12725, 12728), True, 'from machinery import LogEntry as L\n'), ((12954, 12968), 'machinery.LogEntry.is_shared', 'L.is_shared', (['e'], {}), '(e)\n', (12965, 12968), True, 'from machinery import LogEntry as L\n'), ((13192, 13206), 'machinery.LogEntry.is_global', 'L.is_global', (['e'], {}), '(e)\n', (13203, 13206), True, 'from machinery import LogEntry as L\n'), ((13832, 13854), 'os.path.splitext', 'os.path.splitext', (['chip'], {}), '(chip)\n', (13848, 13854), False, 'import os\n'), ((15595, 15613), 'machinery.LogEntry.is_mem_stress', 'L.is_mem_stress', (['e'], {}), '(e)\n', (15610, 15613), True, 'from machinery import LogEntry as L\n'), ((15668, 15686), 'machinery.LogEntry.is_general_bc', 'L.is_general_bc', (['e'], {}), '(e)\n', (15683, 15686), True, 'from machinery import LogEntry as L\n'), ((15738, 15753), 'machinery.LogEntry.is_barrier', 'L.is_barrier', (['e'], {}), '(e)\n', (15750, 15753), True, 'from machinery import LogEntry as L\n'), ((15810, 15830), 'machinery.LogEntry.is_rand_threads', 'L.is_rand_threads', (['e'], {}), '(e)\n', (15827, 15830), True, 'from machinery import LogEntry as L\n'), ((17727, 17749), 'os.path.splitext', 'os.path.splitext', (['chip'], {}), '(chip)\n', (17743, 17749), False, 'import os\n'), ((19684, 19702), 'machinery.LogEntry.is_mem_stress', 'L.is_mem_stress', (['e'], {}), '(e)\n', (19699, 19702), True, 'from machinery import LogEntry as L\n'), ((19757, 19775), 'machinery.LogEntry.is_general_bc', 'L.is_general_bc', (['e'], {}), '(e)\n', (19772, 19775), True, 'from machinery import LogEntry as L\n'), ((19827, 19842), 'machinery.LogEntry.is_barrier', 'L.is_barrier', (['e'], {}), '(e)\n', (19839, 19842), True, 'from machinery import LogEntry as L\n'), ((19899, 19919), 'machinery.LogEntry.is_rand_threads', 'L.is_rand_threads', (['e'], {}), '(e)\n', (19916, 19919), True, 'from machinery import LogEntry as L\n'), ((20262, 20272), 'generic.lty', 'lty', (['l2', 'L'], {}), '(l2, L)\n', (20265, 20272), False, 'from generic import lty, interleave, itemify, dupchk, listify, w_str\n'), ((21381, 21403), 'os.path.splitext', 'os.path.splitext', (['chip'], {}), '(chip)\n', (21397, 21403), False, 'import os\n'), ((25480, 25498), 'machinery.LogEntry.is_mem_stress', 'L.is_mem_stress', (['e'], {}), '(e)\n', (25495, 25498), True, 'from machinery import LogEntry as L\n'), ((25553, 25571), 'machinery.LogEntry.is_general_bc', 'L.is_general_bc', (['e'], {}), '(e)\n', (25568, 25571), True, 'from machinery import LogEntry as L\n'), ((25623, 25638), 'machinery.LogEntry.is_barrier', 'L.is_barrier', (['e'], {}), '(e)\n', (25635, 25638), True, 'from machinery import LogEntry as L\n'), ((25695, 25715), 'machinery.LogEntry.is_rand_threads', 'L.is_rand_threads', (['e'], {}), '(e)\n', (25712, 25715), True, 'from machinery import LogEntry as L\n'), ((26058, 26068), 'generic.lty', 'lty', (['l2', 'L'], {}), '(l2, L)\n', (26061, 26068), False, 'from generic import lty, interleave, itemify, dupchk, listify, w_str\n'), ((28078, 28096), 'functools.partial', 'partial', 
(['mux', 'flat'], {}), '(mux, flat)\n', (28085, 28096), False, 'from functools import partial\n'), ((28396, 28420), 'functools.partial', 'partial', (['mux', 'classified'], {}), '(mux, classified)\n', (28403, 28420), False, 'from functools import partial\n'), ((28718, 28740), 'functools.partial', 'partial', (['mux', 'sections'], {}), '(mux, sections)\n', (28725, 28740), False, 'from functools import partial\n'), ((29039, 29062), 'functools.partial', 'partial', (['mux', 'two_level'], {}), '(mux, two_level)\n', (29046, 29062), False, 'from functools import partial\n'), ((29276, 29295), 'functools.partial', 'partial', (['mux', 'latex'], {}), '(mux, latex)\n', (29283, 29295), False, 'from functools import partial\n'), ((29511, 29531), 'functools.partial', 'partial', (['mux', 'latex2'], {}), '(mux, latex2)\n', (29518, 29531), False, 'from functools import partial\n'), ((29747, 29767), 'functools.partial', 'partial', (['mux', 'latex3'], {}), '(mux, latex3)\n', (29754, 29767), False, 'from functools import partial\n'), ((30132, 30158), 'functools.partial', 'partial', (['mux', 'incantations'], {}), '(mux, incantations)\n', (30139, 30158), False, 'from functools import partial\n'), ((30533, 30564), 'functools.partial', 'partial', (['mux', 'incantations_flat'], {}), '(mux, incantations_flat)\n', (30540, 30564), False, 'from functools import partial\n'), ((31022, 31058), 'functools.partial', 'partial', (['mux', 'incantations_html_flat'], {}), '(mux, incantations_html_flat)\n', (31029, 31058), False, 'from functools import partial\n'), ((7385, 7420), 'machinery.get_filtered_keys', 'ma.get_filtered_keys', (['f', 'logs', 'ks_s'], {}), '(f, logs, ks_s)\n', (7405, 7420), True, 'import machinery as ma\n'), ((8024, 8049), 'machinery.get_pos_keys', 'ma.get_pos_keys', (['logs', 'ks'], {}), '(logs, ks)\n', (8039, 8049), True, 'import machinery as ma\n'), ((10663, 10696), 'machinery.LogEntry.does_match', 'L.does_match', (['e', "['CoWW', 'COWW']"], {}), "(e, ['CoWW', 'COWW'])\n", (10675, 10696), True, 'from machinery import LogEntry as L\n'), ((10902, 10935), 'machinery.LogEntry.does_match', 'L.does_match', (['e', "['CoWW', 'COWW']"], {}), "(e, ['CoWW', 'COWW'])\n", (10914, 10935), True, 'from machinery import LogEntry as L\n'), ((11081, 11092), 'machinery.LogEntry.is_cta', 'L.is_cta', (['e'], {}), '(e)\n', (11089, 11092), True, 'from machinery import LogEntry as L\n'), ((11097, 11115), 'machinery.LogEntry.does_match', 'L.does_match', (['e', 'l'], {}), '(e, l)\n', (11109, 11115), True, 'from machinery import LogEntry as L\n'), ((11558, 11570), 'machinery.LogEntry.is_warp', 'L.is_warp', (['e'], {}), '(e)\n', (11567, 11570), True, 'from machinery import LogEntry as L\n'), ((11574, 11593), 'machinery.LogEntry.does_match', 'L.does_match', (['e', 'lc'], {}), '(e, lc)\n', (11586, 11593), True, 'from machinery import LogEntry as L\n'), ((11858, 11870), 'machinery.LogEntry.is_warp', 'L.is_warp', (['e'], {}), '(e)\n', (11867, 11870), True, 'from machinery import LogEntry as L\n'), ((11874, 11893), 'machinery.LogEntry.does_match', 'L.does_match', (['e', 'lc'], {}), '(e, lc)\n', (11886, 11893), True, 'from machinery import LogEntry as L\n'), ((12125, 12136), 'machinery.LogEntry.is_cta', 'L.is_cta', (['e'], {}), '(e)\n', (12133, 12136), True, 'from machinery import LogEntry as L\n'), ((12140, 12159), 'machinery.LogEntry.does_match', 'L.does_match', (['e', 'lc'], {}), '(e, lc)\n', (12152, 12159), True, 'from machinery import LogEntry as L\n'), ((12811, 12844), 'machinery.LogEntry.does_match', 'L.does_match', (['e', "['CoWW', 
'COWW']"], {}), "(e, ['CoWW', 'COWW'])\n", (12823, 12844), True, 'from machinery import LogEntry as L\n'), ((13051, 13084), 'machinery.LogEntry.does_match', 'L.does_match', (['e', "['CoWW', 'COWW']"], {}), "(e, ['CoWW', 'COWW'])\n", (13063, 13084), True, 'from machinery import LogEntry as L\n'), ((13231, 13242), 'machinery.LogEntry.is_cta', 'L.is_cta', (['e'], {}), '(e)\n', (13239, 13242), True, 'from machinery import LogEntry as L\n'), ((13247, 13265), 'machinery.LogEntry.does_match', 'L.does_match', (['e', 'l'], {}), '(e, l)\n', (13259, 13265), True, 'from machinery import LogEntry as L\n'), ((15565, 15583), 'machinery.LogEntry.is_mem_stress', 'L.is_mem_stress', (['e'], {}), '(e)\n', (15580, 15583), True, 'from machinery import LogEntry as L\n'), ((15638, 15656), 'machinery.LogEntry.is_general_bc', 'L.is_general_bc', (['e'], {}), '(e)\n', (15653, 15656), True, 'from machinery import LogEntry as L\n'), ((15711, 15726), 'machinery.LogEntry.is_barrier', 'L.is_barrier', (['e'], {}), '(e)\n', (15723, 15726), True, 'from machinery import LogEntry as L\n'), ((15778, 15798), 'machinery.LogEntry.is_rand_threads', 'L.is_rand_threads', (['e'], {}), '(e)\n', (15795, 15798), True, 'from machinery import LogEntry as L\n'), ((16597, 16607), 'generic.lty', 'lty', (['l2', 'L'], {}), '(l2, L)\n', (16600, 16607), False, 'from generic import lty, interleave, itemify, dupchk, listify, w_str\n'), ((19654, 19672), 'machinery.LogEntry.is_mem_stress', 'L.is_mem_stress', (['e'], {}), '(e)\n', (19669, 19672), True, 'from machinery import LogEntry as L\n'), ((19727, 19745), 'machinery.LogEntry.is_general_bc', 'L.is_general_bc', (['e'], {}), '(e)\n', (19742, 19745), True, 'from machinery import LogEntry as L\n'), ((19800, 19815), 'machinery.LogEntry.is_barrier', 'L.is_barrier', (['e'], {}), '(e)\n', (19812, 19815), True, 'from machinery import LogEntry as L\n'), ((19867, 19887), 'machinery.LogEntry.is_rand_threads', 'L.is_rand_threads', (['e'], {}), '(e)\n', (19884, 19887), True, 'from machinery import LogEntry as L\n'), ((25450, 25468), 'machinery.LogEntry.is_mem_stress', 'L.is_mem_stress', (['e'], {}), '(e)\n', (25465, 25468), True, 'from machinery import LogEntry as L\n'), ((25523, 25541), 'machinery.LogEntry.is_general_bc', 'L.is_general_bc', (['e'], {}), '(e)\n', (25538, 25541), True, 'from machinery import LogEntry as L\n'), ((25596, 25611), 'machinery.LogEntry.is_barrier', 'L.is_barrier', (['e'], {}), '(e)\n', (25608, 25611), True, 'from machinery import LogEntry as L\n'), ((25663, 25683), 'machinery.LogEntry.is_rand_threads', 'L.is_rand_threads', (['e'], {}), '(e)\n', (25680, 25683), True, 'from machinery import LogEntry as L\n'), ((7452, 7477), 'machinery.get_pos_keys', 'ma.get_pos_keys', (['logs', 'ks'], {}), '(logs, ks)\n', (7467, 7477), True, 'import machinery as ma\n'), ((10605, 10617), 'machinery.LogEntry.is_warp', 'L.is_warp', (['e'], {}), '(e)\n', (10614, 10617), True, 'from machinery import LogEntry as L\n'), ((10622, 10640), 'machinery.LogEntry.does_match', 'L.does_match', (['e', 'l'], {}), '(e, l)\n', (10634, 10640), True, 'from machinery import LogEntry as L\n'), ((10844, 10856), 'machinery.LogEntry.is_warp', 'L.is_warp', (['e'], {}), '(e)\n', (10853, 10856), True, 'from machinery import LogEntry as L\n'), ((10861, 10879), 'machinery.LogEntry.does_match', 'L.does_match', (['e', 'l'], {}), '(e, l)\n', (10873, 10879), True, 'from machinery import LogEntry as L\n'), ((12753, 12765), 'machinery.LogEntry.is_warp', 'L.is_warp', (['e'], {}), '(e)\n', (12762, 12765), True, 'from machinery import LogEntry 
as L\n'), ((12770, 12788), 'machinery.LogEntry.does_match', 'L.does_match', (['e', 'l'], {}), '(e, l)\n', (12782, 12788), True, 'from machinery import LogEntry as L\n'), ((12993, 13005), 'machinery.LogEntry.is_warp', 'L.is_warp', (['e'], {}), '(e)\n', (13002, 13005), True, 'from machinery import LogEntry as L\n'), ((13010, 13028), 'machinery.LogEntry.does_match', 'L.does_match', (['e', 'l'], {}), '(e, l)\n', (13022, 13028), True, 'from machinery import LogEntry as L\n'), ((15173, 15185), 'machinery.LogEntry.is_warp', 'L.is_warp', (['e'], {}), '(e)\n', (15182, 15185), True, 'from machinery import LogEntry as L\n'), ((15190, 15204), 'machinery.LogEntry.is_global', 'L.is_global', (['e'], {}), '(e)\n', (15201, 15204), True, 'from machinery import LogEntry as L\n'), ((15294, 15306), 'machinery.LogEntry.is_warp', 'L.is_warp', (['e'], {}), '(e)\n', (15303, 15306), True, 'from machinery import LogEntry as L\n'), ((15311, 15325), 'machinery.LogEntry.is_shared', 'L.is_shared', (['e'], {}), '(e)\n', (15322, 15325), True, 'from machinery import LogEntry as L\n'), ((15415, 15426), 'machinery.LogEntry.is_cta', 'L.is_cta', (['e'], {}), '(e)\n', (15423, 15426), True, 'from machinery import LogEntry as L\n'), ((15431, 15445), 'machinery.LogEntry.is_global', 'L.is_global', (['e'], {}), '(e)\n', (15442, 15445), True, 'from machinery import LogEntry as L\n'), ((19189, 19201), 'machinery.LogEntry.is_warp', 'L.is_warp', (['e'], {}), '(e)\n', (19198, 19201), True, 'from machinery import LogEntry as L\n'), ((19206, 19220), 'machinery.LogEntry.is_global', 'L.is_global', (['e'], {}), '(e)\n', (19217, 19220), True, 'from machinery import LogEntry as L\n'), ((19310, 19322), 'machinery.LogEntry.is_warp', 'L.is_warp', (['e'], {}), '(e)\n', (19319, 19322), True, 'from machinery import LogEntry as L\n'), ((19327, 19341), 'machinery.LogEntry.is_shared', 'L.is_shared', (['e'], {}), '(e)\n', (19338, 19341), True, 'from machinery import LogEntry as L\n'), ((19431, 19442), 'machinery.LogEntry.is_cta', 'L.is_cta', (['e'], {}), '(e)\n', (19439, 19442), True, 'from machinery import LogEntry as L\n'), ((19447, 19461), 'machinery.LogEntry.is_global', 'L.is_global', (['e'], {}), '(e)\n', (19458, 19461), True, 'from machinery import LogEntry as L\n'), ((20214, 20242), 'functools.partial', 'partial', (['L.simple_match'], {'s': 't'}), '(L.simple_match, s=t)\n', (20221, 20242), False, 'from functools import partial\n'), ((20729, 20742), 'generic.itemify', 'itemify', (['item'], {}), '(item)\n', (20736, 20742), False, 'from generic import lty, interleave, itemify, dupchk, listify, w_str\n'), ((24985, 24997), 'machinery.LogEntry.is_warp', 'L.is_warp', (['e'], {}), '(e)\n', (24994, 24997), True, 'from machinery import LogEntry as L\n'), ((25002, 25016), 'machinery.LogEntry.is_global', 'L.is_global', (['e'], {}), '(e)\n', (25013, 25016), True, 'from machinery import LogEntry as L\n'), ((25106, 25118), 'machinery.LogEntry.is_warp', 'L.is_warp', (['e'], {}), '(e)\n', (25115, 25118), True, 'from machinery import LogEntry as L\n'), ((25123, 25137), 'machinery.LogEntry.is_shared', 'L.is_shared', (['e'], {}), '(e)\n', (25134, 25137), True, 'from machinery import LogEntry as L\n'), ((25227, 25238), 'machinery.LogEntry.is_cta', 'L.is_cta', (['e'], {}), '(e)\n', (25235, 25238), True, 'from machinery import LogEntry as L\n'), ((25243, 25257), 'machinery.LogEntry.is_global', 'L.is_global', (['e'], {}), '(e)\n', (25254, 25257), True, 'from machinery import LogEntry as L\n'), ((26010, 26038), 'functools.partial', 'partial', (['L.simple_match'], {'s': 
't'}), '(L.simple_match, s=t)\n', (26017, 26038), False, 'from functools import partial\n'), ((26564, 26577), 'generic.itemify', 'itemify', (['item'], {}), '(item)\n', (26571, 26577), False, 'from generic import lty, interleave, itemify, dupchk, listify, w_str\n'), ((16547, 16575), 'functools.partial', 'partial', (['L.simple_match'], {'s': 't'}), '(L.simple_match, s=t)\n', (16554, 16575), False, 'from functools import partial\n'), ((17077, 17090), 'generic.itemify', 'itemify', (['item'], {}), '(item)\n', (17084, 17090), False, 'from generic import lty, interleave, itemify, dupchk, listify, w_str\n')]
|
# -*- coding: utf-8 -*-
"""This module contains the the methods related to scraping articles from arXiv.
To only scrape the metadata from the articles in the rss-stream use the
harvestMetaDataRss method.
It's also possible to scrape articles between any two dates,
to accomplish this use the get_records_by_date method."""
__author__ = '<NAME> and <NAME>'
__copyright__ = 'Copyright 2020, The arXivDigest project'
import datetime
import urllib
import xml.etree.ElementTree as ET
from time import sleep
from urllib.request import urlopen
import feedparser
import requests
OAI = '{http://www.openarchives.org/OAI/2.0/}'
ARXIV = '{http://arxiv.org/OAI/arXiv/}'
def prepare_record(record):
"""Formats the data to a dictionary structure that is easy to work with."""
if record.find(OAI + 'header').get('status', None) == 'deleted':
return {}
info = record.find(OAI + 'metadata').find(ARXIV + 'arXiv')
result = {'title': info.find(ARXIV + 'title').text.replace('\n', ' '),
'description': info.find(ARXIV + 'abstract').text.replace('\n', ' '),
'id': info.find(ARXIV + 'id').text,
'categories': info.find(ARXIV + 'categories').text.split(),
}
doi = info.find(ARXIV + 'doi')
comments = info.find(ARXIV + 'comments')
licenses = info.find(ARXIV + 'license')
journal = info.find(ARXIV + 'journal-ref')
# check that element is not None before trying to access the text
result['doi'] = doi.text if doi is not None else None
result['comments'] = comments.text if comments is not None else None
result['license'] = licenses.text if licenses is not None else None
result['journal'] = journal.text if journal is not None else None
authors = []
for author in info.find(ARXIV + 'authors'):
a = {}
firstname = author.find(ARXIV + 'forenames')
a['firstname'] = '' if firstname is None else firstname.text
a['lastname'] = author.find(ARXIV + 'keyname').text
a['affiliations'] = []
for affiliation in author.findall(ARXIV + 'affiliation'):
a['affiliations'].append(affiliation.text)
authors.append(a)
result['authors'] = authors
datestamp = record.find(OAI + 'header').find(OAI + 'datestamp')
result['datestamp'] = datestamp.text
return result
def get_records_by_date(start_date, end_date=None):
"""Scrapes the OAI-api for articles submitted from the n previous days."""
base_url = 'http://export.arxiv.org/oai2'
params = {'verb': 'ListRecords',
'metadataPrefix': 'arXiv',
'from': start_date}
if end_date:
params['until'] = end_date
result = {}
while True:
r = requests.get(base_url, params=params)
print('Fetching', r.url)
if r.status_code == 503:
time_out = int(r.headers.get('retry-after', 5))
msg = '503: Have to wait before further requests. Retrying in {} seconds.'
print(msg.format(time_out))
sleep(time_out)
continue
# generate elementtree from responsedata
root = ET.fromstring(r.text)
# parse the response and add it to result
for record in root.find(OAI + 'ListRecords').findall(OAI + 'record'):
element = prepare_record(record)
if element:
result[element['id']] = element
# If the xmlfile contains more than 1000 articles arXiv will add a
# resumptiontoken to the response, if we already have all the articles
# there will be no resumptiontoken and we can safely break
token = root.find(OAI + 'ListRecords').find(OAI + 'resumptionToken')
if token is None or token.text is None:
break
# update url to use resumptiontoken in the next request
params = {'verb': 'ListRecords', 'resumptionToken': token.text}
return result
def get_record(id):
"""Gets metadata for a single record."""
url = 'http://export.arxiv.org/oai2?verb=GetRecord&identifier=oai:arXiv.org:%s&metadataPrefix=arXiv' % id
print('Fetching', url)
response = urlopen(url)
root = ET.fromstring(response.read())
record = root.find(OAI + 'GetRecord').find(OAI + 'record')
return prepare_record(record)
def get_categories():
"""Returns a dict of all the main categories available with info."""
url = 'http://export.arxiv.org/oai2?verb=ListSets'
print('fetching', url)
while True:
try:
response = urlopen(url)
except urllib.error.HTTPError as e:
if e.code == 503:
timeOut = int(e.headers.get('retry-after', 30))
print(
'503: Have to wait before further requests. Retrying in %d seconds.' % timeOut)
sleep(timeOut)
continue
else:
raise
break
root = ET.fromstring(response.read())
categories = root.find(OAI + 'ListSets').findall(OAI + 'set')
result = {}
for category in categories:
categoryID = category.find(OAI + 'setSpec').text
categoryName = category.find(OAI + 'setName').text
categoryInfo = {'name': categoryName}
categoryID = categoryID.split(':')
if len(categoryID) > 1:
categoryInfo['masterCategory'] = categoryID[0].capitalize()
result[categoryID[-1]] = categoryInfo
return result
def get_id_from_rss():
"""Returns a set of all the article-ids found in the rss stream, which will
be approximately the same as the articles uploaded the previous day."""
rssUrl = 'http://export.arxiv.org/rss/'
result = set()
for category in get_categories():
print('Fetching IDs from the %s rss-feed' % category)
feed = feedparser.parse(rssUrl + category)
for entry in feed['entries']:
id = entry['link'].split('abs/')[1]
result.add(id)
return result
def harvest_metadata_rss():
"""This function will return the metadata from all the articles present
in any of the arXiv rss-streams."""
rss_ids = get_id_from_rss()
yesterday = datetime.datetime.utcnow().date() - datetime.timedelta(days=1)
articles = get_records_by_date(yesterday)
result = {}
for item in rss_ids:
if item not in articles: # download missing articles, if any
element = get_record(item)
result[element['id']] = element
else:
result[item] = articles[item]
return result
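# Minimal usage sketch (illustrative only; assumes network access to export.arxiv.org):
if __name__ == '__main__':
    # Metadata for everything that appeared in the arXiv rss-streams yesterday.
    articles = harvest_metadata_rss()
    print('Harvested', len(articles), 'articles')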
|
[
"feedparser.parse",
"xml.etree.ElementTree.fromstring",
"urllib.request.urlopen",
"time.sleep",
"datetime.datetime.utcnow",
"datetime.timedelta",
"requests.get"
] |
[((4135, 4147), 'urllib.request.urlopen', 'urlopen', (['url'], {}), '(url)\n', (4142, 4147), False, 'from urllib.request import urlopen\n'), ((2726, 2763), 'requests.get', 'requests.get', (['base_url'], {'params': 'params'}), '(base_url, params=params)\n', (2738, 2763), False, 'import requests\n'), ((3131, 3152), 'xml.etree.ElementTree.fromstring', 'ET.fromstring', (['r.text'], {}), '(r.text)\n', (3144, 3152), True, 'import xml.etree.ElementTree as ET\n'), ((5791, 5826), 'feedparser.parse', 'feedparser.parse', (['(rssUrl + category)'], {}), '(rssUrl + category)\n', (5807, 5826), False, 'import feedparser\n'), ((6188, 6214), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(1)'}), '(days=1)\n', (6206, 6214), False, 'import datetime\n'), ((3029, 3044), 'time.sleep', 'sleep', (['time_out'], {}), '(time_out)\n', (3034, 3044), False, 'from time import sleep\n'), ((4518, 4530), 'urllib.request.urlopen', 'urlopen', (['url'], {}), '(url)\n', (4525, 4530), False, 'from urllib.request import urlopen\n'), ((6152, 6178), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (6176, 6178), False, 'import datetime\n'), ((4808, 4822), 'time.sleep', 'sleep', (['timeOut'], {}), '(timeOut)\n', (4813, 4822), False, 'from time import sleep\n')]
|
# This is sample baseline for CIKM Personalization Cup 2016
# by <NAME> & <NAME>
import numpy as np
import pandas as pd
import datetime
start_time = datetime.datetime.now()
print("Running baseline. Now it's", start_time.isoformat())
# Loading queries (assuming data placed in <dataset-train/>
queries = pd.read_csv('dataset-train/train-queries.csv', sep=';')[['queryId', 'items', 'is.test']]
print('Total queries', len(queries))
# Leaving only test queries (the ones which items we have to sort)
queries = queries[queries['is.test'] == True][['queryId', 'items']]
print('Test queries', len(queries))
queries.reset_index(inplace=True)
queries.drop(['index'], axis=1, inplace=True)
# Loading item views; taking itemId column
item_views = pd.read_csv('dataset-train/train-item-views.csv', sep=';')[['itemId']]
print('Item views', len(item_views))
# Loading clicks; taking itemId column
clicks = pd.read_csv('dataset-train/train-clicks.csv', sep=';')[['itemId']]
print('Clicks', len(clicks))
# Loading purchases; taking itemId column
purchases = pd.read_csv('dataset-train/train-purchases.csv', sep=';')[['itemId']]
print('Purchases', len(purchases))
# Calculating popularity as [Amount of views] * 1 + [Amount of clicks] * 2 + [Amount of purchases] * 3
print('Scoring popularity for each item ...')
prod_pop = {}
# enumerate(..., start=1) gives views weight 1, clicks weight 2 and purchases weight 3;
# with the default start of 0, views would never contribute to the score
for cost, container in enumerate([item_views, clicks, purchases], start=1):
for prod in container.values:
product = str(prod[0])
if product not in prod_pop:
prod_pop[product] = cost
else:
prod_pop[product] += cost
print('Popularity scored for', len(prod_pop), 'products')
# For each query:
# parse items (comma-separated values in last column)
# sort them by score;
# write them to the submission file.
# This is longest part; it usually takes around 5 minutes.
print('Sorting items per query by popularity...')
answers = []
step = int(len(queries) / 20)
with open('submission.txt', 'w+') as submission:
for i, q in enumerate(queries.values):
# Fancy progressbar
if i % step == 0:
print(5 * i / step, '%...')
# Splitting last column which contains comma-separated items
items = q[-1].split(',')
# Getting scores for each item. Also, inverting scores here, so we can use argsort
items_scores = list(map(lambda x: -prod_pop.get(x, 0), items))
# Sorting items using items_scores order permutation
sorted_items = np.array(items)[np.array(items_scores).argsort()]
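        # e.g. items = ['1', '2', '3'] with prod_pop = {'1': 4, '3': 9} gives
        # items_scores = [-4, 0, -9]; argsort -> [2, 0, 1] -> sorted_items = ['3', '1', '2']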
# Squashing items together
s = ','.join(sorted_items)
# and writing them to submission
submission.write(str(q[0]) + " " + s + "\n")
end_time = datetime.datetime.now()
print("Done. Now it's ", end_time.isoformat())
print("Calculated baseline in ", (end_time - start_time).seconds, " seconds")
|
[
"pandas.read_csv",
"datetime.datetime.now",
"numpy.array"
] |
[((151, 174), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (172, 174), False, 'import datetime\n'), ((2686, 2709), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (2707, 2709), False, 'import datetime\n'), ((306, 361), 'pandas.read_csv', 'pd.read_csv', (['"""dataset-train/train-queries.csv"""'], {'sep': '""";"""'}), "('dataset-train/train-queries.csv', sep=';')\n", (317, 361), True, 'import pandas as pd\n'), ((741, 799), 'pandas.read_csv', 'pd.read_csv', (['"""dataset-train/train-item-views.csv"""'], {'sep': '""";"""'}), "('dataset-train/train-item-views.csv', sep=';')\n", (752, 799), True, 'import pandas as pd\n'), ((898, 952), 'pandas.read_csv', 'pd.read_csv', (['"""dataset-train/train-clicks.csv"""'], {'sep': '""";"""'}), "('dataset-train/train-clicks.csv', sep=';')\n", (909, 952), True, 'import pandas as pd\n'), ((1049, 1106), 'pandas.read_csv', 'pd.read_csv', (['"""dataset-train/train-purchases.csv"""'], {'sep': '""";"""'}), "('dataset-train/train-purchases.csv', sep=';')\n", (1060, 1106), True, 'import pandas as pd\n'), ((2460, 2475), 'numpy.array', 'np.array', (['items'], {}), '(items)\n', (2468, 2475), True, 'import numpy as np\n'), ((2476, 2498), 'numpy.array', 'np.array', (['items_scores'], {}), '(items_scores)\n', (2484, 2498), True, 'import numpy as np\n')]
|
"""Discrete KL divergence
KL loss for Categorical and RelaxedCategorical
ref) KL divergence in PyTorch
https://pytorch.org/docs/stable/_modules/torch/distributions/kl.html#kl_divergence
"""
from typing import Optional, List, Dict, Tuple
import sympy
import torch
from torch._six import inf
from pixyz.distributions import Distribution
from pixyz.losses.losses import Loss
from pixyz.utils import get_dict_values
def _kl_categorical_categorical(p: torch.distributions.Distribution,
q: torch.distributions.Distribution
) -> torch.Tensor:
"""KL divergence between categorical and categorical, KL(p||q).
Args:
p (torch.distributions.Distribution): PyTorch Distribution class.
q (torch.distributions.Distribution): PyTorch Distribution class.
Returns:
t (torch.Tensor): Calculated KL divergence.
"""
t = p.probs * (p.logits - q.logits)
t[(q.probs == 0).expand_as(t)] = inf
t[(p.probs == 0).expand_as(t)] = 0
return t.sum(-1)
class CategoricalKullbackLeibler(Loss):
"""Kullback Leibler divergence for categorical distributions.
Args:
p (pixyz.distributions.distributions.Distribution): Distribution class.
q (pixyz.distributions.distributions.Distribution): Distribution class.
input_var (list, optional): Input variable name.
dim (int, optional): Aggregate dimension.
"""
def __init__(self,
p: Distribution,
q: Distribution,
input_var: Optional[List[str]] = None,
dim: Optional[int] = None):
self.dim = dim
super().__init__(p, q, input_var)
@property
def _symbol(self):
return sympy.Symbol("D_{{KL}} \\left[{}||{} \\right]".format(
self.p.prob_text, self.q.prob_text))
def _get_eval(self,
x_dict: Dict[str, torch.Tensor],
**kwargs) -> Tuple[torch.Tensor, Dict[str, torch.Tensor]]:
if (not hasattr(self.p, 'distribution_torch_class')) \
or (not hasattr(self.q, 'distribution_torch_class')):
raise ValueError("Divergence between these two distributions "
"cannot be evaluated, got %s and %s."
% (self.p.distribution_name,
self.q.distribution_name))
input_dict = get_dict_values(x_dict, self.p.input_var, True)
self.p.set_dist(input_dict)
input_dict = get_dict_values(x_dict, self.q.input_var, True)
self.q.set_dist(input_dict)
divergence = _kl_categorical_categorical(self.p.dist, self.q.dist)
if self.dim is not None:
divergence = torch.sum(divergence, dim=self.dim)
return divergence, x_dict
dim_list = list(torch.arange(divergence.dim()))
divergence = torch.sum(divergence, dim=dim_list[1:])
return divergence, x_dict
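if __name__ == "__main__":
    # Illustrative check only (not part of the original module): KL divergence
    # between two explicit categorical distributions, computed with the helper above.
    _p = torch.distributions.Categorical(probs=torch.tensor([0.5, 0.5]))
    _q = torch.distributions.Categorical(probs=torch.tensor([0.9, 0.1]))
    print(_kl_categorical_categorical(_p, _q))  # roughly 0.51 nats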
|
[
"torch.sum",
"pixyz.utils.get_dict_values"
] |
[((2427, 2474), 'pixyz.utils.get_dict_values', 'get_dict_values', (['x_dict', 'self.p.input_var', '(True)'], {}), '(x_dict, self.p.input_var, True)\n', (2442, 2474), False, 'from pixyz.utils import get_dict_values\n'), ((2533, 2580), 'pixyz.utils.get_dict_values', 'get_dict_values', (['x_dict', 'self.q.input_var', '(True)'], {}), '(x_dict, self.q.input_var, True)\n', (2548, 2580), False, 'from pixyz.utils import get_dict_values\n'), ((2904, 2943), 'torch.sum', 'torch.sum', (['divergence'], {'dim': 'dim_list[1:]'}), '(divergence, dim=dim_list[1:])\n', (2913, 2943), False, 'import torch\n'), ((2752, 2787), 'torch.sum', 'torch.sum', (['divergence'], {'dim': 'self.dim'}), '(divergence, dim=self.dim)\n', (2761, 2787), False, 'import torch\n')]
|
#!/usr/bin/env python
""" christmas.py
Prints a christmas tree on the terminal using coloured and blinking characters.
Uses ansi terminal escape sequences.
The '\033[' part is the escape code.
We pass '5;' for the colours other than green to make them blink.
The next part is the colour code and the 'm' ends the sequence.
To reset the colour we pass "\033[0m" after each character.
Python 3 version by antiloquax (2015), based on code from datamungeblog.com.
"""
from random import choice
from random import random
def main():
"""Make the tree and print it."""
# If you change this, use an odd number.
SIZE = 21
print(makeTree(SIZE))
def makeTree(size):
"""Creates the tree string."""
# Probability that a character will be green.
prob_gr = 0.6
# Colour codes.
colours = [31, 33, 34, 35, 36, 37]
# Characters to use for decorations. Experiment with these.
# The chr(169) and chr(174) characters may not work in all terminals
# (extended ASCII, c and r in a circle).
decs = ['@', '&', '*', chr(169), chr(174)]
# Format string for printing blinking characters.
blink_col = "\033[5;{0}m{1}\033[0m"
# String to print a green octothorpe ('#').
leaf = "\033[32m#\033[0m"
# Width of the tree, will grow by 2 each time.
width = 1
# Initialise the tree string, with a star at the top.
tree = "\n{}*\n".format(' ' * (size))
""" Main Loop starts now."""
""" We can't use the normal "format" centering approach:
("{:^nn}".format(string) where "nn" is the width of the line),
with these ansi codes. This is because Python sees the strings as being
more than one character long (15 & 10 for baubles and leaves)."""
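    # Illustration (added, not in the original): the ANSI-wrapped leaf string really is
    # longer than one printable character, which is why "{:^width}" centering fails.
    assert len(leaf) == 10  # ESC + "[32m" + "#" + ESC + "[0m"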
# Loop from (size - 1) down to 0, using the counter as the padding size.
for pad in range(size - 1, -1, -1):
# Increase the width of the tree by 2.
width += 2
# Put the characters for the line in "temp".
temp = ""
for j in range(width):
# Make some leaves.
if random() < prob_gr:
temp += leaf
# And also some baubles.
else:
temp += blink_col.format(choice(colours), choice(decs))
# Add that string to the line, with padding.
tree += "{0}{1}\n".format(' ' * pad, temp)
# Add a "trunk" of 2 lines and return.
return tree + "{0}{1}\n".format(' ' * (size - 1), "000") * 2
if __name__ == "__main__":
main()
|
[
"random.random",
"random.choice"
] |
[((2075, 2083), 'random.random', 'random', ([], {}), '()\n', (2081, 2083), False, 'from random import random\n'), ((2220, 2235), 'random.choice', 'choice', (['colours'], {}), '(colours)\n', (2226, 2235), False, 'from random import choice\n'), ((2237, 2249), 'random.choice', 'choice', (['decs'], {}), '(decs)\n', (2243, 2249), False, 'from random import choice\n')]
|
import sys
input = sys.stdin.readline
sys.setrecursionlimit(10 ** 7)
n = int(input())
exp = 0
while 2 ** (exp + 1) <= n:
exp += 1
print(2 ** exp)
|
[
"sys.setrecursionlimit"
] |
[((38, 68), 'sys.setrecursionlimit', 'sys.setrecursionlimit', (['(10 ** 7)'], {}), '(10 ** 7)\n', (59, 68), False, 'import sys\n')]
|
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
from typing import List, Optional
import torch
import torch.nn as nn
from pytorchvideo.layers.utils import set_attributes
from pytorchvideo.models.weight_init import init_net_weights
class Net(nn.Module):
"""
Build a general Net models with a list of blocks for video recognition.
::
Input
↓
Block 1
↓
.
.
.
↓
Block N
↓
The ResNet builder can be found in `create_resnet`.
"""
def __init__(self, *, blocks: nn.ModuleList) -> None:
"""
Args:
blocks (torch.nn.module_list): the list of block modules.
"""
super().__init__()
assert blocks is not None
self.blocks = blocks
init_net_weights(self)
def forward(self, x: torch.Tensor) -> torch.Tensor:
for idx in range(len(self.blocks)):
x = self.blocks[idx](x)
return x
class DetectionBBoxNetwork(nn.Module):
"""
A general purpose model that handles bounding boxes as part of input.
"""
def __init__(self, model: nn.Module, detection_head: nn.Module):
"""
Args:
            model (nn.Module): a model that precedes the head. Ex: stem + stages.
            detection_head (nn.Module): a network head that can take in input bounding boxes
                and the outputs from the model.
"""
super().__init__()
self.model = model
self.detection_head = detection_head
def forward(self, x: torch.Tensor, bboxes: torch.Tensor):
"""
Args:
x (torch.tensor): input tensor
            bboxes (torch.tensor): associated bounding boxes.
The format is N*5 (Index, X_1,Y_1,X_2,Y_2) if using RoIAlign
and N*6 (Index, x_ctr, y_ctr, width, height, angle_degrees) if
using RoIAlignRotated.
"""
features = self.model(x)
out = self.detection_head(features, bboxes)
return out.view(out.shape[0], -1)
class MultiPathWayWithFuse(nn.Module):
"""
Build multi-pathway block with fusion for video recognition, each of the pathway
contains its own Blocks and Fusion layers across different pathways.
::
Pathway 1 ... Pathway N
↓ ↓
Block 1 Block N
↓⭠ --Fusion----↓
"""
def __init__(
self,
*,
multipathway_blocks: nn.ModuleList,
multipathway_fusion: Optional[nn.Module],
inplace: Optional[bool] = True,
) -> None:
"""
Args:
multipathway_blocks (nn.module_list): list of models from all pathways.
multipathway_fusion (nn.module): fusion model.
inplace (bool): If inplace, directly update the input list without making
a copy.
"""
super().__init__()
set_attributes(self, locals())
def forward(self, x: List[torch.Tensor]) -> torch.Tensor:
assert isinstance(
x, list
), "input for MultiPathWayWithFuse needs to be a list of tensors"
if self.inplace:
x_out = x
else:
x_out = [None] * len(x)
for pathway_idx in range(len(self.multipathway_blocks)):
if self.multipathway_blocks[pathway_idx] is not None:
x_out[pathway_idx] = self.multipathway_blocks[pathway_idx](
x[pathway_idx]
)
if self.multipathway_fusion is not None:
x_out = self.multipathway_fusion(x_out)
return x_out
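if __name__ == "__main__":
    # Minimal usage sketch (illustrative only; the block choice is arbitrary and the
    # real builders such as create_resnet live elsewhere in pytorchvideo).
    blocks = nn.ModuleList([nn.Conv3d(3, 8, kernel_size=1), nn.AdaptiveAvgPool3d(1)])
    net = Net(blocks=blocks)
    print(net(torch.randn(2, 3, 4, 8, 8)).shape)  # torch.Size([2, 8, 1, 1, 1])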
|
[
"pytorchvideo.models.weight_init.init_net_weights"
] |
[((1164, 1186), 'pytorchvideo.models.weight_init.init_net_weights', 'init_net_weights', (['self'], {}), '(self)\n', (1180, 1186), False, 'from pytorchvideo.models.weight_init import init_net_weights\n')]
|
from setuptools import setup
import sys
setup(name='nuodbawsquickstart',
version='1.1.0',
description='Script to deploy a multi-region and multi-instance AWS cluster',
url='http://github.com/nuodb/nuodb-aws-quickstart',
author='<NAME>.',
author_email='<EMAIL>',
#data_files=[('nuodbawsquickstart/templates', ['nuodbawsquickstart/templates/init.py'])],
install_requires=["argparse", "boto", "requests"],
license='BSD licence, see LICENSE',
packages=['nuodbawsquickstart'],
scripts=["bin/nuodb_aws_quickstart.py"],
zip_safe=True)
|
[
"setuptools.setup"
] |
[((41, 465), 'setuptools.setup', 'setup', ([], {'name': '"""nuodbawsquickstart"""', 'version': '"""1.1.0"""', 'description': '"""Script to deploy a multi-region and multi-instance AWS cluster"""', 'url': '"""http://github.com/nuodb/nuodb-aws-quickstart"""', 'author': '"""<NAME>."""', 'author_email': '"""<EMAIL>"""', 'install_requires': "['argparse', 'boto', 'requests']", 'license': '"""BSD licence, see LICENSE"""', 'packages': "['nuodbawsquickstart']", 'scripts': "['bin/nuodb_aws_quickstart.py']", 'zip_safe': '(True)'}), "(name='nuodbawsquickstart', version='1.1.0', description=\n 'Script to deploy a multi-region and multi-instance AWS cluster', url=\n 'http://github.com/nuodb/nuodb-aws-quickstart', author='<NAME>.',\n author_email='<EMAIL>', install_requires=['argparse', 'boto',\n 'requests'], license='BSD licence, see LICENSE', packages=[\n 'nuodbawsquickstart'], scripts=['bin/nuodb_aws_quickstart.py'],\n zip_safe=True)\n", (46, 465), False, 'from setuptools import setup\n')]
|
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from importlib import import_module
import flask_login
from flask import url_for, redirect, request
# Need to expose these downstream
# flake8: noqa: F401
# noinspection PyUnresolvedReferences
from flask_login import current_user, logout_user, login_required, login_user
from flask_oauthlib.client import OAuth
from airflow import models, configuration
from airflow.utils.db import provide_session
from airflow.utils.log.logging_mixin import LoggingMixin
import os, ssl
if (os.environ.get('PYTHONHTTPSVERIFY', '') == '0' and
getattr(ssl, '_create_unverified_context', None)):
ssl._create_default_https_context = ssl._create_unverified_context
log = LoggingMixin().log
def get_config_param(param):
return str(configuration.conf.get('oauth', param))
def has_config_param(param):
return configuration.conf.has_option('oauth', param)
class OAuthUser(models.User):
def __init__(self, user):
self.user = user
@property
def is_active(self):
"""Required by flask_login"""
return self.user
@property
def is_authenticated(self):
"""Required by flask_login"""
return self.user
@property
def is_anonymous(self):
"""Required by flask_login"""
return False
def get_id(self):
"""Returns the current user id as required by flask_login"""
return self.user.get_id()
def data_profiling(self):
"""Provides access to data profiling tools"""
return self.user.superuser if self.user else False
def is_superuser(self):
"""Access all the things"""
return self.user.superuser if self.user else False
class AuthenticationError(Exception):
pass
class OAuthBackend(object):
def __init__(self):
self.login_manager = flask_login.LoginManager()
self.login_manager.login_view = 'airflow.login'
self.login_manager.login_message = None
self.flask_app = None
self.oauth = None
self.api_rev = None
def init_app(self, flask_app):
self.flask_app = flask_app
self.login_manager.init_app(self.flask_app)
self.oauth = OAuth(self.flask_app).remote_app(
'oauth',
consumer_key=get_config_param('client_id'),
consumer_secret=get_config_param('client_secret'),
base_url=get_config_param('base_url'),
request_token_params={'scope': [
"user:info",
"user:check-access"
]},
request_token_url=None,
access_token_method=get_config_param('access_token_method'),
access_token_url=get_config_param('access_token_url'),
authorize_url=get_config_param('authorize_url'))
self.login_manager.user_loader(self.load_user)
self.flask_app.add_url_rule(get_config_param('oauth_callback_route'),
'oauth_callback',
self.oauth_callback)
def login(self, request):
log.debug('Redirecting user to OAuth login')
scheme = request.environ['HTTP_X_FORWARDED_PROTO'] \
if 'HTTP_X_FORWARDED_PROTO' in request.environ and request.environ['HTTP_X_FORWARDED_PROTO'] \
else request.scheme if request.scheme \
else None
return self.oauth.authorize(callback=url_for(
'oauth_callback',
_scheme=scheme,
_external=True),
state=request.args.get('next') or request.referrer or None)
def get_user_profile_info(self, access_token):
resp = self.oauth.get(
get_config_param("user_info_url"),
token=(access_token, ''))
if not resp or resp.status != 200:
raise AuthenticationError(
'Failed to fetch user profile, status ({0})'.format(
resp.status if resp else 'None'))
return resp.data
def dict_get(self, dic, key):
keys = key.split(".")
value = dic
for k in keys:
value = value[k]
return value
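    # Example (illustrative): dict_get({"user": {"login": "jo"}}, "user.login") -> "jo",
    # so nested keys in the OAuth user-info payload can be addressed with dotted paths.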
@provide_session
def load_user(self, userid, session=None):
if not userid or userid == 'None':
return None
user = session.query(models.User).filter(
models.User.id == int(userid)).first()
return OAuthUser(user)
def authorize(self, authorized_response, user_info):
"""
Parameters
----------
authorized_response
Authorized response from OAuth client
user_info: dict
User information response from OAuth client
Returns
-------
        (bool, bool)
            Return whether 1. the user is allowed to access airflow and 2. the
            user is a superuser
"""
if has_config_param("oauth_permission_backend"):
permission_backend = import_module(get_config_param("oauth_permission_backend"))
return permission_backend.authorize(self.oauth, authorized_response, user_info)
return True, True
@provide_session
def oauth_callback(self, session=None):
log.debug('OAuth callback called')
next_url = request.args.get('state') or url_for('admin.index')
if get_config_param('base_url') in next_url:
next_url = url_for('admin.index')
resp = self.oauth.authorized_response()
try:
if resp is None:
raise AuthenticationError(
'Null response from OAuth service, denying access.'
)
access_token = resp['access_token']
user_info = self.get_user_profile_info(access_token)
username_key = get_config_param("username_key")
email_key = get_config_param("email_key")
username = self.dict_get(user_info, username_key)
email = self.dict_get(user_info, email_key)
authorized, superuser = self.authorize(resp, user_info)
except AuthenticationError:
return redirect(url_for('airflow.noaccess'))
user = session.query(models.User).filter(
models.User.username == username).first()
if not authorized:
if user:
session.delete(user)
session.commit()
return redirect(url_for('airflow.noaccess'))
if not user:
user = models.User(
username=username,
email=email,
superuser=superuser)
user.superuser = superuser
session.merge(user)
session.commit()
login_user(OAuthUser(user))
session.commit()
return redirect(next_url)
login_manager = OAuthBackend()
def login(self, request):
return login_manager.login(request)
|
[
"flask_oauthlib.client.OAuth",
"flask.redirect",
"flask.request.args.get",
"flask_login.LoginManager",
"os.environ.get",
"flask.url_for",
"airflow.models.User",
"airflow.configuration.conf.has_option",
"airflow.configuration.conf.get",
"airflow.utils.log.logging_mixin.LoggingMixin"
] |
[((1473, 1487), 'airflow.utils.log.logging_mixin.LoggingMixin', 'LoggingMixin', ([], {}), '()\n', (1485, 1487), False, 'from airflow.utils.log.logging_mixin import LoggingMixin\n'), ((1620, 1665), 'airflow.configuration.conf.has_option', 'configuration.conf.has_option', (['"""oauth"""', 'param'], {}), "('oauth', param)\n", (1649, 1665), False, 'from airflow import models, configuration\n'), ((1288, 1327), 'os.environ.get', 'os.environ.get', (['"""PYTHONHTTPSVERIFY"""', '""""""'], {}), "('PYTHONHTTPSVERIFY', '')\n", (1302, 1327), False, 'import os, ssl\n'), ((1538, 1576), 'airflow.configuration.conf.get', 'configuration.conf.get', (['"""oauth"""', 'param'], {}), "('oauth', param)\n", (1560, 1576), False, 'from airflow import models, configuration\n'), ((2596, 2622), 'flask_login.LoginManager', 'flask_login.LoginManager', ([], {}), '()\n', (2620, 2622), False, 'import flask_login\n'), ((7496, 7514), 'flask.redirect', 'redirect', (['next_url'], {}), '(next_url)\n', (7504, 7514), False, 'from flask import url_for, redirect, request\n'), ((6005, 6030), 'flask.request.args.get', 'request.args.get', (['"""state"""'], {}), "('state')\n", (6021, 6030), False, 'from flask import url_for, redirect, request\n'), ((6034, 6056), 'flask.url_for', 'url_for', (['"""admin.index"""'], {}), "('admin.index')\n", (6041, 6056), False, 'from flask import url_for, redirect, request\n'), ((6133, 6155), 'flask.url_for', 'url_for', (['"""admin.index"""'], {}), "('admin.index')\n", (6140, 6155), False, 'from flask import url_for, redirect, request\n'), ((7215, 7279), 'airflow.models.User', 'models.User', ([], {'username': 'username', 'email': 'email', 'superuser': 'superuser'}), '(username=username, email=email, superuser=superuser)\n', (7226, 7279), False, 'from airflow import models, configuration\n'), ((2958, 2979), 'flask_oauthlib.client.OAuth', 'OAuth', (['self.flask_app'], {}), '(self.flask_app)\n', (2963, 2979), False, 'from flask_oauthlib.client import OAuth\n'), ((4165, 4222), 'flask.url_for', 'url_for', (['"""oauth_callback"""'], {'_scheme': 'scheme', '_external': '(True)'}), "('oauth_callback', _scheme=scheme, _external=True)\n", (4172, 4222), False, 'from flask import url_for, redirect, request\n'), ((7145, 7172), 'flask.url_for', 'url_for', (['"""airflow.noaccess"""'], {}), "('airflow.noaccess')\n", (7152, 7172), False, 'from flask import url_for, redirect, request\n'), ((4279, 4303), 'flask.request.args.get', 'request.args.get', (['"""next"""'], {}), "('next')\n", (4295, 4303), False, 'from flask import url_for, redirect, request\n'), ((6864, 6891), 'flask.url_for', 'url_for', (['"""airflow.noaccess"""'], {}), "('airflow.noaccess')\n", (6871, 6891), False, 'from flask import url_for, redirect, request\n')]
|
from logging import getLogger, Formatter
from logging.handlers import RotatingFileHandler
from typing import Type
from src.framework import get_config, make_config_files, run_jobs, _sleeper
from src.zipped_logs import ZippedRotatingFileHandler
if __name__ == '__main__':
# setup section
make_config_files()
config = get_config()
formatter = Formatter(config['logging']['format'] % ())
log = getLogger()
log.setLevel(config.getint('logging', 'level'))
file_name = config['logging']['file_name']
if config.getboolean('logging', 'auto_zip'):
fh_type: Type[RotatingFileHandler] = ZippedRotatingFileHandler
else:
fh_type = RotatingFileHandler
fh = fh_type(
file_name + '.txt',
mode='w',
encoding='utf-8',
maxBytes=config.getfilesize('logging', 'max_file_size'), # type: ignore # pylint: disable=no-member
backupCount=config.getfilesize('logging', 'backup_count'), # type: ignore # pylint: disable=no-member
)
fh.setLevel(config.getint('logging', 'level'))
fh.setFormatter(formatter)
log.addHandler(fh)
# setup done
try:
run_jobs(_sleeper, [(x, ) for x in range(256)])
# optional teardown
# ...
finally:
# required teardown
log.removeHandler(fh)
fh.flush()
fh.close()
|
[
"logging.Formatter",
"src.framework.get_config",
"logging.getLogger",
"src.framework.make_config_files"
] |
[((298, 317), 'src.framework.make_config_files', 'make_config_files', ([], {}), '()\n', (315, 317), False, 'from src.framework import get_config, make_config_files, run_jobs, _sleeper\n'), ((331, 343), 'src.framework.get_config', 'get_config', ([], {}), '()\n', (341, 343), False, 'from src.framework import get_config, make_config_files, run_jobs, _sleeper\n'), ((361, 404), 'logging.Formatter', 'Formatter', (["(config['logging']['format'] % ())"], {}), "(config['logging']['format'] % ())\n", (370, 404), False, 'from logging import getLogger, Formatter\n'), ((415, 426), 'logging.getLogger', 'getLogger', ([], {}), '()\n', (424, 426), False, 'from logging import getLogger, Formatter\n')]
|
__author__ = 'DafniAntotsiou'
from gym_ext.envs.inverted_pendulum_ext import InvertedPendulumEnvExt
from gym_ext.envs.HalfCheetah_ext import HalfCheetahEnvExt
from gym.envs.mujoco.mujoco_env import MujocoEnv
import mujoco_py
from mapping.mjviewerext import MjViewerExt as MjViewer
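# Replacement for MujocoEnv._get_viewer, monkey-patched onto the class below via
# setattr, so that 'human' rendering uses the extended MjViewerExt (optionally named).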
def _get_viewer(self, mode):
self.viewer = self._viewers.get(mode)
if self.viewer is None:
if mode == 'human':
self.viewer = MjViewer(self.sim, self.name if hasattr(self, 'name') else None)
elif mode == 'rgb_array' or mode == 'depth_array':
self.viewer = mujoco_py.MjRenderContextOffscreen(self.sim, -1)
self.viewer_setup()
self._viewers[mode] = self.viewer
return self.viewer
setattr(MujocoEnv, '_get_viewer', _get_viewer)
|
[
"mujoco_py.MjRenderContextOffscreen"
] |
[((587, 635), 'mujoco_py.MjRenderContextOffscreen', 'mujoco_py.MjRenderContextOffscreen', (['self.sim', '(-1)'], {}), '(self.sim, -1)\n', (621, 635), False, 'import mujoco_py\n')]
|
"""@file data_reader.py
contains a reader class for data"""
from six.moves import configparser
from nabu.processing.processors import processor_factory
import gzip
import os
class DataReader(object):
"""the data reader class.
a reader for data. Data is not stored in tensorflow format
as was done in data.py. Data is returned in numpy format
and is accessed by indexing instead of looping over all
data. It is currently only used in postprocessing.
"""
def __init__(self, dataconfs, segment_lengths=['full']):
"""DataReader constructor
Args:
dataconfs: the database configuration
segment_lengths: A list containing the desired lengths of segments.
Possibly multiple segment lengths
"""
if len(segment_lengths) > 1:
print(
                'Warning: Not yet implemented __call__ correctly for multiple segments. The returned utt_info does not ' \
                'contain the _part suffix and processed returns only 1 processed')
self.segment_lengths = segment_lengths
self.processors = []
self.start_index_set = [0]
self.datafile_lines = []
for dataconf in dataconfs:
# read the processor config
proc_cfg_file = dataconf['processor_config']
if not os.path.isfile(proc_cfg_file):
raise BaseException('%s does not exist' % proc_cfg_file)
parsed_proc_cfg = configparser.ConfigParser()
parsed_proc_cfg.read(proc_cfg_file)
proc_cfg = dict(parsed_proc_cfg.items('processor'))
# create a processor
self.processors.append(processor_factory.factory(proc_cfg['processor'])(proc_cfg, self.segment_lengths))
# get the datafiles lines
datafile = dataconf['datafiles'] # TODO: for the moment expecting only 1 file, but this also makes sense?
if datafile[-3:] == '.gz':
open_fn = gzip.open
else:
open_fn = open
f = open_fn(datafile)
datalines = f.readlines()
self.start_index_set.append(self.start_index_set[-1]+len(datalines))
self.datafile_lines.extend(datalines)
def __call__(self, list_pos):
"""read data from the datafile list
Args:
list_pos: position on the datafile list to read
Returns:
The processed data as a numpy array"""
line = self.datafile_lines[list_pos]
for ind, start_index in enumerate(self.start_index_set):
if start_index > list_pos:
processor = self.processors[ind-1]
break
# split the name and the data line
splitline = line.strip().split(' ')
utt_name = splitline[0]
dataline = ' '.join(splitline[1:])
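        # Illustrative example (assumed line format): "utt1 0.1 0.2 0.3" yields
        # utt_name "utt1" and dataline "0.1 0.2 0.3", which is handed to the processor.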
# process the dataline
processed, utt_info = processor(dataline)
utt_info['utt_name'] = utt_name
# Currently only returning 1 processed!
processed = processed[self.segment_lengths[0]][0]
return processed, utt_info
def get_name_for_pos(self, list_pos):
""" get the name of the utterance for the given position from the datafile list
Args:
list_pos: position on the datafile list to read
Returns:
The name of the utterance"""
line = self.datafile_lines[list_pos]
# split the name and the data line
splitline = line.strip().split(' ')
utt_name = splitline[0]
return utt_name
|
[
"os.path.isfile",
"nabu.processing.processors.processor_factory.factory",
"six.moves.configparser.ConfigParser"
] |
[((1290, 1317), 'six.moves.configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (1315, 1317), False, 'from six.moves import configparser\n'), ((1177, 1206), 'os.path.isfile', 'os.path.isfile', (['proc_cfg_file'], {}), '(proc_cfg_file)\n', (1191, 1206), False, 'import os\n'), ((1463, 1511), 'nabu.processing.processors.processor_factory.factory', 'processor_factory.factory', (["proc_cfg['processor']"], {}), "(proc_cfg['processor'])\n", (1488, 1511), False, 'from nabu.processing.processors import processor_factory\n')]
|
from pathlib import Path
from typing import Tuple
from sacred.run import Run
import tensorflow as tf
from tensorflow.keras.callbacks import ReduceLROnPlateau, TensorBoard
from tensorflow.keras.models import Model
from .job import Job
from ..ingredients import (
get_data_loader,
get_builder,
)
from ..loaders import DataLoader
class KerasJob(Job):
def _main(
self,
run: Run,
seed: int,
fitable: Model = None,
fitable_config: dict = None,
loader_config: dict = None,
):
"""
Private method containing the actual work completed by the job. Implemented is a default
workflow for a basic keras/kerastuner type job.
:param run: sacred.Run object. See sacred documentation for more details on utility.
:param fitable: Optional tensorflow.keras.Model or kerastuner.Tuner object.
Model-like which contains a fit method.
:param fitable_config: Optional dict. Contains data which can be used to create a new
fitable instance.
:param loader_config: Optional dict. Contains data which can be used to create a new
DataLoader instance.
"""
loader, data = self._load_data(loader_config)
fitable = fitable or self._load_fitable(loader, fitable_config)
fitable = self._fit(run, fitable, data)
if self.exp_config["run_config"]["test"]:
self._test_fitable(run, fitable, data[-1])
if self.exp_config["run_config"]["save_model"]:
self._save_fitable(run, fitable)
return fitable
def _load_data(self, config: dict = None) -> Tuple[DataLoader, Tuple]:
"""
Obtains a loader using ingredients.get_loader and self.exp_config['loader_config']
:param config: Optional dict. config passed to get_data_loader to obtain specific
data_loader class.
:return: Loader object and the data returned by that Loader's get_data method.
"""
config = config or self.exp_config["loader_config"]
loader = get_data_loader(**config)
if self.exp_config["run_config"]["select_few"]:
data = loader.few_examples(**config["load_kwargs"])
else:
data = loader.load_data(**config["load_kwargs"])
return loader, data
def _load_fitable(self, loader: DataLoader, fitable_config: dict = None) -> Model:
"""
Defines and compiles a fitable (keras.model or keras_tuner.tuner) which implements
        a 'fit' method. This method calls either get_builder or get_hyper_factory, depending on
        which type of fitable is being loaded.
:return: Model or Tuner object.
"""
fitable_config = fitable_config or self.exp_config["builder_config"]
conf = dict(
**fitable_config,
max_z=loader.max_z,
num_points=loader.num_points,
mu=loader.mu,
sigma=loader.sigma,
)
builder = get_builder(**conf)
run_config = self.exp_config["run_config"]
compile_kwargs = dict(
loss=run_config["loss"],
loss_weights=run_config["loss_weights"],
optimizer=run_config["optimizer"],
metrics=run_config["metrics"],
run_eagerly=run_config["run_eagerly"],
)
if run_config["use_strategy"]:
strategy = tf.distribute.MirroredStrategy()
with strategy.scope():
model = builder.get_model()
model.compile(**compile_kwargs)
else:
model = builder.get_model()
model.compile(**compile_kwargs)
return model
def _fit(
self, run: Run, fitable: Model, data: tuple, callbacks: list = None,
) -> Model:
"""
:param run: sacred.Run object. See sacred documentation for details on utility.
:param fitable: tensorflow.keras.Model object.
:param data: tuple. train, validation, and test data in the form (train, val, test),
where train is
the tuple (x_train, y_train).
:param callbacks: Optional list. List of tensorflow.keras.Callback objects to pass to
fitable.fit method.
:return: tensorflow.keras.Model object.
"""
tensorboard_directory = self.exp_config["run_config"]["root_dir"] / "logs"
(x_train, y_train), val, _ = data
callbacks = callbacks or []
if self.exp_config["run_config"]["use_default_callbacks"]:
callbacks.extend(
[
TensorBoard(
**dict(
**self.exp_config["tb_config"],
log_dir=tensorboard_directory,
)
),
ReduceLROnPlateau(**self.exp_config["lr_config"]),
]
)
kwargs = dict(
x=x_train,
y=y_train,
epochs=self.exp_config["run_config"]["epochs"],
batch_size=self.exp_config["run_config"]["batch_size"],
validation_data=val,
class_weight=self.exp_config["run_config"]["class_weight"],
callbacks=callbacks,
verbose=self.exp_config["run_config"]["fit_verbosity"],
)
fitable.fit(**kwargs)
return fitable
def _test_fitable(self, run: Run, fitable: Model, test_data: tuple) -> float:
"""
:param fitable: tensorflow.keras.Model object.
:param test_data: tuple. contains (x_test, y_test).
:return: float. Scalar test_loss value.
"""
if test_data is None:
return 0.0
x_test, y_test = test_data
loss = fitable.evaluate(x=x_test, y=y_test, verbose=0)
print(f"Test split results: {loss}")
return loss
def _save_fitable(self, run: Run, fitable: Model):
"""
:param run: sacred.Run object. see sacred documentation for more details on utility.
:param fitable: tensorflow.keras.Model object.
"""
path = self.exp_config["run_config"]["model_path"]
if self.exp_config["run_config"]["save_verbosity"] > 0:
fitable.summary()
fitable.save(self.exp_config["run_config"]["model_path"])
run.add_artifact(path)
def _new_model_path(self, name: str):
model_path = Path(self.exp_config["run_config"]["model_path"]).parent / name
self.exp_config["run_config"]["model_path"] = model_path
return model_path
|
[
"tensorflow.keras.callbacks.ReduceLROnPlateau",
"pathlib.Path",
"tensorflow.distribute.MirroredStrategy"
] |
[((3404, 3436), 'tensorflow.distribute.MirroredStrategy', 'tf.distribute.MirroredStrategy', ([], {}), '()\n', (3434, 3436), True, 'import tensorflow as tf\n'), ((6397, 6446), 'pathlib.Path', 'Path', (["self.exp_config['run_config']['model_path']"], {}), "(self.exp_config['run_config']['model_path'])\n", (6401, 6446), False, 'from pathlib import Path\n'), ((4820, 4869), 'tensorflow.keras.callbacks.ReduceLROnPlateau', 'ReduceLROnPlateau', ([], {}), "(**self.exp_config['lr_config'])\n", (4837, 4869), False, 'from tensorflow.keras.callbacks import ReduceLROnPlateau, TensorBoard\n')]
|
from sklearn.manifold import TSNE
import pandas as pd
import matplotlib.pyplot as plt
def visualize(data):
data_embedded = TSNE(n_components=2).fit_transform(data)
print(data_embedded)
plt.plot(data_embedded)
plt.show()
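    # Note (added): plt.plot draws each embedding column as a separate line series;
    # a 2-D scatter, e.g. plt.scatter(data_embedded[:, 0], data_embedded[:, 1]),
    # is the more usual way to inspect a t-SNE embedding.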
|
[
"sklearn.manifold.TSNE",
"matplotlib.pyplot.show",
"matplotlib.pyplot.plot"
] |
[((198, 221), 'matplotlib.pyplot.plot', 'plt.plot', (['data_embedded'], {}), '(data_embedded)\n', (206, 221), True, 'import matplotlib.pyplot as plt\n'), ((226, 236), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (234, 236), True, 'import matplotlib.pyplot as plt\n'), ((128, 148), 'sklearn.manifold.TSNE', 'TSNE', ([], {'n_components': '(2)'}), '(n_components=2)\n', (132, 148), False, 'from sklearn.manifold import TSNE\n')]
|
from subs2cia.sources import Stream
import pycountry
import logging
def picker(streams: [Stream], target_lang: str = None, forced_stream: int = None):
r"""
Returns streams by priority. Streams which are not part of a container are preferred first,
followed by manually specified stream indices, then streams which match a specified language, finally followed
by the remaining streams.
:param streams: List of Stream objects
:param target_lang: Target language
    :param forced_stream: Index of a manually specified stream to yield with priority
:return:
"""
for s in streams:
if s.is_standalone():
yield s
if forced_stream is not None:
yield streams[forced_stream]
if target_lang is not None:
target_lang = pycountry.languages.lookup(target_lang)
target_lang = target_lang.alpha_3
for s in streams:
if s.is_standalone():
continue
if s.get_language() == target_lang:
yield s
for s in streams:
if s.is_standalone():
continue
        if s.get_language() == target_lang:
continue
yield s
|
[
"pycountry.languages.lookup"
] |
[((732, 771), 'pycountry.languages.lookup', 'pycountry.languages.lookup', (['target_lang'], {}), '(target_lang)\n', (758, 771), False, 'import pycountry\n')]
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import logging
import os
from cloghandler import ConcurrentRotatingFileHandler
import logconf
def compose_logger(name, log_file):
logger = logging.Logger(name)
    hdlr = ConcurrentRotatingFileHandler(
        # LOG_FILE_DIR is assumed to live in logconf alongside the other logging settings
        filename=os.path.join(logconf.LOG_FILE_DIR, log_file),
        maxBytes=logconf.MAX_BYTES, backupCount=logconf.BACK_UP_COUNT)
formatter = logging.Formatter(logconf.VERBOSE_FORMATTER)
hdlr.setFormatter(formatter)
logger.addHandler(hdlr)
logger.setLevel(logging.DEBUG)
return logger
|
[
"logging.Formatter",
"os.path.join",
"logging.Logger"
] |
[((190, 210), 'logging.Logger', 'logging.Logger', (['name'], {}), '(name)\n', (204, 210), False, 'import logging\n'), ((395, 439), 'logging.Formatter', 'logging.Formatter', (['logconf.VERBOSE_FORMATTER'], {}), '(logconf.VERBOSE_FORMATTER)\n', (412, 439), False, 'import logging\n'), ((270, 306), 'os.path.join', 'os.path.join', (['LOG_FILE_DIR', 'log_file'], {}), '(LOG_FILE_DIR, log_file)\n', (282, 306), False, 'import os\n')]
|
from django.contrib import admin
from .models import Concert
# Register your models here.
class ConcertAdmin(admin.ModelAdmin):
readonly_fields = ('created', 'updated')
admin.site.register(Concert, ConcertAdmin)
|
[
"django.contrib.admin.site.register"
] |
[((180, 222), 'django.contrib.admin.site.register', 'admin.site.register', (['Concert', 'ConcertAdmin'], {}), '(Concert, ConcertAdmin)\n', (199, 222), False, 'from django.contrib import admin\n')]
|
"""
Test ingress.py module
"""
import os
import pandas as pd
from sqlalchemy import create_engine
from edunotice.ingress import (
_update_courses,
_update_labs,
_update_subscriptions,
_update_details,
update_edu_data,
)
from edunotice.constants import (
CONST_TEST_DIR_DATA,
CONST_TEST1_FILENAME,
CONST_TEST2_FILENAME,
SQL_CONNECTION_STRING,
SQL_TEST_DBNAME1,
)
# wrong dataframe
wrong_df = pd.DataFrame(
{
"name": ["Jason", "Molly", "Tina", "Jake", "Amy"],
"year": [2012, 2012, 2013, 2014, 2014],
"reports": [4, 24, 31, 2, 3],
},
index=["Cochice", "Pima", "<NAME>", "Maricopa", "Yuma"],
)
# good data
file_path1 = os.path.join(CONST_TEST_DIR_DATA, CONST_TEST1_FILENAME)
eduhub_df1 = pd.read_csv(file_path1)
ENGINE = create_engine("%s/%s" % (SQL_CONNECTION_STRING, SQL_TEST_DBNAME1))
def test_update_courses():
"""
tests ingress._update_courses routine
"""
# wrong dataframe
success, error, _ = _update_courses(ENGINE, wrong_df)
assert success is False, error
# good data
success, error, course_dict = _update_courses(ENGINE, eduhub_df1)
assert success, error
assert len(course_dict) == 2
success, error, course_dict = _update_courses(ENGINE, eduhub_df1)
assert success, error
assert len(course_dict) == 2
def test_update_labs():
"""
tests ingress._update_labs routine
"""
# getting the courses
success, error, course_dict = _update_courses(ENGINE, eduhub_df1)
assert success, error
assert len(course_dict) == 2
# wrong dataframe
success, error, _ = _update_labs(ENGINE, wrong_df, course_dict)
assert success is False, error
# good data
success, error, lab_dict = _update_labs(ENGINE, eduhub_df1, course_dict)
assert success, error
assert len(lab_dict) == 2
success, error, lab_dict = _update_labs(ENGINE, eduhub_df1, course_dict)
assert success, error
assert len(lab_dict) == 2
def test_update_subscriptions():
"""
tests ingress._update_subscriptions routine
"""
# wrong dataframe
success, error, _ = _update_subscriptions(ENGINE, wrong_df)
assert success is False, error
# good data
success, error, sub_dict = _update_subscriptions(ENGINE, eduhub_df1)
assert success, error
assert len(sub_dict) == 2
success, error, sub_dict = _update_subscriptions(ENGINE, eduhub_df1)
assert success, error
assert len(sub_dict) == 2
def test_update_details_1():
"""
tests ingress._update_details routine
2 new subscriptions
"""
# getting the courses
success, error, course_dict = _update_courses(ENGINE, eduhub_df1)
assert success, error
assert len(course_dict) == 2
# getting the labs
success, error, lab_dict = _update_labs(ENGINE, eduhub_df1, course_dict)
assert success, error
assert len(lab_dict) == 2
# getting the subscriptions
success, error, sub_dict = _update_subscriptions(ENGINE, eduhub_df1)
assert success, error
assert len(sub_dict) == 2
# 2 new subscriptions
success, error, new_list, update_list = _update_details(
ENGINE, eduhub_df1, lab_dict, sub_dict
)
assert success, error
assert len(new_list) == 2
assert len(update_list) == 0
def test_update_details_2():
"""
tests ingress._update_details routine
1 update
"""
eduhub_df_local = pd.read_csv(
os.path.join(CONST_TEST_DIR_DATA, CONST_TEST2_FILENAME)
)
# getting the courses
success, error, course_dict = _update_courses(ENGINE, eduhub_df_local)
assert success, error
assert len(course_dict) == 2
# getting the labs
success, error, lab_dict = _update_labs(
ENGINE, eduhub_df_local, course_dict
)
assert success, error
assert len(lab_dict) == 2
# getting the subscriptions
success, error, sub_dict = _update_subscriptions(ENGINE, eduhub_df_local)
assert success, error
assert len(sub_dict) == 3
success, error, new_list, update_list = _update_details(
ENGINE, eduhub_df_local, lab_dict, sub_dict
)
assert success, error
assert len(new_list) == 1
assert len(update_list) == 2
def test_update_edu_data():
"""
tests ingress.update_edu_data routine
"""
# not a dataframe
(
success,
error,
_,
_,
sub_new_list,
sub_update_list
) = update_edu_data(ENGINE, None)
assert success is False, error
# empty dataframe
success, error, _, _, sub_new_list, sub_update_list = update_edu_data(
ENGINE, pd.DataFrame()
)
assert success is False, error
# real data
success, error, _, _, sub_new_list, sub_update_list = update_edu_data(
ENGINE, eduhub_df1
)
assert success, error
assert len(sub_new_list) == 0
assert len(sub_update_list) == 2
|
[
"pandas.DataFrame",
"pandas.read_csv",
"edunotice.ingress._update_labs",
"edunotice.ingress.update_edu_data",
"edunotice.ingress._update_courses",
"edunotice.ingress._update_details",
"sqlalchemy.create_engine",
"os.path.join",
"edunotice.ingress._update_subscriptions"
] |
[((436, 638), 'pandas.DataFrame', 'pd.DataFrame', (["{'name': ['Jason', 'Molly', 'Tina', 'Jake', 'Amy'], 'year': [2012, 2012, \n 2013, 2014, 2014], 'reports': [4, 24, 31, 2, 3]}"], {'index': "['Cochice', 'Pima', '<NAME>', 'Maricopa', 'Yuma']"}), "({'name': ['Jason', 'Molly', 'Tina', 'Jake', 'Amy'], 'year': [\n 2012, 2012, 2013, 2014, 2014], 'reports': [4, 24, 31, 2, 3]}, index=[\n 'Cochice', 'Pima', '<NAME>', 'Maricopa', 'Yuma'])\n", (448, 638), True, 'import pandas as pd\n'), ((697, 752), 'os.path.join', 'os.path.join', (['CONST_TEST_DIR_DATA', 'CONST_TEST1_FILENAME'], {}), '(CONST_TEST_DIR_DATA, CONST_TEST1_FILENAME)\n', (709, 752), False, 'import os\n'), ((766, 789), 'pandas.read_csv', 'pd.read_csv', (['file_path1'], {}), '(file_path1)\n', (777, 789), True, 'import pandas as pd\n'), ((800, 866), 'sqlalchemy.create_engine', 'create_engine', (["('%s/%s' % (SQL_CONNECTION_STRING, SQL_TEST_DBNAME1))"], {}), "('%s/%s' % (SQL_CONNECTION_STRING, SQL_TEST_DBNAME1))\n", (813, 866), False, 'from sqlalchemy import create_engine\n'), ((1001, 1034), 'edunotice.ingress._update_courses', '_update_courses', (['ENGINE', 'wrong_df'], {}), '(ENGINE, wrong_df)\n', (1016, 1034), False, 'from edunotice.ingress import _update_courses, _update_labs, _update_subscriptions, _update_details, update_edu_data\n'), ((1121, 1156), 'edunotice.ingress._update_courses', '_update_courses', (['ENGINE', 'eduhub_df1'], {}), '(ENGINE, eduhub_df1)\n', (1136, 1156), False, 'from edunotice.ingress import _update_courses, _update_labs, _update_subscriptions, _update_details, update_edu_data\n'), ((1251, 1286), 'edunotice.ingress._update_courses', '_update_courses', (['ENGINE', 'eduhub_df1'], {}), '(ENGINE, eduhub_df1)\n', (1266, 1286), False, 'from edunotice.ingress import _update_courses, _update_labs, _update_subscriptions, _update_details, update_edu_data\n'), ((1488, 1523), 'edunotice.ingress._update_courses', '_update_courses', (['ENGINE', 'eduhub_df1'], {}), '(ENGINE, eduhub_df1)\n', (1503, 1523), False, 'from edunotice.ingress import _update_courses, _update_labs, _update_subscriptions, _update_details, update_edu_data\n'), ((1630, 1673), 'edunotice.ingress._update_labs', '_update_labs', (['ENGINE', 'wrong_df', 'course_dict'], {}), '(ENGINE, wrong_df, course_dict)\n', (1642, 1673), False, 'from edunotice.ingress import _update_courses, _update_labs, _update_subscriptions, _update_details, update_edu_data\n'), ((1757, 1802), 'edunotice.ingress._update_labs', '_update_labs', (['ENGINE', 'eduhub_df1', 'course_dict'], {}), '(ENGINE, eduhub_df1, course_dict)\n', (1769, 1802), False, 'from edunotice.ingress import _update_courses, _update_labs, _update_subscriptions, _update_details, update_edu_data\n'), ((1891, 1936), 'edunotice.ingress._update_labs', '_update_labs', (['ENGINE', 'eduhub_df1', 'course_dict'], {}), '(ENGINE, eduhub_df1, course_dict)\n', (1903, 1936), False, 'from edunotice.ingress import _update_courses, _update_labs, _update_subscriptions, _update_details, update_edu_data\n'), ((2139, 2178), 'edunotice.ingress._update_subscriptions', '_update_subscriptions', (['ENGINE', 'wrong_df'], {}), '(ENGINE, wrong_df)\n', (2160, 2178), False, 'from edunotice.ingress import _update_courses, _update_labs, _update_subscriptions, _update_details, update_edu_data\n'), ((2262, 2303), 'edunotice.ingress._update_subscriptions', '_update_subscriptions', (['ENGINE', 'eduhub_df1'], {}), '(ENGINE, eduhub_df1)\n', (2283, 2303), False, 'from edunotice.ingress import _update_courses, _update_labs, _update_subscriptions, _update_details, 
update_edu_data\n'), ((2392, 2433), 'edunotice.ingress._update_subscriptions', '_update_subscriptions', (['ENGINE', 'eduhub_df1'], {}), '(ENGINE, eduhub_df1)\n', (2413, 2433), False, 'from edunotice.ingress import _update_courses, _update_labs, _update_subscriptions, _update_details, update_edu_data\n'), ((2665, 2700), 'edunotice.ingress._update_courses', '_update_courses', (['ENGINE', 'eduhub_df1'], {}), '(ENGINE, eduhub_df1)\n', (2680, 2700), False, 'from edunotice.ingress import _update_courses, _update_labs, _update_subscriptions, _update_details, update_edu_data\n'), ((2815, 2860), 'edunotice.ingress._update_labs', '_update_labs', (['ENGINE', 'eduhub_df1', 'course_dict'], {}), '(ENGINE, eduhub_df1, course_dict)\n', (2827, 2860), False, 'from edunotice.ingress import _update_courses, _update_labs, _update_subscriptions, _update_details, update_edu_data\n'), ((2981, 3022), 'edunotice.ingress._update_subscriptions', '_update_subscriptions', (['ENGINE', 'eduhub_df1'], {}), '(ENGINE, eduhub_df1)\n', (3002, 3022), False, 'from edunotice.ingress import _update_courses, _update_labs, _update_subscriptions, _update_details, update_edu_data\n'), ((3150, 3205), 'edunotice.ingress._update_details', '_update_details', (['ENGINE', 'eduhub_df1', 'lab_dict', 'sub_dict'], {}), '(ENGINE, eduhub_df1, lab_dict, sub_dict)\n', (3165, 3205), False, 'from edunotice.ingress import _update_courses, _update_labs, _update_subscriptions, _update_details, update_edu_data\n'), ((3580, 3620), 'edunotice.ingress._update_courses', '_update_courses', (['ENGINE', 'eduhub_df_local'], {}), '(ENGINE, eduhub_df_local)\n', (3595, 3620), False, 'from edunotice.ingress import _update_courses, _update_labs, _update_subscriptions, _update_details, update_edu_data\n'), ((3735, 3785), 'edunotice.ingress._update_labs', '_update_labs', (['ENGINE', 'eduhub_df_local', 'course_dict'], {}), '(ENGINE, eduhub_df_local, course_dict)\n', (3747, 3785), False, 'from edunotice.ingress import _update_courses, _update_labs, _update_subscriptions, _update_details, update_edu_data\n'), ((3920, 3966), 'edunotice.ingress._update_subscriptions', '_update_subscriptions', (['ENGINE', 'eduhub_df_local'], {}), '(ENGINE, eduhub_df_local)\n', (3941, 3966), False, 'from edunotice.ingress import _update_courses, _update_labs, _update_subscriptions, _update_details, update_edu_data\n'), ((4068, 4128), 'edunotice.ingress._update_details', '_update_details', (['ENGINE', 'eduhub_df_local', 'lab_dict', 'sub_dict'], {}), '(ENGINE, eduhub_df_local, lab_dict, sub_dict)\n', (4083, 4128), False, 'from edunotice.ingress import _update_courses, _update_labs, _update_subscriptions, _update_details, update_edu_data\n'), ((4458, 4487), 'edunotice.ingress.update_edu_data', 'update_edu_data', (['ENGINE', 'None'], {}), '(ENGINE, None)\n', (4473, 4487), False, 'from edunotice.ingress import _update_courses, _update_labs, _update_subscriptions, _update_details, update_edu_data\n'), ((4768, 4803), 'edunotice.ingress.update_edu_data', 'update_edu_data', (['ENGINE', 'eduhub_df1'], {}), '(ENGINE, eduhub_df1)\n', (4783, 4803), False, 'from edunotice.ingress import _update_courses, _update_labs, _update_subscriptions, _update_details, update_edu_data\n'), ((3457, 3512), 'os.path.join', 'os.path.join', (['CONST_TEST_DIR_DATA', 'CONST_TEST2_FILENAME'], {}), '(CONST_TEST_DIR_DATA, CONST_TEST2_FILENAME)\n', (3469, 3512), False, 'import os\n'), ((4637, 4651), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (4649, 4651), True, 'import pandas as pd\n')]
|
from django.core.management.base import BaseCommand
from django.db import transaction
from jobya.users.models import User
from jobya.users.tests.factories import UserFactory
class Command(BaseCommand):
help = "Set up users data"
def add_arguments(self, parser):
parser.add_argument(
"total",
nargs="+",
type=int,
help="Indicates the number of users to be created",
)
@transaction.atomic
def handle(self, *args, **options):
total = options["total"][0]
self.stdout.write("Deleting old data...")
# Don't delete superuser
User.objects.filter(is_superuser=False).delete()
self.stdout.write("Creating new data...")
# Create all the users
people = []
for _ in range(total):
person = UserFactory()
people.append(person)
self.stdout.write("Success")
|
[
"jobya.users.tests.factories.UserFactory",
"jobya.users.models.User.objects.filter"
] |
[((839, 852), 'jobya.users.tests.factories.UserFactory', 'UserFactory', ([], {}), '()\n', (850, 852), False, 'from jobya.users.tests.factories import UserFactory\n'), ((636, 675), 'jobya.users.models.User.objects.filter', 'User.objects.filter', ([], {'is_superuser': '(False)'}), '(is_superuser=False)\n', (655, 675), False, 'from jobya.users.models import User\n')]
|
import os
import re
import time
import logging
from virttest import data_dir
from virttest import env_process
from avocado.utils import process
from avocado.core import exceptions
from autotest.client.shared import error
from qemu.tests import thin_provisioning
@error.context_aware
def run(test, params, env):
"""
'thin-provisioning' functions test using sg_utils:
1) Boot up the guest with the scsi disk
2) using sg_utils to do some test
3) In guest, check the sha1 value of the guest disk
4) In host, check the sha1 value of the disk image
:param test: QEMU test object
:param params: Dictionary with the test parameters
:param env: Dictionary with test environment.
"""
def get_excution_time(session, cmd):
"""
This function is used to measure the real execution time of
the command in guest through shell command "time".
:param session: Guest session
:param cmd: Commands to execute
:return: The real execution time
"""
out = session.cmd_output(cmd)
try:
return float(re.search(r"real\s+\dm(.*)s", out).group(1))
        except Exception:
            raise exceptions.TestError("Unable to read realtime, cmd output: %s" % out)
def run_sg_utils(disk_name, session):
"""
This function is used do to some test on the disk using sg_utils package.
:param disk_name: The Guest disk name
:param session: Guest Session
:return: None
"""
yesfile = "/home/buf"
cmd = """yes | head -n2048 > {0};"""
cmd += "sg_write_same --in {0} --num=32 --lba=80 {1};"
cmd += "sg_write_same --in /dev/zero --num=96 --lba=0 {1};"
cmd += "sg_write_same -U --in /dev/zero --num=16 --lba=0 {1};"
cmd = cmd.format(yesfile, disk_name)
session.cmd(cmd)
fetch_data_from_file = "sg_write_same --in {:s} --num=65536 --lba=131074 {:s}".format(yesfile, disk_name)
fetch_data_from_file = "(time {:s})".format(fetch_data_from_file)
realtime1 = get_excution_time(session, fetch_data_from_file)
logging.info("The real execution time of the command is:{:f}".format(realtime1))
if params.get("disk_type") == "scsi_debug":
bitmap = thin_provisioning.get_allocation_bitmap()
logging.debug("Block allocation bitmap is: {}".format(bitmap))
else:
output = process.system_output("qemu-img map --output=json {:s}".format(disk_name))
logging.debug("json map: {}".format(output))
time.sleep(0.1)
fetch_data_from_zero_device = "sg_write_same --in /dev/zero --num=65534 --lba=196608 {:s}".format(disk_name)
fetch_data_from_zero_device = "(time {:s})".format(fetch_data_from_zero_device)
realtime2 = get_excution_time(session, fetch_data_from_zero_device)
logging.info("The real execution time of the command is {:f}".format(realtime2))
out3 = session.cmd_output("sg_write_same --in /dev/zero --num=0 --lba=128 {:s}".format(disk_name))
logging.debug(out3)
if re.search(r"bad field in Write same", out3) is None:
raise exceptions.TestFail("sg_write_same command fails. output is {}".format(out3))
if realtime2 > realtime1:
raise exceptions.TestFail("time used is much longger")
thin_provisioning.destroy_vm(env)
if params.get("disk_type") == "scsi_debug":
disk_name = thin_provisioning.get_scsi_disk()[1]
params["image_name_image_test"] = disk_name
else:
disk_name = os.path.join(data_dir.get_data_dir(), params.get("image_name_image_test"))
disk_name = "{:s}.raw".format(disk_name)
params["start_vm"] = "yes"
vm_name = params["main_vm"]
env_process.preprocess_vm(test, params, env, vm_name)
vm = env.get_vm(vm_name)
vm.verify_alive()
timeout = float(params.get("login_timeout", 240))
session = vm.wait_for_login(timeout=timeout)
guest_disk_name = thin_provisioning.get_scsi_disk(session)[1]
run_sg_utils(guest_disk_name, session)
guest_sha1 = session.cmd_output("sha1sum {:s}".format(guest_disk_name)).split()[0]
host_sha1 = process.system_output("sha1sum {:s}".format(disk_name)).split()[0]
if guest_sha1 != host_sha1:
raise exceptions.TestFail("after sg_writesame, image hash value becomes different between guest and host ")
session.close()
if vm:
vm.destroy()
|
[
"qemu.tests.thin_provisioning.get_allocation_bitmap",
"virttest.data_dir.get_data_dir",
"virttest.env_process.preprocess_vm",
"logging.debug",
"qemu.tests.thin_provisioning.get_scsi_disk",
"qemu.tests.thin_provisioning.destroy_vm",
"avocado.core.exceptions.TestError",
"time.sleep",
"avocado.core.exceptions.TestFail",
"re.search"
] |
[((3389, 3422), 'qemu.tests.thin_provisioning.destroy_vm', 'thin_provisioning.destroy_vm', (['env'], {}), '(env)\n', (3417, 3422), False, 'from qemu.tests import thin_provisioning\n'), ((3801, 3854), 'virttest.env_process.preprocess_vm', 'env_process.preprocess_vm', (['test', 'params', 'env', 'vm_name'], {}), '(test, params, env, vm_name)\n', (3826, 3854), False, 'from virttest import env_process\n'), ((2602, 2617), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (2612, 2617), False, 'import time\n'), ((3103, 3122), 'logging.debug', 'logging.debug', (['out3'], {}), '(out3)\n', (3116, 3122), False, 'import logging\n'), ((4031, 4071), 'qemu.tests.thin_provisioning.get_scsi_disk', 'thin_provisioning.get_scsi_disk', (['session'], {}), '(session)\n', (4062, 4071), False, 'from qemu.tests import thin_provisioning\n'), ((4335, 4446), 'avocado.core.exceptions.TestFail', 'exceptions.TestFail', (['"""after sg_writesame, image hash value becomes different between guest and host """'], {}), "(\n 'after sg_writesame, image hash value becomes different between guest and host '\n )\n", (4354, 4446), False, 'from avocado.core import exceptions\n'), ((2309, 2350), 'qemu.tests.thin_provisioning.get_allocation_bitmap', 'thin_provisioning.get_allocation_bitmap', ([], {}), '()\n', (2348, 2350), False, 'from qemu.tests import thin_provisioning\n'), ((3134, 3176), 're.search', 're.search', (['"""bad field in Write same"""', 'out3'], {}), "('bad field in Write same', out3)\n", (3143, 3176), False, 'import re\n'), ((3335, 3383), 'avocado.core.exceptions.TestFail', 'exceptions.TestFail', (['"""time used is much longger"""'], {}), "('time used is much longger')\n", (3354, 3383), False, 'from avocado.core import exceptions\n'), ((3491, 3524), 'qemu.tests.thin_provisioning.get_scsi_disk', 'thin_provisioning.get_scsi_disk', ([], {}), '()\n', (3522, 3524), False, 'from qemu.tests import thin_provisioning\n'), ((3623, 3646), 'virttest.data_dir.get_data_dir', 'data_dir.get_data_dir', ([], {}), '()\n', (3644, 3646), False, 'from virttest import data_dir\n'), ((1208, 1277), 'avocado.core.exceptions.TestError', 'exceptions.TestError', (["('Unable to read realtime, cmd output: %s' % out)"], {}), "('Unable to read realtime, cmd output: %s' % out)\n", (1228, 1277), False, 'from avocado.core import exceptions\n'), ((1135, 1170), 're.search', 're.search', (['"""real\\\\s+\\\\dm(.*)s"""', 'out'], {}), "('real\\\\s+\\\\dm(.*)s', out)\n", (1144, 1170), False, 'import re\n')]
|
import sys
sys.path.append('~/Func2Wav/FuncToWav/src')
|
[
"sys.path.append"
] |
[((11, 54), 'sys.path.append', 'sys.path.append', (['"""~/Func2Wav/FuncToWav/src"""'], {}), "('~/Func2Wav/FuncToWav/src')\n", (26, 54), False, 'import sys\n')]
|
# python
import warnings
# Third party imports
import numpy as np
# grAdapt
from .base import Initial
from grAdapt.utils.sampling import sample_corner_bounds
class VerticesForceRandom(Initial):
"""
Samples all vertices if n_evals >= 2 ** len(bounds).
Else, a subset of vertices is sampled.
"""
def __init__(self, sampling_method):
"""
Parameters
----------
sampling_method : grAdapt.sampling.equidistributed Object
Sample low discrepancy sequences when initial point method is not feasible
"""
super().__init__(sampling_method)
def sample(self, bounds, n_evals):
"""Returns a numpy array of sampled points.
        Includes corner points (vertices) of the hypercube/search space, or a random
        subset of them when not all 2 ** len(bounds) vertices fit into n_evals.
Parameters
----------
bounds : list of tuples or list of grAdapt.space.datatype.base
Each tuple in the list defines the bounds for the corresponding variable
Example: [(1, 2), (2, 3), (-1, 4)...]
n_evals : int
number of initial points sampled by method
Returns
-------
(self.n_evals, len(self.bounds)) numpy array
"""
super().sample(bounds, n_evals)
if 2 ** len(self.bounds) >= self.n_evals:
# sample corner points first which fits in n_evals
d_tilde = int(np.floor(np.log2(self.n_evals)))
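            # Worked example (hypothetical numbers): n_evals=10 in 5 dimensions gives
            # d_tilde = floor(log2(10)) = 3, so the 2**3 = 8 vertices of the first three
            # dimensions are enumerated and the remaining coordinates are fixed below.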
corners_d_tilde = sample_corner_bounds(self.bounds[:d_tilde]) # (2 ** d_tilde, d_tilde) array
n_tilde = 2 ** d_tilde
# sample random fixed corner points
random_binary_array = np.random.randint(2, size=(len(self.bounds),))
remainder_bounds = self.bounds[d_tilde:]
fix_corners = np.zeros((1, len(remainder_bounds)))
for i in range(len(remainder_bounds)):
if random_binary_array[i] == 0:
fix_corners[0][i] = remainder_bounds[i][0]
else:
fix_corners[0][i] = remainder_bounds[i][1]
fix_corners_2d = np.tile(fix_corners, (n_tilde, 1)) # (n, d-d_tilde)
# corner points with fixed rest dimensions
corner_points_fixed = np.hstack((corners_d_tilde, fix_corners_2d))
# because 2 ** n_tilde <= n, sample n - n_tilde
if self.n_evals - n_tilde > 0:
random_points = self.sampling_method.sample(bounds=self.bounds,
n=(self.n_evals - n_tilde),
x_history=corner_points_fixed)
return np.vstack((corner_points_fixed, random_points))
else:
return corner_points_fixed
else:
corner_points = sample_corner_bounds(self.bounds)
num_corner_points = corner_points.shape[0]
random_points = self.sampling_method.sample(bounds=self.bounds,
n=(self.n_evals - num_corner_points),
x_history=corner_points)
return np.vstack((corner_points, random_points))
|
[
"numpy.log2",
"numpy.hstack",
"grAdapt.utils.sampling.sample_corner_bounds",
"numpy.tile",
"numpy.vstack"
] |
[((1438, 1481), 'grAdapt.utils.sampling.sample_corner_bounds', 'sample_corner_bounds', (['self.bounds[:d_tilde]'], {}), '(self.bounds[:d_tilde])\n', (1458, 1481), False, 'from grAdapt.utils.sampling import sample_corner_bounds\n'), ((2071, 2105), 'numpy.tile', 'np.tile', (['fix_corners', '(n_tilde, 1)'], {}), '(fix_corners, (n_tilde, 1))\n', (2078, 2105), True, 'import numpy as np\n'), ((2214, 2258), 'numpy.hstack', 'np.hstack', (['(corners_d_tilde, fix_corners_2d)'], {}), '((corners_d_tilde, fix_corners_2d))\n', (2223, 2258), True, 'import numpy as np\n'), ((2797, 2830), 'grAdapt.utils.sampling.sample_corner_bounds', 'sample_corner_bounds', (['self.bounds'], {}), '(self.bounds)\n', (2817, 2830), False, 'from grAdapt.utils.sampling import sample_corner_bounds\n'), ((3157, 3198), 'numpy.vstack', 'np.vstack', (['(corner_points, random_points)'], {}), '((corner_points, random_points))\n', (3166, 3198), True, 'import numpy as np\n'), ((2645, 2692), 'numpy.vstack', 'np.vstack', (['(corner_points_fixed, random_points)'], {}), '((corner_points_fixed, random_points))\n', (2654, 2692), True, 'import numpy as np\n'), ((1384, 1405), 'numpy.log2', 'np.log2', (['self.n_evals'], {}), '(self.n_evals)\n', (1391, 1405), True, 'import numpy as np\n')]
|
import numpy as np
import pickle
class onehot:
def __init__(self, sentences):
self.__sentences = sentences
self.__data = {}
self.__count = {}
self.__build()
def __build(self):
self.__word_num = 1
for sentence in self.__sentences:
for word in sentence:
if word in self.__data:
self.__count[word] += 1
else:
self.__count[word] = 1
self.__data[word] = self.__word_num
self.__word_num += 1
def __getitem__(self, word):
if word not in self.__data:
print('Error! The word not in it\'s map!')
else:
ret = np.zeros((self.__word_num - 1, 1))
ret[self.__data[word] - 1] = 1
return ret
def get_voca_size(self):
return self.__word_num - 1
def get_word_frequency(self, word):
if word not in self.__data:
print('Error! The word not in it\'s map!')
else:
return self.__count[word]
def get_index_of_word(self, word):
if word not in self.__data:
print('Error! The word not in it\'s map!')
else:
return self.__data[word] - 1
|
[
"numpy.zeros"
] |
[((748, 782), 'numpy.zeros', 'np.zeros', (['(self.__word_num - 1, 1)'], {}), '((self.__word_num - 1, 1))\n', (756, 782), True, 'import numpy as np\n')]
|
#! /usr/bin/env python2.7
import os
from app import app
# Change working directory
os.chdir(os.path.dirname(__file__))
# Run application
app.run(debug=True)
|
[
"os.path.dirname",
"app.app.run"
] |
[((138, 157), 'app.app.run', 'app.run', ([], {'debug': '(True)'}), '(debug=True)\n', (145, 157), False, 'from app import app\n'), ((93, 118), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (108, 118), False, 'import os\n')]
|
import datetime
from operator import attrgetter
from fastapi import APIRouter, HTTPException
from models import user as user_model, match
from resources.crud import read, create, custom
from schemas import user as user_schemas
from schemas.match import Match, FilterParams
from . import session_dep
match_router = APIRouter()
@match_router.post('/user/{user_id}/match/', response_model=user_schemas.UserGet, status_code=201)
async def add_new_match(user_id: int, filter_params: FilterParams, db=session_dep):
user_data = read.read_single_resource(model=user_model.User, identifier='id', value=user_id, db=db)
# check if user has already matched in the past 3 days
if user_data.last_matched_time:
three_days_after_match = user_data.last_matched_time + datetime.timedelta(days=3)
current_time = datetime.datetime.now()
if current_time < three_days_after_match:
next_valid_date = datetime.date(day=three_days_after_match.day, month=three_days_after_match.month,
year=three_days_after_match.year).strftime('%A %d %B %Y')
raise HTTPException(status_code=403,
detail=f"You've already matched within the past 3 days. Wait till {next_valid_date}")
# run matching algorithm
matched_user = custom.match_user(user_id=user_id, filter_params=filter_params, db=db)
# create Match Pydantic models
current_match_data = Match(current_user_id=user_id, other_user_id=matched_user.id,
other_user_name=f"{matched_user.first_name} {matched_user.last_name}")
other_match_data = Match(current_user_id=matched_user.id, other_user_id=user_id,
other_user_name=f"{user_data.first_name} {user_data.last_name}")
# create match objects in the database
current_match = create.create_single_isolated_resource(model=match.Match, data=current_match_data, db=db)
other_match = create.create_single_isolated_resource(model=match.Match, data=other_match_data, db=db)
# update last_matched_time for each user
user_data.last_matched_time = datetime.datetime.now()
matched_user.last_matched_time = datetime.datetime.now()
# commit all changes in the database
db.add(current_match)
db.add(other_match)
db.commit()
db.refresh(user_data)
return matched_user
@match_router.get("/user/{user_id}/match/latest/", response_model=user_schemas.UserGet, status_code=200)
async def get_latest_match(user_id: int, db=session_dep):
user_data = read.read_single_resource(model=user_model.User, identifier='id', value=user_id, db=db)
if user_data is None or user_data.previous_matches == []:
raise HTTPException(status_code=404, detail='No matches made yet!')
latest_match = max(user_data.previous_matches, key=attrgetter('matched_at'))
return read.read_single_resource(model=user_model.User, identifier='id', value=latest_match.other_user_id, db=db)
|
[
"fastapi.HTTPException",
"datetime.date",
"resources.crud.create.create_single_isolated_resource",
"operator.attrgetter",
"datetime.timedelta",
"resources.crud.custom.match_user",
"schemas.match.Match",
"datetime.datetime.now",
"resources.crud.read.read_single_resource",
"fastapi.APIRouter"
] |
[((317, 328), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (326, 328), False, 'from fastapi import APIRouter, HTTPException\n'), ((530, 622), 'resources.crud.read.read_single_resource', 'read.read_single_resource', ([], {'model': 'user_model.User', 'identifier': '"""id"""', 'value': 'user_id', 'db': 'db'}), "(model=user_model.User, identifier='id', value=\n user_id, db=db)\n", (555, 622), False, 'from resources.crud import read, create, custom\n'), ((1332, 1402), 'resources.crud.custom.match_user', 'custom.match_user', ([], {'user_id': 'user_id', 'filter_params': 'filter_params', 'db': 'db'}), '(user_id=user_id, filter_params=filter_params, db=db)\n', (1349, 1402), False, 'from resources.crud import read, create, custom\n'), ((1464, 1600), 'schemas.match.Match', 'Match', ([], {'current_user_id': 'user_id', 'other_user_id': 'matched_user.id', 'other_user_name': 'f"""{matched_user.first_name} {matched_user.last_name}"""'}), "(current_user_id=user_id, other_user_id=matched_user.id,\n other_user_name=f'{matched_user.first_name} {matched_user.last_name}')\n", (1469, 1600), False, 'from schemas.match import Match, FilterParams\n'), ((1651, 1781), 'schemas.match.Match', 'Match', ([], {'current_user_id': 'matched_user.id', 'other_user_id': 'user_id', 'other_user_name': 'f"""{user_data.first_name} {user_data.last_name}"""'}), "(current_user_id=matched_user.id, other_user_id=user_id,\n other_user_name=f'{user_data.first_name} {user_data.last_name}')\n", (1656, 1781), False, 'from schemas.match import Match, FilterParams\n'), ((1871, 1965), 'resources.crud.create.create_single_isolated_resource', 'create.create_single_isolated_resource', ([], {'model': 'match.Match', 'data': 'current_match_data', 'db': 'db'}), '(model=match.Match, data=\n current_match_data, db=db)\n', (1909, 1965), False, 'from resources.crud import read, create, custom\n'), ((1979, 2071), 'resources.crud.create.create_single_isolated_resource', 'create.create_single_isolated_resource', ([], {'model': 'match.Match', 'data': 'other_match_data', 'db': 'db'}), '(model=match.Match, data=\n other_match_data, db=db)\n', (2017, 2071), False, 'from resources.crud import read, create, custom\n'), ((2147, 2170), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (2168, 2170), False, 'import datetime\n'), ((2208, 2231), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (2229, 2231), False, 'import datetime\n'), ((2574, 2666), 'resources.crud.read.read_single_resource', 'read.read_single_resource', ([], {'model': 'user_model.User', 'identifier': '"""id"""', 'value': 'user_id', 'db': 'db'}), "(model=user_model.User, identifier='id', value=\n user_id, db=db)\n", (2599, 2666), False, 'from resources.crud import read, create, custom\n'), ((2895, 3006), 'resources.crud.read.read_single_resource', 'read.read_single_resource', ([], {'model': 'user_model.User', 'identifier': '"""id"""', 'value': 'latest_match.other_user_id', 'db': 'db'}), "(model=user_model.User, identifier='id', value=\n latest_match.other_user_id, db=db)\n", (2920, 3006), False, 'from resources.crud import read, create, custom\n'), ((827, 850), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (848, 850), False, 'import datetime\n'), ((2739, 2800), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(404)', 'detail': '"""No matches made yet!"""'}), "(status_code=404, detail='No matches made yet!')\n", (2752, 2800), False, 'from fastapi import APIRouter, HTTPException\n'), ((777, 803), 'datetime.timedelta', 
'datetime.timedelta', ([], {'days': '(3)'}), '(days=3)\n', (795, 803), False, 'import datetime\n'), ((1134, 1260), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(403)', 'detail': 'f"""You\'ve already matched within the past 3 days. Wait till {next_valid_date}"""'}), '(status_code=403, detail=\n f"You\'ve already matched within the past 3 days. Wait till {next_valid_date}"\n )\n', (1147, 1260), False, 'from fastapi import APIRouter, HTTPException\n'), ((2857, 2881), 'operator.attrgetter', 'attrgetter', (['"""matched_at"""'], {}), "('matched_at')\n", (2867, 2881), False, 'from operator import attrgetter\n'), ((932, 1052), 'datetime.date', 'datetime.date', ([], {'day': 'three_days_after_match.day', 'month': 'three_days_after_match.month', 'year': 'three_days_after_match.year'}), '(day=three_days_after_match.day, month=three_days_after_match.\n month, year=three_days_after_match.year)\n', (945, 1052), False, 'import datetime\n')]
|
import scrapy
from wuba.items import WubaItem
from selenium import webdriver
from lxml import etree
from selenium.webdriver.chrome.options import Options # headless browser
chrome_options = Options()
chrome_options.add_argument('--headless')
chrome_options.add_argument('--disable-gpu')
from selenium.webdriver import ChromeOptions # evade detection
option = ChromeOptions()
option.add_experimental_option('excludeSwitches', ['enable-automation'])
bot = webdriver.Chrome(executable_path='chromedriver.exe', chrome_options=chrome_options, options=option)
class Wuba1Spider(scrapy.Spider):
name = 'wuba_1'
# allowed_domains = ['www.xxx.com']
start_urls = [
'https://xianyang.58.com/job/?param7503=1&from=yjz2_zhaopin&utm_source=market&spm=u-2d2yxv86y3v43nkddh1.BDPCPZ_BT&PGTID=0d202408-01d1-d86d-1573-f4faec6defb1&ClickID=4']
    # set up a generic url template
url = 'https://xianyang.58.com/job/pn%d/?param7503=1&from=yjz2_zhaopin&utm_source=market&spm=u-2d2yxv86y3v43nkddh1.BDPCPZ_BT&PGTID=0d302408-01d1-d5f6-40be-8cb0310f3453&ClickID=3'
page_num = 2
def parse(self, response):
li_list = response.xpath('//ul[@id="list_con"]/li')
for li in li_list:
            # instantiate an item
item = WubaItem()
            # parse the job title
name = li.xpath('./div/div/a/span[2]/text()')[0].extract()
            # parse the detail page url
deta_url = li.xpath('./div/div/a/@href')[0].extract()
deta_url = ''.join(deta_url)
new_url = str(deta_url)
item['new_url'] = new_url
bot.get(new_url)
page = bot.page_source
tree = etree.HTML(page)
            # parse the salary
gongci = tree.xpath('/html/body/div[3]/div[3]/div[1]/div[2]/span[2]//text()')
gongci = ''.join(gongci)
            # parse the education level
yueli = tree.xpath('/html/body/div[3]/div[3]/div[1]/div[4]/span[2]//text()')[0]
# /html/body/div[3]/div[3]/div[1]/div[4]/span[2]
            # submit the item
item['gongci'] = gongci
item['yueli'] = yueli
item['name'] = name
yield item
        # handle pagination
if self.page_num <= 5:
num_url = format(self.url%self.page_num)
self.page_num+=1
yield scrapy.Request(url=num_url,callback=self.parse)
|
[
"selenium.webdriver.chrome.options.Options",
"scrapy.Request",
"wuba.items.WubaItem",
"selenium.webdriver.ChromeOptions",
"selenium.webdriver.Chrome",
"lxml.etree.HTML"
] |
[((180, 189), 'selenium.webdriver.chrome.options.Options', 'Options', ([], {}), '()\n', (187, 189), False, 'from selenium.webdriver.chrome.options import Options\n'), ((339, 354), 'selenium.webdriver.ChromeOptions', 'ChromeOptions', ([], {}), '()\n', (352, 354), False, 'from selenium.webdriver import ChromeOptions\n'), ((434, 538), 'selenium.webdriver.Chrome', 'webdriver.Chrome', ([], {'executable_path': '"""chromedriver.exe"""', 'chrome_options': 'chrome_options', 'options': 'option'}), "(executable_path='chromedriver.exe', chrome_options=\n chrome_options, options=option)\n", (450, 538), False, 'from selenium import webdriver\n'), ((1201, 1211), 'wuba.items.WubaItem', 'WubaItem', ([], {}), '()\n', (1209, 1211), False, 'from wuba.items import WubaItem\n'), ((1590, 1606), 'lxml.etree.HTML', 'etree.HTML', (['page'], {}), '(page)\n', (1600, 1606), False, 'from lxml import etree\n'), ((2233, 2281), 'scrapy.Request', 'scrapy.Request', ([], {'url': 'num_url', 'callback': 'self.parse'}), '(url=num_url, callback=self.parse)\n', (2247, 2281), False, 'import scrapy\n')]
|
#!/usr/bin/python
# (because /usr/bin/env python does not work when called from IDE on Windows)
#
# Copyright (c) 2012 <NAME>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os, sys, getopt, shutil
inputFileName = 'test.sl'
outputFileName = 'main.c'
architecture = 'testarch'
#architecture = 'msp430'
#architecture = 'pc'
targetOS = 'mansos'
pathToOS = '../..'
verboseMode = False
testMode = False
def exitProgram(code):
if not testMode:
exit(code)
print ("Would exit from program with code " + str(code))
raise Exception
def importsOk():
plyModuleOK = True # Python Lex Yacc - for compilation
try:
import ply
except ImportError:
plyModuleOK = False
if not plyModuleOK:
if os.name == 'posix':
installStr = "Make sure you have installed required modules. Run:\n\tsudo apt-get install"
else:
installStr = "Make sure you have installed modules:"
print ("Cannot run SEAL parser:")
if not plyModuleOK:
print ("\tPLY module not found")
installStr += " python-ply"
print (installStr)
return False
return True
def printLine(line):
sys.stderr.write(line)
def help(isError):
sys.stderr.write("Usage:\n")
sys.stderr.write(" -a <arch>, --arch Target architecture (defalt: {})\n".format(architecture))
sys.stderr.write(" -t <target>, --target Target OS (default: {0})\n".format(targetOS))
sys.stderr.write(" -o, --output <file> Output to file, '-' for stdout (default: {0})\n".format(outputFileName))
sys.stderr.write(" -p, --path <path> Path to the target OS installation (default: {0})\n".format(pathToOS))
sys.stderr.write(" -V, --verbose Verbose mode\n")
sys.stderr.write(" -v, --version Print version and exit\n")
sys.stderr.write(" -c, --continue Continue on errors (test mode)\n")
sys.stderr.write(" -h, --help Print this help\n")
sys.exit(int(isError))
def parseCommandLine(argv):
global inputFileName
global outputFileName
global architecture
global verboseMode
global testMode
global pathToOS
try:
opts, args = getopt.getopt(sys.argv[1:], "a:cho:p:t:Vv",
["arch=", "continue", "help", "output=",
"path=", "target=", "verbose", "version"])
except getopt.GetoptError as err:
# print help information and exit:
print (str(err)) # will print something like "option -a not recognized"
help(True)
isError = False
showHelp = False
for o, a in opts:
if o in ("-a", "--arch"):
architecture = a.lower()
if o in ("-t", "--target"):
targetOS = a.lower()
elif o in ("-v", "--version"):
versionFile = os.path.join("../..", "doc/VERSION")
release = "Unknown"
date = "Unknown"
try:
f = open(versionFile, "r")
lines = f.readlines()
f.close()
if len(lines) > 0:
release = lines[0].strip()
if len(lines) > 1:
date = lines[1].strip()
except:
pass
print ("MansOS version: " + release + " (Release date: " + date + ")")
sys.exit(0)
elif o in ("-V", "--verbose"):
verboseMode = True
elif o in ("-h", "--help"):
showHelp = True
elif o in ("-o", "--output"):
outputFileName = a
elif o in ("-p", "--path"):
pathToOS = a
elif o in ("-c", "--continue"):
testMode = True
if len(args):
inputFileName = args[0]
args = args[1:]
if len(args):
sys.stderr.write("Too many arguments given. ({0} remaining not parsed)\n".format(args))
isError = True
if showHelp or isError:
help(isError)
def main():
if not importsOk():
exit(1)
# import pathname where seal package is located
selfDirname = os.path.dirname(os.path.realpath(__file__))
sys.path.append(os.path.join(selfDirname, pathToOS, 'tools'))
sys.path.append(os.path.join(selfDirname, pathToOS, 'tools', 'seal', 'components'))
from seal import generator
# in case this is used multiple times
generator.components.clearGlobals()
parseCommandLine(sys.argv)
# for extension modules
sys.path.append(os.path.join(os.getcwd(), os.path.dirname(inputFileName)))
# read file to-be-parsed
with open(inputFileName, 'r') as inputFile:
contents = inputFile.read()
if contents == None:
sys.stderr.write('Failed to read file {0}'.format(inputFileName))
exitProgram(1)
# parse input file (SEAL code)
parser = generator.SealParser(architecture, printLine, verboseMode)
parser.run(contents)
if parser.isError:
exitProgram(1) # do not generate output file in this case
# generate C code to an output file
g = generator.createGenerator(targetOS)
if g is None:
sys.stderr.write('Failed to find code generator for target OS {0}'.format(targetOS))
exitProgram(1)
if outputFileName == '-':
g.generate(sys.stdout)
else:
outputDirName = os.path.dirname(outputFileName)
if len(outputDirName):
outputDirName += os.sep
if not os.path.exists(outputDirName):
os.makedirs(outputDirName)
numDirs = len(os.path.normpath(outputFileName).split(os.sep)) - 1
dirname = os.path.dirname(os.path.realpath(outputFileName))
if os.path.isabs(pathToOS):
makefilePathToOS = pathToOS.strip('\\'); # \ is special character, creates problems in makefile where this path is inserted
else:
makefilePathToOS = os.path.normpath(dirname + os.sep + ('/..' * numDirs) + os.sep + pathToOS)
with open(outputFileName, 'w') as outputFile:
g.generate(outputFile)
with open(outputDirName + "Makefile", 'w') as outputFile:
g.generateMakefile(outputFile, outputFileName, makefilePathToOS)
# use SEAL application's config file as the basis
try:
shutil.copyfile(outputDirName + ".." + os.sep + "config", outputDirName + "config-tmp")
except IOError as e:
try:
os.remove(outputDirName + "config-tmp")
except OSError as e:
pass
with open(outputDirName + "config-tmp", 'a+') as outputFile:
g.generateConfigFile(outputFile)
# replace the config file only if different: saves rebuiding time.
try:
isSame = (os.system("cmp -s " + outputDirName + "config-tmp " + outputDirName + "config") == 0)
except:
isSame = False
if not isSame:
try:
shutil.move(outputDirName + "config-tmp", outputDirName + "config")
except Exception as ex:
print (ex)
if generator.components.componentRegister.isError:
# cleanup
os.remove(outputFileName)
os.remove(outputDirName + "Makefile")
os.remove(outputDirName + "config")
return -1
if g.isComponentUsed("network"):
g.generateBaseStationCode(os.path.join(outputDirName, 'bs'), makefilePathToOS)
g.generateForwarderCode(os.path.join(outputDirName, 'fwd'), makefilePathToOS)
g.generateCollectorCode(os.path.join(outputDirName, 'coll'), makefilePathToOS)
elif g.isComponentUsed("radio"):
g.generateBaseStationCode(os.path.join(outputDirName, 'bs'), makefilePathToOS)
if g.isComponentUsed("sdcard"):
g.generateRaw2Csv(outputDirName, os.path.join(selfDirname, 'raw2csv-template.py'))
return 0
if __name__ == '__main__':
exit(main())
|
[
"os.path.isabs",
"os.remove",
"getopt.getopt",
"os.makedirs",
"os.getcwd",
"seal.generator.components.clearGlobals",
"os.path.realpath",
"os.path.dirname",
"os.path.exists",
"os.system",
"seal.generator.SealParser",
"os.path.normpath",
"shutil.move",
"shutil.copyfile",
"seal.generator.createGenerator",
"sys.stderr.write",
"os.path.join",
"sys.exit"
] |
[((2465, 2487), 'sys.stderr.write', 'sys.stderr.write', (['line'], {}), '(line)\n', (2481, 2487), False, 'import os, sys, getopt, shutil\n'), ((2512, 2540), 'sys.stderr.write', 'sys.stderr.write', (['"""Usage:\n"""'], {}), "('Usage:\\n')\n", (2528, 2540), False, 'import os, sys, getopt, shutil\n'), ((2977, 3035), 'sys.stderr.write', 'sys.stderr.write', (['""" -V, --verbose Verbose mode\n"""'], {}), "(' -V, --verbose Verbose mode\\n')\n", (2993, 3035), False, 'import os, sys, getopt, shutil\n'), ((3040, 3108), 'sys.stderr.write', 'sys.stderr.write', (['""" -v, --version Print version and exit\n"""'], {}), "(' -v, --version Print version and exit\\n')\n", (3056, 3108), False, 'import os, sys, getopt, shutil\n'), ((3113, 3189), 'sys.stderr.write', 'sys.stderr.write', (['""" -c, --continue Continue on errors (test mode)\n"""'], {}), "(' -c, --continue Continue on errors (test mode)\\n')\n", (3129, 3189), False, 'import os, sys, getopt, shutil\n'), ((3194, 3255), 'sys.stderr.write', 'sys.stderr.write', (['""" -h, --help Print this help\n"""'], {}), "(' -h, --help Print this help\\n')\n", (3210, 3255), False, 'import os, sys, getopt, shutil\n'), ((5635, 5670), 'seal.generator.components.clearGlobals', 'generator.components.clearGlobals', ([], {}), '()\n', (5668, 5670), False, 'from seal import generator\n'), ((6096, 6154), 'seal.generator.SealParser', 'generator.SealParser', (['architecture', 'printLine', 'verboseMode'], {}), '(architecture, printLine, verboseMode)\n', (6116, 6154), False, 'from seal import generator\n'), ((6318, 6353), 'seal.generator.createGenerator', 'generator.createGenerator', (['targetOS'], {}), '(targetOS)\n', (6343, 6353), False, 'from seal import generator\n'), ((3481, 3612), 'getopt.getopt', 'getopt.getopt', (['sys.argv[1:]', '"""a:cho:p:t:Vv"""', "['arch=', 'continue', 'help', 'output=', 'path=', 'target=', 'verbose',\n 'version']"], {}), "(sys.argv[1:], 'a:cho:p:t:Vv', ['arch=', 'continue', 'help',\n 'output=', 'path=', 'target=', 'verbose', 'version'])\n", (3494, 3612), False, 'import os, sys, getopt, shutil\n'), ((5375, 5401), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (5391, 5401), False, 'import os, sys, getopt, shutil\n'), ((5423, 5467), 'os.path.join', 'os.path.join', (['selfDirname', 'pathToOS', '"""tools"""'], {}), "(selfDirname, pathToOS, 'tools')\n", (5435, 5467), False, 'import os, sys, getopt, shutil\n'), ((5489, 5555), 'os.path.join', 'os.path.join', (['selfDirname', 'pathToOS', '"""tools"""', '"""seal"""', '"""components"""'], {}), "(selfDirname, pathToOS, 'tools', 'seal', 'components')\n", (5501, 5555), False, 'import os, sys, getopt, shutil\n'), ((6584, 6615), 'os.path.dirname', 'os.path.dirname', (['outputFileName'], {}), '(outputFileName)\n', (6599, 6615), False, 'import os, sys, getopt, shutil\n'), ((6930, 6953), 'os.path.isabs', 'os.path.isabs', (['pathToOS'], {}), '(pathToOS)\n', (6943, 6953), False, 'import os, sys, getopt, shutil\n'), ((5765, 5776), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (5774, 5776), False, 'import os, sys, getopt, shutil\n'), ((5778, 5808), 'os.path.dirname', 'os.path.dirname', (['inputFileName'], {}), '(inputFileName)\n', (5793, 5808), False, 'import os, sys, getopt, shutil\n'), ((6885, 6917), 'os.path.realpath', 'os.path.realpath', (['outputFileName'], {}), '(outputFileName)\n', (6901, 6917), False, 'import os, sys, getopt, shutil\n'), ((7136, 7208), 'os.path.normpath', 'os.path.normpath', (["(dirname + os.sep + '/..' * numDirs + os.sep + pathToOS)"], {}), "(dirname + os.sep + '/..' 
* numDirs + os.sep + pathToOS)\n", (7152, 7208), False, 'import os, sys, getopt, shutil\n'), ((7528, 7619), 'shutil.copyfile', 'shutil.copyfile', (["(outputDirName + '..' + os.sep + 'config')", "(outputDirName + 'config-tmp')"], {}), "(outputDirName + '..' + os.sep + 'config', outputDirName +\n 'config-tmp')\n", (7543, 7619), False, 'import os, sys, getopt, shutil\n'), ((8408, 8433), 'os.remove', 'os.remove', (['outputFileName'], {}), '(outputFileName)\n', (8417, 8433), False, 'import os, sys, getopt, shutil\n'), ((8446, 8483), 'os.remove', 'os.remove', (["(outputDirName + 'Makefile')"], {}), "(outputDirName + 'Makefile')\n", (8455, 8483), False, 'import os, sys, getopt, shutil\n'), ((8496, 8531), 'os.remove', 'os.remove', (["(outputDirName + 'config')"], {}), "(outputDirName + 'config')\n", (8505, 8531), False, 'import os, sys, getopt, shutil\n'), ((4097, 4133), 'os.path.join', 'os.path.join', (['"""../.."""', '"""doc/VERSION"""'], {}), "('../..', 'doc/VERSION')\n", (4109, 4133), False, 'import os, sys, getopt, shutil\n'), ((4616, 4627), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (4624, 4627), False, 'import os, sys, getopt, shutil\n'), ((6702, 6731), 'os.path.exists', 'os.path.exists', (['outputDirName'], {}), '(outputDirName)\n', (6716, 6731), False, 'import os, sys, getopt, shutil\n'), ((6749, 6775), 'os.makedirs', 'os.makedirs', (['outputDirName'], {}), '(outputDirName)\n', (6760, 6775), False, 'import os, sys, getopt, shutil\n'), ((7998, 8077), 'os.system', 'os.system', (["('cmp -s ' + outputDirName + 'config-tmp ' + outputDirName + 'config')"], {}), "('cmp -s ' + outputDirName + 'config-tmp ' + outputDirName + 'config')\n", (8007, 8077), False, 'import os, sys, getopt, shutil\n'), ((8183, 8250), 'shutil.move', 'shutil.move', (["(outputDirName + 'config-tmp')", "(outputDirName + 'config')"], {}), "(outputDirName + 'config-tmp', outputDirName + 'config')\n", (8194, 8250), False, 'import os, sys, getopt, shutil\n'), ((8634, 8667), 'os.path.join', 'os.path.join', (['outputDirName', '"""bs"""'], {}), "(outputDirName, 'bs')\n", (8646, 8667), False, 'import os, sys, getopt, shutil\n'), ((8723, 8757), 'os.path.join', 'os.path.join', (['outputDirName', '"""fwd"""'], {}), "(outputDirName, 'fwd')\n", (8735, 8757), False, 'import os, sys, getopt, shutil\n'), ((8813, 8848), 'os.path.join', 'os.path.join', (['outputDirName', '"""coll"""'], {}), "(outputDirName, 'coll')\n", (8825, 8848), False, 'import os, sys, getopt, shutil\n'), ((9086, 9134), 'os.path.join', 'os.path.join', (['selfDirname', '"""raw2csv-template.py"""'], {}), "(selfDirname, 'raw2csv-template.py')\n", (9098, 9134), False, 'import os, sys, getopt, shutil\n'), ((7678, 7717), 'os.remove', 'os.remove', (["(outputDirName + 'config-tmp')"], {}), "(outputDirName + 'config-tmp')\n", (7687, 7717), False, 'import os, sys, getopt, shutil\n'), ((8947, 8980), 'os.path.join', 'os.path.join', (['outputDirName', '"""bs"""'], {}), "(outputDirName, 'bs')\n", (8959, 8980), False, 'import os, sys, getopt, shutil\n'), ((6799, 6831), 'os.path.normpath', 'os.path.normpath', (['outputFileName'], {}), '(outputFileName)\n', (6815, 6831), False, 'import os, sys, getopt, shutil\n')]
|
from typing import List, Dict
from . import _target_configurator_base
from .. import configurator_enums
import template
import optimize
import launch
from os import path
import yaml
import warnings
import time
import re
class SpiloPostgresConfigurator(_target_configurator_base.TargetConfigurator):
label = "spilo"
identifier_type = launch.IdentifierType.application
pod_fetch_dict = {
"spilo": launch.IdentifierType.application
}
def __init__(self, client: 'DBClient'):
super().__init__(client)
# FIXME: Fragile path for refactoring
self.config_root = path.abspath(path.join(path.dirname(__file__), "../../../config", "target_spilo_postgres"))
def deploy(self, config: Dict[str, object], kube_context: launch.KubeContext):
config.update({
"namespace_name": kube_context.namespace_name
})
for k, v in config["param_config"].items():
try:
identifier, param = k.split(":")
if identifier == "postgres":
config["postgres_config"][param] = v
else:
warnings.warn("Unrecognized {0} parameter: {1}".format(identifier, param))
except Exception:
warnings.warn("Unrecognized parameter: {}".format(k))
continue
kubeconfig_dir = template.get_tempdir_with_config(path.join(self.config_root, "kubernetes"), config)
self.open_temp_dirs.append(kubeconfig_dir) # TODO: This is only needed for the next line, clean up later?
with open(path.join(kubeconfig_dir.name, "minimal-manifest.yaml"), "r+") as manifest_config:
minimal_manifest_yaml = yaml.load(manifest_config, Loader=yaml.SafeLoader)
postgresql_spec = minimal_manifest_yaml["spec"]["postgresql"]
if "parameters" not in postgresql_spec:
# convert to string since the postgresql crd spec only accepts string type
postgresql_spec["parameters"] = {k: str(v) for k, v in config["postgres_config"].items()}
else:
postgresql_spec["parameters"].update({k: str(v) for k, v in config["postgres_config"].items()})
manifest_config.seek(0)
manifest_config.truncate(0)
manifest_config.write(yaml.dump(minimal_manifest_yaml))
# Waiting not necessary for CRD
kube_context.apply_kubectl_yaml_config(path.join(kubeconfig_dir.name, "zalando", "manifests", "postgresql.crd.yaml"), wait_for_ready=False)
time.sleep(1)
kube_context.apply_kubectl_yaml_config(kubeconfig_dir.name, wait_for_ready=False)
kube_context.apply_kubectl_yaml_config(path.join(kubeconfig_dir.name, "cluster-level-rbac-patch.yaml"), namespaced=False)
# Need to wait manually because zalando postgres operator uses a CustomResourceDefinition that is not easily parseable to get StatefulSets
kube_context._sts_wait("acid-minimal-cluster", config["postgres_replicas"])
def prepare(self, config: Dict[str, object], kube_context: launch.KubeContext, pod_ids: Dict[str, List[str]]):
if self.client is configurator_enums.DBClient.ycsb:
assert(len(pod_ids["spilo"]) > 0)
kube_context.copy_to_pod(pod_ids["spilo"][0], path.join(self.config_root, "client_ycsb", "init_table.sql"), "/init_table.sql")
while re.search("error", kube_context.run_command(pod_ids["spilo"][0], ["psql", "-U", "postgres"])):
time.sleep(1)
kube_context.run_command(pod_ids["spilo"][0], ["psql", "-U", "postgres", "-f", "/init_table.sql"])
if not re.search("now connected", kube_context.run_command(pod_ids["spilo"][0], ["psql", "-U", "postgres", "-c", r"\c test"])):
raise Exception("Table did not properly initialize. Logs:\n{}".format(kube_context.kubectl_subprocess(["logs", pod_ids["spilo"][0]])))
return
warnings.warn("Unable to prepare, no client match.")
def execute(self, config: Dict[str, object], kube_context: launch.KubeContext, pod_ids: Dict[str, List[str]]):
if self.client is configurator_enums.DBClient.ycsb:
# Not necessary to do anything once tables are configured for ycsb
return
warnings.warn("Unable to execute, no client match.")
|
[
"yaml.load",
"os.path.dirname",
"yaml.dump",
"time.sleep",
"warnings.warn",
"os.path.join"
] |
[((2555, 2568), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (2565, 2568), False, 'import time\n'), ((3953, 4005), 'warnings.warn', 'warnings.warn', (['"""Unable to prepare, no client match."""'], {}), "('Unable to prepare, no client match.')\n", (3966, 4005), False, 'import warnings\n'), ((4288, 4340), 'warnings.warn', 'warnings.warn', (['"""Unable to execute, no client match."""'], {}), "('Unable to execute, no client match.')\n", (4301, 4340), False, 'import warnings\n'), ((1409, 1450), 'os.path.join', 'path.join', (['self.config_root', '"""kubernetes"""'], {}), "(self.config_root, 'kubernetes')\n", (1418, 1450), False, 'from os import path\n'), ((1711, 1761), 'yaml.load', 'yaml.load', (['manifest_config'], {'Loader': 'yaml.SafeLoader'}), '(manifest_config, Loader=yaml.SafeLoader)\n', (1720, 1761), False, 'import yaml\n'), ((2446, 2523), 'os.path.join', 'path.join', (['kubeconfig_dir.name', '"""zalando"""', '"""manifests"""', '"""postgresql.crd.yaml"""'], {}), "(kubeconfig_dir.name, 'zalando', 'manifests', 'postgresql.crd.yaml')\n", (2455, 2523), False, 'from os import path\n'), ((2706, 2769), 'os.path.join', 'path.join', (['kubeconfig_dir.name', '"""cluster-level-rbac-patch.yaml"""'], {}), "(kubeconfig_dir.name, 'cluster-level-rbac-patch.yaml')\n", (2715, 2769), False, 'from os import path\n'), ((643, 665), 'os.path.dirname', 'path.dirname', (['__file__'], {}), '(__file__)\n', (655, 665), False, 'from os import path\n'), ((1592, 1647), 'os.path.join', 'path.join', (['kubeconfig_dir.name', '"""minimal-manifest.yaml"""'], {}), "(kubeconfig_dir.name, 'minimal-manifest.yaml')\n", (1601, 1647), False, 'from os import path\n'), ((2325, 2357), 'yaml.dump', 'yaml.dump', (['minimal_manifest_yaml'], {}), '(minimal_manifest_yaml)\n', (2334, 2357), False, 'import yaml\n'), ((3300, 3360), 'os.path.join', 'path.join', (['self.config_root', '"""client_ycsb"""', '"""init_table.sql"""'], {}), "(self.config_root, 'client_ycsb', 'init_table.sql')\n", (3309, 3360), False, 'from os import path\n'), ((3510, 3523), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (3520, 3523), False, 'import time\n')]
|
import pytest
from PTMCMCSampler.nompi4py import MPIDummy
class TestMPIDummp(object):
"""Test the MPIDummpy class
"""
def setup(self):
"""Setup the MPIDummy object
"""
self.mpidummy = MPIDummy()
def test_Get_rank(self):
"""Test the `Get_rank` method
"""
assert self.mpidummy.Get_rank() == 0
def test_Get_size(self):
"""Test the `Get_size` function
"""
assert self.mpidummy.Get_size() == 1
|
[
"PTMCMCSampler.nompi4py.MPIDummy"
] |
[((222, 232), 'PTMCMCSampler.nompi4py.MPIDummy', 'MPIDummy', ([], {}), '()\n', (230, 232), False, 'from PTMCMCSampler.nompi4py import MPIDummy\n')]
|
import discord
import main
from discord.ext import commands
from cogs.help import Help
class Clear(commands.Cog):
def __init__(self, bot):
"""Returns embeds for the clear command."""
self.bot = bot
@commands.group(invoke_without_command=True, case_insensitive=True, aliases=['cl', 'pg', 'purge'])
@commands.check(main.mod_group)
async def clear(self, ctx, num=None, num2=None):
if num is None:
return await Help.clear(self, ctx)
try:
user_men = str(ctx.message.raw_mentions[0])
except IndexError:
user_men = ''
if ctx.guild is None:
await main.error_embed(ctx, 'You cannot use this command in DMs')
else:
if (user_men == '') and (len(num) != 18) and (num.isdigit()): # make sure a number is given and its not an ID
int_num = int(num)
if int_num > 0:
await ctx.channel.purge(limit=int_num)
channel = await self.bot.fetch_channel(main.ids(3))
await main.log_embed(None, 'Bulk messages deleted', f'{ctx.author.mention} cleared {int_num} messages in {ctx.channel.mention}', channel, ctx.author)
print(f'{ctx.author.id} cleared {int_num} messages in {ctx.channel}')
else:
await main.error_embed(ctx, 'You need to give a positive non zero number')
else:
int_num2 = int(num2)
if int_num2 > 0:
limit = 0
if user_men != '':
clear_member = self.bot.get_user(int(user_men)) # get the user if they mentioned
elif (num.isdigit()) and (len(num) == 18):
clear_member = self.bot.get_user(int(num)) # get the user if they gave an ID
else:
clear_member = ctx.guild.get_member_named(num) # get the member if they gave a name with/without discrimitor
if clear_member is None:
await main.error_embed(ctx, 'The user you gave is either invalid or the name you gave is not a member')
else:
async for message in ctx.channel.history(limit=None):
limit += 1
if message.author == clear_member:
int_num2 -= 1
if int_num2 == 0:
break
def member_check(m):
return m.author == clear_member
await ctx.channel.purge(limit=limit, check=member_check)
try:
await ctx.message.delete() # delete the command
except discord.NotFound: # ignore error if it was already deleted
pass
channel = await self.bot.fetch_channel(main.ids(3))
await main.log_embed(ctx, 'Bulk messages deleted', f'{ctx.author.mention} cleared {num2} messages in {ctx.channel.mention} from {clear_member.mention}', channel, clear_member)
print(f'{ctx.author.id} cleared {num2} messages in {ctx.channel.id} from {clear_member.id}')
else:
await main.error_embed(ctx, 'You need to give a positive non zero number')
def setup(bot):
bot.add_cog(Clear(bot))
|
[
"discord.ext.commands.check",
"main.ids",
"main.log_embed",
"discord.ext.commands.group",
"main.error_embed",
"cogs.help.Help.clear"
] |
[((226, 328), 'discord.ext.commands.group', 'commands.group', ([], {'invoke_without_command': '(True)', 'case_insensitive': '(True)', 'aliases': "['cl', 'pg', 'purge']"}), "(invoke_without_command=True, case_insensitive=True, aliases=\n ['cl', 'pg', 'purge'])\n", (240, 328), False, 'from discord.ext import commands\n'), ((329, 359), 'discord.ext.commands.check', 'commands.check', (['main.mod_group'], {}), '(main.mod_group)\n', (343, 359), False, 'from discord.ext import commands\n'), ((462, 483), 'cogs.help.Help.clear', 'Help.clear', (['self', 'ctx'], {}), '(self, ctx)\n', (472, 483), False, 'from cogs.help import Help\n'), ((654, 713), 'main.error_embed', 'main.error_embed', (['ctx', '"""You cannot use this command in DMs"""'], {}), "(ctx, 'You cannot use this command in DMs')\n", (670, 713), False, 'import main\n'), ((1075, 1227), 'main.log_embed', 'main.log_embed', (['None', '"""Bulk messages deleted"""', 'f"""{ctx.author.mention} cleared {int_num} messages in {ctx.channel.mention}"""', 'channel', 'ctx.author'], {}), "(None, 'Bulk messages deleted',\n f'{ctx.author.mention} cleared {int_num} messages in {ctx.channel.mention}'\n , channel, ctx.author)\n", (1089, 1227), False, 'import main\n'), ((1357, 1425), 'main.error_embed', 'main.error_embed', (['ctx', '"""You need to give a positive non zero number"""'], {}), "(ctx, 'You need to give a positive non zero number')\n", (1373, 1425), False, 'import main\n'), ((3382, 3450), 'main.error_embed', 'main.error_embed', (['ctx', '"""You need to give a positive non zero number"""'], {}), "(ctx, 'You need to give a positive non zero number')\n", (3398, 3450), False, 'import main\n'), ((1036, 1047), 'main.ids', 'main.ids', (['(3)'], {}), '(3)\n', (1044, 1047), False, 'import main\n'), ((2089, 2190), 'main.error_embed', 'main.error_embed', (['ctx', '"""The user you gave is either invalid or the name you gave is not a member"""'], {}), "(ctx,\n 'The user you gave is either invalid or the name you gave is not a member')\n", (2105, 2190), False, 'import main\n'), ((3047, 3225), 'main.log_embed', 'main.log_embed', (['ctx', '"""Bulk messages deleted"""', 'f"""{ctx.author.mention} cleared {num2} messages in {ctx.channel.mention} from {clear_member.mention}"""', 'channel', 'clear_member'], {}), "(ctx, 'Bulk messages deleted',\n f'{ctx.author.mention} cleared {num2} messages in {ctx.channel.mention} from {clear_member.mention}'\n , channel, clear_member)\n", (3061, 3225), False, 'import main\n'), ((3004, 3015), 'main.ids', 'main.ids', (['(3)'], {}), '(3)\n', (3012, 3015), False, 'import main\n')]
|
# -*- coding: utf-8 -*-
"""
Created on Fri Mar 15 14:52:34 2019
@author: a.mohammadi
"""
import pyodbc
from collections import OrderedDict
#%%
def GetConnection(server, database):
return pyodbc.connect( ''.join(
[r'DRIVER={ODBC Driver 13 for SQL Server};',
r'Trusted_Connection=yes;',
r'SERVER=%s;' %server,
r'DATABASE=%s;' %database,]) )
def SQLExec(query, server, dataBase, commit):
with GetConnection(server,dataBase) as connection:
with connection.cursor() as cursor:
cursor.execute(query)
try:
rows = cursor.fetchall()
except:
rows = []
description = cursor.description #[col[0] for col in cursor.description]
if commit:
connection.commit()
lst = []
for row in rows:
d = OrderedDict()
for k, v in zip( description, row ):
d[k[0]] = v
lst.append( d )
return lst
# return [{k[0]: v for k, v in zip( description, row )} for row in rows]
#%%
if __name__ == "__main__":
server, dataBase, commit = 'AMESYD03','SafeEc', 0
query = """SELECT TOP 10 * FROM [dbo].[Company]"""
lst_of_dicts = SQLExec(query, server, dataBase, commit)
from pandas import DataFrame
df = DataFrame( lst_of_dicts )
|
[
"pandas.DataFrame",
"collections.OrderedDict"
] |
[((1368, 1391), 'pandas.DataFrame', 'DataFrame', (['lst_of_dicts'], {}), '(lst_of_dicts)\n', (1377, 1391), False, 'from pandas import DataFrame\n'), ((896, 909), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (907, 909), False, 'from collections import OrderedDict\n')]
|
import torch
import torch.nn.functional as F
import gym
import gym.spaces
import numpy as np
def autocrop_observations(observations, cell_size, output_size=None):
shape = observations.size()[3:]
if output_size is None:
new_shape = tuple(map(lambda x: (x // cell_size) * cell_size, shape))
else:
new_shape = tuple(map(lambda x: x * cell_size, output_size))
margin3_top = (shape[0] - new_shape[0]) // 2
margin3_bottom = -(shape[0] - new_shape[0] - margin3_top)
margin4_top = (shape[1] - new_shape[1]) // 2
margin4_bottom = -(shape[1] - new_shape[1] - margin4_top)
if margin3_bottom == 0:
margin3_bottom = None
if margin4_bottom == 0:
margin4_bottom = None
return observations[:, :, :, margin3_top:margin3_bottom, margin4_top:margin4_bottom]
def pixel_control_reward(observations, cell_size=4, output_size=None):
'''
Args:
observations: A tensor of shape `[B,T+1,C,H,W]`, where
* `T` is the sequence length, `B` is the batch size.
* `H` is height, `W` is width.
* `C...` is at least one channel dimension (e.g., colour, stack).
* `T` and `B` can be statically unknown.
cell_size: The size of each cell.
Returns:
shape (B, T, 1, H / cell_size, W / cell_size)
'''
with torch.no_grad():
observations = autocrop_observations(observations, cell_size, output_size=output_size)
abs_observation_diff = observations[:, 1:] - observations[:, :-1]
abs_observation_diff.abs_()
obs_shape = abs_observation_diff.size()
abs_diff = abs_observation_diff.view(-1, *obs_shape[2:])
avg_abs_diff = F.avg_pool2d(abs_diff, cell_size, stride=cell_size)
avg_abs_diff = avg_abs_diff.mean(1, keepdim=True)
return avg_abs_diff.view(*obs_shape[:2] + avg_abs_diff.size()[1:])
def pixel_control_loss(observations, actions, action_values, gamma=0.9, cell_size=4):
action_value_shape = action_values.size()
batch_shape = actions.size()[:2]
with torch.no_grad():
T = observations.size()[1] - 1
pseudo_rewards = pixel_control_reward(observations, cell_size, output_size=action_values.size()[-2:])
last_rewards = action_values[:, -1].max(1, keepdim=True)[0]
for i in reversed(range(T)):
previous_rewards = last_rewards if i + 1 == T else pseudo_rewards[:, i + 1]
pseudo_rewards[:, i].add_(gamma, previous_rewards)
q_actions = actions.view(*batch_shape + (1, 1, 1)).repeat(1, 1, 1, action_value_shape[3], action_value_shape[4])
q_actions = torch.gather(action_values[:, :-1], 2, q_actions)
loss = F.mse_loss(pseudo_rewards, q_actions)
return loss
def reward_prediction_loss(predictions, rewards):
with torch.no_grad():
target = torch.zeros(predictions.size(), dtype=torch.float32, device=predictions.device)
target[:, 0] = rewards == 0
target[:, 1] = rewards > 0
target[:, 2] = rewards < 0
return F.binary_cross_entropy_with_logits(predictions, target)
def discounted_commulative_reward(rewards, base_value, gamma):
cummulative_reward = rewards.clone()
max_t = cummulative_reward.size()[1]
for i in reversed(range(max_t)):
next_values = base_value if i + 1 == max_t else cummulative_reward[:, i + 1]
cummulative_reward[:, i].add_(gamma, next_values)
return cummulative_reward
def value_loss(values, rewards, gamma):
base_value = values[:, -1]
with torch.no_grad():
cummulative_reward = discounted_commulative_reward(rewards, base_value, gamma)
return F.mse_loss(values[:, :-1], cummulative_reward)
class UnrealEnvBaseWrapper(gym.Wrapper):
def __init__(self, env):
super().__init__(env)
self.last_action_reward = None
self.observation_space = gym.spaces.Tuple((
env.observation_space,
gym.spaces.Box(0.0, 1.0, (env.action_space.n + 1,), dtype=np.float32)
))
def reset(self):
self.last_action_reward = np.zeros(self.action_space.n + 1, dtype=np.float32)
return self.observation(self.env.reset())
def step(self, action):
observation, reward, done, stats = self.env.step(action)
self.last_action_reward = np.zeros(self.action_space.n + 1, dtype=np.float32)
self.last_action_reward[action] = 1.0
self.last_action_reward[-1] = np.clip(reward, -1, 1)
return self.observation(observation), reward, done, stats
def observation(self, observation):
return (observation, self.last_action_reward)
|
[
"torch.gather",
"torch.nn.functional.avg_pool2d",
"torch.nn.functional.mse_loss",
"numpy.zeros",
"torch.nn.functional.binary_cross_entropy_with_logits",
"numpy.clip",
"gym.spaces.Box",
"torch.no_grad"
] |
[((2582, 2631), 'torch.gather', 'torch.gather', (['action_values[:, :-1]', '(2)', 'q_actions'], {}), '(action_values[:, :-1], 2, q_actions)\n', (2594, 2631), False, 'import torch\n'), ((2644, 2681), 'torch.nn.functional.mse_loss', 'F.mse_loss', (['pseudo_rewards', 'q_actions'], {}), '(pseudo_rewards, q_actions)\n', (2654, 2681), True, 'import torch.nn.functional as F\n'), ((2991, 3046), 'torch.nn.functional.binary_cross_entropy_with_logits', 'F.binary_cross_entropy_with_logits', (['predictions', 'target'], {}), '(predictions, target)\n', (3025, 3046), True, 'import torch.nn.functional as F\n'), ((3602, 3648), 'torch.nn.functional.mse_loss', 'F.mse_loss', (['values[:, :-1]', 'cummulative_reward'], {}), '(values[:, :-1], cummulative_reward)\n', (3612, 3648), True, 'import torch.nn.functional as F\n'), ((1302, 1317), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (1315, 1317), False, 'import torch\n'), ((1661, 1712), 'torch.nn.functional.avg_pool2d', 'F.avg_pool2d', (['abs_diff', 'cell_size'], {'stride': 'cell_size'}), '(abs_diff, cell_size, stride=cell_size)\n', (1673, 1712), True, 'import torch.nn.functional as F\n'), ((2026, 2041), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (2039, 2041), False, 'import torch\n'), ((2759, 2774), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (2772, 2774), False, 'import torch\n'), ((3487, 3502), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (3500, 3502), False, 'import torch\n'), ((4026, 4077), 'numpy.zeros', 'np.zeros', (['(self.action_space.n + 1)'], {'dtype': 'np.float32'}), '(self.action_space.n + 1, dtype=np.float32)\n', (4034, 4077), True, 'import numpy as np\n'), ((4256, 4307), 'numpy.zeros', 'np.zeros', (['(self.action_space.n + 1)'], {'dtype': 'np.float32'}), '(self.action_space.n + 1, dtype=np.float32)\n', (4264, 4307), True, 'import numpy as np\n'), ((4392, 4414), 'numpy.clip', 'np.clip', (['reward', '(-1)', '(1)'], {}), '(reward, -1, 1)\n', (4399, 4414), True, 'import numpy as np\n'), ((3889, 3958), 'gym.spaces.Box', 'gym.spaces.Box', (['(0.0)', '(1.0)', '(env.action_space.n + 1,)'], {'dtype': 'np.float32'}), '(0.0, 1.0, (env.action_space.n + 1,), dtype=np.float32)\n', (3903, 3958), False, 'import gym\n')]
|
import json
import pytest
import responses
from cellengine.utils.generate_id import generate_id
from cellengine.resources.fcs_file import FcsFile
EXP_ID = "5d38a6f79fae87499999a74b"
FCSFILE_ID = "5d64abe2ca9df61349ed8e7c"
@responses.activate
def test_should_get_fcs_file(ENDPOINT_BASE, client, fcs_files):
file_id = fcs_files[0]["_id"]
responses.add(
responses.GET,
ENDPOINT_BASE + f"/experiments/{EXP_ID}/fcsfiles/{file_id}",
json=fcs_files[0],
)
file = FcsFile.get(EXP_ID, file_id)
assert type(file) is FcsFile
@responses.activate
def test_should_create_fcs_file(ENDPOINT_BASE, client, fcs_files):
"""Test upload of a new fcs_file.
This test must be run from the project root directory"""
responses.add(
responses.POST,
ENDPOINT_BASE + f"/experiments/{EXP_ID}/fcsfiles",
json=fcs_files[1],
)
FcsFile.create(EXP_ID, [fcs_files[0]["_id"]], "new file")
assert json.loads(responses.calls[0].request.body) == {
"fcsFiles": ["5d64abe2ca9df61349ed8e79"],
"filename": "new file",
}
params = [
(FCSFILE_ID, [FCSFILE_ID]),
([FCSFILE_ID], [FCSFILE_ID]),
(
["fcs_file_id_1", "fcs_file_id_2", "fcs_file_id_3"],
["fcs_file_id_1", "fcs_file_id_2", "fcs_file_id_3"],
),
({EXP_ID: FCSFILE_ID}, [{EXP_ID: FCSFILE_ID}]),
([{EXP_ID: FCSFILE_ID}], [{EXP_ID: FCSFILE_ID}]),
]
@pytest.mark.parametrize("fcs_file_args,expected_response", params)
@responses.activate
def test_should_create_fcs_file_and_correctly_parse_fcs_file_args(
ENDPOINT_BASE, client, fcs_files, fcs_file_args, expected_response
):
"""Test upload of a new fcs_file.
This test must be run from the project root directory"""
responses.add(
responses.POST,
ENDPOINT_BASE + f"/experiments/{EXP_ID}/fcsfiles",
json=fcs_files[1],
)
FcsFile.create(EXP_ID, fcs_file_args, "new file")
assert json.loads(responses.calls[0].request.body) == {
"fcsFiles": expected_response,
"filename": "new file",
}
@responses.activate
def test_should_create_fcs_file_and_correctly_parse_body_args(
ENDPOINT_BASE, client, fcs_files
):
"""Test upload of a new fcs_file.
This test must be run from the project root directory"""
responses.add(
responses.POST,
ENDPOINT_BASE + f"/experiments/{EXP_ID}/fcsfiles",
json=fcs_files[1],
)
FcsFile.create(
EXP_ID,
FCSFILE_ID,
"new name",
add_file_number=True,
add_event_number=True,
pre_subsample_n=1,
pre_subsample_p=1,
)
assert json.loads(responses.calls[0].request.body) == {
"fcsFiles": [FCSFILE_ID],
"filename": "new name",
"addFileNumber": True,
"addEventNumber": True,
"preSubsampleN": 1,
"preSubsampleP": 1
# leave out "seed" to test param not specified
}
@responses.activate
def test_should_delete_fcs_file(ENDPOINT_BASE, client, fcs_files):
fcs_file = FcsFile.from_dict(fcs_files[0])
responses.add(
responses.DELETE,
ENDPOINT_BASE + f"/experiments/{EXP_ID}/fcsfiles/{fcs_file._id}",
)
deleted = fcs_file.delete()
assert deleted is None
@responses.activate
def test_fcs_file_and_fcs_file_id_defined(
ENDPOINT_BASE, experiment, experiments, gates
):
responses.add(
responses.POST,
ENDPOINT_BASE + f"/experiments/{EXP_ID}/gates",
status=201,
json=gates[0],
)
with pytest.raises(
ValueError, match="Please specify only 'fcs_file' or 'fcs_file_id'."
):
experiment.create_rectangle_gate(
"FSC-A",
"FSC-W",
"fcs_rect_gate",
x1=1,
x2=2,
y1=3,
y2=4,
fcs_file="Specimen_001_A1_A01.fcs",
fcs_file_id="5d38a7159fae87499999a74e",
tailored_per_file=True,
)
@responses.activate
def test_tailored_per_file_true(ENDPOINT_BASE, experiment, rectangle_gate):
responses.add(
responses.POST,
ENDPOINT_BASE + f"/experiments/{EXP_ID}/gates",
status=201,
json=rectangle_gate,
)
experiment.create_rectangle_gate(
"FSC-A",
"FSC-W",
"fcs_rect_gate",
x1=1,
x2=2,
y1=3,
y2=4,
tailored_per_file=True,
)
assert json.loads(responses.calls[0].request.body)["tailoredPerFile"] is True
@responses.activate
def test_fcs_file_id_is_None_and_fcs_file_is_None(
ENDPOINT_BASE, experiment, rectangle_gate
):
responses.add(
responses.POST,
ENDPOINT_BASE + f"/experiments/{EXP_ID}/gates",
status=201,
json=rectangle_gate,
)
experiment.create_rectangle_gate(
"FSC-A", "FSC-W", "fcs_rect_gate", x1=1, x2=2, y1=3, y2=4
)
assert json.loads(responses.calls[0].request.body)["fcsFileId"] is None
@responses.activate
def test_create_global_tailored_gate(ENDPOINT_BASE, experiment, rectangle_gate):
global_gid = generate_id()
responses.add(
responses.POST,
ENDPOINT_BASE + f"/experiments/{EXP_ID}/gates",
status=201,
json=rectangle_gate,
)
experiment.create_rectangle_gate(
x_channel="FSC-A",
y_channel="FSC-W",
name="fcs_rect_gate",
x1=1,
x2=2,
y1=3,
y2=4,
tailored_per_file=True,
gid=global_gid,
)
assert json.loads(responses.calls[0].request.body)["tailoredPerFile"] is True
assert json.loads(responses.calls[0].request.body)["gid"] == global_gid
@responses.activate
def test_specify_fcs_file_id(ENDPOINT_BASE, experiment, rectangle_gate):
responses.add(
responses.POST,
ENDPOINT_BASE + f"/experiments/{EXP_ID}/gates",
status=201,
json=rectangle_gate,
)
experiment.create_rectangle_gate(
x_channel="FSC-A",
y_channel="FSC-W",
name="fcs_rect_gate",
x1=1,
x2=2,
y1=3,
y2=4,
fcs_file_id="5d38a7159fae87499999a751",
tailored_per_file=True,
)
assert (
json.loads(responses.calls[0].request.body)["fcsFileId"]
== "5d38a7159fae87499999a751"
)
@responses.activate
def test_fcs_file_called_by_name(ENDPOINT_BASE, experiment, fcs_files, rectangle_gate):
responses.add(
responses.GET,
ENDPOINT_BASE + f"/experiments/{EXP_ID}/fcsfiles",
json=[fcs_files[3]],
)
responses.add(
responses.GET,
ENDPOINT_BASE + f"/experiments/{EXP_ID}/fcsfiles/5d64abe2ca9df61349ed8e7c",
json=fcs_files[3],
)
responses.add(
responses.POST,
ENDPOINT_BASE + f"/experiments/{EXP_ID}/gates",
status=201,
json=rectangle_gate,
)
experiment.create_rectangle_gate(
"FSC-A",
"FSC-W",
"fcs_rect_gate",
x1=1,
x2=2,
y1=3,
y2=4,
fcs_file="Specimen_001_A1_A01.fcs",
tailored_per_file=True,
)
assert json.loads(responses.calls[2].request.body)["tailoredPerFile"] is True
assert (
json.loads(responses.calls[2].request.body)["fcsFileId"]
== "5d64abe2ca9df61349ed8e7c"
)
|
[
"cellengine.resources.fcs_file.FcsFile.get",
"cellengine.resources.fcs_file.FcsFile.from_dict",
"json.loads",
"responses.add",
"cellengine.resources.fcs_file.FcsFile.create",
"cellengine.utils.generate_id.generate_id",
"pytest.raises",
"pytest.mark.parametrize"
] |
[((1419, 1485), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""fcs_file_args,expected_response"""', 'params'], {}), "('fcs_file_args,expected_response', params)\n", (1442, 1485), False, 'import pytest\n'), ((348, 460), 'responses.add', 'responses.add', (['responses.GET', "(ENDPOINT_BASE + f'/experiments/{EXP_ID}/fcsfiles/{file_id}')"], {'json': 'fcs_files[0]'}), "(responses.GET, ENDPOINT_BASE +\n f'/experiments/{EXP_ID}/fcsfiles/{file_id}', json=fcs_files[0])\n", (361, 460), False, 'import responses\n'), ((499, 527), 'cellengine.resources.fcs_file.FcsFile.get', 'FcsFile.get', (['EXP_ID', 'file_id'], {}), '(EXP_ID, file_id)\n', (510, 527), False, 'from cellengine.resources.fcs_file import FcsFile\n'), ((753, 856), 'responses.add', 'responses.add', (['responses.POST', "(ENDPOINT_BASE + f'/experiments/{EXP_ID}/fcsfiles')"], {'json': 'fcs_files[1]'}), "(responses.POST, ENDPOINT_BASE +\n f'/experiments/{EXP_ID}/fcsfiles', json=fcs_files[1])\n", (766, 856), False, 'import responses\n'), ((888, 945), 'cellengine.resources.fcs_file.FcsFile.create', 'FcsFile.create', (['EXP_ID', "[fcs_files[0]['_id']]", '"""new file"""'], {}), "(EXP_ID, [fcs_files[0]['_id']], 'new file')\n", (902, 945), False, 'from cellengine.resources.fcs_file import FcsFile\n'), ((1750, 1853), 'responses.add', 'responses.add', (['responses.POST', "(ENDPOINT_BASE + f'/experiments/{EXP_ID}/fcsfiles')"], {'json': 'fcs_files[1]'}), "(responses.POST, ENDPOINT_BASE +\n f'/experiments/{EXP_ID}/fcsfiles', json=fcs_files[1])\n", (1763, 1853), False, 'import responses\n'), ((1885, 1934), 'cellengine.resources.fcs_file.FcsFile.create', 'FcsFile.create', (['EXP_ID', 'fcs_file_args', '"""new file"""'], {}), "(EXP_ID, fcs_file_args, 'new file')\n", (1899, 1934), False, 'from cellengine.resources.fcs_file import FcsFile\n'), ((2300, 2403), 'responses.add', 'responses.add', (['responses.POST', "(ENDPOINT_BASE + f'/experiments/{EXP_ID}/fcsfiles')"], {'json': 'fcs_files[1]'}), "(responses.POST, ENDPOINT_BASE +\n f'/experiments/{EXP_ID}/fcsfiles', json=fcs_files[1])\n", (2313, 2403), False, 'import responses\n'), ((2435, 2568), 'cellengine.resources.fcs_file.FcsFile.create', 'FcsFile.create', (['EXP_ID', 'FCSFILE_ID', '"""new name"""'], {'add_file_number': '(True)', 'add_event_number': '(True)', 'pre_subsample_n': '(1)', 'pre_subsample_p': '(1)'}), "(EXP_ID, FCSFILE_ID, 'new name', add_file_number=True,\n add_event_number=True, pre_subsample_n=1, pre_subsample_p=1)\n", (2449, 2568), False, 'from cellengine.resources.fcs_file import FcsFile\n'), ((3037, 3068), 'cellengine.resources.fcs_file.FcsFile.from_dict', 'FcsFile.from_dict', (['fcs_files[0]'], {}), '(fcs_files[0])\n', (3054, 3068), False, 'from cellengine.resources.fcs_file import FcsFile\n'), ((3073, 3174), 'responses.add', 'responses.add', (['responses.DELETE', "(ENDPOINT_BASE + f'/experiments/{EXP_ID}/fcsfiles/{fcs_file._id}')"], {}), "(responses.DELETE, ENDPOINT_BASE +\n f'/experiments/{EXP_ID}/fcsfiles/{fcs_file._id}')\n", (3086, 3174), False, 'import responses\n'), ((3375, 3483), 'responses.add', 'responses.add', (['responses.POST', "(ENDPOINT_BASE + f'/experiments/{EXP_ID}/gates')"], {'status': '(201)', 'json': 'gates[0]'}), "(responses.POST, ENDPOINT_BASE +\n f'/experiments/{EXP_ID}/gates', status=201, json=gates[0])\n", (3388, 3483), False, 'import responses\n'), ((4060, 4174), 'responses.add', 'responses.add', (['responses.POST', "(ENDPOINT_BASE + f'/experiments/{EXP_ID}/gates')"], {'status': '(201)', 'json': 'rectangle_gate'}), "(responses.POST, ENDPOINT_BASE +\n 
f'/experiments/{EXP_ID}/gates', status=201, json=rectangle_gate)\n", (4073, 4174), False, 'import responses\n'), ((4609, 4723), 'responses.add', 'responses.add', (['responses.POST', "(ENDPOINT_BASE + f'/experiments/{EXP_ID}/gates')"], {'status': '(201)', 'json': 'rectangle_gate'}), "(responses.POST, ENDPOINT_BASE +\n f'/experiments/{EXP_ID}/gates', status=201, json=rectangle_gate)\n", (4622, 4723), False, 'import responses\n'), ((5065, 5078), 'cellengine.utils.generate_id.generate_id', 'generate_id', ([], {}), '()\n', (5076, 5078), False, 'from cellengine.utils.generate_id import generate_id\n'), ((5083, 5197), 'responses.add', 'responses.add', (['responses.POST', "(ENDPOINT_BASE + f'/experiments/{EXP_ID}/gates')"], {'status': '(201)', 'json': 'rectangle_gate'}), "(responses.POST, ENDPOINT_BASE +\n f'/experiments/{EXP_ID}/gates', status=201, json=rectangle_gate)\n", (5096, 5197), False, 'import responses\n'), ((5730, 5844), 'responses.add', 'responses.add', (['responses.POST', "(ENDPOINT_BASE + f'/experiments/{EXP_ID}/gates')"], {'status': '(201)', 'json': 'rectangle_gate'}), "(responses.POST, ENDPOINT_BASE +\n f'/experiments/{EXP_ID}/gates', status=201, json=rectangle_gate)\n", (5743, 5844), False, 'import responses\n'), ((6380, 6484), 'responses.add', 'responses.add', (['responses.GET', "(ENDPOINT_BASE + f'/experiments/{EXP_ID}/fcsfiles')"], {'json': '[fcs_files[3]]'}), "(responses.GET, ENDPOINT_BASE +\n f'/experiments/{EXP_ID}/fcsfiles', json=[fcs_files[3]])\n", (6393, 6484), False, 'import responses\n'), ((6516, 6648), 'responses.add', 'responses.add', (['responses.GET', "(ENDPOINT_BASE + f'/experiments/{EXP_ID}/fcsfiles/5d64abe2ca9df61349ed8e7c')"], {'json': 'fcs_files[3]'}), "(responses.GET, ENDPOINT_BASE +\n f'/experiments/{EXP_ID}/fcsfiles/5d64abe2ca9df61349ed8e7c', json=\n fcs_files[3])\n", (6529, 6648), False, 'import responses\n'), ((6675, 6789), 'responses.add', 'responses.add', (['responses.POST', "(ENDPOINT_BASE + f'/experiments/{EXP_ID}/gates')"], {'status': '(201)', 'json': 'rectangle_gate'}), "(responses.POST, ENDPOINT_BASE +\n f'/experiments/{EXP_ID}/gates', status=201, json=rectangle_gate)\n", (6688, 6789), False, 'import responses\n'), ((957, 1000), 'json.loads', 'json.loads', (['responses.calls[0].request.body'], {}), '(responses.calls[0].request.body)\n', (967, 1000), False, 'import json\n'), ((1946, 1989), 'json.loads', 'json.loads', (['responses.calls[0].request.body'], {}), '(responses.calls[0].request.body)\n', (1956, 1989), False, 'import json\n'), ((2639, 2682), 'json.loads', 'json.loads', (['responses.calls[0].request.body'], {}), '(responses.calls[0].request.body)\n', (2649, 2682), False, 'import json\n'), ((3528, 3616), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': '"""Please specify only \'fcs_file\' or \'fcs_file_id\'."""'}), '(ValueError, match=\n "Please specify only \'fcs_file\' or \'fcs_file_id\'.")\n', (3541, 3616), False, 'import pytest\n'), ((4412, 4455), 'json.loads', 'json.loads', (['responses.calls[0].request.body'], {}), '(responses.calls[0].request.body)\n', (4422, 4455), False, 'import json\n'), ((4880, 4923), 'json.loads', 'json.loads', (['responses.calls[0].request.body'], {}), '(responses.calls[0].request.body)\n', (4890, 4923), False, 'import json\n'), ((5484, 5527), 'json.loads', 'json.loads', (['responses.calls[0].request.body'], {}), '(responses.calls[0].request.body)\n', (5494, 5527), False, 'import json\n'), ((5566, 5609), 'json.loads', 'json.loads', (['responses.calls[0].request.body'], {}), 
'(responses.calls[0].request.body)\n', (5576, 5609), False, 'import json\n'), ((6165, 6208), 'json.loads', 'json.loads', (['responses.calls[0].request.body'], {}), '(responses.calls[0].request.body)\n', (6175, 6208), False, 'import json\n'), ((7071, 7114), 'json.loads', 'json.loads', (['responses.calls[2].request.body'], {}), '(responses.calls[2].request.body)\n', (7081, 7114), False, 'import json\n'), ((7163, 7206), 'json.loads', 'json.loads', (['responses.calls[2].request.body'], {}), '(responses.calls[2].request.body)\n', (7173, 7206), False, 'import json\n')]
|
import numpy as np
from scipy.spatial.transform import Rotation
import pybullet as p
def todegree(w):
return w*180/np.pi
def torad(w):
return w*np.pi/180
def angle(v1, v2):
v1_u = unit_vector(v1)
v2_u = unit_vector(v2)
return np.arccos(np.clip(np.dot(v1_u, v2_u), -1.0, 1.0))
def unit_vector(vector):
""" Returns the unit vector of the vector. """
return vector / np.linalg.norm(vector)
def add_one(index):
if index+1 == 3:
index_out = 0
else:
index_out = index+1
return index_out
def to_H(R, T=np.zeros(3)):
H = np.eye(4)
H[:-1,:-1] = R
H[:-1,-1] = T
return H
def closest_axis_2_userdefined(H, vec):
#print (H)
#print (np.linalg.inv(H[:-1,:-1]))
min_angle = 190
x_des = np.array(vec)
index = 0
sign = 0
reverse = False
for i in range(3):
x = H[:-1, i]
theta = todegree(angle(x, x_des))
#print (theta)
if theta > 90:
theta = theta - 180
if theta ==0:
reverse = True
if min_angle > np.abs(theta):
min_angle = np.abs(theta)
index = i
if theta == 0.:
if reverse:
sign = -1
else:
sign = 1
else:
sign = np.sign(theta)
return min_angle, index, sign
def R_2vect(vector_orig, vector_fin):
"""Calculate the rotation matrix required to rotate from one vector to another.
    For the rotation of one vector to another, there is an infinite series of rotation matrices
    possible. Due to axial symmetry, the rotation axis can be any vector lying in the symmetry
plane between the two vectors. Hence the axis-angle convention will be used to construct the
matrix with the rotation axis defined as the cross product of the two vectors. The rotation
angle is the arccosine of the dot product of the two unit vectors.
Given a unit vector parallel to the rotation axis, w = [x, y, z] and the rotation angle a,
the rotation matrix R is::
| 1 + (1-cos(a))*(x*x-1) -z*sin(a)+(1-cos(a))*x*y y*sin(a)+(1-cos(a))*x*z |
R = | z*sin(a)+(1-cos(a))*x*y 1 + (1-cos(a))*(y*y-1) -x*sin(a)+(1-cos(a))*y*z |
| -y*sin(a)+(1-cos(a))*x*z x*sin(a)+(1-cos(a))*y*z 1 + (1-cos(a))*(z*z-1) |
    @return: The 4x4 homogeneous rotation matrix, the rotation axis, and the rotation angle.
    @rtype: numpy array (4x4), numpy array (len 3), float
@param vector_orig: The unrotated vector defined in the reference frame.
@type vector_orig: numpy array, len 3
@param vector_fin: The rotated vector defined in the reference frame.
@type vector_fin: numpy array, len 3
"""
# Convert the vectors to unit vectors.
vector_orig = vector_orig / np.linalg.norm(vector_orig)
vector_fin = vector_fin / np.linalg.norm(vector_fin)
# The rotation axis (normalised).
axis = np.cross(vector_orig, vector_fin)
axis_len = np.linalg.norm(axis)
if axis_len != 0.0:
axis = axis / axis_len
# Alias the axis coordinates.
x = axis[0]
y = axis[1]
z = axis[2]
if x==0 and y==0 and z==0:
z=1
# The rotation angle.
angle = np.arccos(np.clip(np.dot(vector_orig, vector_fin),-1,1))
# Trig functions (only need to do this maths once!).
ca = np.cos(angle)
sa = np.sin(angle)
R = np.eye(4)
# Calculate the rotation matrix elements.
R[0, 0] = 1.0 + (1.0 - ca) * (x ** 2 - 1.0)
R[0, 1] = -z * sa + (1.0 - ca) * x * y
R[0, 2] = y * sa + (1.0 - ca) * x * z
R[1, 0] = z * sa + (1.0 - ca) * x * y
R[1, 1] = 1.0 + (1.0 - ca) * (y ** 2 - 1.0)
R[1, 2] = -x * sa + (1.0 - ca) * y * z
R[2, 0] = -y * sa + (1.0 - ca) * x * z
R[2, 1] = x * sa + (1.0 - ca) * y * z
R[2, 2] = 1.0 + (1.0 - ca) * (z ** 2 - 1.0)
return R, axis, angle
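# Illustrative sanity check (example values, not exercised elsewhere in this file):
# rotating the x-axis onto the y-axis yields a 90-degree rotation about +z.
#   R, axis, ang = R_2vect(np.array([1., 0., 0.]), np.array([0., 1., 0.]))
#   np.round(axis, 3)                   -> [0., 0., 1.]
#   todegree(ang)                       -> 90.0
#   R[:3, :3] @ np.array([1., 0., 0.])  -> [0., 1., 0.]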
class RotationPrimitives():
def __init__(self, H0, Hg):
self.H0 = H0
self.Hg = Hg
def set_goal(self, Hg):
self.Hg = Hg
def set_current_pose(self,H0):
self.H0 = H0
def get_control_seq(self, ax=None):
## Control Sequence will provide rotation vector and desired rotation to achieve target ##
################## Goal to Viapoint 2 ###################################
theta, index, sign = self.closest_axis_2_normal(self.Hg)
des_vec = np.array([0, 0, sign * 1])
R, r_vec_via2gw, ang_via = self.R_2vect(self.Hg[:-1, index], des_vec)
H_via2 = np.matmul(R, self.Hg)
H_via2[:-1,-1] = self.Hg[:-1,-1]
H_via2[2, -1] = 0.
r_vec_via2g = np.matmul(H_via2[:-1,:-1].T, r_vec_via2gw)
c2g = [r_vec_via2g, -ang_via, r_vec_via2gw]
#########################################################################
############# From Floor to Viapoint 1 ####################
index_H0, sign_H0 = self.find_index_z(self.H0)
#theta_0 ,index_H0, sign_H0 = self.closest_axis_2_normal(self.H0)
# print (index_H0, sign_H0, index, sign)
# input ("WAIT")
rot_index, ang_floor = self.find_rot_z(index_H0, sign_H0, index, sign)
if rot_index is not None:
r_vec_floor = np.zeros(3)
r_vec_floor[rot_index] = 1
rotation_floor = Rotation.from_rotvec(ang_floor * r_vec_floor)
R_floor_1 = rotation_floor.as_matrix()
R_floor_1 = to_H(R=R_floor_1)
H_via1 = np.matmul(self.H0, R_floor_1)
#H_via1[1,-1] = 0.3
else:
r_vec_floor = np.zeros(3)
r_vec_floor[index] = 1
ang_floor = 0.
H_via1 = self.H0
r_vec_floor_w = np.matmul(self.H0[:-1,:-1], r_vec_floor)
c01 = [r_vec_floor, ang_floor, r_vec_floor_w]
####################################################
############ From Viapoint 1 to Viapoint 2 ################
if index == 0:
vec_1 = H_via1[:-1, 1]
vec_2 = H_via2[:-1, 1]
else:
vec_1 = H_via1[:-1, 0]
vec_2 = H_via2[:-1, 0]
R12, r_vec_via12_p, ang_via12 = self.R_2vect(vec_1, vec_2)
r_vec_via12w = np.zeros(3)
r_vec_via12w[2] = np.sign(r_vec_via12_p[2])
r_vec_via12 = np.matmul(H_via1[:-1,:-1].T, r_vec_via12w)
c12 = [r_vec_via12, ang_via12, r_vec_via12w]
###########################################################
##### COMPUTE SHORTCUT: ########
rot_12 = to_H(Rotation.from_rotvec(c12[0]*c12[1]).as_matrix())
rot2g = to_H(Rotation.from_rotvec(c2g[0]*c2g[1]).as_matrix())
rot1g = np.matmul(rot_12,rot2g)
if np.allclose(rot1g, np.eye(4)):
c1g = [np.array([0,0,1]), 0.]
else:
rot1g = Rotation.from_matrix(rot1g[:-1,:-1]).as_rotvec()
c1g = [rot1g / np.linalg.norm(rot1g,ord=2), np.linalg.norm(rot1g,ord=2)]
##### Compute rotation from start to Via-2 ##
R_via2 = H_via2[:-1,:-1]
R_init = self.H0[:-1,:-1]
R_to_2 = np.matmul(R_init.T, R_via2)
if np.allclose(R_to_2, np.eye(3)):
c_to_2 = [np.array([0, 0, 1]), 0.]
else:
rot_to_2 = Rotation.from_matrix(R_to_2).as_rotvec()
c_to_2 = [rot_to_2 / np.linalg.norm(rot_to_2, ord=2), np.linalg.norm(rot_to_2, ord=2)]
##### Compute rotation from start to Goal ###
R_g = self.Hg[:-1, :-1]
R_init = self.H0[:-1, :-1]
R_to_g = np.matmul(R_init.T, R_g)
if np.allclose(R_to_g, np.eye(3)):
c_to_g = [np.array([0, 0, 1]), 0.]
else:
rot_to_g = Rotation.from_matrix(R_to_g).as_rotvec()
c_to_g = [rot_to_g / np.linalg.norm(rot_to_g, ord=2), np.linalg.norm(rot_to_g, ord=2)]
command_seq = [c01, c12,c2g]
return command_seq, [c1g], [c_to_2, c_to_g]
def find_index_z(self, H):
big_Z = 0.0
index = 0
for i in range(3):
z = H[2, i]
# print(z)
if np.abs(z) > big_Z:
big_Z = np.abs(z)
sign = np.sign(z)
index = i
return index, sign
def find_rot_z(self, index_H0, sign_H0, index, sign):
if index == index_H0:
if sign == sign_H0:
return None, None
else:
angle = np.pi
if index == 0:
rot_over = 1
else:
rot_over = 0
return rot_over, angle
else:
rot_over = 0
while (rot_over == index or rot_over == index_H0):
rot_over += 1
if sign == sign_H0:
angle = -np.pi / 2
if add_one(rot_over) != index_H0:
angle = -angle
else:
angle = np.pi / 2
if add_one(rot_over) != index_H0:
angle = -angle
return rot_over, angle
def closest_axis_2_normal(self, H):
# print (H)
# print (np.linalg.inv(H[:-1,:-1]))
min_angle = 190
x_des = np.array([0, 0, 1])
index = 0
sign = 0
reverse = False
for i in range(3):
x = H[:-1, i]
theta = todegree(angle(x, x_des))
# print (theta)
if theta > 90:
theta = theta - 180
if theta ==0:
reverse = True
if min_angle > np.abs(theta):
min_angle = np.abs(theta)
index = i
if theta == 0.:
if reverse:
sign = -1
else:
sign = 1
else:
sign = np.sign(theta)
return min_angle, index, sign
def R_2vect(self, vector_orig, vector_fin):
"""Calculate the rotation matrix required to rotate from one vector to another.
        For the rotation of one vector to another, there is an infinite series of rotation matrices
        possible. Due to axial symmetry, the rotation axis can be any vector lying in the symmetry
plane between the two vectors. Hence the axis-angle convention will be used to construct the
matrix with the rotation axis defined as the cross product of the two vectors. The rotation
angle is the arccosine of the dot product of the two unit vectors.
Given a unit vector parallel to the rotation axis, w = [x, y, z] and the rotation angle a,
the rotation matrix R is::
| 1 + (1-cos(a))*(x*x-1) -z*sin(a)+(1-cos(a))*x*y y*sin(a)+(1-cos(a))*x*z |
R = | z*sin(a)+(1-cos(a))*x*y 1 + (1-cos(a))*(y*y-1) -x*sin(a)+(1-cos(a))*y*z |
| -y*sin(a)+(1-cos(a))*x*z x*sin(a)+(1-cos(a))*y*z 1 + (1-cos(a))*(z*z-1) |
        @return: The 4x4 homogeneous rotation matrix, the rotation axis, and the rotation angle.
        @rtype: numpy array (4x4), numpy array (len 3), float
@param vector_orig: The unrotated vector defined in the reference frame.
@type vector_orig: numpy array, len 3
@param vector_fin: The rotated vector defined in the reference frame.
@type vector_fin: numpy array, len 3
"""
# Convert the vectors to unit vectors.
vector_orig = vector_orig / np.linalg.norm(vector_orig)
vector_fin = vector_fin / np.linalg.norm(vector_fin)
# The rotation axis (normalised).
axis = np.cross(vector_orig, vector_fin)
axis_len = np.linalg.norm(axis)
if axis_len != 0.0:
axis = axis / axis_len
# Alias the axis coordinates.
x = axis[0]
y = axis[1]
z = axis[2]
if x==0 and y==0 and z==0:
z=1
# The rotation angle.
angle = np.arccos(np.clip(np.dot(vector_orig, vector_fin),-1,1))
# Trig functions (only need to do this maths once!).
ca = np.cos(angle)
sa = np.sin(angle)
R = np.eye(4)
# Calculate the rotation matrix elements.
R[0, 0] = 1.0 + (1.0 - ca) * (x ** 2 - 1.0)
R[0, 1] = -z * sa + (1.0 - ca) * x * y
R[0, 2] = y * sa + (1.0 - ca) * x * z
R[1, 0] = z * sa + (1.0 - ca) * x * y
R[1, 1] = 1.0 + (1.0 - ca) * (y ** 2 - 1.0)
R[1, 2] = -x * sa + (1.0 - ca) * y * z
R[2, 0] = -y * sa + (1.0 - ca) * x * z
R[2, 1] = x * sa + (1.0 - ca) * y * z
R[2, 2] = 1.0 + (1.0 - ca) * (z ** 2 - 1.0)
return R, axis, angle
def calculate_mutltiple_goals(init_information, obs):
goal_orients = []
    # This first calculation step computes a viapoint, i.e. it yields a goal orientation for some axis alignment
init_orient = np.zeros(3)
init_orient[:2] = np.asarray(init_information[:2])
init_orient = init_orient / np.linalg.norm(init_orient)
current_orient = np.asarray(p.getMatrixFromQuaternion(obs["object_orientation"])).reshape(3, 3)
theta, index, sign = closest_axis_2_userdefined(to_H(current_orient), init_orient)
des_vec = sign * np.array(init_orient)
Rot1, r_vec_via2gw, ang_via = R_2vect(current_orient[:, index], des_vec)
first_goal = np.matmul(Rot1[:-1, :-1], current_orient)
first_goal = Rotation.from_matrix(first_goal)
    goal_orients.append([first_goal.as_quat()+[0.001,0.001,0.001,0.001],[0,0,0.0325]]) # small noise added, since the downstream code otherwise runs into problems
    # This second calculation applies the desired relative transformation based on the current observation.
# now take into account the desired rotation from the target information:
des_rotation = np.asarray(p.getMatrixFromQuaternion(init_information[10:14])).reshape(3, 3)
init_orient = np.asarray([1,0,0])
theta, index, sign = closest_axis_2_userdefined(
to_H(current_orient), init_orient)
des_vec = sign * np.array(init_orient)
Rot1, r_vec_via2gw, ang_via = R_2vect(current_orient[:, index], des_vec)
second_goal = np.matmul(Rot1[:-1, :-1], current_orient)
# now apply rotation:
second_goal = np.matmul(des_rotation, second_goal)
# now rotate back to orientation that we are now at:
second_goal = np.matmul(Rot1[:-1, :-1].T, second_goal)
second_goal = Rotation.from_matrix(second_goal)
    goal_orients.append([second_goal.as_quat()+[0.001,0.001,0.001,0.001],[init_information[7],init_information[8],init_information[9]]]) # small noise added, since the downstream code otherwise runs into problems
return goal_orients
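# Illustrative usage sketch; the start/goal poses below are made-up example values,
# and calculate_mutltiple_goals() is not exercised here because it needs a live
# pybullet observation dict.
if __name__ == "__main__":
    H0 = to_H(np.eye(3))  # start pose: identity orientation at the origin
    Hg = to_H(Rotation.from_euler("x", 60, degrees=True).as_matrix())  # goal pose: 60 deg about x
    primitives = RotationPrimitives(H0, Hg)
    command_seq, shortcut, direct = primitives.get_control_seq()
    for axis_body, ang, axis_world in command_seq:
        print("rotate", np.round(ang, 3), "rad about world axis", np.round(axis_world, 3))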
|
[
"numpy.abs",
"numpy.dot",
"numpy.asarray",
"pybullet.getMatrixFromQuaternion",
"numpy.cross",
"numpy.zeros",
"numpy.sign",
"numpy.sin",
"numpy.array",
"numpy.linalg.norm",
"numpy.cos",
"numpy.matmul",
"scipy.spatial.transform.Rotation.from_matrix",
"numpy.eye",
"scipy.spatial.transform.Rotation.from_rotvec"
] |
[((583, 594), 'numpy.zeros', 'np.zeros', (['(3)'], {}), '(3)\n', (591, 594), True, 'import numpy as np\n'), ((605, 614), 'numpy.eye', 'np.eye', (['(4)'], {}), '(4)\n', (611, 614), True, 'import numpy as np\n'), ((792, 805), 'numpy.array', 'np.array', (['vec'], {}), '(vec)\n', (800, 805), True, 'import numpy as np\n'), ((2950, 2983), 'numpy.cross', 'np.cross', (['vector_orig', 'vector_fin'], {}), '(vector_orig, vector_fin)\n', (2958, 2983), True, 'import numpy as np\n'), ((2999, 3019), 'numpy.linalg.norm', 'np.linalg.norm', (['axis'], {}), '(axis)\n', (3013, 3019), True, 'import numpy as np\n'), ((3364, 3377), 'numpy.cos', 'np.cos', (['angle'], {}), '(angle)\n', (3370, 3377), True, 'import numpy as np\n'), ((3387, 3400), 'numpy.sin', 'np.sin', (['angle'], {}), '(angle)\n', (3393, 3400), True, 'import numpy as np\n'), ((3410, 3419), 'numpy.eye', 'np.eye', (['(4)'], {}), '(4)\n', (3416, 3419), True, 'import numpy as np\n'), ((12756, 12767), 'numpy.zeros', 'np.zeros', (['(3)'], {}), '(3)\n', (12764, 12767), True, 'import numpy as np\n'), ((12790, 12822), 'numpy.asarray', 'np.asarray', (['init_information[:2]'], {}), '(init_information[:2])\n', (12800, 12822), True, 'import numpy as np\n'), ((13207, 13248), 'numpy.matmul', 'np.matmul', (['Rot1[:-1, :-1]', 'current_orient'], {}), '(Rot1[:-1, :-1], current_orient)\n', (13216, 13248), True, 'import numpy as np\n'), ((13266, 13298), 'scipy.spatial.transform.Rotation.from_matrix', 'Rotation.from_matrix', (['first_goal'], {}), '(first_goal)\n', (13286, 13298), False, 'from scipy.spatial.transform import Rotation\n'), ((13758, 13779), 'numpy.asarray', 'np.asarray', (['[1, 0, 0]'], {}), '([1, 0, 0])\n', (13768, 13779), True, 'import numpy as np\n'), ((14012, 14053), 'numpy.matmul', 'np.matmul', (['Rot1[:-1, :-1]', 'current_orient'], {}), '(Rot1[:-1, :-1], current_orient)\n', (14021, 14053), True, 'import numpy as np\n'), ((14098, 14134), 'numpy.matmul', 'np.matmul', (['des_rotation', 'second_goal'], {}), '(des_rotation, second_goal)\n', (14107, 14134), True, 'import numpy as np\n'), ((14210, 14250), 'numpy.matmul', 'np.matmul', (['Rot1[:-1, :-1].T', 'second_goal'], {}), '(Rot1[:-1, :-1].T, second_goal)\n', (14219, 14250), True, 'import numpy as np\n'), ((14269, 14302), 'scipy.spatial.transform.Rotation.from_matrix', 'Rotation.from_matrix', (['second_goal'], {}), '(second_goal)\n', (14289, 14302), False, 'from scipy.spatial.transform import Rotation\n'), ((420, 442), 'numpy.linalg.norm', 'np.linalg.norm', (['vector'], {}), '(vector)\n', (434, 442), True, 'import numpy as np\n'), ((2815, 2842), 'numpy.linalg.norm', 'np.linalg.norm', (['vector_orig'], {}), '(vector_orig)\n', (2829, 2842), True, 'import numpy as np\n'), ((2873, 2899), 'numpy.linalg.norm', 'np.linalg.norm', (['vector_fin'], {}), '(vector_fin)\n', (2887, 2899), True, 'import numpy as np\n'), ((4410, 4436), 'numpy.array', 'np.array', (['[0, 0, sign * 1]'], {}), '([0, 0, sign * 1])\n', (4418, 4436), True, 'import numpy as np\n'), ((4532, 4553), 'numpy.matmul', 'np.matmul', (['R', 'self.Hg'], {}), '(R, self.Hg)\n', (4541, 4553), True, 'import numpy as np\n'), ((4644, 4687), 'numpy.matmul', 'np.matmul', (['H_via2[:-1, :-1].T', 'r_vec_via2gw'], {}), '(H_via2[:-1, :-1].T, r_vec_via2gw)\n', (4653, 4687), True, 'import numpy as np\n'), ((5702, 5743), 'numpy.matmul', 'np.matmul', (['self.H0[:-1, :-1]', 'r_vec_floor'], {}), '(self.H0[:-1, :-1], r_vec_floor)\n', (5711, 5743), True, 'import numpy as np\n'), ((6194, 6205), 'numpy.zeros', 'np.zeros', (['(3)'], {}), '(3)\n', (6202, 6205), True, 'import 
numpy as np\n'), ((6232, 6257), 'numpy.sign', 'np.sign', (['r_vec_via12_p[2]'], {}), '(r_vec_via12_p[2])\n', (6239, 6257), True, 'import numpy as np\n'), ((6280, 6323), 'numpy.matmul', 'np.matmul', (['H_via1[:-1, :-1].T', 'r_vec_via12w'], {}), '(H_via1[:-1, :-1].T, r_vec_via12w)\n', (6289, 6323), True, 'import numpy as np\n'), ((6643, 6667), 'numpy.matmul', 'np.matmul', (['rot_12', 'rot2g'], {}), '(rot_12, rot2g)\n', (6652, 6667), True, 'import numpy as np\n'), ((7058, 7085), 'numpy.matmul', 'np.matmul', (['R_init.T', 'R_via2'], {}), '(R_init.T, R_via2)\n', (7067, 7085), True, 'import numpy as np\n'), ((7492, 7516), 'numpy.matmul', 'np.matmul', (['R_init.T', 'R_g'], {}), '(R_init.T, R_g)\n', (7501, 7516), True, 'import numpy as np\n'), ((9135, 9154), 'numpy.array', 'np.array', (['[0, 0, 1]'], {}), '([0, 0, 1])\n', (9143, 9154), True, 'import numpy as np\n'), ((11491, 11524), 'numpy.cross', 'np.cross', (['vector_orig', 'vector_fin'], {}), '(vector_orig, vector_fin)\n', (11499, 11524), True, 'import numpy as np\n'), ((11544, 11564), 'numpy.linalg.norm', 'np.linalg.norm', (['axis'], {}), '(axis)\n', (11558, 11564), True, 'import numpy as np\n'), ((11957, 11970), 'numpy.cos', 'np.cos', (['angle'], {}), '(angle)\n', (11963, 11970), True, 'import numpy as np\n'), ((11984, 11997), 'numpy.sin', 'np.sin', (['angle'], {}), '(angle)\n', (11990, 11997), True, 'import numpy as np\n'), ((12011, 12020), 'numpy.eye', 'np.eye', (['(4)'], {}), '(4)\n', (12017, 12020), True, 'import numpy as np\n'), ((12855, 12882), 'numpy.linalg.norm', 'np.linalg.norm', (['init_orient'], {}), '(init_orient)\n', (12869, 12882), True, 'import numpy as np\n'), ((13091, 13112), 'numpy.array', 'np.array', (['init_orient'], {}), '(init_orient)\n', (13099, 13112), True, 'import numpy as np\n'), ((13895, 13916), 'numpy.array', 'np.array', (['init_orient'], {}), '(init_orient)\n', (13903, 13916), True, 'import numpy as np\n'), ((289, 307), 'numpy.dot', 'np.dot', (['v1_u', 'v2_u'], {}), '(v1_u, v2_u)\n', (295, 307), True, 'import numpy as np\n'), ((1098, 1111), 'numpy.abs', 'np.abs', (['theta'], {}), '(theta)\n', (1104, 1111), True, 'import numpy as np\n'), ((1137, 1150), 'numpy.abs', 'np.abs', (['theta'], {}), '(theta)\n', (1143, 1150), True, 'import numpy as np\n'), ((3258, 3289), 'numpy.dot', 'np.dot', (['vector_orig', 'vector_fin'], {}), '(vector_orig, vector_fin)\n', (3264, 3289), True, 'import numpy as np\n'), ((5233, 5244), 'numpy.zeros', 'np.zeros', (['(3)'], {}), '(3)\n', (5241, 5244), True, 'import numpy as np\n'), ((5313, 5358), 'scipy.spatial.transform.Rotation.from_rotvec', 'Rotation.from_rotvec', (['(ang_floor * r_vec_floor)'], {}), '(ang_floor * r_vec_floor)\n', (5333, 5358), False, 'from scipy.spatial.transform import Rotation\n'), ((5473, 5502), 'numpy.matmul', 'np.matmul', (['self.H0', 'R_floor_1'], {}), '(self.H0, R_floor_1)\n', (5482, 5502), True, 'import numpy as np\n'), ((5575, 5586), 'numpy.zeros', 'np.zeros', (['(3)'], {}), '(3)\n', (5583, 5586), True, 'import numpy as np\n'), ((6697, 6706), 'numpy.eye', 'np.eye', (['(4)'], {}), '(4)\n', (6703, 6706), True, 'import numpy as np\n'), ((7117, 7126), 'numpy.eye', 'np.eye', (['(3)'], {}), '(3)\n', (7123, 7126), True, 'import numpy as np\n'), ((7548, 7557), 'numpy.eye', 'np.eye', (['(3)'], {}), '(3)\n', (7554, 7557), True, 'import numpy as np\n'), ((11344, 11371), 'numpy.linalg.norm', 'np.linalg.norm', (['vector_orig'], {}), '(vector_orig)\n', (11358, 11371), True, 'import numpy as np\n'), ((11406, 11432), 'numpy.linalg.norm', 'np.linalg.norm', (['vector_fin'], {}), 
'(vector_fin)\n', (11420, 11432), True, 'import numpy as np\n'), ((1351, 1365), 'numpy.sign', 'np.sign', (['theta'], {}), '(theta)\n', (1358, 1365), True, 'import numpy as np\n'), ((6728, 6747), 'numpy.array', 'np.array', (['[0, 0, 1]'], {}), '([0, 0, 1])\n', (6736, 6747), True, 'import numpy as np\n'), ((6890, 6918), 'numpy.linalg.norm', 'np.linalg.norm', (['rot1g'], {'ord': '(2)'}), '(rot1g, ord=2)\n', (6904, 6918), True, 'import numpy as np\n'), ((7151, 7170), 'numpy.array', 'np.array', (['[0, 0, 1]'], {}), '([0, 0, 1])\n', (7159, 7170), True, 'import numpy as np\n'), ((7320, 7351), 'numpy.linalg.norm', 'np.linalg.norm', (['rot_to_2'], {'ord': '(2)'}), '(rot_to_2, ord=2)\n', (7334, 7351), True, 'import numpy as np\n'), ((7582, 7601), 'numpy.array', 'np.array', (['[0, 0, 1]'], {}), '([0, 0, 1])\n', (7590, 7601), True, 'import numpy as np\n'), ((7751, 7782), 'numpy.linalg.norm', 'np.linalg.norm', (['rot_to_g'], {'ord': '(2)'}), '(rot_to_g, ord=2)\n', (7765, 7782), True, 'import numpy as np\n'), ((8033, 8042), 'numpy.abs', 'np.abs', (['z'], {}), '(z)\n', (8039, 8042), True, 'import numpy as np\n'), ((8076, 8085), 'numpy.abs', 'np.abs', (['z'], {}), '(z)\n', (8082, 8085), True, 'import numpy as np\n'), ((8109, 8119), 'numpy.sign', 'np.sign', (['z'], {}), '(z)\n', (8116, 8119), True, 'import numpy as np\n'), ((9496, 9509), 'numpy.abs', 'np.abs', (['theta'], {}), '(theta)\n', (9502, 9509), True, 'import numpy as np\n'), ((9539, 9552), 'numpy.abs', 'np.abs', (['theta'], {}), '(theta)\n', (9545, 9552), True, 'import numpy as np\n'), ((11843, 11874), 'numpy.dot', 'np.dot', (['vector_orig', 'vector_fin'], {}), '(vector_orig, vector_fin)\n', (11849, 11874), True, 'import numpy as np\n'), ((12915, 12967), 'pybullet.getMatrixFromQuaternion', 'p.getMatrixFromQuaternion', (["obs['object_orientation']"], {}), "(obs['object_orientation'])\n", (12940, 12967), True, 'import pybullet as p\n'), ((13674, 13724), 'pybullet.getMatrixFromQuaternion', 'p.getMatrixFromQuaternion', (['init_information[10:14]'], {}), '(init_information[10:14])\n', (13699, 13724), True, 'import pybullet as p\n'), ((6508, 6545), 'scipy.spatial.transform.Rotation.from_rotvec', 'Rotation.from_rotvec', (['(c12[0] * c12[1])'], {}), '(c12[0] * c12[1])\n', (6528, 6545), False, 'from scipy.spatial.transform import Rotation\n'), ((6578, 6615), 'scipy.spatial.transform.Rotation.from_rotvec', 'Rotation.from_rotvec', (['(c2g[0] * c2g[1])'], {}), '(c2g[0] * c2g[1])\n', (6598, 6615), False, 'from scipy.spatial.transform import Rotation\n'), ((6785, 6822), 'scipy.spatial.transform.Rotation.from_matrix', 'Rotation.from_matrix', (['rot1g[:-1, :-1]'], {}), '(rot1g[:-1, :-1])\n', (6805, 6822), False, 'from scipy.spatial.transform import Rotation\n'), ((6861, 6889), 'numpy.linalg.norm', 'np.linalg.norm', (['rot1g'], {'ord': '(2)'}), '(rot1g, ord=2)\n', (6875, 6889), True, 'import numpy as np\n'), ((7213, 7241), 'scipy.spatial.transform.Rotation.from_matrix', 'Rotation.from_matrix', (['R_to_2'], {}), '(R_to_2)\n', (7233, 7241), False, 'from scipy.spatial.transform import Rotation\n'), ((7287, 7318), 'numpy.linalg.norm', 'np.linalg.norm', (['rot_to_2'], {'ord': '(2)'}), '(rot_to_2, ord=2)\n', (7301, 7318), True, 'import numpy as np\n'), ((7644, 7672), 'scipy.spatial.transform.Rotation.from_matrix', 'Rotation.from_matrix', (['R_to_g'], {}), '(R_to_g)\n', (7664, 7672), False, 'from scipy.spatial.transform import Rotation\n'), ((7718, 7749), 'numpy.linalg.norm', 'np.linalg.norm', (['rot_to_g'], {'ord': '(2)'}), '(rot_to_g, ord=2)\n', (7732, 7749), True, 
'import numpy as np\n'), ((9785, 9799), 'numpy.sign', 'np.sign', (['theta'], {}), '(theta)\n', (9792, 9799), True, 'import numpy as np\n')]
|
import argparse
import hashlib
import json
import os
MAESTRO_INDEX_PATH = '../mirdata/indexes/maestro_index.json'
def md5(file_path):
"""Get md5 hash of a file.
Parameters
----------
file_path: str
File path.
Returns
-------
md5_hash: str
md5 hash of data in file_path
"""
hash_md5 = hashlib.md5()
with open(file_path, 'rb') as fhandle:
for chunk in iter(lambda: fhandle.read(4096), b''):
hash_md5.update(chunk)
return hash_md5.hexdigest()
def make_maestro_index(data_path):
metadata_path = os.path.join(data_path, 'maestro-v2.0.0.json')
print(metadata_path)
maestro_index = {}
with open(metadata_path, 'r') as fhandle:
metadata = json.load(fhandle)
for i, row in enumerate(metadata):
print(i)
trackid = row['midi_filename'].split('.')[0]
maestro_index[trackid] = {}
midi_path = os.path.join(data_path, row['midi_filename'])
midi_checksum = md5(midi_path)
maestro_index[trackid]['midi'] = [row['midi_filename'], midi_checksum]
audio_path = os.path.join(data_path, row['audio_filename'])
audio_checksum = md5(audio_path)
maestro_index[trackid]['audio'] = [row['audio_filename'], audio_checksum]
with open(MAESTRO_INDEX_PATH, 'w') as fhandle:
json.dump(maestro_index, fhandle, indent=2)
def main(args):
print("creating index...")
make_maestro_index(args.maestro_data_path)
print("done!")
if __name__ == '__main__':
PARSER = argparse.ArgumentParser(description='Make MAESTRO index file.')
PARSER.add_argument(
'maestro_data_path', type=str, help='Path to MAESTRO data folder.'
)
main(PARSER.parse_args())
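# Illustrative invocation (the script filename is an assumption):
#   python make_maestro_index.py /path/to/maestro-v2.0.0
# This writes ../mirdata/indexes/maestro_index.json with entries of the form
#   {"<track_id>": {"midi": [midi_path, midi_md5], "audio": [audio_path, audio_md5]}}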
|
[
"json.dump",
"hashlib.md5",
"json.load",
"argparse.ArgumentParser",
"os.path.join"
] |
[((353, 366), 'hashlib.md5', 'hashlib.md5', ([], {}), '()\n', (364, 366), False, 'import hashlib\n'), ((594, 640), 'os.path.join', 'os.path.join', (['data_path', '"""maestro-v2.0.0.json"""'], {}), "(data_path, 'maestro-v2.0.0.json')\n", (606, 640), False, 'import os\n'), ((1598, 1661), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Make MAESTRO index file."""'}), "(description='Make MAESTRO index file.')\n", (1621, 1661), False, 'import argparse\n'), ((755, 773), 'json.load', 'json.load', (['fhandle'], {}), '(fhandle)\n', (764, 773), False, 'import json\n'), ((1397, 1440), 'json.dump', 'json.dump', (['maestro_index', 'fhandle'], {'indent': '(2)'}), '(maestro_index, fhandle, indent=2)\n', (1406, 1440), False, 'import json\n'), ((961, 1006), 'os.path.join', 'os.path.join', (['data_path', "row['midi_filename']"], {}), "(data_path, row['midi_filename'])\n", (973, 1006), False, 'import os\n'), ((1159, 1205), 'os.path.join', 'os.path.join', (['data_path', "row['audio_filename']"], {}), "(data_path, row['audio_filename'])\n", (1171, 1205), False, 'import os\n')]
|
from selenium import webdriver
from PIL import Image
import io
class WebsiteScreenshotGenerator():
def __init__(self):
self._screenshot = None
def capture(self, url, width, height, crop=True):
print ("Capturing website screenshot of: " + url)
driver = webdriver.PhantomJS()
if width and height:
driver.set_window_size(width, height)
# go and get the content at the url
driver.get(url)
# get the screenshot and make it into a Pillow Image
self._screenshot = Image.open(io.BytesIO(driver.get_screenshot_as_png()))
print("Got a screenshot with the following dimensions: {0}".format(self._screenshot.size))
if crop:
# crop the image
self._screenshot = self._screenshot.crop((0,0, width, height))
print("Cropped the image to: {0} {1}".format(width, height))
return self
@property
def image(self):
return self._screenshot
@property
def image_bytes(self):
bytesio = io.BytesIO()
self._screenshot.save(bytesio, "PNG")
bytesio.seek(0)
return bytesio.getvalue()
if __name__ == "__main__":
import const
g = WebsiteScreenshotGenerator()
#g.do_screen_capturing(const.ApodEclipsePage(), "/Users/michaelheydt/thumbnail.png", 500, 100)
g.do_screen_capturing("http://espn.go.com", 500, 100)
# need to explicitly crop
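    # The capture can then be persisted via the image_bytes property, e.g.
    # (illustrative filename):
    #   with open("espn_thumbnail.png", "wb") as fh:
    #       fh.write(g.image_bytes)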
|
[
"selenium.webdriver.PhantomJS",
"io.BytesIO"
] |
[((321, 342), 'selenium.webdriver.PhantomJS', 'webdriver.PhantomJS', ([], {}), '()\n', (340, 342), False, 'from selenium import webdriver\n'), ((1079, 1091), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (1089, 1091), False, 'import io\n')]
|
import sst
import sst.actions
from sst import config
# PhantomJS can not do alerts by design
if config.browser_type == 'phantomjs':
sst.actions.skip()
sst.actions.set_base_url('http://localhost:%s/' % sst.DEVSERVER_PORT)
sst.actions.go_to('/alerts')
# Accept an alert box and assert its text.
sst.actions.click_button('show-alert', wait=False)
sst.actions.accept_alert(u'JavaScript alert text')
sst.actions.assert_title('Page with JavaScript alerts')
# Accept a confirm box.
sst.actions.click_button('show-confirm', wait=False)
sst.actions.accept_alert()
sst.actions.accept_alert(u'Confirm accepted')
# Dismiss a confirm box and assert its text.
sst.actions.click_button('show-confirm', wait=False)
sst.actions.dismiss_alert(u'JavaScript confirm text')
sst.actions.accept_alert(u'Confirm dismissed')
# Enter text to a prompt box, accept it and assert its text.
sst.actions.click_button('show-prompt', wait=False)
sst.actions.accept_alert(u'JavaScript prompt text', 'Entered text')
sst.actions.accept_alert('Entered text')
# Enter text to a prompt box and dismiss it.
sst.actions.click_button('show-prompt', wait=False)
sst.actions.dismiss_alert(text_to_write='Entered text')
sst.actions.assert_title('Page with JavaScript alerts')
|
[
"sst.actions.set_base_url",
"sst.actions.click_button",
"sst.actions.dismiss_alert",
"sst.actions.assert_title",
"sst.actions.go_to",
"sst.actions.accept_alert",
"sst.actions.skip"
] |
[((159, 228), 'sst.actions.set_base_url', 'sst.actions.set_base_url', (["('http://localhost:%s/' % sst.DEVSERVER_PORT)"], {}), "('http://localhost:%s/' % sst.DEVSERVER_PORT)\n", (183, 228), False, 'import sst\n'), ((229, 257), 'sst.actions.go_to', 'sst.actions.go_to', (['"""/alerts"""'], {}), "('/alerts')\n", (246, 257), False, 'import sst\n'), ((302, 352), 'sst.actions.click_button', 'sst.actions.click_button', (['"""show-alert"""'], {'wait': '(False)'}), "('show-alert', wait=False)\n", (326, 352), False, 'import sst\n'), ((353, 403), 'sst.actions.accept_alert', 'sst.actions.accept_alert', (['u"""JavaScript alert text"""'], {}), "(u'JavaScript alert text')\n", (377, 403), False, 'import sst\n'), ((404, 459), 'sst.actions.assert_title', 'sst.actions.assert_title', (['"""Page with JavaScript alerts"""'], {}), "('Page with JavaScript alerts')\n", (428, 459), False, 'import sst\n'), ((485, 537), 'sst.actions.click_button', 'sst.actions.click_button', (['"""show-confirm"""'], {'wait': '(False)'}), "('show-confirm', wait=False)\n", (509, 537), False, 'import sst\n'), ((538, 564), 'sst.actions.accept_alert', 'sst.actions.accept_alert', ([], {}), '()\n', (562, 564), False, 'import sst\n'), ((565, 610), 'sst.actions.accept_alert', 'sst.actions.accept_alert', (['u"""Confirm accepted"""'], {}), "(u'Confirm accepted')\n", (589, 610), False, 'import sst\n'), ((657, 709), 'sst.actions.click_button', 'sst.actions.click_button', (['"""show-confirm"""'], {'wait': '(False)'}), "('show-confirm', wait=False)\n", (681, 709), False, 'import sst\n'), ((710, 763), 'sst.actions.dismiss_alert', 'sst.actions.dismiss_alert', (['u"""JavaScript confirm text"""'], {}), "(u'JavaScript confirm text')\n", (735, 763), False, 'import sst\n'), ((764, 810), 'sst.actions.accept_alert', 'sst.actions.accept_alert', (['u"""Confirm dismissed"""'], {}), "(u'Confirm dismissed')\n", (788, 810), False, 'import sst\n'), ((873, 924), 'sst.actions.click_button', 'sst.actions.click_button', (['"""show-prompt"""'], {'wait': '(False)'}), "('show-prompt', wait=False)\n", (897, 924), False, 'import sst\n'), ((925, 992), 'sst.actions.accept_alert', 'sst.actions.accept_alert', (['u"""JavaScript prompt text"""', '"""Entered text"""'], {}), "(u'JavaScript prompt text', 'Entered text')\n", (949, 992), False, 'import sst\n'), ((993, 1033), 'sst.actions.accept_alert', 'sst.actions.accept_alert', (['"""Entered text"""'], {}), "('Entered text')\n", (1017, 1033), False, 'import sst\n'), ((1080, 1131), 'sst.actions.click_button', 'sst.actions.click_button', (['"""show-prompt"""'], {'wait': '(False)'}), "('show-prompt', wait=False)\n", (1104, 1131), False, 'import sst\n'), ((1132, 1187), 'sst.actions.dismiss_alert', 'sst.actions.dismiss_alert', ([], {'text_to_write': '"""Entered text"""'}), "(text_to_write='Entered text')\n", (1157, 1187), False, 'import sst\n'), ((1188, 1243), 'sst.actions.assert_title', 'sst.actions.assert_title', (['"""Page with JavaScript alerts"""'], {}), "('Page with JavaScript alerts')\n", (1212, 1243), False, 'import sst\n'), ((138, 156), 'sst.actions.skip', 'sst.actions.skip', ([], {}), '()\n', (154, 156), False, 'import sst\n')]
|
from collections.abc import Iterable
import numpy as np
import pandas as pd
import param
import xarray as xr
from matplotlib.colors import LinearSegmentedColormap, rgb2hex
from .configuration import DEFAULTS, EASES, INTERPS, PRECEDENCES, REVERTS
from .util import is_str
class Easing(param.Parameterized):
interp = param.ClassSelector(
default=None,
class_=Iterable,
doc=f"Interpolation method; {INTERPS}",
precedence=PRECEDENCES["interp"],
)
ease = param.ClassSelector(
default="in_out",
class_=Iterable,
doc=f"Type of easing; {EASES}",
precedence=PRECEDENCES["interp"],
)
frames = param.Integer(
default=None,
bounds=(1, None),
doc="Number of frames between each base state",
precedence=PRECEDENCES["interp"],
)
revert = param.ObjectSelector(
default=None,
objects=REVERTS,
doc="Method for reverting to the initial state; "
"boomerang finds the shortest path to the initial state, "
"traceback backtracks the original path to the initial state, and "
"rollback is like traceback, but disregards the "
"original's path durations",
precedence=PRECEDENCES["interp"],
)
num_states = param.Integer(doc="Number of states", **DEFAULTS["num_kwds"])
num_steps = param.Integer(
doc="Number of frames between each base state", **DEFAULTS["num_kwds"]
)
def __init__(self, **kwds):
super().__init__(**kwds)
def interpolate(self, da, name=""):
interp = self.interp or "cubic"
ease = self.ease
da_origin = da.copy()
is_xarray = isinstance(da, xr.DataArray)
is_bar = False
if is_xarray:
if "state" not in da.dims:
return da_origin
(
da,
name,
dims,
coords,
interp,
ease,
is_bar,
is_errorbar_morph,
) = self._prep_xarray(da)
array = self._prep_array(da)
num_items, num_states, num_steps, num_result = self._calc_shapes(array)
if (num_steps == 1 or num_states == 1) and self.revert is None:
return da_origin
steps = np.linspace(0, 1, num_steps)
interp_args = (steps, interp, ease, num_states, num_steps, num_items)
array_dtype = array.dtype
if name in ["duration", "remark", "xerr", "yerr"] and not is_errorbar_morph:
result = self._interp_first(
array, num_states, num_steps, num_items, num_result, name
)
elif interp == "fill" or name.endswith(
("zoom", "discrete_trail", "morph_trail", "tick_label", "bar_label")
):
result = self._interp_fill(array, num_states, num_steps, name)
elif np.issubdtype(array_dtype, np.datetime64):
result = self._interp_time(array, pd.to_datetime, *interp_args)
elif np.issubdtype(array_dtype, np.timedelta64):
result = self._interp_time(array, pd.to_timedelta, *interp_args)
elif np.issubdtype(array_dtype, np.number) and not is_bar:
if name == "central_longitude":
interp = "linear"
result = self._interp_numeric(array, *interp_args)
elif name in "c": # must be after number
result = self._interp_color(array, num_result)
elif is_bar:
result = self._interp_fill(array, num_states, num_steps, name)
else: # str
result = self._interp_text(array, num_states, num_steps, num_result)
if self.revert in ["traceback", "rollback"]:
result = self._apply_revert(result, name)
if is_xarray:
result = self._rebuild_da(result, da, dims, coords)
return result
def _prep_xarray(self, da):
name = da.name
interp = da.attrs.get("interp")
ease = da.attrs.get("ease")
for item_dim in da.dims:
if "item" in item_dim:
if "batch" in da.dims:
da = da.transpose(item_dim, "batch", "state", ...)
else:
da = da.transpose(item_dim, "state", ...)
break
dims = da.dims
if da.ndim > 2: # more than (item, state)
if "grid_item" in dims:
da = da.stack({"stacked": ["grid_item", "grid_y", "grid_x"]})
elif "batch" in dims:
da = da.stack({"stacked": [item_dim, "batch"]})
da = da.transpose("stacked", "state")
coords = da.drop_vars("state", errors="ignore").coords
is_bar = da.attrs.get("is_bar")
is_errorbar_morph = da.attrs.get("is_errorbar_morph")
return da, name, dims, coords, interp, ease, is_bar, is_errorbar_morph
def _prep_array(self, da):
array = np.array(da)
if array.ndim == 1:
array = array[np.newaxis, :]
if self.revert == "boomerang":
array = np.hstack([array, array[:, :1]])
return array
def _calc_shapes(self, array):
num_items, num_states = array.shape
if self.frames is None:
if num_states < 10:
num_steps = int(np.ceil(60 / num_states))
else:
num_steps = int(np.ceil(100 / num_states))
else:
num_steps = self.frames
with param.edit_constant(self):
self.num_steps = num_steps
num_result = (num_states - 1) * num_steps
return num_items, num_states, num_steps, num_result
def _apply_revert(self, result, name):
if result.ndim == 1:
result_back = result[::-1]
else:
result_back = result[:, ::-1]
if name == "duration" and self.revert == "rollback":
result_back = np.repeat(1 / 60, result_back.shape[-1])[np.newaxis, :]
result = np.hstack([result, result_back])
return result
def _rebuild_da(self, result, da, dims, coords):
if len(dims) == 1:
result = result.squeeze()
result = xr.DataArray(
result,
dims=da.dims,
coords=coords,
name=da.name,
attrs=da.attrs,
)
if "stacked" in result.dims:
result = result.unstack().transpose(*dims)
return result
def _interp_first(self, array, num_states, num_steps, num_items, num_result, name):
if is_str(array):
fill = ""
            dtype = object
else:
fill = 0.0
dtype = None
result = np.full((num_items, num_result), fill, dtype=dtype)
indices = np.arange(num_states) * num_steps
indices[-1] -= 1
result[:, indices] = array # (1, num_states)
return result
def _interp_fill(self, array, num_states, num_steps, name):
indices = np.arange(num_states * num_steps - num_steps)
result = (
pd.DataFrame(
array,
columns=np.arange(0, num_states * num_steps, num_steps),
)
.T.reindex(indices)
.T
)
if not name.endswith("discrete_trail"):
result = result.ffill(axis=1).fillna("").values
result[:, -1] = array[:, -1]
else:
result = result.values
return result
def _interp_color(self, array, num_result):
results = []
for colors in array: # item, state
cmap = LinearSegmentedColormap.from_list("eased", colors, N=num_result)
results.append([rgb2hex(rgb) for rgb in cmap(np.arange(num_result))])
result = np.array(results)
return result
def _interp_text(self, array, num_states, num_steps, num_result):
result = np.repeat(array, num_steps, axis=-1)
num_roll = -int(np.ceil(num_steps / num_states * 2))
if num_states > 2:
result = np.roll(result, num_roll, axis=-1)
result = result[:, :num_result]
else:
half_way = int(num_result / 2)
result = result[:, half_way:-half_way]
if num_steps % 2 != 0:
result = result[:, :-1]
return result
def _interp_time(
self, array, conversion, steps, interp, ease, num_states, num_steps, num_items
):
array = array.astype(float)
result = self._interp_numeric(
array, steps, interp, ease, num_states, num_steps, num_items
)
result = conversion(result.ravel()).values
result = result.reshape(num_items, -1)
return result
def _interp_numeric(
self, array, steps, interp, ease, num_states, num_steps, num_items
):
init = np.repeat(array[:, :-1], num_steps, axis=-1)
init_nans = np.isnan(init)
init[init_nans] = 0 # temporarily fill the nans
stop = np.repeat(array[:, 1:], num_steps, axis=-1)
stop_nans = np.isnan(stop)
tiled_steps = np.tile(steps, (num_states - 1) * num_items).reshape(
num_items, -1
)
weights = getattr(self, f"_{interp.lower()}")(tiled_steps, ease)
result = stop * weights + init * (1 - weights)
result[init_nans | stop_nans] = np.nan # replace nans
return result
def _linear(self, ts, ease):
return ts
def _quadratic(self, ts, ease):
if ease == "in":
ts = ts * ts
elif ease == "out":
ts = -(ts * (ts - 2))
elif ease == "in_out":
index = ts < 0.5
ts[index] = 2 * ts[index] * ts[index]
ts[~index] = (-2 * ts[~index] * ts[~index]) + (4 * ts[~index]) - 1
return ts
def _cubic(self, ts, ease):
if ease == "in":
ts = ts * ts * ts
elif ease == "out":
ts = (ts - 1) * (ts - 1) * (ts - 1) + 1
elif ease == "in_out":
index = ts < 0.5
ts[index] = 4 * ts[index] * ts[index] * ts[index]
ts[~index] = 2 * ts[~index] - 2
ts[~index] = 0.5 * ts[~index] * ts[~index] * ts[~index] + 1
return ts
def _quartic(self, ts, ease):
if ease == "in":
ts = ts * ts * ts * ts
elif ease == "out":
ts = (ts - 1) * (ts - 1) * (ts - 1) * (1 - ts) + 1
elif ease == "in_out":
index = ts < 0.5
ts[index] = 8 * ts[index] * ts[index] * ts[index] * ts[index]
ts[~index] = ts[~index] - 1
ts[~index] = -8 * ts[~index] * ts[~index] * ts[~index] * ts[~index] + 1
return ts
def _quintic(self, ts, ease):
if ease == "in":
ts = ts * ts * ts * ts * ts
elif ease == "out":
ts = (ts - 1) * (ts - 1) * (ts - 1) * (ts - 1) * (ts - 1) + 1
elif ease == "in_out":
index = ts < 0.5
ts[index] = 16 * ts[index] * ts[index] * ts[index] * ts[index] * ts[index]
ts[~index] = (2 * ts[~index]) - 2
ts[~index] = (
0.5 * ts[~index] * ts[~index] * ts[~index] * ts[~index] * ts[~index] + 1
)
return ts
def _sine(self, ts, ease):
if ease == "in":
ts = np.sin((ts - 1) * np.pi / 2) + 1
elif ease == "out":
ts = np.sin(ts * np.pi / 2)
elif ease == "in_out":
ts = 0.5 * (1 - np.cos(ts * np.pi))
return ts
def _circular(self, ts, ease):
if ease == "in":
ts = 1 - np.sqrt(1 - (ts * ts))
elif ease == "out":
ts = np.sqrt((2 - ts) * ts)
elif ease == "in_out":
index = ts < 0.5
ts[index] = 0.5 * (1 - np.sqrt(1 - 4 * (ts[index] * ts[index])))
ts[~index] = 0.5 * (
np.sqrt(-((2 * ts[~index]) - 3) * ((2 * ts[~index]) - 1)) + 1
)
return ts
def _exponential(self, ts, ease):
if ease == "in":
index = ts != 0
ts[~index] = 0
ts[index] = np.power(2, 10 * (ts[index] - 1))
elif ease == "out":
index = ts != 1
ts[~index] = 1
ts[index] = 1 - np.power(2, -10 * ts[index])
elif ease == "in_out":
index0 = (ts != 0) & (ts < 0.5) & (ts != 1)
index1 = (ts != 0) & (ts >= 0.5) & (ts != 1)
ts[index0] = 0.5 * np.power(2, (20 * ts[index0]) - 10)
ts[index1] = -0.5 * np.power(2, (-20 * ts[index1]) + 10) + 1
return ts
def _elastic(self, ts, ease):
if ease == "in":
ts = np.sin(13 * np.pi / 2 * ts) * np.power(2, 10 * (ts - 1))
elif ease == "out":
ts = np.sin(-13 * np.pi / 2 * (ts + 1)) * np.power(2, -10 * ts) + 1
elif ease == "in_out":
index = ts < 0.5
ts[index] = (
0.5
* np.sin(13 * np.pi / 2 * (2 * ts[index]))
* np.power(2, 10 * ((2 * ts[index]) - 1))
)
ts[~index] = 0.5 * (
np.sin(-13 * np.pi / 2 * ((2 * ts[~index] - 1) + 1))
* np.power(2, -10 * (2 * ts[~index] - 1))
+ 2
)
return ts
def _back(self, ts, ease):
if ease == "in":
ts = ts * ts * ts - ts * np.sin(ts * np.pi)
elif ease == "out":
ts = 1 - ts
ts = 1 - (ts * ts * ts - ts * np.sin(ts * np.pi))
elif ease == "in_out":
index = ts < 0.5
ts[index] = 2 * ts[index]
ts[index] = 0.5 * (
ts[index] * ts[index] * ts[index]
- ts[index] * np.sin(ts[index] * np.pi)
)
ts[~index] = 1 - (2 * ts[~index] - 1)
ts[~index] = (
0.5
* (
1
- (
ts[~index] * ts[~index] * ts[~index]
- ts[~index] * np.sin(ts[~index] * np.pi)
)
)
+ 0.5
)
return ts
def _bounce(self, ts, ease):
index = ts < 0.5
if ease == "in":
ts = 1 - ts
elif ease == "in_out":
ts[index] = 1 - (ts[index] * 2)
ts[~index] = ts[~index] * 2 - 1
index0 = ts < 4 / 11
index1 = (ts < 8 / 11) & ~index0
index2 = (ts < 9 / 10) & ~index1 & ~index0
index3 = ts >= 9 / 10
ts[index0] = 121 * ts[index0] * ts[index0] / 16
ts[index1] = (
(363 / 40.0 * ts[index1] * ts[index1]) - (99 / 10.0 * ts[index1]) + 17 / 5.0
)
ts[index2] = (
(4356 / 361.0 * ts[index2] * ts[index2])
- (35442 / 1805.0 * ts[index2])
+ 16061 / 1805.0
)
ts[index3] = (
(54 / 5.0 * ts[index3] * ts[index3])
- (513 / 25.0 * ts[index3])
+ 268 / 25.0
)
if ease == "in":
ts = 1 - ts
elif ease == "out":
pass
elif ease == "in_out":
ts[index] = 0.5 * (1 - ts[index])
ts[~index] = 0.5 * ts[~index] + 0.5
return ts
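# Illustrative usage sketch; the two keyframe values are made up, and running this
# requires the package context for the relative imports at the top of the module.
if __name__ == "__main__":
    keyframes = np.array([0.0, 10.0])  # two base states
    eased = Easing(frames=30).interpolate(keyframes)  # cubic "in_out" easing by default
    print(eased.shape)  # (1, 30): 30 interpolated frames between the two states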
|
[
"numpy.isnan",
"numpy.sin",
"numpy.arange",
"numpy.tile",
"matplotlib.colors.rgb2hex",
"param.Integer",
"numpy.full",
"matplotlib.colors.LinearSegmentedColormap.from_list",
"numpy.power",
"param.edit_constant",
"numpy.linspace",
"numpy.repeat",
"numpy.ceil",
"numpy.roll",
"numpy.hstack",
"numpy.cos",
"numpy.issubdtype",
"numpy.array",
"xarray.DataArray",
"param.ClassSelector",
"param.ObjectSelector",
"numpy.sqrt"
] |
[((324, 453), 'param.ClassSelector', 'param.ClassSelector', ([], {'default': 'None', 'class_': 'Iterable', 'doc': 'f"""Interpolation method; {INTERPS}"""', 'precedence': "PRECEDENCES['interp']"}), "(default=None, class_=Iterable, doc=\n f'Interpolation method; {INTERPS}', precedence=PRECEDENCES['interp'])\n", (343, 453), False, 'import param\n'), ((499, 624), 'param.ClassSelector', 'param.ClassSelector', ([], {'default': '"""in_out"""', 'class_': 'Iterable', 'doc': 'f"""Type of easing; {EASES}"""', 'precedence': "PRECEDENCES['interp']"}), "(default='in_out', class_=Iterable, doc=\n f'Type of easing; {EASES}', precedence=PRECEDENCES['interp'])\n", (518, 624), False, 'import param\n'), ((672, 809), 'param.Integer', 'param.Integer', ([], {'default': 'None', 'bounds': '(1, None)', 'doc': '"""Number of frames between each base state"""', 'precedence': "PRECEDENCES['interp']"}), "(default=None, bounds=(1, None), doc=\n 'Number of frames between each base state', precedence=PRECEDENCES[\n 'interp'])\n", (685, 809), False, 'import param\n'), ((852, 1191), 'param.ObjectSelector', 'param.ObjectSelector', ([], {'default': 'None', 'objects': 'REVERTS', 'doc': '"""Method for reverting to the initial state; boomerang finds the shortest path to the initial state, traceback backtracks the original path to the initial state, and rollback is like traceback, but disregards the original\'s path durations"""', 'precedence': "PRECEDENCES['interp']"}), '(default=None, objects=REVERTS, doc=\n "Method for reverting to the initial state; boomerang finds the shortest path to the initial state, traceback backtracks the original path to the initial state, and rollback is like traceback, but disregards the original\'s path durations"\n , precedence=PRECEDENCES[\'interp\'])\n', (872, 1191), False, 'import param\n'), ((1283, 1344), 'param.Integer', 'param.Integer', ([], {'doc': '"""Number of states"""'}), "(doc='Number of states', **DEFAULTS['num_kwds'])\n", (1296, 1344), False, 'import param\n'), ((1361, 1451), 'param.Integer', 'param.Integer', ([], {'doc': '"""Number of frames between each base state"""'}), "(doc='Number of frames between each base state', **DEFAULTS[\n 'num_kwds'])\n", (1374, 1451), False, 'import param\n'), ((2313, 2341), 'numpy.linspace', 'np.linspace', (['(0)', '(1)', 'num_steps'], {}), '(0, 1, num_steps)\n', (2324, 2341), True, 'import numpy as np\n'), ((4928, 4940), 'numpy.array', 'np.array', (['da'], {}), '(da)\n', (4936, 4940), True, 'import numpy as np\n'), ((5975, 6007), 'numpy.hstack', 'np.hstack', (['[result, result_back]'], {}), '([result, result_back])\n', (5984, 6007), True, 'import numpy as np\n'), ((6166, 6245), 'xarray.DataArray', 'xr.DataArray', (['result'], {'dims': 'da.dims', 'coords': 'coords', 'name': 'da.name', 'attrs': 'da.attrs'}), '(result, dims=da.dims, coords=coords, name=da.name, attrs=da.attrs)\n', (6178, 6245), True, 'import xarray as xr\n'), ((6677, 6728), 'numpy.full', 'np.full', (['(num_items, num_result)', 'fill'], {'dtype': 'dtype'}), '((num_items, num_result), fill, dtype=dtype)\n', (6684, 6728), True, 'import numpy as np\n'), ((6965, 7010), 'numpy.arange', 'np.arange', (['(num_states * num_steps - num_steps)'], {}), '(num_states * num_steps - num_steps)\n', (6974, 7010), True, 'import numpy as np\n'), ((7740, 7757), 'numpy.array', 'np.array', (['results'], {}), '(results)\n', (7748, 7757), True, 'import numpy as np\n'), ((7868, 7904), 'numpy.repeat', 'np.repeat', (['array', 'num_steps'], {'axis': '(-1)'}), '(array, num_steps, axis=-1)\n', (7877, 7904), True, 
'import numpy as np\n'), ((8816, 8860), 'numpy.repeat', 'np.repeat', (['array[:, :-1]', 'num_steps'], {'axis': '(-1)'}), '(array[:, :-1], num_steps, axis=-1)\n', (8825, 8860), True, 'import numpy as np\n'), ((8881, 8895), 'numpy.isnan', 'np.isnan', (['init'], {}), '(init)\n', (8889, 8895), True, 'import numpy as np\n'), ((8968, 9011), 'numpy.repeat', 'np.repeat', (['array[:, 1:]', 'num_steps'], {'axis': '(-1)'}), '(array[:, 1:], num_steps, axis=-1)\n', (8977, 9011), True, 'import numpy as np\n'), ((9032, 9046), 'numpy.isnan', 'np.isnan', (['stop'], {}), '(stop)\n', (9040, 9046), True, 'import numpy as np\n'), ((5071, 5103), 'numpy.hstack', 'np.hstack', (['[array, array[:, :1]]'], {}), '([array, array[:, :1]])\n', (5080, 5103), True, 'import numpy as np\n'), ((5470, 5495), 'param.edit_constant', 'param.edit_constant', (['self'], {}), '(self)\n', (5489, 5495), False, 'import param\n'), ((6747, 6768), 'numpy.arange', 'np.arange', (['num_states'], {}), '(num_states)\n', (6756, 6768), True, 'import numpy as np\n'), ((7576, 7640), 'matplotlib.colors.LinearSegmentedColormap.from_list', 'LinearSegmentedColormap.from_list', (['"""eased"""', 'colors'], {'N': 'num_result'}), "('eased', colors, N=num_result)\n", (7609, 7640), False, 'from matplotlib.colors import LinearSegmentedColormap, rgb2hex\n'), ((8014, 8048), 'numpy.roll', 'np.roll', (['result', 'num_roll'], {'axis': '(-1)'}), '(result, num_roll, axis=-1)\n', (8021, 8048), True, 'import numpy as np\n'), ((12077, 12110), 'numpy.power', 'np.power', (['(2)', '(10 * (ts[index] - 1))'], {}), '(2, 10 * (ts[index] - 1))\n', (12085, 12110), True, 'import numpy as np\n'), ((2896, 2937), 'numpy.issubdtype', 'np.issubdtype', (['array_dtype', 'np.datetime64'], {}), '(array_dtype, np.datetime64)\n', (2909, 2937), True, 'import numpy as np\n'), ((5902, 5942), 'numpy.repeat', 'np.repeat', (['(1 / 60)', 'result_back.shape[-1]'], {}), '(1 / 60, result_back.shape[-1])\n', (5911, 5942), True, 'import numpy as np\n'), ((7929, 7964), 'numpy.ceil', 'np.ceil', (['(num_steps / num_states * 2)'], {}), '(num_steps / num_states * 2)\n', (7936, 7964), True, 'import numpy as np\n'), ((9069, 9113), 'numpy.tile', 'np.tile', (['steps', '((num_states - 1) * num_items)'], {}), '(steps, (num_states - 1) * num_items)\n', (9076, 9113), True, 'import numpy as np\n'), ((11283, 11311), 'numpy.sin', 'np.sin', (['((ts - 1) * np.pi / 2)'], {}), '((ts - 1) * np.pi / 2)\n', (11289, 11311), True, 'import numpy as np\n'), ((11361, 11383), 'numpy.sin', 'np.sin', (['(ts * np.pi / 2)'], {}), '(ts * np.pi / 2)\n', (11367, 11383), True, 'import numpy as np\n'), ((11563, 11583), 'numpy.sqrt', 'np.sqrt', (['(1 - ts * ts)'], {}), '(1 - ts * ts)\n', (11570, 11583), True, 'import numpy as np\n'), ((11631, 11653), 'numpy.sqrt', 'np.sqrt', (['((2 - ts) * ts)'], {}), '((2 - ts) * ts)\n', (11638, 11653), True, 'import numpy as np\n'), ((12630, 12657), 'numpy.sin', 'np.sin', (['(13 * np.pi / 2 * ts)'], {}), '(13 * np.pi / 2 * ts)\n', (12636, 12657), True, 'import numpy as np\n'), ((12660, 12686), 'numpy.power', 'np.power', (['(2)', '(10 * (ts - 1))'], {}), '(2, 10 * (ts - 1))\n', (12668, 12686), True, 'import numpy as np\n'), ((3028, 3070), 'numpy.issubdtype', 'np.issubdtype', (['array_dtype', 'np.timedelta64'], {}), '(array_dtype, np.timedelta64)\n', (3041, 3070), True, 'import numpy as np\n'), ((5303, 5327), 'numpy.ceil', 'np.ceil', (['(60 / num_states)'], {}), '(60 / num_states)\n', (5310, 5327), True, 'import numpy as np\n'), ((5379, 5404), 'numpy.ceil', 'np.ceil', (['(100 / num_states)'], {}), '(100 / 
num_states)\n', (5386, 5404), True, 'import numpy as np\n'), ((7669, 7681), 'matplotlib.colors.rgb2hex', 'rgb2hex', (['rgb'], {}), '(rgb)\n', (7676, 7681), False, 'from matplotlib.colors import LinearSegmentedColormap, rgb2hex\n'), ((12222, 12250), 'numpy.power', 'np.power', (['(2)', '(-10 * ts[index])'], {}), '(2, -10 * ts[index])\n', (12230, 12250), True, 'import numpy as np\n'), ((13338, 13356), 'numpy.sin', 'np.sin', (['(ts * np.pi)'], {}), '(ts * np.pi)\n', (13344, 13356), True, 'import numpy as np\n'), ((12426, 12459), 'numpy.power', 'np.power', (['(2)', '(20 * ts[index0] - 10)'], {}), '(2, 20 * ts[index0] - 10)\n', (12434, 12459), True, 'import numpy as np\n'), ((12732, 12766), 'numpy.sin', 'np.sin', (['(-13 * np.pi / 2 * (ts + 1))'], {}), '(-13 * np.pi / 2 * (ts + 1))\n', (12738, 12766), True, 'import numpy as np\n'), ((12769, 12790), 'numpy.power', 'np.power', (['(2)', '(-10 * ts)'], {}), '(2, -10 * ts)\n', (12777, 12790), True, 'import numpy as np\n'), ((12978, 13015), 'numpy.power', 'np.power', (['(2)', '(10 * (2 * ts[index] - 1))'], {}), '(2, 10 * (2 * ts[index] - 1))\n', (12986, 13015), True, 'import numpy as np\n'), ((3162, 3199), 'numpy.issubdtype', 'np.issubdtype', (['array_dtype', 'np.number'], {}), '(array_dtype, np.number)\n', (3175, 3199), True, 'import numpy as np\n'), ((7698, 7719), 'numpy.arange', 'np.arange', (['num_result'], {}), '(num_result)\n', (7707, 7719), True, 'import numpy as np\n'), ((11443, 11461), 'numpy.cos', 'np.cos', (['(ts * np.pi)'], {}), '(ts * np.pi)\n', (11449, 11461), True, 'import numpy as np\n'), ((11749, 11789), 'numpy.sqrt', 'np.sqrt', (['(1 - 4 * (ts[index] * ts[index]))'], {}), '(1 - 4 * (ts[index] * ts[index]))\n', (11756, 11789), True, 'import numpy as np\n'), ((11840, 11893), 'numpy.sqrt', 'np.sqrt', (['(-(2 * ts[~index] - 3) * (2 * ts[~index] - 1))'], {}), '(-(2 * ts[~index] - 3) * (2 * ts[~index] - 1))\n', (11847, 11893), True, 'import numpy as np\n'), ((12494, 12528), 'numpy.power', 'np.power', (['(2)', '(-20 * ts[index1] + 10)'], {}), '(2, -20 * ts[index1] + 10)\n', (12502, 12528), True, 'import numpy as np\n'), ((12919, 12959), 'numpy.sin', 'np.sin', (['(13 * np.pi / 2 * (2 * ts[index]))'], {}), '(13 * np.pi / 2 * (2 * ts[index]))\n', (12925, 12959), True, 'import numpy as np\n'), ((13451, 13469), 'numpy.sin', 'np.sin', (['(ts * np.pi)'], {}), '(ts * np.pi)\n', (13457, 13469), True, 'import numpy as np\n'), ((7103, 7150), 'numpy.arange', 'np.arange', (['(0)', '(num_states * num_steps)', 'num_steps'], {}), '(0, num_states * num_steps, num_steps)\n', (7112, 7150), True, 'import numpy as np\n'), ((13081, 13131), 'numpy.sin', 'np.sin', (['(-13 * np.pi / 2 * (2 * ts[~index] - 1 + 1))'], {}), '(-13 * np.pi / 2 * (2 * ts[~index] - 1 + 1))\n', (13087, 13131), True, 'import numpy as np\n'), ((13152, 13191), 'numpy.power', 'np.power', (['(2)', '(-10 * (2 * ts[~index] - 1))'], {}), '(2, -10 * (2 * ts[~index] - 1))\n', (13160, 13191), True, 'import numpy as np\n'), ((13681, 13706), 'numpy.sin', 'np.sin', (['(ts[index] * np.pi)'], {}), '(ts[index] * np.pi)\n', (13687, 13706), True, 'import numpy as np\n'), ((13984, 14010), 'numpy.sin', 'np.sin', (['(ts[~index] * np.pi)'], {}), '(ts[~index] * np.pi)\n', (13990, 14010), True, 'import numpy as np\n')]
|
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.contrib.auth.mixins import LoginRequiredMixin
from django.core.paginator import Paginator
from django.db.models import Count
from django.shortcuts import get_object_or_404, redirect, render
from django.urls import reverse
from django.utils import timezone
from .forms import CommentForm, GroupForm, PostForm
from .models import Comment, Follow, Group, Post, User
from django.views.generic import (ListView,
DetailView,
CreateView,
UpdateView,
DeleteView)
class IndexListView(ListView):
model = Post
template_name = 'index.html'
paginate_by = 5
context_object_name = 'post_list'
extra_context = {
'index_page': True
}
# def group_posts(request, slug):
# '''Страница с публикиями связанными с группой'''
# group = get_object_or_404(Group, slug=slug)
# post_list = Post.objects.filter(
# group=group).select_related(
# 'author', 'group').annotate(
# comment_count=Count(
# 'commented_post')).order_by("-pub_date").all()
# paginator = Paginator(post_list, 10)
# page_number = request.GET.get('page')
# page = paginator.get_page(page_number)
# return render(request, "group.html", {'group': group,
# 'post_list': page,
# 'paginator': paginator})
class GroupPostView(ListView):
model = Post
template_name = 'group.html'
paginate_by = 5
context_object_name = 'post_list'
@property
def extra_context(self):
return {
'group': get_object_or_404(Group, slug=self.kwargs['slug'])
}
def get_queryset(self, *args, **kwargs):
query_set = super().get_queryset()
return query_set.filter(
group__slug=self.kwargs['slug']).select_related(
'author', 'group')
# @login_required
# def new_post(request):
# '''Страница создания новой публикации'''
# if request.method == 'POST':
# form = PostForm(request.POST, files=request.FILES or None)
# if form.is_valid():
# post = form.save(commit=False)
# post.author = request.user
# post.save()
# messages.add_message(
# request, messages.SUCCESS, f'Новая запись добавлена', extra_tags='success'
# )
# return redirect('index')
# else:
# form = PostForm()
# return render(request, 'new_post.html', {'form': form})
# @login_required
class NewPostCreateView(LoginRequiredMixin, CreateView):
model = Post
form_class = PostForm
template_name = 'new_post.html'
extra_context = {
        'title': 'Создать новую запись'
}
def form_valid(self, form):
post = form.save(commit=False)
post.author = self.request.user
post.save()
messages.add_message(
self.request,
messages.SUCCESS,
f'Новая запись добавлена'
)
return super().form_valid(form)
def get_success_url(self):
return reverse('index')
# def post_view(request, post_id, username):
# '''Страница отдельной публикации'''
# user_profile = get_object_or_404(
# User.objects.filter(username=username).annotate(
# follower_count=Count('follower', distinct=True),
# following_count=Count('following', distinct=True),
# post_count=Count('post_author', distinct=True)))
# post = get_object_or_404(
# Post.objects.annotate(
# comment_count=Count(
# 'commented_post')).select_related('author', 'group'),
# pk=post_id)
# post_comment = Comment.objects.filter(
# post=post_id).select_related('author').order_by("-created").all()
# form = CommentForm()
# following = False
# if request.user.is_authenticated:
# if Follow.objects.filter(author=user_profile,
# user=request.user).exists():
# following = True
# return render(request, 'post_view.html', {'post': post,
# 'profile': user_profile,
# 'comments': post_comment,
# 'form': form,
# 'following': following})
class PostView(ListView):
model = Comment
template_name = 'post_view.html'
context_object_name = 'comments'
@property
def extra_context(self):
user_profile = get_object_or_404(
User.objects.filter(username=self.kwargs['username']).annotate(
follower_count=Count('follower', distinct=True),
following_count=Count('following', distinct=True),
post_count=Count('post_author', distinct=True)))
post = get_object_or_404(
Post.objects.annotate(
comment_count=Count(
'commented_post')).select_related('author', 'group'),
pk=self.kwargs['post_id'])
following = False
if self.request.user.is_authenticated:
if Follow.objects.filter(author=user_profile,
user=self.request.user).exists():
following = True
return {'post': post,
'profile': user_profile,
'form': CommentForm(),
'following': following}
def get_queryset(self):
query_set = super().get_queryset()
return query_set.filter(
post=self.kwargs['post_id']).select_related('author')
# def profile(request, username):
# '''Страница с публикациями пользователя'''
# user_profile = get_object_or_404(
# User.objects.filter(
# username=username).annotate(
# follower_count=Count('follower', distinct=True),
# following_count=Count('following', distinct=True)))
# post_list = Post.objects.filter(
# author=user_profile).select_related(
# 'group', 'author').annotate(
# comment_count=Count(
# 'commented_post')).order_by("-pub_date").all()
# paginator = Paginator(post_list, 10)
# page_number = request.GET.get('page')
# page = paginator.get_page(page_number)
# following = False
# if request.user.is_authenticated:
# if Follow.objects.filter(author=user_profile,
# user=request.user).exists():
# following = True
# return render(request, "profile.html", {'profile': user_profile,
# 'post_list': page,
# 'paginator': paginator,
# 'following': following})
class ProfileView(ListView):
model = Post
template_name = 'profile.html'
paginate_by = 5
context_object_name = 'post_list'
@property
def extra_context(self):
profile = get_object_or_404(
User.objects.filter(
username=self.kwargs['username']).annotate(
follower_count=Count('follower', distinct=True),
following_count=Count('following', distinct=True)))
following = False
if self.request.user.is_authenticated:
if Follow.objects.filter(author=profile,
user=self.request.user).exists():
following = True
return {
'profile': profile,
'following': following
}
def get_queryset(self):
profile = get_object_or_404(
User.objects.filter(
username=self.kwargs['username']).annotate(
follower_count=Count('follower', distinct=True),
following_count=Count('following', distinct=True)))
query_set = super().get_queryset()
return query_set.filter(
author=profile).select_related(
'group', 'author')
# @login_required
# def post_edit(request, username, post_id):
# '''Страница редактирования публикации'''
# title = 'Редактировать запись'
# post = get_object_or_404(Post.objects.select_related('author'), pk=post_id)
# if request.user == post.author:
# if request.method == "POST":
# form = PostForm(request.POST or None,
# files=request.FILES or None,
# instance=post)
# if form.is_valid():
# post = form.save(commit=False)
# post.pub_date = timezone.now()
# post.save()
# return redirect('post', post_id=post.pk, username=username)
# else:
# form = PostForm(instance=post)
# else:
# return redirect('post', post_id=post.pk, username=post.author)
# return render(
# request, "new_post.html", {'form': form, 'title': title, 'post': post})
class PostEditUpdateView(LoginRequiredMixin, UpdateView):
model = Post
slug_field = 'username'
pk_url_kwarg = 'post_id'
form_class = PostForm
template_name = 'new_post.html'
extra_context = {
'title': 'Редактировать запись'
}
def form_valid(self, form):
post = form.save(commit=False)
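        # Editing bumps the publication date to the moment of the edit.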
post.pub_date = timezone.now()
post.save()
messages.add_message(
self.request,
messages.SUCCESS,
f'Запись обновлена!',
extra_tags='success'
)
return super().form_valid(form)
def get_success_url(self):
return reverse('profile', kwargs={
'username': self.object.author,
})
# @login_required
# def post_delete(request, username, post_id):
# '''Функция для удаления публикации'''
# post = get_object_or_404(Post, pk=post_id)
# if request.user == post.author:
# post.delete()
# return redirect('profile', username=username)
# return redirect('post', post_id=post.pk, username=post.author)
class PostDeleteView(LoginRequiredMixin, DeleteView):
model = Post
template_name = 'post_delete.html'
slug_field = 'username'
pk_url_kwarg = 'post_id'
success_message = 'Запись удалена'
def get_success_url(self):
return reverse('profile', kwargs={
'username': self.object.author,
})
# @login_required
# def add_comment(request, username, post_id):
# '''Функция для добавления комментария к публикации'''
# post = get_object_or_404(Post, pk=post_id)
# if request.method == 'POST':
# form = CommentForm(request.POST)
# if form.is_valid():
# comment = form.save(commit=False)
# comment.post = post
# comment.author = request.user
# comment.save()
# return redirect('post', post_id=post_id, username=username)
# return redirect('post', post_id=post_id, username=username)
class AddCommentView(LoginRequiredMixin, CreateView):
model = Comment
template_name = 'comments.html'
form_class = CommentForm
slug_field = 'username'
pk_url_kwarg = 'post_id'
def form_valid(self, form):
post = get_object_or_404(Post, pk=self.kwargs['post_id'])
comment = form.save(commit=False)
comment.post = post
comment.author = self.request.user
comment.save()
return super().form_valid(form)
def get_success_url(self):
return reverse('post', kwargs={'username': self.object.author,
'post_id': self.object.post.pk
})
# @login_required
# def follow_index(request):
# '''Страница с публикациями избранных пользователей'''
# follow_page = True
# post_list = Post.objects.filter(
# author__following__user=request.user).select_related(
# 'group', 'author').annotate(
# comment_count=Count(
# 'commented_post')).order_by("-pub_date").all()
# paginator = Paginator(post_list, 10)
# page_number = request.GET.get('page')
# page = paginator.get_page(page_number)
# return render(request, "follow.html", {'page': page,
# 'paginator': paginator,
# 'follow_page': follow_page})
class FollowIndexView(LoginRequiredMixin, ListView):
model = Post
template_name = 'follow.html'
paginate_by = 5
context_object_name = 'post_list'
extra_context = {
'follow_page': True
}
def get_queryset(self):
query_set = super().get_queryset()
return query_set.filter(
author__following__user=self.request.user).select_related(
'group', 'author')
@login_required
def profile_follow(request, username):
    '''Subscribe the current user to the given author.'''
followed_author = get_object_or_404(User, username=username)
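    # Refuse self-follows and duplicate follows; both cases just redirect back
    # to the author's profile without creating a new Follow row.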
if followed_author == request.user:
return redirect('profile', username=username)
if Follow.objects.filter(user=request.user,
author=followed_author).exists():
return redirect('profile', username=username)
Follow.objects.create(author=followed_author, user=request.user)
return redirect('profile', username=username)
@login_required
def profile_unfollow(request, username):
    '''Unsubscribe the current user from the given author.'''
follover = Follow.objects.filter(author__username=username,
user=request.user)
follover.delete()
return redirect('profile', username=username)
@login_required
def delete_comment(request, username, post_id, comment_id):
    '''Delete a comment on a post.'''
comment = get_object_or_404(Comment, post=post_id, pk=comment_id)
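    # Only the comment's author may delete it; everyone else is simply
    # redirected back to the post.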
if request.user == comment.author:
comment.delete()
return redirect('post', username=username, post_id=post_id)
# @login_required
# def edit_comment(request, username, post_id, comment_id):
# '''Функция для редактирования комментария к публикации'''
# title = 'Редактировать комментарий'
# comment = get_object_or_404(Comment, post=post_id, pk=comment_id)
# if request.user == comment.author:
# if request.method == 'POST':
# form = CommentForm(request.POST, instance=comment)
# if form.is_valid():
# comment = form.save(commit=False)
# comment.created = timezone.now()
# comment.save()
# return redirect('post', username=username, post_id=post_id)
# form = CommentForm(instance=comment)
# return render(request, "new_post.html", {'form': form, 'title': title})
class CommentEditView(LoginRequiredMixin, UpdateView):
model = Comment
template_name = 'new_post.html'
form_class = CommentForm
pk_url_kwarg = 'comment_id'
extra_context = {
'title': 'Редактировать комментарий'
}
def form_valid(self, form):
comment = form.save(commit=False)
comment.created = timezone.now()
comment.save()
messages.add_message(
self.request,
messages.SUCCESS,
f'Коментарий отредактирован',
)
return super().form_valid(form)
def get_success_url(self):
return reverse('post', kwargs={'username': self.object.author,
'post_id': self.object.post.pk
})
# @login_required
# def add_group(request):
# '''Страница для добавления группы'''
# title = 'Создать группу'
# if request.method == 'POST':
# form = GroupForm(request.POST)
# if form.is_valid():
# slug = form.cleaned_data['slug']
# form.save()
# return redirect("group", slug=slug)
# return render(request, "new_post.html", {'form': form, 'title': title})
# form = GroupForm()
# return render(request, "new_post.html", {'form': form, 'title': title})
class GroupAddView(LoginRequiredMixin, CreateView):
model = Group
template_name = 'new_post.html'
form_class = GroupForm
extra_context = {
'title': 'Создать группу'
}
def get_success_url(self, **kwargs):
return reverse('group', kwargs={'slug': self.object.slug})
def page_not_found(request, exception):
    '''404 error page.'''
return render(request, "misc/404.html", {"path": request.path}, status=404)
def server_error(request):
    '''500 error page.'''
return render(request, "misc/500.html", status=500)
|
[
"django.shortcuts.redirect",
"django.utils.timezone.now",
"django.contrib.messages.add_message",
"django.urls.reverse",
"django.shortcuts.get_object_or_404",
"django.shortcuts.render",
"django.db.models.Count"
] |
[((13128, 13170), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['User'], {'username': 'username'}), '(User, username=username)\n', (13145, 13170), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((13510, 13548), 'django.shortcuts.redirect', 'redirect', (['"""profile"""'], {'username': 'username'}), "('profile', username=username)\n", (13518, 13548), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((13807, 13845), 'django.shortcuts.redirect', 'redirect', (['"""profile"""'], {'username': 'username'}), "('profile', username=username)\n", (13815, 13845), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((13994, 14049), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Comment'], {'post': 'post_id', 'pk': 'comment_id'}), '(Comment, post=post_id, pk=comment_id)\n', (14011, 14049), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((14125, 14177), 'django.shortcuts.redirect', 'redirect', (['"""post"""'], {'username': 'username', 'post_id': 'post_id'}), "('post', username=username, post_id=post_id)\n", (14133, 14177), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((16648, 16716), 'django.shortcuts.render', 'render', (['request', '"""misc/404.html"""', "{'path': request.path}"], {'status': '(404)'}), "(request, 'misc/404.html', {'path': request.path}, status=404)\n", (16654, 16716), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((16780, 16824), 'django.shortcuts.render', 'render', (['request', '"""misc/500.html"""'], {'status': '(500)'}), "(request, 'misc/500.html', status=500)\n", (16786, 16824), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((3097, 3176), 'django.contrib.messages.add_message', 'messages.add_message', (['self.request', 'messages.SUCCESS', 'f"""Новая запись добавлена"""'], {}), "(self.request, messages.SUCCESS, f'Новая запись добавлена')\n", (3117, 3176), False, 'from django.contrib import messages\n'), ((3310, 3326), 'django.urls.reverse', 'reverse', (['"""index"""'], {}), "('index')\n", (3317, 3326), False, 'from django.urls import reverse\n'), ((9542, 9556), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (9554, 9556), False, 'from django.utils import timezone\n'), ((9585, 9685), 'django.contrib.messages.add_message', 'messages.add_message', (['self.request', 'messages.SUCCESS', 'f"""Запись обновлена!"""'], {'extra_tags': '"""success"""'}), "(self.request, messages.SUCCESS, f'Запись обновлена!',\n extra_tags='success')\n", (9605, 9685), False, 'from django.contrib import messages\n'), ((9827, 9886), 'django.urls.reverse', 'reverse', (['"""profile"""'], {'kwargs': "{'username': self.object.author}"}), "('profile', kwargs={'username': self.object.author})\n", (9834, 9886), False, 'from django.urls import reverse\n'), ((10511, 10570), 'django.urls.reverse', 'reverse', (['"""profile"""'], {'kwargs': "{'username': self.object.author}"}), "('profile', kwargs={'username': self.object.author})\n", (10518, 10570), False, 'from django.urls import reverse\n'), ((11417, 11467), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Post'], {'pk': "self.kwargs['post_id']"}), "(Post, pk=self.kwargs['post_id'])\n", (11434, 11467), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((11691, 11784), 'django.urls.reverse', 'reverse', (['"""post"""'], {'kwargs': "{'username': self.object.author, 'post_id': 
self.object.post.pk}"}), "('post', kwargs={'username': self.object.author, 'post_id': self.\n object.post.pk})\n", (11698, 11784), False, 'from django.urls import reverse\n'), ((13226, 13264), 'django.shortcuts.redirect', 'redirect', (['"""profile"""'], {'username': 'username'}), "('profile', username=username)\n", (13234, 13264), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((13391, 13429), 'django.shortcuts.redirect', 'redirect', (['"""profile"""'], {'username': 'username'}), "('profile', username=username)\n", (13399, 13429), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((15301, 15315), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (15313, 15315), False, 'from django.utils import timezone\n'), ((15347, 15433), 'django.contrib.messages.add_message', 'messages.add_message', (['self.request', 'messages.SUCCESS', 'f"""Коментарий отредактирован"""'], {}), "(self.request, messages.SUCCESS,\n f'Коментарий отредактирован')\n", (15367, 15433), False, 'from django.contrib import messages\n'), ((15565, 15658), 'django.urls.reverse', 'reverse', (['"""post"""'], {'kwargs': "{'username': self.object.author, 'post_id': self.object.post.pk}"}), "('post', kwargs={'username': self.object.author, 'post_id': self.\n object.post.pk})\n", (15572, 15658), False, 'from django.urls import reverse\n'), ((16519, 16570), 'django.urls.reverse', 'reverse', (['"""group"""'], {'kwargs': "{'slug': self.object.slug}"}), "('group', kwargs={'slug': self.object.slug})\n", (16526, 16570), False, 'from django.urls import reverse\n'), ((1839, 1889), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Group'], {'slug': "self.kwargs['slug']"}), "(Group, slug=self.kwargs['slug'])\n", (1856, 1889), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((4898, 4930), 'django.db.models.Count', 'Count', (['"""follower"""'], {'distinct': '(True)'}), "('follower', distinct=True)\n", (4903, 4930), False, 'from django.db.models import Count\n'), ((4964, 4997), 'django.db.models.Count', 'Count', (['"""following"""'], {'distinct': '(True)'}), "('following', distinct=True)\n", (4969, 4997), False, 'from django.db.models import Count\n'), ((5026, 5061), 'django.db.models.Count', 'Count', (['"""post_author"""'], {'distinct': '(True)'}), "('post_author', distinct=True)\n", (5031, 5061), False, 'from django.db.models import Count\n'), ((7361, 7393), 'django.db.models.Count', 'Count', (['"""follower"""'], {'distinct': '(True)'}), "('follower', distinct=True)\n", (7366, 7393), False, 'from django.db.models import Count\n'), ((7431, 7464), 'django.db.models.Count', 'Count', (['"""following"""'], {'distinct': '(True)'}), "('following', distinct=True)\n", (7436, 7464), False, 'from django.db.models import Count\n'), ((7982, 8014), 'django.db.models.Count', 'Count', (['"""follower"""'], {'distinct': '(True)'}), "('follower', distinct=True)\n", (7987, 8014), False, 'from django.db.models import Count\n'), ((8048, 8081), 'django.db.models.Count', 'Count', (['"""following"""'], {'distinct': '(True)'}), "('following', distinct=True)\n", (8053, 8081), False, 'from django.db.models import Count\n'), ((5164, 5187), 'django.db.models.Count', 'Count', (['"""commented_post"""'], {}), "('commented_post')\n", (5169, 5187), False, 'from django.db.models import Count\n')]
|
from __future__ import annotations
import json
import uuid
from argparse import ArgumentParser, Namespace
from collections.abc import Awaitable, Callable, Mapping, AsyncIterator
from contextlib import closing, asynccontextmanager, AsyncExitStack
from datetime import datetime
from functools import partial
from secrets import token_bytes
from typing import Any, Optional, Final
from aioredis import create_redis, Redis, ConnectionClosedError
from pysasl.creds import AuthenticationCredentials
from pymap.bytes import BytesFormat
from pymap.config import BackendCapability, IMAPConfig
from pymap.context import connection_exit
from pymap.exceptions import AuthorizationFailure, IncompatibleData, \
NotAllowedError, UserNotFound
from pymap.health import HealthStatus
from pymap.interfaces.backend import BackendInterface
from pymap.interfaces.login import LoginInterface, IdentityInterface
from pymap.interfaces.token import TokensInterface
from pymap.token import AllTokens
from pymap.user import UserMetadata
from .cleanup import CleanupTask
from .filter import FilterSet
from .keys import DATA_VERSION, RedisKey, GlobalKeys, CleanupKeys, \
NamespaceKeys
from .mailbox import Message, MailboxSet
from ..session import BaseSession
__all__ = ['RedisBackend', 'Config', 'Session']
class RedisBackend(BackendInterface):
"""Defines a backend that uses redis data structures for mailbox storage.
"""
def __init__(self, login: Login, config: Config,
status: HealthStatus) -> None:
super().__init__()
self._login = login
self._config = config
self._status = status
@property
def login(self) -> Login:
return self._login
@property
def config(self) -> Config:
return self._config
@property
def status(self) -> HealthStatus:
return self._status
@classmethod
def add_subparser(cls, name: str, subparsers: Any) -> ArgumentParser:
parser = subparsers.add_parser(name, help='redis backend')
parser.add_argument('--address', metavar='URL',
default='redis://localhost',
help='the redis server address')
parser.add_argument('--select', metavar='DB', type=int,
help='the redis database for mail data')
parser.add_argument('--separator', metavar='CHAR', default='/',
help='the redis key segment separator')
parser.add_argument('--prefix', metavar='VAL', default='/mail',
help='the mail data key prefix')
parser.add_argument('--users-prefix', metavar='VAL', default='/users',
help='the user lookup key prefix')
parser.add_argument('--users-json', action='store_true',
help='the user lookup value contains JSON')
return parser
@classmethod
async def init(cls, args: Namespace, **overrides: Any) \
-> tuple[RedisBackend, Config]:
config = Config.from_args(args)
status = HealthStatus()
connect_redis = partial(cls._connect_redis, config, status)
login = Login(config, connect_redis)
return cls(login, config, status), config
@classmethod
async def _connect_redis(cls, config: Config,
status: HealthStatus) -> Redis:
try:
redis = await create_redis(config.address)
except (ConnectionClosedError, OSError):
status.set_unhealthy()
raise
else:
status.set_healthy()
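            # Keep the connection on the per-connection exit stack so it is
            # closed automatically when that stack unwinds.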
stack = connection_exit.get()
stack.enter_context(closing(redis))
return redis
async def start(self, stack: AsyncExitStack) -> None:
config = self._config
global_keys = config._global_keys
connect_redis = partial(self._connect_redis, config, self._status)
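        # Start the background cleanup task and cancel it when the backend's
        # exit stack unwinds.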
cleanup_task = CleanupTask(connect_redis, global_keys).start()
stack.callback(cleanup_task.cancel)
class Config(IMAPConfig):
"""The config implementation for the redis backend.
Args:
args: The command-line arguments.
address: The redis server address.
select: The redis database for mail data.
separator: The redis key segment separator.
prefix: The prefix for mail data keys.
users_prefix: The user lookup key prefix.
users_json: True if the user lookup value contains JSON.
"""
def __init__(self, args: Namespace, *, address: str, select: Optional[int],
separator: bytes, prefix: bytes, users_prefix: bytes,
users_json: bool, **extra: Any) -> None:
super().__init__(args, admin_key=token_bytes(), **extra)
self._address = address
self._select = select
self._separator = separator
self._prefix = prefix
self._users_prefix = users_prefix
self._users_json = users_json
@property
def backend_capability(self) -> BackendCapability:
return BackendCapability(idle=True, object_id=True, multi_append=True)
@property
def address(self) -> str:
"""The redis server address. Defaults to a connection to localhost.
See Also:
:func:`aioredis.create_connection`
"""
return self._address
@property
def select(self) -> Optional[int]:
"""The redis database for mail data. If given, the `SELECT`_ command is
called after successful user lookup.
.. _SELECT: https://redis.io/commands/select
"""
return self._select
@property
def separator(self) -> bytes:
"""The bytestring used to separate segments of composite redis keys."""
return self._separator
@property
def prefix(self) -> bytes:
"""The prefix for mail data keys. This prefix does not apply to
:attr:`.users_key`.
"""
return self._prefix
@property
def users_prefix(self) -> bytes:
"""The prefix for user lookup keys."""
return self._users_prefix
@property
def users_json(self) -> bool:
"""True if the value from the user lookup key contains a JSON object
with a ``"password"`` attribute, instead of a redis hash with a
``password`` key.
See Also:
`redis hashes
<https://redis.io/topics/data-types-intro#redis-hashes>`_
"""
return self._users_json
@property
def _joiner(self) -> BytesFormat:
return BytesFormat(self.separator)
@property
def _users_root(self) -> RedisKey:
return RedisKey(self._joiner, [self.users_prefix], {})
@property
def _global_keys(self) -> GlobalKeys:
key = RedisKey(self._joiner, [self.prefix], {})
return GlobalKeys(key)
@classmethod
def parse_args(cls, args: Namespace) -> Mapping[str, Any]:
return {**super().parse_args(args),
'address': args.address,
'select': args.select,
'separator': args.separator.encode('utf-8'),
'prefix': args.prefix.encode('utf-8'),
'users_prefix': args.users_prefix.encode('utf-8'),
'users_json': args.users_json}
class Session(BaseSession[Message]):
"""The session implementation for the redis backend."""
resource = __name__
def __init__(self, redis: Redis, owner: str, config: Config,
mailbox_set: MailboxSet, filter_set: FilterSet) -> None:
super().__init__(owner)
self._redis = redis
self._config = config
self._mailbox_set = mailbox_set
self._filter_set = filter_set
@property
def config(self) -> IMAPConfig:
return self._config
@property
def mailbox_set(self) -> MailboxSet:
return self._mailbox_set
@property
def filter_set(self) -> FilterSet:
return self._filter_set
class Login(LoginInterface):
"""The login implementation for the redis backend."""
def __init__(self, config: Config,
connect_redis: Callable[[], Awaitable[Redis]]) -> None:
super().__init__()
self._config = config
self._connect_redis = connect_redis
self._tokens = AllTokens()
@property
def tokens(self) -> TokensInterface:
return self._tokens
async def authenticate(self, credentials: AuthenticationCredentials) \
-> Identity:
config = self._config
redis = await self._connect_redis()
authcid = credentials.authcid
token_key: Optional[bytes] = None
role: Optional[str] = None
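        # An admin token authenticates on behalf of the requested identity and
        # carries the admin role for this login.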
if credentials.authcid_type == 'admin-token':
authcid = credentials.identity
role = 'admin'
try:
authcid_identity = Identity(config, self.tokens, redis, authcid)
metadata = await authcid_identity.get()
except UserNotFound:
metadata = UserMetadata(config)
if 'key' in metadata.params:
token_key = bytes.fromhex(metadata.params['key'])
role = role or metadata.role
await metadata.check_password(credentials, token_key=token_key)
if role != 'admin' and authcid != credentials.identity:
raise AuthorizationFailure()
return Identity(config, self.tokens, redis, credentials.identity, role)
class Identity(IdentityInterface):
"""The identity implementation for the redis backend."""
def __init__(self, config: Config, tokens: TokensInterface,
redis: Redis, name: str, role: str = None) -> None:
super().__init__()
self.config: Final = config
self.tokens: Final = tokens
self._redis: Optional[Redis] = redis
self._name = name
self._role = role
@property
def name(self) -> str:
return self._name
@property
def redis(self) -> Redis:
redis = self._redis
if redis is None:
# Other methods may not be called after new_session(), since it
# may have called SELECT on the connection.
raise RuntimeError()
return redis
async def new_token(self, *, expiration: datetime = None) -> Optional[str]:
metadata = await self.get()
if 'key' not in metadata.params:
return None
key = bytes.fromhex(metadata.params['key'])
return self.tokens.get_login_token(self.name, key)
@asynccontextmanager
async def new_session(self) -> AsyncIterator[Session]:
config = self.config
redis = self.redis
self._redis = None
if config.select is not None:
await redis.select(config.select)
global_keys = config._global_keys
namespace = await self._get_namespace(redis, global_keys, self.name)
ns_keys = NamespaceKeys(global_keys, namespace)
cl_keys = CleanupKeys(global_keys)
mailbox_set = MailboxSet(redis, ns_keys, cl_keys)
filter_set = FilterSet(redis, ns_keys)
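        # Make sure INBOX exists; ignore the error if it has already been
        # created.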
try:
await mailbox_set.add_mailbox('INBOX')
except ValueError:
pass
yield Session(redis, self.name, config, mailbox_set, filter_set)
async def _get_namespace(self, redis: Redis, global_keys: GlobalKeys,
user: str) -> bytes:
user_key = user.encode('utf-8')
new_namespace = uuid.uuid4().hex.encode('ascii')
ns_val = b'%d/%b' % (DATA_VERSION, new_namespace)
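        # HSETNX only writes when the user has no namespace yet, so the HGET
        # that follows always returns the canonical value -- an atomic
        # get-or-create inside a single MULTI/EXEC transaction.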
multi = redis.multi_exec()
multi.hsetnx(global_keys.namespaces, user_key, ns_val)
multi.hget(global_keys.namespaces, user_key)
_, ns_val = await multi.execute()
version, namespace = ns_val.split(b'/', 1)
if int(version) != DATA_VERSION:
raise IncompatibleData()
return namespace
async def get(self) -> UserMetadata:
redis = self.redis
user_bytes = self.name.encode('utf-8')
user_key = self.config._users_root.end(user_bytes)
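        # The lookup value is either a JSON document or a redis hash,
        # depending on the --users-json flag.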
if self.config.users_json:
json_data = await redis.get(user_key)
if json_data is None:
raise UserNotFound(self.name)
data_dict = json.loads(json_data)
else:
data_dict = await redis.hgetall(user_key, encoding='utf-8')
if data_dict is None:
raise UserNotFound(self.name)
return UserMetadata(self.config, **data_dict)
async def set(self, metadata: UserMetadata) -> None:
config = self.config
redis = self.redis
if self._role != 'admin' and metadata.role:
raise NotAllowedError('Cannot assign role.')
user_key = config._users_root.end(self.name.encode('utf-8'))
user_dict = metadata.to_dict(key=token_bytes().hex())
if self.config.users_json:
json_data = json.dumps(user_dict)
await redis.set(user_key, json_data)
else:
multi = redis.multi_exec()
multi.delete(user_key)
multi.hmset_dict(user_key, user_dict)
await multi.execute()
async def delete(self) -> None:
config = self.config
user_key = config._users_root.end(self.name.encode('utf-8'))
if not await self.redis.delete(user_key):
raise UserNotFound(self.name)
|
[
"pymap.context.connection_exit.get",
"functools.partial",
"secrets.token_bytes",
"uuid.uuid4",
"json.loads",
"pymap.exceptions.UserNotFound",
"pymap.bytes.BytesFormat",
"pymap.config.BackendCapability",
"pymap.exceptions.NotAllowedError",
"pymap.token.AllTokens",
"pymap.health.HealthStatus",
"pymap.user.UserMetadata",
"pymap.exceptions.IncompatibleData",
"json.dumps",
"aioredis.create_redis",
"contextlib.closing",
"pymap.exceptions.AuthorizationFailure"
] |
[((3084, 3098), 'pymap.health.HealthStatus', 'HealthStatus', ([], {}), '()\n', (3096, 3098), False, 'from pymap.health import HealthStatus\n'), ((3123, 3166), 'functools.partial', 'partial', (['cls._connect_redis', 'config', 'status'], {}), '(cls._connect_redis, config, status)\n', (3130, 3166), False, 'from functools import partial\n'), ((3878, 3928), 'functools.partial', 'partial', (['self._connect_redis', 'config', 'self._status'], {}), '(self._connect_redis, config, self._status)\n', (3885, 3928), False, 'from functools import partial\n'), ((5065, 5128), 'pymap.config.BackendCapability', 'BackendCapability', ([], {'idle': '(True)', 'object_id': '(True)', 'multi_append': '(True)'}), '(idle=True, object_id=True, multi_append=True)\n', (5082, 5128), False, 'from pymap.config import BackendCapability, IMAPConfig\n'), ((6564, 6591), 'pymap.bytes.BytesFormat', 'BytesFormat', (['self.separator'], {}), '(self.separator)\n', (6575, 6591), False, 'from pymap.bytes import BytesFormat\n'), ((8300, 8311), 'pymap.token.AllTokens', 'AllTokens', ([], {}), '()\n', (8309, 8311), False, 'from pymap.token import AllTokens\n'), ((12442, 12480), 'pymap.user.UserMetadata', 'UserMetadata', (['self.config'], {}), '(self.config, **data_dict)\n', (12454, 12480), False, 'from pymap.user import UserMetadata\n'), ((3628, 3649), 'pymap.context.connection_exit.get', 'connection_exit.get', ([], {}), '()\n', (3647, 3649), False, 'from pymap.context import connection_exit\n'), ((9315, 9337), 'pymap.exceptions.AuthorizationFailure', 'AuthorizationFailure', ([], {}), '()\n', (9335, 9337), False, 'from pymap.exceptions import AuthorizationFailure, IncompatibleData, NotAllowedError, UserNotFound\n'), ((11831, 11849), 'pymap.exceptions.IncompatibleData', 'IncompatibleData', ([], {}), '()\n', (11847, 11849), False, 'from pymap.exceptions import AuthorizationFailure, IncompatibleData, NotAllowedError, UserNotFound\n'), ((12239, 12260), 'json.loads', 'json.loads', (['json_data'], {}), '(json_data)\n', (12249, 12260), False, 'import json\n'), ((12665, 12703), 'pymap.exceptions.NotAllowedError', 'NotAllowedError', (['"""Cannot assign role."""'], {}), "('Cannot assign role.')\n", (12680, 12703), False, 'from pymap.exceptions import AuthorizationFailure, IncompatibleData, NotAllowedError, UserNotFound\n'), ((12894, 12915), 'json.dumps', 'json.dumps', (['user_dict'], {}), '(user_dict)\n', (12904, 12915), False, 'import json\n'), ((13340, 13363), 'pymap.exceptions.UserNotFound', 'UserNotFound', (['self.name'], {}), '(self.name)\n', (13352, 13363), False, 'from pymap.exceptions import AuthorizationFailure, IncompatibleData, NotAllowedError, UserNotFound\n'), ((3430, 3458), 'aioredis.create_redis', 'create_redis', (['config.address'], {}), '(config.address)\n', (3442, 3458), False, 'from aioredis import create_redis, Redis, ConnectionClosedError\n'), ((3682, 3696), 'contextlib.closing', 'closing', (['redis'], {}), '(redis)\n', (3689, 3696), False, 'from contextlib import closing, asynccontextmanager, AsyncExitStack\n'), ((4748, 4761), 'secrets.token_bytes', 'token_bytes', ([], {}), '()\n', (4759, 4761), False, 'from secrets import token_bytes\n'), ((9004, 9024), 'pymap.user.UserMetadata', 'UserMetadata', (['config'], {}), '(config)\n', (9016, 9024), False, 'from pymap.user import UserMetadata\n'), ((12191, 12214), 'pymap.exceptions.UserNotFound', 'UserNotFound', (['self.name'], {}), '(self.name)\n', (12203, 12214), False, 'from pymap.exceptions import AuthorizationFailure, IncompatibleData, NotAllowedError, UserNotFound\n'), ((12403, 
12426), 'pymap.exceptions.UserNotFound', 'UserNotFound', (['self.name'], {}), '(self.name)\n', (12415, 12426), False, 'from pymap.exceptions import AuthorizationFailure, IncompatibleData, NotAllowedError, UserNotFound\n'), ((11437, 11449), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (11447, 11449), False, 'import uuid\n'), ((12814, 12827), 'secrets.token_bytes', 'token_bytes', ([], {}), '()\n', (12825, 12827), False, 'from secrets import token_bytes\n')]
|
from mmdnn.conversion.rewriter.rewriter import UnitRewriterBase
import numpy as np
import re
class LSTMRewriter(UnitRewriterBase):
def __init__(self, graph, weights_dict):
return super(LSTMRewriter, self).__init__(graph, weights_dict)
def process_lstm_cell(self, match_result):
if 'lstm_cell' not in match_result._pattern_to_op.keys():
return
kwargs = dict()
top_node = match_result._pattern_to_op[match_result._name_to_pattern['lstm_cell']]
w_e = match_result.get_op("cell_kernel")
w = self._weights_dict[w_e.name.replace('/read', '')]
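        # The LSTM kernel stacks the four gate matrices along its second axis,
        # so the hidden size is shape[1] // 4; splitting the rows at -num_units
        # separates the input weights (wx) from the recurrent weights (wh).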
num_units = w.shape[1]//4
[wx, wh] = np.split(w, [-1 * num_units])
input_size = wx.shape[0]
kwargs['num_units'] = num_units
kwargs['input_size'] = input_size
if hasattr(top_node, 'kwargs'):
top_node.kwargs.update(kwargs)
else:
top_node.kwargs = kwargs
def process_rnn_h_zero(self, match_result):
if 'h_zero' not in match_result._name_to_pattern.keys():
return
kwargs = dict()
top_node = match_result._pattern_to_op[match_result._name_to_pattern['h_zero']]
fill_size = match_result.get_op('fill_size')
fill_value = match_result.get_op('fill_value')
kwargs['fill_size'] = fill_size.get_attr('value').int_val[0]
kwargs['fill_value'] = fill_value.get_attr('value').float_val[0]
if hasattr(top_node, 'kwargs'):
top_node.kwargs.update(kwargs)
else:
top_node.kwargs = kwargs
def process_match_result(self, match_result, pattern_name):
if pattern_name == 'lstm_cell':
self.process_lstm_cell(match_result)
elif pattern_name == 'h_zero':
if self.check_match_scope(match_result, 'LSTMCellZeroState'):
self.process_rnn_h_zero(match_result)
    '''For short patterns, check the enclosing scope to avoid matching unrelated ops.'''
def check_match_scope(self, match_result, scope_name):
ops = match_result._pattern_to_op.values()
for op in ops:
op_name_splits = op.name.split('/')
if len(op_name_splits) < 2:
return False
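            # Strip trailing "_<n>" suffixes (appended when a scope name is
            # reused) before comparing against the expected scope name.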
if re.sub(r'(_\d+)*$', '', op_name_splits[-2]) != scope_name:
if len(op_name_splits) > 2:
if re.sub(r'(_\d+)*$', '', op_name_splits[-3]) != scope_name:
return False
else:
return False
return True
def run(self):
return super(LSTMRewriter, self).run(['lstm_cell', 'h_zero'], 'tensorflow')
|
[
"re.sub",
"numpy.split"
] |
[((666, 695), 'numpy.split', 'np.split', (['w', '[-1 * num_units]'], {}), '(w, [-1 * num_units])\n', (674, 695), True, 'import numpy as np\n'), ((2259, 2302), 're.sub', 're.sub', (['"""(_\\\\d+)*$"""', '""""""', 'op_name_splits[-2]'], {}), "('(_\\\\d+)*$', '', op_name_splits[-2])\n", (2265, 2302), False, 'import re\n'), ((2385, 2428), 're.sub', 're.sub', (['"""(_\\\\d+)*$"""', '""""""', 'op_name_splits[-3]'], {}), "('(_\\\\d+)*$', '', op_name_splits[-3])\n", (2391, 2428), False, 'import re\n')]
|
import space_mission_design
from space_mission_design.celestlab import celestlab_wrapper
from space_mission_design.visualisation import ploting_map
import numpy as np
import matplotlib.pyplot as plt
plt.style.use("presentation")
from astropy import units as u
from poliastro.bodies import Earth, Mars, Sun
from poliastro.twobody import Orbit
from tqdm.auto import tqdm
import os
from space_mission_design.power import body_illumination
wrapper = celestlab_wrapper.WrapperCelestlab(scilab_path="/home/tavant/Data/tavant/CSE-perso/tools/CNES/scilab-6.0.2/bin/",
celestlab_loader="/home/tavant/Data/tavant/CSE-perso/IonSat/power/loader_celestlab.sce")
print("Small example : propagate and plot")
specitic_params = {"year":2024, "hour":12, "inc": 51*np.pi/180, "sma": (Earth.R_mean + 300 * u.km).to(u.m).value,
"outputFileName":"results_ionsat.h5" }
wrapper.write_paramerter_file(specitic_params)
wrapper.launch_celestlab("ionsat_power.sce")
sun_position, ecf_position, eclipses, cj_date = wrapper.read_celestlab_results("results_ionsat.h5")
ploting_map.plot_planisphere(ecf_position)
ploting_map.plot_poles(ecf_position)
plt.show()
# plt.savefig("map_51deg.png", dpi=300)
|
[
"matplotlib.pyplot.show",
"space_mission_design.celestlab.celestlab_wrapper.WrapperCelestlab",
"matplotlib.pyplot.style.use",
"space_mission_design.visualisation.ploting_map.plot_planisphere",
"space_mission_design.visualisation.ploting_map.plot_poles"
] |
[((200, 229), 'matplotlib.pyplot.style.use', 'plt.style.use', (['"""presentation"""'], {}), "('presentation')\n", (213, 229), True, 'import matplotlib.pyplot as plt\n'), ((451, 667), 'space_mission_design.celestlab.celestlab_wrapper.WrapperCelestlab', 'celestlab_wrapper.WrapperCelestlab', ([], {'scilab_path': '"""/home/tavant/Data/tavant/CSE-perso/tools/CNES/scilab-6.0.2/bin/"""', 'celestlab_loader': '"""/home/tavant/Data/tavant/CSE-perso/IonSat/power/loader_celestlab.sce"""'}), "(scilab_path=\n '/home/tavant/Data/tavant/CSE-perso/tools/CNES/scilab-6.0.2/bin/',\n celestlab_loader=\n '/home/tavant/Data/tavant/CSE-perso/IonSat/power/loader_celestlab.sce')\n", (485, 667), False, 'from space_mission_design.celestlab import celestlab_wrapper\n'), ((1111, 1153), 'space_mission_design.visualisation.ploting_map.plot_planisphere', 'ploting_map.plot_planisphere', (['ecf_position'], {}), '(ecf_position)\n', (1139, 1153), False, 'from space_mission_design.visualisation import ploting_map\n'), ((1155, 1191), 'space_mission_design.visualisation.ploting_map.plot_poles', 'ploting_map.plot_poles', (['ecf_position'], {}), '(ecf_position)\n', (1177, 1191), False, 'from space_mission_design.visualisation import ploting_map\n'), ((1192, 1202), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1200, 1202), True, 'import matplotlib.pyplot as plt\n')]
|
from NXController import Controller
ctr = Controller()
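# Scripted button macro: the pause() durations are tuned to the target game's
# menu timing; h() is presumably the HOME button, pressed between runs.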
ctr.LS()
ctr.A()
ctr.pause(1)
ctr.A()
ctr.pause(1)
ctr.A()
ctr.pause(0.3)
ctr.h()
response = input("Restart(y/n): ")
while response == 'y':
ctr.X()
ctr.A()
ctr.pause(3)
ctr.A()
ctr.pause(1)
ctr.A()
ctr.pause(15)
ctr.A()
ctr.pause(7)
ctr.A()
ctr.pause(1)
ctr.A()
ctr.pause(1)
ctr.A()
ctr.pause(0.3)
ctr.h()
response = input("Restart(y/n): ")
ctr.A()
ctr.close()
|
[
"NXController.Controller"
] |
[((43, 55), 'NXController.Controller', 'Controller', ([], {}), '()\n', (53, 55), False, 'from NXController import Controller\n')]
|
from spefit.pdf.base import PDFParameter, PDF
from spefit.common.stats import normal_pdf
import numpy as np
from numpy.testing import assert_allclose
import pytest
def test_pdf_parameter():
initial = 1
limits = (0, 4)
fixed = True
multi = True
param = PDFParameter(initial=initial, limits=limits, fixed=fixed, multi=multi)
assert param.initial == initial
assert param.limits == limits
assert param.fixed is fixed
assert param.multi is multi
param = PDFParameter(initial=initial, limits=limits)
assert param.initial == initial
assert param.limits == limits
assert param.fixed is False
assert param.multi is False
def test_pdf_class():
parameters = dict(
mean=PDFParameter(initial=0, limits=(-2, 2)),
sigma=PDFParameter(initial=0.1, limits=(0, 2)),
)
pdf = PDF(1, normal_pdf, parameters)
assert pdf.function == normal_pdf
assert pdf.n_illuminations == 1
assert len(pdf.parameters) == 2
assert pdf.parameters["sigma"].initial == 0.1
assert np.array_equal(pdf._lookup, np.array([[0, 1]]))
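    # With two illuminations and no multi parameters, both illuminations share
    # the same (mean, sigma) indices in the flat parameter vector.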
pdf = PDF(2, normal_pdf, parameters)
assert pdf.function == normal_pdf
assert pdf.n_illuminations == 2
assert len(pdf.parameters) == 2
assert pdf.parameters["sigma"].initial == 0.1
assert np.array_equal(pdf._lookup, np.array([[0, 1], [0, 1]]))
parameters = dict(
mean=PDFParameter(initial=0, limits=(-2, 2)),
sigma=PDFParameter(initial=0.1, limits=(0, 2), multi=True),
)
pdf = PDF(2, normal_pdf, parameters)
assert pdf.function == normal_pdf
assert pdf.n_illuminations == 2
assert len(pdf.parameters) == 3
assert pdf.parameters["sigma0"].initial == 0.1
assert pdf.parameters["sigma1"].initial == 0.1
assert np.array_equal(pdf._lookup, np.array([[0, 1], [0, 2]]))
parameters = dict(
mean=PDFParameter(initial=0, limits=(-2, 2), multi=True),
sigma=PDFParameter(initial=0.1, limits=(0, 2), multi=True),
)
pdf = PDF(2, normal_pdf, parameters)
assert pdf.function == normal_pdf
assert pdf.n_illuminations == 2
assert len(pdf.parameters) == 4
assert pdf.parameters["sigma0"].initial == 0.1
assert pdf.parameters["sigma1"].initial == 0.1
assert np.array_equal(pdf._lookup, np.array([[0, 2], [1, 3]]))
key_array = np.array(list(pdf.parameters.keys()))
assert np.array_equal(key_array[pdf._lookup[0]], ["mean0", "sigma0"])
assert np.array_equal(key_array[pdf._lookup[1]], ["mean1", "sigma1"])
def test_lookup_parameters():
parameters = dict(
mean=PDFParameter(initial=0, limits=(-2, 2)),
sigma=PDFParameter(initial=0.1, limits=(0, 2), multi=True),
)
pdf = PDF(2, normal_pdf, parameters)
pdf.update_parameters_initial(sigma1=0.3)
initial = np.array(list(pdf.initial.values()))
assert np.array_equal(pdf._lookup_parameters(initial, 0), np.array([0, 0.1]))
assert np.array_equal(pdf._lookup_parameters(initial, 1), np.array([0, 0.3]))
def test_call():
parameters = dict(
mean=PDFParameter(initial=0, limits=(-2, 2)),
sigma=PDFParameter(initial=0.1, limits=(0, 2), multi=True),
)
pdf = PDF(2, normal_pdf, parameters)
x = np.linspace(-1, 6, 100)
assert_allclose(pdf(x, np.array([0, 0.1, 0.2]), 0), pdf._function(x, 0, 0.1))
assert_allclose(pdf(x, np.array([0, 0.1, 0.2]), 1), pdf._function(x, 0, 0.2))
with pytest.raises(IndexError):
pdf(x, np.array([0, 0.1, 0.2]), 2)
with pytest.raises(IndexError):
pdf(x, np.array([0, 0.1]), 1)
with pytest.raises(TypeError):
# noinspection PyTypeChecker
pdf(x, [0, 0.1, 0.2], 1)
def test_update_parameters_initial():
parameters = dict(
mean=PDFParameter(initial=0, limits=(-2, 2)),
sigma=PDFParameter(initial=0.1, limits=(0, 2)),
)
pdf = PDF(2, normal_pdf, parameters)
assert pdf.parameters["mean"].initial == 0
assert pdf.parameters["sigma"].initial == 0.1
pdf.update_parameters_initial(mean=2, sigma=0.4)
assert pdf.parameters["mean"].initial == 2
assert pdf.parameters["sigma"].initial == 0.4
parameters = dict(
mean=PDFParameter(initial=0, limits=(-2, 2)),
sigma=PDFParameter(initial=0.1, limits=(0, 2), multi=True),
)
pdf = PDF(2, normal_pdf, parameters)
assert pdf.parameters["mean"].initial == 0
assert pdf.parameters["sigma0"].initial == 0.1
assert pdf.parameters["sigma1"].initial == 0.1
pdf.update_parameters_initial(mean=2, sigma=0.4)
assert pdf.parameters["mean"].initial == 2
assert pdf.parameters["sigma0"].initial == 0.4
assert pdf.parameters["sigma1"].initial == 0.4
pdf.update_parameters_initial(mean=2, sigma0=0.4, sigma1=0.5)
assert pdf.parameters["mean"].initial == 2
assert pdf.parameters["sigma0"].initial == 0.4
assert pdf.parameters["sigma1"].initial == 0.5
with pytest.raises(ValueError):
pdf.update_parameters_initial(mean0=2, sigma0=0.4, sigma1=0.5)
with pytest.raises(ValueError):
pdf.update_parameters_initial(mean=2, sigma0=0.4, sigma1=0.5, sigma2=0.5)
def test_update_parameters_limits():
parameters = dict(
mean=PDFParameter(initial=0, limits=(-2, 2)),
sigma=PDFParameter(initial=0.1, limits=(0, 2)),
)
pdf = PDF(1, normal_pdf, parameters)
assert pdf.parameters["mean"].limits == (-2, 2)
assert pdf.parameters["sigma"].limits == (0, 2)
pdf.update_parameters_limits(mean=(-3, 3), sigma=(0, 4))
assert pdf.parameters["mean"].limits == (-3, 3)
assert pdf.parameters["sigma"].limits == (0, 4)
# Test mutable
limit = [2, 3]
# noinspection PyTypeChecker
pdf.update_parameters_limits(mean=limit)
assert tuple(pdf.parameters["mean"].limits) == (2, 3)
limit[0] = 1
assert tuple(pdf.parameters["mean"].limits) == (2, 3)
def test_update_parameters_fixed():
parameters = dict(
mean=PDFParameter(initial=0, limits=(-2, 2)),
sigma=PDFParameter(initial=0.1, limits=(0, 2)),
)
pdf = PDF(1, normal_pdf, parameters)
assert pdf.parameters["mean"].fixed is False
assert pdf.parameters["sigma"].fixed is False
pdf.update_parameters_fixed(mean=True, sigma=True)
assert pdf.parameters["mean"].fixed is True
assert pdf.parameters["sigma"].fixed is True
# noinspection DuplicatedCode
def test_prepare_multi_illumination_parameters():
parameters = dict(
mean=PDFParameter(initial=0, limits=(-2, 2)),
sigma=PDFParameter(initial=0.1, limits=(0, 2)),
)
results = PDF._prepare_parameters(parameters, 1)
parameters, is_multi, lookup = results
assert len(parameters) == 2
assert len(is_multi) == 2
assert len(lookup) == 1
assert len(lookup[0]) == 2
parameters = dict(
mean=PDFParameter(initial=0, limits=(-2, 2), multi=True),
sigma=PDFParameter(initial=0.1, limits=(0, 2), multi=True),
)
results = PDF._prepare_parameters(parameters, 1)
parameters, is_multi, lookup = results
assert len(parameters) == 2
assert len(is_multi) == 2
assert len(lookup) == 1
assert len(lookup[0]) == 2
parameters = dict(
mean=PDFParameter(initial=0, limits=(-2, 2), multi=True),
sigma=PDFParameter(initial=0.1, limits=(0, 2), multi=True),
)
results = PDF._prepare_parameters(parameters, 2)
parameters, is_multi, lookup = results
assert len(parameters) == 4
assert len(is_multi) == 2
assert len(lookup) == 2
assert len(lookup[0]) == 2
def test_initial():
parameters = dict(
mean=PDFParameter(initial=0, limits=(-2, 2)),
sigma=PDFParameter(initial=0.1, limits=(0, 2), multi=True),
)
pdf = PDF(2, normal_pdf, parameters)
pdf.update_parameters_initial(sigma1=0.2)
assert pdf.initial == dict(mean=0, sigma0=0.1, sigma1=0.2)
def test_n_free_parameters():
parameters = dict(
mean=PDFParameter(initial=0, limits=(-2, 2)),
sigma=PDFParameter(initial=0.1, limits=(0, 2), multi=True),
)
pdf = PDF(2, normal_pdf, parameters)
assert pdf.n_free_parameters == 3
pdf.update_parameters_fixed(sigma1=True)
assert pdf.n_free_parameters == 2
def test_parameter_names():
parameters = dict(
mean=PDFParameter(initial=0, limits=(-2, 2)),
sigma=PDFParameter(initial=0.1, limits=(0, 2), multi=True),
)
pdf = PDF(2, normal_pdf, parameters)
assert pdf.parameter_names == ["mean", "sigma0", "sigma1"]
def test_iminuit_kwargs():
parameters = dict(
mean=PDFParameter(initial=0, limits=(-2, 2)),
sigma=PDFParameter(initial=0.1, limits=(0, 2), multi=True),
)
pdf = PDF(2, normal_pdf, parameters)
pdf.update_parameters_initial(sigma1=0.2)
pdf.update_parameters_limits(sigma1=(1, 2))
pdf.update_parameters_fixed(sigma1=True)
iminuit_kwargs = pdf.iminuit_kwargs
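    # Three parameters, each contributing an initial value, a limit_* entry and
    # a fix_* entry, give nine iminuit keyword arguments.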
assert len(iminuit_kwargs) == 9
assert iminuit_kwargs["mean"] == 0
assert iminuit_kwargs["sigma0"] == 0.1
assert iminuit_kwargs["sigma1"] == 0.2
assert iminuit_kwargs["limit_mean"] == (-2, 2)
assert iminuit_kwargs["limit_sigma0"] == (0, 2)
assert iminuit_kwargs["limit_sigma1"] == (1, 2)
assert iminuit_kwargs["fix_mean"] is False
assert iminuit_kwargs["fix_sigma0"] is False
assert iminuit_kwargs["fix_sigma1"] is True
# noinspection PyPep8Naming,PyArgumentList
@pytest.mark.parametrize("PDFSubclass", PDF.__subclasses__())
def test_pdf_subclasses(PDFSubclass):
pdf = PDFSubclass(n_illuminations=1)
x = np.linspace(-5, 100, 1000)
y = pdf(x, np.array(list(pdf.initial.values())), 0)
np.testing.assert_allclose(np.trapz(y, x), 1, rtol=1e-3)
# noinspection PyPep8Naming,PyArgumentList
@pytest.mark.parametrize("PDFSubclass", PDF.__subclasses__())
def test_disable_pedestal(PDFSubclass):
pdf = PDFSubclass(n_illuminations=1, disable_pedestal=True)
x = np.linspace(-5, 100, 1000)
y = pdf(x, np.array(list(pdf.initial.values())), 0)
lambda_ = pdf.initial["lambda_0"]
pedestal_contribution = np.exp(-lambda_)
np.testing.assert_allclose(np.trapz(y, x), 1 - pedestal_contribution, rtol=1e-3)
def test_from_name():
pdf = PDF.from_name("SiPMGentile", n_illuminations=1)
assert pdf.__class__.__name__ == "SiPMGentile"
with pytest.raises(ValueError):
PDF.from_name("NULL", n_illuminations=1)
|
[
"spefit.pdf.base.PDF.__subclasses__",
"numpy.trapz",
"spefit.pdf.base.PDF",
"spefit.pdf.base.PDF._prepare_parameters",
"spefit.pdf.base.PDFParameter",
"spefit.pdf.base.PDF.from_name",
"pytest.raises",
"numpy.array",
"numpy.exp",
"numpy.linspace",
"numpy.array_equal"
] |
[((275, 345), 'spefit.pdf.base.PDFParameter', 'PDFParameter', ([], {'initial': 'initial', 'limits': 'limits', 'fixed': 'fixed', 'multi': 'multi'}), '(initial=initial, limits=limits, fixed=fixed, multi=multi)\n', (287, 345), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((493, 537), 'spefit.pdf.base.PDFParameter', 'PDFParameter', ([], {'initial': 'initial', 'limits': 'limits'}), '(initial=initial, limits=limits)\n', (505, 537), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((845, 875), 'spefit.pdf.base.PDF', 'PDF', (['(1)', 'normal_pdf', 'parameters'], {}), '(1, normal_pdf, parameters)\n', (848, 875), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((1106, 1136), 'spefit.pdf.base.PDF', 'PDF', (['(2)', 'normal_pdf', 'parameters'], {}), '(2, normal_pdf, parameters)\n', (1109, 1136), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((1526, 1556), 'spefit.pdf.base.PDF', 'PDF', (['(2)', 'normal_pdf', 'parameters'], {}), '(2, normal_pdf, parameters)\n', (1529, 1556), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((2010, 2040), 'spefit.pdf.base.PDF', 'PDF', (['(2)', 'normal_pdf', 'parameters'], {}), '(2, normal_pdf, parameters)\n', (2013, 2040), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((2385, 2447), 'numpy.array_equal', 'np.array_equal', (['key_array[pdf._lookup[0]]', "['mean0', 'sigma0']"], {}), "(key_array[pdf._lookup[0]], ['mean0', 'sigma0'])\n", (2399, 2447), True, 'import numpy as np\n'), ((2459, 2521), 'numpy.array_equal', 'np.array_equal', (['key_array[pdf._lookup[1]]', "['mean1', 'sigma1']"], {}), "(key_array[pdf._lookup[1]], ['mean1', 'sigma1'])\n", (2473, 2521), True, 'import numpy as np\n'), ((2715, 2745), 'spefit.pdf.base.PDF', 'PDF', (['(2)', 'normal_pdf', 'parameters'], {}), '(2, normal_pdf, parameters)\n', (2718, 2745), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((3187, 3217), 'spefit.pdf.base.PDF', 'PDF', (['(2)', 'normal_pdf', 'parameters'], {}), '(2, normal_pdf, parameters)\n', (3190, 3217), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((3227, 3250), 'numpy.linspace', 'np.linspace', (['(-1)', '(6)', '(100)'], {}), '(-1, 6, 100)\n', (3238, 3250), True, 'import numpy as np\n'), ((3865, 3895), 'spefit.pdf.base.PDF', 'PDF', (['(2)', 'normal_pdf', 'parameters'], {}), '(2, normal_pdf, parameters)\n', (3868, 3895), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((4305, 4335), 'spefit.pdf.base.PDF', 'PDF', (['(2)', 'normal_pdf', 'parameters'], {}), '(2, normal_pdf, parameters)\n', (4308, 4335), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((5317, 5347), 'spefit.pdf.base.PDF', 'PDF', (['(1)', 'normal_pdf', 'parameters'], {}), '(1, normal_pdf, parameters)\n', (5320, 5347), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((6054, 6084), 'spefit.pdf.base.PDF', 'PDF', (['(1)', 'normal_pdf', 'parameters'], {}), '(1, normal_pdf, parameters)\n', (6057, 6084), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((6571, 6609), 'spefit.pdf.base.PDF._prepare_parameters', 'PDF._prepare_parameters', (['parameters', '(1)'], {}), '(parameters, 1)\n', (6594, 6609), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((6952, 6990), 'spefit.pdf.base.PDF._prepare_parameters', 'PDF._prepare_parameters', (['parameters', '(1)'], {}), '(parameters, 1)\n', (6975, 6990), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((7333, 7371), 'spefit.pdf.base.PDF._prepare_parameters', 'PDF._prepare_parameters', (['parameters', '(2)'], {}), '(parameters, 
2)\n', (7356, 7371), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((7719, 7749), 'spefit.pdf.base.PDF', 'PDF', (['(2)', 'normal_pdf', 'parameters'], {}), '(2, normal_pdf, parameters)\n', (7722, 7749), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((8052, 8082), 'spefit.pdf.base.PDF', 'PDF', (['(2)', 'normal_pdf', 'parameters'], {}), '(2, normal_pdf, parameters)\n', (8055, 8082), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((8395, 8425), 'spefit.pdf.base.PDF', 'PDF', (['(2)', 'normal_pdf', 'parameters'], {}), '(2, normal_pdf, parameters)\n', (8398, 8425), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((8679, 8709), 'spefit.pdf.base.PDF', 'PDF', (['(2)', 'normal_pdf', 'parameters'], {}), '(2, normal_pdf, parameters)\n', (8682, 8709), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((9543, 9569), 'numpy.linspace', 'np.linspace', (['(-5)', '(100)', '(1000)'], {}), '(-5, 100, 1000)\n', (9554, 9569), True, 'import numpy as np\n'), ((9434, 9454), 'spefit.pdf.base.PDF.__subclasses__', 'PDF.__subclasses__', ([], {}), '()\n', (9452, 9454), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((9906, 9932), 'numpy.linspace', 'np.linspace', (['(-5)', '(100)', '(1000)'], {}), '(-5, 100, 1000)\n', (9917, 9932), True, 'import numpy as np\n'), ((10055, 10071), 'numpy.exp', 'np.exp', (['(-lambda_)'], {}), '(-lambda_)\n', (10061, 10071), True, 'import numpy as np\n'), ((9772, 9792), 'spefit.pdf.base.PDF.__subclasses__', 'PDF.__subclasses__', ([], {}), '()\n', (9790, 9792), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((10191, 10238), 'spefit.pdf.base.PDF.from_name', 'PDF.from_name', (['"""SiPMGentile"""'], {'n_illuminations': '(1)'}), "('SiPMGentile', n_illuminations=1)\n", (10204, 10238), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((1075, 1093), 'numpy.array', 'np.array', (['[[0, 1]]'], {}), '([[0, 1]])\n', (1083, 1093), True, 'import numpy as np\n'), ((1336, 1362), 'numpy.array', 'np.array', (['[[0, 1], [0, 1]]'], {}), '([[0, 1], [0, 1]])\n', (1344, 1362), True, 'import numpy as np\n'), ((1808, 1834), 'numpy.array', 'np.array', (['[[0, 1], [0, 2]]'], {}), '([[0, 1], [0, 2]])\n', (1816, 1834), True, 'import numpy as np\n'), ((2292, 2318), 'numpy.array', 'np.array', (['[[0, 2], [1, 3]]'], {}), '([[0, 2], [1, 3]])\n', (2300, 2318), True, 'import numpy as np\n'), ((2905, 2923), 'numpy.array', 'np.array', (['[0, 0.1]'], {}), '([0, 0.1])\n', (2913, 2923), True, 'import numpy as np\n'), ((2987, 3005), 'numpy.array', 'np.array', (['[0, 0.3]'], {}), '([0, 0.3])\n', (2995, 3005), True, 'import numpy as np\n'), ((3425, 3450), 'pytest.raises', 'pytest.raises', (['IndexError'], {}), '(IndexError)\n', (3438, 3450), False, 'import pytest\n'), ((3505, 3530), 'pytest.raises', 'pytest.raises', (['IndexError'], {}), '(IndexError)\n', (3518, 3530), False, 'import pytest\n'), ((3580, 3604), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (3593, 3604), False, 'import pytest\n'), ((4912, 4937), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (4925, 4937), False, 'import pytest\n'), ((5020, 5045), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (5033, 5045), False, 'import pytest\n'), ((9657, 9671), 'numpy.trapz', 'np.trapz', (['y', 'x'], {}), '(y, x)\n', (9665, 9671), True, 'import numpy as np\n'), ((10103, 10117), 'numpy.trapz', 'np.trapz', (['y', 'x'], {}), '(y, x)\n', (10111, 10117), True, 'import numpy as np\n'), ((10300, 10325), 'pytest.raises', 
'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (10313, 10325), False, 'import pytest\n'), ((10335, 10375), 'spefit.pdf.base.PDF.from_name', 'PDF.from_name', (['"""NULL"""'], {'n_illuminations': '(1)'}), "('NULL', n_illuminations=1)\n", (10348, 10375), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((732, 771), 'spefit.pdf.base.PDFParameter', 'PDFParameter', ([], {'initial': '(0)', 'limits': '(-2, 2)'}), '(initial=0, limits=(-2, 2))\n', (744, 771), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((787, 827), 'spefit.pdf.base.PDFParameter', 'PDFParameter', ([], {'initial': '(0.1)', 'limits': '(0, 2)'}), '(initial=0.1, limits=(0, 2))\n', (799, 827), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((1401, 1440), 'spefit.pdf.base.PDFParameter', 'PDFParameter', ([], {'initial': '(0)', 'limits': '(-2, 2)'}), '(initial=0, limits=(-2, 2))\n', (1413, 1440), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((1456, 1508), 'spefit.pdf.base.PDFParameter', 'PDFParameter', ([], {'initial': '(0.1)', 'limits': '(0, 2)', 'multi': '(True)'}), '(initial=0.1, limits=(0, 2), multi=True)\n', (1468, 1508), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((1873, 1924), 'spefit.pdf.base.PDFParameter', 'PDFParameter', ([], {'initial': '(0)', 'limits': '(-2, 2)', 'multi': '(True)'}), '(initial=0, limits=(-2, 2), multi=True)\n', (1885, 1924), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((1940, 1992), 'spefit.pdf.base.PDFParameter', 'PDFParameter', ([], {'initial': '(0.1)', 'limits': '(0, 2)', 'multi': '(True)'}), '(initial=0.1, limits=(0, 2), multi=True)\n', (1952, 1992), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((2590, 2629), 'spefit.pdf.base.PDFParameter', 'PDFParameter', ([], {'initial': '(0)', 'limits': '(-2, 2)'}), '(initial=0, limits=(-2, 2))\n', (2602, 2629), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((2645, 2697), 'spefit.pdf.base.PDFParameter', 'PDFParameter', ([], {'initial': '(0.1)', 'limits': '(0, 2)', 'multi': '(True)'}), '(initial=0.1, limits=(0, 2), multi=True)\n', (2657, 2697), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((3062, 3101), 'spefit.pdf.base.PDFParameter', 'PDFParameter', ([], {'initial': '(0)', 'limits': '(-2, 2)'}), '(initial=0, limits=(-2, 2))\n', (3074, 3101), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((3117, 3169), 'spefit.pdf.base.PDFParameter', 'PDFParameter', ([], {'initial': '(0.1)', 'limits': '(0, 2)', 'multi': '(True)'}), '(initial=0.1, limits=(0, 2), multi=True)\n', (3129, 3169), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((3278, 3301), 'numpy.array', 'np.array', (['[0, 0.1, 0.2]'], {}), '([0, 0.1, 0.2])\n', (3286, 3301), True, 'import numpy as np\n'), ((3360, 3383), 'numpy.array', 'np.array', (['[0, 0.1, 0.2]'], {}), '([0, 0.1, 0.2])\n', (3368, 3383), True, 'import numpy as np\n'), ((3467, 3490), 'numpy.array', 'np.array', (['[0, 0.1, 0.2]'], {}), '([0, 0.1, 0.2])\n', (3475, 3490), True, 'import numpy as np\n'), ((3547, 3565), 'numpy.array', 'np.array', (['[0, 0.1]'], {}), '([0, 0.1])\n', (3555, 3565), True, 'import numpy as np\n'), ((3752, 3791), 'spefit.pdf.base.PDFParameter', 'PDFParameter', ([], {'initial': '(0)', 'limits': '(-2, 2)'}), '(initial=0, limits=(-2, 2))\n', (3764, 3791), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((3807, 3847), 'spefit.pdf.base.PDFParameter', 'PDFParameter', ([], {'initial': '(0.1)', 'limits': '(0, 2)'}), '(initial=0.1, limits=(0, 2))\n', (3819, 3847), False, 'from 
spefit.pdf.base import PDFParameter, PDF\n'), ((4180, 4219), 'spefit.pdf.base.PDFParameter', 'PDFParameter', ([], {'initial': '(0)', 'limits': '(-2, 2)'}), '(initial=0, limits=(-2, 2))\n', (4192, 4219), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((4235, 4287), 'spefit.pdf.base.PDFParameter', 'PDFParameter', ([], {'initial': '(0.1)', 'limits': '(0, 2)', 'multi': '(True)'}), '(initial=0.1, limits=(0, 2), multi=True)\n', (4247, 4287), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((5204, 5243), 'spefit.pdf.base.PDFParameter', 'PDFParameter', ([], {'initial': '(0)', 'limits': '(-2, 2)'}), '(initial=0, limits=(-2, 2))\n', (5216, 5243), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((5259, 5299), 'spefit.pdf.base.PDFParameter', 'PDFParameter', ([], {'initial': '(0.1)', 'limits': '(0, 2)'}), '(initial=0.1, limits=(0, 2))\n', (5271, 5299), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((5941, 5980), 'spefit.pdf.base.PDFParameter', 'PDFParameter', ([], {'initial': '(0)', 'limits': '(-2, 2)'}), '(initial=0, limits=(-2, 2))\n', (5953, 5980), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((5996, 6036), 'spefit.pdf.base.PDFParameter', 'PDFParameter', ([], {'initial': '(0.1)', 'limits': '(0, 2)'}), '(initial=0.1, limits=(0, 2))\n', (6008, 6036), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((6454, 6493), 'spefit.pdf.base.PDFParameter', 'PDFParameter', ([], {'initial': '(0)', 'limits': '(-2, 2)'}), '(initial=0, limits=(-2, 2))\n', (6466, 6493), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((6509, 6549), 'spefit.pdf.base.PDFParameter', 'PDFParameter', ([], {'initial': '(0.1)', 'limits': '(0, 2)'}), '(initial=0.1, limits=(0, 2))\n', (6521, 6549), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((6811, 6862), 'spefit.pdf.base.PDFParameter', 'PDFParameter', ([], {'initial': '(0)', 'limits': '(-2, 2)', 'multi': '(True)'}), '(initial=0, limits=(-2, 2), multi=True)\n', (6823, 6862), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((6878, 6930), 'spefit.pdf.base.PDFParameter', 'PDFParameter', ([], {'initial': '(0.1)', 'limits': '(0, 2)', 'multi': '(True)'}), '(initial=0.1, limits=(0, 2), multi=True)\n', (6890, 6930), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((7192, 7243), 'spefit.pdf.base.PDFParameter', 'PDFParameter', ([], {'initial': '(0)', 'limits': '(-2, 2)', 'multi': '(True)'}), '(initial=0, limits=(-2, 2), multi=True)\n', (7204, 7243), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((7259, 7311), 'spefit.pdf.base.PDFParameter', 'PDFParameter', ([], {'initial': '(0.1)', 'limits': '(0, 2)', 'multi': '(True)'}), '(initial=0.1, limits=(0, 2), multi=True)\n', (7271, 7311), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((7594, 7633), 'spefit.pdf.base.PDFParameter', 'PDFParameter', ([], {'initial': '(0)', 'limits': '(-2, 2)'}), '(initial=0, limits=(-2, 2))\n', (7606, 7633), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((7649, 7701), 'spefit.pdf.base.PDFParameter', 'PDFParameter', ([], {'initial': '(0.1)', 'limits': '(0, 2)', 'multi': '(True)'}), '(initial=0.1, limits=(0, 2), multi=True)\n', (7661, 7701), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((7927, 7966), 'spefit.pdf.base.PDFParameter', 'PDFParameter', ([], {'initial': '(0)', 'limits': '(-2, 2)'}), '(initial=0, limits=(-2, 2))\n', (7939, 7966), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((7982, 8034), 'spefit.pdf.base.PDFParameter', 'PDFParameter', ([], 
{'initial': '(0.1)', 'limits': '(0, 2)', 'multi': '(True)'}), '(initial=0.1, limits=(0, 2), multi=True)\n', (7994, 8034), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((8270, 8309), 'spefit.pdf.base.PDFParameter', 'PDFParameter', ([], {'initial': '(0)', 'limits': '(-2, 2)'}), '(initial=0, limits=(-2, 2))\n', (8282, 8309), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((8325, 8377), 'spefit.pdf.base.PDFParameter', 'PDFParameter', ([], {'initial': '(0.1)', 'limits': '(0, 2)', 'multi': '(True)'}), '(initial=0.1, limits=(0, 2), multi=True)\n', (8337, 8377), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((8554, 8593), 'spefit.pdf.base.PDFParameter', 'PDFParameter', ([], {'initial': '(0)', 'limits': '(-2, 2)'}), '(initial=0, limits=(-2, 2))\n', (8566, 8593), False, 'from spefit.pdf.base import PDFParameter, PDF\n'), ((8609, 8661), 'spefit.pdf.base.PDFParameter', 'PDFParameter', ([], {'initial': '(0.1)', 'limits': '(0, 2)', 'multi': '(True)'}), '(initial=0.1, limits=(0, 2), multi=True)\n', (8621, 8661), False, 'from spefit.pdf.base import PDFParameter, PDF\n')]
|
# Copyright 2018 SAP SE
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
from webob import Response
class RateLimitExceededResponse(Response):
"""
defines the rate limit response and defaults, which can be overwritten via configuration.
"""
def __init__(self, status=None, headers=None, content_type=None, body=None, json_body=None):
"""
creates a new RateLimitExceededResponse with either a body or json_body
:param status: the status code
:param headers: list of header dictionaries
:param body: the response body
:param json_body: the response json body
"""
if not status:
status = '429 Too Many Requests'
if body:
super(RateLimitExceededResponse, self).__init__(
status=status, headerlist=headers, content_type=content_type, body=body, charset="UTF-8"
)
return
elif not json_body:
content_type = "application/json"
json_body = {"error": {"status": status, "message": "Too Many Requests"}}
super(RateLimitExceededResponse, self).__init__(
status=status, headerlist=headers, content_type=content_type,
json_body=json.dumps(json_body), charset="UTF-8",
)
def set_retry_after(self, retry_after):
if not self.headerlist:
self.headerlist = []
self.headerlist.append(('Retry-After', str(retry_after)))
class BlocklistResponse(Response):
"""
defines the blocklist response and defaults, which can be overwritten via configuration.
"""
def __init__(self, status=None, headers=None, content_type=None, body=None, json_body=None):
"""
creates a new BlocklistResponse with either a body or json_body
:param status: the status code
:param headers: list of header dictionaries
:param body: the response body
:param json_body: the response json body
"""
if not status:
status = '403 Forbidden'
if body:
super(BlocklistResponse, self).__init__(
status=status, headerlist=headers, content_type=content_type, body=body, charset="UTF-8"
)
return
elif not json_body:
content_type = "application/json"
json_body = {"error": {"status": status, "message": "You have been blocklisted"}}
super(BlocklistResponse, self).__init__(
status=status, headerlist=headers, content_type=content_type,
json_body=json.dumps(json_body), charset="UTF-8"
)
|
[
"json.dumps"
] |
[((1745, 1766), 'json.dumps', 'json.dumps', (['json_body'], {}), '(json_body)\n', (1755, 1766), False, 'import json\n'), ((3072, 3093), 'json.dumps', 'json.dumps', (['json_body'], {}), '(json_body)\n', (3082, 3093), False, 'import json\n')]
|
"""
Google Documents functionality for the API
"""
import httplib2
import apiclient.discovery
from oauth2client.service_account import ServiceAccountCredentials
credentials = ServiceAccountCredentials.from_json_keyfile_name(
'credentials.json',
[
'https://www.googleapis.com/auth/spreadsheets',
'https://www.googleapis.com/auth/drive',
],
)
httpAuth = credentials.authorize(httplib2.Http())
service = apiclient.discovery.build('sheets', 'v4', http=httpAuth)
def create(name, sheets, mail):
spreadsheet = service.spreadsheets().create(
body = {
'properties': {
'title': name,
'locale': 'ru_RU',
},
'sheets': [{
'properties': {
'sheetType': 'GRID',
'sheetId': i,
'title': sheet,
'gridProperties': {
'rowCount': 100,
'columnCount': 30,
},
},
} for i, sheet in enumerate(sheets)],
},
).execute()
spreadsheetId = spreadsheet['spreadsheetId']
print(f"https://docs.google.com/spreadsheets/d/{spreadsheetId}")
driveService = apiclient.discovery.build('drive', 'v3', http=httpAuth)
driveService.permissions().create(
fileId = spreadsheetId,
body = {
'type': 'user',
'role': 'writer',
'emailAddress': mail,
},
fields = 'id'
).execute()
|
[
"oauth2client.service_account.ServiceAccountCredentials.from_json_keyfile_name",
"httplib2.Http"
] |
[((178, 346), 'oauth2client.service_account.ServiceAccountCredentials.from_json_keyfile_name', 'ServiceAccountCredentials.from_json_keyfile_name', (['"""credentials.json"""', "['https://www.googleapis.com/auth/spreadsheets',\n 'https://www.googleapis.com/auth/drive']"], {}), "('credentials.json', [\n 'https://www.googleapis.com/auth/spreadsheets',\n 'https://www.googleapis.com/auth/drive'])\n", (226, 346), False, 'from oauth2client.service_account import ServiceAccountCredentials\n'), ((405, 420), 'httplib2.Http', 'httplib2.Http', ([], {}), '()\n', (418, 420), False, 'import httplib2\n')]
|
#!/usr/bin/env python3
from flask import Flask
from flask import request
from util import startTunnel, stopTunnel, addressesForInterface
from argparse import ArgumentParser
import logging
app = Flask(__name__)
settings = {}
@app.route("/connect")
def connect():
address = request.remote_addr
logging.info("Connect request from {}".format(address))
startTunnel(address, settings['localIP'], settings['bridge'])
return "Success", 200
def main():
global settings
parser = ArgumentParser()
parser.add_argument("--bridge", type=str)
parser.add_argument("interface", type=str)
args = parser.parse_args()
addrs = addressesForInterface(args.interface)
if addrs is None:
logging.error("No such interface: {}".format(args.interface))
return
if len(addrs) == 0:
logging.error("Interface {} has no IP4 address.".format(args.interface))
return
settings['localIP'] = addrs[0]
settings['bridge'] = args.bridge
app.run(host=addrs[0])
if __name__ == '__main__':
main()
|
[
"util.addressesForInterface",
"flask.Flask",
"util.startTunnel",
"argparse.ArgumentParser"
] |
[((196, 211), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (201, 211), False, 'from flask import Flask\n'), ((363, 424), 'util.startTunnel', 'startTunnel', (['address', "settings['localIP']", "settings['bridge']"], {}), "(address, settings['localIP'], settings['bridge'])\n", (374, 424), False, 'from util import startTunnel, stopTunnel, addressesForInterface\n'), ((502, 518), 'argparse.ArgumentParser', 'ArgumentParser', ([], {}), '()\n', (516, 518), False, 'from argparse import ArgumentParser\n'), ((656, 693), 'util.addressesForInterface', 'addressesForInterface', (['args.interface'], {}), '(args.interface)\n', (677, 693), False, 'from util import startTunnel, stopTunnel, addressesForInterface\n')]
|
from string import ascii_letters, digits
import esphome.config_validation as cv
import esphome.codegen as cg
from esphome.components import color
from esphome.const import (
CONF_VISIBLE,
)
from . import CONF_NEXTION_ID
from . import Nextion
CONF_VARIABLE_NAME = "variable_name"
CONF_COMPONENT_NAME = "component_name"
CONF_WAVE_CHANNEL_ID = "wave_channel_id"
CONF_WAVE_MAX_VALUE = "wave_max_value"
CONF_PRECISION = "precision"
CONF_WAVEFORM_SEND_LAST_VALUE = "waveform_send_last_value"
CONF_TFT_URL = "tft_url"
CONF_ON_SLEEP = "on_sleep"
CONF_ON_WAKE = "on_wake"
CONF_ON_SETUP = "on_setup"
CONF_TOUCH_SLEEP_TIMEOUT = "touch_sleep_timeout"
CONF_WAKE_UP_PAGE = "wake_up_page"
CONF_AUTO_WAKE_ON_TOUCH = "auto_wake_on_touch"
CONF_WAVE_MAX_LENGTH = "wave_max_length"
CONF_BACKGROUND_COLOR = "background_color"
CONF_BACKGROUND_PRESSED_COLOR = "background_pressed_color"
CONF_FOREGROUND_COLOR = "foreground_color"
CONF_FOREGROUND_PRESSED_COLOR = "foreground_pressed_color"
CONF_FONT_ID = "font_id"
def NextionName(value):
valid_chars = f"{ascii_letters + digits}."
if not isinstance(value, str) or len(value) > 29:
raise cv.Invalid("Must be a string less than 29 characters")
for char in value:
if char not in valid_chars:
raise cv.Invalid(
f"Must only consist of upper/lowercase characters, numbers and the period '.'. The character '{char}' cannot be used."
)
return value
CONFIG_BASE_COMPONENT_SCHEMA = cv.Schema(
{
cv.GenerateID(CONF_NEXTION_ID): cv.use_id(Nextion),
cv.Optional(CONF_BACKGROUND_COLOR): cv.use_id(color),
cv.Optional(CONF_FOREGROUND_COLOR): cv.use_id(color),
cv.Optional(CONF_VISIBLE, default=True): cv.boolean,
}
)
CONFIG_TEXT_COMPONENT_SCHEMA = CONFIG_BASE_COMPONENT_SCHEMA.extend(
cv.Schema(
{
cv.Required(CONF_COMPONENT_NAME): NextionName,
cv.Optional(CONF_FONT_ID): cv.int_range(min=0, max=255),
}
)
)
CONFIG_BINARY_SENSOR_SCHEMA = CONFIG_BASE_COMPONENT_SCHEMA.extend(
cv.Schema(
{
cv.Optional(CONF_COMPONENT_NAME): NextionName,
cv.Optional(CONF_VARIABLE_NAME): NextionName,
}
)
)
CONFIG_SENSOR_COMPONENT_SCHEMA = CONFIG_BINARY_SENSOR_SCHEMA.extend(
cv.Schema(
{
cv.Optional(CONF_FONT_ID): cv.int_range(min=0, max=255),
}
)
)
CONFIG_SWITCH_COMPONENT_SCHEMA = CONFIG_SENSOR_COMPONENT_SCHEMA.extend(
cv.Schema(
{
cv.Optional(CONF_FOREGROUND_PRESSED_COLOR): cv.use_id(color),
cv.Optional(CONF_BACKGROUND_PRESSED_COLOR): cv.use_id(color),
}
)
)
async def setup_component_core_(var, config, arg):
if CONF_VARIABLE_NAME in config:
cg.add(var.set_variable_name(config[CONF_VARIABLE_NAME]))
elif CONF_COMPONENT_NAME in config:
cg.add(
var.set_variable_name(
config[CONF_COMPONENT_NAME],
config[CONF_COMPONENT_NAME] + arg,
)
)
if CONF_BACKGROUND_COLOR in config:
color_component = await cg.get_variable(config[CONF_BACKGROUND_COLOR])
cg.add(var.set_background_color(color_component))
if CONF_BACKGROUND_PRESSED_COLOR in config:
color_component = await cg.get_variable(config[CONF_BACKGROUND_PRESSED_COLOR])
cg.add(var.set_background_pressed_color(color_component))
if CONF_FOREGROUND_COLOR in config:
color_component = await cg.get_variable(config[CONF_FOREGROUND_COLOR])
cg.add(var.set_foreground_color(color_component))
if CONF_FOREGROUND_PRESSED_COLOR in config:
color_component = await cg.get_variable(config[CONF_FOREGROUND_PRESSED_COLOR])
cg.add(var.set_foreground_pressed_color(color_component))
if CONF_FONT_ID in config:
cg.add(var.set_font_id(config[CONF_FONT_ID]))
if CONF_VISIBLE in config:
cg.add(var.set_visible(config[CONF_VISIBLE]))
|
[
"esphome.config_validation.use_id",
"esphome.config_validation.Required",
"esphome.codegen.get_variable",
"esphome.config_validation.int_range",
"esphome.config_validation.Invalid",
"esphome.config_validation.GenerateID",
"esphome.config_validation.Optional"
] |
[((1137, 1191), 'esphome.config_validation.Invalid', 'cv.Invalid', (['"""Must be a string less than 29 characters"""'], {}), "('Must be a string less than 29 characters')\n", (1147, 1191), True, 'import esphome.config_validation as cv\n'), ((1507, 1537), 'esphome.config_validation.GenerateID', 'cv.GenerateID', (['CONF_NEXTION_ID'], {}), '(CONF_NEXTION_ID)\n', (1520, 1537), True, 'import esphome.config_validation as cv\n'), ((1567, 1601), 'esphome.config_validation.Optional', 'cv.Optional', (['CONF_BACKGROUND_COLOR'], {}), '(CONF_BACKGROUND_COLOR)\n', (1578, 1601), True, 'import esphome.config_validation as cv\n'), ((1629, 1663), 'esphome.config_validation.Optional', 'cv.Optional', (['CONF_FOREGROUND_COLOR'], {}), '(CONF_FOREGROUND_COLOR)\n', (1640, 1663), True, 'import esphome.config_validation as cv\n'), ((1691, 1730), 'esphome.config_validation.Optional', 'cv.Optional', (['CONF_VISIBLE'], {'default': '(True)'}), '(CONF_VISIBLE, default=True)\n', (1702, 1730), True, 'import esphome.config_validation as cv\n'), ((1539, 1557), 'esphome.config_validation.use_id', 'cv.use_id', (['Nextion'], {}), '(Nextion)\n', (1548, 1557), True, 'import esphome.config_validation as cv\n'), ((1603, 1619), 'esphome.config_validation.use_id', 'cv.use_id', (['color'], {}), '(color)\n', (1612, 1619), True, 'import esphome.config_validation as cv\n'), ((1665, 1681), 'esphome.config_validation.use_id', 'cv.use_id', (['color'], {}), '(color)\n', (1674, 1681), True, 'import esphome.config_validation as cv\n'), ((1270, 1410), 'esphome.config_validation.Invalid', 'cv.Invalid', (['f"""Must only consist of upper/lowercase characters, numbers and the period \'.\'. The character \'{char}\' cannot be used."""'], {}), '(\n f"Must only consist of upper/lowercase characters, numbers and the period \'.\'. 
The character \'{char}\' cannot be used."\n )\n', (1280, 1410), True, 'import esphome.config_validation as cv\n'), ((1859, 1891), 'esphome.config_validation.Required', 'cv.Required', (['CONF_COMPONENT_NAME'], {}), '(CONF_COMPONENT_NAME)\n', (1870, 1891), True, 'import esphome.config_validation as cv\n'), ((1918, 1943), 'esphome.config_validation.Optional', 'cv.Optional', (['CONF_FONT_ID'], {}), '(CONF_FONT_ID)\n', (1929, 1943), True, 'import esphome.config_validation as cv\n'), ((1945, 1973), 'esphome.config_validation.int_range', 'cv.int_range', ([], {'min': '(0)', 'max': '(255)'}), '(min=0, max=255)\n', (1957, 1973), True, 'import esphome.config_validation as cv\n'), ((2098, 2130), 'esphome.config_validation.Optional', 'cv.Optional', (['CONF_COMPONENT_NAME'], {}), '(CONF_COMPONENT_NAME)\n', (2109, 2130), True, 'import esphome.config_validation as cv\n'), ((2157, 2188), 'esphome.config_validation.Optional', 'cv.Optional', (['CONF_VARIABLE_NAME'], {}), '(CONF_VARIABLE_NAME)\n', (2168, 2188), True, 'import esphome.config_validation as cv\n'), ((2328, 2353), 'esphome.config_validation.Optional', 'cv.Optional', (['CONF_FONT_ID'], {}), '(CONF_FONT_ID)\n', (2339, 2353), True, 'import esphome.config_validation as cv\n'), ((2355, 2383), 'esphome.config_validation.int_range', 'cv.int_range', ([], {'min': '(0)', 'max': '(255)'}), '(min=0, max=255)\n', (2367, 2383), True, 'import esphome.config_validation as cv\n'), ((2514, 2556), 'esphome.config_validation.Optional', 'cv.Optional', (['CONF_FOREGROUND_PRESSED_COLOR'], {}), '(CONF_FOREGROUND_PRESSED_COLOR)\n', (2525, 2556), True, 'import esphome.config_validation as cv\n'), ((2588, 2630), 'esphome.config_validation.Optional', 'cv.Optional', (['CONF_BACKGROUND_PRESSED_COLOR'], {}), '(CONF_BACKGROUND_PRESSED_COLOR)\n', (2599, 2630), True, 'import esphome.config_validation as cv\n'), ((2558, 2574), 'esphome.config_validation.use_id', 'cv.use_id', (['color'], {}), '(color)\n', (2567, 2574), True, 'import esphome.config_validation as cv\n'), ((2632, 2648), 'esphome.config_validation.use_id', 'cv.use_id', (['color'], {}), '(color)\n', (2641, 2648), True, 'import esphome.config_validation as cv\n'), ((3109, 3155), 'esphome.codegen.get_variable', 'cg.get_variable', (['config[CONF_BACKGROUND_COLOR]'], {}), '(config[CONF_BACKGROUND_COLOR])\n', (3124, 3155), True, 'import esphome.codegen as cg\n'), ((3295, 3349), 'esphome.codegen.get_variable', 'cg.get_variable', (['config[CONF_BACKGROUND_PRESSED_COLOR]'], {}), '(config[CONF_BACKGROUND_PRESSED_COLOR])\n', (3310, 3349), True, 'import esphome.codegen as cg\n'), ((3489, 3535), 'esphome.codegen.get_variable', 'cg.get_variable', (['config[CONF_FOREGROUND_COLOR]'], {}), '(config[CONF_FOREGROUND_COLOR])\n', (3504, 3535), True, 'import esphome.codegen as cg\n'), ((3675, 3729), 'esphome.codegen.get_variable', 'cg.get_variable', (['config[CONF_FOREGROUND_PRESSED_COLOR]'], {}), '(config[CONF_FOREGROUND_PRESSED_COLOR])\n', (3690, 3729), True, 'import esphome.codegen as cg\n')]
|
import sys
from koapy.compat.pyside2.QtWidgets import QApplication
from koapy import KiwoomOpenApiPlusQAxWidget
app = QApplication(sys.argv)
control = KiwoomOpenApiPlusQAxWidget()
APIModulePath = control.GetAPIModulePath()
print(APIModulePath)
|
[
"koapy.KiwoomOpenApiPlusQAxWidget",
"koapy.compat.pyside2.QtWidgets.QApplication"
] |
[((120, 142), 'koapy.compat.pyside2.QtWidgets.QApplication', 'QApplication', (['sys.argv'], {}), '(sys.argv)\n', (132, 142), False, 'from koapy.compat.pyside2.QtWidgets import QApplication\n'), ((153, 181), 'koapy.KiwoomOpenApiPlusQAxWidget', 'KiwoomOpenApiPlusQAxWidget', ([], {}), '()\n', (179, 181), False, 'from koapy import KiwoomOpenApiPlusQAxWidget\n')]
|
# Copyright 2021 UW-IT, University of Washington
# SPDX-License-Identifier: Apache-2.0
from uw_canvas import Canvas
from uw_canvas.accounts import ACCOUNTS_API
from uw_canvas.courses import COURSES_API
class ExternalToolsException(Exception):
pass
class ExternalTools(Canvas):
def get_external_tools_in_account(self, account_id, params={}):
"""
Return external tools for the passed canvas account id.
https://canvas.instructure.com/doc/api/external_tools.html#method.external_tools.index
"""
url = ACCOUNTS_API.format(account_id) + "/external_tools"
external_tools = []
for data in self._get_paged_resource(url, params=params):
external_tools.append(data)
return external_tools
def get_external_tools_in_account_by_sis_id(self, sis_id):
"""
Return external tools for given account sis id.
"""
return self.get_external_tools_in_account(self._sis_id(sis_id,
"account"))
def get_external_tools_in_course(self, course_id, params={}):
"""
Return external tools for the passed canvas course id.
https://canvas.instructure.com/doc/api/external_tools.html#method.external_tools.index
"""
url = COURSES_API.format(course_id) + "/external_tools"
external_tools = []
for data in self._get_paged_resource(url, params=params):
external_tools.append(data)
return external_tools
def get_external_tools_in_course_by_sis_id(self, sis_id):
"""
Return external tools for given course sis id.
"""
return self.get_external_tools_in_course(self._sis_id(sis_id,
"course"))
def create_external_tool_in_course(self, course_id, json_data):
return self._create_external_tool(COURSES_API, course_id, json_data)
def create_external_tool_in_account(self, account_id, json_data):
return self._create_external_tool(ACCOUNTS_API, account_id, json_data)
def _create_external_tool(self, context, context_id, json_data):
"""
Create an external tool using the passed json_data.
context is either COURSES_API or ACCOUNTS_API.
context_id is the Canvas course_id or account_id, depending on context.
https://canvas.instructure.com/doc/api/external_tools.html#method.external_tools.create
"""
url = context.format(context_id) + "/external_tools"
return self._post_resource(url, body=json_data)
def update_external_tool_in_course(self, course_id, external_tool_id,
json_data):
return self._update_external_tool(COURSES_API, course_id,
external_tool_id, json_data)
def update_external_tool_in_account(self, account_id, external_tool_id,
json_data):
return self._update_external_tool(ACCOUNTS_API, account_id,
external_tool_id, json_data)
def _update_external_tool(self, context, context_id, external_tool_id,
json_data):
"""
Update the external tool identified by external_tool_id with the passed
json data.
context is either COURSES_API or ACCOUNTS_API.
context_id is the course_id or account_id, depending on context
https://canvas.instructure.com/doc/api/external_tools.html#method.external_tools.update
"""
url = context.format(context_id) + "/external_tools/{}".format(
external_tool_id)
return self._put_resource(url, body=json_data)
def delete_external_tool_in_course(self, course_id, external_tool_id):
return self._delete_external_tool(COURSES_API, course_id,
external_tool_id)
def delete_external_tool_in_account(self, account_id, external_tool_id):
return self._delete_external_tool(ACCOUNTS_API, account_id,
external_tool_id)
def _delete_external_tool(self, context, context_id, external_tool_id):
"""
Delete the external tool identified by external_tool_id.
context is either COURSES_API or ACCOUNTS_API.
context_id is the course_id or account_id, depending on context
https://canvas.instructure.com/doc/api/external_tools.html#method.external_tools.destroy
"""
url = context.format(context_id) + "/external_tools/{}".format(
external_tool_id)
response = self._delete_resource(url)
return True
def _get_sessionless_launch_url(self, context, context_id, tool_id):
"""
Get a sessionless launch url for an external tool.
https://canvas.instructure.com/doc/api/external_tools.html#method.external_tools.generate_sessionless_launch
"""
url = context.format(context_id) + "/external_tools/sessionless_launch"
params = {"id": tool_id}
return self._get_resource(url, params)
def get_sessionless_launch_url_from_account(self, tool_id, account_id):
"""
Get a sessionless launch url for an external tool.
https://canvas.instructure.com/doc/api/external_tools.html#method.external_tools.generate_sessionless_launch
"""
return self._get_sessionless_launch_url(
ACCOUNTS_API, account_id, tool_id)
def get_sessionless_launch_url_from_account_sis_id(
self, tool_id, account_sis_id):
"""
Get a sessionless launch url for an external tool.
https://canvas.instructure.com/doc/api/external_tools.html#method.external_tools.generate_sessionless_launch
"""
return self.get_sessionless_launch_url_from_account(
tool_id, self._sis_id(account_sis_id, "account"))
def get_sessionless_launch_url_from_course(self, tool_id, course_id):
"""
Get a sessionless launch url for an external tool.
https://canvas.instructure.com/doc/api/external_tools.html#method.external_tools.generate_sessionless_launch
"""
return self._get_sessionless_launch_url(
COURSES_API, course_id, tool_id)
def get_sessionless_launch_url_from_course_sis_id(
self, tool_id, course_sis_id):
"""
Get a sessionless launch url for an external tool.
https://canvas.instructure.com/doc/api/external_tools.html#method.external_tools.generate_sessionless_launch
"""
return self.get_sessionless_launch_url_from_course(
tool_id, self._sis_id(course_sis_id, "course"))
|
[
"uw_canvas.accounts.ACCOUNTS_API.format",
"uw_canvas.courses.COURSES_API.format"
] |
[((552, 583), 'uw_canvas.accounts.ACCOUNTS_API.format', 'ACCOUNTS_API.format', (['account_id'], {}), '(account_id)\n', (571, 583), False, 'from uw_canvas.accounts import ACCOUNTS_API\n'), ((1323, 1352), 'uw_canvas.courses.COURSES_API.format', 'COURSES_API.format', (['course_id'], {}), '(course_id)\n', (1341, 1352), False, 'from uw_canvas.courses import COURSES_API\n')]
|
from pymarshal.csv import *
import pytest
def test_marshal_unmarshal_list():
class Test:
def __init__(self, a, b):
self.a = a
self.b = b
u = [Test("a", 2), Test("b", 3)]
assert u[0].a == "a", u[0].a
m = marshal_csv(u)
assert m[0][0] == "a", m[0][0]
u = unmarshal_csv_list(m, Test)
assert u[0].a == "a", u[0].a
def test__marshal_list_row_header():
class Test:
_marshal_list_row_header = "abc"
def __init__(self, a, b):
self.a = a
self.b = b
u = [Test("a", 2), Test("b", 3)]
m = marshal_csv(u)
assert m == [["abc", "a", 2], ["abc", "b", 3]], m
def test_unmarshal_csv():
class A:
_marshal_list_row_header = "a"
def __init__(self, a, b):
self.a = a
self.b = b
class B:
def __init__(self, a, b):
self.a = a
self.b = b
class C:
_marshal_list_row_header = "c"
def __init__(self, a, b):
self.a = a
self.b = b
class D:
_unmarshal_csv_map = {'a': {'arg_name': 'a', 'type': A}}
_unmarshal_csv_default_arg = {'arg_name': 'b', 'type': B}
_unmarshal_csv_singletons = {'c': {'arg_name': 'c', 'type': C}}
def __init__(self, a, b, c):
self.a = a
self.b = b
self.c = c
def __iter__(self):
for x in self.a:
yield x
for x in self.b:
yield x
yield self.c
d = D([A(1, 2)], [B(3, 4)], C(5, 6))
m = marshal_csv(d)
u = unmarshal_csv(m, D)
assert u.a[0].a == 1, u.a[0]
def test_unmarshal_csv_raises_attribute_error():
class A:
pass
with pytest.raises(AttributeError):
unmarshal_csv([], A)
def test_unmarshal_csv_raises_value_error():
class A:
_unmarshal_csv_map = {
'a': {'arg_name': 'a', 'type': object},
}
def __init__(self, a):
self.a = a
with pytest.raises(ValueError):
unmarshal_csv([[1, 2]], A)
def test_marshal_csv_dict():
class A:
_marshal_csv_dict = True
def __init__(self, a, b):
self.a = a
self.b = b
a = A(1, 2)
m = marshal_csv(a)
assert m[0] == ['a', 1], m
assert m[1] == ['b', 2], m
u = unmarshal_csv(m, A)
assert u.a == 1, u.a
assert u.b == 2, u.b
def test_csv_cast_empty_str_to_none():
func = csv_cast_empty_str_to_none(int)
assert func('') is None
assert func('23') == 23
|
[
"pytest.raises"
] |
[((1742, 1771), 'pytest.raises', 'pytest.raises', (['AttributeError'], {}), '(AttributeError)\n', (1755, 1771), False, 'import pytest\n'), ((2018, 2043), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (2031, 2043), False, 'import pytest\n')]
|
# Copyright 2020 Alibaba Group Holding Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from tf_euler.python.euler_ops import base
from tf_euler.python.euler_ops import type_ops
import numpy as np
gen_pair = base._LIB_OP.gen_pair
_random_walk = base._LIB_OP.random_walk
def random_walk(nodes, edge_types, p=1.0, q=1.0, default_node=-1):
'''
Random walk from a list of nodes.
Args:
nodes: start node ids, 1-d Tensor
edge_types: list of 1-d Tensor of edge types
      p: back probability
      q: forward probability
default_node: default fill nodes
'''
if base.nebula_ops['random_walk']:
return nebula_random_walk(nodes, edge_types, p, q, default_node)
edge_types = [type_ops.get_edge_type_id(edge_type)
for edge_type in edge_types]
return _random_walk(nodes, edge_types, p, q, default_node)
def nebula_random_walk(nodes, edge_types, p=1.0, q=1.0, default_node=-1):
result = tf.py_func(
_nebula_random_walk,
[nodes, edge_types, p, q, default_node],
[tf.int64],
True,
'NebulaRandomWalk'
)
result[0].set_shape((nodes.shape.dims[0].value, len(edge_types) + 1))
return result[0]
def _nebula_random_walk(nodes, edge_types, p, q, default_node):
paths = []
uniq_nodes = {}.fromkeys(nodes).keys()
nql = 'USE {}; randomwalk {} from {} over {} where p=={} and q=={}'.format(
base.nebula_space,
len(edge_types),
', '.join(str(x) for x in uniq_nodes),
', '.join(str('e_' + x) for x in edge_types[0]),
p,
q
)
path_cache = {}
resp = base.nebula_client.execute_query(nql)
if resp.rows is not None:
for row in resp.rows:
path = row.columns[0].get_str()
path_nodes = map(lambda x: long(x if x != '-1' else default_node), path.split('#'))
path_cache[path_nodes[0]] = path_nodes
for node in nodes:
paths.append(path_cache[node])
return np.asarray(paths, np.int64)
|
[
"numpy.asarray",
"tf_euler.python.euler_ops.type_ops.get_edge_type_id",
"tf_euler.python.euler_ops.base.nebula_client.execute_query",
"tensorflow.py_func"
] |
[((1686, 1801), 'tensorflow.py_func', 'tf.py_func', (['_nebula_random_walk', '[nodes, edge_types, p, q, default_node]', '[tf.int64]', '(True)', '"""NebulaRandomWalk"""'], {}), "(_nebula_random_walk, [nodes, edge_types, p, q, default_node], [\n tf.int64], True, 'NebulaRandomWalk')\n", (1696, 1801), True, 'import tensorflow as tf\n'), ((2358, 2395), 'tf_euler.python.euler_ops.base.nebula_client.execute_query', 'base.nebula_client.execute_query', (['nql'], {}), '(nql)\n', (2390, 2395), False, 'from tf_euler.python.euler_ops import base\n'), ((2720, 2747), 'numpy.asarray', 'np.asarray', (['paths', 'np.int64'], {}), '(paths, np.int64)\n', (2730, 2747), True, 'import numpy as np\n'), ((1451, 1487), 'tf_euler.python.euler_ops.type_ops.get_edge_type_id', 'type_ops.get_edge_type_id', (['edge_type'], {}), '(edge_type)\n', (1476, 1487), False, 'from tf_euler.python.euler_ops import type_ops\n')]
|