code (string, 22-1.05M chars) | apis (list, 1-3.31k items) | extract_api (string, 75-3.25M chars) |
---|---|---|
from rest_framework import serializers
from .models import Reply
from users.models import Profile
from comments.models import Comment
class ReplySerializer(serializers.HyperlinkedModelSerializer):
user = serializers.PrimaryKeyRelatedField(queryset=Profile.objects.all())
comment = serializers.PrimaryKeyRelatedField(queryset=Comment.objects.all())
class Meta:
model = Reply
fields = ('url', 'id', 'content', 'date_replied', 'user', 'comment')
read_only_fields = ['id', 'date_replied']
def get_fields(self):
fields = super().get_fields()
request = self.context.get('request', None) # to get the request object to access the method
        if request and request.method == 'GET':
fields['user'] = serializers.HyperlinkedRelatedField(many=False, view_name='profile-detail', read_only=True)
fields['comment'] = serializers.HyperlinkedRelatedField(many=False, view_name='comment-detail', read_only=True)
        # if the request method is PUT, make the user and comment fields read-only
if request and request.method == 'PUT':
fields['user'].read_only = True
fields['comment'].read_only = True
return fields
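# Behaviour note (editor's summary, not part of the original file):
# - GET:  'user' and 'comment' are rendered as read-only hyperlinks to the
#         'profile-detail' and 'comment-detail' views.
# - PUT:  both fields are switched to read-only, so an update cannot
#         reassign the reply to a different user or comment.
# - Other write methods (e.g. POST) accept primary keys, as declared on the
#   class attributes above.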
|
[
"rest_framework.serializers.HyperlinkedRelatedField",
"users.models.Profile.objects.all",
"comments.models.Comment.objects.all"
] |
[((251, 272), 'users.models.Profile.objects.all', 'Profile.objects.all', ([], {}), '()\n', (270, 272), False, 'from users.models import Profile\n'), ((330, 351), 'comments.models.Comment.objects.all', 'Comment.objects.all', ([], {}), '()\n', (349, 351), False, 'from comments.models import Comment\n'), ((716, 811), 'rest_framework.serializers.HyperlinkedRelatedField', 'serializers.HyperlinkedRelatedField', ([], {'many': '(False)', 'view_name': '"""profile-detail"""', 'read_only': '(True)'}), "(many=False, view_name='profile-detail',\n read_only=True)\n", (751, 811), False, 'from rest_framework import serializers\n'), ((834, 929), 'rest_framework.serializers.HyperlinkedRelatedField', 'serializers.HyperlinkedRelatedField', ([], {'many': '(False)', 'view_name': '"""comment-detail"""', 'read_only': '(True)'}), "(many=False, view_name='comment-detail',\n read_only=True)\n", (869, 929), False, 'from rest_framework import serializers\n')]
|
#!/usr/bin/env python
#
# collect.py renames files from subdirectories
#
# Copyright <NAME>, 2007--2018
"""
Synopsis:
Rename files or folders following a pattern containing an integer index,
as in 'image0001.png'. The files will be moved into the current directory.
The number in the file name is incremented automatically for each file, and
also if files with this name already exist. Thus pre-existing files are not
overwritten, such that 'collect.py' can be used to pool together many similar
files in a common directory.
Syntax:
collect.py PATTERN [INTEGER] [--copy] PATH1 [PATH2] [PATH3] ...
Arguments:
PATTERN specifies the name of the output files, and should contain a variable
part that will be replaced by an integer. It can be a 'scanf' compatible
pattern such as '%i' or '%0Xi', for example 'image%04i.png'.
A character '%' repeated multiple times, such as `%%%%` or `%%%%%%`, can
also be used to specify the size of the integer portion of the name.
The pattern can include a '/' that would indicate a directory, and if this
directory does not exist, collect.py will create it before moving the file.
If specified, `--copy` copies the files/directories instead of moving them.
If specified, INTEGER is the first index to be used (default=0).
PATH1, PATH2, etc. is a list of files or directories
Examples:
collect.py image%%%%.png *.png
will rename image files to: image0000.png, image0001.png, etc.
collect.py --copy image%%%%.png 1 run*/image.png
will copy the image files, starting at index 1
collect.py run%%%%/config.cym config*.cym
will create directories `run????` and move the `config*.cym` files into them
<NAME>, 2012--2018. Last modified 2.10.2017
"""
import sys, shutil, os, curses.ascii
#------------------------------------------------------------------------
def copy_recursive(src, dst):
"""Copy directory recursively"""
if os.path.isfile(src):
shutil.copy2(src, dst)
elif os.path.isdir(src):
try:
os.mkdir(dst)
except OSError:
pass
files = os.listdir(src)
for f in files:
s = os.path.join(src, f)
d = os.path.join(dst, f)
copy_recursive(s, d)
def main(args):
"""rename files"""
do_copy = False
arg = args.pop(0);
# check if 'copy' specified before pattern
if arg=='-c' or arg=='--copy' or arg=='copy=1':
do_copy = True
pattern = args.pop(0);
else:
pattern = arg
# check validity of the pattern
if os.path.isfile(pattern):
        sys.stderr.write("Error: first argument should be the pattern used to build output file name\n")
return 1
try:
res = ( pattern % 0 )
except:
# check for repeated '%' character:
for n in range(10,0,-1):
s = pattern.find('%'*n)
if s > 0:
pattern = pattern.replace('%'*n, '%0'+str(n)+'i', 1);
break
try:
res = ( pattern % 0 )
except:
sys.stderr.write("Error: the pattern should accept an integer: eg. '%04i'\n")
return 1
for c in res:
if curses.ascii.isspace(c):
sys.stderr.write("Error: the pattern includes or generates white space character\n")
return 1
# go
paths = []
idx = 0
# parse arguments:
for arg in args:
if arg=='-c' or arg=='--copy' or arg=='copy=1':
do_copy = True
        elif arg.isdigit():
            idx = int(arg)
elif os.path.isfile(arg) or os.path.isdir(arg):
paths.append(arg)
else:
            sys.stderr.write("Error: '%s' is not a file or directory\n" % arg)
return 1
# process all files
res = []
for src in paths:
while idx < 1000000:
dst = pattern % idx
idx += 1
if dst == src:
res.append(dst)
break
if not os.path.exists(dst):
                # make the directory if the name includes a directory that does not exist:
dir = os.path.dirname(dst)
if dir and not os.path.isdir(dir):
os.mkdir(dir)
# process file:
if do_copy:
copy_recursive(src, dst)
else:
os.rename(src, dst)
res.append(dst)
print("%s -> %s" % (src, dst))
break
return res
#------------------------------------------------------------------------
if __name__ == "__main__":
if len(sys.argv) < 2 or sys.argv[1].endswith("help"):
print(__doc__)
else:
main(sys.argv[1:])
|
[
"os.mkdir",
"os.path.isdir",
"shutil.copy2",
"os.path.dirname",
"os.path.exists",
"os.rename",
"os.path.isfile",
"sys.stderr.write",
"os.path.join",
"os.listdir"
] |
[((2020, 2039), 'os.path.isfile', 'os.path.isfile', (['src'], {}), '(src)\n', (2034, 2039), False, 'import sys, shutil, os, curses.ascii\n'), ((2656, 2679), 'os.path.isfile', 'os.path.isfile', (['pattern'], {}), '(pattern)\n', (2670, 2679), False, 'import sys, shutil, os, curses.ascii\n'), ((2049, 2071), 'shutil.copy2', 'shutil.copy2', (['src', 'dst'], {}), '(src, dst)\n', (2061, 2071), False, 'import sys, shutil, os, curses.ascii\n'), ((2081, 2099), 'os.path.isdir', 'os.path.isdir', (['src'], {}), '(src)\n', (2094, 2099), False, 'import sys, shutil, os, curses.ascii\n'), ((2689, 2793), 'sys.stderr.write', 'sys.stderr.write', (['"""Error: first argument should be the pattern used to build output file name"""'], {}), "(\n 'Error: first argument should be the pattern used to build output file name'\n )\n", (2705, 2793), False, 'import sys, shutil, os, curses.ascii\n'), ((2197, 2212), 'os.listdir', 'os.listdir', (['src'], {}), '(src)\n', (2207, 2212), False, 'import sys, shutil, os, curses.ascii\n'), ((3319, 3408), 'sys.stderr.write', 'sys.stderr.write', (['"""Error: the pattern includes or generates white space character\n"""'], {}), "(\n 'Error: the pattern includes or generates white space character\\n')\n", (3335, 3408), False, 'import sys, shutil, os, curses.ascii\n'), ((2126, 2139), 'os.mkdir', 'os.mkdir', (['dst'], {}), '(dst)\n', (2134, 2139), False, 'import sys, shutil, os, curses.ascii\n'), ((2253, 2273), 'os.path.join', 'os.path.join', (['src', 'f'], {}), '(src, f)\n', (2265, 2273), False, 'import sys, shutil, os, curses.ascii\n'), ((2290, 2310), 'os.path.join', 'os.path.join', (['dst', 'f'], {}), '(dst, f)\n', (2302, 2310), False, 'import sys, shutil, os, curses.ascii\n'), ((4090, 4109), 'os.path.exists', 'os.path.exists', (['dst'], {}), '(dst)\n', (4104, 4109), False, 'import sys, shutil, os, curses.ascii\n'), ((4214, 4234), 'os.path.dirname', 'os.path.dirname', (['dst'], {}), '(dst)\n', (4229, 4234), False, 'import sys, shutil, os, curses.ascii\n'), ((3154, 3231), 'sys.stderr.write', 'sys.stderr.write', (['"""Error: the pattern should accept an integer: eg. \'%04i\'\n"""'], {}), '("Error: the pattern should accept an integer: eg. \'%04i\'\\n")\n', (3170, 3231), False, 'import sys, shutil, os, curses.ascii\n'), ((3664, 3683), 'os.path.isfile', 'os.path.isfile', (['arg'], {}), '(arg)\n', (3678, 3683), False, 'import sys, shutil, os, curses.ascii\n'), ((3687, 3705), 'os.path.isdir', 'os.path.isdir', (['arg'], {}), '(arg)\n', (3700, 3705), False, 'import sys, shutil, os, curses.ascii\n'), ((3763, 3827), 'sys.stderr.write', 'sys.stderr.write', (['("Error: \'%s\' is not a file or directory" % arg)'], {}), '("Error: \'%s\' is not a file or directory" % arg)\n', (3779, 3827), False, 'import sys, shutil, os, curses.ascii\n'), ((4306, 4319), 'os.mkdir', 'os.mkdir', (['dir'], {}), '(dir)\n', (4314, 4319), False, 'import sys, shutil, os, curses.ascii\n'), ((4467, 4486), 'os.rename', 'os.rename', (['src', 'dst'], {}), '(src, dst)\n', (4476, 4486), False, 'import sys, shutil, os, curses.ascii\n'), ((4266, 4284), 'os.path.isdir', 'os.path.isdir', (['dir'], {}), '(dir)\n', (4279, 4284), False, 'import sys, shutil, os, curses.ascii\n')]
|
import os
from dotenv import find_dotenv
from dotenv import load_dotenv
# Find and load dotenv
load_dotenv(find_dotenv())
class Config:
def __init__(self):
# Source and target languages
self.SRC = os.environ.get("SRC")
self.TGT = os.environ.get("TGT")
# Dirs
self.BASE_DIR = os.environ.get("BASE_DIR")
self.TP_DIR = os.environ.get("TP_DIR")
# Paths
self.FASTTEXT_MODEL_PATH = os.environ.get("FASTTEXT_MODEL_PATH")
self.STOPWORDS_PATH = os.environ.get("STOPWORDS_PATH")
self.EXTRA_ALIGN_PATH = os.environ.get("EXTRA_ALIGN_PATH")
|
[
"os.environ.get",
"dotenv.find_dotenv"
] |
[((108, 121), 'dotenv.find_dotenv', 'find_dotenv', ([], {}), '()\n', (119, 121), False, 'from dotenv import find_dotenv\n'), ((222, 243), 'os.environ.get', 'os.environ.get', (['"""SRC"""'], {}), "('SRC')\n", (236, 243), False, 'import os\n'), ((263, 284), 'os.environ.get', 'os.environ.get', (['"""TGT"""'], {}), "('TGT')\n", (277, 284), False, 'import os\n'), ((325, 351), 'os.environ.get', 'os.environ.get', (['"""BASE_DIR"""'], {}), "('BASE_DIR')\n", (339, 351), False, 'import os\n'), ((374, 398), 'os.environ.get', 'os.environ.get', (['"""TP_DIR"""'], {}), "('TP_DIR')\n", (388, 398), False, 'import os\n'), ((451, 488), 'os.environ.get', 'os.environ.get', (['"""FASTTEXT_MODEL_PATH"""'], {}), "('FASTTEXT_MODEL_PATH')\n", (465, 488), False, 'import os\n'), ((519, 551), 'os.environ.get', 'os.environ.get', (['"""STOPWORDS_PATH"""'], {}), "('STOPWORDS_PATH')\n", (533, 551), False, 'import os\n'), ((584, 618), 'os.environ.get', 'os.environ.get', (['"""EXTRA_ALIGN_PATH"""'], {}), "('EXTRA_ALIGN_PATH')\n", (598, 618), False, 'import os\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import cv2
import numpy as np
from cnocr import CnOcr
# Size used later when generating the invoice image, based on the standard VAT invoice layout of 240mm x 140mm
height_resize = 1400
width_resize = 2400
# Instantiate CnOcr objects for different purposes
ocr = CnOcr(name='')  # mixed characters
ocr_numbers = CnOcr(name='numbers', cand_alphabet='0123456789.')  # digits only
ocr_UpperSerial = CnOcr(name='UpperSerial',
                      cand_alphabet='0123456789ABCDEFGHIJKLMNPQRSTUVWXYZ')  # serial numbers: digits and uppercase letters (no letter O)
# Field-name lists for the purchaser, seller and invoice sections
purchaser_dict = ['purchaserName', 'purchaserCode', 'purchaserAddrTel', 'purchaserBankCode']
seller_dict = ['sellerName', 'sellerCode', 'sellerAddrTel', 'sellerBankCode']
invoice_dict = ['invoiceCode', 'invoiceNumber', 'invoiceDate', 'checkCode']
# Regions to crop from the image - field names
crop_range_list_name = ['invoice', 'purchaser', 'seller',
'totalExpense', 'totalTax', 'totalTaxExpenseZh', 'totalTaxExpense',
'remark', 'title', 'machineCode']
# Regions to crop from the image - coordinates [x, y, width, height]
crop_range_list_data = [[1750, 20, 500, 250], [420, 280, 935, 220], [420, 1030, 935, 230],
[1500, 880, 390, 75], [2000, 880, 330, 75], [750, 960, 600, 65], [1870, 960, 300, 70],
[1455, 1045, 400, 180], [760, 50, 900, 110], [280, 200, 250, 75]]
# OCR type used for each cropped region - 0: mixed characters, 1: digits only, 2: serial number, 3: multi-line mixed
crop_range_list_type = [3, 3, 3,
1, 1, 0, 1,
0, 0, 1]
# Resize the original image
def resizeImg(image, height=height_resize):
h, w = image.shape[:2]
pro = height / h
size = (int(w * pro), int(height))
img = cv2.resize(image, size)
return img
# Edge detection
def getCanny(image):
    # Gaussian blur
binary = cv2.GaussianBlur(image, (3, 3), 2, 2)
    # Canny edge detection
binary = cv2.Canny(binary, 60, 240, apertureSize=3)
    # Dilate to help close the edges
kernel = np.ones((3, 3), np.uint8)
binary = cv2.dilate(binary, kernel, iterations=1)
    # Save the binary edge image
cv2.imwrite('result/binary.jpg', binary)
return binary
# Find the contour with the largest area
def findMaxContour(image):
    # Find contours
contours, _ = cv2.findContours(image, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)
    # Compute areas
max_area = 0.0
max_contour = []
for contour in contours:
current_area = cv2.contourArea(contour)
if current_area > max_area:
max_area = current_area
max_contour = contour
return max_contour, max_area
# Fit a polygon to the convex hull and return its four vertices
def getBoxPoint(contour):
    # Convex hull of the contour
hull = cv2.convexHull(contour)
epsilon = 0.02 * cv2.arcLength(contour, True)
approx = cv2.approxPolyDP(hull, epsilon, True)
approx = approx.reshape((len(approx), 2))
return approx
# Map the quadrilateral points back to the original image scale
def adapPoint(box, pro):
box_pro = box
if pro != 1.0:
box_pro = box / pro
box_pro = np.trunc(box_pro)
return box_pro
# Order the quadrilateral vertices as [top-left, top-right, bottom-right, bottom-left]
def orderPoints(pts):
rect = np.zeros((4, 2), dtype="float32")
s = pts.sum(axis=1)
rect[0] = pts[np.argmin(s)]
rect[2] = pts[np.argmax(s)]
diff = np.diff(pts, axis=1)
rect[1] = pts[np.argmin(diff)]
rect[3] = pts[np.argmax(diff)]
return rect
# Distance between two points (used for width/height)
def pointDistance(a, b):
return int(np.sqrt(np.sum(np.square(a - b))))
# Perspective transform
def warpImage(image, box):
w, h = pointDistance(box[0], box[1]), \
pointDistance(box[1], box[2])
dst_rect = np.array([[0, 0],
[w - 1, 0],
[w - 1, h - 1],
[0, h - 1]], dtype='float32')
M = cv2.getPerspectiveTransform(box, dst_rect)
warped = cv2.warpPerspective(image, M, (w, h))
return warped
# Draw a quadrilateral through four points
def drawRect(img, pt1, pt2, pt3, pt4, color, line_width):
cv2.line(img, pt1, pt2, color, line_width)
cv2.line(img, pt2, pt3, color, line_width)
cv2.line(img, pt3, pt4, color, line_width)
cv2.line(img, pt1, pt4, color, line_width)
# Full image preprocessing pipeline
def imagePreProcessing(path):
image = cv2.imread(path)
    # Convert to grayscale and denoise (currently disabled)
# image = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY)
# image = cv2.GaussianBlur(image, (3,3), 0)
    # Edge detection, contour finding and vertex determination
ratio = height_resize / image.shape[0]
img = resizeImg(image)
binary_img = getCanny(img)
max_contour, max_area = findMaxContour(binary_img)
box = getBoxPoint(max_contour)
boxes = adapPoint(box, ratio)
boxes = orderPoints(boxes)
    # Perspective transform
warped = warpImage(image, boxes)
    # Resize the final image
size = (width_resize, height_resize)
warped = cv2.resize(warped, size, interpolation=cv2.INTER_CUBIC)
    # Draw the outline box
drawRect(image, tuple(boxes[0]), tuple(boxes[1]), tuple(boxes[2]), tuple(boxes[3]), (0, 0, 255), 2)
cv2.imwrite("result/outline.jpg", image)
return warped
# Crop a region of the image - test version that also shows and saves the crop; use the production version below in practice
def cropImage_test(img, crop_range, filename='Undefined'):
xpos, ypos, width, height = crop_range
crop = img[ypos:ypos + height, xpos:xpos + width]
    if filename == 'Undefined':  # if no filename was given, build one from the crop coordinates
filename = 'crop-' + str(xpos) + '-' + str(ypos) + '-' + str(width) + '-' + str(height) + '.jpg'
    cv2.imshow(filename, crop)  # display the cropped region (testing only)
    # cv2.imwrite(filename, crop)  # imwrite garbles filenames containing Chinese characters; use imencode below (testing only)
    # save the cropped region to disk (testing only)
cv2.imencode('.jpg', crop)[1].tofile(filename)
return crop
# Crop a region of the image
def cropImage(img, crop_range):
xpos, ypos, width, height = crop_range
crop = img[ypos:ypos + height, xpos:xpos + width]
return crop
# Recognize text in a cropped image
def cropOCR(crop, ocrType):
text_crop = ''
if ocrType == 0:
text_crop_list = ocr.ocr_for_single_line(crop)
elif ocrType == 1:
text_crop_list = ocr_numbers.ocr_for_single_line(crop)
elif ocrType == 2:
text_crop_list = ocr_UpperSerial.ocr_for_single_line(crop)
elif ocrType == 3:
text_crop_list = ocr.ocr(crop)
for i in range(len(text_crop_list)):
ocr_text = ''.join(text_crop_list[i]).split(':')[-1].split(';')[-1]
            # treat any of '-', '—', '_', '―' as a table border and skip it
if '-' in ocr_text or '—' in ocr_text or '_' in ocr_text or '―' in ocr_text:
continue
text_crop = text_crop + ocr_text + ','
return text_crop
text_crop = ''.join(text_crop_list)
return text_crop
def imageOcr(path):
    # Preprocess the image
# path = 'test.jpg'
warped = imagePreProcessing(path)
    # Recognize each block
receipt = {}
for i in range(len(crop_range_list_data)):
crop = cropImage(warped, crop_range_list_data[i])
crop_text = cropOCR(crop, crop_range_list_type[i])
        # Invoices never contain lowercase o, lowercase l or uppercase O: replace o->0, l->1, O->0, strip spaces, and keep the text after the last colon
crop_text = crop_text.replace('o', '0').replace(' ', '').replace('l', '1').replace('O', '0').split(':')[-1]
        # seller information
if crop_range_list_name[i] == 'seller':
crop_text = crop_text.split(',')
for i in range(4):
if i < len(crop_text):
receipt.update({seller_dict[i]: crop_text[i]})
else:
receipt.update({seller_dict[i]: ''})
elif crop_range_list_name[i] == 'invoice':
crop_text = crop_text.split(',')
for i in range(4):
if i < len(crop_text):
receipt.update({invoice_dict[i]: crop_text[i]})
else:
receipt.update({invoice_dict[i]: ''})
elif crop_range_list_name[i] == 'purchaser':
crop_text = crop_text.split(',')
for i in range(4):
if i < len(crop_text):
receipt.update({purchaser_dict[i]: crop_text[i]})
else:
receipt.update({purchaser_dict[i]: ''})
else:
if crop_range_list_name[i] == 'title':
crop_text = crop_text[0:2] + '增值税普通发票'
receipt.update({crop_range_list_name[i]: crop_text})
receipt['sellerCode'] = receipt['sellerCode'].replace('工', '1').replace('.', '')
receipt['purchaserCode'] = receipt['purchaserCode'].replace('工', '1').replace('.', '')
for key in receipt:
print(key + ':' + receipt[key])
receipt.update({"serviceDetails": []})
cv2.imwrite('result/block.jpg', warped)
    # Show the recognized regions
for i in range(len(crop_range_list_data)):
warped = cv2.rectangle(warped, (crop_range_list_data[i][0], crop_range_list_data[i][1]),
(crop_range_list_data[i][0] + crop_range_list_data[i][2],
crop_range_list_data[i][1] + crop_range_list_data[i][3]),
(0, 0, 255), 2)
    # Display and save the preprocessed image (testing only; raises errors in production)
# cv2.namedWindow("warpImage", 0)
# cv2.resizeWindow("warpImage", 1200, 700)
# cv2.imshow('warpImage', warped)
    # Save the result image locally
cv2.imwrite('result/result.jpg', warped)
return receipt
if __name__ == '__main__':
print(imageOcr("test0.jpg"))
# cv2.waitKey(0)
|
[
"cv2.GaussianBlur",
"cv2.approxPolyDP",
"cv2.getPerspectiveTransform",
"cv2.arcLength",
"numpy.argmax",
"numpy.ones",
"numpy.argmin",
"cv2.rectangle",
"cv2.imencode",
"cv2.imshow",
"cv2.line",
"cv2.warpPerspective",
"cv2.contourArea",
"cv2.dilate",
"cv2.imwrite",
"cv2.resize",
"cnocr.CnOcr",
"cv2.Canny",
"numpy.square",
"cv2.convexHull",
"numpy.zeros",
"cv2.imread",
"numpy.diff",
"numpy.array",
"numpy.trunc",
"cv2.findContours"
] |
[((208, 222), 'cnocr.CnOcr', 'CnOcr', ([], {'name': '""""""'}), "(name='')\n", (213, 222), False, 'from cnocr import CnOcr\n'), ((245, 295), 'cnocr.CnOcr', 'CnOcr', ([], {'name': '"""numbers"""', 'cand_alphabet': '"""0123456789."""'}), "(name='numbers', cand_alphabet='0123456789.')\n", (250, 295), False, 'from cnocr import CnOcr\n'), ((321, 399), 'cnocr.CnOcr', 'CnOcr', ([], {'name': '"""UpperSerial"""', 'cand_alphabet': '"""0123456789ABCDEFGHIJKLMNPQRSTUVWXYZ"""'}), "(name='UpperSerial', cand_alphabet='0123456789ABCDEFGHIJKLMNPQRSTUVWXYZ')\n", (326, 399), False, 'from cnocr import CnOcr\n'), ((1536, 1559), 'cv2.resize', 'cv2.resize', (['image', 'size'], {}), '(image, size)\n', (1546, 1559), False, 'import cv2\n'), ((1629, 1666), 'cv2.GaussianBlur', 'cv2.GaussianBlur', (['image', '(3, 3)', '(2)', '(2)'], {}), '(image, (3, 3), 2, 2)\n', (1645, 1666), False, 'import cv2\n'), ((1691, 1733), 'cv2.Canny', 'cv2.Canny', (['binary', '(60)', '(240)'], {'apertureSize': '(3)'}), '(binary, 60, 240, apertureSize=3)\n', (1700, 1733), False, 'import cv2\n'), ((1766, 1791), 'numpy.ones', 'np.ones', (['(3, 3)', 'np.uint8'], {}), '((3, 3), np.uint8)\n', (1773, 1791), True, 'import numpy as np\n'), ((1805, 1845), 'cv2.dilate', 'cv2.dilate', (['binary', 'kernel'], {'iterations': '(1)'}), '(binary, kernel, iterations=1)\n', (1815, 1845), False, 'import cv2\n'), ((1861, 1901), 'cv2.imwrite', 'cv2.imwrite', (['"""result/binary.jpg"""', 'binary'], {}), "('result/binary.jpg', binary)\n", (1872, 1901), False, 'import cv2\n'), ((1990, 2055), 'cv2.findContours', 'cv2.findContours', (['image', 'cv2.RETR_EXTERNAL', 'cv2.CHAIN_APPROX_NONE'], {}), '(image, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)\n', (2006, 2055), False, 'import cv2\n'), ((2391, 2414), 'cv2.convexHull', 'cv2.convexHull', (['contour'], {}), '(contour)\n', (2405, 2414), False, 'import cv2\n'), ((2478, 2515), 'cv2.approxPolyDP', 'cv2.approxPolyDP', (['hull', 'epsilon', '(True)'], {}), '(hull, epsilon, True)\n', (2494, 2515), False, 'import cv2\n'), ((2697, 2714), 'numpy.trunc', 'np.trunc', (['box_pro'], {}), '(box_pro)\n', (2705, 2714), True, 'import numpy as np\n'), ((2828, 2861), 'numpy.zeros', 'np.zeros', (['(4, 2)'], {'dtype': '"""float32"""'}), "((4, 2), dtype='float32')\n", (2836, 2861), True, 'import numpy as np\n'), ((2961, 2981), 'numpy.diff', 'np.diff', (['pts'], {'axis': '(1)'}), '(pts, axis=1)\n', (2968, 2981), True, 'import numpy as np\n'), ((3288, 3363), 'numpy.array', 'np.array', (['[[0, 0], [w - 1, 0], [w - 1, h - 1], [0, h - 1]]'], {'dtype': '"""float32"""'}), "([[0, 0], [w - 1, 0], [w - 1, h - 1], [0, h - 1]], dtype='float32')\n", (3296, 3363), True, 'import numpy as np\n'), ((3447, 3489), 'cv2.getPerspectiveTransform', 'cv2.getPerspectiveTransform', (['box', 'dst_rect'], {}), '(box, dst_rect)\n', (3474, 3489), False, 'import cv2\n'), ((3503, 3540), 'cv2.warpPerspective', 'cv2.warpPerspective', (['image', 'M', '(w, h)'], {}), '(image, M, (w, h))\n', (3522, 3540), False, 'import cv2\n'), ((3634, 3676), 'cv2.line', 'cv2.line', (['img', 'pt1', 'pt2', 'color', 'line_width'], {}), '(img, pt1, pt2, color, line_width)\n', (3642, 3676), False, 'import cv2\n'), ((3681, 3723), 'cv2.line', 'cv2.line', (['img', 'pt2', 'pt3', 'color', 'line_width'], {}), '(img, pt2, pt3, color, line_width)\n', (3689, 3723), False, 'import cv2\n'), ((3728, 3770), 'cv2.line', 'cv2.line', (['img', 'pt3', 'pt4', 'color', 'line_width'], {}), '(img, pt3, pt4, color, line_width)\n', (3736, 3770), False, 'import cv2\n'), ((3775, 3817), 'cv2.line', 'cv2.line', (['img', 'pt1', 
'pt4', 'color', 'line_width'], {}), '(img, pt1, pt4, color, line_width)\n', (3783, 3817), False, 'import cv2\n'), ((3872, 3888), 'cv2.imread', 'cv2.imread', (['path'], {}), '(path)\n', (3882, 3888), False, 'import cv2\n'), ((4399, 4454), 'cv2.resize', 'cv2.resize', (['warped', 'size'], {'interpolation': 'cv2.INTER_CUBIC'}), '(warped, size, interpolation=cv2.INTER_CUBIC)\n', (4409, 4454), False, 'import cv2\n'), ((4575, 4615), 'cv2.imwrite', 'cv2.imwrite', (['"""result/outline.jpg"""', 'image'], {}), "('result/outline.jpg', image)\n", (4586, 4615), False, 'import cv2\n'), ((5012, 5038), 'cv2.imshow', 'cv2.imshow', (['filename', 'crop'], {}), '(filename, crop)\n', (5022, 5038), False, 'import cv2\n'), ((8092, 8131), 'cv2.imwrite', 'cv2.imwrite', (['"""result/block.jpg"""', 'warped'], {}), "('result/block.jpg', warped)\n", (8103, 8131), False, 'import cv2\n'), ((8691, 8731), 'cv2.imwrite', 'cv2.imwrite', (['"""result/result.jpg"""', 'warped'], {}), "('result/result.jpg', warped)\n", (8702, 8731), False, 'import cv2\n'), ((2159, 2183), 'cv2.contourArea', 'cv2.contourArea', (['contour'], {}), '(contour)\n', (2174, 2183), False, 'import cv2\n'), ((2436, 2464), 'cv2.arcLength', 'cv2.arcLength', (['contour', '(True)'], {}), '(contour, True)\n', (2449, 2464), False, 'import cv2\n'), ((2904, 2916), 'numpy.argmin', 'np.argmin', (['s'], {}), '(s)\n', (2913, 2916), True, 'import numpy as np\n'), ((2936, 2948), 'numpy.argmax', 'np.argmax', (['s'], {}), '(s)\n', (2945, 2948), True, 'import numpy as np\n'), ((3000, 3015), 'numpy.argmin', 'np.argmin', (['diff'], {}), '(diff)\n', (3009, 3015), True, 'import numpy as np\n'), ((3035, 3050), 'numpy.argmax', 'np.argmax', (['diff'], {}), '(diff)\n', (3044, 3050), True, 'import numpy as np\n'), ((8210, 8431), 'cv2.rectangle', 'cv2.rectangle', (['warped', '(crop_range_list_data[i][0], crop_range_list_data[i][1])', '(crop_range_list_data[i][0] + crop_range_list_data[i][2], \n crop_range_list_data[i][1] + crop_range_list_data[i][3])', '(0, 0, 255)', '(2)'], {}), '(warped, (crop_range_list_data[i][0], crop_range_list_data[i][\n 1]), (crop_range_list_data[i][0] + crop_range_list_data[i][2], \n crop_range_list_data[i][1] + crop_range_list_data[i][3]), (0, 0, 255), 2)\n', (8223, 8431), False, 'import cv2\n'), ((3132, 3148), 'numpy.square', 'np.square', (['(a - b)'], {}), '(a - b)\n', (3141, 3148), True, 'import numpy as np\n'), ((5159, 5185), 'cv2.imencode', 'cv2.imencode', (['""".jpg"""', 'crop'], {}), "('.jpg', crop)\n", (5171, 5185), False, 'import cv2\n')]
|
from collections import deque
from dataclasses import dataclass
from enum import Enum, auto
class Type(Enum):
ERROR = auto()
INCOMPLETE = auto()
@dataclass
class SyntaxScore:
type: Type
value: int
OPENERS_CLOSERS = {
"(": ")",
"[": "]",
"{": "}",
"<": ">",
}
def get_score(entry: str) -> SyntaxScore:
"""Get score of a known bad navigation subsystem entry."""
queue = deque()
# error checker
error_scores = {
")": 3,
"]": 57,
"}": 1197,
">": 25137,
}
for symbol in entry:
if symbol in OPENERS_CLOSERS.keys():
queue.append(symbol)
else:
if OPENERS_CLOSERS[queue.pop()] != symbol:
return SyntaxScore(Type.ERROR, error_scores[symbol])
# no errors found, must be incomplete
incomplete_scores = {
")": 1,
"]": 2,
"}": 3,
">": 4,
}
value = 0
while queue:
symbol = queue.pop()
value *= 5
value += incomplete_scores[OPENERS_CLOSERS[symbol]]
return SyntaxScore(Type.INCOMPLETE, value)
if __name__ == "__main__":
from aocd import models, transforms
puzzle = models.Puzzle(year=2021, day=10)
data = transforms.lines(puzzle.input_data)
error_value_total = 0
incomplete_values = []
for data_entry in data:
score = get_score(data_entry)
if score.type == Type.ERROR:
error_value_total += score.value
else:
incomplete_values.append(score.value)
# sum of all error scores
puzzle.answer_a = error_value_total
# median of all incomplete scores
puzzle.answer_b = sorted(incomplete_values)[len(incomplete_values) // 2]
|
[
"enum.auto",
"aocd.models.Puzzle",
"collections.deque",
"aocd.transforms.lines"
] |
[((124, 130), 'enum.auto', 'auto', ([], {}), '()\n', (128, 130), False, 'from enum import Enum, auto\n'), ((148, 154), 'enum.auto', 'auto', ([], {}), '()\n', (152, 154), False, 'from enum import Enum, auto\n'), ((416, 423), 'collections.deque', 'deque', ([], {}), '()\n', (421, 423), False, 'from collections import deque\n'), ((1194, 1226), 'aocd.models.Puzzle', 'models.Puzzle', ([], {'year': '(2021)', 'day': '(10)'}), '(year=2021, day=10)\n', (1207, 1226), False, 'from aocd import models, transforms\n'), ((1238, 1273), 'aocd.transforms.lines', 'transforms.lines', (['puzzle.input_data'], {}), '(puzzle.input_data)\n', (1254, 1273), False, 'from aocd import models, transforms\n')]
|
from thespian.system.transport import ResultCallback
from datetime import datetime, timedelta
from time import sleep
class TestUnitResultCallback(object):
def _good(self, result, value):
if not hasattr(self, 'goods'): self.goods = []
self.goods.append( (result, value) )
def _fail(self, result, value):
if not hasattr(self, 'fails'): self.fails = []
self.fails.append( (result, value) )
def testGoodCallback(self):
self.goods = []
self.fails = []
rc = ResultCallback(self._good, self._fail)
rc.resultCallback(True, 5)
assert self.goods == [(True, 5)]
assert self.fails == []
def testFailCallback(self):
self.goods = []
self.fails = []
rc = ResultCallback(self._good, self._fail)
rc.resultCallback(False, 9)
assert self.goods == []
assert self.fails == [(False, 9)]
def testGoodCallbackReCall(self):
self.goods = []
self.fails = []
rc = ResultCallback(self._good, self._fail)
rc.resultCallback(True, 5)
assert self.goods == [(True, 5)]
assert self.fails == []
rc.resultCallback(True, 4)
assert self.goods == [(True, 5)]
assert self.fails == []
def testFailCallbackReCall(self):
self.goods = []
self.fails = []
rc = ResultCallback(self._good, self._fail)
rc.resultCallback(False, 9)
assert self.goods == []
assert self.fails == [(False, 9)]
rc.resultCallback(False, 8)
assert self.goods == []
assert self.fails == [(False, 9)]
def testGoodCallbackReCallFail(self):
self.goods = []
self.fails = []
rc = ResultCallback(self._good, self._fail)
rc.resultCallback(True, 5)
assert self.goods == [(True, 5)]
assert self.fails == []
rc.resultCallback(False, 4)
assert self.goods == [(True, 5)]
assert self.fails == []
def testFailCallbackReCallGood(self):
self.goods = []
self.fails = []
rc = ResultCallback(self._good, self._fail)
rc.resultCallback(False, 9)
assert self.goods == []
assert self.fails == [(False, 9)]
rc.resultCallback(True, 8)
assert self.goods == []
assert self.fails == [(False, 9)]
def testManyGoodCallbacks(self):
self.goods = []
self.fails = []
rc = [ResultCallback(self._good, self._fail) for N in range(20)]
for num,each in enumerate(rc):
each.resultCallback(True, num)
assert self.goods == [(True, N) for N in range(20)]
assert self.fails == []
def testManyFailCallbacks(self):
self.goods = []
self.fails = []
rc = [ResultCallback(self._good, self._fail) for N in range(20)]
for num,each in enumerate(rc):
each.resultCallback(False, num)
assert self.goods == []
assert self.fails == [(False, N) for N in range(20)]
def testManyGoodAndFailCallbacks(self):
self.goods = []
self.fails = []
rc = [ResultCallback(self._good, self._fail) for N in range(20)]
for num,each in enumerate(rc):
each.resultCallback(0 == num % 3, num)
assert self.goods == [(True, N) for N in range(20) if N % 3 == 0]
assert self.fails == [(False, N) for N in range(20) if N % 3]
def testChainedGoodCallbacks(self):
self.goods = []
self.fails = []
rc = ResultCallback(self._good, self._fail)
rc2 = ResultCallback(self._good, self._fail, rc)
rc3 = ResultCallback(self._good, self._fail, rc2)
rc3.resultCallback(True, 'good')
assert self.goods == [(True, 'good')] * 3
assert self.fails == []
def testChainedFailCallbacks(self):
self.goods = []
self.fails = []
rc = ResultCallback(self._good, self._fail)
rc2 = ResultCallback(self._good, self._fail, rc)
rc3 = ResultCallback(self._good, self._fail, rc2)
rc3.resultCallback(False, 'oops')
assert self.goods == []
assert self.fails == [(False, 'oops')] * 3
def testChainedGoodCallbacksDoNotDuplicate(self):
self.goods = []
self.fails = []
rc = ResultCallback(self._good, self._fail)
rc2 = ResultCallback(self._good, self._fail, rc)
rc3 = ResultCallback(self._good, self._fail, rc2)
rc2.resultCallback(True, 'ok')
assert self.goods == [(True, 'ok'), (True, 'ok')]
assert self.fails == []
rc3.resultCallback(True, 'good')
assert self.goods == [(True, 'ok'), (True, 'ok'), (True, 'good')]
assert self.fails == []
def testChainedFailCallbacksDoNotDuplicate(self):
self.goods = []
self.fails = []
rc = ResultCallback(self._good, self._fail)
rc2 = ResultCallback(self._good, self._fail, rc)
rc3 = ResultCallback(self._good, self._fail, rc2)
rc2.resultCallback(False, 'bad')
assert self.goods == []
assert self.fails == [(False, 'bad'), (False, 'bad')]
rc3.resultCallback(False, 'oops')
assert self.goods == []
assert self.fails == [(False, 'bad'), (False, 'bad'), (False, 'oops')]
def testChainedGoodCallbacksDoNotDuplicateOnFail(self):
self.goods = []
self.fails = []
rc = ResultCallback(self._good, self._fail)
rc2 = ResultCallback(self._good, self._fail, rc)
rc3 = ResultCallback(self._good, self._fail, rc2)
rc2.resultCallback(True, 'ok')
assert self.goods == [(True, 'ok'), (True, 'ok')]
assert self.fails == []
rc3.resultCallback(False, 'bad')
assert self.goods == [(True, 'ok'), (True, 'ok')]
assert self.fails == [(False, 'bad')]
def testChainedFailCallbacksDoNotDuplicateOnGood(self):
self.goods = []
self.fails = []
rc = ResultCallback(self._good, self._fail)
rc2 = ResultCallback(self._good, self._fail, rc)
rc3 = ResultCallback(self._good, self._fail, rc2)
rc2.resultCallback(False, 'bad')
assert self.goods == []
assert self.fails == [(False, 'bad'), (False, 'bad')]
rc3.resultCallback(True, 'yippee')
assert self.goods == [(True, 'yippee')]
assert self.fails == [(False, 'bad'), (False, 'bad')]
|
[
"thespian.system.transport.ResultCallback"
] |
[((525, 563), 'thespian.system.transport.ResultCallback', 'ResultCallback', (['self._good', 'self._fail'], {}), '(self._good, self._fail)\n', (539, 563), False, 'from thespian.system.transport import ResultCallback\n'), ((766, 804), 'thespian.system.transport.ResultCallback', 'ResultCallback', (['self._good', 'self._fail'], {}), '(self._good, self._fail)\n', (780, 804), False, 'from thespian.system.transport import ResultCallback\n'), ((1015, 1053), 'thespian.system.transport.ResultCallback', 'ResultCallback', (['self._good', 'self._fail'], {}), '(self._good, self._fail)\n', (1029, 1053), False, 'from thespian.system.transport import ResultCallback\n'), ((1370, 1408), 'thespian.system.transport.ResultCallback', 'ResultCallback', (['self._good', 'self._fail'], {}), '(self._good, self._fail)\n', (1384, 1408), False, 'from thespian.system.transport import ResultCallback\n'), ((1733, 1771), 'thespian.system.transport.ResultCallback', 'ResultCallback', (['self._good', 'self._fail'], {}), '(self._good, self._fail)\n', (1747, 1771), False, 'from thespian.system.transport import ResultCallback\n'), ((2093, 2131), 'thespian.system.transport.ResultCallback', 'ResultCallback', (['self._good', 'self._fail'], {}), '(self._good, self._fail)\n', (2107, 2131), False, 'from thespian.system.transport import ResultCallback\n'), ((3521, 3559), 'thespian.system.transport.ResultCallback', 'ResultCallback', (['self._good', 'self._fail'], {}), '(self._good, self._fail)\n', (3535, 3559), False, 'from thespian.system.transport import ResultCallback\n'), ((3574, 3616), 'thespian.system.transport.ResultCallback', 'ResultCallback', (['self._good', 'self._fail', 'rc'], {}), '(self._good, self._fail, rc)\n', (3588, 3616), False, 'from thespian.system.transport import ResultCallback\n'), ((3631, 3674), 'thespian.system.transport.ResultCallback', 'ResultCallback', (['self._good', 'self._fail', 'rc2'], {}), '(self._good, self._fail, rc2)\n', (3645, 3674), False, 'from thespian.system.transport import ResultCallback\n'), ((3900, 3938), 'thespian.system.transport.ResultCallback', 'ResultCallback', (['self._good', 'self._fail'], {}), '(self._good, self._fail)\n', (3914, 3938), False, 'from thespian.system.transport import ResultCallback\n'), ((3953, 3995), 'thespian.system.transport.ResultCallback', 'ResultCallback', (['self._good', 'self._fail', 'rc'], {}), '(self._good, self._fail, rc)\n', (3967, 3995), False, 'from thespian.system.transport import ResultCallback\n'), ((4010, 4053), 'thespian.system.transport.ResultCallback', 'ResultCallback', (['self._good', 'self._fail', 'rc2'], {}), '(self._good, self._fail, rc2)\n', (4024, 4053), False, 'from thespian.system.transport import ResultCallback\n'), ((4295, 4333), 'thespian.system.transport.ResultCallback', 'ResultCallback', (['self._good', 'self._fail'], {}), '(self._good, self._fail)\n', (4309, 4333), False, 'from thespian.system.transport import ResultCallback\n'), ((4348, 4390), 'thespian.system.transport.ResultCallback', 'ResultCallback', (['self._good', 'self._fail', 'rc'], {}), '(self._good, self._fail, rc)\n', (4362, 4390), False, 'from thespian.system.transport import ResultCallback\n'), ((4405, 4448), 'thespian.system.transport.ResultCallback', 'ResultCallback', (['self._good', 'self._fail', 'rc2'], {}), '(self._good, self._fail, rc2)\n', (4419, 4448), False, 'from thespian.system.transport import ResultCallback\n'), ((4841, 4879), 'thespian.system.transport.ResultCallback', 'ResultCallback', (['self._good', 'self._fail'], {}), '(self._good, self._fail)\n', (4855, 
4879), False, 'from thespian.system.transport import ResultCallback\n'), ((4894, 4936), 'thespian.system.transport.ResultCallback', 'ResultCallback', (['self._good', 'self._fail', 'rc'], {}), '(self._good, self._fail, rc)\n', (4908, 4936), False, 'from thespian.system.transport import ResultCallback\n'), ((4951, 4994), 'thespian.system.transport.ResultCallback', 'ResultCallback', (['self._good', 'self._fail', 'rc2'], {}), '(self._good, self._fail, rc2)\n', (4965, 4994), False, 'from thespian.system.transport import ResultCallback\n'), ((5405, 5443), 'thespian.system.transport.ResultCallback', 'ResultCallback', (['self._good', 'self._fail'], {}), '(self._good, self._fail)\n', (5419, 5443), False, 'from thespian.system.transport import ResultCallback\n'), ((5458, 5500), 'thespian.system.transport.ResultCallback', 'ResultCallback', (['self._good', 'self._fail', 'rc'], {}), '(self._good, self._fail, rc)\n', (5472, 5500), False, 'from thespian.system.transport import ResultCallback\n'), ((5515, 5558), 'thespian.system.transport.ResultCallback', 'ResultCallback', (['self._good', 'self._fail', 'rc2'], {}), '(self._good, self._fail, rc2)\n', (5529, 5558), False, 'from thespian.system.transport import ResultCallback\n'), ((5955, 5993), 'thespian.system.transport.ResultCallback', 'ResultCallback', (['self._good', 'self._fail'], {}), '(self._good, self._fail)\n', (5969, 5993), False, 'from thespian.system.transport import ResultCallback\n'), ((6008, 6050), 'thespian.system.transport.ResultCallback', 'ResultCallback', (['self._good', 'self._fail', 'rc'], {}), '(self._good, self._fail, rc)\n', (6022, 6050), False, 'from thespian.system.transport import ResultCallback\n'), ((6065, 6108), 'thespian.system.transport.ResultCallback', 'ResultCallback', (['self._good', 'self._fail', 'rc2'], {}), '(self._good, self._fail, rc2)\n', (6079, 6108), False, 'from thespian.system.transport import ResultCallback\n'), ((2451, 2489), 'thespian.system.transport.ResultCallback', 'ResultCallback', (['self._good', 'self._fail'], {}), '(self._good, self._fail)\n', (2465, 2489), False, 'from thespian.system.transport import ResultCallback\n'), ((2784, 2822), 'thespian.system.transport.ResultCallback', 'ResultCallback', (['self._good', 'self._fail'], {}), '(self._good, self._fail)\n', (2798, 2822), False, 'from thespian.system.transport import ResultCallback\n'), ((3126, 3164), 'thespian.system.transport.ResultCallback', 'ResultCallback', (['self._good', 'self._fail'], {}), '(self._good, self._fail)\n', (3140, 3164), False, 'from thespian.system.transport import ResultCallback\n')]
|
import sys
from cli_augments import arg_parser
from htmlreader import read_page
purgeFiles = False
newItem = False
weight = ''
upc = ''
video_link = None
# parse arguments
processedArgs = arg_parser(sys.argv)
if type(processedArgs) == str:
url = processedArgs
read_page(url, False)
elif type(processedArgs) == dict:
read_page(processedArgs, False)
elif type(processedArgs) == list:
for url_arg in processedArgs:
read_page(url_arg, True)
else:
print('\nNo valid URL was supplied. Program will now terminate.')
exit(0)
|
[
"cli_augments.arg_parser",
"htmlreader.read_page"
] |
[((190, 210), 'cli_augments.arg_parser', 'arg_parser', (['sys.argv'], {}), '(sys.argv)\n', (200, 210), False, 'from cli_augments import arg_parser\n'), ((271, 292), 'htmlreader.read_page', 'read_page', (['url', '(False)'], {}), '(url, False)\n', (280, 292), False, 'from htmlreader import read_page\n'), ((331, 362), 'htmlreader.read_page', 'read_page', (['processedArgs', '(False)'], {}), '(processedArgs, False)\n', (340, 362), False, 'from htmlreader import read_page\n'), ((439, 463), 'htmlreader.read_page', 'read_page', (['url_arg', '(True)'], {}), '(url_arg, True)\n', (448, 463), False, 'from htmlreader import read_page\n')]
|
from abc import abstractmethod
from weakref import WeakValueDictionary
from typing import Iterable, Tuple, Type, Generic, TypeVar
T = TypeVar('T')
class ResourceLevels(Generic[T]):
"""
Common class for named resource levels
Representation for the levels of multiple named resources. Every set of resources,
such as :py:class:`usim.Resources` or :py:class:`usim.Capacities`, specializes a
:py:class:`~.ResourceLevels` subclass with one attribute for each named resource.
For example, ``Resources(a=3, b=4)`` uses a :py:class:`~.ResourceLevels` with
attributes ``a`` and ``b``.
.. code:: python3
from usim import Resources
resources = Resources(a=3, b=4)
print(resources.levels.a) # 3
print(resources.levels.b) # 4
print(resources.levels.c) # raises AttributeError
:py:class:`~.ResourceLevels` subtypes allow no additional attributes other than
their initial resources, but their values may be changed.
Instantiating a subtype requires resource levels to be specified by keyword;
    missing resources are set to zero.
Each resource always uses the same :py:class:`~.ResourceLevels` subtype.
Binary operators for comparisons and arithmetic can be applied for
instances of the same subtype.
.. describe:: levels_a + levels_b
levels_a - levels_b
Elementwise addition/subtraction of values.
.. describe:: levels_a > levels_b
levels_a >= levels_b
levels_a <= levels_b
levels_a < levels_b
Strict elementwise comparison of values.
:py:data:`True` if the comparison is satisfied by each element pair,
:py:data:`False` otherwise.
.. describe:: levels_a == levels_b
Total elementwise equality of values.
:py:data:`True` if each element pair is equal,
:py:data:`False` otherwise.
The inverse of ``levels_a != levels_b``.
.. describe:: levels_a != levels_b
        Partial elementwise inequality of values.
:py:data:`False` if each element pair is equal,
:py:data:`True` otherwise.
The inverse of ``levels_a == levels_b``.
In addition, iteration on a :py:class:`~.ResourceLevels` subtype yields
``field, value`` pairs. This is similar to :py:meth:`dict.items`.
.. describe:: for field, value in levels_a
Iterate over the current ``field, value`` pairs.
.. describe:: dict(levels_a)
Create :py:class:`dict` of ``field: value`` pairs.
"""
__slots__ = ()
    __fields__: Tuple[str, ...] = ()
#: cache of currently used specialisations to avoid
#: recreating/duplicating commonly used types
__specialisation_cache__ = WeakValueDictionary()
def __init__(self, **kwargs: T):
spec_name = f'{__specialise__.__module__}.{__specialise__.__qualname__}'
raise TypeError(
f'Base class {self.__class__.__name__} cannot be instantiated.\n'
'\n'
f'The {self.__class__.__name__} type is intended to be automatically\n'
'subclassed by resources. You should not encounter the base class during\n'
'well-behaved simulations.\n'
'\n'
f'Use {spec_name} to declare subtypes with valid resource level names.\n'
)
@abstractmethod
def __add__(self, other: 'ResourceLevels[T]') -> 'ResourceLevels[T]':
raise NotImplementedError
@abstractmethod
def __sub__(self, other: 'ResourceLevels[T]') -> 'ResourceLevels[T]':
raise NotImplementedError
@abstractmethod
def __gt__(self, other: 'ResourceLevels[T]') -> bool:
raise NotImplementedError
@abstractmethod
def __ge__(self, other: 'ResourceLevels[T]') -> bool:
raise NotImplementedError
@abstractmethod
def __le__(self, other: 'ResourceLevels[T]') -> bool:
raise NotImplementedError
@abstractmethod
def __lt__(self, other: 'ResourceLevels[T]') -> bool:
raise NotImplementedError
@abstractmethod
def __eq__(self, other: 'ResourceLevels[T]') -> bool:
raise NotImplementedError
@abstractmethod
def __ne__(self, other: 'ResourceLevels[T]') -> bool:
raise NotImplementedError
def __iter__(self):
for field in self.__fields__:
yield field, getattr(self, field)
def __repr__(self):
content = ', '.join(
f'{key}={item}' for key, item in self
)
return f'{self.__class__.__name__}({content})'
def __specialise__(zero: T, names: Iterable[str]) -> Type[ResourceLevels[T]]:
"""
Create a specialisation of :py:class:`~.ResourceLevels`
:param zero: zero value for all fields
:param names: names of fields
"""
fields = tuple(sorted(names))
try:
return ResourceLevels.__specialisation_cache__[fields]
except KeyError:
pass
class SpecialisedResourceLevels(ResourceLevels):
__slots__ = fields
__fields__ = fields
__init__ = __make_init__(zero, fields)
__add__ = __binary_op__('__add__', '+', fields)
__sub__ = __binary_op__('__sub__', '-', fields)
__gt__ = __comparison_op__('__gt__', '>', fields)
__ge__ = __comparison_op__('__ge__', '>=', fields)
__le__ = __comparison_op__('__le__', '<=', fields)
        __lt__ = __comparison_op__('__lt__', '<', fields)
__eq__ = __comparison_op__('__eq__', '==', fields)
def __ne__(self, other):
return not self == other
ResourceLevels.__specialisation_cache__[fields] = SpecialisedResourceLevels
return SpecialisedResourceLevels
def __make_init__(zero, names: Tuple[str, ...]):
"""Make an ``__init__`` with ``names`` as keywords and defaults of ``zero``"""
namespace = {}
args_list = f'={zero}, '.join(names)
exec(
'\n'.join(
[
f"""def __init__(self, *, {args_list}={zero}):"""
] + [
f""" self.{name} = {name}"""
for name in names
]
),
namespace
)
return namespace['__init__']
def __binary_op__(op_name: str, op_symbol: str, names: Tuple[str, ...]):
"""
Make an operator method ``op_name`` to apply ``op_symbol`` to all fields ``names``
.. code:: python3
        __add__ = __binary_op__("__add__", '+', ('foo', 'bar'))
def __add__(self, other):
return type(self)(
foo = self.foo + other.foo,
bar = self.bar + other.bar,
)
"""
namespace = {}
exec(
'\n'.join(
[
f"""def {op_name}(self, other):""",
""" assert type(self) is type(other),\\""",
""" 'resource levels specialisations cannot be mixed'""",
""" return type(self)(""",
] + [
f""" {name} = self.{name} {op_symbol} other.{name},"""
for name in names
] + [
""" )"""
]
),
namespace
)
return namespace[op_name]
def __comparison_op__(op_name: str, op_symbol: str, names: Tuple[str, ...]):
"""
Make a comparison method ``op_name`` to apply ``op_symbol`` to all fields ``names``
.. code:: python3
        __eq__ = __comparison_op__("__eq__", '==', ('foo', 'bar'))
        def __eq__(self, other):
            return (
                self.foo == other.foo
                and self.bar == other.bar
            )
"""
namespace = {}
exec(
'\n'.join(
[
f"""def {op_name}(self, other):""",
""" assert type(self) is type(other),\\""",
""" 'resource levels specialisations cannot be mixed'""",
""" return (""",
f""" self.{names[0]} {op_symbol} other.{names[0]}"""
] + [
f""" and self.{name} {op_symbol} other.{name}"""
for name in names[1:]
] + [
""" )"""
]
),
namespace
)
return namespace[op_name]
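# ---------------------------------------------------------------------------
# Usage sketch (editor's illustration, not part of the original module): how
# __specialise__ builds a concrete ResourceLevels subtype. The resource names
# 'memory' and 'cores' are hypothetical.
if __name__ == '__main__':
    Levels = __specialise__(0, ('memory', 'cores'))
    a = Levels(memory=4, cores=2)
    b = Levels(memory=1)   # unspecified resources default to the zero value
    print(a + b)           # elementwise addition of each field
    print(a >= b)          # True only if every field of a is >= that of b
    print(dict(a))         # {'cores': 2, 'memory': 4}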
|
[
"typing.TypeVar",
"weakref.WeakValueDictionary"
] |
[((136, 148), 'typing.TypeVar', 'TypeVar', (['"""T"""'], {}), "('T')\n", (143, 148), False, 'from typing import Iterable, Tuple, Type, Generic, TypeVar\n'), ((2751, 2772), 'weakref.WeakValueDictionary', 'WeakValueDictionary', ([], {}), '()\n', (2770, 2772), False, 'from weakref import WeakValueDictionary\n')]
|
import numpy as np
# create array data
predict = np.array([[1,2,2,1],
[4.5,2.5,10,0.5],
[6,6,8,4],
[6.26,6.26,8.26,4.26]],np.double)
truth = np.array([[1,4,3,3],
[1.2,2.2,2.2,1.2],
[5,2,8,1],
[6.1,6.1,8.1,4.1],
[8.1,8.1,11.1,9.1]], np.double)
# get useful variables
nums_pred = len(predict)
nums_gt = len(truth)
iou_matrix = np.zeros((nums_pred,nums_gt))
# boxA stores the top-left and bottom-right corner coordinates of a bounding box
# boxA=[x1,y1,x2,y2]
def iou(boxA, boxB):
    # Compute the four sides of the overlap region; note the use of max/min
left_max = max(boxA[0],boxB[0])
top_max = max(boxA[1],boxB[1])
right_min = min(boxA[2], boxB[2])
bottom_min = min(boxA[3], boxB[3])
    # Area of the overlap region
    inter = max(0,(right_min-left_max)) * max(0, (bottom_min-top_max))  # width * height
Sa = (boxA[2]-boxA[0])*(boxA[3]-boxA[1])
Sb = (boxB[2]-boxB[0])*(boxB[3]-boxB[1])
    # Total area of both boxes, then the IoU
union = Sa+Sb-inter
iou = inter/union
return iou
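# Sanity check of iou() on a hand-worked example (editor's illustration):
# boxA = [0, 0, 2, 2] and boxB = [1, 1, 3, 3] overlap in a 1x1 square,
# so IoU = 1 / (4 + 4 - 1) = 1/7.
assert abs(iou([0, 0, 2, 2], [1, 1, 3, 3]) - 1/7) < 1e-9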
def transformBBox(boxA):
    # Convert a BBox from bottom-left + top-right to top-left + bottom-right representation
return [boxA[0], boxA[3], boxA[2], boxA[1]]
# get iou matrix
for i in range(nums_pred):
for j in range(nums_gt):
#print(truth[j])
iou_matrix[i][j] = iou(transformBBox(predict[i]), transformBBox(truth[j]))
print(iou_matrix)
res = []
IOU_theta = 0.4
while np.any(iou_matrix > IOU_theta):
ind = np.argmax(iou_matrix)
ind_col = ind % nums_gt
ind_row = (ind - ind_col) // nums_gt
print("row = %d, col = %d"%(ind_row, ind_col))
# store results for more analysis
res.append([predict[ind_row], truth[ind_col]])
    # set the corresponding row and col to zero
    # exclude those already paired from future comparison
iou_matrix[ind_row][:] = 0
# set col to 0
for ii in range(nums_pred):
iou_matrix[ii][ind_col] = 0
print(iou_matrix)
print(res)
|
[
"numpy.zeros",
"numpy.any",
"numpy.array",
"numpy.argmax"
] |
[((51, 152), 'numpy.array', 'np.array', (['[[1, 2, 2, 1], [4.5, 2.5, 10, 0.5], [6, 6, 8, 4], [6.26, 6.26, 8.26, 4.26]]', 'np.double'], {}), '([[1, 2, 2, 1], [4.5, 2.5, 10, 0.5], [6, 6, 8, 4], [6.26, 6.26, \n 8.26, 4.26]], np.double)\n', (59, 152), True, 'import numpy as np\n'), ((202, 322), 'numpy.array', 'np.array', (['[[1, 4, 3, 3], [1.2, 2.2, 2.2, 1.2], [5, 2, 8, 1], [6.1, 6.1, 8.1, 4.1], [\n 8.1, 8.1, 11.1, 9.1]]', 'np.double'], {}), '([[1, 4, 3, 3], [1.2, 2.2, 2.2, 1.2], [5, 2, 8, 1], [6.1, 6.1, 8.1,\n 4.1], [8.1, 8.1, 11.1, 9.1]], np.double)\n', (210, 322), True, 'import numpy as np\n'), ((452, 482), 'numpy.zeros', 'np.zeros', (['(nums_pred, nums_gt)'], {}), '((nums_pred, nums_gt))\n', (460, 482), True, 'import numpy as np\n'), ((1332, 1362), 'numpy.any', 'np.any', (['(iou_matrix > IOU_theta)'], {}), '(iou_matrix > IOU_theta)\n', (1338, 1362), True, 'import numpy as np\n'), ((1371, 1392), 'numpy.argmax', 'np.argmax', (['iou_matrix'], {}), '(iou_matrix)\n', (1380, 1392), True, 'import numpy as np\n')]
|
from kb import KB, TRAIN_LABEL, DEV_LABEL, TEST_LABEL
import random
import numpy as np
class SampleKB:
def __init__(self, num_relations, num_entities,
arities=[0.0, 1.0, 0.0],
fb_densities=[0.0, 0.0, 0.0],
arg_densities=[0., 0.1, 0.0],
fact_prob=0.2,
num_symm=2,
num_impl=[0, 2, 0],
num_impl_inv=2,
num_impl_conj=[0, 2, 0],
num_trans_single=2,
num_trans_diff=2,
seed=0,
position_dependent_args=False,
position_densities=[0., 0.5, 0.0]):
"""
        :param num_relations: number of relations to generate
        :param num_entities: number of distinct entities to generate
        :param arities: fraction of relations per arity (unary, binary, ternary)
        :param fb_densities: fraction of relations per arity that get a "REL$" prefix
        :param arg_densities: fraction of entity combinations that are observed, per arity
        :param fact_prob: fraction of observed argument tuples asserted per relation
        :param num_symm: number of symmetry formulae r(X0, X1) :- r(X1, X0)
        :param num_impl: number of implication formulae r2(X0, X1) :- r1(X0, X1), per arity
        :param num_impl_inv: number of inverse implications r2(X0, X1) :- r1(X1, X0)
        :param num_impl_conj: number of implications with a conjunctive body, per arity
        :param num_trans_single: number of transitivity formulae r(X0, X2) :- r(X0, X1), r(X1, X2)
        :param num_trans_diff: number of mixed transitivity formulae r3(X0, X2) :- r1(X0, X1), r2(X1, X2)
        :param seed: random seed
        :param position_dependent_args: if True, restrict entities to specific argument positions
        :param position_densities: fraction of entities available per argument position
"""
random.seed(seed)
self.kb = KB(seed=seed)
num_relations_per_arity = [int(x * num_relations) for x in arities]
entities = list(map(lambda x: "e" + str(x), range(1, num_entities+1)))
entities_arg1 = []
entities_arg2 = []
entities_arg3 = []
if position_dependent_args:
arg1_boundary = int(len(entities)*position_densities[0])
arg2_boundary = arg1_boundary + int(len(entities)*position_densities[1])
entities_arg1 = entities[0:arg1_boundary]
entities_arg2 = entities[arg1_boundary:arg2_boundary]
entities_arg3 = entities[arg2_boundary:]
else:
entities_arg1 = entities
entities_arg2 = entities
entities_arg3 = entities
pairs = [(x, y) for x in entities_arg1
for y in entities_arg2 if not x == y]
triples = [(x, y, z) for x in entities_arg1
for y in entities_arg2 for z in entities_arg3
if not x == y and not y == z and not z == x]
num_pair_samples = min(len(pairs), int(len(entities_arg1) *
len(entities_arg2) *
arg_densities[1]))
num_triple_samples = min(len(triples), int(len(entities_arg1) *
len(entities_arg2) *
len(entities_arg3) *
arg_densities[2]))
entities_per_arity = {
1: entities_arg1,
2: random.sample(pairs, num_pair_samples),
3: random.sample(triples, num_triple_samples)
}
relations_per_arity = {}
for arity in range(1, len(num_relations_per_arity) + 1):
for i in range(1, num_relations_per_arity[arity - 1] + 1):
fb_prefix = ""
if fb_densities[arity-1] > random.uniform(0, 1.0):
fb_prefix = "REL$"
if arity == 1:
rel = fb_prefix+"u"
elif arity == 2:
rel = fb_prefix+"b"
else:
rel = fb_prefix+"t"
rel += str(i)
if not arity in relations_per_arity:
relations_per_arity[arity] = list()
relations_per_arity[arity].append(rel)
for args in random.sample(entities_per_arity[arity],
int(len(entities_per_arity[arity]) * fact_prob)):
self.kb.add_train(rel, args)
inverse = []
# sample symmetric relations r(X,Y) => r(Y,X)
if 2 in relations_per_arity:
symm = random.sample([(x, x) for x in relations_per_arity[2]], num_symm)
inverse += symm
# sampling implication, reversed: r1(X,Y) => r2(Y,X)
if 2 in relations_per_arity:
inverse += random.sample([(x, y) for x in relations_per_arity[2]
for y in relations_per_arity[2]
if not x == y], num_impl_inv)
if len(inverse) > 0:
self.kb.add_formulae("inv", {2: inverse})
# sampling implications:
# r1(X) => r2(X)
# r1(X,Y) => r2(X,Y)
implications_per_arity = {}
for arity in range(1, len(num_relations_per_arity) + 1):
if arity in relations_per_arity:
implications_per_arity[arity] = \
random.sample([(x, y) for x in relations_per_arity[arity] for y in relations_per_arity[arity]
if not x == y], num_impl[arity - 1])
self.kb.add_formulae("impl", implications_per_arity)
# sampling implications with conjunction in body:
# r1(X,Y) ^ r2(X,Y) => r3(X,Y)
# r1(X) ^ r2(X) => r3(X)
implications_with_conjunction_per_arity = {}
for arity in range(1, len(num_relations_per_arity) + 1):
if arity in relations_per_arity and len(relations_per_arity[arity]) >= 3:
implications_with_conjunction_per_arity[arity] = \
random.sample([(x, y, z) for x in relations_per_arity[arity]
for y in relations_per_arity[arity]
for z in relations_per_arity[arity]
if not x == y and not y == z and not z == x],
num_impl_conj[arity - 1])
self.kb.add_formulae("impl_conj", implications_with_conjunction_per_arity)
# sampling transitivities:
transitivities = []
# (1) simple transitivities r(X,Y) ^ r(Y,Z) => r(X,Z)
# (2) general transitivities r1(X,Y) ^ r2(Y,Z) => r3(X,Z) (r1, r2, r3 differ)
if 2 in relations_per_arity:
if num_trans_single > 0:
transitivities += random.sample([(x, x, x)
for x in relations_per_arity[2]], num_trans_single)
if num_trans_diff > 0:
transitivities += random.sample([(x, y, z)
for x in relations_per_arity[2]
for y in relations_per_arity[2]
for z in relations_per_arity[2]
if not x == y and
not y == z and
not z == x], num_trans_diff)
if len(transitivities) > 0:
self.kb.add_formulae("trans", {2: transitivities})
# todo: sampling negation (also applies to all heads of formulae above):
# r1 => !r2
def get_kb(self):
return self.kb
if __name__=="__main__":
import sys
import argparse
import os
#fixed args
sampled_unobserved_per_true = 1 # number of false (unobserved) test facts added for each true test fact (inferred from clause)
simple_transitivities = False
seed = 846
np.random.seed(seed)
#input args
argparser = argparse.ArgumentParser('create artificial dataset (train+test) with rules (all arity 2)')
argparser.add_argument('--entities', '-E', required=True, type=int, help='number of entities')
argparser.add_argument('--predicates', '-P', required=True, type=int, help='number of predicates')
argparser.add_argument('--test-prob', type=float, default=0.5,
help='fraction of inferred facts (from formulae) to be added to test set')
argparser.add_argument('--arg-density', type=float, default=0.1,
help='fraction of all possible pairs of entities observed')
argparser.add_argument('--fact-prob', type=float, default=0.1,
help='for all observed pairs: fraction of those that occur with each relation')
argparser.add_argument('--symm', type=int, default=0,
help='number of clauses p(X0, X1) :- p(X1, X0)')
argparser.add_argument('--impl', type=int, default=0,
help='number of clauses p(X0, X1) :- q(X0, X1) (with p and q different)')
argparser.add_argument('--impl-inv', type=int, default=0,
help='number of clauses p(X0, X1) :- q(X1, X0)')
argparser.add_argument('--impl-conj', type=int, default=0,
help='number of clauses r(X0, X1) :- p(X0, X1), q(X0, X1)')
argparser.add_argument('--trans-single', type=int, default=0,
help='number of clauses r(X0, X2) :- r(X0, X1), r(X1, X2)')
argparser.add_argument('--trans-diff', type=int, default=0,
help='number of clauses r(X0, X2) :- p(X0, X1), q(X1, X2) (with p,q,r different)')
argparser.add_argument('--dir', type=str, default='../../data/synth/sampled',
help='target directory')
argparser.add_argument('--tag', type=str, default='synth',
help='experiment tag')
args = argparser.parse_args(sys.argv[1:])
cmd = ' '.join(arg for arg in sys.argv[1:])
Ne = args.entities
Nr = args.predicates
test_prob = args.test_prob
arg_density = args.arg_density
fact_prob = args.fact_prob
num_symm = args.symm
num_impl = args.impl
num_impl_inv = args.impl_inv
num_impl_conj = args.impl_conj
num_trans_single = args.trans_single
num_trans_diff = args.trans_diff
testKB = SampleKB(Nr, Ne,
arg_densities=[0, arg_density, 0],
fact_prob=fact_prob,
num_symm=num_symm,
num_impl_inv=num_impl_inv,
num_impl=[0, num_impl, 0],
num_impl_conj=[0, num_impl_conj, 0],
num_trans_single=num_trans_single,
num_trans_diff=num_trans_diff,
seed=seed
).get_kb()
N_original_facts = len(testKB.get_all_facts(of_types=TRAIN_LABEL))
# for fact in testKB.get_all_facts(of_types=TRAIN_LABEL):
# print(fact)
# for clause in testKB.get_formulae_strings():
# print(clause)
testKB.apply_formulae(test_prob=test_prob, sampled_unobserved_per_true=sampled_unobserved_per_true)
#create train / test file for inferbeddings
train_file = os.path.join(args.dir, args.tag + '_train.tsv')
valid_file = os.path.join(args.dir, args.tag + '_valid.tsv')
test_file = os.path.join(args.dir, args.tag + '_test.tsv')
clause_file = os.path.join(args.dir, args.tag + '_clauses.pl')
readme_file = os.path.join(args.dir, args.tag + '_config.txt')
msg = '#file: '+ args.tag + '_config.txt\n'
msg += '#%d original purely random train facts (without formulae)\n'%N_original_facts
train_facts = testKB.get_all_facts(of_types=(TRAIN_LABEL,))
msg +='#%d train facts (after creating rules and adding inferred facts to train set with prob %.3f)\n'%(len(train_facts), 1.-test_prob)
test_facts = testKB.get_all_facts(of_types=(TEST_LABEL,))
test_facts_T = [f for f in test_facts if f[1]]
test_facts_F = [f for f in test_facts if not f[1]]
msg += '#%d test facts (%d True, %d False)\n'%(len(test_facts), len(test_facts_T), len(test_facts_F))
print('\n' + msg)
for clause in testKB.get_formulae_for_ntp_strings():
print(clause)
with open(readme_file, 'w') as rf:
rf.write('\n#command:\npython3 %s\n'%' '.join(list(sys.argv)))
rf.write('\n#config:\n')
for k in ['tag', 'entities', 'predicates', 'test_prob', 'arg_density', 'fact_prob',
'symm', 'impl', 'impl_inv', 'impl_conj', 'trans_single', 'trans_diff',
'dir']:
rf.write('{}\t{}\n'.format(k, vars(args)[k]))
rf.write('seed\t{}\n'.format(seed))
rf.write('sampled_unobserved_per_true\t{}\n'.format(sampled_unobserved_per_true))
rf.write('simple_transitivities\t{}\n'.format(simple_transitivities))
rf.write('\n#stats:\n')
rf.write(msg)
with open(train_file, 'w') as trf:
for fact in sorted(testKB.get_all_facts(of_types=TRAIN_LABEL)):
pred, (subj, obj) = fact[0]
trf.write('{}\t{}\t{}\n'.format(subj, pred, obj))
with open(valid_file, 'w') as vaf:
#simple strategy for artificial setting: tune on train data
#but: for AUC evaluation, we need false train facts as well
        # (sampled_unobserved_per_true randomly sampled unobserved ones per positive train fact)
nb_pos_test = int(len(testKB.get_all_facts(of_types=TEST_LABEL))/(sampled_unobserved_per_true+1.))
train_facts_True = testKB.get_all_facts(of_types=TRAIN_LABEL)
np.random.shuffle(train_facts_True)
valid_facts_True = train_facts_True #[:nb_pos_test]
valid_facts_False = []
for (pred, (subj, obj)), truth, _ in valid_facts_True:
if truth: #should be the case
vaf.write('{}\t{}\t{}\t{}\n'.format(subj, pred, obj, {True: 1, False: 0}[truth]))
((pred_n, (subj_n, obj_n)), _, _) = testKB.sample_neg(pred, 0, 1, oracle=True)
vaf.write('{}\t{}\t{}\t{}\n'.format(subj_n, pred, obj_n, 0)) #negative fact for same relation
with open(test_file, 'w') as tef:
for fact in sorted(testKB.get_all_facts(of_types=TEST_LABEL)):
pred, (subj, obj) = fact[0]
truth = fact[1]
tef.write('{}\t{}\t{}\t{}\n'.format(subj, pred, obj, {True: 1, False: 0}[truth]))
with open(clause_file, 'w') as clf:
for clause in testKB.get_formulae_for_ntp_strings():
clf.write(clause+'\n')
|
[
"numpy.random.seed",
"argparse.ArgumentParser",
"random.uniform",
"random.sample",
"kb.KB",
"random.seed",
"os.path.join",
"numpy.random.shuffle"
] |
[((7344, 7364), 'numpy.random.seed', 'np.random.seed', (['seed'], {}), '(seed)\n', (7358, 7364), True, 'import numpy as np\n'), ((7398, 7493), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (['"""create artificial dataset (train+test) with rules (all arity 2)"""'], {}), "(\n 'create artificial dataset (train+test) with rules (all arity 2)')\n", (7421, 7493), False, 'import argparse\n'), ((10689, 10736), 'os.path.join', 'os.path.join', (['args.dir', "(args.tag + '_train.tsv')"], {}), "(args.dir, args.tag + '_train.tsv')\n", (10701, 10736), False, 'import os\n'), ((10754, 10801), 'os.path.join', 'os.path.join', (['args.dir', "(args.tag + '_valid.tsv')"], {}), "(args.dir, args.tag + '_valid.tsv')\n", (10766, 10801), False, 'import os\n'), ((10818, 10864), 'os.path.join', 'os.path.join', (['args.dir', "(args.tag + '_test.tsv')"], {}), "(args.dir, args.tag + '_test.tsv')\n", (10830, 10864), False, 'import os\n'), ((10883, 10931), 'os.path.join', 'os.path.join', (['args.dir', "(args.tag + '_clauses.pl')"], {}), "(args.dir, args.tag + '_clauses.pl')\n", (10895, 10931), False, 'import os\n'), ((10950, 10998), 'os.path.join', 'os.path.join', (['args.dir', "(args.tag + '_config.txt')"], {}), "(args.dir, args.tag + '_config.txt')\n", (10962, 10998), False, 'import os\n'), ((1172, 1189), 'random.seed', 'random.seed', (['seed'], {}), '(seed)\n', (1183, 1189), False, 'import random\n'), ((1208, 1221), 'kb.KB', 'KB', ([], {'seed': 'seed'}), '(seed=seed)\n', (1210, 1221), False, 'from kb import KB, TRAIN_LABEL, DEV_LABEL, TEST_LABEL\n'), ((13068, 13103), 'numpy.random.shuffle', 'np.random.shuffle', (['train_facts_True'], {}), '(train_facts_True)\n', (13085, 13103), True, 'import numpy as np\n'), ((2802, 2840), 'random.sample', 'random.sample', (['pairs', 'num_pair_samples'], {}), '(pairs, num_pair_samples)\n', (2815, 2840), False, 'import random\n'), ((2857, 2899), 'random.sample', 'random.sample', (['triples', 'num_triple_samples'], {}), '(triples, num_triple_samples)\n', (2870, 2899), False, 'import random\n'), ((3961, 4026), 'random.sample', 'random.sample', (['[(x, x) for x in relations_per_arity[2]]', 'num_symm'], {}), '([(x, x) for x in relations_per_arity[2]], num_symm)\n', (3974, 4026), False, 'import random\n'), ((4177, 4296), 'random.sample', 'random.sample', (['[(x, y) for x in relations_per_arity[2] for y in relations_per_arity[2] if \n not x == y]', 'num_impl_inv'], {}), '([(x, y) for x in relations_per_arity[2] for y in\n relations_per_arity[2] if not x == y], num_impl_inv)\n', (4190, 4296), False, 'import random\n'), ((4754, 4888), 'random.sample', 'random.sample', (['[(x, y) for x in relations_per_arity[arity] for y in relations_per_arity[\n arity] if not x == y]', 'num_impl[arity - 1]'], {}), '([(x, y) for x in relations_per_arity[arity] for y in\n relations_per_arity[arity] if not x == y], num_impl[arity - 1])\n', (4767, 4888), False, 'import random\n'), ((5403, 5615), 'random.sample', 'random.sample', (['[(x, y, z) for x in relations_per_arity[arity] for y in relations_per_arity\n [arity] for z in relations_per_arity[arity] if not x == y and not y ==\n z and not z == x]', 'num_impl_conj[arity - 1]'], {}), '([(x, y, z) for x in relations_per_arity[arity] for y in\n relations_per_arity[arity] for z in relations_per_arity[arity] if not x ==\n y and not y == z and not z == x], num_impl_conj[arity - 1])\n', (5416, 5615), False, 'import random\n'), ((6154, 6230), 'random.sample', 'random.sample', (['[(x, x, x) for x in relations_per_arity[2]]', 'num_trans_single'], {}), '([(x, x, x) 
for x in relations_per_arity[2]], num_trans_single)\n', (6167, 6230), False, 'import random\n'), ((6348, 6538), 'random.sample', 'random.sample', (['[(x, y, z) for x in relations_per_arity[2] for y in relations_per_arity[2] for\n z in relations_per_arity[2] if not x == y and not y == z and not z == x]', 'num_trans_diff'], {}), '([(x, y, z) for x in relations_per_arity[2] for y in\n relations_per_arity[2] for z in relations_per_arity[2] if not x == y and\n not y == z and not z == x], num_trans_diff)\n', (6361, 6538), False, 'import random\n'), ((3154, 3176), 'random.uniform', 'random.uniform', (['(0)', '(1.0)'], {}), '(0, 1.0)\n', (3168, 3176), False, 'import random\n')]
|
'''
@author: <NAME>
'''
import time
import numpy as np
import matplotlib.pyplot as plt
from algorithms import primes1, primes2, primes3, primes4, primes5, primes6, primes7, primes8
ubounds = range(0, 10000, 100)
num = len(ubounds)
results = []
for algorithm in (primes1, primes2, primes3, primes4, primes5, primes6, primes7, primes8):
print(f'Testing algorithm {algorithm.__name__}')
results_for_current_algorithm = []
for ubound in ubounds:
starttime = time.time()
result = algorithm(ubound)
endtime = time.time()
duration = endtime - starttime
results_for_current_algorithm.append(duration)
results.append(results_for_current_algorithm)
plt.plot(np.transpose(np.array(results)), linewidth=2)
plt.xticks(range(len(ubounds))[0::10], ubounds[0::10])
plt.xlabel('Upper bound for primes')
plt.ylabel('Time in seconds to generate primes')
plt.legend(['algorithm 1', 'algorithm 2', 'algorithm 3', 'algorithm 4',
'algorithm 5', 'algorithm 6', 'algorithm 7', 'algorithm 8'], loc=2)
plt.show()
|
[
"matplotlib.pyplot.show",
"matplotlib.pyplot.legend",
"time.time",
"numpy.array",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel"
] |
[((812, 848), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Upper bound for primes"""'], {}), "('Upper bound for primes')\n", (822, 848), True, 'import matplotlib.pyplot as plt\n'), ((849, 897), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Time in seconds to generate primes"""'], {}), "('Time in seconds to generate primes')\n", (859, 897), True, 'import matplotlib.pyplot as plt\n'), ((898, 1041), 'matplotlib.pyplot.legend', 'plt.legend', (["['algorithm 1', 'algorithm 2', 'algorithm 3', 'algorithm 4', 'algorithm 5',\n 'algorithm 6', 'algorithm 7', 'algorithm 8']"], {'loc': '(2)'}), "(['algorithm 1', 'algorithm 2', 'algorithm 3', 'algorithm 4',\n 'algorithm 5', 'algorithm 6', 'algorithm 7', 'algorithm 8'], loc=2)\n", (908, 1041), True, 'import matplotlib.pyplot as plt\n'), ((1050, 1060), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1058, 1060), True, 'import matplotlib.pyplot as plt\n'), ((480, 491), 'time.time', 'time.time', ([], {}), '()\n', (489, 491), False, 'import time\n'), ((545, 556), 'time.time', 'time.time', ([], {}), '()\n', (554, 556), False, 'import time\n'), ((724, 741), 'numpy.array', 'np.array', (['results'], {}), '(results)\n', (732, 741), True, 'import numpy as np\n')]
|
from flask import Flask, render_template
from flask import request
from database import Tableone
from database import db
from database import app
from selenium import webdriver
from bs4 import BeautifulSoup
@app.route('/index')
def index():
return render_template('index.html')
@app.route('/coin', methods = ['POST'])
def coin():
if request.method == 'POST':
name = request.form['name']
url = 'https://coinmarketcap.com/currencies/'+ str(name) + "/news/"
driver = webdriver.Firefox()
driver.get(url)
page = driver.page_source
soup = BeautifulSoup(page,'html.parser')
news = []
findAllNews = []
findAllNews = soup.findAll('div', class_='sc-16r8icm-0 jKrmxw container')
for news_item in findAllNews:
if news_item.find('p', class_='sc-1eb5slv-0 svowul-3 ddtKCV') is not None:
news.append(news_item.text)
        for p in news:
            new_row = Tableone(name, p)
            db.session.add(new_row)
        # commit once so every scraped item is stored, then release the browser
        db.session.commit()
        driver.quit()
        return '\n'.join(news)
if __name__ == '__main__':
app.run(debug=True)
|
[
"database.app.route",
"database.db.session.add",
"database.Tableone",
"database.app.run",
"selenium.webdriver.Firefox",
"database.db.session.commit",
"flask.render_template",
"bs4.BeautifulSoup"
] |
[((210, 229), 'database.app.route', 'app.route', (['"""/index"""'], {}), "('/index')\n", (219, 229), False, 'from database import app\n'), ((287, 323), 'database.app.route', 'app.route', (['"""/coin"""'], {'methods': "['POST']"}), "('/coin', methods=['POST'])\n", (296, 323), False, 'from database import app\n'), ((254, 283), 'flask.render_template', 'render_template', (['"""index.html"""'], {}), "('index.html')\n", (269, 283), False, 'from flask import Flask, render_template\n'), ((1133, 1152), 'database.app.run', 'app.run', ([], {'debug': '(True)'}), '(debug=True)\n', (1140, 1152), False, 'from database import app\n'), ((502, 521), 'selenium.webdriver.Firefox', 'webdriver.Firefox', ([], {}), '()\n', (519, 521), False, 'from selenium import webdriver\n'), ((595, 629), 'bs4.BeautifulSoup', 'BeautifulSoup', (['page', '"""html.parser"""'], {}), "(page, 'html.parser')\n", (608, 629), False, 'from bs4 import BeautifulSoup\n'), ((993, 1010), 'database.Tableone', 'Tableone', (['name', 'p'], {}), '(name, p)\n', (1001, 1010), False, 'from database import Tableone\n'), ((1023, 1046), 'database.db.session.add', 'db.session.add', (['new_row'], {}), '(new_row)\n', (1037, 1046), False, 'from database import db\n'), ((1059, 1078), 'database.db.session.commit', 'db.session.commit', ([], {}), '()\n', (1076, 1078), False, 'from database import db\n')]
|
from graphics import *
import random
import math
max_width = 500
max_height = 500
n = int(input("Ile bokow: "))
win = GraphWin('<NAME> zadanie 6', max_width, max_height)
win.setBackground('brown')
center = (250, 250)
r = 125
for item in range(n):
start_point = Point(center[0] + r * math.cos(2 * math.pi * item / n), center[1] + r * math.sin(2 * math.pi * item / n))
if item != n - 1:
next_point = Point(center[0] + r * math.cos(2 * math.pi * (item + 1) / n), center[1] + r * math.sin(2 * math.pi * (item + 1) / n))
else:
next_point = Point(center[0] + r * math.cos(2 * math.pi * 0 / n), center[1] + r * math.sin(2 * math.pi * 0 / n))
l = Line(start_point,next_point)
print("{0} => {1}".format(start_point, next_point))
l.draw(win)
win.getMouse()
win.close()
|
[
"math.cos",
"math.sin"
] |
[((292, 324), 'math.cos', 'math.cos', (['(2 * math.pi * item / n)'], {}), '(2 * math.pi * item / n)\n', (300, 324), False, 'import math\n'), ((342, 374), 'math.sin', 'math.sin', (['(2 * math.pi * item / n)'], {}), '(2 * math.pi * item / n)\n', (350, 374), False, 'import math\n'), ((442, 480), 'math.cos', 'math.cos', (['(2 * math.pi * (item + 1) / n)'], {}), '(2 * math.pi * (item + 1) / n)\n', (450, 480), False, 'import math\n'), ((498, 536), 'math.sin', 'math.sin', (['(2 * math.pi * (item + 1) / n)'], {}), '(2 * math.pi * (item + 1) / n)\n', (506, 536), False, 'import math\n'), ((592, 621), 'math.cos', 'math.cos', (['(2 * math.pi * 0 / n)'], {}), '(2 * math.pi * 0 / n)\n', (600, 621), False, 'import math\n'), ((639, 668), 'math.sin', 'math.sin', (['(2 * math.pi * 0 / n)'], {}), '(2 * math.pi * 0 / n)\n', (647, 668), False, 'import math\n')]
|
import rpyc
conn = rpyc.connect("localhost", 12345)
unload = rpyc.async_(conn.root.unload)
unload()
|
[
"rpyc.connect",
"rpyc.async_"
] |
[((22, 54), 'rpyc.connect', 'rpyc.connect', (['"""localhost"""', '(12345)'], {}), "('localhost', 12345)\n", (34, 54), False, 'import rpyc\n'), ((65, 94), 'rpyc.async_', 'rpyc.async_', (['conn.root.unload'], {}), '(conn.root.unload)\n', (76, 94), False, 'import rpyc\n')]
|
# coding: utf-8
#
import base64
import io
import json
import os
import platform
import queue
import subprocess
import sys
import time
import traceback
from concurrent.futures import ThreadPoolExecutor
from subprocess import PIPE
from typing import Union
import six
import tornado
from logzero import logger
from PIL import Image
from tornado.concurrent import run_on_executor
from tornado.escape import json_decode
from ..device import connect_device, get_device
from ..utils import tostr
from ..version import __version__
from ..jsonrpc_client import ConsoleKernel
pathjoin = os.path.join
class BaseHandler(tornado.web.RequestHandler):
def set_default_headers(self):
self.set_header("Access-Control-Allow-Origin", "*")
self.set_header("Access-Control-Allow-Headers", "x-requested-with")
self.set_header("Access-Control-Allow-Credentials",
"true") # allow cookie
self.set_header('Access-Control-Allow-Methods',
'POST, GET, PUT, DELETE, OPTIONS')
def options(self, *args):
self.set_status(204) # no body
self.finish()
def check_origin(self, origin):
""" allow cors request """
return True
class VersionHandler(BaseHandler):
def get(self):
ret = {
'name': "weditor",
'version': __version__,
}
self.write(ret)
class MainHandler(BaseHandler):
def get(self):
self.render("index.html")
gqueue = queue.Queue()
class BuildWSHandler(tornado.websocket.WebSocketHandler):
executor = ThreadPoolExecutor(max_workers=4)
def open(self):
print("Websocket opened")
self.proc = None
def check_origin(self, origin):
return True
@run_on_executor
def _run(self, device_url, code):
"""
Thanks: https://gist.github.com/mosquito/e638dded87291d313717
"""
try:
print("DEBUG: run code\n%s" % code)
env = os.environ.copy()
env['UIAUTOMATOR_DEBUG'] = 'true'
if device_url and device_url != 'default':
env['ATX_CONNECT_URL'] = tostr(device_url)
start_time = time.time()
self.proc = subprocess.Popen([sys.executable, "-u"],
env=env,
stdout=PIPE,
stderr=subprocess.STDOUT,
stdin=PIPE)
self.proc.stdin.write(code)
self.proc.stdin.close()
for line in iter(self.proc.stdout.readline, b''):
print("recv subprocess:", repr(line))
if line is None:
break
gqueue.put((self, {"buffer": line.decode('utf-8')}))
print("Wait exit")
exit_code = self.proc.wait()
duration = time.time() - start_time
ret = {
"buffer": "",
"result": {
"exitCode": exit_code,
"duration": int(duration) * 1000
}
}
gqueue.put((self, ret))
time.sleep(3) # wait until write done
except Exception:
traceback.print_exc()
@tornado.gen.coroutine
def on_message(self, message):
jdata = json.loads(message)
if self.proc is None:
code = jdata['content']
device_url = jdata.get('deviceUrl')
yield self._run(device_url, code.encode('utf-8'))
self.close()
else:
self.proc.terminate()
            # on Windows, kill is an alias of terminate()
if platform.system() == 'Windows':
return
yield tornado.gen.sleep(0.5)
if self.proc.poll():
return
yield tornado.gen.sleep(1.2)
if self.proc.poll():
return
print("Force to kill")
self.proc.kill()
def on_close(self):
print("Websocket closed")
class DeviceConnectHandler(BaseHandler):
def post(self):
platform = self.get_argument("platform").lower()
device_url = self.get_argument("deviceUrl")
try:
id = connect_device(platform, device_url)
except RuntimeError as e:
self.set_status(410) # 410 Gone
self.write({
"success": False,
"description": str(e),
})
except Exception as e:
logger.warning("device connect error: %s", e)
self.set_status(410) # 410 Gone
self.write({
"success": False,
"description": traceback.format_exc(),
})
else:
ret = {
"deviceId": id,
'success': True,
}
if platform == "android":
ws_addr = get_device(id).device.address.replace("http://", "ws://") # yapf: disable
ret['screenWebSocketUrl'] = ws_addr + "/minicap"
self.write(ret)
class DeviceHierarchyHandler(BaseHandler):
def get(self, device_id):
d = get_device(device_id)
self.write(d.dump_hierarchy())
class DeviceHierarchyHandlerV2(BaseHandler):
def get(self, device_id):
d = get_device(device_id)
self.write(d.dump_hierarchy2())
class WidgetPreviewHandler(BaseHandler):
def get(self, id):
self.render("widget_preview.html", id=id)
class DeviceWidgetListHandler(BaseHandler):
__store_dir = os.path.expanduser("~/.weditor/widgets")
def generate_id(self):
os.makedirs(self.__store_dir, exist_ok=True)
names = [
name for name in os.listdir(self.__store_dir)
if os.path.isdir(os.path.join(self.__store_dir, name))
]
return "%05d" % (len(names) + 1)
def get(self, widget_id: str):
data_dir = os.path.join(self.__store_dir, widget_id)
with open(pathjoin(data_dir, "hierarchy.xml"), "r",
encoding="utf-8") as f:
hierarchy = f.read()
with open(os.path.join(data_dir, "meta.json"), "rb") as f:
meta_info = json.load(f)
meta_info['hierarchy'] = hierarchy
self.write(meta_info)
def json_parse(self, source):
with open(source, "r", encoding="utf-8") as f:
return json.load(f)
def put(self, widget_id: str):
""" update widget data """
data = json_decode(self.request.body)
target_dir = os.path.join(self.__store_dir, widget_id)
with open(pathjoin(target_dir, "hierarchy.xml"), "w",
encoding="utf-8") as f:
f.write(data['hierarchy'])
# update meta
meta_path = pathjoin(target_dir, "meta.json")
meta = self.json_parse(meta_path)
meta["xpath"] = data['xpath']
with open(meta_path, "w", encoding="utf-8") as f:
f.write(json.dumps(meta, indent=4, ensure_ascii=False))
self.write({
"success": True,
"description": f"widget {widget_id} updated",
})
def post(self):
data = json_decode(self.request.body)
widget_id = self.generate_id()
target_dir = os.path.join(self.__store_dir, widget_id)
os.makedirs(target_dir, exist_ok=True)
image_fd = io.BytesIO(base64.b64decode(data['screenshot']))
im = Image.open(image_fd)
im.save(pathjoin(target_dir, "screenshot.jpg"))
lx, ly, rx, ry = bounds = data['bounds']
im.crop(bounds).save(pathjoin(target_dir, "template.jpg"))
cx, cy = (lx + rx) // 2, (ly + ry) // 2
# TODO(ssx): missing offset
# pprint(data)
widget_data = {
"resource_id": data["resourceId"],
"text": data['text'],
"description": data["description"],
"target_size": [rx - lx, ry - ly],
"package": data["package"],
"activity": data["activity"],
"class_name": data['className'],
"rect": dict(x=lx, y=ly, width=rx-lx, height=ry-ly),
"window_size": data['windowSize'],
"xpath": data['xpath'],
"target_image": {
"size": [rx - lx, ry - ly],
"url": f"http://localhost:17310/widgets/{widget_id}/template.jpg",
},
"device_image": {
"size": im.size,
"url": f"http://localhost:17310/widgets/{widget_id}/screenshot.jpg",
},
# "hierarchy": data['hierarchy'],
} # yapf: disable
with open(pathjoin(target_dir, "meta.json"), "w",
encoding="utf-8") as f:
json.dump(widget_data, f, ensure_ascii=False, indent=4)
with open(pathjoin(target_dir, "hierarchy.xml"), "w",
encoding="utf-8") as f:
f.write(data['hierarchy'])
self.write({
"success": True,
"id": widget_id,
"note": data['text'] or data['description'], # 备注
"data": widget_data,
})
class DeviceScreenshotHandler(BaseHandler):
def get(self, serial):
logger.info("Serial: %s", serial)
try:
d = get_device(serial)
buffer = io.BytesIO()
d.screenshot().convert("RGB").save(buffer, format='JPEG')
b64data = base64.b64encode(buffer.getvalue())
response = {
"type": "jpeg",
"encoding": "base64",
"data": b64data.decode('utf-8'),
}
self.write(response)
except EnvironmentError as e:
traceback.print_exc()
self.set_status(430, "Environment Error")
self.write({"description": str(e)})
except RuntimeError as e:
self.set_status(410) # Gone
self.write({"description": traceback.print_exc()})
class DeviceCodeDebugHandler(BaseHandler):
executor = ThreadPoolExecutor(max_workers=4)
@run_on_executor
def _run(self, device_id, code):
logger.debug("RUN code: %s", code)
client = ConsoleKernel.get_singleton()
output = client.call_output("run_device_code", [device_id, code])
return output
async def post(self, device_id):
start = time.time()
d = get_device(device_id)
logger.debug("deviceId: %s", device_id)
code = self.get_argument('code')
output = await self._run(device_id, code)
self.write({
"success": True,
"duration": int((time.time() - start) * 1000),
"content": output,
})
async def delete(self, device_id):
client = ConsoleKernel.get_singleton()
client.send_interrupt()
self.write({
"success": True,
})
|
[
"os.environ.copy",
"base64.b64decode",
"json.dumps",
"os.path.join",
"traceback.print_exc",
"json.loads",
"traceback.format_exc",
"concurrent.futures.ThreadPoolExecutor",
"json.dump",
"subprocess.Popen",
"io.BytesIO",
"time.sleep",
"platform.system",
"queue.Queue",
"os.listdir",
"tornado.gen.sleep",
"json.load",
"os.makedirs",
"tornado.escape.json_decode",
"logzero.logger.info",
"PIL.Image.open",
"time.time",
"logzero.logger.debug",
"logzero.logger.warning",
"os.path.expanduser"
] |
[((1494, 1507), 'queue.Queue', 'queue.Queue', ([], {}), '()\n', (1505, 1507), False, 'import queue\n'), ((1583, 1616), 'concurrent.futures.ThreadPoolExecutor', 'ThreadPoolExecutor', ([], {'max_workers': '(4)'}), '(max_workers=4)\n', (1601, 1616), False, 'from concurrent.futures import ThreadPoolExecutor\n'), ((5595, 5635), 'os.path.expanduser', 'os.path.expanduser', (['"""~/.weditor/widgets"""'], {}), "('~/.weditor/widgets')\n", (5613, 5635), False, 'import os\n'), ((10047, 10080), 'concurrent.futures.ThreadPoolExecutor', 'ThreadPoolExecutor', ([], {'max_workers': '(4)'}), '(max_workers=4)\n', (10065, 10080), False, 'from concurrent.futures import ThreadPoolExecutor\n'), ((3366, 3385), 'json.loads', 'json.loads', (['message'], {}), '(message)\n', (3376, 3385), False, 'import json\n'), ((5672, 5716), 'os.makedirs', 'os.makedirs', (['self.__store_dir'], {'exist_ok': '(True)'}), '(self.__store_dir, exist_ok=True)\n', (5683, 5716), False, 'import os\n'), ((5966, 6007), 'os.path.join', 'os.path.join', (['self.__store_dir', 'widget_id'], {}), '(self.__store_dir, widget_id)\n', (5978, 6007), False, 'import os\n'), ((6537, 6567), 'tornado.escape.json_decode', 'json_decode', (['self.request.body'], {}), '(self.request.body)\n', (6548, 6567), False, 'from tornado.escape import json_decode\n'), ((6589, 6630), 'os.path.join', 'os.path.join', (['self.__store_dir', 'widget_id'], {}), '(self.__store_dir, widget_id)\n', (6601, 6630), False, 'import os\n'), ((7213, 7243), 'tornado.escape.json_decode', 'json_decode', (['self.request.body'], {}), '(self.request.body)\n', (7224, 7243), False, 'from tornado.escape import json_decode\n'), ((7304, 7345), 'os.path.join', 'os.path.join', (['self.__store_dir', 'widget_id'], {}), '(self.__store_dir, widget_id)\n', (7316, 7345), False, 'import os\n'), ((7354, 7392), 'os.makedirs', 'os.makedirs', (['target_dir'], {'exist_ok': '(True)'}), '(target_dir, exist_ok=True)\n', (7365, 7392), False, 'import os\n'), ((7475, 7495), 'PIL.Image.open', 'Image.open', (['image_fd'], {}), '(image_fd)\n', (7485, 7495), False, 'from PIL import Image\n'), ((9240, 9273), 'logzero.logger.info', 'logger.info', (['"""Serial: %s"""', 'serial'], {}), "('Serial: %s', serial)\n", (9251, 9273), False, 'from logzero import logger\n'), ((10148, 10182), 'logzero.logger.debug', 'logger.debug', (['"""RUN code: %s"""', 'code'], {}), "('RUN code: %s', code)\n", (10160, 10182), False, 'from logzero import logger\n'), ((10380, 10391), 'time.time', 'time.time', ([], {}), '()\n', (10389, 10391), False, 'import time\n'), ((10434, 10473), 'logzero.logger.debug', 'logger.debug', (['"""deviceId: %s"""', 'device_id'], {}), "('deviceId: %s', device_id)\n", (10446, 10473), False, 'from logzero import logger\n'), ((1988, 2005), 'os.environ.copy', 'os.environ.copy', ([], {}), '()\n', (2003, 2005), False, 'import os\n'), ((2191, 2202), 'time.time', 'time.time', ([], {}), '()\n', (2200, 2202), False, 'import time\n'), ((2228, 2333), 'subprocess.Popen', 'subprocess.Popen', (["[sys.executable, '-u']"], {'env': 'env', 'stdout': 'PIPE', 'stderr': 'subprocess.STDOUT', 'stdin': 'PIPE'}), "([sys.executable, '-u'], env=env, stdout=PIPE, stderr=\n subprocess.STDOUT, stdin=PIPE)\n", (2244, 2333), False, 'import subprocess\n'), ((3188, 3201), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (3198, 3201), False, 'import time\n'), ((6235, 6247), 'json.load', 'json.load', (['f'], {}), '(f)\n', (6244, 6247), False, 'import json\n'), ((6438, 6450), 'json.load', 'json.load', (['f'], {}), '(f)\n', (6447, 6450), False, 'import 
json\n'), ((7424, 7460), 'base64.b64decode', 'base64.b64decode', (["data['screenshot']"], {}), "(data['screenshot'])\n", (7440, 7460), False, 'import base64\n'), ((8772, 8827), 'json.dump', 'json.dump', (['widget_data', 'f'], {'ensure_ascii': '(False)', 'indent': '(4)'}), '(widget_data, f, ensure_ascii=False, indent=4)\n', (8781, 8827), False, 'import json\n'), ((9343, 9355), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (9353, 9355), False, 'import io\n'), ((2909, 2920), 'time.time', 'time.time', ([], {}), '()\n', (2918, 2920), False, 'import time\n'), ((3265, 3286), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (3284, 3286), False, 'import traceback\n'), ((3705, 3722), 'platform.system', 'platform.system', ([], {}), '()\n', (3720, 3722), False, 'import platform\n'), ((3778, 3800), 'tornado.gen.sleep', 'tornado.gen.sleep', (['(0.5)'], {}), '(0.5)\n', (3795, 3800), False, 'import tornado\n'), ((3875, 3897), 'tornado.gen.sleep', 'tornado.gen.sleep', (['(1.2)'], {}), '(1.2)\n', (3892, 3897), False, 'import tornado\n'), ((4552, 4597), 'logzero.logger.warning', 'logger.warning', (['"""device connect error: %s"""', 'e'], {}), "('device connect error: %s', e)\n", (4566, 4597), False, 'from logzero import logger\n'), ((5764, 5792), 'os.listdir', 'os.listdir', (['self.__store_dir'], {}), '(self.__store_dir)\n', (5774, 5792), False, 'import os\n'), ((6162, 6197), 'os.path.join', 'os.path.join', (['data_dir', '"""meta.json"""'], {}), "(data_dir, 'meta.json')\n", (6174, 6197), False, 'import os\n'), ((7009, 7055), 'json.dumps', 'json.dumps', (['meta'], {'indent': '(4)', 'ensure_ascii': '(False)'}), '(meta, indent=4, ensure_ascii=False)\n', (7019, 7055), False, 'import json\n'), ((9725, 9746), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (9744, 9746), False, 'import traceback\n'), ((5822, 5858), 'os.path.join', 'os.path.join', (['self.__store_dir', 'name'], {}), '(self.__store_dir, name)\n', (5834, 5858), False, 'import os\n'), ((4733, 4755), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (4753, 4755), False, 'import traceback\n'), ((9963, 9984), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (9982, 9984), False, 'import traceback\n'), ((10645, 10656), 'time.time', 'time.time', ([], {}), '()\n', (10654, 10656), False, 'import time\n')]
|
"""
echopype data model inherited from based class EchoData for EK60 data.
"""
import datetime as dt
import numpy as np
import xarray as xr
from .echo_data import EchoData
class EchoDataEK60(EchoData):
"""Class for manipulating EK60 echo data that is already converted to netCDF."""
def __init__(self, file_path=""):
EchoData.__init__(self, file_path)
self.tvg_correction_factor = 2 # range bin offset factor for calculating time-varying gain in EK60
def calibrate(self, save=False):
"""Perform echo-integration to get volume backscattering strength (Sv) from EK60 power data.
TODO: need to write a separate method for calculating TS as have been done for AZFP data.
Parameters
-----------
save : bool, optional
whether to save calibrated Sv output
default to ``False``
"""
# Open data set for Environment and Beam groups
ds_env = xr.open_dataset(self.file_path, group="Environment")
ds_beam = xr.open_dataset(self.file_path, group="Beam")
# Derived params
sample_thickness = ds_env.sound_speed_indicative * ds_beam.sample_interval / 2 # sample thickness
wavelength = ds_env.sound_speed_indicative / ds_env.frequency # wavelength
# Calc gain
CSv = 10 * np.log10((ds_beam.transmit_power * (10 ** (ds_beam.gain_correction / 10)) ** 2 *
wavelength ** 2 * ds_env.sound_speed_indicative * ds_beam.transmit_duration_nominal *
10 ** (ds_beam.equivalent_beam_angle / 10)) /
(32 * np.pi ** 2))
# Get TVG and absorption
range_meter = ds_beam.range_bin * sample_thickness - \
self.tvg_correction_factor * sample_thickness # DataArray [frequency x range_bin]
range_meter = range_meter.where(range_meter > 0, other=0) # set all negative elements to 0
TVG = np.real(20 * np.log10(range_meter.where(range_meter != 0, other=1)))
ABS = 2 * ds_env.absorption_indicative * range_meter
# Save TVG and ABS for noise estimation use
self.sample_thickness = sample_thickness
self.TVG = TVG
self.ABS = ABS
# Calibration and echo integration
Sv = ds_beam.backscatter_r + TVG + ABS - CSv - 2 * ds_beam.sa_correction
Sv.name = 'Sv'
# Save calibrated data into the calling instance and
# ... to a separate .nc file in the same directory as the data file
self.Sv = Sv
if save:
print('%s saving calibrated Sv to %s' % (dt.datetime.now().strftime('%H:%M:%S'), self.Sv_path))
Sv.to_netcdf(path=self.Sv_path, mode="w")
# Close opened resources
ds_env.close()
ds_beam.close()
|
[
"numpy.log10",
"xarray.open_dataset",
"datetime.datetime.now"
] |
[((956, 1008), 'xarray.open_dataset', 'xr.open_dataset', (['self.file_path'], {'group': '"""Environment"""'}), "(self.file_path, group='Environment')\n", (971, 1008), True, 'import xarray as xr\n'), ((1027, 1072), 'xarray.open_dataset', 'xr.open_dataset', (['self.file_path'], {'group': '"""Beam"""'}), "(self.file_path, group='Beam')\n", (1042, 1072), True, 'import xarray as xr\n'), ((1330, 1572), 'numpy.log10', 'np.log10', (['(ds_beam.transmit_power * (10 ** (ds_beam.gain_correction / 10)) ** 2 * \n wavelength ** 2 * ds_env.sound_speed_indicative * ds_beam.\n transmit_duration_nominal * 10 ** (ds_beam.equivalent_beam_angle / 10) /\n (32 * np.pi ** 2))'], {}), '(ds_beam.transmit_power * (10 ** (ds_beam.gain_correction / 10)) **\n 2 * wavelength ** 2 * ds_env.sound_speed_indicative * ds_beam.\n transmit_duration_nominal * 10 ** (ds_beam.equivalent_beam_angle / 10) /\n (32 * np.pi ** 2))\n', (1338, 1572), True, 'import numpy as np\n'), ((2620, 2637), 'datetime.datetime.now', 'dt.datetime.now', ([], {}), '()\n', (2635, 2637), True, 'import datetime as dt\n')]
|
from __future__ import nested_scopes
# Levenberg Marquardt minimization routines
"""
fmin_lm : standard Levenberg Marquardt
fmin_lmNoJ : Levenberg Marquardt using a cost function instead of
a residual function and a gradient/J^tJ pair instead
of the derivative of the residual function. Useful
in problems where the number of residuals is very large.
fmin_lm_scale : scale invariant Levenberg Marquardt
"""
import scipy
from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum
import scipy.linalg
import copy
import SloppyCell.Utility
save = SloppyCell.Utility.save # module that provides pickled save
import SloppyCell.KeyedList_mod as KeyedList_mod
KeyedList = KeyedList_mod.KeyedList
abs = absolute
_epsilon = sqrt(scipy.finfo(scipy.float_).eps)
def approx_fprime(xk,f,epsilon,*args):
f0 = apply(f,(xk,)+args)
grad = scipy.zeros((len(xk),),scipy.float_)
ei = scipy.zeros((len(xk),),scipy.float_)
for k in range(len(xk)):
ei[k] = epsilon
grad[k] = (apply(f,(xk+ei,)+args) - f0)/epsilon
ei[k] = 0.0
return grad
def approx_fprime1(xk,f,epsilon,*args):
""" centred difference formula to approximate fprime """
#f0 = apply(f,(xk,)+args)
grad = scipy.zeros((len(xk),),scipy.float_)
ei = scipy.zeros((len(xk),),scipy.float_)
epsilon = (epsilon**2.0)**(1.0/3.0) # should be macheps^(1/3)
for k in range(len(xk)):
ei[k] = epsilon
grad[k] = (apply(f,(xk+ei,)+args) - apply(f,(xk-ei,)+args))/(2.0*epsilon)
ei[k] = 0.0
return grad
def approx_fprime2(xk,f,epsilon,*args):
""" centred difference formula to approximate the jacobian, given the residual
function """
#f0 = apply(f,(xk,)+args)
grad = scipy.zeros((len(xk),),scipy.float_)
ei = scipy.zeros((len(xk),),scipy.float_)
epsilon = (epsilon**2.0)**(1.0/3.0) # should be macheps^(1/3)
ei[0] = epsilon
resminus = asarray(apply(f,(xk-ei,)+args))
resplus = asarray(apply(f,(xk+ei,)+args))
m = len(resminus)
jac = scipy.zeros((m,len(xk)),scipy.float_)
jac[:,0] = (resplus-resminus)/(2.0*epsilon)
ei[0] = 0.0
for k in range(1,len(xk)):
ei[k] = epsilon
resplus = asarray(apply(f,(xk+ei,)+args))
resminus = asarray(apply(f,(xk-ei,)+args))
jac[:,k] = (resplus-resminus)/(2.0*epsilon)
#jac[k,:] = mat(transpose(mat(apply(f,(xk+ei,)+args) - apply(f,(xk-ei,)+args))))/(2.0*epsilon)
ei[k] = 0.0
return jac
def check_grad(func, grad, x0, *args):
approx_grad = approx_fprime(x0,func,_epsilon,*args)
print("Finite difference gradient ", approx_grad)
analytic_grad = grad(x0,*args)
print("Analytic gradient ", analytic_grad)
    differencenorm = sqrt(sum((approx_grad - analytic_grad)**2))
print("Norm of difference is ", differencenorm)
return differencenorm
def approx_fhess_p(x0,p,fprime,epsilon,*args):
f2 = apply(fprime,(x0+epsilon*p,)+args)
f1 = apply(fprime,(x0,)+args)
return (f2 - f1)/epsilon
def safe_res(f,x,args):
"""
Applies f to x.
Returns f(x) and cost = sum(f(x)**2).
In the case that cost = NaN, returns cost = inf.
In the case of an exception, returns res = None, cost = inf.
"""
try:
res = asarray(apply(f,(x,)+args))
cost = sum(res**2)
except (SloppyCell.Utility.SloppyCellException,OverflowError):
res = None
cost = scipy.inf
if scipy.isnan(cost): cost = scipy.inf
return res, cost
def safe_fprime(fprime,x,args):
"""
Applies fprime to x.
Returns j and exit code. For nonzero exit codes, j is returned as None.
Exit code 0: No errors.
Exit code 3: Jacobian contains NaN or inf.
Exit code 4: Exception in Jacobian calculation.
"""
try:
j = asarray(apply(fprime,(x,)+args))
err = 0
except SloppyCell.Utility.SloppyCellException:
j = None
err = 4
if j is not None:
if ( scipy.isnan(j).any() or scipy.isinf(j).any() ):
j = None
err = 3
return j, err
def fmin_lm(f, x0, fprime=None, args=(), avegtol=1e-5, epsilon=_epsilon,
maxiter=None, full_output=0, disp=1, retall=0, lambdainit = None,
jinit = None, trustradius = 1.0):
"""Minimizer for a nonlinear least squares problem. Allowed to
have more residuals than parameters or vice versa.
f : residual function (function of parameters)
fprime : derivative of residual function with respect to parameters.
Should return a matrix (J) with dimensions number of residuals
by number of parameters.
x0 : initial parameter set
avegtol : convergence tolerance on the gradient vector
epsilon : size of steps to use for finite differencing of f (if fprime
not passed in)
maxiter : maximum number of iterations
full_output : 0 to get only the minimum set of parameters back
1 if you also want the best parameter set, the
lowest value of f, the number of function calls,
the number of gradient calls, the convergence flag,
the last Marquardt parameter used (lambda), and the
last evaluation of fprime (J matrix)
disp : 0 for no display, 1 to give cost at each iteration and convergence
conditions at the end
retall : 0 for nothing extra to be returned, 1 for all the parameter
sets during the optimization to be returned
lambdainit : initial value of the Marquardt parameter to use (useful if
                 continuing from an old optimization run)
jinit : initial evaluation of the residual sensitivity matrix (J).
trustradius : set this to the maximum move you want to allow in a single
parameter direction.
If you are using log parameters, then setting this
to 1.0, for example, corresponds to a multiplicative
change of exp(1) = 2.718
"""
app_fprime = 0
if fprime is None:
app_fprime = 1
xcopy = copy.copy(x0)
if isinstance(x0,KeyedList) :
x0 = asarray(x0.values())
else :
x0 = asarray(x0)
if lambdainit != None :
Lambda = lambdainit
else :
Lambda = 1.0e-2
Mult = 10.0
n = len(x0)
func_calls = 0
grad_calls = 0
res,currentcost = safe_res(f,x0,args)
func_calls+=1
m = res.shape[0]
if maxiter is None :
maxiter = 200*n
niters = 0
x = x0
gtol = n*avegtol
if retall:
allvecs = [x]
x1 = x0
x2 = x0
d = zeros(n,scipy.float_)
move = zeros(n,scipy.float_)
finish = 0
if jinit!=None :
j = jinit
else :
if app_fprime :
j = asarray(apply(approx_fprime2,(x,f,epsilon)+args))
func_calls = func_calls + 2*len(x)
else :
j,err = safe_fprime(fprime,x,args)
if err:
finish = err
grad_calls+=1
# NOTE: Below is actually *half* the gradient (because
# we define the cost as the sum of squares of residuals)
# However the equations defining the optimization move, dp,
# are 2.0*J^tJ dp = -2.0*J^t r, where r is the residual
# vector; therefore, the twos cancel both sides
if j is not None: grad = mat(res)*mat(j)
while (niters<maxiter) and (finish == 0):
# note: grad, res and j will be available from the end of the
# last iteration. They just need to be computed the zeroth
        # time as well (above)
lmh = mat(transpose(j))*mat(j)
# use more accurate way to get e-vals/dirns
#[u,s,v] = scipy.linalg.svd(lmh)
[u,ssqrt,vt] = scipy.linalg.svd(j)
# want n singular values even if m<n and we have
# more parameters than data points.
if (len(ssqrt) == n) :
s = ssqrt**2
elif (len(ssqrt)<n) :
s = zeros((n,),scipy.float_)
s[0:len(ssqrt)] = ssqrt**2
#print "s is (in original) ", s
#rhsvect = -mat(transpose(u))*mat(transpose(grad))
rhsvect = -mat(vt)*mat(transpose(grad))
rhsvect = asarray(rhsvect)[:,0]
move = abs(rhsvect)/(s+Lambda*scipy.ones(n)+1.0e-30*scipy.ones(n))
move = list(move)
maxindex = move.index(max(move))
move = asarray(move)
if max(move) > trustradius :
Lambda = Mult*(1.0/trustradius*abs(rhsvect[maxindex])-s[maxindex])
#print " Increasing lambda to ", Lambda
# now do the matrix inversion
for i in range(0,n) :
if (s[i]+Lambda) < 1.0e-30 :
d[i] = 0.0
else :
d[i] = 1.0/(s[i]+Lambda)
move[i] = d[i]*rhsvect[i]
move = asarray(move)
# move = asarray(mat(transpose(v))*mat(transpose(mat(move))))[:,0]
move = asarray(mat(transpose(vt))*mat(transpose(mat(move))))[:,0]
# print move
x1 = x + move
moveold = move[:]
for i in range(0,n) :
if (s[i]+Lambda/Mult) < 1.0e-30 :
d[i] = 0.0
else :
d[i] = 1.0/(s[i]+Lambda/Mult)
move[i] = d[i]*rhsvect[i]
move = asarray(mat(transpose(vt))*mat(transpose(mat(move))))[:,0]
x2 = x + asarray(move)
_,currentcost = safe_res(f,x,args)
func_calls+=1
res2,costlambdasmaller = safe_res(f,x2,args)
func_calls+=1
res1,costlambda = safe_res(f,x1,args)
func_calls+=1
if disp :
print('Iteration number', niters)
print('Current cost', currentcost)
print("Move 1 gives cost of" , costlambda)
print("Move 2 gives cost of ", costlambdasmaller)
#fp = open('LMoutfile','a')
#fp.write('Iteration number ' + niters.__str__() + '\n')
#fp.write('Current cost ' + currentcost.__str__() + '\n')
#fp.write('Move 1 gives cost of ' + costlambda.__str__() + '\n')
#fp.write('Move 2 gives cost of ' + costlambdasmaller.__str__() + '\n')
#fp.close()
oldcost = currentcost
oldres = res
oldjac = j
if costlambdasmaller <= currentcost :
xprev = x[:]
Lambda = Lambda/Mult
x = x2[:]
if retall:
allvecs.append(x)
currentcost = costlambdasmaller
if app_fprime :
j = asarray(apply(approx_fprime2,(x2,f,epsilon)+args))
func_calls = func_calls + 2*len(x2)
else :
j,err = safe_fprime(fprime,x2,args)
if err:
x = xprev[:]
finish = err
grad_calls+=1
if j is not None: grad = mat(res2)*mat(j)
if sum(abs(2.0*grad), axis=None) < gtol :
finish = 2
elif costlambda <= currentcost :
xprev = x[:]
currentcost = costlambda
x = x1[:]
move = moveold[:]
if retall:
allvecs.append(x)
if app_fprime :
j = asarray(apply(approx_fprime2,(x1,f,epsilon)+args))
func_calls = func_calls + 2*len(x1)
else :
j,err = safe_fprime(fprime,x1,args)
if err:
x = xprev[:]
finish = err
grad_calls+=1
if j is not None: grad = mat(res1)*mat(j)
if sum(abs(2.0*grad), axis=None) < gtol :
finish = 2
else :
Lambdamult = Lambda
costmult = costlambda
piOverFour = .78539816339744825
NTrials = 0
NTrials2 = 0
move = moveold[:]
while (costmult > currentcost) and (NTrials < 10) :
num = -scipy.dot(grad,move)[0]
den = scipy.linalg.norm(grad)*scipy.linalg.norm(move)
gamma = scipy.arccos(num/den)
NTrials = NTrials+1
                # was (gamma>piOverFour) below but that doesn't
# make much sense to me. I don't think you should
# cut back on a given step, I think the trust
# region strategy is more successful
if (gamma > 0) :
Lambdamult = Lambdamult*Mult
for i in range(0,n) :
if s[i]+Lambdamult < 1.0e-30 :
d[i] = 0.0
else :
d[i] = 1.0/(s[i]+Lambdamult)
move[i] = d[i]*rhsvect[i]
move = asarray(mat(transpose(vt))*mat(transpose(mat(move))))[:,0]
x1 = x + move
res1,costmult = safe_res(f,x1,args)
func_calls+=1
else :
NTrials2 = 0
while (costmult > currentcost) and (NTrials2 < 10) :
NTrials2 = NTrials2 + 1
if disp == 1:
print(" Decreasing stepsize ")
move = (.5)**NTrials2*moveold
x1 = x + asarray(move)
res1,costmult = safe_res(f,x1,args)
func_calls+=1
if (NTrials==10) or (NTrials2==10) :
if disp == 1:
print(" Failed to converge")
finish = 1
else :
xprev = x[:]
x = x1[:]
if retall:
allvecs.append(x)
Lambda = Lambdamult
if app_fprime :
j = asarray(apply(approx_fprime2,(x,f,epsilon)+args))
func_calls = func_calls + 2*len(x)
else :
j,err = safe_fprime(fprime,x,args)
if err:
x = xprev[:]
finish = err
grad_calls+=1
if j is not None: grad = mat(res1)*mat(j)
currentcost = costmult
if sum(abs(2.0*grad), axis=None) < gtol :
finish = 2
niters = niters + 1
# see if we need to reduce the trust region
newmodelval = oldres+asarray(mat(oldjac)*mat(transpose(mat(move))))[:,0]
oldmodelval = oldres
#print oldcost-sum(newmodelval**2)
#print trustradius
if ((oldcost-sum(newmodelval**2))>1.0e-16) :
ratio = (oldcost-currentcost)/(oldcost-sum(newmodelval**2))
if ratio < .25 :
trustradius = trustradius/2.0
if ratio >.25 and ratio<=.75 :
trustradius = trustradius
if ratio > .75 and trustradius<10.0 :
trustradius = 2.0*trustradius
#save(x,'currentParamsLM')
if disp :
if (niters>=maxiter) and (finish != 2) :
print(" Current function value: %f" % currentcost)
print(" Iterations: %d" % niters)
print(" Function evaluations: %d" % func_calls)
print(" Gradient evaluations: %d" % grad_calls)
print(" Maximum number of iterations exceeded with no convergence ")
if (finish == 2) :
print(" Optimization terminated successfully.")
print(" Current function value: %f" % currentcost)
print(" Iterations: %d" % niters)
print(" Function evaluations: %d" % func_calls)
print(" Gradient evaluations: %d" % grad_calls)
if (finish == 3) :
print(" Optimization aborted: Jacobian contains nan or inf.")
print(" Current function value: %f" % currentcost)
print(" Iterations: %d" % niters)
print(" Function evaluations: %d" % func_calls)
print(" Gradient evaluations: %d" % grad_calls)
if (finish == 4) :
print(" Optimization aborted: Exception in Jacobian calculation.")
print(" Current function value: %f" % currentcost)
print(" Iterations: %d" % niters)
print(" Function evaluations: %d" % func_calls)
print(" Gradient evaluations: %d" % grad_calls)
if isinstance(xcopy,KeyedList) :
xcopy.update(x)
else :
xcopy = x
if full_output:
retlist = xcopy, currentcost, func_calls, grad_calls, finish, Lambda, j
if retall:
retlist += (allvecs,)
else :
retlist = xcopy
if retall :
retlist = (xcopy,allvecs)
return retlist
def fmin_lmNoJ(fcost, x0, fjtj, args=(), avegtol=1e-5, epsilon=_epsilon,
maxiter=None, full_output=0, disp=1, retall=0, trustradius=1.0):
"""Minimizer for a nonlinear least squares problem. Allowed to
have more residuals than parameters or vice versa
fcost : the cost function (*not* the residual function)
fjtj : this function must return back an ordered pair, the first entry
is the gradient of the cost and the second entry is the Levenberg
Marquardt (LM) approximation to the cost function.
NOTE: If the cost function = 1/2 * sum(residuals**2) then
the LM approximation is the matrix matrix product J^t J
where J = derivative of residual function with respect to parameters.
However if cost = k*sum(residuals**2) for some constant k, then
the LM approximation is 2*k*J^t J, so beware of this factor!!!
x0 : initial parameter set
avegtol : convergence tolerance on the gradient vector
epsilon : size of steps to use for finite differencing of f (if fprime
not passed in)
maxiter : maximum number of iterations
full_output : 0 to get only the minimum set of parameters back
1 if you also want the best parameter set, the
lowest value of f, the number of function calls,
the number of gradient calls, the convergence flag,
the last Marquardt parameter used (lambda), and the
last evaluation of fprime (J matrix)
disp : 0 for no display, 1 to give cost at each iteration and convergence
conditions at the end
retall : 0 for nothing extra to be returned, 1 for all the parameter
sets during the optimization to be returned
trustradius : set this to the maximum move you want to allow in a single
parameter direction.
If you are using log parameters, then setting this
to 1.0, for example, corresponds to a multiplicative
change of exp(1) = 2.718
This version requires fjtj to pass back an ordered pair with
a gradient evaluation of the cost and JtJ, but not a function for J.
    This is important in problems where there are many residuals and J is too
cumbersome to compute and pass around, but JtJ is a lot "slimmer". """
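    # Hedged sketch of the expected callables ('residual' and 'jacobian' are
    # illustrative names, not part of this module):
    #   def fcost(p):
    #       return 0.5 * scipy.sum(residual(p)**2)
    #   def fjtj(p):
    #       r, J = residual(p), jacobian(p)
    #       return scipy.dot(r, J), scipy.dot(scipy.transpose(J), J)
    # i.e. fjtj returns (gradient of the cost, its Levenberg-Marquardt Hessian
    # approximation); with the 1/2 in fcost the pair carries no extra constant.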
xcopy = copy.copy(x0)
if isinstance(x0,KeyedList) :
x0 = asarray(x0.values())
else :
x0 = asarray(x0)
Lambda = 1.0e-02
Mult = 10.0
n = len(x0)
func_calls = 0
grad_calls = 0
if maxiter==None :
maxiter = 200*n
niters = 0
x = x0
gtol = n*avegtol
if retall:
allvecs = [x]
x1 = x0
x2 = x0
d = zeros(n,scipy.float_)
move = zeros(n,scipy.float_)
finish = 0
grad, lmh = apply(fjtj,(x,))
grad_calls+=1
while (niters<maxiter) and (finish == 0):
# estimate what Lambda should be
[u,s,v] = scipy.linalg.svd(lmh)
#print "s is (in NoJ) ", s
#s,u = scipy.linalg.eig(lmh)
#s = real(s)
#u = real(u)
oldlmh = lmh[:,:]
oldgrad = grad[:]
rhsvect = -scipy.dot(transpose(u),grad)
# rhsvect = asarray(rhsvect)[:,0]
move = abs(rhsvect)/(s+Lambda*ones(n)+1.0e-30*ones(n))
move = list(move)
maxindex = move.index(max(move))
move = asarray(move)
if max(move) > trustradius :
Lambda = Mult*(1.0/trustradius*abs(rhsvect[maxindex])-s[maxindex])
#print " Increasing lambda to ", Lambda
## lmhreg = lmh + Lambda*eye(n,n,typecode=scipy.float_)
## [u,s,v] = scipy.linalg.svd(lmhreg)
rhsvect = -scipy.dot(transpose(u),grad)
# rhsvect = asarray(rhsvect)[:,0]
for i in range(0,len(s)) :
if (s[i]+Lambda) < 1.0e-30 :
d[i] = 0.0
else :
d[i] = 1.0/(s[i]+Lambda)
move[i] = d[i]*rhsvect[i]
move = asarray(move)
move = dot(asarray(u),move)
x1 = x + move
moveold = move[:]
for i in range(0,len(s)) :
if (s[i]+Lambda/Mult) < 1.0e-30 :
d[i] = 0.0
else :
d[i] = 1.0/(s[i]+Lambda/Mult)
move[i] = d[i]*rhsvect[i]
move = asarray(move)
move = dot(asarray(u),move)
x2 = x + asarray(move)
currentcost = apply(fcost,(x,))
oldcost = currentcost
func_calls+=1
try:
costlambdasmaller = apply(fcost,(x2,))
except SloppyCell.Utility.SloppyCellException:
costlambdasmaller = scipy.inf
func_calls+=1
try:
costlambda = apply(fcost,(x1,))
except SloppyCell.Utility.SloppyCellException:
costlambda = scipy.inf
func_calls+=1
if disp :
print('Iteration number', niters)
print('Current cost', currentcost)
print("Move 1 gives cost of" , costlambda)
print("Move 2 gives cost of ", costlambdasmaller)
#fp = open('LMoutfile','a')
#fp.write('Iteration number ' + niters.__str__() + '\n')
#fp.write('Current cost ' + currentcost.__str__() + '\n')
#fp.write('Move 1 gives cost of ' + costlambda.__str__() + '\n')
#fp.write('Move 2 gives cost of ' + costlambdasmaller.__str__() + '\n')
#fp.close()
if costlambdasmaller <= currentcost :
Lambda = Lambda/Mult
x = x2[:]
if retall:
allvecs.append(x)
currentcost = costlambdasmaller
grad, lmh = apply(fjtj,(x2,))
grad_calls+=1
#if scipy.linalg.norm(asarray(grad)) < avegtol :
if sum(abs(2.0*grad), axis=None) < gtol :
finish = 2
elif costlambda <= currentcost :
currentcost = costlambda
move = moveold[:]
x = x1[:]
if retall:
allvecs.append(x)
grad, lmh = apply(fjtj,(x1,))
grad_calls+=1
# if scipy.linalg.norm(asarray(grad)) < avegtol :
if sum(abs(2.0*grad), axis=None) < gtol :
finish = 2
else :
Lambdamult = Lambda
costmult = costlambda
piOverFour = .78539816339744825
NTrials2 = 0
NTrials = 0
while (costmult > currentcost) and (NTrials < 10) :
# num = -dot(transpose(asarray(grad)),asarray(moveold) )
# den = scipy.linalg.norm(grad)*scipy.linalg.norm(moveold)
gamma = .1 # scipy.arccos(num/den)
NTrials = NTrials+1
if (gamma > 0) :
Lambdamult = Lambdamult*Mult
for i in range(0,len(s)) :
if s[i] + Lambdamult < 1.0e-30 :
d[i] = 0.0
else :
d[i] = 1.0/(s[i] + Lambdamult)
move[i] = d[i]*rhsvect[i]
move = asarray(move)
move = dot(asarray(u),move)
x1 = x + asarray(move)
func_calls+=1
costmult = apply(fcost,(x1,))
else :
NTrials2 = 0
while (costmult > currentcost) and (NTrials2 < 10) :
NTrials2 = NTrials2 + 1
if disp :
print(" Decreasing stepsize ")
move = (.5)**NTrials2*moveold
x1 = x + asarray(moveold)
func_calls+=1
costmult = apply(fcost,(x1,))
if (NTrials==10) or (NTrials2==10) :
if disp :
print(" Failed to converge")
finish = 1
else :
x = x1[:]
if retall:
allvecs.append(x)
Lambda = Lambdamult
grad, lmh = apply(fjtj,(x1,))
grad_calls+=1
currentcost = costmult
# if scipy.linalg.norm(grad) < avegtol :
if sum(abs(2.0*grad), axis=None) < gtol :
finish = 2
niters = niters + 1
# see if we need to reduce the trust region, compare the actual change in
# cost to the linear and quadratic change in cost
model_change = scipy.dot(scipy.transpose(oldgrad),move) + \
.5*scipy.dot(scipy.transpose(move),scipy.dot(oldlmh,move) )
#print oldcost-sum(newmodelval**2)
#print trustradius
if model_change>1.0e-16 :
ratio = (oldcost-currentcost)/(model_change)
if ratio < .25 :
trustradius = trustradius/2.0
if ratio >.25 and ratio<=.75 :
trustradius = trustradius
if ratio > .75 and trustradius<10.0 :
trustradius = 2.0*trustradius
#save(x,'currentParamsLM')
if disp :
if (niters>=maxiter) and (finish != 2) :
print(" Current function value: %f" % currentcost)
print(" Iterations: %d" % niters)
print(" Function evaluations: %d" % func_calls)
print(" Gradient evaluations: %d" % grad_calls)
print(" Maximum number of iterations exceeded with no convergence ")
if (finish == 2) :
print("Optimization terminated successfully.")
print(" Current function value: %f" % currentcost)
print(" Iterations: %d" % niters)
print(" Function evaluations: %d" % func_calls)
print(" Gradient evaluations: %d" % grad_calls)
if isinstance(xcopy,KeyedList) :
xcopy.update(x)
else :
xcopy = x
if full_output:
retlist = xcopy, currentcost, func_calls, grad_calls, finish, Lambda, lmh
if retall:
retlist += (allvecs,)
else:
retlist = xcopy
if retall:
retlist = (xcopy, allvecs)
return retlist
def solve_lmsys(Lambda,s,g,rhsvect,currentcost,n) :
d = zeros(n,scipy.float_)
move = zeros(n,scipy.float_)
for i in range(0,n) :
if s[i] < 1.0e-20 :
d[i] = 0.0
else :
d[i] = 1.0/(s[i])
move[i] = d[i]*rhsvect[i]
return move
def fmin_lm_scale(f, x0, fprime=None, args=(), avegtol=1e-5, epsilon=_epsilon,
maxiter=None, full_output=0, disp=1, retall=0,trustradius=1.0):
"""
Minimizer for a nonlinear least squares problem. Allowed to
have more residuals than parameters or vice versa.
f : residual function (function of parameters)
fprime : derivative of residual function with respect to parameters.
Should return a matrix (J) with dimensions number of residuals
by number of parameters.
x0 : initial parameter set
avegtol : convergence tolerance on the gradient vector
epsilon : size of steps to use for finite differencing of f (if fprime
not passed in)
maxiter : maximum number of iterations
full_output : 0 to get only the minimum set of parameters back
1 if you also want the best parameter set, the
lowest value of f, the number of function calls,
the number of gradient calls, the convergence flag,
the last Marquardt parameter used (lambda), and the
last evaluation of fprime (J matrix)
disp : 0 for no display, 1 to give cost at each iteration and convergence
conditions at the end
retall : 0 for nothing extra to be returned, 1 for all the parameter
sets during the optimization to be returned
trustradius : set this to the maximum length of move you want.
If you are using log parameters, then setting this
to 1.0, for example, corresponds to a multiplicative
change of exp(1) = 2.718 if the move is along a single
parameter direction
This version is scale invariant. This means that under a change of
scale of the parameters the direction the optimizer chooses to move
in does not change. To achieve this, we don't use a Marquardt
parameter to impose a trust region but rather take the infinite trust
region step and just cut it back to the length given in the variable
trustradius. """
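    # In the loop below Lambda stays 0: the Gauss-Newton direction comes from the
    # SVD of J, and the two trial moves are unitmove*trustradius and
    # unitmove*trustradius*Mult, so the trust radius only changes the step length,
    # never its direction.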
app_fprime = 0
if fprime is None:
app_fprime = 1
xcopy = copy.copy(x0)
if isinstance(x0,KeyedList) :
x0 = asarray(x0.values())
else :
x0 = asarray(x0)
Lambda = 1.0e-02
Mult = 10.0
n = len(x0)
func_calls = 0
grad_calls = 0
res = asarray(apply(f,(x0,)))
m = res.shape[0]
if maxiter is None :
maxiter = 200*n
niters = 0
x = x0
gtol = n*avegtol
if retall:
allvecs = [x]
x1 = x0
x2 = x0
d = zeros(n,scipy.float_)
move = zeros(n,scipy.float_)
finish = 0
if app_fprime :
j = asarray(apply(approx_fprime2,(x,f,epsilon)+args))
func_calls = func_calls + 2*len(x)
else :
j = asarray(apply(fprime,(x,)))
grad_calls+=1
res = asarray(apply(f,(x,)))
func_calls+=1
grad = mat(res)*mat(j)
while (niters<maxiter) and (finish == 0):
# note: grad, res and j will be available from the end of the
# last iteration. They just need to be computed the zeroth
        # time as well (above)
lmh = mat(transpose(j))*mat(j)
# use more accurate way to get e-vals/dirns
#[u,s,v] = scipy.linalg.svd(lmh)
[u,ssqrt,vt] = scipy.linalg.svd(j)
# want n singular values even if m<n and we have
# more parameters than data points.
if (len(ssqrt) == n) :
s = ssqrt**2
elif (len(ssqrt)<n) :
s = zeros((n,),scipy.float_)
s[0:len(ssqrt)] = ssqrt**2
#rhsvect = -mat(transpose(u))*mat(transpose(grad))
rhsvect = -mat(vt)*mat(transpose(grad))
rhsvect = asarray(rhsvect)[:,0]
currentcost = sum(asarray(apply(f,(x,)))**2)
g = asarray(grad)[0,:]
Lambda = 0
move = solve_lmsys(Lambda,s,g,rhsvect,currentcost,n)
move = asarray(move)
move = asarray(mat(transpose(vt))*mat(transpose(mat(move))))[:,0]
unitmove = move/(scipy.linalg.norm(move))
move1 = unitmove*trustradius
# print move
x1 = x + move1
move2 = unitmove*trustradius*Mult
x2 = x + asarray(move2)
func_calls+=1
try:
res2 = asarray(apply(f,(x2,)))
costlambdasmaller = sum(res2**2)
except SloppyCell.Utility.SloppyCellException:
costlambdasmaller = scipy.inf
func_calls+=1
try:
res1 = asarray(apply(f,(x1,)))
costlambda = sum(res1**2)
except SloppyCell.Utility.SloppyCellException:
costlambda = scipy.inf
func_calls+=1
if disp :
print("Cost is ", currentcost)
print("Iteration is", niters)
oldcost = currentcost
oldres = res
oldjac = j
if costlambdasmaller <= currentcost :
trustradius = trustradius*Mult
x = x2
if retall:
allvecs.append(x)
currentcost = costlambdasmaller
if app_fprime :
j = asarray(apply(approx_fprime2,(x2,f,epsilon)+args))
func_calls = func_calls + 2*len(x2)
else :
j = asarray(apply(fprime,(x2,)))
grad_calls+=1
grad = mat(res2)*mat(j)
if sum(abs(2.0*grad), axis=None) < gtol :
finish = 2
move = move2
elif costlambda <= currentcost :
currentcost = costlambda
x = x1
if retall:
allvecs.append(x)
if app_fprime :
j = asarray(apply(approx_fprime2,(x1,f,epsilon)+args))
func_calls = func_calls + 2*len(x1)
else :
j = asarray(apply(fprime,(x1,)))
grad_calls+=1
grad = mat(res1)*mat(j)
if sum(abs(2.0*grad), axis=None) < gtol :
finish = 2
move = move1
else :
trustradmult = trustradius
costmult = costlambda
NTrials = 0
move = unitmove
while (costmult > currentcost) and (NTrials < 100) :
while (costmult > currentcost) and (NTrials < 100) :
NTrials = NTrials + 1
#print " Decreasing stepsize "
trustradmult = trustradmult/2.0
move = move*trustradmult
x1 = x + asarray(move)
res1 = asarray(apply(f,(x1,)))
func_calls+=1
costmult = sum(res1**2)
if (NTrials==100) :
if disp :
print(" Failed to converge")
finish = 1
else :
x = x1
if retall:
allvecs.append(x)
trustradius = trustradmult
if app_fprime :
j = asarray(apply(approx_fprime2,(x,f,epsilon)+args))
func_calls = func_calls + 2*len(x)
else :
j = asarray(apply(fprime,(x,)))
grad_calls+=1
grad = mat(res1)*mat(j)
currentcost = costmult
if sum(abs(2.0*grad), axis=None) < gtol :
finish = 2
niters = niters + 1
# see if we need to reduce the trust region
newmodelval = oldres+asarray(mat(oldjac)*mat(transpose(mat(move))))[:,0]
oldmodelval = oldres
#print oldcost-sum(newmodelval**2)
#print trustradius
if ((oldcost-sum(newmodelval**2))>1.0e-16) :
ratio = (oldcost-currentcost)/(oldcost-sum(newmodelval**2))
if ratio < .25 :
trustradius = trustradius/2.0
if ratio >.25 and ratio<=.75 :
trustradius = trustradius
if ratio > .75 and trustradius<10.0 :
trustradius = 2.0*trustradius
if disp :
if (niters>=maxiter) and (finish != 2) :
print(" Current function value: %f" % currentcost)
print(" Iterations: %d" % niters)
print(" Function evaluations: %d" % func_calls)
print(" Gradient evaluations: %d" % grad_calls)
print(" Maximum number of iterations exceeded with no convergence ")
if (finish == 2) :
print("Optimization terminated successfully.")
print(" Current function value: %f" % currentcost)
print(" Iterations: %d" % niters)
print(" Function evaluations: %d" % func_calls)
print(" Gradient evaluations: %d" % grad_calls)
if isinstance(xcopy,KeyedList) :
xcopy.update(x)
else :
xcopy = x
if full_output:
retlist = xcopy, currentcost, func_calls, grad_calls, finish, Lambda, j
if retall:
retlist += (allvecs,)
else:
retlist = xcopy
if retall:
retlist = (xcopy, allvecs)
return retlist
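# Illustrative usage sketch (not part of the original module). The optimizer's
# public name is not visible in this fragment, so `fmin_lm_scale` below is a
# hypothetical placeholder for it, and the residual function is likewise made up.
def _example_usage(fmin_lm_scale):
    import numpy
    def residuals(p):
        # simple quadratic test problem: minimise sum((p - 1)**2)
        return numpy.asarray(p) - 1.0
    p0 = numpy.array([3.0, -2.0])
    # full_output=1 and retall=1 give the 8-tuple unpacked below (see the
    # retlist construction at the end of the routine above)
    best_p, best_cost, n_feval, n_geval, flag, last_lambda, last_J, all_p = \
        fmin_lm_scale(residuals, p0, maxiter=200, trustradius=1.0,
                      full_output=1, retall=1, disp=0)
    return best_p, best_cost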
|
[
"scipy.isinf",
"scipy.sum",
"scipy.ones",
"copy.copy",
"scipy.arccos",
"scipy.linalg.svd",
"scipy.asarray",
"scipy.linalg.norm",
"scipy.isnan",
"scipy.zeros",
"scipy.transpose",
"scipy.dot",
"scipy.mat",
"scipy.finfo"
] |
[((3469, 3486), 'scipy.isnan', 'scipy.isnan', (['cost'], {}), '(cost)\n', (3480, 3486), False, 'import scipy\n'), ((6131, 6144), 'copy.copy', 'copy.copy', (['x0'], {}), '(x0)\n', (6140, 6144), False, 'import copy\n'), ((6663, 6685), 'scipy.zeros', 'zeros', (['n', 'scipy.float_'], {}), '(n, scipy.float_)\n', (6668, 6685), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((6696, 6718), 'scipy.zeros', 'zeros', (['n', 'scipy.float_'], {}), '(n, scipy.float_)\n', (6701, 6718), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((19119, 19132), 'copy.copy', 'copy.copy', (['x0'], {}), '(x0)\n', (19128, 19132), False, 'import copy\n'), ((19493, 19515), 'scipy.zeros', 'zeros', (['n', 'scipy.float_'], {}), '(n, scipy.float_)\n', (19498, 19515), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((19526, 19548), 'scipy.zeros', 'zeros', (['n', 'scipy.float_'], {}), '(n, scipy.float_)\n', (19531, 19548), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((27020, 27042), 'scipy.zeros', 'zeros', (['n', 'scipy.float_'], {}), '(n, scipy.float_)\n', (27025, 27042), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((27053, 27075), 'scipy.zeros', 'zeros', (['n', 'scipy.float_'], {}), '(n, scipy.float_)\n', (27058, 27075), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((29433, 29446), 'copy.copy', 'copy.copy', (['x0'], {}), '(x0)\n', (29442, 29446), False, 'import copy\n'), ((29869, 29891), 'scipy.zeros', 'zeros', (['n', 'scipy.float_'], {}), '(n, scipy.float_)\n', (29874, 29891), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((29902, 29924), 'scipy.zeros', 'zeros', (['n', 'scipy.float_'], {}), '(n, scipy.float_)\n', (29907, 29924), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((786, 811), 'scipy.finfo', 'scipy.finfo', (['scipy.float_'], {}), '(scipy.float_)\n', (797, 811), False, 'import scipy\n'), ((3339, 3352), 'scipy.sum', 'sum', (['(res ** 2)'], {}), '(res ** 2)\n', (3342, 3352), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((6237, 6248), 'scipy.asarray', 'asarray', (['x0'], {}), '(x0)\n', (6244, 6248), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((7760, 7779), 'scipy.linalg.svd', 'scipy.linalg.svd', (['j'], {}), '(j)\n', (7776, 7779), False, 'import scipy\n'), ((8392, 8405), 'scipy.asarray', 'asarray', (['move'], {}), '(move)\n', (8399, 8405), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((8825, 8838), 'scipy.asarray', 'asarray', (['move'], {}), '(move)\n', (8832, 8838), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((19225, 19236), 'scipy.asarray', 'asarray', (['x0'], {}), '(x0)\n', (19232, 19236), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((19723, 19744), 'scipy.linalg.svd', 'scipy.linalg.svd', (['lmh'], {}), '(lmh)\n', (19739, 19744), False, 'import scipy\n'), ((20147, 20160), 'scipy.asarray', 'asarray', (['move'], {}), '(move)\n', (20154, 20160), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((20747, 20760), 'scipy.asarray', 'asarray', (['move'], {}), 
'(move)\n', (20754, 20760), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((21073, 21086), 'scipy.asarray', 'asarray', (['move'], {}), '(move)\n', (21080, 21086), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((29539, 29550), 'scipy.asarray', 'asarray', (['x0'], {}), '(x0)\n', (29546, 29550), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((30200, 30208), 'scipy.mat', 'mat', (['res'], {}), '(res)\n', (30203, 30208), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((30209, 30215), 'scipy.mat', 'mat', (['j'], {}), '(j)\n', (30212, 30215), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((30574, 30593), 'scipy.linalg.svd', 'scipy.linalg.svd', (['j'], {}), '(j)\n', (30590, 30593), False, 'import scipy\n'), ((31190, 31203), 'scipy.asarray', 'asarray', (['move'], {}), '(move)\n', (31197, 31203), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((2778, 2810), 'scipy.sum', 'sum', (['(approx_grad - analytic_grad)'], {}), '(approx_grad - analytic_grad)\n', (2781, 2810), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((7386, 7394), 'scipy.mat', 'mat', (['res'], {}), '(res)\n', (7389, 7394), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((7395, 7401), 'scipy.mat', 'mat', (['j'], {}), '(j)\n', (7398, 7401), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((7637, 7643), 'scipy.mat', 'mat', (['j'], {}), '(j)\n', (7640, 7643), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((8213, 8229), 'scipy.asarray', 'asarray', (['rhsvect'], {}), '(rhsvect)\n', (8220, 8229), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((9364, 9377), 'scipy.asarray', 'asarray', (['move'], {}), '(move)\n', (9371, 9377), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((20780, 20790), 'scipy.asarray', 'asarray', (['u'], {}), '(u)\n', (20787, 20790), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((21106, 21116), 'scipy.asarray', 'asarray', (['u'], {}), '(u)\n', (21113, 21116), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((21140, 21153), 'scipy.asarray', 'asarray', (['move'], {}), '(move)\n', (21147, 21153), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((30451, 30457), 'scipy.mat', 'mat', (['j'], {}), '(j)\n', (30454, 30457), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((30986, 31002), 'scipy.asarray', 'asarray', (['rhsvect'], {}), '(rhsvect)\n', (30993, 31002), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((31075, 31088), 'scipy.asarray', 'asarray', (['grad'], {}), '(grad)\n', (31082, 31088), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((31303, 31326), 'scipy.linalg.norm', 'scipy.linalg.norm', (['move'], {}), '(move)\n', (31320, 31326), False, 'import scipy\n'), ((31478, 31492), 'scipy.asarray', 'asarray', (['move2'], {}), '(move2)\n', (31485, 31492), False, 'from scipy import absolute, sqrt, 
asarray, zeros, mat, transpose, ones, dot, sum\n'), ((31604, 31618), 'scipy.sum', 'sum', (['(res2 ** 2)'], {}), '(res2 ** 2)\n', (31607, 31618), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((31817, 31831), 'scipy.sum', 'sum', (['(res1 ** 2)'], {}), '(res1 ** 2)\n', (31820, 31831), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((7623, 7635), 'scipy.transpose', 'transpose', (['j'], {}), '(j)\n', (7632, 7635), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((7983, 8008), 'scipy.zeros', 'zeros', (['(n,)', 'scipy.float_'], {}), '((n,), scipy.float_)\n', (7988, 8008), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((8166, 8173), 'scipy.mat', 'mat', (['vt'], {}), '(vt)\n', (8169, 8173), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((8178, 8193), 'scipy.transpose', 'transpose', (['grad'], {}), '(grad)\n', (8187, 8193), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((14590, 14611), 'scipy.sum', 'sum', (['(newmodelval ** 2)'], {}), '(newmodelval ** 2)\n', (14593, 14611), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((19941, 19953), 'scipy.transpose', 'transpose', (['u'], {}), '(u)\n', (19950, 19953), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((20469, 20481), 'scipy.transpose', 'transpose', (['u'], {}), '(u)\n', (20478, 20481), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((25291, 25315), 'scipy.transpose', 'scipy.transpose', (['oldgrad'], {}), '(oldgrad)\n', (25306, 25315), False, 'import scipy\n'), ((30437, 30449), 'scipy.transpose', 'transpose', (['j'], {}), '(j)\n', (30446, 30449), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((30797, 30822), 'scipy.zeros', 'zeros', (['(n,)', 'scipy.float_'], {}), '((n,), scipy.float_)\n', (30802, 30822), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((30939, 30946), 'scipy.mat', 'mat', (['vt'], {}), '(vt)\n', (30942, 30946), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((30951, 30966), 'scipy.transpose', 'transpose', (['grad'], {}), '(grad)\n', (30960, 30966), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((32598, 32607), 'scipy.mat', 'mat', (['res2'], {}), '(res2)\n', (32601, 32607), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((32608, 32614), 'scipy.mat', 'mat', (['j'], {}), '(j)\n', (32611, 32614), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((34909, 34930), 'scipy.sum', 'sum', (['(newmodelval ** 2)'], {}), '(newmodelval ** 2)\n', (34912, 34930), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((3998, 4012), 'scipy.isnan', 'scipy.isnan', (['j'], {}), '(j)\n', (4009, 4012), False, 'import scipy\n'), ((4022, 4036), 'scipy.isinf', 'scipy.isinf', (['j'], {}), '(j)\n', (4033, 4036), False, 'import scipy\n'), ((8295, 8308), 'scipy.ones', 'scipy.ones', (['n'], {}), '(n)\n', (8305, 8308), False, 'import scipy\n'), ((10856, 10865), 'scipy.mat', 'mat', (['res2'], {}), '(res2)\n', (10859, 10865), False, 
'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((10866, 10872), 'scipy.mat', 'mat', (['j'], {}), '(j)\n', (10869, 10872), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((12052, 12075), 'scipy.arccos', 'scipy.arccos', (['(num / den)'], {}), '(num / den)\n', (12064, 12075), False, 'import scipy\n'), ((14673, 14694), 'scipy.sum', 'sum', (['(newmodelval ** 2)'], {}), '(newmodelval ** 2)\n', (14676, 14694), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((20056, 20063), 'scipy.ones', 'ones', (['n'], {}), '(n)\n', (20060, 20063), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((25347, 25368), 'scipy.transpose', 'scipy.transpose', (['move'], {}), '(move)\n', (25362, 25368), False, 'import scipy\n'), ((25369, 25392), 'scipy.dot', 'scipy.dot', (['oldlmh', 'move'], {}), '(oldlmh, move)\n', (25378, 25392), False, 'import scipy\n'), ((33144, 33153), 'scipy.mat', 'mat', (['res1'], {}), '(res1)\n', (33147, 33153), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((33154, 33160), 'scipy.mat', 'mat', (['j'], {}), '(j)\n', (33157, 33160), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((34992, 35013), 'scipy.sum', 'sum', (['(newmodelval ** 2)'], {}), '(newmodelval ** 2)\n', (34995, 35013), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((8273, 8286), 'scipy.ones', 'scipy.ones', (['n'], {}), '(n)\n', (8283, 8286), False, 'import scipy\n'), ((8941, 8954), 'scipy.transpose', 'transpose', (['vt'], {}), '(vt)\n', (8950, 8954), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((9299, 9312), 'scipy.transpose', 'transpose', (['vt'], {}), '(vt)\n', (9308, 9312), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((11545, 11554), 'scipy.mat', 'mat', (['res1'], {}), '(res1)\n', (11548, 11554), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((11555, 11561), 'scipy.mat', 'mat', (['j'], {}), '(j)\n', (11558, 11561), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((11980, 12003), 'scipy.linalg.norm', 'scipy.linalg.norm', (['grad'], {}), '(grad)\n', (11997, 12003), False, 'import scipy\n'), ((12004, 12027), 'scipy.linalg.norm', 'scipy.linalg.norm', (['move'], {}), '(move)\n', (12021, 12027), False, 'import scipy\n'), ((14426, 14437), 'scipy.mat', 'mat', (['oldjac'], {}), '(oldjac)\n', (14429, 14437), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((20040, 20047), 'scipy.ones', 'ones', (['n'], {}), '(n)\n', (20044, 20047), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((23874, 23887), 'scipy.asarray', 'asarray', (['move'], {}), '(move)\n', (23881, 23887), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((31231, 31244), 'scipy.transpose', 'transpose', (['vt'], {}), '(vt)\n', (31240, 31244), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((33890, 33904), 'scipy.sum', 'sum', (['(res1 ** 2)'], {}), '(res1 ** 2)\n', (33893, 33904), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((34482, 34491), 
'scipy.mat', 'mat', (['res1'], {}), '(res1)\n', (34485, 34491), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((34492, 34498), 'scipy.mat', 'mat', (['j'], {}), '(j)\n', (34495, 34498), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((34745, 34756), 'scipy.mat', 'mat', (['oldjac'], {}), '(oldjac)\n', (34748, 34756), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((8970, 8979), 'scipy.mat', 'mat', (['move'], {}), '(move)\n', (8973, 8979), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((9328, 9337), 'scipy.mat', 'mat', (['move'], {}), '(move)\n', (9331, 9337), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((11934, 11955), 'scipy.dot', 'scipy.dot', (['grad', 'move'], {}), '(grad, move)\n', (11943, 11955), False, 'import scipy\n'), ((14163, 14172), 'scipy.mat', 'mat', (['res1'], {}), '(res1)\n', (14166, 14172), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((14173, 14179), 'scipy.mat', 'mat', (['j'], {}), '(j)\n', (14176, 14179), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((23919, 23929), 'scipy.asarray', 'asarray', (['u'], {}), '(u)\n', (23926, 23929), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((23965, 23978), 'scipy.asarray', 'asarray', (['move'], {}), '(move)\n', (23972, 23978), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((31260, 31269), 'scipy.mat', 'mat', (['move'], {}), '(move)\n', (31263, 31269), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((33760, 33773), 'scipy.asarray', 'asarray', (['move'], {}), '(move)\n', (33767, 33773), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((13303, 13316), 'scipy.asarray', 'asarray', (['move'], {}), '(move)\n', (13310, 13316), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((14452, 14461), 'scipy.mat', 'mat', (['move'], {}), '(move)\n', (14455, 14461), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((24421, 24437), 'scipy.asarray', 'asarray', (['moveold'], {}), '(moveold)\n', (24428, 24437), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((34771, 34780), 'scipy.mat', 'mat', (['move'], {}), '(move)\n', (34774, 34780), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((12750, 12763), 'scipy.transpose', 'transpose', (['vt'], {}), '(vt)\n', (12759, 12763), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n'), ((12779, 12788), 'scipy.mat', 'mat', (['move'], {}), '(move)\n', (12782, 12788), False, 'from scipy import absolute, sqrt, asarray, zeros, mat, transpose, ones, dot, sum\n')]
|
import os
import time
import socket
import zipfile
from datetime import datetime, timedelta
from flask import Flask, request, g, render_template, jsonify, redirect, Response
from chaac.chaacdb import ChaacDB
app = Flask(__name__)
app.config.from_object(__name__)  # load config from this file
# Load default config and override config from an environment variable
app.config.update(dict(DATABASE=os.getenv("DATABASE")))
def get_pretty_hostname():
hostname = socket.gethostname()
if "chaac-" in hostname:
hostname = " ".join(hostname.split('-')[1:]).title()
else:
hostname = " ".join(hostname.split('-')).title()
return hostname
hostname = get_pretty_hostname()
default_uid = None
def get_db():
"""Opens a new database connection if there is none yet for the
current application context.
"""
if not hasattr(g, "sqlite_db"):
g.sqlite_db = ChaacDB(app.config["DATABASE"])
return g.sqlite_db
@app.teardown_appcontext
def close_db(error):
"""Closes the database again at the end of the request."""
if hasattr(g, "sqlite_db"):
g.sqlite_db.close()
def get_latest_sample(uid):
""" Get latest weather data (and past day's rainfall) """
# Get last sample
db = get_db()
# Past day
now = datetime.fromtimestamp(int(time.time()))
end_time = start_time = time.mktime(now.timetuple())
# Start at midnight today
start_time = time.mktime(now.replace(hour=0, minute=0, second=0).timetuple())
end_time = time.mktime(now.timetuple())
rows = db.get_records("minute", start_date=start_time, end_date=end_time, order="desc", uid=uid)
if len(rows) == 0:
return None
sample = {}
# Convert the units
for key, val in rows[0]._asdict().items():
if key == "timestamp":
sample[key] = datetime.fromtimestamp(val).strftime("%Y-%m-%d %H:%M:%S")
sample["ts"] = val
        elif val is None:
sample[key] = 0
else:
sample[key] = round(float(val), 2)
rain_total = 0
for row in rows:
rain_total += row.rain
sample["rain"] = round(rain_total, 2)
return sample
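# Illustrative shape of the dict returned above; column names other than
# "timestamp"/"ts"/"rain" depend on the schema of the "minute" table:
#   {"timestamp": "2019-06-01 14:23:00", "ts": 1559399580.0, "rain": 0.25, ...}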
@app.route("/latest")
def latest_json():
db = get_db()
data = {"hostname": hostname, "devices":{}}
for device, name in db.devices.items():
sample = get_latest_sample(device)
if sample:
data["devices"][device] = get_latest_sample(device)
data["devices"][device]["name"] = name
return jsonify(data)
@app.route("/")
def summary():
return render_template("status.html", hostname=hostname)
rain_mod = {"day": (60 * 60), "week": (60 * 60 * 24), "month": (60 * 60 * 24)}
def get_start_bin(end_date, table):
""" Figure out what time it is now to start the bar chart
The numbers depend on whether it's a day/week/month plot
"""
# Start one day (% rain_mod) after today
end_date += rain_mod[table]
if table == "day":
return datetime.fromtimestamp(end_date).timetuple().tm_hour
elif table == "week":
return datetime.fromtimestamp(end_date).timetuple().tm_wday
elif table == "month":
return datetime.fromtimestamp(end_date).timetuple().tm_mday
else:
return None
days = ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"]
def get_rain_label(idx, table):
""" Get nice labels for the bar chart. Unfortunately, plotly
keeps re-sorting all the numbers so we have to add strings
around them to keep it in the correct order. """
if table == "day":
return "(" + str(idx) + ")"
elif table == "week":
return days[idx]
elif table == "month":
# TODO: Deal with days=0 etc
return "(" + str(idx) + ")"
else:
return None
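# Examples of the labels produced above (values derived from the code):
#   get_rain_label(3, "day")    -> "(3)"
#   get_rain_label(0, "week")   -> "Mon"
#   get_rain_label(15, "month") -> "(15)"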
def get_data_dict(uid, start_date, end_date, table="day"):
""" Get weather data for the specified weather period """
db = get_db()
if table == "day":
real_table = "minute"
else:
real_table = "hour"
rows = db.get_records(real_table, start_date=start_date, end_date=end_date, uid=uid)
if len(rows) == 0:
return None
plot = {}
col_names = rows[0]._asdict().keys()
for name in col_names:
plot[name] = []
# Rain doesn't have the same timestamp as the rest of the data
plot["rain_time"] = []
# Create lists with each data type and make timestamp pretty
for row in rows:
for name in col_names:
if name == "timestamp":
plot[name].append(
datetime.fromtimestamp(getattr(row, name)).strftime(
"%Y-%m-%d %H:%M:%S"
)
)
elif name == "uid" or name == "id" or name == "rain":
continue
else:
if getattr(row, name) is None:
plot[name].append(0)
else:
plot[name].append(round(getattr(row, name), 3))
# Bin data into the appropriate size for histograms
idx = get_start_bin(int(end_date - 1), table)
bins = range(int(start_date), int(end_date), rain_mod[table])
# Loop through each rain bin
for rain_bin in bins:
plot["rain_time"].append(get_rain_label(idx, table))
rain = 0
# Loop through each rain sample
for row in rows:
if row.rain == 0:
continue
# Check if the sample falls into our bin
if row.timestamp >= rain_bin and row.timestamp < (rain_bin + rain_mod[table]):
rain += row.rain
plot["rain"].append(rain)
# Wrap around depending on the number of bins (since we don't always start at 0)
idx = (idx + 1) % len(bins)
return plot
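# Worked example of the binning above (illustrative): for table="week",
# rain_mod is 86400 s, so `bins` steps through the seven day-long intervals
# between start_date and end_date; get_start_bin() returns the weekday of the
# first bin, the labels then wrap Mon..Sun via the modulo at the bottom of the
# loop, and each bin accumulates the rain samples whose timestamp falls inside it.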
def join_data(prev_data, new_data):
for key in prev_data.keys():
if prev_data["timestamp"][-1] < new_data["timestamp"][0]:
prev_data[key] += new_data[key]
else:
prev_data[key] = new_data[key] + prev_data[key]
return prev_data
def get_stats(uid, start_date, end_date):
""" Get weather data for the specified weather period """
db = get_db()
rows = db.get_stats(start_date=start_date, end_date=end_date, uid=uid)
if len(rows) == 0:
return
plot = {}
col_names = rows[0]._asdict().keys()
plot["stat_fields"] = []
for name in col_names:
split_name = name.rsplit("__")
if len(split_name) > 1:
if split_name[0] not in plot:
plot[split_name[0]] = {}
plot["stat_fields"].append(split_name[0])
plot[split_name[0]][split_name[1]] = []
else:
plot[name] = []
ignored_fields = ["uid", "id", "data_period", "temperature_in", "wind_dir"]
# Create lists with each data type and make timestamp pretty
for row in rows:
for name in col_names:
if name == "timestamp":
plot[name].append(
datetime.fromtimestamp(getattr(row, name)).strftime("%Y-%m-%d")
)
elif name in ignored_fields:
continue
else:
split_name = name.rsplit("__")
if len(split_name) > 1:
if getattr(row, name) is None:
plot[split_name[0]][split_name[1]].append(0)
else:
plot[split_name[0]][split_name[1]].append(
round(getattr(row, name), 3)
)
else:
if getattr(row, name) is None:
plot[name].append(0)
else:
plot[name].append(round(getattr(row, name), 3))
if sum(plot["rain"]) == 0:
del plot["rain"]
return plot
def join_stats(stats1, stats2):
for key in stats1:
if isinstance(stats1[key], list):
stats1[key].extend(stats2[key])
elif isinstance(stats1[key], dict):
stats1[key] = join_stats(stats1[key], stats2[key])
return stats1
@app.route("/json/stats/year")
def json_stats_year_str():
db = get_db()
# time.time() is utc time, but now is a "naive"
# datetime object in current timezone
now = datetime.fromtimestamp(int(time.time()))
    # Start at the beginning of the current year
start_time = time.mktime(
(now.replace(minute=0, second=0, hour=0, day=1, month=1)).timetuple()
)
end_time = time.mktime(now.timetuple())
stats = {"hostname": hostname}
stats["start_date"] = datetime.fromtimestamp(start_time).strftime("%Y-%m-%d")
stats["end_date"] = datetime.fromtimestamp(end_time).strftime("%Y-%m-%d")
stats["devices"] = {}
for device, name in db.devices.items():
uid_stats = get_stats(device, start_time, end_time)
if uid_stats is not None:
if name in stats["devices"]:
stats["devices"][name] = join_stats(uid_stats, stats["devices"][name])
else:
stats["devices"][name] = uid_stats
return jsonify(stats)
@app.route("/json/day")
def json_day_str():
# time.time() is utc time, but now is a "naive"
# datetime object in current timezone
now = datetime.fromtimestamp(int(time.time()))
# Start 24 hours before the next full hour
start_time = time.mktime(
(
now.replace(minute=0, second=0) + timedelta(hours=1) - timedelta(days=1)
).timetuple()
)
end_time = time.mktime(now.timetuple())
db = get_db()
data = {"hostname": hostname}
data["start_date"] = datetime.fromtimestamp(start_time).strftime(
"%Y-%m-%d %H:%M:%S"
)
data["end_date"] = datetime.fromtimestamp(end_time).strftime("%Y-%m-%d %H:%M:%S")
data["data"] = {}
for device, name in db.devices.items():
data_dict = get_data_dict(device, start_time, end_time, "day")
if data_dict is not None:
if name in data["data"]:
data["data"][name] = join_data(data["data"][name], data_dict)
else:
data["data"][name] = data_dict
return jsonify(data)
@app.route("/json/week")
def json_week_str():
# time.time() is utc time, but now is a "naive"
# datetime object in current timezone
now = datetime.fromtimestamp(int(time.time()))
# Round to the full day, start 7 days ago
start_time = time.mktime(
(
now.replace(hour=0, minute=0, second=0)
+ timedelta(days=1)
- timedelta(weeks=1)
).timetuple()
)
end_time = time.mktime(now.timetuple())
db = get_db()
data = {"hostname": hostname}
data["start_date"] = datetime.fromtimestamp(start_time).strftime(
"%Y-%m-%d %H:%M:%S"
)
data["end_date"] = datetime.fromtimestamp(end_time).strftime("%Y-%m-%d %H:%M:%S")
data["data"] = {}
for device, name in db.devices.items():
data_dict = get_data_dict(device, start_time, end_time, "week")
if data_dict is not None:
if name in data["data"]:
data["data"][name] = join_data(data["data"][name], data_dict)
else:
data["data"][name] = data_dict
return jsonify(data)
@app.route("/json/month")
def json_month_str():
# time.time() is utc time, but now is a "naive"
# datetime object in current timezone
now = datetime.fromtimestamp(int(time.time()))
# TODO - round to the month?
# Round to the full day, start 31 days ago
start_time = time.mktime(
(
now.replace(hour=0, minute=0, second=0)
+ timedelta(days=1)
- timedelta(days=31)
).timetuple()
)
end_time = time.mktime(now.timetuple())
db = get_db()
data = {"hostname": hostname}
data["start_date"] = datetime.fromtimestamp(start_time).strftime(
"%Y-%m-%d %H:%M:%S"
)
data["end_date"] = datetime.fromtimestamp(end_time).strftime("%Y-%m-%d %H:%M:%S")
data["data"] = {}
for device, name in db.devices.items():
data_dict = get_data_dict(device, start_time, end_time, "month")
if data_dict is not None:
if name in data["data"]:
data["data"][name] = join_data(data["data"][name], data_dict)
else:
data["data"][name] = data_dict
return jsonify(data)
@app.route("/plots")
def plots():
return render_template("plots.html", hostname=hostname)
@app.route("/stats")
def stats():
return render_template("stats.html", hostname=hostname)
# Don't add hostname to redirect
# See https://stackoverflow.com/questions/30006740/how-can-i-tell-flask-not-to-add-host-scheme-info-to-my-redirect
class NoHostnameResponse(Response):
autocorrect_location_header = False
@app.route("/zipdb")
def download_zip_db():
dbname = "chaac.db.zip"
with zipfile.ZipFile(
f"/tmp/files/{dbname}", "w", compression=zipfile.ZIP_DEFLATED
) as dbzip:
print("Zipping ", os.getenv("DATABASE"))
dbzip.write(os.getenv("DATABASE"))
return redirect(f"files/{dbname}", Response=NoHostnameResponse)
|
[
"zipfile.ZipFile",
"flask.redirect",
"chaac.chaacdb.ChaacDB",
"flask.Flask",
"time.time",
"socket.gethostname",
"flask.jsonify",
"datetime.timedelta",
"flask.render_template",
"datetime.datetime.fromtimestamp",
"flask.g.sqlite_db.close",
"os.getenv"
] |
[((215, 230), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (220, 230), False, 'from flask import Flask, request, g, render_template, jsonify, redirect, Response\n'), ((478, 498), 'socket.gethostname', 'socket.gethostname', ([], {}), '()\n', (496, 498), False, 'import socket\n'), ((2529, 2542), 'flask.jsonify', 'jsonify', (['data'], {}), '(data)\n', (2536, 2542), False, 'from flask import Flask, request, g, render_template, jsonify, redirect, Response\n'), ((2587, 2636), 'flask.render_template', 'render_template', (['"""status.html"""'], {'hostname': 'hostname'}), "('status.html', hostname=hostname)\n", (2602, 2636), False, 'from flask import Flask, request, g, render_template, jsonify, redirect, Response\n'), ((9112, 9126), 'flask.jsonify', 'jsonify', (['stats'], {}), '(stats)\n', (9119, 9126), False, 'from flask import Flask, request, g, render_template, jsonify, redirect, Response\n'), ((10170, 10183), 'flask.jsonify', 'jsonify', (['data'], {}), '(data)\n', (10177, 10183), False, 'from flask import Flask, request, g, render_template, jsonify, redirect, Response\n'), ((11260, 11273), 'flask.jsonify', 'jsonify', (['data'], {}), '(data)\n', (11267, 11273), False, 'from flask import Flask, request, g, render_template, jsonify, redirect, Response\n'), ((12388, 12401), 'flask.jsonify', 'jsonify', (['data'], {}), '(data)\n', (12395, 12401), False, 'from flask import Flask, request, g, render_template, jsonify, redirect, Response\n'), ((12449, 12497), 'flask.render_template', 'render_template', (['"""plots.html"""'], {'hostname': 'hostname'}), "('plots.html', hostname=hostname)\n", (12464, 12497), False, 'from flask import Flask, request, g, render_template, jsonify, redirect, Response\n'), ((12545, 12593), 'flask.render_template', 'render_template', (['"""stats.html"""'], {'hostname': 'hostname'}), "('stats.html', hostname=hostname)\n", (12560, 12593), False, 'from flask import Flask, request, g, render_template, jsonify, redirect, Response\n'), ((13107, 13163), 'flask.redirect', 'redirect', (['f"""files/{dbname}"""'], {'Response': 'NoHostnameResponse'}), "(f'files/{dbname}', Response=NoHostnameResponse)\n", (13115, 13163), False, 'from flask import Flask, request, g, render_template, jsonify, redirect, Response\n'), ((915, 946), 'chaac.chaacdb.ChaacDB', 'ChaacDB', (["app.config['DATABASE']"], {}), "(app.config['DATABASE'])\n", (922, 946), False, 'from chaac.chaacdb import ChaacDB\n'), ((1121, 1140), 'flask.g.sqlite_db.close', 'g.sqlite_db.close', ([], {}), '()\n', (1138, 1140), False, 'from flask import Flask, request, g, render_template, jsonify, redirect, Response\n'), ((12901, 12979), 'zipfile.ZipFile', 'zipfile.ZipFile', (['f"""/tmp/files/{dbname}"""', '"""w"""'], {'compression': 'zipfile.ZIP_DEFLATED'}), "(f'/tmp/files/{dbname}', 'w', compression=zipfile.ZIP_DEFLATED)\n", (12916, 12979), False, 'import zipfile\n'), ((411, 432), 'os.getenv', 'os.getenv', (['"""DATABASE"""'], {}), "('DATABASE')\n", (420, 432), False, 'import os\n'), ((1328, 1339), 'time.time', 'time.time', ([], {}), '()\n', (1337, 1339), False, 'import time\n'), ((8320, 8331), 'time.time', 'time.time', ([], {}), '()\n', (8329, 8331), False, 'import time\n'), ((8604, 8638), 'datetime.datetime.fromtimestamp', 'datetime.fromtimestamp', (['start_time'], {}), '(start_time)\n', (8626, 8638), False, 'from datetime import datetime, timedelta\n'), ((8684, 8716), 'datetime.datetime.fromtimestamp', 'datetime.fromtimestamp', (['end_time'], {}), '(end_time)\n', (8706, 8716), False, 'from datetime import datetime, 
timedelta\n'), ((9304, 9315), 'time.time', 'time.time', ([], {}), '()\n', (9313, 9315), False, 'import time\n'), ((9642, 9676), 'datetime.datetime.fromtimestamp', 'datetime.fromtimestamp', (['start_time'], {}), '(start_time)\n', (9664, 9676), False, 'from datetime import datetime, timedelta\n'), ((9744, 9776), 'datetime.datetime.fromtimestamp', 'datetime.fromtimestamp', (['end_time'], {}), '(end_time)\n', (9766, 9776), False, 'from datetime import datetime, timedelta\n'), ((10363, 10374), 'time.time', 'time.time', ([], {}), '()\n', (10372, 10374), False, 'import time\n'), ((10731, 10765), 'datetime.datetime.fromtimestamp', 'datetime.fromtimestamp', (['start_time'], {}), '(start_time)\n', (10753, 10765), False, 'from datetime import datetime, timedelta\n'), ((10833, 10865), 'datetime.datetime.fromtimestamp', 'datetime.fromtimestamp', (['end_time'], {}), '(end_time)\n', (10855, 10865), False, 'from datetime import datetime, timedelta\n'), ((11455, 11466), 'time.time', 'time.time', ([], {}), '()\n', (11464, 11466), False, 'import time\n'), ((11858, 11892), 'datetime.datetime.fromtimestamp', 'datetime.fromtimestamp', (['start_time'], {}), '(start_time)\n', (11880, 11892), False, 'from datetime import datetime, timedelta\n'), ((11960, 11992), 'datetime.datetime.fromtimestamp', 'datetime.fromtimestamp', (['end_time'], {}), '(end_time)\n', (11982, 11992), False, 'from datetime import datetime, timedelta\n'), ((13030, 13051), 'os.getenv', 'os.getenv', (['"""DATABASE"""'], {}), "('DATABASE')\n", (13039, 13051), False, 'import os\n'), ((13073, 13094), 'os.getenv', 'os.getenv', (['"""DATABASE"""'], {}), "('DATABASE')\n", (13082, 13094), False, 'import os\n'), ((1847, 1874), 'datetime.datetime.fromtimestamp', 'datetime.fromtimestamp', (['val'], {}), '(val)\n', (1869, 1874), False, 'from datetime import datetime, timedelta\n'), ((3007, 3039), 'datetime.datetime.fromtimestamp', 'datetime.fromtimestamp', (['end_date'], {}), '(end_date)\n', (3029, 3039), False, 'from datetime import datetime, timedelta\n'), ((9473, 9490), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (9482, 9490), False, 'from datetime import datetime, timedelta\n'), ((10562, 10580), 'datetime.timedelta', 'timedelta', ([], {'weeks': '(1)'}), '(weeks=1)\n', (10571, 10580), False, 'from datetime import datetime, timedelta\n'), ((11688, 11706), 'datetime.timedelta', 'timedelta', ([], {'days': '(31)'}), '(days=31)\n', (11697, 11706), False, 'from datetime import datetime, timedelta\n'), ((3101, 3133), 'datetime.datetime.fromtimestamp', 'datetime.fromtimestamp', (['end_date'], {}), '(end_date)\n', (3123, 3133), False, 'from datetime import datetime, timedelta\n'), ((9452, 9470), 'datetime.timedelta', 'timedelta', ([], {'hours': '(1)'}), '(hours=1)\n', (9461, 9470), False, 'from datetime import datetime, timedelta\n'), ((10530, 10547), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (10539, 10547), False, 'from datetime import datetime, timedelta\n'), ((11656, 11673), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (11665, 11673), False, 'from datetime import datetime, timedelta\n'), ((3196, 3228), 'datetime.datetime.fromtimestamp', 'datetime.fromtimestamp', (['end_date'], {}), '(end_date)\n', (3218, 3228), False, 'from datetime import datetime, timedelta\n')]
|
import random
#
# RANDOM FUNCTION EXAMPLE
#
def random_example_function(data, param_min, param_max):
parameters = random.uniform(param_min, param_max)
transformed_data, _ = example_function(data, parameters)
transform = ["example_function", example_function, {"parameters": parameters}]
return transformed_data, transform
#
# FUNCTION EXAMPLE
#
def example_function(data, parameters):
return data, None
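# Minimal usage sketch (illustrative): `data` stands for whatever the caller
# passes through; example_function returns it unchanged.
#
#   transformed, transform = random_example_function(data, param_min=0.0, param_max=1.0)
#   name, fn, kwargs = transform
#   replayed, _ = fn(data, **kwargs)   # re-applies the same randomly drawn parameters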
|
[
"random.uniform"
] |
[((119, 155), 'random.uniform', 'random.uniform', (['param_min', 'param_max'], {}), '(param_min, param_max)\n', (133, 155), False, 'import random\n')]
|
# Copyright (c) 2015 by <NAME> and <NAME>
# See https://github.com/scisoft/autocmake/blob/master/LICENSE
import subprocess
import os
import sys
import shutil
def module_exists(module_name):
try:
__import__(module_name)
except ImportError:
return False
else:
return True
def check_cmake_exists(cmake_command):
"""
    Check whether CMake is installed. If not, print an
    informative error message and quit.
"""
p = subprocess.Popen('%s --version' % cmake_command,
shell=True,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE)
if not ('cmake version' in p.communicate()[0].decode('UTF-8')):
sys.stderr.write(' This code is built using CMake\n\n')
sys.stderr.write(' CMake is not found\n')
sys.stderr.write(' get CMake at http://www.cmake.org/\n')
sys.stderr.write(' on many clusters CMake is installed\n')
sys.stderr.write(' but you have to load it first:\n')
sys.stderr.write(' $ module load cmake\n')
sys.exit(1)
def setup_build_path(build_path):
"""
    Create build directory. If this already exists, print an informative
error message and quit.
"""
if os.path.isdir(build_path):
fname = os.path.join(build_path, 'CMakeCache.txt')
if os.path.exists(fname):
sys.stderr.write('aborting setup\n')
sys.stderr.write('build directory %s which contains CMakeCache.txt already exists\n' % build_path)
sys.stderr.write('remove the build directory and then rerun setup\n')
sys.exit(1)
else:
os.makedirs(build_path, 0o755)
def test_adapt_cmake_command_to_platform():
cmake_command = "FC=foo CC=bar CXX=RABOOF cmake -DTHIS -DTHAT='this and that cmake' .."
res = adapt_cmake_command_to_platform(cmake_command, 'linux')
assert res == cmake_command
res = adapt_cmake_command_to_platform(cmake_command, 'win32')
assert res == "set FC=foo && set CC=bar && set CXX=RABOOF && cmake -DTHIS -DTHAT='this and that cmake' .."
cmake_command = "cmake -DTHIS -DTHAT='this and that cmake' .."
res = adapt_cmake_command_to_platform(cmake_command, 'linux')
assert res == cmake_command
res = adapt_cmake_command_to_platform(cmake_command, 'win32')
assert res == cmake_command
def adapt_cmake_command_to_platform(cmake_command, platform):
"""
Adapt CMake command to MS Windows platform.
"""
if platform == 'win32':
pos = cmake_command.find('cmake')
s = ['set %s &&' % e for e in cmake_command[:pos].split()]
s.append(cmake_command[pos:])
return ' '.join(s)
else:
return cmake_command
def run_cmake(command, build_path, default_build_path):
"""
Execute CMake command.
"""
topdir = os.getcwd()
os.chdir(build_path)
p = subprocess.Popen(command,
shell=True,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout_coded, stderr_coded = p.communicate()
stdout = stdout_coded.decode('UTF-8')
stderr = stderr_coded.decode('UTF-8')
if stderr:
sys.stderr.write(stderr)
sys.exit(1)
# print cmake output to screen
print(stdout)
# write cmake output to file
f = open('cmake_output', 'w')
f.write(stdout)
f.close()
# change directory and return
os.chdir(topdir)
if 'Configuring incomplete' in stdout:
# configuration was not successful
if (build_path == default_build_path):
# remove build_path iff not set by the user
# otherwise removal can be dangerous
shutil.rmtree(default_build_path)
else:
# configuration was successful
save_setup_command(sys.argv, build_path)
print_build_help(build_path, default_build_path)
def print_build_help(build_path, default_build_path):
"""
Print help text after configuration step is done.
"""
print(' configure step is done')
print(' now you need to compile the sources:')
if (build_path == default_build_path):
print(' $ cd build')
else:
print(' $ cd ' + build_path)
print(' $ make')
def save_setup_command(argv, build_path):
"""
Save setup command to a file.
"""
file_name = os.path.join(build_path, 'setup_command')
f = open(file_name, 'w')
f.write(' '.join(argv[:]) + '\n')
f.close()
def configure(root_directory, build_path, cmake_command, only_show):
"""
Main configure function.
"""
default_build_path = os.path.join(root_directory, 'build')
# check that CMake is available, if not stop
check_cmake_exists('cmake')
# deal with build path
if build_path is None:
build_path = default_build_path
if not only_show:
setup_build_path(build_path)
cmake_command = adapt_cmake_command_to_platform(cmake_command, sys.platform)
print('%s\n' % cmake_command)
if only_show:
sys.exit(0)
run_cmake(cmake_command, build_path, default_build_path)
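# Illustrative driver (not part of the original file): a typical setup script
# might call configure() roughly like this. The cmake command shown is an
# assumption, not something defined in this module.
def _example_setup():
    import os
    configure(root_directory=os.getcwd(),
              build_path=None,                       # defaults to <root>/build
              cmake_command="FC=gfortran cmake ..",  # adapted for win32 automatically
              only_show=False)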
|
[
"subprocess.Popen",
"os.makedirs",
"os.path.isdir",
"os.getcwd",
"os.path.exists",
"shutil.rmtree",
"sys.stderr.write",
"os.path.join",
"os.chdir",
"sys.exit"
] |
[((469, 581), 'subprocess.Popen', 'subprocess.Popen', (["('%s --version' % cmake_command)"], {'shell': '(True)', 'stdin': 'subprocess.PIPE', 'stdout': 'subprocess.PIPE'}), "('%s --version' % cmake_command, shell=True, stdin=\n subprocess.PIPE, stdout=subprocess.PIPE)\n", (485, 581), False, 'import subprocess\n'), ((1269, 1294), 'os.path.isdir', 'os.path.isdir', (['build_path'], {}), '(build_path)\n', (1282, 1294), False, 'import os\n'), ((2865, 2876), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (2874, 2876), False, 'import os\n'), ((2881, 2901), 'os.chdir', 'os.chdir', (['build_path'], {}), '(build_path)\n', (2889, 2901), False, 'import os\n'), ((2910, 3023), 'subprocess.Popen', 'subprocess.Popen', (['command'], {'shell': '(True)', 'stdin': 'subprocess.PIPE', 'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), '(command, shell=True, stdin=subprocess.PIPE, stdout=\n subprocess.PIPE, stderr=subprocess.PIPE)\n', (2926, 3023), False, 'import subprocess\n'), ((3512, 3528), 'os.chdir', 'os.chdir', (['topdir'], {}), '(topdir)\n', (3520, 3528), False, 'import os\n'), ((4442, 4483), 'os.path.join', 'os.path.join', (['build_path', '"""setup_command"""'], {}), "(build_path, 'setup_command')\n", (4454, 4483), False, 'import os\n'), ((4706, 4743), 'os.path.join', 'os.path.join', (['root_directory', '"""build"""'], {}), "(root_directory, 'build')\n", (4718, 4743), False, 'import os\n'), ((728, 785), 'sys.stderr.write', 'sys.stderr.write', (['""" This code is built using CMake\n\n"""'], {}), "(' This code is built using CMake\\n\\n')\n", (744, 785), False, 'import sys\n'), ((794, 837), 'sys.stderr.write', 'sys.stderr.write', (['""" CMake is not found\n"""'], {}), "(' CMake is not found\\n')\n", (810, 837), False, 'import sys\n'), ((846, 905), 'sys.stderr.write', 'sys.stderr.write', (['""" get CMake at http://www.cmake.org/\n"""'], {}), "(' get CMake at http://www.cmake.org/\\n')\n", (862, 905), False, 'import sys\n'), ((914, 974), 'sys.stderr.write', 'sys.stderr.write', (['""" on many clusters CMake is installed\n"""'], {}), "(' on many clusters CMake is installed\\n')\n", (930, 974), False, 'import sys\n'), ((983, 1038), 'sys.stderr.write', 'sys.stderr.write', (['""" but you have to load it first:\n"""'], {}), "(' but you have to load it first:\\n')\n", (999, 1038), False, 'import sys\n'), ((1047, 1091), 'sys.stderr.write', 'sys.stderr.write', (['""" $ module load cmake\n"""'], {}), "(' $ module load cmake\\n')\n", (1063, 1091), False, 'import sys\n'), ((1100, 1111), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1108, 1111), False, 'import sys\n'), ((1312, 1354), 'os.path.join', 'os.path.join', (['build_path', '"""CMakeCache.txt"""'], {}), "(build_path, 'CMakeCache.txt')\n", (1324, 1354), False, 'import os\n'), ((1366, 1387), 'os.path.exists', 'os.path.exists', (['fname'], {}), '(fname)\n', (1380, 1387), False, 'import os\n'), ((1673, 1701), 'os.makedirs', 'os.makedirs', (['build_path', '(493)'], {}), '(build_path, 493)\n', (1684, 1701), False, 'import os\n'), ((3275, 3299), 'sys.stderr.write', 'sys.stderr.write', (['stderr'], {}), '(stderr)\n', (3291, 3299), False, 'import sys\n'), ((3308, 3319), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (3316, 3319), False, 'import sys\n'), ((5123, 5134), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (5131, 5134), False, 'import sys\n'), ((1401, 1437), 'sys.stderr.write', 'sys.stderr.write', (['"""aborting setup\n"""'], {}), "('aborting setup\\n')\n", (1417, 1437), False, 'import sys\n'), ((1450, 1557), 'sys.stderr.write', 'sys.stderr.write', 
(["('build directory %s which contains CMakeCache.txt already exists\\n' %\n build_path)"], {}), "(\n 'build directory %s which contains CMakeCache.txt already exists\\n' %\n build_path)\n", (1466, 1557), False, 'import sys\n'), ((1561, 1630), 'sys.stderr.write', 'sys.stderr.write', (['"""remove the build directory and then rerun setup\n"""'], {}), "('remove the build directory and then rerun setup\\n')\n", (1577, 1630), False, 'import sys\n'), ((1643, 1654), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1651, 1654), False, 'import sys\n'), ((3779, 3812), 'shutil.rmtree', 'shutil.rmtree', (['default_build_path'], {}), '(default_build_path)\n', (3792, 3812), False, 'import shutil\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Oct 31 14:42:37 2019
@author: owenmadin
"""
import numpy
from bayesiantesting.kernels.bayes import ThermodynamicIntegration
from bayesiantesting.models.continuous import GaussianModel
def main():
priors = {"uniform": ("uniform", numpy.array([-5.0, 5.0]))}
# Build the model / models.
model = GaussianModel("gaussian", priors, 0.0, 1.0)
# Draw the initial parameter values from the model priors.
initial_parameters = model.sample_priors()
# Run the simulation
simulation = ThermodynamicIntegration(
legendre_gauss_degree=16,
model=model,
warm_up_steps=100000,
steps=500000,
output_directory_path="gaussian",
)
_, integral, error = simulation.run(initial_parameters, number_of_processes=4)
print("Final Integral:", integral, " +/- ", error)
print("==============================")
if __name__ == "__main__":
main()
|
[
"bayesiantesting.kernels.bayes.ThermodynamicIntegration",
"numpy.array",
"bayesiantesting.models.continuous.GaussianModel"
] |
[((377, 420), 'bayesiantesting.models.continuous.GaussianModel', 'GaussianModel', (['"""gaussian"""', 'priors', '(0.0)', '(1.0)'], {}), "('gaussian', priors, 0.0, 1.0)\n", (390, 420), False, 'from bayesiantesting.models.continuous import GaussianModel\n'), ((575, 712), 'bayesiantesting.kernels.bayes.ThermodynamicIntegration', 'ThermodynamicIntegration', ([], {'legendre_gauss_degree': '(16)', 'model': 'model', 'warm_up_steps': '(100000)', 'steps': '(500000)', 'output_directory_path': '"""gaussian"""'}), "(legendre_gauss_degree=16, model=model,\n warm_up_steps=100000, steps=500000, output_directory_path='gaussian')\n", (599, 712), False, 'from bayesiantesting.kernels.bayes import ThermodynamicIntegration\n'), ((305, 329), 'numpy.array', 'numpy.array', (['[-5.0, 5.0]'], {}), '([-5.0, 5.0])\n', (316, 329), False, 'import numpy\n')]
|
from PIL import Image
import numpy as np
img = Image.open('cifar.png')
pic = np.array(img)
# one random offset per colour channel (pic.shape[-1] values), broadcast over every pixel
noise = np.random.randint(-10, 10, pic.shape[-1])
print(noise.shape)
# clip before casting back to uint8 so the addition cannot wrap around
pic = np.clip(pic + noise, 0, 255)
pic = pic.astype(np.uint8)
asd = Image.fromarray(pic)
|
[
"PIL.Image.fromarray",
"numpy.random.randint",
"numpy.array",
"PIL.Image.open"
] |
[((47, 70), 'PIL.Image.open', 'Image.open', (['"""cifar.png"""'], {}), "('cifar.png')\n", (57, 70), False, 'from PIL import Image\n'), ((77, 90), 'numpy.array', 'np.array', (['img'], {}), '(img)\n', (85, 90), True, 'import numpy as np\n'), ((99, 140), 'numpy.random.randint', 'np.random.randint', (['(-10)', '(10)', 'pic.shape[-1]'], {}), '(-10, 10, pic.shape[-1])\n', (116, 140), True, 'import numpy as np\n'), ((207, 227), 'PIL.Image.fromarray', 'Image.fromarray', (['pic'], {}), '(pic)\n', (222, 227), False, 'from PIL import Image\n')]
|
import torch
from torch.autograd import Variable
from scattering.scattering1d.utils import pad1D, modulus, subsample_fourier
from scattering.scattering1d.utils import compute_border_indices
import numpy as np
import pytest
def test_pad1D(random_state=42):
"""
Tests the correctness and differentiability of pad1D
"""
torch.manual_seed(random_state)
N = 128
for pad_left in range(0, N, 16):
for pad_right in range(0, N, 16):
x = Variable(torch.randn(100, 4, N), requires_grad=True)
x_pad = pad1D(x, pad_left, pad_right, mode='reflect')
# Check the size
x2 = x.data.clone()
x_pad2 = x_pad.data.clone()
for t in range(1, pad_left + 1):
diff = x_pad2[..., pad_left - t] - x2[..., t]
assert torch.max(torch.abs(diff)) <= 1e-7
for t in range(x2.shape[-1]):
diff = x_pad2[..., pad_left + t] - x2[..., t]
assert torch.max(torch.abs(diff)) <= 1e-7
for t in range(1, pad_right + 1):
diff = x_pad2[..., x_pad.shape[-1] - 1 - pad_right + t]
diff -= x2[..., x.shape[-1] - 1 - t]
assert torch.max(torch.abs(diff)) <= 1e-7
# check the differentiability
loss = 0.5 * torch.sum(x_pad**2)
loss.backward()
# compute the theoretical gradient for x
x_grad_original = x.data.clone()
x_grad = x_grad_original.new(x_grad_original.shape).fill_(0.)
x_grad += x_grad_original
for t in range(1, pad_left + 1):
x_grad[..., t] += x_grad_original[..., t]
for t in range(1, pad_right + 1): # it is counted twice!
t0 = x.shape[-1] - 1 - t
x_grad[..., t0] += x_grad_original[..., t0]
# get the difference
diff = x.grad.data - x_grad
assert torch.max(torch.abs(diff)) <= 1e-7
    # Check that pad1D raises an error when the requested pad reaches the signal length
with pytest.raises(ValueError):
pad1D(x, x.shape[-1], 0, mode='reflect')
with pytest.raises(ValueError):
pad1D(x, 0, x.shape[-1], mode='reflect')
def test_modulus(random_state=42):
"""
Tests the stability and differentiability of modulus
"""
torch.manual_seed(random_state)
# Test with a random vector
x = Variable(torch.randn(100, 4, 128, 2), requires_grad=True)
x_abs = modulus(x)
assert len(x_abs.shape) == len(x.shape) - 1
# check the value
x_abs2 = x_abs.data.clone()
x2 = x.data.clone()
diff = x_abs2 - torch.sqrt(x2[..., 0]**2 + x2[..., 1]**2)
assert torch.max(torch.abs(diff)) <= 1e-7
# check the gradient
loss = torch.sum(x_abs)
loss.backward()
x_grad = x2 / x_abs2.unsqueeze(-1)
diff = x.grad.data - x_grad
assert torch.max(torch.abs(diff)) <= 1e-7
# Test the differentiation with a vector made of zeros
x0 = Variable(torch.zeros(100, 4, 128, 2), requires_grad=True)
x_abs0 = modulus(x0)
loss0 = torch.sum(x_abs0)
loss0.backward()
assert torch.max(torch.abs(x0.grad.data)) <= 1e-7
def test_subsample_fourier(random_state=42):
"""
Tests whether the periodization in Fourier performs a good subsampling
in time
"""
rng = np.random.RandomState(random_state)
J = 10
x = rng.randn(100, 4, 2**J) + 1j * rng.randn(100, 4, 2**J)
x_fft = np.fft.fft(x, axis=-1)[..., np.newaxis]
x_fft.dtype = 'float64' # make it a vector
x_fft_th = torch.from_numpy(x_fft)
for j in range(J + 1):
x_fft_sub_th = subsample_fourier(x_fft_th, 2**j)
x_fft_sub = x_fft_sub_th.numpy()
x_fft_sub.dtype = 'complex128'
x_sub = np.fft.ifft(x_fft_sub[..., 0], axis=-1)
assert np.max(np.abs(x[:, :, ::2**j] - x_sub)) < 1e-7
def test_border_indices(random_state=42):
"""
Tests whether the border indices to unpad are well computed
"""
rng = np.random.RandomState(random_state)
J_signal = 10 # signal lives in 2**J_signal
J = 6 # maximal subsampling
T = 2**J_signal
i0 = rng.randint(0, T // 2 + 1, 1)[0]
i1 = rng.randint(i0 + 1, T, 1)[0]
x = np.ones(T)
x[i0:i1] = 0.
ind_start, ind_end = compute_border_indices(J, i0, i1)
for j in range(J + 1):
assert j in ind_start.keys()
assert j in ind_end.keys()
x_sub = x[::2**j]
# check that we did take the strict interior
assert np.max(x_sub[ind_start[j]:ind_end[j]]) == 0.
# check that we have not forgotten points
if ind_start[j] > 0:
assert np.min(x_sub[:ind_start[j]]) > 0.
if ind_end[j] < x_sub.shape[-1]:
assert np.min(x_sub[ind_end[j]:]) > 0.
|
[
"numpy.abs",
"torch.sqrt",
"numpy.ones",
"torch.randn",
"scattering.scattering1d.utils.subsample_fourier",
"numpy.fft.fft",
"numpy.random.RandomState",
"pytest.raises",
"scattering.scattering1d.utils.modulus",
"numpy.max",
"torch.zeros",
"scattering.scattering1d.utils.pad1D",
"numpy.fft.ifft",
"torch.manual_seed",
"scattering.scattering1d.utils.compute_border_indices",
"numpy.min",
"torch.sum",
"torch.from_numpy",
"torch.abs"
] |
[((335, 366), 'torch.manual_seed', 'torch.manual_seed', (['random_state'], {}), '(random_state)\n', (352, 366), False, 'import torch\n'), ((2321, 2352), 'torch.manual_seed', 'torch.manual_seed', (['random_state'], {}), '(random_state)\n', (2338, 2352), False, 'import torch\n'), ((2463, 2473), 'scattering.scattering1d.utils.modulus', 'modulus', (['x'], {}), '(x)\n', (2470, 2473), False, 'from scattering.scattering1d.utils import pad1D, modulus, subsample_fourier\n'), ((2744, 2760), 'torch.sum', 'torch.sum', (['x_abs'], {}), '(x_abs)\n', (2753, 2760), False, 'import torch\n'), ((3038, 3049), 'scattering.scattering1d.utils.modulus', 'modulus', (['x0'], {}), '(x0)\n', (3045, 3049), False, 'from scattering.scattering1d.utils import pad1D, modulus, subsample_fourier\n'), ((3062, 3079), 'torch.sum', 'torch.sum', (['x_abs0'], {}), '(x_abs0)\n', (3071, 3079), False, 'import torch\n'), ((3315, 3350), 'numpy.random.RandomState', 'np.random.RandomState', (['random_state'], {}), '(random_state)\n', (3336, 3350), True, 'import numpy as np\n'), ((3540, 3563), 'torch.from_numpy', 'torch.from_numpy', (['x_fft'], {}), '(x_fft)\n', (3556, 3563), False, 'import torch\n'), ((3980, 4015), 'numpy.random.RandomState', 'np.random.RandomState', (['random_state'], {}), '(random_state)\n', (4001, 4015), True, 'import numpy as np\n'), ((4209, 4219), 'numpy.ones', 'np.ones', (['T'], {}), '(T)\n', (4216, 4219), True, 'import numpy as np\n'), ((4264, 4297), 'scattering.scattering1d.utils.compute_border_indices', 'compute_border_indices', (['J', 'i0', 'i1'], {}), '(J, i0, i1)\n', (4286, 4297), False, 'from scattering.scattering1d.utils import compute_border_indices\n'), ((2046, 2071), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (2059, 2071), False, 'import pytest\n'), ((2081, 2121), 'scattering.scattering1d.utils.pad1D', 'pad1D', (['x', 'x.shape[-1]', '(0)'], {'mode': '"""reflect"""'}), "(x, x.shape[-1], 0, mode='reflect')\n", (2086, 2121), False, 'from scattering.scattering1d.utils import pad1D, modulus, subsample_fourier\n'), ((2131, 2156), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (2144, 2156), False, 'import pytest\n'), ((2166, 2206), 'scattering.scattering1d.utils.pad1D', 'pad1D', (['x', '(0)', 'x.shape[-1]'], {'mode': '"""reflect"""'}), "(x, 0, x.shape[-1], mode='reflect')\n", (2171, 2206), False, 'from scattering.scattering1d.utils import pad1D, modulus, subsample_fourier\n'), ((2402, 2429), 'torch.randn', 'torch.randn', (['(100)', '(4)', '(128)', '(2)'], {}), '(100, 4, 128, 2)\n', (2413, 2429), False, 'import torch\n'), ((2620, 2665), 'torch.sqrt', 'torch.sqrt', (['(x2[..., 0] ** 2 + x2[..., 1] ** 2)'], {}), '(x2[..., 0] ** 2 + x2[..., 1] ** 2)\n', (2630, 2665), False, 'import torch\n'), ((2976, 3003), 'torch.zeros', 'torch.zeros', (['(100)', '(4)', '(128)', '(2)'], {}), '(100, 4, 128, 2)\n', (2987, 3003), False, 'import torch\n'), ((3437, 3459), 'numpy.fft.fft', 'np.fft.fft', (['x'], {'axis': '(-1)'}), '(x, axis=-1)\n', (3447, 3459), True, 'import numpy as np\n'), ((3614, 3649), 'scattering.scattering1d.utils.subsample_fourier', 'subsample_fourier', (['x_fft_th', '(2 ** j)'], {}), '(x_fft_th, 2 ** j)\n', (3631, 3649), False, 'from scattering.scattering1d.utils import pad1D, modulus, subsample_fourier\n'), ((3744, 3783), 'numpy.fft.ifft', 'np.fft.ifft', (['x_fft_sub[..., 0]'], {'axis': '(-1)'}), '(x_fft_sub[..., 0], axis=-1)\n', (3755, 3783), True, 'import numpy as np\n'), ((547, 592), 'scattering.scattering1d.utils.pad1D', 'pad1D', (['x', 'pad_left', 
'pad_right'], {'mode': '"""reflect"""'}), "(x, pad_left, pad_right, mode='reflect')\n", (552, 592), False, 'from scattering.scattering1d.utils import pad1D, modulus, subsample_fourier\n'), ((2683, 2698), 'torch.abs', 'torch.abs', (['diff'], {}), '(diff)\n', (2692, 2698), False, 'import torch\n'), ((2873, 2888), 'torch.abs', 'torch.abs', (['diff'], {}), '(diff)\n', (2882, 2888), False, 'import torch\n'), ((3122, 3145), 'torch.abs', 'torch.abs', (['x0.grad.data'], {}), '(x0.grad.data)\n', (3131, 3145), False, 'import torch\n'), ((4492, 4530), 'numpy.max', 'np.max', (['x_sub[ind_start[j]:ind_end[j]]'], {}), '(x_sub[ind_start[j]:ind_end[j]])\n', (4498, 4530), True, 'import numpy as np\n'), ((483, 505), 'torch.randn', 'torch.randn', (['(100)', '(4)', 'N'], {}), '(100, 4, N)\n', (494, 505), False, 'import torch\n'), ((1317, 1338), 'torch.sum', 'torch.sum', (['(x_pad ** 2)'], {}), '(x_pad ** 2)\n', (1326, 1338), False, 'import torch\n'), ((3806, 3839), 'numpy.abs', 'np.abs', (['(x[:, :, ::2 ** j] - x_sub)'], {}), '(x[:, :, ::2 ** j] - x_sub)\n', (3812, 3839), True, 'import numpy as np\n'), ((4635, 4663), 'numpy.min', 'np.min', (['x_sub[:ind_start[j]]'], {}), '(x_sub[:ind_start[j]])\n', (4641, 4663), True, 'import numpy as np\n'), ((4729, 4755), 'numpy.min', 'np.min', (['x_sub[ind_end[j]:]'], {}), '(x_sub[ind_end[j]:])\n', (4735, 4755), True, 'import numpy as np\n'), ((1951, 1966), 'torch.abs', 'torch.abs', (['diff'], {}), '(diff)\n', (1960, 1966), False, 'import torch\n'), ((834, 849), 'torch.abs', 'torch.abs', (['diff'], {}), '(diff)\n', (843, 849), False, 'import torch\n'), ((996, 1011), 'torch.abs', 'torch.abs', (['diff'], {}), '(diff)\n', (1005, 1011), False, 'import torch\n'), ((1225, 1240), 'torch.abs', 'torch.abs', (['diff'], {}), '(diff)\n', (1234, 1240), False, 'import torch\n')]
|
# -*- coding: utf8 -*-
import os
from shlex import split as shlex_split
from sos.report.plugins import Plugin, RedHatPlugin, DebianPlugin, UbuntuPlugin
from subprocess import check_output, CalledProcessError
from typing import Dict, List, Optional, Tuple
import psycopg2
DEFAULT_DSN = 'postgresql://postgres@localhost/postgres'
class LoggingInfo:
def __init__(self, collect_logs, log_dir, data_dir):
self.collect_logs = collect_logs
self.log_dir = log_dir
self.data_dir = data_dir
class PostgreSQLAlt(Plugin, RedHatPlugin, DebianPlugin, UbuntuPlugin):
"""PostgreSQL alternative collection plugin"""
plugin_name = "postgresql_alt"
requires_root = False
short_desc = 'PostgreSQL alternative collection plugin'
option_list = [
('dsn', 'The PostgreSQL DSN to collect information from.', '', DEFAULT_DSN),
('container_id', 'The docker container id where PostgreSQL runs in.', '', '')
]
@classmethod
def do_connect(cls, dsn: str) -> Tuple[object, Optional[Exception]]:
try:
conn = psycopg2.connect(dsn=dsn)
except psycopg2.Error as err:
return (None, err)
return (conn, None)
@classmethod
def _do_query(cls, conn: object, sql: str) -> Tuple[str, Optional[Exception]]:
try:
with conn.cursor() as cur:
cur.execute(sql)
return (cur.fetchall(), None)
except psycopg2.Error as err:
return (None, err)
@classmethod
def get_config(cls, conn: object) -> Tuple[List, Optional[Exception]]:
sql = 'SELECT name, setting FROM pg_settings ORDER BY name ASC'
return cls._do_query(conn, sql)
@classmethod
def config_to_string(cls, config: List[Tuple[str, str]]) -> str:
def normalize_string(s):
return s if s else "''"
return '\n'.join([f'{key} = {normalize_string(value)}' for key, value in config])
@classmethod
def get_logging_info(cls, conn: object) -> Tuple[LoggingInfo, Optional[Exception]]:
logging_info = LoggingInfo(False, '', '')
try:
with conn.cursor() as cur:
cur.execute('''
SELECT name, setting
FROM pg_settings
WHERE name IN (
'log_destination'
, 'logging_collector'
, 'log_directory'
, 'data_directory'
)''')
logging_config = cur.fetchall()
logging_config = {key:value for key, value in logging_config}
log_destinations = logging_config['log_destination'].split(',')
logging_collector = logging_config['logging_collector']
logging_info.log_dir = logging_config['log_directory']
logging_info.data_dir = logging_config['data_directory']
except psycopg2.Error as err:
return (logging_info, err)
except KeyError as err:
return (logging_info, err)
if any(item in ['stderr', 'csvlog'] for item in log_destinations):
if logging_collector == 'on':
logging_info.collect_logs = True
return (logging_info, None)
@classmethod
def docker_get_data_dir_host(cls, container_id: str, pg_data_dir: str) -> Tuple[str, Optional[Exception]]:
inspect_cmd = "docker inspect -f "
inspect_cmd += "'{{ range .Mounts }}{{ println .Destination .Source }}{{ end }}' "
inspect_cmd += container_id
try:
docker_mounts = check_output(shlex_split(inspect_cmd), universal_newlines=True)
docker_mounts = docker_mounts.split('\n')
data_dir = [mount.split(' ')[1] for mount in docker_mounts if pg_data_dir in mount][1]
except CalledProcessError as err:
return ('', err)
except IndexError as err:
return ('', err)
return (data_dir, None)
@classmethod
def get_s64_license(cls, conn: object) -> Tuple[Dict, str]:
sql = 'SELECT * FROM swarm64da.show_license()'
license_info, err = cls._do_query(conn, sql)
if err:
return (None, err)
if not license_info:
return ({}, err)
license_info = license_info[0]
return ({
'type': license_info[0],
'start': license_info[1],
'expiry': license_info[2],
'customer': license_info[3]
}, err)
def write_output(self, output):
self.add_string_as_file(output, 'postgresql.conf')
def setup(self):
dsn = self.get_option('dsn')
conn, error = PostgreSQLAlt.do_connect(dsn)
if error:
self.write_output(f'Could not connect to PostgreSQL to get config: {error}')
return
config, error = PostgreSQLAlt.get_config(conn)
if error:
self.write_output(f'Could not get PostgreSQL config: {error}')
return
config_str = PostgreSQLAlt.config_to_string(config)
self.write_output(config_str)
logging_info, error = PostgreSQLAlt.get_logging_info(conn)
if error:
self.write_output(f'Could not get log collection info: {error}')
return
container_id = self.get_option('container_id')
if logging_info.collect_logs and container_id:
            data_dir_host, error = PostgreSQLAlt.docker_get_data_dir_host(container_id, logging_info.data_dir)
            if error:
                self.write_output(f'Could not get host data directory: {error}')
            else:
                log_dir_host = os.path.join(data_dir_host, logging_info.log_dir, '*')
                self.add_copy_spec(log_dir_host)
license_info, error = PostgreSQLAlt.get_s64_license(conn)
if error:
self.write_output(f'Could not get Swarm64 license: {error}')
self.write_output(f'Swarm64 license info: {str(license_info)}')
|
[
"os.path.join",
"shlex.split",
"psycopg2.connect"
] |
[((1081, 1106), 'psycopg2.connect', 'psycopg2.connect', ([], {'dsn': 'dsn'}), '(dsn=dsn)\n', (1097, 1106), False, 'import psycopg2\n'), ((5558, 5612), 'os.path.join', 'os.path.join', (['data_dir_host', 'logging_info.log_dir', '"""*"""'], {}), "(data_dir_host, logging_info.log_dir, '*')\n", (5570, 5612), False, 'import os\n'), ((3637, 3661), 'shlex.split', 'shlex_split', (['inspect_cmd'], {}), '(inspect_cmd)\n', (3648, 3661), True, 'from shlex import split as shlex_split\n')]
|
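# A minimal, standalone sketch of the (result, error) tuple pattern that
# PostgreSQLAlt uses around every psycopg2 call above, assuming a server is
# reachable at the plugin's default DSN; sos integration is stripped down to
# the bare query so the pattern itself is visible.
import psycopg2

def fetch_settings(dsn='postgresql://postgres@localhost/postgres'):
    try:
        conn = psycopg2.connect(dsn=dsn)
    except psycopg2.Error as err:
        return (None, err)
    try:
        with conn.cursor() as cur:
            cur.execute('SELECT name, setting FROM pg_settings ORDER BY name ASC')
            return (cur.fetchall(), None)
    except psycopg2.Error as err:
        return (None, err)
    finally:
        conn.close()

settings, error = fetch_settings()
if error:
    print(f'Could not get PostgreSQL config: {error}')
else:
    for name, value in settings:
        print(name, '=', value if value else "''")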
from rest_framework import viewsets, status
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.decorators import action
import json
from .serializers import SEOSerializer
from .utils import website_analysis
class SEOViewSet(viewsets.ViewSet):
"""
    A viewset for creating SEO analyses.
"""
@action(detail = False, methods = ['post'], permission_classes=[IsAuthenticated])
def create(self, request, *args, **kwargs):
serializer = SEOSerializer(data = request.data)
serializer.is_valid(raise_exception = True)
if self.request.user.is_anonymous:
return Response(status = status.HTTP_401_UNAUTHORIZED)
url = serializer.validated_data['url']
user_agent = self.request.user.user_agent
try:
analysis = website_analysis(url, user_agent)
except Exception:
analysis = []
return Response({
"analysis": analysis,
}, status = status.HTTP_200_OK)
|
[
"rest_framework.response.Response",
"rest_framework.decorators.action"
] |
[((377, 453), 'rest_framework.decorators.action', 'action', ([], {'detail': '(False)', 'methods': "['post']", 'permission_classes': '[IsAuthenticated]'}), "(detail=False, methods=['post'], permission_classes=[IsAuthenticated])\n", (383, 453), False, 'from rest_framework.decorators import action\n'), ((963, 1022), 'rest_framework.response.Response', 'Response', (["{'analysis': analysis}"], {'status': 'status.HTTP_200_OK'}), "({'analysis': analysis}, status=status.HTTP_200_OK)\n", (971, 1022), False, 'from rest_framework.response import Response\n'), ((678, 723), 'rest_framework.response.Response', 'Response', ([], {'status': 'status.HTTP_401_UNAUTHORIZED'}), '(status=status.HTTP_401_UNAUTHORIZED)\n', (686, 723), False, 'from rest_framework.response import Response\n')]
|
import logging, yaml, os, sys, json, urllib3, requests
from cerberus import Validator
from files import JSONFile
from common import Singleton
from common import ContextInfo
from urllib.parse import urlparse
from .data_type_config import DataTypeConfig
logger = logging.getLogger(__name__)
class DataFileManager(metaclass=Singleton):
def __init__(self, config_file_loc):
self.context_info = ContextInfo()
# Load config yaml.
logger.debug('Loading config file: %s' % config_file_loc)
        with open(config_file_loc, 'r') as config_file:
            self.config_data = yaml.load(config_file, Loader=yaml.SafeLoader)
logger.debug("Config Data: %s" % self.config_data)
# Load validation yaml.
validation_yaml_file_loc = os.path.abspath('src/config/validation.yml')
logger.debug('Loading validation schema: %s' % validation_yaml_file_loc)
        with open(validation_yaml_file_loc, 'r') as validation_schema_file:
            self.validation_schema = yaml.load(validation_schema_file, Loader=yaml.SafeLoader)
# Assign values for thread counts.
self.FileTransactorThreads = self.config_data['FileTransactorThreads']
# Loading a JSON blurb from a file as a placeholder for submission system query.
other_file_meta_data = os.path.abspath('src/config/local_submission.json')
self.non_submission_system_data = JSONFile().get_data(other_file_meta_data)
urllib3.disable_warnings()
self.http = urllib3.PoolManager()
# use the recently created snapshot
api_url = self.context_info.env["FMS_API_URL"] + '/api/snapshot/release/' + self.context_info.env["ALLIANCE_RELEASE"]
logger.info(api_url)
submission_data = self.http.request('GET', api_url)
if submission_data.status != 200:
logger.error("Status: %s" % submission_data.status)
logger.error("No Data came from API: %s" % api_url)
sys.exit(-1)
self.snapshot_submission_system_data = json.loads(submission_data.data.decode('UTF-8'))
logger.debug(self.snapshot_submission_system_data)
for dataFile in self.non_submission_system_data['snapShot']['dataFiles']:
self.snapshot_submission_system_data['snapShot']['dataFiles'].append(dataFile)
logger.debug(self.snapshot_submission_system_data)
# List used for MOD and data type objects.
self.master_data_dictionary = {}
# Dictionary for transformed submission system data.
self.transformed_submission_system_data = {}
# process config file during initialization
self.process_config()
def get_FT_thread_settings(self):
return self.FileTransactorThreads
def get_config(self, data_type):
# Get the object for a data type. If the object doesn't exist, this returns None.
logger.debug("Getting config for: [%s] -> Config[%s]" % (data_type, self.master_data_dictionary))
return self.master_data_dictionary.get(data_type)
def dispatch_to_object(self):
# This function sends off our data types to become DataTypeConfig objects.
# The smaller SubTypeConfig objects are created in the DataTypeConfig functions, see data_type_config.py.
for config_entry in self.transformed_submission_system_data.keys():
# Skip string entries (e.g. schemaVersion, releaseVersion).
if isinstance(self.transformed_submission_system_data[config_entry], str):
continue
logger.debug('Processing DataType: %s' % config_entry)
# Create our data type object and add it to our master dictionary filed under the config_entry.
# e.g. Create BGI DataTypeConfig object and file it under BGI in the dictionary.
self.master_data_dictionary[config_entry] = DataTypeConfig(config_entry,
self.transformed_submission_system_data[config_entry])
def download_and_validate(self):
logger.debug('Beginning download and validation.')
for entry in self.master_data_dictionary.keys():
logger.debug('Downloading %s data.' % entry)
if isinstance(self.master_data_dictionary[entry], DataTypeConfig): # If we're dealing with an object.
self.master_data_dictionary[entry].get_data()
logger.debug('done with %s data.' % entry)
def process_config(self):
# This checks for the validity of the YAML file.
# See src/config/validation.yml for the layout of the schema.
# TODO Add requirement checking and more validation to the YAML schema.
validator = Validator(self.validation_schema)
validation_results = validator.validate(self.config_data)
if validation_results is True:
logger.debug('Config file validation successful.')
else:
logger.critical('Config file validation unsuccessful!')
for field, values in validator.errors.items():
for value in values: # May have more than one error per field.
message = field + ': ' + value
logger.critical(message)
logger.critical('Exiting')
sys.exit(-1)
# Query the submission system for the required data.
self.query_submission_system()
# Create our DataTypeConfig (which in turn create our SubTypeConfig) objects.
self.dispatch_to_object()
def _search_submission_data(self, dataType, dataSubType):
try:
returned_dict = next(item for item in self.snapshot_submission_system_data['snapShot']['dataFiles']
if item['dataType'].get('name') == dataType and item['dataSubType'].get('name') == dataSubType)
except StopIteration:
logger.debug('dataType: %s subType: %s not found in submission system data.' % (dataType, dataSubType))
logger.debug('Creating entry with \'None\' path and extracted path.')
returned_dict = {
'dataType': dataType,
'subType': dataSubType,
'path': None,
'tempExtractedFile': None
}
return returned_dict
def _query_api_datafile_latest(self, dataType, dataSubType):
api_url = self.context_info.env["FMS_API_URL"] + '/api/datafile/by/' + self.context_info.env["ALLIANCE_RELEASE"] + '/' + dataType + '/' + dataSubType + '?latest=true'
logger.debug(api_url)
submission_data = self.http.request('GET', api_url)
if submission_data.status != 200:
logger.error("Status: %s" % submission_data.status)
logger.error("No Data came from API: %s" % api_url)
sys.exit(-1)
endpoint_submission_system_data = json.loads(submission_data.data.decode('UTF-8'))
logger.debug(endpoint_submission_system_data)
s3Url = endpoint_submission_system_data[0].get('s3Url')
returned_dict = {
'dataType': dataType,
'subType': dataSubType,
's3Url': s3Url,
'tempExtractedFile': None
}
return returned_dict
def query_submission_system(self):
self.transformed_submission_system_data['releaseVersion'] = self.snapshot_submission_system_data['snapShot']['releaseVersion']['releaseVersion']
config_values_to_ignore = [
'releaseVersion', # Manually assigned above.
'schemaVersion', # There is no endpoint for latest schema version in api
'FileTransactorThreads',
'Neo4jTransactorThreads'
]
for datatype in self.config_data.keys(): # Iterate through our config file.
logger.debug("Datatype: %s" % datatype)
if datatype not in config_values_to_ignore: # Skip these entries.
self.transformed_submission_system_data[datatype] = [] # Create our empty list.
for sub_datatype in self.config_data[datatype]:
# to process by querying the api for the latest path
submission_system_dict = self._query_api_datafile_latest(datatype, sub_datatype)
# to process by using the release snapshot for that path
# submission_system_dict = self._search_submission_data(datatype, sub_datatype)
path = submission_system_dict.get('s3Url')
logger.debug("datatype %s sub_datatype %s path %s" % (datatype, sub_datatype, path))
tempExtractedFile = submission_system_dict.get('tempExtractedFile')
if tempExtractedFile is None or tempExtractedFile == '':
tempExtractedFile = urlparse(submission_system_dict.get('s3Url')).path[1:]
tempExtractedFile = os.path.basename(tempExtractedFile)
if tempExtractedFile is not None and len(tempExtractedFile) > 0 and tempExtractedFile.endswith('gz'):
tempExtractedFile = os.path.splitext(tempExtractedFile)[0]
self.transformed_submission_system_data[datatype].append([sub_datatype, path, tempExtractedFile])
else:
logger.debug("Ignoring datatype: %s" % datatype)
logger.debug("Loaded Types: %s" % self.transformed_submission_system_data)
|
[
"common.ContextInfo",
"yaml.load",
"os.path.abspath",
"files.JSONFile",
"os.path.basename",
"urllib3.PoolManager",
"os.path.splitext",
"cerberus.Validator",
"sys.exit",
"urllib3.disable_warnings",
"logging.getLogger"
] |
[((264, 291), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (281, 291), False, 'import logging, yaml, os, sys, json, urllib3, requests\n'), ((413, 426), 'common.ContextInfo', 'ContextInfo', ([], {}), '()\n', (424, 426), False, 'from common import ContextInfo\n'), ((598, 644), 'yaml.load', 'yaml.load', (['config_file'], {'Loader': 'yaml.SafeLoader'}), '(config_file, Loader=yaml.SafeLoader)\n', (607, 644), False, 'import logging, yaml, os, sys, json, urllib3, requests\n'), ((772, 816), 'os.path.abspath', 'os.path.abspath', (['"""src/config/validation.yml"""'], {}), "('src/config/validation.yml')\n", (787, 816), False, 'import logging, yaml, os, sys, json, urllib3, requests\n'), ((1000, 1057), 'yaml.load', 'yaml.load', (['validation_schema_file'], {'Loader': 'yaml.SafeLoader'}), '(validation_schema_file, Loader=yaml.SafeLoader)\n', (1009, 1057), False, 'import logging, yaml, os, sys, json, urllib3, requests\n'), ((1302, 1353), 'os.path.abspath', 'os.path.abspath', (['"""src/config/local_submission.json"""'], {}), "('src/config/local_submission.json')\n", (1317, 1353), False, 'import logging, yaml, os, sys, json, urllib3, requests\n'), ((1446, 1472), 'urllib3.disable_warnings', 'urllib3.disable_warnings', ([], {}), '()\n', (1470, 1472), False, 'import logging, yaml, os, sys, json, urllib3, requests\n'), ((1493, 1514), 'urllib3.PoolManager', 'urllib3.PoolManager', ([], {}), '()\n', (1512, 1514), False, 'import logging, yaml, os, sys, json, urllib3, requests\n'), ((4722, 4755), 'cerberus.Validator', 'Validator', (['self.validation_schema'], {}), '(self.validation_schema)\n', (4731, 4755), False, 'from cerberus import Validator\n'), ((1959, 1971), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (1967, 1971), False, 'import logging, yaml, os, sys, json, urllib3, requests\n'), ((5293, 5305), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (5301, 5305), False, 'import logging, yaml, os, sys, json, urllib3, requests\n'), ((6865, 6877), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (6873, 6877), False, 'import logging, yaml, os, sys, json, urllib3, requests\n'), ((1396, 1406), 'files.JSONFile', 'JSONFile', ([], {}), '()\n', (1404, 1406), False, 'from files import JSONFile\n'), ((8959, 8994), 'os.path.basename', 'os.path.basename', (['tempExtractedFile'], {}), '(tempExtractedFile)\n', (8975, 8994), False, 'import logging, yaml, os, sys, json, urllib3, requests\n'), ((9169, 9204), 'os.path.splitext', 'os.path.splitext', (['tempExtractedFile'], {}), '(tempExtractedFile)\n', (9185, 9204), False, 'import logging, yaml, os, sys, json, urllib3, requests\n')]
|
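# A short sketch of the Cerberus check that process_config() performs above,
# using a small inline schema instead of src/config/validation.yml; the keys
# and values below are illustrative assumptions, not the real schema contents.
from cerberus import Validator

schema = {
    'FileTransactorThreads': {'type': 'integer', 'min': 1},
    'BGI': {'type': 'list', 'schema': {'type': 'string'}},
}
config_data = {'FileTransactorThreads': 4, 'BGI': ['FB', 'ZFIN']}

validator = Validator(schema)
if validator.validate(config_data):
    print('Config file validation successful.')
else:
    for field, values in validator.errors.items():
        for value in values:  # may have more than one error per field
            print(field + ': ' + value)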
from pathlib import Path
import numpy as np
from .config import Config
from .spin import Spin
def load(path: Path) -> Config:
with path.open() as file:
lines = file.readlines()
global_optimum, best_solution = lines[0].split(' ')
global_optimum = float(global_optimum.strip())
best_solution = best_solution.strip()
best_solution = [float(gene) for gene in best_solution]
best_solution = np.array(best_solution)
spin_configs = []
for line in lines[2:]:
a_index, b_index, factor = line.split(' ')
a_index = int(a_index)
b_index = int(b_index)
factor = int(factor)
spin_config = Spin(
a_index,
b_index,
factor
)
spin_configs.append(spin_config)
config = Config(
path.name,
global_optimum,
best_solution,
spin_configs
)
return config
|
[
"numpy.array"
] |
[((445, 468), 'numpy.array', 'np.array', (['best_solution'], {}), '(best_solution)\n', (453, 468), True, 'import numpy as np\n')]
|
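# load() above assumes a plain-text instance file whose first line holds the
# global optimum and the best-solution string, whose second line is ignored,
# and whose remaining lines are "a_index b_index factor" triples.  The tiny
# instance below is made up to illustrate that layout without the package's
# Config and Spin classes.
import numpy as np

text = "-6.0 0101\n4 4\n0 1 -1\n1 2 1\n2 3 -1\n"
lines = text.splitlines(keepends=True)
global_optimum, best_solution = lines[0].split(' ')
global_optimum = float(global_optimum.strip())
best_solution = np.array([float(gene) for gene in best_solution.strip()])
spins = [tuple(int(v) for v in line.split(' ')) for line in lines[2:]]
print(global_optimum, best_solution, spins)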
from typing import List, Optional, Sequence, Union
import os
from aiocache import cached, Cache
from aiocache.serializers import PickleSerializer
from asyncpg import Connection, Record
from pypika import Query
from app.db.errors import EntityDoesNotExist
from app.db.queries.queries import queries
from app.db.queries.tables import (
Parameter,
articles,
articles_to_tags,
favorites,
tags as tags_table,
users,
)
from app.db.repositories.base import BaseRepository
from app.db.repositories.profiles import ProfilesRepository
from app.db.repositories.tags import TagsRepository
from app.models.domain.articles import Article
from app.models.domain.users import User
from app.db.caches import key_builder
AUTHOR_USERNAME_ALIAS = "author_username"
SLUG_ALIAS = "slug"
CAMEL_OR_SNAKE_CASE_TO_WORDS = r"^[a-z\d_\-]+|[A-Z\d_\-][^A-Z\d_\-]*"
class ArticlesRepository(BaseRepository): # noqa: WPS214
def __init__(self, conn: Connection) -> None:
super().__init__(conn)
self._profiles_repo = ProfilesRepository(conn)
self._tags_repo = TagsRepository(conn)
async def create_article( # noqa: WPS211
self,
*,
slug: str,
title: str,
description: str,
body: str,
author: User,
tags: Optional[Sequence[str]] = None,
) -> Article:
async with self.connection.transaction():
article_row = await queries.create_new_article(
self.connection,
slug=slug,
title=title,
description=description,
body=body,
author_username=author.username,
)
if tags:
await self._tags_repo.create_tags_that_dont_exist(tags=tags)
await self._link_article_with_tags(slug=slug, tags=tags)
return await self._get_article_from_db_record(
article_row=article_row,
slug=slug,
author_username=article_row[AUTHOR_USERNAME_ALIAS],
requested_user=author,
)
async def update_article( # noqa: WPS211
self,
*,
article: Article,
slug: Optional[str] = None,
title: Optional[str] = None,
body: Optional[str] = None,
description: Optional[str] = None,
) -> Article:
updated_article = article.copy(deep=True)
updated_article.slug = slug or updated_article.slug
updated_article.title = title or article.title
updated_article.body = body or article.body
updated_article.description = description or article.description
async with self.connection.transaction():
updated_article.updated_at = await queries.update_article(
self.connection,
slug=article.slug,
author_username=article.author.username,
new_slug=updated_article.slug,
new_title=updated_article.title,
new_body=updated_article.body,
new_description=updated_article.description,
)
return updated_article
async def delete_article(self, *, article: Article) -> None:
async with self.connection.transaction():
await queries.delete_article(
self.connection,
slug=article.slug,
author_username=article.author.username,
)
async def filter_articles( # noqa: WPS211
self,
*,
tag: Optional[str] = None,
author: Optional[str] = None,
favorited: Optional[str] = None,
limit: int = 20,
offset: int = 0,
requested_user: Optional[User] = None,
) -> List[Article]:
query_params: List[Union[str, int]] = []
query_params_count = 0
# fmt: off
query = Query.from_(
articles,
).select(
articles.id,
articles.slug,
articles.title,
articles.description,
articles.body,
articles.created_at,
articles.updated_at,
Query.from_(
users,
).where(
users.id == articles.author_id,
).select(
users.username,
).as_(
AUTHOR_USERNAME_ALIAS,
),
)
# fmt: on
if tag:
query_params.append(tag)
query_params_count += 1
# fmt: off
query = query.join(
articles_to_tags,
).on(
(articles.id == articles_to_tags.article_id) & (
articles_to_tags.tag == Query.from_(
tags_table,
).where(
tags_table.tag == Parameter(query_params_count),
).select(
tags_table.tag,
)
),
)
# fmt: on
if author:
query_params.append(author)
query_params_count += 1
# fmt: off
query = query.join(
users,
).on(
(articles.author_id == users.id) & (
users.id == Query.from_(
users,
).where(
users.username == Parameter(query_params_count),
).select(
users.id,
)
),
)
# fmt: on
if favorited:
query_params.append(favorited)
query_params_count += 1
# fmt: off
query = query.join(
favorites,
).on(
(articles.id == favorites.article_id) & (
favorites.user_id == Query.from_(
users,
).where(
users.username == Parameter(query_params_count),
).select(
users.id,
)
),
)
# fmt: on
query = query.limit(Parameter(query_params_count + 1)).offset(
Parameter(query_params_count + 2),
)
query_params.extend([limit, offset])
articles_rows = await self.connection.fetch(query.get_sql(), *query_params)
return [
await self._get_article_from_db_record(
article_row=article_row,
slug=article_row[SLUG_ALIAS],
author_username=article_row[AUTHOR_USERNAME_ALIAS],
requested_user=requested_user,
)
for article_row in articles_rows
]
@cached(cache=Cache.REDIS,
serializer=PickleSerializer(),
endpoint=os.environ.get('REDIS_HOST'),
key_builder=key_builder)
async def get_articles_for_user_feed(
self,
*,
user: User,
limit: int = 20,
offset: int = 0,
) -> List[Article]:
articles_rows = await queries.get_articles_for_feed(
self.connection,
follower_username=user.username,
limit=limit,
offset=offset,
)
return [
await self._get_article_from_db_record(
article_row=article_row,
slug=article_row[SLUG_ALIAS],
author_username=article_row[AUTHOR_USERNAME_ALIAS],
requested_user=user,
)
for article_row in articles_rows
]
@cached(cache=Cache.REDIS,
serializer=PickleSerializer(),
endpoint=os.environ.get('REDIS_HOST'),
key_builder=key_builder)
async def get_article_by_slug(
self,
*,
slug: str,
requested_user: Optional[User] = None,
) -> Article:
article_row = await queries.get_article_by_slug(self.connection, slug=slug)
if article_row:
return await self._get_article_from_db_record(
article_row=article_row,
slug=article_row[SLUG_ALIAS],
author_username=article_row[AUTHOR_USERNAME_ALIAS],
requested_user=requested_user,
)
raise EntityDoesNotExist("article with slug {0} does not exist".format(slug))
@cached(cache=Cache.REDIS,
serializer=PickleSerializer(),
endpoint=os.environ.get('REDIS_HOST'),
key_builder=key_builder)
async def get_tags_for_article_by_slug(self, *, slug: str) -> List[str]:
tag_rows = await queries.get_tags_for_article_by_slug(
self.connection,
slug=slug,
)
return [row["tag"] for row in tag_rows]
@cached(cache=Cache.REDIS,
serializer=PickleSerializer(),
endpoint=os.environ.get('REDIS_HOST'),
key_builder=key_builder)
async def get_favorites_count_for_article_by_slug(self, *, slug: str) -> int:
return (
await queries.get_favorites_count_for_article(self.connection, slug=slug)
)["favorites_count"]
async def is_article_favorited_by_user(self, *, slug: str, user: User) -> bool:
return (
await queries.is_article_in_favorites(
self.connection,
username=user.username,
slug=slug,
)
)["favorited"]
async def add_article_into_favorites(self, *, article: Article, user: User) -> None:
await queries.add_article_to_favorites(
self.connection,
username=user.username,
slug=article.slug,
)
async def remove_article_from_favorites(
self,
*,
article: Article,
user: User,
) -> None:
await queries.remove_article_from_favorites(
self.connection,
username=user.username,
slug=article.slug,
)
@cached(cache=Cache.REDIS,
serializer=PickleSerializer(),
endpoint=os.environ.get('REDIS_HOST'),
key_builder=key_builder)
async def _get_article_from_db_record(
self,
*,
article_row: Record,
slug: str,
author_username: str,
requested_user: Optional[User],
) -> Article:
return Article(
id_=article_row["id"],
slug=slug,
title=article_row["title"],
description=article_row["description"],
body=article_row["body"],
author=await self._profiles_repo.get_profile_by_username(
username=author_username,
requested_user=requested_user,
),
tags=await self.get_tags_for_article_by_slug(slug=slug),
favorites_count=await self.get_favorites_count_for_article_by_slug(
slug=slug,
),
favorited=await self.is_article_favorited_by_user(
slug=slug,
user=requested_user,
)
if requested_user
else False,
created_at=article_row["created_at"],
updated_at=article_row["updated_at"],
)
async def _link_article_with_tags(self, *, slug: str, tags: Sequence[str]) -> None:
await queries.add_tags_to_article(
self.connection,
[{SLUG_ALIAS: slug, "tag": tag} for tag in tags],
)
|
[
"app.db.queries.queries.queries.add_tags_to_article",
"app.db.queries.tables.Parameter",
"app.db.repositories.tags.TagsRepository",
"app.db.queries.queries.queries.add_article_to_favorites",
"app.db.queries.queries.queries.remove_article_from_favorites",
"app.db.queries.queries.queries.get_article_by_slug",
"app.db.queries.queries.queries.get_articles_for_feed",
"app.db.repositories.profiles.ProfilesRepository",
"os.environ.get",
"app.db.queries.queries.queries.delete_article",
"app.db.queries.queries.queries.is_article_in_favorites",
"app.db.queries.queries.queries.create_new_article",
"aiocache.serializers.PickleSerializer",
"app.db.queries.queries.queries.get_favorites_count_for_article",
"app.db.queries.queries.queries.get_tags_for_article_by_slug",
"app.db.queries.queries.queries.update_article",
"pypika.Query.from_"
] |
[((1034, 1058), 'app.db.repositories.profiles.ProfilesRepository', 'ProfilesRepository', (['conn'], {}), '(conn)\n', (1052, 1058), False, 'from app.db.repositories.profiles import ProfilesRepository\n'), ((1085, 1105), 'app.db.repositories.tags.TagsRepository', 'TagsRepository', (['conn'], {}), '(conn)\n', (1099, 1105), False, 'from app.db.repositories.tags import TagsRepository\n'), ((6226, 6259), 'app.db.queries.tables.Parameter', 'Parameter', (['(query_params_count + 2)'], {}), '(query_params_count + 2)\n', (6235, 6259), False, 'from app.db.queries.tables import Parameter, articles, articles_to_tags, favorites, tags as tags_table, users\n'), ((7096, 7208), 'app.db.queries.queries.queries.get_articles_for_feed', 'queries.get_articles_for_feed', (['self.connection'], {'follower_username': 'user.username', 'limit': 'limit', 'offset': 'offset'}), '(self.connection, follower_username=user.\n username, limit=limit, offset=offset)\n', (7125, 7208), False, 'from app.db.queries.queries import queries\n'), ((6797, 6815), 'aiocache.serializers.PickleSerializer', 'PickleSerializer', ([], {}), '()\n', (6813, 6815), False, 'from aiocache.serializers import PickleSerializer\n'), ((6838, 6866), 'os.environ.get', 'os.environ.get', (['"""REDIS_HOST"""'], {}), "('REDIS_HOST')\n", (6852, 6866), False, 'import os\n'), ((7928, 7983), 'app.db.queries.queries.queries.get_article_by_slug', 'queries.get_article_by_slug', (['self.connection'], {'slug': 'slug'}), '(self.connection, slug=slug)\n', (7955, 7983), False, 'from app.db.queries.queries import queries\n'), ((7648, 7666), 'aiocache.serializers.PickleSerializer', 'PickleSerializer', ([], {}), '()\n', (7664, 7666), False, 'from aiocache.serializers import PickleSerializer\n'), ((7689, 7717), 'os.environ.get', 'os.environ.get', (['"""REDIS_HOST"""'], {}), "('REDIS_HOST')\n", (7703, 7717), False, 'import os\n'), ((8635, 8699), 'app.db.queries.queries.queries.get_tags_for_article_by_slug', 'queries.get_tags_for_article_by_slug', (['self.connection'], {'slug': 'slug'}), '(self.connection, slug=slug)\n', (8671, 8699), False, 'from app.db.queries.queries import queries\n'), ((8425, 8443), 'aiocache.serializers.PickleSerializer', 'PickleSerializer', ([], {}), '()\n', (8441, 8443), False, 'from aiocache.serializers import PickleSerializer\n'), ((8466, 8494), 'os.environ.get', 'os.environ.get', (['"""REDIS_HOST"""'], {}), "('REDIS_HOST')\n", (8480, 8494), False, 'import os\n'), ((8838, 8856), 'aiocache.serializers.PickleSerializer', 'PickleSerializer', ([], {}), '()\n', (8854, 8856), False, 'from aiocache.serializers import PickleSerializer\n'), ((8879, 8907), 'os.environ.get', 'os.environ.get', (['"""REDIS_HOST"""'], {}), "('REDIS_HOST')\n", (8893, 8907), False, 'import os\n'), ((9554, 9650), 'app.db.queries.queries.queries.add_article_to_favorites', 'queries.add_article_to_favorites', (['self.connection'], {'username': 'user.username', 'slug': 'article.slug'}), '(self.connection, username=user.username,\n slug=article.slug)\n', (9586, 9650), False, 'from app.db.queries.queries import queries\n'), ((9840, 9942), 'app.db.queries.queries.queries.remove_article_from_favorites', 'queries.remove_article_from_favorites', (['self.connection'], {'username': 'user.username', 'slug': 'article.slug'}), '(self.connection, username=user.\n username, slug=article.slug)\n', (9877, 9942), False, 'from app.db.queries.queries import queries\n'), ((10040, 10058), 'aiocache.serializers.PickleSerializer', 'PickleSerializer', ([], {}), '()\n', (10056, 10058), False, 'from 
aiocache.serializers import PickleSerializer\n'), ((10081, 10109), 'os.environ.get', 'os.environ.get', (['"""REDIS_HOST"""'], {}), "('REDIS_HOST')\n", (10095, 10109), False, 'import os\n'), ((11337, 11436), 'app.db.queries.queries.queries.add_tags_to_article', 'queries.add_tags_to_article', (['self.connection', "[{SLUG_ALIAS: slug, 'tag': tag} for tag in tags]"], {}), "(self.connection, [{SLUG_ALIAS: slug, 'tag': tag\n } for tag in tags])\n", (11364, 11436), False, 'from app.db.queries.queries import queries\n'), ((1430, 1570), 'app.db.queries.queries.queries.create_new_article', 'queries.create_new_article', (['self.connection'], {'slug': 'slug', 'title': 'title', 'description': 'description', 'body': 'body', 'author_username': 'author.username'}), '(self.connection, slug=slug, title=title,\n description=description, body=body, author_username=author.username)\n', (1456, 1570), False, 'from app.db.queries.queries import queries\n'), ((2731, 2985), 'app.db.queries.queries.queries.update_article', 'queries.update_article', (['self.connection'], {'slug': 'article.slug', 'author_username': 'article.author.username', 'new_slug': 'updated_article.slug', 'new_title': 'updated_article.title', 'new_body': 'updated_article.body', 'new_description': 'updated_article.description'}), '(self.connection, slug=article.slug, author_username=\n article.author.username, new_slug=updated_article.slug, new_title=\n updated_article.title, new_body=updated_article.body, new_description=\n updated_article.description)\n', (2753, 2985), False, 'from app.db.queries.queries import queries\n'), ((3264, 3368), 'app.db.queries.queries.queries.delete_article', 'queries.delete_article', (['self.connection'], {'slug': 'article.slug', 'author_username': 'article.author.username'}), '(self.connection, slug=article.slug, author_username=\n article.author.username)\n', (3286, 3368), False, 'from app.db.queries.queries import queries\n'), ((3851, 3872), 'pypika.Query.from_', 'Query.from_', (['articles'], {}), '(articles)\n', (3862, 3872), False, 'from pypika import Query\n'), ((9063, 9130), 'app.db.queries.queries.queries.get_favorites_count_for_article', 'queries.get_favorites_count_for_article', (['self.connection'], {'slug': 'slug'}), '(self.connection, slug=slug)\n', (9102, 9130), False, 'from app.db.queries.queries import queries\n'), ((9280, 9367), 'app.db.queries.queries.queries.is_article_in_favorites', 'queries.is_article_in_favorites', (['self.connection'], {'username': 'user.username', 'slug': 'slug'}), '(self.connection, username=user.username,\n slug=slug)\n', (9311, 9367), False, 'from app.db.queries.queries import queries\n'), ((6171, 6204), 'app.db.queries.tables.Parameter', 'Parameter', (['(query_params_count + 1)'], {}), '(query_params_count + 1)\n', (6180, 6204), False, 'from app.db.queries.tables import Parameter, articles, articles_to_tags, favorites, tags as tags_table, users\n'), ((4123, 4141), 'pypika.Query.from_', 'Query.from_', (['users'], {}), '(users)\n', (4134, 4141), False, 'from pypika import Query\n'), ((4690, 4713), 'pypika.Query.from_', 'Query.from_', (['tags_table'], {}), '(tags_table)\n', (4701, 4713), False, 'from pypika import Query\n'), ((4810, 4839), 'app.db.queries.tables.Parameter', 'Parameter', (['query_params_count'], {}), '(query_params_count)\n', (4819, 4839), False, 'from app.db.queries.tables import Parameter, articles, articles_to_tags, favorites, tags as tags_table, users\n'), ((5266, 5284), 'pypika.Query.from_', 'Query.from_', (['users'], {}), '(users)\n', (5277, 5284), 
False, 'from pypika import Query\n'), ((5381, 5410), 'app.db.queries.tables.Parameter', 'Parameter', (['query_params_count'], {}), '(query_params_count)\n', (5390, 5410), False, 'from app.db.queries.tables import Parameter, articles, articles_to_tags, favorites, tags as tags_table, users\n'), ((5855, 5873), 'pypika.Query.from_', 'Query.from_', (['users'], {}), '(users)\n', (5866, 5873), False, 'from pypika import Query\n'), ((5970, 5999), 'app.db.queries.tables.Parameter', 'Parameter', (['query_params_count'], {}), '(query_params_count)\n', (5979, 5999), False, 'from app.db.queries.tables import Parameter, articles, articles_to_tags, favorites, tags as tags_table, users\n')]
|
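# ArticlesRepository above memoises its read queries with aiocache's @cached
# decorator.  A minimal sketch of the same decorator wired to the in-memory
# backend so it runs without Redis; this key builder is a stand-in, since the
# real app.db.caches.key_builder is not shown above.
import asyncio
from aiocache import cached, Cache
from aiocache.serializers import PickleSerializer

def key_builder(func, *args, **kwargs):
    return f"{func.__name__}:{kwargs.get('slug')}"

@cached(cache=Cache.MEMORY, serializer=PickleSerializer(), key_builder=key_builder)
async def get_tags_for_article_by_slug(*, slug: str):
    print('cache miss for', slug)
    return ['dragons', 'training']

async def main():
    print(await get_tags_for_article_by_slug(slug='how-to-train-your-dragon'))
    print(await get_tags_for_article_by_slug(slug='how-to-train-your-dragon'))  # served from cache

asyncio.run(main())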
"""
Server for publication db
"""
import os
import logging
import binascii
from functools import wraps
from urllib.parse import urlparse
import base64
import csv
from io import StringIO
import itertools
from tornado.web import RequestHandler, HTTPError
from rest_tools.server import RestServer, from_environment, catch_error
import motor.motor_asyncio
import pymongo
from bson.objectid import ObjectId
from . import __version__ as version
from . import PUBLICATION_TYPES, PROJECTS, SITES
from .utils import create_indexes, date_format, add_pub, edit_pub, try_import_file
logger = logging.getLogger('server')
def basic_auth(method):
@wraps(method)
async def wrapper(self, *args, **kwargs):
if not self.current_user:
header = self.request.headers.get('Authorization')
if header is None or not header.lower().startswith('basic '):
self.set_header('WWW-Authenticate', 'Basic realm=IceCube')
self.set_status(401)
self.finish()
return
raise HTTPError(403, reason="authentication failed")
return await method(self, *args, **kwargs)
return wrapper
def get_domain(link):
"""Get domain name of a url"""
if (not link.startswith('http')) and not link.startswith('//'):
link = f'//{link}'
return urlparse(link).netloc
class BaseHandler(RequestHandler):
def initialize(self, db=None, basic_auth=None, debug=False, **kwargs):
super().initialize(**kwargs)
self.db = db
self.basic_auth = basic_auth if basic_auth else {}
self.debug = debug
def set_default_headers(self):
self._headers['Server'] = f'Pub DB {version}'
def get_template_namespace(self):
namespace = super().get_template_namespace()
namespace['domain'] = get_domain
namespace['date_format'] = date_format
namespace['experiment'] = 'IceCube'
namespace['title'] = ''
namespace['PUBLICATION_TYPES'] = PUBLICATION_TYPES
namespace['PROJECTS'] = PROJECTS
namespace['SITES'] = SITES
namespace['error'] = None
namespace['edit'] = False
return namespace
def get_current_user(self):
try:
type, data = self.request.headers['Authorization'].split(' ', 1)
if type.lower() != 'basic':
raise Exception('bad header type')
logger.debug(f'auth data: {data}')
auth_decoded = base64.b64decode(data).decode('ascii')
username, password = str(auth_decoded).split(':', 1)
if self.basic_auth.get(username, None) == password:
return username
except Exception:
if self.debug and 'Authorization' in self.request.headers:
logger.info('Authorization: %r', self.request.headers['Authorization'])
logger.info('failed auth', exc_info=True)
return None
def args_to_match_query(self):
match = {}
if projects := self.get_arguments('projects'):
match['projects'] = {"$all": projects}
if sites := self.get_arguments('sites'):
match['sites'] = {"$all": sites}
start = self.get_argument('start_date', '')
end = self.get_argument('end_date', '')
if start and end:
match['date'] = {"$gte": start, "$lte": end}
elif start:
match['date'] = {"$gte": start}
elif end:
match['date'] = {"$lte": end}
if types := self.get_arguments('type'):
match['type'] = {"$in": types}
if search := self.get_argument('search', ''):
match['$text'] = {"$search": search}
if authors := self.get_arguments('authors'):
match['authors'] = {"$all": authors}
return match, {
'projects': projects,
'sites': sites,
'start_date': start,
'end_date': end,
'type': types,
'search': search,
'authors': authors,
}
async def count_pubs(self):
match, _ = self.args_to_match_query()
return await self.db.publications.count_documents(match)
async def get_pubs(self, mongoid=False):
match, args = self.args_to_match_query()
kwargs = {}
if not mongoid:
kwargs['projection'] = {'_id': False}
if page := self.get_argument('page', None):
page = int(page)
if limit := self.get_argument('limit', None):
limit = int(limit)
pubs = []
i = -1
async for row in self.db.publications.find(match, **kwargs).sort('date', pymongo.DESCENDING):
i += 1
if mongoid:
row['_id'] = str(row['_id'])
if page is not None and limit and i < page*limit:
continue
if 'projects' in row:
row['projects'].sort()
if 'sites' in row:
row['sites'].sort()
pubs.append(row)
if page is not None and limit and len(pubs) >= limit:
break
args['publications'] = pubs
return args
async def get_authors(self):
aggregation = [
{"$unwind": "$authors"},
{"$group": {
"_id": 0,
"authornames": {"$addToSet": "$authors"}
}}
]
authors = []
async for row in self.db.publications.aggregate(aggregation):
authors = row["authornames"]
return authors
class Main(BaseHandler):
async def get(self):
hide_projects = self.get_argument('hide_projects', 'false').lower() == 'true'
pubs = await self.get_pubs()
self.render('main.html', **pubs, hide_projects=hide_projects)
class CSV(BaseHandler):
async def get(self):
pubs = await self.get_pubs()
f = StringIO()
writer = csv.DictWriter(f, fieldnames=list(pubs['publications'][0].keys()))
writer.writeheader()
for p in pubs['publications']:
data = {}
for k in p:
if isinstance(p[k], list):
data[k] = ','.join(p[k])
else:
data[k] = p[k]
writer.writerow(data)
self.write(f.getvalue())
self.set_header('Content-Type', 'text/csv; charset=utf-8')
class Manage(BaseHandler):
@catch_error
@basic_auth
async def get(self):
existing_authors = await self.get_authors()
pubs = await self.get_pubs(mongoid=True)
self.render('manage.html', message='', existing_authors=existing_authors, **pubs)
@catch_error
@basic_auth
async def post(self):
message = ''
try:
if action := self.get_argument('action', None):
if action == 'delete':
mongoid = ObjectId(self.get_argument('pub_id'))
await self.db.publications.delete_one({'_id': mongoid})
elif action == 'new':
doc = {
'title': self.get_argument('new_title').strip(),
'authors': [a.strip() for a in self.get_argument('new_authors').split('\n') if a.strip()],
'date': self.get_argument('new_date'),
'pub_type': self.get_argument('new_type'),
'citation': self.get_argument('new_citation').strip(),
'downloads': [d.strip() for d in self.get_argument('new_downloads').split('\n') if d.strip()],
'projects': self.get_arguments('new_projects'),
'sites': self.get_arguments('new_sites'),
}
await add_pub(db=self.db, **doc)
elif action == 'edit':
mongoid = ObjectId(self.get_argument('pub_id'))
doc = {
'title': self.get_argument('new_title').strip(),
'authors': [a.strip() for a in self.get_argument('new_authors').split('\n') if a.strip()],
'date': self.get_argument('new_date'),
'pub_type': self.get_argument('new_type'),
'citation': self.get_argument('new_citation').strip(),
'downloads': [d.strip() for d in self.get_argument('new_downloads').split('\n') if d.strip()],
'projects': self.get_arguments('new_projects'),
'sites': self.get_arguments('new_sites'),
}
await edit_pub(db=self.db, mongo_id=mongoid, **doc)
elif action == 'import':
if not self.request.files:
raise Exception('no files uploaded')
for files in itertools.chain(self.request.files.values()):
for f in files:
await try_import_file(self.db, f.body.decode('utf-8-sig'))
else:
raise Exception('bad action')
except Exception as e:
if self.debug:
logging.debug('manage error', exc_info=True)
message = f'Error: {e}'
existing_authors = await self.get_authors()
pubs = await self.get_pubs(mongoid=True)
self.render('manage.html', message=message, existing_authors=existing_authors, **pubs)
class APIBaseHandler(BaseHandler):
def write_error(self, status_code=500, **kwargs):
"""Write out custom error json."""
data = {
'code': status_code,
'error': self._reason,
}
self.write(data)
self.finish()
class APIPubs(APIBaseHandler):
@catch_error
async def get(self):
pubs = await self.get_pubs()
self.write(pubs)
class APIPubsCount(APIBaseHandler):
@catch_error
async def get(self):
pubs = await self.count_pubs()
self.write({"count": pubs})
class APIFilterDefaults(APIBaseHandler):
@catch_error
async def get(self):
self.write({
'projects': [],
'sites': [],
'start_date': '',
'end_date': '',
'type': [],
'search': '',
'authors': [],
'hide_projects': False,
})
class APITypes(APIBaseHandler):
@catch_error
async def get(self):
self.write(PUBLICATION_TYPES)
class APIProjects(APIBaseHandler):
@catch_error
async def get(self):
self.write(PROJECTS)
def create_server():
static_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'static')
template_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'templates')
default_config = {
'HOST': 'localhost',
'PORT': 8080,
'DEBUG': False,
'DB_URL': 'mongodb://localhost/pub_db',
'COOKIE_SECRET': binascii.hexlify(b'secret').decode('utf-8'),
'BASIC_AUTH': '', # user:pass,user:pass
}
config = from_environment(default_config)
logging.info(f'DB: {config["DB_URL"]}')
db_url, db_name = config['DB_URL'].rsplit('/', 1)
logging.info(f'DB name: {db_name}')
db = motor.motor_asyncio.AsyncIOMotorClient(db_url)
create_indexes(db_url, db_name)
users = {v.split(':')[0]: v.split(':')[1] for v in config['BASIC_AUTH'].split(',') if v}
logging.info(f'BASIC_AUTH users: {users.keys()}')
main_args = {
'debug': config['DEBUG'],
'db': db[db_name],
'basic_auth': users,
}
server = RestServer(static_path=static_path, template_path=template_path,
cookie_secret=config['COOKIE_SECRET'], xsrf_cookies=True,
debug=config['DEBUG'])
server.add_route(r'/', Main, main_args)
server.add_route(r'/csv', CSV, main_args)
server.add_route(r'/manage', Manage, main_args)
server.add_route(r'/api/publications', APIPubs, main_args)
server.add_route(r'/api/publications/count', APIPubsCount, main_args)
server.add_route(r'/api/filter_defaults', APIFilterDefaults, main_args)
server.add_route(r'/api/types', APITypes, main_args)
server.add_route(r'/api/projects', APIProjects, main_args)
server.startup(address=config['HOST'], port=config['PORT'])
return server
|
[
"io.StringIO",
"tornado.web.HTTPError",
"os.path.abspath",
"rest_tools.server.RestServer",
"logging.debug",
"binascii.hexlify",
"urllib.parse.urlparse",
"base64.b64decode",
"logging.info",
"functools.wraps",
"rest_tools.server.from_environment",
"logging.getLogger"
] |
[((584, 611), 'logging.getLogger', 'logging.getLogger', (['"""server"""'], {}), "('server')\n", (601, 611), False, 'import logging\n'), ((642, 655), 'functools.wraps', 'wraps', (['method'], {}), '(method)\n', (647, 655), False, 'from functools import wraps\n'), ((11068, 11100), 'rest_tools.server.from_environment', 'from_environment', (['default_config'], {}), '(default_config)\n', (11084, 11100), False, 'from rest_tools.server import RestServer, from_environment, catch_error\n'), ((11106, 11145), 'logging.info', 'logging.info', (['f"""DB: {config[\'DB_URL\']}"""'], {}), '(f"DB: {config[\'DB_URL\']}")\n', (11118, 11145), False, 'import logging\n'), ((11204, 11239), 'logging.info', 'logging.info', (['f"""DB name: {db_name}"""'], {}), "(f'DB name: {db_name}')\n", (11216, 11239), False, 'import logging\n'), ((11609, 11763), 'rest_tools.server.RestServer', 'RestServer', ([], {'static_path': 'static_path', 'template_path': 'template_path', 'cookie_secret': "config['COOKIE_SECRET']", 'xsrf_cookies': '(True)', 'debug': "config['DEBUG']"}), "(static_path=static_path, template_path=template_path,\n cookie_secret=config['COOKIE_SECRET'], xsrf_cookies=True, debug=config[\n 'DEBUG'])\n", (11619, 11763), False, 'from rest_tools.server import RestServer, from_environment, catch_error\n'), ((1337, 1351), 'urllib.parse.urlparse', 'urlparse', (['link'], {}), '(link)\n', (1345, 1351), False, 'from urllib.parse import urlparse\n'), ((5902, 5912), 'io.StringIO', 'StringIO', ([], {}), '()\n', (5910, 5912), False, 'from io import StringIO\n'), ((1056, 1102), 'tornado.web.HTTPError', 'HTTPError', (['(403)'], {'reason': '"""authentication failed"""'}), "(403, reason='authentication failed')\n", (1065, 1102), False, 'from tornado.web import RequestHandler, HTTPError\n'), ((10655, 10680), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (10670, 10680), False, 'import os\n'), ((10742, 10767), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (10757, 10767), False, 'import os\n'), ((10955, 10982), 'binascii.hexlify', 'binascii.hexlify', (["b'secret'"], {}), "(b'secret')\n", (10971, 10982), False, 'import binascii\n'), ((2476, 2498), 'base64.b64decode', 'base64.b64decode', (['data'], {}), '(data)\n', (2492, 2498), False, 'import base64\n'), ((9184, 9228), 'logging.debug', 'logging.debug', (['"""manage error"""'], {'exc_info': '(True)'}), "('manage error', exc_info=True)\n", (9197, 9228), False, 'import logging\n')]
|
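# get_current_user() above authenticates requests from a standard HTTP Basic
# Authorization header.  A small sketch of both sides of that exchange, with a
# made-up username/password pair:
import base64

def make_basic_header(username, password):
    token = base64.b64encode(f'{username}:{password}'.encode('ascii')).decode('ascii')
    return 'Basic ' + token

def check_basic_header(header, basic_auth):
    type_, data = header.split(' ', 1)
    if type_.lower() != 'basic':
        return None
    auth_decoded = base64.b64decode(data).decode('ascii')
    username, password = auth_decoded.split(':', 1)
    return username if basic_auth.get(username) == password else None

basic_auth = {'editor': 'hunter2'}
print(check_basic_header(make_basic_header('editor', 'hunter2'), basic_auth))  # editor
print(check_basic_header(make_basic_header('editor', 'wrong'), basic_auth))    # None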
import lyse
import runmanager.remote as rm
import numpy as np
import mloop_config
import sys
import logging
import os
from labscript_utils.setup_logging import LOG_PATH
try:
from labscript_utils import check_version
except ImportError:
raise ImportError('Require labscript_utils > 2.1.0')
check_version('lyse', '2.5.0', '4.0')
check_version('zprocess', '2.13.1', '4.0')
check_version('labscript_utils', '2.12.5', '4.0')
def configure_logging(config):
console_log_level = config['analysislib_console_log_level']
file_log_level = config['analysislib_file_log_level']
LOG_FILENAME = 'analysislib_mloop.log'
global logger
logger = logging.getLogger('analysislib_mloop')
logger.setLevel(logging.DEBUG)
formatter = logging.Formatter(
'%(filename)s:%(funcName)s:%(lineno)d:%(levelname)s: %(message)s'
)
# Set up handlers if not already present from previous runs.
if not logger.handlers:
# Set up console handler
console_handler = logging.StreamHandler(sys.stdout)
console_handler.setLevel(console_log_level)
console_handler.setFormatter(formatter)
logger.addHandler(console_handler)
# Set up file handler
full_filename = os.path.join(LOG_PATH, LOG_FILENAME)
file_handler = logging.FileHandler(full_filename, mode='w')
file_handler.setLevel(file_log_level)
file_handler.setFormatter(formatter)
logger.addHandler(file_handler)
logger.debug('Logger configured.')
def check_runmanager(config):
logger.debug('Checking runmanager...')
msgs = []
logger.debug('Getting globals.')
rm_globals = rm.get_globals()
if not all([x in rm_globals for x in config['mloop_params']]):
msgs.append('Not all optimisation parameters present in runmanager.')
logger.debug('Getting run shots state.')
if not rm.get_run_shots():
msgs.append('Run shot(s) not selected in runmanager.')
logger.debug('Checking for errors in globals.')
if rm.error_in_globals():
msgs.append('Error in runmanager globals.')
logger.debug('Checking number of shots.')
n_shots = rm.n_shots()
if n_shots > 1 and not config['ignore_bad']:
msgs.append(
f'runmanager is set to compile {n_shots:d} shots per request, but your '
+ 'mloop_config has ignore_bad = False. You are advised to (i) remove '
+ 'iterable globals so as to compile one shot per cost or (ii) set '
+ 'ignore_bad = True in your mloop_config and only return one cost with '
+ 'bad = False per sequence.'
)
if msgs:
logger.warning('\n'.join(msgs))
return False
else:
logger.debug('Runmanager ok.')
return True
def verify_globals(config):
logger.debug('Verifying globals...')
# Get the current runmanager globals
logger.debug('Getting values of globals from runmanager.')
rm_globals = rm.get_globals()
current_values = [rm_globals[name] for name in config['mloop_params']]
# Retrieve the parameter values requested by M-LOOP on this iteration
logger.debug('Getting requested globals values from lyse.routine_storage.')
requested_values = lyse.routine_storage.params
requested_dict = dict(zip(config['mloop_params'], requested_values))
# Get the parameter values for the shot we just computed the cost for
logger.debug('Getting lyse dataframe.')
df = lyse.data()
shot_values = [df[name].iloc[-1] for name in config['mloop_params']]
# Verify integrity by cross-checking against what was requested
if not np.array_equal(current_values, requested_values):
message = (
'Cost requested for values different to those in runmanager.\n'
'Please add an executed shot to lyse with: {requested_dict}'
).format(requested_dict=requested_dict)
logger.error(message)
return False
if not np.array_equal(shot_values, requested_values):
message = (
'Cost requested for different values to those used to compute cost.\n'
'Please add an executed shot to lyse with: {requested_dict}'
).format(requested_dict=requested_dict)
logger.error(message)
return False
logger.debug('Globals verified.')
return True
def cost_analysis(cost_key=(None,), maximize=True, x=None):
"""Return a cost dictionary to M-LOOP with at least:
{'bad': True} or {'cost': float}.
    - Look for the latest cost in the cost_key column of the lyse DataFrame
      and an uncertainty ('u_' prefix at the lowest level).
- Report bad shot to M-LOOP if cost is nan or inf.
- Negate value in DataFrame if maximize = True.
- Fallback to reporting a constant or fake cost (from x).
"""
logger.debug('Getting cost...')
cost_dict = {'bad': False}
# Retrieve current lyse DataFrame
logger.debug('Getting lyse dataframe.')
df = lyse.data()
# Use the most recent shot
ix = -1
# Retrieve cost from specified column
if len(df) and cost_key in df:
cost = (df[cost_key].astype(float).values)[ix]
if np.isnan(cost) or np.isinf(cost):
cost_dict['bad'] = True
logger.info('Got bad cost: {cost}'.format(cost=cost))
else:
cost_dict['cost'] = (1 - 2 * maximize) * cost
logger.info('Got cost: {cost}'.format(cost=cost_dict['cost']))
u_cost_key = cost_key[:-1] + ('u_' + cost_key[-1],)
if u_cost_key in df:
cost_dict['uncer'] = df[u_cost_key].iloc[ix]
logger.info('Got uncertainty: {uncer}'.format(uncer=cost_dict['uncer']))
# If it doesn't exist, generate a fake cost
elif x is not None:
from fake_result import fake_result
cost_dict['cost'] = (1 - 2 * maximize) * fake_result(x)
logger.info('Faked cost: {cost}'.format(cost=cost_dict['cost']))
# Or just use a constant cost (for debugging)
else:
cost_dict['cost'] = 1.2
logger.info('Faked constant cost: {cost}'.format(cost=cost_dict['cost']))
return cost_dict
if __name__ == '__main__':
config = mloop_config.get()
configure_logging(config)
if not hasattr(lyse.routine_storage, 'queue'):
logger.info('First execution of lyse routine...')
try:
from queue import Queue
except ImportError:
# PY2
from Queue import Queue
logger.debug('Creating queue.')
lyse.routine_storage.queue = Queue()
if (
hasattr(lyse.routine_storage, 'optimisation')
and lyse.routine_storage.optimisation.is_alive()
):
cost_dict = cost_analysis(
cost_key=config['cost_key'] if not config['mock'] else [],
maximize=config['maximize'],
x=lyse.routine_storage.params[0] if config['mock'] else None,
)
if not cost_dict['bad'] or not config['ignore_bad']:
if check_runmanager(config):
if verify_globals(config):
logger.debug('Putting cost in queue.')
lyse.routine_storage.queue.put(cost_dict)
else:
message = 'NOT putting cost in queue because verify_globals failed.'
logger.debug(message)
else:
message = 'NOT putting cost in queue because check_runmanager failed.'
logger.debug(message)
else:
message = (
'NOT putting cost in queue because cost was bad and ignore_bad is True.'
)
logger.debug(message)
elif check_runmanager(config):
logger.info('(Re)starting optimisation process...')
import threading
import mloop_interface
logger.debug('Starting interface thread...')
lyse.routine_storage.optimisation = threading.Thread(
target=mloop_interface.main
)
lyse.routine_storage.optimisation.daemon = True
lyse.routine_storage.optimisation.start()
logger.debug('Interface thread started.')
else:
print(
'\nNot (re)starting optimisation process.',
'Please address above warnings before trying again.',
)
|
[
"numpy.isnan",
"labscript_utils.check_version",
"logging.Formatter",
"lyse.data",
"mloop_config.get",
"os.path.join",
"logging.FileHandler",
"lyse.routine_storage.queue.put",
"Queue.Queue",
"runmanager.remote.get_globals",
"threading.Thread",
"lyse.routine_storage.optimisation.is_alive",
"logging.StreamHandler",
"fake_result.fake_result",
"numpy.isinf",
"runmanager.remote.n_shots",
"runmanager.remote.get_run_shots",
"runmanager.remote.error_in_globals",
"lyse.routine_storage.optimisation.start",
"numpy.array_equal",
"logging.getLogger"
] |
[((313, 350), 'labscript_utils.check_version', 'check_version', (['"""lyse"""', '"""2.5.0"""', '"""4.0"""'], {}), "('lyse', '2.5.0', '4.0')\n", (326, 350), False, 'from labscript_utils import check_version\n'), ((352, 394), 'labscript_utils.check_version', 'check_version', (['"""zprocess"""', '"""2.13.1"""', '"""4.0"""'], {}), "('zprocess', '2.13.1', '4.0')\n", (365, 394), False, 'from labscript_utils import check_version\n'), ((396, 445), 'labscript_utils.check_version', 'check_version', (['"""labscript_utils"""', '"""2.12.5"""', '"""4.0"""'], {}), "('labscript_utils', '2.12.5', '4.0')\n", (409, 445), False, 'from labscript_utils import check_version\n'), ((685, 723), 'logging.getLogger', 'logging.getLogger', (['"""analysislib_mloop"""'], {}), "('analysislib_mloop')\n", (702, 723), False, 'import logging\n'), ((777, 866), 'logging.Formatter', 'logging.Formatter', (['"""%(filename)s:%(funcName)s:%(lineno)d:%(levelname)s: %(message)s"""'], {}), "(\n '%(filename)s:%(funcName)s:%(lineno)d:%(levelname)s: %(message)s')\n", (794, 866), False, 'import logging\n'), ((1708, 1724), 'runmanager.remote.get_globals', 'rm.get_globals', ([], {}), '()\n', (1722, 1724), True, 'import runmanager.remote as rm\n'), ((2079, 2100), 'runmanager.remote.error_in_globals', 'rm.error_in_globals', ([], {}), '()\n', (2098, 2100), True, 'import runmanager.remote as rm\n'), ((2219, 2231), 'runmanager.remote.n_shots', 'rm.n_shots', ([], {}), '()\n', (2229, 2231), True, 'import runmanager.remote as rm\n'), ((3050, 3066), 'runmanager.remote.get_globals', 'rm.get_globals', ([], {}), '()\n', (3064, 3066), True, 'import runmanager.remote as rm\n'), ((3559, 3570), 'lyse.data', 'lyse.data', ([], {}), '()\n', (3568, 3570), False, 'import lyse\n'), ((5096, 5107), 'lyse.data', 'lyse.data', ([], {}), '()\n', (5105, 5107), False, 'import lyse\n'), ((6339, 6357), 'mloop_config.get', 'mloop_config.get', ([], {}), '()\n', (6355, 6357), False, 'import mloop_config\n'), ((1036, 1069), 'logging.StreamHandler', 'logging.StreamHandler', (['sys.stdout'], {}), '(sys.stdout)\n', (1057, 1069), False, 'import logging\n'), ((1274, 1310), 'os.path.join', 'os.path.join', (['LOG_PATH', 'LOG_FILENAME'], {}), '(LOG_PATH, LOG_FILENAME)\n', (1286, 1310), False, 'import os\n'), ((1335, 1379), 'logging.FileHandler', 'logging.FileHandler', (['full_filename'], {'mode': '"""w"""'}), "(full_filename, mode='w')\n", (1354, 1379), False, 'import logging\n'), ((1932, 1950), 'runmanager.remote.get_run_shots', 'rm.get_run_shots', ([], {}), '()\n', (1948, 1950), True, 'import runmanager.remote as rm\n'), ((3728, 3776), 'numpy.array_equal', 'np.array_equal', (['current_values', 'requested_values'], {}), '(current_values, requested_values)\n', (3742, 3776), True, 'import numpy as np\n'), ((4064, 4109), 'numpy.array_equal', 'np.array_equal', (['shot_values', 'requested_values'], {}), '(shot_values, requested_values)\n', (4078, 4109), True, 'import numpy as np\n'), ((6717, 6724), 'Queue.Queue', 'Queue', ([], {}), '()\n', (6722, 6724), False, 'from Queue import Queue\n'), ((6803, 6847), 'lyse.routine_storage.optimisation.is_alive', 'lyse.routine_storage.optimisation.is_alive', ([], {}), '()\n', (6845, 6847), False, 'import lyse\n'), ((5304, 5318), 'numpy.isnan', 'np.isnan', (['cost'], {}), '(cost)\n', (5312, 5318), True, 'import numpy as np\n'), ((5322, 5336), 'numpy.isinf', 'np.isinf', (['cost'], {}), '(cost)\n', (5330, 5336), True, 'import numpy as np\n'), ((8105, 8150), 'threading.Thread', 'threading.Thread', ([], {'target': 'mloop_interface.main'}), 
'(target=mloop_interface.main)\n', (8121, 8150), False, 'import threading\n'), ((8241, 8282), 'lyse.routine_storage.optimisation.start', 'lyse.routine_storage.optimisation.start', ([], {}), '()\n', (8280, 8282), False, 'import lyse\n'), ((6000, 6014), 'fake_result.fake_result', 'fake_result', (['x'], {}), '(x)\n', (6011, 6014), False, 'from fake_result import fake_result\n'), ((7323, 7364), 'lyse.routine_storage.queue.put', 'lyse.routine_storage.queue.put', (['cost_dict'], {}), '(cost_dict)\n', (7353, 7364), False, 'import lyse\n')]
|
# Copyright (c) 2021, Hitachi America Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import json
import os
from typing import List
import click
from pdf_struct import loader
from pdf_struct.core import transition_labels
from pdf_struct.core.document import Document
from pdf_struct.core.structure_evaluation import evaluate_structure, \
evaluate_labels
from pdf_struct.core.transition_labels import ListAction
from pdf_struct.features.listing import SectionNumber, SectionNumberJa
section_number_cls_dict = {
'SectionNumber': SectionNumber,
'SectionNumberJa': SectionNumberJa
}
def predict_transitions_numbering(section_number_cls, document: Document) -> Document:
numbered_list = []
anchors: List[int] = []
labels = []
pointers = []
for i in range(document.n_blocks):
candidates = section_number_cls.extract_section_number(document.texts[i])
if len(candidates) == 0:
labels.append(ListAction.CONTINUOUS)
pointers.append(None)
continue
for j in range(len(numbered_list) - 1, -1, -1):
for section_number in candidates:
if section_number.is_next_of(numbered_list[j]):
if j == len(numbered_list) - 1:
labels.append(ListAction.SAME_LEVEL)
pointers.append(None)
else:
labels.append(ListAction.UP)
pointers.append(anchors[j])
numbered_list = numbered_list[:j]
numbered_list.append(section_number)
anchors = anchors[:j]
anchors.append(i)
break
else:
continue
break
else:
# No valid continuation found... check if it is a new level
for section_number in candidates:
if isinstance(section_number.number, str) or section_number.number <= 1:
numbered_list.append(section_number)
anchors.append(i)
labels.append(ListAction.DOWN)
pointers.append(None)
break
else:
# section number does not match anything, but it is still probably a new paragraph
labels.append(ListAction.SAME_LEVEL)
pointers.append(None)
# append final label --- which would always be ignored
labels.append(ListAction.UP)
pointers.append(-1)
labels = labels[1:]
pointers = pointers[1:]
assert len(labels) == len(pointers) == len(document.labels)
document = copy.deepcopy(document)
document.pointers = pointers
document.labels = labels
return document
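# The function above keeps a stack of the section numbers seen so far and, for
# each block, decides whether the text stays at the same depth, moves up the
# hierarchy, or starts a deeper level. The helper below is a simplified,
# self-contained illustration of that stack logic using plain tuples such as
# (1,), (1, 1), (2,); it is hypothetical and not part of pdf_struct.
def _classify_tuple_numbers(numbers):
    stack, labels = [], []
    for num in numbers:
        for depth in range(len(stack) - 1, -1, -1):
            prev = stack[depth]
            if len(num) == len(prev) and num[:-1] == prev[:-1] and num[-1] == prev[-1] + 1:
                # continuation of an existing entry: same level or a jump up
                labels.append('SAME_LEVEL' if depth == len(stack) - 1 else 'UP')
                stack = stack[:depth] + [num]
                break
        else:
            # no continuation found: treat it as the start of a new level
            labels.append('DOWN')
            stack = stack + [num]
    return labels
# _classify_tuple_numbers([(1,), (1, 1), (1, 2), (2,)])
# -> ['DOWN', 'DOWN', 'SAME_LEVEL', 'UP']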
@click.command()
@click.option('--metrics', type=click.Path(exists=False), default=None,
help='Dump metrics as a JSON file.')
@click.argument('file-type', type=click.Choice(('txt', 'pdf')))
@click.argument('section-number', type=click.Choice(tuple(section_number_cls_dict.keys())))
@click.argument('raw-dir', type=click.Path(exists=True))
@click.argument('anno-dir', type=click.Path(exists=True))
def main(metrics, file_type: str, section_number: str, raw_dir: str, anno_dir: str):
print(f'Loading annotations from {anno_dir}')
annos = transition_labels.load_annos(anno_dir)
print('Loading and extracting features from raw files')
if file_type == 'pdf':
documents = loader.pdf.load_from_directory(raw_dir, annos)
else:
documents = loader.text.load_from_directory(raw_dir, annos)
section_number_cls = section_number_cls_dict[section_number]
documents_pred = [predict_transitions_numbering(section_number_cls, document)
for document in documents]
if metrics is None:
print(json.dumps(evaluate_structure(documents, documents_pred), indent=2))
print(json.dumps(evaluate_labels(documents, documents_pred), indent=2))
else:
_metrics = {
'structure': evaluate_structure(documents, documents_pred),
'labels': evaluate_labels(documents, documents_pred)
}
with open(metrics, 'w') as fout:
json.dump(_metrics, fout, indent=2)
if __name__ == '__main__':
main()
|
[
"pdf_struct.core.transition_labels.load_annos",
"copy.deepcopy",
"json.dump",
"pdf_struct.core.structure_evaluation.evaluate_labels",
"pdf_struct.loader.pdf.load_from_directory",
"click.command",
"click.Choice",
"pdf_struct.loader.text.load_from_directory",
"click.Path",
"pdf_struct.core.structure_evaluation.evaluate_structure"
] |
[((3302, 3317), 'click.command', 'click.command', ([], {}), '()\n', (3315, 3317), False, 'import click\n'), ((3191, 3214), 'copy.deepcopy', 'copy.deepcopy', (['document'], {}), '(document)\n', (3204, 3214), False, 'import copy\n'), ((3859, 3897), 'pdf_struct.core.transition_labels.load_annos', 'transition_labels.load_annos', (['anno_dir'], {}), '(anno_dir)\n', (3887, 3897), False, 'from pdf_struct.core import transition_labels\n'), ((4006, 4052), 'pdf_struct.loader.pdf.load_from_directory', 'loader.pdf.load_from_directory', (['raw_dir', 'annos'], {}), '(raw_dir, annos)\n', (4036, 4052), False, 'from pdf_struct import loader\n'), ((4083, 4130), 'pdf_struct.loader.text.load_from_directory', 'loader.text.load_from_directory', (['raw_dir', 'annos'], {}), '(raw_dir, annos)\n', (4114, 4130), False, 'from pdf_struct import loader\n'), ((3350, 3374), 'click.Path', 'click.Path', ([], {'exists': '(False)'}), '(exists=False)\n', (3360, 3374), False, 'import click\n'), ((3475, 3503), 'click.Choice', 'click.Choice', (["('txt', 'pdf')"], {}), "(('txt', 'pdf'))\n", (3487, 3503), False, 'import click\n'), ((3629, 3652), 'click.Path', 'click.Path', ([], {'exists': '(True)'}), '(exists=True)\n', (3639, 3652), False, 'import click\n'), ((3687, 3710), 'click.Path', 'click.Path', ([], {'exists': '(True)'}), '(exists=True)\n', (3697, 3710), False, 'import click\n'), ((4572, 4617), 'pdf_struct.core.structure_evaluation.evaluate_structure', 'evaluate_structure', (['documents', 'documents_pred'], {}), '(documents, documents_pred)\n', (4590, 4617), False, 'from pdf_struct.core.structure_evaluation import evaluate_structure, evaluate_labels\n'), ((4641, 4683), 'pdf_struct.core.structure_evaluation.evaluate_labels', 'evaluate_labels', (['documents', 'documents_pred'], {}), '(documents, documents_pred)\n', (4656, 4683), False, 'from pdf_struct.core.structure_evaluation import evaluate_structure, evaluate_labels\n'), ((4747, 4782), 'json.dump', 'json.dump', (['_metrics', 'fout'], {'indent': '(2)'}), '(_metrics, fout, indent=2)\n', (4756, 4782), False, 'import json\n'), ((4378, 4423), 'pdf_struct.core.structure_evaluation.evaluate_structure', 'evaluate_structure', (['documents', 'documents_pred'], {}), '(documents, documents_pred)\n', (4396, 4423), False, 'from pdf_struct.core.structure_evaluation import evaluate_structure, evaluate_labels\n'), ((4461, 4503), 'pdf_struct.core.structure_evaluation.evaluate_labels', 'evaluate_labels', (['documents', 'documents_pred'], {}), '(documents, documents_pred)\n', (4476, 4503), False, 'from pdf_struct.core.structure_evaluation import evaluate_structure, evaluate_labels\n')]
|
import argparse
import json
import logging
import os
import pprint
import re
import sys
import click._compat
import pkg_resources
from prettytable import PrettyTable
import beowulf as bwf
from beowulfbase.account import PrivateKey
from beowulfbase.storage import configStorage
from .account import Account
from .amount import Amount
from .block import Block
from .blockchain import Blockchain
from .instance import shared_beowulfd_instance
from .supernode import Supernode
availableConfigurationKeys = [
"default_account",
"default_vote_weight",
"nodes",
]
def legacyentry():
"""
    Piston-like CLI application.
This will be re-written as a @click app in the future.
"""
global args
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
description="Command line tool to interact with the Beowulf network")
"""
Default settings for all tools
"""
parser.add_argument(
'--node',
type=str,
default=configStorage["node"],
help='URL for public Beowulf API (default: "https://bw.beowulfchain.com")'
)
parser.add_argument(
'--no-broadcast',
'-d',
action='store_true',
help='Do not broadcast anything')
parser.add_argument(
'--no-wallet',
'-p',
action='store_true',
help='Do not load the wallet')
parser.add_argument(
'--unsigned',
'-x',
action='store_true',
help='Do not try to sign the transaction')
parser.add_argument(
'--expires',
'-e',
default=60,
help='Expiration time in seconds (defaults to 60)')
parser.add_argument(
'--verbose', '-v', type=int, default=3, help='Verbosity')
parser.add_argument(
'--version',
action='version',
version='%(prog)s {version}'.format(
version=pkg_resources.require("beowulf")[0].version))
subparsers = parser.add_subparsers(help='sub-command help')
"""
Command "set"
"""
setconfig = subparsers.add_parser('set', help='Set configuration')
setconfig.add_argument(
'key',
type=str,
choices=availableConfigurationKeys,
help='Configuration key')
setconfig.add_argument('value', type=str, help='Configuration value')
setconfig.set_defaults(command="set")
"""
Command "config"
"""
configconfig = subparsers.add_parser(
'config', help='Show local configuration')
configconfig.set_defaults(command="config")
"""
Command "info"
"""
parser_info = subparsers.add_parser(
'info', help='Show basic BWF blockchain info')
parser_info.set_defaults(command="info")
parser_info.add_argument(
'objects',
nargs='*',
type=str,
help='General information about the blockchain, a block, an account'
' name, a post, a public key, ...')
"""
Command "changewalletpassphrase"
"""
changepasswordconfig = subparsers.add_parser(
'changewalletpassphrase', help='Change wallet password')
changepasswordconfig.set_defaults(command="changewalletpassphrase")
"""
Command "addkey"
"""
addkey = subparsers.add_parser(
'addkey', help='Add a new key to the wallet')
addkey.add_argument(
'--unsafe-import-key',
nargs='*',
type=str,
help='private key to import into the wallet (unsafe, unless you ' +
'delete your shell history)')
addkey.set_defaults(command="addkey")
parsewif = subparsers.add_parser(
'parsewif', help='Parse a WIF private key without importing')
parsewif.add_argument(
'--unsafe-import-key',
nargs='*',
type=str,
help='WIF key to parse (unsafe, delete your bash history)')
parsewif.set_defaults(command='parsewif')
"""
Command "delkey"
"""
delkey = subparsers.add_parser(
'delkey', help='Delete keys from the wallet')
delkey.add_argument(
'pub',
nargs='*',
type=str,
help='the public key to delete from the wallet')
delkey.set_defaults(command="delkey")
"""
Command "getkey"
"""
getkey = subparsers.add_parser(
'getkey', help='Dump the privatekey of a pubkey from the wallet')
getkey.add_argument(
'pub',
type=str,
help='the public key for which to show the private key')
getkey.set_defaults(command="getkey")
"""
Command "listkeys"
"""
listkeys = subparsers.add_parser(
'listkeys', help='List available keys in your wallet')
listkeys.set_defaults(command="listkeys")
"""
Command "listaccounts"
"""
listaccounts = subparsers.add_parser(
'listaccounts', help='List available accounts in your wallet')
listaccounts.set_defaults(command="listaccounts")
"""
Command "transfer"
"""
parser_transfer = subparsers.add_parser('transfer', help='Transfer BWF')
parser_transfer.set_defaults(command="transfer")
parser_transfer.add_argument('to', type=str, help='Recipient')
parser_transfer.add_argument(
'amount', type=float, help='Amount to transfer')
parser_transfer.add_argument(
'asset',
type=str,
choices=["BWF", "W"],
help='Asset to transfer (i.e. BWF or W)')
parser_transfer.add_argument(
'fee', type=float, help='Fee to transfer')
parser_transfer.add_argument(
'asset_fee',
type=str,
choices=["W"],
help='Asset fee to transfer (W)')
parser_transfer.add_argument(
'memo', type=str, nargs="?", default="", help='Optional memo')
parser_transfer.add_argument(
'--account',
type=str,
required=False,
default=configStorage["default_account"],
help='Transfer from this account')
"""
Command "convert"
"""
parser_convert = subparsers.add_parser(
'convert',
help='Convert BWFDollars to Beowulf (takes a week to settle)')
parser_convert.set_defaults(command="convert")
parser_convert.add_argument(
'amount', type=float, help='Amount of W to convert')
parser_convert.add_argument(
'--account',
type=str,
required=False,
default=configStorage["default_account"],
help='Convert from this account')
"""
Command "balance"
"""
parser_balance = subparsers.add_parser(
'balance', help='Show the balance of one more more accounts')
parser_balance.set_defaults(command="balance")
parser_balance.add_argument(
'account',
type=str,
nargs="*",
default=configStorage["default_account"],
help='balance of these account (multiple accounts allowed)')
"""
Command "newaccount"
"""
parser_newaccount = subparsers.add_parser(
'newaccount', help='Create a new account')
parser_newaccount.set_defaults(command="newaccount")
parser_newaccount.add_argument(
'accountname', type=str, help='New account name')
parser_newaccount.add_argument(
'--account',
type=str,
required=False,
default=configStorage["default_account"],
help='Account that pays the fee')
"""
Command "importaccount"
"""
parser_importaccount = subparsers.add_parser(
'importaccount', help='Import an account using a passphrase')
parser_importaccount.set_defaults(command="importaccount")
parser_importaccount.add_argument('account', type=str, help='Account name')
parser_importaccount.add_argument(
'--roles',
type=str,
nargs="*",
default=["owner"], # no owner
help='Import specified keys (owner, active, posting, memo)')
"""
Command "approvesupernode"
"""
parser_approvesupernode = subparsers.add_parser(
'approvesupernode', help='Approve a supernodees')
parser_approvesupernode.set_defaults(command="approvesupernode")
parser_approvesupernode.add_argument(
'supernode', type=str, help='Supernode to approve')
parser_approvesupernode.add_argument(
'--account',
type=str,
required=False,
default=configStorage["default_account"],
help='Your account')
"""
Command "disapprovesupernode"
"""
parser_disapprovesupernode = subparsers.add_parser(
'disapprovesupernode', help='Disapprove a supernodees')
parser_disapprovesupernode.set_defaults(command="disapprovesupernode")
parser_disapprovesupernode.add_argument(
'supernode', type=str, help='Supernode to disapprove')
parser_disapprovesupernode.add_argument(
'--account',
type=str,
required=False,
default=configStorage["default_account"],
help='Your account')
"""
Command "sign"
"""
parser_sign = subparsers.add_parser(
'sign',
help='Sign a provided transaction with available and required keys')
parser_sign.set_defaults(command="sign")
parser_sign.add_argument(
'--file',
type=str,
required=False,
help='Load transaction from file. If "-", read from ' +
'stdin (defaults to "-")')
"""
Command "broadcast"
"""
parser_broadcast = subparsers.add_parser(
'broadcast', help='broadcast a signed transaction')
parser_broadcast.set_defaults(command="broadcast")
parser_broadcast.add_argument(
'--file',
type=str,
required=False,
help='Load transaction from file. If "-", read from ' +
'stdin (defaults to "-")')
"""
Command "supernodeupdate"
"""
parser_supernodeprops = subparsers.add_parser(
'supernodeupdate', help='Change supernode properties')
parser_supernodeprops.set_defaults(command="supernodeupdate")
parser_supernodeprops.add_argument(
'--supernode',
type=str,
default=configStorage["default_account"],
help='Supernode name')
parser_supernodeprops.add_argument(
'--maximum_block_size',
type=float,
required=False,
help='Max block size')
parser_supernodeprops.add_argument(
'--account_creation_fee',
type=float,
required=False,
help='Account creation fee')
parser_supernodeprops.add_argument(
'--signing_key', type=str, required=False, help='Signing Key')
"""
Command "supernodecreate"
"""
parser_supernodecreate = subparsers.add_parser(
'supernodecreate', help='Create a supernode')
parser_supernodecreate.set_defaults(command="supernodecreate")
parser_supernodecreate.add_argument('supernode', type=str, help='Supernode name')
parser_supernodecreate.add_argument(
'signing_key', type=str, help='Signing Key')
parser_supernodecreate.add_argument(
'--maximum_block_size',
type=float,
default="65536",
help='Max block size')
parser_supernodecreate.add_argument(
'--account_creation_fee',
type=float,
default=30,
help='Account creation fee')
"""
Parse Arguments
"""
args = parser.parse_args()
# Logging
log = logging.getLogger(__name__)
verbosity = ["critical", "error", "warn", "info", "debug"][int(
min(args.verbose, 4))]
log.setLevel(getattr(logging, verbosity.upper()))
formatter = logging.Formatter(
'%(asctime)s - %(name)s - %(levelname)s - %(message)s')
ch = logging.StreamHandler()
ch.setLevel(getattr(logging, verbosity.upper()))
ch.setFormatter(formatter)
log.addHandler(ch)
# GrapheneAPI logging
if args.verbose > 4:
verbosity = ["critical", "error", "warn", "info", "debug"][int(
min((args.verbose - 4), 4))]
gphlog = logging.getLogger("graphenebase")
gphlog.setLevel(getattr(logging, verbosity.upper()))
gphlog.addHandler(ch)
if args.verbose > 8:
verbosity = ["critical", "error", "warn", "info", "debug"][int(
min((args.verbose - 8), 4))]
gphlog = logging.getLogger("grapheneapi")
gphlog.setLevel(getattr(logging, verbosity.upper()))
gphlog.addHandler(ch)
if not hasattr(args, "command"):
parser.print_help()
sys.exit(2)
# initialize BWF instance
options = {
"node": args.node,
"unsigned": args.unsigned,
"expires": args.expires
}
if args.command == "sign":
options.update({"offline": True})
if args.no_wallet:
options.update({"wif": []})
beowulf = bwf.Beowulf(no_broadcast=args.no_broadcast, **options)
if args.command == "set":
# TODO: Evaluate this line with cli refactor.
if (args.key in ["default_account"] and args.value[0] == "@"):
args.value = args.value[1:]
configStorage[args.key] = args.value
elif args.command == "config":
t = PrettyTable(["Key", "Value"])
t.align = "l"
for key in configStorage:
# hide internal config data
if key in availableConfigurationKeys:
t.add_row([key, configStorage[key]])
print(t)
elif args.command == "info":
if not args.objects:
t = PrettyTable(["Key", "Value"])
t.align = "l"
blockchain = Blockchain(mode="head")
info = blockchain.info()
for key in info:
t.add_row([key, info[key]])
print(t.get_string(sortby="Key"))
for obj in args.objects:
# Block
if re.match("^[0-9]*$", obj):
block = Block(obj)
if block:
t = PrettyTable(["Key", "Value"])
t.align = "l"
for key in sorted(block):
value = block[key]
if key == "transactions":
value = json.dumps(value, indent=4)
t.add_row([key, value])
print(t)
else:
print("Block number %s unknown" % obj)
# Account name
elif re.match("^[a-zA-Z0-9\-\._]{2,16}$", obj):
from math import log10
account = Account(obj)
t = PrettyTable(["Key", "Value"])
t.align = "l"
for key in sorted(account):
value = account[key]
if key == "json_metadata":
value = json.dumps(json.loads(value or "{}"), indent=4)
if key in ["supernode_votes", "owner"]:
value = json.dumps(value, indent=4)
if key == "reputation" and int(value) > 0:
value = int(value)
rep = (max(log10(value) - 9, 0) * 9 + 25 if value > 0
else max(log10(-value) - 9, 0) * -9 + 25)
value = "{:.2f} ({:d})".format(rep, value)
t.add_row([key, value])
print(t)
# supernode available?
try:
supernode = Supernode(obj)
t = PrettyTable(["Key", "Value"])
t.align = "l"
for key in sorted(supernode):
value = supernode[key]
if key in ["props", "wd_exchange_rate"]:
value = json.dumps(value, indent=4)
t.add_row([key, value])
print(t)
except: # noqa FIXME
pass
# Public Key
elif re.match("^BEO.{48,55}$", obj):
account = beowulf.commit.wallet.getAccountFromPublicKey(obj)
if account:
t = PrettyTable(["Account"])
t.align = "l"
t.add_row([account])
print(t)
else:
print("Public Key not known" % obj)
else:
print("Couldn't identify object to read")
elif args.command == "changewalletpassphrase":
beowulf.commit.wallet.changeUserPassphrase()
elif args.command == "addkey":
if args.unsafe_import_key:
for key in args.unsafe_import_key:
try:
beowulf.commit.wallet.add_private_key(key)
except Exception as e:
print(str(e))
else:
import getpass
while True:
wifkey = getpass.getpass('Private Key (wif) [Enter to quit]:')
if not wifkey:
break
try:
beowulf.commit.wallet.add_private_key(wifkey)
except Exception as e:
print(str(e))
continue
installed_keys = beowulf.commit.wallet.getPublicKeys()
if len(installed_keys) == 1:
name = beowulf.commit.wallet.getAccountFromPublicKey(
installed_keys[0])
print("=" * 30)
print("Would you like to make %s a default user?" % name)
print()
print("You can set it with with:")
print(" beowulfpy set default_account <account>")
print("=" * 30)
elif args.command == "delkey":
if confirm("Are you sure you want to delete keys from your wallet?\n"
"This step is IRREVERSIBLE! If you don't have a backup, "
"You may lose access to your account!"):
for pub in args.pub:
beowulf.commit.wallet.removePrivateKeyFromPublicKey(pub)
elif args.command == "parsewif":
if args.unsafe_import_key:
for key in args.unsafe_import_key:
try:
print(PrivateKey(key).pubkey)
except Exception as e:
print(str(e))
else:
import getpass
while True:
wifkey = getpass.getpass('Private Key (wif) [Enter to quit:')
if not wifkey:
break
try:
print(PrivateKey(wifkey).pubkey)
except Exception as e:
print(str(e))
continue
elif args.command == "getkey":
print(beowulf.commit.wallet.getPrivateKeyForPublicKey(args.pub))
elif args.command == "listkeys":
t = PrettyTable(["Available Key"])
t.align = "l"
for key in beowulf.commit.wallet.getPublicKeys():
t.add_row([key])
print(t)
elif args.command == "listaccounts":
t = PrettyTable(["Name", "Type", "Available Key"])
t.align = "l"
for account in beowulf.commit.wallet.getAccounts():
t.add_row([
account["name"] or "n/a", account["type"] or "n/a",
account["pubkey"]
])
print(t)
elif args.command == "transfer":
print_json(
beowulf.commit.transfer(
args.to,
args.amount,
args.asset,
args.fee,
args.asset_fee,
memo=args.memo,
account=args.account))
elif args.command == "convert":
print_json(beowulf.commit.convert(
args.amount,
account=args.account,
))
elif args.command == "balance":
if args.account and isinstance(args.account, list):
for account in args.account:
a = Account(account)
print("\n%s" % a.name)
t = PrettyTable(["Account", "BWF", "W", "M"])
t.align = "r"
t.add_row([
'Available',
a.balances['available']['BWF'],
a.balances['available']['W'],
a.balances['available']['M'],
])
t.add_row([
'Rewards',
a.balances['rewards']['BWF'],
a.balances['rewards']['W'],
a.balances['rewards']['M'],
])
t.add_row([
'TOTAL',
a.balances['total']['BWF'],
a.balances['total']['W'],
a.balances['total']['M'],
])
print(t)
else:
print("Please specify an account: beowulfpy balance <account>")
elif args.command == "permissions":
account = Account(args.account)
print_permissions(account)
elif args.command == "newaccount":
import getpass
while True:
pw = getpass.getpass("New Account Passphrase: ")
if not pw:
print("You cannot chosen an empty password!")
continue
else:
pwck = getpass.getpass("Confirm New Account Passphrase: ")
if pw == pwck:
break
else:
print("Given Passphrases do not match!")
print_json(
beowulf.commit.create_account(
args.accountname,
creator=args.account,
password=pw,
))
elif args.command == "importaccount":
from beowulfbase.account import PasswordKey
import getpass
password = getpass.getpass("Account Passphrase: ")
account = Account(args.account)
imported = False
if "owner" in args.roles:
owner_key = PasswordKey(args.account, password, role="owner")
owner_pubkey = format(owner_key.get_public_key(), "BEO")
if owner_pubkey in [x[0] for x in account["owner"]["key_auths"]]:
print("Importing owner key!")
owner_privkey = owner_key.get_private_key()
beowulf.commit.wallet.add_private_key(owner_privkey)
imported = True
if not imported:
print("No matching key(s) found. Password correct?")
elif args.command == "sign":
if args.file and args.file != "-":
if not os.path.isfile(args.file):
raise Exception("File %s does not exist!" % args.file)
with open(args.file) as fp:
tx = fp.read()
else:
tx = sys.stdin.read()
tx = eval(tx)
print_json(beowulf.commit.sign(tx))
elif args.command == "broadcast":
if args.file and args.file != "-":
if not os.path.isfile(args.file):
raise Exception("File %s does not exist!" % args.file)
with open(args.file) as fp:
tx = fp.read()
else:
tx = sys.stdin.read()
tx = eval(tx)
beowulf.commit.broadcast(tx)
elif args.command == "approvesupernode":
print_json(
beowulf.commit.approve_supernode(args.supernode, account=args.account))
elif args.command == "disapprovesupernode":
print_json(
beowulf.commit.disapprove_supernode(
args.supernode, account=args.account))
elif args.command == "supernodeupdate":
supernode = Supernode(args.supernode)
props = supernode["props"]
if args.account_creation_fee:
props["account_creation_fee"] = str(
Amount("%f BWF" % args.account_creation_fee))
if args.maximum_block_size:
props["maximum_block_size"] = args.maximum_block_size
print_json(
beowulf.commit.supernode_update(
args.signing_key or supernode["signing_key"],
props,
account=args.supernode))
elif args.command == "supernodecreate":
props = {
"account_creation_fee":
str(Amount("%f BWF" % args.account_creation_fee)),
"maximum_block_size":
args.maximum_block_size
}
print_json(
beowulf.commit.supernode_update(
args.signing_key, props, account=args.supernode))
else:
print("No valid command given")
def confirm(question, default="yes"):
""" Confirmation dialog that requires *manual* input.
:param str question: Question to ask the user
:param str default: default answer
:return: Choice of the user
:rtype: bool
"""
valid = {"yes": True, "y": True, "ye": True, "no": False, "n": False}
if default is None:
prompt = " [y/n] "
elif default == "yes":
prompt = " [Y/n] "
elif default == "no":
prompt = " [y/N] "
else:
raise ValueError("invalid default answer: '%s'" % default)
while True:
sys.stdout.write(question + prompt)
# Python 2.7 `input` attempts to evaluate the input, while in 3+
# it returns a string. Python 2.7 `raw_input` returns a str as desired.
if sys.version >= '3.0':
choice = input().lower()
else:
choice = click._compat.raw_input().lower()
if default is not None and choice == '':
return valid[default]
elif choice in valid:
return valid[choice]
else:
sys.stdout.write("Please respond with 'yes' or 'no' "
"(or 'y' or 'n').\n")
def get_terminal(text="Password", confirm=False, allowedempty=False):
import getpass
while True:
pw = getpass.getpass(text)
if not pw and not allowedempty:
print("Cannot be empty!")
continue
else:
if not confirm:
break
pwck = getpass.getpass("Confirm " + text)
if pw == pwck:
break
else:
print("Not matching!")
return pw
def format_operation_details(op, memos=False):
if op[0] == "transfer":
str_ = "%s -> %s %s" % (
op[1]["from"],
op[1]["to"],
op[1]["amount"],
op[1]["fee"],
)
if memos:
memo = op[1]["memo"]
if len(memo) > 0 and memo[0] == "#":
beowulf = shared_beowulfd_instance()
# memo = beowulf.decode_memo(memo, op[1]["from"])
memo = beowulf.decode_memo(memo, op)
str_ += " (%s)" % memo
return str_
else:
return json.dumps(op[1], indent=4)
def print_permissions(account):
t = PrettyTable(["Permission", "Threshold", "Key/Account"], hrules=0)
t.align = "r"
for permission in ["owner"]:
auths = []
for type_ in ["account_auths", "key_auths"]:
for authority in account[permission][type_]:
auths.append("%s (%d)" % (authority[0], authority[1]))
t.add_row([
permission,
account[permission]["weight_threshold"],
"\n".join(auths),
])
print(t)
def print_json(tx):
if sys.stdout.isatty():
print(json.dumps(tx, indent=4))
else:
# You're being piped or redirected
print(tx)
# this is another console script entrypoint
# also this function sucks and should be taken out back and shot
def beowulftailentry():
parser = argparse.ArgumentParser(
description="UNIX tail(1)-like tool for the beowulf blockchain")
parser.add_argument(
'-f',
'--follow',
help='Constantly stream output to stdout',
action='store_true')
parser.add_argument(
'-n', '--lines', type=int, default=10, help='How many ops to show')
parser.add_argument(
'-j',
'--json',
help='Output as JSON instead of human-readable pretty-printed format',
action='store_true')
args = parser.parse_args(sys.argv[1:])
op_count = 0
if args.json:
if not args.follow:
sys.stdout.write('[')
for op in Blockchain().reliable_stream():
if args.json:
sys.stdout.write('%s' % json.dumps(op))
if args.follow:
sys.stdout.write("\n") # for human eyeballs
sys.stdout.flush() # flush after each op if live mode
else:
pprint.pprint(op)
op_count += 1
if not args.follow:
if op_count > args.lines:
if args.json:
sys.stdout.write(']')
return
else:
if args.json:
sys.stdout.write(',')
|
[
"sys.stdout.write",
"argparse.ArgumentParser",
"pkg_resources.require",
"json.dumps",
"logging.Formatter",
"os.path.isfile",
"sys.stdout.flush",
"prettytable.PrettyTable",
"pprint.pprint",
"beowulf.Beowulf",
"json.loads",
"math.log10",
"beowulfbase.account.PasswordKey",
"sys.stdin.read",
"logging.StreamHandler",
"re.match",
"sys.stdout.isatty",
"sys.exit",
"getpass.getpass",
"beowulfbase.account.PrivateKey",
"logging.getLogger"
] |
[((729, 886), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'formatter_class': 'argparse.RawDescriptionHelpFormatter', 'description': '"""Command line tool to interact with the Beowulf network"""'}), "(formatter_class=argparse.\n RawDescriptionHelpFormatter, description=\n 'Command line tool to interact with the Beowulf network')\n", (752, 886), False, 'import argparse\n'), ((11342, 11369), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (11359, 11369), False, 'import logging\n'), ((11539, 11612), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s - %(name)s - %(levelname)s - %(message)s"""'], {}), "('%(asctime)s - %(name)s - %(levelname)s - %(message)s')\n", (11556, 11612), False, 'import logging\n'), ((11631, 11654), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (11652, 11654), False, 'import logging\n'), ((12728, 12782), 'beowulf.Beowulf', 'bwf.Beowulf', ([], {'no_broadcast': 'args.no_broadcast'}), '(no_broadcast=args.no_broadcast, **options)\n', (12739, 12782), True, 'import beowulf as bwf\n'), ((26785, 26850), 'prettytable.PrettyTable', 'PrettyTable', (["['Permission', 'Threshold', 'Key/Account']"], {'hrules': '(0)'}), "(['Permission', 'Threshold', 'Key/Account'], hrules=0)\n", (26796, 26850), False, 'from prettytable import PrettyTable\n'), ((27282, 27301), 'sys.stdout.isatty', 'sys.stdout.isatty', ([], {}), '()\n', (27299, 27301), False, 'import sys\n'), ((27562, 27655), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""UNIX tail(1)-like tool for the beowulf blockchain"""'}), "(description=\n 'UNIX tail(1)-like tool for the beowulf blockchain')\n", (27585, 27655), False, 'import argparse\n'), ((11944, 11977), 'logging.getLogger', 'logging.getLogger', (['"""graphenebase"""'], {}), "('graphenebase')\n", (11961, 11977), False, 'import logging\n'), ((12224, 12256), 'logging.getLogger', 'logging.getLogger', (['"""grapheneapi"""'], {}), "('grapheneapi')\n", (12241, 12256), False, 'import logging\n'), ((12422, 12433), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (12430, 12433), False, 'import sys\n'), ((25049, 25084), 'sys.stdout.write', 'sys.stdout.write', (['(question + prompt)'], {}), '(question + prompt)\n', (25065, 25084), False, 'import sys\n'), ((25775, 25796), 'getpass.getpass', 'getpass.getpass', (['text'], {}), '(text)\n', (25790, 25796), False, 'import getpass\n'), ((26715, 26742), 'json.dumps', 'json.dumps', (['op[1]'], {'indent': '(4)'}), '(op[1], indent=4)\n', (26725, 26742), False, 'import json\n'), ((13072, 13101), 'prettytable.PrettyTable', 'PrettyTable', (["['Key', 'Value']"], {}), "(['Key', 'Value'])\n", (13083, 13101), False, 'from prettytable import PrettyTable\n'), ((25979, 26013), 'getpass.getpass', 'getpass.getpass', (["('Confirm ' + text)"], {}), "('Confirm ' + text)\n", (25994, 26013), False, 'import getpass\n'), ((27317, 27341), 'json.dumps', 'json.dumps', (['tx'], {'indent': '(4)'}), '(tx, indent=4)\n', (27327, 27341), False, 'import json\n'), ((28184, 28205), 'sys.stdout.write', 'sys.stdout.write', (['"""["""'], {}), "('[')\n", (28200, 28205), False, 'import sys\n'), ((28512, 28529), 'pprint.pprint', 'pprint.pprint', (['op'], {}), '(op)\n', (28525, 28529), False, 'import pprint\n'), ((25550, 25622), 'sys.stdout.write', 'sys.stdout.write', (['"""Please respond with \'yes\' or \'no\' (or \'y\' or \'n\').\n"""'], {}), '("Please respond with \'yes\' or \'no\' (or \'y\' or \'n\').\\n")\n', (25566, 25622), False, 'import sys\n'), ((28370, 28392), 
'sys.stdout.write', 'sys.stdout.write', (['"""\n"""'], {}), "('\\n')\n", (28386, 28392), False, 'import sys\n'), ((28431, 28449), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (28447, 28449), False, 'import sys\n'), ((13397, 13426), 'prettytable.PrettyTable', 'PrettyTable', (["['Key', 'Value']"], {}), "(['Key', 'Value'])\n", (13408, 13426), False, 'from prettytable import PrettyTable\n'), ((13727, 13752), 're.match', 're.match', (['"""^[0-9]*$"""', 'obj'], {}), "('^[0-9]*$', obj)\n", (13735, 13752), False, 'import re\n'), ((28310, 28324), 'json.dumps', 'json.dumps', (['op'], {}), '(op)\n', (28320, 28324), False, 'import json\n'), ((28668, 28689), 'sys.stdout.write', 'sys.stdout.write', (['"""]"""'], {}), "(']')\n", (28684, 28689), False, 'import sys\n'), ((28781, 28802), 'sys.stdout.write', 'sys.stdout.write', (['""","""'], {}), "(',')\n", (28797, 28802), False, 'import sys\n'), ((14308, 14351), 're.match', 're.match', (['"""^[a-zA-Z0-9\\\\-\\\\._]{2,16}$"""', 'obj'], {}), "('^[a-zA-Z0-9\\\\-\\\\._]{2,16}$', obj)\n", (14316, 14351), False, 'import re\n'), ((1914, 1946), 'pkg_resources.require', 'pkg_resources.require', (['"""beowulf"""'], {}), "('beowulf')\n", (1935, 1946), False, 'import pkg_resources\n'), ((13839, 13868), 'prettytable.PrettyTable', 'PrettyTable', (["['Key', 'Value']"], {}), "(['Key', 'Value'])\n", (13850, 13868), False, 'from prettytable import PrettyTable\n'), ((14449, 14478), 'prettytable.PrettyTable', 'PrettyTable', (["['Key', 'Value']"], {}), "(['Key', 'Value'])\n", (14460, 14478), False, 'from prettytable import PrettyTable\n'), ((15838, 15868), 're.match', 're.match', (['"""^BEO.{48,55}$"""', 'obj'], {}), "('^BEO.{48,55}$', obj)\n", (15846, 15868), False, 'import re\n'), ((15366, 15395), 'prettytable.PrettyTable', 'PrettyTable', (["['Key', 'Value']"], {}), "(['Key', 'Value'])\n", (15377, 15395), False, 'from prettytable import PrettyTable\n'), ((16752, 16805), 'getpass.getpass', 'getpass.getpass', (['"""Private Key (wif) [Enter to quit]:"""'], {}), "('Private Key (wif) [Enter to quit]:')\n", (16767, 16805), False, 'import getpass\n'), ((14078, 14105), 'json.dumps', 'json.dumps', (['value'], {'indent': '(4)'}), '(value, indent=4)\n', (14088, 14105), False, 'import json\n'), ((14813, 14840), 'json.dumps', 'json.dumps', (['value'], {'indent': '(4)'}), '(value, indent=4)\n', (14823, 14840), False, 'import json\n'), ((15999, 16023), 'prettytable.PrettyTable', 'PrettyTable', (["['Account']"], {}), "(['Account'])\n", (16010, 16023), False, 'from prettytable import PrettyTable\n'), ((14684, 14709), 'json.loads', 'json.loads', (["(value or '{}')"], {}), "(value or '{}')\n", (14694, 14709), False, 'import json\n'), ((15628, 15655), 'json.dumps', 'json.dumps', (['value'], {'indent': '(4)'}), '(value, indent=4)\n', (15638, 15655), False, 'import json\n'), ((18303, 18355), 'getpass.getpass', 'getpass.getpass', (['"""Private Key (wif) [Enter to quit:"""'], {}), "('Private Key (wif) [Enter to quit:')\n", (18318, 18355), False, 'import getpass\n'), ((18747, 18777), 'prettytable.PrettyTable', 'PrettyTable', (["['Available Key']"], {}), "(['Available Key'])\n", (18758, 18777), False, 'from prettytable import PrettyTable\n'), ((18958, 19004), 'prettytable.PrettyTable', 'PrettyTable', (["['Name', 'Type', 'Available Key']"], {}), "(['Name', 'Type', 'Available Key'])\n", (18969, 19004), False, 'from prettytable import PrettyTable\n'), ((18116, 18131), 'beowulfbase.account.PrivateKey', 'PrivateKey', (['key'], {}), '(key)\n', (18126, 18131), False, 'from beowulfbase.account 
import PrivateKey\n'), ((18460, 18478), 'beowulfbase.account.PrivateKey', 'PrivateKey', (['wifkey'], {}), '(wifkey)\n', (18470, 18478), False, 'from beowulfbase.account import PrivateKey\n'), ((14982, 14994), 'math.log10', 'log10', (['value'], {}), '(value)\n', (14987, 14994), False, 'from math import log10\n'), ((15065, 15078), 'math.log10', 'log10', (['(-value)'], {}), '(-value)\n', (15070, 15078), False, 'from math import log10\n'), ((19936, 19977), 'prettytable.PrettyTable', 'PrettyTable', (["['Account', 'BWF', 'W', 'M']"], {}), "(['Account', 'BWF', 'W', 'M'])\n", (19947, 19977), False, 'from prettytable import PrettyTable\n'), ((21011, 21054), 'getpass.getpass', 'getpass.getpass', (['"""New Account Passphrase: """'], {}), "('New Account Passphrase: ')\n", (21026, 21054), False, 'import getpass\n'), ((21714, 21753), 'getpass.getpass', 'getpass.getpass', (['"""Account Passphrase: """'], {}), "('Account Passphrase: ')\n", (21729, 21753), False, 'import getpass\n'), ((21206, 21257), 'getpass.getpass', 'getpass.getpass', (['"""Confirm New Account Passphrase: """'], {}), "('Confirm New Account Passphrase: ')\n", (21221, 21257), False, 'import getpass\n'), ((21878, 21927), 'beowulfbase.account.PasswordKey', 'PasswordKey', (['args.account', 'password'], {'role': '"""owner"""'}), "(args.account, password, role='owner')\n", (21889, 21927), False, 'from beowulfbase.account import PasswordKey\n'), ((22669, 22685), 'sys.stdin.read', 'sys.stdin.read', ([], {}), '()\n', (22683, 22685), False, 'import sys\n'), ((22469, 22494), 'os.path.isfile', 'os.path.isfile', (['args.file'], {}), '(args.file)\n', (22483, 22494), False, 'import os\n'), ((23053, 23069), 'sys.stdin.read', 'sys.stdin.read', ([], {}), '()\n', (23067, 23069), False, 'import sys\n'), ((22853, 22878), 'os.path.isfile', 'os.path.isfile', (['args.file'], {}), '(args.file)\n', (22867, 22878), False, 'import os\n')]
|
from pythonrouge import pythonrouge
ROUGE = './RELEASE-1.5.5/ROUGE-1.5.5.pl'
data_path = './RELEASE-1.5.5/data'
peer = "Tokyo is the one of the biggest city in the world."
model = "The capital of Japan, Tokyo, is the center of Japanese economy."
score = pythonrouge.pythonrouge(peer, model, ROUGE, data_path)
print(score)
peer = "Tokyo is the one of the biggest city in the world."
model = "Tokyo is one of the biggest city in the world."
score = pythonrouge.pythonrouge(peer, model, ROUGE, data_path)
print(score)
peer = "President Bush's nomination of black conservative <NAME> to " \
    "replace the Supreme Court's first black Justice, liberal Thurgood " \
    "Marshall, split the Senate down the middle. Thomas's opposition to " \
    "affirmative action alienated civil rights activists while his Catholic " \
    "upbringing and interest in the priesthood raised alarm in " \
    "abortion-rights groups. The Judiciary Committee deadlocked 7-7 and " \
    "the nomination was referred to the Senate without recommendation after " \
    "extended televised hearings on charges of sexual harassment against " \
    "the nominee. Thomas was confirmed by a close 52-48 vote but he " \
    "commented that nothing could give him back his good name."
model = "<NAME> was confirmed as Supreme Court Justice in October 1991" \
"by a razor- thin margin of 52-48. Thomas, who has opposed affirmative" \
"action has not taken public stands on other key issues. His reputation" \
"was damaged by accusations of sexual harassment. As the youngest" \
"justice he is expected to be on the court for decades."
score = pythonrouge.pythonrouge(peer, model, ROUGE, data_path)
print(score)
|
[
"pythonrouge.pythonrouge.pythonrouge"
] |
[((256, 310), 'pythonrouge.pythonrouge.pythonrouge', 'pythonrouge.pythonrouge', (['peer', 'model', 'ROUGE', 'data_path'], {}), '(peer, model, ROUGE, data_path)\n', (279, 310), False, 'from pythonrouge import pythonrouge\n'), ((450, 504), 'pythonrouge.pythonrouge.pythonrouge', 'pythonrouge.pythonrouge', (['peer', 'model', 'ROUGE', 'data_path'], {}), '(peer, model, ROUGE, data_path)\n', (473, 504), False, 'from pythonrouge import pythonrouge\n'), ((1673, 1727), 'pythonrouge.pythonrouge.pythonrouge', 'pythonrouge.pythonrouge', (['peer', 'model', 'ROUGE', 'data_path'], {}), '(peer, model, ROUGE, data_path)\n', (1696, 1727), False, 'from pythonrouge import pythonrouge\n')]
|
#!/usr/bin/env python3
#
# Copyright 2013-2014 University of Southern California
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from .printer import *
import webauthn2
__all__ = [
'printer'
]
## setup web service configuration data
global_env = webauthn2.merge_config(
jsonFileName='ermrest_config.json',
built_ins={
"default_limit": 100,
"db": "microscopy",
"dbn": "postgres",
"dbmaxconnections": 8
}
)
## setup webauthn2 handler
webauthn2_config = global_env.get('webauthn2', dict(web_cookie_name='ermrest'))
webauthn2_config.update(dict(web_cookie_path='/ermrest'))
webauthn2_manager = webauthn2.Manager(overrides=webauthn2_config)
## setup web urls
def web_urls():
"""Builds and returns the web_urls for web.py.
"""
def printerClass(superClass, printers):
class C (superClass):
def __init__(self):
self.printers = printers
superClass.__init__(self)
return C
urls = (
# print job and print control
'/printer/([^/]+)/job', printerClass(printer.PrintJob, global_env.get('printers')),
'/printer/([^/]+)/job/([^/]+)/', printerClass(printer.PrintJob, global_env.get('printers')),
'/printer/([^/]+)/control/([^/]+)/', printerClass(printer.PrintControl, global_env.get('printers'))
)
return tuple(urls)
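# Usage sketch (illustrative, not part of this module): web.py consumes the
# flat (url_pattern, handler_class, ...) tuple returned by web_urls(), e.g.
#
#   import web
#   app = web.application(web_urls(), globals())
#
# printerClass() above exists so each generated handler class carries its own
# `printers` configuration instead of relying on module-level globals.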
|
[
"webauthn2.merge_config",
"webauthn2.Manager"
] |
[((759, 922), 'webauthn2.merge_config', 'webauthn2.merge_config', ([], {'jsonFileName': '"""ermrest_config.json"""', 'built_ins': "{'default_limit': 100, 'db': 'microscopy', 'dbn': 'postgres',\n 'dbmaxconnections': 8}"}), "(jsonFileName='ermrest_config.json', built_ins={\n 'default_limit': 100, 'db': 'microscopy', 'dbn': 'postgres',\n 'dbmaxconnections': 8})\n", (781, 922), False, 'import webauthn2\n'), ((1156, 1201), 'webauthn2.Manager', 'webauthn2.Manager', ([], {'overrides': 'webauthn2_config'}), '(overrides=webauthn2_config)\n', (1173, 1201), False, 'import webauthn2\n')]
|
import os
from mock import MagicMock
from bxcommon.services.extension_transaction_service import ExtensionTransactionService
from bxcommon.services.transaction_service import TransactionService
from bxcommon.test_utils import helpers
from bxcommon.utils import convert
from bxcommon.utils.object_hash import Sha256Hash, SHA256_HASH_LEN
from bxgateway.messages.eth.protocol.new_block_eth_protocol_message import NewBlockEthProtocolMessage
from bxgateway.services.eth.eth_extension_block_cleanup_service import EthExtensionBlockCleanupService
from bxgateway.testing.abstract_block_cleanup_service_test import AbstractBlockCleanupServiceTest
from bxgateway.services.eth.abstract_eth_block_cleanup_service import AbstractEthBlockCleanupService
from bxgateway.services.eth.eth_block_queuing_service import EthBlockQueuingService
class EthExtensionBlockCleanupServiceTest(AbstractBlockCleanupServiceTest):
def setUp(self) -> None:
super().setUp()
node_conn = MagicMock()
self.node.block_queuing_service = EthBlockQueuingService(self.node, node_conn)
self.node.connection_pool.add(1, "127.0.0.0", 8002, node_conn)
def _get_sample_block(self, file_path):
root_dir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(file_path))))
with open(os.path.join(root_dir, "samples/eth_sample_block.txt")) as sample_file:
btc_block = sample_file.read().strip("\n")
buf = bytearray(convert.hex_to_bytes(btc_block))
parsed_block = NewBlockEthProtocolMessage(msg_bytes=buf)
return parsed_block
def _test_mark_blocks_and_request_cleanup(self):
marked_block = Sha256Hash(binary=helpers.generate_bytearray(SHA256_HASH_LEN))
prev_block = Sha256Hash(binary=helpers.generate_bytearray(SHA256_HASH_LEN))
tracked_blocks = []
self.cleanup_service.on_new_block_received(marked_block, prev_block)
self.transaction_service.track_seen_short_ids(marked_block, [])
for _ in range(self.block_confirmations_count - 1):
tracked_block = Sha256Hash(binary=helpers.generate_bytearray(SHA256_HASH_LEN))
self.transaction_service.track_seen_short_ids(tracked_block, [])
tracked_blocks.append(tracked_block)
unmarked_block = Sha256Hash(binary=helpers.generate_bytearray(SHA256_HASH_LEN))
self.assertIsNone(self.cleanup_service.last_confirmed_block)
self.cleanup_service.mark_blocks_and_request_cleanup([marked_block, *tracked_blocks])
self.assertEqual(marked_block, self.cleanup_service.last_confirmed_block)
self.assertTrue(self.cleanup_service.is_marked_for_cleanup(marked_block))
self.assertFalse(self.cleanup_service.is_marked_for_cleanup(unmarked_block))
self.assertEqual(marked_block, self.cleanup_service.last_confirmed_block)
def _test_block_cleanup(self):
block_msg = self._get_sample_block(self._get_file_path())
transactions = list(block_msg.txns())
block_hash = block_msg.block_hash()
transaction_hashes = []
for idx, tx in enumerate(transactions):
tx_hash = tx.hash()
tx_content = str(tx).encode()
self.transaction_service.set_transaction_contents(tx_hash, tx_content)
self.transaction_service.assign_short_id(tx_hash, idx + 1)
transaction_hashes.append(tx_hash)
self.cleanup_service._block_hash_marked_for_cleanup.add(block_hash)
self.cleanup_service.clean_block_transactions(block_msg, self.transaction_service)
self.assertEqual(0, self.transaction_service._total_tx_contents_size)
for tx_hash in transaction_hashes:
self.assertFalse(self.transaction_service.has_transaction_contents(tx_hash))
self.node.post_block_cleanup_tasks.assert_called_once_with(
block_hash,
[],
transaction_hashes
)
def test_mark_blocks_and_request_cleanup(self):
self._test_mark_blocks_and_request_cleanup()
def test_block_cleanup(self):
self._test_block_cleanup()
def test_block_confirmation_cleanup(self):
self._test_block_confirmation_cleanup()
def _get_transaction_service(self) -> TransactionService:
return ExtensionTransactionService(self.node, 1)
def _get_cleanup_service(self) -> AbstractEthBlockCleanupService:
return EthExtensionBlockCleanupService(self.node, 1)
def _get_file_path(self) -> str:
return __file__
|
[
"os.path.abspath",
"bxcommon.services.extension_transaction_service.ExtensionTransactionService",
"bxcommon.utils.convert.hex_to_bytes",
"bxgateway.messages.eth.protocol.new_block_eth_protocol_message.NewBlockEthProtocolMessage",
"bxgateway.services.eth.eth_extension_block_cleanup_service.EthExtensionBlockCleanupService",
"bxcommon.test_utils.helpers.generate_bytearray",
"mock.MagicMock",
"os.path.join",
"bxgateway.services.eth.eth_block_queuing_service.EthBlockQueuingService"
] |
[((977, 988), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (986, 988), False, 'from mock import MagicMock\n'), ((1031, 1075), 'bxgateway.services.eth.eth_block_queuing_service.EthBlockQueuingService', 'EthBlockQueuingService', (['self.node', 'node_conn'], {}), '(self.node, node_conn)\n', (1053, 1075), False, 'from bxgateway.services.eth.eth_block_queuing_service import EthBlockQueuingService\n'), ((1514, 1555), 'bxgateway.messages.eth.protocol.new_block_eth_protocol_message.NewBlockEthProtocolMessage', 'NewBlockEthProtocolMessage', ([], {'msg_bytes': 'buf'}), '(msg_bytes=buf)\n', (1540, 1555), False, 'from bxgateway.messages.eth.protocol.new_block_eth_protocol_message import NewBlockEthProtocolMessage\n'), ((4272, 4313), 'bxcommon.services.extension_transaction_service.ExtensionTransactionService', 'ExtensionTransactionService', (['self.node', '(1)'], {}), '(self.node, 1)\n', (4299, 4313), False, 'from bxcommon.services.extension_transaction_service import ExtensionTransactionService\n'), ((4400, 4445), 'bxgateway.services.eth.eth_extension_block_cleanup_service.EthExtensionBlockCleanupService', 'EthExtensionBlockCleanupService', (['self.node', '(1)'], {}), '(self.node, 1)\n', (4431, 4445), False, 'from bxgateway.services.eth.eth_extension_block_cleanup_service import EthExtensionBlockCleanupService\n'), ((1458, 1489), 'bxcommon.utils.convert.hex_to_bytes', 'convert.hex_to_bytes', (['btc_block'], {}), '(btc_block)\n', (1478, 1489), False, 'from bxcommon.utils import convert\n'), ((1307, 1361), 'os.path.join', 'os.path.join', (['root_dir', '"""samples/eth_sample_block.txt"""'], {}), "(root_dir, 'samples/eth_sample_block.txt')\n", (1319, 1361), False, 'import os\n'), ((1679, 1722), 'bxcommon.test_utils.helpers.generate_bytearray', 'helpers.generate_bytearray', (['SHA256_HASH_LEN'], {}), '(SHA256_HASH_LEN)\n', (1705, 1722), False, 'from bxcommon.test_utils import helpers\n'), ((1763, 1806), 'bxcommon.test_utils.helpers.generate_bytearray', 'helpers.generate_bytearray', (['SHA256_HASH_LEN'], {}), '(SHA256_HASH_LEN)\n', (1789, 1806), False, 'from bxcommon.test_utils import helpers\n'), ((2305, 2348), 'bxcommon.test_utils.helpers.generate_bytearray', 'helpers.generate_bytearray', (['SHA256_HASH_LEN'], {}), '(SHA256_HASH_LEN)\n', (2331, 2348), False, 'from bxcommon.test_utils import helpers\n'), ((1259, 1285), 'os.path.abspath', 'os.path.abspath', (['file_path'], {}), '(file_path)\n', (1274, 1285), False, 'import os\n'), ((2091, 2134), 'bxcommon.test_utils.helpers.generate_bytearray', 'helpers.generate_bytearray', (['SHA256_HASH_LEN'], {}), '(SHA256_HASH_LEN)\n', (2117, 2134), False, 'from bxcommon.test_utils import helpers\n')]
|
# Copyright 2021 Netflix, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations
import logging
from typing import Any
from typing import Dict
from typing import Optional
from repokid import CONFIG
from repokid.types import RepokidConfig
logger = logging.getLogger("repokid")
class RepokidPlugin:
def __init__(self, config: Optional[RepokidConfig] = None):
if config:
self.config = config
else:
self.config = CONFIG
class M_A(type):
pass
class Singleton(M_A):
_instances: Dict[str, Singleton] = {}
def __call__(cls, *args: Any, **kwargs: Any) -> Singleton:
if cls.__name__ not in cls._instances:
cls._instances[cls.__name__] = super(Singleton, cls).__call__(
*args, **kwargs
)
return cls._instances[cls.__name__]
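# Usage sketch (illustrative, not part of the original module): any class that
# uses Singleton as its metaclass is instantiated once per class name, and the
# cached instance is returned on every later call.
if __name__ == "__main__":
    class _ExampleCache(metaclass=Singleton):
        def __init__(self, size: int = 0) -> None:
            self.size = size

    first = _ExampleCache(size=10)
    second = _ExampleCache()
    # both names refer to the single cached instance
    assert first is second and second.size == 10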
|
[
"logging.getLogger"
] |
[((791, 819), 'logging.getLogger', 'logging.getLogger', (['"""repokid"""'], {}), "('repokid')\n", (808, 819), False, 'import logging\n')]
|
from if97 import region1, region2, region3, region4
###########################################################
##### Pressure-Temperature Formulation #####
###########################################################
def idRegion(P, T):
"""Identification of region from IF97 specification
    using pressure and temperature as primary variables"""
# Constant boundaries
Pbnd0 = region1.Pbnd0
Pbnd1 = region1.Pbnd1
Tbnd01 = region1.Tbnd01
Tbnd25 = region2.Tbnd25
Tbnd13 = region1.Tbnd13
# non-constant boundaries
Pbnd32 = region3.bnd23P(min(max(T, Tbnd13), 863.15))
Pbnd4 = satP(min(max(T, Tbnd01), Tbnd13))
region = 0
if (P >= Pbnd0) and (T >= Tbnd01) and (P <= Pbnd1) and (T <= Tbnd25):
if (T <= Tbnd13) and (P >= Pbnd4):
region = 1
elif (T < Tbnd13) or (P <= Pbnd32):
region = 2
else:
# region 3 via P,T relations not implemented
region = 0
    assert (region is not 0), "Water properties not available!"
return region
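# Example usage sketch (illustrative values; P in MPa and T in K, matching the
# rest of this module): compressed liquid at 10 MPa / 500 K sits above the
# saturation pressure and below Tbnd13, while low-pressure steam at
# 0.1 MPa / 700 K lies well below the region 2/3 boundary.
#   idRegion(10.0, 500.0)  # -> 1
#   idRegion(0.1, 700.0)   # -> 2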
#### water properties ####
def g(P, T, region = 0):
"""Specific gibbs free energy [kJ / kg K]"""
if region is 0:
region = idRegion(P, T)
if region is 1:
return region1.g(P, T)
elif region is 2:
return region2.g(P, T)
else:
return 0.000
def v(P, T, region = 0):
"""Specific volume [m^3 / kg]"""
if region is 0:
region = idRegion(P, T)
if region is 1:
return region1.v(P, T)
elif region is 2:
return region2.v(P, T)
else:
return 0.000
def u(P, T, region = 0):
"""Specific internal energy [kJ / kg]"""
if region is 0:
region = idRegion(P, T)
if region is 1:
return region1.u(P, T)
elif region is 2:
return region2.u(P, T)
else:
return 0.000
def s(P, T, region = 0):
"""Specific entropy [kJ / kg K]"""
if region is 0:
region = idRegion(P, T)
if region is 1:
return region1.s(P, T)
elif region is 2:
return region2.s(P, T)
else:
return 0.000
def h(P, T, region = 0):
"""Specific enthalpy [kJ / kg]"""
if region is 0:
region = idRegion(P, T)
if region is 1:
return region1.h(P, T)
elif region is 2:
return region2.h(P, T)
else:
return 0.000
def cp(P, T, region = 0):
""" Specific isobaric heat capacity [kJ / kg K]"""
if region is 0:
region = idRegion(P, T)
if region is 1:
return region1.cp(P, T)
elif region is 2:
return region2.cp(P, T)
else:
return 0.000
def cv(P, T, region = 0):
""" Specific isochoric heat capacity [kJ / kg K]"""
if region is 0:
region = idRegion(P, T)
if region is 1:
return region1.cv(P, T)
elif region is 2:
return region2.cv(P, T)
else:
return 0.000
def w(P, T, region = 0):
""" Speed of sound [m / s]"""
if region is 0:
region = idRegion(P, T)
if region is 1:
return region1.w(P, T)
elif region is 2:
return region2.w(P, T)
else:
return 0.000
def a(P, T, region = 0):
"""Isobaric cubic expansion coefficient [1 / K]"""
if region is 0:
region = idRegion(P, T)
if region is 1:
return region1.a(P, T)
elif region is 2:
return region2.a(P, T)
else:
return 0.000
def k(P, T, region = 0):
"""Isothermal compressibility [kg / kJ]"""
if region is 0:
region = idRegion(P, T)
if region is 1:
return region1.k(P, T)
elif region is 2:
return region2.k(P, T)
else:
return 0.000
#### water property derivatives ####
def dgdP(P, T, region = 0):
""" Derivative of specific gibbs free energy [kJ m^3 / kg kJ]
w.r.t pressure at constant temperature"""
if region is 0:
region = idRegion(P, T)
if region is 1:
return region1.dgdP(P, T)
elif region is 2:
return region2.dgdP(P, T)
else:
return 0.000
def dvdP(P, T, region = 0):
""" Derivative of specific volume [m^3 m^3 / kg kJ]
w.r.t pressure at constant temperature"""
if region is 0:
region = idRegion(P, T)
if region is 1:
return region1.dvdP(P, T)
elif region is 2:
return region2.dvdP(P, T)
else:
return 0.000
def dudP(P, T, region = 0):
""" Derivative of specific internal energy [kJ m^3 / kg kJ]
w.r.t pressure at constant temperature"""
if region is 0:
region = idRegion(P, T)
if region is 1:
return region1.dudP(P, T)
elif region is 2:
return region2.dudP(P, T)
else:
return 0.000
def dsdP(P, T, region = 0):
""" Derivative of specific entropy [kJ m^3 / kg K kJ]
w.r.t pressure at constant temperature"""
if region is 0:
region = idRegion(P, T)
if region is 1:
return region1.dsdP(P, T)
elif region is 2:
return region2.dsdP(P, T)
else:
return 0.000
def dhdP(P, T, region = 0):
""" Derivative of specific enthalpy [kJ m^3 / kg kJ]
w.r.t pressure at constant temperature"""
if region is 0:
region = idRegion(P, T)
if region is 1:
return region1.dhdP(P, T)
elif region is 2:
return region2.dhdP(P, T)
else:
return 0.000
def dgdT(P, T, region = 0):
""" Derivative of specific gibbs free energy [kJ / kg K]
w.r.t temperature at constant pressure"""
if region is 0:
region = idRegion(P, T)
if region is 1:
return region1.dgdT(P, T)
elif region is 2:
return region2.dgdT(P, T)
else:
return 0.000
def dvdT(P, T, region = 0):
""" Derivative of specific volume [m^3 / kg K]
w.r.t temperature at constant pressure"""
if region is 0:
region = idRegion(P, T)
if region is 1:
return region1.dvdT(P, T)
elif region is 2:
return region2.dvdT(P, T)
else:
return 0.000
def dudT(P, T, region = 0):
""" Derivative of specific internal energy [kJ / kg K]
w.r.t temperature at constant pressure"""
if region is 0:
region = idRegion(P, T)
if region is 1:
return region1.dudT(P, T)
elif region is 2:
return region2.dudT(P, T)
else:
return 0.000
def dsdT(P, T, region = 0):
""" Derivative of specific entropy [kJ / kg K K]
w.r.t temperature at constant pressure"""
if region is 0:
region = idRegion(P, T)
if region is 1:
return region1.dsdT(P, T)
elif region is 2:
return region2.dsdT(P, T)
else:
return 0.000
def dhdT(P, T, region = 0):
""" Derivative of specific enthalpy [kJ / kg K]
w.r.t temperature at constant pressure"""
if region is 0:
region = idRegion(P, T)
if region is 1:
return region1.dhdT(P, T)
elif region is 2:
return region2.dhdT(P, T)
else:
return 0.000
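# --- Added illustrative note (not part of the original module text) ---
# The pressure-temperature interface above dispatches on idRegion(P, T).  As a
# rough orientation, and assuming the package is imported as `if97`, a
# compressed-liquid state such as P = 3 MPa, T = 300 K resolves to region 1,
# where the IF97 verification tables give approximately h ~ 115.33 kJ/kg and
# s ~ 0.3923 kJ/kg K:
#
#     >>> if97.idRegion(3.0, 300.0)
#     1
#     >>> if97.h(3.0, 300.0)        # expected ~115.33 kJ/kg
#     >>> if97.s(3.0, 300.0)        # expected ~0.3923 kJ/kg K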
###########################################################
##### Pressure-Enthalpy Formulation #####
###########################################################
def idRegion_h(P, h):
"""Identification of region from IF97 specification
using pressure and enthalpy as primary variables"""
# supporting boundaries
Tbnd01 = region1.Tbnd01
Pbnd4 = satP(Tbnd01)
Tbnd25 = region2.Tbnd25
Tbnd13 = region1.Tbnd13
Tbnd32 = region3.bnd23T(min(max(P, 16.5292), 100.0))
Tbnd4 = satT(P)
    # Enthalpy-pressure boundaries
Pbnd0 = region1.Pbnd0
Pbnd1 = region1.Pbnd1
hbnd01 = region1.h(Pbnd4, Tbnd01)
hbnd25 = region2.h(Pbnd0, Tbnd25)
Pbndh1 = satP(Tbnd13)
hbnd13 = region1.h(P, Tbnd13)
hbnd32 = region2.h(P, Tbnd32)
hbnd14 = region1.h(P, Tbnd4)
hbnd42 = region2.h(P, Tbnd4)
region = 0
if (P >= Pbnd0) and (h >= hbnd01) and (P <= Pbnd1) and (h <= hbnd25):
if (P >= Pbndh1):
if (h <= hbnd13):
region = 1
elif (h >= hbnd32):
region = 2
else:
# region 3 via P,h relations not implemented
region = 0
else:
if (h <= hbnd14):
region = 1
elif (h >= hbnd42):
region = 2
else:
region = 4
    assert region != 0, "Water properties not available!"
return region
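# --- Added worked example (approximate steam-table figures, for orientation only) ---
# At P = 1.0 MPa the saturated-liquid and saturated-vapor enthalpies are about
# hf ~ 763 kJ/kg and hg ~ 2778 kJ/kg, so the dispatch above classifies
#   h =  500 kJ/kg  -> region 1 (compressed liquid)
#   h = 1500 kJ/kg  -> region 4 (two-phase mixture)
#   h = 3000 kJ/kg  -> region 2 (superheated vapor)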
#### water properties ####
def g_h(P, h, region = 0):
"""Specific gibbs free energy [kJ / kg]"""
if region is 0:
region = idRegion_h(P, h)
if region is 1:
return region1.g_h(P, h)
elif region is 2:
return region2.g_h(P, h)
elif region is 4:
return region4.g_h(P, h)
else:
return 0.000
def v_h(P, h, region = 0):
"""Specific volume [m^3 / kg]"""
if region is 0:
region = idRegion_h(P, h)
if region is 1:
return region1.v_h(P, h)
elif region is 2:
return region2.v_h(P, h)
elif region is 4:
return region4.v_h(P, h)
else:
return 0.000
def u_h(P, h, region = 0):
"""Specific internal energy [kJ / kg]"""
if region is 0:
region = idRegion_h(P, h)
if region is 1:
return region1.u_h(P, h)
elif region is 2:
return region2.u_h(P, h)
elif region is 4:
return region4.u_h(P, h)
else:
return 0.000
def s_h(P, h, region = 0):
"""Specific entropy [kJ / kg K]"""
if region is 0:
region = idRegion_h(P, h)
if region is 1:
return region1.s_h(P, h)
elif region is 2:
return region2.s_h(P, h)
elif region is 4:
return region4.s_h(P, h)
else:
return 0.000
def T_h(P, h, region = 0):
""" Temperature [K]"""
if region is 0:
region = idRegion_h(P, h)
if region is 1:
return region1.T_h(P, h)
elif region is 2:
return region2.T_h(P, h)
elif region is 4:
return region4.satT(P)
else:
return 0.000
def cp_h(P, h, region = 0):
""" Specific isobaric heat capacity [kJ / kg K]"""
if region is 0:
region = idRegion_h(P, h)
if region is 1:
return region1.cp_h(P, h)
elif region is 2:
return region2.cp_h(P, h)
elif region is 4:
return region4.cp_h(P, h)
else:
return 0.000
def cv_h(P, h, region = 0):
""" Specific isochoric heat capacity [kJ / kg K]"""
if region is 0:
region = idRegion_h(P, h)
if region is 1:
return region1.cv_h(P, h)
elif region is 2:
return region2.cv_h(P, h)
elif region is 4:
return region4.cv_h(P, h)
else:
return 0.000
def w_h(P, h, region = 0):
""" Speed of sound [m / s]"""
if region is 0:
region = idRegion_h(P, h)
if region is 1:
return region1.w_h(P, h)
elif region is 2:
return region2.w_h(P, h)
elif region is 4:
return region4.w_h(P, h)
else:
return 0.000
def a_h(P, h, region = 0):
"""Isobaric cubic expansion coefficient [1 / K]"""
if region is 0:
region = idRegion_h(P, h)
if region is 1:
return region1.a_h(P, h)
elif region is 2:
return region2.a_h(P, h)
elif region is 4:
return region4.a_h(P, h)
else:
return 0.000
def k_h(P, h, region = 0):
"""Isothermal compressibility [kg / kJ]"""
if region is 0:
region = idRegion_h(P, h)
if region is 1:
return region1.k_h(P, h)
elif region is 2:
return region2.k_h(P, h)
elif region is 4:
return region4.k_h(P, h)
else:
return 0.000
#### water property derivatives ####
def dgdP_h(P, h, region = 0):
""" Derivative of specific gibbs free energy [kJ m^3 / kg kJ]
w.r.t pressure at constant specific enthalpy"""
if region is 0:
region = idRegion_h(P, h)
if region is 1:
return region1.dgdP_h(P, h)
elif region is 2:
return region2.dgdP_h(P, h)
elif region is 4:
return region4.dgdP_h(P, h)
else:
return 0.000
def dvdP_h(P, h, region = 0):
""" Derivative of specific volume [m^3 m^3 / kg kJ]
w.r.t pressure at constant specific enthalpy"""
if region is 0:
region = idRegion_h(P, h)
if region is 1:
return region1.dvdp_h(P, h)
elif region is 2:
return region2.dvdP_h(P, h)
elif region is 4:
return region4.dvdP_h(P, h)
else:
return 0.000
def dudP_h(P, h, region = 0):
""" Derivative of specific internal energy [kJ m^3 / kg kJ]
w.r.t pressure at constant specific enthalpy"""
if region is 0:
region = idRegion_h(P, h)
if region is 1:
return region1.dudP_h(P, h)
elif region is 2:
return region2.dudP_h(P, h)
elif region is 4:
return region4.dudP_h(P, h)
else:
return 0.000
def dsdP_h(P, h, region = 0):
""" Derivative of specific entropy [kJ m^3 / kg K kJ]
w.r.t pressure at constant specific enthalpy"""
if region is 0:
region = idRegion_h(P, h)
if region is 1:
return region1.dsdP_h(P, h)
elif region is 2:
return region2.dsdP_h(P, h)
elif region is 4:
return region4.dsdP_h(P, h)
else:
return 0.000
def dhdP_h(P, h, region = 0):
""" Derivative of specific enthalpy [kJ m^3 / kg kJ]
w.r.t pressure at constant specific enthalpy"""
if region is 0:
region = idRegion_h(P, h)
if region is 1:
return 0.000
elif region is 2:
return 0.000
elif region is 4:
return region4.dhdP_h(P, h)
else:
return 0.000
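# Added note: in the single-phase regions 1 and 2 the specific enthalpy is the
# independent variable of this formulation, so its derivative w.r.t. pressure
# at constant enthalpy is identically zero; only the two-phase case delegates
# to region4.dhdP_h, which presumably accounts for the saturation-line
# relation between pressure, enthalpy and steam quality.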
def dTdP_h(P, h, region = 0):
""" Derivative of Temperature [K m^3 / kJ]
w.r.t pressure at constant specific enthalpy"""
if region is 0:
region = idRegion_h(P, h)
if region is 1:
return region1.dTdP_h(P, h)
elif region is 2:
return region2.dTdP_h(P, h)
elif region is 4:
return region4.dTsdP(P)
else:
return 0.000
def dgdh_h(P, h, region = 0):
""" Derivative of specific gibbs free energy [kJ kg / kg kJ]
w.r.t specific enthalpy at constant pressure"""
if region is 0:
region = idRegion_h(P, h)
if region is 1:
return region1.dgdh_h(P, h)
elif region is 2:
return region2.dgdh_h(P, h)
elif region is 4:
return region4.dgdh_h(P, h)
else:
return 0.000
def dvdh_h(P, h, region = 0):
""" Derivative of specific volume [m^3 kg / kg kJ]
w.r.t specific enthalpy at constant pressure"""
if region is 0:
region = idRegion_h(P, h)
if region is 1:
return region1.dvdh_h(P, h)
elif region is 2:
return region2.dvdh_h(P, h)
elif region is 4:
return region4.dvdh_h(P, h)
else:
return 0.000
def dudh_h(P, h, region = 0):
""" Derivative of specific internal energy [kJ kg / kg kJ]
w.r.t specific enthalpy at constant pressure"""
if region is 0:
region = idRegion_h(P, h)
if region is 1:
return region1.dudh_h(P, h)
elif region is 2:
return region2.dudh_h(P, h)
elif region is 4:
return region4.dudh_h(P, h)
else:
return 0.000
def dsdh_h(P, h, region = 0):
""" Derivative of specific entropy [kJ kg / kg K kJ]
w.r.t specific enthalpy at constant pressure"""
if region is 0:
region = idRegion_h(P, h)
if region is 1:
return region1.dsdh_h(P, h)
elif region is 2:
return region2.dsdh_h(P, h)
elif region is 4:
return region4.dsdh_h(P, h)
else:
return 0.000
def dhdh_h(P, h, region = 0):
""" Derivative of specific enthalpy [kJ kg / kg kJ]
w.r.t specific enthalpy at constant pressure"""
if region is 0:
region = idRegion_h(P, h)
if region is 1:
return 1.000
elif region is 2:
return 1.000
elif region is 4:
return 1.000
else:
return 0.000
def dTdh_h(P, h, region = 0):
""" Derivative of Temperature [K kg / kJ]
w.r.t specific enthalpy at constant pressure"""
if region is 0:
region = idRegion_h(P, h)
if region is 1:
return region1.dTdh_h(P, h)
elif region is 2:
return region2.dTdh_h(P, h)
elif region is 4:
return 0.000
else:
return 0.000
###########################################################
##### Pressure-Entropy Formulation #####
###########################################################
def idRegion_s(P, s):
"""Identification of region from IF97 specification
using pressure and enthalpy as primary variables"""
# supporting boundaries
Tbnd01 = region1.Tbnd01
Pbnd4 = satP(Tbnd01)
Tbnd25 = region2.Tbnd25
Tbnd13 = region1.Tbnd13
Tbnd32 = region3.bnd23T(min(max(P, 16.5292), 100.0))
Tbnd4 = satT(P)
    # Entropy-pressure boundaries
Pbnd0 = region1.Pbnd0
Pbnd1 = region1.Pbnd1
sbnd01 = region1.s(P, Tbnd01)
sbnd25 = region2.s(P, Tbnd25)
Pbndh1 = satP(Tbnd13)
sbnd13 = region1.s(P, Tbnd13)
sbnd32 = region2.s(P, Tbnd32)
sbnd14 = region1.s(P, Tbnd4)
sbnd42 = region2.s(P, Tbnd4)
region = 0
if (P >= Pbnd0) and (s >= sbnd01) and (P <= Pbnd1) and (s <= sbnd25):
if (P >= Pbndh1):
if (s <= sbnd13):
region = 1
elif (s >= sbnd32):
region = 2
else:
# region 3 via P,h relations not implemented
region = 0
else:
if (s <= sbnd14):
region = 1
elif (s >= sbnd42):
region = 2
else:
region = 4
    assert region != 0, "Water properties not available!"
return region
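# --- Added worked example (approximate steam-table figures, for orientation only) ---
# At P = 1.0 MPa the saturated-liquid and saturated-vapor entropies are about
# sf ~ 2.14 kJ/kg K and sg ~ 6.59 kJ/kg K, so the dispatch above classifies
#   s = 1.0 kJ/kg K  -> region 1 (compressed liquid)
#   s = 4.0 kJ/kg K  -> region 4 (two-phase mixture)
#   s = 7.5 kJ/kg K  -> region 2 (superheated vapor)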
#### water properties ####
def g_s(P, s, region = 0):
"""Specific gibbs free energy [kJ / kg]"""
if region is 0:
region = idRegion_s(P, s)
if region is 1:
return region1.g_s(P, s)
elif region is 2:
return region2.g_s(P, s)
elif region is 4:
return region4.g_s(P, s)
else:
return 0.000
def v_s(P, s, region = 0):
"""Specific volume [m^3 / kg]"""
if region is 0:
region = idRegion_s(P, s)
if region is 1:
return region1.v_s(P, s)
elif region is 2:
return region2.v_s(P, s)
elif region is 4:
return region4.v_s(P, s)
else:
return 0.000
def u_s(P, s, region = 0):
"""Specific internal energy [kJ / kg]"""
if region is 0:
region = idRegion_s(P, s)
if region is 1:
return region1.u_s(P, s)
elif region is 2:
return region2.u_s(P, s)
elif region is 4:
return region4.u_s(P, s)
else:
return 0.000
def T_s(P, s, region = 0):
""" Temperature [K]"""
if region is 0:
region = idRegion_s(P, s)
if region is 1:
return region1.T_s(P, s)
elif region is 2:
return region2.T_s(P, s)
elif region is 4:
return region4.satT(P)
else:
return 0.000
def h_s(P, s, region = 0):
"""Specific entropy [kJ / kg]"""
if region is 0:
region = idRegion_s(P, s)
if region is 1:
return region1.h_s(P, s)
elif region is 2:
return region2.h_s(P, s)
elif region is 4:
return region4.h_s(P, s)
else:
return 0.000
def cp_s(P, s, region = 0):
""" Specific isobaric heat capacity [kJ / kg K]"""
if region is 0:
region = idRegion_s(P, s)
if region is 1:
return region1.cp_s(P, s)
elif region is 2:
return region2.cp_s(P, s)
elif region is 4:
return region4.cp_s(P, s)
else:
return 0.000
def cv_s(P, s, region = 0):
""" Specific isochoric heat capacity [kJ / kg K]"""
if region is 0:
region = idRegion_s(P, s)
if region is 1:
return region1.cv_s(P, s)
elif region is 2:
return region2.cv_s(P, s)
elif region is 4:
return region4.cv_s(P, s)
else:
return 0.000
def w_s(P, s, region = 0):
""" Speed of sound [m / s]"""
if region is 0:
region = idRegion_s(P, s)
if region is 1:
return region1.w_s(P, s)
elif region is 2:
return region2.w_s(P, s)
elif region is 4:
return region4.w_s(P, s)
else:
return 0.000
def a_s(P, s, region = 0):
"""Isobaric cubic expansion coefficient [1 / K]"""
if region is 0:
region = idRegion_s(P, s)
if region is 1:
return region1.a_s(P, s)
elif region is 2:
return region2.a_s(P, s)
elif region is 4:
return region4.a_s(P, s)
else:
return 0.000
def k_s(P, s, region = 0):
"""Isothermal compressibility [kg / kJ]"""
if region is 0:
region = idRegion_s(P, s)
if region is 1:
return region1.k_s(P, s)
elif region is 2:
return region2.k_s(P, s)
elif region is 4:
return region4.k_s(P, s)
else:
return 0.000
#### water property derivatives ####
def dgdP_s(P, s, region = 0):
""" Derivative of specific gibbs free energy [kJ m^3 / kg kJ]
w.r.t pressure at constant specific entropy"""
if region is 0:
region = idRegion_s(P, s)
if region is 1:
return region1.dgdP_s(P, s)
elif region is 2:
return region2.dgdP_s(P, s)
elif region is 4:
return region4.dgdP_s(P, s)
else:
return 0.000
def dvdP_s(P, s, region = 0):
""" Derivative of specific volume [m^3 m^3 / kg kJ]
w.r.t pressure at constant specific entropy"""
if region is 0:
region = idRegion_s(P, s)
if region is 1:
return region1.dvdP_s(P, s)
elif region is 2:
return region2.dvdP_s(P, s)
elif region is 4:
return region4.dvdP_s(P, s)
else:
return 0.000
def dudP_s(P, s, region = 0):
""" Derivative of specific internal energy [kJ m^3 / kg kJ]
w.r.t pressure at constant specific entropy"""
if region is 0:
region = idRegion_s(P, s)
if region is 1:
return region1.dudP_s(P, s)
elif region is 2:
return region2.dudP_s(P, s)
elif region is 4:
return region4.dudP_s(P, s)
else:
return 0.000
def dsdP_s(P, s, region = 0):
""" Derivative of specific entropy [kJ m^3 / kg K kJ]
w.r.t pressure at constant specific/equilibrium entropy"""
if region is 0:
region = idRegion_s(P, s)
if region is 1:
return 0.000
elif region is 2:
return 0.000
elif region is 4:
return region4.dsdP_s(P, s)
else:
return 0.000
def dhdP_s(P, s, region = 0):
""" Derivative of specific enthalpy [kJ m^3 / kg kJ]
w.r.t pressure at constant specific entropy"""
if region is 0:
region = idRegion_s(P, s)
if region is 1:
return region1.dhdP_s(P, s)
elif region is 2:
return region2.dhdP_s(P, s)
elif region is 4:
return region4.dhdP_s(P, s)
else:
return 0.000
def dTdP_s(P, s, region = 0):
""" Derivative of Temperature [K m^3 / kJ]
w.r.t pressure at constant specific entropy"""
if region is 0:
region = idRegion_s(P, s)
if region is 1:
return region1.dTdP_s(P, s)
elif region is 2:
return region2.dTdP_s(P, s)
elif region is 4:
return region4.dTsdP(P)
else:
return 0.000
def dgds_s(P, s, region = 0):
""" Derivative of specific gibbs free energy [kJ kg K / kg kJ]
w.r.t specific entropy at constant pressure"""
if region is 0:
region = idRegion_s(P, s)
if region is 1:
return region1.dgds_s(P, s)
elif region is 2:
return region2.dgds_s(P, s)
elif region is 4:
return region4.dgds_s(P, s)
else:
return 0.000
def dvds_s(P, s, region = 0):
""" Derivative of specific volume [m^3 kg K / kg kJ]
w.r.t specific entropy at constant pressure"""
if region is 0:
region = idRegion_s(P, s)
if region is 1:
return region1.dvds_s(P, s)
elif region is 2:
return region2.dvds_s(P, s)
elif region is 4:
return region4.dvds_s(P, s)
else:
return 0.000
def duds_s(P, s, region = 0):
""" Derivative of specific internal energy [kJ kg K / kg kJ]
w.r.t specific entropy at constant pressure"""
if region is 0:
region = idRegion_s(P, s)
if region is 1:
return region1.duds_s(P, s)
elif region is 2:
return region2.duds_s(P, s)
elif region is 4:
return region4.duds_s(P, s)
else:
return 0.000
def dsds_s(P, s, region = 0):
""" Derivative of specific entropy [kJ kg K / kg K kJ]
w.r.t specific entropy at constant pressure"""
if region is 0:
region = idRegion_s(P, s)
if region is 1:
return 1.000
elif region is 2:
return 1.000
elif region is 4:
return 1.000
else:
return 0.000
def dhds_s(P, s, region = 0):
""" Derivative of specific enthalpy [kJ kg K / kg kJ]
w.r.t specific entropy at constant pressure"""
if region is 0:
region = idRegion_s(P, s)
if region is 1:
return region1.dhds_s(P, s)
elif region is 2:
return region2.dhds_s(P, s)
elif region is 4:
return region4.dhds_s(P, s)
else:
return 0.000
def dTds_s(P, s, region = 0):
""" Derivative of Temperature [K kg K / kJ]
w.r.t enthalpy at constant pressure"""
if region is 0:
region = idRegion_s(P, s)
if region is 1:
return region1.dTds_s(P, s)
elif region is 2:
return region2.dTds_s(P, s)
elif region is 4:
return 0.000
else:
return 0.000
###########################################################
##### Pressure Only (Saturation) Formulation #####
###########################################################
#### P-T saturation curves ####
def satP(T):
""" Saturation Pressure [Mpa]
for specified Temperature"""
return region4.satP(T)
def satT(P):
""" Saturation Temperature [K]
for specified Pressure"""
return region4.satT(P)
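# Added illustrative check: satP and satT are inverses along the saturation
# line, so satT(satP(T)) should reproduce T.  Near the normal boiling point the
# curve passes through roughly satP(373.15 K) ~ 0.1014 MPa, and at 1.0 MPa the
# saturation temperature is roughly satT(1.0) ~ 453 K (both figures approximate).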
#### Saturated liquid properties ####
def gf(P):
""" Specific gibbs free energy [kJ / kg]
of saturated liquid"""
return region4.gf(P)
def vf(P):
""" Specific volume [m^3 / kg]
of saturated liquid"""
return region4.vf(P)
def uf(P):
""" Specific internal energy [kJ / kg]
of saturated liquid"""
return region4.uf(P)
def sf(P):
""" Specific entropy [kJ / kg K]
of saturated liquid"""
return region4.sf(P)
def hf(P):
""" Specific enthalpy [kJ / kg]
of saturated liquid"""
return region4.hf(P)
def cpf(P):
""" Specific isobaric heat capacity [kJ / kg K]
of saturated liquid"""
return region4.cpf(P)
def cvf(P):
""" Specific isochoric heat capacity [kJ / kg K]
of saturated liquid"""
return region4.cvf(P)
def wf(P):
""" Speed of sound [m / s]
of saturated liquid"""
return region4.wf(P)
def af(P):
"""Isobaric cubic expansion coefficient [1 / K]
of saturated liquid"""
return region4.af(P)
def kf(P):
"""Isothermal compressibility [kg / kJ]
of saturated liquid"""
return region4.kf(P)
#### Saturated vapor properties ####
def gg(P):
""" Specific gibbs free energy [kJ / kg]
of saturated vapor"""
return region4.gg(P)
def vg(P):
""" Specific volume [m^3 / kg]
of saturated vapor"""
return region4.vg(P)
def ug(P):
""" Specific internal energy [kJ / kg]
of saturated vapor"""
return region4.ug(P)
def sg(P):
""" Specific entropy [kJ / kg K]
of saturated vapor"""
return region4.sg(P)
def hg(P):
""" Specific enthalpy [kJ / kg]
of saturated vapor"""
return region4.hg(P)
def cpg(P):
""" Specific isobaric heat capacity [kJ / kg K]
of saturated vapor"""
return region4.cpg(P)
def cvg(P):
""" Specific isochoric heat capacity [kJ / kg K]
of saturated vapor"""
return region4.cvg(P)
def wg(P):
""" Speed of sound [m / s]
of saturated vapor"""
return region4.wg(P)
def ag(P):
"""Isobaric cubic expansion coefficient [1 / K]
of saturated vapor"""
return region4.ag(P)
def kg(P):
"""Isothermal compressibility [kg / kJ]
of saturated vapor"""
return region4.kg(P)
#### delta saturation properties ####
def gfg(P):
""" Specific gibbs free energy; [kJ / kg]
saturation rise of"""
return region4.gfg(P)
def vfg(P):
""" Specific volume; [m^3 / kg]
saturation rise of"""
return region4.vfg(P)
def ufg(P):
""" Specific internal energy; [kJ / kg]
saturation rise of"""
return region4.ufg(P)
def sfg(P):
""" Specific entropy; [kJ / kg K]
saturation rise of"""
return region4.sfg(P)
def hfg(P):
""" Specific enthalpy; [kJ / kg]
saturation rise of"""
return region4.hfg(P)
def cpfg(P):
""" Specific isobaric heat capacity; [kJ / kg K]
saturation rise of"""
return region4.cpfg(P)
def cvfg(P):
""" Specific isochoric heat capacity; [kJ / kg K]
saturation rise of"""
return region4.cvfg(P)
def wfg(P):
""" Speed of sound; [m / s]
saturation rise of"""
return region4.wfg(P)
def afg(P):
"""Isobaric cubic expansion coefficient; [1 / K]
saturation rise of"""
return region4.afg(P)
def kfg(P):
"""Isothermal compressibility; [kg / kJ]
saturation rise of"""
return region4.kfg(P)
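# Added note: following the usual steam-table convention, the "*fg" quantities
# above are presumably the saturated-vapor value minus the saturated-liquid
# value at the same pressure, e.g. hfg(P) = hg(P) - hf(P).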
#### Saturated liquid derivatives ####
def dgfdP(P):
""" Derivative of Specific gibbs free energy [kJ m^3 / kg kJ]
of saturated liquid w.r.t. pressure"""
return region4.dgfdP(P)
def dvfdP(P):
""" Derivative of Specific volume [m^3 m^3 / kg kJ]
of saturated liquid w.r.t. pressure"""
return region4.dvfdP(P)
def dufdP(P):
""" Derivative of Specific internal energy [kJ m^3 / kg kJ]
of saturated liquid w.r.t. pressure"""
return region4.dufdP(P)
def dsfdP(P):
""" Derivative of Specific entropy [kJ m^3 / kg K kJ]
of saturated liquid w.r.t. pressure"""
return region4.dsfdP(P)
def dhfdP(P):
""" Derivative of Specific enthalpy [kJ m^3 / kg kJ]
of saturated liquid w.r.t. pressure"""
return region4.dhfdP(P)
#### Saturated vapor derivatives ####
def dggdP(P):
""" Derivative of Specific gibbs free energy [kJ m^3 / kg kJ]
of saturated vapor w.r.t. pressure"""
return region4.dggdP(P)
def dvgdP(P):
""" Derivative of Specific volume [m^3 m^3 / kg kJ]
of saturated vapor w.r.t. pressure"""
return region4.dvgdP(P)
def dugdP(P):
""" Derivative of Specific internal energy [kJ m^3 / kg kJ]
of saturated vapor w.r.t. pressure"""
return region4.dugdP(P)
def dsgdP(P):
""" Derivative of Specific entropy [kJ m^3 / kg K kJ]
of saturated vapor w.r.t. pressure"""
return region4.dsgdP(P)
def dhgdP(P):
""" Derivative of Specific enthalpy [kJ m^3 / kg kJ]
of saturated vapor w.r.t. pressure"""
return region4.dhgdP(P)
#### Delta saturation derivatives ####
def dgfgdP(P):
""" Derivative of Specific gibbs free energy [kJ m^3 / kg kJ]
w.r.t. pressure; saturation rise of"""
return region4.dgfgdP(P)
def dvfgdP(P):
""" Derivative of Specific volume [m^3 m^3 / kg kJ]
w.r.t. pressure; saturation rise of"""
return region4.dvfgdP(P)
def dufgdP(P):
""" Derivative of Specific internal energy [kJ m^3 / kg kJ]
w.r.t. pressure; saturation rise of"""
return region4.dufgdP(P)
def dsfgdP(P):
""" Derivative of Specific entropy [kJ m^3 / kg K kJ]
w.r.t. pressure; saturation rise of"""
return region4.dsfgdP(P)
def dhfgdP(P):
""" Derivative of Specific enthalpy [kJ m^3 / kg kJ]
w.r.t. pressure; saturation rise of"""
return region4.dhfgdP(P)
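if __name__ == "__main__":
    # Minimal smoke test added for illustration; the figures in the comments
    # are approximate IF97 / steam-table values, and the exact digits depend on
    # the region1/region2/region4 implementations this module wraps.
    P, T = 3.0, 300.0                       # MPa, K -> region 1
    print("region  :", idRegion(P, T))      # expect 1
    print("h       :", h(P, T))             # ~115.33 kJ/kg
    print("s       :", s(P, T))             # ~0.3923 kJ/kg K
    print("satT    :", satT(1.0))           # ~453 K
    print("hf, hg  :", hf(1.0), hg(1.0))    # ~763, ~2778 kJ/kg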
|
[
"if97.region4.a_s",
"if97.region4.kfg",
"if97.region1.dsdh_h",
"if97.region4.cvfg",
"if97.region1.dvdP",
"if97.region2.dhdT",
"if97.region2.w",
"if97.region2.dsdT",
"if97.region4.cv_h",
"if97.region1.g_h",
"if97.region4.cpf",
"if97.region1.v",
"if97.region2.s",
"if97.region2.k_h",
"if97.region4.kg",
"if97.region1.k",
"if97.region4.cvf",
"if97.region4.satP",
"if97.region4.dufgdP",
"if97.region1.dgdT",
"if97.region1.cv_s",
"if97.region1.cp_s",
"if97.region2.g",
"if97.region1.a_s",
"if97.region1.dvds_s",
"if97.region4.duds_s",
"if97.region2.u",
"if97.region1.dgdP_h",
"if97.region1.dTds_s",
"if97.region2.v",
"if97.region2.dhds_s",
"if97.region4.dgfdP",
"if97.region4.dsfdP",
"if97.region4.k_h",
"if97.region4.dudP_s",
"if97.region4.gg",
"if97.region2.k",
"if97.region2.dudh_h",
"if97.region2.dsdP_h",
"if97.region4.sfg",
"if97.region4.kf",
"if97.region2.dvdP_s",
"if97.region1.duds_s",
"if97.region4.wfg",
"if97.region2.cp_h",
"if97.region4.dsdP_s",
"if97.region4.v_s",
"if97.region1.dudh_h",
"if97.region2.dsdP",
"if97.region4.dhfgdP",
"if97.region1.dudP_h",
"if97.region1.v_s",
"if97.region1.dgdh_h",
"if97.region1.dvdT",
"if97.region1.a",
"if97.region4.dsfgdP",
"if97.region4.dhfdP",
"if97.region2.cv_h",
"if97.region4.hfg",
"if97.region2.g_h",
"if97.region2.w_s",
"if97.region2.dgdP_s",
"if97.region4.u_h",
"if97.region4.dhds_s",
"if97.region4.g_h",
"if97.region1.g_s",
"if97.region1.dvdp_h",
"if97.region1.dudP_s",
"if97.region4.ag",
"if97.region1.a_h",
"if97.region2.dgdh_h",
"if97.region4.vfg",
"if97.region1.s_h",
"if97.region4.dgdP_h",
"if97.region2.h",
"if97.region2.T_h",
"if97.region4.dhgdP",
"if97.region4.gfg",
"if97.region2.cv_s",
"if97.region2.dhdP",
"if97.region4.cv_s",
"if97.region1.dTdh_h",
"if97.region2.dTdP_s",
"if97.region4.wf",
"if97.region1.cp_h",
"if97.region4.dvfdP",
"if97.region4.cp_h",
"if97.region4.dufdP",
"if97.region4.dgdP_s",
"if97.region4.cpfg",
"if97.region4.dugdP",
"if97.region4.dvds_s",
"if97.region4.h_s",
"if97.region4.dsgdP",
"if97.region1.dgds_s",
"if97.region2.dvdh_h",
"if97.region4.vg",
"if97.region1.k_h",
"if97.region1.dsdP_h",
"if97.region4.dgfgdP",
"if97.region4.dgds_s",
"if97.region1.v_h",
"if97.region1.u_s",
"if97.region1.T_s",
"if97.region2.dhdP_s",
"if97.region4.dvdP_h",
"if97.region4.w_s",
"if97.region2.dudP_h",
"if97.region4.k_s",
"if97.region2.dTds_s",
"if97.region2.v_h",
"if97.region4.dudP_h",
"if97.region2.dgds_s",
"if97.region4.wg",
"if97.region2.duds_s",
"if97.region1.dudT",
"if97.region4.ufg",
"if97.region2.dsdh_h",
"if97.region1.w_s",
"if97.region4.cpg",
"if97.region2.g_s",
"if97.region2.dgdP_h",
"if97.region2.h_s",
"if97.region2.dgdT",
"if97.region4.dvdh_h",
"if97.region4.dvgdP",
"if97.region4.dgdh_h",
"if97.region1.dgdP",
"if97.region2.u_s",
"if97.region4.dsdh_h",
"if97.region4.vf",
"if97.region4.dvfgdP",
"if97.region1.u_h",
"if97.region2.dTdh_h",
"if97.region1.h_s",
"if97.region1.dvdh_h",
"if97.region1.w_h",
"if97.region2.v_s",
"if97.region2.dudP_s",
"if97.region1.w",
"if97.region2.dvds_s",
"if97.region4.dsdP_h",
"if97.region2.cv",
"if97.region4.ug",
"if97.region1.dhds_s",
"if97.region4.v_h",
"if97.region1.h",
"if97.region2.a",
"if97.region2.a_h",
"if97.region1.dhdT",
"if97.region2.s_h",
"if97.region2.dvdP",
"if97.region1.T_h",
"if97.region4.u_s",
"if97.region2.dvdP_h",
"if97.region1.cv_h",
"if97.region4.g_s",
"if97.region1.dgdP_s",
"if97.region4.hg",
"if97.region1.s",
"if97.region2.dudP",
"if97.region4.dggdP",
"if97.region4.satT",
"if97.region4.s_h",
"if97.region1.cv",
"if97.region4.a_h",
"if97.region1.dhdP_s",
"if97.region1.dsdP",
"if97.region2.cp_s",
"if97.region2.dgdP",
"if97.region4.dhdP_h",
"if97.region2.w_h",
"if97.region4.dTsdP",
"if97.region2.dvdT",
"if97.region2.u_h",
"if97.region4.dudh_h",
"if97.region1.g",
"if97.region4.sg",
"if97.region4.cvg",
"if97.region2.cp",
"if97.region2.a_s",
"if97.region4.gf",
"if97.region1.cp",
"if97.region1.dhdP",
"if97.region1.k_s",
"if97.region1.dTdP_s",
"if97.region4.w_h",
"if97.region4.sf",
"if97.region1.dvdP_s",
"if97.region2.k_s",
"if97.region4.afg",
"if97.region1.dudP",
"if97.region4.uf",
"if97.region1.dsdT",
"if97.region1.dTdP_h",
"if97.region2.dudT",
"if97.region2.T_s",
"if97.region4.cp_s",
"if97.region4.dvdP_s",
"if97.region4.hf",
"if97.region4.dhdP_s",
"if97.region4.af",
"if97.region2.dTdP_h",
"if97.region1.u"
] |
[((7618, 7642), 'if97.region1.h', 'region1.h', (['Pbnd4', 'Tbnd01'], {}), '(Pbnd4, Tbnd01)\n', (7627, 7642), False, 'from if97 import region1, region2, region3, region4\n'), ((7656, 7680), 'if97.region2.h', 'region2.h', (['Pbnd0', 'Tbnd25'], {}), '(Pbnd0, Tbnd25)\n', (7665, 7680), False, 'from if97 import region1, region2, region3, region4\n'), ((7720, 7740), 'if97.region1.h', 'region1.h', (['P', 'Tbnd13'], {}), '(P, Tbnd13)\n', (7729, 7740), False, 'from if97 import region1, region2, region3, region4\n'), ((7754, 7774), 'if97.region2.h', 'region2.h', (['P', 'Tbnd32'], {}), '(P, Tbnd32)\n', (7763, 7774), False, 'from if97 import region1, region2, region3, region4\n'), ((7788, 7807), 'if97.region1.h', 'region1.h', (['P', 'Tbnd4'], {}), '(P, Tbnd4)\n', (7797, 7807), False, 'from if97 import region1, region2, region3, region4\n'), ((7821, 7840), 'if97.region2.h', 'region2.h', (['P', 'Tbnd4'], {}), '(P, Tbnd4)\n', (7830, 7840), False, 'from if97 import region1, region2, region3, region4\n'), ((17024, 17044), 'if97.region1.s', 'region1.s', (['P', 'Tbnd01'], {}), '(P, Tbnd01)\n', (17033, 17044), False, 'from if97 import region1, region2, region3, region4\n'), ((17058, 17078), 'if97.region2.s', 'region2.s', (['P', 'Tbnd25'], {}), '(P, Tbnd25)\n', (17067, 17078), False, 'from if97 import region1, region2, region3, region4\n'), ((17118, 17138), 'if97.region1.s', 'region1.s', (['P', 'Tbnd13'], {}), '(P, Tbnd13)\n', (17127, 17138), False, 'from if97 import region1, region2, region3, region4\n'), ((17152, 17172), 'if97.region2.s', 'region2.s', (['P', 'Tbnd32'], {}), '(P, Tbnd32)\n', (17161, 17172), False, 'from if97 import region1, region2, region3, region4\n'), ((17186, 17205), 'if97.region1.s', 'region1.s', (['P', 'Tbnd4'], {}), '(P, Tbnd4)\n', (17195, 17205), False, 'from if97 import region1, region2, region3, region4\n'), ((17219, 17238), 'if97.region2.s', 'region2.s', (['P', 'Tbnd4'], {}), '(P, Tbnd4)\n', (17228, 17238), False, 'from if97 import region1, region2, region3, region4\n'), ((26093, 26108), 'if97.region4.satP', 'region4.satP', (['T'], {}), '(T)\n', (26105, 26108), False, 'from if97 import region1, region2, region3, region4\n'), ((26199, 26214), 'if97.region4.satT', 'region4.satT', (['P'], {}), '(P)\n', (26211, 26214), False, 'from if97 import region1, region2, region3, region4\n'), ((26349, 26362), 'if97.region4.gf', 'region4.gf', (['P'], {}), '(P)\n', (26359, 26362), False, 'from if97 import region1, region2, region3, region4\n'), ((26448, 26461), 'if97.region4.vf', 'region4.vf', (['P'], {}), '(P)\n', (26458, 26461), False, 'from if97 import region1, region2, region3, region4\n'), ((26555, 26568), 'if97.region4.uf', 'region4.uf', (['P'], {}), '(P)\n', (26565, 26568), False, 'from if97 import region1, region2, region3, region4\n'), ((26656, 26669), 'if97.region4.sf', 'region4.sf', (['P'], {}), '(P)\n', (26666, 26669), False, 'from if97 import region1, region2, region3, region4\n'), ((26756, 26769), 'if97.region4.hf', 'region4.hf', (['P'], {}), '(P)\n', (26766, 26769), False, 'from if97 import region1, region2, region3, region4\n'), ((26873, 26887), 'if97.region4.cpf', 'region4.cpf', (['P'], {}), '(P)\n', (26884, 26887), False, 'from if97 import region1, region2, region3, region4\n'), ((26992, 27006), 'if97.region4.cvf', 'region4.cvf', (['P'], {}), '(P)\n', (27003, 27006), False, 'from if97 import region1, region2, region3, region4\n'), ((27088, 27101), 'if97.region4.wf', 'region4.wf', (['P'], {}), '(P)\n', (27098, 27101), False, 'from if97 import region1, region2, region3, region4\n'), 
((27204, 27217), 'if97.region4.af', 'region4.af', (['P'], {}), '(P)\n', (27214, 27217), False, 'from if97 import region1, region2, region3, region4\n'), ((27312, 27325), 'if97.region4.kf', 'region4.kf', (['P'], {}), '(P)\n', (27322, 27325), False, 'from if97 import region1, region2, region3, region4\n'), ((27458, 27471), 'if97.region4.gg', 'region4.gg', (['P'], {}), '(P)\n', (27468, 27471), False, 'from if97 import region1, region2, region3, region4\n'), ((27556, 27569), 'if97.region4.vg', 'region4.vg', (['P'], {}), '(P)\n', (27566, 27569), False, 'from if97 import region1, region2, region3, region4\n'), ((27666, 27679), 'if97.region4.ug', 'region4.ug', (['P'], {}), '(P)\n', (27676, 27679), False, 'from if97 import region1, region2, region3, region4\n'), ((27766, 27779), 'if97.region4.sg', 'region4.sg', (['P'], {}), '(P)\n', (27776, 27779), False, 'from if97 import region1, region2, region3, region4\n'), ((27869, 27882), 'if97.region4.hg', 'region4.hg', (['P'], {}), '(P)\n', (27879, 27882), False, 'from if97 import region1, region2, region3, region4\n'), ((27985, 27999), 'if97.region4.cpg', 'region4.cpg', (['P'], {}), '(P)\n', (27996, 27999), False, 'from if97 import region1, region2, region3, region4\n'), ((28103, 28117), 'if97.region4.cvg', 'region4.cvg', (['P'], {}), '(P)\n', (28114, 28117), False, 'from if97 import region1, region2, region3, region4\n'), ((28198, 28211), 'if97.region4.wg', 'region4.wg', (['P'], {}), '(P)\n', (28208, 28211), False, 'from if97 import region1, region2, region3, region4\n'), ((28313, 28326), 'if97.region4.ag', 'region4.ag', (['P'], {}), '(P)\n', (28323, 28326), False, 'from if97 import region1, region2, region3, region4\n'), ((28420, 28433), 'if97.region4.kg', 'region4.kg', (['P'], {}), '(P)\n', (28430, 28433), False, 'from if97 import region1, region2, region3, region4\n'), ((28569, 28583), 'if97.region4.gfg', 'region4.gfg', (['P'], {}), '(P)\n', (28580, 28583), False, 'from if97 import region1, region2, region3, region4\n'), ((28670, 28684), 'if97.region4.vfg', 'region4.vfg', (['P'], {}), '(P)\n', (28681, 28684), False, 'from if97 import region1, region2, region3, region4\n'), ((28779, 28793), 'if97.region4.ufg', 'region4.ufg', (['P'], {}), '(P)\n', (28790, 28793), False, 'from if97 import region1, region2, region3, region4\n'), ((28882, 28896), 'if97.region4.sfg', 'region4.sfg', (['P'], {}), '(P)\n', (28893, 28896), False, 'from if97 import region1, region2, region3, region4\n'), ((28984, 28998), 'if97.region4.hfg', 'region4.hfg', (['P'], {}), '(P)\n', (28995, 28998), False, 'from if97 import region1, region2, region3, region4\n'), ((29103, 29118), 'if97.region4.cpfg', 'region4.cpfg', (['P'], {}), '(P)\n', (29115, 29118), False, 'from if97 import region1, region2, region3, region4\n'), ((29224, 29239), 'if97.region4.cvfg', 'region4.cvfg', (['P'], {}), '(P)\n', (29236, 29239), False, 'from if97 import region1, region2, region3, region4\n'), ((29322, 29336), 'if97.region4.wfg', 'region4.wfg', (['P'], {}), '(P)\n', (29333, 29336), False, 'from if97 import region1, region2, region3, region4\n'), ((29440, 29454), 'if97.region4.afg', 'region4.afg', (['P'], {}), '(P)\n', (29451, 29454), False, 'from if97 import region1, region2, region3, region4\n'), ((29550, 29564), 'if97.region4.kfg', 'region4.kfg', (['P'], {}), '(P)\n', (29561, 29564), False, 'from if97 import region1, region2, region3, region4\n'), ((29740, 29756), 'if97.region4.dgfdP', 'region4.dgfdP', (['P'], {}), '(P)\n', (29753, 29756), False, 'from if97 import region1, region2, region3, region4\n'), 
((29882, 29898), 'if97.region4.dvfdP', 'region4.dvfdP', (['P'], {}), '(P)\n', (29895, 29898), False, 'from if97 import region1, region2, region3, region4\n'), ((30032, 30048), 'if97.region4.dufdP', 'region4.dufdP', (['P'], {}), '(P)\n', (30045, 30048), False, 'from if97 import region1, region2, region3, region4\n'), ((30176, 30192), 'if97.region4.dsfdP', 'region4.dsfdP', (['P'], {}), '(P)\n', (30189, 30192), False, 'from if97 import region1, region2, region3, region4\n'), ((30319, 30335), 'if97.region4.dhfdP', 'region4.dhfdP', (['P'], {}), '(P)\n', (30332, 30335), False, 'from if97 import region1, region2, region3, region4\n'), ((30509, 30525), 'if97.region4.dggdP', 'region4.dggdP', (['P'], {}), '(P)\n', (30522, 30525), False, 'from if97 import region1, region2, region3, region4\n'), ((30650, 30666), 'if97.region4.dvgdP', 'region4.dvgdP', (['P'], {}), '(P)\n', (30663, 30666), False, 'from if97 import region1, region2, region3, region4\n'), ((30799, 30815), 'if97.region4.dugdP', 'region4.dugdP', (['P'], {}), '(P)\n', (30812, 30815), False, 'from if97 import region1, region2, region3, region4\n'), ((30942, 30958), 'if97.region4.dsgdP', 'region4.dsgdP', (['P'], {}), '(P)\n', (30955, 30958), False, 'from if97 import region1, region2, region3, region4\n'), ((31084, 31100), 'if97.region4.dhgdP', 'region4.dhgdP', (['P'], {}), '(P)\n', (31097, 31100), False, 'from if97 import region1, region2, region3, region4\n'), ((31277, 31294), 'if97.region4.dgfgdP', 'region4.dgfgdP', (['P'], {}), '(P)\n', (31291, 31294), False, 'from if97 import region1, region2, region3, region4\n'), ((31421, 31438), 'if97.region4.dvfgdP', 'region4.dvfgdP', (['P'], {}), '(P)\n', (31435, 31438), False, 'from if97 import region1, region2, region3, region4\n'), ((31573, 31590), 'if97.region4.dufgdP', 'region4.dufgdP', (['P'], {}), '(P)\n', (31587, 31590), False, 'from if97 import region1, region2, region3, region4\n'), ((31719, 31736), 'if97.region4.dsfgdP', 'region4.dsfgdP', (['P'], {}), '(P)\n', (31733, 31736), False, 'from if97 import region1, region2, region3, region4\n'), ((31864, 31881), 'if97.region4.dhfgdP', 'region4.dhfgdP', (['P'], {}), '(P)\n', (31878, 31881), False, 'from if97 import region1, region2, region3, region4\n'), ((1256, 1271), 'if97.region1.g', 'region1.g', (['P', 'T'], {}), '(P, T)\n', (1265, 1271), False, 'from if97 import region1, region2, region3, region4\n'), ((1506, 1521), 'if97.region1.v', 'region1.v', (['P', 'T'], {}), '(P, T)\n', (1515, 1521), False, 'from if97 import region1, region2, region3, region4\n'), ((1764, 1779), 'if97.region1.u', 'region1.u', (['P', 'T'], {}), '(P, T)\n', (1773, 1779), False, 'from if97 import region1, region2, region3, region4\n'), ((2016, 2031), 'if97.region1.s', 'region1.s', (['P', 'T'], {}), '(P, T)\n', (2025, 2031), False, 'from if97 import region1, region2, region3, region4\n'), ((2267, 2282), 'if97.region1.h', 'region1.h', (['P', 'T'], {}), '(P, T)\n', (2276, 2282), False, 'from if97 import region1, region2, region3, region4\n'), ((2536, 2552), 'if97.region1.cp', 'region1.cp', (['P', 'T'], {}), '(P, T)\n', (2546, 2552), False, 'from if97 import region1, region2, region3, region4\n'), ((2808, 2824), 'if97.region1.cv', 'region1.cv', (['P', 'T'], {}), '(P, T)\n', (2818, 2824), False, 'from if97 import region1, region2, region3, region4\n'), ((3057, 3072), 'if97.region1.w', 'region1.w', (['P', 'T'], {}), '(P, T)\n', (3066, 3072), False, 'from if97 import region1, region2, region3, region4\n'), ((3325, 3340), 'if97.region1.a', 'region1.a', (['P', 'T'], {}), '(P, 
T)\n', (3334, 3340), False, 'from if97 import region1, region2, region3, region4\n'), ((3585, 3600), 'if97.region1.k', 'region1.k', (['P', 'T'], {}), '(P, T)\n', (3594, 3600), False, 'from if97 import region1, region2, region3, region4\n'), ((3951, 3969), 'if97.region1.dgdP', 'region1.dgdP', (['P', 'T'], {}), '(P, T)\n', (3963, 3969), False, 'from if97 import region1, region2, region3, region4\n'), ((4275, 4293), 'if97.region1.dvdP', 'region1.dvdP', (['P', 'T'], {}), '(P, T)\n', (4287, 4293), False, 'from if97 import region1, region2, region3, region4\n'), ((4607, 4625), 'if97.region1.dudP', 'region1.dudP', (['P', 'T'], {}), '(P, T)\n', (4619, 4625), False, 'from if97 import region1, region2, region3, region4\n'), ((4933, 4951), 'if97.region1.dsdP', 'region1.dsdP', (['P', 'T'], {}), '(P, T)\n', (4945, 4951), False, 'from if97 import region1, region2, region3, region4\n'), ((5258, 5276), 'if97.region1.dhdP', 'region1.dhdP', (['P', 'T'], {}), '(P, T)\n', (5270, 5276), False, 'from if97 import region1, region2, region3, region4\n'), ((5588, 5606), 'if97.region1.dgdT', 'region1.dgdT', (['P', 'T'], {}), '(P, T)\n', (5600, 5606), False, 'from if97 import region1, region2, region3, region4\n'), ((5907, 5925), 'if97.region1.dvdT', 'region1.dvdT', (['P', 'T'], {}), '(P, T)\n', (5919, 5925), False, 'from if97 import region1, region2, region3, region4\n'), ((6234, 6252), 'if97.region1.dudT', 'region1.dudT', (['P', 'T'], {}), '(P, T)\n', (6246, 6252), False, 'from if97 import region1, region2, region3, region4\n'), ((6555, 6573), 'if97.region1.dsdT', 'region1.dsdT', (['P', 'T'], {}), '(P, T)\n', (6567, 6573), False, 'from if97 import region1, region2, region3, region4\n'), ((6875, 6893), 'if97.region1.dhdT', 'region1.dhdT', (['P', 'T'], {}), '(P, T)\n', (6887, 6893), False, 'from if97 import region1, region2, region3, region4\n'), ((8628, 8645), 'if97.region1.g_h', 'region1.g_h', (['P', 'h'], {}), '(P, h)\n', (8639, 8645), False, 'from if97 import region1, region2, region3, region4\n'), ((8941, 8958), 'if97.region1.v_h', 'region1.v_h', (['P', 'h'], {}), '(P, h)\n', (8952, 8958), False, 'from if97 import region1, region2, region3, region4\n'), ((9262, 9279), 'if97.region1.u_h', 'region1.u_h', (['P', 'h'], {}), '(P, h)\n', (9273, 9279), False, 'from if97 import region1, region2, region3, region4\n'), ((9577, 9594), 'if97.region1.s_h', 'region1.s_h', (['P', 'h'], {}), '(P, h)\n', (9588, 9594), False, 'from if97 import region1, region2, region3, region4\n'), ((9880, 9897), 'if97.region1.T_h', 'region1.T_h', (['P', 'h'], {}), '(P, h)\n', (9891, 9897), False, 'from if97 import region1, region2, region3, region4\n'), ((10210, 10228), 'if97.region1.cp_h', 'region1.cp_h', (['P', 'h'], {}), '(P, h)\n', (10222, 10228), False, 'from if97 import region1, region2, region3, region4\n'), ((10546, 10564), 'if97.region1.cv_h', 'region1.cv_h', (['P', 'h'], {}), '(P, h)\n', (10558, 10564), False, 'from if97 import region1, region2, region3, region4\n'), ((10859, 10876), 'if97.region1.w_h', 'region1.w_h', (['P', 'h'], {}), '(P, h)\n', (10870, 10876), False, 'from if97 import region1, region2, region3, region4\n'), ((11190, 11207), 'if97.region1.a_h', 'region1.a_h', (['P', 'h'], {}), '(P, h)\n', (11201, 11207), False, 'from if97 import region1, region2, region3, region4\n'), ((11513, 11530), 'if97.region1.k_h', 'region1.k_h', (['P', 'h'], {}), '(P, h)\n', (11524, 11530), False, 'from if97 import region1, region2, region3, region4\n'), ((11948, 11968), 'if97.region1.dgdP_h', 'region1.dgdP_h', (['P', 'h'], {}), '(P, h)\n', 
(11962, 11968), False, 'from if97 import region1, region2, region3, region4\n'), ((12344, 12364), 'if97.region1.dvdp_h', 'region1.dvdp_h', (['P', 'h'], {}), '(P, h)\n', (12358, 12364), False, 'from if97 import region1, region2, region3, region4\n'), ((12748, 12768), 'if97.region1.dudP_h', 'region1.dudP_h', (['P', 'h'], {}), '(P, h)\n', (12762, 12768), False, 'from if97 import region1, region2, region3, region4\n'), ((13146, 13166), 'if97.region1.dsdP_h', 'region1.dsdP_h', (['P', 'h'], {}), '(P, h)\n', (13160, 13166), False, 'from if97 import region1, region2, region3, region4\n'), ((13900, 13920), 'if97.region1.dTdP_h', 'region1.dTdP_h', (['P', 'h'], {}), '(P, h)\n', (13914, 13920), False, 'from if97 import region1, region2, region3, region4\n'), ((14302, 14322), 'if97.region1.dgdh_h', 'region1.dgdh_h', (['P', 'h'], {}), '(P, h)\n', (14316, 14322), False, 'from if97 import region1, region2, region3, region4\n'), ((14697, 14717), 'if97.region1.dvdh_h', 'region1.dvdh_h', (['P', 'h'], {}), '(P, h)\n', (14711, 14717), False, 'from if97 import region1, region2, region3, region4\n'), ((15100, 15120), 'if97.region1.dudh_h', 'region1.dudh_h', (['P', 'h'], {}), '(P, h)\n', (15114, 15120), False, 'from if97 import region1, region2, region3, region4\n'), ((15497, 15517), 'if97.region1.dsdh_h', 'region1.dsdh_h', (['P', 'h'], {}), '(P, h)\n', (15511, 15517), False, 'from if97 import region1, region2, region3, region4\n'), ((16234, 16254), 'if97.region1.dTdh_h', 'region1.dTdh_h', (['P', 'h'], {}), '(P, h)\n', (16248, 16254), False, 'from if97 import region1, region2, region3, region4\n'), ((18026, 18043), 'if97.region1.g_s', 'region1.g_s', (['P', 's'], {}), '(P, s)\n', (18037, 18043), False, 'from if97 import region1, region2, region3, region4\n'), ((18339, 18356), 'if97.region1.v_s', 'region1.v_s', (['P', 's'], {}), '(P, s)\n', (18350, 18356), False, 'from if97 import region1, region2, region3, region4\n'), ((18660, 18677), 'if97.region1.u_s', 'region1.u_s', (['P', 's'], {}), '(P, s)\n', (18671, 18677), False, 'from if97 import region1, region2, region3, region4\n'), ((18963, 18980), 'if97.region1.T_s', 'region1.T_s', (['P', 's'], {}), '(P, s)\n', (18974, 18980), False, 'from if97 import region1, region2, region3, region4\n'), ((19274, 19291), 'if97.region1.h_s', 'region1.h_s', (['P', 's'], {}), '(P, s)\n', (19285, 19291), False, 'from if97 import region1, region2, region3, region4\n'), ((19606, 19624), 'if97.region1.cp_s', 'region1.cp_s', (['P', 's'], {}), '(P, s)\n', (19618, 19624), False, 'from if97 import region1, region2, region3, region4\n'), ((19942, 19960), 'if97.region1.cv_s', 'region1.cv_s', (['P', 's'], {}), '(P, s)\n', (19954, 19960), False, 'from if97 import region1, region2, region3, region4\n'), ((20255, 20272), 'if97.region1.w_s', 'region1.w_s', (['P', 's'], {}), '(P, s)\n', (20266, 20272), False, 'from if97 import region1, region2, region3, region4\n'), ((20586, 20603), 'if97.region1.a_s', 'region1.a_s', (['P', 's'], {}), '(P, s)\n', (20597, 20603), False, 'from if97 import region1, region2, region3, region4\n'), ((20909, 20926), 'if97.region1.k_s', 'region1.k_s', (['P', 's'], {}), '(P, s)\n', (20920, 20926), False, 'from if97 import region1, region2, region3, region4\n'), ((21343, 21363), 'if97.region1.dgdP_s', 'region1.dgdP_s', (['P', 's'], {}), '(P, s)\n', (21357, 21363), False, 'from if97 import region1, region2, region3, region4\n'), ((21738, 21758), 'if97.region1.dvdP_s', 'region1.dvdP_s', (['P', 's'], {}), '(P, s)\n', (21752, 21758), False, 'from if97 import region1, region2, 
region3, region4\n'), ((22141, 22161), 'if97.region1.dudP_s', 'region1.dudP_s', (['P', 's'], {}), '(P, s)\n', (22155, 22161), False, 'from if97 import region1, region2, region3, region4\n'), ((22916, 22936), 'if97.region1.dhdP_s', 'region1.dhdP_s', (['P', 's'], {}), '(P, s)\n', (22930, 22936), False, 'from if97 import region1, region2, region3, region4\n'), ((23302, 23322), 'if97.region1.dTdP_s', 'region1.dTdP_s', (['P', 's'], {}), '(P, s)\n', (23316, 23322), False, 'from if97 import region1, region2, region3, region4\n'), ((23705, 23725), 'if97.region1.dgds_s', 'region1.dgds_s', (['P', 's'], {}), '(P, s)\n', (23719, 23725), False, 'from if97 import region1, region2, region3, region4\n'), ((24101, 24121), 'if97.region1.dvds_s', 'region1.dvds_s', (['P', 's'], {}), '(P, s)\n', (24115, 24121), False, 'from if97 import region1, region2, region3, region4\n'), ((24505, 24525), 'if97.region1.duds_s', 'region1.duds_s', (['P', 's'], {}), '(P, s)\n', (24519, 24525), False, 'from if97 import region1, region2, region3, region4\n'), ((25255, 25275), 'if97.region1.dhds_s', 'region1.dhds_s', (['P', 's'], {}), '(P, s)\n', (25269, 25275), False, 'from if97 import region1, region2, region3, region4\n'), ((25634, 25654), 'if97.region1.dTds_s', 'region1.dTds_s', (['P', 's'], {}), '(P, s)\n', (25648, 25654), False, 'from if97 import region1, region2, region3, region4\n'), ((1309, 1324), 'if97.region2.g', 'region2.g', (['P', 'T'], {}), '(P, T)\n', (1318, 1324), False, 'from if97 import region1, region2, region3, region4\n'), ((1559, 1574), 'if97.region2.v', 'region2.v', (['P', 'T'], {}), '(P, T)\n', (1568, 1574), False, 'from if97 import region1, region2, region3, region4\n'), ((1817, 1832), 'if97.region2.u', 'region2.u', (['P', 'T'], {}), '(P, T)\n', (1826, 1832), False, 'from if97 import region1, region2, region3, region4\n'), ((2069, 2084), 'if97.region2.s', 'region2.s', (['P', 'T'], {}), '(P, T)\n', (2078, 2084), False, 'from if97 import region1, region2, region3, region4\n'), ((2320, 2335), 'if97.region2.h', 'region2.h', (['P', 'T'], {}), '(P, T)\n', (2329, 2335), False, 'from if97 import region1, region2, region3, region4\n'), ((2590, 2606), 'if97.region2.cp', 'region2.cp', (['P', 'T'], {}), '(P, T)\n', (2600, 2606), False, 'from if97 import region1, region2, region3, region4\n'), ((2862, 2878), 'if97.region2.cv', 'region2.cv', (['P', 'T'], {}), '(P, T)\n', (2872, 2878), False, 'from if97 import region1, region2, region3, region4\n'), ((3110, 3125), 'if97.region2.w', 'region2.w', (['P', 'T'], {}), '(P, T)\n', (3119, 3125), False, 'from if97 import region1, region2, region3, region4\n'), ((3378, 3393), 'if97.region2.a', 'region2.a', (['P', 'T'], {}), '(P, T)\n', (3387, 3393), False, 'from if97 import region1, region2, region3, region4\n'), ((3638, 3653), 'if97.region2.k', 'region2.k', (['P', 'T'], {}), '(P, T)\n', (3647, 3653), False, 'from if97 import region1, region2, region3, region4\n'), ((4007, 4025), 'if97.region2.dgdP', 'region2.dgdP', (['P', 'T'], {}), '(P, T)\n', (4019, 4025), False, 'from if97 import region1, region2, region3, region4\n'), ((4331, 4349), 'if97.region2.dvdP', 'region2.dvdP', (['P', 'T'], {}), '(P, T)\n', (4343, 4349), False, 'from if97 import region1, region2, region3, region4\n'), ((4663, 4681), 'if97.region2.dudP', 'region2.dudP', (['P', 'T'], {}), '(P, T)\n', (4675, 4681), False, 'from if97 import region1, region2, region3, region4\n'), ((4989, 5007), 'if97.region2.dsdP', 'region2.dsdP', (['P', 'T'], {}), '(P, T)\n', (5001, 5007), False, 'from if97 import region1, region2, 
region3, region4\n'), ((5314, 5332), 'if97.region2.dhdP', 'region2.dhdP', (['P', 'T'], {}), '(P, T)\n', (5326, 5332), False, 'from if97 import region1, region2, region3, region4\n'), ((5644, 5662), 'if97.region2.dgdT', 'region2.dgdT', (['P', 'T'], {}), '(P, T)\n', (5656, 5662), False, 'from if97 import region1, region2, region3, region4\n'), ((5963, 5981), 'if97.region2.dvdT', 'region2.dvdT', (['P', 'T'], {}), '(P, T)\n', (5975, 5981), False, 'from if97 import region1, region2, region3, region4\n'), ((6290, 6308), 'if97.region2.dudT', 'region2.dudT', (['P', 'T'], {}), '(P, T)\n', (6302, 6308), False, 'from if97 import region1, region2, region3, region4\n'), ((6611, 6629), 'if97.region2.dsdT', 'region2.dsdT', (['P', 'T'], {}), '(P, T)\n', (6623, 6629), False, 'from if97 import region1, region2, region3, region4\n'), ((6931, 6949), 'if97.region2.dhdT', 'region2.dhdT', (['P', 'T'], {}), '(P, T)\n', (6943, 6949), False, 'from if97 import region1, region2, region3, region4\n'), ((8683, 8700), 'if97.region2.g_h', 'region2.g_h', (['P', 'h'], {}), '(P, h)\n', (8694, 8700), False, 'from if97 import region1, region2, region3, region4\n'), ((8996, 9013), 'if97.region2.v_h', 'region2.v_h', (['P', 'h'], {}), '(P, h)\n', (9007, 9013), False, 'from if97 import region1, region2, region3, region4\n'), ((9317, 9334), 'if97.region2.u_h', 'region2.u_h', (['P', 'h'], {}), '(P, h)\n', (9328, 9334), False, 'from if97 import region1, region2, region3, region4\n'), ((9632, 9649), 'if97.region2.s_h', 'region2.s_h', (['P', 'h'], {}), '(P, h)\n', (9643, 9649), False, 'from if97 import region1, region2, region3, region4\n'), ((9935, 9952), 'if97.region2.T_h', 'region2.T_h', (['P', 'h'], {}), '(P, h)\n', (9946, 9952), False, 'from if97 import region1, region2, region3, region4\n'), ((10266, 10284), 'if97.region2.cp_h', 'region2.cp_h', (['P', 'h'], {}), '(P, h)\n', (10278, 10284), False, 'from if97 import region1, region2, region3, region4\n'), ((10602, 10620), 'if97.region2.cv_h', 'region2.cv_h', (['P', 'h'], {}), '(P, h)\n', (10614, 10620), False, 'from if97 import region1, region2, region3, region4\n'), ((10914, 10931), 'if97.region2.w_h', 'region2.w_h', (['P', 'h'], {}), '(P, h)\n', (10925, 10931), False, 'from if97 import region1, region2, region3, region4\n'), ((11245, 11262), 'if97.region2.a_h', 'region2.a_h', (['P', 'h'], {}), '(P, h)\n', (11256, 11262), False, 'from if97 import region1, region2, region3, region4\n'), ((11568, 11585), 'if97.region2.k_h', 'region2.k_h', (['P', 'h'], {}), '(P, h)\n', (11579, 11585), False, 'from if97 import region1, region2, region3, region4\n'), ((12006, 12026), 'if97.region2.dgdP_h', 'region2.dgdP_h', (['P', 'h'], {}), '(P, h)\n', (12020, 12026), False, 'from if97 import region1, region2, region3, region4\n'), ((12402, 12422), 'if97.region2.dvdP_h', 'region2.dvdP_h', (['P', 'h'], {}), '(P, h)\n', (12416, 12422), False, 'from if97 import region1, region2, region3, region4\n'), ((12806, 12826), 'if97.region2.dudP_h', 'region2.dudP_h', (['P', 'h'], {}), '(P, h)\n', (12820, 12826), False, 'from if97 import region1, region2, region3, region4\n'), ((13204, 13224), 'if97.region2.dsdP_h', 'region2.dsdP_h', (['P', 'h'], {}), '(P, h)\n', (13218, 13224), False, 'from if97 import region1, region2, region3, region4\n'), ((13958, 13978), 'if97.region2.dTdP_h', 'region2.dTdP_h', (['P', 'h'], {}), '(P, h)\n', (13972, 13978), False, 'from if97 import region1, region2, region3, region4\n'), ((14360, 14380), 'if97.region2.dgdh_h', 'region2.dgdh_h', (['P', 'h'], {}), '(P, h)\n', (14374, 14380), 
False, 'from if97 import region1, region2, region3, region4\n'), ((14755, 14775), 'if97.region2.dvdh_h', 'region2.dvdh_h', (['P', 'h'], {}), '(P, h)\n', (14769, 14775), False, 'from if97 import region1, region2, region3, region4\n'), ((15158, 15178), 'if97.region2.dudh_h', 'region2.dudh_h', (['P', 'h'], {}), '(P, h)\n', (15172, 15178), False, 'from if97 import region1, region2, region3, region4\n'), ((15555, 15575), 'if97.region2.dsdh_h', 'region2.dsdh_h', (['P', 'h'], {}), '(P, h)\n', (15569, 15575), False, 'from if97 import region1, region2, region3, region4\n'), ((16292, 16312), 'if97.region2.dTdh_h', 'region2.dTdh_h', (['P', 'h'], {}), '(P, h)\n', (16306, 16312), False, 'from if97 import region1, region2, region3, region4\n'), ((18081, 18098), 'if97.region2.g_s', 'region2.g_s', (['P', 's'], {}), '(P, s)\n', (18092, 18098), False, 'from if97 import region1, region2, region3, region4\n'), ((18394, 18411), 'if97.region2.v_s', 'region2.v_s', (['P', 's'], {}), '(P, s)\n', (18405, 18411), False, 'from if97 import region1, region2, region3, region4\n'), ((18715, 18732), 'if97.region2.u_s', 'region2.u_s', (['P', 's'], {}), '(P, s)\n', (18726, 18732), False, 'from if97 import region1, region2, region3, region4\n'), ((19018, 19035), 'if97.region2.T_s', 'region2.T_s', (['P', 's'], {}), '(P, s)\n', (19029, 19035), False, 'from if97 import region1, region2, region3, region4\n'), ((19329, 19346), 'if97.region2.h_s', 'region2.h_s', (['P', 's'], {}), '(P, s)\n', (19340, 19346), False, 'from if97 import region1, region2, region3, region4\n'), ((19662, 19680), 'if97.region2.cp_s', 'region2.cp_s', (['P', 's'], {}), '(P, s)\n', (19674, 19680), False, 'from if97 import region1, region2, region3, region4\n'), ((19998, 20016), 'if97.region2.cv_s', 'region2.cv_s', (['P', 's'], {}), '(P, s)\n', (20010, 20016), False, 'from if97 import region1, region2, region3, region4\n'), ((20310, 20327), 'if97.region2.w_s', 'region2.w_s', (['P', 's'], {}), '(P, s)\n', (20321, 20327), False, 'from if97 import region1, region2, region3, region4\n'), ((20641, 20658), 'if97.region2.a_s', 'region2.a_s', (['P', 's'], {}), '(P, s)\n', (20652, 20658), False, 'from if97 import region1, region2, region3, region4\n'), ((20964, 20981), 'if97.region2.k_s', 'region2.k_s', (['P', 's'], {}), '(P, s)\n', (20975, 20981), False, 'from if97 import region1, region2, region3, region4\n'), ((21401, 21421), 'if97.region2.dgdP_s', 'region2.dgdP_s', (['P', 's'], {}), '(P, s)\n', (21415, 21421), False, 'from if97 import region1, region2, region3, region4\n'), ((21796, 21816), 'if97.region2.dvdP_s', 'region2.dvdP_s', (['P', 's'], {}), '(P, s)\n', (21810, 21816), False, 'from if97 import region1, region2, region3, region4\n'), ((22199, 22219), 'if97.region2.dudP_s', 'region2.dudP_s', (['P', 's'], {}), '(P, s)\n', (22213, 22219), False, 'from if97 import region1, region2, region3, region4\n'), ((22974, 22994), 'if97.region2.dhdP_s', 'region2.dhdP_s', (['P', 's'], {}), '(P, s)\n', (22988, 22994), False, 'from if97 import region1, region2, region3, region4\n'), ((23360, 23380), 'if97.region2.dTdP_s', 'region2.dTdP_s', (['P', 's'], {}), '(P, s)\n', (23374, 23380), False, 'from if97 import region1, region2, region3, region4\n'), ((23763, 23783), 'if97.region2.dgds_s', 'region2.dgds_s', (['P', 's'], {}), '(P, s)\n', (23777, 23783), False, 'from if97 import region1, region2, region3, region4\n'), ((24159, 24179), 'if97.region2.dvds_s', 'region2.dvds_s', (['P', 's'], {}), '(P, s)\n', (24173, 24179), False, 'from if97 import region1, region2, region3, 
region4\n'), ((24563, 24583), 'if97.region2.duds_s', 'region2.duds_s', (['P', 's'], {}), '(P, s)\n', (24577, 24583), False, 'from if97 import region1, region2, region3, region4\n'), ((25313, 25333), 'if97.region2.dhds_s', 'region2.dhds_s', (['P', 's'], {}), '(P, s)\n', (25327, 25333), False, 'from if97 import region1, region2, region3, region4\n'), ((25692, 25712), 'if97.region2.dTds_s', 'region2.dTds_s', (['P', 's'], {}), '(P, s)\n', (25706, 25712), False, 'from if97 import region1, region2, region3, region4\n'), ((8738, 8755), 'if97.region4.g_h', 'region4.g_h', (['P', 'h'], {}), '(P, h)\n', (8749, 8755), False, 'from if97 import region1, region2, region3, region4\n'), ((9051, 9068), 'if97.region4.v_h', 'region4.v_h', (['P', 'h'], {}), '(P, h)\n', (9062, 9068), False, 'from if97 import region1, region2, region3, region4\n'), ((9372, 9389), 'if97.region4.u_h', 'region4.u_h', (['P', 'h'], {}), '(P, h)\n', (9383, 9389), False, 'from if97 import region1, region2, region3, region4\n'), ((9687, 9704), 'if97.region4.s_h', 'region4.s_h', (['P', 'h'], {}), '(P, h)\n', (9698, 9704), False, 'from if97 import region1, region2, region3, region4\n'), ((9990, 10005), 'if97.region4.satT', 'region4.satT', (['P'], {}), '(P)\n', (10002, 10005), False, 'from if97 import region1, region2, region3, region4\n'), ((10322, 10340), 'if97.region4.cp_h', 'region4.cp_h', (['P', 'h'], {}), '(P, h)\n', (10334, 10340), False, 'from if97 import region1, region2, region3, region4\n'), ((10658, 10676), 'if97.region4.cv_h', 'region4.cv_h', (['P', 'h'], {}), '(P, h)\n', (10670, 10676), False, 'from if97 import region1, region2, region3, region4\n'), ((10969, 10986), 'if97.region4.w_h', 'region4.w_h', (['P', 'h'], {}), '(P, h)\n', (10980, 10986), False, 'from if97 import region1, region2, region3, region4\n'), ((11300, 11317), 'if97.region4.a_h', 'region4.a_h', (['P', 'h'], {}), '(P, h)\n', (11311, 11317), False, 'from if97 import region1, region2, region3, region4\n'), ((11623, 11640), 'if97.region4.k_h', 'region4.k_h', (['P', 'h'], {}), '(P, h)\n', (11634, 11640), False, 'from if97 import region1, region2, region3, region4\n'), ((12064, 12084), 'if97.region4.dgdP_h', 'region4.dgdP_h', (['P', 'h'], {}), '(P, h)\n', (12078, 12084), False, 'from if97 import region1, region2, region3, region4\n'), ((12460, 12480), 'if97.region4.dvdP_h', 'region4.dvdP_h', (['P', 'h'], {}), '(P, h)\n', (12474, 12480), False, 'from if97 import region1, region2, region3, region4\n'), ((12864, 12884), 'if97.region4.dudP_h', 'region4.dudP_h', (['P', 'h'], {}), '(P, h)\n', (12878, 12884), False, 'from if97 import region1, region2, region3, region4\n'), ((13262, 13282), 'if97.region4.dsdP_h', 'region4.dsdP_h', (['P', 'h'], {}), '(P, h)\n', (13276, 13282), False, 'from if97 import region1, region2, region3, region4\n'), ((13629, 13649), 'if97.region4.dhdP_h', 'region4.dhdP_h', (['P', 'h'], {}), '(P, h)\n', (13643, 13649), False, 'from if97 import region1, region2, region3, region4\n'), ((14016, 14032), 'if97.region4.dTsdP', 'region4.dTsdP', (['P'], {}), '(P)\n', (14029, 14032), False, 'from if97 import region1, region2, region3, region4\n'), ((14418, 14438), 'if97.region4.dgdh_h', 'region4.dgdh_h', (['P', 'h'], {}), '(P, h)\n', (14432, 14438), False, 'from if97 import region1, region2, region3, region4\n'), ((14813, 14833), 'if97.region4.dvdh_h', 'region4.dvdh_h', (['P', 'h'], {}), '(P, h)\n', (14827, 14833), False, 'from if97 import region1, region2, region3, region4\n'), ((15216, 15236), 'if97.region4.dudh_h', 'region4.dudh_h', (['P', 'h'], {}), '(P, 
h)\n', (15230, 15236), False, 'from if97 import region1, region2, region3, region4\n'), ((15613, 15633), 'if97.region4.dsdh_h', 'region4.dsdh_h', (['P', 'h'], {}), '(P, h)\n', (15627, 15633), False, 'from if97 import region1, region2, region3, region4\n'), ((18136, 18153), 'if97.region4.g_s', 'region4.g_s', (['P', 's'], {}), '(P, s)\n', (18147, 18153), False, 'from if97 import region1, region2, region3, region4\n'), ((18449, 18466), 'if97.region4.v_s', 'region4.v_s', (['P', 's'], {}), '(P, s)\n', (18460, 18466), False, 'from if97 import region1, region2, region3, region4\n'), ((18770, 18787), 'if97.region4.u_s', 'region4.u_s', (['P', 's'], {}), '(P, s)\n', (18781, 18787), False, 'from if97 import region1, region2, region3, region4\n'), ((19073, 19088), 'if97.region4.satT', 'region4.satT', (['P'], {}), '(P)\n', (19085, 19088), False, 'from if97 import region1, region2, region3, region4\n'), ((19384, 19401), 'if97.region4.h_s', 'region4.h_s', (['P', 's'], {}), '(P, s)\n', (19395, 19401), False, 'from if97 import region1, region2, region3, region4\n'), ((19718, 19736), 'if97.region4.cp_s', 'region4.cp_s', (['P', 's'], {}), '(P, s)\n', (19730, 19736), False, 'from if97 import region1, region2, region3, region4\n'), ((20054, 20072), 'if97.region4.cv_s', 'region4.cv_s', (['P', 's'], {}), '(P, s)\n', (20066, 20072), False, 'from if97 import region1, region2, region3, region4\n'), ((20365, 20382), 'if97.region4.w_s', 'region4.w_s', (['P', 's'], {}), '(P, s)\n', (20376, 20382), False, 'from if97 import region1, region2, region3, region4\n'), ((20696, 20713), 'if97.region4.a_s', 'region4.a_s', (['P', 's'], {}), '(P, s)\n', (20707, 20713), False, 'from if97 import region1, region2, region3, region4\n'), ((21019, 21036), 'if97.region4.k_s', 'region4.k_s', (['P', 's'], {}), '(P, s)\n', (21030, 21036), False, 'from if97 import region1, region2, region3, region4\n'), ((21459, 21479), 'if97.region4.dgdP_s', 'region4.dgdP_s', (['P', 's'], {}), '(P, s)\n', (21473, 21479), False, 'from if97 import region1, region2, region3, region4\n'), ((21854, 21874), 'if97.region4.dvdP_s', 'region4.dvdP_s', (['P', 's'], {}), '(P, s)\n', (21868, 21874), False, 'from if97 import region1, region2, region3, region4\n'), ((22257, 22277), 'if97.region4.dudP_s', 'region4.dudP_s', (['P', 's'], {}), '(P, s)\n', (22271, 22277), False, 'from if97 import region1, region2, region3, region4\n'), ((22636, 22656), 'if97.region4.dsdP_s', 'region4.dsdP_s', (['P', 's'], {}), '(P, s)\n', (22650, 22656), False, 'from if97 import region1, region2, region3, region4\n'), ((23032, 23052), 'if97.region4.dhdP_s', 'region4.dhdP_s', (['P', 's'], {}), '(P, s)\n', (23046, 23052), False, 'from if97 import region1, region2, region3, region4\n'), ((23418, 23434), 'if97.region4.dTsdP', 'region4.dTsdP', (['P'], {}), '(P)\n', (23431, 23434), False, 'from if97 import region1, region2, region3, region4\n'), ((23821, 23841), 'if97.region4.dgds_s', 'region4.dgds_s', (['P', 's'], {}), '(P, s)\n', (23835, 23841), False, 'from if97 import region1, region2, region3, region4\n'), ((24217, 24237), 'if97.region4.dvds_s', 'region4.dvds_s', (['P', 's'], {}), '(P, s)\n', (24231, 24237), False, 'from if97 import region1, region2, region3, region4\n'), ((24621, 24641), 'if97.region4.duds_s', 'region4.duds_s', (['P', 's'], {}), '(P, s)\n', (24635, 24641), False, 'from if97 import region1, region2, region3, region4\n'), ((25371, 25391), 'if97.region4.dhds_s', 'region4.dhds_s', (['P', 's'], {}), '(P, s)\n', (25385, 25391), False, 'from if97 import region1, region2, region3, 
region4\n')]
|
import hashlib
import os
from django.contrib.auth import models as auth_models
from django.contrib.auth.backends import ModelBackend
# http://fredericiana.com/2010/10/12/adding-support-for-stronger-password-hashes-to-django/
"""
from future import django_sha256_support
Monkey-patch SHA-256 support into Django's auth system. If Django ticket #5600
ever gets fixed, this can be removed.
"""
def get_hexdigest(algorithm, salt, raw_password):
"""Generate SHA-256 hash."""
if algorithm == 'sha256':
return hashlib.sha256((salt + raw_password).encode('utf8')).hexdigest()
else:
return get_hexdigest_old(algorithm, salt, raw_password)
get_hexdigest_old = auth_models.get_hexdigest
auth_models.get_hexdigest = get_hexdigest
def set_password(self, raw_password):
"""Set SHA-256 password."""
algo = 'sha256'
salt = os.urandom(5).encode('hex') # Random, 10-digit (hex) salt.
hsh = get_hexdigest(algo, salt, raw_password)
self.password = '$'.join((algo, salt, hsh))
auth_models.User.set_password = set_password
class Sha256Backend(ModelBackend):
"""
Overriding the Django model backend without changes ensures our
monkeypatching happens by the time we import auth.
"""
pass
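# Illustrative usage sketch (not part of the original module; the settings path below is
# hypothetical). Django picks up the patched backend via its standard setting:
#
# AUTHENTICATION_BACKENDS = ('myproject.sha256_backend.Sha256Backend',)
#
# After the patch, User.set_password() stores hashes of the form
# 'sha256$<10-hex-char salt>$<64-char hex digest>', while get_hexdigest() still falls back
# to the old algorithms for rows hashed before the patch.
# Note: the salt generation above relies on Python 2 byte-string semantics
# (bytes.encode('hex')); on Python 3 an equivalent would be os.urandom(5).hex().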
|
[
"os.urandom"
] |
[((855, 868), 'os.urandom', 'os.urandom', (['(5)'], {}), '(5)\n', (865, 868), False, 'import os\n')]
|
from django.contrib import admin
from wallet.models import Wallet, Transaction
admin.site.register([
Wallet,
Transaction,
])
|
[
"django.contrib.admin.site.register"
] |
[((82, 124), 'django.contrib.admin.site.register', 'admin.site.register', (['[Wallet, Transaction]'], {}), '([Wallet, Transaction])\n', (101, 124), False, 'from django.contrib import admin\n')]
|
"""
call monitoring API
"""
from typing import Optional, Union
from pydantic import Field
from .common import PersonSettingsApiChild
from ..base import ApiModel, webex_id_to_uuid
from ..common import MonitoredMember, CallParkExtension
__all__ = ['MonitoredElementMember', 'MonitoredElement', 'Monitoring',
'MonitoringApi']
class MonitoredElementMember(MonitoredMember):
#: The location name where the call park extension is.
location_name: Optional[str] = Field(alias='location')
#: The location Id for the location.
location_id: Optional[str]
@property
def ci_location_id(self) -> Optional[str]:
return self.location_id and webex_id_to_uuid(self.location_id)
class MonitoredElement(ApiModel):
#: monitored person or place
member: Optional[MonitoredElementMember]
# TODO: documentation defect: attribute is documented as "cpe"
#: monitored call park extension
cpe: Optional[CallParkExtension] = Field(alias='callparkextension')
class Monitoring(ApiModel):
#: Call park notification is enabled or disabled.
call_park_notification_enabled: Optional[bool]
#: Settings of monitored elements which can be person, place, or call park extension.
#: for updates IDs can be used directly instead of :class:`MonitoredElement` objects
monitored_elements: Optional[list[Union[str, MonitoredElement]]]
@property
def monitored_cpes(self) -> list[CallParkExtension]:
return [me.cpe for me in self.monitored_elements or []
if me.cpe]
@property
def monitored_members(self) -> list[MonitoredElementMember]:
return [me.member for me in self.monitored_elements or []
if me.member]
class MonitoringApi(PersonSettingsApiChild):
"""
API for person's call monitoring settings
"""
feature = 'monitoring'
def read(self, *, person_id: str, org_id: str = None) -> Monitoring:
"""
Retrieve a Person's Monitoring Settings
        Retrieves the monitoring settings of the person, which show the specified people, places, or call park
        extensions under monitoring. Monitoring the line status indicates if a person or place is on a call and
if a call has been parked on that extension.
This API requires a full, user, or read-only administrator auth token with a scope of spark-admin:people_read.
:param person_id: Unique identifier for the person.
:type person_id: str
:param org_id: Person is in this organization. Only admin users of another organization (such as partners)
may use this parameter as the default is the same organization as the token used to access API.
:type org_id: str
:return: monitoring settings
:rtype: :class:`Monitoring`
"""
ep = self.f_ep(person_id=person_id)
params = org_id and {'orgId': org_id} or None
data = self.get(ep, params=params)
return Monitoring.parse_obj(data)
def configure(self, *, person_id: str, settings: Monitoring, org_id: str = None):
"""
        Configure Call Monitoring Settings for a Person
        Configures the monitoring settings of the person, i.e. which people, places, or call park extensions
        are under monitoring. Monitoring the line status indicates if a person or place is on a call and if a
        call has been parked on that extension.
This API requires a full or user administrator auth token with the spark-admin:people_write scope.
:param person_id: Unique identifier for the person.
:type person_id: str
:param settings: settings for update
:type settings: :class:`Monitoring`
:param org_id: Person is in this organization. Only admin users of another organization (such as partners)
may use this parameter as the default is the same organization as the token used to access API.
:type org_id: str
"""
ep = self.f_ep(person_id=person_id)
params = org_id and {'orgId': org_id} or None
data = {}
if settings.call_park_notification_enabled is not None:
data['enableCallParkNotification'] = settings.call_park_notification_enabled
if settings.monitored_elements is not None:
id_list = []
for me in settings.monitored_elements:
if isinstance(me, str):
id_list.append(me)
else:
id_list.append(me.member and me.member.member_id or me.cpe and me.cpe.cpe_id)
data['monitoredElements'] = id_list
self.put(ep, params=params, json=data)
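# Illustrative usage sketch, grounded in the methods above (assumptions: `api` is an
# already-constructed MonitoringApi reached through the SDK's person settings object, and
# `person_id` / `watched_id` are valid identifiers):
#
#   settings = api.read(person_id=person_id)
#   print(settings.call_park_notification_enabled, len(settings.monitored_members))
#
#   # per the Monitoring model, plain IDs may be passed instead of MonitoredElement objects
#   update = Monitoring(call_park_notification_enabled=True,
#                       monitored_elements=[watched_id])
#   api.configure(person_id=person_id, settings=update)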
|
[
"pydantic.Field"
] |
[((480, 503), 'pydantic.Field', 'Field', ([], {'alias': '"""location"""'}), "(alias='location')\n", (485, 503), False, 'from pydantic import Field\n'), ((966, 998), 'pydantic.Field', 'Field', ([], {'alias': '"""callparkextension"""'}), "(alias='callparkextension')\n", (971, 998), False, 'from pydantic import Field\n')]
|
from django.urls import path
from scrapyinfo import views
urlpatterns = [
path('refresh_platform_information', views.RefreshPlatformView.as_view()),
path('scrapyds', views.ScrapydList.as_view()),
path('scrapyd/<pk>', views.ScrapydDetial.as_view()),
path('projects', views.ProjectList.as_view()),
path('project/<pk>', views.ProjectDetial.as_view()),
path('spiders', views.SpiderList.as_view()),
path('spider/<pk>', views.SpiderDetial.as_view()),
path('groups', views.GroupList.as_view()),
path('group/<pk>', views.GroupDetial.as_view()),
]
|
[
"scrapyinfo.views.GroupList.as_view",
"scrapyinfo.views.ScrapydDetial.as_view",
"scrapyinfo.views.GroupDetial.as_view",
"scrapyinfo.views.SpiderList.as_view",
"scrapyinfo.views.RefreshPlatformView.as_view",
"scrapyinfo.views.ProjectDetial.as_view",
"scrapyinfo.views.SpiderDetial.as_view",
"scrapyinfo.views.ProjectList.as_view",
"scrapyinfo.views.ScrapydList.as_view"
] |
[((118, 153), 'scrapyinfo.views.RefreshPlatformView.as_view', 'views.RefreshPlatformView.as_view', ([], {}), '()\n', (151, 153), False, 'from scrapyinfo import views\n'), ((177, 204), 'scrapyinfo.views.ScrapydList.as_view', 'views.ScrapydList.as_view', ([], {}), '()\n', (202, 204), False, 'from scrapyinfo import views\n'), ((232, 261), 'scrapyinfo.views.ScrapydDetial.as_view', 'views.ScrapydDetial.as_view', ([], {}), '()\n', (259, 261), False, 'from scrapyinfo import views\n'), ((285, 312), 'scrapyinfo.views.ProjectList.as_view', 'views.ProjectList.as_view', ([], {}), '()\n', (310, 312), False, 'from scrapyinfo import views\n'), ((340, 369), 'scrapyinfo.views.ProjectDetial.as_view', 'views.ProjectDetial.as_view', ([], {}), '()\n', (367, 369), False, 'from scrapyinfo import views\n'), ((392, 418), 'scrapyinfo.views.SpiderList.as_view', 'views.SpiderList.as_view', ([], {}), '()\n', (416, 418), False, 'from scrapyinfo import views\n'), ((445, 473), 'scrapyinfo.views.SpiderDetial.as_view', 'views.SpiderDetial.as_view', ([], {}), '()\n', (471, 473), False, 'from scrapyinfo import views\n'), ((495, 520), 'scrapyinfo.views.GroupList.as_view', 'views.GroupList.as_view', ([], {}), '()\n', (518, 520), False, 'from scrapyinfo import views\n'), ((546, 573), 'scrapyinfo.views.GroupDetial.as_view', 'views.GroupDetial.as_view', ([], {}), '()\n', (571, 573), False, 'from scrapyinfo import views\n')]
|
from .util import Audio
from abc import ABC, abstractmethod
import numpy as np
from scipy import fft, signal
from IPython.display import display
from bokeh.plotting import figure, show
from bokeh.layouts import gridplot
from bokeh.models.mappers import LinearColorMapper
from bokeh.models.ranges import DataRange1d
from bokeh.models.tools import HoverTool
from bokeh.palettes import Viridis256
from bokeh.io import output_notebook
output_notebook()
def get_samples_and_rate(input_signal, samplerate):
if isinstance(input_signal, TimeSignal):
if samplerate is not None:
print('Explicitly defined samplerate gets ignored when input is a TimeSignal', samplerate)
samples = input_signal.samples
samplerate = input_signal.samplerate
elif np.ndim(input_signal) > 0:
if samplerate is None:
raise ValueError('The samplerate needs to be defined explicitly when input is an array or other iterable')
samples = np.asarray(input_signal)
else:
raise TypeError('Only TimeSignals, Numpy arrays or other iterables are supported as input, not {}'.format(type(input_signal)))
return samples, samplerate
def get_samples(input_signal):
if isinstance(input_signal, TimeSignal):
return input_signal.samples
elif np.ndim(input_signal) > 0:
return np.asarray(input_signal)
else:
raise TypeError('Only TimeSignals, Numpy arrays or other iterables are supported as input, not {}'.format(type(input_signal)))
def get_both_samples_and_rate(input_signal1, input_signal2, samplerate=None):
samples1, samplerate1 = get_samples_and_rate(input_signal1, samplerate)
samples2, samplerate2 = get_samples_and_rate(input_signal2, samplerate)
if samplerate1 != samplerate2:
raise ValueError('Both signals need to have the same samplerate')
return samples1, samples2, samplerate1
def get_both_samples(input_signal1, input_signal2):
samples1 = get_samples(input_signal1)
samples2 = get_samples(input_signal2)
if isinstance(input_signal1, TimeSignal) and isinstance(input_signal2, TimeSignal) and input_signal1.samplerate != input_signal2.samplerate:
raise ValueError('Both signals need to have the same samplerate')
return samples1, samples2
def same_type_as(output_samples, input_signal):
if isinstance(input_signal, TimeSignal):
return type(input_signal)(output_samples, input_signal.samplerate)
else:
return output_samples
class Signal(ABC):
@abstractmethod
def plot(self, **fig_args):
pass
def _repr_html_(self):
return show(self.plot())
def display(self, **fig_args):
show(self.plot(**fig_args))
class TimeSignal(Signal):
def __init__(self, samples, samplerate):
self.samples = samples
self.samplerate = samplerate
self.timepoints = np.arange(len(samples)) / samplerate
def plot(self, **fig_args):
fig = figure(width=800, height=400, x_axis_label='time [s]', y_axis_label='amplitude',
tools='pan,wheel_zoom,box_zoom,zoom_in,zoom_out,save,reset', active_drag='pan')
fig.line(self.timepoints, self.samples, line_width=2)
return fig
class AudioSignal(TimeSignal):
def __init__(self, samples, samplerate):
super().__init__(samples, samplerate)
def play(self, normalize=False):
return display(Audio(self.samples, rate=self.samplerate, normalize=normalize))
def plot(self, **fig_args):
default_args = {
'width': 900, 'height': 300,
'x_axis_label': 'time [s]', 'y_axis_label': 'amplitude',
'y_range': (-1, 1),
'tools': 'xpan,xwheel_zoom,box_zoom,xzoom_in,xzoom_out,save,reset',
'active_drag': 'xpan',
'active_inspect': 'auto',
'active_scroll': 'auto',
'toolbar_location': 'above',
}
hover_tool = HoverTool(
tooltips=[('time [s]', '$x{0.000}'), ('amplitude', '$y{0.000}')],
mode='vline',
)
fig = figure(**{**default_args, **fig_args})
fig.line(self.timepoints, self.samples, line_width=2)
fig.add_tools(hover_tool)
return fig
class Spectrum(Signal):
def __init__(self, input, samplerate=None, num_bins=None, power=1, decibels=True):
samples, samplerate = get_samples_and_rate(input, samplerate)
if num_bins is None:
num_bins = len(samples)
self.power = power
self.decibels = decibels
self.spectrum = np.abs(fft.rfft(samples, num_bins))
self.frequencies = np.arange(len(self.spectrum)) * samplerate / num_bins
if decibels:
self.spectrum = power * 10 * np.log10(self.spectrum)
else:
self.spectrum **= power
def plot(self, **fig_args):
default_args = {
'width': 900, 'height': 300,
'x_axis_label': 'frequency [Hz]', 'y_axis_label': 'amplitude',
'tools': 'pan,wheel_zoom,box_zoom,zoom_in,zoom_out,save,reset',
'active_drag': 'pan',
'active_inspect': 'auto',
'active_scroll': 'auto',
'toolbar_location': 'above',
}
hover_tool = HoverTool(
tooltips=[('frequency [Hz]', '$x{0.}'), ['amplitude', '$y{0.000}']],
mode='vline',
)
if self.power == 2:
default_args['y_axis_label'] = 'power'
hover_tool.tooltips[1][0] = 'power'
if self.decibels:
default_args['y_axis_label'] += ' [dB]'
hover_tool.tooltips[1][0] += ' [dB]'
fig = figure(**{**default_args, **fig_args})
fig.line(self.frequencies, self.spectrum, line_width=2)
fig.add_tools(hover_tool)
return fig
class PowerSpectrum(Spectrum):
def __init__(self, input, samplerate=None, num_bins=None, decibels=True):
super().__init__(input, samplerate=samplerate, num_bins=num_bins, power=2, decibels=decibels)
class Spectrogram(Signal):
def __init__(self, input_signal, frame_duration, step_duration, samplerate=None, num_bins=None, window='hann', power=1, decibels=True):
samples, samplerate = get_samples_and_rate(input_signal, samplerate)
self.power = power
self.decibels = decibels
frame_size = round(frame_duration * samplerate)
overlap_size = round((frame_duration-step_duration) * samplerate)
self.frequencies, self.times, self.array = signal.stft(samples, fs=samplerate, window=window, nperseg=frame_size, noverlap=overlap_size)
if decibels:
self.array = power * 10 * np.log10(self.array)
else:
self.array **= power
def plot(self, lowest_value=None, highest_value=None, palette=None, **fig_args):
if not palette:
palette = list(reversed(Viridis256))
if not lowest_value:
lowest_value = np.min(np.abs(self.array))
if not highest_value:
highest_value = np.max(np.abs(self.array))
default_args = {
'width': 900, 'height': 400,
'x_axis_label': 'time [s]', 'y_axis_label': 'frequency [Hz]',
'tools': 'hover,pan,wheel_zoom,box_zoom,zoom_in,zoom_out,save,reset',
'active_drag': 'pan',
'active_inspect': 'auto',
'active_scroll': 'auto',
'toolbar_location': 'above',
'tooltips': [('time [s]', '$x{0.000}'), ('frequency [Hz]', '$y{0.}'), ['amplitude', '@image']],
}
if self.power == 2:
default_args['tooltips'][2][0] = 'power'
if self.decibels:
default_args['tooltips'][2][0] += ' [dB]'
fig = figure(**{**default_args, **fig_args})
if isinstance(fig.x_range, DataRange1d):
fig.x_range.range_padding = 0
if isinstance(fig.y_range, DataRange1d):
fig.y_range.range_padding = 0
mapper = LinearColorMapper(palette=palette, low=lowest_value, high=highest_value)
fig.image([np.abs(self.array)], x=self.times[0], y=self.frequencies[0], dw=self.times[-1], dh=self.frequencies[-1], color_mapper=mapper)
return fig
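# Illustrative usage sketch built only from the classes above (sample values are arbitrary):
#
#   samplerate = 8000
#   t = np.arange(samplerate) / samplerate                          # one second of samples
#   sig = AudioSignal(0.5 * np.sin(2 * np.pi * 440 * t), samplerate)  # 440 Hz tone
#   sig.display()                                                   # time-domain plot
#   Spectrum(sig).display()                                         # magnitude spectrum (dB by default)
#   Spectrogram(sig, frame_duration=0.025, step_duration=0.010).display()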
|
[
"bokeh.io.output_notebook",
"bokeh.plotting.figure",
"numpy.abs",
"numpy.asarray",
"scipy.fft.rfft",
"numpy.ndim",
"bokeh.models.tools.HoverTool",
"bokeh.models.mappers.LinearColorMapper",
"numpy.log10",
"scipy.signal.stft"
] |
[((431, 448), 'bokeh.io.output_notebook', 'output_notebook', ([], {}), '()\n', (446, 448), False, 'from bokeh.io import output_notebook\n'), ((2962, 3132), 'bokeh.plotting.figure', 'figure', ([], {'width': '(800)', 'height': '(400)', 'x_axis_label': '"""time [s]"""', 'y_axis_label': '"""amplitude"""', 'tools': '"""pan,wheel_zoom,box_zoom,zoom_in,zoom_out,save,reset"""', 'active_drag': '"""pan"""'}), "(width=800, height=400, x_axis_label='time [s]', y_axis_label=\n 'amplitude', tools=\n 'pan,wheel_zoom,box_zoom,zoom_in,zoom_out,save,reset', active_drag='pan')\n", (2968, 3132), False, 'from bokeh.plotting import figure, show\n'), ((3933, 4026), 'bokeh.models.tools.HoverTool', 'HoverTool', ([], {'tooltips': "[('time [s]', '$x{0.000}'), ('amplitude', '$y{0.000}')]", 'mode': '"""vline"""'}), "(tooltips=[('time [s]', '$x{0.000}'), ('amplitude', '$y{0.000}')],\n mode='vline')\n", (3942, 4026), False, 'from bokeh.models.tools import HoverTool\n'), ((4072, 4110), 'bokeh.plotting.figure', 'figure', ([], {}), '(**{**default_args, **fig_args})\n', (4078, 4110), False, 'from bokeh.plotting import figure, show\n'), ((5250, 5347), 'bokeh.models.tools.HoverTool', 'HoverTool', ([], {'tooltips': "[('frequency [Hz]', '$x{0.}'), ['amplitude', '$y{0.000}']]", 'mode': '"""vline"""'}), "(tooltips=[('frequency [Hz]', '$x{0.}'), ['amplitude', '$y{0.000}'\n ]], mode='vline')\n", (5259, 5347), False, 'from bokeh.models.tools import HoverTool\n'), ((5646, 5684), 'bokeh.plotting.figure', 'figure', ([], {}), '(**{**default_args, **fig_args})\n', (5652, 5684), False, 'from bokeh.plotting import figure, show\n'), ((6506, 6603), 'scipy.signal.stft', 'signal.stft', (['samples'], {'fs': 'samplerate', 'window': 'window', 'nperseg': 'frame_size', 'noverlap': 'overlap_size'}), '(samples, fs=samplerate, window=window, nperseg=frame_size,\n noverlap=overlap_size)\n', (6517, 6603), False, 'from scipy import fft, signal\n'), ((7732, 7770), 'bokeh.plotting.figure', 'figure', ([], {}), '(**{**default_args, **fig_args})\n', (7738, 7770), False, 'from bokeh.plotting import figure, show\n'), ((7970, 8042), 'bokeh.models.mappers.LinearColorMapper', 'LinearColorMapper', ([], {'palette': 'palette', 'low': 'lowest_value', 'high': 'highest_value'}), '(palette=palette, low=lowest_value, high=highest_value)\n', (7987, 8042), False, 'from bokeh.models.mappers import LinearColorMapper\n'), ((779, 800), 'numpy.ndim', 'np.ndim', (['input_signal'], {}), '(input_signal)\n', (786, 800), True, 'import numpy as np\n'), ((974, 998), 'numpy.asarray', 'np.asarray', (['input_signal'], {}), '(input_signal)\n', (984, 998), True, 'import numpy as np\n'), ((1298, 1319), 'numpy.ndim', 'np.ndim', (['input_signal'], {}), '(input_signal)\n', (1305, 1319), True, 'import numpy as np\n'), ((1340, 1364), 'numpy.asarray', 'np.asarray', (['input_signal'], {}), '(input_signal)\n', (1350, 1364), True, 'import numpy as np\n'), ((4569, 4596), 'scipy.fft.rfft', 'fft.rfft', (['samples', 'num_bins'], {}), '(samples, num_bins)\n', (4577, 4596), False, 'from scipy import fft, signal\n'), ((4742, 4765), 'numpy.log10', 'np.log10', (['self.spectrum'], {}), '(self.spectrum)\n', (4750, 4765), True, 'import numpy as np\n'), ((6660, 6680), 'numpy.log10', 'np.log10', (['self.array'], {}), '(self.array)\n', (6668, 6680), True, 'import numpy as np\n'), ((6950, 6968), 'numpy.abs', 'np.abs', (['self.array'], {}), '(self.array)\n', (6956, 6968), True, 'import numpy as np\n'), ((7035, 7053), 'numpy.abs', 'np.abs', (['self.array'], {}), '(self.array)\n', (7041, 7053), True, 'import numpy as 
np\n'), ((8062, 8080), 'numpy.abs', 'np.abs', (['self.array'], {}), '(self.array)\n', (8068, 8080), True, 'import numpy as np\n')]
|
from starlette.authentication import (
AuthCredentials,
AuthenticationBackend,
UnauthenticatedUser,
)
from .aad_authentication_client import AadAuthenticationClient
class AadSessionMiddleware(AuthenticationBackend):
async def authenticate(self, request):
"""Authenticate a request.
If authentication is successful, defining a user instance
"""
try:
if not request.session or not request.session.get("aad_id"):
return AuthCredentials(None), UnauthenticatedUser()
aad_client = AadAuthenticationClient(session=request.session)
# Do not validate signature, since we may have here a
# microsoft graph token, that we can't validate
# but it's fine since we are not on the web api side
user = await aad_client.get_user(False)
if user is None:
return AuthCredentials(None), UnauthenticatedUser()
return AuthCredentials(user.scopes), user
except Exception:
return AuthCredentials(None), UnauthenticatedUser()
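# Illustrative wiring sketch: despite its name, AadSessionMiddleware is an AuthenticationBackend,
# so it is passed as the backend of Starlette's AuthenticationMiddleware. This assumes a session
# middleware is already installed so that request.session carries "aad_id".
#
#   from starlette.applications import Starlette
#   from starlette.middleware.authentication import AuthenticationMiddleware
#
#   app = Starlette()
#   app.add_middleware(AuthenticationMiddleware, backend=AadSessionMiddleware())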
|
[
"starlette.authentication.AuthCredentials",
"starlette.authentication.UnauthenticatedUser"
] |
[((979, 1007), 'starlette.authentication.AuthCredentials', 'AuthCredentials', (['user.scopes'], {}), '(user.scopes)\n', (994, 1007), False, 'from starlette.authentication import AuthCredentials, AuthenticationBackend, UnauthenticatedUser\n'), ((497, 518), 'starlette.authentication.AuthCredentials', 'AuthCredentials', (['None'], {}), '(None)\n', (512, 518), False, 'from starlette.authentication import AuthCredentials, AuthenticationBackend, UnauthenticatedUser\n'), ((520, 541), 'starlette.authentication.UnauthenticatedUser', 'UnauthenticatedUser', ([], {}), '()\n', (539, 541), False, 'from starlette.authentication import AuthCredentials, AuthenticationBackend, UnauthenticatedUser\n'), ((914, 935), 'starlette.authentication.AuthCredentials', 'AuthCredentials', (['None'], {}), '(None)\n', (929, 935), False, 'from starlette.authentication import AuthCredentials, AuthenticationBackend, UnauthenticatedUser\n'), ((937, 958), 'starlette.authentication.UnauthenticatedUser', 'UnauthenticatedUser', ([], {}), '()\n', (956, 958), False, 'from starlette.authentication import AuthCredentials, AuthenticationBackend, UnauthenticatedUser\n'), ((1060, 1081), 'starlette.authentication.AuthCredentials', 'AuthCredentials', (['None'], {}), '(None)\n', (1075, 1081), False, 'from starlette.authentication import AuthCredentials, AuthenticationBackend, UnauthenticatedUser\n'), ((1083, 1104), 'starlette.authentication.UnauthenticatedUser', 'UnauthenticatedUser', ([], {}), '()\n', (1102, 1104), False, 'from starlette.authentication import AuthCredentials, AuthenticationBackend, UnauthenticatedUser\n')]
|
#
# DeepRacer Guru
#
# Version 3.0 onwards
#
# Copyright (c) 2021 dmh23
#
import tkinter as tk
from src.analyze.track.track_analyzer import TrackAnalyzer
from src.episode.episode import LAP_COMPLETE, OFF_TRACK, CRASHED, REVERSED, LOST_CONTROL
from src.graphics.track_graphics import TrackGraphics
from src.analyze.core.controls import EpisodeRadioButtonControl, OutcomesCheckButtonControl
class AnalyzeExitPoints(TrackAnalyzer):
def __init__(self, guru_parent_redraw, track_graphics: TrackGraphics, control_frame: tk.Frame):
super().__init__(guru_parent_redraw, track_graphics, control_frame)
self._episodes_control = EpisodeRadioButtonControl(guru_parent_redraw, control_frame, True)
self._outcome_control = OutcomesCheckButtonControl(guru_parent_redraw, control_frame)
def build_control_frame(self, control_frame):
self._episodes_control.add_to_control_frame()
self._outcome_control.add_to_control_frame()
def redraw(self):
if self._episodes_control.show_filtered():
episodes = self.filtered_episodes
elif self._episodes_control.show_all():
episodes = self.all_episodes
else:
episodes = None
if episodes:
for e in episodes:
colour = None
if e.outcome == LAP_COMPLETE and self._outcome_control.show_lap_complete():
colour = "green"
if e.outcome == OFF_TRACK and self._outcome_control.show_off_track():
colour = "orange"
if e.outcome == CRASHED and self._outcome_control.show_crashed():
colour = "red"
if e.outcome == REVERSED and self._outcome_control.show_reversed():
colour = "cyan"
if e.outcome == LOST_CONTROL and self._outcome_control.show_lost_control():
colour = "yellow"
if colour:
exit_point = (e.events[-1].x, e.events[-1].y)
self.track_graphics.plot_dot(exit_point, 3, colour)
elif self._episodes_control.show_evaluations() and self.all_episodes:
start_wp = self.all_episodes[0].events[0].closest_waypoint_index
start_percent = self.current_track.get_waypoint_percent_from_race_start(start_wp)
whole_lap = False
for v in self.evaluation_phases:
for p in v.progresses:
if p == 100:
whole_lap = True
else:
p += start_percent
if p > 100:
p -= 100
exit_point = self.current_track.get_percent_progress_point_on_centre_line(p)
self.track_graphics.plot_dot(exit_point, 3, "orange")
if whole_lap:
exit_point = self.current_track.get_percent_progress_point_on_centre_line(start_percent)
self.track_graphics.plot_dot(exit_point, 5, "green")
|
[
"src.analyze.core.controls.OutcomesCheckButtonControl",
"src.analyze.core.controls.EpisodeRadioButtonControl"
] |
[((646, 712), 'src.analyze.core.controls.EpisodeRadioButtonControl', 'EpisodeRadioButtonControl', (['guru_parent_redraw', 'control_frame', '(True)'], {}), '(guru_parent_redraw, control_frame, True)\n', (671, 712), False, 'from src.analyze.core.controls import EpisodeRadioButtonControl, OutcomesCheckButtonControl\n'), ((745, 806), 'src.analyze.core.controls.OutcomesCheckButtonControl', 'OutcomesCheckButtonControl', (['guru_parent_redraw', 'control_frame'], {}), '(guru_parent_redraw, control_frame)\n', (771, 806), False, 'from src.analyze.core.controls import EpisodeRadioButtonControl, OutcomesCheckButtonControl\n')]
|
import pandas as pd
import numpy as np
from urllib.parse import urlparse
import io
import gc
import re
import string
from utils import *
import tensorflow as tf
def load_vectors(fname,count_words):
fin = io.open(fname, 'r', encoding='utf-8', newline='\n', errors='ignore')
n, d = map(int, fin.readline().split())
data = {}
data_list=[]
for line in fin:
tokens = line.rstrip().split(' ')
tk = tokens[0]
if tk in count_words:
vec=list(map(float, tokens[1:]))
data[tk] = vec
data_list.append(vec)
return data,data_list
def glove_load_vectors(fname,count_words):
data={}
fastvec = open(fname)
counter=1
data_list=[]
while counter>0:
try:
f=fastvec.__next__()
tokens = f.rstrip().split(' ')
tk=tokens[0]
if tk in count_words:
vec = list(map(float, tokens[1:]))
data[tk] = vec
data_list.append(vec)
counter+=1
except:
print("total tokens",counter)
counter=0
pass
return data,data_list
def create_embeddings(train_data,embedding_path,wordvec_name,stop_set,word_dim):
entity1 = train_data["entities"].apply(lambda x: combine_entity(x))
mention_dt = train_data["hashtags"].apply(lambda x: hashtag(x))
url_dt1 = train_data["urls"].apply(lambda x: process_urlPath(x,0,stop_set))
url_dt2 = train_data["urls"].apply(lambda x: process_urlPath(x,1,stop_set))
mention_splt = train_data["mentions"].apply(lambda x: hashtag(x))
dt_concat =pd.concat([entity1,mention_dt,url_dt1,url_dt2,mention_splt],axis=0)
print("create entity tokenizer")
tokenizer = tf.keras.preprocessing.text.Tokenizer(
filters='',
lower=True,
split=" ",
char_level=False,
oov_token=None)
#tokenizer.fit_on_texts(pd.concat([entity1,mention_dt,url_dt,mention_splt],axis=0))
tokenizer.fit_on_texts(dt_concat)
count_thres = 15
count_words = {w:c for w,c in tokenizer.word_counts.items() if c >= count_thres}
word_counts= len(count_words)+1#one for oov and one for less count words
tokenizer = tf.keras.preprocessing.text.Tokenizer(
num_words=word_counts,
filters='',
lower=True,
split=" ",
char_level=False,
oov_token=None)
#tokenizer.fit_on_texts(pd.concat([entity1,mention_dt,url_dt,mention_splt],axis=0))
tokenizer.fit_on_texts(dt_concat)
print("load embedding vectors")
if wordvec_name.split(".")[0]=="glove":
fastvec,fastvec_list = glove_load_vectors(embedding_path,count_words)
else:
fastvec,fastvec_list = load_vectors(embedding_path,count_words)
cand=np.array(fastvec_list,dtype='float32')
mu=np.mean(cand, axis=0)
Sigma=np.cov(cand.T)
norm=np.random.multivariate_normal(mu, Sigma, 1)
norm = list(np.reshape(norm, word_dim))
word_counts = len(count_words)+1
word_vectors = np.zeros((word_counts,word_dim))
id_w = tokenizer.index_word
for k in range(1,word_vectors.shape[0]):
ky = id_w[k]
if ky in fastvec:
word_vectors[k,:]=fastvec[ky]
else:
word_vectors[k,:]= norm
return tokenizer,word_counts,word_vectors
|
[
"tensorflow.keras.preprocessing.text.Tokenizer",
"numpy.zeros",
"numpy.mean",
"numpy.array",
"numpy.random.multivariate_normal",
"numpy.reshape",
"io.open",
"numpy.cov",
"pandas.concat"
] |
[((213, 281), 'io.open', 'io.open', (['fname', '"""r"""'], {'encoding': '"""utf-8"""', 'newline': '"""\n"""', 'errors': '"""ignore"""'}), "(fname, 'r', encoding='utf-8', newline='\\n', errors='ignore')\n", (220, 281), False, 'import io\n'), ((1759, 1831), 'pandas.concat', 'pd.concat', (['[entity1, mention_dt, url_dt1, url_dt2, mention_splt]'], {'axis': '(0)'}), '([entity1, mention_dt, url_dt1, url_dt2, mention_splt], axis=0)\n', (1768, 1831), True, 'import pandas as pd\n'), ((1886, 1996), 'tensorflow.keras.preprocessing.text.Tokenizer', 'tf.keras.preprocessing.text.Tokenizer', ([], {'filters': '""""""', 'lower': '(True)', 'split': '""" """', 'char_level': '(False)', 'oov_token': 'None'}), "(filters='', lower=True, split=' ',\n char_level=False, oov_token=None)\n", (1923, 1996), True, 'import tensorflow as tf\n'), ((2367, 2500), 'tensorflow.keras.preprocessing.text.Tokenizer', 'tf.keras.preprocessing.text.Tokenizer', ([], {'num_words': 'word_counts', 'filters': '""""""', 'lower': '(True)', 'split': '""" """', 'char_level': '(False)', 'oov_token': 'None'}), "(num_words=word_counts, filters='',\n lower=True, split=' ', char_level=False, oov_token=None)\n", (2404, 2500), True, 'import tensorflow as tf\n'), ((2928, 2967), 'numpy.array', 'np.array', (['fastvec_list'], {'dtype': '"""float32"""'}), "(fastvec_list, dtype='float32')\n", (2936, 2967), True, 'import numpy as np\n'), ((2974, 2995), 'numpy.mean', 'np.mean', (['cand'], {'axis': '(0)'}), '(cand, axis=0)\n', (2981, 2995), True, 'import numpy as np\n'), ((3006, 3020), 'numpy.cov', 'np.cov', (['cand.T'], {}), '(cand.T)\n', (3012, 3020), True, 'import numpy as np\n'), ((3030, 3073), 'numpy.random.multivariate_normal', 'np.random.multivariate_normal', (['mu', 'Sigma', '(1)'], {}), '(mu, Sigma, 1)\n', (3059, 3073), True, 'import numpy as np\n'), ((3175, 3208), 'numpy.zeros', 'np.zeros', (['(word_counts, word_dim)'], {}), '((word_counts, word_dim))\n', (3183, 3208), True, 'import numpy as np\n'), ((3090, 3116), 'numpy.reshape', 'np.reshape', (['norm', 'word_dim'], {}), '(norm, word_dim)\n', (3100, 3116), True, 'import numpy as np\n')]
|
""" Generate the wavelength templates for Keck/DEIMOS"""
import os
from pypeit.core.wavecal import templates
# Keck/DEIMOS
def keck_deimos_600ZD():
binspec = 1
slits = [0, 1]
lcut = [7192.]
xidl_file = os.path.join(templates.template_path, 'Keck_DEIMOS', '600ZD', 'deimos_600.sav')
outroot = 'keck_deimos_600.fits'
templates.build_template(xidl_file, slits, lcut, binspec, outroot, lowredux=True)
def keck_deimos_830G(overwrite=False):
binspec = 1
outroot = 'keck_deimos_830G.fits'
# 3-12 = blue 6508 -- 8410
# 7-24 = blue 8497 -- 9925 (no lines after XeI)
ifiles = [0, 0, 1]
slits = [12, 14, 24]
lcut = [8400., 8480]
wfile1 = os.path.join(templates.template_path, 'Keck_DEIMOS', '830G_M_8600', 'MasterWaveCalib_A_1_03.json')
wfile2 = os.path.join(templates.template_path, 'Keck_DEIMOS', '830G_M_8600', 'MasterWaveCalib_A_1_07.json')
# det_dict
det_cut = {}
det_cut['dets'] = [[1, 2, 3, 4], [5, 6, 7, 8]]
det_cut['wcuts'] = [[0, 9000.], [8200, 1e9]] # Significant overlap is fine
#
templates.build_template([wfile1, wfile2], slits, lcut, binspec, outroot, lowredux=False,
ifiles=ifiles, det_cut=det_cut, chk=True, overwrite=overwrite)
def keck_deimos_1200G(overwrite=False):
binspec = 1
outroot = 'keck_deimos_1200G.fits'
# 3-3 = blue 6268.23 -- 7540
# 3-14 = red 6508 -- 7730
# 7-3 = blue 7589 -- 8821
# 7-17 = red 8000 - 9230
# 7c-0 = red 9120 -- 9950
ifiles = [3, 5, 4, 0, 0, 1, 1, 2]
slits = [1261, 1652, 132, 3, 14, 3, 17, 0]
lcut = [5200., 5580., 6800., 7450., 7730., 8170, 9120]
wfile1 = os.path.join(templates.template_path, 'Keck_DEIMOS', '1200G', 'MasterWaveCalib_A_1_03.json')
wfile2 = os.path.join(templates.template_path, 'Keck_DEIMOS', '1200G', 'MasterWaveCalib_A_1_07.json')
wfile3 = os.path.join(templates.template_path, 'Keck_DEIMOS', '1200G', 'MasterWaveCalib_A_1_07c.json')
wfile4 = os.path.join(templates.template_path, 'Keck_DEIMOS', '1200G', '1200G_bluetilt',
'MasterWaveCalib_B_1_02_useS1261.fits')
wfile5 = os.path.join(templates.template_path, 'Keck_DEIMOS', '1200G', '1200G_bluetilt',
'MasterWaveCalib_B_1_06_useS0132.fits')
wfile6 = os.path.join(templates.template_path, 'Keck_DEIMOS', '1200G', '1200G_bluetilt',
'MasterWaveCalib_B_1_02_useS1652.fits')
#wfile7 = os.path.join(templates.template_path, 'Keck_DEIMOS', '1200G', '1200G_bluetilt',
# 'MasterWaveCalib_B_1_06_useS1649.fits')
files = [wfile1, wfile2, wfile3, wfile4, wfile5, wfile6] #, wfile7]
# det_dict
det_cut = None
# det_cut = {}
# det_cut['dets'] = [[1,2,3,4], [5,6,7,8]]
# det_cut['wcuts'] = [[0,9000.], [8200,1e9]] # Significant overlap is fine
#
templates.build_template(files, slits, lcut, binspec, outroot, lowredux=False,
ifiles=ifiles, det_cut=det_cut, chk=True, subtract_conti=True,
overwrite=overwrite, shift_wave=True)
def keck_deimos_1200B(overwrite=False):
binspec = 1
outroot = 'keck_deimos_1200B.fits'
# PypeIt fits
wpath = os.path.join(templates.template_path, 'Keck_DEIMOS', '1200B')
basefiles = ['MasterWaveCalib_A_1_02_useS0106.fits', 'MasterWaveCalib_A_1_02_useS0291.fits',
'MasterWaveCalib_A_1_06_useS0106.fits', 'MasterWaveCalib_A_1_06_useS0287.fits']
wfiles = [os.path.join(wpath, basefile) for basefile in basefiles]
# Snippets
ifiles = [1, 0, 1, 0, 3, 2]
slits = [291, 106, 291, 106, 287, 106]
wv_cuts = [4493., 4870., 5100., 5260., 5810.]
assert len(wv_cuts) == len(slits)-1
# det_dict
det_cut = None
#
templates.build_template(wfiles, slits, wv_cuts, binspec, outroot,
ifiles=ifiles, det_cut=det_cut, chk=True, normalize=False, lowredux=False,
subtract_conti=True, overwrite=overwrite, shift_wave=True)
def keck_deimos_900ZD(overwrite=False):
binspec = 1
outroot = 'keck_deimos_900ZD.fits'
# PypeIt fits
wpath = os.path.join(templates.template_path, 'Keck_DEIMOS', '900ZD')
basefiles = ['MasterWaveCalib_A_1_01_useS1046.fits', 'MasterWaveCalib_A_1_03_useS0600.fits',
'MasterWaveCalib_A_1_06_useS0054.fits', 'MasterWaveCalib_A_1_02_useS0066.fits',
'MasterWaveCalib_A_1_06_useS0193.fits']
wfiles = [os.path.join(wpath, basefile) for basefile in basefiles]
# Snippets
ifiles = [0, 1, 2, 3, 4, 5]
slits = [1046, 600, 54, 66, 193]
wv_cuts = [5250., 5878., 7100., 8245.]
assert len(wv_cuts) == len(slits)-1
# det_dict
det_cut = None
#
templates.build_template(wfiles, slits, wv_cuts, binspec, outroot,
ifiles=ifiles, det_cut=det_cut, chk=True,
normalize=False, lowredux=False,
subtract_conti=True, overwrite=overwrite,
shift_wave=True)
if __name__ == '__main__':
#keck_deimos_600ZD()
#keck_deimos_830G(overwrite=False) # False for Testing; True for real
#keck_deimos_1200G(overwrite=False)
#keck_deimos_1200B()
keck_deimos_900ZD(overwrite=False)
pass
|
[
"pypeit.core.wavecal.templates.build_template",
"os.path.join"
] |
[((222, 301), 'os.path.join', 'os.path.join', (['templates.template_path', '"""Keck_DEIMOS"""', '"""600ZD"""', '"""deimos_600.sav"""'], {}), "(templates.template_path, 'Keck_DEIMOS', '600ZD', 'deimos_600.sav')\n", (234, 301), False, 'import os\n'), ((343, 429), 'pypeit.core.wavecal.templates.build_template', 'templates.build_template', (['xidl_file', 'slits', 'lcut', 'binspec', 'outroot'], {'lowredux': '(True)'}), '(xidl_file, slits, lcut, binspec, outroot, lowredux\n =True)\n', (367, 429), False, 'from pypeit.core.wavecal import templates\n'), ((690, 792), 'os.path.join', 'os.path.join', (['templates.template_path', '"""Keck_DEIMOS"""', '"""830G_M_8600"""', '"""MasterWaveCalib_A_1_03.json"""'], {}), "(templates.template_path, 'Keck_DEIMOS', '830G_M_8600',\n 'MasterWaveCalib_A_1_03.json')\n", (702, 792), False, 'import os\n'), ((802, 904), 'os.path.join', 'os.path.join', (['templates.template_path', '"""Keck_DEIMOS"""', '"""830G_M_8600"""', '"""MasterWaveCalib_A_1_07.json"""'], {}), "(templates.template_path, 'Keck_DEIMOS', '830G_M_8600',\n 'MasterWaveCalib_A_1_07.json')\n", (814, 904), False, 'import os\n'), ((1074, 1235), 'pypeit.core.wavecal.templates.build_template', 'templates.build_template', (['[wfile1, wfile2]', 'slits', 'lcut', 'binspec', 'outroot'], {'lowredux': '(False)', 'ifiles': 'ifiles', 'det_cut': 'det_cut', 'chk': '(True)', 'overwrite': 'overwrite'}), '([wfile1, wfile2], slits, lcut, binspec, outroot,\n lowredux=False, ifiles=ifiles, det_cut=det_cut, chk=True, overwrite=\n overwrite)\n', (1098, 1235), False, 'from pypeit.core.wavecal import templates\n'), ((1658, 1754), 'os.path.join', 'os.path.join', (['templates.template_path', '"""Keck_DEIMOS"""', '"""1200G"""', '"""MasterWaveCalib_A_1_03.json"""'], {}), "(templates.template_path, 'Keck_DEIMOS', '1200G',\n 'MasterWaveCalib_A_1_03.json')\n", (1670, 1754), False, 'import os\n'), ((1764, 1860), 'os.path.join', 'os.path.join', (['templates.template_path', '"""Keck_DEIMOS"""', '"""1200G"""', '"""MasterWaveCalib_A_1_07.json"""'], {}), "(templates.template_path, 'Keck_DEIMOS', '1200G',\n 'MasterWaveCalib_A_1_07.json')\n", (1776, 1860), False, 'import os\n'), ((1870, 1967), 'os.path.join', 'os.path.join', (['templates.template_path', '"""Keck_DEIMOS"""', '"""1200G"""', '"""MasterWaveCalib_A_1_07c.json"""'], {}), "(templates.template_path, 'Keck_DEIMOS', '1200G',\n 'MasterWaveCalib_A_1_07c.json')\n", (1882, 1967), False, 'import os\n'), ((1977, 2100), 'os.path.join', 'os.path.join', (['templates.template_path', '"""Keck_DEIMOS"""', '"""1200G"""', '"""1200G_bluetilt"""', '"""MasterWaveCalib_B_1_02_useS1261.fits"""'], {}), "(templates.template_path, 'Keck_DEIMOS', '1200G',\n '1200G_bluetilt', 'MasterWaveCalib_B_1_02_useS1261.fits')\n", (1989, 2100), False, 'import os\n'), ((2136, 2259), 'os.path.join', 'os.path.join', (['templates.template_path', '"""Keck_DEIMOS"""', '"""1200G"""', '"""1200G_bluetilt"""', '"""MasterWaveCalib_B_1_06_useS0132.fits"""'], {}), "(templates.template_path, 'Keck_DEIMOS', '1200G',\n '1200G_bluetilt', 'MasterWaveCalib_B_1_06_useS0132.fits')\n", (2148, 2259), False, 'import os\n'), ((2295, 2418), 'os.path.join', 'os.path.join', (['templates.template_path', '"""Keck_DEIMOS"""', '"""1200G"""', '"""1200G_bluetilt"""', '"""MasterWaveCalib_B_1_02_useS1652.fits"""'], {}), "(templates.template_path, 'Keck_DEIMOS', '1200G',\n '1200G_bluetilt', 'MasterWaveCalib_B_1_02_useS1652.fits')\n", (2307, 2418), False, 'import os\n'), ((2865, 3053), 'pypeit.core.wavecal.templates.build_template', 'templates.build_template', 
(['files', 'slits', 'lcut', 'binspec', 'outroot'], {'lowredux': '(False)', 'ifiles': 'ifiles', 'det_cut': 'det_cut', 'chk': '(True)', 'subtract_conti': '(True)', 'overwrite': 'overwrite', 'shift_wave': '(True)'}), '(files, slits, lcut, binspec, outroot, lowredux=\n False, ifiles=ifiles, det_cut=det_cut, chk=True, subtract_conti=True,\n overwrite=overwrite, shift_wave=True)\n', (2889, 3053), False, 'from pypeit.core.wavecal import templates\n'), ((3220, 3281), 'os.path.join', 'os.path.join', (['templates.template_path', '"""Keck_DEIMOS"""', '"""1200B"""'], {}), "(templates.template_path, 'Keck_DEIMOS', '1200B')\n", (3232, 3281), False, 'import os\n'), ((3771, 3980), 'pypeit.core.wavecal.templates.build_template', 'templates.build_template', (['wfiles', 'slits', 'wv_cuts', 'binspec', 'outroot'], {'ifiles': 'ifiles', 'det_cut': 'det_cut', 'chk': '(True)', 'normalize': '(False)', 'lowredux': '(False)', 'subtract_conti': '(True)', 'overwrite': 'overwrite', 'shift_wave': '(True)'}), '(wfiles, slits, wv_cuts, binspec, outroot, ifiles=\n ifiles, det_cut=det_cut, chk=True, normalize=False, lowredux=False,\n subtract_conti=True, overwrite=overwrite, shift_wave=True)\n', (3795, 3980), False, 'from pypeit.core.wavecal import templates\n'), ((4136, 4197), 'os.path.join', 'os.path.join', (['templates.template_path', '"""Keck_DEIMOS"""', '"""900ZD"""'], {}), "(templates.template_path, 'Keck_DEIMOS', '900ZD')\n", (4148, 4197), False, 'import os\n'), ((4732, 4941), 'pypeit.core.wavecal.templates.build_template', 'templates.build_template', (['wfiles', 'slits', 'wv_cuts', 'binspec', 'outroot'], {'ifiles': 'ifiles', 'det_cut': 'det_cut', 'chk': '(True)', 'normalize': '(False)', 'lowredux': '(False)', 'subtract_conti': '(True)', 'overwrite': 'overwrite', 'shift_wave': '(True)'}), '(wfiles, slits, wv_cuts, binspec, outroot, ifiles=\n ifiles, det_cut=det_cut, chk=True, normalize=False, lowredux=False,\n subtract_conti=True, overwrite=overwrite, shift_wave=True)\n', (4756, 4941), False, 'from pypeit.core.wavecal import templates\n'), ((3490, 3519), 'os.path.join', 'os.path.join', (['wpath', 'basefile'], {}), '(wpath, basefile)\n', (3502, 3519), False, 'import os\n'), ((4464, 4493), 'os.path.join', 'os.path.join', (['wpath', 'basefile'], {}), '(wpath, basefile)\n', (4476, 4493), False, 'import os\n')]
|
from setuptools import find_packages
from setuptools import setup
def readme():
with open('README.rst') as f:
return f.read()
setup(name='lcmap-merlin',
version='2.3.1',
description='Python client library for LCMAP rasters',
long_description=readme(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: Public Domain',
'Programming Language :: Python :: 3.6',
],
keywords='usgs lcmap eros',
url='http://github.com/usgs-eros/lcmap-merlin',
author='USGS EROS LCMAP',
author_email='',
license='Unlicense',
packages=find_packages(),
install_requires=[
'cytoolz',
'numpy',
'requests',
'python-dateutil',
],
# List additional groups of dependencies here (e.g. development
# dependencies). You can install these using the following syntax,
# for example:
# $ pip install -e .[test]
extras_require={
'test': ['pytest',
'pytest-cov',
'hypothesis',
'vcrpy',
],
'doc': ['sphinx',
'sphinx-autobuild',
'sphinx_rtd_theme'],
'dev': ['jupyter', 'readline'],
},
# entry_points={
#'console_scripts': [''],
# },
include_package_data=True,
zip_safe=False)
|
[
"setuptools.find_packages"
] |
[((640, 655), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (653, 655), False, 'from setuptools import find_packages\n')]
|
from datetime import datetime
from unittest import TestCase
from flask import Flask
from flask_sqlalchemy import SQLAlchemy, Model
from sqlalchemy import Integer, String, Column, func
from flask_resource_chassis import ChassisService, ValidationError
class Test(Model):
id = Column(Integer, primary_key=True)
name = Column(String(5), nullable=False)
class TestChassisService(TestCase):
def setUp(self):
self.app = Flask(__name__)
self.db = SQLAlchemy(self.app)
class Gender(self.db.Model):
id = self.db.Column(self.db.Integer, primary_key=True)
gender = self.db.Column(self.db.String(254), nullable=False)
is_deleted = self.db.Column(self.db.Boolean, nullable=False, default=False)
is_active = self.db.Column(self.db.Boolean, nullable=False, default=True)
created_at = self.db.Column(self.db.DateTime, nullable=False, server_default=func.now(),
default=datetime.utcnow)
self.Gender = Gender
self.service = ChassisService(self.app, self.db, self.Gender)
self.db.create_all()
def test_create(self):
"""
Tests entity successful creation
"""
gender = self.Gender()
gender.gender = "Male"
gender = self.service.create(gender)
self.assertIsNotNone(gender.id)
def test_update(self):
"""
Test ChassisService update() method. Test cases include:
1. Successful entity update
2. id validation
"""
gender = self.Gender()
gender.gender = "Female"
gender = self.service.create(gender)
try:
self.service.update(self.Gender(), -1)
self.fail("Chassis service id validation failed")
except ValidationError:
pass
gender2 = self.Gender()
gender2.gender = "Trans-Gender"
gender2.is_active = False
self.service.update(gender2, gender.id)
gender3 = self.Gender.query.filter_by(id=gender.id).first()
self.assertEqual(gender3.gender, gender2.gender)
self.assertEqual(gender3.is_active, gender2.is_active)
self.assertEqual(gender3.created_at, gender.created_at)
def test_delete(self):
"""
Test ChassisService delete() method
"""
gender = self.Gender()
gender.gender = "Female"
gender = self.service.create(gender)
self.service.delete(gender.id)
gender = self.Gender.query.filter_by(id=gender.id).first()
self.assertTrue(gender.is_deleted)
|
[
"flask_resource_chassis.ChassisService",
"sqlalchemy.func.now",
"flask.Flask",
"flask_sqlalchemy.SQLAlchemy",
"sqlalchemy.Column",
"sqlalchemy.String"
] |
[((283, 316), 'sqlalchemy.Column', 'Column', (['Integer'], {'primary_key': '(True)'}), '(Integer, primary_key=True)\n', (289, 316), False, 'from sqlalchemy import Integer, String, Column, func\n'), ((335, 344), 'sqlalchemy.String', 'String', (['(5)'], {}), '(5)\n', (341, 344), False, 'from sqlalchemy import Integer, String, Column, func\n'), ((440, 455), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (445, 455), False, 'from flask import Flask\n'), ((474, 494), 'flask_sqlalchemy.SQLAlchemy', 'SQLAlchemy', (['self.app'], {}), '(self.app)\n', (484, 494), False, 'from flask_sqlalchemy import SQLAlchemy, Model\n'), ((1065, 1111), 'flask_resource_chassis.ChassisService', 'ChassisService', (['self.app', 'self.db', 'self.Gender'], {}), '(self.app, self.db, self.Gender)\n', (1079, 1111), False, 'from flask_resource_chassis import ChassisService, ValidationError\n'), ((936, 946), 'sqlalchemy.func.now', 'func.now', ([], {}), '()\n', (944, 946), False, 'from sqlalchemy import Integer, String, Column, func\n')]
|
from typing import Dict, List, Iterator
import json
import logging
from overrides import overrides
import tqdm
import os
import sys
import codecs
import numpy as np
from allennlp.common import Params
from allennlp.common.file_utils import cached_path
from allennlp.data.dataset_readers.dataset_reader import DatasetReader
from allennlp.data.fields import LabelField, ArrayField, TextField
from allennlp.data.instance import Instance
from allennlp.data.tokenizers import Token, Tokenizer, WordTokenizer
from allennlp.data.token_indexers import TokenIndexer, SingleIdTokenIndexer
logger = logging.getLogger(__name__)
@DatasetReader.register("word_vectors")
class WordVectorDatasetReader(DatasetReader):
"""
Reads a text file for classification task
Expected format for each input line: word and tag (optional), separated by a tab.
The output of ``read`` is a list of ``Instance``s with the following fields:
token: ``TokenField``
label: ``LabelField``
Parameters
----------
lazy : ``bool`` (optional, default=False)
Passed to ``DatasetReader``. If this is ``True``, training will start sooner, but will
take longer per batch. This also allows training with datasets that are too large to fit
in memory.
"""
def __init__(self,
lazy: bool = False,
tokenizer: Tokenizer = None,
token_indexers: Dict[str, TokenIndexer] = None) -> None:
super().__init__(lazy)
self._tokenizer = tokenizer or WordTokenizer()
self._token_indexers = token_indexers or {"tokens": SingleIdTokenIndexer(lowercase_tokens=True)}
@overrides
def text_to_instance(self, token: List[Token], label: str = None) -> Instance:
token_field = TextField(token, self._token_indexers)
fields = {'token': token_field}
if label is not None:
fields['label'] = LabelField(label)
return Instance(fields)
@overrides
def _read(self, file_path: str) -> Iterator[Instance]:
"""
Reads input file.
Args:
file_path (str): path for file
"""
with codecs.open(file_path, encoding='utf-8') as f:
for line in f:
items = line.strip().split('\t')
token = items[0]
# label is optional
if len(items) > 1:
label = items[1]
else:
label = None
yield self.text_to_instance([Token(token)], label)
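# Illustrative input and usage sketch (the file name and contents are made up):
#
#   words.tsv:
#       apple<TAB>FRUIT
#       runs
#
#   reader = WordVectorDatasetReader()
#   for instance in reader.read('words.tsv'):
#       print(instance.fields['token'], instance.fields.get('label'))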
|
[
"allennlp.data.tokenizers.Token",
"codecs.open",
"allennlp.data.instance.Instance",
"allennlp.data.fields.LabelField",
"allennlp.data.dataset_readers.dataset_reader.DatasetReader.register",
"allennlp.data.tokenizers.WordTokenizer",
"allennlp.data.token_indexers.SingleIdTokenIndexer",
"logging.getLogger",
"allennlp.data.fields.TextField"
] |
[((592, 619), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (609, 619), False, 'import logging\n'), ((623, 661), 'allennlp.data.dataset_readers.dataset_reader.DatasetReader.register', 'DatasetReader.register', (['"""word_vectors"""'], {}), "('word_vectors')\n", (645, 661), False, 'from allennlp.data.dataset_readers.dataset_reader import DatasetReader\n'), ((1665, 1703), 'allennlp.data.fields.TextField', 'TextField', (['token', 'self._token_indexers'], {}), '(token, self._token_indexers)\n', (1674, 1703), False, 'from allennlp.data.fields import LabelField, ArrayField, TextField\n'), ((1815, 1831), 'allennlp.data.instance.Instance', 'Instance', (['fields'], {}), '(fields)\n', (1823, 1831), False, 'from allennlp.data.instance import Instance\n'), ((1437, 1452), 'allennlp.data.tokenizers.WordTokenizer', 'WordTokenizer', ([], {}), '()\n', (1450, 1452), False, 'from allennlp.data.tokenizers import Token, Tokenizer, WordTokenizer\n'), ((1785, 1802), 'allennlp.data.fields.LabelField', 'LabelField', (['label'], {}), '(label)\n', (1795, 1802), False, 'from allennlp.data.fields import LabelField, ArrayField, TextField\n'), ((1987, 2027), 'codecs.open', 'codecs.open', (['file_path'], {'encoding': '"""utf-8"""'}), "(file_path, encoding='utf-8')\n", (1998, 2027), False, 'import codecs\n'), ((1507, 1550), 'allennlp.data.token_indexers.SingleIdTokenIndexer', 'SingleIdTokenIndexer', ([], {'lowercase_tokens': '(True)'}), '(lowercase_tokens=True)\n', (1527, 1550), False, 'from allennlp.data.token_indexers import TokenIndexer, SingleIdTokenIndexer\n'), ((2241, 2253), 'allennlp.data.tokenizers.Token', 'Token', (['token'], {}), '(token)\n', (2246, 2253), False, 'from allennlp.data.tokenizers import Token, Tokenizer, WordTokenizer\n')]
|
import docker
import logging
from nodemgr.common.docker_mem_cpu import DockerMemCpuUsageData
class DockerContainersInterface:
def __init__(self):
self._client = docker.from_env()
if hasattr(self._client, 'api'):
self._client = self._client.api
def list(self, all_=True):
return self._client.containers(all=all_)
def inspect(self, id_):
try:
return self._client.inspect_container(id_)
except docker.errors.APIError:
logging.exception('docker')
return None
def execute(self, id_, line_):
exec_op = self._client.exec_create(id_, line_, tty=True)
res = ''
try:
# string or stream result works unstable
# using socket with own read implementation
socket = self._client.exec_start(exec_op["Id"], tty=True, socket=True)
socket.settimeout(10.0)
while True:
part = socket.recv(1024)
if len(part) == 0:
break
res += part
finally:
if socket:
# There is cyclic reference there
# https://github.com/docker/docker-py/blob/master/docker/api/client.py#L321
# sock => response => socket
# https://github.com/docker/docker-py/issues/2137
try:
socket._response = None
except AttributeError:
pass
socket.close()
data = self._client.exec_inspect(exec_op["Id"])
e = data.get("ExitCode", 0)
if 0 != e:
logging.critical("Exit data: {}".format(data))
return (e, res)
def query_usage(self, id_, last_cpu_, last_time_):
return DockerMemCpuUsageData(id_, last_cpu_, last_time_)
|
[
"docker.from_env",
"logging.exception",
"nodemgr.common.docker_mem_cpu.DockerMemCpuUsageData"
] |
[((175, 192), 'docker.from_env', 'docker.from_env', ([], {}), '()\n', (190, 192), False, 'import docker\n'), ((1792, 1841), 'nodemgr.common.docker_mem_cpu.DockerMemCpuUsageData', 'DockerMemCpuUsageData', (['id_', 'last_cpu_', 'last_time_'], {}), '(id_, last_cpu_, last_time_)\n', (1813, 1841), False, 'from nodemgr.common.docker_mem_cpu import DockerMemCpuUsageData\n'), ((507, 534), 'logging.exception', 'logging.exception', (['"""docker"""'], {}), "('docker')\n", (524, 534), False, 'import logging\n')]
|
#!/usr/bin/env python3
import os, time, json
import numpy as np
import pandas as pd
from pprint import pprint
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.colors as mcolors
from matplotlib.colors import LogNorm
import scipy.signal as signal
import argparse
import pdb
import tinydb as db
from pygama import DataSet
from pygama.analysis.calibration import *
from pygama.analysis.histograms import *
import pygama.utils as pgu
from matplotlib.lines import Line2D
from pygama.utils import set_plot_style
set_plot_style("clint")
def main():
"""
mj60 waveform viewer
"""
run_db, cal_db = "runDB.json", "calDB.json"
par = argparse.ArgumentParser(description="waveform viewer for mj60")
arg, st, sf = par.add_argument, "store_true", "store_false"
arg("-ds", nargs='*', action="store", help="load runs for a DS")
arg("-r", "--run", nargs=1, help="load a single run")
arg("-db", "--writeDB", action=st, help="store results in DB")
args = vars(par.parse_args())
# -- declare the DataSet --
if args["ds"]:
ds_lo = int(args["ds"][0])
try:
ds_hi = int(args["ds"][1])
except:
ds_hi = None
ds = DataSet(ds_lo, ds_hi,
md=run_db, cal = cal_db) #,tier_dir=tier_dir)
if args["run"]:
ds = DataSet(run=int(args["run"][0]),
md=run_db, cal=cal_db)
# Which run number is the being analyzed
# run = 249
# run = 214
# run = 204
# run = 278
# working on analysis for the AvsE cut in mj60
# t1df, t2df = chunker(run)
# cutwf, t2cut = cutter(t1df, t2df, run)
# histograms(cutwf, t2cut, run)
# histograms(ds)
drift_correction(ds, ds_lo)
# def histograms(t1df, t2df, run):
def histograms(ds):
t2 = ds.get_t2df()
print(t2.columns)
exit()
t2df = os.path.expandvars('{}/Spectrum_{}.hdf5'.format(meta_dir,run))
t2df = pd.read_hdf(t2df, key="df")
# n = "tslope_savgol"
# n = "current_max"
# n = "tslope_pz"
n = "tail_tau"
# n = "tail_amp"
e = "e_cal"
x = t2df[e]
# y = t2df[n]
y = t2df[n] / x
plt.clf()
# H, xedges, yedges = np.histogram2d(t2df["tail_tau"], t2df["e_ftp"], bins=[2000,200], range=[[0, 6600], [0, 5]])
plt.hist2d(x, y, bins=[1000,200], range=[[0, 200], [0, .001]], norm=LogNorm(), cmap='jet')
# plt.hist2d(x, y, bins=[1000,1000], norm=LogNorm())
# plt.scatter(H[0],H[1])
# f = plt.figure(figsize=(20,5))
# p1 = f.add_subplot(111, title='Test', xlabel='Energy (keV)', ylabel=n)
# h1,xedg1,yedg1 = np.histogram2d(x, y, bins=[1000,200], range=[[0,2000],[0,100]])
# h1 = h1.T
# # hMin, hMax = np.amin(h1), np.amax(h1)
# # im1 = p1.imshow(h1,cmap='jet',vmin=hMin,vmax=hMax, aspect='auto') #norm=LogNorm())
# im1 = p1.imshow(h1,cmap='jet', origin='lower', aspect='auto', norm=LogNorm(), extent=[xedg1[0], xedg1[-1], yedg1[0], yedg1[-1]])
# cb1 = f.colorbar(im1, ax=p1)#, fraction=0.037, pad=0.04)
cbar = plt.colorbar()
# plt.xscale('symlog')
# plt.yscale('symlog')
plt.title("Run {}".format(run))
plt.xlabel("Energy (keV)", ha='right', x=1)
plt.ylabel(n, ha='right', y=1)
# cbar.ax.set_ylabel('Counts')
# plt.ylabel("tslope_savgol", ha='right', y=1)
# plt.ylabel("A/E_ftp", ha='right', y=1)
# plt.tight_layout()
# # plt.savefig('./plots/meeting_plots/run{}_{}_vs_{}.png'.format(run, n, e))
# plt.show()
# xlo, xhi, xpb = 0, 10000, 10
# xP, hP = get_hist(t2df["trap_max"], xlo, xhi, xpb)
#
# plt.plot(xP, hP, ls='steps', lw=1.5, c='m',
# label="pygama trap_max, {} cts".format(sum(hP)))
# plt.xlabel("Energy (uncal)", ha='right', x=1)
# plt.ylabel("Counts", ha='right', y=1)
# plt.legend()
plt.tight_layout()
plt.show()
def chunker(run):
t1df = os.path.expandvars('{}/t1_run{}.h5'.format(tier_dir,run))
t2df = os.path.expandvars('{}/Spectrum_{}.hdf5'.format(meta_dir,run))
t2df = pd.read_hdf(t2df, key="df")
t2df_chunk = t2df[:75000]
key = "/ORSIS3302DecoderForEnergy"
wf_chunk = pd.read_hdf(t1df, key, where="ievt < {}".format(75000))
wf_chunk.reset_index(inplace=True) # required step -- fix pygama "append" bug
t2df = t2df.reset_index(drop=True)
# create waveform block. mask wfs of unequal lengths
icols = []
for idx, col in enumerate(wf_chunk.columns):
if isinstance(col, int):
icols.append(col)
wf_block = wf_chunk[icols].values
# print(wf_block.shape, type(wf_block))
# print(t2df_chunk)
return wf_block, t2df_chunk
def cutter(t1df, t2df, run):
# t2cut = t2df.loc[(t2df.e_cal>3.1099]
t2cut = t2df
print(t2cut.index)
print(t2cut)
cutwf = t1df[t2cut.index]
print(cutwf)
# xvals = np.arange(0,3000)
# start = time.time()
# for i in range(len(t2cut.index)):
# # for i in range(0,5):
# plt.plot(xvals, cutwf[i], lw=1)
# plt.xlabel('Sample Number', ha='right', x=1.0)
# plt.ylabel('ADC Value', ha='right', y=1.0)
# plt.tight_layout()
# plt.show()
return cutwf, t2cut
def drift_correction(ds, ds_lo):
## testing a drift time correction code
# t1df = ds.get_t1df()
# t1df.reset_index(inplace=True)
# t2df = ds.get_t2df()
"""
Take a single DataSet and window it so that the output file only contains
events near an expected peak location.
"""
# a user has to figure out the uncalibrated energy range of the K40 peak
# xlo, xhi, xpb = 0, 2e6, 2000 # show phys. spectrum (top feature is 2615 pk)
# xlo, xhi, xpb = 990000, 1030000, 250 # k40 peak, ds 3
t2df = ds.get_t2df()
calDB = ds.calDB
query = db.Query()
table = calDB.table("cal_pass1")
vals = table.all()
df_cal = pd.DataFrame(vals) # <<---- omg awesome
df_cal = df_cal.loc[df_cal.ds==ds_lo]
p1cal = df_cal.iloc[0]["p1cal"]
cal = p1cal * np.asarray(t2df["e_ftp"])
xlo = 2.46e6
xhi = 2.5e6
    hE, xE = get_hist(t2df["energy"], bins=100, range=(xlo, xhi))  # get_hist comes from the pygama.analysis.histograms star import
plt.semilogy(xE, hE, ls='steps', lw=1, c='r')
import matplotlib.ticker as ticker
plt.gca().xaxis.set_major_formatter(ticker.FormatStrFormatter('%0.4e'))
plt.locator_params(axis='x', nbins=5)
plt.xlabel("Energy (uncal.)", ha='right', x=1)
plt.ylabel("Counts", ha='right', y=1)
plt.show()
# plt.savefig(f"./plots/cage_ds{ds.ds_lo}_winK40.pdf")
t1df = pd.DataFrame()
for run in ds.paths:
ft1 = ds.paths[run]["t1_path"]
print(f"Scanning ds {ds.ds_lo}, run {run}\n file: {ft1}")
for chunk in pd.read_hdf(ft1, 'ORSIS3302DecoderForEnergy', chunksize=5e4):
t1df_win = chunk.loc[(chunk.energy > xlo) & (chunk.energy < xhi)]
print(t1df_win.shape)
t1df = pd.concat([t1df, t1df_win], ignore_index=True)
print('It worked? maybe?')
h5_opts = {
"mode":"w", # overwrite existing
"append":False,
"format":"table",
# "complib":"blosc:zlib", # no compression, increases I/O speed
# "complevel":1,
# "data_columns":["ievt"]
}
t1df.reset_index(inplace=True)
t1df.to_hdf('./test_dt_file.h5', key="df_windowed", **h5_opts)
print("wrote file")
exit()
# key = "/ORSIS3302DecoderForEnergy"
# wf_chunk = pd.read_hdf(t1df, key, where="ievt < {}".format(75000))
# wf_chunk.reset_index(inplace=True) # required step -- fix pygama "append" bug
t2df = t2df.reset_index(drop=True)
# create waveform block. mask wfs of unequal lengths
number = 20000
icols = []
for idx, col in enumerate(t1df.columns):
if isinstance(col, int):
icols.append(col)
wfs = t1df[icols].values
wfs = np.asarray(wfs)
# wfs = wfs[:number]
# t2df_chunk = t2df[:number]
# print(wf_block.shape, type(wf_block))
# print(t2df_chunk)
t0 = np.asarray(t2df['t0'])
energy = np.asarray(t2df['e_ftp'])
# energy = 0.4066852222964447 * energy
baseline = wfs[:, 0:500]
avg_bl = []
for i in range(len(wfs)):
avg_bl.append(np.mean(baseline[i], keepdims=True))
avg_bl = np.asarray(avg_bl)
wfs = np.asarray(wfs)
wfs = wfs - avg_bl
clk = 100e6
decay = 78
wfs = pz(wfs, decay, clk)
t100 = []
t0_raw = []
wf_raw = []
e_raw = []
for i in range(len(wfs)):
t100_t = np.where(wfs[i] > energy[i])
t100_t = t100_t[0]
if len(t100_t) > 0:
t100_t = t100_t[0]
t100.append(t100_t)
t0_raw.append(t0[i])
wf_raw.append(wfs[i])
e_raw.append(energy[i])
e_raw = np.asarray(e_raw)
index = np.where(e_raw < 7300)[0]
t100 = np.asarray(t100)
t0_raw = np.asarray(t0_raw)
wf_raw = np.asarray(wf_raw)
e_raw = e_raw[index]
t100 = t100[index]
t0_raw = t0_raw[index]
wf_raw = wf_raw[index]
e_raw = 0.4066852222964447 * e_raw
wf_raw = 0.4066852222964447 * wf_raw
hist, bins = np.histogram(e_raw, bins=2700, range=[0,2700])
b = (bins[:-1] + bins[1:]) / 2
plt.plot(b, hist, ls="steps", color='black')
plt.tight_layout()
plt.show()
plt.clf()
# xvals = np.arange(0,3000)
# start = time.time()
# for i in range(len(t100)):
#
# plt.plot(xvals, wf_raw[i], lw=1)
# plt.vlines(t0_raw[i], np.amin(wf_raw[i]), e_raw[i], color='r', linewidth=1.5, label='t0')
# plt.vlines(t100[i], np.amin(wf_raw[i]), e_raw[i], color='g', linewidth=1.5, label='t100')
# plt.hlines(e_raw[i], t0_raw[i], 3000, color='k', linewidth=1.5, zorder=10, label='e_ftp')
# plt.xlabel('Sample Number', ha='right', x=1.0)
# plt.ylabel('ADC Value', ha='right', y=1.0)
# plt.legend()
# plt.tight_layout()
# plt.show()
# exit()
"""
a1 = (t100 - t0_raw) * e_raw
a_wf = []
for i in range(len(wf_raw)):
a2 = sum(wf_raw[i,t0[i]:t100[i]])
a_wf.append(a2)
a_drift = a1 - a_wf
# a_drift = a_drift.tolist()
# print(a_drift)
# exit()
a_test = a_drift[np.where((e_raw > 2600) & (e_raw < 2630))]
e_test = e_raw[np.where((e_raw > 2600) & (e_raw < 2630))]
plt.hist2d(e_test, a_test, bins=[30,100], range=[[2600, 2630], [0, np.amax(a_test)]], norm=LogNorm(), cmap='jet')
cbar = plt.colorbar()
cbar.ax.set_ylabel('Counts')
plt.tight_layout()
plt.show()
exit()
"""
xvals = np.arange(0,3000)
start = time.time()
for i in range(0,number):
# for i in range(0,5):
plt.plot(xvals, wfs[i], lw=1)
plt.vlines(t0[i], np.amin(wfs[i]), energy[i], color='r', linewidth=1.5, label='t0')
plt.vlines(t100[i], np.amin(wfs[i]), energy[i], color='g', linewidth=1.5, label='t100')
plt.hlines(energy[i], t0[i], 3000, color='k', linewidth=1.5, zorder=10, label='e_ftp')
plt.xlabel('Sample Number', ha='right', x=1.0)
plt.ylabel('ADC Value', ha='right', y=1.0)
plt.legend()
plt.tight_layout()
plt.show()
# input:
# fsignal: PZ-corrected and INL-corrected signal of length len, from channel chan
# Dets: MJ detector info data structure
# PSA: contains filter params to use for trapezoids
# CTC_factor: the value used in the correction, usually CTC.e_dt_slope[chan]
# outputs:
# returned value: energy in keV, or -1.0f in case of error
# t0: start time of drift/signal
# e_adc: energy in ADC units
# e_raw: uncorrected energy in 0.001 ADC units
# drift: charge trapping value (drift time * charge)
# to be used for optimizing correction, in ADC units
# CTC correction = drift*ctc_factor[chan]
def pz(wfs, decay, clk):
"""
pole-zero correct a waveform
decay is in us, clk is in Hz
"""
# get linear filter parameters, in units of [clock ticks]
dt = decay * (1e10 / clk)
rc = 1 / np.exp(1 / dt)
num, den = [1, -1], [1, -rc]
# reversing num and den does the inverse transform (ie, PZ corrects)
pz_wfs = signal.lfilter(den, num, wfs)
return pz_wfs
# return wfs, t2df_chunk
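# Worked example (sketch, not from the original script): with decay = 78 us and
# clk = 100e6 Hz, dt = 78 * (1e10 / 1e8) = 7800 clock ticks and
# rc = exp(-1/7800) ~= 0.99987, so lfilter([1, -rc], [1, -1], wf) removes the
# ~78 us exponential tail of the preamp signal. Note that 1e10/clk matches the
# generic us-to-ticks conversion decay * 1e-6 * clk only for clk = 100 MHz.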
if __name__=="__main__":
main()
|
[
"argparse.ArgumentParser",
"numpy.amin",
"matplotlib.pyplot.clf",
"numpy.histogram",
"matplotlib.colors.LogNorm",
"numpy.arange",
"numpy.exp",
"numpy.mean",
"matplotlib.pyplot.gca",
"matplotlib.pyplot.tight_layout",
"matplotlib.pyplot.hlines",
"pandas.DataFrame",
"matplotlib.pyplot.locator_params",
"pandas.read_hdf",
"scipy.signal.lfilter",
"matplotlib.pyplot.colorbar",
"matplotlib.ticker.FormatStrFormatter",
"matplotlib.pyplot.semilogy",
"pandas.concat",
"pygama.DataSet",
"matplotlib.pyplot.show",
"pygama.utils.set_plot_style",
"numpy.asarray",
"matplotlib.pyplot.legend",
"tinydb.Query",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.plot",
"time.time",
"numpy.where",
"matplotlib.pyplot.xlabel"
] |
[((532, 555), 'pygama.utils.set_plot_style', 'set_plot_style', (['"""clint"""'], {}), "('clint')\n", (546, 555), False, 'from pygama.utils import set_plot_style\n'), ((669, 732), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""waveform viewer for mj60"""'}), "(description='waveform viewer for mj60')\n", (692, 732), False, 'import argparse\n'), ((1945, 1972), 'pandas.read_hdf', 'pd.read_hdf', (['t2df'], {'key': '"""df"""'}), "(t2df, key='df')\n", (1956, 1972), True, 'import pandas as pd\n'), ((2164, 2173), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (2171, 2173), True, 'import matplotlib.pyplot as plt\n'), ((3039, 3053), 'matplotlib.pyplot.colorbar', 'plt.colorbar', ([], {}), '()\n', (3051, 3053), True, 'import matplotlib.pyplot as plt\n'), ((3150, 3193), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Energy (keV)"""'], {'ha': '"""right"""', 'x': '(1)'}), "('Energy (keV)', ha='right', x=1)\n", (3160, 3193), True, 'import matplotlib.pyplot as plt\n'), ((3198, 3228), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['n'], {'ha': '"""right"""', 'y': '(1)'}), "(n, ha='right', y=1)\n", (3208, 3228), True, 'import matplotlib.pyplot as plt\n'), ((3816, 3834), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (3832, 3834), True, 'import matplotlib.pyplot as plt\n'), ((3839, 3849), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3847, 3849), True, 'import matplotlib.pyplot as plt\n'), ((4024, 4051), 'pandas.read_hdf', 'pd.read_hdf', (['t2df'], {'key': '"""df"""'}), "(t2df, key='df')\n", (4035, 4051), True, 'import pandas as pd\n'), ((5760, 5770), 'tinydb.Query', 'db.Query', ([], {}), '()\n', (5768, 5770), True, 'import tinydb as db\n'), ((5844, 5862), 'pandas.DataFrame', 'pd.DataFrame', (['vals'], {}), '(vals)\n', (5856, 5862), True, 'import pandas as pd\n'), ((6114, 6159), 'matplotlib.pyplot.semilogy', 'plt.semilogy', (['xE', 'hE'], {'ls': '"""steps"""', 'lw': '(1)', 'c': '"""r"""'}), "(xE, hE, ls='steps', lw=1, c='r')\n", (6126, 6159), True, 'import matplotlib.pyplot as plt\n'), ((6280, 6317), 'matplotlib.pyplot.locator_params', 'plt.locator_params', ([], {'axis': '"""x"""', 'nbins': '(5)'}), "(axis='x', nbins=5)\n", (6298, 6317), True, 'import matplotlib.pyplot as plt\n'), ((6323, 6369), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Energy (uncal.)"""'], {'ha': '"""right"""', 'x': '(1)'}), "('Energy (uncal.)', ha='right', x=1)\n", (6333, 6369), True, 'import matplotlib.pyplot as plt\n'), ((6374, 6411), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Counts"""'], {'ha': '"""right"""', 'y': '(1)'}), "('Counts', ha='right', y=1)\n", (6384, 6411), True, 'import matplotlib.pyplot as plt\n'), ((6416, 6426), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (6424, 6426), True, 'import matplotlib.pyplot as plt\n'), ((6498, 6512), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (6510, 6512), True, 'import pandas as pd\n'), ((7804, 7819), 'numpy.asarray', 'np.asarray', (['wfs'], {}), '(wfs)\n', (7814, 7819), True, 'import numpy as np\n'), ((7957, 7979), 'numpy.asarray', 'np.asarray', (["t2df['t0']"], {}), "(t2df['t0'])\n", (7967, 7979), True, 'import numpy as np\n'), ((7993, 8018), 'numpy.asarray', 'np.asarray', (["t2df['e_ftp']"], {}), "(t2df['e_ftp'])\n", (8003, 8018), True, 'import numpy as np\n'), ((8210, 8228), 'numpy.asarray', 'np.asarray', (['avg_bl'], {}), '(avg_bl)\n', (8220, 8228), True, 'import numpy as np\n'), ((8239, 8254), 'numpy.asarray', 'np.asarray', (['wfs'], {}), '(wfs)\n', (8249, 8254), True, 'import numpy 
as np\n'), ((8714, 8731), 'numpy.asarray', 'np.asarray', (['e_raw'], {}), '(e_raw)\n', (8724, 8731), True, 'import numpy as np\n'), ((8781, 8797), 'numpy.asarray', 'np.asarray', (['t100'], {}), '(t100)\n', (8791, 8797), True, 'import numpy as np\n'), ((8811, 8829), 'numpy.asarray', 'np.asarray', (['t0_raw'], {}), '(t0_raw)\n', (8821, 8829), True, 'import numpy as np\n'), ((8843, 8861), 'numpy.asarray', 'np.asarray', (['wf_raw'], {}), '(wf_raw)\n', (8853, 8861), True, 'import numpy as np\n'), ((9064, 9111), 'numpy.histogram', 'np.histogram', (['e_raw'], {'bins': '(2700)', 'range': '[0, 2700]'}), '(e_raw, bins=2700, range=[0, 2700])\n', (9076, 9111), True, 'import numpy as np\n'), ((9150, 9194), 'matplotlib.pyplot.plot', 'plt.plot', (['b', 'hist'], {'ls': '"""steps"""', 'color': '"""black"""'}), "(b, hist, ls='steps', color='black')\n", (9158, 9194), True, 'import matplotlib.pyplot as plt\n'), ((9199, 9217), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (9215, 9217), True, 'import matplotlib.pyplot as plt\n'), ((9222, 9232), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (9230, 9232), True, 'import matplotlib.pyplot as plt\n'), ((9237, 9246), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (9244, 9246), True, 'import matplotlib.pyplot as plt\n'), ((10580, 10598), 'numpy.arange', 'np.arange', (['(0)', '(3000)'], {}), '(0, 3000)\n', (10589, 10598), True, 'import numpy as np\n'), ((10610, 10621), 'time.time', 'time.time', ([], {}), '()\n', (10619, 10621), False, 'import os, time, json\n'), ((12204, 12233), 'scipy.signal.lfilter', 'signal.lfilter', (['den', 'num', 'wfs'], {}), '(den, num, wfs)\n', (12218, 12233), True, 'import scipy.signal as signal\n'), ((1218, 1262), 'pygama.DataSet', 'DataSet', (['ds_lo', 'ds_hi'], {'md': 'run_db', 'cal': 'cal_db'}), '(ds_lo, ds_hi, md=run_db, cal=cal_db)\n', (1225, 1262), False, 'from pygama import DataSet\n'), ((5980, 6005), 'numpy.asarray', 'np.asarray', (["t2df['e_ftp']"], {}), "(t2df['e_ftp'])\n", (5990, 6005), True, 'import numpy as np\n'), ((6240, 6274), 'matplotlib.ticker.FormatStrFormatter', 'ticker.FormatStrFormatter', (['"""%0.4e"""'], {}), "('%0.4e')\n", (6265, 6274), True, 'import matplotlib.ticker as ticker\n'), ((6667, 6731), 'pandas.read_hdf', 'pd.read_hdf', (['ft1', '"""ORSIS3302DecoderForEnergy"""'], {'chunksize': '(50000.0)'}), "(ft1, 'ORSIS3302DecoderForEnergy', chunksize=50000.0)\n", (6678, 6731), True, 'import pandas as pd\n'), ((8451, 8479), 'numpy.where', 'np.where', (['(wfs[i] > energy[i])'], {}), '(wfs[i] > energy[i])\n', (8459, 8479), True, 'import numpy as np\n'), ((8744, 8766), 'numpy.where', 'np.where', (['(e_raw < 7300)'], {}), '(e_raw < 7300)\n', (8752, 8766), True, 'import numpy as np\n'), ((10687, 10716), 'matplotlib.pyplot.plot', 'plt.plot', (['xvals', 'wfs[i]'], {'lw': '(1)'}), '(xvals, wfs[i], lw=1)\n', (10695, 10716), True, 'import matplotlib.pyplot as plt\n'), ((10913, 11003), 'matplotlib.pyplot.hlines', 'plt.hlines', (['energy[i]', 't0[i]', '(3000)'], {'color': '"""k"""', 'linewidth': '(1.5)', 'zorder': '(10)', 'label': '"""e_ftp"""'}), "(energy[i], t0[i], 3000, color='k', linewidth=1.5, zorder=10,\n label='e_ftp')\n", (10923, 11003), True, 'import matplotlib.pyplot as plt\n'), ((11008, 11054), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Sample Number"""'], {'ha': '"""right"""', 'x': '(1.0)'}), "('Sample Number', ha='right', x=1.0)\n", (11018, 11054), True, 'import matplotlib.pyplot as plt\n'), ((11063, 11105), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""ADC Value"""'], 
{'ha': '"""right"""', 'y': '(1.0)'}), "('ADC Value', ha='right', y=1.0)\n", (11073, 11105), True, 'import matplotlib.pyplot as plt\n'), ((11114, 11126), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (11124, 11126), True, 'import matplotlib.pyplot as plt\n'), ((11135, 11153), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (11151, 11153), True, 'import matplotlib.pyplot as plt\n'), ((11162, 11172), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (11170, 11172), True, 'import matplotlib.pyplot as plt\n'), ((12069, 12083), 'numpy.exp', 'np.exp', (['(1 / dt)'], {}), '(1 / dt)\n', (12075, 12083), True, 'import numpy as np\n'), ((2364, 2373), 'matplotlib.colors.LogNorm', 'LogNorm', ([], {}), '()\n', (2371, 2373), False, 'from matplotlib.colors import LogNorm\n'), ((6860, 6906), 'pandas.concat', 'pd.concat', (['[t1df, t1df_win]'], {'ignore_index': '(True)'}), '([t1df, t1df_win], ignore_index=True)\n', (6869, 6906), True, 'import pandas as pd\n'), ((8160, 8195), 'numpy.mean', 'np.mean', (['baseline[i]'], {'keepdims': '(True)'}), '(baseline[i], keepdims=True)\n', (8167, 8195), True, 'import numpy as np\n'), ((10743, 10758), 'numpy.amin', 'np.amin', (['wfs[i]'], {}), '(wfs[i])\n', (10750, 10758), True, 'import numpy as np\n'), ((10837, 10852), 'numpy.amin', 'np.amin', (['wfs[i]'], {}), '(wfs[i])\n', (10844, 10852), True, 'import numpy as np\n'), ((6204, 6213), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (6211, 6213), True, 'import matplotlib.pyplot as plt\n')]
|
# -*- coding: utf-8 -*-
"""
USAGE: DU_Table_Annotator.py input-folder
You must run this on your GT collection to create a training collection.
If you pass a folder, you get a new folder with name postfixed by a_
Does 2 things:
- 1 -
Annotate textlines for Table understanding (finding rows and columns)
It tags the TextLine, to indicate:
- the table header, vs data, vs other stuff:
@DU_header = 'CH' | 'D' | 'O'
- the vertical rank in the table cell:
@DU_row = 'B' | 'I' | 'E' | 'S' | 'O'
- something regarding the number of text in a cell??
    # NOT SURE THIS WORKS...
@DU_col = 'M' | 'S' | 'O'
- 2 -
Aggregate the borders of the cells by linear regression to reflect them
as a line, which is stored as a SeparatorRegion element.
Copyright Naver Labs Europe 2017, 2018
<NAME>
<NAME>
Developed for the EU project READ. The READ project has received funding
from the European Union's Horizon 2020 research and innovation programme
under grant agreement No 674943.
"""
import sys, os
try: #to ease the use without proper Python installation
import TranskribusDU_version
except ImportError:
sys.path.append( os.path.dirname(os.path.dirname( os.path.abspath(sys.argv[0]) )) )
import TranskribusDU_version
from common.trace import traceln
import tasks.DU_Table.DU_ABPTableRCAnnotation
if __name__ == "__main__":
try:
#we expect a folder
sInputDir = sys.argv[1]
if not os.path.isdir(sInputDir): raise Exception()
except IndexError:
traceln("Usage: %s <folder>" % sys.argv[0])
exit(1)
sOutputDir = "a_"+sInputDir
traceln(" - Output will be in ", sOutputDir)
try:
os.mkdir(sOutputDir)
os.mkdir(os.path.join(sOutputDir, "col"))
except:
pass
lsFilename = [s for s in os.listdir(os.path.join(sInputDir, "col")) if s.endswith(".mpxml") ]
lsFilename.sort()
lsOutFilename = [os.path.join(sOutputDir, "col", "a_"+s) for s in lsFilename]
if not lsFilename:
lsFilename = [s for s in os.listdir(os.path.join(sInputDir, "col")) if s.endswith(".pxml") ]
lsFilename.sort()
lsOutFilename = [os.path.join(sOutputDir, "col", "a_"+s[:-5]+".mpxml") for s in lsFilename]
lsInFilename = [os.path.join(sInputDir , "col", s) for s in lsFilename]
traceln(lsFilename)
traceln("%d files to be processed" % len(lsFilename))
tasks.DU_Table.DU_ABPTableRCAnnotation.main(lsInFilename, lsOutFilename)
|
[
"os.mkdir",
"os.path.abspath",
"os.path.isdir",
"common.trace.traceln",
"os.path.join"
] |
[((1771, 1815), 'common.trace.traceln', 'traceln', (['""" - Output will be in """', 'sOutputDir'], {}), "(' - Output will be in ', sOutputDir)\n", (1778, 1815), False, 'from common.trace import traceln\n'), ((2474, 2493), 'common.trace.traceln', 'traceln', (['lsFilename'], {}), '(lsFilename)\n', (2481, 2493), False, 'from common.trace import traceln\n'), ((1833, 1853), 'os.mkdir', 'os.mkdir', (['sOutputDir'], {}), '(sOutputDir)\n', (1841, 1853), False, 'import sys, os\n'), ((2075, 2116), 'os.path.join', 'os.path.join', (['sOutputDir', '"""col"""', "('a_' + s)"], {}), "(sOutputDir, 'col', 'a_' + s)\n", (2087, 2116), False, 'import sys, os\n'), ((2408, 2441), 'os.path.join', 'os.path.join', (['sInputDir', '"""col"""', 's'], {}), "(sInputDir, 'col', s)\n", (2420, 2441), False, 'import sys, os\n'), ((1599, 1623), 'os.path.isdir', 'os.path.isdir', (['sInputDir'], {}), '(sInputDir)\n', (1612, 1623), False, 'import sys, os\n'), ((1674, 1717), 'common.trace.traceln', 'traceln', (["('Usage: %s <folder>' % sys.argv[0])"], {}), "('Usage: %s <folder>' % sys.argv[0])\n", (1681, 1717), False, 'from common.trace import traceln\n'), ((1871, 1902), 'os.path.join', 'os.path.join', (['sOutputDir', '"""col"""'], {}), "(sOutputDir, 'col')\n", (1883, 1902), False, 'import sys, os\n'), ((2311, 2368), 'os.path.join', 'os.path.join', (['sOutputDir', '"""col"""', "('a_' + s[:-5] + '.mpxml')"], {}), "(sOutputDir, 'col', 'a_' + s[:-5] + '.mpxml')\n", (2323, 2368), False, 'import sys, os\n'), ((1974, 2004), 'os.path.join', 'os.path.join', (['sInputDir', '"""col"""'], {}), "(sInputDir, 'col')\n", (1986, 2004), False, 'import sys, os\n'), ((1338, 1366), 'os.path.abspath', 'os.path.abspath', (['sys.argv[0]'], {}), '(sys.argv[0])\n', (1353, 1366), False, 'import sys, os\n'), ((2203, 2233), 'os.path.join', 'os.path.join', (['sInputDir', '"""col"""'], {}), "(sInputDir, 'col')\n", (2215, 2233), False, 'import sys, os\n')]
|
# -*- coding: utf-8 -*-
import json
import logging
import time
from multiprocessing import Process
from uuid import uuid4
import schedule
from jsonschema import validate
from scheduler import Scheduler as CronSchedulerServer
from spaceone.core import queue
from spaceone.core.error import ERROR_CONFIGURATION
from spaceone.core.scheduler.task_schema import SPACEONE_TASK_SCHEMA
_LOGGER = logging.getLogger(__name__)
class BaseScheduler(Process):
def __init__(self, queue, **kwargs):
super().__init__()
self.queue = queue
self.config = None
def push_task(self):
# Create Task
tasks = self.create_task()
_LOGGER.debug(f'[push_task] task: {len(tasks)}')
for task in tasks:
try:
validate(task, schema=SPACEONE_TASK_SCHEMA)
json_task = json.dumps(task)
_LOGGER.debug(f'[push_task] Task schema: {task}')
queue.put(self.queue, json_task)
except Exception as e:
print(e)
_LOGGER.debug(f'[push_task] Task schema: {task}, {e}')
def run(self):
        raise NotImplementedError('scheduler.run is not implemented')
def create_task(self):
        raise NotImplementedError('scheduler.create_task is not implemented')
class IntervalScheduler(BaseScheduler):
def __init__(self, queue, interval):
super().__init__(queue)
self.config = self.parse_config(interval)
def parse_config(self, expr):
""" expr
format: integer (second)
"""
try:
if isinstance(expr, int):
return int(expr)
except Exception as e:
            _LOGGER.error(f'[parse_config] Wrong configuration, {e}')
def run(self):
schedule.every(self.config).seconds.do(self.push_task)
while True:
schedule.run_pending()
time.sleep(1)
class HourlyScheduler(BaseScheduler):
"""
HourlyScheduler starts every HH:00
    If you want to start at a different minute past the hour,
    pass minute like ':15' to run at 15 minutes past each hour
"""
def __init__(self, queue, interval=1, minute=':00'):
super().__init__(queue)
self.config = self.parse_config(interval)
self.minute = minute
def parse_config(self, expr):
""" expr
format: integer (hour)
"""
try:
if isinstance(expr, int):
return int(expr)
except Exception as e:
_LOGGER.error(f'[parse_config] Wrong configuration, {e}')
raise ERROR_CONFIGURATION(key='interval')
def run(self):
# Call push_task in every hour
schedule.every(self.config).hours.at(self.minute).do(self.push_task)
while True:
schedule.run_pending()
time.sleep(1)
class CronScheduler(BaseScheduler):
"""
cronjob: min hour day month week
"""
def __init__(self, queue, rule):
super().__init__(queue)
self.config = self.parse_config(rule)
def parse_config(self, expr):
""" exprd
format: min hour day month week
* * * * *
"""
# TODO: verify format
return expr
def run(self):
if self.config is False:
# May be error format
return
scheduler = CronSchedulerServer(10)
scheduler.add(f"{uuid4()}", self.config, self.push_task)
scheduler.start()
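# Usage sketch (hypothetical subclass, not part of this module): a concrete
# scheduler only has to implement create_task() and return a list of dicts
# that satisfy SPACEONE_TASK_SCHEMA, e.g.
#
#     class MyScheduler(IntervalScheduler):
#         def create_task(self):
#             return [build_task()]  # schema-conforming task dicts
#
#     MyScheduler(queue_name, interval=30).start()  # Process.start() calls run()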
|
[
"schedule.run_pending",
"scheduler.Scheduler",
"jsonschema.validate",
"uuid.uuid4",
"spaceone.core.queue.put",
"spaceone.core.error.ERROR_CONFIGURATION",
"json.dumps",
"time.sleep",
"schedule.every",
"logging.getLogger"
] |
[((391, 418), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (408, 418), False, 'import logging\n'), ((3327, 3350), 'scheduler.Scheduler', 'CronSchedulerServer', (['(10)'], {}), '(10)\n', (3346, 3350), True, 'from scheduler import Scheduler as CronSchedulerServer\n'), ((1856, 1878), 'schedule.run_pending', 'schedule.run_pending', ([], {}), '()\n', (1876, 1878), False, 'import schedule\n'), ((1891, 1904), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (1901, 1904), False, 'import time\n'), ((2769, 2791), 'schedule.run_pending', 'schedule.run_pending', ([], {}), '()\n', (2789, 2791), False, 'import schedule\n'), ((2804, 2817), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (2814, 2817), False, 'import time\n'), ((773, 816), 'jsonschema.validate', 'validate', (['task'], {'schema': 'SPACEONE_TASK_SCHEMA'}), '(task, schema=SPACEONE_TASK_SCHEMA)\n', (781, 816), False, 'from jsonschema import validate\n'), ((845, 861), 'json.dumps', 'json.dumps', (['task'], {}), '(task)\n', (855, 861), False, 'import json\n'), ((944, 976), 'spaceone.core.queue.put', 'queue.put', (['self.queue', 'json_task'], {}), '(self.queue, json_task)\n', (953, 976), False, 'from spaceone.core import queue\n'), ((2565, 2600), 'spaceone.core.error.ERROR_CONFIGURATION', 'ERROR_CONFIGURATION', ([], {'key': '"""interval"""'}), "(key='interval')\n", (2584, 2600), False, 'from spaceone.core.error import ERROR_CONFIGURATION\n'), ((1769, 1796), 'schedule.every', 'schedule.every', (['self.config'], {}), '(self.config)\n', (1783, 1796), False, 'import schedule\n'), ((3376, 3383), 'uuid.uuid4', 'uuid4', ([], {}), '()\n', (3381, 3383), False, 'from uuid import uuid4\n'), ((2668, 2695), 'schedule.every', 'schedule.every', (['self.config'], {}), '(self.config)\n', (2682, 2695), False, 'import schedule\n')]
|
import base64 as b64
import struct
import encodings
from .wireformat import *
from . import constants
MAX_PACKET_SIZE = 4000
_rcode_strings = [ 'No error',
'Format error',
'Server failure',
'Non-existent domain',
'Not implemented',
'Query refused',
'Name exists when it should not',
'RR set exists when it should not',
'RR set that should exist does not',
'Server not authoritative for zone',
'Name not contained in zone',
None,
None,
None,
None,
None,
'Bad OPT version OR TSIG signature failure',
'Key not recognized',
'Signature out of time window',
'Bad TKEY mode',
'Duplicate key name',
'Algorithm not supported' ]
_rrtype_strings = [ None, 'A', 'NS', 'MD', 'MF', 'CNAME', 'SOA', 'MB', 'MG', 'MR',
'NUL', 'WKS', 'PTR', 'HINFO', 'MINFO', 'MX', 'TXT', 'RP',
'AFSDB', 'X25', 'ISDN', 'RT', 'NSAP', 'NSAPPTR', 'SIG',
'KEY', 'PX', 'GPOS',
'AAAA', 'LOC', 'NXT', 'EID', 'NIMLOC', 'SRV', 'ATMA', 'NAPTR',
'KX', 'CERT', 'A6', 'DNAME', 'SINK', 'OPT', 'APL', 'DS',
'SSHFP', 'IPSECKEY', 'RRSIG', 'NSEC', 'DNSKEY', 'DHCID',
'NSEC3', 'NSEC3PARAM', 'TLSA', None, None, 'HIP', None,
None, None, 'CDS', 'CDNSKEY', 'OPENPGPKEY' ]
_rrtype_extras = { 99: 'SPF', 100: 'UINFO', 101: 'UID', 102: 'GID', 103: 'UNSPEC',
249: 'TKEY', 250: 'TSIG', 251: 'IXFR', 252: 'AXFR',
253: 'MAILB', 254: 'MAILA', 255: 'ANY', 256: 'URI',
257: 'CAA', 32768: 'TA', 32769: 'DLV' }
_rrclass_strings = [ None, 'IN', 'CS', 'CH', 'HS' ]
def escape_string(byte_string):
try:
ustr = byte_string.decode('ascii')
return ustr
except UnicodeError:
ustr = byte_string.decode('ascii', 'backslashreplace').replace('"', '\\"')
return '"{}"'.format(ustr)
def base64(byte_string):
return b64.b64encode(byte_string).decode('ascii')
def rcode_to_string(rcode):
"""Convert an RCODE to a string"""
try:
s = _rcode_strings[rcode]
    except IndexError:  # _rcode_strings is a list, not a dict
s = None
if s is None:
s = 'Unknown ({})'.format(rcode)
return s
def rrtype_to_string(rrt):
"""Convert an RR type to a string"""
try:
s = _rrtype_strings[rrt]
    except IndexError:  # out-of-range type: fall back to the extras dict
s = _rrtype_extras.get(rrt, None)
if s is None:
s = 'TYPE{}'.format(rrt)
return s
def rrclass_to_string(rrt):
"""Convert an RR class to a string"""
try:
s = _rrclass_strings[rrt]
    except IndexError:  # _rrclass_strings is a list, not a dict
s = None
if s is None:
if rrt == NONE:
s = 'NONE'
elif rrt == ANY:
s = 'ANY'
else:
s = 'CLASS{}'.format(rrt)
return s
def decode_domain(packet, ptr):
result = []
saved = False
saved_ptr = None
while True:
length = packet[ptr]
ptr += 1
if not length:
break
if length < 64:
result.append(packet[ptr:ptr+length])
ptr += length
elif (length & 0xc0) == 0xc0:
low = packet[ptr]
ptr += 1
offset = ((length & 0x3f) << 8) | low
if offset > len(packet):
raise ValueError('Bad reply to DNS query')
if not saved:
saved = True
saved_ptr = ptr
ptr = offset
if saved:
ptr = saved_ptr
return (b'.'.join(result), ptr)
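# Example (RFC 1035 name compression, illustrative): if b'\x07example\x03com\x00'
# starts at offset 12 of the packet, a later name encoded as b'\x03www\xc0\x0c'
# decodes to b'www.example.com' -- the 0xc0 0x0c bytes are a pointer back to
# offset 12, and the returned ptr resumes right after the pointer via saved_ptr.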
def domain_to_unicode(domain):
return '.'.join([encodings.idna.ToUnicode(label)
for label in domain.split(b'.')])
def domain_from_unicode(domain):
domain = domain.rstrip('.')
return b'.'.join([encodings.idna.ToASCII(label)
for label in domain.split('.')])
def decode_pascal_string(packet, ptr):
slen = packet[ptr]
ptr += 1
s = packet[ptr:ptr+slen]
ptr += slen
return (s, ptr)
def build_dns_packet(uid, query, wants_recursion=False, unicast=False):
flags = QUERY
if wants_recursion:
flags |= RD
header = struct.pack(b'>HHHHHH', uid, flags, 1, 0, 0, 1)
packet = [header]
for label in query.name.split(b'.'):
if len(label) > 63:
raise ValueError('DNS label too long')
if len(label) == 0:
continue
packet.append(struct.pack(b'>B', len(label)))
packet.append(label)
q_class = query.q_class
if unicast:
q_class |= 0x8000
packet.append(struct.pack(b'>BHH', 0, query.q_type, q_class))
# Add an OPT record to indicate EDNS support
packet.append(struct.pack(b'>BHHLH', 0, constants.OPT, MAX_PACKET_SIZE,
DO, 0))
return b''.join(packet)
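# Minimal usage sketch (assumption: any object exposing name, q_type and
# q_class attributes works as a query; a namedtuple stands in for the real class):
#
#     from collections import namedtuple
#     Query = namedtuple('Query', 'name q_type q_class')
#     q = Query(name=b'example.com', q_type=1, q_class=1)   # A record, class IN
#     pkt = build_dns_packet(0x1234, q, wants_recursion=True)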
|
[
"encodings.idna.ToASCII",
"base64.b64encode",
"struct.pack",
"encodings.idna.ToUnicode"
] |
[((4458, 4505), 'struct.pack', 'struct.pack', (["b'>HHHHHH'", 'uid', 'flags', '(1)', '(0)', '(0)', '(1)'], {}), "(b'>HHHHHH', uid, flags, 1, 0, 0, 1)\n", (4469, 4505), False, 'import struct\n'), ((4872, 4918), 'struct.pack', 'struct.pack', (["b'>BHH'", '(0)', 'query.q_type', 'q_class'], {}), "(b'>BHH', 0, query.q_type, q_class)\n", (4883, 4918), False, 'import struct\n'), ((4988, 5052), 'struct.pack', 'struct.pack', (["b'>BHHLH'", '(0)', 'constants.OPT', 'MAX_PACKET_SIZE', 'DO', '(0)'], {}), "(b'>BHHLH', 0, constants.OPT, MAX_PACKET_SIZE, DO, 0)\n", (4999, 5052), False, 'import struct\n'), ((2300, 2326), 'base64.b64encode', 'b64.b64encode', (['byte_string'], {}), '(byte_string)\n', (2313, 2326), True, 'import base64 as b64\n'), ((3909, 3940), 'encodings.idna.ToUnicode', 'encodings.idna.ToUnicode', (['label'], {}), '(label)\n', (3933, 3940), False, 'import encodings\n'), ((4084, 4113), 'encodings.idna.ToASCII', 'encodings.idna.ToASCII', (['label'], {}), '(label)\n', (4106, 4113), False, 'import encodings\n')]
|
# Generated by Django 3.0.4 on 2020-03-09 20:10
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('hobbies', '0003_userhobbies'),
]
operations = [
migrations.AlterField(
model_name='hobbies',
name='img_url',
field=models.URLField(default='https://www.okea.org/wp-content/uploads/2019/10/placeholder.png', max_length=1000),
),
]
|
[
"django.db.models.URLField"
] |
[((331, 447), 'django.db.models.URLField', 'models.URLField', ([], {'default': '"""https://www.okea.org/wp-content/uploads/2019/10/placeholder.png"""', 'max_length': '(1000)'}), "(default=\n 'https://www.okea.org/wp-content/uploads/2019/10/placeholder.png',\n max_length=1000)\n", (346, 447), False, 'from django.db import migrations, models\n')]
|
"""Cascading configuration from the CLI and config files."""
__version__ = "0.2.0"
import json
import os
from abc import ABC, abstractmethod
from argparse import ArgumentParser, Namespace
from typing import Dict
import jsonschema
class CascadeConfig:
"""Cascading configuration."""
def __init__(self, validation_schema=None, none_overrides_value=False):
"""
Cascading configuration.
Parameters
----------
validation_schema: str, path-like, dict, or cascade_config.ValidationSchema, optional
JSON Schema to validate fully cascaded configuration
none_overrides_value: bool
If True, a None value overrides a not-None value from the previous configuration.
If False, None values will never override not-None values.
Examples
--------
>>> cascade_conf = CascadeConfig(validation_schema="config_schema.json")
>>> cascade_conf.add_json("config_default.json")
>>> cascade_conf.add_json("config_user.json")
>>> config = cascade_conf.parse()
"""
self.validation_schema = validation_schema
self.none_overrides_value = none_overrides_value
self.sources = []
@property
def validation_schema(self):
"""JSON Schema to validate fully cascaded configuration."""
return self._validation_schema
@validation_schema.setter
def validation_schema(self, value):
"""Set validation schema."""
if value:
self._validation_schema = ValidationSchema.from_object(value)
else:
self._validation_schema = None
def _update_dict_recursively(self, original: Dict, updater: Dict) -> Dict:
"""Update dictionary recursively."""
for k, v in updater.items():
if isinstance(v, dict):
original[k] = self._update_dict_recursively(original.get(k, {}), v)
elif v or k not in original: # v is not None, or key does not exist yet
original[k] = v
elif self.none_overrides_value: # v is None, but can override previous value
original[k] = v
return original
def add_dict(self, *args, **kwargs):
"""
Add dictionary configuration source to source list.
*args and **kwargs are passed to :class:`cascade_config.DictConfigSource()`.
"""
source = DictConfigSource(*args, **kwargs)
self.sources.append(source)
def add_argumentparser(self, *args, **kwargs):
"""
Add argumentparser configuration source to source list.
*args and **kwargs are passed to :class:`cascade_config.ArgumentParserConfigSource()`.
"""
source = ArgumentParserConfigSource(*args, **kwargs)
self.sources.append(source)
def add_namespace(self, *args, **kwargs):
"""
Add argparse Namespace configuration source to source list.
*args and **kwargs are passed to :class:`cascade_config.NamespaceConfigSource()`.
"""
source = NamespaceConfigSource(*args, **kwargs)
self.sources.append(source)
def add_json(self, *args, **kwargs):
"""
Add JSON configuration source to source list.
*args and **kwargs are passed to :class:`cascade_config.JSONConfigSource()`.
"""
source = JSONConfigSource(*args, **kwargs)
self.sources.append(source)
def parse(self) -> Dict:
"""Parse all sources, cascade, validate, and return cascaded configuration."""
config = dict()
for source in self.sources:
config = self._update_dict_recursively(config, source.load())
if self.validation_schema:
jsonschema.validate(config, self.validation_schema.load())
return config
class _ConfigSource(ABC):
"""Abstract base class for configuration source."""
def __init__(self, source, validation_schema=None, subkey=None) -> None:
"""
Initialize a single configuration source.
Parameters
----------
source : str, path-like, dict, argparse.ArgumentParser
source for the configuration, either a dictionary, path to a file, or
argument parser.
validation_schema: str, path-like, dict, or cascade_config.ValidationSchema, optional
JSON Schema to validate single configuration
subkey : str
adds the configuration to a subkey of the final cascased configuration;
e.g. specifying a subkey `"user"` for a configuration source, would add it
under the key `"user"` in the cascaded configuration, instead of updating
the root of the existing configuration
Methods
-------
load()
load the configuration from the source and return it as a dictionary
"""
self.source = source
self.validation_schema = validation_schema
self.subkey = subkey
@property
def validation_schema(self):
"""Get validation_schema."""
return self._validation_schema
@validation_schema.setter
def validation_schema(self, value):
"""Set validation schema."""
if value:
self._validation_schema = ValidationSchema.from_object(value)
else:
self._validation_schema = None
@abstractmethod
def _read(self):
"""Read source into dict."""
pass
def load(self) -> Dict:
"""Read, validate, and place in subkey if required."""
if self.subkey:
config = dict()
config[self.subkey] = self._read()
else:
config = self._read()
if self.validation_schema:
jsonschema.validate(config, self.validation_schema.load())
return config
class DictConfigSource(_ConfigSource):
"""Dictionary configuration source."""
def _read(self) -> Dict:
if not isinstance(self.source, dict):
raise TypeError("DictConfigSource `source` must be a dict")
return self.source
class JSONConfigSource(_ConfigSource):
"""JSON configuration source."""
def _read(self) -> Dict:
if not isinstance(self.source, (str, os.PathLike)):
raise TypeError(
"JSONConfigSource `source` must be a string or path-like object"
)
with open(self.source, "rt") as json_file:
config = json.load(json_file)
return config
class ArgumentParserConfigSource(_ConfigSource):
"""ArgumentParser configuration source."""
def _read(self) -> Dict:
if not isinstance(self.source, ArgumentParser):
raise TypeError(
"ArgumentParserSource `source` must be an argparse.ArgumentParser object"
)
config = vars(self.source.parse_args())
return config
class NamespaceConfigSource(_ConfigSource):
"""Argparse Namespace configuration source."""
def _read(self) -> Dict:
if not isinstance(self.source, Namespace):
raise TypeError(
"NamespaceConfigSource `source` must be an argparse.Namespace object"
)
config = vars(self.source)
return config
class ValidationSchema:
"""ValidationSchema."""
def __init__(self, source):
"""ValidationSchema."""
self.source = source
@classmethod
def from_object(cls, obj):
"""Return ValidationSchema from str, path-like, dict, or ValidationSchema."""
if isinstance(obj, (str, os.PathLike, Dict)):
return cls(obj)
elif isinstance(obj, cls):
return obj
else:
raise TypeError(
f"Cannot create ValidationSchema from type {type(obj)}. Must be a "
"string, path-like, dict, or cascade_config.ValidationSchema object"
)
def load(self) -> Dict:
"""Load validation schema."""
if isinstance(self.source, (str, os.PathLike)):
with open(self.source, "rt") as json_file:
schema = json.load(json_file)
elif isinstance(self.source, Dict):
schema = self.source
else:
raise TypeError(
"ValidationSchema `source` must be of type string, path-like, or dict"
)
return schema
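

# A minimal in-memory usage sketch (dict sources only, no schema validation),
# not part of the library itself:
if __name__ == "__main__":
    cascade_conf = CascadeConfig()
    cascade_conf.add_dict({"io": {"input": "a.txt", "threads": 1}})
    cascade_conf.add_dict({"io": {"threads": 4}})
    print(cascade_conf.parse())  # {'io': {'input': 'a.txt', 'threads': 4}}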
|
[
"json.load"
] |
[((6441, 6461), 'json.load', 'json.load', (['json_file'], {}), '(json_file)\n', (6450, 6461), False, 'import json\n'), ((8087, 8107), 'json.load', 'json.load', (['json_file'], {}), '(json_file)\n', (8096, 8107), False, 'import json\n')]
|
from invoke import task
from invoke.exceptions import Exit
from pathlib import Path
from typing import Optional
import os
import shutil
import sys
BUILD_DIR_DEFAULT = Path(os.environ['BUILD_DIR'].replace(":", ""))
def _get_vcvars_paths():
template = r"%PROGRAMFILES(X86)%\Microsoft Visual Studio\2017\{edition}\VC\Auxiliary\Build\vcvarsall.bat"
template = os.path.expandvars(template)
editions = ('BuildTools', 'Professional', 'WDExpress', 'Community')
return tuple(Path(template.format(edition=edition)) for edition in editions)
def strip_and_join(s: str):
return ' '.join(line.strip() for line in s.splitlines() if line.strip() != '')
def echo(c, msg: str):
from colorama.ansi import Fore, Style
if c.config.run.echo:
print(f"{Fore.WHITE}{Style.BRIGHT}{msg}{Style.RESET_ALL}")
def remove_directory(path: Path):
if path.is_dir():
print(f"Removing {path}")
shutil.rmtree(path)
else:
print(f"Not removing {path} (not a directory)")
def _get_and_prepare_build(
c,
clean: bool = False,
build_subdirectory: Path = BUILD_DIR_DEFAULT
) -> Path:
    '''
    Returns the build directory where `cmake` shall be called from. Creates it
    and removes its previous contents if `clean=True` is passed.
    '''
build_dir = build_subdirectory
if clean:
remove_directory(build_dir)
build_dir.mkdir(parents=True, exist_ok=not clean)
return build_dir
def _get_cmake_command(
build_dir: Path,
cmake_generator: str,
cmake_arch: Optional[str] = None,
config: str = 'Release',
):
'''
:param build_dir: Directory from where cmake will be called.
'''
root_dir = Path(__file__).parent
relative_root_dir = Path(os.path.relpath(root_dir, build_dir))
relative_artifacts_dir = Path(os.path.relpath(build_dir))
return strip_and_join(f"""
cmake
-G "{cmake_generator}"
{f'-A "{cmake_arch}"' if cmake_arch is not None else ""}
-DCMAKE_BUILD_TYPE={config}
-DCMAKE_INSTALL_PREFIX="{relative_artifacts_dir.as_posix()}"
"{str(relative_root_dir)}"
""")
def _get_wrappers_command(wrappers_dir: Path) -> str:
conda_prefix = os.environ['CONDA_PREFIX']
if sys.platform.startswith('win'):
autodiff_env_path = f"{conda_prefix}\\Library\\bin"
else:
autodiff_env_path = f"{conda_prefix}/bin"
return strip_and_join(f"""
create-wrappers
-t conda
--bin-dir {autodiff_env_path}
--dest-dir {wrappers_dir}
--conda-env-dir {conda_prefix}
""")
def _get_test_command():
test_command = strip_and_join(f"""
pytest .
-n auto
""")
return test_command
if sys.platform.startswith('win'):
@task
def msvc(c, clean=False, config='Release'):
"""
Generates a Visual Studio project at the "build/msvc" directory.
Assumes that the environment is already configured using:
conda devenv
activate env_name_here
"""
        build_dir = _get_and_prepare_build(  # returns a single Path
c,
clean=clean,
build_subdirectory=BUILD_DIR_DEFAULT / "msvc",
)
cmake_command = _get_cmake_command(build_dir=build_dir, cmake_generator="Visual Studio 15 2017",
cmake_arch="x64", config=config)
os.chdir(build_dir)
c.run(cmake_command)
@task
def compile(c, clean=False, config='Release', number_of_jobs=-1, gen_wrappers=False):
"""
Compiles by running CMake and building with `ninja`.
Assumes that the environment is already configured using:
conda devenv
[source] activate env_name
"""
build_dir = _get_and_prepare_build(
c,
clean=clean,
build_subdirectory=BUILD_DIR_DEFAULT,
)
cmake_command = _get_cmake_command(build_dir=build_dir, cmake_generator="Ninja", config=config)
build_command = strip_and_join(f"""
cmake
--build .
--target install
--config {config}
--
{f"-j {number_of_jobs}" if number_of_jobs >= 0 else ""}
{"-d keeprsp" if sys.platform.startswith("win") else ""}
""")
commands = [cmake_command, build_command]
if gen_wrappers:
wrappers_command = _get_wrappers_command(build_dir / "wrappers/conda")
commands.append(wrappers_command)
if sys.platform.startswith('win'):
for vcvars_path in _get_vcvars_paths():
if not vcvars_path.is_file():
continue
commands.insert(0, f'"{vcvars_path}" amd64')
break
else:
raise Exit(
'Error: Commands to configure MSVC environment variables not found.',
code=1,
)
os.chdir(build_dir)
c.run("&&".join(commands))
@task
def clear(c, build_dir_path=BUILD_DIR_DEFAULT):
"""
Clear build directory
"""
remove_directory(build_dir_path)
@task
def wrappers(c, wrappers_dir=BUILD_DIR_DEFAULT / "wrappers/conda"):
"""
Wrappers bin generated by conda environment as passed with --wrappers-dir dir_path
"""
remove_directory(wrappers_dir)
if sys.platform.startswith('win'):
print(f"Generating conda wrappers to {wrappers_dir} from {os.environ['CONDA_PREFIX']}\\Library\\bin")
else:
print(f"Generating conda wrappers to {wrappers_dir} from {os.environ['CONDA_PREFIX']}/bin")
generate_wrappers_command = _get_wrappers_command(wrappers_dir)
echo(c, generate_wrappers_command)
c.run(generate_wrappers_command, pty=True, warn=True)
@task
def tests(c):
"""
Execute tests in pytest, if any
"""
test_command = _get_test_command()
c.run(test_command, pty=True)
|
[
"sys.platform.startswith",
"invoke.exceptions.Exit",
"os.path.expandvars",
"pathlib.Path",
"os.path.relpath",
"shutil.rmtree",
"os.chdir"
] |
[((2818, 2848), 'sys.platform.startswith', 'sys.platform.startswith', (['"""win"""'], {}), "('win')\n", (2841, 2848), False, 'import sys\n'), ((367, 395), 'os.path.expandvars', 'os.path.expandvars', (['template'], {}), '(template)\n', (385, 395), False, 'import os\n'), ((2317, 2347), 'sys.platform.startswith', 'sys.platform.startswith', (['"""win"""'], {}), "('win')\n", (2340, 2347), False, 'import sys\n'), ((4548, 4578), 'sys.platform.startswith', 'sys.platform.startswith', (['"""win"""'], {}), "('win')\n", (4571, 4578), False, 'import sys\n'), ((4937, 4956), 'os.chdir', 'os.chdir', (['build_dir'], {}), '(build_dir)\n', (4945, 4956), False, 'import os\n'), ((5344, 5374), 'sys.platform.startswith', 'sys.platform.startswith', (['"""win"""'], {}), "('win')\n", (5367, 5374), False, 'import sys\n'), ((922, 941), 'shutil.rmtree', 'shutil.rmtree', (['path'], {}), '(path)\n', (935, 941), False, 'import shutil\n'), ((1746, 1760), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (1750, 1760), False, 'from pathlib import Path\n'), ((1797, 1833), 'os.path.relpath', 'os.path.relpath', (['root_dir', 'build_dir'], {}), '(root_dir, build_dir)\n', (1812, 1833), False, 'import os\n'), ((1869, 1895), 'os.path.relpath', 'os.path.relpath', (['build_dir'], {}), '(build_dir)\n', (1884, 1895), False, 'import os\n'), ((3488, 3507), 'os.chdir', 'os.chdir', (['build_dir'], {}), '(build_dir)\n', (3496, 3507), False, 'import os\n'), ((4802, 4888), 'invoke.exceptions.Exit', 'Exit', (['"""Error: Commands to configure MSVC environment variables not found."""'], {'code': '(1)'}), "('Error: Commands to configure MSVC environment variables not found.',\n code=1)\n", (4806, 4888), False, 'from invoke.exceptions import Exit\n'), ((4338, 4368), 'sys.platform.startswith', 'sys.platform.startswith', (['"""win"""'], {}), "('win')\n", (4361, 4368), False, 'import sys\n')]
|
import sys
import os
import shutil
import json
from glob import glob
#sys.path.insert(0, os.path.abspath('..'))
#sys.path.insert(0, os.path.abspath('.'))
#print(sys.path)
import mimir.backend.database
from mimir.backend.database import DataBase, Model
from mimir.backend.entry import Item, ListItem
import unittest
import pytest
import coverage
import copy
import datetime
#DEBUGGING
import tracemalloc
if os.getcwd().endswith("tests"):
mimir_dir = os.getcwd()[0:-len("/tests")]
dir2tests = os.getcwd()
else:
mimir_dir = os.getcwd()
dir2tests = os.getcwd()+"/tests"
files = ["testStructure/rootFile1.mp4",
"testStructure/folder1/folder1file1.mp4",
"testStructure/folder1/folder1file2.mp4",
"testStructure/folder2/folder2file1.mp4",
"testStructure/folder2/folder2file2.mp4"]
folder = ["folder1", "folder2"]
def getDataTime():
currently = datetime.datetime.now()
day = currently.day
month = currently.month
year = currently.year
hour = currently.hour
minutes = currently.minute
sec = currently.second
fulldate = "{0:02}{3}{1:02}{3}{2:02}".format(day, month, year-2000, ".")
fulltime = "{0:02}:{1:02}:{2:02}".format(hour, minutes, sec)
return fulldate, fulltime
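# For example (illustrative): called on 9 March 2020 at 14:05:07 this returns
# ("09.03.20", "14:05:07").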
@pytest.fixture(scope="module")
def preCreatedDB():
os.system("touch "+dir2tests+"/testStructure/rootFile1")
os.system("touch "+dir2tests+"/testStructure/folder2/folder2file1.mp4")
os.system("touch "+dir2tests+"/testStructure/folder2/folder2file2.mp4")
os.system("touch "+dir2tests+"/testStructure/folder2/folder3/folder3file1.mp4")
os.system("touch "+dir2tests+"/testStructure/folder1/folder1file1.mp4")
os.system("touch "+dir2tests+"/testStructure/folder1/folder1file2.mp4")
config = mimir_dir+"/conf/modeltest.json"
dbRootPath = dir2tests+"/testStructure"
if os.path.exists(dbRootPath+"/.mimir"):
shutil.rmtree(dbRootPath+"/.mimir")
database = DataBase(dbRootPath, "new", config)
## Set Ratings for furure tests
# Expected Order: ["3", "2", "4", "1", "5", "0"]
database.modifySingleEntry("1", "Rating", "2", byID = True )
database.modifySingleEntry("2", "Rating", "4", byID = True )
database.modifySingleEntry("3", "Rating", "5", byID = True )
database.modifySingleEntry("4", "Rating", "3", byID = True )
database.modifySingleEntry("5", "Rating", "1", byID = True )
# Expected Order: ["5", "4", "3", "2", "1", "0"]
database.modifySingleEntry("0", "SingleItem", "Xi", byID = True )
database.modifySingleEntry("1", "SingleItem", "Tau", byID = True )
database.modifySingleEntry("2", "SingleItem", "Ny", byID = True )
database.modifySingleEntry("3", "SingleItem", "Eta", byID = True )
database.modifySingleEntry("4", "SingleItem", "Bea", byID = True )
database.modifySingleEntry("5", "SingleItem", "Alpha", byID = True )
database.modifyListEntry("0", "ListItem", "Blue", byID = True)
database.modifyListEntry("0", "ListItem", "Double Orange", byID = True)
database.modifyListEntry("0", "ListItem", "Triple Orange", byID = True)
database.modifyListEntry("3", "ListItem", "Lavender", byID = True)
database.modifyListEntry("4", "ListItem", "Lavender", byID = True)
database.modifyListEntry("4", "ListItem", "Pinkish", byID = True)
database.modifyListEntry("4", "ListItem", "Spring", byID = True)
Entry0 = database.getEntryByItemName("ID", "0")[0]
Entry1 = database.getEntryByItemName("ID", "1")[0]
Entry2 = database.getEntryByItemName("ID", "2")[0]
Entry3 = database.getEntryByItemName("ID", "3")[0]
Entry4 = database.getEntryByItemName("ID", "4")[0]
Entry5 = database.getEntryByItemName("ID", "5")[0]
# Expected Order: ["0", "2", "3", "5", "1", "4"]
Entry0.changeItemValue("Added", "30.01.19|00:00:00")
Entry1.changeItemValue("Added", "20.01.19|00:00:00")
Entry2.changeItemValue("Added", "29.01.19|00:00:00")
Entry3.changeItemValue("Added", "29.01.19|00:00:00")# Same time: Fall back to ID
Entry4.changeItemValue("Added", "15.01.19|00:00:00")
Entry5.changeItemValue("Added", "26.01.19|00:00:00")
# Expected Order: ["0", "3", "4", "5", "1", "2"]
Entry0.replaceItemValue("Changed", "24.02.19|00:00:00", Entry0.getItem("Changed").value[0])
Entry1.replaceItemValue("Changed", "10.02.19|00:00:00", Entry1.getItem("Changed").value[0])
Entry2.replaceItemValue("Changed", "23.02.19|00:00:00", Entry2.getItem("Changed").value[0])
Entry3.replaceItemValue("Changed", "22.02.19|00:00:00", Entry3.getItem("Changed").value[0])
Entry4.replaceItemValue("Changed", "21.02.19|00:00:00", Entry4.getItem("Changed").value[0])
Entry5.replaceItemValue("Changed", "20.02.19|00:00:00", Entry5.getItem("Changed").value[0])
Entry0.addItemValue("Changed", "25.03.19|00:00:00")
Entry1.addItemValue("Changed", "19.03.19|00:00:00")
Entry2.addItemValue("Changed", "23.01.19|00:00:00")
Entry3.addItemValue("Changed", "22.03.19|00:00:00")
Entry4.addItemValue("Changed", "21.03.19|00:00:00")
Entry5.addItemValue("Changed", "20.03.19|00:00:00")
database.saveMain()
for item in database.model.allItems:
database.cacheAllValuebyItemName(item)
#shutil.copytree(dbRootPath+"/.mimir", dbRootPath+"/.mimir2") #For testing
shutil.rmtree(dbRootPath+"/.mimir")
return database
def test_01_Model_init():
config = mimir_dir+"/conf/modeltest.json"
jsonModel = None
with open(config) as f:
jsonModel = json.load(f)
testModel = Model(config)
bools = []
bools.append(testModel.modelName == jsonModel["General"]["Name"])
bools.append(testModel.modelDesc == jsonModel["General"]["Description"])
bools.append(testModel.extentions == jsonModel["General"]["Types"])
allitems = {}
allitems.update(testModel.items)
allitems.update(testModel.listitems)
for item in allitems:
for spec in allitems[item]:
bools.append(jsonModel[item][spec] == allitems[item][spec])
res = True
for b in bools:
if not b:
res = b
break
assert res
def test_02_DB_init_new():
config = mimir_dir+"/conf/modeltest.json"
dbRootPath = dir2tests+"/testStructure"
if os.path.exists(dbRootPath+"/.mimir"):
shutil.rmtree(dbRootPath+"/.mimir")
database = DataBase(dbRootPath, "new", config)
print(database.model.listitems)
filesindbRoot = glob(dbRootPath+"/**/*.mp4", recursive = True)
filesindbRoot = [x.replace(dbRootPath+"/", "") for x in filesindbRoot]
allEntriesSaved = True
for entry in database.entries:
if entry.Path not in filesindbRoot:
allEntriesSaved = False
assert allEntriesSaved
for item in database.model.allItems:
assert not database.cachedValuesChanged[item]
del database
def test_03_DB_raise_RuntimeError_existing_mimirDir():
config = mimir_dir+"/conf/modeltest.json"
dbRootPath = dir2tests+"/testStructure"
if not os.path.exists(dbRootPath+"/.mimir"):
os.makedirs(dbRootPath+"/.mimir")
with pytest.raises(RuntimeError):
database = DataBase(dbRootPath, "new", config)
del database
def test_04_DB_save():
config = mimir_dir+"/conf/modeltest.json"
dbRootPath = dir2tests+"/testStructure"
if os.path.exists(dbRootPath+"/.mimir"):
shutil.rmtree(dbRootPath+"/.mimir")
database = DataBase(dbRootPath, "new", config)
#Check database is save
assert database.saveMain()
assert database.saveMain()
#shutil.copytree(dbRootPath+"/.mimir", dbRootPath+"/.mimir2")
#assert validateDatabaseJSON(database, config, database.savepath)
#check if backup was created
day, month, year = datetime.date.today().day, datetime.date.today().month, datetime.date.today().year
fulldate = "{2:02}-{1:02}-{0:02}".format(day, month, year-2000)
assert os.path.exists(dbRootPath+"/.mimir/mainDB.{0}.backup".format(fulldate)) == True
del database
def test_05_DB_equal():
config = mimir_dir+"/conf/modeltest.json"
dbRootPath = dir2tests+"/testStructure"
if os.path.exists(dbRootPath+"/.mimir"):
shutil.rmtree(dbRootPath+"/.mimir")
database1 = DataBase(dbRootPath, "new", config)
if os.path.exists(dbRootPath+"/.mimir"):
shutil.rmtree(dbRootPath+"/.mimir")
database2 = DataBase(dbRootPath, "new", config)
assert database1 == database2
del database1, database2
def test_06_DB_notequal():
config = mimir_dir+"/conf/modeltest.json"
dbRootPath = dir2tests+"/testStructure"
if os.path.exists(dbRootPath+"/.mimir"):
shutil.rmtree(dbRootPath+"/.mimir")
database1 = DataBase(dbRootPath, "new", config)
os.system("rm "+dir2tests+"/testStructure/newfile.mp4")
if os.path.exists(dbRootPath+"/.mimir"):
shutil.rmtree(dbRootPath+"/.mimir")
database2 = DataBase(dbRootPath, "new", config)
os.system("touch "+dir2tests+"/testStructure/newfile.mp4")
database2.findNewFiles()
os.system("rm "+dir2tests+"/testStructure/newfile.mp4")
assert database1 != database2
del database1, database2
def test_07_DB_load():
config = mimir_dir+"/conf/modeltest.json"
dbRootPath = dir2tests+"/testStructure"
if os.path.exists(dbRootPath+"/.mimir"):
shutil.rmtree(dbRootPath+"/.mimir")
database = DataBase(dbRootPath, "new", config)
database.saveMain()
loadedDB = DataBase(dbRootPath, "load")
assert database == loadedDB
assert loadedDB.maxID == len(loadedDB.entries)-1 #Since 0 is a valid ID
del database
def test_08_DB_getAllValues():
config = mimir_dir+"/conf/modeltest.json"
dbRootPath = dir2tests+"/testStructure"
if os.path.exists(dbRootPath+"/.mimir"):
shutil.rmtree(dbRootPath+"/.mimir")
database = DataBase(dbRootPath, "new", config)
with pytest.raises(KeyError):
database.getAllValuebyItemName("Blubb")
values = database.getAllValuebyItemName("Path")
filesindbRoot = glob(dbRootPath+"/**/*.mp4", recursive = True)
filesindbRoot = [x.replace(dbRootPath+"/", "") for x in filesindbRoot]
assert values == set(filesindbRoot)
del database
def test_09_DB_getEntrybyItemName():
config = mimir_dir+"/conf/modeltest.json"
dbRootPath = dir2tests+"/testStructure"
if os.path.exists(dbRootPath+"/.mimir"):
shutil.rmtree(dbRootPath+"/.mimir")
database = DataBase(dbRootPath, "new", config)
with pytest.raises(KeyError):
database.getEntryByItemName("Blubb", "folder2file")
found = False
for entry in database.entries:
print(entry.getItem("Name").value)
if entry.getItem("Name").value == "folder2file1":
found = True
break
assert found
entrybyItemName = database.getEntryByItemName("Name", "folder2file1")
assert entry in entrybyItemName
del database
def test_10_DB_removeEntry_exceptions():
config = mimir_dir+"/conf/modeltest.json"
dbRootPath = dir2tests+"/testStructure"
if os.path.exists(dbRootPath+"/.mimir"):
shutil.rmtree(dbRootPath+"/.mimir")
database = DataBase(dbRootPath, "new", config)
##############################################
#Raise exception for not specified vector
# No vector specified
with pytest.raises(RuntimeError):
database.remove(1)
# More than one vector specified
with pytest.raises(RuntimeError):
database.remove(1, byID = True, byName = True)
##############################################
#Raise exception type
# ID
with pytest.raises(TypeError):
database.remove([], byID = True)
with pytest.raises(TypeError):
database.remove(1, byID = 1)
# Name/Path
with pytest.raises(TypeError):
database.remove(1, byName = True)
##############################################
#Raise exception by ID: out of range
with pytest.raises(IndexError):
database.remove(1000, byID = True)
##############################################
#Raise exception by Name/Path: not in DB
with pytest.raises(KeyError):
database.remove("RandomName", byName = True)
with pytest.raises(KeyError):
database.remove("RandomPath", byPath = True)
del database
def test_11_DB_removeEntry():
config = mimir_dir+"/conf/modeltest.json"
dbRootPath = dir2tests+"/testStructure"
if os.path.exists(dbRootPath+"/.mimir"):
shutil.rmtree(dbRootPath+"/.mimir")
database = DataBase(dbRootPath, "new", config)
#Remove by ID
databaseID = copy.deepcopy(database)
id2remove = 2
entry2Remove = databaseID.getEntryByItemName("ID",str(id2remove))[0]
databaseID.remove(id2remove, byID = True)
assert not entry2Remove in databaseID.entries
#Remove by Name
databaseName = copy.deepcopy(database)
name2remove = "folder2file1"
entry2Remove = databaseName.getEntryByItemName("Name",name2remove)[0]
databaseName.remove(name2remove, byName = True)
assert not entry2Remove in databaseName.entries
#Remove by Path
databasePath = copy.deepcopy(database)
file2remove = "folder2/folder2file1.mp4"
path2remove = dbRootPath+"/"+file2remove
entry2Remove = databasePath.getEntryByItemName("Path",file2remove)[0]
databasePath.remove(file2remove, byPath = True)
assert not entry2Remove in databasePath.entries
del database
def test_12_DB_findNewFiles_append():
config = mimir_dir+"/conf/modeltest.json"
dbRootPath = dir2tests+"/testStructure"
if os.path.exists(dbRootPath+"/.mimir"):
shutil.rmtree(dbRootPath+"/.mimir")
database = DataBase(dbRootPath, "new", config)
lastIDbeforeAppend = database.maxID
os.system("touch "+dir2tests+"/testStructure/newfile.mp4")
newFiles, pairs = database.findNewFiles()
os.system("rm "+dir2tests+"/testStructure/newfile.mp4")
assert "newfile.mp4" in newFiles
assert len(newFiles) == 1
asEntry = False
for entry in database.entries:
if entry.Path == "newfile.mp4":
asEntry = True
newEntry = entry
break
assert asEntry
assert int(newEntry.ID) == lastIDbeforeAppend+1
assert database.maxID == lastIDbeforeAppend+1
del database
def test_13_p1_DB_query():
config = mimir_dir+"/conf/modeltest.json"
dbRootPath = dir2tests+"/testStructure"
if os.path.exists(dbRootPath+"/.mimir"):
shutil.rmtree(dbRootPath+"/.mimir")
database = DataBase(dbRootPath, "new", config)
updatedEntry1 = database.getEntryByItemName("ID", "0")[0]
updatedEntry2 = database.getEntryByItemName("ID", "1")[0]
updatedEntry1.changeItemValue("SingleItem", "ReplacedValue")
updatedEntry1.addItemValue("ListItem", "AddedValue")
updatedEntry2.changeItemValue("SingleItem", "ReplacedValue")
########################################################
#First names wrong
with pytest.raises(KeyError):
database.query(["Blubb", "SingleItem"], "SomeQuery")
#Second names wrong
with pytest.raises(KeyError):
database.query(["SingleItem", "Blubb"], "SomeQuery")
########################################################
resultEntry = database.query(["SingleItem","ListItem"], ["ReplacedValue"])
resultID = database.query(["SingleItem","ListItem"], ["ReplacedValue"], returnIDs = True)
found1, found2 = False, False
if updatedEntry1 in resultEntry:
found1 = True
if updatedEntry2 in resultEntry:
found2 = True
    foundEntry = found1 and found2
    assert foundEntry
assert resultID == ["0", "1"]
resultID = database.query(["SingleItem","ListItem"], ["AddedValue", "ReplacedValue"], returnIDs = True)
assert resultID == ["0"]
del database
@pytest.mark.parametrize("Query, IDsExp", [("!Lavender", ["0","1","2","5"]), ("!Xi", ["1","2","3","4","5"]), ("!Eta Lavender", ["4"])])
def test_13_p2_DB_query(Query, IDsExp, preCreatedDB):
qList = Query.split(" ")
resultID = preCreatedDB.query(["SingleItem","ListItem"], qList, returnIDs = True)
assert resultID == IDsExp
@pytest.mark.parametrize("Query, IDsExp", [("Triple Orange", ["0"])])
def test_13_p3_DB_query(Query, IDsExp, preCreatedDB):
qList = Query.split(" ")
resultID = preCreatedDB.query(["SingleItem","ListItem"], qList, returnIDs = True)
assert resultID == IDsExp
def test_14_DB_modifyEntry():
config = mimir_dir+"/conf/modeltest.json"
dbRootPath = dir2tests+"/testStructure"
if os.path.exists(dbRootPath+"/.mimir"):
shutil.rmtree(dbRootPath+"/.mimir")
database = DataBase(dbRootPath, "new", config)
thisDate, thisTime = getDataTime()
#--------------------- SingleItem -------------------------
#Replace single Item value
database.modifySingleEntry("1", "SingleItem", "changedItemValue", byID = True )
changedEntry = database.getEntryByItemName("ID", "1")[0]
assert "changedItemValue" in changedEntry.getAllValuesbyName("SingleItem")
change_datetime = changedEntry.getAllValuesbyName("Changed")
change_datetime = list(change_datetime)[0]
assert change_datetime != "emptyChanged"
date, time = change_datetime.split("|")
assert date == thisDate
assert time[0:1] == thisTime[0:1]
#Check if Item is present in database
with pytest.raises(KeyError):
database.modifySingleEntry("1", "BLubbb", "changedItemValue", byID = True )
with pytest.raises(TypeError):
database.modifySingleEntry("1", "ListItem", "changedItemValue", byID = True )
#---------------------- ListItem --------------------------
with pytest.raises(TypeError):
database.modifyListEntry("1", "SingleItem", "appendedItemValue", "Append", byID = True)
    #Append, but the first default should be removed when appending the first actual value
origEntry = database.getEntryByItemName("ID", "1")[0]
database.modifyListEntry("1", "ListItem", "initialValue", "Append", byID = True)
changedEntry = database.getEntryByItemName("ID", "1")[0]
#print(database.model.getDefaultValue("ListItem"))
assert ("initialValue" in changedEntry.getAllValuesbyName("ListItem")
and database.model.getDefaultValue("ListItem") not in changedEntry.getAllValuesbyName("ListItem")
and len(changedEntry.getAllValuesbyName("ListItem")) == 1)
#Append
change_datetime = changedEntry.getAllValuesbyName("Changed")
change_datetime = list(change_datetime)[0]
assert change_datetime != "emptyChanged"
date, time = change_datetime.split("|")
assert date == thisDate
assert time[0:1] == thisTime[0:1]
print("-------- Append ----------")
origEntry = database.getEntryByItemName("ID", "1")[0]
databaseAppend = copy.deepcopy(database)
databaseAppend.modifyListEntry("1", "ListItem", "appendedItemValue", "Append", byID = True)
changedEntry = databaseAppend.getEntryByItemName("ID", "1")[0]
assert ( "appendedItemValue" in changedEntry.getAllValuesbyName("ListItem")
and origEntry.getAllValuesbyName("ListItem").issubset(changedEntry.getAllValuesbyName("ListItem")) )
#Replace
print("-------- Replace ----------")
databaseReplace = copy.deepcopy(databaseAppend)
databaseReplace.modifyListEntry("1", "ListItem", "replacedItemValue", "Replace", "initialValue", byID = True)
changedEntry = databaseReplace.getEntryByItemName("ID", "1")[0]
assert ("replacedItemValue" in changedEntry.getAllValuesbyName("ListItem")
and "initialValue" not in changedEntry.getAllValuesbyName("ListItem"))
#Remove
print("-------- Remove I ----------")
databaseAppend.modifyListEntry("1", "ListItem", None, "Remove", "appendedItemValue", byID = True)
changedEntry = databaseAppend.getEntryByItemName("ID", "1")[0]
assert "appendedItemValue" not in changedEntry.getAllValuesbyName("ListItem")
#Remove empty entry
print("-------- Remove II ----------")
databaseReplace.modifyListEntry("1", "ListItem", None, "Remove", "appendedItemValue", byID = True)
databaseReplace.modifyListEntry("1", "ListItem", None, "Remove", "replacedItemValue", byID = True)
changedEntry = databaseReplace.getEntryByItemName("ID", "1")[0]
assert (set(databaseReplace.model.listitems["ListItem"]["default"]) == changedEntry.getAllValuesbyName("ListItem"))
print("-------- Change date for ListItem ----------")
database.modifyListEntry("2", "ListItem", "initialValue", "Append", byID = True)
changedEntry = database.getEntryByItemName("ID", "2")[0]
change_datetime = changedEntry.getAllValuesbyName("Changed")
change_datetime = list(change_datetime)[0]
assert change_datetime != "emptyChanged"
date, time = change_datetime.split("|")
assert date == thisDate
assert time[0:1] == thisTime[0:1]
def test_15_DB_status():
config = mimir_dir+"/conf/modeltest.json"
dbRootPath = dir2tests+"/testStructure"
if os.path.exists(dbRootPath+"/.mimir"):
shutil.rmtree(dbRootPath+"/.mimir")
database = DataBase(dbRootPath, "new", config)
#DB not saved
assert not database.getStatus()
#DB saved
database.saveMain()
assert database.getStatus()
#DB changed - new File
os.system("touch "+dir2tests+"/testStructure/newfile.mp4")
newFiles = database.findNewFiles()
os.system("rm "+dir2tests+"/testStructure/newfile.mp4")
assert not database.getStatus()
database.saveMain()
assert database.getStatus()
#DB changed - changed Entry
def test_16_DB_random():
config = mimir_dir+"/conf/modeltest.json"
dbRootPath = dir2tests+"/testStructure"
if os.path.exists(dbRootPath+"/.mimir"):
shutil.rmtree(dbRootPath+"/.mimir")
database = DataBase(dbRootPath, "new", config)
allIDs = database.getAllValuebyItemName("ID")
randID = database.getRandomEntry(chooseFrom = allIDs)
assert randID in allIDs
def test_17_DB_random_all():
config = mimir_dir+"/conf/modeltest.json"
dbRootPath = dir2tests+"/testStructure"
if os.path.exists(dbRootPath+"/.mimir"):
shutil.rmtree(dbRootPath+"/.mimir")
database = DataBase(dbRootPath, "new", config)
allIDs = database.getAllValuebyItemName("ID")
randID = database.getRandomEntryAll()
assert randID in allIDs
def test_18_DB_random_weighted():
config = mimir_dir+"/conf/modeltest.json"
dbRootPath = dir2tests+"/testStructure"
if os.path.exists(dbRootPath+"/.mimir"):
shutil.rmtree(dbRootPath+"/.mimir")
database = DataBase(dbRootPath, "new", config)
allIDs = database.getAllValuebyItemName("ID")
with pytest.raises(NotImplementedError):
randID = database.getRandomEntry(chooseFrom = allIDs, weighted = True)
#assert randID in allIDs
def test_19_DB_getSortedIDs(preCreatedDB):
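    # getSortedIDs should raise for unknown items and for "ListItem", and otherwise return IDs
    # ordered by the Added/Changed datetimes, SingleItem alphabetically and Rating numerically.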
with pytest.raises(KeyError):
sorted_addedIDs = preCreatedDB.getSortedIDs("BLUBB")
with pytest.raises(NotImplementedError):
sorted_addedIDs = preCreatedDB.getSortedIDs("ListItem")
#Get sorted by Added (SingleItem with datetime)
expected_added = ["0", "2", "3", "5", "1", "4"]
sorted_addedIDs = preCreatedDB.getSortedIDs("Added", reverseOrder = True)
print(sorted_addedIDs)
for iId, expected_id in enumerate(expected_added):
assert expected_id == sorted_addedIDs[iId]
#Same but with reverse order --> Test if ID sorting is independent of reverse
expected_added = ["4", "1", "5", "2", "3", "0"]
sorted_addedIDs = preCreatedDB.getSortedIDs("Added", reverseOrder = False)
for iId, expected_id in enumerate(expected_added):
assert expected_id == sorted_addedIDs[iId]
    #Get sorted by Changed (ListItem with datetime)
expected_changed = ["0", "3", "4", "5", "1", "2"]
sorted_changedIDs = preCreatedDB.getSortedIDs("Changed")
for iId, expected_id in enumerate(expected_changed):
assert expected_id == sorted_changedIDs[iId]
    #Get sorted by SingleItem (alphabetically)
expected_singleItem = ["5", "4", "3", "2", "1", "0"]
sorted_singleIDs = preCreatedDB.getSortedIDs("SingleItem", reverseOrder = False)
for iId, expected_id in enumerate(expected_singleItem):
assert expected_id == sorted_singleIDs[iId]
#Get sorted by Rating (numerically)
expected_rating = ["3", "2", "4", "1", "5", "0"]
sorted_ratingIDs = preCreatedDB.getSortedIDs("Rating")
for iId, expected_id in enumerate(expected_rating):
assert expected_id == sorted_ratingIDs[iId]
def test_20_DB_updatedOpened(preCreatedDB):
preCreatedDB.updateOpened("1")
thisDate, thisTime = getDataTime()
changedEntry = preCreatedDB.getEntryByItemName("ID", "1")[0]
change_datetime = list(changedEntry.getAllValuesbyName("Opened"))[0]
date, time = change_datetime.split("|")
assert date == thisDate
assert time[0:1] == thisTime[0:1]
def test_21_DB_guessSecondaryDBItembyPath(preCreatedDB):
#1: Test if "elements" are part of the secondaryDB
newFile = "testStructure/Blue/Xi.mp4"
options = preCreatedDB.getItemsPyPath(newFile)
assert "Xi" in options["SingleItem"]
assert "Blue" in options["ListItem"]
assert options["SingleItem"] == set(["Xi"]) and options["ListItem"] == set(["Blue"])
    #2: Test if it works when subparts of an "element" are part of secondaryDB
#2.1: Fast version which will not try to split strings
newFile = "testStructure/Pink/BlueXi.mp4"
options = preCreatedDB.getItemsPyPath(newFile, fast=True)
assert "Xi" not in options["SingleItem"]
assert "Blue" not in options["ListItem"]
assert options["SingleItem"] == set([]) and options["ListItem"] == set([])
#2.2; Test with enables splitting
options = preCreatedDB.getItemsPyPath(newFile)
assert "Xi" in options["SingleItem"]
assert "Blue" in options["ListItem"]
assert options["SingleItem"] == set(["Xi"]) and options["ListItem"] == set(["Blue"])
#2.3: Test lowercase match
newFile = "testStructure/Pink/bluexi.mp4"
options = preCreatedDB.getItemsPyPath(newFile)
assert "Xi" in options["SingleItem"]
assert "Blue" in options["ListItem"]
assert options["SingleItem"] == set(["Xi"]) and options["ListItem"] == set(["Blue"])
#3: Test for items with whitespace - Find exact match
newFile = "testStructure/Pink/Double_Orange.mp4"
options = preCreatedDB.getItemsPyPath(newFile, whitespaceMatch = True)
assert options["ListItem"] == set(["Double Orange"])
#3.1 Test whitespace lowercase:
newFile = "testStructure/Pink/double_orange.mp4"
options = preCreatedDB.getItemsPyPath(newFile, whitespaceMatch = True)
assert options["ListItem"] == set(["Double Orange"])
#4: Test for items with whitespace - find partial match
newFile = "testStructure/Pink/Orange_Hand.mp4"
options = preCreatedDB.getItemsPyPath(newFile)
assert "Double Orange" in options["ListItem"]
assert "Triple Orange" in options["ListItem"]
assert options["ListItem"] == set(["Triple Orange", "Double Orange"])
    #5: Test for items with whitespace - find partial match, exact match deactivated
newFile = "testStructure/Pink/Double_Orange.mp4"
options = preCreatedDB.getItemsPyPath(newFile, whitespaceMatch = False)
assert options["ListItem"] == set(["Triple Orange", "Double Orange"])
    #Check if it works with new values that are added before save/load
newFile = "testStructure/folder/Red.mp4"
options = preCreatedDB.getItemsPyPath(newFile)
assert "Red" not in options["ListItem"]
preCreatedDB.modifyListEntry("0", "ListItem", "Red", byID = True)
options = preCreatedDB.getItemsPyPath(newFile)
print("-------------------",options)
assert "Red" in options["ListItem"]
def test_22_DB_splitBySep(preCreatedDB):
split1 = preCreatedDB.splitBySep(".", ["a.b","c.d-e"])
assert ["a","b","c","d-e"] == split1
split2 = preCreatedDB.splitBySep("-", split1)
assert ["a","b","c","d","e"] == split2
def test_23_DB_recursiveSplit(preCreatedDB):
strings2Split = "A-b_c+d.e"
strings2Expect = set(["A","b","c","d","e"])
assert strings2Expect == preCreatedDB.splitStr(strings2Split)
@pytest.mark.parametrize("ID, nExpected", [("4", 3), ("1", 0), ("3", 1)])
def test_24_DB_countListItem(ID, nExpected, preCreatedDB):
assert preCreatedDB.getCount(ID, "ListItem", byID = True) == nExpected
def test_25_DB_cachedValues(mocker, preCreatedDB):
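    # getAllValuebyItemName should serve cached values: the spy's call_count only increases after
    # an entry is modified, and the new/changed values appear in the next lookup.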
assert preCreatedDB.cachedValuesChanged.keys() == preCreatedDB.model.allItems
mocker.spy(DataBase, "cacheAllValuebyItemName")
###### Test caching for ListItem entries
values_ListItem_preChange = preCreatedDB.getAllValuebyItemName("ListItem")
assert DataBase.cacheAllValuebyItemName.call_count == 0
preCreatedDB.modifyListEntry("4", "ListItem", "Cyan", byID = True)
values_ListItem_postChange = preCreatedDB.getAllValuebyItemName("ListItem")
assert DataBase.cacheAllValuebyItemName.call_count == 1
assert list(set(values_ListItem_postChange)-set(values_ListItem_preChange)) == ["Cyan"]
###### Test caching for SingleItem Entries
Entry4 = preCreatedDB.getEntryByItemName("ID", "4")[0]
oldValue = Entry4.getItem("SingleItem").value
newValue = "Gamma"
preCreatedDB.modifySingleEntry("4", "SingleItem", newValue, byID = True)
values_ListItem_postChange = preCreatedDB.getAllValuebyItemName("SingleItem")
assert DataBase.cacheAllValuebyItemName.call_count == 2
assert oldValue not in values_ListItem_postChange and newValue in values_ListItem_postChange
def test_26_DB_changedPaths(preCreatedDB):
updatedFiles = preCreatedDB.checkChangedPaths()
assert updatedFiles == []
preCreatedDB.modifySingleEntry("folder2/folder2file2.mp4", "Path", "folder2file2.mp4", byPath = True)
thisID = preCreatedDB.getEntryByItemName("Path", "folder2file2.mp4")[0].getItem("ID").value
updatedFiles = preCreatedDB.checkChangedPaths()
thisNewPath = preCreatedDB.getEntryByItemName("ID", thisID)[0].getItem("Path").value
theID, oldPath, newPath = updatedFiles[0]
assert theID == thisID
assert oldPath == "folder2file2.mp4"
assert newPath == "folder2/folder2file2.mp4"
assert thisNewPath == "folder2/folder2file2.mp4"
def test_27_DB_missingFiles(preCreatedDB):
missingFiles = preCreatedDB.getMissingFiles()
assert missingFiles == []
os.system("rm "+dir2tests+"/testStructure/folder2/folder2file2.mp4")
missingFiles = preCreatedDB.getMissingFiles()
assert missingFiles == ["folder2/folder2file2.mp4"]
os.system("touch "+dir2tests+"/testStructure/folder2/folder2file2.mp4")
def test_28_DB_checkMissingFileAndReSort(preCreatedDB):
preCreatedDB2 = copy.deepcopy(preCreatedDB)
os.system("rm "+dir2tests+"/testStructure/folder2/folder2file2.mp4")
removedID = preCreatedDB2.getEntryByItemName("Path", "folder2/folder2file2.mp4")[0].getItem("ID").value
movedPath = preCreatedDB2.getEntryByItemName("ID",
str(preCreatedDB2.maxID))[0].getItem("Path").value
oldMaxID = preCreatedDB2.maxID
IDChanges = preCreatedDB2.checkMissingFiles()
os.system("touch "+dir2tests+"/testStructure/folder2/folder2file2.mp4")
oldID, newID = IDChanges[0]
assert newID == removedID
assert oldID == oldMaxID
assert movedPath == preCreatedDB2.getEntryByItemName("ID", removedID)[0].getItem("Path").value
if __name__ == "__main__":
unittest.main()
|
[
"unittest.main",
"copy.deepcopy",
"json.load",
"os.makedirs",
"shutil.rmtree",
"os.getcwd",
"pytest.fixture",
"os.system",
"os.path.exists",
"datetime.date.today",
"pytest.raises",
"mimir.backend.database.Model",
"glob.glob",
"pytest.mark.parametrize",
"mimir.backend.database.DataBase",
"datetime.datetime.now"
] |
[((1269, 1299), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (1283, 1299), False, 'import pytest\n'), ((15709, 15855), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""Query, IDsExp"""', "[('!Lavender', ['0', '1', '2', '5']), ('!Xi', ['1', '2', '3', '4', '5']), (\n '!Eta Lavender', ['4'])]"], {}), "('Query, IDsExp', [('!Lavender', ['0', '1', '2', '5'\n ]), ('!Xi', ['1', '2', '3', '4', '5']), ('!Eta Lavender', ['4'])])\n", (15732, 15855), False, 'import pytest\n'), ((16045, 16113), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""Query, IDsExp"""', "[('Triple Orange', ['0'])]"], {}), "('Query, IDsExp', [('Triple Orange', ['0'])])\n", (16068, 16113), False, 'import pytest\n'), ((28054, 28126), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""ID, nExpected"""', "[('4', 3), ('1', 0), ('3', 1)]"], {}), "('ID, nExpected', [('4', 3), ('1', 0), ('3', 1)])\n", (28077, 28126), False, 'import pytest\n'), ((511, 522), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (520, 522), False, 'import os\n'), ((545, 556), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (554, 556), False, 'import os\n'), ((909, 932), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (930, 932), False, 'import datetime\n'), ((1324, 1384), 'os.system', 'os.system', (["('touch ' + dir2tests + '/testStructure/rootFile1')"], {}), "('touch ' + dir2tests + '/testStructure/rootFile1')\n", (1333, 1384), False, 'import os\n'), ((1385, 1460), 'os.system', 'os.system', (["('touch ' + dir2tests + '/testStructure/folder2/folder2file1.mp4')"], {}), "('touch ' + dir2tests + '/testStructure/folder2/folder2file1.mp4')\n", (1394, 1460), False, 'import os\n'), ((1461, 1536), 'os.system', 'os.system', (["('touch ' + dir2tests + '/testStructure/folder2/folder2file2.mp4')"], {}), "('touch ' + dir2tests + '/testStructure/folder2/folder2file2.mp4')\n", (1470, 1536), False, 'import os\n'), ((1537, 1624), 'os.system', 'os.system', (["('touch ' + dir2tests + '/testStructure/folder2/folder3/folder3file1.mp4')"], {}), "('touch ' + dir2tests +\n '/testStructure/folder2/folder3/folder3file1.mp4')\n", (1546, 1624), False, 'import os\n'), ((1621, 1696), 'os.system', 'os.system', (["('touch ' + dir2tests + '/testStructure/folder1/folder1file1.mp4')"], {}), "('touch ' + dir2tests + '/testStructure/folder1/folder1file1.mp4')\n", (1630, 1696), False, 'import os\n'), ((1697, 1772), 'os.system', 'os.system', (["('touch ' + dir2tests + '/testStructure/folder1/folder1file2.mp4')"], {}), "('touch ' + dir2tests + '/testStructure/folder1/folder1file2.mp4')\n", (1706, 1772), False, 'import os\n'), ((1866, 1904), 'os.path.exists', 'os.path.exists', (["(dbRootPath + '/.mimir')"], {}), "(dbRootPath + '/.mimir')\n", (1880, 1904), False, 'import os\n'), ((1963, 1998), 'mimir.backend.database.DataBase', 'DataBase', (['dbRootPath', '"""new"""', 'config'], {}), "(dbRootPath, 'new', config)\n", (1971, 1998), False, 'from mimir.backend.database import DataBase, Model\n'), ((5305, 5342), 'shutil.rmtree', 'shutil.rmtree', (["(dbRootPath + '/.mimir')"], {}), "(dbRootPath + '/.mimir')\n", (5318, 5342), False, 'import shutil\n'), ((5533, 5546), 'mimir.backend.database.Model', 'Model', (['config'], {}), '(config)\n', (5538, 5546), False, 'from mimir.backend.database import DataBase, Model\n'), ((6244, 6282), 'os.path.exists', 'os.path.exists', (["(dbRootPath + '/.mimir')"], {}), "(dbRootPath + '/.mimir')\n", (6258, 6282), False, 'import os\n'), ((6341, 6376), 'mimir.backend.database.DataBase', 
'DataBase', (['dbRootPath', '"""new"""', 'config'], {}), "(dbRootPath, 'new', config)\n", (6349, 6376), False, 'from mimir.backend.database import DataBase, Model\n'), ((6433, 6479), 'glob.glob', 'glob', (["(dbRootPath + '/**/*.mp4')"], {'recursive': '(True)'}), "(dbRootPath + '/**/*.mp4', recursive=True)\n", (6437, 6479), False, 'from glob import glob\n'), ((7308, 7346), 'os.path.exists', 'os.path.exists', (["(dbRootPath + '/.mimir')"], {}), "(dbRootPath + '/.mimir')\n", (7322, 7346), False, 'import os\n'), ((7405, 7440), 'mimir.backend.database.DataBase', 'DataBase', (['dbRootPath', '"""new"""', 'config'], {}), "(dbRootPath, 'new', config)\n", (7413, 7440), False, 'from mimir.backend.database import DataBase, Model\n'), ((8104, 8142), 'os.path.exists', 'os.path.exists', (["(dbRootPath + '/.mimir')"], {}), "(dbRootPath + '/.mimir')\n", (8118, 8142), False, 'import os\n'), ((8202, 8237), 'mimir.backend.database.DataBase', 'DataBase', (['dbRootPath', '"""new"""', 'config'], {}), "(dbRootPath, 'new', config)\n", (8210, 8237), False, 'from mimir.backend.database import DataBase, Model\n'), ((8245, 8283), 'os.path.exists', 'os.path.exists', (["(dbRootPath + '/.mimir')"], {}), "(dbRootPath + '/.mimir')\n", (8259, 8283), False, 'import os\n'), ((8343, 8378), 'mimir.backend.database.DataBase', 'DataBase', (['dbRootPath', '"""new"""', 'config'], {}), "(dbRootPath, 'new', config)\n", (8351, 8378), False, 'from mimir.backend.database import DataBase, Model\n'), ((8567, 8605), 'os.path.exists', 'os.path.exists', (["(dbRootPath + '/.mimir')"], {}), "(dbRootPath + '/.mimir')\n", (8581, 8605), False, 'import os\n'), ((8665, 8700), 'mimir.backend.database.DataBase', 'DataBase', (['dbRootPath', '"""new"""', 'config'], {}), "(dbRootPath, 'new', config)\n", (8673, 8700), False, 'from mimir.backend.database import DataBase, Model\n'), ((8705, 8764), 'os.system', 'os.system', (["('rm ' + dir2tests + '/testStructure/newfile.mp4')"], {}), "('rm ' + dir2tests + '/testStructure/newfile.mp4')\n", (8714, 8764), False, 'import os\n'), ((8768, 8806), 'os.path.exists', 'os.path.exists', (["(dbRootPath + '/.mimir')"], {}), "(dbRootPath + '/.mimir')\n", (8782, 8806), False, 'import os\n'), ((8866, 8901), 'mimir.backend.database.DataBase', 'DataBase', (['dbRootPath', '"""new"""', 'config'], {}), "(dbRootPath, 'new', config)\n", (8874, 8901), False, 'from mimir.backend.database import DataBase, Model\n'), ((8906, 8968), 'os.system', 'os.system', (["('touch ' + dir2tests + '/testStructure/newfile.mp4')"], {}), "('touch ' + dir2tests + '/testStructure/newfile.mp4')\n", (8915, 8968), False, 'import os\n'), ((8998, 9057), 'os.system', 'os.system', (["('rm ' + dir2tests + '/testStructure/newfile.mp4')"], {}), "('rm ' + dir2tests + '/testStructure/newfile.mp4')\n", (9007, 9057), False, 'import os\n'), ((9238, 9276), 'os.path.exists', 'os.path.exists', (["(dbRootPath + '/.mimir')"], {}), "(dbRootPath + '/.mimir')\n", (9252, 9276), False, 'import os\n'), ((9335, 9370), 'mimir.backend.database.DataBase', 'DataBase', (['dbRootPath', '"""new"""', 'config'], {}), "(dbRootPath, 'new', config)\n", (9343, 9370), False, 'from mimir.backend.database import DataBase, Model\n'), ((9410, 9438), 'mimir.backend.database.DataBase', 'DataBase', (['dbRootPath', '"""load"""'], {}), "(dbRootPath, 'load')\n", (9418, 9438), False, 'from mimir.backend.database import DataBase, Model\n'), ((9693, 9731), 'os.path.exists', 'os.path.exists', (["(dbRootPath + '/.mimir')"], {}), "(dbRootPath + '/.mimir')\n", (9707, 9731), False, 'import os\n'), ((9790, 
9825), 'mimir.backend.database.DataBase', 'DataBase', (['dbRootPath', '"""new"""', 'config'], {}), "(dbRootPath, 'new', config)\n", (9798, 9825), False, 'from mimir.backend.database import DataBase, Model\n'), ((9980, 10026), 'glob.glob', 'glob', (["(dbRootPath + '/**/*.mp4')"], {'recursive': '(True)'}), "(dbRootPath + '/**/*.mp4', recursive=True)\n", (9984, 10026), False, 'from glob import glob\n'), ((10294, 10332), 'os.path.exists', 'os.path.exists', (["(dbRootPath + '/.mimir')"], {}), "(dbRootPath + '/.mimir')\n", (10308, 10332), False, 'import os\n'), ((10391, 10426), 'mimir.backend.database.DataBase', 'DataBase', (['dbRootPath', '"""new"""', 'config'], {}), "(dbRootPath, 'new', config)\n", (10399, 10426), False, 'from mimir.backend.database import DataBase, Model\n'), ((11001, 11039), 'os.path.exists', 'os.path.exists', (["(dbRootPath + '/.mimir')"], {}), "(dbRootPath + '/.mimir')\n", (11015, 11039), False, 'import os\n'), ((11098, 11133), 'mimir.backend.database.DataBase', 'DataBase', (['dbRootPath', '"""new"""', 'config'], {}), "(dbRootPath, 'new', config)\n", (11106, 11133), False, 'from mimir.backend.database import DataBase, Model\n'), ((12377, 12415), 'os.path.exists', 'os.path.exists', (["(dbRootPath + '/.mimir')"], {}), "(dbRootPath + '/.mimir')\n", (12391, 12415), False, 'import os\n'), ((12474, 12509), 'mimir.backend.database.DataBase', 'DataBase', (['dbRootPath', '"""new"""', 'config'], {}), "(dbRootPath, 'new', config)\n", (12482, 12509), False, 'from mimir.backend.database import DataBase, Model\n'), ((12545, 12568), 'copy.deepcopy', 'copy.deepcopy', (['database'], {}), '(database)\n', (12558, 12568), False, 'import copy\n'), ((12795, 12818), 'copy.deepcopy', 'copy.deepcopy', (['database'], {}), '(database)\n', (12808, 12818), False, 'import copy\n'), ((13069, 13092), 'copy.deepcopy', 'copy.deepcopy', (['database'], {}), '(database)\n', (13082, 13092), False, 'import copy\n'), ((13515, 13553), 'os.path.exists', 'os.path.exists', (["(dbRootPath + '/.mimir')"], {}), "(dbRootPath + '/.mimir')\n", (13529, 13553), False, 'import os\n'), ((13612, 13647), 'mimir.backend.database.DataBase', 'DataBase', (['dbRootPath', '"""new"""', 'config'], {}), "(dbRootPath, 'new', config)\n", (13620, 13647), False, 'from mimir.backend.database import DataBase, Model\n'), ((13692, 13754), 'os.system', 'os.system', (["('touch ' + dir2tests + '/testStructure/newfile.mp4')"], {}), "('touch ' + dir2tests + '/testStructure/newfile.mp4')\n", (13701, 13754), False, 'import os\n'), ((13801, 13860), 'os.system', 'os.system', (["('rm ' + dir2tests + '/testStructure/newfile.mp4')"], {}), "('rm ' + dir2tests + '/testStructure/newfile.mp4')\n", (13810, 13860), False, 'import os\n'), ((14356, 14394), 'os.path.exists', 'os.path.exists', (["(dbRootPath + '/.mimir')"], {}), "(dbRootPath + '/.mimir')\n", (14370, 14394), False, 'import os\n'), ((14453, 14488), 'mimir.backend.database.DataBase', 'DataBase', (['dbRootPath', '"""new"""', 'config'], {}), "(dbRootPath, 'new', config)\n", (14461, 14488), False, 'from mimir.backend.database import DataBase, Model\n'), ((16442, 16480), 'os.path.exists', 'os.path.exists', (["(dbRootPath + '/.mimir')"], {}), "(dbRootPath + '/.mimir')\n", (16456, 16480), False, 'import os\n'), ((16539, 16574), 'mimir.backend.database.DataBase', 'DataBase', (['dbRootPath', '"""new"""', 'config'], {}), "(dbRootPath, 'new', config)\n", (16547, 16574), False, 'from mimir.backend.database import DataBase, Model\n'), ((18674, 18697), 'copy.deepcopy', 'copy.deepcopy', (['database'], {}), 
'(database)\n', (18687, 18697), False, 'import copy\n'), ((19139, 19168), 'copy.deepcopy', 'copy.deepcopy', (['databaseAppend'], {}), '(databaseAppend)\n', (19152, 19168), False, 'import copy\n'), ((20875, 20913), 'os.path.exists', 'os.path.exists', (["(dbRootPath + '/.mimir')"], {}), "(dbRootPath + '/.mimir')\n", (20889, 20913), False, 'import os\n'), ((20972, 21007), 'mimir.backend.database.DataBase', 'DataBase', (['dbRootPath', '"""new"""', 'config'], {}), "(dbRootPath, 'new', config)\n", (20980, 21007), False, 'from mimir.backend.database import DataBase, Model\n'), ((21163, 21225), 'os.system', 'os.system', (["('touch ' + dir2tests + '/testStructure/newfile.mp4')"], {}), "('touch ' + dir2tests + '/testStructure/newfile.mp4')\n", (21172, 21225), False, 'import os\n'), ((21265, 21324), 'os.system', 'os.system', (["('rm ' + dir2tests + '/testStructure/newfile.mp4')"], {}), "('rm ' + dir2tests + '/testStructure/newfile.mp4')\n", (21274, 21324), False, 'import os\n'), ((21568, 21606), 'os.path.exists', 'os.path.exists', (["(dbRootPath + '/.mimir')"], {}), "(dbRootPath + '/.mimir')\n", (21582, 21606), False, 'import os\n'), ((21665, 21700), 'mimir.backend.database.DataBase', 'DataBase', (['dbRootPath', '"""new"""', 'config'], {}), "(dbRootPath, 'new', config)\n", (21673, 21700), False, 'from mimir.backend.database import DataBase, Model\n'), ((21964, 22002), 'os.path.exists', 'os.path.exists', (["(dbRootPath + '/.mimir')"], {}), "(dbRootPath + '/.mimir')\n", (21978, 22002), False, 'import os\n'), ((22061, 22096), 'mimir.backend.database.DataBase', 'DataBase', (['dbRootPath', '"""new"""', 'config'], {}), "(dbRootPath, 'new', config)\n", (22069, 22096), False, 'from mimir.backend.database import DataBase, Model\n'), ((22349, 22387), 'os.path.exists', 'os.path.exists', (["(dbRootPath + '/.mimir')"], {}), "(dbRootPath + '/.mimir')\n", (22363, 22387), False, 'import os\n'), ((22446, 22481), 'mimir.backend.database.DataBase', 'DataBase', (['dbRootPath', '"""new"""', 'config'], {}), "(dbRootPath, 'new', config)\n", (22454, 22481), False, 'from mimir.backend.database import DataBase, Model\n'), ((30242, 30314), 'os.system', 'os.system', (["('rm ' + dir2tests + '/testStructure/folder2/folder2file2.mp4')"], {}), "('rm ' + dir2tests + '/testStructure/folder2/folder2file2.mp4')\n", (30251, 30314), False, 'import os\n'), ((30421, 30496), 'os.system', 'os.system', (["('touch ' + dir2tests + '/testStructure/folder2/folder2file2.mp4')"], {}), "('touch ' + dir2tests + '/testStructure/folder2/folder2file2.mp4')\n", (30430, 30496), False, 'import os\n'), ((30570, 30597), 'copy.deepcopy', 'copy.deepcopy', (['preCreatedDB'], {}), '(preCreatedDB)\n', (30583, 30597), False, 'import copy\n'), ((30602, 30674), 'os.system', 'os.system', (["('rm ' + dir2tests + '/testStructure/folder2/folder2file2.mp4')"], {}), "('rm ' + dir2tests + '/testStructure/folder2/folder2file2.mp4')\n", (30611, 30674), False, 'import os\n'), ((31023, 31098), 'os.system', 'os.system', (["('touch ' + dir2tests + '/testStructure/folder2/folder2file2.mp4')"], {}), "('touch ' + dir2tests + '/testStructure/folder2/folder2file2.mp4')\n", (31032, 31098), False, 'import os\n'), ((31318, 31333), 'unittest.main', 'unittest.main', ([], {}), '()\n', (31331, 31333), False, 'import unittest\n'), ((418, 429), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (427, 429), False, 'import os\n'), ((465, 476), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (474, 476), False, 'import os\n'), ((573, 584), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (582, 584), False, 
'import os\n'), ((1912, 1949), 'shutil.rmtree', 'shutil.rmtree', (["(dbRootPath + '/.mimir')"], {}), "(dbRootPath + '/.mimir')\n", (1925, 1949), False, 'import shutil\n'), ((5504, 5516), 'json.load', 'json.load', (['f'], {}), '(f)\n', (5513, 5516), False, 'import json\n'), ((6290, 6327), 'shutil.rmtree', 'shutil.rmtree', (["(dbRootPath + '/.mimir')"], {}), "(dbRootPath + '/.mimir')\n", (6303, 6327), False, 'import shutil\n'), ((6993, 7031), 'os.path.exists', 'os.path.exists', (["(dbRootPath + '/.mimir')"], {}), "(dbRootPath + '/.mimir')\n", (7007, 7031), False, 'import os\n'), ((7039, 7074), 'os.makedirs', 'os.makedirs', (["(dbRootPath + '/.mimir')"], {}), "(dbRootPath + '/.mimir')\n", (7050, 7074), False, 'import os\n'), ((7082, 7109), 'pytest.raises', 'pytest.raises', (['RuntimeError'], {}), '(RuntimeError)\n', (7095, 7109), False, 'import pytest\n'), ((7130, 7165), 'mimir.backend.database.DataBase', 'DataBase', (['dbRootPath', '"""new"""', 'config'], {}), "(dbRootPath, 'new', config)\n", (7138, 7165), False, 'from mimir.backend.database import DataBase, Model\n'), ((7354, 7391), 'shutil.rmtree', 'shutil.rmtree', (["(dbRootPath + '/.mimir')"], {}), "(dbRootPath + '/.mimir')\n", (7367, 7391), False, 'import shutil\n'), ((8150, 8187), 'shutil.rmtree', 'shutil.rmtree', (["(dbRootPath + '/.mimir')"], {}), "(dbRootPath + '/.mimir')\n", (8163, 8187), False, 'import shutil\n'), ((8291, 8328), 'shutil.rmtree', 'shutil.rmtree', (["(dbRootPath + '/.mimir')"], {}), "(dbRootPath + '/.mimir')\n", (8304, 8328), False, 'import shutil\n'), ((8613, 8650), 'shutil.rmtree', 'shutil.rmtree', (["(dbRootPath + '/.mimir')"], {}), "(dbRootPath + '/.mimir')\n", (8626, 8650), False, 'import shutil\n'), ((8814, 8851), 'shutil.rmtree', 'shutil.rmtree', (["(dbRootPath + '/.mimir')"], {}), "(dbRootPath + '/.mimir')\n", (8827, 8851), False, 'import shutil\n'), ((9284, 9321), 'shutil.rmtree', 'shutil.rmtree', (["(dbRootPath + '/.mimir')"], {}), "(dbRootPath + '/.mimir')\n", (9297, 9321), False, 'import shutil\n'), ((9739, 9776), 'shutil.rmtree', 'shutil.rmtree', (["(dbRootPath + '/.mimir')"], {}), "(dbRootPath + '/.mimir')\n", (9752, 9776), False, 'import shutil\n'), ((9835, 9858), 'pytest.raises', 'pytest.raises', (['KeyError'], {}), '(KeyError)\n', (9848, 9858), False, 'import pytest\n'), ((10340, 10377), 'shutil.rmtree', 'shutil.rmtree', (["(dbRootPath + '/.mimir')"], {}), "(dbRootPath + '/.mimir')\n", (10353, 10377), False, 'import shutil\n'), ((10436, 10459), 'pytest.raises', 'pytest.raises', (['KeyError'], {}), '(KeyError)\n', (10449, 10459), False, 'import pytest\n'), ((11047, 11084), 'shutil.rmtree', 'shutil.rmtree', (["(dbRootPath + '/.mimir')"], {}), "(dbRootPath + '/.mimir')\n", (11060, 11084), False, 'import shutil\n'), ((11269, 11296), 'pytest.raises', 'pytest.raises', (['RuntimeError'], {}), '(RuntimeError)\n', (11282, 11296), False, 'import pytest\n'), ((11374, 11401), 'pytest.raises', 'pytest.raises', (['RuntimeError'], {}), '(RuntimeError)\n', (11387, 11401), False, 'import pytest\n'), ((11556, 11580), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (11569, 11580), False, 'import pytest\n'), ((11632, 11656), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (11645, 11656), False, 'import pytest\n'), ((11723, 11747), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (11736, 11747), False, 'import pytest\n'), ((11892, 11917), 'pytest.raises', 'pytest.raises', (['IndexError'], {}), '(IndexError)\n', (11905, 11917), False, 'import 
pytest\n'), ((12067, 12090), 'pytest.raises', 'pytest.raises', (['KeyError'], {}), '(KeyError)\n', (12080, 12090), False, 'import pytest\n'), ((12154, 12177), 'pytest.raises', 'pytest.raises', (['KeyError'], {}), '(KeyError)\n', (12167, 12177), False, 'import pytest\n'), ((12423, 12460), 'shutil.rmtree', 'shutil.rmtree', (["(dbRootPath + '/.mimir')"], {}), "(dbRootPath + '/.mimir')\n", (12436, 12460), False, 'import shutil\n'), ((13561, 13598), 'shutil.rmtree', 'shutil.rmtree', (["(dbRootPath + '/.mimir')"], {}), "(dbRootPath + '/.mimir')\n", (13574, 13598), False, 'import shutil\n'), ((14402, 14439), 'shutil.rmtree', 'shutil.rmtree', (["(dbRootPath + '/.mimir')"], {}), "(dbRootPath + '/.mimir')\n", (14415, 14439), False, 'import shutil\n'), ((14893, 14916), 'pytest.raises', 'pytest.raises', (['KeyError'], {}), '(KeyError)\n', (14906, 14916), False, 'import pytest\n'), ((15012, 15035), 'pytest.raises', 'pytest.raises', (['KeyError'], {}), '(KeyError)\n', (15025, 15035), False, 'import pytest\n'), ((16488, 16525), 'shutil.rmtree', 'shutil.rmtree', (["(dbRootPath + '/.mimir')"], {}), "(dbRootPath + '/.mimir')\n", (16501, 16525), False, 'import shutil\n'), ((17251, 17274), 'pytest.raises', 'pytest.raises', (['KeyError'], {}), '(KeyError)\n', (17264, 17274), False, 'import pytest\n'), ((17369, 17393), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (17382, 17393), False, 'import pytest\n'), ((17554, 17578), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (17567, 17578), False, 'import pytest\n'), ((20921, 20958), 'shutil.rmtree', 'shutil.rmtree', (["(dbRootPath + '/.mimir')"], {}), "(dbRootPath + '/.mimir')\n", (20934, 20958), False, 'import shutil\n'), ((21614, 21651), 'shutil.rmtree', 'shutil.rmtree', (["(dbRootPath + '/.mimir')"], {}), "(dbRootPath + '/.mimir')\n", (21627, 21651), False, 'import shutil\n'), ((22010, 22047), 'shutil.rmtree', 'shutil.rmtree', (["(dbRootPath + '/.mimir')"], {}), "(dbRootPath + '/.mimir')\n", (22023, 22047), False, 'import shutil\n'), ((22395, 22432), 'shutil.rmtree', 'shutil.rmtree', (["(dbRootPath + '/.mimir')"], {}), "(dbRootPath + '/.mimir')\n", (22408, 22432), False, 'import shutil\n'), ((22542, 22576), 'pytest.raises', 'pytest.raises', (['NotImplementedError'], {}), '(NotImplementedError)\n', (22555, 22576), False, 'import pytest\n'), ((22739, 22762), 'pytest.raises', 'pytest.raises', (['KeyError'], {}), '(KeyError)\n', (22752, 22762), False, 'import pytest\n'), ((22834, 22868), 'pytest.raises', 'pytest.raises', (['NotImplementedError'], {}), '(NotImplementedError)\n', (22847, 22868), False, 'import pytest\n'), ((7723, 7744), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (7742, 7744), False, 'import datetime\n'), ((7750, 7771), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (7769, 7771), False, 'import datetime\n'), ((7779, 7800), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (7798, 7800), False, 'import datetime\n')]
|
import numpy as np
import torch
import torchvision.transforms as transforms
import torch.utils.data as data
import os
import pickle
import nltk
from PIL import Image
import cv2
import glob
import random
# deprecated
# def get_data_direct(img_size, texture_size,
# imgs_fn = None, textures_fn = None, sample_dir = None, sep = ':', format = '*.png',
# mean = [0.485, 0.456, 0.406], std = [0.229, 0.224, 0.225]):
# if sample_dir is None:
# imgs_fn = imgs_fn.split(sep)
# textures_fn = textures_fn.split(sep)
# else:
# all_images = glob.glob(os.path.join(sample_dir, format))
# all_images = sorted(all_images)
# imgs_fn = []
# textures_fn = []
# for file in all_images:
# if 'img' in file.split('/')[-1]:
# imgs_fn.append(file)
# elif 'texture' in file.split('/')[-1]:
# textures_fn.append(file)
# else:
# raise ValueError('not sure which type if this one: %s'%(file))
# batch_size = len(imgs_fn)
# assert len(imgs_fn) == len(textures_fn)
# imgs = []
# textures = []
# for index in range(batch_size):
# img_cur = Image.open(imgs_fn[index])
# img_cur = img_cur.resize([img_size, img_size])
# # it could be rgba
# img_cur = (np.asarray(img_cur)[...,:3] / 255.0 - mean) / std
# imgs.append(img_cur)
#
# texture_cur = Image.open(textures_fn[index])
# texture_cur = texture_cur.resize([texture_size, texture_size])
# # it could be rgba
# texture_cur = (np.asarray(texture_cur)[...,:3] / 255.0 - mean) / std
# textures.append(texture_cur)
#
# imgs = np.array(imgs).reshape([batch_size, img_size, img_size, 3])
# textures = np.array(textures).reshape([batch_size, texture_size, texture_size, 3])
# imgs = np.transpose(imgs, [0, 3, 1, 2])
# textures = np.transpose(textures, [0, 3, 1, 2])
# return imgs, textures
#
def get_data_direct(img_size, imgs_dir, texture_size = None, textures_dir = None,
format = '*.png',
mean = [0.485, 0.456, 0.406], std = [0.229, 0.224, 0.225]):
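    # Glob every image under imgs_dir (and, if given, every texture under textures_dir),
    # resize, keep the RGB channels, normalize with mean/std and return NCHW float arrays;
    # when textures_dir is set, each image is paired with each texture.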
imgs = glob.glob(os.path.join(imgs_dir, format))
imgs = sorted(imgs)
if textures_dir is not None:
textures = glob.glob(os.path.join(textures_dir, format))
textures = sorted(textures)
batch_size = len(imgs) * len(textures) if textures_dir is not None else len(imgs)
imgs_data = []
textures_data = []
if textures_dir is not None:
assert texture_size is not None
for img_index in range(len(imgs)):
for texture_index in range(len(textures)):
img_cur = Image.open(imgs[img_index])
img_cur = img_cur.resize([img_size, img_size])
# it could be rgba
img_cur = (np.asarray(img_cur)[...,:3] / 255.0 - mean) / std
imgs_data.append(img_cur)
texture_cur = Image.open(textures[texture_index])
texture_cur = texture_cur.resize([texture_size, texture_size])
# it could be rgba
texture_cur = (np.asarray(texture_cur)[...,:3] / 255.0 - mean) / std
textures_data.append(texture_cur)
else:
for img_index in range(len(imgs)):
img_cur = Image.open(imgs[img_index])
img_cur = img_cur.resize([img_size, img_size])
# it could be rgba
img_cur = (np.asarray(img_cur)[...,:3] / 255.0 - mean) / std
imgs_data.append(img_cur)
imgs_data = np.array(imgs_data).reshape([batch_size, img_size, img_size, 3])
imgs_data = np.transpose(imgs_data, [0, 3, 1, 2])
if textures_dir is not None:
textures_data = np.array(textures_data).reshape([batch_size, texture_size, texture_size, 3])
textures_data = np.transpose(textures_data, [0, 3, 1, 2])
return imgs_data, textures_data
class texture_seg_dataset(object):
def __init__(self, data_path, img_size, segmentation_regions, texture_size,
shuffle = True, use_same_from = True,
mean = [0.485, 0.456, 0.406], std = [0.229, 0.224, 0.225]): # from torch normalize
self.shuffle = shuffle
self.img_size = img_size
self.segmentation_regions = segmentation_regions
self.texture_size = texture_size
self.folders = glob.glob(os.path.join(data_path, '*/'))
self.use_same_from = use_same_from
self.mean = mean
self.std = std
        # segmentation_regions must not exceed the number of texture folders
assert (len(self.folders) >= self.segmentation_regions)
def generate_random_masks(self, points = None):
# use batch_size = 1
# return [size, size, segmentation_regions]
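        # Voronoi partition: every pixel is assigned to its nearest random seed point,
        # giving one binary mask per segmentation region.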
batch_size = 1
xs, ys = np.meshgrid(np.arange(0, self.img_size), np.arange(0, self.img_size))
if points is None:
n_points = [self.segmentation_regions]
points = [np.random.randint(0, self.img_size, size=(n_points[i], 2)) for i in range(batch_size)]
masks = []
for b in range(batch_size):
dists_b = [np.sqrt((xs - p[0])**2 + (ys - p[1])**2) for p in points[b]]
voronoi = np.argmin(dists_b, axis=0)
masks_b = np.zeros((self.img_size, self.img_size, self.segmentation_regions))
for m in range(self.segmentation_regions):
masks_b[:,:,m][voronoi == m] = 1
masks.append(masks_b)
return masks[0]
def random_crop(self, image, crop_height, crop_width):
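        # Cut a random crop_height x crop_width patch out of an HxWxC image.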
if (crop_width <= image.shape[1]) and (crop_height <= image.shape[0]):
            x = np.random.randint(0, image.shape[1]-crop_width + 1)
            y = np.random.randint(0, image.shape[0]-crop_height + 1)
return image[y:y+crop_height, x:x+crop_width, :]
else:
raise Exception('Crop shape exceeds image dimensions!')
def get_data(self, format = '*.jpg'):
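        # Build one composite image: sample one texture folder per Voronoi region, paste a
        # normalized texture image under each region's mask, and return the composite together
        # with a list of {'mask', 'texture'} dicts (texture = a random crop from the same folder).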
mask = self.generate_random_masks()
choose_from = []
img = np.zeros([self.img_size, self.img_size, 3])
sampled_folders = random.sample(self.folders, self.segmentation_regions)
texture_mask = []
for index, folder in enumerate(sampled_folders):
files = glob.glob(os.path.join(folder, format))
file_cur = random.choice(files)
# print (file_cur)
img_cur = Image.open(file_cur)
img_cur = img_cur.resize([self.img_size, self.img_size])
img_cur = (np.asarray(img_cur) / 255.0 - self.mean) / self.std
img[mask[..., index] == 1] = img_cur[mask[..., index] == 1]
if self.use_same_from:
texture_cur = img_cur
else:
file_cur = random.choice(files)
texture_cur = np.asarray(Image.open(file_cur))
texture = self.random_crop(texture_cur, self.texture_size, self.texture_size)
texture_mask.append({'mask': mask[...,index], 'texture':texture})
return img, texture_mask
def feed(self, batch_size = None):
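        # Return a single (img, texture_mask) sample when batch_size is None; otherwise build
        # batch_size samples, each pairing the composite image with one randomly picked region's
        # texture and mask, and return them stacked as NCHW arrays (imgs, textures, masks).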
if batch_size is None:
return self.get_data()
else:
img_texture_mask = []
            # alternative: add all regions of each generated img as inputs
# for _ in range(batch_size // self.segmentation_regions + 1):
# img, texture_mask = self.get_data()
# for index in range(self.segmentation_regions):
# patch = {}
# patch['img'] = img
# patch['texture'] = texture_mask[index]['texture']
# patch['mask'] = texture_mask[index]['mask']
# img_texture_mask.append(patch)
            # add each one separately
for _ in range(batch_size):
img, texture_mask = self.get_data()
                # randomly choose one region from the composite
index = np.random.choice(self.segmentation_regions, 1)[0]
patch = {}
patch['img'] = img
patch['texture'] = texture_mask[index]['texture']
patch['mask'] = texture_mask[index]['mask']
img_texture_mask.append(patch)
img_texture_mask = img_texture_mask[:batch_size]
if self.shuffle:
random.shuffle(img_texture_mask)
imgs = [item['img'] for item in img_texture_mask]
textures = [item['texture'] for item in img_texture_mask]
masks = [item['mask'] for item in img_texture_mask]
imgs = np.array(imgs).reshape([batch_size, self.img_size, self.img_size, 3])
textures = np.array(textures).reshape([batch_size, self.texture_size, self.texture_size, 3])
masks = np.array(masks).reshape([batch_size, self.img_size, self.img_size, 1])
imgs = np.transpose(imgs, [0, 3, 1, 2])
textures = np.transpose(textures, [0, 3, 1, 2])
masks = np.transpose(masks, [0, 3, 1, 2])
return imgs, textures, masks
if __name__ == '__main__':
data_set = texture_seg_dataset('./dataset/dtd/images', img_size = 256, segmentation_regions= 3, texture_size = 64)
imgs, textures, masks = data_set.feed(batch_size = 2)
print ('img shape: ', imgs.shape)
print ('texture shape: ', textures.shape )
print ('masks shape: ', masks.shape)
    raise SystemExit  # stop here; remove this line to run the visual checks below
img, texture_mask = data_set.get_data()
print (img.shape)
print (len(texture_mask))
img = cv2.cvtColor(np.uint8(img), cv2.COLOR_BGR2RGB)
cv2.imwrite('test_img.png', img)
# cv2.imshow('img', img/ 255.0)
for i in range(3):
texture_mask[i]['texture'] = cv2.cvtColor(np.uint8(texture_mask[i]['texture']) , cv2.COLOR_BGR2RGB)
# cv2.imwrite('test_texture_%d.png'%(i), texture_mask[i]['texture']
cv2.imshow('mask_%d'%(i), texture_mask[i]['mask'])
cv2.imshow('texture_%d'%(i), texture_mask[i]['texture'])
cv2.waitKey(0)
|
[
"numpy.uint8",
"cv2.waitKey",
"cv2.imwrite",
"random.sample",
"random.shuffle",
"numpy.zeros",
"numpy.transpose",
"numpy.argmin",
"PIL.Image.open",
"random.choice",
"numpy.asarray",
"numpy.random.randint",
"numpy.array",
"numpy.arange",
"numpy.random.choice",
"cv2.imshow",
"os.path.join",
"numpy.sqrt"
] |
[((3733, 3770), 'numpy.transpose', 'np.transpose', (['imgs_data', '[0, 3, 1, 2]'], {}), '(imgs_data, [0, 3, 1, 2])\n', (3745, 3770), True, 'import numpy as np\n'), ((9622, 9654), 'cv2.imwrite', 'cv2.imwrite', (['"""test_img.png"""', 'img'], {}), "('test_img.png', img)\n", (9633, 9654), False, 'import cv2\n'), ((10027, 10041), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (10038, 10041), False, 'import cv2\n'), ((2254, 2284), 'os.path.join', 'os.path.join', (['imgs_dir', 'format'], {}), '(imgs_dir, format)\n', (2266, 2284), False, 'import os\n'), ((3930, 3971), 'numpy.transpose', 'np.transpose', (['textures_data', '[0, 3, 1, 2]'], {}), '(textures_data, [0, 3, 1, 2])\n', (3942, 3971), True, 'import numpy as np\n'), ((6138, 6181), 'numpy.zeros', 'np.zeros', (['[self.img_size, self.img_size, 3]'], {}), '([self.img_size, self.img_size, 3])\n', (6146, 6181), True, 'import numpy as np\n'), ((6208, 6262), 'random.sample', 'random.sample', (['self.folders', 'self.segmentation_regions'], {}), '(self.folders, self.segmentation_regions)\n', (6221, 6262), False, 'import random\n'), ((9584, 9597), 'numpy.uint8', 'np.uint8', (['img'], {}), '(img)\n', (9592, 9597), True, 'import numpy as np\n'), ((9907, 9957), 'cv2.imshow', 'cv2.imshow', (["('mask_%d' % i)", "texture_mask[i]['mask']"], {}), "('mask_%d' % i, texture_mask[i]['mask'])\n", (9917, 9957), False, 'import cv2\n'), ((9966, 10022), 'cv2.imshow', 'cv2.imshow', (["('texture_%d' % i)", "texture_mask[i]['texture']"], {}), "('texture_%d' % i, texture_mask[i]['texture'])\n", (9976, 10022), False, 'import cv2\n'), ((2372, 2406), 'os.path.join', 'os.path.join', (['textures_dir', 'format'], {}), '(textures_dir, format)\n', (2384, 2406), False, 'import os\n'), ((3406, 3433), 'PIL.Image.open', 'Image.open', (['imgs[img_index]'], {}), '(imgs[img_index])\n', (3416, 3433), False, 'from PIL import Image\n'), ((3652, 3671), 'numpy.array', 'np.array', (['imgs_data'], {}), '(imgs_data)\n', (3660, 3671), True, 'import numpy as np\n'), ((4490, 4519), 'os.path.join', 'os.path.join', (['data_path', '"""*/"""'], {}), "(data_path, '*/')\n", (4502, 4519), False, 'import os\n'), ((4911, 4938), 'numpy.arange', 'np.arange', (['(0)', 'self.img_size'], {}), '(0, self.img_size)\n', (4920, 4938), True, 'import numpy as np\n'), ((4940, 4967), 'numpy.arange', 'np.arange', (['(0)', 'self.img_size'], {}), '(0, self.img_size)\n', (4949, 4967), True, 'import numpy as np\n'), ((5321, 5347), 'numpy.argmin', 'np.argmin', (['dists_b'], {'axis': '(0)'}), '(dists_b, axis=0)\n', (5330, 5347), True, 'import numpy as np\n'), ((5370, 5437), 'numpy.zeros', 'np.zeros', (['(self.img_size, self.img_size, self.segmentation_regions)'], {}), '((self.img_size, self.img_size, self.segmentation_regions))\n', (5378, 5437), True, 'import numpy as np\n'), ((5756, 5805), 'numpy.random.randint', 'np.random.randint', (['(0)', '(image.shape[1] - crop_width)'], {}), '(0, image.shape[1] - crop_width)\n', (5773, 5805), True, 'import numpy as np\n'), ((5820, 5870), 'numpy.random.randint', 'np.random.randint', (['(0)', '(image.shape[0] - crop_height)'], {}), '(0, image.shape[0] - crop_height)\n', (5837, 5870), True, 'import numpy as np\n'), ((6429, 6449), 'random.choice', 'random.choice', (['files'], {}), '(files)\n', (6442, 6449), False, 'import random\n'), ((6503, 6523), 'PIL.Image.open', 'Image.open', (['file_cur'], {}), '(file_cur)\n', (6513, 6523), False, 'from PIL import Image\n'), ((8936, 8968), 'numpy.transpose', 'np.transpose', (['imgs', '[0, 3, 1, 2]'], {}), '(imgs, [0, 3, 1, 2])\n', (8948, 8968), 
True, 'import numpy as np\n'), ((8992, 9028), 'numpy.transpose', 'np.transpose', (['textures', '[0, 3, 1, 2]'], {}), '(textures, [0, 3, 1, 2])\n', (9004, 9028), True, 'import numpy as np\n'), ((9049, 9082), 'numpy.transpose', 'np.transpose', (['masks', '[0, 3, 1, 2]'], {}), '(masks, [0, 3, 1, 2])\n', (9061, 9082), True, 'import numpy as np\n'), ((9765, 9801), 'numpy.uint8', 'np.uint8', (["texture_mask[i]['texture']"], {}), "(texture_mask[i]['texture'])\n", (9773, 9801), True, 'import numpy as np\n'), ((2770, 2797), 'PIL.Image.open', 'Image.open', (['imgs[img_index]'], {}), '(imgs[img_index])\n', (2780, 2797), False, 'from PIL import Image\n'), ((3046, 3081), 'PIL.Image.open', 'Image.open', (['textures[texture_index]'], {}), '(textures[texture_index])\n', (3056, 3081), False, 'from PIL import Image\n'), ((3829, 3852), 'numpy.array', 'np.array', (['textures_data'], {}), '(textures_data)\n', (3837, 3852), True, 'import numpy as np\n'), ((5072, 5130), 'numpy.random.randint', 'np.random.randint', (['(0)', 'self.img_size'], {'size': '(n_points[i], 2)'}), '(0, self.img_size, size=(n_points[i], 2))\n', (5089, 5130), True, 'import numpy as np\n'), ((5238, 5282), 'numpy.sqrt', 'np.sqrt', (['((xs - p[0]) ** 2 + (ys - p[1]) ** 2)'], {}), '((xs - p[0]) ** 2 + (ys - p[1]) ** 2)\n', (5245, 5282), True, 'import numpy as np\n'), ((6376, 6404), 'os.path.join', 'os.path.join', (['folder', 'format'], {}), '(folder, format)\n', (6388, 6404), False, 'import os\n'), ((6858, 6878), 'random.choice', 'random.choice', (['files'], {}), '(files)\n', (6871, 6878), False, 'import random\n'), ((8391, 8423), 'random.shuffle', 'random.shuffle', (['img_texture_mask'], {}), '(img_texture_mask)\n', (8405, 8423), False, 'import random\n'), ((6920, 6940), 'PIL.Image.open', 'Image.open', (['file_cur'], {}), '(file_cur)\n', (6930, 6940), False, 'from PIL import Image\n'), ((7999, 8045), 'numpy.random.choice', 'np.random.choice', (['self.segmentation_regions', '(1)'], {}), '(self.segmentation_regions, 1)\n', (8015, 8045), True, 'import numpy as np\n'), ((8651, 8665), 'numpy.array', 'np.array', (['imgs'], {}), '(imgs)\n', (8659, 8665), True, 'import numpy as np\n'), ((8744, 8762), 'numpy.array', 'np.array', (['textures'], {}), '(textures)\n', (8752, 8762), True, 'import numpy as np\n'), ((8846, 8861), 'numpy.array', 'np.array', (['masks'], {}), '(masks)\n', (8854, 8861), True, 'import numpy as np\n'), ((6616, 6635), 'numpy.asarray', 'np.asarray', (['img_cur'], {}), '(img_cur)\n', (6626, 6635), True, 'import numpy as np\n'), ((3547, 3566), 'numpy.asarray', 'np.asarray', (['img_cur'], {}), '(img_cur)\n', (3557, 3566), True, 'import numpy as np\n'), ((2923, 2942), 'numpy.asarray', 'np.asarray', (['img_cur'], {}), '(img_cur)\n', (2933, 2942), True, 'import numpy as np\n'), ((3227, 3250), 'numpy.asarray', 'np.asarray', (['texture_cur'], {}), '(texture_cur)\n', (3237, 3250), True, 'import numpy as np\n')]
|
# -*- coding: utf-8 -*-
import base64
import os
def loader(file, decoder, split=True):
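    # Build the path to the bundled static/<file> data and decode it with bytes.fromhex or
    # base64.b64decode, either line by line (split=True) or as one blob.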
file_dir = os.path.dirname(os.path.abspath(os.path.realpath(__file__)))
cdata_path = os.path.join(os.path.split(file_dir)[0], "static", file)
if decoder == "hexstring":
decoder = bytes.fromhex
elif decoder == "base64":
decoder = base64.b64decode
with open(cdata_path) as f:
if split:
return list(map(decoder, f))
else:
return decoder(f.read())
|
[
"os.path.realpath",
"os.path.split"
] |
[((136, 162), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (152, 162), False, 'import os\n'), ((195, 218), 'os.path.split', 'os.path.split', (['file_dir'], {}), '(file_dir)\n', (208, 218), False, 'import os\n')]
|
from django.test import TestCase
from dbcron.calendar import JobCalendar
from dbcron import models
from dbcron.tests.factories import JobFactory
class JobCalendarFormatMonthTest(TestCase):
factory = JobFactory
jobs = models.Job.objects.all()
def test_meth(self):
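        # Smoke test: formatmonth should render without errors for every month of 2000-2004.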
self.factory.create_batch(5, min=0, hou=0)
calendar = JobCalendar(self.jobs)
for year in range(2000, 2005):
for month in range(1, 13):
html = calendar.formatmonth(year, month)
class JobCalendarFormatWeekTest(TestCase):
factory = JobFactory
jobs = models.Job.objects.all()
def test_meth(self):
self.factory.create_batch(5, min=0, hou=0)
calendar = JobCalendar(self.jobs)
for year in range(2000, 2005):
for week in range(1, 53):
html = calendar.formatweekofmonth(year, week)
|
[
"dbcron.calendar.JobCalendar",
"dbcron.models.Job.objects.all"
] |
[((227, 251), 'dbcron.models.Job.objects.all', 'models.Job.objects.all', ([], {}), '()\n', (249, 251), False, 'from dbcron import models\n'), ((587, 611), 'dbcron.models.Job.objects.all', 'models.Job.objects.all', ([], {}), '()\n', (609, 611), False, 'from dbcron import models\n'), ((348, 370), 'dbcron.calendar.JobCalendar', 'JobCalendar', (['self.jobs'], {}), '(self.jobs)\n', (359, 370), False, 'from dbcron.calendar import JobCalendar\n'), ((708, 730), 'dbcron.calendar.JobCalendar', 'JobCalendar', (['self.jobs'], {}), '(self.jobs)\n', (719, 730), False, 'from dbcron.calendar import JobCalendar\n')]
|
#!/usr/bin/python3
# _*_ coding: utf-8 _*_
# @Time : 2022/1/31 19:21
import re
import os
import pandas as pd
from mirsnp.parse_rnahybrid import parse_rnahybrid, get_position, get_mirna_name, get_trans_name
from mirsnp.utils import check_outputf, DEFAULT_ENERGY, INTERVAL
def get_pt_map(pt):
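    # Map 'transcript:miRNA:position' to the raw RNAhybrid alignment block
    # (newlines replaced by '#') for later lookup by get_target_pt.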
pt_map = {}
with open(pt) as fr:
aligns = re.findall(f"target: .*?5'\n\n\n", fr.read(), re.S)
for al in aligns:
position = get_position(al)
trans = get_trans_name(al)
mirna = get_mirna_name(al)
flag = f"{trans}:{mirna}:{position}"
pt_map[flag] = al.replace('\n', '#')
return pt_map
def get_target_pt(flag, pt_map):
fs = flag.split(':')
k = f"{fs[0]}:{fs[1]}:{fs[-1]}"
return pt_map.get(k, "")
def parse_common_variants(variants_file):
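    # Read the tab-separated variants table and return transcript->gene and
    # transcript->strand lookup dicts.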
df = pd.read_csv(variants_file, sep='\t')
gene_map = dict(zip(df['transcript'], df['gene']))
strand_map = dict(zip(df['transcript'], df['strand']))
return gene_map, strand_map
def isin_interval(tpos, seq_pos):
tpos = int(tpos)
start, end = seq_pos.split(' ')[1].split('-')
if int(start) <= tpos <= int(end):
return 'Yes'
return 'No'
def get_target_pos(mirna53, target53, flag, interval):
pos = int(flag.split(':')[-1])
tpos = int(flag.split(':')[0].split('_')[6])
target35 = ''.join(reversed(target53))
pos_range = []
m = 0
for i in target53:
if i != '-':
pos_range.append(pos+m)
m += 1
else:
pos_range.append(-1)
rev_range = list(reversed(pos_range))
spos = 1
mirna_seqs = []
target_seqs = []
target_poss = []
for n, b in enumerate(mirna53):
tb = target35[n]
if b != '-':
if interval[0] <= spos <= interval[1]:
mirna_seqs.append(b)
target_seqs.append(tb)
target_poss.append(rev_range[n])
spos += 1
else:
if interval[0] <= spos <= interval[1]:
target_seqs.append(tb)
target_poss.append(rev_range[n])
mirna_seq = ''.join(mirna_seqs)
target_seq = ''.join(reversed(target_seqs)).replace('-', '')
fpos = list(filter(lambda x: x != -1, target_poss))
target_poss = list(reversed(fpos))
target_seq_pos = f"{target_seq} {target_poss[0]}-{target_poss[-1]}"
intgt = isin_interval(tpos, target_seq_pos)
# "{}|{}|{}".format(mirna_seq, target_seq_pos, intgt)
return intgt
def fmt_pt(pt, tag, trans_info, interval=INTERVAL):
pp = parse_rnahybrid(pt, trans_info)
pt_map = get_pt_map(pt)
df = pd.read_csv(pp, sep='\t', low_memory=False)
if not df.empty:
if df.shape[0] >= 1 and df.shape[1] > 3:
df = df[df['region'] == 1].drop_duplicates()
df['flag'] = df["transcript"] + ":" + df["miRNA"].astype(str) + \
":" + df['gene'].astype(str) + \
":" + df['strand'].astype(str) + \
":" + df['position'].astype(str)
df = df[["flag", "pvalue", "energy", "miRNA_seq(5'-3')", "target_seq(5'-3')"]]
# itv = f"{interval[0]}-{interval[1]}"
df['variant_in_2-8'] = df.apply(lambda row: get_target_pos(
row["miRNA_seq(5'-3')"], row["target_seq(5'-3')"], row["flag"], interval), axis=1)
df['pattern'] = df['flag'].apply(lambda x: get_target_pt(x, pt_map))
df = df.set_index('flag', drop=True)
df = df.add_prefix('{}_'.format(tag))
return df
def cal_dist(flags):
# ENST00000551241_rs1859333_12_112933161_T_C_36:1364:OAS1:+:29
res = []
for flag in flags:
ar = flag.split(':')
variant_pos = ar[0].split('_')[-1]
tpos = ar[-1]
if variant_pos.isdigit() and tpos.isdigit():
res.append(str(int(variant_pos) - int(tpos)))
else:
res.append('NA')
return res
def get_diff(x, y):
if "NA" not in [x, y]:
diff = round(float(y) - float(x), 2)
if diff < 0:
return [diff, "More stable"]
elif diff > 0:
return [diff, "Less stable"]
else:
return [diff, "No change"]
return ['NA', 'NA']
def get_energy_classify(x, y):
if "NA" in [x, y]:
if x == "NA":
return "new"
elif y == "NA":
return "off-target"
return 'consistent'
def classify_and_sort(df, default_energy=DEFAULT_ENERGY):
"""根据能量值差异分类"""
if not df.empty:
df = df.copy()
df['classify'] = df.apply(lambda row: get_energy_classify(row["ref_target_seq(5'-3')"], row["alt_target_seq(5'-3')"]), axis=1)
df.loc[df.classify == 'new', 'ref_energy'] = default_energy
df.loc[df.classify == 'off-target', 'alt_energy'] = default_energy
df['diff_energy'] = df.apply(lambda row: get_diff(row['ref_energy'], row['alt_energy'])[0], axis=1)
df['state'] = df.apply(lambda row: get_diff(row['ref_energy'], row['alt_energy'])[1], axis=1)
return df
def merge_refalt(refpt, altpt, outdir, trans_info,
name='f', td='raw', interval=INTERVAL):
df_ref = fmt_pt(refpt, 'ref', trans_info, interval=interval)
df_alt = fmt_pt(altpt, 'alt', trans_info, interval=interval)
output_all = os.path.join(outdir, f"energy_stats-{name}-result.csv")
dfm = None
if df_ref is not None and df_alt is not None:
dfm = pd.merge(df_ref, df_alt, left_index=True, right_index=True, how='outer').fillna('NA')
dfm = classify_and_sort(dfm)
dfm['transcript_direction'] = td
dfm = dfm[dfm['state'] != 'No change']
dfm = dfm.drop_duplicates()
dfm['target_position_to_variant_position'] = cal_dist(dfm.index)
dfm.to_csv(output_all)
check_outputf(output_all)
else:
print('either {} or {} is null, no output!'.format(refpt, altpt))
return dfm
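# A minimal usage sketch, not part of the original module. All paths below are
# hypothetical: ref.pt/alt.pt stand for RNAhybrid outputs for the reference and
# alternative alleles, and trans_info is whatever transcript annotation
# parse_rnahybrid() expects.
if __name__ == "__main__":
    dfm = merge_refalt(
        refpt="ref.pt",
        altpt="alt.pt",
        outdir="out",
        trans_info="trans_info.txt",
        name="demo",
        td="raw",
    )
    if dfm is not None:
        print(dfm[["ref_energy", "alt_energy", "diff_energy", "state"]].head())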
|
[
"mirsnp.utils.check_outputf",
"mirsnp.parse_rnahybrid.get_trans_name",
"pandas.read_csv",
"mirsnp.parse_rnahybrid.get_position",
"pandas.merge",
"mirsnp.parse_rnahybrid.parse_rnahybrid",
"os.path.join",
"mirsnp.parse_rnahybrid.get_mirna_name"
] |
[((847, 883), 'pandas.read_csv', 'pd.read_csv', (['variants_file'], {'sep': '"""\t"""'}), "(variants_file, sep='\\t')\n", (858, 883), True, 'import pandas as pd\n'), ((2581, 2612), 'mirsnp.parse_rnahybrid.parse_rnahybrid', 'parse_rnahybrid', (['pt', 'trans_info'], {}), '(pt, trans_info)\n', (2596, 2612), False, 'from mirsnp.parse_rnahybrid import parse_rnahybrid, get_position, get_mirna_name, get_trans_name\n'), ((2650, 2693), 'pandas.read_csv', 'pd.read_csv', (['pp'], {'sep': '"""\t"""', 'low_memory': '(False)'}), "(pp, sep='\\t', low_memory=False)\n", (2661, 2693), True, 'import pandas as pd\n'), ((5353, 5408), 'os.path.join', 'os.path.join', (['outdir', 'f"""energy_stats-{name}-result.csv"""'], {}), "(outdir, f'energy_stats-{name}-result.csv')\n", (5365, 5408), False, 'import os\n'), ((5848, 5873), 'mirsnp.utils.check_outputf', 'check_outputf', (['output_all'], {}), '(output_all)\n', (5861, 5873), False, 'from mirsnp.utils import check_outputf, DEFAULT_ENERGY, INTERVAL\n'), ((458, 474), 'mirsnp.parse_rnahybrid.get_position', 'get_position', (['al'], {}), '(al)\n', (470, 474), False, 'from mirsnp.parse_rnahybrid import parse_rnahybrid, get_position, get_mirna_name, get_trans_name\n'), ((495, 513), 'mirsnp.parse_rnahybrid.get_trans_name', 'get_trans_name', (['al'], {}), '(al)\n', (509, 513), False, 'from mirsnp.parse_rnahybrid import parse_rnahybrid, get_position, get_mirna_name, get_trans_name\n'), ((534, 552), 'mirsnp.parse_rnahybrid.get_mirna_name', 'get_mirna_name', (['al'], {}), '(al)\n', (548, 552), False, 'from mirsnp.parse_rnahybrid import parse_rnahybrid, get_position, get_mirna_name, get_trans_name\n'), ((5488, 5560), 'pandas.merge', 'pd.merge', (['df_ref', 'df_alt'], {'left_index': '(True)', 'right_index': '(True)', 'how': '"""outer"""'}), "(df_ref, df_alt, left_index=True, right_index=True, how='outer')\n", (5496, 5560), True, 'import pandas as pd\n')]
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
#
# Copyright 2015-2019 European Commission (JRC);
# Licensed under the EUPL (the 'Licence');
# You may not use this work except in compliance with the Licence.
# You may obtain a copy of the Licence at: http://ec.europa.eu/idabc/eupl
import doctest
import io
import re
import subprocess
import unittest
from unittest.mock import patch
from wltp import cli
import wltp
import os.path as osp
mydir = osp.dirname(__file__)
proj_path = osp.join(mydir, "..")
readme_path = osp.join(proj_path, "README.rst")
class Doctest(unittest.TestCase):
def test_README_version_reldate_opening(self):
ver = wltp.__version__
reldate = wltp.__updated__
header_len = 20
mydir = osp.dirname(__file__)
ver_found = rdate_found = False
with open(readme_path) as fd:
for i, l in zip(range(header_len), fd):
if ver in l:
ver_found = True
                if reldate in l:
rdate_found = True
if not ver_found:
msg = "Version(%s) not found in README %s header-lines!"
raise AssertionError(msg % (ver, header_len))
if not rdate_found:
msg = "RelDate(%s) not found in README %s header-lines!"
raise AssertionError(msg % (reldate, header_len))
def test_README_version_from_cmdline(self):
ver = wltp.__version__
with open(readme_path) as fd:
ftext = fd.read()
with patch("sys.stdout", new=io.StringIO()) as stdout:
try:
cli.main(["--version"])
except SystemExit:
pass ## Cancel argparse's exit()
proj_ver = stdout.getvalue().strip()
assert proj_ver
self.assertIn(
proj_ver,
ftext,
"Version(%s) not found in README cmd-line version-check!" % ver,
)
def test_README_as_PyPi_landing_page(self):
from docutils import core as dcore
long_desc = subprocess.check_output(
"python setup.py --long-description".split(), cwd=proj_path
)
self.assertIsNotNone(long_desc, "Long_desc is null!")
with patch("sys.exit"):
dcore.publish_string(
long_desc,
enable_exit_status=False,
settings_overrides={ # see `docutils.frontend` for more.
"halt_level": 2 # 2=WARN, 1=INFO
},
)
class TestDoctest(unittest.TestCase):
def test_doctests(self):
failure_count, test_count = doctest.testmod(
wltp.datamodel, optionflags=doctest.NORMALIZE_WHITESPACE | doctest.ELLIPSIS
)
self.assertGreater(test_count, 0, (failure_count, test_count))
self.assertEqual(failure_count, 0, (failure_count, test_count))
|
[
"wltp.cli.main",
"io.StringIO",
"docutils.core.publish_string",
"os.path.dirname",
"unittest.mock.patch",
"os.path.join",
"doctest.testmod"
] |
[((450, 471), 'os.path.dirname', 'osp.dirname', (['__file__'], {}), '(__file__)\n', (461, 471), True, 'import os.path as osp\n'), ((484, 505), 'os.path.join', 'osp.join', (['mydir', '""".."""'], {}), "(mydir, '..')\n", (492, 505), True, 'import os.path as osp\n'), ((520, 553), 'os.path.join', 'osp.join', (['proj_path', '"""README.rst"""'], {}), "(proj_path, 'README.rst')\n", (528, 553), True, 'import os.path as osp\n'), ((747, 768), 'os.path.dirname', 'osp.dirname', (['__file__'], {}), '(__file__)\n', (758, 768), True, 'import os.path as osp\n'), ((2655, 2751), 'doctest.testmod', 'doctest.testmod', (['wltp.datamodel'], {'optionflags': '(doctest.NORMALIZE_WHITESPACE | doctest.ELLIPSIS)'}), '(wltp.datamodel, optionflags=doctest.NORMALIZE_WHITESPACE |\n doctest.ELLIPSIS)\n', (2670, 2751), False, 'import doctest\n'), ((2267, 2284), 'unittest.mock.patch', 'patch', (['"""sys.exit"""'], {}), "('sys.exit')\n", (2272, 2284), False, 'from unittest.mock import patch\n'), ((2298, 2397), 'docutils.core.publish_string', 'dcore.publish_string', (['long_desc'], {'enable_exit_status': '(False)', 'settings_overrides': "{'halt_level': 2}"}), "(long_desc, enable_exit_status=False,\n settings_overrides={'halt_level': 2})\n", (2318, 2397), True, 'from docutils import core as dcore\n'), ((1610, 1633), 'wltp.cli.main', 'cli.main', (["['--version']"], {}), "(['--version'])\n", (1618, 1633), False, 'from wltp import cli\n'), ((1543, 1556), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (1554, 1556), False, 'import io\n')]
|
# -*- coding: utf-8 -*-
"""Connecting different investment variables.
This file is part of project oemof (github.com/oemof/oemof). It's copyrighted
by the contributors recorded in the version control history of the file,
available from its original location
oemof/tests/test_scripts/test_solph/test_connect_invest/test_connect_invest.py
SPDX-License-Identifier: MIT
"""
from nose.tools import eq_
import oemof.solph as solph
from oemof.outputlib import processing, views
import logging
import os
import pandas as pd
from oemof.network import Node
def test_connect_invest():
date_time_index = pd.date_range('1/1/2012', periods=24 * 7, freq='H')
energysystem = solph.EnergySystem(timeindex=date_time_index)
Node.registry = energysystem
# Read data file
full_filename = os.path.join(os.path.dirname(__file__),
'connect_invest.csv')
data = pd.read_csv(full_filename, sep=",")
logging.info('Create oemof objects')
# create electricity bus
bel1 = solph.Bus(label="electricity1")
bel2 = solph.Bus(label="electricity2")
# create excess component for the electricity bus to allow overproduction
solph.Sink(label='excess_bel', inputs={bel2: solph.Flow()})
solph.Source(label='shortage', outputs={bel2: solph.Flow(
variable_costs=50000)})
# create fixed source object representing wind power plants
solph.Source(label='wind', outputs={bel1: solph.Flow(
actual_value=data['wind'], nominal_value=1000000, fixed=True)})
# create simple sink object representing the electrical demand
solph.Sink(label='demand', inputs={bel1: solph.Flow(
actual_value=data['demand_el'], fixed=True, nominal_value=1)})
storage = solph.components.GenericStorage(
label='storage',
inputs={bel1: solph.Flow(variable_costs=10e10)},
outputs={bel1: solph.Flow(variable_costs=10e10)},
loss_rate=0.00, initial_storage_level=0,
invest_relation_input_capacity=1/6,
invest_relation_output_capacity=1/6,
inflow_conversion_factor=1, outflow_conversion_factor=0.8,
investment=solph.Investment(ep_costs=0.2),
)
line12 = solph.Transformer(
label="line12",
inputs={bel1: solph.Flow()},
outputs={bel2: solph.Flow(investment=solph.Investment(ep_costs=20))})
line21 = solph.Transformer(
label="line21",
inputs={bel2: solph.Flow()},
outputs={bel1: solph.Flow(investment=solph.Investment(ep_costs=20))})
om = solph.Model(energysystem)
solph.constraints.equate_variables(
om, om.InvestmentFlow.invest[line12, bel2],
om.InvestmentFlow.invest[line21, bel1], 2)
solph.constraints.equate_variables(
om, om.InvestmentFlow.invest[line12, bel2],
om.GenericInvestmentStorageBlock.invest[storage])
# if tee_switch is true solver messages will be displayed
logging.info('Solve the optimization problem')
om.solve(solver='cbc')
# check if the new result object is working for custom components
results = processing.results(om)
my_results = dict()
my_results['line12'] = float(views.node(results, 'line12')['scalars'])
my_results['line21'] = float(views.node(results, 'line21')['scalars'])
stor_res = views.node(results, 'storage')['scalars']
my_results['storage_in'] = stor_res[
(('electricity1', 'storage'), 'invest')]
my_results['storage'] = stor_res[(('storage', 'None'), 'invest')]
my_results['storage_out'] = stor_res[
(('storage', 'electricity1'), 'invest')]
connect_invest_dict = {
'line12': 814705,
'line21': 1629410,
'storage': 814705,
'storage_in': 135784,
'storage_out': 135784}
for key in connect_invest_dict.keys():
eq_(int(round(my_results[key])), int(round(connect_invest_dict[key])))
|
[
"oemof.solph.EnergySystem",
"oemof.solph.Bus",
"pandas.date_range",
"oemof.outputlib.views.node",
"oemof.solph.Investment",
"pandas.read_csv",
"os.path.dirname",
"oemof.solph.Flow",
"logging.info",
"oemof.outputlib.processing.results",
"oemof.solph.constraints.equate_variables",
"oemof.solph.Model"
] |
[((603, 654), 'pandas.date_range', 'pd.date_range', (['"""1/1/2012"""'], {'periods': '(24 * 7)', 'freq': '"""H"""'}), "('1/1/2012', periods=24 * 7, freq='H')\n", (616, 654), True, 'import pandas as pd\n'), ((675, 720), 'oemof.solph.EnergySystem', 'solph.EnergySystem', ([], {'timeindex': 'date_time_index'}), '(timeindex=date_time_index)\n', (693, 720), True, 'import oemof.solph as solph\n'), ((906, 941), 'pandas.read_csv', 'pd.read_csv', (['full_filename'], {'sep': '""","""'}), "(full_filename, sep=',')\n", (917, 941), True, 'import pandas as pd\n'), ((947, 983), 'logging.info', 'logging.info', (['"""Create oemof objects"""'], {}), "('Create oemof objects')\n", (959, 983), False, 'import logging\n'), ((1025, 1056), 'oemof.solph.Bus', 'solph.Bus', ([], {'label': '"""electricity1"""'}), "(label='electricity1')\n", (1034, 1056), True, 'import oemof.solph as solph\n'), ((1068, 1099), 'oemof.solph.Bus', 'solph.Bus', ([], {'label': '"""electricity2"""'}), "(label='electricity2')\n", (1077, 1099), True, 'import oemof.solph as solph\n'), ((2532, 2557), 'oemof.solph.Model', 'solph.Model', (['energysystem'], {}), '(energysystem)\n', (2543, 2557), True, 'import oemof.solph as solph\n'), ((2563, 2688), 'oemof.solph.constraints.equate_variables', 'solph.constraints.equate_variables', (['om', 'om.InvestmentFlow.invest[line12, bel2]', 'om.InvestmentFlow.invest[line21, bel1]', '(2)'], {}), '(om, om.InvestmentFlow.invest[line12,\n bel2], om.InvestmentFlow.invest[line21, bel1], 2)\n', (2597, 2688), True, 'import oemof.solph as solph\n'), ((2706, 2838), 'oemof.solph.constraints.equate_variables', 'solph.constraints.equate_variables', (['om', 'om.InvestmentFlow.invest[line12, bel2]', 'om.GenericInvestmentStorageBlock.invest[storage]'], {}), '(om, om.InvestmentFlow.invest[line12,\n bel2], om.GenericInvestmentStorageBlock.invest[storage])\n', (2740, 2838), True, 'import oemof.solph as solph\n'), ((2919, 2965), 'logging.info', 'logging.info', (['"""Solve the optimization problem"""'], {}), "('Solve the optimization problem')\n", (2931, 2965), False, 'import logging\n'), ((3078, 3100), 'oemof.outputlib.processing.results', 'processing.results', (['om'], {}), '(om)\n', (3096, 3100), False, 'from oemof.outputlib import processing, views\n'), ((813, 838), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (828, 838), False, 'import os\n'), ((3291, 3321), 'oemof.outputlib.views.node', 'views.node', (['results', '"""storage"""'], {}), "(results, 'storage')\n", (3301, 3321), False, 'from oemof.outputlib import processing, views\n'), ((2140, 2170), 'oemof.solph.Investment', 'solph.Investment', ([], {'ep_costs': '(0.2)'}), '(ep_costs=0.2)\n', (2156, 2170), True, 'import oemof.solph as solph\n'), ((3159, 3188), 'oemof.outputlib.views.node', 'views.node', (['results', '"""line12"""'], {}), "(results, 'line12')\n", (3169, 3188), False, 'from oemof.outputlib import processing, views\n'), ((3234, 3263), 'oemof.outputlib.views.node', 'views.node', (['results', '"""line21"""'], {}), "(results, 'line21')\n", (3244, 3263), False, 'from oemof.outputlib import processing, views\n'), ((1228, 1240), 'oemof.solph.Flow', 'solph.Flow', ([], {}), '()\n', (1238, 1240), True, 'import oemof.solph as solph\n'), ((1293, 1325), 'oemof.solph.Flow', 'solph.Flow', ([], {'variable_costs': '(50000)'}), '(variable_costs=50000)\n', (1303, 1325), True, 'import oemof.solph as solph\n'), ((1448, 1520), 'oemof.solph.Flow', 'solph.Flow', ([], {'actual_value': "data['wind']", 'nominal_value': '(1000000)', 'fixed': '(True)'}), 
"(actual_value=data['wind'], nominal_value=1000000, fixed=True)\n", (1458, 1520), True, 'import oemof.solph as solph\n'), ((1645, 1716), 'oemof.solph.Flow', 'solph.Flow', ([], {'actual_value': "data['demand_el']", 'fixed': '(True)', 'nominal_value': '(1)'}), "(actual_value=data['demand_el'], fixed=True, nominal_value=1)\n", (1655, 1716), True, 'import oemof.solph as solph\n'), ((1823, 1864), 'oemof.solph.Flow', 'solph.Flow', ([], {'variable_costs': '(100000000000.0)'}), '(variable_costs=100000000000.0)\n', (1833, 1864), True, 'import oemof.solph as solph\n'), ((1881, 1922), 'oemof.solph.Flow', 'solph.Flow', ([], {'variable_costs': '(100000000000.0)'}), '(variable_costs=100000000000.0)\n', (1891, 1922), True, 'import oemof.solph as solph\n'), ((2257, 2269), 'oemof.solph.Flow', 'solph.Flow', ([], {}), '()\n', (2267, 2269), True, 'import oemof.solph as solph\n'), ((2429, 2441), 'oemof.solph.Flow', 'solph.Flow', ([], {}), '()\n', (2439, 2441), True, 'import oemof.solph as solph\n'), ((2317, 2346), 'oemof.solph.Investment', 'solph.Investment', ([], {'ep_costs': '(20)'}), '(ep_costs=20)\n', (2333, 2346), True, 'import oemof.solph as solph\n'), ((2489, 2518), 'oemof.solph.Investment', 'solph.Investment', ([], {'ep_costs': '(20)'}), '(ep_costs=20)\n', (2505, 2518), True, 'import oemof.solph as solph\n')]
|
# naive brute-force method: takes over 30 hours to run on this machine!!!
from functools import reduce
from fast_prime import primes, is_prime
import time
start = time.time()
p = primes(1000000)
#p = primes(100000)
long_num = 0
long_sum = 0
print('prime time = ', time.time() - start)
start = time.time()
for x in p[::-1]:
for i in range(len(p)):
sum_list = []
sum = 0
count = i
while sum < x:
sum_list.append(p[count])
#print(sum_list)
sum = reduce(lambda x,y: x+y, sum_list)
if sum == x:
if len(sum_list) > long_sum:
long_sum = len(sum_list)
long_num = x
count += 1
print('consecutive sum of primes took ', time.time() - start)
print(long_sum)
print(long_num)
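# A faster alternative sketch, not part of the original script: the quadratic
# scan above rebuilds every running sum with reduce(). A prefix-sum array plus
# a set for O(1) primality checks searches the same space in seconds.
def longest_consecutive_prime_sum(limit=1000000):
    ps = primes(limit)
    prime_set = set(ps)
    prefix = [0]
    for q in ps:
        prefix.append(prefix[-1] + q)
    best_len, best_num = 0, 0
    for i in range(len(ps)):
        # only windows longer than the current best can improve the answer
        for j in range(i + best_len + 1, len(prefix)):
            s = prefix[j] - prefix[i]
            if s >= limit:
                break
            if s in prime_set and j - i > best_len:
                best_len, best_num = j - i, s
    return best_len, best_num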
|
[
"functools.reduce",
"fast_prime.primes",
"time.time"
] |
[((155, 166), 'time.time', 'time.time', ([], {}), '()\n', (164, 166), False, 'import time\n'), ((172, 187), 'fast_prime.primes', 'primes', (['(1000000)'], {}), '(1000000)\n', (178, 187), False, 'from fast_prime import primes, is_prime\n'), ((288, 299), 'time.time', 'time.time', ([], {}), '()\n', (297, 299), False, 'import time\n'), ((258, 269), 'time.time', 'time.time', ([], {}), '()\n', (267, 269), False, 'import time\n'), ((757, 768), 'time.time', 'time.time', ([], {}), '()\n', (766, 768), False, 'import time\n'), ((510, 546), 'functools.reduce', 'reduce', (['(lambda x, y: x + y)', 'sum_list'], {}), '(lambda x, y: x + y, sum_list)\n', (516, 546), False, 'from functools import reduce\n')]
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(name='django-async-messages-redux',
version='0.4.1',
url='https://github.com/maurizi/django-async-messages',
author='<NAME>',
author_email='<EMAIL>',
description="Send asynchronous messages to users (eg from offline scripts). Useful for integration with Celery.",
long_description=open('README.rst').read(),
packages=find_packages(exclude=['tests']),
install_requires=['django>=1.4'],
)
|
[
"setuptools.find_packages"
] |
[((432, 464), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "['tests']"}), "(exclude=['tests'])\n", (445, 464), False, 'from setuptools import setup, find_packages\n')]
|
"""empty message
Revision ID: 13934f10a019
Revises:
Create Date: 2019-10-04 16:58:16.891916
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '13934f10a019'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('address',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('street_address', sa.String(length=255), nullable=True),
sa.Column('street_number', sa.String(length=255), nullable=True),
sa.Column('route', sa.String(length=255), nullable=True),
sa.Column('apt_number', sa.String(length=255), nullable=True),
sa.Column('locality', sa.String(length=255), nullable=True),
sa.Column('administrative_area_level_1', sa.String(length=2), nullable=True),
sa.Column('postal_code', sa.String(length=255), nullable=True),
sa.Column('county', sa.String(length=255), nullable=True),
sa.Column('country', sa.String(length=255), nullable=True),
sa.Column('latitude', sa.Numeric(precision=9, scale=6), nullable=True),
sa.Column('longitude', sa.Numeric(precision=9, scale=6), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_table('role',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=80), nullable=True),
sa.Column('description', sa.String(length=255), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name')
)
op.create_table('user',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('email', sa.String(length=255), nullable=True),
sa.Column('password', sa.String(length=255), nullable=True),
sa.Column('active', sa.Boolean(), nullable=True),
sa.Column('confirmed_at', sa.DateTime(), nullable=True),
sa.Column('last_login_at', sa.DateTime(), nullable=True),
sa.Column('current_login_at', sa.DateTime(), nullable=True),
sa.Column('last_login_ip', sa.String(length=40), nullable=True),
sa.Column('current_login_ip', sa.String(length=40), nullable=True),
sa.Column('login_count', sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('email')
)
op.create_table('property',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('address_id', sa.Integer(), nullable=True),
sa.Column('sq_foot', sa.Integer(), nullable=True),
sa.Column('year_built', sa.Integer(), nullable=True),
sa.Column('stories_count', sa.Integer(), nullable=True),
sa.Column('basement_type', sa.String(length=255), nullable=True),
sa.Column('addition_type', sa.String(length=255), nullable=True),
sa.Column('bedroom_count', sa.Integer(), nullable=True),
sa.Column('full_bath_count', sa.Integer(), nullable=True),
sa.Column('half_bath_count', sa.Integer(), nullable=True),
sa.Column('parking_type', sa.String(length=255), nullable=True),
sa.Column('countertops', sa.String(length=255), nullable=True),
sa.Column('appliances', sa.String(length=255), nullable=True),
sa.Column('double_oven_ind', sa.Boolean(), nullable=True),
sa.Column('walk_in_pantry_ind', sa.Boolean(), nullable=True),
sa.Column('separate_cooktop_ind', sa.Boolean(), nullable=True),
sa.Column('built_in_oven_ind', sa.Boolean(), nullable=True),
sa.Column('built_in_microwave_ind', sa.Boolean(), nullable=True),
sa.Column('kitchen_flooring', sa.String(length=255), nullable=True),
sa.Column('main_flooring', sa.String(length=255), nullable=True),
sa.Column('bathroom_flooring_tile_ind', sa.Boolean(), nullable=True),
sa.Column('bathroom_flooring_vinyl_ind', sa.Boolean(), nullable=True),
sa.Column('bathroom_flooring_laminate_ind', sa.Boolean(), nullable=True),
sa.Column('bathroom_flooring_hardwood_ind', sa.Boolean(), nullable=True),
sa.Column('bathroom_flooring_travertine_ind', sa.Boolean(), nullable=True),
sa.Column('bathroom_flooring_saltillo_tile_ind', sa.Boolean(), nullable=True),
sa.Column('bathroom_flooring_carpet_ind', sa.Boolean(), nullable=True),
sa.Column('bathroom_flooring_woodplank_tile_ind', sa.Boolean(), nullable=True),
sa.Column('bathroom_flooring_concrete_ind', sa.Boolean(), nullable=True),
sa.Column('bathroom_flooring_other_ind', sa.Boolean(), nullable=True),
sa.Column('bedroom_flooring_tile_ind', sa.Boolean(), nullable=True),
sa.Column('bedroom_flooring_vinyl_ind', sa.Boolean(), nullable=True),
sa.Column('bedroom_flooring_laminate_ind', sa.Boolean(), nullable=True),
sa.Column('bedroom_flooring_hardwood_ind', sa.Boolean(), nullable=True),
sa.Column('bedroom_flooring_travertine_ind', sa.Boolean(), nullable=True),
sa.Column('bedroom_flooring_saltillo_tile_ind', sa.Boolean(), nullable=True),
sa.Column('bedroom_flooring_carpet_ind', sa.Boolean(), nullable=True),
sa.Column('bedroom_flooring_woodplank_tile_ind', sa.Boolean(), nullable=True),
sa.Column('bedroom_flooring_concrete_ind', sa.Boolean(), nullable=True),
sa.Column('bedroom_flooring_other_ind', sa.Boolean(), nullable=True),
sa.Column('landscaping', sa.String(length=255), nullable=True),
sa.Column('pool', sa.Boolean(), nullable=True),
sa.Column('hottub', sa.Boolean(), nullable=True),
sa.Column('gated_community_ind', sa.Boolean(), nullable=True),
sa.Column('hoa_ind', sa.Boolean(), nullable=True),
sa.Column('age_restricted_ind', sa.Boolean(), nullable=True),
sa.Column('solar_panels_ind', sa.Boolean(), nullable=True),
sa.Column('septic_system_ind', sa.Boolean(), nullable=True),
sa.Column('well_water_ind', sa.Boolean(), nullable=True),
sa.Column('poor_location_ind', sa.Boolean(), nullable=True),
sa.Column('sinkholes_ind', sa.Boolean(), nullable=True),
sa.Column('foundation_issues', sa.Boolean(), nullable=True),
sa.Column('additional_info', sa.String(length=1500), nullable=True),
sa.Column('submitter_type', sa.String(length=255), nullable=True),
sa.Column('listed_ind', sa.Boolean(), nullable=True),
sa.Column('submitter_first_name', sa.String(length=255), nullable=True),
sa.Column('submitter_last_name', sa.String(length=255), nullable=True),
sa.Column('submitter_phone', sa.String(length=255), nullable=True),
sa.Column('submitter_email', sa.String(length=255), nullable=True),
sa.ForeignKeyConstraint(['address_id'], ['address.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('roles_users',
sa.Column('user_id', sa.Integer(), nullable=True),
sa.Column('role_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['role_id'], ['role.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], )
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('roles_users')
op.drop_table('property')
op.drop_table('user')
op.drop_table('role')
op.drop_table('address')
# ### end Alembic commands ###
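# Hedged note, not part of the generated migration: with this file on the
# revision path, the standard Alembic commands apply or roll it back, e.g.
#
#     alembic upgrade head     # runs upgrade() and creates the five tables
#     alembic downgrade -1     # runs downgrade() and drops them again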
|
[
"alembic.op.drop_table",
"sqlalchemy.DateTime",
"sqlalchemy.PrimaryKeyConstraint",
"sqlalchemy.Numeric",
"sqlalchemy.Boolean",
"sqlalchemy.UniqueConstraint",
"sqlalchemy.ForeignKeyConstraint",
"sqlalchemy.String",
"sqlalchemy.Integer"
] |
[((6854, 6882), 'alembic.op.drop_table', 'op.drop_table', (['"""roles_users"""'], {}), "('roles_users')\n", (6867, 6882), False, 'from alembic import op\n'), ((6887, 6912), 'alembic.op.drop_table', 'op.drop_table', (['"""property"""'], {}), "('property')\n", (6900, 6912), False, 'from alembic import op\n'), ((6917, 6938), 'alembic.op.drop_table', 'op.drop_table', (['"""user"""'], {}), "('user')\n", (6930, 6938), False, 'from alembic import op\n'), ((6943, 6964), 'alembic.op.drop_table', 'op.drop_table', (['"""role"""'], {}), "('role')\n", (6956, 6964), False, 'from alembic import op\n'), ((6969, 6993), 'alembic.op.drop_table', 'op.drop_table', (['"""address"""'], {}), "('address')\n", (6982, 6993), False, 'from alembic import op\n'), ((1209, 1238), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (1232, 1238), True, 'import sqlalchemy as sa\n'), ((1456, 1485), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (1479, 1485), True, 'import sqlalchemy as sa\n'), ((1491, 1518), 'sqlalchemy.UniqueConstraint', 'sa.UniqueConstraint', (['"""name"""'], {}), "('name')\n", (1510, 1518), True, 'import sqlalchemy as sa\n'), ((2177, 2206), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (2200, 2206), True, 'import sqlalchemy as sa\n'), ((2212, 2240), 'sqlalchemy.UniqueConstraint', 'sa.UniqueConstraint', (['"""email"""'], {}), "('email')\n", (2231, 2240), True, 'import sqlalchemy as sa\n'), ((6367, 6422), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['address_id']", "['address.id']"], {}), "(['address_id'], ['address.id'])\n", (6390, 6422), True, 'import sqlalchemy as sa\n'), ((6430, 6459), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (6453, 6459), True, 'import sqlalchemy as sa\n'), ((6615, 6664), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['role_id']", "['role.id']"], {}), "(['role_id'], ['role.id'])\n", (6638, 6664), True, 'import sqlalchemy as sa\n'), ((6672, 6721), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['user_id']", "['user.id']"], {}), "(['user_id'], ['user.id'])\n", (6695, 6721), True, 'import sqlalchemy as sa\n'), ((409, 421), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (419, 421), True, 'import sqlalchemy as sa\n'), ((472, 493), 'sqlalchemy.String', 'sa.String', ([], {'length': '(255)'}), '(length=255)\n', (481, 493), True, 'import sqlalchemy as sa\n'), ((542, 563), 'sqlalchemy.String', 'sa.String', ([], {'length': '(255)'}), '(length=255)\n', (551, 563), True, 'import sqlalchemy as sa\n'), ((604, 625), 'sqlalchemy.String', 'sa.String', ([], {'length': '(255)'}), '(length=255)\n', (613, 625), True, 'import sqlalchemy as sa\n'), ((671, 692), 'sqlalchemy.String', 'sa.String', ([], {'length': '(255)'}), '(length=255)\n', (680, 692), True, 'import sqlalchemy as sa\n'), ((736, 757), 'sqlalchemy.String', 'sa.String', ([], {'length': '(255)'}), '(length=255)\n', (745, 757), True, 'import sqlalchemy as sa\n'), ((820, 839), 'sqlalchemy.String', 'sa.String', ([], {'length': '(2)'}), '(length=2)\n', (829, 839), True, 'import sqlalchemy as sa\n'), ((886, 907), 'sqlalchemy.String', 'sa.String', ([], {'length': '(255)'}), '(length=255)\n', (895, 907), True, 'import sqlalchemy as sa\n'), ((949, 970), 'sqlalchemy.String', 'sa.String', ([], {'length': '(255)'}), '(length=255)\n', (958, 970), True, 'import sqlalchemy as sa\n'), ((1013, 1034), 
'sqlalchemy.String', 'sa.String', ([], {'length': '(255)'}), '(length=255)\n', (1022, 1034), True, 'import sqlalchemy as sa\n'), ((1078, 1110), 'sqlalchemy.Numeric', 'sa.Numeric', ([], {'precision': '(9)', 'scale': '(6)'}), '(precision=9, scale=6)\n', (1088, 1110), True, 'import sqlalchemy as sa\n'), ((1155, 1187), 'sqlalchemy.Numeric', 'sa.Numeric', ([], {'precision': '(9)', 'scale': '(6)'}), '(precision=9, scale=6)\n', (1165, 1187), True, 'import sqlalchemy as sa\n'), ((1293, 1305), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1303, 1305), True, 'import sqlalchemy as sa\n'), ((1346, 1366), 'sqlalchemy.String', 'sa.String', ([], {'length': '(80)'}), '(length=80)\n', (1355, 1366), True, 'import sqlalchemy as sa\n'), ((1413, 1434), 'sqlalchemy.String', 'sa.String', ([], {'length': '(255)'}), '(length=255)\n', (1422, 1434), True, 'import sqlalchemy as sa\n'), ((1573, 1585), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1583, 1585), True, 'import sqlalchemy as sa\n'), ((1627, 1648), 'sqlalchemy.String', 'sa.String', ([], {'length': '(255)'}), '(length=255)\n', (1636, 1648), True, 'import sqlalchemy as sa\n'), ((1692, 1713), 'sqlalchemy.String', 'sa.String', ([], {'length': '(255)'}), '(length=255)\n', (1701, 1713), True, 'import sqlalchemy as sa\n'), ((1755, 1767), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (1765, 1767), True, 'import sqlalchemy as sa\n'), ((1815, 1828), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (1826, 1828), True, 'import sqlalchemy as sa\n'), ((1877, 1890), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (1888, 1890), True, 'import sqlalchemy as sa\n'), ((1942, 1955), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (1953, 1955), True, 'import sqlalchemy as sa\n'), ((2004, 2024), 'sqlalchemy.String', 'sa.String', ([], {'length': '(40)'}), '(length=40)\n', (2013, 2024), True, 'import sqlalchemy as sa\n'), ((2076, 2096), 'sqlalchemy.String', 'sa.String', ([], {'length': '(40)'}), '(length=40)\n', (2085, 2096), True, 'import sqlalchemy as sa\n'), ((2143, 2155), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (2153, 2155), True, 'import sqlalchemy as sa\n'), ((2299, 2311), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (2309, 2311), True, 'import sqlalchemy as sa\n'), ((2358, 2370), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (2368, 2370), True, 'import sqlalchemy as sa\n'), ((2413, 2425), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (2423, 2425), True, 'import sqlalchemy as sa\n'), ((2471, 2483), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (2481, 2483), True, 'import sqlalchemy as sa\n'), ((2532, 2544), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (2542, 2544), True, 'import sqlalchemy as sa\n'), ((2593, 2614), 'sqlalchemy.String', 'sa.String', ([], {'length': '(255)'}), '(length=255)\n', (2602, 2614), True, 'import sqlalchemy as sa\n'), ((2663, 2684), 'sqlalchemy.String', 'sa.String', ([], {'length': '(255)'}), '(length=255)\n', (2672, 2684), True, 'import sqlalchemy as sa\n'), ((2733, 2745), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (2743, 2745), True, 'import sqlalchemy as sa\n'), ((2796, 2808), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (2806, 2808), True, 'import sqlalchemy as sa\n'), ((2859, 2871), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (2869, 2871), True, 'import sqlalchemy as sa\n'), ((2919, 2940), 'sqlalchemy.String', 'sa.String', ([], {'length': '(255)'}), '(length=255)\n', (2928, 2940), True, 'import sqlalchemy as 
sa\n'), ((2987, 3008), 'sqlalchemy.String', 'sa.String', ([], {'length': '(255)'}), '(length=255)\n', (2996, 3008), True, 'import sqlalchemy as sa\n'), ((3054, 3075), 'sqlalchemy.String', 'sa.String', ([], {'length': '(255)'}), '(length=255)\n', (3063, 3075), True, 'import sqlalchemy as sa\n'), ((3126, 3138), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (3136, 3138), True, 'import sqlalchemy as sa\n'), ((3192, 3204), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (3202, 3204), True, 'import sqlalchemy as sa\n'), ((3260, 3272), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (3270, 3272), True, 'import sqlalchemy as sa\n'), ((3325, 3337), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (3335, 3337), True, 'import sqlalchemy as sa\n'), ((3395, 3407), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (3405, 3407), True, 'import sqlalchemy as sa\n'), ((3459, 3480), 'sqlalchemy.String', 'sa.String', ([], {'length': '(255)'}), '(length=255)\n', (3468, 3480), True, 'import sqlalchemy as sa\n'), ((3529, 3550), 'sqlalchemy.String', 'sa.String', ([], {'length': '(255)'}), '(length=255)\n', (3538, 3550), True, 'import sqlalchemy as sa\n'), ((3612, 3624), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (3622, 3624), True, 'import sqlalchemy as sa\n'), ((3687, 3699), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (3697, 3699), True, 'import sqlalchemy as sa\n'), ((3765, 3777), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (3775, 3777), True, 'import sqlalchemy as sa\n'), ((3843, 3855), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (3853, 3855), True, 'import sqlalchemy as sa\n'), ((3923, 3935), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (3933, 3935), True, 'import sqlalchemy as sa\n'), ((4006, 4018), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (4016, 4018), True, 'import sqlalchemy as sa\n'), ((4082, 4094), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (4092, 4094), True, 'import sqlalchemy as sa\n'), ((4166, 4178), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (4176, 4178), True, 'import sqlalchemy as sa\n'), ((4244, 4256), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (4254, 4256), True, 'import sqlalchemy as sa\n'), ((4319, 4331), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (4329, 4331), True, 'import sqlalchemy as sa\n'), ((4392, 4404), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (4402, 4404), True, 'import sqlalchemy as sa\n'), ((4466, 4478), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (4476, 4478), True, 'import sqlalchemy as sa\n'), ((4543, 4555), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (4553, 4555), True, 'import sqlalchemy as sa\n'), ((4620, 4632), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (4630, 4632), True, 'import sqlalchemy as sa\n'), ((4699, 4711), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (4709, 4711), True, 'import sqlalchemy as sa\n'), ((4781, 4793), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (4791, 4793), True, 'import sqlalchemy as sa\n'), ((4856, 4868), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (4866, 4868), True, 'import sqlalchemy as sa\n'), ((4939, 4951), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (4949, 4951), True, 'import sqlalchemy as sa\n'), ((5016, 5028), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (5026, 5028), True, 'import sqlalchemy as sa\n'), ((5090, 5102), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (5100, 5102), True, 'import sqlalchemy as 
sa\n'), ((5149, 5170), 'sqlalchemy.String', 'sa.String', ([], {'length': '(255)'}), '(length=255)\n', (5158, 5170), True, 'import sqlalchemy as sa\n'), ((5210, 5222), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (5220, 5222), True, 'import sqlalchemy as sa\n'), ((5264, 5276), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (5274, 5276), True, 'import sqlalchemy as sa\n'), ((5331, 5343), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (5341, 5343), True, 'import sqlalchemy as sa\n'), ((5386, 5398), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (5396, 5398), True, 'import sqlalchemy as sa\n'), ((5452, 5464), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (5462, 5464), True, 'import sqlalchemy as sa\n'), ((5516, 5528), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (5526, 5528), True, 'import sqlalchemy as sa\n'), ((5581, 5593), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (5591, 5593), True, 'import sqlalchemy as sa\n'), ((5643, 5655), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (5653, 5655), True, 'import sqlalchemy as sa\n'), ((5708, 5720), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (5718, 5720), True, 'import sqlalchemy as sa\n'), ((5769, 5781), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (5779, 5781), True, 'import sqlalchemy as sa\n'), ((5834, 5846), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (5844, 5846), True, 'import sqlalchemy as sa\n'), ((5897, 5919), 'sqlalchemy.String', 'sa.String', ([], {'length': '(1500)'}), '(length=1500)\n', (5906, 5919), True, 'import sqlalchemy as sa\n'), ((5969, 5990), 'sqlalchemy.String', 'sa.String', ([], {'length': '(255)'}), '(length=255)\n', (5978, 5990), True, 'import sqlalchemy as sa\n'), ((6036, 6048), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (6046, 6048), True, 'import sqlalchemy as sa\n'), ((6104, 6125), 'sqlalchemy.String', 'sa.String', ([], {'length': '(255)'}), '(length=255)\n', (6113, 6125), True, 'import sqlalchemy as sa\n'), ((6180, 6201), 'sqlalchemy.String', 'sa.String', ([], {'length': '(255)'}), '(length=255)\n', (6189, 6201), True, 'import sqlalchemy as sa\n'), ((6252, 6273), 'sqlalchemy.String', 'sa.String', ([], {'length': '(255)'}), '(length=255)\n', (6261, 6273), True, 'import sqlalchemy as sa\n'), ((6324, 6345), 'sqlalchemy.String', 'sa.String', ([], {'length': '(255)'}), '(length=255)\n', (6333, 6345), True, 'import sqlalchemy as sa\n'), ((6526, 6538), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (6536, 6538), True, 'import sqlalchemy as sa\n'), ((6581, 6593), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (6591, 6593), True, 'import sqlalchemy as sa\n')]
|
from gudhi.wasserstein import wasserstein_distance
import numpy as np
""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
Author(s): <NAME>
Copyright (C) 2019 Inria
Modification(s):
- YYYY/MM Author: Description of the modification
"""
__author__ = "<NAME>"
__copyright__ = "Copyright (C) 2019 Inria"
__license__ = "MIT"
def test_basic_wasserstein():
diag1 = np.array([[2.7, 3.7], [9.6, 14.0], [34.2, 34.974]])
diag2 = np.array([[2.8, 4.45], [9.5, 14.1]])
diag3 = np.array([[0, 2], [4, 6]])
diag4 = np.array([[0, 3], [4, 8]])
emptydiag = np.array([[]])
assert wasserstein_distance(emptydiag, emptydiag, q=2., p=1.) == 0.
assert wasserstein_distance(emptydiag, emptydiag, q=np.inf, p=1.) == 0.
assert wasserstein_distance(emptydiag, emptydiag, q=np.inf, p=2.) == 0.
assert wasserstein_distance(emptydiag, emptydiag, q=2., p=2.) == 0.
assert wasserstein_distance(diag3, emptydiag, q=np.inf, p=1.) == 2.
assert wasserstein_distance(diag3, emptydiag, q=1., p=1.) == 4.
    assert wasserstein_distance(diag4, emptydiag, q=1., p=2.) == 5. # thank you Pythagorean triples
assert wasserstein_distance(diag4, emptydiag, q=np.inf, p=2.) == 2.5
assert wasserstein_distance(diag4, emptydiag, q=2., p=2.) == 3.5355339059327378
assert wasserstein_distance(diag1, diag2, q=2., p=1.) == 1.4453593023967701
assert wasserstein_distance(diag1, diag2, q=2.35, p=1.74) == 0.9772734057168739
assert wasserstein_distance(diag1, emptydiag, q=2.35, p=1.7863) == 3.141592214572228
assert wasserstein_distance(diag3, diag4, q=1., p=1.) == 3.
assert wasserstein_distance(diag3, diag4, q=np.inf, p=1.) == 3. # no diag matching here
assert wasserstein_distance(diag3, diag4, q=np.inf, p=2.) == np.sqrt(5)
assert wasserstein_distance(diag3, diag4, q=1., p=2.) == np.sqrt(5)
assert wasserstein_distance(diag3, diag4, q=4.5, p=2.) == np.sqrt(5)
|
[
"numpy.array",
"gudhi.wasserstein.wasserstein_distance",
"numpy.sqrt"
] |
[((531, 582), 'numpy.array', 'np.array', (['[[2.7, 3.7], [9.6, 14.0], [34.2, 34.974]]'], {}), '([[2.7, 3.7], [9.6, 14.0], [34.2, 34.974]])\n', (539, 582), True, 'import numpy as np\n'), ((595, 631), 'numpy.array', 'np.array', (['[[2.8, 4.45], [9.5, 14.1]]'], {}), '([[2.8, 4.45], [9.5, 14.1]])\n', (603, 631), True, 'import numpy as np\n'), ((644, 670), 'numpy.array', 'np.array', (['[[0, 2], [4, 6]]'], {}), '([[0, 2], [4, 6]])\n', (652, 670), True, 'import numpy as np\n'), ((683, 709), 'numpy.array', 'np.array', (['[[0, 3], [4, 8]]'], {}), '([[0, 3], [4, 8]])\n', (691, 709), True, 'import numpy as np\n'), ((726, 740), 'numpy.array', 'np.array', (['[[]]'], {}), '([[]])\n', (734, 740), True, 'import numpy as np\n'), ((753, 809), 'gudhi.wasserstein.wasserstein_distance', 'wasserstein_distance', (['emptydiag', 'emptydiag'], {'q': '(2.0)', 'p': '(1.0)'}), '(emptydiag, emptydiag, q=2.0, p=1.0)\n', (773, 809), False, 'from gudhi.wasserstein import wasserstein_distance\n'), ((825, 884), 'gudhi.wasserstein.wasserstein_distance', 'wasserstein_distance', (['emptydiag', 'emptydiag'], {'q': 'np.inf', 'p': '(1.0)'}), '(emptydiag, emptydiag, q=np.inf, p=1.0)\n', (845, 884), False, 'from gudhi.wasserstein import wasserstein_distance\n'), ((901, 960), 'gudhi.wasserstein.wasserstein_distance', 'wasserstein_distance', (['emptydiag', 'emptydiag'], {'q': 'np.inf', 'p': '(2.0)'}), '(emptydiag, emptydiag, q=np.inf, p=2.0)\n', (921, 960), False, 'from gudhi.wasserstein import wasserstein_distance\n'), ((977, 1033), 'gudhi.wasserstein.wasserstein_distance', 'wasserstein_distance', (['emptydiag', 'emptydiag'], {'q': '(2.0)', 'p': '(2.0)'}), '(emptydiag, emptydiag, q=2.0, p=2.0)\n', (997, 1033), False, 'from gudhi.wasserstein import wasserstein_distance\n'), ((1050, 1105), 'gudhi.wasserstein.wasserstein_distance', 'wasserstein_distance', (['diag3', 'emptydiag'], {'q': 'np.inf', 'p': '(1.0)'}), '(diag3, emptydiag, q=np.inf, p=1.0)\n', (1070, 1105), False, 'from gudhi.wasserstein import wasserstein_distance\n'), ((1122, 1174), 'gudhi.wasserstein.wasserstein_distance', 'wasserstein_distance', (['diag3', 'emptydiag'], {'q': '(1.0)', 'p': '(1.0)'}), '(diag3, emptydiag, q=1.0, p=1.0)\n', (1142, 1174), False, 'from gudhi.wasserstein import wasserstein_distance\n'), ((1191, 1243), 'gudhi.wasserstein.wasserstein_distance', 'wasserstein_distance', (['diag4', 'emptydiag'], {'q': '(1.0)', 'p': '(2.0)'}), '(diag4, emptydiag, q=1.0, p=2.0)\n', (1211, 1243), False, 'from gudhi.wasserstein import wasserstein_distance\n'), ((1295, 1350), 'gudhi.wasserstein.wasserstein_distance', 'wasserstein_distance', (['diag4', 'emptydiag'], {'q': 'np.inf', 'p': '(2.0)'}), '(diag4, emptydiag, q=np.inf, p=2.0)\n', (1315, 1350), False, 'from gudhi.wasserstein import wasserstein_distance\n'), ((1368, 1420), 'gudhi.wasserstein.wasserstein_distance', 'wasserstein_distance', (['diag4', 'emptydiag'], {'q': '(2.0)', 'p': '(2.0)'}), '(diag4, emptydiag, q=2.0, p=2.0)\n', (1388, 1420), False, 'from gudhi.wasserstein import wasserstein_distance\n'), ((1453, 1501), 'gudhi.wasserstein.wasserstein_distance', 'wasserstein_distance', (['diag1', 'diag2'], {'q': '(2.0)', 'p': '(1.0)'}), '(diag1, diag2, q=2.0, p=1.0)\n', (1473, 1501), False, 'from gudhi.wasserstein import wasserstein_distance\n'), ((1533, 1583), 'gudhi.wasserstein.wasserstein_distance', 'wasserstein_distance', (['diag1', 'diag2'], {'q': '(2.35)', 'p': '(1.74)'}), '(diag1, diag2, q=2.35, p=1.74)\n', (1553, 1583), False, 'from gudhi.wasserstein import wasserstein_distance\n'), ((1618, 1674), 
'gudhi.wasserstein.wasserstein_distance', 'wasserstein_distance', (['diag1', 'emptydiag'], {'q': '(2.35)', 'p': '(1.7863)'}), '(diag1, emptydiag, q=2.35, p=1.7863)\n', (1638, 1674), False, 'from gudhi.wasserstein import wasserstein_distance\n'), ((1708, 1756), 'gudhi.wasserstein.wasserstein_distance', 'wasserstein_distance', (['diag3', 'diag4'], {'q': '(1.0)', 'p': '(1.0)'}), '(diag3, diag4, q=1.0, p=1.0)\n', (1728, 1756), False, 'from gudhi.wasserstein import wasserstein_distance\n'), ((1772, 1823), 'gudhi.wasserstein.wasserstein_distance', 'wasserstein_distance', (['diag3', 'diag4'], {'q': 'np.inf', 'p': '(1.0)'}), '(diag3, diag4, q=np.inf, p=1.0)\n', (1792, 1823), False, 'from gudhi.wasserstein import wasserstein_distance\n'), ((1865, 1916), 'gudhi.wasserstein.wasserstein_distance', 'wasserstein_distance', (['diag3', 'diag4'], {'q': 'np.inf', 'p': '(2.0)'}), '(diag3, diag4, q=np.inf, p=2.0)\n', (1885, 1916), False, 'from gudhi.wasserstein import wasserstein_distance\n'), ((1919, 1929), 'numpy.sqrt', 'np.sqrt', (['(5)'], {}), '(5)\n', (1926, 1929), True, 'import numpy as np\n'), ((1941, 1989), 'gudhi.wasserstein.wasserstein_distance', 'wasserstein_distance', (['diag3', 'diag4'], {'q': '(1.0)', 'p': '(2.0)'}), '(diag3, diag4, q=1.0, p=2.0)\n', (1961, 1989), False, 'from gudhi.wasserstein import wasserstein_distance\n'), ((1991, 2001), 'numpy.sqrt', 'np.sqrt', (['(5)'], {}), '(5)\n', (1998, 2001), True, 'import numpy as np\n'), ((2013, 2061), 'gudhi.wasserstein.wasserstein_distance', 'wasserstein_distance', (['diag3', 'diag4'], {'q': '(4.5)', 'p': '(2.0)'}), '(diag3, diag4, q=4.5, p=2.0)\n', (2033, 2061), False, 'from gudhi.wasserstein import wasserstein_distance\n'), ((2064, 2074), 'numpy.sqrt', 'np.sqrt', (['(5)'], {}), '(5)\n', (2071, 2074), True, 'import numpy as np\n')]
|
from django.conf.urls import patterns, include, url
urlpatterns = patterns('app.views',
(r'^$', 'home'),
(r'^account/(?P<account_id>.+)$', 'account'),
)
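# Hedged note, not part of the original snippet: django.conf.urls.patterns()
# was deprecated in Django 1.8 and removed in 1.10. A rough modern equivalent,
# assuming the views live in app/views.py, would be:
#
#     from django.urls import path, re_path
#     from app import views
#
#     urlpatterns = [
#         path("", views.home),
#         re_path(r"^account/(?P<account_id>.+)$", views.account),
#     ]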
|
[
"django.conf.urls.patterns"
] |
[((67, 153), 'django.conf.urls.patterns', 'patterns', (['"""app.views"""', "('^$', 'home')", "('^account/(?P<account_id>.+)$', 'account')"], {}), "('app.views', ('^$', 'home'), ('^account/(?P<account_id>.+)$',\n 'account'))\n", (75, 153), False, 'from django.conf.urls import patterns, include, url\n')]
|
import logging
from logging.config import fileConfig
from util import DatabaseUtil as dbu
class StockTechIndicator(object):
def __init__(self):
self.logger = logging.getLogger(__name__)
pass
def save_tech_data(self, stock_code, date, dict):
update_tech_sql = "update pdtb_stock_tech_data set "
for key in dict:
update_tech_sql = update_tech_sql + key+ "=" + str(dict[key]) + ", "
update_tech_sql = update_tech_sql[:-2] + " where code = '" + stock_code + "' and date = '" + str(date) + "'"
#print(update_tech_sql)
dbu.update(update_tech_sql)
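# A minimal usage sketch, not part of the original module; the stock code,
# date and indicator names are hypothetical. Note that the UPDATE statement is
# built by plain string concatenation, so only trusted keys and numeric values
# should ever be passed in (a parameterized query would be safer).
if __name__ == "__main__":
    sti = StockTechIndicator()
    sti.save_tech_data("600000", "2020-01-02", {"ma5": 12.34, "ma10": 12.01, "rsi6": 55.2})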
|
[
"util.DatabaseUtil.update",
"logging.getLogger"
] |
[((171, 198), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (188, 198), False, 'import logging\n'), ((591, 618), 'util.DatabaseUtil.update', 'dbu.update', (['update_tech_sql'], {}), '(update_tech_sql)\n', (601, 618), True, 'from util import DatabaseUtil as dbu\n')]
|
import os
import click
from subprocess import call
import yaml
from datetime import datetime
import logging
logging.basicConfig(level="INFO")
tool = "./bin/config-slicer/config-slicer-3.1.14.7.jar"
# Fetches a manifest file from the staging server
@click.group()
def cli():
pass
def read_config():
with open(os.path.expanduser("~/.slices.config"), "r") as fs:
return yaml.safe_load(fs.read())
config_file = read_config()
@cli.command()
@click.argument("environment")
def manifest(environment):
config = config_file[environment]
server = config["server"]
logging.info("Generating manifest file for {}".format(server))
manifest_file = "exports/manifest-{}-{}.txt".format(server, datetime.now().isoformat())
try:
os.remove(manifest_file)
except OSError:
pass
call(["java", "-jar", tool,
"-o", "example",
"-s", server,
"-u", config["username"],
"-p", config["password"],
"-m", manifest_file])
@cli.command()
@click.argument("environment")
def export(environment):
config = config_file[environment]
manifest_file = "manifest.txt"
server = config["server"]
logging.info("Generating export package for {}".format(server))
package_file = "exports/export-package-{}-{}.xml".format(server, datetime.now().isoformat())
call(["java", "-jar", tool,
"-o", "export",
"-s", server,
"-u", config["username"],
"-p", config["password"],
"-m", manifest_file,
"-k", package_file])
@cli.command("import")
@click.argument("environment")
@click.argument("package")
@click.option("--validate/--no-validate", default=False)
def import_package(environment, package, validate):
operation = "validate" if validate else "importAndOverwrite"
config = config_file[environment]
server = config["server"]
logging.info(
"Importing export package {} to {} (validate={})".format(package, server, validate))
call(["java", "-jar", tool,
"-o", operation,
"-s", server,
"-u", config["username"],
"-p", config["password"],
"-k", package])
cli()
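# Hedged note, not part of the original script: the commands above expect a
# YAML file at ~/.slices.config keyed by environment name, each entry carrying
# at least server, username and password. The values and the script name below
# are hypothetical:
#
#     staging:
#       server: staging.example.com
#       username: deployer
#       password: secret
#
#     python slices.py manifest staging
#     python slices.py export staging
#     python slices.py import staging exports/export-package.xml --validate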
|
[
"os.remove",
"click.argument",
"logging.basicConfig",
"click.option",
"datetime.datetime.now",
"subprocess.call",
"click.group",
"os.path.expanduser"
] |
[((109, 142), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': '"""INFO"""'}), "(level='INFO')\n", (128, 142), False, 'import logging\n'), ((253, 266), 'click.group', 'click.group', ([], {}), '()\n', (264, 266), False, 'import click\n'), ((460, 489), 'click.argument', 'click.argument', (['"""environment"""'], {}), "('environment')\n", (474, 489), False, 'import click\n'), ((1014, 1043), 'click.argument', 'click.argument', (['"""environment"""'], {}), "('environment')\n", (1028, 1043), False, 'import click\n'), ((1568, 1597), 'click.argument', 'click.argument', (['"""environment"""'], {}), "('environment')\n", (1582, 1597), False, 'import click\n'), ((1599, 1624), 'click.argument', 'click.argument', (['"""package"""'], {}), "('package')\n", (1613, 1624), False, 'import click\n'), ((1626, 1681), 'click.option', 'click.option', (['"""--validate/--no-validate"""'], {'default': '(False)'}), "('--validate/--no-validate', default=False)\n", (1638, 1681), False, 'import click\n'), ((823, 960), 'subprocess.call', 'call', (["['java', '-jar', tool, '-o', 'example', '-s', server, '-u', config[\n 'username'], '-p', config['password'], '-m', manifest_file]"], {}), "(['java', '-jar', tool, '-o', 'example', '-s', server, '-u', config[\n 'username'], '-p', config['password'], '-m', manifest_file])\n", (827, 960), False, 'from subprocess import call\n'), ((1342, 1502), 'subprocess.call', 'call', (["['java', '-jar', tool, '-o', 'export', '-s', server, '-u', config[\n 'username'], '-p', config['password'], '-m', manifest_file, '-k',\n package_file]"], {}), "(['java', '-jar', tool, '-o', 'export', '-s', server, '-u', config[\n 'username'], '-p', config['password'], '-m', manifest_file, '-k',\n package_file])\n", (1346, 1502), False, 'from subprocess import call\n'), ((1986, 2117), 'subprocess.call', 'call', (["['java', '-jar', tool, '-o', operation, '-s', server, '-u', config[\n 'username'], '-p', config['password'], '-k', package]"], {}), "(['java', '-jar', tool, '-o', operation, '-s', server, '-u', config[\n 'username'], '-p', config['password'], '-k', package])\n", (1990, 2117), False, 'from subprocess import call\n'), ((761, 785), 'os.remove', 'os.remove', (['manifest_file'], {}), '(manifest_file)\n', (770, 785), False, 'import os\n'), ((321, 359), 'os.path.expanduser', 'os.path.expanduser', (['"""~/.slices.config"""'], {}), "('~/.slices.config')\n", (339, 359), False, 'import os\n'), ((716, 730), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (728, 730), False, 'from datetime import datetime\n'), ((1309, 1323), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1321, 1323), False, 'from datetime import datetime\n')]
|
from yt_dlp import YoutubeDL
from dataclasses import dataclass
from spotidl.spotify import SpotifySong
from spotidl.utils import make_song_title, check_file
@dataclass
class YoutubeSong:
id: str
title: str
video_url: str
def get_config(user_params: dict, song: SpotifySong) -> dict:
"""
Prepares the parameters that need to be passed onto the YoutubeDL object.
"""
downloader_params = {
"postprocessors": [
{
"key": "FFmpegExtractAudio",
"preferredcodec": user_params["codec"],
"preferredquality": user_params["quality"],
}
],
"outtmpl": f"{make_song_title(song.artists, song.name, ', ')}.%(ext)s",
# "outtmpl": "%(artist)s-%(title)s.ext",
"quiet": user_params["quiet"],
"format": "bestaudio/best",
"dynamic_mpd": False,
}
return downloader_params
def get_downloader(params: dict):
"""
Initiates the YoutubeDL class with the configured parameters.
"""
return YoutubeDL(params=params)
def fetch_source(yt: YoutubeDL, song: SpotifySong) -> YoutubeSong:
"""
Fetch appropriate source for the song from Youtube using the given details.
"""
try:
# adding "audio" to avoid 'official music videos' and similar types
song_title = make_song_title(song.artists, song.name, ", ") + " audio"
search = yt.extract_info(f"ytsearch:{song_title}", download=False)
yt_info = search["entries"][0]
except Exception as e:
print("Error when trying to get audio source from YT: ", e)
return
else:
yt_song = YoutubeSong(
id=yt_info["id"],
title=yt_info["title"],
video_url=yt_info["webpage_url"],
)
return yt_song
def download_song(yt: YoutubeDL, link: str):
"""
Downloads the song given its source link and the YouTube downloader object.
"""
print("\nStarting song download...\n")
try:
# attempts to download the song using the best matched
# youtube source link
yt.download(link)
except Exception:
print("\nDownload failed!")
else:
print("\nSuccessfully finished downloading!")
def controller(user_params: dict, song: SpotifySong, file_name: str):
"""
Handles the flow of the download process for the given song.
Initiates the configuration as per the user-defined parameters and chains
the rest of functions together.
"""
# check if song has already been downloaded before at some point;
# only proceed with download if it doesn't
if check_file(file_name):
print(f"\n{file_name} already exists! Skipping download...")
else:
# user parameters are used in the downloader parameters dictionary
# the downloader_params dict is then passed onto the YoutubeDL object
# when generating its instance.
downloader_params = get_config(user_params, song)
yt = get_downloader(downloader_params)
yt_song = fetch_source(yt, song)
download_song(yt, yt_song.video_url)
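# Illustrative usage sketch: a minimal way to drive the functions above. The
# user_params keys mirror what get_config() reads; the SpotifySong value and the
# output file name are placeholders rather than values this module defines.
# if __name__ == "__main__":
#     user_params = {"codec": "mp3", "quality": "320", "quiet": False}
#     song = ...  # a spotidl.spotify.SpotifySong obtained from the Spotify client
#     controller(user_params, song, file_name=f"{make_song_title(song.artists, song.name, ', ')}.mp3")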
|
[
"spotidl.utils.make_song_title",
"spotidl.utils.check_file",
"yt_dlp.YoutubeDL"
] |
[((1050, 1074), 'yt_dlp.YoutubeDL', 'YoutubeDL', ([], {'params': 'params'}), '(params=params)\n', (1059, 1074), False, 'from yt_dlp import YoutubeDL\n'), ((2651, 2672), 'spotidl.utils.check_file', 'check_file', (['file_name'], {}), '(file_name)\n', (2661, 2672), False, 'from spotidl.utils import make_song_title, check_file\n'), ((1347, 1393), 'spotidl.utils.make_song_title', 'make_song_title', (['song.artists', 'song.name', '""", """'], {}), "(song.artists, song.name, ', ')\n", (1362, 1393), False, 'from spotidl.utils import make_song_title, check_file\n'), ((672, 718), 'spotidl.utils.make_song_title', 'make_song_title', (['song.artists', 'song.name', '""", """'], {}), "(song.artists, song.name, ', ')\n", (687, 718), False, 'from spotidl.utils import make_song_title, check_file\n')]
|
#!/usr/bin/python3.7
# -*- coding: utf-8 -*-
# @Time : 2020/6/22 10:07 AM
# @Author: <EMAIL>
# @Notes : ID-card entity extraction, completion of masked ID numbers, and validity checking
import json
import os
import re
from datetime import datetime
class NumberNotShortError(Exception):
...
class IDCardNotStringError(Exception):
...
class IDCardFormatError(Exception):
...
class VerificationLegalError(Exception):
...
chinese = {
ord('一'): '1',
ord('二'): '2',
ord('三'): '3',
ord('四'): '4',
ord('五'): '5',
ord('六'): '6',
ord('七'): '7',
ord('八'): '8',
ord('九'): '9',
ord('幺'): '1',
ord('拐'): '7',
ord('洞'): '0',
ord('两'): '2',
ord('勾'): '9',
ord('x'): 'X'
}
class IdCard:
def __init__(self):
with open(os.path.join(os.path.dirname(__file__), 'idCard.json')) as fp:
regions = json.load(fp)
self.region = {region['code']: region['name'] for region in regions}
self.card = []
@staticmethod
def correct_card(card: str):
"""纠正数字数字
比如:方言,中文数据等
"""
translate = card.translate(chinese)
return translate
def check_up(self, id_card: str):
"""检验身份证信息的合法性"""
assert isinstance(id_card, str), IDCardNotStingError('身份证号码必须是字符串类型')
assert len(id_card) == 18, NumberNotShortError(F'身份证号码必须是18位,不支持{len(id_card)}位身份证')
if not (id_card[:-1].isdigit() and re.match('[0-9X]', id_card[-1])):
raise IDCardFormatError('身份证格式错误')
assert self._verification_legal(id_card) == id_card[-1], VerificationLegalError('合法性验证失败')
return True
@staticmethod
def _verification_legal(id_card: str):
"""检验最后一位"""
coefficient = [7, 9, 10, 5, 8, 4, 2, 1, 6, 3, 7, 9, 10, 5, 8, 4, 2]
last = [1, 0, 'X', 9, 8, 7, 6, 5, 4, 3, 2]
s = sum([int(x) * y for x, y in zip(id_card[:-1], coefficient)])
remainder = last[s % 11]
return str(remainder)
def find_card(self, id_card: str):
"""查询身份证"""
id_card = self.correct_card(id_card)
self.check_up(id_card)
province = id_card[:6]
year = id_card[6:10]
month = id_card[10:12]
day = id_card[12:14]
sex = '女' if int(id_card[16]) % 2 == 0 else '男'
return {'发证地': self.region[province], '出生日期': f'{year}年{month}月{day}日', '性别': sex}
def _completion(self, id_card: str):
"""补全身份证缺失位
缺失位用*来填充: 比如: ***121199505307*51
"""
assert len(id_card) == 18, NumberNotShortError(F'身份证号码必须是18位,不支持{len(id_card)}位身份证')
province = id_card[:6]
year = id_card[6:10]
month = id_card[10:12]
day = id_card[12:14]
sort = id_card[14:17]
last = id_card[17]
if '*' in province:
province_re = province.replace('*', '.')
for k in self.region:
if re.match(province_re, k):
self._completion(k + id_card[6:])
elif '*' in year:
current_year = str(datetime.now().year)
if '*' in year[0]:
for y_1 in ['1', '2']:
id_card = id_card[:6] + y_1 + id_card[7:]
self._completion(id_card)
if '*' in year[1:]:
year_re = year.replace('*', '.')
for y_2 in range(1984, int(current_year) + 1):
if re.match(year_re, str(y_2)):
id_card = id_card[:6] + str(y_2) + id_card[10:]
self._completion(id_card)
elif '*' in month:
month_re = month.replace('*', '.')
for mon in range(1, 13):
m = f'{mon:0>2}'
if re.match(month_re, m):
id_card = id_card[:10] + m + id_card[12:]
self._completion(id_card)
elif '*' in day:
day_re = day.replace('*', '.')
for d in range(1, 32):
ds = f'{d:0>2}'
try:
datetime(int(year), int(month), d)
if re.match(day_re, ds):
id_card = id_card[:12] + ds + id_card[14:]
self._completion(id_card)
except ValueError:
pass
elif '*' in sort:
sort_re = sort.replace('*', '.')
for st in range(1, 1000):
s = f'{st:0>3}'
if re.match(sort_re, s):
id_card = id_card[:14] + s + id_card[-1]
self._completion(id_card)
elif '*' in last:
new_last = self._verification_legal(id_card)
id_card = id_card[:-1] + new_last
self._completion(id_card)
else:
self.card.append(id_card)
def complete_information(self, id_card: str):
"""补全身份证缺失位
缺失位用*来填充: 比如: ***121199505307*51
"""
id_card = self.correct_card(id_card)
self.card.clear()
self._completion(id_card)
comps = []
for comp in self.card:
try:
if self.check_up(comp):
comps.append(comp)
except AssertionError:
pass
return comps
def match_card(self, card):
"""包含一句身份证信息的话
包含18位身份证信息,可以自动补全信息
eg: 我的身份证信息是5**121199*05*07051你能猜出来吗?
:param card: 包含一句身份证信息的语句
:return: 身份证信息
"""
messages = []
for message in re.finditer('[0-9*]{17}[0-9*xX]', card):
cards = self.complete_information(message.group())
for card in cards:
data = self.find_card(card)
data['身份证号码'] = card
messages.append(data)
return messages
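if __name__ == '__main__':
    # Minimal usage sketch; the masked numbers are the placeholder examples from the
    # docstrings above, not real IDs. Requires idCard.json next to this file.
    tool = IdCard()
    print(tool.complete_information('***121199505307*51'))
    print(tool.match_card('My ID number is 5**121199*05*07051, can you guess it?'))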
|
[
"json.load",
"re.finditer",
"os.path.dirname",
"re.match",
"datetime.datetime.now"
] |
[((5451, 5490), 're.finditer', 're.finditer', (['"""[0-9*]{17}[0-9*xX]"""', 'card'], {}), "('[0-9*]{17}[0-9*xX]', card)\n", (5462, 5490), False, 'import re\n'), ((826, 839), 'json.load', 'json.load', (['fp'], {}), '(fp)\n', (835, 839), False, 'import json\n'), ((1395, 1426), 're.match', 're.match', (['"""[0-9X]"""', 'id_card[-1]'], {}), "('[0-9X]', id_card[-1])\n", (1403, 1426), False, 'import re\n'), ((2864, 2888), 're.match', 're.match', (['province_re', 'k'], {}), '(province_re, k)\n', (2872, 2888), False, 'import re\n'), ((754, 779), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (769, 779), False, 'import os\n'), ((3001, 3015), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3013, 3015), False, 'from datetime import datetime\n'), ((3681, 3702), 're.match', 're.match', (['month_re', 'm'], {}), '(month_re, m)\n', (3689, 3702), False, 'import re\n'), ((4046, 4066), 're.match', 're.match', (['day_re', 'ds'], {}), '(day_re, ds)\n', (4054, 4066), False, 'import re\n'), ((4405, 4425), 're.match', 're.match', (['sort_re', 's'], {}), '(sort_re, s)\n', (4413, 4425), False, 'import re\n')]
|
import numpy as np
import itertools
from graph_nets import utils_tf
from root_gnn.src.datasets.base import DataSet
n_node_features = 6
max_nodes = 3 # including the particle that decays
def num_particles(event):
return len(event) // n_node_features
def make_graph(event, debug=False):
    # each particle record spans n_node_features values; indices 1-4 hold E, px, py, pz (index 0 is the pdgID).
scale = 0.0001
n_nodes = num_particles(event)
nodes = [[
event[inode*n_node_features+1], # E
event[inode*n_node_features+2], # px
event[inode*n_node_features+3], # py
event[inode*n_node_features+4] # pz
] for inode in range(n_nodes)]
nodes = np.array(nodes, dtype=np.float32) * scale
if debug:
print(n_nodes, "nodes")
print("node features:", nodes.shape)
if nodes.shape[0] > max_nodes:
print("cluster decays to more than {} nodes".format(max_nodes))
return [(None, None)]
elif nodes.shape[0] < max_nodes:
print("nodes: {} less than maximum {}".format(nodes.shape[0], max_nodes))
print(event)
new_nodes = np.zeros([max_nodes, 4], dtype=np.float32)
new_nodes[:nodes.shape[0], :] = nodes
nodes = new_nodes
all_edges = list(itertools.combinations(range(n_nodes), 2))
senders = np.array([x[0] for x in all_edges])
receivers = np.array([x[1] for x in all_edges])
n_edges = len(all_edges)
edges = np.expand_dims(np.array([0.0]*n_edges, dtype=np.float32), axis=1)
input_datadict = {
"n_node": 1,
"n_edge": 1,
"nodes": nodes[0, :].reshape((1, -1)),
"edges": np.expand_dims(np.array([1.0]*1, dtype=np.float32), axis=1),
"senders": np.array([0]),
"receivers": np.array([0]),
"globals": np.array([1], dtype=np.float32)
}
target_datadict = {
"n_node": n_nodes,
"n_edge": n_edges,
"nodes": nodes,
"edges": edges,
"senders": senders,
"receivers": receivers,
"globals": np.array([1]*(n_nodes-1)+[0]*(max_nodes-n_nodes+1), dtype=np.float32)
}
input_graph = utils_tf.data_dicts_to_graphs_tuple([input_datadict])
target_graph = utils_tf.data_dicts_to_graphs_tuple([target_datadict])
return [(input_graph, target_graph)]
def read(filename):
with open(filename, 'r') as f:
for line in f:
yield [float(x) for x in line.split()]
class HerwigHadrons(DataSet):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.read = read
self.make_graph = make_graph
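# Illustrative usage sketch; "herwig_events.txt" is a hypothetical file holding one
# whitespace-separated event per line, which is the format read() expects.
# if __name__ == "__main__":
#     for event in read("herwig_events.txt"):
#         for input_graph, target_graph in make_graph(event, debug=True):
#             if input_graph is not None:
#                 print(input_graph.nodes.shape, target_graph.nodes.shape)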
|
[
"numpy.zeros",
"numpy.array",
"graph_nets.utils_tf.data_dicts_to_graphs_tuple"
] |
[((1265, 1300), 'numpy.array', 'np.array', (['[x[0] for x in all_edges]'], {}), '([x[0] for x in all_edges])\n', (1273, 1300), True, 'import numpy as np\n'), ((1317, 1352), 'numpy.array', 'np.array', (['[x[1] for x in all_edges]'], {}), '([x[1] for x in all_edges])\n', (1325, 1352), True, 'import numpy as np\n'), ((2078, 2131), 'graph_nets.utils_tf.data_dicts_to_graphs_tuple', 'utils_tf.data_dicts_to_graphs_tuple', (['[input_datadict]'], {}), '([input_datadict])\n', (2113, 2131), False, 'from graph_nets import utils_tf\n'), ((2151, 2205), 'graph_nets.utils_tf.data_dicts_to_graphs_tuple', 'utils_tf.data_dicts_to_graphs_tuple', (['[target_datadict]'], {}), '([target_datadict])\n', (2186, 2205), False, 'from graph_nets import utils_tf\n'), ((639, 672), 'numpy.array', 'np.array', (['nodes'], {'dtype': 'np.float32'}), '(nodes, dtype=np.float32)\n', (647, 672), True, 'import numpy as np\n'), ((1409, 1452), 'numpy.array', 'np.array', (['([0.0] * n_edges)'], {'dtype': 'np.float32'}), '([0.0] * n_edges, dtype=np.float32)\n', (1417, 1452), True, 'import numpy as np\n'), ((1670, 1683), 'numpy.array', 'np.array', (['[0]'], {}), '([0])\n', (1678, 1683), True, 'import numpy as np\n'), ((1706, 1719), 'numpy.array', 'np.array', (['[0]'], {}), '([0])\n', (1714, 1719), True, 'import numpy as np\n'), ((1740, 1771), 'numpy.array', 'np.array', (['[1]'], {'dtype': 'np.float32'}), '([1], dtype=np.float32)\n', (1748, 1771), True, 'import numpy as np\n'), ((1983, 2069), 'numpy.array', 'np.array', (['([1] * (n_nodes - 1) + [0] * (max_nodes - n_nodes + 1))'], {'dtype': 'np.float32'}), '([1] * (n_nodes - 1) + [0] * (max_nodes - n_nodes + 1), dtype=np.\n float32)\n', (1991, 2069), True, 'import numpy as np\n'), ((1071, 1113), 'numpy.zeros', 'np.zeros', (['[max_nodes, 4]'], {'dtype': 'np.float32'}), '([max_nodes, 4], dtype=np.float32)\n', (1079, 1113), True, 'import numpy as np\n'), ((1605, 1642), 'numpy.array', 'np.array', (['([1.0] * 1)'], {'dtype': 'np.float32'}), '([1.0] * 1, dtype=np.float32)\n', (1613, 1642), True, 'import numpy as np\n')]
|
from __future__ import absolute_import
from __future__ import division
import torch
import copy
from torch import nn
from torch.nn import functional as F
from torchvision.models.resnet import resnet50, Bottleneck
from .hacnn import SoftBlock, SoftHardBlock
import torchvision
class ResNet50(nn.Module):
def __init__(self, num_classes, loss_type='xent', **kwargs):
super(ResNet50, self).__init__()
self.loss_type = loss_type
resnet50 = torchvision.models.resnet50(pretrained=True)
self.base = nn.Sequential(*list(resnet50.children())[:-2])
self.classifier = nn.Linear(2048, num_classes)
def forward(self, x):
x = self.base(x)
x = F.avg_pool2d(x, x.size()[2:])
f = x.view(x.size(0), -1)
if self.loss_type == 'xent':
if self.training:
y = self.classifier(f)
return [y]
else:
feat = torch.div(f, f.norm(dim=1, keepdim=True))
return feat
elif self.loss_type in ['xent_triplet', 'xent_tripletv2', 'xent_triplet_sqrt', 'xent_triplet_squa']:
feat = torch.div(f, f.norm(dim=1, keepdim=True))
if self.training:
y = self.classifier(f)
return [y], feat
else:
return feat
else:
raise KeyError("Unsupported loss: {}".format(self.loss_type))
class MGN(nn.Module):
def __init__(self, num_classes, loss_type='xent', **kwargs):
super(MGN, self).__init__()
self.loss_type = loss_type
self.dimension_branch = 512
# self.dimension_branch = 1024
resnet = resnet50(pretrained=True)
self.backbone = nn.Sequential(
resnet.conv1,
# nn.Conv2d(4, 64, kernel_size=7, stride=2, padding=3, bias=False),
resnet.bn1,
resnet.relu,
resnet.maxpool,
resnet.layer1, # res_conv2
resnet.layer2, # res_conv3
resnet.layer3[0], # res_conv4_1
)
# res_conv4x
res_conv4 = nn.Sequential(*resnet.layer3[1:])
res_g_conv5 = resnet.layer4
res_p_conv5 = nn.Sequential(
Bottleneck(1024, 512, downsample=nn.Sequential(nn.Conv2d(1024, 2048, 1, bias=False), nn.BatchNorm2d(2048))),
Bottleneck(2048, 512),
Bottleneck(2048, 512))
res_p_conv5.load_state_dict(resnet.layer4.state_dict())
self.p1 = nn.Sequential(copy.deepcopy(res_conv4), copy.deepcopy(res_g_conv5))
self.p2 = nn.Sequential(copy.deepcopy(res_conv4), copy.deepcopy(res_p_conv5))
self.maxpool_zg_p1 = nn.MaxPool2d(kernel_size=(8, 8))
self.maxpool_zg_p2 = nn.MaxPool2d(kernel_size=(16, 16))
reduction_512 = nn.Sequential(nn.Conv2d(2048, self.dimension_branch, 1, bias=False),
nn.BatchNorm2d(self.dimension_branch), nn.ReLU())
self.reduction_1 = copy.deepcopy(reduction_512)
self.reduction_2 = copy.deepcopy(reduction_512)
self.fc_id_512_1 = nn.Linear(self.dimension_branch, num_classes)
self.fc_id_512_2 = nn.Linear(self.dimension_branch, num_classes)
# self.fc_id_512_1 = nn.Linear(2048, num_classes)
# self.fc_id_512_2 = nn.Linear(2048, num_classes)
def forward(self, x):
x = self.backbone(x)
p1 = self.p1(x)
p2 = self.p2(x)
zg_p1 = self.maxpool_zg_p1(p1)
zg_p2 = self.maxpool_zg_p2(p2)
fg_p1 = self.reduction_1(zg_p1).squeeze(dim=3).squeeze(dim=2)
fg_p2 = self.reduction_2(zg_p2).squeeze(dim=3).squeeze(dim=2)
l_p1 = self.fc_id_512_1(fg_p1)
l_p2 = self.fc_id_512_2(fg_p2)
# l_p1 = self.fc_id_512_1(zg_p1.squeeze(dim=3).squeeze(dim=2))
# l_p2 = self.fc_id_512_2(zg_p2.squeeze(dim=3).squeeze(dim=2))
if self.loss_type in ['xent']:
if self.training:
feat_clfy = [l_p1, l_p2]
return feat_clfy
else:
# feat_embed = torch.cat([fg_p1, fg_p2], dim=1)
# feat_embed = torch.div(feat_embed, feat_embed.norm(dim=1, keepdim=True))
# return feat_embed
# fg_p1 = torch.div(fg_p1, fg_p1.norm(dim=1, keepdim=True))
# fg_p2 = torch.div(fg_p2, fg_p2.norm(dim=1, keepdim=True))
feat_global = torch.cat([fg_p1, fg_p2], dim=1)
feat_global = torch.div(feat_global, feat_global.norm(dim=1, keepdim=True))
return feat_global
elif self.loss_type in ['xent_triplet', 'xent_tripletv2', 'xent_triplet_sqrt', 'xent_triplet_squa']:
# # feat_clfy = torch.cat([l_p1, l_p2], dim=0)
# feat_clfy = [l_p1, l_p2]
# # feat_clfy = l_p1
# feat_global = torch.cat([fg_p1, fg_p2], dim=1)
# # feat_global = fg_p1
# feat_global = torch.div(feat_global, feat_global.norm(dim=1, keepdim=True))
# # feat_local = torch.cat([fz_p1, fz_p2, fz_p3, fz_p4], dim=1)
# # feat_local = torch.div(feat_local, feat_local.norm(dim=1, keepdim=True))
# if self.training:
# return feat_clfy, feat_global
# else:
# return feat_global
# feat_clfy = [l_p1, l_p2]
# fg_p1 = torch.div(fg_p1, fg_p1.norm(dim=1, keepdim=True))
# fg_p2 = torch.div(fg_p2, fg_p2.norm(dim=1, keepdim=True))
# feat_global = [fg_p1, fg_p2]
# if self.training:
# return feat_clfy, feat_global
# else:
# feat_global = torch.cat([fg_p1, fg_p2], dim=1)
# return feat_global
# feat_clfy = [l_p1, l_p2]
# feat_global = [fg_p1, fg_p2]
# if self.training:
# return feat_clfy, feat_global
# else:
# # fg_p1 = torch.div(fg_p1, fg_p1.norm(dim=1, keepdim=True))
# # fg_p2 = torch.div(fg_p2, fg_p2.norm(dim=1, keepdim=True))
# feat_global = torch.cat([fg_p1, fg_p2], dim=1)
# feat_global = torch.div(feat_global, feat_global.norm(dim=1, keepdim=True))
# return feat_global
feat_clfy = [l_p1, l_p2]
feat_global = torch.cat([fg_p1, fg_p2], dim=1)
feat_global = torch.div(feat_global, feat_global.norm(dim=1, keepdim=True))
if self.training:
# fg_p1 = torch.div(fg_p1, fg_p1.norm(dim=1, keepdim=True))
# fg_p2 = torch.div(fg_p2, fg_p2.norm(dim=1, keepdim=True))
# feat_global = [fg_p1, fg_p2]
return feat_clfy, feat_global
else:
# feat_global = torch.cat([fg_p1, fg_p2], dim=1)
# feat_global = torch.div(feat_global, feat_global.norm(dim=1, keepdim=True))
return feat_global
else:
raise KeyError("Unsupported loss: {}".format(self.loss_type))
class OriginMGN(nn.Module):
"""
@ARTICLE{2018arXiv180401438W,
author = {{<NAME>. and {<NAME>. and {<NAME>. and {<NAME>. and {Zhou}, X.},
title = "{Learning Discriminative Features with Multiple Granularities for Person Re-Identification}",
journal = {ArXiv e-prints},
archivePrefix = "arXiv",
eprint = {1804.01438},
primaryClass = "cs.CV",
keywords = {Computer Science - Computer Vision and Pattern Recognition},
year = 2018,
month = apr,
adsurl = {http://adsabs.harvard.edu/abs/2018arXiv180401438W},
adsnote = {Provided by the SAO/NASA Astrophysics Data System}
}
"""
def __init__(self, num_classes, loss_type='xent', **kwargs):
super(OriginMGN, self).__init__()
self.loss_type = loss_type
resnet = resnet50(pretrained=True)
self.backbone = nn.Sequential(
resnet.conv1,
resnet.bn1,
resnet.relu,
resnet.maxpool,
resnet.layer1, # res_conv2
resnet.layer2, # res_conv3
resnet.layer3[0], # res_conv4_1
)
# res_conv4x
res_conv4 = nn.Sequential(*resnet.layer3[1:])
# res_conv5 global
res_g_conv5 = resnet.layer4
# res_conv5 part
res_p_conv5 = nn.Sequential(
Bottleneck(1024, 512, downsample=nn.Sequential(nn.Conv2d(1024, 2048, 1, bias=False), nn.BatchNorm2d(2048))),
Bottleneck(2048, 512),
Bottleneck(2048, 512))
res_p_conv5.load_state_dict(resnet.layer4.state_dict())
# mgn part-1 global
self.p1 = nn.Sequential(copy.deepcopy(res_conv4), copy.deepcopy(res_g_conv5))
# mgn part-2
self.p2 = nn.Sequential(copy.deepcopy(res_conv4), copy.deepcopy(res_p_conv5))
# mgn part-3
self.p3 = nn.Sequential(copy.deepcopy(res_conv4), copy.deepcopy(res_p_conv5))
# global max pooling
self.maxpool_zg_p1 = nn.MaxPool2d(kernel_size=(12, 4))
self.maxpool_zg_p2 = nn.MaxPool2d(kernel_size=(24, 8))
self.maxpool_zg_p3 = nn.MaxPool2d(kernel_size=(24, 8))
self.maxpool_zp2 = nn.MaxPool2d(kernel_size=(12, 8))
self.maxpool_zp3 = nn.MaxPool2d(kernel_size=(8, 8))
# conv1 reduce
reduction = nn.Sequential(nn.Conv2d(2048, 256, 1, bias=False), nn.BatchNorm2d(256), nn.ReLU())
self.reduction_0 = copy.deepcopy(reduction)
self.reduction_1 = copy.deepcopy(reduction)
self.reduction_2 = copy.deepcopy(reduction)
self.reduction_3 = copy.deepcopy(reduction)
self.reduction_4 = copy.deepcopy(reduction)
self.reduction_5 = copy.deepcopy(reduction)
self.reduction_6 = copy.deepcopy(reduction)
self.reduction_7 = copy.deepcopy(reduction)
# fc softmax loss
self.fc_id_2048_0 = nn.Linear(2048, num_classes)
self.fc_id_2048_1 = nn.Linear(2048, num_classes)
self.fc_id_2048_2 = nn.Linear(2048, num_classes)
self.fc_id_256_1_0 = nn.Linear(256, num_classes)
self.fc_id_256_1_1 = nn.Linear(256, num_classes)
self.fc_id_256_2_0 = nn.Linear(256, num_classes)
self.fc_id_256_2_1 = nn.Linear(256, num_classes)
self.fc_id_256_2_2 = nn.Linear(256, num_classes)
def forward(self, x):
x = self.backbone(x)
p1 = self.p1(x)
p2 = self.p2(x)
p3 = self.p3(x)
zg_p1 = self.maxpool_zg_p1(p1) # z_g^G
zg_p2 = self.maxpool_zg_p2(p2) # z_g^P2
zg_p3 = self.maxpool_zg_p3(p3) # z_g^P3
zp2 = self.maxpool_zp2(p2)
z0_p2 = zp2[:, :, 0:1, :] # z_p0^P2
z1_p2 = zp2[:, :, 1:2, :] # z_p1^P2
zp3 = self.maxpool_zp3(p3)
z0_p3 = zp3[:, :, 0:1, :] # z_p0^P3
z1_p3 = zp3[:, :, 1:2, :] # z_p1^P3
z2_p3 = zp3[:, :, 2:3, :] # z_p2^P3
fg_p1 = self.reduction_0(zg_p1).squeeze(dim=3).squeeze(dim=2) # f_g^G, L_triplet^G
fg_p2 = self.reduction_1(zg_p2).squeeze(dim=3).squeeze(dim=2) # f_g^P2, L_triplet^P2
fg_p3 = self.reduction_2(zg_p3).squeeze(dim=3).squeeze(dim=2) # f_g^P3, L_triplet^P3
f0_p2 = self.reduction_3(z0_p2).squeeze(dim=3).squeeze(dim=2) # f_p0^P2
f1_p2 = self.reduction_4(z1_p2).squeeze(dim=3).squeeze(dim=2) # f_p1^P2
f0_p3 = self.reduction_5(z0_p3).squeeze(dim=3).squeeze(dim=2) # f_p0^P3
f1_p3 = self.reduction_6(z1_p3).squeeze(dim=3).squeeze(dim=2) # f_p1^P3
f2_p3 = self.reduction_7(z2_p3).squeeze(dim=3).squeeze(dim=2) # f_p2^P3
l_p1 = self.fc_id_2048_0(zg_p1.squeeze(dim=3).squeeze(dim=2)) # L_softmax^G
l_p2 = self.fc_id_2048_1(zg_p2.squeeze(dim=3).squeeze(dim=2)) # L_softmax^P2
l_p3 = self.fc_id_2048_2(zg_p3.squeeze(dim=3).squeeze(dim=2)) # L_softmax^P3
l0_p2 = self.fc_id_256_1_0(f0_p2) # L_softmax0^P2
l1_p2 = self.fc_id_256_1_1(f1_p2) # L_softmax1^P2
l0_p3 = self.fc_id_256_2_0(f0_p3) # L_softmax0^P3
l1_p3 = self.fc_id_256_2_1(f1_p3) # L_softmax1^P3
l2_p3 = self.fc_id_256_2_2(f2_p3) # L_softmax2^P3
if self.loss_type in ['xent_triplet', 'xent_tripletv2', 'xent_triplet_sqrt', 'xent_triplet_squa']:
if self.training:
feat_clfy = [l_p1, l_p2, l_p3, l0_p2, l1_p2, l0_p3, l1_p3, l2_p3]
feat = torch.cat([fg_p1, fg_p2, fg_p3], dim=1)
feat = torch.div(feat, feat.norm(dim=1, keepdim=True))
return feat_clfy, feat
else:
feat = torch.cat([fg_p1, fg_p2, fg_p3, f0_p2, f1_p2, f0_p3, f1_p3, f2_p3], dim=1)
feat = torch.div(feat, feat.norm(dim=1, keepdim=True))
return feat
else:
raise KeyError("Unsupported loss: {}".format(self.loss_type))
class MGNB4(nn.Module):
def __init__(self, num_classes, loss_type='xent', **kwargs):
super(MGNB4, self).__init__()
self.loss_type = loss_type
resnet = resnet50(pretrained=True)
self.backbone = nn.Sequential(
resnet.conv1,
resnet.bn1,
resnet.relu,
resnet.maxpool,
resnet.layer1, # res_conv2
resnet.layer2, # res_conv3
resnet.layer3[0], # res_conv4_1
)
# res_conv4x
res_conv4 = nn.Sequential(*resnet.layer3[1:])
res_conv5 = resnet.layer4
self.b1 = nn.Sequential(copy.deepcopy(res_conv4), copy.deepcopy(res_conv5))
self.b2 = nn.Sequential(copy.deepcopy(res_conv4), copy.deepcopy(res_conv5))
self.b3 = nn.Sequential(copy.deepcopy(res_conv4), copy.deepcopy(res_conv5))
self.b4 = nn.Sequential(copy.deepcopy(res_conv4), copy.deepcopy(res_conv5))
self.maxpool_b1 = nn.MaxPool2d(kernel_size=(8, 8))
self.maxpool_b2 = nn.MaxPool2d(kernel_size=(8, 8))
self.maxpool_b3 = nn.MaxPool2d(kernel_size=(8, 8))
self.maxpool_b4 = nn.MaxPool2d(kernel_size=(8, 8))
reduction_512 = nn.Sequential(nn.Conv2d(2048, 512, 1, bias=False), nn.BatchNorm2d(512), nn.ReLU())
self.reduction_1 = copy.deepcopy(reduction_512)
self.reduction_2 = copy.deepcopy(reduction_512)
self.reduction_3 = copy.deepcopy(reduction_512)
self.reduction_4 = copy.deepcopy(reduction_512)
self.fc_id_512_1 = nn.Linear(512, num_classes)
self.fc_id_512_2 = nn.Linear(512, num_classes)
self.fc_id_512_3 = nn.Linear(512, num_classes)
self.fc_id_512_4 = nn.Linear(512, num_classes)
def forward(self, x):
x = self.backbone(x)
b1 = self.b1(x)
b2 = self.b2(x)
b3 = self.b3(x)
b4 = self.b4(x)
pb1 = self.maxpool_b1(b1)
pb2 = self.maxpool_b2(b2)
pb3 = self.maxpool_b3(b3)
pb4 = self.maxpool_b4(b4)
f_b1 = self.reduction_1(pb1).squeeze(dim=3).squeeze(dim=2)
f_b2 = self.reduction_2(pb2).squeeze(dim=3).squeeze(dim=2)
f_b3 = self.reduction_3(pb3).squeeze(dim=3).squeeze(dim=2)
f_b4 = self.reduction_4(pb4).squeeze(dim=3).squeeze(dim=2)
cf_b1 = self.fc_id_512_1(f_b1)
cf_b2 = self.fc_id_512_2(f_b2)
cf_b3 = self.fc_id_512_3(f_b3)
cf_b4 = self.fc_id_512_4(f_b4)
if self.loss_type in ['xent']:
if self.training:
feat_clfy = [cf_b1, cf_b2, cf_b3, cf_b4]
return feat_clfy
else:
feat_global = torch.cat([f_b1, f_b2, f_b3, f_b4], dim=1)
feat_global = torch.div(feat_global, feat_global.norm(dim=1, keepdim=True))
return feat_global
elif self.loss_type in ['xent_triplet', 'xent_tripletv2']:
feat_clfy = [cf_b1, cf_b2, cf_b3, cf_b4]
feat_global = torch.cat([f_b1, f_b2, f_b3, f_b4], dim=1)
feat_global = torch.div(feat_global, feat_global.norm(dim=1, keepdim=True))
if self.training:
return feat_clfy, feat_global
else:
return feat_global
else:
raise KeyError("Unsupported loss: {}".format(self.loss_type))
class MGNB2(nn.Module):
def __init__(self, num_classes, loss_type='xent', **kwargs):
super(MGNB2, self).__init__()
self.loss_type = loss_type
self.dimension_branch = 1024
resnet = resnet50(pretrained=True)
self.backbone = nn.Sequential(
resnet.conv1,
resnet.bn1,
resnet.relu,
resnet.maxpool,
resnet.layer1, # res_conv2
resnet.layer2, # res_conv3
resnet.layer3[0], # res_conv4_1
)
# res_conv4x
res_conv4 = nn.Sequential(*resnet.layer3[1:])
res_conv5 = resnet.layer4
self.b1 = nn.Sequential(copy.deepcopy(res_conv4), copy.deepcopy(res_conv5))
self.b2 = nn.Sequential(copy.deepcopy(res_conv4), copy.deepcopy(res_conv5))
self.maxpool_b1 = nn.MaxPool2d(kernel_size=(8, 8))
self.maxpool_b2 = nn.MaxPool2d(kernel_size=(8, 8))
reduction_512 = nn.Sequential(nn.Conv2d(2048, self.dimension_branch, 1, bias=False),
nn.BatchNorm2d(self.dimension_branch), nn.ReLU())
self.reduction_1 = copy.deepcopy(reduction_512)
self.reduction_2 = copy.deepcopy(reduction_512)
self.fc_id_512_1 = nn.Linear(self.dimension_branch, num_classes)
self.fc_id_512_2 = nn.Linear(self.dimension_branch, num_classes)
def forward(self, x):
x = self.backbone(x)
b1 = self.b1(x)
b2 = self.b2(x)
pb1 = self.maxpool_b1(b1)
pb2 = self.maxpool_b2(b2)
f_b1 = self.reduction_1(pb1).squeeze(dim=3).squeeze(dim=2)
f_b2 = self.reduction_2(pb2).squeeze(dim=3).squeeze(dim=2)
cf_b1 = self.fc_id_512_1(f_b1)
cf_b2 = self.fc_id_512_2(f_b2)
if self.loss_type in ['xent']:
if self.training:
feat_clfy = [cf_b1, cf_b2]
return feat_clfy
else:
feat_global = torch.cat([f_b1, f_b2], dim=1)
feat_global = torch.div(feat_global, feat_global.norm(dim=1, keepdim=True))
return feat_global
elif self.loss_type in ['xent_triplet', 'xent_tripletv2', 'xent_triplet_sqrt', 'xent_triplet_squa']:
feat_clfy = [cf_b1, cf_b2]
feat_global = torch.cat([f_b1, f_b2], dim=1)
feat_global = torch.div(feat_global, feat_global.norm(dim=1, keepdim=True))
if self.training:
return feat_clfy, feat_global
else:
return feat_global
else:
raise KeyError("Unsupported loss: {}".format(self.loss_type))
class ResSoAttn(nn.Module):
def __init__(self, num_classes, loss_type='xent', nchannels=[128, 256, 384], branch_feat_dim=682, **kwargs):
super(ResSoAttn, self).__init__()
self.loss_type = loss_type
resnet = resnet50(pretrained=True)
self.backbone = nn.Sequential(
resnet.conv1,
resnet.bn1,
resnet.relu,
resnet.maxpool,
resnet.layer1, # res_conv2
resnet.layer2, # res_conv3
)
self.habk1 = nn.Sequential(SoftBlock(nchannels=nchannels, input_channel=512, feat_dim=branch_feat_dim),
nn.Dropout(p=0.5, inplace=True))
self.habk2 = nn.Sequential(SoftBlock(nchannels=nchannels, input_channel=512, feat_dim=branch_feat_dim),
nn.Dropout(p=0.5, inplace=True))
self.habk3 = nn.Sequential(SoftBlock(nchannels=nchannels, input_channel=512, feat_dim=branch_feat_dim),
nn.Dropout(p=0.5, inplace=True))
self.fc_id_1 = nn.Linear(branch_feat_dim, num_classes)
self.fc_id_2 = nn.Linear(branch_feat_dim, num_classes)
self.fc_id_3 = nn.Linear(branch_feat_dim, num_classes)
def forward(self, x):
x = self.backbone(x)
f_b1 = self.habk1(x)
f_b2 = self.habk2(x)
f_b3 = self.habk3(x)
cf_b1 = self.fc_id_1(f_b1)
cf_b2 = self.fc_id_2(f_b2)
cf_b3 = self.fc_id_3(f_b3)
if self.loss_type in ['xent']:
if self.training:
feat_clfy = [cf_b1, cf_b2, cf_b3]
return feat_clfy
else:
feat_global = torch.cat([f_b1, f_b2, f_b3], dim=1)
feat_global = torch.div(feat_global, feat_global.norm(dim=1, keepdim=True))
return feat_global
elif self.loss_type in ['xent_triplet', 'xent_tripletv2']:
feat_clfy = [cf_b1, cf_b2, cf_b3]
feat_global = torch.cat([f_b1, f_b2, f_b3], dim=1)
feat_global = torch.div(feat_global, feat_global.norm(dim=1, keepdim=True))
if self.training:
return feat_clfy, feat_global
else:
return feat_global
else:
raise KeyError("Unsupported loss: {}".format(self.loss_type))
class ResSoHaAttn(nn.Module):
def __init__(self, num_classes, loss_type='xent', nchannels=[128, 256, 384], branch_feat_dim=682, **kwargs):
super(ResSoHaAttn, self).__init__()
self.loss_type = loss_type
resnet = resnet50(pretrained=True)
self.backbone = nn.Sequential(
resnet.conv1,
resnet.bn1,
resnet.relu,
resnet.maxpool,
resnet.layer1, # res_conv2
resnet.layer2, # res_conv3
)
self.habk1 = SoftHardBlock(nchannels=nchannels, input_channel=512, feat_dim=branch_feat_dim)
self.habk2 = SoftHardBlock(nchannels=nchannels, input_channel=512, feat_dim=branch_feat_dim)
self.habk3 = SoftHardBlock(nchannels=nchannels, input_channel=512, feat_dim=branch_feat_dim)
self.fc_id_1 = nn.Linear(branch_feat_dim, num_classes)
self.fc_id_2 = nn.Linear(branch_feat_dim, num_classes)
self.fc_id_3 = nn.Linear(branch_feat_dim, num_classes)
def forward(self, x):
x = self.backbone(x)
fg_b1, fl_b1 = self.habk1(x)
fg_b2, fl_b2 = self.habk2(x)
fg_b3, fl_b3 = self.habk3(x)
f_b1 = torch.cat([fg_b1, fl_b1], dim=1)
f_b2 = torch.cat([fg_b2, fl_b2], dim=1)
f_b3 = torch.cat([fg_b3, fl_b3], dim=1)
cf_b1 = self.fc_id_1(f_b1)
cf_b2 = self.fc_id_2(f_b2)
cf_b3 = self.fc_id_3(f_b3)
if self.loss_type in ['xent']:
if self.training:
feat_clfy = [cf_b1, cf_b2, cf_b3]
return feat_clfy
else:
feat = torch.cat([f_b1, f_b2, f_b3], dim=1)
feat = torch.div(feat, feat.norm(dim=1, keepdim=True))
return feat
elif self.loss_type in ['xent_triplet', 'xent_tripletv2']:
feat_clfy = [cf_b1, cf_b2, cf_b3]
# feat_global = torch.cat([fg_b1, fg_b2, fg_b3], dim=1)
# feat_global = torch.div(feat_global, feat_global.norm(dim=1, keepdim=True))
feat = torch.cat([f_b1, f_b2, f_b3], dim=1)
feat = torch.div(feat, feat.norm(dim=1, keepdim=True))
if self.training:
# return feat_clfy, feat_global
return feat_clfy, feat
else:
# feat = torch.cat([f_b1, f_b2, f_b3], dim=1)
# feat = torch.div(feat, feat.norm(dim=1, keepdim=True))
return feat
else:
raise KeyError("Unsupported loss: {}".format(self.loss_type))
class Resv2SoAttn(nn.Module):
def __init__(self, num_classes, loss_type='xent', nchannels=[256, 384, 512], branch_feat_dim=682, **kwargs):
super(Resv2SoAttn, self).__init__()
self.loss_type = loss_type
self.inplanes = 16
self.layer1 = self.make_layer(Bottleneck, 16, 3, stride=1)
self.layer2 = self.make_layer(Bottleneck, 32, 4, stride=2)
self.backbone = nn.Sequential(
nn.Conv2d(3, 16, kernel_size=7, stride=2, padding=3, bias=False),
nn.BatchNorm2d(16),
nn.ReLU(inplace=True),
nn.MaxPool2d(kernel_size=3, stride=2, padding=1),
self.layer1,
self.layer2,
)
self.habk1 = nn.Sequential(SoftBlock(nchannels=nchannels, input_channel=128, feat_dim=branch_feat_dim),
nn.Dropout(p=0.5, inplace=True))
self.habk2 = nn.Sequential(SoftBlock(nchannels=nchannels, input_channel=128, feat_dim=branch_feat_dim),
nn.Dropout(p=0.5, inplace=True))
self.habk3 = nn.Sequential(SoftBlock(nchannels=nchannels, input_channel=128, feat_dim=branch_feat_dim),
nn.Dropout(p=0.5, inplace=True))
self.fc_id_1 = nn.Linear(branch_feat_dim, num_classes)
self.fc_id_2 = nn.Linear(branch_feat_dim, num_classes)
self.fc_id_3 = nn.Linear(branch_feat_dim, num_classes)
def make_layer(self, block, planes, blocks, stride=1):
downsample = None
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential(
nn.Conv2d(self.inplanes, planes * block.expansion,
kernel_size=1, stride=stride, bias=False),
nn.BatchNorm2d(planes * block.expansion),
)
layers = []
layers.append(block(self.inplanes, planes, stride, downsample))
self.inplanes = planes * block.expansion
for i in range(1, blocks):
layers.append(block(self.inplanes, planes))
return nn.Sequential(*layers)
def forward(self, x):
x = self.backbone(x)
f_b1 = self.habk1(x)
f_b2 = self.habk2(x)
f_b3 = self.habk3(x)
cf_b1 = self.fc_id_1(f_b1)
cf_b2 = self.fc_id_2(f_b2)
cf_b3 = self.fc_id_3(f_b3)
if self.loss_type in ['xent']:
if self.training:
feat_clfy = [cf_b1, cf_b2, cf_b3]
return feat_clfy
else:
feat_global = torch.cat([f_b1, f_b2, f_b3], dim=1)
feat_global = torch.div(feat_global, feat_global.norm(dim=1, keepdim=True))
return feat_global
elif self.loss_type in ['xent_triplet', 'xent_tripletv2']:
feat_clfy = [cf_b1, cf_b2, cf_b3]
feat_global = torch.cat([f_b1, f_b2, f_b3], dim=1)
feat_global = torch.div(feat_global, feat_global.norm(dim=1, keepdim=True))
if self.training:
return feat_clfy, feat_global
else:
return feat_global
else:
raise KeyError("Unsupported loss: {}".format(self.loss_type))
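# Illustrative shape check. The input resolutions are inferences from the pooling
# kernels rather than values stated in this file: OriginMGN's (12, 4)/(24, 8) windows
# fit a 384x128 input, while MGN's (8, 8)/(16, 16) windows fit 256x256; 751 is only a
# placeholder class count.
# model = OriginMGN(num_classes=751, loss_type='xent_triplet')
# model.eval()
# feat = model(torch.randn(2, 3, 384, 128))
# print(feat.shape)  # torch.Size([2, 2048]): eight 256-dim branch embeddings concatenated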
|
[
"torch.nn.Dropout",
"copy.deepcopy",
"torchvision.models.resnet.Bottleneck",
"torch.nn.ReLU",
"torch.nn.Sequential",
"torch.nn.Conv2d",
"torch.cat",
"torchvision.models.resnet.resnet50.children",
"torchvision.models.resnet50",
"torch.nn.BatchNorm2d",
"torchvision.models.resnet.resnet50",
"torch.nn.Linear",
"torch.nn.MaxPool2d"
] |
[((466, 510), 'torchvision.models.resnet50', 'torchvision.models.resnet50', ([], {'pretrained': '(True)'}), '(pretrained=True)\n', (493, 510), False, 'import torchvision\n'), ((604, 632), 'torch.nn.Linear', 'nn.Linear', (['(2048)', 'num_classes'], {}), '(2048, num_classes)\n', (613, 632), False, 'from torch import nn\n'), ((1663, 1688), 'torchvision.models.resnet.resnet50', 'resnet50', ([], {'pretrained': '(True)'}), '(pretrained=True)\n', (1671, 1688), False, 'from torchvision.models.resnet import resnet50, Bottleneck\n'), ((1713, 1834), 'torch.nn.Sequential', 'nn.Sequential', (['resnet.conv1', 'resnet.bn1', 'resnet.relu', 'resnet.maxpool', 'resnet.layer1', 'resnet.layer2', 'resnet.layer3[0]'], {}), '(resnet.conv1, resnet.bn1, resnet.relu, resnet.maxpool, resnet\n .layer1, resnet.layer2, resnet.layer3[0])\n', (1726, 1834), False, 'from torch import nn\n'), ((2097, 2130), 'torch.nn.Sequential', 'nn.Sequential', (['*resnet.layer3[1:]'], {}), '(*resnet.layer3[1:])\n', (2110, 2130), False, 'from torch import nn\n'), ((2662, 2694), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', ([], {'kernel_size': '(8, 8)'}), '(kernel_size=(8, 8))\n', (2674, 2694), False, 'from torch import nn\n'), ((2724, 2758), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', ([], {'kernel_size': '(16, 16)'}), '(kernel_size=(16, 16))\n', (2736, 2758), False, 'from torch import nn\n'), ((2968, 2996), 'copy.deepcopy', 'copy.deepcopy', (['reduction_512'], {}), '(reduction_512)\n', (2981, 2996), False, 'import copy\n'), ((3024, 3052), 'copy.deepcopy', 'copy.deepcopy', (['reduction_512'], {}), '(reduction_512)\n', (3037, 3052), False, 'import copy\n'), ((3081, 3126), 'torch.nn.Linear', 'nn.Linear', (['self.dimension_branch', 'num_classes'], {}), '(self.dimension_branch, num_classes)\n', (3090, 3126), False, 'from torch import nn\n'), ((3154, 3199), 'torch.nn.Linear', 'nn.Linear', (['self.dimension_branch', 'num_classes'], {}), '(self.dimension_branch, num_classes)\n', (3163, 3199), False, 'from torch import nn\n'), ((7845, 7870), 'torchvision.models.resnet.resnet50', 'resnet50', ([], {'pretrained': '(True)'}), '(pretrained=True)\n', (7853, 7870), False, 'from torchvision.models.resnet import resnet50, Bottleneck\n'), ((7895, 8016), 'torch.nn.Sequential', 'nn.Sequential', (['resnet.conv1', 'resnet.bn1', 'resnet.relu', 'resnet.maxpool', 'resnet.layer1', 'resnet.layer2', 'resnet.layer3[0]'], {}), '(resnet.conv1, resnet.bn1, resnet.relu, resnet.maxpool, resnet\n .layer1, resnet.layer2, resnet.layer3[0])\n', (7908, 8016), False, 'from torch import nn\n'), ((8199, 8232), 'torch.nn.Sequential', 'nn.Sequential', (['*resnet.layer3[1:]'], {}), '(*resnet.layer3[1:])\n', (8212, 8232), False, 'from torch import nn\n'), ((9001, 9034), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', ([], {'kernel_size': '(12, 4)'}), '(kernel_size=(12, 4))\n', (9013, 9034), False, 'from torch import nn\n'), ((9064, 9097), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', ([], {'kernel_size': '(24, 8)'}), '(kernel_size=(24, 8))\n', (9076, 9097), False, 'from torch import nn\n'), ((9127, 9160), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', ([], {'kernel_size': '(24, 8)'}), '(kernel_size=(24, 8))\n', (9139, 9160), False, 'from torch import nn\n'), ((9188, 9221), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', ([], {'kernel_size': '(12, 8)'}), '(kernel_size=(12, 8))\n', (9200, 9221), False, 'from torch import nn\n'), ((9249, 9281), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', ([], {'kernel_size': '(8, 8)'}), '(kernel_size=(8, 8))\n', (9261, 9281), False, 'from torch import nn\n'), ((9436, 9460), 'copy.deepcopy', 'copy.deepcopy', 
(['reduction'], {}), '(reduction)\n', (9449, 9460), False, 'import copy\n'), ((9488, 9512), 'copy.deepcopy', 'copy.deepcopy', (['reduction'], {}), '(reduction)\n', (9501, 9512), False, 'import copy\n'), ((9540, 9564), 'copy.deepcopy', 'copy.deepcopy', (['reduction'], {}), '(reduction)\n', (9553, 9564), False, 'import copy\n'), ((9592, 9616), 'copy.deepcopy', 'copy.deepcopy', (['reduction'], {}), '(reduction)\n', (9605, 9616), False, 'import copy\n'), ((9644, 9668), 'copy.deepcopy', 'copy.deepcopy', (['reduction'], {}), '(reduction)\n', (9657, 9668), False, 'import copy\n'), ((9696, 9720), 'copy.deepcopy', 'copy.deepcopy', (['reduction'], {}), '(reduction)\n', (9709, 9720), False, 'import copy\n'), ((9748, 9772), 'copy.deepcopy', 'copy.deepcopy', (['reduction'], {}), '(reduction)\n', (9761, 9772), False, 'import copy\n'), ((9800, 9824), 'copy.deepcopy', 'copy.deepcopy', (['reduction'], {}), '(reduction)\n', (9813, 9824), False, 'import copy\n'), ((9880, 9908), 'torch.nn.Linear', 'nn.Linear', (['(2048)', 'num_classes'], {}), '(2048, num_classes)\n', (9889, 9908), False, 'from torch import nn\n'), ((9937, 9965), 'torch.nn.Linear', 'nn.Linear', (['(2048)', 'num_classes'], {}), '(2048, num_classes)\n', (9946, 9965), False, 'from torch import nn\n'), ((9994, 10022), 'torch.nn.Linear', 'nn.Linear', (['(2048)', 'num_classes'], {}), '(2048, num_classes)\n', (10003, 10022), False, 'from torch import nn\n'), ((10052, 10079), 'torch.nn.Linear', 'nn.Linear', (['(256)', 'num_classes'], {}), '(256, num_classes)\n', (10061, 10079), False, 'from torch import nn\n'), ((10109, 10136), 'torch.nn.Linear', 'nn.Linear', (['(256)', 'num_classes'], {}), '(256, num_classes)\n', (10118, 10136), False, 'from torch import nn\n'), ((10166, 10193), 'torch.nn.Linear', 'nn.Linear', (['(256)', 'num_classes'], {}), '(256, num_classes)\n', (10175, 10193), False, 'from torch import nn\n'), ((10223, 10250), 'torch.nn.Linear', 'nn.Linear', (['(256)', 'num_classes'], {}), '(256, num_classes)\n', (10232, 10250), False, 'from torch import nn\n'), ((10280, 10307), 'torch.nn.Linear', 'nn.Linear', (['(256)', 'num_classes'], {}), '(256, num_classes)\n', (10289, 10307), False, 'from torch import nn\n'), ((12997, 13022), 'torchvision.models.resnet.resnet50', 'resnet50', ([], {'pretrained': '(True)'}), '(pretrained=True)\n', (13005, 13022), False, 'from torchvision.models.resnet import resnet50, Bottleneck\n'), ((13047, 13168), 'torch.nn.Sequential', 'nn.Sequential', (['resnet.conv1', 'resnet.bn1', 'resnet.relu', 'resnet.maxpool', 'resnet.layer1', 'resnet.layer2', 'resnet.layer3[0]'], {}), '(resnet.conv1, resnet.bn1, resnet.relu, resnet.maxpool, resnet\n .layer1, resnet.layer2, resnet.layer3[0])\n', (13060, 13168), False, 'from torch import nn\n'), ((13351, 13384), 'torch.nn.Sequential', 'nn.Sequential', (['*resnet.layer3[1:]'], {}), '(*resnet.layer3[1:])\n', (13364, 13384), False, 'from torch import nn\n'), ((13783, 13815), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', ([], {'kernel_size': '(8, 8)'}), '(kernel_size=(8, 8))\n', (13795, 13815), False, 'from torch import nn\n'), ((13842, 13874), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', ([], {'kernel_size': '(8, 8)'}), '(kernel_size=(8, 8))\n', (13854, 13874), False, 'from torch import nn\n'), ((13901, 13933), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', ([], {'kernel_size': '(8, 8)'}), '(kernel_size=(8, 8))\n', (13913, 13933), False, 'from torch import nn\n'), ((13960, 13992), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', ([], {'kernel_size': '(8, 8)'}), '(kernel_size=(8, 8))\n', (13972, 13992), False, 'from torch 
import nn\n'), ((14128, 14156), 'copy.deepcopy', 'copy.deepcopy', (['reduction_512'], {}), '(reduction_512)\n', (14141, 14156), False, 'import copy\n'), ((14184, 14212), 'copy.deepcopy', 'copy.deepcopy', (['reduction_512'], {}), '(reduction_512)\n', (14197, 14212), False, 'import copy\n'), ((14240, 14268), 'copy.deepcopy', 'copy.deepcopy', (['reduction_512'], {}), '(reduction_512)\n', (14253, 14268), False, 'import copy\n'), ((14296, 14324), 'copy.deepcopy', 'copy.deepcopy', (['reduction_512'], {}), '(reduction_512)\n', (14309, 14324), False, 'import copy\n'), ((14353, 14380), 'torch.nn.Linear', 'nn.Linear', (['(512)', 'num_classes'], {}), '(512, num_classes)\n', (14362, 14380), False, 'from torch import nn\n'), ((14408, 14435), 'torch.nn.Linear', 'nn.Linear', (['(512)', 'num_classes'], {}), '(512, num_classes)\n', (14417, 14435), False, 'from torch import nn\n'), ((14463, 14490), 'torch.nn.Linear', 'nn.Linear', (['(512)', 'num_classes'], {}), '(512, num_classes)\n', (14472, 14490), False, 'from torch import nn\n'), ((14518, 14545), 'torch.nn.Linear', 'nn.Linear', (['(512)', 'num_classes'], {}), '(512, num_classes)\n', (14527, 14545), False, 'from torch import nn\n'), ((16352, 16377), 'torchvision.models.resnet.resnet50', 'resnet50', ([], {'pretrained': '(True)'}), '(pretrained=True)\n', (16360, 16377), False, 'from torchvision.models.resnet import resnet50, Bottleneck\n'), ((16402, 16523), 'torch.nn.Sequential', 'nn.Sequential', (['resnet.conv1', 'resnet.bn1', 'resnet.relu', 'resnet.maxpool', 'resnet.layer1', 'resnet.layer2', 'resnet.layer3[0]'], {}), '(resnet.conv1, resnet.bn1, resnet.relu, resnet.maxpool, resnet\n .layer1, resnet.layer2, resnet.layer3[0])\n', (16415, 16523), False, 'from torch import nn\n'), ((16706, 16739), 'torch.nn.Sequential', 'nn.Sequential', (['*resnet.layer3[1:]'], {}), '(*resnet.layer3[1:])\n', (16719, 16739), False, 'from torch import nn\n'), ((16970, 17002), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', ([], {'kernel_size': '(8, 8)'}), '(kernel_size=(8, 8))\n', (16982, 17002), False, 'from torch import nn\n'), ((17029, 17061), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', ([], {'kernel_size': '(8, 8)'}), '(kernel_size=(8, 8))\n', (17041, 17061), False, 'from torch import nn\n'), ((17271, 17299), 'copy.deepcopy', 'copy.deepcopy', (['reduction_512'], {}), '(reduction_512)\n', (17284, 17299), False, 'import copy\n'), ((17327, 17355), 'copy.deepcopy', 'copy.deepcopy', (['reduction_512'], {}), '(reduction_512)\n', (17340, 17355), False, 'import copy\n'), ((17384, 17429), 'torch.nn.Linear', 'nn.Linear', (['self.dimension_branch', 'num_classes'], {}), '(self.dimension_branch, num_classes)\n', (17393, 17429), False, 'from torch import nn\n'), ((17457, 17502), 'torch.nn.Linear', 'nn.Linear', (['self.dimension_branch', 'num_classes'], {}), '(self.dimension_branch, num_classes)\n', (17466, 17502), False, 'from torch import nn\n'), ((18990, 19015), 'torchvision.models.resnet.resnet50', 'resnet50', ([], {'pretrained': '(True)'}), '(pretrained=True)\n', (18998, 19015), False, 'from torchvision.models.resnet import resnet50, Bottleneck\n'), ((19040, 19143), 'torch.nn.Sequential', 'nn.Sequential', (['resnet.conv1', 'resnet.bn1', 'resnet.relu', 'resnet.maxpool', 'resnet.layer1', 'resnet.layer2'], {}), '(resnet.conv1, resnet.bn1, resnet.relu, resnet.maxpool, resnet\n .layer1, resnet.layer2)\n', (19053, 19143), False, 'from torch import nn\n'), ((19821, 19860), 'torch.nn.Linear', 'nn.Linear', (['branch_feat_dim', 'num_classes'], {}), '(branch_feat_dim, num_classes)\n', (19830, 19860), False, 
'from torch import nn\n'), ((19884, 19923), 'torch.nn.Linear', 'nn.Linear', (['branch_feat_dim', 'num_classes'], {}), '(branch_feat_dim, num_classes)\n', (19893, 19923), False, 'from torch import nn\n'), ((19947, 19986), 'torch.nn.Linear', 'nn.Linear', (['branch_feat_dim', 'num_classes'], {}), '(branch_feat_dim, num_classes)\n', (19956, 19986), False, 'from torch import nn\n'), ((21324, 21349), 'torchvision.models.resnet.resnet50', 'resnet50', ([], {'pretrained': '(True)'}), '(pretrained=True)\n', (21332, 21349), False, 'from torchvision.models.resnet import resnet50, Bottleneck\n'), ((21374, 21477), 'torch.nn.Sequential', 'nn.Sequential', (['resnet.conv1', 'resnet.bn1', 'resnet.relu', 'resnet.maxpool', 'resnet.layer1', 'resnet.layer2'], {}), '(resnet.conv1, resnet.bn1, resnet.relu, resnet.maxpool, resnet\n .layer1, resnet.layer2)\n', (21387, 21477), False, 'from torch import nn\n'), ((21918, 21957), 'torch.nn.Linear', 'nn.Linear', (['branch_feat_dim', 'num_classes'], {}), '(branch_feat_dim, num_classes)\n', (21927, 21957), False, 'from torch import nn\n'), ((21981, 22020), 'torch.nn.Linear', 'nn.Linear', (['branch_feat_dim', 'num_classes'], {}), '(branch_feat_dim, num_classes)\n', (21990, 22020), False, 'from torch import nn\n'), ((22044, 22083), 'torch.nn.Linear', 'nn.Linear', (['branch_feat_dim', 'num_classes'], {}), '(branch_feat_dim, num_classes)\n', (22053, 22083), False, 'from torch import nn\n'), ((22266, 22298), 'torch.cat', 'torch.cat', (['[fg_b1, fl_b1]'], {'dim': '(1)'}), '([fg_b1, fl_b1], dim=1)\n', (22275, 22298), False, 'import torch\n'), ((22314, 22346), 'torch.cat', 'torch.cat', (['[fg_b2, fl_b2]'], {'dim': '(1)'}), '([fg_b2, fl_b2], dim=1)\n', (22323, 22346), False, 'import torch\n'), ((22362, 22394), 'torch.cat', 'torch.cat', (['[fg_b3, fl_b3]'], {'dim': '(1)'}), '([fg_b3, fl_b3], dim=1)\n', (22371, 22394), False, 'import torch\n'), ((24868, 24907), 'torch.nn.Linear', 'nn.Linear', (['branch_feat_dim', 'num_classes'], {}), '(branch_feat_dim, num_classes)\n', (24877, 24907), False, 'from torch import nn\n'), ((24931, 24970), 'torch.nn.Linear', 'nn.Linear', (['branch_feat_dim', 'num_classes'], {}), '(branch_feat_dim, num_classes)\n', (24940, 24970), False, 'from torch import nn\n'), ((24994, 25033), 'torch.nn.Linear', 'nn.Linear', (['branch_feat_dim', 'num_classes'], {}), '(branch_feat_dim, num_classes)\n', (25003, 25033), False, 'from torch import nn\n'), ((25686, 25708), 'torch.nn.Sequential', 'nn.Sequential', (['*layers'], {}), '(*layers)\n', (25699, 25708), False, 'from torch import nn\n'), ((2337, 2358), 'torchvision.models.resnet.Bottleneck', 'Bottleneck', (['(2048)', '(512)'], {}), '(2048, 512)\n', (2347, 2358), False, 'from torchvision.models.resnet import resnet50, Bottleneck\n'), ((2372, 2393), 'torchvision.models.resnet.Bottleneck', 'Bottleneck', (['(2048)', '(512)'], {}), '(2048, 512)\n', (2382, 2393), False, 'from torchvision.models.resnet import resnet50, Bottleneck\n'), ((2492, 2516), 'copy.deepcopy', 'copy.deepcopy', (['res_conv4'], {}), '(res_conv4)\n', (2505, 2516), False, 'import copy\n'), ((2518, 2544), 'copy.deepcopy', 'copy.deepcopy', (['res_g_conv5'], {}), '(res_g_conv5)\n', (2531, 2544), False, 'import copy\n'), ((2578, 2602), 'copy.deepcopy', 'copy.deepcopy', (['res_conv4'], {}), '(res_conv4)\n', (2591, 2602), False, 'import copy\n'), ((2604, 2630), 'copy.deepcopy', 'copy.deepcopy', (['res_p_conv5'], {}), '(res_p_conv5)\n', (2617, 2630), False, 'import copy\n'), ((2798, 2851), 'torch.nn.Conv2d', 'nn.Conv2d', (['(2048)', 'self.dimension_branch', 
'(1)'], {'bias': '(False)'}), '(2048, self.dimension_branch, 1, bias=False)\n', (2807, 2851), False, 'from torch import nn\n'), ((2891, 2928), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['self.dimension_branch'], {}), '(self.dimension_branch)\n', (2905, 2928), False, 'from torch import nn\n'), ((2930, 2939), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (2937, 2939), False, 'from torch import nn\n'), ((8491, 8512), 'torchvision.models.resnet.Bottleneck', 'Bottleneck', (['(2048)', '(512)'], {}), '(2048, 512)\n', (8501, 8512), False, 'from torchvision.models.resnet import resnet50, Bottleneck\n'), ((8526, 8547), 'torchvision.models.resnet.Bottleneck', 'Bottleneck', (['(2048)', '(512)'], {}), '(2048, 512)\n', (8536, 8547), False, 'from torchvision.models.resnet import resnet50, Bottleneck\n'), ((8674, 8698), 'copy.deepcopy', 'copy.deepcopy', (['res_conv4'], {}), '(res_conv4)\n', (8687, 8698), False, 'import copy\n'), ((8700, 8726), 'copy.deepcopy', 'copy.deepcopy', (['res_g_conv5'], {}), '(res_g_conv5)\n', (8713, 8726), False, 'import copy\n'), ((8781, 8805), 'copy.deepcopy', 'copy.deepcopy', (['res_conv4'], {}), '(res_conv4)\n', (8794, 8805), False, 'import copy\n'), ((8807, 8833), 'copy.deepcopy', 'copy.deepcopy', (['res_p_conv5'], {}), '(res_p_conv5)\n', (8820, 8833), False, 'import copy\n'), ((8888, 8912), 'copy.deepcopy', 'copy.deepcopy', (['res_conv4'], {}), '(res_conv4)\n', (8901, 8912), False, 'import copy\n'), ((8914, 8940), 'copy.deepcopy', 'copy.deepcopy', (['res_p_conv5'], {}), '(res_p_conv5)\n', (8927, 8940), False, 'import copy\n'), ((9340, 9375), 'torch.nn.Conv2d', 'nn.Conv2d', (['(2048)', '(256)', '(1)'], {'bias': '(False)'}), '(2048, 256, 1, bias=False)\n', (9349, 9375), False, 'from torch import nn\n'), ((9377, 9396), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(256)'], {}), '(256)\n', (9391, 9396), False, 'from torch import nn\n'), ((9398, 9407), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (9405, 9407), False, 'from torch import nn\n'), ((13452, 13476), 'copy.deepcopy', 'copy.deepcopy', (['res_conv4'], {}), '(res_conv4)\n', (13465, 13476), False, 'import copy\n'), ((13478, 13502), 'copy.deepcopy', 'copy.deepcopy', (['res_conv5'], {}), '(res_conv5)\n', (13491, 13502), False, 'import copy\n'), ((13536, 13560), 'copy.deepcopy', 'copy.deepcopy', (['res_conv4'], {}), '(res_conv4)\n', (13549, 13560), False, 'import copy\n'), ((13562, 13586), 'copy.deepcopy', 'copy.deepcopy', (['res_conv5'], {}), '(res_conv5)\n', (13575, 13586), False, 'import copy\n'), ((13620, 13644), 'copy.deepcopy', 'copy.deepcopy', (['res_conv4'], {}), '(res_conv4)\n', (13633, 13644), False, 'import copy\n'), ((13646, 13670), 'copy.deepcopy', 'copy.deepcopy', (['res_conv5'], {}), '(res_conv5)\n', (13659, 13670), False, 'import copy\n'), ((13704, 13728), 'copy.deepcopy', 'copy.deepcopy', (['res_conv4'], {}), '(res_conv4)\n', (13717, 13728), False, 'import copy\n'), ((13730, 13754), 'copy.deepcopy', 'copy.deepcopy', (['res_conv5'], {}), '(res_conv5)\n', (13743, 13754), False, 'import copy\n'), ((14032, 14067), 'torch.nn.Conv2d', 'nn.Conv2d', (['(2048)', '(512)', '(1)'], {'bias': '(False)'}), '(2048, 512, 1, bias=False)\n', (14041, 14067), False, 'from torch import nn\n'), ((14069, 14088), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(512)'], {}), '(512)\n', (14083, 14088), False, 'from torch import nn\n'), ((14090, 14099), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (14097, 14099), False, 'from torch import nn\n'), ((16807, 16831), 'copy.deepcopy', 'copy.deepcopy', (['res_conv4'], {}), '(res_conv4)\n', 
(16820, 16831), False, 'import copy\n'), ((16833, 16857), 'copy.deepcopy', 'copy.deepcopy', (['res_conv5'], {}), '(res_conv5)\n', (16846, 16857), False, 'import copy\n'), ((16891, 16915), 'copy.deepcopy', 'copy.deepcopy', (['res_conv4'], {}), '(res_conv4)\n', (16904, 16915), False, 'import copy\n'), ((16917, 16941), 'copy.deepcopy', 'copy.deepcopy', (['res_conv5'], {}), '(res_conv5)\n', (16930, 16941), False, 'import copy\n'), ((17101, 17154), 'torch.nn.Conv2d', 'nn.Conv2d', (['(2048)', 'self.dimension_branch', '(1)'], {'bias': '(False)'}), '(2048, self.dimension_branch, 1, bias=False)\n', (17110, 17154), False, 'from torch import nn\n'), ((17194, 17231), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['self.dimension_branch'], {}), '(self.dimension_branch)\n', (17208, 17231), False, 'from torch import nn\n'), ((17233, 17242), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (17240, 17242), False, 'from torch import nn\n'), ((19404, 19435), 'torch.nn.Dropout', 'nn.Dropout', ([], {'p': '(0.5)', 'inplace': '(True)'}), '(p=0.5, inplace=True)\n', (19414, 19435), False, 'from torch import nn\n'), ((19584, 19615), 'torch.nn.Dropout', 'nn.Dropout', ([], {'p': '(0.5)', 'inplace': '(True)'}), '(p=0.5, inplace=True)\n', (19594, 19615), False, 'from torch import nn\n'), ((19764, 19795), 'torch.nn.Dropout', 'nn.Dropout', ([], {'p': '(0.5)', 'inplace': '(True)'}), '(p=0.5, inplace=True)\n', (19774, 19795), False, 'from torch import nn\n'), ((24048, 24112), 'torch.nn.Conv2d', 'nn.Conv2d', (['(3)', '(16)'], {'kernel_size': '(7)', 'stride': '(2)', 'padding': '(3)', 'bias': '(False)'}), '(3, 16, kernel_size=7, stride=2, padding=3, bias=False)\n', (24057, 24112), False, 'from torch import nn\n'), ((24126, 24144), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(16)'], {}), '(16)\n', (24140, 24144), False, 'from torch import nn\n'), ((24158, 24179), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (24165, 24179), False, 'from torch import nn\n'), ((24193, 24241), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', ([], {'kernel_size': '(3)', 'stride': '(2)', 'padding': '(1)'}), '(kernel_size=3, stride=2, padding=1)\n', (24205, 24241), False, 'from torch import nn\n'), ((24451, 24482), 'torch.nn.Dropout', 'nn.Dropout', ([], {'p': '(0.5)', 'inplace': '(True)'}), '(p=0.5, inplace=True)\n', (24461, 24482), False, 'from torch import nn\n'), ((24631, 24662), 'torch.nn.Dropout', 'nn.Dropout', ([], {'p': '(0.5)', 'inplace': '(True)'}), '(p=0.5, inplace=True)\n', (24641, 24662), False, 'from torch import nn\n'), ((24811, 24842), 'torch.nn.Dropout', 'nn.Dropout', ([], {'p': '(0.5)', 'inplace': '(True)'}), '(p=0.5, inplace=True)\n', (24821, 24842), False, 'from torch import nn\n'), ((4398, 4430), 'torch.cat', 'torch.cat', (['[fg_p1, fg_p2]'], {'dim': '(1)'}), '([fg_p1, fg_p2], dim=1)\n', (4407, 4430), False, 'import torch\n'), ((6315, 6347), 'torch.cat', 'torch.cat', (['[fg_p1, fg_p2]'], {'dim': '(1)'}), '([fg_p1, fg_p2], dim=1)\n', (6324, 6347), False, 'import torch\n'), ((12363, 12402), 'torch.cat', 'torch.cat', (['[fg_p1, fg_p2, fg_p3]'], {'dim': '(1)'}), '([fg_p1, fg_p2, fg_p3], dim=1)\n', (12372, 12402), False, 'import torch\n'), ((12554, 12628), 'torch.cat', 'torch.cat', (['[fg_p1, fg_p2, fg_p3, f0_p2, f1_p2, f0_p3, f1_p3, f2_p3]'], {'dim': '(1)'}), '([fg_p1, fg_p2, fg_p3, f0_p2, f1_p2, f0_p3, f1_p3, f2_p3], dim=1)\n', (12563, 12628), False, 'import torch\n'), ((15470, 15512), 'torch.cat', 'torch.cat', (['[f_b1, f_b2, f_b3, f_b4]'], {'dim': '(1)'}), '([f_b1, f_b2, f_b3, f_b4], dim=1)\n', (15479, 15512), 
False, 'import torch\n'), ((15786, 15828), 'torch.cat', 'torch.cat', (['[f_b1, f_b2, f_b3, f_b4]'], {'dim': '(1)'}), '([f_b1, f_b2, f_b3, f_b4], dim=1)\n', (15795, 15828), False, 'import torch\n'), ((18085, 18115), 'torch.cat', 'torch.cat', (['[f_b1, f_b2]'], {'dim': '(1)'}), '([f_b1, f_b2], dim=1)\n', (18094, 18115), False, 'import torch\n'), ((18417, 18447), 'torch.cat', 'torch.cat', (['[f_b1, f_b2]'], {'dim': '(1)'}), '([f_b1, f_b2], dim=1)\n', (18426, 18447), False, 'import torch\n'), ((20438, 20474), 'torch.cat', 'torch.cat', (['[f_b1, f_b2, f_b3]'], {'dim': '(1)'}), '([f_b1, f_b2, f_b3], dim=1)\n', (20447, 20474), False, 'import torch\n'), ((20741, 20777), 'torch.cat', 'torch.cat', (['[f_b1, f_b2, f_b3]'], {'dim': '(1)'}), '([f_b1, f_b2, f_b3], dim=1)\n', (20750, 20777), False, 'import torch\n'), ((22696, 22732), 'torch.cat', 'torch.cat', (['[f_b1, f_b2, f_b3]'], {'dim': '(1)'}), '([f_b1, f_b2, f_b3], dim=1)\n', (22705, 22732), False, 'import torch\n'), ((23122, 23158), 'torch.cat', 'torch.cat', (['[f_b1, f_b2, f_b3]'], {'dim': '(1)'}), '([f_b1, f_b2, f_b3], dim=1)\n', (23131, 23158), False, 'import torch\n'), ((25245, 25342), 'torch.nn.Conv2d', 'nn.Conv2d', (['self.inplanes', '(planes * block.expansion)'], {'kernel_size': '(1)', 'stride': 'stride', 'bias': '(False)'}), '(self.inplanes, planes * block.expansion, kernel_size=1, stride=\n stride, bias=False)\n', (25254, 25342), False, 'from torch import nn\n'), ((25381, 25421), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(planes * block.expansion)'], {}), '(planes * block.expansion)\n', (25395, 25421), False, 'from torch import nn\n'), ((26160, 26196), 'torch.cat', 'torch.cat', (['[f_b1, f_b2, f_b3]'], {'dim': '(1)'}), '([f_b1, f_b2, f_b3], dim=1)\n', (26169, 26196), False, 'import torch\n'), ((26463, 26499), 'torch.cat', 'torch.cat', (['[f_b1, f_b2, f_b3]'], {'dim': '(1)'}), '([f_b1, f_b2, f_b3], dim=1)\n', (26472, 26499), False, 'import torch\n'), ((551, 570), 'torchvision.models.resnet.resnet50.children', 'resnet50.children', ([], {}), '()\n', (568, 570), False, 'from torchvision.models.resnet import resnet50, Bottleneck\n'), ((2263, 2299), 'torch.nn.Conv2d', 'nn.Conv2d', (['(1024)', '(2048)', '(1)'], {'bias': '(False)'}), '(1024, 2048, 1, bias=False)\n', (2272, 2299), False, 'from torch import nn\n'), ((2301, 2321), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(2048)'], {}), '(2048)\n', (2315, 2321), False, 'from torch import nn\n'), ((8417, 8453), 'torch.nn.Conv2d', 'nn.Conv2d', (['(1024)', '(2048)', '(1)'], {'bias': '(False)'}), '(1024, 2048, 1, bias=False)\n', (8426, 8453), False, 'from torch import nn\n'), ((8455, 8475), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(2048)'], {}), '(2048)\n', (8469, 8475), False, 'from torch import nn\n')]
|
import os
import uuid
from django.db import models
from accounts.models import Student
from accounts.models import Teacher
import cloudinary
class CourseORM(models.Model):
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
name = models.CharField(max_length=150)
slug = models.SlugField(db_index=True, max_length=150)
code = models.CharField(max_length=6, default=uuid.uuid4().hex.upper()[0:6], blank=True, verbose_name="Code")
image = cloudinary.models.CloudinaryField('image', null=True, blank=True)
hours = models.CharField(max_length=4)
#image = models.ImageField(null=True, blank=True, upload_to='courses_images')
description = models.TextField()
teacher = models.ForeignKey(Teacher, on_delete=models.PROTECT)
students = models.ManyToManyField(Student)
def __str__(self):
return self.name
class Meta:
app_label="courses"
class ModuleORM(models.Model):
title = models.CharField(max_length=200)
description = models.TextField(null=True, blank=True)
order = models.PositiveIntegerField(default=1)
course = models.ForeignKey(CourseORM, on_delete=models.CASCADE)
def __str__(self):
return self.title
class Meta:
app_label="courses"
class SectionORM(models.Model):
title = models.CharField(max_length=200)
description = models.TextField(null=True, blank=True)
order = models.PositiveIntegerField(default=1)
module = models.ForeignKey(ModuleORM, on_delete=models.CASCADE)
def __str__(self):
return self.title
class Meta:
app_label="courses"
class TopicORM(models.Model):
TOPIC_TYPE_GENERAL = 1
TOPIC_TYPE_HOMEWORK = 2
    TOPIC_TYPE_QUIZ = 3
TOPIC_TYPE_CHOICES = (
(TOPIC_TYPE_GENERAL, 'General'),
(TOPIC_TYPE_HOMEWORK, 'Homework'),
(TOPIC_TYPE_QUIZ, 'Quiz'),
)
title = models.CharField(max_length=200)
description = models.TextField(null=True, blank=True)
order = models.PositiveIntegerField(default=1)
type = models.SmallIntegerField(choices=TOPIC_TYPE_CHOICES, default=TOPIC_TYPE_GENERAL, verbose_name='Type')
section = models.ForeignKey(SectionORM, on_delete=models.CASCADE)
def __str__(self):
        return self.title
class Meta:
app_label="courses"
def get_resource_file_path(instance, filename):
ext = filename.split('.')[-1]
filename = "%s.%s" % (uuid.uuid4(), ext)
return os.path.join("resources/", filename)
class ResourcesORM(models.Model):
topic = models.ForeignKey(TopicORM, on_delete=models.CASCADE)
resource = models.FileField(upload_to=get_resource_file_path, verbose_name="File")
class Meta:
app_label="courses"
class PackageORM(models.Model):
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
name = models.CharField(max_length=150)
slug = models.SlugField(db_index=True, max_length=150)
code = models.CharField(max_length=10)
parent = models.ForeignKey('PackageORM', on_delete=models.CASCADE)
courses = models.ManyToManyField(CourseORM)
def __str__(self):
return self.name
class Meta:
app_label="courses"
|
[
"django.db.models.FileField",
"django.db.models.TextField",
"django.db.models.ManyToManyField",
"uuid.uuid4",
"django.db.models.CharField",
"django.db.models.ForeignKey",
"django.db.models.PositiveIntegerField",
"django.db.models.SlugField",
"django.db.models.SmallIntegerField",
"cloudinary.models.CloudinaryField",
"django.db.models.UUIDField",
"os.path.join"
] |
[((185, 255), 'django.db.models.UUIDField', 'models.UUIDField', ([], {'primary_key': '(True)', 'default': 'uuid.uuid4', 'editable': '(False)'}), '(primary_key=True, default=uuid.uuid4, editable=False)\n', (201, 255), False, 'from django.db import models\n'), ((267, 299), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(150)'}), '(max_length=150)\n', (283, 299), False, 'from django.db import models\n'), ((311, 358), 'django.db.models.SlugField', 'models.SlugField', ([], {'db_index': '(True)', 'max_length': '(150)'}), '(db_index=True, max_length=150)\n', (327, 358), False, 'from django.db import models\n'), ((485, 550), 'cloudinary.models.CloudinaryField', 'cloudinary.models.CloudinaryField', (['"""image"""'], {'null': '(True)', 'blank': '(True)'}), "('image', null=True, blank=True)\n", (518, 550), False, 'import cloudinary\n'), ((563, 593), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(4)'}), '(max_length=4)\n', (579, 593), False, 'from django.db import models\n'), ((694, 712), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (710, 712), False, 'from django.db import models\n'), ((727, 779), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Teacher'], {'on_delete': 'models.PROTECT'}), '(Teacher, on_delete=models.PROTECT)\n', (744, 779), False, 'from django.db import models\n'), ((795, 826), 'django.db.models.ManyToManyField', 'models.ManyToManyField', (['Student'], {}), '(Student)\n', (817, 826), False, 'from django.db import models\n'), ((972, 1004), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (988, 1004), False, 'from django.db import models\n'), ((1023, 1062), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (1039, 1062), False, 'from django.db import models\n'), ((1075, 1113), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {'default': '(1)'}), '(default=1)\n', (1102, 1113), False, 'from django.db import models\n'), ((1127, 1181), 'django.db.models.ForeignKey', 'models.ForeignKey', (['CourseORM'], {'on_delete': 'models.CASCADE'}), '(CourseORM, on_delete=models.CASCADE)\n', (1144, 1181), False, 'from django.db import models\n'), ((1323, 1355), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (1339, 1355), False, 'from django.db import models\n'), ((1374, 1413), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (1390, 1413), False, 'from django.db import models\n'), ((1426, 1464), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {'default': '(1)'}), '(default=1)\n', (1453, 1464), False, 'from django.db import models\n'), ((1478, 1532), 'django.db.models.ForeignKey', 'models.ForeignKey', (['ModuleORM'], {'on_delete': 'models.CASCADE'}), '(ModuleORM, on_delete=models.CASCADE)\n', (1495, 1532), False, 'from django.db import models\n'), ((1905, 1937), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (1921, 1937), False, 'from django.db import models\n'), ((1956, 1995), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (1972, 1995), False, 'from django.db import models\n'), ((2008, 2046), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {'default': 
'(1)'}), '(default=1)\n', (2035, 2046), False, 'from django.db import models\n'), ((2058, 2164), 'django.db.models.SmallIntegerField', 'models.SmallIntegerField', ([], {'choices': 'TOPIC_TYPE_CHOICES', 'default': 'TOPIC_TYPE_GENERAL', 'verbose_name': '"""Type"""'}), "(choices=TOPIC_TYPE_CHOICES, default=\n TOPIC_TYPE_GENERAL, verbose_name='Type')\n", (2082, 2164), False, 'from django.db import models\n'), ((2174, 2229), 'django.db.models.ForeignKey', 'models.ForeignKey', (['SectionORM'], {'on_delete': 'models.CASCADE'}), '(SectionORM, on_delete=models.CASCADE)\n', (2191, 2229), False, 'from django.db import models\n'), ((2464, 2500), 'os.path.join', 'os.path.join', (['"""resources/"""', 'filename'], {}), "('resources/', filename)\n", (2476, 2500), False, 'import os\n'), ((2594, 2647), 'django.db.models.ForeignKey', 'models.ForeignKey', (['TopicORM'], {'on_delete': 'models.CASCADE'}), '(TopicORM, on_delete=models.CASCADE)\n', (2611, 2647), False, 'from django.db import models\n'), ((2663, 2734), 'django.db.models.FileField', 'models.FileField', ([], {'upload_to': 'get_resource_file_path', 'verbose_name': '"""File"""'}), "(upload_to=get_resource_file_path, verbose_name='File')\n", (2679, 2734), False, 'from django.db import models\n'), ((2823, 2893), 'django.db.models.UUIDField', 'models.UUIDField', ([], {'primary_key': '(True)', 'default': 'uuid.uuid4', 'editable': '(False)'}), '(primary_key=True, default=uuid.uuid4, editable=False)\n', (2839, 2893), False, 'from django.db import models\n'), ((2905, 2937), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(150)'}), '(max_length=150)\n', (2921, 2937), False, 'from django.db import models\n'), ((2949, 2996), 'django.db.models.SlugField', 'models.SlugField', ([], {'db_index': '(True)', 'max_length': '(150)'}), '(db_index=True, max_length=150)\n', (2965, 2996), False, 'from django.db import models\n'), ((3008, 3039), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(10)'}), '(max_length=10)\n', (3024, 3039), False, 'from django.db import models\n'), ((3053, 3110), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""PackageORM"""'], {'on_delete': 'models.CASCADE'}), "('PackageORM', on_delete=models.CASCADE)\n", (3070, 3110), False, 'from django.db import models\n'), ((3125, 3158), 'django.db.models.ManyToManyField', 'models.ManyToManyField', (['CourseORM'], {}), '(CourseORM)\n', (3147, 3158), False, 'from django.db import models\n'), ((2434, 2446), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (2444, 2446), False, 'import uuid\n'), ((409, 421), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (419, 421), False, 'import uuid\n')]
|
'''
Code modified by the authors of the paper: "Low-rank Subspaces for Unsupervised Entity Linking" to enable working with "Wikidata" instead of "Freebase"
'''
import argparse
sup_train=False
MAX_POS = 10
MAX_N_POSS_TEST = 100
MAX_N_POSS_TRAIN = 100
N_NEGS = 10
SAMPLE_NEGS = True
TYPE_OPT = 'mean'
parser = argparse.ArgumentParser()
parser.add_argument("--mode", type=str, help="train or eval", default='train')
parser.add_argument("--model_path", type=str, help="model path to save/load", default='model')
parser.add_argument("--n_epochs", type=int, help="number of epochs", default=20 if not sup_train else 50)
parser.add_argument("--batchsize", type=int, help="batchsize", default=50)
parser.add_argument("--max_len", type=int, help="max sentence length", default=100)
parser.add_argument("--lr", type=float, help="learning rate", default=1e-3)
parser.add_argument("--dropout", type=float, help="dropout rate", default=0)
parser.add_argument("--lstm_hiddim", type=int, help="hiddim of the encoder's combine", default=100)
parser.add_argument("--enc_type", type=str, default="lstm")
parser.add_argument("--n_filters", type=int, default=200)
parser.add_argument("--en_dim", type=int, default = 300)
parser.add_argument("--pos_embdim", type=int, default=5)
parser.add_argument("--type_embdim", type=int, default=50)
parser.add_argument("--ent_embdim", type=int, default=100)
parser.add_argument("--datadir", type=str, default='data/wikidata/')
parser.add_argument("--noise_threshold", type=float, default=0.75 if not sup_train else 1)
parser.add_argument("--margin", type=float, default=0.1)
parser.add_argument("--kl_coef", type=float, default=5 if not sup_train else 0)
parser.add_argument("--noise_prior", type=float, default=0.9)
parser.add_argument("--train_data", type=str, default='data/el_annotated_170k_le_titov.json')
parser.add_argument("--dev_data", type=str, default='data/aida_testa_le_titov.json')
parser.add_argument("--test_data", type=str, default='data/aida_testb_le_titov.json')
|
[
"argparse.ArgumentParser"
] |
[((311, 336), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (334, 336), False, 'import argparse\n')]
|
# Load the necessary libraries
import matplotlib.pyplot as plt
import numpy
import pandas
import sklearn.cluster as cluster
import sklearn.metrics as metrics
bikeshare = pandas.read_csv('C:\\Users\\minlam\\Documents\\IIT\\Machine Learning\\Data\\BikeSharingDemand_Train.csv',
delimiter=',')
# Use only these three interval variables
trainData = bikeshare[['temp', 'humidity', 'windspeed']].dropna()
nObs = trainData.shape[0]
# Determine the number of clusters using the Silhouette metrics
nClusters = numpy.zeros(15)
Elbow = numpy.zeros(15)
Silhouette = numpy.zeros(15)
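# For each candidate number of clusters K = 1..15, fit k-means and record two diagnostics:
# the Elbow value (sum over clusters of the within-cluster sum of squares divided by the
# cluster size) and the Silhouette score (defined only for K > 1).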
for c in range(15):
KClusters = c + 1
nClusters[c] = KClusters
kmeans = cluster.KMeans(n_clusters=KClusters, random_state=60616).fit(trainData)
if (KClusters > 1):
Silhouette[c] = metrics.silhouette_score(trainData, kmeans.labels_)
WCSS = numpy.zeros(KClusters)
nC = numpy.zeros(KClusters)
for i in range(nObs):
k = kmeans.labels_[i]
nC[k] += 1
diff = trainData.iloc[i,] - kmeans.cluster_centers_[k]
WCSS[k] += diff.dot(diff)
Elbow[c] = 0
for k in range(KClusters):
Elbow[c] += WCSS[k] / nC[k]
print("Cluster Size Elbow Value Silhouette Value: /n")
for c in range(15):
print(nClusters[c], Elbow[c], Silhouette[c])
plt.plot(nClusters, Elbow, linewidth = 2, marker = 'o')
plt.xticks(range(1,15,1))
plt.grid(True)
plt.xlabel("Number of Clusters")
plt.ylabel("Elbow Value")
plt.show()
# Plot the Silhouette metrics versus the number of clusters
plt.plot(nClusters, Silhouette, linewidth = 2, marker = 'o')
plt.xticks(range(1,15,1))
plt.grid(True)
plt.xlabel("Number of Clusters")
plt.ylabel("Silhouette Value")
plt.show()
KClusters = 2
kmeans = cluster.KMeans(n_clusters=KClusters, random_state=60616).fit(trainData)
nC = numpy.zeros(KClusters)
for i in range(nObs):
k = kmeans.labels_[i]
nC[k] += 1
print(nC)
for k in range(KClusters):
print("Cluster ", k)
print("Centroid = ", kmeans.cluster_centers_[k])
# Load the TREE library from SKLEARN
from sklearn import tree
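# Profile the chosen K = 2 clustering: fit a shallow decision tree that predicts the
# k-means labels from temp/humidity/windspeed and export it with graphviz for inspection.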
classTree = tree.DecisionTreeClassifier(criterion='entropy', max_depth=4, random_state=60616)
bikeshare_DT = classTree.fit(trainData, kmeans.labels_)
print('Accuracy of Decision Tree classifier on training set: {:.6f}' .format(classTree.score(trainData, kmeans.labels_)))
import graphviz
dot_data = tree.export_graphviz(bikeshare_DT,
out_file=None,
impurity = True, filled = True,
feature_names = ['temp', 'humidity', 'windspeed'],
class_names = ['Cluster 0', 'Cluster 1'])
graph = graphviz.Source(dot_data)
graph
graph.render('C:\\Users\\minlam\\Documents\\IIT\\Machine Learning\\Job\\hmeq_output')
|
[
"matplotlib.pyplot.show",
"matplotlib.pyplot.plot",
"pandas.read_csv",
"sklearn.cluster.KMeans",
"numpy.zeros",
"sklearn.tree.DecisionTreeClassifier",
"sklearn.tree.export_graphviz",
"sklearn.metrics.silhouette_score",
"graphviz.Source",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.grid"
] |
[((178, 308), 'pandas.read_csv', 'pandas.read_csv', (['"""C:\\\\Users\\\\minlam\\\\Documents\\\\IIT\\\\Machine Learning\\\\Data\\\\BikeSharingDemand_Train.csv"""'], {'delimiter': '""","""'}), "(\n 'C:\\\\Users\\\\minlam\\\\Documents\\\\IIT\\\\Machine Learning\\\\Data\\\\BikeSharingDemand_Train.csv'\n , delimiter=',')\n", (193, 308), False, 'import pandas\n'), ((546, 561), 'numpy.zeros', 'numpy.zeros', (['(15)'], {}), '(15)\n', (557, 561), False, 'import numpy\n'), ((571, 586), 'numpy.zeros', 'numpy.zeros', (['(15)'], {}), '(15)\n', (582, 586), False, 'import numpy\n'), ((601, 616), 'numpy.zeros', 'numpy.zeros', (['(15)'], {}), '(15)\n', (612, 616), False, 'import numpy\n'), ((1337, 1388), 'matplotlib.pyplot.plot', 'plt.plot', (['nClusters', 'Elbow'], {'linewidth': '(2)', 'marker': '"""o"""'}), "(nClusters, Elbow, linewidth=2, marker='o')\n", (1345, 1388), True, 'import matplotlib.pyplot as plt\n'), ((1421, 1435), 'matplotlib.pyplot.grid', 'plt.grid', (['(True)'], {}), '(True)\n', (1429, 1435), True, 'import matplotlib.pyplot as plt\n'), ((1437, 1469), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Number of Clusters"""'], {}), "('Number of Clusters')\n", (1447, 1469), True, 'import matplotlib.pyplot as plt\n'), ((1471, 1496), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Elbow Value"""'], {}), "('Elbow Value')\n", (1481, 1496), True, 'import matplotlib.pyplot as plt\n'), ((1498, 1508), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1506, 1508), True, 'import matplotlib.pyplot as plt\n'), ((1573, 1629), 'matplotlib.pyplot.plot', 'plt.plot', (['nClusters', 'Silhouette'], {'linewidth': '(2)', 'marker': '"""o"""'}), "(nClusters, Silhouette, linewidth=2, marker='o')\n", (1581, 1629), True, 'import matplotlib.pyplot as plt\n'), ((1662, 1676), 'matplotlib.pyplot.grid', 'plt.grid', (['(True)'], {}), '(True)\n', (1670, 1676), True, 'import matplotlib.pyplot as plt\n'), ((1678, 1710), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Number of Clusters"""'], {}), "('Number of Clusters')\n", (1688, 1710), True, 'import matplotlib.pyplot as plt\n'), ((1712, 1742), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Silhouette Value"""'], {}), "('Silhouette Value')\n", (1722, 1742), True, 'import matplotlib.pyplot as plt\n'), ((1744, 1754), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1752, 1754), True, 'import matplotlib.pyplot as plt\n'), ((1862, 1884), 'numpy.zeros', 'numpy.zeros', (['KClusters'], {}), '(KClusters)\n', (1873, 1884), False, 'import numpy\n'), ((2147, 2233), 'sklearn.tree.DecisionTreeClassifier', 'tree.DecisionTreeClassifier', ([], {'criterion': '"""entropy"""', 'max_depth': '(4)', 'random_state': '(60616)'}), "(criterion='entropy', max_depth=4, random_state=\n 60616)\n", (2174, 2233), False, 'from sklearn import tree\n'), ((2442, 2618), 'sklearn.tree.export_graphviz', 'tree.export_graphviz', (['bikeshare_DT'], {'out_file': 'None', 'impurity': '(True)', 'filled': '(True)', 'feature_names': "['temp', 'humidity', 'windspeed']", 'class_names': "['Cluster 0', 'Cluster 1']"}), "(bikeshare_DT, out_file=None, impurity=True, filled=\n True, feature_names=['temp', 'humidity', 'windspeed'], class_names=[\n 'Cluster 0', 'Cluster 1'])\n", (2462, 2618), False, 'from sklearn import tree\n'), ((2760, 2785), 'graphviz.Source', 'graphviz.Source', (['dot_data'], {}), '(dot_data)\n', (2775, 2785), False, 'import graphviz\n'), ((893, 915), 'numpy.zeros', 'numpy.zeros', (['KClusters'], {}), '(KClusters)\n', (904, 915), False, 'import numpy\n'), ((925, 947), 'numpy.zeros', 'numpy.zeros', 
(['KClusters'], {}), '(KClusters)\n', (936, 947), False, 'import numpy\n'), ((828, 879), 'sklearn.metrics.silhouette_score', 'metrics.silhouette_score', (['trainData', 'kmeans.labels_'], {}), '(trainData, kmeans.labels_)\n', (852, 879), True, 'import sklearn.metrics as metrics\n'), ((1782, 1838), 'sklearn.cluster.KMeans', 'cluster.KMeans', ([], {'n_clusters': 'KClusters', 'random_state': '(60616)'}), '(n_clusters=KClusters, random_state=60616)\n', (1796, 1838), True, 'import sklearn.cluster as cluster\n'), ((706, 762), 'sklearn.cluster.KMeans', 'cluster.KMeans', ([], {'n_clusters': 'KClusters', 'random_state': '(60616)'}), '(n_clusters=KClusters, random_state=60616)\n', (720, 762), True, 'import sklearn.cluster as cluster\n')]
|
from tfc import utfc
from tfc.utils import TFCDictRobust, egrad, NllsClass, MakePlot
import numpy as onp
import jax.numpy as np
from jax import vmap, jacfwd, jit, lax
import tqdm
import pickle
from scipy.optimize import fsolve
from scipy.integrate import simps
from time import process_time as timer
## TEST PARAMETERS: ***************************************************
tol = np.finfo(float).eps
maxIter = 50
W = False
if W == False:
Gam = 0.
else:
Gam = 100.
## CONSTANTS: *********************************************************
# Number of points to use
N = 100
# Number of basis functions to use
ms = 30
mc = 1
# Number of constraints
nCx = 0
nCy = 0
## GET CHEBYSHEV VALUES **********************************************
stfc = utfc(N,nCx,ms,basis='CP',x0 = -1, xf = 1.)
ctfc = utfc(N,nCy,mc,basis='CP',x0 = -1, xf = 1.)
Hs = stfc.H
Hc = ctfc.H
## DEFINE THE ASSUMED SOLUTION **************************************
z = stfc.z
z0 = z[0]
zf = z[-1]
## DEFINE CONSTRAINED EXPRESSION *************************************
r = lambda z, xi, IC: np.dot(Hs(z),xi['xis'])
v = egrad(r,0)
a = egrad(v,0)
lam = lambda z, xi: np.dot(Hc(z),xi['xic'])
lamr = egrad(lam,0)
## FORM LOSS AND JACOBIAN ***********************************************************************************
L0 = lambda xi,IC: r(z,xi,IC)[0,:] - IC['R0']
Ld0 = lambda xi,IC: IC['c'] * v(z,xi,IC)[0,:] - IC['V0']
Lf = lambda xi,IC: r(z,xi,IC)[-1,:]
Ldf = lambda xi,IC: IC['c'] * v(z,xi,IC)[-1,:]
Ls = lambda xi,IC: IC['c']**2 * a(z,xi,IC) - IC['ag'] + lam(z,xi)
# Htf = lambda xi,IC: np.dot(lam(z,xi)[-1,:],(-1./2.*lam(z,xi)[-1,:] + IC['ag']))
# Updated because we need to add the lam_r * v term for the spectral method
Htf = lambda xi,IC: np.dot(lam(z,xi)[-1,:],(-1./2.*lam(z,xi)[-1,:] + IC['ag'])) \
+ np.dot(-IC['c'] *lamr(z,xi)[-1,:], IC['c'] * v(z,xi,IC)[-1,:]) + IC['Gam']
L = jit(lambda xi,IC: np.hstack(( Ls(xi,IC)[1:-1,:].flatten(), \
L0(xi,IC).flatten(), \
Ld0(xi,IC).flatten(), \
Lf(xi,IC).flatten(), \
Ldf(xi,IC).flatten() )) )
## INITIALIZE VARIABLES *************************************************************************************
xis = onp.zeros((Hs(z).shape[1],3))
xic = onp.zeros((Hc(z).shape[1],3))
if W == False:
b = np.sqrt(2)*onp.ones(1)
else:
b = np.sqrt(10)*onp.ones(1)
xi = TFCDictRobust({'xis':xis,\
'xic':xic})
IC = {'R0': np.zeros((3,)), \
'V0': np.zeros((3,)), \
'ag': np.zeros((3,)), \
'Gam': np.zeros((1,)), \
'c': 2.*onp.ones(1)}
## NONLINEAR LEAST-SQUARES CLASS *****************************************************************************
nlls = NllsClass(xi,L,maxIter=2,timer=True)
R0 = np.array([500000., 100000., 50000.])
V0 = np.array([-3000., 0., 0.])
## scale initial conditions
pscale = np.max(np.abs(R0))
tscale = pscale/np.max(np.abs(V0))
IC['R0'] = R0 / pscale
IC['V0'] = V0 * tscale/pscale
IC['ag'] = np.array([0., 0., -5.314961]) * tscale**2/pscale
IC['Gam'] = Gam * tscale**4/pscale**2
global it
it = 0
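# Inner loop used by fsolve: for a trial final time tf, set the domain scaling c = 2/tf,
# re-solve the nonlinear least squares for the TFC coefficients, and return the largest
# residual of the dynamics/boundary losses and the free-final-time Hamiltonian condition.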
def Innerloop(tf,xi,IC):
global it
IC['c'] = 2./tf
it += 1
xi,_,time = nlls.run(xi,IC)
loss1 = np.max(np.abs(L(xi,IC)))
loss2 = np.max(np.abs(Htf(xi,IC)))
return np.max(np.hstack((loss1,loss2)))
t0 = 2./IC['c']
start = timer()
tf = fsolve(Innerloop, t0, args=(xi,IC), xtol=1e-13,epsfcn=tol)
time = timer() - start
IC['c'] = 2./tf
xi,_,_ = nlls.run(xi,IC)
## CONSTRUCT SOLUTION **********************************************
t = (z-z[0])/IC['c'] * tscale
IC['Gam']= IC['Gam'] * pscale**2/tscale**4
R = r(z,xi,IC) * pscale
V = v(z,xi,IC) * pscale/tscale
LamV = lam(z,xi) * pscale/tscale**2
LamR = -IC['c'] * egrad(lam)(z,xi) * pscale/tscale**3
Ac = - LamV
Ham = onp.zeros(len(t))
int = onp.zeros(len(t))
a_mag = onp.zeros(len(t))
for i in range(0,len(t)):
int[i] = np.dot(Ac[i,:],Ac[i,:])
Ham[i] = 0.5*int[i] + np.dot(LamR[i,:],V[i,:]) + np.dot(LamV[i,:],IC['ag'] + Ac[i,:])
a_mag[i] = np.linalg.norm(Ac[i,:])
cost = IC['Gam']* t[-1] + 0.5 * simps(int,t)
loss1 = np.max(np.abs(L(xi,IC)))
loss2 = np.max(np.abs(Htf(xi,IC)))
##: print final answers to screen
print('\nFinal time [s]:\t' + str(t[-1]))
print('Cost:\t\t' + str(cost))
print('Comp time [ms]:\t' + str(time*1000))
print('Iterations:\t' + str(it))
print('Loss:\t\t' + str(np.max(np.hstack((loss1,loss2)))))
|
[
"jax.numpy.array",
"jax.numpy.dot",
"tfc.utils.egrad",
"tfc.utils.TFCDictRobust",
"time.process_time",
"scipy.optimize.fsolve",
"jax.numpy.finfo",
"jax.numpy.sqrt",
"numpy.ones",
"tfc.utils.NllsClass",
"jax.numpy.linalg.norm",
"jax.numpy.hstack",
"jax.numpy.zeros",
"jax.numpy.abs",
"tfc.utfc",
"scipy.integrate.simps"
] |
[((754, 797), 'tfc.utfc', 'utfc', (['N', 'nCx', 'ms'], {'basis': '"""CP"""', 'x0': '(-1)', 'xf': '(1.0)'}), "(N, nCx, ms, basis='CP', x0=-1, xf=1.0)\n", (758, 797), False, 'from tfc import utfc\n'), ((804, 847), 'tfc.utfc', 'utfc', (['N', 'nCy', 'mc'], {'basis': '"""CP"""', 'x0': '(-1)', 'xf': '(1.0)'}), "(N, nCy, mc, basis='CP', x0=-1, xf=1.0)\n", (808, 847), False, 'from tfc import utfc\n'), ((1100, 1111), 'tfc.utils.egrad', 'egrad', (['r', '(0)'], {}), '(r, 0)\n', (1105, 1111), False, 'from tfc.utils import TFCDictRobust, egrad, NllsClass, MakePlot\n'), ((1115, 1126), 'tfc.utils.egrad', 'egrad', (['v', '(0)'], {}), '(v, 0)\n', (1120, 1126), False, 'from tfc.utils import TFCDictRobust, egrad, NllsClass, MakePlot\n'), ((1178, 1191), 'tfc.utils.egrad', 'egrad', (['lam', '(0)'], {}), '(lam, 0)\n', (1183, 1191), False, 'from tfc.utils import TFCDictRobust, egrad, NllsClass, MakePlot\n'), ((2460, 2499), 'tfc.utils.TFCDictRobust', 'TFCDictRobust', (["{'xis': xis, 'xic': xic}"], {}), "({'xis': xis, 'xic': xic})\n", (2473, 2499), False, 'from tfc.utils import TFCDictRobust, egrad, NllsClass, MakePlot\n'), ((2787, 2826), 'tfc.utils.NllsClass', 'NllsClass', (['xi', 'L'], {'maxIter': '(2)', 'timer': '(True)'}), '(xi, L, maxIter=2, timer=True)\n', (2796, 2826), False, 'from tfc.utils import TFCDictRobust, egrad, NllsClass, MakePlot\n'), ((2830, 2869), 'jax.numpy.array', 'np.array', (['[500000.0, 100000.0, 50000.0]'], {}), '([500000.0, 100000.0, 50000.0])\n', (2838, 2869), True, 'import jax.numpy as np\n'), ((2872, 2901), 'jax.numpy.array', 'np.array', (['[-3000.0, 0.0, 0.0]'], {}), '([-3000.0, 0.0, 0.0])\n', (2880, 2901), True, 'import jax.numpy as np\n'), ((3423, 3430), 'time.process_time', 'timer', ([], {}), '()\n', (3428, 3430), True, 'from time import process_time as timer\n'), ((3436, 3496), 'scipy.optimize.fsolve', 'fsolve', (['Innerloop', 't0'], {'args': '(xi, IC)', 'xtol': '(1e-13)', 'epsfcn': 'tol'}), '(Innerloop, t0, args=(xi, IC), xtol=1e-13, epsfcn=tol)\n', (3442, 3496), False, 'from scipy.optimize import fsolve\n'), ((382, 397), 'jax.numpy.finfo', 'np.finfo', (['float'], {}), '(float)\n', (390, 397), True, 'import jax.numpy as np\n'), ((2532, 2546), 'jax.numpy.zeros', 'np.zeros', (['(3,)'], {}), '((3,))\n', (2540, 2546), True, 'import jax.numpy as np\n'), ((2562, 2576), 'jax.numpy.zeros', 'np.zeros', (['(3,)'], {}), '((3,))\n', (2570, 2576), True, 'import jax.numpy as np\n'), ((2592, 2606), 'jax.numpy.zeros', 'np.zeros', (['(3,)'], {}), '((3,))\n', (2600, 2606), True, 'import jax.numpy as np\n'), ((2623, 2637), 'jax.numpy.zeros', 'np.zeros', (['(1,)'], {}), '((1,))\n', (2631, 2637), True, 'import jax.numpy as np\n'), ((2943, 2953), 'jax.numpy.abs', 'np.abs', (['R0'], {}), '(R0)\n', (2949, 2953), True, 'import jax.numpy as np\n'), ((3502, 3509), 'time.process_time', 'timer', ([], {}), '()\n', (3507, 3509), True, 'from time import process_time as timer\n'), ((3978, 4004), 'jax.numpy.dot', 'np.dot', (['Ac[i, :]', 'Ac[i, :]'], {}), '(Ac[i, :], Ac[i, :])\n', (3984, 4004), True, 'import jax.numpy as np\n'), ((4107, 4131), 'jax.numpy.linalg.norm', 'np.linalg.norm', (['Ac[i, :]'], {}), '(Ac[i, :])\n', (4121, 4131), True, 'import jax.numpy as np\n'), ((2392, 2402), 'jax.numpy.sqrt', 'np.sqrt', (['(2)'], {}), '(2)\n', (2399, 2402), True, 'import jax.numpy as np\n'), ((2403, 2414), 'numpy.ones', 'onp.ones', (['(1)'], {}), '(1)\n', (2411, 2414), True, 'import numpy as onp\n'), ((2429, 2440), 'jax.numpy.sqrt', 'np.sqrt', (['(10)'], {}), '(10)\n', (2436, 2440), True, 'import jax.numpy as np\n'), 
((2441, 2452), 'numpy.ones', 'onp.ones', (['(1)'], {}), '(1)\n', (2449, 2452), True, 'import numpy as onp\n'), ((2655, 2666), 'numpy.ones', 'onp.ones', (['(1)'], {}), '(1)\n', (2663, 2666), True, 'import numpy as onp\n'), ((2978, 2988), 'jax.numpy.abs', 'np.abs', (['V0'], {}), '(V0)\n', (2984, 2988), True, 'import jax.numpy as np\n'), ((3064, 3095), 'jax.numpy.array', 'np.array', (['[0.0, 0.0, -5.314961]'], {}), '([0.0, 0.0, -5.314961])\n', (3072, 3095), True, 'import jax.numpy as np\n'), ((3370, 3395), 'jax.numpy.hstack', 'np.hstack', (['(loss1, loss2)'], {}), '((loss1, loss2))\n', (3379, 3395), True, 'import jax.numpy as np\n'), ((4055, 4094), 'jax.numpy.dot', 'np.dot', (['LamV[i, :]', "(IC['ag'] + Ac[i, :])"], {}), "(LamV[i, :], IC['ag'] + Ac[i, :])\n", (4061, 4094), True, 'import jax.numpy as np\n'), ((4165, 4178), 'scipy.integrate.simps', 'simps', (['int', 't'], {}), '(int, t)\n', (4170, 4178), False, 'from scipy.integrate import simps\n'), ((4028, 4055), 'jax.numpy.dot', 'np.dot', (['LamR[i, :]', 'V[i, :]'], {}), '(LamR[i, :], V[i, :])\n', (4034, 4055), True, 'import jax.numpy as np\n'), ((3815, 3825), 'tfc.utils.egrad', 'egrad', (['lam'], {}), '(lam)\n', (3820, 3825), False, 'from tfc.utils import TFCDictRobust, egrad, NllsClass, MakePlot\n'), ((4465, 4490), 'jax.numpy.hstack', 'np.hstack', (['(loss1, loss2)'], {}), '((loss1, loss2))\n', (4474, 4490), True, 'import jax.numpy as np\n')]
|
"""
Data
================
data storage and manipulation classes, should be sufficient to run the game without display
"""
from enum import Enum
import numpy
class Facing(Enum):
YP = 0
XP = 1
ZN = 2
YN = 3
XN = 4
ZP = 5
# gives a directional delta array in hex coordinates for given Facing
def facing_array(enum):
if enum == Facing.YP:
return [0, 1, 0]
if enum == Facing.XP:
return [1, 0, 0]
if enum == Facing.ZN:
return [0, 0, -1]
if enum == Facing.YN:
return [0, -1, 0]
if enum == Facing.XN:
return [-1, 0, 0]
if enum == Facing.ZP:
return [0, 0, 1]
raise Exception(f'{enum} is not a valid Facing')
class Body:
def __init__(self, position=None, momentum=None, facing=Facing.YP, image=''):
if momentum is None:
momentum = [0, 0, 0]
if position is None:
position = [[0, 0]]
self.facing = facing
self.position = position # hex x y positions
self.momentum = momentum # hex x y z velocities
self._momentum_next = [0, 0, 0] # hex x y z velocities
self.image = image
# single hex movement by provided direction, none or 0 for inaction
def move(self, direction=None):
if direction is None:
direction = [0, 0, 0]
else:
direction = facing_array(direction)
        # numpy.add returns new arrays, so assign the results back (the originals were
        # discarded) and accumulate into the _momentum_next attribute set in __init__.
        self.position = numpy.add(self.position, direction)
        self._momentum_next = numpy.add(self._momentum_next, direction)
# positive rotations are clockwise
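    # A 60-degree clockwise turn in hex cube coordinates maps (x, y, z) -> (-z, -x, -y);
    # the counter-clockwise turn is the inverse, (x, y, z) -> (-y, -z, -x).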
def rotate(self, rotations, pivot=None):
if pivot is None:
pivot = self.position[0]
        # Facing is an Enum, so wrap the rotation around the six hex facings by value.
        self.facing = Facing((self.facing.value + rotations) % 6)
if len(self.position) > 1:
for r in range(0, abs(rotations)):
if rotations > 0:
for i in range(0, len(self.position)):
p = numpy.subtract(self.position[i], pivot)
p = [-p[2], -p[0], -p[1]]
self.position[i] = numpy.add(p, pivot)
else:
for i in range(0, len(self.position)):
p = numpy.subtract(self.position[i], pivot)
                        p = [-p[1], -p[2], -p[0]]
self.position[i] = numpy.add(p, pivot)
# 1 = 60°, 6 rotations in a 360° turn
def degree_facing(self):
        return self.facing.value * 60
def elapse_turn(self):
self.momentum = self._momentum_next
self._momentum_next = [0, 0, 0]
class Ship(Body):
def __init__(self, position=None, momentum=None, facing=Facing.YP,
image='', speed=1, rotation_speed=1, move_directions=[Facing.YP]):
super().__init__(position, momentum, facing, image)
self.speed = speed # number of movement/rotation actions you can make in a turn
self.action_points = speed
self.rotation_speed = rotation_speed # number of 60 degree turns allowed in one rotation action
self.move_directions = move_directions # legal directions to make moves in
def move(self, direction=None):
if direction is None:
direction = [0, 0, 0]
elif direction in self.move_directions:
            super().move(direction)
else:
raise Exception(f'Invalid move direction {direction}, valid directions are {self.move_directions}')
self.action_points -= 1
def rotate(self, rotations, pivot=None):
return
class Map:
def __init__(self, width, height):
self.width = width
self.height = height
self.bodies = []
# NOTES FROM INITIAL GAME PLAY MECHANICS REVIEW:
#
# Body():
# (x, y)[]
# position
# (x1, x2, y)
# momentum
# (x1, x2, y)
# momentum_next
#
#
# def rotate((x, y)pivot, rotations
#
# ):
#
# # updates position
#
# def move((x1, x2, y)direction
#
# ):
# # updates position and momentum_next
#
#
# ImmutableBody(Body):
#
#
# def rotate((x, y)pivot, rotations
#
# ):
# return
#
#
# def move((x1, x2, y)direction
#
# ):
# return
#
# Ship(Body):
# rotation_speed
# speed
# Facing[]
# legal_moves # which direction thrusters can take us, 1 non zero value in tuple
#
#
# def rotate((x, y)pivot, rotations
#
# ):
# # if you rotate your legal_moves must update
#
#
# Map():
# x_width
# y_width
# []
# bodies
#
#
# class Facing(Enum):
# YP = 0
# X1P = 1
# X2P = 2
# YN = 3
# X1N = 4
# X2N = 5
|
[
"numpy.add",
"numpy.subtract"
] |
[((1398, 1433), 'numpy.add', 'numpy.add', (['self.position', 'direction'], {}), '(self.position, direction)\n', (1407, 1433), False, 'import numpy\n'), ((1442, 1482), 'numpy.add', 'numpy.add', (['self.momentum_next', 'direction'], {}), '(self.momentum_next, direction)\n', (1451, 1482), False, 'import numpy\n'), ((1867, 1906), 'numpy.subtract', 'numpy.subtract', (['self.position[i]', 'pivot'], {}), '(self.position[i], pivot)\n', (1881, 1906), False, 'import numpy\n'), ((2000, 2019), 'numpy.add', 'numpy.add', (['p', 'pivot'], {}), '(p, pivot)\n', (2009, 2019), False, 'import numpy\n'), ((2129, 2168), 'numpy.subtract', 'numpy.subtract', (['self.position[i]', 'pivot'], {}), '(self.position[i], pivot)\n', (2143, 2168), False, 'import numpy\n'), ((2263, 2282), 'numpy.add', 'numpy.add', (['p', 'pivot'], {}), '(p, pivot)\n', (2272, 2282), False, 'import numpy\n')]
|
from icecube.icetray import OMKey
from icecube.simclasses import I3MapModuleKeyI3ExtraGeometryItemCylinder, I3ExtraGeometryItemCylinder
from icecube.dataclasses import I3Position, ModuleKey
from I3Tray import I3Units
import numpy as np
from os.path import expandvars
from_cable_shadow = expandvars("$I3_BUILD/ice-models/resources/models/cable_position/orientation.cable_shadow.txt")
from_led7 = expandvars("$I3_BUILD/ice-models/resources/models/cable_position/orientation.led7.txt")
def GetIceCubeCableShadow(CableAngles=from_led7,
DOMRadius=165.1*I3Units.mm, CableRadius=23*I3Units.mm, CableLength=1*I3Units.m):
"""
Get a cylinder representing the position of the cable at each DOM
:param CableAngles: text file containing string, om, angle (degrees), angle error (degrees)
:param DOMRadius: radius of the DOM sphere
:param CableRadius: radius of the cable
:param CableLength: length of the cable segment at each DOM
:returns: a map of I3ExtraGeometryItem representing the local position of
the cable *in DOM-centered coordinates*
"""
# assume the cable runs along the surface of the DOM
radius = DOMRadius + CableRadius
shadows = I3MapModuleKeyI3ExtraGeometryItemCylinder()
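    # Each shadow is a cylinder of radius CableRadius whose axis is offset from the DOM
    # center by (radius*cos(angle), radius*sin(angle), 0) and spans +/- CableLength/2 in z,
    # i.e. a short vertical cable segment hugging the DOM equator in DOM-centered coordinates.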
for string, om, angle, _ in np.loadtxt(CableAngles, dtype=[('string',int),('om',int),('angle',float),('angle_err',float)]):
pos = I3Position(radius*np.cos(np.radians(angle)), radius*np.sin(np.radians(angle)), 0)
shadows[ModuleKey(int(string),int(om))] = I3ExtraGeometryItemCylinder(pos + I3Position(0,0,CableLength/2.), pos + I3Position(0,0,-CableLength/2.), CableRadius)
return shadows
|
[
"icecube.simclasses.I3MapModuleKeyI3ExtraGeometryItemCylinder",
"numpy.radians",
"os.path.expandvars",
"icecube.dataclasses.I3Position",
"numpy.loadtxt"
] |
[((291, 396), 'os.path.expandvars', 'expandvars', (['"""$I3_BUILD/ice-models/resources/models/cable_position/orientation.cable_shadow.txt"""'], {}), "(\n '$I3_BUILD/ice-models/resources/models/cable_position/orientation.cable_shadow.txt'\n )\n", (301, 396), False, 'from os.path import expandvars\n'), ((399, 496), 'os.path.expandvars', 'expandvars', (['"""$I3_BUILD/ice-models/resources/models/cable_position/orientation.led7.txt"""'], {}), "(\n '$I3_BUILD/ice-models/resources/models/cable_position/orientation.led7.txt'\n )\n", (409, 496), False, 'from os.path import expandvars\n'), ((1198, 1241), 'icecube.simclasses.I3MapModuleKeyI3ExtraGeometryItemCylinder', 'I3MapModuleKeyI3ExtraGeometryItemCylinder', ([], {}), '()\n', (1239, 1241), False, 'from icecube.simclasses import I3MapModuleKeyI3ExtraGeometryItemCylinder, I3ExtraGeometryItemCylinder\n'), ((1274, 1379), 'numpy.loadtxt', 'np.loadtxt', (['CableAngles'], {'dtype': "[('string', int), ('om', int), ('angle', float), ('angle_err', float)]"}), "(CableAngles, dtype=[('string', int), ('om', int), ('angle',\n float), ('angle_err', float)])\n", (1284, 1379), True, 'import numpy as np\n'), ((1550, 1585), 'icecube.dataclasses.I3Position', 'I3Position', (['(0)', '(0)', '(CableLength / 2.0)'], {}), '(0, 0, CableLength / 2.0)\n', (1560, 1585), False, 'from icecube.dataclasses import I3Position, ModuleKey\n'), ((1588, 1624), 'icecube.dataclasses.I3Position', 'I3Position', (['(0)', '(0)', '(-CableLength / 2.0)'], {}), '(0, 0, -CableLength / 2.0)\n', (1598, 1624), False, 'from icecube.dataclasses import I3Position, ModuleKey\n'), ((1409, 1426), 'numpy.radians', 'np.radians', (['angle'], {}), '(angle)\n', (1419, 1426), True, 'import numpy as np\n'), ((1443, 1460), 'numpy.radians', 'np.radians', (['angle'], {}), '(angle)\n', (1453, 1460), True, 'import numpy as np\n')]
|
import i18n
from config import config
from definitions import LANG_PATH
i18n.load_path.append(LANG_PATH)
i18n.set('locale', config["language"])
|
[
"i18n.set",
"i18n.load_path.append"
] |
[((74, 106), 'i18n.load_path.append', 'i18n.load_path.append', (['LANG_PATH'], {}), '(LANG_PATH)\n', (95, 106), False, 'import i18n\n'), ((107, 145), 'i18n.set', 'i18n.set', (['"""locale"""', "config['language']"], {}), "('locale', config['language'])\n", (115, 145), False, 'import i18n\n')]
|
import PyQt5.QtWidgets
import MainForm
app_module = PyQt5.QtWidgets.QApplication([])
app = MainForm.MainForm()
app.show()
app_module.exec()
|
[
"MainForm.MainForm"
] |
[((93, 112), 'MainForm.MainForm', 'MainForm.MainForm', ([], {}), '()\n', (110, 112), False, 'import MainForm\n')]
|
from observers.observer import Observer
from observers.eys_state import EyeStateItem, ChooseState
import json
import os
class JsonObserver(Observer):
CSV_FILE_NAME = 'db.json'
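    # Despite the CSV-style name, this is a JSON file that maps each screen-part name to
    # the number of times it has been chosen; trigger() increments those counts.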
def __init__(self):
super().__init__()
def trigger(self, eye_state_item: EyeStateItem):
if eye_state_item.choose_state == ChooseState.CHOOSE:
j = {}
try:
with open(JsonObserver.CSV_FILE_NAME, 'r') as f:
j = json.load(f)
except:
pass
key = eye_state_item.screen_part.name
if key in j:
j[key] += 1
else:
j[key] = 1
with open(JsonObserver.CSV_FILE_NAME, 'w') as f:
json.dump(j, f)
|
[
"json.dump",
"json.load"
] |
[((757, 772), 'json.dump', 'json.dump', (['j', 'f'], {}), '(j, f)\n', (766, 772), False, 'import json\n'), ((476, 488), 'json.load', 'json.load', (['f'], {}), '(f)\n', (485, 488), False, 'import json\n')]
|
import itertools
import operator
import os
import pickle
import re
import sys
import time
import cv2
from keras import backend as K
from keras.layers import Input
from keras.models import Model
import skvideo.io
from keras_frcnn import roi_helpers
import keras_frcnn.resnet as nn
import numpy as np
video_folder = '../../Videos/'
videoName = "MOV_0861"
input_video_file = os.path.abspath(video_folder + videoName + ".mp4")
output_video_file = os.path.abspath(video_folder + "OUTPUT/" + videoName + ".mp4")
img_path = os.path.join(video_folder +"OUTPUT/input", '')
output_path = os.path.join(video_folder +"OUTPUT/output", '')
num_rois = 32
frame_rate = 30
def cleanup():
print("cleaning up...")
os.popen('rm -f ' + img_path + '*')
os.popen('rm -f ' + output_path + '*')
def get_file_names(search_path):
for (dirpath, _, filenames) in os.walk(search_path):
for filename in filenames:
yield filename # os.path.join(dirpath, filename)
def convert_to_images():
counter = 0
videodata = skvideo.io.vreader(input_video_file)
for frame in videodata:
skvideo.io.vwrite(os.path.join(img_path, str(counter) + '.jpg'), frame)
counter = counter + 1
def save_to_video():
list_files = sorted(get_file_names(output_path), key=lambda var:[int(x) if x.isdigit() else x for x in re.findall(r'[^0-9]|[0-9]+', var)])
# start the FFmpeg writing subprocess with following parameters
writer = skvideo.io.FFmpegWriter(output_video_file, outputdict={
'-vcodec': 'libx264', "-r":str(frame_rate)}, verbosity=1)
for file in list_files:
frame = skvideo.io.vread(os.path.join(output_path, file))
writer.writeFrame(frame)
writer.close()
def format_img(img, C):
img_min_side = float(C.im_size)
(height, width, _) = img.shape
if width <= height:
f = img_min_side / width
new_height = int(f * height)
new_width = int(img_min_side)
else:
f = img_min_side / height
new_width = int(f * width)
new_height = int(img_min_side)
img = cv2.resize(img, (new_width, new_height), interpolation=cv2.INTER_CUBIC)
img = img[:, :, (2, 1, 0)]
img = img.astype(np.float32)
img[:, :, 0] -= C.img_channel_mean[0]
img[:, :, 1] -= C.img_channel_mean[1]
img[:, :, 2] -= C.img_channel_mean[2]
img /= C.img_scaling_factor
img = np.transpose(img, (2, 0, 1))
img = np.expand_dims(img, axis=0)
return img
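# Sum the per-key counts; itertools.groupby only groups consecutive items, so the input
# is assumed to be ordered so that equal keys are adjacent.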
def accumulate(l):
it = itertools.groupby(l, operator.itemgetter(0))
for key, subiter in it:
yield key, sum(item[1] for item in subiter)
def main():
sys.setrecursionlimit(40000)
config_output_filename = './config.pickle'
with open(config_output_filename, 'rb') as f_in:
C = pickle.load(f_in)
# turn off any data augmentation at test time
C.use_horizontal_flips = False
C.use_vertical_flips = False
C.rot_90 = False
class_mapping = C.class_mapping
if 'bg' not in class_mapping:
class_mapping['bg'] = len(class_mapping)
class_mapping = {v: k for k, v in class_mapping.items()}
print(class_mapping)
class_to_color = {class_mapping[v]: np.random.randint(0, 255, 3).tolist() for v in class_mapping}
C.num_rois = num_rois
if K.image_dim_ordering() == 'th':
input_shape_img = (3, None, None)
input_shape_features = (1024, None, None)
else:
input_shape_img = (None, None, 3)
input_shape_features = (None, None, 1024)
img_input = Input(shape=input_shape_img)
roi_input = Input(shape=(C.num_rois, 4))
feature_map_input = Input(shape=input_shape_features)
# define the base network (resnet here, can be VGG, Inception, etc)
shared_layers = nn.nn_base(img_input, trainable=True)
# define the RPN, built on the base layers
num_anchors = len(C.anchor_box_scales) * len(C.anchor_box_ratios)
rpn_layers = nn.rpn(shared_layers, num_anchors)
classifier = nn.classifier(feature_map_input, roi_input, C.num_rois, nb_classes=len(class_mapping), trainable=True)
model_rpn = Model(img_input, rpn_layers)
model_classifier_only = Model([feature_map_input, roi_input], classifier)
model_classifier = Model([feature_map_input, roi_input], classifier)
model_rpn.load_weights(C.model_path, by_name=True)
model_classifier.load_weights(C.model_path, by_name=True)
model_rpn.compile(optimizer='sgd', loss='mse')
model_classifier.compile(optimizer='sgd', loss='mse')
bbox_threshold = 0.8
print("anotating...")
list_files = sorted(get_file_names(img_path), key=lambda var:[int(x) if x.isdigit() else x for x in re.findall(r'[^0-9]|[0-9]+', var)])
for img_name in list_files:
if not img_name.lower().endswith(('.bmp', '.jpeg', '.jpg', '.png', '.tif', '.tiff')):
continue
print(img_name)
st = time.time()
filepath = os.path.join(img_path, img_name)
img = cv2.imread(filepath)
X = format_img(img, C)
img_scaled = np.transpose(X.copy()[0, (2, 1, 0), :, :], (1, 2, 0)).copy()
img_scaled[:, :, 0] += 123.68
img_scaled[:, :, 1] += 116.779
img_scaled[:, :, 2] += 103.939
img_scaled = img_scaled.astype(np.uint8)
if K.image_dim_ordering() == 'tf':
X = np.transpose(X, (0, 2, 3, 1))
# get the feature maps and output from the RPN
[Y1, Y2, F] = model_rpn.predict(X)
R = roi_helpers.rpn_to_roi(Y1, Y2, C, K.image_dim_ordering(), overlap_thresh=0.7)
# convert from (x1,y1,x2,y2) to (x,y,w,h)
R[:, 2] -= R[:, 0]
R[:, 3] -= R[:, 1]
# apply the spatial pyramid pooling to the proposed regions
bboxes = {}
probs = {}
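        # Classify the RPN proposals in batches of C.num_rois; the last batch is padded by
        # repeating its first ROI so the classifier always sees a fixed-size input.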
for jk in range(R.shape[0] // C.num_rois + 1):
ROIs = np.expand_dims(R[C.num_rois * jk:C.num_rois * (jk + 1), :], axis=0)
if ROIs.shape[1] == 0:
break
if jk == R.shape[0] // C.num_rois:
# pad R
curr_shape = ROIs.shape
target_shape = (curr_shape[0], C.num_rois, curr_shape[2])
ROIs_padded = np.zeros(target_shape).astype(ROIs.dtype)
ROIs_padded[:, :curr_shape[1], :] = ROIs
ROIs_padded[0, curr_shape[1]:, :] = ROIs[0, 0, :]
ROIs = ROIs_padded
[P_cls, P_regr] = model_classifier_only.predict([F, ROIs])
for ii in range(P_cls.shape[1]):
if np.max(P_cls[0, ii, :]) < bbox_threshold or np.argmax(P_cls[0, ii, :]) == (P_cls.shape[2] - 1):
continue
cls_name = class_mapping[np.argmax(P_cls[0, ii, :])]
if cls_name not in bboxes:
bboxes[cls_name] = []
probs[cls_name] = []
(x, y, w, h) = ROIs[0, ii, :]
cls_num = np.argmax(P_cls[0, ii, :])
try:
(tx, ty, tw, th) = P_regr[0, ii, 4 * cls_num:4 * (cls_num + 1)]
tx /= C.classifier_regr_std[0]
ty /= C.classifier_regr_std[1]
tw /= C.classifier_regr_std[2]
th /= C.classifier_regr_std[3]
x, y, w, h = roi_helpers.apply_regr(x, y, w, h, tx, ty, tw, th)
except:
pass
bboxes[cls_name].append([16 * x, 16 * y, 16 * (x + w), 16 * (y + h)])
probs[cls_name].append(np.max(P_cls[0, ii, :]))
all_dets = []
all_objects = []
for key in bboxes:
bbox = np.array(bboxes[key])
new_boxes, new_probs = roi_helpers.non_max_suppression_fast(bbox, np.array(probs[key]), overlap_thresh=0.5)
for jk in range(new_boxes.shape[0]):
(x1, y1, x2, y2) = new_boxes[jk, :]
cv2.rectangle(img_scaled, (x1, y1), (x2, y2), class_to_color[key], 2)
textLabel = '{}: {}'.format(key, int(100 * new_probs[jk]))
all_dets.append((key, 100 * new_probs[jk]))
all_objects.append((key, 1))
(retval, baseLine) = cv2.getTextSize(textLabel, cv2.FONT_HERSHEY_COMPLEX, 1, 1)
textOrg = (x1, y1 - 0)
cv2.rectangle(img_scaled, (textOrg[0] - 5, textOrg[1] + baseLine - 5), (textOrg[0] + retval[0] + 5, textOrg[1] - retval[1] - 5), (0, 0, 0), 2)
cv2.rectangle(img_scaled, (textOrg[0] - 5, textOrg[1] + baseLine - 5), (textOrg[0] + retval[0] + 5, textOrg[1] - retval[1] - 5), (255, 255, 255), -1)
cv2.putText(img_scaled, textLabel, textOrg, cv2.FONT_HERSHEY_DUPLEX, 1, (0, 0, 0), 1)
print('Elapsed time = {}'.format(time.time() - st))
height, width, channels = img_scaled.shape
cv2.rectangle(img_scaled, (0, 0), (width, 30), (0, 0, 0), -1)
cv2.putText(img_scaled, "Obj count: " + str(list(accumulate(all_objects))), (5, 19), cv2.FONT_HERSHEY_TRIPLEX, 0.5, (255, 255, 255), 1)
cv2.imwrite(os.path.join(output_path, img_name), img_scaled)
print(all_dets)
if __name__ == '__main__':
cleanup()
print("Converting video to images..")
convert_to_images()
print("Main ...")
main()
print("saving to video..")
save_to_video()
|
[
"numpy.argmax",
"os.popen",
"os.walk",
"keras.models.Model",
"keras.backend.image_dim_ordering",
"pickle.load",
"numpy.random.randint",
"cv2.rectangle",
"keras.layers.Input",
"sys.setrecursionlimit",
"os.path.join",
"os.path.abspath",
"numpy.transpose",
"numpy.max",
"re.findall",
"cv2.resize",
"cv2.putText",
"cv2.getTextSize",
"numpy.zeros",
"numpy.expand_dims",
"time.time",
"cv2.imread",
"numpy.array",
"keras_frcnn.resnet.nn_base",
"keras_frcnn.resnet.rpn",
"keras_frcnn.roi_helpers.apply_regr",
"operator.itemgetter"
] |
[((375, 425), 'os.path.abspath', 'os.path.abspath', (["(video_folder + videoName + '.mp4')"], {}), "(video_folder + videoName + '.mp4')\n", (390, 425), False, 'import os\n'), ((446, 508), 'os.path.abspath', 'os.path.abspath', (["(video_folder + 'OUTPUT/' + videoName + '.mp4')"], {}), "(video_folder + 'OUTPUT/' + videoName + '.mp4')\n", (461, 508), False, 'import os\n'), ((520, 567), 'os.path.join', 'os.path.join', (["(video_folder + 'OUTPUT/input')", '""""""'], {}), "(video_folder + 'OUTPUT/input', '')\n", (532, 567), False, 'import os\n'), ((581, 629), 'os.path.join', 'os.path.join', (["(video_folder + 'OUTPUT/output')", '""""""'], {}), "(video_folder + 'OUTPUT/output', '')\n", (593, 629), False, 'import os\n'), ((702, 737), 'os.popen', 'os.popen', (["('rm -f ' + img_path + '*')"], {}), "('rm -f ' + img_path + '*')\n", (710, 737), False, 'import os\n'), ((739, 777), 'os.popen', 'os.popen', (["('rm -f ' + output_path + '*')"], {}), "('rm -f ' + output_path + '*')\n", (747, 777), False, 'import os\n'), ((845, 865), 'os.walk', 'os.walk', (['search_path'], {}), '(search_path)\n', (852, 865), False, 'import os\n'), ((1964, 2035), 'cv2.resize', 'cv2.resize', (['img', '(new_width, new_height)'], {'interpolation': 'cv2.INTER_CUBIC'}), '(img, (new_width, new_height), interpolation=cv2.INTER_CUBIC)\n', (1974, 2035), False, 'import cv2\n'), ((2247, 2275), 'numpy.transpose', 'np.transpose', (['img', '(2, 0, 1)'], {}), '(img, (2, 0, 1))\n', (2259, 2275), True, 'import numpy as np\n'), ((2283, 2310), 'numpy.expand_dims', 'np.expand_dims', (['img'], {'axis': '(0)'}), '(img, axis=0)\n', (2297, 2310), True, 'import numpy as np\n'), ((2483, 2511), 'sys.setrecursionlimit', 'sys.setrecursionlimit', (['(40000)'], {}), '(40000)\n', (2504, 2511), False, 'import sys\n'), ((3288, 3316), 'keras.layers.Input', 'Input', ([], {'shape': 'input_shape_img'}), '(shape=input_shape_img)\n', (3293, 3316), False, 'from keras.layers import Input\n'), ((3330, 3358), 'keras.layers.Input', 'Input', ([], {'shape': '(C.num_rois, 4)'}), '(shape=(C.num_rois, 4))\n', (3335, 3358), False, 'from keras.layers import Input\n'), ((3380, 3413), 'keras.layers.Input', 'Input', ([], {'shape': 'input_shape_features'}), '(shape=input_shape_features)\n', (3385, 3413), False, 'from keras.layers import Input\n'), ((3501, 3538), 'keras_frcnn.resnet.nn_base', 'nn.nn_base', (['img_input'], {'trainable': '(True)'}), '(img_input, trainable=True)\n', (3511, 3538), True, 'import keras_frcnn.resnet as nn\n'), ((3665, 3699), 'keras_frcnn.resnet.rpn', 'nn.rpn', (['shared_layers', 'num_anchors'], {}), '(shared_layers, num_anchors)\n', (3671, 3699), True, 'import keras_frcnn.resnet as nn\n'), ((3832, 3860), 'keras.models.Model', 'Model', (['img_input', 'rpn_layers'], {}), '(img_input, rpn_layers)\n', (3837, 3860), False, 'from keras.models import Model\n'), ((3886, 3935), 'keras.models.Model', 'Model', (['[feature_map_input, roi_input]', 'classifier'], {}), '([feature_map_input, roi_input], classifier)\n', (3891, 3935), False, 'from keras.models import Model\n'), ((3957, 4006), 'keras.models.Model', 'Model', (['[feature_map_input, roi_input]', 'classifier'], {}), '([feature_map_input, roi_input], classifier)\n', (3962, 4006), False, 'from keras.models import Model\n'), ((2371, 2393), 'operator.itemgetter', 'operator.itemgetter', (['(0)'], {}), '(0)\n', (2390, 2393), False, 'import operator\n'), ((2613, 2630), 'pickle.load', 'pickle.load', (['f_in'], {}), '(f_in)\n', (2624, 2630), False, 'import pickle\n'), ((3075, 3097), 'keras.backend.image_dim_ordering', 
'K.image_dim_ordering', ([], {}), '()\n', (3095, 3097), True, 'from keras import backend as K\n'), ((4565, 4576), 'time.time', 'time.time', ([], {}), '()\n', (4574, 4576), False, 'import time\n'), ((4590, 4622), 'os.path.join', 'os.path.join', (['img_path', 'img_name'], {}), '(img_path, img_name)\n', (4602, 4622), False, 'import os\n'), ((4631, 4651), 'cv2.imread', 'cv2.imread', (['filepath'], {}), '(filepath)\n', (4641, 4651), False, 'import cv2\n'), ((7790, 7851), 'cv2.rectangle', 'cv2.rectangle', (['img_scaled', '(0, 0)', '(width, 30)', '(0, 0, 0)', '(-1)'], {}), '(img_scaled, (0, 0), (width, 30), (0, 0, 0), -1)\n', (7803, 7851), False, 'import cv2\n'), ((1578, 1609), 'os.path.join', 'os.path.join', (['output_path', 'file'], {}), '(output_path, file)\n', (1590, 1609), False, 'import os\n'), ((4902, 4924), 'keras.backend.image_dim_ordering', 'K.image_dim_ordering', ([], {}), '()\n', (4922, 4924), True, 'from keras import backend as K\n'), ((4941, 4970), 'numpy.transpose', 'np.transpose', (['X', '(0, 2, 3, 1)'], {}), '(X, (0, 2, 3, 1))\n', (4953, 4970), True, 'import numpy as np\n'), ((5099, 5121), 'keras.backend.image_dim_ordering', 'K.image_dim_ordering', ([], {}), '()\n', (5119, 5121), True, 'from keras import backend as K\n'), ((5380, 5447), 'numpy.expand_dims', 'np.expand_dims', (['R[C.num_rois * jk:C.num_rois * (jk + 1), :]'], {'axis': '(0)'}), '(R[C.num_rois * jk:C.num_rois * (jk + 1), :], axis=0)\n', (5394, 5447), True, 'import numpy as np\n'), ((6752, 6773), 'numpy.array', 'np.array', (['bboxes[key]'], {}), '(bboxes[key])\n', (6760, 6773), True, 'import numpy as np\n'), ((8004, 8039), 'os.path.join', 'os.path.join', (['output_path', 'img_name'], {}), '(output_path, img_name)\n', (8016, 8039), False, 'import os\n'), ((2985, 3013), 'numpy.random.randint', 'np.random.randint', (['(0)', '(255)', '(3)'], {}), '(0, 255, 3)\n', (3002, 3013), True, 'import numpy as np\n'), ((6218, 6244), 'numpy.argmax', 'np.argmax', (['P_cls[0, ii, :]'], {}), '(P_cls[0, ii, :])\n', (6227, 6244), True, 'import numpy as np\n'), ((6844, 6864), 'numpy.array', 'np.array', (['probs[key]'], {}), '(probs[key])\n', (6852, 6864), True, 'import numpy as np\n'), ((6971, 7040), 'cv2.rectangle', 'cv2.rectangle', (['img_scaled', '(x1, y1)', '(x2, y2)', 'class_to_color[key]', '(2)'], {}), '(img_scaled, (x1, y1), (x2, y2), class_to_color[key], 2)\n', (6984, 7040), False, 'import cv2\n'), ((7211, 7269), 'cv2.getTextSize', 'cv2.getTextSize', (['textLabel', 'cv2.FONT_HERSHEY_COMPLEX', '(1)', '(1)'], {}), '(textLabel, cv2.FONT_HERSHEY_COMPLEX, 1, 1)\n', (7226, 7269), False, 'import cv2\n'), ((7302, 7449), 'cv2.rectangle', 'cv2.rectangle', (['img_scaled', '(textOrg[0] - 5, textOrg[1] + baseLine - 5)', '(textOrg[0] + retval[0] + 5, textOrg[1] - retval[1] - 5)', '(0, 0, 0)', '(2)'], {}), '(img_scaled, (textOrg[0] - 5, textOrg[1] + baseLine - 5), (\n textOrg[0] + retval[0] + 5, textOrg[1] - retval[1] - 5), (0, 0, 0), 2)\n', (7315, 7449), False, 'import cv2\n'), ((7449, 7608), 'cv2.rectangle', 'cv2.rectangle', (['img_scaled', '(textOrg[0] - 5, textOrg[1] + baseLine - 5)', '(textOrg[0] + retval[0] + 5, textOrg[1] - retval[1] - 5)', '(255, 255, 255)', '(-1)'], {}), '(img_scaled, (textOrg[0] - 5, textOrg[1] + baseLine - 5), (\n textOrg[0] + retval[0] + 5, textOrg[1] - retval[1] - 5), (255, 255, 255\n ), -1)\n', (7462, 7608), False, 'import cv2\n'), ((7603, 7692), 'cv2.putText', 'cv2.putText', (['img_scaled', 'textLabel', 'textOrg', 'cv2.FONT_HERSHEY_DUPLEX', '(1)', '(0, 0, 0)', '(1)'], {}), '(img_scaled, textLabel, textOrg, 
cv2.FONT_HERSHEY_DUPLEX, 1, (0,\n 0, 0), 1)\n', (7614, 7692), False, 'import cv2\n'), ((6055, 6081), 'numpy.argmax', 'np.argmax', (['P_cls[0, ii, :]'], {}), '(P_cls[0, ii, :])\n', (6064, 6081), True, 'import numpy as np\n'), ((6485, 6535), 'keras_frcnn.roi_helpers.apply_regr', 'roi_helpers.apply_regr', (['x', 'y', 'w', 'h', 'tx', 'ty', 'tw', 'th'], {}), '(x, y, w, h, tx, ty, tw, th)\n', (6507, 6535), False, 'from keras_frcnn import roi_helpers\n'), ((6659, 6682), 'numpy.max', 'np.max', (['P_cls[0, ii, :]'], {}), '(P_cls[0, ii, :])\n', (6665, 6682), True, 'import numpy as np\n'), ((7724, 7735), 'time.time', 'time.time', ([], {}), '()\n', (7733, 7735), False, 'import time\n'), ((1294, 1326), 're.findall', 're.findall', (['"""[^0-9]|[0-9]+"""', 'var'], {}), "('[^0-9]|[0-9]+', var)\n", (1304, 1326), False, 'import re\n'), ((4375, 4407), 're.findall', 're.findall', (['"""[^0-9]|[0-9]+"""', 'var'], {}), "('[^0-9]|[0-9]+', var)\n", (4385, 4407), False, 'import re\n'), ((5643, 5665), 'numpy.zeros', 'np.zeros', (['target_shape'], {}), '(target_shape)\n', (5651, 5665), True, 'import numpy as np\n'), ((5915, 5938), 'numpy.max', 'np.max', (['P_cls[0, ii, :]'], {}), '(P_cls[0, ii, :])\n', (5921, 5938), True, 'import numpy as np\n'), ((5959, 5985), 'numpy.argmax', 'np.argmax', (['P_cls[0, ii, :]'], {}), '(P_cls[0, ii, :])\n', (5968, 5985), True, 'import numpy as np\n')]
|
import logging
from sqlalchemy import create_engine
from joblib import Parallel, delayed
logger = logging.getLogger(__name__)
def _to_sql(df, table, url, **kwargs):
to_sql_kwargs = {
'index': False,
'method': 'multi',
'if_exists': 'append'
}
to_sql_kwargs.update(kwargs)
engine = create_engine(url)
with engine.connect() as conn:
df.to_sql(table, conn, **to_sql_kwargs)
def to_sql(df,
table,
url,
parallelize=True,
chunksize=1000,
n_jobs=-1,
**kwargs):
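    """
    Append a DataFrame to a SQL table, optionally splitting it into chunks of
    `chunksize` rows and writing the chunks in parallel with joblib (`n_jobs` workers).
    Remaining keyword arguments are passed through to DataFrame.to_sql.
    """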
logger.info("Populating table: {}".format(table))
    # TODO: better default for more effective parallelization?
nrows = df.shape[0]
if parallelize and nrows > chunksize:
Parallel(n_jobs=n_jobs)(
delayed(_to_sql)(
df[i:i+chunksize],
table,
url,
**kwargs
) for i in range(0, nrows, chunksize))
else:
_to_sql(df, table, url, **kwargs)
logger.info("Populated table: {}".format(table))
|
[
"sqlalchemy.create_engine",
"joblib.delayed",
"logging.getLogger",
"joblib.Parallel"
] |
[((100, 127), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (117, 127), False, 'import logging\n'), ((324, 342), 'sqlalchemy.create_engine', 'create_engine', (['url'], {}), '(url)\n', (337, 342), False, 'from sqlalchemy import create_engine\n'), ((765, 788), 'joblib.Parallel', 'Parallel', ([], {'n_jobs': 'n_jobs'}), '(n_jobs=n_jobs)\n', (773, 788), False, 'from joblib import Parallel, delayed\n'), ((802, 818), 'joblib.delayed', 'delayed', (['_to_sql'], {}), '(_to_sql)\n', (809, 818), False, 'from joblib import Parallel, delayed\n')]
|
import logging
import requests
import json
from celery import shared_task
from system.models import Users
from seal import settings
logger = logging.getLogger('system_celery')
@shared_task
def system_demo(one):
    ## Because timezone support is enabled, Django stores times in the database as UTC; when you access them it converts to UTC+8 for you, and Celery recognizes the time automatically
from django.utils import timezone
for i in Users.objects.all():
        print(i.last_login) ## reading the time directly gives UTC, unconverted -- be careful if you need to process it
        print(timezone.localtime(i.last_login).strftime("%Y-%m-%d %H:%M:%S")) ## format the time as normal local time
print("celery定时任务demo 每分钟执行一遍",one)
return
@shared_task
def ding_ding_to_info(content,type=None):
"""
    DingTalk webhook interface, call asynchronously: ding_ding_to_info.delay("alarm 1")
    :param content: message text
:param type:
:return:
"""
web_hook_url = getattr(settings, 'web_hook_url'),
headers = {'content-type': 'application/json'}
data = {
"msgtype": "text",
"text": {
"content": content
},
"at": {
"atMobiles": [
],
}
}
try:
r = requests.post(web_hook_url[0], data=json.dumps(data), headers=headers)
print(r.text)
except Exception as e:
logger.error(e)
|
[
"django.utils.timezone.localtime",
"system.models.Users.objects.all",
"logging.getLogger",
"json.dumps"
] |
[((141, 175), 'logging.getLogger', 'logging.getLogger', (['"""system_celery"""'], {}), "('system_celery')\n", (158, 175), False, 'import logging\n'), ((336, 355), 'system.models.Users.objects.all', 'Users.objects.all', ([], {}), '()\n', (353, 355), False, 'from system.models import Users\n'), ((1081, 1097), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (1091, 1097), False, 'import json\n'), ((434, 466), 'django.utils.timezone.localtime', 'timezone.localtime', (['i.last_login'], {}), '(i.last_login)\n', (452, 466), False, 'from django.utils import timezone\n')]
|
import json
from django import forms
from django.template.loader import render_to_string
class AceMarkdownWidget(forms.widgets.Textarea):
template_name = 'django_cradmin/acemarkdown/widget.django.html'
directiveconfig = {
# 'showTextarea': False,
# 'theme': 'tomorrow'
}
@property
def media(self):
return forms.Media(
js=[
'django_cradmin/dist/vendor/js/ace-editor/ace.js',
]
)
def render(self, name, value, attrs=None):
attrs = attrs.copy()
attrs['textarea django-cradmin-acemarkdown-textarea'] = ''
textarea = super(AceMarkdownWidget, self).render(name, value, attrs)
return render_to_string(
self.template_name, {
'textarea': textarea,
'directiveconfig': json.dumps(self.directiveconfig)
}
)
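

# A minimal usage sketch, assuming a Django form; the form and field names
# below are illustrative only.
class NoteForm(forms.Form):
    body = forms.CharField(widget=AceMarkdownWidget())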
|
[
"django.forms.Media",
"json.dumps"
] |
[((352, 419), 'django.forms.Media', 'forms.Media', ([], {'js': "['django_cradmin/dist/vendor/js/ace-editor/ace.js']"}), "(js=['django_cradmin/dist/vendor/js/ace-editor/ace.js'])\n", (363, 419), False, 'from django import forms\n'), ((834, 866), 'json.dumps', 'json.dumps', (['self.directiveconfig'], {}), '(self.directiveconfig)\n', (844, 866), False, 'import json\n')]
|
import numpy as np # type: ignore
city_num = 20
file_path = "./coordinates/"
output_file = "random_" + str(city_num) + "_cities.csv"
if __name__ == "__main__":
    # draw city coordinates from a “continuous uniform” distribution over [0, 1)
np_cities = np.random.random((city_num, 2))
np.savetxt(file_path + output_file, np_cities, delimiter=",")
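
    # Quick sanity check (illustrative): read the CSV back before using it,
    # e.g. as input to a TSP solver.
    loaded_cities = np.loadtxt(file_path + output_file, delimiter=",")
    assert loaded_cities.shape == (city_num, 2)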
|
[
"numpy.savetxt",
"numpy.random.random"
] |
[((226, 257), 'numpy.random.random', 'np.random.random', (['(city_num, 2)'], {}), '((city_num, 2))\n', (242, 257), True, 'import numpy as np\n'), ((262, 323), 'numpy.savetxt', 'np.savetxt', (['(file_path + output_file)', 'np_cities'], {'delimiter': '""","""'}), "(file_path + output_file, np_cities, delimiter=',')\n", (272, 323), True, 'import numpy as np\n')]
|
from django.conf import settings
from django.core.checks import Warning
def check_production_settings(app_configs, **kwargs):
issues = []
if settings.DEBUG:
return issues
if not settings.EMAIL_HOST_PASSWORD or 'TODO' in settings.EMAIL_HOST_PASSWORD:
issues.append(
Warning(
"EMAIL_HOST_PASSWORD setting is not set to proper value",
id='tg_utils.W001',
)
)
if 'TODO' in settings.SITE_URL:
issues.append(
Warning(
"SITE_URL setting is not set to proper value",
id='tg_utils.W002',
)
)
return issues
def check_sentry_config(app_configs, **kwargs):
issues = []
if 'sentry' not in settings.LOGGING['handlers']:
return issues
if 'sentry' not in settings.LOGGING['loggers']['']['handlers']:
issues.append(
Warning(
"Sentry logging handler is present but unused",
hint="Ensure that sentry handler is part of LOGGING['loggers']['']['handlers']",
id='tg_utils.W011',
)
)
return issues
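

# A minimal sketch of wiring these checks into Django's system-check framework
# (normally done from an AppConfig.ready() method); the chosen tags are
# illustrative assumptions.
from django.core.checks import register, Tags

register(check_production_settings, Tags.security)
register(check_sentry_config, Tags.compatibility)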
|
[
"django.core.checks.Warning"
] |
[((309, 399), 'django.core.checks.Warning', 'Warning', (['"""EMAIL_HOST_PASSWORD setting is not set to proper value"""'], {'id': '"""tg_utils.W001"""'}), "('EMAIL_HOST_PASSWORD setting is not set to proper value', id=\n 'tg_utils.W001')\n", (316, 399), False, 'from django.core.checks import Warning\n'), ((524, 598), 'django.core.checks.Warning', 'Warning', (['"""SITE_URL setting is not set to proper value"""'], {'id': '"""tg_utils.W002"""'}), "('SITE_URL setting is not set to proper value', id='tg_utils.W002')\n", (531, 598), False, 'from django.core.checks import Warning\n'), ((921, 1086), 'django.core.checks.Warning', 'Warning', (['"""Sentry logging handler is present but unused"""'], {'hint': '"""Ensure that sentry handler is part of LOGGING[\'loggers\'][\'\'][\'handlers\']"""', 'id': '"""tg_utils.W011"""'}), '(\'Sentry logging handler is present but unused\', hint=\n "Ensure that sentry handler is part of LOGGING[\'loggers\'][\'\'][\'handlers\']",\n id=\'tg_utils.W011\')\n', (928, 1086), False, 'from django.core.checks import Warning\n')]
|
"""Contains DeepSpeech2 model."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
import os
import time
import logging
import gzip
import copy
import numpy as np
import inspect
from utils.decoder.swig_wrapper import Scorer
from utils.decoder.swig_wrapper import ctc_greedy_decoder
from utils.decoder.swig_wrapper import ctc_beam_search_decoder_batch
class LM_decoder(object):
def __init__(self, beam_alpha, beam_beta, language_model_path,
vocab_list):
"""Initialize the external scorer.
:param beam_alpha: Parameter associated with language model.
:type beam_alpha: float
:param beam_beta: Parameter associated with word count.
:type beam_beta: float
:param language_model_path: Filepath for language model. If it is
empty, the external scorer will be set to
None, and the decoding method will be pure
beam search without scorer.
:type language_model_path: basestring|None
:param vocab_list: List of tokens in the vocabulary, for decoding.
:type vocab_list: list
"""
if language_model_path != '':
print("begin to initialize the external scorer "
"for decoding")
self._ext_scorer = Scorer(beam_alpha, beam_beta,
language_model_path, vocab_list)
lm_char_based = self._ext_scorer.is_character_based()
lm_max_order = self._ext_scorer.get_max_order()
lm_dict_size = self._ext_scorer.get_dict_size()
print("language model: "
"is_character_based = %d," % lm_char_based +
" max_order = %d," % lm_max_order +
" dict_size = %d" % lm_dict_size)
print("end initializing scorer")
else:
self._ext_scorer = None
print("no language model provided, "
"decoding by pure beam search without scorer.")
def decode_batch_beam_search(self, probs_split, beam_alpha, beam_beta,
beam_size, cutoff_prob, cutoff_top_n,
vocab_list, num_processes):
"""Decode by beam search for a batch of probs matrix input.
        :param probs_split: List of 2-D probability matrices, each consisting
                            of prob vectors for one speech utterance.
        :type probs_split: List of matrix
:param beam_alpha: Parameter associated with language model.
:type beam_alpha: float
:param beam_beta: Parameter associated with word count.
:type beam_beta: float
:param beam_size: Width for Beam search.
:type beam_size: int
:param cutoff_prob: Cutoff probability in pruning,
default 1.0, no pruning.
:type cutoff_prob: float
:param cutoff_top_n: Cutoff number in pruning, only top cutoff_top_n
characters with highest probs in vocabulary will be
used in beam search, default 40.
:type cutoff_top_n: int
:param vocab_list: List of tokens in the vocabulary, for decoding.
:type vocab_list: list
:param num_processes: Number of processes (CPU) for decoder.
:type num_processes: int
:return: List of transcription texts.
:rtype: List of basestring
"""
if self._ext_scorer != None:
self._ext_scorer.reset_params(beam_alpha, beam_beta)
# beam search decode
num_processes = min(num_processes, np.shape(probs_split)[0])
beam_search_results = ctc_beam_search_decoder_batch(
probs_split=probs_split,
vocabulary=vocab_list,
beam_size=beam_size,
num_processes=num_processes,
ext_scoring_func=self._ext_scorer,
cutoff_prob=cutoff_prob,
cutoff_top_n=cutoff_top_n)
results = [result[0][1] for result in beam_search_results]
return results
def _adapt_feeding_dict(self, feeding_dict):
"""Adapt feeding dict according to network struct.
To remove impacts from padding part, we add scale_sub_region layer and
sub_seq layer. For sub_seq layer, 'sequence_offset' and
'sequence_length' fields are appended. For each scale_sub_region layer
'convN_index_range' field is appended.
:param feeding_dict: Feeding is a map of field name and tuple index
of the data that reader returns.
:type feeding_dict: dict|list
:return: Adapted feeding dict.
:rtype: dict|list
"""
adapted_feeding_dict = copy.deepcopy(feeding_dict)
if isinstance(feeding_dict, dict):
adapted_feeding_dict["sequence_offset"] = len(adapted_feeding_dict)
adapted_feeding_dict["sequence_length"] = len(adapted_feeding_dict)
for i in xrange(self._num_conv_layers):
adapted_feeding_dict["conv%d_index_range" %i] = \
len(adapted_feeding_dict)
elif isinstance(feeding_dict, list):
adapted_feeding_dict.append("sequence_offset")
adapted_feeding_dict.append("sequence_length")
for i in xrange(self._num_conv_layers):
adapted_feeding_dict.append("conv%d_index_range" % i)
else:
raise ValueError("Type of feeding_dict is %s, not supported." %
type(feeding_dict))
return adapted_feeding_dict
def _adapt_data(self, data):
"""Adapt data according to network struct.
        For each convolution layer in the conv_group, to remove the impact of
        padding data, we multiply the padding part of the outputs of each batch
        normalization layer by zero. We add a scale_sub_region layer after each
        batch normalization layer to reset the padding data.
        For rnn layers, to remove the impact of padding data, we truncate the
        padding part before the output data is fed into the first rnn layer. We
        use a sub_seq layer to achieve this.
:param data: Data from data_provider.
:type data: list|function
:return: Adapted data.
:rtype: list|function
"""
def adapt_instance(instance):
if len(instance) < 2 or len(instance) > 3:
raise ValueError("Size of instance should be 2 or 3.")
padded_audio = instance[0]
text = instance[1]
# no padding part
if len(instance) == 2:
audio_len = padded_audio.shape[1]
else:
audio_len = instance[2]
adapted_instance = [padded_audio, text]
# Stride size for conv0 is (3, 2)
# Stride size for conv1 to convN is (1, 2)
# Same as the network, hard-coded here
padded_conv0_h = (padded_audio.shape[0] - 1) // 2 + 1
padded_conv0_w = (padded_audio.shape[1] - 1) // 3 + 1
valid_w = (audio_len - 1) // 3 + 1
adapted_instance += [
[0], # sequence offset, always 0
[valid_w], # valid sequence length
# Index ranges for channel, height and width
# Please refer scale_sub_region layer to see details
[1, 32, 1, padded_conv0_h, valid_w + 1, padded_conv0_w]
]
pre_padded_h = padded_conv0_h
for i in xrange(self._num_conv_layers - 1):
padded_h = (pre_padded_h - 1) // 2 + 1
pre_padded_h = padded_h
adapted_instance += [
[1, 32, 1, padded_h, valid_w + 1, padded_conv0_w]
]
return adapted_instance
if isinstance(data, list):
return map(adapt_instance, data)
elif inspect.isgeneratorfunction(data):
def adapted_reader():
for instance in data():
yield map(adapt_instance, instance)
return adapted_reader
else:
raise ValueError("Type of data is %s, not supported." % type(data))
def _create_parameters(self, model_path=None):
"""Load or create model parameters."""
if model_path is None:
self._parameters = paddle.parameters.create(self._loss)
else:
self._parameters = paddle.parameters.Parameters.from_tar(
gzip.open(model_path))
def _create_network(self, vocab_size, num_conv_layers, num_rnn_layers,
rnn_layer_size, use_gru, share_rnn_weights):
"""Create data layers and model network."""
# paddle.data_type.dense_array is used for variable batch input.
        # The size 161 * 161 is only a placeholder value and the real shape
# of input batch data will be induced during training.
audio_data = paddle.layer.data(
name="audio_spectrogram",
type=paddle.data_type.dense_array(161 * 161))
text_data = paddle.layer.data(
name="transcript_text",
type=paddle.data_type.integer_value_sequence(vocab_size))
seq_offset_data = paddle.layer.data(
name='sequence_offset',
type=paddle.data_type.integer_value_sequence(1))
seq_len_data = paddle.layer.data(
name='sequence_length',
type=paddle.data_type.integer_value_sequence(1))
index_range_datas = []
for i in xrange(num_rnn_layers):
index_range_datas.append(
paddle.layer.data(
name='conv%d_index_range' % i,
type=paddle.data_type.dense_vector(6)))
self._log_probs, self._loss = deep_speech_v2_network(
audio_data=audio_data,
text_data=text_data,
seq_offset_data=seq_offset_data,
seq_len_data=seq_len_data,
index_range_datas=index_range_datas,
dict_size=vocab_size,
num_conv_layers=num_conv_layers,
num_rnn_layers=num_rnn_layers,
rnn_size=rnn_layer_size,
use_gru=use_gru,
share_rnn_weights=share_rnn_weights)
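

# A minimal decoding sketch for the wrapper above; the vocabulary and the random
# "acoustic" probabilities are illustrative only, and an empty language_model_path
# selects pure beam search without an external scorer.
if __name__ == '__main__':
    vocab_list = list("abcdefghijklmnopqrstuvwxyz '")
    probs_split = [np.random.rand(50, len(vocab_list) + 1) for _ in range(2)]
    probs_split = [p / p.sum(axis=1, keepdims=True) for p in probs_split]
    decoder = LM_decoder(beam_alpha=2.5, beam_beta=0.3,
                         language_model_path='', vocab_list=vocab_list)
    transcripts = decoder.decode_batch_beam_search(
        probs_split, beam_alpha=2.5, beam_beta=0.3, beam_size=20,
        cutoff_prob=1.0, cutoff_top_n=40, vocab_list=vocab_list,
        num_processes=2)
    print(transcripts)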
|
[
"copy.deepcopy",
"gzip.open",
"numpy.shape",
"inspect.isgeneratorfunction",
"utils.decoder.swig_wrapper.Scorer",
"utils.decoder.swig_wrapper.ctc_beam_search_decoder_batch"
] |
[((3843, 4070), 'utils.decoder.swig_wrapper.ctc_beam_search_decoder_batch', 'ctc_beam_search_decoder_batch', ([], {'probs_split': 'probs_split', 'vocabulary': 'vocab_list', 'beam_size': 'beam_size', 'num_processes': 'num_processes', 'ext_scoring_func': 'self._ext_scorer', 'cutoff_prob': 'cutoff_prob', 'cutoff_top_n': 'cutoff_top_n'}), '(probs_split=probs_split, vocabulary=\n vocab_list, beam_size=beam_size, num_processes=num_processes,\n ext_scoring_func=self._ext_scorer, cutoff_prob=cutoff_prob,\n cutoff_top_n=cutoff_top_n)\n', (3872, 4070), False, 'from utils.decoder.swig_wrapper import ctc_beam_search_decoder_batch\n'), ((4898, 4925), 'copy.deepcopy', 'copy.deepcopy', (['feeding_dict'], {}), '(feeding_dict)\n', (4911, 4925), False, 'import copy\n'), ((1435, 1497), 'utils.decoder.swig_wrapper.Scorer', 'Scorer', (['beam_alpha', 'beam_beta', 'language_model_path', 'vocab_list'], {}), '(beam_alpha, beam_beta, language_model_path, vocab_list)\n', (1441, 1497), False, 'from utils.decoder.swig_wrapper import Scorer\n'), ((8092, 8125), 'inspect.isgeneratorfunction', 'inspect.isgeneratorfunction', (['data'], {}), '(data)\n', (8119, 8125), False, 'import inspect\n'), ((3787, 3808), 'numpy.shape', 'np.shape', (['probs_split'], {}), '(probs_split)\n', (3795, 3808), True, 'import numpy as np\n'), ((8685, 8706), 'gzip.open', 'gzip.open', (['model_path'], {}), '(model_path)\n', (8694, 8706), False, 'import gzip\n')]
|
import slicer
def cliRunSync(module, node=None, parameters=None, delete_temporary_files=True, update_display=True):
"""Run CLI module. If ipywidgets are installed then it reports progress.
"""
try:
from ipywidgets import IntProgress
from IPython.display import display
    # Asynchronous run, with progress reporting via a widget
node = slicer.cli.run(module, node=node, parameters=parameters, wait_for_completion=False,
delete_temporary_files=delete_temporary_files, update_display=update_display)
import time
progress = IntProgress()
display(progress) # display progress bar
while node.IsBusy():
progress.value = node.GetProgress()
slicer.app.processEvents()
time.sleep(.3)
progress.layout.display = 'none' # hide progress bar
except ImportError:
    # No widgets, therefore no progress reporting - just do a simple synchronous CLI run
node = slicer.cli.runSync(module, node=node, parameters=parameters, wait_for_completion=False,
delete_temporary_files=delete_temporary_files, update_display=update_display)
return node
|
[
"slicer.cli.run",
"IPython.display.display",
"time.sleep",
"ipywidgets.IntProgress",
"slicer.cli.runSync",
"slicer.app.processEvents"
] |
[((358, 528), 'slicer.cli.run', 'slicer.cli.run', (['module'], {'node': 'node', 'parameters': 'parameters', 'wait_for_completion': '(False)', 'delete_temporary_files': 'delete_temporary_files', 'update_display': 'update_display'}), '(module, node=node, parameters=parameters,\n wait_for_completion=False, delete_temporary_files=\n delete_temporary_files, update_display=update_display)\n', (372, 528), False, 'import slicer\n'), ((555, 568), 'ipywidgets.IntProgress', 'IntProgress', ([], {}), '()\n', (566, 568), False, 'from ipywidgets import IntProgress\n'), ((573, 590), 'IPython.display.display', 'display', (['progress'], {}), '(progress)\n', (580, 590), False, 'from IPython.display import display\n'), ((691, 717), 'slicer.app.processEvents', 'slicer.app.processEvents', ([], {}), '()\n', (715, 717), False, 'import slicer\n'), ((726, 741), 'time.sleep', 'time.sleep', (['(0.3)'], {}), '(0.3)\n', (736, 741), False, 'import time\n'), ((920, 1094), 'slicer.cli.runSync', 'slicer.cli.runSync', (['module'], {'node': 'node', 'parameters': 'parameters', 'wait_for_completion': '(False)', 'delete_temporary_files': 'delete_temporary_files', 'update_display': 'update_display'}), '(module, node=node, parameters=parameters,\n wait_for_completion=False, delete_temporary_files=\n delete_temporary_files, update_display=update_display)\n', (938, 1094), False, 'import slicer\n')]
|
import flask
from flask_restful import Resource
from fence.auth import login_user
from fence.blueprints.login.redirect import validate_redirect
from fence.errors import InternalError, Unauthorized
from fence.models import IdentityProvider
from fence.config import config
class ShibbolethLogin(Resource):
def get(self):
"""
The login flow is:
user
-> {fence}/login/shib?redirect={portal}
-> user login at {nih_shibboleth_idp}
-> nih idp POST to fence shibboleth and establish a shibboleth sp
session
-> redirect to {fence}/login/shib/login that sets up fence session
-> redirect to portal
"""
redirect_url = flask.request.args.get("redirect")
validate_redirect(redirect_url)
if redirect_url:
flask.session["redirect"] = redirect_url
actual_redirect = config["BASE_URL"] + "/login/shib/login"
return flask.redirect(config["SSO_URL"] + actual_redirect)
class ShibbolethCallback(Resource):
def get(self):
"""
Complete the shibboleth login.
"""
if "SHIBBOLETH_HEADER" in config:
eppn = flask.request.headers.get(config["SHIBBOLETH_HEADER"])
else:
raise InternalError("Missing shibboleth header configuration")
username = eppn.split("!")[-1] if eppn else None
if username:
login_user(flask.request, username, IdentityProvider.itrust)
if flask.session.get("redirect"):
return flask.redirect(flask.session.get("redirect"))
return "logged in"
else:
raise Unauthorized("Please login")
|
[
"fence.errors.InternalError",
"flask.redirect",
"flask.request.headers.get",
"flask.request.args.get",
"flask.session.get",
"fence.auth.login_user",
"fence.errors.Unauthorized",
"fence.blueprints.login.redirect.validate_redirect"
] |
[((705, 739), 'flask.request.args.get', 'flask.request.args.get', (['"""redirect"""'], {}), "('redirect')\n", (727, 739), False, 'import flask\n'), ((748, 779), 'fence.blueprints.login.redirect.validate_redirect', 'validate_redirect', (['redirect_url'], {}), '(redirect_url)\n', (765, 779), False, 'from fence.blueprints.login.redirect import validate_redirect\n'), ((940, 991), 'flask.redirect', 'flask.redirect', (["(config['SSO_URL'] + actual_redirect)"], {}), "(config['SSO_URL'] + actual_redirect)\n", (954, 991), False, 'import flask\n'), ((1173, 1227), 'flask.request.headers.get', 'flask.request.headers.get', (["config['SHIBBOLETH_HEADER']"], {}), "(config['SHIBBOLETH_HEADER'])\n", (1198, 1227), False, 'import flask\n'), ((1261, 1317), 'fence.errors.InternalError', 'InternalError', (['"""Missing shibboleth header configuration"""'], {}), "('Missing shibboleth header configuration')\n", (1274, 1317), False, 'from fence.errors import InternalError, Unauthorized\n'), ((1408, 1468), 'fence.auth.login_user', 'login_user', (['flask.request', 'username', 'IdentityProvider.itrust'], {}), '(flask.request, username, IdentityProvider.itrust)\n', (1418, 1468), False, 'from fence.auth import login_user\n'), ((1484, 1513), 'flask.session.get', 'flask.session.get', (['"""redirect"""'], {}), "('redirect')\n", (1501, 1513), False, 'import flask\n'), ((1647, 1675), 'fence.errors.Unauthorized', 'Unauthorized', (['"""Please login"""'], {}), "('Please login')\n", (1659, 1675), False, 'from fence.errors import InternalError, Unauthorized\n'), ((1553, 1582), 'flask.session.get', 'flask.session.get', (['"""redirect"""'], {}), "('redirect')\n", (1570, 1582), False, 'import flask\n')]
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import torch
import unittest
from torch.autograd import Variable
from gpytorch.lazy import RootLazyVariable
from gpytorch.utils import approx_equal
class TestRootLazyVariable(unittest.TestCase):
def test_matmul(self):
root = Variable(torch.randn(5, 3), requires_grad=True)
covar = RootLazyVariable(root)
mat = Variable(torch.eye(5))
res = covar.matmul(mat)
root_clone = Variable(root.data.clone(), requires_grad=True)
mat_clone = Variable(mat.data.clone())
actual = root_clone.matmul(root_clone.transpose(-1, -2)).matmul(mat_clone)
self.assertTrue(approx_equal(res.data, actual.data))
gradient = torch.randn(5, 5)
actual.backward(gradient=Variable(gradient))
res.backward(gradient=Variable(gradient))
self.assertTrue(approx_equal(root.grad.data, root_clone.grad.data))
def test_diag(self):
root = Variable(torch.randn(5, 3))
actual = root.matmul(root.transpose(-1, -2))
res = RootLazyVariable(root)
self.assertTrue(approx_equal(actual.diag().data, res.diag().data))
def test_batch_diag(self):
root = Variable(torch.randn(4, 5, 3))
actual = root.matmul(root.transpose(-1, -2))
actual_diag = torch.cat(
[
actual[0].diag().unsqueeze(0),
actual[1].diag().unsqueeze(0),
actual[2].diag().unsqueeze(0),
actual[3].diag().unsqueeze(0),
]
)
res = RootLazyVariable(root)
self.assertTrue(approx_equal(actual_diag.data, res.diag().data))
def test_evaluate(self):
root = Variable(torch.randn(5, 3))
actual = root.matmul(root.transpose(-1, -2))
res = RootLazyVariable(root)
self.assertTrue(approx_equal(actual.data, res.evaluate().data))
if __name__ == "__main__":
unittest.main()
|
[
"unittest.main",
"torch.eye",
"torch.autograd.Variable",
"torch.randn",
"gpytorch.utils.approx_equal",
"gpytorch.lazy.RootLazyVariable"
] |
[((2028, 2043), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2041, 2043), False, 'import unittest\n'), ((453, 475), 'gpytorch.lazy.RootLazyVariable', 'RootLazyVariable', (['root'], {}), '(root)\n', (469, 475), False, 'from gpytorch.lazy import RootLazyVariable\n'), ((827, 844), 'torch.randn', 'torch.randn', (['(5)', '(5)'], {}), '(5, 5)\n', (838, 844), False, 'import torch\n'), ((1161, 1183), 'gpytorch.lazy.RootLazyVariable', 'RootLazyVariable', (['root'], {}), '(root)\n', (1177, 1183), False, 'from gpytorch.lazy import RootLazyVariable\n'), ((1664, 1686), 'gpytorch.lazy.RootLazyVariable', 'RootLazyVariable', (['root'], {}), '(root)\n', (1680, 1686), False, 'from gpytorch.lazy import RootLazyVariable\n'), ((1900, 1922), 'gpytorch.lazy.RootLazyVariable', 'RootLazyVariable', (['root'], {}), '(root)\n', (1916, 1922), False, 'from gpytorch.lazy import RootLazyVariable\n'), ((398, 415), 'torch.randn', 'torch.randn', (['(5)', '(3)'], {}), '(5, 3)\n', (409, 415), False, 'import torch\n'), ((499, 511), 'torch.eye', 'torch.eye', (['(5)'], {}), '(5)\n', (508, 511), False, 'import torch\n'), ((770, 805), 'gpytorch.utils.approx_equal', 'approx_equal', (['res.data', 'actual.data'], {}), '(res.data, actual.data)\n', (782, 805), False, 'from gpytorch.utils import approx_equal\n'), ((973, 1023), 'gpytorch.utils.approx_equal', 'approx_equal', (['root.grad.data', 'root_clone.grad.data'], {}), '(root.grad.data, root_clone.grad.data)\n', (985, 1023), False, 'from gpytorch.utils import approx_equal\n'), ((1075, 1092), 'torch.randn', 'torch.randn', (['(5)', '(3)'], {}), '(5, 3)\n', (1086, 1092), False, 'import torch\n'), ((1315, 1335), 'torch.randn', 'torch.randn', (['(4)', '(5)', '(3)'], {}), '(4, 5, 3)\n', (1326, 1335), False, 'import torch\n'), ((1814, 1831), 'torch.randn', 'torch.randn', (['(5)', '(3)'], {}), '(5, 3)\n', (1825, 1831), False, 'import torch\n'), ((878, 896), 'torch.autograd.Variable', 'Variable', (['gradient'], {}), '(gradient)\n', (886, 896), False, 'from torch.autograd import Variable\n'), ((928, 946), 'torch.autograd.Variable', 'Variable', (['gradient'], {}), '(gradient)\n', (936, 946), False, 'from torch.autograd import Variable\n')]
|
"""
Run PyTorch NAF on many envs.
"""
import random
import railrl.torch.pytorch_util as ptu
from railrl.exploration_strategies.ou_strategy import OUStrategy
from railrl.launchers.launcher_util import run_experiment
from railrl.torch.naf import NafPolicy, NAF
from rllab.envs.mujoco.ant_env import AntEnv
from rllab.envs.mujoco.half_cheetah_env import HalfCheetahEnv
from rllab.envs.mujoco.hopper_env import HopperEnv
from rllab.envs.mujoco.swimmer_env import SwimmerEnv
from rllab.envs.normalized_env import normalize
def example(variant):
env = variant['env_class']()
env = normalize(env)
es = OUStrategy(action_space=env.action_space)
qf = NafPolicy(
int(env.observation_space.flat_dim),
int(env.action_space.flat_dim),
100,
)
algorithm = NAF(
env,
naf_policy=qf,
exploration_strategy=es,
**variant['algo_params']
)
algorithm.to(ptu.device)
algorithm.train()
if __name__ == "__main__":
# noinspection PyTypeChecker
variant = dict(
algo_params=dict(
num_epochs=100,
num_steps_per_epoch=10000,
num_steps_per_eval=1000,
use_soft_update=True,
tau=1e-2,
batch_size=128,
max_path_length=1000,
discount=0.99,
naf_policy_learning_rate=1e-4,
),
version="NAF",
)
for env_class in [
SwimmerEnv,
HalfCheetahEnv,
AntEnv,
HopperEnv,
]:
variant['env_class'] = env_class
variant['version'] = str(env_class)
for _ in range(5):
seed = random.randint(0, 999999)
run_experiment(
example,
exp_prefix="naf-benchmarks-envs-pytorch",
seed=seed,
mode='ec2',
variant=variant,
use_gpu=False,
)
|
[
"random.randint",
"railrl.exploration_strategies.ou_strategy.OUStrategy",
"railrl.launchers.launcher_util.run_experiment",
"rllab.envs.normalized_env.normalize",
"railrl.torch.naf.NAF"
] |
[((586, 600), 'rllab.envs.normalized_env.normalize', 'normalize', (['env'], {}), '(env)\n', (595, 600), False, 'from rllab.envs.normalized_env import normalize\n'), ((610, 651), 'railrl.exploration_strategies.ou_strategy.OUStrategy', 'OUStrategy', ([], {'action_space': 'env.action_space'}), '(action_space=env.action_space)\n', (620, 651), False, 'from railrl.exploration_strategies.ou_strategy import OUStrategy\n'), ((792, 866), 'railrl.torch.naf.NAF', 'NAF', (['env'], {'naf_policy': 'qf', 'exploration_strategy': 'es'}), "(env, naf_policy=qf, exploration_strategy=es, **variant['algo_params'])\n", (795, 866), False, 'from railrl.torch.naf import NafPolicy, NAF\n'), ((1636, 1661), 'random.randint', 'random.randint', (['(0)', '(999999)'], {}), '(0, 999999)\n', (1650, 1661), False, 'import random\n'), ((1674, 1798), 'railrl.launchers.launcher_util.run_experiment', 'run_experiment', (['example'], {'exp_prefix': '"""naf-benchmarks-envs-pytorch"""', 'seed': 'seed', 'mode': '"""ec2"""', 'variant': 'variant', 'use_gpu': '(False)'}), "(example, exp_prefix='naf-benchmarks-envs-pytorch', seed=seed,\n mode='ec2', variant=variant, use_gpu=False)\n", (1688, 1798), False, 'from railrl.launchers.launcher_util import run_experiment\n')]
|
from tests.fixtures import client
def test_point_json(client):
rv = client.get("/points/51.501,-0.2936")
point_json = rv.get_json()
assert rv.headers["Access-Control-Allow-Origin"] == "*"
assert (
point_json.get("data", {})
.get("relationships", {})
.get("nearest_postcode", {})
.get("data", {})
.get("id")
== "EX36 4AT"
)
assert (
point_json.get("data", {}).get("attributes", {}).get("distance_from_postcode")
== 68.9707515287199
)
def test_point_json_distance(client):
pass
def test_point_html(client):
rv = client.get("/points/51.501,-0.2936.html")
assert rv.mimetype == "text/html"
content = rv.data.decode("utf8")
assert "EX36 4AT" in content
assert "E01020135" in content
assert "69.0" in content
def test_point_html_distance(client):
pass
|
[
"tests.fixtures.client.get"
] |
[((74, 110), 'tests.fixtures.client.get', 'client.get', (['"""/points/51.501,-0.2936"""'], {}), "('/points/51.501,-0.2936')\n", (84, 110), False, 'from tests.fixtures import client\n'), ((617, 658), 'tests.fixtures.client.get', 'client.get', (['"""/points/51.501,-0.2936.html"""'], {}), "('/points/51.501,-0.2936.html')\n", (627, 658), False, 'from tests.fixtures import client\n')]
|
import os
import pathlib
import re
import time
import sys
import json
import cv2
import h5py
import math
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.path as mpath
import matplotlib.lines as mlines
import matplotlib.patches as mpatches
import matplotlib as mpl
from scipy.sparse import csr_matrix
from fast_histogram import histogram1d
from datetime import datetime
from importlib import reload
from PyQt5 import QtCore, QtGui, QtWidgets
# from PyQt5.QtMultimedia import QMediaPlayer
# from PyQt5.QtMultimedia import QMediaContent
# from PyQt5.QtMultimediaWidgets import QVideoWidget
from PyQt5.QtWidgets import QApplication, QMainWindow, QLabel, QSizePolicy, QWidget, QInputDialog, QFileDialog
from PyQt5.QtWidgets import QHBoxLayout, QLabel, QPushButton, QStyle, QVBoxLayout, QWidget, QSlider, QPushButton, QAction
from PyQt5.QtGui import QImage, QPixmap, QIcon
from PyQt5.QtCore import QDir, Qt, QUrl
import tools.imagepl as opl
import tools.fileio as fio
def figPlotGCs(roiDict, organism='Yeast', saveAll=False, savePath=None):
''' Plots growth curves using matplotlib'''
plt.close('all')
pltRange = setPlotRange(organism)
roiID = roiDict['roi']
timePoints = roiDict['timePoints']/pltRange['GCs']['devisor']
rawobjectArea = roiDict['objectArea']
rawfitData = roiDict['fitData']
numObsv = pltRange['GCs']['numObservations']
rngTime = pltRange['GCs']['xRange']
rngArea = pltRange['GCs']['yRange']
rngTdbl = pltRange['Dbl']['xRange']
rngTlag = pltRange['Lag']['xRange']
rngTexp = pltRange['Tex']['xRange']
rngNDub = pltRange['NumDbl']['xRange']
if len(roiDict['roiInfo'])>0 :
roiID = roiDict['roiInfo']['Strain ID']
numObservations = np.sum(rawobjectArea>0, 1) > numObsv
numDbl = rawfitData[:,1]>0
fitIndex = rawfitData[:,0]>0
dataFilter = numObservations * fitIndex * numDbl
fitData = rawfitData[dataFilter, :]
objectArea = rawobjectArea[dataFilter,:].transpose()
fitData[:,3]/=pltRange['Tex']['devisor']
fitData[:,5]/=pltRange['Lag']['devisor']
fitData[:,6]/=pltRange['Dbl']['devisor']
textLbls= ['Growth Curves','Td (hrs)', 'Tlag (hrs)','Texp (hrs)','Num Dbl']
lineColor = np.array([ [0, 0, 0, 0.3],
[0, 0, 1, 1],
[0, 0.7, 0, 1],
[1, 0, 0, 1],
[0.7,0.5, 0, 1]], dtype = 'float')
xLim = np.array([rngTime,
rngTdbl,
rngTlag,
rngTexp,
rngNDub], dtype = 'float64')
wScale = 0.75
numbins = 75
fitCol = [6,6,5,3,1]
normVirts = np.zeros((5,numbins), dtype='float64')
virts = np.zeros((5,numbins), dtype='float64')
nbins = np.zeros((5,numbins), dtype='float64')
for cnt in range(5):
nbins[cnt,:] = np.linspace(xLim[cnt,0], xLim[cnt,1], num=numbins)
virts[cnt,:] = histogram1d( fitData[:,fitCol[cnt]], 75, xLim[cnt,:], weights = None)
normVirts[cnt,:] = (virts[cnt,:]/np.max(virts[cnt,2:-10]))*wScale
axesPos = np.array([[0.1875, 0.66666667, 0.75, 0.28],
[0.1875, 0.48666667, 0.75, 0.1],
[0.1875, 0.33333333, 0.75, 0.1],
[0.1875, 0.19333333, 0.75, 0.1],
[0.1875, 0.05333333, 0.75, 0.1]], dtype = 'float64')
xLim = np.array([rngTime,
rngTdbl,
rngTlag,
rngTexp,
rngNDub], dtype = 'float64')
yLim = np.array( [rngArea,
[0,1],
[0,1],
[0,1],
[0,1]], dtype = 'float64')
Label_Font = 12
Title_Font = 12
mpl.rcParams['axes.linewidth'] = 2
mpl.rcParams['xtick.major.width'] = 2
mpl.rcParams['ytick.major.width'] = 2
mpl.rcParams['xtick.direction'] = 'in'
mpl.rcParams['ytick.direction'] = 'in'
mpl.rcParams['font.family'] = 'Arial'
mpl.rcParams['font.weight'] = 'bold'
mpl.rcParams['axes.titlesize'] = Title_Font
mpl.rcParams['axes.labelsize'] = Label_Font
gcFig = plt.figure(figsize=[4,7.5], dpi=100, facecolor='w')
axs = []
n = 0
axs.append(plt.axes(axesPos[n,:], xlim=xLim[n,:], ylim=yLim[n,:]))
axs[0].plot(timePoints, np.log2(objectArea), color=lineColor[n,:], linewidth=0.8)
axs[0].set_xlabel('Time (hrs)', fontsize=Label_Font, fontweight='bold')
axs[0].set_ylabel('log2[Area]', fontsize=Label_Font, fontweight='bold')
axs[0].set_title(roiID, fontsize=Label_Font, fontweight='bold')
for n in range(1,5):
axs.append(plt.axes(axesPos[n,:], xlim=xLim[n,:], ylim=yLim[n,:]))
axs[n].plot(nbins[n,:],normVirts[n,:],color=lineColor[n,:])
xPos = 0.7*np.abs(np.diff(xLim[n,:]))+xLim[n,0]
axs[n].text(xPos,0.75, textLbls[n], fontsize = Label_Font,fontweight='bold',color=lineColor[n,:])
if saveAll:
plt.savefig(savePath)
else:
plt.show()
return None
def figExpSummary(expDict, organism='Yeast'):
plt.close('all')
Label_Font = 12
Title_Font = 12
mpl.rcParams['axes.linewidth'] = 2
mpl.rcParams['xtick.major.width'] = 2
mpl.rcParams['ytick.major.width'] = 2
mpl.rcParams['xtick.direction'] = 'in'
mpl.rcParams['ytick.direction'] = 'in'
mpl.rcParams['font.family'] = 'Arial'
mpl.rcParams['font.weight'] = 'bold'
mpl.rcParams['axes.titlesize'] = Title_Font
mpl.rcParams['axes.labelsize'] = Label_Font
plotDict = setPlotRange(organism)
rngGCs = plotDict['GCs']['xRange']
rngTdbl = plotDict['Dbl']['xRange']
rngTlag = plotDict['Lag']['xRange']
rngTexp = plotDict['Tex']['xRange']
rngNDub = plotDict['NumDbl']['xRange']
rngPopNum = plotDict['PopNum']['xRange']
cntrLbl = ['Dbl', 'Lag', 'Tex', 'NumDbl', 'PopNum']
tickList = {}
left = 1.25/6
bottom = 0.4/10
width = 1.2/8
height = 9/10
spacing = 0.05/6
xLim = np.array([rngTdbl,
rngTlag,
rngTexp,
rngNDub,
rngPopNum], dtype = 'float64')
textLbls= ['Td (hrs)', 'Tlag (hrs)','Texp (hrs)','Num Dbl','Pop Cnt']
Path = mpath.Path
commands = {'M': (mpath.Path.MOVETO,),
'L': (mpath.Path.LINETO,),
'Q': (mpath.Path.CURVE3,)*2,
'C': (mpath.Path.CURVE4,)*3,
'Z': (mpath.Path.CLOSEPOLY,)}
numbins = 75
fitCol = [6,5,3,1]
# breakpoint()
devisor = [
plotDict['Dbl']['devisor'],
plotDict['Lag']['devisor'],
plotDict['Tex']['devisor'],
plotDict['NumDbl']['devisor']
]
roiList = [*expDict.keys()]
key1='roiInfo'
key2='Strain ID'
yTickLbl=[]
for roi in expDict.keys():
if len(expDict[roi][key1])>0:
yTickLbl.append(expDict[roi][key1][key2])
else:
yTickLbl.append(roi)
roiList = [x for _, x in sorted( zip(yTickLbl, roiList), key=lambda pair: pair[0])]
roiList.reverse()
yTickLbl.sort()
yTickLbl.reverse()
yTickLbl.insert(0,'')
yTickLbl.append('')
numRoi = len(roiList)
poptot = np.zeros((numRoi+1,2), dtype='int')
wScale = 0.8
pathDict = {}
cntr=0
for key in roiList:
cntr+=1
normVirts = np.zeros((5,numbins), dtype='float64')
virts = np.zeros((5,numbins), dtype='float64')
nbins = np.zeros((5,numbins), dtype='float64')
fitData = expDict[key]['fitData']
poptot[cntr,:] = fitData.shape
pathDict[key]={}
for n in range(4):
nbins[n,:] = np.linspace(xLim[n,0], xLim[n,1], num=numbins)
virts[n,:] = histogram1d( fitData[:,fitCol[n]]/devisor[n], numbins, xLim[n,:], weights = None)
normVirts[n,:] = (virts[n,:]/np.max(virts[n,2:-10]))*wScale
codes, verts = parseVirts(nbins[n,:], normVirts[n,:])
verts[:,1] += cntr-0.5
path = mpath.Path(verts, codes)
pathDict[key][textLbls[n]] = path
pathDict[key]['nbins'] = nbins
pathDict[key]['normVirts'] = normVirts
axesPos = np.zeros((5,4),dtype = 'float')
for n in range(5):
axesPos[n,:] = [left+n*(width+spacing),bottom,width,height]
gcFig = plt.figure(figsize=[7,9], dpi=100, facecolor='w')
axs = []
n = 0
xTicks = plotDict[cntrLbl[n]]['xTicks']
xticklabels = [str(value) for value in xTicks]
axs.append(plt.axes(axesPos[n,:], xlim=xLim[n,:], ylim=[0,numRoi+1], yticks=list(range(numRoi+1)), xticks=xTicks))
axs[n].set_yticklabels(yTickLbl, fontsize=6, fontweight = 'bold')
axs[n].set_xticklabels(xticklabels, fontsize=8, fontweight = 'bold', rotation= 45 )
axs[n].set_title(textLbls[n], fontsize=10, fontweight = 'bold' )
for roi in roiList:
patch = mpatches.PathPatch(pathDict[roi][textLbls[n]], facecolor = [0,0,1,1], edgecolor = None, linewidth = 0 )
axs[n].add_patch(patch)
for n in range(1,4):
xTicks = plotDict[cntrLbl[n]]['xTicks']
xticklabels = [str(value) for value in xTicks]
axs.append(plt.axes(axesPos[n,:], xlim=xLim[n,:], ylim=[0,numRoi+1], yticks=[], xticks=xTicks))
axs[n].set_xticklabels(xticklabels, fontsize=8, fontweight = 'bold', rotation= 45 )
axs[n].set_title(textLbls[n], fontsize=10, fontweight = 'bold' )
for roi in roiList:
patch = mpatches.PathPatch(pathDict[roi][textLbls[n]], facecolor = [0,0,1,1], edgecolor = None, linewidth = 0 )
axs[n].add_patch(patch)
n +=1
xTicks = plotDict[cntrLbl[n]]['xTicks']
xticklabels = [str(value) for value in xTicks]
ypos = np.arange(poptot.shape[0])
xstart = np.zeros((poptot.shape[0],),dtype = 'float')
axs.append(plt.axes(axesPos[n,:], xscale = 'log', xlim=[1,10000], ylim=[0,numRoi+1], yticks=[], xticks=xTicks))
axs[n].hlines(ypos, xstart, poptot[:,0], linewidth = 5, color = [0,0,1,1] )
axs[n].set_yticklabels(yTickLbl, fontsize=6, fontweight = 'bold')
axs[n].set_xticklabels(xticklabels, fontsize=8, fontweight = 'bold', rotation= 45 )
axs[n].set_title(textLbls[n], fontsize=10, fontweight = 'bold' )
plt.show()
return None
def stitchIm( roiLbl, imNum, imageDir, dataDir):
expPath = pathlib.Path(imageDir)
# indexList = [k for k in expPath.glob('*Index_ODELAYData.*')]
    # Generate the image file path by combining the region of interest label with the experiment path
roiFolder = pathlib.Path('./'+ roiLbl)
imageFileName = pathlib.Path('./'+ roiLbl + '_'+ f'{imNum:00d}' + '.mat')
imageFilePath = expPath / roiFolder / imageFileName
    # Load Region of Interest Data. This HDF5 file should contain the locations of the image stitch coordinates
dataPath = pathlib.Path(dataDir)
initPath = list(dataPath.glob('*Index_ODELAYData.hdf5'))
initData = fio.loadData(initPath[0])
background = initData['backgroundImage']
pixSize = initData['pixSize']
magnification = initData['magnification']
anImage = opl.stitchImage(imageFilePath, pixSize, magnification, background)
im = anImage['Bf']
imSize = im.shape
# This data should be recorded from image display to make sure the image is visible.
imageHist = histogram1d(im.ravel(),2**16,[0,2**16],weights = None).astype('float')
# Calculate the cumulative probability ignoring zero values
cumHist = np.cumsum(imageHist)
cumProb = (cumHist-cumHist[0])/(cumHist[2**16-1]-cumHist[0])
    # set low and high values to normalize image contrast.
loval = np.argmax(cumProb>0.00001)
hival = np.argmax(cumProb>=0.9995)
adjIm = np.array((im.astype('float') - loval.astype('float'))/(hival.astype('float') - loval.astype('float'))*254, dtype = 'uint8')
rsIm = cv2.resize(adjIm, (round(imSize[1]/5), round(imSize[0]/5)))
cv2.imshow('Display Image', rsIm)
k = cv2.waitKey(0)
if k == 107 or k == -1:
cv2.destroyWindow('Display Image')
return k
def showImage(roiLbl, imNum, imageDir, dataDir):
# image = odp.stitchImage(imageFileName, pixSize, magnification, background)
expPath = pathlib.Path(imageDir)
    # Generate the image file path by combining the region of interest label with the experiment path
roiFolder = pathlib.Path('./'+ roiLbl)
imageFileName = pathlib.Path('./'+ roiLbl + '_'+ f'{imNum:00d}' + '.mat')
imageFilePath = expPath / roiFolder / imageFileName
    # Load Region of Interest Data. This HDF5 file should contain the locations of the image stitch coordinates
dataPath = pathlib.Path(dataDir)
initPath = list(dataPath.glob('*Index_ODELAYData.hdf5'))
initData = fio.loadData(initPath[0])
roiPath = dataPath / 'ODELAY Roi Data' / f'{roiLbl}.hdf5'
roiData = fio.loadData(roiPath)
background = initData['backgroundImage']
# This data should be extracted from the Experiment Index file or stage data file.
pixSize = initData['pixSize']
magnification = initData['magnification']
stInd = f'{imNum-1:03d}'
stitchCorners = roiData['stitchMeta'][stInd]['imPix']
# breakpoint()
anImage = opl.assembleImage(imageFilePath, pixSize, magnification, background, stitchCorners)
im = anImage['Bf']
# im = opl.SobelGradient(im)
imSize = im.shape
# This data should be recorded from image display to make sure the image is visible.
imageHist = histogram1d(im.ravel(),2**16,[0,2**16],weights = None).astype('float')
# Calculate the cumulative probability ignoring zero values
cumHist = np.cumsum(imageHist)
cumProb = (cumHist-cumHist[0])/(cumHist[2**16-1]-cumHist[0])
    # set low and high values to normalize image contrast.
loval = np.argmax(cumProb>0.00001)
hival = np.argmax(cumProb>=0.9995)
adjIm = np.array((im.astype('float') - loval.astype('float'))/(hival.astype('float') - loval.astype('float'))*254, dtype = 'uint8')
rsIm = cv2.resize(adjIm, (round(imSize[1]/5), round(imSize[0]/5)))
cv2.imshow('Display Image', rsIm)
k = cv2.waitKey(0)
if k == 107 or k == -1:
cv2.destroyWindow('Display Image')
return k
def setPlotRange(organism=None):
plotRange = {}
plotRange['Mtb'] = {}
plotRange['Mtb']['GCs'] = {}
plotRange['Mtb']['Dbl'] = {}
plotRange['Mtb']['Lag'] = {}
plotRange['Mtb']['Tex'] = {}
plotRange['Mtb']['Area'] = {}
plotRange['Mtb']['NumDbl'] = {}
plotRange['Mtb']['PopNum'] = {}
plotRange['Mtb']['GCs']['xRange'] = [0, 170]
plotRange['Mtb']['GCs']['yRange'] = [4, 14]
plotRange['Mtb']['GCs']['xTicks'] = np.arange(0,100,20)
plotRange['Mtb']['GCs']['xLabel'] = 'Hours'
plotRange['Mtb']['GCs']['titleFrag'] = 'Dbl Time Hr'
plotRange['Mtb']['GCs']['devisor'] = 60
plotRange['Mtb']['GCs']['numObservations'] = 20
plotRange['Mtb']['Dbl']['xRange'] = [0, 100]
plotRange['Mtb']['Dbl']['xTicks'] = [20,40,60,80]
plotRange['Mtb']['Dbl']['xStep'] = 5
plotRange['Mtb']['Dbl']['xLabel'] = 'Hours'
plotRange['Mtb']['Dbl']['titleFrag'] = 'Dbl Time Hr'
plotRange['Mtb']['Dbl']['devisor'] = 60
plotRange['Mtb']['Lag']['xRange'] = [0, 100]
plotRange['Mtb']['Lag']['xTicks'] = [20,40,60,80]
plotRange['Mtb']['Lag']['xStep'] = 2
plotRange['Mtb']['Lag']['xLabel'] = 'Hours'
plotRange['Mtb']['Lag']['titleFrag'] = 'Lag Time Hr'
plotRange['Mtb']['Lag']['devisor'] = 60
plotRange['Mtb']['Tex']['xRange'] = [0, 100]
plotRange['Mtb']['Tex']['xTicks'] = [20,40,60,80]
plotRange['Mtb']['Tex']['xStep'] = 2
plotRange['Mtb']['Tex']['xLabel'] = 'Hours'
plotRange['Mtb']['Tex']['titleFrag'] = 'Tex Hr'
plotRange['Mtb']['Tex']['devisor'] = 30
plotRange['Mtb']['Area']['xRange'] = [0, 30]
plotRange['Mtb']['Area']['xTicks'] = [2,4,6,8]
plotRange['Mtb']['Area']['xStep'] = 0.25
plotRange['Mtb']['Area']['xLabel'] = 'log2 Pixels'
plotRange['Mtb']['Area']['titleFrag'] = 'log2 Area'
plotRange['Mtb']['Area']['devisor'] = 1
plotRange['Mtb']['NumDbl']['xRange'] = [0, 10]
plotRange['Mtb']['NumDbl']['xTicks'] = [2,4,6,8]
plotRange['Mtb']['NumDbl']['xStep'] = 0.25
plotRange['Mtb']['NumDbl']['xLabel'] = 'Num Dbl Rel'
plotRange['Mtb']['NumDbl']['titleFrag'] = 'Num Dbl Rel'
plotRange['Mtb']['NumDbl']['devisor'] = 1
plotRange['Mtb']['PopNum']['xRange'] = [0, 10000]
plotRange['Mtb']['PopNum']['xTicks'] = [10,100,1000]
plotRange['Mtb']['PopNum']['xStep'] = 10
plotRange['Mtb']['PopNum']['xLabel'] = 'log10 Pop'
plotRange['Mtb']['PopNum']['titleFrag'] = 'Pop Num'
plotRange['Mtb']['PopNum']['devisor'] = 1
plotRange['Mabs'] = {}
plotRange['Mabs']['GCs'] = {}
plotRange['Mabs']['Dbl'] = {}
plotRange['Mabs']['Lag'] = {}
plotRange['Mabs']['Tex'] = {}
plotRange['Mabs']['Area'] = {}
plotRange['Mabs']['NumDbl'] = {}
plotRange['Mabs']['PopNum'] = {}
plotRange['Mabs']['GCs']['xRange'] = [0, 70]
plotRange['Mabs']['GCs']['yRange'] = [4, 16]
plotRange['Mabs']['GCs']['xTicks'] = np.arange(0,70,10)
plotRange['Mabs']['GCs']['xLabel'] = 'Hours'
plotRange['Mabs']['GCs']['titleFrag'] = 'Dbl Time Hr'
plotRange['Mabs']['GCs']['devisor'] = 60
plotRange['Mabs']['GCs']['numObservations'] = 20
plotRange['Mabs']['Dbl']['xRange'] = [0, 10]
plotRange['Mabs']['Dbl']['xTicks'] = [2,4,6,8]
plotRange['Mabs']['Dbl']['xStep'] = 0.5
plotRange['Mabs']['Dbl']['xLabel'] = 'Hours'
plotRange['Mabs']['Dbl']['titleFrag'] = 'Dbl Time Hr'
plotRange['Mabs']['Dbl']['devisor'] = 60
plotRange['Mabs']['Lag']['xRange'] = [0, 40]
plotRange['Mabs']['Lag']['xTicks'] = [10,20,30]
plotRange['Mabs']['Lag']['xStep'] = 1
plotRange['Mabs']['Lag']['xLabel'] = 'Hours'
plotRange['Mabs']['Lag']['titleFrag'] = 'Lag Time Hr'
plotRange['Mabs']['Lag']['devisor'] = 60
plotRange['Mabs']['Tex']['xRange'] = [0, 40]
plotRange['Mabs']['Tex']['xTicks'] = [10,20,30]
plotRange['Mabs']['Tex']['xStep'] = 1
plotRange['Mabs']['Tex']['xLabel'] = 'Hours'
plotRange['Mabs']['Tex']['titleFrag'] = 'Tex Hr'
plotRange['Mabs']['Tex']['devisor'] = 30
plotRange['Mabs']['Area']['xRange'] = [0, 30]
plotRange['Mabs']['Area']['xTicks'] = [20,40,60,80]
plotRange['Mabs']['Area']['xStep'] = 0.25
plotRange['Mabs']['Area']['xLabel'] = 'log2 Pixels'
plotRange['Mabs']['Area']['titleFrag'] = 'log2 Area'
plotRange['Mabs']['Area']['devisor'] = 1
plotRange['Mabs']['NumDbl']['xRange'] = [0, 10]
plotRange['Mabs']['NumDbl']['xTicks'] = [2,4,6,8]
plotRange['Mabs']['NumDbl']['xStep'] = 0.25
plotRange['Mabs']['NumDbl']['xLabel'] = 'log2 Pixels'
plotRange['Mabs']['NumDbl']['titleFrag'] = 'Num Dbl Rel'
plotRange['Mabs']['NumDbl']['devisor'] = 1
plotRange['Mabs']['PopNum']['xRange'] = [0, 10000]
plotRange['Mabs']['PopNum']['xTicks'] = [10,100,1000]
plotRange['Mabs']['PopNum']['xStep'] = 10
plotRange['Mabs']['PopNum']['xLabel'] = 'log10 Pop'
plotRange['Mabs']['PopNum']['titleFrag'] = 'Pop Num'
plotRange['Mabs']['PopNum']['devisor'] = 1
plotRange['Yeast'] = {}
plotRange['Yeast']['GCs'] = {}
plotRange['Yeast']['Dbl'] = {}
plotRange['Yeast']['Lag'] = {}
plotRange['Yeast']['Tex'] = {}
plotRange['Yeast']['Area'] = {}
plotRange['Yeast']['NumDbl'] = {}
plotRange['Yeast']['PopNum'] = {}
plotRange['Yeast']['GCs']['xRange'] = [0, 3000]
plotRange['Yeast']['GCs']['yRange'] = [4, 16]
plotRange['Yeast']['GCs']['xTicks'] = [100,200,300,400]
plotRange['Yeast']['GCs']['xStep'] = 4
plotRange['Yeast']['GCs']['xLabel'] = 'Minutes'
plotRange['Yeast']['GCs']['titleFrag'] = 'Time Min'
plotRange['Yeast']['GCs']['devisor'] = 1
plotRange['Yeast']['GCs']['numObservations'] = 10
plotRange['Yeast']['Dbl']['xRange'] = [25, 400]
plotRange['Yeast']['Dbl']['xTicks'] = [100,200,300,400]
plotRange['Yeast']['Dbl']['xStep'] = 4
plotRange['Yeast']['Dbl']['xLabel'] = 'Minutes'
plotRange['Yeast']['Dbl']['titleFrag'] = 'Dbl Time Min'
plotRange['Yeast']['Dbl']['devisor'] = 1
plotRange['Yeast']['Lag']['xRange'] = [0, 3000]
plotRange['Yeast']['Lag']['xTicks'] = [100,200,300,400, 500]
plotRange['Yeast']['Lag']['xStep'] = 1
plotRange['Yeast']['Lag']['xLabel'] = 'Minutes'
plotRange['Yeast']['Lag']['titleFrag'] = 'Lag Time Min'
plotRange['Yeast']['Lag']['devisor'] = 1
plotRange['Yeast']['Tex']['xRange'] = [0, 3000]
plotRange['Yeast']['Tex']['xTicks'] = [200,400,600,800,1000]
plotRange['Yeast']['Tex']['xStep'] = 1
plotRange['Yeast']['Tex']['xLabel'] = 'Minutes'
plotRange['Yeast']['Tex']['titleFrag'] = 'Tex Min'
plotRange['Yeast']['Tex']['devisor'] = 0.5
plotRange['Yeast']['Area']['xRange'] = [0, 40]
plotRange['Yeast']['Area']['xTicks'] = [10,20,30]
plotRange['Yeast']['Area']['xStep'] = 0.5
plotRange['Yeast']['Area']['xLabel'] = 'log2 Pixels'
plotRange['Yeast']['Area']['titleFrag'] = 'log2 Area'
plotRange['Yeast']['Area']['devisor'] = 1
plotRange['Yeast']['NumDbl']['xRange'] = [0, 10]
plotRange['Yeast']['NumDbl']['xTicks'] = [2,4,6,8]
plotRange['Yeast']['NumDbl']['xStep'] = 0.25
plotRange['Yeast']['NumDbl']['xLabel'] = 'log2 Pixels'
plotRange['Yeast']['NumDbl']['titleFrag'] = 'Num Dbl Rel'
plotRange['Yeast']['NumDbl']['devisor'] = 1
plotRange['Yeast']['PopNum']['xRange'] = [0, 10000]
plotRange['Yeast']['PopNum']['xTicks'] = [10,100,1000]
plotRange['Yeast']['PopNum']['xStep'] = 10
plotRange['Yeast']['PopNum']['xLabel'] = 'log10 Pop'
plotRange['Yeast']['PopNum']['titleFrag'] = 'Pop Num'
plotRange['Yeast']['PopNum']['devisor'] = 1
if organism == None:
return plotRange
else:
return plotRange[organism]
def scaleImage(im, lowcut = 0.00001, highcut = 0.9995, scaleImage = 1):
    # make a histogram of the image at the bit depth the image was recorded in.
imageHist = histogram1d(im.ravel(),2**16,[0,2**16],weights = None).astype('float')
# Calculate the cumulative probability ignoring zero values
cumHist = np.empty(imageHist.shape, dtype='float')
cumHist[0] = 0
cumHist[1:] = np.cumsum(imageHist[1:])
    # normalize over the non-zero range; the zero bin is excluded above because
    # images with many zero-valued pixels would otherwise dominate the histogram
    cumRange = cumHist[2**16-1]-cumHist[0]
    cumHist-=cumHist[0]
cumHist /=cumRange
    # set low and high values to normalize image contrast.
loval = np.argmax(cumHist>=lowcut)
hival = np.argmax(cumHist>=highcut)
scIm = np.clip(im, loval, hival).astype('float')
# scale the image linearly over the range given. This does not set alpha values or whatever.
scaleFactor = 254/(hival-loval)
scIm -=loval
scIm *= scaleFactor
adjIm = np.require(scIm, dtype = 'uint8', requirements = 'C')
# resize if you need to
rsIm = cv2.resize(adjIm, (round(im.shape[1]/scaleImage), round(im.shape[0]/scaleImage)))
return rsIm
def parseVirts(x, y):
commands = {'M': (mpath.Path.MOVETO,),
'L': (mpath.Path.LINETO,),
'Q': (mpath.Path.CURVE3,)*2,
'C': (mpath.Path.CURVE4,)*3,
'Z': (mpath.Path.CLOSEPOLY,)}
rc = y.shape
vertices = np.zeros((rc[0]+3,2),dtype='float')
vertices[0,:] = [x[0],y[0]]
codes = []
codes.extend(commands['M'])
for n in range(1,rc[0]):
codes.extend(commands['L'])
vertices[n,:] = [x[n],y[n]]
vertices[-3,:] = [x[-1],0]
codes.extend(commands['L'])
vertices[-2,:] = [0,0]
codes.extend(commands['L'])
vertices[-2,:] = [0,0]
codes.extend(commands['Z'])
return codes, vertices
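

# A minimal sketch of contrast-scaling a synthetic 16-bit frame with scaleImage();
# the random array below is illustrative and stands in for a real stitched image.
def _demoScaleImage():
    fakeIm = np.random.randint(0, 2**16, size=(512, 512), dtype='uint16')
    rsIm = scaleImage(fakeIm, lowcut=0.0005, highcut=0.9995, scaleImage=2)
    return rsIm  # uint8 image at half the original size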
class OImageView(QtWidgets.QGraphicsView):
photoClicked = QtCore.pyqtSignal(QtCore.QPoint)
def __init__(self, parent):
super(OImageView, self).__init__(parent)
self._zoom = 0
self._empty = True
self._scene = QtWidgets.QGraphicsScene(self)
self._photo = QtWidgets.QGraphicsPixmapItem()
self.qImage = QImage()
self._scene.addItem(self._photo)
self.setScene(self._scene)
self.setTransformationAnchor(QtWidgets.QGraphicsView.AnchorUnderMouse)
self.setResizeAnchor(QtWidgets.QGraphicsView.AnchorUnderMouse)
self.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
self.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
self.setBackgroundBrush(QtGui.QBrush(QtGui.QColor(30, 30, 30)))
self.setFrameShape(QtWidgets.QFrame.NoFrame)
def hasPhoto(self):
return not self._empty
def fitInView(self, scale=True):
rect = QtCore.QRectF(self._photo.pixmap().rect())
if not rect.isNull():
self.setSceneRect(rect)
if self.hasPhoto():
unity = self.transform().mapRect(QtCore.QRectF(0, 0, 1, 1))
self.scale(1 / unity.width(), 1 / unity.height())
viewrect = self.viewport().rect()
scenerect = self.transform().mapRect(rect)
factor = min(viewrect.width() / scenerect.width(),
viewrect.height() / scenerect.height())
self.scale(factor, factor)
self._zoom = 0
def setPhoto(self, pixmap=None, reset=True):
self._zoom = 0
if pixmap and not pixmap.isNull():
self._empty = False
self.setDragMode(QtWidgets.QGraphicsView.ScrollHandDrag)
self._photo.setPixmap(pixmap)
else:
self._empty = True
self.setDragMode(QtWidgets.QGraphicsView.NoDrag)
self._photo.setPixmap(QtGui.QPixmap())
if reset:
self.fitInView()
def wheelEvent(self, event):
if self.hasPhoto():
if event.angleDelta().y() > 0:
factor = 1.25
self._zoom += 1
else:
factor = 0.8
self._zoom -= 1
if self._zoom > 0:
self.scale(factor, factor)
elif self._zoom == 0:
self.fitInView()
else:
self._zoom = 0
def toggleDragMode(self):
if self.dragMode() == QtWidgets.QGraphicsView.ScrollHandDrag:
self.setDragMode(QtWidgets.QGraphicsView.NoDrag)
elif not self._photo.pixmap().isNull():
self.setDragMode(QtWidgets.QGraphicsView.ScrollHandDrag)
def mousePressEvent(self, event):
if self._photo.isUnderMouse():
self.photoClicked.emit(self.mapToScene(event.pos()).toPoint())
super(OImageView, self).mousePressEvent(event)
# Window is called to view window.
class ImageWindow(QtWidgets.QWidget):
'''
ImageWindow: a QWidget which holds the GraphicsView and button elements
'''
def __init__(self):
super(ImageWindow, self).__init__()
# Load experiment and odelayConfig data into Window data.
self.odelayConfig = fio.loadConfig()
self.experimentData = self.loadExperimentData()
self.roiList = [*self.experimentData['roiFiles']]
self.roiLbl = self.roiList[0]
self.numImages=len(self.experimentData['roiFiles'][self.roiLbl])
self.imageNumber = 1
#Create Photoviewer object
self.viewer = OImageView(self)
# 'Load image' button
self.selectRoi = QtWidgets.QComboBox(self)
qroiList = [self.tr(item) for item in self.roiList]
self.selectRoi.addItems(qroiList)
self.selectRoi.currentTextChanged.connect(self.chooseRoi)
#Button for load previous Image
self.btnPrevImage = QtWidgets.QToolButton(self)
self.btnPrevImage.setText('Prev')
self.btnPrevImage.setObjectName('btnPrevImage')
self.btnPrevImage.clicked.connect(self.changeImage)
#Button for load previous Image
self.btnNextImage = QtWidgets.QToolButton(self)
self.btnNextImage.setText('Next')
self.btnNextImage.setObjectName('btnNextImage')
self.btnNextImage.clicked.connect(self.changeImage)
#Button for load previous Image
self.btnSaveImage = QtWidgets.QToolButton(self)
self.btnSaveImage.setText('Save')
self.btnSaveImage.setObjectName('btnSaveImage')
self.btnSaveImage.clicked.connect(self.saveImage)
# Button to change from drag/pan to getting pixel info
self.btnPixInfo = QtWidgets.QToolButton(self)
self.btnPixInfo.setText('Enter pixel info mode')
self.btnPixInfo.clicked.connect(self.pixInfo)
self.editPixInfo = QtWidgets.QLineEdit(self)
self.editPixInfo.setReadOnly(True)
self.viewer.photoClicked.connect(self.photoClicked)
# Add Image time slider
self.imageSlider = QSlider(Qt.Horizontal)
self.imageSlider.setRange(1,self.numImages)
self.imageSlider.sliderReleased.connect(self.changeImage)
# Arrange layout
VBlayout = QtWidgets.QVBoxLayout(self)
VBlayout.addWidget(self.viewer)
VBlayout.addWidget(self.imageSlider)
HBlayout = QtWidgets.QHBoxLayout()
HBlayout.setAlignment(QtCore.Qt.AlignLeft)
HBlayout.addWidget(self.selectRoi)
HBlayout.addWidget(self.btnPrevImage)
HBlayout.addWidget(self.btnNextImage)
HBlayout.addWidget(self.btnSaveImage)
HBlayout.addWidget(self.btnPixInfo)
HBlayout.addWidget(self.editPixInfo)
VBlayout.addLayout(HBlayout)
def chooseRoi(self, ind):
self.roiLbl = ind
self.numImages = len(self.experimentData['roiFiles'][self.roiLbl])
if self.imageNumber>self.numImages:
self.imageNumber = self.numImages
self.imageSlider.setValue = self.numImages
self.loadImage()
def loadImage(self):
self.viewer.qImage = self.readImage()
pixmap = QPixmap.fromImage(self.viewer.qImage)
self.viewer.setPhoto(pixmap)
def saveImage(self):
location = self.odelayConfig['LocalDataDir']
options = QFileDialog.Options()
fileName, _ = QFileDialog.getSaveFileName(self,"Save Image", self.tr(location),"Images (*.png, *.jpg)", options=options)
print(fileName)
val = self.viewer.qImage.save(fileName, format=None, quality=100)
if val:
print('Image saved')
def changeImage(self):
sending_widget = self.sender()
if sending_widget.objectName() == self.btnNextImage.objectName():
self.imageNumber += 1
if self.imageNumber>self.numImages:
self.imageNumber = self.numImages
else:
self.viewer.qImage = self.readImage()
pixmap = QPixmap.fromImage(self.viewer.qImage)
self.imageSlider.setValue(self.imageNumber)
self.viewer.setPhoto(pixmap, False)
elif sending_widget.objectName() == self.btnPrevImage.objectName():
self.imageNumber -= 1
if self.imageNumber<1:
self.imageNumber = 1
else:
self.viewer.qImage = self.readImage()
pixmap = QPixmap.fromImage(self.viewer.qImage)
self.imageSlider.setValue(self.imageNumber)
self.viewer.setPhoto(pixmap, False)
elif sending_widget.objectName() == self.imageSlider.objectName():
self.imageNumber = sending_widget.value()
self.viewer.qImage = self.readImage()
pixmap = QPixmap.fromImage(self.viewer.qImage)
self.viewer.setPhoto(pixmap, False)
def pixInfo(self):
self.viewer.toggleDragMode()
def photoClicked(self, pos):
if self.viewer.dragMode() == QtWidgets.QGraphicsView.NoDrag:
self.editPixInfo.setText('%d, %d' % (pos.x(), pos.y()))
    def openFileDialog(self):
options = QFileDialog.Options()
fileName, _ = QFileDialog.getOpenFileName(None,"Select ODELAY Data Set", "","ODELAYExpDisc (*Index_ODELAYData.mat);; Mat-Files (*.mat)", options=options)
return fileName
def loadExperimentData(self):
imagePath = pathlib.Path(self.odelayConfig['LocalImageDir'])
dataPath = pathlib.Path(self.odelayConfig['LocalDataDir'])
indexList = [k for k in dataPath.glob('*Index_ODELAYData.*')]
if len(indexList)==1:
expIndexPath = dataPath / indexList[0]
expData = fio.loadData(expIndexPath)
return expData
def readImage(self, lowcut = 0.0005, highcut = 0.99995):
roiLbl = self.roiLbl
imNum = self.imageNumber
imagePath = pathlib.Path(self.odelayConfig['LocalImageDir'])
dataPath = pathlib.Path(self.odelayConfig['LocalDataDir'])
        # Generate the image file path by combining the region of interest label with the experiment path
roiFolder = pathlib.Path('./'+ roiLbl)
imageFileName = pathlib.Path('./'+ roiLbl + '_'+ f'{imNum:00d}' + '.mat')
imageFilePath = imagePath / roiFolder / imageFileName
        # Load Region of Interest Data. This HDF5 file should contain the locations of the image stitch coordinates
roiPath = dataPath / 'ODELAY Roi Data' / f'{roiLbl}.hdf5'
roiData = fio.loadData(roiPath)
background = self.experimentData['backgroundImage']
# This data should be extracted from the Experiment Index file or stage data file.
pixSize = self.experimentData['pixSize']
magnification = self.experimentData['magnification']
stInd = f'{imNum-1:03d}'
stitchCorners = roiData['stitchMeta'][stInd]['imPix']
anImage = opl.assembleImage(imageFilePath, pixSize, magnification, background, stitchCorners)
im = anImage['Bf']
        # make a histogram of the image at the bit depth the image was recorded in.
imageHist = histogram1d(im.ravel(),2**16,[0,2**16],weights = None).astype('float')
# Calculate the cumulative probability ignoring zero values
cumHist = np.zeros(imageHist.shape, dtype='float')
cumHist[1:] = np.cumsum(imageHist[1:])
        # normalize, ignoring the zero bin (helpful when many pixels are zero)
cumProb = (cumHist-cumHist[0])/(cumHist[2**16-1]-cumHist[0])
        # set low and high values to normalize image contrast.
loval = np.argmax(cumProb>=lowcut)
hival = np.argmax(cumProb>=highcut)
scIm = (im.astype('float') - loval.astype('float'))/(hival.astype('float') - loval.astype('float'))*254
lim = np.iinfo('uint8')
scIm = np.clip(scIm, lim.min, lim.max)
# Set image data type and make sure the array is contiguous in memory.
imageData = np.require(scIm, dtype = 'uint8', requirements = 'C')
# Set data as a QImage. This is a greyscale image
Qim = QImage(imageData.data, imageData.shape[1], imageData.shape[0], imageData.shape[1], QImage.Format_Grayscale8)
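# keep a reference to the numpy buffer on the QImage so it is not garbage collected while Qt still uses it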
Qim.data = imageData
return Qim
class VideoWindow(QMainWindow):
def __init__(self, parent=None):
super(VideoWindow, self).__init__(parent)
self.setWindowTitle("PyQt Video Player Widget Example - pythonprogramminglanguage.com")
self.mediaPlayer = QMediaPlayer(None, QMediaPlayer.VideoSurface)
videoWidget = QVideoWidget()
self.playButton = QPushButton()
self.playButton.setEnabled(False)
self.playButton.setIcon(self.style().standardIcon(QStyle.SP_MediaPlay))
self.playButton.clicked.connect(self.play)
self.positionSlider = QSlider(Qt.Horizontal)
self.positionSlider.setRange(0, 0)
self.positionSlider.sliderReleased.connect(self.setPosition)
self.errorLabel = QLabel()
self.errorLabel.setSizePolicy(QSizePolicy.Preferred,
QSizePolicy.Maximum)
# Create new action
openAction = QAction(QIcon('open.png'), '&Open', self)
openAction.setShortcut('Ctrl+O')
openAction.setStatusTip('Open movie')
openAction.triggered.connect(self.openFile)
# Create exit action
exitAction = QAction(QIcon('exit.png'), '&Exit', self)
exitAction.setShortcut('Ctrl+Q')
exitAction.setStatusTip('Exit application')
exitAction.triggered.connect(self.exitCall)
# Create menu bar and add action
menuBar = self.menuBar()
fileMenu = menuBar.addMenu('&File')
#fileMenu.addAction(newAction)
fileMenu.addAction(openAction)
fileMenu.addAction(exitAction)
# Create a widget for window contents
wid = QWidget(self)
self.setCentralWidget(wid)
# Create layouts to place inside widget
controlLayout = QHBoxLayout()
controlLayout.setContentsMargins(0, 0, 0, 0)
controlLayout.addWidget(self.playButton)
controlLayout.addWidget(self.positionSlider)
layout = QVBoxLayout()
layout.addWidget(videoWidget)
layout.addLayout(controlLayout)
layout.addWidget(self.errorLabel)
# Set widget to contain window contents
wid.setLayout(layout)
self.mediaPlayer.setVideoOutput(videoWidget)
self.mediaPlayer.stateChanged.connect(self.mediaStateChanged)
self.mediaPlayer.positionChanged.connect(self.positionChanged)
self.mediaPlayer.durationChanged.connect(self.durationChanged)
self.mediaPlayer.error.connect(self.handleError)
def openFile(self):
odelayConfig = fio.loadConfig()
fileName, _ = QFileDialog.getOpenFileName(self, "Open Movie",
odelayConfig['LocalDataDir'])
if fileName != '':
self.mediaPlayer.setMedia(
QMediaContent(QUrl.fromLocalFile(fileName)))
self.playButton.setEnabled(True)
def exitCall(self):
self.close()
def play(self):
if self.mediaPlayer.state() == QMediaPlayer.PlayingState:
self.mediaPlayer.pause()
else:
self.mediaPlayer.play()
def mediaStateChanged(self, state):
if self.mediaPlayer.state() == QMediaPlayer.PlayingState:
self.playButton.setIcon(
self.style().standardIcon(QStyle.SP_MediaPause))
else:
self.playButton.setIcon(
self.style().standardIcon(QStyle.SP_MediaPlay))
def positionChanged(self, position):
self.positionSlider.setValue(position)
def durationChanged(self, duration):
self.positionSlider.setRange(0, duration)
def setPosition(self):
position = self.positionSlider.value()
self.mediaPlayer.setPosition(position)
def handleError(self):
self.playButton.setEnabled(False)
self.errorLabel.setText("Error: " + self.mediaPlayer.errorString())
def videoViewer():
app = QApplication(sys.argv)
player = VideoWindow()
player.resize(640, 480)
player.show()
sys.exit(app.exec_())
def imageViewer():
app = QtWidgets.QApplication(sys.argv)
window = ImageWindow()
window.setGeometry(500, 300, 800, 600)
window.show()
window.loadImage()
sys.exit(app.exec_())
def waveLengthToRGB(wl=650):
try:
wl = int(wl)
except (TypeError, ValueError):
wl = 450
# print(wl)
if wl<380:
wl= 380
elif wl>780:
wl = 780
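# map the clamped wavelength to an RGB triple, one linear ramp per visible colour band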
if wl>=380 and wl<=440:
R = np.abs((wl-440)/(440-380))
G = 0
B = 1
elif wl>440 and wl<=490:
R = 0
G = np.abs((wl-440)/(490-440))
B = 1
elif wl>490 and wl<=510:
R = 0
G = 1
B = np.abs((wl-510)/(510-490))
elif wl>510 and wl<=580:
R = np.abs((wl-510)/(580-510))
G = 1
B = 0
elif wl>580 and wl<=645:
R = 1
G = np.abs((wl-645)/(645-580))
B = 0
elif wl>645 and wl<=780:
R = 1
G = 0
B = 0
# let the intensity factor SSS fall off near the limits of the visible range
if wl>700:
SSS=0.3+0.7* (780-wl)/(780-700)
elif wl<420:
SSS=.3+.7*(wl-380)/(420-380)
else:
SSS=1
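# attenuate each channel by SSS and convert to 8-bit integer values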
r = np.round(SSS*R*255).astype('uint8')
g = np.round(SSS*G*255).astype('uint8')
b = np.round(SSS*B*255).astype('uint8')
return [r,g,b]
# class FocusPlot(QMainWindow):
# def __init__(self, parent=None):
# QMainWindow.__init__(self, parent)
# self.setWindowTitle('Demo: PyQt with matplotlib')
# self.create_menu()
# self.create_main_frame()
# self.create_status_bar()
# self.textbox.setText('1 2 3 4')
# self.on_draw()
# def save_plot(self):
# file_choices = "PNG (*.png)|*.png"
# path, ext = QFileDialog.getSaveFileName(self,
# 'Save file', '',
# file_choices)
# path = path.encode('utf-8')
# if not path[-4:] == file_choices[-4:].encode('utf-8'):
# path += file_choices[-4:].encode('utf-8')
# print(path)
# if path:
# self.canvas.print_figure(path.decode(), dpi=self.dpi)
# self.statusBar().showMessage('Saved to %s' % path, 2000)
# def on_about(self):
# msg = """ A demo of using PyQt with matplotlib:
# * Use the matplotlib navigation bar
# * Add values to the text box and press Enter (or click "Draw")
# * Show or hide the grid
# * Drag the slider to modify the width of the bars
# * Save the plot to a file using the File menu
# * Click on a bar to receive an informative message
# """
# QMessageBox.about(self, "About the demo", msg.strip())
# def on_pick(self, event):
# # The event received here is of the type
# # matplotlib.backend_bases.PickEvent
# #
# # It carries lots of information, of which we're using
# # only a small amount here.
# #
# box_points = event.artist.get_bbox().get_points()
# msg = "You've clicked on a bar with coords:\n %s" % box_points
# QMessageBox.information(self, "Click!", msg)
# def on_draw(self):
# """ Redraws the figure
# """
# str = self.textbox.text().encode('utf-8')
# self.data = [int(s) for s in str.split()]
# x = range(len(self.data))
# # clear the axes and redraw the plot anew
# #
# self.axes.clear()
# self.axes.grid(self.grid_cb.isChecked())
# self.axes.bar(
# x=x,
# height=self.data,
# width=self.slider.value() / 100.0,
# align='center',
# alpha=0.44,
# picker=5)
# self.canvas.draw()
# def create_main_frame(self):
# self.main_frame = QWidget()
# # Create the mpl Figure and FigCanvas objects.
# # 5x4 inches, 100 dots-per-inch
# #
# self.dpi = 100
# self.fig = Figure((5.0, 4.0), dpi=self.dpi)
# self.canvas = FigureCanvas(self.fig)
# self.canvas.setParent(self.main_frame)
# # Since we have only one plot, we can use add_axes
# # instead of add_subplot, but then the subplot
# # configuration tool in the navigation toolbar wouldn't
# # work.
# #
# self.axes = self.fig.add_subplot(111)
# # Bind the 'pick' event for clicking on one of the bars
# #
# self.canvas.mpl_connect('pick_event', self.on_pick)
# # Create the navigation toolbar, tied to the canvas
# #
# self.mpl_toolbar = NavigationToolbar(self.canvas, self.main_frame)
# # Other GUI controls
# #
# self.textbox = QLineEdit()
# self.textbox.setMinimumWidth(200)
# self.textbox.editingFinished.connect(self.on_draw)
# self.draw_button = QPushButton("&Draw")
# self.draw_button.clicked.connect(self.on_draw)
# self.grid_cb = QCheckBox("Show &Grid")
# self.grid_cb.setChecked(False)
# self.grid_cb.stateChanged.connect(self.on_draw)
# slider_label = QLabel('Bar width (%):')
# self.slider = QSlider(Qt.Horizontal)
# self.slider.setRange(1, 100)
# self.slider.setValue(20)
# self.slider.setTracking(True)
# self.slider.setTickPosition(QSlider.TicksBothSides)
# self.slider.valueChanged.connect(self.on_draw)
# #
# # Layout with box sizers
# #
# hbox = QHBoxLayout()
# for w in [ self.textbox, self.draw_button, self.grid_cb,
# slider_label, self.slider]:
# hbox.addWidget(w)
# hbox.setAlignment(w, Qt.AlignVCenter)
# vbox = QVBoxLayout()
# vbox.addWidget(self.canvas)
# vbox.addWidget(self.mpl_toolbar)
# vbox.addLayout(hbox)
# self.main_frame.setLayout(vbox)
# self.setCentralWidget(self.main_frame)
# def create_status_bar(self):
# self.status_text = QLabel("This is a demo")
# self.statusBar().addWidget(self.status_text, 1)
# def create_menu(self):
# self.file_menu = self.menuBar().addMenu("&File")
# load_file_action = self.create_action("&Save plot",
# shortcut="Ctrl+S", slot=self.save_plot,
# tip="Save the plot")
# quit_action = self.create_action("&Quit", slot=self.close,
# shortcut="Ctrl+Q", tip="Close the application")
# self.add_actions(self.file_menu,
# (load_file_action, None, quit_action))
# self.help_menu = self.menuBar().addMenu("&Help")
# about_action = self.create_action("&About",
# shortcut='F1', slot=self.on_about,
# tip='About the demo')
# self.add_actions(self.help_menu, (about_action,))
# def add_actions(self, target, actions):
# for action in actions:
# if action is None:
# target.addSeparator()
# else:
# target.addAction(action)
# def create_action( self, text, slot=None, shortcut=None,
# icon=None, tip=None, checkable=False):
# action = QAction(text, self)
# if icon is not None:
# action.setIcon(QIcon(":/%s.png" % icon))
# if shortcut is not None:
# action.setShortcut(shortcut)
# if tip is not None:
# action.setToolTip(tip)
# action.setStatusTip(tip)
# if slot is not None:
# action.triggered.connect(slot)
# if checkable:
# action.setCheckable(True)
# return action
# # def main():
# # app = QApplication(sys.argv)
# # form = AppForm()
# # form.show()
# # app.exec_()
# # if __name__ == "__main__":
# # main()
# class InteractiveGCPlot(QWidget)
|
[
"PyQt5.QtCore.pyqtSignal",
"numpy.sum",
"numpy.abs",
"numpy.argmax",
"matplotlib.pyplot.axes",
"numpy.empty",
"PyQt5.QtWidgets.QPushButton",
"PyQt5.QtGui.QColor",
"numpy.iinfo",
"numpy.clip",
"PyQt5.QtWidgets.QFileDialog.getOpenFileName",
"PyQt5.QtWidgets.QVBoxLayout",
"matplotlib.pyplot.figure",
"pathlib.Path",
"numpy.arange",
"PyQt5.QtCore.QRectF",
"PyQt5.QtWidgets.QSlider",
"PyQt5.QtWidgets.QApplication",
"PyQt5.QtWidgets.QGraphicsScene",
"cv2.imshow",
"numpy.round",
"PyQt5.QtWidgets.QLabel",
"PyQt5.QtGui.QPixmap.fromImage",
"PyQt5.QtWidgets.QWidget",
"PyQt5.QtWidgets.QToolButton",
"matplotlib.pyplot.close",
"numpy.require",
"numpy.cumsum",
"PyQt5.QtWidgets.QFileDialog.Options",
"numpy.max",
"tools.imagepl.assembleImage",
"numpy.linspace",
"PyQt5.QtWidgets.QGraphicsPixmapItem",
"matplotlib.patches.PathPatch",
"PyQt5.QtWidgets.QComboBox",
"matplotlib.pyplot.show",
"cv2.waitKey",
"numpy.log2",
"PyQt5.QtWidgets.QHBoxLayout",
"fast_histogram.histogram1d",
"PyQt5.QtGui.QImage",
"tools.imagepl.stitchImage",
"tools.fileio.loadConfig",
"PyQt5.QtGui.QPixmap",
"PyQt5.QtGui.QIcon",
"PyQt5.QtWidgets.QLineEdit",
"numpy.zeros",
"tools.fileio.loadData",
"matplotlib.path.Path",
"PyQt5.QtCore.QUrl.fromLocalFile",
"numpy.diff",
"numpy.array",
"cv2.destroyWindow",
"matplotlib.pyplot.savefig"
] |
[((1200, 1216), 'matplotlib.pyplot.close', 'plt.close', (['"""all"""'], {}), "('all')\n", (1209, 1216), True, 'import matplotlib.pyplot as plt\n'), ((2367, 2474), 'numpy.array', 'np.array', (['[[0, 0, 0, 0.3], [0, 0, 1, 1], [0, 0.7, 0, 1], [1, 0, 0, 1], [0.7, 0.5, 0, 1]]'], {'dtype': '"""float"""'}), "([[0, 0, 0, 0.3], [0, 0, 1, 1], [0, 0.7, 0, 1], [1, 0, 0, 1], [0.7,\n 0.5, 0, 1]], dtype='float')\n", (2375, 2474), True, 'import numpy as np\n'), ((2614, 2686), 'numpy.array', 'np.array', (['[rngTime, rngTdbl, rngTlag, rngTexp, rngNDub]'], {'dtype': '"""float64"""'}), "([rngTime, rngTdbl, rngTlag, rngTexp, rngNDub], dtype='float64')\n", (2622, 2686), True, 'import numpy as np\n'), ((2857, 2896), 'numpy.zeros', 'np.zeros', (['(5, numbins)'], {'dtype': '"""float64"""'}), "((5, numbins), dtype='float64')\n", (2865, 2896), True, 'import numpy as np\n'), ((2913, 2952), 'numpy.zeros', 'np.zeros', (['(5, numbins)'], {'dtype': '"""float64"""'}), "((5, numbins), dtype='float64')\n", (2921, 2952), True, 'import numpy as np\n'), ((2969, 3008), 'numpy.zeros', 'np.zeros', (['(5, numbins)'], {'dtype': '"""float64"""'}), "((5, numbins), dtype='float64')\n", (2977, 3008), True, 'import numpy as np\n'), ((3306, 3508), 'numpy.array', 'np.array', (['[[0.1875, 0.66666667, 0.75, 0.28], [0.1875, 0.48666667, 0.75, 0.1], [0.1875,\n 0.33333333, 0.75, 0.1], [0.1875, 0.19333333, 0.75, 0.1], [0.1875, \n 0.05333333, 0.75, 0.1]]'], {'dtype': '"""float64"""'}), "([[0.1875, 0.66666667, 0.75, 0.28], [0.1875, 0.48666667, 0.75, 0.1],\n [0.1875, 0.33333333, 0.75, 0.1], [0.1875, 0.19333333, 0.75, 0.1], [\n 0.1875, 0.05333333, 0.75, 0.1]], dtype='float64')\n", (3314, 3508), True, 'import numpy as np\n'), ((3616, 3688), 'numpy.array', 'np.array', (['[rngTime, rngTdbl, rngTlag, rngTexp, rngNDub]'], {'dtype': '"""float64"""'}), "([rngTime, rngTdbl, rngTlag, rngTexp, rngNDub], dtype='float64')\n", (3624, 3688), True, 'import numpy as np\n'), ((3789, 3857), 'numpy.array', 'np.array', (['[rngArea, [0, 1], [0, 1], [0, 1], [0, 1]]'], {'dtype': '"""float64"""'}), "([rngArea, [0, 1], [0, 1], [0, 1], [0, 1]], dtype='float64')\n", (3797, 3857), True, 'import numpy as np\n'), ((4417, 4469), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '[4, 7.5]', 'dpi': '(100)', 'facecolor': '"""w"""'}), "(figsize=[4, 7.5], dpi=100, facecolor='w')\n", (4427, 4469), True, 'import matplotlib.pyplot as plt\n'), ((5375, 5391), 'matplotlib.pyplot.close', 'plt.close', (['"""all"""'], {}), "('all')\n", (5384, 5391), True, 'import matplotlib.pyplot as plt\n'), ((6331, 6405), 'numpy.array', 'np.array', (['[rngTdbl, rngTlag, rngTexp, rngNDub, rngPopNum]'], {'dtype': '"""float64"""'}), "([rngTdbl, rngTlag, rngTexp, rngNDub, rngPopNum], dtype='float64')\n", (6339, 6405), True, 'import numpy as np\n'), ((7626, 7664), 'numpy.zeros', 'np.zeros', (['(numRoi + 1, 2)'], {'dtype': '"""int"""'}), "((numRoi + 1, 2), dtype='int')\n", (7634, 7664), True, 'import numpy as np\n'), ((8643, 8674), 'numpy.zeros', 'np.zeros', (['(5, 4)'], {'dtype': '"""float"""'}), "((5, 4), dtype='float')\n", (8651, 8674), True, 'import numpy as np\n'), ((8784, 8834), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '[7, 9]', 'dpi': '(100)', 'facecolor': '"""w"""'}), "(figsize=[7, 9], dpi=100, facecolor='w')\n", (8794, 8834), True, 'import matplotlib.pyplot as plt\n'), ((10231, 10257), 'numpy.arange', 'np.arange', (['poptot.shape[0]'], {}), '(poptot.shape[0])\n', (10240, 10257), True, 'import numpy as np\n'), ((10272, 10315), 'numpy.zeros', 'np.zeros', (['(poptot.shape[0],)'], 
{'dtype': '"""float"""'}), "((poptot.shape[0],), dtype='float')\n", (10280, 10315), True, 'import numpy as np\n'), ((10764, 10774), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (10772, 10774), True, 'import matplotlib.pyplot as plt\n'), ((10861, 10883), 'pathlib.Path', 'pathlib.Path', (['imageDir'], {}), '(imageDir)\n', (10873, 10883), False, 'import pathlib\n'), ((11071, 11098), 'pathlib.Path', 'pathlib.Path', (["('./' + roiLbl)"], {}), "('./' + roiLbl)\n", (11083, 11098), False, 'import pathlib\n'), ((11119, 11178), 'pathlib.Path', 'pathlib.Path', (["('./' + roiLbl + '_' + f'{imNum:00d}' + '.mat')"], {}), "('./' + roiLbl + '_' + f'{imNum:00d}' + '.mat')\n", (11131, 11178), False, 'import pathlib\n'), ((11364, 11385), 'pathlib.Path', 'pathlib.Path', (['dataDir'], {}), '(dataDir)\n', (11376, 11385), False, 'import pathlib\n'), ((11464, 11489), 'tools.fileio.loadData', 'fio.loadData', (['initPath[0]'], {}), '(initPath[0])\n', (11476, 11489), True, 'import tools.fileio as fio\n'), ((11640, 11706), 'tools.imagepl.stitchImage', 'opl.stitchImage', (['imageFilePath', 'pixSize', 'magnification', 'background'], {}), '(imageFilePath, pixSize, magnification, background)\n', (11655, 11706), True, 'import tools.imagepl as opl\n'), ((12029, 12049), 'numpy.cumsum', 'np.cumsum', (['imageHist'], {}), '(imageHist)\n', (12038, 12049), True, 'import numpy as np\n'), ((12205, 12231), 'numpy.argmax', 'np.argmax', (['(cumProb > 1e-05)'], {}), '(cumProb > 1e-05)\n', (12214, 12231), True, 'import numpy as np\n'), ((12245, 12273), 'numpy.argmax', 'np.argmax', (['(cumProb >= 0.9995)'], {}), '(cumProb >= 0.9995)\n', (12254, 12273), True, 'import numpy as np\n'), ((12492, 12525), 'cv2.imshow', 'cv2.imshow', (['"""Display Image"""', 'rsIm'], {}), "('Display Image', rsIm)\n", (12502, 12525), False, 'import cv2\n'), ((12535, 12549), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (12546, 12549), False, 'import cv2\n'), ((12794, 12816), 'pathlib.Path', 'pathlib.Path', (['imageDir'], {}), '(imageDir)\n', (12806, 12816), False, 'import pathlib\n'), ((12939, 12966), 'pathlib.Path', 'pathlib.Path', (["('./' + roiLbl)"], {}), "('./' + roiLbl)\n", (12951, 12966), False, 'import pathlib\n'), ((12987, 13046), 'pathlib.Path', 'pathlib.Path', (["('./' + roiLbl + '_' + f'{imNum:00d}' + '.mat')"], {}), "('./' + roiLbl + '_' + f'{imNum:00d}' + '.mat')\n", (12999, 13046), False, 'import pathlib\n'), ((13232, 13253), 'pathlib.Path', 'pathlib.Path', (['dataDir'], {}), '(dataDir)\n', (13244, 13253), False, 'import pathlib\n'), ((13332, 13357), 'tools.fileio.loadData', 'fio.loadData', (['initPath[0]'], {}), '(initPath[0])\n', (13344, 13357), True, 'import tools.fileio as fio\n'), ((13438, 13459), 'tools.fileio.loadData', 'fio.loadData', (['roiPath'], {}), '(roiPath)\n', (13450, 13459), True, 'import tools.fileio as fio\n'), ((13804, 13891), 'tools.imagepl.assembleImage', 'opl.assembleImage', (['imageFilePath', 'pixSize', 'magnification', 'background', 'stitchCorners'], {}), '(imageFilePath, pixSize, magnification, background,\n stitchCorners)\n', (13821, 13891), True, 'import tools.imagepl as opl\n'), ((14230, 14250), 'numpy.cumsum', 'np.cumsum', (['imageHist'], {}), '(imageHist)\n', (14239, 14250), True, 'import numpy as np\n'), ((14406, 14432), 'numpy.argmax', 'np.argmax', (['(cumProb > 1e-05)'], {}), '(cumProb > 1e-05)\n', (14415, 14432), True, 'import numpy as np\n'), ((14446, 14474), 'numpy.argmax', 'np.argmax', (['(cumProb >= 0.9995)'], {}), '(cumProb >= 0.9995)\n', (14455, 14474), True, 'import numpy as np\n'), 
((14693, 14726), 'cv2.imshow', 'cv2.imshow', (['"""Display Image"""', 'rsIm'], {}), "('Display Image', rsIm)\n", (14703, 14726), False, 'import cv2\n'), ((14736, 14750), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (14747, 14750), False, 'import cv2\n'), ((15316, 15337), 'numpy.arange', 'np.arange', (['(0)', '(100)', '(20)'], {}), '(0, 100, 20)\n', (15325, 15337), True, 'import numpy as np\n'), ((17824, 17844), 'numpy.arange', 'np.arange', (['(0)', '(70)', '(10)'], {}), '(0, 70, 10)\n', (17833, 17844), True, 'import numpy as np\n'), ((23048, 23088), 'numpy.empty', 'np.empty', (['imageHist.shape'], {'dtype': '"""float"""'}), "(imageHist.shape, dtype='float')\n", (23056, 23088), True, 'import numpy as np\n'), ((23129, 23153), 'numpy.cumsum', 'np.cumsum', (['imageHist[1:]'], {}), '(imageHist[1:])\n', (23138, 23153), True, 'import numpy as np\n'), ((23406, 23434), 'numpy.argmax', 'np.argmax', (['(cumHist >= lowcut)'], {}), '(cumHist >= lowcut)\n', (23415, 23434), True, 'import numpy as np\n'), ((23446, 23475), 'numpy.argmax', 'np.argmax', (['(cumHist >= highcut)'], {}), '(cumHist >= highcut)\n', (23455, 23475), True, 'import numpy as np\n'), ((23720, 23769), 'numpy.require', 'np.require', (['scIm'], {'dtype': '"""uint8"""', 'requirements': '"""C"""'}), "(scIm, dtype='uint8', requirements='C')\n", (23730, 23769), True, 'import numpy as np\n'), ((24215, 24254), 'numpy.zeros', 'np.zeros', (['(rc[0] + 3, 2)'], {'dtype': '"""float"""'}), "((rc[0] + 3, 2), dtype='float')\n", (24223, 24254), True, 'import numpy as np\n'), ((24733, 24765), 'PyQt5.QtCore.pyqtSignal', 'QtCore.pyqtSignal', (['QtCore.QPoint'], {}), '(QtCore.QPoint)\n', (24750, 24765), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((40266, 40288), 'PyQt5.QtWidgets.QApplication', 'QApplication', (['sys.argv'], {}), '(sys.argv)\n', (40278, 40288), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QLabel, QSizePolicy, QWidget, QInputDialog, QFileDialog\n'), ((40431, 40463), 'PyQt5.QtWidgets.QApplication', 'QtWidgets.QApplication', (['sys.argv'], {}), '(sys.argv)\n', (40453, 40463), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1864, 1892), 'numpy.sum', 'np.sum', (['(rawobjectArea > 0)', '(1)'], {}), '(rawobjectArea > 0, 1)\n', (1870, 1892), True, 'import numpy as np\n'), ((3058, 3110), 'numpy.linspace', 'np.linspace', (['xLim[cnt, 0]', 'xLim[cnt, 1]'], {'num': 'numbins'}), '(xLim[cnt, 0], xLim[cnt, 1], num=numbins)\n', (3069, 3110), True, 'import numpy as np\n'), ((3133, 3201), 'fast_histogram.histogram1d', 'histogram1d', (['fitData[:, fitCol[cnt]]', '(75)', 'xLim[cnt, :]'], {'weights': 'None'}), '(fitData[:, fitCol[cnt]], 75, xLim[cnt, :], weights=None)\n', (3144, 3201), False, 'from fast_histogram import histogram1d\n'), ((4510, 4567), 'matplotlib.pyplot.axes', 'plt.axes', (['axesPos[n, :]'], {'xlim': 'xLim[n, :]', 'ylim': 'yLim[n, :]'}), '(axesPos[n, :], xlim=xLim[n, :], ylim=yLim[n, :])\n', (4518, 4567), True, 'import matplotlib.pyplot as plt\n'), ((4595, 4614), 'numpy.log2', 'np.log2', (['objectArea'], {}), '(objectArea)\n', (4602, 4614), True, 'import numpy as np\n'), ((5249, 5270), 'matplotlib.pyplot.savefig', 'plt.savefig', (['savePath'], {}), '(savePath)\n', (5260, 5270), True, 'import matplotlib.pyplot as plt\n'), ((5291, 5301), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (5299, 5301), True, 'import matplotlib.pyplot as plt\n'), ((7775, 7814), 'numpy.zeros', 'np.zeros', (['(5, numbins)'], {'dtype': '"""float64"""'}), "((5, numbins), dtype='float64')\n", (7783, 7814), True, 
'import numpy as np\n'), ((7835, 7874), 'numpy.zeros', 'np.zeros', (['(5, numbins)'], {'dtype': '"""float64"""'}), "((5, numbins), dtype='float64')\n", (7843, 7874), True, 'import numpy as np\n'), ((7895, 7934), 'numpy.zeros', 'np.zeros', (['(5, numbins)'], {'dtype': '"""float64"""'}), "((5, numbins), dtype='float64')\n", (7903, 7934), True, 'import numpy as np\n'), ((9364, 9467), 'matplotlib.patches.PathPatch', 'mpatches.PathPatch', (['pathDict[roi][textLbls[n]]'], {'facecolor': '[0, 0, 1, 1]', 'edgecolor': 'None', 'linewidth': '(0)'}), '(pathDict[roi][textLbls[n]], facecolor=[0, 0, 1, 1],\n edgecolor=None, linewidth=0)\n', (9382, 9467), True, 'import matplotlib.patches as mpatches\n'), ((10338, 10444), 'matplotlib.pyplot.axes', 'plt.axes', (['axesPos[n, :]'], {'xscale': '"""log"""', 'xlim': '[1, 10000]', 'ylim': '[0, numRoi + 1]', 'yticks': '[]', 'xticks': 'xTicks'}), "(axesPos[n, :], xscale='log', xlim=[1, 10000], ylim=[0, numRoi + 1],\n yticks=[], xticks=xTicks)\n", (10346, 10444), True, 'import matplotlib.pyplot as plt\n'), ((12590, 12624), 'cv2.destroyWindow', 'cv2.destroyWindow', (['"""Display Image"""'], {}), "('Display Image')\n", (12607, 12624), False, 'import cv2\n'), ((14791, 14825), 'cv2.destroyWindow', 'cv2.destroyWindow', (['"""Display Image"""'], {}), "('Display Image')\n", (14808, 14825), False, 'import cv2\n'), ((24926, 24956), 'PyQt5.QtWidgets.QGraphicsScene', 'QtWidgets.QGraphicsScene', (['self'], {}), '(self)\n', (24950, 24956), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((24980, 25011), 'PyQt5.QtWidgets.QGraphicsPixmapItem', 'QtWidgets.QGraphicsPixmapItem', ([], {}), '()\n', (25009, 25011), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((25035, 25043), 'PyQt5.QtGui.QImage', 'QImage', ([], {}), '()\n', (25041, 25043), False, 'from PyQt5.QtGui import QImage, QPixmap, QIcon\n'), ((28043, 28059), 'tools.fileio.loadConfig', 'fio.loadConfig', ([], {}), '()\n', (28057, 28059), True, 'import tools.fileio as fio\n'), ((28456, 28481), 'PyQt5.QtWidgets.QComboBox', 'QtWidgets.QComboBox', (['self'], {}), '(self)\n', (28475, 28481), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((28725, 28752), 'PyQt5.QtWidgets.QToolButton', 'QtWidgets.QToolButton', (['self'], {}), '(self)\n', (28746, 28752), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((28986, 29013), 'PyQt5.QtWidgets.QToolButton', 'QtWidgets.QToolButton', (['self'], {}), '(self)\n', (29007, 29013), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((29247, 29274), 'PyQt5.QtWidgets.QToolButton', 'QtWidgets.QToolButton', (['self'], {}), '(self)\n', (29268, 29274), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((29527, 29554), 'PyQt5.QtWidgets.QToolButton', 'QtWidgets.QToolButton', (['self'], {}), '(self)\n', (29548, 29554), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((29698, 29723), 'PyQt5.QtWidgets.QLineEdit', 'QtWidgets.QLineEdit', (['self'], {}), '(self)\n', (29717, 29723), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((29900, 29922), 'PyQt5.QtWidgets.QSlider', 'QSlider', (['Qt.Horizontal'], {}), '(Qt.Horizontal)\n', (29907, 29922), False, 'from PyQt5.QtWidgets import QHBoxLayout, QLabel, QPushButton, QStyle, QVBoxLayout, QWidget, QSlider, QPushButton, QAction\n'), ((30098, 30125), 'PyQt5.QtWidgets.QVBoxLayout', 'QtWidgets.QVBoxLayout', (['self'], {}), '(self)\n', (30119, 30125), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((30235, 30258), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (30256, 30258), 
False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((31046, 31083), 'PyQt5.QtGui.QPixmap.fromImage', 'QPixmap.fromImage', (['self.viewer.qImage'], {}), '(self.viewer.qImage)\n', (31063, 31083), False, 'from PyQt5.QtGui import QImage, QPixmap, QIcon\n'), ((31223, 31244), 'PyQt5.QtWidgets.QFileDialog.Options', 'QFileDialog.Options', ([], {}), '()\n', (31242, 31244), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QLabel, QSizePolicy, QWidget, QInputDialog, QFileDialog\n'), ((33089, 33110), 'PyQt5.QtWidgets.QFileDialog.Options', 'QFileDialog.Options', ([], {}), '()\n', (33108, 33110), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QLabel, QSizePolicy, QWidget, QInputDialog, QFileDialog\n'), ((33134, 33284), 'PyQt5.QtWidgets.QFileDialog.getOpenFileName', 'QFileDialog.getOpenFileName', (['None', '"""Select ODELAY Data Set"""', '""""""', '"""ODELAYExpDisc (*Index_ODELAYData.mat);; Mat-Files (*.mat)"""'], {'options': 'options'}), "(None, 'Select ODELAY Data Set', '',\n 'ODELAYExpDisc (*Index_ODELAYData.mat);; Mat-Files (*.mat)', options=\n options)\n", (33161, 33284), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QLabel, QSizePolicy, QWidget, QInputDialog, QFileDialog\n'), ((33359, 33407), 'pathlib.Path', 'pathlib.Path', (["self.odelayConfig['LocalImageDir']"], {}), "(self.odelayConfig['LocalImageDir'])\n", (33371, 33407), False, 'import pathlib\n'), ((33429, 33476), 'pathlib.Path', 'pathlib.Path', (["self.odelayConfig['LocalDataDir']"], {}), "(self.odelayConfig['LocalDataDir'])\n", (33441, 33476), False, 'import pathlib\n'), ((33863, 33911), 'pathlib.Path', 'pathlib.Path', (["self.odelayConfig['LocalImageDir']"], {}), "(self.odelayConfig['LocalImageDir'])\n", (33875, 33911), False, 'import pathlib\n'), ((33933, 33980), 'pathlib.Path', 'pathlib.Path', (["self.odelayConfig['LocalDataDir']"], {}), "(self.odelayConfig['LocalDataDir'])\n", (33945, 33980), False, 'import pathlib\n'), ((34105, 34132), 'pathlib.Path', 'pathlib.Path', (["('./' + roiLbl)"], {}), "('./' + roiLbl)\n", (34117, 34132), False, 'import pathlib\n'), ((34157, 34216), 'pathlib.Path', 'pathlib.Path', (["('./' + roiLbl + '_' + f'{imNum:00d}' + '.mat')"], {}), "('./' + roiLbl + '_' + f'{imNum:00d}' + '.mat')\n", (34169, 34216), False, 'import pathlib\n'), ((34491, 34512), 'tools.fileio.loadData', 'fio.loadData', (['roiPath'], {}), '(roiPath)\n', (34503, 34512), True, 'import tools.fileio as fio\n'), ((34909, 34996), 'tools.imagepl.assembleImage', 'opl.assembleImage', (['imageFilePath', 'pixSize', 'magnification', 'background', 'stitchCorners'], {}), '(imageFilePath, pixSize, magnification, background,\n stitchCorners)\n', (34926, 34996), True, 'import tools.imagepl as opl\n'), ((35291, 35331), 'numpy.zeros', 'np.zeros', (['imageHist.shape'], {'dtype': '"""float"""'}), "(imageHist.shape, dtype='float')\n", (35299, 35331), True, 'import numpy as np\n'), ((35355, 35379), 'numpy.cumsum', 'np.cumsum', (['imageHist[1:]'], {}), '(imageHist[1:])\n', (35364, 35379), True, 'import numpy as np\n'), ((35581, 35609), 'numpy.argmax', 'np.argmax', (['(cumProb >= lowcut)'], {}), '(cumProb >= lowcut)\n', (35590, 35609), True, 'import numpy as np\n'), ((35625, 35654), 'numpy.argmax', 'np.argmax', (['(cumProb >= highcut)'], {}), '(cumProb >= highcut)\n', (35634, 35654), True, 'import numpy as np\n'), ((35783, 35800), 'numpy.iinfo', 'np.iinfo', (['"""uint8"""'], {}), "('uint8')\n", (35791, 35800), True, 'import numpy as np\n'), ((35817, 35848), 'numpy.clip', 'np.clip', (['scIm', 'lim.min', 'lim.max'], 
{}), '(scIm, lim.min, lim.max)\n', (35824, 35848), True, 'import numpy as np\n'), ((35952, 36001), 'numpy.require', 'np.require', (['scIm'], {'dtype': '"""uint8"""', 'requirements': '"""C"""'}), "(scIm, dtype='uint8', requirements='C')\n", (35962, 36001), True, 'import numpy as np\n'), ((36084, 36197), 'PyQt5.QtGui.QImage', 'QImage', (['imageData.data', 'imageData.shape[1]', 'imageData.shape[0]', 'imageData.shape[1]', 'QImage.Format_Grayscale8'], {}), '(imageData.data, imageData.shape[1], imageData.shape[0], imageData.\n shape[1], QImage.Format_Grayscale8)\n', (36090, 36197), False, 'from PyQt5.QtGui import QImage, QPixmap, QIcon\n'), ((36644, 36657), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', ([], {}), '()\n', (36655, 36657), False, 'from PyQt5.QtWidgets import QHBoxLayout, QLabel, QPushButton, QStyle, QVBoxLayout, QWidget, QSlider, QPushButton, QAction\n'), ((36867, 36889), 'PyQt5.QtWidgets.QSlider', 'QSlider', (['Qt.Horizontal'], {}), '(Qt.Horizontal)\n', (36874, 36889), False, 'from PyQt5.QtWidgets import QHBoxLayout, QLabel, QPushButton, QStyle, QVBoxLayout, QWidget, QSlider, QPushButton, QAction\n'), ((37033, 37041), 'PyQt5.QtWidgets.QLabel', 'QLabel', ([], {}), '()\n', (37039, 37041), False, 'from PyQt5.QtWidgets import QHBoxLayout, QLabel, QPushButton, QStyle, QVBoxLayout, QWidget, QSlider, QPushButton, QAction\n'), ((37946, 37959), 'PyQt5.QtWidgets.QWidget', 'QWidget', (['self'], {}), '(self)\n', (37953, 37959), False, 'from PyQt5.QtWidgets import QHBoxLayout, QLabel, QPushButton, QStyle, QVBoxLayout, QWidget, QSlider, QPushButton, QAction\n'), ((38072, 38085), 'PyQt5.QtWidgets.QHBoxLayout', 'QHBoxLayout', ([], {}), '()\n', (38083, 38085), False, 'from PyQt5.QtWidgets import QHBoxLayout, QLabel, QPushButton, QStyle, QVBoxLayout, QWidget, QSlider, QPushButton, QAction\n'), ((38264, 38277), 'PyQt5.QtWidgets.QVBoxLayout', 'QVBoxLayout', ([], {}), '()\n', (38275, 38277), False, 'from PyQt5.QtWidgets import QHBoxLayout, QLabel, QPushButton, QStyle, QVBoxLayout, QWidget, QSlider, QPushButton, QAction\n'), ((38865, 38881), 'tools.fileio.loadConfig', 'fio.loadConfig', ([], {}), '()\n', (38879, 38881), True, 'import tools.fileio as fio\n'), ((38905, 38982), 'PyQt5.QtWidgets.QFileDialog.getOpenFileName', 'QFileDialog.getOpenFileName', (['self', '"""Open Movie"""', "odelayConfig['LocalDataDir']"], {}), "(self, 'Open Movie', odelayConfig['LocalDataDir'])\n", (38932, 38982), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QLabel, QSizePolicy, QWidget, QInputDialog, QFileDialog\n'), ((40827, 40859), 'numpy.abs', 'np.abs', (['((wl - 440) / (440 - 380))'], {}), '((wl - 440) / (440 - 380))\n', (40833, 40859), True, 'import numpy as np\n'), ((4932, 4989), 'matplotlib.pyplot.axes', 'plt.axes', (['axesPos[n, :]'], {'xlim': 'xLim[n, :]', 'ylim': 'yLim[n, :]'}), '(axesPos[n, :], xlim=xLim[n, :], ylim=yLim[n, :])\n', (4940, 4989), True, 'import matplotlib.pyplot as plt\n'), ((8103, 8151), 'numpy.linspace', 'np.linspace', (['xLim[n, 0]', 'xLim[n, 1]'], {'num': 'numbins'}), '(xLim[n, 0], xLim[n, 1], num=numbins)\n', (8114, 8151), True, 'import numpy as np\n'), ((8176, 8262), 'fast_histogram.histogram1d', 'histogram1d', (['(fitData[:, fitCol[n]] / devisor[n])', 'numbins', 'xLim[n, :]'], {'weights': 'None'}), '(fitData[:, fitCol[n]] / devisor[n], numbins, xLim[n, :],\n weights=None)\n', (8187, 8262), False, 'from fast_histogram import histogram1d\n'), ((8456, 8480), 'matplotlib.path.Path', 'mpath.Path', (['verts', 'codes'], {}), '(verts, codes)\n', (8466, 8480), True, 'import 
matplotlib.path as mpath\n'), ((9658, 9750), 'matplotlib.pyplot.axes', 'plt.axes', (['axesPos[n, :]'], {'xlim': 'xLim[n, :]', 'ylim': '[0, numRoi + 1]', 'yticks': '[]', 'xticks': 'xTicks'}), '(axesPos[n, :], xlim=xLim[n, :], ylim=[0, numRoi + 1], yticks=[],\n xticks=xTicks)\n', (9666, 9750), True, 'import matplotlib.pyplot as plt\n'), ((9962, 10065), 'matplotlib.patches.PathPatch', 'mpatches.PathPatch', (['pathDict[roi][textLbls[n]]'], {'facecolor': '[0, 0, 1, 1]', 'edgecolor': 'None', 'linewidth': '(0)'}), '(pathDict[roi][textLbls[n]], facecolor=[0, 0, 1, 1],\n edgecolor=None, linewidth=0)\n', (9980, 10065), True, 'import matplotlib.patches as mpatches\n'), ((23486, 23511), 'numpy.clip', 'np.clip', (['im', 'loval', 'hival'], {}), '(im, loval, hival)\n', (23493, 23511), True, 'import numpy as np\n'), ((33656, 33682), 'tools.fileio.loadData', 'fio.loadData', (['expIndexPath'], {}), '(expIndexPath)\n', (33668, 33682), True, 'import tools.fileio as fio\n'), ((37203, 37220), 'PyQt5.QtGui.QIcon', 'QIcon', (['"""open.png"""'], {}), "('open.png')\n", (37208, 37220), False, 'from PyQt5.QtGui import QImage, QPixmap, QIcon\n'), ((37449, 37466), 'PyQt5.QtGui.QIcon', 'QIcon', (['"""exit.png"""'], {}), "('exit.png')\n", (37454, 37466), False, 'from PyQt5.QtGui import QImage, QPixmap, QIcon\n'), ((40944, 40976), 'numpy.abs', 'np.abs', (['((wl - 440) / (490 - 440))'], {}), '((wl - 440) / (490 - 440))\n', (40950, 40976), True, 'import numpy as np\n'), ((41584, 41607), 'numpy.round', 'np.round', (['(SSS * R * 255)'], {}), '(SSS * R * 255)\n', (41592, 41607), True, 'import numpy as np\n'), ((41629, 41652), 'numpy.round', 'np.round', (['(SSS * G * 255)'], {}), '(SSS * G * 255)\n', (41637, 41652), True, 'import numpy as np\n'), ((41674, 41697), 'numpy.round', 'np.round', (['(SSS * B * 255)'], {}), '(SSS * B * 255)\n', (41682, 41697), True, 'import numpy as np\n'), ((3245, 3270), 'numpy.max', 'np.max', (['virts[cnt, 2:-10]'], {}), '(virts[cnt, 2:-10])\n', (3251, 3270), True, 'import numpy as np\n'), ((25468, 25492), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['(30)', '(30)', '(30)'], {}), '(30, 30, 30)\n', (25480, 25492), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((26685, 26700), 'PyQt5.QtGui.QPixmap', 'QtGui.QPixmap', ([], {}), '()\n', (26698, 26700), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((31910, 31947), 'PyQt5.QtGui.QPixmap.fromImage', 'QPixmap.fromImage', (['self.viewer.qImage'], {}), '(self.viewer.qImage)\n', (31927, 31947), False, 'from PyQt5.QtGui import QImage, QPixmap, QIcon\n'), ((41061, 41093), 'numpy.abs', 'np.abs', (['((wl - 510) / (510 - 490))'], {}), '((wl - 510) / (510 - 490))\n', (41067, 41093), True, 'import numpy as np\n'), ((5084, 5103), 'numpy.diff', 'np.diff', (['xLim[n, :]'], {}), '(xLim[n, :])\n', (5091, 5103), True, 'import numpy as np\n'), ((8300, 8323), 'numpy.max', 'np.max', (['virts[n, 2:-10]'], {}), '(virts[n, 2:-10])\n', (8306, 8323), True, 'import numpy as np\n'), ((25860, 25885), 'PyQt5.QtCore.QRectF', 'QtCore.QRectF', (['(0)', '(0)', '(1)', '(1)'], {}), '(0, 0, 1, 1)\n', (25873, 25885), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((32350, 32387), 'PyQt5.QtGui.QPixmap.fromImage', 'QPixmap.fromImage', (['self.viewer.qImage'], {}), '(self.viewer.qImage)\n', (32367, 32387), False, 'from PyQt5.QtGui import QImage, QPixmap, QIcon\n'), ((32708, 32745), 'PyQt5.QtGui.QPixmap.fromImage', 'QPixmap.fromImage', (['self.viewer.qImage'], {}), '(self.viewer.qImage)\n', (32725, 32745), False, 'from PyQt5.QtGui import QImage, QPixmap, QIcon\n'), ((39113, 
39141), 'PyQt5.QtCore.QUrl.fromLocalFile', 'QUrl.fromLocalFile', (['fileName'], {}), '(fileName)\n', (39131, 39141), False, 'from PyQt5.QtCore import QDir, Qt, QUrl\n'), ((41133, 41165), 'numpy.abs', 'np.abs', (['((wl - 510) / (580 - 510))'], {}), '((wl - 510) / (580 - 510))\n', (41139, 41165), True, 'import numpy as np\n'), ((41252, 41284), 'numpy.abs', 'np.abs', (['((wl - 645) / (645 - 580))'], {}), '((wl - 645) / (645 - 580))\n', (41258, 41284), True, 'import numpy as np\n')]
|
from hashlib import sha256
from remerkleable.byte_arrays import Bytes32
from typing import Union
ZERO_BYTES32 = b'\x00' * 32
def hash(x: Union[bytes, bytearray, memoryview]) -> Bytes32:
    return Bytes32(sha256(x).digest())
|
[
"hashlib.sha256"
] |
[((208, 217), 'hashlib.sha256', 'sha256', (['x'], {}), '(x)\n', (214, 217), False, 'from hashlib import sha256\n')]
|
#!/usr/bin/env python3.7
# -*- coding: utf-8 -*-
#
# Created on: Sat Nov 2 14:41:15 CET 2019
#
# Author(s): <NAME> <https://urbanij.github.io>
#
# Description: Unit test for traveling_wave_1.py
#
# ==========================================================
import unittest
from wave import *
from functions import *
class TravelingWaveTest(unittest.TestCase):
    def test_gamma_tau(self):
        self.assertEqual(1 + gamma(2, 5), tau(2, 5))
        self.assertEqual(1 + gamma(-3, 7), tau(-3, 7))
    def test_boundary_condition(self):
        self.assertEqual(e1_tot(0, t[0]), e2_t(0, t[0]))
if __name__ == "__main__":
    unittest.main()
|
[
"unittest.main"
] |
[((641, 656), 'unittest.main', 'unittest.main', ([], {}), '()\n', (654, 656), False, 'import unittest\n')]
|
from django.urls import path, include
urlpatterns = [
    path('api/examiner/', include('core.examiner.urls')),
    path('api/taker/', include('core.taker.urls')),
]
|
[
"django.urls.include"
] |
[((81, 110), 'django.urls.include', 'include', (['"""core.examiner.urls"""'], {}), "('core.examiner.urls')\n", (88, 110), False, 'from django.urls import path, include\n'), ((136, 162), 'django.urls.include', 'include', (['"""core.taker.urls"""'], {}), "('core.taker.urls')\n", (143, 162), False, 'from django.urls import path, include\n')]
|