code (string, lengths 22-1.05M) | apis (list, lengths 1-3.31k) | extract_api (string, lengths 75-3.25M) |
---|---|---|
from django.db import models
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from django.dispatch import receiver
from events.models import Category, Language, City
import uuid
class UserCategory(models.Model):
user_id = models.ForeignKey(User)
category_id = models.ForeignKey(Category)
class Meta:
unique_together = (("user_id", "category_id"),)
class SignupInvitation(models.Model):
email_invited = models.EmailField()
invited_by = models.ForeignKey(User, null=True)
user_has_signed_up = models.BooleanField(default=False)
created_at = models.DateField(auto_now_add=True, blank=True, editable=True)
hash = models.UUIDField(default=uuid.uuid4, unique=True, editable=True)
def __str__(self):
return self.email_invited + " - " + str(self.hash)
class SignupWaitingList(models.Model):
email = models.EmailField(unique=True)
created_at = models.DateField(auto_now_add=True, blank=True, editable=True)
def __str__(self):
return self.email
class UserLanguage(models.Model):
user_id = models.ForeignKey(User)
language_id = models.ForeignKey(Language)
class Meta:
unique_together = (("user_id", "language_id"),)
class Profile(models.Model):
user = models.OneToOneField(User, related_name='profile', on_delete=models.CASCADE)
birth_city = models.ForeignKey(City, related_name="birth_city", null=True)
current_city = models.ForeignKey(City, related_name="current_city", null=True)
birth_date = models.DateField(null=True, blank=True)
@receiver(post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
if created:
Profile.objects.create(user=instance)
@receiver(post_save, sender=User)
def save_user_profile(sender, instance, **kwargs):
instance.profile.save()
|
[
"django.db.models.OneToOneField",
"django.db.models.ForeignKey",
"django.dispatch.receiver",
"django.db.models.BooleanField",
"django.db.models.EmailField",
"django.db.models.DateField",
"django.db.models.UUIDField"
] |
[((1600, 1632), 'django.dispatch.receiver', 'receiver', (['post_save'], {'sender': 'User'}), '(post_save, sender=User)\n', (1608, 1632), False, 'from django.dispatch import receiver\n'), ((1760, 1792), 'django.dispatch.receiver', 'receiver', (['post_save'], {'sender': 'User'}), '(post_save, sender=User)\n', (1768, 1792), False, 'from django.dispatch import receiver\n'), ((270, 293), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {}), '(User)\n', (287, 293), False, 'from django.db import models\n'), ((312, 339), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Category'], {}), '(Category)\n', (329, 339), False, 'from django.db import models\n'), ((473, 492), 'django.db.models.EmailField', 'models.EmailField', ([], {}), '()\n', (490, 492), False, 'from django.db import models\n'), ((510, 544), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'null': '(True)'}), '(User, null=True)\n', (527, 544), False, 'from django.db import models\n'), ((570, 604), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (589, 604), False, 'from django.db import models\n'), ((622, 684), 'django.db.models.DateField', 'models.DateField', ([], {'auto_now_add': '(True)', 'blank': '(True)', 'editable': '(True)'}), '(auto_now_add=True, blank=True, editable=True)\n', (638, 684), False, 'from django.db import models\n'), ((696, 760), 'django.db.models.UUIDField', 'models.UUIDField', ([], {'default': 'uuid.uuid4', 'unique': '(True)', 'editable': '(True)'}), '(default=uuid.uuid4, unique=True, editable=True)\n', (712, 760), False, 'from django.db import models\n'), ((905, 935), 'django.db.models.EmailField', 'models.EmailField', ([], {'unique': '(True)'}), '(unique=True)\n', (922, 935), False, 'from django.db import models\n'), ((953, 1015), 'django.db.models.DateField', 'models.DateField', ([], {'auto_now_add': '(True)', 'blank': '(True)', 'editable': '(True)'}), '(auto_now_add=True, blank=True, editable=True)\n', (969, 1015), False, 'from django.db import models\n'), ((1116, 1139), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {}), '(User)\n', (1133, 1139), False, 'from django.db import models\n'), ((1158, 1185), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Language'], {}), '(Language)\n', (1175, 1185), False, 'from django.db import models\n'), ((1301, 1377), 'django.db.models.OneToOneField', 'models.OneToOneField', (['User'], {'related_name': '"""profile"""', 'on_delete': 'models.CASCADE'}), "(User, related_name='profile', on_delete=models.CASCADE)\n", (1321, 1377), False, 'from django.db import models\n'), ((1395, 1456), 'django.db.models.ForeignKey', 'models.ForeignKey', (['City'], {'related_name': '"""birth_city"""', 'null': '(True)'}), "(City, related_name='birth_city', null=True)\n", (1412, 1456), False, 'from django.db import models\n'), ((1476, 1539), 'django.db.models.ForeignKey', 'models.ForeignKey', (['City'], {'related_name': '"""current_city"""', 'null': '(True)'}), "(City, related_name='current_city', null=True)\n", (1493, 1539), False, 'from django.db import models\n'), ((1557, 1596), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (1573, 1596), False, 'from django.db import models\n')]
|
import shutil, os, glob
src = 'train'
dst = 'SIG17/train'
if not os.path.exists('SIG17'):
os.mkdir('SIG17')
os.mkdir(dst)
elif not os.path.exists(dst):
os.mkdir(dst)
count = 1
for s in glob.glob(src + '/*'):
d = dst + '/{:03d}'.format(count)
if not os.path.exists(d):
os.mkdir(d)
os.mkdir(d + '/dynamic')
os.mkdir(d + '/static')
shutil.copyfile(s + '/input_1_aligned.tif', d + '/dynamic/le.tif')
shutil.copyfile(s + '/input_2_aligned.tif', d + '/dynamic/me.tif')
shutil.copyfile(s + '/input_3_aligned.tif', d + '/dynamic/he.tif')
shutil.copyfile(s + '/ref_1_aligned.tif', d + '/static/le.tif')
shutil.copyfile(s + '/ref_2_aligned.tif', d + '/static/me.tif')
shutil.copyfile(s + '/ref_3_aligned.tif', d + '/static/he.tif')
shutil.copyfile(s + '/ref_hdr_aligned.hdr', d + '/hdr_gt.hdr')
shutil.copyfile(s + '/input_exp.txt', d + '/input_exp.txt')
print(str(count) + ' folders transferred')
count += 1
src = 'test'
dst = 'SIG17/val'
if not os.path.exists(dst):
os.mkdir(dst)
count = 1
for s in glob.glob(src + '/*'):
d = dst + '/{:03d}'.format(count)
if not os.path.exists(d):
os.mkdir(d)
os.mkdir(d + '/dynamic')
shutil.copyfile(s + '/input_1_aligned.tif', d + '/dynamic/le.tif')
shutil.copyfile(s + '/input_2_aligned.tif', d + '/dynamic/me.tif')
shutil.copyfile(s + '/input_3_aligned.tif', d + '/dynamic/he.tif')
shutil.copyfile(s + '/ref_hdr_aligned.hdr', d + '/hdr_gt.hdr')
shutil.copyfile(s + '/input_exp.txt', d + '/input_exp.txt')
print(str(count) + ' folders transferred')
count += 1
|
[
"shutil.copyfile",
"os.mkdir",
"os.path.exists",
"glob.glob"
] |
[((208, 229), 'glob.glob', 'glob.glob', (["(src + '/*')"], {}), "(src + '/*')\n", (217, 229), False, 'import shutil, os, glob\n'), ((1131, 1152), 'glob.glob', 'glob.glob', (["(src + '/*')"], {}), "(src + '/*')\n", (1140, 1152), False, 'import shutil, os, glob\n'), ((68, 91), 'os.path.exists', 'os.path.exists', (['"""SIG17"""'], {}), "('SIG17')\n", (82, 91), False, 'import shutil, os, glob\n'), ((98, 115), 'os.mkdir', 'os.mkdir', (['"""SIG17"""'], {}), "('SIG17')\n", (106, 115), False, 'import shutil, os, glob\n'), ((121, 134), 'os.mkdir', 'os.mkdir', (['dst'], {}), '(dst)\n', (129, 134), False, 'import shutil, os, glob\n'), ((404, 470), 'shutil.copyfile', 'shutil.copyfile', (["(s + '/input_1_aligned.tif')", "(d + '/dynamic/le.tif')"], {}), "(s + '/input_1_aligned.tif', d + '/dynamic/le.tif')\n", (419, 470), False, 'import shutil, os, glob\n'), ((476, 542), 'shutil.copyfile', 'shutil.copyfile', (["(s + '/input_2_aligned.tif')", "(d + '/dynamic/me.tif')"], {}), "(s + '/input_2_aligned.tif', d + '/dynamic/me.tif')\n", (491, 542), False, 'import shutil, os, glob\n'), ((548, 614), 'shutil.copyfile', 'shutil.copyfile', (["(s + '/input_3_aligned.tif')", "(d + '/dynamic/he.tif')"], {}), "(s + '/input_3_aligned.tif', d + '/dynamic/he.tif')\n", (563, 614), False, 'import shutil, os, glob\n'), ((620, 683), 'shutil.copyfile', 'shutil.copyfile', (["(s + '/ref_1_aligned.tif')", "(d + '/static/le.tif')"], {}), "(s + '/ref_1_aligned.tif', d + '/static/le.tif')\n", (635, 683), False, 'import shutil, os, glob\n'), ((689, 752), 'shutil.copyfile', 'shutil.copyfile', (["(s + '/ref_2_aligned.tif')", "(d + '/static/me.tif')"], {}), "(s + '/ref_2_aligned.tif', d + '/static/me.tif')\n", (704, 752), False, 'import shutil, os, glob\n'), ((758, 821), 'shutil.copyfile', 'shutil.copyfile', (["(s + '/ref_3_aligned.tif')", "(d + '/static/he.tif')"], {}), "(s + '/ref_3_aligned.tif', d + '/static/he.tif')\n", (773, 821), False, 'import shutil, os, glob\n'), ((827, 889), 'shutil.copyfile', 'shutil.copyfile', (["(s + '/ref_hdr_aligned.hdr')", "(d + '/hdr_gt.hdr')"], {}), "(s + '/ref_hdr_aligned.hdr', d + '/hdr_gt.hdr')\n", (842, 889), False, 'import shutil, os, glob\n'), ((895, 954), 'shutil.copyfile', 'shutil.copyfile', (["(s + '/input_exp.txt')", "(d + '/input_exp.txt')"], {}), "(s + '/input_exp.txt', d + '/input_exp.txt')\n", (910, 954), False, 'import shutil, os, glob\n'), ((1068, 1087), 'os.path.exists', 'os.path.exists', (['dst'], {}), '(dst)\n', (1082, 1087), False, 'import shutil, os, glob\n'), ((1094, 1107), 'os.mkdir', 'os.mkdir', (['dst'], {}), '(dst)\n', (1102, 1107), False, 'import shutil, os, glob\n'), ((1294, 1360), 'shutil.copyfile', 'shutil.copyfile', (["(s + '/input_1_aligned.tif')", "(d + '/dynamic/le.tif')"], {}), "(s + '/input_1_aligned.tif', d + '/dynamic/le.tif')\n", (1309, 1360), False, 'import shutil, os, glob\n'), ((1366, 1432), 'shutil.copyfile', 'shutil.copyfile', (["(s + '/input_2_aligned.tif')", "(d + '/dynamic/me.tif')"], {}), "(s + '/input_2_aligned.tif', d + '/dynamic/me.tif')\n", (1381, 1432), False, 'import shutil, os, glob\n'), ((1438, 1504), 'shutil.copyfile', 'shutil.copyfile', (["(s + '/input_3_aligned.tif')", "(d + '/dynamic/he.tif')"], {}), "(s + '/input_3_aligned.tif', d + '/dynamic/he.tif')\n", (1453, 1504), False, 'import shutil, os, glob\n'), ((1510, 1572), 'shutil.copyfile', 'shutil.copyfile', (["(s + '/ref_hdr_aligned.hdr')", "(d + '/hdr_gt.hdr')"], {}), "(s + '/ref_hdr_aligned.hdr', d + '/hdr_gt.hdr')\n", (1525, 1572), False, 'import shutil, os, glob\n'), ((1578, 1637), 
'shutil.copyfile', 'shutil.copyfile', (["(s + '/input_exp.txt')", "(d + '/input_exp.txt')"], {}), "(s + '/input_exp.txt', d + '/input_exp.txt')\n", (1593, 1637), False, 'import shutil, os, glob\n'), ((145, 164), 'os.path.exists', 'os.path.exists', (['dst'], {}), '(dst)\n', (159, 164), False, 'import shutil, os, glob\n'), ((171, 184), 'os.mkdir', 'os.mkdir', (['dst'], {}), '(dst)\n', (179, 184), False, 'import shutil, os, glob\n'), ((282, 299), 'os.path.exists', 'os.path.exists', (['d'], {}), '(d)\n', (296, 299), False, 'import shutil, os, glob\n'), ((310, 321), 'os.mkdir', 'os.mkdir', (['d'], {}), '(d)\n', (318, 321), False, 'import shutil, os, glob\n'), ((331, 355), 'os.mkdir', 'os.mkdir', (["(d + '/dynamic')"], {}), "(d + '/dynamic')\n", (339, 355), False, 'import shutil, os, glob\n'), ((365, 388), 'os.mkdir', 'os.mkdir', (["(d + '/static')"], {}), "(d + '/static')\n", (373, 388), False, 'import shutil, os, glob\n'), ((1205, 1222), 'os.path.exists', 'os.path.exists', (['d'], {}), '(d)\n', (1219, 1222), False, 'import shutil, os, glob\n'), ((1233, 1244), 'os.mkdir', 'os.mkdir', (['d'], {}), '(d)\n', (1241, 1244), False, 'import shutil, os, glob\n'), ((1254, 1278), 'os.mkdir', 'os.mkdir', (["(d + '/dynamic')"], {}), "(d + '/dynamic')\n", (1262, 1278), False, 'import shutil, os, glob\n')]
|
import pyautogui as pt
from time import sleep
while True:
posXY = pt.position()
print(posXY, pt.pixel(posXY[0],posXY[1]))
sleep(1)
if posXY[0] == 0:
break
# This program prints the cursor position (and the pixel color under it) once per second.
# It exits when the cursor's x-coordinate reaches 0.
|
[
"pyautogui.position",
"pyautogui.pixel",
"time.sleep"
] |
[((75, 88), 'pyautogui.position', 'pt.position', ([], {}), '()\n', (86, 88), True, 'import pyautogui as pt\n'), ((141, 149), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (146, 149), False, 'from time import sleep\n'), ((107, 135), 'pyautogui.pixel', 'pt.pixel', (['posXY[0]', 'posXY[1]'], {}), '(posXY[0], posXY[1])\n', (115, 135), True, 'import pyautogui as pt\n')]
|
'''
command for zim custom tool: python path/to/zim_to_md.py -T %T -f %f -D %D
* Markdown format: https://github.com/adam-p/markdown-here/wiki/Markdown-Cheatsheet
'''
import argparse
import pyperclip
from zim.formats import get_parser, StubLinker
from zim.formats.markdown import Dumper as TextDumper
class Dumper(TextDumper):
    '''Inherit from the markdown format Dumper class, only overload things that are different'''
def dump_object(self, tag, attrib, strings=None):
if 'type' in attrib:
t = attrib['type']
if t == 'code':
c = attrib.get('lang', "")
return ['```%s\n' % c] + strings + ['```\n']
return super(Dumper, self).dump_object(tag, attrib, strings)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('-T', dest='wiki_text', help='the selected text including wiki formatting')
parser.add_argument('-f', dest='file', help='the page source as temporary file')
parser.add_argument('-D', dest='source_dir', help='the document root')
args = parser.parse_args()
zim_parser = get_parser('wiki')
if args.wiki_text:
wiki_text = args.wiki_text
else:
wiki_text = open(args.file).read()
tree = zim_parser.parse(wiki_text)
try:
linker = StubLinker(source_dir=args.source_dir)
dumper = Dumper(linker=linker)
lines = dumper.dump(tree)
textile_text = ''.join(lines).encode('utf-8')
pyperclip.copy(textile_text)
except Exception as e:
pyperclip.copy(e.message)
|
[
"zim.formats.get_parser",
"zim.formats.StubLinker",
"argparse.ArgumentParser",
"pyperclip.copy"
] |
[((778, 822), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '__doc__'}), '(description=__doc__)\n', (801, 822), False, 'import argparse\n'), ((1132, 1150), 'zim.formats.get_parser', 'get_parser', (['"""wiki"""'], {}), "('wiki')\n", (1142, 1150), False, 'from zim.formats import get_parser, StubLinker\n'), ((1327, 1365), 'zim.formats.StubLinker', 'StubLinker', ([], {'source_dir': 'args.source_dir'}), '(source_dir=args.source_dir)\n', (1337, 1365), False, 'from zim.formats import get_parser, StubLinker\n'), ((1501, 1529), 'pyperclip.copy', 'pyperclip.copy', (['textile_text'], {}), '(textile_text)\n', (1515, 1529), False, 'import pyperclip\n'), ((1565, 1590), 'pyperclip.copy', 'pyperclip.copy', (['e.message'], {}), '(e.message)\n', (1579, 1590), False, 'import pyperclip\n')]
|
# -*- coding: utf-8 -*-
from setuptools import find_packages
from cx_Freeze import setup, Executable
import apyml
install_requires = [
    'cx_Freeze',
'pandas'
]
setup(
name='apyml',
version=apyml.__version__,
packages=find_packages(),
author=apyml.__author__,
author_email='<EMAIL>',
description='Apyml - a Machine learning model building tool for humans.',
long_description=open('README.md').read(),
install_requires=install_requires,
include_package_data=True,
url='https://github.com/TommyStarK/apyml',
classifiers=[
'Programming Language :: Python :: 3',
'Natural Language :: English',
'Operating System :: OS Independent',
],
entry_points={
'console_scripts': [
'apyml = apyml.__main__:main',
],
},
executables=[Executable('app.py')]
)
|
[
"cx_Freeze.Executable",
"setuptools.find_packages"
] |
[((242, 257), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (255, 257), False, 'from setuptools import find_packages\n'), ((841, 861), 'cx_Freeze.Executable', 'Executable', (['"""app.py"""'], {}), "('app.py')\n", (851, 861), False, 'from cx_Freeze import setup, Executable\n')]
|
import torch
import torch.nn as nn
from torch.nn import functional as F
class CustomRNN(nn.Module):
def __init__(self, input_size, output_size, hidden_size, batch_first=True, W_scale=1e-1, f_hidden=None):
super(CustomRNN, self).__init__()
self.input_size = input_size
self.output_size = output_size
self.hidden_size = hidden_size
self.f_hidden = f_hidden
self.W1 = nn.Parameter((torch.rand(hidden_size, input_size)-0.5)*W_scale)
self.W2 = nn.Parameter((torch.rand(hidden_size, hidden_size)-0.5)*W_scale)
self.W3 = nn.Parameter((torch.rand(output_size, hidden_size)-0.5)*W_scale)
self.b_h = nn.Parameter(torch.zeros(hidden_size))
def forward(self, x):
h1 = torch.zeros(x.shape[0], self.hidden_size)
ys = []
for i, xi in enumerate(x.chunk(x.size(1), dim=1)):
h1 = (torch.matmul(self.W2, h1.t()) + torch.matmul(self.W1, xi.t())).t() + self.b_h
if self.f_hidden is not None:
h1 = getattr(F, self.f_hidden)(h1)
y = torch.matmul(self.W3, h1.t()).t()
ys.append(y)
ys = torch.stack(ys, dim=1)
return ys
class CustomRes(nn.Module):
def __init__(self, input_size, output_size, hidden_size, batch_first=True, W_scale=1e-1, f_hidden=None):
super(CustomRes, self).__init__()
self.input_size = input_size
self.output_size = output_size
self.hidden_size = hidden_size
self.f_hidden = f_hidden
self.W1 = torch.nn.Parameter((torch.rand(hidden_size, input_size)-0.5)*W_scale)
self.W2 = torch.nn.Parameter((torch.rand(hidden_size, hidden_size)-0.5)*W_scale)
self.W3 = torch.nn.Parameter((torch.rand(output_size, hidden_size)-0.5)*W_scale)
self.b_h = torch.nn.Parameter(torch.zeros(hidden_size))
def forward(self, x):
h1 = torch.zeros(x.shape[0], self.hidden_size)
ys = []
for i, xi in enumerate(x.chunk(x.size(1), dim=1)):
hprev = h1
h1 = (torch.matmul(self.W2, h1.t()) + torch.matmul(self.W1, xi.t())).t() + self.b_h
if self.f_hidden is not None:
h1 = getattr(F, self.f_hidden)(h1)
y = torch.matmul(self.W3, h1.t()).t()
ys.append(y)
h1 = h1 + hprev
ys = torch.stack(ys, dim=1)
return ys
class CustomLSTM(nn.Module):
def __init__(self, input_size, output_size, hidden_size, batch_first=True, W_scale=1e-1):
super(CustomLSTM, self).__init__()
self.input_size = input_size
self.output_size = output_size
self.hidden_size = hidden_size
self.lstm = nn.LSTM(input_size, hidden_size, batch_first=batch_first)
self.W3 = torch.nn.Parameter((torch.rand(output_size, hidden_size)-0.5))
def forward(self, x):
# out should have size [N_batch, T, N_hidden]
out, hidden = self.lstm(x.unsqueeze(2))
# print(torch.max(x, 1))
# print(x[:, 100])
# print(out[:, 100, 0].detach())
# ys should have size [N_batch, T, N_classes]
ys = torch.matmul(out, self.W3.t())
return ys
|
[
"torch.zeros",
"torch.nn.LSTM",
"torch.stack",
"torch.rand"
] |
[((748, 789), 'torch.zeros', 'torch.zeros', (['x.shape[0]', 'self.hidden_size'], {}), '(x.shape[0], self.hidden_size)\n', (759, 789), False, 'import torch\n'), ((1144, 1166), 'torch.stack', 'torch.stack', (['ys'], {'dim': '(1)'}), '(ys, dim=1)\n', (1155, 1166), False, 'import torch\n'), ((1885, 1926), 'torch.zeros', 'torch.zeros', (['x.shape[0]', 'self.hidden_size'], {}), '(x.shape[0], self.hidden_size)\n', (1896, 1926), False, 'import torch\n'), ((2332, 2354), 'torch.stack', 'torch.stack', (['ys'], {'dim': '(1)'}), '(ys, dim=1)\n', (2343, 2354), False, 'import torch\n'), ((2677, 2734), 'torch.nn.LSTM', 'nn.LSTM', (['input_size', 'hidden_size'], {'batch_first': 'batch_first'}), '(input_size, hidden_size, batch_first=batch_first)\n', (2684, 2734), True, 'import torch.nn as nn\n'), ((682, 706), 'torch.zeros', 'torch.zeros', (['hidden_size'], {}), '(hidden_size)\n', (693, 706), False, 'import torch\n'), ((1819, 1843), 'torch.zeros', 'torch.zeros', (['hidden_size'], {}), '(hidden_size)\n', (1830, 1843), False, 'import torch\n'), ((2773, 2809), 'torch.rand', 'torch.rand', (['output_size', 'hidden_size'], {}), '(output_size, hidden_size)\n', (2783, 2809), False, 'import torch\n'), ((434, 469), 'torch.rand', 'torch.rand', (['hidden_size', 'input_size'], {}), '(hidden_size, input_size)\n', (444, 469), False, 'import torch\n'), ((516, 552), 'torch.rand', 'torch.rand', (['hidden_size', 'hidden_size'], {}), '(hidden_size, hidden_size)\n', (526, 552), False, 'import torch\n'), ((599, 635), 'torch.rand', 'torch.rand', (['output_size', 'hidden_size'], {}), '(output_size, hidden_size)\n', (609, 635), False, 'import torch\n'), ((1553, 1588), 'torch.rand', 'torch.rand', (['hidden_size', 'input_size'], {}), '(hidden_size, input_size)\n', (1563, 1588), False, 'import torch\n'), ((1641, 1677), 'torch.rand', 'torch.rand', (['hidden_size', 'hidden_size'], {}), '(hidden_size, hidden_size)\n', (1651, 1677), False, 'import torch\n'), ((1730, 1766), 'torch.rand', 'torch.rand', (['output_size', 'hidden_size'], {}), '(output_size, hidden_size)\n', (1740, 1766), False, 'import torch\n')]
|
import time
from tkinter import *
from model.battle import ClientBattle
class JoinRoomWaitFrame(Frame):
def __init__(self, master):
Frame.__init__(self, master)
self.label = Label(self, font=("Helvetica", 18))
self.label.pack(side="top", pady=20)
self.players = Listbox(self)
self.players.pack(pady=15)
Label(self, text="En attente d'autres joueurs...").pack(pady=3)
def init(self):
def on_players_list_updated(players_names):
self.players.delete(0, END)
self.players.insert(0, self.master.handler.name + ' (vous)')
for name in players_names:
self.players.insert(END, name)
def on_game_begin():
print('== Game begins!')
self.master.battle = ClientBattle(self.master.handler)
time.sleep(1)
self.master.raise_frame('game')
self.master.handler.on_players_list_updated = on_players_list_updated
self.master.handler.on_game_begin = on_game_begin
self.label['text'] = self.master.room_name
|
[
"model.battle.ClientBattle",
"time.sleep"
] |
[((797, 830), 'model.battle.ClientBattle', 'ClientBattle', (['self.master.handler'], {}), '(self.master.handler)\n', (809, 830), False, 'from model.battle import ClientBattle\n'), ((843, 856), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (853, 856), False, 'import time\n')]
|
import helper_test
from configuration import Configuration
from local_services import Compress
from helper import Helper
configuration = Configuration('actioncam', path=helper_test.config_path())
helper = Helper(configuration.config)
helper.state_set_start()
debug = True
compress = Compress(configuration, helper, debug)
def test_compress_folder_does_not_exist(configuration, helper):
print('test_compress_folder_does_not_exist')
compress.config["compress_location"] = "DoesNotExist"
compressed = compress.compress()
assert 'not found' in compressed, "test_compress_folder_does_not_exist failed"
def test_compress(configuration, helper):
print('test_compress')
file_test = configuration.config["DEFAULT"]["recording_location"] + "/" + configuration.config["DEFAULT"]["identify"] + "_20211113" + "." + configuration.config["DEFAULT"]["output"]
helper.file_touch(file_test)
compressed = compress.compress()
assert 'zip' in compressed, "test_compress failed as no zip found in reply"
helper.file_delete(file_test)
compressed = compress.get_compressed()
print('Report')
for cmp in compressed:
print(cmp)
assert len(compressed) >= 1, "test_compress failed as not compressed found"
if __name__ == '__main__':
test_compress(configuration, helper)
test_compress_folder_does_not_exist(configuration, helper)
|
[
"helper.Helper",
"local_services.Compress",
"helper_test.config_path"
] |
[((207, 235), 'helper.Helper', 'Helper', (['configuration.config'], {}), '(configuration.config)\n', (213, 235), False, 'from helper import Helper\n'), ((285, 323), 'local_services.Compress', 'Compress', (['configuration', 'helper', 'debug'], {}), '(configuration, helper, debug)\n', (293, 323), False, 'from local_services import Compress\n'), ((171, 196), 'helper_test.config_path', 'helper_test.config_path', ([], {}), '()\n', (194, 196), False, 'import helper_test\n')]
|
#
# This file is part of CasADi.
#
# CasADi -- A symbolic framework for dynamic optimization.
# Copyright (C) 2010-2014 <NAME>, <NAME>, <NAME>,
# <NAME>. All rights reserved.
# Copyright (C) 2011-2014 <NAME>
#
# CasADi is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3 of the License, or (at your option) any later version.
#
# CasADi is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with CasADi; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
#
# -*- coding: utf-8 -*-
from casadi import *
import matplotlib.pyplot as plt
import numpy
# Sailboat model based on
#
# [MF2011]:
# <NAME>, <NAME>
# "Tacking Simulation of Sailing Yachts with New Model of Aerodynamic
# Force Variation During Tacking Maneuver"
# Journal of Sailboat Technology, Article 2011-01
#
# <NAME>, UW Madison 2017
#
# Create DaeBuilder instance
dae = DaeBuilder()
# Load file with external functions
from os import path
curr_dir = path.dirname(path.abspath(__file__))
clib = Importer(curr_dir + '/sailboat_c.c', 'none')
# Options for external functions
# NOTE: These options should become redundant once code is more stable
external_opts = dict(enable_jacobian = False, enable_forward = False, \
enable_reverse = False, enable_fd = True)
# Physical constants
g = 9.81 # [m/s^2] gravity
rho = 1027. # p[kg/m^3] density of ocean water
# Sailboat model constants. Cf. Table 1 [MF2011]
L = 8.80 # [m] Length of design waterline
D = 2.02 # [m] Design draft, including fin keel
m = 4410. # [kg] Displacement
GM = 1.45 # [m] metacentric height of boat
m_x = 160.; m_y_hull = 2130.; m_y_sail = 280.; m_z = 12000. # [kg] Added masses
Ixx = 17700.; Iyy = 33100.; Izz = 17200. # [kg m^2] Moments of inertia
Jxx_hull = 7200.; Jxx_sail = 8100.; Jyy = 42400.; Jzz = 6700. # [kg m^2] Added moments of inertia
X_pVV = 3.38e-1
X_pPP = 1.40e-3
X_pVVVV = -1.84
X_pT = -1.91e-2
Y_pV = -5.35e-1
Y_pP = -5.89e-3
Y_pVPP = 7.37e-1
Y_pVVP = -5.53e-1
Y_pVVV = 3.07
Y_pP = 2.19e-1
Y_pT = -4.01e-3
K_pV = 2.80e-1
K_pP = 3.36e-3
K_pVPP = -4.07e-1
K_pVVP = 2.24e-1
K_pVVV = -1.38
K_pT = -3.53e-1
N_pV = -3.23e-2
N_pP = -1.52e-2
N_pVPP = 2.71e-4
N_pVVP = -9.06e-2
N_pVVV = -2.98e-2
N_pT = -5.89e-3
C_Xd = -3.79e-2
C_Yd = -1.80e-1
C_Kd = 9.76e-2
C_Nd = 9.74e-2
# States
U = dae.add_x('U') # Velocity along the X axis
V = dae.add_x('V') # Velocity along the Y axis
phi = dae.add_x('phi') # Roll angle
theta = dae.add_x('theta') # Yaw angle
dphi = dae.add_x('dphi') # Time derivative of phi
dtheta = dae.add_x('dtheta') # Time derivative of theta
# Controls
beta = dae.add_u('beta')
# Sum contributions from hull and sail (?)
m_y = m_y_hull + m_y_sail
Jxx = Jxx_hull + Jxx_sail
# Auxiliary variables
# Squared boat velocity
V_B2 = U**2 + V**2
# To avoid duplicate expressions
cos_phi = cos(phi)
sin_phi = sin(phi)
cos2_phi = cos_phi**2
sin2_phi = sin_phi**2
phi2 = phi**2
# Hull resistance in the upright position
X_0_fun = dae.add_fun('hull_resistance', clib, external_opts)
X_0 = X_0_fun(U)
# Calculate hydrodynamic forces
V_p = sin(beta)
V_p2 = V_p**2
V_p3 = V_p2*V_p
V_p4 = V_p2**2
H_fact = 0.5*rho*V_B2*L*D
dae.add_d('X_H', (X_pVV*V_p2 + X_pPP*phi2 + X_pVVVV*V_p4)*H_fact)
dae.add_d('Y_H', (Y_pV*V_p + Y_pP*phi + Y_pVPP*V_p*phi2 + Y_pVVP*V_p2*phi + Y_pVVV*V_p3)*H_fact)
dae.add_d('K_H', (K_pV*V_p + K_pP*phi + K_pVPP*V_p*phi2 + K_pVVP*V_p2*phi + K_pVVV*V_p3)*H_fact*D)
dae.add_d('N_H', (N_pV*V_p + N_pP*phi + N_pVPP*V_p*phi2 + N_pVVP*V_p2*phi + N_pVVV*V_p3)*H_fact*L)
H = dae.add_fun('H', ['phi', 'beta', 'U', 'V'], ['X_H', 'Y_H', 'K_H', 'N_H'])
# Plot it for reference
ngrid_phi = 100; ngrid_beta = 100
U0 = 5.; V0 = 5. # [m/s] Boat speed for simulation
phi0 = numpy.linspace(-pi/4, pi/4, ngrid_phi)
beta0 = numpy.linspace(-pi/4, pi/4, ngrid_beta)
PHI0,BETA0 = numpy.meshgrid(phi0, beta0)
r = H(phi=PHI0, beta=BETA0, U=U0, V=V0)
for i,c in enumerate(['X_H', 'Y_H', 'K_H', 'N_H']):
plt.subplot(2,2,i+1)
CS = plt.contour(PHI0*180/pi, BETA0*180/pi, log10(r[c]))
plt.clabel(CS, inline=1, fontsize=10)
plt.title('log10(' + c + ')')
plt.grid(True)
# Make a function call
X_H, Y_H, K_H, N_H = H(phi, beta, U, V)
# Hydrodynamic derivatives of the hull due to yawing motion
X_VT = 0. # Neglected
Y_T = 0. # Neglected
N_T = 0. # Neglected
# Derivative due to rolling
Y_P = 0. # Neglected
K_P = 0. # Neglected
# Hydrodynamic forces on the rudder
X_R = 0. # Neglected
Y_R = 0. # Neglected
K_R = 0. # Neglected
N_R = 0. # Neglected
# Sail forces
X_S = 0. # Neglected
Y_S = 0. # Neglected
K_S = 0. # Neglected
N_S = 0. # Neglected
# Surge: (m+m_x)*dot(U) = F_X, cf. (3) [MF2011]
F_X = X_0 + X_H + X_R + X_S \
+ (m + m_y*cos2_phi + m_z*sin2_phi + X_VT)*V*dtheta
dae.add_ode("surge", F_X / (m+m_x))
# Sway: (m + m_y*cos2_phi + m_z*sin2_phi)*dot(V) = F_Y
F_Y = Y_H + Y_P*dphi + Y_T*dtheta + Y_R + Y_S \
- (m + m_x)*U*dtheta \
- 2*(m_z - m_y)*sin_phi*cos_phi*V*dphi
dae.add_ode("sway", F_Y / (m + m_y*cos2_phi + m_z*sin2_phi))
# Roll: (Ixx + Jxx)*dot(dphi) = F_K
F_K = K_H + K_P*dphi + K_R + K_S - m*g*GM*sin_phi \
+ ((Iyy+Jyy)-(Izz+Jzz))*sin_phi*cos_phi*dtheta**2
dae.add_ode("roll", F_K / (Ixx + Jxx))
# Yaw: ((Iyy+Jyy)*sin2_phi + (Izz+Jzz)*cos2_phi)*dot(dtheta) = F_N
F_N = N_H + N_T*dtheta + N_R + N_S \
-2*((Iyy+Jyy)-(Izz+Jzz))*sin_phi*cos_phi*dtheta*dphi
dae.add_ode("yaw", F_N / ((Iyy+Jyy)*sin2_phi + (Izz+Jzz)*cos2_phi))
# Roll angle
dae.add_ode("roll_angle", dphi)
# Yaw angle
dae.add_ode("yaw_angle", dtheta)
# Print ODE
print(dae)
# Generate Jacobian of ODE rhs w.r.t. to states and control
Jfcn = dae.create("Jfcn", ['x', 'u'], ['jac_ode_x', 'jac_ode_u'])
Jfcn_file = Jfcn.generate()
print('Jacobian function saved to ' + Jfcn_file)
plt.show()
|
[
"matplotlib.pyplot.title",
"matplotlib.pyplot.subplot",
"os.path.abspath",
"numpy.meshgrid",
"matplotlib.pyplot.show",
"matplotlib.pyplot.clabel",
"numpy.linspace",
"matplotlib.pyplot.grid"
] |
[((4203, 4245), 'numpy.linspace', 'numpy.linspace', (['(-pi / 4)', '(pi / 4)', 'ngrid_phi'], {}), '(-pi / 4, pi / 4, ngrid_phi)\n', (4217, 4245), False, 'import numpy\n'), ((4250, 4293), 'numpy.linspace', 'numpy.linspace', (['(-pi / 4)', '(pi / 4)', 'ngrid_beta'], {}), '(-pi / 4, pi / 4, ngrid_beta)\n', (4264, 4293), False, 'import numpy\n'), ((4303, 4330), 'numpy.meshgrid', 'numpy.meshgrid', (['phi0', 'beta0'], {}), '(phi0, beta0)\n', (4317, 4330), False, 'import numpy\n'), ((6223, 6233), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (6231, 6233), True, 'import matplotlib.pyplot as plt\n'), ((1481, 1503), 'os.path.abspath', 'path.abspath', (['__file__'], {}), '(__file__)\n', (1493, 1503), False, 'from os import path\n'), ((4427, 4451), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(2)', '(2)', '(i + 1)'], {}), '(2, 2, i + 1)\n', (4438, 4451), True, 'import matplotlib.pyplot as plt\n'), ((4513, 4550), 'matplotlib.pyplot.clabel', 'plt.clabel', (['CS'], {'inline': '(1)', 'fontsize': '(10)'}), '(CS, inline=1, fontsize=10)\n', (4523, 4550), True, 'import matplotlib.pyplot as plt\n'), ((4555, 4584), 'matplotlib.pyplot.title', 'plt.title', (["('log10(' + c + ')')"], {}), "('log10(' + c + ')')\n", (4564, 4584), True, 'import matplotlib.pyplot as plt\n'), ((4589, 4603), 'matplotlib.pyplot.grid', 'plt.grid', (['(True)'], {}), '(True)\n', (4597, 4603), True, 'import matplotlib.pyplot as plt\n')]
|
from binance_trading_bot import utilities, visual, indicator
import matplotlib.pyplot as plt
plt.style.use('classic')
from matplotlib.ticker import FormatStrFormatter
import matplotlib.patches as mpatches
import math
def volume_spread_analysis(client, market,
NUM_PRICE_STEP, TIME_FRAME_STEP, TIME_FRAME, TIME_FRAME_DURATION):
nDigit = abs(int(math.log10(float(client.get_symbol_info(market)['filters'][0]['tickSize']))))
candles = utilities.get_candles(client, market, TIME_FRAME, TIME_FRAME_DURATION)
VRVP = indicator.volume_profile(client, market, NUM_PRICE_STEP, TIME_FRAME_STEP, TIME_FRAME_DURATION)
BBANDS = indicator.bbands(candles)
VSTOP = indicator.volatility_stop(candles, 20, 2)
RSI = indicator.rsi(candles, 14)
SMA = indicator.sma(candles)
# Visualization
VSTOP_COLOR = 'indigo'
SMA_COLOR = 'black'
BBANDS_COLOR = 'green'
VOLUME_COLOR = 'gray'
BUY_COLOR = 'black'
SELL_COLOR = 'red'
VOLATILITY_COLOR = 'black'
RSI_COLOR = 'black'
f,axes = plt.subplots(4, 1, gridspec_kw={'height_ratios':[3, 1, 1, 1]})
f.set_size_inches(20,20)
ax = axes[0]
axt = ax.twiny()
axt.barh(VRVP['price'],
VRVP['buy_volume'],
color='gray',
edgecolor='w',
height=VRVP['price'][1]-VRVP['price'][0],
align='center',
alpha=0.25)
axt.barh(VRVP['price'],
VRVP['buy_volume']+VRVP['sell_volume'],
color='gray',
edgecolor='w',
height=VRVP['price'][1]-VRVP['price'][0],
align='center',
alpha=0.25)
axt.set_xticks([])
for tic in axt.xaxis.get_major_ticks():
tic.tick1On = tic.tick2On = False
tic.label1On = tic.label2On = False
visual.candlestick2_ohlc(ax,
candles['open'],
candles['high'],
candles['low'],
candles['close'],
width=0.6, alpha=1)
ax.plot(VSTOP['support'], linewidth=2, color=VSTOP_COLOR, linestyle='-')
ax.plot(VSTOP['resistance'], linewidth=2, color=VSTOP_COLOR, linestyle='-')
ax.plot(BBANDS['middle_band'], linewidth=1, color=BBANDS_COLOR, linestyle='-')
ax.plot(BBANDS['upper_band'], linewidth=1, color=BBANDS_COLOR, linestyle='-')
ax.plot(BBANDS['lower_band'], linewidth=1, color=BBANDS_COLOR, linestyle='-')
ax.plot(SMA, linewidth=1, color=SMA_COLOR, linestyle='--')
if market=='BTCUSDT':
pivotList = []
for i in range(len(VSTOP)):
if math.isnan(VSTOP['support'].iat[i]):
if not math.isnan(VSTOP['support'].iat[i-1]):
pivotList.append(VSTOP['support'].iat[i-1])
if math.isnan(VSTOP['resistance'].iat[i]):
if not math.isnan(VSTOP['resistance'].iat[i-1]):
pivotList.append(VSTOP['resistance'].iat[i-1])
pivotList = sorted(pivotList)
for pivot in pivotList:
ax.text(len(candles)+.5, pivot, str(int(pivot)))
ax.yaxis.grid(True)
for tic in ax.xaxis.get_major_ticks():
tic.tick1On = tic.tick2On = False
tic.label1On = tic.label2On = False
ax.set_xticks([])
ax.set_yticks(VRVP['price_min'].append(VRVP['price_max'].tail(1)))
ax.set_xlim(-.5, len(candles))
ax.yaxis.set_major_formatter(FormatStrFormatter('%.'+str(nDigit)+'f'))
ax.get_yaxis().set_label_coords(-0.075,0.5)
ax.set_ylabel("Price",fontsize=20)
ax.set_title(market+' '+TIME_FRAME.upper(), fontsize=30, y=1.03, loc='left')
patchList = [mpatches.Patch(color=VOLUME_COLOR, label='market-profile'),
mpatches.Patch(color=VSTOP_COLOR, label='volatility-stop'),
mpatches.Patch(color=BBANDS_COLOR, label='bollinger-bands'),
mpatches.Patch(color=SMA_COLOR, label='moving-average')]
ax.legend(handles=patchList, loc='best', prop={'size': 20}, ncol=len(patchList),framealpha=0.5)
ax = axes[1]
visual.candlestick2_ohlc(ax,
0*candles['assetVolume'],
candles['assetVolume'],
0*candles['assetVolume'],
candles['assetVolume'],
width=0.6, alpha=.35)
visual.candlestick2_ohlc(ax,
0*candles['buyAssetVolume'],
candles['buyAssetVolume'],
0*candles['buyAssetVolume'],
candles['buyAssetVolume'],
width=0.28, alpha=1, shift=-0.15)
visual.candlestick2_ohlc(ax,
candles['sellAssetVolume'],
candles['sellAssetVolume'],
0*candles['sellAssetVolume'],
0*candles['sellAssetVolume'],
width=0.28, alpha=1, shift=+0.15)
ax.yaxis.grid(True)
for tic in ax.xaxis.get_major_ticks():
tic.tick1On = tic.tick2On = False
tic.label1On = tic.label2On = False
ax.set_xticks([])
ax.set_xlim(-.5, len(candles))
ax.get_yaxis().set_label_coords(-0.075,0.5)
ax.yaxis.set_major_formatter(FormatStrFormatter('%.2f'))
ax.get_xaxis().set_label_coords(0.5, -0.025)
ax.set_ylabel("Volume",fontsize=20)
patchList = [mpatches.Patch(color=VOLUME_COLOR, label='volume'),
mpatches.Patch(color=BUY_COLOR, label='buy-volume'),
mpatches.Patch(color=SELL_COLOR, label='sell-volume')]
ax.legend(handles=patchList, loc='best', prop={'size': 20}, ncol=len(patchList), framealpha=0.5)
ax = axes[2]
visual.candlestick2_ohlc(ax,
0*candles['spread'],
candles['spread'],
0*candles['spread'],
candles['spread'],
width=0.6, colorup=VOLATILITY_COLOR, alpha=.35)
ax.yaxis.grid(True)
for tic in ax.xaxis.get_major_ticks():
tic.tick1On = tic.tick2On = False
tic.label1On = tic.label2On = False
ax.set_xticks([])
ax.set_xlim(-.5, len(candles))
ax.get_yaxis().set_label_coords(-0.075,0.5)
ax.yaxis.set_major_formatter(FormatStrFormatter('%.'+str(nDigit)+'f'))
ax.get_xaxis().set_label_coords(0.5, -0.025)
ax.set_ylabel("Volatility",fontsize=20)
patchList = [mpatches.Patch(color=VOLATILITY_COLOR, label='average-true-range'),
mpatches.Patch(color=BBANDS_COLOR, label='standard-deviation')]
ax.legend(handles=patchList, loc='best', prop={'size': 20}, ncol=len(patchList), framealpha=0.5)
axt = ax.twinx()
axt.plot(BBANDS['std'], linewidth=2, color=BBANDS_COLOR, linestyle='-')
for tic in axt.xaxis.get_major_ticks():
tic.tick1On = tic.tick2On = False
tic.label1On = tic.label2On = False
axt.set_xticks([])
axt.set_yticks([])
axt.set_xlim(-.5, len(candles))
axt = ax.twinx()
axt.plot(VSTOP['ATR'], linewidth=2, color=VOLATILITY_COLOR, linestyle='-')
for tic in axt.xaxis.get_major_ticks():
tic.tick1On = tic.tick2On = False
tic.label1On = tic.label2On = False
axt.set_xticks([])
axt.set_xlim(-.5, len(candles))
ax = axes[3]
ax.plot(RSI, linewidth=2, color=RSI_COLOR, linestyle='-')
ax.axhline(y=50, color=RSI_COLOR, linestyle='--')
ax.axhspan(ymin=20, ymax=80, color=RSI_COLOR, alpha=0.1)
ax.axhspan(ymin=30, ymax=70, color=RSI_COLOR, alpha=0.1)
ax.yaxis.grid(True)
for tic in ax.xaxis.get_major_ticks():
tic.tick1On = tic.tick2On = False
tic.label1On = tic.label2On = False
ax.set_xticks([])
ax.set_xlim(-.5, len(candles))
ax.get_yaxis().set_label_coords(-0.075,0.5)
ax.yaxis.set_major_formatter(FormatStrFormatter('%.'+str(nDigit)+'f'))
ax.get_xaxis().set_label_coords(0.5, -0.025)
ax.set_ylabel("Momentum",fontsize=20)
patchList = [mpatches.Patch(color=RSI_COLOR, label='relative-strength')]
ax.legend(handles=patchList, loc='best', prop={'size': 20}, ncol=len(patchList), framealpha=0.5)
f.tight_layout()
plt.savefig('img/'+market+'_'+TIME_FRAME.upper()+'.png', bbox_inches='tight')
|
[
"math.isnan",
"binance_trading_bot.indicator.bbands",
"binance_trading_bot.indicator.volume_profile",
"binance_trading_bot.visual.candlestick2_ohlc",
"binance_trading_bot.utilities.get_candles",
"matplotlib.pyplot.style.use",
"matplotlib.ticker.FormatStrFormatter",
"binance_trading_bot.indicator.volatility_stop",
"binance_trading_bot.indicator.rsi",
"matplotlib.patches.Patch",
"binance_trading_bot.indicator.sma",
"matplotlib.pyplot.subplots"
] |
[((93, 117), 'matplotlib.pyplot.style.use', 'plt.style.use', (['"""classic"""'], {}), "('classic')\n", (106, 117), True, 'import matplotlib.pyplot as plt\n'), ((474, 544), 'binance_trading_bot.utilities.get_candles', 'utilities.get_candles', (['client', 'market', 'TIME_FRAME', 'TIME_FRAME_DURATION'], {}), '(client, market, TIME_FRAME, TIME_FRAME_DURATION)\n', (495, 544), False, 'from binance_trading_bot import utilities, visual, indicator\n'), ((561, 659), 'binance_trading_bot.indicator.volume_profile', 'indicator.volume_profile', (['client', 'market', 'NUM_PRICE_STEP', 'TIME_FRAME_STEP', 'TIME_FRAME_DURATION'], {}), '(client, market, NUM_PRICE_STEP, TIME_FRAME_STEP,\n TIME_FRAME_DURATION)\n', (585, 659), False, 'from binance_trading_bot import utilities, visual, indicator\n'), ((669, 694), 'binance_trading_bot.indicator.bbands', 'indicator.bbands', (['candles'], {}), '(candles)\n', (685, 694), False, 'from binance_trading_bot import utilities, visual, indicator\n'), ((707, 748), 'binance_trading_bot.indicator.volatility_stop', 'indicator.volatility_stop', (['candles', '(20)', '(2)'], {}), '(candles, 20, 2)\n', (732, 748), False, 'from binance_trading_bot import utilities, visual, indicator\n'), ((759, 785), 'binance_trading_bot.indicator.rsi', 'indicator.rsi', (['candles', '(14)'], {}), '(candles, 14)\n', (772, 785), False, 'from binance_trading_bot import utilities, visual, indicator\n'), ((796, 818), 'binance_trading_bot.indicator.sma', 'indicator.sma', (['candles'], {}), '(candles)\n', (809, 818), False, 'from binance_trading_bot import utilities, visual, indicator\n'), ((1069, 1132), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(4)', '(1)'], {'gridspec_kw': "{'height_ratios': [3, 1, 1, 1]}"}), "(4, 1, gridspec_kw={'height_ratios': [3, 1, 1, 1]})\n", (1081, 1132), True, 'import matplotlib.pyplot as plt\n'), ((1837, 1958), 'binance_trading_bot.visual.candlestick2_ohlc', 'visual.candlestick2_ohlc', (['ax', "candles['open']", "candles['high']", "candles['low']", "candles['close']"], {'width': '(0.6)', 'alpha': '(1)'}), "(ax, candles['open'], candles['high'], candles[\n 'low'], candles['close'], width=0.6, alpha=1)\n", (1861, 1958), False, 'from binance_trading_bot import utilities, visual, indicator\n'), ((4124, 4288), 'binance_trading_bot.visual.candlestick2_ohlc', 'visual.candlestick2_ohlc', (['ax', "(0 * candles['assetVolume'])", "candles['assetVolume']", "(0 * candles['assetVolume'])", "candles['assetVolume']"], {'width': '(0.6)', 'alpha': '(0.35)'}), "(ax, 0 * candles['assetVolume'], candles[\n 'assetVolume'], 0 * candles['assetVolume'], candles['assetVolume'],\n width=0.6, alpha=0.35)\n", (4148, 4288), False, 'from binance_trading_bot import utilities, visual, indicator\n'), ((4424, 4612), 'binance_trading_bot.visual.candlestick2_ohlc', 'visual.candlestick2_ohlc', (['ax', "(0 * candles['buyAssetVolume'])", "candles['buyAssetVolume']", "(0 * candles['buyAssetVolume'])", "candles['buyAssetVolume']"], {'width': '(0.28)', 'alpha': '(1)', 'shift': '(-0.15)'}), "(ax, 0 * candles['buyAssetVolume'], candles[\n 'buyAssetVolume'], 0 * candles['buyAssetVolume'], candles[\n 'buyAssetVolume'], width=0.28, alpha=1, shift=-0.15)\n", (4448, 4612), False, 'from binance_trading_bot import utilities, visual, indicator\n'), ((4748, 4940), 'binance_trading_bot.visual.candlestick2_ohlc', 'visual.candlestick2_ohlc', (['ax', "candles['sellAssetVolume']", "candles['sellAssetVolume']", "(0 * candles['sellAssetVolume'])", "(0 * candles['sellAssetVolume'])"], {'width': '(0.28)', 'alpha': '(1)', 'shift': 
'(+0.15)'}), "(ax, candles['sellAssetVolume'], candles[\n 'sellAssetVolume'], 0 * candles['sellAssetVolume'], 0 * candles[\n 'sellAssetVolume'], width=0.28, alpha=1, shift=+0.15)\n", (4772, 4940), False, 'from binance_trading_bot import utilities, visual, indicator\n'), ((5826, 5996), 'binance_trading_bot.visual.candlestick2_ohlc', 'visual.candlestick2_ohlc', (['ax', "(0 * candles['spread'])", "candles['spread']", "(0 * candles['spread'])", "candles['spread']"], {'width': '(0.6)', 'colorup': 'VOLATILITY_COLOR', 'alpha': '(0.35)'}), "(ax, 0 * candles['spread'], candles['spread'], 0 *\n candles['spread'], candles['spread'], width=0.6, colorup=\n VOLATILITY_COLOR, alpha=0.35)\n", (5850, 5996), False, 'from binance_trading_bot import utilities, visual, indicator\n'), ((3709, 3767), 'matplotlib.patches.Patch', 'mpatches.Patch', ([], {'color': 'VOLUME_COLOR', 'label': '"""market-profile"""'}), "(color=VOLUME_COLOR, label='market-profile')\n", (3723, 3767), True, 'import matplotlib.patches as mpatches\n'), ((3786, 3844), 'matplotlib.patches.Patch', 'mpatches.Patch', ([], {'color': 'VSTOP_COLOR', 'label': '"""volatility-stop"""'}), "(color=VSTOP_COLOR, label='volatility-stop')\n", (3800, 3844), True, 'import matplotlib.patches as mpatches\n'), ((3863, 3922), 'matplotlib.patches.Patch', 'mpatches.Patch', ([], {'color': 'BBANDS_COLOR', 'label': '"""bollinger-bands"""'}), "(color=BBANDS_COLOR, label='bollinger-bands')\n", (3877, 3922), True, 'import matplotlib.patches as mpatches\n'), ((3941, 3996), 'matplotlib.patches.Patch', 'mpatches.Patch', ([], {'color': 'SMA_COLOR', 'label': '"""moving-average"""'}), "(color=SMA_COLOR, label='moving-average')\n", (3955, 3996), True, 'import matplotlib.patches as mpatches\n'), ((5365, 5391), 'matplotlib.ticker.FormatStrFormatter', 'FormatStrFormatter', (['"""%.2f"""'], {}), "('%.2f')\n", (5383, 5391), False, 'from matplotlib.ticker import FormatStrFormatter\n'), ((5505, 5555), 'matplotlib.patches.Patch', 'mpatches.Patch', ([], {'color': 'VOLUME_COLOR', 'label': '"""volume"""'}), "(color=VOLUME_COLOR, label='volume')\n", (5519, 5555), True, 'import matplotlib.patches as mpatches\n'), ((5574, 5625), 'matplotlib.patches.Patch', 'mpatches.Patch', ([], {'color': 'BUY_COLOR', 'label': '"""buy-volume"""'}), "(color=BUY_COLOR, label='buy-volume')\n", (5588, 5625), True, 'import matplotlib.patches as mpatches\n'), ((5644, 5697), 'matplotlib.patches.Patch', 'mpatches.Patch', ([], {'color': 'SELL_COLOR', 'label': '"""sell-volume"""'}), "(color=SELL_COLOR, label='sell-volume')\n", (5658, 5697), True, 'import matplotlib.patches as mpatches\n'), ((6578, 6644), 'matplotlib.patches.Patch', 'mpatches.Patch', ([], {'color': 'VOLATILITY_COLOR', 'label': '"""average-true-range"""'}), "(color=VOLATILITY_COLOR, label='average-true-range')\n", (6592, 6644), True, 'import matplotlib.patches as mpatches\n'), ((6663, 6725), 'matplotlib.patches.Patch', 'mpatches.Patch', ([], {'color': 'BBANDS_COLOR', 'label': '"""standard-deviation"""'}), "(color=BBANDS_COLOR, label='standard-deviation')\n", (6677, 6725), True, 'import matplotlib.patches as mpatches\n'), ((8144, 8202), 'matplotlib.patches.Patch', 'mpatches.Patch', ([], {'color': 'RSI_COLOR', 'label': '"""relative-strength"""'}), "(color=RSI_COLOR, label='relative-strength')\n", (8158, 8202), True, 'import matplotlib.patches as mpatches\n'), ((2672, 2707), 'math.isnan', 'math.isnan', (["VSTOP['support'].iat[i]"], {}), "(VSTOP['support'].iat[i])\n", (2682, 2707), False, 'import math\n'), ((2850, 2888), 'math.isnan', 'math.isnan', 
(["VSTOP['resistance'].iat[i]"], {}), "(VSTOP['resistance'].iat[i])\n", (2860, 2888), False, 'import math\n'), ((2732, 2771), 'math.isnan', 'math.isnan', (["VSTOP['support'].iat[i - 1]"], {}), "(VSTOP['support'].iat[i - 1])\n", (2742, 2771), False, 'import math\n'), ((2913, 2955), 'math.isnan', 'math.isnan', (["VSTOP['resistance'].iat[i - 1]"], {}), "(VSTOP['resistance'].iat[i - 1])\n", (2923, 2955), False, 'import math\n')]
|
#!/usr/bin/env python
#------------------------------------------------------------------------------
# JChipClient.py: JChip simulator program for testing JChip interface and CrossMgr.
#
# Copyright (C) <NAME>, 2012.
import os
import time
import xlwt
import socket
import random
import operator
import datetime
#------------------------------------------------------------------------------
# CrossMgr's port and socket.
DEFAULT_PORT = 53135
DEFAULT_HOST = '127.0.0.1'
#------------------------------------------------------------------------------
# JChip delimiter (CR, **not** LF)
CR = u'\r'
NumberOfStarters = 50
#------------------------------------------------------------------------------
# Create some random rider numbers.
random.seed( 10101010 )
nums = [n for n in range(1,799+1)]
random.shuffle( nums )
nums = nums[:NumberOfStarters]
#------------------------------------------------------------------------------
# Create a JChip-style hex tag for each number.
tag = {n: u'41AA%03X' % n for n in nums }
tag[random.choice(list(tag.keys()))] = u'E2001018860B01290700D0D8'
tag[random.choice(list(tag.keys()))] = u'E2001018860B01530700D138'
tag[random.choice(list(tag.keys()))] = u'E2001018860B01370700D0F8'
tag[random.choice(list(tag.keys()))] = u'1'
tag[random.choice(list(tag.keys()))] = u'2'
#------------------------------------------------------------------------
def getRandomData( starters ):
firstNames = '''
1. Noah
2. Liam
3. Jacob
4. Mason
5. William
6. Ethan
7. Michael
8. Alexander
9. Jayden
10. Daniel
11. Elijah
12. Aiden
13. James
14. Benjamin
15. Matthew
16. Jackson
17. Logan
18. David
19. Anthony
20. Joseph
21. Joshua
22. Andrew
23. Lucas
24. Gabriel
25. Samuel
26. Christopher
27. John
28. Dylan
29. Isaac
30. Ryan
31. Nathan
32. Carter
33. Caleb
34. Luke
35. Christian
36. Hunter
37. Henry
38. Owen
39. Landon
40. Jack
41. Wyatt
42. Jonathan
43. Eli
44. Isaiah
45. Sebastian
46. Jaxon
47. Julian
48. Brayden
49. Gavin
50. Levi
51. Aaron
52. Oliver
53. Jordan
54. Nicholas
55. Evan
56. Connor
57. Charles
58. Jeremiah
59. Cameron
60. Adrian
61. Thomas
62. Robert
63. Tyler
64. Colton
65. Austin
66. Jace
67. Angel
68. Dominic
69. Josiah
70. Brandon
71. Ayden
72. Kevin
73. Zachary
74. Parker
75. Blake
76. Jose
77. Chase
78. Grayson
79. Jason
80. Ian
81. Bentley
82. Adam
83. Xavier
84. Cooper
85. Justin
86. Nolan
87. Hudson
88. Easton
89. Jase
90. Carson
91. Nathaniel
92. Jaxson
93. Kayden
94. Brody
95. Lincoln
96. Luis
97. Tristan
98. Damian
99. Camden
100. Juan
'''
lastNames = '''
1. Smith
2. Johnson
3. Williams
4. Jones
5. Brown
6. Davis
7. Miller
8. Wilson
9. Moore
10. Taylor
11. Anderson
12. Thomas
13. Jackson
14. White
15. Harris
16. Martin
17. Thompson
18. Garcia
19. Martinez
20. Robinson
21. Clark
22. Rodriguez
23. Lewis
24. Lee
25. Walker
26. Hall
27. Allen
28. Young
29. Hernandez
30. King
31. Wright
32. Lopez
33. Hill
34. Scott
35. Green
36. Adams
37. Baker
38. Gonzalez
39. Nelson
40. Carter
41. Mitchell
42. Perez
43. Roberts
44. Turner
45. Phillips
46. Campbell
47. Parker
48. Evans
49. Edwards
50. Collins
51. Stewart
52. Sanchez
53. Morris
54. Rogers
55. Reed
56. Cook
57. Morgan
58. Bell
59. Murphy
60. Bailey
61. Rivera
62. Cooper
63. Richardson
64. Cox
65. Howard
66. Ward
67. Torres
68. Peterson
69. Gray
70. Ramirez
71. James
72. Watson
73. Brooks
74. Kelly
75. Sanders
76. Price
77. Bennett
78. Wood
79. Barnes
80. Ross
81. Henderson
82. Coleman
83. Jenkins
84. Perry
85. Powell
86. Long
87. Patterson
88. Hughes
89. Flores
90. Washington
91. Butler
92. Simmons
93. Foster
94. Gonzales
95. Bryant
96. Alexander
97. Russell
98. Griffin
99. Diaz
'''
teams = '''
The Cyclomaniacs
Pesky Peddlers
Geared Up
Spoke & Mirrors
The Handlebar Army
Wheels of Steel
The Chaingang
Saddled & Addled
The Cyclepaths
Tour de Farce
Old Cranks
Magically Bikelicious
Look Ma No Hands!
Pedal Pushers
Kicking Asphault
Velociposse
Road Rascals
Spin Doctors
'''
firstNames = [line.split('.')[1].strip() for line in firstNames.split('\n') if line.strip()]
lastNames = [line.split('.')[1].strip() for line in lastNames.split('\n') if line.strip()]
teams = [line.strip() for line in teams.split('\n') if line.strip()]
bibs = [n for n in range(1,1+starters)]
random.shuffle( firstNames )
random.shuffle( lastNames )
random.shuffle( teams )
for i in range(starters):
yield bibs[i], firstNames[i%len(firstNames)], lastNames[i%len(lastNames)], teams[i%len(teams)]
#------------------------------------------------------------------------------
# Write out as a .xls file with the number tag data.
#
wb = xlwt.Workbook()
ws = wb.add_sheet( "JChipTest" )
for col, label in enumerate(u'Bib#,LastName,FirstName,Team,Tag,StartTime'.split(',')):
ws.write( 0, col, label )
rdata = [d for d in getRandomData(len(tag))]
rowCur = 1
for r, (n, t) in enumerate(tag.items()):
if t in ('1', '2'):
continue
bib, firstName, lastName, Team = rdata[r]
for c, v in enumerate([n, lastName, firstName, Team, t, 5*rowCur/(24.0*60.0*60.0)]):
ws.write( rowCur, c, v )
rowCur += 1
wb.save('JChipTest.xls')
wb = None
#------------------------------------------------------------------------------
# Also write out as a .csv file.
#
with open('JChipTest.csv', 'w') as f:
f.write( u'Bib#,Tag,dummy3,dummy4,dummy5\n' )
for n in nums:
f.write( '{},{}\n'.format(n, tag[n]) )
sendDate = True
#------------------------------------------------------------------------------
# Function to format number, lap and time in JChip format
# Z413A35 10:11:16.4433 10 10000 C7
count = 0
def formatMessage( n, lap, t ):
global count
message = u"DJ%s %s 10 %05X C7%s%s" % (
tag[n], # Tag code
t.strftime('%H:%M:%S.%f'), # hh:mm:ss.ff
count, # Data index number in hex.
' date={}'.format( t.strftime('%Y%m%d') ) if sendDate else '',
CR
)
count += 1
return message
#------------------------------------------------------------------------------
# Generate some random lap times.
random.seed()
numLapTimes = []
mean = 60.0 # Average lap time.
varFactor = 9.0 * 4.0
var = mean / varFactor # Variance between riders.
lapMax = 61
for n in nums:
lapTime = random.normalvariate( mean, mean/(varFactor * 4.0) )
for lap in range(0, lapMax+1):
numLapTimes.append( (n, lap, lapTime*lap) )
numLapTimes.sort( key = operator.itemgetter(1, 2) ) # Sort by lap, then race time.
#------------------------------------------------------------------------------
# Create a socket (SOCK_STREAM means a TCP socket)
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
#------------------------------------------------------------------------------
# Connect to the CrossMgr server.
iMessage = 1
while 1:
print( u'Trying to connect to server...' )
while 1:
try:
sock.connect((DEFAULT_HOST, DEFAULT_PORT))
break
except:
print( u'Connection failed. Waiting 5 seconds...' )
time.sleep( 5 )
#------------------------------------------------------------------------------
print( u'Connection succeeded!' )
name = u'{}-{}'.format(socket.gethostname(), os.getpid())
print( u'Sending name...', name )
message = u"N0000{}{}".format(name, CR)
sock.send( message.encode() )
#------------------------------------------------------------------------------
print( u'Waiting for get time command...' )
while 1:
received = sock.recv(1).decode()
if received == u'G':
while received[-1] != CR:
received += sock.recv(1).decode()
print( u'Received cmd: "%s" from CrossMgr' % received[:-1] )
break
#------------------------------------------------------------------------------
dBase = datetime.datetime.now()
dBase -= datetime.timedelta( seconds = 13*60+13.13 ) # Send the wrong time for testing purposes.
#------------------------------------------------------------------------------
print( u'Send gettime data...' )
# format is GT0HHMMSShh<CR> where hh is 100's of a second. The '0' (zero) after GT is the number of days running and is ignored by CrossMgr.
message = u'GT0%02d%02d%02d%02d%s%s' % (
dBase.hour, dBase.minute, dBase.second, int((dBase.microsecond / 1000000.0) * 100.0),
u' date={}'.format( dBase.strftime('%Y%m%d') ) if sendDate else u'',
CR)
print( message[:-1] )
sock.send( message.encode() )
#------------------------------------------------------------------------------
print( u'Waiting for send command from CrossMgr...' )
while 1:
received = sock.recv(1).decode()
if received == u'S':
while received[-1] != CR:
received += sock.recv(1).decode()
print( u'Received cmd: "%s" from CrossMgr' % received[:-1] )
break
#------------------------------------------------------------------------------
print( u'Start sending data...' )
while iMessage < len(numLapTimes):
n, lap, t = numLapTimes[iMessage]
dt = t - numLapTimes[iMessage-1][2]
time.sleep( dt )
message = formatMessage( n, lap, dBase + datetime.timedelta(seconds = t - 0.5) )
if iMessage & 15 == 0:
print( u'sending: {}: {}\n'.format(iMessage, message[:-1]) )
try:
sock.send( message.encode() )
iMessage += 1
except:
print( u'Disconnected. Attempting to reconnect...' )
break
if iMessage >= len(numLapTimes):
message = u'<<<GarbageTerminateMessage>>>' + CR
sock.send( message.encode() )
break
|
[
"xlwt.Workbook",
"os.getpid",
"random.normalvariate",
"random.shuffle",
"socket.socket",
"time.sleep",
"socket.gethostname",
"random.seed",
"datetime.timedelta",
"operator.itemgetter",
"datetime.datetime.now"
] |
[((742, 763), 'random.seed', 'random.seed', (['(10101010)'], {}), '(10101010)\n', (753, 763), False, 'import random\n'), ((801, 821), 'random.shuffle', 'random.shuffle', (['nums'], {}), '(nums)\n', (815, 821), False, 'import random\n'), ((4773, 4788), 'xlwt.Workbook', 'xlwt.Workbook', ([], {}), '()\n', (4786, 4788), False, 'import xlwt\n'), ((6178, 6191), 'random.seed', 'random.seed', ([], {}), '()\n', (6189, 6191), False, 'import random\n'), ((6714, 6763), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (6727, 6763), False, 'import socket\n'), ((4419, 4445), 'random.shuffle', 'random.shuffle', (['firstNames'], {}), '(firstNames)\n', (4433, 4445), False, 'import random\n'), ((4449, 4474), 'random.shuffle', 'random.shuffle', (['lastNames'], {}), '(lastNames)\n', (4463, 4474), False, 'import random\n'), ((4478, 4499), 'random.shuffle', 'random.shuffle', (['teams'], {}), '(teams)\n', (4492, 4499), False, 'import random\n'), ((6360, 6412), 'random.normalvariate', 'random.normalvariate', (['mean', '(mean / (varFactor * 4.0))'], {}), '(mean, mean / (varFactor * 4.0))\n', (6380, 6412), False, 'import random\n'), ((7815, 7838), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (7836, 7838), False, 'import datetime\n'), ((7849, 7892), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': '(13 * 60 + 13.13)'}), '(seconds=13 * 60 + 13.13)\n', (7867, 7892), False, 'import datetime\n'), ((6515, 6540), 'operator.itemgetter', 'operator.itemgetter', (['(1)', '(2)'], {}), '(1, 2)\n', (6534, 6540), False, 'import operator\n'), ((7245, 7265), 'socket.gethostname', 'socket.gethostname', ([], {}), '()\n', (7263, 7265), False, 'import socket\n'), ((7267, 7278), 'os.getpid', 'os.getpid', ([], {}), '()\n', (7276, 7278), False, 'import os\n'), ((9038, 9052), 'time.sleep', 'time.sleep', (['dt'], {}), '(dt)\n', (9048, 9052), False, 'import time\n'), ((7087, 7100), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (7097, 7100), False, 'import time\n'), ((9101, 9136), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': '(t - 0.5)'}), '(seconds=t - 0.5)\n', (9119, 9136), False, 'import datetime\n')]
|
from fastapi import APIRouter
from backend.auth.login import router
auth_routers = APIRouter()
auth_routers.include_router(router, tags=["Auth"])
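# Usage sketch (hedged, not part of the original module): the aggregated router
# would typically be mounted on the application object, e.g.
#   from fastapi import FastAPI
#   app = FastAPI()
#   app.include_router(auth_routers)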
|
[
"fastapi.APIRouter"
] |
[((86, 97), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (95, 97), False, 'from fastapi import APIRouter\n')]
|
#
# This file is part of the LibreOffice project.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
# This file incorporates work covered by the following license notice:
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed
# with this work for additional information regarding copyright
# ownership. The ASF licenses this file to you under the Apache
# License, Version 2.0 (the "License"); you may not use this file
# except in compliance with the License. You may obtain a copy of
# the License at http://www.apache.org/licenses/LICENSE-2.0 .
#
from com.sun.star.beans import PropertyValue
'''
Simplifies handling Arrays of PropertyValue.
To make a use of this class, instantiate it, and call
the put(propName,propValue) method.
caution: propName should always be a String.
When finished, call the getProperties() method to get an array of the set properties.
'''
class Properties(dict):
@classmethod
def getPropertyValue(self, props, propName):
for i in props:
if propName == i.Name:
return i.Value
raise AttributeError ("Property '" + propName + "' not found.")
@classmethod
def hasPropertyValue(self, props, propName):
for i in props:
if propName == i.Name:
return True
return False
@classmethod
def getProperties(self, _map):
pv = []
for k,v in _map.items():
pv.append(self.createProperty(k, v))
return pv
@classmethod
def createProperty(self, name, value, handle=None):
pv = PropertyValue()
pv.Name = name
pv.Value = value
if handle is not None:
pv.Handle = handle
return pv
def getProperties1(self):
return self.getProperties(self)
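# Usage sketch (hypothetical property names, not part of the original module):
#   props = Properties()
#   props["Hidden"] = True                 # dict-style assignment stores the pair
#   pv_list = props.getProperties1()       # list of PropertyValue with Name/Value set
#   Properties.getPropertyValue(pv_list, "Hidden")   # -> True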
|
[
"com.sun.star.beans.PropertyValue"
] |
[((1805, 1820), 'com.sun.star.beans.PropertyValue', 'PropertyValue', ([], {}), '()\n', (1818, 1820), False, 'from com.sun.star.beans import PropertyValue\n')]
|
from django.contrib import admin
from geartracker.models import *
class ItemAdmin(admin.ModelAdmin):
prepopulated_fields = {"slug": ("make", "model", "size")}
list_display = ('__unicode__', 'type', 'metric_weight', 'acquired')
list_filter = ('archived', 'category', 'type', 'make')
search_fields = ('make', 'model')
filter_horizontal = ('related',)
admin.site.register(Item, ItemAdmin)
class CategoryAdmin(admin.ModelAdmin):
prepopulated_fields = {"slug": ("name",)}
list_display = ('__unicode__', 'number_items')
admin.site.register(Category, CategoryAdmin)
class TypeAdmin(admin.ModelAdmin):
prepopulated_fields = {"slug": ("name",)}
list_display = ('category', 'name', 'number_items')
list_filter = ('category',)
admin.site.register(Type, TypeAdmin)
class ListItemRelationshipInline(admin.TabularInline):
model = ListItem
extra = 1
class ListAdmin(admin.ModelAdmin):
prepopulated_fields = {"slug": ("name",)}
inlines = (ListItemRelationshipInline,)
list_display = ('name', 'total_metric_weight', 'start_date', 'end_date',
'public')
list_filter = ('public',)
admin.site.register(List, ListAdmin)
|
[
"django.contrib.admin.site.register"
] |
[((371, 407), 'django.contrib.admin.site.register', 'admin.site.register', (['Item', 'ItemAdmin'], {}), '(Item, ItemAdmin)\n', (390, 407), False, 'from django.contrib import admin\n'), ((546, 590), 'django.contrib.admin.site.register', 'admin.site.register', (['Category', 'CategoryAdmin'], {}), '(Category, CategoryAdmin)\n', (565, 590), False, 'from django.contrib import admin\n'), ((762, 798), 'django.contrib.admin.site.register', 'admin.site.register', (['Type', 'TypeAdmin'], {}), '(Type, TypeAdmin)\n', (781, 798), False, 'from django.contrib import admin\n'), ((1155, 1191), 'django.contrib.admin.site.register', 'admin.site.register', (['List', 'ListAdmin'], {}), '(List, ListAdmin)\n', (1174, 1191), False, 'from django.contrib import admin\n')]
|
from datetime import date
from typing import Final, Generator, Sequence, cast, Iterable, Mapping, Optional, Union, List
from json import loads
from requests import Response, Session
from tenacity import retry, stop_after_attempt
from pandas import DataFrame
from ._model import (
EpiRangeLike,
AEpiDataCall,
EpiDataFormatType,
EpiDataResponse,
EpiRange,
EpidataFieldInfo,
OnlySupportsClassicFormatException,
add_endpoint_to_url,
)
from ._endpoints import AEpiDataEndpoints
from ._constants import HTTP_HEADERS, BASE_URL
from ._covidcast import CovidcastDataSources, define_covidcast_fields
@retry(reraise=True, stop=stop_after_attempt(2))
def _request_with_retry(
url: str, params: Mapping[str, str], session: Optional[Session] = None, stream: bool = False
) -> Response:
"""Make request with a retry if an exception is thrown."""
def call_impl(s: Session) -> Response:
res = s.get(url, params=params, headers=HTTP_HEADERS, stream=stream)
if res.status_code == 414:
return s.post(url, params=params, headers=HTTP_HEADERS, stream=stream)
return res
if session:
return call_impl(session)
with Session() as s:
return call_impl(s)
class EpiDataCall(AEpiDataCall):
"""
epidata call representation
"""
_session: Final[Optional[Session]]
def __init__(
self,
base_url: str,
session: Optional[Session],
endpoint: str,
params: Mapping[str, Union[None, EpiRangeLike, Iterable[EpiRangeLike]]],
meta: Optional[Sequence[EpidataFieldInfo]] = None,
only_supports_classic: bool = False,
) -> None:
super().__init__(base_url, endpoint, params, meta, only_supports_classic)
self._session = session
def with_base_url(self, base_url: str) -> "EpiDataCall":
return EpiDataCall(base_url, self._session, self._endpoint, self._params)
def with_session(self, session: Session) -> "EpiDataCall":
return EpiDataCall(self._base_url, session, self._endpoint, self._params)
def _call(
self,
format_type: Optional[EpiDataFormatType] = None,
fields: Optional[Iterable[str]] = None,
stream: bool = False,
) -> Response:
url, params = self.request_arguments(format_type, fields)
return _request_with_retry(url, params, self._session, stream)
def classic(
self, fields: Optional[Iterable[str]] = None, disable_date_parsing: Optional[bool] = False
) -> EpiDataResponse:
"""Request and parse epidata in CLASSIC message format."""
self._verify_parameters()
try:
response = self._call(None, fields)
r = cast(EpiDataResponse, response.json())
epidata = r.get("epidata")
if epidata and isinstance(epidata, list) and len(epidata) > 0 and isinstance(epidata[0], dict):
r["epidata"] = [self._parse_row(row, disable_date_parsing=disable_date_parsing) for row in epidata]
return r
except Exception as e: # pylint: disable=broad-except
return {"result": 0, "message": f"error: {e}", "epidata": []}
def __call__(
self, fields: Optional[Iterable[str]] = None, disable_date_parsing: Optional[bool] = False
) -> EpiDataResponse:
"""Request and parse epidata in CLASSIC message format."""
return self.classic(fields, disable_date_parsing=disable_date_parsing)
def json(
self, fields: Optional[Iterable[str]] = None, disable_date_parsing: Optional[bool] = False
) -> List[Mapping[str, Union[str, int, float, date, None]]]:
"""Request and parse epidata in JSON format"""
if self.only_supports_classic:
raise OnlySupportsClassicFormatException()
self._verify_parameters()
response = self._call(EpiDataFormatType.json, fields)
response.raise_for_status()
return [
self._parse_row(row, disable_date_parsing=disable_date_parsing)
for row in cast(List[Mapping[str, Union[str, int, float, None]]], response.json())
]
def df(self, fields: Optional[Iterable[str]] = None, disable_date_parsing: Optional[bool] = False) -> DataFrame:
"""Request and parse epidata as a pandas data frame"""
if self.only_supports_classic:
raise OnlySupportsClassicFormatException()
self._verify_parameters()
r = self.json(fields, disable_date_parsing=disable_date_parsing)
return self._as_df(r, fields, disable_date_parsing=disable_date_parsing)
def csv(self, fields: Optional[Iterable[str]] = None) -> str:
"""Request and parse epidata in CSV format"""
if self.only_supports_classic:
raise OnlySupportsClassicFormatException()
self._verify_parameters()
response = self._call(EpiDataFormatType.csv, fields)
response.raise_for_status()
return response.text
def iter(
self, fields: Optional[Iterable[str]] = None, disable_date_parsing: Optional[bool] = False
) -> Generator[Mapping[str, Union[str, int, float, date, None]], None, Response]:
"""Request and streams epidata rows"""
if self.only_supports_classic:
raise OnlySupportsClassicFormatException()
self._verify_parameters()
response = self._call(EpiDataFormatType.jsonl, fields, stream=True)
response.raise_for_status()
for line in response.iter_lines():
yield self._parse_row(loads(line), disable_date_parsing=disable_date_parsing)
return response
def __iter__(self) -> Generator[Mapping[str, Union[str, int, float, date, None]], None, Response]:
return self.iter()
class EpiDataContext(AEpiDataEndpoints[EpiDataCall]):
"""
sync epidata call class
"""
_base_url: Final[str]
_session: Final[Optional[Session]]
def __init__(self, base_url: str = BASE_URL, session: Optional[Session] = None) -> None:
super().__init__()
self._base_url = base_url
self._session = session
def with_base_url(self, base_url: str) -> "EpiDataContext":
return EpiDataContext(base_url, self._session)
def with_session(self, session: Session) -> "EpiDataContext":
return EpiDataContext(self._base_url, session)
def _create_call(
self,
endpoint: str,
params: Mapping[str, Union[None, EpiRangeLike, Iterable[EpiRangeLike]]],
meta: Optional[Sequence[EpidataFieldInfo]] = None,
only_supports_classic: bool = False,
) -> EpiDataCall:
return EpiDataCall(self._base_url, self._session, endpoint, params, meta, only_supports_classic)
Epidata = EpiDataContext()
def CovidcastEpidata(base_url: str = BASE_URL, session: Optional[Session] = None) -> CovidcastDataSources[EpiDataCall]:
url = add_endpoint_to_url(base_url, "covidcast/meta")
meta_data_res = _request_with_retry(url, {}, session, False)
meta_data_res.raise_for_status()
meta_data = meta_data_res.json()
def create_call(params: Mapping[str, Union[None, EpiRangeLike, Iterable[EpiRangeLike]]]) -> EpiDataCall:
return EpiDataCall(base_url, session, "covidcast", params, define_covidcast_fields())
return CovidcastDataSources.create(meta_data, create_call)
__all__ = ["Epidata", "EpiDataCall", "EpiDataContext", "EpiRange", "CovidcastEpidata"]
|
[
"requests.Session",
"tenacity.stop_after_attempt",
"json.loads"
] |
[((1194, 1203), 'requests.Session', 'Session', ([], {}), '()\n', (1201, 1203), False, 'from requests import Response, Session\n'), ((652, 673), 'tenacity.stop_after_attempt', 'stop_after_attempt', (['(2)'], {}), '(2)\n', (670, 673), False, 'from tenacity import retry, stop_after_attempt\n'), ((5532, 5543), 'json.loads', 'loads', (['line'], {}), '(line)\n', (5537, 5543), False, 'from json import loads\n')]
|
import json
import os
from logging import getLogger
from pathlib import Path
from scplint.statement import Statement
logger = getLogger()
class SCP:
def __init__(self, scp: dict, filename: str = 'my_scp',
size_max: int = 5120, minimize: bool = False):
logger.debug('initialize scp')
self.scp = scp
self.file = filename
self.minimized = minimize
self.statements = self._get_statements()
logger.debug('get scp metrics')
self.size = self._get_size(min=minimize)
self.size_max = size_max
self.percent = self._get_percent(self.size)
self.actions = self._get_actions()
self.notactions = self._get_notactions()
def _get_statements(self) -> list:
'''
'''
logger.debug('Get every Statement from the SCP')
statements = []
for statement in self.scp.get('Statement', []):
statements.append(Statement(statement))
return statements
def _get_actions(self) -> list:
'''
'''
logger.debug('Get every Action from the SCP')
actions = []
for statement in self.statements:
logger.info(statement.actions)
actions += statement.actions
logger.info(actions)
logger.info(len(actions))
return actions
def _get_notactions(self) -> list:
'''
'''
logger.debug('Get every NotAction from the SCP')
notactions = []
for statement in self.statements:
notactions += statement.notactions
return notactions
def _get_size(self, min: bool = False) -> int:
''' checks the actual size of the json policy in bytes as aws
does it if you create/update a scp
Args:
min (bool): True if policy should be minimized before calculating
the size.
Returns:
scp_bytes (int): the size of the scp in bytes as int
'''
logger.debug('Get the size in bytes of the SCP (minimized=%s)', min)
if min:
scp_bytes = len(self.minimize().encode('utf-8'))
else:
scp_bytes = len(json.dumps(self.scp, indent=4).encode('utf-8'))
return scp_bytes
def _get_percent(self, size: int, precision: int = 1) -> float:
''' check the actual size of the minimized json policy as percentage
against the maximum policy size of aws
Args:
size (int): the size of the policy in bytes
precision (int): the precision of the percentage value
Returns:
percent (float): the size of the scp as percentage value
'''
logger.debug('Get the size in percent of the SCP')
percent = round(100 / 5120 * size, precision)
return percent
def minimize(self) -> str:
''' convert the json scp into a minifed str (remove blanks, tabs and
linebreaks)
Returns:
scp_minified (str): a minified version of the json policy
'''
logger.debug('Format the json policy into a minized text')
scp_minified = json.dumps(self.scp).replace(" ", "")
return scp_minified
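# Usage sketch (assumes a local policy file named my_scp.json; not part of the
# original module):
#   with open('my_scp.json') as fh:
#       scp = SCP(json.load(fh), filename='my_scp.json')
#   print(scp.size, scp.percent, len(scp.actions))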
|
[
"scplint.statement.Statement",
"logging.getLogger",
"json.dumps"
] |
[((128, 139), 'logging.getLogger', 'getLogger', ([], {}), '()\n', (137, 139), False, 'from logging import getLogger\n'), ((951, 971), 'scplint.statement.Statement', 'Statement', (['statement'], {}), '(statement)\n', (960, 971), False, 'from scplint.statement import Statement\n'), ((3148, 3168), 'json.dumps', 'json.dumps', (['self.scp'], {}), '(self.scp)\n', (3158, 3168), False, 'import json\n'), ((2188, 2218), 'json.dumps', 'json.dumps', (['self.scp'], {'indent': '(4)'}), '(self.scp, indent=4)\n', (2198, 2218), False, 'import json\n')]
|
import requests
import pickle
from comdirect_api.auth.auth_service import AuthService
from comdirect_api.service.account_service import AccountService
from comdirect_api.service.depot_service import DepotService
from comdirect_api.service.document_service import DocumentService
from comdirect_api.service.report_service import ReportService
from comdirect_api.service.order_service import OrderService
from comdirect_api.service.instrument_service import InstrumentService
class ComdirectClient:
def __init__(self, client_id, client_secret, import_session=False):
self.api_url = 'https://api.comdirect.de/api'
self.oauth_url = 'https://api.comdirect.de'
if not import_session:
self.session = requests.Session()
self.session.headers.update({
'Accept': 'application/json',
'Content-Type': 'application/json',
})
self.auth_service = AuthService(client_id, client_secret, self.session, self.api_url, self.oauth_url)
else:
if import_session == True:
import_session = 'session.pkl'
with open(import_session, 'rb') as input:
self.session = pickle.load(input)
self.auth_service = pickle.load(input)
self.account_service = AccountService(self.session, self.api_url)
self.depot_service = DepotService(self.session, self.api_url)
self.document_service = DocumentService(self.session, self.api_url)
self.report_service = ReportService(self.session, self.api_url)
self.order_service = OrderService(self.session, self.api_url)
self.instrument_service = InstrumentService(self.session, self.api_url)
def session_export(self, filename = 'session.pkl'):
with open(filename, 'wb') as output:
pickle.dump(self.session, output, pickle.HIGHEST_PROTOCOL)
pickle.dump(self.auth_service, output, pickle.HIGHEST_PROTOCOL)
def fetch_tan(self, zugangsnummer, pin, tan_type=None):
return self.auth_service.fetch_tan(zugangsnummer, pin, tan_type)
def activate_session(self, tan=None):
self.auth_service.activate_session(tan)
def refresh_token(self):
self.auth_service.refresh_token()
def revoke_token(self):
self.auth_service.revoke()
def get_all_balances(self, without_account=False):
"""
4.1.1. Fetch balances from all accounts.
:param without_account: Don't include account object in response
:return: Response object
"""
return self.account_service.get_all_balances(without_account)
def get_balance(self, account_uuid):
"""
4.1.2. Fetch balance for a specific account.
:param account_uuid: Account-ID
:return: Response object
"""
return self.account_service.get_balance(account_uuid)
def get_account_transactions(self, account_uuid, with_account=False, transaction_state='BOTH', paging_count=20,
paging_first=0, min_booking_date=None, max_booking_date=None):
"""
4.1.3. Fetch transactions for a specific account. Not setting a min_booking_date currently limits the result to
the last 180 days.
:param account_uuid: Account-ID
:param with_account: Include account information in the response. Defaults to False
:param transaction_state: 'BOOKED' or 'NOTBOOKED'. Defaults to 'BOTH'
:param paging_count: Number of transactions
:param paging_first: Index of first returned transaction. Only possible for booked transactions
(transaction_state='BOOKED').
:param max_booking_date: max booking date in format YYYY-MM-DD
:param min_booking_date: min booking date in format YYYY-MM-DD
:return: Response object
"""
return self.account_service.get_account_transactions(account_uuid, with_account, transaction_state,
paging_count, paging_first, min_booking_date,
max_booking_date)
def get_all_depots(self):
"""
5.1.2. Fetch information for all depots.
:return: Response object
"""
return self.depot_service.get_all_depots()
def get_depot_positions(self, depot_id, with_depot=True, with_positions=True, with_instrument=False):
"""
5.1.2. Fetch information for a specific depot.
:param depot_id: Depot-ID
:param with_depot: Include depot information in response. Defaults to True.
:param with_positions: Include positions in response. Defaults to True.
:param with_instrument: Include instrument information for positions, ignored if with_positions is False.
Defaults to False.
:return: Response object
"""
return self.depot_service.get_depot_positions(depot_id, with_depot, with_positions, with_instrument)
def get_position(self, depot_id, position_id, with_instrument=False):
"""
5.1.3. Fetch a specific position.
:param depot_id: Depot-ID
:param position_id: Position-ID
:param with_instrument: Include instrument information. Defaults to False.
:return: Response object
"""
return self.depot_service.get_position(depot_id, position_id, with_instrument)
def get_depot_transactions(self, depot_id, with_instrument=False, **kwargs):
"""
5.1.4. Fetch depot transactions, filter parameters can be applied via kwargs
:param depot_id: Depot-ID
:param with_instrument: Include instrument information. Defaults to False.
:key wkn: filter by WKN
:key isin: filter by ISIN
:key instrument_id: filter by instrumentId
:key max_booking_date: filter by booking date, Format "JJJJ-MM-TT"
:key transaction_direction: filter by transactionDirection: {"IN", "OUT"}
:key transaction_type: filter by transactionType: {"BUY", "SELL", "TRANSFER_IN", "TRANSFER_OUT"}
:key booking_status: filter by bookingStatus: {"BOOKED", "NOTBOOKED", "BOTH"}
:key min_transaction_value: filter by min-transactionValue
:key max_transaction_value: filter by max-transactionValue
:return: Response object
"""
return self.depot_service.get_depot_transactions(depot_id, with_instrument, **kwargs)
def get_instrument(self, instrument_id, order_dimensions=False, fund_distribution=False, derivative_data=False, static_data = True):
"""
        6.1.1 Fetch instrument
        order_dimensions: populates the OrderDimension object
        fund_distribution: populates the FundDistribution object if the security is a fund
        derivative_data: populates the DerivativeData object if the security is a derivative
        static_data: if False, the StaticData object is not returned
        :return: Response object
        """
        return self.instrument_service.get_instrument(instrument_id, order_dimensions=order_dimensions, fund_distribution=fund_distribution, derivative_data=derivative_data, static_data=static_data)
def get_order_dimensions(self, **kwargs):
"""
        7.1.1 Fetch order dimensions
        :key instrument_id: filters by instrumentId
        :key wkn: filters by WKN
        :key isin: filters by ISIN
        :key mneomic: filters by mnemonic
        :key venue_id: filters by venueId; the venueId, which must be given as a UUID, can be used to filter for a specific trading venue
        :key side: corresponds to the transaction side; possible filter values are BUY or SELL
        :key order_type: filters by orderType, i.e. the order type (e.g. LIMIT, MARKET or ONE_CANCELS_OTHER)
        :key type: EXCHANGE or OFF selects whether to filter for an exchange (EXCHANGE) or a LiveTrading venue (OFF)
:return: Response object
"""
return self.order_service.get_dimensions(**kwargs)
def get_all_orders(self, depot_id, with_instrument=False, with_executions=True, **kwargs):
"""
        7.1.2 Fetch orders (order book)
:param depot_id: Depot-ID
:param with_instrument: Include instrument information. Defaults to False.
:param with_executions: Include execution information. Defaults to True.
:key order_status: filter by orderStatus: {"OPEN ", "EXECUTED", "SETTLED"...}
:key venue_id: filter by venueId
:key side: filter by side: {"BUY", "SELL"}
:key order_type: filter by orderType
:return: Response object
"""
return self.order_service.get_all_orders(depot_id, with_instrument, with_executions, **kwargs)
def get_order(self, order_id):
"""
        7.1.3 Fetch order (single order)
:param depot_id: Depot-ID
:return: Response object
"""
return self.order_service.get_order(order_id)
def set_order_change_validation(self, order_id, changed_order):
"""
        7.1.5 Create order validation
:param order_id: Order-ID
:param changed_order: Altered order from get_order
        :return: [challenge_id, challenge] (if challenge not necessary: None)
"""
return self.order_service.set_change_validation(order_id, changed_order)
def set_order_change(self, order_id, changed_order, challenge_id, tan=None):
"""
        7.1.11 Change order
:param order_id: Order-ID
:param changed_order: same altered order as for set_change_validation
:param challenge_id: first return value from set_change_validation
        :param tan: tan if necessary
:return: Response object
"""
return self.order_service.set_change(order_id, changed_order, challenge_id, tan)
def get_documents(self, first_index=0, count=1000):
"""
9.1.1. Fetch all documents in the PostBox
:param first_index: Index of the first document, starting with 0. Defaults to 0
:param count: Number of documents to be fetched. Max 1000. Defaults to 1000.
:return: Response object
"""
return self.document_service.get_documents(first_index, count)
def get_document(self, document_id):
"""
9.1.2. Fetch a specific document. The document will be marked as read when fetched.
:param document_id: Document-ID
:return: Document and the content type of the document
"""
return self.document_service.get_document(document_id)
def get_report(self, product_type=None):
"""
10.1.1. Fetch a report for all products
:param product_type: Filter by one or more of ACCOUNT, CARD, DEPOT, LOAN, SAVINGS
(list or comma-separated string)
Defaults to None (all product types without filter)
:return: Response object
"""
return self.report_service.get_report(product_type)
def get(self, endpoint, base_url='https://api.comdirect.de/api', **kwargs):
"""
Sends a generic GET-request to a given endpoint with given parameters
:param endpoint: endpoint without leading slash, e.g. 'banking/clients/clientId/v2/accounts/balances'
:param base_url: base url. Defaults to 'https://api.comdirect.de/api'
:param kwargs: query parameters
:return: Response object
"""
url = '{0}/{1}'.format(base_url, endpoint)
return self.session.get(url, params=kwargs).json()
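# Usage sketch (placeholder credentials; not part of the original module):
#   client = ComdirectClient('my_client_id', 'my_client_secret')
#   client.fetch_tan('my_zugangsnummer', 'my_pin')
#   client.activate_session()            # pass a TAN here if the chosen TAN type requires one
#   balances = client.get_all_balances()
#   client.session_export()              # persists session and auth state to session.pkl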
|
[
"comdirect_api.service.depot_service.DepotService",
"pickle.dump",
"comdirect_api.service.account_service.AccountService",
"requests.Session",
"comdirect_api.service.order_service.OrderService",
"comdirect_api.service.report_service.ReportService",
"comdirect_api.service.instrument_service.InstrumentService",
"comdirect_api.auth.auth_service.AuthService",
"pickle.load",
"comdirect_api.service.document_service.DocumentService"
] |
[((1325, 1367), 'comdirect_api.service.account_service.AccountService', 'AccountService', (['self.session', 'self.api_url'], {}), '(self.session, self.api_url)\n', (1339, 1367), False, 'from comdirect_api.service.account_service import AccountService\n'), ((1397, 1437), 'comdirect_api.service.depot_service.DepotService', 'DepotService', (['self.session', 'self.api_url'], {}), '(self.session, self.api_url)\n', (1409, 1437), False, 'from comdirect_api.service.depot_service import DepotService\n'), ((1470, 1513), 'comdirect_api.service.document_service.DocumentService', 'DocumentService', (['self.session', 'self.api_url'], {}), '(self.session, self.api_url)\n', (1485, 1513), False, 'from comdirect_api.service.document_service import DocumentService\n'), ((1544, 1585), 'comdirect_api.service.report_service.ReportService', 'ReportService', (['self.session', 'self.api_url'], {}), '(self.session, self.api_url)\n', (1557, 1585), False, 'from comdirect_api.service.report_service import ReportService\n'), ((1615, 1655), 'comdirect_api.service.order_service.OrderService', 'OrderService', (['self.session', 'self.api_url'], {}), '(self.session, self.api_url)\n', (1627, 1655), False, 'from comdirect_api.service.order_service import OrderService\n'), ((1690, 1735), 'comdirect_api.service.instrument_service.InstrumentService', 'InstrumentService', (['self.session', 'self.api_url'], {}), '(self.session, self.api_url)\n', (1707, 1735), False, 'from comdirect_api.service.instrument_service import InstrumentService\n'), ((746, 764), 'requests.Session', 'requests.Session', ([], {}), '()\n', (762, 764), False, 'import requests\n'), ((952, 1038), 'comdirect_api.auth.auth_service.AuthService', 'AuthService', (['client_id', 'client_secret', 'self.session', 'self.api_url', 'self.oauth_url'], {}), '(client_id, client_secret, self.session, self.api_url, self.\n oauth_url)\n', (963, 1038), False, 'from comdirect_api.auth.auth_service import AuthService\n'), ((1850, 1908), 'pickle.dump', 'pickle.dump', (['self.session', 'output', 'pickle.HIGHEST_PROTOCOL'], {}), '(self.session, output, pickle.HIGHEST_PROTOCOL)\n', (1861, 1908), False, 'import pickle\n'), ((1921, 1984), 'pickle.dump', 'pickle.dump', (['self.auth_service', 'output', 'pickle.HIGHEST_PROTOCOL'], {}), '(self.auth_service, output, pickle.HIGHEST_PROTOCOL)\n', (1932, 1984), False, 'import pickle\n'), ((1219, 1237), 'pickle.load', 'pickle.load', (['input'], {}), '(input)\n', (1230, 1237), False, 'import pickle\n'), ((1274, 1292), 'pickle.load', 'pickle.load', (['input'], {}), '(input)\n', (1285, 1292), False, 'import pickle\n')]
|
import tensorflow as tf
import numpy as np
import os
import time
import argparse
import imageio
parser = argparse.ArgumentParser()
parser.add_argument("--training", type=int, default=1, help="training or testing")
parser.add_argument("--testdir", type=str, default=None, help="specify log file dir")
parser.add_argument("--testnum", type=int, default=-1, help="specify file name")
parser.add_argument("--modelnum", type=int, default=-1, help="specify model name")
parser.add_argument("--basePath", type=str, default="", help="specify base path")
parser.add_argument("--batchsize", type=int, default=64, help="set batch size")
parser.add_argument("--epochnum", type=int, default=100, help="set training epochs")
parser.add_argument("--learningrate", type=float, default=0.0001, help="set learning rate")
parser.add_argument("--maxsave", type=int, default=5, help="set saving number")
parser.add_argument("--rrfactor", type=float, default=0.0, help="set factor for rr term")
parser.add_argument("--orthofactor", type=float, default=0.0, help="set factor for orthogonal term")
parser.add_argument("--runfile", type=str, default="run.py", help="specify run file for copy")
args = parser.parse_args()
if (not args.training):
if args.testnum < 0 or args.modelnum < 0:
print("specify --testnum and --modelnum for testing!")
exit()
if args.testdir:
folderpre = args.testdir
else:
folderpre = "default"
BATCH_SIZE = 2
if not args.training:
BATCH_SIZE = 1
CLASS_NUM = 10
EPOCHS = args.epochnum
learningratevalue = args.learningrate
maxToKeep = args.maxsave
epsilon = 1e-6
imagewidth = 28
imageheight = 28
def makedir():
count = 0
currentdir = os.getcwd()+"/"
while os.path.exists(args.basePath+folderpre+"/test_%04d/"%count):
count += 1
targetdir = args.basePath+folderpre+"/test_%04d/"%count
os.makedirs(targetdir)
return targetdir
test_path = makedir()
testf = open(test_path + "testaccuracy.txt",'a+')
trainf = open(test_path + "trainloss.txt",'a+')
timef = open(test_path + "elapsedtime.txt",'a+')
os.system("cp %s %s/%s"%(args.runfile,test_path,args.runfile))
os.system("cp %s %s/%s"%(__file__,test_path,__file__))
# training data
num1, num2 = 0,1
x_train0 = np.reshape(imageio.imread("MNIST/%d.png"%num1),[1,imagewidth*imageheight])
x_train1 = np.reshape(imageio.imread("MNIST/%d.png"%num2),[1,imagewidth*imageheight])
y_train0 = np.zeros([1,10])
y_train1 = np.zeros([1,10])
y_train0[0,num1]=1
y_train1[0,num2]=1
x_train = np.concatenate((x_train0,x_train1),axis=0)
y_train = np.concatenate((y_train0,y_train1),axis=0)
# testing data
x_test0 = np.reshape(imageio.imread("MNIST/%d_test.png"%num1),[1,imagewidth*imageheight])
x_test1 = np.reshape(imageio.imread("MNIST/%d_test.png"%num2),[1,imagewidth*imageheight])
x_test = np.concatenate((x_test0,x_test1),axis=0)
y_test = y_train
TOTALWEIGHT = 0
def weight_variable(name, shape):
var = tf.get_variable(name,shape,initializer = tf.glorot_uniform_initializer())
global TOTALWEIGHT
if len(shape) == 4:
print("Convolutional layer: {}".format(shape))
TOTALWEIGHT += shape[0]*shape[1]*shape[2]*shape[3]
if len(shape) == 2:
print("fully connected layer: {}".format(shape))
TOTALWEIGHT += shape[0]*shape[1]
return var
def bias_variable(name, shape):
global TOTALWEIGHT
TOTALWEIGHT += shape[0]
return tf.get_variable(name,shape,initializer = tf.zeros_initializer())
def conv2d(x, W, padding = 'SAME',strides=[1,1,1,1]):
return tf.nn.conv2d(x, W, strides=strides, padding=padding)
def batch_norm(input, reuse=False, is_training=args.training):
return tf.contrib.layers.batch_norm(input, decay=0.9, center=True, scale=True, epsilon=1e-3,
is_training=is_training, updates_collections=None, scope=tf.get_variable_scope(), reuse = reuse)
def l2_reg_ortho(weight):
reg = tf.constant(0.)
Wshape = weight.get_shape()
if np.size(weight.get_shape().as_list()) == 2:
cols = int(Wshape[1])
else:
cols = int(Wshape[1]*Wshape[2]*Wshape[3])
rows = int(Wshape[0])
w1 = tf.reshape(weight,[-1,cols])
wt = tf.transpose(w1)
m = tf.matmul(wt,w1)
ident = tf.eye(cols,num_columns=cols)
w_tmp = (m - ident)
height = w_tmp.get_shape().as_list()[0]
u = tf.nn.l2_normalize(tf.random_normal([height,1]),dim=0,epsilon=1e-12)
v = tf.nn.l2_normalize(tf.matmul(tf.transpose(w_tmp), u), dim=0,epsilon=1e-12)
u = tf.nn.l2_normalize(tf.matmul(w_tmp, v), dim=0,epsilon=1e-12)
sigma = tf.norm(tf.reshape(tf.keras.backend.dot(tf.transpose(u), tf.matmul(w_tmp, v)),[-1]))
reg+=sigma**2
return reg
x = tf.placeholder(tf.float32, [None,imagewidth*imageheight])
y = tf.placeholder(tf.float32, [None,CLASS_NUM])
lr = tf.placeholder(tf.float32)
# forward pass
W_conv1 = weight_variable("W_conv1",[imagewidth*imageheight,CLASS_NUM])
b_conv1 = bias_variable("b_conv1",[CLASS_NUM])
fcout = tf.matmul(x, W_conv1) + b_conv1
# backward pass
back_input = tf.matmul((fcout-b_conv1),tf.transpose(W_conv1))
prediction = tf.reshape(tf.nn.softmax(fcout),[-1,CLASS_NUM])
# calculate the loss
cross_entropy = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(labels=y,logits=prediction))
loss = cross_entropy
if args.orthofactor != 0:
loss = loss + args.orthofactor*l2_reg_ortho(W_conv1)
if args.rrfactor != 0:
loss = loss + args.rrfactor * tf.reduce_mean(tf.nn.l2_loss(back_input - x))
correct_prediction = tf.equal(tf.argmax(prediction,1), tf.argmax(y,1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
train_step = tf.train.AdamOptimizer(lr).minimize(loss)
# init session
sess = tf.Session()
saver = tf.train.Saver(max_to_keep=maxToKeep)
sess.run(tf.global_variables_initializer())
if args.testnum >= 0 and args.modelnum >=0:
loadpath = args.basePath+folderpre+"/test_%04d/model_%04d.ckpt"%(args.testnum,args.modelnum)
saver.restore(sess,loadpath)
print("Model restored from %s."%(loadpath))
Epochnum = int(np.shape(x_train)[0]/BATCH_SIZE)
def saveModel(test_path, save_no):
saver.save(sess, test_path+'model_%04d.ckpt'%save_no)
msg = 'saved Model %04d.'%save_no
return msg
currenttime = time.time()
testindex = 0
if args.training:
for i in range(EPOCHS * Epochnum):
cross_e,_, trainloss = sess.run([cross_entropy , train_step,loss],feed_dict={x: x_train, y: y_train, lr:learningratevalue})
if i % (Epochnum*100) == 0:
epochindex = int(i/(Epochnum*100))
testaccuracy,outputdata= sess.run([accuracy,back_input],feed_dict={x: x_test, y: y_test})
costtime = time.time()-currenttime
print("EPOCHS: %d, train loss:%f, testing accuracy:%f, time consuming:%f"%(epochindex,trainloss,testaccuracy,costtime))
print("cross_e:%f"%cross_e)
testf.write(str(epochindex)+'\t'+str(testaccuracy)+'\r\n')
trainf.write(str(epochindex)+'\t'+str(trainloss)+'\r\n')
timef.write(str(epochindex)+'\t'+str(costtime)+'\r\n')
if (epochindex+1)%2 == 0:
print(saveModel(test_path,epochindex))
# output test image
outputdata = np.reshape(outputdata,[2,28,28])
resultpath = test_path +"backwardtest_img/"
while not os.path.exists(resultpath):
os.mkdir(resultpath)
for ind in range(2):
imageio.imwrite(resultpath + 'test%d_%04d.png'%(ind,testindex),outputdata[ind].astype(np.uint8))
testindex += 1
currenttime = time.time()
|
[
"os.mkdir",
"argparse.ArgumentParser",
"tensorflow.reshape",
"tensorflow.get_variable_scope",
"numpy.shape",
"tensorflow.matmul",
"tensorflow.nn.conv2d",
"tensorflow.nn.softmax",
"tensorflow.nn.softmax_cross_entropy_with_logits",
"os.path.exists",
"tensorflow.placeholder",
"tensorflow.cast",
"numpy.reshape",
"tensorflow.train.Saver",
"tensorflow.global_variables_initializer",
"imageio.imread",
"tensorflow.eye",
"tensorflow.Session",
"os.system",
"tensorflow.constant",
"tensorflow.transpose",
"tensorflow.random_normal",
"tensorflow.zeros_initializer",
"numpy.concatenate",
"tensorflow.glorot_uniform_initializer",
"os.makedirs",
"tensorflow.argmax",
"os.getcwd",
"numpy.zeros",
"time.time",
"tensorflow.nn.l2_loss",
"tensorflow.train.AdamOptimizer"
] |
[((106, 131), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (129, 131), False, 'import argparse\n'), ((2063, 2129), 'os.system', 'os.system', (["('cp %s %s/%s' % (args.runfile, test_path, args.runfile))"], {}), "('cp %s %s/%s' % (args.runfile, test_path, args.runfile))\n", (2072, 2129), False, 'import os\n'), ((2126, 2184), 'os.system', 'os.system', (["('cp %s %s/%s' % (__file__, test_path, __file__))"], {}), "('cp %s %s/%s' % (__file__, test_path, __file__))\n", (2135, 2184), False, 'import os\n'), ((2398, 2415), 'numpy.zeros', 'np.zeros', (['[1, 10]'], {}), '([1, 10])\n', (2406, 2415), True, 'import numpy as np\n'), ((2426, 2443), 'numpy.zeros', 'np.zeros', (['[1, 10]'], {}), '([1, 10])\n', (2434, 2443), True, 'import numpy as np\n'), ((2491, 2535), 'numpy.concatenate', 'np.concatenate', (['(x_train0, x_train1)'], {'axis': '(0)'}), '((x_train0, x_train1), axis=0)\n', (2505, 2535), True, 'import numpy as np\n'), ((2544, 2588), 'numpy.concatenate', 'np.concatenate', (['(y_train0, y_train1)'], {'axis': '(0)'}), '((y_train0, y_train1), axis=0)\n', (2558, 2588), True, 'import numpy as np\n'), ((2792, 2834), 'numpy.concatenate', 'np.concatenate', (['(x_test0, x_test1)'], {'axis': '(0)'}), '((x_test0, x_test1), axis=0)\n', (2806, 2834), True, 'import numpy as np\n'), ((4678, 4738), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32', '[None, imagewidth * imageheight]'], {}), '(tf.float32, [None, imagewidth * imageheight])\n', (4692, 4738), True, 'import tensorflow as tf\n'), ((4740, 4785), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32', '[None, CLASS_NUM]'], {}), '(tf.float32, [None, CLASS_NUM])\n', (4754, 4785), True, 'import tensorflow as tf\n'), ((4790, 4816), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32'], {}), '(tf.float32)\n', (4804, 4816), True, 'import tensorflow as tf\n'), ((5678, 5690), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (5688, 5690), True, 'import tensorflow as tf\n'), ((5699, 5736), 'tensorflow.train.Saver', 'tf.train.Saver', ([], {'max_to_keep': 'maxToKeep'}), '(max_to_keep=maxToKeep)\n', (5713, 5736), True, 'import tensorflow as tf\n'), ((6215, 6226), 'time.time', 'time.time', ([], {}), '()\n', (6224, 6226), False, 'import time\n'), ((1704, 1769), 'os.path.exists', 'os.path.exists', (["(args.basePath + folderpre + '/test_%04d/' % count)"], {}), "(args.basePath + folderpre + '/test_%04d/' % count)\n", (1718, 1769), False, 'import os\n'), ((1848, 1870), 'os.makedirs', 'os.makedirs', (['targetdir'], {}), '(targetdir)\n', (1859, 1870), False, 'import os\n'), ((2237, 2274), 'imageio.imread', 'imageio.imread', (["('MNIST/%d.png' % num1)"], {}), "('MNIST/%d.png' % num1)\n", (2251, 2274), False, 'import imageio\n'), ((2323, 2360), 'imageio.imread', 'imageio.imread', (["('MNIST/%d.png' % num2)"], {}), "('MNIST/%d.png' % num2)\n", (2337, 2360), False, 'import imageio\n'), ((2624, 2666), 'imageio.imread', 'imageio.imread', (["('MNIST/%d_test.png' % num1)"], {}), "('MNIST/%d_test.png' % num1)\n", (2638, 2666), False, 'import imageio\n'), ((2714, 2756), 'imageio.imread', 'imageio.imread', (["('MNIST/%d_test.png' % num2)"], {}), "('MNIST/%d_test.png' % num2)\n", (2728, 2756), False, 'import imageio\n'), ((3509, 3561), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (['x', 'W'], {'strides': 'strides', 'padding': 'padding'}), '(x, W, strides=strides, padding=padding)\n', (3521, 3561), True, 'import tensorflow as tf\n'), ((3897, 3913), 'tensorflow.constant', 'tf.constant', (['(0.0)'], {}), '(0.0)\n', (3908, 3913), True, 'import tensorflow as tf\n'), ((4121, 4151), 'tensorflow.reshape', 'tf.reshape', (['weight', '[-1, cols]'], {}), '(weight, [-1, cols])\n', (4131, 4151), True, 'import tensorflow as tf\n'), ((4159, 4175), 'tensorflow.transpose', 'tf.transpose', (['w1'], {}), '(w1)\n', (4171, 4175), True, 'import tensorflow as tf\n'), ((4185, 4202), 'tensorflow.matmul', 'tf.matmul', (['wt', 'w1'], {}), '(wt, w1)\n', (4194, 4202), True, 'import tensorflow as tf\n'), ((4214, 4244), 'tensorflow.eye', 'tf.eye', (['cols'], {'num_columns': 'cols'}), '(cols, num_columns=cols)\n', (4220, 4244), True, 'import tensorflow as tf\n'), ((4960, 4981), 'tensorflow.matmul', 'tf.matmul', (['x', 'W_conv1'], {}), '(x, W_conv1)\n', (4969, 4981), True, 'import tensorflow as tf\n'), ((5047, 5068), 'tensorflow.transpose', 'tf.transpose', (['W_conv1'], {}), '(W_conv1)\n', (5059, 5068), True, 'import tensorflow as tf\n'), ((5095, 5115), 'tensorflow.nn.softmax', 'tf.nn.softmax', (['fcout'], {}), '(fcout)\n', (5108, 5115), True, 'import tensorflow as tf\n'), ((5184, 5252), 'tensorflow.nn.softmax_cross_entropy_with_logits', 'tf.nn.softmax_cross_entropy_with_logits', ([], {'labels': 'y', 'logits': 'prediction'}), '(labels=y, logits=prediction)\n', (5223, 5252), True, 'import tensorflow as tf\n'), ((5492, 5516), 'tensorflow.argmax', 'tf.argmax', (['prediction', '(1)'], {}), '(prediction, 1)\n', (5501, 5516), True, 'import tensorflow as tf\n'), ((5517, 5532), 'tensorflow.argmax', 'tf.argmax', (['y', '(1)'], {}), '(y, 1)\n', (5526, 5532), True, 'import tensorflow as tf\n'), ((5559, 5598), 'tensorflow.cast', 'tf.cast', (['correct_prediction', 'tf.float32'], {}), '(correct_prediction, tf.float32)\n', (5566, 5598), True, 'import tensorflow as tf\n'), ((5746, 5779), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (5777, 5779), True, 'import tensorflow as tf\n'), ((1678, 1689), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1687, 1689), False, 'import os\n'), ((4340, 4369), 'tensorflow.random_normal', 'tf.random_normal', (['[height, 1]'], {}), '([height, 1])\n', (4356, 4369), True, 'import tensorflow as tf\n'), ((4500, 4519), 'tensorflow.matmul', 'tf.matmul', (['w_tmp', 'v'], {}), '(w_tmp, v)\n', (4509, 4519), True, 'import tensorflow as tf\n'), ((5614, 5640), 'tensorflow.train.AdamOptimizer', 'tf.train.AdamOptimizer', (['lr'], {}), '(lr)\n', (5636, 5640), True, 'import tensorflow as tf\n'), ((2952, 2983), 'tensorflow.glorot_uniform_initializer', 'tf.glorot_uniform_initializer', ([], {}), '()\n', (2981, 2983), True, 'import tensorflow as tf\n'), ((3419, 3441), 'tensorflow.zeros_initializer', 'tf.zeros_initializer', ([], {}), '()\n', (3439, 3441), True, 'import tensorflow as tf\n'), ((3820, 3843), 'tensorflow.get_variable_scope', 'tf.get_variable_scope', ([], {}), '()\n', (3841, 3843), True, 'import tensorflow as tf\n'), ((4427, 4446), 'tensorflow.transpose', 'tf.transpose', (['w_tmp'], {}), '(w_tmp)\n', (4439, 4446), True, 'import tensorflow as tf\n'), ((6020, 6037), 'numpy.shape', 'np.shape', (['x_train'], {}), '(x_train)\n', (6028, 6037), True, 'import numpy as np\n'), ((7191, 7226), 'numpy.reshape', 'np.reshape', (['outputdata', '[2, 28, 28]'], {}), '(outputdata, [2, 28, 28])\n', (7201, 7226), True, 'import numpy as np\n'), ((7566, 7577), 'time.time', 'time.time', ([], {}), '()\n', (7575, 7577), False, 'import time\n'), ((4594, 4609), 'tensorflow.transpose', 'tf.transpose', (['u'], {}), '(u)\n', (4606, 4609), True, 'import tensorflow as tf\n'), ((4611, 4630), 'tensorflow.matmul', 'tf.matmul', (['w_tmp', 'v'], {}), '(w_tmp, v)\n', (4620, 4630), True, 'import tensorflow as tf\n'), ((5430, 5459), 'tensorflow.nn.l2_loss', 'tf.nn.l2_loss', (['(back_input - x)'], {}), '(back_input - x)\n', (5443, 5459), True, 'import tensorflow as tf\n'), ((6638, 6649), 'time.time', 'time.time', ([], {}), '()\n', (6647, 6649), False, 'import time\n'), ((7302, 7328), 'os.path.exists', 'os.path.exists', (['resultpath'], {}), '(resultpath)\n', (7316, 7328), False, 'import os\n'), ((7346, 7366), 'os.mkdir', 'os.mkdir', (['resultpath'], {}), '(resultpath)\n', (7354, 7366), False, 'import os\n')]
|
import pandas as pd
import io
import pymc3 as pm
import arviz as az
from arviz_json import get_dag, arviz_to_json
#Binomial Logistic Regression Model
#Reference: https://docs.pymc.io/notebooks/putting_workflow.html#Logit-model
#data
golf_data = """distance tries successes
2 1443 1346
3 694 577
4 455 337
5 353 208
6 272 149
7 256 136
8 240 111
9 217 69
10 200 67
11 237 75
12 202 52
13 192 46
14 174 54
15 167 28
16 201 27
17 195 31
18 191 33
19 147 20
20 152 24"""
data = pd.read_csv(io.StringIO(golf_data), sep=" ")
#model-inference
coords = {"distance": data.distance}
fileName='golf_simple_PyMC3'
samples=2000
chains=2
tune=1000
simple_model=pm.Model(coords=coords)
with simple_model:
#to store the n-parameter of Binomial dist
#in the constant group of ArviZ InferenceData
#You should always call it n for imd to retrieve it
n = pm.Data('n', data.tries)
a = pm.Normal('a')
b = pm.Normal('b')
p_goes_in = pm.Deterministic('p_goes_in', pm.math.invlogit(a * data.distance + b), dims='distance')
successes = pm.Binomial('successes', n=n, p=p_goes_in, observed=data.successes, dims='distance')
#inference
# Get posterior trace, prior trace, posterior predictive samples, and the DAG
trace = pm.sample(draws=samples, chains=chains, tune=tune)
prior= pm.sample_prior_predictive(samples=samples)
posterior_predictive = pm.sample_posterior_predictive(trace,samples=samples)
## STEP 1
# will also capture all the sampler statistics
data_s = az.from_pymc3(trace=trace, prior=prior, posterior_predictive=posterior_predictive)
## STEP 2
#dag
dag = get_dag(simple_model)
# insert dag into sampler stat attributes
data_s.sample_stats.attrs["graph"] = str(dag)
## STEP 3
# save data
arviz_to_json(data_s, fileName+'.npz')
|
[
"pymc3.sample",
"io.StringIO",
"pymc3.Model",
"pymc3.Normal",
"arviz.from_pymc3",
"arviz_json.get_dag",
"pymc3.sample_prior_predictive",
"pymc3.Binomial",
"pymc3.Data",
"arviz_json.arviz_to_json",
"pymc3.math.invlogit",
"pymc3.sample_posterior_predictive"
] |
[((649, 672), 'pymc3.Model', 'pm.Model', ([], {'coords': 'coords'}), '(coords=coords)\n', (657, 672), True, 'import pymc3 as pm\n'), ((1504, 1591), 'arviz.from_pymc3', 'az.from_pymc3', ([], {'trace': 'trace', 'prior': 'prior', 'posterior_predictive': 'posterior_predictive'}), '(trace=trace, prior=prior, posterior_predictive=\n posterior_predictive)\n', (1517, 1591), True, 'import arviz as az\n'), ((1610, 1631), 'arviz_json.get_dag', 'get_dag', (['simple_model'], {}), '(simple_model)\n', (1617, 1631), False, 'from arviz_json import get_dag, arviz_to_json\n'), ((1751, 1791), 'arviz_json.arviz_to_json', 'arviz_to_json', (['data_s', "(fileName + '.npz')"], {}), "(data_s, fileName + '.npz')\n", (1764, 1791), False, 'from arviz_json import get_dag, arviz_to_json\n'), ((487, 509), 'io.StringIO', 'io.StringIO', (['golf_data'], {}), '(golf_data)\n', (498, 509), False, 'import io\n'), ((854, 878), 'pymc3.Data', 'pm.Data', (['"""n"""', 'data.tries'], {}), "('n', data.tries)\n", (861, 878), True, 'import pymc3 as pm\n'), ((887, 901), 'pymc3.Normal', 'pm.Normal', (['"""a"""'], {}), "('a')\n", (896, 901), True, 'import pymc3 as pm\n'), ((910, 924), 'pymc3.Normal', 'pm.Normal', (['"""b"""'], {}), "('b')\n", (919, 924), True, 'import pymc3 as pm\n'), ((1045, 1134), 'pymc3.Binomial', 'pm.Binomial', (['"""successes"""'], {'n': 'n', 'p': 'p_goes_in', 'observed': 'data.successes', 'dims': '"""distance"""'}), "('successes', n=n, p=p_goes_in, observed=data.successes, dims=\n 'distance')\n", (1056, 1134), True, 'import pymc3 as pm\n'), ((1239, 1289), 'pymc3.sample', 'pm.sample', ([], {'draws': 'samples', 'chains': 'chains', 'tune': 'tune'}), '(draws=samples, chains=chains, tune=tune)\n', (1248, 1289), True, 'import pymc3 as pm\n'), ((1301, 1344), 'pymc3.sample_prior_predictive', 'pm.sample_prior_predictive', ([], {'samples': 'samples'}), '(samples=samples)\n', (1327, 1344), True, 'import pymc3 as pm\n'), ((1372, 1426), 'pymc3.sample_posterior_predictive', 'pm.sample_posterior_predictive', (['trace'], {'samples': 'samples'}), '(trace, samples=samples)\n', (1402, 1426), True, 'import pymc3 as pm\n'), ((971, 1010), 'pymc3.math.invlogit', 'pm.math.invlogit', (['(a * data.distance + b)'], {}), '(a * data.distance + b)\n', (987, 1010), True, 'import pymc3 as pm\n')]
|
from datagenerationpipeline import dataGenerationPipeline
imageDirectory = "\\\\192.168.1.37\\Multimedia\\datasets\\test\\watches_categories\\1"
fileType=".jpg"
pipeline=dataGenerationPipeline(imageDirectory, fileType)
print("[INFO]: There should be 200 images in directory 1")
pipeline.rotate()
#assert there are 1000 flipped images in directory rotated
pipeline.flip(horizontaly = True)
#assert there are 200 flipped images in directory horFlip
pipeline.flip(verticaly = True)
#assert there are 200 flipped images in directory verFlip
pipeline.flip(horizontaly = True, verticaly = True)
#assert there are 200 flipped images in directory horverFlip
pipeline.skew()
|
[
"datagenerationpipeline.dataGenerationPipeline"
] |
[((172, 220), 'datagenerationpipeline.dataGenerationPipeline', 'dataGenerationPipeline', (['imageDirectory', 'fileType'], {}), '(imageDirectory, fileType)\n', (194, 220), False, 'from datagenerationpipeline import dataGenerationPipeline\n')]
|
from django.core.management.base import BaseCommand
from dashboard.models import User, App, APICall, Webhook, WebhookTriggerHistory
class Command(BaseCommand):
help = 'Cleans Dashboard of everything'
def handle(self, *args, **options):
string = input("THIS WILL WIPE THESE MODELS ARE YOU SURE? "
"TYPE DELETE TO CONFIRM!: ")
if string == "DELETE":
User.objects.all().delete()
App.objects.all().delete()
APICall.objects.all().delete()
Webhook.objects.all().delete()
WebhookTriggerHistory.objects.all().delete()
|
[
"dashboard.models.APICall.objects.all",
"dashboard.models.App.objects.all",
"dashboard.models.User.objects.all",
"dashboard.models.WebhookTriggerHistory.objects.all",
"dashboard.models.Webhook.objects.all"
] |
[((425, 443), 'dashboard.models.User.objects.all', 'User.objects.all', ([], {}), '()\n', (441, 443), False, 'from dashboard.models import User, App, APICall, Webhook, WebhookTriggerHistory\n'), ((466, 483), 'dashboard.models.App.objects.all', 'App.objects.all', ([], {}), '()\n', (481, 483), False, 'from dashboard.models import User, App, APICall, Webhook, WebhookTriggerHistory\n'), ((506, 527), 'dashboard.models.APICall.objects.all', 'APICall.objects.all', ([], {}), '()\n', (525, 527), False, 'from dashboard.models import User, App, APICall, Webhook, WebhookTriggerHistory\n'), ((550, 571), 'dashboard.models.Webhook.objects.all', 'Webhook.objects.all', ([], {}), '()\n', (569, 571), False, 'from dashboard.models import User, App, APICall, Webhook, WebhookTriggerHistory\n'), ((594, 629), 'dashboard.models.WebhookTriggerHistory.objects.all', 'WebhookTriggerHistory.objects.all', ([], {}), '()\n', (627, 629), False, 'from dashboard.models import User, App, APICall, Webhook, WebhookTriggerHistory\n')]
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Helper functions for modules."""
import os
import six
if six.PY2:
import imp # pylint: disable=g-import-not-at-top
else:
import importlib # pylint: disable=g-import-not-at-top
def get_parent_dir(module):
return os.path.abspath(os.path.join(os.path.dirname(module.__file__), ".."))
def get_parent_dir_for_name(module_name):
"""Get parent directory for module with the given name.
Args:
module_name: Module name for e.g.
tensorflow_estimator.python.estimator.api._v1.estimator.
Returns:
Path to the parent directory if module is found and None otherwise.
Given example above, it should return:
/pathtoestimator/tensorflow_estimator/python/estimator/api/_v1.
"""
name_split = module_name.split(".")
if not name_split:
return None
if six.PY2:
try:
spec = imp.find_module(name_split[0])
except ImportError:
return None
if not spec:
return None
base_path = spec[1]
else:
try:
spec = importlib.util.find_spec(name_split[0])
except ValueError:
return None
if not spec or not spec.origin:
return None
base_path = os.path.dirname(spec.origin)
return os.path.join(base_path, *name_split[1:-1])
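# Usage sketch (module name taken from the docstring example above):
#   get_parent_dir_for_name(
#       "tensorflow_estimator.python.estimator.api._v1.estimator")
#   # -> ".../tensorflow_estimator/python/estimator/api/_v1",
#   #    or None if the top-level package cannot be found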
|
[
"importlib.util.find_spec",
"os.path.dirname",
"os.path.join",
"imp.find_module"
] |
[((1862, 1904), 'os.path.join', 'os.path.join', (['base_path', '*name_split[1:-1]'], {}), '(base_path, *name_split[1:-1])\n', (1874, 1904), False, 'import os\n'), ((1824, 1852), 'os.path.dirname', 'os.path.dirname', (['spec.origin'], {}), '(spec.origin)\n', (1839, 1852), False, 'import os\n'), ((943, 975), 'os.path.dirname', 'os.path.dirname', (['module.__file__'], {}), '(module.__file__)\n', (958, 975), False, 'import os\n'), ((1511, 1541), 'imp.find_module', 'imp.find_module', (['name_split[0]'], {}), '(name_split[0])\n', (1526, 1541), False, 'import imp\n'), ((1673, 1712), 'importlib.util.find_spec', 'importlib.util.find_spec', (['name_split[0]'], {}), '(name_split[0])\n', (1697, 1712), False, 'import importlib\n')]
|
'''History for wx GUIs'''
# Author: <NAME> <<EMAIL>>
from logging import getLogger
import os
from typing import Optional, Tuple
import wx
from .help import show_help_txt
from .frame import EelbrainFrame
from .utils import Icon
from . import ID
TEST_MODE = False
class CallBackManager:
def __init__(self, keys):
self._callbacks = {k: [] for k in keys}
def register_key(self, key):
if key in self._callbacks:
raise KeyError("Key already registered")
self._callbacks[key] = []
def callback(self, key, *args):
for cb in self._callbacks[key]:
cb(*args)
def subscribe(self, key, func):
self._callbacks[key].append(func)
def remove(self, key, func):
try:
self._callbacks[key].remove(func)
except ValueError:
getLogger(__name__).debug("Trying to remove %r which is not in callbacks[%r]", func, key)
class Action:
def do(self, doc):
raise NotImplementedError
def undo(self, doc):
raise NotImplementedError
class History:
"""The history as a list of action objects
Public interface
----------------
can_redo() : bool
Whether the history can redo an action.
can_undo() : bool
Whether the history can redo an action.
do(action)
perform a action
is_saved() : bool
Whether the current state is saved
redo()
Redo the latest undone action.
...
"""
def __init__(self, doc):
self.doc = doc
self._history = []
self.callbacks = CallBackManager(('saved_change',))
# point to last executed action (always < 0)
self._last_action_idx = -1
# point to action after which we saved
self._saved_idx = -2 + doc.saved
def can_redo(self):
return self._last_action_idx < -1
def can_undo(self):
return len(self._history) + self._last_action_idx >= 0
def do(self, action):
logger = getLogger(__name__)
logger.debug("Do action: %s", action.desc)
was_saved = self.is_saved()
action.do(self.doc)
if self._last_action_idx < -1:
# discard alternate future
self._history = self._history[:self._last_action_idx + 1]
self._last_action_idx = -1
if self._saved_idx >= len(self._history):
self._saved_idx = -2
self._history.append(action)
self._process_saved_change(was_saved)
def _process_saved_change(self, was_saved):
"""Process a state change in whether all changes are saved
Parameters
----------
was_saved : bool
Whether all changes were saved before the current change happened.
"""
is_saved = self.is_saved()
if is_saved != was_saved:
self.doc.saved = is_saved
self.callbacks.callback('saved_change')
def is_saved(self):
"""Determine whether the document is saved
Returns
-------
is_saved : bool
Whether the document is saved (i.e., contains no unsaved changes).
"""
current_index = len(self._history) + self._last_action_idx
return self._saved_idx == current_index
def redo(self):
was_saved = self.is_saved()
if self._last_action_idx == -1:
raise RuntimeError("We are at the tip of the history")
action = self._history[self._last_action_idx + 1]
logger = getLogger(__name__)
logger.debug("Redo action: %s", action.desc)
action.do(self.doc)
self._last_action_idx += 1
self._process_saved_change(was_saved)
def register_save(self):
"Notify the history that the document is saved at the current state"
was_saved = self.is_saved()
self._saved_idx = len(self._history) + self._last_action_idx
self._process_saved_change(was_saved)
def undo(self):
was_saved = self.is_saved()
if -self._last_action_idx > len(self._history):
raise RuntimeError("We are at the beginning of the history")
action = self._history[self._last_action_idx]
logger = getLogger(__name__)
logger.debug("Undo action: %s", action.desc)
action.undo(self.doc)
self._last_action_idx -= 1
self._process_saved_change(was_saved)
class FileDocument:
"""Represent a file"""
def __init__(self, path):
self.saved = False # managed by the history
self.path = path
self.callbacks = CallBackManager(('path_change',))
def set_path(self, path):
self.path = path
self.callbacks.callback('path_change')
class FileModel:
"""Manages a document as well as its history"""
def __init__(self, doc: FileDocument):
self.doc = doc
self.history = History(doc)
def load(self, path):
raise NotImplementedError
def save(self):
self.doc.save()
self.history.register_save()
def save_as(self, path):
self.doc.set_path(path)
self.save()
class FileFrame(EelbrainFrame):
owns_file = True
_doc_name = 'document'
_name = 'Default' # internal, for config
_title = 'Title' # external, for frame title
_wildcard = "Tab Separated Text (*.txt)|*.txt|Pickle (*.pickle)|*.pickle"
def __init__(
self,
parent: wx.Frame,
pos: Optional[Tuple[int, int]],
size: Optional[Tuple[int, int]],
model: FileModel,
):
"""View object of the epoch selection GUI
Parameters
----------
parent : wx.Frame
Parent window.
others :
See TerminalInterface constructor.
"""
config = wx.Config("Eelbrain Testing" if TEST_MODE else "Eelbrain")
config.SetPath(self._name)
if pos is None:
pos = (config.ReadInt("pos_horizontal", -1),
config.ReadInt("pos_vertical", -1))
if size is None:
size = (config.ReadInt("size_width", 800),
config.ReadInt("size_height", 600))
super(FileFrame, self).__init__(parent, -1, self._title, pos, size)
self.config = config
self.model = model
self.doc = model.doc
self.history = model.history
# Bind Events ---
self.doc.callbacks.subscribe('path_change', self.UpdateTitle)
self.history.callbacks.subscribe('saved_change', self.UpdateTitle)
self.Bind(wx.EVT_CLOSE, self.OnClose)
def InitToolbar(self, can_open=True):
tb = self.CreateToolBar(wx.TB_HORIZONTAL)
tb.SetToolBitmapSize(size=(32, 32))
tb.AddTool(wx.ID_SAVE, "Save", Icon("tango/actions/document-save"),
shortHelp="Save")
self.Bind(wx.EVT_TOOL, self.OnSave, id=wx.ID_SAVE)
tb.AddTool(wx.ID_SAVEAS, "Save As", Icon("tango/actions/document-save-as"),
shortHelp="Save As")
self.Bind(wx.EVT_TOOL, self.OnSaveAs, id=wx.ID_SAVEAS)
if can_open:
tb.AddTool(wx.ID_OPEN, "Load", Icon("tango/actions/document-open"),
shortHelp="Open Rejections")
self.Bind(wx.EVT_TOOL, self.OnOpen, id=wx.ID_OPEN)
tb.AddTool(ID.UNDO, "Undo", Icon("tango/actions/edit-undo"), shortHelp="Undo")
tb.AddTool(ID.REDO, "Redo", Icon("tango/actions/edit-redo"), shortHelp="Redo")
return tb
def InitToolbarTail(self, tb):
tb.AddTool(wx.ID_HELP, 'Help', Icon("tango/apps/help-browser"))
self.Bind(wx.EVT_TOOL, self.OnHelp, id=wx.ID_HELP)
def CanRedo(self):
return self.history.can_redo()
def CanSave(self):
return bool(self.doc.path) and not self.doc.saved
def CanUndo(self):
return self.history.can_undo()
def OnClear(self, event):
self.model.clear()
def OnClose(self, event):
"""Ask to save unsaved changes.
Return True if confirmed so that child windows can unsubscribe from
document model changes.
"""
if self.owns_file and event.CanVeto() and not self.history.is_saved():
self.Raise()
msg = ("The current document has unsaved changes. Would you like "
"to save them?")
cap = "%s: Save Unsaved Changes?" % self._title
style = wx.YES | wx.NO | wx.CANCEL | wx.YES_DEFAULT
cmd = wx.MessageBox(msg, cap, style)
if cmd == wx.YES:
if self.OnSave(event) != wx.ID_OK:
event.Veto()
return
elif cmd == wx.CANCEL:
event.Veto()
return
elif cmd != wx.NO:
raise RuntimeError("Unknown answer: %r" % cmd)
logger = getLogger(__name__)
logger.debug("%s.OnClose()", self.__class__.__name__)
# remove callbacks
self.doc.callbacks.remove('path_change', self.UpdateTitle)
self.history.callbacks.remove('saved_change', self.UpdateTitle)
# save configuration
pos_h, pos_v = self.GetPosition()
w, h = self.GetSize()
self.config.WriteInt("pos_horizontal", pos_h)
self.config.WriteInt("pos_vertical", pos_v)
self.config.WriteInt("size_width", w)
self.config.WriteInt("size_height", h)
self.config.Flush()
event.Skip()
return True
def OnHelp(self, event):
show_help_txt(self.__doc__, self, self._title)
def OnOpen(self, event):
msg = ("Load the %s from a file." % self._doc_name)
if self.doc.path:
default_dir, default_name = os.path.split(self.doc.path)
else:
default_dir = ''
default_name = ''
dlg = wx.FileDialog(self, msg, default_dir, default_name,
self._wildcard, wx.FD_OPEN)
rcode = dlg.ShowModal()
dlg.Destroy()
if rcode != wx.ID_OK:
return rcode
path = dlg.GetPath()
try:
self.model.load(path)
except Exception as ex:
msg = str(ex)
title = "Error Loading %s" % self._doc_name.capitalize()
wx.MessageBox(msg, title, wx.ICON_ERROR)
raise
def OnRedo(self, event):
self.history.redo()
def OnSave(self, event):
if self.doc.path:
self.model.save()
return wx.ID_OK
else:
return self.OnSaveAs(event)
def OnSaveAs(self, event):
msg = ("Save the %s to a file." % self._doc_name)
if self.doc.path:
default_dir, default_name = os.path.split(self.doc.path)
else:
default_dir = ''
default_name = ''
dlg = wx.FileDialog(self, msg, default_dir, default_name,
self._wildcard, wx.FD_SAVE | wx.FD_OVERWRITE_PROMPT)
rcode = dlg.ShowModal()
if rcode == wx.ID_OK:
path = dlg.GetPath()
self.model.save_as(path)
dlg.Destroy()
return rcode
def OnUndo(self, event):
self.history.undo()
def OnUpdateUIClear(self, event):
event.Enable(True)
def OnUpdateUIOpen(self, event):
event.Enable(True)
def OnUpdateUIRedo(self, event):
event.Enable(self.CanRedo())
def OnUpdateUISave(self, event):
event.Enable(self.CanSave())
def OnUpdateUISaveAs(self, event):
event.Enable(True)
def OnUpdateUIUndo(self, event):
event.Enable(self.CanUndo())
def UpdateTitle(self):
is_modified = not self.doc.saved
self.OSXSetModified(is_modified)
title = self._title
if self.doc.path:
title += ': ' + os.path.basename(self.doc.path)
if is_modified:
title = '* ' + title
self.SetTitle(title)
class FileFrameChild(FileFrame):
owns_file = False
|
[
"os.path.basename",
"wx.FileDialog",
"wx.MessageBox",
"wx.Config",
"os.path.split",
"logging.getLogger"
] |
[((1993, 2012), 'logging.getLogger', 'getLogger', (['__name__'], {}), '(__name__)\n', (2002, 2012), False, 'from logging import getLogger\n'), ((3497, 3516), 'logging.getLogger', 'getLogger', (['__name__'], {}), '(__name__)\n', (3506, 3516), False, 'from logging import getLogger\n'), ((4194, 4213), 'logging.getLogger', 'getLogger', (['__name__'], {}), '(__name__)\n', (4203, 4213), False, 'from logging import getLogger\n'), ((5780, 5838), 'wx.Config', 'wx.Config', (["('Eelbrain Testing' if TEST_MODE else 'Eelbrain')"], {}), "('Eelbrain Testing' if TEST_MODE else 'Eelbrain')\n", (5789, 5838), False, 'import wx\n'), ((8827, 8846), 'logging.getLogger', 'getLogger', (['__name__'], {}), '(__name__)\n', (8836, 8846), False, 'from logging import getLogger\n'), ((9802, 9881), 'wx.FileDialog', 'wx.FileDialog', (['self', 'msg', 'default_dir', 'default_name', 'self._wildcard', 'wx.FD_OPEN'], {}), '(self, msg, default_dir, default_name, self._wildcard, wx.FD_OPEN)\n', (9815, 9881), False, 'import wx\n'), ((10794, 10903), 'wx.FileDialog', 'wx.FileDialog', (['self', 'msg', 'default_dir', 'default_name', 'self._wildcard', '(wx.FD_SAVE | wx.FD_OVERWRITE_PROMPT)'], {}), '(self, msg, default_dir, default_name, self._wildcard, wx.\n FD_SAVE | wx.FD_OVERWRITE_PROMPT)\n', (10807, 10903), False, 'import wx\n'), ((8456, 8486), 'wx.MessageBox', 'wx.MessageBox', (['msg', 'cap', 'style'], {}), '(msg, cap, style)\n', (8469, 8486), False, 'import wx\n'), ((9686, 9714), 'os.path.split', 'os.path.split', (['self.doc.path'], {}), '(self.doc.path)\n', (9699, 9714), False, 'import os\n'), ((10677, 10705), 'os.path.split', 'os.path.split', (['self.doc.path'], {}), '(self.doc.path)\n', (10690, 10705), False, 'import os\n'), ((10236, 10276), 'wx.MessageBox', 'wx.MessageBox', (['msg', 'title', 'wx.ICON_ERROR'], {}), '(msg, title, wx.ICON_ERROR)\n', (10249, 10276), False, 'import wx\n'), ((11778, 11809), 'os.path.basename', 'os.path.basename', (['self.doc.path'], {}), '(self.doc.path)\n', (11794, 11809), False, 'import os\n'), ((836, 855), 'logging.getLogger', 'getLogger', (['__name__'], {}), '(__name__)\n', (845, 855), False, 'from logging import getLogger\n')]
|
import os
import re
import json
from typing import Sequence, GenericMeta
from datetime import datetime, date
import dateutil.parser
import inflection
from stevesie import resources
from stevesie.utils import api
DATETIME_FORMAT = '%Y-%m-%dT%H:%M:%S.%fZ'
class RemoteResource(object):
_is_hydrated = False
def set_hydrated(self):
self._is_hydrated = True
def hydrate(self, obj, fetch_remote=True, limit=None):
hydrate_args = {}
for field_name in self._fields:
field_type = self._field_types[field_name]
api_field_name = inflection.camelize(field_name, uppercase_first_letter=False)
field_value = obj.get(api_field_name, obj.get(field_name))
if field_value is not None:
if field_type == datetime:
field_value = dateutil.parser.parse(field_value)
elif issubclass(field_type, RemoteResource):
field_value = field_type().hydrate(field_value, fetch_remote=fetch_remote)
elif issubclass(field_type, Sequence) \
and issubclass(field_type.__class__, GenericMeta):
# TODO - serious debt, can't otherwise figure out the type of a typing.Sequence
sequence_class_string = str(field_type)
match = re.search(r'\[(.*)\]', sequence_class_string)
module_parts = match.group(1).split('.')
if len(module_parts) == 1: # referring to self using string type hack
class_name_match = re.search(r'\(\'(.*)\'\)', module_parts[0])
class_name = class_name_match.group(1)
module_name = inflection.underscore(class_name)
else:
module_name = module_parts[2]
class_name = module_parts[3]
mod = getattr(resources, module_name)
cls = getattr(mod, class_name)
field_value = [cls().hydrate(item, fetch_remote=fetch_remote) \
for item in field_value]
hydrate_args[field_name] = field_value
hydrated = self._replace(**hydrate_args)
hydrated.set_hydrated()
return hydrated
def fetch(self):
api_json = api.get(self.resource_url)
obj = self.parse_api_response(api_json)
return self.hydrate(obj)
def destroy(self):
api.delete(self.resource_url)
def to_json(self, obj=None):
if obj is None:
obj = self
def inner_json(inner_obj):
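            # serialize nested resources and lists of resources; return plain values unchanged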
if isinstance(inner_obj, list):
return [self.to_json(o) for o in inner_obj]
if isinstance(inner_obj, RemoteResource):
return self.to_json(inner_obj)
return inner_obj
if hasattr(obj, 'collection_type') and obj.collection_type is not None:
# little hack for implicit remote resource collection
return [inner_json(value) for value in obj.items()]
return {key: inner_json(value) for key, value in obj._asdict().items()}
def save_to_file(self, local_filename):
def serialize(obj):
if isinstance(obj, (datetime, date)):
return obj.strftime(DATETIME_FORMAT)
raise TypeError('Cannot serialize %s' % type(obj))
with open(os.path.expanduser(local_filename), 'w') as file:
json.dump(self.to_json(), file, default=serialize)
def load_from_file(self, local_filename):
with open(os.path.expanduser(local_filename)) as file:
obj = json.load(file)
return self.hydrate(obj)
def parse_api_response(self, api_json):
return api_json['item']
@property
def resource_params(self):
return {}
@property
def is_hydrated(self):
return self._is_hydrated
@property
def resource_path(self):
pass
@property
def resource_url(self):
return api.BASE_URL_PATH + self.resource_path
|
[
"inflection.underscore",
"json.load",
"stevesie.utils.api.get",
"stevesie.utils.api.delete",
"re.search",
"os.path.expanduser",
"inflection.camelize"
] |
[((2341, 2367), 'stevesie.utils.api.get', 'api.get', (['self.resource_url'], {}), '(self.resource_url)\n', (2348, 2367), False, 'from stevesie.utils import api\n'), ((2481, 2510), 'stevesie.utils.api.delete', 'api.delete', (['self.resource_url'], {}), '(self.resource_url)\n', (2491, 2510), False, 'from stevesie.utils import api\n'), ((587, 648), 'inflection.camelize', 'inflection.camelize', (['field_name'], {'uppercase_first_letter': '(False)'}), '(field_name, uppercase_first_letter=False)\n', (606, 648), False, 'import inflection\n'), ((3654, 3669), 'json.load', 'json.load', (['file'], {}), '(file)\n', (3663, 3669), False, 'import json\n'), ((3413, 3447), 'os.path.expanduser', 'os.path.expanduser', (['local_filename'], {}), '(local_filename)\n', (3431, 3447), False, 'import os\n'), ((3591, 3625), 'os.path.expanduser', 'os.path.expanduser', (['local_filename'], {}), '(local_filename)\n', (3609, 3625), False, 'import os\n'), ((1345, 1391), 're.search', 're.search', (['"""\\\\[(.*)\\\\]"""', 'sequence_class_string'], {}), "('\\\\[(.*)\\\\]', sequence_class_string)\n", (1354, 1391), False, 'import re\n'), ((1586, 1632), 're.search', 're.search', (['"""\\\\(\\\\\'(.*)\\\\\'\\\\)"""', 'module_parts[0]'], {}), '("\\\\(\\\\\'(.*)\\\\\'\\\\)", module_parts[0])\n', (1595, 1632), False, 'import re\n'), ((1731, 1764), 'inflection.underscore', 'inflection.underscore', (['class_name'], {}), '(class_name)\n', (1752, 1764), False, 'import inflection\n')]
|
import os
import glob
import shutil
from PIL import Image
# for file_path in glob.glob('./data/evaluation/card_evaluation/*.png'):
# file_name = os.path.basename(file_path)
# if file_name.find(',') >= 0 or file_name.find('@') >=0 or file_name.find('*') > 0 or file_name.find(':') > 0 \
# or file_name.find('r') > 0 or file_name.find('성별') > 0 or file_name.find('KOR') > 0 or file_name.find('~') > 0:
# # shutil.move(file_path, os.path.join('result', 'no_way_out', file_name))
# # print(file_path, os.path.join('result', 'no_way_out', file_name))
# continue
# shutil.copy(file_path, os.path.join('./data/evaluation/valid_card_data', file_name))
src = './data/evaluation/valid_card_data/'
# target = './data/evaluation/deleted/'
# for file_path in glob.glob('./result/tagging_error/*.png'):
# file_name = os.path.basename(file_path)
# src_file_path = os.path.join(src, file_name)
# print(src_file_path, os.path.join(target, file_name))
# shutil.move(src_file_path, os.path.join(target, file_name))
for file_path in glob.glob(src + '*.png'):
base_name = os.path.basename(file_path)
if file_path.find('_(F-4)_') > 0:
target_file_path = file_path.replace('_(F-4)_', '_재외동포(F-4)_')
shutil.move(file_path, target_file_path)
# print(file_path, target_file_path)
# continue
# if base_name.find('e') > 0 :
# print(file_path)
|
[
"os.path.basename",
"shutil.move",
"glob.glob"
] |
[((1081, 1105), 'glob.glob', 'glob.glob', (["(src + '*.png')"], {}), "(src + '*.png')\n", (1090, 1105), False, 'import glob\n'), ((1123, 1150), 'os.path.basename', 'os.path.basename', (['file_path'], {}), '(file_path)\n', (1139, 1150), False, 'import os\n'), ((1268, 1308), 'shutil.move', 'shutil.move', (['file_path', 'target_file_path'], {}), '(file_path, target_file_path)\n', (1279, 1308), False, 'import shutil\n')]
|
import time
import pandas as pd
from dash_website.utils.aws_loader import load_excel, load_parquet, load_feather
if __name__ == "__main__":
time_excel = 0
for idx_load_excel in range(10):
start_excel = time.time()
load_excel("xwas/univariate_results/linear_correlations.xlsx")
time_excel += time.time() - start_excel
print("Load excel", time_excel)
time_parquet = 0
for idx_load_excel in range(10):
start_parquet = time.time()
load_parquet("xwas/univariate_results/linear_correlations.parquet")
time_parquet += time.time() - start_parquet
print("Load parquet", time_parquet)
time_feather = 0
for idx_load_feather in range(10):
start_feather = time.time()
corr = load_feather("xwas/univariate_results/linear_correlations.feather").set_index("index")
corr.columns = pd.MultiIndex.from_tuples(
list(map(eval, corr.columns.tolist())), names=["dimension", "category", "variable"]
)
time_feather += time.time() - start_feather
print("Load feather", time_feather)
|
[
"dash_website.utils.aws_loader.load_excel",
"dash_website.utils.aws_loader.load_parquet",
"dash_website.utils.aws_loader.load_feather",
"time.time"
] |
[((220, 231), 'time.time', 'time.time', ([], {}), '()\n', (229, 231), False, 'import time\n'), ((240, 302), 'dash_website.utils.aws_loader.load_excel', 'load_excel', (['"""xwas/univariate_results/linear_correlations.xlsx"""'], {}), "('xwas/univariate_results/linear_correlations.xlsx')\n", (250, 302), False, 'from dash_website.utils.aws_loader import load_excel, load_parquet, load_feather\n'), ((471, 482), 'time.time', 'time.time', ([], {}), '()\n', (480, 482), False, 'import time\n'), ((491, 558), 'dash_website.utils.aws_loader.load_parquet', 'load_parquet', (['"""xwas/univariate_results/linear_correlations.parquet"""'], {}), "('xwas/univariate_results/linear_correlations.parquet')\n", (503, 558), False, 'from dash_website.utils.aws_loader import load_excel, load_parquet, load_feather\n'), ((737, 748), 'time.time', 'time.time', ([], {}), '()\n', (746, 748), False, 'import time\n'), ((325, 336), 'time.time', 'time.time', ([], {}), '()\n', (334, 336), False, 'import time\n'), ((583, 594), 'time.time', 'time.time', ([], {}), '()\n', (592, 594), False, 'import time\n'), ((1031, 1042), 'time.time', 'time.time', ([], {}), '()\n', (1040, 1042), False, 'import time\n'), ((764, 831), 'dash_website.utils.aws_loader.load_feather', 'load_feather', (['"""xwas/univariate_results/linear_correlations.feather"""'], {}), "('xwas/univariate_results/linear_correlations.feather')\n", (776, 831), False, 'from dash_website.utils.aws_loader import load_excel, load_parquet, load_feather\n')]
|
from django.contrib.auth import get_user_model
from django.contrib.postgres.fields import JSONField
from django.db import models
from organization.models import BaseTemplate
from misc.models import Content
class ToDo(BaseTemplate):
content = models.ManyToManyField(Content)
due_on_day = models.IntegerField(default=0)
form = JSONField(models.CharField(max_length=100000, default='[]'))
# Chat bot specific actions
send_back = models.BooleanField(default=False)
channel = models.CharField(max_length=10000, null=True, blank=True)
def get_slack_form(self):
slack_form_items = []
for i in self.form:
options = []
if i['type'] == 'select':
for j in i['options']:
options.append({
"text": {
"type": "plain_text",
"text": j['name'],
"emoji": True,
# "action_id": j['id']
},
"value": j['name']
})
slack_form_items.append({
"type": "input",
"block_id": i['id'],
"element": {
"type": "static_select",
"placeholder": {
"type": "plain_text",
"text": "Select an item",
"emoji": True
},
"options": options,
"action_id": i['id']
},
"label": {
"type": "plain_text",
"text": i['text'],
"emoji": True
}
})
if i['type'] == 'input':
slack_form_items.append({
"type": "input",
"block_id": i['id'],
"element": {
"type": "plain_text_input",
"action_id": i['id']
},
"label": {
"type": "plain_text",
"text": i['text'],
"emoji": True
}
})
if i['type'] == 'text':
slack_form_items.append({
"type": "input",
"block_id": i['id'],
"element": {
"type": "plain_text_input",
"multiline": True,
"action_id": i['id']
},
"label": {
"type": "plain_text",
"text": i['text'],
"emoji": True
}
})
return slack_form_items
def valid_for_slack(self):
valid = True
for i in self.form:
if i['type'] == 'check' or i['type'] == 'upload':
valid = False
break
return valid
|
[
"django.db.models.CharField",
"django.db.models.IntegerField",
"django.db.models.ManyToManyField",
"django.db.models.BooleanField"
] |
[((249, 280), 'django.db.models.ManyToManyField', 'models.ManyToManyField', (['Content'], {}), '(Content)\n', (271, 280), False, 'from django.db import models\n'), ((298, 328), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (317, 328), False, 'from django.db import models\n'), ((449, 483), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (468, 483), False, 'from django.db import models\n'), ((498, 555), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(10000)', 'null': '(True)', 'blank': '(True)'}), '(max_length=10000, null=True, blank=True)\n', (514, 555), False, 'from django.db import models\n'), ((350, 399), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100000)', 'default': '"""[]"""'}), "(max_length=100000, default='[]')\n", (366, 399), False, 'from django.db import models\n')]
|
import functools
from flask import Blueprint
from flask import flash
from flask import g
from flask import redirect
from flask import render_template
from flask import request
from flask import session
from flask import url_for
from werkzeug.security import check_password_hash
from werkzeug.security import generate_password_hash
from countries.db import get_db
bp = Blueprint("auth", __name__, url_prefix="/auth")
def login_required(view):
"""View decorator that redirects anonymous users to the login page."""
@functools.wraps(view)
def wrapped_view(**kwargs):
if g.user is None:
return redirect(url_for("auth.login"))
return view(**kwargs)
return wrapped_view
#@bp.before_app_request
@bp.route("/hello")
def hello():
return "hello"
|
[
"flask.url_for",
"flask.Blueprint",
"functools.wraps"
] |
[((371, 418), 'flask.Blueprint', 'Blueprint', (['"""auth"""', '__name__'], {'url_prefix': '"""/auth"""'}), "('auth', __name__, url_prefix='/auth')\n", (380, 418), False, 'from flask import Blueprint\n'), ((528, 549), 'functools.wraps', 'functools.wraps', (['view'], {}), '(view)\n', (543, 549), False, 'import functools\n'), ((637, 658), 'flask.url_for', 'url_for', (['"""auth.login"""'], {}), "('auth.login')\n", (644, 658), False, 'from flask import url_for\n')]
|
from django.urls import re_path
from . import views
app_name = 'weather'
urlpatterns = [
re_path(r'^$', views.weather, name='root'),
re_path(r'^current/$', views.current, name='current'),
re_path(r'^unitchange/$', views.unit_change, name='unit-change'),
re_path(r'^generate/$', views.generate, name='generate'),
re_path(r'^delete/$', views.delete, name='delete'),
re_path(r'^data/$', views.output_data, name='data'),
re_path(r'^chart/$', views.chart, name='chart'),
]
|
[
"django.urls.re_path"
] |
[((101, 142), 'django.urls.re_path', 're_path', (['"""^$"""', 'views.weather'], {'name': '"""root"""'}), "('^$', views.weather, name='root')\n", (108, 142), False, 'from django.urls import re_path\n'), ((150, 202), 'django.urls.re_path', 're_path', (['"""^current/$"""', 'views.current'], {'name': '"""current"""'}), "('^current/$', views.current, name='current')\n", (157, 202), False, 'from django.urls import re_path\n'), ((210, 273), 'django.urls.re_path', 're_path', (['"""^unitchange/$"""', 'views.unit_change'], {'name': '"""unit-change"""'}), "('^unitchange/$', views.unit_change, name='unit-change')\n", (217, 273), False, 'from django.urls import re_path\n'), ((281, 336), 'django.urls.re_path', 're_path', (['"""^generate/$"""', 'views.generate'], {'name': '"""generate"""'}), "('^generate/$', views.generate, name='generate')\n", (288, 336), False, 'from django.urls import re_path\n'), ((344, 393), 'django.urls.re_path', 're_path', (['"""^delete/$"""', 'views.delete'], {'name': '"""delete"""'}), "('^delete/$', views.delete, name='delete')\n", (351, 393), False, 'from django.urls import re_path\n'), ((401, 451), 'django.urls.re_path', 're_path', (['"""^data/$"""', 'views.output_data'], {'name': '"""data"""'}), "('^data/$', views.output_data, name='data')\n", (408, 451), False, 'from django.urls import re_path\n'), ((459, 505), 'django.urls.re_path', 're_path', (['"""^chart/$"""', 'views.chart'], {'name': '"""chart"""'}), "('^chart/$', views.chart, name='chart')\n", (466, 505), False, 'from django.urls import re_path\n')]
|
#!/usr/bin/python
"""Console script for cmus_scrobbler."""
import cmus_scrobbler
import argparse
import sys
def main():
"""Console script for cmus_scrobbler."""
parser = argparse.ArgumentParser()
parser.add_argument('status', nargs='*')
parser.add_argument('-c', '--config', nargs=2, help="Called with the API KEY and API SECRET KEY as arguments, updates their values in the config.")
args = parser.parse_args()
return cmus_scrobbler.main(args)
if __name__ == "__main__":
sys.exit(main()) # pragma: no cover
|
[
"cmus_scrobbler.main",
"argparse.ArgumentParser"
] |
[((179, 204), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (202, 204), False, 'import argparse\n'), ((444, 469), 'cmus_scrobbler.main', 'cmus_scrobbler.main', (['args'], {}), '(args)\n', (463, 469), False, 'import cmus_scrobbler\n')]
|
from typing import List
from collections import Counter
# sort
class Solution_1:
def intersect(self, nums1: List[int], nums2: List[int]) -> List[int]:
nums1.sort()
nums2.sort()
res = []
len1 = len(nums1)
len2 = len(nums2)
idx1 = 0
idx2 = 0
while idx1 < len1 and idx2 < len2:
if nums1[idx1] < nums2[idx2]:
idx1 += 1
elif nums1[idx1] > nums2[idx2]:
idx2 += 1
else:
res.append(nums1[idx1])
idx1 += 1
idx2 += 1
return res
# hash
class Solution_2:
def intersect(self, nums1: List[int], nums2: List[int]) -> List[int]:
Hash = {}
for i in range(len(nums1)):
if nums1[i] not in Hash:
                # the value stored in Hash can be a list object
Hash[nums1[i]] = [i]
else:
Hash[nums1[i]].append(i)
res = []
for j in range(len(nums2)):
if nums2[j] in Hash:
res.append(nums2[j])
Hash[nums2[j]].pop()
if Hash[nums2[j]] == []:
del Hash[nums2[j]]
return res
# the fastest
class Solution_3:
def intersect(self, nums1: List[int], nums2: List[int]) -> List[int]:
from collections import Counter
res = []
for k, v in (Counter(nums1) & Counter(nums2)).items():
for _ in range(v):
res.append(k)
return res
|
[
"collections.Counter"
] |
[((1428, 1442), 'collections.Counter', 'Counter', (['nums1'], {}), '(nums1)\n', (1435, 1442), False, 'from collections import Counter\n'), ((1445, 1459), 'collections.Counter', 'Counter', (['nums2'], {}), '(nums2)\n', (1452, 1459), False, 'from collections import Counter\n')]
|
import json
from unittest import TestCase
from unittest.mock import patch
from zoltpy.covid19 import validate_config_dict, validate_quantile_csv_file
class CdcIOTestCase(TestCase):
"""
"""
def test_validate_quantile_csv_file_calls_validate_config_dict(self):
validation_config = {'target_groups':
[{"outcome_variable": "inc flu hosp", "targets": [], "locations": [], "quantiles": []}]}
with patch('zoltpy.covid19.validate_config_dict') as validate_config_mock:
validate_quantile_csv_file('tests/quantile-predictions.csv', validation_config)
validate_config_mock.assert_called_once_with(validation_config)
validation_config = {'target_groups': [
{"outcome_variable": "inc flu hosp", "targets": [], "locations": [], "quantiles": ['not a number']}]}
error_messages = validate_quantile_csv_file('tests/quantile-predictions.csv', validation_config)
self.assertEqual(1, len(error_messages))
self.assertIn("invalid validation_config", error_messages[0])
def test_validate_config_dict(self):
# case: not a dict
with self.assertRaisesRegex(RuntimeError, "validation_config was not a dict"):
validate_config_dict(None)
# case: dict but no 'target_groups' key
with self.assertRaisesRegex(RuntimeError, "validation_config did not contain 'target_groups' key"):
validate_config_dict({})
# case: has 'target_groups', but not a list
with self.assertRaisesRegex(RuntimeError, "'target_groups' was not a list"):
validate_config_dict({'target_groups': None})
# case: dict with one 'target_groups', but not all keys present in it
with self.assertRaisesRegex(RuntimeError, "one or more target group keys was missing"):
validate_config_dict({'target_groups': [{}]})
# case: dict with one 'target_groups' with all keys present, but targets, locations, and quantiles not lists
bad_target_groups = [{"outcome_variable": "inc flu hosp", "targets": 'not a list', "locations": [], "quantiles": []},
{"outcome_variable": "inc flu hosp", "targets": [], "locations": 'not a list', "quantiles": []},
{"outcome_variable": "inc flu hosp", "targets": [], "locations": [], "quantiles": 'not a list'}]
for bad_target_group in bad_target_groups:
with self.assertRaisesRegex(RuntimeError, "one of these fields was not a list"):
validate_config_dict({'target_groups': [bad_target_group]})
# case: dict with one 'target_groups', but its name is not a string
with self.assertRaisesRegex(RuntimeError, "'outcome_variable' field was not a string"):
validate_config_dict({'target_groups': [{"outcome_variable": None, "targets": [], "locations": [], "quantiles": []}]})
# case: dict with one 'target_groups' with all keys present, but targets or locations contain non-strings
bad_target_groups = [{"outcome_variable": "inc flu hosp", "targets": [-1], "locations": [], "quantiles": []},
{"outcome_variable": "inc flu hosp", "targets": [], "locations": [-1], "quantiles": []}]
for bad_target_group in bad_target_groups:
with self.assertRaisesRegex(RuntimeError, "one of these fields contained non-strings"):
validate_config_dict({'target_groups': [bad_target_group]})
# case: dict with one 'target_groups' with all keys present, but quantiles contains non-numbers
with self.assertRaisesRegex(RuntimeError, "'quantiles' field contained non-numbers"):
validate_config_dict({'target_groups': [
{"outcome_variable": "inc flu hosp", "targets": [], "locations": [], "quantiles": ['not a number']}]})
# case: blue sky
try:
validate_config_dict({'target_groups':
[{"outcome_variable": "inc flu hosp", "targets": [], "locations": [], "quantiles": []}]})
except Exception as ex:
self.fail(f"unexpected exception: {ex}")
# case: load from file
with open('tests/covid-validation-config.json', 'r') as fp:
validation_config = json.load(fp)
try:
validate_config_dict(validation_config)
except Exception as ex:
self.fail(f"unexpected exception: {ex}")
|
[
"unittest.mock.patch",
"json.load",
"zoltpy.covid19.validate_config_dict",
"zoltpy.covid19.validate_quantile_csv_file"
] |
[((883, 962), 'zoltpy.covid19.validate_quantile_csv_file', 'validate_quantile_csv_file', (['"""tests/quantile-predictions.csv"""', 'validation_config'], {}), "('tests/quantile-predictions.csv', validation_config)\n", (909, 962), False, 'from zoltpy.covid19 import validate_config_dict, validate_quantile_csv_file\n'), ((457, 501), 'unittest.mock.patch', 'patch', (['"""zoltpy.covid19.validate_config_dict"""'], {}), "('zoltpy.covid19.validate_config_dict')\n", (462, 501), False, 'from unittest.mock import patch\n'), ((539, 618), 'zoltpy.covid19.validate_quantile_csv_file', 'validate_quantile_csv_file', (['"""tests/quantile-predictions.csv"""', 'validation_config'], {}), "('tests/quantile-predictions.csv', validation_config)\n", (565, 618), False, 'from zoltpy.covid19 import validate_config_dict, validate_quantile_csv_file\n'), ((1251, 1277), 'zoltpy.covid19.validate_config_dict', 'validate_config_dict', (['None'], {}), '(None)\n', (1271, 1277), False, 'from zoltpy.covid19 import validate_config_dict, validate_quantile_csv_file\n'), ((1447, 1471), 'zoltpy.covid19.validate_config_dict', 'validate_config_dict', (['{}'], {}), '({})\n', (1467, 1471), False, 'from zoltpy.covid19 import validate_config_dict, validate_quantile_csv_file\n'), ((1622, 1667), 'zoltpy.covid19.validate_config_dict', 'validate_config_dict', (["{'target_groups': None}"], {}), "({'target_groups': None})\n", (1642, 1667), False, 'from zoltpy.covid19 import validate_config_dict, validate_quantile_csv_file\n'), ((1855, 1900), 'zoltpy.covid19.validate_config_dict', 'validate_config_dict', (["{'target_groups': [{}]}"], {}), "({'target_groups': [{}]})\n", (1875, 1900), False, 'from zoltpy.covid19 import validate_config_dict, validate_quantile_csv_file\n'), ((2802, 2924), 'zoltpy.covid19.validate_config_dict', 'validate_config_dict', (["{'target_groups': [{'outcome_variable': None, 'targets': [], 'locations': [\n ], 'quantiles': []}]}"], {}), "({'target_groups': [{'outcome_variable': None,\n 'targets': [], 'locations': [], 'quantiles': []}]})\n", (2822, 2924), False, 'from zoltpy.covid19 import validate_config_dict, validate_quantile_csv_file\n'), ((3710, 3856), 'zoltpy.covid19.validate_config_dict', 'validate_config_dict', (["{'target_groups': [{'outcome_variable': 'inc flu hosp', 'targets': [],\n 'locations': [], 'quantiles': ['not a number']}]}"], {}), "({'target_groups': [{'outcome_variable': 'inc flu hosp',\n 'targets': [], 'locations': [], 'quantiles': ['not a number']}]})\n", (3730, 3856), False, 'from zoltpy.covid19 import validate_config_dict, validate_quantile_csv_file\n'), ((3921, 4053), 'zoltpy.covid19.validate_config_dict', 'validate_config_dict', (["{'target_groups': [{'outcome_variable': 'inc flu hosp', 'targets': [],\n 'locations': [], 'quantiles': []}]}"], {}), "({'target_groups': [{'outcome_variable': 'inc flu hosp',\n 'targets': [], 'locations': [], 'quantiles': []}]})\n", (3941, 4053), False, 'from zoltpy.covid19 import validate_config_dict, validate_quantile_csv_file\n'), ((4305, 4318), 'json.load', 'json.load', (['fp'], {}), '(fp)\n', (4314, 4318), False, 'import json\n'), ((2557, 2616), 'zoltpy.covid19.validate_config_dict', 'validate_config_dict', (["{'target_groups': [bad_target_group]}"], {}), "({'target_groups': [bad_target_group]})\n", (2577, 2616), False, 'from zoltpy.covid19 import validate_config_dict, validate_quantile_csv_file\n'), ((3439, 3498), 'zoltpy.covid19.validate_config_dict', 'validate_config_dict', (["{'target_groups': [bad_target_group]}"], {}), "({'target_groups': [bad_target_group]})\n", (3459, 3498), False, 'from zoltpy.covid19 import validate_config_dict, validate_quantile_csv_file\n'), ((4352, 4391), 'zoltpy.covid19.validate_config_dict', 'validate_config_dict', (['validation_config'], {}), '(validation_config)\n', (4372, 4391), False, 'from zoltpy.covid19 import validate_config_dict, validate_quantile_csv_file\n')]
|
import pytest
from gridthings import Cell
# Cells represent individual data points in a grid
# They implement a variety of mathematical dunder methods
# so that they can be compared, sorted, and manipulated
def test_cell_when_equal():
c1 = Cell(y=0, x=0, value="foo")
c2 = Cell(y=0, x=1, value="foo")
# using ==, cell values are equal but actual
# cell objects are not considered equal
assert c1.value == c2.value
assert c1 != c2
# When > and < operators are used, it's
# a pure comparison on values
# so c1 == c2 is False, but c1 >= c2 is True
assert c1 >= c2
assert c2 >= c1
assert c1 <= c2
assert c2 <= c1
def test_cell_when_unequal():
c1 = Cell(y=0, x=0, value=1)
c2 = Cell(y=0, x=1, value=2)
assert c1 != c2
assert c1 < c2
assert c1 <= c2
assert c2 > c1
assert c2 >= c1
def test_cell_against_non_cells():
cell = Cell(y=0, x=0, value=2)
# __eq__
assert cell == 2
assert 2 == cell
# __ne__
assert cell != 0
assert 0 != cell
# __gte__ / __lte__
assert 3 >= cell
assert cell <= 3
assert 1 <= cell
assert cell >= 1
# __gt__ / __lt__
assert 3 > cell
assert cell < 3
assert 1 < cell
assert cell > 1
# __add__
assert cell + 2 == 4
assert 2 + cell == 4
# __sub__
assert 2 - cell == 0
assert cell - 2 == 0
# __mul__
assert 3 * cell == 6
assert cell * 3 == 6
# __truediv__
assert cell / 2 == 1
# __pow__
assert cell ** 3 == 8
def test_cell_when_mismatched_datatype():
c1 = Cell(y=0, x=0, value="foo")
c2 = Cell(y=0, x=0, value=1)
assert c1 != c2
with pytest.raises(TypeError):
# < not supported between instances of 'str' and 'int'
assert c1 < c2
def test_cell_str_concat():
c1 = Cell(y=0, x=0, value="foo")
c2 = Cell(y=0, x=1, value="bar")
assert c1 + c2 == "foobar"
assert c2 + c1 == "barfoo"
assert c1 + "baz" == "foobaz"
assert "baz" + c2 == "bazbar"
def test_cell_int_math():
c1 = Cell(y=0, x=0, value=2)
c2 = Cell(y=0, x=0, value=4)
c3 = Cell(y=0, x=0, value=6)
assert c1 + c2 == 6
assert c2 + c1 == 6
assert c1 + 2 == 4
assert 2 + c1 == 4
assert c1 + c2 + c3 == 12
assert c2 - c1 == 2
assert 4 - c1 == 2
assert c3 - c2 - c1 == 0
assert c1 * c2 == 8
assert 2 * c2 == 8
assert c1 * c2 * c3 == 48
assert c1 / c2 == 0.5
assert 4 / c1 == 2
assert c1 ** 3 == 8
assert c2 ** c1 == 16
assert 2 ** c1 == 4
def test_subclass_cell():
class MyCell(Cell):
extra_arg: bool = True
cell = MyCell(y=0, x=0, value=1)
assert cell.dict() == {"y": 0, "x": 0, "value": 1, "extra_arg": True}
cell2 = MyCell(y=0, x=0, value=1, extra_arg=False)
assert cell2.dict() == {"y": 0, "x": 0, "value": 1, "extra_arg": False}
|
[
"pytest.raises",
"gridthings.Cell"
] |
[((248, 275), 'gridthings.Cell', 'Cell', ([], {'y': '(0)', 'x': '(0)', 'value': '"""foo"""'}), "(y=0, x=0, value='foo')\n", (252, 275), False, 'from gridthings import Cell\n'), ((285, 312), 'gridthings.Cell', 'Cell', ([], {'y': '(0)', 'x': '(1)', 'value': '"""foo"""'}), "(y=0, x=1, value='foo')\n", (289, 312), False, 'from gridthings import Cell\n'), ((706, 729), 'gridthings.Cell', 'Cell', ([], {'y': '(0)', 'x': '(0)', 'value': '(1)'}), '(y=0, x=0, value=1)\n', (710, 729), False, 'from gridthings import Cell\n'), ((739, 762), 'gridthings.Cell', 'Cell', ([], {'y': '(0)', 'x': '(1)', 'value': '(2)'}), '(y=0, x=1, value=2)\n', (743, 762), False, 'from gridthings import Cell\n'), ((909, 932), 'gridthings.Cell', 'Cell', ([], {'y': '(0)', 'x': '(0)', 'value': '(2)'}), '(y=0, x=0, value=2)\n', (913, 932), False, 'from gridthings import Cell\n'), ((1581, 1608), 'gridthings.Cell', 'Cell', ([], {'y': '(0)', 'x': '(0)', 'value': '"""foo"""'}), "(y=0, x=0, value='foo')\n", (1585, 1608), False, 'from gridthings import Cell\n'), ((1618, 1641), 'gridthings.Cell', 'Cell', ([], {'y': '(0)', 'x': '(0)', 'value': '(1)'}), '(y=0, x=0, value=1)\n', (1622, 1641), False, 'from gridthings import Cell\n'), ((1822, 1849), 'gridthings.Cell', 'Cell', ([], {'y': '(0)', 'x': '(0)', 'value': '"""foo"""'}), "(y=0, x=0, value='foo')\n", (1826, 1849), False, 'from gridthings import Cell\n'), ((1859, 1886), 'gridthings.Cell', 'Cell', ([], {'y': '(0)', 'x': '(1)', 'value': '"""bar"""'}), "(y=0, x=1, value='bar')\n", (1863, 1886), False, 'from gridthings import Cell\n'), ((2054, 2077), 'gridthings.Cell', 'Cell', ([], {'y': '(0)', 'x': '(0)', 'value': '(2)'}), '(y=0, x=0, value=2)\n', (2058, 2077), False, 'from gridthings import Cell\n'), ((2087, 2110), 'gridthings.Cell', 'Cell', ([], {'y': '(0)', 'x': '(0)', 'value': '(4)'}), '(y=0, x=0, value=4)\n', (2091, 2110), False, 'from gridthings import Cell\n'), ((2120, 2143), 'gridthings.Cell', 'Cell', ([], {'y': '(0)', 'x': '(0)', 'value': '(6)'}), '(y=0, x=0, value=6)\n', (2124, 2143), False, 'from gridthings import Cell\n'), ((1671, 1695), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (1684, 1695), False, 'import pytest\n')]
|
import os
import logging
#[PATHS]
# Paths will be based on the location of this file which is ./conf by default. Adjust accordingly!
FILEPATH = os.path.abspath(os.path.dirname(__file__))
ENV_PATH = FILEPATH + "/env"
#[LOGGING]
LOG_PATH = FILEPATH + "/../logs/"
LOG_FILE = "twitterbot.log"
LOG_LEVEL = logging.DEBUG
#[PRAW]
USER_AGENT = "" #Your Unique USER AGENT for Reddit
SUBREDDIT = "" # The Subreddit you want to target
REDDIT_NEWPOST_LIMIT = 100 #How many new posts to check
REDDIT_SHORTPATH = "redd.it/" # For creating the shortlink to reddit
#[DB]
TWEETDATA_PATH = FILEPATH + "/../db/"
TWEETDATA_FILENAME = "chirping-snoos.db"
subtweet_kwargs = {"tweetdata_path" : TWEETDATA_PATH, "tweetdb_filename" : TWEETDATA_FILENAME}
#[TWITTER]
TWEET_UPVOTE_THRESHOLD = 10 #Minimum upvotes to be considered for tweeting
TWEET_COMMENT_THRESHOLD = 20 #minimum comments to be considered for tweeting
TWEET_ABSOLUTE_LIMIT = 270 #Max characters for a tweet
TWEET_PREFIX="" #This text will appear before the title from reddit
TWEET_SUFFIX="" #This text will appear after the title and link from reddit
TWEET_PART_SEPARATOR = " " #This is used to separate the prefix, title, link and suffix if desired
INTERTWEET_DELAY_SEC = 0.7 # Delay between tweets. Recommended 0.5 or more to avoid flooding twitter
TWITTER_TIMESTAMP_FORMAT = "%a %b %d %H:%M:%S %z %Y" #Import Twitter's timestamp into arrow
#If the title is too long, it will be shortened to fit.
#Longtitle_Hint is shown at the end of the shortened text to symbolize shortening
TWEET_LONGTITLE_HINT = "..."
|
[
"os.path.dirname"
] |
[((162, 187), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (177, 187), False, 'import os\n')]
|
#=======================================================================================================================
# an example file on how to build special test/training cubes using nh3_testcube.py
#=======================================================================================================================
import numpy as np
import pyspeckit.spectrum.models.ammonia_constants as nh3con
from pyspeckit.spectrum.units import SpectroscopicAxis as spaxis
from astropy.utils.console import ProgressBar
import sys
import nh3_testcubes as testcubes
def generate_cubes(nCubes=100, nBorder=1, noise_rms=0.1, output_dir='random_cubes', random_seed=None,
linenames=['oneone', 'twotwo'], remove_low_sep=True, noise_class=True):
xarrList = []
lineIDList = []
for linename in linenames:
# generate spectral axis for each ammonia lines
xarr = spaxis((np.linspace(-500, 499, 1000) * 5.72e-6
+ nh3con.freq_dict[linename] / 1e9),
unit='GHz',
refX=nh3con.freq_dict[linename] / 1e9,
velocity_convention='radio', refX_unit='GHz')
xarrList.append(xarr)
# specify the ID fore each line to appear in saved fits files
        if linename == 'oneone':
            lineIDList.append('11')
        elif linename == 'twotwo':
lineIDList.append('22')
else:
# use line names at it is for lines above (3,3)
lineIDList.append(linename)
# generate random parameters for nCubes
nComps, Temp, Width, Voff, logN = testcubes.generate_parameters(nCubes, random_seed)
gradX, gradY = testcubes.generate_gradients(nCubes, random_seed)
if noise_class:
# Creates a balanced training set with 1comp, noise, and 2comp classes
        nComps = np.concatenate((np.zeros(nCubes // 3).astype(int),
                                 np.ones(nCubes // 3).astype(int),
                                 np.ones(nCubes // 3 + nCubes%3).astype(int) + 1))
if remove_low_sep:
Voff = remove_low_vsep(Voff, Width)
cubes = []
for xarr, lineID in zip(xarrList, lineIDList):
# generate cubes for each line specified
cubeList = []
print('----------- generating {0} lines ------------'.format(lineID))
for i in ProgressBar(range(nCubes)):
cube_i = testcubes.make_and_write(nCubes, nComps[i], i, nBorder, xarr, Temp[i], Width[i], Voff[i], logN[i], gradX[i], gradY[i]
, noise_rms, lineID, output_dir)
cubeList.append(cube_i)
cubes.append(cubeList)
return cubes
def remove_low_vsep(Voff, Width):
Voff = Voff.swapaxes(0, 1)
Voff1, Voff2 = Voff[0], Voff[1]
Width = Width.swapaxes(0, 1)
Width1, Width2 = Width[0], Width[1]
# Find where centroids are too close
too_close = np.where(np.abs(Voff1 - Voff2) < np.max(np.column_stack((Width1, Width2)), axis=1))
# Move the centroids farther apart by the length of largest line width
min_Voff = np.min(np.column_stack((Voff2[too_close], Voff1[too_close])), axis=1)
max_Voff = np.max(np.column_stack((Voff2[too_close], Voff1[too_close])), axis=1)
Voff1[too_close] = min_Voff - np.max(np.column_stack((Width1[too_close], Width2[too_close])), axis=1) / 2.
Voff2[too_close] = max_Voff + np.max(np.column_stack((Width1[too_close], Width2[too_close])), axis=1) / 2.
Voff = np.array([Voff1, Voff2]).swapaxes(0, 1)
return Voff
if __name__ == '__main__':
print(sys.argv)
if len(sys.argv) > 1:
generate_cubes(nCubes=int(sys.argv[1]))
else:
generate_cubes()
|
[
"numpy.abs",
"nh3_testcubes.generate_gradients",
"nh3_testcubes.generate_parameters",
"numpy.zeros",
"numpy.ones",
"numpy.array",
"numpy.linspace",
"numpy.column_stack",
"nh3_testcubes.make_and_write"
] |
[((1611, 1661), 'nh3_testcubes.generate_parameters', 'testcubes.generate_parameters', (['nCubes', 'random_seed'], {}), '(nCubes, random_seed)\n', (1640, 1661), True, 'import nh3_testcubes as testcubes\n'), ((1681, 1730), 'nh3_testcubes.generate_gradients', 'testcubes.generate_gradients', (['nCubes', 'random_seed'], {}), '(nCubes, random_seed)\n', (1709, 1730), True, 'import nh3_testcubes as testcubes\n'), ((3079, 3132), 'numpy.column_stack', 'np.column_stack', (['(Voff2[too_close], Voff1[too_close])'], {}), '((Voff2[too_close], Voff1[too_close]))\n', (3094, 3132), True, 'import numpy as np\n'), ((3164, 3217), 'numpy.column_stack', 'np.column_stack', (['(Voff2[too_close], Voff1[too_close])'], {}), '((Voff2[too_close], Voff1[too_close]))\n', (3179, 3217), True, 'import numpy as np\n'), ((2397, 2554), 'nh3_testcubes.make_and_write', 'testcubes.make_and_write', (['nCubes', 'nComps[i]', 'i', 'nBorder', 'xarr', 'Temp[i]', 'Width[i]', 'Voff[i]', 'logN[i]', 'gradX[i]', 'gradY[i]', 'noise_rms', 'lineID', 'output_dir'], {}), '(nCubes, nComps[i], i, nBorder, xarr, Temp[i],\n Width[i], Voff[i], logN[i], gradX[i], gradY[i], noise_rms, lineID,\n output_dir)\n', (2421, 2554), True, 'import nh3_testcubes as testcubes\n'), ((2907, 2928), 'numpy.abs', 'np.abs', (['(Voff1 - Voff2)'], {}), '(Voff1 - Voff2)\n', (2913, 2928), True, 'import numpy as np\n'), ((3460, 3484), 'numpy.array', 'np.array', (['[Voff1, Voff2]'], {}), '([Voff1, Voff2])\n', (3468, 3484), True, 'import numpy as np\n'), ((2938, 2971), 'numpy.column_stack', 'np.column_stack', (['(Width1, Width2)'], {}), '((Width1, Width2))\n', (2953, 2971), True, 'import numpy as np\n'), ((3268, 3323), 'numpy.column_stack', 'np.column_stack', (['(Width1[too_close], Width2[too_close])'], {}), '((Width1[too_close], Width2[too_close]))\n', (3283, 3323), True, 'import numpy as np\n'), ((3379, 3434), 'numpy.column_stack', 'np.column_stack', (['(Width1[too_close], Width2[too_close])'], {}), '((Width1[too_close], Width2[too_close]))\n', (3394, 3434), True, 'import numpy as np\n'), ((910, 938), 'numpy.linspace', 'np.linspace', (['(-500)', '(499)', '(1000)'], {}), '(-500, 499, 1000)\n', (921, 938), True, 'import numpy as np\n'), ((1864, 1884), 'numpy.zeros', 'np.zeros', (['(nCubes / 3)'], {}), '(nCubes / 3)\n', (1872, 1884), True, 'import numpy as np\n'), ((1931, 1950), 'numpy.ones', 'np.ones', (['(nCubes / 3)'], {}), '(nCubes / 3)\n', (1938, 1950), True, 'import numpy as np\n'), ((1997, 2029), 'numpy.ones', 'np.ones', (['(nCubes / 3 + nCubes % 3)'], {}), '(nCubes / 3 + nCubes % 3)\n', (2004, 2029), True, 'import numpy as np\n')]
|
# (C) Datadog, Inc. 2022-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
import logging
import os
import mock
import pytest
from requests import HTTPError
from datadog_checks.arangodb import ArangodbCheck
from datadog_checks.dev.http import MockResponse
from datadog_checks.dev.utils import get_metadata_metrics
from .common import METRICS
@pytest.mark.integration
def test_invalid_endpoint(aggregator, instance_invalid_endpoint, dd_run_check):
check = ArangodbCheck('arangodb', {}, [instance_invalid_endpoint])
with pytest.raises(Exception):
dd_run_check(check)
aggregator.assert_service_check('arangodb.openmetrics.health', ArangodbCheck.CRITICAL, count=1)
@pytest.mark.integration
@pytest.mark.parametrize(
'tag_condition, base_tags',
[
pytest.param(
'valid_id_mode',
['endpoint:http://localhost:8529/_admin/metrics/v2', 'server_mode:default', 'server_id:1'],
id="valid id and valid mode",
),
pytest.param(
'invalid_mode_valid_id',
['endpoint:http://localhost:8529/_admin/metrics/v2', 'server_id:1'],
id="invalid mode but valid id",
),
pytest.param(
'valid_mode_invalid_id',
['endpoint:http://localhost:8529/_admin/metrics/v2', 'server_mode:default'],
id="valid mode but invalid id",
),
pytest.param(
'invalid_mode_invalid_id',
['endpoint:http://localhost:8529/_admin/metrics/v2'],
id="invalid mode and invalid id",
),
],
)
def test_check(instance, dd_run_check, aggregator, tag_condition, base_tags):
check = ArangodbCheck('arangodb', {}, [instance])
def mock_requests_get(url, *args, **kwargs):
fixture = url.rsplit('/', 1)[-1]
return MockResponse(file_path=os.path.join(os.path.dirname(__file__), 'fixtures', tag_condition, fixture))
with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True):
dd_run_check(check)
aggregator.assert_service_check(
'arangodb.openmetrics.health',
ArangodbCheck.OK,
count=1,
tags=['endpoint:http://localhost:8529/_admin/metrics/v2'],
)
aggregator.assert_metrics_using_metadata(get_metadata_metrics())
for metric in METRICS:
aggregator.assert_metric(metric)
for tag in base_tags:
aggregator.assert_metric_has_tag(metric, tag)
aggregator.assert_all_metrics_covered()
@pytest.mark.parametrize(
'side_effect, log_message',
[
pytest.param(
HTTPError, "Unable to get server foo, skipping `server_foo` tag.", id="HTTPError getting server tag"
),
pytest.param(
Exception,
"Unable to query `http://localhost:8529/test_endpoint/foo` to collect `server_foo` tag, received error:",
id="Exception getting server tag",
),
],
)
def test_get_server_tag(instance, caplog, side_effect, log_message):
caplog.clear()
check = ArangodbCheck('arangodb', {}, [instance])
with mock.patch("datadog_checks.base.utils.http.RequestsWrapper.get", side_effect=side_effect):
caplog.set_level(logging.DEBUG)
check.get_server_tag('foo', '/test_endpoint/foo')
assert log_message in caplog.text
@pytest.mark.parametrize(
'server_tags, args',
[
pytest.param([None, None], [], id="No server tags returned"),
pytest.param(
['server_mode:foo', 'server_id:bar'], ['server_mode:foo', 'server_id:bar'], id="Server tags returned"
),
],
)
def test_refresh_scrapers(instance, server_tags, args):
check = ArangodbCheck('arangodb', {}, [instance])
with mock.patch("datadog_checks.arangodb.check.ArangodbCheck.get_server_tag") as mock_get_server_tag:
mock_get_server_tag.side_effect = server_tags
check.set_dynamic_tags = mock.MagicMock()
check.refresh_scrapers()
check.set_dynamic_tags.assert_called_once_with(*args)
|
[
"datadog_checks.arangodb.ArangodbCheck",
"os.path.dirname",
"mock.patch",
"pytest.param",
"pytest.raises",
"datadog_checks.dev.utils.get_metadata_metrics",
"mock.MagicMock"
] |
[((504, 562), 'datadog_checks.arangodb.ArangodbCheck', 'ArangodbCheck', (['"""arangodb"""', '{}', '[instance_invalid_endpoint]'], {}), "('arangodb', {}, [instance_invalid_endpoint])\n", (517, 562), False, 'from datadog_checks.arangodb import ArangodbCheck\n'), ((1706, 1747), 'datadog_checks.arangodb.ArangodbCheck', 'ArangodbCheck', (['"""arangodb"""', '{}', '[instance]'], {}), "('arangodb', {}, [instance])\n", (1719, 1747), False, 'from datadog_checks.arangodb import ArangodbCheck\n'), ((3071, 3112), 'datadog_checks.arangodb.ArangodbCheck', 'ArangodbCheck', (['"""arangodb"""', '{}', '[instance]'], {}), "('arangodb', {}, [instance])\n", (3084, 3112), False, 'from datadog_checks.arangodb import ArangodbCheck\n'), ((3703, 3744), 'datadog_checks.arangodb.ArangodbCheck', 'ArangodbCheck', (['"""arangodb"""', '{}', '[instance]'], {}), "('arangodb', {}, [instance])\n", (3716, 3744), False, 'from datadog_checks.arangodb import ArangodbCheck\n'), ((572, 596), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (585, 596), False, 'import pytest\n'), ((1964, 2036), 'mock.patch', 'mock.patch', (['"""requests.get"""'], {'side_effect': 'mock_requests_get', 'autospec': '(True)'}), "('requests.get', side_effect=mock_requests_get, autospec=True)\n", (1974, 2036), False, 'import mock\n'), ((2304, 2326), 'datadog_checks.dev.utils.get_metadata_metrics', 'get_metadata_metrics', ([], {}), '()\n', (2324, 2326), False, 'from datadog_checks.dev.utils import get_metadata_metrics\n'), ((825, 985), 'pytest.param', 'pytest.param', (['"""valid_id_mode"""', "['endpoint:http://localhost:8529/_admin/metrics/v2', 'server_mode:default',\n 'server_id:1']"], {'id': '"""valid id and valid mode"""'}), "('valid_id_mode', [\n 'endpoint:http://localhost:8529/_admin/metrics/v2',\n 'server_mode:default', 'server_id:1'], id='valid id and valid mode')\n", (837, 985), False, 'import pytest\n'), ((1033, 1181), 'pytest.param', 'pytest.param', (['"""invalid_mode_valid_id"""', "['endpoint:http://localhost:8529/_admin/metrics/v2', 'server_id:1']"], {'id': '"""invalid mode but valid id"""'}), "('invalid_mode_valid_id', [\n 'endpoint:http://localhost:8529/_admin/metrics/v2', 'server_id:1'], id=\n 'invalid mode but valid id')\n", (1045, 1181), False, 'import pytest\n'), ((1228, 1383), 'pytest.param', 'pytest.param', (['"""valid_mode_invalid_id"""', "['endpoint:http://localhost:8529/_admin/metrics/v2', 'server_mode:default']"], {'id': '"""valid mode but invalid id"""'}), "('valid_mode_invalid_id', [\n 'endpoint:http://localhost:8529/_admin/metrics/v2',\n 'server_mode:default'], id='valid mode but invalid id')\n", (1240, 1383), False, 'import pytest\n'), ((1431, 1568), 'pytest.param', 'pytest.param', (['"""invalid_mode_invalid_id"""', "['endpoint:http://localhost:8529/_admin/metrics/v2']"], {'id': '"""invalid mode and invalid id"""'}), "('invalid_mode_invalid_id', [\n 'endpoint:http://localhost:8529/_admin/metrics/v2'], id=\n 'invalid mode and invalid id')\n", (1443, 1568), False, 'import pytest\n'), ((3122, 3215), 'mock.patch', 'mock.patch', (['"""datadog_checks.base.utils.http.RequestsWrapper.get"""'], {'side_effect': 'side_effect'}), "('datadog_checks.base.utils.http.RequestsWrapper.get',\n side_effect=side_effect)\n", (3132, 3215), False, 'import mock\n'), ((2603, 2726), 'pytest.param', 'pytest.param', (['HTTPError', '"""Unable to get server foo, skipping `server_foo` tag."""'], {'id': '"""HTTPError getting server tag"""'}), "(HTTPError,\n 'Unable to get server foo, skipping `server_foo` tag.', id=\n 'HTTPError getting server tag')\n", (2615, 2726), False, 'import pytest\n'), ((2749, 2922), 'pytest.param', 'pytest.param', (['Exception', '"""Unable to query `http://localhost:8529/test_endpoint/foo` to collect `server_foo` tag, received error:"""'], {'id': '"""Exception getting server tag"""'}), "(Exception,\n 'Unable to query `http://localhost:8529/test_endpoint/foo` to collect `server_foo` tag, received error:'\n , id='Exception getting server tag')\n", (2761, 2922), False, 'import pytest\n'), ((3754, 3826), 'mock.patch', 'mock.patch', (['"""datadog_checks.arangodb.check.ArangodbCheck.get_server_tag"""'], {}), "('datadog_checks.arangodb.check.ArangodbCheck.get_server_tag')\n", (3764, 3826), False, 'import mock\n'), ((3938, 3954), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (3952, 3954), False, 'import mock\n'), ((3417, 3477), 'pytest.param', 'pytest.param', (['[None, None]', '[]'], {'id': '"""No server tags returned"""'}), "([None, None], [], id='No server tags returned')\n", (3429, 3477), False, 'import pytest\n'), ((3487, 3606), 'pytest.param', 'pytest.param', (["['server_mode:foo', 'server_id:bar']", "['server_mode:foo', 'server_id:bar']"], {'id': '"""Server tags returned"""'}), "(['server_mode:foo', 'server_id:bar'], ['server_mode:foo',\n 'server_id:bar'], id='Server tags returned')\n", (3499, 3606), False, 'import pytest\n'), ((1890, 1915), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1905, 1915), False, 'import os\n')]
|
from programmingalpha.DataSet.DBLoader import MongoStackExchange
import programmingalpha
from programmingalpha.Utility.TextPreprocessing import PreprocessPostContent
import json
import logging
import argparse
import tqdm
import multiprocessing
logging.basicConfig(format = '%(asctime)s - %(levelname)s - %(name)s - %(message)s',
datefmt = '%m/%d/%Y %H:%M:%S',
level = logging.INFO)
logger = logging.getLogger(__name__)
def init(questionsData_G,answersData_G,indexData_G,copy=True):
global preprocessor
preprocessor=PreprocessPostContent()
global questionsData,answersData,indexData
if copy:
questionsData=questionsData_G.copy()
answersData=answersData_G.copy()
indexData=indexData_G.copy()
else:
questionsData=questionsData_G
answersData=answersData_G
indexData=indexData_G
logger.info("process {} init".format(multiprocessing.current_process()))
def fetchQuestionData(q_ids_set):
questionsData={}
needed_answerIds=set()
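    # select questions that either have an accepted answer and at least 3 favorites, or at least args.answerNum answers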
query={
"$or":[
{"AcceptedAnswerId":{"$exists":True,"$ne":''},"FavoriteCount":{"$gte":3}},
{"AnswerCount":{"$gte":args.answerNum}},
]
}
for question in tqdm.tqdm(docDB.questions.find(query).batch_size(args.batch_size),desc="loading questions"):
Id=question["Id"]
if Id not in q_ids_set:
continue
del question["_id"]
questionsData[Id]={"Title":question["Title"],"Body":question["Body"],"AcceptedAnswerId":question["AcceptedAnswerId"]}
needed_answerIds.add(question["AcceptedAnswerId"])
logger.info("loaded: questions({})".format(len(questionsData)))
return questionsData, needed_answerIds
def fetchAnswerData(ansIdxGlobal,questionsDataGlobal):
answersData={}
for ans in tqdm.tqdm(docDB.answers.find().batch_size(args.batch_size),desc="loading answers"):
Id=ans["Id"]
if Id not in ansIdxGlobal or ans["ParentId"] not in questionsDataGlobal:
continue
answersData[Id]={"Body":ans["Body"],"Score":ans["Score"]}
logger.info("loaded: answers({})".format(len(answersData)))
return answersData
def fetchIndexData(questionDataGlobal):
indexData={}
for indexer in tqdm.tqdm(docDB.stackdb["QAIndexer"].find().batch_size(args.batch_size),desc="loading indexers"):
Id=indexer["Id"]
if Id not in questionDataGlobal:
continue
del indexer["_id"]
indexData[Id]=indexer
logger.info("loaded: indexer({})".format(len(indexData)))
return indexData
#generate Core
def _getBestAnswers(q_id,K):
answers=[]
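    # prefer the accepted answer, then fill with the highest-scored answers from the question's answer index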
if "AcceptedAnswerId" in questionsData[q_id]:
ans_id=questionsData[q_id]["AcceptedAnswerId"]
if ans_id in answersData:
answer=answersData[ans_id]
K-=1
ans_idx=indexData[q_id]["Answers"]
scored=[]
for id in ans_idx:
if id in answersData:
scored.append((id,answersData[id]["Score"]))
if scored:
scored.sort(key=lambda x:x[1],reverse=True)
for i in range(min(K-1,len(scored))):
id=scored[i][0]
answers.append(answersData[id])
if K<args.answerNum:
answers=[answer]+answers
return answers
def _getPreprocess(txt):
txt_processed=preprocessor.getPlainTxt(txt)
if len(" ".join(txt_processed).split())<20:
return None
return txt_processed
def _genCore(distances):
#try:
q_id=distances["id"]
#get question
if q_id not in questionsData:
return None
question=questionsData[q_id]
title=question["Title"]
body=question["Body"]
question =_getPreprocess(body)
if not question:
return None
question=[title]+question
#get answer
answer=_getBestAnswers(q_id, K=args.answerNum)
if not answer:
return None
answer=_getPreprocess(answer[0]["Body"])
if not answer:
return None
#get context
relative_q_ids=[]
dists=distances["distances"]
for id in dists:
if id not in questionsData:
continue
if len(relative_q_ids)>=10:
break
if dists[id]==1:
relative_q_ids.append(id)
elif dists[id]==0:
relative_q_ids.insert(0,id)
else:
pass
if len(relative_q_ids)==0:
return None
context=[]
for q_id in relative_q_ids:
ans=_getBestAnswers(q_id,args.answerNum)
if not ans:
continue
context.extend(ans)
if len(context)==0:
#logger.info("due to none context")
return None
context.sort(key=lambda ans:ans["Score"],reverse=True)
contexts=[]
for txt in context:
txt=_getPreprocess(txt["Body"])
if not txt:
continue
contexts.extend(txt)
if len(contexts)==0:
#logger.info("due to none context")
return None
record={"question":question,"context":contexts,"answer":answer}
return record
#except :
# logger.warning("except triggered for distance data: {}".format(distances))
# return None
def generateContextAnswerCorpusParallel(distanceData,questionsDataGlobal,answersDataGlobal,indexDataGlobal):
cache=[]
batch_size=args.batch_size
batches=[distanceData[i:i+batch_size] for i in range(0,len(distanceData),batch_size)]
workers=multiprocessing.Pool(args.workers,initializer=init,
initargs=(questionsDataGlobal,answersDataGlobal,indexDataGlobal)
)
with open(programmingalpha.DataPath+"Corpus/"+args.db.lower()+"-context.json","w") as f:
for batch_links in tqdm.tqdm(batches,desc="processing documents"):
for record in workers.map(_genCore,batch_links):
if record is not None:
cache.append(json.dumps(record)+"\n")
f.writelines(cache)
cache.clear()
workers.close()
workers.join()
def generateContextAnswerCorpus(distanceData,questionsDataGlobal,answersDataGlobal,indexDataGlobal):
cache=[]
init(questionsDataGlobal,answersDataGlobal,indexDataGlobal,copy=False)
with open(programmingalpha.DataPath+"Corpus/"+args.db.lower()+"-context.json","w") as f:
for link in tqdm.tqdm(distanceData,desc="processing documents"):
record =_genCore(link)
if record is not None:
cache.append(json.dumps(record)+"\n")
if len(cache)>args.batch_size:
f.writelines(cache)
cache.clear()
if len(cache)>0:
f.writelines(cache)
cache.clear()
def main():
logger.info("loading distance data")
distance_file=programmingalpha.DataPath+"linkData/"+dbName.lower()+'-2graph.json'
distance_data=[]
q_ids_set=set()
with open(distance_file,"r") as f:
for line in f:
path=json.loads(line)
q_ids_set.add(path["id"])
q_ids_set.update(path["distances"])
distance_data.append(path)
logger.info("loaded {} links data".format(len(distance_data)))
questionsDataGlobal, ansIdxGlobal=fetchQuestionData(q_ids_set)
answersDataGlobal=fetchAnswerData(ansIdxGlobal,questionsDataGlobal.keys())
indexerDataGlobal=fetchIndexData(questionsDataGlobal.keys())
distance_dataNew=[]
for distance in distance_data:
id=distance["id"]
if len(distance["distances"])==0:
continue
if id not in questionsDataGlobal:
continue
new_distance={"id":int(id),"distances":{}}
for k,v in distance["distances"].items():
k=int(k)
v=int(v)
new_distance["distances"][k]=v
distance_dataNew.append(new_distance)
logger.info("finally loaded {} links data".format(len(distance_dataNew)))
generateContextAnswerCorpusParallel(distance_dataNew,questionsDataGlobal,answersDataGlobal,indexerDataGlobal)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--batch_size', type=int, default=100)
parser.add_argument('--db', type=str, default="crossvalidated")
parser.add_argument('--lose_rate', type=float, default=0.5)
parser.add_argument("--answerNum",type=int,default=5)
parser.add_argument('--workers', type=int, default=32)
args = parser.parse_args()
docDB=MongoStackExchange(host='10.1.1.9',port=50000)
dbName=args.db
docDB.useDB(dbName)
logger.info("processing db data: {}".format(dbName))
main()
|
[
"tqdm.tqdm",
"multiprocessing.current_process",
"argparse.ArgumentParser",
"logging.basicConfig",
"json.loads",
"programmingalpha.DataSet.DBLoader.MongoStackExchange",
"json.dumps",
"programmingalpha.Utility.TextPreprocessing.PreprocessPostContent",
"multiprocessing.Pool",
"logging.getLogger"
] |
[((254, 397), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(asctime)s - %(levelname)s - %(name)s - %(message)s"""', 'datefmt': '"""%m/%d/%Y %H:%M:%S"""', 'level': 'logging.INFO'}), "(format=\n '%(asctime)s - %(levelname)s - %(name)s - %(message)s', datefmt=\n '%m/%d/%Y %H:%M:%S', level=logging.INFO)\n", (273, 397), False, 'import logging\n'), ((446, 473), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (463, 473), False, 'import logging\n'), ((587, 610), 'programmingalpha.Utility.TextPreprocessing.PreprocessPostContent', 'PreprocessPostContent', ([], {}), '()\n', (608, 610), False, 'from programmingalpha.Utility.TextPreprocessing import PreprocessPostContent\n'), ((5902, 6027), 'multiprocessing.Pool', 'multiprocessing.Pool', (['args.workers'], {'initializer': 'init', 'initargs': '(questionsDataGlobal, answersDataGlobal, indexDataGlobal)'}), '(args.workers, initializer=init, initargs=(\n questionsDataGlobal, answersDataGlobal, indexDataGlobal))\n', (5922, 6027), False, 'import multiprocessing\n'), ((8644, 8669), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (8667, 8669), False, 'import argparse\n'), ((9038, 9085), 'programmingalpha.DataSet.DBLoader.MongoStackExchange', 'MongoStackExchange', ([], {'host': '"""10.1.1.9"""', 'port': '(50000)'}), "(host='10.1.1.9', port=50000)\n", (9056, 9085), False, 'from programmingalpha.DataSet.DBLoader import MongoStackExchange\n'), ((6213, 6260), 'tqdm.tqdm', 'tqdm.tqdm', (['batches'], {'desc': '"""processing documents"""'}), "(batches, desc='processing documents')\n", (6222, 6260), False, 'import tqdm\n'), ((6856, 6908), 'tqdm.tqdm', 'tqdm.tqdm', (['distanceData'], {'desc': '"""processing documents"""'}), "(distanceData, desc='processing documents')\n", (6865, 6908), False, 'import tqdm\n'), ((961, 994), 'multiprocessing.current_process', 'multiprocessing.current_process', ([], {}), '()\n', (992, 994), False, 'import multiprocessing\n'), ((7507, 7523), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (7517, 7523), False, 'import json\n'), ((7011, 7029), 'json.dumps', 'json.dumps', (['record'], {}), '(record)\n', (7021, 7029), False, 'import json\n'), ((6401, 6419), 'json.dumps', 'json.dumps', (['record'], {}), '(record)\n', (6411, 6419), False, 'import json\n')]
|
from Code.UI.splash_layout import Ui_MainWindow
from Code.UI.label import LabelUI
#from ui.export_ui import ExportUI
#from ui.questionnaire_ui import QuestionnaireUI
from PyQt5 import QtCore
import sys, traceback
if QtCore.QT_VERSION >= 0x50501:
def excepthook(type_, value, traceback_):
traceback.print_exception(type_, value, traceback_)
QtCore.qFatal('')
sys.excepthook = excepthook
class MainWindowUI(Ui_MainWindow):
def __init__(self, mainwindow):
super(MainWindowUI, self).__init__()
self.mainwindow = mainwindow
self.setupUi(mainwindow)
self.pushButton_ReportCases.clicked.connect(self.run_reportcases_ui)
#self.pushButton_ExportData.clicked.connect(self.run_exportcases_ui)
#self.pushButton_HumanLabelling.clicked.connect(self.run_questionnaire_ui)
def run_reportcases_ui(self):
print("Running Labeller")
report_window = LabelUI(self.mainwindow)
# def run_exportcases_ui(self):
# print("Running Exporter")
# export_window = ExportUI(self.mainwindow)
#
# def run_questionnaire_ui(self):
# print("Running Questionnaire")
# questionnaire_window = QuestionnaireUI(self.mainwindow)
|
[
"PyQt5.QtCore.qFatal",
"Code.UI.label.LabelUI",
"traceback.print_exception"
] |
[((301, 352), 'traceback.print_exception', 'traceback.print_exception', (['type_', 'value', 'traceback_'], {}), '(type_, value, traceback_)\n', (326, 352), False, 'import sys, traceback\n'), ((361, 378), 'PyQt5.QtCore.qFatal', 'QtCore.qFatal', (['""""""'], {}), "('')\n", (374, 378), False, 'from PyQt5 import QtCore\n'), ((924, 948), 'Code.UI.label.LabelUI', 'LabelUI', (['self.mainwindow'], {}), '(self.mainwindow)\n', (931, 948), False, 'from Code.UI.label import LabelUI\n')]
|
#------------------------------------------------------------------------------
# query_one.py (Section 3.2)
#------------------------------------------------------------------------------
#------------------------------------------------------------------------------
# Copyright 2017, 2018, Oracle and/or its affiliates. All rights reserved.
#------------------------------------------------------------------------------
from __future__ import print_function
import cx_Oracle
import db_config
con = cx_Oracle.connect(db_config.user, db_config.pw, db_config.dsn)
cur = con.cursor()
cur.execute("select * from dept order by deptno")
row = cur.fetchone()
print(row)
row = cur.fetchone()
print(row)
|
[
"cx_Oracle.connect"
] |
[((506, 568), 'cx_Oracle.connect', 'cx_Oracle.connect', (['db_config.user', 'db_config.pw', 'db_config.dsn'], {}), '(db_config.user, db_config.pw, db_config.dsn)\n', (523, 568), False, 'import cx_Oracle\n')]
|
"""
Copyright 2021 Objectiv B.V.
"""
from typing import List
import pytest
from bach import get_series_type_from_dtype
from bach.expression import Expression
from bach.partitioning import GroupBy
from tests.unit.bach.util import get_fake_df, FakeEngine
def test_equals(dialect):
def get_df(index_names: List[str], data_names: List[str]):
return get_fake_df(dialect=dialect, index_names=index_names, data_names=data_names)
left = get_df(['a'], ['b', 'c'])
right = get_df(['a'], ['b', 'c'])
result = left['b'].equals(left['b'])
# assert result is a boolean (for e.g. '==') this is not the case
assert result is True
assert left['b'].equals(left['b'])
assert left['b'].equals(right['b'])
assert not left['b'].equals(left['c'])
assert not left['b'].equals(right['c'])
left = get_df(['a', 'x'], ['b', 'c'])
right = get_df(['a'], ['b', 'c'])
assert left['b'].equals(left['b'])
assert not left['b'].equals(right['b'])
assert not left['b'].equals(left['c'])
assert not left['b'].equals(right['c'])
# different order in the index
left = get_df(['a', 'b'], ['c'])
right = get_df(['b', 'a'], ['c'])
assert not left['c'].equals(right['c'])
engine = left.engine
engine_other = FakeEngine(dialect=engine.dialect, url='sql://some_other_string')
int_type = get_series_type_from_dtype('int64')
float_type = get_series_type_from_dtype('float64')
expr_test = Expression.construct('test')
expr_other = Expression.construct('test::text')
sleft = int_type(engine=engine, base_node=None, index={}, name='test',
expression=expr_test, group_by=None, sorted_ascending=None, index_sorting=[],
instance_dtype='int64')
sright = int_type(engine=engine, base_node=None, index={}, name='test',
expression=expr_test, group_by=None, sorted_ascending=None, index_sorting=[],
instance_dtype='int64')
assert sleft.equals(sright)
# different expression
sright = int_type(engine=engine, base_node=None, index={}, name='test',
expression=expr_other, group_by=None, sorted_ascending=None, index_sorting=[],
instance_dtype='int64')
assert not sleft.equals(sright)
# different name
sright = int_type(engine=engine, base_node=None, index={}, name='test_2',
expression=expr_test, group_by=None, sorted_ascending=None, index_sorting=[],
instance_dtype='int64')
assert not sleft.equals(sright)
# different base_node
sright = int_type(engine=engine, base_node='test', index={}, name='test',
expression=expr_test, group_by=None, sorted_ascending=None, index_sorting=[],
instance_dtype='int64')
assert not sleft.equals(sright)
# different engine
sright = int_type(engine=engine_other, base_node=None, index={}, name='test',
expression=expr_test, group_by=None, sorted_ascending=None, index_sorting=[],
instance_dtype='int64')
assert not sleft.equals(sright)
# different type
sright = float_type(engine=engine, base_node=None, index={}, name='test',
expression=expr_test, group_by=None, sorted_ascending=None, index_sorting=[],
instance_dtype='float64')
assert not sleft.equals(sright)
# different group_by
sright = int_type(engine=engine, base_node=None, index={}, name='test', expression=expr_test,
group_by=GroupBy(group_by_columns=[]), sorted_ascending=None, index_sorting=[],
instance_dtype='int64')
assert not sleft.equals(sright)
# different sorting
sright = int_type(engine=engine, base_node=None, index={}, name='test', expression=expr_test,
group_by=None, sorted_ascending=True, index_sorting=[], instance_dtype='int64')
assert not sleft.equals(sright)
sright = sright.copy_override(sorted_ascending=None)
assert sleft.equals(sright)
index_series = sleft
sleft = int_type(engine=engine, base_node=None, index={'a': index_series}, name='test',
expression=expr_test, group_by=None, sorted_ascending=None, index_sorting=[],
instance_dtype='int64')
sright = int_type(engine=engine, base_node=None, index={'a': index_series}, name='test',
expression=expr_test, group_by=None, sorted_ascending=None, index_sorting=[],
instance_dtype='int64')
assert sleft.equals(sright)
sright = sright.copy_override(index_sorting=[True])
assert not sleft.equals(sright)
@pytest.mark.skip_postgres
def test_equals_instance_dtype(dialect):
def get_df(index_names: List[str], data_names: List[str]):
return get_fake_df(dialect=dialect, index_names=index_names, data_names=data_names)
left = get_df(['a'], ['b', 'c'])
engine = left.engine
expr_test = Expression.construct('test')
dict_type = get_series_type_from_dtype('dict')
# Currently we only have bigquery types that actual use the instance_dtype. So skip postgres here.
sleft = dict_type(engine=engine, base_node=None, index={}, name='test',
expression=expr_test, group_by=None, sorted_ascending=None, index_sorting=[],
instance_dtype={'a': 'int64', 'b': ['bool']})
sright = sleft.copy_override()
assert sleft.equals(sright)
sright = sleft.copy_override(instance_dtype={'a': 'float64', 'b': ['bool']})
assert not sleft.equals(sright)
|
[
"tests.unit.bach.util.get_fake_df",
"bach.get_series_type_from_dtype",
"tests.unit.bach.util.FakeEngine",
"bach.expression.Expression.construct",
"bach.partitioning.GroupBy"
] |
[((1270, 1335), 'tests.unit.bach.util.FakeEngine', 'FakeEngine', ([], {'dialect': 'engine.dialect', 'url': '"""sql://some_other_string"""'}), "(dialect=engine.dialect, url='sql://some_other_string')\n", (1280, 1335), False, 'from tests.unit.bach.util import get_fake_df, FakeEngine\n'), ((1352, 1387), 'bach.get_series_type_from_dtype', 'get_series_type_from_dtype', (['"""int64"""'], {}), "('int64')\n", (1378, 1387), False, 'from bach import get_series_type_from_dtype\n'), ((1405, 1442), 'bach.get_series_type_from_dtype', 'get_series_type_from_dtype', (['"""float64"""'], {}), "('float64')\n", (1431, 1442), False, 'from bach import get_series_type_from_dtype\n'), ((1460, 1488), 'bach.expression.Expression.construct', 'Expression.construct', (['"""test"""'], {}), "('test')\n", (1480, 1488), False, 'from bach.expression import Expression\n'), ((1506, 1540), 'bach.expression.Expression.construct', 'Expression.construct', (['"""test::text"""'], {}), "('test::text')\n", (1526, 1540), False, 'from bach.expression import Expression\n'), ((5034, 5062), 'bach.expression.Expression.construct', 'Expression.construct', (['"""test"""'], {}), "('test')\n", (5054, 5062), False, 'from bach.expression import Expression\n'), ((5079, 5113), 'bach.get_series_type_from_dtype', 'get_series_type_from_dtype', (['"""dict"""'], {}), "('dict')\n", (5105, 5113), False, 'from bach import get_series_type_from_dtype\n'), ((361, 437), 'tests.unit.bach.util.get_fake_df', 'get_fake_df', ([], {'dialect': 'dialect', 'index_names': 'index_names', 'data_names': 'data_names'}), '(dialect=dialect, index_names=index_names, data_names=data_names)\n', (372, 437), False, 'from tests.unit.bach.util import get_fake_df, FakeEngine\n'), ((4878, 4954), 'tests.unit.bach.util.get_fake_df', 'get_fake_df', ([], {'dialect': 'dialect', 'index_names': 'index_names', 'data_names': 'data_names'}), '(dialect=dialect, index_names=index_names, data_names=data_names)\n', (4889, 4954), False, 'from tests.unit.bach.util import get_fake_df, FakeEngine\n'), ((3602, 3630), 'bach.partitioning.GroupBy', 'GroupBy', ([], {'group_by_columns': '[]'}), '(group_by_columns=[])\n', (3609, 3630), False, 'from bach.partitioning import GroupBy\n')]
|
"""
Copyright (c) 2018 <NAME> <<EMAIL>>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import logging
import json
from mpikat.core.ip_manager import ip_range_from_stream
log = logging.getLogger('mpikat.apsuse_config_manager')
DEFAULT_DATA_RATE_PER_WORKER = 20e9 # bits / s
DUMMY_FBF_CONFIG = {
"coherent-beam-multicast-groups":"spead://172.16.17.32+15:7147",
"coherent-beam-multicast-groups-data-rate": 7e9,
"incoherent-beam-multicast-group": "spead://192.168.127.12:7147",
"incoherent-beam-multicast-group-data-rate": 150e6,
}
HOST_TO_LEAF_MAP = {
"apscn00.mpifr-be.mkat.karoo.kat.ac.za": 1,
"apscn01.mpifr-be.mkat.karoo.kat.ac.za": 1,
"apscn02.mpifr-be.mkat.karoo.kat.ac.za": 1,
"apscn03.mpifr-be.mkat.karoo.kat.ac.za": 1,
"apscn04.mpifr-be.mkat.karoo.kat.ac.za": 0,
"apscn05.mpifr-be.mkat.karoo.kat.ac.za": 0,
"apscn06.mpifr-be.mkat.karoo.kat.ac.za": 0,
"apscn07.mpifr-be.mkat.karoo.kat.ac.za": 0,
}
class ApsConfigurationError(Exception):
pass
class ApsWorkerBandwidthExceeded(Exception):
pass
class ApsWorkerTotalBandwidthExceeded(Exception):
pass
class ApsWorkerConfig(object):
def __init__(self, total_bandwidth=DEFAULT_DATA_RATE_PER_WORKER):
log.debug("Created new apsuse worker config")
self._total_bandwidth = total_bandwidth
self._available_bandwidth = self._total_bandwidth
self._incoherent_groups = []
self._coherent_groups = []
self._incoherent_beams = []
self._coherent_beams = []
self._even = True
def set_even(self, even_odd):
self._even = even_odd
def can_use_host(self, hostname):
HOST_TO_LEAF_MAP[hostname] = int(self._even)
def add_incoherent_group(self, group, bandwidth):
if bandwidth > self._total_bandwidth:
log.debug("Adding group would exceed worker bandwidth")
raise ApsWorkerTotalBandwidthExceeded
if self._available_bandwidth < bandwidth:
log.debug("Adding group would exceed worker bandwidth")
raise ApsWorkerBandwidthExceeded
else:
log.debug("Adding group {} to worker".format(group))
self._incoherent_groups.append(group)
self._available_bandwidth -= bandwidth
def add_coherent_group(self, group, bandwidth):
if self._available_bandwidth < bandwidth:
log.debug("Adding group would exceed worker bandwidth")
raise ApsWorkerBandwidthExceeded
else:
self._coherent_groups.append((group))
log.debug("Adding group {} to worker".format(group))
self._available_bandwidth -= bandwidth
def data_rate(self):
return self._total_bandwidth - self._available_bandwidth
def coherent_groups(self):
return self._coherent_groups
def incoherent_groups(self):
return self._incoherent_groups
def coherent_beams(self):
return self._coherent_beams
def incoherent_beams(self):
return self._incoherent_beams
class ApsConfigGenerator(object):
def __init__(self, fbfuse_config, bandwidth_per_worker=DEFAULT_DATA_RATE_PER_WORKER):
self._fbfuse_config = fbfuse_config
self._bandwidth_per_worker = bandwidth_per_worker
self._incoherent_range = ip_range_from_stream(
self._fbfuse_config['incoherent-beam-multicast-group'])
self._incoherent_mcast_group_rate = (
self._fbfuse_config['incoherent-beam-multicast-group-data-rate'])
self._incoherent_groups = list(self._incoherent_range)
self._coherent_range = ip_range_from_stream(
self._fbfuse_config['coherent-beam-multicast-groups'])
self._coherent_mcast_group_rate = (
self._fbfuse_config['coherent-beam-multicast-groups-data-rate'])
self._coherent_groups = list(self._coherent_range)
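    # Greedily assign incoherent and coherent multicast groups to workers within each worker's bandwidth budget.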
def allocate_groups(self, servers):
configs = {}
final_configs = {}
for server in servers:
configs[server] = ApsWorkerConfig(self._bandwidth_per_worker)
while configs and (self._incoherent_groups or self._coherent_groups):
        for server in list(configs.keys()):  # iterate over a copy: workers are removed from configs below
if self._incoherent_groups:
group = self._incoherent_groups.pop(0)
try:
configs[server].add_incoherent_group(
group, self._incoherent_mcast_group_rate)
except (ApsWorkerTotalBandwidthExceeded, ApsWorkerBandwidthExceeded):
log.error("Incoherent beam mutlicast group ({} Gb/s) size exceeds data rate for one node ({} Gb/s)".format(
self._incoherent_mcast_group_rate/1e9,
configs[server]._total_bandwidth/1e9))
log.error("Incoherent beam data will not be captured")
else:
continue
if self._coherent_groups:
group = self._coherent_groups.pop(0)
try:
configs[server].add_coherent_group(group, self._coherent_mcast_group_rate)
except ApsWorkerTotalBandwidthExceeded:
log.error("Coherent beam mutlicast group ({} Gb/s) size exceeds data rate for one node ({} Gb/s)".format(
self._coherent_mcast_group_rate/1e9, configs[server]._total_bandwidth/1e9))
log.error("Coherent beam data will not be captured")
except ApsWorkerBandwidthExceeded:
self._coherent_groups.insert(0, group)
final_configs[server] = self._finalise_worker(configs[server], server)
del configs[server]
else:
continue
print(self._incoherent_groups, self._coherent_groups)
for server, config in configs.items():
final_configs[server] = self._finalise_worker(config, server)
return final_configs
def _finalise_worker(self, worker, server):
valid = False
for incoherent_group in worker.incoherent_groups():
valid = True
worker._incoherent_beams.append("ifbf00000")
for coherent_group in worker.coherent_groups():
valid = True
spead_formatted = "spead://{}:{}".format(str(coherent_group), self._coherent_range.port)
mapping = json.loads(self._fbfuse_config['coherent-beam-multicast-group-mapping'])
beam_idxs = mapping.get(spead_formatted, range(12))
worker._coherent_beams.extend(beam_idxs)
log.debug(("Worker {} config: coherent-groups: {},"
" coherent-beams: {}, incoherent-groups: {},"
" incoherent-beams: {},").format(
str(server), map(str, worker.coherent_groups()),
map(str, worker.coherent_beams()),
map(str, worker.incoherent_groups()),
map(str, worker.incoherent_beams())))
if valid:
return worker
else:
return None
def remaining_incoherent_groups(self):
return self._incoherent_groups
def remaining_coherent_groups(self):
return self._coherent_groups
|
[
"mpikat.core.ip_manager.ip_range_from_stream",
"json.loads",
"logging.getLogger"
] |
[((1158, 1207), 'logging.getLogger', 'logging.getLogger', (['"""mpikat.apsuse_config_manager"""'], {}), "('mpikat.apsuse_config_manager')\n", (1175, 1207), False, 'import logging\n'), ((4286, 4362), 'mpikat.core.ip_manager.ip_range_from_stream', 'ip_range_from_stream', (["self._fbfuse_config['incoherent-beam-multicast-group']"], {}), "(self._fbfuse_config['incoherent-beam-multicast-group'])\n", (4306, 4362), False, 'from mpikat.core.ip_manager import ip_range_from_stream\n'), ((4595, 4670), 'mpikat.core.ip_manager.ip_range_from_stream', 'ip_range_from_stream', (["self._fbfuse_config['coherent-beam-multicast-groups']"], {}), "(self._fbfuse_config['coherent-beam-multicast-groups'])\n", (4615, 4670), False, 'from mpikat.core.ip_manager import ip_range_from_stream\n'), ((7477, 7549), 'json.loads', 'json.loads', (["self._fbfuse_config['coherent-beam-multicast-group-mapping']"], {}), "(self._fbfuse_config['coherent-beam-multicast-group-mapping'])\n", (7487, 7549), False, 'import json\n')]
|
#!/Users/toma/python278i/bin/python
# -*- coding: utf-8 -*-
#
import MainWindow
import os
import platform
import sys
from PyQt4.QtGui import (QApplication, QIcon)
__version__ = "1.0.0"
def main():
app = QApplication(sys.argv)
app.setOrganizationName("tomacorp")
app.setOrganizationDomain("tomacorp.com")
app.setWindowIcon(QIcon(":/icon.png"))
w = MainWindow.Window()
w.show()
sys.exit(app.exec_())
if __name__ == "__main__":
main()
|
[
"MainWindow.Window",
"PyQt4.QtGui.QIcon",
"PyQt4.QtGui.QApplication"
] |
[((214, 236), 'PyQt4.QtGui.QApplication', 'QApplication', (['sys.argv'], {}), '(sys.argv)\n', (226, 236), False, 'from PyQt4.QtGui import QApplication, QIcon\n'), ((368, 387), 'MainWindow.Window', 'MainWindow.Window', ([], {}), '()\n', (385, 387), False, 'import MainWindow\n'), ((341, 360), 'PyQt4.QtGui.QIcon', 'QIcon', (['""":/icon.png"""'], {}), "(':/icon.png')\n", (346, 360), False, 'from PyQt4.QtGui import QApplication, QIcon\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021, Ontario Institute for Cancer Research
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Authors:
<NAME>
"""
import os
import sys
import json
import argparse
import hashlib
import uuid
import subprocess
import copy
from datetime import date
import re
import tarfile
workflow_full_name = {
'rna-seq-alignment': 'RNA Seq Alignment'
}
analysis_tools = {
'star': 'STAR',
'hisat2': 'HiSAT2'
}
data_type_mapping = {
#file_type: [dataCategory, dataType, [data_subtypes], [star analysis_tools], [hisat2 analysis_tools]]
'genome_aln': ['Sequencing Reads', 'Aligned Reads', ['Genome Alignment'], ['STAR'], ['HiSAT2']],
'transcriptome_aln': ['Sequencing Reads', 'Aligned Reads', ['Transcriptome Alignment'], ['STAR'], ['HiSAT2']],
'chimeric_aln': ['Sequencing Reads', 'Aligned Reads', ['Chimeric Alignment'], ['STAR'], ['HiSAT2']],
'splice_junctions': ['Transcriptome Profiling', 'Splice Junctions', [None], ['STAR'], ['HiSAT2']],
'fastqc': ['Quality Control Metrics', 'Sequencing QC', ['Read Group Metrics'], ['FastQC'], ['FastQC']],
'collectrnaseqmetrics': ['Quality Control Metrics', 'Aligned Reads QC', ['Alignment Metrics'], ['Picard:CollectRnaSeqMetrics'], ['Picard:CollectRnaSeqMetrics']],
'duplicates_metrics': ['Quality Control Metrics', 'Aligned Reads QC', ['Duplicates Metrics'], ['biobambam2:bammarkduplicates2'], ['biobambam2:bammarkduplicates2']],
'supplement': ['Supplement', 'Running Logs', [None], ['STAR'], ['HiSAT2']]
}
def calculate_size(file_path):
return os.stat(file_path).st_size
def calculate_md5(file_path):
md5 = hashlib.md5()
with open(file_path, 'rb') as f:
for chunk in iter(lambda: f.read(1024 * 1024), b''):
md5.update(chunk)
return md5.hexdigest()
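# Derive a filename-friendly read group id for every read group, appending a numeric suffix when the sanitized id collides with an existing one.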
def insert_filename_friendly_rg_id(metadata):
filename_friendly_rg_ids = set()
# let's loop it two times, first for the rg id actually doesn't need to convert
for rg in metadata['read_groups']:
submitter_read_group_id = rg['submitter_read_group_id']
filename_friendly_rg_id = "".join([ c if re.match(r"[a-zA-Z0-9\.\-_]", c) else "_" for c in submitter_read_group_id ])
if filename_friendly_rg_id == submitter_read_group_id: # no change, safe to use
rg['filename_friendly_rg_id'] = filename_friendly_rg_id
filename_friendly_rg_ids.add(filename_friendly_rg_id)
for rg in metadata['read_groups']:
submitter_read_group_id = rg['submitter_read_group_id']
filename_friendly_rg_id = "".join([ c if re.match(r"[a-zA-Z0-9\.\-_]", c) else "_" for c in submitter_read_group_id ])
if filename_friendly_rg_id == submitter_read_group_id: # no change, already covered
continue
if filename_friendly_rg_id in filename_friendly_rg_ids: # the converted new friendly ID conflicts with existing one
for i in range(len(metadata['read_groups'])):
if not '%s_%s' % (filename_friendly_rg_id, i+1) in filename_friendly_rg_ids:
filename_friendly_rg_id += '_%s' % str(i+1)
break
rg['filename_friendly_rg_id'] = filename_friendly_rg_id
filename_friendly_rg_ids.add(filename_friendly_rg_id)
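# Match a ubam FastQC tarball back to its read group by comparing the md5 embedded in the tar file name with one recomputed from each read group's metadata.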
def get_rg_id_from_ubam_qc(tar, metadata):
tar_basename = os.path.basename(tar) # TEST-PR.DO250122.SA610149.D0RE2_1_.6cae87bf9f05cdfaa4a26f2da625f3b2.lane.bam.fastqc.tgz
md5sum_from_filename = tar_basename.split('.')[-5]
if not re.match(r'^[a-f0-9]{32}$', md5sum_from_filename):
sys.exit('Error: ubam naming not expected %s' % tar_basename)
for rg in metadata.get("read_groups"):
rg_id_in_bam = rg.get("read_group_id_in_bam") if rg.get("read_group_id_in_bam") else rg.get("submitter_read_group_id")
seq_file_name = rg.get("file_r1")
bam_name = seq_file_name if seq_file_name.endswith('.bam') else ''
md5sum_from_metadata = hashlib.md5(("%s %s" % (bam_name, rg_id_in_bam)).encode('utf-8')).hexdigest()
if md5sum_from_metadata == md5sum_from_filename:
return rg.get("filename_friendly_rg_id"), rg.get("submitter_read_group_id")
# up to this point no match found, then something wrong
sys.exit('Error: unable to match ubam qc metric tar "%s" to read group id' % tar_basename)
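# Parse the per-library duplicates metrics table from the *.duplicates_metrics.txt file inside the metrics tarball.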
def get_dupmetrics(file_to_upload):
library = []
with tarfile.open(file_to_upload, 'r') as tar:
for member in tar.getmembers():
if member.name.endswith('.duplicates_metrics.txt'):
f = tar.extractfile(member)
cols_name = []
for r in f:
row = r.decode('utf-8')
if row.startswith('LIBRARY'):
cols_name = row.strip().split('\t')
continue
if cols_name:
if not row.strip(): break
metric = {}
cols = row.strip().split('\t')
for n, c in zip(cols_name, cols):
if n == "LIBRARY": metric.update({n: c})
elif '.' in c or 'e' in c: metric.update({n: float(c)})
else: metric.update({n: int(c)})
library.append(metric)
return library
def get_files_info(file_to_upload, date_str, seq_experiment_analysis_dict, aligner=None):
file_info = {
'fileSize': calculate_size(file_to_upload),
'fileMd5sum': calculate_md5(file_to_upload),
'fileAccess': 'controlled',
'info': {}
}
experimental_strategy = seq_experiment_analysis_dict['experiment']['experimental_strategy'].lower()
fname_sample_part = seq_experiment_analysis_dict['samples'][0]['sampleId']
aligner_or_rgid = aligner.lower() if aligner else None
submitter_rg_id = None
if re.match(r'^genome.merged.+?(cram|cram\.crai|bam|bam\.bai)$', file_to_upload):
file_type = 'genome_aln'
elif re.match(r'^transcriptome.merged.+?(cram|cram\.crai|bam|bam\.bai)$', file_to_upload):
file_type = 'transcriptome_aln'
elif re.match(r'^chimeric.merged.+?(cram|cram\.crai|bam|bam\.bai)$', file_to_upload):
file_type = 'chimeric_aln'
elif re.match(r'.+?\.fastqc\.tgz$', file_to_upload):
file_type = 'fastqc'
aligner_or_rgid, submitter_rg_id = get_rg_id_from_ubam_qc(file_to_upload, seq_experiment_analysis_dict)
elif re.match(r'.+?\.collectrnaseqmetrics\.tgz$', file_to_upload):
file_type = 'collectrnaseqmetrics'
elif re.match(r'.+?\.duplicates_metrics\.tgz$', file_to_upload):
file_type = 'duplicates_metrics'
elif re.match(r'.+?_SJ\.out\.tab$', file_to_upload):
file_type = 'splice_junctions'
elif re.match(r'.+?splicesites\.txt$', file_to_upload):
file_type = 'splice_junctions'
elif re.match(r'.+?supplement\.tgz$', file_to_upload) or re.match(r'.+?supplement\.tar.gz$', file_to_upload):
file_type = 'supplement'
else:
sys.exit('Error: unknown file type "%s"' % file_to_upload)
if file_type in ['fastqc', 'collectrnaseqmetrics', 'duplicates_metrics', 'aln_metrics', 'supplement']:
file_ext = 'tgz'
elif file_type in ['genome_aln', 'transcriptome_aln', 'chimeric_aln']:
if file_to_upload.endswith('.bam'):
file_ext = 'bam'
elif file_to_upload.endswith('.bam.bai'):
file_ext = 'bam.bai'
elif file_to_upload.endswith('.cram'):
file_ext = 'cram'
elif file_to_upload.endswith('.cram.crai'):
file_ext = 'cram.crai'
else:
sys.exit('Error: unknown aligned seq extention: %s' % file_to_upload)
elif file_type in ['splice_junctions']:
file_ext = 'txt'
else:
sys.exit('Error: unknown file type "%s"' % file_type)
# file naming patterns:
# pattern: <argo_study_id>.<argo_donor_id>.<argo_sample_id>.[rna-seq].<date>.<aligner|rg_id>.<file_type>.<file_ext>
# example: TEST-PR.DO250183.SA610229.rna-seq.20200319.star.genome_aln.cram
new_fname = '.'.join([
seq_experiment_analysis_dict['studyId'],
seq_experiment_analysis_dict['samples'][0]['donor']['donorId'],
fname_sample_part,
experimental_strategy,
date_str,
aligner_or_rgid,
file_type,
file_ext
])
file_info['fileName'] = new_fname
file_info['fileType'] = new_fname.split('.')[-1].upper()
file_info['info'] = {
'data_category': data_type_mapping[file_type][0],
'data_subtypes': data_type_mapping[file_type][2]
}
if not aligner:
file_info['info']['analysis_tools'] = ["FastQC"]
elif aligner.lower() == 'star':
file_info['info']['analysis_tools'] = data_type_mapping[file_type][3]
elif aligner.lower() == 'hisat2':
file_info['info']['analysis_tools'] = data_type_mapping[file_type][4]
if new_fname.endswith('.bai') or new_fname.endswith('.crai'):
file_info['dataType'] = 'Aligned Reads Index'
else:
file_info['dataType'] = data_type_mapping[file_type][1]
# extract info into payload
extra_info = {}
if new_fname.endswith('.tgz'):
tar = tarfile.open(file_to_upload)
for member in tar.getmembers():
if member.name.endswith('qc_metrics.json') or member.name.endswith('.extra_info.json'):
f = tar.extractfile(member)
extra_info = json.load(f)
else:
if not file_info['info'].get('files_in_tgz'): file_info['info']['files_in_tgz'] = []
file_info['info']['files_in_tgz'].append(os.path.basename(member.name))
# retrieve duplicates metrics from the file
if file_info['info']['data_subtypes'][0] == 'Duplicates Metrics':
extra_info['metrics'] = {
'libraries': get_dupmetrics(file_to_upload)
}
if file_info['info']['data_subtypes'][0] == 'Read Group Metrics':
extra_info['metrics'].update({'read_group_id': submitter_rg_id})
if extra_info:
extra_info.pop('tool', None)
file_info['info'].update(extra_info)
new_dir = 'out'
try:
os.mkdir(new_dir)
except FileExistsError:
pass
dst = os.path.join(os.getcwd(), new_dir, new_fname)
os.symlink(os.path.abspath(file_to_upload), dst)
return file_info
def get_sample_info(sample_list):
samples = copy.deepcopy(sample_list)
for sample in samples:
for item in ['info', 'sampleId', 'specimenId', 'donorId', 'studyId']:
sample.pop(item, None)
sample['specimen'].pop(item, None)
sample['donor'].pop(item, None)
return samples
def main():
"""
Python implementation of tool: payload-gen-rna-alignment
"""
parser = argparse.ArgumentParser(description='Tool: payload-gen-rna-alignment')
parser.add_argument("-f", "--files_to_upload", dest="files_to_upload", type=str, required=True,
nargs="+", help="Files to upload")
parser.add_argument("-a", "--seq_experiment_analysis", dest="seq_experiment_analysis", required=True,
help="Input analysis for sequencing experiment", type=str)
parser.add_argument("-t", "--analysis_type", dest="analysis_type", required=True, help="Specify analysis_type")
parser.add_argument("-l", "--aligner", dest="aligner", default=None, help="Provide RNA-Seq aligner if files_to_upload are generated from alignment results. Default=None")
parser.add_argument("-g", "--genome_annotation", dest="genome_annotation", default="GENCODE v38", help="RNA-Seq alignment genome annotation")
parser.add_argument("-b", "--genome_build", dest="genome_build", default="GRCh38_hla_decoy_ebv", help="RNA-Seq alignment genome build")
parser.add_argument("-w", "--wf_name", dest="wf_name", required=True, help="Workflow name")
parser.add_argument("-v", "--wf_version", dest="wf_version", required=True, help="Workflow version")
parser.add_argument("-r", "--wf_run", dest="wf_run", required=True, help="Workflow run ID")
parser.add_argument("-s", "--wf_session", dest="wf_session", required=True, help="Workflow session ID")
args = parser.parse_args()
with open(args.seq_experiment_analysis, 'r') as f:
seq_experiment_analysis_dict = json.load(f)
payload = {
'analysisType': {
'name': args.analysis_type
},
'studyId': seq_experiment_analysis_dict.get('studyId'),
'workflow': {
'workflow_name': workflow_full_name.get(args.wf_name, args.wf_name),
'workflow_version': args.wf_version,
'genome_build': args.genome_build,
'genome_annotation': args.genome_annotation,
'run_id': args.wf_run,
'session_id': args.wf_session,
'inputs': [
{
'analysis_type': 'sequencing_experiment',
'input_analysis_id': seq_experiment_analysis_dict.get('analysisId')
}
]
},
'files': [],
'samples': get_sample_info(seq_experiment_analysis_dict.get('samples')),
'experiment': seq_experiment_analysis_dict.get('experiment')
}
if "sequencing_alignment" in args.analysis_type:
payload['read_group_count'] = seq_experiment_analysis_dict.get('read_group_count')
payload['read_groups'] = copy.deepcopy(seq_experiment_analysis_dict.get('read_groups'))
# pass `info` dict from seq_experiment payload to new payload
if 'info' in seq_experiment_analysis_dict and isinstance(seq_experiment_analysis_dict['info'], dict):
payload['info'] = seq_experiment_analysis_dict['info']
if 'library_strategy' in payload['experiment']:
experimental_strategy = payload['experiment'].pop('library_strategy')
payload['experiment']['experimental_strategy'] = experimental_strategy
insert_filename_friendly_rg_id(seq_experiment_analysis_dict)
# get file of the payload
date_str = date.today().strftime("%Y%m%d")
for f in args.files_to_upload:
file_info = get_files_info(f, date_str, seq_experiment_analysis_dict, args.aligner)
payload['files'].append(file_info)
with open("%s.%s.payload.json" % (str(uuid.uuid4()), args.analysis_type), 'w') as f:
f.write(json.dumps(payload, indent=2))
if __name__ == "__main__":
main()
|
[
"os.mkdir",
"copy.deepcopy",
"hashlib.md5",
"os.path.abspath",
"argparse.ArgumentParser",
"os.stat",
"os.path.basename",
"os.getcwd",
"json.load",
"uuid.uuid4",
"re.match",
"datetime.date.today",
"json.dumps",
"tarfile.open",
"sys.exit"
] |
[((2255, 2268), 'hashlib.md5', 'hashlib.md5', ([], {}), '()\n', (2266, 2268), False, 'import hashlib\n'), ((3951, 3972), 'os.path.basename', 'os.path.basename', (['tar'], {}), '(tar)\n', (3967, 3972), False, 'import os\n'), ((4858, 4952), 'sys.exit', 'sys.exit', (['(\'Error: unable to match ubam qc metric tar "%s" to read group id\' %\n tar_basename)'], {}), '(\'Error: unable to match ubam qc metric tar "%s" to read group id\' %\n tar_basename)\n', (4866, 4952), False, 'import sys\n'), ((6543, 6621), 're.match', 're.match', (['"""^genome.merged.+?(cram|cram\\\\.crai|bam|bam\\\\.bai)$"""', 'file_to_upload'], {}), "('^genome.merged.+?(cram|cram\\\\.crai|bam|bam\\\\.bai)$', file_to_upload)\n", (6551, 6621), False, 'import re\n'), ((11194, 11220), 'copy.deepcopy', 'copy.deepcopy', (['sample_list'], {}), '(sample_list)\n', (11207, 11220), False, 'import copy\n'), ((11577, 11647), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Tool: payload-gen-rna-alignment"""'}), "(description='Tool: payload-gen-rna-alignment')\n", (11600, 11647), False, 'import argparse\n'), ((2186, 2204), 'os.stat', 'os.stat', (['file_path'], {}), '(file_path)\n', (2193, 2204), False, 'import os\n'), ((4130, 4178), 're.match', 're.match', (['"""^[a-f0-9]{32}$"""', 'md5sum_from_filename'], {}), "('^[a-f0-9]{32}$', md5sum_from_filename)\n", (4138, 4178), False, 'import re\n'), ((4189, 4250), 'sys.exit', 'sys.exit', (["('Error: ubam naming not expected %s' % tar_basename)"], {}), "('Error: ubam naming not expected %s' % tar_basename)\n", (4197, 4250), False, 'import sys\n'), ((5013, 5046), 'tarfile.open', 'tarfile.open', (['file_to_upload', '"""r"""'], {}), "(file_to_upload, 'r')\n", (5025, 5046), False, 'import tarfile\n'), ((6663, 6752), 're.match', 're.match', (['"""^transcriptome.merged.+?(cram|cram\\\\.crai|bam|bam\\\\.bai)$"""', 'file_to_upload'], {}), "('^transcriptome.merged.+?(cram|cram\\\\.crai|bam|bam\\\\.bai)$',\n file_to_upload)\n", (6671, 6752), False, 'import re\n'), ((10031, 10059), 'tarfile.open', 'tarfile.open', (['file_to_upload'], {}), '(file_to_upload)\n', (10043, 10059), False, 'import tarfile\n'), ((10956, 10973), 'os.mkdir', 'os.mkdir', (['new_dir'], {}), '(new_dir)\n', (10964, 10973), False, 'import os\n'), ((11037, 11048), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (11046, 11048), False, 'import os\n'), ((11085, 11116), 'os.path.abspath', 'os.path.abspath', (['file_to_upload'], {}), '(file_to_upload)\n', (11100, 11116), False, 'import os\n'), ((13104, 13116), 'json.load', 'json.load', (['f'], {}), '(f)\n', (13113, 13116), False, 'import json\n'), ((6797, 6882), 're.match', 're.match', (['"""^chimeric.merged.+?(cram|cram\\\\.crai|bam|bam\\\\.bai)$"""', 'file_to_upload'], {}), "('^chimeric.merged.+?(cram|cram\\\\.crai|bam|bam\\\\.bai)$', file_to_upload\n )\n", (6805, 6882), False, 'import re\n'), ((14809, 14821), 'datetime.date.today', 'date.today', ([], {}), '()\n', (14819, 14821), False, 'from datetime import date\n'), ((15116, 15145), 'json.dumps', 'json.dumps', (['payload'], {'indent': '(2)'}), '(payload, indent=2)\n', (15126, 15145), False, 'import json\n'), ((6921, 6968), 're.match', 're.match', (['""".+?\\\\.fastqc\\\\.tgz$"""', 'file_to_upload'], {}), "('.+?\\\\.fastqc\\\\.tgz$', file_to_upload)\n", (6929, 6968), False, 'import re\n'), ((8414, 8467), 'sys.exit', 'sys.exit', (['(\'Error: unknown file type "%s"\' % file_type)'], {}), '(\'Error: unknown file type "%s"\' % file_type)\n', (8422, 8467), False, 'import sys\n'), ((10255, 10267), 'json.load', 'json.load', 
(['f'], {}), '(f)\n', (10264, 10267), False, 'import json\n'), ((2745, 2778), 're.match', 're.match', (['"""[a-zA-Z0-9\\\\.\\\\-_]"""', 'c'], {}), "('[a-zA-Z0-9\\\\.\\\\-_]', c)\n", (2753, 2778), False, 'import re\n'), ((3200, 3233), 're.match', 're.match', (['"""[a-zA-Z0-9\\\\.\\\\-_]"""', 'c'], {}), "('[a-zA-Z0-9\\\\.\\\\-_]', c)\n", (3208, 3233), False, 'import re\n'), ((7115, 7176), 're.match', 're.match', (['""".+?\\\\.collectrnaseqmetrics\\\\.tgz$"""', 'file_to_upload'], {}), "('.+?\\\\.collectrnaseqmetrics\\\\.tgz$', file_to_upload)\n", (7123, 7176), False, 'import re\n'), ((10428, 10457), 'os.path.basename', 'os.path.basename', (['member.name'], {}), '(member.name)\n', (10444, 10457), False, 'import os\n'), ((7227, 7286), 're.match', 're.match', (['""".+?\\\\.duplicates_metrics\\\\.tgz$"""', 'file_to_upload'], {}), "('.+?\\\\.duplicates_metrics\\\\.tgz$', file_to_upload)\n", (7235, 7286), False, 'import re\n'), ((15055, 15067), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (15065, 15067), False, 'import uuid\n'), ((7335, 7382), 're.match', 're.match', (['""".+?_SJ\\\\.out\\\\.tab$"""', 'file_to_upload'], {}), "('.+?_SJ\\\\.out\\\\.tab$', file_to_upload)\n", (7343, 7382), False, 'import re\n'), ((8261, 8330), 'sys.exit', 'sys.exit', (["('Error: unknown aligned seq extention: %s' % file_to_upload)"], {}), "('Error: unknown aligned seq extention: %s' % file_to_upload)\n", (8269, 8330), False, 'import sys\n'), ((7429, 7478), 're.match', 're.match', (['""".+?splicesites\\\\.txt$"""', 'file_to_upload'], {}), "('.+?splicesites\\\\.txt$', file_to_upload)\n", (7437, 7478), False, 'import re\n'), ((7526, 7574), 're.match', 're.match', (['""".+?supplement\\\\.tgz$"""', 'file_to_upload'], {}), "('.+?supplement\\\\.tgz$', file_to_upload)\n", (7534, 7574), False, 'import re\n'), ((7578, 7629), 're.match', 're.match', (['""".+?supplement\\\\.tar.gz$"""', 'file_to_upload'], {}), "('.+?supplement\\\\.tar.gz$', file_to_upload)\n", (7586, 7629), False, 'import re\n'), ((7678, 7736), 'sys.exit', 'sys.exit', (['(\'Error: unknown file type "%s"\' % file_to_upload)'], {}), '(\'Error: unknown file type "%s"\' % file_to_upload)\n', (7686, 7736), False, 'import sys\n')]
|
# -*- coding: utf-8 -*-
"""Rebinning the PSF
This script rebins the given PSF and stores the rebinned PSFs in the specified directory.
"""
import os
import argparse
import rebinning_utils
def parse_args():
"""Parse command-line arguments
"""
parser = argparse.ArgumentParser()
parser.add_argument('psf_file_path', help='path to the fits file of the PSF to be rebinned')
parser.add_argument('--factor', default=4, dest='factor', type=int,
help='size of rebinning kernel in number of pixels')
parser.add_argument('--save_dir', default='rebinned_dir', dest='rebinned_dir', type=str,
help='directory in which the rebinned (non-drizzled) PSFs will be stored. If it does not exist, it will be created.')
args = parser.parse_args()
return args
def main():
args = parse_args()
input_psf = rebinning_utils.load_psf_map(args.psf_file_path)
if not os.path.exists(args.rebinned_dir):
os.makedirs(args.rebinned_dir)
_ = rebinning_utils.rebin_psf(input_psf, factor=args.factor, save_dir=args.rebinned_dir)
if __name__ == '__main__':
main()
|
[
"os.makedirs",
"argparse.ArgumentParser",
"rebinning_utils.load_psf_map",
"os.path.exists",
"rebinning_utils.rebin_psf"
] |
[((267, 292), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (290, 292), False, 'import argparse\n'), ((874, 922), 'rebinning_utils.load_psf_map', 'rebinning_utils.load_psf_map', (['args.psf_file_path'], {}), '(args.psf_file_path)\n', (902, 922), False, 'import rebinning_utils\n'), ((1016, 1105), 'rebinning_utils.rebin_psf', 'rebinning_utils.rebin_psf', (['input_psf'], {'factor': 'args.factor', 'save_dir': 'args.rebinned_dir'}), '(input_psf, factor=args.factor, save_dir=args.\n rebinned_dir)\n', (1041, 1105), False, 'import rebinning_utils\n'), ((934, 967), 'os.path.exists', 'os.path.exists', (['args.rebinned_dir'], {}), '(args.rebinned_dir)\n', (948, 967), False, 'import os\n'), ((977, 1007), 'os.makedirs', 'os.makedirs', (['args.rebinned_dir'], {}), '(args.rebinned_dir)\n', (988, 1007), False, 'import os\n')]
|
# -*- coding: utf-8 -*-
#####----------------------------------------------------------------#####
##### #####
#####    Usage tutorial / readme:                                    #####
##### https://cloud.tencent.com/document/product/583/47076 #####
##### #####
#####----------------------------------------------------------------#####
import os
import sys
import os.path
import zipfile
import patool
import logging
from qcloud_cos_v5 import CosConfig
from qcloud_cos_v5 import CosS3Client
from qcloud_cos_v5 import CosServiceError
reload(sys)
sys.setdefaultencoding('utf8')
os.environ['PATH'] = os.getenv("PATH")+":"+os.getcwd()
logging.basicConfig(level=logging.INFO, stream=sys.stdout)
region = os.getenv('targetRegion')
bucket_upload = os.getenv('targetBucket')
unpack_suffix=os.getenv('suffix')
target_dir=os.getenv('targetPrefix')
logger = logging.getLogger()
def run_extract(archive, outdir):
"""Extract files from archive(s)."""
try:
patool.extract_archive(archive, verbosity=False, interactive="--non-interactive", outdir=outdir)
except PatoolError as msg:
        logger.error("error extracting %s: %s" % (archive, msg))
class PatoolError(Exception):
pass
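# Expand environment variables and "~" in a path and normalize it to an absolute path.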
def _fullpath(x):
x = os.path.expandvars(x)
x = os.path.expanduser(x)
x = os.path.normpath(x)
x = os.path.abspath(x)
return x
class Archive(object):
'''
:param backend: ``auto``, ``patool`` or ``zipfile``
:param filename: path to archive file
'''
def __init__(self, filename, backend='auto'):
self.filename = _fullpath(filename)
self.backend = backend
def extractall_patool(self, directory):
logger.debug('starting backend patool')
try:
run_extract(self.filename, directory)
except PatoolError as msg:
logger.info("error extracting %s: %s", self.filename, msg)
def extractall_zipfile(self, directory):
logger.debug('starting backend zipfile')
zipfile.ZipFile(self.filename).extractall(directory)
def extractall(self, directory, auto_create_dir=False):
'''
:param directory: directory to extract to
:param auto_create_dir: auto create directory
'''
logger.debug('extracting %s into %s (backend=%s)', self.filename, directory, self.backend)
is_zipfile = zipfile.is_zipfile(self.filename)
directory = _fullpath(directory)
if not os.path.exists(self.filename):
raise ValueError(
'archive file does not exist:' + str(self.filename))
if not os.path.exists(directory):
if auto_create_dir:
os.makedirs(directory)
else:
raise ValueError('directory does not exist:' + str(directory))
if self.backend == 'auto':
if is_zipfile:
self.extractall_zipfile(directory)
else:
self.extractall_patool(directory)
if self.backend == 'zipfile':
if not is_zipfile:
raise ValueError('file is not zip file:' + str(self.filename))
self.extractall_zipfile(directory)
if self.backend == 'patool':
self.extractall_patool(directory)
def delete_local_file(src):
logger.info("delete files and folders")
if os.path.isfile(src):
try:
os.remove(src)
except:
pass
elif os.path.isdir(src):
for item in os.listdir(src):
itemsrc = os.path.join(src, item)
delete_local_file(itemsrc)
try:
os.rmdir(src)
except:
pass
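# Recursively upload everything under src to the target bucket (under the archivename prefix), removing local copies once uploaded.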
def upload_local_file(client, src, archivename):
logger.info("start to upload")
for filename in os.listdir(src):
path = src + '/{}'.format(os.path.basename(filename))
logger.info("path is [%s]", path)
if os.path.isfile(path):
logger.info("filename is [%s]", filename)
response = client.put_object_from_local_file(
Bucket=bucket_upload,
LocalFilePath=path,
Key='{}/{}'.format(archivename, filename))
delete_local_file(str(path))
elif os.path.isdir(path):
logger.info("dirname is [%s]", filename)
dirpath = archivename + '/{}'.format(filename)
upload_local_file(client, path, dirpath)
else:
logger.info("upload fail")
def main_handler(event, context):
logger.info("start unpack template function")
secret_id = os.getenv('TENCENTCLOUD_SECRETID')
secret_key = os.getenv('TENCENTCLOUD_SECRETKEY')
token = os.getenv('TENCENTCLOUD_SESSIONTOKEN')
config = CosConfig(Secret_id=secret_id, Secret_key=secret_key, Region=region, Token=token)
client = CosS3Client(config)
for record in event['Records']:
try:
appid = record['cos']['cosBucket']['appid']
bucket = record['cos']['cosBucket']['name'] + '-' + appid
filename = os.path.basename(record['cos']['cosObject']['url'])
download_path = '/tmp/{}'.format(filename.encode('gb18030'))
key = record['cos']['cosObject']['key']
key = key.replace('/' + appid + '/' + record['cos']['cosBucket']['name'] + '/', '', 1)
            # Create the local extraction directory
isExists = os.path.exists('/tmp/unpack')
if not isExists:
os.mkdir('/tmp/unpack')
unpack_path = '/tmp/unpack'
            # Extract the file name (shotname) and extension
(filepath, tempfilename) = os.path.split(filename);
(shotname, extension) = os.path.splitext(tempfilename);
if extension[1:] not in unpack_suffix.split(','):
logger.info("object suffix is [%s], expected: [%s]", extension, unpack_suffix)
return "object suffix is [%s], expected: [%s]" % (extension, unpack_suffix)
logger.info("object name is [%s]", shotname)
# download rar from cos
logger.info("get from [%s] to download object [%s]", bucket, filename)
try:
response = client.get_object(Bucket=bucket, Key=key, )
response['Body'].get_stream_to_file(download_path)
logger.info("download object [%s] Success", filename)
except CosServiceError as e:
print(e.get_error_code())
print(e.get_error_msg())
print(e.get_resource_location())
logger.info("download object [%s] failed", filename)
return "download object fail"
# start to extract archive file and upload to bucket_upload
logger.info("start to extract archive file")
Archive(download_path).extractall(unpack_path, auto_create_dir=True)
logger.info("extract success")
upload_local_file(client, '/tmp/unpack', target_dir)
# clean files
delete_local_file(str(download_path))
delete_local_file(str(unpack_path))
return "extract and upload success"
except Exception as e:
print(e)
raise e
return "extract and upload fail"
|
[
"os.mkdir",
"os.remove",
"os.path.isfile",
"patool.extract_archive",
"os.path.join",
"zipfile.is_zipfile",
"os.path.abspath",
"qcloud_cos_v5.CosConfig",
"os.path.exists",
"os.path.normpath",
"sys.setdefaultencoding",
"os.path.basename",
"os.path.expandvars",
"os.rmdir",
"os.listdir",
"os.getenv",
"zipfile.ZipFile",
"os.makedirs",
"logging.basicConfig",
"os.getcwd",
"os.path.isdir",
"qcloud_cos_v5.CosS3Client",
"os.path.splitext",
"os.path.split",
"os.path.expanduser",
"logging.getLogger"
] |
[((683, 713), 'sys.setdefaultencoding', 'sys.setdefaultencoding', (['"""utf8"""'], {}), "('utf8')\n", (705, 713), False, 'import sys\n'), ((770, 828), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO', 'stream': 'sys.stdout'}), '(level=logging.INFO, stream=sys.stdout)\n', (789, 828), False, 'import logging\n'), ((839, 864), 'os.getenv', 'os.getenv', (['"""targetRegion"""'], {}), "('targetRegion')\n", (848, 864), False, 'import os\n'), ((881, 906), 'os.getenv', 'os.getenv', (['"""targetBucket"""'], {}), "('targetBucket')\n", (890, 906), False, 'import os\n'), ((921, 940), 'os.getenv', 'os.getenv', (['"""suffix"""'], {}), "('suffix')\n", (930, 940), False, 'import os\n'), ((952, 977), 'os.getenv', 'os.getenv', (['"""targetPrefix"""'], {}), "('targetPrefix')\n", (961, 977), False, 'import os\n'), ((988, 1007), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (1005, 1007), False, 'import logging\n'), ((757, 768), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (766, 768), False, 'import os\n'), ((1361, 1382), 'os.path.expandvars', 'os.path.expandvars', (['x'], {}), '(x)\n', (1379, 1382), False, 'import os\n'), ((1391, 1412), 'os.path.expanduser', 'os.path.expanduser', (['x'], {}), '(x)\n', (1409, 1412), False, 'import os\n'), ((1421, 1440), 'os.path.normpath', 'os.path.normpath', (['x'], {}), '(x)\n', (1437, 1440), False, 'import os\n'), ((1449, 1467), 'os.path.abspath', 'os.path.abspath', (['x'], {}), '(x)\n', (1464, 1467), False, 'import os\n'), ((3446, 3465), 'os.path.isfile', 'os.path.isfile', (['src'], {}), '(src)\n', (3460, 3465), False, 'import os\n'), ((3867, 3882), 'os.listdir', 'os.listdir', (['src'], {}), '(src)\n', (3877, 3882), False, 'import os\n'), ((4660, 4694), 'os.getenv', 'os.getenv', (['"""TENCENTCLOUD_SECRETID"""'], {}), "('TENCENTCLOUD_SECRETID')\n", (4669, 4694), False, 'import os\n'), ((4713, 4748), 'os.getenv', 'os.getenv', (['"""TENCENTCLOUD_SECRETKEY"""'], {}), "('TENCENTCLOUD_SECRETKEY')\n", (4722, 4748), False, 'import os\n'), ((4761, 4799), 'os.getenv', 'os.getenv', (['"""TENCENTCLOUD_SESSIONTOKEN"""'], {}), "('TENCENTCLOUD_SESSIONTOKEN')\n", (4770, 4799), False, 'import os\n'), ((4814, 4900), 'qcloud_cos_v5.CosConfig', 'CosConfig', ([], {'Secret_id': 'secret_id', 'Secret_key': 'secret_key', 'Region': 'region', 'Token': 'token'}), '(Secret_id=secret_id, Secret_key=secret_key, Region=region, Token=\n token)\n', (4823, 4900), False, 'from qcloud_cos_v5 import CosConfig\n'), ((4909, 4928), 'qcloud_cos_v5.CosS3Client', 'CosS3Client', (['config'], {}), '(config)\n', (4920, 4928), False, 'from qcloud_cos_v5 import CosS3Client\n'), ((735, 752), 'os.getenv', 'os.getenv', (['"""PATH"""'], {}), "('PATH')\n", (744, 752), False, 'import os\n'), ((1101, 1202), 'patool.extract_archive', 'patool.extract_archive', (['archive'], {'verbosity': '(False)', 'interactive': '"""--non-interactive"""', 'outdir': 'outdir'}), "(archive, verbosity=False, interactive=\n '--non-interactive', outdir=outdir)\n", (1123, 1202), False, 'import patool\n'), ((2474, 2507), 'zipfile.is_zipfile', 'zipfile.is_zipfile', (['self.filename'], {}), '(self.filename)\n', (2492, 2507), False, 'import zipfile\n'), ((3549, 3567), 'os.path.isdir', 'os.path.isdir', (['src'], {}), '(src)\n', (3562, 3567), False, 'import os\n'), ((3999, 4019), 'os.path.isfile', 'os.path.isfile', (['path'], {}), '(path)\n', (4013, 4019), False, 'import os\n'), ((2564, 2593), 'os.path.exists', 'os.path.exists', (['self.filename'], {}), '(self.filename)\n', (2578, 2593), False, 'import os\n'), ((2709, 
2734), 'os.path.exists', 'os.path.exists', (['directory'], {}), '(directory)\n', (2723, 2734), False, 'import os\n'), ((3492, 3506), 'os.remove', 'os.remove', (['src'], {}), '(src)\n', (3501, 3506), False, 'import os\n'), ((3589, 3604), 'os.listdir', 'os.listdir', (['src'], {}), '(src)\n', (3599, 3604), False, 'import os\n'), ((4320, 4339), 'os.path.isdir', 'os.path.isdir', (['path'], {}), '(path)\n', (4333, 4339), False, 'import os\n'), ((5128, 5179), 'os.path.basename', 'os.path.basename', (["record['cos']['cosObject']['url']"], {}), "(record['cos']['cosObject']['url'])\n", (5144, 5179), False, 'import os\n'), ((5450, 5479), 'os.path.exists', 'os.path.exists', (['"""/tmp/unpack"""'], {}), "('/tmp/unpack')\n", (5464, 5479), False, 'import os\n'), ((5657, 5680), 'os.path.split', 'os.path.split', (['filename'], {}), '(filename)\n', (5670, 5680), False, 'import os\n'), ((5718, 5748), 'os.path.splitext', 'os.path.splitext', (['tempfilename'], {}), '(tempfilename)\n', (5734, 5748), False, 'import os\n'), ((2112, 2142), 'zipfile.ZipFile', 'zipfile.ZipFile', (['self.filename'], {}), '(self.filename)\n', (2127, 2142), False, 'import zipfile\n'), ((2784, 2806), 'os.makedirs', 'os.makedirs', (['directory'], {}), '(directory)\n', (2795, 2806), False, 'import os\n'), ((3628, 3651), 'os.path.join', 'os.path.join', (['src', 'item'], {}), '(src, item)\n', (3640, 3651), False, 'import os\n'), ((3716, 3729), 'os.rmdir', 'os.rmdir', (['src'], {}), '(src)\n', (3724, 3729), False, 'import os\n'), ((3918, 3944), 'os.path.basename', 'os.path.basename', (['filename'], {}), '(filename)\n', (3934, 3944), False, 'import os\n'), ((5525, 5548), 'os.mkdir', 'os.mkdir', (['"""/tmp/unpack"""'], {}), "('/tmp/unpack')\n", (5533, 5548), False, 'import os\n')]
|
import sys
import getopt
from pyiron.base.job.wrapper import job_wrapper_function
def command_line(argv):
"""
Parse the command line arguments.
Args:
argv: Command line arguments
"""
debug = False
project_path = None
job_id = None
try:
opts, args = getopt.getopt(argv, "dj:p:h", ["debug", "project_path=", "job_id=", "help"])
except getopt.GetoptError:
print('cms.py --p <project_path> -j <job_id> <debug>')
sys.exit()
else:
for opt, arg in opts:
if opt in ("-h", "--help"):
print('cms.py --p <project_path> -j <job_id> <debug>')
sys.exit()
elif opt in ("-d", "--debug"):
debug = True
elif opt in ("-j", "--job_id"):
job_id = arg
elif opt in ("-p", "--project_path"):
project_path = arg
job_wrapper_function(working_directory=project_path, job_id=job_id, debug=debug)
sys.exit()
if __name__ == "__main__":
command_line(sys.argv[1:])
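# A minimal invocation sketch: the script name "cms.py" comes from the usage string
# above, while the project path and job id below are invented for illustration.
#
#   python cms.py -p /home/user/projects/demo_job_hdf5 -j 42
#   python cms.py --project_path /home/user/projects/demo_job_hdf5 --job_id 42 --debug
#
# The same parsing can be exercised programmatically, e.g.
# command_line(["-p", "/home/user/projects/demo_job_hdf5", "-j", "42", "-d"]),
# which ends in job_wrapper_function(working_directory=..., job_id=..., debug=...) and sys.exit().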
|
[
"sys.exit",
"getopt.getopt",
"pyiron.base.job.wrapper.job_wrapper_function"
] |
[((301, 377), 'getopt.getopt', 'getopt.getopt', (['argv', '"""dj:p:h"""', "['debug', 'project_path=', 'job_id=', 'help']"], {}), "(argv, 'dj:p:h', ['debug', 'project_path=', 'job_id=', 'help'])\n", (314, 377), False, 'import getopt\n'), ((907, 992), 'pyiron.base.job.wrapper.job_wrapper_function', 'job_wrapper_function', ([], {'working_directory': 'project_path', 'job_id': 'job_id', 'debug': 'debug'}), '(working_directory=project_path, job_id=job_id, debug=debug\n )\n', (927, 992), False, 'from pyiron.base.job.wrapper import job_wrapper_function\n'), ((996, 1006), 'sys.exit', 'sys.exit', ([], {}), '()\n', (1004, 1006), False, 'import sys\n'), ((480, 490), 'sys.exit', 'sys.exit', ([], {}), '()\n', (488, 490), False, 'import sys\n'), ((658, 668), 'sys.exit', 'sys.exit', ([], {}), '()\n', (666, 668), False, 'import sys\n')]
|
"""
This script extracts news information from the local web
"""
import glob
import os
import os.path
from bs4 import BeautifulSoup
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions
class Crawler:
def extract_web_news(self, folder, url, tag):
"""
Extract information from the elcomercio feed url (XML format) and download them
to a filepath
:param folder: Folder where the news files will be saved
:param url: feed url like http://elcomercio.pe/feed/lima/policiales.xml
:param tag: news type like 'policiales'
:return: void
"""
browser = webdriver.Firefox()
browser.get(url)
list_linker_href = browser.find_elements_by_xpath('//xhtml:a[@href]')
driver = webdriver.Firefox()
wait = WebDriverWait(driver, 10)
for l in list_linker_href:
news_url = l.get_attribute('href')
driver.get(news_url)
print(news_url)
wait.until(expected_conditions.element_to_be_clickable((By.CLASS_NAME, 'fecha')))
fecha = driver.find_element_by_class_name("fecha").get_attribute("datetime")
file_name = tag + '--' + news_url.split('/')[-1]
try:
news_element = driver.find_element_by_id('main-txt-nota')
except NoSuchElementException:
print('main-txt-nota not found on ' + file_name)
continue
news_content = news_element.get_attribute('innerHTML').encode('utf-8')
content = fecha + "\n" + news_content.decode('utf-8')
with open(folder + "/" + file_name + ".html", 'w') as file:
file.write(content)
browser.close()
driver.close()
def clean_raw_news(self, origin, destination, skip_validation):
"""
Read raw news from origin and after cleaning, it will write them into destination folder
:param origin: Folder that contains all the raw news
:param destination: Destination folder to write clear news content
:param skip_validation: True or False - check file existence
:return: nothing - void
"""
news = glob.glob(origin + "/*.html")
for news_file in news:
print(news_file)
file_name = destination + '/' + news_file.split('/')[1].split('.')[0] + '.txt'
if skip_validation or not os.path.isfile(file_name):
with open(news_file, 'r') as read_file:
news_raw = read_file.read()
# create a new bs4 object from the html data loaded
soup = BeautifulSoup(news_raw, 'lxml')
# remove all javascript and stylesheet code
for script in soup(["script", "style"]):
script.extract()
# get text
text = soup.get_text()
# break into lines and remove leading and trailing space on each
lines = (line.strip() for line in text.splitlines())
# break multi-headlines into a line each
chunks = (phrase.strip() for line in lines for phrase in line.split(" "))
# drop blank lines
text = '\n'.join(chunk for chunk in chunks if chunk)
with open(file_name, 'w') as write_file:
write_file.write(text)
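# A minimal usage sketch for the class above. The feed URL is the one quoted in the
# docstring; the folder names are invented, and a real run needs Firefox/geckodriver
# plus network access.
if __name__ == '__main__':
    os.makedirs('raw_news', exist_ok=True)
    os.makedirs('clean_news', exist_ok=True)
    crawler = Crawler()
    # 1) download raw article HTML into raw_news/
    crawler.extract_web_news('raw_news', 'http://elcomercio.pe/feed/lima/policiales.xml', 'policiales')
    # 2) strip markup and write plain-text versions into clean_news/
    crawler.clean_raw_news('raw_news', 'clean_news', skip_validation=False)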
|
[
"selenium.webdriver.support.expected_conditions.element_to_be_clickable",
"selenium.webdriver.Firefox",
"os.path.isfile",
"glob.glob",
"bs4.BeautifulSoup",
"selenium.webdriver.support.ui.WebDriverWait"
] |
[((819, 838), 'selenium.webdriver.Firefox', 'webdriver.Firefox', ([], {}), '()\n', (836, 838), False, 'from selenium import webdriver\n'), ((959, 978), 'selenium.webdriver.Firefox', 'webdriver.Firefox', ([], {}), '()\n', (976, 978), False, 'from selenium import webdriver\n'), ((994, 1019), 'selenium.webdriver.support.ui.WebDriverWait', 'WebDriverWait', (['driver', '(10)'], {}), '(driver, 10)\n', (1007, 1019), False, 'from selenium.webdriver.support.ui import WebDriverWait\n'), ((2377, 2406), 'glob.glob', 'glob.glob', (["(origin + '/*.html')"], {}), "(origin + '/*.html')\n", (2386, 2406), False, 'import glob\n'), ((1186, 1255), 'selenium.webdriver.support.expected_conditions.element_to_be_clickable', 'expected_conditions.element_to_be_clickable', (["(By.CLASS_NAME, 'fecha')"], {}), "((By.CLASS_NAME, 'fecha'))\n", (1229, 1255), False, 'from selenium.webdriver.support import expected_conditions\n'), ((2818, 2849), 'bs4.BeautifulSoup', 'BeautifulSoup', (['news_raw', '"""lxml"""'], {}), "(news_raw, 'lxml')\n", (2831, 2849), False, 'from bs4 import BeautifulSoup\n'), ((2596, 2621), 'os.path.isfile', 'os.path.isfile', (['file_name'], {}), '(file_name)\n', (2610, 2621), False, 'import os\n')]
|
from flask_sqlalchemy import SQLAlchemy
from api import app, db
from data.models import Authentication, UserLogin
from data.parser import authlogs
from predictions import update_predictions
# create tables
with app.app_context():
db.create_all()
with app.app_context():
entry = UserLogin(
username="admin",
password="<PASSWORD>"
)
db.session.add(entry)
db.session.commit()
# insert authentication logs
with app.app_context():
for log in authlogs:
entry = Authentication(
time=log[0],
source_user=log[1],
destination_user=log[2],
source_computer=log[3],
destination_computer=log[4],
authentication_type=log[5],
logon_type=log[6],
auth_orientation=log[7],
auth_result=log[8]
)
db.session.add(entry)
db.session.commit()
update_predictions(db, app.app_context())
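# The loop above indexes each `log` entry from 0 to 8, so `authlogs` (parsed in
# data/parser.py, not shown here) presumably yields 9-field records ordered as:
# time, source_user, destination_user, source_computer, destination_computer,
# authentication_type, logon_type, auth_orientation, auth_result.
# An invented example record for illustration:
#   ("1", "C625$@DOM1", "U147@DOM1", "C625", "C625", "Negotiate", "Batch", "LogOn", "Success")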
|
[
"api.app.app_context",
"api.db.create_all",
"api.db.session.add",
"data.models.Authentication",
"api.db.session.commit",
"data.models.UserLogin"
] |
[((212, 229), 'api.app.app_context', 'app.app_context', ([], {}), '()\n', (227, 229), False, 'from api import app, db\n'), ((235, 250), 'api.db.create_all', 'db.create_all', ([], {}), '()\n', (248, 250), False, 'from api import app, db\n'), ((257, 274), 'api.app.app_context', 'app.app_context', ([], {}), '()\n', (272, 274), False, 'from api import app, db\n'), ((288, 338), 'data.models.UserLogin', 'UserLogin', ([], {'username': '"""admin"""', 'password': '"""<PASSWORD>"""'}), "(username='admin', password='<PASSWORD>')\n", (297, 338), False, 'from data.models import Authentication, UserLogin\n'), ((365, 386), 'api.db.session.add', 'db.session.add', (['entry'], {}), '(entry)\n', (379, 386), False, 'from api import app, db\n'), ((391, 410), 'api.db.session.commit', 'db.session.commit', ([], {}), '()\n', (408, 410), False, 'from api import app, db\n'), ((447, 464), 'api.app.app_context', 'app.app_context', ([], {}), '()\n', (462, 464), False, 'from api import app, db\n'), ((877, 896), 'api.db.session.commit', 'db.session.commit', ([], {}), '()\n', (894, 896), False, 'from api import app, db\n'), ((921, 938), 'api.app.app_context', 'app.app_context', ([], {}), '()\n', (936, 938), False, 'from api import app, db\n'), ((507, 736), 'data.models.Authentication', 'Authentication', ([], {'time': 'log[0]', 'source_user': 'log[1]', 'destination_user': 'log[2]', 'source_computer': 'log[3]', 'destination_computer': 'log[4]', 'authentication_type': 'log[5]', 'logon_type': 'log[6]', 'auth_orientation': 'log[7]', 'auth_result': 'log[8]'}), '(time=log[0], source_user=log[1], destination_user=log[2],\n source_computer=log[3], destination_computer=log[4],\n authentication_type=log[5], logon_type=log[6], auth_orientation=log[7],\n auth_result=log[8])\n', (521, 736), False, 'from data.models import Authentication, UserLogin\n'), ((851, 872), 'api.db.session.add', 'db.session.add', (['entry'], {}), '(entry)\n', (865, 872), False, 'from api import app, db\n')]
|
import pandas as pd
import json
import datetime
import subprocess
import localModule
#----
def atting_program(row):
ffmpeg_command_line = 'ffmpeg \
-loglevel error \
-fflags +discardcorrupt \
-i {0} \
-acodec copy \
-movflags faststart \
-vn \
-bsf:a aac_adtstoasc \
-t {1} \
-metadata date="{2}" \
-metadata genre="{3}" \
-metadata artist="{4}" \
-metadata title="{5}" \
{6}/{7}.m4a'.format(
localModule.DICTIONARY_OF_STATION_URL[row.service_id],
int((row.air_time + datetime.timedelta(seconds=localModule.MARGIN_SECOND*2)).total_seconds()),
row.start_time.strftime('%Y'),
'Radio Program',
row.service_name,
row.title,
localModule.FOLDER_OF_RECORD,
row.title+'-'+row.start_time.strftime('%Y%m%d%H%M'),
)
at_launch_time = row.start_time - datetime.timedelta(seconds=localModule.MARGIN_SECOND)
command_line = "echo 'sleep {0}; {1}' | at -t {2}".format(
at_launch_time.strftime('%S'),
ffmpeg_command_line,
at_launch_time.strftime('%Y%m%d%H%M'),
)
res = subprocess.check_output(command_line, shell=True)
#----
table = pd.read_csv(localModule.TABLE_FILE)
table['start_time'] = pd.to_datetime(table['start_time'])
table['end_time'] = pd.to_datetime(table['end_time'])
table['air_time'] = pd.to_timedelta(table['air_time'])
for row in table.itertuples():
atting_program(row)
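# Inferred interface notes: localModule must provide TABLE_FILE, MARGIN_SECOND,
# FOLDER_OF_RECORD and DICTIONARY_OF_STATION_URL (service_id -> stream URL), and the
# CSV behind TABLE_FILE needs at least the columns used above:
# service_id, service_name, title, start_time, end_time, air_time.
# An invented example row for illustration:
#   radio1,Some Station,Morning Show,2020-01-01 09:00,2020-01-01 10:00,01:00:00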
|
[
"pandas.read_csv",
"subprocess.check_output",
"pandas.to_timedelta",
"pandas.to_datetime",
"datetime.timedelta"
] |
[((1151, 1186), 'pandas.read_csv', 'pd.read_csv', (['localModule.TABLE_FILE'], {}), '(localModule.TABLE_FILE)\n', (1162, 1186), True, 'import pandas as pd\n'), ((1209, 1244), 'pandas.to_datetime', 'pd.to_datetime', (["table['start_time']"], {}), "(table['start_time'])\n", (1223, 1244), True, 'import pandas as pd\n'), ((1265, 1298), 'pandas.to_datetime', 'pd.to_datetime', (["table['end_time']"], {}), "(table['end_time'])\n", (1279, 1298), True, 'import pandas as pd\n'), ((1319, 1353), 'pandas.to_timedelta', 'pd.to_timedelta', (["table['air_time']"], {}), "(table['air_time'])\n", (1334, 1353), True, 'import pandas as pd\n'), ((1085, 1134), 'subprocess.check_output', 'subprocess.check_output', (['command_line'], {'shell': '(True)'}), '(command_line, shell=True)\n', (1108, 1134), False, 'import subprocess\n'), ((853, 906), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': 'localModule.MARGIN_SECOND'}), '(seconds=localModule.MARGIN_SECOND)\n', (871, 906), False, 'import datetime\n'), ((539, 596), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': '(localModule.MARGIN_SECOND * 2)'}), '(seconds=localModule.MARGIN_SECOND * 2)\n', (557, 596), False, 'import datetime\n')]
|
import sys
sys.path.append('..')
import unittest
from graphs.Graph import Graph
class TestGraph(unittest.TestCase):
def setUp(self):
self.G = Graph(5)
self.G.add_edge(1, 2)
self.G.add_edge(1, 3)
self.G.add_edge(1, 4)
self.G.add_edge(2, 4)
self.G.add_edge(3, 4)
def testing_len(self):
self.assertEqual(self.G.len(), 5)
def testing_nodes_adjacents(self):
self.assertEqual(self.G[4], [1, 2, 3])
self.assertEqual(self.G[2], [1, 4])
self.assertEqual(self.G[0], [])
def testing_degree(self):
self.assertEqual(self.G.degree(4), 3)
self.assertEqual(self.G.max_degree(), 3)
if __name__ == "__main__":
unittest.main()
|
[
"sys.path.append",
"unittest.main",
"graphs.Graph.Graph"
] |
[((11, 32), 'sys.path.append', 'sys.path.append', (['""".."""'], {}), "('..')\n", (26, 32), False, 'import sys\n'), ((742, 757), 'unittest.main', 'unittest.main', ([], {}), '()\n', (755, 757), False, 'import unittest\n'), ((166, 174), 'graphs.Graph.Graph', 'Graph', (['(5)'], {}), '(5)\n', (171, 174), False, 'from graphs.Graph import Graph\n')]
|
from threading import Thread
from typing import Union, Dict, Optional, Tuple
from datetime import datetime, timedelta, timezone
from enum import Enum, auto
from kafka import KafkaConsumer, TopicPartition
from queue import Queue, Empty, Full
from wherefore.DataSource import DataSource
from wherefore.Message import Message
from copy import copy
CHECK_FOR_MSG_INTERVAL = 500
UPDATE_STATUS_INTERVAL = timedelta(milliseconds=50)
class PartitionOffset(Enum):
NEVER = auto()
BEGINNING = auto()
END = auto()
class HighLowOffset:
def __init__(self, low, high, lag=-1):
self.low = low
self.lag = lag
self.high = high
def thread_function(consumer: KafkaConsumer, stop: Union[datetime, int], in_queue: Queue, out_queue: Queue, topic_partition):
known_sources: Dict[bytes, DataSource] = {}
start_time = datetime.now(tz=timezone.utc)
update_timer = datetime.now(tz=timezone.utc)
while True:
messages_ctr = 0
for kafka_msg in consumer:
new_msg = Message(kafka_msg)
if type(stop) is int and new_msg.offset > stop:
pass
elif type(stop) is datetime and new_msg.timestamp is not None and new_msg.timestamp > stop:
pass
elif type(stop) is datetime and new_msg.timestamp is None and new_msg.kafka_timestamp > stop:
pass
else:
if not new_msg.source_hash in known_sources:
known_sources[new_msg.source_hash] = DataSource(new_msg.source_name, new_msg.message_type, start_time)
known_sources[new_msg.source_hash].process_message(new_msg)
messages_ctr += 1
if messages_ctr == CHECK_FOR_MSG_INTERVAL:
break
if not in_queue.empty():
new_msg = in_queue.get()
if new_msg == "exit":
break
now = datetime.now(tz=timezone.utc)
if now - update_timer > UPDATE_STATUS_INTERVAL:
update_timer = now
try:
out_queue.put(copy(known_sources), block=False)
low_offset = consumer.beginning_offsets([topic_partition, ])[topic_partition]
high_offset = consumer.end_offsets([topic_partition, ])[topic_partition]
out_queue.put(HighLowOffset(low_offset, high_offset))
except Full:
pass # Do nothing
consumer.close(True)
class KafkaMessageTracker:
def __init__(self, broker: str, topic: str, partition: int = -1, start: Tuple[Union[int, datetime, PartitionOffset], Optional[int]] = PartitionOffset.END, stop: Union[int, datetime, PartitionOffset] = PartitionOffset.NEVER):
self.to_thread = Queue()
self.from_thread = Queue(maxsize=100)
consumer = KafkaConsumer(bootstrap_servers=broker, fetch_max_bytes=52428800 * 6, consumer_timeout_ms=100)
existing_topics = consumer.topics()
self.current_msg = None
self.current_offset_limits = HighLowOffset(-1, -1)
if topic not in existing_topics:
raise RuntimeError(f"Topic \"{topic}\" does not exist.")
existing_partitions = consumer.partitions_for_topic(topic)
if partition == -1:
partition = existing_partitions.pop()
elif partition not in existing_partitions:
raise RuntimeError(f"Partition {partition} for topic \"{topic}\" does not exist.")
topic_partition = TopicPartition(topic, partition)
consumer.assign([topic_partition, ])
first_offset = consumer.beginning_offsets([topic_partition])[topic_partition]
last_offset = consumer.end_offsets([topic_partition])[topic_partition]
origin_offset = None
offset_to_offset = start[1]
if start[0] == PartitionOffset.BEGINNING:
origin_offset = first_offset
# consumer.seek_to_beginning()
# if type(start[1]) == int and start[1] > 0 and first_offset + start[1] <= last_offset:
# consumer.seek(partition=topic_partition, offset=first_offset + start[1])
elif start[0] == PartitionOffset.END or start == PartitionOffset.NEVER:
origin_offset = last_offset
# consumer.seek_to_end()
# if type(start[1]) == int and start[1] < 0 and last_offset + start[1] >= first_offset:
# consumer.seek(partition=topic_partition, offset=first_offset + start[1])
elif type(start[0]) is int:
if first_offset > start[0]:
origin_offset = first_offset
# consumer.seek_to_beginning()
elif last_offset < start[0]:
origin_offset = last_offset
else:
origin_offset = start[0]
# consumer.seek_to_end()
# else:
# consumer.seek(partition=topic_partition, offset=start[0])
elif type(start[0]) is datetime:
found_offsets = consumer.offsets_for_times({topic_partition: int(start[0].timestamp() * 1000)})
if found_offsets[topic_partition] is None:
origin_offset = last_offset
else:
origin_offset = found_offsets[topic_partition].offset
# if type(start[1]) == int:
# used_offset += start[1]
# consumer.seek(partition=topic_partition, offset=used_offset)
else:
raise RuntimeError("Unknown start offset configured.")
if offset_to_offset is not None:
origin_offset += offset_to_offset
if origin_offset < first_offset:
origin_offset = first_offset
elif origin_offset > last_offset:
origin_offset = last_offset
consumer.seek(partition=topic_partition, offset=origin_offset)
        self.thread = Thread(target=thread_function, daemon=True, kwargs={"consumer": consumer, "stop": stop, "in_queue": self.to_thread, "out_queue": self.from_thread, "topic_partition": topic_partition})
self.thread.start()
def stop_thread(self):
self.to_thread.put("exit")
def __del__(self):
self.stop_thread()
def _get_messages(self):
while not self.from_thread.empty():
try:
current_msg = self.from_thread.get(block=False)
if type(current_msg) is dict:
self.current_msg = current_msg
elif type(current_msg) is HighLowOffset:
self.current_offset_limits = current_msg
except Empty:
return
def get_latest_values(self):
self._get_messages()
return self.current_msg
def get_current_edge_offsets(self) -> HighLowOffset:
self._get_messages()
return self.current_offset_limits
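# A minimal usage sketch, assuming a reachable Kafka broker; the broker address and
# topic name below are invented for illustration.
if __name__ == "__main__":
    import time

    tracker = KafkaMessageTracker(
        broker="localhost:9092",
        topic="example_topic",
        partition=0,
        start=(PartitionOffset.BEGINNING, None),
        stop=PartitionOffset.NEVER,
    )
    try:
        for _ in range(10):
            time.sleep(1)
            sources = tracker.get_latest_values()        # dict of source hash -> DataSource, or None
            offsets = tracker.get_current_edge_offsets()  # HighLowOffset with .low / .high
            print(offsets.low, offsets.high, sources)
    finally:
        tracker.stop_thread()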
|
[
"threading.Thread",
"wherefore.Message.Message",
"copy.copy",
"wherefore.DataSource.DataSource",
"kafka.TopicPartition",
"datetime.timedelta",
"enum.auto",
"datetime.datetime.now",
"queue.Queue",
"kafka.KafkaConsumer"
] |
[((401, 427), 'datetime.timedelta', 'timedelta', ([], {'milliseconds': '(50)'}), '(milliseconds=50)\n', (410, 427), False, 'from datetime import datetime, timedelta, timezone\n'), ((471, 477), 'enum.auto', 'auto', ([], {}), '()\n', (475, 477), False, 'from enum import Enum, auto\n'), ((494, 500), 'enum.auto', 'auto', ([], {}), '()\n', (498, 500), False, 'from enum import Enum, auto\n'), ((511, 517), 'enum.auto', 'auto', ([], {}), '()\n', (515, 517), False, 'from enum import Enum, auto\n'), ((848, 877), 'datetime.datetime.now', 'datetime.now', ([], {'tz': 'timezone.utc'}), '(tz=timezone.utc)\n', (860, 877), False, 'from datetime import datetime, timedelta, timezone\n'), ((897, 926), 'datetime.datetime.now', 'datetime.now', ([], {'tz': 'timezone.utc'}), '(tz=timezone.utc)\n', (909, 926), False, 'from datetime import datetime, timedelta, timezone\n'), ((1902, 1931), 'datetime.datetime.now', 'datetime.now', ([], {'tz': 'timezone.utc'}), '(tz=timezone.utc)\n', (1914, 1931), False, 'from datetime import datetime, timedelta, timezone\n'), ((2721, 2728), 'queue.Queue', 'Queue', ([], {}), '()\n', (2726, 2728), False, 'from queue import Queue, Empty, Full\n'), ((2756, 2774), 'queue.Queue', 'Queue', ([], {'maxsize': '(100)'}), '(maxsize=100)\n', (2761, 2774), False, 'from queue import Queue, Empty, Full\n'), ((2795, 2893), 'kafka.KafkaConsumer', 'KafkaConsumer', ([], {'bootstrap_servers': 'broker', 'fetch_max_bytes': '(52428800 * 6)', 'consumer_timeout_ms': '(100)'}), '(bootstrap_servers=broker, fetch_max_bytes=52428800 * 6,\n consumer_timeout_ms=100)\n', (2808, 2893), False, 'from kafka import KafkaConsumer, TopicPartition\n'), ((3452, 3484), 'kafka.TopicPartition', 'TopicPartition', (['topic', 'partition'], {}), '(topic, partition)\n', (3466, 3484), False, 'from kafka import KafkaConsumer, TopicPartition\n'), ((5818, 6023), 'threading.Thread', 'Thread', ([], {'target': 'thread_function', 'daemon': '(True)', 'kwargs': "{'consumer': consumer, 'stop': stop, 'in_queue': self.to_thread,\n 'out_queue': self.from_thread, 'stop': stop, 'topic_partition':\n topic_partition}"}), "(target=thread_function, daemon=True, kwargs={'consumer': consumer,\n 'stop': stop, 'in_queue': self.to_thread, 'out_queue': self.from_thread,\n 'stop': stop, 'topic_partition': topic_partition})\n", (5824, 6023), False, 'from threading import Thread\n'), ((1025, 1043), 'wherefore.Message.Message', 'Message', (['kafka_msg'], {}), '(kafka_msg)\n', (1032, 1043), False, 'from wherefore.Message import Message\n'), ((2066, 2085), 'copy.copy', 'copy', (['known_sources'], {}), '(known_sources)\n', (2070, 2085), False, 'from copy import copy\n'), ((1513, 1578), 'wherefore.DataSource.DataSource', 'DataSource', (['new_msg.source_name', 'new_msg.message_type', 'start_time'], {}), '(new_msg.source_name, new_msg.message_type, start_time)\n', (1523, 1578), False, 'from wherefore.DataSource import DataSource\n')]
|
#!/usr/bin/env python
import os
import sys
sys.path.insert(0, os.pardir)
sys.path.insert(0, os.path.join(os.pardir, 'openmoc'))
from testing_harness import MultiSimTestHarness
import openmoc
try:
import openmc.openmoc_compatible
import openmc.mgxs
except:
print("OpenMC could not be imported, it's required for loading MGXS files")
class LoadMGXSTestHarness(MultiSimTestHarness):
"""Load a variety of OpenMC MGXS libraries."""
def __init__(self):
super(LoadMGXSTestHarness, self).__init__()
self.azim_spacing = 0.12
self.max_iters = 10
self.keffs = []
self.mgxs_files = ['mgxs_isotropic',
'mgxs_transport_corrected',
'mgxs_consistent',
'mgxs_consistent_nuscatter',
'mgxs_materials',
'mgxs_angular_legendre']
# mgxs_angular_histogram currently not supported
# mgxs_nuclide should be redone with the latest version of openmc
# mgxs by distribcell, universe and mesh also not supported
def _create_geometry(self):
pass
def _create_trackgenerator(self):
pass
def _generate_tracks(self):
pass
def _create_solver(self):
pass
def _run_openmoc(self):
"""Run an OpenMOC calculation with each library."""
for mgxs_file in self.mgxs_files:
# Initialize OpenMC multi-group cross section library for a pin cell
mgxs_lib = openmc.mgxs.Library.load_from_file(filename=mgxs_file,
directory='.')
            # Create an OpenMOC Geometry from the OpenMC Geometry
openmoc_geometry = \
openmc.openmoc_compatible.get_openmoc_geometry(mgxs_lib.geometry)
# Load cross section data
openmoc_materials = \
openmoc.materialize.load_openmc_mgxs_lib(mgxs_lib, openmoc_geometry)
# Initialize FSRs
openmoc_geometry.initializeFlatSourceRegions()
# Initialize an OpenMOC TrackGenerator
track_generator = openmoc.TrackGenerator(
openmoc_geometry, self.num_azim, self.azim_spacing)
track_generator.generateTracks()
# Initialize an OpenMOC Solver
self.solver = openmoc.CPUSolver(track_generator)
self.solver.setConvergenceThreshold(self.tolerance)
self.solver.setNumThreads(self.num_threads)
# Run eigenvalue calculation and store results
self.solver.computeEigenvalue(max_iters=self.max_iters)
self.keffs.append(self.solver.getKeff())
def _get_results(self, num_iters=True, keff=True, fluxes=True,
num_fsrs=False, num_tracks=False, num_segments=False,
hash_output=False):
"""Write a string with the results"""
outstr = ''
# Write out the mgxs file name eigenvalues from each simulation
for file, keff in zip(self.mgxs_files, self.keffs):
outstr += 'File: {0}\tkeff: {1:12.5E}\n'.format(file, keff)
return outstr
if __name__ == '__main__':
harness = LoadMGXSTestHarness()
harness.main()
|
[
"openmoc.CPUSolver",
"os.path.join",
"openmoc.TrackGenerator",
"sys.path.insert",
"openmoc.materialize.load_openmc_mgxs_lib"
] |
[((45, 74), 'sys.path.insert', 'sys.path.insert', (['(0)', 'os.pardir'], {}), '(0, os.pardir)\n', (60, 74), False, 'import sys\n'), ((94, 128), 'os.path.join', 'os.path.join', (['os.pardir', '"""openmoc"""'], {}), "(os.pardir, 'openmoc')\n", (106, 128), False, 'import os\n'), ((1929, 1997), 'openmoc.materialize.load_openmc_mgxs_lib', 'openmoc.materialize.load_openmc_mgxs_lib', (['mgxs_lib', 'openmoc_geometry'], {}), '(mgxs_lib, openmoc_geometry)\n', (1969, 1997), False, 'import openmoc\n'), ((2170, 2244), 'openmoc.TrackGenerator', 'openmoc.TrackGenerator', (['openmoc_geometry', 'self.num_azim', 'self.azim_spacing'], {}), '(openmoc_geometry, self.num_azim, self.azim_spacing)\n', (2192, 2244), False, 'import openmoc\n'), ((2377, 2411), 'openmoc.CPUSolver', 'openmoc.CPUSolver', (['track_generator'], {}), '(track_generator)\n', (2394, 2411), False, 'import openmoc\n')]
|
"""Sociological poll example."""
import sys
sys.path.append('..')
from trials import Trials
if __name__ == '__main__':
test = Trials(['Poroshenko', 'Tymoshenko'])
test.update({
'Poroshenko': (48, 52),
'Tymoshenko': (12, 88)
})
estimates = test.evaluate('posterior CI')
dominance = test.evaluate('dominance', control='Tymoshenko')
print('Poroshenko estimated vote share: {lower:.2%} - {upper:.2%} '
'(95% credibility)'
.format(lower=estimates['Poroshenko'][0],
upper=estimates['Poroshenko'][2]))
print('Tymoshenko estimated vote share: {lower:.2%} - {upper:.2%} '
'(95% credibility)'
.format(lower=estimates['Tymoshenko'][0],
upper=estimates['Tymoshenko'][2]))
print('Chance that Poroshenko beats Tymoshenko based on the poll data: '
'{chance:.2%}'.format(chance=dominance['Poroshenko']))
|
[
"sys.path.append",
"trials.Trials"
] |
[((45, 66), 'sys.path.append', 'sys.path.append', (['""".."""'], {}), "('..')\n", (60, 66), False, 'import sys\n'), ((134, 170), 'trials.Trials', 'Trials', (["['Poroshenko', 'Tymoshenko']"], {}), "(['Poroshenko', 'Tymoshenko'])\n", (140, 170), False, 'from trials import Trials\n')]
|
import os
import sys
import PyPDF2
from io import BytesIO
from mailer import mailer
def split(pB):
    # Split a PDF into single pages and e-mail each page separately.
    print(pB)
    pTS = PyPDF2.PdfFileReader(pB)
    # Older PyPDF2 versions populate flattenedPages lazily; force it if needed.
    if pTS.flattenedPages is None:
        pTS._flatten()
    for n, pO in enumerate(pTS.flattenedPages):
        # Write page n into an in-memory single-page PDF ...
        sP = PyPDF2.PdfFileWriter()
        sP.addPage(pO)
        b = BytesIO()
        sP.write(b)
        # ... and hand the raw bytes to the mailer together with the page index.
        mailer(b.getvalue(), n)
with open('index.pdf', 'rb') as file:
split(file)
|
[
"PyPDF2.PdfFileReader",
"io.BytesIO",
"PyPDF2.PdfFileWriter"
] |
[((121, 145), 'PyPDF2.PdfFileReader', 'PyPDF2.PdfFileReader', (['pB'], {}), '(pB)\n', (141, 145), False, 'import PyPDF2\n'), ((257, 279), 'PyPDF2.PdfFileWriter', 'PyPDF2.PdfFileWriter', ([], {}), '()\n', (277, 279), False, 'import PyPDF2\n'), ((307, 316), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (314, 316), False, 'from io import BytesIO\n')]
|
from matplotlib import pyplot as plt
from niaarm import Dataset, RuleList, get_rules
from niaarm.visualize import hill_slopes
dataset = Dataset('datasets/Abalone.csv')
metrics = ('support', 'confidence')
rules, _ = get_rules(dataset, 'DifferentialEvolution', metrics, max_evals=1000, seed=1234)
some_rule = rules[150]
print(some_rule)
fig, ax = hill_slopes(some_rule, dataset.transactions)
plt.show()
|
[
"niaarm.get_rules",
"niaarm.Dataset",
"niaarm.visualize.hill_slopes",
"matplotlib.pyplot.show"
] |
[((137, 168), 'niaarm.Dataset', 'Dataset', (['"""datasets/Abalone.csv"""'], {}), "('datasets/Abalone.csv')\n", (144, 168), False, 'from niaarm import Dataset, RuleList, get_rules\n'), ((216, 295), 'niaarm.get_rules', 'get_rules', (['dataset', '"""DifferentialEvolution"""', 'metrics'], {'max_evals': '(1000)', 'seed': '(1234)'}), "(dataset, 'DifferentialEvolution', metrics, max_evals=1000, seed=1234)\n", (225, 295), False, 'from niaarm import Dataset, RuleList, get_rules\n'), ((346, 390), 'niaarm.visualize.hill_slopes', 'hill_slopes', (['some_rule', 'dataset.transactions'], {}), '(some_rule, dataset.transactions)\n', (357, 390), False, 'from niaarm.visualize import hill_slopes\n'), ((391, 401), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (399, 401), True, 'from matplotlib import pyplot as plt\n')]
|
# coding: utf-8
"""jinja2_fsloader - A Jinja2 template loader using PyFilesystem2.
"""
import sys
import fs
import fs.path
import fs.errors
import jinja2
import pkg_resources
__author__ = "<NAME> <<EMAIL>>"
__license__ = "MIT"
__version__ = pkg_resources.resource_string(__name__, "_version.txt").decode('utf-8').strip()
class FSLoader(jinja2.BaseLoader):
"""Loads template from a PyFilesystem2.
The loader is created with a :class:`~fs.base.FS` instance, or a FS URL
which is used to search for the templates::
>>> zip_loader = FSLoader("zip:///path/to/my/templates.zip")
>>> ftp_loader = FSLoader(fs.ftpfs.FTPFS("server.net"))
>>> dir_loader = FSLoader("./templates/", fs_filter=["*.html"])
Per default the template encoding is ``'utf-8'`` which can be changed
by setting the `encoding` parameter to something else. The `use_syspath`
parameter can be opted in to provide Jinja2 the system path to the query
if it exist, otherwise it will only return the internal filesystem path.
The optional `fs_filter` parameter is a list of wildcard patterns like
``['*.html', '*.tpl']``. If present, only the matching files in the
filesystem will be loaded as templates.
.. seealso:: the `PyFilesystem docs <https://docs.pyfilesystem.org/>`_.
"""
def __init__(self, template_fs, encoding='utf-8', use_syspath=False, fs_filter=None):
self.filesystem = fs.open_fs(template_fs)
self.use_syspath = use_syspath
self.encoding = encoding
self.fs_filter = fs_filter
def get_source(self, environment, template):
template = _to_unicode(template)
if not self.filesystem.isfile(template):
raise jinja2.TemplateNotFound(template)
try:
mtime = self.filesystem.getdetails(template).modified
reload = lambda: self.filesystem.getdetails(template).modified > mtime
except fs.errors.MissingInfoNamespace:
reload = lambda: True
with self.filesystem.open(template, encoding=self.encoding) as input_file:
source = input_file.read()
if self.use_syspath:
if self.filesystem.hassyspath(template):
return source, self.filesystem.getsyspath(template), reload
elif self.filesystem.hasurl(template):
return source, self.filesystem.geturl(template), reload
return source, template, reload
def list_templates(self):
found = set()
for file in self.filesystem.walk.files(filter=self.fs_filter):
found.add(fs.path.relpath(file))
return sorted(found)
if sys.version_info[0] == 2:
def _to_unicode(path):
"""Convert str in Python 2 to unicode.
"""
return path.decode('utf-8') if type(path) is not unicode else path
else:
def _to_unicode(path):
return path
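# A minimal usage sketch wiring FSLoader into a regular Jinja2 environment; the
# "./templates" directory and "index.html" template name are invented for illustration.
if __name__ == "__main__":
    env = jinja2.Environment(loader=FSLoader("./templates", fs_filter=["*.html"]))
    template = env.get_template("index.html")
    print(template.render(title="Hello from PyFilesystem2"))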
|
[
"fs.open_fs",
"jinja2.TemplateNotFound",
"pkg_resources.resource_string",
"fs.path.relpath"
] |
[((1424, 1447), 'fs.open_fs', 'fs.open_fs', (['template_fs'], {}), '(template_fs)\n', (1434, 1447), False, 'import fs\n'), ((1713, 1746), 'jinja2.TemplateNotFound', 'jinja2.TemplateNotFound', (['template'], {}), '(template)\n', (1736, 1746), False, 'import jinja2\n'), ((244, 299), 'pkg_resources.resource_string', 'pkg_resources.resource_string', (['__name__', '"""_version.txt"""'], {}), "(__name__, '_version.txt')\n", (273, 299), False, 'import pkg_resources\n'), ((2579, 2600), 'fs.path.relpath', 'fs.path.relpath', (['file'], {}), '(file)\n', (2594, 2600), False, 'import fs\n')]
|
import sys
from datetime import datetime
from . import main
from flask import render_template, request, redirect, url_for, flash
from ..models import db, Artist, Venue, Show
from .forms import ShowForm, VenueForm, ArtistForm, DeleteArtist, DeleteVenue
@main.route('/')
def index():
return render_template('pages/home.html')
@main.route('/venues')
def venues():
data = []
date = datetime.now()
try:
# Get all the unique locations (city, state) of venues
locations = Venue.query.with_entities(
Venue.city.distinct(), Venue.state).all()
# Loop over all the locations & create a data_dict for each one
for location in locations:
data_dict = {}
city, state = location
data_dict['city'] = city
data_dict['state'] = state
# Get all venues in location
venue_list = []
venues = Venue.query.filter(Venue.city == city).all()
# Loop over all venues in that location & create a venue_dict for each one
for venue in venues:
venue_id, venue_dict = venue.id, venue.format_l()
# Get the number of upcoming shows for that venue
venue_dict['num_upcoming_shows'] = Show.query.filter(
Show.venue_id == venue_id,
Show.start_time > date).count()
venue_list.append(venue_dict)
data_dict['venues'] = venue_list
data.append(data_dict)
return render_template('pages/venues.html', areas=data)
except Exception:
db.session.rollback()
print(sys.exc_info())
flash("Something went wrong. Please try again.")
        return redirect(url_for('.index'))
finally:
db.session.close()
@main.route('/venues/search', methods=['POST'])
def search_venues():
data = {}
date = datetime.now()
try:
# Get the search term and query the database using LIKE
search_term = request.form.get('search_term', '')
venues = Venue.query.filter(Venue.name.ilike(f'%{search_term}%')).all()
data['count'] = len(venues)
data['data'] = []
# Loop over the resulting venues
for venue in venues:
venue_id, venue_dict = venue.id, venue.format_s()
# Get the number of upcoming shows for that venue
venue_dict['num_upcoming_shows'] = Show.query.filter(
Show.venue_id == venue_id,
Show.start_time > date).count()
data['data'].append(venue_dict)
return render_template('pages/search_venues.html',
results=data, search_term=search_term)
except Exception:
db.session.rollback()
print(sys.exc_info())
flash("Something went wrong. Please try again.")
        return redirect(url_for('.index'))
finally:
db.session.close()
@main.route('/venues/<int:venue_id>', methods=['GET', 'POST'])
def show_venue(venue_id):
date = datetime.now()
form = DeleteVenue()
try:
# If the user clicks the Delete Venue button
if request.method == 'POST':
# Delelte the venue from the database
venue = Venue.query.get(venue_id)
db.session.delete(venue)
db.session.commit()
# Flash a success message and redirect to homepage
flash(f'Venue {venue.name} was successfully deleted!')
return redirect(url_for('.index'))
# Get the venue with id = venue_id & create a data dict
venue_dict = Venue.query.get(venue_id).format_l()
venue_dict['upcoming_shows'] = []
venue_dict['past_shows'] = []
# Get the upcoming shows for that venue
upcoming_shows = Show.query.filter(
Show.venue_id == venue_id,
Show.start_time > date).all()
# Get the needed data from all upcoming shows
for show in upcoming_shows:
artist_id = show.artist_id
artist_dict = Artist.query.get(artist_id).format_m()
artist_dict['start_time'] = str(show.start_time)
venue_dict['upcoming_shows'].append(artist_dict)
venue_dict['upcoming_shows_count'] = len(upcoming_shows)
# Get the past shows for that venue
past_shows = Show.query.filter(
Show.venue_id == venue_id,
Show.start_time < date).all()
# Get the needed data from past shows
for show in past_shows:
artist_id = show.artist_id
artist_dict = Artist.query.get(artist_id).format_m()
artist_dict['start_time'] = str(show.start_time)
venue_dict['past_shows'].append(artist_dict)
venue_dict['past_shows_count'] = len(past_shows)
return render_template('pages/show_venue.html', venue=venue_dict, form=form)
except Exception:
db.session.rollback()
print(sys.exc_info())
flash("Something went wrong. Please try again.")
return redirect(url_for('.index'))
finally:
db.session.close()
@main.route('/venues/create', methods=['GET'])
def create_venue_form():
form = VenueForm()
return render_template('forms/new_venue.html', form=form)
@main.route('/venues/create', methods=['POST'])
def create_venue_submission():
try:
# Get the submitted form data
data = request.form
name = data.get('name', '')
city = data.get('city', '')
state = data.get('state', '')
address = data.get('address', '')
phone = data.get('phone', '')
genres = ','.join(data.getlist('genres'))
facebook_link = data.get('facebook_link', '')
# Create the venue and insert it into the DB
venue = Venue(name, city, state, address, phone, genres, facebook_link)
db.session.add(venue)
db.session.commit()
# On successful insert flash success
flash('Venue ' + request.form['name'] + ' was successfully listed!')
return redirect(url_for('.venues'))
except Exception:
db.session.rollback()
print(sys.exc_info())
flash("Something went wrong. Please try again.")
return redirect(url_for('.index'))
finally:
db.session.close()
@main.route('/artists')
def artists():
data = []
try:
# Get all the artists data
artists = Artist.query.all()
for artist in artists:
data.append(artist.format_s())
return render_template('pages/artists.html', artists=data)
except Exception:
db.session.rollback()
print(sys.exc_info())
flash("Something went wrong. Please try again.")
return redirect(url_for('.index'))
finally:
db.session.close()
@main.route('/artists/search', methods=['POST'])
def search_artists():
data = {}
date = datetime.now()
try:
# Get the search term and query the database using LIKE
        search_term = request.form.get('search_term', '')
        artists = Artist.query.filter(
            Artist.name.ilike(f'%{search_term}%')).all()
        data['count'] = len(artists)
        data['data'] = []
        # Loop over the matching artists
        for artist in artists:
            artist_id, artist_dict = artist.id, artist.format_s()
            # Get the number of upcoming shows for that artist
            artist_dict['num_upcoming_shows'] = Show.query.filter(
                Show.artist_id == artist_id,
                Show.start_time > date).count()
            data['data'].append(artist_dict)
return render_template('pages/search_venues.html',
results=data, search_term=search_term)
except Exception:
db.session.rollback()
print(sys.exc_info())
flash("Something went wrong. Please try again.")
        return redirect(url_for('.index'))
finally:
db.session.close()
@main.route('/artists/<int:artist_id>', methods=['GET', 'POST'])
def show_artist(artist_id):
date = datetime.now()
form = DeleteArtist()
try:
# If the user clicks the Delete Artist button
if request.method == 'POST':
artist = Artist.query.get(artist_id)
db.session.delete(artist)
db.session.commit()
# Flash a success message and redirect to homepage
flash(f'Artist {artist.name} was successfully deleted!')
return redirect(url_for('.index'))
# Get the artist with id = artist_id & create a data dict
artist_dict = Artist.query.get(artist_id).format_l()
artist_dict['upcoming_shows'] = []
artist_dict['past_shows'] = []
# Get the upcoming shows for that artist
upcoming_shows = Show.query.filter(
Show.artist_id == artist_id,
Show.start_time > date).all()
# Get the needed data from all upcoming shows
for show in upcoming_shows:
venue_id = show.venue_id
venue_dict = Venue.query.get(venue_id).format_m()
venue_dict['start_time'] = str(show.start_time)
artist_dict['upcoming_shows'].append(venue_dict)
artist_dict['upcoming_shows_count'] = len(upcoming_shows)
# Get the past shows for that artist
past_shows = Show.query.filter(
Show.artist_id == artist_id,
Show.start_time < date).all()
# Get the needed data from past shows
for show in past_shows:
venue_id = show.venue_id
venue_dict = Venue.query.get(venue_id).format_m()
venue_dict['start_time'] = str(show.start_time)
artist_dict['past_shows'].append(venue_dict)
artist_dict['past_shows_count'] = len(past_shows)
return render_template('pages/show_artist.html', artist=artist_dict, form=form)
except Exception:
db.session.rollback()
print(sys.exc_info())
flash("Something went wrong. Please try again.")
return redirect(url_for('.index'))
finally:
db.session.close()
@main.route('/artists/<int:artist_id>/edit', methods=['GET'])
def edit_artist(artist_id):
form = ArtistForm()
try:
# Get the artist's data
artist = Artist.query.get(artist_id).format_l()
return render_template('forms/edit_artist.html', form=form, artist=artist)
except Exception:
db.session.rollback()
print(sys.exc_info())
flash("Something went wrong. Please try again.")
return redirect(url_for('.index'))
finally:
db.session.close()
@main.route('/artists/<int:artist_id>/edit', methods=['POST'])
def edit_artist_submission(artist_id):
try:
# Get the submitted form data
data = request.form
name = data.get('name', '')
city = data.get('city', '')
state = data.get('state', '')
phone = data.get('phone', '')
genres = ','.join(data.getlist('genres'))
facebook_link = data.get('facebook_link', '')
# Get the artist and update its data
artist = Artist.query.get(artist_id)
artist.name = name
artist.city = city
artist.state = state
artist.phone = phone
artist.genres = genres
artist.facebook_link = facebook_link
db.session.add(artist)
db.session.commit()
# On successful insert flash success
flash('Artist ' + request.form['name'] + ' was successfully updated!')
return redirect(url_for('.show_artist', artist_id=artist_id))
except Exception:
db.session.rollback()
print(sys.exc_info())
flash("Something went wrong. Please try again.")
return redirect(url_for('.index'))
finally:
db.session.close()
@main.route('/venues/<int:venue_id>/edit', methods=['GET'])
def edit_venue(venue_id):
form = VenueForm()
try:
# Get the venue's data
venue = Venue.query.get(venue_id).format_l()
return render_template('forms/edit_venue.html', form=form, venue=venue)
except Exception:
db.session.rollback()
print(sys.exc_info())
flash("Something went wrong. Please try again.")
return redirect(url_for('.index'))
finally:
db.session.close()
@main.route('/venues/<int:venue_id>/edit', methods=['POST'])
def edit_venue_submission(venue_id):
try:
# Get the submitted form data
data = request.form
name = data.get('name', '')
city = data.get('city', '')
state = data.get('state', '')
address = data.get('address', '')
phone = data.get('phone', '')
genres = ','.join(data.getlist('genres'))
facebook_link = data.get('facebook_link', '')
# Get the venue and update its data
venue = Venue.query.get(venue_id)
venue.name = name
venue.city = city
venue.state = state
venue.phone = phone
venue.genres = genres
venue.facebook_link = facebook_link
db.session.add(venue)
db.session.commit()
# On successful insert flash success
flash('Venue ' + request.form['name'] + ' was successfully updated!')
return redirect(url_for('.show_venue', venue_id=venue_id))
except Exception:
db.session.rollback()
print(sys.exc_info())
flash("Something went wrong. Please try again.")
return redirect(url_for('.index'))
finally:
db.session.close()
@main.route('/artists/create', methods=['GET'])
def create_artist_form():
form = ArtistForm()
return render_template('forms/new_artist.html', form=form)
@main.route('/artists/create', methods=['POST'])
def create_artist_submission():
try:
# Get the submitted form data
data = request.form
name = data.get('name', '')
city = data.get('city', '')
state = data.get('state', '')
phone = data.get('phone', '')
genres = ','.join(data.getlist('genres'))
facebook_link = data.get('facebook_link', '')
# Create the venue and insert it into the DB
artist = Artist(name, city, state, phone, genres, facebook_link)
db.session.add(artist)
db.session.commit()
# On successful insert flash success
flash('Artist ' + request.form['name'] + ' was successfully listed!')
return redirect(url_for('.artists'))
except Exception:
db.session.rollback()
print(sys.exc_info())
flash("Something went wrong. Please try again.")
return redirect(url_for('.index'))
finally:
db.session.close()
@main.route('/shows')
def shows():
data = []
try:
# Get all the shows
shows = Show.query.all()
# Loop over each show and generate its data
for show in shows:
show_dict = show.format_l()
show_dict['artist_name'] = show.artist.name
show_dict['artist_image_link'] = show.artist.image_link
show_dict['venue_name'] = show.venue.name
data.append(show_dict)
return render_template('pages/shows.html', shows=data)
except Exception:
db.session.rollback()
print(sys.exc_info())
flash("Something went wrong. Please try again.")
return redirect(url_for('.index'))
finally:
db.session.close()
@main.route('/shows/create')
def create_shows():
form = ShowForm()
return render_template('forms/new_show.html', form=form)
@main.route('/shows/create', methods=['POST'])
def create_show_submission():
try:
# Get the submitted form data
data = request.form
artist_id = data.get('artist_id')
venue_id = data.get('venue_id')
start_time = data.get('start_time')
# Create the show and insert it to the DB
show = Show(artist_id, venue_id, start_time)
db.session.add(show)
db.session.commit()
# On successful insert flash success
flash('Show was successfully listed!')
return redirect(url_for('.shows'))
except Exception:
db.session.rollback()
print(sys.exc_info())
flash("Something went wrong. Please try again.")
return redirect(url_for('.index'))
finally:
db.session.close()
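# Wiring note: `main` (imported at the top) is the blueprint these routes attach to;
# its definition lives elsewhere in the package and is not shown here, but it
# presumably looks roughly like the following illustrative sketch:
#
#   from flask import Blueprint
#   main = Blueprint('main', __name__)
#   from . import routes  # registers the view functions above
#
# and is registered on the application with app.register_blueprint(main), which is
# what makes the blueprint-relative url_for('.index') calls above resolve.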
|
[
"flask.flash",
"flask.request.form.get",
"flask.url_for",
"flask.render_template",
"sys.exc_info",
"datetime.datetime.now"
] |
[((295, 329), 'flask.render_template', 'render_template', (['"""pages/home.html"""'], {}), "('pages/home.html')\n", (310, 329), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((395, 409), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (407, 409), False, 'from datetime import datetime\n'), ((1895, 1909), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1907, 1909), False, 'from datetime import datetime\n'), ((3033, 3047), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3045, 3047), False, 'from datetime import datetime\n'), ((5220, 5270), 'flask.render_template', 'render_template', (['"""forms/new_venue.html"""'], {'form': 'form'}), "('forms/new_venue.html', form=form)\n", (5235, 5270), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((6907, 6921), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (6919, 6921), False, 'from datetime import datetime\n'), ((8064, 8078), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (8076, 8078), False, 'from datetime import datetime\n'), ((13646, 13697), 'flask.render_template', 'render_template', (['"""forms/new_artist.html"""'], {'form': 'form'}), "('forms/new_artist.html', form=form)\n", (13661, 13697), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((15514, 15563), 'flask.render_template', 'render_template', (['"""forms/new_show.html"""'], {'form': 'form'}), "('forms/new_show.html', form=form)\n", (15529, 15563), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((1532, 1580), 'flask.render_template', 'render_template', (['"""pages/venues.html"""'], {'areas': 'data'}), "('pages/venues.html', areas=data)\n", (1547, 1580), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((2006, 2041), 'flask.request.form.get', 'request.form.get', (['"""search_term"""', '""""""'], {}), "('search_term', '')\n", (2022, 2041), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((2599, 2686), 'flask.render_template', 'render_template', (['"""pages/search_venues.html"""'], {'results': 'data', 'search_term': 'search_term'}), "('pages/search_venues.html', results=data, search_term=\n search_term)\n", (2614, 2686), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((4818, 4887), 'flask.render_template', 'render_template', (['"""pages/show_venue.html"""'], {'venue': 'venue_dict', 'form': 'form'}), "('pages/show_venue.html', venue=venue_dict, form=form)\n", (4833, 4887), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((5967, 6035), 'flask.flash', 'flash', (["('Venue ' + request.form['name'] + ' was successfully listed!')"], {}), "('Venue ' + request.form['name'] + ' was successfully listed!')\n", (5972, 6035), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((6532, 6583), 'flask.render_template', 'render_template', (['"""pages/artists.html"""'], {'artists': 'data'}), "('pages/artists.html', artists=data)\n", (6547, 6583), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((7018, 7053), 'flask.request.form.get', 'request.form.get', (['"""search_term"""', '""""""'], {}), "('search_term', '')\n", (7034, 7053), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((7626, 7713), 'flask.render_template', 'render_template', (['"""pages/search_venues.html"""'], {'results': 'data', 
'search_term': 'search_term'}), "('pages/search_venues.html', results=data, search_term=\n search_term)\n", (7641, 7713), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((9809, 9881), 'flask.render_template', 'render_template', (['"""pages/show_artist.html"""'], {'artist': 'artist_dict', 'form': 'form'}), "('pages/show_artist.html', artist=artist_dict, form=form)\n", (9824, 9881), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((10336, 10403), 'flask.render_template', 'render_template', (['"""forms/edit_artist.html"""'], {'form': 'form', 'artist': 'artist'}), "('forms/edit_artist.html', form=form, artist=artist)\n", (10351, 10403), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((11450, 11520), 'flask.flash', 'flash', (["('Artist ' + request.form['name'] + ' was successfully updated!')"], {}), "('Artist ' + request.form['name'] + ' was successfully updated!')\n", (11455, 11520), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((12036, 12100), 'flask.render_template', 'render_template', (['"""forms/edit_venue.html"""'], {'form': 'form', 'venue': 'venue'}), "('forms/edit_venue.html', form=form, venue=venue)\n", (12051, 12100), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((13174, 13243), 'flask.flash', 'flash', (["('Venue ' + request.form['name'] + ' was successfully updated!')"], {}), "('Venue ' + request.form['name'] + ' was successfully updated!')\n", (13179, 13243), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((14348, 14417), 'flask.flash', 'flash', (["('Artist ' + request.form['name'] + ' was successfully listed!')"], {}), "('Artist ' + request.form['name'] + ' was successfully listed!')\n", (14353, 14417), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((15158, 15205), 'flask.render_template', 'render_template', (['"""pages/shows.html"""'], {'shows': 'data'}), "('pages/shows.html', shows=data)\n", (15173, 15205), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((16059, 16097), 'flask.flash', 'flash', (['"""Show was successfully listed!"""'], {}), "('Show was successfully listed!')\n", (16064, 16097), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((1672, 1720), 'flask.flash', 'flash', (['"""Something went wrong. Please try again."""'], {}), "('Something went wrong. Please try again.')\n", (1677, 1720), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((2804, 2852), 'flask.flash', 'flash', (['"""Something went wrong. Please try again."""'], {}), "('Something went wrong. Please try again.')\n", (2809, 2852), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((3415, 3469), 'flask.flash', 'flash', (['f"""Venue {venue.name} was successfully deleted!"""'], {}), "(f'Venue {venue.name} was successfully deleted!')\n", (3420, 3469), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((4979, 5027), 'flask.flash', 'flash', (['"""Something went wrong. Please try again."""'], {}), "('Something went wrong. 
Please try again.')\n", (4984, 5027), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((6060, 6078), 'flask.url_for', 'url_for', (['""".venues"""'], {}), "('.venues')\n", (6067, 6078), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((6171, 6219), 'flask.flash', 'flash', (['"""Something went wrong. Please try again."""'], {}), "('Something went wrong. Please try again.')\n", (6176, 6219), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((6675, 6723), 'flask.flash', 'flash', (['"""Something went wrong. Please try again."""'], {}), "('Something went wrong. Please try again.')\n", (6680, 6723), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((7831, 7879), 'flask.flash', 'flash', (['"""Something went wrong. Please try again."""'], {}), "('Something went wrong. Please try again.')\n", (7836, 7879), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((8401, 8457), 'flask.flash', 'flash', (['f"""Artist {artist.name} was successfully deleted!"""'], {}), "(f'Artist {artist.name} was successfully deleted!')\n", (8406, 8457), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((9973, 10021), 'flask.flash', 'flash', (['"""Something went wrong. Please try again."""'], {}), "('Something went wrong. Please try again.')\n", (9978, 10021), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((10495, 10543), 'flask.flash', 'flash', (['"""Something went wrong. Please try again."""'], {}), "('Something went wrong. Please try again.')\n", (10500, 10543), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((11545, 11589), 'flask.url_for', 'url_for', (['""".show_artist"""'], {'artist_id': 'artist_id'}), "('.show_artist', artist_id=artist_id)\n", (11552, 11589), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((11682, 11730), 'flask.flash', 'flash', (['"""Something went wrong. Please try again."""'], {}), "('Something went wrong. Please try again.')\n", (11687, 11730), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((12192, 12240), 'flask.flash', 'flash', (['"""Something went wrong. Please try again."""'], {}), "('Something went wrong. Please try again.')\n", (12197, 12240), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((13268, 13309), 'flask.url_for', 'url_for', (['""".show_venue"""'], {'venue_id': 'venue_id'}), "('.show_venue', venue_id=venue_id)\n", (13275, 13309), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((13402, 13450), 'flask.flash', 'flash', (['"""Something went wrong. Please try again."""'], {}), "('Something went wrong. Please try again.')\n", (13407, 13450), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((14442, 14461), 'flask.url_for', 'url_for', (['""".artists"""'], {}), "('.artists')\n", (14449, 14461), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((14554, 14602), 'flask.flash', 'flash', (['"""Something went wrong. Please try again."""'], {}), "('Something went wrong. Please try again.')\n", (14559, 14602), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((15297, 15345), 'flask.flash', 'flash', (['"""Something went wrong. Please try again."""'], {}), "('Something went wrong. 
Please try again.')\n", (15302, 15345), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((16122, 16139), 'flask.url_for', 'url_for', (['""".shows"""'], {}), "('.shows')\n", (16129, 16139), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((16232, 16280), 'flask.flash', 'flash', (['"""Something went wrong. Please try again."""'], {}), "('Something went wrong. Please try again.')\n", (16237, 16280), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((1648, 1662), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (1660, 1662), False, 'import sys\n'), ((1738, 1755), 'flask.url_for', 'url_for', (['""".index"""'], {}), "('.index')\n", (1745, 1755), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((2780, 2794), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (2792, 2794), False, 'import sys\n'), ((2870, 2887), 'flask.url_for', 'url_for', (['""".index"""'], {}), "('.index')\n", (2877, 2887), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((3498, 3515), 'flask.url_for', 'url_for', (['""".index"""'], {}), "('.index')\n", (3505, 3515), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((4955, 4969), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (4967, 4969), False, 'import sys\n'), ((5052, 5069), 'flask.url_for', 'url_for', (['""".index"""'], {}), "('.index')\n", (5059, 5069), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((6147, 6161), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (6159, 6161), False, 'import sys\n'), ((6244, 6261), 'flask.url_for', 'url_for', (['""".index"""'], {}), "('.index')\n", (6251, 6261), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((6651, 6665), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (6663, 6665), False, 'import sys\n'), ((6748, 6765), 'flask.url_for', 'url_for', (['""".index"""'], {}), "('.index')\n", (6755, 6765), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((7807, 7821), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (7819, 7821), False, 'import sys\n'), ((7897, 7914), 'flask.url_for', 'url_for', (['""".index"""'], {}), "('.index')\n", (7904, 7914), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((8486, 8503), 'flask.url_for', 'url_for', (['""".index"""'], {}), "('.index')\n", (8493, 8503), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((9949, 9963), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (9961, 9963), False, 'import sys\n'), ((10046, 10063), 'flask.url_for', 'url_for', (['""".index"""'], {}), "('.index')\n", (10053, 10063), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((10471, 10485), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (10483, 10485), False, 'import sys\n'), ((10568, 10585), 'flask.url_for', 'url_for', (['""".index"""'], {}), "('.index')\n", (10575, 10585), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((11658, 11672), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (11670, 11672), False, 'import sys\n'), ((11755, 11772), 'flask.url_for', 'url_for', (['""".index"""'], {}), "('.index')\n", (11762, 11772), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((12168, 12182), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (12180, 12182), 
False, 'import sys\n'), ((12265, 12282), 'flask.url_for', 'url_for', (['""".index"""'], {}), "('.index')\n", (12272, 12282), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((13378, 13392), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (13390, 13392), False, 'import sys\n'), ((13475, 13492), 'flask.url_for', 'url_for', (['""".index"""'], {}), "('.index')\n", (13482, 13492), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((14530, 14544), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (14542, 14544), False, 'import sys\n'), ((14627, 14644), 'flask.url_for', 'url_for', (['""".index"""'], {}), "('.index')\n", (14634, 14644), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((15273, 15287), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (15285, 15287), False, 'import sys\n'), ((15370, 15387), 'flask.url_for', 'url_for', (['""".index"""'], {}), "('.index')\n", (15377, 15387), False, 'from flask import render_template, request, redirect, url_for, flash\n'), ((16208, 16222), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (16220, 16222), False, 'import sys\n'), ((16305, 16322), 'flask.url_for', 'url_for', (['""".index"""'], {}), "('.index')\n", (16312, 16322), False, 'from flask import render_template, request, redirect, url_for, flash\n')]
|
import os
from flask import render_template, flash, redirect, url_for, request
import requests
from app.source import bp
from app.models import article as at
@bp.route('/source')
def search_source():
"""Show this view when a source is pressed and show articles."""
API_KEY = os.environ.get('API_KEY')
news_source = request.args.get('q')
# Make the request and change the response to a dict we can use.
url = f"https://newsapi.org/v2/top-headlines?sources={news_source}&apiKey={API_KEY}"
response = requests.get(url).json()
# create article objects and store them in a list.
articles = []
for article in response["articles"]:
articles.append(at.Article(
article["source"], article["author"], article["title"], article["description"],
article["url"], article["urlToImage"], article["publishedAt"], article["content"]))
return render_template('source.html', source=news_source, articles=articles, api_key=API_KEY)
|
[
"app.models.article.Article",
"flask.request.args.get",
"app.source.bp.route",
"os.environ.get",
"requests.get",
"flask.render_template"
] |
[((161, 180), 'app.source.bp.route', 'bp.route', (['"""/source"""'], {}), "('/source')\n", (169, 180), False, 'from app.source import bp\n'), ((285, 310), 'os.environ.get', 'os.environ.get', (['"""API_KEY"""'], {}), "('API_KEY')\n", (299, 310), False, 'import os\n'), ((329, 350), 'flask.request.args.get', 'request.args.get', (['"""q"""'], {}), "('q')\n", (345, 350), False, 'from flask import render_template, flash, redirect, url_for, request\n'), ((905, 995), 'flask.render_template', 'render_template', (['"""source.html"""'], {'source': 'news_source', 'articles': 'articles', 'api_key': 'API_KEY'}), "('source.html', source=news_source, articles=articles,\n api_key=API_KEY)\n", (920, 995), False, 'from flask import render_template, flash, redirect, url_for, request\n'), ((525, 542), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (537, 542), False, 'import requests\n'), ((689, 872), 'app.models.article.Article', 'at.Article', (["article['source']", "article['author']", "article['title']", "article['description']", "article['url']", "article['urlToImage']", "article['publishedAt']", "article['content']"], {}), "(article['source'], article['author'], article['title'], article[\n 'description'], article['url'], article['urlToImage'], article[\n 'publishedAt'], article['content'])\n", (699, 872), True, 'from app.models import article as at\n')]
|
import pytest
from .Week1Bonus_PalindromePermutation import Solution
s = Solution()
@pytest.mark.parametrize("test_input", ["code", "abc"])
def test_cannot_permute(test_input):
assert not s.canPermutePalindrome(test_input)
@pytest.mark.parametrize("test_input", ["aab", "carerac", "a", "aa"])
def test_can_permute(test_input):
assert s.canPermutePalindrome(test_input)
|
[
"pytest.mark.parametrize"
] |
[((88, 142), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""test_input"""', "['code', 'abc']"], {}), "('test_input', ['code', 'abc'])\n", (111, 142), False, 'import pytest\n'), ((233, 301), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""test_input"""', "['aab', 'carerac', 'a', 'aa']"], {}), "('test_input', ['aab', 'carerac', 'a', 'aa'])\n", (256, 301), False, 'import pytest\n')]
|
#!/usr/bin/env python
"""Calculate HOG features for an image"""
import os
import matplotlib.pyplot as plt
from hog_features import image2pixelarray
from skimage import exposure
from skimage.feature import hog
def main(filename):
"""
Orchestrate the HOG feature calculation
Parameters
----------
filename : str
"""
image = image2pixelarray(filename)
fd, hog_image = hog(
image,
orientations=8,
pixels_per_cell=(16, 16),
cells_per_block=(1, 1),
visualise=True,
)
fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(8, 4), sharex=True, sharey=True)
ax1.axis("off")
ax1.imshow(image, cmap=plt.cm.gray)
ax1.set_title("Input image")
ax1.set_adjustable("box-forced")
# Rescale histogram for better display
hog_image_rescaled = exposure.rescale_intensity(hog_image, in_range=(0, 0.02))
ax2.axis("off")
ax2.imshow(hog_image_rescaled, cmap=plt.cm.gray)
ax2.set_title("Histogram of Oriented Gradients")
ax1.set_adjustable("box-forced")
plt.show()
def is_valid_file(parser, arg):
"""
Check if arg is a valid file that already exists on the file system.
Parameters
----------
parser : argparse object
arg : str
Returns
-------
arg
"""
arg = os.path.abspath(arg)
if not os.path.exists(arg):
parser.error("The file %s does not exist!" % arg)
else:
return arg
def get_parser():
"""Get parser object for scikithog"""
from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser
parser = ArgumentParser(
description=__doc__, formatter_class=ArgumentDefaultsHelpFormatter
)
parser.add_argument(
"-f",
"--file",
dest="filename",
type=lambda x: is_valid_file(parser, x),
help="write report to FILE",
required=True,
metavar="FILE",
)
return parser
if __name__ == "__main__":
args = get_parser().parse_args()
main(args.filename)
|
[
"os.path.abspath",
"matplotlib.pyplot.show",
"argparse.ArgumentParser",
"skimage.exposure.rescale_intensity",
"skimage.feature.hog",
"os.path.exists",
"hog_features.image2pixelarray",
"matplotlib.pyplot.subplots"
] |
[((356, 382), 'hog_features.image2pixelarray', 'image2pixelarray', (['filename'], {}), '(filename)\n', (372, 382), False, 'from hog_features import image2pixelarray\n'), ((404, 500), 'skimage.feature.hog', 'hog', (['image'], {'orientations': '(8)', 'pixels_per_cell': '(16, 16)', 'cells_per_block': '(1, 1)', 'visualise': '(True)'}), '(image, orientations=8, pixels_per_cell=(16, 16), cells_per_block=(1, 1),\n visualise=True)\n', (407, 500), False, 'from skimage.feature import hog\n'), ((567, 627), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(2)'], {'figsize': '(8, 4)', 'sharex': '(True)', 'sharey': '(True)'}), '(1, 2, figsize=(8, 4), sharex=True, sharey=True)\n', (579, 627), True, 'import matplotlib.pyplot as plt\n'), ((828, 885), 'skimage.exposure.rescale_intensity', 'exposure.rescale_intensity', (['hog_image'], {'in_range': '(0, 0.02)'}), '(hog_image, in_range=(0, 0.02))\n', (854, 885), False, 'from skimage import exposure\n'), ((1054, 1064), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1062, 1064), True, 'import matplotlib.pyplot as plt\n'), ((1305, 1325), 'os.path.abspath', 'os.path.abspath', (['arg'], {}), '(arg)\n', (1320, 1325), False, 'import os\n'), ((1592, 1679), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'description': '__doc__', 'formatter_class': 'ArgumentDefaultsHelpFormatter'}), '(description=__doc__, formatter_class=\n ArgumentDefaultsHelpFormatter)\n', (1606, 1679), False, 'from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser\n'), ((1337, 1356), 'os.path.exists', 'os.path.exists', (['arg'], {}), '(arg)\n', (1351, 1356), False, 'import os\n')]
|
import pandas as pd
import tweepy
import json
import configparser
import re, string, random
from nltk.stem.wordnet import WordNetLemmatizer
from nltk.corpus import twitter_samples, stopwords
from nltk.tag import pos_tag
from nltk import TweetTokenizer
from nltk import FreqDist, classify, NaiveBayesClassifier
def train_model(stop_words):
# https://www.digitalocean.com/community/tutorials/how-to-perform-sentiment-analysis-in-python-3-using-the-natural-language-toolkit-nltk
# https://github.com/sdaityari/sentiment.analysis.tutorial/blob/master/Sentiment%20Analysis%20in%20Python%203.ipynb
# <NAME>
positive_tweet_tokens = twitter_samples.tokenized('positive_tweets.json')
negative_tweet_tokens = twitter_samples.tokenized('negative_tweets.json')
positive_cleaned_tokens_list = []
negative_cleaned_tokens_list = []
for tokens in positive_tweet_tokens:
positive_cleaned_tokens_list.append(remove_noise(tokens, stop_words))
for tokens in negative_tweet_tokens:
negative_cleaned_tokens_list.append(remove_noise(tokens, stop_words))
all_pos_words = get_all_words(positive_cleaned_tokens_list)
freq_dist_pos = FreqDist(all_pos_words)
print(freq_dist_pos.most_common(10))
positive_tokens_for_model = get_tweets_for_model(positive_cleaned_tokens_list)
negative_tokens_for_model = get_tweets_for_model(negative_cleaned_tokens_list)
positive_dataset = [(tweet_dict, "Positive")
for tweet_dict in positive_tokens_for_model]
negative_dataset = [(tweet_dict, "Negative")
for tweet_dict in negative_tokens_for_model]
dataset = positive_dataset + negative_dataset
random.shuffle(dataset)
train_data = dataset[:7000]
test_data = dataset[7000:]
classifier = NaiveBayesClassifier.train(train_data)
print("Accuracy is:", classify.accuracy(classifier, test_data))
print(classifier.show_most_informative_features(10))
return classifier
def remove_noise(tweet_tokens, stop_words):
# print(f'noisy: {tweet_tokens}')
cleaned_tokens = []
for token, tag in pos_tag(tweet_tokens):
token = re.sub('http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+#]|[!*(),]|'
'(?:%[0-9a-fA-F][0-9a-fA-F]))+','', token)
token = re.sub("(@[A-Za-z0-9_]+)","", token)
token = re.sub(r'[^\x00-\x7F]+','', token) # this line takes out win-1252 encoded characters, to be fixed in a future release of tabpy at which time this line can be removed
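        # Map Penn Treebank tags from pos_tag to WordNet POS codes ('n'/'v'/'a') for the lemmatizer.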
if tag.startswith("NN"):
pos = 'n'
elif tag.startswith('VB'):
pos = 'v'
else:
pos = 'a'
lemmatizer = WordNetLemmatizer()
token = lemmatizer.lemmatize(token, pos)
if len(token) > 0 and token not in string.punctuation and token.lower() not in stop_words:
cleaned_tokens.append(token.lower())
# print(f'quiet: {cleaned_tokens}')
return cleaned_tokens
def get_all_words(cleaned_tokens_list):
for tokens in cleaned_tokens_list:
for token in tokens:
yield token
def get_tweets_for_model(cleaned_tokens_list):
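    # NLTK's NaiveBayesClassifier expects feature dicts, so mark every token in a tweet as present (True).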
for tweet_tokens in cleaned_tokens_list:
yield dict([token, True] for token in tweet_tokens)
def queryTwitter(df): # the initial df is the csv containing the twitter handles to query
stop_words = stopwords.words('english')
classifier = train_model(stop_words)
df_final = pd.DataFrame() # the final df will hold all tweets across all handles
for i in df.iterrows(): # iterate thru the handles
print('processing: '+ i[1][0])
df2 = get_tweets(i[1][0]+' -filter:retweets', i[1][1]) # create a new df to hold the tweets for each handle
df2.insert(1,'search_handle', i[1][0])
df2 = df2.astype({'created_at': str})
df2 = df2.assign(tokens = '[]') # using assign instead of insert
df2 = df2.assign(sentiment = '') # using assign instead of insert
df2 = clean_tweets(classifier, df2, stop_words)
df2 = df2.astype({'tokens': str})
df_final = df_final.append(df2, ignore_index=True)
print(df_final.columns)
return df_final
def get_output_schema():
return pd.DataFrame({
'id': prep_string(),
'search_handle': prep_string(),
'author_name': prep_string(),
'author_handle': prep_string(),
'created_at': prep_string(),
'tweet_text': prep_string(),
'retweet_count': prep_int(),
'favorite_count': prep_int(),
'tokens': prep_string(),
'sentiment': prep_string()
})
def get_tweets(string_serch, int_returnrows):
# http://docs.tweepy.org/en/v3.9.0/getting_started.html
config = configparser.ConfigParser()
config.read('twitterkeys.ini')
# Consume:
consumer_key = config['Consume']['consumer_key']
consumer_secret = config['Consume']['consumer_secret']
# Access:
access_token = config['Access']['access_token']
access_secret = config['Access']['access_secret']
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_secret)
api = tweepy.API(auth)
alltweets = []
for tweet in tweepy.Cursor(api.search, q=string_serch).items(int_returnrows):
# print(' ' + tweet.text)
outtweets = [tweet.id_str, tweet.author.name, '@'+tweet.author.screen_name, tweet.created_at, tweet.text, tweet.retweet_count, tweet.favorite_count]
alltweets.append(outtweets)
df = pd.DataFrame(data=alltweets, columns=['id','author_name', 'author_handle', 'created_at','tweet_text','retweet_count','favorite_count'])
return df
def clean_tweets(classifier, df, stop_words):
tknzr = TweetTokenizer()
for i in df.iterrows():
# print('tweet: '+df['tweet_text'][i[0]])
tokens = tknzr.tokenize(i[1]['tweet_text']) # using NLTK tweet tokenizer
custom_tokens = remove_noise(tokens, stop_words)
df['tokens'][i[0]] = custom_tokens # need to fix this warning later
# SettingWithCopyWarning:
# A value is trying to be set on a copy of a slice from a DataFrame
# See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy
# grabs the current row: df.loc[i[0]]
# grabs the tokens column of the current row: df.loc[i[0]]['tokens']
# this is a python object of type array: df.loc[df.id == i[0], 'tokens']
# df.loc[df.id == i[0], 'tokens'] = remove_noise(tokens, stop_words)
score = classifier.classify(dict([token, True] for token in custom_tokens))
df['sentiment'][i[0]] = score
return df
if __name__ == "__main__":
import pandas as pd
df = pd.read_csv('twitter_query.csv')
df2 = queryTwitter(df)
df2.to_json('tweets.json', orient='table')
df2.to_excel('tweets.xlsx')
|
[
"pandas.DataFrame",
"tweepy.API",
"nltk.stem.wordnet.WordNetLemmatizer",
"nltk.corpus.twitter_samples.tokenized",
"random.shuffle",
"pandas.read_csv",
"nltk.classify.accuracy",
"nltk.NaiveBayesClassifier.train",
"nltk.tag.pos_tag",
"tweepy.Cursor",
"nltk.corpus.stopwords.words",
"tweepy.OAuthHandler",
"nltk.TweetTokenizer",
"configparser.ConfigParser",
"nltk.FreqDist",
"re.sub"
] |
[((645, 694), 'nltk.corpus.twitter_samples.tokenized', 'twitter_samples.tokenized', (['"""positive_tweets.json"""'], {}), "('positive_tweets.json')\n", (670, 694), False, 'from nltk.corpus import twitter_samples, stopwords\n'), ((723, 772), 'nltk.corpus.twitter_samples.tokenized', 'twitter_samples.tokenized', (['"""negative_tweets.json"""'], {}), "('negative_tweets.json')\n", (748, 772), False, 'from nltk.corpus import twitter_samples, stopwords\n'), ((1176, 1199), 'nltk.FreqDist', 'FreqDist', (['all_pos_words'], {}), '(all_pos_words)\n', (1184, 1199), False, 'from nltk import FreqDist, classify, NaiveBayesClassifier\n'), ((1704, 1727), 'random.shuffle', 'random.shuffle', (['dataset'], {}), '(dataset)\n', (1718, 1727), False, 'import re, string, random\n'), ((1810, 1848), 'nltk.NaiveBayesClassifier.train', 'NaiveBayesClassifier.train', (['train_data'], {}), '(train_data)\n', (1836, 1848), False, 'from nltk import FreqDist, classify, NaiveBayesClassifier\n'), ((2128, 2149), 'nltk.tag.pos_tag', 'pos_tag', (['tweet_tokens'], {}), '(tweet_tokens)\n', (2135, 2149), False, 'from nltk.tag import pos_tag\n'), ((3375, 3401), 'nltk.corpus.stopwords.words', 'stopwords.words', (['"""english"""'], {}), "('english')\n", (3390, 3401), False, 'from nltk.corpus import twitter_samples, stopwords\n'), ((3458, 3472), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (3470, 3472), True, 'import pandas as pd\n'), ((4734, 4761), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (4759, 4761), False, 'import configparser\n'), ((5060, 5110), 'tweepy.OAuthHandler', 'tweepy.OAuthHandler', (['consumer_key', 'consumer_secret'], {}), '(consumer_key, consumer_secret)\n', (5079, 5110), False, 'import tweepy\n'), ((5176, 5192), 'tweepy.API', 'tweepy.API', (['auth'], {}), '(auth)\n', (5186, 5192), False, 'import tweepy\n'), ((5537, 5680), 'pandas.DataFrame', 'pd.DataFrame', ([], {'data': 'alltweets', 'columns': "['id', 'author_name', 'author_handle', 'created_at', 'tweet_text',\n 'retweet_count', 'favorite_count']"}), "(data=alltweets, columns=['id', 'author_name', 'author_handle',\n 'created_at', 'tweet_text', 'retweet_count', 'favorite_count'])\n", (5549, 5680), True, 'import pandas as pd\n'), ((5746, 5762), 'nltk.TweetTokenizer', 'TweetTokenizer', ([], {}), '()\n', (5760, 5762), False, 'from nltk import TweetTokenizer\n'), ((6800, 6832), 'pandas.read_csv', 'pd.read_csv', (['"""twitter_query.csv"""'], {}), "('twitter_query.csv')\n", (6811, 6832), True, 'import pandas as pd\n'), ((1876, 1916), 'nltk.classify.accuracy', 'classify.accuracy', (['classifier', 'test_data'], {}), '(classifier, test_data)\n', (1893, 1916), False, 'from nltk import FreqDist, classify, NaiveBayesClassifier\n'), ((2167, 2275), 're.sub', 're.sub', (['"""http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+#]|[!*(),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+"""', '""""""', 'token'], {}), "(\n 'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+#]|[!*(),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+'\n , '', token)\n", (2173, 2275), False, 'import re, string, random\n'), ((2307, 2344), 're.sub', 're.sub', (['"""(@[A-Za-z0-9_]+)"""', '""""""', 'token'], {}), "('(@[A-Za-z0-9_]+)', '', token)\n", (2313, 2344), False, 'import re, string, random\n'), ((2360, 2396), 're.sub', 're.sub', (['"""[^\\\\x00-\\\\x7F]+"""', '""""""', 'token'], {}), "('[^\\\\x00-\\\\x7F]+', '', token)\n", (2366, 2396), False, 'import re, string, random\n'), ((2697, 2716), 'nltk.stem.wordnet.WordNetLemmatizer', 'WordNetLemmatizer', ([], {}), '()\n', (2714, 2716), False, 'from nltk.stem.wordnet import 
WordNetLemmatizer\n'), ((5232, 5273), 'tweepy.Cursor', 'tweepy.Cursor', (['api.search'], {'q': 'string_serch'}), '(api.search, q=string_serch)\n', (5245, 5273), False, 'import tweepy\n')]
|
from django import forms
from mimetypes import guess_type
import base64
import os
from .models import Post
class PostForm(forms.ModelForm):
class Meta:
model = Post
fields = [
"content",
"image",
"privacy",
"content_type",
"accessible_users",
"unlisted",
"user",
"publish"
]
def __init__(self, *args, **kwargs):
super(PostForm, self).__init__(*args, **kwargs)
self.fields['user'].widget = forms.HiddenInput()
self.fields['publish'].widget = forms.HiddenInput()
self.set_placeholder('content', 'What\'s on your mind?')
self.set_form_class()
#add placeholder text to fields
def set_placeholder(self, field, text):
self.fields[field].widget.attrs['placeholder'] = text
#add class for css
def set_form_class(self):
self.fields['content'].widget.attrs['class'] = "create_post"
self.fields['unlisted'].widget.attrs['class'] = "create_post"
"""
    Saves the post, sets its title, and attaches the accessible users (including the author).
"""
def save(self, commit=True):
accessible_users = self.cleaned_data.pop('accessible_users', [])
print(accessible_users)
post = super().save(commit)
username = post.user.username
timestamp = post.timestamp.strftime("%b %-d, %Y, at %H:%M %p")
post.title = username+" - "+timestamp
post.save()
post.accessible_users.add(*accessible_users)
post.accessible_users.add(post.user)
return post
class ImageForm(forms.ModelForm):
class Meta:
model = Post
fields = [
"image",
"privacy",
"accessible_users",
"user",
"publish"
]
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.fields['user'].widget = forms.HiddenInput()
self.fields['publish'].widget = forms.HiddenInput()
"""
    Saves the post, sets its title, and attaches the accessible users (including the author).
"""
def save(self, commit=True):
accessible_users = self.cleaned_data.pop('accessible_users', [])
print(accessible_users)
post = super().save(commit)
username = post.user.username
timestamp = post.timestamp.strftime("%b %-d, %Y, at %H:%M %p")
post.title = username+" - "+timestamp
post.save()
post.accessible_users.add(*accessible_users)
post.accessible_users.add(post.user)
return post
|
[
"django.forms.HiddenInput"
] |
[((539, 558), 'django.forms.HiddenInput', 'forms.HiddenInput', ([], {}), '()\n', (556, 558), False, 'from django import forms\n'), ((599, 618), 'django.forms.HiddenInput', 'forms.HiddenInput', ([], {}), '()\n', (616, 618), False, 'from django import forms\n'), ((1964, 1983), 'django.forms.HiddenInput', 'forms.HiddenInput', ([], {}), '()\n', (1981, 1983), False, 'from django import forms\n'), ((2024, 2043), 'django.forms.HiddenInput', 'forms.HiddenInput', ([], {}), '()\n', (2041, 2043), False, 'from django import forms\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# File: list.py
# by <NAME>
# <EMAIL>
#
import os
from datetime import datetime as dt
def do_list(ark, opts):
total_size = sum(x.original_filesize for x in ark.metadatas)
len_size = max(len(str(total_size)), len('Length'))
if opts['-v']:
print("File: %s" % ark.filename)
print(' Flag %*s Date Time Name' % (len_size,
'Length'))
print(' ---- -%s ---------- ----- ----' % (len_size * '-'))
for meta in ark.metadatas:
if opts['-v']:
print(' %s %*d %s %s' % (meta.flag,
len_size,
meta.original_filesize,
dt.fromtimestamp(meta.timestamp)
.strftime('%Y-%m-%d %H:%M'),
meta.fullpath))
else:
print(meta.fullpath)
if opts['-v']:
print(' ---- -%s -------' % (len_size * '-'))
print(' %*d%s%d file%s' % (len_size, total_size,
' ' * 21, ark.file_count,
's' if ark.file_count > 1 else ''))
|
[
"datetime.datetime.fromtimestamp"
] |
[((826, 858), 'datetime.datetime.fromtimestamp', 'dt.fromtimestamp', (['meta.timestamp'], {}), '(meta.timestamp)\n', (842, 858), True, 'from datetime import datetime as dt\n')]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-12-11 14:21
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('Grundgeruest', '0004_auto_20171211_1418'),
]
operations = [
migrations.AlterField(
model_name='scholariumprofile',
name='alt_auslaufend',
field=models.SmallIntegerField(default=0, null=True, verbose_name='auslaufend'),
),
]
|
[
"django.db.models.SmallIntegerField"
] |
[((425, 498), 'django.db.models.SmallIntegerField', 'models.SmallIntegerField', ([], {'default': '(0)', 'null': '(True)', 'verbose_name': '"""auslaufend"""'}), "(default=0, null=True, verbose_name='auslaufend')\n", (449, 498), False, 'from django.db import migrations, models\n')]
|
# -*- coding: utf-8 -*-
import urllib2
import urllib
from bs4 import BeautifulSoup
import json
import cookielib
import sqlite3
import time
import os
import sys
import socket
socket.setdefaulttimeout(30)
reload(sys)
sys.setdefaultencoding('utf-8')
def get_search_page_url(keyWord):
res = 1
pageURL = ''
try:
searchBaseURL = rootBaseURL + '/page/search.html?keywords='
searchKeyWordsURL = searchBaseURL + urllib2.quote(keyWord)
searchPageContent = getContentOfWebPage(searchKeyWordsURL)
searchPageSoup = BeautifulSoup(searchPageContent, 'html.parser')
pageURL = searchPageSoup.head.find('link', attrs={'rel':
'canonical'}).attrs['href']
except:
res = 0
return (res, pageURL)
def getContentOfWebPage(url):
user_agent = 'Mozilla/5.0 (iPhone; CPU iPhone OS 9_1 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13B137 Safari/601.1'
headers = {
'User-Agent': user_agent
# 'Connection': 'Keep-Alive'
}
req = urllib2.Request(url=url, headers=headers)
response = urllib2.urlopen(req)
content = response.read().decode('utf-8', 'ignore')
return content
def get_goods_list(url, data, opener):
user_agent = 'Mozilla/5.0 (iPhone; CPU iPhone OS 9_1 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13B137 Safari/601.1'
url_encode_data = urllib.urlencode(data)
headers = {
'Content-Type': 'application/x-www-form-urlencoded',
# 'Connection': 'Keep-Alive',
'User-Agent': user_agent
}
req = urllib2.Request(url=url, data=url_encode_data, headers=headers)
content = ''
res = 1
try:
content = opener.open(req).read()
except:
res = 0
finally:
opener.close()
return (res, content)
def create_url_opener():
cookie = cookielib.CookieJar()
handler = urllib2.HTTPCookieProcessor(cookie)
opener = urllib2.build_opener(handler)
return opener
def get_csrf(url):
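    # Note: relies on the module-level 'opener' created in the crawl loop below.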
res = 1
csrf = ''
try:
response = opener.open(url).read()
subStr = r'"csrf":"'
headIndex = response.rindex(subStr) + len(subStr)
tailIndex = response.index(r'"', headIndex)
csrf = response[headIndex:tailIndex]
except:
res = 0
return (res, csrf)
# 1688
rootBaseURL = 'http://m.1688.com'
# Connect to the search-history database
historyDBName = 'keyWordsHistory.db'
if not os.path.exists(historyDBName):
    print('keyWordsHistory.db does not exist. Please run initKeyWordsHistoryDB.py first.')
sys.exit(1)
historyDBConn = sqlite3.connect('keyWordsHistory.db')
historyDBCursor = historyDBConn.execute(
"SELECT KEYWORD FROM HISTORY WHERE COMPLETED='NO';"
)
# Connect to the goods database
goodsDBConn = sqlite3.connect('goods.db')
goodsDBCursor = goodsDBConn.cursor()
# Create the table if it does not exist
goodsDBCursor.execute('''CREATE TABLE IF NOT EXISTS GOODS
(ID TEXT PRIMARY KEY NOT NULL,
SIMPLE_SUBJECT TEXT NOT NULL,
COMPANY_NAME TEXT NOT NULL);''')
for row in historyDBCursor:
keyWord = row[0].encode('utf-8')
    print('Start searching for keyword: ' + keyWord)
opener = create_url_opener()
(res, searchPageURL) = get_search_page_url(keyWord)
if not res == 1:
        print('Exception occurred, waiting 10 seconds')
time.sleep(10)
continue
    # Fetch the CSRF token
(res, csrf) = get_csrf(searchPageURL)
if not res == 1:
        print('Exception occurred, waiting 10 seconds')
time.sleep(10)
continue
beginPage = 1
pageSize = 100
while True:
wing_navigate_options = {
"data": {
"type": "offer",
"keywords": keyWord,
"beginPage": beginPage,
"pageSize": pageSize,
"offset": 1,
"sortType": "pop" # 综合:pop 销量:booked 价格:price
}
}
        # Request parameters
requestParam = {
"_csrf": csrf,
"__wing_navigate_type": "action",
"__wing_navigate_url": "search:pages/search/offerresult",
"__wing_navigate_options": json.dumps(wing_navigate_options)
}
        # Fetch the JSON string that contains the goods list
(res, goodsListJsonStr) = get_goods_list(
searchPageURL.encode('utf-8'),
requestParam,
opener
)
if not res == 1:
            print('Exception occurred, waiting 10 seconds')
time.sleep(10)
continue
        # Parse the JSON
goodsList = json.loads(goodsListJsonStr)
        # If the JSON has no 'offers' key, every page of the goods list has been fetched
if not goodsList['data'].has_key('offers'):
            print('Finished searching keyword: ' + keyWord)
break
for good in goodsList['data']['offers']:
try:
goodsDBCursor.execute(
'''INSERT INTO GOODS (ID, SIMPLE_SUBJECT, COMPANY_NAME)
VALUES (?, ?, ?);''', (
good['id'],
good['simpleSubject'],
good['companyName']
)
)
except sqlite3.IntegrityError:
                pass # print("Record ID already exists: " + good['id'])
        # Commit the transaction
goodsDBConn.commit()
        # Advance to the next page
beginPage += 1
        print('Inserted ' + str(len(goodsList['data']['offers'])) + ' records')
    # One keyword finished successfully; update the history table
historyDBCursor.execute('''UPDATE HISTORY SET COMPLETED='YES'
WHERE KEYWORD=?;''', (keyWord.decode(),))
historyDBConn.commit()
# Close the connections
goodsDBCursor.close()
goodsDBConn.close()
historyDBCursor.close()
historyDBConn.close()
|
[
"json.loads",
"cookielib.CookieJar",
"urllib2.Request",
"os.path.exists",
"urllib2.quote",
"json.dumps",
"time.sleep",
"socket.setdefaulttimeout",
"sqlite3.connect",
"sys.setdefaultencoding",
"urllib.urlencode",
"bs4.BeautifulSoup",
"sys.exit",
"urllib2.HTTPCookieProcessor",
"urllib2.urlopen",
"urllib2.build_opener"
] |
[((176, 204), 'socket.setdefaulttimeout', 'socket.setdefaulttimeout', (['(30)'], {}), '(30)\n', (200, 204), False, 'import socket\n'), ((218, 249), 'sys.setdefaultencoding', 'sys.setdefaultencoding', (['"""utf-8"""'], {}), "('utf-8')\n", (240, 249), False, 'import sys\n'), ((2636, 2673), 'sqlite3.connect', 'sqlite3.connect', (['"""keyWordsHistory.db"""'], {}), "('keyWordsHistory.db')\n", (2651, 2673), False, 'import sqlite3\n'), ((2802, 2829), 'sqlite3.connect', 'sqlite3.connect', (['"""goods.db"""'], {}), "('goods.db')\n", (2817, 2829), False, 'import sqlite3\n'), ((1095, 1136), 'urllib2.Request', 'urllib2.Request', ([], {'url': 'url', 'headers': 'headers'}), '(url=url, headers=headers)\n', (1110, 1136), False, 'import urllib2\n'), ((1152, 1172), 'urllib2.urlopen', 'urllib2.urlopen', (['req'], {}), '(req)\n', (1167, 1172), False, 'import urllib2\n'), ((1464, 1486), 'urllib.urlencode', 'urllib.urlencode', (['data'], {}), '(data)\n', (1480, 1486), False, 'import urllib\n'), ((1651, 1714), 'urllib2.Request', 'urllib2.Request', ([], {'url': 'url', 'data': 'url_encode_data', 'headers': 'headers'}), '(url=url, data=url_encode_data, headers=headers)\n', (1666, 1714), False, 'import urllib2\n'), ((1925, 1946), 'cookielib.CookieJar', 'cookielib.CookieJar', ([], {}), '()\n', (1944, 1946), False, 'import cookielib\n'), ((1961, 1996), 'urllib2.HTTPCookieProcessor', 'urllib2.HTTPCookieProcessor', (['cookie'], {}), '(cookie)\n', (1988, 1996), False, 'import urllib2\n'), ((2010, 2039), 'urllib2.build_opener', 'urllib2.build_opener', (['handler'], {}), '(handler)\n', (2030, 2039), False, 'import urllib2\n'), ((2491, 2520), 'os.path.exists', 'os.path.exists', (['historyDBName'], {}), '(historyDBName)\n', (2505, 2520), False, 'import os\n'), ((2607, 2618), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2615, 2618), False, 'import sys\n'), ((551, 598), 'bs4.BeautifulSoup', 'BeautifulSoup', (['searchPageContent', '"""html.parser"""'], {}), "(searchPageContent, 'html.parser')\n", (564, 598), False, 'from bs4 import BeautifulSoup\n'), ((3292, 3306), 'time.sleep', 'time.sleep', (['(10)'], {}), '(10)\n', (3302, 3306), False, 'import time\n'), ((3436, 3450), 'time.sleep', 'time.sleep', (['(10)'], {}), '(10)\n', (3446, 3450), False, 'import time\n'), ((4424, 4452), 'json.loads', 'json.loads', (['goodsListJsonStr'], {}), '(goodsListJsonStr)\n', (4434, 4452), False, 'import json\n'), ((436, 458), 'urllib2.quote', 'urllib2.quote', (['keyWord'], {}), '(keyWord)\n', (449, 458), False, 'import urllib2\n'), ((4065, 4098), 'json.dumps', 'json.dumps', (['wing_navigate_options'], {}), '(wing_navigate_options)\n', (4075, 4098), False, 'import json\n'), ((4350, 4364), 'time.sleep', 'time.sleep', (['(10)'], {}), '(10)\n', (4360, 4364), False, 'import time\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.http import HttpResponse
from django.test import RequestFactory
from nose.tools import eq_
from gnu_terry_pratchett.decorators import clacks_overhead
@clacks_overhead
def view(request):
return HttpResponse("Death can't have him")
def test_view_decorator():
request = RequestFactory().get('/')
response = view(request)
eq_(response['x-clacks-overhead'], 'GNU Terry Pratchett')
|
[
"nose.tools.eq_",
"django.http.HttpResponse",
"django.test.RequestFactory"
] |
[((260, 296), 'django.http.HttpResponse', 'HttpResponse', (['"""Death can\'t have him"""'], {}), '("Death can\'t have him")\n', (272, 296), False, 'from django.http import HttpResponse\n'), ((399, 456), 'nose.tools.eq_', 'eq_', (["response['x-clacks-overhead']", '"""GNU Terry Pratchett"""'], {}), "(response['x-clacks-overhead'], 'GNU Terry Pratchett')\n", (402, 456), False, 'from nose.tools import eq_\n'), ((340, 356), 'django.test.RequestFactory', 'RequestFactory', ([], {}), '()\n', (354, 356), False, 'from django.test import RequestFactory\n')]
|
# Copyright 2021 Sony Group Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import csv
import numpy as np
import tqdm
from sklearn.manifold import TSNE
from nnabla import logger
from nnabla.utils.data_iterator import data_iterator_csv_dataset
def func(args):
# Load variable
data_iterator = (lambda: data_iterator_csv_dataset(
uri=args.input,
batch_size=64,
shuffle=False,
normalize=True,
with_memory_cache=False,
with_file_cache=False))
logger.log(99, 'Loading variable...')
dataset = []
with data_iterator() as di:
pbar = tqdm.tqdm(total=di.size)
while len(dataset) < di.size:
data = di.next()
variable = data[di.variables.index(args.variable)]
dataset.extend(variable)
pbar.update(len(variable))
pbar.close()
dataset = np.array(dataset)[:di.size].reshape(di.size, -1)
logger.log(99, 'variable={}, length={}, dim={}'.format(
args.variable, dataset.shape[0], dataset.shape[1]))
# t-SNE
logger.log(99, 'Processing t-SNE...')
dim = int(args.dim)
result = TSNE(n_components=dim, random_state=0).fit_transform(dataset)
# output
with open(args.input, newline='', encoding='utf-8-sig') as f:
rows = [row for row in csv.reader(f)]
row0 = rows.pop(0)
row0.extend([args.variable + '_tsne__{}'.format(i) for i in range(dim)])
for i, y in enumerate(result):
rows[i].extend(y)
with open(args.output, 'w', encoding='utf-8') as f:
writer = csv.writer(f, lineterminator='\n')
writer.writerow(row0)
writer.writerows(rows)
logger.log(99, 't-SNE completed successfully.')
def main():
parser = argparse.ArgumentParser(
description='t-SNE\n\n' +
'<NAME>, <NAME>. Visualizing Data using t-SNE\n' +
'http://jmlr.org/papers/volume9/vandermaaten08a/vandermaaten08a.pdf\n\n',
formatter_class=argparse.RawTextHelpFormatter)
parser.add_argument(
'-i',
'--input',
help='path to input csv file (csv) default=output_result.csv',
required=True,
default='output_result.csv')
parser.add_argument(
'-v',
'--variable',
help="Variable to be processed (variable) default=x",
required=True,
default="x")
parser.add_argument(
'-d',
'--dim',
help='dimension of the embedded space (variable) default=2',
default=2)
parser.add_argument(
'-o',
'--output',
help='path to output csv file (csv) default=tsne.csv',
required=True,
default='tsne.csv')
parser.set_defaults(func=func)
args = parser.parse_args()
args.func(args)
if __name__ == '__main__':
main()
|
[
"nnabla.logger.log",
"tqdm.tqdm",
"csv.reader",
"csv.writer",
"argparse.ArgumentParser",
"sklearn.manifold.TSNE",
"nnabla.utils.data_iterator.data_iterator_csv_dataset",
"numpy.array"
] |
[((1028, 1065), 'nnabla.logger.log', 'logger.log', (['(99)', '"""Loading variable..."""'], {}), "(99, 'Loading variable...')\n", (1038, 1065), False, 'from nnabla import logger\n'), ((1583, 1620), 'nnabla.logger.log', 'logger.log', (['(99)', '"""Processing t-SNE..."""'], {}), "(99, 'Processing t-SNE...')\n", (1593, 1620), False, 'from nnabla import logger\n'), ((2182, 2229), 'nnabla.logger.log', 'logger.log', (['(99)', '"""t-SNE completed successfully."""'], {}), "(99, 't-SNE completed successfully.')\n", (2192, 2229), False, 'from nnabla import logger\n'), ((2257, 2493), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '(\'t-SNE\\n\\n\' + \'<NAME>, <NAME>. Visualizing Data using t-SNE\\n\' +\n """http://jmlr.org/papers/volume9/vandermaaten08a/vandermaaten08a.pdf\n\n""")', 'formatter_class': 'argparse.RawTextHelpFormatter'}), '(description=\'t-SNE\\n\\n\' +\n """<NAME>, <NAME>. Visualizing Data using t-SNE\n""" +\n \'http://jmlr.org/papers/volume9/vandermaaten08a/vandermaaten08a.pdf\\n\\n\',\n formatter_class=argparse.RawTextHelpFormatter)\n', (2280, 2493), False, 'import argparse\n'), ((837, 976), 'nnabla.utils.data_iterator.data_iterator_csv_dataset', 'data_iterator_csv_dataset', ([], {'uri': 'args.input', 'batch_size': '(64)', 'shuffle': '(False)', 'normalize': '(True)', 'with_memory_cache': '(False)', 'with_file_cache': '(False)'}), '(uri=args.input, batch_size=64, shuffle=False,\n normalize=True, with_memory_cache=False, with_file_cache=False)\n', (862, 976), False, 'from nnabla.utils.data_iterator import data_iterator_csv_dataset\n'), ((1130, 1154), 'tqdm.tqdm', 'tqdm.tqdm', ([], {'total': 'di.size'}), '(total=di.size)\n', (1139, 1154), False, 'import tqdm\n'), ((2081, 2115), 'csv.writer', 'csv.writer', (['f'], {'lineterminator': '"""\n"""'}), "(f, lineterminator='\\n')\n", (2091, 2115), False, 'import csv\n'), ((1658, 1696), 'sklearn.manifold.TSNE', 'TSNE', ([], {'n_components': 'dim', 'random_state': '(0)'}), '(n_components=dim, random_state=0)\n', (1662, 1696), False, 'from sklearn.manifold import TSNE\n'), ((1397, 1414), 'numpy.array', 'np.array', (['dataset'], {}), '(dataset)\n', (1405, 1414), True, 'import numpy as np\n'), ((1831, 1844), 'csv.reader', 'csv.reader', (['f'], {}), '(f)\n', (1841, 1844), False, 'import csv\n')]
|
from django.contrib import admin
from .models import Category, Incomes, Expense
# Register your models here.
admin.site.register(Category)
admin.site.register(Incomes)
admin.site.register(Expense)
|
[
"django.contrib.admin.site.register"
] |
[((113, 142), 'django.contrib.admin.site.register', 'admin.site.register', (['Category'], {}), '(Category)\n', (132, 142), False, 'from django.contrib import admin\n'), ((143, 171), 'django.contrib.admin.site.register', 'admin.site.register', (['Incomes'], {}), '(Incomes)\n', (162, 171), False, 'from django.contrib import admin\n'), ((172, 200), 'django.contrib.admin.site.register', 'admin.site.register', (['Expense'], {}), '(Expense)\n', (191, 200), False, 'from django.contrib import admin\n')]
|
from torch.utils.data import DataLoader, Subset
from pathlib import Path
import torch
import torch.nn as nn
import itertools as its
import pandas as pd
import numpy as np
import json
from rdkit import Chem, DataStructs
from rdkit.Chem import AllChem
import matplotlib.pyplot as plt
from NeuralGraph.dataset import MolData
from NeuralGraph.model import QSAR
from NeuralGraph.util import dev
def tanimoto_distance(x, y):
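    # Continuous Tanimoto (Jaccard) distance: 1 - sum(min(x, y)) / sum(max(x, y)), computed via the <= mask.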
idx = x<=y
return 1 - (x[idx].sum() + y[~idx].sum()) / (x[~idx].sum() + y[idx].sum())
def get_circular_fp(smile, radius=6, fp_len=128):
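    # Morgan (circular) fingerprint as a fixed-length bit vector, converted to a NumPy array.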
mol = Chem.MolFromSmiles(smile)
fingerprint = Chem.AllChem.GetMorganFingerprintAsBitVect(mol, radius, fp_len)
arr = np.zeros((1,))
DataStructs.ConvertToNumpyArray(fingerprint, arr)
return arr
def get_neural_fp(X, net):
x0, x1, x2 = X
x0, x1, x2 = x0.to(dev), x1.to(dev), x2.to(dev)
x0, x1, x2 = (torch.unsqueeze(x, 0) for x in (x0, x1, x2))
res = net.nfp(x0, x1, x2)
res = res.detach().cpu().numpy()
return res
def mse(x, y):
return ((x-y)**2).mean()
def normalize_array(A):
mean, std = np.mean(A), np.std(A)
A_normed = (A - mean) / std
def restore_function(X):
return X * std + mean
return A_normed, restore_function
def change_net_to_weights(net, lo_bnd, hi_bnd):
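    # Recursively re-initialize every Linear layer's weights (and biases) uniformly in [lo_bnd, hi_bnd].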
for n,m in net.named_children():
if isinstance(m, torch.nn.Linear):
nn.init.uniform_(m.weight, lo_bnd, hi_bnd)
if m.bias is not None:
nn.init.uniform_(m.bias, lo_bnd, hi_bnd)
change_net_to_weights(m, lo_bnd, hi_bnd)
def calc_distance(net, data, smiles, FP_LEN,\
sample_sz=1000, SEED=None):
N, sample_sz = len(data), sample_sz
if SEED: np.random.seed(SEED)
res = [[],[]]
for _ in range(sample_sz):
i, j = np.random.choice(N, 2)
dst0 = tanimoto_distance(get_circular_fp(smiles[i], fp_len=FP_LEN),
get_circular_fp(smiles[j], fp_len=FP_LEN))
dst1 = tanimoto_distance(get_neural_fp(data[i][0], net),
get_neural_fp(data[j][0], net))
res[0].append(dst0)
res[1].append(dst1)
res = np.asarray(res)
return res
def calc_corr(res):
return (np.corrcoef(res[0], res[1])[0,1])
def plot_scatter(net, data, smiles, FP_LEN, filename,\
sample_sz = 1000, SEED=None):
res = calc_distance(net, data, smiles, FP_LEN, \
sample_sz, SEED)
plt.scatter(res[0], res[1], marker='o', facecolors='none', edgecolors='b', alpha=0.3)
plt.xlabel("circular fingerprint distance")
plt.ylabel("neural fingerprint distance")
plt.xlim([0, 1])
plt.ylim([0, 1])
plt.title("Correlation = {:.4f}".format(np.corrcoef(res[0], res[1])[0,1]))
plt.savefig(filename, dpi=300, bbox_inches='tight')
if __name__ == '__main__':
# Load Data
DATAFILE = Path('./dataset/solubility/delaney-processed.csv')
df = pd.read_csv(DATAFILE)
target = df['measured log solubility in mols per litre'].values
target, restore = normalize_array(target)
data = MolData(df['smiles'], target)
print(type(df['smiles'][0]), df['smiles'][0])
tmp = df['smiles'][0]
print(get_circular_fp(tmp))
exit()
    # Plot with random weights and a 2048-bit fingerprint, as in Figure 3 (left)
gcn_act = ['sigmoid', 'relu', 'tanh']
gop_act = ['sigmoid', 'tanh', 'softmax']
large_weights = [(-1e7, 1e7), (0, 1e7), (-1e3, 1e3), (-10, 10)]
max_degs = [1, 6]
res = {}
for a1, a2, bnds, rd in its.product(gcn_act, gop_act, large_weights,
max_degs):
SEED, FP_LEN = 7, 1<<11
net = QSAR(hid_dim=FP_LEN, n_class=1, max_degree=rd,
gcn_activation=a1,
gop_activation=a2)
print("nbnds", bnds)
change_net_to_weights(net.nfp, *bnds)
tmp = calc_distance(net, data, df['smiles'], FP_LEN, sample_sz=500,
SEED=7)
tmp = calc_corr(tmp)
res[f"gcn-{a1}_gop-{a2}_weights-{bnds}_radius-{rd}"]=tmp
print(f"gcn-{a1}_gop-{a2}_weights-{bnds}_radius-{rd}", tmp)
with open('./output.json', 'w') as fp:
json.dump(res, fp)
exit()
plot_scatter(net,
data,
df['smiles'],
FP_LEN,
"./figs/scatter_nfp_vs_cfp_2048_random_weight.png")
exit()
# Plot with a trained model
OUTPUT = './output/best_delaney.pkl'
net = torch.load(OUTPUT+'.pkg')
SEED, FP_LEN = 7, 1<<11
plot_scatter(net,
data,
df['smiles'],
FP_LEN,
"./figs/scatter_nfp_vs_cfp_128_trained_weight.png")
|
[
"numpy.random.seed",
"pandas.read_csv",
"torch.nn.init.uniform_",
"pathlib.Path",
"numpy.mean",
"numpy.std",
"torch.load",
"itertools.product",
"numpy.random.choice",
"json.dump",
"rdkit.Chem.AllChem.GetMorganFingerprintAsBitVect",
"matplotlib.pyplot.ylim",
"numpy.corrcoef",
"numpy.asarray",
"torch.unsqueeze",
"matplotlib.pyplot.ylabel",
"NeuralGraph.model.QSAR",
"matplotlib.pyplot.xlim",
"rdkit.DataStructs.ConvertToNumpyArray",
"matplotlib.pyplot.scatter",
"numpy.zeros",
"matplotlib.pyplot.xlabel",
"rdkit.Chem.MolFromSmiles",
"matplotlib.pyplot.savefig",
"NeuralGraph.dataset.MolData"
] |
[((576, 601), 'rdkit.Chem.MolFromSmiles', 'Chem.MolFromSmiles', (['smile'], {}), '(smile)\n', (594, 601), False, 'from rdkit import Chem, DataStructs\n'), ((620, 683), 'rdkit.Chem.AllChem.GetMorganFingerprintAsBitVect', 'Chem.AllChem.GetMorganFingerprintAsBitVect', (['mol', 'radius', 'fp_len'], {}), '(mol, radius, fp_len)\n', (662, 683), False, 'from rdkit import Chem, DataStructs\n'), ((694, 708), 'numpy.zeros', 'np.zeros', (['(1,)'], {}), '((1,))\n', (702, 708), True, 'import numpy as np\n'), ((713, 762), 'rdkit.DataStructs.ConvertToNumpyArray', 'DataStructs.ConvertToNumpyArray', (['fingerprint', 'arr'], {}), '(fingerprint, arr)\n', (744, 762), False, 'from rdkit import Chem, DataStructs\n'), ((2189, 2204), 'numpy.asarray', 'np.asarray', (['res'], {}), '(res)\n', (2199, 2204), True, 'import numpy as np\n'), ((2490, 2579), 'matplotlib.pyplot.scatter', 'plt.scatter', (['res[0]', 'res[1]'], {'marker': '"""o"""', 'facecolors': '"""none"""', 'edgecolors': '"""b"""', 'alpha': '(0.3)'}), "(res[0], res[1], marker='o', facecolors='none', edgecolors='b',\n alpha=0.3)\n", (2501, 2579), True, 'import matplotlib.pyplot as plt\n'), ((2580, 2623), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""circular fingerprint distance"""'], {}), "('circular fingerprint distance')\n", (2590, 2623), True, 'import matplotlib.pyplot as plt\n'), ((2628, 2669), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""neural fingerprint distance"""'], {}), "('neural fingerprint distance')\n", (2638, 2669), True, 'import matplotlib.pyplot as plt\n'), ((2674, 2690), 'matplotlib.pyplot.xlim', 'plt.xlim', (['[0, 1]'], {}), '([0, 1])\n', (2682, 2690), True, 'import matplotlib.pyplot as plt\n'), ((2695, 2711), 'matplotlib.pyplot.ylim', 'plt.ylim', (['[0, 1]'], {}), '([0, 1])\n', (2703, 2711), True, 'import matplotlib.pyplot as plt\n'), ((2795, 2846), 'matplotlib.pyplot.savefig', 'plt.savefig', (['filename'], {'dpi': '(300)', 'bbox_inches': '"""tight"""'}), "(filename, dpi=300, bbox_inches='tight')\n", (2806, 2846), True, 'import matplotlib.pyplot as plt\n'), ((2906, 2956), 'pathlib.Path', 'Path', (['"""./dataset/solubility/delaney-processed.csv"""'], {}), "('./dataset/solubility/delaney-processed.csv')\n", (2910, 2956), False, 'from pathlib import Path\n'), ((2966, 2987), 'pandas.read_csv', 'pd.read_csv', (['DATAFILE'], {}), '(DATAFILE)\n', (2977, 2987), True, 'import pandas as pd\n'), ((3113, 3142), 'NeuralGraph.dataset.MolData', 'MolData', (["df['smiles']", 'target'], {}), "(df['smiles'], target)\n", (3120, 3142), False, 'from NeuralGraph.dataset import MolData\n'), ((3547, 3601), 'itertools.product', 'its.product', (['gcn_act', 'gop_act', 'large_weights', 'max_degs'], {}), '(gcn_act, gop_act, large_weights, max_degs)\n', (3558, 3601), True, 'import itertools as its\n'), ((4512, 4539), 'torch.load', 'torch.load', (["(OUTPUT + '.pkg')"], {}), "(OUTPUT + '.pkg')\n", (4522, 4539), False, 'import torch\n'), ((895, 916), 'torch.unsqueeze', 'torch.unsqueeze', (['x', '(0)'], {}), '(x, 0)\n', (910, 916), False, 'import torch\n'), ((1108, 1118), 'numpy.mean', 'np.mean', (['A'], {}), '(A)\n', (1115, 1118), True, 'import numpy as np\n'), ((1120, 1129), 'numpy.std', 'np.std', (['A'], {}), '(A)\n', (1126, 1129), True, 'import numpy as np\n'), ((1732, 1752), 'numpy.random.seed', 'np.random.seed', (['SEED'], {}), '(SEED)\n', (1746, 1752), True, 'import numpy as np\n'), ((1817, 1839), 'numpy.random.choice', 'np.random.choice', (['N', '(2)'], {}), '(N, 2)\n', (1833, 1839), True, 'import numpy as np\n'), ((2253, 2280), 'numpy.corrcoef', 
'np.corrcoef', (['res[0]', 'res[1]'], {}), '(res[0], res[1])\n', (2264, 2280), True, 'import numpy as np\n'), ((3690, 3778), 'NeuralGraph.model.QSAR', 'QSAR', ([], {'hid_dim': 'FP_LEN', 'n_class': '(1)', 'max_degree': 'rd', 'gcn_activation': 'a1', 'gop_activation': 'a2'}), '(hid_dim=FP_LEN, n_class=1, max_degree=rd, gcn_activation=a1,\n gop_activation=a2)\n', (3694, 3778), False, 'from NeuralGraph.model import QSAR\n'), ((4215, 4233), 'json.dump', 'json.dump', (['res', 'fp'], {}), '(res, fp)\n', (4224, 4233), False, 'import json\n'), ((1400, 1442), 'torch.nn.init.uniform_', 'nn.init.uniform_', (['m.weight', 'lo_bnd', 'hi_bnd'], {}), '(m.weight, lo_bnd, hi_bnd)\n', (1416, 1442), True, 'import torch.nn as nn\n'), ((1494, 1534), 'torch.nn.init.uniform_', 'nn.init.uniform_', (['m.bias', 'lo_bnd', 'hi_bnd'], {}), '(m.bias, lo_bnd, hi_bnd)\n', (1510, 1534), True, 'import torch.nn as nn\n'), ((2756, 2783), 'numpy.corrcoef', 'np.corrcoef', (['res[0]', 'res[1]'], {}), '(res[0], res[1])\n', (2767, 2783), True, 'import numpy as np\n')]
|
# coding: utf-8
"""
self-managed-osdu
Rest API Documentation for Self Managed OSDU # noqa: E501
OpenAPI spec version: 0.11.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from OsduClient.configuration import Configuration
class LegalTagInvalidResponseList(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'invalid_legal_tags': 'list[LegalTagInvalidResponse]'
}
attribute_map = {
'invalid_legal_tags': 'invalidLegalTags'
}
def __init__(self, invalid_legal_tags=None, _configuration=None): # noqa: E501
"""LegalTagInvalidResponseList - a model defined in Swagger""" # noqa: E501
if _configuration is None:
_configuration = Configuration()
self._configuration = _configuration
self._invalid_legal_tags = None
self.discriminator = None
if invalid_legal_tags is not None:
self.invalid_legal_tags = invalid_legal_tags
@property
def invalid_legal_tags(self):
"""Gets the invalid_legal_tags of this LegalTagInvalidResponseList. # noqa: E501
A collection of invalid LegalTags. # noqa: E501
:return: The invalid_legal_tags of this LegalTagInvalidResponseList. # noqa: E501
:rtype: list[LegalTagInvalidResponse]
"""
return self._invalid_legal_tags
@invalid_legal_tags.setter
def invalid_legal_tags(self, invalid_legal_tags):
"""Sets the invalid_legal_tags of this LegalTagInvalidResponseList.
A collection of invalid LegalTags. # noqa: E501
:param invalid_legal_tags: The invalid_legal_tags of this LegalTagInvalidResponseList. # noqa: E501
:type: list[LegalTagInvalidResponse]
"""
self._invalid_legal_tags = invalid_legal_tags
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(LegalTagInvalidResponseList, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, LegalTagInvalidResponseList):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, LegalTagInvalidResponseList):
return True
return self.to_dict() != other.to_dict()
|
[
"six.iteritems",
"OsduClient.configuration.Configuration"
] |
[((2354, 2387), 'six.iteritems', 'six.iteritems', (['self.swagger_types'], {}), '(self.swagger_types)\n', (2367, 2387), False, 'import six\n'), ((1167, 1182), 'OsduClient.configuration.Configuration', 'Configuration', ([], {}), '()\n', (1180, 1182), False, 'from OsduClient.configuration import Configuration\n')]
|
from melange import DriverManager
from melange.messaging.exchange_message_publisher import ExchangeMessagePublisher
DriverManager.instance().use_driver(driver_name='aws')
publisher = ExchangeMessagePublisher('dev-superbattle')
publisher.publish({
'amount': 20
}, 'DamageDealtToHero')
print('Gñeeee, die you fool!')
|
[
"melange.DriverManager.instance",
"melange.messaging.exchange_message_publisher.ExchangeMessagePublisher"
] |
[((185, 228), 'melange.messaging.exchange_message_publisher.ExchangeMessagePublisher', 'ExchangeMessagePublisher', (['"""dev-superbattle"""'], {}), "('dev-superbattle')\n", (209, 228), False, 'from melange.messaging.exchange_message_publisher import ExchangeMessagePublisher\n'), ((117, 141), 'melange.DriverManager.instance', 'DriverManager.instance', ([], {}), '()\n', (139, 141), False, 'from melange import DriverManager\n')]
|
import argparse
import re
import subprocess
REGEX = r'\[\s*(\d+)\s*\]'
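# Captures the question number shown in brackets, e.g. "[ 123 ]", in the output of 'leetcode list'.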
def solved_questions():
print('Getting list of solved questions.')
out = subprocess.check_output(
['leetcode', 'list', '-q', 'd'],
)
problems = []
for line in out.decode().split('\n'):
matches = re.search(REGEX, line)
if not matches:
continue
problems.append(matches.group(1))
return problems
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Get list of solved questions.')
question_numbers = solved_questions()
print(', '.join(question_numbers))
|
[
"subprocess.check_output",
"re.search",
"argparse.ArgumentParser"
] |
[((154, 210), 'subprocess.check_output', 'subprocess.check_output', (["['leetcode', 'list', '-q', 'd']"], {}), "(['leetcode', 'list', '-q', 'd'])\n", (177, 210), False, 'import subprocess\n'), ((475, 543), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Get list of solved questions."""'}), "(description='Get list of solved questions.')\n", (498, 543), False, 'import argparse\n'), ((304, 326), 're.search', 're.search', (['REGEX', 'line'], {}), '(REGEX, line)\n', (313, 326), False, 'import re\n')]
|
# coding:utf-8
'''
Created on 2016/07/09
@author: ota
'''
import sys
import traceback
class SqlFormatterException(Exception):
'''
    Exception class for SqlFormatter
'''
def __init__(self, tlist, ex, trace):
super(SqlFormatterException, self).__init__(ex.message if hasattr(ex, "message") else "")
self.tlist = self.__decode(tlist)
self.e = ex
self.trace = self.__decode(trace)
self.message = ex.message
def __decode(self, text):
text = str(text)
if sys.version_info[0] < 3:
return text.decode("utf-8")
else:
return text
def __encode(self, text):
if sys.version_info[0] < 3 and isinstance(text, unicode):
return text.encode("utf-8")
else:
return text
def __str__(self, *args):
return self.message \
+ "\ntoken:" + self.__encode(self.tlist) \
+ "\ntrace:" + self.__encode(self.trace) \
+ "\noriginal:" + str(self.e)
@staticmethod
def wrap_try_except(fnc, token, *args):
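        # Run fnc; wrap any unexpected exception in SqlFormatterException together with the offending token.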
try:
if args:
return fnc(*args)
else:
return fnc(token)
except Exception as ex:
if not isinstance(ex, SqlFormatterException):
raise SqlFormatterException(token, ex, traceback.format_exc())
raise
@staticmethod
def to_wrap_try_except(fnc, token_arg_index):
def call(*args):
try:
return fnc(*args)
except Exception as ex:
if not isinstance(ex, SqlFormatterException):
raise SqlFormatterException(args[token_arg_index], ex, traceback.format_exc())
raise
return call
|
[
"traceback.format_exc"
] |
[((1359, 1381), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (1379, 1381), False, 'import traceback\n'), ((1719, 1741), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (1739, 1741), False, 'import traceback\n')]
|
# File :all.py
# Author :WJ
# Function :
# Time :2021/02/18
# Version :
# Amend :
import numpy as np
import ConvexPolygon as cp
import HierarchicalClustering as hc
import ConPolyProcess as cs
import LaplacianMatrice as lm
import time
from scipy.optimize import linear_sum_assignment
import Visualization as vs
import TransformationMatrix as tf
# <editor-fold desc="Method">
def conpoly_points(data, clusters, clusters_num=2):
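    # Collect the cleaned convex-hull vertices of every cluster into one point array.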
P = []
for k in range(clusters_num):
        ## Build a True/False mask from whether each cluster label equals k
my_members = clusters == k
        ## X[my_members, 0] picks the x-coordinates of the points where my_members is True
data_tem = data[my_members, :]
indexes = cp.ConvexPolygon(data_tem)
points = np.array(data_tem[indexes, :], dtype=np.float32)
while 1:
max, a0, b0 = cs.maxPoints(points=points)
if max > 2:
points = cs.delete_linepoints(points, a0, b0, 3)
else:
break
points = hc.mergeClosePoints(points, 3)
for i in range(len(points)):
P.append(points[i, :])
return np.array(P)
# </editor-fold>
start0 = time.time()
print('Computing convex polygon vertices of buildings------------------------------------------------------')
# Load the data
data_dlg = np.loadtxt('..\\data\\Polyline_PCB02_500.txt', delimiter=',')
data_dopp = np.loadtxt('..\\data\\PCB_c1_z5_t20.txt', delimiter='\t')
data_dlg0 = data_dlg[:, 0:2]
data_dopp0 = data_dopp[:, 0:2]
# Set the number of building clusters in the point cloud
clusters_num = 2
# Hierarchical clustering
data_dlg, clusters_dlg = hc.HierarchicalClustering(data_dlg0, clusters_num, 'dlg')
data_dopp, clusters_dopp = hc.HierarchicalClustering(data_dopp0, clusters_num, 'dopp')
# Compute the convex polygon of each building (and post-process its vertices)
P_dlg = conpoly_points(data_dlg, clusters_dlg, clusters_num)
P_dopp = conpoly_points(data_dopp, clusters_dopp, clusters_num)
# Visualize the convex polygon vertices
vs.Visualize2PointClouds(data_dlg, P_dlg, 'ConPoly_dlg', feature1=['blue', 'dlg', '.'], feature2=['red', 'vertex', 'o'])
vs.Visualize2PointClouds(data_dopp, P_dopp, 'ConPoly_dopp', feature1=['blue', 'dopp', '.'],
feature2=['red', 'vertex', 'o'])
start1 = time.time()
TIME = start1 - start0
print('Elapsed time: {:.0f} hours {:.0f} minutes {:.0f} seconds'.format(TIME // 3600, TIME % 3600 // 60, TIME % 3600 % 60))
print('Graph matching------------------------------------------------------')
# Compute the Laplacian matrices
B_dlg = lm.LaplacianMatrice(P_dlg)
B_dopp = lm.LaplacianMatrice(P_dopp)
# Spectral decomposition of the Laplacian matrices
U_dlg, Lambda_dlg = lm.LaplacianMatrice_decomposed(B_dlg)
U_dopp, Lambda_dopp = lm.LaplacianMatrice_decomposed(B_dopp)
# Compute the dissimilarity matrix
k = min(len(P_dlg), len(P_dopp))
A = lm.corrlation(U_dopp, U_dlg, k)
# Bipartite matching on the similarity matrix (drop matches whose dissimilarity is too large)
row_ind, col_ind = linear_sum_assignment(A)
row, col = lm.DeleteLargeValue(A, row_ind, col_ind, 0.9)
# Re-order the point clouds according to the matching result
P_dlg_new=lm.resort_clouds(P_dlg,row)
P_dopp_new=lm.resort_clouds(P_dopp,col)
# Visualize the matching result of the convex polygon vertices
vs.VisualizeMatch(P_dopp, P_dlg, row, col,'凸多边形顶点')
# Compute the transformation matrix (and apply it to the dopp cloud)
R, T = tf.ca_rt(P_dopp_new, P_dlg_new, 'MatchingByConPolyPoints_result.txt')
data_dopp = tf.transformation(data_dopp0, R, T, 'dopp_transformed.txt')
# Visualize the registration result of the original point clouds
vs.Visualize2PointClouds(data_dopp, data_dlg0, 'Macth_dlg&dopp', feature1=['blue', 'dopp', '.'],
feature2=['red', 'dlg', '.'])
start2 = time.time()
TIME = start2 - start1
print('Elapsed time: {:.0f} hours {:.0f} minutes {:.0f} seconds'.format(TIME // 3600, TIME % 3600 // 60, TIME % 3600 % 60))
TIME = time.time() - start0
print('\nTotal elapsed time: {:.0f} hours {:.0f} minutes {:.0f} seconds'.format(TIME // 3600, TIME % 3600 // 60, TIME % 3600 % 60))
|
[
"LaplacianMatrice.corrlation",
"HierarchicalClustering.HierarchicalClustering",
"HierarchicalClustering.mergeClosePoints",
"Visualization.VisualizeMatch",
"TransformationMatrix.transformation",
"ConPolyProcess.maxPoints",
"ConPolyProcess.delete_linepoints",
"LaplacianMatrice.resort_clouds",
"LaplacianMatrice.LaplacianMatrice",
"Visualization.Visualize2PointClouds",
"time.time",
"LaplacianMatrice.LaplacianMatrice_decomposed",
"numpy.loadtxt",
"numpy.array",
"LaplacianMatrice.DeleteLargeValue",
"TransformationMatrix.ca_rt",
"ConvexPolygon.ConvexPolygon",
"scipy.optimize.linear_sum_assignment"
] |
[((1201, 1212), 'time.time', 'time.time', ([], {}), '()\n', (1210, 1212), False, 'import time\n'), ((1310, 1371), 'numpy.loadtxt', 'np.loadtxt', (['"""..\\\\data\\\\Polyline_PCB02_500.txt"""'], {'delimiter': '""","""'}), "('..\\\\data\\\\Polyline_PCB02_500.txt', delimiter=',')\n", (1320, 1371), True, 'import numpy as np\n'), ((1385, 1442), 'numpy.loadtxt', 'np.loadtxt', (['"""..\\\\data\\\\PCB_c1_z5_t20.txt"""'], {'delimiter': '"""\t"""'}), "('..\\\\data\\\\PCB_c1_z5_t20.txt', delimiter='\\t')\n", (1395, 1442), True, 'import numpy as np\n'), ((1574, 1631), 'HierarchicalClustering.HierarchicalClustering', 'hc.HierarchicalClustering', (['data_dlg0', 'clusters_num', '"""dlg"""'], {}), "(data_dlg0, clusters_num, 'dlg')\n", (1599, 1631), True, 'import HierarchicalClustering as hc\n'), ((1660, 1719), 'HierarchicalClustering.HierarchicalClustering', 'hc.HierarchicalClustering', (['data_dopp0', 'clusters_num', '"""dopp"""'], {}), "(data_dopp0, clusters_num, 'dopp')\n", (1685, 1719), True, 'import HierarchicalClustering as hc\n'), ((1894, 2018), 'Visualization.Visualize2PointClouds', 'vs.Visualize2PointClouds', (['data_dlg', 'P_dlg', '"""ConPoly_dlg"""'], {'feature1': "['blue', 'dlg', '.']", 'feature2': "['red', 'vertex', 'o']"}), "(data_dlg, P_dlg, 'ConPoly_dlg', feature1=['blue',\n 'dlg', '.'], feature2=['red', 'vertex', 'o'])\n", (1918, 2018), True, 'import Visualization as vs\n'), ((2016, 2145), 'Visualization.Visualize2PointClouds', 'vs.Visualize2PointClouds', (['data_dopp', 'P_dopp', '"""ConPoly_dopp"""'], {'feature1': "['blue', 'dopp', '.']", 'feature2': "['red', 'vertex', 'o']"}), "(data_dopp, P_dopp, 'ConPoly_dopp', feature1=[\n 'blue', 'dopp', '.'], feature2=['red', 'vertex', 'o'])\n", (2040, 2145), True, 'import Visualization as vs\n'), ((2179, 2190), 'time.time', 'time.time', ([], {}), '()\n', (2188, 2190), False, 'import time\n'), ((2420, 2446), 'LaplacianMatrice.LaplacianMatrice', 'lm.LaplacianMatrice', (['P_dlg'], {}), '(P_dlg)\n', (2439, 2446), True, 'import LaplacianMatrice as lm\n'), ((2457, 2484), 'LaplacianMatrice.LaplacianMatrice', 'lm.LaplacianMatrice', (['P_dopp'], {}), '(P_dopp)\n', (2476, 2484), True, 'import LaplacianMatrice as lm\n'), ((2524, 2561), 'LaplacianMatrice.LaplacianMatrice_decomposed', 'lm.LaplacianMatrice_decomposed', (['B_dlg'], {}), '(B_dlg)\n', (2554, 2561), True, 'import LaplacianMatrice as lm\n'), ((2585, 2623), 'LaplacianMatrice.LaplacianMatrice_decomposed', 'lm.LaplacianMatrice_decomposed', (['B_dopp'], {}), '(B_dopp)\n', (2615, 2623), True, 'import LaplacianMatrice as lm\n'), ((2676, 2707), 'LaplacianMatrice.corrlation', 'lm.corrlation', (['U_dopp', 'U_dlg', 'k'], {}), '(U_dopp, U_dlg, k)\n', (2689, 2707), True, 'import LaplacianMatrice as lm\n'), ((2758, 2782), 'scipy.optimize.linear_sum_assignment', 'linear_sum_assignment', (['A'], {}), '(A)\n', (2779, 2782), False, 'from scipy.optimize import linear_sum_assignment\n'), ((2795, 2840), 'LaplacianMatrice.DeleteLargeValue', 'lm.DeleteLargeValue', (['A', 'row_ind', 'col_ind', '(0.9)'], {}), '(A, row_ind, col_ind, 0.9)\n', (2814, 2840), True, 'import LaplacianMatrice as lm\n'), ((2871, 2899), 'LaplacianMatrice.resort_clouds', 'lm.resort_clouds', (['P_dlg', 'row'], {}), '(P_dlg, row)\n', (2887, 2899), True, 'import LaplacianMatrice as lm\n'), ((2911, 2940), 'LaplacianMatrice.resort_clouds', 'lm.resort_clouds', (['P_dopp', 'col'], {}), '(P_dopp, col)\n', (2927, 2940), True, 'import LaplacianMatrice as lm\n'), ((2960, 3012), 'Visualization.VisualizeMatch', 'vs.VisualizeMatch', (['P_dopp', 'P_dlg', 'row', 
'col', '"""凸多边形顶点"""'], {}), "(P_dopp, P_dlg, row, col, '凸多边形顶点')\n", (2977, 3012), True, 'import Visualization as vs\n'), ((3046, 3115), 'TransformationMatrix.ca_rt', 'tf.ca_rt', (['P_dopp_new', 'P_dlg_new', '"""MatchingByConPolyPoints_result.txt"""'], {}), "(P_dopp_new, P_dlg_new, 'MatchingByConPolyPoints_result.txt')\n", (3054, 3115), True, 'import TransformationMatrix as tf\n'), ((3129, 3188), 'TransformationMatrix.transformation', 'tf.transformation', (['data_dopp0', 'R', 'T', '"""dopp_transformed.txt"""'], {}), "(data_dopp0, R, T, 'dopp_transformed.txt')\n", (3146, 3188), True, 'import TransformationMatrix as tf\n'), ((3207, 3338), 'Visualization.Visualize2PointClouds', 'vs.Visualize2PointClouds', (['data_dopp', 'data_dlg0', '"""Macth_dlg&dopp"""'], {'feature1': "['blue', 'dopp', '.']", 'feature2': "['red', 'dlg', '.']"}), "(data_dopp, data_dlg0, 'Macth_dlg&dopp', feature1=[\n 'blue', 'dopp', '.'], feature2=['red', 'dlg', '.'])\n", (3231, 3338), True, 'import Visualization as vs\n'), ((3372, 3383), 'time.time', 'time.time', ([], {}), '()\n', (3381, 3383), False, 'import time\n'), ((1155, 1166), 'numpy.array', 'np.array', (['P'], {}), '(P)\n', (1163, 1166), True, 'import numpy as np\n'), ((3534, 3545), 'time.time', 'time.time', ([], {}), '()\n', (3543, 3545), False, 'import time\n'), ((716, 742), 'ConvexPolygon.ConvexPolygon', 'cp.ConvexPolygon', (['data_tem'], {}), '(data_tem)\n', (732, 742), True, 'import ConvexPolygon as cp\n'), ((761, 809), 'numpy.array', 'np.array', (['data_tem[indexes, :]'], {'dtype': 'np.float32'}), '(data_tem[indexes, :], dtype=np.float32)\n', (769, 809), True, 'import numpy as np\n'), ((1036, 1066), 'HierarchicalClustering.mergeClosePoints', 'hc.mergeClosePoints', (['points', '(3)'], {}), '(points, 3)\n', (1055, 1066), True, 'import HierarchicalClustering as hc\n'), ((857, 884), 'ConPolyProcess.maxPoints', 'cs.maxPoints', ([], {'points': 'points'}), '(points=points)\n', (869, 884), True, 'import ConPolyProcess as cs\n'), ((936, 975), 'ConPolyProcess.delete_linepoints', 'cs.delete_linepoints', (['points', 'a0', 'b0', '(3)'], {}), '(points, a0, b0, 3)\n', (956, 975), True, 'import ConPolyProcess as cs\n')]
|
import unittest
from cipher import sub_cipher
class CipherTests(unittest.TestCase):
"""
Run several error tests
"""
def test_one_to_one(self):
self.assertTrue(sub_cipher('toot', 'peep'))
def test_one_to_two_correspondence(self):
self.assertFalse(sub_cipher('lambda', 'school'))
def test_two_to_one_correspondence(self):
self.assertFalse(sub_cipher('school', 'lambda'))
def test_unequal_length(self):
self.assertFalse(sub_cipher('o', 'lambda'))
def test_empty_strings(self):
self.assertTrue(sub_cipher('', ''))
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"cipher.sub_cipher"
] |
[((622, 637), 'unittest.main', 'unittest.main', ([], {}), '()\n', (635, 637), False, 'import unittest\n'), ((186, 212), 'cipher.sub_cipher', 'sub_cipher', (['"""toot"""', '"""peep"""'], {}), "('toot', 'peep')\n", (196, 212), False, 'from cipher import sub_cipher\n'), ((286, 316), 'cipher.sub_cipher', 'sub_cipher', (['"""lambda"""', '"""school"""'], {}), "('lambda', 'school')\n", (296, 316), False, 'from cipher import sub_cipher\n'), ((390, 420), 'cipher.sub_cipher', 'sub_cipher', (['"""school"""', '"""lambda"""'], {}), "('school', 'lambda')\n", (400, 420), False, 'from cipher import sub_cipher\n'), ((483, 508), 'cipher.sub_cipher', 'sub_cipher', (['"""o"""', '"""lambda"""'], {}), "('o', 'lambda')\n", (493, 508), False, 'from cipher import sub_cipher\n'), ((569, 587), 'cipher.sub_cipher', 'sub_cipher', (['""""""', '""""""'], {}), "('', '')\n", (579, 587), False, 'from cipher import sub_cipher\n')]
|
import logging
from requests_extra import get
logging.basicConfig(level=logging.DEBUG)
def test_sessions_automatically_reused_for_same_scheme_and_netloc(caplog):
# we will capture the debug logs that will print sth like "Got session from cache"
caplog.set_level(logging.DEBUG)
get("https://httpbin.org/ip")
get("https://httpbin.org/user-agent")
second_request_reused_session = False
for record in caplog.records:
if "Got session from cache!" in record.getMessage():
second_request_reused_session = True
break
assert second_request_reused_session
def test_automatic_session_cookies_working_on_first_request():
# on the 1st request that gets a response with cookies we SHOULD be able to read them
response1 = get("https://httpbin.org/cookies/set/foo/bar", allow_redirects=False)
assert response1.cookies["foo"] == "bar"
def test_automatic_session_cookies_not_getting_passed_on_subsequent_requests():
# on the 1st request that gets a response with cookies we SHOULD be able to read them
response1 = get("https://httpbin.org/cookies/set/foo2/bar2", allow_redirects=False)
assert response1.cookies["foo2"] == "bar2"
# ...but the 2nd request should NOT contain the cookie set above!
response2 = get("https://httpbin.org/cookies")
assert response2.json()["cookies"] == {}
|
[
"requests_extra.get",
"logging.basicConfig"
] |
[((48, 88), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.DEBUG'}), '(level=logging.DEBUG)\n', (67, 88), False, 'import logging\n'), ((295, 324), 'requests_extra.get', 'get', (['"""https://httpbin.org/ip"""'], {}), "('https://httpbin.org/ip')\n", (298, 324), False, 'from requests_extra import get\n'), ((329, 366), 'requests_extra.get', 'get', (['"""https://httpbin.org/user-agent"""'], {}), "('https://httpbin.org/user-agent')\n", (332, 366), False, 'from requests_extra import get\n'), ((786, 855), 'requests_extra.get', 'get', (['"""https://httpbin.org/cookies/set/foo/bar"""'], {'allow_redirects': '(False)'}), "('https://httpbin.org/cookies/set/foo/bar', allow_redirects=False)\n", (789, 855), False, 'from requests_extra import get\n'), ((1091, 1162), 'requests_extra.get', 'get', (['"""https://httpbin.org/cookies/set/foo2/bar2"""'], {'allow_redirects': '(False)'}), "('https://httpbin.org/cookies/set/foo2/bar2', allow_redirects=False)\n", (1094, 1162), False, 'from requests_extra import get\n'), ((1298, 1332), 'requests_extra.get', 'get', (['"""https://httpbin.org/cookies"""'], {}), "('https://httpbin.org/cookies')\n", (1301, 1332), False, 'from requests_extra import get\n')]
|
"""Views related to the Activity model.
Activity:
- POST /projects/{project_id}/activities
- PATCH /projects/{project_id}/activities/{activity_id}
- DELETE /projects/{project_id}/activities/{activity_id}
- PATCH /projects/{project_id}/activities/{activity_id}/publish
Competence:
- GET /competences
- POST /competences
- PATCH /competences/{competence_id}
- DELETE /competences/{competence_id}
"""
import logging
from flask import request
from flask.views import MethodView
from flask_login import login_required, current_user
from marshmallow import ValidationError
from sqlalchemy.exc import IntegrityError
from innopoints.extensions import db
from innopoints.blueprints import api
from innopoints.core.helpers import abort, allow_no_json, admin_required
from innopoints.core.notifications import remove_notifications
from innopoints.models import (
Activity,
ApplicationStatus,
Competence,
IPTS_PER_HOUR,
LifetimeStage,
Project,
)
from innopoints.schemas import ActivitySchema, CompetenceSchema
NO_PAYLOAD = ('', 204)
log = logging.getLogger(__name__)
@api.route('/projects/<int:project_id>/activities', methods=['POST'])
@login_required
def create_activity(project_id):
"""Create a new activity to an existing project."""
project = Project.query.get_or_404(project_id)
if not current_user.is_admin and current_user not in project.moderators:
abort(403)
if project.lifetime_stage not in (LifetimeStage.draft, LifetimeStage.ongoing):
abort(400, {'message': 'Activities may only be created on draft and ongoing projects.'})
in_schema = ActivitySchema(exclude=('id', 'project', 'applications', 'internal'))
try:
new_activity = in_schema.load(request.json)
except ValidationError as err:
abort(400, {'message': err.messages})
if new_activity.draft is None:
new_activity.draft = True
if not new_activity.draft and not new_activity.is_complete:
abort(400, {'message': 'Incomplete activities cannot be marked as non-draft.'})
new_activity.project = project
try:
db.session.add(new_activity)
db.session.commit()
except IntegrityError as err:
db.session.rollback()
log.exception(err)
abort(400, {'message': 'Data integrity violated.'})
out_schema = ActivitySchema(exclude=('existing_application',),
context={'user': current_user})
return out_schema.jsonify(new_activity)
class ActivityAPI(MethodView):
"""REST views for a particular instance of an Activity model."""
@login_required
def patch(self, project_id, activity_id):
"""Edit the activity."""
project = Project.query.get_or_404(project_id)
if not current_user.is_admin and current_user not in project.moderators:
abort(403)
if project.lifetime_stage not in (LifetimeStage.draft, LifetimeStage.ongoing):
abort(400, {'message': 'Activities may only be edited on draft and ongoing projects.'})
activity = Activity.query.get_or_404(activity_id)
if activity.internal:
abort(404)
if activity.project != project:
abort(400, {'message': 'The specified project and activity are unrelated.'})
in_schema = ActivitySchema(exclude=('id', 'project', 'applications', 'internal'))
try:
with db.session.no_autoflush:
updated_activity = in_schema.load(request.json, instance=activity, partial=True)
except ValidationError as err:
abort(400, {'message': err.messages})
if not updated_activity.draft and not updated_activity.is_complete:
abort(400, {'message': 'Incomplete activities cannot be marked as non-draft.'})
if activity.fixed_reward and activity.working_hours != 1:
abort(400, {'message': 'Cannot set working hours for fixed activities.'})
if not activity.fixed_reward and activity.reward_rate != IPTS_PER_HOUR:
abort(400, {'message': 'The reward rate for hourly activities may not be changed.'})
with db.session.no_autoflush:
if updated_activity.people_required is not None:
if updated_activity.accepted_applications > updated_activity.people_required:
abort(400, {'message': 'Cannot reduce the required people '
'beyond the amount of existing applications.'})
if updated_activity.draft and updated_activity.applications:
abort(400, {'message': 'Cannot mark as draft, applications exist.'})
for application in updated_activity.applications:
if (updated_activity.application_deadline is not None
and updated_activity.application_deadline < application.application_time):
abort(400, {'message': 'Cannot set the deadline earlier '
'than the existing application'})
if application.status != ApplicationStatus.rejected:
application.actual_hours = updated_activity.working_hours
try:
db.session.add(updated_activity)
db.session.commit()
except IntegrityError as err:
db.session.rollback()
log.exception(err)
abort(400, {'message': 'Data integrity violated.'})
out_schema = ActivitySchema(exclude=('existing_application',),
context={'user': current_user})
return out_schema.jsonify(updated_activity)
@login_required
def delete(self, project_id, activity_id):
"""Delete the activity."""
project = Project.query.get_or_404(project_id)
if not current_user.is_admin and current_user not in project.moderators:
abort(403)
if project.lifetime_stage not in (LifetimeStage.draft, LifetimeStage.ongoing):
abort(400, {'message': 'Activities may only be deleted on draft and ongoing projects.'})
activity = Activity.query.get_or_404(activity_id)
if activity.internal:
abort(404)
if activity.project != project:
abort(400, {'message': 'The specified project and activity are unrelated.'})
db.session.delete(activity)
try:
db.session.commit()
remove_notifications({
'activity_id': activity_id,
})
except IntegrityError as err:
db.session.rollback()
log.exception(err)
abort(400, {'message': 'Data integrity violated.'})
return NO_PAYLOAD
activity_api = ActivityAPI.as_view('activity_api')
api.add_url_rule('/projects/<int:project_id>/activities/<int:activity_id>',
view_func=activity_api,
methods=('PATCH', 'DELETE'))
@allow_no_json
@api.route('/projects/<int:project_id>/activities/<int:activity_id>/publish', methods=['PATCH'])
@login_required
def publish_activity(project_id, activity_id):
"""Publish the activity."""
project = Project.query.get_or_404(project_id)
if not current_user.is_admin and current_user not in project.moderators:
abort(403)
activity = Activity.query.get_or_404(activity_id)
if activity.internal:
abort(404)
if activity.project != project:
abort(400, {'message': 'The specified project and activity are unrelated.'})
if (activity.name is None
or activity.start_date is None
or activity.end_date is None
or activity.start_date > activity.end_date):
abort(400, {'message': 'The name or dates of the activity are invalid.'})
activity.draft = False
try:
db.session.commit()
except IntegrityError as err:
db.session.rollback()
log.exception(err)
abort(400, {'message': 'Data integrity violated.'})
return NO_PAYLOAD
# ----- Competence -----
@api.route('/competences')
def list_competences():
"""List all of the existing competences."""
schema = CompetenceSchema(many=True)
return schema.jsonify(Competence.query.all())
@api.route('/competences', methods=['POST'])
@admin_required
def create_competence():
"""Create a new competence."""
in_schema = CompetenceSchema(exclude=('id',))
try:
new_competence = in_schema.load(request.json)
except ValidationError as err:
abort(400, {'message': err.messages})
try:
db.session.add(new_competence)
db.session.commit()
except IntegrityError as err:
db.session.rollback()
log.exception(err)
abort(400, {'message': 'Data integrity violated.'})
out_schema = CompetenceSchema()
return out_schema.jsonify(new_competence)
class CompetenceAPI(MethodView):
"""REST views for a particular instance of a Competence model."""
@admin_required
def patch(self, compt_id):
"""Edit the competence."""
competence = Competence.query.get_or_404(compt_id)
in_schema = CompetenceSchema(exclude=('id',))
try:
updated_competence = in_schema.load(request.json, instance=competence, partial=True)
except ValidationError as err:
abort(400, {'message': err.messages})
try:
db.session.add(updated_competence)
db.session.commit()
except IntegrityError as err:
db.session.rollback()
log.exception(err)
abort(400, {'message': 'Data integrity violated.'})
out_schema = CompetenceSchema()
return out_schema.jsonify(updated_competence)
@admin_required
def delete(self, compt_id):
"""Delete the competence."""
competence = Competence.query.get_or_404(compt_id)
try:
db.session.delete(competence)
db.session.commit()
except IntegrityError as err:
db.session.rollback()
log.exception(err)
abort(400, {'message': 'Data integrity violated.'})
return NO_PAYLOAD
competence_api = CompetenceAPI.as_view('competence_api')
api.add_url_rule('/competences/<int:compt_id>',
view_func=competence_api,
methods=('PATCH', 'DELETE'))
|
[
"innopoints.extensions.db.session.add",
"innopoints.extensions.db.session.rollback",
"innopoints.models.Activity.query.get_or_404",
"innopoints.schemas.ActivitySchema",
"innopoints.extensions.db.session.commit",
"innopoints.models.Competence.query.get_or_404",
"logging.getLogger",
"innopoints.models.Project.query.get_or_404",
"innopoints.models.Competence.query.all",
"innopoints.core.notifications.remove_notifications",
"innopoints.schemas.CompetenceSchema",
"innopoints.blueprints.api.route",
"innopoints.core.helpers.abort",
"innopoints.blueprints.api.add_url_rule",
"innopoints.extensions.db.session.delete"
] |
[((1067, 1094), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1084, 1094), False, 'import logging\n'), ((1098, 1166), 'innopoints.blueprints.api.route', 'api.route', (['"""/projects/<int:project_id>/activities"""'], {'methods': "['POST']"}), "('/projects/<int:project_id>/activities', methods=['POST'])\n", (1107, 1166), False, 'from innopoints.blueprints import api\n'), ((6747, 6879), 'innopoints.blueprints.api.add_url_rule', 'api.add_url_rule', (['"""/projects/<int:project_id>/activities/<int:activity_id>"""'], {'view_func': 'activity_api', 'methods': "('PATCH', 'DELETE')"}), "('/projects/<int:project_id>/activities/<int:activity_id>',\n view_func=activity_api, methods=('PATCH', 'DELETE'))\n", (6763, 6879), False, 'from innopoints.blueprints import api\n'), ((6928, 7027), 'innopoints.blueprints.api.route', 'api.route', (['"""/projects/<int:project_id>/activities/<int:activity_id>/publish"""'], {'methods': "['PATCH']"}), "('/projects/<int:project_id>/activities/<int:activity_id>/publish',\n methods=['PATCH'])\n", (6937, 7027), False, 'from innopoints.blueprints import api\n'), ((8011, 8036), 'innopoints.blueprints.api.route', 'api.route', (['"""/competences"""'], {}), "('/competences')\n", (8020, 8036), False, 'from innopoints.blueprints import api\n'), ((8203, 8246), 'innopoints.blueprints.api.route', 'api.route', (['"""/competences"""'], {'methods': "['POST']"}), "('/competences', methods=['POST'])\n", (8212, 8246), False, 'from innopoints.blueprints import api\n'), ((10179, 10285), 'innopoints.blueprints.api.add_url_rule', 'api.add_url_rule', (['"""/competences/<int:compt_id>"""'], {'view_func': 'competence_api', 'methods': "('PATCH', 'DELETE')"}), "('/competences/<int:compt_id>', view_func=competence_api,\n methods=('PATCH', 'DELETE'))\n", (10195, 10285), False, 'from innopoints.blueprints import api\n'), ((1286, 1322), 'innopoints.models.Project.query.get_or_404', 'Project.query.get_or_404', (['project_id'], {}), '(project_id)\n', (1310, 1322), False, 'from innopoints.models import Activity, ApplicationStatus, Competence, IPTS_PER_HOUR, LifetimeStage, Project\n'), ((1617, 1686), 'innopoints.schemas.ActivitySchema', 'ActivitySchema', ([], {'exclude': "('id', 'project', 'applications', 'internal')"}), "(exclude=('id', 'project', 'applications', 'internal'))\n", (1631, 1686), False, 'from innopoints.schemas import ActivitySchema, CompetenceSchema\n'), ((2333, 2418), 'innopoints.schemas.ActivitySchema', 'ActivitySchema', ([], {'exclude': "('existing_application',)", 'context': "{'user': current_user}"}), "(exclude=('existing_application',), context={'user':\n current_user})\n", (2347, 2418), False, 'from innopoints.schemas import ActivitySchema, CompetenceSchema\n'), ((7133, 7169), 'innopoints.models.Project.query.get_or_404', 'Project.query.get_or_404', (['project_id'], {}), '(project_id)\n', (7157, 7169), False, 'from innopoints.models import Activity, ApplicationStatus, Competence, IPTS_PER_HOUR, LifetimeStage, Project\n'), ((7282, 7320), 'innopoints.models.Activity.query.get_or_404', 'Activity.query.get_or_404', (['activity_id'], {}), '(activity_id)\n', (7307, 7320), False, 'from innopoints.models import Activity, ApplicationStatus, Competence, IPTS_PER_HOUR, LifetimeStage, Project\n'), ((8122, 8149), 'innopoints.schemas.CompetenceSchema', 'CompetenceSchema', ([], {'many': '(True)'}), '(many=True)\n', (8138, 8149), False, 'from innopoints.schemas import ActivitySchema, CompetenceSchema\n'), ((8339, 8372), 'innopoints.schemas.CompetenceSchema', 
'CompetenceSchema', ([], {'exclude': "('id',)"}), "(exclude=('id',))\n", (8355, 8372), False, 'from innopoints.schemas import ActivitySchema, CompetenceSchema\n'), ((8764, 8782), 'innopoints.schemas.CompetenceSchema', 'CompetenceSchema', ([], {}), '()\n', (8780, 8782), False, 'from innopoints.schemas import ActivitySchema, CompetenceSchema\n'), ((1408, 1418), 'innopoints.core.helpers.abort', 'abort', (['(403)'], {}), '(403)\n', (1413, 1418), False, 'from innopoints.core.helpers import abort, allow_no_json, admin_required\n'), ((1511, 1603), 'innopoints.core.helpers.abort', 'abort', (['(400)', "{'message': 'Activities may only be created on draft and ongoing projects.'}"], {}), "(400, {'message':\n 'Activities may only be created on draft and ongoing projects.'})\n", (1516, 1603), False, 'from innopoints.core.helpers import abort, allow_no_json, admin_required\n'), ((1973, 2052), 'innopoints.core.helpers.abort', 'abort', (['(400)', "{'message': 'Incomplete activities cannot be marked as non-draft.'}"], {}), "(400, {'message': 'Incomplete activities cannot be marked as non-draft.'})\n", (1978, 2052), False, 'from innopoints.core.helpers import abort, allow_no_json, admin_required\n'), ((2107, 2135), 'innopoints.extensions.db.session.add', 'db.session.add', (['new_activity'], {}), '(new_activity)\n', (2121, 2135), False, 'from innopoints.extensions import db\n'), ((2144, 2163), 'innopoints.extensions.db.session.commit', 'db.session.commit', ([], {}), '()\n', (2161, 2163), False, 'from innopoints.extensions import db\n'), ((2711, 2747), 'innopoints.models.Project.query.get_or_404', 'Project.query.get_or_404', (['project_id'], {}), '(project_id)\n', (2735, 2747), False, 'from innopoints.models import Activity, ApplicationStatus, Competence, IPTS_PER_HOUR, LifetimeStage, Project\n'), ((3060, 3098), 'innopoints.models.Activity.query.get_or_404', 'Activity.query.get_or_404', (['activity_id'], {}), '(activity_id)\n', (3085, 3098), False, 'from innopoints.models import Activity, ApplicationStatus, Competence, IPTS_PER_HOUR, LifetimeStage, Project\n'), ((3303, 3372), 'innopoints.schemas.ActivitySchema', 'ActivitySchema', ([], {'exclude': "('id', 'project', 'applications', 'internal')"}), "(exclude=('id', 'project', 'applications', 'internal'))\n", (3317, 3372), False, 'from innopoints.schemas import ActivitySchema, CompetenceSchema\n'), ((5461, 5546), 'innopoints.schemas.ActivitySchema', 'ActivitySchema', ([], {'exclude': "('existing_application',)", 'context': "{'user': current_user}"}), "(exclude=('existing_application',), context={'user':\n current_user})\n", (5475, 5546), False, 'from innopoints.schemas import ActivitySchema, CompetenceSchema\n'), ((5752, 5788), 'innopoints.models.Project.query.get_or_404', 'Project.query.get_or_404', (['project_id'], {}), '(project_id)\n', (5776, 5788), False, 'from innopoints.models import Activity, ApplicationStatus, Competence, IPTS_PER_HOUR, LifetimeStage, Project\n'), ((6102, 6140), 'innopoints.models.Activity.query.get_or_404', 'Activity.query.get_or_404', (['activity_id'], {}), '(activity_id)\n', (6127, 6140), False, 'from innopoints.models import Activity, ApplicationStatus, Competence, IPTS_PER_HOUR, LifetimeStage, Project\n'), ((6333, 6360), 'innopoints.extensions.db.session.delete', 'db.session.delete', (['activity'], {}), '(activity)\n', (6350, 6360), False, 'from innopoints.extensions import db\n'), ((7255, 7265), 'innopoints.core.helpers.abort', 'abort', (['(403)'], {}), '(403)\n', (7260, 7265), False, 'from innopoints.core.helpers import abort, 
allow_no_json, admin_required\n'), ((7355, 7365), 'innopoints.core.helpers.abort', 'abort', (['(404)'], {}), '(404)\n', (7360, 7365), False, 'from innopoints.core.helpers import abort, allow_no_json, admin_required\n'), ((7411, 7487), 'innopoints.core.helpers.abort', 'abort', (['(400)', "{'message': 'The specified project and activity are unrelated.'}"], {}), "(400, {'message': 'The specified project and activity are unrelated.'})\n", (7416, 7487), False, 'from innopoints.core.helpers import abort, allow_no_json, admin_required\n'), ((7668, 7741), 'innopoints.core.helpers.abort', 'abort', (['(400)', "{'message': 'The name or dates of the activity are invalid.'}"], {}), "(400, {'message': 'The name or dates of the activity are invalid.'})\n", (7673, 7741), False, 'from innopoints.core.helpers import abort, allow_no_json, admin_required\n'), ((7788, 7807), 'innopoints.extensions.db.session.commit', 'db.session.commit', ([], {}), '()\n', (7805, 7807), False, 'from innopoints.extensions import db\n'), ((8176, 8198), 'innopoints.models.Competence.query.all', 'Competence.query.all', ([], {}), '()\n', (8196, 8198), False, 'from innopoints.models import Activity, ApplicationStatus, Competence, IPTS_PER_HOUR, LifetimeStage, Project\n'), ((8536, 8566), 'innopoints.extensions.db.session.add', 'db.session.add', (['new_competence'], {}), '(new_competence)\n', (8550, 8566), False, 'from innopoints.extensions import db\n'), ((8575, 8594), 'innopoints.extensions.db.session.commit', 'db.session.commit', ([], {}), '()\n', (8592, 8594), False, 'from innopoints.extensions import db\n'), ((9042, 9079), 'innopoints.models.Competence.query.get_or_404', 'Competence.query.get_or_404', (['compt_id'], {}), '(compt_id)\n', (9069, 9079), False, 'from innopoints.models import Activity, ApplicationStatus, Competence, IPTS_PER_HOUR, LifetimeStage, Project\n'), ((9101, 9134), 'innopoints.schemas.CompetenceSchema', 'CompetenceSchema', ([], {'exclude': "('id',)"}), "(exclude=('id',))\n", (9117, 9134), False, 'from innopoints.schemas import ActivitySchema, CompetenceSchema\n'), ((9617, 9635), 'innopoints.schemas.CompetenceSchema', 'CompetenceSchema', ([], {}), '()\n', (9633, 9635), False, 'from innopoints.schemas import ActivitySchema, CompetenceSchema\n'), ((9801, 9838), 'innopoints.models.Competence.query.get_or_404', 'Competence.query.get_or_404', (['compt_id'], {}), '(compt_id)\n', (9828, 9838), False, 'from innopoints.models import Activity, ApplicationStatus, Competence, IPTS_PER_HOUR, LifetimeStage, Project\n'), ((1792, 1829), 'innopoints.core.helpers.abort', 'abort', (['(400)', "{'message': err.messages}"], {}), "(400, {'message': err.messages})\n", (1797, 1829), False, 'from innopoints.core.helpers import abort, allow_no_json, admin_required\n'), ((2206, 2227), 'innopoints.extensions.db.session.rollback', 'db.session.rollback', ([], {}), '()\n', (2225, 2227), False, 'from innopoints.extensions import db\n'), ((2263, 2314), 'innopoints.core.helpers.abort', 'abort', (['(400)', "{'message': 'Data integrity violated.'}"], {}), "(400, {'message': 'Data integrity violated.'})\n", (2268, 2314), False, 'from innopoints.core.helpers import abort, allow_no_json, admin_required\n'), ((2841, 2851), 'innopoints.core.helpers.abort', 'abort', (['(403)'], {}), '(403)\n', (2846, 2851), False, 'from innopoints.core.helpers import abort, allow_no_json, admin_required\n'), ((2952, 3043), 'innopoints.core.helpers.abort', 'abort', (['(400)', "{'message': 'Activities may only be edited on draft and ongoing projects.'}"], {}), "(400, 
{'message':\n 'Activities may only be edited on draft and ongoing projects.'})\n", (2957, 3043), False, 'from innopoints.core.helpers import abort, allow_no_json, admin_required\n'), ((3141, 3151), 'innopoints.core.helpers.abort', 'abort', (['(404)'], {}), '(404)\n', (3146, 3151), False, 'from innopoints.core.helpers import abort, allow_no_json, admin_required\n'), ((3205, 3281), 'innopoints.core.helpers.abort', 'abort', (['(400)', "{'message': 'The specified project and activity are unrelated.'}"], {}), "(400, {'message': 'The specified project and activity are unrelated.'})\n", (3210, 3281), False, 'from innopoints.core.helpers import abort, allow_no_json, admin_required\n'), ((3704, 3783), 'innopoints.core.helpers.abort', 'abort', (['(400)', "{'message': 'Incomplete activities cannot be marked as non-draft.'}"], {}), "(400, {'message': 'Incomplete activities cannot be marked as non-draft.'})\n", (3709, 3783), False, 'from innopoints.core.helpers import abort, allow_no_json, admin_required\n'), ((3863, 3936), 'innopoints.core.helpers.abort', 'abort', (['(400)', "{'message': 'Cannot set working hours for fixed activities.'}"], {}), "(400, {'message': 'Cannot set working hours for fixed activities.'})\n", (3868, 3936), False, 'from innopoints.core.helpers import abort, allow_no_json, admin_required\n'), ((4030, 4118), 'innopoints.core.helpers.abort', 'abort', (['(400)', "{'message': 'The reward rate for hourly activities may not be changed.'}"], {}), "(400, {'message':\n 'The reward rate for hourly activities may not be changed.'})\n", (4035, 4118), False, 'from innopoints.core.helpers import abort, allow_no_json, admin_required\n'), ((5207, 5239), 'innopoints.extensions.db.session.add', 'db.session.add', (['updated_activity'], {}), '(updated_activity)\n', (5221, 5239), False, 'from innopoints.extensions import db\n'), ((5252, 5271), 'innopoints.extensions.db.session.commit', 'db.session.commit', ([], {}), '()\n', (5269, 5271), False, 'from innopoints.extensions import db\n'), ((5882, 5892), 'innopoints.core.helpers.abort', 'abort', (['(403)'], {}), '(403)\n', (5887, 5892), False, 'from innopoints.core.helpers import abort, allow_no_json, admin_required\n'), ((5993, 6085), 'innopoints.core.helpers.abort', 'abort', (['(400)', "{'message': 'Activities may only be deleted on draft and ongoing projects.'}"], {}), "(400, {'message':\n 'Activities may only be deleted on draft and ongoing projects.'})\n", (5998, 6085), False, 'from innopoints.core.helpers import abort, allow_no_json, admin_required\n'), ((6183, 6193), 'innopoints.core.helpers.abort', 'abort', (['(404)'], {}), '(404)\n', (6188, 6193), False, 'from innopoints.core.helpers import abort, allow_no_json, admin_required\n'), ((6247, 6323), 'innopoints.core.helpers.abort', 'abort', (['(400)', "{'message': 'The specified project and activity are unrelated.'}"], {}), "(400, {'message': 'The specified project and activity are unrelated.'})\n", (6252, 6323), False, 'from innopoints.core.helpers import abort, allow_no_json, admin_required\n'), ((6387, 6406), 'innopoints.extensions.db.session.commit', 'db.session.commit', ([], {}), '()\n', (6404, 6406), False, 'from innopoints.extensions import db\n'), ((6419, 6469), 'innopoints.core.notifications.remove_notifications', 'remove_notifications', (["{'activity_id': activity_id}"], {}), "({'activity_id': activity_id})\n", (6439, 6469), False, 'from innopoints.core.notifications import remove_notifications\n'), ((7850, 7871), 'innopoints.extensions.db.session.rollback', 'db.session.rollback', ([], 
{}), '()\n', (7869, 7871), False, 'from innopoints.extensions import db\n'), ((7907, 7958), 'innopoints.core.helpers.abort', 'abort', (['(400)', "{'message': 'Data integrity violated.'}"], {}), "(400, {'message': 'Data integrity violated.'})\n", (7912, 7958), False, 'from innopoints.core.helpers import abort, allow_no_json, admin_required\n'), ((8480, 8517), 'innopoints.core.helpers.abort', 'abort', (['(400)', "{'message': err.messages}"], {}), "(400, {'message': err.messages})\n", (8485, 8517), False, 'from innopoints.core.helpers import abort, allow_no_json, admin_required\n'), ((8637, 8658), 'innopoints.extensions.db.session.rollback', 'db.session.rollback', ([], {}), '()\n', (8656, 8658), False, 'from innopoints.extensions import db\n'), ((8694, 8745), 'innopoints.core.helpers.abort', 'abort', (['(400)', "{'message': 'Data integrity violated.'}"], {}), "(400, {'message': 'Data integrity violated.'})\n", (8699, 8745), False, 'from innopoints.core.helpers import abort, allow_no_json, admin_required\n'), ((9361, 9395), 'innopoints.extensions.db.session.add', 'db.session.add', (['updated_competence'], {}), '(updated_competence)\n', (9375, 9395), False, 'from innopoints.extensions import db\n'), ((9408, 9427), 'innopoints.extensions.db.session.commit', 'db.session.commit', ([], {}), '()\n', (9425, 9427), False, 'from innopoints.extensions import db\n'), ((9865, 9894), 'innopoints.extensions.db.session.delete', 'db.session.delete', (['competence'], {}), '(competence)\n', (9882, 9894), False, 'from innopoints.extensions import db\n'), ((9907, 9926), 'innopoints.extensions.db.session.commit', 'db.session.commit', ([], {}), '()\n', (9924, 9926), False, 'from innopoints.extensions import db\n'), ((3577, 3614), 'innopoints.core.helpers.abort', 'abort', (['(400)', "{'message': err.messages}"], {}), "(400, {'message': err.messages})\n", (3582, 3614), False, 'from innopoints.core.helpers import abort, allow_no_json, admin_required\n'), ((5322, 5343), 'innopoints.extensions.db.session.rollback', 'db.session.rollback', ([], {}), '()\n', (5341, 5343), False, 'from innopoints.extensions import db\n'), ((5387, 5438), 'innopoints.core.helpers.abort', 'abort', (['(400)', "{'message': 'Data integrity violated.'}"], {}), "(400, {'message': 'Data integrity violated.'})\n", (5392, 5438), False, 'from innopoints.core.helpers import abort, allow_no_json, admin_required\n'), ((6551, 6572), 'innopoints.extensions.db.session.rollback', 'db.session.rollback', ([], {}), '()\n', (6570, 6572), False, 'from innopoints.extensions import db\n'), ((6616, 6667), 'innopoints.core.helpers.abort', 'abort', (['(400)', "{'message': 'Data integrity violated.'}"], {}), "(400, {'message': 'Data integrity violated.'})\n", (6621, 6667), False, 'from innopoints.core.helpers import abort, allow_no_json, admin_required\n'), ((9297, 9334), 'innopoints.core.helpers.abort', 'abort', (['(400)', "{'message': err.messages}"], {}), "(400, {'message': err.messages})\n", (9302, 9334), False, 'from innopoints.core.helpers import abort, allow_no_json, admin_required\n'), ((9478, 9499), 'innopoints.extensions.db.session.rollback', 'db.session.rollback', ([], {}), '()\n', (9497, 9499), False, 'from innopoints.extensions import db\n'), ((9543, 9594), 'innopoints.core.helpers.abort', 'abort', (['(400)', "{'message': 'Data integrity violated.'}"], {}), "(400, {'message': 'Data integrity violated.'})\n", (9548, 9594), False, 'from innopoints.core.helpers import abort, allow_no_json, admin_required\n'), ((9977, 9998), 
'innopoints.extensions.db.session.rollback', 'db.session.rollback', ([], {}), '()\n', (9996, 9998), False, 'from innopoints.extensions import db\n'), ((10042, 10093), 'innopoints.core.helpers.abort', 'abort', (['(400)', "{'message': 'Data integrity violated.'}"], {}), "(400, {'message': 'Data integrity violated.'})\n", (10047, 10093), False, 'from innopoints.core.helpers import abort, allow_no_json, admin_required\n'), ((4329, 4442), 'innopoints.core.helpers.abort', 'abort', (['(400)', "{'message':\n 'Cannot reduce the required people beyond the amount of existing applications.'\n }"], {}), "(400, {'message':\n 'Cannot reduce the required people beyond the amount of existing applications.'\n })\n", (4334, 4442), False, 'from innopoints.core.helpers import abort, allow_no_json, admin_required\n'), ((4578, 4646), 'innopoints.core.helpers.abort', 'abort', (['(400)', "{'message': 'Cannot mark as draft, applications exist.'}"], {}), "(400, {'message': 'Cannot mark as draft, applications exist.'})\n", (4583, 4646), False, 'from innopoints.core.helpers import abort, allow_no_json, admin_required\n'), ((4899, 4991), 'innopoints.core.helpers.abort', 'abort', (['(400)', "{'message': 'Cannot set the deadline earlier than the existing application'}"], {}), "(400, {'message':\n 'Cannot set the deadline earlier than the existing application'})\n", (4904, 4991), False, 'from innopoints.core.helpers import abort, allow_no_json, admin_required\n')]
|
from matplotlib import mlab
import matplotlib.pyplot as plt
import numpy as np
import colorednoise as cn
from automutualinformation import sequential_mutual_information as smi
from automutualinformation import fit_model
beta = 0.5 # the exponent
samples = 10000 # number of samples to generate
y = cn.powerlaw_psd_gaussian(beta, samples)
nbins = 10 # how many bins to compute over
bins = np.linspace(np.min(y), np.max(y), nbins)
y_dig = np.digitize(y, bins, right=True)
range_ = np.arange(1, 10)
def test_compute_mi():
(MI, _), (shuff_MI, _) = smi([y_dig], distances=range_, n_jobs=1)
def test_compute_mi_fit_model():
(MI, _), (shuff_MI, _) = smi([y_dig], distances=range_, n_jobs=1)
decay_model, model_y = fit_model(
distances=range_,
sig=MI - shuff_MI,
)
|
[
"colorednoise.powerlaw_psd_gaussian",
"automutualinformation.sequential_mutual_information",
"numpy.max",
"numpy.min",
"numpy.arange",
"automutualinformation.fit_model",
"numpy.digitize"
] |
[((301, 340), 'colorednoise.powerlaw_psd_gaussian', 'cn.powerlaw_psd_gaussian', (['beta', 'samples'], {}), '(beta, samples)\n', (325, 340), True, 'import colorednoise as cn\n'), ((441, 473), 'numpy.digitize', 'np.digitize', (['y', 'bins'], {'right': '(True)'}), '(y, bins, right=True)\n', (452, 473), True, 'import numpy as np\n'), ((483, 499), 'numpy.arange', 'np.arange', (['(1)', '(10)'], {}), '(1, 10)\n', (492, 499), True, 'import numpy as np\n'), ((404, 413), 'numpy.min', 'np.min', (['y'], {}), '(y)\n', (410, 413), True, 'import numpy as np\n'), ((415, 424), 'numpy.max', 'np.max', (['y'], {}), '(y)\n', (421, 424), True, 'import numpy as np\n'), ((554, 594), 'automutualinformation.sequential_mutual_information', 'smi', (['[y_dig]'], {'distances': 'range_', 'n_jobs': '(1)'}), '([y_dig], distances=range_, n_jobs=1)\n', (557, 594), True, 'from automutualinformation import sequential_mutual_information as smi\n'), ((659, 699), 'automutualinformation.sequential_mutual_information', 'smi', (['[y_dig]'], {'distances': 'range_', 'n_jobs': '(1)'}), '([y_dig], distances=range_, n_jobs=1)\n', (662, 699), True, 'from automutualinformation import sequential_mutual_information as smi\n'), ((728, 774), 'automutualinformation.fit_model', 'fit_model', ([], {'distances': 'range_', 'sig': '(MI - shuff_MI)'}), '(distances=range_, sig=MI - shuff_MI)\n', (737, 774), False, 'from automutualinformation import fit_model\n')]
|
import unittest
from colourise import rgb2hsl
class TestRGBtoHSL(unittest.TestCase):
def test_primary_colour_red(self):
r, g, b = 255, 0, 0
h, s, l = rgb2hsl(r, g, b)
self.assertEqual(h, 0.0)
self.assertEqual(s, 1.0)
self.assertEqual(l, 0.5)
def test_primary_colour_green(self):
r, g, b = 0, 255, 0
h, s, l = rgb2hsl(r, g, b)
self.assertEqual(h, 120.0)
self.assertEqual(s, 1.0)
self.assertEqual(l, 0.5)
def test_primary_colour_blue(self):
r, g, b = 0, 0, 255
h, s, l = rgb2hsl(r, g, b)
self.assertEqual(h, 240.0)
self.assertEqual(s, 1.0)
self.assertEqual(l, 0.5)
def test_secondary_colour_cyan(self):
r, g, b = 0, 255, 255
h, s, l = rgb2hsl(r, g, b)
self.assertAlmostEqual(h, 180.0, delta=0.15)
self.assertEqual(s, 1.0)
self.assertEqual(l, 0.5)
def test_secondary_colour_magenta(self):
r, g, b = 255, 0, 255
h, s, l = rgb2hsl(r, g, b)
self.assertAlmostEqual(h, 300.0, delta=0.15)
self.assertEqual(s, 1.0)
self.assertEqual(l, 0.5)
def test_secondary_colour_yellow(self):
r, g, b = 255, 255, 0
h, s, l = rgb2hsl(r, g, b)
self.assertAlmostEqual(h, 60.0, delta=0.15)
self.assertEqual(s, 1.0)
self.assertEqual(l, 0.5)
def test_black(self):
r, g, b = 0, 0, 0
h, s, l = rgb2hsl(r, g, b)
self.assertEqual(s, 0.0)
self.assertEqual(s, 0.0)
self.assertEqual(l, 0.0)
def test_white(self):
r, g, b = 255, 255, 255
h, s, l = rgb2hsl(r, g, b)
self.assertEqual(s, 0.0)
self.assertEqual(s, 0.0)
self.assertEqual(l, 1.0)
|
[
"colourise.rgb2hsl"
] |
[((172, 188), 'colourise.rgb2hsl', 'rgb2hsl', (['r', 'g', 'b'], {}), '(r, g, b)\n', (179, 188), False, 'from colourise import rgb2hsl\n'), ((376, 392), 'colourise.rgb2hsl', 'rgb2hsl', (['r', 'g', 'b'], {}), '(r, g, b)\n', (383, 392), False, 'from colourise import rgb2hsl\n'), ((581, 597), 'colourise.rgb2hsl', 'rgb2hsl', (['r', 'g', 'b'], {}), '(r, g, b)\n', (588, 597), False, 'from colourise import rgb2hsl\n'), ((790, 806), 'colourise.rgb2hsl', 'rgb2hsl', (['r', 'g', 'b'], {}), '(r, g, b)\n', (797, 806), False, 'from colourise import rgb2hsl\n'), ((1020, 1036), 'colourise.rgb2hsl', 'rgb2hsl', (['r', 'g', 'b'], {}), '(r, g, b)\n', (1027, 1036), False, 'from colourise import rgb2hsl\n'), ((1249, 1265), 'colourise.rgb2hsl', 'rgb2hsl', (['r', 'g', 'b'], {}), '(r, g, b)\n', (1256, 1265), False, 'from colourise import rgb2hsl\n'), ((1455, 1471), 'colourise.rgb2hsl', 'rgb2hsl', (['r', 'g', 'b'], {}), '(r, g, b)\n', (1462, 1471), False, 'from colourise import rgb2hsl\n'), ((1648, 1664), 'colourise.rgb2hsl', 'rgb2hsl', (['r', 'g', 'b'], {}), '(r, g, b)\n', (1655, 1664), False, 'from colourise import rgb2hsl\n')]
|
from util import read_puzzle_input
from year_2021.day04.giant_squid import (
get_losing_board_score,
get_winning_board_score,
)
def test_get_winning_board_score():
assert get_winning_board_score(read_puzzle_input("test_input.txt")) == 4512
def test_get_losing_board_score():
assert get_losing_board_score(read_puzzle_input("test_input.txt")) == 1924
|
[
"util.read_puzzle_input"
] |
[((209, 244), 'util.read_puzzle_input', 'read_puzzle_input', (['"""test_input.txt"""'], {}), "('test_input.txt')\n", (226, 244), False, 'from util import read_puzzle_input\n'), ((325, 360), 'util.read_puzzle_input', 'read_puzzle_input', (['"""test_input.txt"""'], {}), "('test_input.txt')\n", (342, 360), False, 'from util import read_puzzle_input\n')]
|
from secretsharing import PlaintextToHexSecretSharer
def main():
# Enter shares
shares = [input('Enter your share: ')]
while True:
numofSHares = input("Still have more?\tYes\tNo\n").upper()
        if numofSHares in ("Y", "YES"):
shares.append(input('Enter your share: '))
        elif numofSHares in ("N", "NO"):
break
else:
print("You haven't answered correctly, try again\n")
# Recover
message = PlaintextToHexSecretSharer.recover_secret(shares)
print('Original message:\n'+message)
if __name__ == '__main__':
main()
|
[
"secretsharing.PlaintextToHexSecretSharer.recover_secret"
] |
[((461, 510), 'secretsharing.PlaintextToHexSecretSharer.recover_secret', 'PlaintextToHexSecretSharer.recover_secret', (['shares'], {}), '(shares)\n', (502, 510), False, 'from secretsharing import PlaintextToHexSecretSharer\n')]
|
from HuginAutomator import HuginAutomator
from flask import Flask
import time
import datetime
import os
CONTEXTS = ('run', 'compute')
def get_env():
return {'credentials': os.getenv('DROPBOX_TOKEN'),
'min_s': os.getenv('MIN_STITCH'),
'max_s': os.getenv('MAX_STITCH'),
'min_a': os.getenv('MIN_ALIGN'),
'max_a': os.getenv('MAX_ALIGN')}
def main_loop_compute():
"""
periodically check dropbox folders to see if there are new projects
if a new project is found, download + align/build + upload it and continue with the loop
"""
env = get_env()
hugin = HuginAutomator(env['credentials'], env['min_s'], env['max_s'], env['min_a'], env['max_a'])
now = datetime.datetime.now
start_time = now()
most_recent_job = start_time
while now() - most_recent_job < datetime.timedelta(minutes=10):
if hugin.check_for_stitch():
hugin.build()
most_recent_job = now()
if hugin.check_for_align():
hugin.align()
most_recent_job = now()
time.sleep(5)
# go to some url to execute cloud function that turns off the instance
app = Flask(__name__)
@app.route('/')
def main():
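    """Handle one HTTP request: run at most one pending stitch/build or align job."""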
env = get_env()
hugin = HuginAutomator(env['credentials'], env['min_s'], env['max_s'], env['min_a'], env['max_a'])
if hugin.check_for_stitch():
return hugin.build()
elif hugin.check_for_align():
return hugin.align()
return "asdf"
if __name__ == "__main__":
context = os.getenv('CONTEXT')
if context == CONTEXTS[0]:
app.run(debug=True, host='0.0.0.0', port=int(os.environ.get('PORT', 8080)))
if context == CONTEXTS[1]:
main_loop_compute()
|
[
"HuginAutomator.HuginAutomator",
"flask.Flask",
"time.sleep",
"os.environ.get",
"datetime.timedelta",
"os.getenv"
] |
[((1179, 1194), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (1184, 1194), False, 'from flask import Flask\n'), ((630, 724), 'HuginAutomator.HuginAutomator', 'HuginAutomator', (["env['credentials']", "env['min_s']", "env['max_s']", "env['min_a']", "env['max_a']"], {}), "(env['credentials'], env['min_s'], env['max_s'], env['min_a'],\n env['max_a'])\n", (644, 724), False, 'from HuginAutomator import HuginAutomator\n'), ((1257, 1351), 'HuginAutomator.HuginAutomator', 'HuginAutomator', (["env['credentials']", "env['min_s']", "env['max_s']", "env['min_a']", "env['max_a']"], {}), "(env['credentials'], env['min_s'], env['max_s'], env['min_a'],\n env['max_a'])\n", (1271, 1351), False, 'from HuginAutomator import HuginAutomator\n'), ((1534, 1554), 'os.getenv', 'os.getenv', (['"""CONTEXT"""'], {}), "('CONTEXT')\n", (1543, 1554), False, 'import os\n'), ((180, 206), 'os.getenv', 'os.getenv', (['"""DROPBOX_TOKEN"""'], {}), "('DROPBOX_TOKEN')\n", (189, 206), False, 'import os\n'), ((229, 252), 'os.getenv', 'os.getenv', (['"""MIN_STITCH"""'], {}), "('MIN_STITCH')\n", (238, 252), False, 'import os\n'), ((275, 298), 'os.getenv', 'os.getenv', (['"""MAX_STITCH"""'], {}), "('MAX_STITCH')\n", (284, 298), False, 'import os\n'), ((321, 343), 'os.getenv', 'os.getenv', (['"""MIN_ALIGN"""'], {}), "('MIN_ALIGN')\n", (330, 343), False, 'import os\n'), ((366, 388), 'os.getenv', 'os.getenv', (['"""MAX_ALIGN"""'], {}), "('MAX_ALIGN')\n", (375, 388), False, 'import os\n'), ((845, 875), 'datetime.timedelta', 'datetime.timedelta', ([], {'minutes': '(10)'}), '(minutes=10)\n', (863, 875), False, 'import datetime\n'), ((1082, 1095), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (1092, 1095), False, 'import time\n'), ((1639, 1667), 'os.environ.get', 'os.environ.get', (['"""PORT"""', '(8080)'], {}), "('PORT', 8080)\n", (1653, 1667), False, 'import os\n')]
|
import os
from setuptools import setup
from snakehouse import Multibuild, build, monkey_patch_parallel_compilation, find_pyx_and_c, \
find_all
from setuptools import Extension
monkey_patch_parallel_compilation()
dont_snakehouse = False
if 'DEBUG' in os.environ:
print('Debug is enabled!')
dont_snakehouse = True
# note that you can include standard Extension classes in this list, those won't be touched
# and will be directed directly to Cython.Build.cythonize()
cython_multibuilds = [
# note that Windows-style pathes are supported on Linux build environment,
# the reverse not necessarily being true (issue #5)
Multibuild('example_module', find_all('example_module', True),
define_macros=[("CYTHON_TRACE_NOGIL", "1")],
dont_snakehouse=dont_snakehouse),
Extension('example2.example', ['example2/example.pyx']),
Multibuild('example3.example3.example3', ['example3/example3/example3/test.pyx'],
dont_snakehouse=dont_snakehouse)
]
# first argument is used directly by snakehouse, the rest and **kwargs are passed to
# Cython.Build.cythonize()
ext_modules = build(cython_multibuilds,
compiler_directives={
'language_level': '3',
})
setup(name='example_module',
version='0.1',
packages=['example_module', 'example2'],
install_requires=[
'Cython', 'snakehouse'
],
zip_safe=False,
tests_require=[
"nose2"
],
test_suite='nose2.collector.collector',
python_requires='!=2.7.*,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*',
ext_modules=ext_modules
)
|
[
"setuptools.Extension",
"snakehouse.find_all",
"setuptools.setup",
"snakehouse.Multibuild",
"snakehouse.build",
"snakehouse.monkey_patch_parallel_compilation"
] |
[((183, 218), 'snakehouse.monkey_patch_parallel_compilation', 'monkey_patch_parallel_compilation', ([], {}), '()\n', (216, 218), False, 'from snakehouse import Multibuild, build, monkey_patch_parallel_compilation, find_pyx_and_c, find_all\n'), ((1148, 1218), 'snakehouse.build', 'build', (['cython_multibuilds'], {'compiler_directives': "{'language_level': '3'}"}), "(cython_multibuilds, compiler_directives={'language_level': '3'})\n", (1153, 1218), False, 'from snakehouse import Multibuild, build, monkey_patch_parallel_compilation, find_pyx_and_c, find_all\n'), ((1286, 1602), 'setuptools.setup', 'setup', ([], {'name': '"""example_module"""', 'version': '"""0.1"""', 'packages': "['example_module', 'example2']", 'install_requires': "['Cython', 'snakehouse']", 'zip_safe': '(False)', 'tests_require': "['nose2']", 'test_suite': '"""nose2.collector.collector"""', 'python_requires': '"""!=2.7.*,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*"""', 'ext_modules': 'ext_modules'}), "(name='example_module', version='0.1', packages=['example_module',\n 'example2'], install_requires=['Cython', 'snakehouse'], zip_safe=False,\n tests_require=['nose2'], test_suite='nose2.collector.collector',\n python_requires='!=2.7.*,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*',\n ext_modules=ext_modules)\n", (1291, 1602), False, 'from setuptools import setup\n'), ((828, 883), 'setuptools.Extension', 'Extension', (['"""example2.example"""', "['example2/example.pyx']"], {}), "('example2.example', ['example2/example.pyx'])\n", (837, 883), False, 'from setuptools import Extension\n'), ((889, 1008), 'snakehouse.Multibuild', 'Multibuild', (['"""example3.example3.example3"""', "['example3/example3/example3/test.pyx']"], {'dont_snakehouse': 'dont_snakehouse'}), "('example3.example3.example3', [\n 'example3/example3/example3/test.pyx'], dont_snakehouse=dont_snakehouse)\n", (899, 1008), False, 'from snakehouse import Multibuild, build, monkey_patch_parallel_compilation, find_pyx_and_c, find_all\n'), ((681, 713), 'snakehouse.find_all', 'find_all', (['"""example_module"""', '(True)'], {}), "('example_module', True)\n", (689, 713), False, 'from snakehouse import Multibuild, build, monkey_patch_parallel_compilation, find_pyx_and_c, find_all\n')]
|
#! /usr/bin/python3
import sys
import os
import time
from typing import Dict, List, Optional, Tuple
from collections import defaultdict
Position = complex
DIRECTIONS: Dict[int, Position] = {
1: -1j,
2: 1j,
3: -1,
4: 1
}
class IntCodeComputer():
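    """Intcode-style virtual machine: dict-backed memory, position / immediate /
    relative parameter modes, and cooperative I/O via the `polling` and
    `outputing` flags."""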
    def __init__(self, memory: List[int], inputs: Optional[List[int]] = None):
self.memory = defaultdict(int, [(index, value)
for index, value in enumerate(memory)])
self.pointer = 0
        self.inputs = inputs if inputs is not None else []  # avoid sharing a mutable default list
self.outputs: List[int] = []
self.base = 0
self.running = True
self.polling = False
self.outputing = False
def set_input(self, value: int):
self.inputs.insert(0, value)
def run(self) -> List[int]:
while self.running:
self.tick()
return self.outputs
def get_parameter(self, offset: int, mode: int) -> int:
value = self.memory[self.pointer + offset]
if mode == 0: # POSITION
return self.memory[value]
if mode == 1: # IMMEDIATE
return value
elif mode == 2: # RELATIVE
return self.memory[self.base + value]
raise Exception("Unrecognized parameter mode", mode)
def get_address(self, offset: int, mode: int) -> int:
value = self.memory[self.pointer + offset]
if mode == 0: # POSITION
return value
if mode == 2: # RELATIVE
return self.base + value
raise Exception("Unrecognized address mode", mode)
def get_output(self) -> int:
self.outputing = False
return self.outputs.pop()
def add_input(self, value: int):
self.inputs.append(value)
def tick(self):
instruction = self.memory[self.pointer]
opcode, p1_mode, p2_mode, p3_mode = instruction % 100, (
instruction // 100) % 10, (instruction // 1000) % 10, (instruction // 10000) % 10
if not self.running:
return
if opcode == 1: # ADD
self.memory[self.get_address(3, p3_mode)] = self.get_parameter(
1, p1_mode) + self.get_parameter(2, p2_mode)
self.pointer += 4
elif opcode == 2: # MUL
self.memory[self.get_address(3, p3_mode)] = self.get_parameter(
1, p1_mode) * self.get_parameter(2, p2_mode)
self.pointer += 4
elif opcode == 3: # INPUT
if self.inputs:
self.polling = False
self.memory[self.get_address(1, p1_mode)] = self.inputs.pop(0)
self.pointer += 2
else:
self.polling = True
elif opcode == 4: # OUTPUT
self.outputing = True
self.outputs.append(self.get_parameter(1, p1_mode))
self.pointer += 2
elif opcode == 5: # JMP_TRUE
if self.get_parameter(1, p1_mode):
self.pointer = self.get_parameter(2, p2_mode)
else:
self.pointer += 3
elif opcode == 6: # JMP_FALSE
if not self.get_parameter(1, p1_mode):
self.pointer = self.get_parameter(2, p2_mode)
else:
self.pointer += 3
elif opcode == 7: # LESS_THAN
self.memory[self.get_address(3, p3_mode)] = 1 if self.get_parameter(
1, p1_mode) < self.get_parameter(2, p2_mode) else 0
self.pointer += 4
elif opcode == 8: # EQUALS
self.memory[self.get_address(3, p3_mode)] = 1 if self.get_parameter(
1, p1_mode) == self.get_parameter(2, p2_mode) else 0
self.pointer += 4
elif opcode == 9: # SET_BASE
self.base += self.get_parameter(1, p1_mode)
self.pointer += 2
elif opcode == 99: # HALT
self.running = False
else:
raise Exception(f"Unknown instruction", self.pointer,
instruction, opcode, p1_mode, p2_mode, p3_mode)
def clone(self):
clone_computer = IntCodeComputer([])
        clone_computer.memory = defaultdict(int, self.memory)  # keep default-0 reads for untouched addresses
clone_computer.pointer = self.pointer
clone_computer.base = self.base
return clone_computer
def draw_area(oxygen: List[Position], walls: List[Position], open_spaces: List[Position]):
all_posiitons = walls + oxygen
min_x = int(min(map(lambda p: p.real, all_posiitons)))
max_x = int(max(map(lambda p: p.real, all_posiitons)))
min_y = int(min(map(lambda p: p.imag, all_posiitons)))
max_y = int(max(map(lambda p: p.imag, all_posiitons)))
for y in range(max_y, min_y - 1, - 1):
for x in range(min_x, max_x + 1):
position = x + y * 1j
c = " "
if position in walls:
c = "#"
if position in open_spaces:
c = "."
if position in oxygen:
c = "O"
print(c, end="")
print()
print()
def run_until_oxygen_system(memory: List[int]) -> Tuple[int, Position, List[Position]]:
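    """Breadth-first search of the maze; each frontier entry keeps its own cloned
    droid so the program state always matches the path walked so far."""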
start_position = 0j
open_spaces: List[Position] = []
oxygen_position = 0j
queue = [(start_position, [start_position], IntCodeComputer(memory))]
visited = [start_position]
steps_to_oxygen_system = 0
while queue:
position, path, droid = queue.pop(0)
for command, direction in DIRECTIONS.items():
new_position = position + direction
if new_position not in visited:
visited.append(new_position)
new_droid = droid.clone()
new_droid.inputs.append(command)
while not new_droid.outputing:
new_droid.tick()
status = new_droid.get_output()
if status == 2: # Oxygen system
if steps_to_oxygen_system == 0:
steps_to_oxygen_system = len(path)
oxygen_position = new_position
elif status == 1: # Open space
open_spaces.append(new_position)
while not new_droid.polling:
new_droid.tick()
new_path = list(path)
new_path.append(new_position)
queue.append((new_position, new_path, new_droid))
return steps_to_oxygen_system, oxygen_position, open_spaces
def solve(memory: List[int]) -> Tuple[int, int]:
steps_to_oxygen_system, oxygen_system_position, open_spaces = run_until_oxygen_system(
memory)
filled = [oxygen_system_position]
minutes = 0
while open_spaces:
minutes += 1
for oxygen in list(filled):
for direction in DIRECTIONS.values():
position = oxygen + direction
if position in open_spaces:
filled.append(position)
open_spaces.remove(position)
return steps_to_oxygen_system, minutes
def get_input(file_path: str) -> List[int]:
if not os.path.isfile(file_path):
raise FileNotFoundError(file_path)
with open(file_path, "r") as file:
return [int(i) for i in file.read().split(",")]
def main():
if len(sys.argv) != 2:
raise Exception("Please, add input file path as parameter")
start = time.perf_counter()
part1_result, part2_result = solve(get_input(sys.argv[1]))
end = time.perf_counter()
print("P1:", part1_result)
print("P2:", part2_result)
print()
print(f"Time: {end - start:.7f}")
if __name__ == "__main__":
main()
|
[
"os.path.isfile",
"time.perf_counter"
] |
[((7302, 7321), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (7319, 7321), False, 'import time\n'), ((7395, 7414), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (7412, 7414), False, 'import time\n'), ((7014, 7039), 'os.path.isfile', 'os.path.isfile', (['file_path'], {}), '(file_path)\n', (7028, 7039), False, 'import os\n')]
|
#!/usr/bin/env python
from __future__ import print_function
from builtins import input
import sys
import pmagpy.pmag as pmag
def spitout(line):
if '\t' in line:
dat=line.split('\t') # split the data on a space into columns
else:
dat=line.split() # split the data on a space into columns
b,lat=float(dat[0])*1e-6,float(dat[1])
    vdm = pmag.b_vdm(b, lat)
return vdm
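# Worked example (hedged figures): a 40 microtesla field at 30 degrees magnetic
# latitude corresponds to a virtual dipole moment on the order of 8e22 A m^2,
# i.e. spitout("40 30") should return roughly that value.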
def main():
"""
NAME
b_vdm.py
DESCRIPTION
converts B (in microT) and (magnetic) latitude to V(A)DM
INPUT (COMMAND LINE ENTRY)
B (microtesla), latitude (positive north)
OUTPUT
V[A]DM
SYNTAX
b_vdm.py [command line options] [< filename]
OPTIONS
-h prints help and quits
-i for interactive data entry
-f FILE input file
-F FILE output
"""
inp,out="",""
if '-h' in sys.argv:
print(main.__doc__)
sys.exit()
if '-f' in sys.argv:
ind=sys.argv.index('-f')
file=sys.argv[ind+1]
f=open(file,'r')
inp=f.readlines()
if '-F' in sys.argv:
ind=sys.argv.index('-F')
o=sys.argv[ind+1]
out=open(o,'w')
if '-i' in sys.argv:
cont=1
while cont==1:
try:
b=1e-6*float(input('B (in microtesla): <cntl-D to quit '))
lat=float(input('Latitude: '))
except:
print("\nGood bye\n")
sys.exit()
vdm= pmag.b_vdm(b,lat)
print('%10.3e '%(vdm))
if inp=="":
inp = sys.stdin.readlines() # read from standard input
for line in inp:
vdm=spitout(line)
if out=="":
print('%10.3e'%(vdm))
else:
out.write('%10.3e \n'%(vdm))
if __name__ == "__main__":
main()
|
[
"builtins.input",
"pmagpy.pmag.b_vdm",
"sys.argv.index",
"sys.stdin.readlines",
"sys.exit"
] |
[((364, 382), 'pmagpy.pmag.b_vdm', 'pmag.b_vdm', (['b', 'lat'], {}), '(b, lat)\n', (374, 382), True, 'import pmagpy.pmag as pmag\n'), ((954, 964), 'sys.exit', 'sys.exit', ([], {}), '()\n', (962, 964), False, 'import sys\n'), ((1002, 1022), 'sys.argv.index', 'sys.argv.index', (['"""-f"""'], {}), "('-f')\n", (1016, 1022), False, 'import sys\n'), ((1140, 1160), 'sys.argv.index', 'sys.argv.index', (['"""-F"""'], {}), "('-F')\n", (1154, 1160), False, 'import sys\n'), ((1616, 1637), 'sys.stdin.readlines', 'sys.stdin.readlines', ([], {}), '()\n', (1635, 1637), False, 'import sys\n'), ((1533, 1551), 'pmagpy.pmag.b_vdm', 'pmag.b_vdm', (['b', 'lat'], {}), '(b, lat)\n', (1543, 1551), True, 'import pmagpy.pmag as pmag\n'), ((1392, 1411), 'builtins.input', 'input', (['"""Latitude: """'], {}), "('Latitude: ')\n", (1397, 1411), False, 'from builtins import input\n'), ((1487, 1497), 'sys.exit', 'sys.exit', ([], {}), '()\n', (1495, 1497), False, 'import sys\n'), ((1320, 1364), 'builtins.input', 'input', (['"""B (in microtesla): <cntl-D to quit """'], {}), "('B (in microtesla): <cntl-D to quit ')\n", (1325, 1364), False, 'from builtins import input\n')]
|
#!/usr/bin/env python
#
# This test uses out of band ovs-ofctl to query the
# switches and compare to an existing state to see
# if the flows are installed correctly in the PyTapDEMon
# topology.
#
import unittest
import subprocess
def parseFlows(flows):
"""
Parse out the string representation of flows passed in.
Example:
NXST_FLOW reply (xid=0x4):
cookie=0x0, duration=4.329s, table=0, n_packets=0, n_bytes=0, idle_timeout=120,hard_timeout=120,in_port=3 actions=output:4
"""
switchFlows = {}
for flow in flows.split('\n'):
line = flow.split()
if len(line) > 3: #get rid of first line in flow output
inputPort = line[5].split(',')[2].split('=')[1]
outputPorts = line[6].split('actions=')[1]
switchFlows[inputPort] = outputPorts
return switchFlows
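# For the two-line example in the docstring above, the reply header is skipped
# (it splits into fewer than four tokens) and the flow line parses to
# {'3': 'output:4'}, i.e. the in_port mapped to its action string.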
globalFlows = {}
for i in range(1, 4):
"""Query switches s1, s2, s3 and dump flows, add to global flow dictionary"""
switch = 's'+str(i)
flows = subprocess.check_output(['sudo', 'ovs-ofctl', 'dump-flows', switch])
switchFlows = parseFlows(flows)
globalFlows[switch] = switchFlows
class PyTapDEMON_Test(unittest.TestCase):
def test_s1_port1(self):
self.assertEqual('output:2,output:6,output:8', globalFlows['s1']['1'])
def test_s2_port1(self):
self.assertEqual('output:2,output:6,output:8', globalFlows['s2']['1'])
def test_s3_port10(self):
self.assertEqual('output:11', globalFlows['s3']['10'])
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"subprocess.check_output"
] |
[((1008, 1076), 'subprocess.check_output', 'subprocess.check_output', (["['sudo', 'ovs-ofctl', 'dump-flows', switch]"], {}), "(['sudo', 'ovs-ofctl', 'dump-flows', switch])\n", (1031, 1076), False, 'import subprocess\n'), ((1543, 1558), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1556, 1558), False, 'import unittest\n')]
|
import torch
import numpy as np
# Util function for loading meshes
from pytorch3d.io import load_objs_as_meshes
from pytorch3d.transforms import euler_angles_to_matrix, matrix_to_euler_angles,Rotate
# Data structures and functions for rendering
from pytorch3d.structures import Pointclouds, Meshes
from pytorch3d.renderer import (
look_at_view_transform,
OpenGLPerspectiveCameras,
OpenGLOrthographicCameras,
#SfMPerspectiveCameras,
PointLights,
DirectionalLights,
Materials,
RasterizationSettings,
PointsRasterizationSettings,
MeshRenderer,
PointsRenderer,
MeshRasterizer,
PointsRasterizer,
SoftPhongShader,
NormWeightedCompositor,
BlendParams,
AlphaCompositor,
TexturesVertex,
TexturesUV,
TexturesAtlas
)
class Pt3dRenderer():
def __init__(self, device, texture_size, lookview):
self.raster_settings = RasterizationSettings(
image_size=texture_size,
blur_radius=0.0,
faces_per_pixel=1,
bin_size = None, # this setting controls whether naive or coarse-to-fine rasterization is used
max_faces_per_bin = None # this setting is for coarse rasterization
)
self.lights = PointLights(device=device,ambient_color=((0, 0, 0),),diffuse_color=((1, 1, 1),),specular_color=((0, 0, 0),), location=[[0.0, 0.0, 10.0]])
self.materials = Materials(device=device,ambient_color=((0, 0, 0),),diffuse_color=((1, 1, 1),),specular_color=((0, 0, 0),))
self.lookview=lookview.view(1,3)
self.device=device
def sample(self,normals,angles,triangles,imgs,template_uvs3d,face_project):
#rot=Rotate(R, device=device)
#normals_transformed = rot.transform_normals(normals.repeat(batchsize,1,1))
batchsize=angles.shape[0]
vertexsize=normals.shape[0]
trisize=triangles.shape[0]
RR = euler_angles_to_matrix(angles, "XYZ")
rot=Rotate(RR)
normals_transformed = rot.transform_normals(normals)
coefs = torch.sum(torch.mul(normals_transformed, self.lookview.repeat(batchsize,vertexsize,1)), 2)
ver_visibility = torch.ones(batchsize,vertexsize).cuda()
ver_visibility[coefs < 0] = 0
used_faces=[]
for b in range(batchsize):
visible_veridx = (ver_visibility[b]<=0).nonzero().view(-1)
#print('triangles visible_veridx:',triangles.unsqueeze(-1).shape, unvisible_veridx.shape)
#part trinum x vertexnum for gpu memory
part_num=8
part_size=int(visible_veridx.shape[0]//part_num)
tri_visibility=(~(triangles.unsqueeze(-1) == visible_veridx[:part_size])).any(-1)
for j in range(1,part_num):
if j < part_num-1:
tri_visibility |= (~(triangles.unsqueeze(-1) == visible_veridx[j*part_size:(j+1)*part_size])).any(-1)
else:
tri_visibility |= (~(triangles.unsqueeze(-1) == visible_veridx[j*part_size:])).any(-1)
visible_triidx = (torch.sum(tri_visibility, 1)>0).nonzero().view(-1)
used_faces.append(triangles[visible_triidx])
tex = TexturesUV(verts_uvs=face_project, faces_uvs=used_faces, maps=imgs.permute(0,2,3,1))
mesh = Meshes(
verts=[template_uvs3d]*batchsize, faces=used_faces, textures=tex)
R_, T_ = look_at_view_transform(2.7, torch.zeros(batchsize).cuda(), torch.zeros(batchsize).cuda())
camera = OpenGLOrthographicCameras(device=self.device, R=R_.float(), T=T_.float())
#camera = OpenGLOrthographicCameras(R=R_, T=T_)
renderer = MeshRenderer(
rasterizer=MeshRasterizer(
cameras=camera,
raster_settings=self.raster_settings
),
shader=SoftPhongShader(
device=self.device,
cameras=camera,
blend_params=BlendParams(background_color=(0,0,0))
)
)
uv_images = renderer(mesh)
mask = TexturesUV(verts_uvs=face_project, faces_uvs=used_faces, maps=torch.ones_like(imgs.permute(0,2,3,1)))
mesh_mask = Meshes(
verts=[template_uvs3d]*batchsize, faces=used_faces, textures=mask)
uv_mask = renderer(mesh_mask)
return uv_images,uv_mask
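# Construction sketch (hypothetical tensors and shapes; assumes a CUDA device,
# since sample() calls .cuda() internally):
#   renderer = Pt3dRenderer(device=torch.device("cuda"), texture_size=256,
#                           lookview=torch.tensor([[0.0, 0.0, 1.0]], device="cuda"))
#   uv_images, uv_mask = renderer.sample(normals, angles, triangles, imgs,
#                                        template_uvs3d, face_project)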
|
[
"torch.ones",
"pytorch3d.renderer.PointLights",
"pytorch3d.transforms.Rotate",
"pytorch3d.transforms.euler_angles_to_matrix",
"pytorch3d.renderer.BlendParams",
"pytorch3d.renderer.Materials",
"pytorch3d.structures.Meshes",
"torch.zeros",
"pytorch3d.renderer.MeshRasterizer",
"torch.sum",
"pytorch3d.renderer.RasterizationSettings"
] |
[((910, 1035), 'pytorch3d.renderer.RasterizationSettings', 'RasterizationSettings', ([], {'image_size': 'texture_size', 'blur_radius': '(0.0)', 'faces_per_pixel': '(1)', 'bin_size': 'None', 'max_faces_per_bin': 'None'}), '(image_size=texture_size, blur_radius=0.0,\n faces_per_pixel=1, bin_size=None, max_faces_per_bin=None)\n', (931, 1035), False, 'from pytorch3d.renderer import look_at_view_transform, OpenGLPerspectiveCameras, OpenGLOrthographicCameras, PointLights, DirectionalLights, Materials, RasterizationSettings, PointsRasterizationSettings, MeshRenderer, PointsRenderer, MeshRasterizer, PointsRasterizer, SoftPhongShader, NormWeightedCompositor, BlendParams, AlphaCompositor, TexturesVertex, TexturesUV, TexturesAtlas\n'), ((1251, 1395), 'pytorch3d.renderer.PointLights', 'PointLights', ([], {'device': 'device', 'ambient_color': '((0, 0, 0),)', 'diffuse_color': '((1, 1, 1),)', 'specular_color': '((0, 0, 0),)', 'location': '[[0.0, 0.0, 10.0]]'}), '(device=device, ambient_color=((0, 0, 0),), diffuse_color=((1, 1,\n 1),), specular_color=((0, 0, 0),), location=[[0.0, 0.0, 10.0]])\n', (1262, 1395), False, 'from pytorch3d.renderer import look_at_view_transform, OpenGLPerspectiveCameras, OpenGLOrthographicCameras, PointLights, DirectionalLights, Materials, RasterizationSettings, PointsRasterizationSettings, MeshRenderer, PointsRenderer, MeshRasterizer, PointsRasterizer, SoftPhongShader, NormWeightedCompositor, BlendParams, AlphaCompositor, TexturesVertex, TexturesUV, TexturesAtlas\n'), ((1414, 1528), 'pytorch3d.renderer.Materials', 'Materials', ([], {'device': 'device', 'ambient_color': '((0, 0, 0),)', 'diffuse_color': '((1, 1, 1),)', 'specular_color': '((0, 0, 0),)'}), '(device=device, ambient_color=((0, 0, 0),), diffuse_color=((1, 1, \n 1),), specular_color=((0, 0, 0),))\n', (1423, 1528), False, 'from pytorch3d.renderer import look_at_view_transform, OpenGLPerspectiveCameras, OpenGLOrthographicCameras, PointLights, DirectionalLights, Materials, RasterizationSettings, PointsRasterizationSettings, MeshRenderer, PointsRenderer, MeshRasterizer, PointsRasterizer, SoftPhongShader, NormWeightedCompositor, BlendParams, AlphaCompositor, TexturesVertex, TexturesUV, TexturesAtlas\n'), ((1909, 1946), 'pytorch3d.transforms.euler_angles_to_matrix', 'euler_angles_to_matrix', (['angles', '"""XYZ"""'], {}), "(angles, 'XYZ')\n", (1931, 1946), False, 'from pytorch3d.transforms import euler_angles_to_matrix, matrix_to_euler_angles, Rotate\n'), ((1959, 1969), 'pytorch3d.transforms.Rotate', 'Rotate', (['RR'], {}), '(RR)\n', (1965, 1969), False, 'from pytorch3d.transforms import euler_angles_to_matrix, matrix_to_euler_angles, Rotate\n'), ((3299, 3373), 'pytorch3d.structures.Meshes', 'Meshes', ([], {'verts': '([template_uvs3d] * batchsize)', 'faces': 'used_faces', 'textures': 'tex'}), '(verts=[template_uvs3d] * batchsize, faces=used_faces, textures=tex)\n', (3305, 3373), False, 'from pytorch3d.structures import Pointclouds, Meshes\n'), ((4187, 4262), 'pytorch3d.structures.Meshes', 'Meshes', ([], {'verts': '([template_uvs3d] * batchsize)', 'faces': 'used_faces', 'textures': 'mask'}), '(verts=[template_uvs3d] * batchsize, faces=used_faces, textures=mask)\n', (4193, 4262), False, 'from pytorch3d.structures import Pointclouds, Meshes\n'), ((2163, 2196), 'torch.ones', 'torch.ones', (['batchsize', 'vertexsize'], {}), '(batchsize, vertexsize)\n', (2173, 2196), False, 'import torch\n'), ((3695, 3763), 'pytorch3d.renderer.MeshRasterizer', 'MeshRasterizer', ([], {'cameras': 'camera', 'raster_settings': 
'self.raster_settings'}), '(cameras=camera, raster_settings=self.raster_settings)\n', (3709, 3763), False, 'from pytorch3d.renderer import look_at_view_transform, OpenGLPerspectiveCameras, OpenGLOrthographicCameras, PointLights, DirectionalLights, Materials, RasterizationSettings, PointsRasterizationSettings, MeshRenderer, PointsRenderer, MeshRasterizer, PointsRasterizer, SoftPhongShader, NormWeightedCompositor, BlendParams, AlphaCompositor, TexturesVertex, TexturesUV, TexturesAtlas\n'), ((3430, 3452), 'torch.zeros', 'torch.zeros', (['batchsize'], {}), '(batchsize)\n', (3441, 3452), False, 'import torch\n'), ((3461, 3483), 'torch.zeros', 'torch.zeros', (['batchsize'], {}), '(batchsize)\n', (3472, 3483), False, 'import torch\n'), ((3940, 3979), 'pytorch3d.renderer.BlendParams', 'BlendParams', ([], {'background_color': '(0, 0, 0)'}), '(background_color=(0, 0, 0))\n', (3951, 3979), False, 'from pytorch3d.renderer import look_at_view_transform, OpenGLPerspectiveCameras, OpenGLOrthographicCameras, PointLights, DirectionalLights, Materials, RasterizationSettings, PointsRasterizationSettings, MeshRenderer, PointsRenderer, MeshRasterizer, PointsRasterizer, SoftPhongShader, NormWeightedCompositor, BlendParams, AlphaCompositor, TexturesVertex, TexturesUV, TexturesAtlas\n'), ((3058, 3086), 'torch.sum', 'torch.sum', (['tri_visibility', '(1)'], {}), '(tri_visibility, 1)\n', (3067, 3086), False, 'import torch\n')]
|
import scipy.sparse
import numpy as np
from tectosaur.util.cpp import imp
fast_constraints = imp('tectosaur.fast_constraints')
for k in dir(fast_constraints):
locals()[k] = getattr(fast_constraints, k)
def build_constraint_matrix(cs, n_total_dofs):
rows, cols, vals, rhs_rows, rhs_cols, rhs_vals, rhs_in, n_unique_cs = \
fast_constraints.build_constraint_matrix(cs, n_total_dofs)
n_rows = n_total_dofs
n_cols = n_total_dofs - n_unique_cs
cm = scipy.sparse.csr_matrix((vals, (rows, cols)), shape = (n_rows, n_cols))
rhs_mat = scipy.sparse.csr_matrix((rhs_vals, (rhs_rows, rhs_cols)), shape = (n_rows, len(cs)))
return cm, rhs_mat.dot(rhs_in), rhs_mat
def simple_constraint_matrix(cs, n_cols):
rows = []
cols = []
data = []
rhs = np.zeros((len(cs)))
for i in range(len(cs)):
c = cs[i]
for j in range(len(c.terms)):
rows.append(i)
cols.append(c.terms[j].dof)
data.append(c.terms[j].val)
rhs[i] = c.rhs
return (
scipy.sparse.csr_matrix((data, (rows, cols)), shape = (len(cs), n_cols)),
rhs
)
|
[
"tectosaur.util.cpp.imp"
] |
[((94, 127), 'tectosaur.util.cpp.imp', 'imp', (['"""tectosaur.fast_constraints"""'], {}), "('tectosaur.fast_constraints')\n", (97, 127), False, 'from tectosaur.util.cpp import imp\n')]
|
from scipy.misc import comb
def exp(p, n):
total = 0.0
for k in range(n+1):
total += comb(n, k, exact=False) * p**k * (1-p) ** (n-k)
return total
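# Sanity note: by the binomial theorem the sum above equals (p + (1 - p))**n = 1,
# so each check in main() should print 1.000000 up to floating-point error.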
def main():
for p in [0.3, 0.75, 0.8, 1.0, 0.0, 0.5]:
for n in range(1, 20):
print('Checking n=%d, p=%f' % (n, p))
print('Result: %f' % (exp(p, n)))
if __name__ == '__main__':
main()
|
[
"scipy.misc.comb"
] |
[((103, 126), 'scipy.misc.comb', 'comb', (['n', 'k'], {'exact': '(False)'}), '(n, k, exact=False)\n', (107, 126), False, 'from scipy.misc import comb\n')]
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetInstanceResult',
'AwaitableGetInstanceResult',
'get_instance',
'get_instance_output',
]
@pulumi.output_type
class GetInstanceResult:
def __init__(__self__, create_time=None, description=None, etag=None, file_shares=None, kms_key_name=None, labels=None, name=None, networks=None, satisfies_pzs=None, state=None, status_message=None, suspension_reasons=None, tier=None):
if create_time and not isinstance(create_time, str):
raise TypeError("Expected argument 'create_time' to be a str")
pulumi.set(__self__, "create_time", create_time)
if description and not isinstance(description, str):
raise TypeError("Expected argument 'description' to be a str")
pulumi.set(__self__, "description", description)
if etag and not isinstance(etag, str):
raise TypeError("Expected argument 'etag' to be a str")
pulumi.set(__self__, "etag", etag)
if file_shares and not isinstance(file_shares, list):
raise TypeError("Expected argument 'file_shares' to be a list")
pulumi.set(__self__, "file_shares", file_shares)
if kms_key_name and not isinstance(kms_key_name, str):
raise TypeError("Expected argument 'kms_key_name' to be a str")
pulumi.set(__self__, "kms_key_name", kms_key_name)
if labels and not isinstance(labels, dict):
raise TypeError("Expected argument 'labels' to be a dict")
pulumi.set(__self__, "labels", labels)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if networks and not isinstance(networks, list):
raise TypeError("Expected argument 'networks' to be a list")
pulumi.set(__self__, "networks", networks)
if satisfies_pzs and not isinstance(satisfies_pzs, bool):
raise TypeError("Expected argument 'satisfies_pzs' to be a bool")
pulumi.set(__self__, "satisfies_pzs", satisfies_pzs)
if state and not isinstance(state, str):
raise TypeError("Expected argument 'state' to be a str")
pulumi.set(__self__, "state", state)
if status_message and not isinstance(status_message, str):
raise TypeError("Expected argument 'status_message' to be a str")
pulumi.set(__self__, "status_message", status_message)
if suspension_reasons and not isinstance(suspension_reasons, list):
raise TypeError("Expected argument 'suspension_reasons' to be a list")
pulumi.set(__self__, "suspension_reasons", suspension_reasons)
if tier and not isinstance(tier, str):
raise TypeError("Expected argument 'tier' to be a str")
pulumi.set(__self__, "tier", tier)
@property
@pulumi.getter(name="createTime")
def create_time(self) -> str:
"""
The time when the instance was created.
"""
return pulumi.get(self, "create_time")
@property
@pulumi.getter
def description(self) -> str:
"""
The description of the instance (2048 characters or less).
"""
return pulumi.get(self, "description")
@property
@pulumi.getter
def etag(self) -> str:
"""
Server-specified ETag for the instance resource to prevent simultaneous updates from overwriting each other.
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter(name="fileShares")
def file_shares(self) -> Sequence['outputs.FileShareConfigResponse']:
"""
File system shares on the instance. For this version, only a single file share is supported.
"""
return pulumi.get(self, "file_shares")
@property
@pulumi.getter(name="kmsKeyName")
def kms_key_name(self) -> str:
"""
KMS key name used for data encryption.
"""
return pulumi.get(self, "kms_key_name")
@property
@pulumi.getter
def labels(self) -> Mapping[str, str]:
"""
Resource labels to represent user provided metadata.
"""
return pulumi.get(self, "labels")
@property
@pulumi.getter
def name(self) -> str:
"""
The resource name of the instance, in the format `projects/{project_id}/locations/{location_id}/instances/{instance_id}`.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def networks(self) -> Sequence['outputs.NetworkConfigResponse']:
"""
VPC networks to which the instance is connected. For this version, only a single network is supported.
"""
return pulumi.get(self, "networks")
@property
@pulumi.getter(name="satisfiesPzs")
def satisfies_pzs(self) -> bool:
"""
Reserved for future use.
"""
return pulumi.get(self, "satisfies_pzs")
@property
@pulumi.getter
def state(self) -> str:
"""
The instance state.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="statusMessage")
def status_message(self) -> str:
"""
Additional information about the instance state, if available.
"""
return pulumi.get(self, "status_message")
@property
@pulumi.getter(name="suspensionReasons")
def suspension_reasons(self) -> Sequence[str]:
"""
        This field indicates all the reasons the instance is in the "SUSPENDED" state.
"""
return pulumi.get(self, "suspension_reasons")
@property
@pulumi.getter
def tier(self) -> str:
"""
The service tier of the instance.
"""
return pulumi.get(self, "tier")
class AwaitableGetInstanceResult(GetInstanceResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetInstanceResult(
create_time=self.create_time,
description=self.description,
etag=self.etag,
file_shares=self.file_shares,
kms_key_name=self.kms_key_name,
labels=self.labels,
name=self.name,
networks=self.networks,
satisfies_pzs=self.satisfies_pzs,
state=self.state,
status_message=self.status_message,
suspension_reasons=self.suspension_reasons,
tier=self.tier)
def get_instance(instance_id: Optional[str] = None,
location: Optional[str] = None,
project: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetInstanceResult:
"""
Gets the details of a specific instance.
"""
__args__ = dict()
__args__['instanceId'] = instance_id
__args__['location'] = location
__args__['project'] = project
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('google-native:file/v1beta1:getInstance', __args__, opts=opts, typ=GetInstanceResult).value
return AwaitableGetInstanceResult(
create_time=__ret__.create_time,
description=__ret__.description,
etag=__ret__.etag,
file_shares=__ret__.file_shares,
kms_key_name=__ret__.kms_key_name,
labels=__ret__.labels,
name=__ret__.name,
networks=__ret__.networks,
satisfies_pzs=__ret__.satisfies_pzs,
state=__ret__.state,
status_message=__ret__.status_message,
suspension_reasons=__ret__.suspension_reasons,
tier=__ret__.tier)
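# Usage sketch (hypothetical resource identifiers):
#   instance = get_instance(instance_id="my-filestore", location="us-central1-a",
#                           project="my-project")
#   pulumi.export("instanceState", instance.state)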
@_utilities.lift_output_func(get_instance)
def get_instance_output(instance_id: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[Optional[str]]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetInstanceResult]:
"""
Gets the details of a specific instance.
"""
...
|
[
"pulumi.get",
"pulumi.getter",
"pulumi.set",
"pulumi.InvokeOptions",
"pulumi.runtime.invoke"
] |
[((3175, 3207), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""createTime"""'}), "(name='createTime')\n", (3188, 3207), False, 'import pulumi\n'), ((3829, 3861), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""fileShares"""'}), "(name='fileShares')\n", (3842, 3861), False, 'import pulumi\n'), ((4128, 4160), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""kmsKeyName"""'}), "(name='kmsKeyName')\n", (4141, 4160), False, 'import pulumi\n'), ((5076, 5110), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""satisfiesPzs"""'}), "(name='satisfiesPzs')\n", (5089, 5110), False, 'import pulumi\n'), ((5429, 5464), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""statusMessage"""'}), "(name='statusMessage')\n", (5442, 5464), False, 'import pulumi\n'), ((5667, 5706), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""suspensionReasons"""'}), "(name='suspensionReasons')\n", (5680, 5706), False, 'import pulumi\n'), ((890, 938), 'pulumi.set', 'pulumi.set', (['__self__', '"""create_time"""', 'create_time'], {}), "(__self__, 'create_time', create_time)\n", (900, 938), False, 'import pulumi\n'), ((1083, 1131), 'pulumi.set', 'pulumi.set', (['__self__', '"""description"""', 'description'], {}), "(__self__, 'description', description)\n", (1093, 1131), False, 'import pulumi\n'), ((1255, 1289), 'pulumi.set', 'pulumi.set', (['__self__', '"""etag"""', 'etag'], {}), "(__self__, 'etag', etag)\n", (1265, 1289), False, 'import pulumi\n'), ((1436, 1484), 'pulumi.set', 'pulumi.set', (['__self__', '"""file_shares"""', 'file_shares'], {}), "(__self__, 'file_shares', file_shares)\n", (1446, 1484), False, 'import pulumi\n'), ((1632, 1682), 'pulumi.set', 'pulumi.set', (['__self__', '"""kms_key_name"""', 'kms_key_name'], {}), "(__self__, 'kms_key_name', kms_key_name)\n", (1642, 1682), False, 'import pulumi\n'), ((1814, 1852), 'pulumi.set', 'pulumi.set', (['__self__', '"""labels"""', 'labels'], {}), "(__self__, 'labels', labels)\n", (1824, 1852), False, 'import pulumi\n'), ((1976, 2010), 'pulumi.set', 'pulumi.set', (['__self__', '"""name"""', 'name'], {}), "(__self__, 'name', name)\n", (1986, 2010), False, 'import pulumi\n'), ((2148, 2190), 'pulumi.set', 'pulumi.set', (['__self__', '"""networks"""', 'networks'], {}), "(__self__, 'networks', networks)\n", (2158, 2190), False, 'import pulumi\n'), ((2343, 2395), 'pulumi.set', 'pulumi.set', (['__self__', '"""satisfies_pzs"""', 'satisfies_pzs'], {}), "(__self__, 'satisfies_pzs', satisfies_pzs)\n", (2353, 2395), False, 'import pulumi\n'), ((2522, 2558), 'pulumi.set', 'pulumi.set', (['__self__', '"""state"""', 'state'], {}), "(__self__, 'state', state)\n", (2532, 2558), False, 'import pulumi\n'), ((2712, 2766), 'pulumi.set', 'pulumi.set', (['__self__', '"""status_message"""', 'status_message'], {}), "(__self__, 'status_message', status_message)\n", (2722, 2766), False, 'import pulumi\n'), ((2934, 2996), 'pulumi.set', 'pulumi.set', (['__self__', '"""suspension_reasons"""', 'suspension_reasons'], {}), "(__self__, 'suspension_reasons', suspension_reasons)\n", (2944, 2996), False, 'import pulumi\n'), ((3120, 3154), 'pulumi.set', 'pulumi.set', (['__self__', '"""tier"""', 'tier'], {}), "(__self__, 'tier', tier)\n", (3130, 3154), False, 'import pulumi\n'), ((3329, 3360), 'pulumi.get', 'pulumi.get', (['self', '"""create_time"""'], {}), "(self, 'create_time')\n", (3339, 3360), False, 'import pulumi\n'), ((3535, 3566), 'pulumi.get', 'pulumi.get', (['self', '"""description"""'], {}), "(self, 'description')\n", (3545, 3566), False, 'import pulumi\n'), ((3784, 3808), 
'pulumi.get', 'pulumi.get', (['self', '"""etag"""'], {}), "(self, 'etag')\n", (3794, 3808), False, 'import pulumi\n'), ((4076, 4107), 'pulumi.get', 'pulumi.get', (['self', '"""file_shares"""'], {}), "(self, 'file_shares')\n", (4086, 4107), False, 'import pulumi\n'), ((4282, 4314), 'pulumi.get', 'pulumi.get', (['self', '"""kms_key_name"""'], {}), "(self, 'kms_key_name')\n", (4292, 4314), False, 'import pulumi\n'), ((4492, 4518), 'pulumi.get', 'pulumi.get', (['self', '"""labels"""'], {}), "(self, 'labels')\n", (4502, 4518), False, 'import pulumi\n'), ((4749, 4773), 'pulumi.get', 'pulumi.get', (['self', '"""name"""'], {}), "(self, 'name')\n", (4759, 4773), False, 'import pulumi\n'), ((5027, 5055), 'pulumi.get', 'pulumi.get', (['self', '"""networks"""'], {}), "(self, 'networks')\n", (5037, 5055), False, 'import pulumi\n'), ((5220, 5253), 'pulumi.get', 'pulumi.get', (['self', '"""satisfies_pzs"""'], {}), "(self, 'satisfies_pzs')\n", (5230, 5253), False, 'import pulumi\n'), ((5383, 5408), 'pulumi.get', 'pulumi.get', (['self', '"""state"""'], {}), "(self, 'state')\n", (5393, 5408), False, 'import pulumi\n'), ((5612, 5646), 'pulumi.get', 'pulumi.get', (['self', '"""status_message"""'], {}), "(self, 'status_message')\n", (5622, 5646), False, 'import pulumi\n'), ((5875, 5913), 'pulumi.get', 'pulumi.get', (['self', '"""suspension_reasons"""'], {}), "(self, 'suspension_reasons')\n", (5885, 5913), False, 'import pulumi\n'), ((6056, 6080), 'pulumi.get', 'pulumi.get', (['self', '"""tier"""'], {}), "(self, 'tier')\n", (6066, 6080), False, 'import pulumi\n'), ((7254, 7276), 'pulumi.InvokeOptions', 'pulumi.InvokeOptions', ([], {}), '()\n', (7274, 7276), False, 'import pulumi\n'), ((7368, 7479), 'pulumi.runtime.invoke', 'pulumi.runtime.invoke', (['"""google-native:file/v1beta1:getInstance"""', '__args__'], {'opts': 'opts', 'typ': 'GetInstanceResult'}), "('google-native:file/v1beta1:getInstance', __args__,\n opts=opts, typ=GetInstanceResult)\n", (7389, 7479), False, 'import pulumi\n')]
|
#!/usr/bin/env python3
# Include the directory up in the path:
import sys
sys.path.insert(0,'..')
# Import stocker:
import stocker
# Main:
if __name__== "__main__":
# Let's save in a combination of stocks and bonds for retirement. Weight the
# initial portfolio towards 100% stocks for 15 years, weighted 70% US and
  # 30% international. We will contribute 16000 annually with a 2% increase
# in that contribution annually. Assume the default inflation rate of 3.5%.
all_stocks_portfolio = stocker.Portfolio(
name="Stocks", \
value=0.0, \
positions=[stocker.US_Stocks(), stocker.International_Stocks()], \
weights=[7, 3]
)
all_stocks_phase = stocker.Scenario(
name="Initial Accumulation", \
portfolio=all_stocks_portfolio, \
num_years=15, \
annual_contribution=16000, \
annual_contribution_increase_perc=2.0
)
# The next phase of our retirement accumulation will start at 100% stocks
# but gradually transition to a 50/50 stocks/bonds portfolio by retirement.
  # This phase consists of 15 years more accumulation, with an annual contribution
# of 20k, increasing 2% each year.
end_weights = [7, 3, 7, 3]
stocks_and_bonds_portfolio = stocker.Portfolio(
name="Stocks and Bonds", \
positions=[stocker.US_Stocks(), stocker.International_Stocks(), stocker.US_Bonds(), stocker.International_Bonds()], \
weights=[7, 3, 0, 0]
)
stocks_and_bonds_phase = stocker.Scenario(
name="Secondary Accumulation", \
portfolio=stocks_and_bonds_portfolio, \
num_years=15, \
annual_contribution=20000, \
annual_contribution_increase_perc=2.0, \
end_weights=end_weights
)
# Combine these two accumulation phases together using a piecewise scenario:
retirement_accumulation = stocker.Piecewise_Scenario("Retirement Accumulation", [all_stocks_phase, stocks_and_bonds_phase])
# Run the savings scenario once and print and plot the results:
retirement_accumulation.run()
print(retirement_accumulation.results())
retirement_accumulation.plot(smooth=False)
# Run a monte carlo simulation of this scenario with 400 iterations:
mc = stocker.Monte_Carlo(retirement_accumulation)
mc.run(n=400)
# Print the results of the monte carlo simulation, showing the probablility
# of hitting a 1M dollar accumulation goal:
print(mc.results(goal=1000000))
# Create the monte carlo plots:
mc.histogram()
mc.plot(smooth=True)
# Show all the stocker plots:
stocker.show_plots()
|
[
"stocker.Piecewise_Scenario",
"sys.path.insert",
"stocker.Monte_Carlo",
"stocker.US_Bonds",
"stocker.International_Bonds",
"stocker.show_plots",
"stocker.International_Stocks",
"stocker.Scenario",
"stocker.US_Stocks"
] |
[((75, 99), 'sys.path.insert', 'sys.path.insert', (['(0)', '""".."""'], {}), "(0, '..')\n", (90, 99), False, 'import sys\n'), ((676, 842), 'stocker.Scenario', 'stocker.Scenario', ([], {'name': '"""Initial Accumulation"""', 'portfolio': 'all_stocks_portfolio', 'num_years': '(15)', 'annual_contribution': '(16000)', 'annual_contribution_increase_perc': '(2.0)'}), "(name='Initial Accumulation', portfolio=\n all_stocks_portfolio, num_years=15, annual_contribution=16000,\n annual_contribution_increase_perc=2.0)\n", (692, 842), False, 'import stocker\n'), ((1429, 1628), 'stocker.Scenario', 'stocker.Scenario', ([], {'name': '"""Secondary Accumulation"""', 'portfolio': 'stocks_and_bonds_portfolio', 'num_years': '(15)', 'annual_contribution': '(20000)', 'annual_contribution_increase_perc': '(2.0)', 'end_weights': 'end_weights'}), "(name='Secondary Accumulation', portfolio=\n stocks_and_bonds_portfolio, num_years=15, annual_contribution=20000,\n annual_contribution_increase_perc=2.0, end_weights=end_weights)\n", (1445, 1628), False, 'import stocker\n'), ((1766, 1867), 'stocker.Piecewise_Scenario', 'stocker.Piecewise_Scenario', (['"""Retirement Accumulation"""', '[all_stocks_phase, stocks_and_bonds_phase]'], {}), "('Retirement Accumulation', [all_stocks_phase,\n stocks_and_bonds_phase])\n", (1792, 1867), False, 'import stocker\n'), ((2130, 2174), 'stocker.Monte_Carlo', 'stocker.Monte_Carlo', (['retirement_accumulation'], {}), '(retirement_accumulation)\n', (2149, 2174), False, 'import stocker\n'), ((2460, 2480), 'stocker.show_plots', 'stocker.show_plots', ([], {}), '()\n', (2478, 2480), False, 'import stocker\n'), ((576, 595), 'stocker.US_Stocks', 'stocker.US_Stocks', ([], {}), '()\n', (593, 595), False, 'import stocker\n'), ((597, 627), 'stocker.International_Stocks', 'stocker.International_Stocks', ([], {}), '()\n', (625, 627), False, 'import stocker\n'), ((1266, 1285), 'stocker.US_Stocks', 'stocker.US_Stocks', ([], {}), '()\n', (1283, 1285), False, 'import stocker\n'), ((1287, 1317), 'stocker.International_Stocks', 'stocker.International_Stocks', ([], {}), '()\n', (1315, 1317), False, 'import stocker\n'), ((1319, 1337), 'stocker.US_Bonds', 'stocker.US_Bonds', ([], {}), '()\n', (1335, 1337), False, 'import stocker\n'), ((1339, 1368), 'stocker.International_Bonds', 'stocker.International_Bonds', ([], {}), '()\n', (1366, 1368), False, 'import stocker\n')]
|
#!/usr/bin/env python
# coding: utf-8
import logging
from pykit import rangeset
from pykit import txutil
from .accessor import KeyValue
from .accessor import Value
from .status import COMMITTED
from .status import PURGED
from .status import STATUS
logger = logging.getLogger(__name__)
class StorageHelper(object):
max_value_history = 16 # keeps the last n modifications in a record
max_journal_history = 1024 # keeps the last n committed journal
conflicterror = None
def apply_record(self, txid, key, value):
# the data in underlying storage is multi-version record:
# [
# [<txid>, <value>]
# [<txid>, <value>]
# ...
# ]
for curr in txutil.cas_loop(self.record.get,
self.record.set_or_create,
args=(key, ),
conflicterror=self.conflicterror):
max_txid = curr.v[-1][0]
if max_txid >= txid:
return False
curr.v.append((txid, value))
while len(curr.v) > self.max_value_history:
curr.v.pop(0)
return True
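    # Example (hypothetical txids/values): with an existing history [[1, 'foo']],
    # apply_record(3, key, 'bar') appends (3, 'bar') and returns True, while a
    # later apply_record(2, key, ...) returns False because the newest txid is
    # already >= 2, keeping writes monotonic in txid.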
def add_to_txidset(self, status, txid):
if status not in STATUS:
raise KeyError('invalid status: ' + repr(status))
logger.info('add {status}:{txid}'
' to txidset'.format(
status=status, txid=txid))
for curr in txutil.cas_loop(self.txidset.get,
self.txidset.set,
conflicterror=self.conflicterror):
for st in STATUS:
if st not in curr.v:
curr.v[st] = rangeset.RangeSet([])
curr.v[status].add([txid, txid + 1])
self.purge(curr.v)
def purge(self, sets):
topurge = rangeset.RangeSet()
committed = sets[COMMITTED]
l = committed.length()
while l > self.max_journal_history:
first = committed[0]
# a range contains a single txid
r = rangeset.RangeSet([[first[0], first[0] + 1]])
topurge.add(r[0])
committed = rangeset.substract(committed, r)
l -= 1
for rng in topurge:
for txid in range(rng[0], rng[1]):
self.journal.safe_delete(txid)
sets[PURGED] = rangeset.union(sets[PURGED], topurge)
sets[COMMITTED] = rangeset.substract(sets[COMMITTED], topurge)
class Storage(StorageHelper):
record = KeyValue()
journal = KeyValue()
txidset = Value()
def acquire_key_loop(self, txid, key): raise TypeError('unimplemented')
def try_release_key(self, txid, key): raise TypeError('unimplemented')
|
[
"pykit.txutil.cas_loop",
"pykit.rangeset.RangeSet",
"pykit.rangeset.substract",
"logging.getLogger",
"pykit.rangeset.union"
] |
[((261, 288), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (278, 288), False, 'import logging\n'), ((732, 842), 'pykit.txutil.cas_loop', 'txutil.cas_loop', (['self.record.get', 'self.record.set_or_create'], {'args': '(key,)', 'conflicterror': 'self.conflicterror'}), '(self.record.get, self.record.set_or_create, args=(key,),\n conflicterror=self.conflicterror)\n', (747, 842), False, 'from pykit import txutil\n'), ((1497, 1587), 'pykit.txutil.cas_loop', 'txutil.cas_loop', (['self.txidset.get', 'self.txidset.set'], {'conflicterror': 'self.conflicterror'}), '(self.txidset.get, self.txidset.set, conflicterror=self.\n conflicterror)\n', (1512, 1587), False, 'from pykit import txutil\n'), ((1908, 1927), 'pykit.rangeset.RangeSet', 'rangeset.RangeSet', ([], {}), '()\n', (1925, 1927), False, 'from pykit import rangeset\n'), ((2438, 2475), 'pykit.rangeset.union', 'rangeset.union', (['sets[PURGED]', 'topurge'], {}), '(sets[PURGED], topurge)\n', (2452, 2475), False, 'from pykit import rangeset\n'), ((2502, 2546), 'pykit.rangeset.substract', 'rangeset.substract', (['sets[COMMITTED]', 'topurge'], {}), '(sets[COMMITTED], topurge)\n', (2520, 2546), False, 'from pykit import rangeset\n'), ((2137, 2182), 'pykit.rangeset.RangeSet', 'rangeset.RangeSet', (['[[first[0], first[0] + 1]]'], {}), '([[first[0], first[0] + 1]])\n', (2154, 2182), False, 'from pykit import rangeset\n'), ((2238, 2270), 'pykit.rangeset.substract', 'rangeset.substract', (['committed', 'r'], {}), '(committed, r)\n', (2256, 2270), False, 'from pykit import rangeset\n'), ((1757, 1778), 'pykit.rangeset.RangeSet', 'rangeset.RangeSet', (['[]'], {}), '([])\n', (1774, 1778), False, 'from pykit import rangeset\n')]
|
# -*- coding: utf-8 -*-
"""
@author: jiankaiwang
@version: 0.0.1
@date: 2020/03
@desc: The script implements the data loader of the MOT challenge.
@note:
Style: pylint_2015
@reference:
"""
import os
import logging
import pandas as pd
import requests
import tqdm
import zipfile
import argparse
# In[]
MOT_ID_LABEL = {1: "Pedestrian", 7: "Static_Person"}
MOT_LABEL_ID = {"Pedestrian": 1, "Static_Person": 7}
# In[]:
def formatBBoxAndVis(dataframe, is_dict=False):
"""formatBBoxAndVis keeps the bbox information and its visibility per frames.
Args:
dataframe: the pandas data frame
is_dict: using the frame id as the key in the dictionary
Returns:
    frameBBoxes: a list with one entry per frame; each detection in a frame is a
                 list of [x1, y1, width, height, visible], where
                 visible also serves as the probability or confidence score of
                 the object
"""
frameBBoxes = []
fids = list(dataframe["fid"].unique())
for fid in fids:
tmp = dataframe[dataframe["fid"] == fid]
frameBBoxes.append(tmp[["bX", "bY", "bW", "bH", "visible"]].values.tolist())
if is_dict:
return dict(zip(fids, frameBBoxes))
return frameBBoxes
# In[]
def formatForMetrics(dataframe, is_dict=False):
"""formatForMetrics keeps the bbox information, its visibility and uid per frames.
Args:
dataframe: the pandas data frame
is_dict: using the frame id as the key in the dictionary
Returns:
    frameBBoxes: a list with one entry per frame; each detection in a frame is a
                 list of [x1, y1, width, height, visible, uid], where
                 visible also serves as the probability or confidence score of
                 the object
"""
frameBBoxes = []
fids = list(dataframe["fid"].unique())
for fid in fids:
tmp = dataframe[dataframe["fid"] == fid]
frameBBoxes.append(tmp[["bX", "bY", "bW", "bH", "visible", "uid"]].values.tolist())
if is_dict:
return dict(zip(fids, frameBBoxes))
return frameBBoxes
# In[]
def maybeDownload(name="mot17det", src=None, target=os.path.join("/","tmp"),
uncompressed=True):
"""maybeDownload: Maybe download the MOT17Det dataset from the official datasets
    to the local machine.
Args:
name (primary): the dataset name
    src: the source URL; provide either name or src
target: the local directory
    uncompressed: whether to extract the downloaded archive or not
  Returns:
status: 0 (success) or Exception (failed)
"""
assert os.path.exists(target), "No such folder exists."
if name or (not src):
availableURL = {"mot17det":
["https://motchallenge.net/data/MOT17DetLabels.zip",
"https://motchallenge.net/data/MOT17Det.zip"]}
if name not in list(availableURL.keys()):
raise ValueError("Available datasets: {}".format(list(availableURL.keys())))
src = availableURL["mot17det"]
logging.info("Download source: {}".format(src))
if type(src) == str: src = [src]
for urlIdx in tqdm.trange(len(src)):
url = src[urlIdx]
fname = os.path.basename(url)
folderName, fileType = fname.split('.')
# the compressed file path
filePath = os.path.join(target, fname)
# download the compressed first
if os.path.exists(filePath):
logging.warning("{} existed.".format(filePath))
else:
logging.warning("Downloading {} ...".format(url))
# change to wget tool on the shell
res = requests.get(url, allow_redirects=True)
if res.status_code != 200:
logging.error("Download {} failed.".format(url))
continue
with open(filePath, "wb") as fout:
fout.write(res.content)
# uncompress the file
if uncompressed:
uncompPath = os.path.join(target, folderName)
assert not os.path.exists(uncompPath), \
"The folder {} exists. Please delete it first.".format(uncompPath)
try:
os.mkdir(uncompPath)
logging.warning("Created a folder {}.".format(uncompPath))
except Exception as e:
raise Exception("Can't create the folder {}. ({})".format(uncompPath, e))
allowedCompressedType = ["zip"]
if fileType not in allowedCompressedType:
raise ValueError("Available compressed type: {}".format(allowedCompressedType))
if fileType == "zip":
with zipfile.ZipFile(filePath, 'r') as fin:
fin.extractall(uncompPath)
logging.warning("Compressed to folder {}.".format(uncompPath))
return 0
# In[]:
def loadLabel(src, is_path=True, load_Pedestrian=True, load_Static_Person=True,
visible_thresholde=0, format_style="onlybbox"):
"""LoadLabel: Load a label file in the csv format.
Args:
src: the MOT label file path (available when is_path is True)
is_path: True or False for whether the src is the file path or not
load_Pedestrian: whether to load the pedestrian data or not
load_Static_Person: whether to load the statuc person data or not
visible_thresholde: the threshold for filtering the invisible person data
format_style: provides different styles in the lists,
"onlybbox" (func: formatBBoxAndVis), "onlybbox_dict" (func: formatBBoxAndVis),
"metrics" (func: formatForMetrics), "metrics_dict" (func: formatForMetrics)
Returns:
objects_in_frames: a list contains the person detection information per frames
"""
df = src
if is_path:
df = pd.read_csv(src, header=None)
df.columns = ["fid", "uid", "bX", "bY", "bW", "bH", "conf", "class", "visible"]
df_persons = df[((df["class"] == MOT_LABEL_ID["Pedestrian"]) & load_Pedestrian) | \
((df["class"] == MOT_LABEL_ID["Static_Person"]) & load_Static_Person)]
if visible_thresholde:
df_persons = df_persons[df_persons["visible"] >= visible_thresholde]
if format_style[:8] == "onlybbox":
if format_style[-4:] == "dict":
return formatBBoxAndVis(df_persons, is_dict=True), df_persons
else:
# format_style == "onlybbox"
return formatBBoxAndVis(df_persons), df_persons
elif format_style[:7] == "metrics":
if format_style[-4:] == "dict":
return formatForMetrics(df_persons, is_dict=True), df_persons
else:
      # format_style == "metrics"
return formatForMetrics(df_persons), df_persons
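# Usage sketch (hypothetical path; assumes the usual MOTChallenge layout of
# <sequence>/gt/gt.txt inside the extracted label archive):
#   frames, df = loadLabel("/tmp/MOT17DetLabels/train/MOT17-02/gt/gt.txt",
#                          visible_thresholde=0.25, format_style="onlybbox_dict")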
# In[]
if __name__ == "__main__":
logging.basicConfig(level=logging.INFO)
# parsing args for maybeDownload
parser = argparse.ArgumentParser()
parser.add_argument("--name", type=str, default="mot17det")
parser.add_argument("--src", type=str, default=None)
parser.add_argument("--target", type=str, default="/tmp")
parser.add_argument("--uncompressed", type=int, default=1)
args = parser.parse_args()
maybeDownload(name=args.name)
|
[
"os.mkdir",
"zipfile.ZipFile",
"argparse.ArgumentParser",
"logging.basicConfig",
"os.path.basename",
"pandas.read_csv",
"os.path.exists",
"requests.get",
"os.path.join"
] |
[((2092, 2116), 'os.path.join', 'os.path.join', (['"""/"""', '"""tmp"""'], {}), "('/', 'tmp')\n", (2104, 2116), False, 'import os\n'), ((2536, 2558), 'os.path.exists', 'os.path.exists', (['target'], {}), '(target)\n', (2550, 2558), False, 'import os\n'), ((6380, 6419), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (6399, 6419), False, 'import logging\n'), ((6467, 6492), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (6490, 6492), False, 'import argparse\n'), ((3107, 3128), 'os.path.basename', 'os.path.basename', (['url'], {}), '(url)\n', (3123, 3128), False, 'import os\n'), ((3220, 3247), 'os.path.join', 'os.path.join', (['target', 'fname'], {}), '(target, fname)\n', (3232, 3247), False, 'import os\n'), ((3292, 3316), 'os.path.exists', 'os.path.exists', (['filePath'], {}), '(filePath)\n', (3306, 3316), False, 'import os\n'), ((5474, 5503), 'pandas.read_csv', 'pd.read_csv', (['src'], {'header': 'None'}), '(src, header=None)\n', (5485, 5503), True, 'import pandas as pd\n'), ((3492, 3531), 'requests.get', 'requests.get', (['url'], {'allow_redirects': '(True)'}), '(url, allow_redirects=True)\n', (3504, 3531), False, 'import requests\n'), ((3779, 3811), 'os.path.join', 'os.path.join', (['target', 'folderName'], {}), '(target, folderName)\n', (3791, 3811), False, 'import os\n'), ((3829, 3855), 'os.path.exists', 'os.path.exists', (['uncompPath'], {}), '(uncompPath)\n', (3843, 3855), False, 'import os\n'), ((3953, 3973), 'os.mkdir', 'os.mkdir', (['uncompPath'], {}), '(uncompPath)\n', (3961, 3973), False, 'import os\n'), ((4368, 4398), 'zipfile.ZipFile', 'zipfile.ZipFile', (['filePath', '"""r"""'], {}), "(filePath, 'r')\n", (4383, 4398), False, 'import zipfile\n')]
|
import logging
import sys
import numpy as np
from tensorflow.keras import backend as K
from tensorflow.keras import layers
from tensorflow.keras.layers import (
AveragePooling2D,
BatchNormalization,
Conv2D,
MaxPooling2D,
SeparableConv2D,
)
from tensorflow.keras.models import Model
sys.setrecursionlimit(2 ** 20)
np.random.seed(2 ** 10)
class SSR_net:
def __init__(self, image_size, stage_num, lambda_local, lambda_d):
        if K.image_data_format() == "channels_first":
            logging.debug("image_data_format = 'channels_first'")
            self._channel_axis = 1
            self._input_shape = (3, image_size, image_size)
        else:
            logging.debug("image_data_format = 'channels_last'")
            self._channel_axis = -1
            self._input_shape = (image_size, image_size, 3)
self.stage_num = stage_num
self.lambda_local = lambda_local
self.lambda_d = lambda_d
def __call__(self):
logging.debug("Creating model...")
inputs = layers.Input(shape=self._input_shape)
# -------------------------------------------------------------------------------------------------------------------------
x = Conv2D(32, (3, 3))(inputs)
x = BatchNormalization(axis=self._channel_axis)(x)
x = layers.Activation("relu")(x)
x_layer1 = AveragePooling2D(2, 2)(x)
x = Conv2D(32, (3, 3))(x_layer1)
x = BatchNormalization(axis=self._channel_axis)(x)
x = layers.Activation("relu")(x)
x_layer2 = AveragePooling2D(2, 2)(x)
x = Conv2D(32, (3, 3))(x_layer2)
x = BatchNormalization(axis=self._channel_axis)(x)
x = layers.Activation("relu")(x)
x_layer3 = AveragePooling2D(2, 2)(x)
x = Conv2D(32, (3, 3))(x_layer3)
x = BatchNormalization(axis=self._channel_axis)(x)
x = layers.Activation("relu")(x)
# -------------------------------------------------------------------------------------------------------------------------
s = Conv2D(16, (3, 3))(inputs)
s = BatchNormalization(axis=self._channel_axis)(s)
s = layers.Activation("tanh")(s)
s_layer1 = MaxPooling2D(2, 2)(s)
s = Conv2D(16, (3, 3))(s_layer1)
s = BatchNormalization(axis=self._channel_axis)(s)
s = layers.Activation("tanh")(s)
s_layer2 = MaxPooling2D(2, 2)(s)
s = Conv2D(16, (3, 3))(s_layer2)
s = BatchNormalization(axis=self._channel_axis)(s)
s = layers.Activation("tanh")(s)
s_layer3 = MaxPooling2D(2, 2)(s)
s = Conv2D(16, (3, 3))(s_layer3)
s = BatchNormalization(axis=self._channel_axis)(s)
s = layers.Activation("tanh")(s)
# -------------------------------------------------------------------------------------------------------------------------
# Classifier block
s_layer4 = Conv2D(10, (1, 1), activation="relu")(s)
s_layer4 = layers.Flatten()(s_layer4)
s_layer4_mix = layers.Dropout(0.2)(s_layer4)
s_layer4_mix = layers.Dense(units=self.stage_num[0], activation="relu")(
s_layer4_mix
)
x_layer4 = Conv2D(10, (1, 1), activation="relu")(x)
x_layer4 = layers.Flatten()(x_layer4)
x_layer4_mix = layers.Dropout(0.2)(x_layer4)
x_layer4_mix = layers.Dense(units=self.stage_num[0], activation="relu")(
x_layer4_mix
)
feat_a_s1_pre = layers.Multiply()([s_layer4, x_layer4])
delta_s1 = layers.Dense(1, activation="tanh", name="delta_s1")(feat_a_s1_pre)
feat_a_s1 = layers.Multiply()([s_layer4_mix, x_layer4_mix])
feat_a_s1 = layers.Dense(2 * self.stage_num[0], activation="relu")(feat_a_s1)
pred_a_s1 = layers.Dense(
units=self.stage_num[0], activation="relu", name="pred_age_stage1"
)(feat_a_s1)
# feat_local_s1 = layers.Lambda(lambda x: x/10)(feat_a_s1)
# feat_a_s1_local = Dropout(0.2)(pred_a_s1)
local_s1 = layers.Dense(
units=self.stage_num[0], activation="tanh", name="local_delta_stage1",
)(feat_a_s1)
# -------------------------------------------------------------------------------------------------------------------------
s_layer2 = Conv2D(10, (1, 1), activation="relu")(s_layer2)
s_layer2 = MaxPooling2D(4, 4)(s_layer2)
s_layer2 = layers.Flatten()(s_layer2)
s_layer2_mix = layers.Dropout(0.2)(s_layer2)
s_layer2_mix = layers.Dense(self.stage_num[1], activation="relu")(s_layer2_mix)
x_layer2 = Conv2D(10, (1, 1), activation="relu")(x_layer2)
x_layer2 = AveragePooling2D(4, 4)(x_layer2)
x_layer2 = layers.Flatten()(x_layer2)
x_layer2_mix = layers.Dropout(0.2)(x_layer2)
x_layer2_mix = layers.Dense(self.stage_num[1], activation="relu")(x_layer2_mix)
feat_a_s2_pre = layers.Multiply()([s_layer2, x_layer2])
delta_s2 = layers.Dense(1, activation="tanh", name="delta_s2")(feat_a_s2_pre)
feat_a_s2 = layers.Multiply()([s_layer2_mix, x_layer2_mix])
feat_a_s2 = layers.Dense(2 * self.stage_num[1], activation="relu")(feat_a_s2)
pred_a_s2 = layers.Dense(
units=self.stage_num[1], activation="relu", name="pred_age_stage2"
)(feat_a_s2)
# feat_local_s2 = layers.Lambda(lambda x: x/10)(feat_a_s2)
# feat_a_s2_local = Dropout(0.2)(pred_a_s2)
local_s2 = layers.Dense(
units=self.stage_num[1], activation="tanh", name="local_delta_stage2",
)(feat_a_s2)
# -------------------------------------------------------------------------------------------------------------------------
s_layer1 = Conv2D(10, (1, 1), activation="relu")(s_layer1)
s_layer1 = MaxPooling2D(8, 8)(s_layer1)
s_layer1 = layers.Flatten()(s_layer1)
s_layer1_mix = layers.Dropout(0.2)(s_layer1)
s_layer1_mix = layers.Dense(self.stage_num[2], activation="relu")(s_layer1_mix)
x_layer1 = Conv2D(10, (1, 1), activation="relu")(x_layer1)
x_layer1 = AveragePooling2D(8, 8)(x_layer1)
x_layer1 = layers.Flatten()(x_layer1)
x_layer1_mix = layers.Dropout(0.2)(x_layer1)
x_layer1_mix = layers.Dense(self.stage_num[2], activation="relu")(x_layer1_mix)
feat_a_s3_pre = layers.Multiply()([s_layer1, x_layer1])
delta_s3 = layers.Dense(1, activation="tanh", name="delta_s3")(feat_a_s3_pre)
feat_a_s3 = layers.Multiply()([s_layer1_mix, x_layer1_mix])
feat_a_s3 = layers.Dense(2 * self.stage_num[2], activation="relu")(feat_a_s3)
pred_a_s3 = layers.Dense(
units=self.stage_num[2], activation="relu", name="pred_age_stage3"
)(feat_a_s3)
# feat_local_s3 = layers.Lambda(lambda x: x/10)(feat_a_s3)
# feat_a_s3_local = Dropout(0.2)(pred_a_s3)
local_s3 = layers.Dense(
units=self.stage_num[2], activation="tanh", name="local_delta_stage3",
)(feat_a_s3)
# -------------------------------------------------------------------------------------------------------------------------
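        # Soft stagewise regression (a reading of the code below, not an official
        # note): each stage contributes an index refined by its local delta, the
        # dynamic-range factors (1 + lambda_d * delta_sk) rescale the stage widths,
        # and V = 101 maps the normalized result onto the age range.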
def merge_age(x, s1, s2, s3, lambda_local, lambda_d):
a = x[0][:, 0] * 0
b = x[0][:, 0] * 0
c = x[0][:, 0] * 0
# A = s1 * s2 * s3
V = 101
for i in range(0, s1):
a = a + (i + lambda_local * x[6][:, i]) * x[0][:, i]
a = K.expand_dims(a, -1)
a = a / (s1 * (1 + lambda_d * x[3]))
for j in range(0, s2):
b = b + (j + lambda_local * x[7][:, j]) * x[1][:, j]
b = K.expand_dims(b, -1)
b = b / (s1 * (1 + lambda_d * x[3])) / (s2 * (1 + lambda_d * x[4]))
for k in range(0, s3):
c = c + (k + lambda_local * x[8][:, k]) * x[2][:, k]
c = K.expand_dims(c, -1)
c = (
c
/ (s1 * (1 + lambda_d * x[3]))
/ (s2 * (1 + lambda_d * x[4]))
/ (s3 * (1 + lambda_d * x[5]))
)
age = (a + b + c) * V
return age
pred_a = layers.Lambda(
merge_age,
arguments={
"s1": self.stage_num[0],
"s2": self.stage_num[1],
"s3": self.stage_num[2],
"lambda_local": self.lambda_local,
"lambda_d": self.lambda_d,
},
name="pred_a",
)(
[
pred_a_s1,
pred_a_s2,
pred_a_s3,
delta_s1,
delta_s2,
delta_s3,
local_s1,
local_s2,
local_s3,
]
)
model = Model(inputs=inputs, outputs=pred_a)
return model
class SSR_net_general:
def __init__(self, image_size, stage_num, lambda_local, lambda_d):
        if K.image_data_format() == "channels_first":
            logging.debug("image_data_format = 'channels_first'")
            self._channel_axis = 1
            self._input_shape = (3, image_size, image_size)
        else:
            logging.debug("image_data_format = 'channels_last'")
            self._channel_axis = -1
            self._input_shape = (image_size, image_size, 3)
self.stage_num = stage_num
self.lambda_local = lambda_local
self.lambda_d = lambda_d
def __call__(self):
logging.debug("Creating model...")
inputs = layers.Input(shape=self._input_shape)
# -------------------------------------------------------------------------------------------------------------------------
x = Conv2D(32, (3, 3))(inputs)
x = BatchNormalization(axis=self._channel_axis)(x)
x = layers.Activation("relu")(x)
x_layer1 = AveragePooling2D(2, 2)(x)
x = Conv2D(32, (3, 3))(x_layer1)
x = BatchNormalization(axis=self._channel_axis)(x)
x = layers.Activation("relu")(x)
x_layer2 = AveragePooling2D(2, 2)(x)
x = Conv2D(32, (3, 3))(x_layer2)
x = BatchNormalization(axis=self._channel_axis)(x)
x = layers.Activation("relu")(x)
x_layer3 = AveragePooling2D(2, 2)(x)
x = Conv2D(32, (3, 3))(x_layer3)
x = BatchNormalization(axis=self._channel_axis)(x)
x = layers.Activation("relu")(x)
# -------------------------------------------------------------------------------------------------------------------------
s = Conv2D(16, (3, 3))(inputs)
s = BatchNormalization(axis=self._channel_axis)(s)
s = layers.Activation("tanh")(s)
s_layer1 = MaxPooling2D(2, 2)(s)
s = Conv2D(16, (3, 3))(s_layer1)
s = BatchNormalization(axis=self._channel_axis)(s)
s = layers.Activation("tanh")(s)
s_layer2 = MaxPooling2D(2, 2)(s)
s = Conv2D(16, (3, 3))(s_layer2)
s = BatchNormalization(axis=self._channel_axis)(s)
s = layers.Activation("tanh")(s)
s_layer3 = MaxPooling2D(2, 2)(s)
s = Conv2D(16, (3, 3))(s_layer3)
s = BatchNormalization(axis=self._channel_axis)(s)
s = layers.Activation("tanh")(s)
# -------------------------------------------------------------------------------------------------------------------------
# Classifier block
s_layer4 = Conv2D(10, (1, 1), activation="relu")(s)
s_layer4 = layers.Flatten()(s_layer4)
s_layer4_mix = layers.Dropout(0.2)(s_layer4)
s_layer4_mix = layers.Dense(units=self.stage_num[0], activation="relu")(
s_layer4_mix
)
x_layer4 = Conv2D(10, (1, 1), activation="relu")(x)
x_layer4 = layers.Flatten()(x_layer4)
x_layer4_mix = layers.Dropout(0.2)(x_layer4)
x_layer4_mix = layers.Dense(units=self.stage_num[0], activation="relu")(
x_layer4_mix
)
feat_s1_pre = layers.Multiply()([s_layer4, x_layer4])
delta_s1 = layers.Dense(1, activation="tanh", name="delta_s1")(feat_s1_pre)
feat_s1 = layers.Multiply()([s_layer4_mix, x_layer4_mix])
feat_s1 = layers.Dense(2 * self.stage_num[0], activation="relu")(feat_s1)
pred_s1 = layers.Dense(
units=self.stage_num[0], activation="relu", name="pred_stage1"
)(feat_s1)
local_s1 = layers.Dense(
units=self.stage_num[0], activation="tanh", name="local_delta_stage1",
)(feat_s1)
# -------------------------------------------------------------------------------------------------------------------------
s_layer2 = Conv2D(10, (1, 1), activation="relu")(s_layer2)
s_layer2 = MaxPooling2D(4, 4)(s_layer2)
s_layer2 = layers.Flatten()(s_layer2)
s_layer2_mix = layers.Dropout(0.2)(s_layer2)
s_layer2_mix = layers.Dense(self.stage_num[1], activation="relu")(s_layer2_mix)
x_layer2 = Conv2D(10, (1, 1), activation="relu")(x_layer2)
x_layer2 = AveragePooling2D(4, 4)(x_layer2)
x_layer2 = layers.Flatten()(x_layer2)
x_layer2_mix = layers.Dropout(0.2)(x_layer2)
x_layer2_mix = layers.Dense(self.stage_num[1], activation="relu")(x_layer2_mix)
feat_s2_pre = layers.Multiply()([s_layer2, x_layer2])
delta_s2 = layers.Dense(1, activation="tanh", name="delta_s2")(feat_s2_pre)
feat_s2 = layers.Multiply()([s_layer2_mix, x_layer2_mix])
feat_s2 = layers.Dense(2 * self.stage_num[1], activation="relu")(feat_s2)
pred_s2 = layers.Dense(
units=self.stage_num[1], activation="relu", name="pred_stage2"
)(feat_s2)
local_s2 = layers.Dense(
units=self.stage_num[1], activation="tanh", name="local_delta_stage2",
)(feat_s2)
# -------------------------------------------------------------------------------------------------------------------------
s_layer1 = Conv2D(10, (1, 1), activation="relu")(s_layer1)
s_layer1 = MaxPooling2D(8, 8)(s_layer1)
s_layer1 = layers.Flatten()(s_layer1)
s_layer1_mix = layers.Dropout(0.2)(s_layer1)
s_layer1_mix = layers.Dense(self.stage_num[2], activation="relu")(s_layer1_mix)
x_layer1 = Conv2D(10, (1, 1), activation="relu")(x_layer1)
x_layer1 = AveragePooling2D(8, 8)(x_layer1)
x_layer1 = layers.Flatten()(x_layer1)
x_layer1_mix = layers.Dropout(0.2)(x_layer1)
x_layer1_mix = layers.Dense(self.stage_num[2], activation="relu")(x_layer1_mix)
feat_s3_pre = layers.Multiply()([s_layer1, x_layer1])
delta_s3 = layers.Dense(1, activation="tanh", name="delta_s3")(feat_s3_pre)
feat_s3 = layers.Multiply()([s_layer1_mix, x_layer1_mix])
feat_s3 = layers.Dense(2 * self.stage_num[2], activation="relu")(feat_s3)
pred_s3 = layers.Dense(
units=self.stage_num[2], activation="relu", name="pred_stage3"
)(feat_s3)
local_s3 = layers.Dense(
units=self.stage_num[2], activation="tanh", name="local_delta_stage3",
)(feat_s3)
# -------------------------------------------------------------------------------------------------------------------------
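        # Soft stagewise regression: stage k contributes
        #   sum_i (i + lambda_local * local_k[i]) * pred_k[i]
        # divided by the product of the dynamic stage widths
        #   s_j * (1 + lambda_d * delta_j) for j <= k,
        # and the three contributions are summed and scaled by V (1 here).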
def SSR_module(x, s1, s2, s3, lambda_local, lambda_d):
a = x[0][:, 0] * 0
b = x[0][:, 0] * 0
c = x[0][:, 0] * 0
V = 1
for i in range(0, s1):
a = a + (i + lambda_local * x[6][:, i]) * x[0][:, i]
a = K.expand_dims(a, -1)
a = a / (s1 * (1 + lambda_d * x[3]))
for j in range(0, s2):
b = b + (j + lambda_local * x[7][:, j]) * x[1][:, j]
b = K.expand_dims(b, -1)
b = b / (s1 * (1 + lambda_d * x[3])) / (s2 * (1 + lambda_d * x[4]))
for k in range(0, s3):
c = c + (k + lambda_local * x[8][:, k]) * x[2][:, k]
c = K.expand_dims(c, -1)
c = (
c
/ (s1 * (1 + lambda_d * x[3]))
/ (s2 * (1 + lambda_d * x[4]))
/ (s3 * (1 + lambda_d * x[5]))
)
out = (a + b + c) * V
return out
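        # Wrap the merge in a Lambda layer so the nine stage tensors are fused
        # inside the Keras graph.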
pred = layers.Lambda(
SSR_module,
arguments={
"s1": self.stage_num[0],
"s2": self.stage_num[1],
"s3": self.stage_num[2],
"lambda_local": self.lambda_local,
"lambda_d": self.lambda_d,
},
name="pred",
)(
[
pred_s1,
pred_s2,
pred_s3,
delta_s1,
delta_s2,
delta_s3,
local_s1,
local_s2,
local_s3,
]
)
model = Model(inputs=inputs, outputs=pred)
return model


class SSR_net_MT:
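    # Multi-output variant ("MT"): predicts num_classes values at once (the
    # output layer is named "pred_pose", e.g. yaw/pitch/roll head-pose angles)
    # using depthwise-separable convolutions and per-class stage heads whose
    # bin scores are reshaped to (num_classes, stage_num[k]).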
def __init__(self, image_size, num_classes, stage_num, lambda_d):
if K.image_dim_ordering() == "th":
logging.debug("image_dim_ordering = 'th'")
self._channel_axis = 1
self._input_shape = (3, image_size, image_size)
else:
logging.debug("image_dim_ordering = 'tf'")
self._channel_axis = -1
self._input_shape = (image_size, image_size, 3)
self.num_classes = num_classes
self.stage_num = stage_num
self.lambda_d = lambda_d

    def __call__(self):
logging.debug("Creating model...")
img_inputs = layers.Input(self._input_shape)
# -------------------------------------------------------------------------------------------------------------------------
x = SeparableConv2D(16, (3, 3), padding="same")(img_inputs)
x = BatchNormalization(axis=-1)(x)
x = layers.Activation("relu")(x)
x_layer1 = AveragePooling2D((2, 2))(x)
x = SeparableConv2D(32, (3, 3), padding="same")(x_layer1)
x = BatchNormalization(axis=-1)(x)
x = layers.Activation("relu")(x)
x = SeparableConv2D(32, (3, 3), padding="same")(x)
x = BatchNormalization(axis=-1)(x)
x = layers.Activation("relu")(x)
x_layer2 = AveragePooling2D((2, 2))(x)
x = SeparableConv2D(64, (3, 3), padding="same")(x_layer2)
x = BatchNormalization(axis=-1)(x)
x = layers.Activation("relu")(x)
x = SeparableConv2D(64, (3, 3), padding="same")(x)
x = BatchNormalization(axis=-1)(x)
x = layers.Activation("relu")(x)
x_layer3 = AveragePooling2D((2, 2))(x)
x = SeparableConv2D(128, (3, 3), padding="same")(x_layer3)
x = BatchNormalization(axis=-1)(x)
x = layers.Activation("relu")(x)
x = SeparableConv2D(128, (3, 3), padding="same")(x)
x = BatchNormalization(axis=-1)(x)
x_layer4 = layers.Activation("relu")(x)
# -------------------------------------------------------------------------------------------------------------------------
s = SeparableConv2D(16, (3, 3), padding="same")(img_inputs)
s = BatchNormalization(axis=-1)(s)
s = layers.Activation("tanh")(s)
s_layer1 = MaxPooling2D((2, 2))(s)
s = SeparableConv2D(32, (3, 3), padding="same")(s_layer1)
s = BatchNormalization(axis=-1)(s)
s = layers.Activation("tanh")(s)
s = SeparableConv2D(32, (3, 3), padding="same")(s)
s = BatchNormalization(axis=-1)(s)
s = layers.Activation("tanh")(s)
s_layer2 = MaxPooling2D((2, 2))(s)
s = SeparableConv2D(64, (3, 3), padding="same")(s_layer2)
s = BatchNormalization(axis=-1)(s)
s = layers.Activation("tanh")(s)
s = SeparableConv2D(64, (3, 3), padding="same")(s)
s = BatchNormalization(axis=-1)(s)
s = layers.Activation("tanh")(s)
s_layer3 = MaxPooling2D((2, 2))(s)
s = SeparableConv2D(128, (3, 3), padding="same")(s_layer3)
s = BatchNormalization(axis=-1)(s)
s = layers.Activation("tanh")(s)
s = SeparableConv2D(128, (3, 3), padding="same")(s)
s = BatchNormalization(axis=-1)(s)
s_layer4 = layers.Activation("tanh")(s)
# -------------------------------------------------------------------------------------------------------------------------
# Classifier block
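        # Per-stage heads: each fuses one s/x tap pair, then predicts
        # delta_sk (per-class width adjustment), local_sk (per-class shifts)
        # and pred_a_sk, reshaped to (num_classes, stage_num[k]) bin scores.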
s_layer4 = Conv2D(64, (1, 1), activation="tanh")(s_layer4)
s_layer4 = MaxPooling2D((2, 2))(s_layer4)
x_layer4 = Conv2D(64, (1, 1), activation="relu")(x_layer4)
x_layer4 = AveragePooling2D((2, 2))(x_layer4)
feat_s1_pre = layers.Multiply()([s_layer4, x_layer4])
feat_s1_pre = layers.Flatten()(feat_s1_pre)
feat_delta_s1 = layers.Dense(2 * self.num_classes, activation="tanh")(
feat_s1_pre
)
delta_s1 = layers.Dense(self.num_classes, activation="tanh", name="delta_s1")(
feat_delta_s1
)
feat_local_s1 = layers.Dense(2 * self.num_classes, activation="tanh")(
feat_s1_pre
)
local_s1 = layers.Dense(
units=self.num_classes, activation="tanh", name="local_delta_stage1"
)(feat_local_s1)
feat_pred_s1 = layers.Dense(
self.stage_num[0] * self.num_classes, activation="relu"
)(feat_s1_pre)
pred_a_s1 = layers.Reshape((self.num_classes, self.stage_num[0]))(feat_pred_s1)
# -------------------------------------------------------------------------------------------------------------------------
s_layer3 = Conv2D(64, (1, 1), activation="tanh")(s_layer3)
s_layer3 = MaxPooling2D((2, 2))(s_layer3)
x_layer3 = Conv2D(64, (1, 1), activation="relu")(x_layer3)
x_layer3 = AveragePooling2D((2, 2))(x_layer3)
feat_s2_pre = layers.Multiply()([s_layer3, x_layer3])
feat_s2_pre = layers.Flatten()(feat_s2_pre)
feat_delta_s2 = layers.Dense(2 * self.num_classes, activation="tanh")(
feat_s2_pre
)
delta_s2 = layers.Dense(self.num_classes, activation="tanh", name="delta_s2")(
feat_delta_s2
)
feat_local_s2 = layers.Dense(2 * self.num_classes, activation="tanh")(
feat_s2_pre
)
local_s2 = layers.Dense(
units=self.num_classes, activation="tanh", name="local_delta_stage2"
)(feat_local_s2)
feat_pred_s2 = layers.Dense(
self.stage_num[1] * self.num_classes, activation="relu"
)(feat_s2_pre)
pred_a_s2 = layers.Reshape((self.num_classes, self.stage_num[1]))(feat_pred_s2)
# -------------------------------------------------------------------------------------------------------------------------
s_layer2 = Conv2D(64, (1, 1), activation="tanh")(s_layer2)
s_layer2 = MaxPooling2D((2, 2))(s_layer2)
x_layer2 = Conv2D(64, (1, 1), activation="relu")(x_layer2)
x_layer2 = AveragePooling2D((2, 2))(x_layer2)
feat_s3_pre = layers.Multiply()([s_layer2, x_layer2])
feat_s3_pre = layers.Flatten()(feat_s3_pre)
feat_delta_s3 = layers.Dense(2 * self.num_classes, activation="tanh")(
feat_s3_pre
)
delta_s3 = layers.Dense(self.num_classes, activation="tanh", name="delta_s3")(
feat_delta_s3
)
feat_local_s3 = layers.Dense(2 * self.num_classes, activation="tanh")(
feat_s3_pre
)
local_s3 = layers.Dense(
units=self.num_classes, activation="tanh", name="local_delta_stage3"
)(feat_local_s3)
feat_pred_s3 = layers.Dense(
self.stage_num[2] * self.num_classes, activation="relu"
)(feat_s3_pre)
pred_a_s3 = layers.Reshape((self.num_classes, self.stage_num[2]))(feat_pred_s3)
# -------------------------------------------------------------------------------------------------------------------------
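        # Vector SSR merge: per class, stage k contributes
        #   sum_i (i - s_k // 2 + local_k) * pred_k[:, i]
        # divided by the dynamic widths s_j * (1 + lambda_d * delta_j) for
        # j <= k; the sum over stages is scaled by V = 99 (the target range,
        # e.g. angles in degrees).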
def SSR_module(x, s1, s2, s3, lambda_d):
a = x[0][:, :, 0] * 0
b = x[0][:, :, 0] * 0
c = x[0][:, :, 0] * 0
di = s1 // 2
dj = s2 // 2
dk = s3 // 2
V = 99
# lambda_d = 0.9
for i in range(0, s1):
a = a + (i - di + x[6]) * x[0][:, :, i]
# a = K.expand_dims(a,-1)
a = a / (s1 * (1 + lambda_d * x[3]))
for j in range(0, s2):
b = b + (j - dj + x[7]) * x[1][:, :, j]
# b = K.expand_dims(b,-1)
b = b / (s1 * (1 + lambda_d * x[3])) / (s2 * (1 + lambda_d * x[4]))
for k in range(0, s3):
c = c + (k - dk + x[8]) * x[2][:, :, k]
# c = K.expand_dims(c,-1)
c = (
c
/ (s1 * (1 + lambda_d * x[3]))
/ (s2 * (1 + lambda_d * x[4]))
/ (s3 * (1 + lambda_d * x[5]))
)
pred = (a + b + c) * V
return pred
pred_pose = layers.Lambda(
SSR_module,
arguments={
"s1": self.stage_num[0],
"s2": self.stage_num[1],
"s3": self.stage_num[2],
"lambda_d": self.lambda_d,
},
name="pred_pose",
)(
[
pred_a_s1,
pred_a_s2,
pred_a_s3,
delta_s1,
delta_s2,
delta_s3,
local_s1,
local_s2,
local_s3,
]
)
model = Model(inputs=img_inputs, outputs=pred_pose)
return model


class SSR_net_ori_MT:
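    # Same multi-output heads and SSR merge as SSR_net_MT, but with the
    # original plain Conv2D backbone (padding="same") instead of
    # depthwise-separable convolutions.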
def __init__(self, image_size, num_classes, stage_num, lambda_d):
if K.image_dim_ordering() == "th":
logging.debug("image_dim_ordering = 'th'")
self._channel_axis = 1
self._input_shape = (3, image_size, image_size)
else:
logging.debug("image_dim_ordering = 'tf'")
self._channel_axis = -1
self._input_shape = (image_size, image_size, 3)
self.num_classes = num_classes
self.stage_num = stage_num
self.lambda_d = lambda_d

    def __call__(self):
logging.debug("Creating model...")
img_inputs = layers.Input(self._input_shape)
# -------------------------------------------------------------------------------------------------------------------------
x = Conv2D(32, (3, 3), padding="same")(img_inputs)
x = BatchNormalization(axis=self._channel_axis)(x)
x = layers.Activation("relu")(x)
x_layer1 = AveragePooling2D(2, 2)(x)
x = Conv2D(32, (3, 3), padding="same")(x_layer1)
x = BatchNormalization(axis=self._channel_axis)(x)
x = layers.Activation("relu")(x)
x_layer2 = AveragePooling2D(2, 2)(x)
x = Conv2D(32, (3, 3), padding="same")(x_layer2)
x = BatchNormalization(axis=self._channel_axis)(x)
x = layers.Activation("relu")(x)
x_layer3 = AveragePooling2D(2, 2)(x)
x = Conv2D(32, (3, 3), padding="same")(x_layer3)
x = BatchNormalization(axis=self._channel_axis)(x)
x_layer4 = layers.Activation("relu")(x)
# -------------------------------------------------------------------------------------------------------------------------
s = Conv2D(16, (3, 3), padding="same")(img_inputs)
s = BatchNormalization(axis=self._channel_axis)(s)
s = layers.Activation("tanh")(s)
s_layer1 = MaxPooling2D(2, 2)(s)
s = Conv2D(16, (3, 3), padding="same")(s_layer1)
s = BatchNormalization(axis=self._channel_axis)(s)
s = layers.Activation("tanh")(s)
s_layer2 = MaxPooling2D(2, 2)(s)
s = Conv2D(16, (3, 3), padding="same")(s_layer2)
s = BatchNormalization(axis=self._channel_axis)(s)
s = layers.Activation("tanh")(s)
s_layer3 = MaxPooling2D(2, 2)(s)
s = Conv2D(16, (3, 3), padding="same")(s_layer3)
s = BatchNormalization(axis=self._channel_axis)(s)
s_layer4 = layers.Activation("tanh")(s)
# -------------------------------------------------------------------------------------------------------------------------
# Classifier block
s_layer4 = Conv2D(64, (1, 1), activation="tanh")(s_layer4)
s_layer4 = MaxPooling2D((2, 2))(s_layer4)
x_layer4 = Conv2D(64, (1, 1), activation="relu")(x_layer4)
x_layer4 = AveragePooling2D((2, 2))(x_layer4)
feat_s1_pre = layers.Multiply()([s_layer4, x_layer4])
feat_s1_pre = layers.Flatten()(feat_s1_pre)
feat_delta_s1 = layers.Dense(2 * self.num_classes, activation="tanh")(
feat_s1_pre
)
delta_s1 = layers.Dense(self.num_classes, activation="tanh", name="delta_s1")(
feat_delta_s1
)
feat_local_s1 = layers.Dense(2 * self.num_classes, activation="tanh")(
feat_s1_pre
)
local_s1 = layers.Dense(
units=self.num_classes, activation="tanh", name="local_delta_stage1"
)(feat_local_s1)
feat_pred_s1 = layers.Dense(
self.stage_num[0] * self.num_classes, activation="relu"
)(feat_s1_pre)
pred_a_s1 = layers.Reshape((self.num_classes, self.stage_num[0]))(feat_pred_s1)
# -------------------------------------------------------------------------------------------------------------------------
s_layer3 = Conv2D(64, (1, 1), activation="tanh")(s_layer3)
s_layer3 = MaxPooling2D((2, 2))(s_layer3)
x_layer3 = Conv2D(64, (1, 1), activation="relu")(x_layer3)
x_layer3 = AveragePooling2D((2, 2))(x_layer3)
feat_s2_pre = layers.Multiply()([s_layer3, x_layer3])
feat_s2_pre = layers.Flatten()(feat_s2_pre)
feat_delta_s2 = layers.Dense(2 * self.num_classes, activation="tanh")(
feat_s2_pre
)
delta_s2 = layers.Dense(self.num_classes, activation="tanh", name="delta_s2")(
feat_delta_s2
)
feat_local_s2 = layers.Dense(2 * self.num_classes, activation="tanh")(
feat_s2_pre
)
local_s2 = layers.Dense(
units=self.num_classes, activation="tanh", name="local_delta_stage2"
)(feat_local_s2)
feat_pred_s2 = layers.Dense(
self.stage_num[1] * self.num_classes, activation="relu"
)(feat_s2_pre)
pred_a_s2 = layers.Reshape((self.num_classes, self.stage_num[1]))(feat_pred_s2)
# -------------------------------------------------------------------------------------------------------------------------
s_layer2 = Conv2D(64, (1, 1), activation="tanh")(s_layer2)
s_layer2 = MaxPooling2D((2, 2))(s_layer2)
x_layer2 = Conv2D(64, (1, 1), activation="relu")(x_layer2)
x_layer2 = AveragePooling2D((2, 2))(x_layer2)
feat_s3_pre = layers.Multiply()([s_layer2, x_layer2])
feat_s3_pre = layers.Flatten()(feat_s3_pre)
feat_delta_s3 = layers.Dense(2 * self.num_classes, activation="tanh")(
feat_s3_pre
)
delta_s3 = layers.Dense(self.num_classes, activation="tanh", name="delta_s3")(
feat_delta_s3
)
feat_local_s3 = layers.Dense(2 * self.num_classes, activation="tanh")(
feat_s3_pre
)
local_s3 = layers.Dense(
units=self.num_classes, activation="tanh", name="local_delta_stage3"
)(feat_local_s3)
feat_pred_s3 = layers.Dense(
self.stage_num[2] * self.num_classes, activation="relu"
)(feat_s3_pre)
pred_a_s3 = layers.Reshape((self.num_classes, self.stage_num[2]))(feat_pred_s3)
# -------------------------------------------------------------------------------------------------------------------------
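        # Identical vector SSR merge to SSR_net_MT above (V = 99).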
def SSR_module(x, s1, s2, s3, lambda_d):
a = x[0][:, :, 0] * 0
b = x[0][:, :, 0] * 0
c = x[0][:, :, 0] * 0
di = s1 // 2
dj = s2 // 2
dk = s3 // 2
V = 99
# lambda_d = 0.9
for i in range(0, s1):
a = a + (i - di + x[6]) * x[0][:, :, i]
# a = K.expand_dims(a,-1)
a = a / (s1 * (1 + lambda_d * x[3]))
for j in range(0, s2):
b = b + (j - dj + x[7]) * x[1][:, :, j]
# b = K.expand_dims(b,-1)
b = b / (s1 * (1 + lambda_d * x[3])) / (s2 * (1 + lambda_d * x[4]))
for k in range(0, s3):
c = c + (k - dk + x[8]) * x[2][:, :, k]
# c = K.expand_dims(c,-1)
c = (
c
/ (s1 * (1 + lambda_d * x[3]))
/ (s2 * (1 + lambda_d * x[4]))
/ (s3 * (1 + lambda_d * x[5]))
)
pred = (a + b + c) * V
return pred
pred_pose = layers.Lambda(
SSR_module,
arguments={
"s1": self.stage_num[0],
"s2": self.stage_num[1],
"s3": self.stage_num[2],
"lambda_d": self.lambda_d,
},
name="pred_pose",
)(
[
pred_a_s1,
pred_a_s2,
pred_a_s3,
delta_s1,
delta_s2,
delta_s3,
local_s1,
local_s2,
local_s3,
]
)
model = Model(inputs=img_inputs, outputs=pred_pose)
return model
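

# Minimal usage sketch (not part of the original file). The image size, stage
# configuration and lambda values below are illustrative assumptions, and the
# snippet presumes the TF/Keras version this module targets (one where
# K.image_dim_ordering() is still available).
if __name__ == "__main__":
    age_model = SSR_net_general(
        image_size=64, stage_num=[3, 3, 3], lambda_local=1.0, lambda_d=1.0
    )()
    pose_model = SSR_net_MT(
        image_size=64, num_classes=3, stage_num=[3, 3, 3], lambda_d=1.0
    )()
    age_model.summary()
    pose_model.summary()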
[
"numpy.random.seed",
"tensorflow.keras.layers.MaxPooling2D",
"tensorflow.keras.layers.Reshape",
"tensorflow.keras.layers.Dense",
"tensorflow.keras.layers.Multiply",
"sys.setrecursionlimit",
"tensorflow.keras.layers.Flatten",
"tensorflow.keras.layers.BatchNormalization",
"tensorflow.keras.backend.expand_dims",
"tensorflow.keras.layers.Activation",
"tensorflow.keras.layers.Input",
"tensorflow.keras.layers.SeparableConv2D",
"tensorflow.keras.layers.AveragePooling2D",
"tensorflow.keras.layers.Dropout",
"tensorflow.keras.models.Model",
"logging.debug",
"tensorflow.keras.layers.Conv2D",
"tensorflow.keras.backend.image_dim_ordering",
"tensorflow.keras.layers.Lambda"
]
[((304, 334), 'sys.setrecursionlimit', 'sys.setrecursionlimit', (['(2 ** 20)'], {}), '(2 ** 20)\n', (325, 334), False, 'import sys\n'), ((335, 358), 'numpy.random.seed', 'np.random.seed', (['(2 ** 10)'], {}), '(2 ** 10)\n', (349, 358), True, 'import numpy as np\n'), ((949, 983), 'logging.debug', 'logging.debug', (['"""Creating model..."""'], {}), "('Creating model...')\n", (962, 983), False, 'import logging\n'), ((1002, 1039), 'tensorflow.keras.layers.Input', 'layers.Input', ([], {'shape': 'self._input_shape'}), '(shape=self._input_shape)\n', (1014, 1039), False, 'from tensorflow.keras import layers\n'), ((8753, 8789), 'tensorflow.keras.models.Model', 'Model', ([], {'inputs': 'inputs', 'outputs': 'pred_a'}), '(inputs=inputs, outputs=pred_a)\n', (8758, 8789), False, 'from tensorflow.keras.models import Model\n'), ((9410, 9444), 'logging.debug', 'logging.debug', (['"""Creating model..."""'], {}), "('Creating model...')\n", (9423, 9444), False, 'import logging\n'), ((9463, 9500), 'tensorflow.keras.layers.Input', 'layers.Input', ([], {'shape': 'self._input_shape'}), '(shape=self._input_shape)\n', (9475, 9500), False, 'from tensorflow.keras import layers\n'), ((16756, 16790), 'tensorflow.keras.models.Model', 'Model', ([], {'inputs': 'inputs', 'outputs': 'pred'}), '(inputs=inputs, outputs=pred)\n', (16761, 16790), False, 'from tensorflow.keras.models import Model\n'), ((17403, 17437), 'logging.debug', 'logging.debug', (['"""Creating model..."""'], {}), "('Creating model...')\n", (17416, 17437), False, 'import logging\n'), ((17460, 17491), 'tensorflow.keras.layers.Input', 'layers.Input', (['self._input_shape'], {}), '(self._input_shape)\n', (17472, 17491), False, 'from tensorflow.keras import layers\n'), ((25488, 25531), 'tensorflow.keras.models.Model', 'Model', ([], {'inputs': 'img_inputs', 'outputs': 'pred_pose'}), '(inputs=img_inputs, outputs=pred_pose)\n', (25493, 25531), False, 'from tensorflow.keras.models import Model\n'), ((26148, 26182), 'logging.debug', 'logging.debug', (['"""Creating model..."""'], {}), "('Creating model...')\n", (26161, 26182), False, 'import logging\n'), ((26205, 26236), 'tensorflow.keras.layers.Input', 'layers.Input', (['self._input_shape'], {}), '(self._input_shape)\n', (26217, 26236), False, 'from tensorflow.keras import layers\n'), ((33415, 33458), 'tensorflow.keras.models.Model', 'Model', ([], {'inputs': 'img_inputs', 'outputs': 'pred_pose'}), '(inputs=img_inputs, outputs=pred_pose)\n', (33420, 33458), False, 'from tensorflow.keras.models import Model\n'), ((459, 481), 'tensorflow.keras.backend.image_dim_ordering', 'K.image_dim_ordering', ([], {}), '()\n', (479, 481), True, 'from tensorflow.keras import backend as K\n'), ((503, 545), 'logging.debug', 'logging.debug', (['"""image_dim_ordering = \'th\'"""'], {}), '("image_dim_ordering = \'th\'")\n', (516, 545), False, 'import logging\n'), ((667, 709), 'logging.debug', 'logging.debug', (['"""image_dim_ordering = \'tf\'"""'], {}), '("image_dim_ordering = \'tf\'")\n', (680, 709), False, 'import logging\n'), ((1185, 1203), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(32)', '(3, 3)'], {}), '(32, (3, 3))\n', (1191, 1203), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((1224, 1267), 'tensorflow.keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': 'self._channel_axis'}), '(axis=self._channel_axis)\n', (1242, 1267), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, 
SeparableConv2D\n'), ((1283, 1308), 'tensorflow.keras.layers.Activation', 'layers.Activation', (['"""relu"""'], {}), "('relu')\n", (1300, 1308), False, 'from tensorflow.keras import layers\n'), ((1331, 1353), 'tensorflow.keras.layers.AveragePooling2D', 'AveragePooling2D', (['(2)', '(2)'], {}), '(2, 2)\n', (1347, 1353), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((1369, 1387), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(32)', '(3, 3)'], {}), '(32, (3, 3))\n', (1375, 1387), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((1410, 1453), 'tensorflow.keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': 'self._channel_axis'}), '(axis=self._channel_axis)\n', (1428, 1453), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((1469, 1494), 'tensorflow.keras.layers.Activation', 'layers.Activation', (['"""relu"""'], {}), "('relu')\n", (1486, 1494), False, 'from tensorflow.keras import layers\n'), ((1517, 1539), 'tensorflow.keras.layers.AveragePooling2D', 'AveragePooling2D', (['(2)', '(2)'], {}), '(2, 2)\n', (1533, 1539), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((1555, 1573), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(32)', '(3, 3)'], {}), '(32, (3, 3))\n', (1561, 1573), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((1596, 1639), 'tensorflow.keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': 'self._channel_axis'}), '(axis=self._channel_axis)\n', (1614, 1639), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((1655, 1680), 'tensorflow.keras.layers.Activation', 'layers.Activation', (['"""relu"""'], {}), "('relu')\n", (1672, 1680), False, 'from tensorflow.keras import layers\n'), ((1703, 1725), 'tensorflow.keras.layers.AveragePooling2D', 'AveragePooling2D', (['(2)', '(2)'], {}), '(2, 2)\n', (1719, 1725), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((1741, 1759), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(32)', '(3, 3)'], {}), '(32, (3, 3))\n', (1747, 1759), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((1782, 1825), 'tensorflow.keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': 'self._channel_axis'}), '(axis=self._channel_axis)\n', (1800, 1825), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((1841, 1866), 'tensorflow.keras.layers.Activation', 'layers.Activation', (['"""relu"""'], {}), "('relu')\n", (1858, 1866), False, 'from tensorflow.keras import layers\n'), ((2014, 2032), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(16)', '(3, 3)'], {}), '(16, (3, 3))\n', (2020, 2032), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((2053, 2096), 'tensorflow.keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': 'self._channel_axis'}), '(axis=self._channel_axis)\n', (2071, 2096), False, 'from tensorflow.keras.layers import AveragePooling2D, 
BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((2112, 2137), 'tensorflow.keras.layers.Activation', 'layers.Activation', (['"""tanh"""'], {}), "('tanh')\n", (2129, 2137), False, 'from tensorflow.keras import layers\n'), ((2160, 2178), 'tensorflow.keras.layers.MaxPooling2D', 'MaxPooling2D', (['(2)', '(2)'], {}), '(2, 2)\n', (2172, 2178), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((2194, 2212), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(16)', '(3, 3)'], {}), '(16, (3, 3))\n', (2200, 2212), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((2235, 2278), 'tensorflow.keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': 'self._channel_axis'}), '(axis=self._channel_axis)\n', (2253, 2278), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((2294, 2319), 'tensorflow.keras.layers.Activation', 'layers.Activation', (['"""tanh"""'], {}), "('tanh')\n", (2311, 2319), False, 'from tensorflow.keras import layers\n'), ((2342, 2360), 'tensorflow.keras.layers.MaxPooling2D', 'MaxPooling2D', (['(2)', '(2)'], {}), '(2, 2)\n', (2354, 2360), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((2376, 2394), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(16)', '(3, 3)'], {}), '(16, (3, 3))\n', (2382, 2394), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((2417, 2460), 'tensorflow.keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': 'self._channel_axis'}), '(axis=self._channel_axis)\n', (2435, 2460), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((2476, 2501), 'tensorflow.keras.layers.Activation', 'layers.Activation', (['"""tanh"""'], {}), "('tanh')\n", (2493, 2501), False, 'from tensorflow.keras import layers\n'), ((2524, 2542), 'tensorflow.keras.layers.MaxPooling2D', 'MaxPooling2D', (['(2)', '(2)'], {}), '(2, 2)\n', (2536, 2542), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((2558, 2576), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(16)', '(3, 3)'], {}), '(16, (3, 3))\n', (2564, 2576), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((2599, 2642), 'tensorflow.keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': 'self._channel_axis'}), '(axis=self._channel_axis)\n', (2617, 2642), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((2658, 2683), 'tensorflow.keras.layers.Activation', 'layers.Activation', (['"""tanh"""'], {}), "('tanh')\n", (2675, 2683), False, 'from tensorflow.keras import layers\n'), ((2866, 2903), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(10)', '(1, 1)'], {'activation': '"""relu"""'}), "(10, (1, 1), activation='relu')\n", (2872, 2903), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((2926, 2942), 'tensorflow.keras.layers.Flatten', 'layers.Flatten', ([], {}), '()\n', (2940, 2942), False, 'from tensorflow.keras import layers\n'), ((2976, 2995), 
'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['(0.2)'], {}), '(0.2)\n', (2990, 2995), False, 'from tensorflow.keras import layers\n'), ((3029, 3085), 'tensorflow.keras.layers.Dense', 'layers.Dense', ([], {'units': 'self.stage_num[0]', 'activation': '"""relu"""'}), "(units=self.stage_num[0], activation='relu')\n", (3041, 3085), False, 'from tensorflow.keras import layers\n'), ((3142, 3179), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(10)', '(1, 1)'], {'activation': '"""relu"""'}), "(10, (1, 1), activation='relu')\n", (3148, 3179), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((3202, 3218), 'tensorflow.keras.layers.Flatten', 'layers.Flatten', ([], {}), '()\n', (3216, 3218), False, 'from tensorflow.keras import layers\n'), ((3252, 3271), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['(0.2)'], {}), '(0.2)\n', (3266, 3271), False, 'from tensorflow.keras import layers\n'), ((3305, 3361), 'tensorflow.keras.layers.Dense', 'layers.Dense', ([], {'units': 'self.stage_num[0]', 'activation': '"""relu"""'}), "(units=self.stage_num[0], activation='relu')\n", (3317, 3361), False, 'from tensorflow.keras import layers\n'), ((3423, 3440), 'tensorflow.keras.layers.Multiply', 'layers.Multiply', ([], {}), '()\n', (3438, 3440), False, 'from tensorflow.keras import layers\n'), ((3482, 3533), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(1)'], {'activation': '"""tanh"""', 'name': '"""delta_s1"""'}), "(1, activation='tanh', name='delta_s1')\n", (3494, 3533), False, 'from tensorflow.keras import layers\n'), ((3570, 3587), 'tensorflow.keras.layers.Multiply', 'layers.Multiply', ([], {}), '()\n', (3585, 3587), False, 'from tensorflow.keras import layers\n'), ((3638, 3692), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(2 * self.stage_num[0])'], {'activation': '"""relu"""'}), "(2 * self.stage_num[0], activation='relu')\n", (3650, 3692), False, 'from tensorflow.keras import layers\n'), ((3724, 3809), 'tensorflow.keras.layers.Dense', 'layers.Dense', ([], {'units': 'self.stage_num[0]', 'activation': '"""relu"""', 'name': '"""pred_age_stage1"""'}), "(units=self.stage_num[0], activation='relu', name='pred_age_stage1'\n )\n", (3736, 3809), False, 'from tensorflow.keras import layers\n'), ((3976, 4064), 'tensorflow.keras.layers.Dense', 'layers.Dense', ([], {'units': 'self.stage_num[0]', 'activation': '"""tanh"""', 'name': '"""local_delta_stage1"""'}), "(units=self.stage_num[0], activation='tanh', name=\n 'local_delta_stage1')\n", (3988, 4064), False, 'from tensorflow.keras import layers\n'), ((4245, 4282), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(10)', '(1, 1)'], {'activation': '"""relu"""'}), "(10, (1, 1), activation='relu')\n", (4251, 4282), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((4312, 4330), 'tensorflow.keras.layers.MaxPooling2D', 'MaxPooling2D', (['(4)', '(4)'], {}), '(4, 4)\n', (4324, 4330), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((4360, 4376), 'tensorflow.keras.layers.Flatten', 'layers.Flatten', ([], {}), '()\n', (4374, 4376), False, 'from tensorflow.keras import layers\n'), ((4410, 4429), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['(0.2)'], {}), '(0.2)\n', (4424, 4429), False, 'from tensorflow.keras import layers\n'), ((4463, 4513), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['self.stage_num[1]'], 
{'activation': '"""relu"""'}), "(self.stage_num[1], activation='relu')\n", (4475, 4513), False, 'from tensorflow.keras import layers\n'), ((4548, 4585), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(10)', '(1, 1)'], {'activation': '"""relu"""'}), "(10, (1, 1), activation='relu')\n", (4554, 4585), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((4615, 4637), 'tensorflow.keras.layers.AveragePooling2D', 'AveragePooling2D', (['(4)', '(4)'], {}), '(4, 4)\n', (4631, 4637), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((4667, 4683), 'tensorflow.keras.layers.Flatten', 'layers.Flatten', ([], {}), '()\n', (4681, 4683), False, 'from tensorflow.keras import layers\n'), ((4717, 4736), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['(0.2)'], {}), '(0.2)\n', (4731, 4736), False, 'from tensorflow.keras import layers\n'), ((4770, 4820), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['self.stage_num[1]'], {'activation': '"""relu"""'}), "(self.stage_num[1], activation='relu')\n", (4782, 4820), False, 'from tensorflow.keras import layers\n'), ((4860, 4877), 'tensorflow.keras.layers.Multiply', 'layers.Multiply', ([], {}), '()\n', (4875, 4877), False, 'from tensorflow.keras import layers\n'), ((4919, 4970), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(1)'], {'activation': '"""tanh"""', 'name': '"""delta_s2"""'}), "(1, activation='tanh', name='delta_s2')\n", (4931, 4970), False, 'from tensorflow.keras import layers\n'), ((5007, 5024), 'tensorflow.keras.layers.Multiply', 'layers.Multiply', ([], {}), '()\n', (5022, 5024), False, 'from tensorflow.keras import layers\n'), ((5075, 5129), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(2 * self.stage_num[1])'], {'activation': '"""relu"""'}), "(2 * self.stage_num[1], activation='relu')\n", (5087, 5129), False, 'from tensorflow.keras import layers\n'), ((5161, 5246), 'tensorflow.keras.layers.Dense', 'layers.Dense', ([], {'units': 'self.stage_num[1]', 'activation': '"""relu"""', 'name': '"""pred_age_stage2"""'}), "(units=self.stage_num[1], activation='relu', name='pred_age_stage2'\n )\n", (5173, 5246), False, 'from tensorflow.keras import layers\n'), ((5413, 5501), 'tensorflow.keras.layers.Dense', 'layers.Dense', ([], {'units': 'self.stage_num[1]', 'activation': '"""tanh"""', 'name': '"""local_delta_stage2"""'}), "(units=self.stage_num[1], activation='tanh', name=\n 'local_delta_stage2')\n", (5425, 5501), False, 'from tensorflow.keras import layers\n'), ((5682, 5719), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(10)', '(1, 1)'], {'activation': '"""relu"""'}), "(10, (1, 1), activation='relu')\n", (5688, 5719), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((5749, 5767), 'tensorflow.keras.layers.MaxPooling2D', 'MaxPooling2D', (['(8)', '(8)'], {}), '(8, 8)\n', (5761, 5767), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((5797, 5813), 'tensorflow.keras.layers.Flatten', 'layers.Flatten', ([], {}), '()\n', (5811, 5813), False, 'from tensorflow.keras import layers\n'), ((5847, 5866), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['(0.2)'], {}), '(0.2)\n', (5861, 5866), False, 'from tensorflow.keras import layers\n'), ((5900, 5950), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['self.stage_num[2]'], {'activation': 
'"""relu"""'}), "(self.stage_num[2], activation='relu')\n", (5912, 5950), False, 'from tensorflow.keras import layers\n'), ((5985, 6022), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(10)', '(1, 1)'], {'activation': '"""relu"""'}), "(10, (1, 1), activation='relu')\n", (5991, 6022), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((6052, 6074), 'tensorflow.keras.layers.AveragePooling2D', 'AveragePooling2D', (['(8)', '(8)'], {}), '(8, 8)\n', (6068, 6074), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((6104, 6120), 'tensorflow.keras.layers.Flatten', 'layers.Flatten', ([], {}), '()\n', (6118, 6120), False, 'from tensorflow.keras import layers\n'), ((6154, 6173), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['(0.2)'], {}), '(0.2)\n', (6168, 6173), False, 'from tensorflow.keras import layers\n'), ((6207, 6257), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['self.stage_num[2]'], {'activation': '"""relu"""'}), "(self.stage_num[2], activation='relu')\n", (6219, 6257), False, 'from tensorflow.keras import layers\n'), ((6297, 6314), 'tensorflow.keras.layers.Multiply', 'layers.Multiply', ([], {}), '()\n', (6312, 6314), False, 'from tensorflow.keras import layers\n'), ((6356, 6407), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(1)'], {'activation': '"""tanh"""', 'name': '"""delta_s3"""'}), "(1, activation='tanh', name='delta_s3')\n", (6368, 6407), False, 'from tensorflow.keras import layers\n'), ((6444, 6461), 'tensorflow.keras.layers.Multiply', 'layers.Multiply', ([], {}), '()\n', (6459, 6461), False, 'from tensorflow.keras import layers\n'), ((6512, 6566), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(2 * self.stage_num[2])'], {'activation': '"""relu"""'}), "(2 * self.stage_num[2], activation='relu')\n", (6524, 6566), False, 'from tensorflow.keras import layers\n'), ((6598, 6683), 'tensorflow.keras.layers.Dense', 'layers.Dense', ([], {'units': 'self.stage_num[2]', 'activation': '"""relu"""', 'name': '"""pred_age_stage3"""'}), "(units=self.stage_num[2], activation='relu', name='pred_age_stage3'\n )\n", (6610, 6683), False, 'from tensorflow.keras import layers\n'), ((6850, 6938), 'tensorflow.keras.layers.Dense', 'layers.Dense', ([], {'units': 'self.stage_num[2]', 'activation': '"""tanh"""', 'name': '"""local_delta_stage3"""'}), "(units=self.stage_num[2], activation='tanh', name=\n 'local_delta_stage3')\n", (6862, 6938), False, 'from tensorflow.keras import layers\n'), ((7428, 7448), 'tensorflow.keras.backend.expand_dims', 'K.expand_dims', (['a', '(-1)'], {}), '(a, -1)\n', (7441, 7448), True, 'from tensorflow.keras import backend as K\n'), ((7619, 7639), 'tensorflow.keras.backend.expand_dims', 'K.expand_dims', (['b', '(-1)'], {}), '(b, -1)\n', (7632, 7639), True, 'from tensorflow.keras import backend as K\n'), ((7841, 7861), 'tensorflow.keras.backend.expand_dims', 'K.expand_dims', (['c', '(-1)'], {}), '(c, -1)\n', (7854, 7861), True, 'from tensorflow.keras import backend as K\n'), ((8129, 8327), 'tensorflow.keras.layers.Lambda', 'layers.Lambda', (['merge_age'], {'arguments': "{'s1': self.stage_num[0], 's2': self.stage_num[1], 's3': self.stage_num[2],\n 'lambda_local': self.lambda_local, 'lambda_d': self.lambda_d}", 'name': '"""pred_a"""'}), "(merge_age, arguments={'s1': self.stage_num[0], 's2': self.\n stage_num[1], 's3': self.stage_num[2], 'lambda_local': self.\n lambda_local, 'lambda_d': self.lambda_d}, 
name='pred_a')\n", (8142, 8327), False, 'from tensorflow.keras import layers\n'), ((8920, 8942), 'tensorflow.keras.backend.image_dim_ordering', 'K.image_dim_ordering', ([], {}), '()\n', (8940, 8942), True, 'from tensorflow.keras import backend as K\n'), ((8964, 9006), 'logging.debug', 'logging.debug', (['"""image_dim_ordering = \'th\'"""'], {}), '("image_dim_ordering = \'th\'")\n', (8977, 9006), False, 'import logging\n'), ((9128, 9170), 'logging.debug', 'logging.debug', (['"""image_dim_ordering = \'tf\'"""'], {}), '("image_dim_ordering = \'tf\'")\n', (9141, 9170), False, 'import logging\n'), ((9646, 9664), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(32)', '(3, 3)'], {}), '(32, (3, 3))\n', (9652, 9664), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((9685, 9728), 'tensorflow.keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': 'self._channel_axis'}), '(axis=self._channel_axis)\n', (9703, 9728), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((9744, 9769), 'tensorflow.keras.layers.Activation', 'layers.Activation', (['"""relu"""'], {}), "('relu')\n", (9761, 9769), False, 'from tensorflow.keras import layers\n'), ((9792, 9814), 'tensorflow.keras.layers.AveragePooling2D', 'AveragePooling2D', (['(2)', '(2)'], {}), '(2, 2)\n', (9808, 9814), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((9830, 9848), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(32)', '(3, 3)'], {}), '(32, (3, 3))\n', (9836, 9848), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((9871, 9914), 'tensorflow.keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': 'self._channel_axis'}), '(axis=self._channel_axis)\n', (9889, 9914), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((9930, 9955), 'tensorflow.keras.layers.Activation', 'layers.Activation', (['"""relu"""'], {}), "('relu')\n", (9947, 9955), False, 'from tensorflow.keras import layers\n'), ((9978, 10000), 'tensorflow.keras.layers.AveragePooling2D', 'AveragePooling2D', (['(2)', '(2)'], {}), '(2, 2)\n', (9994, 10000), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((10016, 10034), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(32)', '(3, 3)'], {}), '(32, (3, 3))\n', (10022, 10034), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((10057, 10100), 'tensorflow.keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': 'self._channel_axis'}), '(axis=self._channel_axis)\n', (10075, 10100), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((10116, 10141), 'tensorflow.keras.layers.Activation', 'layers.Activation', (['"""relu"""'], {}), "('relu')\n", (10133, 10141), False, 'from tensorflow.keras import layers\n'), ((10164, 10186), 'tensorflow.keras.layers.AveragePooling2D', 'AveragePooling2D', (['(2)', '(2)'], {}), '(2, 2)\n', (10180, 10186), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((10202, 10220), 'tensorflow.keras.layers.Conv2D', 'Conv2D', 
(['(32)', '(3, 3)'], {}), '(32, (3, 3))\n', (10208, 10220), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((10243, 10286), 'tensorflow.keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': 'self._channel_axis'}), '(axis=self._channel_axis)\n', (10261, 10286), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((10302, 10327), 'tensorflow.keras.layers.Activation', 'layers.Activation', (['"""relu"""'], {}), "('relu')\n", (10319, 10327), False, 'from tensorflow.keras import layers\n'), ((10475, 10493), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(16)', '(3, 3)'], {}), '(16, (3, 3))\n', (10481, 10493), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((10514, 10557), 'tensorflow.keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': 'self._channel_axis'}), '(axis=self._channel_axis)\n', (10532, 10557), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((10573, 10598), 'tensorflow.keras.layers.Activation', 'layers.Activation', (['"""tanh"""'], {}), "('tanh')\n", (10590, 10598), False, 'from tensorflow.keras import layers\n'), ((10621, 10639), 'tensorflow.keras.layers.MaxPooling2D', 'MaxPooling2D', (['(2)', '(2)'], {}), '(2, 2)\n', (10633, 10639), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((10655, 10673), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(16)', '(3, 3)'], {}), '(16, (3, 3))\n', (10661, 10673), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((10696, 10739), 'tensorflow.keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': 'self._channel_axis'}), '(axis=self._channel_axis)\n', (10714, 10739), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((10755, 10780), 'tensorflow.keras.layers.Activation', 'layers.Activation', (['"""tanh"""'], {}), "('tanh')\n", (10772, 10780), False, 'from tensorflow.keras import layers\n'), ((10803, 10821), 'tensorflow.keras.layers.MaxPooling2D', 'MaxPooling2D', (['(2)', '(2)'], {}), '(2, 2)\n', (10815, 10821), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((10837, 10855), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(16)', '(3, 3)'], {}), '(16, (3, 3))\n', (10843, 10855), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((10878, 10921), 'tensorflow.keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': 'self._channel_axis'}), '(axis=self._channel_axis)\n', (10896, 10921), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((10937, 10962), 'tensorflow.keras.layers.Activation', 'layers.Activation', (['"""tanh"""'], {}), "('tanh')\n", (10954, 10962), False, 'from tensorflow.keras import layers\n'), ((10985, 11003), 'tensorflow.keras.layers.MaxPooling2D', 'MaxPooling2D', (['(2)', '(2)'], {}), '(2, 2)\n', (10997, 11003), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), 
((11019, 11037), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(16)', '(3, 3)'], {}), '(16, (3, 3))\n', (11025, 11037), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((11060, 11103), 'tensorflow.keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': 'self._channel_axis'}), '(axis=self._channel_axis)\n', (11078, 11103), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((11119, 11144), 'tensorflow.keras.layers.Activation', 'layers.Activation', (['"""tanh"""'], {}), "('tanh')\n", (11136, 11144), False, 'from tensorflow.keras import layers\n'), ((11327, 11364), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(10)', '(1, 1)'], {'activation': '"""relu"""'}), "(10, (1, 1), activation='relu')\n", (11333, 11364), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((11387, 11403), 'tensorflow.keras.layers.Flatten', 'layers.Flatten', ([], {}), '()\n', (11401, 11403), False, 'from tensorflow.keras import layers\n'), ((11437, 11456), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['(0.2)'], {}), '(0.2)\n', (11451, 11456), False, 'from tensorflow.keras import layers\n'), ((11490, 11546), 'tensorflow.keras.layers.Dense', 'layers.Dense', ([], {'units': 'self.stage_num[0]', 'activation': '"""relu"""'}), "(units=self.stage_num[0], activation='relu')\n", (11502, 11546), False, 'from tensorflow.keras import layers\n'), ((11603, 11640), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(10)', '(1, 1)'], {'activation': '"""relu"""'}), "(10, (1, 1), activation='relu')\n", (11609, 11640), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((11663, 11679), 'tensorflow.keras.layers.Flatten', 'layers.Flatten', ([], {}), '()\n', (11677, 11679), False, 'from tensorflow.keras import layers\n'), ((11713, 11732), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['(0.2)'], {}), '(0.2)\n', (11727, 11732), False, 'from tensorflow.keras import layers\n'), ((11766, 11822), 'tensorflow.keras.layers.Dense', 'layers.Dense', ([], {'units': 'self.stage_num[0]', 'activation': '"""relu"""'}), "(units=self.stage_num[0], activation='relu')\n", (11778, 11822), False, 'from tensorflow.keras import layers\n'), ((11882, 11899), 'tensorflow.keras.layers.Multiply', 'layers.Multiply', ([], {}), '()\n', (11897, 11899), False, 'from tensorflow.keras import layers\n'), ((11941, 11992), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(1)'], {'activation': '"""tanh"""', 'name': '"""delta_s1"""'}), "(1, activation='tanh', name='delta_s1')\n", (11953, 11992), False, 'from tensorflow.keras import layers\n'), ((12025, 12042), 'tensorflow.keras.layers.Multiply', 'layers.Multiply', ([], {}), '()\n', (12040, 12042), False, 'from tensorflow.keras import layers\n'), ((12091, 12145), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(2 * self.stage_num[0])'], {'activation': '"""relu"""'}), "(2 * self.stage_num[0], activation='relu')\n", (12103, 12145), False, 'from tensorflow.keras import layers\n'), ((12173, 12249), 'tensorflow.keras.layers.Dense', 'layers.Dense', ([], {'units': 'self.stage_num[0]', 'activation': '"""relu"""', 'name': '"""pred_stage1"""'}), "(units=self.stage_num[0], activation='relu', name='pred_stage1')\n", (12185, 12249), False, 'from tensorflow.keras import layers\n'), ((12300, 12388), 
'tensorflow.keras.layers.Dense', 'layers.Dense', ([], {'units': 'self.stage_num[0]', 'activation': '"""tanh"""', 'name': '"""local_delta_stage1"""'}), "(units=self.stage_num[0], activation='tanh', name=\n 'local_delta_stage1')\n", (12312, 12388), False, 'from tensorflow.keras import layers\n'), ((12567, 12604), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(10)', '(1, 1)'], {'activation': '"""relu"""'}), "(10, (1, 1), activation='relu')\n", (12573, 12604), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((12634, 12652), 'tensorflow.keras.layers.MaxPooling2D', 'MaxPooling2D', (['(4)', '(4)'], {}), '(4, 4)\n', (12646, 12652), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((12682, 12698), 'tensorflow.keras.layers.Flatten', 'layers.Flatten', ([], {}), '()\n', (12696, 12698), False, 'from tensorflow.keras import layers\n'), ((12732, 12751), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['(0.2)'], {}), '(0.2)\n', (12746, 12751), False, 'from tensorflow.keras import layers\n'), ((12785, 12835), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['self.stage_num[1]'], {'activation': '"""relu"""'}), "(self.stage_num[1], activation='relu')\n", (12797, 12835), False, 'from tensorflow.keras import layers\n'), ((12870, 12907), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(10)', '(1, 1)'], {'activation': '"""relu"""'}), "(10, (1, 1), activation='relu')\n", (12876, 12907), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((12937, 12959), 'tensorflow.keras.layers.AveragePooling2D', 'AveragePooling2D', (['(4)', '(4)'], {}), '(4, 4)\n', (12953, 12959), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((12989, 13005), 'tensorflow.keras.layers.Flatten', 'layers.Flatten', ([], {}), '()\n', (13003, 13005), False, 'from tensorflow.keras import layers\n'), ((13039, 13058), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['(0.2)'], {}), '(0.2)\n', (13053, 13058), False, 'from tensorflow.keras import layers\n'), ((13092, 13142), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['self.stage_num[1]'], {'activation': '"""relu"""'}), "(self.stage_num[1], activation='relu')\n", (13104, 13142), False, 'from tensorflow.keras import layers\n'), ((13180, 13197), 'tensorflow.keras.layers.Multiply', 'layers.Multiply', ([], {}), '()\n', (13195, 13197), False, 'from tensorflow.keras import layers\n'), ((13239, 13290), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(1)'], {'activation': '"""tanh"""', 'name': '"""delta_s2"""'}), "(1, activation='tanh', name='delta_s2')\n", (13251, 13290), False, 'from tensorflow.keras import layers\n'), ((13323, 13340), 'tensorflow.keras.layers.Multiply', 'layers.Multiply', ([], {}), '()\n', (13338, 13340), False, 'from tensorflow.keras import layers\n'), ((13389, 13443), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(2 * self.stage_num[1])'], {'activation': '"""relu"""'}), "(2 * self.stage_num[1], activation='relu')\n", (13401, 13443), False, 'from tensorflow.keras import layers\n'), ((13471, 13547), 'tensorflow.keras.layers.Dense', 'layers.Dense', ([], {'units': 'self.stage_num[1]', 'activation': '"""relu"""', 'name': '"""pred_stage2"""'}), "(units=self.stage_num[1], activation='relu', name='pred_stage2')\n", (13483, 13547), False, 'from tensorflow.keras import 
layers\n'), ((13598, 13686), 'tensorflow.keras.layers.Dense', 'layers.Dense', ([], {'units': 'self.stage_num[1]', 'activation': '"""tanh"""', 'name': '"""local_delta_stage2"""'}), "(units=self.stage_num[1], activation='tanh', name=\n 'local_delta_stage2')\n", (13610, 13686), False, 'from tensorflow.keras import layers\n'), ((13865, 13902), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(10)', '(1, 1)'], {'activation': '"""relu"""'}), "(10, (1, 1), activation='relu')\n", (13871, 13902), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((13932, 13950), 'tensorflow.keras.layers.MaxPooling2D', 'MaxPooling2D', (['(8)', '(8)'], {}), '(8, 8)\n', (13944, 13950), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((13980, 13996), 'tensorflow.keras.layers.Flatten', 'layers.Flatten', ([], {}), '()\n', (13994, 13996), False, 'from tensorflow.keras import layers\n'), ((14030, 14049), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['(0.2)'], {}), '(0.2)\n', (14044, 14049), False, 'from tensorflow.keras import layers\n'), ((14083, 14133), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['self.stage_num[2]'], {'activation': '"""relu"""'}), "(self.stage_num[2], activation='relu')\n", (14095, 14133), False, 'from tensorflow.keras import layers\n'), ((14168, 14205), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(10)', '(1, 1)'], {'activation': '"""relu"""'}), "(10, (1, 1), activation='relu')\n", (14174, 14205), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((14235, 14257), 'tensorflow.keras.layers.AveragePooling2D', 'AveragePooling2D', (['(8)', '(8)'], {}), '(8, 8)\n', (14251, 14257), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((14287, 14303), 'tensorflow.keras.layers.Flatten', 'layers.Flatten', ([], {}), '()\n', (14301, 14303), False, 'from tensorflow.keras import layers\n'), ((14337, 14356), 'tensorflow.keras.layers.Dropout', 'layers.Dropout', (['(0.2)'], {}), '(0.2)\n', (14351, 14356), False, 'from tensorflow.keras import layers\n'), ((14390, 14440), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['self.stage_num[2]'], {'activation': '"""relu"""'}), "(self.stage_num[2], activation='relu')\n", (14402, 14440), False, 'from tensorflow.keras import layers\n'), ((14478, 14495), 'tensorflow.keras.layers.Multiply', 'layers.Multiply', ([], {}), '()\n', (14493, 14495), False, 'from tensorflow.keras import layers\n'), ((14537, 14588), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(1)'], {'activation': '"""tanh"""', 'name': '"""delta_s3"""'}), "(1, activation='tanh', name='delta_s3')\n", (14549, 14588), False, 'from tensorflow.keras import layers\n'), ((14621, 14638), 'tensorflow.keras.layers.Multiply', 'layers.Multiply', ([], {}), '()\n', (14636, 14638), False, 'from tensorflow.keras import layers\n'), ((14687, 14741), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(2 * self.stage_num[2])'], {'activation': '"""relu"""'}), "(2 * self.stage_num[2], activation='relu')\n", (14699, 14741), False, 'from tensorflow.keras import layers\n'), ((14769, 14845), 'tensorflow.keras.layers.Dense', 'layers.Dense', ([], {'units': 'self.stage_num[2]', 'activation': '"""relu"""', 'name': '"""pred_stage3"""'}), "(units=self.stage_num[2], activation='relu', name='pred_stage3')\n", (14781, 14845), False, 
'from tensorflow.keras import layers\n'), ((14896, 14984), 'tensorflow.keras.layers.Dense', 'layers.Dense', ([], {'units': 'self.stage_num[2]', 'activation': '"""tanh"""', 'name': '"""local_delta_stage3"""'}), "(units=self.stage_num[2], activation='tanh', name=\n 'local_delta_stage3')\n", (14908, 14984), False, 'from tensorflow.keras import layers\n'), ((15440, 15460), 'tensorflow.keras.backend.expand_dims', 'K.expand_dims', (['a', '(-1)'], {}), '(a, -1)\n', (15453, 15460), True, 'from tensorflow.keras import backend as K\n'), ((15631, 15651), 'tensorflow.keras.backend.expand_dims', 'K.expand_dims', (['b', '(-1)'], {}), '(b, -1)\n', (15644, 15651), True, 'from tensorflow.keras import backend as K\n'), ((15853, 15873), 'tensorflow.keras.backend.expand_dims', 'K.expand_dims', (['c', '(-1)'], {}), '(c, -1)\n', (15866, 15873), True, 'from tensorflow.keras import backend as K\n'), ((16139, 16336), 'tensorflow.keras.layers.Lambda', 'layers.Lambda', (['SSR_module'], {'arguments': "{'s1': self.stage_num[0], 's2': self.stage_num[1], 's3': self.stage_num[2],\n 'lambda_local': self.lambda_local, 'lambda_d': self.lambda_d}", 'name': '"""pred"""'}), "(SSR_module, arguments={'s1': self.stage_num[0], 's2': self.\n stage_num[1], 's3': self.stage_num[2], 'lambda_local': self.\n lambda_local, 'lambda_d': self.lambda_d}, name='pred')\n", (16152, 16336), False, 'from tensorflow.keras import layers\n'), ((16915, 16937), 'tensorflow.keras.backend.image_dim_ordering', 'K.image_dim_ordering', ([], {}), '()\n', (16935, 16937), True, 'from tensorflow.keras import backend as K\n'), ((16959, 17001), 'logging.debug', 'logging.debug', (['"""image_dim_ordering = \'th\'"""'], {}), '("image_dim_ordering = \'th\'")\n', (16972, 17001), False, 'import logging\n'), ((17123, 17165), 'logging.debug', 'logging.debug', (['"""image_dim_ordering = \'tf\'"""'], {}), '("image_dim_ordering = \'tf\'")\n', (17136, 17165), False, 'import logging\n'), ((17636, 17679), 'tensorflow.keras.layers.SeparableConv2D', 'SeparableConv2D', (['(16)', '(3, 3)'], {'padding': '"""same"""'}), "(16, (3, 3), padding='same')\n", (17651, 17679), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((17704, 17731), 'tensorflow.keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': '(-1)'}), '(axis=-1)\n', (17722, 17731), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((17747, 17772), 'tensorflow.keras.layers.Activation', 'layers.Activation', (['"""relu"""'], {}), "('relu')\n", (17764, 17772), False, 'from tensorflow.keras import layers\n'), ((17795, 17819), 'tensorflow.keras.layers.AveragePooling2D', 'AveragePooling2D', (['(2, 2)'], {}), '((2, 2))\n', (17811, 17819), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((17835, 17878), 'tensorflow.keras.layers.SeparableConv2D', 'SeparableConv2D', (['(32)', '(3, 3)'], {'padding': '"""same"""'}), "(32, (3, 3), padding='same')\n", (17850, 17878), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((17901, 17928), 'tensorflow.keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': '(-1)'}), '(axis=-1)\n', (17919, 17928), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((17944, 17969), 
'tensorflow.keras.layers.Activation', 'layers.Activation', (['"""relu"""'], {}), "('relu')\n", (17961, 17969), False, 'from tensorflow.keras import layers\n'), ((17985, 18028), 'tensorflow.keras.layers.SeparableConv2D', 'SeparableConv2D', (['(32)', '(3, 3)'], {'padding': '"""same"""'}), "(32, (3, 3), padding='same')\n", (18000, 18028), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((18044, 18071), 'tensorflow.keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': '(-1)'}), '(axis=-1)\n', (18062, 18071), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((18087, 18112), 'tensorflow.keras.layers.Activation', 'layers.Activation', (['"""relu"""'], {}), "('relu')\n", (18104, 18112), False, 'from tensorflow.keras import layers\n'), ((18135, 18159), 'tensorflow.keras.layers.AveragePooling2D', 'AveragePooling2D', (['(2, 2)'], {}), '((2, 2))\n', (18151, 18159), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((18175, 18218), 'tensorflow.keras.layers.SeparableConv2D', 'SeparableConv2D', (['(64)', '(3, 3)'], {'padding': '"""same"""'}), "(64, (3, 3), padding='same')\n", (18190, 18218), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((18241, 18268), 'tensorflow.keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': '(-1)'}), '(axis=-1)\n', (18259, 18268), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((18284, 18309), 'tensorflow.keras.layers.Activation', 'layers.Activation', (['"""relu"""'], {}), "('relu')\n", (18301, 18309), False, 'from tensorflow.keras import layers\n'), ((18325, 18368), 'tensorflow.keras.layers.SeparableConv2D', 'SeparableConv2D', (['(64)', '(3, 3)'], {'padding': '"""same"""'}), "(64, (3, 3), padding='same')\n", (18340, 18368), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((18384, 18411), 'tensorflow.keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': '(-1)'}), '(axis=-1)\n', (18402, 18411), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((18427, 18452), 'tensorflow.keras.layers.Activation', 'layers.Activation', (['"""relu"""'], {}), "('relu')\n", (18444, 18452), False, 'from tensorflow.keras import layers\n'), ((18475, 18499), 'tensorflow.keras.layers.AveragePooling2D', 'AveragePooling2D', (['(2, 2)'], {}), '((2, 2))\n', (18491, 18499), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((18515, 18559), 'tensorflow.keras.layers.SeparableConv2D', 'SeparableConv2D', (['(128)', '(3, 3)'], {'padding': '"""same"""'}), "(128, (3, 3), padding='same')\n", (18530, 18559), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((18582, 18609), 'tensorflow.keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': '(-1)'}), '(axis=-1)\n', (18600, 18609), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((18625, 18650), 'tensorflow.keras.layers.Activation', 'layers.Activation', 
(['"""relu"""'], {}), "('relu')\n", (18642, 18650), False, 'from tensorflow.keras import layers\n'), ((18666, 18710), 'tensorflow.keras.layers.SeparableConv2D', 'SeparableConv2D', (['(128)', '(3, 3)'], {'padding': '"""same"""'}), "(128, (3, 3), padding='same')\n", (18681, 18710), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((18726, 18753), 'tensorflow.keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': '(-1)'}), '(axis=-1)\n', (18744, 18753), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((18776, 18801), 'tensorflow.keras.layers.Activation', 'layers.Activation', (['"""relu"""'], {}), "('relu')\n", (18793, 18801), False, 'from tensorflow.keras import layers\n'), ((18949, 18992), 'tensorflow.keras.layers.SeparableConv2D', 'SeparableConv2D', (['(16)', '(3, 3)'], {'padding': '"""same"""'}), "(16, (3, 3), padding='same')\n", (18964, 18992), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((19017, 19044), 'tensorflow.keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': '(-1)'}), '(axis=-1)\n', (19035, 19044), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((19060, 19085), 'tensorflow.keras.layers.Activation', 'layers.Activation', (['"""tanh"""'], {}), "('tanh')\n", (19077, 19085), False, 'from tensorflow.keras import layers\n'), ((19108, 19128), 'tensorflow.keras.layers.MaxPooling2D', 'MaxPooling2D', (['(2, 2)'], {}), '((2, 2))\n', (19120, 19128), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((19144, 19187), 'tensorflow.keras.layers.SeparableConv2D', 'SeparableConv2D', (['(32)', '(3, 3)'], {'padding': '"""same"""'}), "(32, (3, 3), padding='same')\n", (19159, 19187), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((19210, 19237), 'tensorflow.keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': '(-1)'}), '(axis=-1)\n', (19228, 19237), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((19253, 19278), 'tensorflow.keras.layers.Activation', 'layers.Activation', (['"""tanh"""'], {}), "('tanh')\n", (19270, 19278), False, 'from tensorflow.keras import layers\n'), ((19294, 19337), 'tensorflow.keras.layers.SeparableConv2D', 'SeparableConv2D', (['(32)', '(3, 3)'], {'padding': '"""same"""'}), "(32, (3, 3), padding='same')\n", (19309, 19337), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((19353, 19380), 'tensorflow.keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': '(-1)'}), '(axis=-1)\n', (19371, 19380), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((19396, 19421), 'tensorflow.keras.layers.Activation', 'layers.Activation', (['"""tanh"""'], {}), "('tanh')\n", (19413, 19421), False, 'from tensorflow.keras import layers\n'), ((19444, 19464), 'tensorflow.keras.layers.MaxPooling2D', 'MaxPooling2D', (['(2, 2)'], {}), '((2, 2))\n', (19456, 19464), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, 
SeparableConv2D\n'), ((19480, 19523), 'tensorflow.keras.layers.SeparableConv2D', 'SeparableConv2D', (['(64)', '(3, 3)'], {'padding': '"""same"""'}), "(64, (3, 3), padding='same')\n", (19495, 19523), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((19546, 19573), 'tensorflow.keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': '(-1)'}), '(axis=-1)\n', (19564, 19573), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((19589, 19614), 'tensorflow.keras.layers.Activation', 'layers.Activation', (['"""tanh"""'], {}), "('tanh')\n", (19606, 19614), False, 'from tensorflow.keras import layers\n'), ((19630, 19673), 'tensorflow.keras.layers.SeparableConv2D', 'SeparableConv2D', (['(64)', '(3, 3)'], {'padding': '"""same"""'}), "(64, (3, 3), padding='same')\n", (19645, 19673), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((19689, 19716), 'tensorflow.keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': '(-1)'}), '(axis=-1)\n', (19707, 19716), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((19732, 19757), 'tensorflow.keras.layers.Activation', 'layers.Activation', (['"""tanh"""'], {}), "('tanh')\n", (19749, 19757), False, 'from tensorflow.keras import layers\n'), ((19780, 19800), 'tensorflow.keras.layers.MaxPooling2D', 'MaxPooling2D', (['(2, 2)'], {}), '((2, 2))\n', (19792, 19800), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((19816, 19860), 'tensorflow.keras.layers.SeparableConv2D', 'SeparableConv2D', (['(128)', '(3, 3)'], {'padding': '"""same"""'}), "(128, (3, 3), padding='same')\n", (19831, 19860), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((19883, 19910), 'tensorflow.keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': '(-1)'}), '(axis=-1)\n', (19901, 19910), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((19926, 19951), 'tensorflow.keras.layers.Activation', 'layers.Activation', (['"""tanh"""'], {}), "('tanh')\n", (19943, 19951), False, 'from tensorflow.keras import layers\n'), ((19967, 20011), 'tensorflow.keras.layers.SeparableConv2D', 'SeparableConv2D', (['(128)', '(3, 3)'], {'padding': '"""same"""'}), "(128, (3, 3), padding='same')\n", (19982, 20011), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((20027, 20054), 'tensorflow.keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': '(-1)'}), '(axis=-1)\n', (20045, 20054), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((20077, 20102), 'tensorflow.keras.layers.Activation', 'layers.Activation', (['"""tanh"""'], {}), "('tanh')\n", (20094, 20102), False, 'from tensorflow.keras import layers\n'), ((20285, 20322), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(64)', '(1, 1)'], {'activation': '"""tanh"""'}), "(64, (1, 1), activation='tanh')\n", (20291, 20322), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((20352, 
20372), 'tensorflow.keras.layers.MaxPooling2D', 'MaxPooling2D', (['(2, 2)'], {}), '((2, 2))\n', (20364, 20372), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((20403, 20440), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(64)', '(1, 1)'], {'activation': '"""relu"""'}), "(64, (1, 1), activation='relu')\n", (20409, 20440), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((20470, 20494), 'tensorflow.keras.layers.AveragePooling2D', 'AveragePooling2D', (['(2, 2)'], {}), '((2, 2))\n', (20486, 20494), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((20528, 20545), 'tensorflow.keras.layers.Multiply', 'layers.Multiply', ([], {}), '()\n', (20543, 20545), False, 'from tensorflow.keras import layers\n'), ((20590, 20606), 'tensorflow.keras.layers.Flatten', 'layers.Flatten', ([], {}), '()\n', (20604, 20606), False, 'from tensorflow.keras import layers\n'), ((20644, 20697), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(2 * self.num_classes)'], {'activation': '"""tanh"""'}), "(2 * self.num_classes, activation='tanh')\n", (20656, 20697), False, 'from tensorflow.keras import layers\n'), ((20752, 20818), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['self.num_classes'], {'activation': '"""tanh"""', 'name': '"""delta_s1"""'}), "(self.num_classes, activation='tanh', name='delta_s1')\n", (20764, 20818), False, 'from tensorflow.keras import layers\n'), ((20881, 20934), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(2 * self.num_classes)'], {'activation': '"""tanh"""'}), "(2 * self.num_classes, activation='tanh')\n", (20893, 20934), False, 'from tensorflow.keras import layers\n'), ((20989, 21076), 'tensorflow.keras.layers.Dense', 'layers.Dense', ([], {'units': 'self.num_classes', 'activation': '"""tanh"""', 'name': '"""local_delta_stage1"""'}), "(units=self.num_classes, activation='tanh', name=\n 'local_delta_stage1')\n", (21001, 21076), False, 'from tensorflow.keras import layers\n'), ((21133, 21202), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(self.stage_num[0] * self.num_classes)'], {'activation': '"""relu"""'}), "(self.stage_num[0] * self.num_classes, activation='relu')\n", (21145, 21202), False, 'from tensorflow.keras import layers\n'), ((21258, 21311), 'tensorflow.keras.layers.Reshape', 'layers.Reshape', (['(self.num_classes, self.stage_num[0])'], {}), '((self.num_classes, self.stage_num[0]))\n', (21272, 21311), False, 'from tensorflow.keras import layers\n'), ((21477, 21514), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(64)', '(1, 1)'], {'activation': '"""tanh"""'}), "(64, (1, 1), activation='tanh')\n", (21483, 21514), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((21544, 21564), 'tensorflow.keras.layers.MaxPooling2D', 'MaxPooling2D', (['(2, 2)'], {}), '((2, 2))\n', (21556, 21564), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((21595, 21632), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(64)', '(1, 1)'], {'activation': '"""relu"""'}), "(64, (1, 1), activation='relu')\n", (21601, 21632), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((21662, 21686), 'tensorflow.keras.layers.AveragePooling2D', 
'AveragePooling2D', (['(2, 2)'], {}), '((2, 2))\n', (21678, 21686), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((21720, 21737), 'tensorflow.keras.layers.Multiply', 'layers.Multiply', ([], {}), '()\n', (21735, 21737), False, 'from tensorflow.keras import layers\n'), ((21782, 21798), 'tensorflow.keras.layers.Flatten', 'layers.Flatten', ([], {}), '()\n', (21796, 21798), False, 'from tensorflow.keras import layers\n'), ((21836, 21889), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(2 * self.num_classes)'], {'activation': '"""tanh"""'}), "(2 * self.num_classes, activation='tanh')\n", (21848, 21889), False, 'from tensorflow.keras import layers\n'), ((21944, 22010), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['self.num_classes'], {'activation': '"""tanh"""', 'name': '"""delta_s2"""'}), "(self.num_classes, activation='tanh', name='delta_s2')\n", (21956, 22010), False, 'from tensorflow.keras import layers\n'), ((22073, 22126), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(2 * self.num_classes)'], {'activation': '"""tanh"""'}), "(2 * self.num_classes, activation='tanh')\n", (22085, 22126), False, 'from tensorflow.keras import layers\n'), ((22181, 22268), 'tensorflow.keras.layers.Dense', 'layers.Dense', ([], {'units': 'self.num_classes', 'activation': '"""tanh"""', 'name': '"""local_delta_stage2"""'}), "(units=self.num_classes, activation='tanh', name=\n 'local_delta_stage2')\n", (22193, 22268), False, 'from tensorflow.keras import layers\n'), ((22325, 22394), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(self.stage_num[1] * self.num_classes)'], {'activation': '"""relu"""'}), "(self.stage_num[1] * self.num_classes, activation='relu')\n", (22337, 22394), False, 'from tensorflow.keras import layers\n'), ((22450, 22503), 'tensorflow.keras.layers.Reshape', 'layers.Reshape', (['(self.num_classes, self.stage_num[1])'], {}), '((self.num_classes, self.stage_num[1]))\n', (22464, 22503), False, 'from tensorflow.keras import layers\n'), ((22669, 22706), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(64)', '(1, 1)'], {'activation': '"""tanh"""'}), "(64, (1, 1), activation='tanh')\n", (22675, 22706), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((22736, 22756), 'tensorflow.keras.layers.MaxPooling2D', 'MaxPooling2D', (['(2, 2)'], {}), '((2, 2))\n', (22748, 22756), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((22787, 22824), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(64)', '(1, 1)'], {'activation': '"""relu"""'}), "(64, (1, 1), activation='relu')\n", (22793, 22824), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((22854, 22878), 'tensorflow.keras.layers.AveragePooling2D', 'AveragePooling2D', (['(2, 2)'], {}), '((2, 2))\n', (22870, 22878), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((22912, 22929), 'tensorflow.keras.layers.Multiply', 'layers.Multiply', ([], {}), '()\n', (22927, 22929), False, 'from tensorflow.keras import layers\n'), ((22974, 22990), 'tensorflow.keras.layers.Flatten', 'layers.Flatten', ([], {}), '()\n', (22988, 22990), False, 'from tensorflow.keras import layers\n'), ((23028, 23081), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(2 * self.num_classes)'], 
{'activation': '"""tanh"""'}), "(2 * self.num_classes, activation='tanh')\n", (23040, 23081), False, 'from tensorflow.keras import layers\n'), ((23136, 23202), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['self.num_classes'], {'activation': '"""tanh"""', 'name': '"""delta_s3"""'}), "(self.num_classes, activation='tanh', name='delta_s3')\n", (23148, 23202), False, 'from tensorflow.keras import layers\n'), ((23265, 23318), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(2 * self.num_classes)'], {'activation': '"""tanh"""'}), "(2 * self.num_classes, activation='tanh')\n", (23277, 23318), False, 'from tensorflow.keras import layers\n'), ((23373, 23460), 'tensorflow.keras.layers.Dense', 'layers.Dense', ([], {'units': 'self.num_classes', 'activation': '"""tanh"""', 'name': '"""local_delta_stage3"""'}), "(units=self.num_classes, activation='tanh', name=\n 'local_delta_stage3')\n", (23385, 23460), False, 'from tensorflow.keras import layers\n'), ((23517, 23586), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(self.stage_num[2] * self.num_classes)'], {'activation': '"""relu"""'}), "(self.stage_num[2] * self.num_classes, activation='relu')\n", (23529, 23586), False, 'from tensorflow.keras import layers\n'), ((23642, 23695), 'tensorflow.keras.layers.Reshape', 'layers.Reshape', (['(self.num_classes, self.stage_num[2])'], {}), '((self.num_classes, self.stage_num[2]))\n', (23656, 23695), False, 'from tensorflow.keras import layers\n'), ((24911, 25078), 'tensorflow.keras.layers.Lambda', 'layers.Lambda', (['SSR_module'], {'arguments': "{'s1': self.stage_num[0], 's2': self.stage_num[1], 's3': self.stage_num[2],\n 'lambda_d': self.lambda_d}", 'name': '"""pred_pose"""'}), "(SSR_module, arguments={'s1': self.stage_num[0], 's2': self.\n stage_num[1], 's3': self.stage_num[2], 'lambda_d': self.lambda_d}, name\n ='pred_pose')\n", (24924, 25078), False, 'from tensorflow.keras import layers\n'), ((25660, 25682), 'tensorflow.keras.backend.image_dim_ordering', 'K.image_dim_ordering', ([], {}), '()\n', (25680, 25682), True, 'from tensorflow.keras import backend as K\n'), ((25704, 25746), 'logging.debug', 'logging.debug', (['"""image_dim_ordering = \'th\'"""'], {}), '("image_dim_ordering = \'th\'")\n', (25717, 25746), False, 'import logging\n'), ((25868, 25910), 'logging.debug', 'logging.debug', (['"""image_dim_ordering = \'tf\'"""'], {}), '("image_dim_ordering = \'tf\'")\n', (25881, 25910), False, 'import logging\n'), ((26381, 26415), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(32)', '(3, 3)'], {'padding': '"""same"""'}), "(32, (3, 3), padding='same')\n", (26387, 26415), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((26440, 26483), 'tensorflow.keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': 'self._channel_axis'}), '(axis=self._channel_axis)\n', (26458, 26483), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((26499, 26524), 'tensorflow.keras.layers.Activation', 'layers.Activation', (['"""relu"""'], {}), "('relu')\n", (26516, 26524), False, 'from tensorflow.keras import layers\n'), ((26547, 26569), 'tensorflow.keras.layers.AveragePooling2D', 'AveragePooling2D', (['(2)', '(2)'], {}), '(2, 2)\n', (26563, 26569), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((26585, 26619), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(32)', '(3, 3)'], 
{'padding': '"""same"""'}), "(32, (3, 3), padding='same')\n", (26591, 26619), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((26642, 26685), 'tensorflow.keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': 'self._channel_axis'}), '(axis=self._channel_axis)\n', (26660, 26685), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((26701, 26726), 'tensorflow.keras.layers.Activation', 'layers.Activation', (['"""relu"""'], {}), "('relu')\n", (26718, 26726), False, 'from tensorflow.keras import layers\n'), ((26749, 26771), 'tensorflow.keras.layers.AveragePooling2D', 'AveragePooling2D', (['(2)', '(2)'], {}), '(2, 2)\n', (26765, 26771), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((26787, 26821), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(32)', '(3, 3)'], {'padding': '"""same"""'}), "(32, (3, 3), padding='same')\n", (26793, 26821), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((26844, 26887), 'tensorflow.keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': 'self._channel_axis'}), '(axis=self._channel_axis)\n', (26862, 26887), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((26903, 26928), 'tensorflow.keras.layers.Activation', 'layers.Activation', (['"""relu"""'], {}), "('relu')\n", (26920, 26928), False, 'from tensorflow.keras import layers\n'), ((26951, 26973), 'tensorflow.keras.layers.AveragePooling2D', 'AveragePooling2D', (['(2)', '(2)'], {}), '(2, 2)\n', (26967, 26973), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((26989, 27023), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(32)', '(3, 3)'], {'padding': '"""same"""'}), "(32, (3, 3), padding='same')\n", (26995, 27023), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((27046, 27089), 'tensorflow.keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': 'self._channel_axis'}), '(axis=self._channel_axis)\n', (27064, 27089), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((27112, 27137), 'tensorflow.keras.layers.Activation', 'layers.Activation', (['"""relu"""'], {}), "('relu')\n", (27129, 27137), False, 'from tensorflow.keras import layers\n'), ((27285, 27319), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(16)', '(3, 3)'], {'padding': '"""same"""'}), "(16, (3, 3), padding='same')\n", (27291, 27319), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((27344, 27387), 'tensorflow.keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': 'self._channel_axis'}), '(axis=self._channel_axis)\n', (27362, 27387), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((27403, 27428), 'tensorflow.keras.layers.Activation', 'layers.Activation', (['"""tanh"""'], {}), "('tanh')\n", (27420, 27428), False, 'from tensorflow.keras import layers\n'), ((27451, 27469), 'tensorflow.keras.layers.MaxPooling2D', 'MaxPooling2D', (['(2)', '(2)'], {}), 
'(2, 2)\n', (27463, 27469), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((27485, 27519), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(16)', '(3, 3)'], {'padding': '"""same"""'}), "(16, (3, 3), padding='same')\n", (27491, 27519), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((27542, 27585), 'tensorflow.keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': 'self._channel_axis'}), '(axis=self._channel_axis)\n', (27560, 27585), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((27601, 27626), 'tensorflow.keras.layers.Activation', 'layers.Activation', (['"""tanh"""'], {}), "('tanh')\n", (27618, 27626), False, 'from tensorflow.keras import layers\n'), ((27649, 27667), 'tensorflow.keras.layers.MaxPooling2D', 'MaxPooling2D', (['(2)', '(2)'], {}), '(2, 2)\n', (27661, 27667), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((27683, 27717), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(16)', '(3, 3)'], {'padding': '"""same"""'}), "(16, (3, 3), padding='same')\n", (27689, 27717), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((27740, 27783), 'tensorflow.keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': 'self._channel_axis'}), '(axis=self._channel_axis)\n', (27758, 27783), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((27799, 27824), 'tensorflow.keras.layers.Activation', 'layers.Activation', (['"""tanh"""'], {}), "('tanh')\n", (27816, 27824), False, 'from tensorflow.keras import layers\n'), ((27847, 27865), 'tensorflow.keras.layers.MaxPooling2D', 'MaxPooling2D', (['(2)', '(2)'], {}), '(2, 2)\n', (27859, 27865), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((27881, 27915), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(16)', '(3, 3)'], {'padding': '"""same"""'}), "(16, (3, 3), padding='same')\n", (27887, 27915), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((27938, 27981), 'tensorflow.keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': 'self._channel_axis'}), '(axis=self._channel_axis)\n', (27956, 27981), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((28004, 28029), 'tensorflow.keras.layers.Activation', 'layers.Activation', (['"""tanh"""'], {}), "('tanh')\n", (28021, 28029), False, 'from tensorflow.keras import layers\n'), ((28212, 28249), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(64)', '(1, 1)'], {'activation': '"""tanh"""'}), "(64, (1, 1), activation='tanh')\n", (28218, 28249), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((28279, 28299), 'tensorflow.keras.layers.MaxPooling2D', 'MaxPooling2D', (['(2, 2)'], {}), '((2, 2))\n', (28291, 28299), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((28330, 28367), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(64)', '(1, 1)'], {'activation': 
'"""relu"""'}), "(64, (1, 1), activation='relu')\n", (28336, 28367), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((28397, 28421), 'tensorflow.keras.layers.AveragePooling2D', 'AveragePooling2D', (['(2, 2)'], {}), '((2, 2))\n', (28413, 28421), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((28455, 28472), 'tensorflow.keras.layers.Multiply', 'layers.Multiply', ([], {}), '()\n', (28470, 28472), False, 'from tensorflow.keras import layers\n'), ((28517, 28533), 'tensorflow.keras.layers.Flatten', 'layers.Flatten', ([], {}), '()\n', (28531, 28533), False, 'from tensorflow.keras import layers\n'), ((28571, 28624), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(2 * self.num_classes)'], {'activation': '"""tanh"""'}), "(2 * self.num_classes, activation='tanh')\n", (28583, 28624), False, 'from tensorflow.keras import layers\n'), ((28679, 28745), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['self.num_classes'], {'activation': '"""tanh"""', 'name': '"""delta_s1"""'}), "(self.num_classes, activation='tanh', name='delta_s1')\n", (28691, 28745), False, 'from tensorflow.keras import layers\n'), ((28808, 28861), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(2 * self.num_classes)'], {'activation': '"""tanh"""'}), "(2 * self.num_classes, activation='tanh')\n", (28820, 28861), False, 'from tensorflow.keras import layers\n'), ((28916, 29003), 'tensorflow.keras.layers.Dense', 'layers.Dense', ([], {'units': 'self.num_classes', 'activation': '"""tanh"""', 'name': '"""local_delta_stage1"""'}), "(units=self.num_classes, activation='tanh', name=\n 'local_delta_stage1')\n", (28928, 29003), False, 'from tensorflow.keras import layers\n'), ((29060, 29129), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(self.stage_num[0] * self.num_classes)'], {'activation': '"""relu"""'}), "(self.stage_num[0] * self.num_classes, activation='relu')\n", (29072, 29129), False, 'from tensorflow.keras import layers\n'), ((29185, 29238), 'tensorflow.keras.layers.Reshape', 'layers.Reshape', (['(self.num_classes, self.stage_num[0])'], {}), '((self.num_classes, self.stage_num[0]))\n', (29199, 29238), False, 'from tensorflow.keras import layers\n'), ((29404, 29441), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(64)', '(1, 1)'], {'activation': '"""tanh"""'}), "(64, (1, 1), activation='tanh')\n", (29410, 29441), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((29471, 29491), 'tensorflow.keras.layers.MaxPooling2D', 'MaxPooling2D', (['(2, 2)'], {}), '((2, 2))\n', (29483, 29491), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((29522, 29559), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(64)', '(1, 1)'], {'activation': '"""relu"""'}), "(64, (1, 1), activation='relu')\n", (29528, 29559), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((29589, 29613), 'tensorflow.keras.layers.AveragePooling2D', 'AveragePooling2D', (['(2, 2)'], {}), '((2, 2))\n', (29605, 29613), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((29647, 29664), 'tensorflow.keras.layers.Multiply', 'layers.Multiply', ([], {}), '()\n', (29662, 29664), False, 'from tensorflow.keras import layers\n'), 
((29709, 29725), 'tensorflow.keras.layers.Flatten', 'layers.Flatten', ([], {}), '()\n', (29723, 29725), False, 'from tensorflow.keras import layers\n'), ((29763, 29816), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(2 * self.num_classes)'], {'activation': '"""tanh"""'}), "(2 * self.num_classes, activation='tanh')\n", (29775, 29816), False, 'from tensorflow.keras import layers\n'), ((29871, 29937), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['self.num_classes'], {'activation': '"""tanh"""', 'name': '"""delta_s2"""'}), "(self.num_classes, activation='tanh', name='delta_s2')\n", (29883, 29937), False, 'from tensorflow.keras import layers\n'), ((30000, 30053), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(2 * self.num_classes)'], {'activation': '"""tanh"""'}), "(2 * self.num_classes, activation='tanh')\n", (30012, 30053), False, 'from tensorflow.keras import layers\n'), ((30108, 30195), 'tensorflow.keras.layers.Dense', 'layers.Dense', ([], {'units': 'self.num_classes', 'activation': '"""tanh"""', 'name': '"""local_delta_stage2"""'}), "(units=self.num_classes, activation='tanh', name=\n 'local_delta_stage2')\n", (30120, 30195), False, 'from tensorflow.keras import layers\n'), ((30252, 30321), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(self.stage_num[1] * self.num_classes)'], {'activation': '"""relu"""'}), "(self.stage_num[1] * self.num_classes, activation='relu')\n", (30264, 30321), False, 'from tensorflow.keras import layers\n'), ((30377, 30430), 'tensorflow.keras.layers.Reshape', 'layers.Reshape', (['(self.num_classes, self.stage_num[1])'], {}), '((self.num_classes, self.stage_num[1]))\n', (30391, 30430), False, 'from tensorflow.keras import layers\n'), ((30596, 30633), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(64)', '(1, 1)'], {'activation': '"""tanh"""'}), "(64, (1, 1), activation='tanh')\n", (30602, 30633), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((30663, 30683), 'tensorflow.keras.layers.MaxPooling2D', 'MaxPooling2D', (['(2, 2)'], {}), '((2, 2))\n', (30675, 30683), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((30714, 30751), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(64)', '(1, 1)'], {'activation': '"""relu"""'}), "(64, (1, 1), activation='relu')\n", (30720, 30751), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((30781, 30805), 'tensorflow.keras.layers.AveragePooling2D', 'AveragePooling2D', (['(2, 2)'], {}), '((2, 2))\n', (30797, 30805), False, 'from tensorflow.keras.layers import AveragePooling2D, BatchNormalization, Conv2D, MaxPooling2D, SeparableConv2D\n'), ((30839, 30856), 'tensorflow.keras.layers.Multiply', 'layers.Multiply', ([], {}), '()\n', (30854, 30856), False, 'from tensorflow.keras import layers\n'), ((30901, 30917), 'tensorflow.keras.layers.Flatten', 'layers.Flatten', ([], {}), '()\n', (30915, 30917), False, 'from tensorflow.keras import layers\n'), ((30955, 31008), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(2 * self.num_classes)'], {'activation': '"""tanh"""'}), "(2 * self.num_classes, activation='tanh')\n", (30967, 31008), False, 'from tensorflow.keras import layers\n'), ((31063, 31129), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['self.num_classes'], {'activation': '"""tanh"""', 'name': '"""delta_s3"""'}), "(self.num_classes, activation='tanh', name='delta_s3')\n", (31075, 
31129), False, 'from tensorflow.keras import layers\n'), ((31192, 31245), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(2 * self.num_classes)'], {'activation': '"""tanh"""'}), "(2 * self.num_classes, activation='tanh')\n", (31204, 31245), False, 'from tensorflow.keras import layers\n'), ((31300, 31387), 'tensorflow.keras.layers.Dense', 'layers.Dense', ([], {'units': 'self.num_classes', 'activation': '"""tanh"""', 'name': '"""local_delta_stage3"""'}), "(units=self.num_classes, activation='tanh', name=\n 'local_delta_stage3')\n", (31312, 31387), False, 'from tensorflow.keras import layers\n'), ((31444, 31513), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['(self.stage_num[2] * self.num_classes)'], {'activation': '"""relu"""'}), "(self.stage_num[2] * self.num_classes, activation='relu')\n", (31456, 31513), False, 'from tensorflow.keras import layers\n'), ((31569, 31622), 'tensorflow.keras.layers.Reshape', 'layers.Reshape', (['(self.num_classes, self.stage_num[2])'], {}), '((self.num_classes, self.stage_num[2]))\n', (31583, 31622), False, 'from tensorflow.keras import layers\n'), ((32838, 33005), 'tensorflow.keras.layers.Lambda', 'layers.Lambda', (['SSR_module'], {'arguments': "{'s1': self.stage_num[0], 's2': self.stage_num[1], 's3': self.stage_num[2],\n 'lambda_d': self.lambda_d}", 'name': '"""pred_pose"""'}), "(SSR_module, arguments={'s1': self.stage_num[0], 's2': self.\n stage_num[1], 's3': self.stage_num[2], 'lambda_d': self.lambda_d}, name\n ='pred_pose')\n", (32851, 33005), False, 'from tensorflow.keras import layers\n')]
|
#!/usr/bin/python3
# max_score:392
import sys
import random
import platform
from optparse import OptionParser
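# Platform-specific non-blocking keyboard input: msvcrt on Windows, select() elsewhere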
if platform.system() == "Windows":
import msvcrt
import time
else:
from select import select
try:
import enquiries
choose = enquiries.choose
except ImportError:  # Offer a fallback menu if the enquiries module is not installed,
    # since that module is likewise not available on every platform
def choose(query, options):
print(query)
print(
"\n".join(["{}. {}".format(i + 1, options[i]) for i in range(len(options))])
)
response = int(input("> "))
return options[response - 1]
morse = {
"a": ".-",
"b": "-...",
"c": "-.-.",
"d": "-..",
"e": ".",
"f": "..-.",
"g": "--.",
"h": "....",
"i": "..",
"j": ".---",
"k": "-.-",
"l": ".-..",
"m": "--",
"n": "-.",
"o": "---",
"p": ".--.",
"q": "--.-",
"r": ".-.",
"s": "...",
"t": "-",
"u": "..-",
"v": "...-",
"w": ".--",
"x": "-..-",
"y": "-.--",
"z": "--..",
"1": ".----",
"2": "..---",
"3": "...--",
"4": "....-",
"5": ".....",
"6": "-....",
"7": "--...",
"8": "---..",
"9": "----.",
"0": "-----",
}
mnemotechnique = {
"a": "Allô ?",
"b": "Bonaparte",
"c": "Coca-Cola",
"d": "Dorémi",
"e": "Euh..",
"f": "Farandole",
"g": "Golgotha",
"h": "Himalaya",
"i": "Ici",
"j": "Jablonovo",
"k": "Koalo",
"l": "Limonade",
"m": "Moto",
"n": "Noé",
"o": "Oporto",
"p": "Philosophe",
"q": "Quocorico",
"r": "Ricola",
"s": "Sapristi",
"t": "Thon",
"u": "Union",
"v": "Valparéso",
"w": "Wagon Long",
"x": "Xtrocadéro",
"y": "Yomamoto",
"z": "Zoro est là",
}
mnemoschematik = {
".-": "Allô ?",
"-...": "Bonaparte",
"-.-.": "Coca-Cola",
"-..": "Do-ré-mi",
".": "Euh..",
"..-.": "Farandole",
"--.": "Golgotha",
"....": "Himalaya",
"..": "Ici",
".---": "Jablonovo",
"-.-": "Koalo",
".-..": "Limonade",
"--": "Moto",
"-.": "Noël",
"---": "Oporto",
".--.": "Philosophe",
"--.-": "Quocorico",
".-.": "Ricola",
"...": "Sapristi",
"-": "Thon",
"..-": "Union",
"...-": "Valparéso",
".--": "Wagon Long",
"-..-": "Xtrocadéro",
"-.--": "Yomamoto",
"--..": "Zoro est là",
}
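# Pool of characters (letters and digits) that the quizzes draw from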
crs = [j for j in morse.keys()]
def diff(a, b):
"""
    Return the difference between strings a and b, colour-coded with ANSI escape sequences.
"""
if platform.system() != "Windows":
s = ""
if len(a) > len(b):
b = b + " " * (len(a) - len(b))
if len(b) > len(a):
a = a + " " * (len(b) - len(a))
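        # Mismatched characters are highlighted on a red background, matching ones on green (ANSI escape codes)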
for i in range(len(a)):
if a[i] != b[i]:
s += "\x1b[7;30;41m" + b[i]
else:
s += "\x1b[0m\x1b[7;30;42m" + b[i]
s += "\x1b[0m"
return s
else:
return b
def multi_quiz(length=10, timed=True, timeout=5):
"""
    With or without a timer, this quiz presents "length" Morse-encoded characters to decode.
"""
score = 0
while True:
clear_text = "".join([random.choice(crs) for i in range(length)])
encoded = "/".join([morse[i] for i in clear_text])
if timed:
s = timed_input(encoded, timeout=length * timeout)
else:
s = input(encoded + "\n").lower()
if s == TimeoutError:
print("\nTemps écoulé, sois plus rapide la prochaine fois !")
elif s != clear_text:
print(f"Faux ! La bonne réponse : {clear_text}")
print(f"Votre réponse était : {diff(clear_text,s)}")
print("Votre score est de {} points".format(score))
break
else:
score += length
print("Bonne réponse ! Votre score est de {} points".format(score))
def int_quiz(timed=True, timeout=10):
"""
    With or without a timer, this quiz presents a letter or digit to encode in Morse.
"""
score = 0
while True:
clear_text = random.choice(crs)
if timed:
s = timed_input(clear_text.upper(), timeout=timeout)
else:
s = input(clear_text.upper() + "\n> ")
if s == TimeoutError:
print("Temps écoulé, sois plus rapide la prochaine fois !")
elif s != morse[clear_text]:
if clear_text in mnemotechnique.keys():
print(
"Faux ! La bonne réponse est {} [{}]".format(
morse[clear_text], mnemotechnique[clear_text]
)
)
else:
print("Faux ! La bonne réponse est {}".format(morse[clear_text]))
print("Votre score est de {} points".format(score))
break
else:
score += 1
print("Bonne réponse ! Votre score est de {} points".format(score))
def quiz(timed=True, timeout=10):
"""
    With or without a timer, this quiz presents a Morse-encoded character to decode.
"""
score = 0
while True:
clear_text = random.choice(crs)
if timed:
s = timed_input(str(morse[clear_text]), timeout=timeout)
else:
s = input(str(morse[clear_text]) + "\n> ")
if s == TimeoutError:
print("Temps écoulé, sois plus rapide la prochaine fois !")
elif s != clear_text:
if clear_text in mnemotechnique.keys():
print(
"Faux ! La bonne réponse est {}[{}]".format(
clear_text, mnemotechnique[clear_text]
)
)
else:
print("Faux ! La bonne réponse est {}".format(clear_text))
print("Votre score est de {} points".format(score))
break
else:
score += 1
print("Bonne réponse ! Votre score est de {} points".format(score))
def quiz_junior(timed=True, timeout=10):
"""
    With or without a timer, this quiz presents a mnemonic from which the Morse code must be recovered.
"""
score = 0
crs = [j for j in mnemoschematik.keys()]
while True:
memo = random.choice(crs)
if timed:
s = timed_input(mnemoschematik[memo], timeout=timeout)
else:
s = input(mnemoschematik[memo] + "\n> ")
if s == TimeoutError:
print("tmps écoulé, sois plus rapide la prochaine fois !")
elif s != memo:
print("\x1b[0;37;41mFaux ! La bonne réponse est {}\x1b[0m".format(memo))
print("Votre score est de {} points".format(score))
break
else:
score += 1
print("Bonne réponse ! Votre score est de {} points".format(score))
def timed_input(prompt, timeout=10):
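    """
    Prompt for one line of input with a time limit.
    Uses select() on POSIX and msvcrt polling on Windows; returns the TimeoutError
    class itself when the delay expires, otherwise the text that was entered
    (lower-cased on POSIX).
    """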
if platform.system() != "Windows":
print(prompt)
sys.stdin.flush()
rlist, _, _ = select([sys.stdin], [], [], timeout)
if rlist:
s = sys.stdin.readline()
return s[:-1].lower()
else:
return TimeoutError
else:
sys.stdout.write(prompt + "\n")
sys.stdout.flush()
endtime = time.monotonic() + timeout
result = []
while time.monotonic() < endtime:
if msvcrt.kbhit():
result.append(msvcrt.getwche())
if result[-1] == "\r":
return "".join(result[:-1])
time.sleep(0.04)
return TimeoutError
parser = OptionParser()
parser.add_option(
"-g",
"--gamemode",
dest="gamemode",
help="choose GAMEMODE",
type="string",
metavar="(JUNIOR|INTERMEDIAIRE|NORMAL|EXPERT)",
)
parser.add_option(
"-T",
"--timeout",
action="store",
dest="timeout",
type="int",
help="set TIMEOUT",
metavar="TIMEOUT",
default=5,
)
parser.add_option(
"-d",
"--disable-timer",
action="store_false",
dest="timed",
help="Disable timer",
default=True,
)
parser.add_option(
"-l",
"--length",
dest="length",
help="Nombre d'éléments, disponible uniquement pour le mode de jeu EXPERT",
action="store",
type="int",
metavar="NOMBRE D'ELEMENTS",
default=10,
)
(options, args) = parser.parse_args()
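# Display names for the interactive menu; when --gamemode is given on the CLI, the uppercase keys below are used instead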
gamemodes = {
"Junior": quiz_junior,
"Intermédiaire": int_quiz,
"Normal": quiz,
"Expert": multi_quiz,
}
if options.gamemode is not None:
gamemodes = {
"JUNIOR": quiz_junior,
"INTERMEDIAIRE": int_quiz,
"NORMAL": quiz,
"EXPERT": multi_quiz,
}
if options.gamemode not in gamemodes:
print(f"Option not available gamemode {options.gamemode}")
raise ValueError
else:
gm = gamemodes[options.gamemode]
else:
gm = gamemodes[
choose("Choisissez votre mode de jeu", [i for i in gamemodes.keys()])
]
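# Keep replaying the selected quiz; each game ends when the player gives a wrong answer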
while True:
if gm == multi_quiz:
gm(timed=options.timed, timeout=options.timeout, length=options.length)
else:
gm(timed=options.timed, timeout=options.timeout)
|
[
"sys.stdin.flush",
"sys.stdout.write",
"msvcrt.kbhit",
"optparse.OptionParser",
"msvcrt.getwche",
"random.choice",
"time.sleep",
"select.select",
"time.monotonic",
"sys.stdout.flush",
"platform.system",
"sys.stdin.readline"
] |
[((7643, 7657), 'optparse.OptionParser', 'OptionParser', ([], {}), '()\n', (7655, 7657), False, 'from optparse import OptionParser\n'), ((114, 131), 'platform.system', 'platform.system', ([], {}), '()\n', (129, 131), False, 'import platform\n'), ((2598, 2615), 'platform.system', 'platform.system', ([], {}), '()\n', (2613, 2615), False, 'import platform\n'), ((4177, 4195), 'random.choice', 'random.choice', (['crs'], {}), '(crs)\n', (4190, 4195), False, 'import random\n'), ((5224, 5242), 'random.choice', 'random.choice', (['crs'], {}), '(crs)\n', (5237, 5242), False, 'import random\n'), ((6325, 6343), 'random.choice', 'random.choice', (['crs'], {}), '(crs)\n', (6338, 6343), False, 'import random\n'), ((6951, 6968), 'platform.system', 'platform.system', ([], {}), '()\n', (6966, 6968), False, 'import platform\n'), ((7013, 7030), 'sys.stdin.flush', 'sys.stdin.flush', ([], {}), '()\n', (7028, 7030), False, 'import sys\n'), ((7053, 7089), 'select.select', 'select', (['[sys.stdin]', '[]', '[]', 'timeout'], {}), '([sys.stdin], [], [], timeout)\n', (7059, 7089), False, 'from select import select\n'), ((7243, 7274), 'sys.stdout.write', 'sys.stdout.write', (["(prompt + '\\n')"], {}), "(prompt + '\\n')\n", (7259, 7274), False, 'import sys\n'), ((7283, 7301), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (7299, 7301), False, 'import sys\n'), ((7124, 7144), 'sys.stdin.readline', 'sys.stdin.readline', ([], {}), '()\n', (7142, 7144), False, 'import sys\n'), ((7320, 7336), 'time.monotonic', 'time.monotonic', ([], {}), '()\n', (7334, 7336), False, 'import time\n'), ((7381, 7397), 'time.monotonic', 'time.monotonic', ([], {}), '()\n', (7395, 7397), False, 'import time\n'), ((7424, 7438), 'msvcrt.kbhit', 'msvcrt.kbhit', ([], {}), '()\n', (7436, 7438), False, 'import msvcrt\n'), ((7587, 7603), 'time.sleep', 'time.sleep', (['(0.04)'], {}), '(0.04)\n', (7597, 7603), False, 'import time\n'), ((3261, 3279), 'random.choice', 'random.choice', (['crs'], {}), '(crs)\n', (3274, 3279), False, 'import random\n'), ((7470, 7486), 'msvcrt.getwche', 'msvcrt.getwche', ([], {}), '()\n', (7484, 7486), False, 'import msvcrt\n')]
|
# Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""
migrate urls to documents
Create Date: 2017-05-02 14:06:36.936410
"""
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name
from ggrc.migrations.utils import url_util
# revision identifiers, used by Alembic.
revision = '377d935e1b21'
down_revision = '55f583313670'
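# Object types whose reference URLs are migrated to documents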
HYPERLINKED_OBJECTS = {
'Risk': 'risks',
'Threat': 'threats'
}
HYPERLINKED_OBJ_TYPES = set(HYPERLINKED_OBJECTS)
def upgrade():
"""Upgrade database schema and/or data, creating a new revision."""
url_util.migrate_urls_to_documents(HYPERLINKED_OBJECTS)
def downgrade():
"""Downgrade database schema and/or vdata back to the previous revision."""
url_util.delete_reference_urls(HYPERLINKED_OBJ_TYPES)
|
[
"ggrc.migrations.utils.url_util.delete_reference_urls",
"ggrc.migrations.utils.url_util.migrate_urls_to_documents"
] |
[((655, 710), 'ggrc.migrations.utils.url_util.migrate_urls_to_documents', 'url_util.migrate_urls_to_documents', (['HYPERLINKED_OBJECTS'], {}), '(HYPERLINKED_OBJECTS)\n', (689, 710), False, 'from ggrc.migrations.utils import url_util\n'), ((810, 863), 'ggrc.migrations.utils.url_util.delete_reference_urls', 'url_util.delete_reference_urls', (['HYPERLINKED_OBJ_TYPES'], {}), '(HYPERLINKED_OBJ_TYPES)\n', (840, 863), False, 'from ggrc.migrations.utils import url_util\n')]
|
import tensorflow as tf
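# Multiply a 2x3 constant matrix by a 3x2 constant matrix; log_device_placement reports which device runs each op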
a = tf.constant([1.0, 2.0, 3.0, 4.0, 5.0, 6.0], shape=[2, 3], name='a')
b = tf.constant([1.0, 2.0, 3.0, 4.0, 5.0, 6.0], shape=[3, 2], name='b')
c = tf.matmul(a, b)
# Creates a session with log_device_placement set to True.
with tf.Session(config=tf.ConfigProto(log_device_placement=True)) as sess:
print(sess.run(c))
""" prints:
Device mapping:
/job:localhost/replica:0/task:0/gpu:0 -> device: 0, name: GRID K520, pci bus id: 0000:00:03.0
/job:localhost/replica:0/task:0/gpu:1 -> device: 1, name: GRID K520, pci bus id: 0000:00:04.0
/job:localhost/replica:0/task:0/gpu:2 -> device: 2, name: GRID K520, pci bus id: 0000:00:05.0
/job:localhost/replica:0/task:0/gpu:3 -> device: 3, name: GRID K520, pci bus id: 0000:00:06.0
I tensorflow/core/common_runtime/direct_session.cc:175] Device mapping:
/job:localhost/replica:0/task:0/gpu:0 -> device: 0, name: GRID K520, pci bus id: 0000:00:03.0
/job:localhost/replica:0/task:0/gpu:1 -> device: 1, name: GRID K520, pci bus id: 0000:00:04.0
/job:localhost/replica:0/task:0/gpu:2 -> device: 2, name: GRID K520, pci bus id: 0000:00:05.0
/job:localhost/replica:0/task:0/gpu:3 -> device: 3, name: GRID K520, pci bus id: 0000:00:06.0
MatMul: /job:localhost/replica:0/task:0/gpu:0
I tensorflow/core/common_runtime/simple_placer.cc:818] MatMul: /job:localhost/replica:0/task:0/gpu:0
b: /job:localhost/replica:0/task:0/gpu:0
I tensorflow/core/common_runtime/simple_placer.cc:818] b: /job:localhost/replica:0/task:0/gpu:0
a: /job:localhost/replica:0/task:0/gpu:0
I tensorflow/core/common_runtime/simple_placer.cc:818] a: /job:localhost/replica:0/task:0/gpu:0
[[ 22. 28.]
[ 49. 64.]]
"""
|
[
"tensorflow.matmul",
"tensorflow.constant",
"tensorflow.ConfigProto"
] |
[((29, 96), 'tensorflow.constant', 'tf.constant', (['[1.0, 2.0, 3.0, 4.0, 5.0, 6.0]'], {'shape': '[2, 3]', 'name': '"""a"""'}), "([1.0, 2.0, 3.0, 4.0, 5.0, 6.0], shape=[2, 3], name='a')\n", (40, 96), True, 'import tensorflow as tf\n'), ((101, 168), 'tensorflow.constant', 'tf.constant', (['[1.0, 2.0, 3.0, 4.0, 5.0, 6.0]'], {'shape': '[3, 2]', 'name': '"""b"""'}), "([1.0, 2.0, 3.0, 4.0, 5.0, 6.0], shape=[3, 2], name='b')\n", (112, 168), True, 'import tensorflow as tf\n'), ((173, 188), 'tensorflow.matmul', 'tf.matmul', (['a', 'b'], {}), '(a, b)\n', (182, 188), True, 'import tensorflow as tf\n'), ((271, 312), 'tensorflow.ConfigProto', 'tf.ConfigProto', ([], {'log_device_placement': '(True)'}), '(log_device_placement=True)\n', (285, 312), True, 'import tensorflow as tf\n')]
|
import re
boxRegex = re.compile(r"(\d+)x(\d+)x(\d+)")
def day2(fileName):
totalPaper = 0
totalRibbon = 0
with open(fileName) as infile:
for line in infile:
match = boxRegex.match(line)
if match:
sides = sorted(int(side) for side in match.group(1, 2, 3))
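                # Paper: surface area 2lw + 2wh + 2hl plus slack equal to the smallest face (sides[0] * sides[1])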
totalPaper += 3 * sides[0] * sides[1] + 2 * sides[1] * sides[2] + 2 * sides[2] * sides[0]
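                # Ribbon: perimeter of the smallest face plus the box volume for the bow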
totalRibbon += 2 * sides[0] + 2 * sides[1] + sides[0] * sides[1] * sides[2]
print(totalPaper)
print(totalRibbon)
if __name__ == "__main__":
day2("2.txt")
|
[
"re.compile"
] |
[((22, 56), 're.compile', 're.compile', (['"""(\\\\d+)x(\\\\d+)x(\\\\d+)"""'], {}), "('(\\\\d+)x(\\\\d+)x(\\\\d+)')\n", (32, 56), False, 'import re\n')]
|
# Examples of math module functions
import math
import log
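# "log" is assumed to be a local helper module that mirrors the standard logging API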
# Set the logging output level
log.basicConfig(level=log.INFO)
math_log = log.getLogger("Math")
# Value of x raised to the power y
result = math.pow(2,3)
math_log.info(result)
# 8.0
# Smallest integer greater than or equal to x; if x is an integer, x itself is returned
result = math.ceil(4.12)
math_log.info(result)
# 5
# Apply the sign of y to x; 0 may also be used
result = math.copysign(2,-3)
math_log.info(result)
# -2.0
# Cosine of x, where x must be in radians
result = math.cos(math.pi/4)
math_log.info(result)
# 0.7071067811865476
# Convert x from radians to degrees
result = math.degrees(math.pi/4)
math_log.info(result)
# 45.0
# e is a mathematical constant
result = math.e
math_log.info(result)
# 2.718281828459045
# exp() returns math.e (about 2.71828) raised to the power x
result = math.exp(2)
math_log.info(result)
# 7.38905609893065
# fabs() returns the absolute value of x
result = math.fabs(-0.03)
math_log.info(result)
# 0.03
# floor() returns the largest integer less than or equal to x; if x is an integer, x itself is returned
result = math.floor(4.999)
math_log.info(result)
# 4
# fmod() returns the remainder of x/y as a float
result = math.fmod(20,3)
math_log.info(result)
# 2.0
# frexp() returns a tuple (m, e) such that x == m * 2**e, where e is an integer and 0.5 <= abs(m) < 1; both are 0 when x is 0
result = math.frexp(75)
math_log.info(result)
# (0.5859375, 7)
# isfinite() returns True if x is neither an infinity nor NaN, otherwise False
result = math.isfinite(0.1)
math_log.info(result)
# True
# isinf() returns True if x is positive or negative infinity, otherwise False
result = math.isinf(234)
math_log.info(result)
# False
# isnan() returns True if x is NaN (not a number), otherwise False
result = math.isnan(23)
math_log.info(result)
# False
# ldexp() returns x * (2**i)
result = math.ldexp(5,5)
math_log.info(result)
# 160.0
# modf() returns a tuple of the fractional and integer parts of x
result = math.modf(math.pi)
math_log.info(result)
# (0.14159265358979312, 3.0)
# pi: numeric constant, the ratio of a circle's circumference to its diameter
result = math.pi
math_log.info(result)
# 3.141592653589793
# sin() returns the sine of x (x in radians)
result = math.sin(math.pi/4)
math_log.info(result)
# 0.7071067811865476
# sqrt() returns the square root of x
result = math.sqrt(100)
math_log.info(result)
# 10.0
# tan() returns the tangent of x (x in radians)
result = math.tan(math.pi/4)
math_log.info(result)
# 0.9999999999999999
# trunc() returns the integer part of x
result = math.trunc(6.789)
math_log.info(result)
# 6
|
[
"math.isinf",
"math.copysign",
"math.frexp",
"math.ldexp",
"math.fmod",
"math.pow",
"math.modf",
"log.basicConfig",
"math.cos",
"math.trunc",
"math.isnan",
"math.sqrt",
"math.ceil",
"math.sin",
"math.degrees",
"math.exp",
"math.fabs",
"math.tan",
"log.getLogger",
"math.floor",
"math.isfinite"
] |
[((51, 82), 'log.basicConfig', 'log.basicConfig', ([], {'level': 'log.INFO'}), '(level=log.INFO)\n', (66, 82), False, 'import log\n'), ((97, 118), 'log.getLogger', 'log.getLogger', (['"""Math"""'], {}), "('Math')\n", (110, 118), False, 'import log\n'), ((141, 155), 'math.pow', 'math.pow', (['(2)', '(3)'], {}), '(2, 3)\n', (149, 155), False, 'import math\n'), ((223, 238), 'math.ceil', 'math.ceil', (['(4.12)'], {}), '(4.12)\n', (232, 238), False, 'import math\n'), ((295, 315), 'math.copysign', 'math.copysign', (['(2)', '(-3)'], {}), '(2, -3)\n', (308, 315), False, 'import math\n'), ((369, 390), 'math.cos', 'math.cos', (['(math.pi / 4)'], {}), '(math.pi / 4)\n', (377, 390), False, 'import math\n'), ((455, 480), 'math.degrees', 'math.degrees', (['(math.pi / 4)'], {}), '(math.pi / 4)\n', (467, 480), False, 'import math\n'), ((619, 630), 'math.exp', 'math.exp', (['(2)'], {}), '(2)\n', (627, 630), False, 'import math\n'), ((698, 714), 'math.fabs', 'math.fabs', (['(-0.03)'], {}), '(-0.03)\n', (707, 714), False, 'import math\n'), ((792, 809), 'math.floor', 'math.floor', (['(4.999)'], {}), '(4.999)\n', (802, 809), False, 'import math\n'), ((872, 888), 'math.fmod', 'math.fmod', (['(20)', '(3)'], {}), '(20, 3)\n', (881, 888), False, 'import math\n'), ((1057, 1071), 'math.frexp', 'math.frexp', (['(75)'], {}), '(75)\n', (1067, 1071), False, 'import math\n'), ((1163, 1181), 'math.isfinite', 'math.isfinite', (['(0.1)'], {}), '(0.1)\n', (1176, 1181), False, 'import math\n'), ((1262, 1277), 'math.isinf', 'math.isinf', (['(234)'], {}), '(234)\n', (1272, 1277), False, 'import math\n'), ((1349, 1363), 'math.isnan', 'math.isnan', (['(23)'], {}), '(23)\n', (1359, 1363), False, 'import math\n'), ((1426, 1442), 'math.ldexp', 'math.ldexp', (['(5)', '(5)'], {}), '(5, 5)\n', (1436, 1442), False, 'import math\n'), ((1510, 1528), 'math.modf', 'math.modf', (['math.pi'], {}), '(math.pi)\n', (1519, 1528), False, 'import math\n'), ((1684, 1705), 'math.sin', 'math.sin', (['(math.pi / 4)'], {}), '(math.pi / 4)\n', (1692, 1705), False, 'import math\n'), ((1772, 1786), 'math.sqrt', 'math.sqrt', (['(100)'], {}), '(100)\n', (1781, 1786), False, 'import math\n'), ((1847, 1868), 'math.tan', 'math.tan', (['(math.pi / 4)'], {}), '(math.pi / 4)\n', (1855, 1868), False, 'import math\n'), ((1938, 1955), 'math.trunc', 'math.trunc', (['(6.789)'], {}), '(6.789)\n', (1948, 1955), False, 'import math\n')]
|
import board
import neopixel
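# Drive a strip of 30 NeoPixels on pin D6 at half brightness; with auto_write=False, updates are pushed explicitly via show()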
pixels = neopixel.NeoPixel(board.D6, 30, brightness=0.5, auto_write=False)
pixels.fill((255, 0, 0))
pixels.show()
|
[
"neopixel.NeoPixel"
] |
[((38, 103), 'neopixel.NeoPixel', 'neopixel.NeoPixel', (['board.D6', '(30)'], {'brightness': '(0.5)', 'auto_write': '(False)'}), '(board.D6, 30, brightness=0.5, auto_write=False)\n', (55, 103), False, 'import neopixel\n')]
|